This repository was archived by the owner on Jul 1, 2023. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 137
/
Copy path: Upsampling.swift
133 lines (118 loc) · 4.79 KB
/
Upsampling.swift
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
// Copyright 2019 The TensorFlow Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import _Differentiation
/// An upsampling layer for 1-D inputs.
@frozen
public struct UpSampling1D<Scalar: TensorFlowFloatingPoint>: ParameterlessLayer {
  public typealias TangentVector = EmptyTangentVector

  /// The upsampling factor applied along the timestep axis.
  @noDerivative public let size: Int

  /// Creates an upsampling layer.
  ///
  /// - Parameter size: The upsampling factor for timesteps.
  public init(size: Int) {
    self.size = size
  }

  /// Returns the output obtained from applying the layer to the given input.
  ///
  /// - Parameter input: A rank-3 tensor laid out as `[batch, timesteps, channels]`
  ///   (inferred from how the three shape components are used below).
  /// - Returns: The input with every timestep replicated `size` consecutive
  ///   times, shaped `[batch, timesteps * size, channels]`.
  @differentiable
  public func callAsFunction(_ input: Tensor<Scalar>) -> Tensor<Scalar> {
    let inputShape = input.shape
    let batch = inputShape[0]
    let steps = inputShape[1]
    let features = inputShape[2]
    // Open a unit axis after the timestep axis and multiply by a ones tensor
    // so broadcasting duplicates each timestep `size` times; collapsing the
    // two middle axes yields the upsampled sequence.
    let broadcaster = Tensor<Scalar>(ones: [1, 1, size, 1], on: input.device)
    let expanded = input.reshaped(to: [batch, steps, 1, features]) * broadcaster
    return expanded.reshaped(to: [batch, steps * size, features])
  }
}
/// An upsampling layer for 2-D inputs.
@frozen
public struct UpSampling2D<Scalar: TensorFlowFloatingPoint>: ParameterlessLayer {
  public typealias TangentVector = EmptyTangentVector

  /// The upsampling factor applied along both spatial axes.
  @noDerivative public let size: Int

  /// Creates an upsampling layer.
  ///
  /// - Parameter size: The upsampling factor for rows and columns.
  public init(size: Int) {
    self.size = size
  }

  /// Returns the output obtained from applying the layer to the given input.
  ///
  /// - Parameter input: A rank-4 tensor laid out as `[batch, height, width, channels]`
  ///   (inferred from how the four shape components are used below).
  /// - Returns: The input with every row and column replicated `size` consecutive
  ///   times, shaped `[batch, height * size, width * size, channels]`.
  @differentiable
  public func callAsFunction(_ input: Tensor<Scalar>) -> Tensor<Scalar> {
    let inputShape = input.shape
    let batch = inputShape[0]
    let rows = inputShape[1]
    let columns = inputShape[2]
    let features = inputShape[3]
    // Open a unit axis after each spatial axis, then multiply by a ones
    // tensor so broadcasting duplicates every row and every column `size`
    // times; collapsing the paired axes yields the upsampled image.
    let broadcaster = Tensor<Scalar>(ones: [1, 1, size, 1, size, 1], on: input.device)
    let expanded =
      input.reshaped(to: [batch, rows, 1, columns, 1, features]) * broadcaster
    return expanded.reshaped(to: [batch, rows * size, columns * size, features])
  }
}
/// An upsampling layer for 3-D inputs.
@frozen
public struct UpSampling3D<Scalar: TensorFlowFloatingPoint>: ParameterlessLayer {
public typealias TangentVector = EmptyTangentVector
// The upsampling factor applied along each of the three spatial axes.
@noDerivative public let size: Int
/// Creates an upsampling layer.
///
/// - Parameter size: The upsampling factor for rows, columns, and depth.
public init(size: Int) {
self.size = size
}
/// Repeats the elements of a tensor along an axis, like `np.repeat`.
/// Function adapted from `def repeat_elements`:
/// https://github.com/tensorflow/tensorflow/blob/master/tensorflow/python/keras/backend.py
///
/// - Parameters:
///   - input: The tensor whose slices along `axis` are to be repeated.
///   - axis: The axis along which to repeat.
///   - count: The number of consecutive copies of each unit slice.
/// - Returns: A tensor whose extent along `axis` is `count` times the input's.
@differentiable
private func repeatingElements(
_ input: Tensor<Scalar>, alongAxis axis: Int, count: Int
) -> Tensor<Scalar> {
// Split into unit slices along `axis`, duplicate each slice `count` times
// in order, and concatenate everything back along the same axis.
let splits = _Raw.split(
splitDim: Tensor<Int32>(Int32(axis), on: input.device),
value: input,
numSplit: Int64(input.shape[axis]))
let repeated = splits.flatMap { x in Array(repeating: x, count: count) }
return Tensor<Scalar>(concatenating: repeated, alongAxis: axis)
}
// Custom derivative for `repeatingElements`: forward-mode copies slices, so
// the pullback must sum the cotangents of the `count` copies back into each
// original slice.
@derivative(of: repeatingElements)
private func _vjpRepeatingElements(
_ input: Tensor<Scalar>, alongAxis axis: Int, count: Int
) -> (value: Tensor<Scalar>, pullback: (Tensor<Scalar>) -> (TangentVector, Tensor<Scalar>)) {
let value = repeatingElements(input, alongAxis: axis, count: count)
return (
value,
{ v in
// Split the incoming cotangent into `input.shape[axis]` chunks; each
// chunk holds the `count` consecutive copies made from one original
// slice, so summing along `axis` accumulates their contributions.
let splits = _Raw.split(
splitDim: Tensor<Int32>(Int32(axis), on: v.device),
value: v,
numSplit: Int64(input.shape[axis]))
let summed = splits.map { x in x.sum(alongAxes: axis) }
let concatenated = Tensor<Scalar>(concatenating: summed, alongAxis: axis)
// The layer is parameterless, so the tangent w.r.t. `self` is `.zero`.
return (.zero, concatenated)
}
)
}
/// Returns the output obtained from applying the layer to the given input.
///
/// - Parameter input: The input to the layer; indexed along axes 1, 2, and 3
///   below, so it must have rank at least 4 (batch axis plus three spatial axes).
/// - Returns: The input with each of the three spatial axes upsampled by `size`.
@differentiable
public func callAsFunction(_ input: Tensor<Scalar>) -> Tensor<Scalar> {
// Repeat along each of the three spatial axes (1, 2, 3) in turn.
var result = repeatingElements(input, alongAxis: 1, count: size)
result = repeatingElements(result, alongAxis: 2, count: size)
result = repeatingElements(result, alongAxis: 3, count: size)
return result
}
}