//
// DeepNeuralNetwork.swift
// AIToolbox
//
// Created by Kevin Coble on 7/1/16.
// Copyright © 2016 Kevin Coble. All rights reserved.
//
import Foundation
import Accelerate
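
///  A fully-connected (feed-forward) layer operator for a DeepNetwork.  The weights are
///  stored row-major as a numNodes × (numInputs + 1) matrix, with the extra column of
///  each row holding that node's bias term.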
final public class DeepNeuralNetwork : DeepNetworkOperator
{
    var activation : NeuralActivationFunction
    var numInputs = 0
    var numNodes : Int
    var resultSize : DeepChannelSize
    var weights : [Float] = []
    var lastNodeSums : [Float]
    var lastOutputs : [Float]
    fileprivate var inputsWithBias : [Float] = []
    var weightAccumulations : [Float] = []

    public init(activation : NeuralActivationFunction, size: DeepChannelSize)
    {
        self.activation = activation
        self.resultSize = size

        //  Get the number of nodes
        numNodes = resultSize.totalSize

        //  Allocate the arrays for results
        lastNodeSums = [Float](repeating: 0.0, count: numNodes)
        lastOutputs = [Float](repeating: 0.0, count: numNodes)
    }
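
    ///  Restores an operator from a persistence dictionary (see getPersistenceDictionary()).
    ///  Expects the keys "activation", "numDimension", "dimensions", and "weights";
    ///  the input count is inferred from the weight count.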
    public init?(fromDictionary: [String: AnyObject])
    {
        //  Init for nil return (hopefully Swift 3 removes this need)
        resultSize = DeepChannelSize(dimensionCount: 0, dimensionValues: [])
        numNodes = 0
        weights = []
        lastNodeSums = []
        lastOutputs = []

        //  Get the activation type
        let activationTypeValue = fromDictionary["activation"] as? NSInteger
        if activationTypeValue == nil { return nil }
        let tempActivationType = NeuralActivationFunction(rawValue: activationTypeValue!)
        if (tempActivationType == nil) { return nil }
        activation = tempActivationType!

        //  Get the number of dimensions
        let dimensionValue = fromDictionary["numDimension"] as? NSInteger
        if dimensionValue == nil { return nil }
        let numDimensions = dimensionValue!

        //  Get the dimension levels
        let tempArray = getIntArray(fromDictionary, identifier: "dimensions")
        if (tempArray == nil) { return nil }
        let dimensions = tempArray!
        resultSize = DeepChannelSize(dimensionCount: numDimensions, dimensionValues: dimensions)

        //  Get the number of nodes
        numNodes = resultSize.totalSize

        //  Get the weights
        let tempWeights = getFloatArray(fromDictionary, identifier: "weights")
        if (tempWeights == nil) { return nil }
        weights = tempWeights!
        numInputs = (weights.count / numNodes) - 1

        //  Allocate the arrays for results
        lastNodeSums = [Float](repeating: 0.0, count: numNodes)
        lastOutputs = [Float](repeating: 0.0, count: numNodes)
    }

    public func getType() -> DeepNetworkOperatorType
    {
        return .feedForwardNetOperation
    }

    public func getDetails() -> String
    {
        var result = activation.getString() + " ["
        if (resultSize.numDimensions > 0) { result += "\(resultSize.dimensions[0])" }
        if (resultSize.numDimensions > 1) {
            for i in 1..<resultSize.numDimensions {
                result += ", \(resultSize.dimensions[i])"
            }
        }
        result += "]"
        return result
    }

    public func getResultingSize(_ inputSize: DeepChannelSize) -> DeepChannelSize
    {
        //  The input size does not affect the output size.  However, it does change the weight sizing
        let newInputCount = inputSize.totalSize
        if (newInputCount != numInputs) {
            numInputs = newInputCount
            initializeParameters()
        }
        return resultSize
    }
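
    ///  Initializes the weights with zero-mean Gaussians scaled by the fan-in:
    ///  σ = 1/√numInputs ('Xavier' initialization), or σ = √(2/numInputs) for
    ///  rectified-linear units (the 'He' variant), which keeps the variance of the
    ///  node sums roughly constant from layer to layer.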
    public func initializeParameters()
    {
        //  Allocate the weight array using 'Xavier' initialization
        let numWeights = (numInputs + 1) * numNodes     //  Add bias offset
        var weightDivisor: Float
        if (activation == .rectifiedLinear) {
            weightDivisor = 1 / sqrt(Float(numInputs) * 0.5)
        }
        else {
            weightDivisor = 1 / sqrt(Float(numInputs))
        }
        weights = []
        for _ in 0..<numWeights {
            weights.append(Gaussian.gaussianRandomFloat(0.0, standardDeviation: 1.0) / weightDivisor == 0 ? 0 : Gaussian.gaussianRandomFloat(0.0, standardDeviation: 1.0) * weightDivisor)
        }
    }
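
    ///  Computes the layer outputs h = f(W·[x, 1]), where the input vector is extended
    ///  with a constant 1 so the last column of each weight row acts as the bias term,
    ///  and f is the configured activation function.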
    public func feedForward(_ inputs: [Float], inputSize: DeepChannelSize) -> [Float]
    {
        //  Get inputs with a bias term
        inputsWithBias = inputs
        inputsWithBias.append(1.0)

        //  Multiply the weight matrix by the inputs to get the node sum values
        vDSP_mmul(weights, 1, inputsWithBias, 1, &lastNodeSums, 1, vDSP_Length(numNodes), 1, vDSP_Length(numInputs+1))

        //  Perform the non-linearity
        switch (activation) {
        case .none:
            lastOutputs = lastNodeSums
            break
        case .hyperbolicTangent:
            lastOutputs = lastNodeSums.map({ tanh($0) })
            break
        case .sigmoidWithCrossEntropy:
            fallthrough
        case .sigmoid:
            lastOutputs = lastNodeSums.map( { 1.0 / (1.0 + exp(-$0)) } )
            break
        case .rectifiedLinear:
            lastOutputs = lastNodeSums.map( { $0 < 0 ? 0.0 : $0 } )
            break
        case .softSign:
            lastOutputs = lastNodeSums.map( { $0 / (1.0 + abs($0)) } )      //  softsign is x / (1 + |x|); the backpropagation code below assumes this form
            break
        case .softMax:
            lastOutputs = lastNodeSums.map( { exp($0) } )       //  Only the exponentials are computed here; normalization over the nodes is not done by this operator
            break
        }

        return lastOutputs
    }

    public func getResults() -> [Float]
    {
        return lastOutputs
    }

    public func getResultSize() -> DeepChannelSize
    {
        return resultSize
    }

    public func getResultRange() -> (minimum: Float, maximum: Float)
    {
        if activation == .hyperbolicTangent {
            return (minimum: -1.0, maximum: 1.0)
        }
        return (minimum: 0.0, maximum: 1.0)
    }

    public func startBatch()
    {
        //  Clear the weight accumulations
        weightAccumulations = [Float](repeating: 0.0, count: weights.count)
    }
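
    ///  Backpropagation for the layer.  With the forward pass h = f(z), z = W·[x, 1],
    ///  the chain rule gives
    ///      𝟃E/𝟃z = 𝟃E/𝟃h ⋅ f′(z)
    ///      𝟃E/𝟃W = 𝟃E/𝟃z ⋅ [x, 1]ᵀ     (accumulated over the batch)
    ///      𝟃E/𝟃x = Wᵀ ⋅ 𝟃E/𝟃z          (returned to the upstream operator)
    ///  Each f′ below is written in terms of the stored outputs h rather than z,
    ///  e.g. tanh′(z) = 1 - h² and sigmoid′(z) = h(1 - h).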
    //  𝟃E/𝟃h comes in, 𝟃E/𝟃x goes out
    public func backPropogateGradient(_ upStreamGradient: [Float]) -> [Float]
    {
        //  The forward equation is h = fn(Wx), where fn is the activation function
        //  The 𝟃E/𝟃h comes in; we need to calculate 𝟃E/𝟃W and 𝟃E/𝟃x
        //      𝟃E/𝟃W = 𝟃E/𝟃h ⋅ 𝟃h/𝟃z ⋅ 𝟃z/𝟃W = upStreamGradient ⋅ activation′ ⋅ input

        //  Get 𝟃E/𝟃z
        var 𝟃E𝟃z : [Float]
        switch (activation) {
        case .none:
            𝟃E𝟃z = upStreamGradient
            break
        case .hyperbolicTangent:
            //  tanh′(z) = 1 - h²
            𝟃E𝟃z = upStreamGradient
            for index in 0..<lastOutputs.count {
                𝟃E𝟃z[index] *= (1 - lastOutputs[index] * lastOutputs[index])
            }
            break
        case .sigmoidWithCrossEntropy:
            fallthrough
        case .sigmoid:
            //  sigmoid′(z) = h(1 - h)
            𝟃E𝟃z = upStreamGradient
            for index in 0..<lastOutputs.count {
                𝟃E𝟃z[index] *= (lastOutputs[index] - (lastOutputs[index] * lastOutputs[index]))
            }
            break
        case .rectifiedLinear:
            //  The gradient is zero where the node sum was negative (the output was clamped to zero)
            𝟃E𝟃z = upStreamGradient
            for index in 0..<lastOutputs.count {
                if (lastNodeSums[index] < 0.0) { 𝟃E𝟃z[index] = 0.0 }
            }
            break
        case .softSign:
            𝟃E𝟃z = upStreamGradient
            var z : Float
            //  Reconstitute z from h, then apply softsign′(z) = 1 / (1 + |z|)²
            for index in 0..<lastOutputs.count {
                if (lastOutputs[index] < 0) {       //  Negative z, so |z| = -z
                    z = lastOutputs[index] / (1.0 + lastOutputs[index])
                    𝟃E𝟃z[index] /= ((1.0 - z) * (1.0 - z))
                }
                else {                              //  Positive z
                    z = lastOutputs[index] / (1.0 - lastOutputs[index])
                    𝟃E𝟃z[index] /= ((1.0 + z) * (1.0 + z))
                }
            }
            break
        case .softMax:
            //  Should not get here - softmax is not allowed except on the final layer
            𝟃E𝟃z = upStreamGradient
            break
        }

        //  Get 𝟃E/𝟃W.  𝟃E/𝟃W = 𝟃E/𝟃z ⋅ 𝟃z/𝟃W = 𝟃E𝟃z ⋅ inputsWithBias
        var weightChange = [Float](repeating: 0.0, count: weights.count)
        vDSP_mmul(𝟃E𝟃z, 1, inputsWithBias, 1, &weightChange, 1, vDSP_Length(numNodes), vDSP_Length(numInputs+1), 1)
        vDSP_vadd(weightChange, 1, weightAccumulations, 1, &weightAccumulations, 1, vDSP_Length(weightChange.count))

        //  Get 𝟃E/𝟃x.  𝟃E/𝟃x = 𝟃E/𝟃z ⋅ 𝟃z/𝟃x = 𝟃E𝟃z ⋅ weights
        var downStreamGradient = [Float](repeating: 0.0, count: numInputs+1)    //  One extra entry for the bias term in the weights
        vDSP_mmul(𝟃E𝟃z, 1, weights, 1, &downStreamGradient, 1, 1, vDSP_Length(numInputs+1), vDSP_Length(numNodes))
        downStreamGradient = Array(downStreamGradient[0..<numInputs])       //  Size for the inputs, dropping the bias entry
        return downStreamGradient
    }
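
    ///  Applies the accumulated batch gradient:  W ← λW - η⋅𝟃E/𝟃W, where λ is the
    ///  weight-decay multiplier (1.0 disables decay) and η is the training rate.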
    public func updateWeights(_ trainingRate : Float, weightDecay: Float)
    {
        //  If there is a decay factor, use it
        if (weightDecay != 1.0) {
            var λ = weightDecay     //  Needed for unsafe pointer conversion
            vDSP_vsmul(weights, 1, &λ, &weights, 1, vDSP_Length(weights.count))
        }

        //  Subtract the weight changes from the weight matrix (W = W - η∇)
        var η = -trainingRate       //  Needed for unsafe pointer conversion
        vDSP_vsma(weightAccumulations, 1, &η, weights, 1, &weights, 1, vDSP_Length(weights.count))
    }
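
    ///  Numerically verifies the analytical gradients using the central-difference
    ///  approximation 𝟃E/𝟃w ≈ (E(w + ε) - E(w - ε)) / 2ε, returning false if any
    ///  estimate differs from the accumulated gradient by more than Δ.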
    public func gradientCheck(ε: Float, Δ: Float, network: DeepNetwork) -> Bool
    {
        var result = true

        //  Iterate through each parameter
        for index in 0..<weights.count {
            let oldValue = weights[index]

            //  Get the network loss with a small addition to the parameter
            weights[index] += ε
            network.feedForward()
            let plusLoss = network.getResultLoss()

            //  Get the network loss with a small subtraction from the parameter
            weights[index] = oldValue - ε
            network.feedForward()
            let minusLoss = network.getResultLoss()
            weights[index] = oldValue

            //  Iterate over the results
            for resultIndex in 0..<plusLoss.count {
                //  Get the numerical gradient estimate  𝟃E/𝟃W
                let gradient = (plusLoss[resultIndex] - minusLoss[resultIndex]) / (2.0 * ε)

                //  Compare with the analytical gradient
                let difference = abs(gradient - weightAccumulations[index])
                if (difference > Δ) { result = false }
            }
        }

        return result
    }
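
    ///  Serializes the operator to a dictionary with the same keys read by
    ///  init?(fromDictionary:) - "activation", "numDimension", "dimensions", and "weights".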
    public func getPersistenceDictionary() -> [String: AnyObject]
    {
        var resultDictionary : [String: AnyObject] = [:]

        //  Set the activation type
        resultDictionary["activation"] = activation.rawValue as AnyObject?

        //  Set the number of dimensions
        resultDictionary["numDimension"] = resultSize.numDimensions as AnyObject?

        //  Set the dimension levels
        resultDictionary["dimensions"] = resultSize.dimensions as AnyObject?

        //  Set the weights
        resultDictionary["weights"] = weights as AnyObject?

        return resultDictionary
    }
}
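
//  A minimal usage sketch (not part of the library - the calls below use only the
//  API defined in this file, assuming a 1-dimensional DeepChannelSize):
//
//      let inputSize = DeepChannelSize(dimensionCount: 1, dimensionValues: [4])
//      let layer = DeepNeuralNetwork(activation: .sigmoid,
//                                    size: DeepChannelSize(dimensionCount: 1, dimensionValues: [2]))
//      _ = layer.getResultingSize(inputSize)       //  Sizes and initializes the weights for 4 inputs
//      let outputs = layer.feedForward([0.1, 0.5, -0.2, 0.9], inputSize: inputSize)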