
Commit a8d1d57

Initial commit
0 parents  commit a8d1d57

File tree

11 files changed: +495 -0 lines changed


.gitignore

Lines changed: 3 additions & 0 deletions
@@ -0,0 +1,3 @@
Raw/*
*.psd
*.pdf

LICENSE

Lines changed: 21 additions & 0 deletions
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2018 Ali Ashraf

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

README.md

Lines changed: 72 additions & 0 deletions
@@ -0,0 +1,72 @@
# Dee.py

## A bare-metal deep neural network library for fun, learning, and experimentation

<p align="center">
<img src="promo.png">
</p>

## So what does it do?

This library allows you to make and train neural networks with any number of layers and neurons pretty easily:

```py
# make a network with five hidden layers of 5, 3, 4, 3, 5 neurons and 2 output nodes
network = Dee([5, 3, 4, 3, 5], 2)

# train the network with X (input), y (output), and custom parameters
network.train(X, y, epochs=1000, learningRate=0.01, batchSize=20)
```
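
Once trained, the network can score new inputs with `predict`, which returns per-class probabilities; a minimal sketch based on example-xor.py:

```py
# probability that the test row [0, 1] belongs to class 1
print(network.predict([[0, 1]])[:, 1])
```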

## Purpose

This library was designed for learning and experimentation. The source is almost 200 lines of core code, so you can dig deep, tune parameters, apply custom functions, and visualize the results in no time. Production-ready deep learning frameworks such as Theano, Keras, and TensorFlow are available, but their implementations are pretty complicated and certainly not the easiest place to jump-start from.

## XOR Example

```py
from dee import Dee

# input data
X = [
    [0, 0],
    [0, 1],
    [1, 0],
    [1, 1],
]

# output truth
y = [0, 1, 1, 0]

# training the network with two hidden layers of 3 neurons, and 2 output nodes
# play with the network and see how the plot changes
network = Dee([3, 3], 2)
network.train(X, y, epochs=1000, learningRate=0.01, batchSize=20)

# visualize everything
network.visualize()
network.plot2D()
network.plotLoss()
```

## Fun Exercises

- Modify the code to use sigmoid or ReLU instead of tanh (see the sketch below)
- Experiment with a single batch or a single row (stochastic gradient descent)
- Add regularization to reduce overfitting
- Tune the spiral problem to get a better visual result
- Try a dataset with multiple classes
- Make the loss graph update in realtime
- Use CudaMat to GPUfy the training and save time
- Apply an advanced optimization function, such as Adam (fun is subjective for this one ;)
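
A minimal sketch of the first exercise, assuming only the two `np.tanh` calls in dee.py are swapped out and the matching derivative is used in the backward pass; the `relu` helper below is hypothetical and not part of the library:

```py
import numpy as np

# hypothetical drop-in replacement for np.tanh in predict() and train()
def relu(z):
    return np.maximum(0, z)

# the backprop step then needs the ReLU derivative instead of (1 - a^2), e.g.:
# lastDelta = lastDelta.dot(WAll[j].T) * (aAll[j] > 0)
```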

## Stay In Touch

For the latest releases and announcements, check out my site: [aliashraf.net](http://aliashraf.net)

## License

This software is released under the [MIT License](LICENSE). Please read LICENSE for information on the
software availability and distribution.

Copyright (c) 2018 [Ali Ashraf](http://aliashraf.net)

__pycache__/dee.cpython-36.pyc

3.53 KB
Binary file not shown.
3.29 KB
Binary file not shown.

dee.py

Lines changed: 194 additions & 0 deletions
@@ -0,0 +1,194 @@
import numpy as np
import matplotlib.pyplot as plt
from visualize_dee import VisualizeDee

# Dee.py core neural network class
class Dee:

    # initialization constructor
    def __init__(self, hiddenLayers, outputNodes=1):
        self.hiddenLayers = hiddenLayers
        self.outputNodes = outputNodes
        self.datasetLength = 0
        self.W = []
        self.b = []

    # standalone prediction function that can give the output for any test input
    def predict(self, x):

        # basically forward propagation happening here
        zAll = []
        aAll = []

        zAll.append([])
        aAll.append(np.array(x))

        WAll = self.W
        bAll = self.b

        for j in range(0, len(self.W)):
            W = WAll[j]
            b = bAll[j]

            inputData = aAll[j]

            z = inputData.dot(W) + b

            # modify to change the activation function
            a = np.tanh(z)

            zAll.append(z)
            aAll.append(a)

        # softmax probabilities of the final-layer scores
        exp_scores = np.exp(zAll[len(zAll) - 1])
        probs = exp_scores / np.sum(exp_scores, axis=1, keepdims=True)

        return probs

    # a helper function that divides the dataset into batches
    @staticmethod
    def batch(iterable, n=1):
        l = len(iterable)

        result = []
        for ndx in range(0, l, n):
            result.append(iterable[ndx:min(ndx + n, l)])

        return result

    # the core training function
    def train(self, X, y, epochs=10000, learningRate=0.1, batchSize=5):
        np.random.seed(0)

        X = np.array(X)
        y = np.array(y)

        self.X = X
        self.y = y
        self.datasetLength = len(X)
        numHiddenLayers = len(self.hiddenLayers)
        numProcessingLayers = numHiddenLayers + 1

        WAll = []
        bAll = []

        # generate dimensions for each layer
        dimensions = [np.shape(X)[1]]

        for numNodes in self.hiddenLayers:
            dimensions.append(numNodes)

        dimensions.append(self.outputNodes)

        # initialize to random weights and zero biases
        for i in range(0, numProcessingLayers):
            WAll.append(np.random.randn(dimensions[i], dimensions[i+1]) / np.sqrt(dimensions[i]))
            bAll.append(np.zeros((1, dimensions[i+1])))

        self.W = []
        self.b = []
        self.loss = []

        # generate batches of the data
        XBatches = Dee.batch(X, batchSize)
        yBatches = Dee.batch(y, batchSize)

        for i in range(0, epochs):

            epochActivations = []

            for x in range(0, len(XBatches)):

                Xbatch = np.array(XBatches[x])
                yBatch = np.array(yBatches[x])
                thisBatchSize = len(Xbatch)

                zAll = []
                aAll = []

                zAll.append([])
                aAll.append(Xbatch)

                # forward propagate the network
                for j in range(0, numProcessingLayers):
                    W = WAll[j]
                    b = bAll[j]

                    inputData = aAll[j]

                    # crunch the numbers and apply the activation function
                    z = inputData.dot(W) + b
                    a = np.tanh(z)

                    zAll.append(z)
                    aAll.append(a)

                # calculate errors and probabilities of the last layer
                lastError = zAll[len(zAll) - 1]
                lastActivation = aAll[len(aAll) - 1]

                for item in lastActivation:
                    epochActivations.append(item)

                exp_scores = np.exp(lastError)
                probs = exp_scores / np.sum(exp_scores, axis=1, keepdims=True)

                # backpropagate through the network
                deltaL = probs
                deltaL[range(thisBatchSize), yBatch] -= 1

                dWAll = []
                dbAll = []

                lastDelta = deltaL

                # reverse loop through the layers
                for j in reversed(range(0, numProcessingLayers)):

                    # calculate & collect delta weights and biases
                    dWAll.insert(0, (aAll[j].T).dot(lastDelta))
                    dbAll.insert(0, np.sum(lastDelta, axis=0, keepdims=(j == 0)))

                    lastDelta = lastDelta.dot(WAll[j].T) * (1 - np.power(aAll[j], 2))

                # apply the gradients to the model
                # each delta is scaled by the learning rate and subtracted from W/b
                for j in range(0, numProcessingLayers):
                    WAll[j] += -learningRate * dWAll[j]
                    bAll[j] += -learningRate * dbAll[j]

            # save the weights and biases
            self.W = WAll
            self.b = bAll

            # calculate the errors of this epoch
            epochActivations = np.array(epochActivations)

            validActivations = epochActivations[range(self.datasetLength), self.y]

            # via the squared error formula
            lossNum = 1 - np.mean(np.square(validActivations))
            self.loss.append(lossNum)

            # print the progress bar
            VisualizeDee.printProgress(i, epochs, 30, 0.01, "Epoch: " + str(i) + " Loss: " + str(lossNum))
        print("\n")

    # helper functions that visualize data
    # implemented in visualize_dee.py
    def visualize(self):
        VisualizeDee.visualize(self)

    def plot2D(self, colorMap=plt.cm.rainbow, resolution=0.01, discrete=False, yColumn=1):
        VisualizeDee.plot2D(self, colorMap, resolution, discrete, yColumn)

    def plotLoss(self, color='r'):
        VisualizeDee.plotLoss(self, color)

example-moons.py

Lines changed: 16 additions & 0 deletions
@@ -0,0 +1,16 @@
import sklearn
import sklearn.datasets
from dee import Dee

# importing the moons dataset with 25% noise
# NOTE: the result will always be different due to the random noise
X, y = sklearn.datasets.make_moons(200, noise=0.25)

# training on three hidden layers with 5, 2, 5 neurons
network = Dee([5, 2, 5], 2)
network.train(X, y, epochs=5000, learningRate=0.01, batchSize=20)

# visualization
network.visualize()
network.plot2D()
network.plotLoss()

example-spiral.py

Lines changed: 43 additions & 0 deletions
@@ -0,0 +1,43 @@
import numpy as np
import math
from dee import Dee

# a generic function that makes spirals
def spiral(offset=0):
    a = 1
    offsetR = offset / 180 * math.pi
    phi = np.arange(0, 3 * np.pi, 0.1)
    x1 = a * phi * np.cos(phi + offsetR)
    x2 = a * phi * np.sin(phi + offsetR)

    dr = (np.diff(x1)**2 + np.diff(x2)**2)**.5  # segment lengths
    r = np.zeros_like(x1)
    r[1:] = np.cumsum(dr)  # integrate path
    r_int = np.linspace(0, r.max(), 50)  # regularly spaced path
    x1 = np.interp(r_int, r, x1)  # interpolate
    x2 = np.interp(r_int, r, x2)

    result = np.column_stack((x1, x2))
    result = np.delete(result, 0, axis=0)

    return result

# generating two spirals
spiralA = spiral()
spiralB = spiral(offset=180)

# and concatenating them to form X and y
X = np.concatenate((spiralA, spiralB), axis=0)
y = np.array([0] * len(spiralA) + [1] * len(spiralB))

# training the network
# three hidden layers of 10 neurons and 2 output nodes
network = Dee([10, 10, 10], 2)
network.train(X, y, epochs=15000, learningRate=0.001, batchSize=20)

# visualization
network.visualize()
network.plot2D()
network.plotLoss()

print("Play with the network parameters to get a better result")

example-xor.py

Lines changed: 39 additions & 0 deletions
@@ -0,0 +1,39 @@
from dee import Dee

# XOR function example

# input data
X = [
    [0, 0],
    [0, 1],
    [1, 0],
    [1, 1],
]

# output truth
y = [
    0,
    1,
    1,
    0,
]

# training the network with two hidden layers of 3 neurons, and 2 output nodes
# play with the network and see how the plot changes
network = Dee([3, 3], 2)
network.train(X, y, epochs=2000, learningRate=0.01, batchSize=20)

# predicting the outcome of a test input: [0, 1]
print(network.predict([
    [0, 1],
])[:, 1])

# and [1, 1]
print(network.predict([
    [1, 1],
])[:, 1])

# network visualizations
network.visualize()
network.plot2D()
network.plotLoss()

promo.png

235 KB

0 commit comments
