// bidan.js
const { logError } = require("./colors/bidanColors")
const colors = require("./colors/bidanColors")
const { Perceptron, Convu2D, Flatter, MaxPooling2D } = require("./neuron")
const fs = require("fs")
const { relu, reluDerivative } = require("./func/Activationfunctions")
const { log } = require("console")
// Catalog of activation functions that readMirror/mirror resolve by name when loading a
// saved configuration. This list is an assumption (the reference to `funcions` below needs it);
// extend it with every activation function used in saved configs.
const funcions = [relu]
class Neuralnetwork {
constructor() {
this.LayerInput = []
this.Layer = []
this.LayerOutput = []
this.LayerInputActivationfunction = undefined
this.LayerActivationfunction = []
this.LayerOutputActivationfunction = undefined
// configuration metadata is stored here
this.data = {
"LayerInputConfig": [],
"LayersConfig": [],
"LayerOutputConfig": []
}
}
// Configures the input layer
LayerInputConfig = (Input, neuronType, neuronConfig) => {
let neuron = new neuronType(neuronConfig)
// check whether Input is a number
if (typeof Input == "number") {
// Input is a number
this.data.LayerInputConfig = [Input, neuron.Activationfunction.name]
// check that Input is a valid number (greater than 0)
if (Input > 0) {
// Input is valid
// add the neurons to LayerInput
this.LayerInputActivationfunction = neuron.Activationfunction
for (let index = 0; index < Input; index++) {
let neuron = new neuronType(neuronConfig)
neuron.name = "LayerInput" + index
this.LayerInput.push(neuron)
}
} else if (Input == 0) {
logError("Bidan error 003 LaInCo: The input layer is assigned zero neurons")
} else logError("Bidan error 003 LaInCo: The input layer is assigned a negative number of neurons")
} else if (typeof Input == "undefined") {
// Input is not a number
logError("Bidan error 001 LaInCo: The input layer was not assigned a number of neurons")
} else logError("Bidan error 001 LaInCo: An invalid value was assigned to the number of neurons in the input layer")
}
// Configures the hidden layers
LayersConfig = (ArrayInput, neuronType, neuronConfig) => {
if (typeof ArrayInput == "object") {
this.data.LayersConfig = ArrayInput
// ArrayInput is an array
for (let index = 0; index < ArrayInput.length; index++) {
if (typeof ArrayInput[index] == "number") {
if (ArrayInput[index] > 0) {
let layer = []
if (neuronConfig.constructor === Array) {
if (neuronType.length == neuronConfig.length) {
for (let o = 0; o < ArrayInput[index]; o++) {
let neuron = new neuronType[index](neuronConfig[index])
this.LayerActivationfunction.push(neuron.Activationfunction);
neuron.name = "Layer" + index + "Neuron" + o
layer.push(neuron)
}
} else logError("Bidan error 003 LaHiCo: An attempt was made to configure the type of neurons with arrays that do not match their lengths")
} else {
if (neuronType.length == undefined || neuronType.length == 1) {
for (let o = 0; o < ArrayInput[index]; o++) {
let neuron = new neuronType(neuronConfig)
this.LayerActivationfunction.push(neuron.Activationfunction);
neuron.name = "Layer" + index + "Neuron" + o
layer.push(neuron)
}
}
}
this.Layer.push(layer)
} else if (ArrayInput[index] == 0) {
logError("Bidan error 003 LaHiCo: Layer " + index + " was assigned zero neurons")
} else logError("Bidan error 003 LaHiCo: Layer " + index + " was assigned a negative number of neurons")
} else if (typeof ArrayInput[index] == "undefined") {
logError("Bidan error 001 LaHiCo: Layer " + index + " was not assigned a number of neurons")
} else logError("Bidan error 001 LaHiCo: An invalid value was assigned to the number of neurons in layer " + index)
}
} else if (typeof ArrayInput == "number") {
let neuron = new neuronType(neuronConfig)
this.LayerActivationfunction[0] = neuron.Activationfunction
// ArrayInput is a number
if (ArrayInput > 0) {
let layer = []
for (let o = 0; o < ArrayInput; o++) {
let neuron = new neuronType(neuronConfig)
neuron.name = "Layer0Neuron" + o
layer.push(neuron)
}
this.Layer.push(layer)
} else if (ArrayInput == 0) {
logError("Bidan error 003 LaHiCo: The hidden layer was assigned zero neurons")
} else logError("Bidan error 003 LaHiCo: The hidden layer was assigned a negative number of neurons")
} else if (typeof ArrayInput == "undefined") {
logError("Bidan error 001 LaHiCo: No hidden layers were assigned")
} else logError("Bidan error 001 LaHiCo: An invalid value was assigned on hidden layers")
}
// Configures the output layer
LayerOutputConfig = (Output, neuronType, neuronConfig) => {
let neuron = new neuronType(neuronConfig)
// check whether Output is a number
if (typeof Output == "number") {
// Output is a number
this.data.LayerOutputConfig = [Output, neuron.Activationfunction.name]
// check that Output is a valid number (greater than 0)
if (Output > 0) {
// Output is valid
// add the neurons to LayerOutput
this.LayerOutputActivationfunction = neuron.Activationfunction
for (let index = 0; index < Output; index++) {
let neuron = new neuronType(neuronConfig)
neuron.name = "LayerOutput" + index
this.LayerOutput.push(neuron)
}
} else if (Output == 0) {
logError("Bidan error 003 LaOuCo: The output layer is assigned zero neurons")
} else logError("Bidan error 003 LaOuCo: The output layer is assigned a negative number of neurons")
} else if (typeof Output == "undefined") {
// Output is not a number
logError("Bidan error 001 LaOuCo: The output layer was not assigned a number of neurons")
} else logError("Bidan error 001 LaOuCo: An invalid value was assigned to the number of neurons in the output layer")
}
// Configures the whole network by calling the three layer-config functions above
config = (LayerInputConfig, LayerInputActivationfunction, LayersConfig, LayersActivationfunction, LayerOutputConfig, LayerOutputActivationfunction) => {
this.LayerInputConfig(LayerInputConfig, LayerInputActivationfunction)
this.LayersConfig(LayersConfig, LayersActivationfunction)
this.LayerOutputConfig(LayerOutputConfig, LayerOutputActivationfunction)
}
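// Example (a sketch, not part of the original file): configuring a small network with the
// individual Layer*Config calls. The neuronConfig shape below ({ Activationfunction, Derivative })
// is an assumption inferred from how this class reads neuron.Activationfunction and
// neuron.Derivative; check ./neuron for the real constructor signature.
//
//   const net = new Neuralnetwork()
//   const cfg = { Activationfunction: relu, Derivative: reluDerivative } // assumed config shape
//   net.LayerInputConfig(2, Perceptron, cfg)   // 2 input neurons
//   net.LayersConfig([3], Perceptron, cfg)     // one hidden layer of 3 neurons
//   net.LayerOutputConfig(1, Perceptron, cfg)  // 1 output neuron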
// Replicates a network configuration from a saved .json file
readMirror = (direction) => {
if (typeof direction == "string") {
if (fs.existsSync(direction + ".json")) {
// read the configuration and turn it into arrays of values and keys
const data = Object.values(JSON.parse(fs.readFileSync(direction + ".json", "utf-8")))
const keys = Object.keys(JSON.parse(fs.readFileSync(direction + ".json", "utf-8")))
if (keys[0] == 'LayerInputConfig' && keys[1] == 'LayersConfig' && keys[2] == 'LayerOutputConfig') {
let configuration = [];
for (var i = 0; i < data.length; i++) {
configuration.push(data[i][0], data[i][1])
}
const findFunction = (functionName) => {
return funcions.find((f) => f.name === functionName)
}
this.config(configuration[0], findFunction(configuration[1]), configuration[2], findFunction(configuration[3]), configuration[4], findFunction(configuration[5]))
console.log(colors.mirror("mirror Neuralnetwork"));
} else logError("Bidan error 005 rM: The file on readmirror is corrupt")
} else logError("Bidan error 006 rM: In readmirror the file does not exist")
} else if (typeof direction == "undefined") {
logError("Bidan error 004 rM: In readMirror the file path was not specified")
} else logError("Bidan error 004 rM: In readmirror the file path is not a string")
}
// Replicates a network configuration from an in-memory config object (e.g. the object returned by readCofig)
mirror = (d) => {
// turn the configuration object into arrays of values and keys
const data = Object.values(d)
const keys = Object.keys(d)
if (keys[0] == 'LayerInputConfig' && keys[1] == 'LayersConfig' && keys[2] == 'LayerOutputConfig') {
let configuration = [];
for (var i = 0; i < data.length; i++) {
configuration.push(data[i][0], data[i][1])
}
const findFunction = (functionName) => {
return funcions.find((f) => f.name === functionName)
}
this.config(configuration[0], findFunction(configuration[1]), configuration[2], findFunction(configuration[3]), configuration[4], findFunction(configuration[5]))
console.log(colors.mirror("mirror Neuralnetwork"));
} else logError("Bidan error 005 m: The data on readmirror is corrupt")
}
// Prints information about the neural network
info = () => {
if (this.LayerInput.length != 0) {
console.log(colors.LyInputTitle("¬¬ Input layer ¬¬"));
console.log(colors.LyInput(" ¬ Number of neurons in the input layer : " + this.LayerInput.length))
console.log(colors.LyInput(" ¬ Input layer activation function : " + this.LayerInputActivationfunction.name))
console.log(colors.LyInput(" ¬ Input layer neuron type : " + this.LayerInput[0].typeof))
} else {
console.log(colors.warn("Bidan error 000 in: Configuration error in input layer"))
}
if (this.Layer.length != 0) {
console.log(colors.LyTitle("++ Hidden Layers ++"));
console.log(colors.Ly(" + Number of hidden Layers: " + this.Layer.length))
for (let index = 0; index < this.Layer.length; index++) {
console.log(colors.Ly(" -- layer " + index + " --"))
console.log(colors.Ly(" - Number of neurons in the layer " + index + " : " + this.Layer[index].length))
console.log(colors.Ly(" - Layer activation function " + (" ".repeat(index.toString().length)) + " : " + this.LayerActivationfunction[index].name))
console.log(colors.Ly(" - Layer neuron type " + (" ".repeat(index.toString().length)) + " : " + this.Layer[index][0].typeof))
}
} else {
console.log(colors.warn("Bidan error 000 in: Hidden layers configuration error"))
}
if (this.LayerOutput.length != 0) {
console.log(colors.LyTitleOutput("** Output layer **"));
console.log(colors.LyOutput(" * Number of neurons in the output layer : " + this.LayerOutput.length))
console.log(colors.LyOutput(" * Output layer activation function : " + this.LayerOutputActivationfunction.name))
console.log(colors.LyOutput(" * Output layer neuron type : " + this.LayerOutput[0].typeof))
} else {
console.log(colors.warn("Bidan error 000 in: Configuration error in output layer"))
}
}
// Saves the configuration to a .json file
saveCofig = (name) => {
console.log(colors.save("Config of Neuralnetwork saved"));
const json = JSON.stringify(this.data)
if (name.substring(name.length - 5) == ".json") {
fs.writeFileSync(name, json)
} else fs.writeFileSync(name + ".json", json)
}
// Returns the current configuration object
readCofig = () => {
return this.data
}
// Initializes the connections between neurons
initConnections = (bool = true) => {
if (this.LayerInput.length != 0 && this.Layer.length != 0 && this.LayerOutput.length != 0) {
for (let i = 0; i < this.LayerInput.length; i++) {
this.LayerInput[i].Output = this.Layer[0]
this.LayerInput[i].ActivationInput = 1
}
for (let o = 0; o < this.Layer.length; o++) {
for (let u = 0; u < this.Layer[o].length; u++) {
if (this.Layer[o + 1]) {
this.Layer[o][u].Output = this.Layer[o + 1]
} else {
this.Layer[o][u].Output = this.LayerOutput
}
if (this.Layer[o - 1]) {
this.Layer[o][u].ActivationInput = this.Layer[o - 1].length
} else {
this.Layer[o][u].ActivationInput = this.LayerInput.length
}
}
}
if (bool) console.log(colors.initC("init Connections of Neuralnetwork"));
} else if (this.Layer.length != 0 && this.LayerOutput.length != 0) {
console.log(colors.warn("Bidan error 000 iC: Configuration error in input layer"));
} else if (this.LayerInput.length != 0 && this.LayerOutput.length != 0) {
console.log(colors.warn("Bidan error 000 iC: Hidden layers configuration error"))
} else if (this.LayerInput.length != 0 && this.Layer.length != 0) {
console.log(colors.warn("Bidan error 000 iC: Configuration error in output layer"))
} else console.log(colors.warn("Bidan error 000 iC: Multi-layer configuration error"))
}
initWeights = (Factor = [-1 / 9, 1 / 9], mode = "Range") => {
// Initialize weights for hidden layers
for (let o = 0; o < this.Layer.length; o++) {
if (this.Layer[o][0] instanceof Perceptron) {
const prevLayerLength = o > 0 ? this.Layer[o - 1].length : this.LayerInput.length;
const weight = mode === "Range" ? Array.from({ length: prevLayerLength }, () => Math.random() * (Factor[1] - Factor[0]) + Factor[0]) : Array.from({ length: prevLayerLength }, () => Math.random() * (Factor[1] - Factor[0]) + Factor[0]);
for (let u = 0; u < this.Layer[o].length; u++) {
this.Layer[o][u].weight = weight;
}
} else if (this.Layer[o][0] instanceof Convu2D) {
// Initialize weights for Convu2D neurons in this hidden layer
const inputWeight = mode === "Range" ? Array.from({ length: this.Layer[o][0].size[0] * this.Layer[o][0].size[1] }, () => Math.random() * (Factor[1] - Factor[0]) + Factor[0]) : Array.from({ length: this.LayerInput[0].size[0] * this.LayerInput[0].size[1] }, () => Math.random() * (Factor[1] - Factor[0]) + Factor[0]);
for (let u = 0; u < this.Layer[o].length; u++) {
this.Layer[o][u].weight = inputWeight;
}
}
}
if (this.LayerOutput[0] instanceof Perceptron) {
// Initialize weights for output layer
const outputWeight = mode === "Range" ? Array.from({ length: this.Layer[this.Layer.length - 1].length }, () => Math.random() * (Factor[1] - Factor[0]) + Factor[0]) : Array.from({ length: this.Layer[this.Layer.length - 1].length }, () => Math.random() * (Factor[1] - Factor[0]) + Factor[0]);
for (let i = 0; i < this.LayerOutput.length; i++) {
this.LayerOutput[i].weight = outputWeight;
}
} else if (this.LayerOutput[0] instanceof Convu2D) {
// Initialize weights for a Convu2D output layer
const inputWeight = mode === "Range" ? Array.from({ length: this.LayerOutput[0].size[0] * this.LayerOutput[0].size[1] }, () => Math.random() * (Factor[1] - Factor[0]) + Factor[0]) : Array.from({ length: this.LayerInput[0].size[0] * this.LayerInput[0].size[1] }, () => Math.random() * (Factor[1] - Factor[0]) + Factor[0]);
for (let i = 0; i < this.LayerOutput.length; i++) {
this.LayerOutput[i].weight = inputWeight;
}
}
if (this.LayerInput[0] instanceof Perceptron) {
// Initialize weights for input layer
const inputWeight = mode === "Range" ? Array.from({ length: 1 }, () => Math.random() * (Factor[1] - Factor[0]) + Factor[0]) : Array.from({ length: 1 }, () => Math.random() * (Factor[1] - Factor[0]) + Factor[0]);
for (let i = 0; i < this.LayerInput.length; i++) {
this.LayerInput[i].weight = inputWeight;
}
} else if (this.LayerInput[0] instanceof Convu2D) {
// Initialize weights for input layer
const inputWeight = mode === "Range" ? Array.from({ length: this.LayerInput[0].size[0] * this.LayerInput[0].size[1] }, () => Math.random() * (Factor[1] - Factor[0]) + Factor[0]) : Array.from({ length: this.LayerInput[0].size[0] * this.LayerInput[0].size[1] }, () => Math.random() * (Factor[1] - Factor[0]) + Factor[0]);
for (let i = 0; i < this.LayerInput.length; i++) {
this.LayerInput[i].weight = inputWeight;
}
}
}
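// Typical initialization order after configuration (a sketch; continues the example above):
//
//   net.initConnections() // wire LayerInput -> hidden Layers -> LayerOutput
//   net.initWeights()     // random weights in the default range [-1/9, 1/9]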
StartPrediction = (DataSet, bool = true) => {
if (this.LayerInput[0] instanceof Perceptron) {
if (typeof DataSet === "object") {
if (DataSet.length === this.LayerInput.length) {
for (let i = 0; i < DataSet.length; i++) {
this.LayerInput[i].addInput(DataSet[i])
this.LayerInput[i].activation()
}
if (bool === true) {
console.log(colors.initC("Start Prediction"));
}
} else logError("Bidan error 007 SP: in StartPrediction the data is an array but does not match the number of input layer neurons")
} else if (typeof DataSet === "number" && this.LayerInput.length === 1) {
this.LayerInput[0].addInput(DataSet)
this.LayerInput[0].activation()
if (bool === true) {
console.log(colors.initC("Start Prediction"));
}
} else if (typeof DataSet === "number" && this.LayerInput.length != 1) {
logError("Bidan error 007 SP: in StartPrediction the data is a number but does not fit with the number of input neurons")
} else if (typeof DataSet === "undefined") {
logError("Bidan error 007 SP: in StartPrediction the data is not assigned")
} else logError("Bidan error 007 SP: in StartPrediction the data is neither array nor numbers")
} else if (this.LayerInput[0] instanceof Convu2D || this.LayerInput[0] instanceof MaxPooling2D || this.LayerInput[0] instanceof Flatter) {
if (typeof DataSet === "object") {
for (let i = 0; i < this.LayerInput.length; i++) {
this.LayerInput[i].addInput(DataSet)
this.LayerInput[i].activation()
}
if (bool === true) {
console.log(colors.initC("Start Prediction"));
}
} else if (typeof DataSet === "number" && this.LayerInput.length != 1) {
logError("Bidan error 007 SP: in StartPrediction the data is a number but does not fit with the number of input neurons")
} else if (typeof DataSet === "undefined") {
logError("Bidan error 007 SP: in StartPrediction the data is not assigned")
} else logError("Bidan error 007 SP: in StartPrediction the data is neither array nor numbers")
} else logError("Wazaaaaaaaaaaaa👻👻👻")
}
// Saves the configuration and weights to a .json file
saveWeight = (name) => {
let data = {
config: this.data,
weight: {
LayerInput: [],
Layer: [],
LayerOutput: []
}
}
for (let i = 0; i < this.LayerInput.length; i++) {
data.weight.LayerInput.push(this.LayerInput[i].weight)
}
for (let i = 0; i < this.LayerOutput.length; i++) {
data.weight.LayerOutput.push(this.LayerOutput[i].weight)
}
for (let o = 0; o < this.Layer.length; o++) {
let layer = []
for (let u = 0; u < this.Layer[o].length; u++) {
if (this.Layer[o][u].weight) {
let a = []
for (let i = 0; i < this.Layer[o][u].weight.length; i++) {
a.push(this.Layer[o][u].weight[i])
}
layer.push(a)
} else layer.push(0)
}
data.weight.Layer.push(layer)
}
const json = JSON.stringify(data)
if (name.substring(name.length - 5) == ".json") {
fs.writeFileSync(name, json)
} else fs.writeFileSync(name + ".json", json)
}
// Returns the configuration and weights as an object
getWeight = () => {
let data = {
config: this.data,
weight: {
LayerInput: [],
Layer: [],
LayerOutput: []
}
}
for (let i = 0; i < this.LayerInput.length; i++) {
data.weight.LayerInput.push(this.LayerInput[i].weight)
}
for (let i = 0; i < this.LayerOutput.length; i++) {
data.weight.LayerOutput.push(this.LayerOutput[i].weight)
}
for (let o = 0; o < this.Layer.length; o++) {
let layer = []
for (let u = 0; u < this.Layer[o].length; u++) {
let a = []
for (let i = 0; i < this.Layer[o][u].weight.length; i++) {
a.push(this.Layer[o][u].weight[i])
}
layer.push(a)
}
data.weight.Layer.push(layer)
}
return data
}
readWeights = (direction) => {
if (typeof direction == "string") {
// resolve the path, accepting names with or without the .json extension
const ruta = direction.substring(direction.length - 5) == ".json" ? direction : direction + ".json"
if (fs.existsSync(ruta)) {
let data = JSON.parse(fs.readFileSync(ruta, "utf-8"))
for (let i = 0; i < this.LayerInput.length; i++) {
this.LayerInput[i].weight = data.weight.LayerInput[i]
}
for (let i = 0; i < this.LayerOutput.length; i++) {
this.LayerOutput[i].weight = data.weight.LayerOutput[i]
}
for (let o = 0; o < this.Layer.length; o++) {
for (let u = 0; u < this.Layer[o].length; u++) {
this.Layer[o][u].weight = data.weight.Layer[o][u]
}
}
} else logError("Bidan error 006 rW: In readWeights the file does not exist")
} else if (typeof direction == "undefined") {
logError("Bidan error 004 rW: In readWeights the file path was not specified")
} else logError("Bidan error 004 rW: In readWeights the file path is not a string")
}
useWeights = (data) => {
for (let i = 0; i < this.LayerInput.length; i++) {
this.LayerInput[i].weight = data.weight.LayerInput[i]
}
for (let i = 0; i < this.LayerOutput.length; i++) {
this.LayerOutput[i].weight = data.weight.LayerOutput[i]
}
for (let o = 0; o < this.Layer.length; o++) {
for (let u = 0; u < this.Layer[o].length; u++) {
this.Layer[o][u].weight = data.weight.Layer[o][u]
}
}
}
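// Weight persistence sketch (the file name "model" is hypothetical):
//
//   net.saveWeight("model")          // writes model.json with config + weights
//   net.readWeights("model")         // restores weights from model.json
//   // or keep everything in memory:
//   const snapshot = net.getWeight()
//   net.useWeights(snapshot)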
Output = () => {
let r = []
for (let i = 0; i < this.LayerOutput.length; i++) {
r.push(this.LayerOutput[i].Activationfunction(this.LayerOutput[i].cal()))
}
return r
}
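// Forward-pass sketch (assumes the network was configured and initialized as in the examples above):
//
//   net.reset()                               // clear accumulated inputs from a previous pass
//   net.StartPrediction([0.5, 0.25], false)   // one value per input neuron
//   const prediction = net.Output()           // one value per output neuron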
OutputLog = () => {
let r = []
for (let i = 0; i < this.LayerOutput.length; i++) {
r.push(this.LayerOutput[i].Activationfunction(this.LayerOutput[i].cal()))
}
console.log(colors.resu("r: " + r));
return r
}
reset = () => {
for (let o = 0; o < this.Layer.length; o++) {
for (let u = 0; u < this.Layer[o].length; u++) {
this.Layer[o][u].Input = []
}
}
for (let i = 0; i < this.LayerOutput.length; i++) {
this.LayerOutput[i].Input = []
}
for (let i = 0; i < this.LayerInput.length; i++) {
this.LayerInput[i].Input = []
}
}
mean_squared_error = (output, target) => {
let error = 0;
for (let i = 0; i < output.length; i++) {
error += 0.5 * Math.pow((target[i] - output[i]), 2);
}
return error;
}
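// The function above computes the summed half squared error over the output vector:
// E = sum_i 0.5 * (target_i - output_i)^2, so dE/d(output_i) = -(target_i - output_i).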
train = (trainingData, targetData, learningRate, epochs, print = true) => {
if (
this.LayerInput.length === 0 ||
this.LayerOutput.length === 0 ||
this.Layer.length === 0
) {
logError("Bidan error: Network not properly configured for training");
return;
}
for (let epoch = 0; epoch < epochs; epoch++) {
let totalError = 0;
for (let dataIndex = 0; dataIndex < trainingData.length; dataIndex++) {
// Reset neuron inputs
this.reset();
// Set input data
this.StartPrediction(trainingData[dataIndex], false);
let errorLO = 0
let errors = []
for (let i = 0; i < this.LayerOutput.length; i++) {
const output = this.LayerOutput[i].Activationfunction(this.LayerOutput[i].cal());
const target = targetData[dataIndex];
const error = target - output;
totalError += Math.abs(error);
// Backpropagate the error
const derivative = this.LayerOutput[i].Derivative(output);
const delta = error * derivative;
errorLO += delta;
for (let j = 0; j < this.LayerOutput[i].weight.length; j++) {
this.LayerOutput[i].weight[j] += learningRate * delta * this.LayerOutput[i].Input[j];
}
}
errorLO /= this.LayerOutput.length
errors.push(errorLO)
for (let l = 0; l < this.Layer.length; l++) {
let errorLH = 0
for (let i = 0; i < this.Layer[l].length; i++) {
// Backpropagate the error
const derivative = this.Layer[l][i].Derivative(this.Layer[l][i].Activationfunction(this.Layer[l][i].cal()));
// Calculate the delta for the current neuron
let delta = 0;
for (let j = 0; j < this.Layer[l][i].weight.length; j++) {
delta += this.Layer[l][i].weight[j] * errors[errors.length-1];
}
totalError += Math.abs(delta);
delta *= derivative
errorLH += delta
for (let j = 0; j < this.Layer[l][i].weight.length; j++) {
this.Layer[l][i].weight[j] += learningRate * delta * this.Layer[l][i].Input[j];
}
}
errors.push(errorLH)
}
for (let i = 0; i < this.LayerInput.length; i++) {
// Backpropagate the error
const derivative = this.LayerInput[i].Derivative(this.LayerInput[i].Activationfunction(this.LayerInput[i].cal()));
// Calculate the delta for the current neuron
let delta = 0;
for (let j = 0; j < this.LayerInput[i].weight.length; j++) {
delta += this.LayerInput[i].weight[j] * errors[errors.length-1];
}
totalError += Math.abs(delta);
delta *= derivative
for (let j = 0; j < this.LayerInput[i].weight.length; j++) {
this.LayerInput[i].weight[j] += learningRate * delta * this.LayerInput[i].Input[j];
}
}
}
if (epoch % 10 === 0 && print) {
console.log(`Epoch ${epoch}: Total Error = ${totalError}`);
}
}
console.log("Training completed.");
};
}
module.exports = {
Neuralnetwork
}
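/*
Example end-to-end usage from a consumer module (a sketch under assumptions, not from the
original file): the neuronConfig shape and the XOR-style data below are illustrative only.

const { Neuralnetwork } = require("./bidan")
const { Perceptron } = require("./neuron")
const { relu, reluDerivative } = require("./func/Activationfunctions")

const net = new Neuralnetwork()
const cfg = { Activationfunction: relu, Derivative: reluDerivative } // assumed config shape
net.LayerInputConfig(2, Perceptron, cfg)
net.LayersConfig([3], Perceptron, cfg)
net.LayerOutputConfig(1, Perceptron, cfg)
net.initConnections()
net.initWeights()

const inputs = [[0, 0], [0, 1], [1, 0], [1, 1]]
const targets = [0, 1, 1, 0]
net.train(inputs, targets, 0.1, 100)

net.reset()
net.StartPrediction([1, 0], false)
console.log(net.Output())
*/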