-
Notifications
You must be signed in to change notification settings - Fork 0
/
MNIST_Settings.py
127 lines (106 loc) · 3.56 KB
/
MNIST_Settings.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
# -*- coding: utf-8 -*-
"""
Created on Tue Jul 31 12:29:20 2018
@author: John
"""
from keras import optimizers
from mnist import MNIST
import numpy as np
#represent data using mfcc
#http://practicalcryptography.com/miscellaneous/machine-learning/guide-mel-frequency-cepstral-coefficients-mfccs/
#the sum of the HOLDOUT, TRAINING and VALIDATION values must equal the total number of vectors (TOTAL)
# File-system locations: the MNIST data directory and the per-model result logs.
# All five use raw strings for consistency (DataFilePath previously used
# backslash escapes; the value is unchanged).
# NOTE(review): absolute paths tied to one machine — consider making configurable.
DataFilePath = r"C:\Users\John\Stuff\NN Cultivator\MNIST Data"
ManagerOutputFile = r"C:\Users\John\Stuff\NN Cultivator\OutputData\MNISTManagerResult.txt"
NNOutputFile = r"C:\Users\John\Stuff\NN Cultivator\OutputData\MNISTNNResult.txt"
MoEOutputFile = r"C:\Users\John\Stuff\NN Cultivator\OutputData\MNISTMoEResult.txt"
TrueOutputFile = r"C:\Users\John\Stuff\NN Cultivator\OutputData\MNISTTrueResult.txt"
#%%
# Data dimensions: flattened 28x28 grayscale images (784 inputs), one
# output unit per digit class (10 outputs).
NUM_INPUTS = 784
NUM_OUTPUTS = 10
# Stratify the available vectors into holdout / training / validation.
# Full-dataset split would be TRAINING = 60000, VALIDATION = 10000.
HOLDOUT = 68000
TRAINING = 1000
VALIDATION = 1000
# TOTAL is derived, so the three partitions always sum to it.
TOTAL = sum((HOLDOUT, TRAINING, VALIDATION))
# Number of independent trials to run when gathering statistics.
NUMTRIALS = 1
# --- Worker-network settings ---
WRKR_NUM_WORKERS = 30
WRKR_LOSS = 'mean_squared_error'
WRKR_MET = ['accuracy']
WRKR_EPOCHS = 500
WRKR_BATCH_SIZE = 1          # online (single-vector) updates
WRKR_ACTIVATION = 'sigmoid'
WRKR_LEARNING_RATE = 0.1
WRKR_MOMENTUM = 0.4
WRKR_LYR_1 = 50              # hidden layer width
WRKR_LYR_2 = 8               # not used
WRKR_LYR_3 = NUM_OUTPUTS     # output layer width
# Plain SGD with momentum; no decay, no Nesterov.
WRKR_OPT = optimizers.SGD(lr=WRKR_LEARNING_RATE, decay=0,
                          momentum=WRKR_MOMENTUM, nesterov=False)
# --- LVQ (learning vector quantization) settings ---
# NOTE: "CATAGORIES" is a typo for "CATEGORIES"; the name is kept as-is
# because other code references it.
LVQ_LEARNING_RATE = 0.3
LVQ_PREDICTED_NUM_CATAGORIES = 10
# --- Master-network settings ---
MSTR_LOSS = 'mean_squared_error'
MSTR_MET = ['accuracy']
MSTR_EPOCHS = 500
MSTR_BATCH_SIZE = 1          # online (single-vector) updates
MSTR_ACTIVATION = 'sigmoid'
MSTR_LEARNING_RATE = 0.1
MSTR_MOMENTUM = 0.4
MSTR_LYR_1 = 50              # hidden layer width
MSTR_LYR_2 = 8               # not used
MSTR_LYR_3 = LVQ_PREDICTED_NUM_CATAGORIES  # one output per LVQ category
# Plain SGD with momentum; no decay, no Nesterov.
MSTR_OPT = optimizers.SGD(lr=MSTR_LEARNING_RATE, decay=0,
                          momentum=MSTR_MOMENTUM, nesterov=False)
# --- Baseline single-network settings ---
NN_LOSS = 'mean_squared_error'
NN_MET = ['accuracy']
NN_EPOCHS = 500
NN_BATCH_SIZE = 1            # online (single-vector) updates
NN_ACTIVATION = 'sigmoid'
NN_LEARNING_RATE = 0.1
NN_MOMENTUM = 0.4
NN_LYR_1 = 50                # hidden layer width
NN_LYR_2 = 8                 # not used
NN_LYR_3 = NUM_OUTPUTS       # output layer width
# Plain SGD with momentum; no decay, no Nesterov.
NN_OPT = optimizers.SGD(lr=NN_LEARNING_RATE, decay=0,
                        momentum=NN_MOMENTUM, nesterov=False)
# --- Mixture-of-experts settings ---
MOE_LOSS = 'mean_squared_error'
MOE_MET = ['accuracy']
MOE_EPOCHS = 500
MOE_BATCH_SIZE = 200         # mini-batches, unlike the batch-size-1 models above
MOE_EXPRT_ACTIVATION = 'sigmoid'
MOE_GATE_ACTIVATION = 'sigmoid'
MOE_NUM_EXPERTS = 30
MOE_LEARNING_RATE = 0.1
MOE_MOMENTUM = 0.4
# Plain SGD with momentum; no decay, no Nesterov.
MOE_OPT = optimizers.SGD(lr=MOE_LEARNING_RATE, decay=0,
                         momentum=MOE_MOMENTUM, nesterov=False)
# Presumably a nonzero value enables extra debug output — TODO confirm in MoE code.
MOE_DEBUG = 0
#%% Import data from file
# Load raw MNIST images/labels and assemble a single dataset matrix of
# shape (TOTAL, NUM_INPUTS + NUM_OUTPUTS): normalized pixels followed by
# a one-hot label per row.
# Fix: reuse the DataFilePath constant instead of repeating the same
# hard-coded path literal a second time.
mndata = MNIST(DataFilePath)
Tr_Images, Tr_Labels = mndata.load_training()
Te_Images, Te_Labels = mndata.load_testing()
# Merge the official train/test splits; stratification happens downstream.
Images = Tr_Images + Te_Images
Labels = Tr_Labels + Te_Labels
print(np.shape(Tr_Images))
print(np.shape(Te_Images))
print(np.shape(Tr_Labels))
print(np.shape(Te_Labels))
print(np.shape(Images))
print(np.unique(Tr_Labels))
# Scale pixel intensities from [0, 255] into [0, 1).
# NOTE(review): dividing by 256 (not 255) means the maximum never reaches
# exactly 1.0 — confirm this is intentional.
Images = np.true_divide(Images, 256)
# One-hot encode the integer labels via fancy indexing.
One_Hot_Labels = np.zeros((TOTAL, NUM_OUTPUTS))
One_Hot_Labels[np.arange(TOTAL), Labels] = 1
print(np.shape(One_Hot_Labels))
# Each row: [784 normalized pixels | 10 one-hot outputs].
dataset = np.zeros((TOTAL, NUM_INPUTS + NUM_OUTPUTS))
np.concatenate((Images, One_Hot_Labels), axis=1, out=dataset)
print(np.shape(dataset))
print("MNIST Load Complete")