Skip to content

Commit e172e77

Browse files
committed
Update to Keras2 api
1 parent 51ae1f0 commit e172e77

6 files changed

+28
-36
lines changed

test/test_mnist.ipynb

+3-2
Original file line number | Diff line number | Diff line change
@@ -95,14 +95,15 @@
9595
}
9696
],
9797
"source": [
98+
"batch_size = 128\n",
9899
"out = model.evaluate_generator(\n",
99100
" RotNetDataGenerator(\n",
100101
" X_test,\n",
101-
" batch_size=128,\n",
102+
" batch_size=batch_size,\n",
102103
" preprocess_func=binarize_images,\n",
103104
" shuffle=True\n",
104105
" ), \n",
105-
" val_samples=len(y_test)\n",
106+
" steps=len(y_test) / batch_size\n",
106107
")\n",
107108
"\n",
108109
"print('Test loss:', out[0])\n",

test/test_street_view.ipynb

+3-2
Original file line number | Diff line number | Diff line change
@@ -103,17 +103,18 @@
103103
}
104104
],
105105
"source": [
106+
"batch_size = 64\n",
106107
"out = model.evaluate_generator(\n",
107108
" RotNetDataGenerator(\n",
108109
" test_filenames,\n",
109110
" input_shape=(224, 224, 3),\n",
110-
" batch_size=64,\n",
111+
" batch_size=batch_size,\n",
111112
" preprocess_func=preprocess_input,\n",
112113
" crop_center=True,\n",
113114
" crop_largest_rect=True,\n",
114115
" shuffle=True\n",
115116
" ),\n",
116-
" val_samples=len(test_filenames)\n",
117+
" steps=len(test_filenames) / batch_size\n",
117118
")\n",
118119
"\n",
119120
"print('Test loss:', out[0])\n",

train/train_mnist.py

+7-11
Original file line number | Diff line number | Diff line change
@@ -6,13 +6,11 @@
66
from keras.callbacks import ModelCheckpoint, EarlyStopping, TensorBoard
77
from keras.datasets import mnist
88
from keras.layers import Dense, Dropout, Flatten, Input
9-
from keras.layers import Convolution2D, MaxPooling2D
9+
from keras.layers import Conv2D, MaxPooling2D
1010
from keras.models import Model
1111

1212
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
1313
from utils import angle_error, RotNetDataGenerator, binarize_images
14-
15-
1614
# we don't need the labels indicating the digit value, so we only load the images
1715
(X_train, _), (X_test, _) = mnist.load_data()
1816

@@ -38,18 +36,16 @@
3836

3937
# model definition
4038
input = Input(shape=(img_rows, img_cols, img_channels))
41-
x = Convolution2D(nb_filters, kernel_size[0], kernel_size[1],
42-
activation='relu')(input)
43-
x = Convolution2D(nb_filters, kernel_size[0], kernel_size[1],
44-
activation='relu')(x)
39+
x = Conv2D(nb_filters, kernel_size, activation='relu')(input)
40+
x = Conv2D(nb_filters, kernel_size, activation='relu')(x)
4541
x = MaxPooling2D(pool_size=(2, 2))(x)
4642
x = Dropout(0.25)(x)
4743
x = Flatten()(x)
4844
x = Dense(128, activation='relu')(x)
4945
x = Dropout(0.25)(x)
5046
x = Dense(nb_classes, activation='softmax')(x)
5147

52-
model = Model(input=input, output=x)
48+
model = Model(inputs=input, outputs=x)
5349

5450
model.summary()
5551

@@ -82,14 +78,14 @@
8278
preprocess_func=binarize_images,
8379
shuffle=True
8480
),
85-
samples_per_epoch=nb_train_samples,
86-
nb_epoch=nb_epoch,
81+
steps_per_epoch=nb_train_samples / batch_size,
82+
epochs=nb_epoch,
8783
validation_data=RotNetDataGenerator(
8884
X_test,
8985
batch_size=batch_size,
9086
preprocess_func=binarize_images
9187
),
92-
nb_val_samples=nb_test_samples,
88+
validation_steps=nb_test_samples / batch_size,
9389
verbose=1,
9490
callbacks=[checkpointer, early_stopping, tensorboard]
9591
)

train/train_mnist_regression.py

+7-13
Original file line number | Diff line number | Diff line change
@@ -6,7 +6,7 @@
66
from keras.callbacks import ModelCheckpoint, EarlyStopping, TensorBoard
77
from keras.datasets import mnist
88
from keras.layers import Dense, Dropout, Flatten, Input
9-
from keras.layers import Convolution2D, MaxPooling2D
9+
from keras.layers import Conv2D, MaxPooling2D
1010
from keras.models import Model
1111

1212
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
@@ -35,22 +35,16 @@
3535

3636
# model definition
3737
input = Input(shape=(img_rows, img_cols, img_channels))
38-
x = Convolution2D(nb_filters, kernel_size[0], kernel_size[1],
39-
border_mode='valid',
40-
input_shape=input_shape,
41-
activation='relu')(input)
42-
x = Convolution2D(nb_filters, kernel_size[0], kernel_size[1],
43-
border_mode='valid',
44-
input_shape=input_shape,
45-
activation='relu')(x)
38+
x = Conv2D(nb_filters, kernel_size, activation='relu')(input)
39+
x = Conv2D(nb_filters, kernel_size, activation='relu')(x)
4640
x = MaxPooling2D(pool_size=(2, 2))(x)
4741
x = Dropout(0.25)(x)
4842
x = Flatten()(x)
4943
x = Dense(128, activation='relu')(x)
5044
x = Dropout(0.25)(x)
5145
x = Dense(1, activation='sigmoid')(x)
5246

53-
model = Model(input=input, output=x)
47+
model = Model(inputs=input, outputs=x)
5448

5549
model.summary()
5650

@@ -84,15 +78,15 @@
8478
preprocess_func=binarize_images,
8579
shuffle=True
8680
),
87-
samples_per_epoch=nb_train_samples,
88-
nb_epoch=nb_epoch,
81+
steps_per_epoch=nb_train_samples / batch_size,
82+
epochs=nb_epoch,
8983
validation_data=RotNetDataGenerator(
9084
X_test,
9185
one_hot=False,
9286
preprocess_func=binarize_images,
9387
batch_size=batch_size
9488
),
95-
nb_val_samples=nb_test_samples,
89+
validation_steps=nb_test_samples / batch_size,
9690
verbose=1,
9791
callbacks=[checkpointer, early_stopping, tensorboard]
9892
)

train/train_street_view.py

+4-4
Original file line number | Diff line number | Diff line change
@@ -37,7 +37,7 @@
3737
final_output = Dense(nb_classes, activation='softmax', name='fc360')(x)
3838

3939
# create the new model
40-
model = Model(input=base_model.input, output=final_output)
40+
model = Model(inputs=base_model.input, outputs=final_output)
4141

4242
model.summary()
4343

@@ -73,8 +73,8 @@
7373
crop_largest_rect=True,
7474
shuffle=True
7575
),
76-
samples_per_epoch=len(train_filenames),
77-
nb_epoch=nb_epoch,
76+
steps_per_epoch=len(train_filenames) / batch_size,
77+
epochs=nb_epoch,
7878
validation_data=RotNetDataGenerator(
7979
test_filenames,
8080
input_shape=input_shape,
@@ -83,7 +83,7 @@
8383
crop_center=True,
8484
crop_largest_rect=True
8585
),
86-
nb_val_samples=len(test_filenames),
86+
validation_steps=len(test_filenames) / batch_size,
8787
callbacks=[checkpointer, early_stopping, tensorboard],
8888
nb_worker=10,
8989
pickle_safe=True,

train/train_street_view_regression.py

+4-4
Original file line number | Diff line number | Diff line change
@@ -35,7 +35,7 @@
3535
final_output = Dense(1, activation='sigmoid', name='fc1')(x)
3636

3737
# create the new model
38-
model = Model(input=base_model.input, output=final_output)
38+
model = Model(inputs=base_model.input, outputs=final_output)
3939

4040
model.summary()
4141

@@ -71,8 +71,8 @@
7171
crop_largest_rect=True,
7272
shuffle=True
7373
),
74-
samples_per_epoch=len(train_filenames),
75-
nb_epoch=nb_epoch,
74+
steps_per_epoch=len(train_filenames) / batch_size,
75+
epochs=nb_epoch,
7676
validation_data=RotNetDataGenerator(
7777
test_filenames,
7878
input_shape=input_shape,
@@ -82,7 +82,7 @@
8282
crop_center=True,
8383
crop_largest_rect=True
8484
),
85-
nb_val_samples=len(test_filenames),
85+
validation_steps=len(test_filenames) / batch_size,
8686
callbacks=[checkpointer, early_stopping, tensorboard],
8787
nb_worker=10,
8888
pickle_safe=True,

0 commit comments

Comments (0)