erinmgraham opened 5 months ago
Shern:

#### Define the Model

```python
from tensorflow import keras  # import assumed; not shown in the original snippet

# INPUT: 32x32 images with three channels (RGB)
# Layer 1: convolutional, 16 filters, 3x3 kernel, ReLU activation
# Layer 2: max pooling, 2x2 window
# Layer 3: convolutional, 32 filters, 3x3 kernel, ReLU activation
# Layer 4: max pooling, 2x2 window
# Layer 5: flatten to 1D
# Layer 6: dense layer, 64 units, ReLU activation
# OUTPUT: dense layer, 10 units (one per class), softmax activation

# train_images is assumed to be an (N, 32, 32, 3) array loaded earlier
inputs_intro = keras.Input(shape=train_images.shape[1:])                     # INPUT

x_intro = keras.layers.Conv2D(16, (3, 3), activation='relu')(inputs_intro)  # Layer 1
x_intro = keras.layers.MaxPooling2D((2, 2))(x_intro)                         # Layer 2
x_intro = keras.layers.Conv2D(32, (3, 3), activation='relu')(x_intro)       # Layer 3
x_intro = keras.layers.MaxPooling2D((2, 2))(x_intro)                         # Layer 4
x_intro = keras.layers.Flatten()(x_intro)                                    # Layer 5
x_intro = keras.layers.Dense(64, activation='relu')(x_intro)                 # Layer 6

outputs_intro = keras.layers.Dense(10, activation='softmax')(x_intro)        # OUTPUT
```
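Below is a minimal sketch of how this layer stack might be assembled into a trainable model. The `keras.Model` call follows directly from the `inputs_intro`/`outputs_intro` names above; the model name, optimizer, and loss are illustrative assumptions, not part of the original snippet (`SparseCategoricalCrossentropy` presumes integer class labels 0-9; use `CategoricalCrossentropy` if the labels are one-hot encoded).

```python
# Sketch only: model name, optimizer, and loss are assumptions,
# not taken from the snippet above.
model_intro = keras.Model(inputs=inputs_intro,
                          outputs=outputs_intro,
                          name="cifar_model_intro")  # hypothetical name

model_intro.compile(optimizer='adam',
                    loss=keras.losses.SparseCategoricalCrossentropy(),
                    metrics=['accuracy'])

model_intro.summary()  # prints layer output shapes and parameter counts
```

`model_intro.summary()` is a quick sanity check on the architecture: with 32x32 inputs and valid padding, the first convolution should report 30x30x16 feature maps, pooled to 15x15, then 13x13x32 pooled to 6x6 before the flatten.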