calico / basenji

Sequential regulatory activity predictions with deep convolutional neural networks.
Apache License 2.0
410 stars 126 forks source link

Akita: Loading weights for newer models results in mismatch #187

Closed jakevc closed 11 months ago

jakevc commented 11 months ago

I am trying to run the code from evaluate_models on the newer models described here: https://github.com/calico/basenji/blob/master/manuscripts/akita/v2/README.md

I am getting a weights mismatch error when loading the model:

### load params, specify model ###

# Directory of one trained Akita v2 model (fold 0, cross-validation split 0).
model_dir = '../models/models/f0c0/train/'
params_file = model_dir+'params.json'
# NOTE(review): 'model1_best.h5' is one of several per-head checkpoints in this
# directory — presumably the head-1 weights; confirm which checkpoint matches
# the architecture described by params.json.
model_file  = model_dir+'model1_best.h5'
with open(params_file) as params_open:
    # params.json holds two sections: 'model' (architecture) and 'train'
    # (training hyperparameters); only 'model' is needed to build the network.
    params = json.load(params_open)
    params_model = params['model']
    params_train = params['train']

# Construct the SeqNN architecture from the 'model' params; weights are loaded
# separately (from model_file), which is where the reported mismatch occurs.
seqnn_model = seqnn.SeqNN(params_model)

Model: "model_18"
__________________________________________________________________________________________________
 Layer (type)                Output Shape                 Param #   Connected to                  
==================================================================================================
 sequence (InputLayer)       [(None, 1310720, 4)]         0         []                            

 stochastic_reverse_complem  ((None, 1310720, 4),         0         ['sequence[0][0]']            
 ent_6 (StochasticReverseCo   ())                                                                 
 mplement)                                                                                        

 stochastic_shift_6 (Stocha  (None, 1310720, 4)           0         ['stochastic_reverse_complemen
 sticShift)                                                         t_6[0][0]']                   

 re_lu_287 (ReLU)            (None, 1310720, 4)           0         ['stochastic_shift_6[0][0]']  

 conv1d_198 (Conv1D)         (None, 1310720, 128)         7680      ['re_lu_287[0][0]']           

 batch_normalization_276 (B  (None, 1310720, 128)         512       ['conv1d_198[0][0]']          
 atchNormalization)                                                                               

 max_pooling1d_66 (MaxPooli  (None, 655360, 128)          0         ['batch_normalization_276[0][0
 ng1D)                                                              ]']                           

 re_lu_288 (ReLU)            (None, 655360, 128)          0         ['max_pooling1d_66[0][0]']    

 conv1d_199 (Conv1D)         (None, 655360, 128)          81920     ['re_lu_288[0][0]']           

 batch_normalization_277 (B  (None, 655360, 128)          512       ['conv1d_199[0][0]']          
 atchNormalization)                                                                               

 max_pooling1d_67 (MaxPooli  (None, 327680, 128)          0         ['batch_normalization_277[0][0
 ng1D)                                                              ]']                           

 re_lu_289 (ReLU)            (None, 327680, 128)          0         ['max_pooling1d_67[0][0]']    

 conv1d_200 (Conv1D)         (None, 327680, 128)          81920     ['re_lu_289[0][0]']           

 batch_normalization_278 (B  (None, 327680, 128)          512       ['conv1d_200[0][0]']          
 atchNormalization)                                                                               

 max_pooling1d_68 (MaxPooli  (None, 163840, 128)          0         ['batch_normalization_278[0][0
 ng1D)                                                              ]']                           

 re_lu_290 (ReLU)            (None, 163840, 128)          0         ['max_pooling1d_68[0][0]']    

 conv1d_201 (Conv1D)         (None, 163840, 128)          81920     ['re_lu_290[0][0]']           

 batch_normalization_279 (B  (None, 163840, 128)          512       ['conv1d_201[0][0]']          
 atchNormalization)                                                                               

 max_pooling1d_69 (MaxPooli  (None, 81920, 128)           0         ['batch_normalization_279[0][0
 ng1D)                                                              ]']                           

 re_lu_291 (ReLU)            (None, 81920, 128)           0         ['max_pooling1d_69[0][0]']    

 conv1d_202 (Conv1D)         (None, 81920, 128)           81920     ['re_lu_291[0][0]']           

 batch_normalization_280 (B  (None, 81920, 128)           512       ['conv1d_202[0][0]']          
 atchNormalization)                                                                               

 max_pooling1d_70 (MaxPooli  (None, 40960, 128)           0         ['batch_normalization_280[0][0
 ng1D)                                                              ]']                           

 re_lu_292 (ReLU)            (None, 40960, 128)           0         ['max_pooling1d_70[0][0]']    

 conv1d_203 (Conv1D)         (None, 40960, 128)           81920     ['re_lu_292[0][0]']           

 batch_normalization_281 (B  (None, 40960, 128)           512       ['conv1d_203[0][0]']          
 atchNormalization)                                                                               

 max_pooling1d_71 (MaxPooli  (None, 20480, 128)           0         ['batch_normalization_281[0][0
 ng1D)                                                              ]']                           

 re_lu_293 (ReLU)            (None, 20480, 128)           0         ['max_pooling1d_71[0][0]']    

 conv1d_204 (Conv1D)         (None, 20480, 128)           81920     ['re_lu_293[0][0]']           

 batch_normalization_282 (B  (None, 20480, 128)           512       ['conv1d_204[0][0]']          
 atchNormalization)                                                                               

 max_pooling1d_72 (MaxPooli  (None, 10240, 128)           0         ['batch_normalization_282[0][0
 ng1D)                                                              ]']                           

 re_lu_294 (ReLU)            (None, 10240, 128)           0         ['max_pooling1d_72[0][0]']    

 conv1d_205 (Conv1D)         (None, 10240, 128)           81920     ['re_lu_294[0][0]']           

 batch_normalization_283 (B  (None, 10240, 128)           512       ['conv1d_205[0][0]']          
 atchNormalization)                                                                               

 max_pooling1d_73 (MaxPooli  (None, 5120, 128)            0         ['batch_normalization_283[0][0
 ng1D)                                                              ]']                           

 re_lu_295 (ReLU)            (None, 5120, 128)            0         ['max_pooling1d_73[0][0]']    

 conv1d_206 (Conv1D)         (None, 5120, 128)            81920     ['re_lu_295[0][0]']           

 batch_normalization_284 (B  (None, 5120, 128)            512       ['conv1d_206[0][0]']          
 atchNormalization)                                                                               

 max_pooling1d_74 (MaxPooli  (None, 2560, 128)            0         ['batch_normalization_284[0][0
 ng1D)                                                              ]']                           

 re_lu_296 (ReLU)            (None, 2560, 128)            0         ['max_pooling1d_74[0][0]']    

 conv1d_207 (Conv1D)         (None, 2560, 128)            81920     ['re_lu_296[0][0]']           

 batch_normalization_285 (B  (None, 2560, 128)            512       ['conv1d_207[0][0]']          
 atchNormalization)                                                                               

 max_pooling1d_75 (MaxPooli  (None, 1280, 128)            0         ['batch_normalization_285[0][0
 ng1D)                                                              ]']                           

 re_lu_297 (ReLU)            (None, 1280, 128)            0         ['max_pooling1d_75[0][0]']    

 conv1d_208 (Conv1D)         (None, 1280, 128)            81920     ['re_lu_297[0][0]']           

 batch_normalization_286 (B  (None, 1280, 128)            512       ['conv1d_208[0][0]']          
 atchNormalization)                                                                               

 max_pooling1d_76 (MaxPooli  (None, 640, 128)             0         ['batch_normalization_286[0][0
 ng1D)                                                              ]']                           

 re_lu_298 (ReLU)            (None, 640, 128)             0         ['max_pooling1d_76[0][0]']    

 conv1d_209 (Conv1D)         (None, 640, 64)              24576     ['re_lu_298[0][0]']           

 batch_normalization_287 (B  (None, 640, 64)              256       ['conv1d_209[0][0]']          
 atchNormalization)                                                                               

 re_lu_299 (ReLU)            (None, 640, 64)              0         ['batch_normalization_287[0][0
                                                                    ]']                           

 conv1d_210 (Conv1D)         (None, 640, 128)             8192      ['re_lu_299[0][0]']           

 batch_normalization_288 (B  (None, 640, 128)             512       ['conv1d_210[0][0]']          
 atchNormalization)                                                                               

 dropout_99 (Dropout)        (None, 640, 128)             0         ['batch_normalization_288[0][0
                                                                    ]']                           

 add_99 (Add)                (None, 640, 128)             0         ['max_pooling1d_76[0][0]',    
                                                                     'dropout_99[0][0]']          

 re_lu_300 (ReLU)            (None, 640, 128)             0         ['add_99[0][0]']              

 conv1d_211 (Conv1D)         (None, 640, 64)              24576     ['re_lu_300[0][0]']           

 batch_normalization_289 (B  (None, 640, 64)              256       ['conv1d_211[0][0]']          
 atchNormalization)                                                                               

 re_lu_301 (ReLU)            (None, 640, 64)              0         ['batch_normalization_289[0][0
                                                                    ]']                           

 conv1d_212 (Conv1D)         (None, 640, 128)             8192      ['re_lu_301[0][0]']           

 batch_normalization_290 (B  (None, 640, 128)             512       ['conv1d_212[0][0]']          
 atchNormalization)                                                                               

 dropout_100 (Dropout)       (None, 640, 128)             0         ['batch_normalization_290[0][0
                                                                    ]']                           

 add_100 (Add)               (None, 640, 128)             0         ['add_99[0][0]',              
                                                                     'dropout_100[0][0]']         

 re_lu_302 (ReLU)            (None, 640, 128)             0         ['add_100[0][0]']             

 conv1d_213 (Conv1D)         (None, 640, 64)              24576     ['re_lu_302[0][0]']           

 batch_normalization_291 (B  (None, 640, 64)              256       ['conv1d_213[0][0]']          
 atchNormalization)                                                                               

 re_lu_303 (ReLU)            (None, 640, 64)              0         ['batch_normalization_291[0][0
                                                                    ]']                           

 conv1d_214 (Conv1D)         (None, 640, 128)             8192      ['re_lu_303[0][0]']           

 batch_normalization_292 (B  (None, 640, 128)             512       ['conv1d_214[0][0]']          
 atchNormalization)                                                                               

 dropout_101 (Dropout)       (None, 640, 128)             0         ['batch_normalization_292[0][0
                                                                    ]']                           

 add_101 (Add)               (None, 640, 128)             0         ['add_100[0][0]',             
                                                                     'dropout_101[0][0]']         

 re_lu_304 (ReLU)            (None, 640, 128)             0         ['add_101[0][0]']             

 conv1d_215 (Conv1D)         (None, 640, 64)              24576     ['re_lu_304[0][0]']           

 batch_normalization_293 (B  (None, 640, 64)              256       ['conv1d_215[0][0]']          
 atchNormalization)                                                                               

 re_lu_305 (ReLU)            (None, 640, 64)              0         ['batch_normalization_293[0][0
                                                                    ]']                           

 conv1d_216 (Conv1D)         (None, 640, 128)             8192      ['re_lu_305[0][0]']           

 batch_normalization_294 (B  (None, 640, 128)             512       ['conv1d_216[0][0]']          
 atchNormalization)                                                                               

 dropout_102 (Dropout)       (None, 640, 128)             0         ['batch_normalization_294[0][0
                                                                    ]']                           

 add_102 (Add)               (None, 640, 128)             0         ['add_101[0][0]',             
                                                                     'dropout_102[0][0]']         

 re_lu_306 (ReLU)            (None, 640, 128)             0         ['add_102[0][0]']             

 conv1d_217 (Conv1D)         (None, 640, 64)              24576     ['re_lu_306[0][0]']           

 batch_normalization_295 (B  (None, 640, 64)              256       ['conv1d_217[0][0]']          
 atchNormalization)                                                                               

 re_lu_307 (ReLU)            (None, 640, 64)              0         ['batch_normalization_295[0][0
                                                                    ]']                           

 conv1d_218 (Conv1D)         (None, 640, 128)             8192      ['re_lu_307[0][0]']           

 batch_normalization_296 (B  (None, 640, 128)             512       ['conv1d_218[0][0]']          
 atchNormalization)                                                                               

 dropout_103 (Dropout)       (None, 640, 128)             0         ['batch_normalization_296[0][0
                                                                    ]']                           

 add_103 (Add)               (None, 640, 128)             0         ['add_102[0][0]',             
                                                                     'dropout_103[0][0]']         

 re_lu_308 (ReLU)            (None, 640, 128)             0         ['add_103[0][0]']             

 conv1d_219 (Conv1D)         (None, 640, 64)              24576     ['re_lu_308[0][0]']           

 batch_normalization_297 (B  (None, 640, 64)              256       ['conv1d_219[0][0]']          
 atchNormalization)                                                                               

 re_lu_309 (ReLU)            (None, 640, 64)              0         ['batch_normalization_297[0][0
                                                                    ]']                           

 conv1d_220 (Conv1D)         (None, 640, 128)             8192      ['re_lu_309[0][0]']           

 batch_normalization_298 (B  (None, 640, 128)             512       ['conv1d_220[0][0]']          
 atchNormalization)                                                                               

 dropout_104 (Dropout)       (None, 640, 128)             0         ['batch_normalization_298[0][0
                                                                    ]']                           

 add_104 (Add)               (None, 640, 128)             0         ['add_103[0][0]',             
                                                                     'dropout_104[0][0]']         

 re_lu_310 (ReLU)            (None, 640, 128)             0         ['add_104[0][0]']             

 conv1d_221 (Conv1D)         (None, 640, 64)              24576     ['re_lu_310[0][0]']           

 batch_normalization_299 (B  (None, 640, 64)              256       ['conv1d_221[0][0]']          
 atchNormalization)                                                                               

 re_lu_311 (ReLU)            (None, 640, 64)              0         ['batch_normalization_299[0][0
                                                                    ]']                           

 conv1d_222 (Conv1D)         (None, 640, 128)             8192      ['re_lu_311[0][0]']           

 batch_normalization_300 (B  (None, 640, 128)             512       ['conv1d_222[0][0]']          
 atchNormalization)                                                                               

 dropout_105 (Dropout)       (None, 640, 128)             0         ['batch_normalization_300[0][0
                                                                    ]']                           

 add_105 (Add)               (None, 640, 128)             0         ['add_104[0][0]',             
                                                                     'dropout_105[0][0]']         

 re_lu_312 (ReLU)            (None, 640, 128)             0         ['add_105[0][0]']             

 conv1d_223 (Conv1D)         (None, 640, 64)              24576     ['re_lu_312[0][0]']           

 batch_normalization_301 (B  (None, 640, 64)              256       ['conv1d_223[0][0]']          
 atchNormalization)                                                                               

 re_lu_313 (ReLU)            (None, 640, 64)              0         ['batch_normalization_301[0][0
                                                                    ]']                           

 conv1d_224 (Conv1D)         (None, 640, 128)             8192      ['re_lu_313[0][0]']           

 batch_normalization_302 (B  (None, 640, 128)             512       ['conv1d_224[0][0]']          
 atchNormalization)                                                                               

 dropout_106 (Dropout)       (None, 640, 128)             0         ['batch_normalization_302[0][0
                                                                    ]']                           

 add_106 (Add)               (None, 640, 128)             0         ['add_105[0][0]',             
                                                                     'dropout_106[0][0]']         

 re_lu_314 (ReLU)            (None, 640, 128)             0         ['add_106[0][0]']             

 conv1d_225 (Conv1D)         (None, 640, 64)              24576     ['re_lu_314[0][0]']           

 batch_normalization_303 (B  (None, 640, 64)              256       ['conv1d_225[0][0]']          
 atchNormalization)                                                                               

 re_lu_315 (ReLU)            (None, 640, 64)              0         ['batch_normalization_303[0][0
                                                                    ]']                           

 conv1d_226 (Conv1D)         (None, 640, 128)             8192      ['re_lu_315[0][0]']           

 batch_normalization_304 (B  (None, 640, 128)             512       ['conv1d_226[0][0]']          
 atchNormalization)                                                                               

 dropout_107 (Dropout)       (None, 640, 128)             0         ['batch_normalization_304[0][0
                                                                    ]']                           

 add_107 (Add)               (None, 640, 128)             0         ['add_106[0][0]',             
                                                                     'dropout_107[0][0]']         

 re_lu_316 (ReLU)            (None, 640, 128)             0         ['add_107[0][0]']             

 conv1d_227 (Conv1D)         (None, 640, 64)              24576     ['re_lu_316[0][0]']           

 batch_normalization_305 (B  (None, 640, 64)              256       ['conv1d_227[0][0]']          
 atchNormalization)                                                                               

 re_lu_317 (ReLU)            (None, 640, 64)              0         ['batch_normalization_305[0][0
                                                                    ]']                           

 conv1d_228 (Conv1D)         (None, 640, 128)             8192      ['re_lu_317[0][0]']           

 batch_normalization_306 (B  (None, 640, 128)             512       ['conv1d_228[0][0]']          
 atchNormalization)                                                                               

 dropout_108 (Dropout)       (None, 640, 128)             0         ['batch_normalization_306[0][0
                                                                    ]']                           

 add_108 (Add)               (None, 640, 128)             0         ['add_107[0][0]',             
                                                                     'dropout_108[0][0]']         

 re_lu_318 (ReLU)            (None, 640, 128)             0         ['add_108[0][0]']             

 conv1d_229 (Conv1D)         (None, 640, 64)              24576     ['re_lu_318[0][0]']           

 batch_normalization_307 (B  (None, 640, 64)              256       ['conv1d_229[0][0]']          
 atchNormalization)                                                                               

 re_lu_319 (ReLU)            (None, 640, 64)              0         ['batch_normalization_307[0][0
                                                                    ]']                           

 conv1d_230 (Conv1D)         (None, 640, 128)             8192      ['re_lu_319[0][0]']           

 batch_normalization_308 (B  (None, 640, 128)             512       ['conv1d_230[0][0]']          
 atchNormalization)                                                                               

 dropout_109 (Dropout)       (None, 640, 128)             0         ['batch_normalization_308[0][0
                                                                    ]']                           

 add_109 (Add)               (None, 640, 128)             0         ['add_108[0][0]',             
                                                                     'dropout_109[0][0]']         

 re_lu_320 (ReLU)            (None, 640, 128)             0         ['add_109[0][0]']             

 conv1d_231 (Conv1D)         (None, 640, 80)              51200     ['re_lu_320[0][0]']           

 batch_normalization_309 (B  (None, 640, 80)              320       ['conv1d_231[0][0]']          
 atchNormalization)                                                                               

 re_lu_321 (ReLU)            (None, 640, 80)              0         ['batch_normalization_309[0][0
                                                                    ]']                           

 one_to_two_6 (OneToTwo)     (None, 640, 640, 80)         0         ['re_lu_321[0][0]']           

 re_lu_322 (ReLU)            (None, 640, 640, 80)         0         ['one_to_two_6[0][0]']        

 conv2d_78 (Conv2D)          (None, 640, 640, 80)         57600     ['re_lu_322[0][0]']           

 batch_normalization_310 (B  (None, 640, 640, 80)         320       ['conv2d_78[0][0]']           
 atchNormalization)                                                                               

 symmetrize2d_42 (Symmetriz  (None, 640, 640, 80)         0         ['batch_normalization_310[0][0
 e2D)                                                               ]']                           

 re_lu_323 (ReLU)            (None, 640, 640, 80)         0         ['symmetrize2d_42[0][0]']     

 conv2d_79 (Conv2D)          (None, 640, 640, 40)         28800     ['re_lu_323[0][0]']           

 batch_normalization_311 (B  (None, 640, 640, 40)         160       ['conv2d_79[0][0]']           
 atchNormalization)                                                                               

 re_lu_324 (ReLU)            (None, 640, 640, 40)         0         ['batch_normalization_311[0][0
                                                                    ]']                           

 conv2d_80 (Conv2D)          (None, 640, 640, 80)         3200      ['re_lu_324[0][0]']           

 batch_normalization_312 (B  (None, 640, 640, 80)         320       ['conv2d_80[0][0]']           
 atchNormalization)                                                                               

 dropout_110 (Dropout)       (None, 640, 640, 80)         0         ['batch_normalization_312[0][0
                                                                    ]']                           

 add_110 (Add)               (None, 640, 640, 80)         0         ['symmetrize2d_42[0][0]',     
                                                                     'dropout_110[0][0]']         

 symmetrize2d_43 (Symmetriz  (None, 640, 640, 80)         0         ['add_110[0][0]']             
 e2D)                                                                                             

 re_lu_325 (ReLU)            (None, 640, 640, 80)         0         ['symmetrize2d_43[0][0]']     

 conv2d_81 (Conv2D)          (None, 640, 640, 40)         28800     ['re_lu_325[0][0]']           

 batch_normalization_313 (B  (None, 640, 640, 40)         160       ['conv2d_81[0][0]']           
 atchNormalization)                                                                               

 re_lu_326 (ReLU)            (None, 640, 640, 40)         0         ['batch_normalization_313[0][0
                                                                    ]']                           

 conv2d_82 (Conv2D)          (None, 640, 640, 80)         3200      ['re_lu_326[0][0]']           

 batch_normalization_314 (B  (None, 640, 640, 80)         320       ['conv2d_82[0][0]']           
 atchNormalization)                                                                               

 dropout_111 (Dropout)       (None, 640, 640, 80)         0         ['batch_normalization_314[0][0
                                                                    ]']                           

 add_111 (Add)               (None, 640, 640, 80)         0         ['symmetrize2d_43[0][0]',     
                                                                     'dropout_111[0][0]']         

 symmetrize2d_44 (Symmetriz  (None, 640, 640, 80)         0         ['add_111[0][0]']             
 e2D)                                                                                             

 re_lu_327 (ReLU)            (None, 640, 640, 80)         0         ['symmetrize2d_44[0][0]']     

 conv2d_83 (Conv2D)          (None, 640, 640, 40)         28800     ['re_lu_327[0][0]']           

 batch_normalization_315 (B  (None, 640, 640, 40)         160       ['conv2d_83[0][0]']           
 atchNormalization)                                                                               

 re_lu_328 (ReLU)            (None, 640, 640, 40)         0         ['batch_normalization_315[0][0
                                                                    ]']                           

 conv2d_84 (Conv2D)          (None, 640, 640, 80)         3200      ['re_lu_328[0][0]']           

 batch_normalization_316 (B  (None, 640, 640, 80)         320       ['conv2d_84[0][0]']           
 atchNormalization)                                                                               

 dropout_112 (Dropout)       (None, 640, 640, 80)         0         ['batch_normalization_316[0][0
                                                                    ]']                           

 add_112 (Add)               (None, 640, 640, 80)         0         ['symmetrize2d_44[0][0]',     
                                                                     'dropout_112[0][0]']         

 symmetrize2d_45 (Symmetriz  (None, 640, 640, 80)         0         ['add_112[0][0]']             
 e2D)                                                                                             

 re_lu_329 (ReLU)            (None, 640, 640, 80)         0         ['symmetrize2d_45[0][0]']     

 conv2d_85 (Conv2D)          (None, 640, 640, 40)         28800     ['re_lu_329[0][0]']           

 batch_normalization_317 (B  (None, 640, 640, 40)         160       ['conv2d_85[0][0]']           
 atchNormalization)                                                                               

 re_lu_330 (ReLU)            (None, 640, 640, 40)         0         ['batch_normalization_317[0][0
                                                                    ]']                           

 conv2d_86 (Conv2D)          (None, 640, 640, 80)         3200      ['re_lu_330[0][0]']           

 batch_normalization_318 (B  (None, 640, 640, 80)         320       ['conv2d_86[0][0]']           
 atchNormalization)                                                                               

 dropout_113 (Dropout)       (None, 640, 640, 80)         0         ['batch_normalization_318[0][0
                                                                    ]']                           

 add_113 (Add)               (None, 640, 640, 80)         0         ['symmetrize2d_45[0][0]',     
                                                                     'dropout_113[0][0]']         

 symmetrize2d_46 (Symmetriz  (None, 640, 640, 80)         0         ['add_113[0][0]']             
 e2D)                                                                                             

 re_lu_331 (ReLU)            (None, 640, 640, 80)         0         ['symmetrize2d_46[0][0]']     

 conv2d_87 (Conv2D)          (None, 640, 640, 40)         28800     ['re_lu_331[0][0]']           

 batch_normalization_319 (B  (None, 640, 640, 40)         160       ['conv2d_87[0][0]']           
 atchNormalization)                                                                               

 re_lu_332 (ReLU)            (None, 640, 640, 40)         0         ['batch_normalization_319[0][0
                                                                    ]']                           

 conv2d_88 (Conv2D)          (None, 640, 640, 80)         3200      ['re_lu_332[0][0]']           

 batch_normalization_320 (B  (None, 640, 640, 80)         320       ['conv2d_88[0][0]']           
 atchNormalization)                                                                               

 dropout_114 (Dropout)       (None, 640, 640, 80)         0         ['batch_normalization_320[0][0
                                                                    ]']                           

 add_114 (Add)               (None, 640, 640, 80)         0         ['symmetrize2d_46[0][0]',     
                                                                     'dropout_114[0][0]']         

 symmetrize2d_47 (Symmetriz  (None, 640, 640, 80)         0         ['add_114[0][0]']             
 e2D)                                                                                             

 re_lu_333 (ReLU)            (None, 640, 640, 80)         0         ['symmetrize2d_47[0][0]']     

 conv2d_89 (Conv2D)          (None, 640, 640, 40)         28800     ['re_lu_333[0][0]']           

 batch_normalization_321 (B  (None, 640, 640, 40)         160       ['conv2d_89[0][0]']           
 atchNormalization)                                                                               

 re_lu_334 (ReLU)            (None, 640, 640, 40)         0         ['batch_normalization_321[0][0
                                                                    ]']                           

 conv2d_90 (Conv2D)          (None, 640, 640, 80)         3200      ['re_lu_334[0][0]']           

 batch_normalization_322 (B  (None, 640, 640, 80)         320       ['conv2d_90[0][0]']           
 atchNormalization)                                                                               

 dropout_115 (Dropout)       (None, 640, 640, 80)         0         ['batch_normalization_322[0][0
                                                                    ]']                           

 add_115 (Add)               (None, 640, 640, 80)         0         ['symmetrize2d_47[0][0]',     
                                                                     'dropout_115[0][0]']         

 symmetrize2d_48 (Symmetriz  (None, 640, 640, 80)         0         ['add_115[0][0]']             
 e2D)                                                                                             

 squeeze_excite_5 (SqueezeE  (None, 640, 640, 80)         1690      ['symmetrize2d_48[0][0]']     
 xcite)                                                                                           

 cropping2d_6 (Cropping2D)   (None, 512, 512, 80)         0         ['squeeze_excite_5[0][0]']    

 upper_tri_6 (UpperTri)      (None, 130305, 80)           0         ['cropping2d_6[0][0]']        

 re_lu_335 (ReLU)            (None, 130305, 80)           0         ['upper_tri_6[0][0]']         

 dense_11 (Dense)            (None, 130305, 5)            405       ['re_lu_335[0][0]']           

 switch_reverse_triu_11 (Sw  (None, 130305, 5)            0         ['dense_11[0][0]',            
 itchReverseTriu)                                                    'stochastic_reverse_complemen
                                                                    t_6[0][1]']                   

==================================================================================================
Total params: 1507823 (5.75 MB)
Trainable params: 1499023 (5.72 MB)
Non-trainable params: 8800 (34.38 KB)
__________________________________________________________________________________________________
None
model_strides [2048, 2048]
target_lengths [130305, 130305]
target_crops [-64833, -64833]
### restore model ###
# note: run %%bash get_model.sh 
# if you have not already downloaded the model
seqnn_model.restore(model_file)
print('successfully loaded')

---------------------------------------------------------------------------
ValueError                                Traceback (most recent call last)
Cell In[13], line 4
      1 ### restore model ###
      2 # note: run %%bash get_model.sh 
      3 # if you have not already downloaded the model
----> 4 seqnn_model.restore(model_file)
      5 print('successfully loaded')

File /home/groups/hoolock2/u0/jvc/AKITA-POC/basenji/basenji/seqnn.py:807, in SeqNN.restore(self, model_file, head_i, trunk)
    805   self.model_trunk.load_weights(model_file)
    806 else:
--> 807   self.models[head_i].load_weights(model_file)
    808   self.model = self.models[head_i]

File /home/groups/hoolock2/u0/jvc/miniconda3/envs/basenji/lib/python3.8/site-packages/keras/src/utils/traceback_utils.py:70, in filter_traceback.<locals>.error_handler(*args, **kwargs)
     67     filtered_tb = _process_traceback_frames(e.__traceback__)
     68     # To get the full stack trace, call:
     69     # `tf.debugging.disable_traceback_filtering()`
---> 70     raise e.with_traceback(filtered_tb) from None
     71 finally:
     72     del filtered_tb

File /home/groups/hoolock2/u0/jvc/miniconda3/envs/basenji/lib/python3.8/site-packages/keras/src/saving/legacy/hdf5_format.py:834, in load_weights_from_hdf5_group(f, model)
    830     weight_values = preprocess_weights_for_loading(
    831         layer, weight_values, original_keras_version, original_backend
    832     )
    833     if len(weight_values) != len(symbolic_weights):
--> 834         raise ValueError(
    835             f"Weight count mismatch for layer #{k} (named {layer.name} in "
    836             f"the current model, {name} in the save file). "
    837             f"Layer expects {len(symbolic_weights)} weight(s). Received "
    838             f"{len(weight_values)} saved weight(s)"
    839         )
    840     weight_value_tuples += zip(symbolic_weights, weight_values)
    842 if "top_level_model_weights" in f:

ValueError: Weight count mismatch for layer #94 (named squeeze_excite_5 in the current model, squeeze_excite in the save file). Layer expects 4 weight(s). Received 8 saved weight(s)
davek44 commented 11 months ago

I accidentally broke this a few months ago. I pushed a fix. Pull the latest from master.

jakevc commented 11 months ago

That fixed it, thanks!