espressif / esp-dl

Espressif deep-learning library for AIoT applications
MIT License

model forward result is wrong (AIV-424) #66

Closed PureHing closed 2 years ago

PureHing commented 2 years ago

Evaluating the quantized model on the PC (the result is correct, the same as the fp32 model):

[outputs, _] = eva.evalute_quantized_model(img[np.newaxis, ..., np.newaxis], True)  # 16-bit quantization
out = np.reshape(outputs[0], (-1))
print(out)  # shape: 450

out:0.250977,-0.176758,-0.112305,0.060547,-13.604492,5.455078,0.539062,0.054688,-0.067383,-0.698242,-13.085938,4.699219,0.250000,-0.012695,-0.137695,-0.631836,-13.399414,6.176758,0.595703,1.415039,-0.018555,0.151367,-13.418945,8.261719,1.035156,1.540039,-0.164062,-0.026367,-13.350586,8.171875,0.479492,1.749023,-0.130859,-0.222656,-13.000977,9.650391,0.431641,0.058594,-0.200195,0.232422,-14.308594,6.666016,0.502930,0.191406,0.130859,-0.095703,-14.264648,6.189453,0.833008,0.049805,-0.418945,0.401367,-14.893555,8.185547,0.743164,-0.220703,-0.141602,-0.321289,-14.200195,4.931641,0.870117,-0.337891,0.212891,-0.485352,-13.859375,5.105469,0.979492,-0.286133,0.034180,-0.236328,-14.085938,5.588867,1.015625,0.253906,0.083984,-0.056641,-14.302734,4.161133,1.025391,0.176758,0.255859,-0.559570,-14.336914,4.690430,1.280273,-0.026367,0.275391,-0.108398,-14.480469,4.962891,4.291992,1.449219,0.546875,-0.078125,-13.439453,9.039062,3.855469,1.469727,0.348633,-0.084961,-13.711914,9.059570,4.131836,1.147461,0.500977,0.486328,-13.538086,10.201172,0.378906,1.768555,0.241211,0.172852,8.666016,7.746094,0.380859,1.759766,0.262695,0.041016,8.466797,7.958984,0.357422,1.771484,0.107422,0.053711,8.459961,7.833984,-4.337891,1.166992,0.202148,-0.642578,-14.619141,10.533203,-4.406250,1.693359,0.235352,-0.208984,-14.652344,10.272461,-4.353516,1.301758,0.151367,-0.154297,-14.610352,10.080078,0.900391,0.170898,0.016602,0.200195,-12.057617,5.600586,1.421875,0.723633,-0.122070,-0.477539,-12.193359,5.867188,1.085938,0.234375,-0.126953,0.037109,-12.392578,6.725586,0.624023,-0.039062,-0.041992,0.211914,-11.121094,5.233398,0.722656,0.095703,0.250000,-0.235352,-11.137695,5.126953,0.708008,0.142578,0.182617,0.053711,-11.412109,5.269531,1.771484,-0.758789,0.359375,-0.288086,-13.453125,5.805664,1.934570,-0.618164,0.280273,-0.147461,-13.496094,6.825195,1.744141,-0.628906,0.019531,-0.104492,-13.276367,7.678711,0.211914,-4.602539,0.076172,-0.043945,-8.348633,7.416992,0.106445,-4.896484,0.129883,-0.166992,-8.569336,7.334961,0.215820,-4.907227,0.131836,-0.202148,-8.229492,8.449219,-1.743164,-1.010742,0.269531,-0.535156,-13.092773,8.364258,-1.802734,-0.978516,-0.004883,-0.287109,-12.891602,7.549805,-1.395508,-1.244141,0.088867,-0.257812,-12.828125,7.680664,-0.319336,0.015625,0.150391,0.029297,-14.893555,6.615234,0.051758,-0.012695,-0.026367,-0.256836,-14.662109,6.034180,-0.184570,0.041016,-0.239258,-0.239258,-14.588867,7.209961,-0.292969,-0.560547,-0.035156,0.081055,-10.958984,4.184570,0.288086,-0.710938,0.090820,0.059570,-11.273438,4.413086,0.093750,-0.500977,-0.052734,0.176758,-11.213867,4.779297,-0.445312,0.146484,0.108398,-0.066406,-12.413086,3.202148,-0.004883,0.170898,0.177734,-0.289062,-12.261719,3.977539,-0.445312,0.089844,-0.194336,-0.194336,-12.350586,4.142578,0.236328,-0.256836,0.423828,-0.028320,-13.122070,5.625000,0.226562,-0.236328,0.415039,-0.067383,-12.952148,6.209961,0.204102,-0.621094,-0.115234,-0.048828,-12.941406,6.201172,-0.166016,-0.842773,-0.023438,0.088867,-13.711914,5.881836,-0.416992,-1.128906,-0.069336,-0.123047,-13.401367,5.681641,-0.435547,-1.503906,-0.317383,-0.206055,-13.521484,6.309570,-0.154297,-0.673828,-0.059570,-0.045898,-13.975586,5.510742,0.058594,-0.664062,-0.089844,-0.256836,-14.071289,5.425781,-0.519531,-0.735352,-0.266602,-0.410156,-14.156250,5.971680,0.583008,-0.128906,-0.163086,0.156250,-9.775391,3.608398,1.410156,-0.112305,-0.159180,0.106445,-10.117188,3.995117,0.736328,-0.062500,-0.171875,0.193359,-10.370117,3.721680,0.256836,0.128906,0.281250,0.534180,-14.794922,3.031250,0.552734,0.533203,0.1757
81,-0.074219,-14.717773,3.645508,0.306641,0.703125,-0.022461,0.549805,-14.352539,4.047852,-0.444336,0.919922,0.137695,0.457031,-12.096680,4.141602,-0.231445,0.741211,0.120117,-0.093750,-12.156250,4.458984,-0.455078,0.503906,-0.013672,-0.171875,-12.182617,4.589844,-0.130859,-0.086914,-0.138672,0.274414,-13.085938,5.104492,-0.228516,-0.037109,-0.404297,0.127930,-13.279297,4.699219,-0.420898,-0.334961,-0.182617,0.016602,-13.584961,4.984375,-0.086914,0.042969,0.008789,0.136719,-12.492188,3.297852,-0.054688,0.145508,-0.378906,0.052734,-12.362305,3.658203,-0.293945,0.009766,0.209961,-0.098633,-12.892578,3.104492,0.348633,0.172852,0.029297,0.297852,-10.021484,3.531250,0.655273,0.160156,-0.092773,0.200195,-9.941406,4.354492,0.411133,0.197266,0.166016,0.418945,-10.635742,3.828125,

Inference on the ESP32-S3 board (the result is wrong):

input.set_element(IMAGE_ELEMENT).set_exponent(-8).set_shape({inpt, inpt, 1}).set_auto_free(false);

// Dequantize the int16_t output of the last layer (l12); output_exponent is -10.
int16_t *outputs1 = model.l12.get_output().get_element_ptr();
for (int i = 0; i < size_out; i++)
{
    out1[i] = outputs1[i] * pow(2.f, -10);
    printf("%f,", out1[i]);
} // out1 shape: 450
out1:0.549805,-1.241211,0.169922,-0.018555,-0.666016,0.696289,-0.194336,0.189453,-0.766602,-0.349609,-1.056641,-1.004883,-0.450195,-0.447266,-0.557617,-0.749023,-4.623047,5.691406,1.171875,-0.820312,1.213867,0.588867,-0.869141,0.662109,-0.190430,0.782227,-1.392578,-1.647461,-1.224609,-1.661133,-1.437500,-0.892578,-0.151367,-0.990234,-5.458008,7.097656,2.157227,-1.251953,1.219727,-0.148438,-0.869141,0.468750,-0.436523,0.768555,-1.109375,-2.026367,-1.219727,-2.380859,-1.286133,-1.530273,0.166016,-0.749023,-8.664062,7.535156,1.378906,-0.590820,1.210938,0.561523,-0.833984,0.511719,0.405273,0.812500,-1.206055,-2.020508,-1.042969,-1.727539,-1.589844,-1.605469,0.192383,-0.902344,-4.768555,7.388672,1.067383,0.056641,0.660156,0.121094,-0.743164,0.620117,0.581055,1.113281,-0.726562,-1.009766,-0.819336,-0.762695,-1.132812,-1.352539,-0.046875,-0.673828,-2.571289,6.830078,0.510742,-1.624023,0.140625,-0.377930,-0.045898,0.093750,-0.055664,0.095703,0.163086,-1.355469,-0.547852,-0.291992,-0.370117,-1.310547,0.052734,0.155273,-9.000000,7.082031,1.275391,-0.990234,1.346680,0.198242,-0.507812,0.384766,0.371094,1.246094,-0.500977,-1.527344,-0.522461,-0.070312,-0.572266,-0.747070,0.041992,0.620117,-11.640625,8.946289,0.816406,-1.200195,1.885742,-0.156250,-1.082031,0.090820,0.267578,1.011719,-1.105469,-1.852539,0.013672,-0.608398,-1.378906,-0.432617,0.348633,1.232422,-13.443359,9.682617,0.599609,-1.757812,0.993164,0.351562,-1.957031,-0.300781,0.260742,0.834961,-1.486328,-1.280273,0.865234,0.009766,-1.312500,-0.825195,1.012695,1.595703,-12.542969,10.609375,0.597656,-1.358398,1.641602,-0.085938,-0.877930,-0.272461,0.275391,0.857422,-1.203125,-1.593750,0.028320,-0.637695,-1.505859,-0.522461,0.634766,1.991211,-10.746094,7.693359,-0.605469,-1.333984,0.066406,-0.362305,0.245117,0.119141,-0.010742,-0.272461,0.485352,-1.029297,-0.289062,-0.482422,-0.249023,-0.934570,-0.024414,0.165039,-10.249023,6.854492,0.093750,-0.561523,1.747070,0.697266,0.214844,0.922852,0.856445,0.768555,-0.049805,-1.680664,-0.746094,0.059570,-0.671875,-0.612305,0.116211,1.152344,-13.830078,8.681641,-0.106445,-0.728516,2.425781,0.135742,-0.655273,0.592773,0.680664,0.713867,-0.736328,-1.744141,-0.336914,-0.208008,-1.202148,-0.038086,0.354492,0.908203,-13.674805,9.208984,0.468750,-1.246094,1.973633,0.610352,-1.061523,0.052734,0.708008,0.866211,-1.512695,-2.003906,-0.215820,-0.174805,-1.847656,-0.906250,0.546875,0.987305,-13.402344,10.451172,0.520508,-0.738281,1.978516,0.197266,-0.337891,0.570312,0.486328,1.144531,-0.921875,-1.169922,-0.350586,-0.568359,-1.391602,-0.223633,0.850586,2.466797,-11.069336,7.992188,-0.254883,-0.932617,0.273438,0.158203,0.091797,-0.000977,0.075195,-0.194336,0.958008,-0.965820,-0.527344,0.012695,-0.253906,-0.635742,-0.081055,1.140625,-8.752930,6.943359,-0.205078,-0.856445,1.557617,0.276367,-0.104492,0.821289,0.785156,0.668945,0.666016,-1.317383,-0.953125,-0.066406,-0.490234,-0.706055,0.291992,1.877930,-12.208008,8.849609,0.125977,-1.194336,2.051758,-0.049805,-1.043945,0.330078,0.526367,0.644531,-0.369141,-1.595703,-0.398438,0.426758,-0.838867,-0.175781,0.179688,1.163086,-14.944336,9.537109,0.733398,-1.424805,1.419922,0.348633,-0.996094,0.195312,0.715820,0.625000,-1.125977,-1.449219,0.161133,0.077148,-1.185547,-0.895508,0.518555,1.375977,-16.243164,10.588867,0.243164,-0.939453,1.442383,-0.168945,-0.097656,0.757812,0.426758,1.349609,-0.675781,-1.060547,-0.209961,-0.237305,-0.814453,-0.063477,0.644531,3.052734,-12.355469,7.920898,-0.048828,-0.822266,-0.648438,0.552734,0.835938,0.882812,0.596680,1.190430,0.765625,-0.390625,-0.32
3242,0.310547,-0.655273,-0.267578,0.546875,1.336914,-9.912109,6.797852,-0.807617,-1.009766,-0.413086,0.136719,1.518555,0.749023,0.682617,0.314453,1.030273,-1.043945,-0.947266,0.436523,-0.183594,-0.944336,0.376953,2.542969,-12.607422,7.548828,-1.164062,-1.224609,-0.322266,-0.125977,0.206055,0.341797,0.208984,0.052734,0.747070,-1.153320,-0.449219,0.421875,-0.169922,-1.091797,0.254883,2.485352,-16.666016,8.030273,-0.010742,-1.564453,-0.604492,0.040039,0.478516,-0.025391,0.370117,0.088867,0.128906,-1.103516,-0.310547,0.899414,-0.122070,-1.131836,-0.104492,2.301758,-17.416016,8.430664,0.691406,-1.036133,0.039062,-0.314453,0.119141,-0.195312,-0.740234,0.311523,-0.740234,-0.640625,-0.713867,-0.120117,-0.731445,-0.788086,-0.265625,2.136719,-15.479492,7.072266,
img(range is [0,1]) and IMAGE_ELEMENT(int16_t) is the same picture: ``` int16_t IMAGE_ELEMENT[] = { 12, 11, 11, 11, 11, 11, 11, 11, 13, 13, 13, 13, 13, 13, 13, 13, 13, 12, 11, 11, 11, 11, 11, 10, 12, 13, 14, 14, 13, 12, 13, 14, 12, 13, 14, 13, 13, 14, 14, 13, 16, 15, 14, 13, 13, 13, 13, 14, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 14, 15, 16, 15, 14, 13, 13, 12, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 12, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 11, 11, 11, 11, 11, 11, 12, 13, 13, 13, 13, 13, 13, 14, 13, 14, 14, 14, 14, 15, 15, 14, 14, 14, 14, 13, 13, 13, 13, 13, 13, 13, 14, 14, 14, 13, 13, 13, 13, 13, 14, 14, 15, 14, 13, 13, 14, 12, 11, 11, 11, 11, 10, 11, 11, 11, 11, 11, 11, 11, 11, 11, 9, 10, 11, 11, 11, 11, 11, 12, 13, 13, 13, 13, 13, 13, 13, 13, 14, 13, 12, 11, 11, 12, 12, 12, 13, 13, 14, 13, 13, 13, 14, 15, 13, 14, 14, 14, 14, 15, 15, 15, 15, 16, 16, 16, 15, 15, 15, 15, 14, 14, 14, 14, 13, 13, 13, 13, 13, 14, 14, 14, 14, 14, 14, 15, 14, 13, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 9, 10, 11, 11, 11, 10, 11, 12, 12, 12, 13, 13, 13, 13, 13, 13, 14, 13, 12, 12, 12, 12, 12, 13, 13, 13, 14, 14, 14, 14, 14, 15, 14, 14, 14, 14, 15, 15, 15, 15, 14, 15, 16, 15, 15, 14, 15, 15, 14, 14, 14, 14, 14, 13, 13, 13, 13, 14, 15, 15, 15, 15, 16, 16, 14, 13, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 9, 10, 11, 11, 11, 10, 10, 11, 12, 12, 12, 12, 12, 13, 13, 13, 13, 13, 13, 12, 12, 12, 13, 13, 12, 13, 14, 14, 14, 15, 15, 15, 16, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 16, 15, 15, 15, 15, 15, 14, 14, 14, 13, 14, 15, 15, 15, 15, 15, 15, 15, 13, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 10, 11, 11, 11, 10, 10, 10, 11, 12, 12, 12, 12, 12, 11, 11, 11, 12, 12, 13, 13, 13, 13, 13, 13, 12, 13, 14, 15, 16, 16, 16, 16, 15, 15, 14, 15, 15, 14, 14, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 14, 14, 14, 14, 14, 15, 15, 16, 16, 15, 15, 15, 16, 14, 11, 11, 11, 11, 11, 10, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 10, 10, 11, 11, 12, 12, 11, 11, 11, 11, 11, 11, 11, 12, 13, 13, 13, 13, 13, 13, 13, 13, 14, 15, 15, 16, 16, 16, 14, 13, 13, 13, 13, 13, 13, 14, 13, 13, 13, 13, 13, 13, 13, 12, 13, 13, 13, 13, 13, 13, 13, 13, 15, 15, 15, 15, 16, 16, 16, 16, 16, 13, 11, 11, 11, 11, 11, 11, 12, 12, 12, 12, 12, 12, 12, 12, 11, 11, 10, 10, 10, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 12, 13, 14, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 12, 12, 13, 13, 13, 13, 14, 16, 15, 15, 16, 16, 16, 14, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 15, 16, 16, 16, 15, 12, 10, 10, 12, 12, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 10, 12, 11, 17, 13, 12, 14, 13, 10, 11, 15, 11, 13, 15, 11, 12, 12, 15, 10, 21, 35, 69, 73, 97, 99, 87, 93, 95, 97, 91, 94, 54, 65, 46, 22, 14, 13, 13, 16, 12, 12, 14, 14, 11, 15, 12, 12, 18, 15, 14, 12, 10, 10, 13, 14, 10, 12, 11, 11, 11, 11, 11, 11, 12, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 10, 10, 12, 9, 11, 12, 12, 11, 15, 12, 11, 15, 13, 13, 14, 18, 35, 78, 89, 109, 107, 114, 111, 115, 110, 115, 103, 108, 114, 110, 115, 115, 117, 113, 106, 94, 83, 51, 10, 13, 16, 13, 9, 15, 13, 11, 16, 10, 13, 12, 14, 14, 12, 12, 13, 13, 12, 12, 12, 12, 12, 12, 12, 12, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 13, 13, 6, 14, 14, 13, 12, 13, 12, 9, 17, 11, 14, 65, 95, 107, 109, 120, 115, 111, 111, 102, 105, 105, 103, 104, 101, 39, 98, 119, 66, 105, 107, 105, 115, 123, 120, 119, 108, 78, 
26, 17, 15, 14, 12, 16, 11, 12, 13, 11, 13, 15, 13, 11, 12, 13, 12, 12, 13, 13, 13, 13, 12, 12, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 9, 13, 12, 10, 15, 12, 15, 12, 14, 18, 12, 65, 111, 120, 117, 118, 116, 110, 111, 120, 116, 101, 126, 120, 121, 63, 134, 66, 118, 126, 126, 136, 93, 55, 38, 117, 116, 115, 124, 124, 128, 90, 37, 12, 15, 11, 13, 17, 15, 12, 12, 13, 13, 13, 13, 12, 12, 13, 13, 13, 13, 13, 13, 12, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 10, 11, 13, 13, 11, 16, 11, 15, 15, 87, 107, 118, 120, 115, 117, 120, 118, 128, 135, 131, 129, 129, 118, 141, 140, 131, 142, 87, 141, 139, 135, 140, 134, 87, 132, 116, 136, 129, 127, 104, 126, 130, 119, 95, 8, 14, 15, 10, 14, 12, 13, 13, 13, 14, 14, 12, 13, 13, 13, 13, 13, 13, 13, 13, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 12, 11, 16, 10, 14, 11, 14, 20, 12, 105, 118, 129, 124, 115, 125, 124, 127, 136, 137, 135, 140, 138, 144, 139, 145, 141, 143, 137, 142, 140, 145, 141, 144, 145, 136, 146, 95, 134, 134, 135, 90, 133, 120, 134, 147, 104, 94, 18, 12, 11, 12, 14, 14, 12, 13, 14, 13, 13, 13, 13, 13, 13, 13, 13, 13, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 12, 12, 15, 10, 14, 16, 15, 19, 78, 115, 119, 121, 111, 114, 122, 130, 140, 141, 136, 140, 140, 147, 146, 123, 85, 76, 80, 78, 92, 104, 146, 155, 156, 141, 149, 149, 152, 145, 153, 143, 127, 89, 140, 136, 102, 124, 127, 130, 98, 10, 18, 14, 13, 13, 12, 12, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 12, 12, 12, 13, 14, 15, 14, 19, 104, 115, 126, 120, 114, 118, 132, 136, 138, 146, 143, 142, 136, 79, 75, 79, 79, 72, 81, 80, 76, 117, 35, 26, 34, 36, 29, 11, 85, 154, 148, 149, 153, 150, 148, 151, 97, 75, 132, 80, 137, 138, 128, 30, 18, 11, 12, 14, 14, 13, 12, 12, 13, 13, 14, 14, 13, 13, 12, 11, 11, 11, 11, 11, 11, 11, 11, 10, 13, 8, 13, 10, 12, 13, 13, 13, 14, 13, 7, 115, 120, 124, 119, 60, 126, 132, 142, 144, 140, 133, 98, 79, 82, 79, 78, 77, 98, 83, 78, 84, 83, 85, 118, 25, 39, 201, 37, 28, 25, 25, 34, 83, 153, 157, 157, 152, 151, 153, 137, 132, 131, 137, 132, 120, 59, 15, 11, 15, 16, 14, 11, 14, 13, 13, 12, 13, 13, 13, 12, 11, 11, 11, 11, 11, 11, 11, 11, 12, 9, 11, 11, 11, 20, 11, 14, 15, 9, 17, 108, 119, 127, 122, 63, 47, 131, 139, 144, 141, 85, 77, 82, 85, 78, 83, 77, 93, 84, 80, 89, 81, 87, 85, 86, 22, 96, 197, 193, 17, 39, 24, 20, 26, 34, 30, 153, 156, 155, 158, 149, 148, 77, 81, 135, 138, 133, 73, 18, 16, 12, 17, 16, 14, 14, 13, 13, 14, 14, 13, 13, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 15, 10, 11, 10, 16, 11, 12, 19, 112, 116, 123, 117, 115, 141, 111, 132, 135, 115, 83, 79, 83, 77, 82, 84, 101, 98, 87, 85, 87, 85, 85, 85, 87, 86, 88, 26, 116, 195, 193, 61, 25, 34, 30, 39, 25, 26, 120, 146, 156, 149, 154, 84, 144, 133, 125, 151, 135, 31, 17, 15, 15, 12, 15, 14, 14, 15, 15, 15, 14, 13, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 14, 14, 15, 10, 18, 19, 78, 119, 124, 121, 121, 133, 138, 101, 138, 96, 78, 81, 79, 79, 85, 85, 86, 90, 84, 85, 86, 90, 89, 89, 87, 86, 91, 90, 104, 38, 188, 44, 37, 30, 24, 27, 27, 23, 49, 27, 18, 166, 156, 149, 154, 148, 93, 99, 129, 150, 131, 19, 14, 17, 15, 15, 15, 15, 15, 15, 15, 15, 14, 11, 11, 11, 11, 11, 11, 11, 11, 12, 10, 13, 14, 11, 14, 15, 14, 78, 119, 124, 123, 119, 128, 138, 140, 139, 83, 77, 82, 83, 86, 84, 87, 85, 87, 87, 89, 91, 92, 93, 90, 86, 31, 31, 33, 92, 93, 156, 30, 22, 193, 125, 27, 30, 26, 33, 30, 52, 21, 28, 155, 155, 156, 155, 97, 161, 127, 109, 144, 123, 24, 13, 17, 16, 16, 15, 15, 15, 15, 15, 15, 11, 11, 11, 
11, 11, 11, 11, 11, 7, 12, 16, 12, 14, 14, 19, 70, 112, 128, 120, 118, 132, 142, 141, 132, 72, 84, 81, 86, 84, 81, 87, 87, 89, 87, 89, 97, 89, 87, 96, 94, 46, 28, 37, 45, 46, 117, 95, 125, 13, 25, 199, 35, 26, 26, 162, 23, 25, 203, 33, 24, 115, 150, 159, 156, 101, 108, 141, 148, 142, 92, 20, 15, 17, 17, 16, 15, 15, 15, 15, 15, 11, 11, 11, 11, 11, 11, 11, 11, 12, 17, 11, 11, 12, 11, 19, 120, 120, 130, 114, 136, 137, 142, 140, 78, 85, 80, 85, 85, 77, 96, 89, 93, 92, 88, 92, 98, 118, 92, 96, 99, 36, 36, 41, 38, 36, 65, 102, 114, 31, 31, 112, 129, 31, 66, 27, 200, 201, 26, 156, 51, 55, 112, 154, 155, 152, 158, 149, 55, 150, 146, 55, 20, 18, 17, 17, 16, 16, 16, 15, 15, 11, 11, 11, 11, 11, 11, 11, 11, 13, 13, 11, 15, 17, 14, 115, 117, 127, 121, 132, 137, 140, 133, 79, 84, 83, 87, 87, 84, 87, 87, 87, 94, 95, 99, 94, 94, 98, 93, 101, 96, 108, 34, 37, 132, 119, 41, 95, 108, 84, 25, 18, 53, 184, 22, 35, 32, 85, 187, 21, 101, 28, 65, 140, 168, 162, 155, 130, 140, 87, 142, 139, 20, 18, 18, 17, 17, 17, 17, 16, 15, 11, 11, 11, 11, 11, 12, 12, 12, 13, 14, 11, 13, 15, 20, 108, 127, 124, 123, 138, 139, 145, 81, 81, 83, 85, 95, 71, 89, 86, 87, 94, 103, 98, 95, 98, 108, 102, 101, 101, 101, 101, 81, 34, 181, 27, 195, 39, 104, 102, 114, 30, 202, 141, 169, 57, 28, 31, 32, 190, 104, 18, 26, 40, 179, 168, 161, 155, 146, 150, 138, 146, 99, 24, 20, 18, 17, 14, 19, 17, 16, 11, 11, 11, 11, 11, 12, 12, 12, 11, 15, 14, 17, 22, 114, 126, 132, 120, 137, 143, 138, 92, 87, 83, 86, 84, 105, 175, 145, 94, 95, 93, 92, 99, 98, 107, 104, 113, 104, 97, 107, 108, 105, 51, 30, 41, 98, 46, 32, 98, 102, 174, 58, 233, 201, 110, 44, 70, 76, 103, 174, 20, 49, 29, 31, 164, 169, 157, 153, 65, 102, 144, 138, 20, 20, 17, 19, 24, 16, 18, 17, 11, 11, 11, 11, 11, 12, 12, 13, 12, 11, 19, 20, 12, 123, 132, 122, 135, 141, 142, 138, 80, 88, 88, 94, 166, 172, 186, 181, 109, 94, 98, 105, 102, 106, 110, 103, 107, 103, 106, 112, 118, 109, 110, 39, 26, 24, 196, 55, 38, 110, 116, 186, 32, 181, 118, 29, 41, 29, 102, 20, 198, 187, 34, 34, 50, 163, 172, 166, 65, 152, 135, 136, 138, 24, 22, 22, 16, 20, 12, 20, 11, 11, 11, 11, 11, 12, 13, 13, 14, 13, 33, 29, 125, 122, 134, 126, 143, 142, 141, 81, 82, 88, 87, 183, 173, 183, 53, 182, 184, 93, 106, 104, 103, 102, 113, 128, 111, 109, 113, 111, 111, 115, 113, 110, 36, 51, 46, 216, 55, 53, 109, 113, 67, 123, 58, 217, 38, 47, 32, 145, 8, 208, 30, 36, 30, 177, 168, 163, 158, 109, 140, 156, 135, 36, 21, 18, 20, 25, 19, 19, 11, 11, 11, 11, 12, 12, 13, 13, 13, 14, 12, 28, 119, 125, 130, 138, 146, 149, 117, 79, 88, 87, 190, 180, 183, 18, 42, 20, 190, 191, 86, 108, 108, 112, 114, 111, 117, 114, 115, 168, 104, 112, 105, 98, 114, 40, 141, 239, 28, 29, 101, 115, 111, 21, 93, 219, 161, 42, 39, 31, 147, 49, 32, 88, 30, 14, 164, 168, 161, 153, 161, 145, 143, 118, 24, 24, 20, 16, 21, 21, 11, 11, 11, 12, 12, 13, 13, 13, 13, 10, 21, 122, 118, 140, 125, 143, 146, 148, 83, 84, 88, 166, 177, 175, 54, 183, 69, 201, 17, 199, 201, 108, 113, 108, 117, 119, 109, 126, 106, 224, 128, 122, 104, 213, 118, 134, 48, 202, 230, 216, 39, 120, 136, 113, 38, 21, 38, 222, 49, 37, 52, 207, 182, 17, 15, 33, 187, 169, 183, 162, 172, 134, 145, 139, 33, 23, 25, 25, 19, 20, 11, 11, 12, 13, 13, 13, 13, 13, 14, 13, 17, 112, 133, 132, 136, 152, 153, 143, 88, 84, 86, 195, 182, 69, 50, 204, 200, 102, 149, 40, 205, 201, 117, 111, 120, 122, 122, 122, 123, 119, 181, 122, 126, 103, 232, 127, 103, 51, 184, 26, 39, 49, 121, 123, 163, 39, 214, 162, 87, 40, 28, 15, 41, 36, 22, 44, 27, 181, 172, 172, 161, 152, 153, 145, 20, 26, 23, 19, 20, 24, 11, 11, 12, 13, 14, 13, 
13, 12, 12, 14, 24, 128, 133, 137, 147, 146, 152, 87, 111, 92, 89, 98, 198, 181, 192, 202, 20, 65, 34, 205, 200, 101, 115, 116, 127, 123, 121, 129, 126, 128, 130, 223, 137, 224, 161, 120, 132, 62, 42, 40, 56, 218, 32, 126, 124, 205, 45, 116, 62, 18, 42, 31, 42, 18, 213, 26, 34, 175, 176, 172, 176, 160, 152, 133, 136, 32, 23, 24, 22, 22, 11, 11, 11, 13, 13, 13, 13, 13, 12, 20, 59, 131, 139, 137, 148, 152, 161, 83, 88, 91, 94, 94, 98, 201, 187, 108, 214, 43, 202, 203, 118, 113, 117, 121, 123, 131, 128, 127, 131, 134, 133, 154, 225, 180, 107, 222, 125, 129, 33, 41, 123, 66, 182, 51, 133, 128, 108, 120, 49, 53, 30, 34, 33, 119, 35, 29, 44, 139, 176, 175, 84, 165, 142, 150, 134, 34, 26, 24, 21, 22, 11, 11, 12, 13, 13, 13, 13, 13, 13, 16, 134, 131, 146, 136, 155, 153, 131, 88, 86, 85, 99, 92, 100, 109, 198, 197, 44, 204, 215, 113, 113, 117, 125, 121, 125, 135, 127, 138, 134, 241, 135, 132, 140, 182, 230, 128, 222, 240, 130, 37, 49, 147, 25, 34, 99, 133, 129, 15, 209, 13, 90, 39, 30, 35, 36, 30, 31, 21, 171, 177, 168, 164, 113, 153, 143, 43, 21, 25, 22, 22, 12, 12, 12, 13, 13, 13, 13, 13, 17, 17, 94, 137, 142, 145, 159, 152, 85, 87, 89, 109, 100, 96, 95, 103, 104, 195, 200, 199, 112, 117, 118, 121, 122, 130, 125, 126, 138, 136, 133, 227, 148, 137, 139, 232, 138, 126, 228, 235, 138, 128, 48, 54, 212, 215, 59, 126, 132, 126, 35, 229, 35, 151, 41, 32, 30, 33, 30, 38, 178, 176, 172, 115, 142, 149, 142, 44, 25, 27, 21, 25, 13, 12, 13, 13, 13, 13, 13, 13, 16, 26, 119, 142, 142, 149, 153, 162, 87, 86, 96, 89, 104, 98, 99, 100, 106, 107, 128, 107, 112, 115, 125, 122, 127, 134, 128, 133, 117, 121, 128, 223, 250, 237, 133, 135, 244, 128, 139, 197, 179, 130, 136, 53, 65, 245, 158, 52, 130, 133, 138, 50, 116, 221, 201, 47, 35, 30, 37, 43, 187, 176, 170, 135, 73, 153, 144, 40, 32, 24, 24, 24, 13, 13, 13, 13, 13, 13, 13, 13, 14, 33, 135, 139, 143, 155, 158, 155, 87, 88, 90, 98, 101, 103, 104, 103, 105, 108, 109, 118, 118, 119, 125, 121, 128, 127, 136, 132, 131, 165, 130, 228, 215, 133, 217, 143, 129, 222, 238, 229, 145, 222, 137, 129, 53, 88, 229, 212, 44, 124, 132, 214, 35, 189, 200, 96, 33, 34, 37, 91, 182, 175, 177, 96, 120, 148, 138, 76, 34, 24, 29, 23, 13, 12, 12, 13, 13, 13, 13, 14, 19, 19, 134, 137, 146, 153, 164, 164, 87, 94, 94, 92, 98, 103, 107, 102, 111, 110, 108, 114, 119, 119, 118, 74, 140, 135, 130, 131, 138, 139, 219, 143, 127, 116, 237, 140, 142, 142, 147, 138, 229, 224, 136, 147, 77, 55, 210, 167, 43, 34, 130, 118, 175, 24, 223, 40, 204, 45, 32, 31, 116, 172, 171, 169, 166, 145, 147, 96, 38, 30, 23, 25, 13, 12, 12, 13, 13, 13, 13, 14, 19, 33, 135, 146, 143, 154, 160, 160, 98, 94, 95, 94, 102, 100, 106, 108, 116, 104, 58, 62, 68, 55, 131, 132, 151, 129, 135, 137, 132, 141, 133, 114, 234, 231, 107, 230, 146, 139, 142, 141, 226, 241, 227, 237, 143, 45, 61, 54, 29, 47, 73, 120, 123, 55, 42, 119, 42, 217, 47, 31, 49, 174, 173, 101, 151, 147, 146, 100, 41, 27, 28, 23, 13, 12, 12, 13, 13, 13, 14, 14, 19, 49, 138, 142, 146, 159, 164, 160, 89, 92, 94, 98, 98, 106, 102, 53, 111, 68, 53, 66, 80, 57, 128, 115, 48, 130, 136, 134, 131, 135, 131, 127, 232, 139, 227, 145, 231, 144, 141, 228, 155, 142, 133, 246, 177, 136, 52, 208, 213, 78, 46, 124, 126, 120, 20, 128, 121, 26, 210, 44, 33, 172, 177, 110, 162, 142, 139, 115, 40, 29, 23, 27, 13, 13, 13, 13, 13, 13, 14, 14, 19, 71, 132, 147, 134, 159, 163, 169, 91, 95, 94, 115, 102, 100, 58, 55, 72, 75, 67, 68, 76, 78, 83, 82, 74, 233, 58, 126, 136, 135, 138, 138, 135, 237, 146, 123, 232, 146, 147, 148, 145, 145, 233, 172, 177, 207, 130, 47, 44, 223, 208, 55, 127, 
117, 44, 173, 39, 76, 209, 24, 31, 174, 170, 129, 137, 143, 144, 115, 37, 31, 25, 26, 13, 13, 13, 13, 13, 13, 14, 15, 19, 45, 129, 148, 144, 163, 160, 166, 88, 94, 98, 97, 100, 65, 67, 63, 73, 82, 211, 82, 75, 79, 229, 69, 87, 74, 49, 88, 150, 134, 138, 142, 141, 143, 145, 236, 130, 232, 235, 147, 144, 194, 237, 229, 232, 132, 125, 134, 61, 61, 223, 55, 55, 115, 32, 58, 48, 35, 33, 33, 43, 173, 176, 140, 85, 139, 150, 114, 38, 31, 26, 26, 13, 13, 13, 13, 13, 13, 14, 15, 17, 29, 133, 150, 147, 157, 162, 167, 83, 95, 104, 103, 99, 169, 61, 70, 58, 182, 219, 225, 59, 80, 58, 232, 64, 80, 103, 78, 240, 87, 130, 138, 145, 141, 136, 236, 188, 141, 132, 143, 143, 138, 137, 136, 221, 217, 193, 131, 97, 48, 49, 199, 224, 50, 116, 113, 200, 51, 30, 34, 100, 176, 169, 88, 80, 150, 148, 101, 40, 30, 27, 29, 13, 13, 13, 13, 13, 13, 14, 15, 19, 33, 134, 147, 151, 159, 169, 160, 91, 92, 104, 97, 43, 49, 63, 63, 62, 68, 214, 77, 204, 75, 80, 189, 46, 87, 76, 76, 81, 223, 62, 133, 145, 140, 145, 148, 149, 230, 246, 234, 172, 138, 140, 241, 233, 137, 131, 132, 129, 54, 50, 41, 213, 53, 30, 115, 115, 140, 44, 27, 186, 174, 176, 111, 88, 147, 149, 90, 43, 28, 31, 24, 13, 13, 13, 13, 13, 13, 14, 15, 16, 39, 138, 147, 154, 161, 169, 160, 92, 96, 100, 103, 49, 56, 65, 67, 72, 70, 82, 219, 70, 237, 55, 81, 222, 199, 77, 78, 81, 78, 218, 104, 137, 143, 144, 144, 140, 237, 137, 138, 222, 138, 136, 136, 133, 131, 130, 131, 129, 131, 30, 39, 54, 45, 47, 87, 121, 111, 76, 47, 174, 174, 166, 96, 86, 153, 142, 44, 36, 28, 29, 29, 13, 13, 13, 13, 13, 13, 14, 15, 14, 37, 135, 144, 153, 158, 165, 170, 82, 98, 99, 95, 49, 66, 61, 60, 70, 75, 72, 65, 95, 219, 215, 249, 75, 87, 229, 83, 88, 75, 87, 226, 160, 142, 142, 146, 144, 226, 126, 225, 126, 139, 135, 142, 136, 133, 132, 131, 130, 126, 125, 38, 50, 50, 40, 107, 113, 115, 107, 40, 170, 170, 182, 167, 152, 156, 131, 42, 39, 27, 28, 25, 13, 13, 13, 13, 13, 13, 14, 15, 18, 32, 117, 139, 156, 154, 165, 169, 82, 88, 102, 95, 36, 60, 55, 55, 68, 74, 204, 73, 69, 60, 80, 195, 178, 80, 224, 240, 81, 74, 78, 87, 59, 137, 146, 143, 140, 119, 222, 141, 136, 137, 138, 138, 132, 134, 132, 132, 129, 124, 123, 124, 41, 50, 32, 112, 111, 115, 112, 90, 175, 172, 101, 85, 137, 154, 138, 52, 34, 27, 30, 29, 13, 13, 13, 13, 13, 13, 14, 14, 13, 31, 110, 143, 159, 144, 166, 165, 157, 93, 102, 96, 55, 60, 63, 58, 92, 204, 217, 145, 102, 129, 231, 80, 86, 187, 82, 233, 132, 83, 78, 79, 64, 71, 136, 136, 141, 160, 145, 134, 140, 132, 132, 136, 132, 134, 129, 128, 126, 130, 125, 121, 121, 115, 112, 112, 111, 120, 108, 89, 171, 176, 159, 165, 149, 150, 116, 47, 32, 31, 28, 27, 13, 13, 13, 13, 13, 13, 14, 15, 16, 22, 43, 144, 154, 152, 165, 167, 164, 90, 97, 96, 48, 49, 59, 62, 69, 78, 210, 111, 177, 72, 170, 76, 216, 159, 55, 82, 52, 180, 77, 78, 79, 163, 134, 136, 136, 131, 145, 148, 136, 134, 129, 132, 134, 135, 131, 123, 124, 127, 122, 120, 121, 122, 117, 112, 110, 140, 113, 178, 172, 177, 184, 91, 154, 149, 83, 52, 28, 28, 28, 30, 13, 13, 13, 13, 12, 13, 13, 14, 14, 24, 46, 145, 148, 160, 164, 169, 165, 92, 97, 97, 100, 78, 54, 59, 69, 68, 57, 70, 234, 219, 67, 210, 71, 223, 239, 84, 70, 196, 57, 85, 76, 60, 64, 137, 131, 135, 130, 148, 149, 129, 132, 133, 132, 126, 132, 132, 127, 124, 127, 122, 121, 113, 113, 111, 108, 110, 99, 169, 176, 172, 111, 129, 153, 148, 62, 41, 33, 32, 27, 25, 13, 13, 13, 13, 13, 13, 13, 14, 14, 14, 32, 118, 154, 160, 152, 163, 168, 173, 97, 95, 92, 108, 58, 61, 62, 65, 72, 205, 206, 51, 231, 48, 69, 134, 239, 217, 57, 59, 229, 82, 81, 76, 50, 130, 141, 155, 150, 
131, 182, 152, 139, 128, 130, 134, 136, 138, 127, 128, 127, 112, 125, 120, 110, 109, 109, 106, 137, 168, 186, 175, 109, 149, 154, 149, 49, 41, 28, 28, 31, 25, 13, 13, 13, 13, 13, 14, 14, 15, 17, 16, 29, 66, 140, 147, 159, 167, 170, 161, 91, 107, 105, 74, 61, 61, 58, 68, 75, 90, 56, 82, 123, 213, 81, 236, 49, 69, 225, 73, 125, 180, 86, 87, 235, 130, 130, 135, 154, 141, 132, 180, 138, 139, 126, 128, 133, 125, 124, 150, 123, 129, 118, 115, 113, 112, 117, 102, 168, 177, 99, 88, 156, 157, 148, 125, 51, 31, 29, 30, 26, 24, 13, 13, 13, 13, 13, 13, 14, 15, 15, 13, 24, 52, 131, 147, 161, 159, 171, 167, 148, 95, 103, 94, 166, 58, 62, 93, 82, 104, 80, 218, 209, 56, 231, 72, 86, 230, 129, 223, 80, 166, 135, 85, 61, 110, 126, 132, 140, 136, 145, 128, 141, 131, 128, 129, 128, 127, 121, 121, 117, 123, 114, 113, 114, 110, 104, 158, 172, 177, 169, 165, 152, 156, 154, 61, 46, 29, 30, 25, 23, 27, 13, 13, 13, 12, 12, 12, 13, 14, 15, 16, 23, 37, 135, 146, 155, 159, 167, 167, 169, 97, 95, 111, 88, 62, 72, 85, 200, 62, 63, 88, 68, 203, 205, 203, 78, 58, 217, 65, 236, 87, 142, 81, 69, 60, 125, 116, 131, 139, 141, 132, 123, 132, 129, 123, 123, 118, 122, 121, 124, 119, 119, 105, 111, 104, 102, 170, 172, 174, 168, 163, 162, 155, 163, 51, 30, 28, 23, 29, 30, 25, 13, 13, 13, 13, 12, 13, 13, 14, 15, 16, 12, 31, 57, 139, 159, 159, 160, 167, 167, 165, 99, 103, 98, 67, 67, 88, 194, 199, 79, 71, 74, 189, 217, 109, 62, 77, 215, 46, 223, 71, 88, 70, 80, 51, 120, 118, 122, 131, 143, 134, 124, 130, 128, 124, 119, 120, 117, 118, 112, 110, 112, 119, 101, 106, 171, 175, 177, 171, 168, 150, 161, 157, 66, 47, 29, 29, 25, 30, 24, 23, 13, 13, 13, 13, 13, 14, 15, 15, 13, 16, 16, 25, 47, 141, 155, 155, 158, 168, 171, 169, 123, 96, 107, 89, 146, 75, 93, 221, 156, 57, 69, 160, 36, 227, 125, 185, 57, 140, 175, 166, 74, 69, 70, 58, 116, 116, 118, 122, 127, 150, 124, 126, 121, 122, 120, 114, 113, 114, 113, 110, 113, 100, 107, 115, 169, 172, 177, 166, 158, 162, 158, 131, 55, 31, 27, 25, 27, 25, 22, 26, 12, 13, 13, 13, 13, 14, 14, 15, 15, 15, 15, 15, 31, 67, 141, 150, 160, 157, 167, 166, 165, 97, 109, 109, 89, 207, 59, 56, 68, 61, 61, 59, 66, 58, 64, 207, 68, 147, 184, 216, 151, 62, 45, 93, 118, 123, 121, 124, 129, 135, 119, 122, 115, 118, 116, 115, 111, 116, 111, 109, 110, 108, 78, 170, 170, 171, 176, 159, 158, 159, 151, 62, 40, 27, 26, 26, 24, 26, 24, 24, 12, 13, 13, 13, 13, 14, 14, 15, 16, 14, 15, 18, 20, 51, 81, 145, 156, 164, 162, 167, 164, 159, 98, 102, 98, 94, 189, 63, 61, 64, 55, 76, 76, 207, 194, 40, 69, 65, 72, 210, 60, 72, 204, 107, 111, 118, 123, 128, 128, 125, 125, 120, 113, 116, 131, 107, 106, 110, 107, 106, 100, 98, 170, 170, 170, 172, 165, 151, 163, 155, 92, 53, 32, 23, 25, 26, 23, 24, 23, 22, 12, 13, 13, 13, 13, 14, 15, 15, 14, 16, 17, 17, 17, 26, 60, 150, 150, 152, 159, 164, 171, 162, 166, 99, 101, 90, 143, 97, 55, 54, 60, 69, 114, 88, 65, 60, 62, 65, 67, 51, 62, 61, 52, 103, 106, 113, 114, 124, 113, 110, 120, 115, 111, 184, 207, 140, 108, 105, 104, 105, 108, 167, 170, 166, 164, 169, 153, 165, 151, 206, 62, 43, 26, 22, 25, 25, 22, 23, 23, 22, 12, 12, 13, 13, 13, 14, 15, 15, 16, 13, 17, 14, 16, 18, 45, 62, 146, 150, 155, 158, 163, 170, 163, 159, 102, 107, 93, 95, 41, 68, 60, 58, 66, 63, 66, 61, 59, 53, 65, 66, 65, 117, 99, 112, 109, 111, 106, 103, 110, 112, 110, 107, 110, 122, 141, 100, 109, 104, 96, 93, 162, 170, 156, 166, 167, 157, 158, 154, 191, 68, 42, 24, 24, 23, 25, 23, 20, 22, 23, 21, 12, 12, 13, 13, 13, 14, 15, 15, 15, 15, 15, 14, 20, 14, 17, 50, 76, 134, 145, 155, 155, 166, 164, 162, 147, 95, 96, 91, 90, 71, 52, 54, 
56, 54, 57, 59, 57, 54, 58, 72, 111, 95, 95, 99, 101, 104, 106, 106, 105, 101, 105, 103, 104, 109, 100, 106, 96, 97, 76, 158, 170, 71, 98, 151, 147, 156, 155, 151, 62, 38, 26, 25, 23, 23, 22, 22, 20, 21, 22, 19, 11, 12, 12, 13, 13, 14, 15, 15, 15, 13, 17, 17, 17, 20, 19, 28, 51, 107, 148, 151, 151, 151, 169, 157, 166, 151, 125, 88, 95, 90, 86, 87, 46, 161, 101, 42, 41, 107, 143, 62, 84, 89, 97, 102, 102, 104, 99, 107, 107, 101, 106, 103, 95, 97, 96, 93, 97, 171, 156, 162, 166, 159, 131, 131, 153, 158, 150, 66, 50, 32, 24, 19, 21, 22, 20, 21, 22, 21, 21, 18, 11, 11, 12, 13, 13, 14, 15, 15, 17, 15, 13, 16, 20, 15, 19, 21, 33, 56, 129, 135, 147, 150, 165, 154, 155, 156, 155, 149, 87, 85, 91, 90, 82, 86, 83, 65, 69, 78, 83, 94, 97, 93, 96, 87, 91, 90, 93, 93, 89, 94, 91, 92, 90, 96, 95, 84, 153, 153, 159, 159, 159, 163, 138, 148, 144, 145, 132, 58, 33, 26, 16, 23, 20, 21, 18, 20, 21, 19, 20, 18, 11, 11, 12, 12, 13, 13, 14, 15, 15, 16, 17, 18, 14, 18, 18, 20, 21, 37, 55, 130, 142, 136, 138, 157, 143, 150, 151, 149, 149, 156, 85, 87, 84, 84, 81, 84, 84, 86, 88, 90, 90, 85, 87, 91, 88, 89, 88, 88, 89, 88, 90, 89, 89, 61, 144, 149, 149, 154, 155, 155, 147, 137, 149, 146, 143, 118, 51, 37, 22, 16, 20, 20, 20, 21, 15, 17, 18, 15, 18, 18, 11, 12, 12, 13, 13, 14, 14, 15, 16, 14, 15, 17, 17, 16, 17, 19, 21, 22, 35, 56, 120, 142, 129, 140, 151, 136, 150, 148, 149, 145, 138, 149, 69, 82, 87, 79, 87, 82, 87, 87, 87, 84, 84, 84, 84, 87, 81, 86, 85, 82, 88, 85, 139, 139, 145, 146, 144, 147, 142, 139, 135, 143, 137, 133, 70, 47, 29, 22, 17, 17, 18, 16, 17, 17, 16, 17, 17, 18, 17, 16, 11, 11, 12, 12, 12, 13, 14, 14, 16, 14, 15, 17, 18, 17, 18, 19, 20, 21, 23, 31, 55, 68, 146, 126, 130, 141, 133, 146, 142, 145, 143, 146, 132, 132, 130, 87, 69, 83, 82, 81, 75, 79, 80, 78, 83, 76, 81, 77, 65, 147, 130, 127, 139, 140, 140, 135, 136, 136, 126, 134, 139, 130, 126, 41, 45, 30, 20, 19, 16, 16, 18, 17, 17, 16, 16, 17, 17, 16, 15, 14, 11, 11, 11, 11, 11, 12, 13, 14, 14, 13, 14, 16, 17, 17, 17, 19, 19, 18, 17, 23, 32, 53, 55, 117, 125, 123, 132, 137, 136, 130, 144, 139, 136, 135, 133, 136, 128, 125, 128, 125, 129, 139, 134, 136, 129, 129, 128, 127, 124, 133, 129, 136, 129, 130, 135, 129, 123, 133, 130, 130, 119, 106, 50, 39, 25, 17, 15, 18, 16, 15, 16, 16, 16, 16, 16, 16, 16, 15, 13, 13, 11, 11, 11, 11, 11, 11, 12, 13, 13, 13, 14, 16, 17, 17, 17, 18, 19, 18, 20, 20, 25, 29, 40, 54, 118, 136, 117, 124, 121, 131, 137, 134, 131, 130, 127, 130, 130, 129, 128, 128, 129, 129, 133, 126, 128, 129, 128, 132, 130, 134, 129, 126, 127, 127, 123, 127, 125, 124, 126, 128, 57, 45, 36, 16, 18, 14, 15, 17, 15, 14, 15, 15, 15, 15, 15, 15, 14, 14, 13, 12, 11, 11, 11, 11, 11, 11, 12, 13, 14, 14, 15, 16, 17, 17, 18, 19, 22, 18, 19, 17, 21, 21, 23, 33, 48, 49, 118, 113, 126, 125, 115, 133, 129, 125, 129, 128, 123, 120, 122, 125, 123, 120, 124, 121, 126, 127, 125, 129, 123, 116, 119, 116, 118, 124, 117, 120, 119, 112, 126, 49, 38, 28, 14, 18, 16, 14, 14, 16, 15, 15, 15, 16, 15, 15, 15, 14, 14, 13, 13, 13, 11, 12, 11, 11, 11, 11, 12, 12, 13, 14, 14, 15, 16, 17, 17, 18, 17, 18, 24, 18, 23, 22, 20, 26, 33, 36, 48, 43, 115, 116, 114, 113, 112, 115, 121, 122, 119, 120, 122, 110, 116, 119, 111, 120, 118, 117, 113, 112, 115, 124, 117, 121, 115, 116, 111, 95, 99, 52, 36, 33, 23, 18, 15, 16, 15, 15, 15, 15, 16, 15, 15, 15, 15, 15, 14, 13, 13, 13, 13, 13, 11, 12, 12, 11, 11, 11, 11, 12, 12, 13, 13, 13, 14, 15, 16, 16, 17, 18, 16, 26, 18, 15, 26, 17, 20, 23, 26, 43, 47, 50, 129, 133, 110, 111, 107, 108, 112, 111, 107, 119, 118, 118, 117, 115, 121, 113, 
117, 111, 110, 115, 109, 118, 94, 95, 56, 41, 36, 27, 19, 13, 19, 17, 14, 13, 15, 16, 16, 15, 15, 15, 14, 14, 15, 14, 13, 13, 13, 13, 13, 12, 11, 12, 12, 12, 11, 11, 11, 11, 13, 13, 13, 12, 13, 15, 16, 16, 17, 21, 20, 16, 22, 22, 19, 21, 20, 19, 19, 24, 31, 43, 46, 52, 49, 114, 97, 101, 109, 101, 106, 108, 112, 112, 110, 104, 102, 108, 104, 107, 110, 114, 87, 64, 48, 35, 35, 26, 21, 18, 17, 14, 14, 15, 14, 17, 15, 16, 15, 13, 13, 14, 13, 15, 15, 14, 13, 13, 13, 13, 12, 11, 10, 12, 12, 11, 11, 11, 12, 11, 12, 12, 12, 13, 14, 14, 14, 14, 16, 17, 18, 20, 21, 21, 21, 21, 20, 19, 18, 20, 24, 26, 28, 33, 39, 48, 46, 45, 49, 56, 105, 103, 102, 106, 99, 97, 103, 91, 61, 42, 52, 48, 41, 34, 27, 22, 19, 17, 15, 14, 14, 13, 13, 13, 13, 13, 14, 14, 14, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 11, 12, 12, 11, 11, 11, 11, 11, 11, 11, 11, 12, 13, 13, 13, 14, 15, 15, 17, 18, 19, 19, 20, 20, 21, 19, 17, 18, 20, 20, 20, 24, 21, 23, 31, 30, 30, 31, 44, 41, 40, 43, 37, 33, 37, 37, 31, 30, 28, 26, 23, 20, 18, 16, 15, 15, 14, 14, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 12, 12, 12, 11, 11, 11, 11, 11, 11, 11, 11, 11, 12, 12, 13, 14, 13, 14, 15, 17, 18, 18, 18, 19, 21, 20, 17, 17, 17, 15, 14, 16, 16, 12, 21, 18, 16, 18, 17, 22, 24, 25, 22, 19, 19, 18, 16, 16, 14, 14, 14, 14, 14, 14, 14, 14, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 14, 14, 13, 13, 13, 13, 13, 13, 13, 13, 11, 11, 11, 11, 11, 11, 12, 13, 11, 12, 12, 12, 12, 13, 14, 14, 13, 13, 15, 16, 17, 17, 18, 19, 20, 19, 17, 17, 17, 16, 14, 16, 14, 14, 15, 15, 15, 12, 14, 16, 17, 14, 13, 15, 17, 17, 16, 14, 15, 16, 16, 16, 15, 14, 14, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 11, 11, 11, 12, 11, 11, 12, 13, 11, 12, 13, 13, 13, 14, 14, 13, 13, 13, 14, 15, 16, 16, 17, 18, 17, 18, 17, 16, 18, 17, 16, 17, 18, 20, 17, 15, 21, 16, 18, 14, 17, 14, 14, 15, 13, 13, 14, 14, 15, 15, 15, 15, 14, 13, 13, 12, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 11, 11, 12, 12, 12, 11, 11, 12, 10, 12, 13, 12, 12, 13, 13, 11, 13, 12, 13, 14, 14, 15, 15, 16, 16, 17, 16, 16, 18, 17, 16, 17, 16, 16, 18, 12, 17, 17, 18, 17, 15, 14, 16, 17, 16, 16, 17, 16, 14, 14, 14, 14, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 11, 11, 12, 13, 13, 11, 11, 11, 10, 12, 13, 12, 12, 13, 12, 11, 13, 12, 11, 12, 13, 13, 13, 14, 15, 16, 15, 15, 17, 17, 16, 16, 15, 17, 22, 19, 17, 17, 16, 18, 21, 18, 15, 13, 13, 14, 14, 14, 15, 15, 14, 14, 14, 14, 14, 14, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 12, 12, 12, 12, 11, 10, 12, 13, 13, 11, 11, 12, 11, 13, 13, 11, 11, 13, 13, 11, 14, 12, 11, 12, 13, 13, 13, 12, 13, 14, 13, 13, 15, 16, 15, 15, 16, 17, 12, 17, 13, 16, 17, 17, 15, 16, 16, 15, 16, 16, 15, 15, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 14, 14, 13, 13, 12, 12, 12, 12, 13, 13, 13, 12, 12, 12, 12, 12}; ```

@Auroragan @yehangyang

Sorry to disturb you. The inference result is wrong on the board; where can I start locating the problem?

yehangyang commented 2 years ago

@PureHing I'm sorry to say you have to check the results layer by layer; otherwise we cannot locate the problem. You can:

  1. export the fp32 layer result (multiplied by 2^-exponent)
  2. save it as an array in C/C++
  3. check the result's correctness with Tensor.check_element() (a minimal sketch follows)
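For example, a minimal sketch of step 3 on the board (the ground-truth array name here is just a placeholder; it would hold the PC-side fp32 layer output multiplied by 2^-exponent and rounded to int16_t):

    // Hypothetical ground-truth array exported from the PC side
    // (fp32 output of layer l0, multiplied by 2^-exponent and rounded to int16_t).
    static const int16_t l0_ground_truth[] = { /* exported values */ };

    // After model.forward(input), compare the on-board output of l0
    // with the exported ground truth, element by element.
    model.l0.get_output().check_element((int16_t *)l0_ground_truth);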
PureHing commented 2 years ago

@yehangyang Hi, it seems that my model output is already wrong from the first layer. First layer (name: l0): conv2d, 1x80x80x1 -> 1x80x80x4, kernel size 3x3, pad = (1, 1, 1, 1).

My steps:

1. Get the fp32 result of the first conv2d layer from the ONNX model.
2. On the ESP32-S3:

    // Dequantize the int16_t output of the first layer (l0); output_exponent is -12.
    int16_t *score = model.l0.get_output().get_element_ptr();
    int size_out = model.l0.get_output().get_size();
    for (int i = 0; i < size_out; i++)
    {
        result[i] = score[i] * pow(2.f, -12);
        printf("%f,", result[i]);
    }

size_out is 26896 (80x80x4 = 25600, padded up to 82x82x4 = 26896).

3. The gap between the two results is very big.

4. Regarding:

  • export the fp32 layer result by (* 2 ^ -exponent)
  • save it as a array in C/C++

Do you mean the result on the ESP32-S3?

yehangyang commented 2 years ago

@PureHing

PureHing commented 2 years ago

@yehangyang Hi, here is the code:

    input.set_element(IMAGE_ELEMENT).set_exponent(-8).set_shape({IMAGE_HEIGHT, IMAGE_WIDTH, IMAGE_CHANNEL}).set_auto_free(false);
    input.print_shape();
    DMMODEL model;
    // model forward
    latency.start();
    model.forward(input);
    latency.end();
    latency.print("detect", "forward");
    // parse
    printf("\n--------\n");
    int size_out = model.l0.get_output().get_size();
    model.l0.get_output().print_shape();
    printf("\nSize_out:%d\n", size_out);
    int16_t *score = model.l0.get_output().get_element_ptr();
    if (NULL == score)
    {
        printf("failed\n");
    }
    // Compare the on-board l0 output with the ground-truth array exported from the PC.
    model.l0.get_output().check_element((int16_t *)l0data);
    printf("\nend\n");

Log:

shape = (0 + 80 + 0, 0 + 80 + 0, 1(1))
detect::forward: 20965 us

--------
shape = (1 + 80 + 1, 1 + 80 + 1, 4(4))

Size_out:26896
shape = (1 + 80 + 1, 1 + 80 + 1, 4(4))
element[0, 0, 0]: 32767 v.s. -8918

end

The first element is already wrong, the same as what I found with my own method.

yehangyang commented 2 years ago

@PureHing Would you mind changing the first layer's padding style from "SAME" to "VALID"? There is a problem with it. We've planned to drop the "padding in advance" strategy in the next version; at that point this problem will no longer happen.

Or, for now, you can call input.set_padding_size({1,1,1,1}) just before sending the input into Model.call(input).

PureHing commented 2 years ago

@yehangyang Does only the first layer need to be defined like this: l1(Conv2D<int16_t>(-2, get_l1_filter(), get_l1_bias(), get_l1_activation(), PADDING_VALID, 2, 2, "l1"))?

yehangyang commented 2 years ago

@PureHing Yes, just avoid using PADDING_SAME for the first layer.

PureHing commented 2 years ago

@yehangyang Does downsampling with stride = 2 or max pooling follow floor or ceil?

yehangyang commented 2 years ago

@PureHing I can't say simply floor or ceil, but it should be the same as TensorFlow's behaviour.
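For reference, a quick standalone sketch (mine, not esp-dl code) of the TensorFlow-style output sizes for SAME and VALID padding, which is the behaviour referred to above:

    // TensorFlow-style output sizes (sketch, not esp-dl code):
    //   SAME:  out = ceil(in / stride)
    //   VALID: out = floor((in - kernel) / stride) + 1
    #include <cstdio>

    static int out_same(int in, int stride)               { return (in + stride - 1) / stride; }
    static int out_valid(int in, int kernel, int stride)  { return (in - kernel) / stride + 1; }

    int main()
    {
        // e.g. an 80x80 input, 3x3 kernel, stride 2
        printf("SAME:  %d\n", out_same(80, 2));      // 40
        printf("VALID: %d\n", out_valid(80, 3, 2));  // 39
        return 0;
    }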

PureHing commented 2 years ago

@yehangyang ok thanks

PureHing commented 2 years ago

input.set_padding_size({1,1,1,1})


    input.set_element(IMAGE_ELEMENT).set_exponent(-8).set_shape({IMAGE_HEIGHT, IMAGE_WIDTH, IMAGE_CHANNEL}).set_auto_free(false);
    input.set_padding_size({1,1,1,1});
    model.forward(input);

Is this right?

yehangyang commented 2 years ago

@PureHing , Yes.

PureHing commented 2 years ago

Or, for now, you can call input.set_padding_size({1,1,1,1}) just before sending the input into Model.call(input).

@yehangyang The inference result is wrong with this approach.

Auroragan commented 2 years ago

@PureHing Didn't you say before that the result was correct?

PureHing commented 2 years ago

@Auroragan

1.

Would you mind changing the first layer's padding style from "SAME" to "VALID".

I changed my model to avoid the padding problem in the first layer; then the forward output on the ESP32-S3 is correct compared to the PC.

2.

call input.set_padding_size({1,1,1,1}) just before sending the input into Model.call(input), right now.

If I use the old model, whose first-layer padding is "SAME", and add input.set_padding_size({1,1,1,1}), then the forward output on the ESP32-S3 is wrong compared to the PC.

xiao-mb commented 2 years ago

@yehangyang Hi, it seems that my model output is already wrong from the first layer. First layer (name: l0): conv2d, 1x80x80x1 -> 1x80x80x4, kernel size 3x3, pad = (1, 1, 1, 1).

My steps:

1. Get the fp32 result of the first conv2d layer from the ONNX model.
2. On the ESP32-S3:

    int16_t *score = model.l0.get_output().get_element_ptr();
    int size_out = model.l0.get_output().get_size();
    for (int i = 0; i < size_out; i++)
    {
        result[i] = score[i] * pow(2.f, -12);
        printf("%f,",result[i]);
    }

size_out is 26896 (80x80x4 = 25600, padded up to 82x82x4 = 26896).

3. The gap between the two results is very big.

4. Regarding:

  • export the fp32 layer result by (* 2 ^ -exponent)
  • save it as a array in C/C++

Do you mean the result on the ESP32-S3?

@PureHing Hi, can you share your code for step 1?

PureHing commented 2 years ago

If I use the old model, whose first-layer padding is "SAME", and add input.set_padding_size({1,1,1,1}), then the forward output on the ESP32-S3 is wrong compared to the PC.

@yehangyang Hi, am I using this method incorrectly?

@xiao-mb follow this

Auroragan commented 2 years ago

@PureHing The result of the first layer, or the final result?

PureHing commented 2 years ago

@Auroragan Neither is right

Auroragan commented 2 years ago

Would you mind changing the first layer's padding style from "SAME" to "VALID".

I changed my model to avoid the padding problem in the first layer; then the forward output on the ESP32-S3 is correct compared to the PC.

So the one you just showed is another model using PADDING_VALID, without adding set_padding_size?

PureHing commented 2 years ago

The input is set like this: input.set_element(IMAGE_ELEMENT).set_exponent(-8).set_shape({IMAGE_HEIGHT, IMAGE_WIDTH, IMAGE_CHANNEL}).set_auto_free(false);

Auroragan commented 2 years ago

@PureHing Is there any difference between this model that gets the wrong result and the previous model that's correct?

PureHing commented 2 years ago

no

@Auroragan @yehangyang If the simulated outputs on the PC ([outputs, _] = eva.evalute_quantized_model(img[np.newaxis, np.newaxis, ...], True)) are correct, does that mean my ONNX model is correct?

Auroragan commented 2 years ago

yes

PureHing commented 2 years ago

@Auroragan @yehangyang Have you received my model and project code? I sent it to shiqiqi. Please help me take a look.

Auroragan commented 2 years ago

Yeah, got it. Can you also provide the pickle file and the test input for the PC (an npy file)?

PureHing commented 2 years ago

@Auroragan Please give me your email address

Auroragan commented 2 years ago

please send to shiqiqi

PureHing commented 2 years ago

Got it

Auroragan commented 2 years ago

Sorry, I meant the generated pickle file of the quantization table, and the floating-point input you used as ground truth, so that I can know the correct result of each layer.

PureHing commented 2 years ago

@Auroragan The ground-truth outputs of the first and last layers are in image.hpp.

PureHing commented 2 years ago

The ground-truth outputs of the first and last layers are in image.hpp.

@Auroragan Does this work for you?

Auroragan commented 2 years ago

Hmm, the last layer is not the same as the gt you provided, and Conv_0_data is not the result after ReLU, right? If you can provide the pickle file (quantization table) and the fp input, it will be easier for me to find what's wrong.

PureHing commented 2 years ago

@Auroragan Is this what you need?

Conv_0_data is not the result after relu right

you are right.

Auroragan commented 2 years ago

@PureHing Problems found:

  1. At the beginning, the int16 API for the S3 only supported channel = 8*n. It no longer has this limitation, but the code for exporting the 16-bit coefficients is out of date, so the filter_element for the first and last layers is incorrect. Will fix it ASAP.
  2. Conv_6, Conv_12, and Conv_18 should use PADDING_SAME_MXNET, not PADDING_SAME, because different platforms have different padding strategies when stride = 2; I'm guessing this model is not from TensorFlow. (A sketch of this change follows below.)
  3. The output exponent for Conv_40 should be -11 according to the pickle file you provided.

I'm getting the correct result after making the above modifications.
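For illustration, a sketch of the padding change from point 2, following the Conv2D<int16_t> constructor pattern used elsewhere in this thread (the exponent value and the Conv_6 getter names below are placeholders, not taken from the real model):

    // Sketch only: switch the stride-2 layers (Conv_6, Conv_12, Conv_18)
    // from PADDING_SAME to PADDING_SAME_MXNET. The exponent (-10) and the
    // getter names are placeholders for this example.
    Conv_6(Conv2D<int16_t>(-10, get_Conv_6_filter(), get_Conv_6_bias(), get_Conv_6_activation(),
                           PADDING_SAME_MXNET, 2, 2, "Conv_6")),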

PureHing commented 2 years ago

@Auroragan

Can you send your modified project to me?

Auroragan commented 2 years ago

ok

PureHing commented 2 years ago

@Auroragan 1.

  1. at the beginning, int16 API for S3 only support channel = 16*n, now it doesn't have this limitation but the code for exporting the coefficient for 16-bit is out of date so the filter_element for first layer and last layer is incorrect. will fix it ASAP.

For my test model, the channels are all multiples of 16, so why is the first layer's quantization incorrect? If I retrain the model, how can I avoid this filter_element error? For 16*n, does n have to be greater than 1?

2.

2. For Conv_6, Conv_12, Conv_18, should use PADDING_SAME_MXNET not PADDING_SAME, because different platform has different strategy for padding when stride = 2, I'm guessing this model is not from tensorflow.

Is the S3's downsampling method the same as the padding behaviour of max pooling in TensorFlow?

3.

3. the output exponent for Conv_40 should be -11 according to the pickle file you provided

The outdata in image.hpp was generated with exponent = -11.

4. As mentioned before, the first layer's padding method must use VALID. Is this still necessary on the master branch of esp-dl?

5. I replaced the file you gave me, and the result is as follows:

Conv_0[0:1, 0:1, 0:16] | shape = (1 + 39 + 1, 1 + 39 + 1, 16(16))
      0      1      2      3      4      5      6      7      8      9     10     11     12     13     14     15
      0   8420      0      0      0      0      0      0      0      0      0  11138  10058      0      0   6440
gt_Conv_0: res*pow(2,12)
      0   8417      0      0      0      0      0      0      0      0      0  11135  10056      0      0   6438

There is still some offset. When parsing the output of the last layer, it is also incorrect.
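For a rough sense of scale, a small standalone check (not project code) of what that offset means in floating point, given the -12 exponent:

    // Rough check of the offset above: with output exponent -12,
    // one quantization step is 2^-12 ~= 0.000244.
    #include <cstdio>
    #include <cmath>

    int main()
    {
        const float scale = std::pow(2.0f, -12);                   // output exponent -12
        printf("board: %f  gt: %f  diff: %f\n",
               8420 * scale, 8417 * scale, (8420 - 8417) * scale); // diff ~= 0.000732
        return 0;
    }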

Auroragan commented 2 years ago
  1. the input channel of first layer is 1, please wait for fix.
  2. what do you mean by s3 downsampling?
  3. the out_data is correct, but the initialization of this layer is incorrect: Conv_40(Conv2D<int16_t>(-10, get_Conv_40_filter(), get_Conv_40_bias(), NULL, PADDING_VALID, 1, 1, "Conv_40")) {}
  4. it's not a must, if you want to use PADDING_SAME, you need to add input.set_padding_size()
  5. it's normal to have some offset due to quantization. the result won't be exactly the same as fp32 result.
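A sketch of the corrected initialization from point 3 (only the output exponent changes, from -10 to -11, to match the pickle file):

    // Corrected sketch for point 3: output exponent -11 instead of -10.
    Conv_40(Conv2D<int16_t>(-11, get_Conv_40_filter(), get_Conv_40_bias(), NULL,
                            PADDING_VALID, 1, 1, "Conv_40")) {}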
PureHing commented 2 years ago

  1. The input channel of the first layer is 1; please wait for the fix.

@Auroragan BTW, when will it be fixed?

The output channel should be 8*n? That seems unreasonable.

Auroragan commented 2 years ago

@PureHing I will send you the lib first; you can use it without any limitation.

PureHing commented 2 years ago

@Auroragan Sorry to disturb you. When I replaced calibrator.so with the one you gave me, the 16-bit quantized Conv_0 filter element was different from the one you gave me this afternoon. I can parse the correct result with the one from this afternoon. Generated by the new lib (same model):

    const static __attribute__((aligned(16))) int16_t Conv_0_filter_element[] = {
          2077,  -4268,   1845,   7087,   7273,   3863,  -2932,   5966,  -3911,  -5610,  -8973,   7078,   2121,  -7769,   4165,  -4981,  -3858,   1996,   3599,  -5371,   3071,   3103,  -7424,  17486,  -5771,  14175, -20329,   1741,   6661,   2254,  -2040,   3083,   1824,   3516,   1443,   1566,   3942,   5003,  -3172,   -707,  13029,   2335, -10934,    963,   5642,  10067,    489,   -326,  -1002, -19053,    751,   7626,  -9285,  11625,  -6636,   2608,   4242,  -2172,  14463,   -901,   8887,   6514, -10002,    994,    715,   -586,    512,   3579, -21433,  -2547,  22241,  -2228,  -6197, -12812,   1593,   7742,  24893,  -3850,   -209, -11986,    989,   9886,  -7551,   7236,  10315,   1597, -15835,   2864, -11546,   3574,  11251, -14135,  -6187,  13756,     20,  -5940,   5874,   1973,  -4873,   -119,   2842,  -3484,   -273,  -4700,  -3700,   -896,  -5572,  -2543,   5573,   1937,   4858,  -4018,  -3804,   1043,  -6315,  -8327,  -7253, -13258,   7717,   2014,  19076, -14446,  -3168,   4038,  -2180,   1960,   8047,  17826,   2918,   6284,   1141, -11618,  -1900, -11310, -10216,  -5555,    447,   2940,  -8804, -12618,   1900,  16678,   5533, -10553};

But I need to re-convert the new model. This dynamic library doesn't work for me.

Auroragan commented 2 years ago

Have you tried the new one?

But I need to re-convert the new model. This dynamic library doesn't work for me.

what do you mean?

PureHing commented 2 years ago

@Auroragan Hi, did you use the new dynamic library to test the first-layer output of the model I gave you?

@Auroragan Sorry to disturb you. When I replaced calibrator.so with the one you gave me, the 16-bit quantized Conv_0 filter element was different from the one you gave me this afternoon. I can parse the correct result with the one from this afternoon. Generated by the new lib (same model):

    const static __attribute__((aligned(16))) int16_t Conv_0_filter_element[] = {
          2077,  -4268,   1845,   7087,   7273,   3863,  -2932,   5966,  -3911,  -5610,  -8973,   7078,   2121,  -7769,   4165,  -4981,  -3858,   1996,   3599,  -5371,   3071,   3103,  -7424,  17486,  -5771,  14175, -20329,   1741,   6661,   2254,  -2040,   3083,   1824,   3516,   1443,   1566,   3942,   5003,  -3172,   -707,  13029,   2335, -10934,    963,   5642,  10067,    489,   -326,  -1002, -19053,    751,   7626,  -9285,  11625,  -6636,   2608,   4242,  -2172,  14463,   -901,   8887,   6514, -10002,    994,    715,   -586,    512,   3579, -21433,  -2547,  22241,  -2228,  -6197, -12812,   1593,   7742,  24893,  -3850,   -209, -11986,    989,   9886,  -7551,   7236,  10315,   1597, -15835,   2864, -11546,   3574,  11251, -14135,  -6187,  13756,     20,  -5940,   5874,   1973,  -4873,   -119,   2842,  -3484,   -273,  -4700,  -3700,   -896,  -5572,  -2543,   5573,   1937,   4858,  -4018,  -3804,   1043,  -6315,  -8327,  -7253, -13258,   7717,   2014,  19076, -14446,  -3168,   4038,  -2180,   1960,   8047,  17826,   2918,   6284,   1141, -11618,  -1900, -11310, -10216,  -5555,    447,   2940,  -8804, -12618,   1900,  16678,   5533, -10553};

But I need to re-convert the new model. This dynamic library doesn't work for me.

Today's library gives the same result as yesterday's library, which is inconsistent with your hand-tuned one.

PureHing commented 2 years ago

Everything runs normally now and the results are correct. Thank you very much.