Commit 5f2495a6 by Klin

fix: modify module/bias_qmax for outliners and refit

parent 11e4b3c9
ykl/AlexNet/image/AlexNet_table.png

21.4 KB | W: | H:

ykl/AlexNet/image/AlexNet_table.png

21.3 KB | W: | H:

ykl/AlexNet/image/AlexNet_table.png
ykl/AlexNet/image/AlexNet_table.png
ykl/AlexNet/image/AlexNet_table.png
ykl/AlexNet/image/AlexNet_table.png
  • 2-up
  • Swipe
  • Onion skin
ykl/AlexNet/image/flops.png

33.6 KB | W: | H:

ykl/AlexNet/image/flops.png

33 KB | W: | H:

ykl/AlexNet/image/flops.png
ykl/AlexNet/image/flops.png
ykl/AlexNet/image/flops.png
ykl/AlexNet/image/flops.png
  • 2-up
  • Swipe
  • Onion skin
ykl/AlexNet/image/param.png

35.5 KB | W: | H:

ykl/AlexNet/image/param.png

32.7 KB | W: | H:

ykl/AlexNet/image/param.png
ykl/AlexNet/image/param.png
ykl/AlexNet/image/param.png
ykl/AlexNet/image/param.png
  • 2-up
  • Swipe
  • Onion skin
......@@ -64,7 +64,7 @@ def bias_qmax(quant_type):
elif quant_type == 'POT':
return get_qmax(quant_type)
else:
return get_qmax(quant_type, 16, 5)
return get_qmax(quant_type, 16, 7)
# 转化为FP32,不需再做限制
......
title_list:
INT_2 INT_3 INT_4 INT_5 INT_6 INT_7 INT_8 INT_9 INT_10 INT_11 INT_12 INT_13 INT_14 INT_15 INT_16 POT_2 POT_3 POT_4 POT_5 POT_6 POT_7 POT_8 FLOAT_3_E1 FLOAT_4_E1 FLOAT_4_E2 FLOAT_5_E1 FLOAT_5_E2 FLOAT_5_E3 FLOAT_6_E1 FLOAT_6_E2 FLOAT_6_E3 FLOAT_6_E4 FLOAT_7_E1 FLOAT_7_E2 FLOAT_7_E3 FLOAT_7_E4 FLOAT_7_E5 FLOAT_8_E1 FLOAT_8_E2 FLOAT_8_E3 FLOAT_8_E4 FLOAT_8_E5 FLOAT_8_E6
js_flops_list:
7507.750630903518 2739.696686260571 602.5610368972597 140.92196362794522 34.51723630314398 8.518501248514761 2.1353880852742875 0.5319393032307673 0.13161172461255044 0.03248745580294116 0.008041228126669929 0.002041263178018999 0.0004401365998344318 0.0001238196358571173 2.6684157208189957e-07 7507.6677482789855 1654.3776790768084 136.73977548990493 134.5782553082339 134.57822914515503 134.57815728826475 134.57822793682794 1054.343125709169 244.48315063136482 247.8970440086859 87.65672287018292 89.63829780248474 37.952907162652885 48.439500893999075 50.122513003094504 9.763710160467824 37.67667145925756 37.08251159654393 37.1627098092451 2.504499223445029 9.660234735230102 37.70545171879492 33.31639481832546 32.12035369527305 0.6541863956484224 2.4420428195112773 9.68812852009895 37.70545171879492
7507.750225073234 2739.7076966853715 602.5612265737566 140.92196299779113 34.51721972907417 8.518507426398175 2.1353734024227116 0.5319409334311419 0.13162665658268388 0.03249625709236131 0.00803756234599581 0.0020459635613112545 0.00041898539095542464 0.00013219370584495937 5.889692605810293e-06 7507.667349327719 1654.3776698748943 136.74005401701154 134.57824579197353 134.57836801317453 134.5784177398131 134.5783104900291 1054.3432294422712 244.48311706582172 247.89704517757673 87.6567210381318 89.6383158999559 37.95288939287757 48.43949061178506 50.12249448664991 9.763717307655865 37.67666344437851 37.082531959820756 37.162726030896145 2.504495273130303 9.660222213843284 37.67755581522706 33.31639025927671 32.12034332380884 0.654188829204954 2.4420339872993106 9.660223950420662 37.71766886303616
js_param_list:
2099.4012978223045 756.8852841760424 165.48611276543275 38.661077867143234 9.465502346081184 2.3380773681828533 0.58692184552745 0.14620052066785197 0.03612237294214898 0.008920127583145731 0.002220705680193553 0.0005652568568322304 0.00012068297675722306 4.6465238698738316e-05 3.978560588757452e-06 2099.3806976929545 455.6542423459878 38.279215341990685 37.69143863173502 37.69138152272717 37.691410138985525 37.6914533221459 292.0013646264601 67.95022753681195 68.50200422208128 24.58495282264393 24.797314677833658 10.623403633392762 13.713035034152393 13.858532642027827 2.732777962743172 10.54799818234084 10.549391145072232 10.275098560105489 0.7004983113671814 2.704403793583995 10.559890009589033 9.493248004294491 8.881733731714347 0.1827908118651683 0.6834420451874454 2.716044028457789 10.559890009589033
2099.4012777078437 756.8892728120519 165.48621719007886 38.661065835467824 9.465502337873694 2.33808893548631 0.5869253659714873 0.14619333548441132 0.03612248684512918 0.008927375183637454 0.002218583272882395 0.0005696575186261605 0.00011790447981599561 5.3025835391648186e-05 9.207995013302385e-06 2099.3808052778672 455.65423486787154 38.27924274988919 37.69144524899607 37.69150679435307 37.69151359769784 37.691495756398155 292.0014018519822 67.9502067901384 68.50198967225035 24.584935828703593 24.79733981410337 10.623398482526541 13.713032956745856 13.858531189296505 2.7327807059031084 10.548006045861452 10.549392886768755 10.275096064814912 0.7005038658637505 2.704395835634312 10.548258699586787 9.49326525014626 8.881738559127504 0.18278915543612456 0.6834360936949567 2.704395905153418 10.565651241352946
ptq_acc_list:
10.0 10.09 56.08 77.58 83.1 84.84 84.88 85.06 85.06 85.11 85.07 85.08 85.08 85.08 85.08 10.0 13.0 71.41 73.08 72.96 72.65 73.37 24.5 66.46 51.17 77.72 77.3 82.21 81.77 81.53 84.03 81.85 81.93 82.88 84.83 84.21 51.75 82.91 83.36 85.13 84.77 59.81 51.53
10.0 10.05 56.03 78.36 83.22 84.34 85.08 84.93 85.08 85.1 85.06 85.07 85.07 85.08 85.08 10.0 15.14 72.69 72.45 72.68 72.07 72.75 24.65 63.25 57.28 77.86 74.77 82.16 81.39 81.22 84.02 81.89 81.97 82.79 84.73 84.16 81.93 82.83 83.41 84.91 84.77 83.97 81.99
acc_loss_list:
0.8824635637047484 0.8814057357780911 0.34085566525622946 0.08815232722143865 0.02327221438645985 0.0028208744710859768 0.002350728725905064 0.0002350728725904563 0.0002350728725904563 -0.00035260930888576797 0.00011753643629531167 0.0 0.0 0.0 0.0 0.8824635637047484 0.847202632816173 0.16067230841560887 0.14104372355430184 0.1424541607898449 0.14609779031499756 0.13763516690173946 0.7120357310766338 0.2188528443817584 0.3985660554771979 0.08650681711330512 0.0914433474377057 0.03373295721673724 0.038904560413728285 0.04172543488481426 0.012341325811001377 0.03796426892336629 0.03702397743300413 0.02585801598495537 0.0029384109073812884 0.010225669957686936 0.39174894217207334 0.0255054066760696 0.02021626704278325 -0.0005876821814762242 0.0036436295251528242 0.2970145745181006 0.3943347437705689
0.8824635637047484 0.8818758815232722 0.34144334743770566 0.07898448519040902 0.021861777150916778 0.008697696285848553 0.0 0.0017630465444286728 0.0 -0.0002350728725904563 0.0002350728725904563 0.00011753643629531167 0.00011753643629531167 0.0 0.0 0.8824635637047484 0.8220498354489891 0.14562764456981667 0.14844851904090262 0.1457451810061118 0.1529149036201223 0.1449224259520451 0.710272684532205 0.2565820404325341 0.32675129290079924 0.0848613070051716 0.12118006582040436 0.03432063939821347 0.04337094499294779 0.04536906440996708 0.01245886224729669 0.037494123178185214 0.03655383168782322 0.026915843911612506 0.004113775270333736 0.01081335213916316 0.03702397743300413 0.026445698166431594 0.019628584861307027 0.001998119417019296 0.0036436295251528242 0.013046544428772913 0.03631875881523276
......@@ -143,7 +143,7 @@ if __name__ == "__main__":
full_file = 'ckpt/cifar10_AlexNet.pt'
model = AlexNet()
model.load_state_dict(torch.load(full_file))
# model.load_state_dict(torch.load(full_file))
model.to(device)
optimizer = optim.SGD(model.parameters(), lr=lr, momentum=momentum)
......@@ -159,10 +159,13 @@ if __name__ == "__main__":
for epoch in range(1, epochs+1):
# 训练原模型,获取梯度分布
loss,grad_dict = train(model, device, train_loader, optimizer, epoch)
if epoch == 1:
loss_start = loss
loss_delta = loss - loss_start
# print('loss:%f' % loss_avg)
writer.add_scalar('Full.loss',loss,epoch)
for name,grad in grad_dict.items():
writer.add_histogram('Full.'+name+'_grad',grad,global_step=epoch)
# for name,grad in grad_dict.items():
# writer.add_histogram('Full.'+name+'_grad',grad,global_step=epoch)
loss_sum += loss
loss_avg = loss_sum / epoch
......@@ -170,16 +173,17 @@ if __name__ == "__main__":
grad_dict_sum[name] += grad_dict[name]
grad_dict_avg[name] = grad_dict_sum[name] / epoch
ckpt = {
'epoch' : epoch,
'loss' : loss,
'loss_sum' : loss_sum,
'loss_avg' : loss_avg,
'grad_dict_avg' : grad_dict_avg
}
if epoch % 5 == 0:
subdir = 'epoch_%d/' % epoch
torch.save(ckpt,ckpt_prefix+ subdir +'full.pt')
if store_qat:
ckpt = {
'epoch' : epoch,
'loss' : loss,
'loss_sum' : loss_sum,
'loss_avg' : loss_avg,
'grad_dict_avg' : grad_dict_avg
}
if epoch % 5 == 0:
subdir = 'epoch_%d/' % epoch
torch.save(ckpt,ckpt_prefix+ subdir +'full.pt')
# loss_avg,grad_dict = quantize_aware_training(model_ptq, device, train_loader, optimizer, epoch)
......@@ -241,6 +245,7 @@ if __name__ == "__main__":
grad_dict_sum[name] += grad_dict[name]
grad_dict_avg[name] = grad_dict_sum[name] / epoch
ckpt = {
'epoch' : epoch,
'loss' : loss,
......
ykl/AlexNet_BN/image/flops.png

37.4 KB | W: | H:

ykl/AlexNet_BN/image/flops.png

36.8 KB | W: | H:

ykl/AlexNet_BN/image/flops.png
ykl/AlexNet_BN/image/flops.png
ykl/AlexNet_BN/image/flops.png
ykl/AlexNet_BN/image/flops.png
  • 2-up
  • Swipe
  • Onion skin
ykl/AlexNet_BN/image/param.png

36.2 KB | W: | H:

ykl/AlexNet_BN/image/param.png

36.2 KB | W: | H:

ykl/AlexNet_BN/image/param.png
ykl/AlexNet_BN/image/param.png
ykl/AlexNet_BN/image/param.png
ykl/AlexNet_BN/image/param.png
  • 2-up
  • Swipe
  • Onion skin
......@@ -64,7 +64,7 @@ def bias_qmax(quant_type):
elif quant_type == 'POT':
return get_qmax(quant_type)
else:
return get_qmax(quant_type, 16, 5)
return get_qmax(quant_type, 16, 7)
# 转化为FP32,不需再做限制
......
title_list:
INT_2 INT_3 INT_4 INT_5 INT_6 INT_7 INT_8 INT_9 INT_10 INT_11 INT_12 INT_13 INT_14 INT_15 INT_16 POT_2 POT_3 POT_4 POT_5 POT_6 POT_7 POT_8 FLOAT_3_E1 FLOAT_4_E1 FLOAT_4_E2 FLOAT_5_E1 FLOAT_5_E2 FLOAT_5_E3 FLOAT_6_E1 FLOAT_6_E2 FLOAT_6_E3 FLOAT_6_E4 FLOAT_7_E1 FLOAT_7_E2 FLOAT_7_E3 FLOAT_7_E4 FLOAT_7_E5 FLOAT_8_E1 FLOAT_8_E2 FLOAT_8_E3 FLOAT_8_E4 FLOAT_8_E5 FLOAT_8_E6
js_flops_list:
7398.262055529559 2629.3751622617588 590.6821895683953 140.07310170087658 33.86048167345483 8.284908066398648 2.0380663672630033 0.5092279871870147 0.12684254729585437 0.031863777467731946 0.007841108109205986 0.0019867625414859602 0.000524805638519184 0.00015510881465292402 4.128433975522605e-05 7398.228137001189 1620.2559603222214 133.7304911846874 131.62907676756663 131.62871096032845 131.6289253991913 131.62875302977494 1069.390471249252 255.89338592444176 239.72194344867773 94.18685807791533 86.02821389442595 36.77673254387978 54.05171226668418 47.849590815698406 9.560264345209177 36.522392073949895 42.22214551260318 35.435660108435656 2.4388559138472727 9.464983665314236 36.53673927235493 38.25590966717735 30.62508628497586 0.6417078348950557 2.3826050378452384 9.478115589519865 36.536710369840804
7398.254588242588 2629.375369920873 590.682242850979 140.07309702607543 33.8605061564944 8.284907955771828 2.0380761928718356 0.5092245742053088 0.1268421502575885 0.03186153637549241 0.00786669870124002 0.0019702839340457524 0.0005105761502405256 0.00014720966403738878 5.009152681172546e-05 7398.228439126393 1620.255743626143 133.73060307719862 131.62858417356293 131.6287169902594 131.62886974968603 131.62916373182327 1069.390418247601 255.8934244865269 239.72195987976787 94.18685986225425 86.02822309304119 36.77674350011185 54.051731941424315 47.84958061374913 9.560250444993518 36.520065154425616 42.22217884431455 35.43566024130792 2.43887086039784 9.461428083033894 36.52006172442561 38.25590959917856 30.625094006227837 0.6417067659753078 2.378241078221592 9.461428733351953 36.54256264242457
js_param_list:
2072.6257003788373 729.7410477619161 163.5885811737197 38.802816944087496 9.38263941212123 2.2969966695619557 0.5644727100537247 0.1411507960630098 0.035134381675847655 0.008839262947625631 0.0021727478905621904 0.0005508804306955368 0.00014678034905458157 4.388988379219416e-05 1.1738754839340505e-05 2072.6054855115713 448.9787413512609 37.516807535381915 36.936289293152434 36.93619699724665 36.93626107722789 36.936217327546416 297.895038404521 71.53561204104008 66.59311683208784 26.516888484693222 23.910032735846137 10.313884888190998 15.311977796193256 13.29234301305728 2.6803832302061097 10.243483526646026 11.995040344505467 9.842200480067097 0.6838675134469407 2.65413470553611 10.251089473912115 10.878179543033623 8.50328530859542 0.17976968612984542 0.66836506379984 2.6613930698773456 10.251080477543102
2072.6236878099226 729.7443476264982 163.58861069796387 38.80282172149893 9.382636343553832 2.2970053703034847 0.5644822291532099 0.14115822853411863 0.035139363239340055 0.00883850565434335 0.002179718866202066 0.0005459844381131149 0.00014248590416669505 4.154122620906531e-05 1.4063815487843632e-05 2072.605571769845 448.9786991943886 37.51683456097356 36.93614282789083 36.93617886328605 36.93621446606478 36.93632841420433 297.8950402934797 71.5356184108912 66.5931076214962 26.51685077742183 23.910047607709778 10.313899279091611 15.31199014495078 13.292340995682595 2.680379799957696 10.242829742707812 11.995046375299877 9.842202416339607 0.6838684384320987 2.6531284784835787 10.24283079854063 10.878175502415237 8.50330068632705 0.17977809643751386 0.6671260475358367 2.6531289753057634 10.256522418400566
ptq_acc_list:
10.0 17.19 49.41 81.48 85.79 86.8 87.02 87.02 87.16 87.1 87.12 87.06 87.08 87.08 87.08 10.0 22.87 42.57 40.51 42.66 40.73 42.47 15.91 58.82 69.34 75.77 81.84 82.5 79.29 84.62 86.41 77.1 80.77 85.88 86.82 77.91 36.61 81.32 85.81 86.93 75.9 41.38 37.26
10.0 14.28 50.26 81.87 85.97 86.89 87.13 87.09 87.1 87.08 87.07 87.06 87.07 87.09 87.08 10.0 22.65 39.86 40.7 38.01 41.06 43.97 15.24 57.61 69.92 76.09 81.73 82.71 79.61 84.75 86.32 82.69 80.96 85.47 86.87 86.36 82.57 81.34 86.24 86.95 86.85 86.27 78.21
acc_loss_list:
0.8851762544494202 0.8026179813985532 0.432655873234585 0.06441612125387529 0.014927086921575348 0.0033298886209668878 0.0008037662188541438 0.0008037662188541438 -0.0008037662188539806 -0.00011482374555047542 -0.0003444712366517526 0.0003444712366517526 0.0001148237455506386 0.0001148237455506386 0.0001148237455506386 0.8851762544494202 0.7373980939258238 0.5111953151911816 0.534849006774601 0.5101619014812264 0.5323228843724883 0.5123435526466874 0.8173154208290275 0.32460672867148926 0.20381214835227923 0.12998047996325648 0.06028246641405442 0.052704099207716196 0.08956252152945225 0.02836146515099321 0.007808014697439508 0.11470892180502938 0.07256860718796655 0.013893673211620253 0.0031002411298657736 0.10540819841543239 0.5796302675393271 0.06625330118268469 0.014697439430474234 0.0018371799288092385 0.12848777127109884 0.5248593409117005 0.5721667240785395
0.8851894374282434 0.8360505166475315 0.4229621125143513 0.06004592422502859 0.012973593570608444 0.002411021814006817 -0.00034443168771528286 0.00011481056257165219 0.0 0.00022962112514346753 0.00034443168771528286 0.00045924225028693506 0.00034443168771528286 0.00011481056257165219 0.00022962112514346753 0.8851894374282434 0.7399540757749712 0.5423650975889782 0.5327210103329506 0.5636050516647532 0.5285878300803674 0.4951779563719862 0.825028702640643 0.33857634902411016 0.19724454649827777 0.12640642939150393 0.06165327210103319 0.050401836969001156 0.08599311136624564 0.02698048220436274 0.00895522388059703 0.05063145809414463 0.07049368541905857 0.018714121699196274 0.0026406429391502844 0.00849598163030993 0.05200918484500576 0.0661308840413317 0.009873708381171062 0.0017221584385762512 0.0028702640642939152 0.009529276693455779 0.10206659012629163
# 改动说明
## update: 2023/04/16
+ 添加了matlab的拟合及绘图脚本,支持模型分类标记,且曲线拟合相比cftool更加平滑
+ ptq.py中计算js_param笔误,应由flop_ratio改为par_ratio。否则flops和param拟合没有区别
+ module.py中bias_qmax方法,应当为float类型传参num_bits为16、e_bits为7。
+ 这里主要关注e_bits:拟合离群点主要为FLOAT_7_E5 / FLOAT_8_E5 / FLOAT_8_E6,其表现为bias两极分布,与之前INT量化中bias溢出的问题现象相似。
+ 原先指定e_bits为5,由于bias的scale为input和weight的scale乘积,bias量化范围应当大致为x和weight量化范围的平方倍。目前代码支持的最高x和weight量化范围大致为 $2^{2^6}$,因此bias范围应当近似取到$2^{2^7}$,即将e_bits指定为7
+ 改动之后,离群点消失,拟合效果显著提高
\ No newline at end of file
ykl/VGG_16/image/VGG16_table.png

21.8 KB | W: | H:

ykl/VGG_16/image/VGG16_table.png

21.6 KB | W: | H:

ykl/VGG_16/image/VGG16_table.png
ykl/VGG_16/image/VGG16_table.png
ykl/VGG_16/image/VGG16_table.png
ykl/VGG_16/image/VGG16_table.png
  • 2-up
  • Swipe
  • Onion skin
ykl/VGG_16/image/flops.png

33.7 KB | W: | H:

ykl/VGG_16/image/flops.png

33.1 KB | W: | H:

ykl/VGG_16/image/flops.png
ykl/VGG_16/image/flops.png
ykl/VGG_16/image/flops.png
ykl/VGG_16/image/flops.png
  • 2-up
  • Swipe
  • Onion skin
ykl/VGG_16/image/param.png

34.2 KB | W: | H:

ykl/VGG_16/image/param.png

33.5 KB | W: | H:

ykl/VGG_16/image/param.png
ykl/VGG_16/image/param.png
ykl/VGG_16/image/param.png
ykl/VGG_16/image/param.png
  • 2-up
  • Swipe
  • Onion skin
......@@ -80,7 +80,7 @@ def bias_qmax(quant_type):
elif quant_type == 'POT':
return get_qmax(quant_type)
else:
return get_qmax(quant_type, 16, 5)
return get_qmax(quant_type, 16, 7)
# 转化为FP32,不需再做限制
......
title_list:
INT_2 INT_3 INT_4 INT_5 INT_6 INT_7 INT_8 INT_9 INT_10 INT_11 INT_12 INT_13 INT_14 INT_15 INT_16 POT_2 POT_3 POT_4 POT_5 POT_6 POT_7 POT_8 FLOAT_3_E1 FLOAT_4_E1 FLOAT_4_E2 FLOAT_5_E1 FLOAT_5_E2 FLOAT_5_E3 FLOAT_6_E1 FLOAT_6_E2 FLOAT_6_E3 FLOAT_6_E4 FLOAT_7_E1 FLOAT_7_E2 FLOAT_7_E3 FLOAT_7_E4 FLOAT_7_E5 FLOAT_8_E1 FLOAT_8_E2 FLOAT_8_E3 FLOAT_8_E4 FLOAT_8_E5 FLOAT_8_E6
js_flops_list:
9536.471074104704 2226.062767717981 479.08937301581057 110.29737790259509 26.51254683672561 6.543175408097222 1.6082547229117354 0.4010994193665803 0.09957335073633722 0.025111075393055696 0.0061973498640195265 0.0015194423491633707 0.00038995051898299435 7.942830031237921e-05 5.2415376317010985e-05 9536.459434888866 1346.1955987678602 186.0432674146965 184.66847178581048 184.66811538285353 184.66788024897852 184.66821779056917 1162.9049832114217 334.88738457777345 213.59354200345794 162.9083436791504 74.46976130202673 51.093367953882556 114.00986841709613 39.88037886445475 13.180682344029526 50.92046138218437 97.04451701067691 28.848989954377853 3.333524419321254 13.121054175424074 50.939926759367474 90.27653874927127 24.69379491562014 0.8539878687852623 3.300629416372651 13.138977654051457 50.93992491304259
9536.470545853725 2226.0625961453534 479.08942343671333 110.29737895892738 26.51254658172414 6.543173505823439 1.6082545552680656 0.4011046819184469 0.0995744091981884 0.02512104453474506 0.006191111080978074 0.0015259451834671177 0.0003948419706838088 9.156847371397302e-05 5.103821562683077e-05 9536.458765578662 1346.195594505469 186.04319320819684 184.6681482382002 184.6683487483022 184.66821940371673 184.66795503280395 1162.9049836246736 334.88736537318147 213.59353722936538 162.90832461289847 74.46974838241348 51.093368592351894 114.00984771261629 39.88037443687132 13.180687261808096 50.91668399478627 97.04450826091268 28.84898524647785 3.333529301905296 13.115715295208558 50.91668073149591 90.27653879084389 24.69379955366047 0.8539877927713175 3.2949263309664056 13.115710967364368 50.954444671302134
js_param_list:
6887.257101138355 1340.7850727629186 280.73809433329967 63.94496255963192 15.289535102058974 3.7565034795627565 0.9259750231943055 0.2288807692260617 0.057107940186636466 0.014183206997691562 0.003557619868239359 0.000877363417377142 0.00022626713835566516 3.542348440969017e-05 1.7145957804444833e-05 6887.256730096761 806.0595586054458 139.7164856361964 139.0401533816038 139.04027885232878 139.0402887905267 139.0402936537755 795.7370567027401 249.62616743230893 132.75014085752875 132.0024940072834 45.6707525557016 38.23747791072957 96.5944785050857 23.885526002706396 9.845974027938183 38.151876742443726 83.61522220389249 17.043439224317503 2.4726691755122503 9.815686765994785 38.15642983446397 78.20951581132825 14.518605994908436 0.6269759716394059 2.4555938958183803 9.819743080524256 38.156425344733975
6887.256091122821 1340.784804470707 280.73813097751594 63.94496568453547 15.289532692524366 3.7564939386797525 0.9259647062823152 0.2288835741602492 0.05710934861695347 0.014186045165619268 0.003557508084599861 0.0008813425517560222 0.00022710583614690054 4.224842305566257e-05 1.6705389170873787e-05 6887.256027866801 806.0594459403769 139.7164244353938 139.04019666205338 139.04029041111284 139.04030319644943 139.04024126647542 795.737012530977 249.6261323076044 132.75011877730668 132.00246136063635 45.67073400948295 38.23747085219327 96.59443228186598 23.88552681545614 9.84598569650612 38.15017558575456 83.61518038181232 17.04342608978349 2.4726694252419787 9.813474353111108 38.1501666315099 78.20948388628338 14.518609766822784 0.6269759811734874 2.4532405132850865 9.813473104774046 38.16105197534707
ptq_acc_list:
10.0 11.77 60.12 86.44 88.81 89.35 89.3 89.5 89.51 89.44 89.46 89.43 89.43 89.44 89.44 10.0 17.19 66.85 66.15 66.9 65.02 65.06 14.46 64.07 78.19 78.84 87.19 86.74 83.16 88.48 88.84 75.33 83.87 88.84 89.36 67.65 10.23 84.71 88.79 89.43 65.56 10.35 10.19
10.0 11.52 61.58 87.57 88.71 89.24 89.5 89.52 89.45 89.46 89.44 89.42 89.45 89.44 89.45 10.0 13.86 65.99 67.14 68.35 65.85 66.85 14.04 62.71 78.91 78.63 87.01 86.71 83.03 88.66 88.78 86.78 84.11 88.73 89.44 88.73 86.65 84.61 88.92 89.4 89.54 88.75 75.87
acc_loss_list:
0.8881932021466905 0.8684033989266547 0.3278175313059034 0.03354203935599284 0.007043828264758446 0.0010062611806798236 0.001565295169946339 -0.0006708407871198823 -0.000782647584973249 0.0 -0.00022361359570657448 0.0001118067978532078 0.0001118067978532078 0.0 0.0 0.8881932021466905 0.807804114490161 0.25257155635062617 0.2603980322003577 0.2520125223613595 0.2730322003577818 0.2725849731663685 0.8383273703041144 0.2836538461538462 0.12578264758497318 0.118515205724508 0.025156529516994635 0.03018783542039359 0.07021466905187837 0.01073345259391764 0.006708407871198505 0.15775939177101966 0.0622763864042933 0.006708407871198505 0.0008944543828264568 0.24362701252236127 0.8856216457960644 0.05288461538461543 0.007267441860465021 0.0001118067978532078 0.266994633273703 0.8842799642218248 0.8860688729874776
0.8881932021466905 0.8711985688729875 0.31149373881932024 0.020907871198568923 0.008161896243291637 0.0022361359570662216 -0.0006708407871198823 -0.0008944543828264568 -0.00011180679785336669 -0.00022361359570657448 0.0 0.00022361359570657448 -0.00011180679785336669 0.0 -0.00011180679785336669 0.8881932021466905 0.8450357781753131 0.26218694096601075 0.24932915921288012 0.23580053667262973 0.2637522361359571 0.25257155635062617 0.8430232558139535 0.2988595706618962 0.1177325581395349 0.12086314847942758 0.02716905187835412 0.030523255813953532 0.07166815742397134 0.008720930232558153 0.007379248658318388 0.029740608228980284 0.059593023255813934 0.007938282647584904 0.0 0.007938282647584904 0.031194096601073258 0.05400268336314846 0.005813953488372049 0.00044722719141314897 -0.0011180679785331902 0.007714669051878329 0.1517218246869409
......@@ -12,8 +12,6 @@
+ 拟合结果
+ 应该是FLOAT量化中js散度不大,但精度仅为10的量化点导致的R方较低
![flops](image/flops.png)
![param](image/param.png)
......
ykl/VGG_19/image/VGG19_table.png

22 KB | W: | H:

ykl/VGG_19/image/VGG19_table.png

21.6 KB | W: | H:

ykl/VGG_19/image/VGG19_table.png
ykl/VGG_19/image/VGG19_table.png
ykl/VGG_19/image/VGG19_table.png
ykl/VGG_19/image/VGG19_table.png
  • 2-up
  • Swipe
  • Onion skin
ykl/VGG_19/image/flops.png

35.9 KB | W: | H:

ykl/VGG_19/image/flops.png

32.8 KB | W: | H:

ykl/VGG_19/image/flops.png
ykl/VGG_19/image/flops.png
ykl/VGG_19/image/flops.png
ykl/VGG_19/image/flops.png
  • 2-up
  • Swipe
  • Onion skin
ykl/VGG_19/image/param.png

37.6 KB | W: | H:

ykl/VGG_19/image/param.png

33.6 KB | W: | H:

ykl/VGG_19/image/param.png
ykl/VGG_19/image/param.png
ykl/VGG_19/image/param.png
ykl/VGG_19/image/param.png
  • 2-up
  • Swipe
  • Onion skin
......@@ -80,7 +80,7 @@ def bias_qmax(quant_type):
elif quant_type == 'POT':
return get_qmax(quant_type)
else:
return get_qmax(quant_type, 16, 5)
return get_qmax(quant_type, 16, 7)
# 转化为FP32,不需再做限制
......
title_list:
INT_2 INT_3 INT_4 INT_5 INT_6 INT_7 INT_8 INT_9 INT_10 INT_11 INT_12 INT_13 INT_14 INT_15 INT_16 POT_2 POT_3 POT_4 POT_5 POT_6 POT_7 POT_8 FLOAT_3_E1 FLOAT_4_E1 FLOAT_4_E2 FLOAT_5_E1 FLOAT_5_E2 FLOAT_5_E3 FLOAT_6_E1 FLOAT_6_E2 FLOAT_6_E3 FLOAT_6_E4 FLOAT_7_E1 FLOAT_7_E2 FLOAT_7_E3 FLOAT_7_E4 FLOAT_7_E5 FLOAT_8_E1 FLOAT_8_E2 FLOAT_8_E3 FLOAT_8_E4 FLOAT_8_E5 FLOAT_8_E6
js_flops_list:
10125.068777303843 2125.8746059892997 448.1684269275655 102.72227108791047 24.664131592897466 6.028952690039004 1.4808848911979104 0.3654121554844982 0.09188678563921866 0.022862010935799246 0.005681738921256071 0.0014282347355091774 0.0003390110858486199 0.00010615918609977952 3.489823651098289e-05 10125.058309270402 1275.5847525159818 202.11004564591005 200.95585017589545 200.95539368760936 200.95529631550048 200.95542079271524 1204.751815912724 367.27809530828097 207.51154219067092 188.4010741493603 71.77026853544704 55.978675489218205 135.4187671720789 37.94656880869658 14.343459472762477 55.84021870450606 116.4026136353036 27.25354110333402 3.606044260373801 14.295147200396164 55.85538278221231 108.58905223595819 23.270040334049046 0.9179187347666412 3.5799479606638203 14.308950780559794 55.85538134242835
10125.068781318083 2125.8746396005995 448.1684275076656 102.72227062531958 24.664131831035423 6.028952317368312 1.4808853001039965 0.3654121470448262 0.09188669130958198 0.022862163476844406 0.005681816830498811 0.0014281973847489284 0.00033902203745254765 0.00010609152120135955 3.4934861119480844e-05 10125.058594081152 1275.5846069054285 202.10990836927274 200.95538533356967 200.95543492335307 200.95532374524822 200.95529898315357 1204.7518156488943 367.2780949517851 207.51154204984104 188.40107410370646 71.77026871896568 55.97867526181043 135.4187670910398 37.94656908671154 14.343459410681755 55.83674829649791 116.40261372885318 27.2535410832288 3.606044025817579 14.290313737791045 55.83674707301358 108.58905190788829 23.270040541541757 0.9179190066917342 3.574594323551281 14.290315204670643 55.87150641050152
js_param_list:
7919.9999946725075 1368.1564225363845 283.16934566896754 64.2942694436596 15.321270118010474 3.745277689238688 0.9178676871947216 0.22807088096042888 0.056904570420633724 0.014193230067183694 0.0035684240604433215 0.0008905876260726569 0.0002163833713661727 7.152718652977152e-05 3.2130914002021176e-05 7920.002545364124 818.8838272570443 164.4618730779971 163.84526969797696 163.84537575503464 163.84534091325256 163.84535644152615 912.7525366351027 304.20939173305254 138.97594773808592 167.73729464269246 46.84141581944599 45.95750081299213 124.95158148181119 23.98366626289621 11.613224296186155 45.88300992073913 108.73234446521248 16.892437923675633 2.904718790614561 11.58708466778401 45.88710644854956 101.79740772746169 14.310069748579027 0.7348164303199256 2.8903894156159375 11.59069207978545 45.887101821422974
7919.99983980378 1368.1564713436267 283.1693554968751 64.29426936491545 15.321270528727021 3.745277426600951 0.9178679001253169 0.22807122885447256 0.05690431499169481 0.01419322342297318 0.003568448783068516 0.0008906301386853677 0.00021638563506049427 7.150496949260222e-05 3.2155535421527054e-05 7920.00259798844 818.8837200099067 164.46191241414346 163.8453142768725 163.84534269839182 163.84535520112814 163.84532577449994 912.7525364721757 304.20939172507764 138.97594743194938 167.73729462694857 46.84141610323806 45.957500752525 124.95158122945192 23.983665956476454 11.613224211085821 45.88101341301542 108.73234414542763 16.892438161824266 2.904718992579768 11.584588581117753 45.88101100206524 101.79740801889845 14.310069953741102 0.7348166494910408 2.8876544755152516 11.584596345742247 45.89450884590524
ptq_acc_list:
10.0 12.8 62.98 87.16 89.05 89.19 89.23 89.21 89.26 89.28 89.26 89.26 89.25 89.25 89.25 10.0 17.81 67.69 68.82 65.58 65.75 66.39 12.63 57.06 79.98 79.55 87.25 85.88 83.01 88.34 88.74 74.07 84.41 88.6 89.41 63.55 10.0 84.55 88.99 89.33 61.4 10.0 10.0
10.0 12.95 60.49 87.39 88.96 89.11 89.25 89.3 89.24 89.25 89.25 89.26 89.25 89.25 89.25 10.0 17.62 66.43 66.71 66.14 62.97 65.32 12.22 59.32 80.16 79.22 87.23 86.1 82.78 88.34 88.76 86.04 84.01 88.86 89.23 88.72 86.27 84.73 88.9 89.32 89.33 88.74 73.92
acc_loss_list:
0.8879551820728291 0.8565826330532214 0.2943417366946779 0.02341736694677875 0.0022408963585434493 0.0006722689075630507 0.00022408963585429716 0.0004481792717087535 -0.00011204481792722819 -0.00033613445378152535 -0.00011204481792722819 -0.00011204481792722819 0.0 0.0 0.0 0.8879551820728291 0.8004481792717086 0.24156862745098043 0.22890756302521015 0.26521008403361346 0.26330532212885155 0.2561344537815126 0.8584873949579832 0.360672268907563 0.10386554621848736 0.10868347338935577 0.022408963585434174 0.03775910364145663 0.06991596638655456 0.010196078431372511 0.0057142857142857715 0.17008403361344546 0.05422969187675074 0.00728291316526617 -0.0017927170868346958 0.28795518207282916 0.8879551820728291 0.05266106442577034 0.0029131652661065 -0.0008963585434173479 0.31204481792717087 0.8879551820728291 0.8879551820728291
0.8879551820728291 0.8549019607843137 0.3222408963585434 0.020840336134453775 0.0032492997198880253 0.0015686274509803986 0.0 -0.0005602240896358225 0.00011204481792722819 0.0 0.0 -0.00011204481792722819 0.0 0.0 0.0 0.8879551820728291 0.8025770308123249 0.25568627450980386 0.2525490196078432 0.2589355742296919 0.29445378151260504 0.26812324929972 0.8630812324929972 0.3353501400560224 0.10184873949579835 0.11238095238095239 0.02263305322128847 0.035294117647058885 0.07249299719887954 0.010196078431372511 0.005490196078431315 0.03596638655462178 0.05871148459383748 0.0043697478991596705 0.00022408963585429716 0.005938375350140069 0.03338935574229696 0.05064425770308119 0.003921568627450917 -0.0007843137254901196 -0.0008963585434173479 0.0057142857142857715 0.17176470588235293
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment