Commit e133ba3a by Klin

feat: Model Robustness, details in ALL/README.md

parent 51ce6c00
"""
利用PCA降维可视化伪数据所得特征与模型分类结果/生成器输入标签关系,从而观测决策边界
模型分类结果out表示该模型的决策边界
生成器输入标签label表示生成器所拟合的边界
针对cifar10数据集下ResNet_18生成可视化图像
"""
from model import Model
from dataloader import DataLoader
import os
import argparse
import numpy as np
import pandas as pd
from scipy.linalg import eigh
import torch
import torch.nn as nn
import torchvision
from torchvision import datasets, transforms
import seaborn as sns
from matplotlib import pyplot as plt
from sklearn.preprocessing import StandardScaler
from sklearn.decomposition import PCA
parser = argparse.ArgumentParser(description='PCA features')
# Classification plot of the test set on the model
parser.add_argument('--pca_src', action='store_true')
# Classification plot of generator fake data, without inter-class samples
parser.add_argument('--pca_gen', action='store_true')
# Classification plot of generator fake data, including inter-class samples
parser.add_argument('--pca_mix', action='store_true')
# Decision-boundary paths between every pair of classes
parser.add_argument('--pca_path', action='store_true')
# Path-sampling parameters
parser.add_argument('--num_dot_per_mix', type=int, default=200)
parser.add_argument('--num_dot_per_path', type=int, default=200)
# Select the generator corresponding to the chosen model
parser.add_argument('--quant', action='store_true')
# The following arguments take effect only when a quantized network is selected
parser.add_argument('--quant_type', type=str, default='NONE')
parser.add_argument('--num_bits', type=int, default=0)
parser.add_argument('--e_bits', type=int, default=0)
def reduce_df(dataframe, num_per_class):
df_list = []
for i in range(10):
df_list.append(dataframe.iloc[i * 5000: i * 5000 + num_per_class])
df = pd.concat(df_list)
return df
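# Illustrative sketch (hypothetical helper, never called): reduce_df assumes the
# frame is sorted by label with 5000 rows per class (the cifar10 train-set
# layout) and keeps the first num_per_class rows of each block.
def _reduce_df_demo():
    toy = pd.DataFrame({'label': np.repeat(np.arange(10), 5000)})
    out = reduce_df(toy, 10)  # first 10 rows of each 5000-row class block
    assert len(out) == 100
    return out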
if __name__ == '__main__':
args = parser.parse_args()
# Determine model and generator paths
if args.quant:
if args.quant_type == 'FLOAT':
title = '%s_%d_E%d' % (args.quant_type, args.num_bits, args.e_bits)
else:
title = '%s_%d' % (args.quant_type, args.num_bits)
model_file = 'ckpt_quant/cifar10/ResNet_18/'+title+'.pt'
gen_file = 'ckpt_quant_gen/cifar10/ResNet_18/'+title+'.pt'
pca_dir = 'pca_ResNet18_cifar10/'+title+'/'
else:
model_file = 'ckpt_full/cifar10/ResNet_18.pt'
gen_file = 'ckpt_full_gen/cifar10/ResNet_18.pt'
pca_dir = 'pca_ResNet18_cifar10/FULL/'
if any([args.pca_gen, args.pca_mix, args.pca_path]):
generator = torch.load(gen_file)
generator.eval()
if any([args.pca_src, args.pca_gen, args.pca_mix, args.pca_path]):
os.makedirs(pca_dir, exist_ok=True)
model = Model(model_name='ResNet_18',dataset='cifar10').cuda()
if args.quant:
model.quantize(args.quant_type,args.num_bits,args.e_bits)
model.load_state_dict(torch.load(model_file))
model.eval()
print('Model Ready')
dataloader = DataLoader('cifar10',512)
_,_,test_loader = dataloader.getloader()
features = []
targets = []
with torch.no_grad():
for i, (data, target) in enumerate(test_loader):
data = data.cuda()
if args.quant:
_, feature = model.quantize_forward(data, out_feature=True)
else:
_, feature = model(data, out_feature=True)
features.append(feature.cpu())
targets.append(target)
features_cat = torch.cat(features, dim=0)
targets_cat = torch.cat(targets, dim=0)
feature_scaler = StandardScaler().fit(features_cat)
standardized_data = feature_scaler.transform(features_cat)
covar_matrix = np.matmul(standardized_data.T , standardized_data)
# eigh returns eigenvalues in ascending order; indices (510, 511) of the 512-dim
# covariance matrix are the two largest, i.e. the top-2 principal directions
# (newer SciPy spells this keyword subset_by_index)
values, vectors = eigh(covar_matrix, eigvals=(510,511))
vectors = vectors.T
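# Illustrative sketch (hypothetical, never called): on toy data, the top-2
# eigenvectors of the covariance obtained via eigh match sklearn PCA components
# up to sign, which is exactly the projection used below.
def _pca_eigh_demo():
    rng = np.random.RandomState(0)
    X = StandardScaler().fit_transform(rng.randn(100, 8))
    cov = np.matmul(X.T, X)
    _, vecs = eigh(cov, eigvals=(6, 7))  # top-2 of an 8-dim covariance
    ours = vecs.T[::-1]                  # reorder to descending eigenvalue
    ref = PCA(n_components=2).fit(X).components_
    assert np.allclose(np.abs(ours), np.abs(ref), atol=1e-6)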
if args.pca_src:
print('pca of source data')
new_coordinates = np.matmul(vectors, standardized_data.T)
new_coordinates = np.vstack((new_coordinates, targets_cat)).T
dataframe = pd.DataFrame(data=new_coordinates, columns=("1st_principal", "2nd_principal", "label"))
dataframe.sort_values(by=['label'], axis=0, inplace=True)
df = reduce_df(dataframe, 1000)
pca_result = sns.FacetGrid(df, hue="label", height=10, hue_kws={'marker':['x'] * 10}).map(plt.scatter, '1st_principal', '2nd_principal')
pca_result.set(xticks=[], yticks=[], xlabel='', ylabel='')
plt.savefig(pca_dir+'pca_src.png')
if args.pca_gen or args.pca_mix or args.pca_path:
print('pca of gen')
features_gen = []
targets_label = []
targets_out = []
with torch.no_grad():
for i in range(50):
z = torch.randn(200, 512).cuda()
# 50 batches of 200 samples; i // 5 yields class 0..9, i.e. 1000 samples per class
labels = (torch.ones(200) * (i // 5)).type(torch.LongTensor)
targets_label.append(labels)
labels = labels.cuda()
z = z.contiguous()
labels = labels.contiguous()
images = generator(z, labels)
if args.quant:
out, feature = model.quantize_forward(images, out_feature=True)
else:
out, feature = model(images, out_feature=True)
features_gen.append(feature.cpu())
pred = out.argmax(dim=1, keepdim=True)
pred = pred.view_as(labels)
targets_out.append(pred.cpu())
features_cat_gen = torch.cat(features_gen, dim=0)
targets_cat_label = torch.cat(targets_label, dim=0)
targets_cat_out = torch.cat(targets_out, dim=0)
standardized_data_gen = feature_scaler.transform(features_cat_gen)
new_coordinates_gen = np.matmul(vectors, standardized_data_gen.T)
new_coordinates_label = np.vstack((new_coordinates_gen, targets_cat_label)).T
new_coordinates_out = np.vstack((new_coordinates_gen, targets_cat_out)).T
dataframe_label = pd.DataFrame(data=new_coordinates_label, columns=("1st_principal", "2nd_principal", "label"))
dataframe_out = pd.DataFrame(data=new_coordinates_out, columns=("1st_principal", "2nd_principal", "label"))
if args.pca_gen:
pca_result_label = sns.FacetGrid(dataframe_label, hue="label", height=10, hue_kws={'marker':['x'] * 10}).map(plt.scatter, '1st_principal', '2nd_principal')
pca_result_label.set(xticks=[], yticks=[], xlabel='', ylabel='')
plt.savefig(pca_dir+'pca_gen.png')
pca_result_out = sns.FacetGrid(dataframe_out, hue="label", height=10, hue_kws={'marker':['x'] * 10}).map(plt.scatter, '1st_principal', '2nd_principal')
pca_result_out.set(xticks=[], yticks=[], xlabel='', ylabel='')
plt.savefig(pca_dir+'pca_model.png')
if args.pca_mix or args.pca_path:
print('pca of embedding superposition')
linear = []
for i in range(args.num_dot_per_mix):
linear.append(torch.tensor([[i / args.num_dot_per_mix, 1 - (i / args.num_dot_per_mix)]]))
linear = torch.cat(linear, dim=0)
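# Illustrative sketch (hypothetical, never called): row i of `linear` is
# (i/N, 1 - i/N), a convex combination sweeping from one class embedding towards
# the other; the assumed generator semantics are that generator(z, labels, linear)
# superposes the two label embeddings with these weights.
def _mix_weights_demo(N=4):
    w = torch.stack([torch.tensor([i / N, 1 - i / N]) for i in range(N)])
    assert torch.allclose(w.sum(dim=1), torch.ones(N))  # rows sum to 1
    return w  # rows: (0.00,1.00), (0.25,0.75), (0.50,0.50), (0.75,0.25)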
features_gen_mix = []
targets_label_mix = []
with torch.no_grad():
for i in range(10):
for j in range(i + 1, 10):
z = torch.randn(args.num_dot_per_mix, 2, 512).cuda()
z = z.contiguous()
labels = torch.tensor([[i, j]])
labels = torch.cat([labels] * args.num_dot_per_mix, dim=0).cuda()
# sentinel label 300 marks the mixed (inter-class) samples in the plots
target = torch.cat([torch.tensor([300])] * args.num_dot_per_mix, dim=0)
targets_label_mix.append(target)
l = linear.cuda()
images = generator(z, labels, l)
if args.quant:
out, feature = model.quantize_forward(images, out_feature=True)
else:
out, feature = model(images, out_feature=True)
features_gen_mix.append(feature.cpu())
features_cat_gen_mix = torch.cat(features_gen_mix, dim=0)
targets_cat_label_mix = torch.cat(targets_label_mix, dim=0)
standardized_data_gen_mix = feature_scaler.transform(features_cat_gen_mix)
new_coordinates_gen_mix = np.matmul(vectors, standardized_data_gen_mix.T)
new_coordinates_label_mix = np.vstack((new_coordinates_gen_mix, targets_cat_label_mix)).T
dataframe_label_mix = pd.DataFrame(data=new_coordinates_label_mix, columns=("1st_principal", "2nd_principal", "label"))
if args.pca_mix:
pca_result_label_mix = sns.FacetGrid(
pd.concat([dataframe_label_mix, dataframe_label]),
hue="label",
hue_order=[300.0, 0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0] ,
height=10,
hue_kws={'marker':['x'] * 11, 'color': ['black'] + sns.color_palette("tab10")[:10]}
).map(plt.scatter, '1st_principal', '2nd_principal')
pca_result_label_mix.set(xticks=[], yticks=[], xlabel='', ylabel='')
plt.savefig(pca_dir+'pca_gen_mix.png')
if args.pca_path:
print('pca of path')
pca_path_dir = pca_dir+'pca_path/'
os.makedirs(pca_path_dir, exist_ok=True)
linear = []
for i in range(args.num_dot_per_path):
linear.append(torch.tensor([[i / args.num_dot_per_path, 1 - (i / args.num_dot_per_path)]]))
linear = torch.cat(linear, dim=0)
features_no_noise = []
targets_no_noise = []
with torch.no_grad():
for i in range(10):
for j in range(i + 1, 10):
z = torch.zeros(args.num_dot_per_path, 2, 512).cuda()
z = z.contiguous()
labels = torch.tensor([[i, j]])
labels = torch.cat([labels] * args.num_dot_per_path, dim=0).cuda()
# sentinel label (i + 1) * 10 + j uniquely marks the path between classes i and j
target = torch.tensor([(i + 1) * 10 + j])
target = torch.cat([target] * args.num_dot_per_path, dim=0)
targets_no_noise.append(target)
l = linear.cuda()
images = generator(z, labels, l)
if args.quant:
_, feature = model.quantize_forward(images, out_feature=True)
else:
_, feature = model(images, out_feature=True)
features_no_noise.append(feature.cpu())
features_cat_no_noise = torch.cat(features_no_noise, dim=0)
targets_cat_no_noise = torch.cat(targets_no_noise, dim=0)
standardized_data_no_noise = feature_scaler.transform(features_cat_no_noise)
new_coordinates_no_noise = np.matmul(vectors, standardized_data_no_noise.T)
new_coordinates_no_noise = np.vstack((new_coordinates_no_noise, targets_cat_no_noise)).T
dataframe_no_noise = pd.DataFrame(data=new_coordinates_no_noise, columns=("1st_principal", "2nd_principal", "label"))
MID_DOT_NUM = 11
linear = []
for i in range(MID_DOT_NUM):
linear.append(torch.tensor([[i/(MID_DOT_NUM - 1), 1 - (i/(MID_DOT_NUM - 1))]]))
linear = torch.cat(linear, dim=0)
features_no_noise_ten = []
targets_no_noise_ten = []
with torch.no_grad():
for i in range(10):
for j in range(i + 1, 10):
z = torch.zeros(MID_DOT_NUM, 2, 512).cuda()
z = z.contiguous()
labels = torch.tensor([[i, j]])
labels = torch.cat([labels] * MID_DOT_NUM, dim=0).cuda()
# offset by 500 to distinguish the 11 evenly spaced midpoint dots from the path dots
target = torch.tensor([(i + 1) * 10 + j + 500])
target = torch.cat([target] * MID_DOT_NUM, dim=0)
targets_no_noise_ten.append(target)
l = linear.cuda()
images = generator(z, labels, l)
if args.quant:
_, feature = model.quantize_forward(images, out_feature=True)
else:
_, feature = model(images, out_feature=True)
features_no_noise_ten.append(feature.cpu())
features_cat_no_noise_ten = torch.cat(features_no_noise_ten, dim=0)
targets_cat_no_noise_ten = torch.cat(targets_no_noise_ten, dim=0)
color = sns.color_palette("tab10")
features_total = torch.cat([features_cat_gen, features_cat_no_noise, features_cat_gen_mix, features_cat_no_noise_ten], dim=0)
targets_total = torch.cat([targets_cat_label, targets_cat_no_noise, targets_cat_label_mix, targets_cat_no_noise_ten], dim=0)
no_noise_start = len(targets_cat_label)
mix_start = no_noise_start + len(targets_cat_no_noise)
no_noise_ten_start = mix_start + len(targets_cat_label_mix)
standardized_data_total = feature_scaler.transform(features_total)
x = 0
for i in range(10):
for j in range(i + 1, 10):
if j - i == 1:
selected_features = torch.cat([
features_cat_gen[i * 1000: (i + 1) * 1000],
features_cat_gen[j * 1000: (j + 1) * 1000],
features_cat_no_noise[x * args.num_dot_per_path: (x + 1) * args.num_dot_per_path],
features_cat_gen_mix[x * args.num_dot_per_mix: (x + 1) * args.num_dot_per_mix],
features_cat_no_noise_ten[x * MID_DOT_NUM: (x + 1) * MID_DOT_NUM]
] , dim=0)
selected_targets = torch.cat([
targets_cat_label[i * 1000: (i + 1) * 1000],
targets_cat_label[j * 1000: (j + 1) * 1000],
targets_cat_no_noise[x * args.num_dot_per_path: (x + 1) * args.num_dot_per_path],
targets_cat_label_mix[x * args.num_dot_per_mix: (x + 1) * args.num_dot_per_mix],
targets_cat_no_noise_ten[x * MID_DOT_NUM: (x + 1) * MID_DOT_NUM]
], dim=0)
standardized_data_selected = feature_scaler.transform(selected_features)
covar_matrix_selected = np.matmul(standardized_data_selected.T , standardized_data_selected)
values_selected, vectors_selected = eigh(covar_matrix_selected, eigvals=(510,511))
vectors_selected = vectors_selected.T
new_coordinates_selected = np.matmul(vectors_selected, standardized_data_selected.T)
new_coordinates_selected = np.vstack((new_coordinates_selected, selected_targets)).T
df_selected = pd.DataFrame(data=new_coordinates_selected, columns=("1st_principal", "2nd_principal", "label"))
pca_result_selected = sns.FacetGrid(df_selected, hue="label", height=10, hue_kws={'marker':['o', 'o', 'o', 'o', 'o'], 's':[30, 30, 30, 30, 300], 'color':[color[i], color[j], 'black', 'lightgreen', 'black'],}).map(plt.scatter, '1st_principal', '2nd_principal')
pca_result_selected.set(xticks=[], yticks=[], xlabel='', ylabel='')
plt.savefig(pca_path_dir + f'path_{i}_{j}.png')
x += 1
#!/bin/bash
#- Job parameters
# (TODO)
# Please modify job name
#SBATCH -J Boundary # The job name
#SBATCH -o ret/ret-%j.out # Write the standard output to file named 'ret-<job_number>.out'
#SBATCH -e ret/ret-%j.err # Write the standard error to file named 'ret-<job_number>.err'
#- Resources
# (TODO)
# Please modify your requirements
#SBATCH -p nv-gpu # Submit to 'nv-gpu' Partition
#SBATCH -t 0-04:00:00 # Run for a maximum time of 0 days, 4 hours, 00 mins, 00 secs
#SBATCH --nodes=1 # Request N nodes
#SBATCH --gres=gpu:1 # Request M GPU per node
#SBATCH --gres-flags=enforce-binding # CPU-GPU Affinity
#SBATCH --qos=gpu-debug # Request QOS Type
###
### The system will alloc 8 or 16 cores per gpu by default.
### If you need more or less, use following:
### #SBATCH --cpus-per-task=K # Request K cores
###
###
### Without specifying the constraint, any available nodes that meet the requirement will be allocated
### You can specify the characteristics of the compute nodes, and even the names of the compute nodes
###
### #SBATCH --nodelist=gpu-v00 # Request a specific list of hosts
### #SBATCH --constraint="Volta|RTX8000" # Request GPU Type: Volta(V100 or V100S) or RTX8000
###
# set constraint for RTX8000 to meet my cuda
#SBATCH --constraint="Ampere|RTX8000"
#- Log information
echo "Job start at $(date "+%Y-%m-%d %H:%M:%S")"
echo "Job run at:"
echo "$(hostnamectl)"
#- Load environments
source /tools/module_env.sh
module list # list modules loaded
##- Tools
module load cluster-tools/v1.0
module load slurm-tools/v1.0
module load cmake/3.15.7
module load git/2.17.1
module load vim/8.1.2424
##- language
module load python3/3.8.16
##- CUDA
# module load cuda-cudnn/10.2-7.6.5
# module load cuda-cudnn/11.2-8.2.1
module load cuda-cudnn/11.1-8.2.1
##- virtualenv
# source xxxxx/activate
echo $(module list) # list modules loaded
echo $(which gcc)
echo $(which python)
echo $(which python3)
cluster-quota # nas quota
nvidia-smi --format=csv --query-gpu=name,driver_version,power.limit # gpu info
#- Warning! Please do not change your CUDA_VISIBLE_DEVICES
#- in `.bashrc`, `env.sh`, or your job script
echo "Use GPU ${CUDA_VISIBLE_DEVICES}" # which gpus
#- The CUDA_VISIBLE_DEVICES variable is assigned and specified by SLURM
#- Job step
# [EDIT HERE(TODO)]
### FULL
python boundary_visualize.py --pca_gen --pca_mix --pca_path
# ### quant
python boundary_visualize.py --pca_gen --pca_mix --pca_path --quant --quant_type=INT --num_bits=4
python boundary_visualize.py --pca_gen --pca_mix --pca_path --quant --quant_type=INT --num_bits=5
# python boundary_visualize.py --pca_gen --pca_mix --pca_path --quant --quant_type=INT --num_bits=16
#- End
echo "Job end at $(date "+%Y-%m-%d %H:%M:%S")"
@@ -9,7 +9,9 @@
# dropout: 'D'
# MakeLayer: 'ML','BBLK'/'BTNK'/'IRES', ml_idx, blocks
# softmax: 'SM'
# class 100
# Optionally output features after VIEW
# class 100: adapted to the dataset at model-deployment time
ResNet_18_cfg_table = [
['C','BRL',True,3,64,3,1,1,False],
['ML','BBLK',0,2],
from model import *
from model import Model
from dataloader import DataLoader
from utils import numbit_list, ebit_list,build_bias_list, build_list
from gen_options import GenOption
from generator import Generator,Generator_imagenet
import module
import gol
from robust_utils import build_gen_loader, get_adversary, test_robust
import torch.nn.functional as F
import argparse
import time
import torch
import torch.nn as nn
from torch.optim.lr_scheduler import MultiStepLR
import sys
import os
import os.path as osp
class GenOption(object):
def __init__(self, args):
self.model = args.model
self.dataset = args.dataset
self.batchSize = 128
if self.dataset == "cifar10":
self.nClasses = 10
elif self.dataset == "cifar100":
self.nClasses = 100
else:
assert False, "invalid dataset"
# ----------Generator options ---------------------------------------------
# Number of training iterations per epoch, independent of batch size
self.iters = 200
# Freeze the embedding-layer weights
self.freeze = args.freeze
# self.randemb = args.randemb
self.randemb = False
# If randemb is False, this is adjusted from weight_t
self.latent_dim = 64
# Needs adjusting for datasets such as imagenet
self.img_size = 32
self.channels = 3
# self.milestones_G = [40,60,80]
if self.dataset == 'cifar10':
self.lr_G = 0.001
self.nEpochs = 20
self.milestones_G = [15]
elif self.dataset == 'cifar100':
# self.lr_G = 0.005
# self.nEpochs = 50
# self.milestones_G = [20]
self.lr_G = 0.005
self.nEpochs = 20
self.milestones_G = [15]
self.gamma_G = 0.2
self.b1 = 0.5
self.b2 = 0.999
# Adversarial-attack settings
# Number of batches for the generated fake dataset; total samples = batchSize * gen_iters
# Keep gen_iters small to speed up training; a rough estimate suffices here
self.gen_iters = 20
self.eps = 8/255
self.steps = 10
# ----------More option ---------------------------------------------
self.multi_label_num = args.multi_label_num
self.no_DM = args.no_DM
self.noise_scale = args.noise_scale
self.intermediate_dim = 100
self.adjust = args.adjust
self.teacher_file = 'ckpt_full/'+self.dataset+'/'+self.model+'.pt'
gen_path = 'ckpt_gen/'+self.dataset+'/'
self.gen_file = gen_path+ self.model+'.pt'
self.gen_file_adjust = gen_path+self.model+'_adjust.pt'
if not osp.exists(self.teacher_file):
assert False, "Empty teacher file"
if not osp.exists(gen_path):
os.makedirs(gen_path)
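# Illustrative sketch (hypothetical, torch-only; the project itself uses
# robust_utils.get_adversary / test_robust): FGSM with the eps above perturbs
# each pixel by +-eps along the sign of the input gradient. The clamp to [0, 1]
# assumes inputs normalized to that range.
def _fgsm_sketch(model, images, labels, eps=8 / 255):
    images = images.clone().detach().requires_grad_(True)
    loss = F.cross_entropy(model(images), labels)
    grad = torch.autograd.grad(loss, images)[0]
    return (images + eps * grad.sign()).clamp(0, 1).detach()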
class GenTrainer(object):
def __init__(self, option):
self.settings = option
self.set_test_loader()
self.set_teacher()
self.set_generator()
self.set_optim_G()
def set_test_loader(self):
dataloader = DataLoader(self.settings.dataset,self.settings.batchSize)
_,_,self.test_loader = dataloader.getloader()
def set_teacher(self):
self.model_teacher = Model(self.settings.model,self.settings.dataset).cuda()
if self.settings.quant:
self.model_teacher.quantize(self.settings.quant_type,self.settings.num_bits,self.settings.e_bits)
self.model_teacher.load_state_dict(torch.load(self.settings.teacher_file))
self.model_teacher.eval()
# When randemb is False, this also updates latent_dim
def set_generator(self):
if self.settings.randemb:
weight_t = None
else:
if self.settings.quant:
weight_t = self.model_teacher.get_quant_output_layer_weight()
else:
weight_t = self.model_teacher.get_output_layer_weight()
# If the output layer is a Conv, the weight has four dims; the last two are 1 and are dropped
if self.settings.model in ['Inception_BN']:
weight_t = weight_t.reshape(weight_t.size()[:2])
self.settings.latent_dim = weight_t.size()[1]
if self.settings.dataset in ['cifar10','cifar100']:
self.generator = Generator(self.settings, weight_t, self.settings.freeze).cuda()
elif self.settings.dataset in ['imagenet']:
self.generator = Generator_imagenet(self.settings, weight_t, self.settings.freeze).cuda()
else:
assert False, "Invalid dataset"
def set_optim_G(self):
self.optim_G = torch.optim.Adam(self.generator.parameters(), lr=self.settings.lr_G,
betas=(self.settings.b1, self.settings.b2))
self.lrs_G = MultiStepLR(self.optim_G, milestones=self.settings.milestones_G, gamma=self.settings.gamma_G)
def test_teacher(self):
correct = 0
with torch.no_grad():
for data, target in self.test_loader:
data,target = data.cuda(), target.cuda()
if self.settings.quant:
output = self.model_teacher.quantize_forward(data)
else:
output = self.model_teacher(data)
pred = output.argmax(dim=1, keepdim=True)
correct += pred.eq(target.view_as(pred)).sum().item()
print('Teacher Accuracy: {:.2f}%'.format(100. * correct / len(self.test_loader.dataset)))
def prepare_train(self):
self.log_soft = nn.LogSoftmax(dim=1)
# MSE is mainly used for regression-style training
self.MSE_loss = nn.MSELoss().cuda()
self.mean_list = []
self.var_list = []
self.teacher_running_mean = []
self.teacher_running_var = []
self.model_teacher.eval()
self.generator.train()
if self.settings.quant:
for m in self.model_teacher.modules():
if isinstance(m, module.QConvBN) or isinstance(m,module.QConvBNReLU) or isinstance(m,module.QConvBNReLU6):
m.register_forward_hook(self.quant_hook_fn_forward)
else:
for m in self.model_teacher.modules():
if isinstance(m, nn.BatchNorm2d):
m.register_forward_hook(self.hook_fn_forward)
# For the fused ConvBN layers of a quantized network, extract the inner BN statistics;
# the input is therefore convolved first before being treated as the BN input
def quant_hook_fn_forward(self,module, input, output):
def __init__(self, option):
self.settings = option
self.set_test_loader()
self.set_teacher()
self.set_generator()
self.set_optim_G()
def set_test_loader(self):
dataloader = DataLoader(self.settings.dataset,self.settings.batchSize)
_,_,self.test_loader = dataloader.getloader()
def set_teacher(self):
self.teacher = Model(self.settings.model,self.settings.dataset).cuda()
self.teacher.load_state_dict(torch.load(self.settings.teacher_file))
self.teacher.eval()
# Separate copy used for adversarial attacks, avoiding useless hooks and saving GPU memory
self.teacher_nohook = Model(self.settings.model,self.settings.dataset).cuda()
self.teacher_nohook.load_state_dict(torch.load(self.settings.teacher_file))
self.teacher_nohook.eval()
# When randemb is False, this also updates latent_dim
def set_generator(self):
if self.settings.randemb:
weight_t = None
else:
# Clone here so that generator updates do not modify the model weights
weight_t = self.teacher.get_output_layer_weight().clone().detach()
# If the output layer is a Conv, the weight has four dims; the last two are 1 and are dropped
if self.settings.model in ['Inception_BN']:
weight_t = weight_t.reshape(weight_t.size()[:2])
self.settings.latent_dim = weight_t.size()[1]
if self.settings.dataset in ['cifar10','cifar100']:
self.generator = Generator(self.settings, weight_t, self.settings.freeze).cuda()
elif self.settings.dataset in ['imagenet']:
self.generator = Generator_imagenet(self.settings, weight_t, self.settings.freeze).cuda()
else:
assert False, "Invalid dataset"
def set_optim_G(self):
self.optim_G = torch.optim.Adam(self.generator.parameters(), lr=self.settings.lr_G,
betas=(self.settings.b1, self.settings.b2))
self.lrs_G = MultiStepLR(self.optim_G, milestones=self.settings.milestones_G, gamma=self.settings.gamma_G)
def test_teacher(self):
correct = 0
with torch.no_grad():
for data, target in self.test_loader:
data,target = data.cuda(), target.cuda()
output = self.teacher(data)
pred = output.argmax(dim=1, keepdim=True)
correct += pred.eq(target.view_as(pred)).sum().item()
test_acc = 100. * correct / len(self.test_loader.dataset)
print('Teacher Accuracy: {:.2f}%'.format(test_acc))
return test_acc
def prepare_train(self):
self.log_soft = nn.LogSoftmax(dim=1)
# MSE is mainly used for regression-style training
self.MSE_loss = nn.MSELoss().cuda()
self.mean_list = []
self.var_list = []
self.teacher_running_mean = []
self.teacher_running_var = []
self.teacher.eval()
self.generator.train()
for m in self.teacher.modules():
if isinstance(m, nn.BatchNorm2d):
m.register_forward_hook(self.hook_fn_forward)
# Initially disable the hook; enable it only while tracking BN layers
# self.hook_switch(hook_on=False)
# Track the BN layers of the full-precision model to extract data-distribution statistics
def hook_fn_forward(self, module, input, output):
# mean/var are computed on the incoming fake data; running_mean/running_var come from the data seen during training
weight = module.conv_module.weight.clone()
if module.conv_module.bias is not None:
bias = module.conv_module.bias.clone()
else:
bias = None
stride = module.conv_module.stride
padding = module.conv_module.padding
groups = module.conv_module.groups
input = input[0]
input = F.conv2d(input, weight, bias,
stride=stride,
padding=padding,
groups=groups)
mean = input.mean([0, 2, 3])
# use biased var in train
var = input.var([0, 2, 3], unbiased=False)
self.mean_list.append(mean)
self.var_list.append(var)
# In eval mode, the statistics can be read directly from the BN inside the QConvBN layer
self.teacher_running_mean.append(module.bn_module.running_mean)
self.teacher_running_var.append(module.bn_module.running_var)
def hook_fn_forward(self,module, input, output):
# mean/var are computed on the incoming fake data; running_mean/running_var come from the data seen during training
input = input[0]
mean = input.mean([0, 2, 3])
# use biased var in train
var = input.var([0, 2, 3], unbiased=False)
self.mean_list.append(mean)
self.var_list.append(var)
# In eval mode, read the running statistics directly from the BN layer
self.teacher_running_mean.append(module.running_mean)
self.teacher_running_var.append(module.running_var)
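# Illustrative sketch (hypothetical, never called): how a forward hook on a
# BatchNorm2d layer exposes both the batch statistics of the current input and
# the running statistics accumulated during training, as in hook_fn_forward.
def _bn_hook_demo():
    bn = nn.BatchNorm2d(3).eval()
    seen = {}
    def hook(module, inputs, output):
        x = inputs[0]
        seen['batch_mean'] = x.mean([0, 2, 3])      # stats of this input
        seen['running_mean'] = module.running_mean  # stats from training
    handle = bn.register_forward_hook(hook)
    bn(torch.randn(4, 3, 8, 8))
    handle.remove()
    return seen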
def train(self, epoch):
# total_loss = 0.
correct = 0
item_len = 0
for i in range(self.settings.iters):
multi_class = torch.rand(1)
self.MERGE_PARAM = self.settings.multi_label_num
MERGE_PROB = self.settings.multi_label_prob # superpose probability
# MERGE_PROB is the fraction of decision-boundary samples (i.e., multi-label samples)
# The if-branch uses multi-class labels; each sample has several labels, so acc cannot be measured there and is only updated in the else-branch
# Multi-label samples lie near the decision boundary and still improve the generator through the loss
if multi_class<MERGE_PROB:
# Get labels ranging from 0 to n_classes for n rows
z = torch.randn(self.settings.batchSize, self.MERGE_PARAM,self.settings.latent_dim).cuda()
labels = torch.randint(0, self.settings.nClasses, (self.settings.batchSize,self.MERGE_PARAM)).cuda()
linear = F.softmax(torch.randn(self.settings.batchSize,self.MERGE_PARAM),dim=1).cuda()
z = z.contiguous()
labels = labels.contiguous()
labels_loss = torch.zeros(self.settings.batchSize,self.settings.nClasses).cuda()
labels_loss.scatter_add_(1,labels,linear)
images = self.generator(z, labels, linear)
else:
z = torch.randn(self.settings.batchSize, self.settings.latent_dim).cuda()
labels = torch.randint(0, self.settings.nClasses, (self.settings.batchSize,)).cuda()
z = z.contiguous()
labels = labels.contiguous()
images = self.generator(z, labels)
labels_loss = torch.zeros(self.settings.batchSize,self.settings.nClasses).cuda()
labels_loss.scatter_(1,labels.unsqueeze(1),1.0)
self.mean_list.clear()
self.var_list.clear()
# Get the teacher output; hook_fn_forward collects the mean and var lists as a side effect
if self.settings.quant:
output_teacher_batch = self.model_teacher.quantize_forward(images)
else:
output_teacher_batch = self.model_teacher(images)
# Loss between the teacher output and the labels (a 1-D tensor before the mean)
loss_one_hot = (-(labels_loss*self.log_soft(output_teacher_batch)).sum(dim=1)).mean()
# BN statistic loss
# Matches the fake-data statistics against the teacher's BN-layer statistics
BNS_loss = torch.zeros(1).cuda()
for num in range(len(self.mean_list)):
BNS_loss += self.MSE_loss(self.mean_list[num], self.teacher_running_mean[num]) + self.MSE_loss(
self.var_list[num], self.teacher_running_var[num])
BNS_loss = BNS_loss / len(self.mean_list)
# loss of Generator
loss_G = loss_one_hot + 0.1 * BNS_loss
# total_loss += loss_G.item()
self.optim_G.zero_grad()
loss_G.backward()
self.optim_G.step()
if not multi_class< MERGE_PROB:
pred = output_teacher_batch.argmax(dim=1, keepdim=True)
correct += pred.eq(labels.view_as(pred)).sum().item()
item_len += self.settings.batchSize
# train_loss = total_loss/self.settings.iters
gen_acc = 100. * correct / item_len
# Corresponds to the first line of the output: the teacher's accuracy on inputs the generator produced from noisy labels
# Higher acc means a better generator whose outputs are closer to the real data distribution
# print(
# "[Epoch %d/%d] [Batch %d/%d] [acc: %.4f%%] [G loss: %f] [Time: %5.2fs]"
# % (epoch + 1, self.settings.nEpochs, i+1, self.settings.iters, gen_acc, train_loss, (time.time()-start_time))
# )
return gen_acc
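# Illustrative sketch (hypothetical, never called): the two label encodings used
# in train(). scatter_ builds a one-hot target for single-label samples;
# scatter_add_ accumulates softmax mixing weights into a soft multi-label target
# whose rows still sum to 1.
def _label_encoding_demo(nClasses=5):
    labels = torch.tensor([2, 4])
    one_hot = torch.zeros(2, nClasses).scatter_(1, labels.unsqueeze(1), 1.0)
    multi = torch.tensor([[0, 3], [1, 4]])
    weights = F.softmax(torch.randn(2, 2), dim=1)
    soft = torch.zeros(2, nClasses).scatter_add_(1, multi, weights)
    assert torch.allclose(soft.sum(dim=1), torch.ones(2))
    return one_hot, soft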
def run(self):
self.test_teacher()
self.prepare_train()
start_time = time.time()
best_gen_acc = None
for epoch in range(1,self.settings.nEpochs):
gen_acc = self.train(epoch)
if not best_gen_acc or gen_acc > best_gen_acc:
best_gen_acc = gen_acc
torch.save(self.generator, self.settings.gen_file)
time_interval = time.time()-start_time
print('>> Epoch:%d Time:%.2fs Cur acc:%.4f Best acc:%.4f'%(epoch,time_interval,gen_acc,best_gen_acc))
start_time = time.time()
input = input[0]
mean = input.mean([0, 2, 3])
# use biased var in train
var = input.var([0, 2, 3], unbiased=False)
self.mean_list.append(mean)
self.var_list.append(var)
# In eval mode, read the running statistics directly from the BN layer
self.teacher_running_mean.append(module.running_mean)
self.teacher_running_var.append(module.running_var)
# Training: only intra-class (single-label) samples are used
def train(self, epoch):
start_time = time.time()
correct = 0
item_len = 0
# if epoch > 20:
# adversary = get_adversary(self.teacher_nohook, attack_type='pgd', c=self.settings.eps, num_classes=self.settings.nClasses)
for i in range(self.settings.iters):
# multi_class = torch.rand(1)
# if multi_class <0.4:
# MERGE_PARAM = self.settings.multi_label_num
# z = torch.randn(self.settings.batchSize, MERGE_PARAM,self.settings.latent_dim).cuda()
# labels = torch.randint(0, self.settings.nClasses, (self.settings.batchSize,MERGE_PARAM)).cuda()
# linear = F.softmax(torch.randn(self.settings.batchSize,MERGE_PARAM),dim=1).cuda()
# z = z.contiguous()
# labels = labels.contiguous()
# labels_loss = torch.zeros(self.settings.batchSize,self.settings.nClasses).cuda()
# labels_loss.scatter_add_(1,labels,linear)
# images = self.generator(z, labels, linear)
# else:
z = torch.randn(self.settings.batchSize, self.settings.latent_dim).cuda()
labels = torch.randint(0, self.settings.nClasses, (self.settings.batchSize,)).cuda()
z = z.contiguous()
labels = labels.contiguous()
images = self.generator(z, labels)
# if multi_class > 0.4 and multi_class < 0.41:
# adv_images = images.clone()
# adv_images.data = adversary.perturb(images, labels)
# images.data = adversary.perturb(images, labels)
labels_loss = torch.zeros(self.settings.batchSize,self.settings.nClasses).cuda()
labels_loss.scatter_(1,labels.unsqueeze(1),1.0)
self.mean_list.clear()
self.var_list.clear()
# Get the teacher output; hook_fn_forward collects the mean and var lists as a side effect
# self.hook_switch(hook_on=True)
output_teacher_batch = self.teacher(images)
# self.hook_switch(hook_on=False)
# Loss between the teacher output and the labels (a 1-D tensor before the mean)
loss_one_hot = (-(labels_loss*self.log_soft(output_teacher_batch)).sum(dim=1)).mean()
# BN statistic loss
# Matches the fake-data statistics against the teacher's BN-layer statistics
BNS_loss = torch.zeros(1).cuda()
for num in range(len(self.mean_list)):
BNS_loss += self.MSE_loss(self.mean_list[num], self.teacher_running_mean[num]) + self.MSE_loss(
self.var_list[num], self.teacher_running_var[num])
BNS_loss = BNS_loss / len(self.mean_list)
# if epoch > 20 and not multi_class < 0:
# if multi_class > 0.4 and multi_class < 0.45:
# adv_output_teacher = self.teacher_nohook(adv_images)
# adv_loss_one_hot = (-(labels_loss*self.log_soft(adv_output_teacher)).sum(dim=1)).mean()
# loss_G = loss_one_hot + 0.2 * adv_loss_one_hot + 0.1 * BNS_loss
# # loss_G = adv_loss_one_hot + 0.1 * BNS_loss
# # if i > 198:
# # print("%f %f %f"%(loss_one_hot.item(),adv_loss_one_hot.item(),BNS_loss.item()))
# else:
# # loss of Generator
# loss_G = loss_one_hot + 0.1 * BNS_loss
# # if i > 198:
# # print("%f %f"%(loss_one_hot.item(),BNS_loss.item()))
loss_G = loss_one_hot + 0.1 * BNS_loss
self.optim_G.zero_grad()
loss_G.backward()
self.optim_G.step()
# if multi_class >= 0.4:
pred = output_teacher_batch.argmax(dim=1, keepdim=True)
correct += pred.eq(labels.view_as(pred)).sum().item()
item_len += self.settings.batchSize
gen_acc = 100. * correct / item_len
time_interval = time.time()-start_time
print('>> Epoch:%d Time:%.2fs Gen acc:%.4f'%(epoch,time_interval,gen_acc))
return gen_acc
# Use the teacher's adversarial accuracy to guide the mixing of inter-class samples
def adjust(self):
# Teacher's adversarial accuracy on the test set
teacher_org_acc = test_robust(self.teacher_nohook, attack_type=None, c=self.settings.eps, num_classes=self.settings.nClasses,
testloader=self.test_loader)
teacher_fgsm_acc = test_robust(self.teacher_nohook, attack_type='fgsm', c=self.settings.eps, num_classes=self.settings.nClasses,
testloader=self.test_loader)
teacher_pgd_acc = test_robust(self.teacher_nohook, attack_type='pgd', c=self.settings.eps, num_classes=self.settings.nClasses,
testloader=self.test_loader)
gen_fgsm_acc = None
gen_pgd_acc = None
adjust_gen_acc = None
iter_cnt = 0
org_bound = 3
bound = 1
tolerance_cnt = 0
tolerance_max = 300
adversary = get_adversary(self.teacher_nohook, attack_type='pgd', c=self.settings.eps, num_classes=self.settings.nClasses)
while True:
# Teacher's adversarial accuracy on the fake dataset
gen_loader = build_gen_loader( generator=self.generator,
batchSize=self.settings.batchSize,
iters=self.settings.gen_iters,
latent_dim=self.settings.latent_dim,
nClasses=self.settings.nClasses)
print('>>Iters: %d'%iter_cnt)
print('Teacher org acc: %.4f fgsm acc: %.4f pgd acc: %.4f'%(teacher_org_acc, teacher_fgsm_acc, teacher_pgd_acc))
gen_org_acc = test_robust(self.teacher_nohook, attack_type=None, c=self.settings.eps, num_classes=self.settings.nClasses,
testloader=gen_loader)
gen_fgsm_acc = test_robust(self.teacher_nohook, attack_type='fgsm', c=self.settings.eps, num_classes=self.settings.nClasses,
testloader=gen_loader)
gen_pgd_acc = test_robust(self.teacher_nohook, attack_type='pgd', c=self.settings.eps, num_classes=self.settings.nClasses,
testloader=gen_loader)
# Tolerance interval for the adversarial accuracy
if tolerance_cnt > tolerance_max:
org_bound += 1
bound += 1
tolerance_cnt = 0
else:
tolerance_cnt += 1
# If adversarial accuracy is too high, mix in inter-class samples; if too low, add intra-class samples; one iteration at a time
# Target: adversarial accuracy within testloader accuracy +/- bound
org_diff = gen_org_acc - teacher_org_acc
fgsm_diff = gen_fgsm_acc - teacher_fgsm_acc
pgd_diff = gen_pgd_acc - teacher_pgd_acc
# if (fgsm_diff > bound and pgd_diff > -bound) or (fgsm_diff > -bound and pgd_diff > bound):
# multi_label = True
# elif (fgsm_diff < bound and pgd_diff < -bound) or (fgsm_diff < -bound and pgd_diff < bound):
# multi_label = False
# elif (fgsm_diff > bound and pgd_diff < -bound) or (fgsm_diff < -bound and pgd_diff > bound):
# if fgsm_diff + pgd_diff > 0:
# multi_label = True
# else:
# multi_label = False
# if pgd_diff > bound:
# multi_label = True
# elif pgd_diff < -bound:
# multi_label = False
# else:
# # Final gen_acc on the fake dataset; attack_type=None means clean samples
# # print('>>Final Gen acc:')
# # adjust_gen_acc = test_robust(self.teacher, attack_type=None, c=self.settings.eps, num_classes=self.settings.nClasses,
# # testloader=gen_loader)
# break
if org_diff < -org_bound:
multi_label = False
if pgd_diff < -bound:
adv_flag = True
else:
adv_flag = False
else:
if pgd_diff < -bound:
multi_label = False
adv_flag = True
elif pgd_diff > bound:
multi_label = True
adv_flag = False
else:
break
if multi_label:
print('>>Multi Label: decrease gen_adv_acc')
MERGE_PARAM = self.settings.multi_label_num
z = torch.randn(self.settings.batchSize, MERGE_PARAM,self.settings.latent_dim).cuda()
labels = torch.randint(0, self.settings.nClasses, (self.settings.batchSize,MERGE_PARAM)).cuda()
linear = F.softmax(torch.randn(self.settings.batchSize,MERGE_PARAM),dim=1).cuda()
z = z.contiguous()
labels = labels.contiguous()
labels_loss = torch.zeros(self.settings.batchSize,self.settings.nClasses).cuda()
labels_loss.scatter_add_(1,labels,linear)
images = self.generator(z, labels, linear)
else:
print('>>Single Label: raise gen_acc')
z = torch.randn(self.settings.batchSize, self.settings.latent_dim).cuda()
labels = torch.randint(0, self.settings.nClasses, (self.settings.batchSize,)).cuda()
z = z.contiguous()
labels = labels.contiguous()
images = self.generator(z, labels)
if adv_flag:
adv_images = images.clone()
adv_images.data = adversary.perturb(images, labels)
labels_loss = torch.zeros(self.settings.batchSize,self.settings.nClasses).cuda()
labels_loss.scatter_(1,labels.unsqueeze(1),1.0)
self.mean_list.clear()
self.var_list.clear()
# Get the teacher output; hook_fn_forward collects the mean and var lists as a side effect
output_teacher_batch = self.teacher(images)
# Loss between the teacher output and the labels (a 1-D tensor before the mean)
loss_one_hot = (-(labels_loss*self.log_soft(output_teacher_batch)).sum(dim=1)).mean()
# BN statistic loss
# Matches the fake-data statistics against the teacher's BN-layer statistics
BNS_loss = torch.zeros(1).cuda()
for num in range(len(self.mean_list)):
BNS_loss += self.MSE_loss(self.mean_list[num], self.teacher_running_mean[num]) + self.MSE_loss(
self.var_list[num], self.teacher_running_var[num])
BNS_loss = BNS_loss / len(self.mean_list)
# loss of Generator
# if not multi_label:
if adv_flag:
adv_output_teacher = self.teacher_nohook(adv_images)
adv_loss_one_hot = (-(labels_loss*self.log_soft(adv_output_teacher)).sum(dim=1)).mean()
loss_G = loss_one_hot + 0.5 * adv_loss_one_hot + 0.1 * BNS_loss
else:
loss_G = loss_one_hot + 0.1 * BNS_loss
self.optim_G.zero_grad()
loss_G.backward()
self.optim_G.step()
iter_cnt += 1
# print('== Adjust: iters:%d gen_acc:%.4f FGSM_acc:%.4f/%.4f PGD_acc:%.4f/%.4f'%(iter_cnt, adjust_gen_acc, gen_fgsm_acc, teacher_fgsm_acc, gen_pgd_acc, teacher_pgd_acc))
# return iter_cnt, adjust_gen_acc, gen_fgsm_acc, teacher_fgsm_acc, gen_pgd_acc, teacher_pgd_acc
print('== Adjust: iters:%d Org_acc:%.4f/%.4f FGSM_acc:%.4f/%.4f PGD_acc:%.4f/%.4f'%(iter_cnt, gen_org_acc, teacher_org_acc, gen_fgsm_acc, teacher_fgsm_acc, gen_pgd_acc, teacher_pgd_acc))
return iter_cnt, gen_org_acc, gen_fgsm_acc, teacher_fgsm_acc, gen_pgd_acc, teacher_pgd_acc
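# Illustrative sketch (hypothetical restatement of the decision rule in adjust()
# above): given the gaps between fake-data and test-set accuracies, decide
# whether to train on inter-class (multi-label) samples, whether to add the
# adversarial loss term, or to stop.
def _adjust_decision(org_diff, pgd_diff, org_bound=3, bound=1):
    if org_diff < -org_bound:
        return dict(multi_label=False, adv_flag=pgd_diff < -bound, stop=False)
    if pgd_diff < -bound:
        return dict(multi_label=False, adv_flag=True, stop=False)
    if pgd_diff > bound:
        return dict(multi_label=True, adv_flag=False, stop=False)
    return dict(stop=True)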
def run(self, ft=None): # ft: optional file handle for logging
test_acc = self.test_teacher()
if ft is not None:
ft.write('Test Result:\n')
ft.write('\tTeacher Accuracy: %f\n'%test_acc)
ft.flush()
start_time = time.time()
self.prepare_train()
for epoch in range(1,self.settings.nEpochs+1):
# self.generator.train()
gen_acc = self.train(epoch)
# self.generator.eval()
# gen_loader = build_gen_loader( generator=self.generator,
# batchSize=self.settings.batchSize,
# iters=self.settings.gen_iters,
# latent_dim=self.settings.latent_dim,
# nClasses=self.settings.nClasses)
# test_robust(self.teacher_nohook, attack_type='fgsm', c=self.settings.eps, num_classes=self.settings.nClasses,
# testloader=gen_loader)
# test_robust(self.teacher_nohook, attack_type='pgd', c=self.settings.eps, num_classes=self.settings.nClasses,
# testloader=gen_loader)
torch.save(self.generator, self.settings.gen_file)
train_interval = time.time()-start_time
if ft is not None:
ft.write('Train Result:\n')
ft.write('\tGen acc: %f\n'%gen_acc)
ft.write('\tTime: %.2fs\n'%train_interval)
ft.flush()
if self.settings.adjust:
start_time = time.time()
iter_cnt, adjust_gen_acc, gen_fgsm_acc, teacher_fgsm_acc, gen_pgd_acc, teacher_pgd_acc = self.adjust()
torch.save(self.generator, self.settings.gen_file_adjust)
adjust_interval = time.time()-start_time
if ft is not None:
ft.write('Adjust Result:\n')
ft.write('\tIters: %d\n'%iter_cnt)
ft.write('\tGen acc: %f\n'%adjust_gen_acc)
ft.write('\tFGSM acc: %f -- %f(Testloader)\n'%(gen_fgsm_acc,teacher_fgsm_acc))
ft.write('\tPGD acc: %f -- %f(Testloader)\n'%(gen_pgd_acc,teacher_pgd_acc))
ft.write('\tTime: %.2fs\n'%adjust_interval)
ft.flush()
def main():
# Flush stdout promptly (line-buffered)
sys.stdout = open(sys.stdout.fileno(), mode='w', buffering=1)
parser = argparse.ArgumentParser(description='Gen Arg')
parser.add_argument('--model', type=str)
parser.add_argument('--dataset',type=str)
parser.add_argument('--quant', action='store_true')
parser.add_argument('--freeze', action='store_true')
parser.add_argument('--randemb', action='store_true')
parser.add_argument('--multi_label_prob', type=float, default=0.0)
parser.add_argument('--multi_label_num', type=int, default=2)
parser.add_argument('--no_DM', action='store_false')
parser.add_argument('--noise_scale', type=float, default=1.0)
args = parser.parse_args()
print(args)
option = GenOption(args)
if option.quant:
gol._init()
quant_type_list = ['POT','FLOAT']
for quant_type in quant_type_list:
num_bit_list = numbit_list(quant_type)
if quant_type != 'INT':
bias_list = build_bias_list(quant_type)
gol.set_value(bias_list, is_bias=True)
for num_bits in num_bit_list:
e_bit_list = ebit_list(quant_type,num_bits)
for e_bits in e_bit_list:
if quant_type == 'FLOAT':
title = '%s_%d_E%d' % (quant_type, num_bits, e_bits)
else:
title = '%s_%d' % (quant_type, num_bits)
# Set up the quantization table
if quant_type != 'INT':
plist = build_list(quant_type, num_bits, e_bits)
gol.set_value(plist)
print('>'*20 + 'Gen: '+option.model+' '+title+'<'*20)
option.set(quant_type,num_bits,e_bits)
gentrainer = GenTrainer(option)
gentrainer.run()
else:
print('>'*20 + 'Gen: '+option.model+' Full'+'<'*20)
option.set()
gentrainer = GenTrainer(option)
gentrainer.run()
# Flush stdout promptly (line-buffered)
sys.stdout = open(sys.stdout.fileno(), mode='w', buffering=1)
torch.backends.cudnn.enabled = True
torch.backends.cudnn.benchmark = True
parser = argparse.ArgumentParser(description='Gen Arg')
parser.add_argument('--model', type=str)
parser.add_argument('--dataset',type=str)
parser.add_argument('--freeze', action='store_true')
parser.add_argument('--randemb', action='store_true')
parser.add_argument('--multi_label_prob', type=float, default=0.0)
parser.add_argument('--multi_label_num', type=int, default=2)
parser.add_argument('--no_DM', action='store_false')
parser.add_argument('--noise_scale', type=float, default=1.0)
# Whether to adjust the generator according to the full-precision model's adversarial accuracy on the test set
parser.add_argument('--adjust', action='store_true')
args = parser.parse_args()
print(args)
option = GenOption(args)
print('>'*20 + 'Gen: '+option.model+'<'*20)
gen_result_dir = 'gen_result/'+option.dataset+'/'
os.makedirs(gen_result_dir, exist_ok=True)
txt_path = gen_result_dir+option.model+'.txt'
ft = open(txt_path,'w')
ft.write('Gen: '+option.model+' '+option.dataset+'\n')
ft.flush()
print(args, file=ft)
ft.flush()
gentrainer = GenTrainer(option)
gentrainer.run(ft)
if __name__ == '__main__':
main()
@@ -11,11 +11,11 @@
# Please modify your requirements
#SBATCH -p nv-gpu # Submit to 'nv-gpu' Partition
#SBATCH -t 1-06:00:00 # Run for a maximum time of 1 day, 6 hours, 00 mins, 00 secs
#SBATCH -t 0-08:00:00 # Run for a maximum time of 0 days, 8 hours, 00 mins, 00 secs
#SBATCH --nodes=1 # Request N nodes
#SBATCH --gres=gpu:1 # Request M GPU per node
#SBATCH --gres-flags=enforce-binding # CPU-GPU Affinity
#SBATCH --qos=gpu-normal # Request QOS Type
#SBATCH --qos=gpu-short # Request QOS Type
###
### The system will alloc 8 or 16 cores per gpu by default.
@@ -31,7 +31,7 @@
###
# set constraint for RTX8000 to meet my cuda
#SBATCH --constraint="Ampere|RTX8000"
#SBATCH --constraint="Ampere"
#- Log information
@@ -51,7 +51,7 @@ module load git/2.17.1
module load vim/8.1.2424
##- language
module load python3/3.6.8
module load python3/3.8.16
##- CUDA
# module load cuda-cudnn/10.2-7.6.5
@@ -86,10 +86,7 @@ else
exit
fi
if [ $Quant = 'True' ]; then
python gen_one.py --model $Model --dataset $Dataset --quant --multi_label_prob 0.4 --multi_label_num $Label
else
python gen_one.py --model $Model --dataset $Dataset --multi_label_prob 0.4 --multi_label_num $Label
fi
python gen_one.py --model $Model --dataset $Dataset --multi_label_prob 0.4 --multi_label_num $Label --adjust
#- End
echo "Job end at $(date "+%Y-%m-%d %H:%M:%S")"
from model import Model
from dataloader import DataLoader
from generator import Generator,Generator_imagenet
from robust_utils import build_gen_loader, test_robust
import torch.nn.functional as F
import argparse
import time
import torch
import torch.nn as nn
from torch.optim.lr_scheduler import MultiStepLR
import sys
import os
import os.path as osp
class GenOption(object):
def __init__(self, args):
self.model = args.model
self.dataset = args.dataset
self.batchSize = 128
if self.dataset == "cifar10":
self.nClasses = 10
elif self.dataset == "cifar100":
self.nClasses = 100
else:
assert False, "invalid dataset"
# ----------Generator options ---------------------------------------------
# Number of training iterations per epoch, independent of batch size
self.iters = 200
# Freeze the embedding-layer weights
self.freeze = args.freeze
# self.randemb = args.randemb
self.randemb = False
# If randemb is False, this is adjusted from weight_t
self.latent_dim = 64
# Needs adjusting for datasets such as imagenet
self.img_size = 32
self.channels = 3
# self.milestones_G = [40,60,80]
if self.dataset == 'cifar10':
self.lr_G = 0.001
self.nEpochs = 20
self.milestones_G = [15]
elif self.dataset == 'cifar100':
self.lr_G = 0.001
self.nEpochs = 20
self.milestones_G = [15]
self.gamma_G = 0.2
self.b1 = 0.5
self.b2 = 0.999
# Adversarial-attack settings
# Number of batches for the generated fake dataset; total samples = batchSize * gen_iters
# Keep gen_iters small to speed up training; a rough estimate suffices here
self.gen_iters = 20
self.eps = 8/255
self.steps = 10
# ----------More option ---------------------------------------------
self.multi_label_num = args.multi_label_num
self.no_DM = args.no_DM
self.noise_scale = args.noise_scale
self.intermediate_dim = 100
self.adjust = args.adjust
self.teacher_file = 'ckpt_full/'+self.dataset+'/'+self.model+'.pt'
gen_path = 'ckpt_gen/'+self.dataset+'/'
self.gen_file = gen_path+ self.model+'.pt'
self.gen_file_adjust = gen_path+self.model+'_adjust.pt'
if not osp.exists(self.teacher_file):
assert False, "Empty teacher file"
if not osp.exists(gen_path):
os.makedirs(gen_path)
class GenTrainer(object):
def __init__(self, option):
self.settings = option
self.set_test_loader()
self.set_teacher()
self.set_generator()
self.set_optim_G()
def set_test_loader(self):
dataloader = DataLoader(self.settings.dataset,self.settings.batchSize)
_,_,self.test_loader = dataloader.getloader()
def set_teacher(self):
self.teacher = Model(self.settings.model,self.settings.dataset).cuda()
self.teacher.load_state_dict(torch.load(self.settings.teacher_file))
self.teacher.eval()
# Separate copy used for adversarial attacks, avoiding useless hooks and saving GPU memory
self.teacher_nohook = Model(self.settings.model,self.settings.dataset).cuda()
self.teacher_nohook.load_state_dict(torch.load(self.settings.teacher_file))
self.teacher_nohook.eval()
# When randemb is False, this also updates latent_dim
def set_generator(self):
if self.settings.randemb:
weight_t = None
else:
weight_t = self.teacher.get_output_layer_weight()
# If the output layer is a Conv, the weight has four dims; the last two are 1 and are dropped
if self.settings.model in ['Inception_BN']:
weight_t = weight_t.reshape(weight_t.size()[:2])
self.settings.latent_dim = weight_t.size()[1]
if self.settings.dataset in ['cifar10','cifar100']:
self.generator = Generator(self.settings, weight_t, self.settings.freeze).cuda()
elif self.settings.dataset in ['imagenet']:
self.generator = Generator_imagenet(self.settings, weight_t, self.settings.freeze).cuda()
else:
assert False, "Invalid dataset"
def set_optim_G(self):
self.optim_G = torch.optim.Adam(self.generator.parameters(), lr=self.settings.lr_G,
betas=(self.settings.b1, self.settings.b2))
self.lrs_G = MultiStepLR(self.optim_G, milestones=self.settings.milestones_G, gamma=self.settings.gamma_G)
def test_teacher(self):
correct = 0
with torch.no_grad():
for data, target in self.test_loader:
data,target = data.cuda(), target.cuda()
output = self.teacher(data)
pred = output.argmax(dim=1, keepdim=True)
correct += pred.eq(target.view_as(pred)).sum().item()
test_acc = 100. * correct / len(self.test_loader.dataset)
print('Teacher Accuracy: {:.2f}%'.format(test_acc))
return test_acc
def prepare_train(self):
self.log_soft = nn.LogSoftmax(dim=1)
# MSE is mainly used for regression-style training
self.MSE_loss = nn.MSELoss().cuda()
self.mean_list = []
self.var_list = []
self.teacher_running_mean = []
self.teacher_running_var = []
self.teacher.eval()
self.generator.train()
for m in self.teacher.modules():
if isinstance(m, nn.BatchNorm2d):
m.register_forward_hook(self.hook_fn_forward)
# Initially disable the hook; enable it only while tracking BN layers
# self.hook_switch(hook_on=False)
# Track the BN layers of the full-precision model to extract data-distribution statistics
def hook_fn_forward(self, module, input, output):
# mean/var are computed on the incoming fake data; running_mean/running_var come from the data seen during training
input = input[0]
mean = input.mean([0, 2, 3])
# use biased var in train
var = input.var([0, 2, 3], unbiased=False)
self.mean_list.append(mean)
self.var_list.append(var)
# In eval mode, read the running statistics directly from the BN layer
self.teacher_running_mean.append(module.running_mean)
self.teacher_running_var.append(module.running_var)
# Training: only intra-class (single-label) samples are used
def train(self, epoch):
start_time = time.time()
correct = 0
item_len = 0
for i in range(self.settings.iters):
z = torch.randn(self.settings.batchSize, self.settings.latent_dim).cuda()
labels = torch.randint(0, self.settings.nClasses, (self.settings.batchSize,)).cuda()
z = z.contiguous()
labels = labels.contiguous()
images = self.generator(z, labels)
labels_loss = torch.zeros(self.settings.batchSize,self.settings.nClasses).cuda()
labels_loss.scatter_(1,labels.unsqueeze(1),1.0)
self.mean_list.clear()
self.var_list.clear()
# Get the teacher output; hook_fn_forward collects the mean and var lists as a side effect
# self.hook_switch(hook_on=True)
output_teacher_batch = self.teacher(images)
# self.hook_switch(hook_on=False)
# Loss between the teacher output and the labels (a 1-D tensor before the mean)
loss_one_hot = (-(labels_loss*self.log_soft(output_teacher_batch)).sum(dim=1)).mean()
# BN statistic loss
# Matches the fake-data statistics against the teacher's BN-layer statistics
BNS_loss = torch.zeros(1).cuda()
for num in range(len(self.mean_list)):
BNS_loss += self.MSE_loss(self.mean_list[num], self.teacher_running_mean[num]) + self.MSE_loss(
self.var_list[num], self.teacher_running_var[num])
BNS_loss = BNS_loss / len(self.mean_list)
# loss of Generator
loss_G = loss_one_hot + 0.1 * BNS_loss
self.optim_G.zero_grad()
loss_G.backward()
self.optim_G.step()
pred = output_teacher_batch.argmax(dim=1, keepdim=True)
correct += pred.eq(labels.view_as(pred)).sum().item()
item_len += self.settings.batchSize
gen_acc = 100. * correct / item_len
time_interval = time.time()-start_time
print('>> Epoch:%d Time:%.2fs Gen acc:%.4f'%(epoch,time_interval,gen_acc))
return gen_acc
# Use the teacher's adversarial accuracy to guide the mixing of inter-class samples
def adjust(self):
# Teacher's adversarial accuracy on the test set
teacher_fgsm_acc = test_robust(self.teacher_nohook, attack_type='fgsm', c=self.settings.eps, num_classes=self.settings.nClasses,
testloader=self.test_loader)
teacher_pgd_acc = test_robust(self.teacher_nohook, attack_type='pgd', c=self.settings.eps, num_classes=self.settings.nClasses,
testloader=self.test_loader)
gen_fgsm_acc = None
gen_pgd_acc = None
adjust_gen_acc = None
iter_cnt = 0
bound = 3
tolerance_cnt = 0
tolerance_max = 300
while True:
# Teacher's adversarial accuracy on the fake dataset
gen_loader = build_gen_loader( generator=self.generator,
batchSize=self.settings.batchSize,
iters=self.settings.gen_iters,
latent_dim=self.settings.latent_dim,
nClasses=self.settings.nClasses)
print('>>Iters: %d'%iter_cnt)
gen_fgsm_acc = test_robust(self.teacher_nohook, attack_type='fgsm', c=self.settings.eps, num_classes=self.settings.nClasses,
testloader=gen_loader)
gen_pgd_acc = test_robust(self.teacher_nohook, attack_type='pgd', c=self.settings.eps, num_classes=self.settings.nClasses,
testloader=gen_loader)
# Tolerance interval for the adversarial accuracy
if tolerance_cnt > tolerance_max:
bound += 1
tolerance_cnt = 0
else:
tolerance_cnt += 1
# If adversarial accuracy is too high, mix in inter-class samples; if too low, add intra-class samples; one iteration at a time
# Target: adversarial accuracy within testloader accuracy +/- bound
fgsm_diff = gen_fgsm_acc - teacher_fgsm_acc
pgd_diff = gen_pgd_acc - teacher_pgd_acc
if (fgsm_diff > bound and pgd_diff > -bound) or (fgsm_diff > -bound and pgd_diff > bound):
multi_label = True
elif (fgsm_diff < bound and pgd_diff < -bound) or (fgsm_diff < -bound and pgd_diff < bound):
multi_label = False
elif (fgsm_diff > bound and pgd_diff < -bound) or (fgsm_diff < -bound and pgd_diff > bound):
if fgsm_diff + pgd_diff > 0:
multi_label = True
else:
multi_label = False
else:
# Final gen_acc on the fake dataset; attack_type=None means clean samples
print('>>Final Gen acc:')
adjust_gen_acc = test_robust(self.teacher, attack_type=None, c=self.settings.eps, num_classes=self.settings.nClasses,
testloader=gen_loader)
break
if multi_label:
print('>>Multi Label: decrease gen_adv_acc')
MERGE_PARAM = self.settings.multi_label_num
z = torch.randn(self.settings.batchSize, MERGE_PARAM,self.settings.latent_dim).cuda()
labels = torch.randint(0, self.settings.nClasses, (self.settings.batchSize,MERGE_PARAM)).cuda()
linear = F.softmax(torch.randn(self.settings.batchSize,MERGE_PARAM),dim=1).cuda()
z = z.contiguous()
labels = labels.contiguous()
labels_loss = torch.zeros(self.settings.batchSize,self.settings.nClasses).cuda()
labels_loss.scatter_add_(1,labels,linear)
images = self.generator(z, labels, linear)
else:
print('>>Single Label: raise gen_acc')
z = torch.randn(self.settings.batchSize, self.settings.latent_dim).cuda()
labels = torch.randint(0, self.settings.nClasses, (self.settings.batchSize,)).cuda()
z = z.contiguous()
labels = labels.contiguous()
images = self.generator(z, labels)
labels_loss = torch.zeros(self.settings.batchSize,self.settings.nClasses).cuda()
labels_loss.scatter_(1,labels.unsqueeze(1),1.0)
self.mean_list.clear()
self.var_list.clear()
# Get the teacher output; hook_fn_forward collects the mean and var lists as a side effect
output_teacher_batch = self.teacher(images)
# Loss between the teacher output and the labels (a 1-D tensor before the mean)
loss_one_hot = (-(labels_loss*self.log_soft(output_teacher_batch)).sum(dim=1)).mean()
# BN statistic loss
# Matches the fake-data statistics against the teacher's BN-layer statistics
BNS_loss = torch.zeros(1).cuda()
for num in range(len(self.mean_list)):
BNS_loss += self.MSE_loss(self.mean_list[num], self.teacher_running_mean[num]) + self.MSE_loss(
self.var_list[num], self.teacher_running_var[num])
BNS_loss = BNS_loss / len(self.mean_list)
# loss of Generator
loss_G = loss_one_hot + 0.1 * BNS_loss
self.optim_G.zero_grad()
loss_G.backward()
self.optim_G.step()
iter_cnt += 1
print('== Adjust: iters:%d gen_acc:%.4f FGSM_acc:%.4f/%.4f PGD_acc:%.4f/%.4f'%(iter_cnt, adjust_gen_acc, gen_fgsm_acc, teacher_fgsm_acc, gen_pgd_acc, teacher_pgd_acc))
return iter_cnt, adjust_gen_acc, gen_fgsm_acc, teacher_fgsm_acc, gen_pgd_acc, teacher_pgd_acc
def run(self, ft=None): # ft: optional file handle for logging
test_acc = self.test_teacher()
if ft is not None:
ft.write('Test Result:\n')
ft.write('\tTeacher Accuracy: %f\n'%test_acc)
ft.flush()
start_time = time.time()
self.prepare_train()
for epoch in range(1,self.settings.nEpochs+1):
gen_acc = self.train(epoch)
torch.save(self.generator, self.settings.gen_file)
train_interval = time.time()-start_time
if ft is not None:
ft.write('Train Result:\n')
ft.write('\tGen acc: %f\n'%gen_acc)
ft.write('\tTime: %.2fs\n'%train_interval)
ft.flush()
if self.settings.adjust:
start_time = time.time()
iter_cnt, adjust_gen_acc, gen_fgsm_acc, teacher_fgsm_acc, gen_pgd_acc, teacher_pgd_acc = self.adjust()
torch.save(self.generator, self.settings.gen_file_adjust)
adjust_interval = time.time()-start_time
if ft is not None:
ft.write('Adjust Result:\n')
ft.write('\tIters: %d\n'%iter_cnt)
ft.write('\tGen acc: %f\n'%adjust_gen_acc)
ft.write('\tFGSM acc: %f -- %f(Testloader)\n'%(gen_fgsm_acc,teacher_fgsm_acc))
ft.write('\tPGD acc: %f -- %f(Testloader)\n'%(gen_pgd_acc,teacher_pgd_acc))
ft.write('\tTime: %.2fs\n'%adjust_interval)
ft.flush()
def main():
# Flush stdout promptly (line-buffered)
sys.stdout = open(sys.stdout.fileno(), mode='w', buffering=1)
torch.backends.cudnn.enabled = True
torch.backends.cudnn.benchmark = True
parser = argparse.ArgumentParser(description='Gen Arg')
parser.add_argument('--model', type=str)
parser.add_argument('--dataset',type=str)
parser.add_argument('--freeze', action='store_true')
parser.add_argument('--randemb', action='store_true')
parser.add_argument('--multi_label_prob', type=float, default=0.0)
parser.add_argument('--multi_label_num', type=int, default=2)
parser.add_argument('--no_DM', action='store_false')
parser.add_argument('--noise_scale', type=float, default=1.0)
# Whether to adjust the generator according to the full-precision model's adversarial accuracy on the test set
parser.add_argument('--adjust', action='store_true')
args = parser.parse_args()
print(args)
option = GenOption(args)
print('>'*20 + 'Gen: '+option.model+'<'*20)
gen_result_dir = 'gen_result/'+option.dataset+'/'
os.makedirs(gen_result_dir, exist_ok=True)
txt_path = gen_result_dir+option.model+'.txt'
ft = open(txt_path,'w')
ft.write('Gen: '+option.model+' '+option.dataset+'\n')
ft.flush()
print(args, file=ft)
ft.flush()
gentrainer = GenTrainer(option)
gentrainer.run(ft)
if __name__ == '__main__':
main()
import os
import os.path as osp
class GenOption(object):
def __init__(self, args):
self.model = args.model
self.dataset = args.dataset
self.batchSize = 128
self.quant = args.quant
if self.dataset == "cifar10":
self.nClasses = 10
elif self.dataset == "cifar100":
self.nClasses = 100
else:
assert False, "invalid dataset"
# ----------Generator options ---------------------------------------------
# self.nEpochs = 100
self.nEpochs = 40
# Number of training iterations per epoch, independent of batch size
self.iters = 200
# Freeze the embedding-layer weights
self.freeze = args.freeze
self.randemb = args.randemb
# If randemb is False, this is adjusted from weight_t
self.latent_dim = 64
# Needs adjusting for datasets such as imagenet
self.img_size = 32
self.channels = 3
self.lr_G = 0.001
# self.milestones_G = [40,60,80]
self.milestones_G = [20,30]
self.gamma_G = 0.1
self.b1 = 0.5
self.b2 = 0.999
# ----------More option ---------------------------------------------
self.multi_label_prob = args.multi_label_prob
self.multi_label_num = args.multi_label_num
self.no_DM = args.no_DM
self.noise_scale = args.noise_scale
self.intermediate_dim = 100
# if self.network == "resnet20":
# self.intermediate_dim = 64
def set(self,quant_type=None,num_bits=None,e_bits=None):
if self.quant:
self.quant_type = quant_type
self.num_bits = num_bits
self.e_bits = e_bits
if quant_type == 'FLOAT':
title = '%s_%d_E%d' % (quant_type, num_bits, e_bits)
else:
title = '%s_%d' % (quant_type, num_bits)
self.teacher_file = 'ckpt_quant/'+self.dataset+'/'+self.model+'/'+title+'.pt'
gen_path = 'ckpt_quant_gen/'+self.dataset+'/'+self.model
self.gen_file = gen_path + '/' + title + '.pt'
else:
self.teacher_file = 'ckpt_full/'+self.dataset+'/'+self.model+'.pt'
gen_path = 'ckpt_full_gen/'+self.dataset
self.gen_file = gen_path +'/'+ self.model+'.pt'
if not osp.exists(self.teacher_file):
assert False, "Empty teacher file"
if not osp.exists(gen_path):
os.makedirs(gen_path)
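# Illustrative sketch (hypothetical, never called): the checkpoint naming scheme
# used by set(); FLOAT quantization encodes the exponent width in the title,
# all other types only the bit width.
def _title_demo():
    assert '%s_%d_E%d' % ('FLOAT', 8, 4) == 'FLOAT_8_E4'
    assert '%s_%d' % ('INT', 4) == 'INT_4'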
Gen: AlexNet cifar10
Namespace(dataset='cifar10', freeze=False, model='AlexNet', multi_label_num=2, multi_label_prob=0.4, no_DM=True, noise_scale=1.0, randemb=False)
Test Result:
Teacher Accuracy: 88.640000
Train Result:
Gen acc: 99.824219
Time: 44.96s
Adjust Result:
Iters: 13
Gen acc: 98.945312
FGSM acc: 43.828125 -- 45.340000(Testloader)
PGD acc: 19.023438 -- 19.810000(Testloader)
Time: 19.30s
Gen: AlexNet_BN cifar10
Namespace(dataset='cifar10', freeze=False, model='AlexNet_BN', multi_label_num=2, multi_label_prob=0.4, no_DM=True, noise_scale=1.0, randemb=False)
Test Result:
Teacher Accuracy: 90.130000
Train Result:
Gen acc: 99.878906
Time: 59.58s
Adjust Result:
Iters: 3
Gen acc: 97.656250
FGSM acc: 51.015625 -- 48.620000(Testloader)
PGD acc: 21.601562 -- 19.170000(Testloader)
Time: 10.53s
Gen: Inception_BN cifar10
Namespace(dataset='cifar10', freeze=False, model='Inception_BN', multi_label_num=2, multi_label_prob=0.4, no_DM=True, noise_scale=1.0, randemb=False)
Test Result:
Teacher Accuracy: 94.830000
Train Result:
Gen acc: 99.976562
Time: 430.90s
Adjust Result:
Iters: 1
Gen acc: 94.375000
FGSM acc: 45.625000 -- 42.760000(Testloader)
PGD acc: 5.976562 -- 7.210000(Testloader)
Time: 102.60s
Gen: MobileNetV2 cifar10
Namespace(dataset='cifar10', freeze=False, model='MobileNetV2', multi_label_num=2, multi_label_prob=0.4, no_DM=True, noise_scale=1.0, randemb=False)
Test Result:
Teacher Accuracy: 90.270000
Train Result:
Gen acc: 99.957031
Time: 246.35s
Adjust Result:
Iters: 2
Gen acc: 80.976562
FGSM acc: 37.890625 -- 40.190000(Testloader)
PGD acc: 3.320312 -- 5.590000(Testloader)
Time: 39.31s
Gen: ResNet_152 cifar10
Namespace(dataset='cifar10', freeze=False, model='ResNet_152', multi_label_num=2, multi_label_prob=0.4, no_DM=True, noise_scale=1.0, randemb=False)
Test Result:
Teacher Accuracy: 94.430000
Train Result:
Gen acc: 99.558594
Time: 967.98s
Adjust Result:
Iters: 5
Gen acc: 89.062500
FGSM acc: 42.421875 -- 44.370000(Testloader)
PGD acc: 12.343750 -- 10.190000(Testloader)
Time: 334.84s
Gen: ResNet_18 cifar10
Namespace(dataset='cifar10', freeze=False, model='ResNet_18', multi_label_num=2, multi_label_prob=0.4, no_DM=True, noise_scale=1.0, randemb=False)
Test Result:
Teacher Accuracy: 94.300000
Train Result:
Gen acc: 99.910156
Time: 143.98s
Adjust Result:
Iters: 14
Gen acc: 99.609375
FGSM acc: 47.968750 -- 47.410000(Testloader)
PGD acc: 10.585938 -- 11.240000(Testloader)
Time: 80.04s
Gen: ResNet_50 cifar10
Namespace(dataset='cifar10', freeze=False, model='ResNet_50', multi_label_num=2, multi_label_prob=0.4, no_DM=True, noise_scale=1.0, randemb=False)
Test Result:
Teacher Accuracy: 94.570000
Train Result:
Gen acc: 99.964844
Time: 394.87s
Adjust Result:
Iters: 18
Gen acc: 97.265625
FGSM acc: 46.015625 -- 44.400000(Testloader)
PGD acc: 11.250000 -- 8.920000(Testloader)
Time: 325.25s
Gen: VGG_16 cifar10
Namespace(dataset='cifar10', freeze=False, model='VGG_16', multi_label_num=2, multi_label_prob=0.4, no_DM=True, noise_scale=1.0, randemb=False)
Test Result:
Teacher Accuracy: 93.130000
Train Result:
Gen acc: 99.863281
Time: 131.06s
Adjust Result:
Iters: 7
Gen acc: 99.726562
FGSM acc: 51.640625 -- 53.170000(Testloader)
PGD acc: 20.546875 -- 18.740000(Testloader)
Time: 35.02s
Gen: VGG_19 cifar10
Namespace(dataset='cifar10', freeze=False, model='VGG_19', multi_label_num=2, multi_label_prob=0.4, no_DM=True, noise_scale=1.0, randemb=False)
Test Result:
Teacher Accuracy: 93.030000
Train Result:
Gen acc: 99.941406
Time: 147.04s
Adjust Result:
Iters: 21
Gen acc: 97.343750
FGSM acc: 49.375000 -- 50.580000(Testloader)
PGD acc: 14.687500 -- 16.890000(Testloader)
Time: 89.95s
Gen: AlexNet cifar100
Namespace(adjust=True, dataset='cifar100', freeze=False, model='AlexNet', multi_label_num=10, multi_label_prob=0.4, no_DM=True, noise_scale=1.0, randemb=False)
Test Result:
Teacher Accuracy: 63.450000
Train Result:
Gen acc: 99.980469
Time: 46.65s
Adjust Result:
Iters: 49
Gen acc: 65.742188
FGSM acc: 55.117188 -- 21.870000(Testloader)
PGD acc: 6.992188 -- 7.740000(Testloader)
Time: 65.03s
Gen: AlexNet_BN cifar100
Namespace(adjust=True, dataset='cifar100', freeze=False, model='AlexNet_BN', multi_label_num=10, multi_label_prob=0.4, no_DM=True, noise_scale=1.0, randemb=False)
Test Result:
Teacher Accuracy: 66.310000
Train Result:
Gen acc: 99.984375
Time: 59.13s
Adjust Result:
Iters: 32
Gen acc: 68.007812
FGSM acc: 61.367188 -- 21.380000(Testloader)
PGD acc: 6.718750 -- 6.720000(Testloader)
Time: 51.86s
Gen: Inception_BN cifar100
Namespace(adjust=True, dataset='cifar100', freeze=False, model='Inception_BN', multi_label_num=10, multi_label_prob=0.4, no_DM=True, noise_scale=1.0, randemb=False)
Test Result:
Teacher Accuracy: 77.970000
Train Result:
Gen acc: 100.000000
Time: 430.51s
Adjust Result:
Iters: 484
Gen acc: 99.414062
FGSM acc: 38.085938 -- 18.840000(Testloader)
PGD acc: 4.843750 -- 3.130000(Testloader)
Time: 8508.16s
Gen: MobileNetV2 cifar100
Namespace(adjust=True, dataset='cifar100', freeze=False, model='MobileNetV2', multi_label_num=10, multi_label_prob=0.4, no_DM=True, noise_scale=1.0, randemb=False)
Test Result:
Teacher Accuracy: 66.760000
Train Result:
Gen acc: 99.996094
Time: 251.41s
Adjust Result:
Iters: 25
Gen acc: 64.453125
FGSM acc: 29.218750 -- 17.310000(Testloader)
PGD acc: 3.007812 -- 2.980000(Testloader)
Time: 203.93s
Gen: ResNet_152 cifar100
Namespace(adjust=True, dataset='cifar100', freeze=False, model='ResNet_152', multi_label_num=10, multi_label_prob=0.4, no_DM=True, noise_scale=1.0, randemb=False)
Test Result:
Teacher Accuracy: 77.430000
Train Result:
Gen acc: 100.000000
Time: 973.32s
Adjust Result:
Iters: 46
Gen acc: 79.375000
FGSM acc: 33.164062 -- 22.460000(Testloader)
PGD acc: 5.859375 -- 5.310000(Testloader)
Time: 1796.65s
Gen: ResNet_18 cifar100
Namespace(adjust=True, dataset='cifar100', freeze=False, model='ResNet_18', multi_label_num=10, multi_label_prob=0.4, no_DM=True, noise_scale=1.0, randemb=False)
Test Result:
Teacher Accuracy: 75.720000
Train Result:
Gen acc: 100.000000
Time: 141.31s
Adjust Result:
Iters: 27
Gen acc: 93.515625
FGSM acc: 44.140625 -- 18.760000(Testloader)
PGD acc: 4.218750 -- 4.550000(Testloader)
Time: 150.11s
Gen: ResNet_50 cifar100
Namespace(adjust=True, dataset='cifar100', freeze=False, model='ResNet_50', multi_label_num=10, multi_label_prob=0.4, no_DM=True, noise_scale=1.0, randemb=False)
Test Result:
Teacher Accuracy: 77.220000
Train Result:
Gen acc: 100.000000
Time: 425.48s
Adjust Result:
Iters: 179
Gen acc: 99.218750
FGSM acc: 30.039062 -- 21.770000(Testloader)
PGD acc: 5.273438 -- 4.580000(Testloader)
Time: 2697.18s
Gen: VGG_16 cifar100
Namespace(adjust=True, dataset='cifar100', freeze=False, model='VGG_16', multi_label_num=10, multi_label_prob=0.4, no_DM=True, noise_scale=1.0, randemb=False)
Test Result:
Teacher Accuracy: 71.120000
Train Result:
Gen acc: 99.992188
Time: 131.73s
Adjust Result:
Iters: 70
Gen acc: 74.375000
FGSM acc: 20.039062 -- 23.670000(Testloader)
PGD acc: 7.265625 -- 6.370000(Testloader)
Time: 228.97s
Gen: VGG_19 cifar100
Namespace(adjust=True, dataset='cifar100', freeze=False, model='VGG_19', multi_label_num=10, multi_label_prob=0.4, no_DM=True, noise_scale=1.0, randemb=False)
Test Result:
Teacher Accuracy: 70.450000
Train Result:
Gen acc: 99.996094
Time: 146.33s
Adjust Result:
Iters: 43
Gen acc: 67.734375
FGSM acc: 33.125000 -- 26.100000(Testloader)
PGD acc: 5.312500 -- 6.180000(Testloader)
Time: 176.94s
......@@ -6,6 +6,10 @@ from torch.nn import init
class Generator(nn.Module):
def __init__(self, options=None, teacher_weight=None, freeze=True):
super(Generator, self).__init__()
# record extra information to assist model-boundary testing
self.target_test_acc = None # accuracy the training target achieves on the test set
self.target_gen_acc = None # how well the pseudo-data fits the training target's decision boundary
self.settings = options
# note the embedding layer here: its two arguments are the vocabulary size and the embedding vector length
# used to map labels to vectors
......
from model import *
from model import Model
import sys
import torch
......
......@@ -56,7 +56,7 @@ module load git/2.17.1
module load vim/8.1.2424
##- language
module load python3/3.6.8
module load python3/3.8.16
##- CUDA
# module load cuda-cudnn/10.2-7.6.5
......
......@@ -11,28 +11,34 @@ class Model(nn.Module):
self.cfg_table = model_cfg_table[model_name]
adapt_dataset(self.cfg_table,dataset)
make_layers(self,self.cfg_table)
self.model_state = None
def forward(self,x):
x = model_forward(self,self.cfg_table,x)
return x
def forward(self,x,out_feature=False):
if self.model_state is None:
return model_forward(self,self.cfg_table,x,out_feature)
elif self.model_state == 'quantize':
return self.quantize_forward(x,out_feature)
elif self.model_state == 'freeze':
return self.quantize_inference(x,out_feature)
else:
assert False, "Illegal Model State"
def quantize(self, quant_type, num_bits=8, e_bits=3):
model_quantize(self,self.cfg_table,quant_type,num_bits,e_bits)
self.model_state = 'quantize'
def quantize_forward(self,x):
return model_utils(self,self.cfg_table,func='forward',x=x)
def quantize_forward(self,x,out_feature=False):
return model_utils(self,self.cfg_table,func='forward',x=x,out_feature=out_feature)
def freeze(self):
model_utils(self,self.cfg_table,func='freeze')
self.model_state = 'freeze'
def quantize_inference(self,x):
return model_utils(self,self.cfg_table,func='inference',x=x)
def quantize_inference(self,x,out_feature=False):
return model_utils(self,self.cfg_table,func='inference',x=x,out_feature=out_feature)
def fakefreeze(self):
model_utils(self,self.cfg_table,func='fakefreeze')
def get_output_layer_weight(self):
return get_output_layer_weight(self,self.cfg_table)
def get_quant_output_layer_weight(self):
return get_quant_output_layer_weight(self,self.cfg_table)
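A minimal sketch of how the new out_feature flag is consumed downstream (illustrative; x stands for any CIFAR-shaped input batch):
# Illustrative sketch: the unified forward dispatches on model_state, so the
# same call works before quantize(), after quantize(), and after freeze().
out, feature = model(x, out_feature=True)
# out: class scores after the final 'SM' (softmax) layer
# feature: the flattened activation captured at the 'VW' (view) layer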
......@@ -97,7 +97,7 @@ def make_layers(model,cfg_table):
model.add_module(name,layer)
def model_forward(model,cfg_table,x):
def model_forward(model,cfg_table,x,out_feature=False):
for i in range(len(cfg_table)):
cfg = cfg_table[i]
if cfg[0] == 'Inc':
......@@ -147,9 +147,15 @@ def model_forward(model,cfg_table,x):
elif cfg[0] == 'VW':
if len(cfg) == 1: #default
x = x.view(x.size(0),-1)
if out_feature:
feature = x
elif cfg[0] == 'SM':
x = F.softmax(x,dim=1)
return x
if out_feature:
return x, feature
else:
return x
def model_quantize(model,cfg_table,quant_type,num_bits,e_bits):
......@@ -204,7 +210,7 @@ def model_quantize(model,cfg_table,quant_type,num_bits,e_bits):
# added func='fakefreeze'
def model_utils(model,cfg_table,func,x=None):
def model_utils(model,cfg_table,func,x=None,out_feature=False):
last_qo = None
# marks that dequantization has already been applied, to distinguish the case where it is not last but comes before the softmax
done_flag = False
......@@ -287,6 +293,8 @@ def model_utils(model,cfg_table,func,x=None):
if func == 'inference' or func == 'forward':
if len(cfg) == 1: #default
x = x.view(x.size(0),-1)
if out_feature:
feature = x # can be output directly only under fake quantization; inference would need a rescale to output the feature
elif cfg[0] == 'SM':
if func == 'inference':
done_flag = True
......@@ -298,7 +306,10 @@ def model_utils(model,cfg_table,func,x=None):
if func == 'inference' and not done_flag:
x = last_qo.dequantize_tensor(x)
return x
if out_feature:
return x,feature
else:
return x
def make_inc_layers(model,inc_idx):
......
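As the inline comment in the hunk above notes, under func='inference' the feature captured at 'VW' is still in the quantized domain; a hedged fragment of the rescale that would be needed inside that loop (qo is a hypothetical handle to the quantizer owning that activation, mirroring the last_qo.dequantize_tensor call used for x):
# Hypothetical sketch only, not part of the commit:
if func == 'inference' and out_feature:
    feature = qo.dequantize_tensor(feature)  # map the captured feature back to the real-valued domain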
from torch.utils.data import DataLoader
import torchvision
import torchvision.datasets as dsets
import torchvision.transforms as transforms
import os
import sys
import numpy as np
import torch
# CIFAR-10 images are only 32x32 pixels and rather blurry; for clearer display they are resized by interpolation
if __name__ == "__main__":
noise_dir = 'noise_image/'
os.makedirs(noise_dir,exist_ok=True)
noise_scale_list = [1, 0.5, 0.1, 0.05, 0.01, 0.005, 0.001]
batchSize = 1
testloader = DataLoader(
dsets.CIFAR10(root='/lustre/datasets/CIFAR10',
train=False,
transform=transforms.ToTensor(),
download=False),
batch_size = batchSize,
shuffle = False
)
classes = ['plane', 'car', 'bird', 'cat', 'deer', 'dog', 'frog', 'horse', 'ship', 'truck']
for data,target in testloader:
for image, label in zip(data,target):
print(classes[label])
torchvision.utils.save_image(image,noise_dir+'img_org.png')
for i, noise_scale in enumerate(noise_scale_list):
noise = np.random.normal(0., noise_scale, image.size())
noise = torch.tensor(noise).float()
noise_image = image + noise
noise_image.clamp_(0., 1.) # ToTensor images lie in [0,1], so clamp to that range
torchvision.utils.save_image(noise_image,noise_dir+'img_noise'+str(noise_scale)+'.png')
sys.exit()
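The header comment mentions an interpolation resize for clearer display, which this snippet does not show; a minimal sketch of that step, assuming torch.nn.functional:
import torch.nn.functional as F

# Sketch: upscale the 32x32 image (e.g. to 128x128) by bilinear interpolation
# before saving, purely for clearer visual inspection.
big = F.interpolate(noise_image.unsqueeze(0), size=(128, 128),
                    mode='bilinear', align_corners=False).squeeze(0)
torchvision.utils.save_image(big, noise_dir + 'img_noise_big.png')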
......@@ -51,7 +51,7 @@ module load git/2.17.1
module load vim/8.1.2424
##- language
module load python3/3.6.8
module load python3/3.8.16
##- CUDA
# module load cuda-cudnn/10.2-7.6.5
......
from model import Model
from dataloader import DataLoader
from utils import numbit_list, ebit_list, build_bias_list, build_list
# from generator import Generator,Generator_imagenet
import module
import gol
from robust_utils import build_gen_loader, test_autoattack, test_robust
from gen_one import GenOption
import torch.nn.functional as F
import numpy as np
import argparse
import time
import torch
import torch.nn as nn
import sys
import os
import os.path as osp
import openpyxl
class RobustOption(object):
def __init__(self, args):
self.model = args.model
self.dataset = args.dataset
self.batchSize = 128
self.channels = 3
if self.dataset == "cifar10":
self.nClasses = 10
self.img_size = 32
elif self.dataset == "cifar100":
self.nClasses = 100
self.img_size = 32
else:
assert False, "invalid dataset"
# adversarial-attack settings
self.gen_iters = 100 # number of batches to generate for the pseudo dataset
self.eps = 8/255
self.steps = 10
self.coeff = 0.1
# ----------Eval options ---------------------------------------------
# how many evaluation iterations per model, independent of batch size
self.iters = 200
# adjusted according to the loaded generator
self.latent_dim = 64
# ----------Noise Options--------------------------------------------
# noise parameters for perturbing boundary samples
# self.noise_mean = 0.0
# self.noise_sigma = 0.01
self.adjust = args.adjust
if self.adjust:
self.gen_file = 'ckpt_gen/'+self.dataset+'/'+self.model+'_adjust.pt'
else:
self.gen_file = 'ckpt_gen/'+self.dataset+'/'+self.model+'.pt'
# load the full-precision and quantized models one by one
def set(self,quant,quant_type=None,num_bits=None,e_bits=None):
self.quant = quant
if self.quant:
self.quant_type = quant_type
self.num_bits = num_bits
self.e_bits = e_bits
if quant_type == 'FLOAT':
title = '%s_%d_E%d' % (quant_type, num_bits, e_bits)
else:
title = '%s_%d' % (quant_type, num_bits)
self.model_file = 'ckpt_quant/'+self.dataset+'/'+self.model+'/'+title+'.pt'
else:
self.model_file = 'ckpt_full/'+self.dataset+'/'+self.model+'.pt'
if not osp.exists(self.model_file):
assert False, "Empty model file"
class RobustEvaler(object):
def __init__(self, option):
self.settings = option
self.set_test_loader()
self.set_gen_loader() # build the pseudo dataset from the full-precision model's generator
def set(self,quant,quant_type=None,num_bits=None,e_bits=None):
# adjust the non-shared parts of option and the related structures
self.settings.set(quant,quant_type,num_bits,e_bits)
self.set_model()
def set_test_loader(self):
dataloader = DataLoader(self.settings.dataset,self.settings.batchSize)
_,_,self.test_loader = dataloader.getloader()
def set_model(self):
self.model = Model(self.settings.model,self.settings.dataset).cuda()
if self.settings.quant:
self.model.quantize(self.settings.quant_type,self.settings.num_bits,self.settings.e_bits)
self.model.load_state_dict(torch.load(self.settings.model_file))
self.model.eval()
# load the complete model trained by gen_one directly, to avoid structural mismatch. Note: latent_dim must be updated according to the loaded file
def set_gen_loader(self):
self.generator = torch.load(self.settings.gen_file)
self.settings.latent_dim = self.generator.settings.latent_dim
self.gen_loader = build_gen_loader(generator=self.generator,
batchSize=self.settings.batchSize,
iters=self.settings.gen_iters,
latent_dim=self.settings.latent_dim,
nClasses=self.settings.nClasses)
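Conceptually, build_gen_loader materializes gen_iters batches of pseudo-data by pushing random latents and labels through the frozen generator; a hedged sketch follows (the real helper lives in robust_utils, and generator(z, labels) is an assumed call signature):
# Hypothetical sketch of build_gen_loader, illustrative only.
def build_gen_loader_sketch(generator, batchSize, iters, latent_dim, nClasses):
    batches = []
    with torch.no_grad():
        for _ in range(iters):
            z = torch.randn(batchSize, latent_dim).cuda()
            labels = torch.randint(0, nClasses, (batchSize,)).cuda()
            batches.append((generator(z, labels), labels))
    return batches  # iterable of (data, target) pairs, like a DataLoader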
def run(self):
start_time = time.time()
# org_acc, rob_acc = self.eval_robust()
print('Evaluate Origin:')
org_acc = test_robust(self.model, attack_type=None, c=self.settings.eps, num_classes=self.settings.nClasses,
testloader=self.test_loader)
org_gen_acc = test_robust(self.model, attack_type=None, c=self.settings.eps, num_classes=self.settings.nClasses,
testloader=self.gen_loader)
print('Evaluate FGSM:')
fgsm_acc = test_robust(self.model, attack_type='fgsm', c=self.settings.eps, num_classes=self.settings.nClasses,
testloader=self.test_loader)
fgsm_gen_acc = test_robust(self.model, attack_type='fgsm', c=self.settings.eps, num_classes=self.settings.nClasses,
testloader=self.gen_loader)
print('Evaluate PGD:')
pgd_acc = test_robust(self.model, attack_type='pgd', c=self.settings.eps, num_classes=self.settings.nClasses,
testloader=self.test_loader)
pgd_gen_acc = test_robust(self.model, attack_type='pgd', c=self.settings.eps, num_classes=self.settings.nClasses,
testloader=self.gen_loader)
# too time-consuming; dropped
# print('Evaluate CW:')
# test_robust(self.model, attack_type='cw', c=self.settings.coeff, num_classes=self.settings.nClasses,
# testloader=self.test_loader, loss_fn=nn.CrossEntropyLoss(), req_count=10000)
# incompatible with Python 3.6, and quantized models have zero-gradient points
# print('Evaluate AA:')
# aa_acc = test_autoattack(self.model, self.test_loader, norm='Linf', eps=self.settings.eps,
# version='standard', verbose=False)
# aa_gen_acc = test_autoattack(self.model, self.gen_loader, norm='Linf', eps=self.settings.eps,
# version='standard', verbose=False)
time_interval = time.time()-start_time
start_time = time.time()
print('=== Time:%.2fs'%time_interval)
return org_acc, org_gen_acc, fgsm_acc, fgsm_gen_acc, pgd_acc, pgd_gen_acc
# compare how well the full-precision and quantized models tolerate perturbations of boundary samples
# metric: with the classification of boundary samples as baseline, measure how the result changes after adding noise
def main():
# flush printed output promptly (line-buffered stdout)
sys.stdout = open(sys.stdout.fileno(), mode='w', buffering=1)
torch.backends.cudnn.enabled = True
torch.backends.cudnn.benchmark = True
parser = argparse.ArgumentParser(description='Robust Arg')
parser.add_argument('--model', type=str)
parser.add_argument('--dataset',type=str)
parser.add_argument('--multi_label_num', type=int, default=2)
# whether to use the adjusted generator
parser.add_argument('--adjust', action='store_true')
args = parser.parse_args()
print(args)
option = RobustOption(args)
robust_dir = 'robust_result/'+option.dataset+'/'
os.makedirs(robust_dir, exist_ok=True)
if option.adjust:
txt_path = robust_dir+option.model+'_adjust.txt'
excel_path = robust_dir+option.model+'_adjust.xlsx'
else:
txt_path = robust_dir+option.model+'.txt'
excel_path = robust_dir+option.model+'.xlsx'
workbook = openpyxl.Workbook()
worksheet = workbook.active
ft = open(txt_path,'w')
ft.write(option.model+' '+option.dataset+'\n')
ft.write('Title Org_acc Org_gen_acc FGSM_acc FGSM_gen_acc PGD_acc PGD_gen_acc Time\n')
ft.flush()
print('>'*20 + 'Robustness: '+option.model+' '+option.dataset+'<'*20)
robust_title_list = []
org_acc_list = []
org_gen_acc_list = []
fgsm_acc_list = []
fgsm_gen_acc_list = []
pgd_acc_list = []
pgd_gen_acc_list = []
print('>> Full')
RobustEval = RobustEvaler(option)
RobustEval.set(False)
start_time = time.time()
org_acc, org_gen_acc, fgsm_acc, fgsm_gen_acc, pgd_acc, pgd_gen_acc = RobustEval.run()
time_interval = time.time()-start_time
ft.write('Full %f %f %f %f %f %f %.2fs\n'%(org_acc, org_gen_acc, fgsm_acc, fgsm_gen_acc, pgd_acc, pgd_gen_acc, time_interval))
ft.flush()
robust_title_list.append('Full')
org_acc_list.append(org_acc)
org_gen_acc_list.append(org_gen_acc)
fgsm_acc_list.append(fgsm_acc)
fgsm_gen_acc_list.append(fgsm_gen_acc)
pgd_acc_list.append(pgd_acc)
pgd_gen_acc_list.append(pgd_gen_acc)
gol._init()
quant_type_list = ['INT','POT','FLOAT']
for quant_type in quant_type_list:
num_bit_list = numbit_list(quant_type)
if quant_type != 'INT':
bias_list = build_bias_list(quant_type)
gol.set_value(bias_list, is_bias=True)
for num_bits in num_bit_list:
e_bit_list = ebit_list(quant_type,num_bits)
for e_bits in e_bit_list:
if quant_type == 'FLOAT':
title = '%s_%d_E%d' % (quant_type, num_bits, e_bits)
else:
title = '%s_%d' % (quant_type, num_bits)
# set up the quantization table
if quant_type != 'INT':
plist = build_list(quant_type, num_bits, e_bits)
gol.set_value(plist)
print('>> '+title)
RobustEval.set(True,quant_type,num_bits,e_bits)
start_time = time.time()
org_acc, org_gen_acc, fgsm_acc, fgsm_gen_acc, pgd_acc, pgd_gen_acc = RobustEval.run()
time_interval = time.time()-start_time
ft.write(title+' %f %f %f %f %f %f %.2fs\n'%(org_acc, org_gen_acc, fgsm_acc, fgsm_gen_acc, pgd_acc, pgd_gen_acc, time_interval))
ft.flush()
robust_title_list.append(title)
org_acc_list.append(org_acc)
org_gen_acc_list.append(org_gen_acc)
fgsm_acc_list.append(fgsm_acc)
fgsm_gen_acc_list.append(fgsm_gen_acc)
pgd_acc_list.append(pgd_acc)
pgd_gen_acc_list.append(pgd_gen_acc)
worksheet.cell(row=1,column=1,value='Title')
worksheet.cell(row=1,column=2,value='Org_acc')
worksheet.cell(row=1,column=3,value='Org_gen_acc')
worksheet.cell(row=1,column=4,value='FGSM_acc')
worksheet.cell(row=1,column=5,value='FGSM_gen_acc')
worksheet.cell(row=1,column=6,value='PGD_acc')
worksheet.cell(row=1,column=7,value='PGD_gen_acc')
for i in range(len(robust_title_list)):
worksheet.cell(row=i+3,column=1,value=robust_title_list[i])
worksheet.cell(row=i+3,column=2,value=org_acc_list[i])
worksheet.cell(row=i+3,column=3,value=org_gen_acc_list[i])
worksheet.cell(row=i+3,column=4,value=fgsm_acc_list[i])
worksheet.cell(row=i+3,column=5,value=fgsm_gen_acc_list[i])
worksheet.cell(row=i+3,column=6,value=pgd_acc_list[i])
worksheet.cell(row=i+3,column=7,value=pgd_gen_acc_list[i])
workbook.save(excel_path)
ft.close()
if __name__ == '__main__':
main()
#!/bin/bash
#- Job parameters
# (TODO)
# Please modify job name
#- Resources
# (TODO)
# Please modify your requirements
#SBATCH -p nv-gpu # Submit to 'nv-gpu' Partitiion
#SBATCH -t 1-06:00:00 # Run for a maximum time of 1 day, 6 hours, 00 mins, 00 secs
#SBATCH --nodes=1 # Request N nodes
#SBATCH --gres=gpu:1 # Request M GPU per node
#SBATCH --gres-flags=enforce-binding # CPU-GPU Affinity
#SBATCH --qos=gpu-normal # Request QOS Type
###
### The system will alloc 8 or 16 cores per gpu by default.
### If you need more or less, use following:
### #SBATCH --cpus-per-task=K # Request K cores
###
###
### Without specifying the constraint, any available nodes that meet the requirement will be allocated
### You can specify the characteristics of the compute nodes, and even the names of the compute nodes
###
### #SBATCH --nodelist=gpu-v00 # Request a specific list of hosts
### #SBATCH --constraint="Volta|RTX8000" # Request GPU Type: Volta(V100 or V100S) or RTX8000
###
# set a GPU-type constraint (Ampere) to match my CUDA version
#SBATCH --constraint="Ampere"
#- Log information
echo "Job start at $(date "+%Y-%m-%d %H:%M:%S")"
echo "Job run at:"
echo "$(hostnamectl)"
#- Load environments
source /tools/module_env.sh
module list # list modules loaded
##- Tools
module load cluster-tools/v1.0
module load slurm-tools/v1.0
module load cmake/3.15.7
module load git/2.17.1
module load vim/8.1.2424
##- language
module load python3/3.8.16
##- CUDA
# module load cuda-cudnn/10.2-7.6.5
# module load cuda-cudnn/11.2-8.2.1
module load cuda-cudnn/11.1-8.2.1
##- virtualenv
# source xxxxx/activate
echo $(module list) # list modules loaded
echo $(which gcc)
echo $(which python)
echo $(which python3)
cluster-quota # nas quota
nvidia-smi --format=csv --query-gpu=name,driver_version,power.limit # gpu info
#- Warning! Please do not change your CUDA_VISIBLE_DEVICES
#- in `.bashrc`, `env.sh`, or your job script
echo "Use GPU ${CUDA_VISIBLE_DEVICES}" # which gpus
#- The CUDA_VISIBLE_DEVICES variable is assigned and specified by SLURM
#- Job step
# [EDIT HERE(TODO)]
if [ $Dataset = 'cifar10' ]; then
Label=2
elif [ $Dataset = 'cifar100' ]; then
Label=10
else
echo "Invalid Dataset $Dataset"
exit
fi
python robust_one.py --model $Model --dataset $Dataset --multi_label_num $Label --adjust
python robust_one.py --model $Model --dataset $Dataset --multi_label_num $Label
#- End
echo "Job end at $(date "+%Y-%m-%d %H:%M:%S")"
AlexNet cifar10
Title Org_acc Org_gen_acc FGSM_acc FGSM_gen_acc PGD_acc PGD_gen_acc Time
Full 88.640000 99.726562 45.340000 60.039062 19.860000 34.625000 12.48s
INT_2 10.000000 10.101562 10.000000 10.101562 10.000000 10.101562 27.51s
INT_3 10.120000 12.867188 10.000000 9.859375 10.000000 9.859375 28.69s
INT_4 55.640000 61.656250 13.900000 24.125000 11.280000 18.562500 24.35s
INT_5 82.880000 97.656250 37.790000 47.492188 29.120000 39.851562 29.08s
INT_6 87.300000 99.601562 44.420000 57.523438 30.590000 44.406250 28.29s
INT_7 88.230000 99.687500 44.040000 64.289062 25.940000 45.078125 26.63s
INT_8 88.450000 99.750000 45.230000 60.523438 23.230000 38.359375 29.04s
INT_9 88.630000 99.703125 45.200000 60.445312 21.140000 35.984375 26.95s
INT_10 88.700000 99.726562 45.230000 59.882812 20.510000 35.289062 26.87s
INT_11 88.660000 99.726562 45.250000 60.046875 20.100000 34.851562 25.40s
INT_12 88.660000 99.734375 45.260000 60.109375 19.830000 34.804688 26.59s
INT_13 88.630000 99.726562 45.250000 60.039062 19.860000 34.640625 28.14s
INT_14 88.630000 99.726562 45.230000 59.992188 19.820000 34.687500 28.48s
INT_15 88.620000 99.726562 45.390000 60.000000 19.920000 34.617188 26.30s
INT_16 88.630000 99.726562 45.350000 60.039062 19.840000 34.679688 26.49s
POT_2 10.000000 10.101562 10.000000 10.101562 10.000000 10.101562 37.71s
POT_3 16.660000 22.515625 10.010000 9.859375 10.000000 9.859375 38.29s
POT_4 69.810000 93.085938 28.190000 31.140625 17.220000 22.968750 44.90s
POT_5 69.760000 94.015625 27.550000 30.828125 15.820000 22.289062 53.51s
POT_6 70.680000 93.789062 27.980000 29.921875 15.690000 22.023438 74.98s
POT_7 70.320000 94.070312 28.060000 30.867188 15.880000 22.164062 118.00s
POT_8 69.810000 93.976562 27.690000 30.664062 15.670000 22.210938 201.64s
FLOAT_3_E1 23.220000 27.843750 14.160000 22.140625 11.230000 17.406250 38.73s
FLOAT_4_E1 68.120000 91.484375 23.850000 34.398438 17.930000 27.429688 44.20s
FLOAT_4_E2 66.360000 85.289062 24.400000 34.234375 20.050000 31.000000 43.02s
FLOAT_5_E1 79.430000 98.726562 29.100000 47.679688 18.930000 33.992188 54.85s
FLOAT_5_E2 84.020000 97.804688 37.570000 55.046875 24.200000 39.953125 54.37s
FLOAT_5_E3 85.030000 99.250000 39.370000 55.171875 19.650000 32.359375 54.10s
FLOAT_6_E1 83.350000 98.812500 32.690000 61.523438 18.640000 41.695312 75.39s
FLOAT_6_E2 87.150000 99.632812 40.570000 50.257812 22.020000 32.414062 75.66s
FLOAT_6_E3 87.980000 99.648438 44.120000 67.335938 20.330000 39.570312 75.61s
FLOAT_6_E4 85.250000 99.250000 39.030000 53.781250 19.680000 31.476562 75.94s
FLOAT_7_E1 85.030000 99.195312 35.290000 68.187500 17.640000 45.750000 118.53s
FLOAT_7_E2 87.490000 99.750000 41.860000 48.734375 20.750000 28.445312 118.74s
FLOAT_7_E3 88.610000 99.765625 44.980000 59.601562 19.960000 35.289062 118.59s
FLOAT_7_E4 88.090000 99.671875 43.810000 67.312500 20.060000 39.593750 118.47s
FLOAT_7_E5 85.270000 99.296875 39.270000 53.632812 19.510000 31.132812 118.94s
FLOAT_8_E1 86.080000 99.320312 36.530000 69.195312 17.620000 45.312500 202.51s
FLOAT_8_E2 87.870000 99.640625 41.830000 47.335938 19.320000 26.867188 202.51s
FLOAT_8_E3 88.730000 99.757812 44.970000 60.351562 19.860000 35.195312 202.19s
FLOAT_8_E4 88.630000 99.781250 44.840000 59.601562 19.950000 34.890625 202.12s
FLOAT_8_E5 88.150000 99.648438 44.280000 67.242188 20.160000 39.281250 202.64s
FLOAT_8_E6 85.200000 99.281250 38.980000 53.492188 19.830000 30.859375 202.72s
AlexNet_BN cifar10
Title Org_acc Org_gen_acc FGSM_acc FGSM_gen_acc PGD_acc PGD_gen_acc Time
Full 90.130000 99.843750 48.620000 64.164062 19.220000 23.390625 14.21s
INT_2 10.000000 9.796875 10.000000 9.796875 10.000000 9.796875 28.30s
INT_3 14.040000 16.929688 10.000000 9.796875 10.000000 9.796875 31.25s
INT_4 48.760000 62.328125 15.280000 17.804688 13.540000 15.468750 35.92s
INT_5 76.310000 96.093750 23.750000 32.851562 16.270000 23.593750 28.12s
INT_6 88.910000 99.375000 43.970000 65.976562 24.860000 43.390625 31.49s
INT_7 89.790000 99.875000 47.430000 65.625000 24.060000 32.945312 33.04s
INT_8 90.240000 99.828125 47.930000 65.617188 21.380000 28.531250 34.08s
INT_9 90.070000 99.835938 48.230000 64.531250 20.100000 24.960938 31.51s
INT_10 90.100000 99.843750 48.380000 64.101562 19.560000 24.078125 31.64s
INT_11 90.120000 99.843750 48.360000 64.320312 19.390000 23.601562 30.91s
INT_12 90.160000 99.843750 48.490000 63.929688 19.300000 23.343750 33.59s
INT_13 90.140000 99.843750 48.490000 64.062500 19.200000 23.414062 37.41s
INT_14 90.130000 99.843750 48.550000 64.062500 19.200000 23.320312 34.77s
INT_15 90.140000 99.843750 48.650000 64.125000 19.310000 23.312500 34.93s
INT_16 90.130000 99.843750 48.610000 64.179688 19.170000 23.242188 27.59s
POT_2 10.000000 9.796875 10.000000 9.796875 10.000000 9.796875 40.82s
POT_3 17.480000 21.945312 10.930000 17.320312 10.470000 16.445312 39.56s
POT_4 74.750000 94.773438 28.570000 30.968750 16.050000 20.554688 46.32s
POT_5 74.280000 95.609375 30.290000 34.664062 16.780000 23.367188 56.10s
POT_6 74.770000 94.875000 30.550000 32.242188 17.040000 21.546875 77.70s
POT_7 74.510000 94.929688 30.430000 32.617188 16.910000 21.593750 120.50s
POT_8 74.440000 94.882812 30.590000 32.570312 17.060000 21.695312 203.68s
FLOAT_3_E1 23.080000 29.265625 10.980000 12.031250 10.450000 9.218750 40.04s
FLOAT_4_E1 68.200000 85.187500 20.790000 39.726562 14.550000 26.523438 46.18s
FLOAT_4_E2 72.840000 75.398438 24.180000 25.703125 18.250000 14.500000 46.47s
FLOAT_5_E1 82.640000 97.960938 33.660000 37.171875 19.340000 21.437500 56.24s
FLOAT_5_E2 85.490000 97.875000 39.450000 52.281250 22.870000 36.187500 56.42s
FLOAT_5_E3 87.690000 99.500000 44.720000 39.648438 22.760000 8.898438 55.78s
FLOAT_6_E1 85.790000 98.984375 40.120000 41.929688 20.980000 20.867188 77.81s
FLOAT_6_E2 87.740000 99.203125 42.470000 58.515625 21.300000 29.851562 77.15s
FLOAT_6_E3 89.570000 99.804688 48.220000 62.257812 21.390000 31.390625 77.14s
FLOAT_6_E4 87.980000 99.492188 45.690000 39.554688 22.790000 9.023438 77.11s
FLOAT_7_E1 86.840000 99.039062 41.350000 42.812500 19.330000 18.835938 120.79s
FLOAT_7_E2 88.630000 99.546875 44.440000 60.148438 21.050000 27.734375 120.65s
FLOAT_7_E3 89.950000 99.812500 48.060000 64.007812 19.780000 23.828125 120.79s
FLOAT_7_E4 89.280000 99.773438 48.030000 62.218750 21.390000 30.929688 120.54s
FLOAT_7_E5 87.790000 99.562500 45.490000 39.453125 22.620000 9.007812 120.39s
FLOAT_8_E1 86.990000 99.125000 42.250000 44.859375 19.130000 18.960938 203.77s
FLOAT_8_E2 88.850000 99.679688 44.820000 61.281250 20.120000 26.546875 203.59s
FLOAT_8_E3 90.120000 99.835938 48.090000 64.414062 19.210000 24.250000 203.75s
FLOAT_8_E4 89.810000 99.804688 48.030000 63.921875 19.600000 23.882812 203.55s
FLOAT_8_E5 89.560000 99.804688 48.230000 62.453125 21.530000 31.429688 203.70s
FLOAT_8_E6 87.870000 99.515625 45.530000 39.664062 22.880000 9.023438 204.57s
AlexNet_BN cifar10
Title Org_acc Org_gen_acc FGSM_acc FGSM_gen_acc PGD_acc PGD_gen_acc Time
Full 90.130000 98.140625 48.620000 53.664062 19.240000 21.531250 14.11s
INT_2 10.000000 9.859375 10.000000 9.859375 10.000000 9.859375 35.01s
INT_3 14.040000 14.937500 10.000000 9.859375 10.000000 9.859375 32.19s
INT_4 48.760000 50.593750 15.270000 15.078125 13.680000 11.140625 33.45s
INT_5 76.310000 89.203125 23.770000 24.539062 16.410000 15.882812 35.46s
INT_6 88.910000 97.398438 44.340000 59.687500 25.030000 34.093750 34.29s
INT_7 89.790000 98.390625 47.460000 57.789062 24.340000 28.945312 34.86s
INT_8 90.240000 98.226562 48.240000 55.875000 21.340000 24.593750 28.40s
INT_9 90.070000 98.210938 48.250000 54.171875 20.160000 22.367188 35.74s
INT_10 90.100000 98.148438 48.370000 53.789062 19.640000 22.046875 27.08s
INT_11 90.120000 98.140625 48.360000 53.882812 19.380000 21.640625 28.42s
INT_12 90.160000 98.156250 48.430000 53.468750 19.290000 21.445312 35.95s
INT_13 90.140000 98.164062 48.510000 53.726562 19.230000 21.523438 33.68s
INT_14 90.130000 98.156250 48.610000 53.640625 19.320000 21.492188 35.24s
INT_15 90.140000 98.156250 48.630000 53.695312 19.190000 21.484375 32.02s
INT_16 90.130000 98.148438 48.590000 53.648438 19.190000 21.468750 30.39s
POT_2 10.000000 9.859375 10.000000 9.859375 10.000000 9.859375 36.74s
POT_3 17.480000 18.789062 10.110000 11.898438 10.070000 9.265625 39.12s
POT_4 74.750000 82.226562 28.580000 19.375000 16.420000 4.570312 44.06s
POT_5 74.280000 82.382812 30.280000 20.734375 16.600000 3.804688 54.91s
POT_6 74.770000 81.304688 30.540000 19.132812 16.790000 3.882812 75.97s
POT_7 74.510000 81.250000 30.410000 19.304688 17.110000 4.023438 120.26s
POT_8 74.440000 81.210938 30.580000 19.148438 16.620000 4.062500 202.14s
FLOAT_3_E1 23.080000 30.210938 11.580000 12.078125 10.740000 10.859375 38.64s
FLOAT_4_E1 68.200000 79.320312 20.760000 27.664062 14.320000 22.875000 43.86s
FLOAT_4_E2 72.840000 62.210938 24.170000 24.750000 18.080000 20.062500 44.07s
FLOAT_5_E1 82.640000 93.945312 34.060000 25.593750 19.360000 8.656250 54.97s
FLOAT_5_E2 85.490000 93.062500 39.460000 45.640625 22.900000 32.156250 55.10s
FLOAT_5_E3 87.690000 97.679688 44.720000 52.140625 22.650000 33.414062 56.69s
FLOAT_6_E1 85.790000 97.007812 40.050000 23.304688 20.800000 2.242188 78.21s
FLOAT_6_E2 87.740000 96.273438 42.320000 46.750000 21.030000 23.171875 78.11s
FLOAT_6_E3 89.570000 98.117188 48.210000 54.781250 21.460000 33.867188 77.11s
FLOAT_6_E4 87.980000 97.796875 45.490000 51.765625 22.620000 33.242188 77.57s
FLOAT_7_E1 86.840000 97.500000 40.980000 23.367188 19.280000 2.437500 120.82s
FLOAT_7_E2 88.630000 97.476562 44.790000 49.515625 21.340000 21.687500 120.77s
FLOAT_7_E3 89.950000 98.000000 47.930000 54.250000 19.720000 23.625000 120.26s
FLOAT_7_E4 89.280000 98.210938 48.330000 54.109375 21.410000 33.726562 121.09s
FLOAT_7_E5 87.790000 97.656250 44.930000 50.390625 22.760000 32.015625 120.11s
FLOAT_8_E1 86.990000 97.312500 42.130000 24.601562 18.890000 2.265625 204.38s
FLOAT_8_E2 88.850000 97.835938 44.780000 50.250000 20.240000 21.273438 203.89s
FLOAT_8_E3 90.120000 97.992188 47.990000 53.593750 19.320000 21.429688 203.68s
FLOAT_8_E4 89.810000 97.984375 48.180000 54.085938 19.630000 23.789062 203.57s
FLOAT_8_E5 89.560000 98.187500 48.020000 54.031250 21.520000 33.882812 203.36s
FLOAT_8_E6 87.870000 97.664062 45.340000 51.070312 22.850000 32.656250 203.67s
AlexNet cifar10
Title Org_acc Org_gen_acc FGSM_acc FGSM_gen_acc PGD_acc PGD_gen_acc Time
Full 88.640000 98.367188 45.350000 44.460938 19.840000 18.523438 12.74s
INT_2 10.000000 9.664062 10.000000 9.664062 10.000000 9.664062 25.70s
INT_3 10.120000 11.695312 10.000000 9.734375 10.000000 9.734375 26.12s
INT_4 55.630000 32.539062 14.140000 15.945312 11.180000 12.492188 26.08s
INT_5 82.870000 91.015625 38.720000 39.273438 29.440000 32.289062 27.52s
INT_6 87.290000 96.789062 43.940000 42.273438 30.270000 30.421875 26.53s
INT_7 88.210000 98.171875 44.080000 45.671875 25.950000 25.851562 27.96s
INT_8 88.450000 98.320312 45.320000 45.484375 23.240000 21.710938 26.62s
INT_9 88.620000 98.382812 45.110000 44.523438 21.210000 19.460938 28.96s
INT_10 88.700000 98.382812 45.330000 44.515625 20.510000 19.023438 27.46s
INT_11 88.660000 98.351562 45.370000 44.546875 20.060000 18.617188 28.68s
INT_12 88.650000 98.367188 45.280000 44.601562 19.970000 18.515625 28.58s
INT_13 88.610000 98.359375 45.300000 44.445312 19.930000 18.539062 28.37s
INT_14 88.630000 98.367188 45.280000 44.390625 19.850000 18.515625 28.09s
INT_15 88.620000 98.375000 45.340000 44.437500 19.920000 18.484375 27.94s
INT_16 88.640000 98.359375 45.360000 44.453125 19.860000 18.500000 29.81s
POT_2 10.000000 9.664062 10.000000 9.664062 10.000000 9.664062 38.80s
POT_3 16.660000 22.265625 10.000000 9.734375 10.000000 9.734375 39.44s
POT_4 69.820000 80.359375 28.350000 21.015625 17.140000 16.023438 44.81s
POT_5 69.760000 81.335938 27.910000 20.648438 15.550000 15.398438 54.06s
POT_6 70.700000 82.140625 27.860000 20.617188 15.480000 15.296875 75.89s
POT_7 70.330000 81.703125 27.980000 20.531250 15.410000 15.296875 118.40s
POT_8 69.780000 81.601562 27.390000 20.640625 15.190000 15.539062 202.54s
FLOAT_3_E1 23.290000 23.593750 14.870000 17.546875 12.380000 13.703125 38.63s
FLOAT_4_E1 68.090000 78.453125 23.870000 25.351562 17.530000 19.164062 45.01s
FLOAT_4_E2 66.380000 64.734375 24.690000 28.187500 20.490000 24.898438 44.96s
FLOAT_5_E1 79.440000 91.828125 28.910000 34.781250 18.920000 23.375000 54.94s
FLOAT_5_E2 84.040000 90.421875 38.090000 39.320312 23.950000 24.382812 54.86s
FLOAT_5_E3 85.010000 96.132812 39.170000 42.804688 19.610000 21.914062 54.40s
FLOAT_6_E1 83.300000 92.437500 33.340000 48.632812 18.750000 28.976562 76.28s
FLOAT_6_E2 87.160000 95.906250 40.520000 36.976562 21.800000 19.195312 76.35s
FLOAT_6_E3 88.000000 98.132812 44.240000 46.445312 20.020000 18.070312 75.01s
FLOAT_6_E4 85.260000 96.414062 39.190000 42.765625 19.890000 21.921875 76.13s
FLOAT_7_E1 85.010000 94.062500 35.230000 57.640625 17.780000 35.171875 118.51s
FLOAT_7_E2 87.530000 96.648438 41.790000 33.414062 20.990000 15.617188 119.42s
FLOAT_7_E3 88.620000 98.375000 44.910000 45.765625 19.990000 19.265625 118.86s
FLOAT_7_E4 88.100000 98.046875 44.130000 46.031250 19.990000 18.000000 118.58s
FLOAT_7_E5 85.250000 96.125000 39.210000 42.187500 19.480000 21.625000 119.04s
FLOAT_8_E1 86.050000 95.023438 36.430000 57.023438 17.670000 33.640625 202.68s
FLOAT_8_E2 87.840000 96.796875 42.310000 31.882812 19.590000 14.875000 202.56s
FLOAT_8_E3 88.720000 98.507812 44.790000 45.359375 19.850000 18.406250 203.11s
FLOAT_8_E4 88.620000 98.343750 44.770000 45.515625 20.090000 19.156250 203.37s
FLOAT_8_E5 88.150000 98.187500 44.180000 45.843750 20.120000 18.125000 203.58s
FLOAT_8_E6 85.170000 96.140625 38.670000 42.554688 19.550000 21.828125 203.49s
Inception_BN cifar10
Title Org_acc Org_gen_acc FGSM_acc FGSM_gen_acc PGD_acc PGD_gen_acc Time
Full 94.830000 99.976562 42.840000 61.320312 7.240000 4.601562 158.12s
INT_2 10.000000 10.039062 10.000000 10.039062 10.000000 10.039062 289.91s
INT_3 10.680000 11.445312 10.090000 9.281250 10.320000 9.921875 286.58s
INT_4 29.840000 34.031250 13.340000 25.953125 11.530000 23.289062 309.70s
INT_5 80.350000 87.601562 31.260000 43.593750 16.150000 26.859375 310.38s
INT_6 93.480000 99.765625 41.320000 58.539062 12.690000 23.078125 314.44s
INT_7 94.540000 99.976562 42.930000 61.117188 9.610000 11.625000 313.02s
INT_8 94.830000 99.976562 43.680000 61.593750 8.100000 6.328125 311.08s
INT_9 94.840000 99.960938 43.210000 60.367188 7.430000 4.828125 309.57s
INT_10 94.790000 99.968750 42.950000 60.304688 7.220000 4.656250 305.39s
INT_11 94.840000 99.968750 42.830000 60.445312 7.210000 4.523438 308.33s
INT_12 94.850000 99.968750 42.900000 60.382812 7.230000 4.507812 313.92s
INT_13 94.820000 99.968750 42.740000 60.468750 7.250000 4.515625 292.75s
INT_14 94.830000 99.968750 42.790000 60.414062 7.060000 4.531250 309.78s
INT_15 94.810000 99.968750 42.850000 60.398438 7.250000 4.523438 309.29s
INT_16 94.830000 99.968750 42.800000 60.445312 7.200000 4.492188 307.34s
POT_2 10.000000 10.039062 10.000000 10.039062 10.000000 10.039062 449.54s
POT_3 11.500000 8.343750 9.780000 9.632812 9.630000 9.210938 502.14s
POT_4 38.810000 24.929688 14.590000 15.445312 10.510000 13.671875 639.36s
POT_5 40.190000 39.179688 16.720000 26.164062 13.180000 21.125000 909.17s
POT_6 23.960000 24.367188 13.880000 16.445312 9.070000 13.445312 1425.79s
POT_7 38.480000 48.031250 18.230000 26.125000 10.690000 18.312500 2487.01s
POT_8 40.730000 42.921875 19.370000 23.312500 12.090000 14.000000 4562.34s
FLOAT_3_E1 10.000000 9.890625 9.910000 9.218750 10.140000 9.398438 503.88s
FLOAT_4_E1 19.150000 30.679688 20.670000 15.750000 14.380000 12.265625 640.31s
FLOAT_4_E2 54.380000 44.984375 16.450000 25.640625 10.530000 16.679688 640.48s
FLOAT_5_E1 60.290000 76.406250 28.360000 38.593750 15.290000 22.578125 906.06s
FLOAT_5_E2 85.500000 98.218750 37.320000 39.484375 17.170000 17.351562 906.78s
FLOAT_5_E3 86.480000 99.593750 40.220000 59.359375 13.480000 26.515625 907.52s
FLOAT_6_E1 76.320000 95.289062 30.400000 43.734375 12.730000 19.054688 1420.68s
FLOAT_6_E2 92.260000 99.859375 40.440000 46.671875 14.370000 13.015625 1424.48s
FLOAT_6_E3 93.400000 99.929688 43.200000 62.179688 11.020000 16.570312 1426.48s
FLOAT_6_E4 86.480000 99.500000 38.810000 51.578125 14.470000 22.687500 1422.52s
FLOAT_7_E1 78.170000 97.359375 31.890000 42.812500 11.760000 14.968750 2480.58s
FLOAT_7_E2 93.010000 99.914062 42.120000 46.851562 13.230000 9.828125 2480.65s
FLOAT_7_E3 94.470000 99.976562 43.230000 57.070312 8.860000 6.117188 2485.42s
FLOAT_7_E4 93.430000 99.953125 42.890000 63.476562 11.410000 17.773438 2486.79s
FLOAT_7_E5 87.980000 98.765625 38.120000 51.718750 14.820000 25.601562 2484.98s
Inception_BN cifar10
Title Org_acc Org_gen_acc FGSM_acc FGSM_gen_acc PGD_acc PGD_gen_acc Time
Full 94.830000 88.007812 42.810000 46.429688 7.230000 5.210938 157.45s
INT_2 10.000000 9.539062 10.000000 9.539062 10.000000 9.539062 312.83s
INT_3 10.750000 7.906250 9.430000 10.664062 9.670000 9.742188 273.20s
INT_4 29.690000 28.929688 14.540000 19.476562 10.040000 15.421875 327.39s
INT_5 80.670000 59.156250 30.230000 29.742188 16.490000 18.468750 326.41s
INT_6 93.540000 79.750000 42.680000 40.429688 13.360000 15.554688 313.06s
INT_7 94.510000 86.109375 42.520000 45.703125 9.560000 10.015625 321.22s
INT_8 94.860000 86.617188 43.170000 45.945312 8.010000 5.648438 317.96s
INT_9 94.810000 85.437500 43.200000 45.750000 7.580000 5.710938 326.22s
INT_10 94.840000 85.554688 42.920000 45.273438 7.240000 5.296875 308.54s
INT_11 94.840000 85.335938 42.870000 45.500000 7.360000 5.328125 308.32s
INT_12 94.850000 85.539062 42.750000 45.359375 7.230000 5.234375 319.09s
INT_13 94.820000 85.296875 42.870000 45.484375 7.120000 5.171875 313.66s
INT_14 94.840000 85.359375 42.820000 45.460938 7.210000 5.164062 308.37s
INT_15 94.810000 85.585938 42.770000 45.515625 7.200000 5.226562 309.83s
INT_16 94.830000 85.632812 42.780000 45.523438 7.270000 5.210938 312.72s
POT_2 10.000000 9.539062 10.000000 9.539062 10.000000 9.539062 442.95s
POT_3 11.540000 10.664062 10.350000 9.187500 10.290000 9.710938 500.26s
POT_4 38.340000 21.226562 26.370000 22.453125 16.150000 11.648438 639.03s
POT_5 39.980000 34.382812 17.060000 16.164062 11.520000 11.773438 908.58s
POT_6 24.030000 15.484375 13.480000 11.171875 9.170000 10.406250 1426.20s
POT_7 38.250000 40.664062 22.170000 20.437500 15.010000 9.546875 2483.35s
POT_8 41.360000 32.523438 23.510000 20.164062 14.980000 12.617188 4567.44s
FLOAT_3_E1 10.030000 9.968750 9.950000 9.984375 10.060000 10.132812 503.42s
FLOAT_4_E1 17.620000 28.054688 19.260000 16.750000 14.870000 11.382812 639.87s
FLOAT_4_E2 54.990000 26.539062 16.470000 19.804688 10.100000 9.843750 640.43s
FLOAT_5_E1 59.920000 48.804688 28.060000 29.125000 14.400000 19.906250 905.43s
FLOAT_5_E2 85.410000 71.265625 33.910000 26.796875 16.600000 12.984375 910.60s
FLOAT_5_E3 86.580000 72.437500 37.810000 41.031250 13.790000 18.593750 903.49s
FLOAT_6_E1 76.300000 58.875000 30.310000 32.015625 12.410000 15.710938 1420.05s
FLOAT_6_E2 92.190000 88.070312 41.010000 36.132812 14.840000 9.726562 1419.32s
FLOAT_6_E3 93.290000 86.539062 42.810000 42.671875 11.900000 12.796875 1422.39s
FLOAT_6_E4 86.740000 78.867188 39.560000 44.601562 14.680000 18.531250 1421.04s
FLOAT_7_E1 78.370000 58.156250 30.890000 33.570312 11.620000 14.000000 2480.21s
FLOAT_7_E2 93.030000 88.695312 42.090000 39.156250 13.060000 9.492188 2483.90s
FLOAT_7_E3 94.450000 88.250000 43.370000 43.000000 9.310000 6.257812 2482.01s
FLOAT_7_E4 93.570000 85.632812 43.480000 43.718750 11.540000 13.203125 2481.06s
FLOAT_7_E5 88.000000 68.929688 39.650000 41.398438 14.730000 17.562500 2482.61s
FLOAT_8_E1 82.240000 61.664062 31.650000 31.703125 10.610000 12.351562 4562.81s
FLOAT_8_E2 93.680000 86.945312 41.010000 35.921875 12.320000 8.031250 4563.76s
FLOAT_8_E3 94.760000 86.843750 42.640000 45.796875 7.780000 5.781250 4562.76s
FLOAT_8_E4 94.460000 88.773438 43.660000 41.367188 9.330000 5.921875 4575.23s
FLOAT_8_E5 93.290000 86.468750 41.740000 41.054688 11.830000 11.843750 4576.43s
FLOAT_8_E6 87.980000 77.648438 37.900000 42.898438 14.720000 18.507812 4572.51s
MobileNetV2 cifar10
Title Org_acc Org_gen_acc FGSM_acc FGSM_gen_acc PGD_acc PGD_gen_acc Time
Full 90.270000 99.960938 40.190000 76.007812 5.550000 16.015625 58.61s
INT_2 10.000000 9.453125 10.000000 9.453125 10.000000 9.453125 228.33s
INT_3 9.960000 9.453125 10.000000 9.453125 10.000000 9.453125 225.76s
INT_4 9.490000 10.398438 10.280000 9.835938 10.150000 9.960938 229.86s
INT_5 20.560000 38.156250 11.720000 18.320312 10.230000 14.906250 229.48s
INT_6 57.910000 94.695312 24.450000 39.335938 14.740000 25.492188 229.98s
INT_7 80.300000 99.617188 35.160000 64.382812 12.750000 33.750000 228.65s
INT_8 88.180000 99.898438 43.090000 76.250000 10.770000 28.632812 228.98s
INT_9 89.760000 99.937500 41.570000 74.953125 8.410000 21.726562 227.42s
INT_10 90.380000 99.968750 40.400000 75.234375 6.480000 18.093750 204.56s
INT_11 90.290000 99.945312 40.410000 76.359375 5.950000 17.062500 230.18s
INT_12 90.270000 99.960938 40.260000 75.882812 5.750000 16.234375 228.24s
INT_13 90.240000 99.960938 40.270000 76.000000 5.760000 16.195312 184.42s
INT_14 90.260000 99.960938 40.140000 75.937500 5.610000 16.101562 231.10s
INT_15 90.270000 99.960938 40.220000 76.000000 5.660000 16.085938 230.59s
INT_16 90.270000 99.960938 40.100000 76.117188 5.650000 16.085938 226.82s
POT_2 10.000000 9.453125 10.000000 9.453125 10.000000 9.453125 249.26s
POT_3 9.990000 9.343750 9.790000 9.484375 9.910000 9.453125 253.89s
POT_4 14.620000 21.656250 10.240000 14.851562 10.260000 14.046875 267.65s
POT_5 14.390000 20.992188 10.700000 12.523438 10.550000 9.789062 314.54s
POT_6 14.330000 20.406250 10.030000 11.757812 10.020000 11.039062 446.22s
POT_7 15.040000 22.085938 10.510000 15.671875 10.100000 12.734375 688.91s
POT_8 14.710000 20.492188 11.000000 12.359375 10.430000 11.156250 1190.58s
FLOAT_3_E1 9.660000 9.859375 9.960000 10.007812 9.920000 10.117188 222.21s
FLOAT_4_E1 11.660000 13.765625 10.170000 10.539062 11.100000 10.703125 269.14s
FLOAT_4_E2 13.230000 19.789062 10.430000 13.820312 10.080000 13.921875 269.10s
FLOAT_5_E1 17.160000 32.843750 11.100000 21.820312 10.590000 19.171875 320.41s
FLOAT_5_E2 26.760000 49.726562 14.300000 24.031250 11.460000 21.664062 316.64s
FLOAT_5_E3 46.320000 76.359375 28.230000 38.593750 15.810000 27.125000 325.47s
FLOAT_6_E1 28.010000 48.945312 11.350000 28.734375 10.320000 25.109375 445.64s
FLOAT_6_E2 45.820000 88.882812 21.670000 25.640625 13.820000 20.078125 445.42s
FLOAT_6_E3 69.910000 97.281250 37.480000 64.585938 13.410000 35.734375 444.29s
FLOAT_6_E4 46.590000 77.820312 27.800000 37.906250 15.840000 25.828125 444.06s
FLOAT_7_E1 33.850000 65.789062 12.760000 12.945312 9.460000 10.710938 695.39s
FLOAT_7_E2 59.400000 95.421875 21.080000 24.164062 9.980000 16.007812 675.21s
FLOAT_7_E3 85.620000 99.906250 38.860000 71.718750 11.040000 29.531250 682.41s
FLOAT_7_E4 68.900000 97.562500 36.920000 63.671875 13.480000 34.007812 676.20s
FLOAT_7_E5 47.350000 76.328125 23.630000 37.179688 13.530000 25.382812 689.76s
FLOAT_8_E1 41.420000 72.015625 11.360000 22.734375 10.010000 15.734375 1184.54s
FLOAT_8_E2 68.860000 97.851562 24.470000 32.976562 11.570000 21.460938 1189.14s
FLOAT_8_E3 89.220000 99.960938 40.040000 71.375000 8.530000 22.945312 1193.14s
FLOAT_8_E4 85.830000 99.867188 37.180000 68.492188 10.300000 27.601562 1187.11s
FLOAT_8_E5 68.490000 97.140625 36.850000 63.984375 13.690000 35.257812 1192.24s
FLOAT_8_E6 46.620000 76.835938 27.690000 38.179688 15.320000 25.703125 1191.62s
MobileNetV2 cifar10
Title Org_acc Org_gen_acc FGSM_acc FGSM_gen_acc PGD_acc PGD_gen_acc Time
Full 90.270000 75.890625 40.190000 36.140625 5.590000 3.234375 57.71s
INT_2 10.000000 9.960938 10.000000 9.960938 10.000000 9.960938 140.67s
INT_3 9.960000 9.960938 10.000000 9.960938 10.000000 9.960938 203.35s
INT_4 9.490000 8.921875 10.470000 9.171875 10.220000 9.117188 221.97s
INT_5 20.560000 21.453125 12.930000 13.609375 10.540000 13.062500 219.63s
INT_6 57.910000 59.773438 23.630000 22.429688 14.120000 12.093750 223.85s
INT_7 80.300000 70.710938 36.340000 31.156250 13.340000 12.796875 223.15s
INT_8 88.180000 72.945312 42.470000 36.648438 10.800000 7.296875 222.42s
INT_9 89.760000 76.039062 41.690000 35.085938 8.180000 4.953125 224.29s
INT_10 90.380000 76.070312 40.280000 35.804688 6.340000 3.812500 220.84s
INT_11 90.290000 75.656250 40.460000 36.070312 5.930000 3.484375 225.77s
INT_12 90.270000 75.679688 40.280000 36.039062 5.750000 3.281250 221.15s
INT_13 90.240000 75.804688 40.230000 36.039062 5.770000 3.203125 221.53s
INT_14 90.260000 75.851562 40.170000 36.195312 5.660000 3.187500 222.13s
INT_15 90.270000 75.882812 40.190000 36.039062 5.630000 3.218750 223.85s
INT_16 90.270000 75.875000 40.150000 36.078125 5.580000 3.203125 222.96s
POT_2 10.000000 9.960938 10.000000 9.960938 10.000000 9.960938 243.47s
POT_3 9.990000 9.882812 9.670000 9.281250 9.610000 9.593750 240.64s
POT_4 14.620000 12.492188 10.170000 16.406250 10.070000 15.171875 262.08s
POT_5 14.390000 11.203125 10.610000 12.218750 10.590000 10.539062 317.48s
POT_6 14.330000 12.171875 10.020000 12.226562 10.000000 10.890625 436.84s
POT_7 15.040000 14.234375 10.490000 16.687500 10.250000 14.820312 690.31s
POT_8 14.710000 12.039062 11.290000 12.656250 10.560000 11.804688 1186.04s
FLOAT_3_E1 9.660000 9.750000 10.420000 9.703125 10.340000 9.921875 238.90s
FLOAT_4_E1 11.660000 11.148438 11.340000 9.953125 10.950000 8.992188 261.89s
FLOAT_4_E2 13.230000 15.601562 11.040000 12.132812 10.780000 11.171875 264.80s
FLOAT_5_E1 17.160000 14.484375 10.780000 13.296875 10.350000 12.828125 319.40s
FLOAT_5_E2 26.760000 25.257812 14.530000 20.609375 11.370000 17.187500 322.91s
FLOAT_5_E3 46.320000 35.921875 26.910000 18.406250 14.750000 11.726562 323.57s
FLOAT_6_E1 28.010000 21.164062 11.670000 13.085938 10.670000 10.898438 441.74s
FLOAT_6_E2 45.820000 50.187500 21.360000 21.625000 13.380000 15.359375 441.60s
FLOAT_6_E3 69.910000 66.226562 37.160000 31.343750 13.500000 10.710938 419.23s
FLOAT_6_E4 46.590000 35.015625 27.670000 18.195312 15.450000 10.656250 438.30s
FLOAT_7_E1 33.850000 42.398438 12.750000 13.359375 9.600000 8.742188 672.35s
FLOAT_7_E2 59.400000 57.781250 21.050000 16.929688 10.500000 10.140625 679.77s
FLOAT_7_E3 85.620000 75.304688 39.370000 37.000000 10.910000 9.648438 679.33s
FLOAT_7_E4 68.900000 65.718750 36.710000 30.421875 13.590000 10.265625 690.20s
FLOAT_7_E5 47.350000 38.382812 24.230000 21.367188 13.760000 16.851562 668.22s
FLOAT_8_E1 41.420000 42.039062 11.290000 18.187500 9.980000 11.757812 1183.25s
FLOAT_8_E2 68.860000 58.406250 25.150000 18.640625 11.960000 9.539062 1182.87s
FLOAT_8_E3 89.220000 75.000000 39.870000 33.851562 8.730000 6.867188 1185.24s
FLOAT_8_E4 85.830000 75.945312 37.480000 36.562500 10.440000 8.906250 1187.76s
FLOAT_8_E5 68.490000 65.054688 36.680000 30.789062 13.730000 10.367188 1180.14s
FLOAT_8_E6 46.620000 36.007812 27.130000 17.242188 15.720000 10.234375 1187.43s
ResNet_152 cifar10
Title Org_acc Org_gen_acc FGSM_acc FGSM_gen_acc PGD_acc PGD_gen_acc Time
Full 94.430000 82.945312 44.280000 40.617188 10.150000 11.281250 327.10s
INT_2 10.000000 10.179688 10.000000 10.179688 10.000000 10.179688 837.44s
INT_3 10.030000 15.242188 9.990000 10.992188 9.990000 10.914062 842.54s
INT_4 15.610000 15.093750 8.960000 11.781250 9.210000 11.539062 845.09s
INT_5 30.350000 11.750000 17.610000 10.328125 13.970000 10.304688 850.06s
INT_6 74.440000 24.976562 17.420000 52.289062 7.250000 24.015625 847.64s
INT_7 90.430000 58.632812 35.830000 51.773438 10.340000 14.453125 842.06s
INT_8 94.060000 80.796875 41.780000 38.859375 11.150000 11.289062 839.31s
INT_9 94.280000 82.921875 45.410000 39.656250 11.100000 11.148438 846.84s
INT_10 94.450000 83.195312 43.900000 40.585938 10.420000 11.867188 837.70s
INT_11 94.390000 83.273438 44.180000 40.460938 10.280000 11.273438 843.87s
INT_12 94.410000 83.070312 44.240000 40.648438 10.150000 11.406250 839.62s
INT_13 94.440000 82.968750 44.300000 40.492188 10.140000 11.210938 843.67s
INT_14 94.430000 82.875000 44.460000 40.781250 10.240000 11.265625 838.38s
INT_15 94.420000 82.914062 44.310000 40.679688 10.220000 11.312500 840.17s
INT_16 94.430000 82.906250 44.390000 40.632812 10.180000 11.281250 839.31s
POT_2 10.000000 10.179688 10.000000 10.179688 10.000000 10.179688 1220.44s
POT_3 10.010000 12.671875 10.390000 14.234375 10.420000 14.554688 1389.87s
POT_4 43.550000 32.453125 17.810000 21.554688 11.080000 16.914062 1775.54s
POT_5 43.000000 24.554688 17.400000 28.750000 11.100000 17.304688 2581.30s
POT_6 40.110000 34.664062 18.850000 22.343750 12.360000 17.015625 4165.48s
POT_7 34.390000 28.687500 19.560000 18.109375 13.220000 14.476562 7427.86s
POT_8 39.190000 39.695312 18.710000 18.664062 13.120000 16.187500 13804.55s
FLOAT_3_E1 9.970000 15.734375 10.000000 9.968750 10.000000 9.968750 1384.86s
FLOAT_4_E1 18.600000 10.773438 11.530000 9.539062 9.750000 8.382812 1780.54s
FLOAT_4_E2 19.530000 23.335938 12.460000 18.726562 10.830000 17.187500 1778.50s
FLOAT_5_E1 61.040000 23.507812 16.320000 25.726562 9.940000 11.859375 2579.43s
FLOAT_5_E2 73.510000 36.351562 26.920000 42.765625 14.370000 24.898438 2585.40s
FLOAT_5_E3 84.050000 45.218750 28.630000 41.265625 13.650000 26.765625 2581.44s
FLOAT_6_E1 87.580000 69.328125 36.360000 41.992188 14.300000 7.453125 4150.79s
FLOAT_6_E2 82.830000 49.992188 27.760000 35.648438 14.260000 22.250000 4152.38s
FLOAT_6_E3 91.170000 57.562500 37.240000 44.117188 12.300000 8.039062 4164.32s
FLOAT_6_E4 85.330000 46.023438 30.710000 40.304688 13.220000 26.726562 4160.66s
FLOAT_7_E1 90.300000 83.601562 35.850000 26.359375 15.280000 8.570312 7388.60s
FLOAT_7_E2 85.720000 48.398438 27.820000 32.976562 12.420000 19.898438 7393.53s
FLOAT_7_E3 93.620000 74.273438 43.020000 44.156250 12.260000 13.476562 7409.19s
FLOAT_7_E4 91.100000 59.023438 36.440000 43.937500 11.750000 8.085938 7428.80s
ResNet_18 cifar10
Title Org_acc Org_gen_acc FGSM_acc FGSM_gen_acc PGD_acc PGD_gen_acc Time
Full 94.300000 99.984375 47.320000 82.757812 11.300000 60.851562 61.58s
INT_2 10.000000 9.882812 10.000000 9.882812 10.000000 9.882812 113.99s
INT_3 10.000000 9.625000 10.000000 9.625000 10.000000 9.625000 110.67s
INT_4 49.440000 55.828125 21.800000 34.109375 14.300000 27.742188 115.23s
INT_5 87.620000 98.531250 34.660000 52.773438 20.740000 42.156250 117.49s
INT_6 93.250000 99.945312 47.120000 82.835938 16.640000 66.960938 111.18s
INT_7 94.070000 99.953125 46.560000 81.468750 13.600000 62.968750 110.95s
INT_8 94.260000 99.976562 48.200000 83.757812 12.300000 62.914062 112.09s
INT_9 94.350000 99.976562 47.650000 83.375000 11.700000 62.328125 110.99s
INT_10 94.340000 99.976562 47.670000 82.531250 11.450000 61.117188 108.41s
INT_11 94.290000 99.976562 47.590000 82.851562 11.420000 61.273438 112.16s
INT_12 94.320000 99.984375 47.530000 82.875000 11.280000 61.054688 113.02s
INT_13 94.310000 99.984375 47.490000 82.789062 11.360000 61.078125 113.65s
INT_14 94.320000 99.976562 47.390000 82.820312 11.390000 61.085938 114.14s
INT_15 94.300000 99.976562 47.420000 82.796875 11.330000 61.125000 116.35s
INT_16 94.320000 99.976562 47.340000 82.750000 11.310000 61.164062 112.37s
POT_2 10.000000 9.882812 10.000000 9.882812 10.000000 9.882812 172.08s
POT_3 10.200000 14.468750 10.050000 13.390625 8.030000 11.812500 192.24s
POT_4 63.780000 75.414062 28.790000 18.781250 17.630000 14.562500 233.05s
POT_5 50.530000 51.570312 24.380000 10.195312 13.340000 9.984375 314.09s
POT_6 51.610000 57.195312 24.930000 10.351562 14.880000 10.078125 475.13s
POT_7 49.050000 53.460938 26.370000 11.148438 15.120000 10.148438 805.59s
POT_8 55.540000 59.414062 26.630000 10.859375 16.300000 10.265625 1438.61s
FLOAT_3_E1 12.740000 15.453125 11.000000 13.179688 11.810000 12.515625 190.74s
FLOAT_4_E1 47.470000 59.093750 22.790000 14.085938 17.650000 12.562500 232.41s
FLOAT_4_E2 52.510000 95.570312 17.470000 27.437500 10.200000 20.601562 233.23s
FLOAT_5_E1 81.840000 91.625000 36.050000 32.656250 18.880000 23.625000 315.02s
FLOAT_5_E2 85.740000 99.890625 35.820000 58.484375 14.070000 37.796875 315.16s
FLOAT_5_E3 89.480000 99.851562 44.030000 75.523438 17.910000 59.203125 315.12s
FLOAT_6_E1 87.390000 98.367188 40.690000 32.328125 16.650000 19.828125 476.72s
FLOAT_6_E2 91.650000 99.914062 42.820000 77.484375 13.280000 56.468750 476.46s
FLOAT_6_E3 93.410000 99.984375 46.900000 83.117188 14.290000 64.664062 476.15s
FLOAT_6_E4 89.530000 99.828125 44.860000 73.500000 18.060000 56.882812 476.13s
FLOAT_7_E1 90.160000 98.796875 42.160000 52.984375 15.160000 36.179688 807.53s
FLOAT_7_E2 92.610000 99.945312 45.390000 80.851562 12.460000 60.054688 807.65s
FLOAT_7_E3 94.080000 99.984375 48.150000 83.132812 13.180000 62.726562 807.36s
FLOAT_7_E4 93.120000 99.929688 47.440000 81.375000 15.060000 61.218750 807.40s
FLOAT_7_E5 89.320000 99.734375 41.960000 62.453125 18.720000 44.476562 807.31s
FLOAT_8_E1 90.340000 99.343750 41.100000 49.804688 14.850000 32.750000 1439.81s
FLOAT_8_E2 92.250000 99.953125 43.710000 72.687500 13.540000 49.375000 1439.54s
FLOAT_8_E3 94.240000 99.976562 47.240000 82.289062 11.960000 61.062500 1439.69s
FLOAT_8_E4 94.060000 99.976562 47.570000 82.531250 12.870000 62.132812 1439.74s
FLOAT_8_E5 93.310000 99.968750 46.900000 84.390625 14.510000 65.539062 1439.76s
FLOAT_8_E6 89.410000 99.781250 43.070000 67.773438 18.690000 50.945312 1440.24s
ResNet_18 cifar10
Title Org_acc Org_gen_acc FGSM_acc FGSM_gen_acc PGD_acc PGD_gen_acc Time
Full 94.300000 99.414062 47.310000 47.835938 11.270000 10.867188 59.05s
INT_2 10.000000 9.843750 10.000000 9.843750 10.000000 9.843750 105.14s
INT_3 10.000000 10.007812 10.000000 10.007812 10.000000 10.007812 105.30s
INT_4 49.440000 28.398438 20.680000 24.406250 14.080000 17.132812 105.48s
INT_5 87.620000 87.671875 34.230000 39.539062 19.800000 19.164062 105.49s
INT_6 93.250000 98.453125 47.820000 52.632812 16.790000 18.226562 105.88s
INT_7 94.070000 98.898438 46.700000 47.726562 13.450000 13.914062 128.22s
INT_8 94.260000 99.398438 48.870000 48.085938 12.440000 12.304688 109.68s
INT_9 94.350000 99.437500 47.720000 49.085938 11.800000 11.414062 126.65s
INT_10 94.340000 99.406250 47.400000 47.968750 11.470000 11.085938 130.41s
INT_11 94.290000 99.414062 47.590000 48.164062 11.430000 11.085938 124.30s
INT_12 94.320000 99.421875 47.470000 48.023438 11.370000 10.812500 105.42s
INT_13 94.310000 99.414062 47.440000 48.015625 11.390000 10.882812 118.16s
INT_14 94.320000 99.414062 47.420000 48.093750 11.190000 10.921875 119.38s
INT_15 94.300000 99.414062 47.470000 48.031250 11.450000 10.921875 126.28s
INT_16 94.320000 99.414062 47.480000 48.093750 11.290000 10.953125 126.69s
POT_2 10.000000 9.843750 10.000000 9.843750 10.000000 9.843750 169.25s
POT_3 10.200000 14.890625 10.350000 10.046875 8.400000 7.742188 190.90s
POT_4 63.780000 61.000000 29.720000 10.218750 17.810000 5.429688 231.81s
POT_5 50.530000 68.671875 26.900000 10.445312 15.230000 4.335938 311.95s
POT_6 51.610000 72.226562 27.290000 10.101562 15.910000 9.226562 472.97s
POT_7 49.050000 71.421875 28.030000 10.531250 15.920000 5.140625 802.77s
POT_8 55.540000 71.281250 28.080000 10.250000 16.380000 8.296875 1439.04s
FLOAT_3_E1 12.740000 23.578125 11.370000 13.601562 11.430000 13.273438 189.11s
FLOAT_4_E1 47.470000 51.546875 24.570000 13.429688 17.960000 10.171875 228.67s
FLOAT_4_E2 52.510000 82.953125 16.440000 11.648438 9.930000 10.578125 229.72s
FLOAT_5_E1 81.840000 71.031250 37.930000 11.773438 20.110000 5.796875 311.31s
FLOAT_5_E2 85.740000 97.453125 36.790000 35.609375 12.920000 13.664062 313.17s
FLOAT_5_E3 89.480000 95.187500 43.070000 32.476562 17.940000 19.031250 312.07s
FLOAT_6_E1 87.390000 89.695312 41.370000 11.984375 16.970000 1.296875 473.94s
FLOAT_6_E2 91.650000 99.054688 42.760000 38.984375 13.620000 13.421875 473.40s
FLOAT_6_E3 93.410000 99.046875 46.530000 33.992188 14.550000 11.796875 473.30s
FLOAT_6_E4 89.530000 95.625000 44.260000 35.390625 17.820000 20.460938 472.14s
FLOAT_7_E1 90.160000 93.593750 41.590000 15.351562 15.700000 0.828125 801.96s
FLOAT_7_E2 92.610000 99.070312 44.680000 43.726562 12.340000 14.000000 802.51s
FLOAT_7_E3 94.080000 99.437500 48.210000 45.812500 13.040000 12.179688 801.48s
FLOAT_7_E4 93.120000 99.109375 47.720000 40.773438 14.930000 12.601562 801.43s
FLOAT_7_E5 89.320000 95.203125 42.210000 39.250000 17.570000 22.953125 801.24s
FLOAT_8_E1 90.340000 94.007812 41.470000 16.093750 14.760000 0.554688 1437.55s
FLOAT_8_E2 92.250000 99.171875 43.930000 30.164062 13.510000 12.054688 1439.68s
FLOAT_8_E3 94.240000 99.398438 47.440000 45.812500 12.370000 12.070312 1438.96s
FLOAT_8_E4 94.060000 99.382812 47.390000 45.953125 13.020000 12.484375 1438.85s
FLOAT_8_E5 93.310000 99.031250 46.520000 40.296875 14.590000 12.234375 1438.88s
FLOAT_8_E6 89.410000 91.976562 42.590000 26.476562 19.140000 15.687500 1439.03s
ResNet_50 cifar10
Title Org_acc Org_gen_acc FGSM_acc FGSM_gen_acc PGD_acc PGD_gen_acc Time
Full 94.570000 99.945312 44.350000 70.015625 8.940000 13.328125 132.07s
INT_2 10.000000 9.820312 10.000000 9.820312 10.000000 9.820312 325.77s
INT_3 10.040000 14.148438 10.080000 12.039062 10.040000 12.109375 323.77s
INT_4 24.550000 21.554688 11.880000 9.898438 10.870000 9.882812 274.27s
INT_5 79.260000 83.382812 36.870000 39.500000 21.580000 22.937500 311.02s
INT_6 92.880000 99.015625 45.070000 55.203125 15.340000 19.492188 309.83s
INT_7 93.760000 99.921875 44.650000 64.335938 12.250000 14.515625 307.40s
INT_8 94.400000 99.921875 45.510000 71.429688 11.380000 16.343750 307.96s
INT_9 94.510000 99.945312 45.000000 70.609375 9.550000 14.187500 324.85s
INT_10 94.550000 99.945312 44.320000 69.859375 9.180000 13.507812 309.98s
INT_11 94.660000 99.945312 44.560000 70.039062 8.940000 13.304688 308.57s
INT_12 94.580000 99.945312 44.520000 69.953125 8.980000 13.421875 309.40s
INT_13 94.580000 99.945312 44.470000 69.945312 8.950000 13.195312 309.02s
INT_14 94.570000 99.945312 44.470000 69.929688 9.020000 13.171875 308.25s
INT_15 94.570000 99.945312 44.370000 70.031250 8.950000 13.242188 307.84s
INT_16 94.570000 99.945312 44.380000 69.937500 9.030000 13.304688 308.10s
POT_2 10.000000 9.820312 10.000000 9.820312 10.000000 9.820312 485.49s
POT_3 10.390000 12.835938 10.030000 9.585938 9.930000 9.476562 567.34s
POT_4 51.590000 74.953125 19.470000 30.929688 13.380000 19.828125 756.15s
POT_5 50.500000 73.953125 17.320000 36.148438 11.770000 31.187500 1123.11s
POT_6 55.340000 72.921875 22.490000 39.312500 14.490000 32.406250 1847.88s
POT_7 50.930000 72.921875 20.260000 36.695312 13.530000 28.648438 3331.46s
POT_8 44.430000 75.757812 18.900000 32.453125 14.180000 28.312500 6251.90s
FLOAT_3_E1 9.440000 15.210938 9.810000 3.289062 9.880000 2.976562 567.61s
FLOAT_4_E1 53.580000 32.593750 15.650000 21.671875 13.770000 18.554688 757.48s
FLOAT_4_E2 19.850000 58.656250 9.520000 37.015625 7.560000 35.359375 755.65s
ResNet_50 cifar10
Title Org_acc Org_gen_acc FGSM_acc FGSM_gen_acc PGD_acc PGD_gen_acc Time
Full 94.570000 95.296875 44.360000 45.945312 8.980000 10.601562 135.24s
INT_2 10.000000 10.328125 10.000000 10.328125 10.000000 10.328125 313.70s
INT_3 10.040000 13.429688 10.050000 13.992188 10.070000 14.164062 318.85s
INT_4 24.480000 18.546875 10.970000 10.367188 10.650000 10.625000 316.06s
INT_5 79.260000 59.109375 37.500000 27.835938 19.430000 16.328125 296.41s
INT_6 92.840000 90.531250 46.820000 40.570312 15.700000 13.406250 323.31s
INT_7 93.730000 93.828125 43.610000 42.125000 11.800000 10.304688 305.24s
INT_8 94.450000 95.585938 45.520000 48.046875 10.950000 12.203125 303.49s
INT_9 94.550000 94.570312 44.480000 45.390625 9.540000 11.250000 303.94s
INT_10 94.580000 94.906250 44.460000 45.843750 9.100000 10.898438 305.67s
INT_11 94.640000 95.296875 44.510000 45.945312 9.030000 10.500000 308.67s
INT_12 94.580000 95.257812 44.410000 46.054688 8.940000 10.671875 303.62s
INT_13 94.590000 95.273438 44.450000 45.921875 8.950000 10.679688 305.91s
INT_14 94.600000 95.273438 44.470000 45.945312 8.960000 10.695312 272.73s
INT_15 94.570000 95.281250 44.520000 45.835938 8.930000 10.625000 289.87s
INT_16 94.570000 95.296875 44.380000 45.890625 8.950000 10.617188 272.32s
POT_2 10.000000 10.328125 10.000000 10.328125 10.000000 10.328125 485.14s
POT_3 10.260000 12.617188 10.000000 9.578125 10.000000 9.437500 564.90s
POT_4 51.730000 72.195312 19.410000 30.421875 13.800000 24.734375 757.13s
POT_5 53.030000 64.093750 21.140000 26.039062 14.780000 22.023438 1128.74s
POT_6 55.280000 66.914062 21.140000 33.882812 13.710000 28.804688 1857.15s
POT_7 51.220000 68.859375 18.990000 30.648438 12.140000 23.312500 3337.61s
POT_8 44.780000 58.265625 18.930000 25.929688 13.450000 21.671875 6253.15s
FLOAT_3_E1 9.440000 15.976562 9.010000 8.359375 8.890000 8.507812 567.10s
FLOAT_4_E1 52.520000 40.414062 16.870000 24.734375 12.980000 15.328125 757.10s
FLOAT_4_E2 19.800000 33.039062 10.030000 31.843750 8.310000 28.679688 755.31s
FLOAT_5_E1 84.380000 77.562500 40.060000 40.164062 21.950000 29.828125 1122.66s
FLOAT_5_E2 63.310000 59.867188 21.590000 31.250000 13.280000 24.078125 1127.25s
FLOAT_5_E3 88.820000 85.156250 42.390000 45.140625 20.040000 27.195312 1128.58s
FLOAT_6_E1 87.410000 84.593750 38.790000 45.656250 17.710000 28.890625 1851.13s
FLOAT_6_E2 88.610000 77.281250 35.700000 45.382812 15.570000 31.609375 1853.98s
FLOAT_6_E3 92.990000 90.609375 44.470000 49.640625 14.770000 17.515625 1857.08s
FLOAT_6_E4 88.470000 83.656250 42.930000 49.234375 19.750000 31.250000 1856.95s
FLOAT_7_E1 90.630000 88.554688 40.630000 48.429688 15.880000 28.632812 3327.18s
FLOAT_7_E2 91.940000 83.742188 38.530000 50.320312 14.890000 30.359375 3332.99s
FLOAT_7_E3 94.170000 93.312500 44.200000 46.835938 11.800000 12.992188 3340.01s
FLOAT_7_E4 92.950000 90.148438 43.750000 49.507812 14.640000 17.976562 3342.85s
FLOAT_7_E5 88.970000 84.078125 42.870000 47.156250 20.230000 29.546875 3342.01s
FLOAT_8_E1 90.840000 89.453125 39.530000 45.695312 14.220000 24.164062 6245.58s
FLOAT_8_E2 92.650000 85.210938 39.950000 52.820312 13.510000 27.437500 6251.00s
FLOAT_8_E3 94.470000 94.718750 44.390000 46.265625 10.180000 11.757812 6263.01s
FLOAT_8_E4 94.170000 92.882812 44.530000 48.015625 12.000000 14.109375 6269.03s
FLOAT_8_E5 92.950000 90.125000 44.670000 50.742188 14.740000 18.765625 6268.95s
FLOAT_8_E6 87.850000 83.859375 41.850000 43.132812 21.560000 28.687500 6267.85s
VGG_16 cifar10
Title Org_acc Org_gen_acc FGSM_acc FGSM_gen_acc PGD_acc PGD_gen_acc Time
Full 93.130000 99.031250 53.170000 51.195312 18.780000 25.054688 28.36s
INT_2 10.000000 10.070312 10.000000 10.070312 10.000000 10.070312 70.63s
INT_3 10.020000 9.812500 10.000000 10.070312 10.000000 10.070312 55.48s
INT_4 29.300000 10.828125 11.970000 11.328125 10.190000 10.593750 63.97s
INT_5 88.810000 92.210938 39.680000 52.656250 20.310000 38.015625 72.48s
INT_6 92.500000 98.500000 50.700000 54.453125 21.590000 33.304688 74.44s
INT_7 92.980000 98.812500 52.910000 52.335938 20.730000 29.117188 72.67s
INT_8 93.020000 98.992188 53.240000 51.328125 19.540000 26.250000 62.34s
INT_9 93.050000 99.070312 53.120000 51.601562 19.020000 25.835938 72.20s
INT_10 93.150000 99.000000 53.180000 51.078125 18.930000 25.390625 73.33s
INT_11 93.110000 99.023438 53.070000 51.085938 18.730000 25.179688 68.53s
INT_12 93.140000 99.031250 53.200000 51.062500 18.720000 25.125000 68.94s
INT_13 93.160000 99.023438 53.210000 51.210938 18.730000 25.125000 71.74s
INT_14 93.160000 99.031250 53.190000 51.265625 18.780000 25.101562 72.25s
INT_15 93.130000 99.031250 53.290000 51.148438 18.820000 25.101562 71.79s
INT_16 93.140000 99.031250 53.160000 51.187500 18.700000 25.132812 61.27s
POT_2 10.000000 10.070312 10.000000 10.070312 10.000000 10.070312 89.14s
POT_3 10.450000 13.343750 10.010000 10.148438 10.010000 10.046875 87.41s
POT_4 79.980000 64.750000 43.280000 32.359375 20.620000 22.156250 105.94s
POT_5 79.820000 64.664062 43.910000 31.320312 20.470000 21.453125 143.33s
POT_6 79.670000 65.164062 42.980000 31.812500 20.350000 22.554688 217.25s
POT_7 79.850000 64.421875 43.180000 31.375000 20.320000 21.734375 367.59s
POT_8 79.790000 64.773438 43.390000 31.101562 20.480000 21.281250 665.74s
FLOAT_3_E1 10.130000 10.070312 10.030000 10.085938 10.010000 10.062500 90.55s
FLOAT_4_E1 80.090000 53.960938 35.250000 32.960938 19.880000 24.148438 106.88s
FLOAT_4_E2 72.050000 83.281250 37.510000 34.007812 21.830000 25.718750 107.78s
FLOAT_5_E1 87.670000 76.468750 45.970000 35.593750 20.450000 23.593750 143.18s
FLOAT_5_E2 90.380000 97.765625 49.290000 43.804688 21.410000 28.507812 143.05s
FLOAT_5_E3 89.850000 95.109375 51.530000 42.296875 21.690000 24.343750 143.16s
FLOAT_6_E1 90.200000 85.960938 48.690000 40.906250 20.800000 25.617188 215.65s
FLOAT_6_E2 91.860000 98.812500 50.450000 45.195312 19.170000 26.992188 215.42s
FLOAT_6_E3 92.750000 98.593750 53.610000 50.125000 20.300000 26.750000 215.43s
FLOAT_6_E4 89.750000 95.078125 51.470000 42.039062 21.660000 24.109375 215.40s
FLOAT_7_E1 90.810000 90.343750 49.430000 41.398438 19.780000 25.335938 365.55s
FLOAT_7_E2 91.920000 98.968750 51.530000 45.648438 18.970000 26.421875 366.54s
FLOAT_7_E3 92.890000 99.023438 52.850000 51.203125 19.150000 25.984375 366.76s
FLOAT_7_E4 92.710000 98.578125 53.240000 50.257812 20.240000 27.101562 367.26s
FLOAT_7_E5 89.770000 95.179688 51.810000 42.375000 21.750000 24.351562 366.76s
FLOAT_8_E1 91.190000 91.710938 49.480000 42.101562 19.400000 25.476562 661.73s
FLOAT_8_E2 92.220000 99.132812 52.180000 47.023438 19.320000 26.281250 663.08s
FLOAT_8_E3 93.150000 99.023438 53.110000 50.234375 19.090000 25.382812 662.49s
FLOAT_8_E4 93.000000 99.007812 53.010000 51.062500 19.200000 25.875000 664.14s
FLOAT_8_E5 92.790000 98.632812 53.290000 49.976562 20.310000 26.875000 664.98s
FLOAT_8_E6 89.750000 95.062500 51.650000 42.023438 21.710000 24.554688 665.24s
VGG_16 cifar10
Title Org_acc Org_gen_acc FGSM_acc FGSM_gen_acc PGD_acc PGD_gen_acc Time
Full 93.140000 99.539062 53.180000 49.640625 18.800000 20.265625 28.55s
INT_2 10.000000 10.062500 10.000000 10.062500 10.000000 10.062500 70.01s
INT_3 10.020000 9.390625 10.000000 10.062500 10.000000 10.062500 55.97s
INT_4 29.300000 10.687500 11.750000 10.695312 9.820000 10.117188 69.62s
INT_5 88.550000 86.140625 39.580000 53.320312 19.850000 40.585938 65.74s
INT_6 92.310000 98.867188 50.520000 52.898438 21.360000 28.515625 65.59s
INT_7 92.980000 99.437500 52.860000 51.609375 20.450000 24.734375 72.48s
INT_8 93.130000 99.554688 53.380000 49.750000 19.640000 21.234375 75.11s
INT_9 93.080000 99.554688 53.190000 50.328125 19.190000 21.179688 63.08s
INT_10 93.140000 99.554688 53.380000 49.562500 18.820000 20.500000 72.66s
INT_11 93.120000 99.531250 53.100000 49.734375 18.720000 20.437500 57.33s
INT_12 93.160000 99.539062 53.180000 49.578125 18.690000 20.320312 72.70s
INT_13 93.170000 99.539062 53.140000 49.539062 18.720000 20.281250 55.47s
INT_14 93.150000 99.539062 53.220000 49.625000 18.640000 20.320312 72.86s
INT_15 93.140000 99.539062 53.190000 49.710938 18.670000 20.335938 60.35s
INT_16 93.140000 99.539062 53.170000 49.710938 18.740000 20.304688 74.73s
POT_2 10.000000 10.062500 10.000000 10.062500 10.000000 10.062500 88.77s
POT_3 10.450000 12.125000 10.140000 10.210938 9.960000 9.921875 91.41s
POT_4 79.980000 59.140625 43.350000 27.429688 20.610000 21.804688 108.44s
POT_5 79.780000 57.601562 43.470000 27.398438 20.450000 21.515625 143.89s
POT_6 80.060000 57.820312 43.140000 27.375000 20.040000 21.445312 216.30s
POT_7 79.890000 58.921875 43.630000 27.562500 20.730000 21.757812 366.70s
POT_8 79.900000 57.867188 43.730000 27.406250 20.540000 21.945312 664.48s
FLOAT_3_E1 10.130000 10.054688 10.000000 10.062500 10.010000 10.023438 90.10s
FLOAT_4_E1 80.220000 46.203125 35.740000 31.843750 19.140000 23.343750 104.98s
FLOAT_4_E2 72.180000 85.367188 35.890000 33.085938 21.340000 25.484375 109.12s
FLOAT_5_E1 87.580000 84.609375 46.180000 33.101562 20.860000 21.296875 144.63s
FLOAT_5_E2 90.320000 98.515625 48.540000 36.945312 20.950000 21.679688 145.13s
FLOAT_5_E3 89.790000 97.789062 51.460000 36.906250 21.600000 20.562500 144.24s
FLOAT_6_E1 90.310000 92.906250 48.590000 36.343750 20.680000 21.468750 215.38s
FLOAT_6_E2 91.670000 99.187500 50.740000 40.945312 19.230000 21.367188 216.65s
FLOAT_6_E3 92.800000 99.484375 53.400000 48.804688 20.180000 20.765625 215.16s
FLOAT_6_E4 89.540000 98.109375 51.650000 36.203125 21.940000 20.507812 215.94s
FLOAT_7_E1 90.740000 96.718750 49.670000 36.132812 19.640000 21.085938 365.69s
FLOAT_7_E2 92.000000 99.421875 51.320000 42.093750 19.180000 20.632812 365.91s
FLOAT_7_E3 93.010000 99.468750 52.940000 49.523438 19.250000 21.015625 365.89s
FLOAT_7_E4 92.650000 99.406250 53.010000 48.484375 20.330000 21.015625 366.05s
FLOAT_7_E5 89.570000 97.804688 51.280000 36.648438 21.830000 20.546875 366.57s
FLOAT_8_E1 91.060000 96.914062 49.670000 35.992188 19.690000 20.648438 661.56s
FLOAT_8_E2 92.060000 99.468750 52.110000 43.414062 18.690000 21.046875 661.99s
FLOAT_8_E3 93.190000 99.523438 53.030000 48.750000 19.070000 20.531250 663.80s
FLOAT_8_E4 92.970000 99.523438 53.000000 49.328125 19.200000 21.085938 664.10s
FLOAT_8_E5 92.760000 99.445312 53.290000 48.398438 20.350000 20.992188 664.13s
FLOAT_8_E6 89.740000 97.937500 51.530000 36.359375 21.750000 20.710938 664.38s
VGG_19 cifar10
Title Org_acc Org_gen_acc FGSM_acc FGSM_gen_acc PGD_acc PGD_gen_acc Time
Full 93.020000 99.914062 50.590000 60.492188 16.780000 26.593750 50.74s
INT_2 10.000000 9.679688 10.000000 9.679688 10.000000 9.679688 102.39s
INT_3 10.000000 9.679688 10.000000 9.679688 10.000000 9.679688 102.14s
INT_4 17.330000 22.578125 12.330000 15.132812 9.470000 14.117188 103.49s
INT_5 88.220000 98.562500 36.050000 61.734375 17.140000 41.718750 103.08s
INT_6 92.250000 99.867188 49.620000 69.320312 20.110000 40.828125 93.44s
INT_7 92.760000 99.914062 50.360000 63.726562 18.850000 32.343750 89.96s
INT_8 93.000000 99.921875 50.710000 61.210938 17.760000 28.468750 84.60s
INT_9 93.040000 99.898438 50.460000 60.617188 17.170000 27.273438 101.82s
INT_10 93.060000 99.914062 50.580000 60.523438 16.760000 26.765625 103.82s
INT_11 92.980000 99.921875 50.740000 60.351562 16.920000 26.546875 85.06s
INT_12 93.030000 99.914062 50.580000 60.468750 16.750000 26.625000 84.62s
INT_13 93.010000 99.914062 50.500000 60.523438 16.940000 26.554688 84.16s
INT_14 93.030000 99.914062 50.540000 60.515625 16.870000 26.554688 84.56s
INT_15 93.030000 99.914062 50.540000 60.476562 16.870000 26.562500 104.47s
INT_16 93.030000 99.914062 50.580000 60.437500 16.800000 26.531250 102.85s
POT_2 10.000000 9.679688 10.000000 9.679688 10.000000 9.679688 127.08s
POT_3 10.000000 9.679688 10.000000 9.679688 10.000000 9.679688 138.92s
POT_4 71.830000 82.578125 37.830000 24.765625 18.680000 17.468750 165.01s
POT_5 71.960000 81.296875 37.380000 24.375000 18.000000 17.312500 217.42s
POT_6 72.020000 82.164062 37.370000 24.554688 18.310000 17.453125 322.57s
POT_7 71.430000 82.203125 37.540000 24.234375 17.810000 17.039062 537.44s
POT_8 71.450000 81.195312 37.200000 24.437500 18.290000 17.109375 954.10s
FLOAT_3_E1 10.010000 9.679688 9.960000 9.687500 9.970000 9.679688 138.82s
FLOAT_4_E1 77.130000 77.609375 27.980000 43.015625 16.310000 30.054688 163.94s
FLOAT_4_E2 71.290000 94.898438 27.710000 33.773438 18.030000 25.085938 165.68s
FLOAT_5_E1 86.890000 95.406250 42.240000 38.531250 19.310000 23.898438 218.32s
FLOAT_5_E2 88.630000 99.554688 44.110000 47.046875 20.910000 26.898438 219.26s
FLOAT_5_E3 90.500000 99.679688 47.970000 48.367188 19.600000 24.515625 219.31s
FLOAT_6_E1 88.860000 97.664062 44.750000 35.500000 19.350000 20.781250 324.47s
FLOAT_6_E2 91.150000 99.820312 48.270000 52.203125 19.410000 26.687500 324.68s
FLOAT_6_E3 92.420000 99.898438 51.350000 60.164062 18.580000 26.484375 324.80s
FLOAT_6_E4 90.460000 99.710938 47.780000 48.328125 19.350000 24.445312 324.63s
FLOAT_7_E1 89.930000 98.476562 44.670000 35.250000 17.830000 20.281250 537.80s
FLOAT_7_E2 91.910000 99.859375 48.640000 55.484375 19.150000 26.453125 537.25s
FLOAT_7_E3 92.940000 99.914062 50.600000 59.750000 17.260000 27.234375 539.38s
FLOAT_7_E4 92.700000 99.898438 51.310000 60.328125 18.570000 26.500000 539.31s
FLOAT_7_E5 90.600000 99.703125 48.100000 48.070312 19.520000 24.328125 539.27s
FLOAT_8_E1 90.110000 98.507812 45.080000 35.031250 17.500000 19.875000 954.49s
FLOAT_8_E2 92.020000 99.914062 49.220000 55.992188 18.060000 25.960938 952.79s
FLOAT_8_E3 93.100000 99.898438 50.260000 59.765625 17.110000 26.656250 952.70s
FLOAT_8_E4 92.940000 99.906250 50.450000 59.320312 17.310000 27.242188 952.77s
FLOAT_8_E5 92.640000 99.921875 51.410000 60.554688 18.730000 26.437500 954.18s
FLOAT_8_E6 90.790000 99.734375 47.960000 48.148438 19.490000 24.445312 954.13s
VGG_19 cifar10
Title Org_acc Org_gen_acc FGSM_acc FGSM_gen_acc PGD_acc PGD_gen_acc Time
Full 93.030000 96.468750 50.580000 48.289062 16.840000 14.828125 52.77s
INT_2 10.000000 9.945312 10.000000 9.945312 10.000000 9.945312 96.00s
INT_3 10.000000 9.945312 10.000000 9.945312 10.000000 9.945312 94.59s
INT_4 17.340000 19.406250 12.000000 12.210938 10.190000 11.203125 86.37s
INT_5 88.200000 79.890625 35.510000 37.460938 17.080000 22.085938 95.28s
INT_6 92.270000 93.968750 49.290000 51.601562 20.150000 23.242188 96.17s
INT_7 92.800000 96.117188 50.060000 49.304688 18.910000 18.437500 90.90s
INT_8 92.980000 96.226562 50.590000 48.484375 17.920000 16.695312 90.05s
INT_9 93.040000 96.367188 50.320000 48.421875 17.180000 15.632812 92.23s
INT_10 93.050000 96.453125 50.610000 48.523438 16.900000 15.312500 90.34s
INT_11 93.000000 96.460938 50.580000 48.328125 16.930000 15.031250 90.91s
INT_12 93.030000 96.460938 50.600000 48.390625 16.840000 14.921875 90.62s
INT_13 93.020000 96.460938 50.560000 48.304688 16.840000 14.898438 92.62s
INT_14 93.020000 96.460938 50.510000 48.289062 16.850000 14.906250 91.85s
INT_15 93.030000 96.460938 50.550000 48.226562 16.820000 14.890625 90.73s
INT_16 93.020000 96.468750 50.640000 48.250000 16.730000 14.929688 93.51s
POT_2 10.000000 9.945312 10.000000 9.945312 10.000000 9.945312 131.40s
POT_3 10.000000 9.945312 10.000000 9.945312 10.000000 9.945312 142.94s
POT_4 71.750000 51.281250 37.890000 26.898438 18.670000 18.296875 169.33s
POT_5 71.750000 50.445312 36.830000 25.984375 18.010000 17.914062 221.88s
POT_6 71.900000 51.773438 37.120000 26.546875 18.570000 18.000000 325.98s
POT_7 71.420000 52.296875 37.100000 26.640625 18.220000 17.828125 540.99s
POT_8 71.330000 51.046875 37.440000 26.000000 18.410000 17.500000 955.80s
FLOAT_3_E1 10.000000 9.953125 10.070000 9.960938 10.010000 9.953125 139.87s
FLOAT_4_E1 77.180000 53.250000 26.920000 36.937500 15.590000 26.500000 167.38s
FLOAT_4_E2 71.290000 80.570312 28.350000 31.726562 17.350000 23.281250 166.30s
FLOAT_5_E1 86.940000 76.109375 42.550000 39.476562 19.490000 24.515625 219.69s
FLOAT_5_E2 88.600000 92.351562 44.490000 41.062500 21.190000 18.078125 219.69s
FLOAT_5_E3 90.470000 95.218750 47.770000 41.828125 19.290000 18.851562 220.72s
FLOAT_6_E1 88.850000 85.789062 44.830000 38.187500 19.830000 22.335938 325.89s
FLOAT_6_E2 91.160000 93.945312 48.750000 45.328125 19.660000 19.773438 325.96s
FLOAT_6_E3 92.440000 96.523438 51.350000 47.250000 18.790000 12.625000 325.88s
FLOAT_6_E4 90.540000 95.109375 48.140000 41.835938 19.490000 18.242188 325.96s
FLOAT_7_E1 89.910000 87.546875 44.600000 38.703125 17.840000 22.023438 540.81s
FLOAT_7_E2 91.930000 95.109375 49.150000 46.820312 19.320000 19.820312 540.80s
FLOAT_7_E3 92.920000 96.328125 50.620000 48.382812 17.310000 15.632812 540.87s
FLOAT_7_E4 92.700000 96.546875 51.260000 47.156250 18.720000 12.664062 540.89s
FLOAT_7_E5 90.580000 95.226562 47.660000 41.632812 19.500000 18.468750 540.80s
FLOAT_8_E1 90.100000 87.765625 45.000000 38.296875 17.510000 21.710938 955.80s
FLOAT_8_E2 92.040000 95.437500 49.610000 47.187500 18.310000 19.351562 955.09s
FLOAT_8_E3 93.090000 96.468750 50.520000 47.914062 16.990000 15.812500 954.98s
FLOAT_8_E4 92.920000 96.375000 50.560000 48.312500 17.400000 15.710938 955.04s
FLOAT_8_E5 92.620000 96.703125 51.200000 47.195312 18.730000 12.593750 955.57s
FLOAT_8_E6 90.770000 94.867188 47.870000 41.742188 19.570000 18.664062 954.81s
AlexNet cifar100
Title Org_acc Org_gen_acc FGSM_acc FGSM_gen_acc PGD_acc PGD_gen_acc Time
Full 63.440000 99.781250 21.870000 58.664062 7.790000 20.382812 8.91s
INT_2 1.000000 0.960938 1.000000 0.960938 1.000000 0.960938 27.25s
INT_3 0.990000 1.195312 1.000000 0.960938 1.000000 0.960938 27.09s
INT_4 15.960000 27.812500 3.650000 9.210938 2.870000 7.562500 26.44s
INT_5 46.760000 89.523438 14.230000 25.921875 11.530000 19.718750 26.94s
INT_6 58.120000 98.859375 20.030000 46.726562 14.010000 33.867188 27.15s
INT_7 61.680000 99.507812 21.820000 55.445312 11.780000 31.750000 27.73s
INT_8 62.860000 99.750000 22.370000 57.468750 9.870000 25.929688 24.49s
INT_9 63.150000 99.773438 22.030000 58.265625 8.570000 22.343750 25.93s
INT_10 63.360000 99.789062 22.140000 58.531250 8.090000 21.296875 20.20s
INT_11 63.360000 99.781250 22.100000 58.718750 7.920000 20.679688 19.94s
INT_12 63.400000 99.789062 21.940000 58.664062 7.740000 20.500000 19.83s
INT_13 63.440000 99.773438 21.980000 58.757812 7.780000 20.296875 19.74s
INT_14 63.430000 99.773438 21.870000 58.734375 7.740000 20.335938 22.01s
INT_15 63.450000 99.781250 21.950000 58.679688 7.750000 20.343750 28.26s
INT_16 63.440000 99.781250 21.890000 58.679688 7.720000 20.453125 27.28s
POT_2 1.000000 0.960938 1.000000 0.960938 1.000000 0.960938 31.73s
POT_3 1.330000 1.992188 1.030000 0.968750 1.020000 0.960938 34.02s
POT_4 24.460000 37.812500 6.950000 12.546875 4.390000 7.921875 36.31s
POT_5 25.240000 38.757812 6.860000 12.976562 4.070000 7.562500 44.06s
POT_6 24.910000 39.328125 7.130000 12.851562 4.310000 7.921875 59.85s
POT_7 24.810000 37.171875 7.270000 12.867188 4.160000 7.585938 92.18s
POT_8 25.050000 38.906250 7.120000 12.718750 4.130000 7.531250 161.44s
FLOAT_3_E1 1.960000 2.929688 1.450000 1.757812 1.190000 1.328125 30.23s
FLOAT_4_E1 15.090000 19.953125 7.860000 6.531250 5.730000 4.953125 35.10s
FLOAT_4_E2 18.310000 41.609375 4.630000 4.023438 3.960000 3.765625 33.30s
FLOAT_5_E1 38.590000 72.234375 14.170000 21.953125 9.610000 12.617188 44.45s
FLOAT_5_E2 44.250000 91.476562 13.070000 22.640625 8.780000 14.507812 43.65s
FLOAT_5_E3 50.410000 91.835938 17.620000 41.953125 8.930000 19.648438 45.00s
FLOAT_6_E1 48.020000 81.992188 17.060000 25.007812 9.070000 8.937500 59.66s
FLOAT_6_E2 55.160000 97.992188 16.790000 35.453125 8.730000 18.320312 58.73s
FLOAT_6_E3 59.950000 98.812500 21.100000 53.242188 9.260000 22.585938 57.76s
FLOAT_6_E4 49.880000 91.546875 17.620000 41.804688 8.830000 19.796875 58.66s
FLOAT_7_E1 51.440000 88.687500 18.360000 31.421875 8.520000 10.320312 93.81s
FLOAT_7_E2 58.390000 98.789062 17.930000 41.304688 8.260000 18.765625 93.89s
FLOAT_7_E3 62.220000 99.601562 21.890000 56.132812 8.620000 20.601562 93.65s
FLOAT_7_E4 60.150000 98.742188 21.200000 53.140625 8.920000 22.117188 92.42s
FLOAT_7_E5 50.190000 91.726562 17.770000 41.757812 8.690000 19.820312 93.67s
FLOAT_8_E1 53.620000 90.671875 18.810000 35.398438 7.840000 10.742188 160.80s
FLOAT_8_E2 58.680000 99.031250 18.510000 42.804688 8.330000 18.171875 160.99s
FLOAT_8_E3 62.860000 99.750000 21.930000 57.921875 8.130000 20.843750 159.48s
FLOAT_8_E4 62.490000 99.507812 21.980000 56.250000 8.680000 20.507812 159.30s
FLOAT_8_E5 60.100000 98.718750 20.930000 53.304688 9.170000 22.414062 159.52s
FLOAT_8_E6 50.440000 91.523438 17.520000 41.765625 8.950000 19.757812 161.18s
AlexNet cifar100
Title Org_acc Org_gen_acc FGSM_acc FGSM_gen_acc PGD_acc PGD_gen_acc Time
Full 63.440000 65.804688 21.880000 53.984375 7.660000 6.578125 8.41s
INT_2 1.000000 1.093750 1.000000 1.093750 1.000000 1.093750 23.27s
INT_3 0.990000 1.640625 1.000000 1.093750 1.000000 1.093750 21.28s
INT_4 15.960000 13.703125 2.340000 4.437500 1.950000 4.085938 21.70s
INT_5 46.760000 41.851562 12.940000 19.562500 10.280000 15.484375 29.08s
INT_6 58.120000 52.031250 19.850000 34.242188 14.180000 21.562500 23.98s
INT_7 61.680000 62.367188 21.370000 47.414062 11.940000 19.070312 26.71s
INT_8 62.860000 63.937500 22.190000 52.859375 9.820000 12.234375 25.46s
INT_9 63.160000 65.367188 22.220000 53.437500 8.680000 8.500000 19.93s
INT_10 63.360000 65.679688 21.890000 53.906250 8.190000 7.335938 28.45s
INT_11 63.360000 65.617188 22.180000 54.023438 7.850000 6.945312 26.87s
INT_12 63.380000 65.703125 21.910000 53.812500 7.840000 6.570312 22.55s
INT_13 63.440000 65.812500 21.880000 54.109375 7.790000 6.625000 23.92s
INT_14 63.430000 65.773438 21.990000 54.007812 7.690000 6.554688 22.24s
INT_15 63.450000 65.804688 21.930000 54.000000 7.730000 6.687500 23.07s
INT_16 63.440000 65.796875 21.920000 53.968750 7.790000 6.601562 19.49s
POT_2 1.000000 1.093750 1.000000 1.093750 1.000000 1.093750 33.10s
POT_3 1.330000 2.359375 1.000000 1.093750 1.000000 1.093750 33.87s
POT_4 24.460000 21.984375 7.210000 10.726562 4.430000 6.500000 35.21s
POT_5 25.240000 23.015625 7.100000 10.679688 3.860000 5.765625 43.64s
POT_6 24.910000 22.734375 7.130000 10.867188 4.040000 5.960938 59.41s
POT_7 24.810000 23.109375 7.120000 10.476562 4.020000 5.523438 93.92s
POT_8 25.050000 22.726562 7.380000 10.609375 3.810000 5.875000 160.87s
FLOAT_3_E1 1.960000 1.664062 1.050000 1.312500 1.070000 1.164062 37.02s
FLOAT_4_E1 15.090000 11.257812 6.890000 4.078125 5.000000 1.796875 37.46s
FLOAT_4_E2 18.310000 11.132812 5.500000 3.250000 4.670000 2.773438 36.25s
FLOAT_5_E1 38.590000 30.023438 14.710000 9.929688 10.150000 3.289062 43.81s
FLOAT_5_E2 44.250000 33.789062 13.560000 16.523438 9.300000 8.218750 45.09s
FLOAT_5_E3 50.410000 40.882812 16.380000 32.531250 8.120000 13.789062 45.03s
FLOAT_6_E1 48.020000 38.218750 16.760000 17.492188 9.100000 3.851562 59.94s
FLOAT_6_E2 55.150000 46.351562 17.260000 32.960938 8.870000 12.203125 59.64s
FLOAT_6_E3 59.950000 60.585938 21.310000 46.125000 9.260000 5.523438 59.46s
FLOAT_6_E4 49.880000 41.445312 16.450000 33.078125 7.880000 13.578125 59.79s
FLOAT_7_E1 51.440000 40.226562 17.770000 26.625000 8.190000 4.960938 94.36s
FLOAT_7_E2 58.410000 51.968750 18.080000 39.398438 8.650000 10.492188 93.51s
FLOAT_7_E3 62.230000 63.578125 21.860000 52.304688 8.610000 8.937500 93.97s
FLOAT_7_E4 60.140000 60.273438 21.240000 46.125000 9.240000 5.507812 93.16s
FLOAT_7_E5 50.190000 41.195312 16.190000 32.390625 7.940000 13.273438 93.84s
FLOAT_8_E1 53.620000 43.640625 18.290000 27.578125 7.720000 4.625000 161.06s
FLOAT_8_E2 58.660000 54.062500 18.560000 42.382812 8.340000 9.492188 160.72s
FLOAT_8_E3 62.860000 65.648438 21.950000 54.078125 8.060000 7.453125 160.37s
FLOAT_8_E4 62.490000 63.945312 21.700000 52.523438 8.410000 8.953125 160.88s
FLOAT_8_E5 60.040000 59.562500 21.440000 46.468750 9.160000 5.718750 161.28s
FLOAT_8_E6 50.440000 41.125000 16.410000 32.968750 8.110000 13.093750 161.59s
ResNet_18 cifar100
Title Org_acc Org_gen_acc FGSM_acc FGSM_gen_acc PGD_acc PGD_gen_acc Time
Full 75.720000 100.000000 18.720000 72.023438 4.580000 9.796875 41.82s
INT_2 1.050000 0.937500 1.000000 1.046875 1.000000 1.046875 101.36s
INT_3 1.110000 1.164062 1.140000 0.890625 1.040000 0.976562 103.44s
INT_4 6.050000 18.429688 2.100000 4.140625 1.290000 2.445312 84.53s
INT_5 50.970000 95.859375 11.460000 32.476562 5.310000 14.367188 96.83s
INT_6 70.860000 100.000000 18.540000 68.101562 7.300000 23.320312 101.82s
INT_7 74.530000 99.992188 18.460000 66.476562 5.890000 15.171875 108.33s
INT_8 75.420000 100.000000 18.340000 70.250000 5.350000 14.757812 103.04s
INT_9 75.290000 100.000000 18.530000 71.007812 5.090000 14.656250 90.25s
INT_10 75.500000 100.000000 18.470000 71.429688 5.070000 14.210938 106.96s
INT_11 75.420000 100.000000 18.460000 71.023438 5.020000 14.171875 103.37s
INT_12 75.500000 100.000000 18.570000 71.500000 4.950000 13.500000 106.11s
INT_13 75.400000 100.000000 18.510000 71.289062 5.020000 14.000000 105.36s
INT_14 75.380000 100.000000 18.420000 71.210938 5.020000 14.218750 106.59s
INT_15 75.430000 100.000000 18.530000 71.335938 5.010000 13.796875 103.67s
INT_16 75.380000 100.000000 18.550000 71.273438 4.990000 14.125000 101.68s
POT_2 1.000000 1.132812 1.000000 1.046875 1.000000 1.046875 132.63s
POT_3 1.000000 0.882812 1.000000 0.992188 1.000000 0.992188 144.21s
POT_4 19.970000 37.257812 6.010000 3.460938 3.650000 1.351562 172.32s
POT_5 17.190000 46.421875 7.760000 3.218750 4.160000 1.492188 236.35s
POT_6 18.690000 37.445312 6.610000 4.101562 4.190000 1.710938 363.53s
POT_7 15.080000 30.570312 6.060000 3.289062 4.120000 1.453125 623.36s
POT_8 20.530000 46.226562 7.880000 3.085938 4.350000 1.320312 1136.85s
FLOAT_3_E1 0.960000 1.148438 1.040000 1.000000 1.120000 1.156250 145.13s
FLOAT_4_E1 25.630000 39.640625 5.910000 12.218750 5.320000 6.343750 172.46s
FLOAT_4_E2 15.960000 61.335938 3.200000 5.062500 1.720000 2.296875 171.38s
FLOAT_5_E1 53.420000 87.265625 12.640000 15.460938 6.110000 6.203125 234.96s
FLOAT_5_E2 53.880000 98.460938 8.820000 14.609375 4.340000 4.437500 236.93s
FLOAT_5_E3 58.640000 97.812500 15.390000 28.984375 6.130000 7.437500 237.52s
FLOAT_6_E1 59.440000 95.328125 14.160000 20.406250 6.050000 6.578125 363.66s
FLOAT_6_E2 64.600000 99.929688 12.210000 33.445312 5.090000 9.289062 362.85s
FLOAT_6_E3 71.680000 99.992188 18.200000 59.335938 6.320000 15.453125 364.36s
FLOAT_6_E4 58.550000 97.742188 14.810000 27.281250 5.960000 6.929688 363.45s
FLOAT_7_E1 64.430000 97.562500 14.970000 23.359375 6.290000 6.710938 623.47s
FLOAT_7_E2 68.690000 99.984375 14.460000 40.945312 6.280000 11.750000 624.23s
FLOAT_7_E3 74.380000 100.000000 18.630000 68.492188 5.570000 14.820312 624.86s
FLOAT_7_E4 71.490000 99.984375 18.110000 60.945312 6.200000 16.414062 624.94s
FLOAT_7_E5 57.370000 97.765625 15.000000 25.585938 6.150000 7.109375 624.37s
FLOAT_8_E1 65.180000 98.242188 15.500000 25.929688 6.210000 6.851562 1135.77s
FLOAT_8_E2 69.890000 100.000000 14.170000 45.382812 5.660000 12.070312 1136.86s
FLOAT_8_E3 75.280000 100.000000 18.200000 70.593750 5.310000 15.242188 1139.26s
FLOAT_8_E4 74.490000 99.992188 18.470000 68.578125 5.560000 14.664062 1139.00s
FLOAT_8_E5 71.590000 99.992188 18.130000 60.117188 6.080000 15.929688 1139.45s
FLOAT_8_E6 59.890000 98.406250 15.390000 28.210938 6.210000 7.296875 1137.74s
ResNet_18 cifar100
Title Org_acc Org_gen_acc FGSM_acc FGSM_gen_acc PGD_acc PGD_gen_acc Time
Full 75.730000 93.804688 18.740000 45.265625 4.580000 4.468750 41.64s
INT_2 1.050000 0.960938 1.000000 0.960938 1.000000 0.960938 85.74s
INT_3 1.110000 1.054688 1.000000 1.093750 1.000000 1.093750 84.06s
INT_4 6.050000 11.140625 1.520000 3.367188 1.170000 2.390625 109.52s
INT_5 50.990000 67.367188 11.260000 27.296875 5.250000 12.703125 108.59s
INT_6 70.680000 90.703125 17.440000 36.882812 6.720000 10.109375 105.91s
INT_7 74.590000 92.890625 17.580000 40.734375 5.410000 7.945312 83.94s
INT_8 75.350000 93.460938 18.290000 43.820312 5.280000 6.593750 83.70s
INT_9 75.300000 93.265625 18.600000 44.914062 5.160000 6.023438 87.10s
INT_10 75.480000 93.523438 18.440000 45.007812 5.110000 5.562500 108.09s
INT_11 75.390000 93.476562 18.380000 44.750000 5.070000 5.585938 111.52s
INT_12 75.490000 93.640625 18.610000 45.195312 5.010000 5.390625 108.93s
INT_13 75.380000 93.476562 18.490000 45.015625 4.950000 5.570312 96.75s
INT_14 75.380000 93.476562 18.490000 45.039062 4.960000 5.562500 109.07s
INT_15 75.440000 93.546875 18.550000 45.156250 4.930000 5.507812 109.48s
INT_16 75.380000 93.429688 18.470000 44.898438 5.030000 5.687500 108.75s
POT_2 1.000000 0.960938 1.000000 0.960938 1.000000 0.960938 131.52s
POT_3 1.000000 1.765625 1.030000 1.242188 0.990000 1.125000 147.38s
POT_4 20.070000 25.234375 7.270000 4.390625 3.850000 3.218750 176.41s
POT_5 17.150000 23.843750 5.450000 4.718750 3.950000 3.031250 241.32s
POT_6 18.800000 25.750000 6.280000 6.242188 4.340000 4.000000 367.15s
POT_7 15.320000 23.148438 6.150000 4.093750 4.200000 3.007812 626.71s
POT_8 20.980000 23.523438 6.420000 5.703125 4.200000 3.703125 1138.57s
FLOAT_3_E1 0.960000 1.140625 1.080000 1.015625 0.810000 1.085938 145.22s
FLOAT_4_E1 25.430000 16.085938 6.060000 5.187500 3.970000 3.531250 176.01s
FLOAT_4_E2 16.120000 31.812500 1.930000 4.054688 1.170000 2.304688 174.85s
FLOAT_5_E1 52.850000 48.109375 11.740000 11.757812 6.430000 7.585938 241.36s
FLOAT_5_E2 54.290000 75.695312 9.530000 11.234375 4.940000 3.421875 240.09s
FLOAT_5_E3 58.550000 77.734375 15.420000 25.593750 6.730000 12.265625 239.32s
FLOAT_6_E1 59.440000 64.679688 14.050000 21.218750 6.450000 9.937500 366.27s
FLOAT_6_E2 64.250000 87.062500 12.510000 26.875000 5.470000 8.468750 367.72s
FLOAT_6_E3 71.590000 90.867188 18.400000 36.000000 6.360000 6.203125 368.44s
FLOAT_6_E4 58.480000 77.101562 15.430000 26.312500 6.730000 11.984375 367.47s
FLOAT_7_E1 64.330000 74.210938 14.460000 28.000000 6.290000 12.523438 626.82s
FLOAT_7_E2 68.740000 89.718750 15.260000 34.421875 6.390000 10.593750 628.45s
FLOAT_7_E3 74.330000 92.421875 18.450000 42.718750 5.490000 7.554688 627.38s
FLOAT_7_E4 71.460000 90.453125 18.310000 35.320312 6.390000 6.328125 627.53s
FLOAT_7_E5 57.240000 77.351562 15.610000 26.500000 6.520000 11.929688 627.83s
FLOAT_8_E1 65.190000 75.515625 14.840000 28.757812 6.210000 12.234375 1139.52s
FLOAT_8_E2 69.840000 90.054688 15.180000 33.796875 6.320000 9.906250 1139.23s
FLOAT_8_E3 75.230000 93.218750 18.350000 44.781250 5.180000 6.203125 1140.96s
FLOAT_8_E4 74.400000 92.492188 18.440000 42.757812 5.540000 7.664062 1137.38s
FLOAT_8_E5 71.530000 90.710938 18.720000 34.914062 6.320000 6.007812 1138.86s
FLOAT_8_E6 60.140000 79.359375 15.510000 30.039062 6.600000 12.195312 1140.52s
AlexNet cifar100
Title Org_acc Org_gen_acc FGSM_acc FGSM_gen_acc PGD_acc PGD_gen_acc Time
Full 63.450000 99.945312 21.860000 66.210938 7.770000 27.406250 12.27s
INT_2 1.000000 0.851562 1.000000 0.851562 1.000000 0.851562 26.09s
INT_3 0.990000 0.875000 1.000000 0.851562 1.000000 0.851562 25.51s
INT_4 15.970000 26.914062 2.270000 5.226562 2.010000 4.445312 25.78s
INT_5 46.720000 92.828125 13.120000 22.859375 10.730000 18.429688 26.86s
INT_6 58.070000 99.609375 20.240000 50.281250 14.080000 38.507812 26.59s
INT_7 61.670000 99.906250 21.460000 60.078125 12.010000 37.750000 23.32s
INT_8 62.890000 99.929688 22.090000 65.281250 9.910000 34.468750 25.05s
INT_9 63.180000 99.945312 22.290000 65.703125 8.690000 30.226562 25.91s
INT_10 63.330000 99.945312 22.000000 66.023438 8.160000 28.460938 23.97s
INT_11 63.310000 99.945312 22.100000 66.281250 7.850000 27.812500 22.86s
INT_12 63.400000 99.945312 21.920000 66.320312 7.780000 27.421875 26.03s
INT_13 63.460000 99.945312 21.890000 66.257812 7.750000 27.343750 27.17s
INT_14 63.420000 99.945312 21.890000 66.289062 7.790000 27.406250 25.76s
INT_15 63.440000 99.945312 21.940000 66.171875 7.740000 27.257812 28.05s
INT_16 63.440000 99.945312 21.980000 66.281250 7.760000 27.265625 25.70s
POT_2 1.000000 0.851562 1.000000 0.851562 1.000000 0.851562 34.37s
POT_3 1.330000 0.937500 1.000000 0.851562 1.000000 0.851562 33.10s
POT_4 24.470000 41.289062 7.430000 16.500000 4.990000 12.554688 36.69s
POT_5 25.270000 42.375000 7.760000 17.554688 4.150000 12.476562 43.05s
POT_6 24.940000 42.281250 7.360000 17.468750 4.220000 12.234375 60.00s
POT_7 24.790000 41.820312 7.580000 18.164062 4.060000 12.796875 93.74s
POT_8 25.060000 41.437500 7.210000 18.054688 4.280000 12.617188 160.86s
FLOAT_3_E1 1.930000 2.304688 1.440000 1.820312 1.260000 1.617188 32.22s
FLOAT_4_E1 15.120000 25.031250 7.050000 7.218750 5.240000 5.789062 36.64s
FLOAT_4_E2 18.330000 34.695312 5.740000 6.476562 4.730000 5.937500 35.29s
FLOAT_5_E1 38.560000 79.453125 14.710000 24.468750 10.340000 16.992188 44.30s
FLOAT_5_E2 44.220000 93.125000 14.580000 28.460938 10.160000 20.312500 43.24s
FLOAT_5_E3 50.550000 93.546875 16.390000 36.960938 8.120000 20.351562 43.29s
FLOAT_6_E1 47.990000 87.773438 16.470000 29.765625 9.370000 15.773438 59.58s
FLOAT_6_E2 55.120000 99.468750 17.160000 44.804688 9.420000 25.640625 60.68s
FLOAT_6_E3 59.990000 99.750000 21.540000 60.570312 9.420000 28.335938 59.20s
FLOAT_6_E4 49.970000 93.640625 16.600000 37.203125 8.050000 20.000000 60.02s
FLOAT_7_E1 51.540000 92.757812 17.860000 39.539062 8.510000 17.359375 93.95s
FLOAT_7_E2 58.320000 99.859375 18.170000 51.078125 8.430000 26.671875 93.07s
FLOAT_7_E3 62.260000 99.914062 21.910000 63.078125 8.440000 26.882812 93.84s
FLOAT_7_E4 60.040000 99.773438 21.320000 60.859375 9.240000 28.210938 94.14s
FLOAT_7_E5 50.240000 93.640625 16.580000 37.109375 8.030000 19.828125 93.02s
FLOAT_8_E1 53.610000 92.765625 18.240000 43.117188 7.660000 18.023438 160.95s
FLOAT_8_E2 58.720000 99.851562 18.200000 52.929688 8.290000 27.609375 160.38s
FLOAT_8_E3 62.820000 99.937500 22.070000 67.140625 8.160000 28.867188 160.66s
FLOAT_8_E4 62.420000 99.906250 21.750000 62.906250 8.420000 26.601562 160.51s
FLOAT_8_E5 60.120000 99.781250 21.140000 61.679688 9.260000 28.468750 161.37s
FLOAT_8_E6 50.310000 93.375000 16.300000 36.640625 7.850000 19.632812 161.20s
AlexNet cifar100
Title Org_acc Org_gen_acc FGSM_acc FGSM_gen_acc PGD_acc PGD_gen_acc Time
Full 63.440000 40.640625 21.870000 24.437500 7.720000 7.750000 10.40s
INT_2 1.000000 1.031250 1.000000 1.031250 1.000000 1.031250 27.93s
INT_3 0.990000 1.359375 1.000000 1.031250 1.000000 1.031250 28.42s
INT_4 15.960000 6.398438 3.190000 3.492188 2.510000 2.867188 25.97s
INT_5 46.760000 18.492188 14.760000 9.968750 11.850000 8.093750 24.20s
INT_6 58.120000 30.093750 20.090000 16.859375 14.060000 12.265625 27.94s
INT_7 61.680000 38.125000 21.950000 22.531250 11.510000 12.273438 27.70s
INT_8 62.860000 40.031250 21.980000 24.000000 9.650000 10.015625 27.06s
INT_9 63.160000 40.546875 22.110000 24.062500 8.480000 8.578125 22.35s
INT_10 63.360000 40.757812 22.030000 24.242188 8.070000 8.148438 28.25s
INT_11 63.360000 40.625000 22.040000 24.406250 7.890000 7.875000 24.18s
INT_12 63.380000 40.671875 21.960000 24.445312 7.780000 7.703125 25.65s
INT_13 63.440000 40.710938 21.900000 24.343750 7.780000 7.703125 26.41s
INT_14 63.430000 40.664062 21.900000 24.359375 7.750000 7.765625 28.50s
INT_15 63.450000 40.656250 21.910000 24.414062 7.770000 7.695312 29.40s
INT_16 63.440000 40.656250 21.920000 24.406250 7.700000 7.718750 28.96s
POT_2 1.000000 1.031250 1.000000 1.031250 1.000000 1.031250 34.26s
POT_3 1.330000 1.195312 1.020000 1.031250 1.030000 1.031250 32.69s
POT_4 24.460000 9.398438 6.620000 6.632812 4.390000 4.710938 34.70s
POT_5 25.240000 10.554688 7.100000 6.500000 4.050000 4.710938 42.45s
POT_6 24.910000 10.031250 7.150000 6.601562 4.140000 4.476562 60.39s
POT_7 24.810000 9.890625 6.970000 6.875000 4.010000 4.710938 92.57s
POT_8 25.050000 10.539062 7.430000 6.320312 4.310000 4.734375 162.23s
FLOAT_3_E1 1.960000 1.351562 1.390000 1.648438 1.220000 1.664062 33.76s
FLOAT_4_E1 15.090000 4.226562 7.150000 3.039062 5.430000 2.765625 35.21s
FLOAT_4_E2 18.310000 4.492188 4.130000 3.125000 3.650000 2.937500 35.69s
FLOAT_5_E1 38.590000 14.789062 15.370000 7.773438 9.790000 5.179688 44.28s
FLOAT_5_E2 44.250000 16.828125 13.450000 9.695312 8.910000 7.000000 44.45s
FLOAT_5_E3 50.410000 23.187500 16.520000 13.796875 8.270000 7.812500 44.88s
FLOAT_6_E1 48.020000 21.312500 16.270000 6.773438 8.780000 3.679688 61.10s
FLOAT_6_E2 55.150000 25.289062 17.050000 14.234375 9.010000 7.429688 61.39s
FLOAT_6_E3 59.950000 38.609375 21.440000 21.617188 9.480000 9.250000 59.84s
FLOAT_6_E4 49.880000 23.218750 16.680000 13.703125 8.140000 7.859375 60.05s
FLOAT_7_E1 51.440000 23.953125 17.860000 10.000000 7.960000 4.132812 94.77s
FLOAT_7_E2 58.410000 29.664062 18.260000 17.640625 8.860000 8.070312 94.29s
FLOAT_7_E3 62.230000 40.257812 21.940000 22.257812 8.560000 7.867188 94.02s
FLOAT_7_E4 60.140000 38.945312 21.520000 21.968750 9.160000 9.093750 94.04s
FLOAT_7_E5 50.190000 23.757812 16.300000 13.882812 7.970000 7.968750 94.16s
FLOAT_8_E1 53.620000 24.234375 18.030000 11.234375 7.520000 4.734375 161.72s
FLOAT_8_E2 58.660000 31.140625 18.620000 19.093750 8.720000 8.140625 161.20s
FLOAT_8_E3 62.860000 40.531250 22.000000 24.265625 8.140000 7.828125 160.96s
FLOAT_8_E4 62.490000 40.359375 22.000000 22.437500 8.470000 7.867188 161.25s
FLOAT_8_E5 60.040000 38.804688 21.310000 21.531250 9.210000 9.156250 161.08s
FLOAT_8_E6 50.440000 22.921875 16.300000 13.984375 7.900000 7.789062 160.57s
ResNet_18 cifar100
Title Org_acc Org_gen_acc FGSM_acc FGSM_gen_acc PGD_acc PGD_gen_acc Time
Full 75.720000 48.468750 18.720000 51.976562 4.600000 36.257812 41.51s
INT_2 1.050000 1.062500 1.000000 1.062500 1.000000 1.062500 105.76s
INT_3 1.110000 0.937500 1.000000 0.992188 1.000000 0.992188 107.01s
INT_4 6.050000 5.179688 1.050000 2.148438 0.920000 1.468750 90.83s
INT_5 50.970000 30.781250 10.850000 33.718750 5.720000 23.820312 85.06s
INT_6 70.860000 44.273438 17.920000 48.929688 6.760000 36.203125 104.06s
INT_7 74.530000 49.281250 17.560000 52.179688 5.550000 38.210938 103.49s
INT_8 75.420000 48.562500 18.590000 52.656250 5.420000 36.968750 107.94s
INT_9 75.290000 48.656250 18.520000 52.882812 5.150000 36.921875 102.77s
INT_10 75.500000 48.429688 18.590000 52.679688 5.130000 36.890625 99.65s
INT_11 75.420000 48.929688 18.490000 52.796875 4.930000 36.820312 101.77s
INT_12 75.500000 48.671875 18.560000 52.546875 4.890000 36.789062 104.04s
INT_13 75.400000 48.750000 18.450000 52.781250 4.960000 36.718750 107.09s
INT_14 75.380000 48.812500 18.510000 52.757812 5.080000 36.734375 85.38s
INT_15 75.430000 48.625000 18.560000 52.664062 4.970000 36.804688 106.51s
INT_16 75.380000 48.828125 18.490000 52.812500 5.020000 36.789062 104.30s
POT_2 1.000000 1.062500 1.000000 1.062500 1.000000 1.062500 133.99s
POT_3 1.000000 1.632812 1.000000 0.992188 1.000000 0.992188 143.01s
POT_4 19.970000 23.000000 4.930000 5.875000 3.550000 3.562500 172.34s
POT_5 17.190000 20.859375 4.650000 5.976562 3.320000 3.960938 237.55s
POT_6 18.690000 21.367188 5.060000 3.843750 3.370000 2.828125 364.69s
POT_7 15.080000 24.492188 5.560000 6.539062 4.090000 4.414062 624.68s
POT_8 20.530000 20.875000 5.170000 6.101562 3.980000 3.890625 1139.09s
FLOAT_3_E1 0.960000 1.367188 0.830000 0.953125 1.190000 0.820312 144.80s
FLOAT_4_E1 25.630000 16.054688 5.740000 7.226562 3.630000 4.921875 172.17s
FLOAT_4_E2 15.960000 12.500000 1.980000 6.554688 1.360000 5.132812 171.87s
FLOAT_5_E1 53.420000 33.117188 13.590000 24.507812 7.510000 17.632812 237.25s
FLOAT_5_E2 53.880000 38.500000 7.500000 28.742188 4.260000 17.109375 237.45s
FLOAT_5_E3 58.640000 43.281250 14.920000 41.570312 6.500000 31.171875 237.14s
FLOAT_6_E1 59.440000 42.421875 14.460000 32.320312 6.570000 23.421875 363.70s
FLOAT_6_E2 64.600000 43.406250 11.560000 42.757812 5.270000 27.898438 364.25s
FLOAT_6_E3 71.680000 46.843750 18.480000 50.414062 6.330000 36.750000 364.37s
FLOAT_6_E4 58.550000 43.406250 14.920000 42.132812 6.250000 31.445312 365.01s
FLOAT_7_E1 64.430000 40.851562 15.280000 36.179688 6.430000 26.046875 625.27s
FLOAT_7_E2 68.690000 45.304688 14.340000 45.789062 6.220000 32.796875 625.91s
FLOAT_7_E3 74.380000 49.187500 18.570000 52.671875 5.480000 36.343750 627.07s
FLOAT_7_E4 71.490000 47.078125 18.710000 50.796875 6.470000 37.296875 627.11s
FLOAT_7_E5 57.370000 43.585938 14.750000 41.679688 6.410000 31.289062 626.12s
FLOAT_8_E1 65.180000 42.164062 15.020000 36.500000 6.260000 26.226562 1139.44s
FLOAT_8_E2 69.890000 46.632812 14.750000 47.617188 6.060000 34.531250 1140.83s
FLOAT_8_E3 75.280000 48.398438 18.450000 52.445312 5.190000 36.156250 1141.41s
FLOAT_8_E4 74.490000 49.156250 18.580000 52.726562 5.580000 36.234375 1141.37s
FLOAT_8_E5 71.590000 47.515625 18.680000 50.570312 6.420000 36.929688 1142.54s
FLOAT_8_E6 59.890000 42.718750 14.770000 40.554688 6.410000 30.367188 1142.01s
import torch
import torch.nn as nn
import time
from autoattack import AutoAttack
from advertorch.attacks import GradientSignAttack, LinfBasicIterativeAttack, \
    LinfPGDAttack, LinfMomentumIterativeAttack, \
    CarliniWagnerL2Attack, JacobianSaliencyMapAttack, ElasticNetL1Attack


# Simple in-memory dataset wrapping pre-generated image/label tensors
class MyDataset(torch.utils.data.Dataset):
    def __init__(self, images, labels):
        self.images = images
        self.labels = labels

    def __len__(self):
        return len(self.images)

    def __getitem__(self, index):
        image = self.images[index]
        label = self.labels[index]
        return image, label


# Build a pseudo dataset from the generator's samples
def build_gen_loader(generator, batchSize, iters, latent_dim, nClasses):
    gen_images = []
    gen_labels = []
    for i in range(iters):
        z = torch.randn(batchSize, latent_dim).cuda()
        labels = torch.randint(0, nClasses, (batchSize,)).cuda()
        z = z.contiguous()
        labels = labels.contiguous()
        images = generator(z, labels).detach()
        gen_images.append(images)
        gen_labels.append(labels)
    gen_images = torch.cat(gen_images)
    gen_labels = torch.cat(gen_labels)
    gen_dataset = MyDataset(gen_images, gen_labels)
    gen_loader = torch.utils.data.DataLoader(gen_dataset, batch_size=batchSize, shuffle=True)
    return gen_loader
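# Illustrative sketch, not part of the original file: the *_gen_acc columns in
# the tables above can be read as plain accuracy of the (possibly quantized)
# model on samples drawn from gen_loader, along these lines:
def _gen_accuracy_sketch(model, gen_loader):
    correct, total = 0, 0
    with torch.no_grad():
        for images, labels in gen_loader:  # tensors already live on the GPU
            preds = model(images).max(dim=1)[1]
            correct += preds.eq(labels).sum().item()
            total += labels.size(0)
    return 100. * correct / total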
# Evaluate robust accuracy with AutoAttack over the whole test set
def test_autoattack(model, testloader, norm='Linf', eps=8/255, version='standard', verbose=True):
    start_time = time.time()
    adversary = AutoAttack(model, norm=norm, eps=eps, version=version, verbose=verbose)
    if version == 'custom':
        adversary.attacks_to_run = ['apgd-ce', 'apgd-t']
        adversary.apgd.n_restarts = 1
        adversary.apgd_targeted.n_restarts = 1
    x_test = [x for (x, y) in testloader]
    x_test = torch.cat(x_test, 0)
    y_test = [y for (x, y) in testloader]
    y_test = torch.cat(y_test, 0)
    with torch.no_grad():
        x_adv, y_adv = adversary.run_standard_evaluation(x_test, y_test, bs=testloader.batch_size, return_labels=True)
    adv_correct = torch.sum(y_adv == y_test).data
    total = y_test.shape[0]
    rob_acc = adv_correct / total
    timeinterval = time.time() - start_time
    print('Attack Strength:%.4f \t AutoAttack Acc:%.3f (%d/%d)\t Time:%.2fs' % (eps, rob_acc, adv_correct, total, timeinterval))
# Map an attack name to a configured advertorch adversary; c is the attack
# budget (eps) or initial constant, depending on the attack
def get_adversary(model, attack_type, c, num_classes, loss_fn=nn.CrossEntropyLoss()):
    if attack_type == "pgd":
        adversary = LinfPGDAttack(
            model, loss_fn=loss_fn, eps=c,
            nb_iter=10, eps_iter=c/4, rand_init=True, clip_min=0., clip_max=1.,
            targeted=False)
    elif attack_type == "fgsm":
        adversary = GradientSignAttack(
            model, loss_fn=loss_fn, eps=c,
            clip_min=0., clip_max=1., targeted=False)
    elif attack_type == "mim":
        adversary = LinfMomentumIterativeAttack(
            model, loss_fn=loss_fn, eps=c,
            nb_iter=40, eps_iter=c/10, clip_min=0., clip_max=1.,
            targeted=False)
    elif attack_type == "bim":
        adversary = LinfBasicIterativeAttack(
            model, loss_fn=loss_fn, eps=c,
            nb_iter=40, eps_iter=c/10, clip_min=0., clip_max=1.,
            targeted=False)
    elif attack_type == "ela":
        adversary = ElasticNetL1Attack(
            model, initial_const=c, confidence=0.1, max_iterations=100, clip_min=0., clip_max=1.,
            targeted=False, num_classes=num_classes)
    elif attack_type == "jsma":
        adversary = JacobianSaliencyMapAttack(
            model, clip_min=0., clip_max=1., num_classes=num_classes, gamma=c)
    elif attack_type == "cw":
        adversary = CarliniWagnerL2Attack(
            model, confidence=0.01, max_iterations=1000, clip_min=0., clip_max=1., learning_rate=0.01,
            targeted=False, num_classes=num_classes, binary_search_steps=1, initial_const=c)
    elif attack_type is None:
        adversary = None
    else:
        raise NotImplementedError
    return adversary
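# Hypothetical single-batch usage of get_adversary (the eps value is an
# illustrative assumption, matching the Linf budget used by test_autoattack):
def _fgsm_batch_sketch(model, inputs, targets, num_classes, eps=8/255):
    # Craft one batch of FGSM adversarial examples, as used for the
    # FGSM_acc columns in the tables above
    adversary = get_adversary(model, "fgsm", eps, num_classes)
    return adversary.perturb(inputs.cuda(), targets.cuda())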
# Robust accuracy of `model` on `testloader` under the given attack
# (attack_type=None measures clean accuracy)
def test_robust(model, attack_type, c, num_classes, testloader, loss_fn=nn.CrossEntropyLoss(), is_return=True):
    start_time = time.time()
    adversary = get_adversary(model, attack_type, c, num_classes, loss_fn)
    adv_correct = 0
    total = 0
    for batch_idx, (inputs, targets) in enumerate(testloader):
        inputs, targets = inputs.cuda(), targets.cuda()
        total += targets.size(0)
        if attack_type is None:  # clean samples
            advs = inputs
            with torch.no_grad():
                adv_outputs = model(advs)
        else:
            advs = adversary.perturb(inputs, targets).detach()
            with torch.no_grad():
                adv_outputs = adversary.predict(advs)
        adv_preds = adv_outputs.max(dim=1, keepdim=False)[1]
        adv_correct += adv_preds.eq(targets.data).cpu().sum()
    rob_acc = 100. * float(adv_correct) / total
    timeinterval = time.time() - start_time
    print('Attack Strength:%.4f Acc:%.3f (%d/%d) Time:%.2fs' % (c, rob_acc, adv_correct, total, timeinterval))
    if is_return:
        return rob_acc
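A minimal driver sketch, an assumption rather than part of the commit: it presumes a trained model, its generator, and a test_loader obtained as in the repo's other scripts, with batch size, iters, latent_dim and eps chosen purely for illustration. It shows how the helpers above combine to fill one table row: clean, FGSM and PGD accuracy on both the real test set and the generator's pseudo data.

gen_loader = build_gen_loader(generator, batchSize=128, iters=8, latent_dim=100, nClasses=10)
for loader, tag in [(test_loader, 'acc'), (gen_loader, 'gen_acc')]:
    for attack in [None, 'fgsm', 'pgd']:
        print('%s_%s' % (attack or 'Org', tag))
        test_robust(model, attack, 8/255, 10, loader)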
#!/bin/bash
name_list="ResNet_152 ResNet_50 ResNet_18 MobileNetV2 Inception_BN VGG_19 VGG_16 AlexNet_BN AlexNet"
if [ ! -d "ckpt_full_gen/cifar10" ]; then
mkdir -p "ckpt_full_gen/cifar10"
fi
for name in $name_list; do
if [ ! -d "ret_one/$name" ]; then
mkdir -p "ret_one/$name"
fi
sbatch --job-name=$name -o "ret_one/%x/%j.out" -e "ret_one/%x/%j.err" --export=Model=$name,Dataset=cifar10,Quant=False gen_one.slurm
sbatch --job-name=$name -o "ret_one/%x/%j.out" -e "ret_one/%x/%j.err" --export=Model=$name,Dataset=cifar10 gen_one.slurm
done
#!/bin/bash
name_list="ResNet_152 ResNet_50 ResNet_18 MobileNetV2 Inception_BN VGG_19 VGG_16 AlexNet_BN AlexNet"
if [ ! -d "ckpt_full_gen/cifar100" ]; then
mkdir -p "ckpt_full_gen/cifar100"
fi
for name in $name_list; do
if [ ! -d "ret_one/$name" ]; then
mkdir -p "ret_one/$name"
fi
sbatch --job-name=$name -o "ret_one/%x/%j.out" -e "ret_one/%x/%j.err" --export=Model=$name,Dataset=cifar100,Quant=False gen_one.slurm
sbatch --job-name=$name -o "ret_one/%x/%j.out" -e "ret_one/%x/%j.err" --export=Model=$name,Dataset=cifar100 gen_one.slurm
done
@@ -2,7 +2,4 @@
if [ ! -d "ret_one/$1" ]; then
mkdir -p "ret_one/$1"
fi
if [ ! -d "ckpt_full_gen/cifar10/$1" ]; then
mkdir -p "ckpt_full_gen/cifar10/$1"
fi
sbatch --job-name=$1 -o "ret_one/%x/%j.out" -e "ret_one/%x/%j.err" --export=Model=$1,Dataset=cifar10,Quant=False gen_one.slurm
sbatch --job-name=$1 -o "ret_one/%x/%j.out" -e "ret_one/%x/%j.err" --export=Model=$1,Dataset=cifar10 gen_one.slurm
@@ -2,7 +2,4 @@
if [ ! -d "ret_one/$1" ]; then
mkdir -p "ret_one/$1"
fi
if [ ! -d "ckpt_full_gen/cifar100/$1" ]; then
mkdir -p "ckpt_full_gen/cifar100/$1"
fi
sbatch --job-name=$1 -o "ret_one/%x/%j.out" -e "ret_one/%x/%j.err" --export=Model=$1,Dataset=cifar100,Quant=False gen_one.slurm
sbatch --job-name=$1 -o "ret_one/%x/%j.out" -e "ret_one/%x/%j.err" --export=Model=$1,Dataset=cifar100 gen_one.slurm
@@ -2,7 +2,5 @@
if [ ! -d "ret_one/$1" ]; then
mkdir -p "ret_one/$1"
fi
if [ ! -d "ckpt_quant_gen/cifar10/$1" ]; then
mkdir -p "ckpt_quant_gen/cifar10/$1"
fi
sbatch --job-name=$1 -o "ret_one/%x/%j.out" -e "ret_one/%x/%j.err" --export=Model=$1,Dataset=cifar10,Quant=True gen_one.slurm
sbatch --job-name=$1 -o "ret_one/%x/%j.out" -e "ret_one/%x/%j.err" --export=Model=$1,Dataset=cifar10 loss_one.slurm
@@ -4,8 +4,6 @@ for name in $name_list; do
if [ ! -d "ret_one/$name" ]; then
mkdir -p "ret_one/$name"
fi
if [ ! -d "ckpt_quant/cifar10/$name" ]; then
mkdir -p "ckpt_quant/cifar10/$name"
fi
sbatch --job-name=$name -o "ret_one/%x/%j.out" -e "ret_one/%x/%j.err" --export=Model=$name,Dataset=cifar10 ptq_one.slurm
done
@@ -4,8 +4,6 @@ for name in $name_list; do
if [ ! -d "ret_one/$name" ]; then
mkdir -p "ret_one/$name"
fi
if [ ! -d "ckpt_quant/cifar100/$name" ]; then
mkdir -p "ckpt_quant/cifar100/$name"
fi
sbatch --job-name=$name -o "ret_one/%x/%j.out" -e "ret_one/%x/%j.err" --export=Model=$name,Dataset=cifar100 ptq_one.slurm
done
@@ -2,7 +2,5 @@
if [ ! -d "ret_one/$1" ]; then
mkdir -p "ret_one/$1"
fi
if [ ! -d "ckpt_quant/cifar10/$1" ]; then
mkdir -p "ckpt_quant/cifar10/$1"
fi
sbatch --job-name=$1 -o "ret_one/%x/%j.out" -e "ret_one/%x/%j.err" --export=Model=$1,Dataset=cifar10 ptq_one.slurm
@@ -2,7 +2,5 @@
if [ ! -d "ret_one/$1" ]; then
mkdir -p "ret_one/$1"
fi
if [ ! -d "ckpt_quant/cifar100/$1" ]; then
mkdir -p "ckpt_quant/cifar100/$1"
fi
sbatch --job-name=$1 -o "ret_one/%x/%j.out" -e "ret_one/%x/%j.err" --export=Model=$1,Dataset=cifar100 ptq_one.slurm
#!/bin/bash
name_list="ResNet_152 ResNet_50 ResNet_18 MobileNetV2 Inception_BN VGG_19 VGG_16 AlexNet_BN AlexNet"
if [ ! -d "ckpt_quant_gen/cifar10" ]; then
mkdir -p "ckpt_quant_gen/cifar10"
fi
for name in $name_list; do
if [ ! -d "ret_one/$name" ]; then
mkdir -p "ret_one/$name"
fi
sbatch --job-name=$name -o "ret_one/%x/%j.out" -e "ret_one/%x/%j.err" --export=Model=$name,Dataset=cifar10,Quant=True gen_one.slurm
sbatch --job-name=$name -o "ret_one/%x/%j.out" -e "ret_one/%x/%j.err" --export=Model=$name,Dataset=cifar10 robust_one.slurm
done
#!/bin/bash
name_list="ResNet_152 ResNet_50 ResNet_18 MobileNetV2 Inception_BN VGG_19 VGG_16 AlexNet_BN AlexNet"
if [ ! -d "ckpt_quant_gen/cifar100" ]; then
mkdir -p "ckpt_quant_gen/cifar100"
fi
for name in $name_list; do
if [ ! -d "ret_one/$name" ]; then
mkdir -p "ret_one/$name"
fi
sbatch --job-name=$name -o "ret_one/%x/%j.out" -e "ret_one/%x/%j.err" --export=Model=$name,Dataset=cifar100,Quant=True gen_one.slurm
sbatch --job-name=$name -o "ret_one/%x/%j.out" -e "ret_one/%x/%j.err" --export=Model=$name,Dataset=cifar100 robust_one.slurm
done
@@ -2,7 +2,5 @@
if [ ! -d "ret_one/$1" ]; then
mkdir -p "ret_one/$1"
fi
if [ ! -d "ckpt_quant_gen/cifar100/$1" ]; then
mkdir -p "ckpt_quant_gen/cifar100/$1"
fi
sbatch --job-name=$1 -o "ret_one/%x/%j.out" -e "ret_one/%x/%j.err" --export=Model=$1,Dataset=cifar100,Quant=True gen_one.slurm
sbatch --job-name=$1 -o "ret_one/%x/%j.out" -e "ret_one/%x/%j.err" --export=Model=$1,Dataset=cifar100 robust_one.slurm
#!/bin/bash
if [ ! -d "ret_one/$1" ]; then
mkdir -p "ret_one/$1"
fi
sbatch --job-name=$1 -o "ret_one/%x/%j.out" -e "ret_one/%x/%j.err" --export=Model=$1,Dataset=cifar100 robust_one.slurm
#!/bin/bash
name_list="ResNet_152 ResNet_50 ResNet_18 MobileNetV2 Inception_BN VGG_19 VGG_16 AlexNet_BN AlexNet"
if [ ! -d "ckpt_full/cifar10" ]; then
mkdir -p "ckpt_full/cifar10"
fi
for name in $name_list; do
if [ ! -d "ret_one/$name" ]; then
mkdir -p "ret_one/$name"
......
#!/bin/bash
name_list="ResNet_152 ResNet_50 ResNet_18 MobileNetV2 Inception_BN VGG_19 VGG_16 AlexNet_BN AlexNet"
if [ ! -d "ckpt_full/cifar100" ]; then
mkdir -p "ckpt_full/cifar100"
fi
for name in $name_list; do
if [ ! -d "ret_one/$name" ]; then
mkdir -p "ret_one/$name"
......
@@ -2,7 +2,5 @@
if [ ! -d "ret_one/$1" ]; then
mkdir -p "ret_one/$1"
fi
if [ ! -d "ckpt_full/cifar10" ]; then
mkdir -p "ckpt_full/cifar10"
fi
sbatch --job-name=$1 -o "ret_one/%x/%j.out" -e "ret_one/%x/%j.err" --export=Model=$1,Dataset=cifar10 train_one.slurm
@@ -2,7 +2,5 @@
if [ ! -d "ckpt_full/cifar100" ]; then
mkdir -p "ckpt_full/cifar100"
fi
if [ ! -d "ret_one/$1" ]; then
mkdir -p "ret_one/$1"
fi
sbatch --job-name=$1 -o "ret_one/%x/%j.out" -e "ret_one/%x/%j.err" --export=Model=$1,Dataset=cifar100 train_one.slurm
#!/bin/bash
if [ ! -d "ret_one/$1" ]; then
mkdir -p "ret_one/$1"
fi
sbatch --job-name=$1 -o "ret_one/%x/%j.out" -e "ret_one/%x/%j.err" --export=Model=$1,Dataset=cifar10 robust_one_try.slurm
@@ -51,7 +51,7 @@ module load git/2.17.1
module load vim/8.1.2424
##- language
module load python3/3.6.8
module load python3/3.8.16
##- CUDA
# module load cuda-cudnn/10.2-7.6.5
......