1 Requirements
GitHub - xhh890921/cnn-captcha-pytorch: 小黑黑讲AI, hands-on AI project "Captcha Recognition"

2 Interface
Meaning

In the optim.Adam interface, the lr parameter is the learning rate. The learning rate is a key hyperparameter of an optimization algorithm: it sets the step size by which the model parameters are updated along the gradient direction at each iteration. In short, it controls how fast the model learns.

How it works

Take gradient descent as an example. At each iteration the parameters are generally updated as

    θ ← θ − η · ∇θ L(θ)

where θ denotes the model parameters, η is the learning rate, and ∇θ L(θ) is the gradient of the loss function with respect to the parameters. In the Adam optimizer the update is more involved than plain gradient descent (it maintains first-moment and second-moment estimates of the gradient), but the learning rate lr plays a similar role: Adam uses the first-moment estimate (similar to a mean) and the second-moment estimate (similar to a variance) to adjust the direction and magnitude of each update, and lr then scales the resulting step. When lr is large, the update steps are large and the model moves quickly through parameter space; when lr is small, the steps are small and the model moves slowly.

Effect on training

Learning rate too large: the model may fail to converge during training, and gradients may even explode. For example, when training a neural network, the parameters can be over-updated at every iteration, so the loss grows instead of shrinking. Taking a simple linear regression model as an example, a too-large learning rate can make the model "jump over" the optimum in parameter space, and because each step is so large it may never settle near the optimum again.

Learning rate too small: training becomes very slow and needs many more iterations to converge well, which increases training time and compute cost. For a complex deep model, such as a convolutional network for image recognition, a too-small learning rate can take several times, even tens of times, longer to reach the result a well-chosen learning rate would give.

Choosing a suitable learning rate (see the sketch after this list):

- Rule of thumb: start from common values such as 0.001 or 0.0001 and watch how the model behaves early in training, e.g. how quickly and how stably the loss falls.
- Learning rate scheduling: adjust the learning rate dynamically over the course of training. Early on, a larger learning rate lets the model quickly pick up the coarse patterns in the data; as training proceeds, gradually lowering it lets the parameters be tuned more finely toward the optimum. Common schedules include step decay (lower the rate at fixed training milestones) and cosine annealing (decay the rate along a cosine curve).
- Hyperparameter search: use a search algorithm such as grid search, random search, or the more advanced Bayesian optimization to try learning rates over a range and pick the value that performs best on a validation set.
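To make this concrete, here is a minimal PyTorch sketch of the ideas above. The nn.Linear model and the random data are placeholders, and the StepLR milestones are illustrative values, not settings taken from this project:

```python
import torch
import torch.nn as nn
from torch import optim

# A tiny placeholder model; any nn.Module works the same way.
model = nn.Linear(10, 2)

# lr is the learning rate discussed above; 0.0001 matches this project's config.
optimizer = optim.Adam(model.parameters(), lr=0.0001)

# Optional step decay: multiply lr by gamma every step_size epochs.
scheduler = optim.lr_scheduler.StepLR(optimizer, step_size=50, gamma=0.5)

criterion = nn.CrossEntropyLoss()
for epoch in range(200):
    x = torch.randn(8, 10)         # dummy batch of features
    y = torch.randint(0, 2, (8,))  # dummy class labels
    loss = criterion(model(x), y)
    optimizer.zero_grad()
    loss.backward()
    optimizer.step()
    scheduler.step()               # decay the learning rate once per epoch
```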
3 Example

config.json
```json
{
    "train_data_path": "./data/train-digit/",
    "test_data_path": "./data/test-digit/",
    "train_num": 2000,
    "test_num": 1000,
    "characters": "0123456789",
    "digit_num": 1,
    "img_width": 200,
    "img_height": 100,
    "resize_width": 128,
    "resize_height": 128,
    "batch_size": 128,
    "epoch_num": 200,
    "learning_rate": 0.0001,
    "model_save_path": "./model/",
    "model_name": "captcha.1digit.2k",
    "test_model_path": "./model/captcha.1digit.2k"
}
```

generate.py
```python
# Import the captcha module's ImageCaptcha and the random module
from captcha.image import ImageCaptcha
import random
import json
import os


# generate_data generates captcha images.
# num: number of captcha images to generate
# count: number of characters in each captcha image
# chars: the character set the captcha draws from
# path: directory where the resulting images are saved
# width, height: width and height of each image
def generate_data(num, count, chars, path, width, height):
    # Generate num captcha images
    for i in range(num):
        # Print the index of the current captcha
        print("generate %d" % (i))
        # Create the captcha generator with ImageCaptcha
        generator = ImageCaptcha(width=width, height=height)
        random_str = ""  # the characters drawn on this captcha image
        # Append count characters to random_str
        for j in range(count):
            # Pick each character from chars with random.choice
            choose = random.choice(chars)
            random_str += choose
        # Call generate_image to render the captcha image img
        img = generator.generate_image(random_str)
        # Add noise dots to the captcha
        generator.create_noise_dots(img, '#000000', 4, 40)
        # Add a noise curve to the captcha
        generator.create_noise_curve(img, '#000000')
        # File naming rule: captcha string random_str, underscore, sample index
        file_name = path + random_str + '_' + str(i) + '.jpg'
        img.save(file_name)  # save the file


if __name__ == '__main__':
    # Open the config.json configuration file
    with open('config.json', 'r') as f:
        # Parse the JSON with json.load; the result is saved in config
        config = json.load(f)
    # Read each parameter from the config with config["name"]
    train_data_path = config["train_data_path"]  # training data path
    test_data_path = config["test_data_path"]    # test data path
    train_num = config["train_num"]              # number of training samples
    test_num = config["test_num"]                # number of test samples
    characters = config["characters"]            # captcha character set
    digit_num = config["digit_num"]              # characters per image
    img_width = config["img_width"]              # image width
    img_height = config["img_height"]            # image height
    # Check whether the data folders exist;
    # create them if they do not
    if not os.path.exists(train_data_path):
        os.makedirs(train_data_path)
    if not os.path.exists(test_data_path):
        os.makedirs(test_data_path)
    # Generate the training data
    generate_data(train_num, digit_num, characters,
                  train_data_path, img_width, img_height)
    # Generate the test data
    generate_data(test_num, digit_num, characters,
                  test_data_path, img_width, img_height)
```
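After generate.py finishes, a quick check of the output folder confirms the images and their label-bearing file names. This is a sketch assuming the default train_data_path from config.json; the example names in the comments are hypothetical:

```python
import os

files = os.listdir("./data/train-digit/")
print(len(files))  # expected: 2000, the train_num value from config.json
print(files[:3])   # e.g. ['0_12.jpg', '3_840.jpg', ...]: the label precedes the underscore
```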
dataset.py

```python
from torch.utils.data import Dataset
from torch.utils.data import DataLoader
from torchvision import transforms
from PIL import Image
import torch
import json
import os


# CaptchaDataset inherits Dataset and reads the captcha data
class CaptchaDataset(Dataset):
    # __init__ receives the data directory data_dir, the data transform
    # object transform, and the captcha character set characters
    def __init__(self, data_dir, transform, characters):
        self.file_list = list()  # paths of all samples
        # List every file in data_dir with os.listdir
        files = os.listdir(data_dir)
        for file in files:  # iterate over files
            # Join the directory and the file name into a path
            path = os.path.join(data_dir, file)
            # Append path to the file_list list
            self.file_list.append(path)
        # Keep the transform object on the instance
        self.transform = transform
        # Build a character-to-index dictionary
        self.char2int = {}
        # Use the externally supplied character set characters
        for i, char in enumerate(characters):
            self.char2int[char] = i

    def __len__(self):
        # Return the number of samples in the dataset;
        # overriding this method enables the len(dataset) syntax
        return len(self.file_list)

    # Given an index, return the corresponding sample and label,
    # so dataset[i] yields the i-th sample
    def __getitem__(self, index):
        file_path = self.file_list[index]  # path of the sample
        # Open the file and convert the image to grayscale with convert('L');
        # color is not needed to read the characters, and dropping it
        # makes the model more robust
        image = Image.open(file_path).convert('L')
        # Apply transform to turn the image data into a tensor
        image = self.transform(image)
        # The character label is the file-name prefix before the underscore
        label_char = os.path.basename(file_path).split('_')[0]
        # After obtaining the character label label_char,
        # convert each character to its index and collect them
        label = list()
        for char in label_char:  # iterate over the string label_char
            label.append(self.char2int[char])
        # Turn label into a tensor to use as the training label
        label = torch.tensor(label, dtype=torch.long)
        return image, label  # return image and label


if __name__ == '__main__':
    with open('config.json', 'r') as f:
        config = json.load(f)
    height = config["resize_height"]  # image height
    width = config["resize_width"]    # image width
    # Define the data transform object transform:
    # a preprocessing pipeline built with transforms.Compose
    # containing the Resize and ToTensor operations
    transform = transforms.Compose(
        [transforms.Resize((height, width)),  # scale the image to the target size
         transforms.ToTensor()])              # convert the image data to a tensor
    data_path = config["train_data_path"]  # training data directory
    characters = config["characters"]      # captcha character set
    batch_size = config["batch_size"]
    epoch_num = config["epoch_num"]
    # Create the CaptchaDataset object dataset
    dataset = CaptchaDataset(data_path, transform, characters)
    # Create the data loader data_load;
    # dataset is the dataset, batch_size sets the mini-batch size,
    # and shuffle=True reshuffles the data every epoch
    data_load = DataLoader(dataset,
                           batch_size=batch_size,
                           shuffle=True)
    # A loop that simulates the data reads of mini-batch gradient descent.
    # The outer loop iterates over the whole training set once per epoch
    for epoch in range(epoch_num):
        print("epoch %d" % (epoch))
        # The inner loop walks the data in mini-batches via the dataloader;
        # batch_idx is the current batch index,
        # data and label hold the batch's samples and labels
        for batch_idx, (data, label) in enumerate(data_load):
            print("batch_idx %d label %s" % (batch_idx, label))
```
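Before wiring the dataset into training, a single sample can be inspected directly. A minimal sketch, assuming generate.py has already populated ./data/train-digit/:

```python
from torchvision import transforms
from dataset import CaptchaDataset

transform = transforms.Compose([transforms.Resize((128, 128)),
                                transforms.ToTensor()])
dataset = CaptchaDataset("./data/train-digit/", transform, "0123456789")
image, label = dataset[0]
print(image.shape)  # torch.Size([1, 128, 128]): one grayscale channel
print(label)        # e.g. tensor([7]) for a file whose name starts with "7_"
```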
model.py

```python
import torch.nn as nn
import json


# CNNModel inherits the Module class from torch.nn
class CNNModel(nn.Module):
    # Define the convolutional neural network.
    # The initializer receives the training image height and width,
    # the number of characters per image digit_num,
    # and the number of output classes class_num
    def __init__(self, height, width, digit_num, class_num):
        super(CNNModel, self).__init__()
        self.digit_num = digit_num  # keep digit_num on the instance
        # The first convolution group conv1 contains one convolution
        # layer, one ReLU activation, and one 2x2 max pooling
        self.conv1 = nn.Sequential(
            # The convolution layer is defined with Conv2d:
            # 1 input channel, 32 output channels, a 3x3 kernel,
            # and padding='same' so the input and output
            # feature maps have the same size
            nn.Conv2d(1, 32, kernel_size=3, padding='same'),
            nn.ReLU(),
            nn.MaxPool2d(2),
            nn.Dropout(0.25))
        # The second convolution group has the same structure as conv1,
        # with 32 input channels and 64 output channels
        self.conv2 = nn.Sequential(
            nn.Conv2d(32, 64, kernel_size=3, padding='same'),
            nn.ReLU(),
            nn.MaxPool2d(2),
            nn.Dropout(0.25))
        # The third convolution group also mirrors conv1,
        # with 64 input channels and 64 output channels
        self.conv3 = nn.Sequential(
            nn.Conv2d(64, 64, kernel_size=3, padding='same'),
            nn.ReLU(),
            nn.MaxPool2d(2),
            nn.Dropout(0.25))
        # After the three convolution groups, compute the number of
        # inputs to the fully connected layer, input_num: the image
        # height and width each divided by 8, times the 64 output
        # feature maps. The division by 8 is because the three 2x2
        # max poolings shrink the height and width to 1/8
        input_num = (height // 8) * (width // 8) * 64
        self.fc1 = nn.Sequential(
            nn.Linear(input_num, 1024),
            nn.ReLU(),
            nn.Dropout(0.25))
        # The output layer has class_num neurons
        self.fc2 = nn.Sequential(
            nn.Linear(1024, class_num),)
        # Training uses the cross-entropy loss CrossEntropyLoss,
        # which applies softmax internally,
        # so softmax is not defined explicitly here

    # Forward pass. The input is a four-dimensional tensor x whose
    # dimensions are batch size, input channels, image height, and width
    def forward(self, x):  # [n, 1, 128, 128]
        # Pass x through each layer in order;
        # every layer changes the shape of the tensor
        out = self.conv1(x)    # [n, 32, 64, 64]
        out = self.conv2(out)  # [n, 64, 32, 32]
        out = self.conv3(out)  # [n, 64, 16, 16]
        # Flatten with view from n*64*16*16 to n*16384
        out = out.view(out.size(0), -1)  # [n, 16384]
        out = self.fc1(out)  # [n, 1024]
        # After 3 convolution groups and 2 fully connected layers
        # the result is an n*class_num tensor
        out = self.fc2(out)  # [n, class_num]
        # Use the digit_num passed at initialization to reshape the
        # final output to n * digit_num * (number of character classes)
        out = out.view(out.size(0), self.digit_num, -1)
        return out


if __name__ == '__main__':
    with open('config.json', 'r') as f:
        config = json.load(f)
    height = config["resize_height"]   # image height
    width = config["resize_width"]     # image width
    characters = config["characters"]  # captcha character set
    digit_num = config["digit_num"]
    class_num = len(characters) * digit_num
    # Create a CNNModel instance
    model = CNNModel(height, width, digit_num, class_num)
    print(model)  # print it to inspect the model structure
    print()
```
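To verify the reshaping at the end of forward, a dummy batch can be pushed through the model. A minimal sketch using this project's config values (128x128 input, digit_num=1, class_num=10):

```python
import torch
from model import CNNModel

model = CNNModel(height=128, width=128, digit_num=1, class_num=10)
x = torch.randn(2, 1, 128, 128)  # a dummy batch of 2 grayscale images
out = model(x)
print(out.shape)  # torch.Size([2, 1, 10]): one 10-way score vector per digit
```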
train.py

```python
# Import the CaptchaDataset class from dataset.py
from dataset import CaptchaDataset
# Import the CNNModel class from model.py
from model import CNNModel

import torch
import torch.nn as nn
from torch.utils.data import DataLoader
from torchvision import transforms
from torch import optim
import json
import os

if __name__ == '__main__':
    # Open the configuration file
    with open('config.json', 'r') as f:
        config = json.load(f)
    # Read resize_height and resize_width, the final scaled
    # height and width of the image data, used to build transform
    height = config["resize_height"]  # image height
    width = config["resize_width"]    # image width
    # Define the data transform object transform:
    # a preprocessing pipeline built with transforms.Compose
    transform = transforms.Compose(
        [transforms.RandomRotation(10),       # random rotation augmentation
         transforms.Resize((height, width)),  # scale the image to the target size
         transforms.ToTensor()])              # convert the image data to a tensor
    train_data_path = config["train_data_path"]  # training data path
    characters = config["characters"]  # captcha character set
    batch_size = config["batch_size"]  # batch size
    epoch_num = config["epoch_num"]    # number of epochs
    digit_num = config["digit_num"]    # characters per image
    learning_rate = config["learning_rate"]  # learning rate
    # class_num equals the size of the character set times digit_num
    class_num = len(characters) * digit_num
    model_save_path = config["model_save_path"]  # model save directory
    model_name = config["model_name"]  # model name
    model_save_name = model_save_path + "/" + model_name
    # Create the model folder
    if not os.path.exists(model_save_path):
        os.makedirs(model_save_path)

    print("resize_height = %d" % (height))
    print("resize_width = %d" % (width))
    print("train_data_path = %s" % (train_data_path))
    print("characters = %s" % (characters))
    print("batch_size = %d" % (batch_size))
    print("epoch_num = %d" % (epoch_num))
    print("digit_num = %d" % (digit_num))
    print("class_num = %d" % (class_num))
    print("learning_rate = %lf" % (learning_rate))
    print("model_save_name = %s" % (model_save_name))
    print()

    # Create the CaptchaDataset object train_data
    train_data = CaptchaDataset(train_data_path, transform, characters)
    # Create the data loader train_load with DataLoader;
    # train_data is the training set, batch_size sets the mini-batch
    # size, and shuffle=True reshuffles the data every epoch
    train_load = DataLoader(train_data,
                            batch_size=batch_size,
                            shuffle=True)
    # With 2000 training samples and a batch size of 128, the data
    # splits into 16 mini-batches: the first 15 hold 128 samples each
    # and the last one holds 80, since 15 * 128 + 80 = 2000.
    # Define the device: use the GPU if CUDA is available, otherwise the CPU
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    # Create a CNNModel object and move it to the device
    model = CNNModel(height, width, digit_num, class_num).to(device)
    model.train()
    # The learning rate must be specified. The default is 0.001; we
    # lower it to 0.0001 because, facing more complex data, a smaller
    # learning rate makes the iterations more stable
    optimizer = optim.Adam(model.parameters(), lr=learning_rate)
    criterion = nn.CrossEntropyLoss()  # create a cross-entropy loss

    print("Begin training:")
    # Raise the number of epochs from 50 to 200
    for epoch in range(epoch_num):  # outer loop: passes over the training set
        # Inner loop: iterate over the data in batches with train_load;
        # batch_idx is the current batch index,
        # (data, label) holds the batch's samples and labels
        for batch_idx, (data, label) in enumerate(train_load):
            # Move data and label to the device
            data, label = data.to(device), label.to(device)
            # Predict the training data with the current model;
            # the result is saved in output
            output = model(data)
            # Compute the loss by accumulating the loss of every
            # captcha digit into loss
            loss = torch.tensor(0.0).to(device)
            for i in range(digit_num):  # loop over the captcha digits
                # The model output for digit i is output[:, i, :]
                # and its label is label[:, i]; the cross-entropy
                # criterion computes that digit's loss, which is
                # accumulated into loss
                loss += criterion(output[:, i, :], label[:, i])
            loss.backward()        # compute the gradients of the loss
            optimizer.step()       # update the model parameters
            optimizer.zero_grad()  # clear the gradients for the next iteration
            # Compute the accuracy acc of this training batch
            predicted = torch.argmax(output, dim=2)
            correct = (predicted == label).all(dim=1).sum().item()
            acc = correct / data.size(0)
            # Within each epoch, print the loss every 10 batches
            if batch_idx % 10 == 0:
                print(f"Epoch {epoch + 1}/{epoch_num} "
                      f"| Batch {batch_idx}/{len(train_load)} "
                      f"| Loss: {loss.item():.4f} "
                      f"| accuracy {correct}/{data.size(0)}={acc:.3f}")
        # Save a checkpoint model every 10 epochs for debugging
        if (epoch + 1) % 10 == 0:
            checkpoint = model_save_path + "/check.epoch" + str(epoch + 1)
            torch.save(model.state_dict(), checkpoint)
            print("checkpoint saved : %s" % (checkpoint))
    # Finally, save the trained model to the configured path
    torch.save(model.state_dict(), model_save_name)
    print("model saved : %s" % (model_save_name))
```

test.py
```python
from dataset import CaptchaDataset
from model import CNNModel

import torch
from torch.utils.data import DataLoader
import torchvision.transforms as transforms
import json

if __name__ == '__main__':
    with open('config.json', 'r') as f:
        config = json.load(f)
    height = config["resize_height"]  # image height
    width = config["resize_width"]    # image width
    # Define the data transform object transform:
    # scale the image to the target size and convert it to a tensor
    transform = transforms.Compose(
        [transforms.Resize((height, width)),
         transforms.ToTensor()])
    test_data_path = config["test_data_path"]  # test data directory
    characters = config["characters"]  # captcha character set
    digit_num = config["digit_num"]
    class_num = len(characters) * digit_num
    test_model_path = config["test_model_path"]

    print("resize_height = %d" % (height))
    print("resize_width = %d" % (width))
    print("test_data_path = %s" % (test_data_path))
    print("characters = %s" % (characters))
    print("digit_num = %d" % (digit_num))
    print("class_num = %d" % (class_num))
    print("test_model_path = %s" % (test_model_path))
    print()

    # Build the test dataset with CaptchaDataset
    test_data = CaptchaDataset(test_data_path, transform, characters)
    # Read test_data with DataLoader; with no extra arguments
    # it yields the samples one at a time
    test_loader = DataLoader(test_data)
    # Define the device: use the GPU if CUDA is available, otherwise the CPU
    device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
    # Create a CNNModel object and move it to the device
    model = CNNModel(height, width, digit_num, class_num).to(device)
    model.eval()
    # Load the trained model file with load_state_dict
    model.load_state_dict(torch.load(test_model_path))

    right = 0  # number of correctly predicted samples
    all = 0    # total number of samples
    # Iterate over the data in test_loader;
    # x is the sample's feature tensor and y is its label
    for (x, y) in test_loader:
        x, y = x.to(device), y.to(device)  # move the data to the device
        pred = model(x)  # predict x with the model; the result is pred
        # pred.argmax(dim=2).squeeze(0) is the predicted character
        # sequence and y.squeeze(0) is the ground-truth label
        if torch.equal(pred.argmax(dim=2).squeeze(0),
                       y.squeeze(0)):
            right += 1  # if they match, increment right
        all += 1  # increment all on every iteration
    # After the loop, compute the model's accuracy
    acc = right * 1.0 / all
    print("test accuracy %d / %d = %.3lf" % (right, all, acc))
```
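test.py reports only the aggregate accuracy; decoding a single image can also be useful. A minimal inference sketch, assuming the trained model file from config.json and a hypothetical test image name (generate.py names files label_index.jpg):

```python
import torch
from PIL import Image
from torchvision import transforms
from model import CNNModel

characters = "0123456789"
transform = transforms.Compose([transforms.Resize((128, 128)),
                                transforms.ToTensor()])

model = CNNModel(height=128, width=128, digit_num=1, class_num=10)
model.load_state_dict(torch.load("./model/captcha.1digit.2k",
                                 map_location="cpu"))
model.eval()

# "7_0.jpg" is a hypothetical file name produced by generate.py
image = Image.open("./data/test-digit/7_0.jpg").convert('L')
x = transform(image).unsqueeze(0)  # add the batch dimension: [1, 1, 128, 128]
with torch.no_grad():
    pred = model(x).argmax(dim=2)  # [1, digit_num]
print("".join(characters[i] for i in pred[0].tolist()))  # e.g. "7"
```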
```
D:\Python310\python.exe D:/project/PycharmProjects/CNN/train.py
resize_height = 128
resize_width = 128
train_data_path = ./data/train-digit/
characters = 0123456789
batch_size = 128
epoch_num = 200
digit_num = 1
class_num = 10
learning_rate = 0.000100
model_save_name = ./model//captcha.1digit.2k

Begin training:
Epoch 1/200 | Batch 0/16 | Loss: 2.3091 | accuracy 15/128=0.117
Epoch 1/200 | Batch 10/16 | Loss: 2.3238 | accuracy 10/128=0.078
Epoch 2/200 | Batch 0/16 | Loss: 2.3016 | accuracy 14/128=0.109
Epoch 2/200 | Batch 10/16 | Loss: 2.3000 | accuracy 15/128=0.117
Epoch 3/200 | Batch 0/16 | Loss: 2.3062 | accuracy 13/128=0.102
Epoch 3/200 | Batch 10/16 | Loss: 2.3053 | accuracy 12/128=0.094
Epoch 4/200 | Batch 0/16 | Loss: 2.3071 | accuracy 15/128=0.117
Epoch 4/200 | Batch 10/16 | Loss: 2.3018 | accuracy 18/128=0.141
Epoch 5/200 | Batch 0/16 | Loss: 2.2999 | accuracy 14/128=0.109
Epoch 5/200 | Batch 10/16 | Loss: 2.3003 | accuracy 17/128=0.133
Epoch 6/200 | Batch 0/16 | Loss: 2.3056 | accuracy 10/128=0.078
Epoch 6/200 | Batch 10/16 | Loss: 2.3008 | accuracy 17/128=0.133
Epoch 7/200 | Batch 0/16 | Loss: 2.3007 | accuracy 10/128=0.078
Epoch 7/200 | Batch 10/16 | Loss: 2.3061 | accuracy 10/128=0.078
Epoch 8/200 | Batch 0/16 | Loss: 2.3027 | accuracy 16/128=0.125
Epoch 8/200 | Batch 10/16 | Loss: 2.3041 | accuracy 11/128=0.086
Epoch 9/200 | Batch 0/16 | Loss: 2.3063 | accuracy 14/128=0.109
Epoch 9/200 | Batch 10/16 | Loss: 2.3000 | accuracy 12/128=0.094
Epoch 10/200 | Batch 0/16 | Loss: 2.2981 | accuracy 17/128=0.133
Epoch 10/200 | Batch 10/16 | Loss: 2.3018 | accuracy 17/128=0.133
checkpoint saved : ./model//check.epoch10
Epoch 11/200 | Batch 0/16 | Loss: 2.3048 | accuracy 13/128=0.102
Epoch 11/200 | Batch 10/16 | Loss: 2.3009 | accuracy 18/128=0.141
Epoch 12/200 | Batch 0/16 | Loss: 2.3007 | accuracy 5/128=0.039
Epoch 12/200 | Batch 10/16 | Loss: 2.3052 | accuracy 13/128=0.102
Epoch 13/200 | Batch 0/16 | Loss: 2.3016 | accuracy 15/128=0.117
Epoch 13/200 | Batch 10/16 | Loss: 2.2970 | accuracy 16/128=0.125
Epoch 14/200 | Batch 0/16 | Loss: 2.2986 | accuracy 19/128=0.148
Epoch 14/200 | Batch 10/16 | Loss: 2.3021 | accuracy 14/128=0.109
Epoch 15/200 | Batch 0/16 | Loss: 2.2987 | accuracy 17/128=0.133
Epoch 15/200 | Batch 10/16 | Loss: 2.3041 | accuracy 14/128=0.109
Epoch 16/200 | Batch 0/16 | Loss: 2.2994 | accuracy 16/128=0.125
Epoch 16/200 | Batch 10/16 | Loss: 2.3019 | accuracy 16/128=0.125
Epoch 17/200 | Batch 0/16 | Loss: 2.2933 | accuracy 14/128=0.109
Epoch 17/200 | Batch 10/16 | Loss: 2.2991 | accuracy 12/128=0.094
Epoch 18/200 | Batch 0/16 | Loss: 2.3012 | accuracy 16/128=0.125
Epoch 18/200 | Batch 10/16 | Loss: 2.3045 | accuracy 13/128=0.102
Epoch 19/200 | Batch 0/16 | Loss: 2.2907 | accuracy 25/128=0.195
Epoch 19/200 | Batch 10/16 | Loss: 2.3016 | accuracy 10/128=0.078
Epoch 20/200 | Batch 0/16 | Loss: 2.3050 | accuracy 13/128=0.102
Epoch 20/200 | Batch 10/16 | Loss: 2.2988 | accuracy 14/128=0.109
checkpoint saved : ./model//check.epoch20
Epoch 21/200 | Batch 0/16 | Loss: 2.2999 | accuracy 17/128=0.133
Epoch 21/200 | Batch 10/16 | Loss: 2.2937 | accuracy 15/128=0.117
Epoch 22/200 | Batch 0/16 | Loss: 2.3047 | accuracy 16/128=0.125
Epoch 22/200 | Batch 10/16 | Loss: 2.2853 | accuracy 18/128=0.141
Epoch 23/200 | Batch 0/16 | Loss: 2.2850 | accuracy 19/128=0.148
Epoch 23/200 | Batch 10/16 | Loss: 2.2959 | accuracy 13/128=0.102
Epoch 24/200 | Batch 0/16 | Loss: 2.2884 | accuracy 18/128=0.141
Epoch 24/200 | Batch 10/16 | Loss: 2.2940 | accuracy 18/128=0.141
Epoch 25/200 | Batch 0/16 | Loss: 2.2775 | accuracy 18/128=0.141
Epoch 25/200 | Batch 10/16 | Loss: 2.2858 | accuracy 15/128=0.117
Epoch 26/200 | Batch 0/16 | Loss: 2.2522 | accuracy 27/128=0.211
Epoch 26/200 | Batch 10/16 | Loss: 2.3032 | accuracy 16/128=0.125
Epoch 27/200 | Batch 0/16 | Loss: 2.2583 | accuracy 24/128=0.188
Epoch 27/200 | Batch 10/16 | Loss: 2.2422 | accuracy 28/128=0.219
Epoch 28/200 | Batch 0/16 | Loss: 2.2255 | accuracy 29/128=0.227
Epoch 28/200 | Batch 10/16 | Loss: 2.2325 | accuracy 16/128=0.125
Epoch 29/200 | Batch 0/16 | Loss: 2.1752 | accuracy 28/128=0.219
Epoch 29/200 | Batch 10/16 | Loss: 2.2192 | accuracy 23/128=0.180
Epoch 30/200 | Batch 0/16 | Loss: 2.2291 | accuracy 18/128=0.141
Epoch 30/200 | Batch 10/16 | Loss: 2.1861 | accuracy 25/128=0.195
checkpoint saved : ./model//check.epoch30
Epoch 31/200 | Batch 0/16 | Loss: 2.1700 | accuracy 35/128=0.273
Epoch 31/200 | Batch 10/16 | Loss: 2.0598 | accuracy 33/128=0.258
Epoch 32/200 | Batch 0/16 | Loss: 2.1042 | accuracy 29/128=0.227
Epoch 32/200 | Batch 10/16 | Loss: 2.0796 | accuracy 27/128=0.211
Epoch 33/200 | Batch 0/16 | Loss: 2.1144 | accuracy 23/128=0.180
Epoch 33/200 | Batch 10/16 | Loss: 2.1632 | accuracy 26/128=0.203
Epoch 34/200 | Batch 0/16 | Loss: 2.0593 | accuracy 38/128=0.297
Epoch 34/200 | Batch 10/16 | Loss: 2.0564 | accuracy 37/128=0.289
Epoch 35/200 | Batch 0/16 | Loss: 1.9282 | accuracy 42/128=0.328
Epoch 35/200 | Batch 10/16 | Loss: 2.0059 | accuracy 36/128=0.281
Epoch 36/200 | Batch 0/16 | Loss: 2.0065 | accuracy 35/128=0.273
Epoch 36/200 | Batch 10/16 | Loss: 1.9090 | accuracy 42/128=0.328
Epoch 37/200 | Batch 0/16 | Loss: 1.9358 | accuracy 39/128=0.305
Epoch 37/200 | Batch 10/16 | Loss: 1.9197 | accuracy 45/128=0.352
Epoch 38/200 | Batch 0/16 | Loss: 1.9248 | accuracy 42/128=0.328
Epoch 38/200 | Batch 10/16 | Loss: 1.9072 | accuracy 40/128=0.312
Epoch 39/200 | Batch 0/16 | Loss: 1.9429 | accuracy 41/128=0.320
Epoch 39/200 | Batch 10/16 | Loss: 1.9401 | accuracy 39/128=0.305
Epoch 40/200 | Batch 0/16 | Loss: 1.8600 | accuracy 44/128=0.344
Epoch 40/200 | Batch 10/16 | Loss: 1.8164 | accuracy 46/128=0.359
checkpoint saved : ./model//check.epoch40
Epoch 41/200 | Batch 0/16 | Loss: 1.8458 | accuracy 48/128=0.375
Epoch 41/200 | Batch 10/16 | Loss: 1.7130 | accuracy 54/128=0.422
Epoch 42/200 | Batch 0/16 | Loss: 1.6807 | accuracy 53/128=0.414
Epoch 42/200 | Batch 10/16 | Loss: 1.8174 | accuracy 41/128=0.320
Epoch 43/200 | Batch 0/16 | Loss: 1.8646 | accuracy 40/128=0.312
Epoch 43/200 | Batch 10/16 | Loss: 1.6046 | accuracy 54/128=0.422
Epoch 44/200 | Batch 0/16 | Loss: 1.7627 | accuracy 43/128=0.336
Epoch 44/200 | Batch 10/16 | Loss: 1.7279 | accuracy 48/128=0.375
Epoch 45/200 | Batch 0/16 | Loss: 1.6728 | accuracy 50/128=0.391
Epoch 45/200 | Batch 10/16 | Loss: 1.6171 | accuracy 53/128=0.414
Epoch 46/200 | Batch 0/16 | Loss: 1.6969 | accuracy 51/128=0.398
Epoch 46/200 | Batch 10/16 | Loss: 1.6196 | accuracy 48/128=0.375
Epoch 47/200 | Batch 0/16 | Loss: 1.6617 | accuracy 56/128=0.438
Epoch 47/200 | Batch 10/16 | Loss: 1.5410 | accuracy 67/128=0.523
Epoch 48/200 | Batch 0/16 | Loss: 1.6146 | accuracy 55/128=0.430
Epoch 48/200 | Batch 10/16 | Loss: 1.7213 | accuracy 44/128=0.344
Epoch 49/200 | Batch 0/16 | Loss: 1.5919 | accuracy 61/128=0.477
Epoch 49/200 | Batch 10/16 | Loss: 1.5982 | accuracy 51/128=0.398
Epoch 50/200 | Batch 0/16 | Loss: 1.6092 | accuracy 59/128=0.461
Epoch 50/200 | Batch 10/16 | Loss: 1.4322 | accuracy 65/128=0.508
checkpoint saved : ./model//check.epoch50
Epoch 51/200 | Batch 0/16 | Loss: 1.5115 | accuracy 65/128=0.508
Epoch 51/200 | Batch 10/16 | Loss: 1.5191 | accuracy 58/128=0.453
Epoch 52/200 | Batch 0/16 | Loss: 1.5553 | accuracy 64/128=0.500
Epoch 52/200 | Batch 10/16 | Loss: 1.5587 | accuracy 60/128=0.469
Epoch 53/200 | Batch 0/16 | Loss: 1.5137 | accuracy 61/128=0.477
Epoch 53/200 | Batch 10/16 | Loss: 1.3685 | accuracy 67/128=0.523
Epoch 54/200 | Batch 0/16 | Loss: 1.6554 | accuracy 50/128=0.391
Epoch 54/200 | Batch 10/16 | Loss: 1.4803 | accuracy 59/128=0.461
Epoch 55/200 | Batch 0/16 | Loss: 1.3825 | accuracy 66/128=0.516
Epoch 55/200 | Batch 10/16 | Loss: 1.4612 | accuracy 62/128=0.484
Epoch 56/200 | Batch 0/16 | Loss: 1.3605 | accuracy 73/128=0.570
Epoch 56/200 | Batch 10/16 | Loss: 1.4856 | accuracy 66/128=0.516
Epoch 57/200 | Batch 0/16 | Loss: 1.5354 | accuracy 51/128=0.398
Epoch 57/200 | Batch 10/16 | Loss: 1.4573 | accuracy 59/128=0.461
Epoch 58/200 | Batch 0/16 | Loss: 1.3566 | accuracy 61/128=0.477
Epoch 58/200 | Batch 10/16 | Loss: 1.3901 | accuracy 63/128=0.492
Epoch 59/200 | Batch 0/16 | Loss: 1.3130 | accuracy 70/128=0.547
Epoch 59/200 | Batch 10/16 | Loss: 1.1667 | accuracy 76/128=0.594
Epoch 60/200 | Batch 0/16 | Loss: 1.3881 | accuracy 70/128=0.547
Epoch 60/200 | Batch 10/16 | Loss: 1.2703 | accuracy 68/128=0.531
checkpoint saved : ./model//check.epoch60
Epoch 61/200 | Batch 0/16 | Loss: 1.4010 | accuracy 62/128=0.484
Epoch 61/200 | Batch 10/16 | Loss: 1.3181 | accuracy 72/128=0.562
Epoch 62/200 | Batch 0/16 | Loss: 1.2716 | accuracy 69/128=0.539
Epoch 62/200 | Batch 10/16 | Loss: 1.3523 | accuracy 62/128=0.484
Epoch 63/200 | Batch 0/16 | Loss: 1.2137 | accuracy 78/128=0.609
Epoch 63/200 | Batch 10/16 | Loss: 1.2490 | accuracy 75/128=0.586
Epoch 64/200 | Batch 0/16 | Loss: 1.2601 | accuracy 77/128=0.602
Epoch 64/200 | Batch 10/16 | Loss: 1.2207 | accuracy 72/128=0.562
Epoch 65/200 | Batch 0/16 | Loss: 1.1812 | accuracy 73/128=0.570
Epoch 65/200 | Batch 10/16 | Loss: 1.2019 | accuracy 74/128=0.578
Epoch 66/200 | Batch 0/16 | Loss: 1.0996 | accuracy 77/128=0.602
Epoch 66/200 | Batch 10/16 | Loss: 1.1076 | accuracy 72/128=0.562
Epoch 67/200 | Batch 0/16 | Loss: 1.2806 | accuracy 71/128=0.555
Epoch 67/200 | Batch 10/16 | Loss: 1.2237 | accuracy 74/128=0.578
Epoch 68/200 | Batch 0/16 | Loss: 1.1196 | accuracy 81/128=0.633
Epoch 68/200 | Batch 10/16 | Loss: 1.1982 | accuracy 78/128=0.609
Epoch 69/200 | Batch 0/16 | Loss: 1.0038 | accuracy 93/128=0.727
Epoch 69/200 | Batch 10/16 | Loss: 1.2466 | accuracy 72/128=0.562
Epoch 70/200 | Batch 0/16 | Loss: 1.0274 | accuracy 79/128=0.617
Epoch 70/200 | Batch 10/16 | Loss: 1.0536 | accuracy 82/128=0.641
checkpoint saved : ./model//check.epoch70
Epoch 71/200 | Batch 0/16 | Loss: 1.1594 | accuracy 79/128=0.617
Epoch 71/200 | Batch 10/16 | Loss: 1.0447 | accuracy 80/128=0.625
Epoch 72/200 | Batch 0/16 | Loss: 1.2550 | accuracy 68/128=0.531
Epoch 72/200 | Batch 10/16 | Loss: 1.1217 | accuracy 79/128=0.617
Epoch 73/200 | Batch 0/16 | Loss: 1.0504 | accuracy 78/128=0.609
Epoch 73/200 | Batch 10/16 | Loss: 1.2043 | accuracy 77/128=0.602
Epoch 74/200 | Batch 0/16 | Loss: 1.0929 | accuracy 74/128=0.578
Epoch 74/200 | Batch 10/16 | Loss: 1.0416 | accuracy 82/128=0.641
Epoch 75/200 | Batch 0/16 | Loss: 0.9702 | accuracy 89/128=0.695
Epoch 75/200 | Batch 10/16 | Loss: 0.9303 | accuracy 95/128=0.742
Epoch 76/200 | Batch 0/16 | Loss: 0.8531 | accuracy 93/128=0.727
Epoch 76/200 | Batch 10/16 | Loss: 1.0092 | accuracy 87/128=0.680
Epoch 77/200 | Batch 0/16 | Loss: 1.0739 | accuracy 78/128=0.609
Epoch 77/200 | Batch 10/16 | Loss: 1.0276 | accuracy 81/128=0.633
Epoch 78/200 | Batch 0/16 | Loss: 0.9078 | accuracy 91/128=0.711
Epoch 78/200 | Batch 10/16 | Loss: 0.9602 | accuracy 80/128=0.625
Epoch 79/200 | Batch 0/16 | Loss: 0.9347 | accuracy 85/128=0.664
Epoch 79/200 | Batch 10/16 | Loss: 0.9257 | accuracy 87/128=0.680
Epoch 80/200 | Batch 0/16 | Loss: 1.0276 | accuracy 84/128=0.656
Epoch 80/200 | Batch 10/16 | Loss: 0.8795 | accuracy 88/128=0.688
checkpoint saved : ./model//check.epoch80
Epoch 81/200 | Batch 0/16 | Loss: 0.7719 | accuracy 96/128=0.750
Epoch 81/200 | Batch 10/16 | Loss: 0.9031 | accuracy 90/128=0.703
Epoch 82/200 | Batch 0/16 | Loss: 0.8802 | accuracy 91/128=0.711
Epoch 82/200 | Batch 10/16 | Loss: 0.8708 | accuracy 88/128=0.688
Epoch 83/200 | Batch 0/16 | Loss: 0.8398 | accuracy 91/128=0.711
Epoch 83/200 | Batch 10/16 | Loss: 0.7149 | accuracy 99/128=0.773
Epoch 84/200 | Batch 0/16 | Loss: 0.7306 | accuracy 101/128=0.789
Epoch 84/200 | Batch 10/16 | Loss: 0.8610 | accuracy 92/128=0.719
Epoch 85/200 | Batch 0/16 | Loss: 0.8118 | accuracy 92/128=0.719
Epoch 85/200 | Batch 10/16 | Loss: 0.8698 | accuracy 94/128=0.734
Epoch 86/200 | Batch 0/16 | Loss: 0.7987 | accuracy 93/128=0.727
Epoch 86/200 | Batch 10/16 | Loss: 0.7173 | accuracy 101/128=0.789
Epoch 87/200 | Batch 0/16 | Loss: 0.7868 | accuracy 93/128=0.727
Epoch 87/200 | Batch 10/16 | Loss: 0.9372 | accuracy 80/128=0.625
Epoch 88/200 | Batch 0/16 | Loss: 0.8355 | accuracy 91/128=0.711
Epoch 88/200 | Batch 10/16 | Loss: 0.7740 | accuracy 93/128=0.727
Epoch 89/200 | Batch 0/16 | Loss: 0.8853 | accuracy 86/128=0.672
Epoch 89/200 | Batch 10/16 | Loss: 0.7612 | accuracy 91/128=0.711
Epoch 90/200 | Batch 0/16 | Loss: 0.6926 | accuracy 99/128=0.773
Epoch 90/200 | Batch 10/16 | Loss: 0.6736 | accuracy 97/128=0.758
checkpoint saved : ./model//check.epoch90
Epoch 91/200 | Batch 0/16 | Loss: 0.7096 | accuracy 95/128=0.742
Epoch 91/200 | Batch 10/16 | Loss: 0.7188 | accuracy 103/128=0.805
Epoch 92/200 | Batch 0/16 | Loss: 0.7054 | accuracy 96/128=0.750
Epoch 92/200 | Batch 10/16 | Loss: 0.6021 | accuracy 110/128=0.859
Epoch 93/200 | Batch 0/16 | Loss: 0.7780 | accuracy 96/128=0.750
Epoch 93/200 | Batch 10/16 | Loss: 0.7090 | accuracy 103/128=0.805
Epoch 94/200 | Batch 0/16 | Loss: 0.6440 | accuracy 102/128=0.797
Epoch 94/200 | Batch 10/16 | Loss: 0.8302 | accuracy 88/128=0.688
Epoch 95/200 | Batch 0/16 | Loss: 0.7757 | accuracy 96/128=0.750
Epoch 95/200 | Batch 10/16 | Loss: 0.6106 | accuracy 104/128=0.812
Epoch 96/200 | Batch 0/16 | Loss: 0.6474 | accuracy 96/128=0.750
Epoch 96/200 | Batch 10/16 | Loss: 0.6675 | accuracy 102/128=0.797
Epoch 97/200 | Batch 0/16 | Loss: 0.5350 | accuracy 106/128=0.828
Epoch 97/200 | Batch 10/16 | Loss: 0.8105 | accuracy 93/128=0.727
Epoch 98/200 | Batch 0/16 | Loss: 0.7731 | accuracy 87/128=0.680
Epoch 98/200 | Batch 10/16 | Loss: 0.6888 | accuracy 96/128=0.750
Epoch 99/200 | Batch 0/16 | Loss: 0.6044 | accuracy 106/128=0.828
Epoch 99/200 | Batch 10/16 | Loss: 0.5313 | accuracy 101/128=0.789
Epoch 100/200 | Batch 0/16 | Loss: 0.7274 | accuracy 96/128=0.750
Epoch 100/200 | Batch 10/16 | Loss: 0.6472 | accuracy 100/128=0.781
checkpoint saved : ./model//check.epoch100
Epoch 101/200 | Batch 0/16 | Loss: 0.6915 | accuracy 98/128=0.766
Epoch 101/200 | Batch 10/16 | Loss: 0.5370 | accuracy 109/128=0.852
Epoch 102/200 | Batch 0/16 | Loss: 0.5760 | accuracy 104/128=0.812
Epoch 102/200 | Batch 10/16 | Loss: 0.7622 | accuracy 93/128=0.727
Epoch 103/200 | Batch 0/16 | Loss: 0.5385 | accuracy 102/128=0.797
Epoch 103/200 | Batch 10/16 | Loss: 0.6802 | accuracy 103/128=0.805
Epoch 104/200 | Batch 0/16 | Loss: 0.5285 | accuracy 110/128=0.859
Epoch 104/200 | Batch 10/16 | Loss: 0.5555 | accuracy 110/128=0.859
Epoch 105/200 | Batch 0/16 | Loss: 0.6075 | accuracy 102/128=0.797
Epoch 105/200 | Batch 10/16 | Loss: 0.5659 | accuracy 101/128=0.789
Epoch 106/200 | Batch 0/16 | Loss: 0.4936 | accuracy 108/128=0.844
Epoch 106/200 | Batch 10/16 | Loss: 0.6707 | accuracy 102/128=0.797
Epoch 107/200 | Batch 0/16 | Loss: 0.5391 | accuracy 105/128=0.820
Epoch 107/200 | Batch 10/16 | Loss: 0.4698 | accuracy 105/128=0.820
Epoch 108/200 | Batch 0/16 | Loss: 0.4267 | accuracy 108/128=0.844
Epoch 108/200 | Batch 10/16 | Loss: 0.5509 | accuracy 102/128=0.797
Epoch 109/200 | Batch 0/16 | Loss: 0.4462 | accuracy 107/128=0.836
Epoch 109/200 | Batch 10/16 | Loss: 0.5380 | accuracy 105/128=0.820
Epoch 110/200 | Batch 0/16 | Loss: 0.4637 | accuracy 110/128=0.859
Epoch 110/200 | Batch 10/16 | Loss: 0.4375 | accuracy 109/128=0.852
checkpoint saved : ./model//check.epoch110
Epoch 111/200 | Batch 0/16 | Loss: 0.5567 | accuracy 105/128=0.820
Epoch 111/200 | Batch 10/16 | Loss: 0.4808 | accuracy 108/128=0.844
Epoch 112/200 | Batch 0/16 | Loss: 0.4961 | accuracy 109/128=0.852
Epoch 112/200 | Batch 10/16 | Loss: 0.5008 | accuracy 104/128=0.812
Epoch 113/200 | Batch 0/16 | Loss: 0.4603 | accuracy 112/128=0.875
Epoch 113/200 | Batch 10/16 | Loss: 0.4817 | accuracy 108/128=0.844
Epoch 114/200 | Batch 0/16 | Loss: 0.3971 | accuracy 111/128=0.867
Epoch 114/200 | Batch 10/16 | Loss: 0.4703 | accuracy 105/128=0.820
Epoch 115/200 | Batch 0/16 | Loss: 0.5089 | accuracy 102/128=0.797
Epoch 115/200 | Batch 10/16 | Loss: 0.4242 | accuracy 112/128=0.875
Epoch 116/200 | Batch 0/16 | Loss: 0.5037 | accuracy 103/128=0.805
Epoch 116/200 | Batch 10/16 | Loss: 0.4972 | accuracy 102/128=0.797
Epoch 117/200 | Batch 0/16 | Loss: 0.4382 | accuracy 109/128=0.852
Epoch 117/200 | Batch 10/16 | Loss: 0.3487 | accuracy 116/128=0.906
Epoch 118/200 | Batch 0/16 | Loss: 0.3746 | accuracy 112/128=0.875
Epoch 118/200 | Batch 10/16 | Loss: 0.3572 | accuracy 114/128=0.891
Epoch 119/200 | Batch 0/16 | Loss: 0.3941 | accuracy 110/128=0.859
Epoch 119/200 | Batch 10/16 | Loss: 0.4587 | accuracy 110/128=0.859
Epoch 120/200 | Batch 0/16 | Loss: 0.3700 | accuracy 114/128=0.891
Epoch 120/200 | Batch 10/16 | Loss: 0.3846 | accuracy 112/128=0.875
checkpoint saved : ./model//check.epoch120
Epoch 121/200 | Batch 0/16 | Loss: 0.4735 | accuracy 110/128=0.859
Epoch 121/200 | Batch 10/16 | Loss: 0.5561 | accuracy 104/128=0.812
Epoch 122/200 | Batch 0/16 | Loss: 0.3554 | accuracy 115/128=0.898
Epoch 122/200 | Batch 10/16 | Loss: 0.4541 | accuracy 113/128=0.883
Epoch 123/200 | Batch 0/16 | Loss: 0.4274 | accuracy 110/128=0.859
Epoch 123/200 | Batch 10/16 | Loss: 0.3901 | accuracy 112/128=0.875
Epoch 124/200 | Batch 0/16 | Loss: 0.3440 | accuracy 118/128=0.922
Epoch 124/200 | Batch 10/16 | Loss: 0.3341 | accuracy 113/128=0.883
Epoch 125/200 | Batch 0/16 | Loss: 0.3978 | accuracy 111/128=0.867
Epoch 125/200 | Batch 10/16 | Loss: 0.4012 | accuracy 113/128=0.883
Epoch 126/200 | Batch 0/16 | Loss: 0.3910 | accuracy 114/128=0.891
Epoch 126/200 | Batch 10/16 | Loss: 0.4164 | accuracy 113/128=0.883
Epoch 127/200 | Batch 0/16 | Loss: 0.3342 | accuracy 114/128=0.891
Epoch 127/200 | Batch 10/16 | Loss: 0.3473 | accuracy 120/128=0.938
Epoch 128/200 | Batch 0/16 | Loss: 0.3794 | accuracy 111/128=0.867
Epoch 128/200 | Batch 10/16 | Loss: 0.4186 | accuracy 110/128=0.859
Epoch 129/200 | Batch 0/16 | Loss: 0.3165 | accuracy 117/128=0.914
Epoch 129/200 | Batch 10/16 | Loss: 0.3586 | accuracy 112/128=0.875
Epoch 130/200 | Batch 0/16 | Loss: 0.3648 | accuracy 113/128=0.883
Epoch 130/200 | Batch 10/16 | Loss: 0.4095 | accuracy 115/128=0.898
checkpoint saved : ./model//check.epoch130
Epoch 131/200 | Batch 0/16 | Loss: 0.3751 | accuracy 114/128=0.891
Epoch 131/200 | Batch 10/16 | Loss: 0.2695 | accuracy 122/128=0.953
Epoch 132/200 | Batch 0/16 | Loss: 0.3491 | accuracy 115/128=0.898
Epoch 132/200 | Batch 10/16 | Loss: 0.2876 | accuracy 118/128=0.922
Epoch 133/200 | Batch 0/16 | Loss: 0.3161 | accuracy 116/128=0.906
Epoch 133/200 | Batch 10/16 | Loss: 0.3067 | accuracy 115/128=0.898
Epoch 134/200 | Batch 0/16 | Loss: 0.3532 | accuracy 117/128=0.914
Epoch 134/200 | Batch 10/16 | Loss: 0.3171 | accuracy 116/128=0.906
Epoch 135/200 | Batch 0/16 | Loss: 0.3430 | accuracy 113/128=0.883
Epoch 135/200 | Batch 10/16 | Loss: 0.3494 | accuracy 116/128=0.906
Epoch 136/200 | Batch 0/16 | Loss: 0.3088 | accuracy 116/128=0.906
Epoch 136/200 | Batch 10/16 | Loss: 0.3662 | accuracy 115/128=0.898
Epoch 137/200 | Batch 0/16 | Loss: 0.3178 | accuracy 117/128=0.914
Epoch 137/200 | Batch 10/16 | Loss: 0.4010 | accuracy 112/128=0.875
Epoch 138/200 | Batch 0/16 | Loss: 0.3349 | accuracy 114/128=0.891
Epoch 138/200 | Batch 10/16 | Loss: 0.3311 | accuracy 114/128=0.891
Epoch 139/200 | Batch 0/16 | Loss: 0.3263 | accuracy 115/128=0.898
Epoch 139/200 | Batch 10/16 | Loss: 0.3045 | accuracy 117/128=0.914
Epoch 140/200 | Batch 0/16 | Loss: 0.2755 | accuracy 117/128=0.914
Epoch 140/200 | Batch 10/16 | Loss: 0.2942 | accuracy 116/128=0.906
checkpoint saved : ./model//check.epoch140
Epoch 141/200 | Batch 0/16 | Loss: 0.2904 | accuracy 115/128=0.898
Epoch 141/200 | Batch 10/16 | Loss: 0.2317 | accuracy 121/128=0.945
Epoch 142/200 | Batch 0/16 | Loss: 0.4009 | accuracy 112/128=0.875
Epoch 142/200 | Batch 10/16 | Loss: 0.2950 | accuracy 117/128=0.914
Epoch 143/200 | Batch 0/16 | Loss: 0.2833 | accuracy 114/128=0.891
Epoch 143/200 | Batch 10/16 | Loss: 0.2006 | accuracy 121/128=0.945
Epoch 144/200 | Batch 0/16 | Loss: 0.3718 | accuracy 117/128=0.914
Epoch 144/200 | Batch 10/16 | Loss: 0.4305 | accuracy 106/128=0.828
Epoch 145/200 | Batch 0/16 | Loss: 0.2323 | accuracy 118/128=0.922
Epoch 145/200 | Batch 10/16 | Loss: 0.2974 | accuracy 120/128=0.938
Epoch 146/200 | Batch 0/16 | Loss: 0.2393 | accuracy 120/128=0.938
Epoch 146/200 | Batch 10/16 | Loss: 0.2414 | accuracy 120/128=0.938
Epoch 147/200 | Batch 0/16 | Loss: 0.2520 | accuracy 117/128=0.914
Epoch 147/200 | Batch 10/16 | Loss: 0.1956 | accuracy 123/128=0.961
Epoch 148/200 | Batch 0/16 | Loss: 0.3122 | accuracy 112/128=0.875
Epoch 148/200 | Batch 10/16 | Loss: 0.2806 | accuracy 119/128=0.930
Epoch 149/200 | Batch 0/16 | Loss: 0.2155 | accuracy 120/128=0.938
Epoch 149/200 | Batch 10/16 | Loss: 0.2039 | accuracy 119/128=0.930
Epoch 150/200 | Batch 0/16 | Loss: 0.2909 | accuracy 115/128=0.898
Epoch 150/200 | Batch 10/16 | Loss: 0.2923 | accuracy 119/128=0.930
checkpoint saved : ./model//check.epoch150
Epoch 151/200 | Batch 0/16 | Loss: 0.2236 | accuracy 119/128=0.930
Epoch 151/200 | Batch 10/16 | Loss: 0.2395 | accuracy 116/128=0.906
Epoch 152/200 | Batch 0/16 | Loss: 0.2158 | accuracy 122/128=0.953
Epoch 152/200 | Batch 10/16 | Loss: 0.3395 | accuracy 115/128=0.898
Epoch 153/200 | Batch 0/16 | Loss: 0.1672 | accuracy 122/128=0.953
Epoch 153/200 | Batch 10/16 | Loss: 0.2050 | accuracy 122/128=0.953
Epoch 154/200 | Batch 0/16 | Loss: 0.1663 | accuracy 123/128=0.961
Epoch 154/200 | Batch 10/16 | Loss: 0.3110 | accuracy 115/128=0.898
Epoch 155/200 | Batch 0/16 | Loss: 0.2082 | accuracy 121/128=0.945
Epoch 155/200 | Batch 10/16 | Loss: 0.1615 | accuracy 126/128=0.984
Epoch 156/200 | Batch 0/16 | Loss: 0.1987 | accuracy 120/128=0.938
Epoch 156/200 | Batch 10/16 | Loss: 0.2378 | accuracy 120/128=0.938
Epoch 157/200 | Batch 0/16 | Loss: 0.2627 | accuracy 119/128=0.930
Epoch 157/200 | Batch 10/16 | Loss: 0.2107 | accuracy 119/128=0.930
Epoch 158/200 | Batch 0/16 | Loss: 0.2405 | accuracy 117/128=0.914
Epoch 158/200 | Batch 10/16 | Loss: 0.1911 | accuracy 121/128=0.945
Epoch 159/200 | Batch 0/16 | Loss: 0.2335 | accuracy 116/128=0.906
Epoch 159/200 | Batch 10/16 | Loss: 0.1842 | accuracy 124/128=0.969
Epoch 160/200 | Batch 0/16 | Loss: 0.1570 | accuracy 122/128=0.953
Epoch 160/200 | Batch 10/16 | Loss: 0.2303 | accuracy 118/128=0.922
checkpoint saved : ./model//check.epoch160
Epoch 161/200 | Batch 0/16 | Loss: 0.1888 | accuracy 122/128=0.953
Epoch 161/200 | Batch 10/16 | Loss: 0.1389 | accuracy 123/128=0.961
Epoch 162/200 | Batch 0/16 | Loss: 0.2047 | accuracy 121/128=0.945
Epoch 162/200 | Batch 10/16 | Loss: 0.1748 | accuracy 120/128=0.938
Epoch 163/200 | Batch 0/16 | Loss: 0.1451 | accuracy 124/128=0.969
Epoch 163/200 | Batch 10/16 | Loss: 0.1395 | accuracy 124/128=0.969
Epoch 164/200 | Batch 0/16 | Loss: 0.1824 | accuracy 120/128=0.938
Epoch 164/200 | Batch 10/16 | Loss: 0.1795 | accuracy 120/128=0.938
Epoch 165/200 | Batch 0/16 | Loss: 0.1478 | accuracy 123/128=0.961
Epoch 165/200 | Batch 10/16 | Loss: 0.1997 | accuracy 123/128=0.961
Epoch 166/200 | Batch 0/16 | Loss: 0.1808 | accuracy 120/128=0.938
Epoch 166/200 | Batch 10/16 | Loss: 0.1875 | accuracy 119/128=0.930
Epoch 167/200 | Batch 0/16 | Loss: 0.1764 | accuracy 118/128=0.922
Epoch 167/200 | Batch 10/16 | Loss: 0.1592 | accuracy 124/128=0.969
Epoch 168/200 | Batch 0/16 | Loss: 0.2030 | accuracy 118/128=0.922
Epoch 168/200 | Batch 10/16 | Loss: 0.1260 | accuracy 123/128=0.961
Epoch 169/200 | Batch 0/16 | Loss: 0.1836 | accuracy 119/128=0.930
Epoch 169/200 | Batch 10/16 | Loss: 0.2194 | accuracy 120/128=0.938
Epoch 170/200 | Batch 0/16 | Loss: 0.2251 | accuracy 120/128=0.938
Epoch 170/200 | Batch 10/16 | Loss: 0.1552 | accuracy 123/128=0.961
checkpoint saved : ./model//check.epoch170
Epoch 171/200 | Batch 0/16 | Loss: 0.0859 | accuracy 127/128=0.992
Epoch 171/200 | Batch 10/16 | Loss: 0.1966 | accuracy 121/128=0.945
Epoch 172/200 | Batch 0/16 | Loss: 0.1674 | accuracy 120/128=0.938
Epoch 172/200 | Batch 10/16 | Loss: 0.1515 | accuracy 124/128=0.969
Epoch 173/200 | Batch 0/16 | Loss: 0.1992 | accuracy 115/128=0.898
Epoch 173/200 | Batch 10/16 | Loss: 0.1338 | accuracy 123/128=0.961
Epoch 174/200 | Batch 0/16 | Loss: 0.1419 | accuracy 124/128=0.969
Epoch 174/200 | Batch 10/16 | Loss: 0.1699 | accuracy 121/128=0.945
Epoch 175/200 | Batch 0/16 | Loss: 0.2120 | accuracy 120/128=0.938
Epoch 175/200 | Batch 10/16 | Loss: 0.2010 | accuracy 119/128=0.930
Epoch 176/200 | Batch 0/16 | Loss: 0.2256 | accuracy 120/128=0.938
Epoch 176/200 | Batch 10/16 | Loss: 0.1252 | accuracy 122/128=0.953
Epoch 177/200 | Batch 0/16 | Loss: 0.1566 | accuracy 123/128=0.961
Epoch 177/200 | Batch 10/16 | Loss: 0.1291 | accuracy 122/128=0.953
Epoch 178/200 | Batch 0/16 | Loss: 0.1606 | accuracy 120/128=0.938
Epoch 178/200 | Batch 10/16 | Loss: 0.1472 | accuracy 125/128=0.977
Epoch 179/200 | Batch 0/16 | Loss: 0.1642 | accuracy 121/128=0.945
Epoch 179/200 | Batch 10/16 | Loss: 0.1051 | accuracy 125/128=0.977
Epoch 180/200 | Batch 0/16 | Loss: 0.2038 | accuracy 121/128=0.945
Epoch 180/200 | Batch 10/16 | Loss: 0.1333 | accuracy 122/128=0.953
checkpoint saved : ./model//check.epoch180
Epoch 181/200 | Batch 0/16 | Loss: 0.2143 | accuracy 120/128=0.938
Epoch 181/200 | Batch 10/16 | Loss: 0.1642 | accuracy 121/128=0.945
Epoch 182/200 | Batch 0/16 | Loss: 0.1173 | accuracy 123/128=0.961
Epoch 182/200 | Batch 10/16 | Loss: 0.1296 | accuracy 125/128=0.977
Epoch 183/200 | Batch 0/16 | Loss: 0.1144 | accuracy 126/128=0.984
Epoch 183/200 | Batch 10/16 | Loss: 0.1317 | accuracy 124/128=0.969
Epoch 184/200 | Batch 0/16 | Loss: 0.1667 | accuracy 124/128=0.969
Epoch 184/200 | Batch 10/16 | Loss: 0.0716 | accuracy 126/128=0.984
Epoch 185/200 | Batch 0/16 | Loss: 0.1296 | accuracy 122/128=0.953
Epoch 185/200 | Batch 10/16 | Loss: 0.1412 | accuracy 124/128=0.969
Epoch 186/200 | Batch 0/16 | Loss: 0.1750 | accuracy 121/128=0.945
Epoch 186/200 | Batch 10/16 | Loss: 0.1369 | accuracy 121/128=0.945
Epoch 187/200 | Batch 0/16 | Loss: 0.2256 | accuracy 121/128=0.945
Epoch 187/200 | Batch 10/16 | Loss: 0.1291 | accuracy 122/128=0.953
Epoch 188/200 | Batch 0/16 | Loss: 0.1657 | accuracy 120/128=0.938
Epoch 188/200 | Batch 10/16 | Loss: 0.0768 | accuracy 126/128=0.984
Epoch 189/200 | Batch 0/16 | Loss: 0.1616 | accuracy 122/128=0.953
Epoch 189/200 | Batch 10/16 | Loss: 0.1312 | accuracy 121/128=0.945
Epoch 190/200 | Batch 0/16 | Loss: 0.1196 | accuracy 126/128=0.984
Epoch 190/200 | Batch 10/16 | Loss: 0.0910 | accuracy 128/128=1.000
checkpoint saved : ./model//check.epoch190
Epoch 191/200 | Batch 0/16 | Loss: 0.1195 | accuracy 123/128=0.961
Epoch 191/200 | Batch 10/16 | Loss: 0.1772 | accuracy 121/128=0.945
Epoch 192/200 | Batch 0/16 | Loss: 0.1274 | accuracy 124/128=0.969
Epoch 192/200 | Batch 10/16 | Loss: 0.1134 | accuracy 123/128=0.961
Epoch 193/200 | Batch 0/16 | Loss: 0.1581 | accuracy 123/128=0.961
Epoch 193/200 | Batch 10/16 | Loss: 0.0965 | accuracy 126/128=0.984
Epoch 194/200 | Batch 0/16 | Loss: 0.1425 | accuracy 123/128=0.961
Epoch 194/200 | Batch 10/16 | Loss: 0.1087 | accuracy 124/128=0.969
Epoch 195/200 | Batch 0/16 | Loss: 0.1437 | accuracy 122/128=0.953
Epoch 195/200 | Batch 10/16 | Loss: 0.1568 | accuracy 123/128=0.961
Epoch 196/200 | Batch 0/16 | Loss: 0.0746 | accuracy 127/128=0.992
Epoch 196/200 | Batch 10/16 | Loss: 0.1321 | accuracy 124/128=0.969
Epoch 197/200 | Batch 0/16 | Loss: 0.1514 | accuracy 121/128=0.945
Epoch 197/200 | Batch 10/16 | Loss: 0.1016 | accuracy 126/128=0.984
Epoch 198/200 | Batch 0/16 | Loss: 0.1348 | accuracy 123/128=0.961
Epoch 198/200 | Batch 10/16 | Loss: 0.1297 | accuracy 123/128=0.961
Epoch 199/200 | Batch 0/16 | Loss: 0.1765 | accuracy 121/128=0.945
Epoch 199/200 | Batch 10/16 | Loss: 0.1166 | accuracy 122/128=0.953
Epoch 200/200 | Batch 0/16 | Loss: 0.0859 | accuracy 126/128=0.984
Epoch 200/200 | Batch 10/16 | Loss: 0.1667 | accuracy 121/128=0.945
checkpoint saved : ./model//check.epoch200
model saved : ./model//captcha.1digit.2k

Process finished with exit code 0
```

```
D:\Python310\python.exe D:/project/PycharmProjects/CNN/test.py
resize_height = 128
resize_width = 128
test_data_path = ./data/test-digit/
characters = 0123456789
digit_num = 1
class_num = 10
test_model_path = ./model/captcha.1digit.2k

test accuracy 859 / 1000 = 0.859
```

4 References