Ai
1 Star 0 Fork 0

horn-learn/BertClassifier

加入 Gitee
与超过 1200万 开发者一起发现、参与优秀开源项目,私有仓库也完全免费 :)
免费加入
文件
该仓库未声明开源许可证文件(LICENSE),使用请关注具体项目描述及其代码上游依赖。
克隆/下载
train.py 4.50 KB
一键复制 编辑 原始数据 按行查看 历史
hornlive 提交于 2024-06-24 01:46 +08:00 . init
### 此资源由 58学课资源站 收集整理 ###
# 想要获取完整课件资料 请访问:58xueke.com
# 百万资源 畅享学习
#
# coding: utf-8
import os
import torch
import torch.nn as nn
from transformers import BertTokenizer, AdamW, BertConfig
from torch.utils.data import DataLoader
from model import BertClassifier
from dataset import CNewsDataset
from tqdm import tqdm
from sklearn import metrics
from common import constants
def main():
    """Fine-tune a BERT classifier on the CNews dataset.

    Trains for a fixed number of epochs, evaluates on the validation set
    after every epoch, and saves the checkpoint with the best
    micro-averaged F1 score to ``models/best_model.pkl``.
    """
    # ---------------- Hyperparameters ----------------
    batch_size = 4
    epochs = 10
    learning_rate = 5e-6
    device = 'cpu'

    # ---------------- Data loading ----------------
    # Build datasets, then wrap them in DataLoaders to generate batches.
    train_dataset = CNewsDataset('data/cnew.train_debug.txt')
    valid_dataset = CNewsDataset('data/cnew.val_debug.txt')
    train_dataloader = DataLoader(train_dataset, batch_size=batch_size, shuffle=True)
    valid_dataloader = DataLoader(valid_dataset, batch_size=batch_size, shuffle=False)

    # ---------------- Model ----------------
    # Load the BERT config from the pretrained checkpoint directory and
    # build the classification head with one logit per label.
    bert_config = BertConfig.from_pretrained(constants.BERT_PATH)
    num_labels = len(train_dataset.labels)
    model = BertClassifier(bert_config, num_labels).to(device)

    # ---------------- Optimizer and loss ----------------
    optimizer = AdamW(model.parameters(), lr=learning_rate)
    criterion = nn.CrossEntropyLoss()

    best_f1 = 0

    for epoch in range(1, epochs + 1):
        # ---------------- Training ----------------
        losses = 0
        accuracy = 0
        model.train()
        train_bar = tqdm(train_dataloader, ncols=100)
        for input_ids, token_type_ids, attention_mask, label_id in train_bar:
            # Clear gradients accumulated from the previous step.
            model.zero_grad()
            train_bar.set_description('Epoch %i train' % epoch)
            # Forward pass; output has shape [batch_size, num_labels].
            output = model(
                input_ids=input_ids.to(device),
                attention_mask=attention_mask.to(device),
                token_type_ids=token_type_ids.to(device),
            )
            loss = criterion(output, label_id.to(device))
            losses += loss.item()
            # Predicted class = index of the largest logit.
            pred_labels = torch.argmax(output, dim=1)
            acc = torch.sum(pred_labels == label_id.to(device)).item() / len(pred_labels)
            accuracy += acc
            loss.backward()
            optimizer.step()
            train_bar.set_postfix(loss=loss.item(), acc=acc)

        average_loss = losses / len(train_dataloader)
        average_acc = accuracy / len(train_dataloader)
        print('\tTrain ACC:', average_acc, '\tLoss:', average_loss)

        # ---------------- Validation ----------------
        model.eval()
        losses = 0
        pred_labels = []
        true_labels = []
        valid_bar = tqdm(valid_dataloader, ncols=100)
        # FIX: evaluate under no_grad so no autograd graph is built —
        # the original version wasted memory/compute tracking gradients.
        with torch.no_grad():
            for input_ids, token_type_ids, attention_mask, label_id in valid_bar:
                valid_bar.set_description('Epoch %i valid' % epoch)
                output = model(
                    input_ids=input_ids.to(device),
                    attention_mask=attention_mask.to(device),
                    token_type_ids=token_type_ids.to(device),
                )
                loss = criterion(output, label_id.to(device))
                losses += loss.item()
                pred_label = torch.argmax(output, dim=1)
                acc = torch.sum(pred_label == label_id.to(device)).item() / len(pred_label)
                valid_bar.set_postfix(loss=loss.item(), acc=acc)
                pred_labels.extend(pred_label.cpu().numpy().tolist())
                # FIX: move labels to CPU before .numpy() — the original
                # `label_id.numpy()` breaks as soon as device != 'cpu'.
                true_labels.extend(label_id.cpu().numpy().tolist())

        average_loss = losses / len(valid_dataloader)
        print('\tLoss:', average_loss)

        # Per-class precision/recall/F1 report for the validation set.
        report = metrics.classification_report(true_labels, pred_labels, labels=valid_dataset.labels_id, target_names=valid_dataset.labels)
        print('* Classification Report:')
        print(report)

        # Micro-averaged F1 decides which checkpoint is "best".
        f1 = metrics.f1_score(true_labels, pred_labels, labels=valid_dataset.labels_id, average='micro')
        if not os.path.exists('models'):
            os.makedirs('models')
        # Keep only the model that performed best on the validation set.
        if f1 > best_f1:
            best_f1 = f1
            torch.save(model.state_dict(), 'models/best_model.pkl')
# Script entry point: run training only when executed directly,
# not when imported as a module.
if __name__ == '__main__':
    main()
Loading...
马建仓 AI 助手
尝试更多
代码解读
代码找茬
代码优化
1
https://gitee.com/horn-learn/bert-classifier.git
git@gitee.com:horn-learn/bert-classifier.git
horn-learn
bert-classifier
BertClassifier
master

搜索帮助