# coding: UTF-8
import torch
import torch.nn as nn
# from pytorch_pretrained_bert import BertModel, BertTokenizer
from transformers import BertModel, BertTokenizer, BertConfig
import os


class Config(object):
"""配置参数"""
|
|
|
|
|
|
def __init__(self, dataset):
        self.model_name = 'bert'
        self.train_path = dataset + '/data/train.txt'  # training set
        self.dev_path = dataset + '/data/dev.txt'  # validation set
        self.test_path = dataset + '/data/test.txt'  # test set
        self.class_list = [x.strip() for x in open(
            dataset + '/data/class.txt', encoding='utf-8').readlines()]  # list of class names
        self.save_path = dataset + '/saved_dict/' + self.model_name + '.ckpt'  # where the trained model is saved
        self.device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')  # device

        self.require_improvement = 1000  # end training early if there is no improvement after 1000 batches
        self.num_classes = len(self.class_list)  # number of classes
        self.num_epochs = 3  # number of epochs
        self.batch_size = 128  # mini-batch size
        self.pad_size = 32  # length every sentence is processed to (pad the short, cut the long)
        self.learning_rate = 5e-5  # learning rate
        self.bert_path = './bert'  # local directory holding the pretrained BERT weights, vocab and config
        self.tokenizer = BertTokenizer.from_pretrained(self.bert_path)  # WordPiece tokenizer loaded from bert_path
        self.hidden_size = 768  # BERT hidden size (768 for BERT-base)
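

# --- Illustrative only, not part of the original file. A minimal sketch of
# how one raw sentence could be encoded into the (token_ids, seq_len, mask)
# triple that Model.forward below expects; the helper name and the meaning
# of the middle element are assumptions about the surrounding data pipeline.
def encode_example(config, text):
    tokens = ['[CLS]'] + config.tokenizer.tokenize(text)
    token_ids = config.tokenizer.convert_tokens_to_ids(tokens)
    seq_len = min(len(token_ids), config.pad_size)
    # pad short sequences with 0, truncate long ones to pad_size
    token_ids = token_ids[:config.pad_size] + [0] * (config.pad_size - len(token_ids))
    mask = [1] * seq_len + [0] * (config.pad_size - seq_len)
    return token_ids, seq_len, mask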


class Model(nn.Module):

    def __init__(self, config):
        super(Model, self).__init__()
        bert_config_file = os.path.join(config.bert_path, 'bert_config.json')
        bert_config = BertConfig.from_json_file(bert_config_file)
        self.bert = BertModel.from_pretrained(config.bert_path, config=bert_config)
        for param in self.bert.parameters():
            param.requires_grad = True  # fine-tune all of BERT, not just the classifier head
        self.fc = nn.Linear(config.hidden_size, config.num_classes)

    def forward(self, x):
        context = x[0]  # the input sentences as token ids, shape [batch_size, pad_size]
        mask = x[2]  # mask over the padding; same size as the sentences, 0 at padding positions, e.g. [1, 1, 1, 1, 0, 0]
        _, pooled = self.bert(context, attention_mask=mask)  # pooled [CLS] output; tuple return assumes the transformers 2.x API
        out = self.fc(pooled)  # class logits, shape [batch_size, num_classes]
        return out
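

# --- Illustrative only, not part of the original file. A minimal smoke test
# under assumed paths: it expects a dataset directory (here hypothetically
# 'THUCNews') containing the files Config references, and BERT weights in
# './bert'. It only checks that a forward pass produces the expected shape.
if __name__ == '__main__':
    config = Config('THUCNews')  # 'THUCNews' is an assumed dataset directory
    model = Model(config).to(config.device)
    ids = torch.zeros(2, config.pad_size, dtype=torch.long).to(config.device)  # batch of two all-[PAD] sentences
    mask = torch.ones(2, config.pad_size, dtype=torch.long).to(config.device)  # attend to every position
    seq_len = torch.full((2,), config.pad_size, dtype=torch.long).to(config.device)
    logits = model((ids, seq_len, mask))
    print(logits.shape)  # expected: torch.Size([2, config.num_classes])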