the first commit
This commit is contained in:
36
cache/make_vocab.py
vendored
Normal file
36
cache/make_vocab.py
vendored
Normal file
@@ -0,0 +1,36 @@
|
||||
import argparse
|
||||
import thulac
|
||||
import json
|
||||
|
||||
from tqdm import tqdm
|
||||
from keras.preprocessing.text import Tokenizer
|
||||
|
||||
|
||||
def main():
    """Build a word-level vocabulary file from a raw JSON corpus.

    Reads a JSON list of strings, segments every entry with THULAC,
    fits a Keras ``Tokenizer`` on the segmented text, prepends the five
    BERT special tokens, and writes the resulting words — at most
    ``vocab_size`` + 5 of them — to ``vocab_file``, one token per line.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--raw_data_path', default='../data/train.json', type=str, required=False, help='原始训练语料')
    parser.add_argument('--vocab_file', default='vocab_processed.txt', type=str, required=False, help='生成vocab链接')
    parser.add_argument('--vocab_size', default=50000, type=int, required=False, help='词表大小')
    args = parser.parse_args()

    # seg_only=True: segmentation without POS tagging.
    lac = thulac.thulac(seg_only=True)
    tokenizer = Tokenizer(num_words=args.vocab_size)
    print('args:\n' + repr(args))  # FIX: repr(args), not the dunder args.__repr__()
    print('This script is extremely slow especially for large corpus. Take a break.')

    # FIX: the original opened the corpus without ever closing it; use a
    # context manager, with explicit UTF-8 for the Chinese text.
    with open(args.raw_data_path, 'r', encoding='utf-8') as f:
        lines = json.load(f)
    for i, line in enumerate(tqdm(lines)):
        # text=True makes THULAC return one space-joined string per line.
        lines[i] = lac.cut(line, text=True)

    tokenizer.fit_on_texts(lines)
    # index_word maps rank -> word; iteration order is most-frequent first
    # (presumably — Keras builds it from word counts; confirm if it matters).
    vocab = list(tokenizer.index_word.values())
    pre = ['[SEP]', '[CLS]', '[MASK]', '[PAD]', '[UNK]']
    vocab = pre + vocab
    with open(args.vocab_file, 'w', encoding='utf-8') as f:
        # +5 keeps the special tokens without displacing vocab_size real words.
        f.write('\n'.join(vocab[:args.vocab_size + 5]) + '\n')


if __name__ == "__main__":
    main()
4
cache/make_vocab.sh
vendored
Normal file
4
cache/make_vocab.sh
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
# Build the user vocabulary (top 50k words) from the training corpus.
python make_vocab.py \
    --raw_data_path ../data/train.json \
    --vocab_file vocab_user.txt \
    --vocab_size 50000
21128
cache/vocab.txt
vendored
Normal file
21128
cache/vocab.txt
vendored
Normal file
File diff suppressed because it is too large
Load Diff
19020
cache/vocab_all.txt
vendored
Normal file
19020
cache/vocab_all.txt
vendored
Normal file
File diff suppressed because it is too large
Load Diff
32044
cache/vocab_guwen.txt
vendored
Normal file
32044
cache/vocab_guwen.txt
vendored
Normal file
File diff suppressed because it is too large
Load Diff
49005
cache/vocab_seg.txt
vendored
Normal file
49005
cache/vocab_seg.txt
vendored
Normal file
File diff suppressed because it is too large
Load Diff
13317
cache/vocab_small.txt
vendored
Normal file
13317
cache/vocab_small.txt
vendored
Normal file
File diff suppressed because it is too large
Load Diff
Reference in New Issue
Block a user