This commit is contained in:
Duzeyao
2019-10-24 09:26:35 +08:00
parent 4e56c1b3b6
commit 491bb9dbf5

View File

@@ -153,7 +153,7 @@ def main():
     for i, title in enumerate(titles):
         for j in range(articles_per_title):
-            with open(save_path + str(i * j), 'w') as f:
+            with open(save_path + str(i) + '-' + str(j) + '.txt', 'w') as f:
             context_tokens = tokenizer.convert_tokens_to_ids(tokenizer.tokenize(title))
             generated = 0
             out = sample_sequence(
@@ -182,7 +182,7 @@ def main():
             text = ''.join(text).replace('##', '').strip()
             # text = ''.join(text.split('\n')[:-1])
             print(text)
-            f.write(text)
+            f.write(text + '\n')
             print("=" * 80)