from transformers import AutoTokenizer, AutoModelForCausalLM

# Load the Gemma 7B instruction-tuned tokenizer and model from the Hugging Face Hub.
tokenizer = AutoTokenizer.from_pretrained("google/gemma-7b-it")
model = AutoModelForCausalLM.from_pretrained("google/gemma-7b-it")

# Tokenize the prompt into PyTorch tensors.
input_text = "Write me a poem about Machine Learning."
input_ids = tokenizer(input_text, return_tensors="pt")

# Generate a completion and decode it back into text.
outputs = model.generate(**input_ids)
print(tokenizer.decode(outputs[0]))