Update hf-inference-example.py

Branch: main
Author: randaller (committed via GitHub, 3 years ago)
Parent: 784f052685
Commit: 94d2ba3691

@@ -18,4 +18,4 @@ model = llamahf.LLaMAForCausalLM.from_pretrained(MODEL, low_cpu_mem_usage=True)
model.to('cpu')
batch = tokenizer("The highest mountain in China is ", return_tensors="pt")
print(tokenizer.decode(model.generate(batch["input_ids"].cpu(), max_length=100)[0]))
print(tokenizer.decode(model.generate(batch["input_ids"].cpu(), do_sample=True, top_k=50, max_length=100, top_p=0.95, temperature=1.0)[0]))
