From 94d2ba36917e3122467f111b3aa153cc69dcf604 Mon Sep 17 00:00:00 2001
From: randaller
Date: Sun, 19 Mar 2023 19:28:23 +0300
Subject: [PATCH] Update hf-inference-example.py

---
 hf-inference-example.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/hf-inference-example.py b/hf-inference-example.py
index 1a57e17..9471ead 100644
--- a/hf-inference-example.py
+++ b/hf-inference-example.py
@@ -18,4 +18,4 @@ model = llamahf.LLaMAForCausalLM.from_pretrained(MODEL, low_cpu_mem_usage=True)
 model.to('cpu')
 
 batch = tokenizer("The highest mountain in China is ", return_tensors="pt")
-print(tokenizer.decode(model.generate(batch["input_ids"].cpu(), max_length=100)[0]))
+print(tokenizer.decode(model.generate(batch["input_ids"].cpu(), do_sample=True, top_k=50, max_length=100, top_p=0.95, temperature=1.0)[0]))
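
Note: the change above switches the example from greedy decoding (argmax at every step) to sampling-based decoding. For reference, below is a minimal sketch of hf-inference-example.py as it would read after the patch; the MODEL path and the tokenizer setup fall outside the hunk, so those lines are assumptions here rather than part of the patch.

# Minimal sketch of hf-inference-example.py after this patch.
# Assumptions (not shown in the hunk): MODEL points to a directory of
# HF-converted LLaMA weights, and the tokenizer class/name below is assumed.
import llamahf

MODEL = "./converted-llama-7b"  # hypothetical path to converted weights

tokenizer = llamahf.LLaMATokenizer.from_pretrained(MODEL)  # assumed setup
model = llamahf.LLaMAForCausalLM.from_pretrained(MODEL, low_cpu_mem_usage=True)
model.to('cpu')

batch = tokenizer("The highest mountain in China is ", return_tensors="pt")

# do_sample=True draws each token from the model's output distribution instead
# of taking the argmax; top_k=50 and top_p=0.95 restrict the candidate pool,
# and temperature=1.0 leaves the logits unscaled.
print(tokenizer.decode(model.generate(
    batch["input_ids"].cpu(),
    do_sample=True,
    top_k=50,
    top_p=0.95,
    temperature=1.0,
    max_length=100,
)[0]))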