Update hf-inference-example.py

main
randaller 3 years ago committed by GitHub
parent 4a3d53eb2f
commit a80a606685
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

@ -5,9 +5,11 @@ MODEL = 'decapoda-research/llama-7b-hf'
# Alternative checkpoints — uncomment one to override MODEL (set earlier in the file).
# MODEL = 'decapoda-research/llama-30b-hf'
# MODEL = 'decapoda-research/llama-65b-hf'
# MODEL = './trained'

# Load the tokenizer and model weights for the selected checkpoint.
# low_cpu_mem_usage=True streams weights in to avoid a full in-RAM copy.
tokenizer = llamahf.LLaMATokenizer.from_pretrained(MODEL)
model = llamahf.LLaMAForCausalLM.from_pretrained(MODEL, low_cpu_mem_usage=True)
model.to('cpu')  # CPU-only inference

# Tokenize the prompt and greedily generate up to 100 tokens (prompt included),
# then decode the first (only) sequence back to text.
batch = tokenizer("The highest mountain in China is ", return_tensors="pt")
print(tokenizer.decode(model.generate(batch["input_ids"].cpu(), max_length=100)[0]))

Loading…
Cancel
Save