File tree: 1 file changed (+4 −4 lines changed)
@@ -36,7 +36,7 @@ checkpoint = "bigcode/starcoder"
 device = "cuda"  # for GPU usage or "cpu" for CPU usage

 tokenizer = AutoTokenizer.from_pretrained(checkpoint)
-model = AutoModelForCausalLM.from_pretrained(checkpoint, trust_remote_code=True).to(device)
+model = AutoModelForCausalLM.from_pretrained(checkpoint).to(device)

 inputs = tokenizer.encode("def print_hello_world():", return_tensors="pt").to(device)
 outputs = model.generate(inputs)
@@ -45,10 +45,10 @@ print(tokenizer.decode(outputs[0]))
 or
 ```python
 from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
-model_ckpt = "bigcode/starcoder"
+checkpoint = "bigcode/starcoder"

-model = AutoModelForCausalLM.from_pretrained(model_ckpt)
-tokenizer = AutoTokenizer.from_pretrained(model_ckpt)
+model = AutoModelForCausalLM.from_pretrained(checkpoint)
+tokenizer = AutoTokenizer.from_pretrained(checkpoint)

 pipe = pipeline("text-generation", model=model, tokenizer=tokenizer, device=0)
 print(pipe("def hello():"))
You can’t perform that action at this time.
0 commit comments