
Commit 649d82b

Merge branch 'main' into addprompts
2 parents: d2ea115 + 2ec0159

File tree

1 file changed: +4 -2 lines changed


bigcode_eval/tasks/humanevalpack.py

Lines changed: 4 additions & 2 deletions
@@ -224,17 +224,19 @@ def get_prompt(self, prompt_base, instruction, context=None):
             # https://github.com/nlpxucan/WizardLM/blob/main/WizardCoder/src/humaneval_gen.py#L37
             prompt = f'Below is an instruction that describes a task. Write a response that appropriately completes the request.\n\n### Instruction:\n{inp}\n\n### Response:\n{prompt_base}'
         elif self.prompt == "codellama":
-            # https://huggingface.co/codellama
+            # https://hf.co/codellama
             prompt = f"[INST] {inp.strip()} [/INST] {prompt_base}"
         elif self.prompt in ["tulu", "gritlm"]:
-            # https://huggingface.co/GritLM/GritLM-7B
+            # https://hf.co/GritLM/GritLM-7B
             prompt = f"<|user|>\n{inp}\n<|assistant|>\n{prompt_base}"
         elif self.prompt == "zephyr":
             # https://hf.co/HuggingFaceH4/zephyr-7b-beta
             prompt = f"<|user|>\n{inp}</s>\n<|assistant|>\n{prompt_base}"
         elif self.prompt == "yi":
             # https://hf.co/01-ai/Yi-34B-Chat
             prompt = f"<|im_start|>user\n{inp}<|im_end|>\n<|im_start|>assistant\n{prompt_base}"
+        elif self.prompt == "codellama-70b":
+            prompt = f"Source: user\n\n {inp.strip()} Source: assistant\nDestination: user \n\n{prompt_base}"
         else:
             raise ValueError(f"The --prompt argument {self.prompt} wasn't provided or isn't supported")
         # Strip off the final \n to make the tokens more natural
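For reference, a minimal standalone sketch (not part of the commit) of what the new "codellama-70b" branch renders. The f-string is the one added in the diff above; the values of inp and prompt_base here are made-up placeholders, whereas in the harness they come from the task's instruction and function stub.

# Sketch only: reproduces the f-string added for "codellama-70b" in the diff above.
# "inp" and "prompt_base" are hypothetical example values, not harness output.
inp = "Write a Python function that returns the sum of a list of integers."
prompt_base = "def sum_list(numbers):\n"
prompt = f"Source: user\n\n {inp.strip()} Source: assistant\nDestination: user \n\n{prompt_base}"
print(prompt)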
