From 0cf6eaed1c284e873c5cc491ab722be3e586f515 Mon Sep 17 00:00:00 2001 From: Loubna Ben Allal <44069155+loubnabnl@users.noreply.github.com> Date: Tue, 16 Apr 2024 22:06:18 +0200 Subject: [PATCH] Add instruct models prompts --- bigcode_eval/tasks/humanevalpack.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/bigcode_eval/tasks/humanevalpack.py b/bigcode_eval/tasks/humanevalpack.py index 40d99ebb4..64a5f673f 100644 --- a/bigcode_eval/tasks/humanevalpack.py +++ b/bigcode_eval/tasks/humanevalpack.py @@ -228,15 +228,19 @@ def get_prompt(self, prompt_base, instruction, context=None): elif self.prompt == "codellama": # https://hf.co/codellama prompt = f"[INST] {inp.strip()} [/INST] {prompt_base}" + elif self.prompt == "deepseek": + prompt = f"You are an AI programming assistant, utilizing the Deepseek Coder model, developed by Deepseek Company, and you only answer questions related to computer science. For politically sensitive questions, security and privacy issues, and other non-computer science questions, you will refuse to answer\n### Instruction:\n{inp.strip()}\n### Response:\n{prompt_base}" elif self.prompt in ["tulu", "gritlm"]: # https://hf.co/GritLM/GritLM-7B prompt = f"<|user|>\n{inp}\n<|assistant|>\n{prompt_base}" elif self.prompt == "zephyr": # https://hf.co/HuggingFaceH4/zephyr-7b-beta prompt = f"<|user|>\n{inp}\n<|assistant|>\n{prompt_base}" - elif self.prompt == "yi": + elif self.prompt in ["yi", "starchat2", "codeqwen"]: # https://hf.co/01-ai/Yi-34B-Chat prompt = f"<|im_start|>user\n{inp}<|im_end|>\n<|im_start|>assistant\n{prompt_base}" + elif self.prompt == "codegemma": + prompt = f"<start_of_turn>user\n{inp}<end_of_turn>\n<start_of_turn>model\n{prompt_base}" elif self.prompt == "codellama-70b": prompt = f"Source: user\n\n {inp.strip()} Source: assistant\nDestination: user \n\n{prompt_base}" elif self.prompt == "aurora-m":