jerpint committed on
Commit
a807fc1
·
1 Parent(s): 1a2c740

add code generation

Browse files
Files changed (2) hide show
  1. generate.py +70 -0
  2. llms.py +126 -0
generate.py ADDED
@@ -0,0 +1,70 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import re
3
+ from llms import build_prompt, get_completion, PROMPT_TEMPLATE, SYSTEM_PROMPT
4
+
5
+
6
def generate_code(
    problem_description: str,
    prompt_template: str,
    system_prompt: str,
    model: str,
    temperature: float,
    provider: str = "openai",
):
    """Generate a model completion for an Advent of Code problem.

    Args:
        problem_description: Full text of the day's problem statement.
        prompt_template: Template containing a ``{problem_description}`` placeholder.
        system_prompt: System message passed to the LLM.
        model: Provider-specific model identifier.
        temperature: Sampling temperature forwarded to the provider.
        provider: LLM backend name understood by ``llms.get_completion``
            ("openai", "gemini", or "anthropic"). Defaults to "openai" for
            backward compatibility with existing callers.

    Returns:
        The raw completion text returned by the provider.
    """
    prompt = build_prompt(problem_description, prompt_template)
    # NOTE: get_completion requires `provider` as its first parameter; the
    # previous positional call shifted every argument one slot to the left.
    completion = get_completion(
        provider=provider,
        user_prompt=prompt,
        system_prompt=system_prompt,
        model=model,
        temperature=temperature,
    )
    return completion
10
+
11
+
12
def load_txt_file(file_path: str):
    """Return the entire contents of the text file at *file_path*."""
    with open(file_path, "r") as fh:
        contents = fh.read()
    return contents
15
+
16
def load_problem_description(day: int):
    """Read the problem statement for *day* from ``dayNN/problem.txt``."""
    return load_txt_file(f"day{day:02d}/problem.txt")
20
+
21
+
22
def extract_code(completion: str) -> str:
    """Extract the code from the completion.

    The code is expected inside a ```` ```python ... ``` ```` fenced block.

    Args:
        completion: Raw LLM completion text.

    Returns:
        The code inside the first python fence, with surrounding whitespace
        stripped by the pattern.

    Raises:
        ValueError: If no python code block is present. Previously this
            returned ``None`` silently, which made the caller crash later
            with an opaque ``TypeError`` when writing the file.
    """
    code_block = re.search(r"```python\s*([\s\S]*?)\s*```", completion)
    if code_block is None:
        raise ValueError("No ```python ...``` code block found in completion")
    return code_block.group(1)
27
+
28
def get_solution_file_path(day: int, model: str):
    """Build the per-day, per-model solution file path (``dayNN/solution_<model>.py``)."""
    return f"day{day:02d}/solution_{model}.py"
32
+
33
+
34
def save_code(day: int, code: str, model: str):
    """Write *code* to the solution file for (*day*, *model*) and report the path."""
    destination = get_solution_file_path(day, model)
    with open(destination, "w") as out:
        out.write(code)
    print(f"Saved code to {destination}")
40
+
41
+
42
# Models to run, grouped by provider. Keys must be provider names accepted
# by llms.get_completion ("openai", "gemini", "anthropic"); values are the
# provider-specific model identifiers used in the solution filenames.
all_models = {
    "openai": ["gpt-4o"],
    "gemini": ["gemini-1.5-pro"],
    "anthropic": ["claude-3-5-sonnet-20241022"],
}
47
+
48
+
49
if __name__ == "__main__":
    # Generate one solution file per (day, provider, model), skipping any
    # combination whose output file already exists on disk.
    for day in range(1, 26):
        problem_description = load_problem_description(day)
        print(f"***Generating code for day {day}***")

        for provider, models in all_models.items():
            for model in models:
                print("-" * 80)
                print(f"Generating code for {provider} {model}")

                if os.path.exists(get_solution_file_path(day, model)):
                    print(f"Skipping {provider} {model} for day {day} because it already exists")
                    continue

                prompt = build_prompt(problem_description, PROMPT_TEMPLATE)
                completion = get_completion(
                    provider=provider,
                    user_prompt=prompt,
                    system_prompt=SYSTEM_PROMPT,
                    model=model,
                    temperature=0,
                )
                save_code(day, extract_code(completion), model)
                print("-" * 80)

        print("*" * 80)
llms.py ADDED
@@ -0,0 +1,126 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import logging
3
+
4
+ import google.generativeai as genai
5
+ from openai import OpenAI
6
+ import anthropic
7
+
8
+
9
DEFAULT_PROVIDER = "openai"
DEFAULT_MODEL = "gpt-4o-mini"

# Load API keys from the environment.
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
AI_STUDIO_API_KEY = os.getenv("AI_STUDIO_API_KEY")
ANTHROPIC_API_KEY = os.getenv("ANTHROPIC_API_KEY")

# Fail fast if any key is missing. Explicit raises are used instead of
# `assert` because assertions are stripped when Python runs with -O, which
# would let the module import with missing credentials and fail later with
# a confusing provider-side error.
for _key_name, _key_value in (
    ("OPENAI_API_KEY", OPENAI_API_KEY),
    ("AI_STUDIO_API_KEY", AI_STUDIO_API_KEY),
    ("ANTHROPIC_API_KEY", ANTHROPIC_API_KEY),
):
    if not _key_value:
        raise RuntimeError(
            f"{_key_name} is not set, please set it in your environment variables."
        )

# Initialize clients (module-level singletons reused by get_completion).
openai_client = OpenAI(api_key=OPENAI_API_KEY)
genai.configure(api_key=AI_STUDIO_API_KEY)
anthropic_client = anthropic.Anthropic(api_key=ANTHROPIC_API_KEY)


SYSTEM_PROMPT = "You are a programming assistant. You are solving the 2024 advent of code challenge."
PROMPT_TEMPLATE = """You are solving the 2024 advent of code challenge.
You will be provided the description of each challenge. You are to provide the solution to each given challenge.
1) You can reason and explain your logic before writing the code.
2) You must write the code such that it can be parsed into an actual python file.
3) It will be parsed by the evaluator, so it must be valid python code.
4) All of the code must be in a single code block, delimited by ```python and ```.
5) To count as a proper submission, the code must print the result to each question asked.
6) Each question will have a single string as an answer. Make sure to print it that string, and nothing else.
7) The actual input to the question will be provided in a file relative to the python file, e.g. "./input.txt". You must read and parse from the file accordingly. You can safely assume the file will always be relative to the python file.

Here is an example of a proper submission:

You reasoning goes here ...

```python


file = "input.txt"

def your_function(...)
    ...

    ...
    print(result1)


def your_other_function(...)
    ...

    ...
    print(result2)

```

Here is today's challenge description:
{problem_description}
"""
71
+
72
+
73
def build_prompt(
    problem_description: str, prompt_template: str = PROMPT_TEMPLATE
) -> str:
    """Interpolate *problem_description* into the template's placeholder."""
    filled = prompt_template.format(problem_description=problem_description)
    return filled
77
+
78
+
79
def get_completion(
    provider: str,
    user_prompt: str,
    system_prompt: str,
    model: str,
    temperature: float,
) -> str:
    """
    Unified function to get completions from various LLM providers.

    Args:
        provider: One of "openai", "gemini", or "anthropic".
        user_prompt: The user-turn message sent to the model.
        system_prompt: The system instruction for the model.
        model: Provider-specific model identifier.
        temperature: Sampling temperature.

    Returns:
        The completion text from the selected provider.

    Raises:
        ValueError: If *provider* is not one of the supported names.
    """
    if provider == "openai":
        messages = [
            {"role": "system", "content": system_prompt},
            {"role": "user", "content": user_prompt},
        ]
        chat = openai_client.chat.completions.create(
            model=model,
            messages=messages,
            temperature=temperature,
        )
        return chat.choices[0].message.content

    if provider == "gemini":
        # Use a distinct local name to avoid shadowing the `model` parameter.
        gemini_model = genai.GenerativeModel(
            model_name=model,
            system_instruction=system_prompt,
        )
        config = genai.types.GenerationConfig(temperature=temperature)
        response = gemini_model.generate_content(user_prompt, generation_config=config)
        return response.text

    if provider == "anthropic":
        reply = anthropic_client.messages.create(
            model=model,
            max_tokens=2048,
            temperature=temperature,
            system=system_prompt,
            messages=[{"role": "user", "content": user_prompt}],
        )
        return reply.content[0].text

    raise ValueError(f"Unknown provider: {provider}")