LDA1020 committed on
Commit
56c4b9b
·
verified ·
1 Parent(s): bcf61fc

feat: code release

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .gitattributes +31 -0
  2. .gitignore +131 -0
  3. README.md +34 -3
  4. code_generation.py +438 -0
  5. configs/default.yaml +21 -0
  6. configs/method/funsearch.yaml +9 -0
  7. configs/method/refine.yaml +9 -0
  8. configs/method/repeated_sample.yaml +4 -0
  9. configs/model/claude-3-5-haiku.yaml +6 -0
  10. configs/model/claude-3-7-sonnet-thinking.yaml +6 -0
  11. configs/model/claude-3-7-sonnet.yaml +6 -0
  12. configs/model/deepseek-chat.yaml +5 -0
  13. configs/model/deepseek-reasoner.yaml +5 -0
  14. configs/model/gemini-2.0-flash-thinking.yaml +4 -0
  15. configs/model/gemini-2.0-flash.yaml +4 -0
  16. configs/model/gemini-2.5-pro.yaml +4 -0
  17. configs/model/gpt-4.1.yaml +5 -0
  18. configs/model/gpt-4o-mini.yaml +5 -0
  19. configs/model/gpt-4o.yaml +5 -0
  20. configs/model/o1-mini.yaml +5 -0
  21. configs/model/o3-mini.yaml +5 -0
  22. configs/model/o3.yaml +5 -0
  23. configs/model/o4-mini.yaml +5 -0
  24. configs/model/qwen-max.yaml +6 -0
  25. configs/model/qwq.yaml +6 -0
  26. configs/pde/advection.yaml +10 -0
  27. configs/pde/burgers.yaml +10 -0
  28. configs/pde/cns1d.yaml +10 -0
  29. configs/pde/darcy.yaml +9 -0
  30. configs/pde/reacdiff1d.yaml +11 -0
  31. data/data_download.py +106 -0
  32. data/extract_data_subsets.py +57 -0
  33. data/pdebench_data_urls.csv +376 -0
  34. funsearch.py +160 -0
  35. llm_api.py +129 -0
  36. main.py +33 -0
  37. program_database.py +270 -0
  38. prompt_files/general_prompt.py +126 -0
  39. prompt_files/pde_descriptions.py +84 -0
  40. refine.py +71 -0
  41. repeated_sample.py +45 -0
  42. requirements.txt +14 -0
  43. solvers/advection/beta_0.1/seeds/errors_0.txt +0 -0
  44. solvers/advection/beta_0.1/seeds/errors_1.txt +0 -0
  45. solvers/advection/beta_0.1/seeds/errors_2.txt +0 -0
  46. solvers/advection/beta_0.1/seeds/errors_3.txt +0 -0
  47. solvers/advection/beta_0.1/seeds/errors_4.txt +0 -0
  48. solvers/advection/beta_0.1/seeds/implementation_0.py +83 -0
  49. solvers/advection/beta_0.1/seeds/implementation_1.py +104 -0
  50. solvers/advection/beta_0.1/seeds/implementation_2.py +127 -0
.gitattributes CHANGED
@@ -33,3 +33,34 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
+ solvers/advection/beta_0.1/seeds/visualization_0.png filter=lfs diff=lfs merge=lfs -text
37
+ solvers/advection/beta_0.1/seeds/visualization_1.png filter=lfs diff=lfs merge=lfs -text
38
+ solvers/advection/beta_0.1/seeds/visualization_2.png filter=lfs diff=lfs merge=lfs -text
39
+ solvers/advection/beta_0.1/seeds/visualization_3.png filter=lfs diff=lfs merge=lfs -text
40
+ solvers/advection/beta_0.1/seeds/visualization_4.png filter=lfs diff=lfs merge=lfs -text
41
+ solvers/burgers/nu_0.01/seeds/burgers_visualization_0.png filter=lfs diff=lfs merge=lfs -text
42
+ solvers/burgers/nu_0.01/seeds/burgers_visualization_1.png filter=lfs diff=lfs merge=lfs -text
43
+ solvers/burgers/nu_0.01/seeds/burgers_visualization_2.png filter=lfs diff=lfs merge=lfs -text
44
+ solvers/burgers/nu_0.01/seeds/burgers_visualization_3.png filter=lfs diff=lfs merge=lfs -text
45
+ solvers/burgers/nu_0.01/seeds/burgers_visualization_4.png filter=lfs diff=lfs merge=lfs -text
46
+ solvers/burgers/nu_0.1/seeds/burgers_visualization_0.png filter=lfs diff=lfs merge=lfs -text
47
+ solvers/burgers/nu_0.1/seeds/burgers_visualization_1.png filter=lfs diff=lfs merge=lfs -text
48
+ solvers/burgers/nu_0.1/seeds/burgers_visualization_2.png filter=lfs diff=lfs merge=lfs -text
49
+ solvers/burgers/nu_0.1/seeds/burgers_visualization_3.png filter=lfs diff=lfs merge=lfs -text
50
+ solvers/burgers/nu_0.1/seeds/burgers_visualization_4.png filter=lfs diff=lfs merge=lfs -text
51
+ solvers/burgers/nu_0.1/seeds/output_2.txt filter=lfs diff=lfs merge=lfs -text
52
+ solvers/burgers/nu_1.0/seeds/burgers_visualization_0.png filter=lfs diff=lfs merge=lfs -text
53
+ solvers/burgers/nu_1.0/seeds/burgers_visualization_1.png filter=lfs diff=lfs merge=lfs -text
54
+ solvers/burgers/nu_1.0/seeds/burgers_visualization_2.png filter=lfs diff=lfs merge=lfs -text
55
+ solvers/burgers/nu_1.0/seeds/burgers_visualization_3.png filter=lfs diff=lfs merge=lfs -text
56
+ solvers/burgers/nu_1.0/seeds/burgers_visualization_4.png filter=lfs diff=lfs merge=lfs -text
57
+ solvers/cns1d/eta_0.1/seeds/visualization_0.png filter=lfs diff=lfs merge=lfs -text
58
+ solvers/cns1d/eta_0.1/seeds/visualization_1.png filter=lfs diff=lfs merge=lfs -text
59
+ solvers/cns1d/eta_0.1/seeds/visualization_2.png filter=lfs diff=lfs merge=lfs -text
60
+ solvers/cns1d/eta_0.1/seeds/visualization_3.png filter=lfs diff=lfs merge=lfs -text
61
+ solvers/cns1d/eta_0.1/seeds/visualization_4.png filter=lfs diff=lfs merge=lfs -text
62
+ solvers/reacdiff1d/nu_0.5_rho_1.0/seeds/visualization_0.png filter=lfs diff=lfs merge=lfs -text
63
+ solvers/reacdiff1d/nu_0.5_rho_1.0/seeds/visualization_1.png filter=lfs diff=lfs merge=lfs -text
64
+ solvers/reacdiff1d/nu_0.5_rho_1.0/seeds/visualization_2.png filter=lfs diff=lfs merge=lfs -text
65
+ solvers/reacdiff1d/nu_0.5_rho_1.0/seeds/visualization_3.png filter=lfs diff=lfs merge=lfs -text
66
+ solvers/reacdiff1d/nu_0.5_rho_1.0/seeds/visualization_4.png filter=lfs diff=lfs merge=lfs -text
.gitignore ADDED
@@ -0,0 +1,131 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Byte-compiled / optimized / DLL files
2
+ __pycache__/
3
+ *.py[cod]
4
+ *$py.class
5
+
6
+ # C extensions
7
+ *.so
8
+
9
+ # Distribution / packaging
10
+ .Python
11
+ build/
12
+ develop-eggs/
13
+ dist/
14
+ downloads/
15
+ eggs/
16
+ .eggs/
17
+ lib/
18
+ lib64/
19
+ parts/
20
+ sdist/
21
+ var/
22
+ wheels/
23
+ pip-wheel-metadata/
24
+ share/python-wheels/
25
+ *.egg-info/
26
+ .installed.cfg
27
+ *.egg
28
+ MANIFEST
29
+
30
+ # PyInstaller
31
+ # Usually these files are written by a python script from a template
32
+ # before PyInstaller builds the exe, so as to inject date/other infos into it.
33
+ *.manifest
34
+ *.spec
35
+
36
+ # Installer logs
37
+ pip-log.txt
38
+ pip-delete-this-directory.txt
39
+
40
+ # Unit test / coverage reports
41
+ htmlcov/
42
+ .tox/
43
+ .nox/
44
+ .coverage
45
+ .coverage.*
46
+ .cache
47
+ nosetests.xml
48
+ coverage.xml
49
+ *.cover
50
+ *.py,cover
51
+ .hypothesis/
52
+ .pytest_cache/
53
+
54
+ # Translations
55
+ *.mo
56
+ *.pot
57
+
58
+ # Django stuff:
59
+ *.log
60
+ local_settings.py
61
+ db.sqlite3
62
+ db.sqlite3-journal
63
+
64
+ # Flask stuff:
65
+ instance/
66
+ .webassets-cache
67
+
68
+ # Scrapy stuff:
69
+ .scrapy
70
+
71
+ # Sphinx documentation
72
+ docs/_build/
73
+
74
+ # PyBuilder
75
+ target/
76
+
77
+ # Jupyter Notebook
78
+ .ipynb_checkpoints
79
+
80
+ # IPython
81
+ profile_default/
82
+ ipython_config.py
83
+
84
+ # pyenv
85
+ .python-version
86
+
87
+ # pipenv
88
+ # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
89
+ # However, in case of collaboration, if having platform-specific dependencies or dependencies
90
+ # having no cross-platform support, pipenv may install dependencies that don't work, or not
91
+ # install all needed dependencies.
92
+ #Pipfile.lock
93
+
94
+ # PEP 582; used by e.g. github.com/David-OConnor/pyflow
95
+ __pypackages__/
96
+
97
+ # Celery stuff
98
+ celerybeat-schedule
99
+ celerybeat.pid
100
+
101
+ # SageMath parsed files
102
+ *.sage.py
103
+
104
+ # Environments
105
+ .env
106
+ .venv
107
+ env/
108
+ venv/
109
+ ENV/
110
+ env.bak/
111
+ venv.bak/
112
+
113
+ # Spyder project settings
114
+ .spyderproject
115
+ .spyproject
116
+
117
+ # Rope project settings
118
+ .ropeproject
119
+
120
+ # mkdocs documentation
121
+ /site
122
+
123
+ # mypy
124
+ .mypy_cache/
125
+ .dmypy.json
126
+ dmypy.json
127
+
128
+ # Pyre type checker
129
+ .pyre/
130
+ .outputs/
131
+ .hydra/
README.md CHANGED
@@ -1,3 +1,34 @@
1
- ---
2
- license: mit
3
- ---
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ## CodePDE
2
+
3
+ This is the official implementation for [CodePDE: An Inference Framework for LLM-driven PDE Solver Generation](https://arxiv.org/abs/2505.08783), the first inference framework for generating PDE solvers using large language models (LLMs).
4
+
5
+ ### Dependencies
6
+
7
+ The required packages are listed in `requirements.txt`, which can be installed by running `pip install -r requirements.txt`.
8
+
9
+ ### Getting started
10
+
11
+ Download the data using the `data/data_download.py` script from PDEbench. Then process the data with the `data/extract_data_subsets.py` script.
12
+
13
+ Set up the configurations in `config` and run `python main.py`.
14
+
15
+ In the _repeated sampling_ mode, the LLM generates solvers from scratch.
16
+
17
+ In the _refinement_ mode, the LLM uses existing solvers in the `solvers` folder as "seeds" (e.g., `solvers/burgers/nu_0.01/seeds` for Burgers Equation with $\nu=0.01$) and tries to improve upon the "seeds".
18
+
19
+ In the _funsearch_ mode, the LLM uses a few solvers generated in the _repeated sampling_ stage to warm start the program database and then generates new solvers via evolutionary search. The implementation assumes that the _repeated sampling_ results are stored under `../archived_logs`.
20
+
21
+ ### Contact
22
+
23
+ May you have any questions on our work or implementation, feel free to reach out to [`[email protected]`]([email protected])!
24
+
25
+ If you find this repository useful, please consider giving a star ⭐ and cite our paper.
26
+
27
+ ```
28
+ @article{li2025codepde,
29
+ title={CodePDE: An Inference Framework for LLM-driven PDE Solver Generation},
30
+ author={Li, Shanda and Marwah, Tanya and Shen, Junhong and Sun, Weiwei and Risteski, Andrej and Yang, Yiming and Talwalkar, Ameet},
31
+ journal={arXiv preprint arXiv:2505.08783},
32
+ year={2025}
33
+ }
34
+ ```
code_generation.py ADDED
@@ -0,0 +1,438 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import json
2
+ import math
3
+ import os
4
+ import re
5
+ import shutil
6
+ import signal
7
+ import subprocess
8
+ import time
9
+
10
+
11
+ from llm_api import generate_response
12
+ from prompt_files import general_prompt, pde_descriptions
13
+
14
+
15
def file_to_string(file_path):
    """Return the entire contents of the file at *file_path* as a string."""
    # f.read() is the idiomatic single-call equivalent of
    # ''.join(f.readlines()) and avoids building an intermediate list.
    with open(file_path) as f:
        return f.read()
19
+
20
+
21
def get_last_line(output_file):
    """Return the final line (including any trailing newline) of *output_file*."""
    with open(output_file, 'r') as handle:
        all_lines = handle.readlines()
    # IndexError on an empty file is intentional: callers expect a result line.
    return all_lines[-1]
26
+
27
+
28
def generate_pde_description(cfg, pde_name):
    """Build the natural-language description of the PDE named *pde_name*.

    Fills the matching template from ``pde_descriptions`` with the
    hyperparameters stored under ``cfg.pde``. Raises ValueError for an
    unknown PDE name.
    """
    if pde_name == 'advection':
        return pde_descriptions.advection_description.format(
            advection_beta=cfg.pde.beta)
    if pde_name == 'burgers':
        return pde_descriptions.burgers_description.format(
            burgers_nu=cfg.pde.nu)
    if pde_name == 'reacdiff1d':
        return pde_descriptions.reacdiff_1d_description.format(
            reacdiff1d_nu=cfg.pde.nu, reacdiff1d_rho=cfg.pde.rho)
    if pde_name == 'cns1d':
        return pde_descriptions.cns1d_description.format(cns1d_eta=cfg.pde.eta)
    if pde_name == 'darcy':
        return pde_descriptions.darcy_description.format()
    if pde_name == 'ins2d':
        return pde_descriptions.ins2d_description.format()
    raise ValueError(f'PDE {pde_name} not recognized')
45
+
46
+
47
def generate_initial_prompt_without_seed(cfg, pde_name):
    """Build the chat messages asking the LLM to write a solver from scratch.

    The user turn embeds the PDE description and the solver template shipped
    under ``solvers/<pde_name>/``.
    """
    description = generate_pde_description(cfg, pde_name)
    template = file_to_string(f'solvers/{pde_name}/solver_template.py')

    user_turn = general_prompt.code_generation_without_seed_prompt.format(
        pde_description=description,
        solver_template=template,
    )

    return [
        {"role": "system", "content": general_prompt.system_prompt},
        {"role": "user", "content": user_turn},
    ]
63
+
64
+
65
def generate_initial_prompt(
    cfg,
    seed_implementations: list,
    working_folder: str,
    pde_name: str = 'burgers'
):
    """Build the chat messages that show the LLM existing seed solvers.

    For funsearch the seeds live in *working_folder*; otherwise (refine) they
    come from the repository's ``solvers/<pde>/<setting>/seeds`` folder. Each
    seed contributes its source plus the last line of its recorded output.
    """
    description = generate_pde_description(cfg, pde_name)

    if cfg.method.name == 'funsearch':
        seed_folder = working_folder
    else:
        # cfg.method.name == 'refine'
        seed_folder = os.path.join(
            'solvers', pde_name, cfg.pde.pde_setting_name, 'seeds')

    rendered = []
    for example_id, seed_id in enumerate(seed_implementations):
        code = file_to_string(
            os.path.join(seed_folder, f'implementation_{seed_id}.py'))
        last_output = get_last_line(
            os.path.join(seed_folder, f'output_{seed_id}.txt'))
        rendered.append(general_prompt.code_sample.format(
            id=example_id, code=code, code_output=last_output))

    problem = general_prompt.problem_prompt.format(
        pde_description=description,
        code_samples=''.join(rendered))

    return [
        {"role": "system", "content": general_prompt.system_prompt},
        {"role": "user", "content": problem},
    ]
100
+
101
+
102
def generate_debugging_prompt(
    round_idx: int,
    working_folder: str,
    debugging_reason: str = 'execution_error'
):
    """Extend the saved conversation with the failing run's logs for debugging.

    Reloads the prompt and model response of round *round_idx* from
    *working_folder*, then appends a user turn containing the (truncated)
    stdout and stderr of the failed execution. *debugging_reason* selects
    the feedback template: 'execution_error' or 'nan_inf'.
    """
    # Restore the conversation up to the model's last answer.
    with open(os.path.join(working_folder, f'messages_{round_idx}.json'), 'r') as f:
        messages = json.load(f)
    model_response = file_to_string(
        os.path.join(working_folder, f'responses_{round_idx}.txt'))
    messages.append({"role": "assistant", "content": model_response})

    # Only the last 5000 characters of each stream are forwarded to the LLM.
    code_output = file_to_string(
        os.path.join(working_folder, f'output_{round_idx}.txt'))[-5000:]
    errors = file_to_string(
        os.path.join(working_folder, f'errors_{round_idx}.txt'))[-5000:]

    if debugging_reason == 'execution_error':
        template = general_prompt.debugging_execution_error_prompt
    else:
        # debugging_reason == 'nan_inf'
        template = general_prompt.debugging_nan_inf_prompt
    feedback = template.format(code_output=code_output, error_message=errors)

    messages.append({"role": "user", "content": feedback})
    return messages
128
+
129
+
130
def generate_prompt(
    cfg,
    round_idx: int,
    working_folder: str,
    seed_implementations: list | None = None,
    generation_mode: str = 'initial',
    pde_name: str = 'burgers'
):
    """Dispatch to the appropriate prompt builder for this round.

    Debugging modes reuse the stored conversation from *working_folder*;
    otherwise the prompt is built from seed solvers when any are given,
    or from scratch when not.
    """
    if generation_mode in ('debugging_execution_error', 'debugging_nan_inf'):
        reason = (
            'execution_error'
            if generation_mode == 'debugging_execution_error'
            else 'nan_inf'
        )
        return generate_debugging_prompt(
            round_idx=round_idx,
            working_folder=working_folder,
            debugging_reason=reason,
        )

    if not seed_implementations:
        return generate_initial_prompt_without_seed(cfg, pde_name=pde_name)

    return generate_initial_prompt(
        cfg,
        seed_implementations=seed_implementations,
        working_folder=working_folder,
        pde_name=pde_name,
    )
164
+
165
+
166
def code_generation(
    cfg,
    round_idx: int,
    working_folder: str,
    seed_implementations: list | None = None,
    generation_mode: str = 'initial',
    pde_name: str = 'burgers',
    model_name='deepseek-chat'
):
    """Query the LLM for a solver implementation and save all artifacts.

    Writes into *working_folder*: the prompt (messages_<round>.json), the
    raw response (responses_<round>.txt), any Claude chain-of-thought
    (thinking_<round>.txt), and the longest ```python``` block of the
    response (implementation_<round>.py).

    Raises ValueError when the response contains no python code block.
    """
    messages = generate_prompt(
        cfg,
        round_idx=round_idx,
        working_folder=working_folder,
        seed_implementations=seed_implementations,
        generation_mode=generation_mode,
        pde_name=pde_name
    )

    # Persist the prompt for reproducibility and later debugging rounds.
    with open(os.path.join(working_folder, f'messages_{round_idx}.json'), 'w') as f:
        json.dump(messages, f, ensure_ascii=False, indent=4)

    responses = generate_response(messages, cfg)

    # Each provider wraps its answer differently; normalize to one string.
    if 'claude' in model_name:
        content = ''
        for block in responses.content:
            if block.type == 'thinking':
                # Save the CoT of Claude-thinking.
                with open(os.path.join(working_folder, f'thinking_{round_idx}.txt'), 'w') as f:
                    f.write(str(block.thinking))
                # Use the thinking text only if no text block has been seen.
                if content == '':
                    content = block.thinking
            elif block.type == 'text':
                # Extract the final response.
                content = block.text
    elif 'gemini' in model_name:
        content = responses.text
    elif 'qwq' in model_name:
        content = responses
    else:
        content = responses.choices[0].message.content

    # Save the response to a file.
    with open(os.path.join(working_folder, f'responses_{round_idx}.txt'), 'w') as f:
        f.write(content)

    code_blocks = re.findall(r'```python(.*?)```', content, re.DOTALL)
    if not code_blocks:
        raise ValueError('No relevant code block found in response')

    # Heuristic: the longest fenced block is assumed to be the full solver.
    generated_code = max(code_blocks, key=len)
    with open(os.path.join(working_folder, f'implementation_{round_idx}.py'), 'w') as f:
        f.write(generated_code)
222
+
223
+
224
def code_execution(
    cfg,
    working_folder: str,
    round_idx: int = 0,
    pde_name: str = 'burgers',
    eval_dataset: str | None = None
):
    """Run the generated solver through the PDE evaluator in a subprocess.

    Copies implementation_<round>.py to solver.py, launches evaluator.py
    with the PDE-specific hyperparameters on the assigned GPU, and redirects
    stdout/stderr to output_<round>.txt / errors_<round>.txt.

    Returns a dict with keys "exit_code", "stderr" and "status"
    ("completed" or "timeout"). On timeout the whole process group is
    terminated and lingering GPU processes are cleaned up.

    Raises ValueError for an unrecognized *pde_name* and FileNotFoundError
    if the implementation file is missing.
    """
    # Copy the implementation file to solver.py to make the evaluator's
    # life easier. shutil.copyfile (instead of `os.system('cp ...')`)
    # fails loudly when the implementation file does not exist.
    shutil.copyfile(
        os.path.join(working_folder, f'implementation_{round_idx}.py'),
        os.path.join(working_folder, 'solver.py'),
    )

    # Construct the base command.
    if eval_dataset is None:
        eval_dataset = os.path.join(cfg.root_dataset_folder, cfg.pde.dataset_folder_for_eval)
    cmd = (
        f'CUDA_VISIBLE_DEVICES={cfg.assigned_gpu} '
        f'python {working_folder}/evaluator.py '
        f'--save-pth {working_folder} '
        f'--run-id {round_idx} '
        f'--dataset-path-for-eval '
        f'{eval_dataset} '
    )
    # Note: In Funsearch, we will need to customize the eval_dataset to
    # separate development and testing.

    # Append PDE-specific hyperparameters to the command. Selecting them
    # BEFORE opening the log files avoids leaking open handles when an
    # unknown PDE name raises.
    if pde_name == 'advection':
        hyperparam = f'--beta {cfg.pde.beta} '
    elif pde_name == 'burgers':
        hyperparam = f'--nu {cfg.pde.nu} '
    elif pde_name == 'reacdiff1d':
        hyperparam = f'--nu {cfg.pde.nu} --rho {cfg.pde.rho} '
    elif pde_name == 'cns1d':
        hyperparam = f'--eta {cfg.pde.eta} '
    elif pde_name in ['darcy', 'ins2d']:
        hyperparam = ' '  # No hyperparameters for these two
    else:
        raise ValueError(f'PDE {pde_name} not recognized')

    # Open files for standard output and error logging; closed in `finally`.
    job_out = open(os.path.join(working_folder, f'output_{round_idx}.txt'), 'w')
    job_err = open(os.path.join(working_folder, f'errors_{round_idx}.txt'), 'w')

    try:
        # shell=True is needed for the env-var prefix; the command pieces
        # come from trusted config, not user input.
        process = subprocess.Popen(
            f'{cmd} {hyperparam}',
            shell=True,
            stdout=job_out,
            stderr=job_err,
            text=True,
            preexec_fn=os.setsid  # Create a new process group
        )

        # Wait for the process with timeout.
        exit_code = process.wait(timeout=cfg.pde.timeout)
        stderr = None
        status = "completed"

    except subprocess.TimeoutExpired:
        # Kill the entire process group on timeout: SIGTERM first, ...
        os.killpg(os.getpgid(process.pid), signal.SIGTERM)

        # ... wait a moment for graceful termination, ...
        time.sleep(2)

        # ... then SIGKILL if it is still running.
        if process.poll() is None:
            os.killpg(os.getpgid(process.pid), signal.SIGKILL)

        # Clean up any GPU processes that might still be running.
        cleanup_gpu_processes(cfg.assigned_gpu)

        job_out.write(f"Process exceeded the {cfg.pde.timeout}-second timeout limit.\n")
        job_err.write(f"Process exceeded the {cfg.pde.timeout}-second timeout limit.\n")
        exit_code = -1
        stderr = "TimeoutExpired: Process exceeded the timeout limit."
        status = "timeout"

    finally:
        # Always close the log files.
        job_out.close()
        job_err.close()

    return {
        "exit_code": exit_code,
        "stderr": stderr,
        "status": status
    }
311
+
312
def cleanup_gpu_processes(gpu_id):
    """Kill any orphaned compute processes still using GPU *gpu_id*.

    Queries nvidia-smi for the PIDs of compute apps on the GPU and sends
    each one SIGKILL. Best-effort: every failure (including nvidia-smi
    being absent) is reported and swallowed so cleanup never crashes the
    caller.
    """
    try:
        # Argument list instead of a shell string: no shell parsing, and
        # gpu_id cannot be interpreted as shell syntax.
        result = subprocess.run(
            [
                "nvidia-smi",
                "--query-compute-apps=pid",
                "--format=csv,noheader,nounits",
                "-i", str(gpu_id),
            ],
            capture_output=True,
            text=True,
        )

        # One PID per line; an empty query yields no iterations.
        for pid in result.stdout.strip().splitlines():
            pid = pid.strip()
            if pid.isdigit():
                try:
                    os.kill(int(pid), signal.SIGKILL)
                    print(f"Killed GPU process with PID {pid}")
                except ProcessLookupError:
                    pass  # Process already terminated
    except Exception as e:
        print(f"Error during GPU cleanup: {e}")
338
+
339
+
340
def get_results(output_file):
    """Parse nRMSE, wall-clock time, and convergence rate from an evaluator log.

    Expects the final line of *output_file* to contain
    'nRMSE: <x>\\t', 'Time: <y>s', and 'Average convergence rate: <z>\\t'.
    """
    summary = get_last_line(output_file)

    def _float_field(pattern):
        # AttributeError here means the evaluator did not finish normally.
        return float(re.search(pattern, summary).group(1))

    relative_error = _float_field(r'nRMSE: (.*?)\t')
    elapsed_time = _float_field(r'Time: (.*?)s')
    avg_rate = _float_field(r'Average convergence rate: (.*?)\t')

    return relative_error, elapsed_time, avg_rate
354
+
355
+
356
def prepare_working_folder(
    cfg,
    working_folder,
    pde_name='burgers',
    use_sample_solver_init=False
):
    """Initialize *working_folder* with a result sheet and the PDE evaluator.

    Creates test_results.csv with its header row, then copies the matching
    evaluator script into the folder. *cfg* is currently unused here;
    *use_sample_solver_init* is a no-op (sample solvers are neither copied
    nor executed).
    """
    print('Generating result sheet')
    result_sheet_path = os.path.join(working_folder, 'test_results.csv')
    with open(result_sheet_path, 'w') as sheet:
        sheet.write('round,nRMSE,elapsed_time,convergence_rate,num_trial\n')

    evaluator_path = os.path.join(working_folder, 'evaluator.py')
    os.system(f'cp solvers/{pde_name}/evaluator.py {evaluator_path}')

    if use_sample_solver_init:
        # We don't copy the sample solvers, nor execute them.
        pass
373
+
374
+
375
def generate_and_debug(
    cfg,
    round_idx: int,
    num_trials: int,
    pde_name: str,
    working_folder: str,
    seed_implementations: list | None,
    model_name: str
):
    """Generate a solver and let the LLM debug it for up to *num_trials* attempts.

    Each attempt generates code (the first from the initial prompt, later
    ones from a debugging prompt), executes it, and inspects the result.
    Returns (nRMSE, elapsed_time, convergence_rate) on success; appends a
    'failed' row to test_results.csv and raises ValueError when the last
    attempt still crashes.
    """
    results_csv = os.path.join(working_folder, 'test_results.csv')
    generation_mode = 'initial'

    for num_trial in range(1, num_trials + 1):
        # Trial 1 is plain generation; later trials are debugging rounds.
        code_generation(
            cfg,
            round_idx=round_idx,
            working_folder=working_folder,
            seed_implementations=seed_implementations,
            generation_mode=generation_mode,
            pde_name=pde_name,
            model_name=model_name
        )
        print(f'Round {round_idx}, trial {num_trial} code generation completed successfully')

        print(f'Round {round_idx}, trial {num_trial} code execution started')
        execution_results = code_execution(
            cfg,
            working_folder=working_folder,
            round_idx=round_idx,
            pde_name=pde_name
        )

        if execution_results['exit_code'] != 0:
            print(f'Error in round {round_idx}, trial {num_trial} code execution.')
            if num_trial == num_trials:
                # Out of attempts: record the failure and give up.
                with open(results_csv, 'a') as f:
                    f.write(f'{round_idx},failed,failed,failed,{num_trial}\n')
                raise ValueError(f'Error in round {round_idx}, trial {num_trial} code execution.')
            print('Let LLM debug the code')
            generation_mode = 'debugging_execution_error'
            continue

        print(f'Round {round_idx}, trial {num_trial} completed successfully')
        relative_error, elapsed_time, avg_rate = get_results(
            os.path.join(working_folder, f'output_{round_idx}.txt')
        )

        metric_is_bad = math.isnan(relative_error) or math.isinf(relative_error)
        if metric_is_bad and num_trial < num_trials:
            # NaN/Inf nRMSE with attempts left: ask the LLM to debug.
            print(f'nRMSE is NaN/Inf in round {round_idx}, trial {num_trial} code execution.')
            print('Let LLM debug the code')
            generation_mode = 'debugging_nan_inf'
        else:
            # Record the result (even a NaN/Inf on the final trial) and stop.
            with open(results_csv, 'a') as f:
                f.write(f'{round_idx},{relative_error},{elapsed_time},{avg_rate},{num_trial}\n')
            print(f'nRMSE: {relative_error:.5f}\t| Time: {elapsed_time:.2f}s\t| Rate: {avg_rate}\t| Trial: {num_trial}')
            return relative_error, elapsed_time, avg_rate

    return None, None, None
configs/default.yaml ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # @package _global_
2
+ defaults:
3
+ - _self_
4
+ - pde: burgers
5
+ - model: gpt-4.1
6
+ - method: repeated_sample
7
+
8
+ log_path: ../working_logs
9
+ # log_path: ../tmp_test
10
+
11
+ folder_name_suffix: ''
12
+ redirect_stdout: False
13
+ assigned_gpu: 1
14
+
15
+ working_folder: ${log_path}/${pde.name}/${pde.pde_setting_name}${folder_name_suffix}/${method.name}/${model.name}/${now:%Y-%m-%d-%H-%M-%S}/
16
+
17
+ root_dataset_folder: ../dataset/CodePDE # Replace with your dataset path. Absolute path is recommended.
18
+
19
+ hydra:
20
+ run:
21
+ dir: ${working_folder}
configs/method/funsearch.yaml ADDED
@@ -0,0 +1,9 @@
 
 
 
 
 
 
 
 
 
 
1
+ # @package method
2
+ num_search_rounds: 2
3
+ num_debugging_trials_per_sample: 5
4
+
5
+ num_initial_seeds: 12
6
+
7
+ name: 'funsearch'
8
+
9
+ use_sample_solver_init: True
configs/method/refine.yaml ADDED
@@ -0,0 +1,9 @@
 
 
 
 
 
 
 
 
 
 
1
+ # @package method
2
+ num_repeated_samples: 2
3
+ num_debugging_trials_per_sample: 5
4
+
5
+ num_sample_for_refine: 5
6
+ name: 'refine_sample${method.num_sample_for_refine}'
7
+
8
+ start_round: 0
9
+ use_sample_solver_init: True
configs/method/repeated_sample.yaml ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ # @package method
2
+ name: repeated_sample
3
+ num_repeated_samples: 2
4
+ num_debugging_trials_per_sample: 5
configs/model/claude-3-5-haiku.yaml ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ # @package model
2
+ family_name: claude
3
+ name: claude-3-5-haiku-20241022
4
+ api_key: <API_KEY>
5
+
6
+ thinking: False
configs/model/claude-3-7-sonnet-thinking.yaml ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ # @package model
2
+ family_name: claude
3
+ name: claude-3-7-sonnet-20250219
4
+ api_key: <API_KEY>
5
+
6
+ thinking: True
configs/model/claude-3-7-sonnet.yaml ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ # @package model
2
+ family_name: claude
3
+ name: claude-3-7-sonnet-20250219
4
+ api_key: <API_KEY>
5
+
6
+ thinking: False
configs/model/deepseek-chat.yaml ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ # @package model
2
+ family_name: deepseek
3
+ name: deepseek-chat
4
+ api_key: <API_KEY>
5
+ base_url: https://api.deepseek.com
configs/model/deepseek-reasoner.yaml ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ # @package model
2
+ family_name: deepseek
3
+ name: deepseek-reasoner
4
+ api_key: <API_KEY>
5
+ base_url: https://api.deepseek.com
configs/model/gemini-2.0-flash-thinking.yaml ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ # @package model
2
+ family_name: gemini
3
+ name: gemini-2.0-flash-thinking-exp
4
+ api_key: <API_KEY>
configs/model/gemini-2.0-flash.yaml ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ # @package model
2
+ family_name: gemini
3
+ name: gemini-2.0-flash
4
+ api_key: <API_KEY>
configs/model/gemini-2.5-pro.yaml ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ # @package model
2
+ family_name: gemini
3
+ name: gemini-2.5-pro-preview-06-05
4
+ api_key: <API_KEY>
configs/model/gpt-4.1.yaml ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ # @package model
2
+ family_name: gpt
3
+ name: gpt-4.1-2025-04-14
4
+
5
+ api_key: <API_KEY>
configs/model/gpt-4o-mini.yaml ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ # @package model
2
+ family_name: gpt
3
+ name: gpt-4o-mini-2024-07-18
4
+
5
+ api_key: <API_KEY>
configs/model/gpt-4o.yaml ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ # @package model
2
+ family_name: gpt
3
+ name: gpt-4o-2024-08-06
4
+
5
+ api_key: <API_KEY>
configs/model/o1-mini.yaml ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ # @package model
2
+ family_name: o
3
+ name: o1-mini
4
+
5
+ api_key: <API_KEY>
configs/model/o3-mini.yaml ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ # @package model
2
+ family_name: o
3
+ name: o3-mini
4
+
5
+ api_key: <API_KEY>
configs/model/o3.yaml ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ # @package model
2
+ family_name: o
3
+ name: o3
4
+
5
+ api_key: <API_KEY>
configs/model/o4-mini.yaml ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ # @package model
2
+ family_name: o
3
+ name: o4-mini
4
+
5
+ api_key: <API_KEY>
configs/model/qwen-max.yaml ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ # @package model
2
+ family_name: qwen
3
+ name: qwen-max
4
+
5
+ api_key: <API_KEY>
6
+ base_url: https://dashscope.aliyuncs.com/compatible-mode/v1
configs/model/qwq.yaml ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ # @package model
2
+ family_name: qwen
3
+ name: qwq-plus
4
+
5
+ api_key: <API_KEY>
6
+ base_url: https://dashscope.aliyuncs.com/compatible-mode/v1
configs/pde/advection.yaml ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ # @package pde
2
+ name: advection
3
+ beta: 0.1 # choices: [0.1, 0.2, 0.4, 0.7, 1.0, 2.0, 4.0, 7.0]
4
+
5
+ pde_setting_name: 'beta_${pde.beta}'
6
+
7
+ # To be appended to `root_dataset_folder` to get the full path
8
+ dataset_folder_for_eval: Advection/1D_Advection_Sols_beta${pde.beta}_development.hdf5
9
+
10
+ timeout: 1200
configs/pde/burgers.yaml ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ # @package pde
2
+ name: burgers
3
+ nu: 0.01 # choices: [1.0, 2.0, 4.0, 0.1, 0.2, 0.4, 0.01, 0.02, 0.04, 0.001, 0.002, 0.004]
4
+
5
+ pde_setting_name: nu_${pde.nu}
6
+
7
+ # To be appended to `root_dataset_folder` to get the full path
8
+ dataset_folder_for_eval: Burgers/1D_Burgers_Sols_Nu${pde.nu}_development.hdf5
9
+
10
+ timeout: 1200
configs/pde/cns1d.yaml ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ # @package pde
2
+ name: cns1d
3
+ eta: 0.1 # choices: [0.01, 0.1, 1.e-8]
4
+
5
+ pde_setting_name: 'eta_${pde.eta}'
6
+
7
+ # To be appended to `root_dataset_folder` to get the full path
8
+ dataset_folder_for_eval: CNS/1D_CFD_Rand_Eta${pde.eta}_Zeta${pde.eta}_periodic_Train_development.hdf5
9
+
10
+ timeout: 2400
configs/pde/darcy.yaml ADDED
@@ -0,0 +1,9 @@
 
 
 
 
 
 
 
 
 
 
1
+ # @package pde
2
+ name: darcy
3
+
4
+ pde_setting_name: 'beta_1.0'
5
+
6
+ # To be appended to `root_dataset_folder` to get the full path
7
+ dataset_folder_for_eval: Darcy/piececonst_r421_N1024_smooth1_sample50_development.hdf5
8
+
9
+ timeout: 1200
configs/pde/reacdiff1d.yaml ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # @package pde
2
+ name: reacdiff1d
3
+ nu: 0.5 # choices: [0.5, 1.0, 2.0, 5.0]
4
+ rho: 1.0 # choices: [1.0, 2.0, 5.0, 10.0]
5
+
6
+ pde_setting_name: 'nu_${pde.nu}_rho_${pde.rho}'
7
+
8
+ # To be appended to `root_dataset_folder` to get the full path
9
+ dataset_folder_for_eval: ReactionDiffusion/ReacDiff_Nu${pde.nu}_Rho${pde.rho}_development.hdf5
10
+
11
+ timeout: 2400
data/data_download.py ADDED
@@ -0,0 +1,106 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+ import os
3
+ import argparse
4
+ from pathlib import Path
5
+
6
+ import pandas as pd
7
+ from torchvision.datasets.utils import download_url
8
+ from tqdm import tqdm
9
+
10
+
11
def parse_metadata(pde_names):
    """Filter the metadata CSV down to the files that must be downloaded.

    Args:
        pde_names: A single PDE name (str) or a list of PDE names.
            Matching is case-insensitive. Valid names:
            Advection, Burgers, 1D_CFD, Diff-Sorp, 1D_ReacDiff,
            2D_CFD, Darcy, 2D_ReacDiff, NS_Incom, SWE, 3D_CFD

    Returns:
        pde_df: DataFrame with the metadata rows (PDE, Filename, URL,
            Path, MD5) of the files to be downloaded.

    Raises:
        ValueError: If any requested PDE name is not a known dataset.
    """
    meta_df = pd.read_csv(os.path.join(os.path.dirname(__file__), 'pdebench_data_urls.csv'))

    # Known dataset identifiers (lower-case), mirroring the CSV's PDE column.
    pde_list = [
        "advection",
        "burgers",
        "1d_cfd",
        "diff_sorp",
        "1d_reacdiff",
        "2d_cfd",
        "darcy",
        "2d_reacdiff",
        "ns_incom",
        "swe",
        "3d_cfd",
    ]

    # Accept both a single name and a list of names. The original code
    # wrapped the argument in a list unconditionally, which broke genuine
    # list inputs (lower() would then be called on a list).
    if isinstance(pde_names, str):
        pde_names = [pde_names]
    pde_names = [name.lower() for name in pde_names]

    # Validate with an explicit exception instead of `assert`, which is
    # stripped when Python runs with -O.
    unknown = [name for name in pde_names if name not in pde_list]
    if unknown:
        raise ValueError(f"PDE name not defined: {unknown}")

    # Lower-case the CSV column so the membership test is case-insensitive.
    meta_df["PDE"] = meta_df["PDE"].str.lower()

    return meta_df[meta_df["PDE"].isin(pde_names)]
61
+
62
+
63
def download_data(root_folder, pde_name):
    """Download all data files registered for one PDE.

    Args:
        root_folder: Destination root directory for the downloaded files.
        pde_name: Name of the PDE whose dataset files should be fetched.
    """
    # Resolve the metadata rows describing this PDE's files.
    pde_df = parse_metadata(pde_name)

    # Fetch every file; download_url verifies the MD5 checksum.
    total_files = pde_df.shape[0]
    for _, row in tqdm(pde_df.iterrows(), total=total_files):
        target_dir = Path(root_folder) / row["Path"]
        download_url(row["URL"], target_dir, row["Filename"], md5=row["MD5"])
81
+
82
+
83
if __name__ == "__main__":
    arg_parser = argparse.ArgumentParser(
        prog="Download Script",
        description="Helper script to download the PDEBench datasets",
        epilog="",
    )

    arg_parser.add_argument(
        "--root_folder",
        type=str,
        # required=True,
        help="Root folder where the data will be downloaded",
        default="../dataset",
    )
    # NOTE: with action="append" the default must NOT be a plain string:
    # argparse appends parsed values to (a copy of) the default, so a str
    # default raises AttributeError the first time the flag is used, and a
    # single use would otherwise yield a list where a string was handled.
    arg_parser.add_argument(
        "--pde_name",
        action="append",
        help="Name of the PDE dataset to download. You can use this flag multiple times to download multiple datasets",
        default=None,
    )

    args = arg_parser.parse_args()

    # Fall back to the Burgers dataset when no --pde_name flag is given,
    # and download each requested dataset in turn.
    pde_names = args.pde_name if args.pde_name else ["burgers"]
    for pde_name in pde_names:
        download_data(args.root_folder, pde_name)
data/extract_data_subsets.py ADDED
@@ -0,0 +1,57 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import h5py
2
+ import numpy as np
3
+ import os
4
+
5
def work(dataset_path, subset_path, subset_selection):
    """Copy a subset of trajectories from one HDF5 file into a new one.

    Args:
        dataset_path: Path of the source HDF5 file.
        subset_path: Path of the destination HDF5 file to create.
        subset_selection: Callable that slices the trajectory tensor,
            e.g. ``lambda x: x[:100]``.
    """
    # Read the coordinate arrays and the selected slice of trajectories.
    with h5py.File(dataset_path, 'r') as src:
        t_coordinate = np.array(src['t-coordinate'])[:-1]  # drop trailing time point
        x_coordinate = np.array(src['x-coordinate'])
        u = subset_selection(np.array(src['tensor']))

        # Navier-Stokes data has different structure
        # Vx = subset_selection((src['Vx']))
        # density = subset_selection(np.array(src['density']))
        # pressure = subset_selection(np.array(src['pressure']))

    # Sanity-check the shapes, e.g. (201,) (1024,) (100, 201, 1024)
    # for the Burgers equation.
    print(t_coordinate.shape, x_coordinate.shape, u.shape)

    # Write the subset into the new HDF5 file.
    with h5py.File(subset_path, 'w') as dst:
        dst.create_dataset('t-coordinate', data=t_coordinate)
        dst.create_dataset('tensor', data=u)
        dst.create_dataset('x-coordinate', data=x_coordinate)

        # Uncomment if you want to save Navier-Stokes specific data
        # dst.create_dataset('Vx', data=Vx)
        # dst.create_dataset('density', data=density)
        # dst.create_dataset('pressure', data=pressure)

    print(f"Subset data saved successfully at {subset_path}!")
35
+
36
if __name__ == '__main__':

    dataset_dir = '../dataset/1D/Burgers/Train'
    test_subset_size = 100
    dev_subset_size = 50
    subset_dir = '../dataset/CodePDE/Burgers'

    # Create the output directory on first use.
    if os.path.exists(subset_dir):
        print(f"Exist: {subset_dir}")
    else:
        print(f"Creating: {subset_dir}")
        os.makedirs(subset_dir)

    for item in os.listdir(dataset_dir):
        full_path = os.path.join(dataset_dir, item)
        if not os.path.isfile(full_path):
            continue
        print(full_path)

        # First `test_subset_size` trajectories -> test split.
        subset_path = os.path.join(subset_dir, item)
        work(full_path, subset_path, lambda x: x[:test_subset_size])

        # Last `dev_subset_size` trajectories -> development split.
        development_subset_path = subset_path.replace('.hdf5', '_development.hdf5')
        work(full_path, development_subset_path, lambda x: x[-dev_subset_size:])
data/pdebench_data_urls.csv ADDED
@@ -0,0 +1,376 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ PDE,Filename,URL,Path,MD5
2
+ Advection,1D_Advection_Sols_beta0.1.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/255672,1D/Advection/Train/,b4be2fc3383f737c76033073e6d2ccfb
3
+ Advection,1D_Advection_Sols_beta0.2.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/255671,1D/Advection/Train/,d47b52c1be4b6bcaa8748a8df2f5fbb8
4
+ Advection,1D_Advection_Sols_beta0.4.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/255674,1D/Advection/Train/,d595bbfd2c659df995a93cd40d6ea568
5
+ Advection,1D_Advection_Sols_beta0.7.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/255666,1D/Advection/Train/,f89030c240a1e90b55649776854a84c2
6
+ Advection,1D_Advection_Sols_beta1.0.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/255675,1D/Advection/Train/,1fe41923a4123db55bf4e89bea32e142
7
+ Advection,1D_Advection_Sols_beta2.0.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/255677,1D/Advection/Train/,0e9beff98693089c38e213a1f2b9ac43
8
+ Advection,1D_Advection_Sols_beta4.0.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/255676,1D/Advection/Train/,a87b03ff425496360e2732921805f47d
9
+ Advection,1D_Advection_Sols_beta7.0.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/255664,1D/Advection/Train/,72a286ad2fcba574bd20bc045ba82d58
10
+ Burgers,1D_Burgers_Sols_Nu1.0.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/281365,1D/Burgers/Train/,9021eba35332d127306f11ef84c1a60f
11
+ Burgers,1D_Burgers_Sols_Nu2.0.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/281364,1D/Burgers/Train/,70fe0c24d9313e70f6059e5212bdb3d7
12
+ Burgers,1D_Burgers_Sols_Nu4.0.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/268188,1D/Burgers/Train/,8af7f70166c00ffad377c80fa477d81f
13
+ Burgers,1D_Burgers_Sols_Nu0.001.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/268190,1D/Burgers/Train/,44cb784d5a07aa2b1c864cabdcf625f9
14
+ Burgers,1D_Burgers_Sols_Nu0.002.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/268193,1D/Burgers/Train/,edf1cd13622d151dfde3e4e5af7a95b4
15
+ Burgers,1D_Burgers_Sols_Nu0.004.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/268191,1D/Burgers/Train/,435e1fecb8a64a0b4563bb8d09f81c33
16
+ Burgers,1D_Burgers_Sols_Nu0.01.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/281363,1D/Burgers/Train/,e6d9a4f62baf9a29121a816b919e2770
17
+ Burgers,1D_Burgers_Sols_Nu0.02.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/268189,1D/Burgers/Train/,7c8c717a3a7818145877baa57106b090
18
+ Burgers,1D_Burgers_Sols_Nu0.04.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/281362,1D/Burgers/Train/,16f4c7afaf8c16238be157e54c9297c7
19
+ Burgers,1D_Burgers_Sols_Nu0.1.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/268185,1D/Burgers/Train/,660ba1008d3843bf4e28d2895eb607ce
20
+ Burgers,1D_Burgers_Sols_Nu0.2.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/268187,1D/Burgers/Train/,01d9333254dff2f3cad090b61f5cf695
21
+ Burgers,1D_Burgers_Sols_Nu0.4.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/268192,1D/Burgers/Train/,58327a6107e8f9defb5075677cc42533
22
+ 1D_CFD,1D_CFD_Rand_Eta0.01_Zeta0.01_periodic_Train.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/164672,1D/CFD/Train/,2fdd00138f1d7abc794fb953021a9f43
23
+ 1D_CFD,1D_CFD_Rand_Eta0.1_Zeta0.1_periodic_Train.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/164668,1D/CFD/Train/,45655bd77d006ab539c52b7fbcf099b9
24
+ 1D_CFD,1D_CFD_Rand_Eta1.e-8_Zeta1.e-8_periodic_Train.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/135485,1D/CFD/Train/,ad22fe08b8abc721179dbb34f2cc8b2a
25
+ 1D_CFD,1D_CFD_Rand_Eta1.e-8_Zeta1.e-8_trans_Train.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133155,1D/CFD/Train/,79857a11ad9a69d77b8cf249a1c72d06
26
+ 1D_CFD,1D_CFD_Shock_Eta1.e-8_Zeta1.e-8_trans_Train.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133156,1D/CFD/Train/,08dffc3d84033c7574a70614703dd753
27
+ 1D_CFD,Sod1.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133145,1D/CFD/Test/ShockTube/,ef8271ce332b986e961a7aaedcae24e7
28
+ 1D_CFD,Sod2.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133146,1D/CFD/Test/ShockTube/,10e47c5c6ee0d1028f912a52bbf613d0
29
+ 1D_CFD,Sod3.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133147,1D/CFD/Test/ShockTube/,495f2d2e0cee9b4ae28d7fef29bb7c68
30
+ 1D_CFD,Sod4.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133148,1D/CFD/Test/ShockTube/,544399b3d05ae514d66fc96982ee9db8
31
+ 1D_CFD,Sod5.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133149,1D/CFD/Test/ShockTube/,ca7fa6631201e8bd506914d3b0b9fe50
32
+ 1D_CFD,Sod6.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133150,1D/CFD/Train/ShockTube/,adb2d95bf0d48e03bc0d8f4a2cbcd1c6
33
+ 1D_CFD,Sod7.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133151,1D/CFD/Test/ShockTube/,b8d73e1e3ef862ef732a5fa43c64612e
34
+ Diff_Sorp,1D_diff-sorp_NA_NA.h5,https://darus.uni-stuttgart.de/api/access/datafile/133020,1D/diffusion-sorption/,9d466d1213065619d087319e16d9a938
35
+ 1D_ReacDiff,ReacDiff_Nu0.5_Rho1.0.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133177,1D/ReactionDiffusion/Train/,69a429239778d529cd419ed5888ea835
36
+ 1D_ReacDiff,ReacDiff_Nu0.5_Rho10.0.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133178,1D/ReactionDiffusion/Train/,ff7c724b18e7ebe02e19c179852f48ee
37
+ 1D_ReacDiff,ReacDiff_Nu0.5_Rho2.0.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133179,1D/ReactionDiffusion/Train/,ac907daa7e483d203a5c77567cdea561
38
+ 1D_ReacDiff,ReacDiff_Nu0.5_Rho5.0.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133180,1D/ReactionDiffusion/Train/,fb149e7540d8977af158bb8fec1048a3
39
+ 1D_ReacDiff,ReacDiff_Nu1.0_Rho1.0.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133181,1D/ReactionDiffusion/Train/,bd73c2f3448d03e95e98c3831fc8fa70
40
+ 1D_ReacDiff,ReacDiff_Nu1.0_Rho10.0.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133182,1D/ReactionDiffusion/Train/,a94e65631881a27ddae3ef74caf53093
41
+ 1D_ReacDiff,ReacDiff_Nu1.0_Rho2.0.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133183,1D/ReactionDiffusion/Train/,112c01a76447162bd67c8c1073f58ca2
42
+ 1D_ReacDiff,ReacDiff_Nu1.0_Rho5.0.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133184,1D/ReactionDiffusion/Train/,fa224c9d143de37ac6914d391e70f425
43
+ 1D_ReacDiff,ReacDiff_Nu2.0_Rho1.0.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133185,1D/ReactionDiffusion/Train/,925a7e2c9b9e40ad2dd44b7002ec882b
44
+ 1D_ReacDiff,ReacDiff_Nu2.0_Rho10.0.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133186,1D/ReactionDiffusion/Train/,a3e25a9fb8a99f010352ad3dd1afd596
45
+ 1D_ReacDiff,ReacDiff_Nu2.0_Rho2.0.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133187,1D/ReactionDiffusion/Train/,426353b241acfbc64067acb1bdc80ade
46
+ 1D_ReacDiff,ReacDiff_Nu2.0_Rho5.0.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133188,1D/ReactionDiffusion/Train/,f2890bdac5103a3b78fac5f80e57b760
47
+ 1D_ReacDiff,ReacDiff_Nu5.0_Rho1.0.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133189,1D/ReactionDiffusion/Train/,0ed75b55f61bec11c47d379e34959e54
48
+ 1D_ReacDiff,ReacDiff_Nu5.0_Rho10.0.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133190,1D/ReactionDiffusion/Train/,264c80af0e2a6cc1f70e87275e0f6ac4
49
+ 1D_ReacDiff,ReacDiff_Nu5.0_Rho2.0.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133191,1D/ReactionDiffusion/Train/,f927f5d85f2e9bd97dff31b4dab051ae
50
+ 1D_ReacDiff,ReacDiff_Nu5.0_Rho5.0.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133192,1D/ReactionDiffusion/Train/,c9c26e2b5f2d4bf5bcbbd4ba34fefd5e
51
+ 1D_ReacDiff,ReacDiff_react_Nu0.5_Rho1.0.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133157,1D/ReactionDiffusion/Test/,6dc95d36e7126c104c5316485670f2c0
52
+ 1D_ReacDiff,ReacDiff_react_Nu0.5_Rho10.0.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133158,1D/ReactionDiffusion/Test/,5bf21fbc428ac4346c0e7a04a5899f9b
53
+ 1D_ReacDiff,ReacDiff_react_Nu0.5_Rho2.0.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133159,1D/ReactionDiffusion/Test/,5198a1572c201657dc19ad8592e24ac1
54
+ 1D_ReacDiff,ReacDiff_react_Nu0.5_Rho5.0.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133160,1D/ReactionDiffusion/Test/,9968701e89a8b406f18ffbc2b74045d8
55
+ 1D_ReacDiff,ReacDiff_react_Nu1.0_Rho1.0.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133161,1D/ReactionDiffusion/Test/,a258ff5be23d17d89cb10ac94f512165
56
+ 1D_ReacDiff,ReacDiff_react_Nu1.0_Rho10.0.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133162,1D/ReactionDiffusion/Test/,09279a597860dcf48ef6f6539c46920f
57
+ 1D_ReacDiff,ReacDiff_react_Nu1.0_Rho2.0.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133163,1D/ReactionDiffusion/Test/,27113922509f6336f5c8be8d99109828
58
+ 1D_ReacDiff,ReacDiff_react_Nu1.0_Rho5.0.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133164,1D/ReactionDiffusion/Test/,24ab563c27bf07d653b881d3c652bfa5
59
+ 1D_ReacDiff,ReacDiff_react_Nu10.0_Rho1.0.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133165,1D/ReactionDiffusion/Test/,5a0f4d1ff304b11b9b22c3b00b1f63d4
60
+ 1D_ReacDiff,ReacDiff_react_Nu10.0_Rho10.0.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133166,1D/ReactionDiffusion/Test/,4cf67490b01e54f80d45c8e7936b804d
61
+ 1D_ReacDiff,ReacDiff_react_Nu10.0_Rho2.0.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133167,1D/ReactionDiffusion/Test/,3c54c1cd3710b1f5b02a3de3782201ed
62
+ 1D_ReacDiff,ReacDiff_react_Nu10.0_Rho5.0.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133168,1D/ReactionDiffusion/Test/,580f08d5755ee393e7092eafedb70e46
63
+ 1D_ReacDiff,ReacDiff_react_Nu2.0_Rho1.0.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133169,1D/ReactionDiffusion/Test/,208882dcf594d52f8e5017904efd84c9
64
+ 1D_ReacDiff,ReacDiff_react_Nu2.0_Rho10.0.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133170,1D/ReactionDiffusion/Test/,891f8b47e49348b5411882ecebbc1bfd
65
+ 1D_ReacDiff,ReacDiff_react_Nu2.0_Rho2.0.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133171,1D/ReactionDiffusion/Test/,56e8e1e55ac92acf21b253511c370642
66
+ 1D_ReacDiff,ReacDiff_react_Nu2.0_Rho5.0.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133172,1D/ReactionDiffusion/Test/,4498954ef4cd5c63ac530ee46c351790
67
+ 1D_ReacDiff,ReacDiff_react_Nu5.0_Rho1.0.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133173,1D/ReactionDiffusion/Test/,6b2c45748711bea8779dea3b48a53a7e
68
+ 1D_ReacDiff,ReacDiff_react_Nu5.0_Rho10.0.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133174,1D/ReactionDiffusion/Test/,b04c22629bf86d85929386aaf4a5f95d
69
+ 1D_ReacDiff,ReacDiff_react_Nu5.0_Rho2.0.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133175,1D/ReactionDiffusion/Test/,1738e4374456af974e4d0058d3426a69
70
+ 1D_ReacDiff,ReacDiff_react_Nu5.0_Rho5.0.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133176,1D/ReactionDiffusion/Test/,4e3cf0ac66e85dbb77c79861076db8e8
71
+ 2D_CFD,2D_CFD_Rand_M0.1_Eta0.01_Zeta0.01_periodic_128_Train.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/164687,2D/CFD/2D_Train_Rand/,5b21dcccaef4d2145ca579a71153c580
72
+ 2D_CFD,2D_CFD_Rand_M0.1_Eta0.1_Zeta0.1_periodic_128_Train.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/164688,2D/CFD/2D_Train_Rand/,b2733888745a1d64e36df813e979910b
73
+ 2D_CFD,2D_CFD_Rand_M0.1_Eta1e-08_Zeta1e-08_periodic_512_Train.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/164689,2D/CFD/2D_Train_Rand/,ae2552864d0856f58dc74132a2f95d20
74
+ 2D_CFD,2D_CFD_Rand_M1.0_Eta0.01_Zeta0.01_periodic_128_Train.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/164690,2D/CFD/2D_Train_Rand/,21969082d0e9524bcc4708e216148e60
75
+ 2D_CFD,2D_CFD_Rand_M1.0_Eta0.1_Zeta0.1_periodic_128_Train.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/164691,2D/CFD/2D_Train_Rand/,75a34f34a3dcd9bc8ae4b7adf8bf372c
76
+ 2D_CFD,2D_CFD_Rand_M1.0_Eta1e-08_Zeta1e-08_periodic_512_Train.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/164692,2D/CFD/2D_Train_Rand/,04fcc44c39acea69e2869b84cd653d4a
77
+ 2D_CFD,2D_CFD_Turb_M0.1_Eta1e-08_Zeta1e-08_periodic_512_Train.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/164685,2D/CFD/2D_Train_Turb/,844555000d342d2947162c6cf46798e7
78
+ 2D_CFD,2D_CFD_Turb_M1.0_Eta1e-08_Zeta1e-08_periodic_512_Train.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/164686,2D/CFD/2D_Train_Turb/,3f2c7376cde5fb072db0f9814f1c6992
79
+ 2D_CFD,2D_shock.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133193,2D/CFD/Test/2DShock/,b4098c20e2e0a25cd8ee229c9294b899
80
+ 2D_CFD,KH_M01_dk10_Re1e3.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133209,2D/CFD/Test/KH/,54e278b3a7419107a5cd6a8a019225d5
81
+ 2D_CFD,KH_M01_dk1_Re1e3.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133210,2D/CFD/Test/KH/,29ce31d3ce61de3b90ac0b42716c061c
82
+ 2D_CFD,KH_M01_dk2_Re1e3.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133211,2D/CFD/Test/KH/,de0b2a3343de028e4c162723ec571713
83
+ 2D_CFD,KH_M01_dk5_Re1e3.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133212,2D/CFD/Test/KH/,5bcaed9eb7db5fd55d51f7347a938413
84
+ 2D_CFD,KH_M02_dk1_Re1e3.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133213,2D/CFD/Test/KH/,fbc392f83affc2224a5703dad7bda28a
85
+ 2D_CFD,KH_M04_dk1_Re1e3.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133214,2D/CFD/Test/KH/,66626c935ab070a1e5df52bcf6aeccc7
86
+ 2D_CFD,KH_M1_dk1_Re1e3.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133215,2D/CFD/Test/KH/,338393c118196048a38d1df731a33b0a
87
+ 2D_CFD,OTVortex.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133216,2D/CFD/Test/TOV/,3b68cb29e3d1906a19ce2cb4ce71faa4
88
+ Darcy,2D_DarcyFlow_beta0.01_Train.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133217,2D/DarcyFlow/,d05c287d4c0b7d3178b0097084238251
89
+ Darcy,2D_DarcyFlow_beta0.1_Train.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133218,2D/DarcyFlow/,294f9a03a4aa16b0e386469ca8b471be
90
+ Darcy,2D_DarcyFlow_beta1.0_Train.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133219,2D/DarcyFlow/,81694ed31306ff2e5f6b76349b0b4389
91
+ Darcy,2D_DarcyFlow_beta10.0_Train.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133220,2D/DarcyFlow/,a7f23cf8011fc211b180828af39b7d1a
92
+ Darcy,2D_DarcyFlow_beta100.0_Train.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133221,2D/DarcyFlow/,7c09f3b1bb097737d3fd52ffb0c6f1c8
93
+ 2D_ReacDiff,2D_diff-react_NA_NA.h5,https://darus.uni-stuttgart.de/api/access/datafile/133017,2D/diffusion-reaction/,b8d0b86064193195ddc30c33be5dc949
94
+ NS_Incom,ns_incom_inhom_2d_512-0.h5,https://darus.uni-stuttgart.de/api/access/datafile/133280,2D/NS_incom/,54109d46f9c957317bd670ddb2068ac0
95
+ NS_Incom,ns_incom_inhom_2d_512-1.h5,https://darus.uni-stuttgart.de/api/access/datafile/136439,2D/NS_incom/,e280fd3208fccb8ad5c1ff46c4796864
96
+ NS_Incom,ns_incom_inhom_2d_512-10.h5,https://darus.uni-stuttgart.de/api/access/datafile/133309,2D/NS_incom/,f497297295ce3f2ebfefd81d00de31da
97
+ NS_Incom,ns_incom_inhom_2d_512-100.h5,https://darus.uni-stuttgart.de/api/access/datafile/133267,2D/NS_incom/,fcf3b743fc82ac8706cfa85a7b259dcd
98
+ NS_Incom,ns_incom_inhom_2d_512-101.h5,https://darus.uni-stuttgart.de/api/access/datafile/133289,2D/NS_incom/,9d1cd827c95f80975d09eae0b4e6c3da
99
+ NS_Incom,ns_incom_inhom_2d_512-102.h5,https://darus.uni-stuttgart.de/api/access/datafile/133291,2D/NS_incom/,ce1f0a7a5e52bf49db43084eef367639
100
+ NS_Incom,ns_incom_inhom_2d_512-103.h5,https://darus.uni-stuttgart.de/api/access/datafile/133294,2D/NS_incom/,4e0a5f39d14745adf96655f220329550
101
+ NS_Incom,ns_incom_inhom_2d_512-104.h5,https://darus.uni-stuttgart.de/api/access/datafile/133298,2D/NS_incom/,52f59ffdef04338f2630e98e32448086
102
+ NS_Incom,ns_incom_inhom_2d_512-105.h5,https://darus.uni-stuttgart.de/api/access/datafile/133290,2D/NS_incom/,1010c2d0c807e5e355e336fe8d905067
103
+ NS_Incom,ns_incom_inhom_2d_512-106.h5,https://darus.uni-stuttgart.de/api/access/datafile/133374,2D/NS_incom/,1127b6e69bd83b128a9420ae58187729
104
+ NS_Incom,ns_incom_inhom_2d_512-107.h5,https://darus.uni-stuttgart.de/api/access/datafile/133375,2D/NS_incom/,38e676bfd97cc946cd6db2af48c487bf
105
+ NS_Incom,ns_incom_inhom_2d_512-108.h5,https://darus.uni-stuttgart.de/api/access/datafile/133376,2D/NS_incom/,0355430cc9cee8e921378eda7c1c6b84
106
+ NS_Incom,ns_incom_inhom_2d_512-109.h5,https://darus.uni-stuttgart.de/api/access/datafile/133305,2D/NS_incom/,eff2dd6e71649bb3f9f37f951bd54d63
107
+ NS_Incom,ns_incom_inhom_2d_512-11.h5,https://darus.uni-stuttgart.de/api/access/datafile/133336,2D/NS_incom/,3342616df260d3ec60fbf24dfaabf9ae
108
+ NS_Incom,ns_incom_inhom_2d_512-110.h5,https://darus.uni-stuttgart.de/api/access/datafile/133313,2D/NS_incom/,d9739419a2100b390742acf88e4548b3
109
+ NS_Incom,ns_incom_inhom_2d_512-111.h5,https://darus.uni-stuttgart.de/api/access/datafile/133318,2D/NS_incom/,ea5239ab7204d53c476f48739a4e0d78
110
+ NS_Incom,ns_incom_inhom_2d_512-112.h5,https://darus.uni-stuttgart.de/api/access/datafile/133324,2D/NS_incom/,cdaf99ff4ef69ee79ad60648a06259d7
111
+ NS_Incom,ns_incom_inhom_2d_512-113.h5,https://darus.uni-stuttgart.de/api/access/datafile/133377,2D/NS_incom/,aee8074da1cf6e19ee421a7b86caeaa3
112
+ NS_Incom,ns_incom_inhom_2d_512-114.h5,https://darus.uni-stuttgart.de/api/access/datafile/133378,2D/NS_incom/,f1b4550b00a887771e79a81c7336a4b0
113
+ NS_Incom,ns_incom_inhom_2d_512-115.h5,https://darus.uni-stuttgart.de/api/access/datafile/133379,2D/NS_incom/,0756d685a754b84337d1de7280572c85
114
+ NS_Incom,ns_incom_inhom_2d_512-116.h5,https://darus.uni-stuttgart.de/api/access/datafile/133380,2D/NS_incom/,f7c55c110dcf0aeaed03699bc0942f0d
115
+ NS_Incom,ns_incom_inhom_2d_512-117.h5,https://darus.uni-stuttgart.de/api/access/datafile/133381,2D/NS_incom/,375331e30111457028de38b89e03c404
116
+ NS_Incom,ns_incom_inhom_2d_512-118.h5,https://darus.uni-stuttgart.de/api/access/datafile/133383,2D/NS_incom/,6d2fbe4a0e32e1934d349616ca6b85d8
117
+ NS_Incom,ns_incom_inhom_2d_512-119.h5,https://darus.uni-stuttgart.de/api/access/datafile/133385,2D/NS_incom/,993112f4fe3910230005cc35099c7d59
118
+ NS_Incom,ns_incom_inhom_2d_512-12.h5,https://darus.uni-stuttgart.de/api/access/datafile/133574,2D/NS_incom/,71f407084513d7c0e93377c1b46e4bb4
119
+ NS_Incom,ns_incom_inhom_2d_512-120.h5,https://darus.uni-stuttgart.de/api/access/datafile/133386,2D/NS_incom/,f7c0b147e5b3f217a1d534f303ab52f4
120
+ NS_Incom,ns_incom_inhom_2d_512-121.h5,https://darus.uni-stuttgart.de/api/access/datafile/133441,2D/NS_incom/,785d733406eeec2694eb89769f72971b
121
+ NS_Incom,ns_incom_inhom_2d_512-122.h5,https://darus.uni-stuttgart.de/api/access/datafile/133346,2D/NS_incom/,eeb2488aa0a5d1f14069dcfaaa0da71d
122
+ NS_Incom,ns_incom_inhom_2d_512-123.h5,https://darus.uni-stuttgart.de/api/access/datafile/133442,2D/NS_incom/,57e341bea82fd33ec095e7070fd1ae38
123
+ NS_Incom,ns_incom_inhom_2d_512-124.h5,https://darus.uni-stuttgart.de/api/access/datafile/133443,2D/NS_incom/,6aba12029ab0ddb165c1c71f3b1c8f27
124
+ NS_Incom,ns_incom_inhom_2d_512-125.h5,https://darus.uni-stuttgart.de/api/access/datafile/133444,2D/NS_incom/,2b15572d1dda8048ab9713c2318d9322
125
+ NS_Incom,ns_incom_inhom_2d_512-126.h5,https://darus.uni-stuttgart.de/api/access/datafile/133303,2D/NS_incom/,56bcc085c7c0d303dedf147e6c608730
126
+ NS_Incom,ns_incom_inhom_2d_512-127.h5,https://darus.uni-stuttgart.de/api/access/datafile/133292,2D/NS_incom/,71b5191ed81e86ec68f0a7bcd6a07759
127
+ NS_Incom,ns_incom_inhom_2d_512-128.h5,https://darus.uni-stuttgart.de/api/access/datafile/133454,2D/NS_incom/,72b8110ec99098530494bbf0b64f51f7
128
+ NS_Incom,ns_incom_inhom_2d_512-129.h5,https://darus.uni-stuttgart.de/api/access/datafile/133293,2D/NS_incom/,0e2cda441dec2c19ffaefd8ad035220e
129
+ NS_Incom,ns_incom_inhom_2d_512-13.h5,https://darus.uni-stuttgart.de/api/access/datafile/133304,2D/NS_incom/,424f6d2ab607612599645442a45234cf
130
+ NS_Incom,ns_incom_inhom_2d_512-130.h5,https://darus.uni-stuttgart.de/api/access/datafile/133295,2D/NS_incom/,8a6f6b785ce64aecc9f46a31650662d0
131
+ NS_Incom,ns_incom_inhom_2d_512-131.h5,https://darus.uni-stuttgart.de/api/access/datafile/133275,2D/NS_incom/,3c4541c0ed411fdc9641b27978318ad6
132
+ NS_Incom,ns_incom_inhom_2d_512-132.h5,https://darus.uni-stuttgart.de/api/access/datafile/133296,2D/NS_incom/,3c4541c0ed411fdc9641b27978318ad6
133
+ NS_Incom,ns_incom_inhom_2d_512-133.h5,https://darus.uni-stuttgart.de/api/access/datafile/133297,2D/NS_incom/,ba2bb63092d5d0cb8fae29a0c18ff851
134
+ NS_Incom,ns_incom_inhom_2d_512-134.h5,https://darus.uni-stuttgart.de/api/access/datafile/133269,2D/NS_incom/,4b0b6e0c3d22772ea20dc4c3e07c5e7d
135
+ NS_Incom,ns_incom_inhom_2d_512-135.h5,https://darus.uni-stuttgart.de/api/access/datafile/133274,2D/NS_incom/,e48583df141230425ea36d093b0b46f6
136
+ NS_Incom,ns_incom_inhom_2d_512-136.h5,https://darus.uni-stuttgart.de/api/access/datafile/133299,2D/NS_incom/,4b0b6e0c3d22772ea20dc4c3e07c5e7d
137
+ NS_Incom,ns_incom_inhom_2d_512-137.h5,https://darus.uni-stuttgart.de/api/access/datafile/133300,2D/NS_incom/,e48583df141230425ea36d093b0b46f6
138
+ NS_Incom,ns_incom_inhom_2d_512-138.h5,https://darus.uni-stuttgart.de/api/access/datafile/133302,2D/NS_incom/,294b49a65b2fc861e23dfc40098d6e91
139
+ NS_Incom,ns_incom_inhom_2d_512-139.h5,https://darus.uni-stuttgart.de/api/access/datafile/133575,2D/NS_incom/,14c9705c20d630e08f4387cc4880a646
140
+ NS_Incom,ns_incom_inhom_2d_512-14.h5,https://darus.uni-stuttgart.de/api/access/datafile/133281,2D/NS_incom/,f8099cba29f6374b6454424ef521b86d
141
+ NS_Incom,ns_incom_inhom_2d_512-140.h5,https://darus.uni-stuttgart.de/api/access/datafile/133698,2D/NS_incom/,f8c12c7ee3973eab55a531a24689ce95
142
+ NS_Incom,ns_incom_inhom_2d_512-141.h5,https://darus.uni-stuttgart.de/api/access/datafile/133307,2D/NS_incom/,95a897a07713b654ce497b513ada21df
143
+ NS_Incom,ns_incom_inhom_2d_512-142.h5,https://darus.uni-stuttgart.de/api/access/datafile/133306,2D/NS_incom/,b0c964793163e49d46ddfa783b7f2a87
144
+ NS_Incom,ns_incom_inhom_2d_512-143.h5,https://darus.uni-stuttgart.de/api/access/datafile/133590,2D/NS_incom/,b35dd28ee302e41dd9500e65e681bd93
145
+ NS_Incom,ns_incom_inhom_2d_512-144.h5,https://darus.uni-stuttgart.de/api/access/datafile/133591,2D/NS_incom/,d7b000376bcb6cf4949e2dec4232a926
146
+ NS_Incom,ns_incom_inhom_2d_512-145.h5,https://darus.uni-stuttgart.de/api/access/datafile/133592,2D/NS_incom/,e90fad8268cf369136cebba09f3b06e8
147
+ NS_Incom,ns_incom_inhom_2d_512-146.h5,https://darus.uni-stuttgart.de/api/access/datafile/133593,2D/NS_incom/,568fac9aab2f599c20da783e9c7f3f0a
148
+ NS_Incom,ns_incom_inhom_2d_512-147.h5,https://darus.uni-stuttgart.de/api/access/datafile/133599,2D/NS_incom/,43194bdefada7b59cac975cc08e6d12d
149
+ NS_Incom,ns_incom_inhom_2d_512-148.h5,https://darus.uni-stuttgart.de/api/access/datafile/133600,2D/NS_incom/,41a78815f65ff7b607d69dd508b3ffea
150
+ NS_Incom,ns_incom_inhom_2d_512-149.h5,https://darus.uni-stuttgart.de/api/access/datafile/133601,2D/NS_incom/,3ebb3a014d7c33787cda458a689f7728
151
+ NS_Incom,ns_incom_inhom_2d_512-15.h5,https://darus.uni-stuttgart.de/api/access/datafile/133609,2D/NS_incom/,74dd42aaf8b809925ff6dce345be11fd
152
+ NS_Incom,ns_incom_inhom_2d_512-150.h5,https://darus.uni-stuttgart.de/api/access/datafile/133602,2D/NS_incom/,562bd9a0d25ce189a9635d385e3d6a3c
153
+ NS_Incom,ns_incom_inhom_2d_512-151.h5,https://darus.uni-stuttgart.de/api/access/datafile/133271,2D/NS_incom/,1acb06d08a4ae3a17ddbe04a2e3fd5cf
154
+ NS_Incom,ns_incom_inhom_2d_512-152.h5,https://darus.uni-stuttgart.de/api/access/datafile/133603,2D/NS_incom/,0023490cb7c00a4a75f9a252af9be71c
155
+ NS_Incom,ns_incom_inhom_2d_512-153.h5,https://darus.uni-stuttgart.de/api/access/datafile/133594,2D/NS_incom/,669788ec079a3e695b68821c17d79a26
156
+ NS_Incom,ns_incom_inhom_2d_512-154.h5,https://darus.uni-stuttgart.de/api/access/datafile/133604,2D/NS_incom/,547cfa5abca50bcd927630b8264d6876
157
+ NS_Incom,ns_incom_inhom_2d_512-155.h5,https://darus.uni-stuttgart.de/api/access/datafile/133605,2D/NS_incom/,f28fcf6d99c0d1f1bb08a82be94114eb
158
+ NS_Incom,ns_incom_inhom_2d_512-156.h5,https://darus.uni-stuttgart.de/api/access/datafile/133606,2D/NS_incom/,8c52c6258bdbbc7c4fe1cb7892b5bd23
159
+ NS_Incom,ns_incom_inhom_2d_512-157.h5,https://darus.uni-stuttgart.de/api/access/datafile/133607,2D/NS_incom/,91f4ab6bd4eef91ab43908290539bdab
160
+ NS_Incom,ns_incom_inhom_2d_512-158.h5,https://darus.uni-stuttgart.de/api/access/datafile/133321,2D/NS_incom/,299521efb9f871919b003e86d0e8d628
161
+ NS_Incom,ns_incom_inhom_2d_512-159.h5,https://darus.uni-stuttgart.de/api/access/datafile/133608,2D/NS_incom/,54b54c80708366c74d4aee82688be26e
162
+ NS_Incom,ns_incom_inhom_2d_512-16.h5,https://darus.uni-stuttgart.de/api/access/datafile/133311,2D/NS_incom/,b83a1465fee7f5dcb9826d1bbb4b6f71
163
+ NS_Incom,ns_incom_inhom_2d_512-160.h5,https://darus.uni-stuttgart.de/api/access/datafile/133610,2D/NS_incom/,e90d31aff2172f508846473126c78947
164
+ NS_Incom,ns_incom_inhom_2d_512-161.h5,https://darus.uni-stuttgart.de/api/access/datafile/133611,2D/NS_incom/,24b4b52f0a03dc52f7159ef10652c40a
165
+ NS_Incom,ns_incom_inhom_2d_512-162.h5,https://darus.uni-stuttgart.de/api/access/datafile/133683,2D/NS_incom/,87d592f2be6a76751b2b36e8bf0d0a88
166
+ NS_Incom,ns_incom_inhom_2d_512-163.h5,https://darus.uni-stuttgart.de/api/access/datafile/133612,2D/NS_incom/,2acf733c2833fb815487271bbb5ce66f
167
+ NS_Incom,ns_incom_inhom_2d_512-164.h5,https://darus.uni-stuttgart.de/api/access/datafile/136435,2D/NS_incom/,8470816b05ecbabd2ec6036f346d4d79
168
+ NS_Incom,ns_incom_inhom_2d_512-165.h5,https://darus.uni-stuttgart.de/api/access/datafile/133685,2D/NS_incom/,61d677b1edd03987c26648d7d136672d
169
+ NS_Incom,ns_incom_inhom_2d_512-166.h5,https://darus.uni-stuttgart.de/api/access/datafile/133308,2D/NS_incom/,4f1dd0e87f4ad925dc82444da4df7d3a
170
+ NS_Incom,ns_incom_inhom_2d_512-167.h5,https://darus.uni-stuttgart.de/api/access/datafile/133684,2D/NS_incom/,8e90f2ab307a6c5f8a2e1a3befa02d8f
171
+ NS_Incom,ns_incom_inhom_2d_512-168.h5,https://darus.uni-stuttgart.de/api/access/datafile/133701,2D/NS_incom/,1f8ef01efadb38eb638c383e242d9511
172
+ NS_Incom,ns_incom_inhom_2d_512-169.h5,https://darus.uni-stuttgart.de/api/access/datafile/133310,2D/NS_incom/,d2ae69de21cb38a24dbc174f34afeb70
173
+ NS_Incom,ns_incom_inhom_2d_512-17.h5,https://darus.uni-stuttgart.de/api/access/datafile/133325,2D/NS_incom/,9c206312c353aa6dcaa3144d53753075
174
+ NS_Incom,ns_incom_inhom_2d_512-170.h5,https://darus.uni-stuttgart.de/api/access/datafile/133312,2D/NS_incom/,d7f9e313387dfb3efc356c1d5606ebc3
175
+ NS_Incom,ns_incom_inhom_2d_512-171.h5,https://darus.uni-stuttgart.de/api/access/datafile/133679,2D/NS_incom/,f6ade3872e3e0db0d6b8af4cc9fd7ed6
176
+ NS_Incom,ns_incom_inhom_2d_512-172.h5,https://darus.uni-stuttgart.de/api/access/datafile/133314,2D/NS_incom/,986851271af43e55264a8db4229b2049
177
+ NS_Incom,ns_incom_inhom_2d_512-173.h5,https://darus.uni-stuttgart.de/api/access/datafile/133315,2D/NS_incom/,e064a3b043136572de413744f470e795
178
+ NS_Incom,ns_incom_inhom_2d_512-174.h5,https://darus.uni-stuttgart.de/api/access/datafile/133316,2D/NS_incom/,d4906a07e5e94b6e147570613205d19d
179
+ NS_Incom,ns_incom_inhom_2d_512-175.h5,https://darus.uni-stuttgart.de/api/access/datafile/133317,2D/NS_incom/,7d7230752c444c2570396927e9f9b4bf
180
+ NS_Incom,ns_incom_inhom_2d_512-176.h5,https://darus.uni-stuttgart.de/api/access/datafile/133319,2D/NS_incom/,47a59095761d69cecc886ddb3027b556
181
+ NS_Incom,ns_incom_inhom_2d_512-177.h5,https://darus.uni-stuttgart.de/api/access/datafile/133320,2D/NS_incom/,979de8be5c402f6dc50060e92ca6245a
182
+ NS_Incom,ns_incom_inhom_2d_512-178.h5,https://darus.uni-stuttgart.de/api/access/datafile/133342,2D/NS_incom/,a72fc652b38b2dc0b0aa406bb35cd676
183
+ NS_Incom,ns_incom_inhom_2d_512-179.h5,https://darus.uni-stuttgart.de/api/access/datafile/133697,2D/NS_incom/,6dd8a78a85ad7c0e03daaabcb3826fdb
184
+ NS_Incom,ns_incom_inhom_2d_512-18.h5,https://darus.uni-stuttgart.de/api/access/datafile/136437,2D/NS_incom/,272c1cdf65a90dc3e4308df8a15ef7dc
185
+ NS_Incom,ns_incom_inhom_2d_512-180.h5,https://darus.uni-stuttgart.de/api/access/datafile/133326,2D/NS_incom/,f9ffc612a7438dc17524818b48acb7ac
186
+ NS_Incom,ns_incom_inhom_2d_512-181.h5,https://darus.uni-stuttgart.de/api/access/datafile/133327,2D/NS_incom/,78ee160778aab06d0e88507944f661b4
187
+ NS_Incom,ns_incom_inhom_2d_512-182.h5,https://darus.uni-stuttgart.de/api/access/datafile/133328,2D/NS_incom/,e9a36056e3e9fe23937fbf65e0dfc16a
188
+ NS_Incom,ns_incom_inhom_2d_512-183.h5,https://darus.uni-stuttgart.de/api/access/datafile/133329,2D/NS_incom/,5107343596d93f8739b4f09926daeef7
189
+ NS_Incom,ns_incom_inhom_2d_512-184.h5,https://darus.uni-stuttgart.de/api/access/datafile/133330,2D/NS_incom/,f4a7aff84a03fff6e1958617abf373c4
190
+ NS_Incom,ns_incom_inhom_2d_512-185.h5,https://darus.uni-stuttgart.de/api/access/datafile/133331,2D/NS_incom/,f59cb88424414a28aad308499935619b
191
+ NS_Incom,ns_incom_inhom_2d_512-186.h5,https://darus.uni-stuttgart.de/api/access/datafile/133666,2D/NS_incom/,d9bc339d2d2047f8f6a369ad2e4e9357
192
+ NS_Incom,ns_incom_inhom_2d_512-187.h5,https://darus.uni-stuttgart.de/api/access/datafile/136436,2D/NS_incom/,4d0461fc34683816d5906d4809c84432
193
+ NS_Incom,ns_incom_inhom_2d_512-188.h5,https://darus.uni-stuttgart.de/api/access/datafile/133332,2D/NS_incom/,3565b7f3ab620c3716a25865670b98fb
194
+ NS_Incom,ns_incom_inhom_2d_512-189.h5,https://darus.uni-stuttgart.de/api/access/datafile/133668,2D/NS_incom/,d385ef198ae29ef5eff9aad6d129621a
195
+ NS_Incom,ns_incom_inhom_2d_512-19.h5,https://darus.uni-stuttgart.de/api/access/datafile/133335,2D/NS_incom/,f67fec4521fc9a113cd16de7f7c283ff
196
+ NS_Incom,ns_incom_inhom_2d_512-190.h5,https://darus.uni-stuttgart.de/api/access/datafile/136438,2D/NS_incom/,2dce11f971c3561083d580f7299f3e02
197
+ NS_Incom,ns_incom_inhom_2d_512-191.h5,https://darus.uni-stuttgart.de/api/access/datafile/133663,2D/NS_incom/,fda967e4ca21881b00d5c67b984d4c78
198
+ NS_Incom,ns_incom_inhom_2d_512-192.h5,https://darus.uni-stuttgart.de/api/access/datafile/133664,2D/NS_incom/,85542754064fcab3675930ac607b68e0
199
+ NS_Incom,ns_incom_inhom_2d_512-193.h5,https://darus.uni-stuttgart.de/api/access/datafile/133371,2D/NS_incom/,2f67289963f5d9d3041acf67f45a38b7
200
+ NS_Incom,ns_incom_inhom_2d_512-194.h5,https://darus.uni-stuttgart.de/api/access/datafile/133669,2D/NS_incom/,609d2d5dbd19ccdb6cee8ab67df66c38
201
+ NS_Incom,ns_incom_inhom_2d_512-195.h5,https://darus.uni-stuttgart.de/api/access/datafile/133699,2D/NS_incom/,b39dbe9518dbc19da27345ce569489e4
202
+ NS_Incom,ns_incom_inhom_2d_512-196.h5,https://darus.uni-stuttgart.de/api/access/datafile/133665,2D/NS_incom/,fc636092ce4175b3d908d156db66deb8
203
+ NS_Incom,ns_incom_inhom_2d_512-197.h5,https://darus.uni-stuttgart.de/api/access/datafile/133702,2D/NS_incom/,0b3bd457d75ccb3120f649bcdb0089f2
204
+ NS_Incom,ns_incom_inhom_2d_512-198.h5,https://darus.uni-stuttgart.de/api/access/datafile/133368,2D/NS_incom/,526cce686110114720bc9d84eb8d16ee
205
+ NS_Incom,ns_incom_inhom_2d_512-199.h5,https://darus.uni-stuttgart.de/api/access/datafile/133667,2D/NS_incom/,123272aaaf52747c39e8c32b4955390b
206
+ NS_Incom,ns_incom_inhom_2d_512-2.h5,https://darus.uni-stuttgart.de/api/access/datafile/133721,2D/NS_incom/,1d7a2aac41a410bea6c887f624273d20
207
+ NS_Incom,ns_incom_inhom_2d_512-20.h5,https://darus.uni-stuttgart.de/api/access/datafile/133283,2D/NS_incom/,859c8c8cede4fd611548af291acfa362
208
+ NS_Incom,ns_incom_inhom_2d_512-200.h5,https://darus.uni-stuttgart.de/api/access/datafile/136440,2D/NS_incom/,621c5bdf211ab8aa6d586ede4b51c5a2
209
+ NS_Incom,ns_incom_inhom_2d_512-201.h5,https://darus.uni-stuttgart.de/api/access/datafile/133367,2D/NS_incom/,89b6667026a2f289b81e48b6d75ef4d6
210
+ NS_Incom,ns_incom_inhom_2d_512-202.h5,https://darus.uni-stuttgart.de/api/access/datafile/133661,2D/NS_incom/,a70f3e38089caec832075b7c3ce86744
211
+ NS_Incom,ns_incom_inhom_2d_512-203.h5,https://darus.uni-stuttgart.de/api/access/datafile/133660,2D/NS_incom/,f0bc2659a6f671813e88409bdb08531a
212
+ NS_Incom,ns_incom_inhom_2d_512-204.h5,https://darus.uni-stuttgart.de/api/access/datafile/133671,2D/NS_incom/,c109d2aa5cf0688c5f0565b03d1e3ce0
213
+ NS_Incom,ns_incom_inhom_2d_512-205.h5,https://darus.uni-stuttgart.de/api/access/datafile/133688,2D/NS_incom/,2fa87303d667e4d50935fe26f04437b4
214
+ NS_Incom,ns_incom_inhom_2d_512-206.h5,https://darus.uni-stuttgart.de/api/access/datafile/136441,2D/NS_incom/,28a557b57ffce43cb9de4cc48642fc05
215
+ NS_Incom,ns_incom_inhom_2d_512-207.h5,https://darus.uni-stuttgart.de/api/access/datafile/133282,2D/NS_incom/,01a63c9ad30ac4d140f86bb2fa929274
216
+ NS_Incom,ns_incom_inhom_2d_512-208.h5,https://darus.uni-stuttgart.de/api/access/datafile/133340,2D/NS_incom/,d2aa05820caf6a38be4711ef797d3bf4
217
+ NS_Incom,ns_incom_inhom_2d_512-209.h5,https://darus.uni-stuttgart.de/api/access/datafile/133333,2D/NS_incom/,9e17d624a2eea451f0956f9184ce63d8
218
+ NS_Incom,ns_incom_inhom_2d_512-21.h5,https://darus.uni-stuttgart.de/api/access/datafile/133334,2D/NS_incom/,859c8c8cede4fd611548af291acfa362
219
+ NS_Incom,ns_incom_inhom_2d_512-210.h5,https://darus.uni-stuttgart.de/api/access/datafile/133703,2D/NS_incom/,fb5de2b943891ed1ec1bbb682520bc78
220
+ NS_Incom,ns_incom_inhom_2d_512-211.h5,https://darus.uni-stuttgart.de/api/access/datafile/133286,2D/NS_incom/,24f8fe56ffe24baaa03f6bf12762ef95
221
+ NS_Incom,ns_incom_inhom_2d_512-212.h5,https://darus.uni-stuttgart.de/api/access/datafile/133284,2D/NS_incom/,8e4561fe4ae69d62000df136f980fae3
222
+ NS_Incom,ns_incom_inhom_2d_512-213.h5,https://darus.uni-stuttgart.de/api/access/datafile/133285,2D/NS_incom/,d1101482e0d766334b92c4d0e11d73f3
223
+ NS_Incom,ns_incom_inhom_2d_512-214.h5,https://darus.uni-stuttgart.de/api/access/datafile/133337,2D/NS_incom/,d1101482e0d766334b92c4d0e11d73f3
224
+ NS_Incom,ns_incom_inhom_2d_512-215.h5,https://darus.uni-stuttgart.de/api/access/datafile/133287,2D/NS_incom/,f615b96dd71864673bba274240acfb12
225
+ NS_Incom,ns_incom_inhom_2d_512-216.h5,https://darus.uni-stuttgart.de/api/access/datafile/133288,2D/NS_incom/,892986d49b49b431f3f172d7baa9977b
226
+ NS_Incom,ns_incom_inhom_2d_512-217.h5,https://darus.uni-stuttgart.de/api/access/datafile/133338,2D/NS_incom/,a471e758505de149dba27c9fa2d29198
227
+ NS_Incom,ns_incom_inhom_2d_512-218.h5,https://darus.uni-stuttgart.de/api/access/datafile/133339,2D/NS_incom/,f615b96dd71864673bba274240acfb12
228
+ NS_Incom,ns_incom_inhom_2d_512-219.h5,https://darus.uni-stuttgart.de/api/access/datafile/133341,2D/NS_incom/,1965889c070e802965ebe19b01cdf2db
229
+ NS_Incom,ns_incom_inhom_2d_512-22.h5,https://darus.uni-stuttgart.de/api/access/datafile/133344,2D/NS_incom/,2235dd8e60d0d43b49aba99819ddb8d3
230
+ NS_Incom,ns_incom_inhom_2d_512-220.h5,https://darus.uni-stuttgart.de/api/access/datafile/133343,2D/NS_incom/,588ce18cf51ca3517527f3dd0b0ee443
231
+ NS_Incom,ns_incom_inhom_2d_512-221.h5,https://darus.uni-stuttgart.de/api/access/datafile/133345,2D/NS_incom/,e57ad44f1ae4fa9f31ddff57578507ce
232
+ NS_Incom,ns_incom_inhom_2d_512-222.h5,https://darus.uni-stuttgart.de/api/access/datafile/133347,2D/NS_incom/,892986d49b49b431f3f172d7baa9977b
233
+ NS_Incom,ns_incom_inhom_2d_512-223.h5,https://darus.uni-stuttgart.de/api/access/datafile/133369,2D/NS_incom/,9da97766e3b311af6d79a946f8db5184
234
+ NS_Incom,ns_incom_inhom_2d_512-224.h5,https://darus.uni-stuttgart.de/api/access/datafile/133370,2D/NS_incom/,44a864a879590427273a3dbf1aa2c190
235
+ NS_Incom,ns_incom_inhom_2d_512-225.h5,https://darus.uni-stuttgart.de/api/access/datafile/133372,2D/NS_incom/,8e4561fe4ae69d62000df136f980fae3
236
+ NS_Incom,ns_incom_inhom_2d_512-226.h5,https://darus.uni-stuttgart.de/api/access/datafile/133373,2D/NS_incom/,adf37d1dd066c2dc8fb3886369b5434f
237
+ NS_Incom,ns_incom_inhom_2d_512-227.h5,https://darus.uni-stuttgart.de/api/access/datafile/136442,2D/NS_incom/,de46866be488bb59d0d1c8ddc13a4dcb
238
+ NS_Incom,ns_incom_inhom_2d_512-228.h5,https://darus.uni-stuttgart.de/api/access/datafile/133723,2D/NS_incom/,43975b57db7c922f94703d1c63fdaebe
239
+ NS_Incom,ns_incom_inhom_2d_512-229.h5,https://darus.uni-stuttgart.de/api/access/datafile/136443,2D/NS_incom/,805f1a7336301b2bcc4a9f0fcce43864
240
+ NS_Incom,ns_incom_inhom_2d_512-23.h5,https://darus.uni-stuttgart.de/api/access/datafile/133617,2D/NS_incom/,d026b809986795e73f13368a763e56d2
241
+ NS_Incom,ns_incom_inhom_2d_512-230.h5,https://darus.uni-stuttgart.de/api/access/datafile/136445,2D/NS_incom/,e43acdfb560a45181111256e4ce45cc1
242
+ NS_Incom,ns_incom_inhom_2d_512-231.h5,https://darus.uni-stuttgart.de/api/access/datafile/133613,2D/NS_incom/,f56497f0ba427fdf9e215c885c6c1d3a
243
+ NS_Incom,ns_incom_inhom_2d_512-232.h5,https://darus.uni-stuttgart.de/api/access/datafile/133692,2D/NS_incom/,599ff6b1ea10572a829a13e5fc9c2523
244
+ NS_Incom,ns_incom_inhom_2d_512-233.h5,https://darus.uni-stuttgart.de/api/access/datafile/133614,2D/NS_incom/,b0cbda1f7435ccbe04bc6a09840624df
245
+ NS_Incom,ns_incom_inhom_2d_512-234.h5,https://darus.uni-stuttgart.de/api/access/datafile/133700,2D/NS_incom/,6331197878327890af4da21d5c4065bf
246
+ NS_Incom,ns_incom_inhom_2d_512-235.h5,https://darus.uni-stuttgart.de/api/access/datafile/133693,2D/NS_incom/,dd937dace08671e5b61a8f960d845561
247
+ NS_Incom,ns_incom_inhom_2d_512-236.h5,https://darus.uni-stuttgart.de/api/access/datafile/133615,2D/NS_incom/,07a4596a86c3099cf7f6e7e5c47b71b6
248
+ NS_Incom,ns_incom_inhom_2d_512-237.h5,https://darus.uni-stuttgart.de/api/access/datafile/133616,2D/NS_incom/,4c977f28e59fd8cc877f6d95d96011e5
249
+ NS_Incom,ns_incom_inhom_2d_512-238.h5,https://darus.uni-stuttgart.de/api/access/datafile/133695,2D/NS_incom/,9d3bad375edf84cd86a648caca49b6b5
250
+ NS_Incom,ns_incom_inhom_2d_512-239.h5,https://darus.uni-stuttgart.de/api/access/datafile/136446,2D/NS_incom/,bae42dea9f17c4ca5ec33997e3eb966f
251
+ NS_Incom,ns_incom_inhom_2d_512-24.h5,https://darus.uni-stuttgart.de/api/access/datafile/133618,2D/NS_incom/,c7e0bdab4f18748914263534cf21422d
252
+ NS_Incom,ns_incom_inhom_2d_512-240.h5,https://darus.uni-stuttgart.de/api/access/datafile/133268,2D/NS_incom/,10b14e2a9eb6fcf5a88a7a5447481aa7
253
+ NS_Incom,ns_incom_inhom_2d_512-241.h5,https://darus.uni-stuttgart.de/api/access/datafile/136447,2D/NS_incom/,0aa745b2377c49379f91f7f000d8f23e
254
+ NS_Incom,ns_incom_inhom_2d_512-242.h5,https://darus.uni-stuttgart.de/api/access/datafile/136448,2D/NS_incom/,79033da1fbd3167b560769c8292c15f4
255
+ NS_Incom,ns_incom_inhom_2d_512-243.h5,https://darus.uni-stuttgart.de/api/access/datafile/136450,2D/NS_incom/,896aa796fbf2052da573c30694744794
256
+ NS_Incom,ns_incom_inhom_2d_512-244.h5,https://darus.uni-stuttgart.de/api/access/datafile/136451,2D/NS_incom/,3063dee5e25740331b4c606939c3960e
257
+ NS_Incom,ns_incom_inhom_2d_512-245.h5,https://darus.uni-stuttgart.de/api/access/datafile/136457,2D/NS_incom/,daf18bcc864d5db453f7321de3c199bf
258
+ NS_Incom,ns_incom_inhom_2d_512-246.h5,https://darus.uni-stuttgart.de/api/access/datafile/136458,2D/NS_incom/,8f97135545fdea8eb504c6f76878b77c
259
+ NS_Incom,ns_incom_inhom_2d_512-247.h5,https://darus.uni-stuttgart.de/api/access/datafile/136459,2D/NS_incom/,2e1f4cf38135b2f07a72462e0f995c2d
260
+ NS_Incom,ns_incom_inhom_2d_512-248.h5,https://darus.uni-stuttgart.de/api/access/datafile/136460,2D/NS_incom/,edaec61956c5af7be9f8996b8498c8fd
261
+ NS_Incom,ns_incom_inhom_2d_512-249.h5,https://darus.uni-stuttgart.de/api/access/datafile/136461,2D/NS_incom/,b05738fc7cdf18fecca88361ba58875f
262
+ NS_Incom,ns_incom_inhom_2d_512-25.h5,https://darus.uni-stuttgart.de/api/access/datafile/133619,2D/NS_incom/,d5e37c12c61353533ab82c6c9e48e93f
263
+ NS_Incom,ns_incom_inhom_2d_512-250.h5,https://darus.uni-stuttgart.de/api/access/datafile/166273,2D/NS_incom/,c9731467c80b809ce0f3f337ce3ed71e
264
+ NS_Incom,ns_incom_inhom_2d_512-251.h5,https://darus.uni-stuttgart.de/api/access/datafile/166274,2D/NS_incom/,3a515e95b641360734702248b3bfcb6d
265
+ NS_Incom,ns_incom_inhom_2d_512-252.h5,https://darus.uni-stuttgart.de/api/access/datafile/166275,2D/NS_incom/,283244af2adc86953dd6021bf6722ba4
266
+ NS_Incom,ns_incom_inhom_2d_512-253.h5,https://darus.uni-stuttgart.de/api/access/datafile/166276,2D/NS_incom/,a471e758505de149dba27c9fa2d29198
267
+ NS_Incom,ns_incom_inhom_2d_512-254.h5,https://darus.uni-stuttgart.de/api/access/datafile/166277,2D/NS_incom/,9da97766e3b311af6d79a946f8db5184
268
+ NS_Incom,ns_incom_inhom_2d_512-255.h5,https://darus.uni-stuttgart.de/api/access/datafile/166278,2D/NS_incom/,1965889c070e802965ebe19b01cdf2db
269
+ NS_Incom,ns_incom_inhom_2d_512-256.h5,https://darus.uni-stuttgart.de/api/access/datafile/166279,2D/NS_incom/,588ce18cf51ca3517527f3dd0b0ee443
270
+ NS_Incom,ns_incom_inhom_2d_512-257.h5,https://darus.uni-stuttgart.de/api/access/datafile/166281,2D/NS_incom/,44a864a879590427273a3dbf1aa2c190
271
+ NS_Incom,ns_incom_inhom_2d_512-258.h5,https://darus.uni-stuttgart.de/api/access/datafile/166282,2D/NS_incom/,adf37d1dd066c2dc8fb3886369b5434f
272
+ NS_Incom,ns_incom_inhom_2d_512-259.h5,https://darus.uni-stuttgart.de/api/access/datafile/166283,2D/NS_incom/,588200460d5168ce1ba582101aa3547e
273
+ NS_Incom,ns_incom_inhom_2d_512-26.h5,https://darus.uni-stuttgart.de/api/access/datafile/133689,2D/NS_incom/,563b6261312a32460d848e5b9ca82f79
274
+ NS_Incom,ns_incom_inhom_2d_512-260.h5,https://darus.uni-stuttgart.de/api/access/datafile/166284,2D/NS_incom/,2e1e5883bec2e8e50048738266785bae
275
+ NS_Incom,ns_incom_inhom_2d_512-261.h5,https://darus.uni-stuttgart.de/api/access/datafile/166285,2D/NS_incom/,b5f6b09ad77bbcafbc7d52710388669c
276
+ NS_Incom,ns_incom_inhom_2d_512-262.h5,https://darus.uni-stuttgart.de/api/access/datafile/166286,2D/NS_incom/,8a8026b2cd2e59a7b1d796dca4404e3a
277
+ NS_Incom,ns_incom_inhom_2d_512-263.h5,https://darus.uni-stuttgart.de/api/access/datafile/166287,2D/NS_incom/,8958869c63e7a496816d5c551d4d8115
278
+ NS_Incom,ns_incom_inhom_2d_512-264.h5,https://darus.uni-stuttgart.de/api/access/datafile/166288,2D/NS_incom/,69e4911decfab00ff945884915084104
279
+ NS_Incom,ns_incom_inhom_2d_512-265.h5,https://darus.uni-stuttgart.de/api/access/datafile/166290,2D/NS_incom/,3516776e1d47d573ee91815ca70360b6
280
+ NS_Incom,ns_incom_inhom_2d_512-266.h5,https://darus.uni-stuttgart.de/api/access/datafile/166291,2D/NS_incom/,f50c0e5f4ad659c1b5d6f7343e072bdf
281
+ NS_Incom,ns_incom_inhom_2d_512-267.h5,https://darus.uni-stuttgart.de/api/access/datafile/166292,2D/NS_incom/,85a8bd3bd539acb7685393393294e916
282
+ NS_Incom,ns_incom_inhom_2d_512-268.h5,https://darus.uni-stuttgart.de/api/access/datafile/166293,2D/NS_incom/,3b43956b04c4e0e619ba671cdfbbc834
283
+ NS_Incom,ns_incom_inhom_2d_512-269.h5,https://darus.uni-stuttgart.de/api/access/datafile/166294,2D/NS_incom/,1ca4a6528b2d73b55042eb6dbe487ff1
284
+ NS_Incom,ns_incom_inhom_2d_512-27.h5,https://darus.uni-stuttgart.de/api/access/datafile/133620,2D/NS_incom/,3536aace9614abf8868d9c03bbccf9d4
285
+ NS_Incom,ns_incom_inhom_2d_512-270.h5,https://darus.uni-stuttgart.de/api/access/datafile/166295,2D/NS_incom/,8bf9ac2f7dc47e3617ba9de2ac10d582
286
+ NS_Incom,ns_incom_inhom_2d_512-271.h5,https://darus.uni-stuttgart.de/api/access/datafile/166296,2D/NS_incom/,907d7cd2c84312f52f94b27197128d00
287
+ NS_Incom,ns_incom_inhom_2d_512-272.h5,https://darus.uni-stuttgart.de/api/access/datafile/166297,2D/NS_incom/,7d1dc19e1b08c09d86bed9a0c7448ceb
288
+ NS_Incom,ns_incom_inhom_2d_512-273.h5,https://darus.uni-stuttgart.de/api/access/datafile/166298,2D/NS_incom/,e5e51712cc8921fcc4e89b9fe4690c7f
289
+ NS_Incom,ns_incom_inhom_2d_512-274.h5,https://darus.uni-stuttgart.de/api/access/datafile/166299,2D/NS_incom/,5a3ea4a7cdeade672038ffdbf3a7886a
290
+ NS_Incom,ns_incom_inhom_2d_512-28.h5,https://darus.uni-stuttgart.de/api/access/datafile/136462,2D/NS_incom/,aef3e1e349adc36e3d738d7d5534548b
291
+ NS_Incom,ns_incom_inhom_2d_512-29.h5,https://darus.uni-stuttgart.de/api/access/datafile/136463,2D/NS_incom/,b5f7a9f28aab2baf06d6129a436dca79
292
+ NS_Incom,ns_incom_inhom_2d_512-3.h5,https://darus.uni-stuttgart.de/api/access/datafile/136466,2D/NS_incom/,e9c92a19854e96c918d3a46d39994be4
293
+ NS_Incom,ns_incom_inhom_2d_512-30.h5,https://darus.uni-stuttgart.de/api/access/datafile/136464,2D/NS_incom/,fa2275aaf761334ec2d80affb10d324f
294
+ NS_Incom,ns_incom_inhom_2d_512-31.h5,https://darus.uni-stuttgart.de/api/access/datafile/133621,2D/NS_incom/,a30bc0bb2120dcb42a8335c6d264ac7d
295
+ NS_Incom,ns_incom_inhom_2d_512-32.h5,https://darus.uni-stuttgart.de/api/access/datafile/133626,2D/NS_incom/,6579fe51ea46095aa1cd336be4701b34
296
+ NS_Incom,ns_incom_inhom_2d_512-33.h5,https://darus.uni-stuttgart.de/api/access/datafile/133622,2D/NS_incom/,63ee81664764659f3b6436c055981c95
297
+ NS_Incom,ns_incom_inhom_2d_512-34.h5,https://darus.uni-stuttgart.de/api/access/datafile/133623,2D/NS_incom/,e7c32d512bf95b97699c36381fea07ca
298
+ NS_Incom,ns_incom_inhom_2d_512-35.h5,https://darus.uni-stuttgart.de/api/access/datafile/133624,2D/NS_incom/,c2715bd899018326e9679775d57e1dfd
299
+ NS_Incom,ns_incom_inhom_2d_512-36.h5,https://darus.uni-stuttgart.de/api/access/datafile/136465,2D/NS_incom/,b085ae82d91f9baa2a7f9c8720f5ca48
300
+ NS_Incom,ns_incom_inhom_2d_512-37.h5,https://darus.uni-stuttgart.de/api/access/datafile/133625,2D/NS_incom/,80c39f8aaf4168974f8bde94df177ab6
301
+ NS_Incom,ns_incom_inhom_2d_512-38.h5,https://darus.uni-stuttgart.de/api/access/datafile/133645,2D/NS_incom/,d01ee492fcdb64cd52b7d0ae7b606370
302
+ NS_Incom,ns_incom_inhom_2d_512-39.h5,https://darus.uni-stuttgart.de/api/access/datafile/133630,2D/NS_incom/,3f4f083589ab6d314584fa7ec795669e
303
+ NS_Incom,ns_incom_inhom_2d_512-4.h5,https://darus.uni-stuttgart.de/api/access/datafile/166289,2D/NS_incom/,306a56ac14ea5686921cbb3f09c7dcb6
304
+ NS_Incom,ns_incom_inhom_2d_512-40.h5,https://darus.uni-stuttgart.de/api/access/datafile/133627,2D/NS_incom/,c906a76f920bb63d575e347e76e47caf
305
+ NS_Incom,ns_incom_inhom_2d_512-41.h5,https://darus.uni-stuttgart.de/api/access/datafile/133696,2D/NS_incom/,b55a275050a22b9c515d9d896dba6da0
306
+ NS_Incom,ns_incom_inhom_2d_512-42.h5,https://darus.uni-stuttgart.de/api/access/datafile/133642,2D/NS_incom/,5aa8b626b64ba979905797b9e56dd34c
307
+ NS_Incom,ns_incom_inhom_2d_512-43.h5,https://darus.uni-stuttgart.de/api/access/datafile/133650,2D/NS_incom/,d6ef4cf80d79f696dfc34d4947b14a77
308
+ NS_Incom,ns_incom_inhom_2d_512-44.h5,https://darus.uni-stuttgart.de/api/access/datafile/133686,2D/NS_incom/,b8527daec6934026f5128a6d1f5db1e5
309
+ NS_Incom,ns_incom_inhom_2d_512-45.h5,https://darus.uni-stuttgart.de/api/access/datafile/136467,2D/NS_incom/,7899ab1897fbd4665e4d7daea7c90bc2
310
+ NS_Incom,ns_incom_inhom_2d_512-46.h5,https://darus.uni-stuttgart.de/api/access/datafile/136468,2D/NS_incom/,6853e9a78b6a2b2b756c4e6f8bcdac3e
311
+ NS_Incom,ns_incom_inhom_2d_512-47.h5,https://darus.uni-stuttgart.de/api/access/datafile/136469,2D/NS_incom/,4737e1e1283748baae9617ceff1e777a
312
+ NS_Incom,ns_incom_inhom_2d_512-48.h5,https://darus.uni-stuttgart.de/api/access/datafile/136470,2D/NS_incom/,5d3abc3ecbddeacb5270030954cc8064
313
+ NS_Incom,ns_incom_inhom_2d_512-5.h5,https://darus.uni-stuttgart.de/api/access/datafile/136471,2D/NS_incom/,6a939d7418593d70faf7f90a191f841d
314
+ NS_Incom,ns_incom_inhom_2d_512-50.h5,https://darus.uni-stuttgart.de/api/access/datafile/133678,2D/NS_incom/,f30303f48d036e9aeac07fd70e61c97b
315
+ NS_Incom,ns_incom_inhom_2d_512-51.h5,https://darus.uni-stuttgart.de/api/access/datafile/133659,2D/NS_incom/,e82c2adbbc22b59168b7b587012d296d
316
+ NS_Incom,ns_incom_inhom_2d_512-52.h5,https://darus.uni-stuttgart.de/api/access/datafile/133657,2D/NS_incom/,1a072500b92f728f24526182a8e22d71
317
+ NS_Incom,ns_incom_inhom_2d_512-53.h5,https://darus.uni-stuttgart.de/api/access/datafile/133670,2D/NS_incom/,e82c2adbbc22b59168b7b587012d296d
318
+ NS_Incom,ns_incom_inhom_2d_512-54.h5,https://darus.uni-stuttgart.de/api/access/datafile/133672,2D/NS_incom/,3516776e1d47d573ee91815ca70360b6
319
+ NS_Incom,ns_incom_inhom_2d_512-55.h5,https://darus.uni-stuttgart.de/api/access/datafile/133662,2D/NS_incom/,49732762c231a410904cf8bddb0444cd
320
+ NS_Incom,ns_incom_inhom_2d_512-56.h5,https://darus.uni-stuttgart.de/api/access/datafile/133673,2D/NS_incom/,f50c0e5f4ad659c1b5d6f7343e072bdf
321
+ NS_Incom,ns_incom_inhom_2d_512-57.h5,https://darus.uni-stuttgart.de/api/access/datafile/133674,2D/NS_incom/,1a072500b92f728f24526182a8e22d71
322
+ NS_Incom,ns_incom_inhom_2d_512-58.h5,https://darus.uni-stuttgart.de/api/access/datafile/133658,2D/NS_incom/,ae1e262d7f1a56369b13b6d5de80e03d
323
+ NS_Incom,ns_incom_inhom_2d_512-59.h5,https://darus.uni-stuttgart.de/api/access/datafile/133656,2D/NS_incom/,b99c5b112d4a7dfca053cb2c78ebe581
324
+ NS_Incom,ns_incom_inhom_2d_512-6.h5,https://darus.uni-stuttgart.de/api/access/datafile/136472,2D/NS_incom/,36156ac1f8cecb47d076a1ee0707dec5
325
+ NS_Incom,ns_incom_inhom_2d_512-60.h5,https://darus.uni-stuttgart.de/api/access/datafile/133675,2D/NS_incom/,85a8bd3bd539acb7685393393294e916
326
+ NS_Incom,ns_incom_inhom_2d_512-61.h5,https://darus.uni-stuttgart.de/api/access/datafile/133676,2D/NS_incom/,3b43956b04c4e0e619ba671cdfbbc834
327
+ NS_Incom,ns_incom_inhom_2d_512-62.h5,https://darus.uni-stuttgart.de/api/access/datafile/133677,2D/NS_incom/,8bf9ac2f7dc47e3617ba9de2ac10d582
328
+ NS_Incom,ns_incom_inhom_2d_512-63.h5,https://darus.uni-stuttgart.de/api/access/datafile/133680,2D/NS_incom/,49732762c231a410904cf8bddb0444cd
329
+ NS_Incom,ns_incom_inhom_2d_512-64.h5,https://darus.uni-stuttgart.de/api/access/datafile/133628,2D/NS_incom/,c0e54ed459c84470986818d6d89be0c8
330
+ NS_Incom,ns_incom_inhom_2d_512-65.h5,https://darus.uni-stuttgart.de/api/access/datafile/133635,2D/NS_incom/,69f1b9013bb76fba1006272951e12845
331
+ NS_Incom,ns_incom_inhom_2d_512-66.h5,https://darus.uni-stuttgart.de/api/access/datafile/133629,2D/NS_incom/,c79245b195ec850a40c0957056bffad7
332
+ NS_Incom,ns_incom_inhom_2d_512-67.h5,https://darus.uni-stuttgart.de/api/access/datafile/133277,2D/NS_incom/,f8465b71d9afe577ba30cbe5d0c490e6
333
+ NS_Incom,ns_incom_inhom_2d_512-68.h5,https://darus.uni-stuttgart.de/api/access/datafile/133681,2D/NS_incom/,ae1e262d7f1a56369b13b6d5de80e03d
334
+ NS_Incom,ns_incom_inhom_2d_512-69.h5,https://darus.uni-stuttgart.de/api/access/datafile/133631,2D/NS_incom/,b5159c70d9b14e480bb520961bafcae4
335
+ NS_Incom,ns_incom_inhom_2d_512-7.h5,https://darus.uni-stuttgart.de/api/access/datafile/136473,2D/NS_incom/,e1b06fd07d09227a0767baa260f361e2
336
+ NS_Incom,ns_incom_inhom_2d_512-70.h5,https://darus.uni-stuttgart.de/api/access/datafile/133632,2D/NS_incom/,3aee2aa9b1a14c7d04ce2770e95a354a
337
+ NS_Incom,ns_incom_inhom_2d_512-71.h5,https://darus.uni-stuttgart.de/api/access/datafile/133634,2D/NS_incom/,2f5b2bf56e72d61c41d4b056ac540fb0
338
+ NS_Incom,ns_incom_inhom_2d_512-72.h5,https://darus.uni-stuttgart.de/api/access/datafile/133682,2D/NS_incom/,b99c5b112d4a7dfca053cb2c78ebe581
339
+ NS_Incom,ns_incom_inhom_2d_512-73.h5,https://darus.uni-stuttgart.de/api/access/datafile/133273,2D/NS_incom/,9d1c5e98ee4fe97c7498d2f574c165ae
340
+ NS_Incom,ns_incom_inhom_2d_512-74.h5,https://darus.uni-stuttgart.de/api/access/datafile/133633,2D/NS_incom/,475427b7337f1fd7114b7e6c32a2f709
341
+ NS_Incom,ns_incom_inhom_2d_512-75.h5,https://darus.uni-stuttgart.de/api/access/datafile/133694,2D/NS_incom/,170b32d051ed72716aab0f40b13f359e
342
+ NS_Incom,ns_incom_inhom_2d_512-76.h5,https://darus.uni-stuttgart.de/api/access/datafile/133636,2D/NS_incom/,2666826a3944f5999a1053242e9588a9
343
+ NS_Incom,ns_incom_inhom_2d_512-77.h5,https://darus.uni-stuttgart.de/api/access/datafile/133638,2D/NS_incom/,c58cc6dd09fb89c1b5fb95081f1220c9
344
+ NS_Incom,ns_incom_inhom_2d_512-78.h5,https://darus.uni-stuttgart.de/api/access/datafile/133637,2D/NS_incom/,b183a32a3e2230968d5b7f20d9dbf818
345
+ NS_Incom,ns_incom_inhom_2d_512-79.h5,https://darus.uni-stuttgart.de/api/access/datafile/133651,2D/NS_incom/,def3145be356b7d020ea09e5232bb60f
346
+ NS_Incom,ns_incom_inhom_2d_512-8.h5,https://darus.uni-stuttgart.de/api/access/datafile/136449,2D/NS_incom/,2e6a62550680a255d4dfec24a761d5cf
347
+ NS_Incom,ns_incom_inhom_2d_512-80.h5,https://darus.uni-stuttgart.de/api/access/datafile/133639,2D/NS_incom/,07a1edc16fb7d981155ad4173c42abf1
348
+ NS_Incom,ns_incom_inhom_2d_512-81.h5,https://darus.uni-stuttgart.de/api/access/datafile/133276,2D/NS_incom/,3a58395f316b2158ea61844eadd4109f
349
+ NS_Incom,ns_incom_inhom_2d_512-82.h5,https://darus.uni-stuttgart.de/api/access/datafile/133640,2D/NS_incom/,a0f4505418beb4da8b7f84a83efc3904
350
+ NS_Incom,ns_incom_inhom_2d_512-83.h5,https://darus.uni-stuttgart.de/api/access/datafile/133272,2D/NS_incom/,89993b4f3ac2653f6377941e640e2233
351
+ NS_Incom,ns_incom_inhom_2d_512-84.h5,https://darus.uni-stuttgart.de/api/access/datafile/133643,2D/NS_incom/,c5269398ec4edb0f5891d4efdb2fc89d
352
+ NS_Incom,ns_incom_inhom_2d_512-85.h5,https://darus.uni-stuttgart.de/api/access/datafile/133641,2D/NS_incom/,090862c29ecbc2c4232ddbacca3fb230
353
+ NS_Incom,ns_incom_inhom_2d_512-86.h5,https://darus.uni-stuttgart.de/api/access/datafile/133647,2D/NS_incom/,a712449bef87318a620d1663c4ffde27
354
+ NS_Incom,ns_incom_inhom_2d_512-87.h5,https://darus.uni-stuttgart.de/api/access/datafile/133644,2D/NS_incom/,920a3dcb3b216f0d9a4ab0b9b4c8745a
355
+ NS_Incom,ns_incom_inhom_2d_512-88.h5,https://darus.uni-stuttgart.de/api/access/datafile/133655,2D/NS_incom/,cc96b1e357f264f12dcfbb41736a53bf
356
+ NS_Incom,ns_incom_inhom_2d_512-89.h5,https://darus.uni-stuttgart.de/api/access/datafile/133646,2D/NS_incom/,7fd484851410b7139bfea4166074fe00
357
+ NS_Incom,ns_incom_inhom_2d_512-9.h5,https://darus.uni-stuttgart.de/api/access/datafile/136475,2D/NS_incom/,254bb44710b625f716b61d51d5d1e58e
358
+ NS_Incom,ns_incom_inhom_2d_512-90.h5,https://darus.uni-stuttgart.de/api/access/datafile/133652,2D/NS_incom/,c6c4e27d6ba19b4ccb5f27e67dd9cdf1
359
+ NS_Incom,ns_incom_inhom_2d_512-91.h5,https://darus.uni-stuttgart.de/api/access/datafile/136474,2D/NS_incom/,527b23b7e4ca4d18c5c036c41e15e4fe
360
+ NS_Incom,ns_incom_inhom_2d_512-92.h5,https://darus.uni-stuttgart.de/api/access/datafile/133648,2D/NS_incom/,84fd5f64db474f8e20b670d01929d57a
361
+ NS_Incom,ns_incom_inhom_2d_512-93.h5,https://darus.uni-stuttgart.de/api/access/datafile/133649,2D/NS_incom/,20583d7edf7da9a80f16a26c8388a65e
362
+ NS_Incom,ns_incom_inhom_2d_512-94.h5,https://darus.uni-stuttgart.de/api/access/datafile/133691,2D/NS_incom/,8540e7f1cc6a9e2bc64726a325ddacfb
363
+ NS_Incom,ns_incom_inhom_2d_512-95.h5,https://darus.uni-stuttgart.de/api/access/datafile/133690,2D/NS_incom/,25d4c08d5534d0f10c877220e258640c
364
+ NS_Incom,ns_incom_inhom_2d_512-96.h5,https://darus.uni-stuttgart.de/api/access/datafile/133270,2D/NS_incom/,93e772cbfd36d434e97a079d701d5382
365
+ NS_Incom,ns_incom_inhom_2d_512-97.h5,https://darus.uni-stuttgart.de/api/access/datafile/133653,2D/NS_incom/,ffa1d46e58f67aa1932fc4ffd2e7716a
366
+ NS_Incom,ns_incom_inhom_2d_512-98.h5,https://darus.uni-stuttgart.de/api/access/datafile/133687,2D/NS_incom/,c28112e1339cda43f67dce03536e3455
367
+ NS_Incom,ns_incom_inhom_2d_512-99.h5,https://darus.uni-stuttgart.de/api/access/datafile/133654,2D/NS_incom/,abfe923a224cb4515dd545d5858b2124
368
+ SWE,2D_rdb_NA_NA.h5,https://darus.uni-stuttgart.de/api/access/datafile/133021,2D/shallow-water/,75d838c47aa410694bdc912ea7f22282
369
+ 3D_CFD,3D_CFD_Rand_M1.0_Eta1e-08_Zeta1e-08_periodic_Train.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/164693,3D/Train/,45892a12d1066d54af74badae55c438e
370
+ 3D_CFD,3D_CFD_Turb_M1.0_Eta1e-08_Zeta1e-08_periodic_Train.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/164694,3D/Train/,4f0eacaec5ff000bbd9a31026ea207b8
371
+ 3D_CFD,BlastWave.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133264,3D/Test/BlastWave/,f901a1ec75925c1d861ac99a825878f3
372
+ 3D_CFD,Turb_M01.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133224,3D/Test/Turbulence/,b27495031f434d01762bb0b819ac71e2
373
+ 3D_CFD,Turb_M05.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133225,3D/Test/Turbulence/,f9407dff1a75a1d14d93e7dd570af728
374
+ 3D_CFD,Turb_M1.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/135833,3D/Test/Turbulence/,3758f23f71684ac666e0b1e91da0a1c4
375
+ 3D_CFD,Turb_M2.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133227,3D/Test/Turbulence/,12e528dc8ab800f69474600ec58b24d3
376
+ 3D_CFD,Turb_M4.hdf5,https://darus.uni-stuttgart.de/api/access/datafile/133228,3D/Test/Turbulence/,8db384feba75903a8c5b21ebeba40083
funsearch.py ADDED
@@ -0,0 +1,160 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import math
2
+ import os
3
+ import pandas as pd
4
+ import random
5
+ import shutil
6
+ import time
7
+
8
+ from code_generation import generate_and_debug, prepare_working_folder, code_execution, get_results
9
+ from program_database import ProgramsDatabase, ProgramsDatabaseConfig
10
+
11
def get_seed_score(nRMSE, convergence_rate):
    """Bucket raw solver metrics into discrete scores for the program database.

    Args:
        nRMSE: Normalized RMSE of the candidate solver (lower is better).
        convergence_rate: Empirical spatial order of convergence (higher is better).

    Returns:
        Dict of two integer-bucketed scores. The database reduces on the
        last inserted key, so 'bucketed_nRMSE' acts as the primary objective.
    """
    # Clamp nRMSE into (0, 1e9] so math.log10 never receives zero or a
    # negative value (which raises ValueError). Behavior is unchanged for
    # any realistic positive error.
    clamped_nRMSE = min(1e9, max(1e-12, nRMSE))
    return {
        'bucketed_convergence_rate': int(max(0, convergence_rate)*4),
        'bucketed_nRMSE': int(-math.log10(clamped_nRMSE)*10)
    }
16
+
17
def funsearch(cfg):
    """FunSearch-style evolutionary search over LLM-generated PDE solvers.

    Seeds an island-based program database with solvers from an archived
    `repeated_sample` run, runs `num_search_rounds` of sample -> evaluate ->
    register, then re-evaluates the best program on the full evaluation
    dataset and writes the outcome to `final_result.txt`.
    """
    num_trials = cfg.method.num_debugging_trials_per_sample
    pde_name = cfg.pde.name
    working_folder = cfg.working_folder
    model_name = cfg.model.name
    num_search_rounds = cfg.method.num_search_rounds
    num_initial_seeds = cfg.method.num_initial_seeds
    use_sample_solver_init = cfg.method.use_sample_solver_init
    assert use_sample_solver_init, 'Sample solvers must be enabled for refinement'

    sample_solver_folder = os.path.join(
        'solvers', pde_name, cfg.pde.pde_setting_name, 'seeds'
    )
    # Loaded for its existence check (raises early if the seed folder is
    # missing); individual rows are read from the archived run below.
    sample_solver_info = pd.read_csv(
        os.path.join(sample_solver_folder, 'seed_results.csv')
    )

    prepare_working_folder(
        cfg,
        working_folder=working_folder,
        pde_name=pde_name,
        use_sample_solver_init=use_sample_solver_init
    )

    pd_cfg = ProgramsDatabaseConfig()
    program_db = ProgramsDatabase(pd_cfg)

    # Initial seeds come from the single archived `repeated_sample` run of
    # the same model on the same PDE setting.
    seed_path = os.path.join(
        '../archived_logs',
        pde_name,
        cfg.pde.pde_setting_name,
        'repeated_sample',
        model_name
    )
    subdirectories = [d for d in os.listdir(seed_path) if os.path.isdir(os.path.join(seed_path, d))]
    assert len(subdirectories) == 1, 'Only one subdirectory is expected'
    seed_path = os.path.join(seed_path, subdirectories[0])
    result_sheet = pd.read_csv(os.path.join(seed_path, 'test_results.csv'))

    # Artifacts every usable seed must have on disk.
    relevant_files = [
        'errors_{idx}.txt',
        'implementation_{idx}.py',
        'output_{idx}.txt',
    ]
    for i in range(num_initial_seeds):
        complete_seed = all(
            os.path.exists(os.path.join(seed_path, file.format(idx=i)))
            for file in relevant_files
        )
        seed_rows = result_sheet[result_sheet['round'] == i]
        if seed_rows.empty:
            # BUG FIX: the row was previously extracted with `[0]` before
            # this emptiness check, raising IndexError whenever a round was
            # missing from test_results.csv. Skip such rounds instead.
            continue
        seed_info = [str(x) for x in seed_rows.to_numpy().tolist()[0]]
        if seed_info[1] == 'failed':
            complete_seed = False
        if not complete_seed:
            continue

        # The seed is complete; copy its artifacts to the working folder.
        for file in relevant_files:
            source_file = os.path.join(seed_path, file.format(idx=int(i)))
            destination_file = os.path.join(working_folder, file.format(idx=int(i)))
            shutil.copy(source_file, destination_file)
        with open(os.path.join(working_folder, 'test_results.csv'), 'a') as f:
            seed_info[0] = str(int(i))
            f.write(','.join(seed_info) + '\n')

        # Register the seed in the database (island_id=None -> all islands).
        seed_score = get_seed_score(float(seed_info[1]), float(seed_info[3]))
        with open(os.path.join(working_folder, f'implementation_{i}.py'), 'r') as f:
            program_len = len(f.readlines())
        program_db.register_program(
            program=i,
            program_len=program_len,
            island_id=None,
            scores_per_test=seed_score,
        )

    for i in range(num_initial_seeds, num_initial_seeds + num_search_rounds):
        island_id, seed_ids = program_db.get_seed()
        try:
            relative_error, elapsed_time, avg_rate = generate_and_debug(
                cfg,
                round_idx=i,
                num_trials=num_trials,
                pde_name=pde_name,
                working_folder=working_folder,
                seed_implementations=seed_ids,
                model_name=model_name
            )
            seed_score = get_seed_score(float(relative_error), float(avg_rate))
            with open(os.path.join(working_folder, f'implementation_{i}.py'), 'r') as f:
                program_len = len(f.readlines())
            program_db.register_program(
                program=i,
                program_len=program_len,
                island_id=island_id,
                scores_per_test=seed_score,
            )
        except Exception as e:
            # Best-effort loop: a failed round must not abort the search.
            print(f'Error in round {i}: {e}. Move on to the next sample.')

    # Finally, report the best program.
    results = pd.read_csv(os.path.join(working_folder, 'test_results.csv'))
    keywords = ['nRMSE', 'elapsed_time', 'convergence_rate']
    for keyword in keywords:
        results[keyword] = pd.to_numeric(results[keyword], errors="coerce")
    # Sort by nRMSE (asc), elapsed_time (asc), convergence_rate (desc).
    sorted_results = results.sort_values(by=keywords, ascending=[True, True, False])
    best_idx = int(sorted_results.head(1)["round"].values[0])

    # Re-run the best implementation on the full (non-development) dataset
    # under a reserved round id.
    test_run_id = 999
    shutil.copy(
        os.path.join(working_folder, f'implementation_{best_idx}.py'),
        os.path.join(working_folder, f'implementation_{test_run_id}.py')
    )
    execution_results = code_execution(
        cfg,
        working_folder=working_folder,
        round_idx=test_run_id,
        pde_name=pde_name,
        eval_dataset=os.path.join(
            cfg.root_dataset_folder,
            cfg.pde.dataset_folder_for_eval.replace('_development.hdf5', '.hdf5')
        )
    )

    if execution_results['exit_code'] != 0:
        relative_error, elapsed_time, avg_rate = None, None, None
    else:
        relative_error, elapsed_time, avg_rate = get_results(
            os.path.join(working_folder, f'output_{test_run_id}.txt')
        )
    with open(os.path.join(working_folder, 'final_result.txt'), 'w') as f:
        f.write('best_idx,relative_error,elapsed_time,avg_rate\n')
        f.write(f'{best_idx},{relative_error},{elapsed_time},{avg_rate}\n')
llm_api.py ADDED
@@ -0,0 +1,129 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from anthropic import Anthropic
2
+ from google import genai
3
+ from google.genai import types
4
+ from openai import OpenAI
5
+ import time
6
+
7
+
8
def get_client(messages, cfg):
    """Instantiate the API client matching the configured model family.

    `messages` is accepted for signature compatibility with callers but is
    not consulted when choosing the client.
    """
    family = cfg.model.family_name
    if 'gpt' in family or family == 'o':
        return OpenAI(api_key=cfg.model.api_key)
    if 'claude' in family:
        return Anthropic(api_key=cfg.model.api_key)
    if 'deepseek' in family:
        # DeepSeek exposes an OpenAI-compatible endpoint.
        return OpenAI(api_key=cfg.model.api_key, base_url=cfg.model.base_url)
    if 'gemini' in family:
        return genai.Client(api_key=cfg.model.api_key)
    if family == 'qwen':
        # Qwen also speaks the OpenAI protocol via a custom base URL.
        return OpenAI(api_key=cfg.model.api_key, base_url=cfg.model.base_url)
    raise ValueError(f'Model {family} not recognized')
22
+
23
+
24
def generate_response(messages, cfg):
    """Send `messages` to the configured model and return its raw response.

    Dispatches on the model name to accommodate each provider's API quirks
    (OpenAI reasoning models, Anthropic, Gemini, generic OpenAI-compatible
    chat). The return type therefore varies by provider: an SDK response
    object, or a plain string for streamed qwq models.
    """
    client = get_client(messages, cfg)
    model_name = cfg.model.name

    if 'o1' in model_name or 'o3' in model_name or 'o4' in model_name:
        # Reasoning models restrict system messages: for o1, fold the system
        # prompt into the first user message.
        if 'o1' in model_name and len(messages) > 0 and messages[0]['role'] == 'system':
            system_prompt = messages[0]['content']
            messages = messages[1:]
            messages[0]['content'] = system_prompt + messages[0]['content']
        # TODO: add these to the hydra config
        num_tokens = 16384
        temperature = 1.0
        start_time = time.time()
        response = client.chat.completions.create(
            model=model_name,
            messages=messages,
            max_completion_tokens=num_tokens,
            temperature=temperature)
        end_time = time.time()
        print(f'It takes {model_name} {end_time - start_time:.2f}s to generate the response.')
        return response

    if 'claude' in model_name:
        num_tokens = 8192  # Give claude more tokens
        temperature = 0.7

        # BUG FIX: `system_prompt` was only bound when the first message had
        # the system role, but was passed to the API unconditionally, which
        # raised NameError otherwise. Default to an empty system prompt.
        system_prompt = ''
        if len(messages) > 0 and messages[0]['role'] == 'system':
            system_prompt = messages[0]['content']
            messages = messages[1:]

        start_time = time.time()
        if cfg.model.thinking:
            num_thinking_tokens = 12288
            response = client.messages.create(
                model=model_name,
                max_tokens=num_tokens + num_thinking_tokens,
                thinking={"type": "enabled", "budget_tokens": num_thinking_tokens},
                system=system_prompt,
                messages=messages,
                # temperature has to be set to 1 for thinking
                temperature=1.0,
            )
        else:
            response = client.messages.create(
                model=model_name,
                max_tokens=num_tokens,
                system=system_prompt,
                messages=messages,
                temperature=temperature,
            )
        end_time = time.time()
        print(f'It takes {model_name} {end_time - start_time:.2f}s to generate the response.')
        return response

    if 'gemini' in model_name:
        start_time = time.time()
        if len(messages) > 0 and messages[0]['role'] == 'system':
            # Gemini has no separate system slot here; prepend the system
            # prompt to the first user message.
            system_prompt = messages[0]['content']
            messages = messages[1:]
            messages[0]['content'] = system_prompt + messages[0]['content']

        # Gemini uses 'model' instead of 'assistant' for its own turns.
        for message in messages:
            if message['role'] == 'assistant':
                message['role'] = 'model'

        chat = client.chats.create(
            model=model_name,
            history=[
                types.Content(role=message['role'], parts=[types.Part(text=message['content'])])
                for message in messages[:-1]
            ],
        )
        response = chat.send_message(message=messages[-1]['content'])
        end_time = time.time()
        print(f'It takes {model_name} {end_time - start_time:.2f}s to generate the response.')
        return response

    # Default: OpenAI-compatible chat completion endpoint.
    num_tokens = 4096
    temperature = 0.7

    start_time = time.time()
    response = client.chat.completions.create(
        model=model_name,
        messages=messages,
        max_tokens=num_tokens,
        temperature=temperature,
        stream=('qwq' in model_name),
    )

    if 'qwq' in model_name:
        # qwq only supports streaming: collect the answer tokens while
        # silently skipping the reasoning stream.
        answer_content = ""
        for chunk in response:
            if chunk.choices:
                delta = chunk.choices[0].delta
                if hasattr(delta, 'reasoning_content') and delta.reasoning_content is not None:
                    # We don't need to print the reasoning content
                    pass
                else:
                    answer_content += delta.content
        response = answer_content

    end_time = time.time()
    print(f'It takes {model_name} {end_time - start_time:.2f}s to generate the response.')
    return response
129
+
main.py ADDED
@@ -0,0 +1,33 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import sys
3
+
4
+ import hydra
5
+
6
+ from refine import refine
7
+ from repeated_sample import repeated_sample
8
+ from funsearch import funsearch
9
+
10
@hydra.main(config_path='configs', config_name='default', version_base=None)
def main(cfg):
    """Entry point: print the run configuration and dispatch to the method."""
    for label, value in (
        ('Method', cfg.method.name),
        ('Model name', cfg.model.name),
        ('PDE name', cfg.pde.name),
    ):
        print(f'{label}: {value}')

    print(f'Working folder: {cfg.working_folder}')
    if not os.path.exists(cfg.working_folder):
        os.makedirs(cfg.working_folder)
    if cfg.redirect_stdout:
        # Capture all subsequent prints in the working folder's stdout log.
        sys.stdout = open(os.path.join(cfg.working_folder, 'stdout.txt'), 'w')

    method_name = cfg.method.name
    if method_name.startswith('refine'):
        refine(cfg)
    elif method_name == 'repeated_sample':
        repeated_sample(cfg)
    elif method_name == 'funsearch':
        funsearch(cfg)
    else:
        raise NotImplementedError(f'Unknown method: {method_name}')

if __name__ == "__main__":
    main()
program_database.py ADDED
@@ -0,0 +1,270 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2023 DeepMind Technologies Limited
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ # ==============================================================================
15
+
16
+ """A programs database that implements the evolutionary algorithm."""
17
+ from collections.abc import Mapping, Sequence
18
+ import copy
19
+ import dataclasses
20
+ import time
21
+ from typing import Any
22
+
23
+ import numpy as np
24
+ import scipy
25
+
26
+ Signature = tuple[float, ...]
27
+ ScoresPerTest = Mapping[Any, float]
28
+
29
+
30
+ def _softmax(logits: np.ndarray, temperature: float) -> np.ndarray:
31
+ """Returns the tempered softmax of 1D finite `logits`."""
32
+ if not np.all(np.isfinite(logits)):
33
+ non_finites = set(logits[~np.isfinite(logits)])
34
+ raise ValueError(f'`logits` contains non-finite value(s): {non_finites}')
35
+ if not np.issubdtype(logits.dtype, np.floating):
36
+ logits = np.array(logits, dtype=np.float32)
37
+
38
+ result = scipy.special.softmax(logits / temperature, axis=-1)
39
+ # Ensure that probabilities sum to 1 to prevent error in `np.random.choice`.
40
+ index = np.argmax(result)
41
+ result[index] = 1 - np.sum(result[0:index]) - np.sum(result[index+1:])
42
+ return result
43
+
44
+
45
def _reduce_score(scores_per_test: ScoresPerTest) -> float:
  """Reduces per-test scores into a single score."""
  # The most recently inserted key is treated as the primary objective.
  last_key = list(scores_per_test.keys())[-1]
  return scores_per_test[last_key]
48
+
49
+
50
def _get_signature(scores_per_test: ScoresPerTest) -> Signature:
  """Represents test scores as a canonical signature."""
  # Sorting the keys makes the signature independent of insertion order.
  ordered_keys = sorted(scores_per_test.keys())
  return tuple(scores_per_test[key] for key in ordered_keys)
53
+
54
+
55
@dataclasses.dataclass(frozen=True)
class Prompt:
  """A prompt produced by the ProgramsDatabase, to be sent to Samplers.

  Attributes:
    code: The prompt, ending with the header of the function to be completed.
    version_generated: The function to be completed is `_v{version_generated}`.
    island_id: Identifier of the island that produced the implementations
      included in the prompt. Used to direct the newly generated implementation
      into the same island (passed back via `register_program`).
  """
  code: str
  version_generated: int
  island_id: int
69
+
70
+
71
@dataclasses.dataclass(frozen=True)
class ProgramsDatabaseConfig:
  """Configuration of a ProgramsDatabase.

  Attributes:
    functions_per_prompt: Number of previous programs to include in prompts.
    num_islands: Number of islands to maintain as a diversity mechanism.
    reset_period: How often (in seconds) the weakest islands should be reset.
    cluster_sampling_temperature_init: Initial temperature for softmax sampling
      of clusters within an island.
    cluster_sampling_temperature_period: Period of linear decay of the cluster
      sampling temperature.
  """
  functions_per_prompt: int = 2
  num_islands: int = 4
  reset_period: int = 60 * 60  # One hour between resets of the weakest islands.
  cluster_sampling_temperature_init: float = 0.1
  cluster_sampling_temperature_period: int = 5
89
+
90
+
91
class ProgramsDatabase:
  """A collection of programs, organized as islands."""

  def __init__(
      self,
      config: ProgramsDatabaseConfig,
  ) -> None:
    self._config: ProgramsDatabaseConfig = config

    # Initialize empty islands.
    self._islands: list[Island] = []
    for _ in range(config.num_islands):
      self._islands.append(
          Island(config.functions_per_prompt,
                 config.cluster_sampling_temperature_init,
                 config.cluster_sampling_temperature_period))
    self._best_score_per_island: list[float] = (
        [-float('inf')] * config.num_islands)
    self._best_program_per_island: list[int | None] = (
        [None] * config.num_islands)
    # Track the best program's length per island so a founder can be
    # re-registered with its length after an island reset.
    self._best_program_len_per_island: list[int | None] = (
        [None] * config.num_islands)
    self._best_scores_per_test_per_island: list[ScoresPerTest | None] = (
        [None] * config.num_islands)

    self._last_reset_time: float = time.time()

  def get_seed(self) -> tuple[int, list[int]]:
    """Returns a random island id and seed program ids sampled from it."""
    island_id = np.random.randint(len(self._islands))
    seed_ids = self._islands[island_id].get_seed()
    return island_id, seed_ids

  def _register_program_in_island(
      self,
      program: int,
      program_len: int,
      island_id: int,
      scores_per_test: ScoresPerTest,
  ) -> None:
    """Registers `program` in the specified island."""
    self._islands[island_id].register_program(program, program_len, scores_per_test)
    score = _reduce_score(scores_per_test)
    if score > self._best_score_per_island[island_id]:
      self._best_program_per_island[island_id] = program
      self._best_program_len_per_island[island_id] = program_len
      self._best_scores_per_test_per_island[island_id] = scores_per_test
      self._best_score_per_island[island_id] = score

  def register_program(
      self,
      program: int,
      program_len: int,
      island_id: int | None,
      scores_per_test: ScoresPerTest,
  ) -> None:
    """Registers `program` in the database."""
    # In an asynchronous implementation we should consider the possibility of
    # registering a program on an island that had been reset after the prompt
    # was generated. Leaving that out here for simplicity.
    if island_id is None:
      # This is a program added at the beginning, so adding it to all islands.
      for island_id in range(len(self._islands)):
        self._register_program_in_island(program, program_len, island_id, scores_per_test)
    else:
      self._register_program_in_island(program, program_len, island_id, scores_per_test)

    # Check whether it is time to reset an island.
    if (time.time() - self._last_reset_time > self._config.reset_period):
      self._last_reset_time = time.time()
      self.reset_islands()

  def reset_islands(self) -> None:
    """Resets the weaker half of islands."""
    # We sort best scores after adding minor noise to break ties.
    indices_sorted_by_score: np.ndarray = np.argsort(
        self._best_score_per_island +
        np.random.randn(len(self._best_score_per_island)) * 1e-6)
    num_islands_to_reset = self._config.num_islands // 2
    reset_islands_ids = indices_sorted_by_score[:num_islands_to_reset]
    keep_islands_ids = indices_sorted_by_score[num_islands_to_reset:]
    for island_id in reset_islands_ids:
      # BUG FIX: the reset previously constructed Island with
      # `self._template` and `self._function_to_evolve` — attributes that do
      # not exist on this class — and re-registered the founder without the
      # required `program_len` argument. Both crashed every island reset.
      self._islands[island_id] = Island(
          self._config.functions_per_prompt,
          self._config.cluster_sampling_temperature_init,
          self._config.cluster_sampling_temperature_period)
      self._best_score_per_island[island_id] = -float('inf')
      founder_island_id = np.random.choice(keep_islands_ids)
      founder = self._best_program_per_island[founder_island_id]
      founder_len = self._best_program_len_per_island[founder_island_id]
      founder_scores = self._best_scores_per_test_per_island[founder_island_id]
      self._register_program_in_island(founder, founder_len, island_id, founder_scores)
181
+
182
+
183
class Island:
  """A sub-population of the programs database."""

  def __init__(
      self,
      functions_per_prompt: int,
      cluster_sampling_temperature_init: float,
      cluster_sampling_temperature_period: int,
  ) -> None:
    self._functions_per_prompt: int = functions_per_prompt
    self._cluster_sampling_temperature_init = cluster_sampling_temperature_init
    self._cluster_sampling_temperature_period = (
        cluster_sampling_temperature_period)

    # Programs are grouped into clusters keyed by their score signature.
    self._clusters: dict[Signature, Cluster] = {}
    self._num_programs: int = 0

  def register_program(
      self,
      program: int,
      program_len: int,
      scores_per_test: ScoresPerTest,
  ) -> None:
    """Stores a program on this island, in its appropriate cluster."""
    signature = _get_signature(scores_per_test)
    if signature not in self._clusters:
      score = _reduce_score(scores_per_test)
      self._clusters[signature] = Cluster(score, [program], [program_len])
    else:
      self._clusters[signature].register_program(program, program_len)
    self._num_programs += 1

  def get_seed(self) -> list[int]:
    """Samples program ids from this island, sorted by ascending cluster score.

    Clusters are chosen by tempered softmax over their scores, then one
    program is sampled from each chosen cluster. (Annotation corrected:
    this returns a list of program ids, not a (prompt, version) tuple.)
    """
    signatures = list(self._clusters.keys())
    cluster_scores = np.array(
        [self._clusters[signature].score for signature in signatures])

    # Convert scores to probabilities using softmax with temperature schedule.
    # The temperature decays linearly within each period but never reaches 0.
    period = self._cluster_sampling_temperature_period
    temperature = self._cluster_sampling_temperature_init * (
        1 - (self._num_programs % period) / period)
    probabilities = _softmax(cluster_scores, temperature)

    # At the beginning of an experiment when we have few clusters, place fewer
    # programs into the prompt.
    functions_per_prompt = min(len(self._clusters), self._functions_per_prompt)

    # NOTE(review): np.random.choice samples with replacement here, so the
    # same cluster can be selected more than once — confirm that is intended.
    idx = np.random.choice(
        len(signatures), size=functions_per_prompt, p=probabilities)
    chosen_signatures = [signatures[i] for i in idx]
    implementations = []
    scores = []
    for signature in chosen_signatures:
      cluster = self._clusters[signature]
      implementations.append(cluster.sample_program())
      scores.append(cluster.score)

    # Highest-scoring programs go last.
    indices = np.argsort(scores)
    sorted_implementations = [implementations[i] for i in indices]
    return sorted_implementations
244
+
245
+
246
class Cluster:
  """A cluster of programs on the same island and with the same Signature."""

  def __init__(self, score: float, init_programs: list[int], init_program_lengths: list[int]) -> None:
    self._score = score
    # We store the indices of the programs in the cluster
    self._programs: list[int] = init_programs
    self._lengths: list[int] = init_program_lengths

  @property
  def score(self) -> float:
    """Reduced score of the signature that this cluster represents."""
    return self._score

  def register_program(self, program_idx: int, program_length: int) -> None:
    """Adds `program` to the cluster."""
    self._programs.append(program_idx)
    self._lengths.append(program_length)

  def sample_program(self) -> int:
    """Samples a program, giving higher probability to shorter programs."""
    # NOTE(review): the denominator is max(lengths), not the (max - min)
    # range; this matches the original FunSearch release, but it means the
    # normalized values rarely reach 1 — confirm before changing.
    normalized_lengths = (np.array(self._lengths) - min(self._lengths)) / (
        max(self._lengths) + 1e-6)
    probabilities = _softmax(-normalized_lengths, temperature=1.0)
    return np.random.choice(self._programs, p=probabilities)
prompt_files/general_prompt.py ADDED
@@ -0,0 +1,126 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# System message shared by all code-generation requests to the LLM.
system_prompt = '''
You are an intelligent AI researcher for coding, numerical algorithms, and scientific computing.
Your goal is to conduct cutting-edge research in the field of PDE solving by leveraging and creatively improving existing algorithms to maximize performances based on feedbacks.
Follow the user's requirements carefully and make sure you understand them.
Always document your code as comments to explain the reason behind them.
Always use Markdown cells to present your code.
'''
8
+
9
# Initial-round prompt used when no seed implementations are available.
# Placeholders: {pde_description}, {solver_template}.
# Typo fixes: "executed to evaluated" -> "executed to be evaluated",
# "for to keep track" -> "to keep track", "Those output" -> "Those outputs".
code_generation_without_seed_prompt = '''
Your task is to solve a partial differential equation (PDE) using Python in batch mode.
{pde_description}

You will be completing the following code skeleton provided below:

```python
{solver_template}
```

Your tasks are:
1. Understand the above code samples.
2. Implement the `solver` function to solve the PDE. You must not modify the function signature.

The generated code needs to be clearly structured and bug-free. You must implement auxiliary functions or add additional arguments to the function if needed to modularize the code.

Your generated code will be executed to be evaluated. Make sure your `solver` function runs correctly and efficiently.
You can use PyTorch or JAX with GPU acceleration.
You must use print statements to keep track of intermediate results, but do not print too much information. Those outputs will be useful for future code improvement and/or debugging.

Your output must follow the following structure:

1. A plan on the implementation idea.
2. Your python implementation (modularized with appropriate auxiliary functions):

```python
[Your implementation (do NOT add a main function)]
```

You must use very simple algorithms that are easy to implement.
'''
40
+
41
# Improvement-round prompt: asks the model to analyze and improve existing
# code samples. Placeholders: {pde_description}, {code_samples}.
# Typo fixes: "exising"/"exisiting" -> "existing", "effiectiveness" ->
# "effectiveness", "executed to evaluated" -> "executed to be evaluated",
# "for to keep track" -> "to keep track".
problem_prompt = '''
Your task is to solve a partial differential equation (PDE) using Python in batch mode.
{pde_description}

You will be improving the following existing code samples. The code samples are provided below:

{code_samples}

Your tasks are:
1. Understand the above code samples. Compare their techniques and performances.
2. Identify the parts that could potentially be improved.
3. Plan on how you can improve the function.
4. Improve the function.

The goal is to get a very low nRMSE (normalized RMSE) and make the code as fast as possible.

You must analyze the implementation and test results of the examples provided in the code template, and think about how you can improve them to reduce the nRMSE.
If the RMSE is much higher than 1e-2 or becomes NaN, it is likely that there is a bug in the implementation and you must debug it or think about completely different approaches.
If the running time is much longer than 600s, you must prioritize making it more efficient.
The convergence rate is the empirical order of convergence with respect to spatial resolution. It is also a good indicator of the performance of the algorithm which you may consider.

You can implement auxiliary functions or add additional arguments to the function if needed.
You can use PyTorch or JAX with GPU acceleration.
You can consider known techniques and analyze their effectiveness based on existing results. You should also consider combining existing techniques or even developing new techniques since you are doing cutting-edge research.

Your generated code will be executed to be evaluated. Make sure your `solver` function runs correctly and efficiently.
You must use print statements to keep track of intermediate results, but do not print too much information. Those outputs will be useful for future code improvement and/or debugging.

Your output must follow the following structure:

1. Summary of the existing implementation along with their performances. Identify what techniques work well, what could be buggy (due to high error or Nan), and what could be further improved.
2. Rationale for the new implementation (think step-by-step) based on the summary of the existing implementation.
3. Your python implementation (modularized with appropriate auxiliary functions):

```python
[Your implementation (do NOT add a main function)]
```
'''
79
+
80
# Template for rendering one prior implementation inside problem_prompt.
# Placeholders: {id}, {code}, {code_output}.
code_sample = '''Code Sample {id}:
```python
{code}
```

Running the above code leads to the following output: {code_output}
'''
87
+
88
+
89
# Follow-up prompt used when the generated code raised an execution error.
# Placeholders: {code_output}, {error_message}.
# Typo fix: "implmentation" -> "implementation".
debugging_execution_error_prompt = '''
Thank you for your implementation! When running the code, I got the following output and error message:

Code output: {code_output}

Error message: {error_message}

Can you think step-by-step to identify the root cause of the error and provide a solution to fix it? Please provide a detailed explanation of the error and the solution you propose. You can refer to the code implementation you provided earlier and analyze the error message to identify the issue.

Your response should be in the following format:

[Your rationale for debugging (think step-by-step)]

```python
[Your bug-free implementation]
```
'''
106
+
107
+
108
# Follow-up prompt used when the generated code produced NaN/inf errors.
# Placeholders: {code_output}, {error_message}.
# Typo fixes: "implmentation" -> "implementation", "The followings are" ->
# "The following are", "consider to use" -> "consider using".
debugging_nan_inf_prompt = '''
Thank you for your implementation! After running the code, the error becomes NaN or inf.

The following are the output and error message:

Code output: {code_output}

Error message: {error_message}

Can you think step-by-step to identify the root cause of the error and provide a solution to fix it? You should check if any computation in the code is numerically stable or not. You should also consider using smaller step sizes.

Your response should be in the following format:

[Your rationale for debugging (think step-by-step)]

```python
[Your bug-free implementation]
```
'''
prompt_files/pde_descriptions.py ADDED
@@ -0,0 +1,84 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# Prompt fragment describing the 1D advection task. The {advection_beta}
# placeholder is filled via str.format, hence the doubled braces around
# LaTeX groups. NOTE(review): the line break after the first cases row is a
# single `\\` in the rendered string (LaTeX expects `\\\\`) — confirm the
# downstream consumer tolerates this.
advection_description = '''
The PDE is the 1D advection equation, given by

\\[
\\begin{{cases}}
\\partial_t u(t, x) + \\beta \\partial_x u(t, x) = 0, & x \\in (0,1), \; t \\in (0,2] \\
u(0, x) = u_0(x), & x \\in (0,1)
\\end{{cases}}
\\]

where $\\beta$ is a constant representing the advection speed. In our task, we assume the periodic boundary condition.

Given the discretization of $u_0(x)$ of shape [batch_size, N], where $N$ is the number of spatial points, you need to implement a solver to predict $u(t, \\cdot)$ for the specified subsequent time steps ($t = t_1, \\ldots, t_T$). The solution is of shape [batch_size, T+1, N] (with the initial time frame and the subsequent steps). Note that although the required time steps are specified, you should consider using smaller time steps internally to obtain more stable simulation.

In particular, your code should be tailored to the case where $\\beta = {advection_beta}$, i.e., optimizing it particularly for this use case.
'''
17
+
18
+
19
# Prompt fragment describing the 1D Burgers task. The {burgers_nu}
# placeholder is filled via str.format, hence the doubled braces around
# LaTeX groups. Typo fixes: "wheer" -> "where", "subseqent" -> "subsequent".
burgers_description = '''
The PDE is the burgers equation, given by

\\[
\\begin{{cases}}
\\partial_t u(x, t) + \\partial_x \left( \\frac{{u^2(x, t)}}{{2}} \\right) = \\nu \\partial_{{xx}} u(x, t), & x \\in (0,1), \; t \\in (0,1] \\\\
u(x, 0) = u_0(x), & x \\in (0,1)
\\end{{cases}}
\\]

where $\\nu$ is a constant representing the viscosity. In our task, we assume the periodic boundary condition.

Given the discretization of $u_0(x)$ of shape [batch_size, N] where $N$ is the number of spatial points, you need to implement a solver to predict u(\cdot, t) for the specified subsequent time steps ($t=t_1, ..., t_T$). The solution is of shape [batch_size, T+1, N] (with the initial time frame and the subsequent steps). Note that although the required time steps are specified, you should consider using smaller time steps internally to obtain more stable simulation.

In particular, your code should be tailored to the case where $\\nu={burgers_nu}$, i.e., optimizing it particularly for this use case.
'''
35
+
36
# Prompt template for the 1D diffusion-reaction equation. Filled via str.format
# with `reacdiff1d_nu` / `reacdiff1d_rho`; literal LaTeX braces are doubled.
# Fixes: unescaped backslash sequences ("\;", "\in", "\end", "\cdot", "\ldots")
# that are invalid escapes on Python 3.12+, the u(., t) vs u(t, x) argument-order
# inconsistency with the PDE statement, and "result" -> "results".
reacdiff_1d_description = '''
The PDE is a diffusion-reaction equation, given by

\\[
\\begin{{cases}}
\\partial_t u(t, x) - \\nu \\partial_{{xx}} u(t, x) - \\rho u(1 - u) = 0, & x \\in (0,1), \\; t \\in (0,T] \\\\
u(0, x) = u_0(x), & x \\in (0,1)
\\end{{cases}}
\\]

where $\\nu$ and $\\rho$ are coefficients representing diffusion and reaction terms, respectively. In our task, we assume the periodic boundary condition.

Given the discretization of $u_0(x)$ of shape [batch_size, N] where $N$ is the number of spatial points, you need to implement a solver to predict $u(t, \\cdot)$ for the specified subsequent time steps ($t = t_1, \\ldots, t_T$). The solution is of shape [batch_size, T+1, N] (with the initial time frame and the subsequent steps). Note that although the required time steps are specified, you should consider using smaller time steps internally to obtain more stable simulation.

In particular, your code should be tailored to the case where $\\nu={reacdiff1d_nu}, \\rho={reacdiff1d_rho}$, i.e., optimizing it particularly for this use case.
Think carefully about the structure of the reaction and diffusion terms in the PDE and how you can exploit this structure to derive accurate results.
'''
53
+
54
+
55
# Prompt template for the 1D compressible Navier-Stokes equations. Filled via
# str.format with `cns1d_eta`; literal LaTeX braces are doubled ({{ }}) so that
# .format leaves them intact and only substitutes the {cns1d_eta} placeholder.
cns1d_description = '''
The PDE is the 1D compressible Navier-Stokes equations, given by

\\[
\\begin{{cases}}
\\partial_t \\rho + \\partial_x (\\rho v) = 0 \\\\
\\rho(\\partial_t v + v\\partial_x v) = -\\partial_x p + \\eta\\partial_{{xx}} v + (\\zeta + \\eta/3)\\partial_x(\\partial_x v) \\\\
\\partial_t \\left[\\epsilon + \\frac{{\\rho v^2}}{{2}}\\right] + \\partial_x\\left[\\left(\\epsilon + p + \\frac{{\\rho v^2}}{{2}}\\right)v - v\\sigma'\\right] = 0
\\end{{cases}}
\\]
where $\\rho$ is the mass density, $v$ is the velocity, $p$ is the gas pressure, $\\epsilon = p/(\\Gamma - 1)$ is the internal energy with $\\Gamma = 5/3$, $\\sigma'=(\\zeta+\\frac{{4}}{{3}}\\eta) \\partial_x v$ is the viscous stress tensor, and $\\eta, \\zeta$ are the shear and bulk viscosity coefficients, respectively. In our task, we assume periodic boundary conditions. The spatial domain is $\\Omega = [-1,1]$.

Given the discretization of the initial velocity, density, pressure, each of shape [batch_size, N] where $N$ is the number of spatial points, you need to implement a solver to predict the state variables for the specified subsequent time steps ($t = t_1, \\ldots, t_T$). The solver outputs velocity, density, pressure, each of shape [batch_size, T+1, N] (with the initial time frame and the subsequent steps). Note that although the required time steps are specified, you should consider using smaller time steps internally to obtain more stable simulation.

In particular, your code should be tailored to the case where $\\eta = \\zeta = {cns1d_eta}$, i.e., optimizing it particularly for this use case.
'''
71
+
72
+
73
# Prompt for the steady-state 2D Darcy flow problem. This template has no
# .format placeholders, so braces are single. The original text asked for a
# prediction "for the specified subsequent time steps", which is wrong for a
# time-independent (steady-state) PDE, so that phrase has been removed.
darcy_description = '''The PDE is the 2D Darcy flow equation, given by:

\\[-\\nabla \\cdot (a(x) \\nabla u(x)) = 1, \\quad x \\in (0,1)^2\\]

with the boundary condition:

\\[ u(x) = 0, \\quad x \\in \\partial (0,1)^2 \\]

where $u(x)$ is the solution function, and $a(x)$ is a batch of coefficient functions.

Given the discretization of the coefficient function $a(x)$ of shape [batch_size, N, N], where $N$ is the number of spatial grid points in each direction, you need to implement a solver to predict $u(x)$. The solution should be of shape [batch_size, N, N].'''
84
+
refine.py ADDED
@@ -0,0 +1,71 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import pandas as pd
3
+ import random
4
+ import time
5
+
6
+ from code_generation import generate_and_debug, prepare_working_folder
7
+
8
def select_seed_implementations(
    total_num_sample_solvers,
    num_sample_for_refine=None,
):
    """Pick a random subset of seed-solver indices for one refinement round.

    `None`, `-1`, or a value larger than the pool size all mean "use every
    seed". Returns a list of unique indices in [0, total_num_sample_solvers).
    """
    use_all = (
        num_sample_for_refine is None
        or num_sample_for_refine > total_num_sample_solvers
        or num_sample_for_refine == -1
    )
    count = total_num_sample_solvers if use_all else num_sample_for_refine

    # random.sample draws without replacement, so indices are unique.
    return random.sample(range(total_num_sample_solvers), count)
23
+
24
+
25
+
26
def refine(cfg):
    """Run the refinement loop: each round samples a subset of the existing
    seed solvers and asks the model to generate an improved implementation.

    Requires `cfg.method.use_sample_solver_init` to be enabled, because
    refinement always starts from existing seed implementations.
    """
    pde_name = cfg.pde.name
    working_folder = cfg.working_folder
    model_name = cfg.model.name
    start_round = cfg.method.start_round
    use_sample_solver_init = cfg.method.use_sample_solver_init
    assert use_sample_solver_init, 'Sample solvers must be enabled for refinement'

    # The seed pool lives under the per-PDE solvers folder; the CSV lists one
    # row per available seed implementation.
    seed_results_csv = os.path.join(
        'solvers', pde_name, cfg.pde.pde_setting_name, 'seeds', 'seed_results.csv'
    )
    total_num_sample_solvers = len(pd.read_csv(seed_results_csv))

    # Only set up the working folder on a fresh run; a resumed run
    # (start_round > 0) keeps its existing state.
    if start_round == 0:
        prepare_working_folder(
            cfg,
            working_folder=working_folder,
            pde_name=pde_name,
            use_sample_solver_init=use_sample_solver_init
        )

    for round_idx in range(start_round, cfg.method.num_repeated_samples):
        try:
            seed_implementations = select_seed_implementations(
                total_num_sample_solvers=total_num_sample_solvers,
                num_sample_for_refine=cfg.method.num_sample_for_refine
            )
            generate_and_debug(
                cfg,
                round_idx=round_idx,
                num_trials=cfg.method.num_debugging_trials_per_sample,
                pde_name=pde_name,
                working_folder=working_folder,
                seed_implementations=seed_implementations,
                model_name=model_name
            )
        except Exception as e:
            # One failed round must not abort the whole run.
            print(f'Error in sample {round_idx}: {e}. Move on to the next sample.')

        time.sleep(2)  # Small delay to prevent API rate limit
repeated_sample.py ADDED
@@ -0,0 +1,45 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import sys
3
+ import time
4
+
5
+ from code_generation import generate_and_debug, prepare_working_folder
6
+
7
def repeated_sample(cfg):
    """Generate solver candidates by independent repeated sampling.

    Each round asks the model for a fresh solver (no seed implementations)
    and debugs it for up to the configured number of trials.
    """
    pde_name = cfg.pde.name
    working_folder = cfg.working_folder
    model_name = cfg.model.name

    if not os.path.exists(working_folder):
        os.makedirs(working_folder)

    if cfg.redirect_stdout:
        # NOTE(review): the handle is intentionally left open so every later
        # print in this process lands in the log file.
        sys.stdout = open(os.path.join(working_folder, 'stdout.txt'), 'w')

    print(f'Model name: {cfg.model.name}')
    print(f'Working folder: {working_folder}')

    prepare_working_folder(
        cfg,
        working_folder=working_folder,
        pde_name=pde_name,
        use_sample_solver_init=False
    )

    num_trials = cfg.method.num_debugging_trials_per_sample
    for sample_idx in range(cfg.method.num_repeated_samples):
        try:
            generate_and_debug(
                cfg,
                round_idx=sample_idx,
                num_trials=num_trials,
                pde_name=pde_name,
                working_folder=working_folder,
                seed_implementations=None,
                model_name=model_name
            )
        except Exception as e:
            # One failed sample must not kill the remaining rounds.
            print(f'Error in sample {sample_idx}: {e}. Move on to the next sample.')

        time.sleep(2)  # Small delay to prevent API rate limit
requirements.txt ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ torch
2
+ torchvision
3
+ bitsandbytes
4
+ openai
5
+ pandas
6
+ tqdm
7
+ h5py
8
+ matplotlib
9
+ numpy
10
+ scipy
11
+ hydra-core
12
+ anthropic
13
+ jax
14
+ google-genai
solvers/advection/beta_0.1/seeds/errors_0.txt ADDED
File without changes
solvers/advection/beta_0.1/seeds/errors_1.txt ADDED
File without changes
solvers/advection/beta_0.1/seeds/errors_2.txt ADDED
File without changes
solvers/advection/beta_0.1/seeds/errors_3.txt ADDED
File without changes
solvers/advection/beta_0.1/seeds/errors_4.txt ADDED
File without changes
solvers/advection/beta_0.1/seeds/implementation_0.py ADDED
@@ -0,0 +1,83 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+ import numpy as np
3
+
4
+ def _fractional_periodic_shift(u, shift):
5
+ """
6
+ Periodically shifts the last axis of an array by a (possibly non-integer)
7
+ number of grid points using linear interpolation.
8
+
9
+ Parameters
10
+ ----------
11
+ u : np.ndarray
12
+ Input array of shape (..., N).
13
+ shift : float
14
+ Positive shift (to the right) measured in grid points.
15
+ Returns
16
+ -------
17
+ shifted : np.ndarray
18
+ Array of the same shape as `u`, shifted periodically.
19
+ """
20
+ N = u.shape[-1]
21
+ # Bring the shift back to the canonical interval [0, N)
22
+ shift = shift % N
23
+
24
+ # Integer and fractional parts of the shift
25
+ k = int(np.floor(shift))
26
+ f = shift - k # 0 <= f < 1
27
+
28
+ if f < 1.0e-12: # pure integer shift – avoid extra work
29
+ return np.roll(u, k, axis=-1)
30
+
31
+ # Values needed for linear interpolation
32
+ u_k = np.roll(u, k, axis=-1) # u[j - k]
33
+ u_k1 = np.roll(u, k + 1, axis=-1) # u[j - k - 1]
34
+
35
+ return (1.0 - f) * u_k + f * u_k1
36
+
37
+
38
def solver(u0_batch, t_coordinate, beta):
    """Solves the 1-D periodic advection equation u_t + beta * u_x = 0.

    Advection merely transports the initial profile, so each requested time
    level is the initial condition shifted periodically by beta * t (exact
    up to the linear interpolation used for fractional grid shifts).

    Parameters
    ----------
    u0_batch : np.ndarray
        Initial data with shape [batch_size, N]
    t_coordinate : np.ndarray
        Time stamps (T+1,) beginning with 0.
    beta : float
        Constant advection speed.

    Returns
    -------
    solutions : np.ndarray
        Array of shape [batch_size, T+1, N] containing u(t_i, x_j).
    """
    # Validate the time axis before allocating anything.
    if t_coordinate.ndim != 1:
        raise ValueError("t_coordinate must be one-dimensional")
    if abs(t_coordinate[0]) > 1e-12:
        raise ValueError("t_coordinate[0] must be 0.0")

    batch_size, num_points = u0_batch.shape
    num_frames = len(t_coordinate) - 1  # number of future time frames

    # Grid spacing on the unit-length domain [0, 1).
    dx = 1.0 / num_points

    solutions = np.empty((batch_size, num_frames + 1, num_points),
                         dtype=u0_batch.dtype)
    solutions[:, 0, :] = u0_batch

    # Every frame is computed directly from u0 (no error accumulation).
    for frame, t in enumerate(t_coordinate[1:], start=1):
        grid_shift = beta * t / dx  # displacement measured in grid points
        if frame == 1:  # tiny diagnostic once per run
            print(f"[solver] beta={beta:.4g}, t={t:.4g}, "
                  f"shift={grid_shift:.4g} grid points")
        solutions[:, frame, :] = _fractional_periodic_shift(u0_batch, grid_shift)

    return solutions
solvers/advection/beta_0.1/seeds/implementation_1.py ADDED
@@ -0,0 +1,104 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+ import numpy as np
3
+
4
+ # Optional acceleration with PyTorch if it is installed and a GPU is visible.
5
+ try:
6
+ import torch
7
+ _TORCH_AVAILABLE = True
8
+ except ModuleNotFoundError: # pragma: no cover
9
+ _TORCH_AVAILABLE = False
10
+
11
+
12
def _select_backend(array):
    """
    Return the backend name for `array`: "torch" when torch is importable and
    `array` is a torch.Tensor, "numpy" for everything else.
    """
    is_torch_tensor = _TORCH_AVAILABLE and isinstance(array, torch.Tensor)
    return "torch" if is_torch_tensor else "numpy"
20
+
21
+
22
+ def _rfft(a, backend):
23
+ """Real-to-complex FFT along the last axis for both back-ends."""
24
+ if backend == "torch":
25
+ return torch.fft.rfft(a, dim=-1)
26
+ return np.fft.rfft(a, axis=-1)
27
+
28
+
29
+ def _irfft(a, n, backend, dtype):
30
+ """
31
+ Inverse rFFT that returns a real array of length `n` and is cast back to
32
+ `dtype` for the NumPy backend (torch keeps dtype automatically).
33
+ """
34
+ if backend == "torch":
35
+ return torch.fft.irfft(a, n=n, dim=-1)
36
+ arr = np.fft.irfft(a, n=n, axis=-1)
37
+ return arr.astype(dtype, copy=False)
38
+
39
+
40
def solver(u0_batch, t_coordinate, beta):
    """Solves the 1-D periodic advection equation ∂_t u + β ∂_x u = 0.

    Exact spectral shift method:
        u(t,x) = u0(x − β t)        (periodic on [0,1))
    Each Fourier mode of u0 is multiplied by the analytic phase factor
    exp(-2πi k β t), so there is no time-stepping error at all.

    Parameters
    ----------
    u0_batch : np.ndarray | torch.Tensor, shape (B, N)
        Batch of initial conditions.
    t_coordinate : np.ndarray | torch.Tensor, shape (T+1,)
        Time stamps, starting with 0.0.
    beta : float
        Constant advection speed.

    Returns
    -------
    solutions : same backend as `u0_batch`, shape (B, T+1, N)
        Numerical solution for all requested times.
    """
    backend = _select_backend(u0_batch)
    B, N = u0_batch.shape
    T_plus_1 = t_coordinate.shape[0]

    # Convert t_coordinate and wavenumbers to the active backend.
    # NOTE(review): `dtype` must be a floating dtype for arange/exp below —
    # integer initial conditions would break here; confirm with callers.
    if backend == "torch":
        device = u0_batch.device
        dtype = u0_batch.dtype
        t_arr = torch.as_tensor(t_coordinate, dtype=dtype, device=device)
        k = torch.arange(0, N // 2 + 1, dtype=dtype, device=device)
    else:
        dtype = u0_batch.dtype
        t_arr = np.asarray(t_coordinate, dtype=dtype)
        k = np.arange(0, N // 2 + 1, dtype=dtype)

    # Allocate output container on the same backend/device as the input.
    if backend == "torch":
        solutions = torch.empty((B, T_plus_1, N), dtype=dtype, device=u0_batch.device)
    else:
        solutions = np.empty((B, T_plus_1, N), dtype=dtype)

    # Store initial condition (frame 0 is simply u0).
    solutions[:, 0, :] = u0_batch

    # Forward FFT of the initial condition
    U0 = _rfft(u0_batch, backend=backend)          # shape (B, N//2+1)

    # Build complex exponent -i 2π k β t (vectorised over all times).
    # rfft bin k holds k integer cycles over the domain; this assumes the
    # spatial domain has unit length — TODO confirm against the caller.
    if backend == "torch":
        exponent = -2 * torch.pi * 1j * (beta * t_arr)[:, None] * k[None, :]
        phase = torch.exp(exponent)                # shape (T+1, N//2+1)
    else:
        exponent = -2 * np.pi * 1j * (beta * t_arr)[:, None] * k[None, :]
        phase = np.exp(exponent)                   # shape (T+1, N//2+1)

    # Fill all requested time levels except t=0 (already done); every frame
    # comes straight from U0, so errors do not accumulate over time.
    for idx in range(1, T_plus_1):
        Ut = U0 * phase[idx]                       # broadcasting over batch
        solutions[:, idx, :] = _irfft(Ut, n=N, backend=backend, dtype=dtype)

    # Concise diagnostic
    print(f"[solver] batch={B}, N={N}, times={T_plus_1-1}, "
          f"beta={beta}, t_min={t_arr[0]:.3g}, t_max={t_arr[-1]:.3g}")

    return solutions
solvers/advection/beta_0.1/seeds/implementation_2.py ADDED
@@ -0,0 +1,127 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+ import numpy as np
3
+ import torch # Import PyTorch
4
+
5
# Select the compute device once at import time; the solver below creates
# all of its tensors on this module-level `device`.
_has_cuda = torch.cuda.is_available()
device = torch.device("cuda" if _has_cuda else "cpu")
print("GPU found, using CUDA." if _has_cuda else "GPU not found, using CPU.")
12
+
13
def spectral_step(u_t, dt, k, beta):
    """
    Advance the advection equation by one step of length `dt` using the
    Fourier spectral method (exact for this linear, constant-coefficient PDE).

    Args:
        u_t (torch.Tensor): Solution at the current time step [batch_size, N].
        dt (float): Time step duration.
        k (torch.Tensor): Angular wavenumbers [1, N], complex dtype.
        beta (float): Advection speed.

    Returns:
        torch.Tensor: Solution at the next time step [batch_size, N].
    """
    # Each Fourier mode evolves independently:
    #   û_k(t + dt) = û_k(t) * exp(-i * beta * k * dt)
    spectrum = torch.fft.fft(u_t, dim=-1)
    evolved = spectrum * torch.exp(-1j * beta * k * dt)
    # Round-off introduces a tiny imaginary component; keep the real part only.
    return torch.fft.ifft(evolved, dim=-1).real
42
+
43
def solver(u0_batch, t_coordinate, beta):
    """Solves the 1D Advection equation using the Fourier spectral method.

    Uses the module-level `device` and `spectral_step`. Because the spectral
    propagator is exact for this linear PDE, each interval between requested
    times is taken in a single step without loss of accuracy.

    Args:
        u0_batch (np.ndarray): Initial condition [batch_size, N],
            where batch_size is the number of different initial conditions,
            and N is the number of spatial grid points.
        t_coordinate (np.ndarray): Time coordinates of shape [T+1].
            It begins with t_0=0 and follows the time steps t_1, ..., t_T.
        beta (float): Constant advection speed. Specifically considered for beta=0.1.

    Returns:
        solutions (np.ndarray): Shape [batch_size, T+1, N].
            solutions[:, 0, :] contains the initial conditions (u0_batch),
            solutions[:, i, :] contains the solutions at time t_coordinate[i].
    """
    # --- 1. Initialization ---
    print("Starting solver...")

    # Get dimensions
    batch_size, N = u0_batch.shape
    num_time_steps = len(t_coordinate)
    T = num_time_steps - 1  # Number of intervals

    # Convert inputs to PyTorch tensors and move to the selected device.
    # NOTE(review): float32 is used for GPU efficiency; float64 inputs lose
    # precision here — confirm that is acceptable for the evaluation.
    u0_batch_torch = torch.tensor(u0_batch, dtype=torch.float32, device=device)
    t_coordinate_torch = torch.tensor(t_coordinate, dtype=torch.float32, device=device)

    # Define spatial domain parameters (assuming domain is [0, 1])
    L = 1.0  # Length of the spatial domain
    dx = L / N  # Spatial step size

    # Calculate wavenumbers k for the spectral method
    # k = 2 * pi * [0, 1, ..., N/2-1, -N/2, ..., -1] / L
    # Use torch.fft.fftfreq for convenience
    # Reshape k to [1, N] for broadcasting with u_hat [batch_size, N]
    k_freq = torch.fft.fftfreq(N, d=dx, device=device)
    k = 2 * np.pi * k_freq
    k = k.reshape(1, N)  # Reshape for broadcasting
    # Ensure k is complex for calculations involving 1j
    k = k.to(torch.complex64)


    # Initialize the solutions tensor to store results at each required time step
    # Shape: [batch_size, T+1, N]
    solutions_torch = torch.zeros((batch_size, num_time_steps, N), dtype=torch.float32, device=device)

    # Store the initial condition
    solutions_torch[:, 0, :] = u0_batch_torch
    print(f"Initial condition stored. Shape: {u0_batch_torch.shape}")

    # Set the current solution state
    u_current = u0_batch_torch

    # --- 2. Time Stepping Loop ---
    # One spectral step per requested interval; dt may vary between intervals.
    print(f"Starting time stepping for {T} intervals...")
    for i in range(T):
        # Calculate the time step duration for this interval
        t_current = t_coordinate_torch[i]
        t_next = t_coordinate_torch[i+1]
        dt = (t_next - t_current).item()  # Get dt as a float

        # Perform one spectral step to get the solution at t_next
        u_next = spectral_step(u_current, dt, k, beta)

        # Store the solution at t_next
        solutions_torch[:, i+1, :] = u_next

        # Update the current solution for the next iteration
        u_current = u_next

        # Optional: Print progress (roughly every 10% of the intervals)
        if (i + 1) % max(1, T // 10) == 0 or i == T - 1:
            print(f"Computed step {i+1}/{T}, Time = {t_next.item():.4f}")

    # --- 3. Finalization ---
    print("Simulation finished.")

    # Move the solutions tensor back to CPU and convert to NumPy array
    solutions_np = solutions_torch.cpu().numpy()

    print(f"Returning solutions array with shape: {solutions_np.shape}")
    return solutions_np