@@ -117,19 +117,19 @@ def write_workflow_json(wg, file_name):

        i += 1

-    for link in wgdata["links"]:
-        if (
-            wgdata["tasks"][link["from_node"]]["executor"]["callable_name"]
-            == "pickle_node"
-        ):
-            link["from_socket"] = None
-        link["source"] = node_name_mapping[link["from_node"]]
-        del link["from_node"]
-        link["target"] = node_name_mapping[link["to_node"]]
-        del link["to_node"]
-        link["sourceHandle"] = link.pop("from_socket")
-        link["targetHandle"] = link.pop("to_socket")
-        data["edges"].append(link)
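+    # Link-to-edge conversion is disabled below; the original code is kept as a comment for reference.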
+    # for link in wgdata["links"]:
+    #     if (
+    #         wgdata["tasks"][link["from_node"]]["executor"]["callable_name"]
+    #         == "pickle_node"
+    #     ):
+    #         link["from_socket"] = None
+    #     link["source"] = node_name_mapping[link["from_node"]]
+    #     del link["from_node"]
+    #     link["target"] = node_name_mapping[link["to_node"]]
+    #     del link["to_node"]
+    #     link["sourceHandle"] = link.pop("from_socket")
+    #     link["targetHandle"] = link.pop("to_socket")
+    #     data["edges"].append(link)

    with open(file_name, "w") as f:
        # json.dump({"nodes": data[], "edges": edges_new_lst}, f)
@@ -171,124 +171,202 @@ def construct_wg_simple(add_x_and_y_func, add_x_and_y_and_z_func) -> WorkGraph:


 def construct_wg_qe(
-    get_dict_task,
-    get_list_task,
-    get_bulk_structure_task,
-    calculate_qe_task,
-    generate_structures_task,
-    plot_energy_volume_curve_task,
+    get_dict,
+    get_list,
+    get_bulk_structure,
+    calculate_qe,
+    generate_structures,
+    plot_energy_volume_curve,
     strain_lst,
 ):
     wg = WorkGraph()

-    # TODO: Change order of tasks, and then manually add links, such that we have the same order as in the original
-    # TODO: JSON, and can debug better
-    # pickle_node_task = task.pythonjob(pickle_node)
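+    # Bulk Al structure task; its element input is connected via wg.add_link at the end of this function.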
+    get_bulk_structure_task = wg.add_task(
+        get_bulk_structure,
+        name="bulk",
+        register_pickle_by_value=True,
+    )
+
+    relax_task = wg.add_task(
+        calculate_qe,
+        name="mini",
+        # input_dict=wg.tasks.get_dict.outputs.result,
+        # working_directory=wg.tasks.relax_workdir.outputs.result,
+        register_pickle_by_value=True,
+    )
+
+    generate_structures_task = wg.add_task(
+        generate_structures,
+        name="generate_structures",
+        # structure=wg.tasks.mini.outputs.structure,
+        # strain_lst=wg.tasks.strain_lst.outputs.result,
+        register_pickle_by_value=True,
+    )
+
+    # here we add the structure outputs based on the number of strains
+    del wg.tasks.generate_structures.outputs["result"]
+
+    qe_tasks = []
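+    # For each strain, expose an output socket s_{i} on generate_structures and add a matching QE task.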
+    for i, strain in enumerate(strain_lst):
+
+        generate_structures_task.add_output("workgraph.any", f"s_{i}")

-    # task.pythonjob(outputs=["element"])(pickle_node), name="element", value="Al"
+        # Possible remove `get_dict_task` here to have it later
+        # get_dict_task = wg.add_task(
+        #     get_dict,
+        #     name=f"get_dict_{i}",
+        #     # calculation=wg.tasks.calculation_scf.outputs.result,
+        #     # structure=wg.tasks.generate_structures.outputs[f"s_{i}"],
+        #     # kpts=wg.tasks.kpts.outputs.result,
+        #     # pseudopotentials=wg.tasks.pseudopotentials.outputs.result,
+        #     # smearing=wg.tasks.smearing.outputs.result,
+        #     register_pickle_by_value=True,
+        # )

-    pickle_element = task.pythonjob(outputs=["element"])(pickle_node)
-    wg.add_task(pickle_element, name="pickle_element", value="Al")
+        strain_dir = f"strain_{i}"
+
+        # strain_dir_task = wg.add_task(
+        #     task.pythonjob(outputs=[strain_dir])(pickle_node),
+        #     name=strain_dir,
+        #     value=strain_dir,
+        #     register_pickle_by_value=True,
+        # )
+        # del pickle_node.TaskCls

+        # import ipdb; ipdb.set_trace()
+        qe_task = wg.add_task(
+            calculate_qe,
+            name=f"qe_{i}",
+            # input_dict=get_dict_task.outputs.result,
+            # working_directory=strain_dir_task.outputs[strain_dir],
+            register_pickle_by_value=True,
+        )
+        qe_tasks.append(qe_task)
+
+        # collect energy and volume
+        # TODO: Maybe put this outside, in a separate for-loop to again try to fix the order
+        # wg.add_link(qe_task.outputs.energy, wg.tasks.get_energies.inputs.kwargs)
+        # wg.add_link(qe_task.outputs.volume, wg.tasks.get_volumes.inputs.kwargs)
+
+    plot_energy_volume_curve_task = wg.add_task(
+        plot_energy_volume_curve,
+        # volume_lst=wg.tasks.get_volumes.outputs.result,
+        # energy_lst=wg.tasks.get_energies.outputs.result,
+        register_pickle_by_value=True,
+    )
+
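+    # Pickle tasks wrap plain Python values (element symbol, lattice constant, flags) as workflow nodes.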
+    pickle_element_task = wg.add_task(
+        task.pythonjob(outputs=["element"])(pickle_node),
+        name="pickle_element",
+        value="Al",
+    )
     del pickle_node.TaskCls

-    wg.add_task(task.pythonjob(outputs=["a"])(pickle_node), name="pickle_a", value=4.05)
+    pickle_a_task = wg.add_task(task.pythonjob(outputs=["a"])(pickle_node), name="pickle_a", value=4.05)
     del pickle_node.TaskCls
-    wg.add_task(
+
+    pickle_cubic_task = wg.add_task(
         task.pythonjob(outputs=["cubic"])(pickle_node), name="pickle_cubic", value=True
     )
     del pickle_node.TaskCls

-    for task_ in wg.tasks:
-        print(task_.name, task_.outputs)
+    pickle_relax_workdir_task = wg.add_task(
+        task.pythonjob(outputs=["relax_workdir"])(pickle_node),
+        name="pickle_relax_workdir",
+        value="mini",
+    )
+    del pickle_node.TaskCls

-    import ipdb; ipdb.set_trace()
-    wg.add_task(
-        get_bulk_structure_task,
-        name="bulk",
-        element=wg.tasks.pickle_element.outputs.element,
-        a=wg.tasks.pickle_a.outputs.a,
-        cubic=wg.tasks.pickle_cubic.outputs.result,
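+    # Build the QE input dictionary from the bulk structure; the remaining settings are still commented out.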
+    # ? relax or SCF, or general?
+    get_dict_task = wg.add_task(
+        get_dict,
+        name="get_dict",
+        structure=wg.tasks.bulk.outputs.result,
+        # calculation=wg.tasks.calculation.outputs.result,
+        # kpts=wg.tasks.kpts.outputs.result,
+        # pseudopotentials=wg.tasks.pseudopotentials.outputs.result,
+        # smearing=wg.tasks.smearing.outputs.result,
         register_pickle_by_value=True,
     )
-    wg.add_task(task.pythonjob()(pickle_node), name="calculation", value="vc-relax")

-    wg.add_task(
+    pp_task = wg.add_task(
         task.pythonjob(pickle_node),
         name="pseudopotentials",
         value={"Al": "Al.pbe-n-kjpaw_psl.1.0.0.UPF"},
     )
-    wg.add_task(pickle_node_task, name="smearing", value=0.02)
-    wg.add_task(
+    del pickle_node.TaskCls
+
+    kpts_task = wg.add_task(
         task.pythonjob(outputs=["kpts"])(pickle_node), name="kpts_task", value=[3, 3, 3]
     )
-    wg.add_task(
-        get_dict_task,
-        name="get_dict",
-        structure=wg.tasks.bulk.outputs.result,
-        calculation=wg.tasks.calculation.outputs.result,
-        kpts=wg.tasks.kpts.outputs.result,
-        pseudopotentials=wg.tasks.pseudopotentials.outputs.result,
-        smearing=wg.tasks.smearing.outputs.result,
-        register_pickle_by_value=True,
-    )
-    wg.add_task(pickle_node_task, name="scf_workdir", value="mini")
-    wg.add_task(
-        calculate_qe_task,
-        name="mini",
-        input_dict=wg.tasks.get_dict.outputs.result,
-        working_directory=wg.tasks.scf_workdir.outputs.result,
-        register_pickle_by_value=True,
-    )
+    del pickle_node.TaskCls

-    wg.add_task(pickle_node_task, name="strain_lst", value=strain_lst)
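+    # The calculation mode and the smearing width are likewise provided as pickled scalar values.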
+    vc_relax_task = wg.add_task(task.pythonjob()(pickle_node), name="calculation", value="vc-relax")
+    del pickle_node.TaskCls
+
+    smearing_task = wg.add_task(task.pythonjob()(pickle_node), name="smearing", value=0.02)
+    del pickle_node.TaskCls

-    wg.add_task(
-        generate_structures_task,
-        name="generate_structures",
-        structure=wg.tasks.mini.outputs.structure,
-        strain_lst=wg.tasks.strain_lst.outputs.result,
-        register_pickle_by_value=True,
+    strain_lst_task = wg.add_task(
+        task.pythonjob(outputs=["strain_lst"])(pickle_node),
+        name="pickle_strain_lst",
+        value=strain_lst,
     )
-    # here we add the structure outputs based on the number of strains
-    del wg.tasks.generate_structures.outputs["result"]
-    for i in range(len(strain_lst)):
-        wg.tasks.generate_structures.add_output("workgraph.any", f"s_{i}")
-
-    wg.add_task(get_list_task, name="get_energies", register_pickle_by_value=True)
-    wg.add_task(get_list_task, name="get_volumes", register_pickle_by_value=True)
-    wg.add_task(pickle_node_task, name="calculation_scf", value="scf")
+    del pickle_node.TaskCls

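+    # Second per-strain loop: pickle the working directory, build get_dict_{i}, and (once) add the "scf" calculation label.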
+    strain_dir_tasks = []
     for i, strain in enumerate(strain_lst):
-        get_dict_task_ = wg.add_task(
-            get_dict_task,
-            name=f"get_dict_{i}",
-            calculation=wg.tasks.calculation_scf.outputs.result,
-            structure=wg.tasks.generate_structures.outputs[f"s_{i}"],
-            kpts=wg.tasks.kpts.outputs.result,
-            pseudopotentials=wg.tasks.pseudopotentials.outputs.result,
-            smearing=wg.tasks.smearing.outputs.result,
+
+        strain_dir = f"strain_{i}"
+
+        strain_dir_task = wg.add_task(
+            task.pythonjob(outputs=[strain_dir])(pickle_node),
+            name=strain_dir,
+            value=strain_dir,
             register_pickle_by_value=True,
         )
-        strain_dir = f"strain_{i}"
-        strain_dir_task = wg.add_task(pickle_node, name=strain_dir, value=strain_dir)
+        del pickle_node.TaskCls
+        strain_dir_tasks.append(strain_dir_task)

-        qe_task = wg.add_task(
-            calculate_qe_task,
-            name=f"qe_{i}",
-            input_dict=get_dict_task_.outputs.result,
-            working_directory=strain_dir_task.outputs.result,
+        # Possible remove `get_dict_task` here to have it later
+        get_dict_task = wg.add_task(
+            get_dict,
+            name=f"get_dict_{i}",
+            # calculation=wg.tasks.calculation_scf.outputs.result,
+            # structure=wg.tasks.generate_structures.outputs[f"s_{i}"],
+            # kpts=wg.tasks.kpts.outputs.result,
+            # pseudopotentials=wg.tasks.pseudopotentials.outputs.result,
+            # smearing=wg.tasks.smearing.outputs.result,
             register_pickle_by_value=True,
         )
+
+        if i == 0:
+            scf_task = wg.add_task(task.pythonjob(pickle_node), name="calculation_scf", value="scf")
+            del pickle_node.TaskCls
+
         # collect energy and volume
-        wg.add_link(qe_task.outputs.energy, wg.tasks.get_energies.inputs.kwargs)
-        wg.add_link(qe_task.outputs.volume, wg.tasks.get_volumes.inputs.kwargs)
+        # TODO: Maybe put this outside, in a separate for-loop to again try to fix the order
+        # wg.add_link(qe_task.outputs.energy, wg.tasks.get_energies.inputs.kwargs)
+        # wg.add_link(qe_task.outputs.volume, wg.tasks.get_volumes.inputs.kwargs)

-    wg.add_task(
-        plot_energy_volume_curve_task,
-        volume_lst=wg.tasks.get_volumes.outputs.result,
-        energy_lst=wg.tasks.get_energies.outputs.result,
+    get_energies_task = wg.add_task(
+        task.pythonjob(outputs=["energies"])(get_list),
+        name="get_energies",
         register_pickle_by_value=True,
     )
+    del get_list.TaskCls
+
+    get_volumes_task = wg.add_task(
+        task.pythonjob(outputs=["volumes"])(get_list),
+        name="get_volumes",
+        register_pickle_by_value=True,
+    )
+    del get_list.TaskCls
+
+    # Add remaining links
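+    # Wire the pickled element value into the bulk structure task; a and cubic remain commented out.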
+    wg.add_link(get_bulk_structure_task.inputs.element, element=pickle_element_task.outputs.element)
+    # ,
+    # a=wg.tasks.pickle_a.outputs.a,
+    # cubic=wg.tasks.pickle_cubic.outputs.result,

     return wg