Skip to content

Commit 08a8d27

Browse files
committed
fix ci (try at least)
1 parent 75b8774 commit 08a8d27

File tree

2 files changed

+84
-59
lines changed

2 files changed

+84
-59
lines changed

example_workflows/nested/aiida.ipynb

Lines changed: 31 additions & 38 deletions
Original file line numberDiff line numberDiff line change
@@ -546,37 +546,20 @@
546546
"execution_count": 13,
547547
"id": "22",
548548
"metadata": {},
549-
"outputs": [
550-
{
551-
"ename": "ModuleNotFoundError",
552-
"evalue": "No module named 'nested_1'",
553-
"output_type": "error",
554-
"traceback": [
555-
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
556-
"\u001b[0;31mModuleNotFoundError\u001b[0m Traceback (most recent call last)",
557-
"Cell \u001b[0;32mIn[13], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m flow \u001b[38;5;241m=\u001b[39m \u001b[43mload_workflow_json\u001b[49m\u001b[43m(\u001b[49m\u001b[43mfile_name\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mworkflow_json_filename\u001b[49m\u001b[43m)\u001b[49m\n",
558-
"File \u001b[0;32m~/aiida_projects/adis/git-repos/python-workflow-definition/src/python_workflow_definition/jobflow.py:301\u001b[0m, in \u001b[0;36mload_workflow_json\u001b[0;34m(file_name)\u001b[0m\n\u001b[1;32m 299\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(v, \u001b[38;5;28mstr\u001b[39m) \u001b[38;5;129;01mand\u001b[39;00m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m.\u001b[39m\u001b[38;5;124m\"\u001b[39m \u001b[38;5;129;01min\u001b[39;00m v:\n\u001b[1;32m 300\u001b[0m p, m \u001b[38;5;241m=\u001b[39m v\u001b[38;5;241m.\u001b[39mrsplit(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m.\u001b[39m\u001b[38;5;124m\"\u001b[39m, \u001b[38;5;241m1\u001b[39m)\n\u001b[0;32m--> 301\u001b[0m mod \u001b[38;5;241m=\u001b[39m \u001b[43mimport_module\u001b[49m\u001b[43m(\u001b[49m\u001b[43mp\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 302\u001b[0m nodes_new_dict[\u001b[38;5;28mint\u001b[39m(k)] \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mgetattr\u001b[39m(mod, m)\n\u001b[1;32m 303\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n",
559-
"File \u001b[0;32m/usr/lib/python3.10/importlib/__init__.py:126\u001b[0m, in \u001b[0;36mimport_module\u001b[0;34m(name, package)\u001b[0m\n\u001b[1;32m 124\u001b[0m \u001b[38;5;28;01mbreak\u001b[39;00m\n\u001b[1;32m 125\u001b[0m level \u001b[38;5;241m+\u001b[39m\u001b[38;5;241m=\u001b[39m \u001b[38;5;241m1\u001b[39m\n\u001b[0;32m--> 126\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43m_bootstrap\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_gcd_import\u001b[49m\u001b[43m(\u001b[49m\u001b[43mname\u001b[49m\u001b[43m[\u001b[49m\u001b[43mlevel\u001b[49m\u001b[43m:\u001b[49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mpackage\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mlevel\u001b[49m\u001b[43m)\u001b[49m\n",
560-
"File \u001b[0;32m<frozen importlib._bootstrap>:1050\u001b[0m, in \u001b[0;36m_gcd_import\u001b[0;34m(name, package, level)\u001b[0m\n",
561-
"File \u001b[0;32m<frozen importlib._bootstrap>:1027\u001b[0m, in \u001b[0;36m_find_and_load\u001b[0;34m(name, import_)\u001b[0m\n",
562-
"File \u001b[0;32m<frozen importlib._bootstrap>:1004\u001b[0m, in \u001b[0;36m_find_and_load_unlocked\u001b[0;34m(name, import_)\u001b[0m\n",
563-
"\u001b[0;31mModuleNotFoundError\u001b[0m: No module named 'nested_1'"
564-
]
565-
}
566-
],
549+
"outputs": [],
567550
"source": [
568551
"# flow = load_workflow_json(file_name=workflow_json_filename)"
569552
]
570553
},
571554
{
572555
"cell_type": "code",
573-
"execution_count": null,
556+
"execution_count": 14,
574557
"id": "23",
575558
"metadata": {},
576559
"outputs": [],
577560
"source": [
578-
"result = run_locally(flow)\n",
579-
"result"
561+
"# result = run_locally(flow)\n",
562+
"# result"
580563
]
581564
},
582565
{
@@ -589,33 +572,33 @@
589572
},
590573
{
591574
"cell_type": "code",
592-
"execution_count": null,
575+
"execution_count": 15,
593576
"id": "25",
594577
"metadata": {},
595578
"outputs": [],
596579
"source": [
597-
"from python_workflow_definition.pyiron_base import load_workflow_json"
580+
"# from python_workflow_definition.pyiron_base import load_workflow_json"
598581
]
599582
},
600583
{
601584
"cell_type": "code",
602-
"execution_count": null,
585+
"execution_count": 16,
603586
"id": "26",
604587
"metadata": {},
605588
"outputs": [],
606589
"source": [
607-
"delayed_object_lst = load_workflow_json(file_name=workflow_json_filename)\n",
608-
"delayed_object_lst[-1].draw()"
590+
"# delayed_object_lst = load_workflow_json(file_name=workflow_json_filename)\n",
591+
"# delayed_object_lst[-1].draw()"
609592
]
610593
},
611594
{
612595
"cell_type": "code",
613-
"execution_count": null,
596+
"execution_count": 17,
614597
"id": "27",
615598
"metadata": {},
616599
"outputs": [],
617600
"source": [
618-
"delayed_object_lst[-1].pull()"
601+
"# delayed_object_lst[-1].pull()"
619602
]
620603
},
621604
{
@@ -628,42 +611,42 @@
628611
},
629612
{
630613
"cell_type": "code",
631-
"execution_count": null,
614+
"execution_count": 18,
632615
"id": "29",
633616
"metadata": {},
634617
"outputs": [],
635618
"source": [
636-
"from python_workflow_definition.pyiron_workflow import load_workflow_json"
619+
"# from python_workflow_definition.pyiron_workflow import load_workflow_json"
637620
]
638621
},
639622
{
640623
"cell_type": "code",
641-
"execution_count": null,
624+
"execution_count": 19,
642625
"id": "30",
643626
"metadata": {},
644627
"outputs": [],
645628
"source": [
646-
"wf = load_workflow_json(file_name=workflow_json_filename)"
629+
"# wf = load_workflow_json(file_name=workflow_json_filename)"
647630
]
648631
},
649632
{
650633
"cell_type": "code",
651-
"execution_count": null,
634+
"execution_count": 20,
652635
"id": "31",
653636
"metadata": {},
654637
"outputs": [],
655638
"source": [
656-
"wf.draw(size=(10, 10))"
639+
"# wf.draw(size=(10, 10))"
657640
]
658641
},
659642
{
660643
"cell_type": "code",
661-
"execution_count": null,
644+
"execution_count": 21,
662645
"id": "32",
663646
"metadata": {},
664647
"outputs": [],
665648
"source": [
666-
"wf.run()"
649+
"# wf.run()"
667650
]
668651
},
669652
{
@@ -676,10 +659,20 @@
676659
},
677660
{
678661
"cell_type": "code",
679-
"execution_count": null,
662+
"execution_count": 22,
680663
"id": "34",
681664
"metadata": {},
682-
"outputs": [],
665+
"outputs": [
666+
{
667+
"name": "stdout",
668+
"output_type": "stream",
669+
"text": [
670+
"Removed nested_test.pwd.json\n",
671+
"Removed nested_roundtrip.pwd.json\n",
672+
"Removed nested_1.json\n"
673+
]
674+
}
675+
],
683676
"source": [
684677
"# Clean up test files\n",
685678
"import os\n",

src/python_workflow_definition/aiida.py

Lines changed: 53 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -203,7 +203,9 @@ def load_workflow_json(file_name: str) -> WorkGraph:
203203
return wg
204204

205205

206-
def write_workflow_json(wg: WorkGraph, file_name: str, _nested_counter: dict = None) -> dict:
206+
def write_workflow_json(
207+
wg: WorkGraph, file_name: str, _nested_counter: dict = None
208+
) -> dict:
207209
"""Write a WorkGraph to JSON file(s), with support for nested workflows.
208210
209211
Args:
@@ -237,13 +239,13 @@ def write_workflow_json(wg: WorkGraph, file_name: str, _nested_counter: dict = N
237239
nested_wg = None
238240

239241
# Method 1: Check if this is a SubGraphTask (has spec.node_type == 'SubGraph')
240-
if hasattr(node, 'spec') and hasattr(node.spec, 'node_type'):
241-
if node.spec.node_type == 'SubGraph' and hasattr(node, 'tasks'):
242+
if hasattr(node, "spec") and hasattr(node.spec, "node_type"):
243+
if node.spec.node_type == "SubGraph" and hasattr(node, "tasks"):
242244
is_graph = True
243245
nested_wg = node
244246

245247
# Method 2: Check if the node itself has tasks attribute (indicating it's a subgraph)
246-
if not is_graph and hasattr(node, 'tasks'):
248+
if not is_graph and hasattr(node, "tasks"):
247249
# Make sure it has actual tasks (not just an empty list)
248250
tasks_list = [t for t in node.tasks if t.name not in GRAPH_LEVEL_NAMES]
249251
if len(tasks_list) > 0:
@@ -264,38 +266,49 @@ def write_workflow_json(wg: WorkGraph, file_name: str, _nested_counter: dict = N
264266
# Recursively write the nested workflow
265267
write_workflow_json(nested_wg, str(nested_path), _nested_counter)
266268

267-
data[NODES_LABEL].append({"id": i, "type": "workflow", "value": nested_filename})
269+
data[NODES_LABEL].append(
270+
{"id": i, "type": "workflow", "value": nested_filename}
271+
)
268272
else:
269273
# This is a regular function task
270274
# Try to get the module path from different sources
271275
module_path = executor.module_path
272276

273277
# If module_path is None, try to extract from pickled_callable
274-
if module_path is None and hasattr(executor, 'pickled_callable'):
278+
if module_path is None and hasattr(executor, "pickled_callable"):
275279
# For pickled callables, try to get the original function
276280
try:
277281
import cloudpickle
282+
278283
func = cloudpickle.loads(executor.pickled_callable)
279-
if hasattr(func, '__module__'):
284+
if hasattr(func, "__module__"):
280285
module_path = func.__module__
281286
except Exception:
282287
pass # Keep module_path as None
283288

284289
callable_name = f"{module_path}.{executor.callable_name}"
285-
data[NODES_LABEL].append({"id": i, "type": "function", "value": callable_name})
290+
data[NODES_LABEL].append(
291+
{"id": i, "type": "function", "value": callable_name}
292+
)
286293

287294
i += 1
288295

289296
# Handle workflow-level inputs (create input nodes)
290297
input_name_mapping = {}
291-
INTERNAL_SOCKETS = ['metadata', '_wait', '_outputs', 'function_data', 'function_inputs']
298+
INTERNAL_SOCKETS = [
299+
"metadata",
300+
"_wait",
301+
"_outputs",
302+
"function_data",
303+
"function_inputs",
304+
]
292305

293306
# First, try to get default values from graph_inputs task (for SubGraphTasks)
294307
graph_inputs_defaults = {}
295308
for task in wg.tasks:
296-
if task.name == 'graph_inputs' and hasattr(task, 'outputs'):
309+
if task.name == "graph_inputs" and hasattr(task, "outputs"):
297310
for output in task.outputs:
298-
if hasattr(output, '_name') and hasattr(output, 'value'):
311+
if hasattr(output, "_name") and hasattr(output, "value"):
299312
output_name = output._name
300313
if output.value is not None and isinstance(output.value, orm.Data):
301314
if isinstance(output.value, orm.List):
@@ -311,20 +324,24 @@ def write_workflow_json(wg: WorkGraph, file_name: str, _nested_counter: dict = N
311324
val = int(val)
312325
graph_inputs_defaults[output_name] = val
313326

314-
if hasattr(wg, 'inputs') and wg.inputs is not None and hasattr(wg.inputs, '_sockets'):
327+
if (
328+
hasattr(wg, "inputs")
329+
and wg.inputs is not None
330+
and hasattr(wg.inputs, "_sockets")
331+
):
315332
for input_name, input_socket in wg.inputs._sockets.items():
316333
# Skip metadata and other special namespaces/internal sockets
317334
if isinstance(input_socket, TaskSocketNamespace):
318335
continue
319-
if input_name in INTERNAL_SOCKETS or input_name.startswith('_'):
336+
if input_name in INTERNAL_SOCKETS or input_name.startswith("_"):
320337
continue
321338

322339
# Check if this input has a default value
323340
# First try graph_inputs defaults, then the socket value
324341
input_value = None
325342
if input_name in graph_inputs_defaults:
326343
input_value = graph_inputs_defaults[input_name]
327-
elif hasattr(input_socket, 'value') and input_socket.value is not None:
344+
elif hasattr(input_socket, "value") and input_socket.value is not None:
328345
if isinstance(input_socket.value, orm.Data):
329346
if isinstance(input_socket.value, orm.List):
330347
input_value = input_socket.value.get_list()
@@ -347,12 +364,16 @@ def write_workflow_json(wg: WorkGraph, file_name: str, _nested_counter: dict = N
347364

348365
# Handle workflow-level outputs (create output nodes)
349366
output_name_mapping = {}
350-
if hasattr(wg, 'outputs') and wg.outputs is not None and hasattr(wg.outputs, '_sockets'):
367+
if (
368+
hasattr(wg, "outputs")
369+
and wg.outputs is not None
370+
and hasattr(wg.outputs, "_sockets")
371+
):
351372
for output_name, output_socket in wg.outputs._sockets.items():
352373
# Skip metadata and other special namespaces/internal sockets
353374
if isinstance(output_socket, TaskSocketNamespace):
354375
continue
355-
if output_name in INTERNAL_SOCKETS or output_name.startswith('_'):
376+
if output_name in INTERNAL_SOCKETS or output_name.startswith("_"):
356377
continue
357378

358379
data[NODES_LABEL].append({"id": i, "type": "output", "name": output_name})
@@ -376,7 +397,9 @@ def write_workflow_json(wg: WorkGraph, file_name: str, _nested_counter: dict = N
376397
else:
377398
link_data[SOURCE_LABEL] = node_name_mapping.get(from_node_name)
378399
# if the from socket is the default result, we set it to None
379-
link_data[SOURCE_PORT_LABEL] = None if from_socket == "result" else from_socket
400+
link_data[SOURCE_PORT_LABEL] = (
401+
None if from_socket == "result" else from_socket
402+
)
380403

381404
# Handle links to graph_outputs
382405
if to_node_name == "graph_outputs":
@@ -410,7 +433,10 @@ def write_workflow_json(wg: WorkGraph, file_name: str, _nested_counter: dict = N
410433
if isinstance(input.value, orm.Data):
411434
# Check if this input is already connected (e.g., from workflow inputs)
412435
node_id = node_name_mapping[node.name]
413-
if any(link[1] == node_id and link[2] == input._name for link in existing_links):
436+
if any(
437+
link[1] == node_id and link[2] == input._name
438+
for link in existing_links
439+
):
414440
continue
415441

416442
if input.value.uuid not in data_node_name_mapping:
@@ -441,7 +467,9 @@ def write_workflow_json(wg: WorkGraph, file_name: str, _nested_counter: dict = N
441467
SOURCE_PORT_LABEL: None,
442468
}
443469
)
444-
existing_links.add((input_node_name, node_name_mapping[node.name], input._name))
470+
existing_links.add(
471+
(input_node_name, node_name_mapping[node.name], input._name)
472+
)
445473

446474
data[VERSION_LABEL] = VERSION_NUMBER
447475

@@ -456,6 +484,10 @@ def write_workflow_json(wg: WorkGraph, file_name: str, _nested_counter: dict = N
456484
workflow_data = data
457485
else:
458486
# Old-style workflow - need to update names and add result node
459-
workflow_data = set_result_node(workflow_dict=update_node_names(workflow_dict=data))
487+
workflow_data = set_result_node(
488+
workflow_dict=update_node_names(workflow_dict=data)
489+
)
460490

461-
PythonWorkflowDefinitionWorkflow(**workflow_data).dump_json_file(file_name=file_name, indent=2)
491+
PythonWorkflowDefinitionWorkflow(**workflow_data).dump_json_file(
492+
file_name=file_name, indent=2
493+
)

0 commit comments

Comments
 (0)