From cb234016b44d1c86f69d65053271716cd02580fe Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Mon, 15 Sep 2025 15:48:19 +0100 Subject: [PATCH 1/3] =?UTF-8?q?=E2=9C=A8=20Added=20batch=20run=20creation?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- simvue/api/objects/artifact/base.py | 4 ++ simvue/api/objects/base.py | 90 ++++++++++++++++++++++++----- simvue/api/objects/run.py | 51 +++++++++++++++- tests/unit/test_run.py | 17 ++++++ 4 files changed, 146 insertions(+), 16 deletions(-) diff --git a/simvue/api/objects/artifact/base.py b/simvue/api/objects/artifact/base.py index e8e4df92..6261e446 100644 --- a/simvue/api/objects/artifact/base.py +++ b/simvue/api/objects/artifact/base.py @@ -59,6 +59,10 @@ def __init__( # from the initial creation self._init_data: dict[str, dict] = {} + @classmethod + def new(cls, *_, **__) -> Self: + raise NotImplementedError + def commit(self) -> None: """Not applicable, cannot commit single write artifact.""" self._logger.info("Cannot call method 'commit' on write-once type 'Artifact'") diff --git a/simvue/api/objects/base.py b/simvue/api/objects/base.py index e26c6481..87977c9d 100644 --- a/simvue/api/objects/base.py +++ b/simvue/api/objects/base.py @@ -18,6 +18,7 @@ import msgpack import pydantic +from collections.abc import Generator from simvue.utilities import staging_merger from simvue.config.user import SimvueConfiguration from simvue.exception import ObjectNotFoundError @@ -172,6 +173,16 @@ def to_params(self) -> dict[str, str]: return {"id": self.column, "desc": self.descending} +class VisibilityBatchArgs(pydantic.BaseModel): + tenant: bool | None = None + user: list[str] | None = None + public: bool | None = None + + +class ObjectBatchArgs(pydantic.BaseModel): + pass + + class SimvueObject(abc.ABC): def __init__( self, @@ -361,13 +372,21 @@ def _get_visibility(self) -> dict[str, bool | list[str]]: return {} @classmethod + @abc.abstractmethod def 
new(cls, **_) -> Self: pass + @classmethod + def batch_create( + cls, obj_args: ObjectBatchArgs, visibility: VisibilityBatchArgs + ) -> Generator[str]: + _, __ = obj_args, visibility + raise NotImplementedError + @classmethod def ids( cls, count: int | None = None, offset: int | None = None, **kwargs - ) -> typing.Generator[str, None, None]: + ) -> Generator[str, None, None]: """Retrieve a list of all object identifiers. Parameters @@ -402,7 +421,7 @@ def get( count: pydantic.PositiveInt | None = None, offset: pydantic.NonNegativeInt | None = None, **kwargs, - ) -> typing.Generator[tuple[str, T | None], None, None]: + ) -> Generator[tuple[str, T | None], None, None]: """Retrieve items of this object type from the server. Parameters @@ -462,7 +481,7 @@ def count(cls, **kwargs) -> int: @classmethod def _get_all_objects( cls, offset: int | None, count: int | None, **kwargs - ) -> typing.Generator[dict, None, None]: + ) -> Generator[dict, None, None]: _class_instance = cls(_read_only=True) _url = f"{_class_instance._base_url}" @@ -496,7 +515,7 @@ def read_only(self, is_read_only: bool) -> None: if not self._read_only: self._staging = self._get_local_staged() - def commit(self) -> dict | None: + def commit(self) -> dict | list[dict] | None: """Send updates to the server, or if offline, store locally.""" if self._read_only: raise AttributeError("Cannot commit object in 'read-only' mode") @@ -508,15 +527,22 @@ def commit(self) -> dict | None: self._cache() return - _response: dict | None = None + _response: dict[str, str] | list[dict[str, str]] | None = None # Initial commit is creation of object # if staging is empty then we do not need to use PUT if not self._identifier or self._identifier.startswith("offline_"): - self._logger.debug( - f"Posting from staged data for {self._label} '{self.id}': {self._staging}" - ) - _response = self._post(**self._staging) + # If batch upload send as list, else send as dictionary of params + if _batch_commit := 
self._staging.get("batch"): + self._logger.debug( + f"Posting batched data to server: \n{json.dumps(_batch_commit, indent=2)}" + ) + _response = self._post_batch(batch_data=_batch_commit) + else: + self._logger.debug( + f"Posting from staged data for {self._label} '{self.id}': {self._staging}" + ) + _response = self._post_single(**self._staging) elif self._staging: self._logger.debug( f"Pushing updates from staged data for {self._label} '{self.id}': {self._staging}" @@ -552,9 +578,48 @@ def url(self) -> URL | None: """ return None if self._identifier is None else self._base_url / self._identifier - def _post(self, is_json: bool = True, **kwargs) -> dict[str, typing.Any]: + def _post_batch( + self, + batch_data: list[ObjectBatchArgs], + ) -> list[dict[str, str]]: + _response = sv_post( + url=f"{self._base_url}", + headers=self._headers | {"Content-Type": "application/msgpack"}, + params=self._params, + data=batch_data, + is_json=True, + ) + + if _response.status_code == http.HTTPStatus.FORBIDDEN: + raise RuntimeError( + f"Forbidden: You do not have permission to create object of type '{self._label}'" + ) + + _json_response = get_json_from_response( + response=_response, + expected_status=[http.HTTPStatus.OK, http.HTTPStatus.CONFLICT], + scenario=f"Creation of multiple {self._label}s", + expected_type=list, + ) + + if not len(batch_data) == (_n_created := len(_json_response)): + raise RuntimeError( + f"Expected {len(batch_data)} to be created, but only {_n_created} found." 
+ ) + + self._logger.debug(f"successfully created {_n_created} {self._label}s") + + return _json_response + + def _post_single( + self, + *, + is_json: bool = True, + **kwargs, + ) -> dict[str, typing.Any] | list[dict[str, typing.Any]]: if not is_json: kwargs = msgpack.packb(kwargs, use_bin_type=True) + _response = sv_post( url=f"{self._base_url}", headers=self._headers | {"Content-Type": "application/msgpack"}, @@ -574,11 +639,6 @@ def _post(self, is_json: bool = True, **kwargs) -> dict[str, typing.Any]: scenario=f"Creation of {self._label}", ) - if isinstance(_json_response, list): - raise RuntimeError( - "Expected dictionary from JSON response but got type list" - ) - if _id := _json_response.get("id"): self._logger.debug("'%s' created successfully", _id) self._identifier = _id diff --git a/simvue/api/objects/run.py b/simvue/api/objects/run.py index c00b3f9e..681fc1e2 100644 --- a/simvue/api/objects/run.py +++ b/simvue/api/objects/run.py @@ -7,6 +7,7 @@ """ +from collections.abc import Generator, Iterable import http import typing import pydantic @@ -19,7 +20,15 @@ except ImportError: from typing_extensions import Self -from .base import SimvueObject, Sort, staging_check, Visibility, write_only +from .base import ( + ObjectBatchArgs, + VisibilityBatchArgs, + SimvueObject, + Sort, + staging_check, + Visibility, + write_only, +) from simvue.api.request import ( get as sv_get, put as sv_put, @@ -54,6 +63,18 @@ def check_column(cls, column: str) -> str: return column +class RunBatchArgs(ObjectBatchArgs): + name: str | None = None + description: str | None = None + tags: list[str] | None = None + metadata: dict[str, str] | None = None + folder: typing.Annotated[str, pydantic.Field(pattern=FOLDER_REGEX)] | None = None + system: dict[str, typing.Any] | None = None + status: typing.Literal[ + "terminated", "created", "failed", "completed", "lost", "running" + ] = "created" + + class Run(SimvueObject): """Class for directly interacting with/creating runs on the 
server.""" @@ -123,6 +144,34 @@ def new( **kwargs, ) + @classmethod + @typing.override + @pydantic.validate_call + def batch_create( + cls, + entries: Iterable[RunBatchArgs], + *, + visibility: VisibilityBatchArgs | None = None, + folder: typing.Annotated[str, pydantic.StringConstraints(pattern=FOLDER_REGEX)] + | None = None, + metadata: dict[str, str] | None = None, + offline: bool = False, + ) -> Generator[str]: + _data: list[dict[str, object]] = [ + entry.model_dump(exclude_none=True) + | ( + {"visibility": visibility.model_dump(exclude_none=True)} + if visibility + else {} + ) + | ({"folder": folder} if folder else {}) + | {"metadata": (entry.metadata or {}) | (metadata or {})} + for entry in entries + ] + for entry in Run(batch=_data, _offline=offline, _read_only=False).commit(): + _id: str = entry["id"] + yield _id + @property @staging_check def name(self) -> str: diff --git a/tests/unit/test_run.py b/tests/unit/test_run.py index 560620ff..fc7e48e0 100644 --- a/tests/unit/test_run.py +++ b/tests/unit/test_run.py @@ -4,6 +4,7 @@ import time import datetime import uuid +from simvue.api.objects.run import RunBatchArgs from simvue.sender import sender from simvue.api.objects import Run, Folder from simvue.client import Client @@ -187,3 +188,19 @@ def test_run_get_properties() -> None: if _failed: raise AssertionError("\n" + "\n\t- ".join(": ".join(i) for i in _failed)) + + +@pytest.mark.api +@pytest.mark.online +def test_batch_run_creation() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _folder: Folder = Folder.new(path=f"/simvue_unit_testing/{_uuid}") + _folder.commit() + _runs = [ + RunBatchArgs(name=f"run_{i}") + for i in range(10) + ] + for _id in Run.batch_create(entries=_runs, folder=f"/simvue_unit_testing/{_uuid}", metadata={"batch_id": "0"}): + assert Run(identifier=_id).name + with contextlib.suppress(Exception): + _folder.delete(recursive=True, delete_runs=True) From be134effebc97e977eea535600a0579342009db4 Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Mon, 15 Sep 2025 15:57:07 +0100 Subject: [PATCH 2/3] =?UTF-8?q?=F0=9F=90=9B=20Fix=20wrong=20method=20call?= =?UTF-8?q?=20in=20objects=20for=20post?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- simvue/api/objects/artifact/file.py | 2 +- simvue/api/objects/artifact/object.py | 2 +- simvue/api/objects/metrics.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/simvue/api/objects/artifact/file.py b/simvue/api/objects/artifact/file.py index b2dfd078..664dadde 100644 --- a/simvue/api/objects/artifact/file.py +++ b/simvue/api/objects/artifact/file.py @@ -94,7 +94,7 @@ def new( _artifact._init_data = {} else: - _artifact._init_data = _artifact._post(**_artifact._staging) + _artifact._init_data = _artifact._post_single(**_artifact._staging) _artifact._staging["url"] = _artifact._init_data["url"] _artifact._init_data["runs"] = kwargs.get("runs") or {} diff --git a/simvue/api/objects/artifact/object.py b/simvue/api/objects/artifact/object.py index efe1d9fa..bfbdd46e 100644 --- a/simvue/api/objects/artifact/object.py +++ b/simvue/api/objects/artifact/object.py @@ -114,7 +114,7 @@ def new( file.write(_serialized) else: - _artifact._init_data = _artifact._post(**_artifact._staging) + _artifact._init_data = _artifact._post_single(**_artifact._staging) _artifact._staging["url"] = _artifact._init_data["url"] _artifact._init_data["runs"] = kwargs.get("runs") or {} diff --git a/simvue/api/objects/metrics.py b/simvue/api/objects/metrics.py index fb9cea1e..c4bf0f9d 100644 --- a/simvue/api/objects/metrics.py +++ b/simvue/api/objects/metrics.py @@ -139,7 +139,7 @@ def names(self, run_ids: list[str]) -> list[str]: ) def _post(self, **kwargs) -> dict[str, typing.Any]: - return super()._post(is_json=False, **kwargs) + return super()._post_single(is_json=False, **kwargs) def delete(self, **kwargs) -> dict[str, typing.Any]: """Metrics cannot be deleted""" From 
647e40970e18ffae6db7c4858295b7a6d9944b44 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristian=20Zar=C4=99bski?= Date: Mon, 15 Sep 2025 15:59:28 +0100 Subject: [PATCH 3/3] =?UTF-8?q?=F0=9F=93=9D=20[skip=20ci]=20Updated=20chan?= =?UTF-8?q?gelog?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index f778567c..ae89a2cd 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,7 @@ - Adds additional options to `Client.get_runs`. - Added ability to include environment variables within metadata for runs. - Improves checks on `offline.cache` directory specification in config file. +- Added ability to upload multiple runs as a batch via the low-level API. ## [v2.1.2](https://github.com/simvue-io/client/releases/tag/v2.1.2) - 2025-06-25