From 2f60bfdf1ed25b7846bc72e5eb91cfeea5ce7627 Mon Sep 17 00:00:00 2001
From: Stephan van Rooij <1292510+svrooij@users.noreply.github.com>
Date: Fri, 17 Jan 2025 22:16:58 +0100
Subject: [PATCH 1/6] Add support for Python 3.13

Fixes #704

Add support for Python 3.13 and update dependencies.

* **pyproject.toml**
  - Extend the `requires-python` range to include Python 3.13 (now `>=3.9, <3.14`).
  - Add Python 3.13 to the `classifiers` field.
  - Raise the minimum requirement for all `microsoft-kiota-...` packages to at least 1.8.0.

* **.github/workflows/build.yml**
  - Add Python 3.13 to the test `matrix`.

* **requirements-dev.txt**
  - Update dependencies to be compatible with Python 3.13.

* **tests/conftest.py**
  - Add a test to check for Python 3.13 compatibility.

---

For more details, open the [Copilot Workspace session](https://copilot-workspace.githubnext.com/microsoftgraph/msgraph-sdk-python-core/issues/704?shareId=XXXX-XXXX-XXXX-XXXX).
---
 .github/workflows/build.yml | 2 +-
 pyproject.toml              | 9 +++++----
 requirements-dev.txt        | 6 +++---
 tests/conftest.py           | 5 +++++
 4 files changed, 14 insertions(+), 8 deletions(-)

diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 2b32073d..e0c2f647 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -17,7 +17,7 @@ jobs:
     strategy:
       max-parallel: 5
       matrix:
-        python-version: ["3.9", "3.10", "3.11", "3.12"]
+        python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"]
 
     steps:
     - uses: actions/checkout@v4
diff --git a/pyproject.toml b/pyproject.toml
index 552d8473..17a841f1 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -11,12 +11,12 @@ version = "1.2.0"
 authors = [{name = "Microsoft", email = "graphtooling+python@microsoft.com"}]
 description = "Core component of the Microsoft Graph Python SDK"
 dependencies = [
-    "microsoft-kiota-abstractions >=1.0.0,<2.0.0",
-    "microsoft-kiota-authentication-azure >=1.0.0,<2.0.0",
-    "microsoft-kiota-http >=1.0.0,<2.0.0",
+    "microsoft-kiota-abstractions >=1.8.0,<2.0.0",
+    "microsoft-kiota-authentication-azure >=1.8.0,<2.0.0",
+    "microsoft-kiota-http >=1.8.0,<2.0.0",
     "httpx[http2] >=0.23.0",
 ]
-requires-python = ">=3.9"
+requires-python = ">=3.9, <3.14"
 license = {file = "LICENSE"}
 readme = "README.md"
 keywords = ["msgraph", "openAPI", "Microsoft", "Graph"]
@@ -26,6 +26,7 @@ classifiers = [
     "Programming Language :: Python :: 3.10",
     "Programming Language :: Python :: 3.11",
     "Programming Language :: Python :: 3.12",
+    "Programming Language :: Python :: 3.13",
     "License :: OSI Approved :: MIT License",
 ]
diff --git a/requirements-dev.txt b/requirements-dev.txt
index 6b12b28b..34af3d77 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -145,11 +145,11 @@ httpx[http2]==0.28.1
 
 hyperframe==6.0.1 ; python_full_version >= '3.6.1'
 
-microsoft-kiota-abstractions==1.7.1
+microsoft-kiota-abstractions==1.8.0
 
-microsoft-kiota-authentication-azure==1.7.1
+microsoft-kiota-authentication-azure==1.8.0
 
-microsoft-kiota-http==1.7.1
+microsoft-kiota-http==1.8.0
 
 multidict==6.1.0 ; python_version >= '3.7'
diff --git a/tests/conftest.py b/tests/conftest.py
index 9703b820..c2f5c2e7 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -50,3 +50,8 @@ def mock_response():
     return httpx.Response(
         json={'message': 'Success!'}, status_code=200, headers={"Content-Type": "application/json"}
     )
+
+
+def test_python_3_13_compatibility():
+    import sys
+    assert sys.version_info >= (3, 13), "Python 3.13 or higher is required"
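An aside on the test added above: `test_python_3_13_compatibility` asserts `sys.version_info >= (3, 13)`, which fails on the 3.9–3.12 interpreters that the build matrix still targets, and PATCH 3/6 below removes it again. If a version guard were wanted at all, a minimal sketch matching the `>=3.9, <3.14` range declared in `pyproject.toml` could look like this (the test name and message are illustrative, not taken from the repository):

```python
import sys


def test_python_version_in_supported_range():
    # Mirror the pyproject.toml constraint: requires-python = ">=3.9, <3.14".
    assert (3, 9) <= sys.version_info[:2] <= (3, 13), \
        "this SDK supports Python 3.9 through 3.13"
```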
From 41a7b22a80bc0e8e19f9d88ec5e68bf04dd13ad1 Mon Sep 17 00:00:00 2001
From: Stephan van Rooij <1292510+svrooij@users.noreply.github.com>
Date: Fri, 17 Jan 2025 22:19:34 +0100
Subject: [PATCH 2/6] Update `pyproject.toml`: remove the upper bound on
 `requires-python`

* Drop the `<3.14` cap that PATCH 1/6 added to `requires-python`; Python 3.13
  support remains declared through the `classifiers` field
---
 pyproject.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index 17a841f1..b2b28300 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -16,7 +16,7 @@ dependencies = [
     "microsoft-kiota-http >=1.8.0,<2.0.0",
     "httpx[http2] >=0.23.0",
 ]
-requires-python = ">=3.9, <3.14"
+requires-python = ">=3.9"
 license = {file = "LICENSE"}
 readme = "README.md"
 keywords = ["msgraph", "openAPI", "Microsoft", "Graph"]

From 24cd6fa611423e5a10fcc396558cfba786d81f4c Mon Sep 17 00:00:00 2001
From: Stephan van Rooij <1292510+svrooij@users.noreply.github.com>
Date: Fri, 17 Jan 2025 21:21:37 +0000
Subject: [PATCH 3/6] chore: Copilot added something it shouldn't have

---
 tests/conftest.py | 5 -----
 1 file changed, 5 deletions(-)

diff --git a/tests/conftest.py b/tests/conftest.py
index c2f5c2e7..9703b820 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -50,8 +50,3 @@ def mock_response():
     return httpx.Response(
         json={'message': 'Success!'}, status_code=200, headers={"Content-Type": "application/json"}
     )
-
-
-def test_python_3_13_compatibility():
-    import sys
-    assert sys.version_info >= (3, 13), "Python 3.13 or higher is required"

From 3aaf800886b457d80bd5fd518b59ac14ab0ed683 Mon Sep 17 00:00:00 2001
From: Stephan van Rooij <1292510+svrooij@users.noreply.github.com>
Date: Fri, 17 Jan 2025 22:05:11 +0000
Subject: [PATCH 4/6] fix(ci): run the publish pipeline on Python 3.13

---
 .github/workflows/publish.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml
index 78c1a67e..05d55f05 100644
--- a/.github/workflows/publish.yml
+++ b/.github/workflows/publish.yml
@@ -18,10 +18,10 @@ jobs:
     steps:
       - name: Checkout code
         uses: actions/checkout@v4
-      - name: Set up Python 3.12
+      - name: Set up Python 3.13
         uses: actions/setup-python@v5
         with:
-          python-version: 3.12
+          python-version: 3.13
       - name: Install dependencies
         run: |
           python -m pip install --upgrade pip
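The patches so far raise the `microsoft-kiota-*` floor to 1.8.0 and move CI to Python 3.13; the next patch adapts the sources. To confirm that a local environment actually resolved the new floors, a throwaway check along these lines may help (an illustrative script, not part of the series; the naive version parse assumes plain `X.Y.Z` release strings):

```python
from importlib.metadata import version

# Package names taken from the dependency list in pyproject.toml.
for name in (
    "microsoft-kiota-abstractions",
    "microsoft-kiota-authentication-azure",
    "microsoft-kiota-http",
):
    installed = version(name)
    # Naive numeric comparison; fine for simple X.Y.Z release strings.
    assert tuple(map(int, installed.split(".")[:3])) >= (1, 8, 0), \
        f"{name}=={installed} is older than the required 1.8.0"
    print(f"{name}=={installed} OK")
```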
From e8a72cf59315a618ccc23fb5ec8106604453293d Mon Sep 17 00:00:00 2001
From: Stephan van Rooij <1292510+svrooij@users.noreply.github.com>
Date: Fri, 17 Jan 2025 22:54:46 +0000
Subject: [PATCH 5/6] fix: Python 3.13 changes to typing

---
 .devcontainer/devcontainer.json               | 38 +++++++++++++++++
 .vscode/settings.json                         |  7 +++-
 requirements-dev.txt                          |  2 +-
 .../models/large_file_upload_session.py       |  3 +-
 src/msgraph_core/models/page_result.py        |  3 +-
 src/msgraph_core/models/upload_result.py      |  3 +-
 .../requests/batch_response_content.py        | 21 ++++++----
 .../batch_response_content_collection.py      |  9 ++--
 src/msgraph_core/tasks/large_file_upload.py   | 41 ++++++++++++-------
 src/msgraph_core/tasks/page_iterator.py       | 12 +++---
 10 files changed, 99 insertions(+), 40 deletions(-)
 create mode 100644 .devcontainer/devcontainer.json

diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json
new file mode 100644
index 00000000..3a4d53e6
--- /dev/null
+++ b/.devcontainer/devcontainer.json
@@ -0,0 +1,38 @@
+// For format details, see https://aka.ms/devcontainer.json. For config options, see the
+// README at: https://github.com/devcontainers/templates/tree/main/src/python
+{
+    "name": "Python 3",
+    // Or use a Dockerfile or Docker Compose file. More info: https://containers.dev/guide/dockerfile
+    // "image": "mcr.microsoft.com/devcontainers/python:3.9-bookworm",
+    // "image": "mcr.microsoft.com/devcontainers/python:3.10-bookworm",
+    // "image": "mcr.microsoft.com/devcontainers/python:3.11-bookworm",
+    // "image": "mcr.microsoft.com/devcontainers/python:3.12-bookworm",
+    // "image": "mcr.microsoft.com/devcontainers/python:1-3.12-bullseye",
+    // "image": "mcr.microsoft.com/devcontainers/python:3.13-bookworm",
+    "image": "mcr.microsoft.com/devcontainers/python:3.13-bullseye",
+
+    "features": {
+        "ghcr.io/hspaans/devcontainer-features/pytest:1": {},
+        "ghcr.io/devcontainers-extra/features/pylint:2": {},
+        "ghcr.io/devcontainers-extra/features/poetry:2": {}
+    },
+
+    // Features to add to the dev container. More info: https://containers.dev/features.
+    // "features": {},
+
+    // Use 'forwardPorts' to make a list of ports inside the container available locally.
+    // "forwardPorts": [],
+
+    // Use 'postCreateCommand' to run commands after the container is created.
+    "postCreateCommand": "git config --global core.autocrlf true && pip3 install --user -r requirements-dev.txt",
+
+    // Configure tool-specific properties.
+    "customizations": {
+        "vscode": {
+            "extensions": ["ms-python.python"]
+        }
+    }
+
+    // Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root.
+    // "remoteUser": "root"
+}
diff --git a/.vscode/settings.json b/.vscode/settings.json
index 23fd35f0..0df470aa 100644
--- a/.vscode/settings.json
+++ b/.vscode/settings.json
@@ -1,3 +1,8 @@
 {
-    "editor.formatOnSave": true
+    "editor.formatOnSave": true,
+    "python.testing.pytestArgs": [
+        "tests"
+    ],
+    "python.testing.unittestEnabled": false,
+    "python.testing.pytestEnabled": true
 }
\ No newline at end of file
diff --git a/requirements-dev.txt b/requirements-dev.txt
index 34af3d77..db651e02 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -74,7 +74,7 @@ pycparser==2.22
 
 pyjwt[crypto]==2.9.0 ; python_version >= '3.7'
 
-pylint==3.2.7
+pylint==3.3.3
 
 pyproject-hooks==1.2.0 ; python_version >= '3.7'
diff --git a/src/msgraph_core/models/large_file_upload_session.py b/src/msgraph_core/models/large_file_upload_session.py
index a5c79b9a..ecbb39bf 100644
--- a/src/msgraph_core/models/large_file_upload_session.py
+++ b/src/msgraph_core/models/large_file_upload_session.py
@@ -1,9 +1,8 @@
 from __future__ import annotations
 
 import datetime
-from collections.abc import Callable
 from dataclasses import dataclass, field
-from typing import Any, Optional
+from typing import Any, Callable, Optional
 
 from kiota_abstractions.serialization import (
     AdditionalDataHolder,
diff --git a/src/msgraph_core/models/page_result.py b/src/msgraph_core/models/page_result.py
index b11bc965..fa67e6fb 100644
--- a/src/msgraph_core/models/page_result.py
+++ b/src/msgraph_core/models/page_result.py
@@ -11,9 +11,8 @@
 """
 from __future__ import annotations
 
-from collections.abc import Callable
 from dataclasses import dataclass
-from typing import Optional, TypeVar
+from typing import Callable, Optional, TypeVar
 
 from kiota_abstractions.serialization.parsable import Parsable
 from kiota_abstractions.serialization.parse_node import ParseNode
diff --git a/src/msgraph_core/models/upload_result.py b/src/msgraph_core/models/upload_result.py
index 4c3c720a..f63d1464 100644
--- a/src/msgraph_core/models/upload_result.py
+++ b/src/msgraph_core/models/upload_result.py
@@ -1,7 +1,6 @@
-from collections.abc import Callable
 from dataclasses import dataclass
 from datetime import datetime
-from typing import Any, Generic, Optional, TypeVar
+from typing import Any, Callable, Generic, Optional, TypeVar
 
 from kiota_abstractions.serialization import (
     AdditionalDataHolder,
diff --git a/src/msgraph_core/requests/batch_response_content.py b/src/msgraph_core/requests/batch_response_content.py
index 11edbbe8..3c2d565c 100644
--- a/src/msgraph_core/requests/batch_response_content.py
+++ b/src/msgraph_core/requests/batch_response_content.py
@@ -1,7 +1,6 @@
 import base64
-from collections.abc import Callable
 from io import BytesIO
-from typing import Optional, Type, TypeVar, Union
+from typing import Callable, Optional, Type, TypeVar, Union
 
 from kiota_abstractions.serialization import (
     Parsable,
@@ -108,17 +107,20 @@ def response_body(self, request_id: str, type: Type[T]) -> Optional[T]:
             raise ValueError(f"No response found for id: {request_id}")
 
         if not issubclass(type, Parsable):
-            raise ValueError("Type passed must implement the Parsable interface")
+            raise ValueError(
+                "Type passed must implement the Parsable interface")
 
         response = self.get_response_by_id(request_id)
         if response is not None:
             content_type = response.content_type
         else:
             raise ValueError(
-                f"Unable to get content-type header in response item for request Id: {request_id}"
+                f"Unable to get content-type header in response item for request Id: {
+                    request_id}"
             )
 
         if not content_type:
-            raise RuntimeError("Unable to get content-type header in response item")
+            raise RuntimeError(
+                "Unable to get content-type header in response item")
 
         response_body = response.body or BytesIO()
         try:
@@ -128,7 +130,8 @@ def response_body(self, request_id: str, type: Type[T]) -> Optional[T]:
             )
         except Exception:
             response_body.seek(0)
-            base64_decoded_body = BytesIO(base64.b64decode(response_body.read()))
+            base64_decoded_body = BytesIO(
+                base64.b64decode(response_body.read()))
             parse_node = ParseNodeFactoryRegistry().get_root_parse_node(
                 content_type, base64_decoded_body
             )
@@ -136,7 +139,8 @@ def response_body(self, request_id: str, type: Type[T]) -> Optional[T]:
             return parse_node.get_object_value(type)
         except Exception:
             raise ValueError(
-                f"Unable to deserialize batch response for request Id: {request_id} to {type}"
+                f"Unable to deserialize batch response for request Id: {
+                    request_id} to {type}"
             )
 
     def get_field_deserializers(self) -> dict[str, Callable[[ParseNode], None]]:
@@ -161,7 +165,8 @@ def serialize(self, writer: SerializationWriter) -> None:
         :param writer: The writer to write to
         """
         if self._responses is not None:
-            writer.write_collection_of_object_values('responses', list(self._responses.values()))
+            writer.write_collection_of_object_values(
+                'responses', list(self._responses.values()))
         else:
             writer.write_collection_of_object_values('responses', [])
diff --git a/src/msgraph_core/requests/batch_response_content_collection.py b/src/msgraph_core/requests/batch_response_content_collection.py
index 60984aa4..130df18b 100644
--- a/src/msgraph_core/requests/batch_response_content_collection.py
+++ b/src/msgraph_core/requests/batch_response_content_collection.py
@@ -1,5 +1,4 @@
-from collections.abc import Callable
-
+from typing import Callable
 from kiota_abstractions.serialization import Parsable, ParseNode, SerializationWriter
 
 from .batch_response_content import BatchResponseContent
@@ -52,7 +51,8 @@ async def responses_status_codes(self) -> dict[str, int]:
             else:
                 raise ValueError("Response ID cannot be None")
         else:
-            raise TypeError("Invalid type: Collection must be of type BatchResponseContent")
+            raise TypeError(
+                "Invalid type: Collection must be of type BatchResponseContent")
         return status_codes
 
     def get_field_deserializers(self) -> dict[str, Callable[[ParseNode], None]]:
@@ -65,7 +65,8 @@ def get_field_deserializers(self) -> dict[str, Callable[[ParseNode], None]]:
         return {
             'responses':
             lambda n:
-            setattr(self, "_responses", n.get_collection_of_object_values(BatchResponseItem))
+            setattr(self, "_responses",
+                    n.get_collection_of_object_values(BatchResponseItem))
         }
 
     def serialize(self, writer: SerializationWriter) -> None:
diff --git a/src/msgraph_core/tasks/large_file_upload.py b/src/msgraph_core/tasks/large_file_upload.py
index 250321a2..496f0842 100644
--- a/src/msgraph_core/tasks/large_file_upload.py
+++ b/src/msgraph_core/tasks/large_file_upload.py
@@ -1,10 +1,9 @@
 import logging
 import os
 from asyncio import Future
-from collections.abc import Callable
 from datetime import datetime, timedelta, timezone
 from io import BytesIO
-from typing import Any, Optional, Tuple, TypeVar, Union
+from typing import Any, Callable, Optional, Tuple, TypeVar, Union
 
 from kiota_abstractions.headers_collection import HeadersCollection
 from kiota_abstractions.method import Method
@@ -38,11 +37,13 @@ def __init__(
         self.max_chunk_size = max_chunk_size
         self.factory = parsable_factory
         cleaned_value = self.check_value_exists(
-            upload_session, 'get_next_expected_range', ['next_expected_range', 'NextExpectedRange']
+            upload_session, 'get_next_expected_range', [
+                'next_expected_range', 'NextExpectedRange']
         )
         self.next_range = cleaned_value[0]
         self._chunks = int((self.file_size / max_chunk_size) + 0.5)
-        self.on_chunk_upload_complete: Optional[Callable[[list[int]], None]] = None
+        self.on_chunk_upload_complete: Optional[Callable[[
+            list[int]], None]] = None
 
     @property
     def upload_session(self):
@@ -68,7 +69,8 @@ def upload_session_expired(self, upload_session: Optional[Parsable] = None) -> b
         now = datetime.now(timezone.utc)
         upload_session = upload_session or self.upload_session
         if not hasattr(upload_session, "expiration_date_time"):
-            raise ValueError("Upload session does not have an expiration date time")
+            raise ValueError(
+                "Upload session does not have an expiration date time")
         expiry = getattr(upload_session, 'expiration_date_time')
         if expiry is None:
             raise ValueError("Expiry is None")
@@ -93,13 +95,16 @@ async def upload(self, after_chunk_upload: Optional[Callable] = None):
         self.on_chunk_upload_complete = after_chunk_upload or self.on_chunk_upload_complete
         session: LargeFileUploadSession = await self.next_chunk(
-            self.stream, 0, max(0, min(self.max_chunk_size - 1, self.file_size - 1))
+            self.stream, 0, max(
+                0, min(self.max_chunk_size - 1, self.file_size - 1))
         )
         process_next = session
         # determine the range to be uploaded
         # even when resuming existing upload sessions.
-        range_parts = self.next_range[0].split("-") if self.next_range else ['0', '0']
-        end = min(int(range_parts[0]) + self.max_chunk_size - 1, self.file_size)
+        range_parts = self.next_range[0].split(
+            "-") if self.next_range else ['0', '0']
+        end = min(int(range_parts[0]) +
+                  self.max_chunk_size - 1, self.file_size)
         uploaded_range = [range_parts[0], end]
         response = None
@@ -124,12 +129,13 @@ async def upload(self, after_chunk_upload: Optional[Callable] = None):
             if not next_range:
                 continue
             range_parts = str(next_range[0]).split("-")
-            end = min(int(range_parts[0]) + self.max_chunk_size, self.file_size)
+            end = min(int(range_parts[0]) +
+                      self.max_chunk_size, self.file_size)
             uploaded_range = [range_parts[0], end]
             self.next_range = next_range[0] + "-"
             process_next = await self.next_chunk(self.stream)
-        except Exception as error:  #pylint: disable=broad-except
+        except Exception as error:  # pylint: disable=broad-except
             logging.error("Error uploading chunk %s", error)
         finally:
             self.chunks -= 1
@@ -176,7 +182,8 @@ async def next_chunk(
         chunk_data = file.read(end - start + 1)
         info.headers = HeadersCollection()
-        info.headers.try_add('Content-Range', f'bytes {start}-{end}/{self.file_size}')
+        info.headers.try_add(
+            'Content-Range', f'bytes {start}-{end}/{self.file_size}')
         info.headers.try_add('Content-Length', str(len(chunk_data)))
         info.headers.try_add("Content-Type", "application/octet-stream")
         info.set_stream_content(bytes(chunk_data))
@@ -216,7 +223,8 @@ async def last_chunk(
         chunk_data = file.read(end - start + 1)
         info.headers = HeadersCollection()
-        info.headers.try_add('Content-Range', f'bytes {start}-{end}/{self.file_size}')
+        info.headers.try_add(
+            'Content-Range', f'bytes {start}-{end}/{self.file_size}')
         info.headers.try_add('Content-Length', str(len(chunk_data)))
         info.headers.try_add("Content-Type", "application/octet-stream")
         info.set_stream_content(bytes(chunk_data))
@@ -231,7 +239,8 @@ def get_file(self) -> BytesIO:
 
     async def cancel(self) -> Parsable:
         upload_url = self.get_validated_upload_url(self.upload_session)
-        request_information = RequestInformation(method=Method.DELETE, url_template=upload_url)
+        request_information = RequestInformation(
+            method=Method.DELETE, url_template=upload_url)
 
         await self.request_adapter.send_no_response_content_async(request_information)
@@ -254,7 +263,8 @@ def additional_data_contains(self, parsable: Parsable,
             'AdditionalDataHolder'
         )
         if not hasattr(parsable, 'additional_data'):
-            raise ValueError('The object passed does not contain an additional_data property')
+            raise ValueError(
+                'The object passed does not contain an additional_data property')
         additional_data = parsable.additional_data
         for property_candidate in property_candidates:
             if property_candidate in additional_data:
@@ -298,7 +308,8 @@ async def resume(self) -> Future:
 
     def get_validated_upload_url(self, upload_session: Parsable) -> str:
         if not hasattr(upload_session, 'upload_url'):
-            raise RuntimeError('The upload session does not contain a valid upload url')
+            raise RuntimeError(
+                'The upload session does not contain a valid upload url')
         result = upload_session.upload_url
 
         if result is None or result.strip() == '':
diff --git a/src/msgraph_core/tasks/page_iterator.py b/src/msgraph_core/tasks/page_iterator.py
index dd56ca4c..38926f66 100644
--- a/src/msgraph_core/tasks/page_iterator.py
+++ b/src/msgraph_core/tasks/page_iterator.py
@@ -17,8 +17,7 @@ and models modules.
""" -from collections.abc import Callable -from typing import Optional, Type, TypeVar, Union +from typing import Callable, Optional, Type, TypeVar, Union from kiota_abstractions.headers_collection import HeadersCollection from kiota_abstractions.method import Method @@ -152,7 +151,8 @@ async def next(self) -> Optional[PageResult]: next_link = response.odata_next_link if response and hasattr( response, 'odata_next_link' ) else None - value = response.value if response and hasattr(response, 'value') else None + value = response.value if response and hasattr( + response, 'value') else None return PageResult(next_link, value) @staticmethod @@ -180,7 +180,8 @@ def convert_to_page(response: Union[T, list, object]) -> PageResult: value = getattr(response, 'value', []) if value is None: raise ValueError('The response does not contain a value.') - parsable_page = response if isinstance(response, dict) else vars(response) + parsable_page = response if isinstance( + response, dict) else vars(response) next_link = parsable_page.get('odata_next_link', '') if isinstance( parsable_page, dict ) else getattr(parsable_page, 'odata_next_link', '') @@ -230,7 +231,8 @@ def enumerate(self, callback: Optional[Callable] = None) -> bool: if not page_items: return False for i in range(self.pause_index, len(page_items)): - keep_iterating = callback(page_items[i]) if callback is not None else True + keep_iterating = callback( + page_items[i]) if callback is not None else True if not keep_iterating: self.pause_index = i + 1 break From b431fffa0916591a35f6010a0f121daa01ff7569 Mon Sep 17 00:00:00 2001 From: Andrew Omondi Date: Mon, 3 Feb 2025 11:58:38 +0300 Subject: [PATCH 6/6] chore: revert typing changes --- .../models/large_file_upload_session.py | 3 +- src/msgraph_core/models/page_result.py | 3 +- src/msgraph_core/models/upload_result.py | 3 +- .../requests/batch_response_content.py | 21 ++++------ .../batch_response_content_collection.py | 9 ++-- src/msgraph_core/tasks/large_file_upload.py | 41 +++++++------------ src/msgraph_core/tasks/page_iterator.py | 12 +++--- 7 files changed, 38 insertions(+), 54 deletions(-) diff --git a/src/msgraph_core/models/large_file_upload_session.py b/src/msgraph_core/models/large_file_upload_session.py index ecbb39bf..a5c79b9a 100644 --- a/src/msgraph_core/models/large_file_upload_session.py +++ b/src/msgraph_core/models/large_file_upload_session.py @@ -1,8 +1,9 @@ from __future__ import annotations import datetime +from collections.abc import Callable from dataclasses import dataclass, field -from typing import Any, Callable, Optional +from typing import Any, Optional from kiota_abstractions.serialization import ( AdditionalDataHolder, diff --git a/src/msgraph_core/models/page_result.py b/src/msgraph_core/models/page_result.py index fa67e6fb..b11bc965 100644 --- a/src/msgraph_core/models/page_result.py +++ b/src/msgraph_core/models/page_result.py @@ -11,8 +11,9 @@ """ from __future__ import annotations +from collections.abc import Callable from dataclasses import dataclass -from typing import Callable, Optional, TypeVar +from typing import Optional, TypeVar from kiota_abstractions.serialization.parsable import Parsable from kiota_abstractions.serialization.parse_node import ParseNode diff --git a/src/msgraph_core/models/upload_result.py b/src/msgraph_core/models/upload_result.py index f63d1464..4c3c720a 100644 --- a/src/msgraph_core/models/upload_result.py +++ b/src/msgraph_core/models/upload_result.py @@ -1,6 +1,7 @@ +from collections.abc import Callable from dataclasses import 
From b431fffa0916591a35f6010a0f121daa01ff7569 Mon Sep 17 00:00:00 2001
From: Andrew Omondi
Date: Mon, 3 Feb 2025 11:58:38 +0300
Subject: [PATCH 6/6] chore: revert typing changes

---
 .../models/large_file_upload_session.py       |  3 +-
 src/msgraph_core/models/page_result.py        |  3 +-
 src/msgraph_core/models/upload_result.py      |  3 +-
 .../requests/batch_response_content.py        | 21 ++++------
 .../batch_response_content_collection.py      |  9 ++--
 src/msgraph_core/tasks/large_file_upload.py   | 41 +++++++------------
 src/msgraph_core/tasks/page_iterator.py       | 12 +++---
 7 files changed, 38 insertions(+), 54 deletions(-)

diff --git a/src/msgraph_core/models/large_file_upload_session.py b/src/msgraph_core/models/large_file_upload_session.py
index ecbb39bf..a5c79b9a 100644
--- a/src/msgraph_core/models/large_file_upload_session.py
+++ b/src/msgraph_core/models/large_file_upload_session.py
@@ -1,8 +1,9 @@
 from __future__ import annotations
 
 import datetime
+from collections.abc import Callable
 from dataclasses import dataclass, field
-from typing import Any, Callable, Optional
+from typing import Any, Optional
 
 from kiota_abstractions.serialization import (
     AdditionalDataHolder,
diff --git a/src/msgraph_core/models/page_result.py b/src/msgraph_core/models/page_result.py
index fa67e6fb..b11bc965 100644
--- a/src/msgraph_core/models/page_result.py
+++ b/src/msgraph_core/models/page_result.py
@@ -11,8 +11,9 @@
 """
 from __future__ import annotations
 
+from collections.abc import Callable
 from dataclasses import dataclass
-from typing import Callable, Optional, TypeVar
+from typing import Optional, TypeVar
 
 from kiota_abstractions.serialization.parsable import Parsable
 from kiota_abstractions.serialization.parse_node import ParseNode
diff --git a/src/msgraph_core/models/upload_result.py b/src/msgraph_core/models/upload_result.py
index f63d1464..4c3c720a 100644
--- a/src/msgraph_core/models/upload_result.py
+++ b/src/msgraph_core/models/upload_result.py
@@ -1,6 +1,7 @@
+from collections.abc import Callable
 from dataclasses import dataclass
 from datetime import datetime
-from typing import Any, Callable, Generic, Optional, TypeVar
+from typing import Any, Generic, Optional, TypeVar
 
 from kiota_abstractions.serialization import (
     AdditionalDataHolder,
diff --git a/src/msgraph_core/requests/batch_response_content.py b/src/msgraph_core/requests/batch_response_content.py
index 3c2d565c..11edbbe8 100644
--- a/src/msgraph_core/requests/batch_response_content.py
+++ b/src/msgraph_core/requests/batch_response_content.py
@@ -1,6 +1,7 @@
 import base64
+from collections.abc import Callable
 from io import BytesIO
-from typing import Callable, Optional, Type, TypeVar, Union
+from typing import Optional, Type, TypeVar, Union
 
 from kiota_abstractions.serialization import (
     Parsable,
@@ -107,20 +108,17 @@ def response_body(self, request_id: str, type: Type[T]) -> Optional[T]:
             raise ValueError(f"No response found for id: {request_id}")
 
         if not issubclass(type, Parsable):
-            raise ValueError(
-                "Type passed must implement the Parsable interface")
+            raise ValueError("Type passed must implement the Parsable interface")
 
         response = self.get_response_by_id(request_id)
         if response is not None:
             content_type = response.content_type
         else:
             raise ValueError(
-                f"Unable to get content-type header in response item for request Id: {
-                    request_id}"
+                f"Unable to get content-type header in response item for request Id: {request_id}"
             )
 
         if not content_type:
-            raise RuntimeError(
-                "Unable to get content-type header in response item")
+            raise RuntimeError("Unable to get content-type header in response item")
 
         response_body = response.body or BytesIO()
         try:
@@ -130,8 +128,7 @@ def response_body(self, request_id: str, type: Type[T]) -> Optional[T]:
             )
         except Exception:
             response_body.seek(0)
-            base64_decoded_body = BytesIO(
-                base64.b64decode(response_body.read()))
+            base64_decoded_body = BytesIO(base64.b64decode(response_body.read()))
             parse_node = ParseNodeFactoryRegistry().get_root_parse_node(
                 content_type, base64_decoded_body
             )
@@ -139,8 +136,7 @@ def response_body(self, request_id: str, type: Type[T]) -> Optional[T]:
             return parse_node.get_object_value(type)
         except Exception:
             raise ValueError(
-                f"Unable to deserialize batch response for request Id: {
-                    request_id} to {type}"
+                f"Unable to deserialize batch response for request Id: {request_id} to {type}"
             )
 
     def get_field_deserializers(self) -> dict[str, Callable[[ParseNode], None]]:
@@ -165,8 +161,7 @@ def serialize(self, writer: SerializationWriter) -> None:
         :param writer: The writer to write to
         """
         if self._responses is not None:
-            writer.write_collection_of_object_values(
-                'responses', list(self._responses.values()))
+            writer.write_collection_of_object_values('responses', list(self._responses.values()))
         else:
             writer.write_collection_of_object_values('responses', [])
diff --git a/src/msgraph_core/requests/batch_response_content_collection.py b/src/msgraph_core/requests/batch_response_content_collection.py
index 130df18b..60984aa4 100644
--- a/src/msgraph_core/requests/batch_response_content_collection.py
+++ b/src/msgraph_core/requests/batch_response_content_collection.py
@@ -1,4 +1,5 @@
-from typing import Callable
+from collections.abc import Callable
+
 from kiota_abstractions.serialization import Parsable, ParseNode, SerializationWriter
 
 from .batch_response_content import BatchResponseContent
@@ -51,8 +52,7 @@ async def responses_status_codes(self) -> dict[str, int]:
             else:
                 raise ValueError("Response ID cannot be None")
         else:
-            raise TypeError(
-                "Invalid type: Collection must be of type
BatchResponseContent") + raise TypeError("Invalid type: Collection must be of type BatchResponseContent") return status_codes def get_field_deserializers(self) -> dict[str, Callable[[ParseNode], None]]: @@ -65,8 +65,7 @@ def get_field_deserializers(self) -> dict[str, Callable[[ParseNode], None]]: return { 'responses': lambda n: - setattr(self, "_responses", - n.get_collection_of_object_values(BatchResponseItem)) + setattr(self, "_responses", n.get_collection_of_object_values(BatchResponseItem)) } def serialize(self, writer: SerializationWriter) -> None: diff --git a/src/msgraph_core/tasks/large_file_upload.py b/src/msgraph_core/tasks/large_file_upload.py index 496f0842..250321a2 100644 --- a/src/msgraph_core/tasks/large_file_upload.py +++ b/src/msgraph_core/tasks/large_file_upload.py @@ -1,9 +1,10 @@ import logging import os from asyncio import Future +from collections.abc import Callable from datetime import datetime, timedelta, timezone from io import BytesIO -from typing import Any, Callable, Optional, Tuple, TypeVar, Union +from typing import Any, Optional, Tuple, TypeVar, Union from kiota_abstractions.headers_collection import HeadersCollection from kiota_abstractions.method import Method @@ -37,13 +38,11 @@ def __init__( self.max_chunk_size = max_chunk_size self.factory = parsable_factory cleaned_value = self.check_value_exists( - upload_session, 'get_next_expected_range', [ - 'next_expected_range', 'NextExpectedRange'] + upload_session, 'get_next_expected_range', ['next_expected_range', 'NextExpectedRange'] ) self.next_range = cleaned_value[0] self._chunks = int((self.file_size / max_chunk_size) + 0.5) - self.on_chunk_upload_complete: Optional[Callable[[ - list[int]], None]] = None + self.on_chunk_upload_complete: Optional[Callable[[list[int]], None]] = None @property def upload_session(self): @@ -69,8 +68,7 @@ def upload_session_expired(self, upload_session: Optional[Parsable] = None) -> b now = datetime.now(timezone.utc) upload_session = upload_session or self.upload_session if not hasattr(upload_session, "expiration_date_time"): - raise ValueError( - "Upload session does not have an expiration date time") + raise ValueError("Upload session does not have an expiration date time") expiry = getattr(upload_session, 'expiration_date_time') if expiry is None: raise ValueError("Expiry is None") @@ -95,16 +93,13 @@ async def upload(self, after_chunk_upload: Optional[Callable] = None): self.on_chunk_upload_complete = after_chunk_upload or self.on_chunk_upload_complete session: LargeFileUploadSession = await self.next_chunk( - self.stream, 0, max( - 0, min(self.max_chunk_size - 1, self.file_size - 1)) + self.stream, 0, max(0, min(self.max_chunk_size - 1, self.file_size - 1)) ) process_next = session # determine the range to be uploaded # even when resuming existing upload sessions. 
-        range_parts = self.next_range[0].split(
-            "-") if self.next_range else ['0', '0']
-        end = min(int(range_parts[0]) +
-                  self.max_chunk_size - 1, self.file_size)
+        range_parts = self.next_range[0].split("-") if self.next_range else ['0', '0']
+        end = min(int(range_parts[0]) + self.max_chunk_size - 1, self.file_size)
         uploaded_range = [range_parts[0], end]
         response = None
@@ -129,13 +124,12 @@ async def upload(self, after_chunk_upload: Optional[Callable] = None):
             if not next_range:
                 continue
             range_parts = str(next_range[0]).split("-")
-            end = min(int(range_parts[0]) +
-                      self.max_chunk_size, self.file_size)
+            end = min(int(range_parts[0]) + self.max_chunk_size, self.file_size)
             uploaded_range = [range_parts[0], end]
             self.next_range = next_range[0] + "-"
             process_next = await self.next_chunk(self.stream)
-        except Exception as error:  # pylint: disable=broad-except
+        except Exception as error:  #pylint: disable=broad-except
             logging.error("Error uploading chunk %s", error)
         finally:
             self.chunks -= 1
@@ -182,8 +176,7 @@ async def next_chunk(
         chunk_data = file.read(end - start + 1)
         info.headers = HeadersCollection()
-        info.headers.try_add(
-            'Content-Range', f'bytes {start}-{end}/{self.file_size}')
+        info.headers.try_add('Content-Range', f'bytes {start}-{end}/{self.file_size}')
         info.headers.try_add('Content-Length', str(len(chunk_data)))
         info.headers.try_add("Content-Type", "application/octet-stream")
         info.set_stream_content(bytes(chunk_data))
@@ -223,8 +216,7 @@ async def last_chunk(
         chunk_data = file.read(end - start + 1)
         info.headers = HeadersCollection()
-        info.headers.try_add(
-            'Content-Range', f'bytes {start}-{end}/{self.file_size}')
+        info.headers.try_add('Content-Range', f'bytes {start}-{end}/{self.file_size}')
         info.headers.try_add('Content-Length', str(len(chunk_data)))
         info.headers.try_add("Content-Type", "application/octet-stream")
         info.set_stream_content(bytes(chunk_data))
@@ -239,8 +231,7 @@ def get_file(self) -> BytesIO:
 
     async def cancel(self) -> Parsable:
         upload_url = self.get_validated_upload_url(self.upload_session)
-        request_information = RequestInformation(
-            method=Method.DELETE, url_template=upload_url)
+        request_information = RequestInformation(method=Method.DELETE, url_template=upload_url)
 
         await self.request_adapter.send_no_response_content_async(request_information)
@@ -263,8 +254,7 @@ def additional_data_contains(self, parsable: Parsable,
             'AdditionalDataHolder'
         )
         if not hasattr(parsable, 'additional_data'):
-            raise ValueError(
-                'The object passed does not contain an additional_data property')
+            raise ValueError('The object passed does not contain an additional_data property')
         additional_data = parsable.additional_data
         for property_candidate in property_candidates:
             if property_candidate in additional_data:
@@ -308,8 +298,7 @@ async def resume(self) -> Future:
 
     def get_validated_upload_url(self, upload_session: Parsable) -> str:
         if not hasattr(upload_session, 'upload_url'):
-            raise RuntimeError(
-                'The upload session does not contain a valid upload url')
+            raise RuntimeError('The upload session does not contain a valid upload url')
         result = upload_session.upload_url
 
         if result is None or result.strip() == '':
diff --git a/src/msgraph_core/tasks/page_iterator.py b/src/msgraph_core/tasks/page_iterator.py
index 38926f66..dd56ca4c 100644
--- a/src/msgraph_core/tasks/page_iterator.py
+++ b/src/msgraph_core/tasks/page_iterator.py
@@ -17,7 +17,8 @@ and models modules.
""" -from typing import Callable, Optional, Type, TypeVar, Union +from collections.abc import Callable +from typing import Optional, Type, TypeVar, Union from kiota_abstractions.headers_collection import HeadersCollection from kiota_abstractions.method import Method @@ -151,8 +152,7 @@ async def next(self) -> Optional[PageResult]: next_link = response.odata_next_link if response and hasattr( response, 'odata_next_link' ) else None - value = response.value if response and hasattr( - response, 'value') else None + value = response.value if response and hasattr(response, 'value') else None return PageResult(next_link, value) @staticmethod @@ -180,8 +180,7 @@ def convert_to_page(response: Union[T, list, object]) -> PageResult: value = getattr(response, 'value', []) if value is None: raise ValueError('The response does not contain a value.') - parsable_page = response if isinstance( - response, dict) else vars(response) + parsable_page = response if isinstance(response, dict) else vars(response) next_link = parsable_page.get('odata_next_link', '') if isinstance( parsable_page, dict ) else getattr(parsable_page, 'odata_next_link', '') @@ -231,8 +230,7 @@ def enumerate(self, callback: Optional[Callable] = None) -> bool: if not page_items: return False for i in range(self.pause_index, len(page_items)): - keep_iterating = callback( - page_items[i]) if callback is not None else True + keep_iterating = callback(page_items[i]) if callback is not None else True if not keep_iterating: self.pause_index = i + 1 break