diff --git a/.code-samples.meilisearch.yaml b/.code-samples.meilisearch.yaml
index bf7062f3..e75dc353 100644
--- a/.code-samples.meilisearch.yaml
+++ b/.code-samples.meilisearch.yaml
@@ -514,6 +514,16 @@ faceted_search_1: |-
   })
 post_dump_1: |-
   client.create_dump()
+export_post_1: |-
+  client.export(
+    url='https://remote-meilisearch-instance.com',
+    api_key='masterKey',
+    payload_size='50 MiB',
+    indexes={
+      'movies*': {},
+      'books*': {},
+    },
+  )
 phrase_search_1: |-
   client.index('movies').search('"african american" horror')
 sorting_guide_update_sortable_attributes_1: |-
diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index 3210f8b5..cfb00d47 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -30,9 +30,22 @@ jobs:
       - name: Install dependencies
         run: pipenv install --dev --python=${{ matrix.python-version }}
       - name: Meilisearch (latest version) setup with Docker
-        run: docker run -d -p 7700:7700 getmeili/meilisearch-enterprise:latest meilisearch --no-analytics --master-key=masterKey
+        run: docker run -d --name meilisearch -p 7700:7700 getmeili/meilisearch-enterprise:latest meilisearch --no-analytics --master-key=masterKey
+      - name: Meilisearch (latest version) secondary server setup with Docker
+        run: docker run -d --name meilisearch2 -p 7701:7700 getmeili/meilisearch-enterprise:latest meilisearch --no-analytics --master-key=masterKey
+      - name: Wait for Meilisearch servers to be ready
+        run: |
+          echo "Waiting for primary Meilisearch server..."
+          timeout 30 bash -c 'until curl -f http://127.0.0.1:7700/health > /dev/null 2>&1; do sleep 1; done' || exit 1
+          echo "Primary Meilisearch server is ready"
+
+          echo "Waiting for secondary Meilisearch server..."
+          timeout 30 bash -c 'until curl -f http://127.0.0.1:7701/health > /dev/null 2>&1; do sleep 1; done' || exit 1
+          echo "Secondary Meilisearch server is ready"
       - name: Test with pytest
         run: pipenv run pytest --cov-report=xml
+        env:
+          MEILISEARCH_URL_2: "http://127.0.0.1:7701"
 
   pylint:
     name: pylint
diff --git a/docker-compose.yml b/docker-compose.yml
index 1f9f9630..529312be 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -6,10 +6,13 @@ services:
     working_dir: /home/package
     environment:
       - MEILISEARCH_URL=http://meilisearch:7700
+      - MEILISEARCH_URL_2=http://meilisearch2:7700
     depends_on:
       - meilisearch
+      - meilisearch2
     links:
       - meilisearch
+      - meilisearch2
     volumes:
       - ./:/home/package
 
@@ -20,3 +23,12 @@
     environment:
       - MEILI_MASTER_KEY=masterKey
      - MEILI_NO_ANALYTICS=true
+
+  meilisearch2:
+    image: getmeili/meilisearch-enterprise:latest
+    container_name: meili2
+    ports:
+      - "7701:7700"
+    environment:
+      - MEILI_MASTER_KEY=masterKey
+      - MEILI_NO_ANALYTICS=true
diff --git a/meilisearch/client.py b/meilisearch/client.py
index 99f8a5da..ca6fdd0e 100644
--- a/meilisearch/client.py
+++ b/meilisearch/client.py
@@ -631,6 +631,57 @@ def create_dump(self) -> TaskInfo:
 
         return TaskInfo(**task)
 
+    def export(
+        self,
+        url: str,
+        api_key: Optional[str] = None,
+        payload_size: Optional[str] = None,
+        indexes: Optional[Mapping[str, Any]] = None,
+    ) -> TaskInfo:
+        """Trigger the creation of a Meilisearch export.
+
+        Parameters
+        ----------
+        url:
+            A string pointing to a remote Meilisearch instance, including its port if necessary.
+
+        api_key:
+            A security key with index.create, settings.update, and documents.add permissions
+            on the remote Meilisearch instance, if that instance is secured.
+
+        payload_size:
+            The maximum size of each data payload, in a human-readable format such as "100 MiB".
+            Larger payloads are generally more efficient but use more resources on both instances.
+
+        indexes:
+            A mapping whose keys are patterns matching the indexes you want to export.
+            By default, Meilisearch exports all documents across all indexes.
+
+        Returns
+        -------
+        task_info:
+            TaskInfo instance containing information about a task to track the progress of an asynchronous process.
+            https://www.meilisearch.com/docs/reference/api/export#create-an-export
+
+        Raises
+        ------
+        MeilisearchApiError
+            An error containing details about why Meilisearch can't process your request.
+            Meilisearch error codes are described
+            here: https://www.meilisearch.com/docs/reference/errors/error_codes#meilisearch-errors
+        """
+        payload: Dict[str, Any] = {"url": url}
+        if api_key is not None:
+            payload["apiKey"] = api_key
+        if payload_size is not None:
+            payload["payloadSize"] = payload_size
+        if indexes is not None:
+            payload["indexes"] = indexes
+
+        task = self.http.post(self.config.paths.exports, body=payload)
+
+        return TaskInfo(**task)
+
     def create_snapshot(self) -> TaskInfo:
         """Trigger the creation of a Meilisearch snapshot.
 
diff --git a/meilisearch/config.py b/meilisearch/config.py
index df39e64a..36594613 100644
--- a/meilisearch/config.py
+++ b/meilisearch/config.py
@@ -49,6 +49,7 @@ class Paths:
         network = "network"
         experimental_features = "experimental-features"
         webhooks = "webhooks"
+        exports = "export"
 
     def __init__(
         self,
diff --git a/tests/client/test_client_exports.py b/tests/client/test_client_exports.py
new file mode 100644
index 00000000..8b78dc8b
--- /dev/null
+++ b/tests/client/test_client_exports.py
@@ -0,0 +1,60 @@
+import os
+import time
+
+import pytest
+
+from tests import common
+
+pytestmark = pytest.mark.skipif(
+    not os.getenv("MEILISEARCH_URL_2"),
+    reason="Export API tests run only when second server is configured",
+)
+
+
+def test_export_creation(
+    client, client2, index_with_documents, enable_vector_search
+):  # pylint: disable=unused-argument
+    """Tests the creation of a Meilisearch export."""
+    index = index_with_documents()
+    export_task = client.export(common.BASE_URL_2, api_key=common.MASTER_KEY)
+    task_result = client.wait_for_task(export_task.task_uid)
+    assert task_result.status == "succeeded"
+
+    index2 = client2.get_index(index.uid)
+    assert index2.uid == index.uid
+    assert index2.primary_key == index.get_primary_key()
+    assert_exported_count(index2, index.get_documents().total)
+
+
+def test_export_creation_with_index_filter(
+    client, client2, index_with_documents, enable_vector_search
+):  # pylint: disable=unused-argument
+    """Tests the creation of a Meilisearch export with specific index UIDs."""
+    index = index_with_documents()
+
+    indexes = {index.uid: {"filter": None}}
+    export_task = client.export(common.BASE_URL_2, api_key=common.MASTER_KEY, indexes=indexes)
+    task_result = client.wait_for_task(export_task.task_uid)
+    assert task_result.status == "succeeded"
+
+    response = client2.get_indexes()
+    assert response["total"] == 1
+    index2 = client2.get_index(index.uid)
+    assert index2.uid == index.uid
+    assert index2.primary_key == index.get_primary_key()
+    assert_exported_count(index2, index.get_documents().total)
+
+
+def assert_exported_count(index, expected_count):
+    # Wait up to 20 seconds for documents to be imported
+    max_attempts = 20
+    for attempt in range(max_attempts):
+        doc_count = index.get_documents().total
+        if doc_count == expected_count:
+            return
+        if attempt < max_attempts - 1:
+            time.sleep(1)
+
+    # Final check with a clear failure message
+    actual_count = index.get_documents().total
+    assert actual_count == expected_count
diff --git a/tests/common.py b/tests/common.py
index 253ec760..c19212de 100644
--- a/tests/common.py
+++ b/tests/common.py
@@ -2,6 +2,7 @@
 
 MASTER_KEY = "masterKey"
 BASE_URL = os.getenv("MEILISEARCH_URL", "http://127.0.0.1:7700")
+BASE_URL_2 = os.getenv("MEILISEARCH_URL_2")
 INDEX_UID = "indexUID"
 INDEX_UID2 = "indexUID2"
 
diff --git a/tests/conftest.py b/tests/conftest.py
index b9aeed65..cc61935a 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,5 +1,6 @@
 # pylint: disable=redefined-outer-name
 import json
+import os
 from typing import Optional
 
 import requests
@@ -16,19 +17,33 @@ def client():
     return meilisearch.Client(common.BASE_URL, common.MASTER_KEY)
 
 
+@fixture(scope="session")
+def client2():
+    if not os.getenv("MEILISEARCH_URL_2"):
+        return None
+    return meilisearch.Client(common.BASE_URL_2, common.MASTER_KEY)
+
+
+def _clear_indexes(meilisearch_client):
+    """Deletes all the indexes in the Meilisearch instance."""
+
+    indexes = meilisearch_client.get_indexes()
+    for index in indexes["results"]:
+        task = meilisearch_client.index(index.uid).delete()
+        meilisearch_client.wait_for_task(task.task_uid)
+
+
 @fixture(autouse=True)
-def clear_indexes(client):
+def clear_indexes(client, client2):
     """
     Auto-clears the indexes after each test function run.
     Makes all the test functions independent.
     """
     # Yields back to the test function.
     yield
-    # Deletes all the indexes in the Meilisearch instance.
-    indexes = client.get_indexes()
-    for index in indexes["results"]:
-        task = client.index(index.uid).delete()
-        client.wait_for_task(task.task_uid)
+    _clear_indexes(client)
+    if client2 is not None:
+        _clear_indexes(client2)
 
 
 @fixture(autouse=True)
@@ -47,12 +62,14 @@ def clear_webhooks(client):
 
 
 @fixture(autouse=True)
-def clear_all_tasks(client):
+def clear_all_tasks(client, client2):
     """
     Auto-clears the tasks after each test function run.
     Makes all the test functions independent.
     """
     client.delete_tasks({"statuses": ["succeeded", "failed", "canceled"]})
+    if client2 is not None:
+        client2.delete_tasks({"statuses": ["succeeded", "failed", "canceled"]})
 
 
 @fixture(scope="function")
@@ -254,14 +271,28 @@ def enable_vector_search():
     requests.patch(
         f"{common.BASE_URL}/experimental-features",
         headers={"Authorization": f"Bearer {common.MASTER_KEY}"},
-        json={"vectorStore": True},
+        json={"vectorStoreSetting": True},
         timeout=10,
     )
+    if common.BASE_URL_2:
+        requests.patch(
+            f"{common.BASE_URL_2}/experimental-features",
+            headers={"Authorization": f"Bearer {common.MASTER_KEY}"},
+            json={"vectorStoreSetting": True},
+            timeout=10,
+        )
     yield
     requests.patch(
         f"{common.BASE_URL}/experimental-features",
         headers={"Authorization": f"Bearer {common.MASTER_KEY}"},
-        json={"vectorStore": False},
+        json={"vectorStoreSetting": False},
         timeout=10,
     )
+    if common.BASE_URL_2:
+        requests.patch(
+            f"{common.BASE_URL_2}/experimental-features",
+            headers={"Authorization": f"Bearer {common.MASTER_KEY}"},
+            json={"vectorStoreSetting": False},
+            timeout=10,
+        )
 
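For context, a minimal usage sketch of the new client.export() method added by this patch. This snippet is not part of the diff; the URLs, master key, and index pattern are illustrative and mirror the CI setup above (a primary instance on port 7700 exporting to a secondary instance on port 7701).

import meilisearch

# Primary instance that holds the data to export.
client = meilisearch.Client("http://127.0.0.1:7700", "masterKey")

# Ask the primary instance to push every index whose UID starts with "movies"
# to the secondary instance, in payloads of at most 50 MiB.
task = client.export(
    url="http://127.0.0.1:7701",
    api_key="masterKey",
    payload_size="50 MiB",
    indexes={"movies*": {}},
)

# The export runs asynchronously; wait for the task to finish before querying
# the secondary instance.
client.wait_for_task(task.task_uid)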