Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 10 additions & 0 deletions .code-samples.meilisearch.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -514,6 +514,16 @@ faceted_search_1: |-
})
post_dump_1: |-
client.create_dump()
export_post_1: |-
client.export(
url='https://remote-meilisearch-instance.com',
api_key='masterKey',
payload_size='50 MiB',
indexes={
'movies*': {},
'books*': {},
},
)
phrase_search_1: |-
client.index('movies').search('"african american" horror')
sorting_guide_update_sortable_attributes_1: |-
Expand Down
15 changes: 14 additions & 1 deletion .github/workflows/tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -30,9 +30,22 @@ jobs:
- name: Install dependencies
run: pipenv install --dev --python=${{ matrix.python-version }}
- name: Meilisearch (latest version) setup with Docker
run: docker run -d -p 7700:7700 getmeili/meilisearch-enterprise:latest meilisearch --no-analytics --master-key=masterKey
run: docker run -d --name meilisearch -p 7700:7700 getmeili/meilisearch-enterprise:latest meilisearch --no-analytics --master-key=masterKey
- name: Meilisearch (latest version) secondary server for testing setup with Docker
run: docker run -d --name meilisearch2 -p 7701:7700 getmeili/meilisearch-enterprise:latest meilisearch --no-analytics --master-key=masterKey
- name: Wait for Meilisearch servers to be ready
run: |
echo "Waiting for primary Meilisearch server..."
timeout 30 bash -c 'until curl -f http://127.0.0.1:7700/health > /dev/null 2>&1; do sleep 1; done' || exit 1
echo "Primary Meilisearch server is ready"

echo "Waiting for secondary Meilisearch server..."
timeout 30 bash -c 'until curl -f http://127.0.0.1:7701/health > /dev/null 2>&1; do sleep 1; done' || exit 1
echo "Secondary Meilisearch server is ready"
- name: Test with pytest
run: pipenv run pytest --cov-report=xml
env:
MEILISEARCH_URL_2: "http://127.0.0.1:7701"

pylint:
name: pylint
Expand Down
12 changes: 12 additions & 0 deletions docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -6,10 +6,13 @@ services:
working_dir: /home/package
environment:
- MEILISEARCH_URL=http://meilisearch:7700
- MEILISEARCH_URL_2=http://meilisearch2:7700
depends_on:
- meilisearch
- meilisearch2
links:
- meilisearch
- meilisearch2
volumes:
- ./:/home/package

Expand All @@ -20,3 +23,12 @@ services:
environment:
- MEILI_MASTER_KEY=masterKey
- MEILI_NO_ANALYTICS=true

meilisearch2:
image: getmeili/meilisearch-enterprise:latest
container_name: meili2
ports:
- "7701:7700"
environment:
- MEILI_MASTER_KEY=masterKey
- MEILI_NO_ANALYTICS=true
51 changes: 51 additions & 0 deletions meilisearch/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -631,6 +631,57 @@ def create_dump(self) -> TaskInfo:

return TaskInfo(**task)

def export(
    self,
    url: str,
    api_key: Optional[str] = None,
    payload_size: Optional[str] = None,
    indexes: Optional[Mapping[str, Any]] = None,
) -> TaskInfo:
    """Trigger an export of this instance's data to a remote Meilisearch.

    Parameters
    ----------
    url:
        Address of the target Meilisearch instance, including its port if necessary.

    api_key:
        Key on the remote instance with index.create, settings.update, and
        documents.add permissions; needed when the remote instance is secured.

    payload_size:
        Maximum size of each single data payload in a human-readable format
        such as "100 MiB". Larger payloads are generally more efficient but
        require significantly more powerful machines.

    indexes:
        Mapping whose keys are patterns matching the indexes to export.
        By default, Meilisearch exports all documents across all indexes.

    Returns
    -------
    task_info:
        TaskInfo instance containing information about a task to track the progress of an asynchronous process.
        https://www.meilisearch.com/docs/reference/api/export#create-an-export

    Raises
    ------
    MeilisearchApiError
        An error containing details about why Meilisearch can't process your request.
        Meilisearch error codes are described
        here: https://www.meilisearch.com/docs/reference/errors/error_codes#meilisearch-errors
    """
    body: Dict[str, Any] = {"url": url}
    # Only include the optional fields the caller actually supplied.
    for field, value in (
        ("apiKey", api_key),
        ("payloadSize", payload_size),
        ("indexes", indexes),
    ):
        if value is not None:
            body[field] = value

    return TaskInfo(**self.http.post(self.config.paths.exports, body=body))

def create_snapshot(self) -> TaskInfo:
"""Trigger the creation of a Meilisearch snapshot.

Expand Down
1 change: 1 addition & 0 deletions meilisearch/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,6 +49,7 @@ class Paths:
network = "network"
experimental_features = "experimental-features"
webhooks = "webhooks"
# NOTE(review): the export route is singular ("/export"), unlike most other
# paths — matches the endpoint documented at /reference/api/export.
exports = "export"

def __init__(
self,
Expand Down
60 changes: 60 additions & 0 deletions tests/client/test_client_exports.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,60 @@
import os
import time

import pytest

from tests import common

# Skip this whole module unless a secondary Meilisearch instance is configured:
# the export API needs a remote server to push indexes to.
pytestmark = pytest.mark.skipif(
    not os.getenv("MEILISEARCH_URL_2"),
    reason="Export API tests run only when second server is configured",
)


def test_export_creation(
    client, client2, index_with_documents, enable_vector_search
):  # pylint: disable=unused-argument
    """Tests the creation of a Meilisearch export."""
    source_index = index_with_documents()

    export_task = client.export(common.BASE_URL_2, api_key=common.MASTER_KEY)
    # The export runs asynchronously; wait for the task to finish on the source.
    assert client.wait_for_task(export_task.task_uid).status == "succeeded"

    exported = client2.get_index(source_index.uid)
    assert exported.uid == source_index.uid
    assert exported.primary_key == source_index.get_primary_key()
    assert_exported_count(exported, source_index.get_documents().total)


def test_export_creation_with_index_filter(
    client, client2, index_with_documents, enable_vector_search
):  # pylint: disable=unused-argument
    """Tests the creation of a Meilisearch export with specific index UIDs."""
    source_index = index_with_documents()

    export_task = client.export(
        common.BASE_URL_2,
        api_key=common.MASTER_KEY,
        indexes={source_index.uid: {"filter": None}},
    )
    assert client.wait_for_task(export_task.task_uid).status == "succeeded"

    # Only the selected index should exist on the remote instance.
    remote_indexes = client2.get_indexes()
    assert remote_indexes["total"] == 1
    exported = client2.get_index(source_index.uid)
    assert exported.uid == source_index.uid
    assert exported.primary_key == source_index.get_primary_key()
    assert_exported_count(exported, source_index.get_documents().total)


def assert_exported_count(index, expected_count, timeout_s=20):
    """Poll ``index`` until it holds ``expected_count`` documents.

    The export target imports documents asynchronously, so the count may lag
    behind the source task reporting "succeeded".

    Parameters
    ----------
    index:
        Index on the target instance to poll via ``get_documents()``.
    expected_count:
        Number of documents the index is expected to end up with.
    timeout_s:
        Maximum number of seconds to keep polling (default 20).

    Raises
    ------
    AssertionError
        If the document count never reaches ``expected_count`` in time; the
        assertion reports the last observed count.
    """
    # monotonic() is immune to wall-clock adjustments, so the deadline is exact;
    # asserting on the last polled value avoids the redundant extra fetch the
    # previous implementation performed after its loop.
    deadline = time.monotonic() + timeout_s
    while True:
        actual_count = index.get_documents().total
        if actual_count == expected_count or time.monotonic() >= deadline:
            break
        time.sleep(1)

    assert actual_count == expected_count
1 change: 1 addition & 0 deletions tests/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@

# Master key shared by both Dockerised test servers.
MASTER_KEY = "masterKey"
# Primary test server; falls back to the local default when the env var is unset.
BASE_URL = os.getenv("MEILISEARCH_URL", "http://127.0.0.1:7700")
# Secondary server used by the export tests; None when not configured.
BASE_URL_2 = os.getenv("MEILISEARCH_URL_2")

INDEX_UID = "indexUID"
INDEX_UID2 = "indexUID2"
Expand Down
47 changes: 38 additions & 9 deletions tests/conftest.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
# pylint: disable=redefined-outer-name
import json
import os
from typing import Optional

import requests
Expand All @@ -16,19 +17,33 @@ def client():
return meilisearch.Client(common.BASE_URL, common.MASTER_KEY)


@fixture(scope="session")
def client2():
    """Session-scoped client for the secondary Meilisearch server.

    Returns None when no secondary server is configured, so dependent
    fixtures can guard with ``if client2 is not None``.
    """
    # Reuse common.BASE_URL_2 (itself os.getenv("MEILISEARCH_URL_2")) instead of
    # re-reading the environment here, keeping the single source of truth.
    if not common.BASE_URL_2:
        return None
    return meilisearch.Client(common.BASE_URL_2, common.MASTER_KEY)


def _clear_indexes(meilisearch_client):
"""Deletes all the indexes in the Meilisearch instance."""

indexes = meilisearch_client.get_indexes()
for index in indexes["results"]:
task = meilisearch_client.index(index.uid).delete()
meilisearch_client.wait_for_task(task.task_uid)


@fixture(autouse=True)
def clear_indexes(client, client2):
    """
    Auto-clears the indexes after each test function run.
    Makes all the test functions independent.
    """
    # Yields back to the test function.
    yield
    # Deletes all the indexes on both Meilisearch instances (the pasted span
    # contained both the old inline loop and the new helper calls; only the
    # helper calls belong here).
    _clear_indexes(client)
    if client2 is not None:
        _clear_indexes(client2)


@fixture(autouse=True)
Expand All @@ -47,12 +62,14 @@ def clear_webhooks(client):


@fixture(autouse=True)
def clear_all_tasks(client, client2):
    """
    Auto-clears the tasks after each test function run.
    Makes all the test functions independent.
    """
    # The span held two fused `def` lines from the diff; the two-argument
    # signature covering the secondary server is the correct one.
    client.delete_tasks({"statuses": ["succeeded", "failed", "canceled"]})
    if client2 is not None:
        client2.delete_tasks({"statuses": ["succeeded", "failed", "canceled"]})


@fixture(scope="function")
Expand Down Expand Up @@ -254,14 +271,26 @@ def enable_vector_search():
requests.patch(
f"{common.BASE_URL}/experimental-features",
headers={"Authorization": f"Bearer {common.MASTER_KEY}"},
json={"vectorStore": True},
json={"vectorStoreSetting": True},
timeout=10,
)
requests.patch(
f"{common.BASE_URL_2}/experimental-features",
headers={"Authorization": f"Bearer {common.MASTER_KEY}"},
json={"vectorStoreSetting": True},
timeout=10,
)
yield
requests.patch(
f"{common.BASE_URL}/experimental-features",
headers={"Authorization": f"Bearer {common.MASTER_KEY}"},
json={"vectorStore": False},
json={"vectorStoreSetting": False},
timeout=10,
)
requests.patch(
f"{common.BASE_URL_2}/experimental-features",
headers={"Authorization": f"Bearer {common.MASTER_KEY}"},
json={"vectorStoreSetting": False},
timeout=10,
)

Expand Down
Loading