From 5e74bab02b5a4665e06c88d20a96aef5b5e9c507 Mon Sep 17 00:00:00 2001
From: Federico Ressi
Date: Thu, 27 Nov 2025 12:41:00 +0100
Subject: [PATCH 1/8] Upgrade to the latest client version and remove obsolete code intended for older ES server versions.

---
 esrally/client/asynchronous.py | 131 +------------------
 esrally/client/synchronous.py  | 225 +--------------------------------
 pyproject.toml                 |   4 +-
 uv.lock                        |  21 +--
 4 files changed, 21 insertions(+), 360 deletions(-)

diff --git a/esrally/client/asynchronous.py b/esrally/client/asynchronous.py
index fdb5c0314..480a82902 100644
--- a/esrally/client/asynchronous.py
+++ b/esrally/client/asynchronous.py
@@ -18,33 +18,18 @@
 import asyncio
 import json
 import logging
-import warnings
-from collections.abc import Iterable, Mapping
 from typing import Any, Optional

 import aiohttp
 from aiohttp import BaseConnector, RequestInfo
 from aiohttp.client_proto import ResponseHandler
 from aiohttp.helpers import BaseTimerContext
-from elastic_transport import (
-    AiohttpHttpNode,
-    ApiResponse,
-    AsyncTransport,
-    BinaryApiResponse,
-    HeadApiResponse,
-    ListApiResponse,
-    ObjectApiResponse,
-    TextApiResponse,
-)
-from elastic_transport.client_utils import DEFAULT
+from elastic_transport import AiohttpHttpNode, AsyncTransport
 from elasticsearch import AsyncElasticsearch
 from elasticsearch._async.client import IlmClient
-from elasticsearch.compat import warn_stacklevel
-from elasticsearch.exceptions import HTTP_EXCEPTIONS, ApiError, ElasticsearchWarning
 from multidict import CIMultiDict, CIMultiDictProxy
 from yarl import URL

-from esrally.client.common import _WARNING_RE, _mimetype_header_to_compat, _quote_query
 from esrally.client.context import RequestContextHolder
 from esrally.utils import io, versions

@@ -293,14 +278,8 @@ async def put_lifecycle(self, *args, **kwargs):


 class RallyAsyncElasticsearch(AsyncElasticsearch, RequestContextHolder):
-    def __init__(self, *args, **kwargs):
-        distribution_version = kwargs.pop("distribution_version", None)
-        distribution_flavor = kwargs.pop("distribution_flavor", None)
-        super().__init__(*args, **kwargs)
-        # skip verification at this point; we've already verified this earlier with the synchronous client.
-        # The async client is used in the hot code path and we use customized overrides (such as that we don't
-        # parse response bodies in some cases for performance reasons, e.g. when using the bulk API).
- self._verified_elasticsearch = True + def __init__(self, hosts: Any = None, *, distribution_version: str | None = None, distribution_flavor: str | None = None, **kwargs): + super().__init__(hosts, **kwargs) self.distribution_version = distribution_version self.distribution_flavor = distribution_flavor @@ -318,107 +297,3 @@ def options(self, *args, **kwargs): new_self.distribution_version = self.distribution_version new_self.distribution_flavor = self.distribution_flavor return new_self - - async def perform_request( - self, - method: str, - path: str, - *, - params: Optional[Mapping[str, Any]] = None, - headers: Optional[Mapping[str, str]] = None, - body: Optional[Any] = None, - ) -> ApiResponse[Any]: - # We need to ensure that we provide content-type and accept headers - if body is not None: - if headers is None: - headers = {"content-type": "application/json", "accept": "application/json"} - else: - if headers.get("content-type") is None: - headers["content-type"] = "application/json" - if headers.get("accept") is None: - headers["accept"] = "application/json" - - if headers: - request_headers = self._headers.copy() - request_headers.update(headers) - else: - request_headers = self._headers - - # Converts all parts of a Accept/Content-Type headers - # from application/X -> application/vnd.elasticsearch+X - # see https://github.com/elastic/elasticsearch/issues/51816 - # Not applicable to serverless - if not self.is_serverless: - if versions.is_version_identifier(self.distribution_version) and ( - versions.Version.from_string(self.distribution_version) >= versions.Version.from_string("8.0.0") - ): - _mimetype_header_to_compat("Accept", request_headers) - _mimetype_header_to_compat("Content-Type", request_headers) - - if params: - target = f"{path}?{_quote_query(params)}" - else: - target = path - - meta, resp_body = await self.transport.perform_request( - method, - target, - headers=request_headers, - body=body, - request_timeout=self._request_timeout, - max_retries=self._max_retries, - retry_on_status=self._retry_on_status, - retry_on_timeout=self._retry_on_timeout, - client_meta=self._client_meta, - ) - - # HEAD with a 404 is returned as a normal response - # since this is used as an 'exists' functionality. - if not (method == "HEAD" and meta.status == 404) and ( - not 200 <= meta.status < 299 - and (self._ignore_status is DEFAULT or self._ignore_status is None or meta.status not in self._ignore_status) - ): - message = str(resp_body) - - # If the response is an error response try parsing - # the raw Elasticsearch error before raising. 
- if isinstance(resp_body, dict): - try: - error = resp_body.get("error", message) - if isinstance(error, dict) and "type" in error: - error = error["type"] - message = error - except (ValueError, KeyError, TypeError): - pass - - raise HTTP_EXCEPTIONS.get(meta.status, ApiError)(message=message, meta=meta, body=resp_body) - - # 'Warning' headers should be reraised as 'ElasticsearchWarning' - if "warning" in meta.headers: - warning_header = (meta.headers.get("warning") or "").strip() - warning_messages: Iterable[str] = _WARNING_RE.findall(warning_header) or (warning_header,) - stacklevel = warn_stacklevel() - for warning_message in warning_messages: - warnings.warn( - warning_message, - category=ElasticsearchWarning, - stacklevel=stacklevel, - ) - - if method == "HEAD": - response = HeadApiResponse(meta=meta) - elif isinstance(resp_body, dict): - response = ObjectApiResponse(body=resp_body, meta=meta) # type: ignore[assignment] - elif isinstance(resp_body, list): - response = ListApiResponse(body=resp_body, meta=meta) # type: ignore[assignment] - elif isinstance(resp_body, str): - response = TextApiResponse( # type: ignore[assignment] - body=resp_body, - meta=meta, - ) - elif isinstance(resp_body, bytes): - response = BinaryApiResponse(body=resp_body, meta=meta) # type: ignore[assignment] - else: - response = ApiResponse(body=resp_body, meta=meta) # type: ignore[assignment] - - return response diff --git a/esrally/client/synchronous.py b/esrally/client/synchronous.py index 29d6babfc..68dab19cd 100644 --- a/esrally/client/synchronous.py +++ b/esrally/client/synchronous.py @@ -14,119 +14,16 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. +from typing import Any -import warnings -from collections.abc import Iterable, Mapping -from typing import Any, Optional - -from elastic_transport import ( - ApiResponse, - BinaryApiResponse, - HeadApiResponse, - ListApiResponse, - ObjectApiResponse, - TextApiResponse, -) -from elastic_transport.client_utils import DEFAULT from elasticsearch import Elasticsearch -from elasticsearch.compat import warn_stacklevel -from elasticsearch.exceptions import ( - HTTP_EXCEPTIONS, - ApiError, - ElasticsearchWarning, - UnsupportedProductError, -) -from esrally.client.common import _WARNING_RE, _mimetype_header_to_compat, _quote_query from esrally.utils import versions -# This reproduces the product verification behavior of v7.14.0 of the client: -# https://github.com/elastic/elasticsearch-py/blob/v7.14.0/elasticsearch/transport.py#L606 -# -# As of v8.0.0, the client determines whether the server is Elasticsearch by checking -# whether HTTP responses contain the `X-elastic-product` header. If they do not, it raises -# an `UnsupportedProductError`. This header was only introduced in Elasticsearch 7.14.0, -# however, so the client will consider any version of ES prior to 7.14.0 unsupported due to -# responses not including it. -# -# Because Rally needs to support versions of ES >= 6.8.0, we resurrect the previous -# logic for determining the authenticity of the server, which does not rely exclusively -# on this header. -class _ProductChecker: - """Class which verifies we're connected to a supported product""" - - # States that can be returned from 'check_product' - SUCCESS = True - UNSUPPORTED_PRODUCT = 2 - UNSUPPORTED_DISTRIBUTION = 3 - - @classmethod - def raise_error(cls, state, meta, body): - # These states mean the product_check() didn't fail so do nothing. 
- if state in (None, True): - return - - if state == cls.UNSUPPORTED_DISTRIBUTION: - message = "The client noticed that the server is not a supported distribution of Elasticsearch" - else: # UNSUPPORTED_PRODUCT - message = "The client noticed that the server is not Elasticsearch and we do not support this unknown product" - raise UnsupportedProductError(message, meta=meta, body=body) - - @classmethod - def check_product(cls, headers, response): - # type: (dict[str, str], dict[str, str]) -> int - """ - Verifies that the server we're talking to is Elasticsearch. - Does this by checking HTTP headers and the deserialized - response to the 'info' API. Returns one of the states above. - """ - - version = response.get("version", {}) - try: - version_number = versions.Version.from_string(version.get("number", None)) - except TypeError: - # No valid 'version.number' field, either Serverless Elasticsearch, or not Elasticsearch at all - version_number = versions.Version.from_string("0.0.0") - - build_flavor = version.get("build_flavor", None) - - # Check all of the fields and headers for missing/valid values. - try: - bad_tagline = response.get("tagline", None) != "You Know, for Search" - bad_build_flavor = build_flavor not in ("default", "serverless") - bad_product_header = headers.get("x-elastic-product", None) != "Elasticsearch" - except (AttributeError, TypeError): - bad_tagline = True - bad_build_flavor = True - bad_product_header = True - - # 7.0-7.13 and there's a bad 'tagline' or unsupported 'build_flavor' - if versions.Version.from_string("7.0.0") <= version_number < versions.Version.from_string("7.14.0"): - if bad_tagline: - return cls.UNSUPPORTED_PRODUCT - elif bad_build_flavor: - return cls.UNSUPPORTED_DISTRIBUTION - - elif ( - # No version or version less than 6.8.0, and we're not talking to a serverless elasticsearch - (version_number < versions.Version.from_string("6.8.0") and not versions.is_serverless(build_flavor)) - # 6.8.0 and there's a bad 'tagline' - or (versions.Version.from_string("6.8.0") <= version_number < versions.Version.from_string("7.0.0") and bad_tagline) - # 7.14+ and there's a bad 'X-Elastic-Product' HTTP header - or (versions.Version.from_string("7.14.0") <= version_number and bad_product_header) - ): - return cls.UNSUPPORTED_PRODUCT - - return True - - class RallySyncElasticsearch(Elasticsearch): - def __init__(self, *args, **kwargs): - distribution_version = kwargs.pop("distribution_version", None) - distribution_flavor = kwargs.pop("distribution_flavor", None) - super().__init__(*args, **kwargs) - self._verified_elasticsearch = None + def __init__(self, hosts: Any = None, *, distribution_version: str | None = None, distribution_flavor: str | None = None, **kwargs): + super().__init__(hosts, **kwargs) self.distribution_version = distribution_version self.distribution_flavor = distribution_flavor @@ -139,119 +36,3 @@ def options(self, *args, **kwargs): new_self.distribution_version = self.distribution_version new_self.distribution_flavor = self.distribution_flavor return new_self - - def perform_request( - self, - method: str, - path: str, - *, - params: Optional[Mapping[str, Any]] = None, - headers: Optional[Mapping[str, str]] = None, - body: Optional[Any] = None, - ) -> ApiResponse[Any]: - # We need to ensure that we provide content-type and accept headers - if body is not None: - if headers is None: - headers = {"content-type": "application/json", "accept": "application/json"} - else: - if headers.get("content-type") is None: - headers["content-type"] = 
"application/json" - if headers.get("accept") is None: - headers["accept"] = "application/json" - - if headers: - request_headers = self._headers.copy() - request_headers.update(headers) - else: - request_headers = self._headers - - if self._verified_elasticsearch is None: - info = self.transport.perform_request(method="GET", target="/", headers=request_headers) - info_meta = info.meta - info_body = info.body - - if not 200 <= info_meta.status < 299: - raise HTTP_EXCEPTIONS.get(info_meta.status, ApiError)(message=str(info_body), meta=info_meta, body=info_body) - - self._verified_elasticsearch = _ProductChecker.check_product(info_meta.headers, info_body) - - if self._verified_elasticsearch is not True: - _ProductChecker.raise_error(self._verified_elasticsearch, info_meta, info_body) - - # Converts all parts of a Accept/Content-Type headers - # from application/X -> application/vnd.elasticsearch+X - # see https://github.com/elastic/elasticsearch/issues/51816 - if not self.is_serverless: - if versions.is_version_identifier(self.distribution_version) and ( - versions.Version.from_string(self.distribution_version) >= versions.Version.from_string("8.0.0") - ): - _mimetype_header_to_compat("Accept", headers) - _mimetype_header_to_compat("Content-Type", headers) - - if params: - target = f"{path}?{_quote_query(params)}" - else: - target = path - - meta, resp_body = self.transport.perform_request( - method, - target, - headers=request_headers, - body=body, - request_timeout=self._request_timeout, - max_retries=self._max_retries, - retry_on_status=self._retry_on_status, - retry_on_timeout=self._retry_on_timeout, - client_meta=self._client_meta, - ) - - # HEAD with a 404 is returned as a normal response - # since this is used as an 'exists' functionality. - if not (method == "HEAD" and meta.status == 404) and ( - not 200 <= meta.status < 299 - and (self._ignore_status is DEFAULT or self._ignore_status is None or meta.status not in self._ignore_status) - ): - message = str(resp_body) - - # If the response is an error response try parsing - # the raw Elasticsearch error before raising. 
- if isinstance(resp_body, dict): - try: - error = resp_body.get("error", message) - if isinstance(error, dict) and "type" in error: - error = error["type"] - message = error - except (ValueError, KeyError, TypeError): - pass - - raise HTTP_EXCEPTIONS.get(meta.status, ApiError)(message=message, meta=meta, body=resp_body) - - # 'Warning' headers should be reraised as 'ElasticsearchWarning' - if "warning" in meta.headers: - warning_header = (meta.headers.get("warning") or "").strip() - warning_messages: Iterable[str] = _WARNING_RE.findall(warning_header) or (warning_header,) - stacklevel = warn_stacklevel() - for warning_message in warning_messages: - warnings.warn( - warning_message, - category=ElasticsearchWarning, - stacklevel=stacklevel, - ) - - if method == "HEAD": - response = HeadApiResponse(meta=meta) - elif isinstance(resp_body, dict): - response = ObjectApiResponse(body=resp_body, meta=meta) # type: ignore[assignment] - elif isinstance(resp_body, list): - response = ListApiResponse(body=resp_body, meta=meta) # type: ignore[assignment] - elif isinstance(resp_body, str): - response = TextApiResponse( # type: ignore[assignment] - body=resp_body, - meta=meta, - ) - elif isinstance(resp_body, bytes): - response = BinaryApiResponse(body=resp_body, meta=meta) # type: ignore[assignment] - else: - response = ApiResponse(body=resp_body, meta=meta) # type: ignore[assignment] - - return response diff --git a/pyproject.toml b/pyproject.toml index 4830a2fd3..5effc3c08 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -42,8 +42,8 @@ classifiers = [ ################################################################################################ dependencies = [ # License: Apache 2.0 - "elasticsearch[async]==8.6.1", - "elastic-transport==8.4.1", + "elasticsearch[async]==9.2.0", + "elastic-transport==9.2.0", # License: MIT "urllib3==1.26.19", # License: Apache 2.0 diff --git a/uv.lock b/uv.lock index bc2ca721b..0cf2de6f5 100644 --- a/uv.lock +++ b/uv.lock @@ -563,27 +563,32 @@ wheels = [ [[package]] name = "elastic-transport" -version = "8.4.1" +version = "9.2.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "certifi" }, + { name = "sniffio" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/6b/db/d934d605258d38bd470c83d535c3a73c3d01e4ad357ecb4336300fbb8e88/elastic-transport-8.4.1.tar.gz", hash = "sha256:e5548997113c5d9566c9a1a51ed67bce50a4871bc0e44b692166461279e4167e", size = 44847, upload-time = "2023-09-25T20:06:26.619Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a9/0c/b48a9041e921a75e4c0d17e0dfe929f4d72c4e523084cbe5574101b7a40d/elastic_transport-9.2.0.tar.gz", hash = "sha256:0331466ca8febdb7d168c0fbf159294b0066492733b51da94f4dd28a0ee596cd", size = 77332, upload-time = "2025-10-17T16:12:49.906Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/1c/13bb1826382a1275e9191e9ab5cac3c59247f49c4b4dd96b131ec123d9ff/elastic_transport-8.4.1-py3-none-any.whl", hash = "sha256:c718ce40e8217b6045604961463c10da69a152dda07af4e25b3feae8d7965fc0", size = 59545, upload-time = "2023-09-25T20:06:24.688Z" }, + { url = "https://files.pythonhosted.org/packages/36/4a/6a1cc529ec609dae4d681baddde9ad965b8952d1f56dadbb1d27fbb50ec9/elastic_transport-9.2.0-py3-none-any.whl", hash = "sha256:f52b961e58e6b76d488993286907f61a6ddccbdae8e0135ce8d369227b6282d8", size = 65317, upload-time = "2025-10-17T16:12:48.437Z" }, ] [[package]] name = "elasticsearch" -version = "8.6.1" +version = "9.2.0" source = { registry = 
"https://pypi.org/simple" } dependencies = [ + { name = "anyio" }, { name = "elastic-transport" }, + { name = "python-dateutil" }, + { name = "sniffio" }, + { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/43/b8/105646badde05925d73d0b86db890a100c93e4258be1af8c6a370888fb0e/elasticsearch-8.6.1.tar.gz", hash = "sha256:5c9217c45d36c9872b97681320b20e7fb6eb10867a88ad81345bca13ef92aedf", size = 306022, upload-time = "2023-01-27T23:26:12.897Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7c/87/6e70b8dac9c781c53edba944b50545431e2bb937d21c06d6009ef18b5f91/elasticsearch-9.2.0.tar.gz", hash = "sha256:99656baa34b10130402b6c38429feddf7aee1c3247566e8f95f0d0267fe9f9ab", size = 875370, upload-time = "2025-10-28T16:57:25.437Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d4/1e/3c6b72cdeac41c83efdadf8a6385ca921b84cb9c37d125ec9cd2940773bc/elasticsearch-8.6.1-py3-none-any.whl", hash = "sha256:7c340008bf01f81fe633af7f473daed42c30481837aa828646663eb7a426acb8", size = 385396, upload-time = "2023-01-27T23:26:07.685Z" }, + { url = "https://files.pythonhosted.org/packages/21/f2/6b44ee99e90c9d9e183aafeea0c3af87176d4acef446c286223704ea8f2e/elasticsearch-9.2.0-py3-none-any.whl", hash = "sha256:87090fe98c515ec0fce82f633fe11d7e90e04d93581b6b3e05de29efe4cc8b74", size = 960522, upload-time = "2025-10-28T16:57:20.979Z" }, ] [package.optional-dependencies] @@ -668,8 +673,8 @@ requires-dist = [ { name = "certifi" }, { name = "docker", specifier = "==6.0.0" }, { name = "ecs-logging", specifier = "==2.2.0" }, - { name = "elastic-transport", specifier = "==8.4.1" }, - { name = "elasticsearch", extras = ["async"], specifier = "==8.6.1" }, + { name = "elastic-transport", specifier = "==9.2.0" }, + { name = "elasticsearch", extras = ["async"], specifier = "==9.2.0" }, { name = "furo", marker = "extra == 'develop'", specifier = "==2022.6.21" }, { name = "github3-py", marker = "extra == 'develop'", specifier = "==3.2.0" }, { name = "gitpython", marker = "extra == 'develop'", specifier = "==3.1.30" }, From f775f7ca43645d8cb752ab1e39b42e0f85281fb3 Mon Sep 17 00:00:00 2001 From: Federico Ressi Date: Thu, 27 Nov 2025 13:40:39 +0100 Subject: [PATCH 2/8] It drops deprecated parameter `maxsize` in favor or `connections_per_node`. --- esrally/client/factory.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/esrally/client/factory.py b/esrally/client/factory.py index 9fe0186dd..29e4198af 100644 --- a/esrally/client/factory.py +++ b/esrally/client/factory.py @@ -260,7 +260,7 @@ async def on_request_end(session, trace_config_ctx, params): hosts=self.hosts, transport_class=RallyAsyncTransport, ssl_context=self.ssl_context, - maxsize=self.max_connections, + connections_per_node=self.max_connections, **self.client_options, ) From f71320b19eeb4b154cd4643f4d92621b4d35b697 Mon Sep 17 00:00:00 2001 From: Federico Ressi Date: Thu, 27 Nov 2025 14:13:23 +0100 Subject: [PATCH 3/8] Remove unnecessary `put_lifecycle` method wrapping. 
--- esrally/client/asynchronous.py | 21 --------------------- esrally/driver/runner.py | 9 +++++---- tests/driver/runner_test.py | 17 ----------------- 3 files changed, 5 insertions(+), 42 deletions(-) diff --git a/esrally/client/asynchronous.py b/esrally/client/asynchronous.py index 480a82902..5a3210a4c 100644 --- a/esrally/client/asynchronous.py +++ b/esrally/client/asynchronous.py @@ -26,7 +26,6 @@ from aiohttp.helpers import BaseTimerContext from elastic_transport import AiohttpHttpNode, AsyncTransport from elasticsearch import AsyncElasticsearch -from elasticsearch._async.client import IlmClient from multidict import CIMultiDict, CIMultiDictProxy from yarl import URL @@ -262,32 +261,12 @@ def __init__(self, *args, **kwargs): super().__init__(*args, node_class=RallyAiohttpHttpNode, **kwargs) -class RallyIlmClient(IlmClient): - async def put_lifecycle(self, *args, **kwargs): - """ - The 'elasticsearch-py' 8.x method signature renames the 'policy' param to 'name', and the previously so-called - 'body' param becomes 'policy' - """ - if args: - kwargs["name"] = args[0] - - if body := kwargs.pop("body", None): - kwargs["policy"] = body.get("policy", {}) - # pylint: disable=missing-kwoa - return await IlmClient.put_lifecycle(self, **kwargs) - - class RallyAsyncElasticsearch(AsyncElasticsearch, RequestContextHolder): def __init__(self, hosts: Any = None, *, distribution_version: str | None = None, distribution_flavor: str | None = None, **kwargs): super().__init__(hosts, **kwargs) self.distribution_version = distribution_version self.distribution_flavor = distribution_flavor - # some ILM method signatures changed in 'elasticsearch-py' 8.x, - # so we override method(s) here to provide BWC for any custom - # runners that aren't using the new kwargs - self.ilm = RallyIlmClient(self) - @property def is_serverless(self): return versions.is_serverless(self.distribution_flavor) diff --git a/esrally/driver/runner.py b/esrally/driver/runner.py index e05ecb177..13e8957d7 100644 --- a/esrally/driver/runner.py +++ b/esrally/driver/runner.py @@ -30,9 +30,10 @@ from io import BytesIO from os.path import commonprefix from types import FunctionType -from typing import Optional +from typing import Any, Optional import ijson +from elasticsearch import AsyncElasticsearch from esrally import exceptions, track, types from esrally.utils import convert @@ -183,7 +184,7 @@ def __init__(self, *args, config=None, **kwargs): async def __aenter__(self): return self - async def __call__(self, es, params): + async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]): """ Runs the actual method that should be benchmarked. @@ -432,7 +433,7 @@ async def __aexit__(self, exc_type, exc_val, exc_tb): return await self.delegate.__aexit__(exc_type, exc_val, exc_tb) -def mandatory(params, key, op): +def mandatory(params: dict[str, Any], key, op) -> Any: try: return params[key] except KeyError: @@ -2727,7 +2728,7 @@ class CreateIlmPolicy(Runner): `_. 
""" - async def __call__(self, es, params): + async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]): policy_name = mandatory(params, "policy-name", self) body = mandatory(params, "body", self) policy = body.get("policy", {}) diff --git a/tests/driver/runner_test.py b/tests/driver/runner_test.py index cc3c15637..4c3e32650 100644 --- a/tests/driver/runner_test.py +++ b/tests/driver/runner_test.py @@ -29,7 +29,6 @@ import pytest from esrally import client, config, exceptions -from esrally.client.asynchronous import RallyAsyncElasticsearch from esrally.driver import runner @@ -5843,22 +5842,6 @@ async def test_create_ilm_policy_without_request_params(self, es): filter_path=None, ) - @mock.patch("esrally.client.asynchronous.IlmClient") - @pytest.mark.asyncio - async def test_RallyIlmClient_rewrites_kwargs(self, es_ilm): - es = RallyAsyncElasticsearch(hosts=["http://localhost:9200"]) - es_ilm.put_lifecycle = mock.AsyncMock(return_value={}) - - # simulating a custom runner that hasn't been refactored - # to suit the new 'elasticsearch-py' 8.x kwarg only method signature - await es.ilm.put_lifecycle("test-name", body=self.params["body"]) - - es_ilm.put_lifecycle.assert_awaited_once_with( - es.ilm, - policy=self.params["body"]["policy"], - name="test-name", - ) - class TestDeleteIlmPolicyRunner: params = {"policy-name": "my-ilm-policy", "request-params": {"master_timeout": "30s", "timeout": "30s"}} From ba9453794efe2fee66f261ccdaf3c568f343bc31 Mon Sep 17 00:00:00 2001 From: Federico Ressi Date: Thu, 27 Nov 2025 14:47:40 +0100 Subject: [PATCH 4/8] Remove unused module and fix factory_test after paramether name change. --- esrally/client/common.py | 59 ------------------------------------ tests/client/common_test.py | 24 --------------- tests/client/factory_test.py | 2 +- 3 files changed, 1 insertion(+), 84 deletions(-) delete mode 100644 esrally/client/common.py delete mode 100644 tests/client/common_test.py diff --git a/esrally/client/common.py b/esrally/client/common.py deleted file mode 100644 index e0ca051c9..000000000 --- a/esrally/client/common.py +++ /dev/null @@ -1,59 +0,0 @@ -import re -from collections.abc import Mapping -from datetime import date, datetime -from typing import Any - -from elastic_transport.client_utils import percent_encode -from elasticsearch import VERSION - - -def _client_major_version_to_str(version: tuple) -> str: - return str(version[0]) - - -_WARNING_RE = re.compile(r"\"([^\"]*)\"") -_COMPAT_MIMETYPE_TEMPLATE = "application/vnd.elasticsearch+%s; compatible-with=" + _client_major_version_to_str(VERSION) -_COMPAT_MIMETYPE_RE = re.compile(r"application/(json|x-ndjson|vnd\.mapbox-vector-tile)") -_COMPAT_MIMETYPE_SUB = _COMPAT_MIMETYPE_TEMPLATE % (r"\g<1>",) - - -def _mimetype_header_to_compat(header, request_headers): - # Converts all parts of a Accept/Content-Type headers - # from application/X -> application/vnd.elasticsearch+X - mimetype = request_headers.get(header, None) if request_headers else None - if mimetype: - request_headers[header] = _COMPAT_MIMETYPE_RE.sub(_COMPAT_MIMETYPE_SUB, mimetype) - - -def _escape(value: Any) -> str: - """ - Escape a single value of a URL string or a query parameter. If it is a list - or tuple, turn it into a comma-separated string first. 
- """ - - # make sequences into comma-separated stings - if isinstance(value, (list, tuple)): - value = ",".join([_escape(item) for item in value]) - - # dates and datetimes into isoformat - elif isinstance(value, (date, datetime)): - value = value.isoformat() - - # make bools into true/false strings - elif isinstance(value, bool): - value = str(value).lower() - - elif isinstance(value, bytes): - return value.decode("utf-8", "surrogatepass") - - if not isinstance(value, str): - return str(value) - return value - - -def _quote(value: Any) -> str: - return percent_encode(_escape(value), ",*") - - -def _quote_query(query: Mapping[str, Any]) -> str: - return "&".join([f"{k}={_quote(v)}" for k, v in query.items()]) diff --git a/tests/client/common_test.py b/tests/client/common_test.py deleted file mode 100644 index dac449c60..000000000 --- a/tests/client/common_test.py +++ /dev/null @@ -1,24 +0,0 @@ -# Licensed to Elasticsearch B.V. under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch B.V. licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -from esrally.client import common - - -# pylint: disable=protected-access -def test_client_major_version_to_str(): - version = (8, 2, 0) - assert common._client_major_version_to_str(version) == "8" diff --git a/tests/client/factory_test.py b/tests/client/factory_test.py index 9abf37fc0..9fc28b97a 100644 --- a/tests/client/factory_test.py +++ b/tests/client/factory_test.py @@ -372,7 +372,7 @@ def test_create_async_client_with_api_key_auth_override(self, es): hosts=["https://localhost:9200"], transport_class=RallyAsyncTransport, ssl_context=f.ssl_context, - maxsize=f.max_connections, + connections_per_node=f.max_connections, verify_certs=True, serializer=f.client_options["serializer"], api_key=api_key, From 5635f57c1e465bba7fec9e9c6f4185497d4c9b62 Mon Sep 17 00:00:00 2001 From: Federico Ressi Date: Thu, 27 Nov 2025 15:26:36 +0100 Subject: [PATCH 5/8] Update `aiohttp` and `aiosignal` packages versions. 
--- pyproject.toml | 4 +- uv.lock | 146 ++++++++++++++++++++++++++----------------------- 2 files changed, 80 insertions(+), 70 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 5effc3c08..9a36ab52e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -47,8 +47,8 @@ dependencies = [ # License: MIT "urllib3==1.26.19", # License: Apache 2.0 - "aiohttp==3.10.11", - "aiosignal==1.3.2", + "aiohttp==3.13.2", + "aiosignal==1.4.0", "docker==6.0.0", # avoid specific requests version to fix bug in docker-py "requests<2.32.0", diff --git a/uv.lock b/uv.lock index 0cf2de6f5..faa4c6cea 100644 --- a/uv.lock +++ b/uv.lock @@ -19,7 +19,7 @@ wheels = [ [[package]] name = "aiohttp" -version = "3.10.11" +version = "3.13.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aiohappyeyeballs" }, @@ -28,82 +28,92 @@ dependencies = [ { name = "attrs" }, { name = "frozenlist" }, { name = "multidict" }, + { name = "propcache" }, { name = "yarl" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/25/a8/8e2ba36c6e3278d62e0c88aa42bb92ddbef092ac363b390dab4421da5cf5/aiohttp-3.10.11.tar.gz", hash = "sha256:9dc2b8f3dcab2e39e0fa309c8da50c3b55e6f34ab25f1a71d3288f24924d33a7", size = 7551886, upload-time = "2024-11-13T16:40:33.335Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/11/c7/575f9e82d7ef13cb1b45b9db8a5b8fadb35107fb12e33809356ae0155223/aiohttp-3.10.11-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5077b1a5f40ffa3ba1f40d537d3bec4383988ee51fbba6b74aa8fb1bc466599e", size = 588218, upload-time = "2024-11-13T16:36:38.461Z" }, - { url = "https://files.pythonhosted.org/packages/12/7b/a800dadbd9a47b7f921bfddcd531371371f39b9cd05786c3638bfe2e1175/aiohttp-3.10.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8d6a14a4d93b5b3c2891fca94fa9d41b2322a68194422bef0dd5ec1e57d7d298", size = 400815, upload-time = "2024-11-13T16:36:40.547Z" }, - { url = "https://files.pythonhosted.org/packages/cb/28/7dbd53ab10b0ded397feed914880f39ce075bd39393b8dfc322909754a0a/aiohttp-3.10.11-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ffbfde2443696345e23a3c597049b1dd43049bb65337837574205e7368472177", size = 392099, upload-time = "2024-11-13T16:36:43.918Z" }, - { url = "https://files.pythonhosted.org/packages/6a/2e/c6390f49e67911711c2229740e261c501685fe7201f7f918d6ff2fd1cfb0/aiohttp-3.10.11-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20b3d9e416774d41813bc02fdc0663379c01817b0874b932b81c7f777f67b217", size = 1224854, upload-time = "2024-11-13T16:36:46.473Z" }, - { url = "https://files.pythonhosted.org/packages/69/68/c96afae129201bff4edbece52b3e1abf3a8af57529a42700669458b00b9f/aiohttp-3.10.11-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2b943011b45ee6bf74b22245c6faab736363678e910504dd7531a58c76c9015a", size = 1259641, upload-time = "2024-11-13T16:36:48.28Z" }, - { url = "https://files.pythonhosted.org/packages/63/89/bedd01456442747946114a8c2f30ff1b23d3b2ea0c03709f854c4f354a5a/aiohttp-3.10.11-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48bc1d924490f0d0b3658fe5c4b081a4d56ebb58af80a6729d4bd13ea569797a", size = 1295412, upload-time = "2024-11-13T16:36:50.286Z" }, - { url = "https://files.pythonhosted.org/packages/9b/4d/942198e2939efe7bfa484781590f082135e9931b8bcafb4bba62cf2d8f2f/aiohttp-3.10.11-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e12eb3f4b1f72aaaf6acd27d045753b18101524f72ae071ae1c91c1cd44ef115", size = 1218311, upload-time = 
"2024-11-13T16:36:53.721Z" }, - { url = "https://files.pythonhosted.org/packages/a3/5b/8127022912f1fa72dfc39cf37c36f83e0b56afc3b93594b1cf377b6e4ffc/aiohttp-3.10.11-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f14ebc419a568c2eff3c1ed35f634435c24ead2fe19c07426af41e7adb68713a", size = 1189448, upload-time = "2024-11-13T16:36:55.844Z" }, - { url = "https://files.pythonhosted.org/packages/af/12/752878033c8feab3362c0890a4d24e9895921729a53491f6f6fad64d3287/aiohttp-3.10.11-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:72b191cdf35a518bfc7ca87d770d30941decc5aaf897ec8b484eb5cc8c7706f3", size = 1186484, upload-time = "2024-11-13T16:36:58.472Z" }, - { url = "https://files.pythonhosted.org/packages/61/24/1d91c304fca47d5e5002ca23abab9b2196ac79d5c531258e048195b435b2/aiohttp-3.10.11-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:5ab2328a61fdc86424ee540d0aeb8b73bbcad7351fb7cf7a6546fc0bcffa0038", size = 1183864, upload-time = "2024-11-13T16:37:00.737Z" }, - { url = "https://files.pythonhosted.org/packages/c1/70/022d28b898314dac4cb5dd52ead2a372563c8590b1eaab9c5ed017eefb1e/aiohttp-3.10.11-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:aa93063d4af05c49276cf14e419550a3f45258b6b9d1f16403e777f1addf4519", size = 1241460, upload-time = "2024-11-13T16:37:03.175Z" }, - { url = "https://files.pythonhosted.org/packages/c3/15/2b43853330f82acf180602de0f68be62a2838d25d03d2ed40fecbe82479e/aiohttp-3.10.11-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:30283f9d0ce420363c24c5c2421e71a738a2155f10adbb1a11a4d4d6d2715cfc", size = 1258521, upload-time = "2024-11-13T16:37:06.013Z" }, - { url = "https://files.pythonhosted.org/packages/28/38/9ef2076cb06dcc155e7f02275f5da403a3e7c9327b6b075e999f0eb73613/aiohttp-3.10.11-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e5358addc8044ee49143c546d2182c15b4ac3a60be01c3209374ace05af5733d", size = 1207329, upload-time = "2024-11-13T16:37:08.091Z" }, - { url = "https://files.pythonhosted.org/packages/c2/5f/c5329d67a2c83d8ae17a84e11dec14da5773520913bfc191caaf4cd57e50/aiohttp-3.10.11-cp310-cp310-win32.whl", hash = "sha256:e1ffa713d3ea7cdcd4aea9cddccab41edf6882fa9552940344c44e59652e1120", size = 363835, upload-time = "2024-11-13T16:37:10.017Z" }, - { url = "https://files.pythonhosted.org/packages/0f/c6/ca5d70eea2fdbe283dbc1e7d30649a1a5371b2a2a9150db192446f645789/aiohttp-3.10.11-cp310-cp310-win_amd64.whl", hash = "sha256:778cbd01f18ff78b5dd23c77eb82987ee4ba23408cbed233009fd570dda7e674", size = 382169, upload-time = "2024-11-13T16:37:12.603Z" }, - { url = "https://files.pythonhosted.org/packages/73/96/221ec59bc38395a6c205cbe8bf72c114ce92694b58abc8c3c6b7250efa7f/aiohttp-3.10.11-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:80ff08556c7f59a7972b1e8919f62e9c069c33566a6d28586771711e0eea4f07", size = 587742, upload-time = "2024-11-13T16:37:14.469Z" }, - { url = "https://files.pythonhosted.org/packages/24/17/4e606c969b19de5c31a09b946bd4c37e30c5288ca91d4790aa915518846e/aiohttp-3.10.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2c8f96e9ee19f04c4914e4e7a42a60861066d3e1abf05c726f38d9d0a466e695", size = 400357, upload-time = "2024-11-13T16:37:16.482Z" }, - { url = "https://files.pythonhosted.org/packages/a2/e5/433f59b87ba69736e446824710dd7f26fcd05b24c6647cb1e76554ea5d02/aiohttp-3.10.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fb8601394d537da9221947b5d6e62b064c9a43e88a1ecd7414d21a1a6fba9c24", size = 392099, upload-time = "2024-11-13T16:37:20.013Z" }, - { url = 
"https://files.pythonhosted.org/packages/d2/a3/3be340f5063970bb9e47f065ee8151edab639d9c2dce0d9605a325ab035d/aiohttp-3.10.11-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ea224cf7bc2d8856d6971cea73b1d50c9c51d36971faf1abc169a0d5f85a382", size = 1300367, upload-time = "2024-11-13T16:37:22.645Z" }, - { url = "https://files.pythonhosted.org/packages/ba/7d/a3043918466cbee9429792ebe795f92f70eeb40aee4ccbca14c38ee8fa4d/aiohttp-3.10.11-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db9503f79e12d5d80b3efd4d01312853565c05367493379df76d2674af881caa", size = 1339448, upload-time = "2024-11-13T16:37:24.834Z" }, - { url = "https://files.pythonhosted.org/packages/2c/60/192b378bd9d1ae67716b71ae63c3e97c48b134aad7675915a10853a0b7de/aiohttp-3.10.11-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0f449a50cc33f0384f633894d8d3cd020e3ccef81879c6e6245c3c375c448625", size = 1374875, upload-time = "2024-11-13T16:37:26.799Z" }, - { url = "https://files.pythonhosted.org/packages/e0/d7/cd58bd17f5277d9cc32ecdbb0481ca02c52fc066412de413aa01268dc9b4/aiohttp-3.10.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82052be3e6d9e0c123499127782a01a2b224b8af8c62ab46b3f6197035ad94e9", size = 1285626, upload-time = "2024-11-13T16:37:29.02Z" }, - { url = "https://files.pythonhosted.org/packages/bb/b2/da4953643b7dcdcd29cc99f98209f3653bf02023d95ce8a8fd57ffba0f15/aiohttp-3.10.11-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:20063c7acf1eec550c8eb098deb5ed9e1bb0521613b03bb93644b810986027ac", size = 1246120, upload-time = "2024-11-13T16:37:31.268Z" }, - { url = "https://files.pythonhosted.org/packages/6c/22/1217b3c773055f0cb172e3b7108274a74c0fe9900c716362727303931cbb/aiohttp-3.10.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:489cced07a4c11488f47aab1f00d0c572506883f877af100a38f1fedaa884c3a", size = 1265177, upload-time = "2024-11-13T16:37:33.348Z" }, - { url = "https://files.pythonhosted.org/packages/63/5e/3827ad7e61544ed1e73e4fdea7bb87ea35ac59a362d7eb301feb5e859780/aiohttp-3.10.11-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ea9b3bab329aeaa603ed3bf605f1e2a6f36496ad7e0e1aa42025f368ee2dc07b", size = 1257238, upload-time = "2024-11-13T16:37:35.753Z" }, - { url = "https://files.pythonhosted.org/packages/53/31/951f78751d403da6086b662760e6e8b08201b0dcf5357969f48261b4d0e1/aiohttp-3.10.11-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:ca117819d8ad113413016cb29774b3f6d99ad23c220069789fc050267b786c16", size = 1315944, upload-time = "2024-11-13T16:37:38.317Z" }, - { url = "https://files.pythonhosted.org/packages/0d/79/06ef7a2a69880649261818b135b245de5a4e89fed5a6987c8645428563fc/aiohttp-3.10.11-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2dfb612dcbe70fb7cdcf3499e8d483079b89749c857a8f6e80263b021745c730", size = 1332065, upload-time = "2024-11-13T16:37:40.725Z" }, - { url = "https://files.pythonhosted.org/packages/10/39/a273857c2d0bbf2152a4201fbf776931c2dac74aa399c6683ed4c286d1d1/aiohttp-3.10.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f9b615d3da0d60e7d53c62e22b4fd1c70f4ae5993a44687b011ea3a2e49051b8", size = 1291882, upload-time = "2024-11-13T16:37:43.209Z" }, - { url = "https://files.pythonhosted.org/packages/49/39/7aa387f88403febc96e0494101763afaa14d342109329a01b413b2bac075/aiohttp-3.10.11-cp311-cp311-win32.whl", hash = "sha256:29103f9099b6068bbdf44d6a3d090e0a0b2be6d3c9f16a070dd9d0d910ec08f9", size = 363409, upload-time = 
"2024-11-13T16:37:45.143Z" }, - { url = "https://files.pythonhosted.org/packages/6f/e9/8eb3dc095ce48499d867ad461d02f1491686b79ad92e4fad4df582f6be7b/aiohttp-3.10.11-cp311-cp311-win_amd64.whl", hash = "sha256:236b28ceb79532da85d59aa9b9bf873b364e27a0acb2ceaba475dc61cffb6f3f", size = 382644, upload-time = "2024-11-13T16:37:47.685Z" }, - { url = "https://files.pythonhosted.org/packages/01/16/077057ef3bd684dbf9a8273a5299e182a8d07b4b252503712ff8b5364fd1/aiohttp-3.10.11-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:7480519f70e32bfb101d71fb9a1f330fbd291655a4c1c922232a48c458c52710", size = 584830, upload-time = "2024-11-13T16:37:49.608Z" }, - { url = "https://files.pythonhosted.org/packages/2c/cf/348b93deb9597c61a51b6682e81f7c7d79290249e886022ef0705d858d90/aiohttp-3.10.11-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f65267266c9aeb2287a6622ee2bb39490292552f9fbf851baabc04c9f84e048d", size = 397090, upload-time = "2024-11-13T16:37:51.539Z" }, - { url = "https://files.pythonhosted.org/packages/70/bf/903df5cd739dfaf5b827b3d8c9d68ff4fcea16a0ca1aeb948c9da30f56c8/aiohttp-3.10.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7400a93d629a0608dc1d6c55f1e3d6e07f7375745aaa8bd7f085571e4d1cee97", size = 392361, upload-time = "2024-11-13T16:37:53.586Z" }, - { url = "https://files.pythonhosted.org/packages/fb/97/e4792675448a2ac5bd56f377a095233b805dd1315235c940c8ba5624e3cb/aiohttp-3.10.11-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f34b97e4b11b8d4eb2c3a4f975be626cc8af99ff479da7de49ac2c6d02d35725", size = 1309839, upload-time = "2024-11-13T16:37:55.68Z" }, - { url = "https://files.pythonhosted.org/packages/96/d0/ba19b1260da6fbbda4d5b1550d8a53ba3518868f2c143d672aedfdbc6172/aiohttp-3.10.11-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e7b825da878464a252ccff2958838f9caa82f32a8dbc334eb9b34a026e2c636", size = 1348116, upload-time = "2024-11-13T16:37:58.232Z" }, - { url = "https://files.pythonhosted.org/packages/b3/b9/15100ee7113a2638bfdc91aecc54641609a92a7ce4fe533ebeaa8d43ff93/aiohttp-3.10.11-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f9f92a344c50b9667827da308473005f34767b6a2a60d9acff56ae94f895f385", size = 1391402, upload-time = "2024-11-13T16:38:00.522Z" }, - { url = "https://files.pythonhosted.org/packages/c5/36/831522618ac0dcd0b28f327afd18df7fb6bbf3eaf302f912a40e87714846/aiohttp-3.10.11-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc6f1ab987a27b83c5268a17218463c2ec08dbb754195113867a27b166cd6087", size = 1304239, upload-time = "2024-11-13T16:38:04.195Z" }, - { url = "https://files.pythonhosted.org/packages/60/9f/b7230d0c48b076500ae57adb717aa0656432acd3d8febb1183dedfaa4e75/aiohttp-3.10.11-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1dc0f4ca54842173d03322793ebcf2c8cc2d34ae91cc762478e295d8e361e03f", size = 1256565, upload-time = "2024-11-13T16:38:07.218Z" }, - { url = "https://files.pythonhosted.org/packages/63/c2/35c7b4699f4830b3b0a5c3d5619df16dca8052ae8b488e66065902d559f6/aiohttp-3.10.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7ce6a51469bfaacff146e59e7fb61c9c23006495d11cc24c514a455032bcfa03", size = 1269285, upload-time = "2024-11-13T16:38:09.396Z" }, - { url = "https://files.pythonhosted.org/packages/51/48/bc20ea753909bdeb09f9065260aefa7453e3a57f6a51f56f5216adc1a5e7/aiohttp-3.10.11-cp312-cp312-musllinux_1_2_i686.whl", hash = 
"sha256:aad3cd91d484d065ede16f3cf15408254e2469e3f613b241a1db552c5eb7ab7d", size = 1276716, upload-time = "2024-11-13T16:38:12.039Z" }, - { url = "https://files.pythonhosted.org/packages/0c/7b/a8708616b3810f55ead66f8e189afa9474795760473aea734bbea536cd64/aiohttp-3.10.11-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:f4df4b8ca97f658c880fb4b90b1d1ec528315d4030af1ec763247ebfd33d8b9a", size = 1315023, upload-time = "2024-11-13T16:38:15.155Z" }, - { url = "https://files.pythonhosted.org/packages/2a/d6/dfe9134a921e05b01661a127a37b7d157db93428905450e32f9898eef27d/aiohttp-3.10.11-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:2e4e18a0a2d03531edbc06c366954e40a3f8d2a88d2b936bbe78a0c75a3aab3e", size = 1342735, upload-time = "2024-11-13T16:38:17.539Z" }, - { url = "https://files.pythonhosted.org/packages/ca/1a/3bd7f18e3909eabd57e5d17ecdbf5ea4c5828d91341e3676a07de7c76312/aiohttp-3.10.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6ce66780fa1a20e45bc753cda2a149daa6dbf1561fc1289fa0c308391c7bc0a4", size = 1302618, upload-time = "2024-11-13T16:38:19.865Z" }, - { url = "https://files.pythonhosted.org/packages/cf/51/d063133781cda48cfdd1e11fc8ef45ab3912b446feba41556385b3ae5087/aiohttp-3.10.11-cp312-cp312-win32.whl", hash = "sha256:a919c8957695ea4c0e7a3e8d16494e3477b86f33067478f43106921c2fef15bb", size = 360497, upload-time = "2024-11-13T16:38:21.996Z" }, - { url = "https://files.pythonhosted.org/packages/55/4e/f29def9ed39826fe8f85955f2e42fe5cc0cbe3ebb53c97087f225368702e/aiohttp-3.10.11-cp312-cp312-win_amd64.whl", hash = "sha256:b5e29706e6389a2283a91611c91bf24f218962717c8f3b4e528ef529d112ee27", size = 380577, upload-time = "2024-11-13T16:38:24.247Z" }, - { url = "https://files.pythonhosted.org/packages/1f/63/654c185dfe3cf5d4a0d35b6ee49ee6ca91922c694eaa90732e1ba4b40ef1/aiohttp-3.10.11-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:703938e22434d7d14ec22f9f310559331f455018389222eed132808cd8f44127", size = 577381, upload-time = "2024-11-13T16:38:26.708Z" }, - { url = "https://files.pythonhosted.org/packages/4e/c4/ee9c350acb202ba2eb0c44b0f84376b05477e870444192a9f70e06844c28/aiohttp-3.10.11-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9bc50b63648840854e00084c2b43035a62e033cb9b06d8c22b409d56eb098413", size = 393289, upload-time = "2024-11-13T16:38:29.207Z" }, - { url = "https://files.pythonhosted.org/packages/3d/7c/30d161a7e3b208cef1b922eacf2bbb8578b7e5a62266a6a2245a1dd044dc/aiohttp-3.10.11-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5f0463bf8b0754bc744e1feb61590706823795041e63edf30118a6f0bf577461", size = 388859, upload-time = "2024-11-13T16:38:31.567Z" }, - { url = "https://files.pythonhosted.org/packages/79/10/8d050e04be447d3d39e5a4a910fa289d930120cebe1b893096bd3ee29063/aiohttp-3.10.11-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6c6dec398ac5a87cb3a407b068e1106b20ef001c344e34154616183fe684288", size = 1280983, upload-time = "2024-11-13T16:38:33.738Z" }, - { url = "https://files.pythonhosted.org/packages/31/b3/977eca40afe643dcfa6b8d8bb9a93f4cba1d8ed1ead22c92056b08855c7a/aiohttp-3.10.11-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bcaf2d79104d53d4dcf934f7ce76d3d155302d07dae24dff6c9fffd217568067", size = 1317132, upload-time = "2024-11-13T16:38:35.999Z" }, - { url = "https://files.pythonhosted.org/packages/1a/43/b5ee8e697ed0f96a2b3d80b3058fa7590cda508e9cd256274246ba1cf37a/aiohttp-3.10.11-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:25fd5470922091b5a9aeeb7e75be609e16b4fba81cdeaf12981393fb240dd10e", size = 1362630, upload-time = "2024-11-13T16:38:39.016Z" }, - { url = "https://files.pythonhosted.org/packages/28/20/3ae8e993b2990fa722987222dea74d6bac9331e2f530d086f309b4aa8847/aiohttp-3.10.11-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbde2ca67230923a42161b1f408c3992ae6e0be782dca0c44cb3206bf330dee1", size = 1276865, upload-time = "2024-11-13T16:38:41.423Z" }, - { url = "https://files.pythonhosted.org/packages/02/08/1afb0ab7dcff63333b683e998e751aa2547d1ff897b577d2244b00e6fe38/aiohttp-3.10.11-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:249c8ff8d26a8b41a0f12f9df804e7c685ca35a207e2410adbd3e924217b9006", size = 1230448, upload-time = "2024-11-13T16:38:43.962Z" }, - { url = "https://files.pythonhosted.org/packages/c6/fd/ccd0ff842c62128d164ec09e3dd810208a84d79cd402358a3038ae91f3e9/aiohttp-3.10.11-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:878ca6a931ee8c486a8f7b432b65431d095c522cbeb34892bee5be97b3481d0f", size = 1244626, upload-time = "2024-11-13T16:38:47.089Z" }, - { url = "https://files.pythonhosted.org/packages/9f/75/30e9537ab41ed7cb062338d8df7c4afb0a715b3551cd69fc4ea61cfa5a95/aiohttp-3.10.11-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:8663f7777ce775f0413324be0d96d9730959b2ca73d9b7e2c2c90539139cbdd6", size = 1243608, upload-time = "2024-11-13T16:38:49.47Z" }, - { url = "https://files.pythonhosted.org/packages/c2/e0/3e7a62d99b9080793affddc12a82b11c9bc1312916ad849700d2bddf9786/aiohttp-3.10.11-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:6cd3f10b01f0c31481fba8d302b61603a2acb37b9d30e1d14e0f5a58b7b18a31", size = 1286158, upload-time = "2024-11-13T16:38:51.947Z" }, - { url = "https://files.pythonhosted.org/packages/71/b8/df67886802e71e976996ed9324eb7dc379e53a7d972314e9c7fe3f6ac6bc/aiohttp-3.10.11-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:4e8d8aad9402d3aa02fdc5ca2fe68bcb9fdfe1f77b40b10410a94c7f408b664d", size = 1313636, upload-time = "2024-11-13T16:38:54.424Z" }, - { url = "https://files.pythonhosted.org/packages/3c/3b/aea9c3e70ff4e030f46902df28b4cdf486695f4d78fd9c6698827e2bafab/aiohttp-3.10.11-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:38e3c4f80196b4f6c3a85d134a534a56f52da9cb8d8e7af1b79a32eefee73a00", size = 1273772, upload-time = "2024-11-13T16:38:56.846Z" }, - { url = "https://files.pythonhosted.org/packages/e9/9e/4b4c5705270d1c4ee146516ad288af720798d957ba46504aaf99b86e85d9/aiohttp-3.10.11-cp313-cp313-win32.whl", hash = "sha256:fc31820cfc3b2863c6e95e14fcf815dc7afe52480b4dc03393c4873bb5599f71", size = 358679, upload-time = "2024-11-13T16:38:59.787Z" }, - { url = "https://files.pythonhosted.org/packages/28/1d/18ef37549901db94717d4389eb7be807acbfbdeab48a73ff2993fc909118/aiohttp-3.10.11-cp313-cp313-win_amd64.whl", hash = "sha256:4996ff1345704ffdd6d75fb06ed175938c133425af616142e7187f28dc75f14e", size = 378073, upload-time = "2024-11-13T16:39:02.065Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/1c/ce/3b83ebba6b3207a7135e5fcaba49706f8a4b6008153b4e30540c982fae26/aiohttp-3.13.2.tar.gz", hash = "sha256:40176a52c186aefef6eb3cad2cdd30cd06e3afbe88fe8ab2af9c0b90f228daca", size = 7837994, upload-time = "2025-10-28T20:59:39.937Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6d/34/939730e66b716b76046dedfe0842995842fa906ccc4964bba414ff69e429/aiohttp-3.13.2-cp310-cp310-macosx_10_9_universal2.whl", hash = 
"sha256:2372b15a5f62ed37789a6b383ff7344fc5b9f243999b0cd9b629d8bc5f5b4155", size = 736471, upload-time = "2025-10-28T20:55:27.924Z" }, + { url = "https://files.pythonhosted.org/packages/fd/cf/dcbdf2df7f6ca72b0bb4c0b4509701f2d8942cf54e29ca197389c214c07f/aiohttp-3.13.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e7f8659a48995edee7229522984bd1009c1213929c769c2daa80b40fe49a180c", size = 493985, upload-time = "2025-10-28T20:55:29.456Z" }, + { url = "https://files.pythonhosted.org/packages/9d/87/71c8867e0a1d0882dcbc94af767784c3cb381c1c4db0943ab4aae4fed65e/aiohttp-3.13.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:939ced4a7add92296b0ad38892ce62b98c619288a081170695c6babe4f50e636", size = 489274, upload-time = "2025-10-28T20:55:31.134Z" }, + { url = "https://files.pythonhosted.org/packages/38/0f/46c24e8dae237295eaadd113edd56dee96ef6462adf19b88592d44891dc5/aiohttp-3.13.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6315fb6977f1d0dd41a107c527fee2ed5ab0550b7d885bc15fee20ccb17891da", size = 1668171, upload-time = "2025-10-28T20:55:36.065Z" }, + { url = "https://files.pythonhosted.org/packages/eb/c6/4cdfb4440d0e28483681a48f69841fa5e39366347d66ef808cbdadddb20e/aiohttp-3.13.2-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6e7352512f763f760baaed2637055c49134fd1d35b37c2dedfac35bfe5cf8725", size = 1636036, upload-time = "2025-10-28T20:55:37.576Z" }, + { url = "https://files.pythonhosted.org/packages/84/37/8708cf678628216fb678ab327a4e1711c576d6673998f4f43e86e9ae90dd/aiohttp-3.13.2-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e09a0a06348a2dd73e7213353c90d709502d9786219f69b731f6caa0efeb46f5", size = 1727975, upload-time = "2025-10-28T20:55:39.457Z" }, + { url = "https://files.pythonhosted.org/packages/e6/2e/3ebfe12fdcb9b5f66e8a0a42dffcd7636844c8a018f261efb2419f68220b/aiohttp-3.13.2-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a09a6d073fb5789456545bdee2474d14395792faa0527887f2f4ec1a486a59d3", size = 1815823, upload-time = "2025-10-28T20:55:40.958Z" }, + { url = "https://files.pythonhosted.org/packages/a1/4f/ca2ef819488cbb41844c6cf92ca6dd15b9441e6207c58e5ae0e0fc8d70ad/aiohttp-3.13.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b59d13c443f8e049d9e94099c7e412e34610f1f49be0f230ec656a10692a5802", size = 1669374, upload-time = "2025-10-28T20:55:42.745Z" }, + { url = "https://files.pythonhosted.org/packages/f8/fe/1fe2e1179a0d91ce09c99069684aab619bf2ccde9b20bd6ca44f8837203e/aiohttp-3.13.2-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:20db2d67985d71ca033443a1ba2001c4b5693fe09b0e29f6d9358a99d4d62a8a", size = 1555315, upload-time = "2025-10-28T20:55:44.264Z" }, + { url = "https://files.pythonhosted.org/packages/5a/2b/f3781899b81c45d7cbc7140cddb8a3481c195e7cbff8e36374759d2ab5a5/aiohttp-3.13.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:960c2fc686ba27b535f9fd2b52d87ecd7e4fd1cf877f6a5cba8afb5b4a8bd204", size = 1639140, upload-time = "2025-10-28T20:55:46.626Z" }, + { url = "https://files.pythonhosted.org/packages/72/27/c37e85cd3ece6f6c772e549bd5a253d0c122557b25855fb274224811e4f2/aiohttp-3.13.2-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:6c00dbcf5f0d88796151e264a8eab23de2997c9303dd7c0bf622e23b24d3ce22", size = 1645496, upload-time = "2025-10-28T20:55:48.933Z" }, + { url = 
"https://files.pythonhosted.org/packages/66/20/3af1ab663151bd3780b123e907761cdb86ec2c4e44b2d9b195ebc91fbe37/aiohttp-3.13.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:fed38a5edb7945f4d1bcabe2fcd05db4f6ec7e0e82560088b754f7e08d93772d", size = 1697625, upload-time = "2025-10-28T20:55:50.377Z" }, + { url = "https://files.pythonhosted.org/packages/95/eb/ae5cab15efa365e13d56b31b0d085a62600298bf398a7986f8388f73b598/aiohttp-3.13.2-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:b395bbca716c38bef3c764f187860e88c724b342c26275bc03e906142fc5964f", size = 1542025, upload-time = "2025-10-28T20:55:51.861Z" }, + { url = "https://files.pythonhosted.org/packages/e9/2d/1683e8d67ec72d911397fe4e575688d2a9b8f6a6e03c8fdc9f3fd3d4c03f/aiohttp-3.13.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:204ffff2426c25dfda401ba08da85f9c59525cdc42bda26660463dd1cbcfec6f", size = 1714918, upload-time = "2025-10-28T20:55:53.515Z" }, + { url = "https://files.pythonhosted.org/packages/99/a2/ffe8e0e1c57c5e542d47ffa1fcf95ef2b3ea573bf7c4d2ee877252431efc/aiohttp-3.13.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:05c4dd3c48fb5f15db31f57eb35374cb0c09afdde532e7fb70a75aede0ed30f6", size = 1656113, upload-time = "2025-10-28T20:55:55.438Z" }, + { url = "https://files.pythonhosted.org/packages/0d/42/d511aff5c3a2b06c09d7d214f508a4ad8ac7799817f7c3d23e7336b5e896/aiohttp-3.13.2-cp310-cp310-win32.whl", hash = "sha256:e574a7d61cf10351d734bcddabbe15ede0eaa8a02070d85446875dc11189a251", size = 432290, upload-time = "2025-10-28T20:55:56.96Z" }, + { url = "https://files.pythonhosted.org/packages/8b/ea/1c2eb7098b5bad4532994f2b7a8228d27674035c9b3234fe02c37469ef14/aiohttp-3.13.2-cp310-cp310-win_amd64.whl", hash = "sha256:364f55663085d658b8462a1c3f17b2b84a5c2e1ba858e1b79bff7b2e24ad1514", size = 455075, upload-time = "2025-10-28T20:55:58.373Z" }, + { url = "https://files.pythonhosted.org/packages/35/74/b321e7d7ca762638cdf8cdeceb39755d9c745aff7a64c8789be96ddf6e96/aiohttp-3.13.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4647d02df098f6434bafd7f32ad14942f05a9caa06c7016fdcc816f343997dd0", size = 743409, upload-time = "2025-10-28T20:56:00.354Z" }, + { url = "https://files.pythonhosted.org/packages/99/3d/91524b905ec473beaf35158d17f82ef5a38033e5809fe8742e3657cdbb97/aiohttp-3.13.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e3403f24bcb9c3b29113611c3c16a2a447c3953ecf86b79775e7be06f7ae7ccb", size = 497006, upload-time = "2025-10-28T20:56:01.85Z" }, + { url = "https://files.pythonhosted.org/packages/eb/d3/7f68bc02a67716fe80f063e19adbd80a642e30682ce74071269e17d2dba1/aiohttp-3.13.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:43dff14e35aba17e3d6d5ba628858fb8cb51e30f44724a2d2f0c75be492c55e9", size = 493195, upload-time = "2025-10-28T20:56:03.314Z" }, + { url = "https://files.pythonhosted.org/packages/98/31/913f774a4708775433b7375c4f867d58ba58ead833af96c8af3621a0d243/aiohttp-3.13.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e2a9ea08e8c58bb17655630198833109227dea914cd20be660f52215f6de5613", size = 1747759, upload-time = "2025-10-28T20:56:04.904Z" }, + { url = "https://files.pythonhosted.org/packages/e8/63/04efe156f4326f31c7c4a97144f82132c3bb21859b7bb84748d452ccc17c/aiohttp-3.13.2-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:53b07472f235eb80e826ad038c9d106c2f653584753f3ddab907c83f49eedead", size = 1704456, upload-time = "2025-10-28T20:56:06.986Z" }, + { url = 
"https://files.pythonhosted.org/packages/8e/02/4e16154d8e0a9cf4ae76f692941fd52543bbb148f02f098ca73cab9b1c1b/aiohttp-3.13.2-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e736c93e9c274fce6419af4aac199984d866e55f8a4cec9114671d0ea9688780", size = 1807572, upload-time = "2025-10-28T20:56:08.558Z" }, + { url = "https://files.pythonhosted.org/packages/34/58/b0583defb38689e7f06798f0285b1ffb3a6fb371f38363ce5fd772112724/aiohttp-3.13.2-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ff5e771f5dcbc81c64898c597a434f7682f2259e0cd666932a913d53d1341d1a", size = 1895954, upload-time = "2025-10-28T20:56:10.545Z" }, + { url = "https://files.pythonhosted.org/packages/6b/f3/083907ee3437425b4e376aa58b2c915eb1a33703ec0dc30040f7ae3368c6/aiohttp-3.13.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a3b6fb0c207cc661fa0bf8c66d8d9b657331ccc814f4719468af61034b478592", size = 1747092, upload-time = "2025-10-28T20:56:12.118Z" }, + { url = "https://files.pythonhosted.org/packages/ac/61/98a47319b4e425cc134e05e5f3fc512bf9a04bf65aafd9fdcda5d57ec693/aiohttp-3.13.2-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:97a0895a8e840ab3520e2288db7cace3a1981300d48babeb50e7425609e2e0ab", size = 1606815, upload-time = "2025-10-28T20:56:14.191Z" }, + { url = "https://files.pythonhosted.org/packages/97/4b/e78b854d82f66bb974189135d31fce265dee0f5344f64dd0d345158a5973/aiohttp-3.13.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9e8f8afb552297aca127c90cb840e9a1d4bfd6a10d7d8f2d9176e1acc69bad30", size = 1723789, upload-time = "2025-10-28T20:56:16.101Z" }, + { url = "https://files.pythonhosted.org/packages/ed/fc/9d2ccc794fc9b9acd1379d625c3a8c64a45508b5091c546dea273a41929e/aiohttp-3.13.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:ed2f9c7216e53c3df02264f25d824b079cc5914f9e2deba94155190ef648ee40", size = 1718104, upload-time = "2025-10-28T20:56:17.655Z" }, + { url = "https://files.pythonhosted.org/packages/66/65/34564b8765ea5c7d79d23c9113135d1dd3609173da13084830f1507d56cf/aiohttp-3.13.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:99c5280a329d5fa18ef30fd10c793a190d996567667908bef8a7f81f8202b948", size = 1785584, upload-time = "2025-10-28T20:56:19.238Z" }, + { url = "https://files.pythonhosted.org/packages/30/be/f6a7a426e02fc82781afd62016417b3948e2207426d90a0e478790d1c8a4/aiohttp-3.13.2-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:2ca6ffef405fc9c09a746cb5d019c1672cd7f402542e379afc66b370833170cf", size = 1595126, upload-time = "2025-10-28T20:56:20.836Z" }, + { url = "https://files.pythonhosted.org/packages/e5/c7/8e22d5d28f94f67d2af496f14a83b3c155d915d1fe53d94b66d425ec5b42/aiohttp-3.13.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:47f438b1a28e926c37632bff3c44df7d27c9b57aaf4e34b1def3c07111fdb782", size = 1800665, upload-time = "2025-10-28T20:56:22.922Z" }, + { url = "https://files.pythonhosted.org/packages/d1/11/91133c8b68b1da9fc16555706aa7276fdf781ae2bb0876c838dd86b8116e/aiohttp-3.13.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9acda8604a57bb60544e4646a4615c1866ee6c04a8edef9b8ee6fd1d8fa2ddc8", size = 1739532, upload-time = "2025-10-28T20:56:25.924Z" }, + { url = "https://files.pythonhosted.org/packages/17/6b/3747644d26a998774b21a616016620293ddefa4d63af6286f389aedac844/aiohttp-3.13.2-cp311-cp311-win32.whl", hash = "sha256:868e195e39b24aaa930b063c08bb0c17924899c16c672a28a65afded9c46c6ec", size = 431876, upload-time 
= "2025-10-28T20:56:27.524Z" }, + { url = "https://files.pythonhosted.org/packages/c3/63/688462108c1a00eb9f05765331c107f95ae86f6b197b865d29e930b7e462/aiohttp-3.13.2-cp311-cp311-win_amd64.whl", hash = "sha256:7fd19df530c292542636c2a9a85854fab93474396a52f1695e799186bbd7f24c", size = 456205, upload-time = "2025-10-28T20:56:29.062Z" }, + { url = "https://files.pythonhosted.org/packages/29/9b/01f00e9856d0a73260e86dd8ed0c2234a466c5c1712ce1c281548df39777/aiohttp-3.13.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:b1e56bab2e12b2b9ed300218c351ee2a3d8c8fdab5b1ec6193e11a817767e47b", size = 737623, upload-time = "2025-10-28T20:56:30.797Z" }, + { url = "https://files.pythonhosted.org/packages/5a/1b/4be39c445e2b2bd0aab4ba736deb649fabf14f6757f405f0c9685019b9e9/aiohttp-3.13.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:364e25edaabd3d37b1db1f0cbcee8c73c9a3727bfa262b83e5e4cf3489a2a9dc", size = 492664, upload-time = "2025-10-28T20:56:32.708Z" }, + { url = "https://files.pythonhosted.org/packages/28/66/d35dcfea8050e131cdd731dff36434390479b4045a8d0b9d7111b0a968f1/aiohttp-3.13.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c5c94825f744694c4b8db20b71dba9a257cd2ba8e010a803042123f3a25d50d7", size = 491808, upload-time = "2025-10-28T20:56:34.57Z" }, + { url = "https://files.pythonhosted.org/packages/00/29/8e4609b93e10a853b65f8291e64985de66d4f5848c5637cddc70e98f01f8/aiohttp-3.13.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ba2715d842ffa787be87cbfce150d5e88c87a98e0b62e0f5aa489169a393dbbb", size = 1738863, upload-time = "2025-10-28T20:56:36.377Z" }, + { url = "https://files.pythonhosted.org/packages/9d/fa/4ebdf4adcc0def75ced1a0d2d227577cd7b1b85beb7edad85fcc87693c75/aiohttp-3.13.2-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:585542825c4bc662221fb257889e011a5aa00f1ae4d75d1d246a5225289183e3", size = 1700586, upload-time = "2025-10-28T20:56:38.034Z" }, + { url = "https://files.pythonhosted.org/packages/da/04/73f5f02ff348a3558763ff6abe99c223381b0bace05cd4530a0258e52597/aiohttp-3.13.2-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:39d02cb6025fe1aabca329c5632f48c9532a3dabccd859e7e2f110668972331f", size = 1768625, upload-time = "2025-10-28T20:56:39.75Z" }, + { url = "https://files.pythonhosted.org/packages/f8/49/a825b79ffec124317265ca7d2344a86bcffeb960743487cb11988ffb3494/aiohttp-3.13.2-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e67446b19e014d37342f7195f592a2a948141d15a312fe0e700c2fd2f03124f6", size = 1867281, upload-time = "2025-10-28T20:56:41.471Z" }, + { url = "https://files.pythonhosted.org/packages/b9/48/adf56e05f81eac31edcfae45c90928f4ad50ef2e3ea72cb8376162a368f8/aiohttp-3.13.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4356474ad6333e41ccefd39eae869ba15a6c5299c9c01dfdcfdd5c107be4363e", size = 1752431, upload-time = "2025-10-28T20:56:43.162Z" }, + { url = "https://files.pythonhosted.org/packages/30/ab/593855356eead019a74e862f21523db09c27f12fd24af72dbc3555b9bfd9/aiohttp-3.13.2-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:eeacf451c99b4525f700f078becff32c32ec327b10dcf31306a8a52d78166de7", size = 1562846, upload-time = "2025-10-28T20:56:44.85Z" }, + { url = 
"https://files.pythonhosted.org/packages/39/0f/9f3d32271aa8dc35036e9668e31870a9d3b9542dd6b3e2c8a30931cb27ae/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d8a9b889aeabd7a4e9af0b7f4ab5ad94d42e7ff679aaec6d0db21e3b639ad58d", size = 1699606, upload-time = "2025-10-28T20:56:46.519Z" }, + { url = "https://files.pythonhosted.org/packages/2c/3c/52d2658c5699b6ef7692a3f7128b2d2d4d9775f2a68093f74bca06cf01e1/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:fa89cb11bc71a63b69568d5b8a25c3ca25b6d54c15f907ca1c130d72f320b76b", size = 1720663, upload-time = "2025-10-28T20:56:48.528Z" }, + { url = "https://files.pythonhosted.org/packages/9b/d4/8f8f3ff1fb7fb9e3f04fcad4e89d8a1cd8fc7d05de67e3de5b15b33008ff/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8aa7c807df234f693fed0ecd507192fc97692e61fee5702cdc11155d2e5cadc8", size = 1737939, upload-time = "2025-10-28T20:56:50.77Z" }, + { url = "https://files.pythonhosted.org/packages/03/d3/ddd348f8a27a634daae39a1b8e291ff19c77867af438af844bf8b7e3231b/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:9eb3e33fdbe43f88c3c75fa608c25e7c47bbd80f48d012763cb67c47f39a7e16", size = 1555132, upload-time = "2025-10-28T20:56:52.568Z" }, + { url = "https://files.pythonhosted.org/packages/39/b8/46790692dc46218406f94374903ba47552f2f9f90dad554eed61bfb7b64c/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9434bc0d80076138ea986833156c5a48c9c7a8abb0c96039ddbb4afc93184169", size = 1764802, upload-time = "2025-10-28T20:56:54.292Z" }, + { url = "https://files.pythonhosted.org/packages/ba/e4/19ce547b58ab2a385e5f0b8aa3db38674785085abcf79b6e0edd1632b12f/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ff15c147b2ad66da1f2cbb0622313f2242d8e6e8f9b79b5206c84523a4473248", size = 1719512, upload-time = "2025-10-28T20:56:56.428Z" }, + { url = "https://files.pythonhosted.org/packages/70/30/6355a737fed29dcb6dfdd48682d5790cb5eab050f7b4e01f49b121d3acad/aiohttp-3.13.2-cp312-cp312-win32.whl", hash = "sha256:27e569eb9d9e95dbd55c0fc3ec3a9335defbf1d8bc1d20171a49f3c4c607b93e", size = 426690, upload-time = "2025-10-28T20:56:58.736Z" }, + { url = "https://files.pythonhosted.org/packages/0a/0d/b10ac09069973d112de6ef980c1f6bb31cb7dcd0bc363acbdad58f927873/aiohttp-3.13.2-cp312-cp312-win_amd64.whl", hash = "sha256:8709a0f05d59a71f33fd05c17fc11fcb8c30140506e13c2f5e8ee1b8964e1b45", size = 453465, upload-time = "2025-10-28T20:57:00.795Z" }, + { url = "https://files.pythonhosted.org/packages/bf/78/7e90ca79e5aa39f9694dcfd74f4720782d3c6828113bb1f3197f7e7c4a56/aiohttp-3.13.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7519bdc7dfc1940d201651b52bf5e03f5503bda45ad6eacf64dda98be5b2b6be", size = 732139, upload-time = "2025-10-28T20:57:02.455Z" }, + { url = "https://files.pythonhosted.org/packages/db/ed/1f59215ab6853fbaa5c8495fa6cbc39edfc93553426152b75d82a5f32b76/aiohttp-3.13.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:088912a78b4d4f547a1f19c099d5a506df17eacec3c6f4375e2831ec1d995742", size = 490082, upload-time = "2025-10-28T20:57:04.784Z" }, + { url = "https://files.pythonhosted.org/packages/68/7b/fe0fe0f5e05e13629d893c760465173a15ad0039c0a5b0d0040995c8075e/aiohttp-3.13.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5276807b9de9092af38ed23ce120539ab0ac955547b38563a9ba4f5b07b95293", size = 489035, upload-time = "2025-10-28T20:57:06.894Z" }, + { url = 
"https://files.pythonhosted.org/packages/d2/04/db5279e38471b7ac801d7d36a57d1230feeee130bbe2a74f72731b23c2b1/aiohttp-3.13.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1237c1375eaef0db4dcd7c2559f42e8af7b87ea7d295b118c60c36a6e61cb811", size = 1720387, upload-time = "2025-10-28T20:57:08.685Z" }, + { url = "https://files.pythonhosted.org/packages/31/07/8ea4326bd7dae2bd59828f69d7fdc6e04523caa55e4a70f4a8725a7e4ed2/aiohttp-3.13.2-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:96581619c57419c3d7d78703d5b78c1e5e5fc0172d60f555bdebaced82ded19a", size = 1688314, upload-time = "2025-10-28T20:57:10.693Z" }, + { url = "https://files.pythonhosted.org/packages/48/ab/3d98007b5b87ffd519d065225438cc3b668b2f245572a8cb53da5dd2b1bc/aiohttp-3.13.2-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a2713a95b47374169409d18103366de1050fe0ea73db358fc7a7acb2880422d4", size = 1756317, upload-time = "2025-10-28T20:57:12.563Z" }, + { url = "https://files.pythonhosted.org/packages/97/3d/801ca172b3d857fafb7b50c7c03f91b72b867a13abca982ed6b3081774ef/aiohttp-3.13.2-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:228a1cd556b3caca590e9511a89444925da87d35219a49ab5da0c36d2d943a6a", size = 1858539, upload-time = "2025-10-28T20:57:14.623Z" }, + { url = "https://files.pythonhosted.org/packages/f7/0d/4764669bdf47bd472899b3d3db91fffbe925c8e3038ec591a2fd2ad6a14d/aiohttp-3.13.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ac6cde5fba8d7d8c6ac963dbb0256a9854e9fafff52fbcc58fdf819357892c3e", size = 1739597, upload-time = "2025-10-28T20:57:16.399Z" }, + { url = "https://files.pythonhosted.org/packages/c4/52/7bd3c6693da58ba16e657eb904a5b6decfc48ecd06e9ac098591653b1566/aiohttp-3.13.2-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f2bef8237544f4e42878c61cef4e2839fee6346dc60f5739f876a9c50be7fcdb", size = 1555006, upload-time = "2025-10-28T20:57:18.288Z" }, + { url = "https://files.pythonhosted.org/packages/48/30/9586667acec5993b6f41d2ebcf96e97a1255a85f62f3c653110a5de4d346/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:16f15a4eac3bc2d76c45f7ebdd48a65d41b242eb6c31c2245463b40b34584ded", size = 1683220, upload-time = "2025-10-28T20:57:20.241Z" }, + { url = "https://files.pythonhosted.org/packages/71/01/3afe4c96854cfd7b30d78333852e8e851dceaec1c40fd00fec90c6402dd2/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:bb7fb776645af5cc58ab804c58d7eba545a97e047254a52ce89c157b5af6cd0b", size = 1712570, upload-time = "2025-10-28T20:57:22.253Z" }, + { url = "https://files.pythonhosted.org/packages/11/2c/22799d8e720f4697a9e66fd9c02479e40a49de3de2f0bbe7f9f78a987808/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:e1b4951125ec10c70802f2cb09736c895861cd39fd9dcb35107b4dc8ae6220b8", size = 1733407, upload-time = "2025-10-28T20:57:24.37Z" }, + { url = "https://files.pythonhosted.org/packages/34/cb/90f15dd029f07cebbd91f8238a8b363978b530cd128488085b5703683594/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:550bf765101ae721ee1d37d8095f47b1f220650f85fe1af37a90ce75bab89d04", size = 1550093, upload-time = "2025-10-28T20:57:26.257Z" }, + { url = "https://files.pythonhosted.org/packages/69/46/12dce9be9d3303ecbf4d30ad45a7683dc63d90733c2d9fe512be6716cd40/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_s390x.whl", 
hash = "sha256:fe91b87fc295973096251e2d25a811388e7d8adf3bd2b97ef6ae78bc4ac6c476", size = 1758084, upload-time = "2025-10-28T20:57:28.349Z" }, + { url = "https://files.pythonhosted.org/packages/f9/c8/0932b558da0c302ffd639fc6362a313b98fdf235dc417bc2493da8394df7/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e0c8e31cfcc4592cb200160344b2fb6ae0f9e4effe06c644b5a125d4ae5ebe23", size = 1716987, upload-time = "2025-10-28T20:57:30.233Z" }, + { url = "https://files.pythonhosted.org/packages/5d/8b/f5bd1a75003daed099baec373aed678f2e9b34f2ad40d85baa1368556396/aiohttp-3.13.2-cp313-cp313-win32.whl", hash = "sha256:0740f31a60848d6edb296a0df827473eede90c689b8f9f2a4cdde74889eb2254", size = 425859, upload-time = "2025-10-28T20:57:32.105Z" }, + { url = "https://files.pythonhosted.org/packages/5d/28/a8a9fc6957b2cee8902414e41816b5ab5536ecf43c3b1843c10e82c559b2/aiohttp-3.13.2-cp313-cp313-win_amd64.whl", hash = "sha256:a88d13e7ca367394908f8a276b89d04a3652044612b9a408a0bb22a5ed976a1a", size = 452192, upload-time = "2025-10-28T20:57:34.166Z" }, ] [[package]] name = "aiosignal" -version = "1.3.2" +version = "1.4.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "frozenlist" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ba/b5/6d55e80f6d8a08ce22b982eafa278d823b541c925f11ee774b0b9c43473d/aiosignal-1.3.2.tar.gz", hash = "sha256:a8c255c66fafb1e499c9351d0bf32ff2d8a0321595ebac3b93713656d2436f54", size = 19424, upload-time = "2024-12-13T17:10:40.86Z" } +sdist = { url = "https://files.pythonhosted.org/packages/61/62/06741b579156360248d1ec624842ad0edf697050bbaf7c3e46394e106ad1/aiosignal-1.4.0.tar.gz", hash = "sha256:f47eecd9468083c2029cc99945502cb7708b082c232f9aca65da147157b251c7", size = 25007, upload-time = "2025-07-03T22:54:43.528Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ec/6a/bc7e17a3e87a2985d3e8f4da4cd0f481060eb78fb08596c42be62c90a4d9/aiosignal-1.3.2-py2.py3-none-any.whl", hash = "sha256:45cde58e409a301715980c2b01d0c28bdde3770d8290b5eb2173759d9acb31a5", size = 7597, upload-time = "2024-12-13T17:10:38.469Z" }, + { url = "https://files.pythonhosted.org/packages/fb/76/641ae371508676492379f16e2fa48f4e2c11741bd63c48be4b12a6b09cba/aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e", size = 7490, upload-time = "2025-07-03T22:54:42.156Z" }, ] [[package]] @@ -664,8 +674,8 @@ dev = [ [package.metadata] requires-dist = [ - { name = "aiohttp", specifier = "==3.10.11" }, - { name = "aiosignal", specifier = "==1.3.2" }, + { name = "aiohttp", specifier = "==3.13.2" }, + { name = "aiosignal", specifier = "==1.4.0" }, { name = "black", marker = "extra == 'develop'", specifier = "==24.10.0" }, { name = "boto3", marker = "extra == 'develop'", specifier = "==1.34.68" }, { name = "boto3", marker = "extra == 's3'", specifier = "==1.34.68" }, From d377d0eb42c3bf4b52190468863b7cc565b35a71 Mon Sep 17 00:00:00 2001 From: Federico Ressi Date: Thu, 27 Nov 2025 16:07:33 +0100 Subject: [PATCH 6/8] Update docs. 
--- docs/adding_tracks.rst | 7 +----- docs/car.rst | 2 +- docs/cluster_management.rst | 8 +++---- docs/command_line_reference.rst | 39 ++++++++++++++------------------- docs/elasticsearch_plugins.rst | 4 ++-- docs/migrate.rst | 12 +++++----- docs/pipelines.rst | 4 ++-- docs/quickstart.rst | 4 ++-- docs/race.rst | 12 +++------- docs/recipes.rst | 2 +- docs/track.rst | 4 ++-- 11 files changed, 41 insertions(+), 57 deletions(-) diff --git a/docs/adding_tracks.rst b/docs/adding_tracks.rst index 4eae501be..64b6c5e7f 100644 --- a/docs/adding_tracks.rst +++ b/docs/adding_tracks.rst @@ -295,12 +295,7 @@ You can also show details about your track with ``esrally info --track-path=~/ra 5. force-merge 6. query-match-all (8 clients) -Congratulations, you have created your first track! You can test it with ``esrally race --distribution-version=7.14.1 --track-path=~/rally-tracks/tutorial``. - -.. note:: - - To test the track with Elasticsearch prior to 7.0.0 you need to update ``index.json`` and ``track.json`` as specified in notes above and then execute ``esrally race --distribution-version=6.5.3 --track-path=~/rally-tracks/tutorial``. - +Congratulations, you have created your first track! You can test it with ``esrally race --distribution-version=9.1.6 --track-path=~/rally-tracks/tutorial``. .. _add_track_test_mode: diff --git a/docs/car.rst b/docs/car.rst index 4bbb0d393..8233b7c96 100644 --- a/docs/car.rst +++ b/docs/car.rst @@ -162,7 +162,7 @@ You can now verify that everything works by listing all teams in this team repos This shows all teams that are available on the ``master`` branch of this repository. Suppose you only created teams on the branch ``2`` because you're interested in the performance of Elasticsearch 2.x, then you can also specify the distribution version:: - esrally list teams --team-repository=private --distribution-version=7.0.0 + esrally list teams --team-repository=private --distribution-version=9.1.6 Rally will follow the same branch fallback logic as described above. diff --git a/docs/cluster_management.rst b/docs/cluster_management.rst index 50eb2beb8..680536b20 100644 --- a/docs/cluster_management.rst +++ b/docs/cluster_management.rst @@ -25,7 +25,7 @@ In this section we will setup a single Elasticsearch node locally, run a benchma First we need to install Elasticsearch:: - esrally install --quiet --distribution-version=7.4.2 --node-name="rally-node-0" --network-host="127.0.0.1" --http-port=39200 --master-nodes="rally-node-0" --seed-hosts="127.0.0.1:39300" + esrally install --quiet --distribution-version=9.1.6 --node-name="rally-node-0" --network-host="127.0.0.1" --http-port=39200 --master-nodes="rally-node-0" --seed-hosts="127.0.0.1:39300" The parameter ``--network-host`` defines the network interface this node will bind to and ``--http-port`` defines which port will be exposed for HTTP traffic. Rally will automatically choose the transport port range as 100 above (39300). The parameters ``--master-nodes`` and ``--seed-hosts`` are necessary for the discovery process. Please see the respective Elasticsearch documentation on `discovery `_ for more details. @@ -70,11 +70,11 @@ Levelling Up: Benchmarking a Cluster This approach of being able to manage individual cluster nodes shows its power when we want to set up a cluster consisting of multiple nodes. At the moment Rally only supports a uniform cluster architecture but with this approach we can also set up arbitrarily complex clusters.
The following example shows how to set up a uniform three-node cluster on three machines with the IPs ``192.168.14.77``, ``192.168.14.78`` and ``192.168.14.79``. On each machine we will issue the following command (pick the right one per machine):: # on 192.168.14.77 - export INSTALLATION_ID=$(esrally install --quiet --distribution-version=7.4.2 --node-name="rally-node-0" --network-host="192.168.14.77" --http-port=39200 --master-nodes="rally-node-0,rally-node-1,rally-node-2" --seed-hosts="192.168.14.77:39300,192.168.14.78:39300,192.168.14.79:39300" | jq --raw-output '.["installation-id"]') + export INSTALLATION_ID=$(esrally install --quiet --distribution-version=9.1.6 --node-name="rally-node-0" --network-host="192.168.14.77" --http-port=39200 --master-nodes="rally-node-0,rally-node-1,rally-node-2" --seed-hosts="192.168.14.77:39300,192.168.14.78:39300,192.168.14.79:39300" | jq --raw-output '.["installation-id"]') # on 192.168.14.78 - export INSTALLATION_ID=$(esrally install --quiet --distribution-version=7.4.2 --node-name="rally-node-1" --network-host="192.168.14.78" --http-port=39200 --master-nodes="rally-node-0,rally-node-1,rally-node-2" --seed-hosts="192.168.14.77:39300,192.168.14.78:39300,192.168.14.79:39300" | jq --raw-output '.["installation-id"]') + export INSTALLATION_ID=$(esrally install --quiet --distribution-version=9.1.6 --node-name="rally-node-1" --network-host="192.168.14.78" --http-port=39200 --master-nodes="rally-node-0,rally-node-1,rally-node-2" --seed-hosts="192.168.14.77:39300,192.168.14.78:39300,192.168.14.79:39300" | jq --raw-output '.["installation-id"]') # on 192.168.14.79 - export INSTALLATION_ID=$(esrally install --quiet --distribution-version=7.4.2 --node-name="rally-node-2" --network-host="192.168.14.79" --http-port=39200 --master-nodes="rally-node-0,rally-node-1,rally-node-2" --seed-hosts="192.168.14.77:39300,192.168.14.78:39300,192.168.14.79:39300" | jq --raw-output '.["installation-id"]') + export INSTALLATION_ID=$(esrally install --quiet --distribution-version=9.1.6 --node-name="rally-node-2" --network-host="192.168.14.79" --http-port=39200 --master-nodes="rally-node-0,rally-node-1,rally-node-2" --seed-hosts="192.168.14.77:39300,192.168.14.78:39300,192.168.14.79:39300" | jq --raw-output '.["installation-id"]') Then we pick a random race id, e.g. ``fb38013d-5d06-4b81-b81a-b61c8c10f6e5`` and set it on each machine (including the machine where we will generate load):: diff --git a/docs/command_line_reference.rst b/docs/command_line_reference.rst index c2b111f33..eb93dff39 100644 --- a/docs/command_line_reference.rst +++ b/docs/command_line_reference.rst @@ -109,23 +109,18 @@ Because ``--quiet`` is specified, Rally will suppress all non-essential output ( This subcommand can be used to download Elasticsearch distributions. Example:: - esrally download --distribution-version=6.8.0 --quiet + esrally download --distribution-version=9.1.6 --quiet -This will download the OSS distribution of Elasticsearch 6.8.0. Because ``--quiet`` is specified, Rally will suppress all non-essential output (banners, progress messages etc.) and only return the location of the binary on the local machine after it has downloaded it:: +This will download Elasticsearch 9.1.6. Because ``--quiet`` is specified, Rally will suppress all non-essential output (banners, progress messages etc.)
and only return the location of the binary on the local machine after it has downloaded it:: { - "elasticsearch": "/Users/dm/.rally/benchmarks/distributions/elasticsearch-oss-6.8.0.tar.gz" + "elasticsearch": "/path/to/elasticsearch-9.1.6-darwin-aarch64.tar.gz" } To download the default distribution you need to specify a license (via ``--car``):: - esrally download --distribution-version=6.8.0 --car=basic-license --quiet + esrally download --distribution-version=9.1.6 --car=basic-license --quiet -This will show the path to the default distribution:: - - { - "elasticsearch": "/Users/dm/.rally/benchmarks/distributions/elasticsearch-6.8.0.tar.gz" - } ``delete`` ~~~~~~~~~~~ The ``delete`` subcommand is used to delete records for different configuration @@ -143,7 +138,7 @@ This subcommand can be used to install a single Elasticsearch node. Example:: - esrally install --quiet --distribution-version=7.4.2 --node-name="rally-node-0" --network-host="127.0.0.1" --http-port=39200 --master-nodes="rally-node-0" --seed-hosts="127.0.0.1:39300" + esrally install --quiet --distribution-version=9.1.6 --node-name="rally-node-0" --network-host="127.0.0.1" --http-port=39200 --master-nodes="rally-node-0" --seed-hosts="127.0.0.1:39300" This will output the id of this installation:: @@ -310,13 +305,13 @@ Used to specify the current node's name in the cluster when it is setup via the This parameter is useful in benchmarks involving multiple Elasticsearch clusters. It's used to configure the cluster name of the current Elasticsearch node when it is set up via the ``install`` or ``race`` subcommand. The following example sets up two Elasticsearch clusters: ``cluster-1`` and ``cluster-2``, and each has two nodes:: # install node-1 in cluster-1 - esrally install --quiet --distribution-version=8.2.2 --node-name="node-1" --cluster-name=cluster-1 --network-host="192.168.1.1" --http-port=39200 --master-nodes="node-1" --seed-hosts="192.168.1.1:39300,192.168.1.2:39300" + esrally install --quiet --distribution-version=9.1.6 --node-name="node-1" --cluster-name=cluster-1 --network-host="192.168.1.1" --http-port=39200 --master-nodes="node-1" --seed-hosts="192.168.1.1:39300,192.168.1.2:39300" # install node-2 in cluster-1 - esrally install --quiet --distribution-version=8.2.2 --node-name="node-2" --cluster-name=cluster-1 --network-host="192.168.1.2" --http-port=39200 --master-nodes="node-1" --seed-hosts="192.168.1.1:39300,192.168.1.2:39300" + esrally install --quiet --distribution-version=9.1.6 --node-name="node-2" --cluster-name=cluster-1 --network-host="192.168.1.2" --http-port=39200 --master-nodes="node-1" --seed-hosts="192.168.1.1:39300,192.168.1.2:39300" # install node-3 in cluster-2 - esrally install --quiet --distribution-version=8.2.2 --node-name="node-3" --cluster-name=cluster-2 --network-host="192.168.1.3" --http-port=39200 --master-nodes="node-3" --seed-hosts="192.168.1.3:39300,192.168.1.4:39300" + esrally install --quiet --distribution-version=9.1.6 --node-name="node-3" --cluster-name=cluster-2 --network-host="192.168.1.3" --http-port=39200 --master-nodes="node-3" --seed-hosts="192.168.1.3:39300,192.168.1.4:39300" # install node-4 in cluster-2 - esrally install --quiet --distribution-version=8.2.2 --node-name="node-4" --cluster-name=cluster-2 --network-host="192.168.1.4" --http-port=39200 --master-nodes="node-3" --seed-hosts="192.168.1.3:39300,192.168.1.4:39300" + esrally install --quiet --distribution-version=9.1.6 --node-name="node-4" --cluster-name=cluster-2 --network-host="192.168.1.4" --http-port=39200
--master-nodes="node-3" --seed-hosts="192.168.1.3:39300,192.168.1.4:39300" If the ``cluster-name`` parameter is not specified, Rally will use ``rally-benchmark`` as the default cluster name. @@ -425,14 +420,14 @@ Example:: Specifies the name of the target operating system for which an artifact should be downloaded. By default this value is automatically derived based on the operating system Rally is run on. This command line flag is only applicable to the ``download`` subcommand and allows downloading an artifact for a different operating system. Example:: - esrally download --distribution-version=7.5.1 --target-os=linux + esrally download --distribution-version=9.1.6 --target-os=linux ``target-arch`` ~~~~~~~~~~~~~~~ Specifies the name of the target CPU architecture for which an artifact should be downloaded. By default this value is automatically derived based on the CPU architecture Rally is run on. This command line flag is only applicable to the ``download`` subcommand and allows downloading an artifact for a different CPU architecture. Example:: - esrally download --distribution-version=7.5.1 --target-arch=x86_64 + esrally download --distribution-version=9.1.6 --target-arch=x86_64 ``car`` @@ -481,7 +476,7 @@ Allows to override variables of Elasticsearch plugins. It accepts a list of comm Example:: - esrally race --track=geonames --distribution-version=6.1.1. --elasticsearch-plugins="x-pack:monitoring-http" --plugin-params="monitoring_type:'http',monitoring_host:'some_remote_host',monitoring_port:10200,monitoring_user:'rally',monitoring_password:'m0n1t0r1ng'" + esrally race --track=geonames --distribution-version=9.1.6 --elasticsearch-plugins="x-pack:monitoring-http" --plugin-params="monitoring_type:'http',monitoring_host:'some_remote_host',monitoring_port:10200,monitoring_user:'rally',monitoring_password:'m0n1t0r1ng'" This enables the HTTP exporter of `X-Pack Monitoring `_ and exports the data to the configured monitoring host. @@ -594,9 +589,9 @@ This command line parameter sets the major version of the JDK that Rally should Example:: # Run a benchmark with defaults - esrally race --track=geonames --distribution-version=7.0.0 + esrally race --track=geonames --distribution-version=9.1.6 # Force to run with JDK 11 - esrally race --track=geonames --distribution-version=7.0.0 --runtime-jdk=11 + esrally race --track=geonames --distribution-version=9.1.6 --runtime-jdk=11 It is also possible to specify the JDK that is bundled with Elasticsearch with the special value ``bundled``. The `JDK is bundled from Elasticsearch 7.0.0 onwards `_. @@ -640,10 +635,10 @@ If you want Rally to launch and benchmark a cluster using a binary distribution, :: - esrally race --track=geonames --distribution-version=7.0.0 + esrally race --track=geonames --distribution-version=9.1.6 -Rally will then benchmark the official Elasticsearch 7.0.0 distribution. Please check our :doc:`version support page ` to see which Elasticsearch versions are currently supported by Rally. +Rally will then benchmark the official Elasticsearch 9.1.6 distribution. Please check our :doc:`version support page ` to see which Elasticsearch versions are currently supported by Rally.
``distribution-repository`` ~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -839,7 +834,7 @@ Here we define the necessary responses for a track that bulk-indexes data:: Save the above responses as ``responses.json`` and execute a benchmark as follows:: - esrally race --track=geonames --challenge=append-no-conflicts-index-only --pipeline=benchmark-only --distribution-version=8.0.0 --client-options="static_responses:'responses.json'" + esrally race --track=geonames --challenge=append-no-conflicts-index-only --pipeline=benchmark-only --distribution-version=9.1.6 --client-options="static_responses:'responses.json'" .. note:: Use ``--pipeline=benchmark-only`` as Rally should not start any cluster when static responses are used. diff --git a/docs/elasticsearch_plugins.rst b/docs/elasticsearch_plugins.rst index 50ac424ed..d1bd84417 100644 --- a/docs/elasticsearch_plugins.rst +++ b/docs/elasticsearch_plugins.rst @@ -44,7 +44,7 @@ In order to tell Rally to install a plugin, use the ``--elasticsearch-plugins`` Example:: - esrally race --track=geonames --distribution-version=7.12.0 --elasticsearch-plugins="analysis-icu,analysis-phonetic" + esrally race --track=geonames --distribution-version=9.1.6 --elasticsearch-plugins="analysis-icu,analysis-phonetic" This will install the plugins ``analysis-icu`` and ``analysis-phonetic`` (in that order). In order to use the features that these plugins provide, you need to write a :doc:`custom track `. @@ -216,7 +216,7 @@ As ``myplugin`` is not a core plugin, the Elasticsearch plugin manager does not [distributions] plugin.myplugin.release.url=https://example.org/myplugin/releases/{{VERSION}}/myplugin-{{VERSION}}.zip -Now you can run benchmarks with the custom Elasticsearch plugin, e.g. with ``esrally race --track=geonames --distribution-version=7.12.0 --elasticsearch-plugins="myplugin:simple"``. +Now you can run benchmarks with the custom Elasticsearch plugin, e.g. with ``esrally race --track=geonames --distribution-version=9.1.6 --elasticsearch-plugins="myplugin:simple"``. For this to work you need to ensure two things: diff --git a/docs/migrate.rst b/docs/migrate.rst index 3009bf923..88f8309af 100644 --- a/docs/migrate.rst +++ b/docs/migrate.rst @@ -209,11 +209,11 @@ Rally requires a subcommand Previously a subcommand was optional when running a benchmark. With Rally 2.1.0 a subcommand is always required. So instead of invoking:: - esrally --distribution-version=7.10.0 + esrally --distribution-version=9.1.6 Invoke Rally with the ``race`` subcommand instead:: - esrally race --distribution-version=7.10.0 + esrally race --distribution-version=9.1.6 Running without a track is deprecated @@ -221,11 +221,11 @@ Previously Rally has implicitly chosen the geonames track as default when ``--track`` was not provided. We want users to make a conscious choice of the workload and not specifying the track explicitly is deprecated (to be removed in Rally 2.2.0). So instead of invoking:: - esrally race --distribution-version=7.10.0 + esrally race --distribution-version=9.1.6 Invoke Rally with ``--track=geonames`` instead:: - esrally race --distribution-version=7.10.0 --track=geonames + esrally race --distribution-version=9.1.6 --track=geonames Migrating to Rally 2.0.4 @@ -236,11 +236,11 @@ Running without a subcommand is deprecated Rally 2.0.4 will warn when invoked without a subcommand.
So instead of invoking:: - esrally --distribution-version=7.10.0 + esrally --distribution-version=9.1.6 Invoke Rally with the ``race`` subcommand instead:: - esrally race --distribution-version=7.10.0 + esrally race --distribution-version=9.1.6 When Rally is invoked without a subcommand it will issue the following warning on the command line and in the log file:: diff --git a/docs/pipelines.rst b/docs/pipelines.rst index 00b88329f..155dc37fc 100644 --- a/docs/pipelines.rst +++ b/docs/pipelines.rst @@ -30,7 +30,7 @@ from-distribution This pipeline allows benchmarking an official Elasticsearch distribution which will be automatically downloaded by Rally. An example invocation:: - esrally race --track=geonames --pipeline=from-distribution --distribution-version=7.0.0 + esrally race --track=geonames --pipeline=from-distribution --distribution-version=9.1.6 The version numbers have to match the name in the download URL path. @@ -42,7 +42,7 @@ However, this feature is mainly intended for continuous integration environments .. note:: - This pipeline is just mentioned for completeness but Rally will autoselect it for you. All you need to do is to define the ``--distribution-version`` flag. + This pipeline is just mentioned for completeness but Rally will auto-select it for you. All you need to do is to define the ``--distribution-version`` flag. .. _pipelines_from-sources: diff --git a/docs/quickstart.rst b/docs/quickstart.rst index bdcaab3f5..24be0bc9b 100644 --- a/docs/quickstart.rst +++ b/docs/quickstart.rst @@ -18,9 +18,9 @@ Run your first race Now we're ready to run our first :doc:`race `:: - esrally race --distribution-version=6.5.3 --track=geonames + esrally race --distribution-version=9.1.6 --track=geonames -This will download Elasticsearch 6.5.3 and run the `geonames `_ :doc:`track ` against it. After the race, a :doc:`summary report ` is written to the command line::: +This will download Elasticsearch 9.1.6 and run the `geonames `_ :doc:`track ` against it. After the race, a :doc:`summary report ` is written to the command line:: ------------------------------------------------------ diff --git a/docs/race.rst b/docs/race.rst index 7c1b825fd..921657bc1 100644 --- a/docs/race.rst +++ b/docs/race.rst @@ -48,11 +48,11 @@ Starting a Race To start a race you have to define the track and challenge to run. For example:: - esrally race --distribution-version=6.0.0 --track=geopoint --challenge=append-fast-with-conflicts + esrally race --distribution-version=9.1.6 --track=geopoint --challenge=append-fast-with-conflicts Rally will then start racing on this track. If you have never started Rally before, it should look similar to the following output:: - $ esrally race --distribution-version=6.0.0 --track=geopoint --challenge=append-fast-with-conflicts + $ esrally race --distribution-version=9.1.6 --track=geopoint --challenge=append-fast-with-conflicts ____ ____ / __ \____ _/ / /_ __ @@ -61,13 +61,7 @@ Rally will then start racing on this track. If you have never started Rally befo /_/ |_|\__,_/_/_/\__, / /____/ - [INFO] Racing on track [geopoint], challenge [append-fast-with-conflicts] and car ['defaults'] with version [6.0.0]. - [INFO] Downloading Elasticsearch 6.0.0 ... [OK] - [INFO] Rally will delete the benchmark candidate after the benchmark - [INFO] Downloading data from [http://benchmarks.elasticsearch.org.s3.amazonaws.com/corpora/geopoint/documents.json.bz2] (482 MB) to [/Users/dm/.rally/benchmarks/data/geopoint/documents.json.bz2] ...
[OK] - [INFO] Decompressing track data from [/Users/dm/.rally/benchmarks/data/geopoint/documents.json.bz2] to [/Users/dm/.rally/benchmarks/data/geopoint/documents.json] (resulting size: 2.28 GB) ... [OK] - [INFO] Preparing file offset table for [/Users/dm/.rally/benchmarks/data/geopoint/documents.json] ... [OK] - Running index-update [ 0% done] + [INFO] Racing on track [geopoint], challenge [append-fast-with-conflicts] and car ['defaults'] with version [9.1.6]. The benchmark will take a while to run, so be patient. diff --git a/docs/recipes.rst b/docs/recipes.rst index 1586bbfa0..fb3371ffd 100644 --- a/docs/recipes.rst +++ b/docs/recipes.rst @@ -84,7 +84,7 @@ To run a benchmark for this scenario follow these steps: 1. :doc:`Install ` and :doc:`configure ` Rally on all machines. Be sure that the same version is installed on all of them and fully :doc:`configured `. 2. Start the :doc:`Rally daemon ` on each machine. The Rally daemon allows Rally to communicate with all remote machines. On the benchmark coordinator run ``esrallyd start --node-ip=10.5.5.5 --coordinator-ip=10.5.5.5`` and on the benchmark candidate machines run ``esrallyd start --node-ip=10.5.5.10 --coordinator-ip=10.5.5.5`` and ``esrallyd start --node-ip=10.5.5.11 --coordinator-ip=10.5.5.5`` respectively. The ``--node-ip`` parameter tells Rally the IP of the machine on which it is running. As some machines have more than one network interface, Rally will not attempt to auto-detect the machine IP. The ``--coordinator-ip`` parameter tells Rally the IP of the benchmark coordinator node. -3. Start the benchmark by invoking Rally as usual on the benchmark coordinator, for example: ``esrally race --track=pmc --distribution-version=7.0.0 --target-hosts=10.5.5.10:39200,10.5.5.11:39200``. Rally will derive from the ``--target-hosts`` parameter that it should provision the nodes ``10.5.5.10`` and ``10.5.5.11``. +3. Start the benchmark by invoking Rally as usual on the benchmark coordinator, for example: ``esrally race --track=pmc --distribution-version=9.1.6 --target-hosts=10.5.5.10:39200,10.5.5.11:39200``. Rally will derive from the ``--target-hosts`` parameter that it should provision the nodes ``10.5.5.10`` and ``10.5.5.11``. 4. After the benchmark has finished you can stop the Rally daemon again. On the benchmark coordinator and on the benchmark candidates run ``esrallyd stop``. .. note:: diff --git a/docs/track.rst b/docs/track.rst index 35cb69c88..833a85972 100644 --- a/docs/track.rst +++ b/docs/track.rst @@ -96,9 +96,9 @@ You can now verify that everything works by listing all tracks in this track rep esrally list tracks --track-repository=private -This shows all tracks that are available on the ``master`` branch of this repository. Suppose you only created tracks on the branch ``2`` because you're interested in the performance of Elasticsearch 2.x, then you can specify also the distribution version:: +This shows all tracks that are available on the ``master`` branch of this repository. Suppose you only created tracks on the branch ``9`` because you're interested in the performance of Elasticsearch 9.x, then you can also specify the distribution version:: - esrally list tracks --track-repository=private --distribution-version=7.0.0 + esrally list tracks --track-repository=private --distribution-version=9.1.6 Rally will follow the same branch fallback logic as described above.
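A note on the doc update above: all of these version pins are edited by hand across eleven files, so transposed digits are easy to miss. As a sanity check, here is a minimal, illustrative sketch (the script, its regex, and the assumption that every pin appears as ``--distribution-version=X.Y.Z`` somewhere under ``docs/*.rst`` are illustrative assumptions, not part of the patch)::

    import pathlib
    import re

    # Flag any leftover pre-9.x version pins in the docs.
    PIN = re.compile(r"distribution-version=([0-8]\.\d+\.\d+)")

    for rst in sorted(pathlib.Path("docs").glob("*.rst")):
        for lineno, line in enumerate(rst.read_text().splitlines(), start=1):
            if match := PIN.search(line):
                print(f"{rst}:{lineno}: stale version pin {match.group(1)}")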
From 1fc87b99b05c5ea0396a8a6bc63e2e9934c0dd66 Mon Sep 17 00:00:00 2001 From: Federico Ressi Date: Thu, 27 Nov 2025 16:51:55 +0100 Subject: [PATCH 7/8] Provide Content-Type and Accept headers in perform_request --- esrally/client/asynchronous.py | 23 ++++++++++++++++++++++- 1 file changed, 22 insertions(+), 1 deletion(-) diff --git a/esrally/client/asynchronous.py b/esrally/client/asynchronous.py index 5a3210a4c..f1df311bc 100644 --- a/esrally/client/asynchronous.py +++ b/esrally/client/asynchronous.py @@ -18,13 +18,14 @@ import asyncio import json import logging +from collections.abc import Mapping from typing import Any, Optional import aiohttp from aiohttp import BaseConnector, RequestInfo from aiohttp.client_proto import ResponseHandler from aiohttp.helpers import BaseTimerContext -from elastic_transport import AiohttpHttpNode, AsyncTransport +from elastic_transport import AiohttpHttpNode, ApiResponse, AsyncTransport from elasticsearch import AsyncElasticsearch from multidict import CIMultiDict, CIMultiDictProxy from yarl import URL @@ -276,3 +277,23 @@ def options(self, *args, **kwargs): new_self.distribution_version = self.distribution_version new_self.distribution_flavor = self.distribution_flavor return new_self + + async def perform_request( + self, + method: str, + path: str, + *, + params: Optional[Mapping[str, Any]] = None, + headers: Optional[Mapping[str, str]] = None, + body: Optional[Any] = None, + endpoint_id: Optional[str] = None, + path_parts: Optional[Mapping[str, Any]] = None, + ) -> ApiResponse[Any]: + # We need to ensure that we provide content-type and accept headers if body is not None: + headers = dict(headers) if headers else {}  # copy: a Mapping has no setdefault and we must not mutate the caller's headers + headers.setdefault("Content-Type", "application/json") + headers.setdefault("Accept", "application/json") + return await super().perform_request( + method, path, params=params, headers=headers, body=body, endpoint_id=endpoint_id, path_parts=path_parts + ) From 3517598e8bf9dff447164a5820fa209d2f94e0d0 Mon Sep 17 00:00:00 2001 From: Federico Ressi Date: Tue, 2 Dec 2025 11:02:45 +0100 Subject: [PATCH 8/8] WIP: transient, non-working state submitted for backup.
--- esrally/client/asynchronous.py | 138 +++++- esrally/client/common.py | 59 +++ esrally/driver/runner.py | 858 ++++++++++++++------------------- tests/client/common_test.py | 24 + tests/driver/runner_test.py | 10 +- 5 files changed, 593 insertions(+), 496 deletions(-) create mode 100644 esrally/client/common.py create mode 100644 tests/client/common_test.py diff --git a/esrally/client/asynchronous.py b/esrally/client/asynchronous.py index f1df311bc..e51bf37b6 100644 --- a/esrally/client/asynchronous.py +++ b/esrally/client/asynchronous.py @@ -18,18 +18,33 @@ import asyncio import json import logging -from collections.abc import Mapping +import warnings +from collections.abc import Iterable, Mapping from typing import Any, Optional import aiohttp from aiohttp import BaseConnector, RequestInfo from aiohttp.client_proto import ResponseHandler from aiohttp.helpers import BaseTimerContext -from elastic_transport import AiohttpHttpNode, ApiResponse, AsyncTransport +from elastic_transport import ( + AiohttpHttpNode, + ApiResponse, + AsyncTransport, + BinaryApiResponse, + HeadApiResponse, + ListApiResponse, + ObjectApiResponse, + TextApiResponse, +) +from elastic_transport.client_utils import DEFAULT from elasticsearch import AsyncElasticsearch +from elasticsearch._async.client import IlmClient +from elasticsearch.compat import warn_stacklevel +from elasticsearch.exceptions import HTTP_EXCEPTIONS, ApiError, ElasticsearchWarning from multidict import CIMultiDict, CIMultiDictProxy from yarl import URL +from esrally.client.common import _WARNING_RE, _mimetype_header_to_compat, _quote_query from esrally.client.context import RequestContextHolder from esrally.utils import io, versions @@ -262,12 +277,32 @@ def __init__(self, *args, **kwargs): super().__init__(*args, node_class=RallyAiohttpHttpNode, **kwargs) +class RallyIlmClient(IlmClient): + async def put_lifecycle(self, *args, **kwargs): + """ + The 'elasticsearch-py' 8.x method signature renames the 'policy' param to 'name', and the previously so-called + 'body' param becomes 'policy' + """ + if args: + kwargs["name"] = args[0] + + if body := kwargs.pop("body", None): + kwargs["policy"] = body.get("policy", {}) + # pylint: disable=missing-kwoa + return await IlmClient.put_lifecycle(self, **kwargs) + + class RallyAsyncElasticsearch(AsyncElasticsearch, RequestContextHolder): def __init__(self, hosts: Any = None, *, distribution_version: str | None = None, distribution_flavor: str | None = None, **kwargs): super().__init__(hosts, **kwargs) self.distribution_version = distribution_version self.distribution_flavor = distribution_flavor + # some ILM method signatures changed in 'elasticsearch-py' 8.x, + # so we override method(s) here to provide BWC for any custom + # runners that aren't using the new kwargs + self.ilm = RallyIlmClient(self) + @property def is_serverless(self): return versions.is_serverless(self.distribution_flavor) @@ -289,11 +324,100 @@ async def perform_request( endpoint_id: Optional[str] = None, path_parts: Optional[Mapping[str, Any]] = None, ) -> ApiResponse[Any]: + if endpoint_id is not None or path_parts is not None: + raise NotImplementedError("Parameters endpoint_id and path_parts are not supported.") + # We need to ensure that we provide content-type and accept headers if body is not None: - headers = headers or {} - headers.setdefault("Content-Type", "application/json") - headers.setdefault("Accept", "application/json") - return await super().perform_request( - method, path, params=params, headers=headers, body=body, 
endpoint_id=endpoint_id, path_parts=path_parts + if headers is None: + headers = {"content-type": "application/json", "accept": "application/json"} + else: + if headers.get("content-type") is None: + headers["content-type"] = "application/json" + if headers.get("accept") is None: + headers["accept"] = "application/json" + + if headers: + request_headers = self._headers.copy() + request_headers.update(headers) + else: + request_headers = self._headers + + # Converts all parts of a Accept/Content-Type headers + # from application/X -> application/vnd.elasticsearch+X + # see https://github.com/elastic/elasticsearch/issues/51816 + # Not applicable to serverless + if not self.is_serverless: + if versions.is_version_identifier(self.distribution_version) and ( + versions.Version.from_string(self.distribution_version) >= versions.Version.from_string("8.0.0") + ): + _mimetype_header_to_compat("Accept", request_headers) + _mimetype_header_to_compat("Content-Type", request_headers) + + if params: + target = f"{path}?{_quote_query(params)}" + else: + target = path + + meta, resp_body = await self.transport.perform_request( + method, + target, + headers=request_headers, + body=body, + request_timeout=self._request_timeout, + max_retries=self._max_retries, + retry_on_status=self._retry_on_status, + retry_on_timeout=self._retry_on_timeout, + client_meta=self._client_meta, ) + + # HEAD with a 404 is returned as a normal response + # since this is used as an 'exists' functionality. + if not (method == "HEAD" and meta.status == 404) and ( + not 200 <= meta.status < 299 + and (self._ignore_status is DEFAULT or self._ignore_status is None or meta.status not in self._ignore_status) + ): + message = str(resp_body) + + # If the response is an error response try parsing + # the raw Elasticsearch error before raising. 
+ if isinstance(resp_body, dict): + try: + error = resp_body.get("error", message) + if isinstance(error, dict) and "type" in error: + error = error["type"] + message = error + except (ValueError, KeyError, TypeError): + pass + + raise HTTP_EXCEPTIONS.get(meta.status, ApiError)(message=message, meta=meta, body=resp_body) + + # 'Warning' headers should be reraised as 'ElasticsearchWarning' + if "warning" in meta.headers: + warning_header = (meta.headers.get("warning") or "").strip() + warning_messages: Iterable[str] = _WARNING_RE.findall(warning_header) or (warning_header,) + stacklevel = warn_stacklevel() + for warning_message in warning_messages: + warnings.warn( + warning_message, + category=ElasticsearchWarning, + stacklevel=stacklevel, + ) + + if method == "HEAD": + response = HeadApiResponse(meta=meta) + elif isinstance(resp_body, dict): + response = ObjectApiResponse(body=resp_body, meta=meta) # type: ignore[assignment] + elif isinstance(resp_body, list): + response = ListApiResponse(body=resp_body, meta=meta) # type: ignore[assignment] + elif isinstance(resp_body, str): + response = TextApiResponse( # type: ignore[assignment] + body=resp_body, + meta=meta, + ) + elif isinstance(resp_body, bytes): + response = BinaryApiResponse(body=resp_body, meta=meta) # type: ignore[assignment] + else: + response = ApiResponse(body=resp_body, meta=meta) # type: ignore[assignment] + + return response diff --git a/esrally/client/common.py b/esrally/client/common.py new file mode 100644 index 000000000..e0ca051c9 --- /dev/null +++ b/esrally/client/common.py @@ -0,0 +1,59 @@ +import re +from collections.abc import Mapping +from datetime import date, datetime +from typing import Any + +from elastic_transport.client_utils import percent_encode +from elasticsearch import VERSION + + +def _client_major_version_to_str(version: tuple) -> str: + return str(version[0]) + + +_WARNING_RE = re.compile(r"\"([^\"]*)\"") +_COMPAT_MIMETYPE_TEMPLATE = "application/vnd.elasticsearch+%s; compatible-with=" + _client_major_version_to_str(VERSION) +_COMPAT_MIMETYPE_RE = re.compile(r"application/(json|x-ndjson|vnd\.mapbox-vector-tile)") +_COMPAT_MIMETYPE_SUB = _COMPAT_MIMETYPE_TEMPLATE % (r"\g<1>",) + + +def _mimetype_header_to_compat(header, request_headers): + # Converts all parts of an Accept/Content-Type header + # from application/X -> application/vnd.elasticsearch+X + mimetype = request_headers.get(header, None) if request_headers else None + if mimetype: + request_headers[header] = _COMPAT_MIMETYPE_RE.sub(_COMPAT_MIMETYPE_SUB, mimetype) + + +def _escape(value: Any) -> str: + """ + Escape a single value of a URL string or a query parameter. If it is a list + or tuple, turn it into a comma-separated string first.
+ """ + + # make sequences into comma-separated strings + if isinstance(value, (list, tuple)): + value = ",".join([_escape(item) for item in value]) + + # dates and datetimes into isoformat + elif isinstance(value, (date, datetime)): + value = value.isoformat() + + # make bools into true/false strings + elif isinstance(value, bool): + value = str(value).lower() + + elif isinstance(value, bytes): + return value.decode("utf-8", "surrogatepass") + + if not isinstance(value, str): + return str(value) + return value + + +def _quote(value: Any) -> str: + return percent_encode(_escape(value), ",*") + + +def _quote_query(query: Mapping[str, Any]) -> str: + return "&".join([f"{k}={_quote(v)}" for k, v in query.items()]) diff --git a/esrally/driver/runner.py b/esrally/driver/runner.py index 13e8957d7..aff3087ed 100644 --- a/esrally/driver/runner.py +++ b/esrally/driver/runner.py @@ -14,8 +14,9 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. - +import abc import asyncio +import contextlib import contextvars import json import logging @@ -24,142 +25,132 @@ import sys import time from collections import Counter, OrderedDict +from collections.abc import Callable, Iterable from copy import deepcopy from enum import Enum from functools import total_ordering from io import BytesIO from os.path import commonprefix -from types import FunctionType -from typing import Any, Optional +from typing import Any import ijson -from elasticsearch import AsyncElasticsearch +from elasticsearch import AsyncElasticsearch, BadRequestError +from typing_extensions import Self from esrally import exceptions, track, types +from esrally.client.asynchronous import RallyAsyncElasticsearch from esrally.utils import convert -from esrally.utils.versions import Version +from esrally.utils.versions import Version, is_serverless # Mapping from operation type to specific runner - -__RUNNERS = {} - - -def register_default_runners(config: Optional[types.Config] = None): - register_runner(track.OperationType.Bulk, BulkIndex(), async_runner=True) - register_runner(track.OperationType.ForceMerge, ForceMerge(), async_runner=True) - register_runner(track.OperationType.IndexStats, Retry(IndicesStats()), async_runner=True) - register_runner(track.OperationType.NodeStats, NodeStats(), async_runner=True) - register_runner(track.OperationType.Search, Query(config=config), async_runner=True) - register_runner(track.OperationType.PaginatedSearch, Query(config=config), async_runner=True) - register_runner(track.OperationType.CompositeAgg, Query(config=config), async_runner=True) - register_runner(track.OperationType.ScrollSearch, Query(config=config), async_runner=True) - register_runner(track.OperationType.RawRequest, RawRequest(), async_runner=True) - register_runner(track.OperationType.Composite, Composite(), async_runner=True) - register_runner(track.OperationType.SubmitAsyncSearch, SubmitAsyncSearch(), async_runner=True) - register_runner(track.OperationType.GetAsyncSearch, Retry(GetAsyncSearch(), retry_until_success=True), async_runner=True) - register_runner(track.OperationType.DeleteAsyncSearch, DeleteAsyncSearch(), async_runner=True) - register_runner(track.OperationType.OpenPointInTime, OpenPointInTime(), async_runner=True) - register_runner(track.OperationType.ClosePointInTime, ClosePointInTime(), async_runner=True) - register_runner(track.OperationType.Sql, Sql(), async_runner=True) - register_runner(track.OperationType.FieldCaps, FieldCaps(),
async_runner=True) - register_runner(track.OperationType.Esql, Esql(), async_runner=True) +__RUNNERS: dict[str, "Runner"] = {} + + +def register_default_runners(cfg: types.Config | None = None) -> None: + register_runner(track.OperationType.Bulk, BulkIndex()) + register_runner(track.OperationType.ForceMerge, ForceMerge()) + register_runner(track.OperationType.IndexStats, Retry(IndicesStats())) + register_runner(track.OperationType.NodeStats, NodeStats()) + register_runner(track.OperationType.Search, Query(cfg=cfg)) + register_runner(track.OperationType.PaginatedSearch, Query(cfg=cfg)) + register_runner(track.OperationType.CompositeAgg, Query(cfg=cfg)) + register_runner(track.OperationType.ScrollSearch, Query(cfg=cfg)) + register_runner(track.OperationType.RawRequest, RawRequest()) + register_runner(track.OperationType.Composite, Composite()) + register_runner(track.OperationType.SubmitAsyncSearch, SubmitAsyncSearch()) + register_runner(track.OperationType.GetAsyncSearch, Retry(GetAsyncSearch(), retry_until_success=True)) + register_runner(track.OperationType.DeleteAsyncSearch, DeleteAsyncSearch()) + register_runner(track.OperationType.OpenPointInTime, OpenPointInTime()) + register_runner(track.OperationType.ClosePointInTime, ClosePointInTime()) + register_runner(track.OperationType.Sql, Sql()) + register_runner(track.OperationType.FieldCaps, FieldCaps()) + register_runner(track.OperationType.Esql, Esql()) # This is an administrative operation but there is no need for a retry here as we don't issue a request - register_runner(track.OperationType.Sleep, Sleep(), async_runner=True) + register_runner(track.OperationType.Sleep, Sleep()) # these requests should not be retried as they are not idempotent - register_runner(track.OperationType.CreateSnapshot, CreateSnapshot(), async_runner=True) - register_runner(track.OperationType.RestoreSnapshot, RestoreSnapshot(), async_runner=True) - register_runner(track.OperationType.Downsample, Downsample(), async_runner=True) + register_runner(track.OperationType.CreateSnapshot, CreateSnapshot()) + register_runner(track.OperationType.RestoreSnapshot, RestoreSnapshot()) + register_runner(track.OperationType.Downsample, Downsample()) # We treat the following as administrative commands and thus already start to wrap them in a retry. 
- register_runner(track.OperationType.ClusterHealth, Retry(ClusterHealth()), async_runner=True) - register_runner(track.OperationType.PutPipeline, Retry(PutPipeline()), async_runner=True) - register_runner(track.OperationType.Refresh, Retry(Refresh()), async_runner=True) - register_runner(track.OperationType.CreateIndex, Retry(CreateIndex()), async_runner=True) - register_runner(track.OperationType.DeleteIndex, Retry(DeleteIndex(config=config)), async_runner=True) - register_runner(track.OperationType.CreateComponentTemplate, Retry(CreateComponentTemplate()), async_runner=True) - register_runner(track.OperationType.DeleteComponentTemplate, Retry(DeleteComponentTemplate()), async_runner=True) - register_runner(track.OperationType.CreateComposableTemplate, Retry(CreateComposableTemplate()), async_runner=True) - register_runner(track.OperationType.DeleteComposableTemplate, Retry(DeleteComposableTemplate(config=config)), async_runner=True) - register_runner(track.OperationType.CreateDataStream, Retry(CreateDataStream()), async_runner=True) - register_runner(track.OperationType.DeleteDataStream, Retry(DeleteDataStream()), async_runner=True) - register_runner(track.OperationType.CreateIndexTemplate, Retry(CreateIndexTemplate()), async_runner=True) - register_runner(track.OperationType.DeleteIndexTemplate, Retry(DeleteIndexTemplate()), async_runner=True) - register_runner(track.OperationType.ShrinkIndex, Retry(ShrinkIndex()), async_runner=True) - register_runner(track.OperationType.CreateMlDatafeed, Retry(CreateMlDatafeed()), async_runner=True) - register_runner(track.OperationType.DeleteMlDatafeed, Retry(DeleteMlDatafeed()), async_runner=True) - register_runner(track.OperationType.StartMlDatafeed, Retry(StartMlDatafeed()), async_runner=True) - register_runner(track.OperationType.StopMlDatafeed, Retry(StopMlDatafeed()), async_runner=True) - register_runner(track.OperationType.CreateMlJob, Retry(CreateMlJob()), async_runner=True) - register_runner(track.OperationType.DeleteMlJob, Retry(DeleteMlJob()), async_runner=True) - register_runner(track.OperationType.OpenMlJob, Retry(OpenMlJob()), async_runner=True) - register_runner(track.OperationType.CloseMlJob, Retry(CloseMlJob()), async_runner=True) - register_runner(track.OperationType.DeleteSnapshotRepository, Retry(DeleteSnapshotRepository()), async_runner=True) - register_runner(track.OperationType.CreateSnapshotRepository, Retry(CreateSnapshotRepository()), async_runner=True) - register_runner(track.OperationType.WaitForSnapshotCreate, Retry(WaitForSnapshotCreate()), async_runner=True) - register_runner(track.OperationType.WaitForCurrentSnapshotsCreate, Retry(WaitForCurrentSnapshotsCreate()), async_runner=True) - register_runner(track.OperationType.WaitForRecovery, Retry(IndicesRecovery()), async_runner=True) - register_runner(track.OperationType.PutSettings, Retry(PutSettings()), async_runner=True) - register_runner(track.OperationType.CreateTransform, Retry(CreateTransform()), async_runner=True) - register_runner(track.OperationType.StartTransform, Retry(StartTransform()), async_runner=True) - register_runner(track.OperationType.WaitForTransform, Retry(WaitForTransform()), async_runner=True) - register_runner(track.OperationType.DeleteTransform, Retry(DeleteTransform()), async_runner=True) - register_runner(track.OperationType.TransformStats, Retry(TransformStats()), async_runner=True) - register_runner(track.OperationType.CreateIlmPolicy, Retry(CreateIlmPolicy()), async_runner=True) - register_runner(track.OperationType.DeleteIlmPolicy, 
Retry(DeleteIlmPolicy()), async_runner=True) - register_runner(track.OperationType.RunUntil, Retry(RunUntil()), async_runner=True) - - -def runner_for(operation_type): + register_runner(track.OperationType.ClusterHealth, Retry(ClusterHealth())) + register_runner(track.OperationType.PutPipeline, Retry(PutPipeline())) + register_runner(track.OperationType.Refresh, Retry(Refresh())) + register_runner(track.OperationType.CreateIndex, Retry(CreateIndex())) + register_runner(track.OperationType.DeleteIndex, Retry(DeleteIndex(cfg=cfg))) + register_runner(track.OperationType.CreateComponentTemplate, Retry(CreateComponentTemplate())) + register_runner(track.OperationType.DeleteComponentTemplate, Retry(DeleteComponentTemplate())) + register_runner(track.OperationType.CreateComposableTemplate, Retry(CreateComposableTemplate())) + register_runner(track.OperationType.DeleteComposableTemplate, Retry(DeleteComposableTemplate(cfg=cfg))) + register_runner(track.OperationType.CreateDataStream, Retry(CreateDataStream())) + register_runner(track.OperationType.DeleteDataStream, Retry(DeleteDataStream())) + register_runner(track.OperationType.CreateIndexTemplate, Retry(CreateIndexTemplate())) + register_runner(track.OperationType.DeleteIndexTemplate, Retry(DeleteIndexTemplate())) + register_runner(track.OperationType.ShrinkIndex, Retry(ShrinkIndex())) + register_runner(track.OperationType.CreateMlDatafeed, Retry(CreateMlDatafeed())) + register_runner(track.OperationType.DeleteMlDatafeed, Retry(DeleteMlDatafeed())) + register_runner(track.OperationType.StartMlDatafeed, Retry(StartMlDatafeed())) + register_runner(track.OperationType.StopMlDatafeed, Retry(StopMlDatafeed())) + register_runner(track.OperationType.CreateMlJob, Retry(CreateMlJob())) + register_runner(track.OperationType.DeleteMlJob, Retry(DeleteMlJob())) + register_runner(track.OperationType.OpenMlJob, Retry(OpenMlJob())) + register_runner(track.OperationType.CloseMlJob, Retry(CloseMlJob())) + register_runner(track.OperationType.DeleteSnapshotRepository, Retry(DeleteSnapshotRepository())) + register_runner(track.OperationType.CreateSnapshotRepository, Retry(CreateSnapshotRepository())) + register_runner(track.OperationType.WaitForSnapshotCreate, Retry(WaitForSnapshotCreate())) + register_runner(track.OperationType.WaitForCurrentSnapshotsCreate, Retry(WaitForCurrentSnapshotsCreate())) + register_runner(track.OperationType.WaitForRecovery, Retry(IndicesRecovery())) + register_runner(track.OperationType.PutSettings, Retry(PutSettings())) + register_runner(track.OperationType.CreateTransform, Retry(CreateTransform())) + register_runner(track.OperationType.StartTransform, Retry(StartTransform())) + register_runner(track.OperationType.WaitForTransform, Retry(WaitForTransform())) + register_runner(track.OperationType.DeleteTransform, Retry(DeleteTransform())) + register_runner(track.OperationType.TransformStats, Retry(TransformStats())) + register_runner(track.OperationType.CreateIlmPolicy, Retry(CreateIlmPolicy())) + register_runner(track.OperationType.DeleteIlmPolicy, Retry(DeleteIlmPolicy())) + register_runner(track.OperationType.RunUntil, Retry(RunUntil())) + + +def runner_for(operation_type: str) -> "Runner": try: return __RUNNERS[operation_type] except KeyError: raise exceptions.RallyError(f"No runner available for operation-type: [{operation_type}]") -def enable_assertions(enabled): +def enable_assertions(enabled: bool) -> None: """ Changes whether assertions are enabled. The status changes for all tasks that are executed after this call. 
:param enabled: ``True`` to enable assertions, ``False`` to disable them.
     """
-    AssertingRunner.assertions_enabled = enabled
+    Runner.assertions_enabled = enabled
+
 
+RunnerType = Callable[[AsyncElasticsearch, dict[str, Any]], Any]
 
-def register_runner(operation_type, runner, **kwargs):
+
+def register_runner(operation_type: track.OperationType | str, runner: RunnerType, async_runner: bool | None = None) -> None:
     logger = logging.getLogger(__name__)
-    async_runner = kwargs.get("async_runner", False)
     if isinstance(operation_type, track.OperationType):
         operation_type = operation_type.to_hyphenated_string()
 
-    if not async_runner:
-        raise exceptions.RallyAssertionError(
-            f"Runner [{str(runner)}] must be implemented as async runner and registered with async_runner=True."
-        )
+    if not isinstance(runner, Runner) and not callable(runner):
+        raise TypeError(f"Runner [{runner!r}] must be a Runner instance or an async callable.")
 
-    if hasattr(unwrap(runner), "multi_cluster"):
-        if "__aenter__" in dir(runner) and "__aexit__" in dir(runner):
-            if logger.isEnabledFor(logging.DEBUG):
-                logger.debug("Registering runner object [%s] for [%s].", str(runner), str(operation_type))
-            cluster_aware_runner = _multi_cluster_runner(runner, str(runner), context_manager_enabled=True)
-        else:
-            if logger.isEnabledFor(logging.DEBUG):
-                logger.debug("Registering context-manager capable runner object [%s] for [%s].", str(runner), str(operation_type))
-            cluster_aware_runner = _multi_cluster_runner(runner, str(runner))
-    # we'd rather use callable() but this will erroneously also classify a class as callable...
-    elif isinstance(runner, FunctionType):
-        if logger.isEnabledFor(logging.DEBUG):
-            logger.debug("Registering runner function [%s] for [%s].", str(runner), str(operation_type))
-        cluster_aware_runner = _single_cluster_runner(runner, runner.__name__)
-    elif "__aenter__" in dir(runner) and "__aexit__" in dir(runner):
-        if logger.isEnabledFor(logging.DEBUG):
-            logger.debug("Registering context-manager capable runner object [%s] for [%s].", str(runner), str(operation_type))
-        cluster_aware_runner = _single_cluster_runner(runner, str(runner), context_manager_enabled=True)
-    else:
-        if logger.isEnabledFor(logging.DEBUG):
-            logger.debug("Registering runner object [%s] for [%s].", str(runner), str(operation_type))
-        cluster_aware_runner = _single_cluster_runner(runner, str(runner))
-
-    __RUNNERS[operation_type] = _with_completion(_with_assertions(cluster_aware_runner))
+    if async_runner is not None:
+        if not async_runner:
+            raise exceptions.RallyAssertionError(f"Runner [{runner}] must be implemented as async runner.")
+        logger.warning("The async_runner flag is deprecated as Rally now only supports async runners.")
+
+    logger.debug("Registering runner [%s] for [%s].", str(runner), str(operation_type))
+    __RUNNERS[operation_type] = wrap(runner)
+
+
+def wrap(runner: RunnerType) -> "Runner":
+    if isinstance(runner, Runner):
+        return runner
+    return WrapperRunner(runner)
 
 
 # Only intended for unit-testing!
@@ -167,24 +158,42 @@ def remove_runner(operation_type):
     del __RUNNERS[operation_type]
 
 
-class Runner:
+class Runner(abc.ABC):
     """
     Base class for all operations against Elasticsearch. 
""" - def __init__(self, *args, config=None, **kwargs): - super().__init__(*args, **kwargs) + assertions_enabled = False + + def __init__(self, *, cfg: types.Config | None = None): self.logger = logging.getLogger(__name__) self.serverless_mode = False self.serverless_operator = False - if config: - self.serverless_mode = convert.to_bool(config.opts("driver", "serverless.mode", mandatory=False, default_value=False)) - self.serverless_operator = convert.to_bool(config.opts("driver", "serverless.operator", mandatory=False, default_value=False)) + if cfg: + self.serverless_mode = convert.to_bool(cfg.opts("driver", "serverless.mode", mandatory=False, default_value=False)) + self.serverless_operator = convert.to_bool(cfg.opts("driver", "serverless.operator", mandatory=False, default_value=False)) - async def __aenter__(self): + async def __aenter__(self) -> Self: return self - async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]): + def call(self, es: AsyncElasticsearch, params: dict[str, Any]) -> Any: + return_value = self(es, params) + if return_value is not None and not self.assertions_enabled: + return return_value + + if not (assertions := params.pop("assertions", [])): + return return_value + + name = params.get("name") + if not isinstance(return_value, dict): + raise exceptions.DataError(f"Cannot check assertion in [{name}] as [{self.delegate!r}] did not return a dict.") + + for assertion in assertions: + check_assertion(name, assertion, return_value) + return return_value + + @abc.abstractmethod + async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]) -> Any: """ Runs the actual method that should be benchmarked. @@ -196,7 +205,7 @@ async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]): """ raise NotImplementedError("abstract operation") - async def __aexit__(self, exc_type, exc_val, exc_tb): + async def __aexit__(self, exc_type, exc_val, exc_tb) -> bool: return False def _default_kw_params(self, params): @@ -209,9 +218,7 @@ def _default_kw_params(self, params): "params": "request-params", "request_timeout": "request-timeout", } - full_result = {k: params.get(v) for (k, v) in kw_dict.items()} - # filter Nones - return dict(filter(lambda kv: kv[1] is not None, full_result.items())) + return {k: params.get(v) for k, v in kw_dict.items() if v is not None} @staticmethod def _transport_request_params(params): @@ -237,57 +244,6 @@ def _transport_request_params(params): return params, request_params, transport_params, headers - -class Delegator: - """ - Mixin to unify delegate handling - """ - - def __init__(self, delegate, *args, **kwargs): - super().__init__(*args, **kwargs) - self.delegate = delegate - - -def unwrap(runner): - """ - Unwraps all delegators until the actual runner. - - :param runner: An arbitrarily nested chain of delegators around a runner. - :return: The innermost runner. 
-    """
-    delegate = getattr(runner, "delegate", None)
-    if delegate:
-        return unwrap(delegate)
-    else:
-        return runner
-
-
-def _single_cluster_runner(runnable, name, context_manager_enabled=False):
-    # only pass the default ES client
-    return MultiClientRunner(runnable, name, lambda es: es["default"], context_manager_enabled)
-
-
-def _multi_cluster_runner(runnable, name, context_manager_enabled=False):
-    # pass all ES clients
-    return MultiClientRunner(runnable, name, lambda es: es, context_manager_enabled)
-
-
-def _with_assertions(delegate):
-    return AssertingRunner(delegate)
-
-
-def _with_completion(delegate):
-    unwrapped_runner = unwrap(delegate)
-    if hasattr(unwrapped_runner, "completed") and hasattr(unwrapped_runner, "percent_completed"):
-        return WithCompletion(delegate, unwrapped_runner)
-    else:
-        return NoCompletion(delegate)
-
-
-class NoCompletion(Runner, Delegator):
-    def __init__(self, delegate):
-        super().__init__(delegate=delegate)
-
     @property
     def completed(self):
         return None
@@ -296,144 +252,99 @@ def completed(self):
     def percent_completed(self):
         return None
 
-    async def __call__(self, *args):
-        return await self.delegate(*args)
 
-    def __repr__(self, *args, **kwargs):
-        return repr(self.delegate)
+def _greater_than(expected, actual) -> bool:
+    return actual > expected
 
-    async def __aenter__(self):
-        await self.delegate.__aenter__()
-        return self
 
-    async def __aexit__(self, exc_type, exc_val, exc_tb):
-        return await self.delegate.__aexit__(exc_type, exc_val, exc_tb)
+def _greater_than_or_equal(expected, actual) -> bool:
+    return actual >= expected
 
 
-class WithCompletion(Runner, Delegator):
-    def __init__(self, delegate, progressable):
-        super().__init__(delegate=delegate)
-        self.progressable = progressable
+def _smaller_than(expected, actual) -> bool:
+    return actual < expected
 
-    @property
-    def completed(self):
-        return self.progressable.completed
 
-    @property
-    def percent_completed(self):
-        return self.progressable.percent_completed
+def _smaller_than_or_equal(expected, actual) -> bool:
+    return actual <= expected
 
-    async def __call__(self, *args):
-        return await self.delegate(*args)
 
-    def __repr__(self, *args, **kwargs):
-        return repr(self.delegate)
+def _equal(expected, actual) -> bool:
    return actual == expected
 
-    async def __aenter__(self):
-        await self.delegate.__aenter__()
-        return self
 
-    async def __aexit__(self, exc_type, exc_val, exc_tb):
-        return await self.delegate.__aexit__(exc_type, exc_val, exc_tb)
+_PREDICATES = {
+    ">": _greater_than,
+    ">=": _greater_than_or_equal,
+    "<": _smaller_than,
+    "<=": _smaller_than_or_equal,
+    "==": _equal,
+}
 
 
-class MultiClientRunner(Runner, Delegator):
-    def __init__(self, runnable, name, client_extractor, context_manager_enabled=False):
-        super().__init__(delegate=runnable)
-        self.name = name
-        self.client_extractor = client_extractor
-        self.context_manager_enabled = context_manager_enabled
+def check_assertion(op_name: str | None, assertion: dict[str, Any], properties: dict[str, Any]):
+    path = assertion["property"]
+    predicate_name = assertion["condition"]
+    expected_value = assertion["value"]
+    actual_value = properties
+    for k in path.split("."):
+        actual_value = actual_value[k]
+    predicate = _PREDICATES[predicate_name]
+    # raise only when the assertion does not hold
+    if not predicate(expected_value, actual_value):
+        if op_name:
+            msg = f"Expected [{path}] in [{op_name}] to be {predicate_name} [{expected_value}] but was [{actual_value}]."
+        else:
+            msg = f"Expected [{path}] to be {predicate_name} [{expected_value}] but was [{actual_value}]."
+        raise exceptions.RallyTaskAssertionError(msg)
 
-    async def __call__(self, *args):
-        return await self.delegate(self.client_extractor(args[0]), *args[1:])
 
-    def __repr__(self, *args, **kwargs):
-        if self.context_manager_enabled:
-            return "user-defined context-manager enabled runner for [%s]" % self.name
-        else:
-            return "user-defined runner for [%s]" % self.name
+class WrapperRunner(Runner):
+    """Runner that wraps another runner."""
 
-    async def __aenter__(self):
-        if self.context_manager_enabled:
-            await self.delegate.__aenter__()
-        return self
+    def __init__(self, delegate: RunnerType, *, cfg: types.Config | None = None):
+        super().__init__(cfg=cfg)
+        self.delegate = delegate
 
-    async def __aexit__(self, exc_type, exc_val, exc_tb):
-        if self.context_manager_enabled:
-            return await self.delegate.__aexit__(exc_type, exc_val, exc_tb)
-        else:
-            return False
+    def __repr__(self):
+        return getattr(self.delegate, "__name__", None) or repr(self.delegate)
 
+    async def __aenter__(self) -> Self:
+        # enter the delegate's context (if any) but return the wrapper itself,
+        # matching the declared Self return type
+        func = getattr(self.delegate, "__aenter__", None)
+        if func is not None:
+            await func()
+        return self
 
-class AssertingRunner(Runner, Delegator):
-    assertions_enabled = False
+    async def __aexit__(self, exc_type, exc_val, exc_tb) -> bool:
+        func = getattr(self.delegate, "__aexit__", None)
+        if func is None:
+            return False
+        return await func(exc_type, exc_val, exc_tb)
 
-    def __init__(self, delegate):
-        super().__init__(delegate=delegate)
-        self.predicates = {
-            ">": self.greater_than,
-            ">=": self.greater_than_or_equal,
-            "<": self.smaller_than,
-            "<=": self.smaller_than_or_equal,
-            "==": self.equal,
-        }
+    async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]) -> Any:
+        return await self.delegate(es, params)
 
-    def greater_than(self, expected, actual):
-        return actual > expected
-
-    def greater_than_or_equal(self, expected, actual):
-        return actual >= expected
-
-    def smaller_than(self, expected, actual):
-        return actual < expected
-
-    def smaller_than_or_equal(self, expected, actual):
-        return actual <= expected
-
-    def equal(self, expected, actual):
-        return actual == expected
-
-    def check_assertion(self, op_name, assertion, properties):
-        path = assertion["property"]
-        predicate_name = assertion["condition"]
-        expected_value = assertion["value"]
-        actual_value = properties
-        for k in path.split("."):
-            actual_value = actual_value[k]
-        predicate = self.predicates[predicate_name]
-        success = predicate(expected_value, actual_value)
-        if not success:
-            if op_name:
-                msg = f"Expected [{path}] in [{op_name}] to be {predicate_name} [{expected_value}] but was [{actual_value}]."
-            else:
-                msg = f"Expected [{path}] to be {predicate_name} [{expected_value}] but was [{actual_value}]."
-
-        raise exceptions.RallyTaskAssertionError(msg)
 
-    async def __call__(self, *args):
-        params = args[1]
-        return_value = await self.delegate(*args)
-        if AssertingRunner.assertions_enabled and "assertions" in params:
-            op_name = params.get("name")
-            if isinstance(return_value, dict):
-                for assertion in params["assertions"]:
-                    self.check_assertion(op_name, assertion, return_value)
-            else:
-                raise exceptions.DataError(f"Cannot check assertion in [{op_name}] as [{repr(self.delegate)}] did not return a dict.")
-        return return_value
+    @property
+    def completed(self):
+        return getattr(unwrap(self.delegate), "completed", None)
 
-    def __repr__(self, *args, **kwargs):
-        return repr(self.delegate)
+    @property
+    def percent_completed(self):
+        return getattr(unwrap(self.delegate), "percent_completed", None)
 
-    async def __aenter__(self):
-        await self.delegate.__aenter__()
-        return self
 
-    async def __aexit__(self, exc_type, exc_val, exc_tb):
-        return await self.delegate.__aexit__(exc_type, exc_val, exc_tb)
+def unwrap(runner: RunnerType) -> RunnerType:
+    """
+    Unwraps all delegators until the actual runner.
+
+    :param runner: An arbitrarily nested chain of delegators around a runner.
+    :return: The innermost runner.
+    """
+    while delegate := getattr(runner, "delegate", None):
+        runner = delegate
+    return runner
 
 
-def mandatory(params: dict[str, Any], key, op) -> Any:
+def mandatory(params: dict[str, Any], key: str, op: Any) -> Any:
     try:
         return params[key]
     except KeyError:
@@ -443,19 +354,17 @@ def mandatory(params: dict[str, Any], key, op) -> Any:
         )
 
 
-def escape(v):
-    """
-    Escapes values so they can be used as query parameters
+def escape(v: Any) -> str | None:
+    """Escapes values so they can be used as query parameters.
 
     :param v: The raw value. May be None.
     :return: The escaped value.
     """
     if v is None:
         return None
-    elif isinstance(v, bool):
+    if isinstance(v, bool):
         return str(v).lower()
-    else:
-        return str(v)
+    return str(v)
 
 
 class BulkIndex(Runner):
@@ -463,7 +372,7 @@ class BulkIndex(Runner):
     Bulk indexes the given documents.
     """
 
-    async def __call__(self, es, params):
+    async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]) -> dict[str, Any]:
         """
         Runs one bulk indexing operation.
 
@@ -518,7 +427,7 @@ async def __call__(self, es, params):
         unit = mandatory(params, "unit", self)
         # parse responses lazily in the standard case - responses might be large thus parsing skews results and if no
         # errors have occurred we only need a small amount of information from the potentially large response.
-        if not detailed_results:
+        if not detailed_results and isinstance(es, RallyAsyncElasticsearch):
             es.return_raw_response()
 
         if with_action_metadata:
@@ -691,7 +600,7 @@ def error_description(self, error_details):
             description = description + " | TRUNCATED " + self._error_status_summary(error_details)
         return description
 
-    def __repr__(self, *args, **kwargs):
+    def __repr__(self):
         return "bulk-index"
 
 
@@ -700,7 +609,7 @@ class ForceMerge(Runner):
     Runs a force merge operation against Elasticsearch. 
""" - async def __call__(self, es, params): + async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]): # pylint: disable=import-outside-toplevel import elasticsearch @@ -717,7 +626,9 @@ async def __call__(self, es, params): except elasticsearch.ConnectionTimeout: pass while not complete: - await asyncio.sleep(params.get("poll-period")) + poll_period = float(params.get("poll-period", -1)) + if poll_period >= 0.0: + await asyncio.sleep(poll_period) tasks = await es.tasks.list(params={"actions": "indices:admin/forcemerge"}) if len(tasks["nodes"]) == 0: # empty nodes response indicates no tasks @@ -725,7 +636,7 @@ async def __call__(self, es, params): else: await es.indices.forcemerge(**merge_params) - def __repr__(self, *args, **kwargs): + def __repr__(self): return "force-merge" @@ -745,7 +656,7 @@ def _get(self, v, path): def _safe_string(self, v): return str(v) if v is not None else None - async def __call__(self, es, params): + async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]) -> dict[str, Any]: api_kwargs = self._default_kw_params(params) index = api_kwargs.pop("index", "_all") condition = params.get("condition") @@ -773,7 +684,7 @@ async def __call__(self, es, params): "success": True, } - def __repr__(self, *args, **kwargs): + def __repr__(self): return "indices-stats" @@ -782,11 +693,11 @@ class NodeStats(Runner): Gather node stats for all nodes. """ - async def __call__(self, es, params): + async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]) -> None: request_timeout = params.get("request-timeout") await es.options(request_timeout=request_timeout).nodes.stats(metric="_all") - def __repr__(self, *args, **kwargs): + def __repr__(self): return "node-stats" @@ -895,12 +806,12 @@ class Query(Runner): * ``pages``: Total number of pages that have been retrieved. 
""" - def __init__(self, config=None): - super().__init__(config=config) + def __init__(self, *, cfg: types.Config | None = None): + super().__init__(cfg=cfg) self._search_after_extractor = SearchAfterExtractor() self._composite_agg_extractor = CompositeAggExtractor() - async def __call__(self, es, params): + async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]) -> dict[str, Any]: params, request_params, transport_params, headers = self._transport_request_params(params) # we don't set headers at the options level because the Query runner sets them via the client's '_perform_request' method es = es.options(**transport_params) @@ -926,9 +837,10 @@ async def __call__(self, es, params): # counter-intuitive but preserves prior behavior headers = None # disable eager response parsing - responses might be huge thus skewing results - es.return_raw_response() + if isinstance(es, RallyAsyncElasticsearch): + es.return_raw_response() - async def _search_after_query(es, params): + async def _search_after_query(es: AsyncElasticsearch, params: dict[str, Any]): index = params.get("index", "_all") pit_op = params.get("with-point-in-time-from") results = { @@ -950,11 +862,9 @@ async def _search_after_query(es, params): body["pit"] = {"id": pit_id, "keep_alive": "1m"} response = await self._raw_search(es, doc_type=None, index=index, body=body.copy(), params=request_params, headers=headers) - parsed, last_sort = self._search_after_extractor( - response, - bool(pit_op), - results.get("hits"), # type: ignore[arg-type] # TODO remove the below ignore when introducing type hints - ) + hits = results.get("hits") + assert hits is None or isinstance(hits, int) + parsed, last_sort = self._search_after_extractor(response, bool(pit_op), hits) results["pages"] = page results["weight"] = page if results.get("hits") is None: @@ -978,7 +888,7 @@ async def _search_after_query(es, params): return results - async def _composite_agg(es, params): + async def _composite_agg(es: AsyncElasticsearch, params: dict[str, Any]) -> dict[str, Any]: index = params.get("index", "_all") pit_op = params.get("with-point-in-time-from") results = { @@ -1011,12 +921,9 @@ async def _composite_agg(es, params): body_to_send = tree_copy_composite_agg(body, path_to_composite) response = await self._raw_search(es, doc_type=None, index=index, body=body_to_send, params=request_params, headers=headers) - parsed = self._composite_agg_extractor( - response, - bool(pit_op), - path_to_composite, - results.get("hits"), # type: ignore[arg-type] # TODO remove this ignore when introducing type hints - ) + hits = results.get("hits") + assert hits is None or isinstance(hits, int) + parsed = self._composite_agg_extractor(response, bool(pit_op), path_to_composite, hits) results["pages"] = page results["weight"] = page if results.get("hits") is None: @@ -1076,7 +983,7 @@ def tree_copy_composite_agg(obj, key_path): aggs[key_path[0]] = tree_copy_composite_agg(aggs[key_path[0]], key_path[1:]) return obj - async def _request_body_query(es, params): + async def _request_body_query(es: AsyncElasticsearch, params: dict[str, Any]): doc_type = params.get("type") r = await self._raw_search(es, doc_type, index, body, request_params, headers=headers) @@ -1128,7 +1035,7 @@ async def _request_body_query(es, params): "success": True, } - async def _scroll_query(es, params): + async def _scroll_query(es: AsyncElasticsearch, params: dict[str, Any]): hits = 0 hits_relation = None timed_out = False @@ -1160,12 +1067,8 @@ async def _scroll_query(es, params): 
                    all_results_collected = (size is not None and hits < size) or hits == 0
                 else:
                     # /_search/scroll does not accept request_cache so not providing params
-                    r = await es.perform_request(
-                        method="GET",
-                        path="/_search/scroll",
-                        body={"scroll_id": scroll_id, "scroll": "10s"},
-                        params=None,
-                        headers=headers,
+                    r = await es.perform_request(
+                        method="GET", path="/_search/scroll", body={"scroll_id": scroll_id, "scroll": "10s"}, headers=headers
                     )
                     props = parse(r, ["timed_out", "took"], ["hits.hits"])
                     timed_out = timed_out or props.get("timed_out", False)
@@ -1232,7 +1135,7 @@ def _query_headers(self, params):
         else:
             return {"Accept-Encoding": "identity"}
 
-    def __repr__(self, *args, **kwargs):
+    def __repr__(self):
         return "query"
 
 
@@ -1241,7 +1144,7 @@ def __init__(self):
         # extracts e.g. '[1609780186, "2"]' from '"sort": [1609780186, "2"]'
         self.sort_pattern = re.compile(r"sort\":([^\]]*])")
 
-    def __call__(self, response: BytesIO, get_point_in_time: bool, hits_total: Optional[int]) -> (dict, list):
+    def __call__(self, response: BytesIO, get_point_in_time: bool, hits_total: int | None) -> tuple[dict, list]:
         # not a class member as we would want to mutate over the course of execution for efficiency
         properties = ["timed_out", "took"]
         if get_point_in_time:
@@ -1266,7 +1169,7 @@ def _get_last_sort(self, response):
         """
         response_str = response.getvalue().decode("UTF-8")
         index_of_last_sort = response_str.rfind('"sort"')
-        last_sort_str = re.search(self.sort_pattern, response_str[index_of_last_sort::])
+        last_sort_str = re.search(self.sort_pattern, response_str[index_of_last_sort:])
         if last_sort_str is not None:
             return json.loads(last_sort_str.group(1))
         else:
@@ -1274,7 +1177,7 @@ def _get_last_sort(self, response):
 
 
 class CompositeAggExtractor:
-    def __call__(self, response: BytesIO, get_point_in_time: bool, path_to_composite_agg: list, hits_total: Optional[int]) -> dict:
+    def __call__(self, response: BytesIO, get_point_in_time: bool, path_to_composite_agg: list, hits_total: int | None) -> dict:
         # not a class member as we would want to mutate over the course of execution for efficiency
         properties = ["timed_out", "took"]
         if get_point_in_time:
@@ -1303,7 +1206,7 @@ class ClusterHealth(Runner):
     Get cluster health
     """
 
-    async def __call__(self, es, params):
+    async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]) -> dict[str, Any]:
         @total_ordering
         class ClusterHealthStatus(Enum):
             UNKNOWN = 0
@@ -1353,7 +1256,7 @@ def status(v):
         )
         return result
 
-    def __repr__(self, *args, **kwargs):
+    def __repr__(self):
         return "cluster-health"
 
 
@@ -1362,7 +1265,7 @@ class PutPipeline(Runner):
     Execute the `put pipeline API `_.
     """
 
-    async def __call__(self, es, params):
+    async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]) -> None:
         await es.ingest.put_pipeline(
             id=mandatory(params, "id", self),
             body=mandatory(params, "body", self),
             master_timeout=params.get("master-timeout"),
             timeout=params.get("timeout"),
         )
 
-    def __repr__(self, *args, **kwargs):
+    def __repr__(self):
         return "put-pipeline"
 
 
@@ -1379,11 +1282,11 @@ class Refresh(Runner):
     Execute the `refresh API `_.
     """
 
-    async def __call__(self, es, params):
+    async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]) -> None:
         api_kwargs = self._default_kw_params(params)
         await es.indices.refresh(**api_kwargs)
 
-    def __repr__(self, *args, **kwargs):
+    def __repr__(self):
         return "refresh"
 
 
@@ -1392,10 +1295,10 @@ class CreateIndex(Runner):
     Execute the `create index API `_. 
""" - async def __call__(self, es, params): + async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]) -> dict[str, Any]: indices = mandatory(params, "indices", self) api_kwargs = self._default_kw_params(params) - ## ignore invalid entries rather than erroring + # It ignores invalid entries. for term in ["index", "body"]: api_kwargs.pop(term, None) for index, body in indices: @@ -1406,7 +1309,7 @@ async def __call__(self, es, params): "success": True, } - def __repr__(self, *args, **kwargs): + def __repr__(self): return "create-index" @@ -1415,7 +1318,7 @@ class CreateDataStream(Runner): Execute the `create data stream API `_. """ - async def __call__(self, es, params): + async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]) -> dict[str, Any]: data_streams = mandatory(params, "data-streams", self) request_params = mandatory(params, "request-params", self) for data_stream in data_streams: @@ -1426,11 +1329,11 @@ async def __call__(self, es, params): "success": True, } - def __repr__(self, *args, **kwargs): + def __repr__(self) -> str: return "create-data-stream" -async def set_destructive_requires_name(es, value): +async def set_destructive_requires_name(es: AsyncElasticsearch, value): """ Sets `action.destructive_requires_name` to provided value :return: the prior setting, if any @@ -1452,7 +1355,7 @@ class DeleteIndex(Runner): Execute the `delete index API `_. """ - async def __call__(self, es, params): + async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]) -> dict[str, Any]: ops = 0 indices = mandatory(params, "indices", self) @@ -1482,7 +1385,7 @@ async def __call__(self, es, params): "success": True, } - def __repr__(self, *args, **kwargs): + def __repr__(self): return "delete-index" @@ -1491,7 +1394,7 @@ class DeleteDataStream(Runner): Execute the `delete data stream API `_. """ - async def __call__(self, es, params): + async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]) -> dict[str, Any]: ops = 0 data_streams = mandatory(params, "data-streams", self) @@ -1513,7 +1416,7 @@ async def __call__(self, es, params): "success": True, } - def __repr__(self, *args, **kwargs): + def __repr__(self): return "delete-data-stream" @@ -1523,7 +1426,7 @@ class CreateComponentTemplate(Runner): `_. """ - async def __call__(self, es, params): + async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]) -> dict[str, Any]: templates = mandatory(params, "templates", self) request_params = mandatory(params, "request-params", self) for name, body in templates: @@ -1534,7 +1437,7 @@ async def __call__(self, es, params): "success": True, } - def __repr__(self, *args, **kwargs): + def __repr__(self): return "create-component-template" @@ -1544,7 +1447,7 @@ class DeleteComponentTemplate(Runner): `_. """ - async def __call__(self, es, params): + async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]) -> dict[str, Any]: template_names = mandatory(params, "templates", self) only_if_exists = mandatory(params, "only-if-exists", self) request_params = mandatory(params, "request-params", self) @@ -1564,7 +1467,7 @@ async def __call__(self, es, params): "success": True, } - def __repr__(self, *args, **kwargs): + def __repr__(self): return "delete-component-template" @@ -1573,7 +1476,7 @@ class CreateComposableTemplate(Runner): Execute the `PUT index template API `_. 
""" - async def __call__(self, es, params): + async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]) -> dict[str, Any]: templates = mandatory(params, "templates", self) request_params = mandatory(params, "request-params", self) for template, body in templates: @@ -1585,7 +1488,7 @@ async def __call__(self, es, params): "success": True, } - def __repr__(self, *args, **kwargs): + def __repr__(self): return "create-composable-template" @@ -1594,7 +1497,7 @@ class DeleteComposableTemplate(Runner): Execute the `PUT index template API `_. """ - async def __call__(self, es, params): + async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]) -> dict[str, Any]: templates = mandatory(params, "templates", self) only_if_exists = mandatory(params, "only-if-exists", self) request_params = mandatory(params, "request-params", self) @@ -1634,7 +1537,7 @@ async def __call__(self, es, params): "success": True, } - def __repr__(self, *args, **kwargs): + def __repr__(self) -> str: return "delete-composable-template" @@ -1643,7 +1546,7 @@ class CreateIndexTemplate(Runner): Execute the `PUT index template API `_. """ - async def __call__(self, es, params): + async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]): templates = mandatory(params, "templates", self) request_params = params.get("request-params", {}) for template, body in templates: @@ -1654,7 +1557,7 @@ async def __call__(self, es, params): "success": True, } - def __repr__(self, *args, **kwargs): + def __repr__(self) -> str: return "create-index-template" @@ -1664,7 +1567,7 @@ class DeleteIndexTemplate(Runner): `_. """ - async def __call__(self, es, params): + async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]) -> dict[str, Any]: template_names = mandatory(params, "templates", self) only_if_exists = params.get("only-if-exists", False) request_params = params.get("request-params", {}) @@ -1703,7 +1606,7 @@ async def __call__(self, es, params): "success": True, } - def __repr__(self, *args, **kwargs): + def __repr__(self) -> str: return "delete-index-template" @@ -1727,7 +1630,7 @@ async def _wait_for(self, es, idx, description): if not result["success"]: raise exceptions.RallyAssertionError(f"Failed to wait for [{description}].") - async def __call__(self, es, params): + async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]) -> dict[str, Any]: source_index = mandatory(params, "source-index", self) source_indices_get = await es.indices.get(index=source_index) source_indices = list(source_indices_get.keys()) @@ -1785,7 +1688,7 @@ async def __call__(self, es, params): "success": True, } - def __repr__(self, *args, **kwargs): + def __repr__(self) -> str: return "shrink-index" @@ -1794,15 +1697,12 @@ class CreateMlDatafeed(Runner): Execute the `create datafeed API `_. 
""" - async def __call__(self, es, params): - # pylint: disable=import-outside-toplevel - import elasticsearch - + async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]) -> None: datafeed_id = mandatory(params, "datafeed-id", self) body = mandatory(params, "body", self) try: await es.ml.put_datafeed(datafeed_id=datafeed_id, body=body) - except elasticsearch.BadRequestError: + except BadRequestError: # TODO: remove the fallback to '_xpack' path when we drop support for Elasticsearch 6.8 await es.perform_request( method="PUT", @@ -1810,7 +1710,7 @@ async def __call__(self, es, params): body=body, ) - def __repr__(self, *args, **kwargs): + def __repr__(self) -> str: return "create-ml-datafeed" @@ -1819,16 +1719,13 @@ class DeleteMlDatafeed(Runner): Execute the `delete datafeed API `_. """ - async def __call__(self, es, params): - # pylint: disable=import-outside-toplevel - import elasticsearch - + async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]) -> None: datafeed_id = mandatory(params, "datafeed-id", self) force = params.get("force", False) try: # we don't want to fail if a datafeed does not exist, thus we ignore 404s. await es.ml.delete_datafeed(datafeed_id=datafeed_id, force=force, ignore=[404]) - except elasticsearch.BadRequestError: + except BadRequestError: # TODO: remove the fallback to '_xpack' path when we drop support for Elasticsearch 6.8 await es.perform_request( method="DELETE", @@ -1836,7 +1733,7 @@ async def __call__(self, es, params): params={"force": escape(force), "ignore": 404}, ) - def __repr__(self, *args, **kwargs): + def __repr__(self) -> str: return "delete-ml-datafeed" @@ -1845,10 +1742,7 @@ class StartMlDatafeed(Runner): Execute the `start datafeed API `_. """ - async def __call__(self, es, params): - # pylint: disable=import-outside-toplevel - import elasticsearch - + async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]) -> None: datafeed_id = mandatory(params, "datafeed-id", self) body = params.get("body") start = params.get("start") @@ -1856,7 +1750,7 @@ async def __call__(self, es, params): timeout = params.get("timeout") try: await es.ml.start_datafeed(datafeed_id=datafeed_id, body=body, start=start, end=end, timeout=timeout) - except elasticsearch.BadRequestError: + except BadRequestError: # TODO: remove the fallback to '_xpack' path when we drop support for Elasticsearch 6.8 await es.perform_request( method="POST", @@ -1864,7 +1758,7 @@ async def __call__(self, es, params): body=body, ) - def __repr__(self, *args, **kwargs): + def __repr__(self) -> str: return "start-ml-datafeed" @@ -1873,7 +1767,7 @@ class StopMlDatafeed(Runner): Execute the `stop datafeed API `_. """ - async def __call__(self, es, params): + async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]): # pylint: disable=import-outside-toplevel import elasticsearch @@ -1895,7 +1789,7 @@ async def __call__(self, es, params): params=request_params, ) - def __repr__(self, *args, **kwargs): + def __repr__(self) -> str: return "stop-ml-datafeed" @@ -1904,7 +1798,7 @@ class CreateMlJob(Runner): Execute the `create job API `_. 
""" - async def __call__(self, es, params): + async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]) -> None: # pylint: disable=import-outside-toplevel import elasticsearch @@ -1920,7 +1814,7 @@ async def __call__(self, es, params): body=body, ) - def __repr__(self, *args, **kwargs): + def __repr__(self) -> str: return "create-ml-job" @@ -1929,16 +1823,13 @@ class DeleteMlJob(Runner): Execute the `delete job API `_. """ - async def __call__(self, es, params): - # pylint: disable=import-outside-toplevel - import elasticsearch - + async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]) -> None: job_id = mandatory(params, "job-id", self) force = params.get("force", False) # we don't want to fail if a job does not exist, thus we ignore 404s. try: await es.ml.delete_job(job_id=job_id, force=force, ignore=[404]) - except elasticsearch.BadRequestError: + except BadRequestError: # TODO: remove the fallback to '_xpack' path when we drop support for Elasticsearch 6.8 await es.perform_request( method="DELETE", @@ -1946,7 +1837,7 @@ async def __call__(self, es, params): params={"force": escape(force), "ignore": 404}, ) - def __repr__(self, *args, **kwargs): + def __repr__(self) -> str: return "delete-ml-job" @@ -1955,21 +1846,18 @@ class OpenMlJob(Runner): Execute the `open job API `_. """ - async def __call__(self, es, params): - # pylint: disable=import-outside-toplevel - import elasticsearch - + async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]) -> None: job_id = mandatory(params, "job-id", self) try: await es.ml.open_job(job_id=job_id) - except elasticsearch.BadRequestError: + except BadRequestError: # TODO: remove the fallback to '_xpack' path when we drop support for Elasticsearch 6.8 await es.perform_request( method="POST", path=f"/_xpack/ml/anomaly_detectors/{job_id}/_open", ) - def __repr__(self, *args, **kwargs): + def __repr__(self) -> str: return "open-ml-job" @@ -1978,16 +1866,13 @@ class CloseMlJob(Runner): Execute the `close job API `_. 
""" - async def __call__(self, es, params): - # pylint: disable=import-outside-toplevel - import elasticsearch - + async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]) -> None: job_id = mandatory(params, "job-id", self) force = params.get("force", False) timeout = params.get("timeout") try: await es.ml.close_job(job_id=job_id, force=force, timeout=timeout) - except elasticsearch.BadRequestError: + except BadRequestError: # TODO: remove the fallback to '_xpack' path when we drop support for Elasticsearch 6.8 request_params = { "force": escape(force), @@ -2001,12 +1886,12 @@ async def __call__(self, es, params): params=request_params, ) - def __repr__(self, *args, **kwargs): + def __repr__(self) -> str: return "close-ml-job" class RawRequest(Runner): - async def __call__(self, es, params): + async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]) -> None: params, request_params, transport_params, headers = self._transport_request_params(params) es = es.options(**transport_params) @@ -2021,13 +1906,13 @@ async def __call__(self, es, params): headers = None # disable eager response parsing - responses might be huge thus skewing results - es.return_raw_response() - + if isinstance(es, RallyAsyncElasticsearch): + es.return_raw_response() await es.perform_request( method=params.get("method", "GET"), path=path, headers=headers, body=params.get("body"), params=request_params ) - def __repr__(self, *args, **kwargs): + def __repr__(self) -> str: return "raw-request" @@ -2036,14 +1921,16 @@ class Sleep(Runner): Sleeps for the specified duration not issuing any request. """ - async def __call__(self, es, params): - es.on_request_start() + async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]) -> None: + if isinstance(es, RallyAsyncElasticsearch): + es.on_request_start() try: await asyncio.sleep(mandatory(params, "duration", "sleep")) finally: - es.on_request_end() + if isinstance(es, RallyAsyncElasticsearch): + es.on_request_end() - def __repr__(self, *args, **kwargs): + def __repr__(self) -> str: return "sleep" @@ -2052,10 +1939,10 @@ class DeleteSnapshotRepository(Runner): Deletes a snapshot repository """ - async def __call__(self, es, params): + async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]) -> None: await es.snapshot.delete_repository(repository=mandatory(params, "repository", repr(self)), ignore=[404]) - def __repr__(self, *args, **kwargs): + def __repr__(self): return "delete-snapshot-repository" @@ -2064,13 +1951,13 @@ class CreateSnapshotRepository(Runner): Creates a new snapshot repository """ - async def __call__(self, es, params): + async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]) -> None: request_params = params.get("request-params", {}) await es.snapshot.create_repository( name=mandatory(params, "repository", repr(self)), body=mandatory(params, "body", repr(self)), params=request_params ) - def __repr__(self, *args, **kwargs): + def __repr__(self): return "create-snapshot-repository" @@ -2079,7 +1966,7 @@ class CreateSnapshot(Runner): Creates a new snapshot repository """ - async def __call__(self, es, params): + async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]) -> None: wait_for_completion = params.get("wait-for-completion", False) repository = mandatory(params, "repository", repr(self)) snapshot = mandatory(params, "snapshot", repr(self)) @@ -2088,7 +1975,7 @@ async def __call__(self, es, params): api_kwargs = self._default_kw_params(params) await 
es.snapshot.create(repository=repository, snapshot=snapshot, wait_for_completion=wait_for_completion, **api_kwargs) - def __repr__(self, *args, **kwargs): + def __repr__(self) -> str: return "create-snapshot" @@ -2097,7 +1984,7 @@ class WaitForSnapshotCreate(Runner): Waits until a currently running on a given repository has finished successfully and returns detailed metrics. """ - async def __call__(self, es, params): + async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]) -> dict[str, Any]: repository = mandatory(params, "repository", repr(self)) snapshot = mandatory(params, "snapshot", repr(self)) wait_period = params.get("completion-recheck-wait-period", 1) @@ -2143,7 +2030,7 @@ async def __call__(self, es, params): "file_count": file_count, } - def __repr__(self, *args, **kwargs): + def __repr__(self): return "wait-for-snapshot-create" @@ -2152,7 +2039,7 @@ class WaitForCurrentSnapshotsCreate(Runner): Waits until all currently running snapshots on a given repository have completed """ - async def __call__(self, es, params): + async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]) -> None: repository = mandatory(params, "repository", repr(self)) wait_period = params.get("completion-recheck-wait-period", 1) es_info = await es.info() @@ -2162,7 +2049,10 @@ async def __call__(self, es, params): # significantly reduce response size when lots of snapshots have been taken # only available since ES 8.3.0 (https://github.com/elastic/elasticsearch/pull/86269) - if (Version.from_string(es_version) >= Version.from_string("8.3.0")) or es.is_serverless: + serverless = False + if isinstance(es, RallyAsyncElasticsearch): + serverless = es.is_serverless + if (Version.from_string(es_version) >= Version.from_string("8.3.0")) or serverless: request_args["index_names"] = False while True: @@ -2176,7 +2066,7 @@ async def __call__(self, es, params): # getting detailed stats per snapshot using the snapshot status api can be very expensive. # return nothing and rely on Rally's own service_time measurement for the duration. - def __repr__(self, *args, **kwargs): + def __repr__(self) -> str: return "wait-for-current-snapshots-create" @@ -2185,7 +2075,7 @@ class RestoreSnapshot(Runner): Restores a snapshot from an already registered repository """ - async def __call__(self, es, params): + async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]) -> None: params, request_params, transport_params, headers = self._transport_request_params(params) es = es.options(**transport_params) @@ -2205,12 +2095,12 @@ async def __call__(self, es, params): params=request_params, ) - def __repr__(self, *args, **kwargs): + def __repr__(self) -> str: return "restore-snapshot" class IndicesRecovery(Runner): - async def __call__(self, es, params): + async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]) -> dict[str, Any]: index = mandatory(params, "index", repr(self)) wait_period = params.get("completion-recheck-wait-period", 1) @@ -2258,7 +2148,7 @@ async def __call__(self, es, params): "stop_time_millis": total_end_millis, } - def __repr__(self, *args, **kwargs): + def __repr__(self) -> str: return "wait-for-recovery" @@ -2268,10 +2158,10 @@ class PutSettings(Runner): `cluster settings API _. 
""" - async def __call__(self, es, params): + async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]) -> None: await es.cluster.put_settings(body=mandatory(params, "body", repr(self))) - def __repr__(self, *args, **kwargs): + def __repr__(self) -> str: return "put-settings" @@ -2280,13 +2170,13 @@ class CreateTransform(Runner): Execute the `create transform API https://www.elastic.co/guide/en/elasticsearch/reference/current/put-transform.html`_. """ - async def __call__(self, es, params): + async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]) -> None: transform_id = mandatory(params, "transform-id", self) body = mandatory(params, "body", self) defer_validation = params.get("defer-validation", False) await es.transform.put_transform(transform_id=transform_id, body=body, defer_validation=defer_validation) - def __repr__(self, *args, **kwargs): + def __repr__(self) -> str: return "create-transform" @@ -2296,13 +2186,13 @@ class StartTransform(Runner): https://www.elastic.co/guide/en/elasticsearch/reference/current/start-transform.html`_. """ - async def __call__(self, es, params): + async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]) -> None: transform_id = mandatory(params, "transform-id", self) timeout = params.get("timeout") await es.transform.start_transform(transform_id=transform_id, timeout=timeout) - def __repr__(self, *args, **kwargs): + def __repr__(self) -> str: return "start-transform" @@ -2311,7 +2201,7 @@ class WaitForTransform(Runner): Wait for the transform until it reaches a certain checkpoint. """ - def __init__(self): + def __init__(self, *, cfg=types.Config): super().__init__() self._completed = False self._percent_completed = 0.0 @@ -2327,7 +2217,7 @@ def completed(self): def percent_completed(self): return self._percent_completed - async def __call__(self, es, params): + async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]) -> dict[str, Any]: """ stop the transform and wait until transform has finished return stats @@ -2425,7 +2315,7 @@ async def __call__(self, es, params): # sleep for a while, so stats is not called to often await asyncio.sleep(poll_interval) - def __repr__(self, *args, **kwargs): + def __repr__(self) -> str: return "wait-for-transform" @@ -2435,13 +2325,13 @@ class DeleteTransform(Runner): https://www.elastic.co/guide/en/elasticsearch/reference/current/delete-transform.html`_. """ - async def __call__(self, es, params): + async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]) -> None: transform_id = mandatory(params, "transform-id", self) force = params.get("force", False) # we don't want to fail if a job does not exist, thus we ignore 404s. 
await es.transform.delete_transform(transform_id=transform_id, force=force, ignore=[404]) - def __repr__(self, *args, **kwargs): + def __repr__(self) -> str: return "delete-transform" @@ -2461,7 +2351,7 @@ def _get(self, v, path): def _safe_string(self, v): return str(v) if v is not None else None - async def __call__(self, es, params): + async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]) -> dict[str, Any]: api_kwargs = self._default_kw_params(params) transform_id = mandatory(params, "transform-id", self) condition = params.get("condition") @@ -2491,12 +2381,12 @@ async def __call__(self, es, params): "success": True, } - def __repr__(self, *args, **kwargs): + def __repr__(self) -> str: return "transform-stats" class SubmitAsyncSearch(Runner): - async def __call__(self, es, params): + async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]) -> None: request_params = params.get("request-params", {}) # defaults wait_for_completion_timeout = 0 to avoid sync fallback for fast searches @@ -2509,7 +2399,7 @@ async def __call__(self, es, params): search_id = response.get("id") CompositeContext.put(op_name, search_id) - def __repr__(self, *args, **kwargs): + def __repr__(self) -> str: return "submit-async-search" @@ -2522,7 +2412,7 @@ def async_search_ids(op_names): class GetAsyncSearch(Runner): - async def __call__(self, es, params): + async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]) -> dict[str, Any]: success = True searches = mandatory(params, "retrieve-results-for", self) request_params = params.get("request-params", {}) @@ -2549,23 +2439,23 @@ async def __call__(self, es, params): "stats": stats, } - def __repr__(self, *args, **kwargs): + def __repr__(self) -> str: return "get-async-search" class DeleteAsyncSearch(Runner): - async def __call__(self, es, params): + async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]) -> None: searches = mandatory(params, "delete-results-for", self) for search_id, search in async_search_ids(searches): await es.async_search.delete(id=search_id) CompositeContext.remove(search) - def __repr__(self, *args, **kwargs): + def __repr__(self) -> str: return "delete-async-search" class OpenPointInTime(Runner): - async def __call__(self, es, params): + async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]): op_name = mandatory(params, "name", self) index = mandatory(params, "index", self) keep_alive = params.get("keep-alive", "1m") @@ -2573,12 +2463,12 @@ async def __call__(self, es, params): id = response.get("id") CompositeContext.put(op_name, id) - def __repr__(self, *args, **kwargs): + def __repr__(self) -> str: return "open-point-in-time" class ClosePointInTime(Runner): - async def __call__(self, es, params): + async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]): pit_op = mandatory(params, "with-point-in-time-from", self) pit_id = CompositeContext.get(pit_op) request_params = params.get("request-params", {}) @@ -2586,7 +2476,7 @@ async def __call__(self, es, params): await es.close_point_in_time(body=body, params=request_params, headers=None) CompositeContext.remove(pit_op) - def __repr__(self, *args, **kwargs): + def __repr__(self) -> str: return "close-point-in-time" @@ -2639,8 +2529,8 @@ class Composite(Runner): Executes a complex request structure which is measured by Rally as one composite operation. 
""" - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) + def __init__(self, *, cfg: types.Config | None = None): + super().__init__(cfg=cfg) # Since Composite is marked as serverless.Status.Public, only add public # operation types here. self.supported_op_types = [ @@ -2657,18 +2547,19 @@ def __init__(self, *args, **kwargs): "field-caps", ] - async def run_stream(self, es, stream, connection_limit): - streams = [] + async def run_stream(self, es: AsyncElasticsearch, stream: Iterable[dict[str, Any]], connection_limit): + streams: list[asyncio.Task] = [] timings = [] try: for item in stream: if "stream" in item: streams.append(asyncio.create_task(self.run_stream(es, item["stream"], connection_limit))) - elif "operation-type" in item: + continue + + if "operation-type" in item: # consume all prior streams first if streams: - streams_timings = await asyncio.gather(*streams) - for stream_timings in streams_timings: + for stream_timings in await asyncio.gather(*streams): timings += stream_timings streams = [] op_type = item["operation-type"] @@ -2679,7 +2570,7 @@ async def run_stream(self, es, stream, connection_limit): runner = RequestTiming(runner_for(op_type)) async with connection_limit: async with runner: - response = await runner({"default": es}, item) + response: dict[str, Any] | None = await runner(es, item) if response: # TODO: support calculating dependent's throughput # drop weight and unit metadata but keep the rest @@ -2690,9 +2581,10 @@ async def run_stream(self, es, stream, connection_limit): timings.append(response) else: timings.append(None) + continue + + raise exceptions.RallyAssertionError("Requests structure must contain [stream] or [operation-type].") - else: - raise exceptions.RallyAssertionError("Requests structure must contain [stream] or [operation-type].") except BaseException: # stop all already created tasks in case of exceptions for s in streams: @@ -2702,12 +2594,11 @@ async def run_stream(self, es, stream, connection_limit): # complete any outstanding streams if streams: - streams_timings = await asyncio.gather(*streams) - for stream_timings in streams_timings: + for stream_timings in await asyncio.gather(*streams): timings += stream_timings return timings - async def __call__(self, es, params): + async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]) -> dict[str, Any]: requests = mandatory(params, "requests", self) max_connections = params.get("max-connections", sys.maxsize) async with CompositeContext(): @@ -2718,7 +2609,7 @@ async def __call__(self, es, params): "dependent_timing": response, } - def __repr__(self, *args, **kwargs): + def __repr__(self) -> str: return "composite" @@ -2762,7 +2653,7 @@ async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]): "success": True, } - def __repr__(self, *args, **kwargs): + def __repr__(self) -> str: return "create-ilm-policy" @@ -2772,7 +2663,7 @@ class DeleteIlmPolicy(Runner): `_. """ - async def __call__(self, es, params): + async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]): policy_name = mandatory(params, "policy-name", self) request_params = params.get("request-params", {}) error_trace = request_params.get("error_trace", None) @@ -2789,7 +2680,7 @@ async def __call__(self, es, params): "success": True, } - def __repr__(self, *args, **kwargs): + def __repr__(self) -> str: return "delete-ilm-policy" @@ -2798,7 +2689,7 @@ class Sql(Runner): Executes an SQL query and optionally paginates through subsequent pages. 
""" - async def __call__(self, es, params): + async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]): body = mandatory(params, "body", self) if body.get("query") is None: raise exceptions.DataError( @@ -2807,6 +2698,9 @@ async def __call__(self, es, params): ) pages = params.get("pages", 1) + if not isinstance(es, RallyAsyncElasticsearch): + raise TypeError("Expected RallyAsyncElasticsearch, got %r" % type(es)) + es.return_raw_response() r = await es.perform_request(method="POST", path="/_sql", body=body) @@ -2825,7 +2719,7 @@ async def __call__(self, es, params): return {"weight": weight, "unit": "ops", "success": True} - def __repr__(self, *args, **kwargs): + def __repr__(self) -> str: return "sql" @@ -2834,7 +2728,7 @@ class Downsample(Runner): Executes a downsampling operation creating the target index and aggregating data in the source index on the @timestamp field. """ - async def __call__(self, es, params): + async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]): params, request_params, transport_params, request_headers = self._transport_request_params(params) es = es.options(**transport_params) @@ -2867,7 +2761,7 @@ async def __call__(self, es, params): return {"weight": 1, "unit": "ops", "success": True} - def __repr__(self, *args, **kwargs): + def __repr__(self) -> str: return "downsample" @@ -2877,7 +2771,7 @@ class FieldCaps(Runner): ` _. """ - async def __call__(self, es, params): + async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]): index = params.get("index", "_all") fields = params.get("fields", "*") body = params.get("body", {}) @@ -2889,12 +2783,12 @@ async def __call__(self, es, params): return {"weight": 1, "unit": "ops", "success": True} - def __repr__(self, *args, **kwargs): + def __repr__(self) -> str: return "field-caps" class Esql(Runner): - async def __call__(self, es, params): + async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]): params, request_params, transport_params, headers = self._transport_request_params(params) es = es.options(**transport_params) query = mandatory(params, "query", self) @@ -2907,25 +2801,23 @@ async def __call__(self, es, params): # counter-intuitive, but preserves prior behavior headers = None # disable eager response parsing - responses might be huge thus skewing results - es.return_raw_response() + if isinstance(es, RallyAsyncElasticsearch): + es.return_raw_response() await es.perform_request(method="POST", path="/_query", headers=headers, body=body, params=request_params) return {"success": True, "unit": "ops", "weight": 1} - def __repr__(self, *args, **kwargs): + def __repr__(self) -> str: return "esql" -class RequestTiming(Runner, Delegator): - def __init__(self, delegate): - super().__init__(delegate=delegate) +class RequestTiming(WrapperRunner): - async def __aenter__(self): - await self.delegate.__aenter__() - return self - - async def __call__(self, es, params): + async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]) -> dict[str, Any]: absolute_time = time.time() - with es["default"].new_request_context() as request_context: + request_context = contextlib.nullcontext() + if isinstance(es, RallyAsyncElasticsearch): + request_context = es.new_request_context() + with request_context: return_value = await self.delegate(es, params) if isinstance(return_value, tuple) and len(return_value) == 2: total_ops, total_ops_unit = return_value @@ -2955,13 +2847,10 @@ async def __call__(self, es, params): } return result - async def 
__aexit__(self, exc_type, exc_val, exc_tb):
-        return await self.delegate.__aexit__(exc_type, exc_val, exc_tb)
-
 # TODO: Allow to use this from (selected) regular runners and add user documentation.
 # TODO: It would maybe be interesting to add meta-data on how many retries there were.
-class Retry(Runner, Delegator):
+class Retry(WrapperRunner):
     """
     This runner can be used as a wrapper around regular runners to retry operations.
 
@@ -2984,7 +2873,7 @@ async def __aenter__(self):
         await self.delegate.__aenter__()
         return self
 
-    async def __call__(self, es, params):
+    async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]):
         # pylint: disable=import-outside-toplevel
         import socket
 
@@ -3048,7 +2937,7 @@ async def __call__(self, es, params):
     async def __aexit__(self, exc_type, exc_val, exc_tb):
         return await self.delegate.__aexit__(exc_type, exc_val, exc_tb)
 
-    def __repr__(self, *args, **kwargs):
+    def __repr__(self) -> str:
         return "retryable %s" % repr(self.delegate)
 
 
@@ -3068,10 +2957,11 @@ def __init__(self, *args, config=None, **kwargs):
         }
         self.operators = {"exists": lambda v, _: v is not None, "not_exists": lambda v, _: v is None, **self.numerical_operators}
 
-    async def __call__(self, es, params):
+    async def __call__(self, es: AsyncElasticsearch, params: dict[str, Any]) -> dict[str, Any]:
         params, request_params, transport_params, headers = self._transport_request_params(params)
         es = es.options(**transport_params)
-        es.return_raw_response()
+        if isinstance(es, RallyAsyncElasticsearch):
+            es.return_raw_response()
         path = mandatory(params, "path", self)
         method = params.get("method", "GET")
         body = params.get("body", None)
@@ -3117,5 +3007,5 @@ def compare_value(self, criterion: dict, value: str | int | float) -> bool:
         target_value: int | float = criterion.get("value")
         return self.operators[operator](value, target_value)
 
-    def __repr__(self, *args, **kwargs):
+    def __repr__(self) -> str:
         return "run-until"
diff --git a/tests/client/common_test.py b/tests/client/common_test.py
new file mode 100644
index 000000000..dac449c60
--- /dev/null
+++ b/tests/client/common_test.py
@@ -0,0 +1,24 @@
+# Licensed to Elasticsearch B.V. under one or more contributor
+# license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright
+# ownership. Elasticsearch B.V. licenses this file to you under
+# the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
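+
+"""Unit tests for helpers in esrally.client.common.
+
+For now this covers only the private _client_major_version_to_str helper,
+which renders a (major, minor, patch) version tuple as its major component,
+e.g. (8, 2, 0) -> "8".
+"""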
+ +from esrally.client import common + + +# pylint: disable=protected-access +def test_client_major_version_to_str(): + version = (8, 2, 0) + assert common._client_major_version_to_str(version) == "8" diff --git a/tests/driver/runner_test.py b/tests/driver/runner_test.py index 4c3e32650..e89732cab 100644 --- a/tests/driver/runner_test.py +++ b/tests/driver/runner_test.py @@ -82,19 +82,19 @@ async def __call__(self, es, params): @mock.patch("esrally.driver.runner._multi_cluster_runner") @pytest.mark.asyncio async def test_register_wrapped_runner_with_no_multi_cluster_attribute(self, multi_cluster_runner, single_cluster_runner): - class Wrapper(runner.Delegator): + class Wrapper(runner.DelegatorRunner): def __init__(self, delegate=None): super().__init__(delegate=delegate) - async def __call__(self, *args): - return args + async def __call__(self, es, params): + return (es, params) - class BaseRunner: + class BaseRunner(runner.Runner): async def __call__(self, es, params): pass base_runner = BaseRunner() - wrapped_runner = Wrapper(base_runner) + wrapped_runner = Wrapper(delegate=base_runner) es = None params = {}