Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
506 changes: 481 additions & 25 deletions elasticsearch/_async/client/cat.py

Large diffs are not rendered by default.

6 changes: 3 additions & 3 deletions elasticsearch/_async/client/connector.py
Original file line number Diff line number Diff line change
Expand Up @@ -103,7 +103,7 @@ async def delete(

:param connector_id: The unique identifier of the connector to be deleted
:param delete_sync_jobs: A flag indicating if associated sync jobs should be
also removed. Defaults to false.
also removed.
:param hard: A flag indicating if the connector should be hard deleted.
"""
if connector_id in SKIP_IN_PATH:
Expand Down Expand Up @@ -360,7 +360,7 @@ async def list(

:param connector_name: A comma-separated list of connector names to fetch connector
documents for
:param from_: Starting offset (default: 0)
:param from_: Starting offset
:param include_deleted: A flag to indicate if the desired connector should be
fetched, even if it was soft-deleted.
:param index_name: A comma-separated list of connector index names to fetch connector
Expand Down Expand Up @@ -955,7 +955,7 @@ async def sync_job_list(
`<https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-connector-sync-job-list>`_

:param connector_id: A connector id to fetch connector sync jobs for
:param from_: Starting offset (default: 0)
:param from_: Starting offset
:param job_type: A comma-separated list of job types to fetch the sync jobs for
:param size: Specifies a max number of results to get
:param status: A sync job status to fetch connector sync jobs for
Expand Down
30 changes: 26 additions & 4 deletions elasticsearch/_async/client/indices.py
Original file line number Diff line number Diff line change
Expand Up @@ -833,7 +833,11 @@ async def create_from(
if pretty is not None:
__query["pretty"] = pretty
__body = create_from if create_from is not None else body
__headers = {"accept": "application/json", "content-type": "application/json"}
if not __body:
__body = None
__headers = {"accept": "application/json"}
if __body is not None:
__headers["content-type"] = "application/json"
return await self.perform_request( # type: ignore[return-value]
"PUT",
__path,
Expand Down Expand Up @@ -4549,6 +4553,7 @@ async def refresh(
For data streams, the API runs the refresh operation on the stream’s backing indices.</p>
<p>By default, Elasticsearch periodically refreshes indices every second, but only on indices that have received one search request or more in the last 30 seconds.
You can change this default interval with the <code>index.refresh_interval</code> setting.</p>
<p>In Elastic Cloud Serverless, the default refresh interval is 5 seconds across all indices.</p>
<p>Refresh requests are synchronous and do not return a response until the refresh operation completes.</p>
<p>Refreshes are resource-intensive.
To ensure good cluster performance, it's recommended to wait for Elasticsearch's periodic refresh rather than performing an explicit refresh when possible.</p>
Expand Down Expand Up @@ -5414,7 +5419,9 @@ async def shrink(
path_parts=__path_parts,
)

@_rewrite_parameters()
@_rewrite_parameters(
body_name="index_template",
)
async def simulate_index_template(
self,
*,
Expand All @@ -5425,6 +5432,8 @@ async def simulate_index_template(
filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
human: t.Optional[bool] = None,
include_defaults: t.Optional[bool] = None,
index_template: t.Optional[t.Mapping[str, t.Any]] = None,
body: t.Optional[t.Mapping[str, t.Any]] = None,
master_timeout: t.Optional[t.Union[str, t.Literal[-1], t.Literal[0]]] = None,
pretty: t.Optional[bool] = None,
) -> ObjectApiResponse[t.Any]:
Expand All @@ -5444,12 +5453,19 @@ async def simulate_index_template(
only be dry-run added if new or can also replace an existing one
:param include_defaults: If true, returns all relevant default configurations
for the index template.
:param index_template:
:param master_timeout: Period to wait for a connection to the master node. If
no response is received before the timeout expires, the request fails and
returns an error.
"""
if name in SKIP_IN_PATH:
raise ValueError("Empty value passed for parameter 'name'")
if index_template is None and body is None:
raise ValueError(
"Empty value passed for parameters 'index_template' and 'body', one of them should be set."
)
elif index_template is not None and body is not None:
raise ValueError("Cannot set both 'index_template' and 'body'")
__path_parts: t.Dict[str, str] = {"name": _quote(name)}
__path = f'/_index_template/_simulate_index/{__path_parts["name"]}'
__query: t.Dict[str, t.Any] = {}
Expand All @@ -5469,12 +5485,18 @@ async def simulate_index_template(
__query["master_timeout"] = master_timeout
if pretty is not None:
__query["pretty"] = pretty
__body = index_template if index_template is not None else body
if not __body:
__body = None
__headers = {"accept": "application/json"}
if __body is not None:
__headers["content-type"] = "application/json"
return await self.perform_request( # type: ignore[return-value]
"POST",
__path,
params=__query,
headers=__headers,
body=__body,
endpoint_id="indices.simulate_index_template",
path_parts=__path_parts,
)
Expand Down Expand Up @@ -5823,8 +5845,8 @@ async def stats(
are requested).
:param include_unloaded_segments: If true, the response includes information
from segments that are not loaded into memory.
:param level: Indicates whether statistics are aggregated at the cluster, index,
or shard level.
:param level: Indicates whether statistics are aggregated at the cluster, indices,
or shards level.
"""
__path_parts: t.Dict[str, str]
if index not in SKIP_IN_PATH and metric not in SKIP_IN_PATH:
Expand Down
12 changes: 11 additions & 1 deletion elasticsearch/_async/client/inference.py
Original file line number Diff line number Diff line change
Expand Up @@ -2504,7 +2504,7 @@ async def sparse_embedding(
)

@_rewrite_parameters(
body_fields=("input", "task_settings"),
body_fields=("input", "input_type", "task_settings"),
)
async def text_embedding(
self,
Expand All @@ -2514,6 +2514,7 @@ async def text_embedding(
error_trace: t.Optional[bool] = None,
filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
human: t.Optional[bool] = None,
input_type: t.Optional[str] = None,
pretty: t.Optional[bool] = None,
task_settings: t.Optional[t.Any] = None,
timeout: t.Optional[t.Union[str, t.Literal[-1], t.Literal[0]]] = None,
Expand All @@ -2529,6 +2530,13 @@ async def text_embedding(

:param inference_id: The inference Id
:param input: Inference input. Either a string or an array of strings.
:param input_type: The input data type for the text embedding model. Possible
values include: * `SEARCH` * `INGEST` * `CLASSIFICATION` * `CLUSTERING` Not
all services support all values. Unsupported values will trigger a validation
exception. Accepted values depend on the configured inference service, refer
to the relevant service-specific documentation for more info. > info > The
`input_type` parameter specified on the root level of the request body will
take precedence over the `input_type` parameter specified in `task_settings`.
:param task_settings: Optional task settings
:param timeout: Specifies the amount of time to wait for the inference request
to complete.
Expand All @@ -2554,6 +2562,8 @@ async def text_embedding(
if not __body:
if input is not None:
__body["input"] = input
if input_type is not None:
__body["input_type"] = input_type
if task_settings is not None:
__body["task_settings"] = task_settings
if not __body:
Expand Down
4 changes: 3 additions & 1 deletion elasticsearch/_async/client/logstash.py
Original file line number Diff line number Diff line change
Expand Up @@ -141,7 +141,9 @@ async def put_pipeline(

`<https://www.elastic.co/docs/api/doc/elasticsearch/operation/operation-logstash-put-pipeline>`_

:param id: An identifier for the pipeline.
:param id: An identifier for the pipeline. Pipeline IDs must begin with a letter
or underscore and contain only letters, underscores, dashes, hyphens and
numbers.
:param pipeline:
"""
if id in SKIP_IN_PATH:
Expand Down
4 changes: 2 additions & 2 deletions elasticsearch/_async/client/nodes.py
Original file line number Diff line number Diff line change
Expand Up @@ -404,8 +404,8 @@ async def stats(
are requested).
:param include_unloaded_segments: If `true`, the response includes information
from segments that are not loaded into memory.
:param level: Indicates whether statistics are aggregated at the cluster, index,
or shard level.
:param level: Indicates whether statistics are aggregated at the node, indices,
or shards level.
:param timeout: Period to wait for a response. If no response is received before
the timeout expires, the request fails and returns an error.
:param types: A comma-separated list of document types for the indexing index
Expand Down
20 changes: 5 additions & 15 deletions elasticsearch/_async/client/shutdown.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,13 +33,9 @@ async def delete_node(
error_trace: t.Optional[bool] = None,
filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
human: t.Optional[bool] = None,
master_timeout: t.Optional[
t.Union[str, t.Literal["d", "h", "m", "micros", "ms", "nanos", "s"]]
] = None,
master_timeout: t.Optional[t.Union[str, t.Literal[-1], t.Literal[0]]] = None,
pretty: t.Optional[bool] = None,
timeout: t.Optional[
t.Union[str, t.Literal["d", "h", "m", "micros", "ms", "nanos", "s"]]
] = None,
timeout: t.Optional[t.Union[str, t.Literal[-1], t.Literal[0]]] = None,
) -> ObjectApiResponse[t.Any]:
"""
.. raw:: html
Expand Down Expand Up @@ -97,9 +93,7 @@ async def get_node(
error_trace: t.Optional[bool] = None,
filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
human: t.Optional[bool] = None,
master_timeout: t.Optional[
t.Union[str, t.Literal["d", "h", "m", "micros", "ms", "nanos", "s"]]
] = None,
master_timeout: t.Optional[t.Union[str, t.Literal[-1], t.Literal[0]]] = None,
pretty: t.Optional[bool] = None,
) -> ObjectApiResponse[t.Any]:
"""
Expand Down Expand Up @@ -162,14 +156,10 @@ async def put_node(
error_trace: t.Optional[bool] = None,
filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
human: t.Optional[bool] = None,
master_timeout: t.Optional[
t.Union[str, t.Literal["d", "h", "m", "micros", "ms", "nanos", "s"]]
] = None,
master_timeout: t.Optional[t.Union[str, t.Literal[-1], t.Literal[0]]] = None,
pretty: t.Optional[bool] = None,
target_node_name: t.Optional[str] = None,
timeout: t.Optional[
t.Union[str, t.Literal["d", "h", "m", "micros", "ms", "nanos", "s"]]
] = None,
timeout: t.Optional[t.Union[str, t.Literal[-1], t.Literal[0]]] = None,
body: t.Optional[t.Dict[str, t.Any]] = None,
) -> ObjectApiResponse[t.Any]:
"""
Expand Down
174 changes: 174 additions & 0 deletions elasticsearch/_async/client/streams.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,174 @@
# Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

class C:

    @_rewrite_parameters()
    @_stability_warning(Stability.EXPERIMENTAL)
    async def logs_disable(
        self,
        *,
        error_trace: t.Optional[bool] = None,
        filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
        human: t.Optional[bool] = None,
        master_timeout: t.Optional[t.Union[str, t.Literal[-1], t.Literal[0]]] = None,
        pretty: t.Optional[bool] = None,
        timeout: t.Optional[t.Union[str, t.Literal[-1], t.Literal[0]]] = None,
    ) -> t.Union[ObjectApiResponse[t.Any], TextApiResponse]:
        """
        .. raw:: html

            <p>Disable logs stream.</p>
            <p>Turn off the logs stream feature for this cluster.</p>


        `<https://www.elastic.co/docs/api/doc/elasticsearch#TODO>`_

        :param master_timeout: The period to wait for a connection to the master node.
            If no response is received before the timeout expires, the request fails
            and returns an error.
        :param timeout: The period to wait for a response. If no response is received
            before the timeout expires, the request fails and returns an error.
        """
        __path_parts: t.Dict[str, str] = {}
        __path = "/_streams/logs/_disable"
        __query: t.Dict[str, t.Any] = {}
        # Only forward parameters the caller actually supplied.
        if error_trace is not None:
            __query["error_trace"] = error_trace
        if filter_path is not None:
            __query["filter_path"] = filter_path
        if human is not None:
            __query["human"] = human
        if master_timeout is not None:
            __query["master_timeout"] = master_timeout
        if pretty is not None:
            __query["pretty"] = pretty
        if timeout is not None:
            __query["timeout"] = timeout
        # The server may answer with JSON or plain text, so accept both.
        __headers = {"accept": "application/json,text/plain"}
        return await self.perform_request(  # type: ignore[return-value]
            "POST",
            __path,
            params=__query,
            headers=__headers,
            endpoint_id="streams.logs_disable",
            path_parts=__path_parts,
        )

    @_rewrite_parameters()
    @_stability_warning(Stability.EXPERIMENTAL)
    async def logs_enable(
        self,
        *,
        error_trace: t.Optional[bool] = None,
        filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
        human: t.Optional[bool] = None,
        master_timeout: t.Optional[t.Union[str, t.Literal[-1], t.Literal[0]]] = None,
        pretty: t.Optional[bool] = None,
        timeout: t.Optional[t.Union[str, t.Literal[-1], t.Literal[0]]] = None,
    ) -> t.Union[ObjectApiResponse[t.Any], TextApiResponse]:
        """
        .. raw:: html

            <p>Enable logs stream.</p>
            <p>Turn on the logs stream feature for this cluster.</p>
            <p>NOTE: To protect existing data, this feature can be turned on only if the
            cluster does not have existing indices or data streams that match the pattern <code>logs|logs.*</code>.
            If those indices or data streams exist, a <code>409 - Conflict</code> response and error is returned.</p>


        `<https://www.elastic.co/docs/api/doc/elasticsearch#TODO>`_

        :param master_timeout: The period to wait for a connection to the master node.
            If no response is received before the timeout expires, the request fails
            and returns an error.
        :param timeout: The period to wait for a response. If no response is received
            before the timeout expires, the request fails and returns an error.
        """
        __path_parts: t.Dict[str, str] = {}
        __path = "/_streams/logs/_enable"
        __query: t.Dict[str, t.Any] = {}
        # Only forward parameters the caller actually supplied.
        if error_trace is not None:
            __query["error_trace"] = error_trace
        if filter_path is not None:
            __query["filter_path"] = filter_path
        if human is not None:
            __query["human"] = human
        if master_timeout is not None:
            __query["master_timeout"] = master_timeout
        if pretty is not None:
            __query["pretty"] = pretty
        if timeout is not None:
            __query["timeout"] = timeout
        # The server may answer with JSON or plain text, so accept both.
        __headers = {"accept": "application/json,text/plain"}
        return await self.perform_request(  # type: ignore[return-value]
            "POST",
            __path,
            params=__query,
            headers=__headers,
            endpoint_id="streams.logs_enable",
            path_parts=__path_parts,
        )

    @_rewrite_parameters()
    @_stability_warning(Stability.EXPERIMENTAL)
    async def status(
        self,
        *,
        error_trace: t.Optional[bool] = None,
        filter_path: t.Optional[t.Union[str, t.Sequence[str]]] = None,
        human: t.Optional[bool] = None,
        # NOTE(review): annotation aligned with the sibling methods above
        # (`str | -1 | 0`); previously this used the duration-unit literal type,
        # inconsistent with the rest of this class. `-1` and `0` are the
        # sentinel timeout values accepted by Elasticsearch.
        master_timeout: t.Optional[t.Union[str, t.Literal[-1], t.Literal[0]]] = None,
        pretty: t.Optional[bool] = None,
    ) -> ObjectApiResponse[t.Any]:
        """
        .. raw:: html

            <p>Get the status of streams.</p>
            <p>Get the current status for all types of streams.</p>


        `<https://www.elastic.co/docs/api/doc/elasticsearch#TODO>`_

        :param master_timeout: Period to wait for a connection to the master node. If
            no response is received before the timeout expires, the request fails and
            returns an error.
        """
        __path_parts: t.Dict[str, str] = {}
        __path = "/_streams/status"
        __query: t.Dict[str, t.Any] = {}
        # Only forward parameters the caller actually supplied.
        if error_trace is not None:
            __query["error_trace"] = error_trace
        if filter_path is not None:
            __query["filter_path"] = filter_path
        if human is not None:
            __query["human"] = human
        if master_timeout is not None:
            __query["master_timeout"] = master_timeout
        if pretty is not None:
            __query["pretty"] = pretty
        __headers = {"accept": "application/json"}
        return await self.perform_request(  # type: ignore[return-value]
            "GET",
            __path,
            params=__query,
            headers=__headers,
            endpoint_id="streams.status",
            path_parts=__path_parts,
        )
Loading
Loading