diff --git a/airbyte/__init__.py b/airbyte/__init__.py index 2e7d7b02..c8b20c4d 100644 --- a/airbyte/__init__.py +++ b/airbyte/__init__.py @@ -142,7 +142,7 @@ # Submodules imported here for documentation reasons: https://github.com/mitmproxy/pdoc/issues/757 if TYPE_CHECKING: - # ruff: noqa: TCH004 # imports used for more than type checking + # ruff: noqa: TC004 # imports used for more than type checking from airbyte import ( caches, callbacks, diff --git a/airbyte/_executors/base.py b/airbyte/_executors/base.py index 463de068..9d0a5b4c 100644 --- a/airbyte/_executors/base.py +++ b/airbyte/_executors/base.py @@ -9,12 +9,13 @@ from airbyte import exceptions as exc from airbyte._message_iterators import AirbyteMessageIterator -from airbyte.sources.registry import ConnectorMetadata if TYPE_CHECKING: from collections.abc import Generator, Iterable, Iterator + from airbyte.sources.registry import ConnectorMetadata + _LATEST_VERSION = "latest" @@ -161,7 +162,9 @@ def __init__( if not name and not metadata: raise exc.PyAirbyteInternalError(message="Either name or metadata must be provided.") - self.name: str = name or cast(ConnectorMetadata, metadata).name # metadata is not None here + self.name: str = ( + name or cast("ConnectorMetadata", metadata).name + ) # metadata is not None here self.metadata: ConnectorMetadata | None = metadata self.enforce_version: bool = target_version is not None diff --git a/airbyte/_executors/declarative.py b/airbyte/_executors/declarative.py index dd331537..834afddb 100644 --- a/airbyte/_executors/declarative.py +++ b/airbyte/_executors/declarative.py @@ -54,7 +54,7 @@ def __init__( self.name = name self._manifest_dict: dict if isinstance(manifest, Path): - self._manifest_dict = cast(dict, yaml.safe_load(manifest.read_text())) + self._manifest_dict = cast("dict", yaml.safe_load(manifest.read_text())) elif isinstance(manifest, dict): self._manifest_dict = manifest diff --git a/airbyte/_executors/util.py b/airbyte/_executors/util.py index 7c7c150d..ac05877c 100644 --- a/airbyte/_executors/util.py +++ b/airbyte/_executors/util.py @@ -64,7 +64,7 @@ def _try_get_source_manifest( ) response.raise_for_status() # Raise HTTPError exception if the download failed try: - return cast(dict, yaml.safe_load(response.text)) + return cast("dict", yaml.safe_load(response.text)) except yaml.YAMLError as ex: raise exc.AirbyteConnectorInstallationError( message="Failed to parse the connector manifest YAML.", diff --git a/airbyte/_message_iterators.py b/airbyte/_message_iterators.py index 61edf7e3..be05d85b 100644 --- a/airbyte/_message_iterators.py +++ b/airbyte/_message_iterators.py @@ -3,7 +3,6 @@ from __future__ import annotations -import datetime import sys from collections.abc import Iterator from typing import IO, TYPE_CHECKING, cast @@ -27,6 +26,7 @@ if TYPE_CHECKING: + import datetime from collections.abc import Callable, Generator, Iterable, Iterator from pathlib import Path @@ -98,7 +98,7 @@ def generator() -> Generator[AirbyteMessage, None, None]: data=record, emitted_at=int( cast( - datetime.datetime, record.get(AB_EXTRACTED_AT_COLUMN) + "datetime.datetime", record.get(AB_EXTRACTED_AT_COLUMN) ).timestamp() ), # `meta` and `namespace` are not handled: @@ -134,7 +134,7 @@ def generator() -> Generator[AirbyteMessage, None, None]: yield AirbyteMessage.model_validate_json(next_line) except pydantic.ValidationError: # Handle JSON decoding errors (optional) - raise ValueError("Invalid JSON format") # noqa: B904, TRY003 + raise ValueError("Invalid JSON format") # noqa: B904 return 
cls(generator()) @@ -149,7 +149,7 @@ def generator() -> Generator[AirbyteMessage, None, None]: yield AirbyteMessage.model_validate_json(line) except pydantic.ValidationError: # Handle JSON decoding errors (optional) - raise ValueError(f"Invalid JSON format in input string: {line}") # noqa: B904, TRY003 + raise ValueError(f"Invalid JSON format in input string: {line}") # noqa: B904 return cls(generator()) @@ -193,6 +193,6 @@ def generator() -> Generator[AirbyteMessage, None, None]: # Handle JSON decoding errors current_file_buffer.close() current_file_buffer = None - raise ValueError("Invalid JSON format") # noqa: B904, TRY003 + raise ValueError("Invalid JSON format") # noqa: B904 return cls(generator()) diff --git a/airbyte/_processors/sql/bigquery.py b/airbyte/_processors/sql/bigquery.py index a2ce1152..ffbd5aac 100644 --- a/airbyte/_processors/sql/bigquery.py +++ b/airbyte/_processors/sql/bigquery.py @@ -99,7 +99,7 @@ class BigQueryTypeConverter(SQLTypeConverter): @classmethod def get_string_type(cls) -> sqlalchemy.types.TypeEngine: """Return the string type for BigQuery.""" - return cast(sqlalchemy.types.TypeEngine, "String") # BigQuery uses STRING for all strings + return cast("sqlalchemy.types.TypeEngine", "String") # BigQuery uses STRING for all strings @overrides def to_sql_type( diff --git a/airbyte/_util/api_util.py b/airbyte/_util/api_util.py index 4bdc0b11..a8fefdc4 100644 --- a/airbyte/_util/api_util.py +++ b/airbyte/_util/api_util.py @@ -14,7 +14,7 @@ from __future__ import annotations import json -from typing import Any +from typing import TYPE_CHECKING, Any import airbyte_api from airbyte_api import api, models @@ -24,15 +24,24 @@ AirbyteError, AirbyteMissingResourceError, AirbyteMultipleResourcesError, + PyAirbyteInputError, ) +if TYPE_CHECKING: + from collections.abc import Callable + + from airbyte_api.models import ( + DestinationConfiguration, + ) + + from airbyte.secrets.base import SecretString + + JOB_WAIT_INTERVAL_SECS = 2.0 JOB_WAIT_TIMEOUT_SECS_DEFAULT = 60 * 60 # 1 hour CLOUD_API_ROOT = "https://api.airbyte.com/v1" -# Helper functions - def status_ok(status_code: int) -> bool: """Check if a status code is OK.""" @@ -41,13 +50,19 @@ def status_ok(status_code: int) -> bool: def get_airbyte_server_instance( *, - api_key: str, api_root: str, -) -> airbyte_api.Airbyte: + client_id: SecretString, + client_secret: SecretString, +) -> airbyte_api.AirbyteAPI: """Get an Airbyte instance.""" return airbyte_api.AirbyteAPI( security=models.Security( - bearer_auth=api_key, + client_credentials=models.SchemeClientCredentials( + client_id=client_id, + client_secret=client_secret, + token_url=api_root + "/applications/token", + # e.g. 
https://api.airbyte.com/v1/applications/token + ), ), server_url=api_root, ) @@ -60,12 +75,14 @@ def get_workspace( workspace_id: str, *, api_root: str, - api_key: str, + client_id: SecretString, + client_secret: SecretString, ) -> models.WorkspaceResponse: """Get a connection.""" airbyte_instance = get_airbyte_server_instance( - api_key=api_key, api_root=api_root, + client_id=client_id, + client_secret=client_secret, ) response = airbyte_instance.workspaces.get_workspace( api.GetWorkspaceRequest( @@ -84,49 +101,188 @@ def get_workspace( ) -# List, get, and run connections +# List resources def list_connections( workspace_id: str, *, api_root: str, - api_key: str, -) -> list[api.ConnectionResponse]: + client_id: SecretString, + client_secret: SecretString, + name: str | None = None, + name_filter: Callable[[str], bool] | None = None, +) -> list[models.ConnectionResponse]: """Get a connection.""" + if name and name_filter: + raise PyAirbyteInputError(message="You can provide name or name_filter, but not both.") + + name_filter = (lambda n: n == name) if name else name_filter or (lambda _: True) + _ = workspace_id # Not used (yet) airbyte_instance = get_airbyte_server_instance( - api_key=api_key, + client_id=client_id, + client_secret=client_secret, api_root=api_root, ) response = airbyte_instance.connections.list_connections( - api.ListConnectionsRequest()( + api.ListConnectionsRequest( workspace_ids=[workspace_id], ), ) - if status_ok(response.status_code) and response.connections_response: - return response.connections_response.data + if not status_ok(response.status_code) and response.connections_response: + raise AirbyteError( + context={ + "workspace_id": workspace_id, + "response": response, + } + ) + assert response.connections_response is not None + return [ + connection + for connection in response.connections_response.data + if name_filter(connection.name) + ] - raise AirbyteError( - context={ - "workspace_id": workspace_id, - "response": response, - } + +def list_workspaces( + workspace_id: str, + *, + api_root: str, + client_id: SecretString, + client_secret: SecretString, + name: str | None = None, + name_filter: Callable[[str], bool] | None = None, +) -> list[models.WorkspaceResponse]: + """Get a connection.""" + if name and name_filter: + raise PyAirbyteInputError(message="You can provide name or name_filter, but not both.") + + name_filter = (lambda n: n == name) if name else name_filter or (lambda _: True) + + _ = workspace_id # Not used (yet) + airbyte_instance: airbyte_api.AirbyteAPI = get_airbyte_server_instance( + client_id=client_id, + client_secret=client_secret, + api_root=api_root, + ) + + response: api.ListWorkspacesResponse = airbyte_instance.workspaces.list_workspaces( + api.ListWorkspacesRequest( + workspace_ids=[workspace_id], + ), + ) + + if not status_ok(response.status_code) and response.workspaces_response: + raise AirbyteError( + context={ + "workspace_id": workspace_id, + "response": response, + } + ) + assert response.workspaces_response is not None + return [ + workspace for workspace in response.workspaces_response.data if name_filter(workspace.name) + ] + + +def list_sources( + workspace_id: str, + *, + api_root: str, + client_id: SecretString, + client_secret: SecretString, + name: str | None = None, + name_filter: Callable[[str], bool] | None = None, +) -> list[models.SourceResponse]: + """Get a connection.""" + if name and name_filter: + raise PyAirbyteInputError(message="You can provide name or name_filter, but not both.") + + name_filter = 
(lambda n: n == name) if name else name_filter or (lambda _: True) + + _ = workspace_id # Not used (yet) + airbyte_instance: airbyte_api.AirbyteAPI = get_airbyte_server_instance( + client_id=client_id, + client_secret=client_secret, + api_root=api_root, + ) + response: api.ListSourcesResponse = airbyte_instance.sources.list_sources( + api.ListSourcesRequest( + workspace_ids=[workspace_id], + ), ) + if not status_ok(response.status_code) and response.sources_response: + raise AirbyteError( + context={ + "workspace_id": workspace_id, + "response": response, + } + ) + assert response.sources_response is not None + return [source for source in response.sources_response.data if name_filter(source.name)] + + +def list_destinations( + workspace_id: str, + *, + api_root: str, + client_id: SecretString, + client_secret: SecretString, + name: str | None = None, + name_filter: Callable[[str], bool] | None = None, +) -> list[models.DestinationResponse]: + """Get a connection.""" + if name and name_filter: + raise PyAirbyteInputError(message="You can provide name or name_filter, but not both.") + + name_filter = (lambda n: n == name) if name else name_filter or (lambda _: True) + + _ = workspace_id # Not used (yet) + airbyte_instance = get_airbyte_server_instance( + client_id=client_id, + client_secret=client_secret, + api_root=api_root, + ) + response = airbyte_instance.destinations.list_destinations( + api.ListDestinationsRequest( + workspace_ids=[workspace_id], + ), + ) + + if not status_ok(response.status_code) and response.destinations_response: + raise AirbyteError( + context={ + "workspace_id": workspace_id, + "response": response, + } + ) + assert response.destinations_response is not None + return [ + destination + for destination in response.destinations_response.data + if name_filter(destination.name) + ] + + +# Get and run connections + def get_connection( workspace_id: str, connection_id: str, *, api_root: str, - api_key: str, -) -> api.ConnectionResponse: + client_id: SecretString, + client_secret: SecretString, +) -> models.ConnectionResponse: """Get a connection.""" _ = workspace_id # Not used (yet) airbyte_instance = get_airbyte_server_instance( - api_key=api_key, + client_id=client_id, + client_secret=client_secret, api_root=api_root, ) response = airbyte_instance.connections.get_connection( @@ -137,7 +293,11 @@ def get_connection( if status_ok(response.status_code) and response.connection_response: return response.connection_response - raise AirbyteMissingResourceError(connection_id, "connection", response.text) + raise AirbyteMissingResourceError( + resource_name_or_id=connection_id, + resource_type="connection", + log_text=response.raw_response.text, + ) def run_connection( @@ -145,8 +305,9 @@ def run_connection( connection_id: str, *, api_root: str, - api_key: str, -) -> api.ConnectionResponse: + client_id: SecretString, + client_secret: SecretString, +) -> models.JobResponse: """Get a connection. If block is True, this will block until the connection is finished running. 
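Each of the new `list_*` helpers above collapses the mutually exclusive `name` and `name_filter` arguments into a single predicate before filtering the API response client-side. A minimal, standalone sketch of that filtering pattern (the `build_name_filter` helper and the sample connector names are illustrative only, not part of this patch):

```python
from __future__ import annotations

from collections.abc import Callable


def build_name_filter(
    name: str | None = None,
    name_filter: Callable[[str], bool] | None = None,
) -> Callable[[str], bool]:
    """Collapse `name`/`name_filter` into one predicate, mirroring the helpers above."""
    if name and name_filter:
        raise ValueError("You can provide name or name_filter, but not both.")

    # Exact-match filter if `name` is given, else the caller's filter, else match-all.
    return (lambda n: n == name) if name else name_filter or (lambda _: True)


names = ["prod-postgres", "staging-postgres", "prod-s3"]
print([n for n in names if build_name_filter(name="prod-s3")(n)])  # ['prod-s3']
print([n for n in names if build_name_filter(name_filter=lambda n: n.startswith("prod"))(n)])  # ['prod-postgres', 'prod-s3']
print([n for n in names if build_name_filter()(n)])  # all three names
```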
@@ -155,7 +316,8 @@ def run_connection( """ _ = workspace_id # Not used (yet) airbyte_instance = get_airbyte_server_instance( - api_key=api_key, + client_id=client_id, + client_secret=client_secret, api_root=api_root, ) response = airbyte_instance.jobs.create_job( @@ -185,11 +347,13 @@ def get_job_logs( limit: int = 20, *, api_root: str, - api_key: str, -) -> list[api.JobResponse]: + client_id: SecretString, + client_secret: SecretString, +) -> list[models.JobResponse]: """Get a job's logs.""" airbyte_instance = get_airbyte_server_instance( - api_key=api_key, + client_id=client_id, + client_secret=client_secret, api_root=api_root, ) response: api.ListJobsResponse = airbyte_instance.jobs.list_jobs( @@ -213,14 +377,16 @@ def get_job_logs( def get_job_info( - job_id: str, + job_id: int, *, api_root: str, - api_key: str, -) -> api.JobResponse: + client_id: SecretString, + client_secret: SecretString, +) -> models.JobResponse: """Get a job.""" airbyte_instance = get_airbyte_server_instance( - api_key=api_key, + client_id=client_id, + client_secret=client_secret, api_root=api_root, ) response = airbyte_instance.jobs.get_job( @@ -231,7 +397,11 @@ def get_job_info( if status_ok(response.status_code) and response.job_response: return response.job_response - raise AirbyteMissingResourceError(job_id, "job", response.text) + raise AirbyteMissingResourceError( + resource_name_or_id=str(job_id), + resource_type="job", + log_text=response.raw_response.text, + ) # Create, get, and delete sources @@ -241,20 +411,22 @@ def create_source( name: str, *, workspace_id: str, - config: dict[str, Any], + config: models.SourceConfiguration | dict[str, Any], api_root: str, - api_key: str, -) -> api.SourceResponse: + client_id: SecretString, + client_secret: SecretString, +) -> models.SourceResponse: """Get a connection.""" airbyte_instance = get_airbyte_server_instance( - api_key=api_key, + client_id=client_id, + client_secret=client_secret, api_root=api_root, ) response: api.CreateSourceResponse = airbyte_instance.sources.create_source( models.SourceCreateRequest( name=name, workspace_id=workspace_id, - configuration=config, + configuration=config, # Speakeasy API wants a dataclass, not a dict definition_id=None, # Not used alternative to config.sourceType. 
secret_id=None, # For OAuth, not yet supported ), @@ -272,11 +444,13 @@ def get_source( source_id: str, *, api_root: str, - api_key: str, -) -> api.SourceResponse: + client_id: SecretString, + client_secret: SecretString, +) -> models.SourceResponse: """Get a connection.""" airbyte_instance = get_airbyte_server_instance( - api_key=api_key, + client_id=client_id, + client_secret=client_secret, api_root=api_root, ) response = airbyte_instance.sources.get_source( @@ -284,23 +458,29 @@ def get_source( source_id=source_id, ), ) - if status_ok(response.status_code) and response.connection_response: - return response.connection_response + if status_ok(response.status_code) and response.source_response: + return response.source_response - raise AirbyteMissingResourceError(source_id, "source", response.text) + raise AirbyteMissingResourceError( + resource_name_or_id=source_id, + resource_type="source", + log_text=response.raw_response.text, + ) def delete_source( source_id: str, *, api_root: str, - api_key: str, + client_id: SecretString, + client_secret: SecretString, workspace_id: str | None = None, ) -> None: """Delete a source.""" _ = workspace_id # Not used (yet) airbyte_instance = get_airbyte_server_instance( - api_key=api_key, + client_id=client_id, + client_secret=client_secret, api_root=api_root, ) response = airbyte_instance.sources.delete_source( @@ -324,20 +504,22 @@ def create_destination( name: str, *, workspace_id: str, - config: dict[str, Any], + config: DestinationConfiguration | dict[str, Any], api_root: str, - api_key: str, -) -> api.DestinationResponse: + client_id: SecretString, + client_secret: SecretString, +) -> models.DestinationResponse: """Get a connection.""" airbyte_instance = get_airbyte_server_instance( - api_key=api_key, + client_id=client_id, + client_secret=client_secret, api_root=api_root, ) response: api.CreateDestinationResponse = airbyte_instance.destinations.create_destination( models.DestinationCreateRequest( name=name, workspace_id=workspace_id, - configuration=config, + configuration=config, # Speakeasy API wants a dataclass, not a dict ), ) if status_ok(response.status_code) and response.destination_response: @@ -353,11 +535,13 @@ def get_destination( destination_id: str, *, api_root: str, - api_key: str, -) -> api.DestinationResponse: + client_id: SecretString, + client_secret: SecretString, +) -> models.DestinationResponse: """Get a connection.""" airbyte_instance = get_airbyte_server_instance( - api_key=api_key, + client_id=client_id, + client_secret=client_secret, api_root=api_root, ) response = airbyte_instance.destinations.get_destination( @@ -365,46 +549,47 @@ def get_destination( destination_id=destination_id, ), ) - if status_ok(response.status_code): + if status_ok(response.status_code) and response.destination_response: # TODO: This is a temporary workaround to resolve an issue where # the destination API response is of the wrong type. 
# https://github.com/airbytehq/pyairbyte/issues/320 raw_response: dict[str, Any] = json.loads(response.raw_response.text) raw_configuration: dict[str, Any] = raw_response["configuration"] + destination_type = raw_response.get("destinationType") - if destination_type == "snowflake": - response.destination_response.configuration = models.DestinationSnowflake.from_dict( - raw_configuration, - ) - if destination_type == "bigquery": - response.destination_response.configuration = models.DestinationBigquery.from_dict( - raw_configuration, - ) - if destination_type == "postgres": - response.destination_response.configuration = models.DestinationPostgres.from_dict( - raw_configuration, - ) - if destination_type == "duckdb": - response.destination_response.configuration = models.DestinationDuckdb.from_dict( - raw_configuration, - ) + destination_mapping = { + "snowflake": models.DestinationSnowflake, + "bigquery": models.DestinationBigquery, + "postgres": models.DestinationPostgres, + "duckdb": models.DestinationDuckdb, + } + if destination_type in destination_mapping: + response.destination_response.configuration = destination_mapping[destination_type]( + **raw_configuration + ) return response.destination_response - raise AirbyteMissingResourceError(destination_id, "destination", response.text) + raise AirbyteMissingResourceError( + resource_name_or_id=destination_id, + resource_type="destination", + log_text=response.raw_response.text, + ) def delete_destination( destination_id: str, *, api_root: str, - api_key: str, + client_id: SecretString, + client_secret: SecretString, workspace_id: str | None = None, ) -> None: """Delete a destination.""" _ = workspace_id # Not used (yet) airbyte_instance = get_airbyte_server_instance( - api_key=api_key, + client_id=client_id, + client_secret=client_secret, api_root=api_root, ) response = airbyte_instance.destinations.delete_destination( @@ -424,20 +609,22 @@ def delete_destination( # Create and delete connections -def create_connection( +def create_connection( # noqa: PLR0913 # Too many arguments name: str, *, source_id: str, destination_id: str, api_root: str, - api_key: str, + client_id: SecretString, + client_secret: SecretString, workspace_id: str | None = None, prefix: str, selected_stream_names: list[str], ) -> models.ConnectionResponse: _ = workspace_id # Not used (yet) airbyte_instance = get_airbyte_server_instance( - api_key=api_key, + client_id=client_id, + client_secret=client_secret, api_root=api_root, ) stream_configurations: list[models.StreamConfiguration] = [] @@ -448,17 +635,17 @@ def create_connection( ) stream_configurations.append(stream_configuration) - stream_configurations = models.StreamConfigurations(stream_configurations) + stream_configurations_obj = models.StreamConfigurations(stream_configurations) response = airbyte_instance.connections.create_connection( models.ConnectionCreateRequest( name=name, source_id=source_id, destination_id=destination_id, - configurations=stream_configurations, + configurations=stream_configurations_obj, prefix=prefix, ), ) - if not status_ok(response.status_code): + if not status_ok(response.status_code) or response.connection_response is None: raise AirbyteError( context={ "source_id": source_id, @@ -475,13 +662,15 @@ def get_connection_by_name( connection_name: str, *, api_root: str, - api_key: str, + client_id: SecretString, + client_secret: SecretString, ) -> models.ConnectionResponse: """Get a connection.""" connections = list_connections( workspace_id=workspace_id, - api_key=api_key, 
api_root=api_root, + client_id=client_id, + client_secret=client_secret, ) found: list[models.ConnectionResponse] = [ connection for connection in connections if connection.name == connection_name @@ -508,11 +697,13 @@ def delete_connection( connection_id: str, api_root: str, workspace_id: str, - api_key: str, + client_id: SecretString, + client_secret: SecretString, ) -> None: _ = workspace_id # Not used (yet) airbyte_instance = get_airbyte_server_instance( - api_key=api_key, + client_id=client_id, + client_secret=client_secret, api_root=api_root, ) response = airbyte_instance.connections.delete_connection( diff --git a/airbyte/_util/name_normalizers.py b/airbyte/_util/name_normalizers.py index a8d2c144..a793974b 100644 --- a/airbyte/_util/name_normalizers.py +++ b/airbyte/_util/name_normalizers.py @@ -70,7 +70,7 @@ def normalize(name: str) -> str: result = name # Replace all non-alphanumeric characters with underscores. - result = re.sub("[^A-Za-z0-9]", "_", result.lower()) + result = re.sub(r"[^A-Za-z0-9]", "_", result.lower()) # Check if name starts with a number and prepend "_" if it does. if result and result[0].isdigit(): diff --git a/airbyte/_util/telemetry.py b/airbyte/_util/telemetry.py index 83ae694c..cbe12b5d 100644 --- a/airbyte/_util/telemetry.py +++ b/airbyte/_util/telemetry.py @@ -159,7 +159,7 @@ def _get_analytics_id() -> str | None: if result is False: return None - return cast(str, result) + return cast("str", result) _ANALYTICS_ID = _get_analytics_id() diff --git a/airbyte/_util/text_util.py b/airbyte/_util/text_util.py new file mode 100644 index 00000000..04cb4ad4 --- /dev/null +++ b/airbyte/_util/text_util.py @@ -0,0 +1,22 @@ +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. +"""Utility functions for working with text.""" + +from __future__ import annotations + +import ulid + + +def generate_ulid() -> str: + """Generate a new ULID.""" + return str(ulid.ULID()) + + +def generate_random_suffix() -> str: + """Generate a random suffix for use in temporary names. + + By default, this function generates a ULID and returns a 9-character string + which will be monotonically sortable. It is not guaranteed to be unique but + is sufficient for small-scale and medium-scale use cases. 
+ """ + ulid_str = generate_ulid().lower() + return ulid_str[:6] + ulid_str[-3:] diff --git a/airbyte/_writers/jsonl.py b/airbyte/_writers/jsonl.py index 1f5d1b4b..621d9bfb 100644 --- a/airbyte/_writers/jsonl.py +++ b/airbyte/_writers/jsonl.py @@ -34,8 +34,8 @@ def _open_new_file( ) -> IO[str]: """Open a new file for writing.""" return cast( - IO[str], - gzip.open( # noqa: SIM115 # Avoiding context manager + "IO[str]", + gzip.open( # Avoiding context manager file_path, mode="wt", encoding="utf-8", diff --git a/airbyte/caches/__init__.py b/airbyte/caches/__init__.py index c4d12717..9d21edd2 100644 --- a/airbyte/caches/__init__.py +++ b/airbyte/caches/__init__.py @@ -16,7 +16,7 @@ # Submodules imported here for documentation reasons: https://github.com/mitmproxy/pdoc/issues/757 if TYPE_CHECKING: - # ruff: noqa: TCH004 + # ruff: noqa: TC004 from airbyte.caches import base, bigquery, duckdb, motherduck, postgres, snowflake, util # We export these classes for easy access: `airbyte.caches...` diff --git a/airbyte/caches/base.py b/airbyte/caches/base.py index b0aab138..d47aadce 100644 --- a/airbyte/caches/base.py +++ b/airbyte/caches/base.py @@ -4,7 +4,7 @@ from __future__ import annotations from pathlib import Path -from typing import IO, TYPE_CHECKING, Any, final +from typing import IO, TYPE_CHECKING, Any, ClassVar, final import pandas as pd import pyarrow as pa @@ -30,7 +30,6 @@ from airbyte._message_iterators import AirbyteMessageIterator from airbyte.caches._state_backend_base import StateBackendBase - from airbyte.datasets._base import DatasetBase from airbyte.progress import ProgressTracker from airbyte.shared.sql_processor import SqlProcessorBase from airbyte.shared.state_providers import StateProviderBase @@ -57,16 +56,23 @@ class CacheBase(SqlConfig, AirbyteWriterInterface): _name: str = PrivateAttr() - _deployed_api_root: str | None = PrivateAttr(default=None) - _deployed_workspace_id: str | None = PrivateAttr(default=None) - _deployed_destination_id: str | None = PrivateAttr(default=None) - - _sql_processor_class: type[SqlProcessorBase] = PrivateAttr() + _sql_processor_class: ClassVar[type[SqlProcessorBase]] _read_processor: SqlProcessorBase = PrivateAttr() _catalog_backend: CatalogBackendBase = PrivateAttr() _state_backend: StateBackendBase = PrivateAttr() + paired_destination_name: ClassVar[str | None] = None + paired_destination_config_class: ClassVar[type | None] = None + + @property + def paired_destination_config(self) -> Any | dict[str, Any]: # noqa: ANN401 # Allow Any return type + """Return a dictionary of destination configuration values.""" + raise NotImplementedError( + f"The type '{type(self).__name__}' does not define an equivalent destination " + "configuration." + ) + def __init__(self, **data: Any) -> None: # noqa: ANN401 """Initialize the cache and backends.""" super().__init__(**data) @@ -231,6 +237,19 @@ def streams(self) -> dict[str, CachedDataset]: return result + @final + def __len__(self) -> int: + """Gets the number of streams.""" + return len(self._catalog_backend.stream_names) + + @final + def __bool__(self) -> bool: + """Always True. + + This is needed so that caches with zero streams are not falsey (None-like). 
+ """ + return True + def get_state_provider( self, source_name: str, @@ -274,7 +293,7 @@ def register_source( incoming_stream_names=stream_names, ) - def __getitem__(self, stream: str) -> DatasetBase: + def __getitem__(self, stream: str) -> CachedDataset: """Return a dataset by stream name.""" return self.streams[stream] diff --git a/airbyte/caches/bigquery.py b/airbyte/caches/bigquery.py index 68baebf1..a6aaf71e 100644 --- a/airbyte/caches/bigquery.py +++ b/airbyte/caches/bigquery.py @@ -17,21 +17,36 @@ from __future__ import annotations -from typing import NoReturn +from typing import TYPE_CHECKING, ClassVar, NoReturn -from pydantic import PrivateAttr +from airbyte_api.models import DestinationBigquery from airbyte._processors.sql.bigquery import BigQueryConfig, BigQuerySqlProcessor from airbyte.caches.base import ( CacheBase, ) from airbyte.constants import DEFAULT_ARROW_MAX_CHUNK_SIZE +from airbyte.destinations._translate_cache_to_dest import ( + bigquery_cache_to_destination_configuration, +) + + +if TYPE_CHECKING: + from airbyte.shared.sql_processor import SqlProcessorBase class BigQueryCache(BigQueryConfig, CacheBase): """The BigQuery cache implementation.""" - _sql_processor_class: type[BigQuerySqlProcessor] = PrivateAttr(default=BigQuerySqlProcessor) + _sql_processor_class: ClassVar[type[SqlProcessorBase]] = BigQuerySqlProcessor + + paired_destination_name: ClassVar[str | None] = "destination-bigquery" + paired_destination_config_class: ClassVar[type | None] = DestinationBigquery + + @property + def paired_destination_config(self) -> DestinationBigquery: + """Return a dictionary of destination configuration values.""" + return bigquery_cache_to_destination_configuration(cache=self) def get_arrow_dataset( self, diff --git a/airbyte/caches/duckdb.py b/airbyte/caches/duckdb.py index 8fdf40ad..3f720e30 100644 --- a/airbyte/caches/duckdb.py +++ b/airbyte/caches/duckdb.py @@ -17,12 +17,18 @@ from __future__ import annotations import warnings +from typing import TYPE_CHECKING, ClassVar +from airbyte_api.models import DestinationDuckdb from duckdb_engine import DuckDBEngineWarning -from pydantic import PrivateAttr from airbyte._processors.sql.duckdb import DuckDBConfig, DuckDBSqlProcessor from airbyte.caches.base import CacheBase +from airbyte.destinations._translate_cache_to_dest import duckdb_cache_to_destination_configuration + + +if TYPE_CHECKING: + from airbyte.shared.sql_processor import SqlProcessorBase # Suppress warnings from DuckDB about reflection on indices. @@ -37,7 +43,15 @@ class DuckDBCache(DuckDBConfig, CacheBase): """A DuckDB cache.""" - _sql_processor_class: type[DuckDBSqlProcessor] = PrivateAttr(default=DuckDBSqlProcessor) + _sql_processor_class: ClassVar[type[SqlProcessorBase]] = DuckDBSqlProcessor + + paired_destination_name: ClassVar[str | None] = "destination-duckdb" + paired_destination_config_class: ClassVar[type | None] = DestinationDuckdb + + @property + def paired_destination_config(self) -> DestinationDuckdb: + """Return a dictionary of destination configuration values.""" + return duckdb_cache_to_destination_configuration(cache=self) # Expose the Cache class and also the Config class. 
diff --git a/airbyte/caches/motherduck.py b/airbyte/caches/motherduck.py index 762d1b65..69c9b6f4 100644 --- a/airbyte/caches/motherduck.py +++ b/airbyte/caches/motherduck.py @@ -17,23 +17,33 @@ from __future__ import annotations import warnings +from typing import TYPE_CHECKING, ClassVar +from airbyte_api.models import DestinationDuckdb from duckdb_engine import DuckDBEngineWarning from overrides import overrides -from pydantic import Field, PrivateAttr +from pydantic import Field from airbyte._processors.sql.duckdb import DuckDBConfig from airbyte._processors.sql.motherduck import MotherDuckSqlProcessor from airbyte.caches.duckdb import DuckDBCache +from airbyte.destinations._translate_cache_to_dest import ( + motherduck_cache_to_destination_configuration, +) from airbyte.secrets import SecretString +if TYPE_CHECKING: + from airbyte.shared.sql_processor import SqlProcessorBase + + class MotherDuckConfig(DuckDBConfig): """Configuration for the MotherDuck cache.""" database: str = Field() api_key: SecretString = Field() db_path: str = Field(default="md:") + _paired_destination_name: str = "destination-motherduck" @overrides def get_sql_alchemy_url(self) -> SecretString: @@ -61,7 +71,15 @@ def get_database_name(self) -> str: class MotherDuckCache(MotherDuckConfig, DuckDBCache): """Cache that uses MotherDuck for external persistent storage.""" - _sql_processor_class: type[MotherDuckSqlProcessor] = PrivateAttr(default=MotherDuckSqlProcessor) + _sql_processor_class: ClassVar[type[SqlProcessorBase]] = MotherDuckSqlProcessor + + paired_destination_name: ClassVar[str | None] = "destination-bigquery" + paired_destination_config_class: ClassVar[type | None] = DestinationDuckdb + + @property + def paired_destination_config(self) -> DestinationDuckdb: + """Return a dictionary of destination configuration values.""" + return motherduck_cache_to_destination_configuration(cache=self) # Expose the Cache class and also the Config class. diff --git a/airbyte/caches/postgres.py b/airbyte/caches/postgres.py index 0e3f8177..db66bbc7 100644 --- a/airbyte/caches/postgres.py +++ b/airbyte/caches/postgres.py @@ -19,10 +19,19 @@ from __future__ import annotations -from pydantic import PrivateAttr +from typing import TYPE_CHECKING, ClassVar + +from airbyte_api.models import DestinationPostgres from airbyte._processors.sql.postgres import PostgresConfig, PostgresSqlProcessor from airbyte.caches.base import CacheBase +from airbyte.destinations._translate_cache_to_dest import ( + postgres_cache_to_destination_configuration, +) + + +if TYPE_CHECKING: + from airbyte.shared.sql_processor import SqlProcessorBase class PostgresCache(PostgresConfig, CacheBase): @@ -31,7 +40,25 @@ class PostgresCache(PostgresConfig, CacheBase): Also inherits config from the JsonlWriter, which is responsible for writing files to disk. 
""" - _sql_processor_class = PrivateAttr(default=PostgresSqlProcessor) + _sql_processor_class: ClassVar[type[SqlProcessorBase]] = PostgresSqlProcessor + + paired_destination_name: ClassVar[str | None] = "destination-bigquery" + paired_destination_config_class: ClassVar[type | None] = DestinationPostgres + + @property + def paired_destination_config(self) -> DestinationPostgres: + """Return a dictionary of destination configuration values.""" + return postgres_cache_to_destination_configuration(cache=self) + + def clone_as_cloud_destination_config(self) -> DestinationPostgres: + """Return a DestinationPostgres instance with the same configuration.""" + return DestinationPostgres( + host=self.host, + port=self.port, + username=self.username, + password=self.password, + database=self.database, + ) # Expose the Cache class and also the Config class. diff --git a/airbyte/caches/snowflake.py b/airbyte/caches/snowflake.py index ce8831de..a125cab4 100644 --- a/airbyte/caches/snowflake.py +++ b/airbyte/caches/snowflake.py @@ -21,11 +21,16 @@ from __future__ import annotations -from pydantic import PrivateAttr +from typing import ClassVar + +from airbyte_api.models import DestinationSnowflake from airbyte._processors.sql.snowflake import SnowflakeConfig, SnowflakeSqlProcessor from airbyte.caches.base import CacheBase -from airbyte.shared.sql_processor import RecordDedupeMode +from airbyte.destinations._translate_cache_to_dest import ( + snowflake_cache_to_destination_configuration, +) +from airbyte.shared.sql_processor import RecordDedupeMode, SqlProcessorBase class SnowflakeCache(SnowflakeConfig, CacheBase): @@ -33,7 +38,15 @@ class SnowflakeCache(SnowflakeConfig, CacheBase): dedupe_mode: RecordDedupeMode = RecordDedupeMode.APPEND - _sql_processor_class = PrivateAttr(default=SnowflakeSqlProcessor) + _sql_processor_class: ClassVar[type[SqlProcessorBase]] = SnowflakeSqlProcessor + + paired_destination_name: ClassVar[str | None] = "destination-bigquery" + paired_destination_config_class: ClassVar[type | None] = DestinationSnowflake + + @property + def paired_destination_config(self) -> DestinationSnowflake: + """Return a dictionary of destination configuration values.""" + return snowflake_cache_to_destination_configuration(cache=self) # Expose the Cache class and also the Config class. diff --git a/airbyte/cloud/__init__.py b/airbyte/cloud/__init__.py index ee7a59eb..9ca3bb58 100644 --- a/airbyte/cloud/__init__.py +++ b/airbyte/cloud/__init__.py @@ -66,7 +66,7 @@ # Submodules imported here for documentation reasons: https://github.com/mitmproxy/pdoc/issues/757 if TYPE_CHECKING: - # ruff: noqa: TCH004 + # ruff: noqa: TC004 from airbyte.cloud import connections, constants, sync_results, workspaces diff --git a/airbyte/cloud/_destination_util.py b/airbyte/cloud/_destination_util.py deleted file mode 100644 index 3861bb5b..00000000 --- a/airbyte/cloud/_destination_util.py +++ /dev/null @@ -1,213 +0,0 @@ -# Copyright (c) 2024 Airbyte, Inc., all rights reserved. 
-"""Cloud destinations for Airbyte.""" - -from __future__ import annotations - -from pathlib import Path -from typing import TYPE_CHECKING, Any - -from airbyte_api.models import ( - DestinationBigquery, - DestinationDuckdb, - DestinationPostgres, - DestinationSnowflake, - StandardInserts, - UsernameAndPassword, -) - -from airbyte.caches import ( - BigQueryCache, - DuckDBCache, - MotherDuckCache, - PostgresCache, - SnowflakeCache, -) -from airbyte.secrets import get_secret - - -if TYPE_CHECKING: - from collections.abc import Callable - - from airbyte.caches.base import CacheBase - - -SNOWFLAKE_PASSWORD_SECRET_NAME = "SNOWFLAKE_PASSWORD" - - -def get_destination_config_from_cache( - cache: CacheBase, -) -> dict[str, str]: - """Get the destination configuration from the cache.""" - conversion_fn_map: dict[str, Callable[[Any], dict[str, str]]] = { - "BigQueryCache": get_bigquery_destination_config, - "DuckDBCache": get_duckdb_destination_config, - "MotherDuckCache": get_motherduck_destination_config, - "PostgresCache": get_postgres_destination_config, - "SnowflakeCache": get_snowflake_destination_config, - } - cache_class_name = cache.__class__.__name__ - if cache_class_name not in conversion_fn_map: - raise ValueError( # noqa: TRY003 - "Cannot convert cache type to destination configuration. Cache type not supported. ", - f"Supported cache types: {list(conversion_fn_map.keys())}", - ) - - conversion_fn = conversion_fn_map[cache_class_name] - return conversion_fn(cache) - - -def get_duckdb_destination_config( - cache: DuckDBCache, -) -> dict[str, str]: - """Get the destination configuration from the DuckDB cache.""" - return DestinationDuckdb( - destination_path=cache.db_path, - schema=cache.schema_name, - ).to_dict() - - -def get_motherduck_destination_config( - cache: MotherDuckCache, -) -> dict[str, str]: - """Get the destination configuration from the DuckDB cache.""" - return DestinationDuckdb( - destination_path=cache.db_path, - schema=cache.schema_name, - motherduck_api_key=cache.api_key, - ).to_dict() - - -def get_postgres_destination_config( - cache: PostgresCache, -) -> dict[str, str]: - """Get the destination configuration from the Postgres cache.""" - return DestinationPostgres( - database=cache.database, - host=cache.host, - password=cache.password, - port=cache.port, - schema=cache.schema_name, - username=cache.username, - ).to_dict() - - -def get_snowflake_destination_config( - cache: SnowflakeCache, -) -> dict[str, str]: - """Get the destination configuration from the Snowflake cache.""" - return DestinationSnowflake( - host=f"{cache.account}.snowflakecomputing.com", - database=cache.get_database_name().upper(), - schema=cache.schema_name.upper(), - warehouse=cache.warehouse, - role=cache.role, - username=cache.username, - credentials=UsernameAndPassword( - password=cache.password, - ), - ).to_dict() - - -def get_bigquery_destination_config( - cache: BigQueryCache, -) -> dict[str, str]: - """Get the destination configuration from the BigQuery cache.""" - credentials_json: str | None = ( - Path(cache.credentials_path).read_text(encoding="utf-8") if cache.credentials_path else None - ) - destination = DestinationBigquery( - project_id=cache.project_name, - dataset_id=cache.dataset_name, - dataset_location="US", - credentials_json=credentials_json, - loading_method=StandardInserts, - ) - return destination.to_dict() - - -def create_bigquery_cache( - destination_configuration: DestinationBigquery, -) -> BigQueryCache: - """Create a new BigQuery cache from the destination 
configuration.""" - credentials_path = get_secret("BIGQUERY_CREDENTIALS_PATH") - return BigQueryCache( - project_name=destination_configuration.project_id, - dataset_name=destination_configuration.dataset_id, - credentials_path=credentials_path, - ) - - -def create_duckdb_cache( - destination_configuration: DestinationDuckdb, -) -> DuckDBCache: - """Create a new DuckDB cache from the destination configuration.""" - return DuckDBCache( - db_path=destination_configuration.destination_path, - schema_name=destination_configuration.schema, - ) - - -def create_motherduck_cache( - destination_configuration: DestinationDuckdb, -) -> MotherDuckCache: - """Create a new DuckDB cache from the destination configuration.""" - return MotherDuckCache( - database=destination_configuration.destination_path, - schema_name=destination_configuration.schema, - api_key=destination_configuration.motherduck_api_key, - ) - - -def create_postgres_cache( - destination_configuration: DestinationPostgres, -) -> PostgresCache: - """Create a new Postgres cache from the destination configuration.""" - port: int = int(destination_configuration.port) if "port" in destination_configuration else 5432 - return PostgresCache( - database=destination_configuration.database, - host=destination_configuration.host, - password=destination_configuration.password, - port=port, - schema_name=destination_configuration.schema, - username=destination_configuration.username, - ) - - -def create_snowflake_cache( - destination_configuration: DestinationSnowflake, - password_secret_name: str = SNOWFLAKE_PASSWORD_SECRET_NAME, -) -> SnowflakeCache: - """Create a new Snowflake cache from the destination configuration.""" - return SnowflakeCache( - account=destination_configuration.host.split(".snowflakecomputing")[0], - database=destination_configuration.database, - schema_name=destination_configuration.schema, - warehouse=destination_configuration.warehouse, - role=destination_configuration.role, - username=destination_configuration.username, - password=get_secret(password_secret_name), - ) - - -def create_cache_from_destination_config( - destination_configuration: DestinationBigquery - | DestinationDuckdb - | DestinationPostgres - | DestinationSnowflake, -) -> CacheBase: - """Create a new cache from the destination.""" - conversion_fn_map: dict[str, Callable[[dict[str, str]], CacheBase]] = { - "DestinationBigquery": create_bigquery_cache, - "DestinationDuckdb": create_duckdb_cache, - "DestinationPostgres": create_postgres_cache, - "DestinationSnowflake": create_snowflake_cache, - } - destination_class_name = type(destination_configuration).__name__ - if destination_class_name not in conversion_fn_map: - raise ValueError( # noqa: TRY003 - "Cannot convert destination configuration to cache. Destination type not supported. ", - f"Supported destination types: {list(conversion_fn_map.keys())}", - ) - - conversion_fn = conversion_fn_map[destination_class_name] - return conversion_fn(destination_configuration) diff --git a/airbyte/cloud/connections.py b/airbyte/cloud/connections.py index b4543a1f..10cb1d1b 100644 --- a/airbyte/cloud/connections.py +++ b/airbyte/cloud/connections.py @@ -6,6 +6,7 @@ from typing import TYPE_CHECKING, cast from airbyte._util import api_util +from airbyte.cloud.connectors import CloudDestination, CloudSource from airbyte.cloud.sync_results import SyncResult @@ -45,6 +46,13 @@ def __init__( """The ID of the destination.""" self._connection_info: ConnectionResponse | None = None + """The connection info object. 
(Cached.)""" + + self._cloud_source_object: CloudSource | None = None + """The source object. (Cached.)""" + + self._cloud_destination_object: CloudDestination | None = None + """The destination object. (Cached.)""" def _fetch_connection_info(self) -> ConnectionResponse: """Populate the connection with data from the API.""" @@ -52,7 +60,8 @@ def _fetch_connection_info(self) -> ConnectionResponse: workspace_id=self.workspace.workspace_id, connection_id=self.connection_id, api_root=self.workspace.api_root, - api_key=self.workspace.api_key, + client_id=self.workspace.client_id, + client_secret=self.workspace.client_secret, ) # Properties @@ -66,7 +75,19 @@ def source_id(self) -> str: self._source_id = self._connection_info.source_id - return cast(str, self._source_id) + return cast("str", self._source_id) + + @property + def source(self) -> CloudSource: + """Get the source object.""" + if self._cloud_source_object: + return self._cloud_source_object + + self._cloud_source_object = CloudSource( + workspace=self.workspace, + connector_id=self.source_id, + ) + return self._cloud_source_object @property def destination_id(self) -> str: @@ -77,7 +98,19 @@ def destination_id(self) -> str: self._destination_id = self._connection_info.source_id - return cast(str, self._destination_id) + return cast("str", self._destination_id) + + @property + def destination(self) -> CloudDestination: + """Get the destination object.""" + if self._cloud_destination_object: + return self._cloud_destination_object + + self._cloud_destination_object = CloudDestination( + workspace=self.workspace, + connector_id=self.destination_id, + ) + return self._cloud_destination_object @property def stream_names(self) -> list[str]: @@ -85,7 +118,7 @@ def stream_names(self) -> list[str]: if not self._connection_info: self._connection_info = self._fetch_connection_info() - return [stream.name for stream in self._connection_info.configurations.streams] + return [stream.name for stream in self._connection_info.configurations.streams or []] @property def table_prefix(self) -> str: @@ -93,7 +126,7 @@ def table_prefix(self) -> str: if not self._connection_info: self._connection_info = self._fetch_connection_info() - return self._connection_info.prefix + return self._connection_info.prefix or "" @property def connection_url(self) -> str | None: @@ -117,8 +150,9 @@ def run_sync( connection_response = api_util.run_connection( connection_id=self.connection_id, api_root=self.workspace.api_root, - api_key=self.workspace.api_key, workspace_id=self.workspace.workspace_id, + client_id=self.workspace.client_id, + client_secret=self.workspace.client_secret, ) sync_result = SyncResult( workspace=self.workspace, @@ -146,9 +180,10 @@ def get_previous_sync_logs( sync_logs: list[JobResponse] = api_util.get_job_logs( connection_id=self.connection_id, api_root=self.workspace.api_root, - api_key=self.workspace.api_key, workspace_id=self.workspace.workspace_id, limit=limit, + client_id=self.workspace.client_id, + client_secret=self.workspace.client_secret, ) return [ SyncResult( @@ -162,7 +197,7 @@ def get_previous_sync_logs( def get_sync_result( self, - job_id: str | None = None, + job_id: int | None = None, ) -> SyncResult | None: """Get the sync result for the connection. 
@@ -189,28 +224,22 @@ def get_sync_result( # Deletions - def _permanently_delete( + def permanently_delete( self, *, - delete_source: bool = False, - delete_destination: bool = False, + cascade_delete_source: bool = False, + cascade_delete_destination: bool = False, ) -> None: """Delete the connection. Args: - delete_source: Whether to also delete the source. - delete_destination: Whether to also delete the destination. + cascade_delete_source: Whether to also delete the source. + cascade_delete_destination: Whether to also delete the destination. """ - self.workspace._permanently_delete_connection( # noqa: SLF001 # Non-public API (for now) - connection=self - ) + self.workspace.permanently_delete_connection(self) - if delete_source: - self.workspace._permanently_delete_source( # noqa: SLF001 # Non-public API (for now) - source=self.source_id - ) + if cascade_delete_source: + self.workspace.permanently_delete_source(self.source_id) - if delete_destination: - self.workspace._permanently_delete_destination( # noqa: SLF001 # Non-public API - destination=self.destination_id, - ) + if cascade_delete_destination: + self.workspace.permanently_delete_destination(self.destination_id) diff --git a/airbyte/cloud/connectors.py b/airbyte/cloud/connectors.py new file mode 100644 index 00000000..ab3b1ecd --- /dev/null +++ b/airbyte/cloud/connectors.py @@ -0,0 +1,74 @@ +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. +"""Cloud connectors module for working with Cloud sources and destinations.""" + +from __future__ import annotations + +import abc +from typing import TYPE_CHECKING, ClassVar, Literal + + +if TYPE_CHECKING: + from airbyte.cloud.workspaces import CloudWorkspace + + +class CloudConnector(abc.ABC): + """A cloud connector is a deployed source or destination on Airbyte Cloud. + + You can use a connector object to manage the connector. + """ + + connector_type: ClassVar[Literal["source", "destination"]] + """The type of the connector.""" + + def __init__( + self, + workspace: CloudWorkspace, + connector_id: str, + ) -> None: + """Initialize a cloud connector object.""" + self.workspace = workspace + """The workspace that the connector belongs to.""" + self.connector_id = connector_id + """The ID of the connector.""" + + @property + def connector_url(self) -> str: + """Get the URL of the source connector.""" + return f"{self.workspace.workspace_url}/{self.connector_type}s/{self.connector_id}" + + def permanently_delete(self) -> None: + """Permanently delete the connector.""" + if self.connector_type == "source": + self.workspace.permanently_delete_source(self.connector_id) + else: + self.workspace.permanently_delete_destination(self.connector_id) + + +class CloudSource(CloudConnector): + """A cloud source is a source that is deployed on Airbyte Cloud.""" + + connector_type: ClassVar[Literal["source", "destination"]] = "source" + """The type of the connector.""" + + @property + def source_id(self) -> str: + """Get the ID of the source. + + This is an alias for `connector_id`. + """ + return self.connector_id + + +class CloudDestination(CloudConnector): + """A cloud destination is a destination that is deployed on Airbyte Cloud.""" + + connector_type: ClassVar[Literal["source", "destination"]] = "destination" + """The type of the connector.""" + + @property + def destination_id(self) -> str: + """Get the ID of the destination. + + This is an alias for `connector_id`. 
+ """ + return self.connector_id diff --git a/airbyte/cloud/experimental.py b/airbyte/cloud/experimental.py deleted file mode 100644 index c5ccf23a..00000000 --- a/airbyte/cloud/experimental.py +++ /dev/null @@ -1,60 +0,0 @@ -# Copyright (c) 2024 Airbyte, Inc., all rights reserved. -"""Experimental features for interacting with the Airbyte Cloud API. - -You can use this module to access experimental features in Airbyte Cloud, OSS, and Enterprise. These -features are subject to change and may not be available in all environments. **Future versions of -PyAirbyte may remove or change these features without notice.** - -To use this module, replace an import like this: - -```python -from airbyte.cloud import CloudConnection, CloudWorkspace -``` - -with an import like this: - -```python -from airbyte.cloud.experimental import CloudConnection, CloudWorkspace -``` - -You can toggle between the stable and experimental versions of these classes by changing the import -path. This allows you to test new features without requiring substantial changes to your codebase. - -""" -# ruff: noqa: SLF001 # This file accesses private members of other classes. - -from __future__ import annotations - -import warnings - -from airbyte import exceptions as exc -from airbyte.cloud.connections import CloudConnection as Stable_CloudConnection -from airbyte.cloud.workspaces import CloudWorkspace as Stable_CloudWorkspace - - -# This module is not imported anywhere by default, so this warning should only print if the user -# explicitly imports it. -warnings.warn( - message="The `airbyte.cloud.experimental` module is experimental and may change in the future.", - category=exc.AirbyteExperimentalFeatureWarning, - stacklevel=2, -) - - -class CloudWorkspace(Stable_CloudWorkspace): # noqa: D101 # Docstring inherited from parent. - __doc__ = ( - f"Experimental implementation of `.CloudWorkspace`.\n\n{Stable_CloudConnection.__doc__}" - ) - deploy_connection = Stable_CloudWorkspace._deploy_connection - deploy_source = Stable_CloudWorkspace._deploy_source - deploy_cache_as_destination = Stable_CloudWorkspace._deploy_cache_as_destination - permanently_delete_connection = Stable_CloudWorkspace._permanently_delete_connection - permanently_delete_source = Stable_CloudWorkspace._permanently_delete_source - permanently_delete_destination = Stable_CloudWorkspace._permanently_delete_destination - - -class CloudConnection(Stable_CloudConnection): # noqa: D101 # Docstring inherited from parent. 
- __doc__ = ( - f"Experimental implementation of `.CloudConnection`.\n\n{Stable_CloudConnection.__doc__}" - ) - permanently_delete = Stable_CloudConnection._permanently_delete diff --git a/airbyte/cloud/sync_results.py b/airbyte/cloud/sync_results.py index 769dae4a..f4767a05 100644 --- a/airbyte/cloud/sync_results.py +++ b/airbyte/cloud/sync_results.py @@ -102,14 +102,14 @@ import time from collections.abc import Iterator, Mapping -from dataclasses import dataclass +from dataclasses import asdict, dataclass from datetime import datetime from typing import TYPE_CHECKING, Any, final from airbyte._util import api_util -from airbyte.cloud._destination_util import create_cache_from_destination_config from airbyte.cloud.constants import FAILED_STATUSES, FINAL_STATUSES from airbyte.datasets import CachedDataset +from airbyte.destinations._translate_dest_to_cache import destination_to_cache from airbyte.exceptions import AirbyteConnectionSyncError, AirbyteConnectionSyncTimeoutError @@ -135,7 +135,7 @@ class SyncResult: workspace: CloudWorkspace connection: CloudConnection - job_id: str + job_id: int table_name_prefix: str = "" table_name_suffix: str = "" _latest_job_info: JobResponse | None = None @@ -155,8 +155,9 @@ def _get_connection_info(self, *, force_refresh: bool = False) -> ConnectionResp self._connection_response = api_util.get_connection( workspace_id=self.workspace.workspace_id, api_root=self.workspace.api_root, - api_key=self.workspace.api_key, connection_id=self.connection.connection_id, + client_id=self.workspace.client_id, + client_secret=self.workspace.client_secret, ) return self._connection_response @@ -166,9 +167,10 @@ def _get_destination_configuration(self, *, force_refresh: bool = False) -> dict destination_response = api_util.get_destination( destination_id=connection_info.destination_id, api_root=self.workspace.api_root, - api_key=self.workspace.api_key, + client_id=self.workspace.client_id, + client_secret=self.workspace.client_secret, ) - return destination_response.configuration + return asdict(destination_response.configuration) def is_job_complete(self) -> bool: """Check if the sync job is complete.""" @@ -186,19 +188,20 @@ def _fetch_latest_job_info(self) -> JobResponse: self._latest_job_info = api_util.get_job_info( job_id=self.job_id, api_root=self.workspace.api_root, - api_key=self.workspace.api_key, + client_id=self.workspace.client_id, + client_secret=self.workspace.client_secret, ) return self._latest_job_info @property def bytes_synced(self) -> int: """Return the number of records processed.""" - return self._fetch_latest_job_info().bytes_synced + return self._fetch_latest_job_info().bytes_synced or 0 @property def records_synced(self) -> int: """Return the number of records processed.""" - return self._fetch_latest_job_info().rows_synced + return self._fetch_latest_job_info().rows_synced or 0 @property def start_time(self) -> datetime: @@ -269,10 +272,8 @@ def get_sql_cache(self) -> CacheBase: if self._cache: return self._cache - destination_configuration: dict[str, Any] = self._get_destination_configuration() - self._cache = create_cache_from_destination_config( - destination_configuration=destination_configuration - ) + destination_configuration = self._get_destination_configuration() + self._cache = destination_to_cache(destination_configuration=destination_configuration) return self._cache def get_sql_engine(self) -> sqlalchemy.engine.Engine: diff --git a/airbyte/cloud/workspaces.py b/airbyte/cloud/workspaces.py index 6bd52a24..e8fb1549 100644 --- 
a/airbyte/cloud/workspaces.py +++ b/airbyte/cloud/workspaces.py @@ -8,28 +8,20 @@ from __future__ import annotations from dataclasses import dataclass -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Any from airbyte import exceptions as exc -from airbyte._util.api_util import ( - CLOUD_API_ROOT, - create_connection, - create_destination, - create_source, - delete_connection, - delete_destination, - delete_source, - get_workspace, -) -from airbyte.cloud._destination_util import get_destination_config_from_cache +from airbyte._util import api_util, text_util from airbyte.cloud.connections import CloudConnection -from airbyte.cloud.sync_results import SyncResult -from airbyte.sources.base import Source +from airbyte.cloud.connectors import CloudDestination, CloudSource +from airbyte.destinations.base import Destination +from airbyte.secrets.base import SecretString if TYPE_CHECKING: - from airbyte._util.api_imports import DestinationResponse - from airbyte.caches.base import CacheBase + from collections.abc import Callable + + from airbyte.sources.base import Source @dataclass @@ -41,8 +33,14 @@ class CloudWorkspace: """ workspace_id: str - api_key: str - api_root: str = CLOUD_API_ROOT + client_id: SecretString + client_secret: SecretString + api_root: str = api_util.CLOUD_API_ROOT + + def __post_init__(self) -> None: + """Ensure that the client ID and secret are handled securely.""" + self.client_id = SecretString(self.client_id) + self.client_secret = SecretString(self.client_secret) @property def workspace_url(self) -> str | None: @@ -58,212 +56,208 @@ def connect(self) -> None: serves primarily as a simple check to ensure that the workspace is reachable and credentials are correct. """ - _ = get_workspace( + _ = api_util.get_workspace( api_root=self.api_root, - api_key=self.api_key, workspace_id=self.workspace_id, + client_id=self.client_id, + client_secret=self.client_secret, ) print(f"Successfully connected to workspace: {self.workspace_url}") - # Deploy and delete sources + # Deploy sources and destinations - # TODO: Make this a public API - # https://github.com/airbytehq/pyairbyte/issues/228 - def _deploy_source( + def deploy_source( self, + name: str, source: Source, - ) -> str: + *, + unique: bool = True, + random_name_suffix: bool = False, + ) -> CloudSource: """Deploy a source to the workspace. - Returns the newly deployed source ID. + Returns the newly deployed source. + + Args: + name: The name to use when deploying. + source: The source object to deploy. + unique: Whether to require a unique name. If `True`, duplicate names + are not allowed. Defaults to `True`. + random_name_suffix: Whether to append a random suffix to the name. 
""" - source_configuration = source.get_config().copy() - source_configuration["sourceType"] = source.name.replace("source-", "") + source_config_dict = source.get_config().copy() + source_config_dict["sourceType"] = source.name.replace("source-", "") + + if random_name_suffix: + name += f" (ID: {text_util.generate_random_suffix()})" + + if unique: + existing = self.list_sources(name=name) + if existing: + raise exc.AirbyteDuplicateResourcesError( + resource_type="source", + resource_name=name, + ) - deployed_source = create_source( - name=f"{source.name.replace('-', ' ').title()} (Deployed by PyAirbyte)", + deployed_source = api_util.create_source( + name=name, api_root=self.api_root, - api_key=self.api_key, workspace_id=self.workspace_id, - config=source_configuration, + config=source_config_dict, + client_id=self.client_id, + client_secret=self.client_secret, + ) + return CloudSource( + workspace=self, + connector_id=deployed_source.source_id, ) - # Set the deployment Ids on the source object - source._deployed_api_root = self.api_root # noqa: SLF001 # Accessing nn-public API - source._deployed_workspace_id = self.workspace_id # noqa: SLF001 # Accessing nn-public API - source._deployed_source_id = deployed_source.source_id # noqa: SLF001 # Accessing nn-public API - - return deployed_source.source_id - - def _permanently_delete_source( + def deploy_destination( self, - source: str | Source, - ) -> None: - """Delete a source from the workspace. + name: str, + destination: Destination | dict[str, Any], + *, + unique: bool = True, + random_name_suffix: bool = False, + ) -> CloudDestination: + """Deploy a destination to the workspace. - You can pass either the source ID `str` or a deployed `Source` object. - """ - if not isinstance(source, str | Source): - raise ValueError(f"Invalid source type: {type(source)}") # noqa: TRY004, TRY003 + Returns the newly deployed destination ID. - if isinstance(source, Source): - if not source._deployed_source_id: # noqa: SLF001 - raise ValueError("Source has not been deployed.") # noqa: TRY003 + Args: + name: The name to use when deploying. + destination: The destination to deploy. Can be a local Airbyte `Destination` object or a + dictionary of configuration values. + unique: Whether to require a unique name. If `True`, duplicate names + are not allowed. Defaults to `True`. + random_name_suffix: Whether to append a random suffix to the name. 
+        """ + if isinstance(destination, Destination): + destination_conf_dict = destination.get_config().copy() + destination_conf_dict["destinationType"] = destination.name.replace("destination-", "") + else: + destination_conf_dict = destination.copy() + if "destinationType" not in destination_conf_dict: + raise exc.PyAirbyteInputError( + message="Missing `destinationType` in configuration dictionary.", + ) - source_id = source._deployed_source_id  # noqa: SLF001 + if random_name_suffix: + name += f" (ID: {text_util.generate_random_suffix()})" - elif isinstance(source, str): - source_id = source + if unique: + existing = self.list_destinations(name=name) + if existing: + raise exc.AirbyteDuplicateResourcesError( + resource_type="destination", + resource_name=name, + ) - delete_source( - source_id=source_id, + deployed_destination = api_util.create_destination( + name=name, api_root=self.api_root, - api_key=self.api_key, + workspace_id=self.workspace_id, + config=destination_conf_dict,  # Wants a dataclass but accepts dict + client_id=self.client_id, + client_secret=self.client_secret, + ) + return CloudDestination( + workspace=self, + connector_id=deployed_destination.destination_id, ) - # Deploy and delete destinations - - # TODO: Make this a public API - # https://github.com/airbytehq/pyairbyte/issues/228 - def _deploy_cache_as_destination( + def permanently_delete_source( self, - cache: CacheBase, - ) -> str: - """Deploy a cache to the workspace as a new destination. + source: str | CloudSource, + ) -> None: + """Delete a source from the workspace. - Returns the newly deployed destination ID. + You can pass either the source ID `str` or a deployed `CloudSource` object. """ - cache_type_name = cache.__class__.__name__.replace("Cache", "") + if not isinstance(source, (str, CloudSource)): + raise exc.PyAirbyteInputError( + message="Invalid source type.", + input_value=type(source).__name__, + ) - deployed_destination: DestinationResponse = create_destination( - name=f"Destination {cache_type_name} (Deployed by PyAirbyte)", + api_util.delete_source( + source_id=source.connector_id if isinstance(source, CloudSource) else source, api_root=self.api_root, - api_key=self.api_key, - workspace_id=self.workspace_id, - config=get_destination_config_from_cache(cache), + client_id=self.client_id, + client_secret=self.client_secret, ) - # Set the deployment Ids on the source object - cache._deployed_api_root = self.api_root  # noqa: SLF001 # Accessing nn-public API - cache._deployed_workspace_id = self.workspace_id  # noqa: SLF001 # Accessing nn-public API - cache._deployed_destination_id = deployed_destination.destination_id  # noqa: SLF001 # Accessing nn-public API - - return deployed_destination.destination_id + # Deploy and delete destinations - def _permanently_delete_destination( + def permanently_delete_destination( self, - *, - destination: str | None = None, - cache: CacheBase | None = None, + destination: str | CloudDestination, ) -> None: """Delete a deployed destination from the workspace. You can pass either the destination ID `str` or a deployed `CloudDestination` object. """ - if destination is None and cache is None: - raise ValueError("You must provide either a destination ID or a cache object.")  # noqa: TRY003 - if destination is not None and cache is not None: - raise ValueError(  # noqa: TRY003 - "You must provide either a destination ID or a cache object, not both."
+ if not isinstance(destination, (str, CloudDestination)): + raise exc.PyAirbyteInputError( + message="Invalid destination type.", + input_value=type(destination).__name__, ) - if cache: - if not cache._deployed_destination_id: # noqa: SLF001 - raise ValueError("Cache has not been deployed.") # noqa: TRY003 - - destination = cache._deployed_destination_id # noqa: SLF001 - - if destination is None: - raise ValueError("No destination ID provided.") # noqa: TRY003 - - delete_destination( - destination_id=destination, + api_util.delete_destination( + destination_id=( + destination if isinstance(destination, str) else destination.destination_id + ), api_root=self.api_root, - api_key=self.api_key, + client_id=self.client_id, + client_secret=self.client_secret, ) # Deploy and delete connections - # TODO: Make this a public API - # https://github.com/airbytehq/pyairbyte/issues/228 - def _deploy_connection( + def deploy_connection( self, - source: Source | str, - cache: CacheBase | None = None, - destination: str | None = None, + connection_name: str, + *, + source: CloudSource | str, + selected_streams: list[str], + destination: CloudDestination | str, table_prefix: str | None = None, - selected_streams: list[str] | None = None, ) -> CloudConnection: - """Deploy a source and cache to the workspace as a new connection. + """Create a new connection between an already deployed source and destination. - Returns the newly deployed connection ID as a `str`. + Returns the newly deployed connection object. Args: - source (Source | str): The source to deploy. You can pass either an already deployed - source ID `str` or a PyAirbyte `Source` object. If you pass a `Source` object, - it will be deployed automatically. - cache (CacheBase, optional): The cache to deploy as a new destination. You can provide - `cache` or `destination`, but not both. - destination (str, optional): The destination ID to use. You can provide - `cache` or `destination`, but not both. - table_prefix (str, optional): The table prefix to use for the cache. If not provided, - the cache's table prefix will be used. - selected_streams (list[str], optional): The selected stream names to use for the - connection. If not provided, the source's selected streams will be used. + connection_name: The name of the connection. + source: The deployed source. You can pass a source ID or a CloudSource object. + destination: The deployed destination. You can pass a destination ID or a + CloudDestination object. + table_prefix: Optional. The table prefix to use when syncing to the destination. + selected_streams: The selected stream names to sync within the connection. """ - # Resolve source ID - source_id: str - if isinstance(source, Source): - selected_streams = selected_streams or source.get_selected_streams() - source_id = ( - source._deployed_source_id # noqa: SLF001 # Access to non-public API - or self._deploy_source(source) - ) - else: - source_id = source - if not selected_streams: - raise exc.PyAirbyteInputError( - guidance="You must provide `selected_streams` when deploying a source ID." 
- ) - - # Resolve destination ID - destination_id: str - if destination: - destination_id = destination - elif cache: - table_prefix = table_prefix if table_prefix is not None else (cache.table_prefix or "") - if not cache._deployed_destination_id: # noqa: SLF001 - destination_id = self._deploy_cache_as_destination(cache) - else: - destination_id = cache._deployed_destination_id # noqa: SLF001 - else: + if not selected_streams: raise exc.PyAirbyteInputError( - guidance="You must provide either a destination ID or a cache object." + guidance="You must provide `selected_streams` when creating a connection." ) - assert source_id is not None - assert destination_id is not None + source_id: str = source if isinstance(source, str) else source.connector_id + destination_id: str = ( + destination if isinstance(destination, str) else destination.connector_id + ) - deployed_connection = create_connection( - name="Connection (Deployed by PyAirbyte)", + deployed_connection = api_util.create_connection( + name=connection_name, source_id=source_id, destination_id=destination_id, api_root=self.api_root, - api_key=self.api_key, workspace_id=self.workspace_id, selected_stream_names=selected_streams, prefix=table_prefix or "", + client_id=self.client_id, + client_secret=self.client_secret, ) - if isinstance(source, Source): - source._deployed_api_root = self.api_root # noqa: SLF001 - source._deployed_workspace_id = self.workspace_id # noqa: SLF001 - source._deployed_source_id = source_id # noqa: SLF001 - if cache: - cache._deployed_api_root = self.api_root # noqa: SLF001 - cache._deployed_workspace_id = self.workspace_id # noqa: SLF001 - cache._deployed_destination_id = deployed_connection.destination_id # noqa: SLF001 - return CloudConnection( workspace=self, connection_id=deployed_connection.connection_id, @@ -285,16 +279,16 @@ def get_connection( connection_id=connection_id, ) - def _permanently_delete_connection( + def permanently_delete_connection( self, connection: str | CloudConnection, *, - delete_source: bool = False, - delete_destination: bool = False, + cascade_delete_source: bool = False, + cascade_delete_destination: bool = False, ) -> None: """Delete a deployed connection from the workspace.""" if connection is None: - raise ValueError("No connection ID provided.") # noqa: TRY003 + raise ValueError("No connection ID provided.") if isinstance(connection, str): connection = CloudConnection( @@ -302,81 +296,91 @@ def _permanently_delete_connection( connection_id=connection, ) - delete_connection( + api_util.delete_connection( connection_id=connection.connection_id, api_root=self.api_root, - api_key=self.api_key, workspace_id=self.workspace_id, + client_id=self.client_id, + client_secret=self.client_secret, ) - if delete_source: - self._permanently_delete_source(source=connection.source_id) - if delete_destination: - self._permanently_delete_destination(destination=connection.destination_id) + if cascade_delete_source: + self.permanently_delete_source(source=connection.source_id) + if cascade_delete_destination: + self.permanently_delete_destination(destination=connection.destination_id) - # Run syncs + # List sources, destinations, and connections - def run_sync( + def list_connections( self, - connection_id: str, + name: str | None = None, *, - wait: bool = True, - wait_timeout: int = 300, - ) -> SyncResult: - """Run a sync on a deployed connection.""" - connection = CloudConnection( - workspace=self, - connection_id=connection_id, + name_filter: Callable | None = None, + ) -> 
list[CloudConnection]: + """List connections by name in the workspace.""" + connections = api_util.list_connections( + api_root=self.api_root, + workspace_id=self.workspace_id, + name=name, + name_filter=name_filter, + client_id=self.client_id, + client_secret=self.client_secret, ) - return connection.run_sync(wait=wait, wait_timeout=wait_timeout) - - # Get sync results and previous sync logs + return [ + CloudConnection( + workspace=self, + connection_id=connection.connection_id, + source=None, + destination=None, + ) + for connection in connections + if name is None or connection.name == name + ] - def get_sync_result( + def list_sources( self, - connection_id: str, - job_id: str | None = None, - ) -> SyncResult | None: - """Get the sync result for a connection job. - - If `job_id` is not provided, the most recent sync job will be used. - - Returns `None` if job_id is omitted and no previous jobs are found. - """ - connection = CloudConnection( - workspace=self, - connection_id=connection_id, + name: str | None = None, + *, + name_filter: Callable | None = None, + ) -> list[CloudSource]: + """List all sources in the workspace.""" + sources = api_util.list_sources( + api_root=self.api_root, + workspace_id=self.workspace_id, + name=name, + name_filter=name_filter, + client_id=self.client_id, + client_secret=self.client_secret, ) - if job_id is None: - results = self.get_previous_sync_logs( - connection_id=connection_id, - limit=1, + return [ + CloudSource( + workspace=self, + connector_id=source.source_id, ) - if results: - return results[0] - - return None - connection = CloudConnection( - workspace=self, - connection_id=connection_id, - ) - return SyncResult( - workspace=self, - connection=connection, - job_id=job_id, - ) + for source in sources + if name is None or source.name == name + ] - def get_previous_sync_logs( + def list_destinations( self, - connection_id: str, + name: str | None = None, *, - limit: int = 10, - ) -> list[SyncResult]: - """Get the previous sync logs for a connection.""" - connection = CloudConnection( - workspace=self, - connection_id=connection_id, - ) - return connection.get_previous_sync_logs( - limit=limit, + name_filter: Callable | None = None, + ) -> list[CloudDestination]: + """List all destinations in the workspace.""" + destinations = api_util.list_destinations( + api_root=self.api_root, + workspace_id=self.workspace_id, + name=name, + name_filter=name_filter, + client_id=self.client_id, + client_secret=self.client_secret, ) + return [ + CloudDestination( + workspace=self, + connector_id=destination.destination_id, + ) + for destination in destinations + if name is None or destination.name == name + ] diff --git a/airbyte/datasets/_base.py b/airbyte/datasets/_base.py index 7235e0c7..25cd8ff1 100644 --- a/airbyte/datasets/_base.py +++ b/airbyte/datasets/_base.py @@ -2,7 +2,6 @@ from __future__ import annotations from abc import ABC, abstractmethod -from collections.abc import Iterable, Iterator from typing import TYPE_CHECKING, Any, cast from pandas import DataFrame @@ -12,6 +11,8 @@ if TYPE_CHECKING: + from collections.abc import Iterable, Iterator + from pyarrow.dataset import Dataset from airbyte_protocol.models import ConfiguredAirbyteStream @@ -38,7 +39,7 @@ def to_pandas(self) -> DataFrame: # Technically, we return an iterator of Mapping objects. However, pandas # expects an iterator of dict objects. This cast is safe because we know # duck typing is correct for this use case. 
- return DataFrame(cast(Iterator[dict[str, Any]], self)) + return DataFrame(cast("Iterator[dict[str, Any]]", self)) def to_arrow( self, diff --git a/airbyte/datasets/_sql.py b/airbyte/datasets/_sql.py index fa50c4a2..e0aba4df 100644 --- a/airbyte/datasets/_sql.py +++ b/airbyte/datasets/_sql.py @@ -41,7 +41,7 @@ def __init__( cache: CacheBase, stream_name: str, query_statement: Select, - stream_configuration: ConfiguredAirbyteStream | None | Literal[False] = None, + stream_configuration: ConfiguredAirbyteStream | Literal[False] | None = None, ) -> None: """Initialize the dataset with a cache, stream name, and query statement. @@ -86,7 +86,7 @@ def __iter__(self) -> Iterator[dict[str, Any]]: for row in conn.execute(self._query_statement): # Access to private member required because SQLAlchemy doesn't expose a public API. # https://pydoc.dev/sqlalchemy/latest/sqlalchemy.engine.row.RowMapping.html - yield cast(dict[str, Any], row._mapping) # noqa: SLF001 + yield cast("dict[str, Any]", row._mapping) # noqa: SLF001 def __len__(self) -> int: """Return the number of records in the dataset. @@ -98,7 +98,7 @@ def __len__(self) -> int: with self._cache.processor.get_sql_connection() as conn: self._length = conn.execute(count_query).scalar() - return cast(int, self._length) + return cast("int", self._length) def to_pandas(self) -> DataFrame: return self._cache.get_pandas_dataframe(self._stream_name) @@ -147,7 +147,7 @@ def __init__( self, cache: CacheBase, stream_name: str, - stream_configuration: ConfiguredAirbyteStream | None | Literal[False] = None, + stream_configuration: ConfiguredAirbyteStream | Literal[False] | None = None, ) -> None: """We construct the query statement by selecting all columns from the table. diff --git a/airbyte/destinations/__init__.py b/airbyte/destinations/__init__.py index 6ec39c70..42cebe25 100644 --- a/airbyte/destinations/__init__.py +++ b/airbyte/destinations/__init__.py @@ -86,7 +86,7 @@ # Submodules imported here for documentation reasons: https://github.com/mitmproxy/pdoc/issues/757 if TYPE_CHECKING: - # ruff: noqa: TCH004 # imports used for more than type checking + # ruff: noqa: TC004 # imports used for more than type checking from airbyte.destinations import util diff --git a/airbyte/destinations/_translate_cache_to_dest.py b/airbyte/destinations/_translate_cache_to_dest.py new file mode 100644 index 00000000..afdbf965 --- /dev/null +++ b/airbyte/destinations/_translate_cache_to_dest.py @@ -0,0 +1,131 @@ +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. 
+"""Cloud destinations for Airbyte.""" + +from __future__ import annotations + +from pathlib import Path +from typing import TYPE_CHECKING, Any + +from airbyte_api.models import ( + BatchedStandardInserts, + DestinationBigquery, + DestinationDuckdb, + DestinationPostgres, + DestinationSnowflake, + UsernameAndPassword, +) + +from airbyte.secrets.base import SecretString + + +if TYPE_CHECKING: + from collections.abc import Callable + + from airbyte._util import api_util + from airbyte.caches.base import CacheBase + from airbyte.caches.bigquery import BigQueryCache + from airbyte.caches.duckdb import DuckDBCache + from airbyte.caches.motherduck import MotherDuckCache + from airbyte.caches.postgres import PostgresCache + from airbyte.caches.snowflake import SnowflakeCache + + +SNOWFLAKE_PASSWORD_SECRET_NAME = "SNOWFLAKE_PASSWORD" + + +def cache_to_destination_configuration( + cache: CacheBase, +) -> api_util.DestinationConfiguration: + """Get the destination configuration from the cache.""" + conversion_fn_map: dict[str, Callable[[Any], api_util.DestinationConfiguration]] = { + "BigQueryCache": bigquery_cache_to_destination_configuration, + "bigquery": bigquery_cache_to_destination_configuration, + "DuckDBCache": duckdb_cache_to_destination_configuration, + "duckdb": duckdb_cache_to_destination_configuration, + "MotherDuckCache": motherduck_cache_to_destination_configuration, + "motherduck": motherduck_cache_to_destination_configuration, + "PostgresCache": postgres_cache_to_destination_configuration, + "postgres": postgres_cache_to_destination_configuration, + "SnowflakeCache": snowflake_cache_to_destination_configuration, + "snowflake": snowflake_cache_to_destination_configuration, + } + cache_class_name = cache.__class__.__name__ + if cache_class_name not in conversion_fn_map: + raise ValueError( + "Cannot convert cache type to destination configuration. " + f"Cache type {cache_class_name} not supported. 
" + f"Supported cache types: {list(conversion_fn_map.keys())}" + ) + + conversion_fn = conversion_fn_map[cache_class_name] + return conversion_fn(cache) + + +def duckdb_cache_to_destination_configuration( + cache: DuckDBCache, +) -> DestinationDuckdb: + """Get the destination configuration from the DuckDB cache.""" + return DestinationDuckdb( + destination_path=str(cache.db_path), + schema=cache.schema_name, + ) + + +def motherduck_cache_to_destination_configuration( + cache: MotherDuckCache, +) -> DestinationDuckdb: + """Get the destination configuration from the DuckDB cache.""" + return DestinationDuckdb( + destination_path=cache.db_path, + schema=cache.schema_name, + motherduck_api_key=cache.api_key, + ) + + +def postgres_cache_to_destination_configuration( + cache: PostgresCache, +) -> DestinationPostgres: + """Get the destination configuration from the Postgres cache.""" + return DestinationPostgres( + database=cache.database, + host=cache.host, + password=cache.password, + port=cache.port, + schema=cache.schema_name, + username=cache.username, + ) + + +def snowflake_cache_to_destination_configuration( + cache: SnowflakeCache, +) -> DestinationSnowflake: + """Get the destination configuration from the Snowflake cache.""" + return DestinationSnowflake( + host=f"{cache.account}.snowflakecomputing.com", + database=cache.get_database_name().upper(), + schema=cache.schema_name.upper(), + warehouse=cache.warehouse, + role=cache.role, + username=cache.username, + credentials=UsernameAndPassword( + password=cache.password, + ), + ) + + +def bigquery_cache_to_destination_configuration( + cache: BigQueryCache, +) -> DestinationBigquery: + """Get the destination configuration from the BigQuery cache.""" + credentials_json: str | None = ( + SecretString(Path(cache.credentials_path).read_text(encoding="utf-8")) + if cache.credentials_path + else None + ) + return DestinationBigquery( + project_id=cache.project_name, + dataset_id=cache.dataset_name, + dataset_location="US", + credentials_json=credentials_json, + loading_method=BatchedStandardInserts(), + ) diff --git a/airbyte/destinations/_translate_dest_to_cache.py b/airbyte/destinations/_translate_dest_to_cache.py new file mode 100644 index 00000000..311de1e7 --- /dev/null +++ b/airbyte/destinations/_translate_dest_to_cache.py @@ -0,0 +1,177 @@ +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. 
+"""Cloud destinations for Airbyte.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from airbyte_api.models import ( + DestinationBigquery, + DestinationDuckdb, + DestinationPostgres, + DestinationSnowflake, +) + +from airbyte.caches.base import CacheBase +from airbyte.caches.bigquery import BigQueryCache +from airbyte.caches.duckdb import DuckDBCache +from airbyte.caches.motherduck import MotherDuckCache +from airbyte.caches.postgres import PostgresCache +from airbyte.caches.snowflake import SnowflakeCache +from airbyte.exceptions import PyAirbyteSecretNotFoundError +from airbyte.secrets import get_secret +from airbyte.secrets.base import SecretString + + +if TYPE_CHECKING: + from collections.abc import Callable + + from airbyte._util import api_util + from airbyte.caches.base import CacheBase + + +SNOWFLAKE_PASSWORD_SECRET_NAME = "SNOWFLAKE_PASSWORD" + + +def destination_to_cache( + destination_configuration: api_util.DestinationConfiguration | dict[str, Any], +) -> CacheBase: + """Get the destination configuration from the cache.""" + conversion_fn_map: dict[str, Callable[[Any], CacheBase]] = { + "bigquery": bigquery_destination_to_cache, + "duckdb": duckdb_destination_to_cache, + "motherduck": motherduck_destination_to_cache, + "postgres": postgres_destination_to_cache, + "snowflake": snowflake_destination_to_cache, + } + if isinstance(destination_configuration, dict): + try: + destination_type = ( + destination_configuration.get("DESTINATION_TYPE") + or destination_configuration["destinationType"] + ) + if hasattr(destination_type, "value"): + destination_type = destination_type.value + elif hasattr(destination_type, "_value_"): + destination_type = destination_type._value_ + else: + destination_type = str(destination_type) + except KeyError as ex: + raise ValueError( + f"Missing 'destinationType' in keys {list(destination_configuration.keys())}." + ) from ex + else: + destination_type = destination_configuration.DESTINATION_TYPE.value + + if destination_type not in conversion_fn_map: + raise ValueError( + "Cannot convert destination to a cache configuration. " + f"Destination type {destination_type} not supported. " + f"Supported cache types: {list(conversion_fn_map.keys())}" + ) + + conversion_fn = conversion_fn_map[destination_type] + return conversion_fn(destination_configuration) + + +def bigquery_destination_to_cache( + destination_configuration: DestinationBigquery | dict[str, Any], +) -> BigQueryCache: + """Create a new BigQuery cache from the destination configuration. + + We may have to inject credentials, because they are obfuscated when config + is returned from the REST API. 
+    """ + credentials_path = get_secret("BIGQUERY_CREDENTIALS_PATH") + if isinstance(destination_configuration, dict): + destination_configuration = DestinationBigquery(**destination_configuration) + + return BigQueryCache( + project_name=destination_configuration.project_id, + dataset_name=destination_configuration.dataset_id, + credentials_path=credentials_path, + ) + + +def duckdb_destination_to_cache( + destination_configuration: DestinationDuckdb, +) -> DuckDBCache: + """Create a new DuckDB cache from the destination configuration.""" + return DuckDBCache( + db_path=destination_configuration.destination_path, + schema_name=destination_configuration.schema or "main", + ) + + +def motherduck_destination_to_cache( + destination_configuration: DestinationDuckdb, +) -> MotherDuckCache: + """Create a new MotherDuck cache from the destination configuration.""" + if not destination_configuration.motherduck_api_key: + raise ValueError("MotherDuck API key is required for MotherDuck cache.") + + return MotherDuckCache( + database=destination_configuration.destination_path, + schema_name=destination_configuration.schema or "main", + api_key=SecretString(destination_configuration.motherduck_api_key), + ) + + +def postgres_destination_to_cache( + destination_configuration: DestinationPostgres, +) -> PostgresCache: + """Create a new Postgres cache from the destination configuration.""" + port: int = int(destination_configuration.port) if destination_configuration.port else 5432 + if not destination_configuration.password: + raise ValueError("Password is required for Postgres cache.") + + return PostgresCache( + database=destination_configuration.database, + host=destination_configuration.host, + password=destination_configuration.password, + port=port, + schema_name=destination_configuration.schema or "public", + username=destination_configuration.username, + ) + + +def snowflake_destination_to_cache( + destination_configuration: DestinationSnowflake | dict[str, Any], + password_secret_name: str = SNOWFLAKE_PASSWORD_SECRET_NAME, +) -> SnowflakeCache: + """Create a new Snowflake cache from the destination configuration. + + We may have to inject credentials, because they are obfuscated when config + is returned from the REST API. + """ + if isinstance(destination_configuration, dict): + destination_configuration = DestinationSnowflake(**destination_configuration) + + snowflake_password: str | None = None + if ( + destination_configuration.credentials + and hasattr(destination_configuration.credentials, "password") + and isinstance(destination_configuration.credentials.password, str) + ): + destination_password = str(destination_configuration.credentials.password) + if "****" in destination_password: + try: + snowflake_password = get_secret(password_secret_name) + except ValueError as ex: + raise PyAirbyteSecretNotFoundError( + "Password is required for Snowflake cache, but it was not available."
+ ) from ex + else: + snowflake_password = get_secret(destination_password) + else: + snowflake_password = get_secret(password_secret_name) + + return SnowflakeCache( + account=destination_configuration.host.split(".snowflakecomputing")[0], + database=destination_configuration.database, + schema_name=destination_configuration.schema, + warehouse=destination_configuration.warehouse, + role=destination_configuration.role, + username=destination_configuration.username, + password=snowflake_password, + ) diff --git a/airbyte/destinations/base.py b/airbyte/destinations/base.py index 2d60c29d..206c0279 100644 --- a/airbyte/destinations/base.py +++ b/airbyte/destinations/base.py @@ -9,10 +9,6 @@ import warnings from typing import IO, TYPE_CHECKING, Any, Literal, cast -from airbyte_protocol.models import ( - Type, -) - from airbyte import exceptions as exc from airbyte._connector_base import ConnectorBase from airbyte._message_iterators import AirbyteMessageIterator @@ -42,7 +38,7 @@ class Destination(ConnectorBase, AirbyteWriterInterface): """A class representing a destination that can be called.""" - connector_type: Literal["destination"] = "destination" + connector_type = "destination" def __init__( self, @@ -70,8 +66,8 @@ def write( # noqa: PLR0912, PLR0915 # Too many arguments/statements source_data: Source | ReadResult, *, streams: list[str] | Literal["*"] | None = None, - cache: CacheBase | None | Literal[False] = None, - state_cache: CacheBase | None | Literal[False] = None, + cache: CacheBase | Literal[False] | None = None, + state_cache: CacheBase | Literal[False] | None = None, write_strategy: WriteStrategy = WriteStrategy.AUTO, force_full_refresh: bool = False, ) -> WriteResult: @@ -114,7 +110,7 @@ def write( # noqa: PLR0912, PLR0915 # Too many arguments/statements read_result: ReadResult | None = ( source_data if isinstance(source_data, ReadResult) else None ) - source_name: str = source.name if source else cast(ReadResult, read_result).source_name + source_name: str = source.name if source else cast("ReadResult", read_result).source_name # State providers and writers default to no-op, unless overridden below. cache_state_provider: StateProviderBase = StaticInputState([]) @@ -293,7 +289,7 @@ def _write_airbyte_message_stream( ), ) ): - if destination_message.type is Type.STATE: + if destination_message.state: state_writer.write_state(state_message=destination_message.state) except exc.AirbyteConnectorFailedError as ex: diff --git a/airbyte/destinations/util.py b/airbyte/destinations/util.py index a44a76bb..a27e7596 100644 --- a/airbyte/destinations/util.py +++ b/airbyte/destinations/util.py @@ -75,7 +75,10 @@ def get_destination( # noqa: PLR0913 # Too many arguments ) -def get_noop_destination() -> Destination: +def get_noop_destination( + *, + install_if_missing: bool = True, +) -> Destination: """Get a devnull (no-op) destination. 
This is useful for performance benchmarking of sources, without @@ -93,6 +96,7 @@ def get_noop_destination() -> Destination: } }, docker_image=True, + install_if_missing=install_if_missing, ) diff --git a/airbyte/exceptions.py b/airbyte/exceptions.py index 0713bb64..110048d8 100644 --- a/airbyte/exceptions.py +++ b/airbyte/exceptions.py @@ -431,7 +431,7 @@ class AirbyteConnectionError(AirbyteError): connection_id: str | None = None """The connection ID where the error occurred.""" - job_id: str | None = None + job_id: int | None = None """The job ID where the error occurred (if applicable).""" job_status: str | None = None @@ -486,6 +486,15 @@ class AirbyteMissingResourceError(AirbyteError): resource_name_or_id: str | None = None +@dataclass +class AirbyteDuplicateResourcesError(AirbyteError): + """Process failed because resource name was not unique.""" + + resource_type: str | None = None + resource_name: str | None = None + + +# Custom Warnings @dataclass class AirbyteMultipleResourcesError(AirbyteError): """Could not locate the resource because multiple matching resources were found.""" diff --git a/airbyte/progress.py b/airbyte/progress.py index 8cbd4a45..178bbfcf 100644 --- a/airbyte/progress.py +++ b/airbyte/progress.py @@ -754,7 +754,7 @@ def _update_display(self, *, force_refresh: bool = False) -> None: if ( not force_refresh and self._last_update_time # if not set, then we definitely need to update - and cast(float, self.elapsed_seconds_since_last_update) < 0.8 # noqa: PLR2004 + and cast("float", self.elapsed_seconds_since_last_update) < 0.8 # noqa: PLR2004 ): return diff --git a/airbyte/secrets/__init__.py b/airbyte/secrets/__init__.py index d797e0c0..a8fe72fc 100644 --- a/airbyte/secrets/__init__.py +++ b/airbyte/secrets/__init__.py @@ -78,7 +78,7 @@ # Submodules imported here for documentation reasons: https://github.com/mitmproxy/pdoc/issues/757 if TYPE_CHECKING: - # ruff: noqa: TCH004 # imports used for more than type checking + # ruff: noqa: TC004 # imports used for more than type checking from airbyte.secrets import ( base, config, diff --git a/airbyte/secrets/base.py b/airbyte/secrets/base.py index f2b80fc2..b9e4ab7d 100644 --- a/airbyte/secrets/base.py +++ b/airbyte/secrets/base.py @@ -31,7 +31,7 @@ class SecretSourceEnum(str, Enum): PROMPT = "prompt" -class SecretString(str): +class SecretString(str): # noqa: FURB189 # Allow subclass from str instead of UserStr """A string that represents a secret. This class is used to mark a string as a secret. When a secret is printed, it @@ -124,14 +124,14 @@ def __get_pydantic_core_schema__( # noqa: PLW3201 # Pydantic dunder @classmethod def __get_pydantic_json_schema__( # noqa: PLW3201 # Pydantic dunder method - cls, _core_schema: core_schema.CoreSchema, handler: GetJsonSchemaHandler + cls, core_schema_: core_schema.CoreSchema, handler: GetJsonSchemaHandler ) -> JsonSchemaValue: """Return a modified JSON schema for the secret string. - `writeOnly=True` is the official way to prevent secrets from being exposed inadvertently. - `Format=password` is a popular and readable convention to indicate the field is sensitive. """ - _ = _core_schema, handler # Unused + _ = core_schema_, handler # Unused return { "type": "string", "format": "password", @@ -226,7 +226,7 @@ def get_value(self) -> SecretString: Subclasses can optionally override this method to provide a more optimized code path. 
""" - return cast(SecretString, self.parent.get_secret(self.secret_name)) + return cast("SecretString", self.parent.get_secret(self.secret_name)) def parse_json(self) -> dict: """Parse the secret as JSON. diff --git a/airbyte/secrets/google_gsm.py b/airbyte/secrets/google_gsm.py index 0398bf9a..78e190c1 100644 --- a/airbyte/secrets/google_gsm.py +++ b/airbyte/secrets/google_gsm.py @@ -161,7 +161,7 @@ def _fully_qualified_secret_name(self, secret_name: str) -> str: return full_name - def get_secret(self, secret_name: str) -> SecretString | None: + def get_secret(self, secret_name: str) -> SecretString: """Get a named secret from Google Colab user secrets.""" return SecretString( self.secret_client.access_secret_version( diff --git a/airbyte/secrets/util.py b/airbyte/secrets/util.py index 43afeafe..9c5b0d95 100644 --- a/airbyte/secrets/util.py +++ b/airbyte/secrets/util.py @@ -63,7 +63,7 @@ def get_secret( sources[sources.index(source)] = available_sources[source] - secret_managers = cast(list[SecretManager], sources) + secret_managers = cast("list[SecretManager]", sources) if SecretSourceEnum.PROMPT in secret_managers: prompt_source = secret_managers.pop( diff --git a/airbyte/shared/sql_processor.py b/airbyte/shared/sql_processor.py index b52c2593..526bd847 100644 --- a/airbyte/shared/sql_processor.py +++ b/airbyte/shared/sql_processor.py @@ -9,7 +9,6 @@ from collections import defaultdict from contextlib import contextmanager from functools import cached_property -from pathlib import Path from typing import TYPE_CHECKING, cast, final import pandas as pd @@ -57,6 +56,7 @@ if TYPE_CHECKING: from collections.abc import Generator, Iterable + from pathlib import Path from sqlalchemy.engine import Connection, Engine from sqlalchemy.engine.cursor import CursorResult @@ -197,7 +197,7 @@ def __init__( self._setup() self.file_writer = file_writer or self.file_writer_class( - cache_dir=cast(Path, temp_dir), + cache_dir=cast("Path", temp_dir), cleanup=temp_file_cleanup, ) self.type_converter = self.type_converter_class() @@ -267,7 +267,7 @@ def process_airbyte_messages( # Process messages, writing to batches as we go for message in messages: if message.type is Type.RECORD: - record_msg = cast(AirbyteRecordMessage, message.record) + record_msg = cast("AirbyteRecordMessage", message.record) stream_name = record_msg.stream if stream_name not in stream_record_handlers: @@ -286,16 +286,16 @@ def process_airbyte_messages( ) elif message.type is Type.STATE: - state_msg = cast(AirbyteStateMessage, message.state) + state_msg = cast("AirbyteStateMessage", message.state) if state_msg.type in {AirbyteStateType.GLOBAL, AirbyteStateType.LEGACY}: self._pending_state_messages[f"_{state_msg.type}"].append(state_msg) else: - stream_state = cast(AirbyteStreamState, state_msg.stream) + stream_state = cast("AirbyteStreamState", state_msg.stream) stream_name = stream_state.stream_descriptor.name self._pending_state_messages[stream_name].append(state_msg) elif message.type is Type.TRACE: - trace_msg: AirbyteTraceMessage = cast(AirbyteTraceMessage, message.trace) + trace_msg: AirbyteTraceMessage = cast("AirbyteTraceMessage", message.trace) if trace_msg.stream_status and trace_msg.stream_status.status == "SUCCEEDED": # This stream has completed successfully, so go ahead and write the data. # This will also finalize any pending state messages. 
diff --git a/airbyte/shared/state_providers.py b/airbyte/shared/state_providers.py index 5d53616c..359e8ee8 100644 --- a/airbyte/shared/state_providers.py +++ b/airbyte/shared/state_providers.py @@ -52,7 +52,7 @@ def stream_state_artifacts( return [ state_msg.stream for state_msg in self._state_message_artifacts - if state_msg.type == AirbyteStateType.STREAM + if state_msg and state_msg.type == AirbyteStateType.STREAM and state_msg.stream ] @property @@ -96,7 +96,7 @@ def get_stream_state( self, /, stream_name: str, - not_found: None | AirbyteStateMessage | Literal["raise"] = "raise", + not_found: AirbyteStateMessage | Literal["raise"] | None = "raise", ) -> AirbyteStateMessage: """Return the state message for the specified stream name.""" for state_message in self.state_message_artifacts: diff --git a/airbyte/sources/__init__.py b/airbyte/sources/__init__.py index 7bb4d7be..080a7411 100644 --- a/airbyte/sources/__init__.py +++ b/airbyte/sources/__init__.py @@ -19,15 +19,17 @@ # Submodules imported here for documentation reasons: https://github.com/mitmproxy/pdoc/issues/757 if TYPE_CHECKING: - # ruff: noqa: TCH004 # imports used for more than type checking + # ruff: noqa: TC004 # imports used for more than type checking from airbyte.sources import ( base, + registry, util, ) __all__ = [ # Submodules "base", + "registry", "util", # Factories "get_source", diff --git a/airbyte/sources/base.py b/airbyte/sources/base.py index 23abfd77..638ff715 100644 --- a/airbyte/sources/base.py +++ b/airbyte/sources/base.py @@ -52,7 +52,7 @@ class Source(ConnectorBase): """A class representing a source that can be called.""" - connector_type: Literal["source"] = "source" + connector_type = "source" def __init__( self, @@ -87,10 +87,6 @@ def __init__( if streams is not None: self.select_streams(streams) - self._deployed_api_root: str | None = None - self._deployed_workspace_id: str | None = None - self._deployed_source_id: str | None = None - def set_streams(self, streams: list[str]) -> None: """Deprecated. See select_streams().""" warnings.warn( diff --git a/airbyte/sources/registry.py b/airbyte/sources/registry.py index 291a765f..219b7983 100644 --- a/airbyte/sources/registry.py +++ b/airbyte/sources/registry.py @@ -21,6 +21,9 @@ from airbyte.version import get_version +logger = logging.getLogger("airbyte") + + __cache: dict[str, ConnectorMetadata] | None = None @@ -277,7 +280,7 @@ def _get_registry_cache(*, force_refresh: bool = False) -> dict[str, ConnectorMe return __cache -def get_connector_metadata(name: str) -> None | ConnectorMetadata: +def get_connector_metadata(name: str) -> ConnectorMetadata | None: """Check the cache for the connector. If the cache is empty, populate by calling update_cache. @@ -315,11 +318,11 @@ def get_available_connectors(install_type: InstallType | str | None = None) -> l if install_type is None: # No install type specified. Filter for whatever is runnable. if is_docker_installed(): - logging.info("Docker is detected. Returning all connectors.") + logger.info("Docker is detected. Returning all connectors.") # If Docker is available, return all connectors. return sorted(conn.name for conn in _get_registry_cache().values()) - logging.info("Docker was not detected. Returning only Python and Manifest-only connectors.") + logger.info("Docker was not detected. Returning only Python and Manifest-only connectors.") # If Docker is not available, return only Python and Manifest-based connectors. 
return sorted( diff --git a/airbyte/sources/util.py b/airbyte/sources/util.py index 1b19f513..dbdc5668 100644 --- a/airbyte/sources/util.py +++ b/airbyte/sources/util.py @@ -125,6 +125,8 @@ def get_source( # noqa: PLR0913 # Too many arguments def get_benchmark_source( num_records: int | str = "5e5", + *, + install_if_missing: bool = True, ) -> Source: """Get a source for benchmarking. @@ -135,10 +137,11 @@ def get_benchmark_source( within a numeric a string, they will be ignored. Args: - num_records (int | str): The number of records to generate. Defaults to "5e5", or + num_records: The number of records to generate. Defaults to "5e5", or 500,000 records. Can be an integer (`1000`) or a string in scientific notation. For example, `"5e6"` will generate 5 million records. + install_if_missing: Whether to install the source if it is not available locally. Returns: Source: The source object for benchmarking. @@ -166,6 +169,7 @@ def get_benchmark_source( }, }, streams="*", + install_if_missing=install_if_missing, ) diff --git a/airbyte/types.py b/airbyte/types.py index 382f5135..28dd4314 100644 --- a/airbyte/types.py +++ b/airbyte/types.py @@ -41,7 +41,7 @@ def _get_airbyte_type( # noqa: PLR0911 # Too many return statements Subtype is only used for array types. Otherwise, subtype will return None. """ - airbyte_type = cast(str, json_schema_property_def.get("airbyte_type", None)) + airbyte_type = cast("str", json_schema_property_def.get("airbyte_type", None)) if airbyte_type: return airbyte_type, None diff --git a/docs/generate.py b/docs/generate.py index 0cf39896..02281768 100755 --- a/docs/generate.py +++ b/docs/generate.py @@ -18,7 +18,7 @@ def run() -> None: """Generate docs for all public modules in PyAirbyte and save them to docs/generated.""" - public_modules = ["airbyte", "airbyte/cloud/experimental.py"] + public_modules = ["airbyte", "airbyte/cli.py"] # recursively delete the docs/generated folder if it exists if pathlib.Path("docs/generated").exists(): diff --git a/examples/run_bigquery_destination.py b/examples/run_bigquery_destination.py index 62f22682..04c1a13a 100644 --- a/examples/run_bigquery_destination.py +++ b/examples/run_bigquery_destination.py @@ -9,10 +9,8 @@ import tempfile import warnings -from typing import cast import airbyte as ab -from airbyte.secrets.base import SecretString from airbyte.secrets.google_gsm import GoogleGSMSecretManager warnings.filterwarnings("ignore", message="Cannot create BigQuery Storage client") @@ -21,13 +19,14 @@ AIRBYTE_INTERNAL_GCP_PROJECT = "dataline-integration-testing" SECRET_NAME = "SECRET_DESTINATION-BIGQUERY_CREDENTIALS__CREDS" -bigquery_destination_secret: dict = cast( - SecretString, +bigquery_destination_secret: dict = ( GoogleGSMSecretManager( project=AIRBYTE_INTERNAL_GCP_PROJECT, credentials_json=ab.get_secret("GCP_GSM_CREDENTIALS"), - ).get_secret(SECRET_NAME), -).parse_json() + ) + .get_secret(SECRET_NAME) + .parse_json() +) def main() -> None: diff --git a/examples/run_bigquery_faker.py b/examples/run_bigquery_faker.py index e6cb14cd..cbfcfd41 100644 --- a/examples/run_bigquery_faker.py +++ b/examples/run_bigquery_faker.py @@ -9,11 +9,9 @@ import tempfile import warnings -from typing import cast import airbyte as ab from airbyte.caches.bigquery import BigQueryCache -from airbyte.secrets.base import SecretString from airbyte.secrets.google_gsm import GoogleGSMSecretManager warnings.filterwarnings("ignore", message="Cannot create BigQuery Storage client") @@ -22,13 +20,14 @@ AIRBYTE_INTERNAL_GCP_PROJECT = 
"dataline-integration-testing" SECRET_NAME = "SECRET_DESTINATION-BIGQUERY_CREDENTIALS__CREDS" -bigquery_destination_secret: dict = cast( - SecretString, +bigquery_destination_secret: dict = ( GoogleGSMSecretManager( project=AIRBYTE_INTERNAL_GCP_PROJECT, credentials_json=ab.get_secret("GCP_GSM_CREDENTIALS"), - ).get_secret(SECRET_NAME), -).parse_json() + ) + .get_secret(SECRET_NAME) + .parse_json() +) def main() -> None: diff --git a/examples/run_perf_test_reads.py b/examples/run_perf_test_reads.py index 3f69c013..77b54535 100644 --- a/examples/run_perf_test_reads.py +++ b/examples/run_perf_test_reads.py @@ -226,9 +226,9 @@ def main( if destination: destination.write(read_result) else: - assert ( - destination is not None - ), "Destination is required when caching is disabled." + assert destination is not None, ( + "Destination is required when caching is disabled." + ) destination.write(source, cache=False) diff --git a/poetry.lock b/poetry.lock index a2eafbdf..2c12d7f9 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2,13 +2,13 @@ [[package]] name = "airbyte-api" -version = "0.49.4" +version = "0.52.1" description = "Python Client SDK for Airbyte API" optional = false python-versions = ">=3.8" files = [ - {file = "airbyte-api-0.49.4.tar.gz", hash = "sha256:070aee2312ac97ff30259a143ee45af3b0a0a6ead7a7d0361ff754ca4778c4e5"}, - {file = "airbyte_api-0.49.4-py3-none-any.whl", hash = "sha256:d5ac5c117cd928784e7c164fbbad315a84d660f42db30d238528988b062f6750"}, + {file = "airbyte-api-0.52.1.tar.gz", hash = "sha256:9c69dcfb4103958f5d0724f6c20907f7dbd3ea2e7a6971b4ba5da6449740c528"}, + {file = "airbyte_api-0.52.1-py3-none-any.whl", hash = "sha256:b6d46820dd1d37c52bd3390d7037ef97592f46f37f81882a40ace888eae7e1ce"}, ] [package.dependencies] @@ -32,13 +32,13 @@ dev = ["pylint (==3.1.0)"] [[package]] name = "airbyte-cdk" -version = "6.7.2" +version = "6.9.0" description = "A framework for writing Airbyte Connectors." 
optional = false python-versions = "<3.13,>=3.10" files = [ - {file = "airbyte_cdk-6.7.2-py3-none-any.whl", hash = "sha256:74d84592b16f3504653bd3458a13e646ac99f81f12d112215a4edc59df055bc7"}, - {file = "airbyte_cdk-6.7.2.tar.gz", hash = "sha256:79f534c274ce8bc06910e94e1d77691fadfe5355bfb736f9c4ec06fadb974f8d"}, + {file = "airbyte_cdk-6.9.0-py3-none-any.whl", hash = "sha256:b4545779b69f78725b17ec000d306aa762b060d170b3292dc568e136f86288b2"}, + {file = "airbyte_cdk-6.9.0.tar.gz", hash = "sha256:b7e745be6f9246dbd80bf44ef46beebcebba9d481e5884c2b57010871066511d"}, ] [package.dependencies] @@ -46,7 +46,6 @@ airbyte-protocol-models-dataclasses = ">=0.14,<0.15" backoff = "*" cachetools = "*" cryptography = ">=42.0.5,<44.0.0" -Deprecated = ">=1.2,<1.3" dpath = ">=2.1.6,<3.0.0" dunamai = ">=1.22.0,<2.0.0" genson = "1.3.0" @@ -77,7 +76,6 @@ xmltodict = ">=0.13.0,<0.14.0" [package.extras] file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "python-calamine (==0.2.3)", "python-snappy (==0.7.3)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] -sphinx-docs = ["Sphinx (>=4.2,<4.3)", "sphinx-rtd-theme (>=1.0,<1.1)"] sql = ["sqlalchemy (>=2.0,!=2.0.36,<3.0)"] vector-db-based = ["cohere (==4.21)", "langchain (==0.1.16)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.8.0)"] @@ -119,24 +117,24 @@ files = [ [[package]] name = "anyio" -version = "4.6.2.post1" +version = "4.7.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.9" files = [ - {file = "anyio-4.6.2.post1-py3-none-any.whl", hash = "sha256:6d170c36fba3bdd840c73d3868c1e777e33676a69c3a72cf0a0d5d6d8009b61d"}, - {file = "anyio-4.6.2.post1.tar.gz", hash = "sha256:4c8bc31ccdb51c7f7bd251f51c609e038d63e34219b44aa86e47576389880b4c"}, + {file = "anyio-4.7.0-py3-none-any.whl", hash = "sha256:ea60c3723ab42ba6fff7e8ccb0488c898ec538ff4df1f1d5e642c3601d07e352"}, + {file = "anyio-4.7.0.tar.gz", hash = "sha256:2f834749c602966b7d456a7567cafcb309f96482b5081d14ac93ccd457f9dd48"}, ] [package.dependencies] exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} idna = ">=2.8" sniffio = ">=1.1" -typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} +typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} [package.extras] -doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21.0b1)"] +doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21)"] trio = ["trio (>=0.26.1)"] [[package]] @@ -470,73 +468,73 @@ files = [ [[package]] name = "coverage" -version = "7.6.8" +version = "7.6.9" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.9" files = [ - {file = "coverage-7.6.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b39e6011cd06822eb964d038d5dff5da5d98652b81f5ecd439277b32361a3a50"}, - {file = 
"coverage-7.6.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:63c19702db10ad79151a059d2d6336fe0c470f2e18d0d4d1a57f7f9713875dcf"}, - {file = "coverage-7.6.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3985b9be361d8fb6b2d1adc9924d01dec575a1d7453a14cccd73225cb79243ee"}, - {file = "coverage-7.6.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:644ec81edec0f4ad17d51c838a7d01e42811054543b76d4ba2c5d6af741ce2a6"}, - {file = "coverage-7.6.8-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f188a2402f8359cf0c4b1fe89eea40dc13b52e7b4fd4812450da9fcd210181d"}, - {file = "coverage-7.6.8-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e19122296822deafce89a0c5e8685704c067ae65d45e79718c92df7b3ec3d331"}, - {file = "coverage-7.6.8-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:13618bed0c38acc418896005732e565b317aa9e98d855a0e9f211a7ffc2d6638"}, - {file = "coverage-7.6.8-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:193e3bffca48ad74b8c764fb4492dd875038a2f9925530cb094db92bb5e47bed"}, - {file = "coverage-7.6.8-cp310-cp310-win32.whl", hash = "sha256:3988665ee376abce49613701336544041f2117de7b7fbfe91b93d8ff8b151c8e"}, - {file = "coverage-7.6.8-cp310-cp310-win_amd64.whl", hash = "sha256:f56f49b2553d7dd85fd86e029515a221e5c1f8cb3d9c38b470bc38bde7b8445a"}, - {file = "coverage-7.6.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:86cffe9c6dfcfe22e28027069725c7f57f4b868a3f86e81d1c62462764dc46d4"}, - {file = "coverage-7.6.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d82ab6816c3277dc962cfcdc85b1efa0e5f50fb2c449432deaf2398a2928ab94"}, - {file = "coverage-7.6.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13690e923a3932e4fad4c0ebfb9cb5988e03d9dcb4c5150b5fcbf58fd8bddfc4"}, - {file = "coverage-7.6.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4be32da0c3827ac9132bb488d331cb32e8d9638dd41a0557c5569d57cf22c9c1"}, - {file = "coverage-7.6.8-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44e6c85bbdc809383b509d732b06419fb4544dca29ebe18480379633623baafb"}, - {file = "coverage-7.6.8-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:768939f7c4353c0fac2f7c37897e10b1414b571fd85dd9fc49e6a87e37a2e0d8"}, - {file = "coverage-7.6.8-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e44961e36cb13c495806d4cac67640ac2866cb99044e210895b506c26ee63d3a"}, - {file = "coverage-7.6.8-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3ea8bb1ab9558374c0ab591783808511d135a833c3ca64a18ec927f20c4030f0"}, - {file = "coverage-7.6.8-cp311-cp311-win32.whl", hash = "sha256:629a1ba2115dce8bf75a5cce9f2486ae483cb89c0145795603d6554bdc83e801"}, - {file = "coverage-7.6.8-cp311-cp311-win_amd64.whl", hash = "sha256:fb9fc32399dca861584d96eccd6c980b69bbcd7c228d06fb74fe53e007aa8ef9"}, - {file = "coverage-7.6.8-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e683e6ecc587643f8cde8f5da6768e9d165cd31edf39ee90ed7034f9ca0eefee"}, - {file = "coverage-7.6.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1defe91d41ce1bd44b40fabf071e6a01a5aa14de4a31b986aa9dfd1b3e3e414a"}, - {file = "coverage-7.6.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7ad66e8e50225ebf4236368cc43c37f59d5e6728f15f6e258c8639fa0dd8e6d"}, - {file = 
"coverage-7.6.8-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3fe47da3e4fda5f1abb5709c156eca207eacf8007304ce3019eb001e7a7204cb"}, - {file = "coverage-7.6.8-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:202a2d645c5a46b84992f55b0a3affe4f0ba6b4c611abec32ee88358db4bb649"}, - {file = "coverage-7.6.8-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4674f0daa1823c295845b6a740d98a840d7a1c11df00d1fd62614545c1583787"}, - {file = "coverage-7.6.8-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:74610105ebd6f33d7c10f8907afed696e79c59e3043c5f20eaa3a46fddf33b4c"}, - {file = "coverage-7.6.8-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:37cda8712145917105e07aab96388ae76e787270ec04bcb9d5cc786d7cbb8443"}, - {file = "coverage-7.6.8-cp312-cp312-win32.whl", hash = "sha256:9e89d5c8509fbd6c03d0dd1972925b22f50db0792ce06324ba069f10787429ad"}, - {file = "coverage-7.6.8-cp312-cp312-win_amd64.whl", hash = "sha256:379c111d3558272a2cae3d8e57e6b6e6f4fe652905692d54bad5ea0ca37c5ad4"}, - {file = "coverage-7.6.8-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0b0c69f4f724c64dfbfe79f5dfb503b42fe6127b8d479b2677f2b227478db2eb"}, - {file = "coverage-7.6.8-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c15b32a7aca8038ed7644f854bf17b663bc38e1671b5d6f43f9a2b2bd0c46f63"}, - {file = "coverage-7.6.8-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63068a11171e4276f6ece913bde059e77c713b48c3a848814a6537f35afb8365"}, - {file = "coverage-7.6.8-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f4548c5ead23ad13fb7a2c8ea541357474ec13c2b736feb02e19a3085fac002"}, - {file = "coverage-7.6.8-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b4b4299dd0d2c67caaaf286d58aef5e75b125b95615dda4542561a5a566a1e3"}, - {file = "coverage-7.6.8-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c9ebfb2507751f7196995142f057d1324afdab56db1d9743aab7f50289abd022"}, - {file = "coverage-7.6.8-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:c1b4474beee02ede1eef86c25ad4600a424fe36cff01a6103cb4533c6bf0169e"}, - {file = "coverage-7.6.8-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d9fd2547e6decdbf985d579cf3fc78e4c1d662b9b0ff7cc7862baaab71c9cc5b"}, - {file = "coverage-7.6.8-cp313-cp313-win32.whl", hash = "sha256:8aae5aea53cbfe024919715eca696b1a3201886ce83790537d1c3668459c7146"}, - {file = "coverage-7.6.8-cp313-cp313-win_amd64.whl", hash = "sha256:ae270e79f7e169ccfe23284ff5ea2d52a6f401dc01b337efb54b3783e2ce3f28"}, - {file = "coverage-7.6.8-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:de38add67a0af869b0d79c525d3e4588ac1ffa92f39116dbe0ed9753f26eba7d"}, - {file = "coverage-7.6.8-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b07c25d52b1c16ce5de088046cd2432b30f9ad5e224ff17c8f496d9cb7d1d451"}, - {file = "coverage-7.6.8-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62a66ff235e4c2e37ed3b6104d8b478d767ff73838d1222132a7a026aa548764"}, - {file = "coverage-7.6.8-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09b9f848b28081e7b975a3626e9081574a7b9196cde26604540582da60235fdf"}, - {file = "coverage-7.6.8-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:093896e530c38c8e9c996901858ac63f3d4171268db2c9c8b373a228f459bbc5"}, - {file = "coverage-7.6.8-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9a7b8ac36fd688c8361cbc7bf1cb5866977ece6e0b17c34aa0df58bda4fa18a4"}, - {file = "coverage-7.6.8-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:38c51297b35b3ed91670e1e4efb702b790002e3245a28c76e627478aa3c10d83"}, - {file = "coverage-7.6.8-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2e4e0f60cb4bd7396108823548e82fdab72d4d8a65e58e2c19bbbc2f1e2bfa4b"}, - {file = "coverage-7.6.8-cp313-cp313t-win32.whl", hash = "sha256:6535d996f6537ecb298b4e287a855f37deaf64ff007162ec0afb9ab8ba3b8b71"}, - {file = "coverage-7.6.8-cp313-cp313t-win_amd64.whl", hash = "sha256:c79c0685f142ca53256722a384540832420dff4ab15fec1863d7e5bc8691bdcc"}, - {file = "coverage-7.6.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3ac47fa29d8d41059ea3df65bd3ade92f97ee4910ed638e87075b8e8ce69599e"}, - {file = "coverage-7.6.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:24eda3a24a38157eee639ca9afe45eefa8d2420d49468819ac5f88b10de84f4c"}, - {file = "coverage-7.6.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4c81ed2820b9023a9a90717020315e63b17b18c274a332e3b6437d7ff70abe0"}, - {file = "coverage-7.6.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd55f8fc8fa494958772a2a7302b0354ab16e0b9272b3c3d83cdb5bec5bd1779"}, - {file = "coverage-7.6.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f39e2f3530ed1626c66e7493be7a8423b023ca852aacdc91fb30162c350d2a92"}, - {file = "coverage-7.6.8-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:716a78a342679cd1177bc8c2fe957e0ab91405bd43a17094324845200b2fddf4"}, - {file = "coverage-7.6.8-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:177f01eeaa3aee4a5ffb0d1439c5952b53d5010f86e9d2667963e632e30082cc"}, - {file = "coverage-7.6.8-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:912e95017ff51dc3d7b6e2be158dedc889d9a5cc3382445589ce554f1a34c0ea"}, - {file = "coverage-7.6.8-cp39-cp39-win32.whl", hash = "sha256:4db3ed6a907b555e57cc2e6f14dc3a4c2458cdad8919e40b5357ab9b6db6c43e"}, - {file = "coverage-7.6.8-cp39-cp39-win_amd64.whl", hash = "sha256:428ac484592f780e8cd7b6b14eb568f7c85460c92e2a37cb0c0e5186e1a0d076"}, - {file = "coverage-7.6.8-pp39.pp310-none-any.whl", hash = "sha256:5c52a036535d12590c32c49209e79cabaad9f9ad8aa4cbd875b68c4d67a9cbce"}, - {file = "coverage-7.6.8.tar.gz", hash = "sha256:8b2b8503edb06822c86d82fa64a4a5cb0760bb8f31f26e138ec743f422f37cfc"}, + {file = "coverage-7.6.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:85d9636f72e8991a1706b2b55b06c27545448baf9f6dbf51c4004609aacd7dcb"}, + {file = "coverage-7.6.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:608a7fd78c67bee8936378299a6cb9f5149bb80238c7a566fc3e6717a4e68710"}, + {file = "coverage-7.6.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96d636c77af18b5cb664ddf12dab9b15a0cfe9c0bde715da38698c8cea748bfa"}, + {file = "coverage-7.6.9-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d75cded8a3cff93da9edc31446872d2997e327921d8eed86641efafd350e1df1"}, + {file = "coverage-7.6.9-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7b15f589593110ae767ce997775d645b47e5cbbf54fd322f8ebea6277466cec"}, + {file = "coverage-7.6.9-cp310-cp310-musllinux_1_2_aarch64.whl", 
hash = "sha256:44349150f6811b44b25574839b39ae35291f6496eb795b7366fef3bd3cf112d3"}, + {file = "coverage-7.6.9-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:d891c136b5b310d0e702e186d70cd16d1119ea8927347045124cb286b29297e5"}, + {file = "coverage-7.6.9-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:db1dab894cc139f67822a92910466531de5ea6034ddfd2b11c0d4c6257168073"}, + {file = "coverage-7.6.9-cp310-cp310-win32.whl", hash = "sha256:41ff7b0da5af71a51b53f501a3bac65fb0ec311ebed1632e58fc6107f03b9198"}, + {file = "coverage-7.6.9-cp310-cp310-win_amd64.whl", hash = "sha256:35371f8438028fdccfaf3570b31d98e8d9eda8bb1d6ab9473f5a390969e98717"}, + {file = "coverage-7.6.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:932fc826442132dde42ee52cf66d941f581c685a6313feebed358411238f60f9"}, + {file = "coverage-7.6.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:085161be5f3b30fd9b3e7b9a8c301f935c8313dcf928a07b116324abea2c1c2c"}, + {file = "coverage-7.6.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ccc660a77e1c2bf24ddbce969af9447a9474790160cfb23de6be4fa88e3951c7"}, + {file = "coverage-7.6.9-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c69e42c892c018cd3c8d90da61d845f50a8243062b19d228189b0224150018a9"}, + {file = "coverage-7.6.9-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0824a28ec542a0be22f60c6ac36d679e0e262e5353203bea81d44ee81fe9c6d4"}, + {file = "coverage-7.6.9-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4401ae5fc52ad8d26d2a5d8a7428b0f0c72431683f8e63e42e70606374c311a1"}, + {file = "coverage-7.6.9-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:98caba4476a6c8d59ec1eb00c7dd862ba9beca34085642d46ed503cc2d440d4b"}, + {file = "coverage-7.6.9-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ee5defd1733fd6ec08b168bd4f5387d5b322f45ca9e0e6c817ea6c4cd36313e3"}, + {file = "coverage-7.6.9-cp311-cp311-win32.whl", hash = "sha256:f2d1ec60d6d256bdf298cb86b78dd715980828f50c46701abc3b0a2b3f8a0dc0"}, + {file = "coverage-7.6.9-cp311-cp311-win_amd64.whl", hash = "sha256:0d59fd927b1f04de57a2ba0137166d31c1a6dd9e764ad4af552912d70428c92b"}, + {file = "coverage-7.6.9-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:99e266ae0b5d15f1ca8d278a668df6f51cc4b854513daab5cae695ed7b721cf8"}, + {file = "coverage-7.6.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9901d36492009a0a9b94b20e52ebfc8453bf49bb2b27bca2c9706f8b4f5a554a"}, + {file = "coverage-7.6.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abd3e72dd5b97e3af4246cdada7738ef0e608168de952b837b8dd7e90341f015"}, + {file = "coverage-7.6.9-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff74026a461eb0660366fb01c650c1d00f833a086b336bdad7ab00cc952072b3"}, + {file = "coverage-7.6.9-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65dad5a248823a4996724a88eb51d4b31587aa7aa428562dbe459c684e5787ae"}, + {file = "coverage-7.6.9-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:22be16571504c9ccea919fcedb459d5ab20d41172056206eb2994e2ff06118a4"}, + {file = "coverage-7.6.9-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f957943bc718b87144ecaee70762bc2bc3f1a7a53c7b861103546d3a403f0a6"}, + {file = "coverage-7.6.9-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0ae1387db4aecb1f485fb70a6c0148c6cdaebb6038f1d40089b1fc84a5db556f"}, + 
{file = "coverage-7.6.9-cp312-cp312-win32.whl", hash = "sha256:1a330812d9cc7ac2182586f6d41b4d0fadf9be9049f350e0efb275c8ee8eb692"}, + {file = "coverage-7.6.9-cp312-cp312-win_amd64.whl", hash = "sha256:b12c6b18269ca471eedd41c1b6a1065b2f7827508edb9a7ed5555e9a56dcfc97"}, + {file = "coverage-7.6.9-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:899b8cd4781c400454f2f64f7776a5d87bbd7b3e7f7bda0cb18f857bb1334664"}, + {file = "coverage-7.6.9-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:61f70dc68bd36810972e55bbbe83674ea073dd1dcc121040a08cdf3416c5349c"}, + {file = "coverage-7.6.9-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a289d23d4c46f1a82d5db4abeb40b9b5be91731ee19a379d15790e53031c014"}, + {file = "coverage-7.6.9-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e216d8044a356fc0337c7a2a0536d6de07888d7bcda76febcb8adc50bdbbd00"}, + {file = "coverage-7.6.9-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c026eb44f744acaa2bda7493dad903aa5bf5fc4f2554293a798d5606710055d"}, + {file = "coverage-7.6.9-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e77363e8425325384f9d49272c54045bbed2f478e9dd698dbc65dbc37860eb0a"}, + {file = "coverage-7.6.9-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:777abfab476cf83b5177b84d7486497e034eb9eaea0d746ce0c1268c71652077"}, + {file = "coverage-7.6.9-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:447af20e25fdbe16f26e84eb714ba21d98868705cb138252d28bc400381f6ffb"}, + {file = "coverage-7.6.9-cp313-cp313-win32.whl", hash = "sha256:d872ec5aeb086cbea771c573600d47944eea2dcba8be5f3ee649bfe3cb8dc9ba"}, + {file = "coverage-7.6.9-cp313-cp313-win_amd64.whl", hash = "sha256:fd1213c86e48dfdc5a0cc676551db467495a95a662d2396ecd58e719191446e1"}, + {file = "coverage-7.6.9-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:ba9e7484d286cd5a43744e5f47b0b3fb457865baf07bafc6bee91896364e1419"}, + {file = "coverage-7.6.9-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e5ea1cf0872ee455c03e5674b5bca5e3e68e159379c1af0903e89f5eba9ccc3a"}, + {file = "coverage-7.6.9-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d10e07aa2b91835d6abec555ec8b2733347956991901eea6ffac295f83a30e4"}, + {file = "coverage-7.6.9-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:13a9e2d3ee855db3dd6ea1ba5203316a1b1fd8eaeffc37c5b54987e61e4194ae"}, + {file = "coverage-7.6.9-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c38bf15a40ccf5619fa2fe8f26106c7e8e080d7760aeccb3722664c8656b030"}, + {file = "coverage-7.6.9-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:d5275455b3e4627c8e7154feaf7ee0743c2e7af82f6e3b561967b1cca755a0be"}, + {file = "coverage-7.6.9-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:8f8770dfc6e2c6a2d4569f411015c8d751c980d17a14b0530da2d7f27ffdd88e"}, + {file = "coverage-7.6.9-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8d2dfa71665a29b153a9681edb1c8d9c1ea50dfc2375fb4dac99ea7e21a0bcd9"}, + {file = "coverage-7.6.9-cp313-cp313t-win32.whl", hash = "sha256:5e6b86b5847a016d0fbd31ffe1001b63355ed309651851295315031ea7eb5a9b"}, + {file = "coverage-7.6.9-cp313-cp313t-win_amd64.whl", hash = "sha256:97ddc94d46088304772d21b060041c97fc16bdda13c6c7f9d8fcd8d5ae0d8611"}, + {file = "coverage-7.6.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:adb697c0bd35100dc690de83154627fbab1f4f3c0386df266dded865fc50a902"}, + {file = "coverage-7.6.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:be57b6d56e49c2739cdf776839a92330e933dd5e5d929966fbbd380c77f060be"}, + {file = "coverage-7.6.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1592791f8204ae9166de22ba7e6705fa4ebd02936c09436a1bb85aabca3e599"}, + {file = "coverage-7.6.9-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4e12ae8cc979cf83d258acb5e1f1cf2f3f83524d1564a49d20b8bec14b637f08"}, + {file = "coverage-7.6.9-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb5555cff66c4d3d6213a296b360f9e1a8e323e74e0426b6c10ed7f4d021e464"}, + {file = "coverage-7.6.9-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b9389a429e0e5142e69d5bf4a435dd688c14478a19bb901735cdf75e57b13845"}, + {file = "coverage-7.6.9-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:592ac539812e9b46046620341498caf09ca21023c41c893e1eb9dbda00a70cbf"}, + {file = "coverage-7.6.9-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a27801adef24cc30871da98a105f77995e13a25a505a0161911f6aafbd66e678"}, + {file = "coverage-7.6.9-cp39-cp39-win32.whl", hash = "sha256:8e3c3e38930cfb729cb8137d7f055e5a473ddaf1217966aa6238c88bd9fd50e6"}, + {file = "coverage-7.6.9-cp39-cp39-win_amd64.whl", hash = "sha256:e28bf44afa2b187cc9f41749138a64435bf340adfcacb5b2290c070ce99839d4"}, + {file = "coverage-7.6.9-pp39.pp310-none-any.whl", hash = "sha256:f3ca78518bc6bc92828cd11867b121891d75cae4ea9e908d72030609b996db1b"}, + {file = "coverage-7.6.9.tar.gz", hash = "sha256:4a8d8977b0c6ef5aeadcb644da9e69ae0dcfe66ec7f368c89c72e058bd71164d"}, ] [package.extras] @@ -606,23 +604,6 @@ files = [ marshmallow = ">=3.18.0,<4.0.0" typing-inspect = ">=0.4.0,<1" -[[package]] -name = "deprecated" -version = "1.2.15" -description = "Python @deprecated decorator to deprecate old python classes, functions or methods." -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" -files = [ - {file = "Deprecated-1.2.15-py2.py3-none-any.whl", hash = "sha256:353bc4a8ac4bfc96800ddab349d89c25dec1079f65fd53acdcc1e0b975b21320"}, - {file = "deprecated-1.2.15.tar.gz", hash = "sha256:683e561a90de76239796e6b6feac66b99030d2dd3fcf61ef996330f14bbb9b0d"}, -] - -[package.dependencies] -wrapt = ">=1.10,<2" - -[package.extras] -dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "jinja2 (>=3.0.3,<3.1.0)", "setuptools", "sphinx (<2)", "tox"] - [[package]] name = "deptry" version = "0.21.1" @@ -1285,13 +1266,13 @@ trio = ["trio (>=0.22.0,<1.0)"] [[package]] name = "httpx" -version = "0.28.0" +version = "0.28.1" description = "The next generation HTTP client." 
optional = false python-versions = ">=3.8" files = [ - {file = "httpx-0.28.0-py3-none-any.whl", hash = "sha256:dc0b419a0cfeb6e8b34e85167c0da2671206f5095f1baa9663d23bcfd6b535fc"}, - {file = "httpx-0.28.0.tar.gz", hash = "sha256:0858d3bab51ba7e386637f22a61d8ccddaeec5f3fe4209da3a6168dbb91573e0"}, + {file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"}, + {file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"}, ] [package.dependencies] @@ -2081,22 +2062,22 @@ testing = ["google-api-core (>=1.31.5)"] [[package]] name = "protobuf" -version = "5.29.0" +version = "5.29.1" description = "" optional = false python-versions = ">=3.8" files = [ - {file = "protobuf-5.29.0-cp310-abi3-win32.whl", hash = "sha256:ea7fb379b257911c8c020688d455e8f74efd2f734b72dc1ea4b4d7e9fd1326f2"}, - {file = "protobuf-5.29.0-cp310-abi3-win_amd64.whl", hash = "sha256:34a90cf30c908f47f40ebea7811f743d360e202b6f10d40c02529ebd84afc069"}, - {file = "protobuf-5.29.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:c931c61d0cc143a2e756b1e7f8197a508de5365efd40f83c907a9febf36e6b43"}, - {file = "protobuf-5.29.0-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:85286a47caf63b34fa92fdc1fd98b649a8895db595cfa746c5286eeae890a0b1"}, - {file = "protobuf-5.29.0-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:0d10091d6d03537c3f902279fcf11e95372bdd36a79556311da0487455791b20"}, - {file = "protobuf-5.29.0-cp38-cp38-win32.whl", hash = "sha256:0cd67a1e5c2d88930aa767f702773b2d054e29957432d7c6a18f8be02a07719a"}, - {file = "protobuf-5.29.0-cp38-cp38-win_amd64.whl", hash = "sha256:e467f81fdd12ded9655cea3e9b83dc319d93b394ce810b556fb0f421d8613e86"}, - {file = "protobuf-5.29.0-cp39-cp39-win32.whl", hash = "sha256:17d128eebbd5d8aee80300aed7a43a48a25170af3337f6f1333d1fac2c6839ac"}, - {file = "protobuf-5.29.0-cp39-cp39-win_amd64.whl", hash = "sha256:6c3009e22717c6cc9e6594bb11ef9f15f669b19957ad4087214d69e08a213368"}, - {file = "protobuf-5.29.0-py3-none-any.whl", hash = "sha256:88c4af76a73183e21061881360240c0cdd3c39d263b4e8fb570aaf83348d608f"}, - {file = "protobuf-5.29.0.tar.gz", hash = "sha256:445a0c02483869ed8513a585d80020d012c6dc60075f96fa0563a724987b1001"}, + {file = "protobuf-5.29.1-cp310-abi3-win32.whl", hash = "sha256:22c1f539024241ee545cbcb00ee160ad1877975690b16656ff87dde107b5f110"}, + {file = "protobuf-5.29.1-cp310-abi3-win_amd64.whl", hash = "sha256:1fc55267f086dd4050d18ef839d7bd69300d0d08c2a53ca7df3920cc271a3c34"}, + {file = "protobuf-5.29.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:d473655e29c0c4bbf8b69e9a8fb54645bc289dead6d753b952e7aa660254ae18"}, + {file = "protobuf-5.29.1-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:b5ba1d0e4c8a40ae0496d0e2ecfdbb82e1776928a205106d14ad6985a09ec155"}, + {file = "protobuf-5.29.1-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:8ee1461b3af56145aca2800e6a3e2f928108c749ba8feccc6f5dd0062c410c0d"}, + {file = "protobuf-5.29.1-cp38-cp38-win32.whl", hash = "sha256:50879eb0eb1246e3a5eabbbe566b44b10348939b7cc1b267567e8c3d07213853"}, + {file = "protobuf-5.29.1-cp38-cp38-win_amd64.whl", hash = "sha256:027fbcc48cea65a6b17028510fdd054147057fa78f4772eb547b9274e5219331"}, + {file = "protobuf-5.29.1-cp39-cp39-win32.whl", hash = "sha256:5a41deccfa5e745cef5c65a560c76ec0ed8e70908a67cc8f4da5fce588b50d57"}, + {file = "protobuf-5.29.1-cp39-cp39-win_amd64.whl", hash = "sha256:012ce28d862ff417fd629285aca5d9772807f15ceb1a0dbd15b88f58c776c98c"}, + {file = 
"protobuf-5.29.1-py3-none-any.whl", hash = "sha256:32600ddb9c2a53dedc25b8581ea0f1fd8ea04956373c0c07577ce58d312522e0"}, + {file = "protobuf-5.29.1.tar.gz", hash = "sha256:683be02ca21a6ffe80db6dd02c0b5b2892322c59ca57fd6c872d652cb80549cb"}, ] [[package]] @@ -3224,114 +3205,114 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "rpds-py" -version = "0.22.1" +version = "0.22.3" description = "Python bindings to Rust's persistent data structures (rpds)" optional = false python-versions = ">=3.9" files = [ - {file = "rpds_py-0.22.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:ab27dd4edd84b13309f268ffcdfc07aef8339135ffab7b6d43f16884307a2a48"}, - {file = "rpds_py-0.22.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9d5b925156a746dc1f5f52376fdd1fbdd3f6ffe1fcd6f5e06f77ca79abb940a3"}, - {file = "rpds_py-0.22.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:201650b309c419143775c15209c620627de3c09a27c7fb58375325aec5cce260"}, - {file = "rpds_py-0.22.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:31264187fc934ff1024a4f56775f33c9252d3f4f3e27ec07d1995a26b52702c3"}, - {file = "rpds_py-0.22.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:97c5ffe47ccf92d8b17e10f8a5ce28d015aa1196edc3359684cf31504eae6a14"}, - {file = "rpds_py-0.22.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e9ac7280bd045f472b50306d7efeee051b69e3a2dd1b90f46bd7e86e63b1efa2"}, - {file = "rpds_py-0.22.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f941fb86195f97be7f6efe04a21b223f05dfe4d1dfb159999e2f8d101e44cc4"}, - {file = "rpds_py-0.22.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f91bfc39f7a64168e08ab831fa497ec5438c1d6c6e2f9e12848d95ad11ac8523"}, - {file = "rpds_py-0.22.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:effcae2152afe7937a28376dbabb25c770ef99ed4e16a4ffeb8e6a4f7c4f06aa"}, - {file = "rpds_py-0.22.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:2177e59c033bf0d1bf7de1ced561205963583caf3242c6c700a723034bfb5f8e"}, - {file = "rpds_py-0.22.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:66f4f48a89cdd30ab3a47335df81c76e9a63799d0d84b29c0618371c66fa37b0"}, - {file = "rpds_py-0.22.1-cp310-cp310-win32.whl", hash = "sha256:b07fa9e634234e84096adfa4be3828c8f26e238679c122824b2b3d7131bec578"}, - {file = "rpds_py-0.22.1-cp310-cp310-win_amd64.whl", hash = "sha256:ca4657e9fd0b1b5376942d403d634ce188f79064f0873aa853ab05b10185ceec"}, - {file = "rpds_py-0.22.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:608c84699b2db09c6a8743845b1a3dad36fae53eaaecb241d45b13dff74405fb"}, - {file = "rpds_py-0.22.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9dae4eb9b5534e09ba6c6ab496a757e5e394b7e7b08767d25ca37e8d36491114"}, - {file = "rpds_py-0.22.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09a1f000c5f6e08b298275bae00921e9fbbf2a35dae0a86db2821c058c2201a9"}, - {file = "rpds_py-0.22.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:580ccbf11f02f948add4cb641843030a89f1463d7c0740cbfc9aca91e9dc34b3"}, - {file = "rpds_py-0.22.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:96559e05bdf938b2048353e10a7920b98f853cefe4482c2064a718d7d0a50bd7"}, - {file = "rpds_py-0.22.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:128cbaed7ba26116820bcb992405d6a13ea18c8fca1b8c4f59906d858e91e979"}, - {file = 
"rpds_py-0.22.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:734783dd7da58f76222f458346ddebdb3621686a1a2a667db5049caf0c9956b9"}, - {file = "rpds_py-0.22.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c9ce6b83597d45bec44a2690857ede62fc98223772135f8a7fa90884eb726501"}, - {file = "rpds_py-0.22.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:bca4428c4a957b78ded3e6e62884ab03f029dce8fa8d34818da0f80f61332b49"}, - {file = "rpds_py-0.22.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1ded65691a1d3fd7d2aa89d2c91aa51f941601bb2ce099739909034d957fef4b"}, - {file = "rpds_py-0.22.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:72407065ad459db9f3d052ea8c51e02534f02533fc61e51cbab3bd94166f086c"}, - {file = "rpds_py-0.22.1-cp311-cp311-win32.whl", hash = "sha256:eb013aa01b404219f28dc973d9e6310fd4db216d7299253dd355629952e0564e"}, - {file = "rpds_py-0.22.1-cp311-cp311-win_amd64.whl", hash = "sha256:8bd9ec1db79a664f4cbb12878693b73416f4d2cb425d3e27eccc1bdfbdc826ef"}, - {file = "rpds_py-0.22.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:8ec41049c90d204a6561238a9ad6c7263ebb7009d9759c98b58078d9d2fec9ba"}, - {file = "rpds_py-0.22.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:102be79c4cc47a4aeb5912401185c404cd2601c15a7163bbecff7f1bfe20b669"}, - {file = "rpds_py-0.22.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a603155db408f773637f9e3a712c6e3cbc521aaa8fa2b99f9ba6106c59a2496"}, - {file = "rpds_py-0.22.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5dbff9402c2bdf00bf0df9905694b3c292a3847c725651938a72f554351a5fcb"}, - {file = "rpds_py-0.22.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:96b3759d8ab2323324e0a92b2f44834f9d88089b8d1ab6f533b61f4be3411cef"}, - {file = "rpds_py-0.22.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3029f481b31f329b1fdb4ec4b56935d82210ddd9c6f86ea5a87c06f1e97b161"}, - {file = "rpds_py-0.22.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d280b4bf09f719b89fd9aab3b71067acc0d0449b7d1eba99a2ade4939cef8296"}, - {file = "rpds_py-0.22.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6c8e97e19aa7b0b0d801a159f932ce4435f1049c8c38e2bb372bb5bee559ce50"}, - {file = "rpds_py-0.22.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:50e4b5d291105f7063259fe0125b1af902fb34499444d7c5c521dd8328b00939"}, - {file = "rpds_py-0.22.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d3777c446bb1c5fcd82dc3f8776e1a146cd91e80cc1892f8634575ace438d22f"}, - {file = "rpds_py-0.22.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:447ae1104fb32197b9262f772d565d38e834cc2e9edd89350b37b88fed636e70"}, - {file = "rpds_py-0.22.1-cp312-cp312-win32.whl", hash = "sha256:55d371b9d8b0c2a68a50413a8cb01c3c3ce1ea4f768bf77b66669a9a486e101e"}, - {file = "rpds_py-0.22.1-cp312-cp312-win_amd64.whl", hash = "sha256:413a30a99d8683dace3765885920ed27ab662efbb6c98d81db76c397ad1ffd71"}, - {file = "rpds_py-0.22.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:aa2ba0176037c915d8660a4e46581d645e2c22b5373e466bc8640a794d45861a"}, - {file = "rpds_py-0.22.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4ba6c66fbc6015b2f99e7176fec41793cecb00c4cc357cad038dff85e6ac42ab"}, - {file = "rpds_py-0.22.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15fa4ca658f8ad22645d3531682b17e5580832efbfa87304c3e62214c79c1e8a"}, - {file = 
"rpds_py-0.22.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d7833ef6f5d6cb634f296abfd93452fb3eb44c4e9a6ae95c1021eab704c1cee2"}, - {file = "rpds_py-0.22.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c0467838c90435b80793cde486a318fc916ee57f2af54e4b10c72b20cbdcbaa9"}, - {file = "rpds_py-0.22.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d962e2e89b3a95e3597a34b8c93ced1e98958502c5b8096c9fd69deff279f561"}, - {file = "rpds_py-0.22.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ce729f1dc8a4a190c34b69f75377bddc004079b2963ab722ab91fafe040be6d"}, - {file = "rpds_py-0.22.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8080467df22feca0fc9c46567001777c6fbc2b4a2683a7137420896051874ca1"}, - {file = "rpds_py-0.22.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0f9eb37d3a60b262a98ab51ee899cac039de9ca0ce68dcf1a6518a09719020b0"}, - {file = "rpds_py-0.22.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:153248f48d6f90a295a502f53ec544a3ffbd21b0bb32f5dca39c4b93a764d6a2"}, - {file = "rpds_py-0.22.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0a53592cdf98cec3dfcdb24ffec8a4797e7656b65700099af43ec7df023b6de4"}, - {file = "rpds_py-0.22.1-cp313-cp313-win32.whl", hash = "sha256:e8056adcefa2dcb67e8bc91ea5eee26df66e8b297a8cd6ff0903f85c70908fa0"}, - {file = "rpds_py-0.22.1-cp313-cp313-win_amd64.whl", hash = "sha256:a451dba533be77454ebcffc85189108fc05f279100835ac76e7989edacb89156"}, - {file = "rpds_py-0.22.1-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:2ea23f1525d4f64286dbe0947c929d45c3ffe963b2dbed1d3844a2e4938bda42"}, - {file = "rpds_py-0.22.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3aaa22487477de9618ce3b37f99fbe81219ba96f3c2ca84f576f0ab451b83aba"}, - {file = "rpds_py-0.22.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8954b9ffe60f479a0c0ba40987db2546c735ab02a725ea7fd89342152d4d821d"}, - {file = "rpds_py-0.22.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c8502a02ae3ae67084f5a0bf5a8253b19fa7a887f824e41e016cdb0ac532a06f"}, - {file = "rpds_py-0.22.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a083221b6a4ecdef38a60c95d8d3223d99449cb4da2544e9644958dc16664eb9"}, - {file = "rpds_py-0.22.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:542eb246d5be31b5e0a9c8ddb9539416f9b31f58f75bd4ee328bff2b5c58d6fd"}, - {file = "rpds_py-0.22.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffae97d28ea4f2c613a751d087b75a97fb78311b38cc2e9a2f4587e473ace167"}, - {file = "rpds_py-0.22.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0ff8d5b13ce2357fa8b33a0a2e3775aa71df5bf7c8ba060634c9d15ab12f357"}, - {file = "rpds_py-0.22.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:0f057a0c546c42964836b209d8de9ea1a4f4b0432006c6343cbe633d8ca14571"}, - {file = "rpds_py-0.22.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:48ee97c7c6027fd423058675b5a39d0b5f7a1648250b671563d5c9f74ff13ff0"}, - {file = "rpds_py-0.22.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:babec324e8654a59122aaa66936a9a483faa03276db9792f51332475c2dddc4a"}, - {file = "rpds_py-0.22.1-cp313-cp313t-win32.whl", hash = "sha256:e69acdbc132c9592c8dc393af85e38e206ca847c7019a953ff625191c3a12312"}, - {file = "rpds_py-0.22.1-cp313-cp313t-win_amd64.whl", hash = 
"sha256:c783e4ed68200f4e03c125690d23158b1c49c4b186d458a18debc109bbdc3c2e"}, - {file = "rpds_py-0.22.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:2143c3aed85992604d758bbe67da839fb4aab3dd2e1c6dddab5b3ca7162b34a2"}, - {file = "rpds_py-0.22.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f57e2d0f8022783426121b586d7c842ea40ea832a29e28ca36c881b54c74fb28"}, - {file = "rpds_py-0.22.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c0c324879d483504b07f7b18eb1b50567c434263bbe4866ecce33056162668a"}, - {file = "rpds_py-0.22.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1c40e02cc4f3e18fd39344edb10eebe04bd11cfd13119606b5771e5ea51630d3"}, - {file = "rpds_py-0.22.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f76c6f319e57007ad52e671ec741d801324760a377e3d4992c9bb8200333ebac"}, - {file = "rpds_py-0.22.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f5cae9b415ea8a6a563566dbf46650222eccc5971c7daa16fbee63aef92ae543"}, - {file = "rpds_py-0.22.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b09209cdfcacf5eba9cf80367130532e6c02e695252e1f64d3cfcc2356e6e19f"}, - {file = "rpds_py-0.22.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dbe428d0ac6eacaf05402adbaf137f59ad6063848182d1ff294f95ce0f24005b"}, - {file = "rpds_py-0.22.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:626b9feb01bff049a5aec4804f0c58db12585778b4902e5376a95b01f80a7a16"}, - {file = "rpds_py-0.22.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ec1ccc2a9f764cd632fb8ab28fdde166250df54fc8d97315a4a6948dc5367639"}, - {file = "rpds_py-0.22.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ef92b1fbe6aa2e7885eb90853cc016b1fc95439a8cc8da6d526880e9e2148695"}, - {file = "rpds_py-0.22.1-cp39-cp39-win32.whl", hash = "sha256:c88535f83f7391cf3a45af990237e3939a6fdfbedaed2571633bfdd0bceb36b0"}, - {file = "rpds_py-0.22.1-cp39-cp39-win_amd64.whl", hash = "sha256:7839b7528faa4d134c183b1f2dd1ee4dc2ca2f899f4f0cfdf00fc04c255262a7"}, - {file = "rpds_py-0.22.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a0ed14a4162c2c2b21a162c9fcf90057e3e7da18cd171ab344c1e1664f75090e"}, - {file = "rpds_py-0.22.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:05fdeae9010533e47715c37df83264df0122584e40d691d50cf3607c060952a3"}, - {file = "rpds_py-0.22.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4659b2e4a5008715099e216050f5c6976e5a4329482664411789968b82e3f17d"}, - {file = "rpds_py-0.22.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a18aedc032d6468b73ebbe4437129cb30d54fe543cde2f23671ecad76c3aea24"}, - {file = "rpds_py-0.22.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149b4d875ef9b12a8f5e303e86a32a58f8ef627e57ec97a7d0e4be819069d141"}, - {file = "rpds_py-0.22.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fdaee3947eaaa52dae3ceb9d9f66329e13d8bae35682b1e5dd54612938693934"}, - {file = "rpds_py-0.22.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36ce951800ed2acc6772fd9f42150f29d567f0423989748052fdb39d9e2b5795"}, - {file = "rpds_py-0.22.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ab784621d3e2a41916e21f13a483602cc989fd45fff637634b9231ba43d4383b"}, - {file = "rpds_py-0.22.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = 
"sha256:c2a214bf5b79bd39a9de1c991353aaaacafda83ba1374178309e92be8e67d411"}, - {file = "rpds_py-0.22.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:85060e96953647871957d41707adb8d7bff4e977042fd0deb4fc1881b98dd2fe"}, - {file = "rpds_py-0.22.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:c6f3fd617db422c9d4e12cb8d84c984fe07d6d9cb0950cbf117f3bccc6268d05"}, - {file = "rpds_py-0.22.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f2d1b58a0c3a73f0361759642e80260a6d28eee6501b40fe25b82af33ef83f21"}, - {file = "rpds_py-0.22.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:76eaa4c087a061a2c8a0a92536405069878a8f530c00e84a9eaf332e70f5561f"}, - {file = "rpds_py-0.22.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:959ae04ed30cde606f3a0320f0a1f4167a107e685ef5209cce28c5080590bd31"}, - {file = "rpds_py-0.22.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:198067aa6f3d942ff5d0d655bb1e91b59ae85279d47590682cba2834ac1b97d2"}, - {file = "rpds_py-0.22.1-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3e7e99e2af59c56c59b6c964d612511b8203480d39d1ef83edc56f2cb42a3f5d"}, - {file = "rpds_py-0.22.1-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0545928bdf53dfdfcab284468212efefb8a6608ca3b6910c7fb2e5ed8bdc2dc0"}, - {file = "rpds_py-0.22.1-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef7282d8a14b60dd515e47060638687710b1d518f4b5e961caad43fb3a3606f9"}, - {file = "rpds_py-0.22.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe3f245c2f39a5692d9123c174bc48f6f9fe3e96407e67c6d04541a767d99e72"}, - {file = "rpds_py-0.22.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efb2ad60ca8637d5f9f653f9a9a8d73964059972b6b95036be77e028bffc68a3"}, - {file = "rpds_py-0.22.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:d8306f27418361b788e3fca9f47dec125457f80122e7e31ba7ff5cdba98343f8"}, - {file = "rpds_py-0.22.1-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:4c8dc7331e8cbb1c0ea2bcb550adb1777365944ffd125c69aa1117fdef4887f5"}, - {file = "rpds_py-0.22.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:776a06cb5720556a549829896a49acebb5bdd96c7bba100191a994053546975a"}, - {file = "rpds_py-0.22.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e4f91d702b9ce1388660b3d4a28aa552614a1399e93f718ed0dacd68f23b3d32"}, - {file = "rpds_py-0.22.1.tar.gz", hash = "sha256:157a023bded0618a1eea54979fe2e0f9309e9ddc818ef4b8fc3b884ff38fedd5"}, + {file = "rpds_py-0.22.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:6c7b99ca52c2c1752b544e310101b98a659b720b21db00e65edca34483259967"}, + {file = "rpds_py-0.22.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:be2eb3f2495ba669d2a985f9b426c1797b7d48d6963899276d22f23e33d47e37"}, + {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70eb60b3ae9245ddea20f8a4190bd79c705a22f8028aaf8bbdebe4716c3fab24"}, + {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4041711832360a9b75cfb11b25a6a97c8fb49c07b8bd43d0d02b45d0b499a4ff"}, + {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:64607d4cbf1b7e3c3c8a14948b99345eda0e161b852e122c6bb71aab6d1d798c"}, + {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:81e69b0a0e2537f26d73b4e43ad7bc8c8efb39621639b4434b76a3de50c6966e"}, + {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc27863442d388870c1809a87507727b799c8460573cfbb6dc0eeaef5a11b5ec"}, + {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e79dd39f1e8c3504be0607e5fc6e86bb60fe3584bec8b782578c3b0fde8d932c"}, + {file = "rpds_py-0.22.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e0fa2d4ec53dc51cf7d3bb22e0aa0143966119f42a0c3e4998293a3dd2856b09"}, + {file = "rpds_py-0.22.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fda7cb070f442bf80b642cd56483b5548e43d366fe3f39b98e67cce780cded00"}, + {file = "rpds_py-0.22.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cff63a0272fcd259dcc3be1657b07c929c466b067ceb1c20060e8d10af56f5bf"}, + {file = "rpds_py-0.22.3-cp310-cp310-win32.whl", hash = "sha256:9bd7228827ec7bb817089e2eb301d907c0d9827a9e558f22f762bb690b131652"}, + {file = "rpds_py-0.22.3-cp310-cp310-win_amd64.whl", hash = "sha256:9beeb01d8c190d7581a4d59522cd3d4b6887040dcfc744af99aa59fef3e041a8"}, + {file = "rpds_py-0.22.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d20cfb4e099748ea39e6f7b16c91ab057989712d31761d3300d43134e26e165f"}, + {file = "rpds_py-0.22.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:68049202f67380ff9aa52f12e92b1c30115f32e6895cd7198fa2a7961621fc5a"}, + {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb4f868f712b2dd4bcc538b0a0c1f63a2b1d584c925e69a224d759e7070a12d5"}, + {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bc51abd01f08117283c5ebf64844a35144a0843ff7b2983e0648e4d3d9f10dbb"}, + {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0f3cec041684de9a4684b1572fe28c7267410e02450f4561700ca5a3bc6695a2"}, + {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7ef9d9da710be50ff6809fed8f1963fecdfecc8b86656cadfca3bc24289414b0"}, + {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59f4a79c19232a5774aee369a0c296712ad0e77f24e62cad53160312b1c1eaa1"}, + {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1a60bce91f81ddaac922a40bbb571a12c1070cb20ebd6d49c48e0b101d87300d"}, + {file = "rpds_py-0.22.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e89391e6d60251560f0a8f4bd32137b077a80d9b7dbe6d5cab1cd80d2746f648"}, + {file = "rpds_py-0.22.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e3fb866d9932a3d7d0c82da76d816996d1667c44891bd861a0f97ba27e84fc74"}, + {file = "rpds_py-0.22.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1352ae4f7c717ae8cba93421a63373e582d19d55d2ee2cbb184344c82d2ae55a"}, + {file = "rpds_py-0.22.3-cp311-cp311-win32.whl", hash = "sha256:b0b4136a252cadfa1adb705bb81524eee47d9f6aab4f2ee4fa1e9d3cd4581f64"}, + {file = "rpds_py-0.22.3-cp311-cp311-win_amd64.whl", hash = "sha256:8bd7c8cfc0b8247c8799080fbff54e0b9619e17cdfeb0478ba7295d43f635d7c"}, + {file = "rpds_py-0.22.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:27e98004595899949bd7a7b34e91fa7c44d7a97c40fcaf1d874168bb652ec67e"}, + {file = "rpds_py-0.22.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1978d0021e943aae58b9b0b196fb4895a25cc53d3956b8e35e0b7682eefb6d56"}, + {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:655ca44a831ecb238d124e0402d98f6212ac527a0ba6c55ca26f616604e60a45"}, + {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:feea821ee2a9273771bae61194004ee2fc33f8ec7db08117ef9147d4bbcbca8e"}, + {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22bebe05a9ffc70ebfa127efbc429bc26ec9e9b4ee4d15a740033efda515cf3d"}, + {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3af6e48651c4e0d2d166dc1b033b7042ea3f871504b6805ba5f4fe31581d8d38"}, + {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e67ba3c290821343c192f7eae1d8fd5999ca2dc99994114643e2f2d3e6138b15"}, + {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:02fbb9c288ae08bcb34fb41d516d5eeb0455ac35b5512d03181d755d80810059"}, + {file = "rpds_py-0.22.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f56a6b404f74ab372da986d240e2e002769a7d7102cc73eb238a4f72eec5284e"}, + {file = "rpds_py-0.22.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0a0461200769ab3b9ab7e513f6013b7a97fdeee41c29b9db343f3c5a8e2b9e61"}, + {file = "rpds_py-0.22.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8633e471c6207a039eff6aa116e35f69f3156b3989ea3e2d755f7bc41754a4a7"}, + {file = "rpds_py-0.22.3-cp312-cp312-win32.whl", hash = "sha256:593eba61ba0c3baae5bc9be2f5232430453fb4432048de28399ca7376de9c627"}, + {file = "rpds_py-0.22.3-cp312-cp312-win_amd64.whl", hash = "sha256:d115bffdd417c6d806ea9069237a4ae02f513b778e3789a359bc5856e0404cc4"}, + {file = "rpds_py-0.22.3-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:ea7433ce7e4bfc3a85654aeb6747babe3f66eaf9a1d0c1e7a4435bbdf27fea84"}, + {file = "rpds_py-0.22.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6dd9412824c4ce1aca56c47b0991e65bebb7ac3f4edccfd3f156150c96a7bf25"}, + {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20070c65396f7373f5df4005862fa162db5d25d56150bddd0b3e8214e8ef45b4"}, + {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0b09865a9abc0ddff4e50b5ef65467cd94176bf1e0004184eb915cbc10fc05c5"}, + {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3453e8d41fe5f17d1f8e9c383a7473cd46a63661628ec58e07777c2fff7196dc"}, + {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f5d36399a1b96e1a5fdc91e0522544580dbebeb1f77f27b2b0ab25559e103b8b"}, + {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:009de23c9c9ee54bf11303a966edf4d9087cd43a6003672e6aa7def643d06518"}, + {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1aef18820ef3e4587ebe8b3bc9ba6e55892a6d7b93bac6d29d9f631a3b4befbd"}, + {file = "rpds_py-0.22.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f60bd8423be1d9d833f230fdbccf8f57af322d96bcad6599e5a771b151398eb2"}, + {file = "rpds_py-0.22.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:62d9cfcf4948683a18a9aff0ab7e1474d407b7bab2ca03116109f8464698ab16"}, + {file = "rpds_py-0.22.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9253fc214112405f0afa7db88739294295f0e08466987f1d70e29930262b4c8f"}, + {file = "rpds_py-0.22.3-cp313-cp313-win32.whl", hash = "sha256:fb0ba113b4983beac1a2eb16faffd76cb41e176bf58c4afe3e14b9c681f702de"}, + {file = 
"rpds_py-0.22.3-cp313-cp313-win_amd64.whl", hash = "sha256:c58e2339def52ef6b71b8f36d13c3688ea23fa093353f3a4fee2556e62086ec9"}, + {file = "rpds_py-0.22.3-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:f82a116a1d03628a8ace4859556fb39fd1424c933341a08ea3ed6de1edb0283b"}, + {file = "rpds_py-0.22.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3dfcbc95bd7992b16f3f7ba05af8a64ca694331bd24f9157b49dadeeb287493b"}, + {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59259dc58e57b10e7e18ce02c311804c10c5a793e6568f8af4dead03264584d1"}, + {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5725dd9cc02068996d4438d397e255dcb1df776b7ceea3b9cb972bdb11260a83"}, + {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99b37292234e61325e7a5bb9689e55e48c3f5f603af88b1642666277a81f1fbd"}, + {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:27b1d3b3915a99208fee9ab092b8184c420f2905b7d7feb4aeb5e4a9c509b8a1"}, + {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f612463ac081803f243ff13cccc648578e2279295048f2a8d5eb430af2bae6e3"}, + {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f73d3fef726b3243a811121de45193c0ca75f6407fe66f3f4e183c983573e130"}, + {file = "rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:3f21f0495edea7fdbaaa87e633a8689cd285f8f4af5c869f27bc8074638ad69c"}, + {file = "rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:1e9663daaf7a63ceccbbb8e3808fe90415b0757e2abddbfc2e06c857bf8c5e2b"}, + {file = "rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:a76e42402542b1fae59798fab64432b2d015ab9d0c8c47ba7addddbaf7952333"}, + {file = "rpds_py-0.22.3-cp313-cp313t-win32.whl", hash = "sha256:69803198097467ee7282750acb507fba35ca22cc3b85f16cf45fb01cb9097730"}, + {file = "rpds_py-0.22.3-cp313-cp313t-win_amd64.whl", hash = "sha256:f5cf2a0c2bdadf3791b5c205d55a37a54025c6e18a71c71f82bb536cf9a454bf"}, + {file = "rpds_py-0.22.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:378753b4a4de2a7b34063d6f95ae81bfa7b15f2c1a04a9518e8644e81807ebea"}, + {file = "rpds_py-0.22.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3445e07bf2e8ecfeef6ef67ac83de670358abf2996916039b16a218e3d95e97e"}, + {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b2513ba235829860b13faa931f3b6846548021846ac808455301c23a101689d"}, + {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eaf16ae9ae519a0e237a0f528fd9f0197b9bb70f40263ee57ae53c2b8d48aeb3"}, + {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:583f6a1993ca3369e0f80ba99d796d8e6b1a3a2a442dd4e1a79e652116413091"}, + {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4617e1915a539a0d9a9567795023de41a87106522ff83fbfaf1f6baf8e85437e"}, + {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c150c7a61ed4a4f4955a96626574e9baf1adf772c2fb61ef6a5027e52803543"}, + {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2fa4331c200c2521512595253f5bb70858b90f750d39b8cbfd67465f8d1b596d"}, + {file = "rpds_py-0.22.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = 
"sha256:214b7a953d73b5e87f0ebece4a32a5bd83c60a3ecc9d4ec8f1dca968a2d91e99"}, + {file = "rpds_py-0.22.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f47ad3d5f3258bd7058d2d506852217865afefe6153a36eb4b6928758041d831"}, + {file = "rpds_py-0.22.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f276b245347e6e36526cbd4a266a417796fc531ddf391e43574cf6466c492520"}, + {file = "rpds_py-0.22.3-cp39-cp39-win32.whl", hash = "sha256:bbb232860e3d03d544bc03ac57855cd82ddf19c7a07651a7c0fdb95e9efea8b9"}, + {file = "rpds_py-0.22.3-cp39-cp39-win_amd64.whl", hash = "sha256:cfbc454a2880389dbb9b5b398e50d439e2e58669160f27b60e5eca11f68ae17c"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d48424e39c2611ee1b84ad0f44fb3b2b53d473e65de061e3f460fc0be5f1939d"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:24e8abb5878e250f2eb0d7859a8e561846f98910326d06c0d51381fed59357bd"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b232061ca880db21fa14defe219840ad9b74b6158adb52ddf0e87bead9e8493"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac0a03221cdb5058ce0167ecc92a8c89e8d0decdc9e99a2ec23380793c4dcb96"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb0c341fa71df5a4595f9501df4ac5abfb5a09580081dffbd1ddd4654e6e9123"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bf9db5488121b596dbfc6718c76092fda77b703c1f7533a226a5a9f65248f8ad"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b8db6b5b2d4491ad5b6bdc2bc7c017eec108acbf4e6785f42a9eb0ba234f4c9"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b3d504047aba448d70cf6fa22e06cb09f7cbd761939fdd47604f5e007675c24e"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:e61b02c3f7a1e0b75e20c3978f7135fd13cb6cf551bf4a6d29b999a88830a338"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:e35ba67d65d49080e8e5a1dd40101fccdd9798adb9b050ff670b7d74fa41c566"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:26fd7cac7dd51011a245f29a2cc6489c4608b5a8ce8d75661bb4a1066c52dfbe"}, + {file = "rpds_py-0.22.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:177c7c0fce2855833819c98e43c262007f42ce86651ffbb84f37883308cb0e7d"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:bb47271f60660803ad11f4c61b42242b8c1312a31c98c578f79ef9387bbde21c"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:70fb28128acbfd264eda9bf47015537ba3fe86e40d046eb2963d75024be4d055"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44d61b4b7d0c2c9ac019c314e52d7cbda0ae31078aabd0f22e583af3e0d79723"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f0e260eaf54380380ac3808aa4ebe2d8ca28b9087cf411649f96bad6900c728"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b25bc607423935079e05619d7de556c91fb6adeae9d5f80868dde3468657994b"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:fb6116dfb8d1925cbdb52595560584db42a7f664617a1f7d7f6e32f138cdf37d"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a63cbdd98acef6570c62b92a1e43266f9e8b21e699c363c0fef13bd530799c11"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2b8f60e1b739a74bab7e01fcbe3dddd4657ec685caa04681df9d562ef15b625f"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:2e8b55d8517a2fda8d95cb45d62a5a8bbf9dd0ad39c5b25c8833efea07b880ca"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:2de29005e11637e7a2361fa151f780ff8eb2543a0da1413bb951e9f14b699ef3"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:666ecce376999bf619756a24ce15bb14c5bfaf04bf00abc7e663ce17c3f34fe7"}, + {file = "rpds_py-0.22.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:5246b14ca64a8675e0a7161f7af68fe3e910e6b90542b4bfb5439ba752191df6"}, + {file = "rpds_py-0.22.3.tar.gz", hash = "sha256:e32fee8ab45d3c2db6da19a5323bc3362237c8b653c70194414b892fd06a080d"}, ] [[package]] @@ -3350,29 +3331,29 @@ pyasn1 = ">=0.1.3" [[package]] name = "ruff" -version = "0.6.9" +version = "0.8.2" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.6.9-py3-none-linux_armv6l.whl", hash = "sha256:064df58d84ccc0ac0fcd63bc3090b251d90e2a372558c0f057c3f75ed73e1ccd"}, - {file = "ruff-0.6.9-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:140d4b5c9f5fc7a7b074908a78ab8d384dd7f6510402267bc76c37195c02a7ec"}, - {file = "ruff-0.6.9-py3-none-macosx_11_0_arm64.whl", hash = "sha256:53fd8ca5e82bdee8da7f506d7b03a261f24cd43d090ea9db9a1dc59d9313914c"}, - {file = "ruff-0.6.9-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645d7d8761f915e48a00d4ecc3686969761df69fb561dd914a773c1a8266e14e"}, - {file = "ruff-0.6.9-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eae02b700763e3847595b9d2891488989cac00214da7f845f4bcf2989007d577"}, - {file = "ruff-0.6.9-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d5ccc9e58112441de8ad4b29dcb7a86dc25c5f770e3c06a9d57e0e5eba48829"}, - {file = "ruff-0.6.9-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:417b81aa1c9b60b2f8edc463c58363075412866ae4e2b9ab0f690dc1e87ac1b5"}, - {file = "ruff-0.6.9-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3c866b631f5fbce896a74a6e4383407ba7507b815ccc52bcedabb6810fdb3ef7"}, - {file = "ruff-0.6.9-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7b118afbb3202f5911486ad52da86d1d52305b59e7ef2031cea3425142b97d6f"}, - {file = "ruff-0.6.9-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a67267654edc23c97335586774790cde402fb6bbdb3c2314f1fc087dee320bfa"}, - {file = "ruff-0.6.9-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3ef0cc774b00fec123f635ce5c547dac263f6ee9fb9cc83437c5904183b55ceb"}, - {file = "ruff-0.6.9-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:12edd2af0c60fa61ff31cefb90aef4288ac4d372b4962c2864aeea3a1a2460c0"}, - {file = "ruff-0.6.9-py3-none-musllinux_1_2_i686.whl", hash = "sha256:55bb01caeaf3a60b2b2bba07308a02fca6ab56233302406ed5245180a05c5625"}, - {file = "ruff-0.6.9-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:925d26471fa24b0ce5a6cdfab1bb526fb4159952385f386bdcc643813d472039"}, - {file = "ruff-0.6.9-py3-none-win32.whl", hash 
= "sha256:eb61ec9bdb2506cffd492e05ac40e5bc6284873aceb605503d8494180d6fc84d"}, - {file = "ruff-0.6.9-py3-none-win_amd64.whl", hash = "sha256:785d31851c1ae91f45b3d8fe23b8ae4b5170089021fbb42402d811135f0b7117"}, - {file = "ruff-0.6.9-py3-none-win_arm64.whl", hash = "sha256:a9641e31476d601f83cd602608739a0840e348bda93fec9f1ee816f8b6798b93"}, - {file = "ruff-0.6.9.tar.gz", hash = "sha256:b076ef717a8e5bc819514ee1d602bbdca5b4420ae13a9cf61a0c0a4f53a2baa2"}, + {file = "ruff-0.8.2-py3-none-linux_armv6l.whl", hash = "sha256:c49ab4da37e7c457105aadfd2725e24305ff9bc908487a9bf8d548c6dad8bb3d"}, + {file = "ruff-0.8.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ec016beb69ac16be416c435828be702ee694c0d722505f9c1f35e1b9c0cc1bf5"}, + {file = "ruff-0.8.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:f05cdf8d050b30e2ba55c9b09330b51f9f97d36d4673213679b965d25a785f3c"}, + {file = "ruff-0.8.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60f578c11feb1d3d257b2fb043ddb47501ab4816e7e221fbb0077f0d5d4e7b6f"}, + {file = "ruff-0.8.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cbd5cf9b0ae8f30eebc7b360171bd50f59ab29d39f06a670b3e4501a36ba5897"}, + {file = "ruff-0.8.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b402ddee3d777683de60ff76da801fa7e5e8a71038f57ee53e903afbcefdaa58"}, + {file = "ruff-0.8.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:705832cd7d85605cb7858d8a13d75993c8f3ef1397b0831289109e953d833d29"}, + {file = "ruff-0.8.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:32096b41aaf7a5cc095fa45b4167b890e4c8d3fd217603f3634c92a541de7248"}, + {file = "ruff-0.8.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e769083da9439508833cfc7c23e351e1809e67f47c50248250ce1ac52c21fb93"}, + {file = "ruff-0.8.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fe716592ae8a376c2673fdfc1f5c0c193a6d0411f90a496863c99cd9e2ae25d"}, + {file = "ruff-0.8.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:81c148825277e737493242b44c5388a300584d73d5774defa9245aaef55448b0"}, + {file = "ruff-0.8.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:d261d7850c8367704874847d95febc698a950bf061c9475d4a8b7689adc4f7fa"}, + {file = "ruff-0.8.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:1ca4e3a87496dc07d2427b7dd7ffa88a1e597c28dad65ae6433ecb9f2e4f022f"}, + {file = "ruff-0.8.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:729850feed82ef2440aa27946ab39c18cb4a8889c1128a6d589ffa028ddcfc22"}, + {file = "ruff-0.8.2-py3-none-win32.whl", hash = "sha256:ac42caaa0411d6a7d9594363294416e0e48fc1279e1b0e948391695db2b3d5b1"}, + {file = "ruff-0.8.2-py3-none-win_amd64.whl", hash = "sha256:2aae99ec70abf43372612a838d97bfe77d45146254568d94926e8ed5bbb409ea"}, + {file = "ruff-0.8.2-py3-none-win_arm64.whl", hash = "sha256:fb88e2a506b70cfbc2de6fae6681c4f944f7dd5f2fe87233a7233d888bad73e8"}, + {file = "ruff-0.8.2.tar.gz", hash = "sha256:b84f4f414dda8ac7f75075c1fa0b905ac0ff25361f42e6d5da681a465e0f78e5"}, ] [[package]] @@ -3431,13 +3412,13 @@ typing-extensions = "*" [[package]] name = "six" -version = "1.16.0" +version = "1.17.0" description = "Python 2 and 3 compatibility utilities" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = 
"sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, + {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, + {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, ] [[package]] @@ -3453,56 +3434,56 @@ files = [ [[package]] name = "snowflake-connector-python" -version = "3.12.3" +version = "3.12.4" description = "Snowflake Connector for Python" optional = false python-versions = ">=3.8" files = [ - {file = "snowflake_connector_python-3.12.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:497a096fc379ef0846b2f1cf11a8d7620f0d090f08a77d9e93473845014d57d1"}, - {file = "snowflake_connector_python-3.12.3-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:055c5808d524497213e4cc9ae91ec3e46cb8342b314e78bc3e139d733dc16741"}, - {file = "snowflake_connector_python-3.12.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a5dc512d62ef693041ed2ad82931231caddc16e14ffc2842da3e3dd4240b83d"}, - {file = "snowflake_connector_python-3.12.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a46448f7279d444084eb84a9cddea67662e80ccfaddf41713b9e9aab2b1242e9"}, - {file = "snowflake_connector_python-3.12.3-cp310-cp310-win_amd64.whl", hash = "sha256:821b774b77129ce9f03729456ac1f21d69fedb50e5ce957178131c7bb3d8279f"}, - {file = "snowflake_connector_python-3.12.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:82290134978d11628026b447052219ce8d880e36937204f1f0332dfc3f2e92e9"}, - {file = "snowflake_connector_python-3.12.3-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:20b5c8000ee9cee11b0f9a6ae26640f0d498ce77f7e2ec649a2f0d306523792d"}, - {file = "snowflake_connector_python-3.12.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca6500d16bdbd37da88e589cc3e82b90272471d3aabfe4a79ec1cf4696675acf"}, - {file = "snowflake_connector_python-3.12.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b455ba117a68da436e253899674fae1a93669eaefdde8a903c03eb65b7e87c86"}, - {file = "snowflake_connector_python-3.12.3-cp311-cp311-win_amd64.whl", hash = "sha256:205219fcaeee2d33db5d0d023d60518e3bd8272ce1679be2199d7f362d255054"}, - {file = "snowflake_connector_python-3.12.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3d830ca32c864b730cba5d92900d850752199635c4fb0ae0a70ee677f62aee70"}, - {file = "snowflake_connector_python-3.12.3-cp312-cp312-macosx_11_0_x86_64.whl", hash = "sha256:597b0c74ec57ba693191ae2de8db9536e349ee32cab152df657473e498b6fd87"}, - {file = "snowflake_connector_python-3.12.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2215d8a4c5e25ea0d2183fe693c3fdf058cd6035e5c84710d532dc04ab4ffd31"}, - {file = "snowflake_connector_python-3.12.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8ba9c261904c1ba7cae6035c7881224cf979da39c8b7c7cb10236fdfc57e505"}, - {file = "snowflake_connector_python-3.12.3-cp312-cp312-win_amd64.whl", hash = "sha256:f0d0fcb948ef0812ab162ec9767622f345554043a07439c0c1a9474c86772320"}, - {file = "snowflake_connector_python-3.12.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fe742a0b2fb1c79a21e95b97c49a05783bc00314d1184d227c5fe5b57688af12"}, - {file = "snowflake_connector_python-3.12.3-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:a8584a44a6bb41d2056cf1b833e629c76e28c5303d2c875c1a23bda46a1cd43a"}, - {file = 
"snowflake_connector_python-3.12.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd990db8e4886c32ba5c63758e8dc4814e2e75f5fd3fe79d43f7e5ee0fc46793"}, - {file = "snowflake_connector_python-3.12.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4fe7f91f6e44bda877e77403a586d7487ca2c52dc1a32a705b2fea33f9c763a"}, - {file = "snowflake_connector_python-3.12.3-cp38-cp38-win_amd64.whl", hash = "sha256:4994e95eff593dc44c28243ef0ae8d27b8b1aeb96dd64cbcea5bcf0e4dfb77fb"}, - {file = "snowflake_connector_python-3.12.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ac33a7dd54b35f94c4b91369971dbd6467a914dff4b01c46e77e7e6901d7eca4"}, - {file = "snowflake_connector_python-3.12.3-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:a26876322811fe2b93f6d814dcfe016f1df680a12624026ecf57a6bcdf20f969"}, - {file = "snowflake_connector_python-3.12.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c0bb390be2e15b6b7cccab7fbe1ef94e1e9ab13790c974aa44761298cdc2641"}, - {file = "snowflake_connector_python-3.12.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7340f73af4ae72e6af8fe28a1b8e196a0c99943071afc96ce419efb4da80035"}, - {file = "snowflake_connector_python-3.12.3-cp39-cp39-win_amd64.whl", hash = "sha256:c314749bd0151218b654a7d4646a39067ab650bdc86dfebb1884b056b0bdb4b4"}, - {file = "snowflake_connector_python-3.12.3.tar.gz", hash = "sha256:02873c7f7a3b10322e28dddc2be6907f8ab8ecad93d6d6af14c77c2f53091b88"}, + {file = "snowflake_connector_python-3.12.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6f141c159e3244bd660279f87f32e39351b2845fcb75f8138f31d2219f983b05"}, + {file = "snowflake_connector_python-3.12.4-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:091458ba777c24adff659c5c28f0f5bb0bcca8a9b6ecc5641ae25b7c20a8f43d"}, + {file = "snowflake_connector_python-3.12.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23049d341da681ec7131cead71cdf7b1761ae5bcc08bcbdb931dcef6c25e8a5f"}, + {file = "snowflake_connector_python-3.12.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc88a09d77a8ce7e445094b2409b606ddb208b5fc9f7c7a379d0255a8d566e9d"}, + {file = "snowflake_connector_python-3.12.4-cp310-cp310-win_amd64.whl", hash = "sha256:3c33fbba036805c1767ea48eb40ffc3fb79d61f2a4bb4e77b571ea6f6a998be8"}, + {file = "snowflake_connector_python-3.12.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2ec5cfaa1526084cf4d0e7849d5ace601245cb4ad9675ab3cd7d799b3abea481"}, + {file = "snowflake_connector_python-3.12.4-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:ff225824b3a0fa5e822442de72172f97028f04ae183877f1305d538d8d6c5d11"}, + {file = "snowflake_connector_python-3.12.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9beced2789dc75e8f1e749aa637e7ec9b03302b4ed4b793ae0f1ff32823370e"}, + {file = "snowflake_connector_python-3.12.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ea47450a04ff713f3adf28053e34103bd990291e62daee9721c76597af4b2b5"}, + {file = "snowflake_connector_python-3.12.4-cp311-cp311-win_amd64.whl", hash = "sha256:748f9125854dca07ea471bb2bb3c5bb932a53f9b8a77ba348b50b738c77203ce"}, + {file = "snowflake_connector_python-3.12.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4bcd0371b20d199f15e6a3c0b489bf18e27f2a88c84cf3194b2569ca039fa7d1"}, + {file = "snowflake_connector_python-3.12.4-cp312-cp312-macosx_11_0_x86_64.whl", hash = 
"sha256:7900d82a450b206fa2ed6c42cd65d9b3b9fd4547eca1696937175fac2a03ba37"}, + {file = "snowflake_connector_python-3.12.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:300f0562aeea55e40ee03b45205dbef7b78f5ba2f1787a278c7b807e7d8db22c"}, + {file = "snowflake_connector_python-3.12.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6762a00948f003be55d7dc5de9de690315d01951a94371ec3db069d9303daba"}, + {file = "snowflake_connector_python-3.12.4-cp312-cp312-win_amd64.whl", hash = "sha256:83ca896790a7463b6c8cd42e1a29b8ea197cc920839ae6ee96a467475eab4ec2"}, + {file = "snowflake_connector_python-3.12.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:886d2cbf0aaa5eac81df05efefaff145773e732d88ded7f6e2465580a4c31d1e"}, + {file = "snowflake_connector_python-3.12.4-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:c06f553486ba6715f18bb2933446adace4cfbb40f54b7fa0d46839930ecf945a"}, + {file = "snowflake_connector_python-3.12.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:158f616e36453e72550937256b160badcea6d4fb38bff21fdf551813ebe409b4"}, + {file = "snowflake_connector_python-3.12.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53b80d2e3233419920f116e277ad8d422fbbce172c25209acf9095fdde6293b0"}, + {file = "snowflake_connector_python-3.12.4-cp38-cp38-win_amd64.whl", hash = "sha256:f40bd66199064b73d3b313132d3a8c297aedc9da1489e38bfa2e925d17871f6d"}, + {file = "snowflake_connector_python-3.12.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:34947e4147d65b9211ab785f76c16cfb2f5d5a0b639358f72ccbf524ff3eda6f"}, + {file = "snowflake_connector_python-3.12.4-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:de703efc36b9b12eab6d75dffeec362b0bbcc2f33ffd9913b0d178d2bf30d8cd"}, + {file = "snowflake_connector_python-3.12.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5010cee09cfe20f72dbd9b1f3ee21d07fd280c03db243aa2c048708e7331f358"}, + {file = "snowflake_connector_python-3.12.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28be3b68e2b1d880373506eab4bae5b323d7de6215630585435ab60d3fea7feb"}, + {file = "snowflake_connector_python-3.12.4-cp39-cp39-win_amd64.whl", hash = "sha256:bc8766d16c98a75a57e65fb7697c6732b871309c2fa79698b54fb18cbd36733c"}, + {file = "snowflake_connector_python-3.12.4.tar.gz", hash = "sha256:289e0691dfbf8ec8b7a8f58bcbb95a819890fe5e5b278fdbfc885059a63a946f"}, ] [package.dependencies] asn1crypto = ">0.24.0,<2.0.0" certifi = ">=2017.4.17" cffi = ">=1.9,<2.0.0" -charset-normalizer = ">=2,<4" +charset_normalizer = ">=2,<4" cryptography = ">=3.1.0" filelock = ">=3.5,<4" idna = ">=2.5,<4" packaging = "*" platformdirs = ">=2.6.0,<5.0.0" pyjwt = "<3.0.0" -pyOpenSSL = ">=16.2.0,<25.0.0" +pyOpenSSL = ">=22.0.0,<25.0.0" pytz = "*" requests = "<3.0.0" sortedcontainers = ">=2.4.0" tomlkit = "*" -typing-extensions = ">=4.3,<5" +typing_extensions = ">=4.3,<5" [package.extras] development = ["Cython", "coverage", "more-itertools", "numpy (<1.27.0)", "pendulum (!=2.1.1)", "pexpect", "pytest (<7.5.0)", "pytest-cov", "pytest-rerunfailures", "pytest-timeout", "pytest-xdist", "pytzdata"] @@ -3511,13 +3492,13 @@ secure-local-storage = ["keyring (>=23.1.0,<26.0.0)"] [[package]] name = "snowflake-sqlalchemy" -version = "1.7.0" +version = "1.7.1" description = "Snowflake SQLAlchemy Dialect" optional = false python-versions = ">=3.8" files = [ - {file = "snowflake_sqlalchemy-1.7.0-py3-none-any.whl", hash = 
"sha256:96de0f8a657c215794e178cc7df2dcf2b7d09379dfae259181c48acb9a0c2838"}, - {file = "snowflake_sqlalchemy-1.7.0.tar.gz", hash = "sha256:b1c12a4a30f714c2e93ae4221733f2975d5d305d2046cb400cb90ff86ca48f71"}, + {file = "snowflake_sqlalchemy-1.7.1-py3-none-any.whl", hash = "sha256:eecb63e6830e7fec2a0fc5c583c0e9903fe1b2ea40bcac974e03932cd24662f2"}, + {file = "snowflake_sqlalchemy-1.7.1.tar.gz", hash = "sha256:a06b78d8b83ca74318e6fbb2982b9fbd9ce99e202f502c1f6af7ba69d05da1f5"}, ] [package.dependencies] @@ -3772,13 +3753,13 @@ telegram = ["requests"] [[package]] name = "types-jsonschema" -version = "4.23.0.20240813" +version = "4.23.0.20241208" description = "Typing stubs for jsonschema" optional = false python-versions = ">=3.8" files = [ - {file = "types-jsonschema-4.23.0.20240813.tar.gz", hash = "sha256:c93f48206f209a5bc4608d295ac39f172fb98b9e24159ce577dbd25ddb79a1c0"}, - {file = "types_jsonschema-4.23.0.20240813-py3-none-any.whl", hash = "sha256:be283e23f0b87547316c2ee6b0fd36d95ea30e921db06478029e10b5b6aa6ac3"}, + {file = "types_jsonschema-4.23.0.20241208-py3-none-any.whl", hash = "sha256:87934bd9231c99d8eff94cacfc06ba668f7973577a9bd9e1f9de957c5737313e"}, + {file = "types_jsonschema-4.23.0.20241208.tar.gz", hash = "sha256:e8b15ad01f290ecf6aea53f93fbdf7d4730e4600313e89e8a7f95622f7e87b7c"}, ] [package.dependencies] @@ -3984,80 +3965,6 @@ files = [ [package.dependencies] bracex = ">=2.1.1" -[[package]] -name = "wrapt" -version = "1.17.0" -description = "Module for decorators, wrappers and monkey patching." -optional = false -python-versions = ">=3.8" -files = [ - {file = "wrapt-1.17.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2a0c23b8319848426f305f9cb0c98a6e32ee68a36264f45948ccf8e7d2b941f8"}, - {file = "wrapt-1.17.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1ca5f060e205f72bec57faae5bd817a1560fcfc4af03f414b08fa29106b7e2d"}, - {file = "wrapt-1.17.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e185ec6060e301a7e5f8461c86fb3640a7beb1a0f0208ffde7a65ec4074931df"}, - {file = "wrapt-1.17.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb90765dd91aed05b53cd7a87bd7f5c188fcd95960914bae0d32c5e7f899719d"}, - {file = "wrapt-1.17.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:879591c2b5ab0a7184258274c42a126b74a2c3d5a329df16d69f9cee07bba6ea"}, - {file = "wrapt-1.17.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fce6fee67c318fdfb7f285c29a82d84782ae2579c0e1b385b7f36c6e8074fffb"}, - {file = "wrapt-1.17.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0698d3a86f68abc894d537887b9bbf84d29bcfbc759e23f4644be27acf6da301"}, - {file = "wrapt-1.17.0-cp310-cp310-win32.whl", hash = "sha256:69d093792dc34a9c4c8a70e4973a3361c7a7578e9cd86961b2bbf38ca71e4e22"}, - {file = "wrapt-1.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:f28b29dc158ca5d6ac396c8e0a2ef45c4e97bb7e65522bfc04c989e6fe814575"}, - {file = "wrapt-1.17.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:74bf625b1b4caaa7bad51d9003f8b07a468a704e0644a700e936c357c17dd45a"}, - {file = "wrapt-1.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f2a28eb35cf99d5f5bd12f5dd44a0f41d206db226535b37b0c60e9da162c3ed"}, - {file = "wrapt-1.17.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:81b1289e99cf4bad07c23393ab447e5e96db0ab50974a280f7954b071d41b489"}, - {file = 
"wrapt-1.17.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f2939cd4a2a52ca32bc0b359015718472d7f6de870760342e7ba295be9ebaf9"}, - {file = "wrapt-1.17.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6a9653131bda68a1f029c52157fd81e11f07d485df55410401f745007bd6d339"}, - {file = "wrapt-1.17.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4e4b4385363de9052dac1a67bfb535c376f3d19c238b5f36bddc95efae15e12d"}, - {file = "wrapt-1.17.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bdf62d25234290db1837875d4dceb2151e4ea7f9fff2ed41c0fde23ed542eb5b"}, - {file = "wrapt-1.17.0-cp311-cp311-win32.whl", hash = "sha256:5d8fd17635b262448ab8f99230fe4dac991af1dabdbb92f7a70a6afac8a7e346"}, - {file = "wrapt-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:92a3d214d5e53cb1db8b015f30d544bc9d3f7179a05feb8f16df713cecc2620a"}, - {file = "wrapt-1.17.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:89fc28495896097622c3fc238915c79365dd0ede02f9a82ce436b13bd0ab7569"}, - {file = "wrapt-1.17.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:875d240fdbdbe9e11f9831901fb8719da0bd4e6131f83aa9f69b96d18fae7504"}, - {file = "wrapt-1.17.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5ed16d95fd142e9c72b6c10b06514ad30e846a0d0917ab406186541fe68b451"}, - {file = "wrapt-1.17.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18b956061b8db634120b58f668592a772e87e2e78bc1f6a906cfcaa0cc7991c1"}, - {file = "wrapt-1.17.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:daba396199399ccabafbfc509037ac635a6bc18510ad1add8fd16d4739cdd106"}, - {file = "wrapt-1.17.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4d63f4d446e10ad19ed01188d6c1e1bb134cde8c18b0aa2acfd973d41fcc5ada"}, - {file = "wrapt-1.17.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8a5e7cc39a45fc430af1aefc4d77ee6bad72c5bcdb1322cfde852c15192b8bd4"}, - {file = "wrapt-1.17.0-cp312-cp312-win32.whl", hash = "sha256:0a0a1a1ec28b641f2a3a2c35cbe86c00051c04fffcfcc577ffcdd707df3f8635"}, - {file = "wrapt-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:3c34f6896a01b84bab196f7119770fd8466c8ae3dfa73c59c0bb281e7b588ce7"}, - {file = "wrapt-1.17.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:714c12485aa52efbc0fc0ade1e9ab3a70343db82627f90f2ecbc898fdf0bb181"}, - {file = "wrapt-1.17.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da427d311782324a376cacb47c1a4adc43f99fd9d996ffc1b3e8529c4074d393"}, - {file = "wrapt-1.17.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba1739fb38441a27a676f4de4123d3e858e494fac05868b7a281c0a383c098f4"}, - {file = "wrapt-1.17.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e711fc1acc7468463bc084d1b68561e40d1eaa135d8c509a65dd534403d83d7b"}, - {file = "wrapt-1.17.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:140ea00c87fafc42739bd74a94a5a9003f8e72c27c47cd4f61d8e05e6dec8721"}, - {file = "wrapt-1.17.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:73a96fd11d2b2e77d623a7f26e004cc31f131a365add1ce1ce9a19e55a1eef90"}, - {file = "wrapt-1.17.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0b48554952f0f387984da81ccfa73b62e52817a4386d070c75e4db7d43a28c4a"}, - {file = "wrapt-1.17.0-cp313-cp313-win32.whl", hash = 
"sha256:498fec8da10e3e62edd1e7368f4b24aa362ac0ad931e678332d1b209aec93045"}, - {file = "wrapt-1.17.0-cp313-cp313-win_amd64.whl", hash = "sha256:fd136bb85f4568fffca995bd3c8d52080b1e5b225dbf1c2b17b66b4c5fa02838"}, - {file = "wrapt-1.17.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:17fcf043d0b4724858f25b8826c36e08f9fb2e475410bece0ec44a22d533da9b"}, - {file = "wrapt-1.17.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4a557d97f12813dc5e18dad9fa765ae44ddd56a672bb5de4825527c847d6379"}, - {file = "wrapt-1.17.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0229b247b0fc7dee0d36176cbb79dbaf2a9eb7ecc50ec3121f40ef443155fb1d"}, - {file = "wrapt-1.17.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8425cfce27b8b20c9b89d77fb50e368d8306a90bf2b6eef2cdf5cd5083adf83f"}, - {file = "wrapt-1.17.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9c900108df470060174108012de06d45f514aa4ec21a191e7ab42988ff42a86c"}, - {file = "wrapt-1.17.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:4e547b447073fc0dbfcbff15154c1be8823d10dab4ad401bdb1575e3fdedff1b"}, - {file = "wrapt-1.17.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:914f66f3b6fc7b915d46c1cc424bc2441841083de01b90f9e81109c9759e43ab"}, - {file = "wrapt-1.17.0-cp313-cp313t-win32.whl", hash = "sha256:a4192b45dff127c7d69b3bdfb4d3e47b64179a0b9900b6351859f3001397dabf"}, - {file = "wrapt-1.17.0-cp313-cp313t-win_amd64.whl", hash = "sha256:4f643df3d4419ea3f856c5c3f40fec1d65ea2e89ec812c83f7767c8730f9827a"}, - {file = "wrapt-1.17.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:69c40d4655e078ede067a7095544bcec5a963566e17503e75a3a3e0fe2803b13"}, - {file = "wrapt-1.17.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f495b6754358979379f84534f8dd7a43ff8cff2558dcdea4a148a6e713a758f"}, - {file = "wrapt-1.17.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:baa7ef4e0886a6f482e00d1d5bcd37c201b383f1d314643dfb0367169f94f04c"}, - {file = "wrapt-1.17.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fc931382e56627ec4acb01e09ce66e5c03c384ca52606111cee50d931a342d"}, - {file = "wrapt-1.17.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8f8909cdb9f1b237786c09a810e24ee5e15ef17019f7cecb207ce205b9b5fcce"}, - {file = "wrapt-1.17.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ad47b095f0bdc5585bced35bd088cbfe4177236c7df9984b3cc46b391cc60627"}, - {file = "wrapt-1.17.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:948a9bd0fb2c5120457b07e59c8d7210cbc8703243225dbd78f4dfc13c8d2d1f"}, - {file = "wrapt-1.17.0-cp38-cp38-win32.whl", hash = "sha256:5ae271862b2142f4bc687bdbfcc942e2473a89999a54231aa1c2c676e28f29ea"}, - {file = "wrapt-1.17.0-cp38-cp38-win_amd64.whl", hash = "sha256:f335579a1b485c834849e9075191c9898e0731af45705c2ebf70e0cd5d58beed"}, - {file = "wrapt-1.17.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d751300b94e35b6016d4b1e7d0e7bbc3b5e1751e2405ef908316c2a9024008a1"}, - {file = "wrapt-1.17.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7264cbb4a18dc4acfd73b63e4bcfec9c9802614572025bdd44d0721983fc1d9c"}, - {file = "wrapt-1.17.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33539c6f5b96cf0b1105a0ff4cf5db9332e773bb521cc804a90e58dc49b10578"}, - 
{file = "wrapt-1.17.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c30970bdee1cad6a8da2044febd824ef6dc4cc0b19e39af3085c763fdec7de33"}, - {file = "wrapt-1.17.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:bc7f729a72b16ee21795a943f85c6244971724819819a41ddbaeb691b2dd85ad"}, - {file = "wrapt-1.17.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:6ff02a91c4fc9b6a94e1c9c20f62ea06a7e375f42fe57587f004d1078ac86ca9"}, - {file = "wrapt-1.17.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2dfb7cff84e72e7bf975b06b4989477873dcf160b2fd89959c629535df53d4e0"}, - {file = "wrapt-1.17.0-cp39-cp39-win32.whl", hash = "sha256:2399408ac33ffd5b200480ee858baa58d77dd30e0dd0cab6a8a9547135f30a88"}, - {file = "wrapt-1.17.0-cp39-cp39-win_amd64.whl", hash = "sha256:4f763a29ee6a20c529496a20a7bcb16a73de27f5da6a843249c7047daf135977"}, - {file = "wrapt-1.17.0-py3-none-any.whl", hash = "sha256:d2c63b93548eda58abf5188e505ffed0229bf675f7c3090f8e36ad55b8cbc371"}, - {file = "wrapt-1.17.0.tar.gz", hash = "sha256:16187aa2317c731170a88ef35e8937ae0f533c402872c1ee5e6d079fcf320801"}, -] - [[package]] name = "xmltodict" version = "0.13.0" @@ -4072,4 +3979,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = ">=3.10,<3.13" -content-hash = "0bb3b49bc31380f7e407774e943fa5eff3c1ccce6e7c4ba8b734915b2ca4bae8" +content-hash = "95af27a8586b9bc849487f493c30f4cf2fdbaf92f344eff5f996a83edef33a0f" diff --git a/pyproject.toml b/pyproject.toml index a2f10850..866a0154 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -15,7 +15,7 @@ enable = true [tool.poetry.dependencies] python = ">=3.10,<3.13" -airbyte-api = "^0.49.2" +airbyte-api = "^0.52.1" airbyte-cdk = "^6.6.4,!=6.7.0rc1,!=6.7.0rc2" airbyte-protocol-models-pdv2 = "^0.13.0" click = "^8.1.7" @@ -68,7 +68,7 @@ pytest-mock = "^3.14.0" pytest-mypy = "^0.10.3" pytest-timeout = "^2.3.1" responses = "^0.25.0" -ruff = "^0.6.4" +ruff = "^0.8.2" sqlalchemy2-stubs = "^0.0.2a38" tomli = "^2.0" types-jsonschema = "^4.20.0.0" @@ -94,14 +94,16 @@ markers = [ ] filterwarnings = [ # syntax: "action:message_regex:category:module:line" # Treat python warnings as errors in pytest - "error", + # "error", # TODO: Revert. Currently disabled because `airbyte_cdk.sources.source.ExperimentalClassWarning` ignore is not working # # Snowflake reregisters its functions, which is fine. "ignore::sqlalchemy.exc.SAWarning", # "GenericFunction is already registered and is going to be overridden."" # Ignore these, specifically on Windows because of file cleanup and locking issues: "ignore:unclosed file:ResourceWarning", + "ignore:Failed to decode:UserWarning", # https://github.com/airbytehq/pyairbyte/issues/320 "ignore::pytest.PytestUnraisableExceptionWarning", # Order matters here. This must be last because it will fail if we # hit another warning during the import. + "ignore::airbyte_cdk.sources.source.ExperimentalClassWarning", "ignore::airbyte.exceptions.AirbyteExperimentalFeatureWarning", # CDK has these refs: "ignore::pydantic.warnings.PydanticDeprecatedSince20", @@ -166,7 +168,6 @@ select = [ "TID", # flake8-tidy-imports "TRY", # tryceratops "TRY002", # Disallow raising vanilla Exception. Create or use a custom exception instead. - "TRY003", # Disallow vanilla string passing. Prefer kwargs to the exception constructur. 
"UP", # pyupgrade "W", # pycodestyle (warnings) "YTT", # flake8-2020 @@ -176,8 +177,6 @@ ignore = [ # These we don't agree with or don't want to prioritize to enforce: "ANN003", # kwargs missing type annotations - "ANN101", # Type annotations for 'self' args - "ANN102", # Type annotations for 'cls' args "COM812", # Because it conflicts with ruff auto-format "EM", # flake8-errmsgs (may reconsider later) "DJ", # Django linting @@ -201,7 +200,9 @@ ignore = [ "FIX002", # Allow "TODO:" until release (then switch to requiring links via TDO003) "PLW0603", # Using the global statement to update _cache is discouraged "PLW0108", # Lambda may be unnecessary; consider inlining inner function - # "TD003", # Require links for TODOs (now enabled) + "TRY003", # Allow exceptions to receive strings in constructors. + # "TD003", # Require links for TODOs (now enabled) + "UP038", # Allow tuples instead of "|" syntax in `isinstance()` checks ("|" is sometimes slower) ] fixable = ["ALL"] unfixable = [ diff --git a/tests/conftest.py b/tests/conftest.py index 0a340ab6..a5fb2fad 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -17,8 +17,8 @@ import docker import psycopg import pytest -import ulid from _pytest.nodes import Item +from airbyte._util import text_util from airbyte._util.meta import is_windows from airbyte._util.venv_util import get_bin_dir from airbyte.caches import PostgresCache @@ -79,18 +79,6 @@ def test_priority(item: Item) -> int: items.sort(key=test_priority) for item in items: - # TODO: Remove this 'skip' once Cloud Workspace issue is resolved. - # (Test user apparently deleted.) - if ( - "cloud_workspace_id" in item.fixturenames - or "cloud_workspace_id" in item.fixturenames - ): - item.add_marker( - pytest.mark.skip( - reason="Skipping cloud tests. (FIXME: test user deleted.)" - ) - ) - # Skip tests that require Docker if Docker is not available (including on Windows). if ( "new_postgres_cache" in item.fixturenames @@ -247,7 +235,7 @@ def new_postgres_cache(new_postgres_db: str): database="postgres", schema_name="public", # TODO: Move this to schema name when we support it (breaks as of 2024-01-31): - table_prefix=f"test{str(ulid.ULID())[-6:]}_", + table_prefix=f"test{text_util.generate_random_suffix()}_", ) yield config diff --git a/tests/docs_tests/test_docs_checked_in.py b/tests/docs_tests/test_docs_checked_in.py index 911aacb8..e3167ee2 100644 --- a/tests/docs_tests/test_docs_checked_in.py +++ b/tests/docs_tests/test_docs_checked_in.py @@ -22,6 +22,6 @@ def test_docs_generation(): diff = os.system("git diff --exit-code docs/generated") # if there is a diff, fail the test - assert ( - diff == 0 - ), "Docs are out of date. Please run `poetry run poe docs-generate` and commit the changes." + assert diff == 0, ( + "Docs are out of date. Please run `poetry run poe docs-generate` and commit the changes." 
+ ) diff --git a/tests/integration_tests/cloud/conftest.py b/tests/integration_tests/cloud/conftest.py index 7ee58e11..66a27447 100644 --- a/tests/integration_tests/cloud/conftest.py +++ b/tests/integration_tests/cloud/conftest.py @@ -3,22 +3,35 @@ from __future__ import annotations +import json import os import sys from pathlib import Path +from typing import Any, Generator import pytest from airbyte._util.api_util import CLOUD_API_ROOT +from airbyte._util.temp_files import as_temp_files from airbyte._util.venv_util import get_bin_dir -from airbyte.caches.base import CacheBase from airbyte.cloud import CloudWorkspace +from airbyte.destinations.base import Destination from airbyte.secrets.base import SecretString from airbyte.secrets.google_gsm import GoogleGSMSecretManager +from airbyte_api.models import ( + DestinationBigquery, + DestinationDuckdb, + DestinationPostgres, + DestinationSnowflake, +) + +from airbyte.sources.base import Source +from airbyte.sources.util import get_source AIRBYTE_CLOUD_WORKSPACE_ID = "19d7a891-8e0e-40ac-8a8c-5faf8d11e47c" ENV_MOTHERDUCK_API_KEY = "PYAIRBYTE_MOTHERDUCK_API_KEY" AIRBYTE_CLOUD_API_KEY_SECRET_NAME = "PYAIRBYTE_CLOUD_INTEROP_API_KEY" +AIRBYTE_CLOUD_CREDS_SECRET_NAME = "PYAIRBYTE_CLOUD_INTEROP_CREDS" @pytest.fixture(autouse=True) @@ -33,7 +46,7 @@ def add_venv_bin_to_path(monkeypatch: pytest.MonkeyPatch) -> None: @pytest.fixture -def cloud_workspace_id() -> str: +def workspace_id() -> str: return AIRBYTE_CLOUD_WORKSPACE_ID @@ -42,13 +55,31 @@ def airbyte_cloud_api_root() -> str: return CLOUD_API_ROOT +CloudAPICreds = tuple[SecretString, SecretString] + + @pytest.fixture -def airbyte_cloud_api_key(ci_secret_manager: GoogleGSMSecretManager) -> SecretString: - secret: SecretString | None = ci_secret_manager.get_secret( - AIRBYTE_CLOUD_API_KEY_SECRET_NAME - ) - assert secret, f"Secret '{AIRBYTE_CLOUD_API_KEY_SECRET_NAME}' not found." - return secret +def airbyte_cloud_credentials( + ci_secret_manager: GoogleGSMSecretManager, +) -> CloudAPICreds: + secret = ci_secret_manager.get_secret( + AIRBYTE_CLOUD_CREDS_SECRET_NAME, + ).parse_json() + return SecretString(secret["client_id"]), SecretString(secret["client_secret"]) + + +@pytest.fixture +def airbyte_cloud_client_id( + airbyte_cloud_credentials: CloudAPICreds, +) -> SecretString: + return airbyte_cloud_credentials[0] + + +@pytest.fixture +def airbyte_cloud_client_secret( + airbyte_cloud_credentials: CloudAPICreds, +) -> SecretString: + return airbyte_cloud_credentials[1] @pytest.fixture @@ -58,19 +89,54 @@ def motherduck_api_key(motherduck_secrets: dict) -> SecretString: @pytest.fixture def cloud_workspace( - cloud_workspace_id: str, - airbyte_cloud_api_key: SecretString, + workspace_id: str, airbyte_cloud_api_root: str, + airbyte_cloud_client_id: SecretString, + airbyte_cloud_client_secret: SecretString, ) -> CloudWorkspace: return CloudWorkspace( - cloud_workspace_id=cloud_workspace_id, - api_key=airbyte_cloud_api_key, + workspace_id=workspace_id, api_root=airbyte_cloud_api_root, + client_id=airbyte_cloud_client_id, + client_secret=airbyte_cloud_client_secret, ) +@pytest.fixture +def deployable_dummy_source() -> Source: + """A local PyAirbyte `Source` object. + + For some reason `source-hardcoded-records` and `source-e2e-tests` are not working. 
+ """ + return get_source( + "source-faker", + streams=["products"], + config={ + "count": 100, + }, + # install_if_missing=False, + ) + + +@pytest.fixture +def deployable_dummy_destination( + new_bigquery_destination: Destination, +) -> Destination: + """A local PyAirbyte `Destination` object. + + # TODO: Use DevNullDestination instead of BigQueryDestination. + # Problem is that 'dev-null' is not accepted on Cloud as of now. + # Need a workaround. + """ + return new_bigquery_destination + + @pytest.fixture(scope="function") -def new_deployable_cache(request) -> CacheBase: +def new_deployable_destination( + request, +) -> ( + DestinationDuckdb | DestinationPostgres | DestinationBigquery | DestinationSnowflake +): """This is a placeholder fixture that will be overridden by pytest_generate_tests().""" return request.getfixturevalue(request.param) @@ -81,19 +147,63 @@ def pytest_generate_tests(metafunc: pytest.Metafunc) -> None: This is useful for running the same tests with different cache types, to ensure that the tests can pass across all cache types. """ - deployable_cache_fixtures: dict[str, str] = { + deployable_destination_fixtures: dict[str, str] = { # Ordered by priority (fastest first) - # "MotherDuck": "new_motherduck_cache", + # "MotherDuck": "new_motherduck_destination", # "Postgres": "new_remote_postgres_cache", - "BigQuery": "new_bigquery_cache", - "Snowflake": "new_snowflake_cache", + "BigQuery": "new_bigquery_destination", + "Snowflake": "new_snowflake_destination", } - if "new_deployable_cache" in metafunc.fixturenames: + if "new_deployable_destination" in metafunc.fixturenames: metafunc.parametrize( - "new_deployable_cache", - deployable_cache_fixtures.values(), - ids=deployable_cache_fixtures.keys(), + "new_deployable_destination", + deployable_destination_fixtures.values(), + ids=deployable_destination_fixtures.keys(), indirect=True, scope="function", ) + + +@pytest.fixture(scope="session") +def with_bigquery_credentials_env_vars( + ci_secret_manager: GoogleGSMSecretManager, +) -> Generator[None, Any, None]: + """This fixture sets up the BigQuery credentials file for the session. + + This is needed because when retrieving config from the REST API, the credentials are + obfuscated. + """ + dest_bigquery_config = ci_secret_manager.get_secret( + secret_name="SECRET_DESTINATION-BIGQUERY_CREDENTIALS__CREDS" + ).parse_json() + + credentials_json = dest_bigquery_config["credentials_json"] + with as_temp_files(files_contents=[credentials_json]) as (credentials_path,): + os.environ["BIGQUERY_CREDENTIALS_PATH"] = credentials_path + os.environ["BIGQUERY_CREDENTIALS_JSON"] = json.dumps(credentials_json) + + yield + + return + + +@pytest.fixture(scope="session") +def snowflake_creds(ci_secret_manager: GoogleGSMSecretManager) -> dict: + return ci_secret_manager.get_secret( + "AIRBYTE_LIB_SNOWFLAKE_CREDS", + ).parse_json() + + +@pytest.fixture(scope="session") +def with_snowflake_password_env_var(snowflake_creds: dict): + """This fixture sets up Snowflake credentials for tests. + + This is needed because when retrieving config from the REST API, the credentials are + obfuscated. 
+ """ + os.environ["SNOWFLAKE_PASSWORD"] = snowflake_creds["password"] + + yield + + return diff --git a/tests/integration_tests/cloud/test_cloud_api_util.py b/tests/integration_tests/cloud/test_cloud_api_util.py index d215a615..4c0c54ed 100644 --- a/tests/integration_tests/cloud/test_cloud_api_util.py +++ b/tests/integration_tests/cloud/test_cloud_api_util.py @@ -7,24 +7,98 @@ from __future__ import annotations -import ulid -from airbyte._util import api_util +from airbyte_api.models import DestinationResponse, SourceResponse, WorkspaceResponse +from airbyte._util import api_util, text_util from airbyte_api.models import DestinationDuckdb, SourceFaker +from airbyte.secrets.base import SecretString + + +def test_get_workspace( + workspace_id: str, + airbyte_cloud_api_root: str, + airbyte_cloud_client_id: SecretString, + airbyte_cloud_client_secret: SecretString, +) -> None: + workspace = api_util.get_workspace( + workspace_id=workspace_id, + api_root=airbyte_cloud_api_root, + client_id=airbyte_cloud_client_id, + client_secret=airbyte_cloud_client_secret, + ) + assert workspace.workspace_id == workspace_id + + +def test_list_workspaces( + workspace_id: str, + airbyte_cloud_api_root: str, + airbyte_cloud_client_id: SecretString, + airbyte_cloud_client_secret: SecretString, +) -> None: + result: list[WorkspaceResponse] = api_util.list_workspaces( + workspace_id=workspace_id, + api_root=airbyte_cloud_api_root, + client_id=airbyte_cloud_client_id, + client_secret=airbyte_cloud_client_secret, + ) + assert result + assert len(result) > 0 + assert all(isinstance(workspace, WorkspaceResponse) for workspace in result) + + +def test_list_sources( + workspace_id: str, + airbyte_cloud_api_root: str, + airbyte_cloud_client_id: SecretString, + airbyte_cloud_client_secret: SecretString, +) -> None: + result: list[SourceResponse] = api_util.list_sources( + workspace_id=workspace_id, + api_root=airbyte_cloud_api_root, + client_id=airbyte_cloud_client_id, + client_secret=airbyte_cloud_client_secret, + ) + assert ( + result + and len(result) > 0 + and all(isinstance(source, SourceResponse) for source in result) + ) + + +def test_list_destinations( + workspace_id: str, + airbyte_cloud_api_root: str, + airbyte_cloud_client_id: SecretString, + airbyte_cloud_client_secret: SecretString, +) -> None: + result: list[DestinationResponse] = api_util.list_destinations( + workspace_id=workspace_id, + api_root=airbyte_cloud_api_root, + client_id=airbyte_cloud_client_id, + client_secret=airbyte_cloud_client_secret, + ) + assert ( + result + and len(result) > 0 + and all(isinstance(destination, DestinationResponse) for destination in result) + ) + def test_create_and_delete_source( - cloud_workspace_id: str, + workspace_id: str, airbyte_cloud_api_root: str, - airbyte_cloud_api_key: str, + airbyte_cloud_client_id: SecretString, + airbyte_cloud_client_secret: SecretString, ) -> None: - new_resource_name = "deleteme-source-faker" + str(ulid.ULID()).lower()[-6:] + new_resource_name = "deleteme-source-faker" + text_util.generate_random_suffix() source_config = SourceFaker() source = api_util.create_source( name=new_resource_name, - api_root=airbyte_cloud_api_root, - api_key=airbyte_cloud_api_key, - cloud_workspace_id=cloud_workspace_id, + workspace_id=workspace_id, config=source_config, + api_root=airbyte_cloud_api_root, + client_id=airbyte_cloud_client_id, + client_secret=airbyte_cloud_client_secret, ) assert source.name == new_resource_name assert source.source_type == "faker" @@ -33,18 +107,22 @@ def 
test_create_and_delete_source( api_util.delete_source( source_id=source.source_id, api_root=airbyte_cloud_api_root, - api_key=airbyte_cloud_api_key, - cloud_workspace_id=cloud_workspace_id, + workspace_id=workspace_id, + client_id=airbyte_cloud_client_id, + client_secret=airbyte_cloud_client_secret, ) def test_create_and_delete_destination( - cloud_workspace_id: str, + workspace_id: str, airbyte_cloud_api_root: str, - airbyte_cloud_api_key: str, motherduck_api_key: str, + airbyte_cloud_client_id: SecretString, + airbyte_cloud_client_secret: SecretString, ) -> None: - new_resource_name = "deleteme-destination-faker" + str(ulid.ULID()).lower()[-6:] + new_resource_name = ( + "deleteme-destination-faker" + text_util.generate_random_suffix() + ) destination_config = DestinationDuckdb( destination_path="temp_db", motherduck_api_key=motherduck_api_key, @@ -53,9 +131,10 @@ def test_create_and_delete_destination( destination = api_util.create_destination( name=new_resource_name, api_root=airbyte_cloud_api_root, - api_key=airbyte_cloud_api_key, - cloud_workspace_id=cloud_workspace_id, + workspace_id=workspace_id, config=destination_config, + client_id=airbyte_cloud_client_id, + client_secret=airbyte_cloud_client_secret, ) assert destination.name == new_resource_name assert destination.destination_type == "duckdb" @@ -64,26 +143,33 @@ def test_create_and_delete_destination( api_util.delete_destination( destination_id=destination.destination_id, api_root=airbyte_cloud_api_root, - api_key=airbyte_cloud_api_key, - cloud_workspace_id=cloud_workspace_id, + workspace_id=workspace_id, + client_id=airbyte_cloud_client_id, + client_secret=airbyte_cloud_client_secret, ) def test_create_and_delete_connection( - cloud_workspace_id: str, + workspace_id: str, airbyte_cloud_api_root: str, - airbyte_cloud_api_key: str, + airbyte_cloud_client_id: SecretString, + airbyte_cloud_client_secret: SecretString, motherduck_api_key: str, ) -> None: - new_source_name = "deleteme-source-faker" + str(ulid.ULID()).lower()[-6:] - new_destination_name = "deleteme-destination-dummy" + str(ulid.ULID()).lower()[-6:] - new_connection_name = "deleteme-connection-dummy" + str(ulid.ULID()).lower()[-6:] + new_source_name = "deleteme-source-faker" + text_util.generate_random_suffix() + new_destination_name = ( + "deleteme-destination-dummy" + text_util.generate_random_suffix() + ) + new_connection_name = ( + "deleteme-connection-dummy" + text_util.generate_random_suffix() + ) source = api_util.create_source( name=new_source_name, api_root=airbyte_cloud_api_root, - api_key=airbyte_cloud_api_key, - cloud_workspace_id=cloud_workspace_id, + workspace_id=workspace_id, config=SourceFaker(), + client_id=airbyte_cloud_client_id, + client_secret=airbyte_cloud_client_secret, ) assert source.name == new_source_name assert source.source_type == "faker" @@ -92,12 +178,13 @@ def test_create_and_delete_connection( destination = api_util.create_destination( name=new_destination_name, api_root=airbyte_cloud_api_root, - api_key=airbyte_cloud_api_key, - cloud_workspace_id=cloud_workspace_id, + workspace_id=workspace_id, config=DestinationDuckdb( destination_path="temp_db", motherduck_api_key=motherduck_api_key, ), + client_id=airbyte_cloud_client_id, + client_secret=airbyte_cloud_client_secret, ) assert destination.name == new_destination_name assert destination.destination_type == "duckdb" @@ -106,12 +193,13 @@ def test_create_and_delete_connection( connection = api_util.create_connection( name=new_connection_name, api_root=airbyte_cloud_api_root, - 
api_key=airbyte_cloud_api_key, - cloud_workspace_id=cloud_workspace_id, + workspace_id=workspace_id, source_id=source.source_id, destination_id=destination.destination_id, prefix="", selected_stream_names=["users", "purchases", "products"], + client_id=airbyte_cloud_client_id, + client_secret=airbyte_cloud_client_secret, ) assert connection.source_id == source.source_id assert connection.destination_id == destination.destination_id @@ -120,18 +208,21 @@ def test_create_and_delete_connection( api_util.delete_connection( connection_id=connection.connection_id, api_root=airbyte_cloud_api_root, - api_key=airbyte_cloud_api_key, - cloud_workspace_id=cloud_workspace_id, + workspace_id=workspace_id, + client_id=airbyte_cloud_client_id, + client_secret=airbyte_cloud_client_secret, ) api_util.delete_source( source_id=source.source_id, api_root=airbyte_cloud_api_root, - api_key=airbyte_cloud_api_key, - cloud_workspace_id=cloud_workspace_id, + workspace_id=workspace_id, + client_id=airbyte_cloud_client_id, + client_secret=airbyte_cloud_client_secret, ) api_util.delete_destination( destination_id=destination.destination_id, api_root=airbyte_cloud_api_root, - api_key=airbyte_cloud_api_key, - cloud_workspace_id=cloud_workspace_id, + workspace_id=workspace_id, + client_id=airbyte_cloud_client_id, + client_secret=airbyte_cloud_client_secret, ) diff --git a/tests/integration_tests/cloud/test_cloud_sql_reads.py b/tests/integration_tests/cloud/test_cloud_sql_reads.py index 3bc7f932..34a9d9da 100644 --- a/tests/integration_tests/cloud/test_cloud_sql_reads.py +++ b/tests/integration_tests/cloud/test_cloud_sql_reads.py @@ -3,12 +3,16 @@ from __future__ import annotations -from contextlib import suppress import airbyte as ab import pandas as pd import pytest from airbyte import cloud +from airbyte.caches.base import CacheBase +from airbyte.caches.bigquery import BigQueryCache +from airbyte.caches.snowflake import SnowflakeCache +from airbyte.caches.postgres import PostgresCache +from airbyte.caches.duckdb import DuckDBCache from airbyte.cloud.sync_results import SyncResult from sqlalchemy.engine.base import Engine @@ -22,57 +26,8 @@ def deployable_source() -> ab.Source: @pytest.fixture -def previous_job_run_id() -> str: - return "10136196" - - -@pytest.mark.super_slow -def test_deploy_and_run_and_read( - cloud_workspace: cloud.CloudWorkspace, - new_deployable_cache: ab.BigQueryCache | ab.SnowflakeCache, - deployable_source: ab.Source, -) -> None: - """Test reading from a cache.""" - - # Deploy source, destination, and connection: - source_id = cloud_workspace._deploy_source(source=deployable_source) - destination_id = cloud_workspace._deploy_cache_as_destination( - cache=new_deployable_cache - ) - connection: cloud.CloudConnection = cloud_workspace._deploy_connection( - source=deployable_source, - cache=new_deployable_cache, - table_prefix=new_deployable_cache.table_prefix, - selected_streams=deployable_source.get_selected_streams(), - ) - - # Run sync and get result: - sync_result: SyncResult = connection.run_sync() - - # TODO: Remove this second run after Destination bug is resolved: - # https://github.com/airbytehq/airbyte/issues/36875 - sync_result: SyncResult = connection.run_sync() - - # Check sync result: - assert sync_result.is_job_complete() - assert set(sync_result.stream_names) == set(["users", "products", "purchases"]) - - dataset: ab.CachedDataset = sync_result.get_dataset(stream_name="users") - assert dataset.stream_name == "users" - data_as_list = list(dataset) - assert len(data_as_list) == 100 - - 
# Cleanup - with suppress(Exception): - cloud_workspace._permanently_delete_connection( - connection_id=connection, - delete_source=True, - delete_destination=True, - ) - with suppress(Exception): - cloud_workspace._permanently_delete_source(source_id=source_id) - with suppress(Exception): - cloud_workspace._permanently_delete_destination(destination_id=destination_id) +def previous_job_run_id() -> int: + return 10136196 @pytest.mark.parametrize( @@ -93,14 +48,17 @@ def test_deploy_and_run_and_read( def test_read_from_deployed_connection( cloud_workspace: cloud.CloudWorkspace, deployed_connection_id: str, + with_snowflake_password_env_var, + with_bigquery_credentials_env_vars, ) -> None: """Test reading from a cache.""" # Run sync and get result: - sync_result: SyncResult = cloud_workspace.get_sync_result( + sync_result = cloud_workspace.get_connection( connection_id=deployed_connection_id - ) + ).get_sync_result() # Test sync result: + assert sync_result assert sync_result.is_job_complete() cache = sync_result.get_sql_cache() @@ -125,6 +83,59 @@ def test_read_from_deployed_connection( assert pandas_df[col].notnull().all() +@pytest.mark.parametrize( + "deployed_connection_id, cache_type", + [ + pytest.param( + "c7b4d838-a612-495a-9d91-a14e477add51", + SnowflakeCache, + id="Faker->Snowflake", + ), + pytest.param( + "0e1d6b32-b8e3-4b68-91a3-3a314599c782", + BigQueryCache, + id="Faker->BigQuery", + ), + pytest.param( + "", + PostgresCache, + id="Faker->Postgres", + marks=pytest.mark.skip(reason="Not yet supported"), + ), + pytest.param( + "", + DuckDBCache, + id="Faker->MotherDuck", + marks=pytest.mark.skip(reason="Not yet supported"), + ), + ], +) +def test_translate_cloud_job_to_sql_cache( + cloud_workspace: cloud.CloudWorkspace, + deployed_connection_id: str, + cache_type: type[CacheBase], + previous_job_run_id: int, + with_bigquery_credentials_env_vars, + with_snowflake_password_env_var, +) -> None: + """Test reading from a cache.""" + # Run sync and get result: + sync_result: SyncResult | None = cloud_workspace.get_connection( + connection_id=deployed_connection_id + ).get_sync_result( + job_id=previous_job_run_id, + ) + assert sync_result, f"Failed to get sync result for job {previous_job_run_id}" + + # Test sync result: + assert sync_result.is_job_complete() + + cache = sync_result.get_sql_cache() + assert isinstance(cache, cache_type), f"Expected {cache_type}, got {type(cache)}" + sqlalchemy_url = cache.get_sql_alchemy_url() + engine: Engine = sync_result.get_sql_engine() + + @pytest.mark.parametrize( "deployed_connection_id", [ @@ -143,14 +154,18 @@ def test_read_from_deployed_connection( def test_read_from_previous_job( cloud_workspace: cloud.CloudWorkspace, deployed_connection_id: str, - previous_job_run_id: str, + previous_job_run_id: int, + with_bigquery_credentials_env_vars, + with_snowflake_password_env_var, ) -> None: """Test reading from a cache.""" # Run sync and get result: - sync_result: SyncResult = cloud_workspace.get_sync_result( - connection_id=deployed_connection_id, + sync_result: SyncResult | None = cloud_workspace.get_connection( + connection_id=deployed_connection_id + ).get_sync_result( job_id=previous_job_run_id, ) + assert sync_result, f"Failed to get sync result for job {previous_job_run_id}" # Test sync result: assert sync_result.is_job_complete() diff --git a/tests/integration_tests/cloud/test_cloud_sync.py b/tests/integration_tests/cloud/test_cloud_sync.py index 4376c412..29ba4437 100644 --- a/tests/integration_tests/cloud/test_cloud_sync.py +++ 
b/tests/integration_tests/cloud/test_cloud_sync.py @@ -5,12 +5,14 @@ """ from __future__ import annotations +from dataclasses import asdict import airbyte as ab import pytest -from airbyte.caches import MotherDuckCache +from airbyte._util import text_util from airbyte.cloud import CloudWorkspace from airbyte.cloud.sync_results import SyncResult +from airbyte.destinations.base import Destination @pytest.fixture @@ -19,57 +21,78 @@ def pre_created_connection_id() -> str: @pytest.mark.super_slow +@pytest.mark.parametrize( + "pre_created_connection_id", + [ + "80857d37-1f21-4500-a802-f5ac08d1a3dd", + ], +) def test_run_connection( cloud_workspace: CloudWorkspace, pre_created_connection_id: str, ) -> None: """Test running a connection.""" - sync_result: SyncResult = cloud_workspace.run_sync( - connection_id=pre_created_connection_id - ) + sync_result: SyncResult = cloud_workspace.get_connection( + pre_created_connection_id + ).run_sync() assert sync_result.is_job_complete() assert sync_result.stream_names -@pytest.mark.super_slow def test_get_previous_sync_result( cloud_workspace: CloudWorkspace, pre_created_connection_id: str, ) -> None: """Test running a connection.""" - sync_result: SyncResult = cloud_workspace.get_previous_sync_logs( + sync_result: SyncResult = cloud_workspace.get_connection( connection_id=pre_created_connection_id, - )[0] + ).get_previous_sync_logs()[0] assert sync_result.is_job_complete() assert sync_result.get_job_status() assert sync_result.stream_names @pytest.mark.super_slow -@pytest.mark.skip(reason="This test is not yet complete. It is hanging currently.") +# @pytest.mark.skip(reason="This test is not yet complete. It is hanging currently.") def test_deploy_and_run_connection( cloud_workspace: CloudWorkspace, - motherduck_api_key: str, + new_deployable_destination, + with_bigquery_credentials_env_vars, + with_snowflake_password_env_var, ) -> None: """Test deploying a source and cache to a workspace as a new connection.""" source = ab.get_source( "source-faker", config={"count": 100}, ) - source.check() - - cache = MotherDuckCache( - api_key=motherduck_api_key, - database="temp", - schema_name="public", + cloud_source = cloud_workspace.deploy_source( + name=f"test-source-{text_util.generate_random_suffix()}", + source=source, ) + if not isinstance(new_deployable_destination, (dict, Destination)): + try: + new_deployable_destination = asdict(new_deployable_destination) + except Exception as ex: + raise ValueError( + "new_deployable_destination must be a dictionary or a dataclass. " + f"Instead, it is a {type(new_deployable_destination)}." 
+ ) from ex - connection_id: str = cloud_workspace._deploy_connection(source=source, cache=cache) - sync_result = cloud_workspace.run_sync(connection_id=connection_id) + cloud_destination = cloud_workspace.deploy_destination( + name=f"test-destination-{text_util.generate_random_suffix()}", + destination=new_deployable_destination, + ) + connection = cloud_workspace.deploy_connection( + connection_name=f"test-connection-{text_util.generate_random_suffix()}", + source=cloud_source, + destination=cloud_destination, + selected_streams=source.get_available_streams(), + ) + sync_result = connection.run_sync() _ = sync_result cache = sync_result.get_sql_cache() - assert cache.stream_names + assert list(cache.streams.keys()) assert cache.streams["users"].to_pandas() - cloud_workspace._permanently_delete_connection(connection_id=connection_id) + cloud_workspace.permanently_delete_connection(connection) diff --git a/tests/integration_tests/cloud/test_cloud_workspaces.py b/tests/integration_tests/cloud/test_cloud_workspaces.py index 0dd4eada..cdc47d37 100644 --- a/tests/integration_tests/cloud/test_cloud_workspaces.py +++ b/tests/integration_tests/cloud/test_cloud_workspaces.py @@ -7,10 +7,22 @@ from __future__ import annotations import airbyte as ab -import pytest -from airbyte.caches import MotherDuckCache from airbyte.cloud import CloudWorkspace from airbyte.cloud.connections import CloudConnection +from airbyte.cloud.connectors import CloudSource + + +def test_deploy_destination( + cloud_workspace: CloudWorkspace, + deployable_dummy_destination: ab.Destination, +) -> None: + """Test deploying a source to a workspace.""" + cloud_destination = cloud_workspace.deploy_destination( + name="test-destination", + destination=deployable_dummy_destination, + random_name_suffix=True, + ) + cloud_workspace.permanently_delete_destination(cloud_destination) def test_deploy_source( @@ -22,52 +34,56 @@ def test_deploy_source( config={"count": 100}, ) source.check() - source_id: str = cloud_workspace._deploy_source(source) - - cloud_workspace._permanently_delete_source(source=source_id) + cloud_source: CloudSource = cloud_workspace.deploy_source( + name="test-source", + source=source, + ) + cloud_workspace.permanently_delete_source(cloud_source) -def test_deploy_cache_as_destination( +def test_deploy_dummy_source( + deployable_dummy_source: ab.Source, cloud_workspace: CloudWorkspace, - motherduck_api_key: str, ) -> None: - """Test deploying a cache to a workspace as a destination.""" - cache = MotherDuckCache( - api_key=motherduck_api_key, - database="new_db", - schema_name="public", + """Test deploying a source to a workspace.""" + deployable_dummy_source.check() + + cloud_source: CloudSource = cloud_workspace.deploy_source( + name="test-source", + source=deployable_dummy_source, ) - destination_id: str = cloud_workspace._deploy_cache_as_destination(cache=cache) - cloud_workspace._permanently_delete_destination(destination=destination_id) + cloud_workspace.permanently_delete_source(cloud_source) -@pytest.mark.skip("This test is flaky/failing and needs to be fixed.") def test_deploy_connection( cloud_workspace: CloudWorkspace, - motherduck_api_key: str, + deployable_dummy_source: ab.Source, + deployable_dummy_destination: ab.Destination, ) -> None: """Test deploying a source and cache to a workspace as a new connection.""" - source = ab.get_source( - "source-faker", - config={"count": 100}, + stream_names = deployable_dummy_source.get_selected_streams() + cloud_source = cloud_workspace.deploy_source( + 
name="test-source", + source=deployable_dummy_source, + random_name_suffix=True, ) - source.check() - - cache = MotherDuckCache( - api_key=motherduck_api_key, - database="new_db", - schema_name="public", - table_prefix="abc_deleteme_", + cloud_destination = cloud_workspace.deploy_destination( + name="test-destination", + destination=deployable_dummy_destination, + random_name_suffix=True, ) - connection: CloudConnection = cloud_workspace._deploy_connection( - source=source, - cache=cache, + connection: CloudConnection = cloud_workspace.deploy_connection( + connection_name="test-connection", + source=cloud_source, + destination=cloud_destination, + selected_streams=stream_names, + table_prefix="zzz_deleteme_", ) - assert set(connection.stream_names) == set(["users", "products", "purchases"]) - assert connection.table_prefix == "abc_deleteme_" - cloud_workspace._permanently_delete_connection( + assert set(connection.stream_names) == set(stream_names) + assert connection.table_prefix == "zzz_deleteme_" + cloud_workspace.permanently_delete_connection( connection=connection, - delete_source=True, - delete_destination=True, + cascade_delete_source=True, + cascade_delete_destination=True, ) diff --git a/tests/integration_tests/conftest.py b/tests/integration_tests/conftest.py index 6e0beac3..b1ef25d8 100644 --- a/tests/integration_tests/conftest.py +++ b/tests/integration_tests/conftest.py @@ -3,18 +3,18 @@ from __future__ import annotations -import os from contextlib import suppress +from typing import Any, Generator import airbyte as ab import pytest -import ulid -from airbyte._util import meta +from airbyte._util import meta, text_util from airbyte._util.temp_files import as_temp_files from airbyte.caches.base import CacheBase from airbyte.caches.bigquery import BigQueryCache from airbyte.caches.motherduck import MotherDuckCache from airbyte.caches.snowflake import SnowflakeCache +from airbyte.destinations.base import Destination from airbyte.secrets import GoogleGSMSecretManager, SecretHandle from sqlalchemy import create_engine, text @@ -62,55 +62,108 @@ def new_motherduck_cache( motherduck_secrets, ) -> MotherDuckCache: return MotherDuckCache( - database="integration_tests_deleteany", - schema_name=f"test_deleteme_{str(ulid.ULID()).lower()[-6:]}", + database="my_db", # TODO: Use a dedicated DB for testing + schema_name=f"test_deleteme_{text_util.generate_random_suffix()}", api_key=motherduck_secrets["motherduck_api_key"], ) +@pytest.fixture +def new_motherduck_destination( + motherduck_secrets, +) -> Destination: + return ab.get_destination( + "destination-motherduck", + config=motherduck_secrets, + install_if_missing=False, + ) + + @pytest.fixture(scope="session") -def snowflake_creds(ci_secret_manager: GoogleGSMSecretManager) -> dict: - return ci_secret_manager.get_secret( +def new_snowflake_destination_config(ci_secret_manager: GoogleGSMSecretManager) -> dict: + config = ci_secret_manager.get_secret( "AIRBYTE_LIB_SNOWFLAKE_CREDS", ).parse_json() + config["schema"] = f"test_deleteme_{text_util.generate_random_suffix()}" + return config @pytest.fixture -def new_snowflake_cache(snowflake_creds: dict): - config = SnowflakeCache( - account=snowflake_creds["account"], - username=snowflake_creds["username"], - password=snowflake_creds["password"], - database=snowflake_creds["database"], - warehouse=snowflake_creds["warehouse"], - role=snowflake_creds["role"], - schema_name=f"test{str(ulid.ULID()).lower()[-6:]}", +def new_snowflake_cache( + new_snowflake_destination_config: dict[str, Any], +) -> 
Generator[SnowflakeCache, Any, None]: + cache = SnowflakeCache( + account=new_snowflake_destination_config["account"], + username=new_snowflake_destination_config["username"], + password=new_snowflake_destination_config["password"], + database=new_snowflake_destination_config["database"], + warehouse=new_snowflake_destination_config["warehouse"], + role=new_snowflake_destination_config["role"], + schema_name=new_snowflake_destination_config["schema"], ) - sqlalchemy_url = config.get_sql_alchemy_url() + sqlalchemy_url = cache.get_sql_alchemy_url() - yield config + yield cache engine = create_engine( - config.get_sql_alchemy_url(), + sqlalchemy_url, future=True, ) with engine.connect() as connection: - connection.execute(text(f"DROP SCHEMA IF EXISTS {config.schema_name}")) + connection.execute( + text(f"DROP SCHEMA IF EXISTS {cache.schema_name}"), + ) @pytest.fixture -def new_bigquery_cache(ci_secret_manager: GoogleGSMSecretManager): +def new_snowflake_destination( + new_snowflake_destination_config: dict[str, Any], +) -> Destination: + dest_config = new_snowflake_destination_config.copy() + _ = dest_config.pop("destinationType", None) + return ab.get_destination( + "destination-snowflake", + config=dest_config, + install_if_missing=False, + ) + + +@pytest.fixture +def new_bigquery_destination_config( + ci_secret_manager: GoogleGSMSecretManager, +) -> dict[str, Any]: dest_bigquery_config = ci_secret_manager.get_secret( "SECRET_DESTINATION-BIGQUERY_CREDENTIALS__CREDS" ).parse_json() + dest_bigquery_config["dataset_id"] = ( + f"test_deleteme_{text_util.generate_random_suffix()}" + ) + return dest_bigquery_config - dataset_name = f"test_deleteme_{str(ulid.ULID()).lower()[-6:]}" - credentials_json = dest_bigquery_config["credentials_json"] + +@pytest.fixture +def new_bigquery_destination( + new_bigquery_destination_config: dict[str, Any], +) -> Destination: + dest_config = new_bigquery_destination_config.copy() + _ = dest_config.pop("destinationType", None) + return ab.get_destination( + "destination-bigquery", + config=dest_config, + install_if_missing=False, + ) + + +@pytest.fixture +def new_bigquery_cache( + new_bigquery_destination_config: dict[str, Any], +) -> Generator[BigQueryCache, Any, None]: + credentials_json = new_bigquery_destination_config["credentials_json"] with as_temp_files([credentials_json]) as (credentials_path,): cache = BigQueryCache( credentials_path=credentials_path, - project_name=dest_bigquery_config["project_id"], - dataset_name=dataset_name, + project_name=new_bigquery_destination_config["project_id"], + dataset_name=new_bigquery_destination_config["dataset_id"], ) yield cache @@ -124,30 +177,6 @@ def new_bigquery_cache(ci_secret_manager: GoogleGSMSecretManager): connection.execute(text(f"DROP SCHEMA IF EXISTS {cache.schema_name}")) -@pytest.fixture(autouse=True, scope="session") -def bigquery_credentials_file(ci_secret_manager: GoogleGSMSecretManager): - dest_bigquery_config = ci_secret_manager.get_secret( - secret_name="SECRET_DESTINATION-BIGQUERY_CREDENTIALS__CREDS" - ).parse_json() - - credentials_json = dest_bigquery_config["credentials_json"] - with as_temp_files(files_contents=[credentials_json]) as (credentials_path,): - os.environ["BIGQUERY_CREDENTIALS_PATH"] = credentials_path - - yield - - return - - -@pytest.fixture(autouse=True, scope="session") -def with_snowflake_password_env_var(snowflake_creds: dict): - os.environ["SNOWFLAKE_PASSWORD"] = snowflake_creds["password"] - - yield - - return - - @pytest.fixture(scope="function") def 
new_generic_cache(request) -> CacheBase: """This is a placeholder fixture that will be overridden by pytest_generate_tests().""" diff --git a/tests/integration_tests/test_all_cache_types.py b/tests/integration_tests/test_all_cache_types.py index f7966d1e..bf902016 100644 --- a/tests/integration_tests/test_all_cache_types.py +++ b/tests/integration_tests/test_all_cache_types.py @@ -20,6 +20,8 @@ from sqlalchemy import text from viztracer import VizTracer +from airbyte.results import ReadResult + # Product count is always the same, regardless of faker scale. NUM_PRODUCTS = 100 @@ -171,12 +173,14 @@ def test_faker_read( @pytest.mark.slow def test_append_strategy( source_faker_seed_a: ab.Source, - new_generic_cache: ab.caches.CacheBase, + new_duckdb_cache: ab.caches.CacheBase, ) -> None: """Test that the append strategy works as expected.""" + result: ReadResult for _ in range(2): + assert isinstance(new_duckdb_cache, ab.caches.CacheBase) result = source_faker_seed_a.read( - new_generic_cache, write_strategy="append", force_full_refresh=True + new_duckdb_cache, write_strategy="append", force_full_refresh=True ) assert len(list(result.cache.streams["users"])) == FAKER_SCALE_A * 2 @@ -188,6 +192,7 @@ def test_replace_strategy( new_generic_cache: ab.caches.CacheBase, ) -> None: """Test that the append strategy works as expected.""" + result: ReadResult for _ in range(2): result = source_faker_seed_a.read( new_generic_cache, write_strategy="replace", force_full_refresh=True @@ -212,9 +217,9 @@ def test_merge_strategy( # First run, seed A (counts should match the scale or the product count) result = source_faker_seed_a.read(new_generic_cache, write_strategy="merge") - assert ( - len(list(result.cache.streams["users"])) == FAKER_SCALE_A - ), f"Incorrect number of records in the cache. {new_generic_cache}" + assert len(list(result.cache.streams["users"])) == FAKER_SCALE_A, ( + f"Incorrect number of records in the cache. {new_generic_cache}" + ) # Second run, also seed A (should have same exact data, no change in counts) result = source_faker_seed_a.read(new_generic_cache, write_strategy="merge") diff --git a/tests/integration_tests/test_bigquery_cache.py b/tests/integration_tests/test_bigquery_cache.py index 23a07ade..2039399e 100644 --- a/tests/integration_tests/test_bigquery_cache.py +++ b/tests/integration_tests/test_bigquery_cache.py @@ -14,14 +14,13 @@ def test_bigquery_props( ) -> None: """Test that the BigQueryCache properties are set correctly.""" # assert new_bigquery_cache.credentials_path.endswith(".json") - assert ( - new_bigquery_cache.dataset_name == new_bigquery_cache.schema_name - ), "Dataset name should be the same as schema name." - assert ( - new_bigquery_cache.schema_name != "airbyte_raw" + assert new_bigquery_cache.dataset_name == new_bigquery_cache.schema_name, ( + "Dataset name should be the same as schema name." + ) + assert new_bigquery_cache.schema_name != "airbyte_raw", ( "Schema name should not be the default value." ) - assert ( - new_bigquery_cache.get_database_name() == new_bigquery_cache.project_name - ), "Database name should be the same as project name." + assert new_bigquery_cache.get_database_name() == new_bigquery_cache.project_name, ( + "Database name should be the same as project name." 
+ ) diff --git a/tests/integration_tests/test_source_faker_integration.py b/tests/integration_tests/test_source_faker_integration.py index 2117cf3e..7b4632d0 100644 --- a/tests/integration_tests/test_source_faker_integration.py +++ b/tests/integration_tests/test_source_faker_integration.py @@ -9,7 +9,6 @@ from __future__ import annotations import os -import shutil import sys import tempfile import warnings @@ -110,14 +109,6 @@ def all_cache_types( ] -@pytest.mark.xfail(reason="Source is no longer auto-installed in virtualenv.") -def test_which_source_faker() -> None: - """Test that source-faker is available on PATH.""" - assert shutil.which( - "source-faker" - ), f"Can't find source-faker on PATH: {os.environ['PATH']}" - - def test_faker_pks( source_faker_seed_a: ab.Source, duckdb_cache: DuckDBCache, @@ -141,7 +132,7 @@ def test_faker_pks( @pytest.mark.slow def test_replace_strategy( source_faker_seed_a: ab.Source, - all_cache_types: CacheBase, + all_cache_types: list[CacheBase], ) -> None: """Test that the append strategy works as expected.""" for ( @@ -158,7 +149,7 @@ def test_replace_strategy( @pytest.mark.slow def test_append_strategy( source_faker_seed_a: ab.Source, - all_cache_types: CacheBase, + all_cache_types: list[CacheBase], ) -> None: """Test that the append strategy works as expected.""" for ( @@ -181,7 +172,7 @@ def test_merge_strategy( strategy: str, source_faker_seed_a: ab.Source, source_faker_seed_b: ab.Source, - all_cache_types: CacheBase, + all_cache_types: list[CacheBase], ) -> None: """Test that the merge strategy works as expected. @@ -284,6 +275,9 @@ def test_merge_insert_not_supported_for_duckdb( raise e +@pytest.mark.xfail( + reason="Postgres cache appears ready for merge_insert support. More testing needed to confirm." +) @pytest.mark.requires_creds def test_merge_insert_not_supported_for_postgres( source_faker_seed_a: ab.Source, diff --git a/tests/integration_tests/test_source_test_fixture.py b/tests/integration_tests/test_source_test_fixture.py index 9993d5ea..7676ab2a 100644 --- a/tests/integration_tests/test_source_test_fixture.py +++ b/tests/integration_tests/test_source_test_fixture.py @@ -13,25 +13,25 @@ import airbyte as ab import pandas as pd import pytest -import ulid from airbyte import datasets from airbyte import exceptions as exc from airbyte._executors.docker import DockerExecutor from airbyte._executors.local import PathExecutor from airbyte._executors.python import VenvExecutor +from airbyte._util import text_util from airbyte._util.venv_util import get_bin_dir from airbyte.caches import PostgresCache, SnowflakeCache +from airbyte.caches.base import CacheBase from airbyte.constants import AB_INTERNAL_COLUMNS from airbyte.datasets import CachedDataset, LazyDataset, SQLDataset from airbyte.results import ReadResult -from airbyte.shared.sql_processor import SqlProcessorBase from airbyte.sources import registry from airbyte.version import get_version from sqlalchemy import column, text @pytest.fixture(scope="function", autouse=True) -def autouse_source_test_registry(source_test_registry): +def autouse_source_test_registry(source_test_registry) -> None: return @@ -45,12 +45,12 @@ def pop_internal_columns_from_dataset( if not isinstance(record, dict): record = dict(record) - assert ( - internal_column in record - ), f"Column '{internal_column}' should exist in stream data." - assert ( - record[internal_column] is not None - ), f"Column '{internal_column}' should not contain null values." 
+ assert internal_column in record, ( + f"Column '{internal_column}' should exist in stream data." + ) + assert record[internal_column] is not None, ( + f"Column '{internal_column}' should not contain null values." + ) record.pop(internal_column, None) @@ -61,21 +61,21 @@ def pop_internal_columns_from_dataset( def pop_internal_columns_from_dataframe(df: pd.DataFrame) -> pd.DataFrame: for internal_column in AB_INTERNAL_COLUMNS: - assert ( - internal_column in df.columns - ), f"Column '{internal_column}' should exist in stream data." + assert internal_column in df.columns, ( + f"Column '{internal_column}' should exist in stream data." + ) - assert ( - df[internal_column].notnull().all() - ), f"Column '{internal_column}' should not contain null values " + assert df[internal_column].notnull().all(), ( + f"Column '{internal_column}' should not contain null values " + ) return df.drop(columns=AB_INTERNAL_COLUMNS) def assert_data_matches_cache( expected_test_stream_data: dict[str, list[dict[str, str | int]]], - cache: SqlProcessorBase, - streams: list[str] = None, + cache: CacheBase, + streams: list[str] | None = None, ) -> None: for stream_name in streams or expected_test_stream_data.keys(): if len(cache[stream_name]) > 0: @@ -97,7 +97,7 @@ def assert_data_matches_cache( @pytest.fixture(scope="module", autouse=True) -def autouse_source_test_installation(source_test_installation): +def autouse_source_test_installation(source_test_installation) -> None: return @@ -107,7 +107,7 @@ def source_test(source_test_env) -> ab.Source: @pytest.fixture -def expected_test_stream_data() -> dict[str, list[dict[str, str | int]]]: +def expected_test_stream_data() -> dict[str, list[dict[str, str | int | None]]]: return { "stream1": [ { @@ -142,8 +142,9 @@ def expected_test_stream_data() -> dict[str, list[dict[str, str | int]]]: } -def test_registry_get(): +def test_registry_get() -> None: metadata = registry.get_connector_metadata("source-test") + assert metadata assert metadata.name == "source-test" assert metadata.latest_available_version == "0.0.1" @@ -173,7 +174,9 @@ def test_registry_list() -> None: } -def test_list_streams(expected_test_stream_data: dict[str, list[dict[str, str | int]]]): +def test_list_streams( + expected_test_stream_data: dict[str, list[dict[str, str | int]]], +) -> None: source = ab.get_source( "source-test", config={"apiKey": "test"}, install_if_missing=False ) @@ -188,15 +191,15 @@ def test_invalid_config() -> None: source.check() -def test_ensure_installation_detection(): +def test_ensure_installation_detection() -> None: """Assert that install isn't called, since the connector is already installed by the fixture.""" - with patch( - "airbyte._executors.python.VenvExecutor.install" - ) as mock_venv_install, patch( - "airbyte.sources.base.Source.install" - ) as mock_source_install, patch( - "airbyte._executors.python.VenvExecutor.ensure_installation" - ) as mock_ensure_installed: + with ( + patch("airbyte._executors.python.VenvExecutor.install") as mock_venv_install, + patch("airbyte.sources.base.Source.install") as mock_source_install, + patch( + "airbyte._executors.python.VenvExecutor.ensure_installation" + ) as mock_ensure_installed, + ): source = ab.get_source( "source-test", config={"apiKey": 1234}, @@ -208,7 +211,7 @@ def test_ensure_installation_detection(): assert not mock_source_install.called -def test_source_yaml_spec(): +def test_source_yaml_spec() -> None: source = ab.get_source( "source-test", config={"apiKey": 1234}, install_if_missing=False ) @@ -216,12 +219,12 @@ def 
test_source_yaml_spec(): assert source._yaml_spec.startswith("connectionSpecification:\n $schema:") -def test_non_existing_connector(): +def test_non_existing_connector() -> None: with pytest.raises(exc.AirbyteConnectorNotRegisteredError): ab.get_source("source-not-existing", config={"apiKey": "abc"}) -def test_non_existing_connector_with_local_exe(): +def test_non_existing_connector_with_local_exe() -> None: # We should not complain about the missing source if we provide a local executable source = ab.get_source( "source-not-existing", @@ -231,7 +234,7 @@ def test_non_existing_connector_with_local_exe(): assert isinstance(source.executor, PathExecutor), "Expected PathExecutor." -def test_docker_only_connector(): +def test_docker_only_connector() -> None: source = ab.get_source("source-docker-only", config={"apiKey": "abc"}) assert isinstance(source.executor, DockerExecutor), "Expected DockerExecutor." @@ -251,7 +254,7 @@ def test_version_enforcement( raises: bool, latest_available_version, requested_version, -): +) -> None: """ " Ensures version enforcement works as expected: * If no version is specified, the current version is accepted @@ -296,7 +299,7 @@ def test_version_enforcement( source.executor.ensure_installation(auto_fix=False) -def test_check(): +def test_check() -> None: source = ab.get_source( "source-test", config={"apiKey": "test"}, @@ -305,7 +308,7 @@ def test_check(): source.check() -def test_check_fail(): +def test_check_fail() -> None: source = ab.get_source("source-test", config={"apiKey": "wrong"}) with pytest.raises(Exception): @@ -328,14 +331,14 @@ def test_file_write_and_cleanup() -> None: _ = source.read(cache_wo_cleanup) # We expect all files to be cleaned up: - assert ( - len(list(Path(temp_dir_1).glob("*.jsonl.gz"))) == 0 - ), "Expected files to be cleaned up" + assert len(list(Path(temp_dir_1).glob("*.jsonl.gz"))) == 0, ( + "Expected files to be cleaned up" + ) # There are three streams, but only two of them have data: - assert ( - len(list(Path(temp_dir_2).glob("*.jsonl.gz"))) == 3 - ), "Expected files to exist" + assert len(list(Path(temp_dir_2).glob("*.jsonl.gz"))) == 3, ( + "Expected files to exist" + ) with suppress(Exception): shutil.rmtree(str(temp_dir_root)) @@ -343,7 +346,7 @@ def test_file_write_and_cleanup() -> None: def test_sync_to_duckdb( expected_test_stream_data: dict[str, list[dict[str, str | int]]], -): +) -> None: source = ab.get_source("source-test", config={"apiKey": "test"}) source.select_all_streams() @@ -357,7 +360,7 @@ def test_sync_to_duckdb( assert_data_matches_cache(expected_test_stream_data, cache) -def test_read_result_mapping(): +def test_read_result_mapping() -> None: source = ab.get_source("source-test", config={"apiKey": "test"}) source.select_all_streams() result: ReadResult = source.read(ab.new_local_cache()) @@ -375,7 +378,9 @@ def test_read_result_mapping(): } -def test_dataset_list_and_len(expected_test_stream_data): +def test_dataset_list_and_len( + expected_test_stream_data, +) -> None: source = ab.get_source("source-test", config={"apiKey": "test"}) source.select_all_streams() @@ -407,11 +412,11 @@ def test_dataset_list_and_len(expected_test_stream_data): def test_read_from_cache( expected_test_stream_data: dict[str, list[dict[str, str | int]]], -): +) -> None: """ Test that we can read from a cache that already has data (identifier by name) """ - cache_name = str(ulid.ULID()) + cache_name = text_util.generate_random_suffix() source = ab.get_source("source-test", config={"apiKey": "test"}) source.select_all_streams() @@ 
-427,11 +432,11 @@ def test_read_from_cache( def test_read_isolated_by_prefix( expected_test_stream_data: dict[str, list[dict[str, str | int]]], -): +) -> None: """ Test that cache correctly isolates streams when different table prefixes are used """ - cache_name = str(ulid.ULID()) + cache_name = text_util.generate_random_suffix() db_path = Path(f"./.cache/{cache_name}.duckdb") source = ab.get_source("source-test", config={"apiKey": "test"}) source.select_all_streams() @@ -462,18 +467,25 @@ def test_read_isolated_by_prefix( second_no_prefix_cache = ab.DuckDBCache(db_path=db_path, table_prefix=None) # validate that the first cache still has full data, while the other two have partial data - assert_data_matches_cache(expected_test_stream_data, second_same_prefix_cache) assert_data_matches_cache( - expected_test_stream_data, second_different_prefix_cache, streams=["stream1"] + expected_test_stream_data, + second_same_prefix_cache, ) assert_data_matches_cache( - expected_test_stream_data, second_no_prefix_cache, streams=["stream1"] + expected_test_stream_data, + second_different_prefix_cache, + streams=["stream1"], + ) + assert_data_matches_cache( + expected_test_stream_data, + second_no_prefix_cache, + streams=["stream1"], ) def test_merge_streams_in_cache( expected_test_stream_data: dict[str, list[dict[str, str | int]]], -): +) -> None: """ Test that we can extend a cache with new streams """ @@ -481,7 +493,7 @@ def test_merge_streams_in_cache( "primary-key-with-dot" ) # Stream not needed for this test. - cache_name = str(ulid.ULID()) + cache_name = text_util.generate_random_suffix() source = ab.get_source("source-test", config={"apiKey": "test"}) cache = ab.new_local_cache(cache_name) @@ -512,7 +524,7 @@ def test_merge_streams_in_cache( def test_read_result_as_list( expected_test_stream_data: dict[str, list[dict[str, str | int]]], -): +) -> None: source = ab.get_source("source-test", config={"apiKey": "test"}) source.select_all_streams() @@ -538,7 +550,7 @@ def test_read_result_as_list( def test_get_records_result_as_list( expected_test_stream_data: dict[str, list[dict[str, str | int]]], -): +) -> None: source = ab.get_source("source-test", config={"apiKey": "test"}) stream_1_list = list(source.get_records("stream1")) @@ -560,7 +572,7 @@ def test_get_records_result_as_list( def test_sync_with_merge_to_duckdb( expected_test_stream_data: dict[str, list[dict[str, str | int]]], -): +) -> None: """Test that the merge strategy works as expected. In this test, we sync the same data twice. If the data is not duplicated, we assume @@ -661,7 +673,7 @@ def test_cached_dataset( ) -def test_cached_dataset_filter(): +def test_cached_dataset_filter() -> None: source = ab.get_source("source-test", config={"apiKey": "test"}) source.select_all_streams() @@ -691,21 +703,21 @@ def test_cached_dataset_filter(): filtered_records: list[Mapping[str, Any]] = [row for row in filtered_dataset] # Check that the filter worked - assert ( - len(filtered_records) == 1 - ), f"Case '{case}' had incorrect number of records." + assert len(filtered_records) == 1, ( + f"Case '{case}' had incorrect number of records." + ) # Assert the stream name still matches - assert ( - filtered_dataset.stream_name == stream_name - ), f"Case '{case}' had incorrect stream name." + assert filtered_dataset.stream_name == stream_name, ( + f"Case '{case}' had incorrect stream name." 
+ ) # Check that chaining filters works chained_dataset = filtered_dataset.with_filter("column1 == 'value1'") chained_records = [row for row in chained_dataset] - assert ( - len(chained_records) == 1 - ), f"Case '{case}' had incorrect number of records after chaining filters." + assert len(chained_records) == 1, ( + f"Case '{case}' had incorrect number of records after chaining filters." + ) def test_lazy_dataset_from_source( @@ -771,7 +783,7 @@ def test_check_fail_on_missing_config(method_call): def test_sync_with_merge_to_postgres( new_postgres_cache: PostgresCache, expected_test_stream_data: dict[str, list[dict[str, str | int]]], -): +) -> None: """Test that the merge strategy works as expected. In this test, we sync the same data twice. If the data is not duplicated, we assume @@ -830,7 +842,7 @@ def test_sync_to_postgres( def test_sync_to_snowflake( new_snowflake_cache: SnowflakeCache, expected_test_stream_data: dict[str, list[dict[str, str | int]]], -): +) -> None: source = ab.get_source("source-test", config={"apiKey": "test"}) source.select_all_streams() @@ -867,19 +879,24 @@ def test_sync_limited_streams(expected_test_stream_data): ) -def test_read_stream_nonexisting(): +def test_read_stream_nonexisting() -> None: source = ab.get_source("source-test", config={"apiKey": "test"}) with pytest.raises(Exception): list(source.get_records("non-existing")) -def test_failing_path_connector(): +def test_failing_path_connector() -> None: with pytest.raises(Exception): - ab.get_source("source-test", config={"apiKey": "test"}, use_local_install=True) + source = ab.get_source( + "source-test", + config={"apiKey": "test"}, + local_executable=Path("non-existing"), + ) + source.check() -def test_succeeding_path_connector(monkeypatch): +def test_succeeding_path_connector(monkeypatch) -> None: venv_bin_path = str(get_bin_dir(Path(".venv-source-test"))) source = ab.get_source( @@ -890,7 +907,7 @@ def test_succeeding_path_connector(monkeypatch): source.check() -def test_install_uninstall(): +def test_install_uninstall() -> None: with tempfile.TemporaryDirectory() as temp_dir: source = ab.get_source( "source-test", diff --git a/tests/integration_tests/test_state_handling.py b/tests/integration_tests/test_state_handling.py index adbc288c..d9b7222c 100644 --- a/tests/integration_tests/test_state_handling.py +++ b/tests/integration_tests/test_state_handling.py @@ -8,9 +8,10 @@ import airbyte as ab import pytest -import ulid +from airbyte._util import text_util from airbyte.caches.duckdb import DuckDBCache from airbyte.caches.util import new_local_cache +from airbyte.shared.state_providers import StateProviderBase from airbyte.shared.state_writers import StateWriterBase from airbyte_protocol import models @@ -67,7 +68,7 @@ def test_incremental_state_cache_persistence( source_faker_seed_a.set_config(config_a) source_faker_seed_b.set_config(config_b) - cache_name = str(ulid.ULID()) + cache_name = text_util.generate_random_suffix() cache = new_local_cache(cache_name) result = source_faker_seed_a.read(cache) assert result.processed_records == NUM_PRODUCTS + FAKER_SCALE_A * 2 @@ -77,7 +78,7 @@ def test_incremental_state_cache_persistence( assert result2.processed_records == 0 state_provider = second_cache.get_state_provider("source-faker") - assert len(state_provider.state_message_artifacts) > 0 + assert len(list(state_provider.state_message_artifacts)) > 0 assert len(list(result2.cache.streams["products"])) == NUM_PRODUCTS assert len(list(result2.cache.streams["purchases"])) == FAKER_SCALE_A @@ -97,7 +98,7 @@ 
def test_incremental_state_prefix_isolation( config_a = source_faker_seed_a.get_config() config_a["always_updated"] = False # disable ensuring new `updated_at` timestamps source_faker_seed_a.set_config(config_a) - cache_name = str(ulid.ULID()) + cache_name = text_util.generate_random_suffix() db_path = Path(f"./.cache/{cache_name}.duckdb") cache = DuckDBCache(db_path=db_path, table_prefix="prefix_") different_prefix_cache = DuckDBCache( @@ -128,7 +129,8 @@ def test_destination_state_writer() -> None: stream=models.AirbyteStreamState( stream_descriptor=models.StreamDescriptor(name=f"stream{i}"), ), - ) + data=None, + ) # type: ignore # missing 'global' (class def error) ) assert state_writer.known_stream_names == { @@ -136,10 +138,11 @@ def test_destination_state_writer() -> None: "stream2", "stream3", } - state_writer_2: StateWriterBase = cache.get_state_provider( - source_name="source-foo", destination_name="destination-bar" + state_provider: StateProviderBase = cache.get_state_provider( + source_name="source-foo", + destination_name="destination-bar", ) - assert state_writer_2.known_stream_names == { + assert state_provider.known_stream_names == { "stream1", "stream2", "stream3", diff --git a/tests/unit_tests/test_anonymous_usage_stats.py b/tests/unit_tests/test_anonymous_usage_stats.py index e0bf19ee..9b292558 100644 --- a/tests/unit_tests/test_anonymous_usage_stats.py +++ b/tests/unit_tests/test_anonymous_usage_stats.py @@ -55,16 +55,16 @@ def test_telemetry_track(monkeypatch, source_test_registry): "os", "application_hash", ]: - assert body["properties"].get( - field, None - ), f"{field} is null in posted body: {body}" - - assert ( - body["properties"].get("source", {}).get("name") == "source-test" - ), f"field1 is null in posted body: {body}" - assert ( - body["properties"].get("cache", {}).get("type") == "DuckDBCache" - ), f"field1 is null in posted body: {body}" + assert body["properties"].get(field, None), ( + f"{field} is null in posted body: {body}" + ) + + assert body["properties"].get("source", {}).get("name") == "source-test", ( + f"field1 is null in posted body: {body}" + ) + assert body["properties"].get("cache", {}).get("type") == "DuckDBCache", ( + f"field1 is null in posted body: {body}" + ) # Check for empty values: for field in body.keys(): diff --git a/tests/unit_tests/test_exceptions.py b/tests/unit_tests/test_exceptions.py index e43dcbcf..57545b4c 100644 --- a/tests/unit_tests/test_exceptions.py +++ b/tests/unit_tests/test_exceptions.py @@ -21,9 +21,9 @@ def test_exceptions(): assert message.count("\n") == 0 assert message != "" assert message.strip() == message - assert any([ - name.startswith(prefix) for prefix in ["Airbyte", "PyAirbyte"] - ]), f"{name} does not start with Airbyte or PyAirbyte" + assert any([name.startswith(prefix) for prefix in ["Airbyte", "PyAirbyte"]]), ( + f"{name} does not start with Airbyte or PyAirbyte" + ) assert name.endswith("Error") diff --git a/tests/unit_tests/test_progress.py b/tests/unit_tests/test_progress.py index 65adc7a6..5eca0add 100644 --- a/tests/unit_tests/test_progress.py +++ b/tests/unit_tests/test_progress.py @@ -129,9 +129,9 @@ def _assert_lines(expected_lines, actual_lines: list[str] | str): if isinstance(actual_lines, list): actual_lines = "\n".join(actual_lines) for line in expected_lines: - assert ( - line in actual_lines - ), f"Missing line:\n{line}\n\nIn lines:\n\n{actual_lines}" + assert line in actual_lines, ( + f"Missing line:\n{line}\n\nIn lines:\n\n{actual_lines}" + ) def 
test_default_progress_style(monkeypatch):