diff --git a/airbyte/_connector_base.py b/airbyte/_connector_base.py
index 7a7dfe49..ad687eef 100644
--- a/airbyte/_connector_base.py
+++ b/airbyte/_connector_base.py
@@ -188,9 +188,9 @@ def print_config_spec(
         """Print the configuration spec for this connector.
 
         Args:
-        - format: The format to print the spec in. Must be "yaml" or "json".
-        - output_file: Optional. If set, the spec will be written to the given file path. Otherwise,
-          it will be printed to the console.
+            format: The format to print the spec in. Must be "yaml" or "json".
+            output_file: Optional. If set, the spec will be written to the given file path.
+                Otherwise, it will be printed to the console.
         """
         if format not in {"yaml", "json"}:
             raise exc.PyAirbyteInputError(
diff --git a/airbyte/_future_cdk/sql_processor.py b/airbyte/_future_cdk/sql_processor.py
index c7dcd2e6..d945317e 100644
--- a/airbyte/_future_cdk/sql_processor.py
+++ b/airbyte/_future_cdk/sql_processor.py
@@ -431,10 +431,8 @@ def _ensure_compatible_table_schema(
 
         Raises an exception if the table schema is not compatible with the schema of the
         input stream.
-
-        TODO:
-        - Expand this to check for column types and sizes.
         """
+        # TODO: Expand this to check for column types and sizes.
         self._add_missing_columns_to_table(
             stream_name=stream_name,
             table_name=table_name,
diff --git a/airbyte/_message_iterators.py b/airbyte/_message_iterators.py
index 713a27ef..82ee71e9 100644
--- a/airbyte/_message_iterators.py
+++ b/airbyte/_message_iterators.py
@@ -59,7 +59,6 @@ def read(self) -> str:
     @classmethod
     def from_read_result(cls, read_result: ReadResult) -> AirbyteMessageIterator:
         """Create an iterator from a `ReadResult` object."""
-
         state_provider = read_result.cache.get_state_provider(
             source_name=read_result.source_name,
             refresh=True,
diff --git a/airbyte/_processors/sql/snowflakecortex.py b/airbyte/_processors/sql/snowflakecortex.py
index 5fb0fb6a..ca42948f 100644
--- a/airbyte/_processors/sql/snowflakecortex.py
+++ b/airbyte/_processors/sql/snowflakecortex.py
@@ -164,7 +164,7 @@ def _add_missing_columns_to_table(
         stream_name: str,
         table_name: str,
     ) -> None:
-        """Use Snowflake Python connector to add new columns to the table"""
+        """Use the Snowflake Python connector to add new columns to the table."""
         columns = self._get_sql_column_definitions(stream_name)
         existing_columns = self._get_column_list_from_table(table_name)
         for column_name, column_type in columns.items():
diff --git a/airbyte/caches/_catalog_backend.py b/airbyte/caches/_catalog_backend.py
index 8aa61fea..b9e1df67 100644
--- a/airbyte/caches/_catalog_backend.py
+++ b/airbyte/caches/_catalog_backend.py
@@ -49,10 +49,11 @@ class CachedStream(SqlAlchemyModel):  # type: ignore[valid-type,misc]
 
 
 class CatalogBackendBase(abc.ABC):
-    """
-    A class to manage the stream catalog of data synced to a cache:
-    * What streams exist and to what tables they map
-    * The JSON schema for each stream
+    """A class to manage the stream catalog of data synced to a cache.
+
+    This includes:
+    - What streams exist and to what tables they map
+    - The JSON schema for each stream
     """
 
     # Abstract implementations
@@ -101,10 +102,11 @@ def get_source_catalog_provider(self, source_name: str) -> CatalogProvider:
 
 
 class SqlCatalogBackend(CatalogBackendBase):
-    """
-    A class to manage the stream catalog of data synced to a cache:
+    """A class to manage the stream catalog of data synced to a cache.
+
+    This includes:
     - What streams exist and to what tables they map
     - The JSON schema for each stream
     """
 
     def __init__(
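The reworked `Args` blocks above use the indented Google style that the newly enabled pydocstyle rules expect. For reference, a small usage sketch of the documented `print_config_spec()` method (the connector name here is just an example):

```python
import airbyte as ab

source = ab.get_source("source-faker", install_if_missing=True)

# Print the spec to the console as JSON, or write it to a file
# by passing `output_file`.
source.print_config_spec(format="json")
source.print_config_spec(format="yaml", output_file="spec.yaml")
```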
""" def __init__( diff --git a/airbyte/caches/_state_backend.py b/airbyte/caches/_state_backend.py index 3d039cf4..ba6659ea 100644 --- a/airbyte/caches/_state_backend.py +++ b/airbyte/caches/_state_backend.py @@ -179,9 +179,9 @@ def _write_state( class SqlStateBackend(StateBackendBase): - """ - A class to manage the stream catalog of data synced to a cache: + """A class to manage the stream catalog of data synced to a cache. + This includes: - What streams exist and to what tables they map - The JSON schema for each stream """ diff --git a/airbyte/caches/bigquery.py b/airbyte/caches/bigquery.py index 7ed5bb5c..33b64724 100644 --- a/airbyte/caches/bigquery.py +++ b/airbyte/caches/bigquery.py @@ -40,7 +40,8 @@ def get_arrow_dataset( max_chunk_size: int = DEFAULT_ARROW_MAX_CHUNK_SIZE, ) -> NoReturn: """Raises NotImplementedError; BigQuery doesn't support `pd.read_sql_table`. - https://github.com/airbytehq/PyAirbyte/issues/165 + + See: https://github.com/airbytehq/PyAirbyte/issues/165 """ raise NotImplementedError( "BigQuery doesn't currently support to_arrow" diff --git a/airbyte/cloud/connections.py b/airbyte/cloud/connections.py index 43156230..b4543a1f 100644 --- a/airbyte/cloud/connections.py +++ b/airbyte/cloud/connections.py @@ -97,10 +97,12 @@ def table_prefix(self) -> str: @property def connection_url(self) -> str | None: + """The URL to the connection.""" return f"{self.workspace.workspace_url}/connections/{self.connection_id}" @property def job_history_url(self) -> str | None: + """The URL to the job history for the connection.""" return f"{self.connection_url}/job-history" # Run Sync diff --git a/airbyte/cloud/experimental.py b/airbyte/cloud/experimental.py index 91bd6223..c5ccf23a 100644 --- a/airbyte/cloud/experimental.py +++ b/airbyte/cloud/experimental.py @@ -41,7 +41,7 @@ ) -class CloudWorkspace(Stable_CloudWorkspace): +class CloudWorkspace(Stable_CloudWorkspace): # noqa: D101 # Docstring inherited from parent. __doc__ = ( f"Experimental implementation of `.CloudWorkspace`.\n\n{Stable_CloudConnection.__doc__}" ) @@ -53,7 +53,7 @@ class CloudWorkspace(Stable_CloudWorkspace): permanently_delete_destination = Stable_CloudWorkspace._permanently_delete_destination -class CloudConnection(Stable_CloudConnection): +class CloudConnection(Stable_CloudConnection): # noqa: D101 # Docstring inherited from parent. __doc__ = ( f"Experimental implementation of `.CloudConnection`.\n\n{Stable_CloudConnection.__doc__}" ) diff --git a/airbyte/cloud/sync_results.py b/airbyte/cloud/sync_results.py index 89beab15..3c250cfa 100644 --- a/airbyte/cloud/sync_results.py +++ b/airbyte/cloud/sync_results.py @@ -346,7 +346,6 @@ def __getitem__(self, key: str) -> CachedDataset: return self.parent.get_dataset(stream_name=key) def __iter__(self) -> Iterator[str]: - """TODO""" return iter(self.parent.stream_names) def __len__(self) -> int: diff --git a/airbyte/cloud/workspaces.py b/airbyte/cloud/workspaces.py index b9bb6a2f..889b5f02 100644 --- a/airbyte/cloud/workspaces.py +++ b/airbyte/cloud/workspaces.py @@ -46,6 +46,7 @@ class CloudWorkspace: @property def workspace_url(self) -> str | None: + """The URL of the workspace.""" return f"{self.api_root}/workspaces/{self.workspace_id}" # Test connection and creds @@ -202,6 +203,10 @@ def _deploy_connection( `cache` or `destination`, but not both. destination (str, optional): The destination ID to use. You can provide `cache` or `destination`, but not both. + table_prefix (str, optional): The table prefix to use for the cache. 
diff --git a/airbyte/documents.py b/airbyte/documents.py
index 7d4e8b81..b0408a56 100644
--- a/airbyte/documents.py
+++ b/airbyte/documents.py
@@ -44,6 +44,7 @@ class Document(BaseModel):
     last_modified: Optional[datetime.datetime] = Field(default=None)
 
     def __str__(self) -> str:
+        """Return a string representation of the document."""
         return self.content
 
     @property
diff --git a/airbyte/exceptions.py b/airbyte/exceptions.py
index bf285e06..58009879 100644
--- a/airbyte/exceptions.py
+++ b/airbyte/exceptions.py
@@ -80,6 +80,7 @@ def get_message(self) -> str:
         return self.__doc__.split("\n")[0] if self.__doc__ else ""
 
     def __str__(self) -> str:
+        """Return a string representation of the exception."""
         special_properties = ["message", "guidance", "help_url", "log_text", "context"]
         display_properties = {
             k: v
@@ -109,6 +110,7 @@ def __str__(self) -> str:
         return exception_str
 
     def __repr__(self) -> str:
+        """Return a debug representation of the exception."""
         class_name = self.__class__.__name__
         properties_str = ", ".join(
             f"{k}={v!r}" for k, v in self.__dict__.items() if not k.startswith("_")
@@ -383,6 +385,7 @@ class AirbyteError(PyAirbyteError):
 
     @property
     def workspace_url(self) -> str | None:
+        """The URL to the workspace where the error occurred."""
         if self.workspace:
             return self.workspace.workspace_url
 
@@ -404,6 +407,7 @@ class AirbyteConnectionError(AirbyteError):
 
     @property
     def connection_url(self) -> str | None:
+        """The URL to the connection where the error occurred."""
         if self.workspace_url and self.connection_id:
             return f"{self.workspace_url}/connections/{self.connection_id}"
 
@@ -411,6 +415,7 @@ def connection_url(self) -> str | None:
 
     @property
     def job_history_url(self) -> str | None:
+        """The URL to the job history where the error occurred."""
         if self.connection_url:
             return f"{self.connection_url}/job-history"
 
@@ -418,6 +423,7 @@ def job_history_url(self) -> str | None:
 
     @property
     def job_url(self) -> str | None:
+        """The URL to the job where the error occurred."""
         if self.job_history_url and self.job_id:
             return f"{self.job_history_url}#{self.job_id}::0"
diff --git a/airbyte/records.py b/airbyte/records.py
index 1c0c146e..f4f82576 100644
--- a/airbyte/records.py
+++ b/airbyte/records.py
@@ -208,6 +208,9 @@ def __init__(
         Args:
             from_dict: The dictionary to initialize the StreamRecord with.
             stream_record_handler: The StreamRecordHandler to use for processing the record.
+            with_internal_columns: If `True`, the internal columns will be added to the record.
+            extracted_at: The time the record was extracted. If not provided, the current time
+                will be used.
         """
         self._stream_handler: StreamRecordHandler = stream_record_handler
 
@@ -252,12 +255,14 @@ def from_record_message(
         )
 
     def __getitem__(self, key: str) -> Any:  # noqa: ANN401
+        """Return the item with the given key."""
         try:
             return super().__getitem__(key)
         except KeyError:
             return super().__getitem__(self._stream_handler.to_index_case(key))
 
     def __setitem__(self, key: str, value: Any) -> None:  # noqa: ANN401
+        """Set the item with the given key to the given value."""
         index_case_key = self._stream_handler.to_index_case(key)
         if (
             self._stream_handler.prune_extra_fields
@@ -268,6 +273,7 @@ def __setitem__(self, key: str, value: Any) -> None:  # noqa: ANN401
         super().__setitem__(index_case_key, value)
 
     def __delitem__(self, key: str) -> None:
+        """Delete the item with the given key."""
         try:
             super().__delitem__(key)
         except KeyError:
@@ -282,18 +288,22 @@ def __delitem__(self, key: str) -> None:
             raise KeyError(key)
 
     def __contains__(self, key: object) -> bool:
+        """Return whether the dictionary contains the given key."""
         assert isinstance(key, str), "Key must be a string."
         return super().__contains__(key) or super().__contains__(
             self._stream_handler.to_index_case(key)
         )
 
     def __iter__(self) -> Iterator[str]:
+        """Return an iterator over the keys of the dictionary."""
         return iter(super().__iter__())
 
     def __len__(self) -> int:
+        """Return the number of items in the dictionary."""
         return super().__len__()
 
     def __eq__(self, other: object) -> bool:
+        """Return whether the StreamRecord is equal to the given dict or StreamRecord object."""
         if isinstance(other, StreamRecord):
             return dict(self) == dict(other)
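The exception `__str__`/`__repr__` docstrings above are easiest to understand with a concrete example. A minimal sketch (the exact properties rendered depend on the subclass, and the `context` keyword is assumed from the base class fields shown above):

```python
from airbyte import exceptions as exc

try:
    raise exc.PyAirbyteInputError(
        message="Invalid format requested.",
        context={"format": "xml"},
    )
except exc.PyAirbyteInputError as err:
    print(str(err))   # Human-readable message plus display properties
    print(repr(err))  # Class name plus all public attributes
```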
""" self._stream_handler: StreamRecordHandler = stream_record_handler @@ -252,12 +255,14 @@ def from_record_message( ) def __getitem__(self, key: str) -> Any: # noqa: ANN401 + """Return the item with the given key.""" try: return super().__getitem__(key) except KeyError: return super().__getitem__(self._stream_handler.to_index_case(key)) def __setitem__(self, key: str, value: Any) -> None: # noqa: ANN401 + """Set the item with the given key to the given value.""" index_case_key = self._stream_handler.to_index_case(key) if ( self._stream_handler.prune_extra_fields @@ -268,6 +273,7 @@ def __setitem__(self, key: str, value: Any) -> None: # noqa: ANN401 super().__setitem__(index_case_key, value) def __delitem__(self, key: str) -> None: + """Delete the item with the given key.""" try: super().__delitem__(key) except KeyError: @@ -282,18 +288,22 @@ def __delitem__(self, key: str) -> None: raise KeyError(key) def __contains__(self, key: object) -> bool: + """Return whether the dictionary contains the given key.""" assert isinstance(key, str), "Key must be a string." return super().__contains__(key) or super().__contains__( self._stream_handler.to_index_case(key) ) def __iter__(self) -> Iterator[str]: + """Return an iterator over the keys of the dictionary.""" return iter(super().__iter__()) def __len__(self) -> int: + """Return the number of items in the dictionary.""" return super().__len__() def __eq__(self, other: object) -> bool: + """Return whether the StreamRecord is equal to the given dict or StreamRecord object.""" if isinstance(other, StreamRecord): return dict(self) == dict(other) diff --git a/airbyte/results.py b/airbyte/results.py index 77db9443..7035abab 100644 --- a/airbyte/results.py +++ b/airbyte/results.py @@ -1,4 +1,11 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. +"""Module which defines the `ReadResult` and `WriteResult` classes. + +These classes are used to return information about read and write operations, respectively. They +contain information such as the number of records read or written, the cache object, and the +state handlers for a sync. +""" + from __future__ import annotations from collections.abc import Mapping @@ -37,38 +44,50 @@ def __init__( cache: CacheBase, progress_tracker: ProgressTracker, ) -> None: + """Initialize a read result. + + This class should not be created directly. Instead, it should be returned by the `read` + method of the `Source` class. 
diff --git a/airbyte/secrets/base.py b/airbyte/secrets/base.py
index f790bdb3..a37fc8ea 100644
--- a/airbyte/secrets/base.py
+++ b/airbyte/secrets/base.py
@@ -19,6 +19,8 @@
 
 
 class SecretSourceEnum(str, Enum):
+    """Enumeration of secret sources supported by PyAirbyte."""
+
     ENV = "env"
     DOTENV = "dotenv"
     GOOGLE_COLAB = "google_colab"
@@ -31,12 +33,30 @@ class SecretString(str):
     """A string that represents a secret.
 
     This class is used to mark a string as a secret. When a secret is printed, it
-    will be masked to prevent accidental exposure of sensitive information.
+    will be masked to prevent accidental exposure of sensitive information when debugging
+    or when printing containing objects like dictionaries.
+
+    To create a secret string, simply instantiate the class with any string value:
+
+    ```python
+    secret = SecretString("my_secret_password")
+    ```
     """
 
     __slots__ = ()
 
     def __repr__(self) -> str:
+        """Override the representation of the secret string to return a masked value.
+
+        The secret string is always masked with `****` to prevent accidental exposure, unless
+        explicitly converted to a string. For instance, printing a config dictionary that contains
+        a secret will automatically mask the secret value instead of printing it in plain text.
+
+        However, if you explicitly convert or cast the secret to a string, such as when used
+        in an f-string, the secret will be exposed. This is the desired behavior to allow
+        secrets to be used in a controlled manner.
+        """
        return "<SecretString: ****>"
 
     def is_empty(self) -> bool:
@@ -55,7 +75,8 @@ def is_json(self) -> bool:
     def __bool__(self) -> bool:
         """Override the boolean value of the secret string.
 
-        Always returns `True` without inspecting contents."""
+        Always returns `True` without inspecting contents.
+        """
         return True
 
     def parse_json(self) -> dict:
@@ -94,6 +115,7 @@ def __get_pydantic_core_schema__(  # noqa: PLW3201  # Pydantic dunder
         source_type: Any,  # noqa: ANN401  # Must allow `Any` to match Pydantic signature
         handler: GetCoreSchemaHandler,
     ) -> CoreSchema:
+        """Return a modified core schema for the secret string."""
         return core_schema.with_info_after_validator_function(
             function=cls.validate, schema=handler(str), field_name=handler.field_name
         )
@@ -102,8 +124,7 @@ def __get_pydantic_core_schema__(  # noqa: PLW3201  # Pydantic dunder
     def __get_pydantic_json_schema__(  # noqa: PLW3201  # Pydantic dunder method
         cls, _core_schema: core_schema.CoreSchema, handler: GetJsonSchemaHandler
     ) -> JsonSchemaValue:
-        """
-        Return a modified JSON schema for the secret string.
+        """Return a modified JSON schema for the secret string.
 
         - `writeOnly=True` is the official way to prevent secrets from being exposed inadvertently.
         - `Format=password` is a popular and readable convention to indicate the field is sensitive.
@@ -157,9 +178,11 @@ def get_secret(self, secret_name: str) -> SecretString | None:
         ...
 
     def __str__(self) -> str:
+        """Return the name of the secret manager."""
         return self.name
 
     def __eq__(self, value: object) -> bool:
+        """Check if the secret manager is equal to another secret manager."""
         if isinstance(value, SecretManager):
             return self.name == value.name
 
@@ -172,6 +195,10 @@ def __eq__(self, value: object) -> bool:
         return super().__eq__(value)
 
     def __hash__(self) -> int:
+        """Return a hash of the secret manager name.
+
+        This allows the secret manager to be used in sets and dictionaries.
+        """
         return hash(self.name)
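The expanded `SecretString` docstrings describe masking behavior that is easy to verify interactively:

```python
from airbyte.secrets import SecretString

secret = SecretString("my_secret_password")

print(repr(secret))          # Masked: the raw value is never shown
print({"password": secret})  # Containers render values via repr(), so still masked
print(f"{secret}")           # Explicit string conversion exposes the value
```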
diff --git a/airbyte/secrets/custom.py b/airbyte/secrets/custom.py
index 916401c8..643ae825 100644
--- a/airbyte/secrets/custom.py
+++ b/airbyte/secrets/custom.py
@@ -22,6 +22,7 @@ class CustomSecretManager(SecretManager, ABC):
     as_backup = False
 
     def __init__(self) -> None:
+        """Initialize the custom secret manager."""
         super().__init__()
         if self.auto_register:
             self.register()
diff --git a/airbyte/secrets/google_colab.py b/airbyte/secrets/google_colab.py
index 5817c5e3..1f4ded0f 100644
--- a/airbyte/secrets/google_colab.py
+++ b/airbyte/secrets/google_colab.py
@@ -12,6 +12,7 @@ class ColabSecretManager(SecretManager):
     name = SecretSourceEnum.GOOGLE_COLAB.value
 
     def __init__(self) -> None:
+        """Initialize the Google Colab secret manager."""
         try:
             from google.colab import (  # pyright: ignore[reportMissingImports]  # noqa: PLC0415
                 userdata as colab_userdata,
diff --git a/airbyte/secrets/prompt.py b/airbyte/secrets/prompt.py
index b51159db..b0a70f9f 100644
--- a/airbyte/secrets/prompt.py
+++ b/airbyte/secrets/prompt.py
@@ -18,6 +18,10 @@ def get_secret(
         self,
         secret_name: str,
     ) -> SecretString | None:
+        """Prompt the user to enter a secret.
+
+        As a security measure, the secret is not echoed to the terminal when typed.
+        """
         with contextlib.suppress(Exception):
             return SecretString(getpass(f"Enter the value for secret '{secret_name}': "))
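`SecretsPrompt` is normally reached through the top-level `get_secret()` helper, which consults the registered secret managers first and only falls back to an interactive prompt. A sketch (the secret name is a placeholder):

```python
import airbyte as ab

# Checks environment variables, .env files, etc. first; if the secret is
# not found, prompts interactively without echoing the typed value.
api_key = ab.get_secret("MY_API_KEY", allow_prompt=True)
```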
diff --git a/airbyte/sources/base.py b/airbyte/sources/base.py
index 00eddf85..eb17f477 100644
--- a/airbyte/sources/base.py
+++ b/airbyte/sources/base.py
@@ -1,4 +1,6 @@
 # Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+"""Base class implementation for sources."""
+
 from __future__ import annotations
 
 import json
@@ -98,7 +100,7 @@ def set_streams(self, streams: list[str]) -> None:
         self.select_streams(streams)
 
     def _log_warning_preselected_stream(self, streams: str | list[str]) -> None:
-        """Logs a warning message indicating stream selection which are not selected yet"""
+        """Log a warning that streams were selected before the config was set."""
         if streams == "*":
             print(
                 "Warning: Config is not set yet. All streams will be selected after config is set."
@@ -126,7 +128,7 @@ def select_streams(self, streams: str | list[str]) -> None:
         """Select the stream names that should be read from the connector.
 
         Args:
-        - streams: A list of stream names to select. If set to "*", all streams will be selected.
+            streams: A list of stream names to select. If set to "*", all streams will be selected.
 
         Currently, if this is not set, all streams will be read.
         """
@@ -268,9 +270,9 @@ def print_config_spec(
         """Print the configuration spec for this connector.
 
         Args:
-        - format: The format to print the spec in. Must be "yaml" or "json".
-        - output_file: Optional. If set, the spec will be written to the given file path. Otherwise,
-          it will be printed to the console.
+            format: The format to print the spec in. Must be "yaml" or "json".
+            output_file: Optional. If set, the spec will be written to the given file path.
+                Otherwise, it will be printed to the console.
         """
         if format not in {"yaml", "json"}:
             raise exc.PyAirbyteInputError(
@@ -349,6 +351,13 @@ def get_configured_catalog(
         self,
         streams: Literal["*"] | list[str] | None = None,
     ) -> ConfiguredAirbyteCatalog:
+        """Get a configured catalog for the given streams.
+
+        If no streams are provided, the selected streams will be used. If no streams are selected,
+        all available streams will be used.
+
+        If '*' is provided, all available streams will be used.
+        """
         selected_streams: list[str] = []
         if streams is None:
             selected_streams = self._selected_stream_names or self.get_available_streams()
diff --git a/airbyte/sources/registry.py b/airbyte/sources/registry.py
index 82f31f0b..43f9b957 100644
--- a/airbyte/sources/registry.py
+++ b/airbyte/sources/registry.py
@@ -1,4 +1,6 @@
 # Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+"""Connectivity to the connector catalog registry."""
+
 from __future__ import annotations
 
 import json
@@ -104,6 +106,8 @@
 
 
 class InstallType(str, Enum):
+    """The type of installation for a connector."""
+
     YAML = "yaml"
     PYTHON = "python"
     DOCKER = "docker"
@@ -111,6 +115,8 @@ class InstallType(str, Enum):
 
 
 class Language(str, Enum):
+    """The language of a connector."""
+
     PYTHON = InstallType.PYTHON.value
     JAVA = InstallType.JAVA.value
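The new `get_configured_catalog()` docstring spells out the stream-resolution order. A sketch of the selection flow (stream names are from `source-faker`):

```python
import airbyte as ab

source = ab.get_source("source-faker", config={"count": 50})
source.select_streams(["products", "users"])  # or "*" to select all streams

catalog = source.get_configured_catalog()
print([s.stream.name for s in catalog.streams])  # e.g. ["products", "users"]
```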
+"""Connectivity to the connector catalog registry.""" + from __future__ import annotations import json @@ -104,6 +106,8 @@ class InstallType(str, Enum): + """The type of installation for a connector.""" + YAML = "yaml" PYTHON = "python" DOCKER = "docker" @@ -111,6 +115,8 @@ class InstallType(str, Enum): class Language(str, Enum): + """The language of a connector.""" + PYTHON = InstallType.PYTHON.value JAVA = InstallType.JAVA.value diff --git a/airbyte/types.py b/airbyte/types.py index 35fcc6a6..45820c4b 100644 --- a/airbyte/types.py +++ b/airbyte/types.py @@ -102,6 +102,7 @@ def __init__( self, conversion_map: dict | None = None, ) -> None: + """Initialize the type converter.""" self.conversion_map = conversion_map or CONVERSION_MAP @classmethod diff --git a/airbyte/validate.py b/airbyte/validate.py index 075a6eef..475c8151 100644 --- a/airbyte/validate.py +++ b/airbyte/validate.py @@ -59,6 +59,7 @@ def _run_subprocess_and_raise_on_failure(args: list[str]) -> None: def full_tests(connector_name: str, sample_config: str) -> None: + """Run full tests on the connector.""" print("Creating source and validating spec and version...") source = ab.get_source( # TODO: FIXME: noqa: SIM115, PTH123 @@ -91,6 +92,7 @@ def full_tests(connector_name: str, sample_config: str) -> None: def install_only_test(connector_name: str) -> None: + """Test that the connector can be installed and spec can be printed.""" print("Creating source and validating spec is returned successfully...") source = ab.get_source(connector_name) source._get_spec(force_refresh=True) # noqa: SLF001 # Member is private until we have a public API for it. @@ -116,6 +118,7 @@ def run() -> None: def validate(connector_dir: str, sample_config: str, *, validate_install_only: bool) -> None: + """Validate a connector.""" # read metadata.yaml metadata_path = Path(connector_dir) / "metadata.yaml" metadata = yaml.safe_load(Path(metadata_path).read_text(encoding="utf-8"))["data"] diff --git a/airbyte/version.py b/airbyte/version.py index eaa627d1..90aab62b 100644 --- a/airbyte/version.py +++ b/airbyte/version.py @@ -1,4 +1,6 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+"""Support for PyAirbyte version checks.""" + from __future__ import annotations import importlib.metadata @@ -8,4 +10,5 @@ def get_version() -> str: + """Return the version of PyAirbyte.""" return airbyte_version diff --git a/pyproject.toml b/pyproject.toml index 2654541d..678c1386 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -122,40 +122,40 @@ select = [ "C90", # mccabe (complexity) "COM", # flake8-commas "CPY", # missing copyright notice - # "D", # pydocstyle # TODO: Re-enable when adding docstrings - "DTZ", # flake8-datetimez - "E", # pycodestyle (errors) - "ERA", # flake8-eradicate (commented out code) - "EXE", # flake8-executable - "F", # Pyflakes - "FA", # flake8-future-annotations - "FIX", # flake8-fixme - "FLY", # flynt - "FURB", # Refurb - "I", # isort - "ICN", # flake8-import-conventions - "INP", # flake8-no-pep420 - "INT", # flake8-gettext - "ISC", # flake8-implicit-str-concat - "ICN", # flake8-import-conventions - "LOG", # flake8-logging - "N", # pep8-naming - "PD", # pandas-vet - "PERF", # Perflint - "PIE", # flake8-pie - "PGH", # pygrep-hooks - "PL", # Pylint - "PT", # flake8-pytest-style - "PTH", # flake8-use-pathlib - "PYI", # flake8-pyi - "Q", # flake8-quotes - "RET", # flake8-return - "RSE", # flake8-raise - "RUF", # Ruff-specific rules - "SIM", # flake8-simplify - "SLF", # flake8-self - "SLOT", # flake8-slots - "T10", # debugger calls + "D", # pydocstyle (Docstring conventions) + "DTZ", # flake8-datetimez + "E", # pycodestyle (errors) + "ERA", # flake8-eradicate (commented out code) + "EXE", # flake8-executable + "F", # Pyflakes + "FA", # flake8-future-annotations + "FIX", # flake8-fixme + "FLY", # flynt + "FURB", # Refurb + "I", # isort + "ICN", # flake8-import-conventions + "INP", # flake8-no-pep420 + "INT", # flake8-gettext + "ISC", # flake8-implicit-str-concat + "ICN", # flake8-import-conventions + "LOG", # flake8-logging + "N", # pep8-naming + "PD", # pandas-vet + "PERF", # Perflint + "PIE", # flake8-pie + "PGH", # pygrep-hooks + "PL", # Pylint + "PT", # flake8-pytest-style + "PTH", # flake8-use-pathlib + "PYI", # flake8-pyi + "Q", # flake8-quotes + "RET", # flake8-return + "RSE", # flake8-raise + "RUF", # Ruff-specific rules + "SIM", # flake8-simplify + "SLF", # flake8-self + "SLOT", # flake8-slots + "T10", # debugger calls # "T20", # flake8-print # TODO: Re-enable once we have logging "TCH", # flake8-type-checking "TD", # flake8-todos