Skip to content

Commit 970c4f7

Browse files
chore(deps-dev): bump the development-dependencies group with 3 updates (#3072)
* chore(deps-dev): bump the development-dependencies group with 3 updates Bumps the development-dependencies group with 3 updates: [poetry](https://github.com/python-poetry/poetry), [boto3-stubs](https://github.com/youtype/mypy_boto3_builder) and [ruff](https://github.com/astral-sh/ruff). Updates `poetry` from 1.8.5 to 2.0.1 - [Release notes](https://github.com/python-poetry/poetry/releases) - [Changelog](https://github.com/python-poetry/poetry/blob/main/CHANGELOG.md) - [Commits](python-poetry/poetry@1.8.5...2.0.1) Updates `boto3-stubs` from 1.35.93 to 1.35.97 - [Release notes](https://github.com/youtype/mypy_boto3_builder/releases) - [Commits](https://github.com/youtype/mypy_boto3_builder/commits) Updates `ruff` from 0.8.6 to 0.9.1 - [Release notes](https://github.com/astral-sh/ruff/releases) - [Changelog](https://github.com/astral-sh/ruff/blob/main/CHANGELOG.md) - [Commits](astral-sh/ruff@0.8.6...0.9.1) --- updated-dependencies: - dependency-name: poetry dependency-type: direct:development update-type: version-update:semver-major dependency-group: development-dependencies - dependency-name: boto3-stubs dependency-type: direct:development update-type: version-update:semver-patch dependency-group: development-dependencies - dependency-name: ruff dependency-type: direct:development update-type: version-update:semver-minor dependency-group: development-dependencies ... Signed-off-by: dependabot[bot] <[email protected]> * fix: apply validate fixes * fix: revert some mypy changes --------- Signed-off-by: dependabot[bot] <[email protected]> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Abdel Jaidi <[email protected]>
1 parent 647b3d5 commit 970c4f7

17 files changed

+198
-197
lines changed

awswrangler/_utils.py

+1-2
Original file line number | Diff line number | Diff line change
@@ -121,8 +121,7 @@ def inner(*args: Any, **kwargs: Any) -> Any:
121121
package_name = INSTALL_MAPPING.get(name)
122122
install_name = package_name if package_name is not None else name
123123
raise ModuleNotFoundError(
124-
f"Missing optional dependency '{name}'. "
125-
f"Use pip install awswrangler[{install_name}] to install it."
124+
f"Missing optional dependency '{name}'. Use pip install awswrangler[{install_name}] to install it."
126125
)
127126
return func(*args, **kwargs)
128127

awswrangler/athena/_read.py

+1-1
Original file line number | Diff line number | Diff line change
@@ -610,7 +610,7 @@ def _unload(
610610
if partitioned_by:
611611
unload_parameters += f" , partitioned_by=ARRAY{partitioned_by}"
612612

613-
sql = f"UNLOAD ({sql}) " f"TO '{path}' " f"WITH ({unload_parameters})"
613+
sql = f"UNLOAD ({sql}) TO '{path}' WITH ({unload_parameters})"
614614
_logger.debug("Executing unload query: %s", sql)
615615
try:
616616
query_id: str = _start_query_execution(

awswrangler/athena/_utils.py

+4-4
Original file line number | Diff line number | Diff line change
@@ -1025,13 +1025,13 @@ def parse_properties(parameters: dict[str, str]) -> str:
10251025

10261026
query_parts += [
10271027
"""ROW FORMAT SERDE """,
1028-
f""" '{table_detail['StorageDescriptor']['SerdeInfo']['SerializationLibrary']}' """,
1028+
f""" '{table_detail["StorageDescriptor"]["SerdeInfo"]["SerializationLibrary"]}' """,
10291029
"""STORED AS INPUTFORMAT """,
1030-
f""" '{table_detail['StorageDescriptor']['InputFormat']}' """,
1030+
f""" '{table_detail["StorageDescriptor"]["InputFormat"]}' """,
10311031
"""OUTPUTFORMAT """,
1032-
f""" '{table_detail['StorageDescriptor']['OutputFormat']}'""",
1032+
f""" '{table_detail["StorageDescriptor"]["OutputFormat"]}'""",
10331033
"""LOCATION""",
1034-
f""" '{table_detail['StorageDescriptor']['Location']}'""",
1034+
f""" '{table_detail["StorageDescriptor"]["Location"]}'""",
10351035
f"""TBLPROPERTIES (\n{tblproperties})""",
10361036
]
10371037
sql = "\n".join(query_parts)

awswrangler/athena/_write_iceberg.py

+7-7
Original file line number | Diff line number | Diff line change
@@ -309,7 +309,7 @@ def _merge_iceberg(
309309
if merge_cols:
310310
if merge_condition == "update":
311311
match_condition = f"""WHEN MATCHED THEN
312-
UPDATE SET {', '.join([f'"{x}" = source."{x}"' for x in df.columns])}"""
312+
UPDATE SET {", ".join([f'"{x}" = source."{x}"' for x in df.columns])}"""
313313
else:
314314
match_condition = ""
315315

@@ -321,16 +321,16 @@ def _merge_iceberg(
321321
sql_statement = f"""
322322
MERGE INTO "{database}"."{table}" target
323323
USING "{database}"."{source_table}" source
324-
ON {' AND '.join(merge_conditions)}
324+
ON {" AND ".join(merge_conditions)}
325325
{match_condition}
326326
WHEN NOT MATCHED THEN
327-
INSERT ({', '.join([f'"{x}"' for x in df.columns])})
328-
VALUES ({', '.join([f'source."{x}"' for x in df.columns])})
327+
INSERT ({", ".join([f'"{x}"' for x in df.columns])})
328+
VALUES ({", ".join([f'source."{x}"' for x in df.columns])})
329329
"""
330330
else:
331331
sql_statement = f"""
332-
INSERT INTO "{database}"."{table}" ({', '.join([f'"{x}"' for x in df.columns])})
333-
SELECT {', '.join([f'"{x}"' for x in df.columns])}
332+
INSERT INTO "{database}"."{table}" ({", ".join([f'"{x}"' for x in df.columns])})
333+
SELECT {", ".join([f'"{x}"' for x in df.columns])}
334334
FROM "{database}"."{source_table}"
335335
"""
336336

@@ -763,7 +763,7 @@ def delete_from_iceberg_table(
763763
sql_statement = f"""
764764
MERGE INTO "{database}"."{table}" target
765765
USING "{database}"."{temp_table}" source
766-
ON {' AND '.join([f'target."{x}" = source."{x}"' for x in merge_cols])}
766+
ON {" AND ".join([f'target."{x}" = source."{x}"' for x in merge_cols])}
767767
WHEN MATCHED THEN
768768
DELETE
769769
"""

awswrangler/dynamodb/_utils.py

+1-1
Original file line number | Diff line number | Diff line change
@@ -253,7 +253,7 @@ def _remove_dup_pkeys_request_if_any(
253253
if self._extract_pkey_values(item, overwrite_by_pkeys) == pkey_values_new:
254254
self._items_buffer.remove(item)
255255
_logger.debug(
256-
"With overwrite_by_pkeys enabled, skipping " "request:%s",
256+
"With overwrite_by_pkeys enabled, skipping request:%s",
257257
item,
258258
)
259259

awswrangler/oracle.py

+8-8
Original file line number | Diff line number | Diff line change
@@ -48,8 +48,8 @@ def _validate_connection(con: "oracledb.Connection") -> None:
4848

4949

5050
def _get_table_identifier(schema: str | None, table: str) -> str:
51-
schema_str = f'{identifier(schema, sql_mode="ansi")}.' if schema else ""
52-
table_identifier = f'{schema_str}{identifier(table, sql_mode="ansi")}'
51+
schema_str = f"{identifier(schema, sql_mode='ansi')}." if schema else ""
52+
table_identifier = f"{schema_str}{identifier(table, sql_mode='ansi')}"
5353
return table_identifier
5454

5555

@@ -104,10 +104,10 @@ def _create_table(
104104
varchar_lengths=varchar_lengths,
105105
converter_func=_data_types.pyarrow2oracle,
106106
)
107-
cols_str: str = "".join([f'{identifier(k, sql_mode="ansi")} {v},\n' for k, v in oracle_types.items()])[:-2]
107+
cols_str: str = "".join([f"{identifier(k, sql_mode='ansi')} {v},\n" for k, v in oracle_types.items()])[:-2]
108108

109109
if primary_keys:
110-
primary_keys_str = ", ".join([f'{identifier(k, sql_mode="ansi")}' for k in primary_keys])
110+
primary_keys_str = ", ".join([f"{identifier(k, sql_mode='ansi')}" for k in primary_keys])
111111
else:
112112
primary_keys_str = None
113113

@@ -469,17 +469,17 @@ def _generate_upsert_statement(
469469

470470
non_primary_key_columns = [key for key in df.columns if key not in set(primary_keys)]
471471

472-
primary_keys_str = ", ".join([f'{identifier(key, sql_mode="ansi")}' for key in primary_keys])
473-
columns_str = ", ".join([f'{identifier(key, sql_mode="ansi")}' for key in non_primary_key_columns])
472+
primary_keys_str = ", ".join([f"{identifier(key, sql_mode='ansi')}" for key in primary_keys])
473+
columns_str = ", ".join([f"{identifier(key, sql_mode='ansi')}" for key in non_primary_key_columns])
474474

475475
column_placeholders: str = f"({', '.join([':' + str(i + 1) for i in range(len(df.columns))])})"
476476

477477
primary_key_condition_str = " AND ".join(
478-
[f'{identifier(key, sql_mode="ansi")} = :{i+1}' for i, key in enumerate(primary_keys)]
478+
[f"{identifier(key, sql_mode='ansi')} = :{i + 1}" for i, key in enumerate(primary_keys)]
479479
)
480480
assignment_str = ", ".join(
481481
[
482-
f'{identifier(col, sql_mode="ansi")} = :{i + len(primary_keys) + 1}'
482+
f"{identifier(col, sql_mode='ansi')} = :{i + len(primary_keys) + 1}"
483483
for i, col in enumerate(non_primary_key_columns)
484484
]
485485
)

awswrangler/redshift/_utils.py

+4-7
Original file line number | Diff line number | Diff line change
@@ -73,7 +73,7 @@ def _begin_transaction(cursor: "redshift_connector.Cursor") -> None:
7373
def _drop_table(cursor: "redshift_connector.Cursor", schema: str | None, table: str, cascade: bool = False) -> None:
7474
schema_str = f'"{schema}".' if schema else ""
7575
cascade_str = " CASCADE" if cascade else ""
76-
sql = f'DROP TABLE IF EXISTS {schema_str}"{table}"' f"{cascade_str}"
76+
sql = f'DROP TABLE IF EXISTS {schema_str}"{table}"{cascade_str}'
7777
_logger.debug("Executing drop table query:\n%s", sql)
7878
cursor.execute(sql)
7979

@@ -130,10 +130,7 @@ def _add_table_columns(
130130
def _does_table_exist(cursor: "redshift_connector.Cursor", schema: str | None, table: str) -> bool:
131131
schema_str = f"TABLE_SCHEMA = '{schema}' AND" if schema else ""
132132
sql = (
133-
f"SELECT true WHERE EXISTS ("
134-
f"SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE "
135-
f"{schema_str} TABLE_NAME = '{table}'"
136-
f");"
133+
f"SELECT true WHERE EXISTS (SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE {schema_str} TABLE_NAME = '{table}');"
137134
)
138135
_logger.debug("Executing select query:\n%s", sql)
139136
cursor.execute(sql)
@@ -236,12 +233,12 @@ def _validate_parameters(
236233
if sortkey:
237234
if not isinstance(sortkey, list):
238235
raise exceptions.InvalidRedshiftSortkey(
239-
f"sortkey must be a List of items in the columns list: {cols}. " f"Currently value: {sortkey}"
236+
f"sortkey must be a List of items in the columns list: {cols}. Currently value: {sortkey}"
240237
)
241238
for key in sortkey:
242239
if key not in cols:
243240
raise exceptions.InvalidRedshiftSortkey(
244-
f"sortkey must be a List of items in the columns list: {cols}. " f"Currently value: {key}"
241+
f"sortkey must be a List of items in the columns list: {cols}. Currently value: {key}"
245242
)
246243
if primary_keys:
247244
if not isinstance(primary_keys, list):

awswrangler/sqlserver.py

+1-2
Original file line number | Diff line number | Diff line change
@@ -536,8 +536,7 @@ def to_sql(
536536
sql = f"MERGE INTO {table_identifier}\nUSING (VALUES {placeholders}) AS source ({quoted_columns})\n"
537537
sql += f"ON {' AND '.join(f'{table_identifier}.{col}=source.{col}' for col in merge_on_columns)}\n"
538538
sql += (
539-
f"WHEN MATCHED THEN\n UPDATE "
540-
f"SET {', '.join(f'{col}=source.{col}' for col in column_names)}\n"
539+
f"WHEN MATCHED THEN\n UPDATE SET {', '.join(f'{col}=source.{col}' for col in column_names)}\n"
541540
)
542541
sql += (
543542
f"WHEN NOT MATCHED THEN\n INSERT "

awswrangler/timestream/_read.py

+1-1
Original file line number | Diff line number | Diff line change
@@ -409,7 +409,7 @@ def unload_to_files(
409409
timestream_client = _utils.client(service_name="timestream-query", session=boto3_session)
410410

411411
partitioned_by_str: str = (
412-
f"""partitioned_by = ARRAY [{','.join([f"'{col}'" for col in partition_cols])}],\n"""
412+
f"""partitioned_by = ARRAY [{",".join([f"'{col}'" for col in partition_cols])}],\n"""
413413
if partition_cols is not None
414414
else ""
415415
)

poetry.lock

+152-137
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

pyproject.toml

+3-3
Original file line number | Diff line number | Diff line change
@@ -100,13 +100,13 @@ ray = ["ray"]
100100
setuptools = "*"
101101
wheel = "^0.45.1"
102102
msgpack = "*"
103-
poetry = "^1.8.5"
103+
poetry = "^2.0.1"
104104

105105
# Lint
106-
boto3-stubs = {version = "^1.35.90", extras = ["athena", "cleanrooms", "chime", "cloudwatch", "dynamodb", "ec2", "emr", "emr-serverless", "glue", "kms", "logs", "neptune", "opensearch", "opensearchserverless", "quicksight", "rds", "rds-data", "redshift", "redshift-data", "s3", "secretsmanager", "ssm", "sts", "timestream-query", "timestream-write"]}
106+
boto3-stubs = {version = "^1.35.97", extras = ["athena", "cleanrooms", "chime", "cloudwatch", "dynamodb", "ec2", "emr", "emr-serverless", "glue", "kms", "logs", "neptune", "opensearch", "opensearchserverless", "quicksight", "rds", "rds-data", "redshift", "redshift-data", "s3", "secretsmanager", "ssm", "sts", "timestream-query", "timestream-write"]}
107107
doc8 = "^1.1"
108108
mypy = "^1.14"
109-
ruff = "^0.8.4"
109+
ruff = "^0.9.1"
110110

111111
# Test
112112
moto = "^5.0"

tests/unit/test_mysql.py

+5-14
Original file line number | Diff line number | Diff line change
@@ -211,7 +211,7 @@ def test_connect_secret_manager(dbname):
211211

212212
def test_insert_with_column_names(mysql_table, mysql_con):
213213
create_table_sql = (
214-
f"CREATE TABLE test.{mysql_table} " "(c0 varchar(100) NULL, " "c1 INT DEFAULT 42 NULL, " "c2 INT NOT NULL);"
214+
f"CREATE TABLE test.{mysql_table} (c0 varchar(100) NULL, c1 INT DEFAULT 42 NULL, c2 INT NOT NULL);"
215215
)
216216
with mysql_con.cursor() as cursor:
217217
cursor.execute(create_table_sql)
@@ -236,7 +236,7 @@ def test_insert_with_column_names(mysql_table, mysql_con):
236236

237237
def test_upsert_distinct(mysql_table, mysql_con):
238238
create_table_sql = (
239-
f"CREATE TABLE test.{mysql_table} " "(c0 varchar(100) NULL, " "c1 INT DEFAULT 42 NULL, " "c2 INT NOT NULL);"
239+
f"CREATE TABLE test.{mysql_table} (c0 varchar(100) NULL, c1 INT DEFAULT 42 NULL, c2 INT NOT NULL);"
240240
)
241241
with mysql_con.cursor() as cursor:
242242
cursor.execute(create_table_sql)
@@ -276,10 +276,7 @@ def test_upsert_distinct(mysql_table, mysql_con):
276276

277277
def test_upsert_duplicate_key(mysql_table, mysql_con):
278278
create_table_sql = (
279-
f"CREATE TABLE test.{mysql_table} "
280-
"(c0 varchar(100) PRIMARY KEY, "
281-
"c1 INT DEFAULT 42 NULL, "
282-
"c2 INT NOT NULL);"
279+
f"CREATE TABLE test.{mysql_table} (c0 varchar(100) PRIMARY KEY, c1 INT DEFAULT 42 NULL, c2 INT NOT NULL);"
283280
)
284281
with mysql_con.cursor() as cursor:
285282
cursor.execute(create_table_sql)
@@ -319,10 +316,7 @@ def test_upsert_duplicate_key(mysql_table, mysql_con):
319316

320317
def test_upsert_replace(mysql_table, mysql_con):
321318
create_table_sql = (
322-
f"CREATE TABLE test.{mysql_table} "
323-
"(c0 varchar(100) PRIMARY KEY, "
324-
"c1 INT DEFAULT 42 NULL, "
325-
"c2 INT NOT NULL);"
319+
f"CREATE TABLE test.{mysql_table} (c0 varchar(100) PRIMARY KEY, c1 INT DEFAULT 42 NULL, c2 INT NOT NULL);"
326320
)
327321
with mysql_con.cursor() as cursor:
328322
cursor.execute(create_table_sql)
@@ -375,10 +369,7 @@ def test_dfs_are_equal_for_different_chunksizes(mysql_table, mysql_con, chunksiz
375369

376370
def test_ignore(mysql_table, mysql_con):
377371
create_table_sql = (
378-
f"CREATE TABLE test.{mysql_table} "
379-
"(c0 varchar(100) PRIMARY KEY, "
380-
"c1 INT DEFAULT 42 NULL, "
381-
"c2 INT NOT NULL);"
372+
f"CREATE TABLE test.{mysql_table} (c0 varchar(100) PRIMARY KEY, c1 INT DEFAULT 42 NULL, c2 INT NOT NULL);"
382373
)
383374
with mysql_con.cursor() as cursor:
384375
cursor.execute(create_table_sql)

tests/unit/test_postgresql.py

+1-1
Original file line number | Diff line number | Diff line change
@@ -239,7 +239,7 @@ def test_connect_secret_manager(dbname):
239239

240240
def test_insert_with_column_names(postgresql_table, postgresql_con):
241241
create_table_sql = (
242-
f"CREATE TABLE public.{postgresql_table} " "(c0 varchar NULL," "c1 int NULL DEFAULT 42," "c2 int NOT NULL);"
242+
f"CREATE TABLE public.{postgresql_table} (c0 varchar NULL,c1 int NULL DEFAULT 42,c2 int NOT NULL);"
243243
)
244244
with postgresql_con.cursor() as cursor:
245245
cursor.execute(create_table_sql)

tests/unit/test_redshift.py

+1-1
Original file line number | Diff line number | Diff line change
@@ -1219,7 +1219,7 @@ def test_failed_keep_files(
12191219

12201220
def test_insert_with_column_names(redshift_table: str, redshift_con: redshift_connector.Connection) -> None:
12211221
create_table_sql = (
1222-
f"CREATE TABLE public.{redshift_table} " "(c0 varchar(100), " "c1 integer default 42, " "c2 integer not null);"
1222+
f"CREATE TABLE public.{redshift_table} (c0 varchar(100), c1 integer default 42, c2 integer not null);"
12231223
)
12241224
with redshift_con.cursor() as cursor:
12251225
cursor.execute(create_table_sql)

tests/unit/test_sqlserver.py

+1-1
Original file line number | Diff line number | Diff line change
@@ -222,7 +222,7 @@ def test_connect_secret_manager(dbname):
222222

223223
def test_insert_with_column_names(sqlserver_table, sqlserver_con):
224224
create_table_sql = (
225-
f"CREATE TABLE dbo.{sqlserver_table} " "(c0 varchar(100) NULL," "c1 INT DEFAULT 42 NULL," "c2 INT NOT NULL);"
225+
f"CREATE TABLE dbo.{sqlserver_table} (c0 varchar(100) NULL,c1 INT DEFAULT 42 NULL,c2 INT NOT NULL);"
226226
)
227227
with sqlserver_con.cursor() as cursor:
228228
cursor.execute(create_table_sql)

tutorials/003 - Amazon S3.ipynb

+2-2
Original file line number | Diff line number | Diff line change
@@ -874,10 +874,10 @@
874874
"metadata": {},
875875
"outputs": [],
876876
"source": [
877-
"content = \"1 Herfelingen 27-12-18\\n\" \"2 Lambusart 14-06-18\\n\" \"3 Spormaggiore 15-04-18\"\n",
877+
"content = \"1 Herfelingen 27-12-18\\n2 Lambusart 14-06-18\\n3 Spormaggiore 15-04-18\"\n",
878878
"boto3.client(\"s3\").put_object(Body=content, Bucket=bucket, Key=\"fwf/file1.txt\")\n",
879879
"\n",
880-
"content = \"4 Buizingen 05-09-19\\n\" \"5 San Rafael 04-09-19\"\n",
880+
"content = \"4 Buizingen 05-09-19\\n5 San Rafael 04-09-19\"\n",
881881
"boto3.client(\"s3\").put_object(Body=content, Bucket=bucket, Key=\"fwf/file2.txt\")\n",
882882
"\n",
883883
"path1 = f\"s3://{bucket}/fwf/file1.txt\"\n",

tutorials/024 - Athena Query Metadata.ipynb

+5-5
Original file line number | Diff line number | Diff line change
@@ -127,11 +127,11 @@
127127
}
128128
],
129129
"source": [
130-
"print(f'DataScannedInBytes: {df.query_metadata[\"Statistics\"][\"DataScannedInBytes\"]}')\n",
131-
"print(f'TotalExecutionTimeInMillis: {df.query_metadata[\"Statistics\"][\"TotalExecutionTimeInMillis\"]}')\n",
132-
"print(f'QueryQueueTimeInMillis: {df.query_metadata[\"Statistics\"][\"QueryQueueTimeInMillis\"]}')\n",
133-
"print(f'QueryPlanningTimeInMillis: {df.query_metadata[\"Statistics\"][\"QueryPlanningTimeInMillis\"]}')\n",
134-
"print(f'ServiceProcessingTimeInMillis: {df.query_metadata[\"Statistics\"][\"ServiceProcessingTimeInMillis\"]}')"
130+
"print(f\"DataScannedInBytes: {df.query_metadata['Statistics']['DataScannedInBytes']}\")\n",
131+
"print(f\"TotalExecutionTimeInMillis: {df.query_metadata['Statistics']['TotalExecutionTimeInMillis']}\")\n",
132+
"print(f\"QueryQueueTimeInMillis: {df.query_metadata['Statistics']['QueryQueueTimeInMillis']}\")\n",
133+
"print(f\"QueryPlanningTimeInMillis: {df.query_metadata['Statistics']['QueryPlanningTimeInMillis']}\")\n",
134+
"print(f\"ServiceProcessingTimeInMillis: {df.query_metadata['Statistics']['ServiceProcessingTimeInMillis']}\")"
135135
]
136136
}
137137
],

0 commit comments

Comments (0)