Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 10 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -330,6 +330,16 @@ results = conn.query("search index=_internal | head 5")
pytest -m integration
```


### Manual Testing
- Located in `dev_env`
- Do not use pytest or mocking
- Run `docker-compose.yaml` to stand up the services, then change directories into the `dev_env` folder you wish to test
- Execute the following to test your module:
```
poetry run python <your test file here>.py
```

### 🧼 Suppress warnings

Add this to `pytest.ini`:
Expand Down
32 changes: 32 additions & 0 deletions dev_env/postgres/test_asyncpostgres.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
import asyncio
from pyapiary.dbms_connectors.postgres import AsyncPostgresConnector
from pyapiary.helpers import combine_env_configs, setup_logger
from typing import Dict, Any

async def test_async_db():
    """Manual smoke test for AsyncPostgresConnector.

    Inserts a single row into ``employees`` and prints back at most the
    first ten rows returned by a plain SELECT. Intended to be run against
    the docker-compose Postgres service, not under pytest.
    """
    config: Dict[str, Any] = combine_env_configs()
    log = setup_logger("pg logger")

    # The async context manager owns the connection pool and its
    # background worker threads for the duration of the test.
    async with AsyncPostgresConnector(conn_str=config["PGSQL_DSN"], logger=log) as conn:
        log.info("inserting one row (async)")
        await conn.async_bulk_insert("employees", [{"name": "rob", "department": "hr"}])

        log.info("Querying with pagination (async):")
        # async_query is a coroutine returning the full result list.
        rows = await conn.async_query("SELECT * FROM employees")

        # Show no more than the first ten rows of output.
        for row in rows[:10]:
            print(row)

if __name__ == "__main__":
    # Standard entry point for async scripts: asyncio.run creates a fresh
    # event loop, runs the coroutine to completion, and closes the loop.
    asyncio.run(test_async_db())
20 changes: 20 additions & 0 deletions dev_env/postgres/test_postgres.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
from pyapiary.dbms_connectors.postgres import PostgresConnector, AsyncPostgresConnector
from pyapiary.helpers import combine_env_configs, setup_logger
from typing import Dict, Any

# Manual smoke test for the synchronous PostgresConnector: insert one row
# into `employees`, then stream the table back and print at most ten rows.
config: Dict[str, Any] = combine_env_configs()
log = setup_logger("pg logger")

with PostgresConnector(conn_str=config["PGSQL_DSN"], logger=log) as conn:
    # Optional insert test
    log.info("inserting one row")
    conn.bulk_insert("employees", [{"name": "rob", "department": "hr"}])

    log.info("Querying with pagination:")
    # query() is consumed lazily; stop after ten rows so a large table
    # does not flood the console.
    printed = 0
    for row in conn.query("SELECT * FROM employees"):
        print(row)
        printed += 1
        if printed == 10:
            break
1 change: 1 addition & 0 deletions docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,7 @@ services:
- ./dev_env/splunk/init_app.sh:/init_app.sh
entrypoint: [ "/bin/bash", "-c", "/init_app.sh & /sbin/entrypoint.sh start-service" ]

# Both the PG and ODBC connectors use this data source; this is why dev_env/postgres does not have an init.sql
odbc_db:
image: postgres:15-alpine
container_name: odbc_db
Expand Down
127 changes: 124 additions & 3 deletions poetry.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

3 changes: 3 additions & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,9 @@ splunk-sdk = "^2.1.1"
httpx = "^0.28.1"
tenacity = "^9.1.2"
pyodbc = "^5.3.0"
psycopg-pool = {extras = ["binary", "pool"], version = "^3.3.1"}
psycopg = {extras = ["pool"], version = "^3.3.4"}
psycopg-binary = "^3.3.4"

[tool.poetry.extras]
odbc = ["pyodbc"]
Expand Down
Loading
Loading