Skip to content
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
33 changes: 26 additions & 7 deletions bindings/python/fluss/__init__.pyi
Original file line number Diff line number Diff line change
Expand Up @@ -301,13 +301,16 @@ class FlussTable:
def __repr__(self) -> str: ...

class AppendWriter:
async def append(self, row: dict | list | tuple) -> None:
def append(self, row: dict | list | tuple) -> WriteResultHandle:
"""Append a single row to the table.

Args:
row: Dictionary mapping field names to values, or
list/tuple of values in schema order

Returns:
WriteResultHandle: Ignore for fire-and-forget, or await handle.wait() for acknowledgement.

Supported Types:
Currently supports primitive types only:
- Boolean, TinyInt, SmallInt, Int, BigInt (integers)
Comment on lines 314 to 316
Copy link

Copilot AI Feb 8, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

AppendWriter.write_arrow_batch is still typed as returning None in this stub, but the binding returns a WriteResultHandle (so callers can optionally await per-batch ack). Update the stub signature to return WriteResultHandle to match the actual API.

Copilot uses AI. Check for mistakes.
Expand All @@ -319,24 +322,24 @@ class AppendWriter:
Temporal types (Date, Timestamp, Decimal) are not yet supported.

Example:
await writer.append({'id': 1, 'name': 'Alice', 'score': 95.5})
await writer.append([1, 'Alice', 95.5])
writer.append({'id': 1, 'name': 'Alice', 'score': 95.5})
writer.append([1, 'Alice', 95.5])

Note:
For high-throughput bulk loading, prefer write_arrow_batch().
Use flush() to ensure all queued records are sent and acknowledged.
"""
...
def write_arrow(self, table: pa.Table) -> None: ...
def write_arrow_batch(self, batch: pa.RecordBatch) -> None: ...
def write_arrow_batch(self, batch: pa.RecordBatch) -> WriteResultHandle: ...
def write_pandas(self, df: pd.DataFrame) -> None: ...
def flush(self) -> None: ...
async def flush(self) -> None: ...
def __repr__(self) -> str: ...

class UpsertWriter:
"""Writer for upserting and deleting data in a Fluss primary key table."""

async def upsert(self, row: dict | list | tuple) -> None:
def upsert(self, row: dict | list | tuple) -> WriteResultHandle:
"""Upsert a row into the table.

If a row with the same primary key exists, it will be updated.
Expand All @@ -345,21 +348,37 @@ class UpsertWriter:
Args:
row: Dictionary mapping field names to values, or
list/tuple of values in schema order

Returns:
WriteResultHandle: Ignore for fire-and-forget, or await handle.wait() for ack.
"""
...
async def delete(self, pk: dict | list | tuple) -> None:
def delete(self, pk: dict | list | tuple) -> WriteResultHandle:
"""Delete a row from the table by primary key.

Args:
pk: Dictionary with PK column names as keys, or
list/tuple of PK values in PK column order

Returns:
WriteResultHandle: Ignore for fire-and-forget, or await handle.wait() for ack.
"""
...
async def flush(self) -> None:
"""Flush all pending upsert/delete operations to the server."""
...
def __repr__(self) -> str: ...


class WriteResultHandle:
    """Handle for a pending write (append/upsert/delete).

    Returned by ``AppendWriter.append`` / ``write_arrow_batch`` and
    ``UpsertWriter.upsert`` / ``delete``. Ignore the handle for
    fire-and-forget semantics, or ``await handle.wait()`` to block until
    the server acknowledges this specific write.
    """

    async def wait(self) -> None:
        """Wait for server acknowledgment of this write.

        Returns:
            None once the server has acknowledged the write.
            # NOTE(review): failure behavior (exception type on a rejected
            # write) is not visible from this stub — confirm against the
            # binding implementation.
        """
        ...
    def __repr__(self) -> str: ...


class Lookuper:
"""Lookuper for performing primary key lookups on a Fluss table."""

Expand Down
Loading