@@ -275,6 +275,55 @@ def ingest(
275275 incremental_options : Optional [IngestIncrementalOptions ] = None ,
276276 transaction : Optional ["Transaction" ] = None ,
277277 ) -> flight .FlightStreamWriter :
278+ """
279+ Bulk ingest data into a table using Apache Arrow Flight.
280+
281+ This method provides high-performance bulk data loading by streaming
282+ Arrow record batches directly to the server. The writer can be used as
283+ a context manager for automatic resource cleanup.
284+
285+ Args:
286+ table_name: Name of the table to ingest data into.
287+ schema: PyArrow schema defining the table structure.
288+ schema_name: Optional schema name. If not provided, uses the client's
289+ default schema.
290+ catalog_name: Optional catalog name. If not provided, uses the client's
291+ default catalog.
292+ mode: Table creation/append mode. Options:
293+ - CREATE: Create table, fail if it exists
294+ - APPEND: Append to existing table, fail if it doesn't exist
295+ - CREATE_APPEND: Create if not exists, append if exists (default)
296+ - REPLACE: Drop and recreate table if it exists
297+ incremental_options: Options for incremental ingestion, including:
298+ - primary_key: Columns to use as primary key
 299+ - cursor_field: Columns used to determine which row to keep when two rows conflict on the primary key
300+ transaction: Optional transaction to execute ingestion within.
301+
302+ Returns:
303+ FlightStreamWriter for writing record batches to the table.
304+ The writer should be closed after all data is written, or used
305+ as a context manager.
306+
307+ Example:
308+ >>> # Basic ingestion
309+ >>> schema = pa.schema([("id", pa.int64()), ("name", pa.string())])
310+ >>> with client.ingest(table_name="users", schema=schema) as writer:
311+ ... batch = pa.record_batch([[1, 2], ["Alice", "Bob"]], schema=schema)
312+ ... writer.write(batch)
313+
314+ >>> # Incremental ingestion with primary key
315+ >>> from altertable_flightsql.client import IngestIncrementalOptions
316+ >>> opts = IngestIncrementalOptions(
317+ ... primary_key=["id"],
318+ ... cursor_field=["updated_at"]
319+ ... )
320+ >>> with client.ingest(
321+ ... table_name="users",
322+ ... schema=schema,
323+ ... incremental_options=opts
324+ ... ) as writer:
325+ ... writer.write(batch)
326+ """
278327 cmd = sql_pb2 .CommandStatementIngest (
279328 table = table_name ,
280329 table_definition_options = self ._ingest_mode_to_table_definition_options (mode ),
0 commit comments