Skip to content

Commit dcfed20

Browse files
committed
Directly append pyarrow table to types_test fixture
1 parent ba81d13 commit dcfed20

File tree

2 files changed

+3
-14
lines changed

2 files changed

+3
-14
lines changed

crates/integration_tests/testdata/pyiceberg/Dockerfile

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@
1515

1616
FROM python:3.9-bullseye
1717

18-
RUN pip install pyiceberg[pyarrow]==0.8
18+
RUN pip install pyiceberg[pyarrow]==0.8.1
1919

2020
COPY provision.py .
2121

crates/integration_tests/testdata/pyiceberg/provision.py

Lines changed: 2 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,6 @@
1717

1818
import os
1919
from pyiceberg.catalog import load_catalog
20-
import pyarrow.parquet as pq
2120
import pyarrow as pa
2221
from datetime import datetime, timedelta
2322

@@ -57,16 +56,6 @@
5756
# Convert to a PyArrow table
5857
table = pa.Table.from_arrays(columns, schema=schema)
5958

60-
# Write to a Parquet file
61-
pq.write_table(table, "types_test.parquet")
62-
63-
# Output the result
64-
print(f"Created a Parquet file with {rows} rows and schema {table.schema}.")
65-
66-
67-
# Load the Parquet file
68-
parquet_file = pq.read_table("./types_test.parquet")
69-
7059
# Connect to the REST catalog
7160
catalog = load_catalog(
7261
"rest",
@@ -82,6 +71,6 @@
8271
# Create a corresponding Iceberg table and append the file to it
8372
iceberg_table = catalog.create_table_if_not_exists(
8473
identifier=f"default.types_test",
85-
schema=parquet_file.schema,
74+
schema=schema,
8675
)
87-
iceberg_table.append(df=parquet_file)
76+
iceberg_table.append(table)

0 commit comments

Comments (0)