-
Notifications
You must be signed in to change notification settings - Fork 22
Open
Description
(This patch is for building arro3-io v0.6.5 for Pyodide; it gates the async/object-store code paths behind a `#[cfg(feature = "async")]` feature flag.)
diff --git a/arro3-io/src/error.rs b/arro3-io/src/error.rs
index ddeec9c..6db9f00 100644
--- a/arro3-io/src/error.rs
+++ b/arro3-io/src/error.rs
@@ -13,6 +13,7 @@ pub enum Arro3IoError {
#[error(transparent)]
ArrowError(#[from] arrow_schema::ArrowError),
+ #[cfg(feature = "async")]
/// A wrapped [object_store::Error]
#[error(transparent)]
ObjectStoreError(#[from] object_store::Error),
@@ -31,6 +32,7 @@ impl From<Arro3IoError> for PyErr {
match error {
Arro3IoError::PyErr(err) => err,
Arro3IoError::ArrowError(err) => PyException::new_err(err.to_string()),
+ #[cfg(feature = "async")]
Arro3IoError::ObjectStoreError(err) => PyException::new_err(err.to_string()),
Arro3IoError::ParquetError(err) => PyException::new_err(err.to_string()),
}
diff --git a/arro3-io/src/lib.rs b/arro3-io/src/lib.rs
index 6c81722..778b23a 100644
--- a/arro3-io/src/lib.rs
+++ b/arro3-io/src/lib.rs
@@ -39,8 +39,11 @@ fn _io(py: Python, m: &Bound<PyModule>) -> PyResult<()> {
m.add_wrapped(wrap_pyfunction!(___version))?;
+ #[cfg(feature = "async")]
+ {
pyo3_object_store::register_store_module(py, m, "arro3.io", "store")?;
pyo3_object_store::register_exceptions_module(py, m, "arro3.io", "exceptions")?;
+ }
m.add_wrapped(wrap_pyfunction!(csv::infer_csv_schema))?;
m.add_wrapped(wrap_pyfunction!(csv::read_csv))?;
@@ -57,7 +60,8 @@ fn _io(py: Python, m: &Bound<PyModule>) -> PyResult<()> {
m.add_wrapped(wrap_pyfunction!(ipc::write_ipc_stream))?;
m.add_wrapped(wrap_pyfunction!(parquet::read_parquet))?;
- m.add_wrapped(wrap_pyfunction!(parquet::read_parquet_async))?;
+ #[cfg(feature = "async")]
+ { m.add_wrapped(wrap_pyfunction!(parquet::read_parquet_async))?; }
m.add_wrapped(wrap_pyfunction!(parquet::write_parquet))?;
Ok(())
diff --git a/arro3-io/src/parquet.rs b/arro3-io/src/parquet.rs
index 5380904..ab31345 100644
--- a/arro3-io/src/parquet.rs
+++ b/arro3-io/src/parquet.rs
@@ -5,6 +5,7 @@ use std::sync::Arc;
use arrow_array::{RecordBatchIterator, RecordBatchReader};
use parquet::arrow::arrow_reader::ParquetRecordBatchReaderBuilder;
use parquet::arrow::arrow_writer::ArrowWriterOptions;
+#[cfg(feature = "async")]
use parquet::arrow::async_reader::ParquetObjectReader;
use parquet::arrow::ArrowWriter;
use parquet::basic::{Compression, Encoding};
@@ -17,6 +18,7 @@ use pyo3_arrow::error::PyArrowResult;
use pyo3_arrow::export::Arro3RecordBatchReader;
use pyo3_arrow::input::AnyRecordBatch;
use pyo3_arrow::{PyRecordBatchReader, PyTable};
+#[cfg(feature = "async")]
use pyo3_object_store::PyObjectStore;
use crate::error::Arro3IoResult;
@@ -44,6 +46,7 @@ pub fn read_parquet(file: FileReader) -> PyArrowResult<Arro3RecordBatchReader> {
Ok(PyRecordBatchReader::new(iter).into())
}
+#[cfg(feature = "async")]
#[pyfunction]
#[pyo3(signature = (path, *, store))]
pub fn read_parquet_async<'py>(
@@ -58,6 +61,7 @@ pub fn read_parquet_async<'py>(
Ok(fut)
}
+#[cfg(feature = "async")]
async fn read_parquet_async_inner(
store: Arc<dyn object_store::ObjectStore>,
path: String,
Metadata
Metadata
Assignees
Labels
No labels