Skip to content
Draft
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
22 changes: 18 additions & 4 deletions arro3-io/src/parquet.rs
Original file line number Diff line number Diff line change
@@ -1,15 +1,16 @@
use std::any::Any;
use std::collections::HashMap;
use std::str::FromStr;
use std::sync::Arc;

use arrow_array::{RecordBatchIterator, RecordBatchReader};
use parquet::arrow::arrow_reader::ParquetRecordBatchReaderBuilder;
use parquet::arrow::arrow_writer::ArrowWriterOptions;
use parquet::arrow::ArrowWriter;
use parquet::arrow::{ArrowWriter, ProjectionMask};
use parquet::basic::{Compression, Encoding};
use parquet::file::properties::{WriterProperties, WriterVersion};
use parquet::format::KeyValue;
use parquet::schema::types::ColumnPath;
use parquet::schema::types::{ColumnPath, SchemaDescriptor};
use pyo3::exceptions::{PyTypeError, PyValueError};
use pyo3::prelude::*;
use pyo3_arrow::error::PyArrowResult;
Expand All @@ -19,8 +20,21 @@ use pyo3_arrow::PyRecordBatchReader;
use crate::utils::{FileReader, FileWriter};

#[pyfunction]
pub fn read_parquet(py: Python, file: FileReader) -> PyArrowResult<PyObject> {
let builder = ParquetRecordBatchReaderBuilder::try_new(file).unwrap();
pub fn read_parquet(
py: Python,
file: FileReader,
rgs: Option<Vec<usize>>,
Copy link
Owner

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Certainly! It really ought to be on the reader (.select() or such) anyway, so that you only read the parquet metadata once. Or maybe have a separate "fetch details" function. This was only a minimalist POC to show that you can pick out portions of the data like this.

columns: Option<Vec<usize>>,
) -> PyArrowResult<PyObject> {
let mut builder = ParquetRecordBatchReaderBuilder::try_new(file).unwrap();
if let Some(nn) = rgs {
builder = builder.with_row_groups(nn);
}

if let Some(cols) = columns {
let projection = ProjectionMask::leaves(builder.parquet_schema(), cols);
builder = builder.with_projection(projection);
}

let metadata = builder.schema().metadata().clone();
let reader = builder.build().unwrap();
Expand Down