// Copyright 2024 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

import {Transform, TransformCallback} from 'stream';
import {
  RecordBatchReader,
  RecordBatch,
  RecordBatchStreamReader,
  Vector,
} from 'apache-arrow';
import * as protos from '../../protos/protos';

type ReadRowsResponse =
  protos.google.cloud.bigquery.storage.v1.IReadRowsResponse;
type ReadSession = protos.google.cloud.bigquery.storage.v1.IReadSession;

interface TableCell {
  v?: any;
}
interface TableRow {
  f?: Array<TableCell>;
}

/**
 * ArrowRawTransform implements a Node.js stream Transform that reads
 * ReadRowsResponse messages from the BigQuery Storage Read API and
 * extracts the serialized raw Arrow RecordBatch bytes.
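 *
 * @example
 * // A minimal usage sketch; `readRowsStream` is a hypothetical object
 * // stream of ReadRowsResponse (e.g. obtained via BigQueryReadClient.readRows).
 * const rawBatches = readRowsStream.pipe(new ArrowRawTransform());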
 */
export class ArrowRawTransform extends Transform {
  constructor() {
    super({
      // Consumes ReadRowsResponse objects, produces raw Arrow bytes.
      readableObjectMode: false,
      writableObjectMode: true,
    });
  }

  _transform(
    response: ReadRowsResponse,
    _: BufferEncoding,
    callback: TransformCallback
  ): void {
    // Skip responses that carry no Arrow payload (e.g. only stream
    // statistics), so downstream transforms only ever see batch bytes.
    if (
      !(
        response.arrowRecordBatch &&
        response.arrowRecordBatch.serializedRecordBatch
      )
    ) {
      callback(null);
      return;
    }
    callback(null, response.arrowRecordBatch.serializedRecordBatch);
  }
}

/**
 * ArrowRecordReaderTransform implements a Node.js stream Transform that
 * reads raw Arrow RecordBatch bytes and converts them into a stream of
 * Arrow RecordBatchStreamReader objects.
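 *
 * @example
 * // A minimal usage sketch; `rawBatches` and `session` are hypothetical
 * // values: a stream of raw batch bytes and a ReadSession (e.g. obtained
 * // via BigQueryReadClient.createReadSession).
 * const readers = rawBatches.pipe(new ArrowRecordReaderTransform(session));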
 */
export class ArrowRecordReaderTransform extends Transform {
  private session: ReadSession;

  constructor(session: ReadSession) {
    super({
      objectMode: true,
    });
    this.session = session;
  }

  _transform(
    serializedRecordBatch: Uint8Array,
    _: BufferEncoding,
    callback: TransformCallback
  ): void {
    // Prepend the session's serialized schema so the Arrow reader can
    // interpret the standalone record batch bytes as a valid IPC stream.
    const buf = Buffer.concat([
      this.session.arrowSchema?.serializedSchema as Uint8Array,
      serializedRecordBatch,
    ]);
    const reader = RecordBatchReader.from(buf);
    callback(null, reader);
  }
}

/**
 * ArrowRecordBatchTransform implements a Node.js stream Transform that reads
 * a RecordBatchStreamReader and emits a stream of Arrow RecordBatch objects.
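 *
 * @example
 * // A minimal usage sketch, continuing from a hypothetical `readers`
 * // stream of RecordBatchStreamReader objects:
 * const batches = readers.pipe(new ArrowRecordBatchTransform());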
 */
export class ArrowRecordBatchTransform extends Transform {
  constructor() {
    super({
      objectMode: true,
    });
  }

  _transform(
    reader: RecordBatchStreamReader,
    _: BufferEncoding,
    callback: TransformCallback
  ): void {
    const batches = reader.readAll();
    for (const batch of batches) {
      this.push(batch);
    }
    callback(null);
  }
}

/**
 * ArrowRecordBatchTableRowTransform implements a Node.js stream Transform
 * that reads an Arrow RecordBatch and converts it into a stream of BigQuery
 * TableRow objects.
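 *
 * @example
 * // A minimal end-to-end sketch; `readRowsStream` and `session` are
 * // hypothetical values obtained from the Storage Read API elsewhere.
 * const rows = readRowsStream
 *   .pipe(new ArrowRawTransform())
 *   .pipe(new ArrowRecordReaderTransform(session))
 *   .pipe(new ArrowRecordBatchTransform())
 *   .pipe(new ArrowRecordBatchTableRowTransform());
 * rows.on('data', row => console.log(row.f));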
 */
export class ArrowRecordBatchTableRowTransform extends Transform {
  constructor() {
    super({
      objectMode: true,
    });
  }

  _transform(
    batch: RecordBatch,
    _: BufferEncoding,
    callback: TransformCallback
  ): void {
    // Pre-allocate one TableRow skeleton per Arrow row.
    const rows = new Array(batch.numRows);
    for (let i = 0; i < batch.numRows; i++) {
      rows[i] = {
        f: new Array(batch.numCols),
      };
    }
    // Fill the rows column by column, converting each Arrow value into
    // the BigQuery tabledata cell representation.
    for (let j = 0; j < batch.numCols; j++) {
      const column = batch.selectAt([j]);
      const columnName = column.schema.fields[0].name;
      for (let i = 0; i < batch.numRows; i++) {
        const fieldData = column.get(i);
        const fieldValue = fieldData?.toJSON()[columnName];
        rows[i].f[j] = {
          v: convertArrowValue(fieldValue),
        };
      }
    }
    for (let i = 0; i < batch.numRows; i++) {
      this.push(rows[i]);
    }
    callback(null);
  }
}

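/**
 * Converts a single Arrow cell value into the BigQuery tabledata cell
 * representation: an Arrow Vector (a repeated field) becomes an array of
 * {v: ...} wrappers, a struct becomes a nested TableRow, and scalar values
 * pass through unchanged.
 *
 * @example
 * // Illustrative input/output for a struct value {a: 1, b: [2, 3]}:
 * // convertArrowValue(value) => {f: [{v: 1}, {v: [{v: 2}, {v: 3}]}]}
 */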
function convertArrowValue(fieldValue: any): any {
  // Guard against null explicitly: `typeof null` is also 'object'.
  if (fieldValue === null) {
    return null;
  }
  if (typeof fieldValue === 'object') {
    if (fieldValue instanceof Vector) {
      // Repeated field: convert each element and wrap it as a cell.
      const arr = fieldValue.toJSON();
      return arr.map((v: any) => {
        return {v: convertArrowValue(v)};
      });
    }
    // Struct field: convert to a nested TableRow, one cell per key.
    const tableRow: TableRow = {f: []};
    Object.keys(fieldValue).forEach(key => {
      tableRow.f?.push({
        v: convertArrowValue(fieldValue[key]),
      });
    });
    return tableRow;
  }
  return fieldValue;
}