Djjanks commented on code in PR #14:
URL: https://github.com/apache/arrow-js/pull/14#discussion_r2324681999
##########
src/ipc/reader.ts:
##########
@@ -354,12 +358,31 @@ abstract class RecordBatchReaderImpl<T extends TypeMap = any> implements RecordB
         return this;
     }
-    protected _loadRecordBatch(header: metadata.RecordBatch, body: any) {
-        const children = this._loadVectors(header, body, this.schema.fields);
+    protected _loadRecordBatch(header: metadata.RecordBatch, body: Uint8Array): RecordBatch<T> {
+        let children: Data<any>[];
+        if (header.compression != null) {
+            const codec = compressionRegistry.get(header.compression.type);
+            if (codec?.decode && typeof codec.decode === 'function') {
+                const { decompressedBody, buffers } = this._decompressBuffers(header, body, codec);
+                children = this._loadCompressedVectors(header, decompressedBody, this.schema.fields);
+                header = new metadata.RecordBatch(
+                    header.length,
+                    header.nodes,
+                    buffers,
+                    null
+                );
+            } else {
+                throw new Error('Record batch is compressed but codec not found');
+            }
+        } else {
+            children = this._loadVectors(header, body, this.schema.fields);
+        }
+
         const data = makeData({ type: new Struct(this.schema.fields), length: header.length, children });
         return new RecordBatch(this.schema, data);
     }
-    protected _loadDictionaryBatch(header: metadata.DictionaryBatch, body: any) {
+
+    protected _loadDictionaryBatch(header: metadata.DictionaryBatch, body: Uint8Array) {
Review Comment:
Thank you! I have pushed a new commit that adds correct compression and decompression handling for dictionary batches; previously, compression did not work on dictionary batches at all. Could you please try it in your project?
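
For anyone who wants to test this, here is a minimal sketch of registering a decode-only ZSTD codec before reading a compressed file. Note that the import paths, the Map-like `compressionRegistry.set(type, codec)` call, and the `fzstd` binding are my assumptions for illustration; the actual registration API in this PR may differ.

```ts
// Sketch only: the export paths and registry API below are assumptions, not the PR's final API.
import { compressionRegistry, CompressionType } from 'apache-arrow';
import { decompress } from 'fzstd'; // any ZSTD binding with a Uint8Array in/out API works here

// Register a decode-only codec so the `codec?.decode` check in _loadRecordBatch passes.
// Assumes the registry is Map-like and keyed by the flatbuffer CompressionType enum.
compressionRegistry.set(CompressionType.ZSTD, {
    decode(data: Uint8Array): Uint8Array {
        return decompress(data);
    },
});
```

With a codec registered, both record batches and (after this commit) dictionary batches should flow through the `header.compression != null` branch and decompress transparently.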