@@ -23,10 +23,8 @@ const ZSTD_WINDOW_BUFFER_SIZE: usize = 1 << 23; // 8MB
 pub const UnbundleError = error{
     DecompressionFailed,
     InvalidTarHeader,
-    UnexpectedEndOfStream,
     FileCreateFailed,
     DirectoryCreateFailed,
-    FileWriteFailed,
     HashMismatch,
     InvalidFilename,
     FileTooLarge,
@@ -36,6 +34,8 @@ pub const UnbundleError = error{
     DictionaryIdFlagUnsupported,
     MalformedBlock,
     MalformedFrame,
+    StreamError,
+    WriteFailed,
 } || std.mem.Allocator.Error;
 
 /// Context for error reporting during unbundle operations
@@ -388,7 +388,7 @@ fn HashingReader(comptime ReaderType: type) type {
 
         const Self = @This();
         pub const Error = ReaderType.Error;
-        pub const Reader = std.io.Reader(*Self, Error, read);
+        pub const Reader = std.Io.GenericReader(*Self, Error, read);
 
         pub fn read(self: *Self, buffer: []u8) Error!usize {
             const n = try self.child_reader.read(buffer);
@@ -425,12 +425,13 @@ pub fn unbundleStream(
     };
 
     var window_buffer: [ZSTD_WINDOW_BUFFER_SIZE]u8 = undefined;
-    var zstd_stream = std.compress.zstd.Decompress.init(hashing_reader.reader(), &window_buffer, .{});
-    const decompressed_reader = zstd_stream.reader();
+    var reader_buffer: [4096]u8 = undefined;
+    var reader = hashing_reader.reader().adaptToNewApi(&reader_buffer).new_interface;
+    var zstd_stream = std.compress.zstd.Decompress.init(&reader, &window_buffer, .{});
 
     var file_name_buffer: [std.fs.max_path_bytes]u8 = undefined;
     var link_name_buffer: [std.fs.max_path_bytes]u8 = undefined;
-    var tar_iterator = std.tar.iterator(decompressed_reader, .{
+    var tar_iterator = std.tar.Iterator.init(&zstd_stream.reader, .{
         .file_name_buffer = &file_name_buffer,
         .link_name_buffer = &link_name_buffer,
     });
@@ -460,18 +461,13 @@ pub fn unbundleStream(
             data_extracted = true;
         },
         .file => {
-            const file_writer = try extract_writer.createFile(file_path);
-            defer extract_writer.finishFile(file_writer);
-
             var buffer: [STREAM_BUFFER_SIZE]u8 = undefined;
-            var bytes_remaining = entry.size;
-            while (bytes_remaining > 0) {
-                const to_read = @min(buffer.len, bytes_remaining);
-                const bytes_read = entry.reader().readAll(buffer[0..to_read]) catch return error.UnexpectedEndOfStream;
-                if (bytes_read == 0) return error.UnexpectedEndOfStream;
-                file_writer.writeAll(buffer[0..bytes_read]) catch return error.FileWriteFailed;
-                bytes_remaining -= bytes_read;
-            }
+            const old_writer = try extract_writer.createFile(file_path);
+            var file_writer = old_writer.adaptToNewApi(&buffer).new_interface;
+            defer extract_writer.finishFile(old_writer);
+
+            tar_iterator.streamRemaining(entry, &file_writer) catch return error.StreamError;
+            try file_writer.flush();
 
             data_extracted = true;
         },
@@ -505,14 +501,6 @@ pub fn unbundleStream(
             }
 
             // TODO: Add symlink support to ExtractWriter interface
-            var buffer: [STREAM_BUFFER_SIZE]u8 = undefined;
-            var bytes_remaining = entry.size;
-            while (bytes_remaining > 0) {
-                const to_read = @min(buffer.len, bytes_remaining);
-                const bytes_read = entry.reader().readAll(buffer[0..to_read]) catch return error.UnexpectedEndOfStream;
-                bytes_remaining -= bytes_read;
-            }
-
             data_extracted = true;
         },
     }
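
For context, the pattern this commit migrates to can be assembled into a standalone sketch. This is a minimal, hedged example assuming Zig 0.15's std.Io interfaces: the bundle.tar.zst and out.bin paths, the 4 KB buffer sizes, and the use of std.fs.File.reader/writer through their interface fields are illustrative assumptions, not part of the diff; only Decompress.init, tar.Iterator.init, and streamRemaining mirror calls shown above.

const std = @import("std");

// Sketch (assumed Zig 0.15 std.Io API): decompress a zstd-compressed tar
// stream and copy the first regular-file entry out, mirroring the pipeline
// the diff builds from reader -> Decompress -> tar.Iterator.
pub fn main() !void {
    var in_file = try std.fs.cwd().openFile("bundle.tar.zst", .{}); // placeholder path
    defer in_file.close();

    // Assumption: File.reader exposes a std.Io.Reader via its `interface` field.
    var in_buffer: [4096]u8 = undefined;
    var file_reader = in_file.reader(&in_buffer);

    // zstd needs a window buffer; the commit sizes it at 1 << 23 bytes (8 MB).
    const window_buffer = try std.heap.page_allocator.alloc(u8, 1 << 23);
    defer std.heap.page_allocator.free(window_buffer);
    var zstd_stream = std.compress.zstd.Decompress.init(&file_reader.interface, window_buffer, .{});

    // The tar iterator reads from the decompressor's `reader` field, as in the diff.
    var file_name_buffer: [std.fs.max_path_bytes]u8 = undefined;
    var link_name_buffer: [std.fs.max_path_bytes]u8 = undefined;
    var tar_iterator = std.tar.Iterator.init(&zstd_stream.reader, .{
        .file_name_buffer = &file_name_buffer,
        .link_name_buffer = &link_name_buffer,
    });

    while (try tar_iterator.next()) |entry| {
        if (entry.kind != .file) continue;

        var out_file = try std.fs.cwd().createFile("out.bin", .{}); // placeholder path
        defer out_file.close();

        // Assumption: File.writer exposes a std.Io.Writer via its `interface` field.
        var out_buffer: [4096]u8 = undefined;
        var file_writer = out_file.writer(&out_buffer);

        // streamRemaining copies the entry body to any *std.Io.Writer, then flush.
        try tar_iterator.streamRemaining(entry, &file_writer.interface);
        try file_writer.interface.flush();
        break;
    }
}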