Skip to content

Commit ae54c9a

Browse files
committed
Upgrade readers and writers in unbundle
1 parent 85d3399 commit ae54c9a

File tree

2 files changed

+29
-29
lines changed

2 files changed

+29
-29
lines changed

src/bundle/test_bundle.zig

Lines changed: 16 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -838,7 +838,10 @@ test "unbundle with existing directory error" {
838838
defer bundle_file.close();
839839

840840
// This should succeed but the CLI would error on existing directory
841-
try bundle.unbundle(bundle_file.reader(), tmp_dir, &allocator, filename, null);
841+
var reader_buffer: [1024]u8 = undefined;
842+
var reader_interface = bundle_file.reader(&reader_buffer).interface;
843+
var reader = reader_interface.adaptToOldInterface();
844+
try bundle.unbundle(&reader, tmp_dir, &allocator, filename, null);
842845
}
843846

844847
test "unbundle multiple archives" {
@@ -911,7 +914,10 @@ test "unbundle multiple archives" {
911914
const dir_name = fname[0 .. fname.len - 8]; // Remove .tar.zst
912915
const extract_dir = try tmp_dir.makeOpenPath(dir_name, .{});
913916

914-
try bundle.unbundle(bundle_file.reader(), extract_dir, &allocator, fname, null);
917+
var reader_buffer: [1024]u8 = undefined;
918+
var reader_interface = bundle_file.reader(&reader_buffer).interface;
919+
var reader = reader_interface.adaptToOldInterface();
920+
try bundle.unbundle(&reader, extract_dir, &allocator, fname, null);
915921
}
916922

917923
// Verify extraction
@@ -1046,7 +1052,10 @@ test "double roundtrip bundle -> unbundle -> bundle -> unbundle" {
10461052

10471053
const extract_dir = try unbundle1_dir.makeOpenPath("extracted1", .{});
10481054

1049-
try bundle.unbundle(bundle_file.reader(), extract_dir, &allocator, filename1, null);
1055+
var reader_buffer: [1024]u8 = undefined;
1056+
var reader_interface = bundle_file.reader(&reader_buffer).interface;
1057+
var reader = reader_interface.adaptToOldInterface();
1058+
try bundle.unbundle(&reader, extract_dir, &allocator, filename1, null);
10501059
}
10511060

10521061
// Second bundle (from first extraction)
@@ -1085,7 +1094,10 @@ test "double roundtrip bundle -> unbundle -> bundle -> unbundle" {
10851094

10861095
const extract_dir = try unbundle2_dir.makeOpenPath("extracted2", .{});
10871096

1088-
try bundle.unbundle(bundle_file.reader(), extract_dir, &allocator, filename2, null);
1097+
var reader_buffer: [1024]u8 = undefined;
1098+
var reader_interface = bundle_file.reader(&reader_buffer).interface;
1099+
var reader = reader_interface.adaptToOldInterface();
1100+
try bundle.unbundle(&reader, extract_dir, &allocator, filename2, null);
10891101
}
10901102

10911103
// Verify all files match original content

src/unbundle/unbundle.zig

Lines changed: 13 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -23,10 +23,8 @@ const ZSTD_WINDOW_BUFFER_SIZE: usize = 1 << 23; // 8MB
2323
pub const UnbundleError = error{
2424
DecompressionFailed,
2525
InvalidTarHeader,
26-
UnexpectedEndOfStream,
2726
FileCreateFailed,
2827
DirectoryCreateFailed,
29-
FileWriteFailed,
3028
HashMismatch,
3129
InvalidFilename,
3230
FileTooLarge,
@@ -36,6 +34,8 @@ pub const UnbundleError = error{
3634
DictionaryIdFlagUnsupported,
3735
MalformedBlock,
3836
MalformedFrame,
37+
StreamError,
38+
WriteFailed,
3939
} || std.mem.Allocator.Error;
4040

4141
/// Context for error reporting during unbundle operations
@@ -388,7 +388,7 @@ fn HashingReader(comptime ReaderType: type) type {
388388

389389
const Self = @This();
390390
pub const Error = ReaderType.Error;
391-
pub const Reader = std.io.Reader(*Self, Error, read);
391+
pub const Reader = std.Io.GenericReader(*Self, Error, read);
392392

393393
pub fn read(self: *Self, buffer: []u8) Error!usize {
394394
const n = try self.child_reader.read(buffer);
@@ -425,12 +425,13 @@ pub fn unbundleStream(
425425
};
426426

427427
var window_buffer: [ZSTD_WINDOW_BUFFER_SIZE]u8 = undefined;
428-
var zstd_stream = std.compress.zstd.Decompress.init(hashing_reader.reader(), &window_buffer, .{});
429-
const decompressed_reader = zstd_stream.reader();
428+
var reader_buffer: [4096]u8 = undefined;
429+
var reader = hashing_reader.reader().adaptToNewApi(&reader_buffer).new_interface;
430+
var zstd_stream = std.compress.zstd.Decompress.init(&reader, &window_buffer, .{});
430431

431432
var file_name_buffer: [std.fs.max_path_bytes]u8 = undefined;
432433
var link_name_buffer: [std.fs.max_path_bytes]u8 = undefined;
433-
var tar_iterator = std.tar.iterator(decompressed_reader, .{
434+
var tar_iterator = std.tar.Iterator.init(&zstd_stream.reader, .{
434435
.file_name_buffer = &file_name_buffer,
435436
.link_name_buffer = &link_name_buffer,
436437
});
@@ -460,18 +461,13 @@ pub fn unbundleStream(
460461
data_extracted = true;
461462
},
462463
.file => {
463-
const file_writer = try extract_writer.createFile(file_path);
464-
defer extract_writer.finishFile(file_writer);
465-
466464
var buffer: [STREAM_BUFFER_SIZE]u8 = undefined;
467-
var bytes_remaining = entry.size;
468-
while (bytes_remaining > 0) {
469-
const to_read = @min(buffer.len, bytes_remaining);
470-
const bytes_read = entry.reader().readAll(buffer[0..to_read]) catch return error.UnexpectedEndOfStream;
471-
if (bytes_read == 0) return error.UnexpectedEndOfStream;
472-
file_writer.writeAll(buffer[0..bytes_read]) catch return error.FileWriteFailed;
473-
bytes_remaining -= bytes_read;
474-
}
465+
const old_writer = try extract_writer.createFile(file_path);
466+
var file_writer = old_writer.adaptToNewApi(&buffer).new_interface;
467+
defer extract_writer.finishFile(old_writer);
468+
469+
tar_iterator.streamRemaining(entry, &file_writer) catch return error.StreamError;
470+
try file_writer.flush();
475471

476472
data_extracted = true;
477473
},
@@ -505,14 +501,6 @@ pub fn unbundleStream(
505501
}
506502

507503
// TODO: Add symlink support to ExtractWriter interface
508-
var buffer: [STREAM_BUFFER_SIZE]u8 = undefined;
509-
var bytes_remaining = entry.size;
510-
while (bytes_remaining > 0) {
511-
const to_read = @min(buffer.len, bytes_remaining);
512-
const bytes_read = entry.reader().readAll(buffer[0..to_read]) catch return error.UnexpectedEndOfStream;
513-
bytes_remaining -= bytes_read;
514-
}
515-
516504
data_extracted = true;
517505
},
518506
}

0 commit comments

Comments (0)