Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Reading multi-source compressed JSONL files #17161

Open
wants to merge 19 commits into
base: branch-24.12
Choose a base branch
from
Open
3 changes: 2 additions & 1 deletion cpp/src/io/comp/io_uncomp.hpp
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
/*
* Copyright (c) 2018-2022, NVIDIA CORPORATION.
* Copyright (c) 2018-2024, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
Expand Down Expand Up @@ -43,6 +43,7 @@ size_t decompress(compression_type compression,
host_span<uint8_t> dst,
rmm::cuda_stream_view stream);

size_t estimate_uncompressed_size(compression_type compression, host_span<uint8_t const> src);
/**
* @brief GZIP header flags
* See https://tools.ietf.org/html/rfc1952
Expand Down
66 changes: 65 additions & 1 deletion cpp/src/io/comp/uncomp.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -336,9 +336,9 @@ std::vector<uint8_t> decompress(compression_type compression, host_span<uint8_t
cdfh_ofs += cdfh_len;
}
}
}
if (compression != compression_type::AUTO) break;
[[fallthrough]];
}
case compression_type::BZIP2:
if (src.size() > 4) {
auto const* fhdr = reinterpret_cast<bz2_file_header_s const*>(raw);
Expand Down Expand Up @@ -560,5 +560,69 @@ size_t decompress(compression_type compression,
}
}

/**
 * @brief Estimates the uncompressed size of the given compressed input.
 *
 * Reads only the compressed stream's headers/metadata; does not decompress.
 * Returns 0 when the size cannot be determined from the header (unsupported
 * format, malformed header, or no usable archive entry).
 *
 * @param compression Compression format of @p src (explicit; AUTO is not handled here)
 * @param src Compressed host data
 * @return Estimated uncompressed size in bytes, or 0 if it cannot be determined
 */
size_t estimate_uncompressed_size(compression_type compression, host_span<uint8_t const> src)
{
  auto raw = src.data();
  switch (compression) {
    case compression_type::NONE: return src.size();
    case compression_type::GZIP: {
      // GZIP stores the uncompressed size (mod 2^32) in the ISIZE trailer field
      gz_archive_s gz;
      if (ParseGZArchive(&gz, src.data(), src.size())) { return gz.isize; }
      // Not a valid GZIP stream; do NOT fall through into the ZIP parser
      return 0;
    }
    case compression_type::ZIP: {
      zip_archive_s za;
      if (OpenZipArchive(&za, src.data(), src.size())) {
        // Walk the central directory looking for the first usable DEFLATE entry
        size_t cdfh_ofs = 0;
        for (int i = 0; i < za.eocd->num_entries; i++) {
          auto const* cdfh = reinterpret_cast<zip_cdfh_s const*>(
            reinterpret_cast<uint8_t const*>(za.cdfh) + cdfh_ofs);
          int cdfh_len = sizeof(zip_cdfh_s) + cdfh->fname_len + cdfh->extra_len + cdfh->comment_len;
          if (cdfh_ofs + cdfh_len > za.eocd->cdir_size || cdfh->sig != 0x0201'4b50) {
            // Bad cdir
            break;
          }
          // For now, only accept with non-zero file sizes and DEFLATE
          if (cdfh->comp_method == 8 && cdfh->comp_size > 0 && cdfh->uncomp_size > 0) {
            size_t lfh_ofs = cdfh->hdr_ofs;
            auto const* lfh = reinterpret_cast<zip_lfh_s const*>(raw + lfh_ofs);
            // Validate the local file header and that the compressed payload fits in src
            if (lfh_ofs + sizeof(zip_lfh_s) <= src.size() && lfh->sig == 0x0403'4b50 &&
                lfh_ofs + sizeof(zip_lfh_s) + lfh->fname_len + lfh->extra_len <= src.size()) {
              if (lfh->comp_method == 8 && lfh->comp_size > 0 && lfh->uncomp_size > 0) {
                size_t file_start = lfh_ofs + sizeof(zip_lfh_s) + lfh->fname_len + lfh->extra_len;
                size_t file_end   = file_start + lfh->comp_size;
                if (file_end <= src.size()) {
                  // Pick the first valid file of non-zero size (only 1 file expected in archive)
                  return lfh->uncomp_size;
                }
              }
            }
          }
          cdfh_ofs += cdfh_len;
        }
      }
      // No usable entry found; do NOT fall through into the Snappy varint reader
      return 0;
    }
    case compression_type::SNAPPY: {
      // Snappy prefixes the stream with the uncompressed length as a LEB128-style varint
      if (src.empty()) { return 0; }  // guard: *cur below would otherwise read out of bounds
      uint32_t uncompressed_size;
      auto cur       = src.begin();
      auto const end = src.end();
      // Read uncompressed length (varint)
      {
        uint32_t l = 0, c;
        uncompressed_size = 0;
        do {
          c              = *cur++;
          auto const lo7 = c & 0x7f;
          // A fifth varint byte may only contribute 4 bits to a 32-bit length
          if (l >= 28 && c > 0xf) { return 0; }
          uncompressed_size |= lo7 << l;
          l += 7;
        } while (c > 0x7f && cur < end);
        CUDF_EXPECTS(uncompressed_size != 0 and cur < end, "Destination buffer too small");
      }
      return uncompressed_size;
    }
    default: return 0;
  }
}

} // namespace io
} // namespace cudf
Loading
Loading