Script 'mail_helper' called by obssrc

Hello community,

here is the log from the commit of package libheif for openSUSE:Factory checked in at 2026-01-17 21:41:44

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/libheif (Old)
 and      /work/SRC/openSUSE:Factory/.libheif.new.1928 (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "libheif" Sat Jan 17 21:41:44 2026 rev:50 rq:1327647 version:1.21.2 Changes: -------- --- /work/SRC/openSUSE:Factory/libheif/libheif.changes 2026-01-08 15:25:35.496483669 +0100 +++ /work/SRC/openSUSE:Factory/.libheif.new.1928/libheif.changes 2026-01-17 21:41:47.995586866 +0100 @@ -1,0 +2,9 @@ +Fri Jan 16 08:12:26 UTC 2026 - Arjen de Korte <[email protected]> + +- update to 1.21.2: + * build script for JS/WASM now supports building with JPEG2000 and + "ISO23001-17 Uncompressed" support. + * image sequence SAI data now works when using the OpenH264 decoder + plugin + +------------------------------------------------------------------- Old: ---- libheif-1.21.1.tar.gz New: ---- libheif-1.21.2.tar.gz ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ Other differences: ------------------ ++++++ libheif.spec ++++++ --- /var/tmp/diff_new_pack.layWGU/_old 2026-01-17 21:41:48.799619802 +0100 +++ /var/tmp/diff_new_pack.layWGU/_new 2026-01-17 21:41:48.799619802 +0100 @@ -1,7 +1,7 @@ # # spec file for package libheif # -# Copyright (c) 2025 SUSE LLC and contributors +# Copyright (c) 2026 SUSE LLC and contributors # # All modifications and additions to the file contributed by third parties # remain the property of their copyright owners, unless otherwise agreed @@ -46,7 +46,7 @@ %bcond_with heif_view Name: libheif%{?psuffix} -Version: 1.21.1 +Version: 1.21.2 Release: 0 Summary: HEIF/AVIF file format decoder and encoder License: LGPL-3.0-only AND MIT ++++++ libheif-1.21.1.tar.gz -> libheif-1.21.2.tar.gz ++++++ diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libheif-1.21.1/CMakeLists.txt new/libheif-1.21.2/CMakeLists.txt --- old/libheif-1.21.1/CMakeLists.txt 2025-12-31 14:03:16.000000000 +0100 +++ new/libheif-1.21.2/CMakeLists.txt 2026-01-16 09:03:07.000000000 +0100 @@ -1,6 +1,6 @@ cmake_minimum_required (VERSION 3.16.3) # Oldest Ubuntu LTS (20.04 currently) -project(libheif LANGUAGES C CXX VERSION 1.21.1) +project(libheif LANGUAGES C CXX VERSION 1.21.2) # compatibility_version is never allowed to be decreased for any specific SONAME. # Libtool in the libheif-1.15.1 release had set it to 17.0.0, so we have to use this for the v1.x.y versions. diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libheif-1.21.1/build-emscripten.sh new/libheif-1.21.2/build-emscripten.sh --- old/libheif-1.21.1/build-emscripten.sh 2025-12-31 14:03:16.000000000 +0100 +++ new/libheif-1.21.2/build-emscripten.sh 2026-01-16 09:03:07.000000000 +0100 @@ -19,12 +19,22 @@ SRCDIR=$1 CORES="${CORES:-`nproc --all`}" + ENABLE_LIBDE265="${ENABLE_LIBDE265:-1}" LIBDE265_VERSION="${LIBDE265_VERSION:-1.0.15}" + ENABLE_AOM="${ENABLE_AOM:-0}" AOM_VERSION="${AOM_VERSION:-3.6.1}" + # Webcodecs is not on by default b/c asyncify increases the binary size considerably ENABLE_WEBCODECS="${ENABLE_WEBCODECS:-0}" + +ENABLE_UNCOMPRESSED="${ENABLE_UNCOMPRESSED:-0}" + +# J2K still defunct. OpenJPEG compiles, but library is not picked up by libheif cmake. 
+ENABLE_OPENJPEG="${ENABLE_OPENJPEG:-0}" +OPENJPEG_VERSION="${OPENJPEG_VERSION:-2.5.4}" + STANDALONE="${STANDALONE:-0}" DEBUG="${DEBUG:-0}" USE_ES6="${USE_ES6:-0}" @@ -90,11 +100,42 @@ LIBRARY_LINKER_FLAGS="$LIBRARY_LINKER_FLAGS -L${AOM_DIR} -laom" fi +CONFIGURE_ARGS_OPENJPEG="" +if [ "$ENABLE_OPENJPEG" = "1" ]; then + [ -s "openjpeg-${OPENJPEG__VERSION}.tar.gz" ] || curl \ + -L \ + -o openjpeg-${OPENJPEG_VERSION}.tar.gz \ + "https://github.com/uclouvain/openjpeg/archive/refs/tags/v${OPENJPEG_VERSION}.tar.gz" + if [ ! -s "openjpeg-${OPENJPEG_VERSION}/bin/libopenjp2.a" ]; then + mkdir -p openjpeg-${OPENJPEG_VERSION}/openjpeg-source + tar xf openjpeg-${OPENJPEG_VERSION}.tar.gz -C openjpeg-${OPENJPEG_VERSION}/openjpeg-source + cd openjpeg-${OPENJPEG_VERSION} + emcmake cmake openjpeg-source/openjpeg-${OPENJPEG_VERSION} \ + -DBUILD_SHARED_LIBS=0 \ + -DCMAKE_BUILD_TYPE=Release \ + -DCMAKE_INSTALL_PREFIX=openjpeg-install + + emmake make -j${CORES} + emmake make install -j${CORES} + + cd .. + fi + + J2K_DIR="$(pwd)/openjpeg-${OPENJPEG_VERSION}" + CONFIGURE_ARGS_OPENJPEG="-DWITH_OpenJPEG_DECODER=ON -DCMAKE_PREFIX_PATH=${J2K_DIR}/openjpeg-install -DCMAKE_FIND_ROOT_PATH_MODE_PACKAGE=BOTH" + LIBRARY_LINKER_FLAGS="$LIBRARY_LINKER_FLAGS -L${J2K_DIR}/bin -lopenjp2" +fi + CONFIGURE_ARGS_WEBCODECS="" if [ "$ENABLE_WEBCODECS" = "1" ]; then CONFIGURE_ARGS_WEBCODECS="-DWITH_WEBCODECS=ON" fi +CONFIGURE_ARGS_UNCOMPRESSED="" +if [ "$ENABLE_UNCOMPRESSED" = "1" ]; then + CONFIGURE_ARGS_UNCOMPRESSED="-DWITH_UNCOMPRESSED_CODEC=ON" +fi + EXTRA_EXE_LINKER_FLAGS="-lembind" EXTRA_COMPILER_FLAGS="" if [ "$STANDALONE" = "1" ]; then @@ -110,7 +151,9 @@ -DCMAKE_EXE_LINKER_FLAGS="${LIBRARY_LINKER_FLAGS} ${EXTRA_EXE_LINKER_FLAGS}" \ $CONFIGURE_ARGS_LIBDE265 \ $CONFIGURE_ARGS_AOM \ - $CONFIGURE_ARGS_WEBCODECS + $CONFIGURE_ARGS_WEBCODECS \ + $CONFIGURE_ARGS_UNCOMPRESSED \ + $CONFIGURE_ARGS_OPENJPEG VERBOSE=1 emmake make -j${CORES} diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libheif-1.21.1/examples/heif_enc.cc new/libheif-1.21.2/examples/heif_enc.cc --- old/libheif-1.21.1/examples/heif_enc.cc 2025-12-31 14:03:16.000000000 +0100 +++ new/libheif-1.21.2/examples/heif_enc.cc 2026-01-16 09:03:07.000000000 +0100 @@ -295,7 +295,7 @@ << " --no-alpha do not save alpha channel\n" << " --no-thumb-alpha do not save alpha channel in thumbnail image\n" << " --verbose enable logging output (more will increase logging level)\n" - << " -b, --bit-depth # bit-depth of generated HEIF/AVIF file when using 16-bit PNG input (default: 10 bit)\n" + << " -b, --bit-depth # number of bits to use from an 16-bit PNG input, valid range: 9-16 (default: 10 bit)\n" << " --premultiplied-alpha input image has premultiplied alpha\n" #if WITH_HEADER_COMPRESSION << " --enable-metadata-compression ALGO enable metadata item compression (experimental)\n" @@ -1275,6 +1275,10 @@ break; case 'b': output_bit_depth = atoi(optarg); + if (output_bit_depth < 9 || output_bit_depth > 16) { + std::cerr << "Bit depth for input HDR images must be 9-16 bits.\n"; + return 5; + } break; case 'A': force_enc_av1f = true; @@ -1733,6 +1737,15 @@ } + // If we get a list of image filenames, but no '-o' option, assume that the last option + // denotes the output filename. 
+ + if (output_filename.empty() && args.size() > 1) { + output_filename = args.back(); + args.pop_back(); + } + + if (!lossless) { heif_encoder_set_lossy_quality(encoder, quality); } diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libheif-1.21.1/libheif/api/libheif/heif_sequences.h new/libheif-1.21.2/libheif/api/libheif/heif_sequences.h --- old/libheif-1.21.1/libheif/api/libheif/heif_sequences.h 2025-12-31 14:03:16.000000000 +0100 +++ new/libheif-1.21.2/libheif/api/libheif/heif_sequences.h 2026-01-16 09:03:07.000000000 +0100 @@ -137,7 +137,7 @@ }; LIBHEIF_API -heif_auxiliary_track_info_type heif_track_get_auxiliary_info_type(const heif_track*); +enum heif_auxiliary_track_info_type heif_track_get_auxiliary_info_type(const heif_track*); LIBHEIF_API const char* heif_track_get_auxiliary_info_type_urn(const heif_track*); diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libheif-1.21.1/libheif/box.cc new/libheif-1.21.2/libheif/box.cc --- old/libheif-1.21.1/libheif/box.cc 2025-12-31 14:03:16.000000000 +0100 +++ new/libheif-1.21.2/libheif/box.cc 2026-01-16 09:03:07.000000000 +0100 @@ -880,6 +880,10 @@ box = std::make_shared<Box_tref>(); break; + case fourcc("sdtp"): + box = std::make_shared<Box_sdtp>(); + break; + default: box = std::make_shared<Box_other>(hdr.get_short_type()); break; diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libheif-1.21.1/libheif/box.h new/libheif-1.21.2/libheif/box.h --- old/libheif-1.21.1/libheif/box.h 2025-12-31 14:03:16.000000000 +0100 +++ new/libheif-1.21.2/libheif/box.h 2026-01-16 09:03:07.000000000 +0100 @@ -1484,7 +1484,7 @@ principal_point_y -= clap->top(image_height); } - void apply_imir(const Box_imir* imir, int image_width, int image_height) { + void apply_imir(const Box_imir* imir, uint32_t image_width, uint32_t image_height) { switch (imir->get_mirror_direction()) { case heif_transform_mirror_direction_horizontal: focal_length_x *= -1; diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libheif-1.21.1/libheif/codecs/uncompressed/unc_boxes.h new/libheif-1.21.2/libheif/codecs/uncompressed/unc_boxes.h --- old/libheif-1.21.1/libheif/codecs/uncompressed/unc_boxes.h 2025-12-31 14:03:16.000000000 +0100 +++ new/libheif-1.21.2/libheif/codecs/uncompressed/unc_boxes.h 2026-01-16 09:03:07.000000000 +0100 @@ -48,6 +48,8 @@ Error write(StreamWriter& writer) const override; + bool is_essential() const override { return true; } + struct Component { uint16_t component_type; diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libheif-1.21.1/libheif/compression_zlib.cc new/libheif-1.21.2/libheif/compression_zlib.cc --- old/libheif-1.21.1/libheif/compression_zlib.cc 2025-12-31 14:03:16.000000000 +0100 +++ new/libheif-1.21.2/libheif/compression_zlib.cc 2026-01-16 09:03:07.000000000 +0100 @@ -91,8 +91,8 @@ // decompress data with zlib - const int outBufferSize = 8192; - uint8_t dst[outBufferSize]; + std::vector<uint8_t> dst; + dst.resize(8192); z_stream strm; memset(&strm, 0, sizeof(z_stream)); @@ -100,8 +100,8 @@ strm.avail_in = (int)compressed_input.size(); strm.next_in = (Bytef*) compressed_input.data(); - strm.avail_out = outBufferSize; - strm.next_out = (Bytef*) dst; + strm.avail_out = (uInt)dst.size(); + strm.next_out = (Bytef*) dst.data(); strm.zalloc = Z_NULL; strm.zfree = Z_NULL; @@ -117,15 +117,26 @@ } do { - strm.next_out = dst; - strm.avail_out = 
outBufferSize; + strm.avail_out = (uInt)dst.size(); + strm.next_out = (Bytef*) dst.data(); - err = inflate(&strm, Z_FINISH); - if (err == Z_BUF_ERROR || err == Z_OK) { - // this is the usual case when we run out of buffer space - // -> do nothing + err = inflate(&strm, Z_NO_FLUSH); + + if (err == Z_BUF_ERROR) { + if (dst.size() >= 65536) { // TODO: make this a security limit + inflateEnd(&strm); + std::stringstream sstr; + sstr << "Error performing zlib inflate: maximum output buffer size exceeded\n"; + return Error(heif_error_Memory_allocation_error, heif_suberror_Compression_initialisation_error, sstr.str()); + } + + dst.resize(dst.size() * 2); + strm.next_out = dst.data(); + strm.avail_out = (uInt)dst.size(); + continue; } - else if (err == Z_NEED_DICT || err == Z_DATA_ERROR || err == Z_STREAM_ERROR) { + + if (err == Z_NEED_DICT || err == Z_DATA_ERROR || err == Z_STREAM_ERROR) { inflateEnd(&strm); std::stringstream sstr; sstr << "Error performing zlib inflate: " << (strm.msg ? strm.msg : "NULL") << " (" << err << ")\n"; @@ -133,7 +144,7 @@ } // append decoded data to output - output.insert(output.end(), dst, dst + outBufferSize - strm.avail_out); + output.insert(output.end(), dst.begin(), dst.end() - strm.avail_out); } while (err != Z_STREAM_END); diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libheif-1.21.1/libheif/image-items/grid.cc new/libheif-1.21.2/libheif/image-items/grid.cc --- old/libheif-1.21.1/libheif/image-items/grid.cc 2025-12-31 14:03:16.000000000 +0100 +++ new/libheif-1.21.2/libheif/image-items/grid.cc 2026-01-16 09:03:07.000000000 +0100 @@ -773,7 +773,7 @@ // copy over extra properties to grid item if (tile_x == 0 && tile_y == 0) { - auto property_boxes = encoded_image->generate_property_boxes(); + auto property_boxes = encoded_image->generate_property_boxes(false); for (auto& property : property_boxes) { add_property(property, is_property_essential(property)); } @@ -867,7 +867,7 @@ // copy over extra properties to grid item - auto property_boxes = tiles[0]->generate_property_boxes(); + auto property_boxes = tiles[0]->generate_property_boxes(true); for (auto& property : property_boxes) { griditem->add_property(property, griditem->is_property_essential(property)); } diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libheif-1.21.1/libheif/image-items/image_item.cc new/libheif-1.21.2/libheif/image-items/image_item.cc --- old/libheif-1.21.1/libheif/image-items/image_item.cc 2025-12-31 14:03:16.000000000 +0100 +++ new/libheif-1.21.2/libheif/image-items/image_item.cc 2026-01-16 09:03:07.000000000 +0100 @@ -354,7 +354,7 @@ // copy over ImageExtraData into image item *static_cast<ImageExtraData*>(this) = static_cast<ImageExtraData>(*image); - auto extra_data_properties = image->generate_property_boxes(); + auto extra_data_properties = image->generate_property_boxes(false); codedImage.properties.insert(codedImage.properties.end(), extra_data_properties.begin(), extra_data_properties.end()); diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libheif-1.21.1/libheif/image-items/unc_image.cc new/libheif-1.21.2/libheif/image-items/unc_image.cc --- old/libheif-1.21.1/libheif/image-items/unc_image.cc 2025-12-31 14:03:16.000000000 +0100 +++ new/libheif-1.21.2/libheif/image-items/unc_image.cc 2026-01-16 09:03:07.000000000 +0100 @@ -154,6 +154,12 @@ uint64_t offset = 0; for (heif_channel channel : {heif_channel_Y, heif_channel_Cb, heif_channel_Cr}) { + if 
(src_image->get_bits_per_pixel(channel) != 8) { + return Error(heif_error_Unsupported_feature, + heif_suberror_Unsupported_data_version, + "Unsupported colourspace"); + } + size_t src_stride; uint32_t src_width = src_image->get_width(channel); uint32_t src_height = src_image->get_height(channel); @@ -296,8 +302,15 @@ { channels = {heif_channel_Y}; } + for (heif_channel channel : channels) { + if (src_image->get_bits_per_pixel(channel) != 8) { + return Error(heif_error_Unsupported_feature, + heif_suberror_Unsupported_data_version, + "Unsupported colourspace"); + } + size_t src_stride; const uint8_t* src_data = src_image->get_plane(channel, &src_stride); uint64_t out_size = static_cast<uint64_t>(src_image->get_height()) * src_stride; diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libheif-1.21.1/libheif/pixelimage.cc new/libheif-1.21.2/libheif/pixelimage.cc --- old/libheif-1.21.1/libheif/pixelimage.cc 2025-12-31 14:03:16.000000000 +0100 +++ new/libheif-1.21.2/libheif/pixelimage.cc 2026-01-16 09:03:07.000000000 +0100 @@ -99,7 +99,7 @@ bool ImageExtraData::has_nclx_color_profile() const { - return m_color_profile_nclx != nclx_profile::defaults(); + return m_color_profile_nclx != nclx_profile::undefined(); } @@ -178,7 +178,7 @@ } -std::vector<std::shared_ptr<Box>> ImageExtraData::generate_property_boxes() const +std::vector<std::shared_ptr<Box>> ImageExtraData::generate_property_boxes(bool generate_colr_boxes) const { std::vector<std::shared_ptr<Box>> properties; @@ -218,16 +218,18 @@ properties.push_back(itai); } - // --- colr (nclx) + if (generate_colr_boxes) { + // --- colr (nclx) - if (has_nclx_color_profile()) { - properties.push_back(get_colr_box_nclx()); - } + if (has_nclx_color_profile()) { + properties.push_back(get_colr_box_nclx()); + } - // --- colr (icc) + // --- colr (icc) - if (has_icc_color_profile()) { - properties.push_back(get_colr_box_icc()); + if (has_icc_color_profile()) { + properties.push_back(get_colr_box_icc()); + } } return properties; @@ -575,6 +577,10 @@ plane->m_width * bytes_per_pixel); } + // --- replace existing image plane with reallocated plane + + delete[] planeIter.second.allocated_mem; + planeIter.second = newPlane; plane = &planeIter.second; } diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libheif-1.21.1/libheif/pixelimage.h new/libheif-1.21.2/libheif/pixelimage.h --- old/libheif-1.21.1/libheif/pixelimage.h 2025-12-31 14:03:16.000000000 +0100 +++ new/libheif-1.21.2/libheif/pixelimage.h 2026-01-16 09:03:07.000000000 +0100 @@ -77,7 +77,10 @@ public: virtual ~ImageExtraData(); - std::vector<std::shared_ptr<Box>> generate_property_boxes() const; + // TODO: Decide who is responsible for writing the colr boxes. + // Currently it is distributed over various places. + // Either here, in image_item.cc or in grid.cc. 
+ std::vector<std::shared_ptr<Box>> generate_property_boxes(bool generate_colr_boxes) const; // --- color profile diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libheif-1.21.1/libheif/plugins/decoder_openh264.cc new/libheif-1.21.2/libheif/plugins/decoder_openh264.cc --- old/libheif-1.21.1/libheif/plugins/decoder_openh264.cc 2025-12-31 14:03:16.000000000 +0100 +++ new/libheif-1.21.2/libheif/plugins/decoder_openh264.cc 2026-01-16 09:03:07.000000000 +0100 @@ -40,7 +40,7 @@ struct Packet { std::vector<uint8_t> data; - // uintptr_t pts; currently unused + uintptr_t pts; }; std::deque<Packet> input_data; @@ -173,7 +173,7 @@ } -heif_error openh264_push_data(void* decoder_raw, const void* frame_data, size_t frame_size) +heif_error openh264_push_data2(void* decoder_raw, const void* frame_data, size_t frame_size, uintptr_t user_data) { auto* decoder = (openh264_decoder*) decoder_raw; @@ -183,19 +183,21 @@ openh264_decoder::Packet pkt; pkt.data.insert(pkt.data.end(), input_data, input_data + frame_size); + pkt.pts = user_data; decoder->input_data.push_back(std::move(pkt)); return {heif_error_Ok, heif_suberror_Unspecified, kSuccess}; } -heif_error openh264_push_data2(void* decoder_raw, const void* frame_data, size_t frame_size, uintptr_t user_data) +heif_error openh264_push_data(void* decoder_raw, const void* frame_data, size_t frame_size) { - return openh264_push_data(decoder_raw, frame_data, frame_size); + return openh264_push_data2(decoder_raw, frame_data, frame_size, 0); } -heif_error openh264_decode_next_image(void* decoder_raw, heif_image** out_img, - const heif_security_limits* limits) +heif_error openh264_decode_next_image2(void* decoder_raw, heif_image** out_img, + uintptr_t* out_user_data, + const heif_security_limits* limits) { auto* decoder = (openh264_decoder*) decoder_raw; ISVCDecoder* pSvcDecoder = decoder->decoder; @@ -218,8 +220,9 @@ int iRet; - if (!decoder->input_data.empty()) { + sDstBufInfo.uiInBsTimeStamp = decoder->input_data.front().pts; + const std::vector<uint8_t>& indata = decoder->input_data.front().data; std::vector<uint8_t> scdata; @@ -310,6 +313,10 @@ return heif_error_ok; } + if (out_user_data) { + *out_user_data = sDstBufInfo.uiOutYuvTimeStamp; + } + /* // TODO: I receive an iBufferStatus==0, but the output image is still decoded if (sDstBufInfo.iBufferStatus == 0) { @@ -404,15 +411,11 @@ return heif_error_ok; } -heif_error openh264_decode_next_image2(void* decoder_raw, heif_image** out_img, - uintptr_t* out_user_data, - const heif_security_limits* limits) -{ - if (out_user_data) { - *out_user_data = 0; // TODO: not supported by openH264 - } - return openh264_decode_next_image(decoder_raw, out_img, limits); +heif_error openh264_decode_next_image(void* decoder_raw, heif_image** out_img, + const heif_security_limits* limits) +{ + return openh264_decode_next_image2(decoder_raw, out_img, nullptr, limits); } heif_error openh264_decode_image(void* decoder_raw, heif_image** out_img) diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libheif-1.21.1/libheif/sequences/seq_boxes.cc new/libheif-1.21.2/libheif/sequences/seq_boxes.cc --- old/libheif-1.21.1/libheif/sequences/seq_boxes.cc 2025-12-31 14:03:16.000000000 +0100 +++ new/libheif-1.21.2/libheif/sequences/seq_boxes.cc 2026-01-16 09:03:07.000000000 +0100 @@ -852,17 +852,6 @@ } -size_t Box_stsc::get_number_of_samples() const -{ - size_t total = 0; - for (const auto& entry : m_entries) { - total += entry.samples_per_chunk; - } - - return 
total; -} - - Error Box_stsc::parse(BitstreamRange& range, const heif_security_limits* limits) { parse_full_box_header(range); @@ -893,6 +882,13 @@ "'sample_description_index' in 'stsc' must not be 0."}; } + if (entry.samples_per_chunk > limits->max_sequence_frames) { + return { + heif_error_Invalid_input, + heif_suberror_Unspecified, + "Number of chunk samples in `stsc` box exceeds security limits of maximum number of frames."}; + } + m_entries[i] = entry; } @@ -1898,15 +1894,24 @@ } writer.write8(m_default_sample_info_size); - writer.write32(m_num_samples); if (m_default_sample_info_size == 0) { assert(m_num_samples == m_sample_sizes.size()); - for (uint8_t size : m_sample_sizes) { - writer.write8(size); + uint32_t num_nonnull_samples = static_cast<uint32_t>(m_sample_sizes.size()); + while (num_nonnull_samples > 0 && m_sample_sizes[num_nonnull_samples-1] == 0) { + num_nonnull_samples--; + } + + writer.write32(num_nonnull_samples); + + for (size_t i = 0; i < num_nonnull_samples; i++) { + writer.write8(m_sample_sizes[i]); } } + else { + writer.write32(m_num_samples); + } prepend_header(writer, box_start); @@ -1963,24 +1968,24 @@ } -void Box_saio::add_sample_offset(uint64_t s) +void Box_saio::add_chunk_offset(uint64_t s) { if (s > 0xFFFFFFFF) { m_need_64bit = true; set_version(1); } - m_sample_offset.push_back(s); + m_chunk_offset.push_back(s); } -uint64_t Box_saio::get_sample_offset(uint32_t idx) const +uint64_t Box_saio::get_chunk_offset(uint32_t idx) const { - if (idx >= m_sample_offset.size()) { + if (idx >= m_chunk_offset.size()) { return 0; } else { - return m_sample_offset[idx]; + return m_chunk_offset[idx]; } } @@ -2006,8 +2011,8 @@ sstr << fourcc_to_string(m_aux_info_type_parameter) << "\n"; } - for (size_t i = 0; i < m_sample_offset.size(); i++) { - sstr << indent << "[" << i << "] : 0x" << std::hex << m_sample_offset[i] << "\n"; + for (size_t i = 0; i < m_chunk_offset.size(); i++) { + sstr << indent << "[" << i << "] : 0x" << std::hex << m_chunk_offset[i] << "\n"; } return sstr.str(); @@ -2020,7 +2025,7 @@ writer.set_position(m_offset_start_pos); - for (uint64_t ptr : m_sample_offset) { + for (uint64_t ptr : m_chunk_offset) { if (get_version() == 0 && ptr + offset > std::numeric_limits<uint32_t>::max()) { writer.write32(0); // TODO: error } else if (get_version() == 0) { @@ -2043,16 +2048,16 @@ writer.write32(m_aux_info_type_parameter); } - if (m_sample_offset.size() > std::numeric_limits<uint32_t>::max()) { + if (m_chunk_offset.size() > std::numeric_limits<uint32_t>::max()) { return Error{heif_error_Unsupported_feature, heif_suberror_Unspecified, - "Maximum number of samples exceeded"}; + "Maximum number of chunks exceeded"}; } - writer.write32(static_cast<uint32_t>(m_sample_offset.size())); + writer.write32(static_cast<uint32_t>(m_chunk_offset.size())); m_offset_start_pos = writer.get_position(); - for (uint64_t size : m_sample_offset) { + for (uint64_t size : m_chunk_offset) { if (m_need_64bit) { writer.write64(size); } else { @@ -2075,26 +2080,28 @@ m_aux_info_type_parameter = range.read32(); } - uint32_t num_samples = range.read32(); + uint32_t num_chunks = range.read32(); - if (limits && num_samples > limits->max_sequence_frames) { + // We have no explicit maximum on the number of chunks. + // Use the maximum number of frames as an upper limit. + if (limits && num_chunks > limits->max_sequence_frames) { return { heif_error_Memory_allocation_error, heif_suberror_Security_limit_exceeded, - "Number of 'saio' samples exceeds the maximum number of sequence frames." 
+ "Number of 'saio' chunks exceeds the maximum number of sequence frames." }; } // check required memory - uint64_t mem_size = num_samples * sizeof(uint64_t); + uint64_t mem_size = num_chunks * sizeof(uint64_t); if (auto err = m_memory_handle.alloc(mem_size, limits, "the 'saio' table")) { return err; } - m_sample_offset.resize(num_samples); + m_chunk_offset.resize(num_chunks); - for (uint32_t i = 0; i < num_samples; i++) { + for (uint32_t i = 0; i < num_chunks; i++) { uint64_t offset; if (get_version() == 1) { offset = range.read64(); @@ -2103,7 +2110,7 @@ offset = range.read32(); } - m_sample_offset[i] = offset; + m_chunk_offset[i] = offset; if (range.error()) { return range.get_error(); @@ -2112,6 +2119,49 @@ return Error::Ok; } + + +std::string Box_sdtp::dump(Indent& indent) const +{ + std::stringstream sstr; + sstr << FullBox::dump(indent); + + assert(m_sample_information.size() <= UINT32_MAX); + + for (uint32_t i = 0; i < static_cast<uint32_t>(m_sample_information.size()); i++) { + const char* spaces = " "; + int nSpaces = 6; + int k = i; + while (k >= 10 && nSpaces < 12) { + k /= 10; + nSpaces++; + } + + spaces = spaces + 12 - nSpaces; + + sstr << indent << "[" << i << "] : is_leading=" << (int) get_is_leading(i) << "\n" + << indent << spaces << "depends_on=" << (int) get_depends_on(i) << "\n" + << indent << spaces << "is_depended_on=" << (int) get_is_depended_on(i) << "\n" + << indent << spaces << "has_redundancy=" << (int) get_has_redundancy(i) << "\n"; + } + + return sstr.str(); +} + + +Error Box_sdtp::parse(BitstreamRange& range, const heif_security_limits* limits) +{ + parse_full_box_header(range); + + // We have no easy way to get the number of samples from 'saiz' or 'stz2' as specified + // in the standard. Instead, we read until the end of the box. + size_t nSamples = range.get_remaining_bytes(); + + m_sample_information.resize(nSamples); + range.read(m_sample_information.data(), nSamples); + + return Error::Ok; +} Error Box_tref::parse(BitstreamRange& range, const heif_security_limits* limits) diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libheif-1.21.1/libheif/sequences/seq_boxes.h new/libheif-1.21.2/libheif/sequences/seq_boxes.h --- old/libheif-1.21.1/libheif/sequences/seq_boxes.h 2025-12-31 14:03:16.000000000 +0100 +++ new/libheif-1.21.2/libheif/sequences/seq_boxes.h 2026-01-16 09:03:07.000000000 +0100 @@ -437,8 +437,6 @@ return m_entries.back().samples_per_chunk == 0; } - size_t get_number_of_samples() const; - protected: Error parse(BitstreamRange& range, const heif_security_limits*) override; @@ -466,6 +464,8 @@ const std::vector<uint32_t>& get_offsets() const { return m_offsets; } + size_t get_number_of_chunks() const { return m_offsets.size(); } + void patch_file_pointers(StreamWriter&, size_t offset) override; protected: @@ -841,6 +841,8 @@ uint8_t get_sample_size(uint32_t idx); + bool have_samples_constant_size() const { return m_default_sample_info_size != 0; } + uint32_t get_num_samples() const { return m_num_samples; } std::string dump(Indent&) const override; @@ -876,12 +878,12 @@ uint32_t get_aux_info_type_parameter() const { return m_aux_info_type_parameter; } - void add_sample_offset(uint64_t offset); + void add_chunk_offset(uint64_t offset); - // This will be 1 if all infos are written contiguously - size_t get_num_samples() const { return m_sample_offset.size(); } + // If this is 1, the SAI data of all samples is written contiguously in the file. 
+ size_t get_num_chunks() const { return m_chunk_offset.size(); } - uint64_t get_sample_offset(uint32_t idx) const; + uint64_t get_chunk_offset(uint32_t idx) const; std::string dump(Indent&) const override; @@ -901,13 +903,42 @@ bool m_need_64bit = false; mutable uint64_t m_offset_start_pos; - // If sample_offset==1, all samples are stored contiguous in the file - std::vector<uint64_t> m_sample_offset; + // If |chunk_offset|==1, the SAI data of all samples is stored contiguously in the file + std::vector<uint64_t> m_chunk_offset; MemoryHandle m_memory_handle; }; +class Box_sdtp : public FullBox { +public: + Box_sdtp() + { + set_short_type(fourcc("sdtp")); + } + + std::string dump(Indent&) const override; + + const char* debug_box_name() const override { return "Independent and Disposable Samples"; } + + // Error write(StreamWriter& writer) const override; + + uint8_t get_is_leading(uint32_t sampleIdx) const { return (m_sample_information[sampleIdx] >> 6) & 3; } + + uint8_t get_depends_on(uint32_t sampleIdx) const { return (m_sample_information[sampleIdx] >> 4) & 3; } + + uint8_t get_is_depended_on(uint32_t sampleIdx) const { return (m_sample_information[sampleIdx] >> 2) & 3; } + + uint8_t get_has_redundancy(uint32_t sampleIdx) const { return (m_sample_information[sampleIdx]) & 3; } + +protected: + Error parse(BitstreamRange& range, const heif_security_limits*) override; + +private: + std::vector<uint8_t> m_sample_information; +}; + + class Box_tref : public Box { public: diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libheif-1.21.1/libheif/sequences/track.cc new/libheif-1.21.2/libheif/sequences/track.cc --- old/libheif-1.21.1/libheif/sequences/track.cc 2025-12-31 14:03:16.000000000 +0100 +++ new/libheif-1.21.2/libheif/sequences/track.cc 2026-01-16 09:03:07.000000000 +0100 @@ -96,7 +96,7 @@ if (m_interleaved && !m_data.empty()) { // TODO: I think this does not work because the image data does not know that there is SAI in-between uint64_t pos = file->append_mdat_data(m_data); - m_saio->add_sample_offset(pos); + m_saio->add_chunk_offset(pos); m_data.clear(); } @@ -109,28 +109,42 @@ if (!m_data.empty()) { uint64_t pos = file->append_mdat_data(m_data); - m_saio->add_sample_offset(pos); + m_saio->add_chunk_offset(pos); } } SampleAuxInfoReader::SampleAuxInfoReader(std::shared_ptr<Box_saiz> saiz, - std::shared_ptr<Box_saio> saio) + std::shared_ptr<Box_saio> saio, + const std::vector<std::shared_ptr<Chunk>>& chunks) { m_saiz = saiz; m_saio = saio; - m_contiguous = (saio->get_num_samples() == 1); - if (m_contiguous) { - uint64_t offset = saio->get_sample_offset(0); - auto nSamples = saiz->get_num_samples(); + bool oneChunk = (saio->get_num_chunks() == 1); + + uint32_t current_chunk = 0; + uint64_t offset = saio->get_chunk_offset(0); + uint32_t nSamples = saiz->get_num_samples(); + + m_contiguous_and_constant_size = (oneChunk && m_saiz->have_samples_constant_size()); + + if (m_contiguous_and_constant_size) { + m_singleChunk_offset = offset; + } + else { + m_sample_offsets.resize(nSamples); for (uint32_t i = 0; i < nSamples; i++) { - m_contiguous_offsets.push_back(offset); + if (!oneChunk && i > chunks[current_chunk]->last_sample_number()) { + current_chunk++; + assert(current_chunk < chunks.size()); + offset = saio->get_chunk_offset(current_chunk); + } + + m_sample_offsets[i] = offset; offset += saiz->get_sample_size(i); } - - // TODO: we could add a special case for contiguous data with constant size } } @@ -144,22 +158,33 @@ } 
-Result<std::vector<uint8_t> > SampleAuxInfoReader::get_sample_info(const HeifFile* file, uint32_t idx) +Result<std::vector<uint8_t> > SampleAuxInfoReader::get_sample_info(const HeifFile* file, uint32_t sample_idx) { uint64_t offset; - if (m_contiguous) { - offset = m_contiguous_offsets[idx]; + uint8_t size; + + if (m_contiguous_and_constant_size) { + size = m_saiz->get_sample_size(0); + offset = m_singleChunk_offset + sample_idx * size; } else { - offset = m_saio->get_sample_offset(idx); - } + size = m_saiz->get_sample_size(sample_idx); + if (size > 0) { + if (sample_idx >= m_sample_offsets.size()) { + return {}; + } - uint8_t size = m_saiz->get_sample_size(idx); + offset = m_sample_offsets[sample_idx]; + } + } std::vector<uint8_t> data; - Error err = file->append_data_from_file_range(data, offset, size); - if (err) { - return err; + + if (size > 0) { + Error err = file->append_data_from_file_range(data, offset, size); + if (err) { + return err; + } } return data; @@ -312,14 +337,6 @@ }; } - if (m_stsc->get_number_of_samples() != m_stsz->num_samples()) { - return { - heif_error_Invalid_input, - heif_suberror_Unspecified, - "Number of samples in 'stsc' and 'stsz' is inconsistent." - }; - } - const std::vector<uint32_t>& chunk_offsets = m_stco->get_offsets(); assert(chunk_offsets.size() <= (size_t) std::numeric_limits<uint32_t>::max()); // There cannot be more than uint32_t chunks. @@ -358,6 +375,14 @@ } } + if (current_sample_idx + sampleToChunk.samples_per_chunk > m_stsz->num_samples()) { + return { + heif_error_Invalid_input, + heif_suberror_Unspecified, + "Number of samples in 'stsc' box exceeds sample sizes in 'stsz' box." + }; + } + auto chunk = std::make_shared<Chunk>(m_heif_context, m_id, current_sample_idx, sampleToChunk.samples_per_chunk, m_stco->get_offsets()[chunk_idx], @@ -409,20 +434,21 @@ } if (saio) { - if (saio->get_num_samples() != saiz->get_num_samples()) { + if (saio->get_num_chunks() != 1 && + saio->get_num_chunks() != m_stco->get_number_of_chunks()) { return Error{ heif_error_Invalid_input, heif_suberror_Unspecified, - "Number of samples in 'saiz' box does not match 'saio' box." + "Invalid number of chunks in 'saio' box." 
}; } if (aux_info_type == fourcc("suid")) { - m_aux_reader_content_ids = std::make_unique<SampleAuxInfoReader>(saiz, saio); + m_aux_reader_content_ids = std::make_unique<SampleAuxInfoReader>(saiz, saio, m_chunks); } if (aux_info_type == fourcc("stai")) { - m_aux_reader_tai_timestamps = std::make_unique<SampleAuxInfoReader>(saiz, saio); + m_aux_reader_tai_timestamps = std::make_unique<SampleAuxInfoReader>(saiz, saio, m_chunks); } } else { @@ -976,17 +1002,21 @@ uint64_t current_decoding_time = 0; uint32_t current_chunk = 0; + uint32_t current_sample_in_chunk_idx = 0; for (uint32_t i = 0; i < m_num_samples; i++) { SampleTiming timing; timing.sampleIdx = i; + timing.sampleInChunkIdx = current_sample_in_chunk_idx; timing.media_decoding_time = current_decoding_time; timing.sample_duration_media_time = m_stts->get_sample_duration(i); current_decoding_time += timing.sample_duration_media_time; + current_sample_in_chunk_idx++; while (current_chunk < m_chunks.size() && i > m_chunks[current_chunk]->last_sample_number()) { current_chunk++; + current_sample_in_chunk_idx=0; if (current_chunk > m_chunks.size()) { timing.chunkIdx = 0; // TODO: error diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libheif-1.21.1/libheif/sequences/track.h new/libheif-1.21.2/libheif/sequences/track.h --- old/libheif-1.21.1/libheif/sequences/track.h 2025-12-31 14:03:16.000000000 +0100 +++ new/libheif-1.21.2/libheif/sequences/track.h 2026-01-16 09:03:07.000000000 +0100 @@ -67,18 +67,24 @@ { public: SampleAuxInfoReader(std::shared_ptr<Box_saiz>, - std::shared_ptr<Box_saio>); + std::shared_ptr<Box_saio>, + const std::vector<std::shared_ptr<Chunk>>& chunks); heif_sample_aux_info_type get_type() const; - Result<std::vector<uint8_t>> get_sample_info(const HeifFile* file, uint32_t idx); + Result<std::vector<uint8_t>> get_sample_info(const HeifFile* file, uint32_t sample_idx); private: std::shared_ptr<Box_saiz> m_saiz; std::shared_ptr<Box_saio> m_saio; - bool m_contiguous; - std::vector<uint64_t> m_contiguous_offsets; + // If there is only one chunk and the SAI data sizes are constant, we do not need an offset table. + // We just store the base offset and can directly calculate the sample offset from that. + bool m_contiguous_and_constant_size=false; + uint64_t m_singleChunk_offset=0; + + // For chunked data or non-constant sample sizes, we use a table with the offsets for all SAI samples. 
+ std::vector<uint64_t> m_sample_offsets; }; @@ -202,6 +208,7 @@ struct SampleTiming { uint32_t sampleIdx = 0; + uint32_t sampleInChunkIdx = 0; uint32_t chunkIdx = 0; uint64_t presentation_time = 0; // TODO uint64_t media_composition_time = 0; // TODO diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libheif-1.21.1/libheif/sequences/track_visual.cc new/libheif-1.21.2/libheif/sequences/track_visual.cc --- old/libheif-1.21.1/libheif/sequences/track_visual.cc 2025-12-31 14:03:16.000000000 +0100 +++ new/libheif-1.21.2/libheif/sequences/track_visual.cc 2026-01-16 09:03:07.000000000 +0100 @@ -639,7 +639,7 @@ auto& user_data = m_frame_user_data[frame_number]; - int32_t decoding_time = static_cast<int32_t>(m_stsc->get_number_of_samples()) * m_sample_duration; + int32_t decoding_time = static_cast<int32_t>(m_stsz->num_samples()) * m_sample_duration; int32_t composition_time = static_cast<int32_t>(frame_number) * m_sample_duration; Error err = write_sample_data(data.bitstream, diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libheif-1.21.1/scripts/install-ci-linux.sh new/libheif-1.21.2/scripts/install-ci-linux.sh --- old/libheif-1.21.1/scripts/install-ci-linux.sh 2025-12-31 14:03:16.000000000 +0100 +++ new/libheif-1.21.2/scripts/install-ci-linux.sh 2026-01-16 09:03:07.000000000 +0100 @@ -59,6 +59,12 @@ ls -lR $BUILD_ROOT/libde265/dist fi +if [ "$WITH_LIBDE265" = "3" ]; then + INSTALL_PACKAGES="$INSTALL_PACKAGES \ + libde265-dev \ + " +fi + if [ "$WITH_AOM" = "1" ]; then ADD_LIBHEIF_PPA=1 INSTALL_PACKAGES="$INSTALL_PACKAGES \ diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libheif-1.21.1/tests/cxx_wrapper.cc new/libheif-1.21.2/tests/cxx_wrapper.cc --- old/libheif-1.21.1/tests/cxx_wrapper.cc 2025-12-31 14:03:16.000000000 +0100 +++ new/libheif-1.21.2/tests/cxx_wrapper.cc 2026-01-16 09:03:07.000000000 +0100 @@ -37,6 +37,10 @@ img.create(16,16, heif_colorspace_RGB, heif_chroma_interleaved_RGBA); + heif::ColorProfile_nclx nclx; + nclx.set_matrix_coefficients(heif_matrix_coefficients_SMPTE_240M); + img.set_nclx_color_profile(nclx); + heif::ColorProfile_nclx a = img.get_nclx_color_profile(); heif::ColorProfile_nclx b(a); } \ No newline at end of file diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/libheif-1.21.1/third-party/dav1d.cmd new/libheif-1.21.2/third-party/dav1d.cmd --- old/libheif-1.21.1/third-party/dav1d.cmd 2025-12-31 14:03:16.000000000 +0100 +++ new/libheif-1.21.2/third-party/dav1d.cmd 2026-01-16 09:03:07.000000000 +0100 @@ -10,7 +10,7 @@ : # If you're running this on Windows, be sure you've already run this (from your VC2019 install dir): : # "C:\Program Files (x86)\Microsoft Visual Studio\2019\Professional\VC\Auxiliary\Build\vcvars64.bat" -git clone -b 1.5.0 --depth 1 https://code.videolan.org/videolan/dav1d.git +git clone -b 1.5.3 --depth 1 https://code.videolan.org/videolan/dav1d.git cd dav1d
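
For reference, the JS/WASM build options mentioned in the changelog are plain environment variables read by build-emscripten.sh, with the libheif source directory passed as the first positional argument (SRCDIR=$1 in the diff above). A minimal sketch of how one might enable them — the out-of-tree build layout and paths below are illustrative assumptions, not taken from the package:

  # hypothetical out-of-tree emscripten build; adjust paths to your checkout
  mkdir buildjs && cd buildjs
  ENABLE_UNCOMPRESSED=1 ENABLE_OPENJPEG=1 ../libheif-1.21.2/build-emscripten.sh ../libheif-1.21.2

Note that the script's own comment above still flags the OpenJPEG/J2K path as not fully functional (OpenJPEG compiles, but the library is not picked up by the libheif cmake run), so only ENABLE_UNCOMPRESSED is expected to affect the resulting module at this point.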
