From dd0c311ea29a20bec4182383ef957197bc5dd980 Mon Sep 17 00:00:00 2001 From: Sean Arms <67096+lesserwhirls@users.noreply.github.com> Date: Fri, 19 Dec 2025 17:09:11 -0700 Subject: [PATCH 1/8] Move libaec-native to edu.ucar.unidata namespace and version based off native lib --- .../libaec-native/build.gradle.kts | 32 +++++++++++++++++-- 1 file changed, 30 insertions(+), 2 deletions(-) diff --git a/native-compression/libaec-native/build.gradle.kts b/native-compression/libaec-native/build.gradle.kts index 5e2c7115d3..980c300e8a 100644 --- a/native-compression/libaec-native/build.gradle.kts +++ b/native-compression/libaec-native/build.gradle.kts @@ -7,14 +7,24 @@ import java.net.URL import java.security.DigestInputStream import java.security.MessageDigest -plugins { id("java-library-conventions") } +plugins { + id("java-base-conventions") + id("artifact-publishing-conventions") +} + +group = "edu.ucar.unidata" + +var aecVersion = "1.1.3" +var build = "0" + +version = "${aecVersion}.${build}" description = "Jar distribution of native libraries for libaec compression." project.extra["project.title"] = "Native libraries for libaec." 
// zip file produced by GitHub workflow -val libaecNative = "libaec-native-1.1.3-fec016ecd4b8ff1918877e582898d4257c405168.zip" +val libaecNative = "libaec-native-${aecVersion}-fec016ecd4b8ff1918877e582898d4257c405168.zip" // sha256 checksum from GitHub workflow output val expectedChecksum = "3db1ba7bc95b48eff74501382b90b0c7d0770a98f369d8c376c8ca4b6003487e" @@ -58,3 +68,21 @@ val processNativeResources = } tasks.processResources { dependsOn(processNativeResources) } + +var publishTaskName = "nativeLibs" + +publishing { + publications { + create(publishTaskName) { + from(components["java"]) + versionMapping { + usage("java-api") { fromResolutionOf("runtimeClasspath") } + usage("java-runtime") { fromResolutionResult() } + } + } + } +} + +tasks + .matching { it.group == "publishing" } + .forEach { it.enabled = System.getProperty("unidata.native.publish")?.toBoolean() ?: false } From bf95a686d86ea24b27591aafddff5481aaad3187 Mon Sep 17 00:00:00 2001 From: Sean Arms <67096+lesserwhirls@users.noreply.github.com> Date: Fri, 19 Dec 2025 17:10:09 -0700 Subject: [PATCH 2/8] Create libblosc2 native jar --- .../libblosc2-native/build.gradle.kts | 89 +++++++++++++++++++ 1 file changed, 89 insertions(+) create mode 100644 native-compression/libblosc2-native/build.gradle.kts diff --git a/native-compression/libblosc2-native/build.gradle.kts b/native-compression/libblosc2-native/build.gradle.kts new file mode 100644 index 0000000000..ccfe5fd9e3 --- /dev/null +++ b/native-compression/libblosc2-native/build.gradle.kts @@ -0,0 +1,89 @@ +/* + * Copyright (c) 2025 University Corporation for Atmospheric Research/Unidata + * See LICENSE for license information. 
+ */ + +import java.net.URL +import java.security.DigestInputStream +import java.security.MessageDigest + +plugins { + id("java-base-conventions") + id("artifact-publishing-conventions") +} + +group = "edu.ucar.unidata" + +var bloscVersion = "2.22.0" +var build = "0" + +version = "${bloscVersion}.${build}" + +description = "Jar distribution of native libraries for libblosc2 compression." + +project.extra["project.title"] = "Native libraries for libblosc2." + +// zip file produced by GitHub workflow +val libblosc2Native = + "libblosc2-native-${bloscVersion}-b4a818caa60bbf90bc92ec51ffd01ce3f293c8a4.zip" + +// sha256 checksum from GitHub workflow output +val expectedChecksum = "895226b29a314c0beae92de66a181b66833873e8c3a8a501d0b12193bde30610" + +val resourceZip = file("$rootDir/project-files/native/libblosc2/$libblosc2Native") +val fetchNativeResources = + tasks.register("fetchNativeResources") { + outputs.file(resourceZip) + doLast { + if (!resourceZip.exists()) { + logger.info("Fetching native libblosc2 libraries.") + var actualChecksum = "" + val resourceUrl = + "https://downloads.unidata.ucar.edu/netcdf-java/native/libblosc2/$libblosc2Native" + URL(resourceUrl).openStream().use { ips -> + val dips = DigestInputStream(ips, MessageDigest.getInstance("SHA-256")) + resourceZip.outputStream().use { ops -> dips.copyTo(ops) } + actualChecksum = dips.messageDigest.digest().toHexString() + } + if (actualChecksum != expectedChecksum) { + throw RuntimeException( + String.format( + "Error: checksum on libblosc2.zip does not match expected value.\n" + + " Expected: %s\n Actual: %s\n", + expectedChecksum, + actualChecksum, + ) + ) + } + } + } + } + +val processNativeResources = + tasks.register("processNativeResources", Copy::class) { + inputs.file(resourceZip) + from(zipTree(resourceZip)) + eachFile { relativePath = RelativePath(true, *relativePath.segments.drop(1).toTypedArray()) } + destinationDir = layout.buildDirectory.dir("resources/main").get().asFile + 
dependsOn(fetchNativeResources) + } + +tasks.processResources { dependsOn(processNativeResources) } + +var publishTaskName = "nativeLibs" + +publishing { + publications { + create(publishTaskName) { + from(components["java"]) + versionMapping { + usage("java-api") { fromResolutionOf("runtimeClasspath") } + usage("java-runtime") { fromResolutionResult() } + } + } + } +} + +tasks + .matching { it.group == "publishing" } + .forEach { it.enabled = System.getProperty("unidata.native.publish")?.toBoolean() ?: false } From d24d99325ac5f575e7610953fbad7b75d71b8cf5 Mon Sep 17 00:00:00 2001 From: Sean Arms <67096+lesserwhirls@users.noreply.github.com> Date: Fri, 19 Dec 2025 17:10:28 -0700 Subject: [PATCH 3/8] Document how to publish native jars --- native-compression/README | 10 ++++++++++ 1 file changed, 10 insertions(+) create mode 100644 native-compression/README diff --git a/native-compression/README b/native-compression/README new file mode 100644 index 0000000000..eeb47ef84e --- /dev/null +++ b/native-compression/README @@ -0,0 +1,10 @@ +# Publishing Native Jars + +The native jars are versioned based on the native library version (plus a build number). +These jars are also published separately from the rest of the project jars. +To publish a native jar, you must set the `unidata.native.publish` system property when running the gradle publish command. 
+For example, to publish the libblosc2-native jar, run: + +``` +./gradlew -D"unidata.native.publish=true" :libblosc2-native:publish +``` \ No newline at end of file From fb541c86cbcb44c2753c84bfef8cc6705d623cfc Mon Sep 17 00:00:00 2001 From: Sean Arms <67096+lesserwhirls@users.noreply.github.com> Date: Fri, 19 Dec 2025 17:11:48 -0700 Subject: [PATCH 4/8] Create libblosc2-jna subproject * JNA wrapper for libblosc2 decompression * Move blosc filter from cdm-core to libblosc2-jna --- .../src/main/java/ucar/nc2/filter/Blosc.java | 57 ------- .../services/ucar.nc2.filter.FilterProvider | 1 - .../libblosc2-jna/build.gradle.kts | 34 +++++ .../jna/libblosc2/BloscFilter.java | 68 +++++++++ .../compression/jna/libblosc2/LibBlosc2.java | 129 ++++++++++++++++ .../services/ucar.nc2.filter.FilterProvider | 1 + .../jna/libblosc2/TestLibBlosc2.java | 100 ++++++++++++ .../libblosc2/TestLibBlosc2Multithreaded.java | 143 ++++++++++++++++++ 8 files changed, 475 insertions(+), 58 deletions(-) delete mode 100644 cdm/core/src/main/java/ucar/nc2/filter/Blosc.java create mode 100644 native-compression/libblosc2-jna/build.gradle.kts create mode 100644 native-compression/libblosc2-jna/src/main/java/edu/ucar/unidata/compression/jna/libblosc2/BloscFilter.java create mode 100644 native-compression/libblosc2-jna/src/main/java/edu/ucar/unidata/compression/jna/libblosc2/LibBlosc2.java create mode 100644 native-compression/libblosc2-jna/src/main/resources/META-INF/services/ucar.nc2.filter.FilterProvider create mode 100644 native-compression/libblosc2-jna/src/test/java/edu/ucar/unidata/compression/jna/libblosc2/TestLibBlosc2.java create mode 100644 native-compression/libblosc2-jna/src/test/java/edu/ucar/unidata/compression/jna/libblosc2/TestLibBlosc2Multithreaded.java diff --git a/cdm/core/src/main/java/ucar/nc2/filter/Blosc.java b/cdm/core/src/main/java/ucar/nc2/filter/Blosc.java deleted file mode 100644 index 03032db8c9..0000000000 --- a/cdm/core/src/main/java/ucar/nc2/filter/Blosc.java +++ 
/dev/null @@ -1,57 +0,0 @@ -/* - * Copyright (c) 2021 University Corporation for Atmospheric Research/Unidata - * See LICENSE for license information. - */ - -package ucar.nc2.filter; - -import java.util.Map; - - -// TODO: Still to be implemented -public class Blosc extends Filter { - - private static final String name = "blosc"; - - private static final int id = 32001; - - public Blosc(Map properties) {} - - @Override - public String getName() { - return name; - } - - @Override - public int getId() { - return id; - } - - @Override - public byte[] encode(byte[] dataIn) { - return new byte[0]; - } - - @Override - public byte[] decode(byte[] dataIn) { - return new byte[0]; - } - - public static class Provider implements FilterProvider { - - @Override - public String getName() { - return name; - } - - @Override - public int getId() { - return id; - } - - @Override - public Filter create(Map properties) { - return new Blosc(properties); - } - } -} diff --git a/cdm/core/src/main/resources/META-INF/services/ucar.nc2.filter.FilterProvider b/cdm/core/src/main/resources/META-INF/services/ucar.nc2.filter.FilterProvider index 9ae0eaf89a..6cfec87786 100644 --- a/cdm/core/src/main/resources/META-INF/services/ucar.nc2.filter.FilterProvider +++ b/cdm/core/src/main/resources/META-INF/services/ucar.nc2.filter.FilterProvider @@ -1,4 +1,3 @@ -ucar.nc2.filter.Blosc$Provider ucar.nc2.filter.Deflate$Provider ucar.nc2.filter.Checksum32$Fletcher32Provider ucar.nc2.filter.Checksum32$Adler32Provider diff --git a/native-compression/libblosc2-jna/build.gradle.kts b/native-compression/libblosc2-jna/build.gradle.kts new file mode 100644 index 0000000000..d2e0f47618 --- /dev/null +++ b/native-compression/libblosc2-jna/build.gradle.kts @@ -0,0 +1,34 @@ +/* + * Copyright (c) 2025 University Corporation for Atmospheric Research/Unidata + * See LICENSE for license information. 
+ */ + +plugins { id("java-library-conventions") } + +description = "Java bindings for decoding C-Blosc2 compression using JNA." + +extra["project.title"] = "C-Blosc2 compression decoder using JNA" + +dependencies { + implementation(platform(project(":netcdf-java-platform"))) + + api(libs.jna) + + implementation(project(":cdm-core")) + + implementation(libs.slf4j.api) + + testImplementation(platform(project(":netcdf-java-testing-platform"))) + + testImplementation(project(":cdm-test-utils")) + + testImplementation(libs.google.truth) + + testCompileOnly(libs.junit4) + + testRuntimeOnly(project(":libblosc2-native")) + + testRuntimeOnly(libs.junit5.platformLauncher) + testRuntimeOnly(libs.junit5.vintageEngine) + testRuntimeOnly(libs.logback.classic) +} diff --git a/native-compression/libblosc2-jna/src/main/java/edu/ucar/unidata/compression/jna/libblosc2/BloscFilter.java b/native-compression/libblosc2-jna/src/main/java/edu/ucar/unidata/compression/jna/libblosc2/BloscFilter.java new file mode 100644 index 0000000000..764d91712c --- /dev/null +++ b/native-compression/libblosc2-jna/src/main/java/edu/ucar/unidata/compression/jna/libblosc2/BloscFilter.java @@ -0,0 +1,68 @@ +/* + * Copyright (c) 2021-2025 University Corporation for Atmospheric Research/Unidata + * See LICENSE for license information. 
+ */ + +package edu.ucar.unidata.compression.jna.libblosc2; + +import java.util.HashMap; +import java.util.Map; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import ucar.nc2.filter.Filter; +import ucar.nc2.filter.FilterProvider; + +public class BloscFilter extends Filter { + private static final Logger logger = LoggerFactory.getLogger(BloscFilter.class); + private static final String name = "blosc"; + private static final int id = 32001; + + private final Map properties; + + public BloscFilter(Map properties) { + this.properties = new HashMap<>(properties); + logger.debug("Blosc properties = {}", this.properties); + } + + @Override + public String getName() { + return name; + } + + @Override + public int getId() { + return id; + } + + @Override + public byte[] encode(byte[] dataIn) { + return new byte[0]; + } + + @Override + public byte[] decode(byte[] dataIn) { + return LibBlosc2.decode(dataIn); + } + + public Map properties() { + return new HashMap<>(properties); + } + + public static class Provider implements FilterProvider { + + @Override + public String getName() { + return name; + } + + @Override + public int getId() { + return id; + } + + @Override + public Filter create(Map properties) { + return new BloscFilter(properties); + } + } +} diff --git a/native-compression/libblosc2-jna/src/main/java/edu/ucar/unidata/compression/jna/libblosc2/LibBlosc2.java b/native-compression/libblosc2-jna/src/main/java/edu/ucar/unidata/compression/jna/libblosc2/LibBlosc2.java new file mode 100644 index 0000000000..d46fdd8825 --- /dev/null +++ b/native-compression/libblosc2-jna/src/main/java/edu/ucar/unidata/compression/jna/libblosc2/LibBlosc2.java @@ -0,0 +1,129 @@ +/* + * Copyright (c) 2025 University Corporation for Atmospheric Research/Unidata + * See LICENSE for license information. 
 */ + +package edu.ucar.unidata.compression.jna.libblosc2; + +import com.sun.jna.Memory; +import com.sun.jna.Native; +import com.sun.jna.NativeLong; +import com.sun.jna.Pointer; +import com.sun.jna.ptr.NativeLongByReference; +import java.io.File; +import java.io.IOException; +import java.util.concurrent.atomic.AtomicBoolean; + +/** + * JNA access to C-Blosc2. Not a full implementation, just the functions + * actually used for decoding (and testing). This is a partial transliteration + * of the C-Blosc2 library file include/blosc2.h. + * + * @author sarms + * @since 5.10.0 + */ + +public final class LibBlosc2 { + private static final org.slf4j.Logger log = org.slf4j.LoggerFactory.getLogger(LibBlosc2.class); + // C-Blosc2 library name on Windows is libblosc2.dll, but JNA does not automatically add + // "lib" to the name when searching on Windows as it does on other OSs, so we must add + // it explicitly. + private static final String libName = System.getProperty("os.name").startsWith("Windows") ? "libblosc2" : "blosc2"; + + // manage initialization / destruction of the native library internal state + private static final AtomicBoolean initialized = new AtomicBoolean(false); + private static final Object libraryStatusLock = new Object(); + + static { + try { + File library = Native.extractFromResourcePath(libName); + Native.register(library.getAbsolutePath()); + log.debug("Using blosc2 library from libblosc2-native.jar"); + } catch (IOException e) { + try { + Native.register(libName); + log.debug("Using libblosc2 library from system"); + } catch (UnsatisfiedLinkError ule) { + String message = + "libblosc2 C library not present. To read this data, include the libblosc2-native jar in your classpath " + + "(edu.ucar.unidata:libblosc2-native) or install libblosc2 on your system."; + log.error(message); + throw new RuntimeException(message, ule); + } + } + } + + /** + * Initialize the Blosc library environment. + *

+ * This method sets up the necessary internal structures and prepares the + * Blosc2 compression and decompression functionalities for use. + * It should be called before performing any operations with the library. + *

+ * It is recommended to pair this method with a call to {@link #blosc2_destroy()} + * to ensure proper cleanup and resource management. + */ + public static void init() { + synchronized (libraryStatusLock) { + if (!initialized.get()) { + blosc2_init(); + initialized.set(true); + } + } + } + + /** + * Destroys the Blosc2 library environment and releases all associated resources. + *

+ * This method ensures that any internal structures or resources allocated during + * the library's initialization are properly cleaned up and released. It should + * be called after completing all operations with the Blosc2 library to avoid + * resource leaks. + *

+ * This method is thread-safe. If the library has not been initialized, the + * method will return without performing any operations. + */ + public static void destroy() { + synchronized (libraryStatusLock) { + if (initialized.get()) { + blosc2_destroy(); + initialized.set(false); + } + } + } + + public static boolean isInitialized() { + return initialized.get(); + } + + public static byte[] decode(byte[] src) { + NativeLongByReference nbytes = new NativeLongByReference(); + NativeLongByReference cbytes = new NativeLongByReference(); + NativeLongByReference blocksize = new NativeLongByReference(); + blosc1_cbuffer_sizes(src, nbytes, cbytes, blocksize); + + Memory decompressed = new Memory(nbytes.getValue().intValue()); + blosc1_decompress(src, decompressed, nbytes.getValue()); + return decompressed.getByteArray(0, nbytes.getValue().intValue()); + } + + // BLOSC_EXPORT void blosc2_init(void); + static native void blosc2_init(); + + // BLOSC_EXPORT void blosc2_init(void); + static native void blosc2_destroy(); + + // BLOSC_EXPORT int blosc1_compress(int clevel, int doshuffle, size_t typesize, + // size_t nbytes, const void* src, void* dest, size_t destsize); + static native int blosc1_compress(int clevel, int doshuffle, NativeLong typesize, NativeLong nbytes, Pointer src, + Pointer dest, NativeLong destsize); + + // BLOSC_EXPORT int blosc1_decompress(const void* src, void* dest, size_t destsize) + static native int blosc1_decompress(byte[] src, Pointer dest, NativeLong destsize); + + // BLOSC_EXPORT void blosc1_cbuffer_sizes(const void* cbuffer, size_t* nbytes, + // size_t* cbytes, size_t* blocksize); + static native void blosc1_cbuffer_sizes(byte[] compressedData, NativeLongByReference nbytes, + NativeLongByReference cbytes, NativeLongByReference blocksize); + +} diff --git a/native-compression/libblosc2-jna/src/main/resources/META-INF/services/ucar.nc2.filter.FilterProvider 
b/native-compression/libblosc2-jna/src/main/resources/META-INF/services/ucar.nc2.filter.FilterProvider new file mode 100644 index 0000000000..84d779258f --- /dev/null +++ b/native-compression/libblosc2-jna/src/main/resources/META-INF/services/ucar.nc2.filter.FilterProvider @@ -0,0 +1 @@ +edu.ucar.unidata.compression.jna.libblosc2.BloscFilter$Provider \ No newline at end of file diff --git a/native-compression/libblosc2-jna/src/test/java/edu/ucar/unidata/compression/jna/libblosc2/TestLibBlosc2.java b/native-compression/libblosc2-jna/src/test/java/edu/ucar/unidata/compression/jna/libblosc2/TestLibBlosc2.java new file mode 100644 index 0000000000..ead7d77c1a --- /dev/null +++ b/native-compression/libblosc2-jna/src/test/java/edu/ucar/unidata/compression/jna/libblosc2/TestLibBlosc2.java @@ -0,0 +1,100 @@ +/* + * Copyright (c) 2025 University Corporation for Atmospheric Research/Unidata + * See LICENSE for license information. + */ + +package edu.ucar.unidata.compression.jna.libblosc2; + +import static com.google.common.truth.Truth.assertThat; + +import com.sun.jna.Memory; +import com.sun.jna.NativeLong; +import com.sun.jna.ptr.NativeLongByReference; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; + +public class TestLibBlosc2 { + + private static float[] sourceData; + private static int sourceSizeBytes; + private static Memory sourceMemory; + + @BeforeClass + public static void init() { + // generate data + int numberOfValues = 100; + sourceData = new float[numberOfValues]; + for (int i = 0; i < numberOfValues; i++) { + sourceData[i] = i * 0.15f; + } + sourceSizeBytes = sourceData.length * Float.BYTES; + sourceMemory = new Memory(sourceSizeBytes); + sourceMemory.write(0, sourceData, 0, sourceData.length); + LibBlosc2.init(); + } + + @AfterClass + public static void destroy() { + sourceMemory.close(); + LibBlosc2.destroy(); + } + + @Test + public void testRoundTrip() { + float[] dataRoundTrip; + + // compress data + try (Memory 
compressedMemory = new Memory(sourceSizeBytes)) { + int compressedSize = LibBlosc2.blosc1_compress(2, 1, new NativeLong(4), new NativeLong(sourceSizeBytes), + sourceMemory, compressedMemory, new NativeLong(sourceSizeBytes)); + + // verify that the compressed size is smaller than the original + assertThat(compressedSize).isLessThan((int) sourceSizeBytes); + + NativeLongByReference nbytes = new NativeLongByReference(); + NativeLongByReference cbytes = new NativeLongByReference(); + NativeLongByReference blocksize = new NativeLongByReference(); + LibBlosc2.blosc1_cbuffer_sizes(compressedMemory.getByteArray(0, compressedSize), nbytes, cbytes, blocksize); + + // check that nbytes is equal to the original size + assertThat(nbytes.getValue()).isEqualTo(new NativeLong(sourceSizeBytes)); + // check that cbytes is equal to the size returned by blosc1_compress + assertThat(cbytes.getValue()).isEqualTo(new NativeLong(compressedSize)); + + // decompress the freshly compressed data + byte[] compressedBytes = compressedMemory.getByteArray(0, cbytes.getValue().intValue()); + Memory decompressedMemory = new Memory(sourceSizeBytes); + LibBlosc2.blosc1_decompress(compressedBytes, decompressedMemory, new NativeLong(sourceSizeBytes)); + dataRoundTrip = decompressedMemory.getFloatArray(0, sourceData.length); + } + + // check that round tripped data is the same as the source data + assertThat(dataRoundTrip).isEqualTo(sourceData); + } + + @Test + public void testDecompressHelper() { + byte[] compressedBytes, decompressedBytes; + + try (Memory compressedMemory = new Memory(sourceSizeBytes)) { + int compressedSize = LibBlosc2.blosc1_compress(2, 1, new NativeLong(4), new NativeLong(sourceSizeBytes), + sourceMemory, compressedMemory, new NativeLong(sourceSizeBytes)); + + // verify that the compressed size is smaller than the original + assertThat(compressedSize).isLessThan((int) sourceSizeBytes); + + // decompress the freshly compressed data + compressedBytes = 
compressedMemory.getByteArray(0, compressedSize); + Memory decompressedMemory = new Memory(sourceSizeBytes); + LibBlosc2.blosc1_decompress(compressedBytes, decompressedMemory, new NativeLong(sourceSizeBytes)); + decompressedBytes = decompressedMemory.getByteArray(0, sourceSizeBytes); + } + + assertThat(decompressedBytes).isNotNull(); + assertThat(decompressedBytes.length).isEqualTo(sourceSizeBytes); + byte[] helperDecompressedBytes = LibBlosc2.decode(compressedBytes); + assertThat(helperDecompressedBytes).isEqualTo(decompressedBytes); + } + +} diff --git a/native-compression/libblosc2-jna/src/test/java/edu/ucar/unidata/compression/jna/libblosc2/TestLibBlosc2Multithreaded.java b/native-compression/libblosc2-jna/src/test/java/edu/ucar/unidata/compression/jna/libblosc2/TestLibBlosc2Multithreaded.java new file mode 100644 index 0000000000..85ea678e9c --- /dev/null +++ b/native-compression/libblosc2-jna/src/test/java/edu/ucar/unidata/compression/jna/libblosc2/TestLibBlosc2Multithreaded.java @@ -0,0 +1,143 @@ +/* + * Copyright (c) 2025 University Corporation for Atmospheric Research/Unidata + * See LICENSE for license information. 
+ */ + +package edu.ucar.unidata.compression.jna.libblosc2; + +import static com.google.common.truth.Truth.assertThat; +import static com.google.common.truth.Truth.assertWithMessage; + +import com.sun.jna.Memory; +import com.sun.jna.NativeLong; +import java.util.Random; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; + +public class TestLibBlosc2Multithreaded { + private static final Random r = new Random(); + + AtomicReference failed = new AtomicReference<>(); + CountDownLatch startupLatch, readyLatch, finishedLatch; + boolean testDebugMessages = true; + + @BeforeClass + public static void init() { + LibBlosc2.init(); + } + + @AfterClass + public static void destroy() { + LibBlosc2.destroy(); + } + + static class Blosc2TestCase { + byte[] expectedDecompressedData; + byte[] compressedData; + + Blosc2TestCase(double[] sourceData, int clevel, int doshuffle) { + int sourceSizeInBytes = sourceData.length * Double.BYTES; + try (Memory sourceMemory = new Memory(sourceSizeInBytes); + Memory compressedMemory = new Memory(sourceSizeInBytes)) { + sourceMemory.write(0, sourceData, 0, sourceData.length); + int compressedSize = LibBlosc2.blosc1_compress(clevel, doshuffle, new NativeLong(Double.BYTES), + new NativeLong(sourceSizeInBytes), sourceMemory, compressedMemory, new NativeLong(sourceSizeInBytes)); + assertThat(compressedSize).isLessThan(sourceSizeInBytes); + assertThat(compressedSize).isNotEqualTo(0); + this.compressedData = compressedMemory.getByteArray(0, compressedSize); + this.expectedDecompressedData = sourceMemory.getByteArray(0, sourceSizeInBytes); + } + } + } + + public static double[] createRandom(int n) { + double[] sourceData = new double[n]; + double scale = r.nextFloat(); + double offset = r.nextFloat(); + for (int i = 0; i < n; i++) { + sourceData[i] = i * scale + offset; + } + return 
sourceData; + } + + public class DecompressRunnable implements Runnable { + + Blosc2TestCase myTestCase; + + DecompressRunnable(Blosc2TestCase testCase) { + this.myTestCase = testCase; + } + + @Override + public void run() { + if (testDebugMessages) { + System.out.println(Thread.currentThread().getId() + ", awaiting execution signal"); + } + try { + readyLatch.countDown(); + boolean startupReady = startupLatch.await(1, TimeUnit.SECONDS); + assertWithMessage("test startup took too long").that(startupReady).isTrue(); + } catch (InterruptedException e) { + failed.set(new AssertionError("test startup took too long", e));; + } + if (testDebugMessages) { + System.out.println(Thread.currentThread().getId() + ", executing run() method!"); + } + assertThat(LibBlosc2.decode(myTestCase.compressedData)).isEqualTo(myTestCase.expectedDecompressedData); + if (testDebugMessages) { + System.out.println(Thread.currentThread().getId() + ", finished!"); + } + finishedLatch.countDown(); + } + } + + void runTest(Blosc2TestCase[] myTestCases) { + startupLatch = new CountDownLatch(1); + if (testDebugMessages) { + System.out.println("Main thread is: " + Thread.currentThread().getName()); + } + + finishedLatch = new CountDownLatch(myTestCases.length); + readyLatch = new CountDownLatch(myTestCases.length); + for (Blosc2TestCase myTestCase : myTestCases) { + Thread t = new Thread(new DecompressRunnable(myTestCase)); + t.start(); + } + // ensure threaded tests start running at the same time + try { + boolean testThreadsReady = readyLatch.await(1, TimeUnit.SECONDS); + assertWithMessage("test threads took too long to prepare").that(testThreadsReady).isTrue(); + } catch (InterruptedException e) { + failed.set(new AssertionError("test threads took too long to prepare", e));; + } + // trigger threaded tests to start running + if (testDebugMessages) { + System.out.println("Start testing threads from: " + Thread.currentThread().getName()); + } + startupLatch.countDown(); + // wait for tests to finish 
+ try { + boolean threadsComplete = finishedLatch.await(5, TimeUnit.SECONDS); + assertWithMessage("test threads failed to complete").that(threadsComplete).isTrue(); + } catch (InterruptedException e) { + failed.set(new AssertionError("test threads took too long to complete", e));; + } + if (failed.get() != null) { + throw failed.get(); + } + } + + @Test + public void testMultithreaded() { + Blosc2TestCase[] differentParamTestCases = new Blosc2TestCase[] {new Blosc2TestCase(createRandom(500), 3, 1), + new Blosc2TestCase(createRandom(100000), 2, 2), new Blosc2TestCase(createRandom(10000), 2, 1), + new Blosc2TestCase(createRandom(500000), 4, 2), new Blosc2TestCase(createRandom(800000), 9, 1), + new Blosc2TestCase(createRandom(1000000), 9, 1), new Blosc2TestCase(createRandom(700000), 8, 2), + new Blosc2TestCase(createRandom(300000), 9, 1)}; + runTest(differentParamTestCases); + } +} From 0bd562f2a358fdc5ff5162b9562dfbad6b9a047a Mon Sep 17 00:00:00 2001 From: Sean Arms <67096+lesserwhirls@users.noreply.github.com> Date: Fri, 19 Dec 2025 17:12:56 -0700 Subject: [PATCH 5/8] Add libblosc2 support to netcdf-java --- build-logic/src/main/kotlin/base-conventions.gradle.kts | 2 ++ settings.gradle.kts | 8 ++++++++ 2 files changed, 10 insertions(+) diff --git a/build-logic/src/main/kotlin/base-conventions.gradle.kts b/build-logic/src/main/kotlin/base-conventions.gradle.kts index f55008feb7..1292d84cdc 100644 --- a/build-logic/src/main/kotlin/base-conventions.gradle.kts +++ b/build-logic/src/main/kotlin/base-conventions.gradle.kts @@ -48,6 +48,8 @@ val publicArtifacts = ":legacy", ":libaec-jna", ":libaec-native", + ":libblosc2-jna", + ":libblosc2-native", ":netcdf4", ":opendap", ":udunits", diff --git a/settings.gradle.kts b/settings.gradle.kts index 7dd23118c6..23c6b2469c 100644 --- a/settings.gradle.kts +++ b/settings.gradle.kts @@ -45,6 +45,10 @@ include(":libaec-native") project(":libaec-native").projectDir = file("native-compression/libaec-native") 
+include(":libblosc2-native") + +project(":libblosc2-native").projectDir = file("native-compression/libblosc2-native") + // // critical subprojects // tricky main/test interdependencies...not circular, however @@ -75,6 +79,10 @@ include(":libaec-jna") project(":libaec-jna").projectDir = file("native-compression/libaec-jna") +include(":libblosc2-jna") + +project(":libblosc2-jna").projectDir = file("native-compression/libblosc2-jna") + include("netcdf4") include("opendap") From e4109182712a0361a2eab91a8dbfc95c22a5a38f Mon Sep 17 00:00:00 2001 From: Sean Arms <67096+lesserwhirls@users.noreply.github.com> Date: Fri, 19 Dec 2025 17:13:14 -0700 Subject: [PATCH 6/8] Add blosc support to zarr Closes Unidata/netcdf-java#1447 --- cdm/zarr/build.gradle.kts | 4 ++++ .../java/ucar/nc2/iosp/zarr/TestZarrIosp.java | 22 +++++++++++++++++++ 2 files changed, 26 insertions(+) diff --git a/cdm/zarr/build.gradle.kts b/cdm/zarr/build.gradle.kts index 7171615687..a50a57706a 100644 --- a/cdm/zarr/build.gradle.kts +++ b/cdm/zarr/build.gradle.kts @@ -20,6 +20,8 @@ dependencies { implementation(libs.jackson.databind) implementation(libs.slf4j.api) + runtimeOnly(project(":libblosc2-jna")) + testImplementation(platform(project(":netcdf-java-testing-platform"))) testImplementation(project(":cdm-s3")) @@ -38,6 +40,8 @@ dependencies { testCompileOnly(libs.junit4) + testRuntimeOnly(project(":libblosc2-native")) + testRuntimeOnly(libs.junit5.platformLauncher) testRuntimeOnly(libs.junit5.vintageEngine) } diff --git a/cdm/zarr/src/test/java/ucar/nc2/iosp/zarr/TestZarrIosp.java b/cdm/zarr/src/test/java/ucar/nc2/iosp/zarr/TestZarrIosp.java index 20184b6849..70165eb353 100644 --- a/cdm/zarr/src/test/java/ucar/nc2/iosp/zarr/TestZarrIosp.java +++ b/cdm/zarr/src/test/java/ucar/nc2/iosp/zarr/TestZarrIosp.java @@ -15,6 +15,8 @@ import ucar.ma2.ArrayLong; import ucar.ma2.DataType; import ucar.ma2.InvalidRangeException; +import ucar.ma2.MAMath; +import ucar.ma2.MAMath.MinMax; import ucar.ma2.Section; import 
ucar.nc2.*; @@ -399,4 +401,24 @@ public void testGeozarrSharedDimension() throws IOException { } } } + + @Test + public void testBloscCompressedData() throws IOException { + NetcdfFile ncfile = NetcdfFiles.open(SCALAR_GEOZARR_DATA); + Variable bloscVariable = ncfile.findVariable("temperature"); + assertThat(bloscVariable != null).isTrue(); + Attribute compressor = bloscVariable.findAttribute("_Compressor"); + assertThat(compressor).isNotNull(); + assertThat(compressor.getStringValue()).isEqualTo("blosc"); + Array data = bloscVariable.read(); + assertThat(data).isNotNull(); + MinMax mm = MAMath.getMinMax(data); + assertThat(mm).isNotNull(); + // values from python zarr implementation + double expectedMin = 1.9752643660053693e-05; + double expectedMax = 0.9999808929333684; + assertThat(mm.min).isWithin(1e-9).of(expectedMin); + assertThat(mm.max).isWithin(1e-7).of(expectedMax); + } + } From 5e87f76b8fe2a82fc3a1ffaa391b83857ae22907 Mon Sep 17 00:00:00 2001 From: Sean Arms <67096+lesserwhirls@users.noreply.github.com> Date: Tue, 30 Dec 2025 11:17:04 -0700 Subject: [PATCH 7/8] Update documentation to include blosc support information --- docs/src/site/_config.yml | 8 +++++- docs/src/site/pages/netcdfJava/Upgrade.md | 19 +++++++++++--- .../netcdfJava/coordattributeconvention.md | 2 +- .../developer/CdmUtilityPrograms.md | 4 +-- docs/src/site/pages/netcdfJava/faq.md | 4 +-- .../overview/BuildingFromSource.md | 2 +- .../overview/UsingNetcdfJava.md | 25 ++++++++++++++++--- 7 files changed, 50 insertions(+), 14 deletions(-) diff --git a/docs/src/site/_config.yml b/docs/src/site/_config.yml index db79bf45ea..7401e7eeb1 100644 --- a/docs/src/site/_config.yml +++ b/docs/src/site/_config.yml @@ -64,7 +64,7 @@ sidebars: - netcdfJavaTutorial_sidebar # project logo (32x32), name, main landing page (currently used in the footer) -project_logo: https://www.unidata.ucar.edu/images/logos/thredds_netcdf-32x32.png +project_logo: 
https://assets.unidata.ucar.edu/images/logos/thredds_netcdf-32x32.png project_name: netCDF-Java project_landing_page: https://unidata.ucar.edu/software/netcdf-java/ @@ -74,3 +74,9 @@ description: "This is a documentation site for the netCDF-Java, which is part of # needed for sitemap.xml file only # must include trailing slash, leave off the version :-) base_docs_url: https://docs.unidata.ucar.edu/netcdf-java/ + +# these will appear in various doc pages +java_version_build: 17 +java_version_runtime: 8 +libaec_version: 1.1.3.0 +cblosc2_version: 2.22.0.0 diff --git a/docs/src/site/pages/netcdfJava/Upgrade.md b/docs/src/site/pages/netcdfJava/Upgrade.md index ed8ab18edf..b3c381d64e 100644 --- a/docs/src/site/pages/netcdfJava/Upgrade.md +++ b/docs/src/site/pages/netcdfJava/Upgrade.md @@ -8,8 +8,8 @@ permalink: upgrade.html ## Requirements -* Java 8 or later is required to use the library. -* Starting with version 5.10.0, Java 17 is required to build the library. +* Java {{ site.java_version_runtime }} or later is required to use the library. +* Java {{ site.java_version_build }} is required to build the library. ## Quick Navigation * [Summary of changes for v5.10.x](#netcdf-java-api-changes-510x) @@ -29,10 +29,23 @@ permalink: upgrade.html Point release notes: * [5.10.0](https://github.com/Unidata/netcdf-java/releases/tag/v5.10.0){:target="_blank"} (_yyyy-mm-dd, unreleased_) -Starting with 5.10.x, the netCDF-Java library requires Java 17 to build (although the build will produce Java 8 bytecode, so the minimum supported version is still Java 8). +Starting with 5.10.0, the netCDF-Java library requires Java 17 to build (although the build will produce Java 8 bytecode, so the minimum supported version is still Java 8). Note: we are looking to update the minimum version of the JVM we support for the project. Please consider taking a moment to participate in the [poll on GitHub](https://github.com/Unidata/netcdf-java/discussions/1468){:target="_blank"}. 
+The 5.10.0 release adds support for reading blosc compressed data using the C-Blosc2 native library.
+The new artifact for JNA support is `edu.ucar.unidata:libblosc2-jna` (the current version is `2.22.0.0`).
+
+### Native jar group name changes
+
+Going forward, all native jars will be published under the `edu.ucar.unidata` group.
+Their version will match the version of the native library that they contain, plus a build number.
+This is to help make clear which version of the native library is being used by the jar.
+Starting with this release, `libaec-native` will be published under the `edu.ucar.unidata` group as well.
+Consider using the netcdf-java-bom artifact to manage your netCDF-Java dependencies, as it will always contain the latest versions of the native jars.
+The new `edu.ucar.unidata:libaec-native:1.1.3.0` is equivalent to the previous `edu.ucar:libaec-native:5.9.1`.
+Please use the new group name for any `*-native` artifacts going forward.
+
 ## netCDF-Java API Changes (5.9.x)
 Point release notes:
diff --git a/docs/src/site/pages/netcdfJava/coordattributeconvention.md b/docs/src/site/pages/netcdfJava/coordattributeconvention.md
index 8b15e1d549..67a171f067 100644
--- a/docs/src/site/pages/netcdfJava/coordattributeconvention.md
+++ b/docs/src/site/pages/netcdfJava/coordattributeconvention.md
@@ -8,7 +8,7 @@ toc: false
 {% include image.html file="netcdf-java/tutorial/coordsystems/CoordSys.png" alt="Coordinate Systems UML" caption="Coordinate Systems UML" %}
-For an overview, see: [CDM Object Model]( /common_data_model_overview.html). The Coordinate Axes for a Variable must use a subset of the Variables's dimensions.
+For an overview, see: [CDM Object Model](common_data_model_overview.html). The Coordinate Axes for a Variable must use a subset of the Variable's dimensions.
## Goals of _Coordinate Attribute Convention diff --git a/docs/src/site/pages/netcdfJava/developer/CdmUtilityPrograms.md b/docs/src/site/pages/netcdfJava/developer/CdmUtilityPrograms.md index 07d265f13c..5ac58db8e8 100644 --- a/docs/src/site/pages/netcdfJava/developer/CdmUtilityPrograms.md +++ b/docs/src/site/pages/netcdfJava/developer/CdmUtilityPrograms.md @@ -152,7 +152,7 @@ java -Xmx1g -classpath netcdfAll-.jar ucar.nc2.ft.point.writer.CFPointW ## GribCdmIndex -Write GRIB Collection Indexes from an XML file containing a [GRIB ``](/grib_feature_collections_ref.html) XML element. +Write GRIB Collection Indexes from an XML file containing a [GRIB ``](grib_feature_collections_ref.html) XML element. ~~~bash java -Xmx1g -classpath netcdfAll-.jar ucar.nc2.grib.collection.GribCdmIndex [options] @@ -173,7 +173,7 @@ Example: java -Xmx1g -classpath netcdfAll-.jar ucar.nc2.grib.collection.GribCdmIndex -fc /data/fc/gfs_example.xml ~~~ -Note that the output file is placed in the root directory of the collection, as specified by the [Collection Specification](/collection_spec_string_ref.html) of the GRIB [``](feature_collections_ref.html). +Note that the output file is placed in the root directory of the collection, as specified by the [Collection Specification string](https://docs.unidata.ucar.edu/tds/current/userguide/collection_spec_string_ref.html){:target="_blank"} of the GRIB [``](grib_feature_collections_ref.html). ## FeatureScan diff --git a/docs/src/site/pages/netcdfJava/faq.md b/docs/src/site/pages/netcdfJava/faq.md index efc6667423..225caef2e1 100644 --- a/docs/src/site/pages/netcdfJava/faq.md +++ b/docs/src/site/pages/netcdfJava/faq.md @@ -212,11 +212,11 @@ In the following example, the _type_ may be byte, short, int or long: ### Reading #### Q: What files can the library read? -See [File Types])/common_data_model_overview.html). +See [File Types](common_data_model_overview.html). #### Q: How do I read a file of type X? 
-In general, you [open any CDM file](common_data_model_overview.html) in the same way, and access it through the [extended netCDF data model(/common_data_model_overview.html#data-access-layer-object-model). The whole point of the CDM is to hide the details of the file format. However, some file type may require special handling: +In general, you [open any CDM file](common_data_model_overview.html) in the same way, and access it through the [extended netCDF data model](common_data_model_overview.html#data-access-layer-object-model). The whole point of the CDM is to hide the details of the file format. However, some file type may require special handling: GRIB and BUFR files may require special tables that the CDM doesn't have. Open the file as above and see 1) if you get any error messages, 2) if any of the variables have "Unknown" in their name, and 3) if the data looks wrong. If any of those happen, prepare to enter [GRIB table hell realm](grib_tables.html). (BUFR is arguably worse, but there's nothing yet that you can do about it). diff --git a/docs/src/site/pages/netcdfJava_tutorial/overview/BuildingFromSource.md b/docs/src/site/pages/netcdfJava_tutorial/overview/BuildingFromSource.md index 74e432dbeb..c55efd87e1 100644 --- a/docs/src/site/pages/netcdfJava_tutorial/overview/BuildingFromSource.md +++ b/docs/src/site/pages/netcdfJava_tutorial/overview/BuildingFromSource.md @@ -10,7 +10,7 @@ toc: false The netCDF-Java source code is hosted on GitHub, and — as of v4.6.1 — we use Gradle to build it. Ant and Maven builds are no longer supported. -To build, you need Git and, as of v5.9.2, Java 17 or higher (required for Gradle 9.0.0 or higher). +To build, you need Git and, as of v5.10.0, Java 17 or higher (required for Gradle 9.0.0 or higher). Note that the bytecode produced by the Gradle build will be compatible with Java 8, and the test task will run using JDK 8 (thanks to the Gradle Toolchain feature). 
If the build cannot find a suitable JDK for testing, you will need to specify one in your `gradle.properties` file (see the [Gradle toolchain documentation](https://docs.gradle.org/current/userguide/toolchains.html#sec:custom_loc){:target="_blank"} for more details).
 The tests can be run using other JDK versions using `testX`, where `X` is the version number of one of the LTS releases of Java (currently `11`, `17`, `21`, or `25`).
diff --git a/docs/src/site/pages/netcdfJava_tutorial/overview/UsingNetcdfJava.md b/docs/src/site/pages/netcdfJava_tutorial/overview/UsingNetcdfJava.md
index 1387de3817..515a143488 100644
--- a/docs/src/site/pages/netcdfJava_tutorial/overview/UsingNetcdfJava.md
+++ b/docs/src/site/pages/netcdfJava_tutorial/overview/UsingNetcdfJava.md
@@ -35,7 +35,7 @@ For maximum compatibility, please include the Unidata repository after any other
 Next, select modules based on the functionality you need.
 In the minimal case, you’ll just want `cdm-core` and a logger.
-`cdm` implements the CDM data model and allows you to read NetCD-3 files (and a number of other file types).
+`cdm-core` implements the [Common Data Model (CDM)](common_data_model_overview.html) and allows you to read NetCDF-3 files (and a number of other file types).
 An example using JDK14 logging:
 ~~~xml
@@ -128,8 +128,10 @@ dependencies {
 ## Native compression code
+### libaec
+
 netCDF-Java `>=v5.8.0` supports libaec compression for GRIB2 messages using JNA to call the libaec C library.
-To ease the use of this feature, we now distribute a jar file containing the native libraries for the following platforms/architectures:
+To ease the use of this feature, we distribute a jar file containing the native libraries for the following platforms/architectures:
 |---
 | platform | x86-64 | aarch64
 |:-|:-:|:-:
 | Linux | |
@@ -138,14 +140,29 @@ To ease the use of this feature, we now distribute a jar file containing the nat
 | MacOS | |
 | Windows | |
-If you are using on the of the supported platform/architecture combinations above, you may include the `edu.ucar:libaec-native:${netcdfJavaVersion}` artifact in your project to bypass the need to install libaec on your system.
+If you are using one of the supported platform/architecture combinations above, you may include the `edu.ucar.unidata:libaec-native:{{ site.libaec_version }}` artifact in your project to bypass the need to install libaec on your system.
 Otherwise, libaec will need to be installed and reachable in your system library path in order to read data compressed using libaec.
+### C-Blosc2
+
+netCDF-Java `>=v5.10.0` supports blosc compression using JNA to call the C-Blosc2 library.
+To ease the use of this feature, we distribute a jar file containing the native libraries for the following platforms/architectures:
+
+|---
+| platform | x86-64 | aarch64
+|:-|:-:|:-:
+| Linux | |
+| MacOS | |
+| Windows | |
+
+If you are using one of the supported platform/architecture combinations above, you may include the `edu.ucar.unidata:libblosc2-native:{{ site.cblosc2_version }}` artifact in your project to bypass the need to install C-Blosc2 on your system.
+Otherwise, C-Blosc2 will need to be installed and reachable in your system library path in order to read data compressed using blosc.
+
 ## Building with netcdfAll
 This is the appropriate option if you’re not using a dependency management tool like Maven or Gradle and you don’t care about jar size or compatibility with other libraries.
Simply include netcdfAll-${netcdfJavaVersion}.jar on the classpath when you run your program. You’ll also need a logger. -Currently, the netcdfAll jar does not include `cdm-s3` due to the size of the AWS S3 SDK dependency, and does not include the `libaec-native` (native library binaries for libaec). +Currently, the netcdfAll jar does not include `cdm-s3` due to the size of the AWS S3 SDK dependency, and does not include any `edu.ucar.unidata:*-native` jars (native library binaries for compression). The netcdfAll jar can be found at https://downloads.unidata.ucar.edu, or (starting with `5.9.0`) on the [GitHub release page](https://github.com/Unidata/netcdf-java/releases). ## Logging From 5a5e311f290199cf12ac4c7f9ffe8a9112289006 Mon Sep 17 00:00:00 2001 From: Sean Arms <67096+lesserwhirls@users.noreply.github.com> Date: Tue, 30 Dec 2025 11:25:39 -0700 Subject: [PATCH 8/8] Update macos-13 test runner to macos-15-intel --- .github/workflows/test-native-compression.yml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/test-native-compression.yml b/.github/workflows/test-native-compression.yml index f04c18ca96..ad3592d646 100644 --- a/.github/workflows/test-native-compression.yml +++ b/.github/workflows/test-native-compression.yml @@ -15,16 +15,16 @@ jobs: ubuntu-24.04, ubuntu-24.04-arm, windows-2022, - macos-13, macos-14, - macos-15 + macos-15, + macos-15-intel ] name: netCDF-Java Native Compression Tests runs-on: ${{ matrix.os }} steps: - uses: actions/checkout@v4 - name: Setup JDK 8, 17 - if: ${{ matrix.os == 'ubuntu-24.04' || matrix.os == 'macos-13' }} + if: ${{ matrix.os == 'ubuntu-24.04' || matrix.os == 'macos-15-intel' }} uses: actions/setup-java@v5 with: distribution: 'temurin' @@ -32,7 +32,7 @@ jobs: 8 17 - name: Setup JDK 21 - if: ${{ matrix.os != 'ubuntu-24.04' && matrix.os != 'macos-13' }} + if: ${{ matrix.os != 'ubuntu-24.04' && matrix.os != 'macos-15-intel' }} uses: actions/setup-java@v5 with: distribution: 
'temurin' @@ -47,12 +47,12 @@ jobs: restore-keys: | ${{ runner.os }}-gradle- - name: Run libaec JNA tests - if: ${{ matrix.os == 'ubuntu-24.04' || matrix.os == 'macos-13' }} + if: ${{ matrix.os == 'ubuntu-24.04' || matrix.os == 'macos-15-intel' }} run: ./gradlew -Dorg.gradle.java.installations.auto-detect=true clean :libaec-jna:test env: JDK8: /usr/thredds-test-environment/temurin8 - name: Run libaec JNA tests (JDK 21 tests) - if: ${{ matrix.os != 'ubuntu-24.04' && matrix.os != 'macos-13' }} + if: ${{ matrix.os != 'ubuntu-24.04' && matrix.os != 'macos-15-intel' }} run: ./gradlew clean :libaec-jna:test21 - uses: actions/upload-artifact@v4 if: failure()