Ngff export axis order #100

Merged · 3 commits · Dec 4, 2024
Changes from all commits
@@ -532,12 +532,7 @@ public <T extends RealType<T> & NativeType<T>, M extends N5DatasetMetadata, N ex
// get the image to save
final RandomAccessibleInterval<T> baseImg = getBaseImage();

final M baseMetadata;
if (impMeta != null)
baseMetadata = (M)impMeta.readMetadata(image);
else
baseMetadata = null;

final M baseMetadata = initializeBaseMetadata();
currentChannelMetadata = copyMetadata(baseMetadata);
M currentMetadata;

@@ -561,6 +556,7 @@ public <T extends RealType<T> & NativeType<T>, M extends N5DatasetMetadata, N ex
final double[] currentResolution = new double[nd];
System.arraycopy(baseResolution, 0, currentResolution, 0, nd);

// TODO here
final N multiscaleMetadata = initializeMultiscaleMetadata((M)currentMetadata, channelDataset);
currentTranslation = new double[nd];

@@ -629,6 +625,53 @@ public <T extends RealType<T> & NativeType<T>, M extends N5DatasetMetadata, N ex
n5.close();
}

@SuppressWarnings("unchecked")
protected <M extends N5DatasetMetadata> M initializeBaseMetadata() {

M baseMetadata = null;
if (impMeta != null) {
try {
baseMetadata = (M) impMeta.readMetadata(image);
} catch (IOException e) {
// if the metadata cannot be read, fall through and leave baseMetadata null
}
}

if (impMeta instanceof NgffToImagePlus) {

/*
* ImagePlus axes need to be permuted before conversion to NGFF (i.e. from XYCZT
* to XYZCT). The data are permuted elsewhere; here we ensure the metadata
* reflect that change.
*/
final NgffSingleScaleAxesMetadata ngffMeta = (NgffSingleScaleAxesMetadata) baseMetadata;
baseMetadata = (M) new NgffSingleScaleAxesMetadata(ngffMeta.getPath(), ngffMeta.getScale(),
ngffMeta.getTranslation(), permuteAxesForNgff(ngffMeta.getAxes()), ngffMeta.getAttributes());
}

return baseMetadata;
}

protected Axis[] permuteAxesForNgff(final Axis[] axes) {

boolean hasC = false;
boolean hasZ = false;
boolean hasT = false;
for (int i = 0; i < axes.length; i++) {
hasC = hasC || axes[i].getName().equals("c");
hasZ = hasZ || axes[i].getName().equals("z");
hasT = hasT || axes[i].getName().equals("t");
}

if (hasC && hasZ) {
if (hasT)
return new Axis[] { axes[0], axes[1], axes[3], axes[2], axes[4] };
else
return new Axis[] { axes[0], axes[1], axes[3], axes[2] };
}

return axes;
}
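// Illustrative note (editor's comment, not part of the diff): given ImageJ-ordered
// axes named [x, y, c, z, t], the method above returns [x, y, z, c, t]; for a 4D
// [x, y, c, z] input it returns [x, y, z, c]; inputs lacking either c or z pass
// through unchanged.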

protected void initializeDataset() {

dataset = image.getShortTitle();
@@ -936,10 +979,16 @@ protected <T extends RealType<T> & NativeType<T>, M extends N5DatasetMetadata> L
// some metadata styles never split channels, return input image in that
// case
if (metadataStyle.equals(NONE) || metadataStyle.equals(N5Importer.MetadataCustomKey) ||
metadataStyle.equals(N5Importer.MetadataOmeZarrKey) ||
metadataStyle.equals(N5Importer.MetadataImageJKey)) {
return Collections.singletonList(img);
}
else if (metadataStyle.equals(N5Importer.MetadataOmeZarrKey)) {
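// data-side counterpart of the metadata permutation in initializeBaseMetadata():
// ImagePlus pixel data are ordered XYCZT, but the OME-Zarr export writes XYZCT,
// so swap dimensions 2 (C) and 3 (Z) when both are non-singleton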

if (image.getNChannels() > 1 && image.getNSlices() > 1)
return Collections.singletonList(Views.permute(img, 2, 3));
else
return Collections.singletonList(img);
}

// otherwise, split channels
final ArrayList<RandomAccessibleInterval<T>> channels = new ArrayList<>();
@@ -962,6 +1011,7 @@ protected <T extends RealType<T> & NativeType<T>, M extends N5DatasetMetadata> L
// make a 4d image in order XYZT
channelImg = Views.permute(Views.addDimension(channelImg, 0, 0), 2, 3);
}

channels.add(channelImg);
}

@@ -1311,16 +1361,6 @@ private static <T extends NativeType<T>> RandomAccessibleInterval<T> downsampleA
return BlockAlgoUtils.cellImg(blocks, dimensions, cellDimensions);
}

private int[] sliceBlockSize(final int exclude) {

return removeElement(chunkSize, exclude);
}

private long[] sliceDownsamplingFactors(final int exclude) {

return removeElement(currentAbsoluteDownsampling, exclude);
}

private static int[] removeElement(final int[] arr, final int excludeIndex) {

final int[] out = new int[arr.length - 1];
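To make the exporter changes above concrete, here is a minimal, self-contained sketch (editor's illustration, not code from this PR; the class and method names are hypothetical) of the axis reordering being introduced: ImageJ hyperstacks are ordered XYCZT, while the OME-NGFF export stores data as XYZCT and lists the axes metadata slowest-first (t, c, z, y, x).

import java.util.Arrays;

public class NgffAxisOrderSketch {

	// reorder ImageJ's XYCZT axis names into the XYZCT layout the exporter writes
	static String[] permuteXyczt(final String[] axes) {
		// mirror of permuteAxesForNgff: swap indices 2 (c) and 3 (z) for a full 5D array
		if (axes.length == 5)
			return new String[] { axes[0], axes[1], axes[3], axes[2], axes[4] };
		return axes;
	}

	public static void main(final String[] args) {
		// prints [x, y, z, c, t], the on-disk order; the NGFF axes attribute lists
		// the same axes slowest-first: t, c, z, y, x
		System.out.println(Arrays.toString(permuteXyczt(new String[] { "x", "y", "c", "z", "t" })));
	}
}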
116 changes: 101 additions & 15 deletions src/test/java/org/janelia/saalfeldlab/n5/metadata/NgffTests.java
@@ -1,36 +1,61 @@
package org.janelia.saalfeldlab.n5.metadata;

import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.util.Arrays;

import org.janelia.saalfeldlab.n5.DatasetAttributes;
import org.janelia.saalfeldlab.n5.N5Exception;
import org.janelia.saalfeldlab.n5.N5FSReader;
import org.janelia.saalfeldlab.n5.N5Reader;
import org.janelia.saalfeldlab.n5.ij.N5Importer;
import org.janelia.saalfeldlab.n5.ij.N5ScalePyramidExporter;
import org.janelia.saalfeldlab.n5.universe.N5Factory;
import org.janelia.saalfeldlab.n5.universe.metadata.NgffMultiScaleGroupAttributes;
import org.janelia.saalfeldlab.n5.universe.metadata.NgffMultiScaleGroupAttributes.MultiscaleDataset;
import org.janelia.saalfeldlab.n5.universe.metadata.axes.Axis;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;


import ij.ImagePlus;
import ij.gui.NewImage;

public class NgffTests {

private N5FSReader n5;

@Before
public void setUp() throws IOException {

final String n5Root = "src/test/resources/ngff.n5";
n5 = new N5FSReader(n5Root);
}

private final String n5Root = "src/test/resources/ngff.n5";

private static File baseDir;

@BeforeClass
public static void setup() {

try {
baseDir = Files.createTempDirectory("ngff-tests-").toFile();
baseDir.deleteOnExit();

} catch (IOException e) {
e.printStackTrace();
}
}

@Test
public void testNgffGroupAttributeParsing() {

final double eps = 1e-9;
try {
NgffMultiScaleGroupAttributes[] multiscales = n5.getAttribute("ngff_grpAttributes", "multiscales", NgffMultiScaleGroupAttributes[].class );
try( final N5FSReader n5 = new N5FSReader(n5Root) ) {

NgffMultiScaleGroupAttributes[] multiscales = n5.getAttribute("ngff_grpAttributes", "multiscales",
NgffMultiScaleGroupAttributes[].class);
Assert.assertEquals("one set of multiscales", 1, multiscales.length);

MultiscaleDataset[] datasets = multiscales[0].datasets;
Assert.assertEquals("num levels", 6, datasets.length);

@@ -48,6 +73,67 @@ public void testNgffGroupAttributeParsing() {
fail("Ngff parsing failed");
e.printStackTrace();
}
}
}

@Test
public void testNgffExportAxisOrder() {

testNgfffAxisOrder("xyczt", new int[] { 10, 8, 6, 4, 2 });

testNgfffAxisOrder("xyzt", new int[] { 10, 8, 1, 4, 2 });
testNgfffAxisOrder("xyct", new int[] { 10, 8, 6, 1, 2 });
testNgfffAxisOrder("xycz", new int[] { 10, 8, 6, 4, 1 });

testNgfffAxisOrder("xyc", new int[] { 10, 8, 6, 1, 1 });
testNgfffAxisOrder("xyz", new int[] { 10, 8, 1, 4, 1 });
testNgfffAxisOrder("xyt", new int[] { 10, 8, 1, 1, 2 });
}

public void testNgfffAxisOrder(final String dataset, int[] size) {

final int nx = size[0];
final int ny = size[1];
final int nc = size[2];
final int nz = size[3];
final int nt = size[4];

final String metadataType = N5Importer.MetadataOmeZarrKey;
final String compressionType = N5ScalePyramidExporter.RAW_COMPRESSION;

final ImagePlus imp = NewImage.createImage("test", nx, ny, nz * nc * nt, 8, NewImage.FILL_BLACK);
imp.setDimensions(nc, nz, nt);

final N5ScalePyramidExporter writer = new N5ScalePyramidExporter();
writer.setOptions(imp, baseDir.getAbsolutePath(), dataset, N5ScalePyramidExporter.ZARR_FORMAT, "64", false,
N5ScalePyramidExporter.DOWN_SAMPLE, metadataType, compressionType);
writer.run();

final long[] expectedDims = Arrays.stream(new long[] { nx, ny, nz, nc, nt }).filter(x -> x > 1).toArray();

try (final N5Reader n5 = new N5Factory().openReader(baseDir.getAbsolutePath())) {

assertTrue(n5.exists(dataset));
assertTrue(n5.datasetExists(dataset + "/s0"));

final DatasetAttributes dsetAttrs = n5.getDatasetAttributes(dataset + "/s0");
assertArrayEquals("dimensions", expectedDims, dsetAttrs.getDimensions());

int i = 0;
final Axis[] axes = n5.getAttribute(dataset, "multiscales[0]/axes", Axis[].class);

if (nt > 1)
assertEquals("t", axes[i++].getName());

if (nc > 1)
assertEquals("c", axes[i++].getName());

if (nz > 1)
assertEquals("z", axes[i++].getName());

assertEquals("y", axes[i++].getName());
assertEquals("x", axes[i++].getName());
}

}

}
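For readers tracing the assertions, a short worked example (editor's sketch, not part of the PR; the class name is hypothetical) of how the expected on-disk dimensions follow from the "xyczt" test case:

import java.util.Arrays;

public class ExpectedDimsSketch {

	public static void main(final String[] args) {
		// the "xyczt" case: size = {nx, ny, nc, nz, nt} = {10, 8, 6, 4, 2}
		final int nx = 10, ny = 8, nc = 6, nz = 4, nt = 2;
		// the exporter writes XYZCT and drops singleton dimensions
		final long[] expectedDims = Arrays.stream(new long[] { nx, ny, nz, nc, nt })
				.filter(x -> x > 1)
				.toArray();
		System.out.println(Arrays.toString(expectedDims)); // [10, 8, 4, 6, 2]
	}
}

The corresponding multiscales axes attribute is asserted in reverse (slowest-first) order: t, c, z, y, x.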