diff --git a/README.md b/README.md index 8c4d953..35b9f9e 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# Hatch +# Hatch 3.0.0 This tool converts the largest image in a VSI, SVS, or TIF image into a new TIFF image with a freshly created image pyramid with each scaling 1/2 dimensions each scale. @@ -12,15 +12,16 @@ Features: Usage: ``` -hatch +hatch -src -dest + +or see + +hatch -help ``` Credits and Acknowledgements: Many of the core files are modified files from the OME Bioformats Projects (https://github.com/ome/bioformats) such as (but not limited to): CellSensReader -OMETiffWriter TiffCompression TiffParser -TiffSaver -TiffWriter diff --git a/dependency-reduced-pom.xml b/dependency-reduced-pom.xml index 2742a53..8be93ca 100644 --- a/dependency-reduced-pom.xml +++ b/dependency-reduced-pom.xml @@ -3,7 +3,7 @@ 4.0.0 edu.stonybrook.bmi hatch - 2.2.0 + 3.0.0 @@ -29,11 +29,19 @@ maven-enforcer-plugin 3.3.0 - - - - - + + + enforce + + enforce + + + + + + + + maven-shade-plugin @@ -108,7 +116,7 @@ build-native package - build + compile-no-fork @@ -116,7 +124,6 @@ hatch edu.stonybrook.bmi.hatch.Hatch true - true true true @@ -124,7 +131,6 @@ -H:ConfigurationFileDirectories=config --enable-url-protocols=https -H:+AddAllCharsets - --static diff --git a/ha.txt b/ha.txt new file mode 100644 index 0000000..c22df96 --- /dev/null +++ b/ha.txt @@ -0,0 +1,63 @@ +[INFO] edu.stonybrook.bmi:hatch:jar:3.0.0 +[INFO] +- ome:formats-gpl:jar:6.12.0:compile +[INFO] | +- org.openmicroscopy:ome-common:jar:6.0.14:compile +[INFO] | | +- io.minio:minio:jar:5.0.2:compile +[INFO] | | | +- com.google.http-client:google-http-client-xml:jar:1.20.0:compile +[INFO] | | | | +- com.google.http-client:google-http-client:jar:1.20.0:compile +[INFO] | | | | | \- org.apache.httpcomponents:httpclient:jar:4.0.1:compile +[INFO] | | | | | \- org.apache.httpcomponents:httpcore:jar:4.0.1:compile +[INFO] | | | | \- xpp3:xpp3:jar:1.1.4c:compile +[INFO] | | | +- com.squareup.okhttp3:okhttp:jar:3.7.0:compile +[INFO] | | | \- com.squareup.okio:okio:jar:1.12.0:compile +[INFO] | | +- com.fasterxml.jackson.core:jackson-databind:jar:2.14.1:compile +[INFO] | | | \- com.fasterxml.jackson.core:jackson-core:jar:2.14.1:compile +[INFO] | | +- com.fasterxml.jackson.core:jackson-annotations:jar:2.14.1:compile +[INFO] | | +- com.google.guava:guava:jar:29.0-jre:compile +[INFO] | | | +- com.google.guava:failureaccess:jar:1.0.1:compile +[INFO] | | | +- com.google.guava:listenablefuture:jar:9999.0-empty-to-avoid-conflict-with-guava:compile +[INFO] | | | +- com.google.code.findbugs:jsr305:jar:3.0.2:compile +[INFO] | | | +- org.checkerframework:checker-qual:jar:2.11.1:compile +[INFO] | | | +- com.google.errorprone:error_prone_annotations:jar:2.3.4:compile +[INFO] | | | \- com.google.j2objc:j2objc-annotations:jar:1.3:compile +[INFO] | | +- xalan:serializer:jar:2.7.2:runtime +[INFO] | | \- xalan:xalan:jar:2.7.2:runtime +[INFO] | +- org.openmicroscopy:ome-xml:jar:6.3.2:compile +[INFO] | | \- org.openmicroscopy:specification:jar:6.3.2:compile +[INFO] | +- ome:formats-bsd:jar:6.12.0:compile +[INFO] | | +- org.openmicroscopy:ome-codecs:jar:0.4.4:compile +[INFO] | | | +- org.openmicroscopy:ome-jai:jar:0.1.3:compile +[INFO] | | | \- io.airlift:aircompressor:jar:0.21:compile +[INFO] | | +- ome:turbojpeg:jar:6.12.0:compile +[INFO] | | +- org.scijava:native-lib-loader:jar:2.1.4:compile +[INFO] | | +- com.jgoodies:jgoodies-forms:jar:1.7.2:compile +[INFO] | | | \- com.jgoodies:jgoodies-common:jar:1.7.0:compile +[INFO] | | +- 
commons-lang:commons-lang:jar:2.6:compile +[INFO] | | +- org.perf4j:perf4j:jar:0.9.16:compile +[INFO] | | +- cisd:jhdf5:jar:19.04.0:compile +[INFO] | | | +- cisd:base:jar:18.09.0:compile +[INFO] | | | +- commons-io:commons-io:jar:2.7:compile +[INFO] | | | \- org.apache.commons:commons-lang3:jar:3.10:compile +[INFO] | | +- com.drewnoakes:metadata-extractor:jar:2.18.0:compile +[INFO] | | | \- com.adobe.xmp:xmpcore:jar:6.1.11:compile +[INFO] | | +- ome:jxrlib-all:jar:0.2.4:compile +[INFO] | | +- xerces:xercesImpl:jar:2.12.2:compile +[INFO] | | +- xml-apis:xml-apis:jar:1.4.01:compile +[INFO] | | \- org.yaml:snakeyaml:jar:1.32:compile +[INFO] | +- ome:formats-api:jar:6.12.0:compile +[INFO] | +- org.openmicroscopy:ome-mdbtools:jar:5.3.2:compile +[INFO] | +- org.openmicroscopy:metakit:jar:5.3.5:compile +[INFO] | +- org.openmicroscopy:ome-poi:jar:5.3.7:compile +[INFO] | | \- commons-logging:commons-logging:jar:1.2:compile +[INFO] | +- org.slf4j:slf4j-api:jar:1.7.30:compile +[INFO] | +- woolz:JWlz:jar:1.4.0:compile +[INFO] | +- joda-time:joda-time:jar:2.2:compile +[INFO] | +- com.esotericsoftware:kryo:jar:4.0.2:compile +[INFO] | | +- com.esotericsoftware:reflectasm:jar:1.11.3:compile +[INFO] | | | \- org.ow2.asm:asm:jar:5.0.4:compile +[INFO] | | +- com.esotericsoftware:minlog:jar:1.3.0:compile +[INFO] | | \- org.objenesis:objenesis:jar:2.5.1:compile +[INFO] | +- org.json:json:jar:20090211:compile +[INFO] | \- org.xerial:sqlite-jdbc:jar:3.28.0:compile +[INFO] +- com.beust:jcommander:jar:1.82:compile +[INFO] \- com.github.jai-imageio:jai-imageio-jpeg2000:jar:1.4.0:compile +[INFO] \- com.github.jai-imageio:jai-imageio-core:jar:1.4.0:compile diff --git a/nbactions-hatchjar.xml b/nbactions-hatchjar.xml index 28a6694..dfa312e 100644 --- a/nbactions-hatchjar.xml +++ b/nbactions-hatchjar.xml @@ -12,7 +12,7 @@ ${exec.vmArgs} -classpath %classpath ${exec.mainClass} ${exec.appArgs} - -v -src \vsi\tk\7-24-20 -dest \vsi\tk\wow + -v -src \boom\TCGA-AC-A2QJ-01Z-00-DX1.48C303BB-5A23-4037-BD28-77629A8CD9DA.svs -dest \boom\TCGA-AC-A2QJ-01Z-00-DX1.48C303BB-5A23-4037-BD28-77629A8CD9DA.tif edu.stonybrook.bmi.hatch.Hatch java @@ -29,7 +29,7 @@ -agentlib:jdwp=transport=dt_socket,server=n,address=${jpda.address} ${exec.vmArgs} -classpath %classpath ${exec.mainClass} ${exec.appArgs} - -v -src \vsi\tk\7-24-20 -dest \vsi\tk\wow + -v -src \boom\TCGA-AC-A2QJ-01Z-00-DX1.48C303BB-5A23-4037-BD28-77629A8CD9DA.svs -dest \boom\TCGA-AC-A2QJ-01Z-00-DX1.48C303BB-5A23-4037-BD28-77629A8CD9DA.tif edu.stonybrook.bmi.hatch.Hatch java true @@ -49,7 +49,7 @@ ${exec.vmArgs} -classpath %classpath ${exec.mainClass} ${exec.appArgs} edu.stonybrook.bmi.hatch.Hatch java - -v -src \vsi\tk\7-24-20 -dest \vsi\tk\wow + -v -src \boom\TCGA-AC-A2QJ-01Z-00-DX1.48C303BB-5A23-4037-BD28-77629A8CD9DA.svs -dest \boom\TCGA-AC-A2QJ-01Z-00-DX1.48C303BB-5A23-4037-BD28-77629A8CD9DA.tif diff --git a/pom.xml b/pom.xml index 33eed24..80ebc13 100644 --- a/pom.xml +++ b/pom.xml @@ -3,7 +3,7 @@ 4.0.0 edu.stonybrook.bmi hatch - 2.2.0 + 3.0.0 jar UTF-8 @@ -29,12 +29,23 @@ jcommander 1.82 + @@ -76,9 +87,19 @@ org.apache.maven.plugins maven-enforcer-plugin 3.3.0 - - - + + + enforce + + enforce + + + + + + + + org.apache.maven.plugins @@ -131,7 +152,7 @@ build-native - build + compile-no-fork package @@ -140,7 +161,6 @@ hatch edu.stonybrook.bmi.hatch.Hatch true - true true true diff --git a/src/main/java/edu/stonybrook/bmi/hatch/BaseTiffReader.java b/src/main/java/edu/stonybrook/bmi/hatch/BaseTiffReader.java index 2759ab0..5585f92 100644 --- 
a/src/main/java/edu/stonybrook/bmi/hatch/BaseTiffReader.java +++ b/src/main/java/edu/stonybrook/bmi/hatch/BaseTiffReader.java @@ -95,6 +95,11 @@ protected void initMetadata() throws FormatException, IOException { initStandardMetadata(); initMetadataStore(); } + + @Override + public IFDList getIFDs() { + return ifds; + } /** * Parses standard metadata. diff --git a/src/main/java/edu/stonybrook/bmi/hatch/Bug2022.java b/src/main/java/edu/stonybrook/bmi/hatch/Bug2022.java index f80ebb6..17c7fc8 100644 --- a/src/main/java/edu/stonybrook/bmi/hatch/Bug2022.java +++ b/src/main/java/edu/stonybrook/bmi/hatch/Bug2022.java @@ -1,7 +1,3 @@ -/* - * Click nbfs://nbhost/SystemFileSystem/Templates/Licenses/license-default.txt to change this license - * Click nbfs://nbhost/SystemFileSystem/Templates/Classes/Class.java to edit this template - */ package edu.stonybrook.bmi.hatch; /** diff --git a/src/main/java/edu/stonybrook/bmi/hatch/CellSensReader.java b/src/main/java/edu/stonybrook/bmi/hatch/CellSensReader.java index 4d3eaae..79c4b5a 100644 --- a/src/main/java/edu/stonybrook/bmi/hatch/CellSensReader.java +++ b/src/main/java/edu/stonybrook/bmi/hatch/CellSensReader.java @@ -358,22 +358,16 @@ public class CellSensReader extends FormatReader { // -- Fields -- private String[] usedFiles; - private HashMap fileMap = new HashMap(); - + private HashMap fileMap = new HashMap<>(); private TiffParser parser; private IFDList ifds; - private ArrayList tileOffsets = new ArrayList(); - private boolean jpeg = false; - private ArrayList rows = new ArrayList(); private ArrayList cols = new ArrayList(); private ArrayList compressionType = new ArrayList(); private ArrayList tileX = new ArrayList(); private ArrayList tileY = new ArrayList(); - - private ArrayList> tileMap = - new ArrayList>(); + private ArrayList> tileMap = new ArrayList>(); private ArrayList nDimensions = new ArrayList(); private boolean inDimensionProperties = false; private boolean foundChannelTag = false; @@ -403,7 +397,6 @@ public CellSensReader() { // -- CellSensReader API methods -- - public boolean failOnMissingETS() { MetadataOptions options = getMetadataOptions(); if (options instanceof DynamicMetadataOptions) { @@ -420,6 +413,11 @@ public boolean failOnMissingETS() { public int fileGroupOption(String id) throws FormatException, IOException { return FormatTools.MUST_GROUP; } + + @Override + public IFDList getIFDs() { + return ifds; + } /* @see loci.formats.IFormatReader#isSingleFile(String) */ @Override @@ -494,9 +492,7 @@ public byte[] openThumbBytes(int no) throws FormatException, IOException { * @see loci.formats.IFormatReader#openBytes(int, byte[], int, int, int, int) */ @Override - public byte[] openBytes(int no, byte[] buf, int x, int y, int w, int h) - throws FormatException, IOException - { + public byte[] openBytes(int no, byte[] buf, int x, int y, int w, int h) throws FormatException, IOException { FormatTools.checkPlaneParameters(this, no, buf.length, x, y, w, h); if (getCoreIndex() < core.size() - 1 && getCoreIndex() < rows.size()) { @@ -508,8 +504,7 @@ public byte[] openBytes(int no, byte[] buf, int x, int y, int w, int h) Region intersection = null; byte[] tileBuf = null; - int pixel = - getRGBChannelCount() * FormatTools.getBytesPerPixel(getPixelType()); + int pixel = getRGBChannelCount() * FormatTools.getBytesPerPixel(getPixelType()); int outputRowLen = w * pixel; for (int row=0; row "+currentId); parser = new TiffParser(currentId); } @@ -575,7 +572,6 @@ public void close(boolean fileOnly) throws IOException { usedFiles = null; 
fileMap.clear(); tileOffsets.clear(); - jpeg = false; rows.clear(); cols.clear(); compressionType.clear(); @@ -619,24 +615,18 @@ protected void initFile(String id) throws FormatException, IOException { id = vsiFile.getAbsolutePath(); } } - parser = new TiffParser(id); ifds = parser.getMainIFDs(); - try (RandomAccessInputStream vsi = new RandomAccessInputStream(id)) { vsi.order(parser.getStream().isLittleEndian()); vsi.seek(8); readTags(vsi, false, ""); } - - ArrayList files = new ArrayList(); + ArrayList files = new ArrayList<>(); Location file = new Location(id).getAbsoluteFile(); - Location dir = file.getParentFile(); - String name = file.getName(); name = name.substring(0, name.lastIndexOf(".")); - Location pixelsDir = new Location(dir, "_" + name + "_"); String[] stackDirs = pixelsDir.list(true); if (stackDirs != null) { @@ -656,7 +646,6 @@ protected void initFile(String id) throws FormatException, IOException { } files.add(file.getAbsolutePath()); usedFiles = files.toArray(new String[files.size()]); - if (expectETS && files.size() == 1) { String message = "Missing expected .ets files in " + pixelsDir.getAbsolutePath(); if (failOnMissingETS()) { @@ -988,23 +977,27 @@ public byte[] getRaw(byte[] rawbuffer, int no, int row, int col) throws FormatEx } Pyramid pyramid = pyramids.get(pyramidIndex); for (String dim : pyramid.dimensionOrdering.keySet()) { - int index = pyramid.dimensionOrdering.get(dim) + 2; - if (dim.equals("Z")) { - t.coordinate[index] = zct[0]; - } - else if (dim.equals("C")) { - t.coordinate[index] = zct[1]; - } - else if (dim.equals("T")) { - t.coordinate[index] = zct[2]; - } + int index = pyramid.dimensionOrdering.get(dim) + 2; + switch (dim) { + case "Z": + t.coordinate[index] = zct[0]; + break; + case "C": + t.coordinate[index] = zct[1]; + break; + case "T": + t.coordinate[index] = zct[2]; + break; + default: + break; + } } if (resIndex > 0) { t.coordinate[t.coordinate.length - 1] = resIndex; } ArrayList map = tileMap.get(getCoreIndex()); Integer index = map.indexOf(t); - if (index == null || index < 0) { + if (index < 0) { // fill in the tile with the stored background color // usually this is either black or white byte[] tile = new byte[getTileSize()]; @@ -1020,34 +1013,35 @@ else if (dim.equals("T")) { ByteArrayOutputStream baos = new ByteArrayOutputStream(); ImageIO.write(bi, "jpg", baos ); tile = baos.toByteArray(); - System.out.println("BUFFER UNDER"); - return tile; + throw new Error("BUFFER UNDER"); + //return tile; } Long offset = tileOffsets.get(getCoreIndex())[index]; byte[] buf = null; IFormatReader reader = null; try (RandomAccessInputStream ets = new RandomAccessInputStream(fileMap.get(getCoreIndex()))) { - ets.seek(offset); - CodecOptions options = new CodecOptions(); - options.interleaved = isInterleaved(); - options.littleEndian = isLittleEndian(); - int tileSize = getTileSize(); - if (tileSize == 0) { - tileSize = tileX.get(getCoreIndex()) * tileY.get(getCoreIndex()) * 10; - } - options.maxBytes = (int) (offset + tileSize); - // long end = index < tileOffsets.get(getCoreIndex()).length - 1 ? 
tileOffsets.get(getCoreIndex())[index + 1] : ets.length(); - switch (compressionType.get(getCoreIndex())) { - case JPEG: - buf = JPEGTools.FindFirstEOI(ets,rawbuffer); - break; - default: - throw new Error("NOT JPEG!!"); - } + ets.seek(offset); + CodecOptions options = new CodecOptions(); + options.interleaved = isInterleaved(); + options.littleEndian = isLittleEndian(); + int tileSize = getTileSize(); + if (tileSize == 0) { + tileSize = tileX.get(getCoreIndex()) * tileY.get(getCoreIndex()) * 10; + } + options.maxBytes = (int) (offset + tileSize); + // long end = index < tileOffsets.get(getCoreIndex()).length - 1 ? tileOffsets.get(getCoreIndex())[index + 1] : ets.length(); + switch (compressionType.get(getCoreIndex())) { + case JPEG: + buf = JPEGTools.FindFirstEOI(ets,rawbuffer); + //X2TIF.Display(buf, 0, 0); + break; + default: + throw new Error("NOT JPEG!!"); + } } finally { - if (reader != null) { - reader.close(); - } + if (reader != null) { + reader.close(); + } } return buf; } @@ -1224,8 +1218,7 @@ private void parseETSFile(RandomAccessInputStream etsFile, String file, int s) int tileZ = etsFile.readInt(); etsFile.skipBytes(4 * 17); // pixel info hints - byte[] color = new byte[ - ms.sizeC * FormatTools.getBytesPerPixel(convertPixelType(pixelType))]; + byte[] color = new byte[ms.sizeC * FormatTools.getBytesPerPixel(convertPixelType(pixelType))]; etsFile.read(color); backgroundColor.put(getCoreIndex(), color); @@ -1242,7 +1235,7 @@ private void parseETSFile(RandomAccessInputStream etsFile, String file, int s) tileOffsets.add(new Long[nUsedChunks]); - ArrayList tmpTiles = new ArrayList(); + ArrayList tmpTiles = new ArrayList<>(); for (int chunk=0; chunk(); + metadata = new Hashtable<>(); - core = new ArrayList(); + core = new ArrayList<>(); CoreMetadata core0 = new CoreMetadata(); core.add(core0); core0.orderCertain = true; @@ -236,9 +236,10 @@ protected void initFile(String id) throws FormatException, IOException { } } - /** Returns the list of available metadata options. */ + /** Returns the list of available metadata options. + * @return */ protected ArrayList getAvailableOptions() { - ArrayList optionsList = new ArrayList(); + ArrayList optionsList = new ArrayList<>(); optionsList.add(DynamicMetadataOptions.METADATA_LEVEL_KEY); optionsList.add(DynamicMetadataOptions.READER_VALIDATE_KEY); return optionsList; @@ -256,22 +257,14 @@ protected boolean isUsedFile(String file) { } /** Adds an entry to the specified Hashtable. */ - protected void addMeta(String key, Object value, - Hashtable meta) - { - if (key == null || value == null || - getMetadataOptions().getMetadataLevel() == MetadataLevel.MINIMUM) - { + protected void addMeta(String key, Object value, Hashtable meta) { + if (key == null || value == null || getMetadataOptions().getMetadataLevel() == MetadataLevel.MINIMUM) { return; } - key = key.trim(); - boolean string = value instanceof String || value instanceof Character; - // string value, if passed in value is a string String val = string ? 
String.valueOf(value) : null; - if (filterMetadata || (saveOriginalMetadata && (getMetadataStore() instanceof OMEXMLMetadata))) { @@ -1843,5 +1836,5 @@ private boolean isOmero(String id) { public abstract byte[] getRawBytes(byte[] rawbuffer, int no, int row, int col); public abstract byte[] getRawBytes(IFD ifd, int no, int row, int col); public abstract byte[] getDecodedTile(byte[] rawbuffer, int no, int row, int col); - + public abstract IFDList getIFDs(); } diff --git a/src/main/java/edu/stonybrook/bmi/hatch/Hatch.java b/src/main/java/edu/stonybrook/bmi/hatch/Hatch.java index 0df2049..8dae8e7 100644 --- a/src/main/java/edu/stonybrook/bmi/hatch/Hatch.java +++ b/src/main/java/edu/stonybrook/bmi/hatch/Hatch.java @@ -13,14 +13,13 @@ import java.util.logging.Level; import java.util.logging.Logger; import java.util.stream.Stream; -import org.apache.jena.rdf.model.Model; /** * * @author erich */ public class Hatch { - public static String software = "hatch 2.2.0 by Wing-n-Beak"; + public static String software = "hatch 3.0.0 by Wing-n-Beak"; private static final String[] ext = new String[] {".vsi", ".svs", ".tif"}; public static final String HELP = Hatch.software+"\n"+ """ @@ -86,7 +85,6 @@ public static void main(String[] args) { System.exit(0); } else if (params.src.exists()) { if (params.src.isDirectory()) { - System.out.println(params.src+" is directory"); if (!params.dest.exists()) { params.dest.mkdir(); } @@ -96,15 +94,11 @@ public static void main(String[] args) { Traverse(params); } } else { - System.out.println(params.src+" is not a directory"); if (params.dest.exists()) { - System.out.println(params.dest+" does exist"); if (!params.dest.isDirectory()) { - System.out.println(params.dest+" is not a directory"); - X2TIF v2t = new X2TIF(params, params.src.toString(), params.dest.toString()); - v2t.Execute(); - try { - v2t.close(); + params.dest.delete(); + try (X2TIF v2t = new X2TIF(params, params.src.toString(), params.dest.toString())){ + v2t.Execute(); } catch (Exception ex) { Logger.getLogger(Hatch.class.getName()).log(Level.SEVERE, null, ex); } @@ -112,9 +106,11 @@ public static void main(String[] args) { jc.usage(); } } else { - System.out.println(params.dest+" does not exist"); - X2TIF v2t = new X2TIF(params, params.src.toString(), params.dest.toString()); - v2t.Execute(); + try (X2TIF v2t = new X2TIF(params, params.src.toString(), params.dest.toString());) { + v2t.Execute(); + } catch (Exception ex) { + Logger.getLogger(Hatch.class.getName()).log(Level.SEVERE, null, ex); + } } } } else { @@ -126,7 +122,7 @@ public static void main(String[] args) { } } -class FileProcessor implements Callable { +class FileProcessor implements Callable { private final HatchParameters params; private final String src; private final String dest; @@ -138,7 +134,7 @@ public FileProcessor(HatchParameters params, String src, String dest) { } @Override - public Model call() { + public String call() { try (X2TIF v2t = new X2TIF(params, src, dest)) { v2t.Execute(); } catch (Exception ex) { diff --git a/src/main/java/edu/stonybrook/bmi/hatch/HatchParameters.java b/src/main/java/edu/stonybrook/bmi/hatch/HatchParameters.java index f535da5..2ae5f21 100644 --- a/src/main/java/edu/stonybrook/bmi/hatch/HatchParameters.java +++ b/src/main/java/edu/stonybrook/bmi/hatch/HatchParameters.java @@ -27,7 +27,7 @@ public boolean isHelp() { public int fp = 1; @Parameter(names = "-cores", description = "# of cores for processing") - public int cores = 1; + public int cores = Runtime.getRuntime().availableProcessors(); 
@Parameter public List parameters = Lists.newArrayList(); @@ -37,4 +37,7 @@ public boolean isHelp() { @Parameter(names = "-meta") public boolean meta = false; + + @Parameter(names = "-jp2", hidden = true) + public boolean jp2 = false; } \ No newline at end of file diff --git a/src/main/java/edu/stonybrook/bmi/hatch/HatchSaver.java b/src/main/java/edu/stonybrook/bmi/hatch/HatchSaver.java new file mode 100644 index 0000000..49d1fd4 --- /dev/null +++ b/src/main/java/edu/stonybrook/bmi/hatch/HatchSaver.java @@ -0,0 +1,74 @@ +package edu.stonybrook.bmi.hatch; + +import java.io.IOException; +import loci.common.RandomAccessOutputStream; +import loci.formats.FormatException; +import loci.formats.tiff.IFD; +import loci.formats.tiff.TiffSaver; + +/** + * + * @author erich + */ +public class HatchSaver extends TiffSaver { + private Long sequentialTileFilePointer; + + /** + * + * @param out + * @param filename + */ + public HatchSaver(RandomAccessOutputStream out, String filename) { + super(out,filename); + } + + public void writeIFDStrips(IFD ifd, byte[][] strips, int nChannels, boolean last, int x, int y) throws FormatException, IOException { + int tilesPerRow = (int) ifd.getTilesPerRow(); + int tilesPerColumn = (int) ifd.getTilesPerColumn(); + boolean interleaved = ifd.getPlanarConfiguration() == 1; + long[] byteCounts; + long[] offsets; + long[] ifdByteCounts = ifd.getIFDLongArray(IFD.TILE_BYTE_COUNTS); + byteCounts = ifdByteCounts; + int tileOrStripOffsetX = x / (int) ifd.getTileWidth(); + int tileOrStripOffsetY = y / (int) ifd.getTileLength(); + int firstOffset = (tileOrStripOffsetY * tilesPerRow) + tileOrStripOffsetX; + long[] ifdOffsets = ifd.getIFDLongArray(IFD.TILE_OFFSETS); + offsets = ifdOffsets.clone(); + ifd.putIFDValue(IFD.TILE_BYTE_COUNTS, byteCounts); + ifd.putIFDValue(IFD.TILE_OFFSETS, offsets); + long fp = out.getFilePointer(); + if (tileOrStripOffsetX == 0 && tileOrStripOffsetY == 0) { + sequentialTileFilePointer = fp; + } else { + fp = sequentialTileFilePointer; + } + if (fp == out.getFilePointer()) { + writeIFD(ifd, 0); + } + int tileCount = tilesPerRow * tilesPerColumn; + long stripStartPos = out.length(); + long totalStripSize = 0; + for (byte[] strip : strips) { + totalStripSize += strip.length; + } + out.seek(stripStartPos + totalStripSize); + out.seek(stripStartPos); + long stripOffset = 0; + for (int i=0; i"; - - public static final String COMPANION_KEY = "ometiff.companion"; - - // -- Fields -- - - private String[][] imageLocations; - private OMEXMLMetadata omeMeta; - private OMEXMLService service; - private Map ifdCounts = new HashMap(); - - private Map uuids = new HashMap(); - - // -- Constructor -- - - public OMETiffWriter() { - super("OME-TIFF", - new String[] {"ome.tif", "ome.tiff", "ome.tf2", "ome.tf8", "ome.btf"}); - } - - // -- IFormatHandler API methods -- - - /* @see loci.formats.IFormatHandler#close() */ - @Override - public void close() throws IOException { - try { - if (currentId != null) { - setupServiceAndMetadata(); - - // remove any BinData and old TiffData elements from the OME-XML - service.removeBinData(omeMeta); - service.removeTiffData(omeMeta); - - for (int series=0; series files = new ArrayList(); - for (String[] s : imageLocations) { - for (String f : s) { - if (!files.contains(f) && f != null) { - files.add(f); - - String xml = null; - if (null != companion) { - xml = getBinaryOnlyOMEXML(f, companion, companionUUID); - } else { - xml = getOMEXML(f); - } - xml = insertWarningComment(xml); - if (getMetadataOptions().isValidate()) { - 
service.validateOMEXML(xml); - } - - // write OME-XML to the first IFD's comment - //saveComment(f, xml); - } - } - } - } - } - catch (DependencyException de) { - throw new RuntimeException(de); - } - catch (ServiceException se) { - throw new RuntimeException(se); - } - catch (FormatException fe) { - throw new RuntimeException(fe); - } - catch (IllegalArgumentException iae) { - throw new RuntimeException(iae); - } - finally { - super.close(); - - boolean canReallyClose = - omeMeta == null || ifdCounts.size() == omeMeta.getImageCount(); - - if (omeMeta != null && canReallyClose) { - int omePlaneCount = 0; - for (int i=0; i0) { - ifd.putIFDValue(IFD.NEW_SUBFILE_TYPE, (long) 1); - } - if (this.interleaved) { - super.saveBytes(no, buf, ifd, x, y, w, h); - } else { - System.out.println("NOT INTERLEAVED"); - super.saveBytes(no, Convert2Interleaved(buf), ifd, x, y, w, h); - } - - int index = no; - while (imageLocations[series][index] != null) { - if (index < imageLocations[series].length - 1) { - index++; - } else { - break; - } - } - imageLocations[series][index] = currentId; - } - - public byte[] Convert2Interleaved(byte[] bytes) { - byte[] neo = new byte[bytes.length]; - int plane = bytes.length/3; - for (int i=0; i') + 1); - String suffix = xml.substring(xml.indexOf('>') + 1); - return prefix + WARNING_COMMENT + suffix; - } - - private String getOMEXML(String file) throws FormatException, IOException { - // generate UUID and add to OME element - String uuid = "urn:uuid:" + getUUID(new Location(file).getName()); - omeMeta.setUUID(uuid); - - OMEXMLMetadataRoot root = (OMEXMLMetadataRoot) omeMeta.getRoot(); - root.setCreator(FormatTools.CREATOR); - - String xml; - try { - xml = service.getOMEXML(omeMeta); - } - catch (ServiceException se) { - throw new FormatException(se); - } - return xml; - } - - private String getBinaryOnlyOMEXML( - String file, String companion, String companionUUID) throws - FormatException, IOException, DependencyException, ServiceException { - ServiceFactory factory = new ServiceFactory(); - OMEXMLService service = factory.getInstance(OMEXMLService.class); - OMEXMLMetadata meta = service.createOMEXMLMetadata(); - String uuid = "urn:uuid:" + getUUID(new Location(file).getName()); - meta.setUUID(uuid); - meta.setBinaryOnlyMetadataFile(new Location(companion).getName()); - meta.setBinaryOnlyUUID(companionUUID); - OMEXMLMetadataRoot root = (OMEXMLMetadataRoot) meta.getRoot(); - root.setCreator(FormatTools.CREATOR); - return service.getOMEXML(meta); - } - - private void saveComment(String file, String xml) throws IOException { - if (out != null) out.close(); - out = new RandomAccessOutputStream(file); - RandomAccessInputStream in = null; - try { - TiffSaver saver = new TiffSaver(out, file); - saver.setBigTiff(isBigTiff); - in = new RandomAccessInputStream(file); - saver.overwriteLastIFDOffset(in); - saver.overwriteComment(in, xml); - } - catch (FormatException exc) { - IOException io = new IOException("Unable to append OME-XML comment"); - io.initCause(exc); - throw io; - } - finally { - if (out != null) out.close(); - if (in != null) in.close(); - } - } - - private void populateTiffData(OMEXMLMetadata omeMeta, int[] zct, - int ifd, int series, int plane) - { - omeMeta.setTiffDataFirstZ(new NonNegativeInteger(zct[0]), series, plane); - omeMeta.setTiffDataFirstC(new NonNegativeInteger(zct[1]), series, plane); - omeMeta.setTiffDataFirstT(new NonNegativeInteger(zct[2]), series, plane); - omeMeta.setTiffDataIFD(new NonNegativeInteger(ifd), series, plane); - 
omeMeta.setTiffDataPlaneCount(new NonNegativeInteger(1), series, plane); - } - - private void populateImage(OMEXMLMetadata omeMeta, int series) { - String dimensionOrder = omeMeta.getPixelsDimensionOrder(series).toString(); - int sizeZ = omeMeta.getPixelsSizeZ(series).getValue().intValue(); - int sizeC = omeMeta.getPixelsSizeC(series).getValue().intValue(); - int sizeT = omeMeta.getPixelsSizeT(series).getValue().intValue(); - - int imageCount = getPlaneCount(); - - if (imageCount == 0) { - omeMeta.setTiffDataPlaneCount(new NonNegativeInteger(0), series, 0); - return; - } - - PositiveInteger samplesPerPixel = new PositiveInteger((sizeZ * sizeC * sizeT) / imageCount); - for (int c=0; c 1 && c == 1) { - c = r.getChannelSamplesPerPixel(series, 0).getValue(); - } - - return z * c * t; - } - -} diff --git a/src/main/java/edu/stonybrook/bmi/hatch/Pyramid.java b/src/main/java/edu/stonybrook/bmi/hatch/Pyramid.java index c29d841..96ff49e 100644 --- a/src/main/java/edu/stonybrook/bmi/hatch/Pyramid.java +++ b/src/main/java/edu/stonybrook/bmi/hatch/Pyramid.java @@ -1,6 +1,7 @@ package edu.stonybrook.bmi.hatch; import static edu.stonybrook.bmi.hatch.Pyramid.CompressionSize; +import java.awt.Color; import java.awt.Graphics; import java.awt.geom.AffineTransform; import java.awt.image.AffineTransformOp; @@ -30,7 +31,6 @@ * @author erich */ public class Pyramid { - private static int cores; private JPEGBuffer[][] tiles; private int tilesX; private int tilesY; @@ -41,14 +41,17 @@ public class Pyramid { public static float DownScale = 0.5f; private int width; private int height; + private final HatchParameters params; - public Pyramid(int tilesX, int tilesY, int tileSizeX, int tileSizeY) { + public Pyramid(HatchParameters params, int tilesX, int tilesY, int tileSizeX, int tileSizeY, int width, int height) { + this.params = params; this.tilesX = tilesX; this.tilesY = tilesY; this.tileSizeX = tileSizeX; this.tileSizeY = tileSizeY; + this.height = height; + this.width = width; tiles = new JPEGBuffer[tilesX][tilesY]; - cores = Runtime.getRuntime().availableProcessors(); } public int gettilesX() { @@ -78,16 +81,9 @@ public void put(byte[] raw, int x, int y, int effTileSizeX, int effTileSizeY, fl } private void CalculateHeightandWidth() { - int w = 0; - int h = 0; - for (int a=0; a()); + ThreadPoolExecutor engine = new ThreadPoolExecutor(params.cores,params.cores,0L,TimeUnit.MILLISECONDS, new LinkedBlockingQueue<>()); engine.prestartAllCoreThreads(); System.out.println("SHRINK : "+tilesX+" "+tilesY); JPEGBuffer c = tiles[tilesX-1][tilesY-1]; if (c.GetBufferImage().getWidth()==1) { System.out.println("shrink clip x"); - tilesX--; + tilesX--; } if (c.GetBufferImage().getHeight()==1) { - System.out.println("shrink clip y"); - tilesY--; + System.out.println("shrink clip y"); + tilesY--; } for (int a=0; a0) {} + while (!engine.isTerminated()) { + try { + Thread.sleep(500); + } catch (InterruptedException ex) { + Logger.getLogger(Pyramid.class.getName()).log(Level.SEVERE, null, ex); + } + } System.out.println("Shrink Process complete..."); CalculateHeightandWidth(); } - public void put(BufferedImage bi, int x, int y, float scale) { - AffineTransform at = new AffineTransform(); - at.scale(DownScale,DownScale); - AffineTransformOp scaleOp = new AffineTransformOp(at, AffineTransformOp.TYPE_BILINEAR); - BufferedImage target = new BufferedImage(bi.getWidth()/2,bi.getHeight()/2,bi.getType()); - scaleOp.filter(bi, target); - put(target,x,y); - } - public void Lump() { - ThreadPoolExecutor engine = new 
ThreadPoolExecutor(cores,cores,0L,TimeUnit.MILLISECONDS, new LinkedBlockingQueue<>()); + ThreadPoolExecutor engine = new ThreadPoolExecutor(params.cores,params.cores,0L,TimeUnit.MILLISECONDS, new LinkedBlockingQueue<>()); engine.prestartAllCoreThreads(); xscale++; int neotilesX = (int) Math.ceil(tilesX/2f); @@ -221,7 +223,13 @@ public void Lump() { } } engine.shutdown(); - while ((engine.getQueue().size()+engine.getActiveCount())>0) {} + while (!engine.isTerminated()) { + try { + Thread.sleep(500); + } catch (InterruptedException ex) { + Logger.getLogger(Pyramid.class.getName()).log(Level.SEVERE, null, ex); + } + } System.out.println("Merge Process complete..."); tiles = neotiles; tilesX = neotilesX; @@ -246,7 +254,7 @@ public BufferedImage call() throws Exception { AffineTransform at = new AffineTransform(); at.scale(Pyramid.DownScale,Pyramid.DownScale); AffineTransformOp scaleOp = new AffineTransformOp(at, AffineTransformOp.TYPE_BILINEAR); - BufferedImage target = new BufferedImage(bi.getWidth()/2,bi.getHeight()/2,bi.getType()); + BufferedImage target = new BufferedImage((int)(Pyramid.DownScale*bi.getWidth()),(int)(Pyramid.DownScale*bi.getHeight()),bi.getType()); scaleOp.filter(bi, target); pyramid.put(target,a,b); return null; @@ -266,18 +274,20 @@ public MergeProcessor(Pyramid pyramid, JPEGBuffer[][] neotiles, int a, int b) { this.b = b; } - public BufferedImage Merge(BufferedImage nw, BufferedImage ne, BufferedImage sw, BufferedImage se) { + private BufferedImage Merge(BufferedImage nw, BufferedImage ne, BufferedImage sw, BufferedImage se) { BufferedImage bi = new BufferedImage(2*pyramid.gettileSizeX(),2*pyramid.gettileSizeY(),nw.getType()); Graphics g = bi.getGraphics(); + g.setColor(Color.BLACK); + g.fillRect(0, 0, bi.getWidth(), bi.getHeight()); g.drawImage(nw, 0, 0, null); if (ne!=null) { - g.drawImage(ne, pyramid.gettileSizeX(), 0, null); + g.drawImage(ne, nw.getWidth(), 0, null); } if (sw!=null) { - g.drawImage(sw, 0, pyramid.gettileSizeY(), null); + g.drawImage(sw, 0, nw.getHeight(), null); } if (se!=null) { - g.drawImage(se, pyramid.gettileSizeX(), pyramid.gettileSizeY(), null); + g.drawImage(se, nw.getWidth(), nw.getHeight(), null); } return bi; } @@ -302,7 +312,7 @@ public JPEGBuffer call() throws Exception { int ny = b/2; neotiles[nx][ny] = new JPEGBuffer(Merge(nw,ne,sw,se),CompressionSize); } else { - System.out.println("NW NULL!"); + throw new Error("NW TILE NULL!!!"); } return null; } diff --git a/src/main/java/edu/stonybrook/bmi/hatch/SVSReader.java b/src/main/java/edu/stonybrook/bmi/hatch/SVSReader.java index a539132..a597cf9 100644 --- a/src/main/java/edu/stonybrook/bmi/hatch/SVSReader.java +++ b/src/main/java/edu/stonybrook/bmi/hatch/SVSReader.java @@ -43,8 +43,7 @@ public class SVSReader extends BaseTiffReader { // -- Constants -- /** Logger for this class. */ - private static final Logger LOGGER = - LoggerFactory.getLogger(SVSReader.class); + private static final Logger LOGGER = LoggerFactory.getLogger(SVSReader.class); /** TIFF image description prefix for Aperio SVS files. 
*/ private static final String APERIO_IMAGE_DESCRIPTION_PREFIX = "Aperio Image"; diff --git a/src/main/java/edu/stonybrook/bmi/hatch/Test2.java b/src/main/java/edu/stonybrook/bmi/hatch/Test2.java new file mode 100644 index 0000000..c7db000 --- /dev/null +++ b/src/main/java/edu/stonybrook/bmi/hatch/Test2.java @@ -0,0 +1,18 @@ +package edu.stonybrook.bmi.hatch; + +import java.awt.image.BufferedImage; +import java.io.File; +import java.io.IOException; +import javax.imageio.ImageIO; + +/** + * + * @author erich + */ +public class Test2 { + + public static void main(String[] args) throws IOException { + BufferedImage bi = ImageIO.read(new File("\\boom\\atook.jp2")); + ImageIO.write(bi, "jpeg", new File("\\boom\\RAY.jpg")); + } +} diff --git a/src/main/java/edu/stonybrook/bmi/hatch/TiffParser.java b/src/main/java/edu/stonybrook/bmi/hatch/TiffParser.java index 782666f..dffe656 100644 --- a/src/main/java/edu/stonybrook/bmi/hatch/TiffParser.java +++ b/src/main/java/edu/stonybrook/bmi/hatch/TiffParser.java @@ -77,13 +77,16 @@ public class TiffParser implements Closeable { // -- Constructors -- - /** Constructs a new TIFF parser from the given file name. */ + /** Constructs a new TIFF parser from the given file name. + * @param filename + * @throws java.io.IOException */ public TiffParser(String filename) throws IOException { this(new RandomAccessInputStream(filename)); canClose = true; } - /** Constructs a new TIFF parser from the given input source. */ + /** Constructs a new TIFF parser from the given input source. + * @param in */ public TiffParser(RandomAccessInputStream in) { this.in = in; doCaching = true; @@ -255,10 +258,8 @@ public IFDList getSubIFDs(IFD ifd) throws IOException { @Deprecated public IFDList getIFDs() throws IOException { if (ifdList != null) return ifdList; - long[] offsets = getIFDOffsets(); IFDList ifds = new IFDList(); - for (long offset : offsets) { IFD ifd = getIFD(offset); if (ifd == null) continue; @@ -281,7 +282,6 @@ public IFDList getIFDs() throws IOException { } } if (doCaching) ifdList = ifds; - return ifds; } @@ -709,14 +709,12 @@ public byte[] getTile(IFD ifd, byte[] buf, int row, int col) throws FormatExcept int pixel = ifd.getBytesPerSample()[0]; int effectiveChannels = planarConfig == 2 ? 
1 : samplesPerPixel; - if (ifd.get(IFD.STRIP_BYTE_COUNTS) instanceof OnDemandLongArray) { - OnDemandLongArray counts = (OnDemandLongArray) ifd.get(IFD.STRIP_BYTE_COUNTS); + if (ifd.get(IFD.STRIP_BYTE_COUNTS) instanceof OnDemandLongArray counts) { if (counts != null) { counts.setStream(in); } } - if (ifd.get(IFD.TILE_BYTE_COUNTS) instanceof OnDemandLongArray) { - OnDemandLongArray counts = (OnDemandLongArray) ifd.get(IFD.TILE_BYTE_COUNTS); + if (ifd.get(IFD.TILE_BYTE_COUNTS) instanceof OnDemandLongArray counts) { if (counts != null) { counts.setStream(in); } @@ -741,8 +739,8 @@ else if (stripByteCounts[countIndex] < 0 && countIndex > 0) { stripByteCounts[countIndex] = stripByteCounts[countIndex - 1]; } - long stripOffset = 0; - long nStrips = 0; + long stripOffset; + long nStrips; if (ifd.getOnDemandStripOffsets() != null) { OnDemandLongArray stripOffsets = ifd.getOnDemandStripOffsets(); @@ -1391,18 +1389,16 @@ public byte[] getRawTile(IFD ifd, byte[] buf, int row, int col) throws FormatExc long tileLength = ifd.getTileLength(); int samplesPerPixel = ifd.getSamplesPerPixel(); int planarConfig = ifd.getPlanarConfiguration(); - loci.formats.tiff.TiffCompression compression = ifd.getCompression(); + //loci.formats.tiff.TiffCompression compression = ifd.getCompression(); long numTileCols = ifd.getTilesPerRow(); int pixel = ifd.getBytesPerSample()[0]; int effectiveChannels = planarConfig == 2 ? 1 : samplesPerPixel; - if (ifd.get(IFD.STRIP_BYTE_COUNTS) instanceof OnDemandLongArray) { - OnDemandLongArray counts = (OnDemandLongArray) ifd.get(IFD.STRIP_BYTE_COUNTS); + if (ifd.get(IFD.STRIP_BYTE_COUNTS) instanceof OnDemandLongArray counts) { if (counts != null) { counts.setStream(in); } } - if (ifd.get(IFD.TILE_BYTE_COUNTS) instanceof OnDemandLongArray) { - OnDemandLongArray counts = (OnDemandLongArray) ifd.get(IFD.TILE_BYTE_COUNTS); + if (ifd.get(IFD.TILE_BYTE_COUNTS) instanceof OnDemandLongArray counts) { if (counts != null) { counts.setStream(in); } @@ -1439,37 +1435,28 @@ public byte[] getRawTile(IFD ifd, byte[] buf, int row, int col) throws FormatExc } int tileSize = (int) stripByteCounts[countIndex]; if (jpegTable != null) { - tileSize += jpegTable.length - 2 + APP14.length; + tileSize += jpegTable.length - 4 + APP14.length; } byte[] tile = new byte[tileSize]; LOGGER.debug("Reading tile Length {} Offset {}", tile.length, stripOffset); - // System.out.println("PHOTO INTEROP : "+photoInterp); if (jpegTable != null) { System.arraycopy(jpegTable, 0, tile, 0, jpegTable.length - 2); switch (photoInterp) { case Y_CB_CR: - System.arraycopy(APP14Y_CB_CR, 0, tile, jpegTable.length - 1, APP14.length); + System.arraycopy(APP14Y_CB_CR, 0, tile, jpegTable.length - 2, APP14.length); break; case RGB: - System.arraycopy(APP14, 0, tile, jpegTable.length - 1, APP14.length); + System.arraycopy(APP14, 0, tile, jpegTable.length - 2, APP14.length); break; default: throw new Error("Can't handle PhotoInterp "+photoInterp); } in.seek(stripOffset + 2); - in.read(tile, jpegTable.length - 1 + APP14.length, tile.length - (jpegTable.length - 1 + APP14.length)); + in.read(tile, jpegTable.length - 2 + APP14.length, tile.length - (jpegTable.length - 2 + APP14.length)); } else { in.seek(stripOffset); in.read(tile); } - // reverse bits in each byte if FillOrder == 2 - if (ifd.getIFDIntValue(IFD.FILL_ORDER) == 2 && compression.getCode() <= loci.formats.tiff.TiffCompression.GROUP_4_FAX.getCode()) { - for (int i=0; i> 24); - } - } - codecOptions.maxBytes = (int) Math.max(size, tile.length); - codecOptions.ycbcr = 
ifd.getPhotometricInterpretation() == PhotoInterp.Y_CB_CR && ifd.getIFDIntValue(IFD.Y_CB_CR_SUB_SAMPLING) == 1 && ycbcrCorrection; - return JPEGTools.FindFirstEOI(tile); + return tile; } } diff --git a/src/main/java/edu/stonybrook/bmi/hatch/TiffReader.java b/src/main/java/edu/stonybrook/bmi/hatch/TiffReader.java index 4db0677..a13465b 100644 --- a/src/main/java/edu/stonybrook/bmi/hatch/TiffReader.java +++ b/src/main/java/edu/stonybrook/bmi/hatch/TiffReader.java @@ -36,14 +36,9 @@ public class TiffReader extends BaseTiffReader { // -- Constants -- /** Logger for this class. */ - private static final Logger LOGGER = - LoggerFactory.getLogger(TiffReader.class); - - public static final String[] TIFF_SUFFIXES = - {"tif", "tiff", "tf2", "tf8", "btf"}; - + private static final Logger LOGGER = LoggerFactory.getLogger(TiffReader.class); + public static final String[] TIFF_SUFFIXES = {"tif", "tiff", "tf2", "tf8", "btf"}; public static final String[] COMPANION_SUFFIXES = {"xml", "txt"}; - public static final int IMAGEJ_TAG = 50839; // -- Fields -- diff --git a/src/main/java/edu/stonybrook/bmi/hatch/TiffSaver.java b/src/main/java/edu/stonybrook/bmi/hatch/TiffSaver.java deleted file mode 100644 index 14441b1..0000000 --- a/src/main/java/edu/stonybrook/bmi/hatch/TiffSaver.java +++ /dev/null @@ -1,1125 +0,0 @@ -package edu.stonybrook.bmi.hatch; - -import java.io.ByteArrayOutputStream; -import java.io.Closeable; -import java.io.DataOutputStream; -import java.io.IOException; -import java.nio.charset.Charset; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.TreeSet; - -import loci.common.ByteArrayHandle; -import loci.common.Constants; -import loci.common.RandomAccessInputStream; -import loci.common.RandomAccessOutputStream; -import loci.formats.FormatException; -import loci.formats.FormatTools; -import loci.formats.codec.CodecOptions; -import loci.formats.tiff.IFD; -import loci.formats.tiff.IFDList; -import loci.formats.tiff.IFDType; -import loci.formats.tiff.PhotoInterp; -import loci.formats.tiff.TiffConstants; -import loci.formats.tiff.TiffIFDEntry; -import loci.formats.tiff.TiffRational; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Parses TIFF data from an input source. - * - * @author Curtis Rueden ctrueden at wisc.edu - * @author Eric Kjellman egkjellman at wisc.edu - * @author Melissa Linkert melissa at glencoesoftware.com - * @author Chris Allan callan at blackcat.ca - */ -public class TiffSaver implements Closeable { - - // -- Constructor -- - - private static final Logger LOGGER = LoggerFactory.getLogger(TiffSaver.class); - - // -- Fields -- - - /** Output stream to use when saving TIFF data. */ - protected RandomAccessOutputStream out; - - /** Output filename. */ - protected String filename; - - /** Output bytes. */ - protected ByteArrayHandle bytes; - - /** Whether or not to write BigTIFF data. */ - private boolean bigTiff = false; - private boolean sequentialWrite = false; - - /** Store tile offsets and original file pointer when writing sequentially. */ - private List sequentialTileOffsets; - private Long sequentialTileFilePointer; - - /** The codec options if set. */ - private CodecOptions options; - - // -- Constructors -- - /** - * Constructs a new TIFF saver from the given filename. - * @param filename Filename of the output stream that we may use to create - * extra input or output streams as required. 
- */ - public TiffSaver(String filename) throws IOException { - this(new RandomAccessOutputStream(filename), filename); - } - - /** - * Constructs a new TIFF saver from the given output source. - * @param out Output stream to save TIFF data to. - * @param filename Filename of the output stream that we may use to create - * extra input or output streams as required. - */ - public TiffSaver(RandomAccessOutputStream out, String filename) { - if (out == null) { - throw new IllegalArgumentException( - "Output stream expected to be not-null"); - } - if (filename == null) { - throw new IllegalArgumentException( - "Filename expected to be not null"); - } - this.out = out; - this.filename = filename; - } - - /** - * Constructs a new TIFF saver from the given output source. - * @param out Output stream to save TIFF data to. - * @param bytes In memory byte array handle that we may use to create - * extra input or output streams as required. - */ - public TiffSaver(RandomAccessOutputStream out, ByteArrayHandle bytes) { - if (out == null) { - throw new IllegalArgumentException( - "Output stream expected to be not-null"); - } - if (bytes == null) { - throw new IllegalArgumentException( - "Bytes expected to be not null"); - } - this.out = out; - this.bytes = bytes; - } - - // -- TiffSaver methods -- - - /** - * Closes the output stream if not null. - * @throws IOException Thrown if an error occurred while closing. - */ - public void close() throws IOException { - if (out != null) { - out.close(); - } - } - - /** - * Sets whether or not we know that the planes will be written sequentially. - * If we are writing planes sequentially and set this flag, then performance - * is slightly improved. - */ - public void setWritingSequentially(boolean sequential) { - sequentialWrite = sequential; - } - - /** Gets the stream from which TIFF data is being saved. */ - public RandomAccessOutputStream getStream() { - return out; - } - - /** Sets whether or not little-endian data should be written. */ - public void setLittleEndian(boolean littleEndian) { - out.order(littleEndian); - } - - /** Sets whether or not BigTIFF data should be written. */ - public void setBigTiff(boolean bigTiff) { - this.bigTiff = bigTiff; - } - - /** Returns whether or not we are writing little-endian data. */ - public boolean isLittleEndian() { - return out.isLittleEndian(); - } - - /** Returns whether or not we are writing BigTIFF data. */ - public boolean isBigTiff() { return bigTiff; } - - /** - * Sets the codec options. - * @param options The value to set. - */ - public void setCodecOptions(CodecOptions options) { - this.options = options; - } - - /** Writes the TIFF file header. 
*/ - public void writeHeader() throws IOException { - // write endianness indicator - out.seek(0); - if (isLittleEndian()) { - out.writeByte(TiffConstants.LITTLE); - out.writeByte(TiffConstants.LITTLE); - } - else { - out.writeByte(TiffConstants.BIG); - out.writeByte(TiffConstants.BIG); - } - // write magic number - if (bigTiff) { - out.writeShort(TiffConstants.BIG_TIFF_MAGIC_NUMBER); - } - else out.writeShort(TiffConstants.MAGIC_NUMBER); - - // write the offset to the first IFD - - // for vanilla TIFFs, 8 is the offset to the first IFD - // for BigTIFFs, 8 is the number of bytes in an offset - if (bigTiff) { - out.writeShort(8); - out.writeShort(0); - - // write the offset to the first IFD for BigTIFF files - out.writeLong(16); - } - else { - out.writeInt(8); - } - } - - /** - */ - public void writeImage(byte[][] buf, IFDList ifds, int pixelType) - throws FormatException, IOException - { - if (ifds == null) { - throw new FormatException("IFD cannot be null"); - } - if (buf == null) { - throw new FormatException("Image data cannot be null"); - } - for (int i=0; inull. - * @param no the plane index within the current series. - * @param pixelType The type of pixels. - * @param x The X-coordinate of the top-left corner. - * @param y The Y-coordinate of the top-left corner. - * @param w The width of the rectangle. - * @param h The height of the rectangle. - * @param last Pass true if it is the last image, - * false otherwise. - * @throws FormatException - * @throws IOException - */ - public void writeImage(byte[] buf, IFD ifd, int no, int pixelType, int x, int y, int w, int h, boolean last) throws FormatException, IOException { - writeImage(buf, ifd, no, pixelType, x, y, w, h, last, null, false); - } - - public void writeImage(byte[] buf, IFD ifd, int no, int pixelType, int x, int y, int w, int h, boolean last, Integer nChannels, boolean copyDirectly) throws FormatException, IOException { - LOGGER.debug("Attempting to write image."); - //b/c method is public should check parameters again - if (buf == null) { - throw new FormatException("Image data cannot be null"); - } - - if (ifd == null) { - throw new FormatException("IFD cannot be null"); - } - - // These operations are synchronized - TiffCompression compression; - int tileWidth, tileHeight, nStrips; - boolean interleaved; - ByteArrayOutputStream[] stripBuf; - synchronized (this) { - int bytesPerPixel = FormatTools.getBytesPerPixel(pixelType); - int blockSize = w * h * bytesPerPixel; - nChannels = 3; - interleaved = ifd.getPlanarConfiguration() == 1; - makeValidIFD(ifd, pixelType, nChannels); - compression = TiffCompression.JPEG; - tileWidth = (int) ifd.getTileWidth(); - tileHeight = (int) ifd.getTileLength(); - int tilesPerRow = (int) ifd.getTilesPerRow(); - int rowsPerStrip = (int) ifd.getRowsPerStrip()[0]; - int stripSize = rowsPerStrip * tileWidth * bytesPerPixel; - nStrips = ((w + tileWidth - 1) / tileWidth) * ((h + tileHeight - 1) / tileHeight); - if (interleaved) { - stripSize *= nChannels; - } else { - nStrips *= nChannels; - } - stripBuf = new ByteArrayOutputStream[nStrips]; - DataOutputStream[] stripOut = new DataOutputStream[nStrips]; - for (int strip=0; strip= h || col >= w) { - stripOut[strip].writeByte(0); - } else if (off < buf.length) { - stripOut[strip].writeByte(buf[off]); - } - else { - stripOut[strip].writeByte(0); - } - } - else { - int off = c * blockSize + ndx + n; - int realStrip = (c * (nStrips / nChannels)) + strip; - if (row >= h || col >= w) { - stripOut[realStrip].writeByte(0); - } else if (off < buf.length) { - 
stripOut[realStrip].writeByte(buf[off]); - } - else { - stripOut[realStrip].writeByte(0); - } - } - } - } - } - } - } - } - } - } - - // Compress strips according to given differencing and compression schemes, - // this operation is NOT synchronized and is the ONLY portion of the - // TiffWriter.saveBytes() --> TiffSaver.writeImage() stack that is NOT - // synchronized. - byte[][] strips = new byte[nStrips][]; - for (int strip=0; stripnull. - * @param no the plane index within the series. - * @param strips The strips to write to the file. - * @param last Pass true if it is the last image, - * false otherwise. - * @param x The initial X offset of the strips/tiles to write. - * @param y The initial Y offset of the strips/tiles to write. - * @throws FormatException - * @throws IOException - */ - private void writeImageIFD(IFD ifd, int no, byte[][] strips, int nChannels, boolean last, int x, int y) throws FormatException, IOException { - LOGGER.debug("Attempting to write image IFD."); - boolean isTiled = ifd.isTiled(); - long defaultByteCount = 0L; - RandomAccessInputStream in = null; - try { - if (!sequentialWrite) { - if (filename != null) { - in = new RandomAccessInputStream(filename); - } else if (bytes != null) { - in = new RandomAccessInputStream(bytes); - } else { - throw new IllegalArgumentException("Filename and bytes are null, cannot create new input stream!"); - } - TiffParser parser = new TiffParser(in); - long[] ifdOffsets = parser.getIFDOffsets(); - LOGGER.debug("IFD offsets: {}", Arrays.toString(ifdOffsets)); - if (no < ifdOffsets.length) { - out.seek(ifdOffsets[no]); - LOGGER.debug("Reading IFD from {} in non-sequential write.", - ifdOffsets[no]); - ifd = parser.getIFD(ifdOffsets[no]); - } else if (no > 0 && no - 1 < ifdOffsets.length) { - IFD copy = parser.getIFD(ifdOffsets[no - 1]); - for (Integer tag : copy.keySet()) { - if (!ifd.containsKey(tag)) { - ifd.put(tag, copy.get(tag)); - } - } - long next = parser.getNextOffset(ifdOffsets[no - 1]); - out.seek(next); - } - } else if (isTiled) { - defaultByteCount = strips[0].length; - } - writeIFDStrips(ifd, no, strips, nChannels, last, x, y, defaultByteCount); - } finally { - if (in != null) { - in.close(); - } - } - } - - public void writeIFD(IFD ifd, long nextOffset) - throws FormatException, IOException - { - TreeSet keys = new TreeSet(ifd.keySet()); - int keyCount = keys.size(); - - if (ifd.containsKey(new Integer(IFD.LITTLE_ENDIAN))) keyCount--; - if (ifd.containsKey(new Integer(IFD.BIG_TIFF))) keyCount--; - if (ifd.containsKey(new Integer(IFD.REUSE))) keyCount--; - - long fp = out.getFilePointer(); - int bytesPerEntry = bigTiff ? TiffConstants.BIG_TIFF_BYTES_PER_ENTRY : - TiffConstants.BYTES_PER_ENTRY; - int ifdBytes = (bigTiff ? 16 : 6) + bytesPerEntry * keyCount; - - if (bigTiff) out.writeLong(keyCount); - else out.writeShort(keyCount); - - ByteArrayHandle extra = new ByteArrayHandle(); - RandomAccessOutputStream extraStream = new RandomAccessOutputStream(extra); - - for (Integer key : keys) { - if (key.equals(IFD.LITTLE_ENDIAN) || key.equals(IFD.BIG_TIFF) || - key.equals(IFD.REUSE)) continue; - - Object value = ifd.get(key); - writeIFDValue(extraStream, ifdBytes + fp, key.intValue(), value); - } - if (bigTiff) out.seek(out.getFilePointer()); - writeIntValue(out, nextOffset); - out.write(extra.getBytes(), 0, (int) extra.length()); - extraStream.close(); - } - - /** - * Writes the given IFD value to the given output object. 
- * @param extraOut buffer to which "extra" IFD information should be written - * @param offset global offset to use for IFD offset values - * @param tag IFD tag to write - * @param value IFD value to write - */ - public void writeIFDValue(RandomAccessOutputStream extraOut, long offset, int tag, Object value) throws FormatException, IOException { - extraOut.order(isLittleEndian()); - - // convert singleton objects into arrays, for simplicity - if (value instanceof Short) { - value = new short[] {((Short) value).shortValue()}; - } - else if (value instanceof Integer) { - value = new int[] {((Integer) value).intValue()}; - } - else if (value instanceof Long) { - value = new long[] {((Long) value).longValue()}; - } - else if (value instanceof TiffRational) { - value = new TiffRational[] {(TiffRational) value}; - } - else if (value instanceof Float) { - value = new float[] {((Float) value).floatValue()}; - } - else if (value instanceof Double) { - value = new double[] {((Double) value).doubleValue()}; - } - - int dataLength = bigTiff ? 8 : 4; - - // write directory entry to output buffers - out.writeShort(tag); // tag - if (value instanceof short[]) { - short[] q = (short[]) value; - out.writeShort(IFDType.BYTE.getCode()); - writeIntValue(out, q.length); - if (q.length <= dataLength) { - for (int i=0; i= offsets.length) { - throw new FormatException( - "No such IFD (" + ifd + " of " + offsets.length + ")"); - } - overwriteIFDValue(raf, offsets[ifd], tag, value, true); - } - - /** - * Surgically overwrites an existing IFD value with the given one. This - * method requires that the IFD directory entry already exist. It - * intelligently updates the count field of the entry to match the new - * length. If the new length is longer than the old length, it appends the - * new data to the end of the file and updates the offset field; if not, or - * if the old data is already at the end of the file, it overwrites the old - * data in place. - * - * @param raf the input stream representing the file to be edited - * @param ifdOffset the offset to the IFD - * @param tag the tag code - * @param value the new value for the tag - */ - public void overwriteIFDValue(RandomAccessInputStream raf, - long ifdOffset, int tag, Object value) throws FormatException, IOException - { - overwriteIFDValue(raf, ifdOffset, tag, value, false); - } - - /** - * Surgically overwrites an existing IFD value with the given one. This - * method requires that the IFD directory entry already exist. It - * intelligently updates the count field of the entry to match the new - * length. If the new length is longer than the old length, it appends the - * new data to the end of the file and updates the offset field; if not, or - * if the old data is already at the end of the file, it overwrites the old - * data in place. 
- * - * @param raf the input stream representing the file to be edited - * @param ifdOffset the offset to the IFD - * @param tag the tag code - * @param value the new value for the tag - * @param skipHeaderCheck true if the TIFF header does not need to be checked - */ - public void overwriteIFDValue(RandomAccessInputStream raf, - long ifdOffset, int tag, Object value, boolean skipHeaderCheck) - throws FormatException, IOException - { - if (!skipHeaderCheck) { - raf.seek(0); - TiffParser parser = new TiffParser(raf); - Boolean valid = parser.checkHeader(); - if (valid == null) { - throw new FormatException("Invalid TIFF header"); - } - - boolean little = valid.booleanValue(); - boolean bigTiff = parser.isBigTiff(); - - setLittleEndian(little); - setBigTiff(bigTiff); - } - - TiffParser parser = new TiffParser(raf); - boolean little = isLittleEndian(); - boolean bigTiff = isBigTiff(); - long offset = bigTiff ? 8 : 4; // offset to the IFD - - int bytesPerEntry = bigTiff ? - TiffConstants.BIG_TIFF_BYTES_PER_ENTRY : TiffConstants.BYTES_PER_ENTRY; - - raf.seek(ifdOffset); - - // get the number of directory entries - long num = bigTiff ? raf.readLong() : raf.readUnsignedShort(); - - // search directory entries for proper tag - for (int i=0; i (offset / bytesPerElement)) { - out.seek(entry.getValueOffset()); - for (int b=0; b 0) { - out.seek(newOffset); - out.write(extraBuf.getByteBuffer(), 0, (int) extraBuf.length()); - } - return; - } - } - - throw new FormatException("Tag not found (" + IFD.getIFDTagName(tag) + ")"); - } - - /** Convenience method for overwriting a file's first ImageDescription. */ - public void overwriteComment(RandomAccessInputStream in, Object value) - throws FormatException, IOException - { - overwriteIFDValue(in, 0, IFD.IMAGE_DESCRIPTION, value); - } - - // -- Helper methods -- - - /** - * Coverts a list to a primitive array. - * @param l The list of Long to convert. - * @return A primitive array of type long[] with the values from - * l. - */ - private long[] toPrimitiveArray(List l) { - long[] toReturn = new long[l.size()]; - for (int i = 0; i < l.size(); i++) { - toReturn[i] = l.get(i); - } - return toReturn; - } - - /** - * Write the given value to the given RandomAccessOutputStream. - * If the 'bigTiff' flag is set, then the value will be written as an 8 byte - * long; otherwise, it will be written as a 4 byte integer. - */ - private void writeIntValue(RandomAccessOutputStream out, long offset) - throws IOException - { - if (bigTiff) { - out.writeLong(offset); - } - else { - out.writeInt((int) offset); - } - } - - /** - * Makes a valid IFD. - * - * @param ifd The IFD to handle. - * @param pixelType The pixel type. - * @param nChannels The number of channels. 
- */ - private void makeValidIFD(IFD ifd, int pixelType, int nChannels) { - int bytesPerPixel = FormatTools.getBytesPerPixel(pixelType); - int bps = 8 * bytesPerPixel; - int[] bpsArray = new int[nChannels]; - Arrays.fill(bpsArray, bps); - ifd.putIFDValue(IFD.BITS_PER_SAMPLE, bpsArray); - - if (FormatTools.isFloatingPoint(pixelType)) { - ifd.putIFDValue(IFD.SAMPLE_FORMAT, 3); - } - if (ifd.getIFDValue(IFD.COMPRESSION) == null) { - ifd.putIFDValue(IFD.COMPRESSION, TiffCompression.UNCOMPRESSED.getCode()); - } - - PhotoInterp pi = PhotoInterp.BLACK_IS_ZERO; - if (nChannels == 1 && ifd.getIFDValue(IFD.COLOR_MAP) != null) { - pi = PhotoInterp.RGB_PALETTE; - } else if (nChannels == 3) { - pi = PhotoInterp.RGB; - } - //System.out.println("PHOTOMETRIC_INTERPRETATION : "+pi); - if (!ifd.containsKey(IFD.PHOTOMETRIC_INTERPRETATION)) { - //System.out.println("PHOTOMETRIC_INTERPRETATION : "+pi); - ifd.putIFDValue(IFD.PHOTOMETRIC_INTERPRETATION, pi.getCode()); - } - ifd.putIFDValue(IFD.PHOTOMETRIC_INTERPRETATION, 6); - - ifd.putIFDValue(IFD.SAMPLES_PER_PIXEL, nChannels); -/* - if (ifd.get(IFD.X_RESOLUTION) == null) { - ifd.putIFDValue(IFD.X_RESOLUTION, new TiffRational(1, 1)); - } - if (ifd.get(IFD.Y_RESOLUTION) == null) { - ifd.putIFDValue(IFD.Y_RESOLUTION, new TiffRational(1, 1)); - }*/ -/* - if (ifd.containsKey(IFD.X_RESOLUTION)) { - ifd.remove(IFD.X_RESOLUTION); - } - if (ifd.containsKey(IFD.Y_RESOLUTION)) { - ifd.remove(IFD.Y_RESOLUTION); - }*/ - /* - if (ifd.get(IFD.X_RESOLUTION) == null) { - ifd.putIFDValue(IFD.X_RESOLUTION, 512); - } - if (ifd.get(IFD.Y_RESOLUTION) == null) { - ifd.putIFDValue(IFD.Y_RESOLUTION, 512); - }*/ - if (ifd.get(IFD.SOFTWARE) == null) { - ifd.putIFDValue(IFD.SOFTWARE, Hatch.software); - } - if (ifd.get(IFD.ROWS_PER_STRIP) == null && ifd.get(IFD.TILE_WIDTH) == null && ifd.get(IFD.TILE_LENGTH) == null) { - ifd.putIFDValue(IFD.ROWS_PER_STRIP, new long[] {1}); - } - if (ifd.get(IFD.IMAGE_DESCRIPTION) == null) { - ifd.putIFDValue(IFD.IMAGE_DESCRIPTION, ""); - } - } - - private void writeIFDStrips(IFD ifd, int no, byte[][] strips, int nChannels, boolean last, int x, int y, long defaultByteCount) throws FormatException, IOException { - int tilesPerRow = (int) ifd.getTilesPerRow(); - int tilesPerColumn = (int) ifd.getTilesPerColumn(); - boolean interleaved = ifd.getPlanarConfiguration() == 1; - boolean isTiled = ifd.isTiled(); - - // record strip byte counts and offsets - List byteCounts = new ArrayList<>(); - List offsets = new ArrayList<>(); - long totalTiles = tilesPerRow * tilesPerColumn; - - if (!interleaved) { - totalTiles *= nChannels; - } - - if (ifd.containsKey(IFD.STRIP_BYTE_COUNTS) || ifd.containsKey(IFD.TILE_BYTE_COUNTS)) { - long[] ifdByteCounts = isTiled ? ifd.getIFDLongArray(IFD.TILE_BYTE_COUNTS) : ifd.getStripByteCounts(); - for (long stripByteCount : ifdByteCounts) { - byteCounts.add(stripByteCount); - } - } else { - while (byteCounts.size() < totalTiles) { - byteCounts.add(defaultByteCount); - } - } - int tileOrStripOffsetX = x / (int) ifd.getTileWidth(); - int tileOrStripOffsetY = y / (int) ifd.getTileLength(); - int firstOffset = (tileOrStripOffsetY * tilesPerRow) + tileOrStripOffsetX; - if (ifd.containsKey(IFD.STRIP_OFFSETS) - || ifd.containsKey(IFD.TILE_OFFSETS)) { - long[] ifdOffsets = isTiled ? 
- ifd.getIFDLongArray(IFD.TILE_OFFSETS) : ifd.getStripOffsets(); - for (int i = 0; i < ifdOffsets.length; i++) { - offsets.add(ifdOffsets[i]); - } - } else { - while (offsets.size() < totalTiles) { - offsets.add(0L); - } - if (isTiled && tileOrStripOffsetX == 0 && tileOrStripOffsetY == 0) { - sequentialTileOffsets = offsets; - } else if (isTiled) { - offsets = sequentialTileOffsets; - } - } - - if (isTiled) { - ifd.putIFDValue(IFD.TILE_BYTE_COUNTS, toPrimitiveArray(byteCounts)); - ifd.putIFDValue(IFD.TILE_OFFSETS, toPrimitiveArray(offsets)); - } else { - ifd.putIFDValue(IFD.STRIP_BYTE_COUNTS, toPrimitiveArray(byteCounts)); - ifd.putIFDValue(IFD.STRIP_OFFSETS, toPrimitiveArray(offsets)); - } - - long fp = out.getFilePointer(); - if (isTiled && tileOrStripOffsetX == 0 && tileOrStripOffsetY == 0) { - sequentialTileFilePointer = fp; - } - else if (isTiled) { - fp = sequentialTileFilePointer; - } - if (fp == out.getFilePointer()) { // Create IFD only if at the end of file - writeIFD(ifd, 0); - } - - // strips.length is the total number of strips being written during - // this method call, which is no more than the total number of - // strips in the image - // - // for single-channel or interleaved image data, the strips are written - // in order - // for multi-channel non-interleaved image data, the strip indexing has - // to correct for the fact that each strip represents a single channel - // - // for example, in a 3 channel non-interleaved image with 2 calls to - // writeImageIFD each of which writes half of the image: - // - we expect 6 strips to be written in total; the first call writes - // {0, 2, 4}, the second writes {1, 3, 5} - // - in each call to writeImageIFD: - // * strips.length is 3 - // * interleaved is false - // * nChannels is 3 - // * tileCount is 2 - - int tileCount = isTiled ? tilesPerRow * tilesPerColumn : 1; - - long stripStartPos = out.length(); - long totalStripSize = 0; - for (int i=0; i= BIG_TIFF_CUTOFF) { - if (canDetectBigTiff) { - LOGGER.info("Switching to BigTIFF (by file size)"); - isBigTiff = true; - } - else { - LOGGER.info("Automatic BigTIFF disabled but pixel byte count = {}", - totalBytes); - } - } - } - } - - synchronized (this) { - setupTiffSaver(); - } - } - - // -- TiffWriter API methods -- - - /** - * Saves the given image to the specified (possibly already open) file. - * The IFD hashtable allows specification of TIFF parameters such as bit - * depth, compression and units. Use one IFD instance per plane. - */ - public void saveBytes(int no, byte[] buf, IFD ifd) - throws IOException, FormatException - { - int w = getSizeX(); - int h = getSizeY(); - saveBytes(no, buf, ifd, 0, 0, w, h); - } - - /** - * Saves the given image to the specified series in the current file. - * The IFD hashtable allows specification of TIFF parameters such as bit - * depth, compression and units. Use one IFD instance per plane. 
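/*
 * The comment block above spells out the strip ordering for non-interleaved,
 * multi-channel data: each call writes one strip per channel, and the strips of
 * successive calls are interleaved by channel (3 channels, 2 calls -> {0, 2, 4}
 * then {1, 3, 5}). A tiny sketch reproducing exactly that documented mapping;
 * it is illustrative only and is not the index arithmetic the removed
 * writeIFDStrips derives from its tile counts.
 */
final class StripIndexSketch {
    /** Global strip index for channel `channel` written during call `callIndex` of `numCalls`. */
    static int stripIndex(int channel, int callIndex, int numCalls) {
        return channel * numCalls + callIndex;
    }

    public static void main(String[] args) {
        // 3-channel image written in 2 calls: first call -> {0, 2, 4}, second -> {1, 3, 5}
        for (int call = 0; call < 2; call++) {
            StringBuilder sb = new StringBuilder("call " + call + " writes {");
            for (int c = 0; c < 3; c++) sb.append(stripIndex(c, call, 2)).append(c < 2 ? ", " : "}");
            System.out.println(sb);
        }
    }
}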
- */ - public void saveBytes(int no, byte[] buf, IFD ifd, int x, int y, int w, int h) throws IOException, FormatException { - if (checkParams) checkParams(no, buf, x, y, w, h); - if (ifd == null) ifd = new IFD(); - MetadataRetrieve retrieve = getMetadataRetrieve(); - int type = FormatTools.pixelTypeFromString(retrieve.getPixelsType(series).toString()); - int index = no; - int currentTileSizeX = getTileSizeX(); - int currentTileSizeY = getTileSizeY(); - boolean usingTiling = currentTileSizeX > 0 && currentTileSizeY > 0; - if (usingTiling) { - ifd.put(new Integer(IFD.TILE_WIDTH), new Long(currentTileSizeX)); - ifd.put(new Integer(IFD.TILE_LENGTH), new Long(currentTileSizeY)); - } - if (usingTiling && (currentTileSizeX < w || currentTileSizeY < h)) { - int numTilesX = (w + (x % currentTileSizeX) + currentTileSizeX - 1) / currentTileSizeX; - int numTilesY = (h + (y % currentTileSizeY) + currentTileSizeY - 1) / currentTileSizeY; - for (int yTileIndex = 0; yTileIndex < numTilesY; yTileIndex++) { - for (int xTileIndex = 0; xTileIndex < numTilesX; xTileIndex++) { - Region tileParams = new Region(); - tileParams.width = xTileIndex < numTilesX - 1 ? currentTileSizeX - (x % currentTileSizeX) : w - (currentTileSizeX * xTileIndex); - tileParams.height = yTileIndex < numTilesY - 1 ? currentTileSizeY - (y % currentTileSizeY) : h - (currentTileSizeY * yTileIndex); - tileParams.x = x + (xTileIndex * currentTileSizeX) - (xTileIndex > 0 ? (x % currentTileSizeX) : 0); - tileParams.y = y + (yTileIndex * currentTileSizeY) - (yTileIndex > 0 ? (y % currentTileSizeY) : 0); - byte [] tileBuf = getTile(buf, tileParams, new Region(x, y, w, h)); - - // This operation is synchronized - synchronized (this) { - // This operation is synchronized against the TIFF saver. - synchronized (tiffSaver) { - index = prepareToWriteImage(no, tileBuf, ifd, tileParams.x, tileParams.y, tileParams.width, tileParams.height); - if (index == -1) { - return; - } - } - } - boolean lastPlane = no == getPlaneCount() - 1; - boolean lastSeries = getSeries() == retrieve.getImageCount() - 1; - boolean lastResolution = getResolution() == getResolutionCount() - 1; - tiffSaver.writeImage(tileBuf, ifd, index, type, tileParams.x, tileParams.y, tileParams.width, tileParams.height, - lastPlane && lastSeries && lastResolution); - } - } - } - else { - // This operation is synchronized - synchronized (this) { - // This operation is synchronized against the TIFF saver. - synchronized (tiffSaver) { - index = prepareToWriteImage(no, buf, ifd, x, y, w, h); - if (index == -1) { - return; - } - } - } - - boolean lastPlane = no == getPlaneCount() - 1; - boolean lastSeries = getSeries() == retrieve.getImageCount() - 1; - boolean lastResolution = getResolution() == getResolutionCount() - 1; - tiffSaver.writeImage(buf, ifd, index, type, x, y, w, h, lastPlane && lastSeries && lastResolution); - } - } - - /** - * Performs the preparation for work prior to the usage of the TIFF saver. - * This method is factored out from saveBytes() in an attempt to - * ensure thread safety. 
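/*
 * The tiled saveBytes path above sizes its tile grid with a ceiling division
 * that also accounts for the requested (x, y) origin not being tile-aligned:
 * numTiles = (regionSize + origin % tileSize + tileSize - 1) / tileSize. A small
 * worked sketch of just that arithmetic; the numbers in main() are made up for
 * illustration.
 */
final class TileGridSketch {
    static int numTiles(int regionSize, int regionOrigin, int tileSize) {
        return (regionSize + (regionOrigin % tileSize) + tileSize - 1) / tileSize;
    }

    public static void main(String[] args) {
        // e.g. a 1000-pixel-wide region starting at x=300 with 256-pixel tiles
        int w = 1000, x = 300, tile = 256;
        System.out.println(numTiles(w, x, tile) + " tiles across"); // (1000 + 44 + 255) / 256 = 5
    }
}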
- */ - protected int prepareToWriteImage(int no, byte[] buf, IFD ifd, int x, int y, int w, int h) throws IOException, FormatException { - MetadataRetrieve retrieve = getMetadataRetrieve(); - boolean littleEndian = false; - if (retrieve.getPixelsBigEndian(series) != null) { - littleEndian = !retrieve.getPixelsBigEndian(series).booleanValue(); - } - else if (retrieve.getPixelsBinDataCount(series) == 0) { - littleEndian = !retrieve.getPixelsBinDataBigEndian(series, 0).booleanValue(); - } - - // Ensure that no more than one thread manipulated the initialized array - // at one time. - synchronized (this) { - if (!initialized[series][no]) { - initialized[series][no] = true; - - try (RandomAccessInputStream tmp = createInputStream()) { - tmp.order(littleEndian); - if (tmp.length() == 0) { - synchronized (this) { - // write TIFF header - tiffSaver.writeHeader(); - } - } - } - } - } - - int c = getSamplesPerPixel(); - int type = FormatTools.pixelTypeFromString( - retrieve.getPixelsType(series).toString()); - int bytesPerPixel = FormatTools.getBytesPerPixel(type); - - int blockSize = w * h * c * bytesPerPixel; - if (blockSize > buf.length) { - c = buf.length / (w * h * bytesPerPixel); - } - - formatCompression(ifd); - byte[][] lut = AWTImageTools.get8BitLookupTable(cm); - if (lut != null) { - int[] colorMap = new int[lut.length * lut[0].length]; - for (int i=0; i= 4294967296L; - if (isBigTiff) { - throw new FormatException("File is too large; call setBigTiff(true)"); - } - } - - // write the image - ifd.put(new Integer(IFD.LITTLE_ENDIAN), new Boolean(littleEndian)); - if (!ifd.containsKey(IFD.REUSE)) { - ifd.put(IFD.REUSE, out.length()); - out.seek(out.length()); - } - else { - out.seek((Long) ifd.get(IFD.REUSE)); - } - - ifd.putIFDValue(IFD.PLANAR_CONFIGURATION,interleaved || getSamplesPerPixel() == 1 ? 
1 : 2); - - int sampleFormat = 1; - if (FormatTools.isSigned(type)) sampleFormat = 2; - if (FormatTools.isFloatingPoint(type)) sampleFormat = 3; - int[] ha = {1,1,1}; - ifd.putIFDValue(IFD.SAMPLE_FORMAT, ha); - - int channels = retrieve.getPixelsSizeC(series).getValue().intValue(); - int z = retrieve.getPixelsSizeZ(series).getValue().intValue(); - int t = retrieve.getPixelsSizeT(series).getValue().intValue(); - //ifd.putIFDValue(IFD.IMAGE_DESCRIPTION,"ImageJ=\nhyperstack=true\nimages=" + (channels * z * t) + "\nchannels=" + channels + "\nslices=" + z + "\nframes=" + t); - - int index = (no * getResolutionCount()) + getResolution(); - int currentSeries = getSeries(); - int currentResolution = getResolution(); - for (int i=0; imaxx) { - maxseries = ii; - maxx = x.sizeX; - } - ii++; - } - if (params.verbose) System.out.println("MAX IMAGE SIZE IS SERIES : "+maxseries); - return maxseries; - } + //private final Model m; + private final HatchWriter writer; + private IMetadata meta; + private byte compression; - private void SetupWriter() { - writer = new OMETiffWriter(); - ServiceFactory factory; - try { - factory = new ServiceFactory(); - OMEXMLService service = factory.getInstance(OMEXMLService.class); - meta = service.createOMEXMLMetadata(); - int i=0; - meta.setImageID("Image:"+i,i); - meta.setPixelsID("Pixels:"+i,i); - meta.setChannelID("Channel:"+i+":0", i, 0); - meta.setChannelSamplesPerPixel(new PositiveInteger(3),i, 0); - meta.setPixelsBigEndian(false, i); - meta.setPixelsInterleaved(true, i); - meta.setPixelsDimensionOrder(DimensionOrder.XYCZT, i); - meta.setPixelsType(PixelType.UINT8, i); - meta.setPixelsSizeX(new PositiveInteger(width),i); - meta.setPixelsSizeY(new PositiveInteger(height),i); - meta.setPixelsSizeZ(new PositiveInteger(1), i); - meta.setPixelsSizeC(new PositiveInteger(3), i); - meta.setPixelsSizeT(new PositiveInteger(1), i); - meta.setPixelsPhysicalSizeX(ppx, i); - meta.setPixelsPhysicalSizeY(ppy, i); - meta.setPixelsPhysicalSizeZ(new Length(1, UNITS.MICROMETER), i); - int w = width; - int h = height; - for (int j=1; jmaxx) { + maxseries = ii; + maxx = x.sizeX; + } + ii++; + } + if (params.verbose) System.out.println("MAX IMAGE SIZE IS SERIES : "+maxseries); + return maxseries; + } + public int effSize(int tileX, int width) { return (tileX + tileSizeX) < width ? tileSizeX : width - tileX; } - public void SetPPS() { + private void SetPPS() { Double physicalSizeX = ppx == null || ppx.value(UNITS.MICROMETER) == null ? 
null : ppx.value(UNITS.MICROMETER).doubleValue(); if (physicalSizeX == null || physicalSizeX == 0) { physicalSizeX = 0d; @@ -244,7 +198,7 @@ public void SetPPS() { public void Dump2File3(byte[] buffer, int a, int b) { try { - File f = new File("/vsi/dump/0 === "+a+"-"+b+".jpg"); + File f = new File("/boom/dump/0 === "+a+"-"+b+".jp2"); if (!f.getParentFile().exists()) { f.getParentFile().mkdirs(); } @@ -261,7 +215,7 @@ public void Dump2File3(byte[] buffer, int a, int b) { public void DumpBI2File3(BufferedImage bi, int a, int b) { try { - File f = new File("/vsi/dump2/0 === "+a+"-"+b+"X.jpg"); + File f = new File("/boom/dump2/0 === "+a+"-"+b+"X.jpg"); if (!f.getParentFile().exists()) { f.getParentFile().mkdirs(); } @@ -287,6 +241,14 @@ public void DumpBI2File3(BufferedImage bi, int a, int b) { Logger.getLogger(Pyramid.class.getName()).log(Level.SEVERE, null, ex); } } + + public static void Display(byte[] buffer, int from, int to) { + System.out.println(""); + for (int i=from; i<(buffer.length+to); i++) { + System.out.print(String.format("%02x",buffer[i])+" "); + } + System.out.println(""); + } public void readWriteTiles() throws FormatException, IOException { if (params.verbose) { @@ -297,42 +259,105 @@ public void readWriteTiles() throws FormatException, IOException { int nYTiles = height / tileSizeY; if (nXTiles * tileSizeX != width) nXTiles++; if (nYTiles * tileSizeY != height) nYTiles++; - pyramid = new Pyramid(nXTiles,nYTiles,tileSizeX,tileSizeY); - writer.setSeries(0); - writer.setResolution(0); + int numtiles = nXTiles*nYTiles; + pyramid = new Pyramid(params,nXTiles,nYTiles,tileSizeX,tileSizeY,width,height); byte[] rawbuffer = new byte[TileSize+20]; + IFD ifd = new IFD(); + ifd.put(IFD.RESOLUTION_UNIT, 3); + ifd.put(IFD.X_RESOLUTION, px); + ifd.put(IFD.Y_RESOLUTION, py); + ifd.put(IFD.TILE_WIDTH, tileSizeX); + ifd.put(IFD.TILE_LENGTH, tileSizeY); + ifd.put(IFD.IMAGE_WIDTH, (long) width); + ifd.put(IFD.IMAGE_LENGTH, (long) height); + ifd.put(IFD.TILE_OFFSETS, new long[numtiles]); + ifd.put(IFD.TILE_BYTE_COUNTS, new long[numtiles]); + String comp = (String) reader.metadata.get("Compression"); + if (comp==null) { + comp = "UNKNOWN"; + } + switch (comp) { + case "JPEG-2000": + compression = 2; + //ifd.put(IFD.COMPRESSION, 34712); + ifd.put(IFD.COMPRESSION, 33005); + //ifd.put(IFD.COMPRESSION, 33003); + break; + case "JPEG": + case "UNKNOWN": + compression = 0; + ifd.put(IFD.COMPRESSION, 7); + break; + default: + throw new Error("Should never get here"); + } + ifd.put(IFD.BITS_PER_SAMPLE, new int[] {8, 8, 8}); + ifd.put(IFD.SAMPLES_PER_PIXEL, 3); + ifd.put(IFD.PLANAR_CONFIGURATION, 1); + ifd.put(IFD.SOFTWARE, Hatch.software); + ifd.putIFDValue(IFD.IMAGE_DESCRIPTION, ""); + ifd.put(IFD.ORIENTATION, 1); + ifd.put(IFD.X_RESOLUTION, px); + ifd.put(IFD.Y_RESOLUTION, py); + ifd.put(IFD.RESOLUTION_UNIT, 3); + ifd.put(IFD.SAMPLE_FORMAT, new int[] {1, 1, 1}); + if (inputFile.toLowerCase().endsWith(".vsi")) { + ifd.put(IFD.Y_CB_CR_SUB_SAMPLING, new int[] {2, 1}); + ifd.putIFDValue(IFD.PHOTOMETRIC_INTERPRETATION, PhotoInterp.Y_CB_CR.getCode()); + } else if (inputFile.toLowerCase().endsWith(".svs")) { + IFD x = reader.getIFDs().get(maximage); + ifd.putIFDValue(IFD.PHOTOMETRIC_INTERPRETATION, (int) x.get(IFD.PHOTOMETRIC_INTERPRETATION)); + //ifd.putIFDValue(IFD.PHOTOMETRIC_INTERPRETATION, PhotoInterp.RGB.getCode()); + } else { + throw new Error("IFD.PHOTOMETRIC_INTERPRETATION ERROR!!!"); + } + JPEG2000Codec codec = new JPEG2000Codec(); for (int y=0; y
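/*
 * readWriteTiles above assembles the output IFD directly: it picks the TIFF
 * compression code from the reader's "Compression" metadata (JPEG-2000 -> 33005,
 * JPEG or missing -> 7) and the photometric interpretation from the input
 * extension (.vsi -> YCbCr with 2x1 chroma subsampling, .svs -> copied from the
 * source IFD). A sketch of those two choices as pure functions; the class and
 * method names are illustrative, while the numeric codes mirror the values set
 * in the diff (7 = JPEG, 33005 = Aperio-style JPEG-2000 RGB, 6 = YCbCr).
 */
final class OutputIfdChoicesSketch {
    static int compressionCode(String readerCompression) {
        if ("JPEG-2000".equals(readerCompression)) return 33005;
        if (readerCompression == null || "JPEG".equals(readerCompression)) return 7;
        throw new IllegalArgumentException("Unsupported compression: " + readerCompression);
    }

    static int photometric(String inputFile, int svsSourcePhotometric) {
        String lower = inputFile.toLowerCase();
        if (lower.endsWith(".vsi")) return 6;                     // YCbCr; pair with 2x1 subsampling
        if (lower.endsWith(".svs")) return svsSourcePhotometric;  // copied from the source IFD
        throw new IllegalArgumentException("Unexpected input type: " + inputFile);
    }
}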