diff -Nru libcommons-compress-java-1.12/debian/changelog libcommons-compress-java-1.13/debian/changelog --- libcommons-compress-java-1.12/debian/changelog 2016-12-21 14:38:24.000000000 +0000 +++ libcommons-compress-java-1.13/debian/changelog 2017-01-05 23:59:52.000000000 +0000 @@ -1,3 +1,9 @@ +libcommons-compress-java (1.13-1) unstable; urgency=medium + + * New upstream release + + -- Emmanuel Bourg Fri, 06 Jan 2017 00:59:52 +0100 + libcommons-compress-java (1.12-1) unstable; urgency=medium * New upstream release diff -Nru libcommons-compress-java-1.12/debian/maven.rules libcommons-compress-java-1.13/debian/maven.rules --- libcommons-compress-java-1.12/debian/maven.rules 2016-12-21 14:37:29.000000000 +0000 +++ libcommons-compress-java-1.13/debian/maven.rules 2017-01-05 23:58:25.000000000 +0000 @@ -1 +1,2 @@ junit junit * s/.*/4.x/ * * +org.apache.commons commons-parent pom s/.*/debian/ * * diff -Nru libcommons-compress-java-1.12/findbugs-exclude-filter.xml libcommons-compress-java-1.13/findbugs-exclude-filter.xml --- libcommons-compress-java-1.12/findbugs-exclude-filter.xml 2016-03-30 12:43:48.000000000 +0000 +++ libcommons-compress-java-1.13/findbugs-exclude-filter.xml 2016-12-25 11:57:03.000000000 +0000 @@ -173,4 +173,17 @@ + + + + + + + + + + + + diff -Nru libcommons-compress-java-1.12/pom.xml libcommons-compress-java-1.13/pom.xml --- libcommons-compress-java-1.12/pom.xml 2016-06-18 15:20:58.000000000 +0000 +++ libcommons-compress-java-1.13/pom.xml 2016-12-25 12:09:13.000000000 +0000 @@ -20,12 +20,12 @@ org.apache.commons commons-parent - 39 + 41 org.apache.commons commons-compress - 1.12 + 1.13 Apache Commons Compress http://commons.apache.org/proper/commons-compress/ @@ -37,8 +37,8 @@ - 1.6 - 1.6 + 1.7 + 1.7 compress COMPRESS 12310904 @@ -46,7 +46,11 @@ ${project.version} RC1 1.6.4 - 3.6 + 3.7 + 0.9.3 + + + true @@ -64,7 +68,7 @@ org.tukaani xz - 1.5 + 1.6 true @@ -195,6 +199,30 @@ + + org.apache.rat + apache-rat-plugin + ${commons.rat.version} + + + + src/test/resources/** + .pmd + .projectile + + + + + com.github.siom79.japicmp + japicmp-maven-plugin + ${commons.japicmp.version} + + + true + false + + + @@ -249,12 +277,6 @@ - - org.codehaus.mojo - cobertura-maven-plugin - 2.7 - - org.apache.maven.plugins maven-pmd-plugin @@ -303,28 +325,13 @@ org.codehaus.mojo findbugs-maven-plugin - 3.0.3 + 3.0.4 Normal Default ${basedir}/findbugs-exclude-filter.xml - - org.apache.rat - apache-rat-plugin - ${commons.rat.version} - - - - src/test/resources/** - .pmd - .gitignore - .gitattributes - .projectile - - - @@ -377,6 +384,34 @@ + + + + + travis + + + env.TRAVIS + true + + + + + + org.jacoco + jacoco-maven-plugin + ${commons.jacoco.version} + + + org.eluder.coveralls + coveralls-maven-plugin + 4.3.0 + + EpochMillis + + + diff -Nru libcommons-compress-java-1.12/RELEASE-NOTES.txt libcommons-compress-java-1.13/RELEASE-NOTES.txt --- libcommons-compress-java-1.12/RELEASE-NOTES.txt 2016-06-18 15:20:42.000000000 +0000 +++ libcommons-compress-java-1.13/RELEASE-NOTES.txt 2016-12-25 12:16:34.000000000 +0000 @@ -5,10 +5,62 @@ lzma, xz, Snappy, traditional Unix Compress, DEFLATE and ar, cpio, jar, tar, zip, dump, 7z, arj. +Release 1.13 +------------ + +Commons Compress 1.13 is the first version to require Java 7 at +runtime. + +Changes in this version include: + +New features: +o SevenZFile, SevenZOutputFile, ZipFile and + ZipArchiveOutputStream can now work on non-file resources if + they can be accessed via SeekableByteChannel. + Issue: COMPRESS-327. 
+o Allow compressor extensions through a standard JRE ServiceLoader. + Issue: COMPRESS-368. +o Allow archive extensions through a standard JRE ServiceLoader. + Issue: COMPRESS-369. +o Add write support for the legacy LZMA format, this requires XZ + for Java 1.6. + Issue: COMPRESS-373. +o Add write support for the legacy LZMA stream to 7z, this + requires XZ for Java 1.6. + Issue: COMPRESS-374. +o Allow the clients of ParallelScatterZipCreator to provide + ZipArchiveEntryRequestSupplier. + Issue: COMPRESS-375. Thanks to Plamen Totev. +o Add a version-independent link to the API docs of the latest + release. + Issue: COMPRESS-372. + +Fixed Bugs: +o BitInputStream could return bad results when overflowing + internally - if two consecutive reads tried to read more than + 64 bits. + Issue: COMPRESS-363. +o ZipArchiveInputStream.closeEntry does not properly advance to + next entry if there are junk bytes at end of data section. + Issue: COMPRESS-364. Thanks to Mike Mole. +o ZipArchiveInputStream now throws an Exception if it encounters + a broken ZIP archive rather than signaling end-of-archive. + Issue: COMPRESS-367. Thanks to Mike Mole. +o ScatterZipOutputStream didn't close the StreamCompressor + causing a potential resource leak. + Issue: COMPRESS-377. + +Changes: +o Update Java requirement from 6 to 7. + Issue: COMPRESS-360. +o Clarified which TarArchiveEntry methods are useless for + entries read from an archive. + Issue: COMPRESS-366. + Release 1.12 ------------ -Commons Compress 1.12 is the first version to require Java6 at +Commons Compress 1.12 is the first version to require Java 6 at runtime. Release 1.12 changes the behavior of BZip2CompressorOutputStream's diff -Nru libcommons-compress-java-1.12/src/changes/changes.xml libcommons-compress-java-1.13/src/changes/changes.xml --- libcommons-compress-java-1.12/src/changes/changes.xml 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/changes/changes.xml 2016-12-25 11:57:03.000000000 +0000 @@ -42,8 +42,65 @@ commons-compress - + + Update Java requirement from 6 to 7. + + + BitInputStream could return bad results when overflowing + internally - if two consecutive reads tried to read more than + 64 bits. + + + Clarified which TarArchiveEntry methods are useless for + entries read from an archive. + + + ZipArchiveInputStream.closeEntry does not properly advance to + next entry if there are junk bytes at end of data section + + + SevenZFile, SevenZOutputFile, ZipFile and + ZipArchiveOutputStream can now work on non-file resources if + they can be accessed via SeekableByteChannel. + + + Allow compressor extensions through a standard JRE ServiceLoader. + + + Allow archive extensions through a standard JRE ServiceLoader. + + + Add write support for the legacy LZMA format, this requires XZ + for Java 1.6. + + + Add write support for the legacy LZMA stream to 7z, this + requires XZ for Java 1.6. + + + Allow the clients of ParallelScatterZipCreator to provide + ZipArchiveEntryRequestSupplier. + + + ZipArchiveInputStream now throws an Exception if it encounters + a broken ZIP archive rather than signaling end-of-archive. + + + ScatterZipOutputStream didn't close the StreamCompressor + causing a potential resource leak. + + + Add a version-independent link to the API docs of the latest + release. 
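[Illustrative aside, not part of the upstream patch: the hunks above and below add ServiceLoader-based extension points (COMPRESS-368/369) and the new ArchiveStreamFactory.findAvailableArchiveInputStreamProviders() lookup shown in the ArchiveStreamFactory.java diff that follows. The sketch below is a minimal, hedged example of how that 1.13 API might be exercised; the class name ListArchiveProviders is invented for illustration, and it assumes commons-compress 1.13 is on the classpath. Third-party formats would be registered via the standard ServiceLoader mechanism, i.e. a META-INF/services/org.apache.commons.compress.archivers.ArchiveStreamProvider file naming the provider implementation.]

    import java.util.Map;
    import java.util.SortedMap;

    import org.apache.commons.compress.archivers.ArchiveStreamFactory;
    import org.apache.commons.compress.archivers.ArchiveStreamProvider;

    public class ListArchiveProviders {
        public static void main(final String[] args) {
            // Enumerates the built-in formats plus any ArchiveStreamProvider
            // implementations discovered through the JRE ServiceLoader.
            // Keys are the upper-cased archiver names (AR, ZIP, TAR, ...).
            final SortedMap<String, ArchiveStreamProvider> providers =
                    ArchiveStreamFactory.findAvailableArchiveInputStreamProviders();
            for (final Map.Entry<String, ArchiveStreamProvider> e : providers.entrySet()) {
                System.out.println(e.getKey() + " -> " + e.getValue().getClass().getName());
            }
        }
    }

[Unknown names passed to createArchiveInputStream fall through to this provider map before an ArchiveException is thrown, which is what makes the factory pluggable in 1.13.]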
+ + + archiveInputStreamProviders; + + private SortedMap archiveOutputStreamProviders; + + private static ArrayList findArchiveStreamProviders() { + return Lists.newArrayList(serviceLoaderIterator()); + } + + static void putAll(Set names, ArchiveStreamProvider provider, + TreeMap map) { + for (String name : names) { + map.put(toKey(name), provider); + } + } + + private static Iterator serviceLoaderIterator() { + return new ServiceLoaderIterator<>(ArchiveStreamProvider.class); + } + + private static String toKey(final String name) { + return name.toUpperCase(Locale.ROOT); + } + + /** + * Constructs a new sorted map from input stream provider names to provider + * objects. + * + *

+ * The map returned by this method will have one entry for each provider for + * which support is available in the current Java virtual machine. If two or + * more supported provider have the same name then the resulting map will + * contain just one of them; which one it will contain is not specified. + *

+ * + *

+ * The invocation of this method, and the subsequent use of the resulting + * map, may cause time-consuming disk or network I/O operations to occur. + * This method is provided for applications that need to enumerate all of + * the available providers, for example to allow user provider selection. + *

+ * + *

+ * This method may return different results at different times if new + * providers are dynamically made available to the current Java virtual + * machine. + *

+ * + * @return An immutable, map from names to provider objects + * @since 1.13 + */ + public static SortedMap findAvailableArchiveInputStreamProviders() { + return AccessController.doPrivileged(new PrivilegedAction>() { + @Override + public SortedMap run() { + TreeMap map = new TreeMap<>(); + putAll(SINGLETON.getInputStreamArchiveNames(), SINGLETON, map); + for (ArchiveStreamProvider provider : findArchiveStreamProviders()) { + putAll(provider.getInputStreamArchiveNames(), provider, map); + } + return map; + } + }); + } + + /** + * Constructs a new sorted map from output stream provider names to provider + * objects. + * + *

+ * The map returned by this method will have one entry for each provider for + * which support is available in the current Java virtual machine. If two or + * more supported provider have the same name then the resulting map will + * contain just one of them; which one it will contain is not specified. + *

+ * + *

+ * The invocation of this method, and the subsequent use of the resulting + * map, may cause time-consuming disk or network I/O operations to occur. + * This method is provided for applications that need to enumerate all of + * the available providers, for example to allow user provider selection. + *

+ * + *

+ * This method may return different results at different times if new + * providers are dynamically made available to the current Java virtual + * machine. + *

+ * + * @return An immutable, map from names to provider objects + * @since 1.13 + */ + public static SortedMap findAvailableArchiveOutputStreamProviders() { + return AccessController.doPrivileged(new PrivilegedAction>() { + @Override + public SortedMap run() { + TreeMap map = new TreeMap<>(); + putAll(SINGLETON.getOutputStreamArchiveNames(), SINGLETON, map); + for (ArchiveStreamProvider provider : findArchiveStreamProviders()) { + putAll(provider.getOutputStreamArchiveNames(), provider, map); + } + return map; + } + }); + } /** * Create an instance using the platform default encoding. @@ -180,7 +312,7 @@ } /** - * Create an archive input stream from an archiver name and an input stream. + * Creates an archive input stream from an archiver name and an input stream. * * @param archiverName the archive name, * i.e. {@value #AR}, {@value #ARJ}, {@value #ZIP}, {@value #TAR}, {@value #JAR}, {@value #CPIO}, {@value #DUMP} or {@value #SEVEN_Z} @@ -191,9 +323,14 @@ * read from a stream * @throws IllegalArgumentException if the archiver name or stream is null */ - public ArchiveInputStream createArchiveInputStream( - final String archiverName, final InputStream in) + public ArchiveInputStream createArchiveInputStream(final String archiverName, final InputStream in) throws ArchiveException { + return createArchiveInputStream(archiverName, in, entryEncoding); + } + + @Override + public ArchiveInputStream createArchiveInputStream(final String archiverName, final InputStream in, + final String actualEncoding) throws ArchiveException { if (archiverName == null) { throw new IllegalArgumentException("Archivername must not be null."); @@ -207,38 +344,38 @@ return new ArArchiveInputStream(in); } if (ARJ.equalsIgnoreCase(archiverName)) { - if (entryEncoding != null) { - return new ArjArchiveInputStream(in, entryEncoding); + if (actualEncoding != null) { + return new ArjArchiveInputStream(in, actualEncoding); } return new ArjArchiveInputStream(in); } if (ZIP.equalsIgnoreCase(archiverName)) { - if (entryEncoding != null) { - return new ZipArchiveInputStream(in, entryEncoding); + if (actualEncoding != null) { + return new ZipArchiveInputStream(in, actualEncoding); } return new ZipArchiveInputStream(in); } if (TAR.equalsIgnoreCase(archiverName)) { - if (entryEncoding != null) { - return new TarArchiveInputStream(in, entryEncoding); + if (actualEncoding != null) { + return new TarArchiveInputStream(in, actualEncoding); } return new TarArchiveInputStream(in); } if (JAR.equalsIgnoreCase(archiverName)) { - if (entryEncoding != null) { - return new JarArchiveInputStream(in, entryEncoding); + if (actualEncoding != null) { + return new JarArchiveInputStream(in, actualEncoding); } return new JarArchiveInputStream(in); } if (CPIO.equalsIgnoreCase(archiverName)) { - if (entryEncoding != null) { - return new CpioArchiveInputStream(in, entryEncoding); + if (actualEncoding != null) { + return new CpioArchiveInputStream(in, actualEncoding); } return new CpioArchiveInputStream(in); } if (DUMP.equalsIgnoreCase(archiverName)) { - if (entryEncoding != null) { - return new DumpArchiveInputStream(in, entryEncoding); + if (actualEncoding != null) { + return new DumpArchiveInputStream(in, actualEncoding); } return new DumpArchiveInputStream(in); } @@ -246,11 +383,16 @@ throw new StreamingNotSupportedException(SEVEN_Z); } + final ArchiveStreamProvider archiveStreamProvider = getArchiveInputStreamProviders().get(toKey(archiverName)); + if (archiveStreamProvider != null) { + return 
archiveStreamProvider.createArchiveInputStream(archiverName, in, actualEncoding); + } + throw new ArchiveException("Archiver: " + archiverName + " not found."); } /** - * Create an archive output stream from an archiver name and an output stream. + * Creates an archive output stream from an archiver name and an output stream. * * @param archiverName the archive name, * i.e. {@value #AR}, {@value #ZIP}, {@value #TAR}, {@value #JAR} or {@value #CPIO} @@ -261,8 +403,14 @@ * written to a stream * @throws IllegalArgumentException if the archiver name or stream is null */ + public ArchiveOutputStream createArchiveOutputStream(final String archiverName, final OutputStream out) + throws ArchiveException { + return createArchiveOutputStream(archiverName, out, entryEncoding); + } + + @Override public ArchiveOutputStream createArchiveOutputStream( - final String archiverName, final OutputStream out) + final String archiverName, final OutputStream out, final String actualEncoding) throws ArchiveException { if (archiverName == null) { throw new IllegalArgumentException("Archivername must not be null."); @@ -276,32 +424,38 @@ } if (ZIP.equalsIgnoreCase(archiverName)) { final ZipArchiveOutputStream zip = new ZipArchiveOutputStream(out); - if (entryEncoding != null) { - zip.setEncoding(entryEncoding); + if (actualEncoding != null) { + zip.setEncoding(actualEncoding); } return zip; } if (TAR.equalsIgnoreCase(archiverName)) { - if (entryEncoding != null) { - return new TarArchiveOutputStream(out, entryEncoding); + if (actualEncoding != null) { + return new TarArchiveOutputStream(out, actualEncoding); } return new TarArchiveOutputStream(out); } if (JAR.equalsIgnoreCase(archiverName)) { - if (entryEncoding != null) { - return new JarArchiveOutputStream(out, entryEncoding); + if (actualEncoding != null) { + return new JarArchiveOutputStream(out, actualEncoding); } return new JarArchiveOutputStream(out); } if (CPIO.equalsIgnoreCase(archiverName)) { - if (entryEncoding != null) { - return new CpioArchiveOutputStream(out, entryEncoding); + if (actualEncoding != null) { + return new CpioArchiveOutputStream(out, actualEncoding); } return new CpioArchiveOutputStream(out); } if (SEVEN_Z.equalsIgnoreCase(archiverName)) { throw new StreamingNotSupportedException(SEVEN_Z); } + + final ArchiveStreamProvider archiveStreamProvider = getArchiveOutputStreamProviders().get(toKey(archiverName)); + if (archiveStreamProvider != null) { + return archiveStreamProvider.createArchiveOutputStream(archiverName, out, actualEncoding); + } + throw new ArchiveException("Archiver: " + archiverName + " not found."); } @@ -327,7 +481,7 @@ throw new IllegalArgumentException("Mark is not supported."); } - final byte[] signature = new byte[12]; + final byte[] signature = new byte[SIGNATURE_SIZE]; in.mark(signature.length); try { int signatureLength = IOUtils.readFully(in, signature); @@ -347,7 +501,7 @@ } // Dump needs a bigger buffer to check the signature; - final byte[] dumpsig = new byte[32]; + final byte[] dumpsig = new byte[DUMP_SIGNATURE_SIZE]; in.mark(dumpsig.length); signatureLength = IOUtils.readFully(in, dumpsig); in.reset(); @@ -356,18 +510,18 @@ } // Tar needs an even bigger buffer to check the signature; read the first block - final byte[] tarheader = new byte[512]; - in.mark(tarheader.length); - signatureLength = IOUtils.readFully(in, tarheader); + final byte[] tarHeader = new byte[TAR_HEADER_SIZE]; + in.mark(tarHeader.length); + signatureLength = IOUtils.readFully(in, tarHeader); in.reset(); - if 
(TarArchiveInputStream.matches(tarheader, signatureLength)) { + if (TarArchiveInputStream.matches(tarHeader, signatureLength)) { return createArchiveInputStream(TAR, in); } // COMPRESS-117 - improve auto-recognition - if (signatureLength >= 512) { + if (signatureLength >= TAR_HEADER_SIZE) { TarArchiveInputStream tais = null; try { - tais = new TarArchiveInputStream(new ByteArrayInputStream(tarheader)); + tais = new TarArchiveInputStream(new ByteArrayInputStream(tarHeader)); // COMPRESS-191 - verify the header checksum if (tais.getNextTarEntry().isCheckSumOK()) { return createArchiveInputStream(TAR, in); @@ -388,4 +542,30 @@ throw new ArchiveException("No Archiver found for the stream signature"); } + public SortedMap getArchiveInputStreamProviders() { + if (archiveInputStreamProviders == null) { + archiveInputStreamProviders = Collections + .unmodifiableSortedMap(findAvailableArchiveInputStreamProviders()); + } + return archiveInputStreamProviders; + } + + public SortedMap getArchiveOutputStreamProviders() { + if (archiveOutputStreamProviders == null) { + archiveOutputStreamProviders = Collections + .unmodifiableSortedMap(findAvailableArchiveOutputStreamProviders()); + } + return archiveOutputStreamProviders; + } + + @Override + public Set getInputStreamArchiveNames() { + return Sets.newHashSet(AR, ARJ, ZIP, TAR, JAR, CPIO, DUMP, SEVEN_Z); + } + + @Override + public Set getOutputStreamArchiveNames() { + return Sets.newHashSet(AR, ZIP, TAR, JAR, CPIO, SEVEN_Z); + } + } diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/ArchiveStreamProvider.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/ArchiveStreamProvider.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/ArchiveStreamProvider.java 1970-01-01 00:00:00.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/ArchiveStreamProvider.java 2016-12-25 11:57:03.000000000 +0000 @@ -0,0 +1,104 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.commons.compress.archivers; + +import java.io.InputStream; +import java.io.OutputStream; +import java.util.Set; + +/** + * Creates Archive {@link ArchiveInputStream}s and {@link ArchiveOutputStream}s. + * + * @since 1.13 + */ +public interface ArchiveStreamProvider { + + /** + * Creates an archive input stream from an archiver name and an input + * stream. + * + * @param name + * the archive name, i.e. 
+ * {@value org.apache.commons.compress.archivers.ArchiveStreamFactory#AR}, + * {@value org.apache.commons.compress.archivers.ArchiveStreamFactory#ARJ}, + * {@value org.apache.commons.compress.archivers.ArchiveStreamFactory#ZIP}, + * {@value org.apache.commons.compress.archivers.ArchiveStreamFactory#TAR}, + * {@value org.apache.commons.compress.archivers.ArchiveStreamFactory#JAR}, + * {@value org.apache.commons.compress.archivers.ArchiveStreamFactory#CPIO}, + * {@value org.apache.commons.compress.archivers.ArchiveStreamFactory#DUMP} + * or + * {@value org.apache.commons.compress.archivers.ArchiveStreamFactory#SEVEN_Z} + * @param in + * the input stream + * @param encoding + * encoding name or null for the default + * @return the archive input stream + * @throws ArchiveException + * if the archiver name is not known + * @throws StreamingNotSupportedException + * if the format cannot be read from a stream + * @throws IllegalArgumentException + * if the archiver name or stream is null + */ + ArchiveInputStream createArchiveInputStream(final String name, final InputStream in, final String encoding) + throws ArchiveException; + + /** + * Creates an archive output stream from an archiver name and an output + * stream. + * + * @param name + * the archive name, i.e. + * {@value org.apache.commons.compress.archivers.ArchiveStreamFactory#AR}, + * {@value org.apache.commons.compress.archivers.ArchiveStreamFactory#ZIP}, + * {@value org.apache.commons.compress.archivers.ArchiveStreamFactory#TAR}, + * {@value org.apache.commons.compress.archivers.ArchiveStreamFactory#JAR} + * or + * {@value org.apache.commons.compress.archivers.ArchiveStreamFactory#CPIO} + * @param out + * the output stream + * @param encoding + * encoding name or null for the default + * @return the archive output stream + * @throws ArchiveException + * if the archiver name is not known + * @throws StreamingNotSupportedException + * if the format cannot be written to a stream + * @throws IllegalArgumentException + * if the archiver name or stream is null + */ + ArchiveOutputStream createArchiveOutputStream(final String name, final OutputStream out, final String encoding) + throws ArchiveException; + + /** + * Gets all the input stream archive names for this provider + * + * @return all the input archive names for this provider + */ + Set getInputStreamArchiveNames(); + + /** + * Gets all the output stream archive names for this provider + * + * @return all the output archive names for this provider + */ + Set getOutputStreamArchiveNames(); + +} diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStream.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStream.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStream.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStream.java 2016-12-25 11:57:03.000000000 +0000 @@ -216,54 +216,54 @@ if (basicHeaderBytes == null) { return null; } - final DataInputStream basicHeader = new DataInputStream( - new ByteArrayInputStream(basicHeaderBytes)); - - final int firstHeaderSize = basicHeader.readUnsignedByte(); - final byte[] firstHeaderBytes = new byte[firstHeaderSize - 1]; - basicHeader.readFully(firstHeaderBytes); - final DataInputStream firstHeader = new DataInputStream( - new ByteArrayInputStream(firstHeaderBytes)); - - final 
LocalFileHeader localFileHeader = new LocalFileHeader(); - localFileHeader.archiverVersionNumber = firstHeader.readUnsignedByte(); - localFileHeader.minVersionToExtract = firstHeader.readUnsignedByte(); - localFileHeader.hostOS = firstHeader.readUnsignedByte(); - localFileHeader.arjFlags = firstHeader.readUnsignedByte(); - localFileHeader.method = firstHeader.readUnsignedByte(); - localFileHeader.fileType = firstHeader.readUnsignedByte(); - localFileHeader.reserved = firstHeader.readUnsignedByte(); - localFileHeader.dateTimeModified = read32(firstHeader); - localFileHeader.compressedSize = 0xffffFFFFL & read32(firstHeader); - localFileHeader.originalSize = 0xffffFFFFL & read32(firstHeader); - localFileHeader.originalCrc32 = 0xffffFFFFL & read32(firstHeader); - localFileHeader.fileSpecPosition = read16(firstHeader); - localFileHeader.fileAccessMode = read16(firstHeader); - pushedBackBytes(20); - localFileHeader.firstChapter = firstHeader.readUnsignedByte(); - localFileHeader.lastChapter = firstHeader.readUnsignedByte(); - - readExtraData(firstHeaderSize, firstHeader, localFileHeader); - - localFileHeader.name = readString(basicHeader); - localFileHeader.comment = readString(basicHeader); - - final ArrayList extendedHeaders = new ArrayList(); - int extendedHeaderSize; - while ((extendedHeaderSize = read16(in)) > 0) { - final byte[] extendedHeaderBytes = new byte[extendedHeaderSize]; - readFully(in, extendedHeaderBytes); - final long extendedHeaderCrc32 = 0xffffFFFFL & read32(in); - final CRC32 crc32 = new CRC32(); - crc32.update(extendedHeaderBytes); - if (extendedHeaderCrc32 != crc32.getValue()) { - throw new IOException("Extended header CRC32 verification failure"); + try (final DataInputStream basicHeader = new DataInputStream(new ByteArrayInputStream(basicHeaderBytes))) { + + final int firstHeaderSize = basicHeader.readUnsignedByte(); + final byte[] firstHeaderBytes = new byte[firstHeaderSize - 1]; + basicHeader.readFully(firstHeaderBytes); + try (final DataInputStream firstHeader = new DataInputStream(new ByteArrayInputStream(firstHeaderBytes))) { + + final LocalFileHeader localFileHeader = new LocalFileHeader(); + localFileHeader.archiverVersionNumber = firstHeader.readUnsignedByte(); + localFileHeader.minVersionToExtract = firstHeader.readUnsignedByte(); + localFileHeader.hostOS = firstHeader.readUnsignedByte(); + localFileHeader.arjFlags = firstHeader.readUnsignedByte(); + localFileHeader.method = firstHeader.readUnsignedByte(); + localFileHeader.fileType = firstHeader.readUnsignedByte(); + localFileHeader.reserved = firstHeader.readUnsignedByte(); + localFileHeader.dateTimeModified = read32(firstHeader); + localFileHeader.compressedSize = 0xffffFFFFL & read32(firstHeader); + localFileHeader.originalSize = 0xffffFFFFL & read32(firstHeader); + localFileHeader.originalCrc32 = 0xffffFFFFL & read32(firstHeader); + localFileHeader.fileSpecPosition = read16(firstHeader); + localFileHeader.fileAccessMode = read16(firstHeader); + pushedBackBytes(20); + localFileHeader.firstChapter = firstHeader.readUnsignedByte(); + localFileHeader.lastChapter = firstHeader.readUnsignedByte(); + + readExtraData(firstHeaderSize, firstHeader, localFileHeader); + + localFileHeader.name = readString(basicHeader); + localFileHeader.comment = readString(basicHeader); + + final ArrayList extendedHeaders = new ArrayList<>(); + int extendedHeaderSize; + while ((extendedHeaderSize = read16(in)) > 0) { + final byte[] extendedHeaderBytes = new byte[extendedHeaderSize]; + readFully(in, extendedHeaderBytes); + final long 
extendedHeaderCrc32 = 0xffffFFFFL & read32(in); + final CRC32 crc32 = new CRC32(); + crc32.update(extendedHeaderBytes); + if (extendedHeaderCrc32 != crc32.getValue()) { + throw new IOException("Extended header CRC32 verification failure"); + } + extendedHeaders.add(extendedHeaderBytes); + } + localFileHeader.extendedHeaders = extendedHeaders.toArray(new byte[extendedHeaders.size()][]); + + return localFileHeader; } - extendedHeaders.add(extendedHeaderBytes); } - localFileHeader.extendedHeaders = extendedHeaders.toArray(new byte[extendedHeaders.size()][]); - - return localFileHeader; } private void readExtraData(final int firstHeaderSize, final DataInputStream firstHeader, diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStream.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStream.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStream.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStream.java 2016-12-25 11:57:03.000000000 +0000 @@ -82,9 +82,9 @@ private final InputStream in; // cached buffers - must only be used locally in the class (COMPRESS-172 - reduce garbage collection) - private final byte[] TWO_BYTES_BUF = new byte[2]; - private final byte[] FOUR_BYTES_BUF = new byte[4]; - private final byte[] SIX_BYTES_BUF = new byte[6]; + private final byte[] twoBytesBuf = new byte[2]; + private final byte[] fourBytesBuf = new byte[4]; + private final byte[] sixBytesBuf = new byte[6]; private final int blockSize; @@ -234,26 +234,30 @@ if (this.entry != null) { closeEntry(); } - readFully(TWO_BYTES_BUF, 0, TWO_BYTES_BUF.length); - if (CpioUtil.byteArray2long(TWO_BYTES_BUF, false) == MAGIC_OLD_BINARY) { + readFully(twoBytesBuf, 0, twoBytesBuf.length); + if (CpioUtil.byteArray2long(twoBytesBuf, false) == MAGIC_OLD_BINARY) { this.entry = readOldBinaryEntry(false); - } else if (CpioUtil.byteArray2long(TWO_BYTES_BUF, true) + } else if (CpioUtil.byteArray2long(twoBytesBuf, true) == MAGIC_OLD_BINARY) { this.entry = readOldBinaryEntry(true); } else { - System.arraycopy(TWO_BYTES_BUF, 0, SIX_BYTES_BUF, 0, - TWO_BYTES_BUF.length); - readFully(SIX_BYTES_BUF, TWO_BYTES_BUF.length, - FOUR_BYTES_BUF.length); - final String magicString = ArchiveUtils.toAsciiString(SIX_BYTES_BUF); - if (magicString.equals(MAGIC_NEW)) { - this.entry = readNewEntry(false); - } else if (magicString.equals(MAGIC_NEW_CRC)) { - this.entry = readNewEntry(true); - } else if (magicString.equals(MAGIC_OLD_ASCII)) { - this.entry = readOldAsciiEntry(); - } else { - throw new IOException("Unknown magic [" + magicString + "]. Occured at byte: " + getBytesRead()); + System.arraycopy(twoBytesBuf, 0, sixBytesBuf, 0, + twoBytesBuf.length); + readFully(sixBytesBuf, twoBytesBuf.length, + fourBytesBuf.length); + final String magicString = ArchiveUtils.toAsciiString(sixBytesBuf); + switch (magicString) { + case MAGIC_NEW: + this.entry = readNewEntry(false); + break; + case MAGIC_NEW_CRC: + this.entry = readNewEntry(true); + break; + case MAGIC_OLD_ASCII: + this.entry = readOldAsciiEntry(); + break; + default: + throw new IOException("Unknown magic [" + magicString + "]. 
Occured at byte: " + getBytesRead()); } } @@ -272,7 +276,7 @@ private void skip(final int bytes) throws IOException{ // bytes cannot be more than 3 bytes if (bytes > 0) { - readFully(FOUR_BYTES_BUF, 0, bytes); + readFully(fourBytesBuf, 0, bytes); } } diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/cpio/CpioArchiveOutputStream.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/cpio/CpioArchiveOutputStream.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/cpio/CpioArchiveOutputStream.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/cpio/CpioArchiveOutputStream.java 2016-12-25 11:57:03.000000000 +0000 @@ -77,7 +77,7 @@ private final short entryFormat; private final HashMap names = - new HashMap(); + new HashMap<>(); private long crc = 0; @@ -299,7 +299,7 @@ writeAsciiLong(devMin, 8, 16); writeAsciiLong(entry.getRemoteDeviceMaj(), 8, 16); writeAsciiLong(entry.getRemoteDeviceMin(), 8, 16); - writeAsciiLong(entry.getName().length() + 1, 8, 16); + writeAsciiLong(entry.getName().length() + 1l, 8, 16); writeAsciiLong(entry.getChksum(), 8, 16); writeCString(entry.getName()); pad(entry.getHeaderPadCount()); @@ -330,7 +330,7 @@ writeAsciiLong(entry.getNumberOfLinks(), 6, 8); writeAsciiLong(entry.getRemoteDevice(), 6, 8); writeAsciiLong(entry.getTime(), 11, 8); - writeAsciiLong(entry.getName().length() + 1, 6, 8); + writeAsciiLong(entry.getName().length() + 1l, 6, 8); writeAsciiLong(entry.getSize(), 11, 8); writeCString(entry.getName()); } @@ -360,7 +360,7 @@ writeBinaryLong(entry.getNumberOfLinks(), 2, swapHalfWord); writeBinaryLong(entry.getRemoteDevice(), 2, swapHalfWord); writeBinaryLong(entry.getTime(), 4, swapHalfWord); - writeBinaryLong(entry.getName().length() + 1, 2, swapHalfWord); + writeBinaryLong(entry.getName().length() + 1l, 2, swapHalfWord); writeBinaryLong(entry.getSize(), 4, swapHalfWord); writeCString(entry.getName()); pad(entry.getHeaderPadCount()); @@ -519,7 +519,7 @@ } if (tmp.length() <= length) { - final long insertLength = length - tmp.length(); + final int insertLength = length - tmp.length(); for (int pos = 0; pos < insertLength; pos++) { tmp.insert(0, "0"); } diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveEntry.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveEntry.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveEntry.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveEntry.java 2016-12-25 11:57:03.000000000 +0000 @@ -823,7 +823,7 @@ } public static Set find(final int code) { - final Set set = new HashSet(); + final Set set = new HashSet<>(); for (final PERMISSION p : PERMISSION.values()) { if ((code & p.code) == p.code) { diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStream.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStream.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStream.java 2016-06-18 15:07:49.000000000 +0000 +++ 
libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStream.java 2016-12-25 11:57:03.000000000 +0000 @@ -63,10 +63,10 @@ protected TapeInputStream raw; // map of ino -> dirent entry. We can use this to reconstruct full paths. - private final Map names = new HashMap(); + private final Map names = new HashMap<>(); // map of ino -> (directory) entry when we're missing one or more elements in the path. - private final Map pending = new HashMap(); + private final Map pending = new HashMap<>(); // queue of (directory) entries where we now have the full path. private Queue queue; @@ -136,7 +136,7 @@ // use priority based on queue to ensure parent directories are // released first. - queue = new PriorityQueue(10, + queue = new PriorityQueue<>(10, new Comparator() { @Override public int compare(final DumpArchiveEntry p, final DumpArchiveEntry q) { @@ -185,7 +185,7 @@ } // we don't do anything with this yet. - if (raw.skip(DumpArchiveConstants.TP_SIZE * active.getHeaderCount()) + if (raw.skip((long) DumpArchiveConstants.TP_SIZE * active.getHeaderCount()) == -1) { throw new EOFException(); } @@ -209,7 +209,7 @@ } // we don't do anything with this yet. - if (raw.skip(DumpArchiveConstants.TP_SIZE * active.getHeaderCount()) + if (raw.skip((long) DumpArchiveConstants.TP_SIZE * active.getHeaderCount()) == -1) { throw new EOFException(); } @@ -264,7 +264,7 @@ // skip any remaining segments for prior file. while (DumpArchiveConstants.SEGMENT_TYPE.ADDR == active.getHeaderType()) { - if (raw.skip(DumpArchiveConstants.TP_SIZE + if (raw.skip((long) DumpArchiveConstants.TP_SIZE * (active.getHeaderCount() - active.getHeaderHoles())) == -1) { throw new EOFException(); @@ -415,7 +415,7 @@ private String getPath(final DumpArchiveEntry entry) { // build the stack of elements. 
It's possible that we're // still missing an intermediate value and if so we - final Stack elements = new Stack(); + final Stack elements = new Stack<>(); Dirent dirent = null; for (int i = entry.getIno();; i = dirent.getParentIno()) { diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/dump/TapeInputStream.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/dump/TapeInputStream.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/dump/TapeInputStream.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/dump/TapeInputStream.java 2016-12-25 11:57:03.000000000 +0000 @@ -38,7 +38,7 @@ private byte[] blockBuffer = new byte[DumpArchiveConstants.TP_SIZE]; private int currBlkIdx = -1; private int blockSize = DumpArchiveConstants.TP_SIZE; - private static final int recordSize = DumpArchiveConstants.TP_SIZE; + private static final int RECORD_SIZE = DumpArchiveConstants.TP_SIZE; private int readOffset = DumpArchiveConstants.TP_SIZE; private boolean isCompressed = false; private long bytesRead = 0; @@ -68,18 +68,18 @@ throws IOException { this.isCompressed = isCompressed; - blockSize = recordSize * recsPerBlock; + blockSize = RECORD_SIZE * recsPerBlock; // save first block in case we need it again final byte[] oldBuffer = blockBuffer; // read rest of new block blockBuffer = new byte[blockSize]; - System.arraycopy(oldBuffer, 0, blockBuffer, 0, recordSize); - readFully(blockBuffer, recordSize, blockSize - recordSize); + System.arraycopy(oldBuffer, 0, blockBuffer, 0, RECORD_SIZE); + readFully(blockBuffer, RECORD_SIZE, blockSize - RECORD_SIZE); this.currBlkIdx = 0; - this.readOffset = recordSize; + this.readOffset = RECORD_SIZE; } /** @@ -100,7 +100,7 @@ @Override public int read() throws IOException { throw new IllegalArgumentException( - "all reads must be multiple of record size (" + recordSize + + "all reads must be multiple of record size (" + RECORD_SIZE + " bytes."); } @@ -114,9 +114,9 @@ */ @Override public int read(final byte[] b, int off, final int len) throws IOException { - if ((len % recordSize) != 0) { + if ((len % RECORD_SIZE) != 0) { throw new IllegalArgumentException( - "all reads must be multiple of record size (" + recordSize + + "all reads must be multiple of record size (" + RECORD_SIZE + " bytes."); } @@ -160,9 +160,9 @@ */ @Override public long skip(final long len) throws IOException { - if ((len % recordSize) != 0) { + if ((len % RECORD_SIZE) != 0) { throw new IllegalArgumentException( - "all reads must be multiple of record size (" + recordSize + + "all reads must be multiple of record size (" + RECORD_SIZE + " bytes."); } @@ -182,10 +182,10 @@ if ((readOffset + (len - bytes)) <= blockSize) { // we can read entirely from the buffer. - n = len - bytes; + n = (long) len - bytes; } else { // copy what we can from the buffer. - n = blockSize - readOffset; + n = (long) blockSize - readOffset; } // do not copy data but still increment counters. @@ -223,7 +223,7 @@ } // copy data, increment counters. 
- final byte[] b = new byte[recordSize]; + final byte[] b = new byte[RECORD_SIZE]; System.arraycopy(blockBuffer, readOffset, b, 0, b.length); return b; @@ -236,7 +236,7 @@ * @throws IOException on error */ public byte[] readRecord() throws IOException { - final byte[] result = new byte[recordSize]; + final byte[] result = new byte[RECORD_SIZE]; // the read implementation will loop internally as long as // input is available diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/Lister.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/Lister.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/Lister.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/Lister.java 2016-12-25 11:57:03.000000000 +0000 @@ -39,25 +39,27 @@ usage(); return; } - System.out.println("Analysing "+args[0]); + System.out.println("Analysing " + args[0]); final File f = new File(args[0]); if (!f.isFile()) { System.err.println(f + " doesn't exist or is a directory"); } - final InputStream fis = new BufferedInputStream(new FileInputStream(f)); - ArchiveInputStream ais; - if (args.length > 1) { - ais = factory.createArchiveInputStream(args[1], fis); - } else { - ais = factory.createArchiveInputStream(fis); + try (final InputStream fis = new BufferedInputStream(new FileInputStream(f)); + final ArchiveInputStream ais = createArchiveInputStream(args, fis)) { + System.out.println("Created " + ais.toString()); + ArchiveEntry ae; + while ((ae = ais.getNextEntry()) != null) { + System.out.println(ae.getName()); + } } - System.out.println("Created "+ais.toString()); - ArchiveEntry ae; - while((ae=ais.getNextEntry()) != null){ - System.out.println(ae.getName()); + } + + private static ArchiveInputStream createArchiveInputStream(final String[] args, final InputStream fis) + throws ArchiveException { + if (args.length > 1) { + return factory.createArchiveInputStream(args[1], fis); } - ais.close(); - fis.close(); + return factory.createArchiveInputStream(fis); } private static void usage() { diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/sevenz/BoundedRandomAccessFileInputStream.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/sevenz/BoundedRandomAccessFileInputStream.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/sevenz/BoundedRandomAccessFileInputStream.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/sevenz/BoundedRandomAccessFileInputStream.java 1970-01-01 00:00:00.000000000 +0000 @@ -1,63 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - * - */ -package org.apache.commons.compress.archivers.sevenz; - -import java.io.IOException; -import java.io.InputStream; -import java.io.RandomAccessFile; - -class BoundedRandomAccessFileInputStream extends InputStream { - private final RandomAccessFile file; - private long bytesRemaining; - - public BoundedRandomAccessFileInputStream(final RandomAccessFile file, - final long size) { - this.file = file; - this.bytesRemaining = size; - } - - @Override - public int read() throws IOException { - if (bytesRemaining > 0) { - --bytesRemaining; - return file.read(); - } - return -1; - } - - @Override - public int read(final byte[] b, final int off, final int len) throws IOException { - if (bytesRemaining == 0) { - return -1; - } - int bytesToRead = len; - if (bytesToRead > bytesRemaining) { - bytesToRead = (int) bytesRemaining; - } - final int bytesRead = file.read(b, off, bytesToRead); - if (bytesRead >= 0) { - bytesRemaining -= bytesRead; - } - return bytesRead; - } - - @Override - public void close() { - // the nested RandomAccessFile is controlled externally - } -} diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/sevenz/BoundedSeekableByteChannelInputStream.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/sevenz/BoundedSeekableByteChannelInputStream.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/sevenz/BoundedSeekableByteChannelInputStream.java 1970-01-01 00:00:00.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/sevenz/BoundedSeekableByteChannelInputStream.java 2016-12-25 11:57:03.000000000 +0000 @@ -0,0 +1,92 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ +package org.apache.commons.compress.archivers.sevenz; + +import java.io.IOException; +import java.io.InputStream; +import java.nio.ByteBuffer; +import java.nio.channels.SeekableByteChannel; + +class BoundedSeekableByteChannelInputStream extends InputStream { + private static final int MAX_BUF_LEN = 8192; + private final ByteBuffer buffer; + private final SeekableByteChannel channel; + private long bytesRemaining; + + public BoundedSeekableByteChannelInputStream(final SeekableByteChannel channel, + final long size) { + this.channel = channel; + this.bytesRemaining = size; + if (size < MAX_BUF_LEN && size > 0) { + buffer = ByteBuffer.allocate((int) size); + } else { + buffer = ByteBuffer.allocate(MAX_BUF_LEN); + } + } + + @Override + public int read() throws IOException { + if (bytesRemaining > 0) { + --bytesRemaining; + int read = read(1); + if (read < 0) { + return read; + } + return buffer.get() & 0xff; + } + return -1; + } + + @Override + public int read(final byte[] b, final int off, final int len) throws IOException { + if (bytesRemaining == 0) { + return -1; + } + int bytesToRead = len; + if (bytesToRead > bytesRemaining) { + bytesToRead = (int) bytesRemaining; + } + int bytesRead; + ByteBuffer buf; + if (bytesToRead <= buffer.capacity()) { + buf = buffer; + bytesRead = read(bytesToRead); + } else { + buf = ByteBuffer.allocate(bytesToRead); + bytesRead = channel.read(buf); + buf.flip(); + } + if (bytesRead >= 0) { + buf.get(b, off, bytesRead); + bytesRemaining -= bytesRead; + } + return bytesRead; + } + + private int read(int len) throws IOException { + buffer.rewind().limit(len); + int read = channel.read(buffer); + buffer.flip(); + return read; + } + + @Override + public void close() { + // the nested channel is controlled externally + } +} diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/sevenz/CLI.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/sevenz/CLI.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/sevenz/CLI.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/sevenz/CLI.java 2016-12-25 11:57:03.000000000 +0000 @@ -82,8 +82,7 @@ if (parent != null && !parent.exists() && !parent.mkdirs()) { throw new IOException("Cannot create " + parent); } - final FileOutputStream fos = new FileOutputStream(outFile); - try { + try (final FileOutputStream fos = new FileOutputStream(outFile)) { final long total = entry.getSize(); long off = 0; while (off < total) { @@ -99,8 +98,6 @@ off += bytesRead; fos.write(BUF, 0, bytesRead); } - } finally { - fos.close(); } } }; @@ -127,14 +124,11 @@ if (!f.isFile()) { System.err.println(f + " doesn't exist or is a directory"); } - final SevenZFile archive = new SevenZFile(f); - try { + try (final SevenZFile archive = new SevenZFile(f)) { SevenZArchiveEntry ae; while((ae=archive.getNextEntry()) != null) { mode.takeAction(archive, ae); } - } finally { - archive.close(); } } diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/sevenz/CoderBase.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/sevenz/CoderBase.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/sevenz/CoderBase.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/sevenz/CoderBase.java 2016-12-25 
11:57:03.000000000 +0000 @@ -50,14 +50,14 @@ /** * @return property-bytes to write in a Folder block */ - byte[] getOptionsAsProperties(final Object options) { + byte[] getOptionsAsProperties(final Object options) throws IOException { return NONE; } /** * @return configuration options that have been used to create the given InputStream from the given Coder */ - Object getOptionsFromCoder(final Coder coder, final InputStream in) { + Object getOptionsFromCoder(final Coder coder, final InputStream in) throws IOException { return null; } diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/sevenz/Coders.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/sevenz/Coders.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/sevenz/Coders.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/sevenz/Coders.java 2016-12-25 11:57:03.000000000 +0000 @@ -18,7 +18,6 @@ package org.apache.commons.compress.archivers.sevenz; import java.io.FilterInputStream; -import java.io.FilterOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; @@ -32,13 +31,12 @@ import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream; import org.apache.commons.compress.compressors.bzip2.BZip2CompressorOutputStream; +import org.apache.commons.compress.utils.FlushShieldFilterOutputStream; import org.tukaani.xz.ARMOptions; import org.tukaani.xz.ARMThumbOptions; import org.tukaani.xz.FilterOptions; -import org.tukaani.xz.FinishableOutputStream; import org.tukaani.xz.FinishableWrapperOutputStream; import org.tukaani.xz.IA64Options; -import org.tukaani.xz.LZMAInputStream; import org.tukaani.xz.PowerPCOptions; import org.tukaani.xz.SPARCOptions; import org.tukaani.xz.X86Options; @@ -99,22 +97,6 @@ } } - static class LZMADecoder extends CoderBase { - @Override - InputStream decode(final String archiveName, final InputStream in, final long uncompressedLength, - final Coder coder, final byte[] password) throws IOException { - final byte propsByte = coder.properties[0]; - long dictSize = coder.properties[1]; - for (int i = 1; i < 4; i++) { - dictSize |= (coder.properties[i + 1] & 0xffl) << (8 * i); - } - if (dictSize > LZMAInputStream.DICT_SIZE_MAX) { - throw new IOException("Dictionary larger than 4GiB maximum size used in " + archiveName); - } - return new LZMAInputStream(in, uncompressedLength, propsByte, (int) dictSize); - } - } - static class BCJDecoder extends CoderBase { private final FilterOptions opts; BCJDecoder(final FilterOptions opts) { @@ -133,14 +115,11 @@ e); } } + + @SuppressWarnings("resource") @Override OutputStream encode(final OutputStream out, final Object options) { - final FinishableOutputStream fo = opts.getOutputStream(new FinishableWrapperOutputStream(out)); - return new FilterOutputStream(fo) { - @Override - public void flush() { - } - }; + return new FlushShieldFilterOutputStream(opts.getOutputStream(new FinishableWrapperOutputStream(out))); } } diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/sevenz/DeltaDecoder.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/sevenz/DeltaDecoder.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/sevenz/DeltaDecoder.java 2016-06-18 15:07:49.000000000 +0000 +++ 
libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/sevenz/DeltaDecoder.java 2016-12-25 11:57:03.000000000 +0000 @@ -35,6 +35,7 @@ return new DeltaOptions(getOptionsFromCoder(coder)).getInputStream(in); } + @SuppressWarnings("resource") @Override OutputStream encode(final OutputStream out, final Object options) throws IOException { final int distance = numberOptionOrDefault(options, 1); diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/sevenz/Folder.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/sevenz/Folder.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/sevenz/Folder.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/sevenz/Folder.java 2016-12-25 11:57:03.000000000 +0000 @@ -52,7 +52,7 @@ * from the output of the first and so on.

*/ Iterable getOrderedCoders() { - final LinkedList l = new LinkedList(); + final LinkedList l = new LinkedList<>(); int current = (int) packedStreams[0]; // more that 2^31 coders? while (current != -1) { l.addLast(coders[current]); diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/sevenz/LZMADecoder.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/sevenz/LZMADecoder.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/sevenz/LZMADecoder.java 1970-01-01 00:00:00.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/sevenz/LZMADecoder.java 2016-12-25 11:57:03.000000000 +0000 @@ -0,0 +1,102 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +package org.apache.commons.compress.archivers.sevenz; + +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; + +import org.apache.commons.compress.utils.FlushShieldFilterOutputStream; +import org.tukaani.xz.LZMA2Options; +import org.tukaani.xz.LZMAInputStream; +import org.tukaani.xz.LZMAOutputStream; + +class LZMADecoder extends CoderBase { + LZMADecoder() { + super(LZMA2Options.class, Number.class); + } + + @Override + InputStream decode(final String archiveName, final InputStream in, final long uncompressedLength, + final Coder coder, final byte[] password) throws IOException { + final byte propsByte = coder.properties[0]; + final int dictSize = getDictionarySize(coder); + if (dictSize > LZMAInputStream.DICT_SIZE_MAX) { + throw new IOException("Dictionary larger than 4GiB maximum size used in " + archiveName); + } + return new LZMAInputStream(in, uncompressedLength, propsByte, dictSize); + } + + @SuppressWarnings("resource") + @Override + OutputStream encode(final OutputStream out, final Object opts) + throws IOException { + // NOOP as LZMAOutputStream throws an exception in flush + return new FlushShieldFilterOutputStream(new LZMAOutputStream(out, getOptions(opts), false)); + } + + @Override + byte[] getOptionsAsProperties(final Object opts) throws IOException { + final LZMA2Options options = getOptions(opts); + final byte props = (byte) ((options.getPb() * 5 + options.getLp()) * 9 + options.getLc()); + int dictSize = options.getDictSize(); + return new byte[] { + props, + (byte) (dictSize & 0xff), + (byte) ((dictSize >> 8) & 0xff), + (byte) ((dictSize >> 16) & 0xff), + (byte) ((dictSize >> 24) & 0xff), + }; + } + + @Override + Object getOptionsFromCoder(final Coder coder, final InputStream in) throws IOException { + final byte propsByte = coder.properties[0]; + int props = propsByte & 0xFF; + int pb = props / (9 * 5); + props -= pb * 9 * 5; + int lp = props / 9; + int lc = props - lp * 9; + 
LZMA2Options opts = new LZMA2Options(); + opts.setPb(pb); + opts.setLcLp(lc, lp); + opts.setDictSize(getDictionarySize(coder)); + return opts; + } + + private int getDictionarySize(final Coder coder) throws IllegalArgumentException { + long dictSize = coder.properties[1]; + for (int i = 1; i < 4; i++) { + dictSize |= (coder.properties[i + 1] & 0xffl) << (8 * i); + } + return (int) dictSize; + } + + private LZMA2Options getOptions(final Object opts) throws IOException { + if (opts instanceof LZMA2Options) { + return (LZMA2Options) opts; + } + final LZMA2Options options = new LZMA2Options(); + options.setDictSize(numberOptionOrDefault(opts)); + return options; + } + + private int numberOptionOrDefault(final Object opts) { + return numberOptionOrDefault(opts, LZMA2Options.DICT_SIZE_DEFAULT); + } +} diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZArchiveEntry.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZArchiveEntry.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZArchiveEntry.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZArchiveEntry.java 2016-12-25 11:57:03.000000000 +0000 @@ -455,7 +455,7 @@ */ public void setContentMethods(final Iterable methods) { if (methods != null) { - final LinkedList l = new LinkedList(); + final LinkedList l = new LinkedList<>(); for (final SevenZMethodConfiguration m : methods) { l.addLast(m); } diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java 2016-12-25 11:57:03.000000000 +0000 @@ -20,15 +20,19 @@ import java.io.BufferedInputStream; import java.io.ByteArrayInputStream; import java.io.Closeable; -import java.io.DataInput; import java.io.DataInputStream; import java.io.File; import java.io.IOException; import java.io.InputStream; -import java.io.RandomAccessFile; +import java.nio.ByteBuffer; +import java.nio.ByteOrder; +import java.nio.channels.SeekableByteChannel; +import java.nio.file.Files; +import java.nio.file.StandardOpenOption; import java.util.ArrayList; import java.util.Arrays; import java.util.BitSet; +import java.util.EnumSet; import java.util.LinkedList; import java.util.zip.CRC32; @@ -38,7 +42,7 @@ import org.apache.commons.compress.utils.IOUtils; /** - * Reads a 7z file, using RandomAccessFile under + * Reads a 7z file, using SeekableByteChannel under * the covers. *

* The 7z file format is a flexible container @@ -55,7 +59,7 @@ * Hence the official tools recommend against * using it for backup purposes on *nix, and * recommend .tar.7z or .tar.lzma or .tar.xz - * instead. + * instead. *

* Both the header and file contents may be * compressed and/or encrypted. With both @@ -70,19 +74,20 @@ static final int SIGNATURE_HEADER_SIZE = 32; private final String fileName; - private RandomAccessFile file; + private SeekableByteChannel channel; private final Archive archive; private int currentEntryIndex = -1; private int currentFolderIndex = -1; private InputStream currentFolderInputStream = null; private byte[] password; - private final ArrayList deferredBlockStreams = new ArrayList(); + private final ArrayList deferredBlockStreams = new ArrayList<>(); - static final byte[] sevenZSignature = { + // shared with SevenZOutputFile and tests, neither mutates it + static final byte[] sevenZSignature = { //NOSONAR (byte)'7', (byte)'z', (byte)0xBC, (byte)0xAF, (byte)0x27, (byte)0x1C }; - + /** * Reads a file as 7z archive * @@ -93,9 +98,69 @@ * @throws IOException if reading the archive fails */ public SevenZFile(final File filename, final byte[] password) throws IOException { + this(Files.newByteChannel(filename.toPath(), EnumSet.of(StandardOpenOption.READ)), + filename.getAbsolutePath(), password, true); + } + + /** + * Reads a SeekableByteChannel as 7z archive + * + *

{@link + * org.apache.commons.compress.utils.SeekableInMemoryByteChannel} + * allows you to read from an in-memory archive.
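The channel-based constructor documented above is what lets 7z data be read without touching the file system. A minimal sketch of draining such an archive, assuming SeekableInMemoryByteChannel offers a byte[] constructor (that class is only referenced, not defined, in this diff):

    import java.io.IOException;
    import org.apache.commons.compress.archivers.sevenz.SevenZArchiveEntry;
    import org.apache.commons.compress.archivers.sevenz.SevenZFile;
    import org.apache.commons.compress.utils.SeekableInMemoryByteChannel;

    class InMemory7zRead {
        /** Drains every entry of an in-memory 7z archive and returns the number of content bytes seen. */
        static long drain(final byte[] archiveBytes) throws IOException {
            long total = 0;
            try (SevenZFile sevenZ = new SevenZFile(new SeekableInMemoryByteChannel(archiveBytes))) {
                final byte[] buf = new byte[8192];
                SevenZArchiveEntry entry;
                while ((entry = sevenZ.getNextEntry()) != null) {
                    if (entry.isDirectory()) {
                        continue;                          // directories carry no content stream
                    }
                    int n;
                    while ((n = sevenZ.read(buf)) != -1) { // read() operates on the current entry
                        total += n;
                    }
                }
            }
            return total;
        }
    }

The try-with-resources form matches the idiom the diff itself adopts elsewhere (for example in readStartHeader below).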

+ * + * @param channel the channel to read + * @throws IOException if reading the archive fails + * @since 1.13 + */ + public SevenZFile(final SeekableByteChannel channel) throws IOException { + this(channel, "unknown archive", null); + } + + /** + * Reads a SeekableByteChannel as 7z archive + * + *

{@link + * org.apache.commons.compress.utils.SeekableInMemoryByteChannel} + * allows you to read from an in-memory archive.
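For encrypted archives, the password-taking constructor whose javadoc appears above (its signature follows in this hunk) expects the raw password bytes; as the parameter documentation below notes, those bytes are the UTF-16LE encoding of the pass phrase. A small sketch of deriving them:

    import java.io.IOException;
    import java.nio.channels.SeekableByteChannel;
    import java.nio.charset.StandardCharsets;
    import org.apache.commons.compress.archivers.sevenz.SevenZFile;

    class Encrypted7zOpen {
        static SevenZFile open(final SeekableByteChannel channel, final String passPhrase) throws IOException {
            // The byte[] password is the UTF16-LE representation of the pass phrase.
            final byte[] password = passPhrase.getBytes(StandardCharsets.UTF_16LE);
            return new SevenZFile(channel, password);
        }
    }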

+ * + * @param channel the channel to read + * @param password optional password if the archive is encrypted - + * the byte array is supposed to be the UTF16-LE encoded + * representation of the password. + * @throws IOException if reading the archive fails + * @since 1.13 + */ + public SevenZFile(final SeekableByteChannel channel, + final byte[] password) throws IOException { + this(channel, "unknown archive", password); + } + + /** + * Reads a SeekableByteChannel as 7z archive + * + *

{@link + * org.apache.commons.compress.utils.SeekableInMemoryByteChannel} + * allows you to read from an in-memory archive.

+ * + * @param channel the channel to read + * @param filename name of the archive - only used for error reporting + * @param password optional password if the archive is encrypted - + * the byte array is supposed to be the UTF16-LE encoded + * representation of the password. + * @throws IOException if reading the archive fails + * @since 1.13 + */ + public SevenZFile(final SeekableByteChannel channel, String filename, + final byte[] password) throws IOException { + this(channel, filename, password, false); + } + + private SevenZFile(final SeekableByteChannel channel, String filename, + final byte[] password, boolean closeOnError) throws IOException { boolean succeeded = false; - this.file = new RandomAccessFile(filename, "r"); - this.fileName = filename.getAbsolutePath(); + this.channel = channel; + this.fileName = filename; try { archive = readHeaders(password); if (password != null) { @@ -106,12 +171,12 @@ } succeeded = true; } finally { - if (!succeeded) { - this.file.close(); + if (!succeeded && closeOnError) { + this.channel.close(); } } } - + /** * Reads a file as unencrypted 7z archive * @@ -128,11 +193,11 @@ */ @Override public void close() throws IOException { - if (file != null) { + if (channel != null) { try { - file.close(); + channel.close(); } finally { - file = null; + channel = null; if (password != null) { Arrays.fill(password, (byte) 0); } @@ -140,7 +205,7 @@ } } } - + /** * Returns the next Archive Entry in this archive. * @@ -157,7 +222,7 @@ buildDecodingStream(); return entry; } - + /** * Returns meta-data of all archive entries. * @@ -174,132 +239,126 @@ public Iterable getEntries() { return Arrays.asList(archive.files); } - + private Archive readHeaders(final byte[] password) throws IOException { + ByteBuffer buf = ByteBuffer.allocate(12 /* signature + 2 bytes version + 4 bytes CRC */) + .order(ByteOrder.LITTLE_ENDIAN); + readFully(buf); final byte[] signature = new byte[6]; - file.readFully(signature); + buf.get(signature); if (!Arrays.equals(signature, sevenZSignature)) { throw new IOException("Bad 7z signature"); } // 7zFormat.txt has it wrong - it's first major then minor - final byte archiveVersionMajor = file.readByte(); - final byte archiveVersionMinor = file.readByte(); + final byte archiveVersionMajor = buf.get(); + final byte archiveVersionMinor = buf.get(); if (archiveVersionMajor != 0) { throw new IOException(String.format("Unsupported 7z version (%d,%d)", archiveVersionMajor, archiveVersionMinor)); } - final long startHeaderCrc = 0xffffFFFFL & Integer.reverseBytes(file.readInt()); + final long startHeaderCrc = 0xffffFFFFL & buf.getInt(); final StartHeader startHeader = readStartHeader(startHeaderCrc); - + final int nextHeaderSizeInt = (int) startHeader.nextHeaderSize; if (nextHeaderSizeInt != startHeader.nextHeaderSize) { throw new IOException("cannot handle nextHeaderSize " + startHeader.nextHeaderSize); } - file.seek(SIGNATURE_HEADER_SIZE + startHeader.nextHeaderOffset); - final byte[] nextHeader = new byte[nextHeaderSizeInt]; - file.readFully(nextHeader); + channel.position(SIGNATURE_HEADER_SIZE + startHeader.nextHeaderOffset); + buf = ByteBuffer.allocate(nextHeaderSizeInt).order(ByteOrder.LITTLE_ENDIAN); + readFully(buf); final CRC32 crc = new CRC32(); - crc.update(nextHeader); + crc.update(buf.array()); if (startHeader.nextHeaderCrc != crc.getValue()) { throw new IOException("NextHeader CRC mismatch"); } - - final ByteArrayInputStream byteStream = new ByteArrayInputStream(nextHeader); - DataInputStream nextHeaderInputStream = new DataInputStream( - 
byteStream); + Archive archive = new Archive(); - int nid = nextHeaderInputStream.readUnsignedByte(); + int nid = getUnsignedByte(buf); if (nid == NID.kEncodedHeader) { - nextHeaderInputStream = - readEncodedHeader(nextHeaderInputStream, archive, password); + buf = readEncodedHeader(buf, archive, password); // Archive gets rebuilt with the new header archive = new Archive(); - nid = nextHeaderInputStream.readUnsignedByte(); + nid = getUnsignedByte(buf); } if (nid == NID.kHeader) { - readHeader(nextHeaderInputStream, archive); - nextHeaderInputStream.close(); + readHeader(buf, archive); } else { throw new IOException("Broken or unsupported archive: no Header"); } return archive; } - + private StartHeader readStartHeader(final long startHeaderCrc) throws IOException { final StartHeader startHeader = new StartHeader(); - DataInputStream dataInputStream = null; - try { - dataInputStream = new DataInputStream(new CRC32VerifyingInputStream( - new BoundedRandomAccessFileInputStream(file, 20), 20, startHeaderCrc)); + // using Stream rather than ByteBuffer for the benefit of the + // built-in CRC check + try (DataInputStream dataInputStream = new DataInputStream(new CRC32VerifyingInputStream( + new BoundedSeekableByteChannelInputStream(channel, 20), 20, startHeaderCrc))) { startHeader.nextHeaderOffset = Long.reverseBytes(dataInputStream.readLong()); startHeader.nextHeaderSize = Long.reverseBytes(dataInputStream.readLong()); startHeader.nextHeaderCrc = 0xffffFFFFL & Integer.reverseBytes(dataInputStream.readInt()); return startHeader; - } finally { - if (dataInputStream != null) { - dataInputStream.close(); - } } } - - private void readHeader(final DataInput header, final Archive archive) throws IOException { - int nid = header.readUnsignedByte(); - + + private void readHeader(final ByteBuffer header, final Archive archive) throws IOException { + int nid = getUnsignedByte(header); + if (nid == NID.kArchiveProperties) { readArchiveProperties(header); - nid = header.readUnsignedByte(); + nid = getUnsignedByte(header); } - + if (nid == NID.kAdditionalStreamsInfo) { throw new IOException("Additional streams unsupported"); //nid = header.readUnsignedByte(); } - + if (nid == NID.kMainStreamsInfo) { readStreamsInfo(header, archive); - nid = header.readUnsignedByte(); + nid = getUnsignedByte(header); } - + if (nid == NID.kFilesInfo) { readFilesInfo(header, archive); - nid = header.readUnsignedByte(); + nid = getUnsignedByte(header); } - + if (nid != NID.kEnd) { throw new IOException("Badly terminated header, found " + nid); } } - - private void readArchiveProperties(final DataInput input) throws IOException { + + private void readArchiveProperties(final ByteBuffer input) throws IOException { // FIXME: the reference implementation just throws them away? - int nid = input.readUnsignedByte(); + int nid = getUnsignedByte(input); while (nid != NID.kEnd) { final long propertySize = readUint64(input); final byte[] property = new byte[(int)propertySize]; - input.readFully(property); - nid = input.readUnsignedByte(); + input.get(property); + nid = getUnsignedByte(input); } } - - private DataInputStream readEncodedHeader(final DataInputStream header, final Archive archive, - final byte[] password) throws IOException { + + private ByteBuffer readEncodedHeader(final ByteBuffer header, final Archive archive, + final byte[] password) throws IOException { readStreamsInfo(header, archive); - + // FIXME: merge with buildDecodingStream()/buildDecoderStack() at some stage? 
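A recurring pattern in the hunks above is the replacement of big-endian DataInput reads followed by Integer.reverseBytes()/Long.reverseBytes() with a ByteBuffer that is switched to little-endian order once, after which getInt()/getLong() return the values directly. A self-contained illustration of that equivalence (not library code):

    import java.io.ByteArrayInputStream;
    import java.io.DataInputStream;
    import java.io.IOException;
    import java.nio.ByteBuffer;
    import java.nio.ByteOrder;

    class LittleEndianEquivalence {
        public static void main(final String[] args) throws IOException {
            final byte[] raw = { 0x78, 0x56, 0x34, 0x12 };   // little-endian encoding of 0x12345678
            // Old style: DataInput reads big-endian, so every value had to be byte-swapped afterwards.
            final int viaDataInput = Integer.reverseBytes(
                    new DataInputStream(new ByteArrayInputStream(raw)).readInt());
            // New style: order the buffer once, then read values directly.
            final int viaBuffer = ByteBuffer.wrap(raw).order(ByteOrder.LITTLE_ENDIAN).getInt();
            System.out.println(viaDataInput == viaBuffer && viaBuffer == 0x12345678);  // prints true
        }
    }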
final Folder folder = archive.folders[0]; final int firstPackStreamIndex = 0; final long folderOffset = SIGNATURE_HEADER_SIZE + archive.packPos + 0; - - file.seek(folderOffset); - InputStream inputStreamStack = new BoundedRandomAccessFileInputStream(file, + + channel.position(folderOffset); + InputStream inputStreamStack = new BoundedSeekableByteChannelInputStream(channel, archive.packSizes[firstPackStreamIndex]); for (final Coder coder : folder.getOrderedCoders()) { if (coder.numInStreams != 1 || coder.numOutStreams != 1) { throw new IOException("Multi input/output stream coders are not yet supported"); } - inputStreamStack = Coders.addDecoder(fileName, inputStreamStack, + inputStreamStack = Coders.addDecoder(fileName, inputStreamStack, //NOSONAR folder.getUnpackSizeForCoder(coder), coder, password); } if (folder.hasCrc) { @@ -307,87 +366,84 @@ folder.getUnpackSize(), folder.crc); } final byte[] nextHeader = new byte[(int)folder.getUnpackSize()]; - final DataInputStream nextHeaderInputStream = new DataInputStream(inputStreamStack); - try { + try (DataInputStream nextHeaderInputStream = new DataInputStream(inputStreamStack)) { nextHeaderInputStream.readFully(nextHeader); - } finally { - nextHeaderInputStream.close(); } - return new DataInputStream(new ByteArrayInputStream(nextHeader)); + return ByteBuffer.wrap(nextHeader).order(ByteOrder.LITTLE_ENDIAN); } - - private void readStreamsInfo(final DataInput header, final Archive archive) throws IOException { - int nid = header.readUnsignedByte(); - + + private void readStreamsInfo(final ByteBuffer header, final Archive archive) throws IOException { + int nid = getUnsignedByte(header); + if (nid == NID.kPackInfo) { readPackInfo(header, archive); - nid = header.readUnsignedByte(); + nid = getUnsignedByte(header); } - + if (nid == NID.kUnpackInfo) { readUnpackInfo(header, archive); - nid = header.readUnsignedByte(); + nid = getUnsignedByte(header); } else { // archive without unpack/coders info archive.folders = new Folder[0]; } - + if (nid == NID.kSubStreamsInfo) { readSubStreamsInfo(header, archive); - nid = header.readUnsignedByte(); + nid = getUnsignedByte(header); } - + if (nid != NID.kEnd) { throw new IOException("Badly terminated StreamsInfo"); } } - - private void readPackInfo(final DataInput header, final Archive archive) throws IOException { + + private void readPackInfo(final ByteBuffer header, final Archive archive) throws IOException { archive.packPos = readUint64(header); final long numPackStreams = readUint64(header); - int nid = header.readUnsignedByte(); + int nid = getUnsignedByte(header); if (nid == NID.kSize) { archive.packSizes = new long[(int)numPackStreams]; for (int i = 0; i < archive.packSizes.length; i++) { archive.packSizes[i] = readUint64(header); } - nid = header.readUnsignedByte(); + nid = getUnsignedByte(header); } - + if (nid == NID.kCRC) { archive.packCrcsDefined = readAllOrBits(header, (int)numPackStreams); archive.packCrcs = new long[(int)numPackStreams]; for (int i = 0; i < (int)numPackStreams; i++) { if (archive.packCrcsDefined.get(i)) { - archive.packCrcs[i] = 0xffffFFFFL & Integer.reverseBytes(header.readInt()); + archive.packCrcs[i] = 0xffffFFFFL & header.getInt(); } } - - nid = header.readUnsignedByte(); + + nid = getUnsignedByte(header); } - + if (nid != NID.kEnd) { throw new IOException("Badly terminated PackInfo (" + nid + ")"); } } - - private void readUnpackInfo(final DataInput header, final Archive archive) throws IOException { - int nid = header.readUnsignedByte(); + + private void 
readUnpackInfo(final ByteBuffer header, final Archive archive) throws IOException { + int nid = getUnsignedByte(header); if (nid != NID.kFolder) { throw new IOException("Expected kFolder, got " + nid); } final long numFolders = readUint64(header); final Folder[] folders = new Folder[(int)numFolders]; archive.folders = folders; - final int external = header.readUnsignedByte(); + final int external = getUnsignedByte(header); if (external != 0) { throw new IOException("External unsupported"); } for (int i = 0; i < (int)numFolders; i++) { folders[i] = readFolder(header); } - - nid = header.readUnsignedByte(); + + nid = getUnsignedByte(header); if (nid != NID.kCodersUnpackSize) { throw new IOException("Expected kCodersUnpackSize, got " + nid); } @@ -397,34 +453,34 @@ folder.unpackSizes[i] = readUint64(header); } } - - nid = header.readUnsignedByte(); + + nid = getUnsignedByte(header); if (nid == NID.kCRC) { final BitSet crcsDefined = readAllOrBits(header, (int)numFolders); for (int i = 0; i < (int)numFolders; i++) { if (crcsDefined.get(i)) { folders[i].hasCrc = true; - folders[i].crc = 0xffffFFFFL & Integer.reverseBytes(header.readInt()); + folders[i].crc = 0xffffFFFFL & header.getInt(); } else { folders[i].hasCrc = false; } } - - nid = header.readUnsignedByte(); + + nid = getUnsignedByte(header); } - + if (nid != NID.kEnd) { throw new IOException("Badly terminated UnpackInfo"); } } - - private void readSubStreamsInfo(final DataInput header, final Archive archive) throws IOException { + + private void readSubStreamsInfo(final ByteBuffer header, final Archive archive) throws IOException { for (final Folder folder : archive.folders) { folder.numUnpackSubStreams = 1; } int totalUnpackStreams = archive.folders.length; - - int nid = header.readUnsignedByte(); + + int nid = getUnsignedByte(header); if (nid == NID.kNumUnpackStream) { totalUnpackStreams = 0; for (final Folder folder : archive.folders) { @@ -432,14 +488,14 @@ folder.numUnpackSubStreams = (int)numStreams; totalUnpackStreams += numStreams; } - nid = header.readUnsignedByte(); + nid = getUnsignedByte(header); } - + final SubStreamsInfo subStreamsInfo = new SubStreamsInfo(); subStreamsInfo.unpackSizes = new long[totalUnpackStreams]; subStreamsInfo.hasCrc = new BitSet(totalUnpackStreams); subStreamsInfo.crcs = new long[totalUnpackStreams]; - + int nextUnpackStream = 0; for (final Folder folder : archive.folders) { if (folder.numUnpackSubStreams == 0) { @@ -456,22 +512,22 @@ subStreamsInfo.unpackSizes[nextUnpackStream++] = folder.getUnpackSize() - sum; } if (nid == NID.kSize) { - nid = header.readUnsignedByte(); + nid = getUnsignedByte(header); } - + int numDigests = 0; for (final Folder folder : archive.folders) { if (folder.numUnpackSubStreams != 1 || !folder.hasCrc) { numDigests += folder.numUnpackSubStreams; } } - + if (nid == NID.kCRC) { final BitSet hasMissingCrc = readAllOrBits(header, numDigests); final long[] missingCrcs = new long[numDigests]; for (int i = 0; i < numDigests; i++) { if (hasMissingCrc.get(i)) { - missingCrcs[i] = 0xffffFFFFL & Integer.reverseBytes(header.readInt()); + missingCrcs[i] = 0xffffFFFFL & header.getInt(); } } int nextCrc = 0; @@ -490,34 +546,34 @@ } } } - - nid = header.readUnsignedByte(); + + nid = getUnsignedByte(header); } - + if (nid != NID.kEnd) { throw new IOException("Badly terminated SubStreamsInfo"); } - + archive.subStreamsInfo = subStreamsInfo; } - - private Folder readFolder(final DataInput header) throws IOException { + + private Folder readFolder(final ByteBuffer header) throws IOException { 
final Folder folder = new Folder(); - + final long numCoders = readUint64(header); final Coder[] coders = new Coder[(int)numCoders]; long totalInStreams = 0; long totalOutStreams = 0; for (int i = 0; i < coders.length; i++) { coders[i] = new Coder(); - final int bits = header.readUnsignedByte(); + final int bits = getUnsignedByte(header); final int idSize = bits & 0xf; final boolean isSimple = (bits & 0x10) == 0; final boolean hasAttributes = (bits & 0x20) != 0; final boolean moreAlternativeMethods = (bits & 0x80) != 0; - + coders[i].decompressionMethodId = new byte[idSize]; - header.readFully(coders[i].decompressionMethodId); + header.get(coders[i].decompressionMethodId); if (isSimple) { coders[i].numInStreams = 1; coders[i].numOutStreams = 1; @@ -530,7 +586,7 @@ if (hasAttributes) { final long propertiesSize = readUint64(header); coders[i].properties = new byte[(int)propertiesSize]; - header.readFully(coders[i].properties); + header.get(coders[i].properties); } // would need to keep looping as above: while (moreAlternativeMethods) { @@ -541,7 +597,7 @@ folder.coders = coders; folder.totalInputStreams = totalInStreams; folder.totalOutputStreams = totalOutStreams; - + if (totalOutStreams == 0) { throw new IOException("Total output streams can't be 0"); } @@ -553,7 +609,7 @@ bindPairs[i].outIndex = readUint64(header); } folder.bindPairs = bindPairs; - + if (totalInStreams < numBindPairs) { throw new IOException("Total input streams can't be less than the number of bind pairs"); } @@ -576,12 +632,12 @@ } } folder.packedStreams = packedStreams; - + return folder; } - - private BitSet readAllOrBits(final DataInput header, final int size) throws IOException { - final int areAllDefined = header.readUnsignedByte(); + + private BitSet readAllOrBits(final ByteBuffer header, final int size) throws IOException { + final int areAllDefined = getUnsignedByte(header); final BitSet bits; if (areAllDefined != 0) { bits = new BitSet(size); @@ -593,23 +649,23 @@ } return bits; } - - private BitSet readBits(final DataInput header, final int size) throws IOException { + + private BitSet readBits(final ByteBuffer header, final int size) throws IOException { final BitSet bits = new BitSet(size); int mask = 0; int cache = 0; for (int i = 0; i < size; i++) { if (mask == 0) { mask = 0x80; - cache = header.readUnsignedByte(); + cache = getUnsignedByte(header); } bits.set(i, (cache & mask) != 0); mask >>>= 1; } return bits; } - - private void readFilesInfo(final DataInput header, final Archive archive) throws IOException { + + private void readFilesInfo(final ByteBuffer header, final Archive archive) throws IOException { final long numFiles = readUint64(header); final SevenZArchiveEntry[] files = new SevenZArchiveEntry[(int)numFiles]; for (int i = 0; i < files.length; i++) { @@ -619,7 +675,7 @@ BitSet isEmptyFile = null; BitSet isAnti = null; while (true) { - final int propertyType = header.readUnsignedByte(); + final int propertyType = getUnsignedByte(header); if (propertyType == 0) { break; } @@ -644,7 +700,7 @@ break; } case NID.kName: { - final int external = header.readUnsignedByte(); + final int external = getUnsignedByte(header); if (external != 0) { throw new IOException("Not implemented"); } @@ -652,7 +708,7 @@ throw new IOException("File names length invalid"); } final byte[] names = new byte[(int)(size - 1)]; - header.readFully(names); + header.get(names); int nextFile = 0; int nextName = 0; for (int i = 0; i < names.length; i += 2) { @@ -668,56 +724,56 @@ } case NID.kCTime: { final BitSet timesDefined = 
readAllOrBits(header, files.length); - final int external = header.readUnsignedByte(); + final int external = getUnsignedByte(header); if (external != 0) { throw new IOException("Unimplemented"); } for (int i = 0; i < files.length; i++) { files[i].setHasCreationDate(timesDefined.get(i)); if (files[i].getHasCreationDate()) { - files[i].setCreationDate(Long.reverseBytes(header.readLong())); + files[i].setCreationDate(header.getLong()); } } break; } case NID.kATime: { final BitSet timesDefined = readAllOrBits(header, files.length); - final int external = header.readUnsignedByte(); + final int external = getUnsignedByte(header); if (external != 0) { throw new IOException("Unimplemented"); } for (int i = 0; i < files.length; i++) { files[i].setHasAccessDate(timesDefined.get(i)); if (files[i].getHasAccessDate()) { - files[i].setAccessDate(Long.reverseBytes(header.readLong())); + files[i].setAccessDate(header.getLong()); } } break; } case NID.kMTime: { final BitSet timesDefined = readAllOrBits(header, files.length); - final int external = header.readUnsignedByte(); + final int external = getUnsignedByte(header); if (external != 0) { throw new IOException("Unimplemented"); } for (int i = 0; i < files.length; i++) { files[i].setHasLastModifiedDate(timesDefined.get(i)); if (files[i].getHasLastModifiedDate()) { - files[i].setLastModifiedDate(Long.reverseBytes(header.readLong())); + files[i].setLastModifiedDate(header.getLong()); } } break; } case NID.kWinAttributes: { final BitSet attributesDefined = readAllOrBits(header, files.length); - final int external = header.readUnsignedByte(); + final int external = getUnsignedByte(header); if (external != 0) { throw new IOException("Unimplemented"); } for (int i = 0; i < files.length; i++) { files[i].setHasWindowsAttributes(attributesDefined.get(i)); if (files[i].getHasWindowsAttributes()) { - files[i].setWindowsAttributes(Integer.reverseBytes(header.readInt())); + files[i].setWindowsAttributes(header.getInt()); } } break; @@ -728,7 +784,7 @@ case NID.kDummy: { // 7z 9.20 asserts the content is all zeros and ignores the property // Compress up to 1.8.1 would throw an exception, now we ignore it (see COMPRESS-287 - + if (skipBytesFully(header, size) < size) { throw new IOException("Incomplete kDummy property"); } @@ -766,10 +822,10 @@ archive.files = files; calculateStreamMap(archive); } - + private void calculateStreamMap(final Archive archive) throws IOException { final StreamMap streamMap = new StreamMap(); - + int nextFolderPackStreamIndex = 0; final int numFolders = archive.folders != null ? archive.folders.length : 0; streamMap.folderFirstPackStreamIndex = new int[numFolders]; @@ -777,7 +833,7 @@ streamMap.folderFirstPackStreamIndex[i] = nextFolderPackStreamIndex; nextFolderPackStreamIndex += archive.folders[i].packedStreams.length; } - + long nextPackStreamOffset = 0; final int numPackSizes = archive.packSizes != null ? 
archive.packSizes.length : 0; streamMap.packStreamOffsets = new long[numPackSizes]; @@ -785,7 +841,7 @@ streamMap.packStreamOffsets[i] = nextPackStreamOffset; nextPackStreamOffset += archive.packSizes[i]; } - + streamMap.folderFirstFileIndex = new int[numFolders]; streamMap.fileFolderIndex = new int[archive.files.length]; int nextFolderIndex = 0; @@ -816,10 +872,10 @@ nextFolderUnpackStreamIndex = 0; } } - + archive.streamMap = streamMap; } - + private void buildDecodingStream() throws IOException { final int folderIndex = archive.streamMap.fileFolderIndex[currentEntryIndex]; if (folderIndex < 0) { @@ -845,7 +901,7 @@ currentFolderInputStream.close(); currentFolderInputStream = null; } - + final Folder folder = archive.folders[folderIndex]; final int firstPackStreamIndex = archive.streamMap.folderFirstPackStreamIndex[folderIndex]; final long folderOffset = SIGNATURE_HEADER_SIZE + archive.packPos + @@ -857,18 +913,18 @@ if (file.getHasCrc()) { fileStream = new CRC32VerifyingInputStream(fileStream, file.getSize(), file.getCrcValue()); } - + deferredBlockStreams.add(fileStream); } private InputStream buildDecoderStack(final Folder folder, final long folderOffset, final int firstPackStreamIndex, final SevenZArchiveEntry entry) throws IOException { - file.seek(folderOffset); + channel.position(folderOffset); InputStream inputStreamStack = new BufferedInputStream( - new BoundedRandomAccessFileInputStream(file, + new BoundedSeekableByteChannelInputStream(channel, archive.packSizes[firstPackStreamIndex])); - final LinkedList methods = new LinkedList(); + final LinkedList methods = new LinkedList<>(); for (final Coder coder : folder.getOrderedCoders()) { if (coder.numInStreams != 1 || coder.numOutStreams != 1) { throw new IOException("Multi input/output stream coders are not yet supported"); @@ -886,7 +942,7 @@ } return inputStreamStack; } - + /** * Reads a byte of data. * @@ -897,7 +953,7 @@ public int read() throws IOException { return getCurrentStream().read(); } - + private InputStream getCurrentStream() throws IOException { if (archive.files[currentEntryIndex].getSize() == 0) { return new ByteArrayInputStream(new byte[0]); @@ -905,14 +961,14 @@ if (deferredBlockStreams.isEmpty()) { throw new IllegalStateException("No current 7z entry (call getNextEntry() first)."); } - + while (deferredBlockStreams.size() > 1) { // In solid compression mode we need to decompress all leading folder' // streams to get access to an entry. We defer this until really needed // so that entire blocks can be skipped without wasting time for decompression. - final InputStream stream = deferredBlockStreams.remove(0); - IOUtils.skip(stream, Long.MAX_VALUE); - stream.close(); + try (final InputStream stream = deferredBlockStreams.remove(0)) { + IOUtils.skip(stream, Long.MAX_VALUE); + } } return deferredBlockStreams.get(0); @@ -929,7 +985,7 @@ public int read(final byte[] b) throws IOException { return read(b, 0, b.length); } - + /** * Reads data into an array of bytes. 
* @@ -943,23 +999,27 @@ public int read(final byte[] b, final int off, final int len) throws IOException { return getCurrentStream().read(b, off, len); } - - private static long readUint64(final DataInput in) throws IOException { + + private static long readUint64(final ByteBuffer in) throws IOException { // long rather than int as it might get shifted beyond the range of an int - final long firstByte = in.readUnsignedByte(); + final long firstByte = getUnsignedByte(in); int mask = 0x80; long value = 0; for (int i = 0; i < 8; i++) { if ((firstByte & mask) == 0) { return value | ((firstByte & (mask - 1)) << (8 * i)); } - final long nextByte = in.readUnsignedByte(); + final long nextByte = getUnsignedByte(in); value |= nextByte << (8 * i); mask >>>= 1; } return value; } + private static int getUnsignedByte(ByteBuffer buf) { + return buf.get() & 0xff; + } + /** * Checks if the signature matches what is expected for a 7z file. * @@ -983,30 +1043,25 @@ return true; } - private static long skipBytesFully(final DataInput input, long bytesToSkip) throws IOException { + private static long skipBytesFully(final ByteBuffer input, long bytesToSkip) throws IOException { if (bytesToSkip < 1) { return 0; } - long skipped = 0; - while (bytesToSkip > Integer.MAX_VALUE) { - final long skippedNow = skipBytesFully(input, Integer.MAX_VALUE); - if (skippedNow == 0) { - return skipped; - } - skipped += skippedNow; - bytesToSkip -= skippedNow; - } - while (bytesToSkip > 0) { - final int skippedNow = input.skipBytes((int) bytesToSkip); - if (skippedNow == 0) { - return skipped; - } - skipped += skippedNow; - bytesToSkip -= skippedNow; + int current = input.position(); + int maxSkip = input.remaining(); + if (maxSkip < bytesToSkip) { + bytesToSkip = maxSkip; } - return skipped; + input.position(current + (int) bytesToSkip); + return bytesToSkip; } - + + private void readFully(ByteBuffer buf) throws IOException { + buf.rewind(); + IOUtils.readFully(channel, buf); + buf.flip(); + } + @Override public String toString() { return archive.toString(); diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZOutputFile.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZOutputFile.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZOutputFile.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZOutputFile.java 2016-12-25 11:57:03.000000000 +0000 @@ -24,11 +24,16 @@ import java.io.File; import java.io.IOException; import java.io.OutputStream; -import java.io.RandomAccessFile; +import java.nio.ByteBuffer; +import java.nio.ByteOrder; +import java.nio.channels.SeekableByteChannel; +import java.nio.file.Files; +import java.nio.file.StandardOpenOption; import java.util.ArrayList; import java.util.BitSet; import java.util.Collections; import java.util.Date; +import java.util.EnumSet; import java.util.HashMap; import java.util.List; import java.util.LinkedList; @@ -43,8 +48,8 @@ * @since 1.6 */ public class SevenZOutputFile implements Closeable { - private final RandomAccessFile file; - private final List files = new ArrayList(); + private final SeekableByteChannel channel; + private final List files = new ArrayList<>(); private int numNonEmptyStreams = 0; private final CRC32 crc32 = new CRC32(); private final CRC32 compressedCrc32 = new CRC32(); @@ -54,19 +59,36 @@ private CountingOutputStream[] 
additionalCountingStreams; private Iterable contentMethods = Collections.singletonList(new SevenZMethodConfiguration(SevenZMethod.LZMA2)); - private final Map additionalSizes = new HashMap(); - + private final Map additionalSizes = new HashMap<>(); + /** * Opens file to write a 7z archive to. * - * @param filename name of the file to write to + * @param filename the file to write to * @throws IOException if opening the file fails */ public SevenZOutputFile(final File filename) throws IOException { - file = new RandomAccessFile(filename, "rw"); - file.seek(SevenZFile.SIGNATURE_HEADER_SIZE); + this(Files.newByteChannel(filename.toPath(), + EnumSet.of(StandardOpenOption.CREATE, StandardOpenOption.WRITE, + StandardOpenOption.TRUNCATE_EXISTING))); } - + + /** + * Prepares channel to write a 7z archive to. + * + *

{@link + * org.apache.commons.compress.utils.SeekableInMemoryByteChannel} + * allows you to write to an in-memory archive.
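Mirroring the read side, the constructor documented above lets SevenZOutputFile write to any seekable channel. A minimal sketch that assembles a one-entry archive entirely in memory; the no-arg constructor and the array() accessor of SeekableInMemoryByteChannel are assumptions, since that class is only referenced here:

    import java.io.IOException;
    import java.nio.charset.StandardCharsets;
    import org.apache.commons.compress.archivers.sevenz.SevenZArchiveEntry;
    import org.apache.commons.compress.archivers.sevenz.SevenZOutputFile;
    import org.apache.commons.compress.utils.SeekableInMemoryByteChannel;

    class InMemory7zWrite {
        /** Builds a one-entry 7z archive in memory and returns the backing buffer. */
        static byte[] build() throws IOException {
            final SeekableInMemoryByteChannel channel = new SeekableInMemoryByteChannel();
            try (SevenZOutputFile out = new SevenZOutputFile(channel)) {
                final SevenZArchiveEntry entry = new SevenZArchiveEntry();
                entry.setName("hello.txt");
                out.putArchiveEntry(entry);
                out.write("Hello, 7z".getBytes(StandardCharsets.UTF_8));
                out.closeArchiveEntry();
                out.finish();        // writes the headers; close() would call this implicitly
            }
            // array() exposes the backing buffer, which may be larger than the archive written.
            return channel.array();
        }
    }

Since the LZMADecoder hunk earlier adds an encode path, the default LZMA2 content method could presumably also be switched to LZMA through the compression-method setter documented above in this file.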

+ * + * @param channel the channel to write to + * @throws IOException if the channel cannot be positioned properly + * @since 1.13 + */ + public SevenZOutputFile(final SeekableByteChannel channel) throws IOException { + this.channel = channel; + channel.position(SevenZFile.SIGNATURE_HEADER_SIZE); + } + /** * Sets the default compression method to use for entry contents - the * default is LZMA2. @@ -111,9 +133,9 @@ if (!finished) { finish(); } - file.close(); + channel.close(); } - + /** * Create an archive entry using the inputFile and entryName provided. * @@ -145,7 +167,7 @@ final SevenZArchiveEntry entry = (SevenZArchiveEntry) archiveEntry; files.add(entry); } - + /** * Closes the archive entry. * @throws IOException on error @@ -157,10 +179,10 @@ } final SevenZArchiveEntry entry = files.get(files.size() - 1); - if (fileBytesWritten > 0) { + if (fileBytesWritten > 0) { // this implies currentOutputStream != null entry.setHasStream(true); ++numNonEmptyStreams; - entry.setSize(currentOutputStream.getBytesWritten()); + entry.setSize(currentOutputStream.getBytesWritten()); //NOSONAR entry.setCompressedSize(fileBytesWritten); entry.setCrcValue(crc32.getValue()); entry.setCompressedCrcValue(compressedCrc32.getValue()); @@ -193,7 +215,7 @@ public void write(final int b) throws IOException { getCurrentOutputStream().write(b); } - + /** * Writes a byte array to the current archive entry. * @param b The byte array to be written. @@ -202,7 +224,7 @@ public void write(final byte[] b) throws IOException { write(b, 0, b.length); } - + /** * Writes part of a byte array to the current archive entry. * @param b The byte array to be written. @@ -215,7 +237,7 @@ getCurrentOutputStream().write(b, off, len); } } - + /** * Finishes the addition of entries to this archive, without closing it. 
* @@ -226,42 +248,47 @@ throw new IOException("This archive has already been finished"); } finished = true; - - final long headerPosition = file.getFilePointer(); - + + final long headerPosition = channel.position(); + final ByteArrayOutputStream headerBaos = new ByteArrayOutputStream(); final DataOutputStream header = new DataOutputStream(headerBaos); - + writeHeader(header); header.flush(); final byte[] headerBytes = headerBaos.toByteArray(); - file.write(headerBytes); - + channel.write(ByteBuffer.wrap(headerBytes)); + final CRC32 crc32 = new CRC32(); - + crc32.update(headerBytes); + + ByteBuffer bb = ByteBuffer.allocate(SevenZFile.sevenZSignature.length + + 2 /* version */ + + 4 /* start header CRC */ + + 8 /* next header position */ + + 8 /* next header length */ + + 4 /* next header CRC */) + .order(ByteOrder.LITTLE_ENDIAN); // signature header - file.seek(0); - file.write(SevenZFile.sevenZSignature); + channel.position(0); + bb.put(SevenZFile.sevenZSignature); // version - file.write(0); - file.write(2); - + bb.put((byte) 0).put((byte) 2); + + // placeholder for start header CRC + bb.putInt(0); + // start header - final ByteArrayOutputStream startHeaderBaos = new ByteArrayOutputStream(); - final DataOutputStream startHeaderStream = new DataOutputStream(startHeaderBaos); - startHeaderStream.writeLong(Long.reverseBytes(headerPosition - SevenZFile.SIGNATURE_HEADER_SIZE)); - startHeaderStream.writeLong(Long.reverseBytes(0xffffFFFFL & headerBytes.length)); + bb.putLong(headerPosition - SevenZFile.SIGNATURE_HEADER_SIZE) + .putLong(0xffffFFFFL & headerBytes.length) + .putInt((int) crc32.getValue()); crc32.reset(); - crc32.update(headerBytes); - startHeaderStream.writeInt(Integer.reverseBytes((int)crc32.getValue())); - startHeaderStream.flush(); - final byte[] startHeaderBytes = startHeaderBaos.toByteArray(); - crc32.reset(); - crc32.update(startHeaderBytes); - file.writeInt(Integer.reverseBytes((int) crc32.getValue())); - file.write(startHeaderBytes); + crc32.update(bb.array(), SevenZFile.sevenZSignature.length + 6, 20); + bb.putInt(SevenZFile.sevenZSignature.length + 2, (int) crc32.getValue()); + bb.flip(); + channel.write(bb); } - + /* * Creation of output stream is deferred until data is actually * written as some codecs might write header information even for @@ -280,7 +307,7 @@ } OutputStream out = new OutputStreamWrapper(); - final ArrayList moreStreams = new ArrayList(); + final ArrayList moreStreams = new ArrayList<>(); boolean first = true; for (final SevenZMethodConfiguration m : getContentMethods(files.get(files.size() - 1))) { if (!first) { @@ -300,13 +327,13 @@ super.write(b); crc32.update(b); } - + @Override public void write(final byte[] b) throws IOException { super.write(b); crc32.update(b); } - + @Override public void write(final byte[] b, final int off, final int len) throws IOException { @@ -323,37 +350,37 @@ private void writeHeader(final DataOutput header) throws IOException { header.write(NID.kHeader); - + header.write(NID.kMainStreamsInfo); writeStreamsInfo(header); writeFilesInfo(header); header.write(NID.kEnd); } - + private void writeStreamsInfo(final DataOutput header) throws IOException { if (numNonEmptyStreams > 0) { writePackInfo(header); writeUnpackInfo(header); } - + writeSubStreamsInfo(header); - + header.write(NID.kEnd); } - + private void writePackInfo(final DataOutput header) throws IOException { header.write(NID.kPackInfo); - + writeUint64(header, 0); writeUint64(header, 0xffffFFFFL & numNonEmptyStreams); - + header.write(NID.kSize); for (final 
SevenZArchiveEntry entry : files) { if (entry.hasStream()) { writeUint64(header, entry.getCompressedSize()); } } - + header.write(NID.kCRC); header.write(1); // "allAreDefined" == true for (final SevenZArchiveEntry entry : files) { @@ -361,13 +388,13 @@ header.writeInt(Integer.reverseBytes((int) entry.getCompressedCrcValue())); } } - + header.write(NID.kEnd); } - + private void writeUnpackInfo(final DataOutput header) throws IOException { header.write(NID.kUnpackInfo); - + header.write(NID.kFolder); writeUint64(header, numNonEmptyStreams); header.write(0); @@ -389,7 +416,7 @@ writeUint64(header, entry.getSize()); } } - + header.write(NID.kCRC); header.write(1); // "allAreDefined" == true for (final SevenZArchiveEntry entry : files) { @@ -397,10 +424,10 @@ header.writeInt(Integer.reverseBytes((int) entry.getCrcValue())); } } - + header.write(NID.kEnd); } - + private void writeFolder(final DataOutput header, final SevenZArchiveEntry entry) throws IOException { final ByteArrayOutputStream bos = new ByteArrayOutputStream(); int numCoders = 0; @@ -411,7 +438,7 @@ writeUint64(header, numCoders); header.write(bos.toByteArray()); - for (int i = 0; i < numCoders - 1; i++) { + for (long i = 0; i < numCoders - 1; i++) { writeUint64(header, i + 1); writeUint64(header, i); } @@ -434,10 +461,10 @@ bos.write(properties); } } - + private void writeSubStreamsInfo(final DataOutput header) throws IOException { header.write(NID.kSubStreamsInfo); -// +// // header.write(NID.kCRC); // header.write(1); // for (final SevenZArchiveEntry entry : files) { @@ -445,13 +472,13 @@ // header.writeInt(Integer.reverseBytes(entry.getCrc())); // } // } -// +// header.write(NID.kEnd); } - + private void writeFilesInfo(final DataOutput header) throws IOException { header.write(NID.kFilesInfo); - + writeUint64(header, files.size()); writeFileEmptyStreams(header); @@ -464,7 +491,7 @@ writeFileWindowsAttributes(header); header.write(NID.kEnd); } - + private void writeFileEmptyStreams(final DataOutput header) throws IOException { boolean hasEmptyStreams = false; for (final SevenZArchiveEntry entry : files) { @@ -488,7 +515,7 @@ header.write(contents); } } - + private void writeFileEmptyFiles(final DataOutput header) throws IOException { boolean hasEmptyFiles = false; int emptyStreamCounter = 0; @@ -511,7 +538,7 @@ header.write(contents); } } - + private void writeFileAntiItems(final DataOutput header) throws IOException { boolean hasAntiItems = false; final BitSet antiItems = new BitSet(0); @@ -534,10 +561,10 @@ header.write(contents); } } - + private void writeFileNames(final DataOutput header) throws IOException { header.write(NID.kName); - + final ByteArrayOutputStream baos = new ByteArrayOutputStream(); final DataOutputStream out = new DataOutputStream(baos); out.write(0); @@ -730,7 +757,7 @@ } private static Iterable reverse(final Iterable i) { - final LinkedList l = new LinkedList(); + final LinkedList l = new LinkedList<>(); for (final T t : i) { l.addFirst(t); } @@ -738,29 +765,39 @@ } private class OutputStreamWrapper extends OutputStream { + private static final int BUF_SIZE = 8192; + private final ByteBuffer buffer = ByteBuffer.allocate(BUF_SIZE); @Override public void write(final int b) throws IOException { - file.write(b); + buffer.clear(); + buffer.put((byte) b).flip(); + channel.write(buffer); compressedCrc32.update(b); fileBytesWritten++; } - + @Override public void write(final byte[] b) throws IOException { OutputStreamWrapper.this.write(b, 0, b.length); } - + @Override public void write(final byte[] b, final int 
off, final int len) throws IOException { - file.write(b, off, len); + if (len > BUF_SIZE) { + channel.write(ByteBuffer.wrap(b, off, len)); + } else { + buffer.clear(); + buffer.put(b, off, len).flip(); + channel.write(buffer); + } compressedCrc32.update(b, off, len); fileBytesWritten += len; } @Override public void flush() throws IOException { - // no reason to flush a RandomAccessFile + // no reason to flush the channel } @Override diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveEntry.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveEntry.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveEntry.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveEntry.java 2016-12-25 11:57:03.000000000 +0000 @@ -376,7 +376,7 @@ * @return True if the entries are equal. */ public boolean equals(final TarArchiveEntry it) { - return getName().equals(it.getName()); + return it != null && getName().equals(it.getName()); } /** @@ -651,6 +651,9 @@ /** * Get this entry's file. * + *

This method is only useful for entries created from a {@code + * File} but not for entries read from an archive.

+ * * @return This entry's file. */ public File getFile() { @@ -943,6 +946,9 @@ * If this entry represents a file, and the file is a directory, return * an array of TarEntries for this entry's children. * + *

This method is only useful for entries created from a {@code + * File} but not for entries read from an archive.
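The two notes above draw the same line: getFile() and getDirectoryEntries() are only meaningful for entries constructed from a java.io.File, not for entries read back out of an archive. A small sketch of the distinction; that getFile() yields null for stream-read entries is an assumption consistent with the note, not something this diff states:

    import java.io.File;
    import java.io.FileInputStream;
    import java.io.IOException;
    import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
    import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;

    class TarEntryOrigin {
        static void show(final File someFile, final File someTarArchive) throws IOException {
            // Entry created from a File: getFile() hands the original File back.
            final TarArchiveEntry fromFile = new TarArchiveEntry(someFile, "data/" + someFile.getName());
            System.out.println(fromFile.getFile());           // prints the path of someFile

            // Entry read from an archive: there is no backing File to return.
            try (TarArchiveInputStream in = new TarArchiveInputStream(new FileInputStream(someTarArchive))) {
                final TarArchiveEntry fromStream = in.getNextTarEntry();
                System.out.println(fromStream != null ? fromStream.getFile() : null);  // expected: null
            }
        }
    }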

+ * * @return An array of TarEntry's for this entry's children. */ public TarArchiveEntry[] getDirectoryEntries() { @@ -978,7 +984,7 @@ writeEntryHeader(outbuf, TarUtils.FALLBACK_ENCODING, false); } catch (final IOException ex2) { // impossible - throw new RuntimeException(ex2); + throw new RuntimeException(ex2); //NOSONAR } } } @@ -1065,7 +1071,7 @@ parseTarHeader(header, TarUtils.DEFAULT_ENCODING, true); } catch (final IOException ex2) { // not really possible - throw new RuntimeException(ex2); + throw new RuntimeException(ex2); //NOSONAR } } } diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java 2016-12-25 11:57:03.000000000 +0000 @@ -49,7 +49,7 @@ private static final int SMALL_BUFFER_SIZE = 256; - private final byte[] SMALL_BUF = new byte[SMALL_BUFFER_SIZE]; + private final byte[] smallBuf = new byte[SMALL_BUFFER_SIZE]; /** The size the TAR header */ private final int recordSize; @@ -79,7 +79,7 @@ final String encoding; // the global PAX header - private Map globalPaxHeaders = new HashMap(); + private Map globalPaxHeaders = new HashMap<>(); /** * Constructor for TarInputStream. @@ -357,8 +357,8 @@ // read in the name final ByteArrayOutputStream longName = new ByteArrayOutputStream(); int length = 0; - while ((length = read(SMALL_BUF)) >= 0) { - longName.write(SMALL_BUF, 0, length); + while ((length = read(smallBuf)) >= 0) { + longName.write(smallBuf, 0, length); } getNextEntry(); if (currEntry == null) { @@ -451,7 +451,7 @@ // https://www.gnu.org/software/tar/manual/html_section/tar_92.html#SEC188 Map parsePaxHeaders(final InputStream i) throws IOException { - final Map headers = new HashMap(globalPaxHeaders); + final Map headers = new HashMap<>(globalPaxHeaders); // Format is "length keyword=value\n"; while(true){ // get length int ch; diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStream.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStream.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStream.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStream.java 2016-12-25 11:57:03.000000000 +0000 @@ -275,7 +275,7 @@ throw new IOException("Stream has already been finished"); } final TarArchiveEntry entry = (TarArchiveEntry) archiveEntry; - final Map paxHeaders = new HashMap(); + final Map paxHeaders = new HashMap<>(); final String entryName = entry.getName(); final boolean paxHeaderContainsPath = handleLongName(entry, entryName, paxHeaders, "path", TarConstants.LF_GNUTYPE_LONGNAME, "file name"); @@ -633,7 +633,7 @@ private void failForBigNumber(final String field, final long value, final long maxValue, final String additionalMsg) { if (value < 0 || value > maxValue) { - throw new RuntimeException(field + " '" + value + throw new RuntimeException(field + " '" + value //NOSONAR + "' is too big ( > " + maxValue + " )." 
+ additionalMsg); } @@ -677,14 +677,14 @@ // of which are the link's name final TarArchiveEntry longLinkEntry = new TarArchiveEntry(TarConstants.GNU_LONGLINK, linkType); - longLinkEntry.setSize(len + 1); // +1 for NUL + longLinkEntry.setSize(len + 1l); // +1 for NUL transferModTime(entry, longLinkEntry); putArchiveEntry(longLinkEntry); write(encodedName.array(), encodedName.arrayOffset(), len); write(0); // NUL terminator closeArchiveEntry(); } else if (longFileMode != LONGFILE_TRUNCATE) { - throw new RuntimeException(fieldName + " '" + name + throw new RuntimeException(fieldName + " '" + name //NOSONAR + "' is too long ( > " + TarConstants.NAMELEN + " bytes)"); } diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/tar/TarUtils.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/tar/TarUtils.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/tar/TarUtils.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/tar/TarUtils.java 2016-12-25 11:57:03.000000000 +0000 @@ -193,7 +193,7 @@ if (negative) { // 2's complement val--; - val ^= (long) Math.pow(2, (length - 1) * 8) - 1; + val ^= (long) Math.pow(2.0, (length - 1) * 8.0) - 1; } return negative ? -val : val; } @@ -267,7 +267,7 @@ return parseName(buffer, offset, length, FALLBACK_ENCODING); } catch (final IOException ex2) { // impossible - throw new RuntimeException(ex2); + throw new RuntimeException(ex2); //NOSONAR } } } @@ -328,7 +328,7 @@ FALLBACK_ENCODING); } catch (final IOException ex2) { // impossible - throw new RuntimeException(ex2); + throw new RuntimeException(ex2); //NOSONAR } } } diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/zip/AbstractUnicodeExtraField.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/zip/AbstractUnicodeExtraField.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/zip/AbstractUnicodeExtraField.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/zip/AbstractUnicodeExtraField.java 2016-12-25 11:57:03.000000000 +0000 @@ -56,7 +56,7 @@ try { unicodeName = text.getBytes(CharsetNames.UTF_8); } catch (final UnsupportedEncodingException e) { - throw new RuntimeException("FATAL: UTF-8 encoding not supported.", e); + throw new RuntimeException("FATAL: UTF-8 encoding not supported.", e); //NOSONAR } } diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/zip/AsiExtraField.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/zip/AsiExtraField.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/zip/AsiExtraField.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/zip/AsiExtraField.java 2016-12-25 11:57:03.000000000 +0000 @@ -331,7 +331,7 @@ return cloned; } catch (final CloneNotSupportedException cnfe) { // impossible - throw new RuntimeException(cnfe); + throw new RuntimeException(cnfe); //NOSONAR } } } diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/zip/ExtraFieldUtils.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/zip/ExtraFieldUtils.java --- 
libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/zip/ExtraFieldUtils.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/zip/ExtraFieldUtils.java 2016-12-25 11:57:03.000000000 +0000 @@ -38,7 +38,7 @@ private static final Map> implementations; static { - implementations = new ConcurrentHashMap>(); + implementations = new ConcurrentHashMap<>(); register(AsiExtraField.class); register(X5455_ExtendedTimestamp.class); register(X7875_NewUnix.class); @@ -66,11 +66,11 @@ final ZipExtraField ze = (ZipExtraField) c.newInstance(); implementations.put(ze.getHeaderId(), c); } catch (final ClassCastException cc) { - throw new RuntimeException(c + " doesn\'t implement ZipExtraField"); + throw new RuntimeException(c + " doesn\'t implement ZipExtraField"); //NOSONAR } catch (final InstantiationException ie) { - throw new RuntimeException(c + " is not a concrete class"); + throw new RuntimeException(c + " is not a concrete class"); //NOSONAR } catch (final IllegalAccessException ie) { - throw new RuntimeException(c + "\'s no-arg constructor is not public"); + throw new RuntimeException(c + "\'s no-arg constructor is not public"); //NOSONAR } } @@ -135,7 +135,7 @@ public static ZipExtraField[] parse(final byte[] data, final boolean local, final UnparseableExtraField onUnparseableData) throws ZipException { - final List v = new ArrayList(); + final List v = new ArrayList<>(); int start = 0; LOOP: while (start <= data.length - WORD) { @@ -181,10 +181,8 @@ length); } v.add(ze); - } catch (final InstantiationException ie) { + } catch (final InstantiationException | IllegalAccessException ie) { throw (ZipException) new ZipException(ie.getMessage()).initCause(ie); - } catch (final IllegalAccessException iae) { - throw (ZipException) new ZipException(iae.getMessage()).initCause(iae); } start += length + WORD; } diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/zip/GeneralPurposeBit.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/zip/GeneralPurposeBit.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/zip/GeneralPurposeBit.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/zip/GeneralPurposeBit.java 2016-12-25 11:57:03.000000000 +0000 @@ -239,7 +239,7 @@ return super.clone(); } catch (final CloneNotSupportedException ex) { // impossible - throw new RuntimeException("GeneralPurposeBit is not Cloneable?", ex); + throw new RuntimeException("GeneralPurposeBit is not Cloneable?", ex); //NOSONAR } } } diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java 2016-12-25 11:57:03.000000000 +0000 @@ -55,7 +55,7 @@ private final List streams = synchronizedList(new ArrayList()); private final ExecutorService es; private final ScatterGatherBackingStoreSupplier backingStoreSupplier; - private final List> futures = new ArrayList>(); + private final List> futures = new 
ArrayList<>(); private final long startedAt = System.currentTimeMillis(); private long compressionDoneAt = 0; @@ -74,7 +74,8 @@ private ScatterZipOutputStream createDeferred(final ScatterGatherBackingStoreSupplier scatterGatherBackingStoreSupplier) throws IOException { final ScatterGatherBackingStore bs = scatterGatherBackingStoreSupplier.get(); - final StreamCompressor sc = StreamCompressor.create(Deflater.DEFAULT_COMPRESSION, bs); + // lifecycle is bound to the ScatterZipOutputStream returned + final StreamCompressor sc = StreamCompressor.create(Deflater.DEFAULT_COMPRESSION, bs); //NOSONAR return new ScatterZipOutputStream(bs, sc); } @@ -86,7 +87,7 @@ streams.add(scatterStream); return scatterStream; } catch (final IOException e) { - throw new RuntimeException(e); + throw new RuntimeException(e); //NOSONAR } } }; @@ -137,6 +138,19 @@ } /** + * Adds an archive entry to this archive. + *

+ * This method is expected to be called from a single client thread + *

+ * + * @param zipArchiveEntryRequestSupplier Should supply the entry to be added. + * @since 1.13 + */ + public void addArchiveEntry(final ZipArchiveEntryRequestSupplier zipArchiveEntryRequestSupplier) { + submit(createCallable(zipArchiveEntryRequestSupplier)); + } + + /** * Submit a callable for compression. * * @see ParallelScatterZipCreator#createCallable for details of if/when to use this. @@ -180,6 +194,31 @@ }; } + /** + * Create a callable that will compress archive entry supplied by {@link ZipArchiveEntryRequestSupplier}. + * + *

This method is expected to be called from a single client thread.
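Both the addArchiveEntry overload added earlier in this hunk and the createCallable variant documented here accept a ZipArchiveEntryRequestSupplier, whose get() method hands back the complete request when the compression task runs. A hedged usage sketch; ZipArchiveEntryRequest.createZipArchiveEntryRequest is assumed to be the public factory for such requests, as it is not part of this diff:

    import java.io.ByteArrayInputStream;
    import java.io.File;
    import java.io.FileOutputStream;
    import java.io.InputStream;
    import java.nio.charset.StandardCharsets;
    import java.util.zip.ZipEntry;
    import org.apache.commons.compress.archivers.zip.ParallelScatterZipCreator;
    import org.apache.commons.compress.archivers.zip.ZipArchiveEntry;
    import org.apache.commons.compress.archivers.zip.ZipArchiveEntryRequest;
    import org.apache.commons.compress.archivers.zip.ZipArchiveEntryRequestSupplier;
    import org.apache.commons.compress.archivers.zip.ZipArchiveOutputStream;
    import org.apache.commons.compress.parallel.InputStreamSupplier;

    class SupplierBasedScatterZip {
        static void create(final File target) throws Exception {   // writeTo also declares Interrupted/ExecutionException
            final ParallelScatterZipCreator creator = new ParallelScatterZipCreator();

            // The supplier builds the request (entry + payload) lazily, when a worker asks for it.
            final ZipArchiveEntryRequestSupplier request = new ZipArchiveEntryRequestSupplier() {
                @Override
                public ZipArchiveEntryRequest get() {
                    final ZipArchiveEntry entry = new ZipArchiveEntry("greeting.txt");
                    entry.setMethod(ZipEntry.DEFLATED);             // entries need a method before submission
                    final InputStreamSupplier payload = new InputStreamSupplier() {
                        @Override
                        public InputStream get() {
                            return new ByteArrayInputStream("hello".getBytes(StandardCharsets.UTF_8));
                        }
                    };
                    return ZipArchiveEntryRequest.createZipArchiveEntryRequest(entry, payload);
                }
            };
            creator.addArchiveEntry(request);                       // the new 1.13 entry point

            try (ZipArchiveOutputStream zos = new ZipArchiveOutputStream(new FileOutputStream(target))) {
                creator.writeTo(zos);                               // blocks until all workers have finished
            }
        }
    }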

+ * + * The same as {@link #createCallable(ZipArchiveEntry, InputStreamSupplier)}, but the archive entry + * to be added is supplied by a {@link ZipArchiveEntryRequestSupplier}. + * + * @see #createCallable(ZipArchiveEntry, InputStreamSupplier) + * + * @param zipArchiveEntryRequestSupplier Should supply the entry to be added. + * @return A callable that should subsequently passed to #submit, possibly in a wrapped/adapted from. The + * value of this callable is not used, but any exceptions happening inside the compression + * will be propagated through the callable. + * @since 1.13 + */ + public final Callable createCallable(final ZipArchiveEntryRequestSupplier zipArchiveEntryRequestSupplier) { + return new Callable() { + @Override + public Object call() throws Exception { + tlScatterStreams.get().addArchiveEntry(zipArchiveEntryRequestSupplier.get()); + return null; + } + }; + } /** * Write the contents this to the target {@link ZipArchiveOutputStream}. @@ -202,7 +241,7 @@ } es.shutdown(); - es.awaitTermination(1000 * 60, TimeUnit.SECONDS); // == Infinity. We really *must* wait for this to complete + es.awaitTermination(1000 * 60l, TimeUnit.SECONDS); // == Infinity. We really *must* wait for this to complete // It is important that all threads terminate before we go on, ensure happens-before relationship compressionDoneAt = System.currentTimeMillis(); diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/zip/PKWareExtraHeader.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/zip/PKWareExtraHeader.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/zip/PKWareExtraHeader.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/zip/PKWareExtraHeader.java 2016-12-25 11:57:03.000000000 +0000 @@ -216,7 +216,7 @@ private static final Map codeToEnum; static { - final Map cte = new HashMap(); + final Map cte = new HashMap<>(); for (final EncryptionAlgorithm method : values()) { cte.put(Integer.valueOf(method.getCode()), method); } @@ -271,7 +271,7 @@ private static final Map codeToEnum; static { - final Map cte = new HashMap(); + final Map cte = new HashMap<>(); for (final HashAlgorithm method : values()) { cte.put(Integer.valueOf(method.getCode()), method); } diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java 2016-12-25 11:57:03.000000000 +0000 @@ -46,7 +46,7 @@ * @since 1.10 */ public class ScatterZipOutputStream implements Closeable { - private final Queue items = new ConcurrentLinkedQueue(); + private final Queue items = new ConcurrentLinkedQueue<>(); private final ScatterGatherBackingStore backingStore; private final StreamCompressor streamCompressor; @@ -92,11 +92,8 @@ * @throws IOException If writing fails */ public void addArchiveEntry(final ZipArchiveEntryRequest zipArchiveEntryRequest) throws IOException { - final InputStream payloadStream = zipArchiveEntryRequest.getPayloadStream(); - try { + try (final InputStream payloadStream = 
zipArchiveEntryRequest.getPayloadStream()) { streamCompressor.deflate(payloadStream, zipArchiveEntryRequest.getMethod()); - } finally { - payloadStream.close(); } items.add(new CompressedEntry(zipArchiveEntryRequest, streamCompressor.getCrc32(), streamCompressor.getBytesWrittenForLastEntry(), streamCompressor.getBytesRead())); @@ -110,13 +107,14 @@ */ public void writeTo(final ZipArchiveOutputStream target) throws IOException { backingStore.closeForWriting(); - final InputStream data = backingStore.getInputStream(); - for (final CompressedEntry compressedEntry : items) { - final BoundedInputStream rawStream = new BoundedInputStream(data, compressedEntry.compressedSize); - target.addRawArchiveEntry(compressedEntry.transferToArchiveEntry(), rawStream); - rawStream.close(); + try (final InputStream data = backingStore.getInputStream()) { + for (final CompressedEntry compressedEntry : items) { + try (final BoundedInputStream rawStream = new BoundedInputStream(data, + compressedEntry.compressedSize)) { + target.addRawArchiveEntry(compressedEntry.transferToArchiveEntry(), rawStream); + } + } } - data.close(); } @@ -127,6 +125,7 @@ @Override public void close() throws IOException { backingStore.close(); + streamCompressor.close(); } /** @@ -150,7 +149,8 @@ */ public static ScatterZipOutputStream fileBased(final File file, final int compressionLevel) throws FileNotFoundException { final ScatterGatherBackingStore bs = new FileBasedScatterGatherBackingStore(file); - final StreamCompressor sc = StreamCompressor.create(compressionLevel, bs); + // lifecycle is bound to the ScatterZipOutputStream returned + final StreamCompressor sc = StreamCompressor.create(compressionLevel, bs); //NOSONAR return new ScatterZipOutputStream(bs, sc); } } diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/zip/Simple8BitZipEncoding.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/zip/Simple8BitZipEncoding.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/zip/Simple8BitZipEncoding.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/zip/Simple8BitZipEncoding.java 2016-12-25 11:57:03.000000000 +0000 @@ -105,7 +105,7 @@ public Simple8BitZipEncoding(final char[] highChars) { this.highChars = highChars.clone(); final List temp = - new ArrayList(this.highChars.length); + new ArrayList<>(this.highChars.length); byte code = 127; diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/zip/StreamCompressor.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/zip/StreamCompressor.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/zip/StreamCompressor.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/zip/StreamCompressor.java 2016-12-25 11:57:03.000000000 +0000 @@ -24,6 +24,8 @@ import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; +import java.nio.ByteBuffer; +import java.nio.channels.SeekableByteChannel; import java.util.zip.CRC32; import java.util.zip.Deflater; import java.util.zip.ZipEntry; @@ -54,9 +56,9 @@ private long sourcePayloadLength = 0; private long totalWrittenToOutputStream = 0; - private static final int bufferSize = 4096; - private final byte[] outputBuffer = new byte[bufferSize]; - private final byte[] readerBuf 
= new byte[bufferSize]; + private static final int BUFFER_SIZE = 4096; + private final byte[] outputBuffer = new byte[BUFFER_SIZE]; + private final byte[] readerBuf = new byte[BUFFER_SIZE]; StreamCompressor(final Deflater deflater) { this.def = deflater; @@ -97,6 +99,18 @@ /** * Create a stream compressor with the given compression level. * + * @param os The SeekableByteChannel to receive output + * @param deflater The deflater to use for the compressor + * @return A stream compressor + * @since 1.13 + */ + static StreamCompressor create(final SeekableByteChannel os, final Deflater deflater) { + return new SeekableByteChannelCompressor(deflater, os); + } + + /** + * Create a stream compressor with the given compression level. + * * @param compressionLevel The {@link Deflater} compression level * @param bs The ScatterGatherBackingStore to receive output * @return A stream compressor @@ -307,4 +321,20 @@ raf.write(data, offset, length); } } + + private static final class SeekableByteChannelCompressor extends StreamCompressor { + private final SeekableByteChannel channel; + + public SeekableByteChannelCompressor(final Deflater deflater, + final SeekableByteChannel channel) { + super(deflater); + this.channel = channel; + } + + @Override + protected final void writeOut(final byte[] data, final int offset, final int length) + throws IOException { + channel.write(ByteBuffer.wrap(data, offset, length)); + } + } } diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/zip/UnshrinkingInputStream.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/zip/UnshrinkingInputStream.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/zip/UnshrinkingInputStream.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/zip/UnshrinkingInputStream.java 2016-12-25 11:57:03.000000000 +0000 @@ -34,6 +34,12 @@ private static final int MAX_TABLE_SIZE = 1 << MAX_CODE_SIZE; private final boolean[] isUsed; + /** + * IOException is not actually thrown! + * + * @param inputStream + * @throws IOException IOException is not actually thrown! + */ public UnshrinkingInputStream(final InputStream inputStream) throws IOException { super(inputStream, ByteOrder.LITTLE_ENDIAN); setClearCode(DEFAULT_CODE_SIZE); diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/zip/UnsupportedZipFeatureException.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/zip/UnsupportedZipFeatureException.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/zip/UnsupportedZipFeatureException.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/zip/UnsupportedZipFeatureException.java 2016-12-25 11:57:03.000000000 +0000 @@ -28,8 +28,8 @@ public class UnsupportedZipFeatureException extends ZipException { private final Feature reason; - private final ZipArchiveEntry entry; - private static final long serialVersionUID = 20130101L; + private transient final ZipArchiveEntry entry; + private static final long serialVersionUID = 20161219L; /** * Creates an exception. @@ -92,7 +92,7 @@ * ZIP Features that may or may not be supported. * @since 1.1 */ - public static class Feature { + public static class Feature implements java.io.Serializable { /** * The entry is encrypted. 
*/ diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntry.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntry.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntry.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntry.java 2016-12-25 11:57:03.000000000 +0000 @@ -321,7 +321,7 @@ * @param fields an array of extra fields */ public void setExtraFields(final ZipExtraField[] fields) { - final List newFields = new ArrayList(); + final List newFields = new ArrayList<>(); for (final ZipExtraField field : fields) { if (field instanceof UnparseableExtraFieldData) { unparseableExtra = (UnparseableExtraFieldData) field; @@ -469,7 +469,7 @@ throw new java.util.NoSuchElementException(); } - final List newResult = new ArrayList(); + final List newResult = new ArrayList<>(); for (final ZipExtraField extraField : extraFields) { if (!type.equals(extraField.getHeaderId())){ newResult.add( extraField); @@ -540,7 +540,7 @@ mergeExtraFields(local, true); } catch (final ZipException e) { // actually this is not possible as of Commons Compress 1.1 - throw new RuntimeException("Error parsing extra fields for entry: " + throw new RuntimeException("Error parsing extra fields for entry: " //NOSONAR + getName() + " - " + e.getMessage(), e); } } @@ -566,7 +566,7 @@ ExtraFieldUtils.UnparseableExtraField.READ); mergeExtraFields(central, false); } catch (final ZipException e) { - throw new RuntimeException(e.getMessage(), e); + throw new RuntimeException(e.getMessage(), e); //NOSONAR } } diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntryRequestSupplier.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntryRequestSupplier.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntryRequestSupplier.java 1970-01-01 00:00:00.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntryRequestSupplier.java 2016-12-25 11:57:03.000000000 +0000 @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +package org.apache.commons.compress.archivers.zip; + +/** + * Supplies {@link ZipArchiveEntryRequest}. + * + * Implementations are required to support thread-handover. While an instance will + * not be accessed concurrently by multiple threads, it will be called by + * a different thread than it was created on. 
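A possible implementation of such a supplier, paired with the createCallable overload added above, could look like the following sketch; the class name, entry name and payload are illustrative only.

    import java.io.ByteArrayInputStream;
    import java.io.InputStream;
    import java.nio.charset.StandardCharsets;
    import java.util.zip.ZipEntry;

    import org.apache.commons.compress.archivers.zip.ZipArchiveEntry;
    import org.apache.commons.compress.archivers.zip.ZipArchiveEntryRequest;
    import org.apache.commons.compress.archivers.zip.ZipArchiveEntryRequestSupplier;
    import org.apache.commons.compress.parallel.InputStreamSupplier;

    class LazyEntrySupplier implements ZipArchiveEntryRequestSupplier {
        @Override
        public ZipArchiveEntryRequest get() {
            // The entry and its payload are built lazily, on whichever pool thread
            // runs the compression task, not on the thread that registered the supplier.
            ZipArchiveEntry entry = new ZipArchiveEntry("lazy.txt");
            entry.setMethod(ZipEntry.DEFLATED);
            return ZipArchiveEntryRequest.createZipArchiveEntryRequest(entry, new InputStreamSupplier() {
                @Override
                public InputStream get() {
                    return new ByteArrayInputStream("payload".getBytes(StandardCharsets.UTF_8));
                }
            });
        }
    }

Registration then mirrors the existing pattern, for example creator.submit(creator.createCallable(new LazyEntrySupplier())) followed by creator.writeTo(...), as in the earlier sketch.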
+ * + * @since 1.13 + */ +public interface ZipArchiveEntryRequestSupplier { + + /** + * Supply a {@link ZipArchiveEntryRequest} to be added to a parallel archive. + * @return The {@link ZipArchiveEntryRequest} instance. Should never be null. + */ + ZipArchiveEntryRequest get(); +} diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream.java 2016-12-25 11:57:03.000000000 +0000 @@ -152,11 +152,11 @@ private static final long TWO_EXP_32 = ZIP64_MAGIC + 1; // cached buffers - must only be used locally in the class (COMPRESS-172 - reduce garbage collection) - private final byte[] LFH_BUF = new byte[LFH_LEN]; - private final byte[] SKIP_BUF = new byte[1024]; - private final byte[] SHORT_BUF = new byte[SHORT]; - private final byte[] WORD_BUF = new byte[WORD]; - private final byte[] TWO_DWORD_BUF = new byte[2 * DWORD]; + private final byte[] lfhBuf = new byte[LFH_LEN]; + private final byte[] skipBuf = new byte[1024]; + private final byte[] shortBuf = new byte[SHORT]; + private final byte[] wordBuf = new byte[WORD]; + private final byte[] twoDwordBuf = new byte[2 * DWORD]; private int entriesRead = 0; @@ -232,31 +232,32 @@ // first local file header - look for it and fail with // the appropriate error message if this is a split // archive. - readFirstLocalFileHeader(LFH_BUF); + readFirstLocalFileHeader(lfhBuf); } else { - readFully(LFH_BUF); + readFully(lfhBuf); } } catch (final EOFException e) { return null; } - final ZipLong sig = new ZipLong(LFH_BUF); + final ZipLong sig = new ZipLong(lfhBuf); if (sig.equals(ZipLong.CFH_SIG) || sig.equals(ZipLong.AED_SIG)) { hitCentralDirectory = true; skipRemainderOfArchive(); + return null; } if (!sig.equals(ZipLong.LFH_SIG)) { - return null; + throw new ZipException(String.format("Unexpected record signature: 0X%X", sig.getValue())); } int off = WORD; current = new CurrentEntry(); - final int versionMadeBy = ZipShort.getValue(LFH_BUF, off); + final int versionMadeBy = ZipShort.getValue(lfhBuf, off); off += SHORT; current.entry.setPlatform((versionMadeBy >> ZipFile.BYTE_SHIFT) & ZipFile.NIBLET_MASK); - final GeneralPurposeBit gpFlag = GeneralPurposeBit.parse(LFH_BUF, off); + final GeneralPurposeBit gpFlag = GeneralPurposeBit.parse(lfhBuf, off); final boolean hasUTF8Flag = gpFlag.usesUTF8ForNames(); final ZipEncoding entryEncoding = hasUTF8Flag ? 
ZipEncodingHelper.UTF8_ZIP_ENCODING : zipEncoding; current.hasDataDescriptor = gpFlag.usesDataDescriptor(); @@ -264,32 +265,32 @@ off += SHORT; - current.entry.setMethod(ZipShort.getValue(LFH_BUF, off)); + current.entry.setMethod(ZipShort.getValue(lfhBuf, off)); off += SHORT; - final long time = ZipUtil.dosToJavaTime(ZipLong.getValue(LFH_BUF, off)); + final long time = ZipUtil.dosToJavaTime(ZipLong.getValue(lfhBuf, off)); current.entry.setTime(time); off += WORD; ZipLong size = null, cSize = null; if (!current.hasDataDescriptor) { - current.entry.setCrc(ZipLong.getValue(LFH_BUF, off)); + current.entry.setCrc(ZipLong.getValue(lfhBuf, off)); off += WORD; - cSize = new ZipLong(LFH_BUF, off); + cSize = new ZipLong(lfhBuf, off); off += WORD; - size = new ZipLong(LFH_BUF, off); + size = new ZipLong(lfhBuf, off); off += WORD; } else { off += 3 * WORD; } - final int fileNameLen = ZipShort.getValue(LFH_BUF, off); + final int fileNameLen = ZipShort.getValue(lfhBuf, off); off += SHORT; - final int extraLen = ZipShort.getValue(LFH_BUF, off); + final int extraLen = ZipShort.getValue(lfhBuf, off); off += SHORT; final byte[] fileName = new byte[fileNameLen]; @@ -549,7 +550,7 @@ long skipped = 0; while (skipped < value) { final long rem = value - skipped; - final int x = read(SKIP_BUF, 0, (int) (SKIP_BUF.length > rem ? rem : SKIP_BUF.length)); + final int x = read(skipBuf, 0, (int) (skipBuf.length > rem ? rem : skipBuf.length)); if (x == -1) { return skipped; } @@ -616,11 +617,11 @@ } // Ensure all entry bytes are read - if (current.bytesReadFromStream <= current.entry.getCompressedSize() - && !current.hasDataDescriptor) { + if (currentEntryHasOutstandingBytes()) { drainCurrentEntryData(); } else { - skip(Long.MAX_VALUE); + // this is guaranteed to exhaust the stream + skip(Long.MAX_VALUE); //NOSONAR final long inB = current.entry.getMethod() == ZipArchiveOutputStream.DEFLATED ? getBytesInflated() : current.bytesRead; @@ -632,6 +633,12 @@ // Pushback any required bytes if (diff > 0) { pushback(buf.array(), buf.limit() - diff, diff); + current.bytesReadFromStream -= diff; + } + + // Drain remainder of entry if not all data bytes were required + if (currentEntryHasOutstandingBytes()) { + drainCurrentEntryData(); } } @@ -646,6 +653,18 @@ } /** + * If the compressed size of the current entry is included in the entry header + * and there are any outstanding bytes in the underlying stream, then + * this returns true. + * + * @return true, if current entry is determined to have outstanding bytes, false otherwise + */ + private boolean currentEntryHasOutstandingBytes() { + return current.bytesReadFromStream <= current.entry.getCompressedSize() + && !current.hasDataDescriptor; + } + + /** * Read all data of the current entry from the underlying stream * that hasn't been read, yet. */ @@ -709,12 +728,12 @@ } private void readDataDescriptor() throws IOException { - readFully(WORD_BUF); - ZipLong val = new ZipLong(WORD_BUF); + readFully(wordBuf); + ZipLong val = new ZipLong(wordBuf); if (ZipLong.DD_SIG.equals(val)) { // data descriptor with signature, skip sig - readFully(WORD_BUF); - val = new ZipLong(WORD_BUF); + readFully(wordBuf); + val = new ZipLong(wordBuf); } current.entry.setCrc(val.getValue()); @@ -729,15 +748,15 @@ // descriptor (ignoring archive decryption headers for now). // If so, push back eight bytes and assume sizes are four // bytes, otherwise sizes are eight bytes each. 
- readFully(TWO_DWORD_BUF); - final ZipLong potentialSig = new ZipLong(TWO_DWORD_BUF, DWORD); + readFully(twoDwordBuf); + final ZipLong potentialSig = new ZipLong(twoDwordBuf, DWORD); if (potentialSig.equals(ZipLong.CFH_SIG) || potentialSig.equals(ZipLong.LFH_SIG)) { - pushback(TWO_DWORD_BUF, DWORD, DWORD); - current.entry.setCompressedSize(ZipLong.getValue(TWO_DWORD_BUF)); - current.entry.setSize(ZipLong.getValue(TWO_DWORD_BUF, WORD)); + pushback(twoDwordBuf, DWORD, DWORD); + current.entry.setCompressedSize(ZipLong.getValue(twoDwordBuf)); + current.entry.setSize(ZipLong.getValue(twoDwordBuf, WORD)); } else { - current.entry.setCompressedSize(ZipEightByteInteger.getLongValue(TWO_DWORD_BUF)); - current.entry.setSize(ZipEightByteInteger.getLongValue(TWO_DWORD_BUF, DWORD)); + current.entry.setCompressedSize(ZipEightByteInteger.getLongValue(twoDwordBuf)); + current.entry.setSize(ZipEightByteInteger.getLongValue(twoDwordBuf, DWORD)); } } @@ -898,12 +917,12 @@ // skip over central directory. One LFH has been read too much // already. The calculation discounts file names and extra // data so it will be too short. - realSkip(entriesRead * CFH_LEN - LFH_LEN); + realSkip((long) entriesRead * CFH_LEN - LFH_LEN); findEocdRecord(); - realSkip(ZipFile.MIN_EOCD_SIZE - WORD /* signature */ - SHORT /* comment len */); - readFully(SHORT_BUF); + realSkip((long) ZipFile.MIN_EOCD_SIZE - WORD /* signature */ - SHORT /* comment len */); + readFully(shortBuf); // file comment - realSkip(ZipShort.getValue(SHORT_BUF)); + realSkip(ZipShort.getValue(shortBuf)); } /** @@ -955,7 +974,7 @@ long skipped = 0; while (skipped < value) { final long rem = value - skipped; - final int x = in.read(SKIP_BUF, 0, (int) (SKIP_BUF.length > rem ? rem : SKIP_BUF.length)); + final int x = in.read(skipBuf, 0, (int) (skipBuf.length > rem ? rem : skipBuf.length)); if (x == -1) { return; } diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java 2016-12-25 11:57:03.000000000 +0000 @@ -23,9 +23,12 @@ import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; -import java.io.RandomAccessFile; import java.nio.ByteBuffer; +import java.nio.channels.SeekableByteChannel; +import java.nio.file.Files; +import java.nio.file.StandardOpenOption; import java.util.Calendar; +import java.util.EnumSet; import java.util.HashMap; import java.util.LinkedList; import java.util.List; @@ -55,11 +58,11 @@ * attributes and extra fields with different layouts for local file * data and central directory entries. * - *

This class will try to use {@link java.io.RandomAccessFile
- * RandomAccessFile} when you know that the output is going to go to a
- * file.

+ *

This class will try to use {@link
+ * java.nio.channels.SeekableByteChannel} when it knows that the
+ * output is going to go to a file.

* - *

If RandomAccessFile cannot be used, this implementation will use
+ *

If SeekableByteChannel cannot be used, this implementation will use
 * a Data Descriptor to store size and CRC information for {@link
 * #DEFLATED DEFLATED} entries, this means, you don't need to
 * calculate them yourself. Unfortunately this is not possible for
@@ -71,7 +74,7 @@
 * extensions and thus individual entries and archives larger than 4
 * GB or with more than 65536 entries in most cases but explicit
 * control is provided via {@link #setUseZip64}. If the stream can not
- * user RandomAccessFile and you try to write a ZipArchiveEntry of
+ * use SeekableByteChannel and you try to write a ZipArchiveEntry of
 * unknown size then Zip64 extensions will be disabled by default.

* * @NotThreadSafe @@ -171,7 +174,7 @@ * List of ZipArchiveEntries written so far. */ private final List entries = - new LinkedList(); + new LinkedList<>(); private final StreamCompressor streamCompressor; @@ -201,7 +204,7 @@ * Holds the offsets of the LFH starts for each entry. */ private final Map offsets = - new HashMap(); + new HashMap<>(); /** * The encoding to use for filenames and the file comment. @@ -230,7 +233,7 @@ /** * Optional random access output. */ - private final RandomAccessFile raf; + private final SeekableByteChannel channel; private final OutputStream out; @@ -268,7 +271,7 @@ */ public ZipArchiveOutputStream(final OutputStream out) { this.out = out; - this.raf = null; + this.channel = null; def = new Deflater(level, true); streamCompressor = StreamCompressor.create(out, def); } @@ -280,20 +283,45 @@ * @throws IOException on error */ public ZipArchiveOutputStream(final File file) throws IOException { + def = new Deflater(level, true); OutputStream o = null; - RandomAccessFile _raf = null; + SeekableByteChannel _channel = null; + StreamCompressor _streamCompressor = null; try { - _raf = new RandomAccessFile(file, "rw"); - _raf.setLength(0); + _channel = Files.newByteChannel(file.toPath(), + EnumSet.of(StandardOpenOption.CREATE, StandardOpenOption.WRITE, + StandardOpenOption.READ, + StandardOpenOption.TRUNCATE_EXISTING)); + // will never get opened properly when an exception is thrown so doesn't need to get closed + _streamCompressor = StreamCompressor.create(_channel, def); //NOSONAR } catch (final IOException e) { - IOUtils.closeQuietly(_raf); - _raf = null; + IOUtils.closeQuietly(_channel); + _channel = null; o = new FileOutputStream(file); + _streamCompressor = StreamCompressor.create(o, def); } - def = new Deflater(level, true); - streamCompressor = StreamCompressor.create(_raf, def); out = o; - raf = _raf; + channel = _channel; + streamCompressor = _streamCompressor; + } + + /** + * Creates a new ZIP OutputStream writing to a SeekableByteChannel. + * + *

{@link
+ * org.apache.commons.compress.utils.SeekableInMemoryByteChannel}
+ * allows you to write to an in-memory archive using random
+ * access.

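As an illustration of the new SeekableByteChannel constructor documented in this hunk, the following sketch writes a small archive entirely in memory. The entry name and payload are invented, and the trimming of the backing array is an assumption about how SeekableInMemoryByteChannel exposes its buffer.

    import java.nio.charset.StandardCharsets;
    import java.util.Arrays;
    import java.util.zip.ZipEntry;

    import org.apache.commons.compress.archivers.zip.ZipArchiveEntry;
    import org.apache.commons.compress.archivers.zip.ZipArchiveOutputStream;
    import org.apache.commons.compress.utils.SeekableInMemoryByteChannel;

    public class InMemoryZipWrite {
        public static void main(String[] args) throws Exception {
            SeekableInMemoryByteChannel channel = new SeekableInMemoryByteChannel();
            try (ZipArchiveOutputStream zos = new ZipArchiveOutputStream(channel)) {
                ZipArchiveEntry entry = new ZipArchiveEntry("greeting.txt");
                entry.setMethod(ZipEntry.DEFLATED);
                zos.putArchiveEntry(entry);
                zos.write("hello, zip".getBytes(StandardCharsets.UTF_8));
                zos.closeArchiveEntry();
            }
            // The backing array may be larger than the archive, so trim it to size().
            byte[] zipBytes = Arrays.copyOf(channel.array(), (int) channel.size());
            System.out.println(zipBytes.length + " bytes written in memory");
        }
    }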
+ * + * @param channel the channel to zip to + * @throws IOException on error + * @since 1.13 + */ + public ZipArchiveOutputStream(SeekableByteChannel channel) throws IOException { + this.channel = channel; + def = new Deflater(level, true); + streamCompressor = StreamCompressor.create(channel, def); + out = null; } /** @@ -306,7 +334,7 @@ * @return true if seekable */ public boolean isSeekable() { - return raf != null; + return channel != null; } /** @@ -490,12 +518,12 @@ /** * Writes all necessary data for this entry. * + * @param phased This entry is second phase of a 2-phase zip creation, size, compressed size and crc + * are known in ZipArchiveEntry * @throws IOException on error * @throws Zip64RequiredException if the entry's uncompressed or * compressed size exceeds 4 GByte and {@link #setUseZip64} * is {@link Zip64Mode#Never}. - * @param phased This entry is second phase of a 2-phase zip creation, size, compressed size and crc - * are known in ZipArchiveEntry */ private void closeCopiedEntry(final boolean phased) throws IOException { preClose(); @@ -506,7 +534,7 @@ } private void closeEntry(final boolean actuallyNeedsZip64, final boolean phased) throws IOException { - if (!phased && raf != null) { + if (!phased && channel != null) { rewriteSizesAndCrc(actuallyNeedsZip64); } @@ -585,7 +613,7 @@ entry.entry.setCompressedSize(bytesWritten); entry.entry.setCrc(crc); - } else if (raf == null) { + } else if (channel == null) { if (entry.entry.getCrc() != crc) { throw new ZipException("bad CRC checksum for entry " + entry.entry.getName() + ": " @@ -601,7 +629,7 @@ + " instead of " + bytesWritten); } - } else { /* method is STORED and we used RandomAccessFile */ + } else { /* method is STORED and we used SeekableByteChannel */ entry.entry.setSize(bytesWritten); entry.entry.setCompressedSize(bytesWritten); entry.entry.setCrc(crc); @@ -640,9 +668,9 @@ */ private void rewriteSizesAndCrc(final boolean actuallyNeedsZip64) throws IOException { - final long save = raf.getFilePointer(); + final long save = channel.position(); - raf.seek(entry.localDataStart); + channel.position(entry.localDataStart); writeOut(ZipLong.getBytes(entry.entry.getCrc())); if (!hasZip64Extra(entry.entry) || !actuallyNeedsZip64) { writeOut(ZipLong.getBytes(entry.entry.getCompressedSize())); @@ -656,8 +684,8 @@ final ByteBuffer name = getName(entry.entry); final int nameLen = name.limit() - name.position(); // seek to ZIP64 extra, skip header and size information - raf.seek(entry.localDataStart + 3 * WORD + 2 * SHORT - + nameLen + 2 * SHORT); + channel.position(entry.localDataStart + 3 * WORD + 2 * SHORT + + nameLen + 2 * SHORT); // inside the ZIP64 extra uncompressed size comes // first, unlike the LFH, CD or data descriptor writeOut(ZipEightByteInteger.getBytes(entry.entry.getSize())); @@ -666,7 +694,7 @@ if (!actuallyNeedsZip64) { // do some cleanup: // * rewrite version needed to extract - raf.seek(entry.localDataStart - 5 * SHORT); + channel.position(entry.localDataStart - 5 * SHORT); writeOut(ZipShort.getBytes(INITIAL_VERSION)); // * remove ZIP64 extra so it doesn't get written @@ -682,7 +710,7 @@ } } } - raf.seek(save); + channel.position(save); } /** @@ -731,7 +759,7 @@ final Zip64ExtendedInformationExtraField z64 = getZip64Extra(entry.entry); // just a placeholder, real data will be in data - // descriptor or inserted later via RandomAccessFile + // descriptor or inserted later via SeekableByteChannel ZipEightByteInteger size = ZipEightByteInteger.ZERO; ZipEightByteInteger compressedSize = 
ZipEightByteInteger.ZERO; if (phased){ @@ -777,8 +805,8 @@ */ private void validateSizeInformation(final Zip64Mode effectiveMode) throws ZipException { - // Size/CRC not required if RandomAccessFile is used - if (entry.entry.getMethod() == STORED && raf == null) { + // Size/CRC not required if SeekableByteChannel is used + if (entry.entry.getMethod() == STORED && channel == null) { if (entry.entry.getSize() == ArchiveEntry.SIZE_UNKNOWN) { throw new ZipException("uncompressed size is required for" + " STORED method when not writing to a" @@ -818,7 +846,7 @@ || entry.getSize() >= ZIP64_MAGIC || entry.getCompressedSize() >= ZIP64_MAGIC || (entry.getSize() == ArchiveEntry.SIZE_UNKNOWN - && raf != null && mode != Zip64Mode.Never); + && channel != null && mode != Zip64Mode.Never); } /** @@ -946,32 +974,32 @@ } /* - * Various ZIP constants + * Various ZIP constants shared between this class, ZipArchiveInputStream and ZipFile */ /** * local file header signature */ - static final byte[] LFH_SIG = ZipLong.LFH_SIG.getBytes(); + static final byte[] LFH_SIG = ZipLong.LFH_SIG.getBytes(); //NOSONAR /** * data descriptor signature */ - static final byte[] DD_SIG = ZipLong.DD_SIG.getBytes(); + static final byte[] DD_SIG = ZipLong.DD_SIG.getBytes(); //NOSONAR /** * central file header signature */ - static final byte[] CFH_SIG = ZipLong.CFH_SIG.getBytes(); + static final byte[] CFH_SIG = ZipLong.CFH_SIG.getBytes(); //NOSONAR /** * end of central dir signature */ - static final byte[] EOCD_SIG = ZipLong.getBytes(0X06054B50L); + static final byte[] EOCD_SIG = ZipLong.getBytes(0X06054B50L); //NOSONAR /** * ZIP64 end of central dir signature */ - static final byte[] ZIP64_EOCD_SIG = ZipLong.getBytes(0X06064B50L); + static final byte[] ZIP64_EOCD_SIG = ZipLong.getBytes(0X06064B50L); //NOSONAR /** * ZIP64 end of central dir locator signature */ - static final byte[] ZIP64_EOCD_LOC_SIG = ZipLong.getBytes(0X07064B50L); + static final byte[] ZIP64_EOCD_LOC_SIG = ZipLong.getBytes(0X07064B50L); //NOSONAR /** * Writes next block of compressed data to the output stream. 
@@ -1036,7 +1064,7 @@ // CRC if (phased){ putLong(ze.getCrc(), buf, LFH_CRC_OFFSET); - } else if (zipMethod == DEFLATED || raf != null) { + } else if (zipMethod == DEFLATED || channel != null) { System.arraycopy(LZERO, 0, buf, LFH_CRC_OFFSET, WORD); } else { putLong(ze.getCrc(), buf, LFH_CRC_OFFSET); @@ -1053,7 +1081,7 @@ } else if (phased) { putLong(ze.getCompressedSize(), buf, LFH_COMPRESSED_SIZE_OFFSET); putLong(ze.getSize(), buf, LFH_ORIGINAL_SIZE_OFFSET); - } else if (zipMethod == DEFLATED || raf != null) { + } else if (zipMethod == DEFLATED || channel != null) { System.arraycopy(LZERO, 0, buf, LFH_COMPRESSED_SIZE_OFFSET, WORD); System.arraycopy(LZERO, 0, buf, LFH_ORIGINAL_SIZE_OFFSET, WORD); } else { // Stored @@ -1115,7 +1143,7 @@ * @throws IOException on error */ protected void writeDataDescriptor(final ZipArchiveEntry ze) throws IOException { - if (ze.getMethod() != DEFLATED || raf != null) { + if (ze.getMethod() != DEFLATED || channel != null) { return; } writeCounted(DD_SIG); @@ -1359,7 +1387,7 @@ + DWORD /* number of entries in CD on this disk */ + DWORD /* total number of entries */ + DWORD /* size of CD */ - + DWORD /* offset of CD */ + + (long) DWORD /* offset of CD */ )); // version made by and version needed to extract @@ -1436,7 +1464,7 @@ } private boolean isDeflatedToOutputStream(final int zipMethod) { - return zipMethod == DEFLATED && raf == null; + return zipMethod == DEFLATED && channel == null; } @@ -1481,7 +1509,7 @@ System.err.println("Adding z64 for " + ze.getName() + ", method: " + ze.getMethod() + " (" + (ze.getMethod() == STORED) + ")" - + ", raf: " + (raf != null)); + + ", channel: " + (channel != null)); */ z64 = new Zip64ExtendedInformationExtraField(); } @@ -1513,7 +1541,7 @@ */ private Zip64Mode getEffectiveZip64Mode(final ZipArchiveEntry ze) { if (zip64Mode != Zip64Mode.AsNeeded - || raf != null + || channel != null || ze.getMethod() != DEFLATED || ze.getSize() != ArchiveEntry.SIZE_UNKNOWN) { return zip64Mode; @@ -1539,8 +1567,8 @@ * corrupt archives so they can clean up any temporary files.

*/ void destroy() throws IOException { - if (raf != null) { - raf.close(); + if (channel != null) { + channel.close(); } if (out != null) { out.close(); diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/zip/ZipEncodingHelper.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/zip/ZipEncodingHelper.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/zip/ZipEncodingHelper.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/zip/ZipEncodingHelper.java 2016-12-25 11:57:03.000000000 +0000 @@ -69,7 +69,7 @@ static { final Map se = - new HashMap(); + new HashMap<>(); final char[] cp437_high_chars = new char[] { 0x00c7, 0x00fc, 0x00e9, 0x00e2, 0x00e4, 0x00e0, diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/zip/ZipFile.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/zip/ZipFile.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/zip/ZipFile.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/zip/ZipFile.java 2016-12-25 11:57:03.000000000 +0000 @@ -23,11 +23,15 @@ import java.io.File; import java.io.IOException; import java.io.InputStream; -import java.io.RandomAccessFile; +import java.nio.ByteBuffer; +import java.nio.channels.SeekableByteChannel; +import java.nio.file.Files; +import java.nio.file.StandardOpenOption; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.Enumeration; +import java.util.EnumSet; import java.util.HashMap; import java.util.LinkedList; import java.util.List; @@ -57,7 +61,7 @@ * *

It doesn't extend java.util.zip.ZipFile as it would * have to reimplement all methods anyway. Like - * java.util.ZipFile, it uses RandomAccessFile under the + * java.util.ZipFile, it uses SeekableByteChannel under the * covers and supports compressed and uncompressed entries. As of * Apache Commons Compress 1.3 it also transparently supports Zip64 * extensions and thus individual entries and archives larger than 4 @@ -90,13 +94,13 @@ * directory. */ private final List entries = - new LinkedList(); + new LinkedList<>(); /** * Maps String to list of ZipArchiveEntrys, name -> actual entries. */ private final Map> nameMap = - new HashMap>(HASH_SIZE); + new HashMap<>(HASH_SIZE); private static final class OffsetEntry { private long headerOffset = -1; @@ -125,7 +129,7 @@ /** * The actual data source. */ - private final RandomAccessFile archive; + private final SeekableByteChannel archive; /** * Whether to look for and use Unicode extra fields. @@ -138,10 +142,13 @@ private volatile boolean closed = true; // cached buffers - must only be used locally in the class (COMPRESS-172 - reduce garbage collection) - private final byte[] DWORD_BUF = new byte[DWORD]; - private final byte[] WORD_BUF = new byte[WORD]; - private final byte[] CFH_BUF = new byte[CFH_LEN]; - private final byte[] SHORT_BUF = new byte[SHORT]; + private final byte[] dwordBuf = new byte[DWORD]; + private final byte[] wordBuf = new byte[WORD]; + private final byte[] cfhBuf = new byte[CFH_LEN]; + private final byte[] shortBuf = new byte[SHORT]; + private final ByteBuffer dwordBbuf = ByteBuffer.wrap(dwordBuf); + private final ByteBuffer wordBbuf = ByteBuffer.wrap(wordBuf); + private final ByteBuffer cfhBbuf = ByteBuffer.wrap(cfhBuf); /** * Opens the given file for reading, assuming "UTF8" for file names. @@ -207,11 +214,80 @@ */ public ZipFile(final File f, final String encoding, final boolean useUnicodeExtraFields) throws IOException { - this.archiveName = f.getAbsolutePath(); + this(Files.newByteChannel(f.toPath(), EnumSet.of(StandardOpenOption.READ)), + f.getAbsolutePath(), encoding, useUnicodeExtraFields, true); + } + + /** + * Opens the given channel for reading, assuming "UTF8" for file names. + * + *

{@link
+ * org.apache.commons.compress.utils.SeekableInMemoryByteChannel}
+ * allows you to read from an in-memory archive.

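A corresponding read-side sketch for the channel-based constructors described here; the archive path and entry name are placeholders.

    import java.io.InputStream;
    import java.nio.file.Files;
    import java.nio.file.Paths;

    import org.apache.commons.compress.archivers.zip.ZipArchiveEntry;
    import org.apache.commons.compress.archivers.zip.ZipFile;
    import org.apache.commons.compress.utils.IOUtils;
    import org.apache.commons.compress.utils.SeekableInMemoryByteChannel;

    public class InMemoryZipRead {
        public static void main(String[] args) throws Exception {
            byte[] zipBytes = Files.readAllBytes(Paths.get("example.zip"));
            // Wrap the bytes in a SeekableByteChannel so ZipFile can seek without a file.
            try (ZipFile zipFile = new ZipFile(new SeekableInMemoryByteChannel(zipBytes))) {
                ZipArchiveEntry entry = zipFile.getEntry("greeting.txt");
                if (entry != null) {
                    try (InputStream in = zipFile.getInputStream(entry)) {
                        System.out.println(IOUtils.toByteArray(in).length + " bytes read");
                    }
                }
            }
        }
    }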
+ * + * @param channel the archive. + * + * @throws IOException if an error occurs while reading the file. + * @since 1.13 + */ + public ZipFile(final SeekableByteChannel channel) + throws IOException { + this(channel, "unknown archive", ZipEncodingHelper.UTF8, true); + } + + /** + * Opens the given channel for reading, assuming the specified + * encoding for file names. + * + *

{@link
+ * org.apache.commons.compress.utils.SeekableInMemoryByteChannel}
+ * allows you to read from an in-memory archive.

+ * + * @param channel the archive. + * @param encoding the encoding to use for file names, use null + * for the platform's default encoding + * + * @throws IOException if an error occurs while reading the file. + * @since 1.13 + */ + public ZipFile(final SeekableByteChannel channel, final String encoding) + throws IOException { + this(channel, "unknown archive", encoding, true); + } + + /** + * Opens the given channel for reading, assuming the specified + * encoding for file names. + * + *

{@link
+ * org.apache.commons.compress.utils.SeekableInMemoryByteChannel}
+ * allows you to read from an in-memory archive.

+ * + * @param channel the archive. + * @param archiveName name of the archive, used for error messages only. + * @param encoding the encoding to use for file names, use null + * for the platform's default encoding + * @param useUnicodeExtraFields whether to use InfoZIP Unicode + * Extra Fields (if present) to set the file names. + * + * @throws IOException if an error occurs while reading the file. + * @since 1.13 + */ + public ZipFile(final SeekableByteChannel channel, final String archiveName, + final String encoding, final boolean useUnicodeExtraFields) + throws IOException { + this(channel, archiveName, encoding, useUnicodeExtraFields, false); + } + + private ZipFile(final SeekableByteChannel channel, final String archiveName, + final String encoding, final boolean useUnicodeExtraFields, + final boolean closeOnError) + throws IOException { + this.archiveName = archiveName; this.encoding = encoding; this.zipEncoding = ZipEncodingHelper.getZipEncoding(encoding); this.useUnicodeExtraFields = useUnicodeExtraFields; - archive = new RandomAccessFile(f, "r"); + archive = channel; boolean success = false; try { final Map entriesWithoutUTF8Flag = @@ -220,7 +296,7 @@ success = true; } finally { closed = !success; - if (!success) { + if (!success && closeOnError) { IOUtils.closeQuietly(archive); } } @@ -282,7 +358,7 @@ */ public Enumeration getEntriesInPhysicalOrder() { final ZipArchiveEntry[] allEntries = entries.toArray(new ZipArchiveEntry[entries.size()]); - Arrays.sort(allEntries, OFFSET_COMPARATOR); + Arrays.sort(allEntries, offsetComparator); return Collections.enumeration(Arrays.asList(allEntries)); } @@ -331,7 +407,7 @@ ZipArchiveEntry[] entriesOfThatName = new ZipArchiveEntry[0]; if (nameMap.containsKey(name)) { entriesOfThatName = nameMap.get(name).toArray(entriesOfThatName); - Arrays.sort(entriesOfThatName, OFFSET_COMPARATOR); + Arrays.sort(entriesOfThatName, offsetComparator); } return Arrays.asList(entriesOfThatName); } @@ -406,8 +482,9 @@ final OffsetEntry offsetEntry = ((Entry) ze).getOffsetEntry(); ZipUtil.checkRequestedFeatures(ze); final long start = offsetEntry.dataOffset; + // doesn't get closed if the method is not supported, but doesn't hold any resources either final BoundedInputStream bis = - new BoundedInputStream(start, ze.getCompressedSize()); + new BoundedInputStream(start, ze.getCompressedSize()); //NOSONAR switch (ZipMethod.getMethodByCode(ze.getMethod())) { case STORED: return bis; @@ -466,15 +543,8 @@ */ public String getUnixSymlink(final ZipArchiveEntry entry) throws IOException { if (entry != null && entry.isUnixSymlink()) { - InputStream in = null; - try { - in = getInputStream(entry); - final byte[] symlinkBytes = IOUtils.toByteArray(in); - return zipEncoding.decode(symlinkBytes); - } finally { - if (in != null) { - in.close(); - } + try (InputStream in = getInputStream(entry)) { + return zipEncoding.decode(IOUtils.toByteArray(in)); } } return null; @@ -537,12 +607,13 @@ private Map populateFromCentralDirectory() throws IOException { final HashMap noUTF8Flag = - new HashMap(); + new HashMap<>(); positionAtCentralDirectory(); - archive.readFully(WORD_BUF); - long sig = ZipLong.getValue(WORD_BUF); + wordBbuf.rewind(); + IOUtils.readFully(archive, wordBbuf); + long sig = ZipLong.getValue(wordBuf); if (sig != CFH_SIG && startsWithLocalFileHeader()) { throw new IOException("central directory is empty, can't expand" @@ -551,8 +622,9 @@ while (sig == CFH_SIG) { readCentralDirectoryEntry(noUTF8Flag); - archive.readFully(WORD_BUF); - sig = ZipLong.getValue(WORD_BUF); + 
wordBbuf.rewind(); + IOUtils.readFully(archive, wordBbuf); + sig = ZipLong.getValue(wordBuf); } return noUTF8Flag; } @@ -569,80 +641,81 @@ private void readCentralDirectoryEntry(final Map noUTF8Flag) throws IOException { - archive.readFully(CFH_BUF); + cfhBbuf.rewind(); + IOUtils.readFully(archive, cfhBbuf); int off = 0; final OffsetEntry offset = new OffsetEntry(); final Entry ze = new Entry(offset); - final int versionMadeBy = ZipShort.getValue(CFH_BUF, off); + final int versionMadeBy = ZipShort.getValue(cfhBuf, off); off += SHORT; ze.setVersionMadeBy(versionMadeBy); ze.setPlatform((versionMadeBy >> BYTE_SHIFT) & NIBLET_MASK); - ze.setVersionRequired(ZipShort.getValue(CFH_BUF, off)); + ze.setVersionRequired(ZipShort.getValue(cfhBuf, off)); off += SHORT; // version required - final GeneralPurposeBit gpFlag = GeneralPurposeBit.parse(CFH_BUF, off); + final GeneralPurposeBit gpFlag = GeneralPurposeBit.parse(cfhBuf, off); final boolean hasUTF8Flag = gpFlag.usesUTF8ForNames(); final ZipEncoding entryEncoding = hasUTF8Flag ? ZipEncodingHelper.UTF8_ZIP_ENCODING : zipEncoding; ze.setGeneralPurposeBit(gpFlag); - ze.setRawFlag(ZipShort.getValue(CFH_BUF, off)); + ze.setRawFlag(ZipShort.getValue(cfhBuf, off)); off += SHORT; //noinspection MagicConstant - ze.setMethod(ZipShort.getValue(CFH_BUF, off)); + ze.setMethod(ZipShort.getValue(cfhBuf, off)); off += SHORT; - final long time = ZipUtil.dosToJavaTime(ZipLong.getValue(CFH_BUF, off)); + final long time = ZipUtil.dosToJavaTime(ZipLong.getValue(cfhBuf, off)); ze.setTime(time); off += WORD; - ze.setCrc(ZipLong.getValue(CFH_BUF, off)); + ze.setCrc(ZipLong.getValue(cfhBuf, off)); off += WORD; - ze.setCompressedSize(ZipLong.getValue(CFH_BUF, off)); + ze.setCompressedSize(ZipLong.getValue(cfhBuf, off)); off += WORD; - ze.setSize(ZipLong.getValue(CFH_BUF, off)); + ze.setSize(ZipLong.getValue(cfhBuf, off)); off += WORD; - final int fileNameLen = ZipShort.getValue(CFH_BUF, off); + final int fileNameLen = ZipShort.getValue(cfhBuf, off); off += SHORT; - final int extraLen = ZipShort.getValue(CFH_BUF, off); + final int extraLen = ZipShort.getValue(cfhBuf, off); off += SHORT; - final int commentLen = ZipShort.getValue(CFH_BUF, off); + final int commentLen = ZipShort.getValue(cfhBuf, off); off += SHORT; - final int diskStart = ZipShort.getValue(CFH_BUF, off); + final int diskStart = ZipShort.getValue(cfhBuf, off); off += SHORT; - ze.setInternalAttributes(ZipShort.getValue(CFH_BUF, off)); + ze.setInternalAttributes(ZipShort.getValue(cfhBuf, off)); off += SHORT; - ze.setExternalAttributes(ZipLong.getValue(CFH_BUF, off)); + ze.setExternalAttributes(ZipLong.getValue(cfhBuf, off)); off += WORD; final byte[] fileName = new byte[fileNameLen]; - archive.readFully(fileName); + IOUtils.readFully(archive, ByteBuffer.wrap(fileName)); ze.setName(entryEncoding.decode(fileName), fileName); // LFH offset, - offset.headerOffset = ZipLong.getValue(CFH_BUF, off); + offset.headerOffset = ZipLong.getValue(cfhBuf, off); // data offset will be filled later entries.add(ze); final byte[] cdExtraData = new byte[extraLen]; - archive.readFully(cdExtraData); + IOUtils.readFully(archive, ByteBuffer.wrap(cdExtraData)); ze.setCentralDirectoryExtra(cdExtraData); setSizesAndOffsetFromZip64Extra(ze, offset, diskStart); final byte[] comment = new byte[commentLen]; - archive.readFully(comment); + IOUtils.readFully(archive, ByteBuffer.wrap(comment)); ze.setComment(entryEncoding.decode(comment)); if (!hasUTF8Flag && useUnicodeExtraFields) { @@ -801,12 +874,13 @@ positionAtEndOfCentralDirectoryRecord(); 
boolean found = false; final boolean searchedForZip64EOCD = - archive.getFilePointer() > ZIP64_EOCDL_LENGTH; + archive.position() > ZIP64_EOCDL_LENGTH; if (searchedForZip64EOCD) { - archive.seek(archive.getFilePointer() - ZIP64_EOCDL_LENGTH); - archive.readFully(WORD_BUF); + archive.position(archive.position() - ZIP64_EOCDL_LENGTH); + wordBbuf.rewind(); + IOUtils.readFully(archive, wordBbuf); found = Arrays.equals(ZipArchiveOutputStream.ZIP64_EOCD_LOC_SIG, - WORD_BUF); + wordBuf); } if (!found) { // not a ZIP64 archive @@ -832,17 +906,20 @@ throws IOException { skipBytes(ZIP64_EOCDL_LOCATOR_OFFSET - WORD /* signature has already been read */); - archive.readFully(DWORD_BUF); - archive.seek(ZipEightByteInteger.getLongValue(DWORD_BUF)); - archive.readFully(WORD_BUF); - if (!Arrays.equals(WORD_BUF, ZipArchiveOutputStream.ZIP64_EOCD_SIG)) { + dwordBbuf.rewind(); + IOUtils.readFully(archive, dwordBbuf); + archive.position(ZipEightByteInteger.getLongValue(dwordBuf)); + wordBbuf.rewind(); + IOUtils.readFully(archive, wordBbuf); + if (!Arrays.equals(wordBuf, ZipArchiveOutputStream.ZIP64_EOCD_SIG)) { throw new ZipException("archive's ZIP64 end of central " + "directory locator is corrupt."); } skipBytes(ZIP64_EOCD_CFD_LOCATOR_OFFSET - WORD /* signature has already been read */); - archive.readFully(DWORD_BUF); - archive.seek(ZipEightByteInteger.getLongValue(DWORD_BUF)); + dwordBbuf.rewind(); + IOUtils.readFully(archive, dwordBbuf); + archive.position(ZipEightByteInteger.getLongValue(dwordBuf)); } /** @@ -855,8 +932,9 @@ private void positionAtCentralDirectory32() throws IOException { skipBytes(CFD_LOCATOR_OFFSET); - archive.readFully(WORD_BUF); - archive.seek(ZipLong.getValue(WORD_BUF)); + wordBbuf.rewind(); + IOUtils.readFully(archive, wordBbuf); + archive.position(ZipLong.getValue(wordBuf)); } /** @@ -881,22 +959,26 @@ final long maxDistanceFromEnd, final byte[] sig) throws IOException { boolean found = false; - long off = archive.length() - minDistanceFromEnd; + long off = archive.size() - minDistanceFromEnd; final long stopSearching = - Math.max(0L, archive.length() - maxDistanceFromEnd); + Math.max(0L, archive.size() - maxDistanceFromEnd); if (off >= 0) { for (; off >= stopSearching; off--) { - archive.seek(off); - int curr = archive.read(); - if (curr == -1) { + archive.position(off); + try { + wordBbuf.rewind(); + IOUtils.readFully(archive, wordBbuf); + wordBbuf.flip(); + } catch (EOFException ex) { break; } + int curr = wordBbuf.get(); if (curr == sig[POS_0]) { - curr = archive.read(); + curr = wordBbuf.get(); if (curr == sig[POS_1]) { - curr = archive.read(); + curr = wordBbuf.get(); if (curr == sig[POS_2]) { - curr = archive.read(); + curr = wordBbuf.get(); if (curr == sig[POS_3]) { found = true; break; @@ -907,7 +989,7 @@ } } if (found) { - archive.seek(off); + archive.position(off); } return found; } @@ -917,14 +999,12 @@ * skipping failed. 
*/ private void skipBytes(final int count) throws IOException { - int totalSkipped = 0; - while (totalSkipped < count) { - final int skippedNow = archive.skipBytes(count - totalSkipped); - if (skippedNow <= 0) { - throw new EOFException(); - } - totalSkipped += skippedNow; + long currentPosition = archive.position(); + long newPosition = currentPosition + count; + if (newPosition > archive.size()) { + throw new EOFException(); } + archive.position(newPosition); } /** @@ -940,7 +1020,7 @@ /* last mod file date */ + SHORT /* crc-32 */ + WORD /* compressed size */ + WORD - /* uncompressed size */ + WORD; + /* uncompressed size */ + (long) WORD; /** * Walks through all recorded entries and adds the data available @@ -958,22 +1038,17 @@ final Entry ze = (Entry) zipArchiveEntry; final OffsetEntry offsetEntry = ze.getOffsetEntry(); final long offset = offsetEntry.headerOffset; - archive.seek(offset + LFH_OFFSET_FOR_FILENAME_LENGTH); - archive.readFully(SHORT_BUF); - final int fileNameLen = ZipShort.getValue(SHORT_BUF); - archive.readFully(SHORT_BUF); - final int extraFieldLen = ZipShort.getValue(SHORT_BUF); - int lenToSkip = fileNameLen; - while (lenToSkip > 0) { - final int skipped = archive.skipBytes(lenToSkip); - if (skipped <= 0) { - throw new IOException("failed to skip file name in" - + " local file header"); - } - lenToSkip -= skipped; - } + archive.position(offset + LFH_OFFSET_FOR_FILENAME_LENGTH); + wordBbuf.rewind(); + IOUtils.readFully(archive, wordBbuf); + wordBbuf.flip(); + wordBbuf.get(shortBuf); + final int fileNameLen = ZipShort.getValue(shortBuf); + wordBbuf.get(shortBuf); + final int extraFieldLen = ZipShort.getValue(shortBuf); + skipBytes(fileNameLen); final byte[] localExtraData = new byte[extraFieldLen]; - archive.readFully(localExtraData); + IOUtils.readFully(archive, ByteBuffer.wrap(localExtraData)); ze.setExtra(localExtraData); offsetEntry.dataOffset = offset + LFH_OFFSET_FOR_FILENAME_LENGTH + SHORT + SHORT + fileNameLen + extraFieldLen; @@ -987,7 +1062,7 @@ final String name = ze.getName(); LinkedList entriesOfThatName = nameMap.get(name); if (entriesOfThatName == null) { - entriesOfThatName = new LinkedList(); + entriesOfThatName = new LinkedList<>(); nameMap.put(name, entriesOfThatName); } entriesOfThatName.addLast(ze); @@ -999,17 +1074,20 @@ * it may be an empty archive. */ private boolean startsWithLocalFileHeader() throws IOException { - archive.seek(0); - archive.readFully(WORD_BUF); - return Arrays.equals(WORD_BUF, ZipArchiveOutputStream.LFH_SIG); + archive.position(0); + wordBbuf.rewind(); + IOUtils.readFully(archive, wordBbuf); + return Arrays.equals(wordBuf, ZipArchiveOutputStream.LFH_SIG); } /** * InputStream that delegates requests to the underlying - * RandomAccessFile, making sure that only bytes from a certain + * SeekableByteChannel, making sure that only bytes from a certain * range can be read. 
*/ private class BoundedInputStream extends InputStream { + private static final int MAX_BUF_LEN = 8192; + private final ByteBuffer buffer; private long remaining; private long loc; private boolean addDummyByte = false; @@ -1017,6 +1095,11 @@ BoundedInputStream(final long start, final long remaining) { this.remaining = remaining; loc = start; + if (remaining < MAX_BUF_LEN && remaining > 0) { + buffer = ByteBuffer.allocate((int) remaining); + } else { + buffer = ByteBuffer.allocate(MAX_BUF_LEN); + } } @Override @@ -1029,8 +1112,12 @@ return -1; } synchronized (archive) { - archive.seek(loc++); - return archive.read(); + archive.position(loc++); + int read = read(1); + if (read < 0) { + return read; + } + return buffer.get() & 0xff; } } @@ -1052,18 +1139,34 @@ if (len > remaining) { len = (int) remaining; } + ByteBuffer buf; int ret = -1; synchronized (archive) { - archive.seek(loc); - ret = archive.read(b, off, len); + archive.position(loc); + if (len <= buffer.capacity()) { + buf = buffer; + ret = read(len); + } else { + buf = ByteBuffer.allocate(len); + ret = archive.read(buf); + buf.flip(); + } } if (ret > 0) { + buf.get(b, off, ret); loc += ret; remaining -= ret; } return ret; } + private int read(int len) throws IOException { + buffer.rewind().limit(len); + int read = archive.read(buffer); + buffer.flip(); + return read; + } + /** * Inflater needs an extra dummy byte for nowrap - see * Inflater's javadocs. @@ -1090,7 +1193,7 @@ * * @since 1.1 */ - private final Comparator OFFSET_COMPARATOR = + private final Comparator offsetComparator = new Comparator() { @Override public int compare(final ZipArchiveEntry e1, final ZipArchiveEntry e2) { diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/zip/ZipLong.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/zip/ZipLong.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/zip/ZipLong.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/zip/ZipLong.java 2016-12-25 11:57:03.000000000 +0000 @@ -209,7 +209,7 @@ return super.clone(); } catch (final CloneNotSupportedException cnfe) { // impossible - throw new RuntimeException(cnfe); + throw new RuntimeException(cnfe); //NOSONAR } } diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/zip/ZipMethod.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/zip/ZipMethod.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/zip/ZipMethod.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/zip/ZipMethod.java 2016-12-25 11:57:03.000000000 +0000 @@ -173,7 +173,7 @@ private static final Map codeToEnum; static { - final Map cte = new HashMap(); + final Map cte = new HashMap<>(); for (final ZipMethod method : values()) { cte.put(method.getCode(), method); } diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/zip/ZipShort.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/zip/ZipShort.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/archivers/zip/ZipShort.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/archivers/zip/ZipShort.java 2016-12-25 11:57:03.000000000 +0000 @@ -151,7 
+151,7 @@ return super.clone(); } catch (final CloneNotSupportedException cnfe) { // impossible - throw new RuntimeException(cnfe); + throw new RuntimeException(cnfe); //NOSONAR } } diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/changes/ChangeSet.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/changes/ChangeSet.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/changes/ChangeSet.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/changes/ChangeSet.java 2016-12-25 11:57:03.000000000 +0000 @@ -34,7 +34,7 @@ */ public final class ChangeSet { - private final Set changes = new LinkedHashSet(); + private final Set changes = new LinkedHashSet<>(); /** * Deletes the file with the filename from the archive. @@ -161,6 +161,6 @@ * @return the changes as a copy */ Set getChanges() { - return new LinkedHashSet(changes); + return new LinkedHashSet<>(changes); } } diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/changes/ChangeSetPerformer.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/changes/ChangeSetPerformer.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/changes/ChangeSetPerformer.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/changes/ChangeSetPerformer.java 2016-12-25 11:57:03.000000000 +0000 @@ -113,7 +113,7 @@ throws IOException { final ChangeSetResults results = new ChangeSetResults(); - final Set workingSet = new LinkedHashSet(changes); + final Set workingSet = new LinkedHashSet<>(changes); for (final Iterator it = workingSet.iterator(); it.hasNext();) { final Change change = it.next(); @@ -273,7 +273,8 @@ } @Override public ArchiveEntry next() { - return current = nestedEnum.nextElement(); + current = nestedEnum.nextElement(); + return current; } @Override public InputStream getInputStream() throws IOException { diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/changes/ChangeSetResults.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/changes/ChangeSetResults.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/changes/ChangeSetResults.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/changes/ChangeSetResults.java 2016-12-25 11:57:03.000000000 +0000 @@ -25,9 +25,9 @@ * Stores the results of an performed ChangeSet operation. */ public class ChangeSetResults { - private final List addedFromChangeSet = new ArrayList(); - private final List addedFromStream = new ArrayList(); - private final List deleted = new ArrayList(); + private final List addedFromChangeSet = new ArrayList<>(); + private final List addedFromStream = new ArrayList<>(); + private final List deleted = new ArrayList<>(); /** * Adds the filename of a recently deleted file to the result list. 
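The ChangeSet classes touched above rewrite an archive while applying additions and deletions; a minimal deletion sketch might look like this (file and entry names are illustrative):

    import java.io.File;
    import java.io.FileInputStream;

    import org.apache.commons.compress.archivers.zip.ZipArchiveInputStream;
    import org.apache.commons.compress.archivers.zip.ZipArchiveOutputStream;
    import org.apache.commons.compress.changes.ChangeSet;
    import org.apache.commons.compress.changes.ChangeSetPerformer;
    import org.apache.commons.compress.changes.ChangeSetResults;

    public class DeleteFromZip {
        public static void main(String[] args) throws Exception {
            ChangeSet changes = new ChangeSet();
            changes.delete("obsolete.txt"); // drop this entry while copying the archive

            try (ZipArchiveInputStream in = new ZipArchiveInputStream(new FileInputStream("in.zip"));
                 ZipArchiveOutputStream out = new ZipArchiveOutputStream(new File("out.zip"))) {
                ChangeSetResults results = new ChangeSetPerformer(changes).perform(in, out);
                System.out.println("deleted: " + results.getDeleted());
            }
        }
    }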
diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/compressors/bzip2/BlockSort.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/compressors/bzip2/BlockSort.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/compressors/bzip2/BlockSort.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/compressors/bzip2/BlockSort.java 2016-12-25 11:57:03.000000000 +0000 @@ -446,8 +446,10 @@ private int[] eclass; private int[] getEclass() { - return eclass == null - ? (eclass = new int[quadrant.length / 2]) : eclass; + if (eclass == null) { + eclass = new int[quadrant.length / 2]; + } + return eclass; } /* @@ -643,7 +645,7 @@ HAMMER: while (true) { if (onceRunned) { fmap[j] = a; - if ((j -= h) <= mj) { + if ((j -= h) <= mj) { //NOSONAR break HAMMER; } } else { @@ -661,7 +663,7 @@ if (block[i1 + 3] == block[i2 + 3]) { if (block[i1 + 4] == block[i2 + 4]) { if (block[i1 + 5] == block[i2 + 5]) { - if (block[(i1 += 6)] == block[(i2 += 6)]) { + if (block[(i1 += 6)] == block[(i2 += 6)]) { //NOSONAR int x = lastShadow; X: while (x > 0) { x -= 4; @@ -674,10 +676,10 @@ if (quadrant[i1 + 2] == quadrant[i2 + 2]) { if (block[i1 + 4] == block[i2 + 4]) { if (quadrant[i1 + 3] == quadrant[i2 + 3]) { - if ((i1 += 4) >= lastPlus1) { + if ((i1 += 4) >= lastPlus1) { //NOSONAR i1 -= lastPlus1; } - if ((i2 += 4) >= lastPlus1) { + if ((i2 += 4) >= lastPlus1) { //NOSONAR i2 -= lastPlus1; } workDoneShadow++; @@ -971,7 +973,8 @@ runningOrder[i] = i; } - for (int h = 364; h != 1;) { + // h = 364, 121, 40, 13, 4, 1 + for (int h = 364; h != 1;) { //NOSONAR h /= 3; for (int i = h; i <= 255; i++) { final int vv = runningOrder[i]; diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/compressors/bzip2/BZip2Utils.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/compressors/bzip2/BZip2Utils.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/compressors/bzip2/BZip2Utils.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/compressors/bzip2/BZip2Utils.java 2016-12-25 11:57:03.000000000 +0000 @@ -33,7 +33,7 @@ static { final Map uncompressSuffix = - new LinkedHashMap(); + new LinkedHashMap<>(); // backwards compatibilty: BZip2Utils never created the short // tbz form, so .tar.bz2 has to be added explicitly uncompressSuffix.put(".tar.bz2", ".tar"); diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/compressors/CompressorStreamFactory.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/compressors/CompressorStreamFactory.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/compressors/CompressorStreamFactory.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/compressors/CompressorStreamFactory.java 2016-12-25 11:57:03.000000000 +0000 @@ -21,6 +21,15 @@ import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; +import java.security.AccessController; +import java.security.PrivilegedAction; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Iterator; +import java.util.Locale; +import java.util.Set; +import java.util.SortedMap; +import java.util.TreeMap; import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream; import 
org.apache.commons.compress.compressors.bzip2.BZip2CompressorOutputStream; @@ -29,66 +38,83 @@ import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream; import org.apache.commons.compress.compressors.gzip.GzipCompressorOutputStream; import org.apache.commons.compress.compressors.lzma.LZMACompressorInputStream; +import org.apache.commons.compress.compressors.lzma.LZMACompressorOutputStream; import org.apache.commons.compress.compressors.lzma.LZMAUtils; -import org.apache.commons.compress.compressors.xz.XZCompressorInputStream; -import org.apache.commons.compress.compressors.xz.XZCompressorOutputStream; -import org.apache.commons.compress.compressors.xz.XZUtils; import org.apache.commons.compress.compressors.pack200.Pack200CompressorInputStream; import org.apache.commons.compress.compressors.pack200.Pack200CompressorOutputStream; import org.apache.commons.compress.compressors.snappy.FramedSnappyCompressorInputStream; import org.apache.commons.compress.compressors.snappy.SnappyCompressorInputStream; +import org.apache.commons.compress.compressors.xz.XZCompressorInputStream; +import org.apache.commons.compress.compressors.xz.XZCompressorOutputStream; +import org.apache.commons.compress.compressors.xz.XZUtils; import org.apache.commons.compress.compressors.z.ZCompressorInputStream; import org.apache.commons.compress.utils.IOUtils; +import org.apache.commons.compress.utils.Lists; +import org.apache.commons.compress.utils.ServiceLoaderIterator; +import org.apache.commons.compress.utils.Sets; /** - *

Factory to create Compressor[In|Out]putStreams from names. To add other + *

+ * Factory to create Compressor[In|Out]putStreams from names. To add other * implementations you should extend CompressorStreamFactory and override the - * appropriate methods (and call their implementation from super of course).

+ * appropriate methods (and call their implementation from super of course). + *

* * Example (Compressing a file): * *
- * final OutputStream out = new FileOutputStream(output); 
- * CompressorOutputStream cos = 
- *      new CompressorStreamFactory().createCompressorOutputStream(CompressorStreamFactory.BZIP2, out);
+ * final OutputStream out = new FileOutputStream(output);
+ * CompressorOutputStream cos = new CompressorStreamFactory()
+ *         .createCompressorOutputStream(CompressorStreamFactory.BZIP2, out);
  * IOUtils.copy(new FileInputStream(input), cos);
  * cos.close();
  * 
* * Example (Decompressing a file): + * *
- * final InputStream is = new FileInputStream(input); 
- * CompressorInputStream in = 
- *      new CompressorStreamFactory().createCompressorInputStream(CompressorStreamFactory.BZIP2, is);
+ * final InputStream is = new FileInputStream(input);
+ * CompressorInputStream in = new CompressorStreamFactory().createCompressorInputStream(CompressorStreamFactory.BZIP2,
+ *         is);
  * IOUtils.copy(in, new FileOutputStream(output));
  * in.close();
  * 
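Besides the two name-based examples above, the factory can also sniff the format from the first bytes of the stream, as described for the autodetecting createCompressorInputStream overload further down in this file. A hedged sketch, not part of the upstream Javadoc; the file names are placeholders, and the BufferedInputStream supplies the mark support that autodetection requires:

    import java.io.BufferedInputStream;
    import java.io.FileInputStream;
    import java.io.FileOutputStream;
    import java.io.InputStream;

    import org.apache.commons.compress.compressors.CompressorInputStream;
    import org.apache.commons.compress.compressors.CompressorStreamFactory;
    import org.apache.commons.compress.utils.IOUtils;

    public class AutodetectSketch {
        public static void main(final String[] args) throws Exception {
            // BufferedInputStream provides the mark/reset support needed for signature detection.
            try (InputStream fileIn = new BufferedInputStream(new FileInputStream("archive.bz2"));
                 CompressorInputStream in = new CompressorStreamFactory().createCompressorInputStream(fileIn);
                 FileOutputStream out = new FileOutputStream("archive")) {
                IOUtils.copy(in, out);
            }
        }
    }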
- * @Immutable provided that the deprecated method setDecompressConcatenated is not used. + * + * @Immutable provided that the deprecated method setDecompressConcatenated is + * not used. * @ThreadSafe even if the deprecated method setDecompressConcatenated is used */ -public class CompressorStreamFactory { +public class CompressorStreamFactory implements CompressorStreamProvider { + + private static final CompressorStreamFactory SINGLETON = new CompressorStreamFactory(); /** - * Constant (value {@value}) used to identify the BZIP2 compression algorithm. + * Constant (value {@value}) used to identify the BZIP2 compression + * algorithm. + * * @since 1.1 */ public static final String BZIP2 = "bzip2"; /** - * Constant (value {@value}) used to identify the GZIP compression algorithm. - * Not supported as an output stream type. + * Constant (value {@value}) used to identify the GZIP compression + * algorithm. Not supported as an output stream type. + * * @since 1.1 */ public static final String GZIP = "gz"; - + /** - * Constant (value {@value}) used to identify the PACK200 compression algorithm. + * Constant (value {@value}) used to identify the PACK200 compression + * algorithm. + * * @since 1.3 */ public static final String PACK200 = "pack200"; /** * Constant (value {@value}) used to identify the XZ compression method. + * * @since 1.4 */ public static final String XZ = "xz"; @@ -96,50 +122,202 @@ /** * Constant (value {@value}) used to identify the LZMA compression method. * Not supported as an output stream type. + * * @since 1.6 */ public static final String LZMA = "lzma"; /** - * Constant (value {@value}) used to identify the "framed" Snappy compression method. - * Not supported as an output stream type. + * Constant (value {@value}) used to identify the "framed" Snappy + * compression method. Not supported as an output stream type. + * * @since 1.7 */ public static final String SNAPPY_FRAMED = "snappy-framed"; /** - * Constant (value {@value}) used to identify the "raw" Snappy compression method. - * Not supported as an output stream type. + * Constant (value {@value}) used to identify the "raw" Snappy compression + * method. Not supported as an output stream type. + * * @since 1.7 */ public static final String SNAPPY_RAW = "snappy-raw"; /** - * Constant (value {@value}) used to identify the traditional Unix compress method. - * Not supported as an output stream type. + * Constant (value {@value}) used to identify the traditional Unix compress + * method. Not supported as an output stream type. + * * @since 1.7 */ public static final String Z = "z"; /** * Constant (value {@value}) used to identify the Deflate compress method. + * * @since 1.9 */ public static final String DEFLATE = "deflate"; /** - * If true, decompress until the end of the input. - * If false, stop after the first stream and leave the - * input position to point to the next byte after the stream + * Constructs a new sorted map from input stream provider names to provider + * objects. + * + *

+ * The map returned by this method will have one entry for each provider for + * which support is available in the current Java virtual machine. If two or + * more supported providers have the same name then the resulting map will + * contain just one of them; which one it will contain is not specified. + *

+ * + *

+ * The invocation of this method, and the subsequent use of the resulting + * map, may cause time-consuming disk or network I/O operations to occur. + * This method is provided for applications that need to enumerate all of + * the available providers, for example to allow user provider selection. + *


+ * + *

+ * This method may return different results at different times if new + * providers are dynamically made available to the current Java virtual + * machine. + *
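As a rough sketch of how the provider map built by this method might be consumed: the generic type parameters below are inferred, since they do not survive in the rendered diff, and the printed output is illustrative only.

    import java.util.Map;
    import java.util.SortedMap;

    import org.apache.commons.compress.compressors.CompressorStreamFactory;
    import org.apache.commons.compress.compressors.CompressorStreamProvider;

    public class ListProvidersSketch {
        public static void main(final String[] args) {
            final SortedMap<String, CompressorStreamProvider> providers =
                    CompressorStreamFactory.findAvailableCompressorInputStreamProviders();
            for (final Map.Entry<String, CompressorStreamProvider> e : providers.entrySet()) {
                // Keys are the upper-cased compressor names; values are the providers that create the streams.
                System.out.println(e.getKey() + " -> " + e.getValue().getClass().getName());
            }
        }
    }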

+ * + * @return An immutable, map from names to provider objects + * @since 1.13 + */ + public static SortedMap findAvailableCompressorInputStreamProviders() { + return AccessController.doPrivileged(new PrivilegedAction>() { + @Override + public SortedMap run() { + final TreeMap map = new TreeMap<>(); + putAll(SINGLETON.getInputStreamCompressorNames(), SINGLETON, map); + for (final CompressorStreamProvider provider : findCompressorStreamProviders()) { + putAll(provider.getInputStreamCompressorNames(), provider, map); + } + return map; + } + }); + } + + /** + * Constructs a new sorted map from output stream provider names to provider + * objects. + * + *

+ * The map returned by this method will have one entry for each provider for + * which support is available in the current Java virtual machine. If two or + * more supported providers have the same name then the resulting map will + * contain just one of them; which one it will contain is not specified. + *

+ * + *

+ * The invocation of this method, and the subsequent use of the resulting + * map, may cause time-consuming disk or network I/O operations to occur. + * This method is provided for applications that need to enumerate all of + * the available providers, for example to allow user provider selection. + *

+ * + *

+ * This method may return different results at different times if new + * providers are dynamically made available to the current Java virtual + * machine. + *

+ * + * @return An immutable, map from names to provider objects + * @since 1.13 + */ + public static SortedMap findAvailableCompressorOutputStreamProviders() { + return AccessController.doPrivileged(new PrivilegedAction>() { + @Override + public SortedMap run() { + final TreeMap map = new TreeMap<>(); + putAll(SINGLETON.getOutputStreamCompressorNames(), SINGLETON, map); + for (final CompressorStreamProvider provider : findCompressorStreamProviders()) { + putAll(provider.getOutputStreamCompressorNames(), provider, map); + } + return map; + } + + }); + } + private static ArrayList findCompressorStreamProviders() { + return Lists.newArrayList(serviceLoaderIterator()); + } + + public static String getBzip2() { + return BZIP2; + } + + public static String getDeflate() { + return DEFLATE; + } + + public static String getGzip() { + return GZIP; + } + + public static String getLzma() { + return LZMA; + } + + public static String getPack200() { + return PACK200; + } + + public static CompressorStreamFactory getSingleton() { + return SINGLETON; + } + + public static String getSnappyFramed() { + return SNAPPY_FRAMED; + } + + public static String getSnappyRaw() { + return SNAPPY_RAW; + } + + public static String getXz() { + return XZ; + } + + public static String getZ() { + return Z; + } + + static void putAll(final Set names, final CompressorStreamProvider provider, + final TreeMap map) { + for (final String name : names) { + map.put(toKey(name), provider); + } + } + + private static Iterator serviceLoaderIterator() { + return new ServiceLoaderIterator<>(CompressorStreamProvider.class); + } + + private static String toKey(final String name) { + return name.toUpperCase(Locale.ROOT); + } + + /** + * If true, decompress until the end of the input. If false, stop after the + * first stream and leave the input position to point to the next byte after + * the stream */ private final Boolean decompressUntilEOF; - // This is Boolean so setDecompressConcatenated can determine whether it has been set by the ctor - // once the setDecompressConcatenated method has been removed, it can revert to boolean + // This is Boolean so setDecompressConcatenated can determine whether it has + // been set by the ctor + // once the setDecompressConcatenated method has been removed, it can revert + // to boolean + private SortedMap compressorInputStreamProviders; + + private SortedMap compressorOutputStreamProviders; + /** - * If true, decompress until the end of the input. - * If false, stop after the first stream and leave the - * input position to point to the next byte after the stream + * If true, decompress until the end of the input. If false, stop after the + * first stream and leave the input position to point to the next byte after + * the stream */ private volatile boolean decompressConcatenated = false; @@ -147,62 +325,41 @@ * Create an instance with the decompress Concatenated option set to false. */ public CompressorStreamFactory() { - this.decompressUntilEOF = null; + this.decompressUntilEOF = null; } /** * Create an instance with the provided decompress Concatenated option. - * @param decompressUntilEOF - * if true, decompress until the end of the - * input; if false, stop after the first - * stream and leave the input position to point - * to the next byte after the stream. - * This setting applies to the gzip, bzip2 and xz formats only. 
+ * + * @param decompressUntilEOF + * if true, decompress until the end of the input; if false, stop + * after the first stream and leave the input position to point + * to the next byte after the stream. This setting applies to the + * gzip, bzip2 and xz formats only. * @since 1.10 */ public CompressorStreamFactory(final boolean decompressUntilEOF) { this.decompressUntilEOF = Boolean.valueOf(decompressUntilEOF); - // Also copy to existing variable so can continue to use that as the current value + // Also copy to existing variable so can continue to use that as the + // current value this.decompressConcatenated = decompressUntilEOF; } /** - * Whether to decompress the full input or only the first stream - * in formats supporting multiple concatenated input streams. - * - *

This setting applies to the gzip, bzip2 and xz formats only.

- * - * @param decompressConcatenated - * if true, decompress until the end of the - * input; if false, stop after the first - * stream and leave the input position to point - * to the next byte after the stream - * @since 1.5 - * @deprecated 1.10 use the {@link #CompressorStreamFactory(boolean)} constructor instead - * @throws IllegalStateException if the constructor {@link #CompressorStreamFactory(boolean)} - * was used to create the factory - */ - @Deprecated - public void setDecompressConcatenated(final boolean decompressConcatenated) { - if (this.decompressUntilEOF != null) { - throw new IllegalStateException("Cannot override the setting defined by the constructor"); - } - this.decompressConcatenated = decompressConcatenated; - } - - /** - * Create an compressor input stream from an input stream, autodetecting - * the compressor type from the first few bytes of the stream. The InputStream + * Create an compressor input stream from an input stream, autodetecting the + * compressor type from the first few bytes of the stream. The InputStream * must support marks, like BufferedInputStream. * - * @param in the input stream + * @param in + * the input stream * @return the compressor input stream - * @throws CompressorException if the compressor name is not known - * @throws IllegalArgumentException if the stream is null or does not support mark + * @throws CompressorException + * if the compressor name is not known + * @throws IllegalArgumentException + * if the stream is null or does not support mark * @since 1.1 */ - public CompressorInputStream createCompressorInputStream(final InputStream in) - throws CompressorException { + public CompressorInputStream createCompressorInputStream(final InputStream in) throws CompressorException { if (in == null) { throw new IllegalArgumentException("Stream must not be null."); } @@ -241,13 +398,11 @@ return new DeflateCompressorInputStream(in); } - if (XZUtils.matches(signature, signatureLength) && - XZUtils.isXZCompressionAvailable()) { + if (XZUtils.matches(signature, signatureLength) && XZUtils.isXZCompressionAvailable()) { return new XZCompressorInputStream(in, decompressConcatenated); } - if (LZMAUtils.matches(signature, signatureLength) && - LZMAUtils.isLZMACompressionAvailable()) { + if (LZMAUtils.matches(signature, signatureLength) && LZMAUtils.isLZMACompressionAvailable()) { return new LZMACompressorInputStream(in); } @@ -259,36 +414,46 @@ } /** - * Create a compressor input stream from a compressor name and an input stream. + * Creates a compressor input stream from a compressor name and an input + * stream. * - * @param name of the compressor, - * i.e. {@value #GZIP}, {@value #BZIP2}, {@value #XZ}, {@value #LZMA}, - * {@value #PACK200}, {@value #SNAPPY_RAW}, {@value #SNAPPY_FRAMED}, - * {@value #Z} or {@value #DEFLATE} - * @param in the input stream + * @param name + * of the compressor, i.e. 
{@value #GZIP}, {@value #BZIP2}, + * {@value #XZ}, {@value #LZMA}, {@value #PACK200}, + * {@value #SNAPPY_RAW}, {@value #SNAPPY_FRAMED}, {@value #Z} or + * {@value #DEFLATE} + * @param in + * the input stream * @return compressor input stream - * @throws CompressorException if the compressor name is not known - * @throws IllegalArgumentException if the name or input stream is null + * @throws CompressorException + * if the compressor name is not known + * @throws IllegalArgumentException + * if the name or input stream is null */ - public CompressorInputStream createCompressorInputStream(final String name, - final InputStream in) throws CompressorException { + public CompressorInputStream createCompressorInputStream(final String name, final InputStream in) + throws CompressorException { + return createCompressorInputStream(name, in, decompressConcatenated); + } + + @Override + public CompressorInputStream createCompressorInputStream(final String name, final InputStream in, + final boolean actualDecompressConcatenated) throws CompressorException { if (name == null || in == null) { - throw new IllegalArgumentException( - "Compressor name and stream must not be null."); + throw new IllegalArgumentException("Compressor name and stream must not be null."); } try { if (GZIP.equalsIgnoreCase(name)) { - return new GzipCompressorInputStream(in, decompressConcatenated); + return new GzipCompressorInputStream(in, actualDecompressConcatenated); } if (BZIP2.equalsIgnoreCase(name)) { - return new BZip2CompressorInputStream(in, decompressConcatenated); + return new BZip2CompressorInputStream(in, actualDecompressConcatenated); } if (XZ.equalsIgnoreCase(name)) { - return new XZCompressorInputStream(in, decompressConcatenated); + return new XZCompressorInputStream(in, actualDecompressConcatenated); } if (LZMA.equalsIgnoreCase(name)) { @@ -316,29 +481,36 @@ } } catch (final IOException e) { - throw new CompressorException( - "Could not create CompressorInputStream.", e); + throw new CompressorException("Could not create CompressorInputStream.", e); } + final CompressorStreamProvider compressorStreamProvider = getCompressorInputStreamProviders().get(toKey(name)); + if (compressorStreamProvider != null) { + return compressorStreamProvider.createCompressorInputStream(name, in, actualDecompressConcatenated); + } + throw new CompressorException("Compressor: " + name + " not found."); } /** - * Create an compressor output stream from an compressor name and an output stream. + * Creates an compressor output stream from an compressor name and an output + * stream. * - * @param name the compressor name, - * i.e. {@value #GZIP}, {@value #BZIP2}, {@value #XZ}, - * {@value #PACK200} or {@value #DEFLATE} - * @param out the output stream + * @param name + * the compressor name, i.e. 
{@value #GZIP}, {@value #BZIP2}, + * {@value #XZ}, {@value #PACK200} or {@value #DEFLATE} + * @param out + * the output stream * @return the compressor output stream - * @throws CompressorException if the archiver name is not known - * @throws IllegalArgumentException if the archiver name or stream is null + * @throws CompressorException + * if the archiver name is not known + * @throws IllegalArgumentException + * if the archiver name or stream is null */ - public CompressorOutputStream createCompressorOutputStream( - final String name, final OutputStream out) + @Override + public CompressorOutputStream createCompressorOutputStream(final String name, final OutputStream out) throws CompressorException { if (name == null || out == null) { - throw new IllegalArgumentException( - "Compressor name and stream must not be null."); + throw new IllegalArgumentException("Compressor name and stream must not be null."); } try { @@ -359,19 +531,84 @@ return new Pack200CompressorOutputStream(out); } + if (LZMA.equalsIgnoreCase(name)) { + return new LZMACompressorOutputStream(out); + } + if (DEFLATE.equalsIgnoreCase(name)) { return new DeflateCompressorOutputStream(out); } } catch (final IOException e) { - throw new CompressorException( - "Could not create CompressorOutputStream", e); + throw new CompressorException("Could not create CompressorOutputStream", e); + } + final CompressorStreamProvider compressorStreamProvider = getCompressorOutputStreamProviders().get(toKey(name)); + if (compressorStreamProvider != null) { + return compressorStreamProvider.createCompressorOutputStream(name, out); } throw new CompressorException("Compressor: " + name + " not found."); } + public SortedMap getCompressorInputStreamProviders() { + if (compressorInputStreamProviders == null) { + compressorInputStreamProviders = Collections + .unmodifiableSortedMap(findAvailableCompressorInputStreamProviders()); + } + return compressorInputStreamProviders; + } + + public SortedMap getCompressorOutputStreamProviders() { + if (compressorOutputStreamProviders == null) { + compressorOutputStreamProviders = Collections + .unmodifiableSortedMap(findAvailableCompressorOutputStreamProviders()); + } + return compressorOutputStreamProviders; + } + // For Unit tests boolean getDecompressConcatenated() { return decompressConcatenated; } + + public Boolean getDecompressUntilEOF() { + return decompressUntilEOF; + } + + @Override + public Set getInputStreamCompressorNames() { + return Sets.newHashSet(GZIP, BZIP2, XZ, LZMA, PACK200, SNAPPY_RAW, SNAPPY_FRAMED, Z, DEFLATE); + } + + @Override + public Set getOutputStreamCompressorNames() { + return Sets.newHashSet(GZIP, BZIP2, XZ, LZMA, PACK200, DEFLATE); + } + + /** + * Whether to decompress the full input or only the first stream in formats + * supporting multiple concatenated input streams. + * + *

+ * This setting applies to the gzip, bzip2 and xz formats only. + *

+ * + * @param decompressConcatenated + * if true, decompress until the end of the input; if false, stop + * after the first stream and leave the input position to point + * to the next byte after the stream + * @since 1.5 + * @deprecated 1.10 use the {@link #CompressorStreamFactory(boolean)} + * constructor instead + * @throws IllegalStateException + * if the constructor {@link #CompressorStreamFactory(boolean)} + * was used to create the factory + */ + @Deprecated + public void setDecompressConcatenated(final boolean decompressConcatenated) { + if (this.decompressUntilEOF != null) { + throw new IllegalStateException("Cannot override the setting defined by the constructor"); + } + this.decompressConcatenated = decompressConcatenated; + } + } diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/compressors/CompressorStreamProvider.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/compressors/CompressorStreamProvider.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/compressors/CompressorStreamProvider.java 1970-01-01 00:00:00.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/compressors/CompressorStreamProvider.java 2016-12-25 11:57:03.000000000 +0000 @@ -0,0 +1,103 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.commons.compress.compressors; + +import java.io.InputStream; +import java.io.OutputStream; +import java.util.Set; + +/** + * Creates Compressor {@link CompressorInputStream}s and + * {@link CompressorOutputStream}s. + * + * @since 1.13 + */ +public interface CompressorStreamProvider { + + /** + * Creates a compressor input stream from a compressor name and an input + * stream. + * + * @param name + * of the compressor, i.e. 
+ * {@value org.apache.commons.compress.compressors.CompressorStreamFactory#GZIP}, + * {@value org.apache.commons.compress.compressors.CompressorStreamFactory#BZIP2}, + * {@value org.apache.commons.compress.compressors.CompressorStreamFactory#XZ}, + * {@value org.apache.commons.compress.compressors.CompressorStreamFactory#LZMA}, + * {@value org.apache.commons.compress.compressors.CompressorStreamFactory#PACK200}, + * {@value org.apache.commons.compress.compressors.CompressorStreamFactory#SNAPPY_RAW}, + * {@value org.apache.commons.compress.compressors.CompressorStreamFactory#SNAPPY_FRAMED}, + * {@value org.apache.commons.compress.compressors.CompressorStreamFactory#Z} + * or + * {@value org.apache.commons.compress.compressors.CompressorStreamFactory#DEFLATE} + * @param in + * the input stream + * @param decompressUntilEOF + * if true, decompress until the end of the input; if false, stop + * after the first stream and leave the input position to point + * to the next byte after the stream. This setting applies to the + * gzip, bzip2 and xz formats only. + * @return compressor input stream + * @throws CompressorException + * if the compressor name is not known + * @throws IllegalArgumentException + * if the name or input stream is null + */ + CompressorInputStream createCompressorInputStream(final String name, final InputStream in, + final boolean decompressUntilEOF) throws CompressorException; + + /** + * Creates a compressor output stream from an compressor name and an output + * stream. + * + * @param name + * the compressor name, i.e. + * {@value org.apache.commons.compress.compressors.CompressorStreamFactory#GZIP}, + * {@value org.apache.commons.compress.compressors.CompressorStreamFactory#BZIP2}, + * {@value org.apache.commons.compress.compressors.CompressorStreamFactory#XZ}, + * {@value org.apache.commons.compress.compressors.CompressorStreamFactory#PACK200} + * or + * {@value org.apache.commons.compress.compressors.CompressorStreamFactory#DEFLATE} + * @param out + * the output stream + * @return the compressor output stream + * @throws CompressorException + * if the archiver name is not known + * @throws IllegalArgumentException + * if the archiver name or stream is null + */ + CompressorOutputStream createCompressorOutputStream(final String name, final OutputStream out) + throws CompressorException; + + /** + * Gets all the input stream compressor names for this provider + * + * @return all the input compressor names for this provider + */ + Set getInputStreamCompressorNames(); + + /** + * Gets all the output stream compressor names for this provider + * + * @return all the output compressor names for this provider + */ + Set getOutputStreamCompressorNames(); + +} diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/compressors/FileNameUtil.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/compressors/FileNameUtil.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/compressors/FileNameUtil.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/compressors/FileNameUtil.java 2016-12-25 11:57:03.000000000 +0000 @@ -35,7 +35,7 @@ * versions of those file types. For example: from ".tar" to ".tgz". 
*/ private final Map compressSuffix = - new HashMap(); + new HashMap<>(); /** * Map from common filename suffixes of compressed files to the diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/compressors/gzip/GzipUtils.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/compressors/gzip/GzipUtils.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/compressors/gzip/GzipUtils.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/compressors/gzip/GzipUtils.java 2016-12-25 11:57:03.000000000 +0000 @@ -35,7 +35,7 @@ // compressed extension of .tar as FileNameUtil will use the // first one found final Map uncompressSuffix = - new LinkedHashMap(); + new LinkedHashMap<>(); uncompressSuffix.put(".tgz", ".tar"); uncompressSuffix.put(".taz", ".tar"); uncompressSuffix.put(".svgz", ".svg"); diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/compressors/lzma/LZMACompressorOutputStream.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/compressors/lzma/LZMACompressorOutputStream.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/compressors/lzma/LZMACompressorOutputStream.java 1970-01-01 00:00:00.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/compressors/lzma/LZMACompressorOutputStream.java 2016-12-25 11:57:03.000000000 +0000 @@ -0,0 +1,80 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.compressors.lzma; + +import java.io.IOException; +import java.io.OutputStream; +import org.tukaani.xz.LZMA2Options; +import org.tukaani.xz.LZMAOutputStream; + +import org.apache.commons.compress.compressors.CompressorOutputStream; + +/** + * LZMA compressor. + * @since 1.13 + */ +public class LZMACompressorOutputStream extends CompressorOutputStream { + private final LZMAOutputStream out; + + /** + * Creates a LZMA compressor. + * + * @param outputStream the stream to wrap + * @throws IOException on error + */ + public LZMACompressorOutputStream(final OutputStream outputStream) + throws IOException { + out = new LZMAOutputStream(outputStream, new LZMA2Options(), -1); + } + + /** {@inheritDoc} */ + @Override + public void write(final int b) throws IOException { + out.write(b); + } + + /** {@inheritDoc} */ + @Override + public void write(final byte[] buf, final int off, final int len) throws IOException { + out.write(buf, off, len); + } + + /** + * Doesn't do anything as {@link LZMAOutputStream} doesn't support flushing. 
+ */ + @Override + public void flush() throws IOException { + out.flush(); + } + + /** + * Finishes compression without closing the underlying stream. + * No more data can be written to this stream after finishing. + * @throws IOException on error + */ + public void finish() throws IOException { + out.finish(); + } + + /** {@inheritDoc} */ + @Override + public void close() throws IOException { + out.close(); + } +} diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/compressors/lzma/LZMAUtils.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/compressors/lzma/LZMAUtils.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/compressors/lzma/LZMAUtils.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/compressors/lzma/LZMAUtils.java 2016-12-25 11:57:03.000000000 +0000 @@ -45,7 +45,7 @@ private static volatile CachedAvailability cachedLZMAAvailability; static { - final Map uncompressSuffix = new HashMap(); + final Map uncompressSuffix = new HashMap<>(); uncompressSuffix.put(".lzma", ""); uncompressSuffix.put("-lzma", ""); fileNameUtil = new FileNameUtil(uncompressSuffix, ".lzma"); diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/compressors/pack200/Pack200CompressorInputStream.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/compressors/pack200/Pack200CompressorInputStream.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/compressors/pack200/Pack200CompressorInputStream.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/compressors/pack200/Pack200CompressorInputStream.java 2016-12-25 11:57:03.000000000 +0000 @@ -168,27 +168,26 @@ private Pack200CompressorInputStream(final InputStream in, final File f, final Pack200Strategy mode, final Map props) - throws IOException { + throws IOException { originalInput = in; streamBridge = mode.newStreamBridge(); - final JarOutputStream jarOut = new JarOutputStream(streamBridge); - final Pack200.Unpacker u = Pack200.newUnpacker(); - if (props != null) { - u.properties().putAll(props); - } - if (f == null) { - u.unpack(new FilterInputStream(in) { + try (final JarOutputStream jarOut = new JarOutputStream(streamBridge)) { + final Pack200.Unpacker u = Pack200.newUnpacker(); + if (props != null) { + u.properties().putAll(props); + } + if (f == null) { + u.unpack(new FilterInputStream(in) { @Override - public void close() { + public void close() { // unpack would close this stream but we // want to give the user code more control } - }, - jarOut); - } else { - u.unpack(f, jarOut); + }, jarOut); + } else { + u.unpack(f, jarOut); + } } - jarOut.close(); } @Override @@ -225,7 +224,7 @@ try { streamBridge.getInput().mark(limit); } catch (final IOException ex) { - throw new RuntimeException(ex); + throw new RuntimeException(ex); //NOSONAR } } diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/compressors/pack200/Pack200CompressorOutputStream.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/compressors/pack200/Pack200CompressorOutputStream.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/compressors/pack200/Pack200CompressorOutputStream.java 2016-06-18 15:07:49.000000000 +0000 +++ 
libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/compressors/pack200/Pack200CompressorOutputStream.java 2016-12-25 11:57:03.000000000 +0000 @@ -133,8 +133,8 @@ JarInputStream ji = null; boolean success = false; try { - p.pack(ji = new JarInputStream(streamBridge.getInput()), - originalOutput); + ji = new JarInputStream(streamBridge.getInput()); + p.pack(ji, originalOutput); success = true; } finally { if (!success) { diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/compressors/pack200/Pack200Utils.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/compressors/pack200/Pack200Utils.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/compressors/pack200/Pack200Utils.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/compressors/pack200/Pack200Utils.java 2016-12-25 11:57:03.000000000 +0000 @@ -22,7 +22,6 @@ import java.io.File; import java.io.FileOutputStream; import java.io.IOException; -import java.io.OutputStream; import java.util.HashMap; import java.util.Map; import java.util.jar.JarFile; @@ -126,37 +125,27 @@ * @throws IOException if reading or writing fails */ public static void normalize(final File from, final File to, Map props) - throws IOException { + throws IOException { if (props == null) { - props = new HashMap(); + props = new HashMap<>(); } props.put(Pack200.Packer.SEGMENT_LIMIT, "-1"); - final File f = File.createTempFile("commons-compress", "pack200normalize"); - f.deleteOnExit(); + final File tempFile = File.createTempFile("commons-compress", "pack200normalize"); try { - OutputStream os = new FileOutputStream(f); - JarFile j = null; - try { - final Pack200.Packer p = Pack200.newPacker(); - p.properties().putAll(props); - p.pack(j = new JarFile(from), os); - j = null; - os.close(); - os = null; - - final Pack200.Unpacker u = Pack200.newUnpacker(); - os = new JarOutputStream(new FileOutputStream(to)); - u.unpack(f, (JarOutputStream) os); - } finally { - if (j != null) { - j.close(); - } - if (os != null) { - os.close(); - } + try (FileOutputStream fos = new FileOutputStream(tempFile); + JarFile jarFile = new JarFile(from)) { + final Pack200.Packer packer = Pack200.newPacker(); + packer.properties().putAll(props); + packer.pack(jarFile, fos); + } + final Pack200.Unpacker unpacker = Pack200.newUnpacker(); + try (JarOutputStream jos = new JarOutputStream(new FileOutputStream(to))) { + unpacker.unpack(tempFile, jos); } } finally { - f.delete(); + if (!tempFile.delete()) { + tempFile.deleteOnExit(); + } } } } diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/compressors/pack200/StreamBridge.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/compressors/pack200/StreamBridge.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/compressors/pack200/StreamBridge.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/compressors/pack200/StreamBridge.java 2016-12-25 11:57:03.000000000 +0000 @@ -33,7 +33,7 @@ */ abstract class StreamBridge extends FilterOutputStream { private InputStream input; - private final Object INPUT_LOCK = new Object(); + private final Object inputLock = new Object(); protected StreamBridge(final OutputStream out) { super(out); @@ -47,7 +47,7 @@ * Provides the input view. 
*/ InputStream getInput() throws IOException { - synchronized (INPUT_LOCK) { + synchronized (inputLock) { if (input == null) { input = getInputView(); } @@ -65,7 +65,7 @@ */ void stop() throws IOException { close(); - synchronized (INPUT_LOCK) { + synchronized (inputLock) { if (input != null) { input.close(); input = null; diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/compressors/snappy/FramedSnappyCompressorInputStream.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/compressors/snappy/FramedSnappyCompressorInputStream.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/compressors/snappy/FramedSnappyCompressorInputStream.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/compressors/snappy/FramedSnappyCompressorInputStream.java 2016-12-25 11:57:03.000000000 +0000 @@ -58,6 +58,7 @@ /** The underlying stream to read compressed data from */ private final PushbackInputStream in; + /** The dialect to expect */ private final FramedSnappyDialect dialect; @@ -201,8 +202,8 @@ uncompressedBytesRemaining = readSize() - 4 /* CRC */; expectedChecksum = unmask(readCrc()); } else if (type == COMPRESSED_CHUNK_TYPE) { - boolean expectChecksum = dialect.usesChecksumWithCompressedChunks(); - final long size = readSize() - (expectChecksum ? 4 : 0); + final boolean expectChecksum = dialect.usesChecksumWithCompressedChunks(); + final long size = readSize() - (expectChecksum ? 4l : 0l); if (expectChecksum) { expectedChecksum = unmask(readCrc()); } else { diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/compressors/snappy/FramedSnappyDialect.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/compressors/snappy/FramedSnappyDialect.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/compressors/snappy/FramedSnappyDialect.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/compressors/snappy/FramedSnappyDialect.java 2016-12-25 11:57:03.000000000 +0000 @@ -36,8 +36,8 @@ private final boolean streamIdentifier, checksumWithCompressedChunks; - private FramedSnappyDialect(boolean hasStreamIdentifier, - boolean usesChecksumWithCompressedChunks) { + private FramedSnappyDialect(final boolean hasStreamIdentifier, + final boolean usesChecksumWithCompressedChunks) { this.streamIdentifier = hasStreamIdentifier; this.checksumWithCompressedChunks = usesChecksumWithCompressedChunks; } diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/compressors/snappy/PureJavaCrc32C.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/compressors/snappy/PureJavaCrc32C.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/compressors/snappy/PureJavaCrc32C.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/compressors/snappy/PureJavaCrc32C.java 2016-12-25 11:57:03.000000000 +0000 @@ -60,19 +60,19 @@ while(len > 7) { final int c0 =(b[off+0] ^ localCrc) & 0xff; - final int c1 =(b[off+1] ^ (localCrc >>>= 8)) & 0xff; - final int c2 =(b[off+2] ^ (localCrc >>>= 8)) & 0xff; - final int c3 =(b[off+3] ^ (localCrc >>>= 8)) & 0xff; - localCrc = (T[T8_7_start + c0] ^ T[T8_6_start + c1]) - ^ (T[T8_5_start + c2] ^ T[T8_4_start + c3]); + final int c1 =(b[off+1] ^ (localCrc >>>= 8)) & 0xff; //NOSONAR + final 
int c2 =(b[off+2] ^ (localCrc >>>= 8)) & 0xff; //NOSONAR + final int c3 =(b[off+3] ^ (localCrc >>>= 8)) & 0xff; //NOSONAR + localCrc = (T[T8_7_START + c0] ^ T[T8_6_START + c1]) + ^ (T[T8_5_START + c2] ^ T[T8_4_START + c3]); final int c4 = b[off+4] & 0xff; final int c5 = b[off+5] & 0xff; final int c6 = b[off+6] & 0xff; final int c7 = b[off+7] & 0xff; - localCrc ^= (T[T8_3_start + c4] ^ T[T8_2_start + c5]) - ^ (T[T8_1_start + c6] ^ T[T8_0_start + c7]); + localCrc ^= (T[T8_3_START + c4] ^ T[T8_2_START + c5]) + ^ (T[T8_1_START + c6] ^ T[T8_0_START + c7]); off += 8; len -= 8; @@ -80,13 +80,13 @@ /* loop unroll - duff's device style */ switch(len) { - case 7: localCrc = (localCrc >>> 8) ^ T[T8_0_start + ((localCrc ^ b[off++]) & 0xff)]; - case 6: localCrc = (localCrc >>> 8) ^ T[T8_0_start + ((localCrc ^ b[off++]) & 0xff)]; - case 5: localCrc = (localCrc >>> 8) ^ T[T8_0_start + ((localCrc ^ b[off++]) & 0xff)]; - case 4: localCrc = (localCrc >>> 8) ^ T[T8_0_start + ((localCrc ^ b[off++]) & 0xff)]; - case 3: localCrc = (localCrc >>> 8) ^ T[T8_0_start + ((localCrc ^ b[off++]) & 0xff)]; - case 2: localCrc = (localCrc >>> 8) ^ T[T8_0_start + ((localCrc ^ b[off++]) & 0xff)]; - case 1: localCrc = (localCrc >>> 8) ^ T[T8_0_start + ((localCrc ^ b[off++]) & 0xff)]; + case 7: localCrc = (localCrc >>> 8) ^ T[T8_0_START + ((localCrc ^ b[off++]) & 0xff)]; + case 6: localCrc = (localCrc >>> 8) ^ T[T8_0_START + ((localCrc ^ b[off++]) & 0xff)]; + case 5: localCrc = (localCrc >>> 8) ^ T[T8_0_START + ((localCrc ^ b[off++]) & 0xff)]; + case 4: localCrc = (localCrc >>> 8) ^ T[T8_0_START + ((localCrc ^ b[off++]) & 0xff)]; + case 3: localCrc = (localCrc >>> 8) ^ T[T8_0_START + ((localCrc ^ b[off++]) & 0xff)]; + case 2: localCrc = (localCrc >>> 8) ^ T[T8_0_START + ((localCrc ^ b[off++]) & 0xff)]; + case 1: localCrc = (localCrc >>> 8) ^ T[T8_0_START + ((localCrc ^ b[off++]) & 0xff)]; default: /* nothing */ } @@ -97,21 +97,21 @@ @Override final public void update(final int b) { - crc = (crc >>> 8) ^ T[T8_0_start + ((crc ^ b) & 0xff)]; + crc = (crc >>> 8) ^ T[T8_0_START + ((crc ^ b) & 0xff)]; } // CRC polynomial tables generated by: // java -cp build/test/classes/:build/classes/ \ // org.apache.hadoop.util.TestPureJavaCrc32\$Table 82F63B78 - private static final int T8_0_start = 0*256; - private static final int T8_1_start = 1*256; - private static final int T8_2_start = 2*256; - private static final int T8_3_start = 3*256; - private static final int T8_4_start = 4*256; - private static final int T8_5_start = 5*256; - private static final int T8_6_start = 6*256; - private static final int T8_7_start = 7*256; + private static final int T8_0_START = 0*256; + private static final int T8_1_START = 1*256; + private static final int T8_2_START = 2*256; + private static final int T8_3_START = 3*256; + private static final int T8_4_START = 4*256; + private static final int T8_5_START = 5*256; + private static final int T8_6_START = 6*256; + private static final int T8_7_START = 7*256; private static final int[] T = new int[] { /* T8_0 */ diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/compressors/xz/XZUtils.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/compressors/xz/XZUtils.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/compressors/xz/XZUtils.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/compressors/xz/XZUtils.java 2016-12-25 11:57:03.000000000 +0000 @@ -48,7 
+48,7 @@ private static volatile CachedAvailability cachedXZAvailability; static { - final Map uncompressSuffix = new HashMap(); + final Map uncompressSuffix = new HashMap<>(); uncompressSuffix.put(".txz", ".tar"); uncompressSuffix.put(".xz", ""); uncompressSuffix.put("-xz", ""); diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/utils/ArchiveUtils.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/utils/ArchiveUtils.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/utils/ArchiveUtils.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/utils/ArchiveUtils.java 2016-12-25 11:57:03.000000000 +0000 @@ -75,7 +75,8 @@ try { buffer1 = expected.getBytes(CharsetNames.US_ASCII); } catch (final UnsupportedEncodingException e) { - throw new RuntimeException(e); // Should not happen + // Should not happen + throw new RuntimeException(e); //NOSONAR } return isEqual(buffer1, 0, buffer1.length, buffer, offset, length, false); } @@ -102,7 +103,8 @@ try { return inputString.getBytes(CharsetNames.US_ASCII); } catch (final UnsupportedEncodingException e) { - throw new RuntimeException(e); // Should never happen + // Should never happen + throw new RuntimeException(e); //NOSONAR } } @@ -116,7 +118,8 @@ try { return new String(inputBytes, CharsetNames.US_ASCII); } catch (final UnsupportedEncodingException e) { - throw new RuntimeException(e); // Should never happen + // Should never happen + throw new RuntimeException(e); //NOSONAR } } @@ -132,7 +135,8 @@ try { return new String(inputBytes, offset, length, CharsetNames.US_ASCII); } catch (final UnsupportedEncodingException e) { - throw new RuntimeException(e); // Should never happen + // Should never happen + throw new RuntimeException(e); //NOSONAR } } @@ -270,7 +274,7 @@ * @return a sanitized version of the argument * @since Compress 1.12 */ - public static String sanitize(String s) { + public static String sanitize(final String s) { final char[] cs = s.toCharArray(); final char[] chars = cs.length <= MAX_SANITIZED_NAME_LENGTH ? 
cs : Arrays.copyOf(cs, MAX_SANITIZED_NAME_LENGTH); if (cs.length > MAX_SANITIZED_NAME_LENGTH) { @@ -278,12 +282,10 @@ chars[i] = '.'; } } - final int len = chars.length; final StringBuilder sb = new StringBuilder(); - for (int i = 0; i < len; i++) { - final char c = chars[i]; + for (final char c : chars) { if (!Character.isISOControl(c)) { - Character.UnicodeBlock block = Character.UnicodeBlock.of(c); + final Character.UnicodeBlock block = Character.UnicodeBlock.of(c); if (block != null && block != Character.UnicodeBlock.SPECIALS) { sb.append(c); continue; diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/utils/BitInputStream.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/utils/BitInputStream.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/utils/BitInputStream.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/utils/BitInputStream.java 2016-12-25 11:57:03.000000000 +0000 @@ -82,7 +82,7 @@ if (count < 0 || count > MAXIMUM_CACHE_SIZE) { throw new IllegalArgumentException("count must not be negative or greater than " + MAXIMUM_CACHE_SIZE); } - while (bitsCachedSize < count) { + while (bitsCachedSize < count && bitsCachedSize < 57) { final long nextByte = in.read(); if (nextByte < 0) { return nextByte; @@ -95,15 +95,43 @@ } bitsCachedSize += 8; } + int overflowBits = 0; + long overflow = 0l; + if (bitsCachedSize < count) { + // bitsCachedSize >= 57 and left-shifting it 8 bits would cause an overflow + int bitsToAddCount = count - bitsCachedSize; + overflowBits = 8 - bitsToAddCount; + final long nextByte = in.read(); + if (nextByte < 0) { + return nextByte; + } + if (byteOrder == ByteOrder.LITTLE_ENDIAN) { + long bitsToAdd = nextByte & MASKS[bitsToAddCount]; + bitsCached |= (bitsToAdd << bitsCachedSize); + overflow = (nextByte >>> bitsToAddCount) & MASKS[overflowBits]; + } else { + bitsCached <<= bitsToAddCount; + long bitsToAdd = (nextByte >>> (overflowBits)) & MASKS[bitsToAddCount]; + bitsCached |= bitsToAdd; + overflow = nextByte & MASKS[overflowBits]; + } + bitsCachedSize = count; + } final long bitsOut; - if (byteOrder == ByteOrder.LITTLE_ENDIAN) { - bitsOut = (bitsCached & MASKS[count]); - bitsCached >>>= count; + if (overflowBits == 0) { + if (byteOrder == ByteOrder.LITTLE_ENDIAN) { + bitsOut = (bitsCached & MASKS[count]); + bitsCached >>>= count; + } else { + bitsOut = (bitsCached >> (bitsCachedSize - count)) & MASKS[count]; + } + bitsCachedSize -= count; } else { - bitsOut = (bitsCached >> (bitsCachedSize - count)) & MASKS[count]; + bitsOut = bitsCached & MASKS[count]; + bitsCached = overflow; + bitsCachedSize = overflowBits; } - bitsCachedSize -= count; return bitsOut; } } diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/utils/FlushShieldFilterOutputStream.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/utils/FlushShieldFilterOutputStream.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/utils/FlushShieldFilterOutputStream.java 1970-01-01 00:00:00.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/utils/FlushShieldFilterOutputStream.java 2016-12-25 11:57:03.000000000 +0000 @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.commons.compress.utils; + +import java.io.FilterOutputStream; +import java.io.IOException; +import java.io.OutputStream; + +/** + * Re-implements {@link FilterOutputStream#flush()} to do nothing. + */ +public class FlushShieldFilterOutputStream extends FilterOutputStream { + + public FlushShieldFilterOutputStream(OutputStream out) { + super(out); + } + + @Override + public void flush() throws IOException { + // NO IMPLEMENTATION. + } + +} diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/utils/IOUtils.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/utils/IOUtils.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/utils/IOUtils.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/utils/IOUtils.java 2016-12-25 11:57:03.000000000 +0000 @@ -20,9 +20,12 @@ import java.io.ByteArrayOutputStream; import java.io.Closeable; +import java.io.EOFException; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; +import java.nio.ByteBuffer; +import java.nio.channels.ReadableByteChannel; /** * Utility functions @@ -166,6 +169,35 @@ return count; } + /** + * Reads {@code b.remaining()} bytes from the given channel + * starting at the current channel's position. + * + *

This method reads repeatedly from the channel until the + * requested number of bytes are read. This method blocks until + * the requested number of bytes are read, the end of the channel + * is detected, or an exception is thrown.
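A short usage sketch for this channel-based readFully; the file name and buffer size are arbitrary, chosen only for illustration.

    import java.io.FileInputStream;
    import java.nio.ByteBuffer;
    import java.nio.channels.FileChannel;

    import org.apache.commons.compress.utils.IOUtils;

    public class ReadFullySketch {
        public static void main(final String[] args) throws Exception {
            final ByteBuffer header = ByteBuffer.allocate(16);
            try (FileInputStream fis = new FileInputStream("data.bin");
                 FileChannel channel = fis.getChannel()) {
                IOUtils.readFully(channel, header); // throws EOFException if fewer than 16 bytes are left
                header.flip();                      // prepare the buffer for reading the 16 header bytes
            }
        }
    }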

+ * + * @param channel the channel to read from + * @param b the buffer into which the data is read. + * @throws IOException - if an I/O error occurs. + * @throws EOFException - if the channel reaches the end before reading all the bytes. + */ + public static void readFully(ReadableByteChannel channel, ByteBuffer b) throws IOException { + final int expectedLength = b.remaining(); + int read = 0; + while (read < expectedLength) { + int readNow = channel.read(b); + if (readNow <= 0) { + break; + } + read += readNow; + } + if (read < expectedLength) { + throw new EOFException(); + } + } + // toByteArray(InputStream) copied from: // commons/proper/io/trunk/src/main/java/org/apache/commons/io/IOUtils.java?revision=1428941 // January 8th, 2013 diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/utils/Iterators.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/utils/Iterators.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/utils/Iterators.java 1970-01-01 00:00:00.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/utils/Iterators.java 2016-12-25 11:57:03.000000000 +0000 @@ -0,0 +1,59 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.commons.compress.utils; + +import java.util.Collection; +import java.util.Iterator; +import java.util.Objects; + +/** + * Iterator utilities. + * + * @since 1.13. + */ +public class Iterators { + + /** + * Adds all the elements in the source {@code iterator} to the target + * {@code collection}. + * + *

+ * When this method returns, the {@code iterator} will be "empty": its + * {@code hasNext()} method returns {@code false}. + *

+ * + * @return {@code true} if the target {@code collection} was modified as a + * result of this operation + */ + public static boolean addAll(final Collection collection, final Iterator iterator) { + Objects.requireNonNull(collection); + Objects.requireNonNull(iterator); + boolean wasModified = false; + while (iterator.hasNext()) { + wasModified |= collection.add(iterator.next()); + } + return wasModified; + } + + private Iterators() { + // do not instantiate + } + +} diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/utils/Lists.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/utils/Lists.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/utils/Lists.java 1970-01-01 00:00:00.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/utils/Lists.java 2016-12-25 11:57:03.000000000 +0000 @@ -0,0 +1,59 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.commons.compress.utils; + +import java.util.ArrayList; +import java.util.Iterator; + +/** + * List utilities + * + * @since 1.13 + */ +public class Lists { + + /** + * Creates a new {@link ArrayList}. + * + * @return a new {@link ArrayList} + */ + public static ArrayList newArrayList() { + return new ArrayList<>(); + } + + /** + * Creates a new {@link ArrayList} filled with the contents of the given + * {@code iterator}. + * + * @param iterator + * the source iterator + * @return a new {@link ArrayList} + */ + public static ArrayList newArrayList(final Iterator iterator) { + final ArrayList list = newArrayList(); + Iterators.addAll(list, iterator); + return list; + } + + private Lists() { + // do not instantiate + } + +} diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/utils/SeekableInMemoryByteChannel.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/utils/SeekableInMemoryByteChannel.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/utils/SeekableInMemoryByteChannel.java 1970-01-01 00:00:00.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/utils/SeekableInMemoryByteChannel.java 2016-12-25 11:57:03.000000000 +0000 @@ -0,0 +1,195 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package org.apache.commons.compress.utils; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.nio.channels.ClosedChannelException; +import java.nio.channels.SeekableByteChannel; +import java.util.Arrays; +import java.util.concurrent.atomic.AtomicBoolean; + +/** + * A {@link SeekableByteChannel} implementation that wraps a byte[]. + * + *

When this channel is used for writing, an internal buffer grows to accommodate + * incoming data. A natural size limit is the value of {@link Integer#MAX_VALUE}. + * The internal buffer can be accessed via {@link SeekableInMemoryByteChannel#array()}.

+ * + * @since 1.13 + * @NotThreadSafe + */ +public class SeekableInMemoryByteChannel implements SeekableByteChannel { + + private static final int NAIVE_RESIZE_LIMIT = Integer.MAX_VALUE >> 1; + + private byte[] data; + private final AtomicBoolean closed = new AtomicBoolean(); + private int position, size; + + /** + * Constructor taking a byte array. + * + *

This constructor is intended to be used with a pre-allocated buffer or when + * reading from a given byte array.

+ * + * @param data input data or pre-allocated array. + */ + public SeekableInMemoryByteChannel(byte[] data) { + this.data = data; + size = data.length; + } + + /** + * Parameterless constructor - allocates internal buffer by itself. + */ + public SeekableInMemoryByteChannel() { + this(new byte[0]); + } + + /** + * Constructor taking a size of storage to be allocated. + * + *

Creates a channel and allocates internal storage of a given size.

+ * + * @param size size of internal buffer to allocate, in bytes. + */ + public SeekableInMemoryByteChannel(int size) { + this(new byte[size]); + } + + @Override + public long position() { + return position; + } + + @Override + public SeekableByteChannel position(long newPosition) throws IOException { + ensureOpen(); + if (newPosition < 0L || newPosition > Integer.MAX_VALUE) { + throw new IllegalArgumentException("Position has to be in range 0.. " + Integer.MAX_VALUE); + } + position = (int) newPosition; + return this; + } + + @Override + public long size() { + return size; + } + + @Override + public SeekableByteChannel truncate(long newSize) { + if (size > newSize) { + size = (int) newSize; + } + repositionIfNecessary(); + return this; + } + + @Override + public int read(ByteBuffer buf) throws IOException { + ensureOpen(); + repositionIfNecessary(); + int wanted = buf.remaining(); + int possible = size - position; + if (possible <= 0) { + return -1; + } + if (wanted > possible) { + wanted = possible; + } + buf.put(data, position, wanted); + position += wanted; + return wanted; + } + + @Override + public void close() { + closed.set(true); + } + + @Override + public boolean isOpen() { + return !closed.get(); + } + + @Override + public int write(ByteBuffer b) throws IOException { + ensureOpen(); + int wanted = b.remaining(); + int possibleWithoutResize = size - position; + if (wanted > possibleWithoutResize) { + int newSize = position + wanted; + if (newSize < 0) { // overflow + resize(Integer.MAX_VALUE); + wanted = Integer.MAX_VALUE - position; + } else { + resize(newSize); + } + } + b.get(data, position, wanted); + position += wanted; + if (size < position) { + size = position; + } + return wanted; + } + + /** + * Obtains the array backing this channel. + * + *

NOTE: + * The returned array may be larger than the data it actually contains; use + * {@link #size()} to obtain the size of the data stored in the buffer.

+ * + * @return internal byte array. + */ + public byte[] array() { + return data; + } + + private void resize(int newLength) { + int len = data.length; + if (len <= 0) { + len = 1; + } + if (newLength < NAIVE_RESIZE_LIMIT) { + while (len < newLength) { + len <<= 1; + } + } else { // avoid overflow + len = newLength; + } + data = Arrays.copyOf(data, len); + } + + private void ensureOpen() throws ClosedChannelException { + if (!isOpen()) { + throw new ClosedChannelException(); + } + } + + private void repositionIfNecessary() { + if (position > size) { + position = size; + } + } + +} diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/utils/ServiceLoaderIterator.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/utils/ServiceLoaderIterator.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/utils/ServiceLoaderIterator.java 1970-01-01 00:00:00.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/utils/ServiceLoaderIterator.java 2016-12-25 11:57:03.000000000 +0000 @@ -0,0 +1,91 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.commons.compress.utils; + +import java.util.Iterator; +import java.util.NoSuchElementException; +import java.util.ServiceConfigurationError; +import java.util.ServiceLoader; + +/** + * Iterates all services for a given class through the standard + * {@link ServiceLoader} mechanism. + * + * @param + * The service to load + * @since 1.13 + */ +public class ServiceLoaderIterator implements Iterator { + + private E nextServiceLoader; + private final Class service; + private final Iterator serviceLoaderIterator; + + public ServiceLoaderIterator(final Class service) { + this(service, ClassLoader.getSystemClassLoader()); + } + + public ServiceLoaderIterator(final Class service, final ClassLoader classLoader) { + this.service = service; + final ServiceLoader serviceLoader = ServiceLoader.load(service, classLoader); + serviceLoaderIterator = serviceLoader.iterator(); + nextServiceLoader = null; + } + + private boolean getNextServiceLoader() { + while (nextServiceLoader == null) { + try { + if (!serviceLoaderIterator.hasNext()) { + return false; + } + nextServiceLoader = serviceLoaderIterator.next(); + } catch (final ServiceConfigurationError e) { + if (e.getCause() instanceof SecurityException) { + // Ignore security exceptions + // TODO Log? 
+ continue; + } + throw e; + } + } + return true; + } + + @Override + public boolean hasNext() { + return getNextServiceLoader(); + } + + @Override + public E next() { + if (!getNextServiceLoader()) { + throw new NoSuchElementException("No more elements for service " + service.getName()); + } + final E tempNext = nextServiceLoader; + nextServiceLoader = null; + return tempNext; + } + + @Override + public void remove() { + throw new UnsupportedOperationException("service=" + service.getName()); + } + +} diff -Nru libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/utils/Sets.java libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/utils/Sets.java --- libcommons-compress-java-1.12/src/main/java/org/apache/commons/compress/utils/Sets.java 1970-01-01 00:00:00.000000000 +0000 +++ libcommons-compress-java-1.13/src/main/java/org/apache/commons/compress/utils/Sets.java 2016-12-25 11:57:03.000000000 +0000 @@ -0,0 +1,48 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.commons.compress.utils; + +import java.util.Collections; +import java.util.HashSet; + +/** + * Set utilities + * + * @since 1.13 + */ +public class Sets { + + private Sets() { + // Do not instantiate + } + + /** + * Creates a new HashSet filled with the given elements + * + * @param elements + * the elements to fill the new set + * @return A new HasSet + */ + public static HashSet newHashSet(@SuppressWarnings("unchecked") E... 
elements) { + final HashSet set = new HashSet<>(elements.length); + Collections.addAll(set, elements); + return set; + } +} diff -Nru libcommons-compress-java-1.12/src/site/site.xml libcommons-compress-java-1.13/src/site/site.xml --- libcommons-compress-java-1.12/src/site/site.xml 2016-06-18 15:21:18.000000000 +0000 +++ libcommons-compress-java-1.13/src/site/site.xml 2016-12-25 11:57:03.000000000 +0000 @@ -35,6 +35,8 @@ + + diff -Nru libcommons-compress-java-1.12/src/site/xdoc/download_compress.xml libcommons-compress-java-1.13/src/site/xdoc/download_compress.xml --- libcommons-compress-java-1.12/src/site/xdoc/download_compress.xml 2016-06-18 15:21:43.000000000 +0000 +++ libcommons-compress-java-1.13/src/site/xdoc/download_compress.xml 2016-12-25 12:04:20.000000000 +0000 @@ -31,10 +31,21 @@ | 2) Set the following properties in the component's pom: | | - commons.componentid (required, alphabetic, lower case) | | - commons.release.version (required) | - | - commons.binary.suffix (optional) | + | - commons.release.name (required) | + | - commons.binary.suffix (optional) | | (defaults to "-bin", set to "" for pre-maven2 releases) | + | - commons.release.desc (optional) | + | - commons.release.subdir (optional) | + | | + | - commons.release.2/3.version (conditional) | + | - commons.release.2/3.name (conditional) | + | - commons.release.2/3.binary.suffix (optional) | + | - commons.release.2/3.desc (optional) | + | - commons.release.2/3.subdir (optional) | | | | 3) Example Properties | + | (commons.release.name inherited by parent: | + | ${project.artifactId}-${commons.release.version} | | | | | | math | @@ -46,17 +57,17 @@ Download Apache Commons Compress - Commons Documentation Team + Apache Commons Documentation Team

We recommend you use a mirror to download our release - builds, but you must verify the integrity of + builds, but you must verify the integrity of the downloaded files using signatures downloaded from our main distribution directories. Recent releases (48 hours) may not yet - be available from the mirrors. + be available from all the mirrors.

@@ -88,39 +99,44 @@

+ It is essential that you + verify the integrity + of downloaded files, preferably using the PGP signature (*.asc files); + failing that, using the MD5 hash (*.md5 checksum files). +

+

The KEYS - link links to the code signing keys used to sign the product. - The PGP link downloads the OpenPGP compatible signature from our main site. - The MD5 link downloads the checksum from the main site. + file contains the public PGP keys used by Apache Commons developers + to sign releases.

-
+
- - - + + + - - - + + +
commons-compress-1.12-bin.tar.gzmd5pgpcommons-compress-1.13-bin.tar.gzmd5pgp
commons-compress-1.12-bin.zipmd5pgpcommons-compress-1.13-bin.zipmd5pgp
- - - + + + - - - + + +
commons-compress-1.12-src.tar.gzmd5pgpcommons-compress-1.13-src.tar.gzmd5pgp
commons-compress-1.12-src.zipmd5pgpcommons-compress-1.13-src.zipmd5pgp
@@ -131,7 +147,7 @@

diff -Nru libcommons-compress-java-1.12/src/site/xdoc/examples.xml libcommons-compress-java-1.13/src/site/xdoc/examples.xml --- libcommons-compress-java-1.12/src/site/xdoc/examples.xml 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/site/xdoc/examples.xml 2016-12-25 11:57:03.000000000 +0000 @@ -36,7 +36,7 @@ cpio, dump, tar and zip. Pack200 is a special case as it can only compress JAR files.

-

We currently only provide read support for lzma, arj, +

We currently only provide read support for arj, dump and Z. arj can only read uncompressed archives, 7z can read archives with many compression and encryption algorithms supported by 7z but doesn't support encryption when writing @@ -262,10 +262,11 @@ ]]>

ZipArchiveOutputStream can use some internal - optimizations exploiting RandomAccessFile if it - knows it is writing to a file rather than a non-seekable + optimizations exploiting SeekableByteChannel if it + knows it is writing to a seekable output rather than a non-seekable stream. If you are writing to a file, you should use the - constructor that accepts a File argument rather + constructor that accepts a File or + SeekableByteChannel argument rather than the one using an OutputStream or the factory method in ArchiveStreamFactory.

@@ -290,6 +291,17 @@ } ]]> +

Reading entries from an in-memory zip archive using + SeekableInMemoryByteChannel and ZipFile class:
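A minimal sketch of what this might look like, assuming the archive bytes are already held in memory (the loadZipBytes() helper is hypothetical):

import java.io.InputStream;
import org.apache.commons.compress.archivers.zip.ZipArchiveEntry;
import org.apache.commons.compress.archivers.zip.ZipFile;
import org.apache.commons.compress.utils.SeekableInMemoryByteChannel;

byte[] zipData = loadZipBytes(); // hypothetical helper returning the raw bytes of a zip archive
try (ZipFile zipFile = new ZipFile(new SeekableInMemoryByteChannel(zipData))) {
    ZipArchiveEntry entry = zipFile.getEntries().nextElement();
    try (InputStream content = zipFile.getInputStream(entry)) {
        // consume the entry's data here, e.g. copy it to another stream
    }
}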

+ +

Creating a zip file with multiple threads:

A simple implementation to create a zip file might look like this: @@ -544,11 +556,11 @@ -

Note that Commons Compress currently only supports - a subset of compression and encryption algorithms used for 7z - archives. For writing only uncompressed entries, - LZMA2, BZIP2 and Deflate are supported - reading also supports - LZMA and AES-256/SHA-256.

+

Note that Commons Compress currently only supports a subset + of compression and encryption algorithms used for 7z archives. + For writing only uncompressed entries, LZMA, LZMA2, BZIP2 and + Deflate are supported - reading also supports + AES-256/SHA-256.

Multipart archives are not supported at all.

@@ -578,6 +590,13 @@ means compression ratio will likely be worse when using Commons Compress compared to the native 7z executable.

+

Reading or writing requires a + SeekableByteChannel that will be obtained + transparently when reading from or writing to a file. The + class + org.apache.commons.compress.utils.SeekableInMemoryByteChannel + allows you to read from or write to an in-memory archive.

+

Adding an entry to a 7z archive:
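A minimal sketch, using placeholder names for the input file and the resulting archive:

import java.io.File;
import java.nio.file.Files;
import org.apache.commons.compress.archivers.sevenz.SevenZArchiveEntry;
import org.apache.commons.compress.archivers.sevenz.SevenZOutputFile;

File inputFile = new File("test1.xml"); // placeholder input file
try (SevenZOutputFile sevenZOutput = new SevenZOutputFile(new File("archive.7z"))) {
    SevenZArchiveEntry entry = sevenZOutput.createArchiveEntry(inputFile, inputFile.getName());
    sevenZOutput.putArchiveEntry(entry);
    sevenZOutput.write(Files.readAllBytes(inputFile.toPath())); // write the entry's content
    sevenZOutput.closeArchiveEntry();
    sevenZOutput.finish();
}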

+ +

Uncompressing a given in-memory 7z archive:
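A minimal sketch, assuming the 7z archive bytes are already held in memory (the load7zBytes() helper is hypothetical):

import org.apache.commons.compress.archivers.sevenz.SevenZArchiveEntry;
import org.apache.commons.compress.archivers.sevenz.SevenZFile;
import org.apache.commons.compress.utils.SeekableInMemoryByteChannel;

byte[] sevenZData = load7zBytes(); // hypothetical helper returning the raw bytes of a 7z archive
try (SevenZFile sevenZFile = new SevenZFile(new SeekableInMemoryByteChannel(sevenZData))) {
    SevenZArchiveEntry entry;
    while ((entry = sevenZFile.getNextEntry()) != null) {
        byte[] content = new byte[(int) entry.getSize()];
        sevenZFile.read(content, 0, content.length); // read() may return fewer bytes, loop for large entries
    }
}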

+
@@ -654,6 +682,53 @@ + + +

+ Starting in release 1.13, it is now possible to add Compressor- and ArchiverStream implementations using + Java's ServiceLoader + mechanism. +

+
+ + + +

+ To provide your own compressor, you must make available on the classpath a file called + META-INF/services/org.apache.commons.compress.compressors.CompressorStreamProvider. +

+

+ This file MUST contain one fully-qualified class name per line. +

+

+ For example: +

+
org.apache.commons.compress.compressors.TestCompressorStreamProvider
+

+ This class MUST implement the Commons Compress interface + org.apache.commons.compress.compressors.CompressorStreamProvider. +
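Once such a file is on the classpath, the providers can be discovered through the standard java.util.ServiceLoader API; a minimal sketch (the same pattern applies to the archiver providers described below):

import java.util.ServiceLoader;
import org.apache.commons.compress.compressors.CompressorStreamProvider;

// list every CompressorStreamProvider visible on the classpath, including
// implementations registered through the META-INF/services file described above
for (CompressorStreamProvider provider : ServiceLoader.load(CompressorStreamProvider.class)) {
    System.out.println(provider.getClass().getName());
}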

+
+ + + +

+ To provide your own archiver, you must make available on the classpath a file called + META-INF/services/org.apache.commons.compress.archivers.ArchiveStreamProvider. +

+

+ This file MUST contain one fully-qualified class name per line. +

+

+ For example: +

+
org.apache.commons.compress.archivers.TestArchiveStreamProvider
+

+ This class MUST implement the Commons Compress interface + org.apache.commons.compress.archivers.ArchiveStreamProvider. +

+
+
diff -Nru libcommons-compress-java-1.12/src/site/xdoc/index.xml libcommons-compress-java-1.13/src/site/xdoc/index.xml --- libcommons-compress-java-1.12/src/site/xdoc/index.xml 2016-06-18 15:24:57.000000000 +0000 +++ libcommons-compress-java-1.13/src/site/xdoc/index.xml 2016-12-25 12:11:24.000000000 +0000 @@ -51,29 +51,31 @@
-

The current release is 1.12 and requires Java 6.

+

The current release is 1.13 and requires Java 7.

Below we highlight some new features, for a full list of changes see the Changes Report.

- +
    -
  • Added support for the Snappy dialect used in iWork archives.
  • -
  • SevenZFile throws an - IllegalStateException for empty entries.
  • -
  • BZip2CompressorOutputStream no longer tries to - finish the output stream in finalize. This is a breaking - change for code that relied on the finalizer.
  • -
  • Various fixes and improvements for tar, cpio - and zip.
  • +
• The 7z package as well as + ZipArchiveOutputStream and + ZipFile can now use + SeekableByteChannel when random access is + needed. This allows archives to be read from inputs and + written to outputs that are seekable but are not + represented by Files.
  • +
  • It is now possible to add Compressor- and + ArchiverStream implementations using the JDK's + ServiceLoader mechanism. Please see + Extending Commons Compress.
  • +
  • Added support for writing the legacy LZMA format as + compressor stream and inside 7z archives - this requires + XZ for Java 1.6.
-
@@ -91,14 +93,14 @@ by the java.util.jar package of the Java class library. XZ and lzma support is provided by the public domain XZ for - Java library. As of Commons Compress 1.12 support for - the lzma, Z and Snappy formats is read-only.

+ Java library. As of Commons Compress 1.13 support for + the Z and Snappy formats is read-only.

The ar, arj, cpio, dump, tar, 7z and zip formats are supported as archivers where the zip implementation provides capabilities that go beyond the features found in java.util.zip. As of Commons Compress - 1.12 support for the dump and arj formats is + 1.13 support for the dump and arj formats is read-only - 7z can read most compressed and encrypted archives but only write unencrypted ones. LZMA(2) support in 7z requires XZ for diff -Nru libcommons-compress-java-1.12/src/site/xdoc/issue-tracking.xml libcommons-compress-java-1.13/src/site/xdoc/issue-tracking.xml --- libcommons-compress-java-1.12/src/site/xdoc/issue-tracking.xml 2016-01-13 17:48:31.000000000 +0000 +++ libcommons-compress-java-1.13/src/site/xdoc/issue-tracking.xml 2016-12-25 12:06:52.000000000 +0000 @@ -43,35 +43,35 @@ --> - Commons Compress Issue tracking - Commons Documentation Team + Apache Commons Compress Issue tracking + Apache Commons Documentation Team -

+

- Commons Compress uses ASF JIRA for tracking issues. - See the Commons Compress JIRA project page. + Apache Commons Compress uses ASF JIRA for tracking issues. + See the Apache Commons Compress JIRA project page.

- To use JIRA you may need to create an account + To use JIRA you may need to create an account (if you have previously created/updated Commons issues using Bugzilla an account will have been automatically - created and you can use the Forgot Password + created and you can use the Forgot Password page to get a new password).

If you would like to report a bug, or raise an enhancement request with - Commons Compress please do the following: + Apache Commons Compress please do the following:

    -
  1. Search existing open bugs. +
  2. Search existing open bugs. If you find your issue listed then please add a comment with your details.
  3. Search the mailing list archive(s). You may find your issue or idea has already been discussed.
  4. Decide if your issue is a bug or an enhancement.
  5. -
  6. Submit either a bug report - or enhancement request.
  7. +
  8. Submit either a bug report + or enhancement request.

@@ -80,7 +80,7 @@
  • the more information you provide, the better we can help you
  • test cases are vital, particularly for any proposed enhancements
  • -
  • the developers of Commons Compress are all unpaid volunteers
  • +
  • the developers of Apache Commons Compress are all unpaid volunteers

@@ -92,9 +92,9 @@

You may also find these links useful:

diff -Nru libcommons-compress-java-1.12/src/site/xdoc/limitations.xml libcommons-compress-java-1.13/src/site/xdoc/limitations.xml --- libcommons-compress-java-1.12/src/site/xdoc/limitations.xml 2016-01-13 17:48:31.000000000 +0000 +++ libcommons-compress-java-1.13/src/site/xdoc/limitations.xml 2016-12-25 11:57:03.000000000 +0000 @@ -31,14 +31,17 @@ href="http://tukaani.org/xz/java.html">XZ for Java library.
  • only Files are supported as input/output, - not streams. In Compress 1.7 + not streams. Starting with Compress 1.13 + SeekableByteChannel is supported as well.
  • +
  • In Compress 1.7 ArchiveStreamFactory will not auto-detect 7z archives, starting with 1.8 it will throw a StreamingNotSupportedException when reading from a 7z archive.
  • -
  • Encryption, solid compression, header compression and - LZMA (not LZMA2) are only supported when reading - archives
  • +
• Encryption, solid compression and header compression + are only supported when reading archives
  • +
  • Commons Compress 1.12 and earlier didn't support writing + LZMA.
  • Several of the "methods" supported by 7z are not implemented in Compress.
  • No support for multi-volume archives
  • @@ -52,10 +55,10 @@
    -

    AR archives can not contain directories - this is a - limitation of the format rather than one of Compress' - implementation.

      +
    • AR archives can not contain directories - this is a + limitation of the format rather than one of Compress' + implementation.
    • file names longer than 16 characters are only fully supported using the BSD dialect, the GNU/SRV4 dialect is only supported when reading archives.
    • @@ -108,7 +111,8 @@
    • the format requires the otherwise optional XZ for Java library.
    • -
    • read-only support
    • +
    • Commons Compress 1.12 and earlier only support reading + the format
    @@ -138,8 +142,8 @@
      -
    • In Compress 1.7 and 1.8 - CompressorStreamFactory is not able to +
    • Prior to Compress 1.8.1 + CompressorStreamFactory was not able to auto-detect streams using .Z compression.
    • read-only support
    @@ -168,7 +172,7 @@
  • ZipArchiveEntry#getLastModifiedDate uses ZipEntry#getTime under the covers which may return different times for the same archive when using - different versions onf Java.
  • + different versions of Java.
    diff -Nru libcommons-compress-java-1.12/src/site/xdoc/mail-lists.xml libcommons-compress-java-1.13/src/site/xdoc/mail-lists.xml --- libcommons-compress-java-1.12/src/site/xdoc/mail-lists.xml 2016-01-13 17:48:31.000000000 +0000 +++ libcommons-compress-java-1.13/src/site/xdoc/mail-lists.xml 2016-12-25 12:07:03.000000000 +0000 @@ -41,14 +41,14 @@ --> - Commons Compress Mailing Lists - Commons Documentation Team + Apache Commons Compress Mailing Lists + Apache Commons Documentation Team

    - Commons Compress shares mailing lists with all the other + Apache Commons Compress shares mailing lists with all the other Commons Components. To make it easier for people to only read messages related to components they are interested in, the convention in Commons is to prefix the subject line of messages with the component's name, @@ -58,24 +58,27 @@

    - Questions related to the usage of Commons Compress should be posted to the + Questions related to the usage of Apache Commons Compress should be posted to the User List.
    The Developer List - is for questions and discussion related to the development of Commons Compress. + is for questions and discussion related to the development of Apache Commons Compress.
    Please do not cross-post; developers are also subscribed to the user list. +
    + You must be subscribed to post to the mailing lists. Follow the Subscribe links below + to subscribe.

    Note: please don't send patches or attachments to any of the mailing lists. - Patches are best handled via the Issue Tracking system. - Otherwise, please upload the file to a public server and include the URL in the mail. + Patches are best handled via the Issue Tracking system. + Otherwise, please upload the file to a public server and include the URL in the mail.

    -
    +

    - Please prefix the subject line of any messages for Commons Compress + Please prefix the subject line of any messages for Apache Commons Compress with [compress] - thanks!

    @@ -96,7 +99,7 @@ Commons User List

    - Questions on using Commons Compress. + Questions on using Apache Commons Compress.

    Subscribe @@ -114,7 +117,7 @@ Commons Developer List

    - Discussion of development of Commons Compress. + Discussion of development of Apache Commons Compress.

    Subscribe @@ -185,10 +188,10 @@ General announcements of Apache project releases.

    - Subscribe - Unsubscribe + Subscribe + Unsubscribe read only - mail-archives.apache.org + mail-archives.apache.org markmail.org
    old.nabble.com
    www.mail-archive.com
    diff -Nru libcommons-compress-java-1.12/src/site/xdoc/zip.xml libcommons-compress-java-1.13/src/site/xdoc/zip.xml --- libcommons-compress-java-1.12/src/site/xdoc/zip.xml 2016-01-13 17:48:31.000000000 +0000 +++ libcommons-compress-java-1.13/src/site/xdoc/zip.xml 2016-12-25 11:57:03.000000000 +0000 @@ -104,18 +104,25 @@

    If possible, you should always prefer ZipFile over ZipArchiveInputStream.

    +

ZipFile requires a + SeekableByteChannel that will be obtained + transparently when reading from a file. The class + org.apache.commons.compress.utils.SeekableInMemoryByteChannel + allows you to read from an in-memory archive.

    + -

    ZipArchiveOutputStream has two constructors, - one of them uses a File argument, the other - uses an OutputStream. The File - version will try to use RandomAccessFile and - fall back to using a FileOutputStream - internally if that fails.

    +

    ZipArchiveOutputStream has three constructors, + one of them uses a File argument, one a + SeekableByteChannel and the last uses an + OutputStream. The File version will + try to use SeekableByteChannel and fall back to + using a FileOutputStream internally if that + fails.

    If ZipArchiveOutputStream can - use RandomAccessFile it can employ some + use SeekableByteChannel it can employ some optimizations that lead to smaller archives. It also makes it possible to add uncompressed (setMethod used with STORED) entries of unknown size when @@ -124,7 +131,11 @@ an OutputStream.

    If you know you are writing to a file, you should always - prefer the File-arg constructor.

    + prefer the File- or + SeekableByteChannel-arg constructors. The class + org.apache.commons.compress.utils.SeekableInMemoryByteChannel + allows you to write to an in-memory archive.
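For the in-memory case, a minimal sketch might look like this (entry name and content are placeholders):

import java.nio.charset.StandardCharsets;
import org.apache.commons.compress.archivers.zip.ZipArchiveEntry;
import org.apache.commons.compress.archivers.zip.ZipArchiveOutputStream;
import org.apache.commons.compress.utils.SeekableInMemoryByteChannel;

SeekableInMemoryByteChannel channel = new SeekableInMemoryByteChannel();
try (ZipArchiveOutputStream zipOut = new ZipArchiveOutputStream(channel)) {
    ZipArchiveEntry entry = new ZipArchiveEntry("hello.txt"); // placeholder entry name
    zipOut.putArchiveEntry(entry);
    zipOut.write("Hello, world!".getBytes(StandardCharsets.UTF_8)); // placeholder content
    zipOut.closeArchiveEntry();
}
// channel.array() now holds the zip data; only the first channel.size() bytes are valid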

    +
    @@ -331,7 +342,7 @@ uncompressed sizes of the entry are known when putArchiveEntry is called or ZipArchiveOutputStream - uses RandomAccessFile + uses SeekableByteChannel (see above). If only the uncompressed size is known ZipArchiveOutputStream will assume the @@ -375,7 +386,7 @@ the format's limits is exceeded. Archives created in this mode will be readable by all unarchivers; they may be slightly smaller than archives created - with RandomAccessFile + with SeekableByteChannel in Zip64Mode.AsNeeded mode if some of the entries had unknown sizes.

    @@ -391,7 +402,7 @@ reached because Apache Commons Compress' own API (ArchiveEntry's size information uses a long) or its usage of Java collections - or RandomAccessFile internally. The table + or SeekableByteChannel internally. The table below shows the theoretical limits supported by Apache Commons Compress. In practice it is very likely that you'd run out of memory or your file system won't allow files that diff -Nru libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/AbstractTestCase.java libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/AbstractTestCase.java --- libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/AbstractTestCase.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/AbstractTestCase.java 2016-12-25 11:57:03.000000000 +0000 @@ -168,7 +168,7 @@ try { archive = File.createTempFile("test", "." + archivename); archive.deleteOnExit(); - archiveList = new ArrayList(); + archiveList = new ArrayList<>(); stream = new FileOutputStream(archive); out = factory.createArchiveOutputStream(archivename, stream); @@ -227,7 +227,7 @@ protected File createEmptyArchive(final String archivename) throws Exception { ArchiveOutputStream out = null; OutputStream stream = null; - archiveList = new ArrayList(); + archiveList = new ArrayList<>(); try { archive = File.createTempFile("empty", "." + archivename); archive.deleteOnExit(); @@ -254,7 +254,7 @@ protected File createSingleEntryArchive(final String archivename) throws Exception { ArchiveOutputStream out = null; OutputStream stream = null; - archiveList = new ArrayList(); + archiveList = new ArrayList<>(); try { archive = File.createTempFile("empty", "." + archivename); archive.deleteOnExit(); @@ -284,13 +284,10 @@ */ protected void checkArchiveContent(final File archive, final List expected) throws Exception { - final InputStream is = new FileInputStream(archive); - try { + try (InputStream is = new FileInputStream(archive)) { final BufferedInputStream buf = new BufferedInputStream(is); final ArchiveInputStream in = factory.createArchiveInputStream(buf); this.checkArchiveContent(in, expected); - } finally { - is.close(); } } @@ -330,11 +327,8 @@ outfile.mkdirs(); } else { outfile.getParentFile().mkdirs(); - final OutputStream out = new FileOutputStream(outfile); - try { - copied=IOUtils.copy(in, out); - } finally { - out.close(); + try (OutputStream out = new FileOutputStream(outfile)) { + copied = IOUtils.copy(in, out); } } final long size = entry.getSize(); @@ -384,12 +378,9 @@ final File tmpDir = createTempDir(); final File tmpFile = File.createTempFile("testfile", "", tmpDir); tmpFile.deleteOnExit(); - final FileOutputStream fos = new FileOutputStream(tmpFile); - try { - fos.write(new byte[] {'f', 'o', 'o'}); - return new File[] {tmpDir, tmpFile}; - } finally { - fos.close(); + try (FileOutputStream fos = new FileOutputStream(tmpFile)) { + fos.write(new byte[] { 'f', 'o', 'o' }); + return new File[] { tmpDir, tmpFile }; } } diff -Nru libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/ArchiveReadTest.java libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/ArchiveReadTest.java --- libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/ArchiveReadTest.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/ArchiveReadTest.java 2016-12-25 11:57:03.000000000 +0000 @@ -49,7 +49,7 @@ private static 
final ClassLoader CLASSLOADER = ArchiveReadTest.class.getClassLoader(); private static final File ARCDIR = new File(CLASSLOADER.getResource("archives").getFile()); - private static final ArrayList FILELIST = new ArrayList(); + private static final ArrayList FILELIST = new ArrayList<>(); private final File file; @@ -75,7 +75,7 @@ @Parameters(name = "file={0}") public static Collection data() { assertTrue(ARCDIR.exists()); - final Collection params = new ArrayList(); + final Collection params = new ArrayList<>(); for (final String f : ARCDIR.list(new FilenameFilter() { @Override public boolean accept(final File dir, final String name) { diff -Nru libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/archivers/ar/ArArchiveInputStreamTest.java libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/archivers/ar/ArArchiveInputStreamTest.java --- libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/archivers/ar/ArArchiveInputStreamTest.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/archivers/ar/ArArchiveInputStreamTest.java 2016-12-25 11:57:03.000000000 +0000 @@ -41,10 +41,8 @@ } private void checkLongNameEntry(final String archive) throws Exception { - final FileInputStream fis = new FileInputStream(getFile(archive)); - ArArchiveInputStream s = null; - try { - s = new ArArchiveInputStream(new BufferedInputStream(fis)); + try (final FileInputStream fis = new FileInputStream(getFile(archive)); + final ArArchiveInputStream s = new ArArchiveInputStream(new BufferedInputStream(fis))) { ArchiveEntry e = s.getNextEntry(); assertEquals("this_is_a_long_file_name.txt", e.getName()); assertEquals(14, e.getSize()); @@ -58,11 +56,6 @@ s.read(bye); assertEquals("Bye\n", ArchiveUtils.toAsciiString(bye)); assertNull(s.getNextEntry()); - } finally { - if (s != null) { - s.close(); - } - fis.close(); } } } diff -Nru libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/archivers/ar/ArArchiveOutputStreamTest.java libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/archivers/ar/ArArchiveOutputStreamTest.java --- libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/archivers/ar/ArArchiveOutputStreamTest.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/archivers/ar/ArArchiveOutputStreamTest.java 2016-12-25 11:57:03.000000000 +0000 @@ -34,51 +34,30 @@ @Test public void testLongFileNamesCauseExceptionByDefault() { - ArArchiveOutputStream os = null; - try { - os = new ArArchiveOutputStream(new ByteArrayOutputStream()); - final ArArchiveEntry ae = new ArArchiveEntry("this_is_a_long_name.txt", - 0); + try (ArArchiveOutputStream os = new ArArchiveOutputStream(new ByteArrayOutputStream())) { + final ArArchiveEntry ae = new ArArchiveEntry("this_is_a_long_name.txt", 0); os.putArchiveEntry(ae); fail("Expected an exception"); } catch (final IOException ex) { assertTrue(ex.getMessage().startsWith("filename too long")); - } finally { - closeQuietly(os); } } @Test public void testLongFileNamesWorkUsingBSDDialect() throws Exception { - FileOutputStream fos = null; - ArArchiveOutputStream os = null; final File[] df = createTempDirAndFile(); - try { - fos = new FileOutputStream(df[1]); - os = new ArArchiveOutputStream(fos); + try (FileOutputStream fos = new FileOutputStream(df[1]); + ArArchiveOutputStream os = new ArArchiveOutputStream(fos)) { 
os.setLongFileMode(ArArchiveOutputStream.LONGFILE_BSD); - final ArArchiveEntry ae = new ArArchiveEntry("this_is_a_long_name.txt", - 14); + final ArArchiveEntry ae = new ArArchiveEntry("this_is_a_long_name.txt", 14); os.putArchiveEntry(ae); - os.write(new byte[] { - 'H', 'e', 'l', 'l', 'o', ',', ' ', - 'w', 'o', 'r', 'l', 'd', '!', '\n' - }); + os.write(new byte[] { 'H', 'e', 'l', 'l', 'o', ',', ' ', 'w', 'o', 'r', 'l', 'd', '!', '\n' }); os.closeArchiveEntry(); - os.close(); - os = null; - fos = null; - final List expected = new ArrayList(); + final List expected = new ArrayList<>(); expected.add("this_is_a_long_name.txt"); checkArchiveContent(df[1], expected); } finally { - if (os != null) { - os.close(); - } - if (fos != null) { - fos.close(); - } rmdir(df[0]); } } diff -Nru libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/archivers/ArchiveOutputStreamTest.java libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/archivers/ArchiveOutputStreamTest.java --- libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/archivers/ArchiveOutputStreamTest.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/archivers/ArchiveOutputStreamTest.java 2016-12-25 11:57:03.000000000 +0000 @@ -92,17 +92,19 @@ public void testOptionalFinish() throws Exception { final OutputStream out1 = new ByteArrayOutputStream(); - ArchiveOutputStream aos1 = factory.createArchiveOutputStream("zip", out1); - aos1.putArchiveEntry(new ZipArchiveEntry("dummy")); - aos1.closeArchiveEntry(); - aos1.close(); + try (ArchiveOutputStream aos1 = factory.createArchiveOutputStream("zip", out1)) { + aos1.putArchiveEntry(new ZipArchiveEntry("dummy")); + aos1.closeArchiveEntry(); + } - aos1 = factory.createArchiveOutputStream("jar", out1); - aos1.putArchiveEntry(new JarArchiveEntry("dummy")); - aos1.closeArchiveEntry(); - aos1.close(); + final ArchiveOutputStream finishTest; + try (ArchiveOutputStream aos1 = factory.createArchiveOutputStream("jar", out1)) { + finishTest = aos1; + aos1.putArchiveEntry(new JarArchiveEntry("dummy")); + aos1.closeArchiveEntry(); + } try { - aos1.finish(); + finishTest.finish(); fail("finish() cannot follow close()"); } catch (final IOException io) { // Exception expected @@ -141,9 +143,9 @@ ArchiveOutputStream aos1; aos1 = factory.createArchiveOutputStream(archiveType, out1); aos1.putArchiveEntry(aos1.createArchiveEntry(dummy, "dummy")); - InputStream is = new FileInputStream(dummy); - IOUtils.copy(is, aos1); - is.close(); + try (InputStream is = new FileInputStream(dummy)) { + IOUtils.copy(is, aos1); + } aos1.closeArchiveEntry(); aos1.close(); // omitted finish @@ -157,9 +159,9 @@ } aos1.putArchiveEntry(aos1.createArchiveEntry(dummy, "dummy")); - is = new FileInputStream(dummy); - IOUtils.copy(is, aos1); - is.close(); + try (InputStream is = new FileInputStream(dummy)) { + IOUtils.copy(is, aos1); + } // TODO check if second putArchiveEntry() can follow without closeAE? 
diff -Nru libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/archivers/ArchiveServiceLoaderTest.java libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/archivers/ArchiveServiceLoaderTest.java --- libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/archivers/ArchiveServiceLoaderTest.java 1970-01-01 00:00:00.000000000 +0000 +++ libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/archivers/ArchiveServiceLoaderTest.java 2016-12-25 11:57:03.000000000 +0000 @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.commons.compress.archivers; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; + +import org.apache.commons.compress.archivers.TestArchiveStreamProvider.ArchiveInvocationConfirmationException; +import org.junit.Test; + +public class ArchiveServiceLoaderTest { + + @Test(expected = ArchiveInvocationConfirmationException.class) + public void testInputStream() throws ArchiveException { + new ArchiveStreamFactory().createArchiveInputStream("ArchiveTestInput1", new ByteArrayInputStream(new byte[] {})); + } + + @Test(expected = ArchiveInvocationConfirmationException.class) + public void testOutputStream() throws ArchiveException { + new ArchiveStreamFactory().createArchiveOutputStream("ArchiveTestOutput1", new ByteArrayOutputStream()); + } + +} diff -Nru libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/archivers/ArchiveStreamFactoryTest.java libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/archivers/ArchiveStreamFactoryTest.java --- libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/archivers/ArchiveStreamFactoryTest.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/archivers/ArchiveStreamFactoryTest.java 2016-12-25 11:57:03.000000000 +0000 @@ -63,38 +63,26 @@ */ @Test public void aiffFilesAreNoTARs() throws Exception { - final FileInputStream fis = new FileInputStream("src/test/resources/testAIFF.aif"); - try { - final InputStream is = new BufferedInputStream(fis); - try { + try (FileInputStream fis = new FileInputStream("src/test/resources/testAIFF.aif")) { + try (InputStream is = new BufferedInputStream(fis)) { new ArchiveStreamFactory().createArchiveInputStream(is); fail("created an input stream for a non-archive"); } catch (final ArchiveException ae) { assertTrue(ae.getMessage().startsWith("No Archiver found")); - } finally { - is.close(); } - } finally { - fis.close(); - } + } } @Test public void testCOMPRESS209() throws Exception { - final FileInputStream fis = new FileInputStream("src/test/resources/testCompress209.doc"); - try { - final InputStream bis = new 
BufferedInputStream(fis); - try { + try (FileInputStream fis = new FileInputStream("src/test/resources/testCompress209.doc")) { + try (InputStream bis = new BufferedInputStream(fis)) { new ArchiveStreamFactory().createArchiveInputStream(bis); fail("created an input stream for a non-archive"); } catch (final ArchiveException ae) { assertTrue(ae.getMessage().startsWith("No Archiver found")); - } finally { - bis.close(); } - } finally { - fis.close(); - } + } } @Test(expected = StreamingNotSupportedException.class) @@ -118,20 +106,14 @@ */ @Test public void detectsAndThrowsFor7z() throws Exception { - final FileInputStream fis = new FileInputStream("src/test/resources/bla.7z"); - try { - final InputStream bis = new BufferedInputStream(fis); - try { + try (FileInputStream fis = new FileInputStream("src/test/resources/bla.7z")) { + try (InputStream bis = new BufferedInputStream(fis)) { new ArchiveStreamFactory().createArchiveInputStream(bis); fail("Expected a StreamingNotSupportedException"); } catch (final StreamingNotSupportedException ex) { assertEquals(ArchiveStreamFactory.SEVEN_Z, ex.getFormat()); - } finally { - bis.close(); } - } finally { - fis.close(); - } + } } /** @@ -141,22 +123,13 @@ */ @Test public void skipsPK00Prefix() throws Exception { - final FileInputStream fis = new FileInputStream("src/test/resources/COMPRESS-208.zip"); - try { - final InputStream bis = new BufferedInputStream(fis); - try { - final ArchiveInputStream ais = new ArchiveStreamFactory().createArchiveInputStream(bis); - try { + try (FileInputStream fis = new FileInputStream("src/test/resources/COMPRESS-208.zip")) { + try (InputStream bis = new BufferedInputStream(fis)) { + try (ArchiveInputStream ais = new ArchiveStreamFactory().createArchiveInputStream(bis)) { assertTrue(ais instanceof ZipArchiveInputStream); - } finally { - ais.close(); } - } finally { - bis.close(); } - } finally { - fis.close(); - } + } } @Test @@ -195,6 +168,7 @@ final String fieldName; final String type; final boolean hasOutputStream; + TestData(final String testFile, final String type, final boolean hasOut, final String expectedEncoding, final ArchiveStreamFactory fac, final String fieldName) { this.testFile = testFile; this.expectedEncoding = expectedEncoding; @@ -203,6 +177,12 @@ this.type = type; this.hasOutputStream = hasOut; } + + @Override + public String toString() { + return "TestData [testFile=" + testFile + ", expectedEncoding=" + expectedEncoding + ", fac=" + fac + + ", fieldName=" + fieldName + ", type=" + type + ", hasOutputStream=" + hasOutputStream + "]"; + } } @SuppressWarnings("deprecation") // test of deprecated method @@ -233,8 +213,6 @@ dflt = UNKNOWN; try { dflt = getField(new ArjArchiveInputStream(new FileInputStream(getFile("bla.arj"))), "charsetName"); - } catch (final ArchiveException e) { - e.printStackTrace(); } catch (final Exception e) { e.printStackTrace(); } @@ -242,8 +220,6 @@ dflt = UNKNOWN; try { dflt = getField(new DumpArchiveInputStream(new FileInputStream(getFile("bla.dump"))), "encoding"); - } catch (final ArchiveException e) { - e.printStackTrace(); } catch (final Exception e) { e.printStackTrace(); } @@ -291,34 +267,38 @@ @Test public void testEncodingInputStreamAutodetect() throws Exception { int failed = 0; - for(int i = 1; i <= TESTS.length; i++) { - final TestData test = TESTS[i-1]; - final ArchiveInputStream ais = getInputStreamFor(test.testFile, test.fac); - final String field = getField(ais,test.fieldName); - if (!eq(test.expectedEncoding,field)) { - System.out.println("Failed test " + i + 
". expected: " + test.expectedEncoding + " actual: " + field + " type: " + test.type); - failed++; + for (int i = 1; i <= TESTS.length; i++) { + final TestData test = TESTS[i - 1]; + try (final ArchiveInputStream ais = getInputStreamFor(test.testFile, test.fac)) { + final String field = getField(ais, test.fieldName); + if (!eq(test.expectedEncoding, field)) { + System.out.println("Failed test " + i + ". expected: " + test.expectedEncoding + " actual: " + field + + " type: " + test.type); + failed++; + } } } if (failed > 0) { - fail("Tests failed: " + failed); + fail("Tests failed: " + failed + " out of " + TESTS.length); } } @Test public void testEncodingInputStream() throws Exception { int failed = 0; - for(int i = 1; i <= TESTS.length; i++) { - final TestData test = TESTS[i-1]; - final ArchiveInputStream ais = getInputStreamFor(test.type, test.testFile, test.fac); - final String field = getField(ais,test.fieldName); - if (!eq(test.expectedEncoding,field)) { - System.out.println("Failed test " + i + ". expected: " + test.expectedEncoding + " actual: " + field + " type: " + test.type); - failed++; + for (int i = 1; i <= TESTS.length; i++) { + final TestData test = TESTS[i - 1]; + try (final ArchiveInputStream ais = getInputStreamFor(test.type, test.testFile, test.fac)) { + final String field = getField(ais, test.fieldName); + if (!eq(test.expectedEncoding, field)) { + System.out.println("Failed test " + i + ". expected: " + test.expectedEncoding + " actual: " + field + + " type: " + test.type); + failed++; + } } } if (failed > 0) { - fail("Tests failed: " + failed); + fail("Tests failed: " + failed + " out of " + TESTS.length); } } @@ -328,16 +308,18 @@ for(int i = 1; i <= TESTS.length; i++) { final TestData test = TESTS[i-1]; if (test.hasOutputStream) { - final ArchiveOutputStream ais = getOutputStreamFor(test.type, test.fac); - final String field = getField(ais, test.fieldName); - if (!eq(test.expectedEncoding, field)) { - System.out.println("Failed test " + i + ". expected: " + test.expectedEncoding + " actual: " + field + " type: " + test.type); - failed++; + try (final ArchiveOutputStream ais = getOutputStreamFor(test.type, test.fac)) { + final String field = getField(ais, test.fieldName); + if (!eq(test.expectedEncoding, field)) { + System.out.println("Failed test " + i + ". 
expected: " + test.expectedEncoding + " actual: " + + field + " type: " + test.type); + failed++; + } } } } if (failed > 0) { - fail("Tests failed: " + failed); + fail("Tests failed: " + failed + " out of " + TESTS.length); } } diff -Nru libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/archivers/ArTestCase.java libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/archivers/ArTestCase.java --- libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/archivers/ArTestCase.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/archivers/ArTestCase.java 2016-12-25 11:57:03.000000000 +0000 @@ -80,18 +80,16 @@ // UnArArchive Operation final File input = output; - final InputStream is = new FileInputStream(input); - final ArchiveInputStream in = new ArchiveStreamFactory().createArchiveInputStream(new BufferedInputStream(is)); - final ArArchiveEntry entry = (ArArchiveEntry)in.getNextEntry(); + try (final InputStream is = new FileInputStream(input); + final ArchiveInputStream in = new ArchiveStreamFactory() + .createArchiveInputStream(new BufferedInputStream(is))) { + final ArArchiveEntry entry = (ArArchiveEntry) in.getNextEntry(); - final File target = new File(dir, entry.getName()); - final OutputStream out = new FileOutputStream(target); - - IOUtils.copy(in, out); - - out.close(); - in.close(); - is.close(); + final File target = new File(dir, entry.getName()); + try (final OutputStream out = new FileOutputStream(target)) { + IOUtils.copy(in, out); + } + } } @Test diff -Nru libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/archivers/CpioTestCase.java libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/archivers/CpioTestCase.java --- libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/archivers/CpioTestCase.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/archivers/CpioTestCase.java 2016-12-25 11:57:03.000000000 +0000 @@ -92,7 +92,7 @@ final ArchiveInputStream in = new ArchiveStreamFactory().createArchiveInputStream("cpio", is); - final Map result = new HashMap(); + final Map result = new HashMap<>(); ArchiveEntry entry = null; while ((entry = in.getNextEntry()) != null) { final File cpioget = new File(dir, entry.getName()); diff -Nru libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStreamTest.java libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStreamTest.java --- libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStreamTest.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStreamTest.java 2016-12-25 11:57:03.000000000 +0000 @@ -31,29 +31,23 @@ @Test public void testNotADumpArchive() throws Exception { - final FileInputStream is = new FileInputStream(getFile("bla.zip")); - try { + try (FileInputStream is = new FileInputStream(getFile("bla.zip"))) { new DumpArchiveInputStream(is); fail("expected an exception"); } catch (final ArchiveException ex) { // expected assertTrue(ex.getCause() instanceof ShortFileException); - } finally { - is.close(); } } @Test public void testNotADumpArchiveButBigEnough() throws Exception { - final FileInputStream is = new FileInputStream(getFile("zip64support.tar.bz2")); 
- try { + try (FileInputStream is = new FileInputStream(getFile("zip64support.tar.bz2"))) { new DumpArchiveInputStream(is); fail("expected an exception"); } catch (final ArchiveException ex) { // expected assertTrue(ex.getCause() instanceof UnrecognizedFormatException); - } finally { - is.close(); } } diff -Nru libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/archivers/DumpTestCase.java libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/archivers/DumpTestCase.java --- libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/archivers/DumpTestCase.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/archivers/DumpTestCase.java 2016-12-25 11:57:03.000000000 +0000 @@ -90,14 +90,11 @@ } private void archiveDetection(final File f) throws Exception { - final InputStream is = new FileInputStream(f); - try { + try (InputStream is = new FileInputStream(f)) { assertEquals(DumpArchiveInputStream.class, - new ArchiveStreamFactory() - .createArchiveInputStream(new BufferedInputStream(is)) - .getClass()); - } finally { - is.close(); + new ArchiveStreamFactory() + .createArchiveInputStream(new BufferedInputStream(is)) + .getClass()); } } @@ -112,17 +109,14 @@ } private void checkDumpArchive(final File f) throws Exception { - final ArrayList expected = new ArrayList(); + final ArrayList expected = new ArrayList<>(); expected.add(""); expected.add("lost+found/"); expected.add("test1.xml"); expected.add("test2.xml"); - final InputStream is = new FileInputStream(f); - try { + try (InputStream is = new FileInputStream(f)) { checkArchiveContent(new DumpArchiveInputStream(is), - expected); - } finally { - is.close(); + expected); } } } diff -Nru libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/archivers/LongPathTest.java libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/archivers/LongPathTest.java --- libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/archivers/LongPathTest.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/archivers/LongPathTest.java 2016-12-25 11:57:03.000000000 +0000 @@ -56,7 +56,7 @@ private static final ClassLoader CLASSLOADER = LongPathTest.class.getClassLoader(); private static final File ARCDIR = new File(CLASSLOADER.getResource("longpath").getFile()); - private static final ArrayList FILELIST = new ArrayList(); + private static final ArrayList FILELIST = new ArrayList<>(); private final File file; @@ -81,7 +81,7 @@ @Parameters(name = "file={0}") public static Collection data() { - final Collection params = new ArrayList(); + final Collection params = new ArrayList<>(); for (final String f : ARCDIR.list(new FilenameFilter() { @Override public boolean accept(final File dir, final String name) { diff -Nru libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/archivers/LongSymLinkTest.java libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/archivers/LongSymLinkTest.java --- libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/archivers/LongSymLinkTest.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/archivers/LongSymLinkTest.java 2016-12-25 11:57:03.000000000 +0000 @@ -56,7 +56,7 @@ private static final ClassLoader CLASSLOADER = LongSymLinkTest.class.getClassLoader(); private static final File ARCDIR = new 
File(CLASSLOADER.getResource("longsymlink").getFile()); - private static final ArrayList FILELIST = new ArrayList(); + private static final ArrayList FILELIST = new ArrayList<>(); private final File file; @@ -81,7 +81,7 @@ @Parameters(name = "file={0}") public static Collection data() { - final Collection params = new ArrayList(); + final Collection params = new ArrayList<>(); for (final String f : ARCDIR.list(new FilenameFilter() { @Override public boolean accept(final File dir, final String name) { diff -Nru libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZFileTest.java libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZFileTest.java --- libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZFileTest.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZFileTest.java 2016-12-25 11:57:03.000000000 +0000 @@ -21,6 +21,7 @@ import java.io.ByteArrayOutputStream; import java.io.File; +import java.io.FileInputStream; import java.io.IOException; import java.security.NoSuchAlgorithmException; import java.util.Arrays; @@ -33,6 +34,8 @@ import org.apache.commons.compress.AbstractTestCase; import org.apache.commons.compress.PasswordRequiredException; +import org.apache.commons.compress.utils.IOUtils; +import org.apache.commons.compress.utils.SeekableInMemoryByteChannel; import org.junit.Test; public class SevenZFileTest extends AbstractTestCase { @@ -43,7 +46,7 @@ @Test public void testRandomlySkippingEntries() throws Exception { // Read sequential reference. - final Map entriesByName = new HashMap(); + final Map entriesByName = new HashMap<>(); SevenZFile archive = new SevenZFile(getFile("COMPRESS-320/Copy.7z")); SevenZArchiveEntry entry; while ((entry = archive.getNextEntry()) != null) { @@ -103,11 +106,8 @@ @Test public void testAllEmptyFilesArchive() throws Exception { - final SevenZFile archive = new SevenZFile(getFile("7z-empty-mhc-off.7z")); - try { + try (SevenZFile archive = new SevenZFile(getFile("7z-empty-mhc-off.7z"))) { assertNotNull(archive.getNextEntry()); - } finally { - archive.close(); } } @@ -164,15 +164,12 @@ */ @Test public void testCompressedHeaderWithNonDefaultDictionarySize() throws Exception { - final SevenZFile sevenZFile = new SevenZFile(getFile("COMPRESS-256.7z")); - try { + try (SevenZFile sevenZFile = new SevenZFile(getFile("COMPRESS-256.7z"))) { int count = 0; while (sevenZFile.getNextEntry() != null) { count++; } assertEquals(446, count); - } finally { - sevenZFile.close(); } } @@ -194,60 +191,64 @@ @Test public void testReadingBackLZMA2DictSize() throws Exception { final File output = new File(dir, "lzma2-dictsize.7z"); - final SevenZOutputFile outArchive = new SevenZOutputFile(output); - try { + try (SevenZOutputFile outArchive = new SevenZOutputFile(output)) { outArchive.setContentMethods(Arrays.asList(new SevenZMethodConfiguration(SevenZMethod.LZMA2, 1 << 20))); final SevenZArchiveEntry entry = new SevenZArchiveEntry(); entry.setName("foo.txt"); outArchive.putArchiveEntry(entry); outArchive.write(new byte[] { 'A' }); outArchive.closeArchiveEntry(); - } finally { - outArchive.close(); } - final SevenZFile archive = new SevenZFile(output); - try { + try (SevenZFile archive = new SevenZFile(output)) { final SevenZArchiveEntry entry = archive.getNextEntry(); final SevenZMethodConfiguration m = entry.getContentMethods().iterator().next(); 
assertEquals(SevenZMethod.LZMA2, m.getMethod()); assertEquals(1 << 20, m.getOptions()); - } finally { - archive.close(); } } @Test public void testReadingBackDeltaDistance() throws Exception { final File output = new File(dir, "delta-distance.7z"); - final SevenZOutputFile outArchive = new SevenZOutputFile(output); - try { + try (SevenZOutputFile outArchive = new SevenZOutputFile(output)) { outArchive.setContentMethods(Arrays.asList(new SevenZMethodConfiguration(SevenZMethod.DELTA_FILTER, 32), - new SevenZMethodConfiguration(SevenZMethod.LZMA2))); + new SevenZMethodConfiguration(SevenZMethod.LZMA2))); final SevenZArchiveEntry entry = new SevenZArchiveEntry(); entry.setName("foo.txt"); outArchive.putArchiveEntry(entry); outArchive.write(new byte[] { 'A' }); outArchive.closeArchiveEntry(); - } finally { - outArchive.close(); } - final SevenZFile archive = new SevenZFile(output); - try { + try (SevenZFile archive = new SevenZFile(output)) { final SevenZArchiveEntry entry = archive.getNextEntry(); final SevenZMethodConfiguration m = entry.getContentMethods().iterator().next(); assertEquals(SevenZMethod.DELTA_FILTER, m.getMethod()); assertEquals(32, m.getOptions()); - } finally { - archive.close(); } } @Test public void getEntriesOfUnarchiveTest() throws IOException { - final SevenZFile sevenZFile = new SevenZFile(getFile("bla.7z")); - try { + try (SevenZFile sevenZFile = new SevenZFile(getFile("bla.7z"))) { + final Iterable entries = sevenZFile.getEntries(); + final Iterator iter = entries.iterator(); + SevenZArchiveEntry entry = iter.next(); + assertEquals("test1.xml", entry.getName()); + entry = iter.next(); + assertEquals("test2.xml", entry.getName()); + assertFalse(iter.hasNext()); + } + } + + @Test + public void getEntriesOfUnarchiveInMemoryTest() throws IOException { + byte[] data = null; + try (FileInputStream fis = new FileInputStream(getFile("bla.7z"))) { + data = IOUtils.toByteArray(fis); + } + try (SevenZFile sevenZFile = new SevenZFile(new SeekableInMemoryByteChannel(data))) { final Iterable entries = sevenZFile.getEntries(); final Iterator iter = entries.iterator(); SevenZArchiveEntry entry = iter.next(); @@ -255,8 +256,6 @@ entry = iter.next(); assertEquals("test2.xml", entry.getName()); assertFalse(iter.hasNext()); - } finally { - sevenZFile.close(); } } @@ -265,13 +264,12 @@ */ @Test public void readEntriesOfSize0() throws IOException { - final SevenZFile sevenZFile = new SevenZFile(getFile("COMPRESS-348.7z")); - try { + try (SevenZFile sevenZFile = new SevenZFile(getFile("COMPRESS-348.7z"))) { int entries = 0; SevenZArchiveEntry entry = sevenZFile.getNextEntry(); while (entry != null) { entries++; - int b = sevenZFile.read(); + final int b = sevenZFile.read(); if ("2.txt".equals(entry.getName()) || "5.txt".equals(entry.getName())) { assertEquals(-1, b); } else { @@ -280,50 +278,42 @@ entry = sevenZFile.getNextEntry(); } assertEquals(5, entries); - } finally { - sevenZFile.close(); } } private void test7zUnarchive(final File f, final SevenZMethod m, final byte[] password) throws Exception { - final SevenZFile sevenZFile = new SevenZFile(f, password); - try { + try (SevenZFile sevenZFile = new SevenZFile(f, password)) { SevenZArchiveEntry entry = sevenZFile.getNextEntry(); assertEquals("test1.xml", entry.getName()); assertEquals(m, entry.getContentMethods().iterator().next().getMethod()); entry = sevenZFile.getNextEntry(); assertEquals("test2.xml", entry.getName()); assertEquals(m, entry.getContentMethods().iterator().next().getMethod()); - final byte[] contents = new 
byte[(int)entry.getSize()]; + final byte[] contents = new byte[(int) entry.getSize()]; int off = 0; while ((off < contents.length)) { final int bytesRead = sevenZFile.read(contents, off, contents.length - off); - assert(bytesRead >= 0); + assert (bytesRead >= 0); off += bytesRead; } assertEquals(TEST2_CONTENT, new String(contents, "UTF-8")); assertNull(sevenZFile.getNextEntry()); - } finally { - sevenZFile.close(); } } private void checkHelloWorld(final String filename) throws Exception { - final SevenZFile sevenZFile = new SevenZFile(getFile(filename)); - try { + try (SevenZFile sevenZFile = new SevenZFile(getFile(filename))) { final SevenZArchiveEntry entry = sevenZFile.getNextEntry(); assertEquals("Hello world.txt", entry.getName()); - final byte[] contents = new byte[(int)entry.getSize()]; + final byte[] contents = new byte[(int) entry.getSize()]; int off = 0; while ((off < contents.length)) { final int bytesRead = sevenZFile.read(contents, off, contents.length - off); - assert(bytesRead >= 0); + assert (bytesRead >= 0); off += bytesRead; } assertEquals("Hello, world!\n", new String(contents, "UTF-8")); assertNull(sevenZFile.getNextEntry()); - } finally { - sevenZFile.close(); } } diff -Nru libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZMethodConfigurationTest.java libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZMethodConfigurationTest.java --- libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZMethodConfigurationTest.java 2016-01-13 17:48:31.000000000 +0000 +++ libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZMethodConfigurationTest.java 2016-12-25 11:57:03.000000000 +0000 @@ -30,6 +30,19 @@ } @Test + public void shouldAllowLZMA2OptionsForLZMA() { + Assert.assertNotNull(new SevenZMethodConfiguration(SevenZMethod.LZMA, + new LZMA2Options()) + .getOptions()); + } + + @Test + public void shouldAllowNumberForLZMA() { + Assert.assertNotNull(new SevenZMethodConfiguration(SevenZMethod.LZMA, 42) + .getOptions()); + } + + @Test public void shouldAllowLZMA2OptionsForLZMA2() { Assert.assertNotNull(new SevenZMethodConfiguration(SevenZMethod.LZMA2, new LZMA2Options()) @@ -55,6 +68,11 @@ } @Test(expected = IllegalArgumentException.class) + public void shouldNotAllowStringOptionsForLZMA() { + new SevenZMethodConfiguration(SevenZMethod.LZMA, ""); + } + + @Test(expected = IllegalArgumentException.class) public void shouldNotAllowStringOptionsForLZMA2() { new SevenZMethodConfiguration(SevenZMethod.LZMA2, ""); } diff -Nru libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZOutputFileTest.java libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZOutputFileTest.java --- libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZOutputFileTest.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZOutputFileTest.java 2016-12-25 11:57:03.000000000 +0000 @@ -29,6 +29,7 @@ import java.util.Date; import java.util.Iterator; import org.apache.commons.compress.AbstractTestCase; +import org.apache.commons.compress.utils.SeekableInMemoryByteChannel; import org.tukaani.xz.LZMA2Options; public class SevenZOutputFileTest extends AbstractTestCase { @@ -61,8 +62,7 @@ cal.add(Calendar.HOUR, -1); final Date creationDate = cal.getTime(); - 
final SevenZOutputFile outArchive = new SevenZOutputFile(output); - try { + try (SevenZOutputFile outArchive = new SevenZOutputFile(output)) { SevenZArchiveEntry entry = outArchive.createArchiveEntry(dir, "foo/"); outArchive.putArchiveEntry(entry); outArchive.closeArchiveEntry(); @@ -96,20 +96,17 @@ outArchive.closeArchiveEntry(); outArchive.finish(); - } finally { - outArchive.close(); } - final SevenZFile archive = new SevenZFile(output); - try { + try (SevenZFile archive = new SevenZFile(output)) { SevenZArchiveEntry entry = archive.getNextEntry(); - assert(entry != null); + assert (entry != null); assertEquals("foo/", entry.getName()); assertTrue(entry.isDirectory()); assertFalse(entry.isAntiItem()); entry = archive.getNextEntry(); - assert(entry != null); + assert (entry != null); assertEquals("foo/bar", entry.getName()); assertFalse(entry.isDirectory()); assertFalse(entry.isAntiItem()); @@ -119,7 +116,7 @@ assertEquals(creationDate, entry.getCreationDate()); entry = archive.getNextEntry(); - assert(entry != null); + assert (entry != null); assertEquals("xyzzy", entry.getName()); assertEquals(1, entry.getSize()); assertFalse(entry.getHasAccessDate()); @@ -127,13 +124,13 @@ assertEquals(0, archive.read()); entry = archive.getNextEntry(); - assert(entry != null); + assert (entry != null); assertEquals("baz/", entry.getName()); assertTrue(entry.isDirectory()); assertTrue(entry.isAntiItem()); entry = archive.getNextEntry(); - assert(entry != null); + assert (entry != null); assertEquals("dada", entry.getName()); assertEquals(2, entry.getSize()); final byte[] content = new byte[2]; @@ -142,9 +139,7 @@ assertEquals(42, content[1]); assertEquals(17, entry.getWindowsAttributes()); - assert(archive.getNextEntry() == null); - } finally { - archive.close(); + assert (archive.getNextEntry() == null); } } @@ -152,28 +147,22 @@ @Test public void testDirectoriesOnly() throws Exception { output = new File(dir, "dirs.7z"); - final SevenZOutputFile outArchive = new SevenZOutputFile(output); - try { + try (SevenZOutputFile outArchive = new SevenZOutputFile(output)) { final SevenZArchiveEntry entry = new SevenZArchiveEntry(); entry.setName("foo/"); entry.setDirectory(true); outArchive.putArchiveEntry(entry); outArchive.closeArchiveEntry(); - } finally { - outArchive.close(); } - final SevenZFile archive = new SevenZFile(output); - try { + try (SevenZFile archive = new SevenZFile(output)) { final SevenZArchiveEntry entry = archive.getNextEntry(); - assert(entry != null); + assert (entry != null); assertEquals("foo/", entry.getName()); assertTrue(entry.isDirectory()); assertFalse(entry.isAntiItem()); - assert(archive.getNextEntry() == null); - } finally { - archive.close(); + assert (archive.getNextEntry() == null); } } @@ -181,15 +170,12 @@ @Test public void testCantFinishTwice() throws Exception { output = new File(dir, "finish.7z"); - final SevenZOutputFile outArchive = new SevenZOutputFile(output); - try { + try (SevenZOutputFile outArchive = new SevenZOutputFile(output)) { outArchive.finish(); outArchive.finish(); fail("shouldn't be able to call finish twice"); } catch (final IOException ex) { assertEquals("This archive has already been finished", ex.getMessage()); - } finally { - outArchive.close(); } } @@ -307,7 +293,7 @@ @Test public void testStackOfContentCompressions() throws Exception { output = new File(dir, "multiple-methods.7z"); - final ArrayList methods = new ArrayList(); + final ArrayList methods = new ArrayList<>(); methods.add(new SevenZMethodConfiguration(SevenZMethod.LZMA2)); 
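        // Illustrative sketch (not from the upstream patch): a content-method chain is just a
        // list of SevenZMethodConfiguration objects handed to setContentMethods(); 1.13 also
        // accepts SevenZMethod.LZMA here, configured with an LZMA2Options instance or an
        // integer dictionary size (1 MB below).
        try (SevenZOutputFile lzmaSketch = new SevenZOutputFile(new File(dir, "lzma-sketch.7z"))) {
            lzmaSketch.setContentMethods(Collections.singletonList(
                    new SevenZMethodConfiguration(SevenZMethod.LZMA, 1 << 20)));
            final SevenZArchiveEntry sketchEntry = new SevenZArchiveEntry();
            sketchEntry.setName("sketch.txt");
            lzmaSketch.putArchiveEntry(sketchEntry);
            lzmaSketch.write(new byte[] { 'A' });
            lzmaSketch.closeArchiveEntry();
        }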
methods.add(new SevenZMethodConfiguration(SevenZMethod.COPY)); methods.add(new SevenZMethodConfiguration(SevenZMethod.DEFLATE)); @@ -316,6 +302,16 @@ } @Test + public void testStackOfContentCompressionsInMemory() throws Exception { + final ArrayList methods = new ArrayList<>(); + methods.add(new SevenZMethodConfiguration(SevenZMethod.LZMA2)); + methods.add(new SevenZMethodConfiguration(SevenZMethod.COPY)); + methods.add(new SevenZMethodConfiguration(SevenZMethod.DEFLATE)); + methods.add(new SevenZMethodConfiguration(SevenZMethod.BZIP2)); + createAndReadBack(new SeekableInMemoryByteChannel(), methods); + } + + @Test public void testDeflateWithConfiguration() throws Exception { output = new File(dir, "deflate-options.7z"); // Deflater.BEST_SPEED @@ -332,6 +328,22 @@ } @Test + public void testLzmaWithIntConfiguration() throws Exception { + output = new File(dir, "lzma-options.7z"); + // 1 MB dictionary + createAndReadBack(output, Collections + .singletonList(new SevenZMethodConfiguration(SevenZMethod.LZMA, 1 << 20))); + } + + @Test + public void testLzmaWithOptionsConfiguration() throws Exception { + output = new File(dir, "lzma-options2.7z"); + final LZMA2Options opts = new LZMA2Options(1); + createAndReadBack(output, Collections + .singletonList(new SevenZMethodConfiguration(SevenZMethod.LZMA, opts))); + } + + @Test public void testLzma2WithIntConfiguration() throws Exception { output = new File(dir, "lzma2-options.7z"); // 1 MB dictionary @@ -350,22 +362,16 @@ @Test public void testArchiveWithMixedMethods() throws Exception { output = new File(dir, "mixed-methods.7z"); - final SevenZOutputFile outArchive = new SevenZOutputFile(output); - try { + try (SevenZOutputFile outArchive = new SevenZOutputFile(output)) { addFile(outArchive, 0, true); addFile(outArchive, 1, true, Arrays.asList(new SevenZMethodConfiguration(SevenZMethod.BZIP2))); - } finally { - outArchive.close(); } - final SevenZFile archive = new SevenZFile(output); - try { + try (SevenZFile archive = new SevenZFile(output)) { assertEquals(Boolean.TRUE, - verifyFile(archive, 0, Arrays.asList(new SevenZMethodConfiguration(SevenZMethod.LZMA2)))); + verifyFile(archive, 0, Arrays.asList(new SevenZMethodConfiguration(SevenZMethod.LZMA2)))); assertEquals(Boolean.TRUE, - verifyFile(archive, 1, Arrays.asList(new SevenZMethodConfiguration(SevenZMethod.BZIP2)))); - } finally { - archive.close(); + verifyFile(archive, 1, Arrays.asList(new SevenZMethodConfiguration(SevenZMethod.BZIP2)))); } } @@ -376,25 +382,21 @@ : numberOfFiles + 1; int nonEmptyFilesAdded = 0; output = new File(dir, "COMPRESS252-" + numberOfFiles + "-" + numberOfNonEmptyFiles + ".7z"); - final SevenZOutputFile archive = new SevenZOutputFile(output); - try { + try (SevenZOutputFile archive = new SevenZOutputFile(output)) { addDir(archive); for (int i = 0; i < numberOfFiles; i++) { addFile(archive, i, (i + 1) % nonEmptyModulus == 0 && nonEmptyFilesAdded++ < numberOfNonEmptyFiles); } - } finally { - archive.close(); } verifyCompress252(output, numberOfFiles, numberOfNonEmptyFiles); } private void verifyCompress252(final File output, final int numberOfFiles, final int numberOfNonEmptyFiles) throws Exception { - final SevenZFile archive = new SevenZFile(output); int filesFound = 0; int nonEmptyFilesFound = 0; - try { + try (SevenZFile archive = new SevenZFile(output)) { verifyDir(archive); Boolean b = verifyFile(archive, filesFound++); while (b != null) { @@ -403,8 +405,6 @@ } b = verifyFile(archive, filesFound++); } - } finally { - archive.close(); } assertEquals(numberOfFiles + 
1, filesFound); assertEquals(numberOfNonEmptyFiles, nonEmptyFilesFound); @@ -464,14 +464,14 @@ private void testRoundTrip(final SevenZMethod method) throws Exception { output = new File(dir, method + "-roundtrip.7z"); - final ArrayList methods = new ArrayList(); + final ArrayList methods = new ArrayList<>(); methods.add(new SevenZMethodConfiguration(method)); createAndReadBack(output, methods); } private void testFilterRoundTrip(final SevenZMethodConfiguration method) throws Exception { output = new File(dir, method.getMethod() + "-roundtrip.7z"); - final ArrayList methods = new ArrayList(); + final ArrayList methods = new ArrayList<>(); methods.add(method); methods.add(new SevenZMethodConfiguration(SevenZMethod.LZMA2)); createAndReadBack(output, methods); @@ -486,11 +486,23 @@ outArchive.close(); } - final SevenZFile archive = new SevenZFile(output); - try { + try (SevenZFile archive = new SevenZFile(output)) { assertEquals(Boolean.TRUE, verifyFile(archive, 0, methods)); + } + } + + private void createAndReadBack(final SeekableInMemoryByteChannel output, final Iterable methods) throws Exception { + final SevenZOutputFile outArchive = new SevenZOutputFile(output); + outArchive.setContentMethods(methods); + try { + addFile(outArchive, 0, true); } finally { - archive.close(); + outArchive.close(); + } + try (SevenZFile archive = + new SevenZFile(new SeekableInMemoryByteChannel(output.array()), "in memory", + null)) { + assertEquals(Boolean.TRUE, verifyFile(archive, 0, methods)); } } diff -Nru libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/archivers/SevenZTestCase.java libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/archivers/SevenZTestCase.java --- libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/archivers/SevenZTestCase.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/archivers/SevenZTestCase.java 2016-12-25 11:57:03.000000000 +0000 @@ -38,6 +38,11 @@ } @Test + public void testSevenZArchiveCreationUsingLZMA() throws Exception { + testSevenZArchiveCreation(SevenZMethod.LZMA); + } + + @Test public void testSevenZArchiveCreationUsingLZMA2() throws Exception { testSevenZArchiveCreation(SevenZMethod.LZMA2); } @@ -74,22 +79,19 @@ } finally { outArchive.close(); } - - final SevenZFile archive = new SevenZFile(output); - try { + + try (SevenZFile archive = new SevenZFile(output)) { SevenZArchiveEntry entry; - + entry = archive.getNextEntry(); - assert(entry != null); + assert (entry != null); assertEquals(entry.getName(), file1.getName()); - + entry = archive.getNextEntry(); - assert(entry != null); + assert (entry != null); assertEquals(entry.getName(), file2.getName()); - - assert(archive.getNextEntry() == null); - } finally { - archive.close(); + + assert (archive.getNextEntry() == null); } } diff -Nru libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStreamTest.java libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStreamTest.java --- libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStreamTest.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStreamTest.java 2016-12-25 11:57:03.000000000 +0000 @@ -156,16 +156,13 @@ @Test public void testCompress197() throws Exception { - final TarArchiveInputStream tar = 
getTestStream("/COMPRESS-197.tar"); - try { + try (TarArchiveInputStream tar = getTestStream("/COMPRESS-197.tar")) { TarArchiveEntry entry = tar.getNextTarEntry(); while (entry != null) { entry = tar.getNextTarEntry(); } } catch (final IOException e) { fail("COMPRESS-197: " + e.getMessage()); - } finally { - tar.close(); } } @@ -214,9 +211,8 @@ @Test public void readsArchiveCompletely_COMPRESS245() throws Exception { - final InputStream is = TarArchiveInputStreamTest.class - .getResourceAsStream("/COMPRESS-245.tar.gz"); - try { + try (InputStream is = TarArchiveInputStreamTest.class + .getResourceAsStream("/COMPRESS-245.tar.gz")) { final InputStream gin = new GZIPInputStream(is); final TarArchiveInputStream tar = new TarArchiveInputStream(gin); int count = 0; @@ -229,8 +225,6 @@ tar.close(); } catch (final IOException e) { fail("COMPRESS-245: " + e.getMessage()); - } finally { - is.close(); } } @@ -285,16 +279,13 @@ */ @Test public void shouldReadGNULongNameEntryWithWrongName() throws Exception { - final TarArchiveInputStream is = getTestStream("/COMPRESS-324.tar"); - try { + try (TarArchiveInputStream is = getTestStream("/COMPRESS-324.tar")) { final TarArchiveEntry entry = is.getNextTarEntry(); assertEquals("1234567890123456789012345678901234567890123456789012345678901234567890" - + "1234567890123456789012345678901234567890123456789012345678901234567890" - + "1234567890123456789012345678901234567890123456789012345678901234567890" - + "1234567890123456789012345678901234567890.txt", - entry.getName()); - } finally { - is.close(); + + "1234567890123456789012345678901234567890123456789012345678901234567890" + + "1234567890123456789012345678901234567890123456789012345678901234567890" + + "1234567890123456789012345678901234567890.txt", + entry.getName()); } } @@ -303,13 +294,10 @@ */ @Test public void survivesBlankLinesInPaxHeader() throws Exception { - final TarArchiveInputStream is = getTestStream("/COMPRESS-355.tar"); - try { + try (TarArchiveInputStream is = getTestStream("/COMPRESS-355.tar")) { final TarArchiveEntry entry = is.getNextTarEntry(); assertEquals("package/package.json", entry.getName()); assertNull(is.getNextTarEntry()); - } finally { - is.close(); } } @@ -318,13 +306,10 @@ */ @Test public void survivesPaxHeaderWithNameEndingInSlash() throws Exception { - final TarArchiveInputStream is = getTestStream("/COMPRESS-356.tar"); - try { + try (TarArchiveInputStream is = getTestStream("/COMPRESS-356.tar")) { final TarArchiveEntry entry = is.getNextTarEntry(); assertEquals("package/package.json", entry.getName()); assertNull(is.getNextTarEntry()); - } finally { - is.close(); } } diff -Nru libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStreamTest.java libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStreamTest.java --- libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStreamTest.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStreamTest.java 2016-12-25 11:57:03.000000000 +0000 @@ -144,7 +144,7 @@ @Test public void testWriteSimplePaxHeaders() throws Exception { - final Map m = new HashMap(); + final Map m = new HashMap<>(); m.put("a", "b"); final byte[] data = writePaxHeader(m); assertEquals("00000000006 ", @@ -158,7 +158,7 @@ @Test public void testPaxHeadersWithLength99() throws Exception { - final Map m = new HashMap(); + final Map 
m = new HashMap<>(); m.put("a", "0123456789012345678901234567890123456789" + "01234567890123456789012345678901234567890123456789" @@ -177,7 +177,7 @@ @Test public void testPaxHeadersWithLength101() throws Exception { - final Map m = new HashMap(); + final Map m = new HashMap<>(); m.put("a", "0123456789012345678901234567890123456789" + "01234567890123456789012345678901234567890123456789" diff -Nru libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/archivers/TestArchiveStreamProvider.java libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/archivers/TestArchiveStreamProvider.java --- libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/archivers/TestArchiveStreamProvider.java 1970-01-01 00:00:00.000000000 +0000 +++ libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/archivers/TestArchiveStreamProvider.java 2016-12-25 11:57:03.000000000 +0000 @@ -0,0 +1,63 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.commons.compress.archivers; + +import java.io.InputStream; +import java.io.OutputStream; +import java.util.Collections; +import java.util.HashSet; +import java.util.Set; + +public class TestArchiveStreamProvider implements ArchiveStreamProvider { + + public static final class ArchiveInvocationConfirmationException extends ArchiveException { + + private static final long serialVersionUID = 1L; + + public ArchiveInvocationConfirmationException(final String message) { + super(message); + } + } + + @Override + public ArchiveInputStream createArchiveInputStream(final String name, final InputStream in, final String encoding) throws ArchiveException { + throw new ArchiveInvocationConfirmationException(name); + } + + @Override + public ArchiveOutputStream createArchiveOutputStream(final String name, final OutputStream out, final String encoding) throws ArchiveException { + throw new ArchiveInvocationConfirmationException(name); + } + + @Override + public Set getInputStreamArchiveNames() { + final HashSet set = new HashSet<>(); + Collections.addAll(set, "ArchiveTestInput1"); + return set; + } + + @Override + public Set getOutputStreamArchiveNames() { + final HashSet set = new HashSet<>(); + Collections.addAll(set, "ArchiveTestOutput1"); + return set; + } + +} diff -Nru libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/archivers/zip/Lister.java libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/archivers/zip/Lister.java --- libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/archivers/zip/Lister.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/archivers/zip/Lister.java 2016-12-25 11:57:03.000000000 +0000 @@ -54,12 
+54,10 @@ usage(); } if (cl.useStream) { - final BufferedInputStream fs = - new BufferedInputStream(new FileInputStream(f)); - try { + try (BufferedInputStream fs = new BufferedInputStream(new FileInputStream(f))) { final ZipArchiveInputStream zs = - new ZipArchiveInputStream(fs, cl.encoding, true, - cl.allowStoredEntriesWithDataDescriptor); + new ZipArchiveInputStream(fs, cl.encoding, true, + cl.allowStoredEntriesWithDataDescriptor); for (ArchiveEntry entry = zs.getNextEntry(); entry != null; entry = zs.getNextEntry()) { @@ -69,27 +67,19 @@ extract(cl.dir, ze, zs); } } - } finally { - fs.close(); } } else { - final ZipFile zf = new ZipFile(f, cl.encoding); - try { + try (ZipFile zf = new ZipFile(f, cl.encoding)) { for (final Enumeration entries = zf.getEntries(); entries.hasMoreElements(); ) { final ZipArchiveEntry ze = entries.nextElement(); list(ze); if (cl.dir != null) { - final InputStream is = zf.getInputStream(ze); - try { + try (InputStream is = zf.getInputStream(ze)) { extract(cl.dir, ze, is); - } finally { - is.close(); } } } - } finally { - zf.close(); } } } diff -Nru libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreatorTest.java libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreatorTest.java --- libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreatorTest.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreatorTest.java 2016-12-25 11:57:03.000000000 +0000 @@ -109,31 +109,55 @@ } private Map writeEntries(final ParallelScatterZipCreator zipCreator) { - final Map entries = new HashMap(); + final Map entries = new HashMap<>(); for (int i = 0; i < NUMITEMS; i++){ final byte[] payloadBytes = ("content" + i).getBytes(); final ZipArchiveEntry za = createZipArchiveEntry(entries, i, payloadBytes); - zipCreator.addArchiveEntry(za, new InputStreamSupplier() { + final InputStreamSupplier iss = new InputStreamSupplier() { @Override public InputStream get() { return new ByteArrayInputStream(payloadBytes); } - }); + }; + if (i % 2 == 0) { + zipCreator.addArchiveEntry(za, iss); + } else { + final ZipArchiveEntryRequestSupplier zaSupplier = new ZipArchiveEntryRequestSupplier() { + @Override + public ZipArchiveEntryRequest get() { + return ZipArchiveEntryRequest.createZipArchiveEntryRequest(za, iss); + } + }; + zipCreator.addArchiveEntry(zaSupplier); + } } return entries; } private Map writeEntriesAsCallable(final ParallelScatterZipCreator zipCreator) { - final Map entries = new HashMap(); + final Map entries = new HashMap<>(); for (int i = 0; i < NUMITEMS; i++){ final byte[] payloadBytes = ("content" + i).getBytes(); final ZipArchiveEntry za = createZipArchiveEntry(entries, i, payloadBytes); - final Callable callable = zipCreator.createCallable(za, new InputStreamSupplier() { + final InputStreamSupplier iss = new InputStreamSupplier() { @Override public InputStream get() { return new ByteArrayInputStream(payloadBytes); } - }); + }; + final Callable callable; + if (i % 2 == 0) { + callable = zipCreator.createCallable(za, iss); + } else { + final ZipArchiveEntryRequestSupplier zaSupplier = new ZipArchiveEntryRequestSupplier() { + @Override + public ZipArchiveEntryRequest get() { + return ZipArchiveEntryRequest.createZipArchiveEntryRequest(za, iss); + } + }; + callable = zipCreator.createCallable(zaSupplier); + } + 
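            // Illustrative sketch (not from the upstream patch): besides the
            // (entry, InputStreamSupplier) overloads used above, 1.13 lets callers hand the
            // creator a ZipArchiveEntryRequestSupplier that builds the complete request lazily.
            final ZipArchiveEntry sketchEntry = new ZipArchiveEntry("sketch.txt");
            sketchEntry.setMethod(ZipArchiveEntry.DEFLATED);
            final InputStreamSupplier sketchPayload = new InputStreamSupplier() {
                @Override
                public InputStream get() {
                    return new ByteArrayInputStream("sketch".getBytes());
                }
            };
            zipCreator.addArchiveEntry(new ZipArchiveEntryRequestSupplier() {
                @Override
                public ZipArchiveEntryRequest get() {
                    return ZipArchiveEntryRequest.createZipArchiveEntryRequest(sketchEntry, sketchPayload);
                }
            });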
zipCreator.submit(callable); } return entries; diff -Nru libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/archivers/zip/Zip64SupportIT.java libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/archivers/zip/Zip64SupportIT.java --- libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/archivers/zip/Zip64SupportIT.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/archivers/zip/Zip64SupportIT.java 2016-12-25 11:57:03.000000000 +0000 @@ -211,15 +211,14 @@ zos.setUseZip64(mode); } write100KFilesToStream(zos); - final RandomAccessFile a = new RandomAccessFile(f, "r"); - try { + try (RandomAccessFile a = new RandomAccessFile(f, "r")) { final long end = a.length(); // validate "end of central directory" is at // the end of the file and contains the magic // value 0xFFFF as "number of entries". a.seek(end - - 22 /* length of EOCD without file comment */); + - 22 /* length of EOCD without file comment */); final byte[] eocd = new byte[12]; a.readFully(eocd); assertArrayEquals(new byte[] { @@ -230,17 +229,17 @@ // entries (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, - }, eocd); + }, eocd); // validate "Zip64 end of central directory // locator" is right in front of the EOCD and // the location of the "Zip64 end of central // directory record" seems correct final long expectedZ64EocdOffset = end - 22 /* eocd.length */ - - 20 /* z64 eocd locator.length */ - - 56 /* z64 eocd without extensible data sector */; + - 20 /* z64 eocd locator.length */ + - 56 /* z64 eocd without extensible data sector */; final byte[] loc = - ZipEightByteInteger.getBytes(expectedZ64EocdOffset); + ZipEightByteInteger.getBytes(expectedZ64EocdOffset); a.seek(end - 22 - 20); final byte[] z64EocdLoc = new byte[20]; a.readFully(z64EocdLoc); @@ -254,7 +253,7 @@ loc[4], loc[5], loc[6], loc[7], // total number of disks 1, 0, 0, 0, - }, z64EocdLoc); + }, z64EocdLoc); // validate "Zip64 end of central directory // record" is where it is supposed to be, the @@ -281,7 +280,7 @@ 0, 0, 0, 0, (byte) 0xA0, (byte) 0x86, 1, 0, 0, 0, 0, 0, - }, z64EocdStart); + }, z64EocdStart); a.seek(expectedZ64EocdOffset + 48 /* skip size */); final byte[] cdOffset = new byte[8]; a.readFully(cdOffset); @@ -294,9 +293,7 @@ a.readFully(sig); assertArrayEquals(new byte[] { (byte) 0x50, (byte) 0x4b, 1, 2, - }, sig); - } finally { - a.close(); + }, sig); } } }; @@ -383,16 +380,15 @@ } write3EntriesCreatingBigArchiveToStream(zos); - final RandomAccessFile a = new RandomAccessFile(f, "r"); - try { + try (RandomAccessFile a = new RandomAccessFile(f, "r")) { getLengthAndPositionAtCentralDirectory(a); // skip first two entries a.skipBytes(2 * 47 /* CD entry of file with file name length 1 and no extra data */ - + 2 * (mode == Zip64Mode.Always ? 4 : 0) + + 2 * (mode == Zip64Mode.Always ? 
4 : 0) /* empty ZIP64 extra fields if mode is Always */ - ); + ); // grab third entry, verify offset is // 0xFFFFFFFF and it has a ZIP64 extended @@ -410,7 +406,7 @@ 0, 8, // method 0, 0 - }, header); + }, header); // ignore timestamp, CRC, compressed size a.skipBytes(12); final byte[] rest = new byte[23]; @@ -434,7 +430,7 @@ (byte) 0xFF, (byte) 0xFF, // file name (byte) '2' - }, rest); + }, rest); final byte[] extra = new byte[4]; a.readFully(extra); assertArrayEquals(new byte[] { @@ -442,7 +438,7 @@ 1, 0, // size 8, 0 - }, extra); + }, extra); // read offset of LFH final byte[] offset = new byte[8]; @@ -453,9 +449,7 @@ a.readFully(sig); assertArrayEquals(new byte[] { (byte) 0x50, (byte) 0x4b, 3, 4, - }, sig); - } finally { - a.close(); + }, sig); } } }; @@ -542,13 +536,9 @@ } else { assertEquals(1, zae.getSize()); - final InputStream i = - zf.getInputStream(zae); - try { + try (InputStream i = zf.getInputStream(zae)) { assertNotNull(i); assertEquals(42, i.read()); - } finally { - i.close(); } } } @@ -596,8 +586,7 @@ zos.closeArchiveEntry(); zos.close(); - final RandomAccessFile a = new RandomAccessFile(f, "r"); - try { + try (RandomAccessFile a = new RandomAccessFile(f, "r")) { getLengthAndPositionAtCentralDirectory(a); // grab first entry, verify sizes are 0xFFFFFFFF @@ -616,7 +605,7 @@ 0, 8, // method 0, 0 - }, header); + }, header); // ignore timestamp a.skipBytes(4); byte[] rest = new byte[31]; @@ -643,7 +632,7 @@ 0, 0, 0, 0, // file name (byte) '0' - }, rest); + }, rest); final byte[] extra = new byte[20]; a.readFully(extra); // 5e9 == 0x12A05F200 @@ -658,7 +647,7 @@ // compressed size 0, (byte) 0xF2, 5, (byte) 0x2A, 1, 0, 0, 0, - }, extra); + }, extra); // and now validate local file header a.seek(0); @@ -673,7 +662,7 @@ 0, 8, // method 0, 0 - }, header); + }, header); // ignore timestamp a.skipBytes(4); rest = new byte[17]; @@ -691,7 +680,7 @@ 20, 0, // file name (byte) '0' - }, rest); + }, rest); a.readFully(extra); // 5e9 == 0x12A05F200 assertArrayEquals(new byte[] { @@ -705,9 +694,7 @@ // compressed size 0, (byte) 0xF2, 5, (byte) 0x2A, 1, 0, 0, 0, - }, extra); - } finally { - a.close(); + }, extra); } } }; @@ -836,9 +823,7 @@ zos.closeArchiveEntry(); zos.close(); - final RandomAccessFile a = - new RandomAccessFile(f, "r"); - try { + try (RandomAccessFile a = new RandomAccessFile(f, "r")) { getLengthAndPositionAtCentralDirectory(a); final long cfhPos = a.getFilePointer(); @@ -859,7 +844,7 @@ 8, 8, // method 8, 0, - }, header); + }, header); // ignore timestamp a.skipBytes(4); byte[] rest = new byte[31]; @@ -886,7 +871,7 @@ 0, 0, 0, 0, // file name (byte) '0' - }, rest); + }, rest); final byte[] extra = new byte[20]; a.readFully(extra); // 5e9 == 0x12A05F200 @@ -901,7 +886,7 @@ // compressed size (byte) 0x68, (byte) 0x27, (byte) 0x4A, 0, 0, 0, 0, 0, - }, extra); + }, extra); // validate data descriptor a.seek(cfhPos - 24); @@ -912,7 +897,7 @@ (byte) 0x50, (byte) 0x4b, 7, 8, // CRC (byte) 0x50, (byte) 0x6F, (byte) 0x31, (byte) 0x5c, - }, dd); + }, dd); dd = new byte[16]; a.readFully(dd); assertArrayEquals(new byte[] { @@ -922,7 +907,7 @@ // original size 0, (byte) 0xF2, 5, (byte) 0x2A, 1, 0, 0, 0, - }, dd); + }, dd); // and now validate local file header a.seek(0); @@ -937,7 +922,7 @@ 8, 8, // method 8, 0, - }, header); + }, header); // ignore timestamp a.skipBytes(4); rest = new byte[17]; @@ -955,7 +940,7 @@ 20, 0, // file name (byte) '0' - }, rest); + }, rest); a.readFully(extra); assertArrayEquals(new byte[] { // Header-ID @@ -968,9 +953,7 @@ // compressed size 0, 0, 0, 0, 0, 
0, 0, 0, - }, extra); - } finally { - a.close(); + }, extra); } } }; @@ -1081,8 +1064,7 @@ zos.closeArchiveEntry(); zos.close(); - final RandomAccessFile a = new RandomAccessFile(f, "r"); - try { + try (RandomAccessFile a = new RandomAccessFile(f, "r")) { getLengthAndPositionAtCentralDirectory(a); // grab first entry, verify @@ -1102,7 +1084,7 @@ 0, 8, // method 8, 0, - }, header); + }, header); // ignore timestamp a.skipBytes(4); byte[] rest = new byte[31]; @@ -1129,7 +1111,7 @@ 0, 0, 0, 0, // file name (byte) '0' - }, rest); + }, rest); byte[] extra = new byte[20]; a.readFully(extra); // 5e9 == 0x12A05F200 @@ -1144,7 +1126,7 @@ // compressed size (byte) 0x68, (byte) 0x27, (byte) 0x4A, 0, 0, 0, 0, 0, - }, extra); + }, extra); // and now validate local file header a.seek(0); @@ -1159,7 +1141,7 @@ 0, 8, // method 8, 0, - }, header); + }, header); // ignore timestamp a.skipBytes(4); rest = new byte[17]; @@ -1177,7 +1159,7 @@ 20, 0, // file name (byte) '0' - }, rest); + }, rest); extra = new byte[20]; a.readFully(extra); assertArrayEquals(new byte[] { @@ -1191,9 +1173,7 @@ // compressed size (byte) 0x68, (byte) 0x27, (byte) 0x4A, 0, 0, 0, 0, 0, - }, extra); - } finally { - a.close(); + }, extra); } } }; @@ -1340,8 +1320,7 @@ zos.closeArchiveEntry(); zos.close(); - final RandomAccessFile a = new RandomAccessFile(f, "r"); - try { + try (RandomAccessFile a = new RandomAccessFile(f, "r")) { getLengthAndPositionAtCentralDirectory(a); // grab first CF entry, verify sizes are 1e6 and it @@ -1360,7 +1339,7 @@ 0, 8, // method 0, 0 - }, header); + }, header); // ignore timestamp a.skipBytes(4); byte[] rest = new byte[31]; @@ -1388,14 +1367,14 @@ 0, 0, 0, 0, // file name (byte) '0' - }, rest); + }, rest); // and now validate local file header: this one // has a ZIP64 extra field if and only if size was // unknown and mode was not Never or the mode was // Always (regardless of size) final boolean hasExtra = mode == Zip64Mode.Always - || (mode == Zip64Mode.AsNeeded && !knownSize); + || (mode == Zip64Mode.AsNeeded && !knownSize); a.seek(0); header = new byte[10]; a.readFully(header); @@ -1408,7 +1387,7 @@ 0, 8, // method 0, 0 - }, header); + }, header); // ignore timestamp a.skipBytes(4); rest = new byte[17]; @@ -1427,7 +1406,7 @@ (byte) (!hasExtra ? 
0 : 20), 0, // file name (byte) '0' - }, rest); + }, rest); if (hasExtra) { final byte[] extra = new byte[20]; a.readFully(extra); @@ -1442,10 +1421,8 @@ // compressed size (byte) 0x40, (byte) 0x42, (byte) 0x0F, 0, 0, 0, 0, 0, - }, extra); + }, extra); } - } finally { - a.close(); } } }; @@ -1517,8 +1494,7 @@ zos.closeArchiveEntry(); zos.close(); - final RandomAccessFile a = new RandomAccessFile(f, "r"); - try { + try (RandomAccessFile a = new RandomAccessFile(f, "r")) { getLengthAndPositionAtCentralDirectory(a); // grab first CF entry, verify sizes are 1e6 and it @@ -1536,7 +1512,7 @@ 0, 8, // method 0, 0 - }, header); + }, header); // ignore timestamp a.skipBytes(4); byte[] rest = new byte[31]; @@ -1564,7 +1540,7 @@ 0, 0, 0, 0, // file name (byte) '0' - }, rest); + }, rest); byte[] extra = new byte[4]; a.readFully(extra); @@ -1573,7 +1549,7 @@ 1, 0, // size of extra 0, 0, - }, extra); + }, extra); // and now validate local file header: this one // has a ZIP64 extra field as the mode was @@ -1590,7 +1566,7 @@ 0, 8, // method 0, 0 - }, header); + }, header); // ignore timestamp a.skipBytes(4); rest = new byte[17]; @@ -1609,7 +1585,7 @@ 20, 0, // file name (byte) '0' - }, rest); + }, rest); extra = new byte[20]; a.readFully(extra); @@ -1624,9 +1600,7 @@ // compressed size (byte) 0x40, (byte) 0x42, (byte) 0x0F, 0, 0, 0, 0, 0, - }, extra); - } finally { - a.close(); + }, extra); } } }; @@ -1681,8 +1655,7 @@ zos.closeArchiveEntry(); zos.close(); - final RandomAccessFile a = new RandomAccessFile(f, "r"); - try { + try (RandomAccessFile a = new RandomAccessFile(f, "r")) { getLengthAndPositionAtCentralDirectory(a); final long cfhPos = a.getFilePointer(); @@ -1702,7 +1675,7 @@ 8, 8, // method 8, 0, - }, header); + }, header); // ignore timestamp a.skipBytes(4); final byte[] crc = new byte[4]; @@ -1710,7 +1683,7 @@ assertArrayEquals(new byte[] { (byte) 0x9E, (byte) 0xCB, (byte) 0x79, (byte) 0x12, - }, crc); + }, crc); // skip compressed size a.skipBytes(4); byte[] rest = new byte[23]; @@ -1734,7 +1707,7 @@ 0, 0, 0, 0, // file name (byte) '0' - }, rest); + }, rest); // validate data descriptor a.seek(cfhPos - 16); @@ -1745,7 +1718,7 @@ (byte) 0x50, (byte) 0x4b, 7, 8, // CRC (byte) 0x9E, (byte) 0xCB, (byte) 0x79, (byte) 0x12, - }, dd); + }, dd); // skip uncompressed size a.skipBytes(4); dd = new byte[4]; @@ -1753,7 +1726,7 @@ assertArrayEquals(new byte[] { // original size (byte) 0x40, (byte) 0x42, (byte) 0x0F, 0, - }, dd); + }, dd); // and now validate local file header a.seek(0); @@ -1768,7 +1741,7 @@ 8, 8, // method 8, 0, - }, header); + }, header); // ignore timestamp a.skipBytes(4); rest = new byte[17]; @@ -1786,9 +1759,7 @@ 0, 0, // file name (byte) '0' - }, rest); - } finally { - a.close(); + }, rest); } } }; @@ -1852,8 +1823,7 @@ zos.closeArchiveEntry(); zos.close(); - final RandomAccessFile a = new RandomAccessFile(f, "r"); - try { + try (RandomAccessFile a = new RandomAccessFile(f, "r")) { getLengthAndPositionAtCentralDirectory(a); final long cfhPos = a.getFilePointer(); @@ -1873,7 +1843,7 @@ 8, 8, // method 8, 0, - }, header); + }, header); // ignore timestamp a.skipBytes(4); final byte[] crc = new byte[4]; @@ -1881,7 +1851,7 @@ assertArrayEquals(new byte[] { (byte) 0x9E, (byte) 0xCB, (byte) 0x79, (byte) 0x12, - }, crc); + }, crc); // skip compressed size a.skipBytes(4); byte[] rest = new byte[23]; @@ -1905,7 +1875,7 @@ 0, 0, 0, 0, // file name (byte) '0' - }, rest); + }, rest); byte[] extra = new byte[4]; a.readFully(extra); assertArrayEquals(new byte[] { @@ -1913,7 +1883,7 @@ 1, 0, // 
size of extra 0, 0, - }, extra); + }, extra); // validate data descriptor a.seek(cfhPos - 24); @@ -1924,7 +1894,7 @@ (byte) 0x50, (byte) 0x4b, 7, 8, // CRC (byte) 0x9E, (byte) 0xCB, (byte) 0x79, (byte) 0x12, - }, dd); + }, dd); // skip compressed size a.skipBytes(8); dd = new byte[8]; @@ -1933,7 +1903,7 @@ // original size (byte) 0x40, (byte) 0x42, (byte) 0x0F, 0, 0, 0, 0, 0 - }, dd); + }, dd); // and now validate local file header a.seek(0); @@ -1948,7 +1918,7 @@ 8, 8, // method 8, 0, - }, header); + }, header); // ignore timestamp a.skipBytes(4); rest = new byte[17]; @@ -1966,7 +1936,7 @@ 20, 0, // file name (byte) '0' - }, rest); + }, rest); extra = new byte[20]; a.readFully(extra); @@ -1981,9 +1951,7 @@ // compressed size 0, 0, 0, 0, 0, 0, 0, 0, - }, extra); - } finally { - a.close(); + }, extra); } } }; @@ -2037,8 +2005,7 @@ zos.closeArchiveEntry(); zos.close(); - final RandomAccessFile a = new RandomAccessFile(f, "r"); - try { + try (RandomAccessFile a = new RandomAccessFile(f, "r")) { getLengthAndPositionAtCentralDirectory(a); // grab first CD entry, verify sizes are not @@ -2057,7 +2024,7 @@ 0, 8, // method 8, 0, - }, header); + }, header); // ignore timestamp a.skipBytes(4); byte[] crc = new byte[4]; @@ -2065,7 +2032,7 @@ assertArrayEquals(new byte[] { (byte) 0x9E, (byte) 0xCB, (byte) 0x79, (byte) 0x12, - }, crc); + }, crc); // skip compressed size a.skipBytes(4); byte[] rest = new byte[23]; @@ -2088,7 +2055,7 @@ 0, 0, 0, 0, // file name (byte) '0' - }, rest); + }, rest); // and now validate local file header a.seek(0); @@ -2103,7 +2070,7 @@ 0, 8, // method 8, 0, - }, header); + }, header); // ignore timestamp a.skipBytes(4); crc = new byte[4]; @@ -2111,14 +2078,14 @@ assertArrayEquals(new byte[] { (byte) 0x9E, (byte) 0xCB, (byte) 0x79, (byte) 0x12, - }, crc); + }, crc); // skip compressed size a.skipBytes(4); rest = new byte[9]; a.readFully(rest); - final boolean hasExtra = - mode == Zip64Mode.AsNeeded && !knownSize; + final boolean hasExtra = + mode == Zip64Mode.AsNeeded && !knownSize; assertArrayEquals(new byte[] { // Original Size @@ -2129,7 +2096,7 @@ (byte) (!hasExtra ? 
0 : 20), 0, // file name (byte) '0' - }, rest); + }, rest); if (hasExtra) { final byte[] extra = new byte[12]; a.readFully(extra); @@ -2145,10 +2112,8 @@ // compressed size, // don't want to // hard-code it - }, extra); + }, extra); } - } finally { - a.close(); } } }; @@ -2210,8 +2175,7 @@ zos.closeArchiveEntry(); zos.close(); - final RandomAccessFile a = new RandomAccessFile(f, "r"); - try { + try (RandomAccessFile a = new RandomAccessFile(f, "r")) { getLengthAndPositionAtCentralDirectory(a); // grab first CD entry, verify sizes are not @@ -2230,14 +2194,14 @@ 0, 8, // method 8, 0, - }, header); + }, header); // ignore timestamp a.skipBytes(4); byte[] crc = new byte[4]; a.readFully(crc); assertArrayEquals(new byte[] { (byte) 0x9E, (byte) 0xCB, (byte) 0x79, (byte) 0x12, - }, crc); + }, crc); // skip compressed size a.skipBytes(4); byte[] rest = new byte[23]; @@ -2260,7 +2224,7 @@ 0, 0, 0, 0, // file name (byte) '0' - }, rest); + }, rest); byte[] extra = new byte[4]; a.readFully(extra); assertArrayEquals(new byte[] { @@ -2268,7 +2232,7 @@ 1, 0, // size of extra 0, 0, - }, extra); + }, extra); // and now validate local file header a.seek(0); @@ -2283,7 +2247,7 @@ 0, 8, // method 8, 0, - }, header); + }, header); // ignore timestamp a.skipBytes(4); crc = new byte[4]; @@ -2291,7 +2255,7 @@ assertArrayEquals(new byte[] { (byte) 0x9E, (byte) 0xCB, (byte) 0x79, (byte) 0x12, - }, crc); + }, crc); rest = new byte[13]; a.readFully(rest); @@ -2306,7 +2270,7 @@ 20, 0, // file name (byte) '0' - }, rest); + }, rest); extra = new byte[12]; a.readFully(extra); @@ -2322,9 +2286,7 @@ // compressed size, // don't want to // hard-code it - }, extra); - } finally { - a.close(); + }, extra); } } }; @@ -2479,8 +2441,7 @@ long read = 0; final Random r = new Random(System.currentTimeMillis()); int readNow; - final InputStream zin = zf.getInputStream(zae); - try { + try (InputStream zin = zf.getInputStream(zae)) { while ((readNow = zin.read(buf, 0, buf.length)) > 0) { // testing all bytes for a value of 0 is going to take // too long, just pick a few ones randomly @@ -2490,8 +2451,6 @@ } read += readNow; } - } finally { - zin.close(); } assertEquals(FIVE_BILLION, read); assertFalse(e.hasMoreElements()); diff -Nru libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStreamTest.java libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStreamTest.java --- libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStreamTest.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStreamTest.java 2016-12-25 11:57:03.000000000 +0000 @@ -30,6 +30,7 @@ import java.io.IOException; import java.io.InputStream; import java.util.Arrays; +import java.util.zip.ZipException; import org.apache.commons.compress.utils.IOUtils; import org.junit.Test; @@ -161,15 +162,12 @@ */ @Test public void testReadingOfFirstStoredEntry() throws Exception { - final ZipArchiveInputStream in = new ZipArchiveInputStream(new FileInputStream(getFile("COMPRESS-264.zip"))); - - try { + + try (ZipArchiveInputStream in = new ZipArchiveInputStream(new FileInputStream(getFile("COMPRESS-264.zip")))) { final ZipArchiveEntry ze = in.getNextZipEntry(); assertEquals(5, ze.getSize()); - assertArrayEquals(new byte[] {'d', 'a', 't', 'a', '\n'}, - IOUtils.toByteArray(in)); - } finally { - in.close(); + assertArrayEquals(new byte[] { 'd', 'a', 
't', 'a', '\n' }, + IOUtils.toByteArray(in)); } } @@ -180,33 +178,81 @@ */ @Test public void testMessageWithCorruptFileName() throws Exception { - final ZipArchiveInputStream in = new ZipArchiveInputStream(new FileInputStream(getFile("COMPRESS-351.zip"))); - try { + try (ZipArchiveInputStream in = new ZipArchiveInputStream(new FileInputStream(getFile("COMPRESS-351.zip")))) { ZipArchiveEntry ze = in.getNextZipEntry(); while (ze != null) { ze = in.getNextZipEntry(); } fail("expected EOFException"); - } catch (EOFException ex) { - String m = ex.getMessage(); + } catch (final EOFException ex) { + final String m = ex.getMessage(); assertTrue(m.startsWith("Truncated ZIP entry: ?2016")); // the first character is not printable - } finally { - in.close(); } } @Test public void testUnzipBZip2CompressedEntry() throws Exception { - final ZipArchiveInputStream in = new ZipArchiveInputStream(new FileInputStream(getFile("bzip2-zip.zip"))); - - try { + + try (ZipArchiveInputStream in = new ZipArchiveInputStream(new FileInputStream(getFile("bzip2-zip.zip")))) { final ZipArchiveEntry ze = in.getNextZipEntry(); assertEquals(42, ze.getSize()); final byte[] expected = new byte[42]; - Arrays.fill(expected , (byte)'a'); + Arrays.fill(expected, (byte) 'a'); assertArrayEquals(expected, IOUtils.toByteArray(in)); + } + } + + /** + * Test case for + * COMPRESS-364. + */ + @Test + public void testWithBytesAfterData() throws Exception { + final int expectedNumEntries = 2; + final InputStream is = ZipArchiveInputStreamTest.class + .getResourceAsStream("/archive_with_bytes_after_data.zip"); + final ZipArchiveInputStream zip = new ZipArchiveInputStream(is); + + try { + int actualNumEntries = 0; + ZipArchiveEntry zae = zip.getNextZipEntry(); + while (zae != null) { + actualNumEntries++; + readEntry(zip, zae); + zae = zip.getNextZipEntry(); + } + assertEquals(expectedNumEntries, actualNumEntries); } finally { - in.close(); + zip.close(); } } + + /** + * getNextZipEntry() should throw a ZipException rather than return + * null when an unexpected structure is encountered. 
+ */ + @Test + public void testThrowOnInvalidEntry() throws Exception { + final InputStream is = ZipArchiveInputStreamTest.class + .getResourceAsStream("/invalid-zip.zip"); + final ZipArchiveInputStream zip = new ZipArchiveInputStream(is); + + try { + zip.getNextZipEntry(); + fail("IOException expected"); + } catch (ZipException expected) { + assertTrue(expected.getMessage().contains("Unexpected record signature")); + } finally { + zip.close(); + } + } + + private static byte[] readEntry(ZipArchiveInputStream zip, ZipArchiveEntry zae) throws IOException { + final int len = (int)zae.getSize(); + final byte[] buff = new byte[len]; + zip.read(buff, 0, len); + + return buff; + } } diff -Nru libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/archivers/zip/ZipFileTest.java libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/archivers/zip/ZipFileTest.java --- libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/archivers/zip/ZipFileTest.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/archivers/zip/ZipFileTest.java 2016-12-25 11:57:03.000000000 +0000 @@ -34,6 +34,7 @@ import java.util.zip.ZipEntry; import org.apache.commons.compress.utils.IOUtils; +import org.apache.commons.compress.utils.SeekableInMemoryByteChannel; import org.junit.After; import org.junit.Test; @@ -75,6 +76,40 @@ } @Test + public void testCDOrderInMemory() throws Exception { + byte[] data = null; + try (FileInputStream fis = new FileInputStream(getFile("ordertest.zip"))) { + data = IOUtils.toByteArray(fis); + } + + zf = new ZipFile(new SeekableInMemoryByteChannel(data), ZipEncodingHelper.UTF8); + final ArrayList l = Collections.list(zf.getEntries()); + assertEntryName(l, 0, "AbstractUnicodeExtraField"); + assertEntryName(l, 1, "AsiExtraField"); + assertEntryName(l, 2, "ExtraFieldUtils"); + assertEntryName(l, 3, "FallbackZipEncoding"); + assertEntryName(l, 4, "GeneralPurposeBit"); + assertEntryName(l, 5, "JarMarker"); + assertEntryName(l, 6, "NioZipEncoding"); + assertEntryName(l, 7, "Simple8BitZipEncoding"); + assertEntryName(l, 8, "UnicodeCommentExtraField"); + assertEntryName(l, 9, "UnicodePathExtraField"); + assertEntryName(l, 10, "UnixStat"); + assertEntryName(l, 11, "UnparseableExtraFieldData"); + assertEntryName(l, 12, "UnrecognizedExtraField"); + assertEntryName(l, 13, "ZipArchiveEntry"); + assertEntryName(l, 14, "ZipArchiveInputStream"); + assertEntryName(l, 15, "ZipArchiveOutputStream"); + assertEntryName(l, 16, "ZipEncoding"); + assertEntryName(l, 17, "ZipEncodingHelper"); + assertEntryName(l, 18, "ZipExtraField"); + assertEntryName(l, 19, "ZipUtil"); + assertEntryName(l, 20, "ZipLong"); + assertEntryName(l, 21, "ZipShort"); + assertEntryName(l, 22, "ZipFile"); + } + + @Test public void testPhysicalOrder() throws Exception { readOrderTest(); final ArrayList l = Collections.list(zf.getEntriesInPhysicalOrder()); @@ -179,7 +214,7 @@ @Test public void testUnixSymlinkSampleFile() throws Exception { final String entryPrefix = "COMPRESS-214_unix_symlinks/"; - final TreeMap expectedVals = new TreeMap(); + final TreeMap expectedVals = new TreeMap<>(); // I threw in some Japanese characters to keep things interesting. 
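        // Illustrative sketch (not from the upstream patch): as testCDOrderInMemory above shows,
        // a ZipFile can now be opened from any SeekableByteChannel, e.g. a byte array wrapped in
        // a SeekableInMemoryByteChannel, instead of a java.io.File.
        byte[] zipBytes;
        try (FileInputStream fis = new FileInputStream(getFile("ordertest.zip"))) {
            zipBytes = IOUtils.toByteArray(fis);
        }
        try (ZipFile fromMemory = new ZipFile(new SeekableInMemoryByteChannel(zipBytes), ZipEncodingHelper.UTF8)) {
            assertTrue(fromMemory.getEntries().hasMoreElements());
        }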
expectedVals.put(entryPrefix + "link1", "../COMPRESS-214_unix_symlinks/./a/b/c/../../../\uF999"); diff -Nru libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/archivers/ZipTestCase.java libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/archivers/ZipTestCase.java --- libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/archivers/ZipTestCase.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/archivers/ZipTestCase.java 2016-12-25 11:57:03.000000000 +0000 @@ -31,6 +31,7 @@ import java.util.Enumeration; import java.util.List; import java.util.zip.ZipEntry; +import java.util.zip.ZipException; import org.apache.commons.compress.AbstractTestCase; import org.apache.commons.compress.archivers.zip.Zip64Mode; @@ -41,6 +42,7 @@ import org.apache.commons.compress.archivers.zip.ZipFile; import org.apache.commons.compress.archivers.zip.ZipMethod; import org.apache.commons.compress.utils.IOUtils; +import org.apache.commons.compress.utils.SeekableInMemoryByteChannel; import org.junit.Assert; import org.junit.Test; @@ -77,7 +79,7 @@ out.close(); // Unarchive the same - final List results = new ArrayList(); + final List results = new ArrayList<>(); final InputStream is = new FileInputStream(output); ArchiveInputStream in = null; @@ -89,11 +91,8 @@ while((entry = (ZipArchiveEntry)in.getNextEntry()) != null) { final File outfile = new File(resultDir.getCanonicalPath() + "/result/" + entry.getName()); outfile.getParentFile().mkdirs(); - final OutputStream o = new FileOutputStream(outfile); - try { + try (OutputStream o = new FileOutputStream(outfile)) { IOUtils.copy(in, o); - } finally { - o.close(); } results.add(outfile); } @@ -112,6 +111,49 @@ } /** + * Archives 2 files and unarchives it again. If the file contents of result + * and source is the same, it looks like the operations have worked + * @throws Exception + */ + @Test + public void testZipArchiveCreationInMemory() throws Exception { + final File file1 = getFile("test1.xml"); + final File file2 = getFile("test2.xml"); + final byte[] file1Contents = new byte[(int) file1.length()]; + final byte[] file2Contents = new byte[(int) file2.length()]; + IOUtils.readFully(new FileInputStream(file1), file1Contents); + IOUtils.readFully(new FileInputStream(file2), file2Contents); + + SeekableInMemoryByteChannel c = new SeekableInMemoryByteChannel(); + try (ZipArchiveOutputStream os = new ZipArchiveOutputStream(c)) { + os.putArchiveEntry(new ZipArchiveEntry("testdata/test1.xml")); + os.write(file1Contents); + os.closeArchiveEntry(); + + os.putArchiveEntry(new ZipArchiveEntry("testdata/test2.xml")); + os.write(file2Contents); + os.closeArchiveEntry(); + } + + // Unarchive the same + final List results = new ArrayList<>(); + + try (ArchiveInputStream in = new ArchiveStreamFactory() + .createArchiveInputStream("zip", new ByteArrayInputStream(c.array()))) { + + ZipArchiveEntry entry; + while((entry = (ZipArchiveEntry)in.getNextEntry()) != null) { + byte[] result = new byte[(int) entry.getSize()]; + IOUtils.readFully(in, result); + results.add(result); + } + } + + assertArrayEquals(results.get(0), file1Contents); + assertArrayEquals(results.get(1), file2Contents); + } + + /** * Simple unarchive test. Asserts nothing. 
* @throws Exception */ @@ -135,14 +177,11 @@ @Test public void testSkipsPK00Prefix() throws Exception { final File input = getFile("COMPRESS-208.zip"); - final InputStream is = new FileInputStream(input); - final ArrayList al = new ArrayList(); + final ArrayList al = new ArrayList<>(); al.add("test1.xml"); al.add("test2.xml"); - try { + try (InputStream is = new FileInputStream(input)) { checkArchiveContent(new ZipArchiveInputStream(is), al); - } finally { - is.close(); } } @@ -177,9 +216,7 @@ @Test public void testSkipEntryWithUnsupportedCompressionMethod() throws IOException { - final ZipArchiveInputStream zip = - new ZipArchiveInputStream(new FileInputStream(getFile("moby.zip"))); - try { + try (ZipArchiveInputStream zip = new ZipArchiveInputStream(new FileInputStream(getFile("moby.zip")))) { final ZipArchiveEntry entry = zip.getNextZipEntry(); assertEquals("method", ZipMethod.TOKENIZATION.getCode(), entry.getMethod()); assertEquals("README", entry.getName()); @@ -190,8 +227,6 @@ e.printStackTrace(); fail("COMPRESS-93: Unable to skip an unsupported zip entry"); } - } finally { - zip.close(); } } @@ -208,7 +243,8 @@ public void testListAllFilesWithNestedArchive() throws Exception { final File input = getFile("OSX_ArchiveWithNestedArchive.zip"); - final List results = new ArrayList(); + final List results = new ArrayList<>(); + final List expectedExceptions = new ArrayList<>(); final InputStream is = new FileInputStream(input); ArchiveInputStream in = null; @@ -216,15 +252,20 @@ in = new ArchiveStreamFactory().createArchiveInputStream("zip", is); ZipArchiveEntry entry = null; - while((entry = (ZipArchiveEntry)in.getNextEntry()) != null) { + while ((entry = (ZipArchiveEntry) in.getNextEntry()) != null) { results.add(entry.getName()); final ArchiveInputStream nestedIn = new ArchiveStreamFactory().createArchiveInputStream("zip", in); - ZipArchiveEntry nestedEntry = null; - while((nestedEntry = (ZipArchiveEntry)nestedIn.getNextEntry()) != null) { - results.add(nestedEntry.getName()); + try { + ZipArchiveEntry nestedEntry = null; + while ((nestedEntry = (ZipArchiveEntry) nestedIn.getNextEntry()) != null) { + results.add(nestedEntry.getName()); + } + } catch (ZipException ex) { + // expected since you cannot create a final ArchiveInputStream from test3.xml + expectedExceptions.add(ex); } - // nested stream must not be closed here + // nested stream must not be closed here } } finally { if (in != null) { @@ -233,10 +274,11 @@ } is.close(); - results.contains("NestedArchiv.zip"); - results.contains("test1.xml"); - results.contains("test2.xml"); - results.contains("test3.xml"); + assertTrue(results.contains("NestedArchiv.zip")); + assertTrue(results.contains("test1.xml")); + assertTrue(results.contains("test2.xml")); + assertTrue(results.contains("test3.xml")); + assertEquals(1, expectedExceptions.size()); } @Test diff -Nru libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/ArchiveUtilsTest.java libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/ArchiveUtilsTest.java --- libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/ArchiveUtilsTest.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/ArchiveUtilsTest.java 2016-12-25 11:57:03.000000000 +0000 @@ -69,12 +69,12 @@ @Test public void sanitizeShortensString() { - String input = "012345678901234567890123456789012345678901234567890123456789" + final String input = "012345678901234567890123456789012345678901234567890123456789" 
+ "012345678901234567890123456789012345678901234567890123456789" + "012345678901234567890123456789012345678901234567890123456789" + "012345678901234567890123456789012345678901234567890123456789" + "012345678901234567890123456789012345678901234567890123456789"; - String expected = "012345678901234567890123456789012345678901234567890123456789" + final String expected = "012345678901234567890123456789012345678901234567890123456789" + "012345678901234567890123456789012345678901234567890123456789" + "012345678901234567890123456789012345678901234567890123456789" + "012345678901234567890123456789012345678901234567890123456789" @@ -84,14 +84,14 @@ @Test public void sanitizeLeavesShortStringsAlone() { - String input = "012345678901234567890123456789012345678901234567890123456789"; + final String input = "012345678901234567890123456789012345678901234567890123456789"; assertEquals(input, ArchiveUtils.sanitize(input)); } @Test public void sanitizeRemovesUnprintableCharacters() { - String input = "\b12345678901234567890123456789012345678901234567890123456789"; - String expected = "?12345678901234567890123456789012345678901234567890123456789"; + final String input = "\b12345678901234567890123456789012345678901234567890123456789"; + final String expected = "?12345678901234567890123456789012345678901234567890123456789"; assertEquals(expected, ArchiveUtils.sanitize(input)); } diff -Nru libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/changes/ChangeSetTestCase.java libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/changes/ChangeSetTestCase.java --- libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/changes/ChangeSetTestCase.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/changes/ChangeSetTestCase.java 2016-12-25 11:57:03.000000000 +0000 @@ -766,7 +766,7 @@ } } - final List expected = new ArrayList(); + final List expected = new ArrayList<>(); expected.add("test1.xml"); this.checkArchiveContent(temp, expected); @@ -805,7 +805,7 @@ ais.close(); } } - final List expected = new ArrayList(); + final List expected = new ArrayList<>(); expected.add("test1.xml"); this.checkArchiveContent(temp, expected); } @@ -846,7 +846,7 @@ ais.close(); } } - final List expected = new ArrayList(); + final List expected = new ArrayList<>(); expected.add("test1.xml"); this.checkArchiveContent(temp, expected); } @@ -893,7 +893,7 @@ ais.close(); } } - final List expected = new ArrayList(); + final List expected = new ArrayList<>(); expected.add("test1.xml"); expected.add("testdata/test.txt"); final ArchiveInputStream in = factory.createArchiveInputStream("tar", new FileInputStream(temp)); @@ -940,7 +940,7 @@ ais.close(); } } - final List expected = new ArrayList(); + final List expected = new ArrayList<>(); expected.add("test1.xml"); expected.add("testdata/test.txt"); this.checkArchiveContent(temp, expected); @@ -980,7 +980,7 @@ } } - final List expected = new ArrayList(); + final List expected = new ArrayList<>(); expected.add("test1.xml"); this.checkArchiveContent(temp, expected); } @@ -1025,7 +1025,7 @@ ais.close(); } } - final List expected = new ArrayList(); + final List expected = new ArrayList<>(); expected.add("test1.xml"); expected.add("test.txt"); this.checkArchiveContent(temp, expected); diff -Nru libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/compressors/bzip2/BZip2CompressorInputStreamTest.java 
libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/compressors/bzip2/BZip2CompressorInputStreamTest.java --- libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/compressors/bzip2/BZip2CompressorInputStreamTest.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/compressors/bzip2/BZip2CompressorInputStreamTest.java 2016-12-25 11:57:03.000000000 +0000 @@ -31,11 +31,8 @@ @Test(expected = IOException.class) public void shouldThrowAnIOExceptionWhenAppliedToAZipFile() throws Exception { - final FileInputStream in = new FileInputStream(getFile("bla.zip")); - try { + try (FileInputStream in = new FileInputStream(getFile("bla.zip"))) { new BZip2CompressorInputStream(in); - } finally { - in.close(); } } diff -Nru libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/compressors/BZip2TestCase.java libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/compressors/BZip2TestCase.java --- libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/compressors/BZip2TestCase.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/compressors/BZip2TestCase.java 2016-12-25 11:57:03.000000000 +0000 @@ -77,59 +77,39 @@ @Test public void testConcatenatedStreamsReadFirstOnly() throws Exception { final File input = getFile("multiple.bz2"); - final InputStream is = new FileInputStream(input); - try { - final CompressorInputStream in = new CompressorStreamFactory() - .createCompressorInputStream("bzip2", is); - try { + try (InputStream is = new FileInputStream(input)) { + try (CompressorInputStream in = new CompressorStreamFactory() + .createCompressorInputStream("bzip2", is)) { assertEquals('a', in.read()); assertEquals(-1, in.read()); - } finally { - in.close(); } - } finally { - is.close(); } } @Test public void testConcatenatedStreamsReadFully() throws Exception { final File input = getFile("multiple.bz2"); - final InputStream is = new FileInputStream(input); - try { - final CompressorInputStream in = - new BZip2CompressorInputStream(is, true); - try { + try (InputStream is = new FileInputStream(input)) { + try (CompressorInputStream in = new BZip2CompressorInputStream(is, true)) { assertEquals('a', in.read()); assertEquals('b', in.read()); assertEquals(0, in.available()); assertEquals(-1, in.read()); - } finally { - in.close(); } - } finally { - is.close(); } } @Test public void testCOMPRESS131() throws Exception { final File input = getFile("COMPRESS-131.bz2"); - final InputStream is = new FileInputStream(input); - try { - final CompressorInputStream in = - new BZip2CompressorInputStream(is, true); - try { + try (InputStream is = new FileInputStream(input)) { + try (CompressorInputStream in = new BZip2CompressorInputStream(is, true)) { int l = 0; - while(in.read() != -1) { + while (in.read() != -1) { l++; } assertEquals(539, l); - } finally { - in.close(); } - } finally { - is.close(); } } diff -Nru libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/compressors/CompressorServiceLoaderTest.java libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/compressors/CompressorServiceLoaderTest.java --- libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/compressors/CompressorServiceLoaderTest.java 1970-01-01 00:00:00.000000000 +0000 +++ 
libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/compressors/CompressorServiceLoaderTest.java 2016-12-25 11:57:03.000000000 +0000 @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.commons.compress.compressors; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; + +import org.apache.commons.compress.compressors.TestCompressorStreamProvider.InvocationConfirmationException; +import org.junit.Test; + +public class CompressorServiceLoaderTest { + + @Test(expected = InvocationConfirmationException.class) + public void testInputStream() throws CompressorException { + new CompressorStreamFactory().createCompressorInputStream("TestInput1", new ByteArrayInputStream(new byte[] {})); + } + + @Test(expected = InvocationConfirmationException.class) + public void testOutputStream() throws CompressorException { + new CompressorStreamFactory().createCompressorOutputStream("TestOutput1", new ByteArrayOutputStream()); + } + +} diff -Nru libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/compressors/CompressorStreamFactoryRoundtripTest.java libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/compressors/CompressorStreamFactoryRoundtripTest.java --- libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/compressors/CompressorStreamFactoryRoundtripTest.java 1970-01-01 00:00:00.000000000 +0000 +++ libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/compressors/CompressorStreamFactoryRoundtripTest.java 2016-12-25 11:57:03.000000000 +0000 @@ -0,0 +1,75 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.commons.compress.compressors; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; + +import org.apache.commons.compress.utils.IOUtils; +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import org.junit.runners.Parameterized.Parameters; + +@RunWith(Parameterized.class) +public class CompressorStreamFactoryRoundtripTest { + + @Parameters(name = "{0}") + public static String[] data() { + return new String[] { // + CompressorStreamFactory.BZIP2, // + CompressorStreamFactory.DEFLATE, // + CompressorStreamFactory.GZIP, // + // CompressorStreamFactory.LZMA, // Not implemented yet + // CompressorStreamFactory.PACK200, // Bug + // CompressorStreamFactory.SNAPPY_FRAMED, // Not implemented yet + // CompressorStreamFactory.SNAPPY_RAW, // Not implemented yet + CompressorStreamFactory.XZ, // + // CompressorStreamFactory.Z, // Not implemented yet + }; + } + + private final String compressorName; + + public CompressorStreamFactoryRoundtripTest(final String compressorName) { + this.compressorName = compressorName; + } + + @Test + public void testCompressorStreamFactoryRoundtrip() throws Exception { + final CompressorStreamProvider factory = new CompressorStreamFactory(); + final ByteArrayOutputStream compressedOs = new ByteArrayOutputStream(); + final CompressorOutputStream compressorOutputStream = factory.createCompressorOutputStream(compressorName, + compressedOs); + final String fixture = "The quick brown fox jumps over the lazy dog"; + compressorOutputStream.write(fixture.getBytes("UTF-8")); + compressorOutputStream.flush(); + compressorOutputStream.close(); + final ByteArrayInputStream is = new ByteArrayInputStream(compressedOs.toByteArray()); + final CompressorInputStream compressorInputStream = factory.createCompressorInputStream(compressorName, is, false); + final ByteArrayOutputStream decompressedOs = new ByteArrayOutputStream(); + IOUtils.copy(compressorInputStream, decompressedOs); + compressorInputStream.close(); + decompressedOs.flush(); + decompressedOs.close(); + Assert.assertEquals(fixture, decompressedOs.toString("UTF-8")); + } + +} diff -Nru libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/compressors/deflate/DeflateCompressorInputStreamTest.java libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/compressors/deflate/DeflateCompressorInputStreamTest.java --- libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/compressors/deflate/DeflateCompressorInputStreamTest.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/compressors/deflate/DeflateCompressorInputStreamTest.java 2016-12-25 11:57:03.000000000 +0000 @@ -33,58 +33,46 @@ @Test public void availableShouldReturnNonZero() throws IOException { final File input = AbstractTestCase.getFile("bla.tar.deflatez"); - final InputStream is = new FileInputStream(input); - try { + try (InputStream is = new FileInputStream(input)) { final DeflateCompressorInputStream in = - new DeflateCompressorInputStream(is); + new DeflateCompressorInputStream(is); Assert.assertTrue(in.available() > 0); in.close(); - } finally { - is.close(); } } @Test public void shouldBeAbleToSkipAByte() throws IOException { final File input = AbstractTestCase.getFile("bla.tar.deflatez"); - final InputStream is = new FileInputStream(input); - try { + try (InputStream is = new FileInputStream(input)) { final 
DeflateCompressorInputStream in = - new DeflateCompressorInputStream(is); + new DeflateCompressorInputStream(is); Assert.assertEquals(1, in.skip(1)); in.close(); - } finally { - is.close(); } } @Test public void singleByteReadWorksAsExpected() throws IOException { final File input = AbstractTestCase.getFile("bla.tar.deflatez"); - final InputStream is = new FileInputStream(input); - try { + try (InputStream is = new FileInputStream(input)) { final DeflateCompressorInputStream in = - new DeflateCompressorInputStream(is); + new DeflateCompressorInputStream(is); // tar header starts with filename "test1.xml" Assert.assertEquals('t', in.read()); in.close(); - } finally { - is.close(); } } @Test public void singleByteReadReturnsMinusOneAtEof() throws IOException { final File input = AbstractTestCase.getFile("bla.tar.deflatez"); - final InputStream is = new FileInputStream(input); - try { + try (InputStream is = new FileInputStream(input)) { final DeflateCompressorInputStream in = - new DeflateCompressorInputStream(is); + new DeflateCompressorInputStream(is); IOUtils.toByteArray(in); Assert.assertEquals(-1, in.read()); in.close(); - } finally { - is.close(); } } diff -Nru libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/compressors/deflate/DeflateCompressorOutputStreamTest.java libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/compressors/deflate/DeflateCompressorOutputStreamTest.java --- libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/compressors/deflate/DeflateCompressorOutputStreamTest.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/compressors/deflate/DeflateCompressorOutputStreamTest.java 2016-12-25 11:57:03.000000000 +0000 @@ -28,13 +28,13 @@ @Test public void canReadASingleByteFlushAndFinish() throws IOException { - final ByteArrayOutputStream bos = new ByteArrayOutputStream(); - final DeflateCompressorOutputStream cos = new DeflateCompressorOutputStream(bos); - cos.write(99); - cos.flush(); - cos.finish(); - Assert.assertTrue(bos.toByteArray().length > 0); - cos.close(); + try (final ByteArrayOutputStream bos = new ByteArrayOutputStream(); + final DeflateCompressorOutputStream cos = new DeflateCompressorOutputStream(bos)) { + cos.write(99); + cos.flush(); + cos.finish(); + Assert.assertTrue(bos.toByteArray().length > 0); + } } } diff -Nru libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/compressors/DeflateTestCase.java libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/compressors/DeflateTestCase.java --- libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/compressors/DeflateTestCase.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/compressors/DeflateTestCase.java 2016-12-25 11:57:03.000000000 +0000 @@ -42,17 +42,11 @@ public void testDeflateCreation() throws Exception { final File input = getFile("test1.xml"); final File output = new File(dir, "test1.xml.deflatez"); - final OutputStream out = new FileOutputStream(output); - try { - final CompressorOutputStream cos = new CompressorStreamFactory() - .createCompressorOutputStream("deflate", out); // zlib header is used by default - try { + try (OutputStream out = new FileOutputStream(output)) { + try (CompressorOutputStream cos = new CompressorStreamFactory() + .createCompressorOutputStream("deflate", out)) { IOUtils.copy(new FileInputStream(input), cos); - } finally { - 
cos.close(); } - } finally { - out.close(); } } @@ -65,18 +59,12 @@ public void testRawDeflateCreation() throws Exception { final File input = getFile("test1.xml"); final File output = new File(dir, "test1.xml.deflate"); - final OutputStream out = new FileOutputStream(output); - try { + try (OutputStream out = new FileOutputStream(output)) { final DeflateParameters params = new DeflateParameters(); params.setWithZlibHeader(false); - final CompressorOutputStream cos = new DeflateCompressorOutputStream(out, params); - try { + try (CompressorOutputStream cos = new DeflateCompressorOutputStream(out, params)) { IOUtils.copy(new FileInputStream(input), cos); - } finally { - cos.close(); } - } finally { - out.close(); } } @@ -89,10 +77,9 @@ public void testDeflateUnarchive() throws Exception { final File input = getFile("bla.tar.deflatez"); final File output = new File(dir, "bla.tar"); - final InputStream is = new FileInputStream(input); - try { + try (InputStream is = new FileInputStream(input)) { final CompressorInputStream in = new CompressorStreamFactory() - .createCompressorInputStream("deflate", is); // zlib header is expected by default + .createCompressorInputStream("deflate", is); // zlib header is expected by default FileOutputStream out = null; try { out = new FileOutputStream(output); @@ -103,8 +90,6 @@ } in.close(); } - } finally { - is.close(); } } @@ -117,8 +102,7 @@ public void testRawDeflateUnarchive() throws Exception { final File input = getFile("bla.tar.deflate"); final File output = new File(dir, "bla.tar"); - final InputStream is = new FileInputStream(input); - try { + try (InputStream is = new FileInputStream(input)) { final DeflateParameters params = new DeflateParameters(); params.setWithZlibHeader(false); final CompressorInputStream in = new DeflateCompressorInputStream(is, params); @@ -132,8 +116,6 @@ } in.close(); } - } finally { - is.close(); } } } diff -Nru libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/compressors/FramedSnappyTestCase.java libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/compressors/FramedSnappyTestCase.java --- libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/compressors/FramedSnappyTestCase.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/compressors/FramedSnappyTestCase.java 2016-12-25 11:57:03.000000000 +0000 @@ -70,8 +70,7 @@ private void testUnarchive(final StreamWrapper wrapper) throws Exception { final File input = getFile("bla.tar.sz"); final File output = new File(dir, "bla.tar"); - final FileInputStream is = new FileInputStream(input); - try { + try (FileInputStream is = new FileInputStream(input)) { // the intermediate BufferedInputStream is there for mark // support in the autodetection test final CompressorInputStream in = wrapper.wrap(new BufferedInputStream(is)); @@ -86,21 +85,13 @@ } in.close(); } - } finally { - is.close(); } final File original = getFile("bla.tar"); - final FileInputStream written = new FileInputStream(output); - try { - final FileInputStream orig = new FileInputStream(original); - try { + try (FileInputStream written = new FileInputStream(output)) { + try (FileInputStream orig = new FileInputStream(original)) { assertArrayEquals(IOUtils.toByteArray(written), - IOUtils.toByteArray(orig)); - } finally { - orig.close(); + IOUtils.toByteArray(orig)); } - } finally { - written.close(); } } diff -Nru 
libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/compressors/GZipTestCase.java libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/compressors/GZipTestCase.java --- libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/compressors/GZipTestCase.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/compressors/GZipTestCase.java 2016-12-25 11:57:03.000000000 +0000 @@ -45,17 +45,11 @@ public void testGzipCreation() throws Exception { final File input = getFile("test1.xml"); final File output = new File(dir, "test1.xml.gz"); - final OutputStream out = new FileOutputStream(output); - try { - final CompressorOutputStream cos = new CompressorStreamFactory() - .createCompressorOutputStream("gz", out); - try { + try (OutputStream out = new FileOutputStream(output)) { + try (CompressorOutputStream cos = new CompressorStreamFactory() + .createCompressorOutputStream("gz", out)) { IOUtils.copy(new FileInputStream(input), cos); - } finally { - cos.close(); } - } finally { - out.close(); } } @@ -63,10 +57,9 @@ public void testGzipUnarchive() throws Exception { final File input = getFile("bla.tgz"); final File output = new File(dir, "bla.tar"); - final InputStream is = new FileInputStream(input); - try { + try (InputStream is = new FileInputStream(input)) { final CompressorInputStream in = new CompressorStreamFactory() - .createCompressorInputStream("gz", is); + .createCompressorInputStream("gz", is); FileOutputStream out = null; try { out = new FileOutputStream(output); @@ -77,46 +70,31 @@ } in.close(); } - } finally { - is.close(); } } @Test public void testConcatenatedStreamsReadFirstOnly() throws Exception { final File input = getFile("multiple.gz"); - final InputStream is = new FileInputStream(input); - try { - final CompressorInputStream in = new CompressorStreamFactory() - .createCompressorInputStream("gz", is); - try { + try (InputStream is = new FileInputStream(input)) { + try (CompressorInputStream in = new CompressorStreamFactory() + .createCompressorInputStream("gz", is)) { assertEquals('a', in.read()); assertEquals(-1, in.read()); - } finally { - in.close(); } - } finally { - is.close(); } } @Test public void testConcatenatedStreamsReadFully() throws Exception { final File input = getFile("multiple.gz"); - final InputStream is = new FileInputStream(input); - try { - final CompressorInputStream in = - new GzipCompressorInputStream(is, true); - try { + try (InputStream is = new FileInputStream(input)) { + try (CompressorInputStream in = new GzipCompressorInputStream(is, true)) { assertEquals('a', in.read()); assertEquals('b', in.read()); assertEquals(0, in.available()); assertEquals(-1, in.read()); - } finally { - in.close(); } - } finally { - is.close(); } } @@ -163,12 +141,9 @@ @Test public void testInteroperabilityWithGzipCompressorInputStream() throws Exception { - final FileInputStream fis = new FileInputStream(getFile("test3.xml")); byte[] content; - try { + try (FileInputStream fis = new FileInputStream(getFile("test3.xml"))) { content = IOUtils.toByteArray(fis); - } finally { - fis.close(); } final ByteArrayOutputStream bout = new ByteArrayOutputStream(); @@ -192,12 +167,9 @@ @Test public void testInteroperabilityWithGZIPInputStream() throws Exception { - final FileInputStream fis = new FileInputStream(getFile("test3.xml")); byte[] content; - try { + try (FileInputStream fis = new FileInputStream(getFile("test3.xml"))) { content = IOUtils.toByteArray(fis); - } 
finally { - fis.close(); } final ByteArrayOutputStream bout = new ByteArrayOutputStream(); @@ -238,12 +210,9 @@ } private void testExtraFlags(final int compressionLevel, final int flag) throws Exception { - final FileInputStream fis = new FileInputStream(getFile("test3.xml")); byte[] content; - try { + try (FileInputStream fis = new FileInputStream(getFile("test3.xml"))) { content = IOUtils.toByteArray(fis); - } finally { - fis.close(); } final ByteArrayOutputStream bout = new ByteArrayOutputStream(); @@ -295,13 +264,9 @@ parameters.setOperatingSystem(13); parameters.setFilename("test3.xml"); parameters.setComment("Umlaute möglich?"); - final GzipCompressorOutputStream out = new GzipCompressorOutputStream(bout, parameters); - final FileInputStream fis = new FileInputStream(getFile("test3.xml")); - try { + try (GzipCompressorOutputStream out = new GzipCompressorOutputStream(bout, parameters); FileInputStream fis = new FileInputStream(getFile("test3" + + ".xml"))) { IOUtils.copy(fis, out); - } finally { - fis.close(); - out.close(); } final GzipCompressorInputStream input = diff -Nru libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/compressors/LZMATestCase.java libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/compressors/LZMATestCase.java --- libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/compressors/LZMATestCase.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/compressors/LZMATestCase.java 2016-12-25 11:57:03.000000000 +0000 @@ -24,24 +24,45 @@ import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; +import java.io.OutputStream; import org.apache.commons.compress.AbstractTestCase; import org.apache.commons.compress.compressors.lzma.LZMACompressorInputStream; import org.apache.commons.compress.utils.IOUtils; +import org.junit.Assert; import org.junit.Test; public final class LZMATestCase extends AbstractTestCase { @Test + public void lzmaRoundtrip() throws Exception { + final File input = getFile("test1.xml"); + final File compressed = new File(dir, "test1.xml.xz"); + try (OutputStream out = new FileOutputStream(compressed)) { + try (CompressorOutputStream cos = new CompressorStreamFactory() + .createCompressorOutputStream("lzma", out)) { + IOUtils.copy(new FileInputStream(input), cos); + } + } + byte[] orig; + try (InputStream is = new FileInputStream(input)) { + orig = IOUtils.toByteArray(is); + } + byte[] uncompressed; + try (InputStream is = new FileInputStream(compressed); + CompressorInputStream in = new LZMACompressorInputStream(is)) { + uncompressed = IOUtils.toByteArray(in); + } + Assert.assertArrayEquals(orig, uncompressed); + } + + @Test public void testLZMAUnarchive() throws Exception { final File input = getFile("bla.tar.lzma"); final File output = new File(dir, "bla.tar"); - final InputStream is = new FileInputStream(input); - try { + try (InputStream is = new FileInputStream(input)) { final CompressorInputStream in = new LZMACompressorInputStream(is); copy(in, output); - } finally { - is.close(); } } @@ -49,13 +70,10 @@ public void testLZMAUnarchiveWithAutodetection() throws Exception { final File input = getFile("bla.tar.lzma"); final File output = new File(dir, "bla.tar"); - final InputStream is = new BufferedInputStream(new FileInputStream(input)); - try { + try (InputStream is = new BufferedInputStream(new FileInputStream(input))) { final CompressorInputStream in = new CompressorStreamFactory() - 
.createCompressorInputStream(is); + .createCompressorInputStream(is); copy(in, output); - } finally { - is.close(); } } diff -Nru libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/compressors/pack200/Pack200UtilsTest.java libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/compressors/pack200/Pack200UtilsTest.java --- libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/compressors/pack200/Pack200UtilsTest.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/compressors/pack200/Pack200UtilsTest.java 2016-12-25 11:57:03.000000000 +0000 @@ -40,10 +40,9 @@ try { Pack200Utils.normalize(input, output[1], new HashMap()); - final FileInputStream is = new FileInputStream(output[1]); - try { + try (FileInputStream is = new FileInputStream(output[1])) { final ArchiveInputStream in = new ArchiveStreamFactory() - .createArchiveInputStream("jar", is); + .createArchiveInputStream("jar", is); ArchiveEntry entry = in.getNextEntry(); while (entry != null) { @@ -61,8 +60,6 @@ } in.close(); - } finally { - is.close(); } } finally { output[1].delete(); diff -Nru libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/compressors/Pack200TestCase.java libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/compressors/Pack200TestCase.java --- libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/compressors/Pack200TestCase.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/compressors/Pack200TestCase.java 2016-12-25 11:57:03.000000000 +0000 @@ -67,13 +67,12 @@ private void jarUnarchiveAll(final boolean useFile, final Pack200Strategy mode) throws Exception { final File input = getFile("bla.pack"); - final InputStream is = useFile - ? new Pack200CompressorInputStream(input, mode) - : new Pack200CompressorInputStream(new FileInputStream(input), - mode); - try { + try (InputStream is = useFile + ? 
new Pack200CompressorInputStream(input, mode) + : new Pack200CompressorInputStream(new FileInputStream(input), + mode)) { final ArchiveInputStream in = new ArchiveStreamFactory() - .createArchiveInputStream("jar", is); + .createArchiveInputStream("jar", is); ArchiveEntry entry = in.getNextEntry(); while (entry != null) { @@ -91,8 +90,6 @@ } in.close(); - } finally { - is.close(); } } @@ -112,12 +109,10 @@ final File file1 = getFile("test1.xml"); final File file2 = getFile("test2.xml"); - final OutputStream out = - new Pack200CompressorOutputStream(new FileOutputStream(output), - mode); - try { + try (OutputStream out = new Pack200CompressorOutputStream(new FileOutputStream(output), + mode)) { final ArchiveOutputStream os = new ArchiveStreamFactory() - .createArchiveOutputStream("jar", out); + .createArchiveOutputStream("jar", out); os.putArchiveEntry(new ZipArchiveEntry("testdata/test1.xml")); IOUtils.copy(new FileInputStream(file1), os); @@ -128,68 +123,52 @@ os.closeArchiveEntry(); os.close(); - } finally { - out.close(); } - final InputStream is = new Pack200CompressorInputStream(output); - try { + try (InputStream is = new Pack200CompressorInputStream(output)) { final ArchiveInputStream in = new ArchiveStreamFactory() - .createArchiveInputStream("jar", is); - final List files = new ArrayList(); + .createArchiveInputStream("jar", is); + final List files = new ArrayList<>(); files.add("testdata/test1.xml"); files.add("testdata/test2.xml"); checkArchiveContent(in, files); in.close(); - } finally { - is.close(); } } @Test public void testGoodSignature() throws Exception { - final InputStream is = new FileInputStream(getFile("bla.pack")); - try { + try (InputStream is = new FileInputStream(getFile("bla.pack"))) { final byte[] sig = new byte[4]; is.read(sig); assertTrue(Pack200CompressorInputStream.matches(sig, 4)); - } finally { - is.close(); } } @Test public void testBadSignature() throws Exception { - final InputStream is = new FileInputStream(getFile("bla.jar")); - try { + try (InputStream is = new FileInputStream(getFile("bla.jar"))) { final byte[] sig = new byte[4]; is.read(sig); assertFalse(Pack200CompressorInputStream.matches(sig, 4)); - } finally { - is.close(); } } @Test public void testShortSignature() throws Exception { - final InputStream is = new FileInputStream(getFile("bla.pack")); - try { + try (InputStream is = new FileInputStream(getFile("bla.pack"))) { final byte[] sig = new byte[2]; is.read(sig); assertFalse(Pack200CompressorInputStream.matches(sig, 2)); - } finally { - is.close(); } } @Test public void testInputStreamMethods() throws Exception { - final Map m = new HashMap(); + final Map m = new HashMap<>(); m.put("foo", "bar"); - final InputStream is = - new Pack200CompressorInputStream(new FileInputStream(getFile("bla.jar")), - m); - try { + try (InputStream is = new Pack200CompressorInputStream(new FileInputStream(getFile("bla.jar")), + m)) { // packed file is a jar, which is a zip so it starts with // a local file header assertTrue(is.markSupported()); @@ -204,24 +183,19 @@ is.reset(); assertEquals(0x50, is.read()); assertTrue(is.available() > 0); - } finally { - is.close(); } } @Test public void testOutputStreamMethods() throws Exception { final File output = new File(dir, "bla.pack"); - final Map m = new HashMap(); + final Map m = new HashMap<>(); m.put("foo", "bar"); - final OutputStream out = new FileOutputStream(output); - try { + try (OutputStream out = new FileOutputStream(output)) { final OutputStream os = new Pack200CompressorOutputStream(out, m); 
os.write(1); os.write(new byte[] { 2, 3 }); os.close(); - } finally { - out.close(); } } } diff -Nru libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/compressors/snappy/FramedSnappyCompressorInputStreamTest.java libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/compressors/snappy/FramedSnappyCompressorInputStreamTest.java --- libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/compressors/snappy/FramedSnappyCompressorInputStreamTest.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/compressors/snappy/FramedSnappyCompressorInputStreamTest.java 2016-12-25 11:57:03.000000000 +0000 @@ -41,11 +41,8 @@ assertFalse(FramedSnappyCompressorInputStream.matches(new byte[10], 10)); final byte[] b = new byte[12]; final File input = getFile("bla.tar.sz"); - final FileInputStream in = new FileInputStream(input); - try { + try (FileInputStream in = new FileInputStream(input)) { IOUtils.readFully(in, b); - } finally { - in.close(); } assertFalse(FramedSnappyCompressorInputStream.matches(b, 9)); assertTrue(FramedSnappyCompressorInputStream.matches(b, 10)); @@ -57,10 +54,9 @@ */ @Test public void testLoremIpsum() throws Exception { - final FileInputStream isSz = new FileInputStream(getFile("lorem-ipsum.txt.sz")); final File outputSz = new File(dir, "lorem-ipsum.1"); final File outputGz = new File(dir, "lorem-ipsum.2"); - try { + try (FileInputStream isSz = new FileInputStream(getFile("lorem-ipsum.txt.sz"))) { InputStream in = new FramedSnappyCompressorInputStream(isSz); FileOutputStream out = null; try { @@ -72,8 +68,7 @@ } in.close(); } - final FileInputStream isGz = new FileInputStream(getFile("lorem-ipsum.txt.gz")); - try { + try (FileInputStream isGz = new FileInputStream(getFile("lorem-ipsum.txt.gz"))) { in = new GzipCompressorInputStream(isGz); try { out = new FileOutputStream(outputGz); @@ -84,37 +79,24 @@ } in.close(); } - } finally { - isGz.close(); } - } finally { - isSz.close(); } - final FileInputStream sz = new FileInputStream(outputSz); - try { - final FileInputStream gz = new FileInputStream(outputGz); - try { + try (FileInputStream sz = new FileInputStream(outputSz)) { + try (FileInputStream gz = new FileInputStream(outputGz)) { assertArrayEquals(IOUtils.toByteArray(sz), - IOUtils.toByteArray(gz)); - } finally { - gz.close(); + IOUtils.toByteArray(gz)); } - } finally { - sz.close(); } } @Test public void testRemainingChunkTypes() throws Exception { - final FileInputStream isSz = new FileInputStream(getFile("mixed.txt.sz")); final ByteArrayOutputStream out = new ByteArrayOutputStream(); - try { + try (FileInputStream isSz = new FileInputStream(getFile("mixed.txt.sz"))) { final FramedSnappyCompressorInputStream in = new FramedSnappyCompressorInputStream(isSz); IOUtils.copy(in, out); out.close(); - } finally { - isSz.close(); } assertArrayEquals(new byte[] { '1', '2', '3', '4', @@ -130,8 +112,7 @@ @Test public void testAvailable() throws Exception { - final FileInputStream isSz = new FileInputStream(getFile("mixed.txt.sz")); - try { + try (FileInputStream isSz = new FileInputStream(getFile("mixed.txt.sz"))) { final FramedSnappyCompressorInputStream in = new FramedSnappyCompressorInputStream(isSz); assertEquals(0, in.available()); // no chunk read so far assertEquals('1', in.read()); @@ -143,8 +124,6 @@ assertEquals('5', in.read()); assertEquals(19, in.available()); // remainder of copy in.close(); - } finally { - isSz.close(); } } @@ -173,12 +152,10 @@ @Test public 
void readIWAFile() throws Exception { - final ZipFile zip = new ZipFile(getFile("testNumbersNew.numbers")); - try { - InputStream is = zip.getInputStream(zip.getEntry("Index/Document.iwa")); - try { + try (ZipFile zip = new ZipFile(getFile("testNumbersNew.numbers"))) { + try (InputStream is = zip.getInputStream(zip.getEntry("Index/Document.iwa"))) { final FramedSnappyCompressorInputStream in = - new FramedSnappyCompressorInputStream(is, FramedSnappyDialect.IWORK_ARCHIVE); + new FramedSnappyCompressorInputStream(is, FramedSnappyDialect.IWORK_ARCHIVE); FileOutputStream out = null; try { out = new FileOutputStream(new File(dir, "snappyIWATest.raw")); @@ -189,11 +166,7 @@ } in.close(); } - } finally { - is.close(); } - } finally { - zip.close(); } } diff -Nru libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/compressors/TestCompressorStreamProvider.java libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/compressors/TestCompressorStreamProvider.java --- libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/compressors/TestCompressorStreamProvider.java 1970-01-01 00:00:00.000000000 +0000 +++ libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/compressors/TestCompressorStreamProvider.java 2016-12-25 11:57:03.000000000 +0000 @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.commons.compress.compressors; + +import java.io.InputStream; +import java.io.OutputStream; +import java.util.Set; + +import org.apache.commons.compress.utils.Sets; + +public class TestCompressorStreamProvider implements CompressorStreamProvider { + + public static final class InvocationConfirmationException extends CompressorException { + + private static final long serialVersionUID = 1L; + + public InvocationConfirmationException(final String message) { + super(message); + } + } + + @Override + public CompressorInputStream createCompressorInputStream(final String name, final InputStream in, + final boolean decompressConcatenated) throws CompressorException { + throw new InvocationConfirmationException(name); + } + + @Override + public CompressorOutputStream createCompressorOutputStream(final String name, final OutputStream out) + throws CompressorException { + throw new InvocationConfirmationException(name); + } + + @Override + public Set getInputStreamCompressorNames() { + return Sets.newHashSet("TestInput1"); + } + + @Override + public Set getOutputStreamCompressorNames() { + return Sets.newHashSet("TestOutput1"); + } + +} diff -Nru libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/compressors/XZTestCase.java libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/compressors/XZTestCase.java --- libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/compressors/XZTestCase.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/compressors/XZTestCase.java 2016-12-25 11:57:03.000000000 +0000 @@ -40,17 +40,11 @@ System.out.println("XZTestCase: HeapMax="+max+" bytes "+(double)max/(1024*1024)+" MB"); final File input = getFile("test1.xml"); final File output = new File(dir, "test1.xml.xz"); - final OutputStream out = new FileOutputStream(output); - try { - final CompressorOutputStream cos = new CompressorStreamFactory() - .createCompressorOutputStream("xz", out); - try { + try (OutputStream out = new FileOutputStream(output)) { + try (CompressorOutputStream cos = new CompressorStreamFactory() + .createCompressorOutputStream("xz", out)) { IOUtils.copy(new FileInputStream(input), cos); - } finally { - cos.close(); } - } finally { - out.close(); } } @@ -58,10 +52,9 @@ public void testXZUnarchive() throws Exception { final File input = getFile("bla.tar.xz"); final File output = new File(dir, "bla.tar"); - final InputStream is = new FileInputStream(input); - try { + try (InputStream is = new FileInputStream(input)) { final CompressorInputStream in = new CompressorStreamFactory() - .createCompressorInputStream("xz", is); + .createCompressorInputStream("xz", is); FileOutputStream out = null; try { out = new FileOutputStream(output); @@ -72,46 +65,31 @@ } in.close(); } - } finally { - is.close(); } } @Test public void testConcatenatedStreamsReadFirstOnly() throws Exception { final File input = getFile("multiple.xz"); - final InputStream is = new FileInputStream(input); - try { - final CompressorInputStream in = new CompressorStreamFactory() - .createCompressorInputStream("xz", is); - try { + try (InputStream is = new FileInputStream(input)) { + try (CompressorInputStream in = new CompressorStreamFactory() + .createCompressorInputStream("xz", is)) { assertEquals('a', in.read()); assertEquals(-1, in.read()); - } finally { - in.close(); } - } finally { - is.close(); } } @Test public void testConcatenatedStreamsReadFully() throws Exception { final File input = 
getFile("multiple.xz"); - final InputStream is = new FileInputStream(input); - try { - final CompressorInputStream in = - new XZCompressorInputStream(is, true); - try { + try (InputStream is = new FileInputStream(input)) { + try (CompressorInputStream in = new XZCompressorInputStream(is, true)) { assertEquals('a', in.read()); assertEquals('b', in.read()); assertEquals(0, in.available()); assertEquals(-1, in.read()); - } finally { - in.close(); } - } finally { - is.close(); } } } diff -Nru libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/compressors/ZTestCase.java libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/compressors/ZTestCase.java --- libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/compressors/ZTestCase.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/compressors/ZTestCase.java 2016-12-25 11:57:03.000000000 +0000 @@ -81,8 +81,7 @@ private void testUnarchive(final StreamWrapper wrapper) throws Exception { final File input = getFile("bla.tar.Z"); final File output = new File(dir, "bla.tar"); - final InputStream is = new FileInputStream(input); - try { + try (InputStream is = new FileInputStream(input)) { final InputStream in = wrapper.wrap(is); FileOutputStream out = null; try { @@ -94,8 +93,6 @@ } in.close(); } - } finally { - is.close(); } } diff -Nru libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/utils/BitInputStreamTest.java libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/utils/BitInputStreamTest.java --- libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/utils/BitInputStreamTest.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/utils/BitInputStreamTest.java 2016-12-25 11:57:03.000000000 +0000 @@ -30,92 +30,145 @@ @Test(expected = IllegalArgumentException.class) public void shouldNotAllowReadingOfANegativeAmountOfBits() throws IOException { - final BitInputStream bis = new BitInputStream(getStream(), ByteOrder.LITTLE_ENDIAN); - bis.readBits(-1); - bis.close(); + try (final BitInputStream bis = new BitInputStream(getStream(), ByteOrder.LITTLE_ENDIAN)) { + bis.readBits(-1); + } } @Test(expected = IllegalArgumentException.class) public void shouldNotAllowReadingOfMoreThan63BitsAtATime() throws IOException { - final BitInputStream bis = new BitInputStream(getStream(), ByteOrder.LITTLE_ENDIAN); - bis.readBits(64); - bis.close(); + try (final BitInputStream bis = new BitInputStream(getStream(), ByteOrder.LITTLE_ENDIAN)) { + bis.readBits(64); + } } @Test public void testReading24BitsInLittleEndian() throws IOException { - final BitInputStream bis = new BitInputStream(getStream(), ByteOrder.LITTLE_ENDIAN); - assertEquals(0x000140f8, bis.readBits(24)); - bis.close(); + try (final BitInputStream bis = new BitInputStream(getStream(), ByteOrder.LITTLE_ENDIAN)) { + assertEquals(0x000140f8, bis.readBits(24)); + } } @Test public void testReading24BitsInBigEndian() throws IOException { - final BitInputStream bis = new BitInputStream(getStream(), ByteOrder.BIG_ENDIAN); - assertEquals(0x00f84001, bis.readBits(24)); - bis.close(); + try (final BitInputStream bis = new BitInputStream(getStream(), ByteOrder.BIG_ENDIAN)) { + assertEquals(0x00f84001, bis.readBits(24)); + } } @Test public void testReading17BitsInLittleEndian() throws IOException { - final BitInputStream bis = new BitInputStream(getStream(), ByteOrder.LITTLE_ENDIAN); - 
assertEquals(0x000140f8, bis.readBits(17)); - bis.close(); + try (final BitInputStream bis = new BitInputStream(getStream(), ByteOrder.LITTLE_ENDIAN)) { + assertEquals(0x000140f8, bis.readBits(17)); + } } @Test public void testReading17BitsInBigEndian() throws IOException { - final BitInputStream bis = new BitInputStream(getStream(), ByteOrder.BIG_ENDIAN); - // 1-11110000-10000000 - assertEquals(0x0001f080, bis.readBits(17)); - bis.close(); + try (final BitInputStream bis = new BitInputStream(getStream(), ByteOrder.BIG_ENDIAN)) { + // 1-11110000-10000000 + assertEquals(0x0001f080, bis.readBits(17)); + } } @Test public void testReading30BitsInLittleEndian() throws IOException { - final BitInputStream bis = new BitInputStream(getStream(), ByteOrder.LITTLE_ENDIAN); - assertEquals(0x2f0140f8, bis.readBits(30)); - bis.close(); + try (final BitInputStream bis = new BitInputStream(getStream(), ByteOrder.LITTLE_ENDIAN)) { + assertEquals(0x2f0140f8, bis.readBits(30)); + } } @Test public void testReading30BitsInBigEndian() throws IOException { - final BitInputStream bis = new BitInputStream(getStream(), ByteOrder.BIG_ENDIAN); - // 111110-00010000-00000000-01001011 - assertEquals(0x3e10004b, bis.readBits(30)); - bis.close(); + try (final BitInputStream bis = new BitInputStream(getStream(), ByteOrder.BIG_ENDIAN)) { + // 111110-00010000-00000000-01001011 + assertEquals(0x3e10004b, bis.readBits(30)); + } } @Test public void testReading31BitsInLittleEndian() throws IOException { - final BitInputStream bis = new BitInputStream(getStream(), ByteOrder.LITTLE_ENDIAN); - assertEquals(0x2f0140f8, bis.readBits(31)); - bis.close(); + try (final BitInputStream bis = new BitInputStream(getStream(), ByteOrder.LITTLE_ENDIAN)) { + assertEquals(0x2f0140f8, bis.readBits(31)); + } } @Test public void testReading31BitsInBigEndian() throws IOException { - final BitInputStream bis = new BitInputStream(getStream(), ByteOrder.BIG_ENDIAN); - // 1111100-00100000-00000000-10010111 - assertEquals(0x7c200097, bis.readBits(31)); - bis.close(); + try (final BitInputStream bis = new BitInputStream(getStream(), ByteOrder.BIG_ENDIAN)) { + // 1111100-00100000-00000000-10010111 + assertEquals(0x7c200097, bis.readBits(31)); + } } @Test public void testClearBitCache() throws IOException { - final BitInputStream bis = new BitInputStream(getStream(), ByteOrder.LITTLE_ENDIAN); - assertEquals(0x08, bis.readBits(4)); - bis.clearBitCache(); - assertEquals(0, bis.readBits(1)); - bis.close(); + try (final BitInputStream bis = new BitInputStream(getStream(), ByteOrder.LITTLE_ENDIAN)) { + assertEquals(0x08, bis.readBits(4)); + bis.clearBitCache(); + assertEquals(0, bis.readBits(1)); + } } @Test public void testEOF() throws IOException { - final BitInputStream bis = new BitInputStream(getStream(), ByteOrder.LITTLE_ENDIAN); - assertEquals(0x2f0140f8, bis.readBits(30)); - assertEquals(-1, bis.readBits(3)); - bis.close(); + try (final BitInputStream bis = new BitInputStream(getStream(), ByteOrder.LITTLE_ENDIAN)) { + assertEquals(0x2f0140f8, bis.readBits(30)); + assertEquals(-1, bis.readBits(3)); + } + } + + /** + * @see "https://issues.apache.org/jira/browse/COMPRESS-363" + */ + @Test + public void littleEndianWithOverflow() throws Exception { + ByteArrayInputStream in = new ByteArrayInputStream(new byte[] { + 87, // 01010111 + 45, // 00101101 + 66, // 01000010 + 15, // 00001111 + 90, // 01011010 + 29, // 00011101 + 88, // 01011000 + 61, // 00111101 + 33, // 00100001 + 74 // 01001010 + }); + try (final BitInputStream bin = new BitInputStream(in, 
ByteOrder.LITTLE_ENDIAN)) { + assertEquals(23, // 10111 + bin.readBits(5)); + assertEquals(714595605644185962l, // 0001-00111101-01011000-00011101-01011010-00001111-01000010-00101101-010 + bin.readBits(63)); + assertEquals(1186, // 01001010-0010 + bin.readBits(12)); + assertEquals(-1 , bin.readBits(1)); + } + } + + @Test + public void bigEndianWithOverflow() throws Exception { + ByteArrayInputStream in = new ByteArrayInputStream(new byte[] { + 87, // 01010111 + 45, // 00101101 + 66, // 01000010 + 15, // 00001111 + 90, // 01011010 + 29, // 00011101 + 88, // 01011000 + 61, // 00111101 + 33, // 00100001 + 74 // 01001010 + }); + try (final BitInputStream bin = new BitInputStream(in, ByteOrder.BIG_ENDIAN)) { + assertEquals(10, // 01010 + bin.readBits(5)); + assertEquals(8274274654740644818l, //111-00101101-01000010-00001111-01011010-00011101-01011000-00111101-0010 + bin.readBits(63)); + assertEquals(330, // 0001-01001010 + bin.readBits(12)); + assertEquals(-1 , bin.readBits(1)); + } } private ByteArrayInputStream getStream() { diff -Nru libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/utils/CountingStreamTest.java libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/utils/CountingStreamTest.java --- libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/utils/CountingStreamTest.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/utils/CountingStreamTest.java 2016-12-25 11:57:03.000000000 +0000 @@ -33,18 +33,18 @@ // I don't like "test all at once" tests either, but the class // is so trivial final ByteArrayOutputStream bos = new ByteArrayOutputStream(); - final CountingOutputStream o = new CountingOutputStream(bos); - o.write(1); - assertEquals(1, o.getBytesWritten()); - o.write(new byte[] { 2, 3 }); - assertEquals(3, o.getBytesWritten()); - o.write(new byte[] { 2, 3, 4, 5, }, 2, 1); - assertEquals(4, o.getBytesWritten()); - o.count(-1); - assertEquals(4, o.getBytesWritten()); - o.count(-2); - assertEquals(2, o.getBytesWritten()); - o.close(); + try (final CountingOutputStream o = new CountingOutputStream(bos)) { + o.write(1); + assertEquals(1, o.getBytesWritten()); + o.write(new byte[] { 2, 3 }); + assertEquals(3, o.getBytesWritten()); + o.write(new byte[] { 2, 3, 4, 5, }, 2, 1); + assertEquals(4, o.getBytesWritten()); + o.count(-1); + assertEquals(4, o.getBytesWritten()); + o.count(-2); + assertEquals(2, o.getBytesWritten()); + } assertArrayEquals(new byte[] { 1, 2, 3, 4 }, bos.toByteArray()); } @@ -54,22 +54,22 @@ // is so trivial final ByteArrayInputStream bis = new ByteArrayInputStream(new byte[] { 1, 2, 3, 4 }); - final CountingInputStream i = new CountingInputStream(bis); - assertEquals(1, i.read()); - assertEquals(1, i.getBytesRead()); - byte[] b = new byte[2]; - i.read(b); - assertEquals(3, i.getBytesRead()); - assertArrayEquals(new byte[] { 2, 3 }, b); - b = new byte[3]; - i.read(b, 1, 1); - assertArrayEquals(new byte[] { 0, 4, 0 }, b); - assertEquals(4, i.getBytesRead()); - i.count(-1); - assertEquals(4, i.getBytesRead()); - i.count(-2); - assertEquals(2, i.getBytesRead()); - i.close(); + try (final CountingInputStream i = new CountingInputStream(bis)) { + assertEquals(1, i.read()); + assertEquals(1, i.getBytesRead()); + byte[] b = new byte[2]; + i.read(b); + assertEquals(3, i.getBytesRead()); + assertArrayEquals(new byte[] { 2, 3 }, b); + b = new byte[3]; + i.read(b, 1, 1); + assertArrayEquals(new byte[] { 0, 4, 0 }, b); + assertEquals(4, i.getBytesRead()); + 
i.count(-1); + assertEquals(4, i.getBytesRead()); + i.count(-2); + assertEquals(2, i.getBytesRead()); + } } } diff -Nru libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/utils/IOUtilsTest.java libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/utils/IOUtilsTest.java --- libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/utils/IOUtilsTest.java 2016-06-18 15:07:49.000000000 +0000 +++ libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/utils/IOUtilsTest.java 2016-12-25 11:57:03.000000000 +0000 @@ -18,9 +18,12 @@ package org.apache.commons.compress.utils; import java.io.ByteArrayInputStream; +import java.io.EOFException; import java.io.FilterInputStream; import java.io.InputStream; import java.io.IOException; +import java.nio.ByteBuffer; +import java.nio.channels.ReadableByteChannel; import org.junit.Assert; import org.junit.Test; @@ -77,6 +80,47 @@ }); } + @Test + public void readFullyOnChannelReadsFully() throws IOException { + ByteBuffer b = ByteBuffer.allocate(20); + final byte[] source = new byte[20]; + for (byte i = 0; i < 20; i++) { + source[i] = i; + } + readFully(source, b); + Assert.assertArrayEquals(source, b.array()); + } + + @Test(expected = EOFException.class) + public void readFullyOnChannelThrowsEof() throws IOException { + ByteBuffer b = ByteBuffer.allocate(21); + final byte[] source = new byte[20]; + for (byte i = 0; i < 20; i++) { + source[i] = i; + } + readFully(source, b); + } + + private static void readFully(final byte[] source, ByteBuffer b) throws IOException { + IOUtils.readFully(new ReadableByteChannel() { + private int idx; + @Override + public int read(ByteBuffer buf) { + if (idx >= source.length) { + return -1; + } + buf.put(source[idx++]); + return 1; + } + @Override + public void close() { } + @Override + public boolean isOpen() { + return true; + } + }, b); + } + private void skip(final StreamWrapper wrapper) throws Exception { final ByteArrayInputStream in = new ByteArrayInputStream(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 @@ -85,5 +129,4 @@ Assert.assertEquals(10, IOUtils.skip(sut, 10)); Assert.assertEquals(11, sut.read()); } - } diff -Nru libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/utils/SeekableInMemoryByteChannelTest.java libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/utils/SeekableInMemoryByteChannelTest.java --- libcommons-compress-java-1.12/src/test/java/org/apache/commons/compress/utils/SeekableInMemoryByteChannelTest.java 1970-01-01 00:00:00.000000000 +0000 +++ libcommons-compress-java-1.13/src/test/java/org/apache/commons/compress/utils/SeekableInMemoryByteChannelTest.java 2016-12-25 11:57:03.000000000 +0000 @@ -0,0 +1,192 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.utils; + +import org.junit.Test; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.nio.channels.ClosedChannelException; +import java.nio.charset.Charset; +import java.util.Arrays; + +import static org.apache.commons.compress.utils.CharsetNames.UTF_8; +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; + +public class SeekableInMemoryByteChannelTest { + + private final byte[] testData = "Some data".getBytes(Charset.forName(UTF_8)); + + @Test + public void shouldReadContentsProperly() throws IOException { + //given + SeekableInMemoryByteChannel c = new SeekableInMemoryByteChannel(testData); + ByteBuffer readBuffer = ByteBuffer.allocate(testData.length); + //when + int readCount = c.read(readBuffer); + //then + assertEquals(testData.length, readCount); + assertArrayEquals(testData, readBuffer.array()); + assertEquals(testData.length, c.position()); + } + + @Test + public void shouldReadContentsWhenBiggerBufferSupplied() throws IOException { + //given + SeekableInMemoryByteChannel c = new SeekableInMemoryByteChannel(testData); + ByteBuffer readBuffer = ByteBuffer.allocate(testData.length + 1); + //when + int readCount = c.read(readBuffer); + //then + assertEquals(testData.length, readCount); + assertArrayEquals(testData, Arrays.copyOf(readBuffer.array(), testData.length)); + assertEquals(testData.length, c.position()); + } + + @Test + public void shouldReadDataFromSetPosition() throws IOException { + //given + SeekableInMemoryByteChannel c = new SeekableInMemoryByteChannel(testData); + ByteBuffer readBuffer = ByteBuffer.allocate(4); + //when + c.position(5L); + int readCount = c.read(readBuffer); + //then + assertEquals(4L, readCount); + assertEquals("data", new String(readBuffer.array(), Charset.forName(UTF_8))); + assertEquals(testData.length, c.position()); + } + + @Test + public void shouldSignalEOFWhenPositionAtTheEnd() throws IOException { + //given + SeekableInMemoryByteChannel c = new SeekableInMemoryByteChannel(testData); + ByteBuffer readBuffer = ByteBuffer.allocate(testData.length); + //when + c.position(testData.length + 1); + int readCount = c.read(readBuffer); + //then + assertEquals(0L, readBuffer.position()); + assertEquals(-1, readCount); + } + + @Test(expected = ClosedChannelException.class) + public void shouldThrowExceptionOnReadingClosedChannel() throws IOException { + //given + SeekableInMemoryByteChannel c = new SeekableInMemoryByteChannel(); + //when + c.close(); + c.read(ByteBuffer.allocate(1)); + } + + @Test + public void shouldWriteDataProperly() throws IOException { + //given + SeekableInMemoryByteChannel c = new SeekableInMemoryByteChannel(); + ByteBuffer inData = ByteBuffer.wrap(testData); + //when + int writeCount = c.write(inData); + //then + assertEquals(testData.length, writeCount); + assertArrayEquals(testData, Arrays.copyOf(c.array(), (int) c.size())); + assertEquals(testData.length, c.position()); + } + + @Test + public void shouldWriteDataProperlyAfterPositionSet() throws IOException { + //given + SeekableInMemoryByteChannel c = new SeekableInMemoryByteChannel(testData); + ByteBuffer inData = ByteBuffer.wrap(testData); + ByteBuffer expectedData = ByteBuffer.allocate(testData.length + 5).put(testData, 0, 5).put(testData); + //when + c.position(5L); + int writeCount = c.write(inData); + + //then + assertEquals(testData.length, writeCount); + 
assertArrayEquals(expectedData.array(), Arrays.copyOf(c.array(), (int) c.size()));
+        assertEquals(testData.length + 5, c.position());
+    }
+
+
+    @Test(expected = ClosedChannelException.class)
+    public void shouldThrowExceptionOnWritingToClosedChannel() throws IOException {
+        //given
+        SeekableInMemoryByteChannel c = new SeekableInMemoryByteChannel();
+        //when
+        c.close();
+        c.write(ByteBuffer.allocate(1));
+    }
+
+    @Test
+    public void shouldTruncateContentsProperly() throws IOException {
+        //given
+        SeekableInMemoryByteChannel c = new SeekableInMemoryByteChannel(testData);
+        //when
+        c.truncate(4);
+        //then
+        byte[] bytes = Arrays.copyOf(c.array(), (int) c.size());
+        assertEquals("Some", new String(bytes, Charset.forName(UTF_8)));
+    }
+
+    @Test
+    public void shouldSetProperPositionOnTruncate() throws IOException {
+        //given
+        SeekableInMemoryByteChannel c = new SeekableInMemoryByteChannel(testData);
+        //when
+        c.position(testData.length);
+        c.truncate(4L);
+        //then
+        assertEquals(4L, c.position());
+        assertEquals(4L, c.size());
+    }
+
+    @Test
+    public void shouldSetProperPosition() throws IOException {
+        //given
+        SeekableInMemoryByteChannel c = new SeekableInMemoryByteChannel(testData);
+        //when
+        long posAtFour = c.position(4L).position();
+        long posAtTheEnd = c.position(testData.length).position();
+        long posPastTheEnd = c.position(testData.length + 1L).position();
+        //then
+        assertEquals(4L, posAtFour);
+        assertEquals(c.size(), posAtTheEnd);
+        assertEquals(testData.length + 1L, posPastTheEnd);
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void shouldThrowExceptionWhenSettingIncorrectPosition() throws IOException {
+        //given
+        SeekableInMemoryByteChannel c = new SeekableInMemoryByteChannel();
+        //when
+        c.position(Integer.MAX_VALUE + 1L);
+    }
+
+    @Test(expected = ClosedChannelException.class)
+    public void shouldThrowExceptionWhenSettingPositionOnClosedChannel() throws IOException {
+        //given
+        SeekableInMemoryByteChannel c = new SeekableInMemoryByteChannel();
+        //when
+        c.close();
+        c.position(1L);
+    }
+
+}
Binary files /tmp/tmpE9fw78/mu_PbHQI2M/libcommons-compress-java-1.12/src/test/resources/archive_with_bytes_after_data.zip and /tmp/tmpE9fw78/vHGfrbD63y/libcommons-compress-java-1.13/src/test/resources/archive_with_bytes_after_data.zip differ
diff -Nru libcommons-compress-java-1.12/src/test/resources/invalid-zip.zip libcommons-compress-java-1.13/src/test/resources/invalid-zip.zip
--- libcommons-compress-java-1.12/src/test/resources/invalid-zip.zip 1970-01-01 00:00:00.000000000 +0000
+++ libcommons-compress-java-1.13/src/test/resources/invalid-zip.zip 2016-12-25 11:57:03.000000000 +0000
@@ -0,0 +1,2 @@
+This is not really a valid zip file even though it has the zip extension.
+ZipArchiveInputStream.getNextZipEntry() should throw an exception.
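Editor's note: the new SeekableInMemoryByteChannelTest above exercises the channel's read, write, truncate and position behaviour. The following stand-alone sketch is not part of the package diff (the class name is made up); it simply drives the same API the test covers, writing into the in-memory channel and reading the bytes back:

    import java.nio.ByteBuffer;
    import java.nio.charset.StandardCharsets;

    import org.apache.commons.compress.utils.SeekableInMemoryByteChannel;

    // Hypothetical example, not shipped with commons-compress.
    public class InMemoryChannelExample {
        public static void main(String[] args) throws Exception {
            try (SeekableInMemoryByteChannel channel = new SeekableInMemoryByteChannel()) {
                // The channel is backed by a growing byte array; no file is involved.
                channel.write(ByteBuffer.wrap("Some data".getBytes(StandardCharsets.UTF_8)));
                // Rewind and read everything back through the same channel.
                channel.position(0);
                ByteBuffer buffer = ByteBuffer.allocate((int) channel.size());
                channel.read(buffer);
                System.out.println(new String(buffer.array(), StandardCharsets.UTF_8));
            }
        }
    }

Because the class implements java.nio.channels.SeekableByteChannel, the same object can be passed to any consumer of that interface.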
diff -Nru libcommons-compress-java-1.12/src/test/resources/META-INF/services/org.apache.commons.compress.archivers.ArchiveStreamProvider libcommons-compress-java-1.13/src/test/resources/META-INF/services/org.apache.commons.compress.archivers.ArchiveStreamProvider --- libcommons-compress-java-1.12/src/test/resources/META-INF/services/org.apache.commons.compress.archivers.ArchiveStreamProvider 1970-01-01 00:00:00.000000000 +0000 +++ libcommons-compress-java-1.13/src/test/resources/META-INF/services/org.apache.commons.compress.archivers.ArchiveStreamProvider 2016-12-25 11:57:03.000000000 +0000 @@ -0,0 +1 @@ +org.apache.commons.compress.archivers.TestArchiveStreamProvider diff -Nru libcommons-compress-java-1.12/src/test/resources/META-INF/services/org.apache.commons.compress.compressors.CompressorStreamProvider libcommons-compress-java-1.13/src/test/resources/META-INF/services/org.apache.commons.compress.compressors.CompressorStreamProvider --- libcommons-compress-java-1.12/src/test/resources/META-INF/services/org.apache.commons.compress.compressors.CompressorStreamProvider 1970-01-01 00:00:00.000000000 +0000 +++ libcommons-compress-java-1.13/src/test/resources/META-INF/services/org.apache.commons.compress.compressors.CompressorStreamProvider 2016-12-25 11:57:03.000000000 +0000 @@ -0,0 +1 @@ +org.apache.commons.compress.compressors.TestCompressorStreamProvider
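Editor's note: the two META-INF/services files above are standard JRE ServiceLoader registrations; each file lists, one fully-qualified class name per line, an implementation of the provider interface the file is named after. As an illustration only (not part of the diff; the example class name is made up), such registrations can be discovered at runtime like this:

    import java.util.ServiceLoader;

    import org.apache.commons.compress.archivers.ArchiveStreamProvider;
    import org.apache.commons.compress.compressors.CompressorStreamProvider;

    // Hypothetical example: list every provider registered on the class path.
    public class ProviderDiscoveryExample {
        public static void main(String[] args) {
            // Entries from META-INF/services/org.apache.commons.compress.archivers.ArchiveStreamProvider
            for (ArchiveStreamProvider p : ServiceLoader.load(ArchiveStreamProvider.class)) {
                System.out.println("archive provider: " + p.getClass().getName());
            }
            // Entries from META-INF/services/org.apache.commons.compress.compressors.CompressorStreamProvider
            for (CompressorStreamProvider p : ServiceLoader.load(CompressorStreamProvider.class)) {
                System.out.println("compressor provider: " + p.getClass().getName());
            }
        }
    }

With the test resources above on the class path, TestArchiveStreamProvider and TestCompressorStreamProvider would appear in that listing.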