Inside a ZIP archive, additional data can be attached to each entry.
diff -Nru libcommons-compress-java-1.19/src/test/java/org/apache/commons/compress/archivers/ar/ArArchiveInputStreamTest.java libcommons-compress-java-1.20/src/test/java/org/apache/commons/compress/archivers/ar/ArArchiveInputStreamTest.java
--- libcommons-compress-java-1.19/src/test/java/org/apache/commons/compress/archivers/ar/ArArchiveInputStreamTest.java 2019-08-09 15:51:09.000000000 +0000
+++ libcommons-compress-java-1.20/src/test/java/org/apache/commons/compress/archivers/ar/ArArchiveInputStreamTest.java 2020-01-26 12:39:13.000000000 +0000
@@ -97,17 +97,18 @@
}
};
- ArArchiveInputStream archiveInputStream = new ArArchiveInputStream(simpleInputStream);
- ArArchiveEntry entry1 = archiveInputStream.getNextArEntry();
- assertThat(entry1, not(nullValue()));
- assertThat(entry1.getName(), equalTo("test1.xml"));
- assertThat(entry1.getLength(), equalTo(610L));
+ try (ArArchiveInputStream archiveInputStream = new ArArchiveInputStream(simpleInputStream)) {
+ ArArchiveEntry entry1 = archiveInputStream.getNextArEntry();
+ assertThat(entry1, not(nullValue()));
+ assertThat(entry1.getName(), equalTo("test1.xml"));
+ assertThat(entry1.getLength(), equalTo(610L));
- ArArchiveEntry entry2 = archiveInputStream.getNextArEntry();
- assertThat(entry2.getName(), equalTo("test2.xml"));
- assertThat(entry2.getLength(), equalTo(82L));
+ ArArchiveEntry entry2 = archiveInputStream.getNextArEntry();
+ assertThat(entry2.getName(), equalTo("test2.xml"));
+ assertThat(entry2.getLength(), equalTo(82L));
- assertThat(archiveInputStream.getNextArEntry(), nullValue());
+ assertThat(archiveInputStream.getNextArEntry(), nullValue());
+ }
}
}
diff -Nru libcommons-compress-java-1.19/src/test/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStreamTest.java libcommons-compress-java-1.20/src/test/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStreamTest.java
--- libcommons-compress-java-1.19/src/test/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStreamTest.java 2019-08-09 15:51:09.000000000 +0000
+++ libcommons-compress-java-1.20/src/test/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStreamTest.java 2020-01-26 12:39:13.000000000 +0000
@@ -39,49 +39,48 @@
expected.append("test2.xml\n");
expected.append("\n");
-
- final ArjArchiveInputStream in = new ArjArchiveInputStream(new FileInputStream(getFile("bla.arj")));
- ArjArchiveEntry entry;
-
final StringBuilder result = new StringBuilder();
- while ((entry = in.getNextEntry()) != null) {
- result.append(entry.getName());
- int tmp;
- while ((tmp = in.read()) != -1) {
- result.append((char) tmp);
+ try (final ArjArchiveInputStream in = new ArjArchiveInputStream(new FileInputStream(getFile("bla.arj")))) {
+ ArjArchiveEntry entry;
+
+ while ((entry = in.getNextEntry()) != null) {
+ result.append(entry.getName());
+ int tmp;
+ while ((tmp = in.read()) != -1) {
+ result.append((char) tmp);
+ }
+ assertFalse(entry.isDirectory());
}
- assertFalse(entry.isDirectory());
}
- in.close();
assertEquals(result.toString(), expected.toString());
}
@Test
public void testReadingOfAttributesDosVersion() throws Exception {
- final ArjArchiveInputStream in = new ArjArchiveInputStream(new FileInputStream(getFile("bla.arj")));
- final ArjArchiveEntry entry = in.getNextEntry();
- assertEquals("test1.xml", entry.getName());
- assertEquals(30, entry.getSize());
- assertEquals(0, entry.getUnixMode());
- final Calendar cal = Calendar.getInstance();
- cal.set(2008, 9, 6, 23, 50, 52);
- cal.set(Calendar.MILLISECOND, 0);
- assertEquals(cal.getTime(), entry.getLastModifiedDate());
- in.close();
+ try (final ArjArchiveInputStream in = new ArjArchiveInputStream(new FileInputStream(getFile("bla.arj")))) {
+ final ArjArchiveEntry entry = in.getNextEntry();
+ assertEquals("test1.xml", entry.getName());
+ assertEquals(30, entry.getSize());
+ assertEquals(0, entry.getUnixMode());
+ final Calendar cal = Calendar.getInstance();
+ cal.set(2008, 9, 6, 23, 50, 52);
+ cal.set(Calendar.MILLISECOND, 0);
+ assertEquals(cal.getTime(), entry.getLastModifiedDate());
+ }
}
@Test
public void testReadingOfAttributesUnixVersion() throws Exception {
- final ArjArchiveInputStream in = new ArjArchiveInputStream(new FileInputStream(getFile("bla.unix.arj")));
- final ArjArchiveEntry entry = in.getNextEntry();
- assertEquals("test1.xml", entry.getName());
- assertEquals(30, entry.getSize());
- assertEquals(0664, entry.getUnixMode() & 07777 /* UnixStat.PERM_MASK */);
- final Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("GMT+0000"));
- cal.set(2008, 9, 6, 21, 50, 52);
- cal.set(Calendar.MILLISECOND, 0);
- assertEquals(cal.getTime(), entry.getLastModifiedDate());
- in.close();
+ try (final ArjArchiveInputStream in = new ArjArchiveInputStream(new FileInputStream(getFile("bla.unix.arj")))) {
+ final ArjArchiveEntry entry = in.getNextEntry();
+ assertEquals("test1.xml", entry.getName());
+ assertEquals(30, entry.getSize());
+ assertEquals(0664, entry.getUnixMode() & 07777 /* UnixStat.PERM_MASK */);
+ final Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("GMT+0000"));
+ cal.set(2008, 9, 6, 21, 50, 52);
+ cal.set(Calendar.MILLISECOND, 0);
+ assertEquals(cal.getTime(), entry.getLastModifiedDate());
+ }
}
@Test
diff -Nru libcommons-compress-java-1.19/src/test/java/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStreamTest.java libcommons-compress-java-1.20/src/test/java/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStreamTest.java
--- libcommons-compress-java-1.19/src/test/java/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStreamTest.java 2019-08-09 15:51:09.000000000 +0000
+++ libcommons-compress-java-1.20/src/test/java/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStreamTest.java 2020-01-26 12:39:13.000000000 +0000
@@ -36,50 +36,49 @@
expected.append("./test2.xml\n");
expected.append("\n");
-
- final CpioArchiveInputStream in = new CpioArchiveInputStream(new FileInputStream(getFile("bla.cpio")));
- CpioArchiveEntry entry;
-
final StringBuilder result = new StringBuilder();
- while ((entry = (CpioArchiveEntry) in.getNextEntry()) != null) {
- result.append(entry.getName());
- int tmp;
- while ((tmp = in.read()) != -1) {
- result.append((char) tmp);
+ try (final CpioArchiveInputStream in = new CpioArchiveInputStream(new FileInputStream(getFile("bla.cpio")))) {
+ CpioArchiveEntry entry;
+
+ while ((entry = (CpioArchiveEntry) in.getNextEntry()) != null) {
+ result.append(entry.getName());
+ int tmp;
+ while ((tmp = in.read()) != -1) {
+ result.append((char) tmp);
+ }
}
}
- in.close();
assertEquals(result.toString(), expected.toString());
}
@Test
public void testCpioUnarchiveCreatedByRedlineRpm() throws Exception {
- final CpioArchiveInputStream in =
- new CpioArchiveInputStream(new FileInputStream(getFile("redline.cpio")));
- CpioArchiveEntry entry= null;
-
int count = 0;
- while ((entry = (CpioArchiveEntry) in.getNextEntry()) != null) {
- count++;
- assertNotNull(entry);
+ try (final CpioArchiveInputStream in = new CpioArchiveInputStream(
+ new FileInputStream(getFile("redline.cpio")))) {
+ CpioArchiveEntry entry = null;
+
+ while ((entry = (CpioArchiveEntry) in.getNextEntry()) != null) {
+ count++;
+ assertNotNull(entry);
+ }
}
- in.close();
assertEquals(count, 1);
}
@Test
public void testCpioUnarchiveMultibyteCharName() throws Exception {
- final CpioArchiveInputStream in =
- new CpioArchiveInputStream(new FileInputStream(getFile("COMPRESS-459.cpio")), "UTF-8");
- CpioArchiveEntry entry= null;
-
int count = 0;
- while ((entry = (CpioArchiveEntry) in.getNextEntry()) != null) {
- count++;
- assertNotNull(entry);
+ try (final CpioArchiveInputStream in = new CpioArchiveInputStream(
+ new FileInputStream(getFile("COMPRESS-459.cpio")), "UTF-8")) {
+ CpioArchiveEntry entry = null;
+
+ while ((entry = (CpioArchiveEntry) in.getNextEntry()) != null) {
+ count++;
+ assertNotNull(entry);
+ }
}
- in.close();
assertEquals(2, count);
}
diff -Nru libcommons-compress-java-1.19/src/test/java/org/apache/commons/compress/archivers/examples/ExpanderTest.java libcommons-compress-java-1.20/src/test/java/org/apache/commons/compress/archivers/examples/ExpanderTest.java
--- libcommons-compress-java-1.19/src/test/java/org/apache/commons/compress/archivers/examples/ExpanderTest.java 2019-08-09 15:51:09.000000000 +0000
+++ libcommons-compress-java-1.20/src/test/java/org/apache/commons/compress/archivers/examples/ExpanderTest.java 2020-01-26 12:39:13.000000000 +0000
@@ -28,13 +28,9 @@
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.StandardOpenOption;
-import java.util.Arrays;
-import java.util.Collection;
import org.apache.commons.compress.AbstractTestCase;
-import org.apache.commons.compress.archivers.ArchiveEntry;
import org.apache.commons.compress.archivers.ArchiveException;
-import org.apache.commons.compress.archivers.ArchiveInputStream;
import org.apache.commons.compress.archivers.ArchiveOutputStream;
import org.apache.commons.compress.archivers.ArchiveStreamFactory;
import org.apache.commons.compress.archivers.StreamingNotSupportedException;
diff -Nru libcommons-compress-java-1.19/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZFileTest.java libcommons-compress-java-1.20/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZFileTest.java
--- libcommons-compress-java-1.19/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZFileTest.java 2019-08-20 19:29:54.000000000 +0000
+++ libcommons-compress-java-1.20/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZFileTest.java 2020-01-26 12:39:13.000000000 +0000
@@ -23,6 +23,7 @@
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
+import java.io.InputStream;
import java.nio.file.Files;
import java.security.NoSuchAlgorithmException;
import java.util.Arrays;
@@ -39,25 +40,30 @@
import org.apache.commons.compress.utils.IOUtils;
import org.apache.commons.compress.utils.MultiReadOnlySeekableByteChannel;
import org.apache.commons.compress.utils.SeekableInMemoryByteChannel;
+import org.junit.Rule;
import org.junit.Test;
+import org.junit.rules.ExpectedException;
public class SevenZFileTest extends AbstractTestCase {
private static final String TEST2_CONTENT = "\r\n\r\n\r\n\t\r\n\n";
+ @Rule
+ public ExpectedException thrown = ExpectedException.none();
+
// https://issues.apache.org/jira/browse/COMPRESS-320
@Test
public void testRandomlySkippingEntries() throws Exception {
// Read sequential reference.
final Map entriesByName = new HashMap<>();
- SevenZFile archive = new SevenZFile(getFile("COMPRESS-320/Copy.7z"));
- SevenZArchiveEntry entry;
- while ((entry = archive.getNextEntry()) != null) {
- if (entry.hasStream()) {
- entriesByName.put(entry.getName(), readFully(archive));
+ try (SevenZFile archive = new SevenZFile(getFile("COMPRESS-320/Copy.7z"))) {
+ SevenZArchiveEntry entry;
+ while ((entry = archive.getNextEntry()) != null) {
+ if (entry.hasStream()) {
+ entriesByName.put(entry.getName(), readFully(archive));
+ }
}
}
- archive.close();
final String[] variants = {
"BZip2-solid.7z",
@@ -75,26 +81,27 @@
// "PPMd.7z",
};
- // TODO: use randomizedtesting for predictable, but different, randomness.
+ // TODO: use randomized testing for predictable, but different, randomness.
final Random rnd = new Random(0xdeadbeef);
for (final String fileName : variants) {
- archive = new SevenZFile(getFile("COMPRESS-320/" + fileName));
+ try (SevenZFile archive = new SevenZFile(getFile("COMPRESS-320/" + fileName))) {
- while ((entry = archive.getNextEntry()) != null) {
- // Sometimes skip reading entries.
- if (rnd.nextBoolean()) {
- continue;
+ SevenZArchiveEntry entry;
+ while ((entry = archive.getNextEntry()) != null) {
+ // Sometimes skip reading entries.
+ if (rnd.nextBoolean()) {
+ continue;
+ }
+
+ if (entry.hasStream()) {
+ assertTrue(entriesByName.containsKey(entry.getName()));
+ final byte[] content = readFully(archive);
+ assertTrue("Content mismatch on: " + fileName + "!" + entry.getName(),
+ Arrays.equals(content, entriesByName.get(entry.getName())));
+ }
}
- if (entry.hasStream()) {
- assertTrue(entriesByName.containsKey(entry.getName()));
- final byte [] content = readFully(archive);
- assertTrue("Content mismatch on: " + fileName + "!" + entry.getName(),
- Arrays.equals(content, entriesByName.get(entry.getName())));
- }
}
-
- archive.close();
}
}
@@ -370,6 +377,188 @@
}
}
+ /**
+ * @see https://issues.apache.org/jira/browse/COMPRESS-492
+ */
+ @Test
+ public void handlesEmptyArchiveWithFilesInfo() throws Exception {
+ File f = new File(dir, "empty.7z");
+ try (SevenZOutputFile s = new SevenZOutputFile(f)) {
+ }
+ try (SevenZFile z = new SevenZFile(f)) {
+ assertFalse(z.getEntries().iterator().hasNext());
+ assertNull(z.getNextEntry());
+ }
+ }
+
+ /**
+ * @see https://issues.apache.org/jira/browse/COMPRESS-492
+ */
+ @Test
+ public void handlesEmptyArchiveWithoutFilesInfo() throws Exception {
+ try (SevenZFile z = new SevenZFile(getFile("COMPRESS-492.7z"))) {
+ assertFalse(z.getEntries().iterator().hasNext());
+ assertNull(z.getNextEntry());
+ }
+ }
+
+ @Test
+ public void test7zUnarchiveWithDefectHeader() throws Exception {
+ test7zUnarchive(getFile("bla.noendheaderoffset.7z"), SevenZMethod.LZMA);
+ }
+
+ @Test
+ public void extractSpecifiedFile() throws Exception {
+ try (SevenZFile sevenZFile = new SevenZFile(getFile("COMPRESS-256.7z"))) {
+ final String testTxtContents = "111111111111111111111111111000101011\n" +
+ "111111111111111111111111111000101011\n" +
+ "111111111111111111111111111000101011\n" +
+ "111111111111111111111111111000101011\n" +
+ "111111111111111111111111111000101011\n" +
+ "111111111111111111111111111000101011\n" +
+ "111111111111111111111111111000101011\n" +
+ "111111111111111111111111111000101011\n" +
+ "111111111111111111111111111000101011\n" +
+ "111111111111111111111111111000101011";
+
+ for(SevenZArchiveEntry entry : sevenZFile.getEntries()) {
+ if(entry.getName().equals("commons-compress-1.7-src/src/test/resources/test.txt")) {
+ final byte[] contents = new byte[(int) entry.getSize()];
+ int off = 0;
+ InputStream inputStream = sevenZFile.getInputStream(entry);
+ while (off < contents.length) {
+ final int bytesRead = inputStream.read(contents, off, contents.length - off);
+ assert (bytesRead >= 0);
+ off += bytesRead;
+ }
+ assertEquals(testTxtContents, new String(contents, "UTF-8"));
+ break;
+ }
+ }
+ }
+ }
+
+ @Test
+ public void testRandomAccessTogetherWithSequentialAccess() throws Exception {
+ try (SevenZFile sevenZFile = new SevenZFile(getFile("COMPRESS-256.7z"))) {
+ final String testTxtContents = "111111111111111111111111111000101011\n" +
+ "111111111111111111111111111000101011\n" +
+ "111111111111111111111111111000101011\n" +
+ "111111111111111111111111111000101011\n" +
+ "111111111111111111111111111000101011\n" +
+ "111111111111111111111111111000101011\n" +
+ "111111111111111111111111111000101011\n" +
+ "111111111111111111111111111000101011\n" +
+ "111111111111111111111111111000101011\n" +
+ "111111111111111111111111111000101011";
+ final String filesTxtContents = "0xxxxxxxxx10xxxxxxxx20xxxxxxxx30xxxxxxxx40xxxxxxxx50xxxxxxxx60xxxxxxxx70xxxxxxxx80xxxxxxxx90xxxxxxxx100xxxxxxx110xxxxxxx120xxxxxxx130xxxxxxx -> 0yyyyyyyyy10yyyyyyyy20yyyyyyyy30yyyyyyyy40yyyyyyyy50yyyyyyyy60yyyyyyyy70yyyyyyyy80yyyyyyyy90yyyyyyyy100yyyyyyy110yyyyyyy120yyyyyyy130yyyyyyy\n";
+ int off;
+ byte[] contents;
+
+ // call getNextEntry and read before calling getInputStream
+ sevenZFile.getNextEntry();
+ SevenZArchiveEntry nextEntry = sevenZFile.getNextEntry();
+ contents = new byte[(int) nextEntry.getSize()];
+ off = 0;
+
+ assertEquals(SevenZMethod.LZMA2, nextEntry.getContentMethods().iterator().next().getMethod());
+
+ // just read them
+ while (off < contents.length) {
+ final int bytesRead = sevenZFile.read(contents, off, contents.length - off);
+ assert (bytesRead >= 0);
+ off += bytesRead;
+ }
+
+ sevenZFile.getNextEntry();
+ sevenZFile.getNextEntry();
+
+ for(SevenZArchiveEntry entry : sevenZFile.getEntries()) {
+ // commons-compress-1.7-src/src/test/resources/test.txt
+ if(entry.getName().equals("commons-compress-1.7-src/src/test/resources/longsymlink/files.txt")) {
+ contents = new byte[(int) entry.getSize()];
+ off = 0;
+ InputStream inputStream = sevenZFile.getInputStream(entry);
+ while (off < contents.length) {
+ final int bytesRead = inputStream.read(contents, off, contents.length - off);
+ assert (bytesRead >= 0);
+ off += bytesRead;
+ }
+ assertEquals(SevenZMethod.LZMA2, entry.getContentMethods().iterator().next().getMethod());
+ assertEquals(filesTxtContents, new String(contents, "UTF-8"));
+ break;
+ }
+ }
+
+ // call getNextEntry after getInputStream
+ nextEntry = sevenZFile.getNextEntry();
+ while(!nextEntry.getName().equals("commons-compress-1.7-src/src/test/resources/test.txt")) {
+ nextEntry = sevenZFile.getNextEntry();
+ }
+
+ contents = new byte[(int) nextEntry.getSize()];
+ off = 0;
+ while (off < contents.length) {
+ final int bytesRead = sevenZFile.read(contents, off, contents.length - off);
+ assert (bytesRead >= 0);
+ off += bytesRead;
+ }
+ assertEquals(SevenZMethod.LZMA2, nextEntry.getContentMethods().iterator().next().getMethod());
+ assertEquals(testTxtContents, new String(contents, "UTF-8"));
+ }
+ }
+
+ @Test
+ public void testRandomAccessWhenJumpingBackwards() throws Exception {
+ try (SevenZFile sevenZFile = new SevenZFile(getFile("COMPRESS-256.7z"))) {
+ final String testTxtContents = "111111111111111111111111111000101011\n" +
+ "111111111111111111111111111000101011\n" +
+ "111111111111111111111111111000101011\n" +
+ "111111111111111111111111111000101011\n" +
+ "111111111111111111111111111000101011\n" +
+ "111111111111111111111111111000101011\n" +
+ "111111111111111111111111111000101011\n" +
+ "111111111111111111111111111000101011\n" +
+ "111111111111111111111111111000101011\n" +
+ "111111111111111111111111111000101011";
+
+ SevenZArchiveEntry entry;
+ SevenZArchiveEntry testTxtEntry = null;
+ while((entry = sevenZFile.getNextEntry()) != null ) {
+ if(entry.getName().equals("commons-compress-1.7-src/src/test/resources/test.txt")) {
+ testTxtEntry = entry;
+ break;
+ }
+ }
+
+ sevenZFile.getNextEntry();
+ sevenZFile.getNextEntry();
+ // skip all the entries and jump backwards
+ byte[] contents = new byte[(int) testTxtEntry.getSize()];
+ try (InputStream inputStream = sevenZFile.getInputStream(testTxtEntry)) {
+ int off = 0;
+ while (off < contents.length) {
+ final int bytesRead = inputStream.read(contents, off, contents.length - off);
+ assert (bytesRead >= 0);
+ off += bytesRead;
+ }
+ assertEquals(SevenZMethod.LZMA2, testTxtEntry.getContentMethods().iterator().next().getMethod());
+ assertEquals(testTxtContents, new String(contents, "UTF-8"));
+ }
+ }
+ }
+
+ @Test
+ public void extractNonExistSpecifiedFile() throws Exception {
+ try (SevenZFile sevenZFile = new SevenZFile(getFile("COMPRESS-256.7z"));
+ SevenZFile anotherSevenZFile = new SevenZFile(getFile("bla.7z"))) {
+ for (SevenZArchiveEntry nonExistEntry : anotherSevenZFile.getEntries()) {
+ thrown.expect(IllegalArgumentException.class);
+ sevenZFile.getInputStream(nonExistEntry);
+ }
+ }
+ }
+
private void test7zUnarchive(final File f, final SevenZMethod m, final byte[] password) throws Exception {
try (SevenZFile sevenZFile = new SevenZFile(f, password)) {
test7zUnarchive(sevenZFile, m);
diff -Nru libcommons-compress-java-1.19/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZNativeHeapTest.java libcommons-compress-java-1.20/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZNativeHeapTest.java
--- libcommons-compress-java-1.19/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZNativeHeapTest.java 2019-08-09 15:51:09.000000000 +0000
+++ libcommons-compress-java-1.20/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZNativeHeapTest.java 2020-01-26 12:39:13.000000000 +0000
@@ -33,29 +33,28 @@
public class SevenZNativeHeapTest extends AbstractTestCase {
-
@Test
public void testEndDeflaterOnCloseStream() throws Exception {
- Coders.DeflateDecoder deflateDecoder = new DeflateDecoder();
-
- final DeflateDecoderOutputStream outputStream =
- (DeflateDecoderOutputStream) deflateDecoder.encode(new ByteArrayOutputStream(), 9);
- DelegatingDeflater delegatingDeflater = new DelegatingDeflater(outputStream.deflater);
- outputStream.deflater = delegatingDeflater;
- outputStream.close();
+ final Coders.DeflateDecoder deflateDecoder = new DeflateDecoder();
+ final DelegatingDeflater delegatingDeflater;
+ try (final DeflateDecoderOutputStream outputStream = (DeflateDecoderOutputStream) deflateDecoder
+ .encode(new ByteArrayOutputStream(), 9)) {
+ delegatingDeflater = new DelegatingDeflater(outputStream.deflater);
+ outputStream.deflater = delegatingDeflater;
+ }
assertTrue(delegatingDeflater.isEnded.get());
}
@Test
public void testEndInflaterOnCloseStream() throws Exception {
- Coders.DeflateDecoder deflateDecoder = new DeflateDecoder();
- final DeflateDecoderInputStream inputStream =
- (DeflateDecoderInputStream) deflateDecoder
- .decode("dummy", new ByteArrayInputStream(new byte[0]), 0, null, null, Integer.MAX_VALUE);
- DelegatingInflater delegatingInflater = new DelegatingInflater(inputStream.inflater);
- inputStream.inflater = delegatingInflater;
- inputStream.close();
+ final Coders.DeflateDecoder deflateDecoder = new DeflateDecoder();
+ final DelegatingInflater delegatingInflater;
+ try (final DeflateDecoderInputStream inputStream = (DeflateDecoderInputStream) deflateDecoder.decode("dummy",
+ new ByteArrayInputStream(new byte[0]), 0, null, null, Integer.MAX_VALUE)) {
+ delegatingInflater = new DelegatingInflater(inputStream.inflater);
+ inputStream.inflater = delegatingInflater;
+ }
assertTrue(delegatingInflater.isEnded.get());
}
diff -Nru libcommons-compress-java-1.19/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZOutputFileTest.java libcommons-compress-java-1.20/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZOutputFileTest.java
--- libcommons-compress-java-1.19/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZOutputFileTest.java 2019-08-09 15:51:09.000000000 +0000
+++ libcommons-compress-java-1.20/src/test/java/org/apache/commons/compress/archivers/sevenz/SevenZOutputFileTest.java 2020-01-26 12:39:13.000000000 +0000
@@ -479,12 +479,9 @@
}
private void createAndReadBack(final File output, final Iterable methods) throws Exception {
- final SevenZOutputFile outArchive = new SevenZOutputFile(output);
- outArchive.setContentMethods(methods);
- try {
+ try (final SevenZOutputFile outArchive = new SevenZOutputFile(output)) {
+ outArchive.setContentMethods(methods);
addFile(outArchive, 0, true);
- } finally {
- outArchive.close();
}
try (SevenZFile archive = new SevenZFile(output)) {
@@ -493,12 +490,9 @@
}
private void createAndReadBack(final SeekableInMemoryByteChannel output, final Iterable methods) throws Exception {
- final SevenZOutputFile outArchive = new SevenZOutputFile(output);
- outArchive.setContentMethods(methods);
- try {
+ try (final SevenZOutputFile outArchive = new SevenZOutputFile(output)) {
+ outArchive.setContentMethods(methods);
addFile(outArchive, 0, true);
- } finally {
- outArchive.close();
}
try (SevenZFile archive =
new SevenZFile(new SeekableInMemoryByteChannel(output.array()), "in memory")) {
diff -Nru libcommons-compress-java-1.19/src/test/java/org/apache/commons/compress/archivers/SevenZTestCase.java libcommons-compress-java-1.20/src/test/java/org/apache/commons/compress/archivers/SevenZTestCase.java
--- libcommons-compress-java-1.19/src/test/java/org/apache/commons/compress/archivers/SevenZTestCase.java 2019-08-09 15:51:09.000000000 +0000
+++ libcommons-compress-java-1.20/src/test/java/org/apache/commons/compress/archivers/SevenZTestCase.java 2020-01-26 12:39:13.000000000 +0000
@@ -44,6 +44,7 @@
file2 = getFile("test2.xml");
}
+ @Override
@Before
public void setUp() throws Exception {
super.setUp();
diff -Nru libcommons-compress-java-1.19/src/test/java/org/apache/commons/compress/archivers/tar/SparseFilesTest.java libcommons-compress-java-1.20/src/test/java/org/apache/commons/compress/archivers/tar/SparseFilesTest.java
--- libcommons-compress-java-1.19/src/test/java/org/apache/commons/compress/archivers/tar/SparseFilesTest.java 2018-05-02 20:17:13.000000000 +0000
+++ libcommons-compress-java-1.20/src/test/java/org/apache/commons/compress/archivers/tar/SparseFilesTest.java 2020-01-25 14:01:31.000000000 +0000
@@ -18,15 +18,24 @@
package org.apache.commons.compress.archivers.tar;
-import static org.apache.commons.compress.AbstractTestCase.getFile;
import static org.junit.Assert.*;
+
+import org.apache.commons.compress.AbstractTestCase;
+import org.apache.commons.compress.utils.IOUtils;
+import org.junit.Assert;
import org.junit.Test;
import java.io.File;
import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.List;
+import java.util.Locale;
+
+public class SparseFilesTest extends AbstractTestCase {
-public class SparseFilesTest {
+ private final boolean isOnWindows = System.getProperty("os.name").toLowerCase(Locale.ENGLISH).contains("windows");
@Test
public void testOldGNU() throws Throwable {
@@ -40,6 +49,18 @@
assertTrue(ae.isGNUSparse());
assertFalse(ae.isPaxGNUSparse());
assertFalse(tin.canReadEntryData(ae));
+
+ List sparseHeaders = ae.getSparseHeaders();
+ assertEquals(3, sparseHeaders.size());
+
+ assertEquals(0, sparseHeaders.get(0).getOffset());
+ assertEquals(2048, sparseHeaders.get(0).getNumbytes());
+
+ assertEquals(1050624L, sparseHeaders.get(1).getOffset());
+ assertEquals(2560, sparseHeaders.get(1).getNumbytes());
+
+ assertEquals(3101184L, sparseHeaders.get(2).getOffset());
+ assertEquals(0, sparseHeaders.get(2).getNumbytes());
} finally {
if (tin != null) {
tin.close();
@@ -63,6 +84,133 @@
}
}
+ @Test
+ public void testExtractSparseTarsOnWindows() throws IOException {
+ if (!isOnWindows) {
+ return;
+ }
+
+ final File oldGNUSparseTar = getFile("oldgnu_sparse.tar");
+ final File paxGNUSparseTar = getFile("pax_gnu_sparse.tar");
+ try (TarArchiveInputStream paxGNUSparseInputStream = new TarArchiveInputStream(new FileInputStream(paxGNUSparseTar))) {
+
+ // compare between old GNU and PAX 0.0
+ paxGNUSparseInputStream.getNextTarEntry();
+ try (TarArchiveInputStream oldGNUSparseInputStream = new TarArchiveInputStream(new FileInputStream(oldGNUSparseTar))) {
+ oldGNUSparseInputStream.getNextTarEntry();
+ assertArrayEquals(IOUtils.toByteArray(oldGNUSparseInputStream),
+ IOUtils.toByteArray(paxGNUSparseInputStream));
+ }
+
+ // compare between old GNU and PAX 0.1
+ paxGNUSparseInputStream.getNextTarEntry();
+ try (TarArchiveInputStream oldGNUSparseInputStream = new TarArchiveInputStream(new FileInputStream(oldGNUSparseTar))) {
+ oldGNUSparseInputStream.getNextTarEntry();
+ assertArrayEquals(IOUtils.toByteArray(oldGNUSparseInputStream),
+ IOUtils.toByteArray(paxGNUSparseInputStream));
+ }
+
+ // compare between old GNU and PAX 1.0
+ paxGNUSparseInputStream.getNextTarEntry();
+ try (TarArchiveInputStream oldGNUSparseInputStream = new TarArchiveInputStream(new FileInputStream(oldGNUSparseTar))) {
+ oldGNUSparseInputStream.getNextTarEntry();
+ assertArrayEquals(IOUtils.toByteArray(oldGNUSparseInputStream),
+ IOUtils.toByteArray(paxGNUSparseInputStream));
+ }
+ }
+ }
+
+ @Test
+ public void testExtractOldGNU() throws IOException, InterruptedException {
+ if (isOnWindows) {
+ return;
+ }
+
+ try {
+ final File file = getFile("oldgnu_sparse.tar");
+ try (InputStream sparseFileInputStream = extractTarAndGetInputStream(file, "sparsefile");
+ TarArchiveInputStream tin = new TarArchiveInputStream(new FileInputStream(file))) {
+ tin.getNextTarEntry();
+ assertArrayEquals(IOUtils.toByteArray(tin),
+ IOUtils.toByteArray(sparseFileInputStream));
+ }
+ } catch (RuntimeException | IOException ex) {
+ ex.printStackTrace();
+ throw ex;
+ }
+ }
+
+ @Test
+ public void testExtractExtendedOldGNU() throws IOException, InterruptedException {
+ if (isOnWindows) {
+ return;
+ }
+
+ final File file = getFile("oldgnu_extended_sparse.tar");
+ try (InputStream sparseFileInputStream = extractTarAndGetInputStream(file, "sparse6");
+ TarArchiveInputStream tin = new TarArchiveInputStream(new FileInputStream(file))) {
+ final TarArchiveEntry ae = tin.getNextTarEntry();
+
+ assertArrayEquals(IOUtils.toByteArray(tin),
+ IOUtils.toByteArray(sparseFileInputStream));
+
+ List sparseHeaders = ae.getSparseHeaders();
+ assertEquals(7, sparseHeaders.size());
+
+ assertEquals(0, sparseHeaders.get(0).getOffset());
+ assertEquals(1024, sparseHeaders.get(0).getNumbytes());
+
+ assertEquals(10240, sparseHeaders.get(1).getOffset());
+ assertEquals(1024, sparseHeaders.get(1).getNumbytes());
+
+ assertEquals(16384, sparseHeaders.get(2).getOffset());
+ assertEquals(1024, sparseHeaders.get(2).getNumbytes());
+
+ assertEquals(24576, sparseHeaders.get(3).getOffset());
+ assertEquals(1024, sparseHeaders.get(3).getNumbytes());
+
+ assertEquals(29696, sparseHeaders.get(4).getOffset());
+ assertEquals(1024, sparseHeaders.get(4).getNumbytes());
+
+ assertEquals(36864, sparseHeaders.get(5).getOffset());
+ assertEquals(1024, sparseHeaders.get(5).getNumbytes());
+
+ assertEquals(51200, sparseHeaders.get(6).getOffset());
+ assertEquals(0, sparseHeaders.get(6).getNumbytes());
+ }
+ }
+
+ @Test
+ public void testExtractPaxGNU() throws IOException, InterruptedException {
+ if (isOnWindows) {
+ return;
+ }
+
+ final File file = getFile("pax_gnu_sparse.tar");
+ try (TarArchiveInputStream tin = new TarArchiveInputStream(new FileInputStream(file))) {
+
+ tin.getNextTarEntry();
+ try (InputStream sparseFileInputStream = extractTarAndGetInputStream(file, "sparsefile-0.0")) {
+ assertArrayEquals(IOUtils.toByteArray(tin),
+ IOUtils.toByteArray(sparseFileInputStream));
+ }
+
+ // TODO : it's wired that I can only get a 0 size sparsefile-0.1 on my Ubuntu 16.04
+ // using "tar -xf pax_gnu_sparse.tar"
+ tin.getNextTarEntry();
+ try (InputStream sparseFileInputStream = extractTarAndGetInputStream(file, "sparsefile-0.0")) {
+ assertArrayEquals(IOUtils.toByteArray(tin),
+ IOUtils.toByteArray(sparseFileInputStream));
+ }
+
+ tin.getNextTarEntry();
+ try (InputStream sparseFileInputStream = extractTarAndGetInputStream(file, "sparsefile-1.0")) {
+ assertArrayEquals(IOUtils.toByteArray(tin),
+ IOUtils.toByteArray(sparseFileInputStream));
+ }
+ }
+ }
+
private void assertPaxGNUEntry(final TarArchiveInputStream tin, final String suffix) throws Throwable {
final TarArchiveEntry ae = tin.getNextTarEntry();
assertEquals("sparsefile-" + suffix, ae.getName());
@@ -70,6 +218,34 @@
assertTrue(ae.isPaxGNUSparse());
assertFalse(ae.isOldGNUSparse());
assertFalse(tin.canReadEntryData(ae));
+
+ List sparseHeaders = ae.getSparseHeaders();
+ assertEquals(3, sparseHeaders.size());
+
+ assertEquals(0, sparseHeaders.get(0).getOffset());
+ assertEquals(2048, sparseHeaders.get(0).getNumbytes());
+
+ assertEquals(1050624L, sparseHeaders.get(1).getOffset());
+ assertEquals(2560, sparseHeaders.get(1).getNumbytes());
+
+ assertEquals(3101184L, sparseHeaders.get(2).getOffset());
+ assertEquals(0, sparseHeaders.get(2).getNumbytes());
+ }
+
+ private InputStream extractTarAndGetInputStream(File tarFile, String sparseFileName) throws IOException, InterruptedException {
+ ProcessBuilder pb = new ProcessBuilder("tar", "-xf", tarFile.getPath(), "-C", resultDir.getPath());
+ pb.redirectErrorStream(true);
+ Process process = pb.start();
+ // wait until the extract finishes
+ assertEquals(new String(IOUtils.toByteArray(process.getInputStream())), 0, process.waitFor());
+
+ for (File file : resultDir.listFiles()) {
+ if (file.getName().equals(sparseFileName)) {
+ return new FileInputStream(file);
+ }
+ }
+ fail("didn't find " + sparseFileName + " after extracting " + tarFile);
+ return null;
}
}
diff -Nru libcommons-compress-java-1.19/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStreamTest.java libcommons-compress-java-1.20/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStreamTest.java
--- libcommons-compress-java-1.19/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStreamTest.java 2019-08-09 15:51:09.000000000 +0000
+++ libcommons-compress-java-1.20/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStreamTest.java 2020-01-07 14:40:25.000000000 +0000
@@ -54,7 +54,7 @@
final TarArchiveInputStream tais = new TarArchiveInputStream(is);
final Map headers = tais
.parsePaxHeaders(new ByteArrayInputStream("30 atime=1321711775.972059463\n"
- .getBytes(CharsetNames.UTF_8)));
+ .getBytes(CharsetNames.UTF_8)), null);
assertEquals(1, headers.size());
assertEquals("1321711775.972059463", headers.get("atime"));
tais.close();
@@ -66,7 +66,7 @@
final TarArchiveInputStream tais = new TarArchiveInputStream(is);
final Map headers = tais
.parsePaxHeaders(new ByteArrayInputStream("11 foo=bar\n11 foo=baz\n"
- .getBytes(CharsetNames.UTF_8)));
+ .getBytes(CharsetNames.UTF_8)), null);
assertEquals(1, headers.size());
assertEquals("baz", headers.get("foo"));
tais.close();
@@ -78,7 +78,7 @@
final TarArchiveInputStream tais = new TarArchiveInputStream(is);
final Map headers = tais
.parsePaxHeaders(new ByteArrayInputStream("11 foo=bar\n7 foo=\n"
- .getBytes(CharsetNames.UTF_8)));
+ .getBytes(CharsetNames.UTF_8)), null);
assertEquals(0, headers.size());
tais.close();
}
@@ -89,7 +89,7 @@
final TarArchiveInputStream tais = new TarArchiveInputStream(is);
final Map headers = tais
.parsePaxHeaders(new ByteArrayInputStream("28 comment=line1\nline2\nand3\n"
- .getBytes(CharsetNames.UTF_8)));
+ .getBytes(CharsetNames.UTF_8)), null);
assertEquals(1, headers.size());
assertEquals("line1\nline2\nand3", headers.get("comment"));
tais.close();
@@ -103,7 +103,7 @@
final InputStream is = new ByteArrayInputStream(new byte[1]);
final TarArchiveInputStream tais = new TarArchiveInputStream(is);
final Map headers = tais
- .parsePaxHeaders(new ByteArrayInputStream(line.getBytes(CharsetNames.UTF_8)));
+ .parsePaxHeaders(new ByteArrayInputStream(line.getBytes(CharsetNames.UTF_8)), null);
assertEquals(1, headers.size());
assertEquals(ae, headers.get("path"));
tais.close();
diff -Nru libcommons-compress-java-1.19/src/test/java/org/apache/commons/compress/archivers/tar/TarUtilsTest.java libcommons-compress-java-1.20/src/test/java/org/apache/commons/compress/archivers/tar/TarUtilsTest.java
--- libcommons-compress-java-1.19/src/test/java/org/apache/commons/compress/archivers/tar/TarUtilsTest.java 2018-05-02 20:17:13.000000000 +0000
+++ libcommons-compress-java-1.20/src/test/java/org/apache/commons/compress/archivers/tar/TarUtilsTest.java 2020-01-07 14:40:25.000000000 +0000
@@ -381,4 +381,16 @@
}
}
+ @Test
+ public void testParseSparse() {
+ final long expectedOffset = 0100000;
+ final long expectedNumbytes = 0111000;
+ final byte [] buffer = new byte[] {
+ ' ', ' ', ' ', ' ', ' ', '0', '1', '0', '0', '0', '0', '0', // sparseOffset
+ ' ', ' ', ' ', ' ', ' ', '0', '1', '1', '1', '0', '0', '0'};
+ TarArchiveStructSparse sparse = TarUtils.parseSparse(buffer, 0);
+ assertEquals(sparse.getOffset(), expectedOffset);
+ assertEquals(sparse.getNumbytes(), expectedNumbytes);
+ }
+
}
diff -Nru libcommons-compress-java-1.19/src/test/java/org/apache/commons/compress/archivers/zip/ScatterSampleTest.java libcommons-compress-java-1.20/src/test/java/org/apache/commons/compress/archivers/zip/ScatterSampleTest.java
--- libcommons-compress-java-1.19/src/test/java/org/apache/commons/compress/archivers/zip/ScatterSampleTest.java 2018-05-02 20:17:13.000000000 +0000
+++ libcommons-compress-java-1.20/src/test/java/org/apache/commons/compress/archivers/zip/ScatterSampleTest.java 2020-01-07 14:40:25.000000000 +0000
@@ -52,22 +52,23 @@
};
scatterSample.addEntry(archiveEntry, supp);
- final ZipArchiveOutputStream zipArchiveOutputStream = new ZipArchiveOutputStream(result);
- scatterSample.writeTo(zipArchiveOutputStream);
- zipArchiveOutputStream.close();
+ try (final ZipArchiveOutputStream zipArchiveOutputStream = new ZipArchiveOutputStream(result)) {
+ scatterSample.writeTo(zipArchiveOutputStream);
+ }
}
private void checkFile(final File result) throws IOException {
- final ZipFile zf = new ZipFile(result);
- final ZipArchiveEntry archiveEntry1 = zf.getEntries().nextElement();
- assertEquals( "test1.xml", archiveEntry1.getName());
- final InputStream inputStream = zf.getInputStream(archiveEntry1);
- final byte[] b = new byte[6];
- final int i = IOUtils.readFully(inputStream, b);
- assertEquals(5, i);
- assertEquals('H', b[0]);
- assertEquals('o', b[4]);
- zf.close();
+ try (final ZipFile zipFile = new ZipFile(result)) {
+ final ZipArchiveEntry archiveEntry1 = zipFile.getEntries().nextElement();
+ assertEquals("test1.xml", archiveEntry1.getName());
+ try (final InputStream inputStream = zipFile.getInputStream(archiveEntry1)) {
+ final byte[] b = new byte[6];
+ final int i = IOUtils.readFully(inputStream, b);
+ assertEquals(5, i);
+ assertEquals('H', b[0]);
+ assertEquals('o', b[4]);
+ }
+ }
result.delete();
}
}
\ No newline at end of file
diff -Nru libcommons-compress-java-1.19/src/test/java/org/apache/commons/compress/archivers/zip/Zip64SupportIT.java libcommons-compress-java-1.20/src/test/java/org/apache/commons/compress/archivers/zip/Zip64SupportIT.java
--- libcommons-compress-java-1.19/src/test/java/org/apache/commons/compress/archivers/zip/Zip64SupportIT.java 2018-05-02 20:17:13.000000000 +0000
+++ libcommons-compress-java-1.20/src/test/java/org/apache/commons/compress/archivers/zip/Zip64SupportIT.java 2020-01-21 17:43:20.000000000 +0000
@@ -35,6 +35,7 @@
import java.io.IOException;
import java.io.InputStream;
import java.io.RandomAccessFile;
+import java.nio.file.Files;
import java.util.Enumeration;
import java.util.Random;
import java.util.zip.ZipEntry;
@@ -362,6 +363,10 @@
return write3EntriesCreatingBigArchive(Zip64Mode.AsNeeded);
}
+ private static ZipOutputTest write3EntriesCreatingBigArchive(final Zip64Mode mode) {
+ return write3EntriesCreatingBigArchive(mode, false);
+ }
+
/*
* Individual sizes don't require ZIP64 but the offset of the
* third entry is bigger than 0xFFFFFFFF so a ZIP64 extended
@@ -370,7 +375,7 @@
* Creates a temporary archive of approx 5GB in size
*/
private static ZipOutputTest
- write3EntriesCreatingBigArchive(final Zip64Mode mode) {
+ write3EntriesCreatingBigArchive(final Zip64Mode mode, final boolean isSplitArchive) {
return new ZipOutputTest() {
@Override
public void test(final File f, final ZipArchiveOutputStream zos)
@@ -386,7 +391,7 @@
a.skipBytes(2 * 47 /* CD entry of file with
file name length 1 and no
extra data */
- + 2 * (mode == Zip64Mode.Always ? 28 : 0)
+ + 2 * (mode == Zip64Mode.Always ? 32 : 0)
/* ZIP64 extra fields if mode is Always */
);
@@ -427,11 +432,11 @@
// file name length
1, 0,
// extra field length
- (byte) (mode == Zip64Mode.Always? 28 : 12), 0,
+ (byte) (mode == Zip64Mode.Always? 32 : 12), 0,
// comment length
0, 0,
// disk number
- 0, 0,
+ (byte) (isSplitArchive? 0xFF : 0), (byte) (isSplitArchive? 0xFF : 0),
// attributes
0, 0,
0, 0, 0, 0,
@@ -448,7 +453,7 @@
// Header-ID
1, 0,
// size
- 24, 0,
+ 28, 0,
// Original Size
1, 0, 0, 0, 0, 0, 0, 0,
}, extra);
@@ -645,7 +650,7 @@
// file name length
1, 0,
// extra field length
- (byte) (mode == Zip64Mode.Always? 28 : 20), 0,
+ (byte) (mode == Zip64Mode.Always? 32 : 20), 0,
// comment length
0, 0,
// disk number
@@ -673,7 +678,7 @@
// Header-ID
1, 0,
// size of extra
- (byte) (mode == Zip64Mode.Always? 24 : 16), 0,
+ (byte) (mode == Zip64Mode.Always? 28 : 16), 0,
// original size
0, (byte) 0xF2, 5, (byte) 0x2A,
1, 0, 0, 0,
@@ -899,7 +904,7 @@
// file name length
1, 0,
// extra field length
- (byte) (mode == Zip64Mode.Always? 28 : 20), 0,
+ (byte) (mode == Zip64Mode.Always? 32 : 20), 0,
// comment length
0, 0,
// disk number
@@ -927,7 +932,7 @@
// Header-ID
1, 0,
// size of extra
- (byte) (mode == Zip64Mode.Always? 24 : 16), 0,
+ (byte) (mode == Zip64Mode.Always? 28 : 16), 0,
// original size
0, (byte) 0xF2, 5, (byte) 0x2A,
1, 0, 0, 0,
@@ -1153,7 +1158,7 @@
// file name length
1, 0,
// extra field length
- (byte) (mode == Zip64Mode.Always? 28 : 20), 0,
+ (byte) (mode == Zip64Mode.Always? 32 : 20), 0,
// comment length
0, 0,
// disk number
@@ -1181,7 +1186,7 @@
// Header-ID
1, 0,
// size of extra
- (byte) (mode == Zip64Mode.Always? 24 : 16), 0,
+ (byte) (mode == Zip64Mode.Always? 28 : 16), 0,
// original size
0, (byte) 0xF2, 5, (byte) 0x2A,
1, 0, 0, 0,
@@ -1594,7 +1599,7 @@
// file name length
1, 0,
// extra field length
- 28, 0,
+ 32, 0,
// comment length
0, 0,
// disk number
@@ -1614,7 +1619,7 @@
// Header-ID
1, 0,
// size of extra
- 24, 0,
+ 28, 0,
// original size
(byte) 0x40, (byte) 0x42, (byte) 0x0F, 0,
0, 0, 0, 0,
@@ -1935,7 +1940,7 @@
// file name length
1, 0,
// extra field length
- 28, 0,
+ 32, 0,
// comment length
0, 0,
// disk number
@@ -1954,7 +1959,7 @@
// Header-ID
1, 0,
// size of extra
- 24, 0,
+ 28, 0,
// original size
(byte) 0x40, (byte) 0x42, (byte) 0x0F, 0,
0, 0, 0, 0,
@@ -2294,7 +2299,7 @@
// file name length
1, 0,
// extra field length
- 28, 0,
+ 32, 0,
// comment length
0, 0,
// disk number
@@ -2313,7 +2318,7 @@
// Header-ID
1, 0,
// size of extra
- 24, 0,
+ 28, 0,
// original size
(byte) 0x40, (byte) 0x42, (byte) 0x0F, 0,
0, 0, 0, 0,
@@ -2398,19 +2403,54 @@
true);
}
+ @Test
+ public void write3EntriesCreatingManySplitArchiveFileModeNever()
+ throws Throwable {
+ withTemporaryArchive("write3EntriesCreatingManySplitArchiveFileModeNever",
+ write3EntriesCreatingBigArchiveModeNever,
+ true, 65536L);
+ }
+
+ @Test
+ public void write3EntriesCreatingManySplitArchiveFileModeAlways()
+ throws Throwable {
+ // about 76,293 zip split segments will be created
+ withTemporaryArchive("write3EntriesCreatingManySplitArchiveFileModeAlways",
+ write3EntriesCreatingBigArchive(Zip64Mode.Always, true),
+ true, 65536L);
+ }
+
static interface ZipOutputTest {
void test(File f, ZipArchiveOutputStream zos) throws IOException;
}
private static void withTemporaryArchive(final String testName,
final ZipOutputTest test,
- final boolean useRandomAccessFile)
+ final boolean useRandomAccessFile) throws Throwable {
+ withTemporaryArchive(testName, test, useRandomAccessFile, null);
+ }
+
+ private static void withTemporaryArchive(final String testName,
+ final ZipOutputTest test,
+ final boolean useRandomAccessFile,
+ final Long splitSize)
throws Throwable {
- final File f = getTempFile(testName);
+ File f = getTempFile(testName);
+ File dir = null;
+ if (splitSize != null) {
+ dir = Files.createTempDirectory("commons-compress-" + testName).toFile();
+ dir.deleteOnExit();
+
+ f = new File(dir, "commons-compress-" + testName + ".zip");
+ }
BufferedOutputStream os = null;
- final ZipArchiveOutputStream zos = useRandomAccessFile
+ ZipArchiveOutputStream zos = useRandomAccessFile
? new ZipArchiveOutputStream(f)
: new ZipArchiveOutputStream(os = new BufferedOutputStream(new FileOutputStream(f)));
+ if (splitSize != null) {
+ zos = new ZipArchiveOutputStream(f, splitSize);
+ }
+
try {
test.test(f, zos);
} catch (final IOException ex) {
@@ -2422,10 +2462,16 @@
try {
zos.destroy();
} finally {
- if (os != null) {
- os.close();
+ try {
+ if (os != null) {
+ os.close();
+ }
+ AbstractTestCase.tryHardToDelete(f);
+ } finally {
+ if (dir != null) {
+ AbstractTestCase.rmdir(dir);
+ }
}
- AbstractTestCase.tryHardToDelete(f);
}
}
}
diff -Nru libcommons-compress-java-1.19/src/test/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStreamTest.java libcommons-compress-java-1.20/src/test/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStreamTest.java
--- libcommons-compress-java-1.19/src/test/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStreamTest.java 2019-08-18 10:32:58.000000000 +0000
+++ libcommons-compress-java-1.20/src/test/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStreamTest.java 2020-01-24 18:49:27.000000000 +0000
@@ -34,6 +34,8 @@
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
+import java.nio.channels.Channels;
+import java.nio.channels.SeekableByteChannel;
import java.util.Arrays;
import java.util.zip.ZipException;
@@ -596,6 +598,85 @@
}
}
+ @Test
+ public void testSplitZipCreatedByZip() throws IOException {
+ File lastFile = getFile("COMPRESS-477/split_zip_created_by_zip/split_zip_created_by_zip.zip");
+ try (SeekableByteChannel channel = ZipSplitReadOnlySeekableByteChannel.buildFromLastSplitSegment(lastFile);
+ InputStream inputStream = Channels.newInputStream(channel);
+ ZipArchiveInputStream splitInputStream = new ZipArchiveInputStream(inputStream, ZipEncodingHelper.UTF8, true, false, true)) {
+
+ File fileToCompare = getFile("COMPRESS-477/split_zip_created_by_zip/zip_to_compare_created_by_zip.zip");
+ try (ZipArchiveInputStream inputStreamToCompare = new ZipArchiveInputStream(new FileInputStream(fileToCompare), ZipEncodingHelper.UTF8, true, false, true)) {
+
+ ArchiveEntry entry;
+ while((entry = splitInputStream.getNextEntry()) != null && inputStreamToCompare.getNextEntry() != null) {
+ if(entry.isDirectory()) {
+ continue;
+ }
+ assertArrayEquals(IOUtils.toByteArray(splitInputStream),
+ IOUtils.toByteArray(inputStreamToCompare));
+ }
+ }
+ }
+ }
+
+ @Test
+ public void testSplitZipCreatedByZipOfZip64() throws IOException {
+ File lastFile = getFile("COMPRESS-477/split_zip_created_by_zip/split_zip_created_by_zip_zip64.zip");
+ try (SeekableByteChannel channel = ZipSplitReadOnlySeekableByteChannel.buildFromLastSplitSegment(lastFile);
+ InputStream inputStream = Channels.newInputStream(channel);
+ ZipArchiveInputStream splitInputStream = new ZipArchiveInputStream(inputStream, ZipEncodingHelper.UTF8, true, false, true)) {
+
+ File fileToCompare = getFile("COMPRESS-477/split_zip_created_by_zip/zip_to_compare_created_by_zip_zip64.zip");
+ try (ZipArchiveInputStream inputStreamToCompare = new ZipArchiveInputStream(new FileInputStream(fileToCompare), ZipEncodingHelper.UTF8, true, false, true)) {
+
+ ArchiveEntry entry;
+ while((entry = splitInputStream.getNextEntry()) != null && inputStreamToCompare.getNextEntry() != null) {
+ if(entry.isDirectory()) {
+ continue;
+ }
+ assertArrayEquals(IOUtils.toByteArray(splitInputStream),
+ IOUtils.toByteArray(inputStreamToCompare));
+ }
+ }
+ }
+ }
+
+ @Test
+ public void testSplitZipCreatedByWinrar() throws IOException {
+ File lastFile = getFile("COMPRESS-477/split_zip_created_by_winrar/split_zip_created_by_winrar.zip");
+ try (SeekableByteChannel channel = ZipSplitReadOnlySeekableByteChannel.buildFromLastSplitSegment(lastFile);
+ InputStream inputStream = Channels.newInputStream(channel);
+ ZipArchiveInputStream splitInputStream = new ZipArchiveInputStream(inputStream, ZipEncodingHelper.UTF8, true, false, true)) {
+
+ File fileToCompare = getFile("COMPRESS-477/split_zip_created_by_winrar/zip_to_compare_created_by_winrar.zip");
+ try (ZipArchiveInputStream inputStreamToCompare = new ZipArchiveInputStream(new FileInputStream(fileToCompare), ZipEncodingHelper.UTF8, true, false, true)) {
+
+ ArchiveEntry entry;
+ while((entry = splitInputStream.getNextEntry()) != null && inputStreamToCompare.getNextEntry() != null) {
+ if(entry.isDirectory()) {
+ continue;
+ }
+ assertArrayEquals(IOUtils.toByteArray(splitInputStream),
+ IOUtils.toByteArray(inputStreamToCompare));
+ }
+ }
+ }
+ }
+
+ @Test
+ public void testSplitZipCreatedByZipThrowsException() throws IOException {
+ thrown.expect(EOFException.class);
+ File zipSplitFile = getFile("COMPRESS-477/split_zip_created_by_zip/split_zip_created_by_zip.z01");
+ InputStream fileInputStream = new FileInputStream(zipSplitFile);
+ ZipArchiveInputStream inputStream = new ZipArchiveInputStream(fileInputStream, ZipEncodingHelper.UTF8, true, false, true);
+
+ ArchiveEntry entry = inputStream.getNextEntry();
+ while(entry != null){
+ entry = inputStream.getNextEntry();
+ }
+ }
+
private static byte[] readEntry(ZipArchiveInputStream zip, ZipArchiveEntry zae) throws IOException {
final int len = (int)zae.getSize();
final byte[] buff = new byte[len];
diff -Nru libcommons-compress-java-1.19/src/test/java/org/apache/commons/compress/archivers/zip/ZipFileTest.java libcommons-compress-java-1.20/src/test/java/org/apache/commons/compress/archivers/zip/ZipFileTest.java
--- libcommons-compress-java-1.19/src/test/java/org/apache/commons/compress/archivers/zip/ZipFileTest.java 2019-08-09 15:51:09.000000000 +0000
+++ libcommons-compress-java-1.20/src/test/java/org/apache/commons/compress/archivers/zip/ZipFileTest.java 2020-01-07 14:40:25.000000000 +0000
@@ -28,12 +28,16 @@
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
+import java.nio.channels.SeekableByteChannel;
import java.nio.charset.Charset;
+import java.nio.file.Files;
+import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
+import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.concurrent.atomic.AtomicInteger;
@@ -687,6 +691,52 @@
multiByteReadConsistentlyReturnsMinusOneAtEof(getFile("bzip2-zip.zip"));
}
+ @Test
+ public void extractFileLiesAcrossSplitZipSegmentsCreatedByZip() throws Exception {
+ File lastFile = getFile("COMPRESS-477/split_zip_created_by_zip/split_zip_created_by_zip.zip");
+ SeekableByteChannel channel = ZipSplitReadOnlySeekableByteChannel.buildFromLastSplitSegment(lastFile);
+ zf = new ZipFile(channel);
+
+ // the compressed content of UnsupportedCompressionAlgorithmException.java lies between .z01 and .z02
+ ZipArchiveEntry zipEntry = zf.getEntry("commons-compress/src/main/java/org/apache/commons/compress/archivers/dump/UnsupportedCompressionAlgorithmException.java");
+ File fileToCompare = getFile("COMPRESS-477/split_zip_created_by_zip/file_to_compare_1");
+ assertFileEqualsToEntry(fileToCompare, zipEntry, zf);
+
+ // the compressed content of DeflateParameters.java lies between .z02 and .zip
+ zipEntry = zf.getEntry("commons-compress/src/main/java/org/apache/commons/compress/compressors/deflate/DeflateParameters.java");
+ fileToCompare = getFile("COMPRESS-477/split_zip_created_by_zip/file_to_compare_2");
+ assertFileEqualsToEntry(fileToCompare, zipEntry, zf);
+ }
+
+ @Test
+ public void extractFileLiesAcrossSplitZipSegmentsCreatedByZipOfZip64() throws Exception {
+ File lastFile = getFile("COMPRESS-477/split_zip_created_by_zip/split_zip_created_by_zip_zip64.zip");
+ SeekableByteChannel channel = ZipSplitReadOnlySeekableByteChannel.buildFromLastSplitSegment(lastFile);
+ zf = new ZipFile(channel);
+
+ // the compressed content of UnsupportedCompressionAlgorithmException.java lies between .z01 and .z02
+ ZipArchiveEntry zipEntry = zf.getEntry("commons-compress/src/main/java/org/apache/commons/compress/archivers/dump/UnsupportedCompressionAlgorithmException.java");
+ File fileToCompare = getFile("COMPRESS-477/split_zip_created_by_zip/file_to_compare_1");
+ assertFileEqualsToEntry(fileToCompare, zipEntry, zf);
+
+ // the compressed content of DeflateParameters.java lies between .z02 and .zip
+ zipEntry = zf.getEntry("commons-compress/src/main/java/org/apache/commons/compress/compressors/deflate/DeflateParameters.java");
+ fileToCompare = getFile("COMPRESS-477/split_zip_created_by_zip/file_to_compare_2");
+ assertFileEqualsToEntry(fileToCompare, zipEntry, zf);
+ }
+
+ @Test
+ public void extractFileLiesAcrossSplitZipSegmentsCreatedByWinrar() throws Exception {
+ File lastFile = getFile("COMPRESS-477/split_zip_created_by_winrar/split_zip_created_by_winrar.zip");
+ SeekableByteChannel channel = ZipSplitReadOnlySeekableByteChannel.buildFromLastSplitSegment(lastFile);
+ zf = new ZipFile(channel);
+
+ // the compressed content of ZipArchiveInputStream.java lies between .z01 and .z02
+ ZipArchiveEntry zipEntry = zf.getEntry("commons-compress/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream.java");
+ File fileToCompare = getFile("COMPRESS-477/split_zip_created_by_winrar/file_to_compare_1");
+ assertFileEqualsToEntry(fileToCompare, zipEntry, zf);
+ }
+
private void multiByteReadConsistentlyReturnsMinusOneAtEof(File file) throws Exception {
byte[] buf = new byte[2];
try (ZipFile archive = new ZipFile(file)) {
@@ -799,4 +849,37 @@
assertEquals(expected, ze.getNameSource());
}
}
+
+ private void assertFileEqualsToEntry(File fileToCompare, ZipArchiveEntry entry, ZipFile zipFile) throws IOException {
+ byte[] buffer = new byte[10240];
+ File tempFile = File.createTempFile("temp","txt");
+ OutputStream outputStream = new FileOutputStream(tempFile);
+ InputStream inputStream = zipFile.getInputStream(entry);
+ int readLen;
+ while((readLen = inputStream.read(buffer)) > 0) {
+ outputStream.write(buffer, 0, readLen);
+ }
+
+ outputStream.close();
+ inputStream.close();
+
+ assertFileEqualIgnoreEndOfLine(fileToCompare, tempFile);
+ }
+
+ private void assertFileEqualIgnoreEndOfLine(File file1, File file2) throws IOException {
+ List linesOfFile1 = Files.readAllLines(Paths.get(file1.getCanonicalPath()), Charset.forName("UTF-8"));
+ List linesOfFile2 = Files.readAllLines(Paths.get(file2.getCanonicalPath()), Charset.forName("UTF-8"));
+
+ if(linesOfFile1.size() != linesOfFile2.size()) {
+ fail("files not equal : " + file1.getName() + " , " + file2.getName());
+ }
+
+ String tempLineInFile1;
+ String tempLineInFile2;
+ for(int i = 0;i < linesOfFile1.size();i++) {
+ tempLineInFile1 = linesOfFile1.get(i).replaceAll("\r\n", "\n");
+ tempLineInFile2 = linesOfFile2.get(i).replaceAll("\r\n", "\n");
+ Assert.assertEquals(tempLineInFile1, tempLineInFile2);
+ }
+ }
}
diff -Nru libcommons-compress-java-1.19/src/test/java/org/apache/commons/compress/archivers/zip/ZipSplitOutputStreamTest.java libcommons-compress-java-1.20/src/test/java/org/apache/commons/compress/archivers/zip/ZipSplitOutputStreamTest.java
--- libcommons-compress-java-1.19/src/test/java/org/apache/commons/compress/archivers/zip/ZipSplitOutputStreamTest.java 1970-01-01 00:00:00.000000000 +0000
+++ libcommons-compress-java-1.20/src/test/java/org/apache/commons/compress/archivers/zip/ZipSplitOutputStreamTest.java 2020-01-07 14:40:25.000000000 +0000
@@ -0,0 +1,104 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+package org.apache.commons.compress.archivers.zip;
+
+import org.apache.commons.compress.AbstractTestCase;
+import org.junit.Assert;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.ByteBuffer;
+
+public class ZipSplitOutputStreamTest extends AbstractTestCase {
+ @Rule
+ public ExpectedException thrown = ExpectedException.none();
+
+ @Test
+ public void throwsExceptionIfSplitSizeIsTooSmall() throws IOException {
+ thrown.expect(IllegalArgumentException.class);
+ new ZipSplitOutputStream(File.createTempFile("temp", "zip"), (64 * 1024 - 1));
+ }
+
+ @Test
+ public void throwsExceptionIfSplitSizeIsTooLarge() throws IOException {
+ thrown.expect(IllegalArgumentException.class);
+ new ZipSplitOutputStream(File.createTempFile("temp", "zip"), (4 * 1024 * 1024 * 1024L));
+ }
+
+ @Test
+ public void throwsIfUnsplittableSizeLargerThanSplitSize() throws IOException {
+ thrown.expect(IllegalArgumentException.class);
+ long splitSize = 100 * 1024;
+ ZipSplitOutputStream output = new ZipSplitOutputStream(File.createTempFile("temp", "zip"), splitSize);
+ output.prepareToWriteUnsplittableContent(splitSize + 1);
+ }
+
+ @Test
+ public void splitZipBeginsWithZipSplitSignature() throws IOException {
+ File tempFile = File.createTempFile("temp", "zip");
+ new ZipSplitOutputStream(tempFile, 100 * 1024L);
+
+ InputStream inputStream = new FileInputStream(tempFile);
+ byte[] buffer = new byte[4];
+ inputStream.read(buffer);
+
+ Assert.assertEquals(ByteBuffer.wrap(ZipArchiveOutputStream.DD_SIG).getInt(), ByteBuffer.wrap(buffer).getInt());
+ }
+
+ @Test
+ public void testCreateSplittedFiles() throws IOException {
+ File testOutputFile = new File(dir, "testCreateSplittedFiles.zip");
+ int splitSize = 100 * 1024; /* 100KB */
+ ZipSplitOutputStream zipSplitOutputStream = new ZipSplitOutputStream(testOutputFile, splitSize);
+
+ File fileToTest = getFile("COMPRESS-477/split_zip_created_by_zip/zip_to_compare_created_by_zip.zip");
+ InputStream inputStream = new FileInputStream(fileToTest);
+ byte[] buffer = new byte[4096];
+ int readLen;
+
+ while ((readLen = inputStream.read(buffer)) > 0) {
+ zipSplitOutputStream.write(buffer, 0, readLen);
+ }
+
+ inputStream.close();
+ zipSplitOutputStream.close();
+
+ File zipFile = new File(dir.getPath(), "testCreateSplittedFiles.z01");
+ Assert.assertEquals(zipFile.length(), splitSize);
+
+ zipFile = new File(dir.getPath(), "testCreateSplittedFiles.z02");
+ Assert.assertEquals(zipFile.length(), splitSize);
+
+ zipFile = new File(dir.getPath(), "testCreateSplittedFiles.z03");
+ Assert.assertEquals(zipFile.length(), splitSize);
+
+ zipFile = new File(dir.getPath(), "testCreateSplittedFiles.z04");
+ Assert.assertEquals(zipFile.length(), splitSize);
+
+ zipFile = new File(dir.getPath(), "testCreateSplittedFiles.z05");
+ Assert.assertEquals(zipFile.length(), splitSize);
+
+ zipFile = new File(dir.getPath(), "testCreateSplittedFiles.zip");
+ Assert.assertEquals(zipFile.length(), (fileToTest.length() + 4 - splitSize * 5));
+ }
+}
diff -Nru libcommons-compress-java-1.19/src/test/java/org/apache/commons/compress/archivers/ZipTestCase.java libcommons-compress-java-1.20/src/test/java/org/apache/commons/compress/archivers/ZipTestCase.java
--- libcommons-compress-java-1.19/src/test/java/org/apache/commons/compress/archivers/ZipTestCase.java 2019-08-17 16:01:50.000000000 +0000
+++ libcommons-compress-java-1.20/src/test/java/org/apache/commons/compress/archivers/ZipTestCase.java 2020-01-24 18:45:56.000000000 +0000
@@ -27,6 +27,9 @@
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
+import java.nio.channels.Channels;
+import java.nio.channels.SeekableByteChannel;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Enumeration;
@@ -44,6 +47,7 @@
import org.apache.commons.compress.archivers.zip.ZipArchiveOutputStream;
import org.apache.commons.compress.archivers.zip.ZipFile;
import org.apache.commons.compress.archivers.zip.ZipMethod;
+import org.apache.commons.compress.archivers.zip.ZipSplitReadOnlySeekableByteChannel;
import org.apache.commons.compress.utils.IOUtils;
import org.apache.commons.compress.utils.InputStreamStatistics;
import org.apache.commons.compress.utils.SeekableInMemoryByteChannel;
@@ -51,6 +55,7 @@
import org.junit.Test;
public final class ZipTestCase extends AbstractTestCase {
+
/**
* Archives 2 files and unarchives it again. If the file length of result
* and source is the same, it looks like the operations have worked
@@ -63,49 +68,39 @@
final File file1 = getFile("test1.xml");
final File file2 = getFile("test2.xml");
- final OutputStream out = new FileOutputStream(output);
- ArchiveOutputStream os = null;
- try {
- os = new ArchiveStreamFactory()
- .createArchiveOutputStream("zip", out);
- os.putArchiveEntry(new ZipArchiveEntry("testdata/test1.xml"));
- IOUtils.copy(new FileInputStream(file1), os);
- os.closeArchiveEntry();
-
- os.putArchiveEntry(new ZipArchiveEntry("testdata/test2.xml"));
- IOUtils.copy(new FileInputStream(file2), os);
- os.closeArchiveEntry();
- } finally {
- if (os != null) {
- os.close();
+ try (final OutputStream out = new FileOutputStream(output)) {
+ try (ArchiveOutputStream os = new ArchiveStreamFactory().createArchiveOutputStream("zip", out)) {
+ os.putArchiveEntry(new ZipArchiveEntry("testdata/test1.xml"));
+ try (final FileInputStream input = new FileInputStream(file1)) {
+ IOUtils.copy(input, os);
+ }
+ os.closeArchiveEntry();
+
+ os.putArchiveEntry(new ZipArchiveEntry("testdata/test2.xml"));
+ try (final FileInputStream input = new FileInputStream(file2)) {
+ IOUtils.copy(input, os);
+ }
+ os.closeArchiveEntry();
}
}
- out.close();
// Unarchive the same
final List results = new ArrayList<>();
- final InputStream is = new FileInputStream(output);
- ArchiveInputStream in = null;
- try {
- in = new ArchiveStreamFactory()
- .createArchiveInputStream("zip", is);
-
- ZipArchiveEntry entry = null;
- while((entry = (ZipArchiveEntry)in.getNextEntry()) != null) {
- final File outfile = new File(resultDir.getCanonicalPath() + "/result/" + entry.getName());
- outfile.getParentFile().mkdirs();
- try (OutputStream o = new FileOutputStream(outfile)) {
- IOUtils.copy(in, o);
+ try (final InputStream fileInputStream = new FileInputStream(output)) {
+ try (ArchiveInputStream archiveInputStream = new ArchiveStreamFactory().createArchiveInputStream("zip",
+ fileInputStream)) {
+ ZipArchiveEntry entry = null;
+ while ((entry = (ZipArchiveEntry) archiveInputStream.getNextEntry()) != null) {
+ final File outfile = new File(resultDir.getCanonicalPath() + "/result/" + entry.getName());
+ outfile.getParentFile().mkdirs();
+ try (OutputStream o = new FileOutputStream(outfile)) {
+ IOUtils.copy(archiveInputStream, o);
+ }
+ results.add(outfile);
}
- results.add(outfile);
- }
- } finally {
- if (in != null) {
- in.close();
}
}
- is.close();
assertEquals(results.size(), 2);
File result = results.get(0);
@@ -125,34 +120,33 @@
final File file2 = getFile("test2.xml");
final byte[] file1Contents = new byte[(int) file1.length()];
final byte[] file2Contents = new byte[(int) file2.length()];
- IOUtils.readFully(new FileInputStream(file1), file1Contents);
- IOUtils.readFully(new FileInputStream(file2), file2Contents);
-
- SeekableInMemoryByteChannel channel = new SeekableInMemoryByteChannel();
- try (ZipArchiveOutputStream os = new ZipArchiveOutputStream(channel)) {
- os.putArchiveEntry(new ZipArchiveEntry("testdata/test1.xml"));
- os.write(file1Contents);
- os.closeArchiveEntry();
-
- os.putArchiveEntry(new ZipArchiveEntry("testdata/test2.xml"));
- os.write(file2Contents);
- os.closeArchiveEntry();
- }
-
- // Unarchive the same
+ IOUtils.read(file1, file1Contents);
+ IOUtils.read(file2, file2Contents);
final List results = new ArrayList<>();
- try (ArchiveInputStream in = new ArchiveStreamFactory()
- .createArchiveInputStream("zip", new ByteArrayInputStream(channel.array()))) {
-
- ZipArchiveEntry entry;
- while((entry = (ZipArchiveEntry)in.getNextEntry()) != null) {
- byte[] result = new byte[(int) entry.getSize()];
- IOUtils.readFully(in, result);
- results.add(result);
+ try (SeekableInMemoryByteChannel channel = new SeekableInMemoryByteChannel()) {
+ try (ZipArchiveOutputStream os = new ZipArchiveOutputStream(channel)) {
+ os.putArchiveEntry(new ZipArchiveEntry("testdata/test1.xml"));
+ os.write(file1Contents);
+ os.closeArchiveEntry();
+
+ os.putArchiveEntry(new ZipArchiveEntry("testdata/test2.xml"));
+ os.write(file2Contents);
+ os.closeArchiveEntry();
+ }
+
+ // Unarchive the same
+ try (ArchiveInputStream inputStream = new ArchiveStreamFactory().createArchiveInputStream("zip",
+ new ByteArrayInputStream(channel.array()))) {
+
+ ZipArchiveEntry entry;
+ while ((entry = (ZipArchiveEntry) inputStream.getNextEntry()) != null) {
+ byte[] result = new byte[(int) entry.getSize()];
+ IOUtils.readFully(inputStream, result);
+ results.add(result);
+ }
}
}
-
assertArrayEquals(results.get(0), file1Contents);
assertArrayEquals(results.get(1), file2Contents);
}
@@ -184,8 +178,8 @@
final ArrayList al = new ArrayList<>();
al.add("test1.xml");
al.add("test2.xml");
- try (InputStream is = new FileInputStream(input)) {
- checkArchiveContent(new ZipArchiveInputStream(is), al);
+ try (InputStream fis = new FileInputStream(input)) {
+ checkArchiveContent(new ZipArchiveInputStream(fis), al);
}
}
@@ -196,11 +190,11 @@
*/
@Test
public void testTokenizationCompressionMethod() throws IOException {
- final ZipFile moby = new ZipFile(getFile("moby.zip"));
- final ZipArchiveEntry entry = moby.getEntry("README");
- assertEquals("method", ZipMethod.TOKENIZATION.getCode(), entry.getMethod());
- assertFalse(moby.canReadEntryData(entry));
- moby.close();
+ try (final ZipFile moby = new ZipFile(getFile("moby.zip"))) {
+ final ZipArchiveEntry entry = moby.getEntry("README");
+ assertEquals("method", ZipMethod.TOKENIZATION.getCode(), entry.getMethod());
+ assertFalse(moby.canReadEntryData(entry));
+ }
}
/**
@@ -244,10 +238,8 @@
final List<String> results = new ArrayList<>();
final List<ZipException> expectedExceptions = new ArrayList<>();
- final InputStream is = new FileInputStream(input);
- ArchiveInputStream in = null;
- try {
- in = new ArchiveStreamFactory().createArchiveInputStream("zip", is);
+ try (final InputStream fis = new FileInputStream(input);
+ ArchiveInputStream in = new ArchiveStreamFactory().createArchiveInputStream("zip", fis)) {
ZipArchiveEntry entry = null;
while ((entry = (ZipArchiveEntry) in.getNextEntry()) != null) {
@@ -265,12 +257,7 @@
}
// nested stream must not be closed here
}
- } finally {
- if (in != null) {
- in.close();
- }
}
- is.close();
assertTrue(results.contains("NestedArchiv.zip"));
assertTrue(results.contains("test1.xml"));
@@ -361,28 +348,28 @@
@Test
public void testCopyRawEntriesFromFile()
- throws IOException {
+ throws IOException {
final File[] tmp = createTempDirAndFile();
final File reference = createReferenceFile(tmp[0], Zip64Mode.Never, "expected.");
- final File a1 = File.createTempFile("src1.", ".zip", tmp[0]);
- try (final ZipArchiveOutputStream zos = new ZipArchiveOutputStream(a1)) {
+ final File file1 = File.createTempFile("src1.", ".zip", tmp[0]);
+ try (final ZipArchiveOutputStream zos = new ZipArchiveOutputStream(file1)) {
zos.setUseZip64(Zip64Mode.Never);
createFirstEntry(zos).close();
}
- final File a2 = File.createTempFile("src2.", ".zip", tmp[0]);
- try (final ZipArchiveOutputStream zos1 = new ZipArchiveOutputStream(a2)) {
+ final File file2 = File.createTempFile("src2.", ".zip", tmp[0]);
+ try (final ZipArchiveOutputStream zos1 = new ZipArchiveOutputStream(file2)) {
zos1.setUseZip64(Zip64Mode.Never);
createSecondEntry(zos1).close();
}
- try (final ZipFile zf1 = new ZipFile(a1); final ZipFile zf2 = new ZipFile(a2)) {
+ try (final ZipFile zipFile1 = new ZipFile(file1); final ZipFile zipFile2 = new ZipFile(file2)) {
final File fileResult = File.createTempFile("file-actual.", ".zip", tmp[0]);
try (final ZipArchiveOutputStream zos2 = new ZipArchiveOutputStream(fileResult)) {
- zf1.copyRawEntries(zos2, allFilesPredicate);
- zf2.copyRawEntries(zos2, allFilesPredicate);
+ zipFile1.copyRawEntries(zos2, allFilesPredicate);
+ zipFile2.copyRawEntries(zos2, allFilesPredicate);
}
// copyRawEntries does not add superfluous zip64 header like regular zip output stream
// does when using Zip64Mode.AsNeeded so all the source material has to be Zip64Mode.Never,
@@ -402,17 +389,17 @@
createFirstEntry(zos1);
}
- final File a1 = File.createTempFile("zip64src.", ".zip", tmp[0]);
- try (final ZipArchiveOutputStream zos = new ZipArchiveOutputStream(a1)) {
+ final File file1 = File.createTempFile("zip64src.", ".zip", tmp[0]);
+ try (final ZipArchiveOutputStream zos = new ZipArchiveOutputStream(file1)) {
zos.setUseZip64(Zip64Mode.Always);
createFirstEntry(zos).close();
}
final File fileResult = File.createTempFile("file-actual.", ".zip", tmp[0]);
- try (final ZipFile zf1 = new ZipFile(a1)) {
+ try (final ZipFile zipFile1 = new ZipFile(file1)) {
try (final ZipArchiveOutputStream zos2 = new ZipArchiveOutputStream(fileResult)) {
zos2.setUseZip64(Zip64Mode.Always);
- zf1.copyRawEntries(zos2, allFilesPredicate);
+ zipFile1.copyRawEntries(zos2, allFilesPredicate);
}
assertSameFileContents(reference, fileResult);
}
@@ -423,8 +410,8 @@
final File[] tmp = createTempDirAndFile();
- final File a1 = File.createTempFile("unixModeBits.", ".zip", tmp[0]);
- try (final ZipArchiveOutputStream zos = new ZipArchiveOutputStream(a1)) {
+ final File file1 = File.createTempFile("unixModeBits.", ".zip", tmp[0]);
+ try (final ZipArchiveOutputStream zos = new ZipArchiveOutputStream(file1)) {
final ZipArchiveEntry archiveEntry = new ZipArchiveEntry("fred");
archiveEntry.setUnixMode(0664);
@@ -432,7 +419,7 @@
zos.addRawArchiveEntry(archiveEntry, new ByteArrayInputStream("fud".getBytes()));
}
- try (final ZipFile zf1 = new ZipFile(a1)) {
+ try (final ZipFile zf1 = new ZipFile(file1)) {
final ZipArchiveEntry fred = zf1.getEntry("fred");
assertEquals(0664, fred.getUnixMode());
}
@@ -483,12 +470,11 @@
assertEquals(expectedElement.getExternalAttributes(), actualElement.getExternalAttributes());
assertEquals(expectedElement.getInternalAttributes(), actualElement.getInternalAttributes());
- final InputStream actualIs = actual.getInputStream(actualElement);
- final InputStream expectedIs = expected.getInputStream(expectedElement);
- IOUtils.readFully(expectedIs, expectedBuf);
- IOUtils.readFully(actualIs, actualBuf);
- expectedIs.close();
- actualIs.close();
+ try (final InputStream actualIs = actual.getInputStream(actualElement);
+ final InputStream expectedIs = expected.getInputStream(expectedElement)) {
+ IOUtils.readFully(expectedIs, expectedBuf);
+ IOUtils.readFully(actualIs, actualBuf);
+ }
Assert.assertArrayEquals(expectedBuf, actualBuf); // Buffers are larger than payload. dont care
}
@@ -647,6 +633,63 @@
testInputStreamStatistics("COMPRESS-380/COMPRESS-380.zip", expected);
}
+ @Test(expected = IllegalArgumentException.class)
+ public void buildSplitZipWithTooSmallSizeThrowsException() throws IOException {
+ new ZipArchiveOutputStream(File.createTempFile("temp", "zip"), 64 * 1024 - 1);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void buildSplitZipWithTooLargeSizeThrowsException() throws IOException {
+ new ZipArchiveOutputStream(File.createTempFile("temp", "zip"), 4294967295L + 1);
+ }
+
+ @Test(expected = IOException.class)
+ public void buildSplitZipWithSegmentAlreadyExistThrowsException() throws IOException {
+ File directoryToZip = getFilesToZip();
+ File outputZipFile = new File(dir, "splitZip.zip");
+ long splitSize = 100 * 1024L; /* 100 KB */
+ try (final ZipArchiveOutputStream zipArchiveOutputStream = new ZipArchiveOutputStream(outputZipFile,
+ splitSize)) {
+
+ // create a file that has the same name of one of the created split segments
+ File sameNameFile = new File(dir, "splitZip.z01");
+ sameNameFile.createNewFile();
+
+ addFilesToZip(zipArchiveOutputStream, directoryToZip);
+ }
+ }
+
+ @Test
+ public void buildSplitZipTest() throws IOException {
+ File directoryToZip = getFilesToZip();
+ createTestSplitZipSegments();
+
+ File lastFile = new File(dir, "splitZip.zip");
+ try (SeekableByteChannel channel = ZipSplitReadOnlySeekableByteChannel.buildFromLastSplitSegment(lastFile);
+ InputStream inputStream = Channels.newInputStream(channel);
+ ZipArchiveInputStream splitInputStream = new ZipArchiveInputStream(inputStream,
+ StandardCharsets.UTF_8.toString(), true, false, true)) {
+
+ ArchiveEntry entry;
+ int filesNum = countNonDirectories(directoryToZip);
+ int filesCount = 0;
+ while ((entry = splitInputStream.getNextEntry()) != null) {
+ if (entry.isDirectory()) {
+ continue;
+ }
+ // compare all files one by one
+ File fileToCompare = new File(entry.getName());
+ try (InputStream inputStreamToCompare = new FileInputStream(fileToCompare)) {
+ assertArrayEquals(IOUtils.toByteArray(splitInputStream),
+ IOUtils.toByteArray(inputStreamToCompare));
+ }
+ filesCount++;
+ }
+ // and the number of files should equal
+ assertEquals(filesCount, filesNum);
+ }
+ }
+
private void testInputStreamStatistics(String fileName, Map<String, List<Long>> expectedStatistics)
throws IOException, ArchiveException {
final File input = getFile(fileName);
@@ -701,4 +744,77 @@
final long b = stats.getCompressedCount();
l.add(Arrays.asList(t, b));
}
+
+ private File getFilesToZip() throws IOException {
+ File originalZipFile = getFile("COMPRESS-477/split_zip_created_by_zip/zip_to_compare_created_by_zip.zip");
+ try (ZipFile zipFile = new ZipFile(originalZipFile)) {
+ Enumeration<ZipArchiveEntry> zipEntries = zipFile.getEntries();
+ ZipArchiveEntry zipEntry;
+ File outputFile;
+ byte[] buffer;
+ int readLen;
+
+ while (zipEntries.hasMoreElements()) {
+ zipEntry = zipEntries.nextElement();
+ if (zipEntry.isDirectory()) {
+ continue;
+ }
+
+ outputFile = new File(dir, zipEntry.getName());
+ if (!outputFile.getParentFile().exists()) {
+ outputFile.getParentFile().mkdirs();
+ }
+ outputFile = new File(dir, zipEntry.getName());
+
+ try (InputStream inputStream = zipFile.getInputStream(zipEntry);
+ OutputStream outputStream = new FileOutputStream(outputFile)) {
+ buffer = new byte[(int) zipEntry.getSize()];
+ while ((readLen = inputStream.read(buffer)) > 0) {
+ outputStream.write(buffer, 0, readLen);
+ }
+ }
+ }
+ }
+ return dir.listFiles()[0];
+ }
+
+ private void createTestSplitZipSegments() throws IOException {
+ File directoryToZip = getFilesToZip();
+ File outputZipFile = new File(dir, "splitZip.zip");
+ long splitSize = 100 * 1024L; /* 100 KB */
+ try (final ZipArchiveOutputStream zipArchiveOutputStream = new ZipArchiveOutputStream(outputZipFile,
+ splitSize)) {
+ addFilesToZip(zipArchiveOutputStream, directoryToZip);
+ }
+ }
+
+ private void addFilesToZip(ZipArchiveOutputStream zipArchiveOutputStream, File fileToAdd) throws IOException {
+ if (fileToAdd.isDirectory()) {
+ for (File file : fileToAdd.listFiles()) {
+ addFilesToZip(zipArchiveOutputStream, file);
+ }
+ } else {
+ ZipArchiveEntry zipArchiveEntry = new ZipArchiveEntry(fileToAdd.getPath());
+ zipArchiveEntry.setMethod(ZipEntry.DEFLATED);
+
+ zipArchiveOutputStream.putArchiveEntry(zipArchiveEntry);
+ try (final FileInputStream input = new FileInputStream(fileToAdd)) {
+ IOUtils.copy(input, zipArchiveOutputStream);
+ }
+ zipArchiveOutputStream.closeArchiveEntry();
+ }
+ }
+
+ private int countNonDirectories(File file) {
+ if(!file.isDirectory()) {
+ return 1;
+ }
+
+ int result = 0;
+ for (File fileInDirectory : file.listFiles()) {
+ result += countNonDirectories(fileInDirectory);
+ }
+
+ return result;
+ }
}
diff -Nru libcommons-compress-java-1.19/src/test/java/org/apache/commons/compress/compressors/brotli/BrotliCompressorInputStreamTest.java libcommons-compress-java-1.20/src/test/java/org/apache/commons/compress/compressors/brotli/BrotliCompressorInputStreamTest.java
--- libcommons-compress-java-1.19/src/test/java/org/apache/commons/compress/compressors/brotli/BrotliCompressorInputStreamTest.java 2019-08-09 15:51:09.000000000 +0000
+++ libcommons-compress-java-1.20/src/test/java/org/apache/commons/compress/compressors/brotli/BrotliCompressorInputStreamTest.java 2020-01-07 14:40:25.000000000 +0000
@@ -45,13 +45,12 @@
final File input = getFile("brotli.testdata.compressed");
final File expected = getFile("brotli.testdata.uncompressed");
try (InputStream inputStream = new FileInputStream(input);
- InputStream expectedStream = new FileInputStream(expected);
- BrotliCompressorInputStream brotliInputStream = new BrotliCompressorInputStream(inputStream)) {
+ BrotliCompressorInputStream brotliInputStream = new BrotliCompressorInputStream(inputStream)) {
final byte[] b = new byte[20];
- IOUtils.readFully(expectedStream, b);
+ IOUtils.read(expected, b);
final ByteArrayOutputStream bos = new ByteArrayOutputStream();
int readByte = -1;
- while((readByte = brotliInputStream.read()) != -1) {
+ while ((readByte = brotliInputStream.read()) != -1) {
bos.write(readByte);
}
Assert.assertArrayEquals(b, bos.toByteArray());
diff -Nru libcommons-compress-java-1.19/src/test/java/org/apache/commons/compress/compressors/bzip2/BZip2NSelectorsOverflowTest.java libcommons-compress-java-1.20/src/test/java/org/apache/commons/compress/compressors/bzip2/BZip2NSelectorsOverflowTest.java
--- libcommons-compress-java-1.19/src/test/java/org/apache/commons/compress/compressors/bzip2/BZip2NSelectorsOverflowTest.java 1970-01-01 00:00:00.000000000 +0000
+++ libcommons-compress-java-1.20/src/test/java/org/apache/commons/compress/compressors/bzip2/BZip2NSelectorsOverflowTest.java 2020-01-21 12:21:21.000000000 +0000
@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.commons.compress.compressors.bzip2;
+
+import org.apache.commons.compress.AbstractTestCase;
+import org.junit.Test;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.InputStream;
+
+import static org.junit.Assert.assertEquals;
+
+public class BZip2NSelectorsOverflowTest extends AbstractTestCase {
+
+ /**
+ * See https://sourceware.org/ml/bzip2-devel/2019-q3/msg00007.html
+ */
+ @Test
+ public void shouldDecompressBlockWithNSelectorOverflow() throws Exception {
+ final File toDecompress = getFile("lbzip2_32767.bz2");
+ try (final InputStream is = new FileInputStream(toDecompress);
+ final BZip2CompressorInputStream in = new BZip2CompressorInputStream(is)) {
+ int l = 0;
+ while (in.read() != -1) {
+ l++;
+ }
+ assertEquals(5, l);
+ }
+ }
+}
diff -Nru libcommons-compress-java-1.19/src/test/java/org/apache/commons/compress/compressors/deflate64/HuffmanDecoderTest.java libcommons-compress-java-1.20/src/test/java/org/apache/commons/compress/compressors/deflate64/HuffmanDecoderTest.java
--- libcommons-compress-java-1.19/src/test/java/org/apache/commons/compress/compressors/deflate64/HuffmanDecoderTest.java 2018-05-02 20:17:13.000000000 +0000
+++ libcommons-compress-java-1.20/src/test/java/org/apache/commons/compress/compressors/deflate64/HuffmanDecoderTest.java 2020-01-07 14:40:25.000000000 +0000
@@ -214,9 +214,6 @@
assertEquals("Hello World\nHello World\nHello World\nHello World\n", new String(result, 0, len));
len = decoder.decode(result);
- assertEquals(0, len);
-
- len = decoder.decode(result);
assertEquals(-1, len);
}
}
diff -Nru libcommons-compress-java-1.19/src/test/java/org/apache/commons/compress/compressors/lz4/FramedLZ4CompressorInputStreamTest.java libcommons-compress-java-1.20/src/test/java/org/apache/commons/compress/compressors/lz4/FramedLZ4CompressorInputStreamTest.java
--- libcommons-compress-java-1.19/src/test/java/org/apache/commons/compress/compressors/lz4/FramedLZ4CompressorInputStreamTest.java 2019-08-09 15:51:09.000000000 +0000
+++ libcommons-compress-java-1.20/src/test/java/org/apache/commons/compress/compressors/lz4/FramedLZ4CompressorInputStreamTest.java 2020-01-07 14:40:25.000000000 +0000
@@ -46,10 +46,7 @@
public void testMatches() throws IOException {
assertFalse(FramedLZ4CompressorInputStream.matches(new byte[10], 4));
final byte[] b = new byte[12];
- final File input = getFile("bla.tar.lz4");
- try (FileInputStream in = new FileInputStream(input)) {
- IOUtils.readFully(in, b);
- }
+ IOUtils.read(getFile("bla.tar.lz4"), b);
assertFalse(FramedLZ4CompressorInputStream.matches(b, 3));
assertTrue(FramedLZ4CompressorInputStream.matches(b, 4));
assertTrue(FramedLZ4CompressorInputStream.matches(b, 5));
diff -Nru libcommons-compress-java-1.19/src/test/java/org/apache/commons/compress/compressors/snappy/FramedSnappyCompressorInputStreamTest.java libcommons-compress-java-1.20/src/test/java/org/apache/commons/compress/compressors/snappy/FramedSnappyCompressorInputStreamTest.java
--- libcommons-compress-java-1.19/src/test/java/org/apache/commons/compress/compressors/snappy/FramedSnappyCompressorInputStreamTest.java 2019-08-09 15:51:09.000000000 +0000
+++ libcommons-compress-java-1.20/src/test/java/org/apache/commons/compress/compressors/snappy/FramedSnappyCompressorInputStreamTest.java 2020-01-07 14:40:25.000000000 +0000
@@ -40,10 +40,7 @@
public void testMatches() throws IOException {
assertFalse(FramedSnappyCompressorInputStream.matches(new byte[10], 10));
final byte[] b = new byte[12];
- final File input = getFile("bla.tar.sz");
- try (FileInputStream in = new FileInputStream(input)) {
- IOUtils.readFully(in, b);
- }
+ IOUtils.read(getFile("bla.tar.sz"), b);
assertFalse(FramedSnappyCompressorInputStream.matches(b, 9));
assertTrue(FramedSnappyCompressorInputStream.matches(b, 10));
assertTrue(FramedSnappyCompressorInputStream.matches(b, 12));
diff -Nru libcommons-compress-java-1.19/src/test/java/org/apache/commons/compress/compressors/zstandard/ZstdCompressorInputStreamTest.java libcommons-compress-java-1.20/src/test/java/org/apache/commons/compress/compressors/zstandard/ZstdCompressorInputStreamTest.java
--- libcommons-compress-java-1.19/src/test/java/org/apache/commons/compress/compressors/zstandard/ZstdCompressorInputStreamTest.java 2019-08-09 15:51:09.000000000 +0000
+++ libcommons-compress-java-1.20/src/test/java/org/apache/commons/compress/compressors/zstandard/ZstdCompressorInputStreamTest.java 2020-01-07 14:40:25.000000000 +0000
@@ -47,10 +47,9 @@
final File input = getFile("zstandard.testdata.zst");
final File expected = getFile("zstandard.testdata");
try (InputStream inputStream = new FileInputStream(input);
- InputStream expectedStream = new FileInputStream(expected);
ZstdCompressorInputStream zstdInputStream = new ZstdCompressorInputStream(inputStream)) {
final byte[] b = new byte[97];
- IOUtils.readFully(expectedStream, b);
+ IOUtils.read(expected, b);
final ByteArrayOutputStream bos = new ByteArrayOutputStream();
int readByte = -1;
while((readByte = zstdInputStream.read()) != -1) {
diff -Nru libcommons-compress-java-1.19/src/test/java/org/apache/commons/compress/OsgiITest.java libcommons-compress-java-1.20/src/test/java/org/apache/commons/compress/OsgiITest.java
--- libcommons-compress-java-1.19/src/test/java/org/apache/commons/compress/OsgiITest.java 2018-05-23 12:50:54.000000000 +0000
+++ libcommons-compress-java-1.20/src/test/java/org/apache/commons/compress/OsgiITest.java 2020-02-05 04:59:32.000000000 +0000
@@ -18,6 +18,7 @@
*/
package org.apache.commons.compress;
+import static org.junit.Assert.assertTrue;
import static org.ops4j.pax.exam.CoreOptions.bundle;
import static org.ops4j.pax.exam.CoreOptions.composite;
import static org.ops4j.pax.exam.CoreOptions.mavenBundle;
@@ -28,14 +29,25 @@
import org.ops4j.pax.exam.Configuration;
import org.ops4j.pax.exam.Option;
import org.ops4j.pax.exam.junit.PaxExam;
+import org.osgi.framework.Bundle;
+import org.osgi.framework.BundleContext;
+
+import javax.inject.Inject;
@RunWith(PaxExam.class)
public class OsgiITest {
+ private static final String EXPECTED_BUNDLE_NAME = "org.apache.commons.commons-compress";
+
+ @Inject
+ private BundleContext ctx;
+
@Configuration
public Option[] config() {
return new Option[] {
systemProperty("pax.exam.osgi.unresolved.fail").value("true"),
+ systemProperty("org.ops4j.pax.url.mvn.useFallbackRepositories").value("false"),
+ systemProperty("org.ops4j.pax.url.mvn.repositories").value("https://repo.maven.apache.org/maven2"),
mavenBundle().groupId("org.apache.felix").artifactId("org.apache.felix.scr")
.version("2.0.14"),
mavenBundle().groupId("org.apache.felix").artifactId("org.apache.felix.configadmin")
@@ -51,5 +63,18 @@
@Test
public void loadBundle() {
+ final StringBuilder bundles = new StringBuilder();
+ boolean foundCompressBundle = false, first = true;
+ for (final Bundle b : ctx.getBundles()) {
+ final String symbolicName = b.getSymbolicName();
+ foundCompressBundle |= EXPECTED_BUNDLE_NAME.equals(symbolicName);
+ if (!first) {
+ bundles.append(", ");
+ }
+ first = false;
+ bundles.append(symbolicName);
+ }
+ assertTrue("Expected to find bundle " + EXPECTED_BUNDLE_NAME + " in " + bundles,
+ foundCompressBundle);
}
}
diff -Nru libcommons-compress-java-1.19/src/test/java/org/apache/commons/compress/utils/FileNameUtilsTest.java libcommons-compress-java-1.20/src/test/java/org/apache/commons/compress/utils/FileNameUtilsTest.java
--- libcommons-compress-java-1.19/src/test/java/org/apache/commons/compress/utils/FileNameUtilsTest.java 1970-01-01 00:00:00.000000000 +0000
+++ libcommons-compress-java-1.20/src/test/java/org/apache/commons/compress/utils/FileNameUtilsTest.java 2020-01-07 14:40:25.000000000 +0000
@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.commons.compress.utils;
+
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+
+public class FileNameUtilsTest {
+
+ @Test
+ public void getExtensionBaseCases() {
+ assertEquals("foo", FileNameUtils.getExtension("a/b/c/bar.foo"));
+ assertEquals("", FileNameUtils.getExtension("foo"));
+ }
+
+ @Test
+ public void getExtensionCornerCases() {
+ assertNull(FileNameUtils.getExtension(null));
+ assertEquals("", FileNameUtils.getExtension("foo."));
+ assertEquals("foo", FileNameUtils.getExtension("bar/.foo"));
+ }
+
+ @Test
+ public void getBaseNameBaseCases() {
+ assertEquals("bar", FileNameUtils.getBaseName("a/b/c/bar.foo"));
+ assertEquals("foo", FileNameUtils.getBaseName("foo"));
+ }
+
+ @Test
+ public void getBaseNameCornerCases() {
+ assertNull(FileNameUtils.getBaseName(null));
+ assertEquals("foo", FileNameUtils.getBaseName("foo."));
+ assertEquals("", FileNameUtils.getBaseName("bar/.foo"));
+ }
+}
diff -Nru libcommons-compress-java-1.19/src/test/java/org/apache/commons/compress/utils/MultiReadOnlySeekableByteChannelTest.java libcommons-compress-java-1.20/src/test/java/org/apache/commons/compress/utils/MultiReadOnlySeekableByteChannelTest.java
--- libcommons-compress-java-1.19/src/test/java/org/apache/commons/compress/utils/MultiReadOnlySeekableByteChannelTest.java 2019-08-18 15:27:29.000000000 +0000
+++ libcommons-compress-java-1.20/src/test/java/org/apache/commons/compress/utils/MultiReadOnlySeekableByteChannelTest.java 2020-01-07 14:40:25.000000000 +0000
@@ -29,6 +29,7 @@
import java.util.List;
import org.junit.Assert;
+import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
@@ -49,7 +50,7 @@
}
@Test
- public void forSeekableByteChannelsThrowsOnNullArg() {
+ public void forSeekableByteChannelsThrowsOnNullArg() throws IOException {
thrown.expect(NullPointerException.class);
MultiReadOnlySeekableByteChannel.forSeekableByteChannels(null);
}
@@ -61,7 +62,7 @@
}
@Test
- public void forSeekableByteChannelsReturnsIdentityForSingleElement() {
+ public void forSeekableByteChannelsReturnsIdentityForSingleElement() throws IOException {
final SeekableByteChannel e = makeEmpty();
final SeekableByteChannel m = MultiReadOnlySeekableByteChannel.forSeekableByteChannels(e);
Assert.assertSame(e, m);
@@ -106,7 +107,7 @@
}
@Test
- public void closesAllAndThrowsExceptionIfCloseThrows() {
+ public void closesAllAndThrowsExceptionIfCloseThrows() throws IOException {
SeekableByteChannel[] ts = new ThrowingSeekableByteChannel[] {
new ThrowingSeekableByteChannel(),
new ThrowingSeekableByteChannel()
@@ -150,7 +151,7 @@
return new SeekableInMemoryByteChannel(arr);
}
- private SeekableByteChannel makeMulti(byte[][] arr) {
+ private SeekableByteChannel makeMulti(byte[][] arr) throws IOException {
SeekableByteChannel[] s = new SeekableByteChannel[arr.length];
for (int i = 0; i < s.length; i++) {
s[i] = makeSingle(arr[i]);
@@ -291,4 +292,96 @@
return this;
}
}
+
+ // Contract Tests added in response to https://issues.apache.org/jira/browse/COMPRESS-499
+
+ private SeekableByteChannel testChannel() {
+ return MultiReadOnlySeekableByteChannel
+ .forSeekableByteChannels(makeEmpty(), makeEmpty());
+ }
+
+ // https://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html#close()
+
+ /*
+ * If the stream is already closed then invoking this method has no effect.
+ */
+ @Test
+ public void closeIsIdempotent() throws Exception {
+ try (SeekableByteChannel c = testChannel()) {
+ c.close();
+ Assert.assertFalse(c.isOpen());
+ c.close();
+ Assert.assertFalse(c.isOpen());
+ }
+ }
+
+ // https://docs.oracle.com/javase/7/docs/api/java/nio/channels/SeekableByteChannel.html#position()
+
+ /*
+ * ClosedChannelException - If this channel is closed
+ */
+ @Test
+ @Ignore("we deliberately violate the spec")
+ public void throwsClosedChannelExceptionWhenPositionIsReadOnClosedChannel() throws Exception {
+ thrown.expect(ClosedChannelException.class);
+ try (SeekableByteChannel c = testChannel()) {
+ c.close();
+ c.position();
+ }
+ }
+
+ // https://docs.oracle.com/javase/7/docs/api/java/nio/channels/SeekableByteChannel.html#size()
+
+ /*
+ * ClosedChannelException - If this channel is closed
+ */
+ @Test
+ public void throwsClosedChannelExceptionWhenSizeIsReadOnClosedChannel() throws Exception {
+ thrown.expect(ClosedChannelException.class);
+ try (SeekableByteChannel c = testChannel()) {
+ c.close();
+ c.size();
+ }
+ }
+
+ // https://docs.oracle.com/javase/7/docs/api/java/nio/channels/SeekableByteChannel.html#position(long)
+
+ /*
+ * ClosedChannelException - If this channel is closed
+ */
+ @Test
+ public void throwsClosedChannelExceptionWhenPositionIsSetOnClosedChannel() throws Exception {
+ thrown.expect(ClosedChannelException.class);
+ try (SeekableByteChannel c = testChannel()) {
+ c.close();
+ c.position(0);
+ }
+ }
+
+ /*
+ * Setting the position to a value that is greater than the current size is legal but does not change the size of
+ * the entity. A later attempt to read bytes at such a position will immediately return an end-of-file
+ * indication
+ */
+ @Test
+ public void readingFromAPositionAfterEndReturnsEOF() throws Exception {
+ try (SeekableByteChannel c = testChannel()) {
+ c.position(2);
+ Assert.assertEquals(2, c.position());
+ ByteBuffer readBuffer = ByteBuffer.allocate(5);
+ Assert.assertEquals(-1, c.read(readBuffer));
+ }
+ }
+
+ /*
+ * IllegalArgumentException - If the new position is negative
+ */
+ @Test
+ public void throwsIllegalArgumentExceptionWhenPositionIsSetToANegativeValue() throws Exception {
+ thrown.expect(IllegalArgumentException.class);
+ try (SeekableByteChannel c = testChannel()) {
+ c.position(-1);
+ }
+ }
+
}
diff -Nru libcommons-compress-java-1.19/src/test/java/org/apache/commons/compress/utils/SeekableInMemoryByteChannelTest.java libcommons-compress-java-1.20/src/test/java/org/apache/commons/compress/utils/SeekableInMemoryByteChannelTest.java
--- libcommons-compress-java-1.19/src/test/java/org/apache/commons/compress/utils/SeekableInMemoryByteChannelTest.java 2018-05-02 20:17:13.000000000 +0000
+++ libcommons-compress-java-1.20/src/test/java/org/apache/commons/compress/utils/SeekableInMemoryByteChannelTest.java 2020-01-07 14:40:25.000000000 +0000
@@ -18,17 +18,20 @@
*/
package org.apache.commons.compress.utils;
+import org.junit.Ignore;
import org.junit.Test;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.ClosedChannelException;
+import java.nio.channels.SeekableByteChannel;
import java.nio.charset.Charset;
import java.util.Arrays;
import static org.apache.commons.compress.utils.CharsetNames.UTF_8;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
public class SeekableInMemoryByteChannelTest {
@@ -88,6 +91,7 @@
//then
assertEquals(0L, readBuffer.position());
assertEquals(-1, readCount);
+ assertEquals(-1, c.read(readBuffer));
c.close();
}
@@ -177,7 +181,7 @@
//then
assertEquals(4L, posAtFour);
assertEquals(c.size(), posAtTheEnd);
- assertEquals(posPastTheEnd, posPastTheEnd);
+ assertEquals(testData.length + 1L, posPastTheEnd);
c.close();
}
@@ -190,13 +194,223 @@
c.close();
}
- @Test(expected = ClosedChannelException.class)
- public void shouldThrowExceptionWhenSettingPositionOnClosedChannel() throws IOException {
+ @Test(expected = IllegalArgumentException.class)
+ public void shouldThrowExceptionWhenTruncatingToIncorrectSize() throws IOException {
//given
SeekableInMemoryByteChannel c = new SeekableInMemoryByteChannel();
//when
+ c.truncate(Integer.MAX_VALUE + 1L);
c.close();
- c.position(1L);
+ }
+
+ // Contract Tests added in response to https://issues.apache.org/jira/browse/COMPRESS-499
+
+ // https://docs.oracle.com/javase/7/docs/api/java/io/Closeable.html#close()
+
+ /*
+ * If the stream is already closed then invoking this method has no effect.
+ */
+ @Test
+ public void closeIsIdempotent() throws Exception {
+ try (SeekableByteChannel c = new SeekableInMemoryByteChannel()) {
+ c.close();
+ assertFalse(c.isOpen());
+ c.close();
+ assertFalse(c.isOpen());
+ }
+ }
+
+ // https://docs.oracle.com/javase/7/docs/api/java/nio/channels/SeekableByteChannel.html#position()
+
+ /*
+ * ClosedChannelException - If this channel is closed
+ */
+ @Test(expected = ClosedChannelException.class)
+ @Ignore("we deliberately violate the spec")
+ public void throwsClosedChannelExceptionWhenPositionIsReadOnClosedChannel() throws Exception {
+ try (SeekableByteChannel c = new SeekableInMemoryByteChannel()) {
+ c.close();
+ c.position();
+ }
+ }
+
+ // https://docs.oracle.com/javase/7/docs/api/java/nio/channels/SeekableByteChannel.html#size()
+
+ /*
+ * ClosedChannelException - If this channel is closed
+ */
+ @Test(expected = ClosedChannelException.class)
+ @Ignore("we deliberately violate the spec")
+ public void throwsClosedChannelExceptionWhenSizeIsReadOnClosedChannel() throws Exception {
+ try (SeekableByteChannel c = new SeekableInMemoryByteChannel()) {
+ c.close();
+ c.size();
+ }
+ }
+
+ // https://docs.oracle.com/javase/7/docs/api/java/nio/channels/SeekableByteChannel.html#position(long)
+
+ /*
+ * ClosedChannelException - If this channel is closed
+ */
+ @Test(expected = ClosedChannelException.class)
+ public void throwsClosedChannelExceptionWhenPositionIsSetOnClosedChannel() throws Exception {
+ try (SeekableByteChannel c = new SeekableInMemoryByteChannel()) {
+ c.close();
+ c.position(0);
+ }
+ }
+
+ /*
+ * Setting the position to a value that is greater than the current size is legal but does not change the size of
+ * the entity. A later attempt to read bytes at such a position will immediately return an end-of-file
+ * indication
+ */
+ @Test
+ public void readingFromAPositionAfterEndReturnsEOF() throws Exception {
+ try (SeekableByteChannel c = new SeekableInMemoryByteChannel()) {
+ c.position(2);
+ assertEquals(2, c.position());
+ ByteBuffer readBuffer = ByteBuffer.allocate(5);
+ assertEquals(-1, c.read(readBuffer));
+ }
+ }
+
+ /*
+ * Setting the position to a value that is greater than the current size is legal but does not change the size of
+ * the entity. A later attempt to write bytes at such a position will cause the entity to grow to accommodate the
+ * new bytes; the values of any bytes between the previous end-of-file and the newly-written bytes are
+ * unspecified.
+ */
+ @Test public void writingToAPositionAfterEndGrowsChannel() throws Exception {
+ try (SeekableByteChannel c = new SeekableInMemoryByteChannel()) {
+ c.position(2);
+ assertEquals(2, c.position());
+ ByteBuffer inData = ByteBuffer.wrap(testData);
+ assertEquals(testData.length, c.write(inData));
+ assertEquals(testData.length + 2, c.size());
+
+ c.position(2);
+ ByteBuffer readBuffer = ByteBuffer.allocate(testData.length);
+ c.read(readBuffer);
+ assertArrayEquals(testData, Arrays.copyOf(readBuffer.array(), testData.length));
+ }
+ }
+
+ /*
+ * IllegalArgumentException - If the new position is negative
+ */
+ @Test(expected = IllegalArgumentException.class)
+ public void throwsIllegalArgumentExceptionWhenPositionIsSetToANegativeValue() throws Exception {
+ try (SeekableByteChannel c = new SeekableInMemoryByteChannel()) {
+ c.position(-1);
+ }
+ }
+
+ // https://docs.oracle.com/javase/7/docs/api/java/nio/channels/SeekableByteChannel.html#truncate(long)
+
+ /*
+ * If the given size is greater than or equal to the current size then the entity is not modified.
+ */
+ @Test
+ public void truncateToCurrentSizeDoesntChangeAnything() throws Exception {
+ try (SeekableByteChannel c = new SeekableInMemoryByteChannel(testData)) {
+ assertEquals(testData.length, c.size());
+ c.truncate(testData.length);
+ assertEquals(testData.length, c.size());
+ ByteBuffer readBuffer = ByteBuffer.allocate(testData.length);
+ assertEquals(testData.length, c.read(readBuffer));
+ assertArrayEquals(testData, Arrays.copyOf(readBuffer.array(), testData.length));
+ }
+ }
+
+ /*
+ * If the given size is greater than or equal to the current size then the entity is not modified.
+ */
+ @Test
+ public void truncateToBiggerSizeDoesntChangeAnything() throws Exception {
+ try (SeekableByteChannel c = new SeekableInMemoryByteChannel(testData)) {
+ assertEquals(testData.length, c.size());
+ c.truncate(testData.length + 1);
+ assertEquals(testData.length, c.size());
+ ByteBuffer readBuffer = ByteBuffer.allocate(testData.length);
+ assertEquals(testData.length, c.read(readBuffer));
+ assertArrayEquals(testData, Arrays.copyOf(readBuffer.array(), testData.length));
+ }
+ }
+
+ /*
+ * In either case, if the current position is greater than the given size then it is set to that size.
+ */
+ @Test
+ public void truncateDoesntChangeSmallPosition() throws Exception {
+ try (SeekableByteChannel c = new SeekableInMemoryByteChannel(testData)) {
+ c.position(1);
+ c.truncate(testData.length - 1);
+ assertEquals(testData.length - 1, c.size());
+ assertEquals(1, c.position());
+ }
+ }
+
+ /*
+ * In either case, if the current position is greater than the given size then it is set to that size.
+ */
+ @Test
+ public void truncateMovesPositionWhenShrinkingBeyondPosition() throws Exception {
+ try (SeekableByteChannel c = new SeekableInMemoryByteChannel(testData)) {
+ c.position(4);
+ c.truncate(3);
+ assertEquals(3, c.size());
+ assertEquals(3, c.position());
+ }
+ }
+
+ /*
+ * In either case, if the current position is greater than the given size then it is set to that size.
+ */
+ @Test
+ public void truncateMovesPositionWhenNotResizingButPositionBiggerThanSize() throws Exception {
+ try (SeekableByteChannel c = new SeekableInMemoryByteChannel(testData)) {
+ c.position(2 * testData.length);
+ c.truncate(testData.length);
+ assertEquals(testData.length, c.size());
+ assertEquals(testData.length, c.position());
+ }
+ }
+
+ /*
+ * In either case, if the current position is greater than the given size then it is set to that size.
+ */
+ @Test
+ public void truncateMovesPositionWhenNewSizeIsBiggerThanSizeAndPositionIsEvenBigger() throws Exception {
+ try (SeekableByteChannel c = new SeekableInMemoryByteChannel(testData)) {
+ c.position(2 * testData.length);
+ c.truncate(testData.length + 1);
+ assertEquals(testData.length, c.size());
+ assertEquals(testData.length + 1, c.position());
+ }
+ }
+
+ /*
+ * IllegalArgumentException - If the new position is negative
+ */
+ @Test(expected = IllegalArgumentException.class)
+ public void throwsIllegalArgumentExceptionWhenTruncatingToANegativeSize() throws Exception {
+ try (SeekableByteChannel c = new SeekableInMemoryByteChannel()) {
+ c.truncate(-1);
+ }
+ }
+
+ /*
+ * ClosedChannelException - If this channel is closed
+ */
+ @Test(expected = ClosedChannelException.class)
+ @Ignore("we deliberately violate the spec")
+ public void throwsClosedChannelExceptionWhenTruncateIsCalledOnClosedChannel() throws Exception {
+ try (SeekableByteChannel c = new SeekableInMemoryByteChannel()) {
+ c.close();
+ c.truncate(0);
+ }
}
}
diff -Nru libcommons-compress-java-1.19/src/test/java/org/apache/commons/compress/utils/ZipSplitReadOnlySeekableByteChannelTest.java libcommons-compress-java-1.20/src/test/java/org/apache/commons/compress/utils/ZipSplitReadOnlySeekableByteChannelTest.java
--- libcommons-compress-java-1.19/src/test/java/org/apache/commons/compress/utils/ZipSplitReadOnlySeekableByteChannelTest.java 1970-01-01 00:00:00.000000000 +0000
+++ libcommons-compress-java-1.20/src/test/java/org/apache/commons/compress/utils/ZipSplitReadOnlySeekableByteChannelTest.java 2020-01-07 14:40:25.000000000 +0000
@@ -0,0 +1,181 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.commons.compress.utils;
+
+import org.apache.commons.compress.archivers.zip.ZipSplitReadOnlySeekableByteChannel;
+import org.junit.Assert;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.channels.SeekableByteChannel;
+import java.nio.file.Files;
+import java.nio.file.StandardOpenOption;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Random;
+
+import static org.apache.commons.compress.AbstractTestCase.getFile;
+
+public class ZipSplitReadOnlySeekableByteChannelTest {
+ @Rule
+ public ExpectedException thrown = ExpectedException.none();
+
+ @Test
+ public void constructorThrowsOnNullArg() throws IOException {
+ thrown.expect(NullPointerException.class);
+ new ZipSplitReadOnlySeekableByteChannel(null);
+ }
+
+ @Test
+ public void constructorThrowsOnNonSplitZipFiles() throws IOException {
+ thrown.expect(IOException.class);
+ List channels = new ArrayList<>();
+ File file = getFile("COMPRESS-189.zip");
+ channels.add(Files.newByteChannel(file.toPath(), StandardOpenOption.READ));
+ new ZipSplitReadOnlySeekableByteChannel(channels);
+ }
+
+ @Test
+ public void channelsPositionIsZeroAfterConstructor() throws IOException {
+ List channels = getSplitZipChannels();
+ new ZipSplitReadOnlySeekableByteChannel(channels);
+ for (SeekableByteChannel channel : channels) {
+ Assert.assertEquals(0, channel.position());
+ }
+ }
+
+ @Test
+ public void forOrderedSeekableByteChannelsThrowsOnNullArg() throws IOException {
+ thrown.expect(NullPointerException.class);
+ ZipSplitReadOnlySeekableByteChannel.forOrderedSeekableByteChannels(null);
+ }
+
+ @Test
+ public void forOrderedSeekableByteChannelsOfTwoParametersThrowsOnNullArg() throws IOException {
+ thrown.expect(NullPointerException.class);
+ ZipSplitReadOnlySeekableByteChannel.forOrderedSeekableByteChannels(null, null);
+ }
+
+ @Test
+ public void forOrderedSeekableByteChannelsReturnCorrectClass() throws IOException {
+ File file1 = getFile("COMPRESS-477/split_zip_created_by_zip/split_zip_created_by_zip.z01");
+ SeekableByteChannel firstChannel = Files.newByteChannel(file1.toPath(), StandardOpenOption.READ);
+
+ File file2 = getFile("COMPRESS-477/split_zip_created_by_zip/split_zip_created_by_zip.z02");
+ SeekableByteChannel secondChannel = Files.newByteChannel(file2.toPath(), StandardOpenOption.READ);
+
+ File lastFile = getFile("COMPRESS-477/split_zip_created_by_zip/split_zip_created_by_zip.zip");
+ SeekableByteChannel lastChannel = Files.newByteChannel(lastFile.toPath(), StandardOpenOption.READ);
+
+ List channels = new ArrayList<>();
+ channels.add(firstChannel);
+ channels.add(secondChannel);
+
+ SeekableByteChannel channel = ZipSplitReadOnlySeekableByteChannel.forOrderedSeekableByteChannels(lastChannel, channels);
+ Assert.assertTrue(channel instanceof ZipSplitReadOnlySeekableByteChannel);
+
+ channel = ZipSplitReadOnlySeekableByteChannel.forOrderedSeekableByteChannels(firstChannel, secondChannel, lastChannel);
+ Assert.assertTrue(channel instanceof ZipSplitReadOnlySeekableByteChannel);
+ }
+
+ @Test
+ public void forOrderedSeekableByteChannelsReturnsIdentityForSingleElement() throws IOException {
+ SeekableByteChannel emptyChannel = new SeekableInMemoryByteChannel(new byte[0]);
+ final SeekableByteChannel channel = ZipSplitReadOnlySeekableByteChannel.forOrderedSeekableByteChannels(emptyChannel);
+ Assert.assertSame(emptyChannel, channel);
+ }
+
+ @Test
+ public void forFilesThrowsOnNullArg() throws IOException {
+ thrown.expect(NullPointerException.class);
+ ZipSplitReadOnlySeekableByteChannel.forFiles(null);
+ }
+
+ @Test
+ public void forFilesOfTwoParametersThrowsOnNullArg() throws IOException {
+ thrown.expect(NullPointerException.class);
+ ZipSplitReadOnlySeekableByteChannel.forFiles(null, null);
+ }
+
+ @Test
+ public void forFilesReturnCorrectClass() throws IOException {
+ File firstFile = getFile("COMPRESS-477/split_zip_created_by_zip/split_zip_created_by_zip.z01");
+ File secondFile = getFile("COMPRESS-477/split_zip_created_by_zip/split_zip_created_by_zip.z02");
+ File lastFile = getFile("COMPRESS-477/split_zip_created_by_zip/split_zip_created_by_zip.zip");
+
+ ArrayList list = new ArrayList<>();
+ list.add(firstFile);
+ list.add(secondFile);
+
+ SeekableByteChannel channel = ZipSplitReadOnlySeekableByteChannel.forFiles(lastFile, list);
+ Assert.assertTrue(channel instanceof ZipSplitReadOnlySeekableByteChannel);
+
+ channel = ZipSplitReadOnlySeekableByteChannel.forFiles(firstFile, secondFile, lastFile);
+ Assert.assertTrue(channel instanceof ZipSplitReadOnlySeekableByteChannel);
+ }
+
+ @Test
+ public void buildFromLastSplitSegmentThrowsOnNotZipFile() throws IOException {
+ thrown.expect(IllegalArgumentException.class);
+ File lastFile = getFile("COMPRESS-477/split_zip_created_by_zip/split_zip_created_by_zip.z01");
+ ZipSplitReadOnlySeekableByteChannel.buildFromLastSplitSegment(lastFile);
+ }
+
+ @Test
+ public void positionToSomeZipSplitSegment() throws IOException {
+ File firstFile = getFile("COMPRESS-477/split_zip_created_by_zip/split_zip_created_by_zip.z01");
+ int firstFileSize = (int) firstFile.length();
+
+ File secondFile = getFile("COMPRESS-477/split_zip_created_by_zip/split_zip_created_by_zip.z02");
+ int secondFileSize = (int) secondFile.length();
+
+ File lastFile = getFile("COMPRESS-477/split_zip_created_by_zip/split_zip_created_by_zip.zip");
+ int lastFileSize = (int) lastFile.length();
+
+ Random random = new Random();
+ int randomDiskNumber = random.nextInt(3);
+ int randomOffset = randomDiskNumber < 2 ? random.nextInt(firstFileSize) : random.nextInt(lastFileSize);
+
+ ZipSplitReadOnlySeekableByteChannel channel = (ZipSplitReadOnlySeekableByteChannel) ZipSplitReadOnlySeekableByteChannel.buildFromLastSplitSegment(lastFile);
+ channel.position(randomDiskNumber, randomOffset);
+ long expectedPosition = randomOffset;
+
+ expectedPosition += randomDiskNumber > 0 ? firstFileSize : 0;
+ expectedPosition += randomDiskNumber > 1 ? secondFileSize : 0;
+
+ Assert.assertEquals(expectedPosition, channel.position());
+ }
+
+ private List getSplitZipChannels() throws IOException {
+ List channels = new ArrayList<>();
+ File file1 = getFile("COMPRESS-477/split_zip_created_by_zip/split_zip_created_by_zip.z01");
+ channels.add(Files.newByteChannel(file1.toPath(), StandardOpenOption.READ));
+
+ File file2 = getFile("COMPRESS-477/split_zip_created_by_zip/split_zip_created_by_zip.z02");
+ channels.add(Files.newByteChannel(file2.toPath(), StandardOpenOption.READ));
+
+ File lastFile = getFile("COMPRESS-477/split_zip_created_by_zip/split_zip_created_by_zip.zip");
+ channels.add(Files.newByteChannel(lastFile.toPath(), StandardOpenOption.READ));
+
+ return channels;
+ }
+}
Binary files /tmp/tmpLFVd1I/ijW45TajB9/libcommons-compress-java-1.19/src/test/resources/bla.noendheaderoffset.7z and /tmp/tmpLFVd1I/uir1NM4tP0/libcommons-compress-java-1.20/src/test/resources/bla.noendheaderoffset.7z differ
diff -Nru libcommons-compress-java-1.19/src/test/resources/COMPRESS-477/split_zip_created_by_winrar/file_to_compare_1 libcommons-compress-java-1.20/src/test/resources/COMPRESS-477/split_zip_created_by_winrar/file_to_compare_1
--- libcommons-compress-java-1.19/src/test/resources/COMPRESS-477/split_zip_created_by_winrar/file_to_compare_1 1970-01-01 00:00:00.000000000 +0000
+++ libcommons-compress-java-1.20/src/test/resources/COMPRESS-477/split_zip_created_by_winrar/file_to_compare_1 2020-01-07 14:40:25.000000000 +0000
@@ -0,0 +1,1297 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.commons.compress.archivers.zip;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.EOFException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.PushbackInputStream;
+import java.math.BigInteger;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import java.util.zip.CRC32;
+import java.util.zip.DataFormatException;
+import java.util.zip.Inflater;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipException;
+
+import org.apache.commons.compress.archivers.ArchiveEntry;
+import org.apache.commons.compress.archivers.ArchiveInputStream;
+import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream;
+import org.apache.commons.compress.compressors.deflate64.Deflate64CompressorInputStream;
+import org.apache.commons.compress.utils.ArchiveUtils;
+import org.apache.commons.compress.utils.IOUtils;
+import org.apache.commons.compress.utils.InputStreamStatistics;
+
+import static org.apache.commons.compress.archivers.zip.ZipConstants.DWORD;
+import static org.apache.commons.compress.archivers.zip.ZipConstants.SHORT;
+import static org.apache.commons.compress.archivers.zip.ZipConstants.WORD;
+import static org.apache.commons.compress.archivers.zip.ZipConstants.ZIP64_MAGIC;
+
+/**
+ * Implements an input stream that can read Zip archives.
+ *
+ * As of Apache Commons Compress it transparently supports Zip64
+ * extensions and thus individual entries and archives larger than 4
+ * GB or with more than 65536 entries.
+ *
+ * The {@link ZipFile} class is preferred when reading from files
+ * as {@link ZipArchiveInputStream} is limited by not being able to
+ * read the central directory header before returning entries. In
+ * particular {@link ZipArchiveInputStream}
+ *
+ *
+ *
+ * - may return entries that are not part of the central directory
+ * at all and shouldn't be considered part of the archive.
+ *
+ * - may return several entries with the same name.
+ *
+ * - will not return internal or external attributes.
+ *
+ * - may return incomplete extra field data.
+ *
+ * - may return unknown sizes and CRC values for entries until the
+ * next entry has been reached if the archive uses the data
+ * descriptor feature.
+ *
+ *
+ *
+ * @see ZipFile
+ * @NotThreadSafe
+ */
+public class ZipArchiveInputStream extends ArchiveInputStream implements InputStreamStatistics {
+
+ /** The zip encoding to use for file names and the file comment. */
+ private final ZipEncoding zipEncoding;
+
+ // the provided encoding (for unit tests)
+ final String encoding;
+
+ /** Whether to look for and use Unicode extra fields. */
+ private final boolean useUnicodeExtraFields;
+
+ /** Wrapped stream, will always be a PushbackInputStream. */
+ private final InputStream in;
+
+ /** Inflater used for all deflated entries. */
+ private final Inflater inf = new Inflater(true);
+
+ /** Buffer used to read from the wrapped stream. */
+ private final ByteBuffer buf = ByteBuffer.allocate(ZipArchiveOutputStream.BUFFER_SIZE);
+
+ /** The entry that is currently being read. */
+ private CurrentEntry current = null;
+
+ /** Whether the stream has been closed. */
+ private boolean closed = false;
+
+ /** Whether the stream has reached the central directory - and thus found all entries. */
+ private boolean hitCentralDirectory = false;
+
+ /**
+ * When reading a stored entry that uses the data descriptor this
+ * stream has to read the full entry and caches it. This is the
+ * cache.
+ */
+ private ByteArrayInputStream lastStoredEntry = null;
+
+ /** Whether the stream will try to read STORED entries that use a data descriptor. */
+ private boolean allowStoredEntriesWithDataDescriptor = false;
+
+ /** Count decompressed bytes for current entry */
+ private long uncompressedCount = 0;
+
+ private static final int LFH_LEN = 30;
+ /*
+ local file header signature WORD
+ version needed to extract SHORT
+ general purpose bit flag SHORT
+ compression method SHORT
+ last mod file time SHORT
+ last mod file date SHORT
+ crc-32 WORD
+ compressed size WORD
+ uncompressed size WORD
+ file name length SHORT
+ extra field length SHORT
+ */
+
+ private static final int CFH_LEN = 46;
+ /*
+ central file header signature WORD
+ version made by SHORT
+ version needed to extract SHORT
+ general purpose bit flag SHORT
+ compression method SHORT
+ last mod file time SHORT
+ last mod file date SHORT
+ crc-32 WORD
+ compressed size WORD
+ uncompressed size WORD
+ file name length SHORT
+ extra field length SHORT
+ file comment length SHORT
+ disk number start SHORT
+ internal file attributes SHORT
+ external file attributes WORD
+ relative offset of local header WORD
+ */
+
+ private static final long TWO_EXP_32 = ZIP64_MAGIC + 1;
+
+ // cached buffers - must only be used locally in the class (COMPRESS-172 - reduce garbage collection)
+ private final byte[] lfhBuf = new byte[LFH_LEN];
+ private final byte[] skipBuf = new byte[1024];
+ private final byte[] shortBuf = new byte[SHORT];
+ private final byte[] wordBuf = new byte[WORD];
+ private final byte[] twoDwordBuf = new byte[2 * DWORD];
+
+ private int entriesRead = 0;
+
+ /**
+ * Create an instance using UTF-8 encoding
+ * @param inputStream the stream to wrap
+ */
+ public ZipArchiveInputStream(final InputStream inputStream) {
+ this(inputStream, ZipEncodingHelper.UTF8);
+ }
+
+ /**
+ * Create an instance using the specified encoding
+ * @param inputStream the stream to wrap
+ * @param encoding the encoding to use for file names, use null
+ * for the platform's default encoding
+ * @since 1.5
+ */
+ public ZipArchiveInputStream(final InputStream inputStream, final String encoding) {
+ this(inputStream, encoding, true);
+ }
+
+ /**
+ * Create an instance using the specified encoding
+ * @param inputStream the stream to wrap
+ * @param encoding the encoding to use for file names, use null
+ * for the platform's default encoding
+ * @param useUnicodeExtraFields whether to use InfoZIP Unicode
+ * Extra Fields (if present) to set the file names.
+ */
+ public ZipArchiveInputStream(final InputStream inputStream, final String encoding, final boolean useUnicodeExtraFields) {
+ this(inputStream, encoding, useUnicodeExtraFields, false);
+ }
+
+ /**
+ * Create an instance using the specified encoding
+ * @param inputStream the stream to wrap
+ * @param encoding the encoding to use for file names, use null
+ * for the platform's default encoding
+ * @param useUnicodeExtraFields whether to use InfoZIP Unicode
+ * Extra Fields (if present) to set the file names.
+ * @param allowStoredEntriesWithDataDescriptor whether the stream
+ * will try to read STORED entries that use a data descriptor
+ * @since 1.1
+ */
+ public ZipArchiveInputStream(final InputStream inputStream,
+ final String encoding,
+ final boolean useUnicodeExtraFields,
+ final boolean allowStoredEntriesWithDataDescriptor) {
+ this.encoding = encoding;
+ zipEncoding = ZipEncodingHelper.getZipEncoding(encoding);
+ this.useUnicodeExtraFields = useUnicodeExtraFields;
+ in = new PushbackInputStream(inputStream, buf.capacity());
+ this.allowStoredEntriesWithDataDescriptor =
+ allowStoredEntriesWithDataDescriptor;
+ // haven't read anything so far
+ buf.limit(0);
+ }
+
+ public ZipArchiveEntry getNextZipEntry() throws IOException {
+ uncompressedCount = 0;
+
+ boolean firstEntry = true;
+ if (closed || hitCentralDirectory) {
+ return null;
+ }
+ if (current != null) {
+ closeEntry();
+ firstEntry = false;
+ }
+
+ long currentHeaderOffset = getBytesRead();
+ try {
+ if (firstEntry) {
+ // split archives have a special signature before the
+ // first local file header - look for it and fail with
+ // the appropriate error message if this is a split
+ // archive.
+ readFirstLocalFileHeader(lfhBuf);
+ } else {
+ readFully(lfhBuf);
+ }
+ } catch (final EOFException e) { //NOSONAR
+ return null;
+ }
+
+ final ZipLong sig = new ZipLong(lfhBuf);
+ if (!sig.equals(ZipLong.LFH_SIG)) {
+ if (sig.equals(ZipLong.CFH_SIG) || sig.equals(ZipLong.AED_SIG) || isApkSigningBlock(lfhBuf)) {
+ hitCentralDirectory = true;
+ skipRemainderOfArchive();
+ return null;
+ }
+ throw new ZipException(String.format("Unexpected record signature: 0X%X", sig.getValue()));
+ }
+
+ int off = WORD;
+ current = new CurrentEntry();
+
+ final int versionMadeBy = ZipShort.getValue(lfhBuf, off);
+ off += SHORT;
+ current.entry.setPlatform((versionMadeBy >> ZipFile.BYTE_SHIFT) & ZipFile.NIBLET_MASK);
+
+ final GeneralPurposeBit gpFlag = GeneralPurposeBit.parse(lfhBuf, off);
+ final boolean hasUTF8Flag = gpFlag.usesUTF8ForNames();
+ final ZipEncoding entryEncoding = hasUTF8Flag ? ZipEncodingHelper.UTF8_ZIP_ENCODING : zipEncoding;
+ current.hasDataDescriptor = gpFlag.usesDataDescriptor();
+ current.entry.setGeneralPurposeBit(gpFlag);
+
+ off += SHORT;
+
+ current.entry.setMethod(ZipShort.getValue(lfhBuf, off));
+ off += SHORT;
+
+ final long time = ZipUtil.dosToJavaTime(ZipLong.getValue(lfhBuf, off));
+ current.entry.setTime(time);
+ off += WORD;
+
+ ZipLong size = null, cSize = null;
+ if (!current.hasDataDescriptor) {
+ current.entry.setCrc(ZipLong.getValue(lfhBuf, off));
+ off += WORD;
+
+ cSize = new ZipLong(lfhBuf, off);
+ off += WORD;
+
+ size = new ZipLong(lfhBuf, off);
+ off += WORD;
+ } else {
+ off += 3 * WORD;
+ }
+
+ final int fileNameLen = ZipShort.getValue(lfhBuf, off);
+
+ off += SHORT;
+
+ final int extraLen = ZipShort.getValue(lfhBuf, off);
+ off += SHORT; // NOSONAR - assignment as documentation
+
+ final byte[] fileName = new byte[fileNameLen];
+ readFully(fileName);
+ current.entry.setName(entryEncoding.decode(fileName), fileName);
+ if (hasUTF8Flag) {
+ current.entry.setNameSource(ZipArchiveEntry.NameSource.NAME_WITH_EFS_FLAG);
+ }
+
+ final byte[] extraData = new byte[extraLen];
+ readFully(extraData);
+ current.entry.setExtra(extraData);
+
+ if (!hasUTF8Flag && useUnicodeExtraFields) {
+ ZipUtil.setNameAndCommentFromExtraFields(current.entry, fileName, null);
+ }
+
+ processZip64Extra(size, cSize);
+
+ current.entry.setLocalHeaderOffset(currentHeaderOffset);
+ current.entry.setDataOffset(getBytesRead());
+ current.entry.setStreamContiguous(true);
+
+ ZipMethod m = ZipMethod.getMethodByCode(current.entry.getMethod());
+ if (current.entry.getCompressedSize() != ArchiveEntry.SIZE_UNKNOWN) {
+ if (ZipUtil.canHandleEntryData(current.entry) && m != ZipMethod.STORED && m != ZipMethod.DEFLATED) {
+ InputStream bis = new BoundedInputStream(in, current.entry.getCompressedSize());
+ switch (m) {
+ case UNSHRINKING:
+ current.in = new UnshrinkingInputStream(bis);
+ break;
+ case IMPLODING:
+ current.in = new ExplodingInputStream(
+ current.entry.getGeneralPurposeBit().getSlidingDictionarySize(),
+ current.entry.getGeneralPurposeBit().getNumberOfShannonFanoTrees(),
+ bis);
+ break;
+ case BZIP2:
+ current.in = new BZip2CompressorInputStream(bis);
+ break;
+ case ENHANCED_DEFLATED:
+ current.in = new Deflate64CompressorInputStream(bis);
+ break;
+ default:
+ // we should never get here as all supported methods have been covered
+ // will cause an error when read is invoked, don't throw an exception here so people can
+ // skip unsupported entries
+ break;
+ }
+ }
+ } else if (m == ZipMethod.ENHANCED_DEFLATED) {
+ current.in = new Deflate64CompressorInputStream(in);
+ }
+
+ entriesRead++;
+ return current.entry;
+ }
+
+ /**
+ * Fills the given array with the first local file header and
+ * deals with splitting/spanning markers that may prefix the first
+ * LFH.
+ */
+ private void readFirstLocalFileHeader(final byte[] lfh) throws IOException {
+ readFully(lfh);
+ final ZipLong sig = new ZipLong(lfh);
+ if (sig.equals(ZipLong.DD_SIG)) {
+ throw new UnsupportedZipFeatureException(UnsupportedZipFeatureException.Feature.SPLITTING);
+ }
+
+ if (sig.equals(ZipLong.SINGLE_SEGMENT_SPLIT_MARKER)) {
+ // The archive is not really split as only one segment was
+ // needed in the end. Just skip over the marker.
+ final byte[] missedLfhBytes = new byte[4];
+ readFully(missedLfhBytes);
+ System.arraycopy(lfh, 4, lfh, 0, LFH_LEN - 4);
+ System.arraycopy(missedLfhBytes, 0, lfh, LFH_LEN - 4, 4);
+ }
+ }
+
+ /**
+ * Records whether a Zip64 extra is present and sets the size
+ * information from it if sizes are 0xFFFFFFFF and the entry
+ * doesn't use a data descriptor.
+ */
+ private void processZip64Extra(final ZipLong size, final ZipLong cSize) {
+ final Zip64ExtendedInformationExtraField z64 =
+ (Zip64ExtendedInformationExtraField)
+ current.entry.getExtraField(Zip64ExtendedInformationExtraField.HEADER_ID);
+ current.usesZip64 = z64 != null;
+ if (!current.hasDataDescriptor) {
+ if (z64 != null // same as current.usesZip64 but avoids NPE warning
+ && (ZipLong.ZIP64_MAGIC.equals(cSize) || ZipLong.ZIP64_MAGIC.equals(size)) ) {
+ current.entry.setCompressedSize(z64.getCompressedSize().getLongValue());
+ current.entry.setSize(z64.getSize().getLongValue());
+ } else if (cSize != null && size != null) {
+ current.entry.setCompressedSize(cSize.getValue());
+ current.entry.setSize(size.getValue());
+ }
+ }
+ }
+
+ @Override
+ public ArchiveEntry getNextEntry() throws IOException {
+ return getNextZipEntry();
+ }
+
+ /**
+ * Whether this class is able to read the given entry.
+ *
+ * May return false if it is set up to use encryption or a
+ * compression method that hasn't been implemented yet.
+ * @since 1.1
+ */
+ @Override
+ public boolean canReadEntryData(final ArchiveEntry ae) {
+ if (ae instanceof ZipArchiveEntry) {
+ final ZipArchiveEntry ze = (ZipArchiveEntry) ae;
+ return ZipUtil.canHandleEntryData(ze)
+ && supportsDataDescriptorFor(ze)
+ && supportsCompressedSizeFor(ze);
+ }
+ return false;
+ }
+
+ @Override
+ public int read(final byte[] buffer, final int offset, final int length) throws IOException {
+ if (length == 0) {
+ return 0;
+ }
+ if (closed) {
+ throw new IOException("The stream is closed");
+ }
+
+ if (current == null) {
+ return -1;
+ }
+
+ // avoid int overflow, check null buffer
+ if (offset > buffer.length || length < 0 || offset < 0 || buffer.length - offset < length) {
+ throw new ArrayIndexOutOfBoundsException();
+ }
+
+ ZipUtil.checkRequestedFeatures(current.entry);
+ if (!supportsDataDescriptorFor(current.entry)) {
+ throw new UnsupportedZipFeatureException(UnsupportedZipFeatureException.Feature.DATA_DESCRIPTOR,
+ current.entry);
+ }
+ if (!supportsCompressedSizeFor(current.entry)) {
+ throw new UnsupportedZipFeatureException(UnsupportedZipFeatureException.Feature.UNKNOWN_COMPRESSED_SIZE,
+ current.entry);
+ }
+
+ int read;
+ if (current.entry.getMethod() == ZipArchiveOutputStream.STORED) {
+ read = readStored(buffer, offset, length);
+ } else if (current.entry.getMethod() == ZipArchiveOutputStream.DEFLATED) {
+ read = readDeflated(buffer, offset, length);
+ } else if (current.entry.getMethod() == ZipMethod.UNSHRINKING.getCode()
+ || current.entry.getMethod() == ZipMethod.IMPLODING.getCode()
+ || current.entry.getMethod() == ZipMethod.ENHANCED_DEFLATED.getCode()
+ || current.entry.getMethod() == ZipMethod.BZIP2.getCode()) {
+ read = current.in.read(buffer, offset, length);
+ } else {
+ throw new UnsupportedZipFeatureException(ZipMethod.getMethodByCode(current.entry.getMethod()),
+ current.entry);
+ }
+
+ if (read >= 0) {
+ current.crc.update(buffer, offset, read);
+ uncompressedCount += read;
+ }
+
+ return read;
+ }
+
+ /**
+ * @since 1.17
+ */
+ @Override
+ public long getCompressedCount() {
+ if (current.entry.getMethod() == ZipArchiveOutputStream.STORED) {
+ return current.bytesRead;
+ } else if (current.entry.getMethod() == ZipArchiveOutputStream.DEFLATED) {
+ return getBytesInflated();
+ } else if (current.entry.getMethod() == ZipMethod.UNSHRINKING.getCode()) {
+ return ((UnshrinkingInputStream) current.in).getCompressedCount();
+ } else if (current.entry.getMethod() == ZipMethod.IMPLODING.getCode()) {
+ return ((ExplodingInputStream) current.in).getCompressedCount();
+ } else if (current.entry.getMethod() == ZipMethod.ENHANCED_DEFLATED.getCode()) {
+ return ((Deflate64CompressorInputStream) current.in).getCompressedCount();
+ } else if (current.entry.getMethod() == ZipMethod.BZIP2.getCode()) {
+ return ((BZip2CompressorInputStream) current.in).getCompressedCount();
+ } else {
+ return -1;
+ }
+ }
+
+ /**
+ * @since 1.17
+ */
+ @Override
+ public long getUncompressedCount() {
+ return uncompressedCount;
+ }
+
+ /**
+ * Implementation of read for STORED entries.
+ */
+ private int readStored(final byte[] buffer, final int offset, final int length) throws IOException {
+
+ if (current.hasDataDescriptor) {
+ if (lastStoredEntry == null) {
+ readStoredEntry();
+ }
+ return lastStoredEntry.read(buffer, offset, length);
+ }
+
+ final long csize = current.entry.getSize();
+ if (current.bytesRead >= csize) {
+ return -1;
+ }
+
+ if (buf.position() >= buf.limit()) {
+ buf.position(0);
+ final int l = in.read(buf.array());
+ if (l == -1) {
+ buf.limit(0);
+ throw new IOException("Truncated ZIP file");
+ }
+ buf.limit(l);
+
+ count(l);
+ current.bytesReadFromStream += l;
+ }
+
+ int toRead = Math.min(buf.remaining(), length);
+ if ((csize - current.bytesRead) < toRead) {
+ // if it is smaller than toRead then it fits into an int
+ toRead = (int) (csize - current.bytesRead);
+ }
+ buf.get(buffer, offset, toRead);
+ current.bytesRead += toRead;
+ return toRead;
+ }
+
+ /**
+ * Implementation of read for DEFLATED entries.
+ */
+ private int readDeflated(final byte[] buffer, final int offset, final int length) throws IOException {
+ final int read = readFromInflater(buffer, offset, length);
+ if (read <= 0) {
+ if (inf.finished()) {
+ return -1;
+ } else if (inf.needsDictionary()) {
+ throw new ZipException("This archive needs a preset dictionary"
+ + " which is not supported by Commons"
+ + " Compress.");
+ } else if (read == -1) {
+ throw new IOException("Truncated ZIP file");
+ }
+ }
+ return read;
+ }
+
+ /**
+ * Potentially reads more bytes to fill the inflater's buffer and
+ * reads from it.
+ */
+ private int readFromInflater(final byte[] buffer, final int offset, final int length) throws IOException {
+ int read = 0;
+ do {
+ if (inf.needsInput()) {
+ final int l = fill();
+ if (l > 0) {
+ current.bytesReadFromStream += buf.limit();
+ } else if (l == -1) {
+ return -1;
+ } else {
+ break;
+ }
+ }
+ try {
+ read = inf.inflate(buffer, offset, length);
+ } catch (final DataFormatException e) {
+ throw (IOException) new ZipException(e.getMessage()).initCause(e);
+ }
+ } while (read == 0 && inf.needsInput());
+ return read;
+ }
+
+ @Override
+ public void close() throws IOException {
+ if (!closed) {
+ closed = true;
+ try {
+ in.close();
+ } finally {
+ inf.end();
+ }
+ }
+ }
+
+ /**
+ * Skips over and discards value bytes of data from this input
+ * stream.
+ *
+ * This implementation may end up skipping over some smaller
+ * number of bytes, possibly 0, if and only if it reaches the end
+ * of the underlying stream.
+ *
+ * The actual number of bytes skipped is returned.
+ *
+ * @param value the number of bytes to be skipped.
+ * @return the actual number of bytes skipped.
+ * @throws IOException - if an I/O error occurs.
+ * @throws IllegalArgumentException - if value is negative.
+ */
+ @Override
+ public long skip(final long value) throws IOException {
+ if (value >= 0) {
+ long skipped = 0;
+ while (skipped < value) {
+ final long rem = value - skipped;
+ final int x = read(skipBuf, 0, (int) (skipBuf.length > rem ? rem : skipBuf.length));
+ if (x == -1) {
+ return skipped;
+ }
+ skipped += x;
+ }
+ return skipped;
+ }
+ throw new IllegalArgumentException();
+ }
+
+ /**
+ * Checks if the signature matches what is expected for a zip file.
+ * Does not currently handle self-extracting zips which may have arbitrary
+ * leading content.
+ *
+ * @param signature the bytes to check
+ * @param length the number of bytes to check
+ * @return true, if this stream is a zip archive stream, false otherwise
+ */
+ public static boolean matches(final byte[] signature, final int length) {
+ if (length < ZipArchiveOutputStream.LFH_SIG.length) {
+ return false;
+ }
+
+ return checksig(signature, ZipArchiveOutputStream.LFH_SIG) // normal file
+ || checksig(signature, ZipArchiveOutputStream.EOCD_SIG) // empty zip
+ || checksig(signature, ZipArchiveOutputStream.DD_SIG) // split zip
+ || checksig(signature, ZipLong.SINGLE_SEGMENT_SPLIT_MARKER.getBytes());
+ }
+
+ private static boolean checksig(final byte[] signature, final byte[] expected) {
+ for (int i = 0; i < expected.length; i++) {
+ if (signature[i] != expected[i]) {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ /**
+ * Closes the current ZIP archive entry and positions the underlying
+ * stream to the beginning of the next entry. All per-entry variables
+ * and data structures are cleared.
+ *
+ * If the compressed size of this entry is included in the entry header,
+ * then any outstanding bytes are simply skipped from the underlying
+ * stream without uncompressing them. This allows an entry to be safely
+ * closed even if the compression method is unsupported.
+ *
+ * In case we don't know the compressed size of this entry or have
+ * already buffered too much data from the underlying stream to support
+ * uncompression, then the uncompression process is completed and the
+ * end position of the stream is adjusted based on the result of that
+ * process.
+ *
+ * @throws IOException if an error occurs
+ */
+ private void closeEntry() throws IOException {
+ if (closed) {
+ throw new IOException("The stream is closed");
+ }
+ if (current == null) {
+ return;
+ }
+
+ // Ensure all entry bytes are read
+ if (currentEntryHasOutstandingBytes()) {
+ drainCurrentEntryData();
+ } else {
+ // this is guaranteed to exhaust the stream
+ skip(Long.MAX_VALUE); //NOSONAR
+
+ final long inB = current.entry.getMethod() == ZipArchiveOutputStream.DEFLATED
+ ? getBytesInflated() : current.bytesRead;
+
+ // this is at most a single read() operation and can't
+ // exceed the range of int
+ final int diff = (int) (current.bytesReadFromStream - inB);
+
+ // Pushback any required bytes
+ if (diff > 0) {
+ pushback(buf.array(), buf.limit() - diff, diff);
+ current.bytesReadFromStream -= diff;
+ }
+
+ // Drain remainder of entry if not all data bytes were required
+ if (currentEntryHasOutstandingBytes()) {
+ drainCurrentEntryData();
+ }
+ }
+
+ if (lastStoredEntry == null && current.hasDataDescriptor) {
+ readDataDescriptor();
+ }
+
+ inf.reset();
+ buf.clear().flip();
+ current = null;
+ lastStoredEntry = null;
+ }
+
+ /**
+ * If the compressed size of the current entry is included in the entry header
+ * and there are any outstanding bytes in the underlying stream, then
+ * this returns true.
+ *
+ * @return true, if current entry is determined to have outstanding bytes, false otherwise
+ */
+ private boolean currentEntryHasOutstandingBytes() {
+ return current.bytesReadFromStream <= current.entry.getCompressedSize()
+ && !current.hasDataDescriptor;
+ }
+
+ /**
+ * Read all data of the current entry from the underlying stream
+ * that hasn't been read, yet.
+ */
+ private void drainCurrentEntryData() throws IOException {
+ long remaining = current.entry.getCompressedSize() - current.bytesReadFromStream;
+ while (remaining > 0) {
+ final long n = in.read(buf.array(), 0, (int) Math.min(buf.capacity(), remaining));
+ if (n < 0) {
+ throw new EOFException("Truncated ZIP entry: "
+ + ArchiveUtils.sanitize(current.entry.getName()));
+ }
+ count(n);
+ remaining -= n;
+ }
+ }
+
+ /**
+ * Get the number of bytes Inflater has actually processed.
+ *
+ *
+ * <p>for Java &lt; Java7 the getBytes* methods in
+ * Inflater/Deflater seem to return unsigned ints rather than
+ * longs that start over with 0 at 2^32.</p>
+ *
+ * The stream knows how many bytes it has read, but not how
+ * many the Inflater actually consumed - it should be between the
+ * total number of bytes read for the entry and the total number
+ * minus the last read operation. Here we just try to make the
+ * value close enough to the bytes we've read by assuming the
+ * number of bytes consumed must be smaller than (or equal to) the
+ * number of bytes read but not smaller by more than 2^32.
+ */
+ private long getBytesInflated() {
+ long inB = inf.getBytesRead();
+ if (current.bytesReadFromStream >= TWO_EXP_32) {
+ while (inB + TWO_EXP_32 <= current.bytesReadFromStream) {
+ inB += TWO_EXP_32;
+ }
+ }
+ return inB;
+ }
+
+ private int fill() throws IOException {
+ if (closed) {
+ throw new IOException("The stream is closed");
+ }
+ final int length = in.read(buf.array());
+ if (length > 0) {
+ buf.limit(length);
+ count(buf.limit());
+ inf.setInput(buf.array(), 0, buf.limit());
+ }
+ return length;
+ }
+
+ private void readFully(final byte[] b) throws IOException {
+ readFully(b, 0);
+ }
+
+ private void readFully(final byte[] b, final int off) throws IOException {
+ final int len = b.length - off;
+ final int count = IOUtils.readFully(in, b, off, len);
+ count(count);
+ if (count < len) {
+ throw new EOFException();
+ }
+ }
+
+ private void readDataDescriptor() throws IOException {
+ readFully(wordBuf);
+ ZipLong val = new ZipLong(wordBuf);
+ if (ZipLong.DD_SIG.equals(val)) {
+ // data descriptor with signature, skip sig
+ readFully(wordBuf);
+ val = new ZipLong(wordBuf);
+ }
+ current.entry.setCrc(val.getValue());
+
+ // if there is a ZIP64 extra field, sizes are eight bytes
+ // each, otherwise four bytes each. Unfortunately some
+ // implementations - namely Java7 - use eight bytes without
+ // using a ZIP64 extra field -
+ // https://bugs.sun.com/bugdatabase/view_bug.do?bug_id=7073588
+
+ // just read 16 bytes and check whether bytes nine to twelve
+ // look like one of the signatures of what could follow a data
+ // descriptor (ignoring archive decryption headers for now).
+ // If so, push back eight bytes and assume sizes are four
+ // bytes, otherwise sizes are eight bytes each.
+ readFully(twoDwordBuf);
+ final ZipLong potentialSig = new ZipLong(twoDwordBuf, DWORD);
+ if (potentialSig.equals(ZipLong.CFH_SIG) || potentialSig.equals(ZipLong.LFH_SIG)) {
+ pushback(twoDwordBuf, DWORD, DWORD);
+ current.entry.setCompressedSize(ZipLong.getValue(twoDwordBuf));
+ current.entry.setSize(ZipLong.getValue(twoDwordBuf, WORD));
+ } else {
+ current.entry.setCompressedSize(ZipEightByteInteger.getLongValue(twoDwordBuf));
+ current.entry.setSize(ZipEightByteInteger.getLongValue(twoDwordBuf, DWORD));
+ }
+ }
+
+ /**
+ * Whether this entry requires a data descriptor this library can work with.
+ *
+ * @return true if allowStoredEntriesWithDataDescriptor is true,
+ * the entry doesn't require any data descriptor or the method is
+ * DEFLATED or ENHANCED_DEFLATED.
+ */
+ private boolean supportsDataDescriptorFor(final ZipArchiveEntry entry) {
+ return !entry.getGeneralPurposeBit().usesDataDescriptor()
+
+ || (allowStoredEntriesWithDataDescriptor && entry.getMethod() == ZipEntry.STORED)
+ || entry.getMethod() == ZipEntry.DEFLATED
+ || entry.getMethod() == ZipMethod.ENHANCED_DEFLATED.getCode();
+ }
+
+ /**
+ * Whether the compressed size for the entry is either known or
+ * not required by the compression method being used.
+ */
+ private boolean supportsCompressedSizeFor(final ZipArchiveEntry entry) {
+ return entry.getCompressedSize() != ArchiveEntry.SIZE_UNKNOWN
+ || entry.getMethod() == ZipEntry.DEFLATED
+ || entry.getMethod() == ZipMethod.ENHANCED_DEFLATED.getCode()
+ || (entry.getGeneralPurposeBit().usesDataDescriptor()
+ && allowStoredEntriesWithDataDescriptor
+ && entry.getMethod() == ZipEntry.STORED);
+ }
+
+ private static final String USE_ZIPFILE_INSTEAD_OF_STREAM_DISCLAIMER =
+ " while reading a stored entry using data descriptor. Either the archive is broken"
+ + " or it can not be read using ZipArchiveInputStream and you must use ZipFile."
+ + " A common cause for this is a ZIP archive containing a ZIP archive."
+ + " See http://commons.apache.org/proper/commons-compress/zip.html#ZipArchiveInputStream_vs_ZipFile";
+
+ /**
+ * Caches a stored entry that uses the data descriptor.
+ *
+ *
+ * - Reads a stored entry until the signature of a local file
+ * header, central directory header or data descriptor has been
+ * found.
+ * - Stores all entry data in lastStoredEntry.
+ * - Rewinds the stream to position at the data
+ * descriptor.
+ * - reads the data descriptor
+ *