better compiling, module info, warning fixes

Jörg Prante 2020-06-03 22:42:41 +02:00
parent 5abdd51313
commit 5d05c1ffa6
35 changed files with 309 additions and 157 deletions

gradle.properties

@@ -1,5 +1,5 @@
 group = org.xbib
 name = archive
-version = 1.0.0
+version = 1.0.1
 gradle.wrapper.version = 6.4.1

build.gradle

@@ -35,7 +35,7 @@ artifacts {
 }
 tasks.withType(JavaCompile) {
-    options.compilerArgs << '-Xlint:all'
+    options.compilerArgs << '-Xlint:all,-fallthrough'
 }
 javadoc {

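Note: -Xlint:all enables every javac lint category; appending ,-fallthrough keeps all of them except the switch fall-through check. The commit does not say why, but archive-format parsers commonly use deliberate fall-through, which that check flags regardless of comments. A minimal, hypothetical example of the pattern the disabled check complains about:

    // javac -Xlint:fallthrough warns about the deliberate fall-through
    // below, even with the conventional "falls through" comments.
    static String countdown(int n) {
        StringBuilder sb = new StringBuilder();
        switch (n) {
            case 3:
                sb.append("three ");
                // falls through
            case 2:
                sb.append("two ");
                // falls through
            case 1:
                sb.append("one");
                break;
            default:
                sb.append("none");
        }
        return sb.toString(); // countdown(3) -> "three two one"
    }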
CpioArchiveInputStream.java

@@ -1,6 +1,5 @@
 package org.xbib.io.archive.cpio;
-import org.xbib.io.archive.entry.ArchiveEntry;
 import org.xbib.io.archive.stream.ArchiveInputStream;
 import org.xbib.io.archive.util.ArchiveUtils;
@@ -33,7 +32,7 @@ import java.io.InputStream;
  * Note: This implementation should be compatible to cpio 2.5
  */
-public class CpioArchiveInputStream extends ArchiveInputStream implements CpioConstants {
+public class CpioArchiveInputStream extends ArchiveInputStream<CpioArchiveEntry> implements CpioConstants {
     private boolean closed = false;
@@ -380,7 +379,7 @@ public class CpioArchiveInputStream extends ArchiveInputStream implements CpioCo
     }
     @Override
-    public ArchiveEntry getNextEntry() throws IOException {
+    public CpioArchiveEntry getNextEntry() throws IOException {
         return getNextCPIOEntry();
     }

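Note: the getNextEntry() overrides in this commit (here and in JarArchiveInputStream below) rely on covariant return types: since Java 5 an overriding method may narrow its return type, so callers get the concrete entry class without a cast. A usage sketch, assuming this stream is constructed from a plain InputStream the way the jar variant below is:

    // No cast needed: the override declares CpioArchiveEntry, not ArchiveEntry.
    CpioArchiveInputStream in = new CpioArchiveInputStream(inputStream);
    CpioArchiveEntry entry = in.getNextEntry();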
module-info.java (org.xbib.io.archive.dump)

@@ -1,4 +1,4 @@
 module org.xbib.io.archive.dump {
     exports org.xbib.io.archive.dump;
-    requires org.xbib.io.archive;
+    requires transitive org.xbib.io.archive;
 }

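Note: requires transitive grants implied readability: any module that requires org.xbib.io.archive.dump can also read org.xbib.io.archive. That matters here because this module's exported API uses types from the archive module (e.g. ArchiveInputStream) in its public signatures. A hypothetical consumer then needs only one requires clause:

    // Hypothetical consumer module: org.xbib.io.archive is readable here
    // through the transitive edge, without being required explicitly.
    module com.example.app {
        requires org.xbib.io.archive.dump;
    }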
DumpArchiveException.java

@@ -2,10 +2,10 @@ package org.xbib.io.archive.dump;
 import java.io.IOException;
 /**
  * Dump Archive Exception
  */
+@SuppressWarnings("serial")
 public class DumpArchiveException extends IOException {
     public DumpArchiveException(String msg) {

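Note: exceptions are Serializable through Throwable, so -Xlint:serial warns when they declare no serialVersionUID. This commit suppresses the warning on its exception classes rather than declaring the field; the alternative would look like the following sketch (the field value is arbitrary):

    import java.io.IOException;

    // Alternative to @SuppressWarnings("serial"): declare the field
    // the -Xlint:serial check asks for.
    public class DumpArchiveException extends IOException {
        private static final long serialVersionUID = 1L;

        public DumpArchiveException(String msg) {
            super(msg);
        }
    }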
DumpArchiveInputStream.java

@@ -19,9 +19,9 @@ import java.util.Stack;
  * the archive, and then read each entry as a normal input stream
  * using read().
  */
-public class DumpArchiveInputStream extends ArchiveInputStream {
+public class DumpArchiveInputStream extends ArchiveInputStream<DumpArchiveEntry> {
-    private DumpArchiveSummary summary;
+    private final DumpArchiveSummary summary;
     private DumpArchiveEntry active;
@@ -35,7 +35,7 @@ public class DumpArchiveInputStream extends ArchiveInputStream {
     private int readIdx;
-    private byte[] readBuf = new byte[DumpArchiveConstants.TP_SIZE];
+    private final byte[] readBuf = new byte[DumpArchiveConstants.TP_SIZE];
     private byte[] blockBuffer;
@@ -46,13 +46,13 @@ public class DumpArchiveInputStream extends ArchiveInputStream {
     protected TapeInputStream raw;
     // map of ino -> dirent entry. We can use this to reconstruct full paths.
-    private Map<Integer, Dirent> names = new HashMap<Integer, Dirent>();
+    private final Map<Integer, Dirent> names = new HashMap<Integer, Dirent>();
     // map of ino -> (directory) entry when we're missing one or more elements in the path.
-    private Map<Integer, DumpArchiveEntry> pending = new HashMap<Integer, DumpArchiveEntry>();
+    private final Map<Integer, DumpArchiveEntry> pending = new HashMap<Integer, DumpArchiveEntry>();
     // queue of (directory) entries where we now have the full path.
-    private Queue<DumpArchiveEntry> queue;
+    private final Queue<DumpArchiveEntry> queue;
     /**
      * Constructor.

InvalidFormatException.java

@@ -1,11 +1,10 @@
 package org.xbib.io.archive.dump;
 /**
  * Invalid Format Exception. There was an error decoding a
  * tape segment header.
  */
+@SuppressWarnings("serial")
 public class InvalidFormatException extends DumpArchiveException {
     protected long offset;

TapeInputStream.java

@@ -8,18 +8,24 @@ import java.util.Arrays;
 import java.util.zip.DataFormatException;
 import java.util.zip.Inflater;
 /**
  * Filter stream that mimics a physical tape drive capable of compressing
  * the data stream
  */
-class TapeInputStream extends FilterInputStream {
+public class TapeInputStream extends FilterInputStream {
     private byte[] blockBuffer = new byte[DumpArchiveConstants.TP_SIZE];
     private int currBlkIdx = -1;
     private int blockSize = DumpArchiveConstants.TP_SIZE;
     private int recordSize = DumpArchiveConstants.TP_SIZE;
     private int readOffset = DumpArchiveConstants.TP_SIZE;
     private boolean isCompressed = false;
     private long bytesRead = 0;
     /**

module-info.java (org.xbib.io.archive.jar)

@@ -1,5 +1,4 @@
 module org.xbib.io.archive.jar {
     exports org.xbib.io.archive.jar;
-    requires org.xbib.io.archive;
-    requires org.xbib.io.archive.zip;
+    requires transitive org.xbib.io.archive.zip;
 }

JarArchiveInputStream.java

@@ -1,7 +1,6 @@
 package org.xbib.io.archive.jar;
-import org.xbib.io.archive.entry.ArchiveEntry;
 import org.xbib.io.archive.zip.ZipArchiveEntry;
 import org.xbib.io.archive.zip.ZipArchiveInputStream;
@@ -11,7 +10,7 @@ import java.io.InputStream;
 /**
  * Implements an input stream that can read entries from jar files.
  */
-public class JarArchiveInputStream extends ZipArchiveInputStream {
+public class JarArchiveInputStream extends ZipArchiveInputStream<JarArchiveEntry> {
     public JarArchiveInputStream(final InputStream inputStream) {
         super(inputStream);
@@ -23,7 +22,7 @@ public class JarArchiveInputStream extends ZipArchiveInputStream {
     }
     @Override
-    public ArchiveEntry getNextEntry() throws IOException {
+    public JarArchiveEntry getNextEntry() throws IOException {
         return getNextJarEntry();
     }

module-info.java (org.xbib.io.archive.tar)

@@ -1,4 +1,4 @@
 module org.xbib.io.archive.tar {
     exports org.xbib.io.archive.tar;
-    requires org.xbib.io.archive;
+    requires transitive org.xbib.io.archive;
 }

TarArchiveEntry.java

@@ -546,6 +546,207 @@ public class TarArchiveEntry implements TarConstants, ArchiveEntry {
         parseTarHeader(header, encoding, false);
     }
+    /**
+     * Write an entry's header information to a header buffer.
+     *
+     * @param outbuf The tar entry header buffer to fill in.
+     * @param encoding encoding to use when writing the file name.
+     * @param starMode whether to use the star/GNU tar/BSD tar
+     * extension for numeric fields if their value doesn't fit in the
+     * maximum size of standard tar archives
+     */
+    public void writeEntryHeader(byte[] outbuf, ArchiveEntryEncoding encoding, boolean starMode) throws IOException {
+        int offset = 0;
+        offset = ArchiveUtils.formatNameBytes(name, outbuf, offset, NAMELEN, encoding);
+        offset = writeEntryHeaderField(mode, outbuf, offset, MODELEN, starMode);
+        offset = writeEntryHeaderField(userId, outbuf, offset, UIDLEN, starMode);
+        offset = writeEntryHeaderField(groupId, outbuf, offset, GIDLEN, starMode);
+        offset = writeEntryHeaderField(size, outbuf, offset, SIZELEN, starMode);
+        offset = writeEntryHeaderField(modTime, outbuf, offset, MODTIMELEN, starMode);
+        int csOffset = offset;
+        for (int c = 0; c < CHKSUMLEN; ++c) {
+            outbuf[offset++] = (byte) ' ';
+        }
+        outbuf[offset++] = linkFlag;
+        offset = ArchiveUtils.formatNameBytes(linkName, outbuf, offset, NAMELEN, encoding);
+        offset = ArchiveUtils.formatNameBytes(MAGIC_POSIX, outbuf, offset, MAGICLEN);
+        offset = ArchiveUtils.formatNameBytes(version, outbuf, offset, VERSIONLEN);
+        offset = ArchiveUtils.formatNameBytes(userName, outbuf, offset, UNAMELEN, encoding);
+        offset = ArchiveUtils.formatNameBytes(groupName, outbuf, offset, GNAMELEN, encoding);
+        offset = writeEntryHeaderField(devMajor, outbuf, offset, DEVLEN, starMode);
+        offset = writeEntryHeaderField(devMinor, outbuf, offset, DEVLEN, starMode);
+        while (offset < outbuf.length) {
+            outbuf[offset++] = 0;
+        }
+        long chk = computeCheckSum(outbuf);
+        formatCheckSumOctalBytes(chk, outbuf, csOffset, CHKSUMLEN);
+    }
+    private int writeEntryHeaderField(long value, byte[] outbuf, int offset, int length, boolean starMode) {
+        if (!starMode && (value < 0
+                || value >= (1L << (3 * (length - 1))))) {
+            // value doesn't fit into field when written as octal
+            // number, will be written to PAX header or causes an
+            // error
+            return formatLongOctalBytes(0, outbuf, offset, length);
+        }
+        return formatLongOctalOrBinaryBytes(value, outbuf, offset, length);
+    }
+    /**
+     * Write a long integer into a buffer as an octal string if this
+     * will fit, or as a binary number otherwise.
+     * <p/>
+     * Uses {@link #formatUnsignedOctalString} to format
+     * the value as an octal string with leading zeros.
+     * The converted number is followed by a space.
+     *
+     * @param value The value to write into the buffer.
+     * @param buf The destination buffer.
+     * @param offset The starting offset into the buffer.
+     * @param length The length of the buffer.
+     * @return The updated offset.
+     * @throws IllegalArgumentException if the value (and trailer)
+     * will not fit in the buffer.
+     */
+    private int formatLongOctalOrBinaryBytes(final long value, byte[] buf, final int offset, final int length) {
+        // Check whether we are dealing with UID/GID or SIZE field
+        final long maxAsOctalChar = length == UIDLEN ? MAXID : MAXSIZE;
+        final boolean negative = value < 0;
+        if (!negative && value <= maxAsOctalChar) { // OK to store as octal chars
+            return formatLongOctalBytes(value, buf, offset, length);
+        }
+        if (length < 9) {
+            formatLongBinary(value, buf, offset, length, negative);
+        } else {
+            formatBigIntegerBinary(value, buf, offset, length, negative);
+        }
+        buf[offset] = (byte) (negative ? 0xff : 0x80);
+        return offset + length;
+    }
+    private void formatLongBinary(final long value, byte[] buf, final int offset, final int length, final boolean negative) {
+        final int bits = (length - 1) * 8;
+        final long max = 1L << bits;
+        long val = Math.abs(value);
+        if (val >= max) {
+            throw new IllegalArgumentException("Value " + value +
+                    " is too large for " + length + " byte field.");
+        }
+        if (negative) {
+            val ^= max - 1;
+            val |= 0xffL << bits;
+            val++;
+        }
+        for (int i = offset + length - 1; i >= offset; i--) {
+            buf[i] = (byte) val;
+            val >>= 8;
+        }
+    }
+    private void formatBigIntegerBinary(final long value, byte[] buf,
+                                        final int offset,
+                                        final int length,
+                                        final boolean negative) {
+        BigInteger val = BigInteger.valueOf(value);
+        final byte[] b = val.toByteArray();
+        final int len = b.length;
+        final int off = offset + length - len;
+        System.arraycopy(b, 0, buf, off, len);
+        final byte fill = (byte) (negative ? 0xff : 0);
+        for (int i = offset + 1; i < off; i++) {
+            buf[i] = fill;
+        }
+    }
+    /**
+     * Writes an octal value into a buffer.
+     * <p/>
+     * Uses {@link #formatUnsignedOctalString} to format
+     * the value as an octal string with leading zeros.
+     * The converted number is followed by NUL and then space.
+     *
+     * @param value The value to convert
+     * @param buf The destination buffer
+     * @param offset The starting offset into the buffer.
+     * @param length The size of the buffer.
+     * @return The updated value of offset, i.e. offset+length
+     * @throws IllegalArgumentException if the value (and trailer) will not fit in the buffer
+     */
+    private int formatCheckSumOctalBytes(final long value, byte[] buf, final int offset, final int length) {
+        int idx = length - 2;
+        formatUnsignedOctalString(value, buf, offset, idx);
+        buf[offset + idx++] = 0;
+        buf[offset + idx] = (byte) ' ';
+        return offset + length;
+    }
+    /**
+     * Write an octal long integer into a buffer.
+     * <p/>
+     * Uses {@link #formatUnsignedOctalString} to format
+     * the value as an octal string with leading zeros.
+     * The converted number is followed by a space.
+     *
+     * @param value The value to write as octal
+     * @param buf The destination buffer.
+     * @param offset The starting offset into the buffer.
+     * @param length The length of the buffer
+     * @return The updated offset
+     * @throws IllegalArgumentException if the value (and trailer) will not fit in the buffer
+     */
+    private int formatLongOctalBytes(final long value, byte[] buf, final int offset, final int length) {
+        int idx = length - 1; // For space
+        formatUnsignedOctalString(value, buf, offset, idx);
+        buf[offset + idx] = (byte) ' '; // Trailing space
+        return offset + length;
+    }
+    /**
+     * Fill buffer with unsigned octal number, padded with leading zeroes.
+     *
+     * @param value number to convert to octal - treated as unsigned
+     * @param buffer destination buffer
+     * @param offset starting offset in buffer
+     * @param length length of buffer to fill
+     * @throws IllegalArgumentException if the value will not fit in the buffer
+     */
+    private void formatUnsignedOctalString(final long value, byte[] buffer, final int offset, final int length) {
+        int remaining = length;
+        remaining--;
+        if (value == 0) {
+            buffer[offset + remaining--] = (byte) '0';
+        } else {
+            long val = value;
+            for (; remaining >= 0 && val != 0; --remaining) {
+                buffer[offset + remaining] = (byte) ((byte) '0' + (byte) (val & 7));
+                val = val >>> 3;
+            }
+            if (val != 0) {
+                throw new IllegalArgumentException(value + "=" + Long.toOctalString(value) + " will not fit in octal number buffer of length " + length);
+            }
+        }
+        for (; remaining >= 0; --remaining) { // leading zeros
+            buffer[offset + remaining] = (byte) '0';
+        }
+    }
+    /**
+     * Compute the checksum of a tar entry header.
+     *
+     * @param buf The tar entry's header buffer.
+     * @return The computed checksum.
+     */
+    private long computeCheckSum(final byte[] buf) {
+        long sum = 0;
+        for (byte aBuf : buf) {
+            sum += 255 & aBuf;
+        }
+        return sum;
+    }
     private void parseTarHeader(byte[] header, ArchiveEntryEncoding encoding, final boolean oldStyle)
             throws IOException {
         int offset = 0;

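Note on the checksum dance in writeEntryHeader above: the 8-byte checksum field is first filled with ASCII spaces, every byte of the header is then summed unsigned (computeCheckSum), and the sum is written back over the spaces as octal, NUL-terminated and space-padded (formatCheckSumOctalBytes). A condensed restatement of that convention, with the offsets passed in since the real constants live in TarConstants:

    // Sum all header bytes, treating the checksum field itself as spaces.
    static long tarChecksum(byte[] header, int chksumOffset, int chksumLen) {
        long sum = 0;
        for (int i = 0; i < header.length; i++) {
            boolean inChksum = i >= chksumOffset && i < chksumOffset + chksumLen;
            sum += 255 & (inChksum ? (byte) ' ' : header[i]);
        }
        return sum;
    }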
TarArchiveOutputEntry.java

@@ -10,7 +10,7 @@ import java.math.BigInteger;
 import java.util.Date;
 /**
- * This class represents an entry in a Tar archive for output
+ * This class represents an entry in a Tar archive for output.
  */
 public class TarArchiveOutputEntry implements TarConstants, ArchiveEntry {

TarArchiveOutputStream.java

@@ -7,7 +7,7 @@ import org.xbib.io.archive.entry.ArchiveEntryEncodingHelper;
 import java.io.IOException;
 import java.io.OutputStream;
 import java.io.StringWriter;
-import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.Map;
@@ -15,7 +15,7 @@ import java.util.Map;
 /**
  * The TarOutputStream writes a UNIX tar archive as an output stream
  */
-public class TarArchiveOutputStream extends ArchiveOutputStream<TarArchiveOutputEntry> implements TarConstants {
+public class TarArchiveOutputStream extends ArchiveOutputStream<TarArchiveEntry> implements TarConstants {
     private static final ArchiveEntryEncoding ASCII = ArchiveEntryEncodingHelper.getEncoding("ASCII");
@@ -239,8 +239,8 @@ public class TarArchiveOutputStream extends ArchiveOutputStream<TarArchiveOutput
     }
     @Override
-    public TarArchiveOutputEntry newArchiveEntry() {
-        return new TarArchiveOutputEntry();
+    public TarArchiveEntry newArchiveEntry() {
+        return new TarArchiveEntry();
     }
     /**
@@ -257,7 +257,7 @@ public class TarArchiveOutputStream extends ArchiveOutputStream<TarArchiveOutput
      * @throws ClassCastException if archiveEntry is not an instance of TarArchiveEntry
      */
     @Override
-    public void putArchiveEntry(TarArchiveOutputEntry archiveEntry) throws IOException {
+    public void putArchiveEntry(TarArchiveEntry archiveEntry) throws IOException {
         if (finished) {
             throw new IOException("Stream has already been finished");
         }
@@ -272,7 +272,7 @@ public class TarArchiveOutputStream extends ArchiveOutputStream<TarArchiveOutput
         } else if (longFileMode == LONGFILE_GNU) {
             // create a TarEntry for the LongLink, the contents
             // of which are the entry's name
-            TarArchiveOutputEntry longLinkEntry = new TarArchiveOutputEntry(GNU_LONGLINK, LF_GNUTYPE_LONGNAME);
+            TarArchiveEntry longLinkEntry = new TarArchiveEntry(GNU_LONGLINK, LF_GNUTYPE_LONGNAME);
             longLinkEntry.setEntrySize(nameBytes.length + 1); // +1 for NUL
             putArchiveEntry(longLinkEntry);
             write(nameBytes);
@@ -440,7 +440,7 @@ public class TarArchiveOutputStream extends ArchiveOutputStream<TarArchiveOutput
         if (name.length() >= NAMELEN) {
             name = name.substring(0, NAMELEN - 1);
         }
-        TarArchiveOutputEntry pex = new TarArchiveOutputEntry(name, LF_PAX_EXTENDED_HEADER_LC);
+        TarArchiveEntry pex = new TarArchiveEntry(name, LF_PAX_EXTENDED_HEADER_LC);
         StringWriter w = new StringWriter();
         for (Map.Entry<String, String> h : headers.entrySet()) {
             String key = h.getKey();
@@ -449,7 +449,7 @@ public class TarArchiveOutputStream extends ArchiveOutputStream<TarArchiveOutput
                     + 3 /* blank, equals and newline */
                     + 2 /* guess 9 < actual length < 100 */;
             String line = len + " " + key + "=" + value + "\n";
-            int actualLength = line.getBytes(Charset.forName("UTF-8")).length;
+            int actualLength = line.getBytes(StandardCharsets.UTF_8).length;
             while (len != actualLength) {
                 // Adjust for cases where length < 10 or > 100
                 // or where UTF-8 encoding isn't a single octet
@@ -458,11 +458,11 @@ public class TarArchiveOutputStream extends ArchiveOutputStream<TarArchiveOutput
                 // first pass so we'd need a second.
                 len = actualLength;
                 line = len + " " + key + "=" + value + "\n";
-                actualLength = line.getBytes(Charset.forName("UTF-8")).length;
+                actualLength = line.getBytes(StandardCharsets.UTF_8).length;
             }
             w.write(line);
         }
-        byte[] data = w.toString().getBytes(Charset.forName("UTF-8"));
+        byte[] data = w.toString().getBytes(StandardCharsets.UTF_8);
         pex.setEntrySize(data.length);
         putArchiveEntry(pex);
         write(data);
@@ -499,7 +499,7 @@ public class TarArchiveOutputStream extends ArchiveOutputStream<TarArchiveOutput
     }
     private void addPaxHeadersForBigNumbers(Map<String, String> paxHeaders,
-                                            TarArchiveOutputEntry entry) {
+                                            TarArchiveEntry entry) {
         addPaxHeaderForBigNumber(paxHeaders, "size", entry.getEntrySize(), MAXSIZE);
         addPaxHeaderForBigNumber(paxHeaders, "gid", entry.getGroupId(), MAXID);
         addPaxHeaderForBigNumber(paxHeaders, "mtime", entry.getLastModified().getTime() / 1000, MAXSIZE);
@@ -519,7 +519,7 @@ public class TarArchiveOutputStream extends ArchiveOutputStream<TarArchiveOutput
         }
     }
-    private void failForBigNumbers(TarArchiveOutputEntry entry) {
+    private void failForBigNumbers(TarArchiveEntry entry) {
         failForBigNumber("entry size", entry.getEntrySize(), MAXSIZE);
         failForBigNumber("group id", entry.getGroupId(), MAXID);
         failForBigNumber("last modification time", entry.getLastModified().getTime() / 1000, MAXSIZE);

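Note on the self-adjusting loop in the PAX header hunk above: each PAX record has the form "LEN key=value\n", where LEN counts the whole record including its own decimal digits. The first guess can change the digit count (e.g. crossing from 9 to 10), so the code re-encodes until the length is stable. A standalone restatement of that loop:

    import java.nio.charset.StandardCharsets;

    // Returns a PAX record whose leading length field counts the entire
    // record, its own digits included.
    static String paxRecord(String key, String value) {
        int len = key.length() + value.length()
                + 3 /* blank, equals and newline */
                + 2 /* first guess at the digit count */;
        String line = len + " " + key + "=" + value + "\n";
        int actual = line.getBytes(StandardCharsets.UTF_8).length;
        while (len != actual) {
            len = actual;
            line = len + " " + key + "=" + value + "\n";
            actual = line.getBytes(StandardCharsets.UTF_8).length;
        }
        return line; // paxRecord("path", "/a/b") -> "13 path=/a/b\n"
    }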
TarTest.java

@@ -9,7 +9,7 @@ import org.junit.jupiter.api.Test;
 public class TarTest {
     @Test
-    public void testTar() throws IOException {
+    public void testReadTar() throws IOException {
         InputStream in = getClass().getResourceAsStream("test.tar");
         TarArchiveInputStream tarArchiveInputStream = new TarArchiveInputStream(in);
         byte[] buffer = new byte[1024];

module-info.java (org.xbib.io.archive.zip)

@@ -1,4 +1,4 @@
 module org.xbib.io.archive.zip {
     exports org.xbib.io.archive.zip;
-    requires org.xbib.io.archive;
+    requires transitive org.xbib.io.archive;
 }

ExtraFieldUtils.java

@@ -1,4 +1,3 @@
 package org.xbib.io.archive.zip;
 import java.util.ArrayList;
@@ -20,7 +19,7 @@ public class ExtraFieldUtils {
     private static final Map<ZipShort, Class<?>> implementations;
     static {
-        implementations = new HashMap<ZipShort, Class<?>>();
+        implementations = new HashMap<>();
         register(AsiExtraField.class);
         register(JarMarker.class);
         register(UnicodePathExtraField.class);
@@ -37,14 +36,10 @@
      */
     public static void register(Class<?> c) {
         try {
-            ZipExtraField ze = (ZipExtraField) c.newInstance();
+            ZipExtraField ze = (ZipExtraField) c.getDeclaredConstructor().newInstance();
             implementations.put(ze.getHeaderId(), c);
-        } catch (ClassCastException cc) {
-            throw new RuntimeException(c + " doesn\'t implement ZipExtraField");
-        } catch (InstantiationException ie) {
-            throw new RuntimeException(c + " is not a concrete class");
-        } catch (IllegalAccessException ie) {
-            throw new RuntimeException(c + "\'s no-arg constructor is not public");
+        } catch (Exception e) {
+            throw new RuntimeException(e);
         }
     }
@@ -54,14 +49,13 @@
      *
      * @param headerId the header identifier
      * @return an instance of the appropriate ExtraField
-     * @throws InstantiationException if unable to instantiate the class
-     * @throws IllegalAccessException if not allowed to instatiate the class
+     * @throws Exception if unable to instantiate the class
      */
     public static ZipExtraField createExtraField(ZipShort headerId)
-            throws InstantiationException, IllegalAccessException {
+            throws Exception {
         Class<?> c = implementations.get(headerId);
         if (c != null) {
-            return (ZipExtraField) c.newInstance();
+            return (ZipExtraField) c.getDeclaredConstructor().newInstance();
         }
         UnrecognizedExtraField u = new UnrecognizedExtraField();
         u.setHeaderId(headerId);
@@ -157,7 +151,7 @@
                         length);
             }
             v.add(ze);
-        } catch (InstantiationException | IllegalAccessException ie) {
+        } catch (Exception ie) {
             throw new ZipException(ie.getMessage());
         }
         start += (length + WORD);

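Note: Class.newInstance(), removed here, has been deprecated since Java 9 because it rethrows any checked exception from the constructor without wrapping, bypassing compile-time checking. The replacement, getDeclaredConstructor().newInstance(), throws a wider set of checked exceptions (NoSuchMethodException, InvocationTargetException, and others), which is why the fine-grained catch blocks collapse into a single catch (Exception e). A self-contained sketch of the replacement pattern:

    // All reflective failures share the ReflectiveOperationException
    // supertype, so one catch clause can cover them precisely.
    static <T> T instantiate(Class<T> type) {
        try {
            return type.getDeclaredConstructor().newInstance();
        } catch (ReflectiveOperationException e) {
            throw new RuntimeException(type + " could not be instantiated", e);
        }
    }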
UnsupportedZipFeatureException.java

@@ -7,6 +7,7 @@ import java.util.zip.ZipException;
  * Exception thrown when attempting to read or write data for a zip
  * entry that uses ZIP features not supported by this library.
  */
+@SuppressWarnings("serial")
 public class UnsupportedZipFeatureException extends ZipException {
     private final Feature reason;

Zip64RequiredException.java

@@ -8,6 +8,7 @@ import java.util.zip.ZipException;
  * support to an archive and {@link ZipArchiveOutputStream#setUseZip64
  * UseZip64} has been set to {@link Zip64Mode#Never Never}.
  */
+@SuppressWarnings("serial")
 public class Zip64RequiredException extends ZipException {
     /**

ZipArchiveInputStream.java

@@ -1,6 +1,5 @@
 package org.xbib.io.archive.zip;
-import org.xbib.io.archive.entry.ArchiveEntry;
 import org.xbib.io.archive.stream.ArchiveInputStream;
 import org.xbib.io.archive.entry.ArchiveEntryEncoding;
 import org.xbib.io.archive.entry.ArchiveEntryEncodingHelper;
@@ -34,7 +33,7 @@ import static org.xbib.io.archive.zip.ZipConstants.ZIP64_MAGIC;
  *
  * @see ZipFile
  */
-public class ZipArchiveInputStream extends ArchiveInputStream {
+public class ZipArchiveInputStream<E extends ZipArchiveEntry> extends ArchiveInputStream<E> {
     /**
      * The zip encoding to use for filenames and the file comment.
@@ -252,9 +251,10 @@ public class ZipArchiveInputStream extends ArchiveInputStream {
         }
     }
+    @SuppressWarnings("unchecked")
     @Override
-    public ArchiveEntry getNextEntry() throws IOException {
-        return getNextZipEntry();
+    public E getNextEntry() throws IOException {
+        return (E) getNextZipEntry();
     }
     @Override

ZipArchiveOutputStream.java

@@ -562,6 +562,7 @@ public class ZipArchiveOutputStream<E extends ZipArchiveEntry> extends ArchiveOu
         raf.seek(save);
     }
+    @SuppressWarnings("unchecked")
     @Override
     public E newArchiveEntry() {
         return (E) new ZipArchiveEntry();

ZipFile.java

@@ -316,23 +316,6 @@ public class ZipFile {
         }
     }
-    /**
-     * Ensures that the close method of this zipfile is called when
-     * there are no more references to it.
-     *
-     * @see #close()
-     */
-    @Override
-    protected void finalize() throws Throwable {
-        try {
-            if (!closed) {
-                close();
-            }
-        } finally {
-            super.finalize();
-        }
-    }
     /**
      * Length of a "central directory" entry structure without file
      * name, extra fields or comment.
@@ -370,8 +353,7 @@ public class ZipFile {
      */
     private Map<ZipArchiveEntry, NameAndComment> populateFromCentralDirectory()
             throws IOException {
-        HashMap<ZipArchiveEntry, NameAndComment> noUTF8Flag =
-                new HashMap<ZipArchiveEntry, NameAndComment>();
+        HashMap<ZipArchiveEntry, NameAndComment> noUTF8Flag = new HashMap<>();
         positionAtCentralDirectory();

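Note: removing finalize() fits the warning cleanup; Object.finalize() is deprecated since Java 9, runs at an unpredictable time if at all, and is no substitute for explicit resource management. Callers now have to close the file themselves. A hypothetical usage sketch (the constructor shown is assumed, not taken from this diff):

    // Close explicitly; use try-with-resources instead if this ZipFile
    // implements AutoCloseable.
    ZipFile zipFile = new ZipFile(file);
    try {
        // ... read entries ...
    } finally {
        zipFile.close();
    }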
module-info.java (org.xbib.io.codec)

@@ -1,4 +1,5 @@
 module org.xbib.io.codec {
+    uses org.xbib.io.codec.StreamCodec;
     exports org.xbib.io.codec;
     exports org.xbib.io.codec.ar;
     exports org.xbib.io.codec.cpio;
@@ -10,11 +11,10 @@ module org.xbib.io.codec {
     requires org.xbib.io.compress.lzf;
     requires org.xbib.io.compress.xz;
     requires org.xbib.io.compress.zlib;
-    requires org.xbib.io.archive;
-    requires org.xbib.io.archive.ar;
-    requires org.xbib.io.archive.cpio;
-    requires org.xbib.io.archive.dump;
-    requires org.xbib.io.archive.jar;
-    requires org.xbib.io.archive.tar;
-    requires org.xbib.io.archive.zip;
+    requires transitive org.xbib.io.archive.ar;
+    requires transitive org.xbib.io.archive.cpio;
+    requires transitive org.xbib.io.archive.dump;
+    requires transitive org.xbib.io.archive.jar;
+    requires transitive org.xbib.io.archive.tar;
+    requires transitive org.xbib.io.archive.zip;
 }

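Note: the added uses org.xbib.io.codec.StreamCodec; line is what lets ServiceLoader work under the module system: a named module may only call ServiceLoader.load(StreamCodec.class) for services it declares with uses (see StreamCodecService below). The matching half lives in provider modules; a hypothetical example:

    // Hypothetical provider module: pairs with the 'uses' clause above.
    module com.example.mycodec {
        requires org.xbib.io.codec;
        provides org.xbib.io.codec.StreamCodec
                with com.example.mycodec.MyStreamCodec;
    }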
ArchiveCodec.java

@@ -1,5 +1,6 @@
 package org.xbib.io.codec;
+import org.xbib.io.archive.entry.ArchiveEntry;
 import org.xbib.io.archive.stream.ArchiveInputStream;
 import org.xbib.io.archive.stream.ArchiveOutputStream;
 import java.io.IOException;
@@ -14,7 +15,7 @@ import java.io.OutputStream;
  * @param <I> the archive input stream type
  * @param <O> the archive output type
  */
-public interface ArchiveCodec<S extends ArchiveSession, I extends ArchiveInputStream, O extends ArchiveOutputStream> {
+public interface ArchiveCodec<E extends ArchiveEntry, I extends ArchiveInputStream<E>, O extends ArchiveOutputStream<E>, S extends ArchiveSession<E, I, O>> {
     /**
      * Returns the name of this archive codec ("cpio", "tar", "zip")
@@ -23,14 +24,6 @@ public interface ArchiveCodec<S extends ArchiveSession, I extends ArchiveInputSt
      */
     String getName();
-    /**
-     * Creates a new archive session with a progress watcher.
-     *
-     * @param watcher the progress watcher
-     * @return the new archive session
-     */
-    S newSession(BytesProgressWatcher watcher);
     /**
      * Creates a new archive input stream
      *
@@ -49,4 +42,11 @@
      */
     O createArchiveOutputStream(OutputStream out) throws IOException;
+    /**
+     * Creates a new archive session with a progress watcher.
+     *
+     * @param watcher the progress watcher
+     * @return the new archive session
+     */
+    S newSession(BytesProgressWatcher watcher);
 }

ArchiveSession.java

@@ -1,34 +1,3 @@
-/*
- * Licensed to Jörg Prante and xbib under one or more contributor
- * license agreements. See the NOTICE.txt file distributed with this work
- * for additional information regarding copyright ownership.
- *
- * Copyright (C) 2012 Jörg Prante and xbib
- *
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU Affero General Public License as published
- * by the Free Software Foundation; either version 3 of the License, or
- * (at your option) any later version.
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU Affero General Public License for more details.
- *
- * You should have received a copy of the GNU Affero General Public License
- * along with this program; if not, see http://www.gnu.org/licenses
- * or write to the Free Software Foundation, Inc., 51 Franklin Street,
- * Fifth Floor, Boston, MA 02110-1301 USA.
- *
- * The interactive user interfaces in modified source and object code
- * versions of this program must display Appropriate Legal Notices,
- * as required under Section 5 of the GNU Affero General Public License.
- *
- * In accordance with Section 7(b) of the GNU Affero General Public
- * License, these Appropriate Legal Notices must retain the display of the
- * "Powered by xbib" logo. If the display of the logo is not reasonably
- * feasible for technical reasons, the Appropriate Legal Notices must display
- * the words "Powered by xbib".
- */
 package org.xbib.io.codec;
 import org.xbib.io.archive.entry.ArchiveEntry;
@@ -45,13 +14,11 @@ import java.util.Date;
 import java.util.Set;
 /**
- * Archive session
+ * Archive session.
  */
-public abstract class ArchiveSession<I extends ArchiveInputStream, O extends ArchiveOutputStream>
+public abstract class ArchiveSession<E extends ArchiveEntry, I extends ArchiveInputStream<E>, O extends ArchiveOutputStream<E>>
         implements Session<StringPacket> {
-    private final static StreamCodecService codecFactory = StreamCodecService.getInstance();
     private final static int DEFAULT_INPUT_BUFSIZE = 65536;
     protected int bufferSize = DEFAULT_INPUT_BUFSIZE;
@@ -65,15 +32,13 @@ public abstract class ArchiveSession<I extends ArchiveInputStream, O extends Arc
     protected ArchiveSession() {
     }
-    public ArchiveSession setPath(Path path, OpenOption option) {
+    public void setPath(Path path, OpenOption option) {
         this.path = path;
         this.option = option;
-        return this;
     }
-    public ArchiveSession setBufferSize(int bufferSize) {
+    public void setBufferSize(int bufferSize) {
         this.bufferSize = bufferSize;
-        return this;
     }
     @Override
@@ -138,7 +103,7 @@ public abstract class ArchiveSession<I extends ArchiveInputStream, O extends Arc
         byte[] buf = packet.toString().getBytes();
         if (buf.length > 0) {
             String name = packet.name();
-            ArchiveEntry entry = getOutputStream().newArchiveEntry();
+            E entry = getOutputStream().newArchiveEntry();
             entry.setName(name);
             entry.setLastModified(new Date());
             entry.setEntrySize(buf.length);

Session.java

@@ -7,7 +7,7 @@ import java.io.IOException;
  * operations, and being closed. Sessions must be opened before the first
  * operation and closed after the last operation.
  */
-public interface Session<P extends Packet> {
+public interface Session<P extends StringPacket> {
     enum Mode {

StreamCodecService.java

@@ -1,5 +1,7 @@
 package org.xbib.io.codec;
+import java.io.InputStream;
+import java.io.OutputStream;
 import java.util.Map;
 import java.util.ServiceLoader;
 import java.util.Set;
@@ -10,13 +12,14 @@ import java.util.WeakHashMap;
  */
 public class StreamCodecService {
-    private final static Map<String, StreamCodec> codecs = new WeakHashMap<>();
+    private final static Map<String, StreamCodec<InputStream, OutputStream>> codecs = new WeakHashMap<>();
     private final static StreamCodecService instance = new StreamCodecService();
+    @SuppressWarnings({"rawtypes","unchecked"})
     private StreamCodecService() {
         ServiceLoader<StreamCodec> loader = ServiceLoader.load(StreamCodec.class);
-        for (StreamCodec codec : loader) {
+        for (StreamCodec<InputStream, OutputStream> codec : loader) {
            if (!codecs.containsKey(codec.getName())) {
                codecs.put(codec.getName(), codec);
            }
@@ -27,7 +30,7 @@
         return instance;
     }
-    public StreamCodec getCodec(String suffix) {
+    public StreamCodec<InputStream, OutputStream> getCodec(String suffix) {
         if (codecs.containsKey(suffix)) {
             return codecs.get(suffix);
         }

ArSession.java

@@ -1,9 +1,9 @@
 package org.xbib.io.codec.ar;
+import org.xbib.io.archive.ar.ArArchiveEntry;
 import org.xbib.io.codec.ArchiveSession;
 import org.xbib.io.archive.ar.ArArchiveInputStream;
 import org.xbib.io.archive.ar.ArArchiveOutputStream;
-import org.xbib.io.codec.Packet;
 import org.xbib.io.codec.Session;
 import org.xbib.io.codec.StringPacket;
 import java.io.InputStream;
@@ -12,7 +12,7 @@ import java.io.OutputStream;
 /**
  * Ar Session
  */
-public class ArSession extends ArchiveSession<ArArchiveInputStream, ArArchiveOutputStream>
+public class ArSession extends ArchiveSession<ArArchiveEntry, ArArchiveInputStream, ArArchiveOutputStream>
         implements Session<StringPacket> {
     private final static String SUFFIX = "ar";

CpioSession.java

@@ -1,5 +1,6 @@
 package org.xbib.io.codec.cpio;
+import org.xbib.io.archive.cpio.CpioArchiveEntry;
 import org.xbib.io.codec.ArchiveSession;
 import org.xbib.io.archive.cpio.CpioArchiveInputStream;
 import org.xbib.io.archive.cpio.CpioArchiveOutputStream;
@@ -9,7 +10,7 @@ import java.io.OutputStream;
 /**
  * Cpio Session
  */
-public class CpioSession extends ArchiveSession<CpioArchiveInputStream, CpioArchiveOutputStream> {
+public class CpioSession extends ArchiveSession<CpioArchiveEntry, CpioArchiveInputStream, CpioArchiveOutputStream> {
     private final static String SUFFIX = "cpio";

JarSession.java

@@ -1,12 +1,13 @@
 package org.xbib.io.codec.jar;
+import org.xbib.io.archive.jar.JarArchiveEntry;
 import org.xbib.io.codec.ArchiveSession;
 import org.xbib.io.archive.jar.JarArchiveInputStream;
 import org.xbib.io.archive.jar.JarArchiveOutputStream;
 import java.io.InputStream;
 import java.io.OutputStream;
-public class JarSession extends ArchiveSession<JarArchiveInputStream, JarArchiveOutputStream> {
+public class JarSession extends ArchiveSession<JarArchiveEntry, JarArchiveInputStream, JarArchiveOutputStream> {
     private final static String SUFFIX = "jar";

TarSession.java

@@ -31,6 +31,7 @@
  */
 package org.xbib.io.codec.tar;
+import org.xbib.io.archive.tar.TarArchiveEntry;
 import org.xbib.io.codec.ArchiveSession;
 import org.xbib.io.archive.tar.TarArchiveInputStream;
 import org.xbib.io.archive.tar.TarArchiveOutputStream;
@@ -38,7 +39,7 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
-public class TarSession extends ArchiveSession<TarArchiveInputStream, TarArchiveOutputStream> {
+public class TarSession extends ArchiveSession<TarArchiveEntry, TarArchiveInputStream, TarArchiveOutputStream> {
     private final static String SUFFIX = "tar";

ZipSession.java

@@ -1,37 +1,37 @@
 package org.xbib.io.codec.zip;
+import org.xbib.io.archive.zip.ZipArchiveEntry;
 import org.xbib.io.codec.ArchiveSession;
 import org.xbib.io.archive.zip.ZipArchiveInputStream;
 import org.xbib.io.archive.zip.ZipArchiveOutputStream;
 import java.io.InputStream;
 import java.io.OutputStream;
-public class ZipSession extends ArchiveSession<ZipArchiveInputStream, ZipArchiveOutputStream> {
+public class ZipSession extends ArchiveSession<ZipArchiveEntry, ZipArchiveInputStream<ZipArchiveEntry>, ZipArchiveOutputStream<ZipArchiveEntry>> {
     private final static String SUFFIX = "zip";
-    private ZipArchiveInputStream in;
+    private ZipArchiveInputStream<ZipArchiveEntry> in;
-    private ZipArchiveOutputStream out;
+    private ZipArchiveOutputStream<ZipArchiveEntry> out;
     protected String getSuffix() {
         return SUFFIX;
     }
     protected void open(InputStream in) {
-        this.in = new ZipArchiveInputStream(in);
+        this.in = new ZipArchiveInputStream<>(in);
     }
     protected void open(OutputStream out) {
-        this.out = new ZipArchiveOutputStream(out);
+        this.out = new ZipArchiveOutputStream<>(out);
     }
-    public ZipArchiveInputStream getInputStream() {
+    public ZipArchiveInputStream<ZipArchiveEntry> getInputStream() {
         return in;
     }
-    public ZipArchiveOutputStream getOutputStream() {
+    public ZipArchiveOutputStream<ZipArchiveEntry> getOutputStream() {
         return out;
     }
 }

ChunkDecoderFactory.java

@@ -30,7 +30,7 @@ public class ChunkDecoderFactory {
      */
     public static ChunkDecoder optimalInstance() {
         try {
-            return INSTANCE.implClass.newInstance();
+            return INSTANCE.implClass.getDeclaredConstructor().newInstance();
         } catch (Exception e) {
             throw new IllegalStateException("Failed to load a ChunkDecoder instance (" + e.getClass().getName() + "): "
                     + e.getMessage(), e);

IndexIndicatorException.java

@@ -1,5 +1,5 @@
 package org.xbib.io.compress.xz;
+@SuppressWarnings("serial")
 class IndexIndicatorException extends Exception {
 }

IndexEncoder.java

@@ -1,4 +1,3 @@
 package org.xbib.io.compress.xz.index;
 import org.xbib.io.compress.xz.XZIOException;
@@ -7,11 +6,12 @@ import org.xbib.io.compress.xz.common.EncoderUtil;
 import java.io.IOException;
 import java.io.OutputStream;
 import java.util.ArrayList;
-import java.util.Iterator;
+import java.util.List;
 import java.util.zip.CheckedOutputStream;
 public class IndexEncoder extends IndexBase {
-    private final ArrayList records = new ArrayList();
+    private final List<IndexRecord> records = new ArrayList<>();
     public IndexEncoder() {
         super(new XZIOException("XZ Stream or its Index has grown too big"));
@@ -34,8 +34,7 @@
         EncoderUtil.encodeVLI(outChecked, recordCount);
         // List of Records
-        for (Iterator i = records.iterator(); i.hasNext(); ) {
-            IndexRecord record = (IndexRecord) i.next();
+        for (IndexRecord record : records) {
             EncoderUtil.encodeVLI(outChecked, record.unpadded);
             EncoderUtil.encodeVLI(outChecked, record.uncompressed);
         }
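Note: this last change swaps a raw ArrayList plus Iterator-and-cast for a typed List with an enhanced for loop; the iteration is the same, but the element type is now checked at compile time and both the rawtypes and unchecked warnings disappear. A minimal before/after sketch (IndexRecord here is a stand-in for the real, package-private class):

    import java.util.ArrayList;
    import java.util.Iterator;
    import java.util.List;

    class Sketch {
        static class IndexRecord {
            final long unpadded = 0, uncompressed = 0;
        }

        static void before(ArrayList records) {                  // raw type
            for (Iterator i = records.iterator(); i.hasNext(); ) {
                IndexRecord r = (IndexRecord) i.next();           // unchecked cast
            }
        }

        static void after(List<IndexRecord> records) {           // typed
            for (IndexRecord r : records) {                       // no cast
            }
        }
    }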