initial commit
This commit is contained in:
commit
9945b77f55
259 changed files with 38619 additions and 0 deletions
12
.gitignore
vendored
Normal file
12
.gitignore
vendored
Normal file
|
@ -0,0 +1,12 @@
|
|||
/.idea
|
||||
/target
|
||||
/.settings
|
||||
/.classpath
|
||||
/.project
|
||||
/.gradle
|
||||
build
|
||||
out
|
||||
logs
|
||||
*~
|
||||
*.iml
|
||||
.DS_Store
|
33
build.gradle
Normal file
33
build.gradle
Normal file
|
@ -0,0 +1,33 @@
|
|||
plugins {
|
||||
id "de.marcphilipp.nexus-publish" version "0.4.0"
|
||||
id "io.codearte.nexus-staging" version "0.21.1"
|
||||
}
|
||||
|
||||
wrapper {
|
||||
gradleVersion = "${project.property('gradle.wrapper.version')}"
|
||||
distributionType = Wrapper.DistributionType.ALL
|
||||
}
|
||||
|
||||
ext {
|
||||
user = 'xbib'
|
||||
name = 'archive'
|
||||
description = 'Archive algorithms for Java'
|
||||
inceptionYear = '2016'
|
||||
url = 'https://github.com/' + user + '/' + name
|
||||
scmUrl = 'https://github.com/' + user + '/' + name
|
||||
scmConnection = 'scm:git:git://github.com/' + user + '/' + name + '.git'
|
||||
scmDeveloperConnection = 'scm:git:ssh://git@github.com:' + user + '/' + name + '.git'
|
||||
issueManagementSystem = 'Github'
|
||||
issueManagementUrl = ext.scmUrl + '/issues'
|
||||
licenseName = 'The Apache License, Version 2.0'
|
||||
licenseUrl = 'http://www.apache.org/licenses/LICENSE-2.0.txt'
|
||||
}
|
||||
|
||||
subprojects {
|
||||
apply plugin: 'java-library'
|
||||
apply from: rootProject.file('gradle/ide/idea.gradle')
|
||||
apply from: rootProject.file('gradle/compile/java.gradle')
|
||||
apply from: rootProject.file('gradle/test/junit5.gradle')
|
||||
apply from: rootProject.file('gradle/publishing/publication.gradle')
|
||||
}
|
||||
apply from: rootProject.file('gradle/publishing/sonatype.gradle')
|
5
gradle.properties
Normal file
5
gradle.properties
Normal file
|
@ -0,0 +1,5 @@
|
|||
group = org.xbib
|
||||
name = archive
|
||||
version = 1.0.0
|
||||
|
||||
gradle.wrapper.version = 6.4.1
|
35
gradle/compile/java.gradle
Normal file
35
gradle/compile/java.gradle
Normal file
|
@ -0,0 +1,35 @@
|
|||
|
||||
apply plugin: 'java-library'
|
||||
|
||||
java {
|
||||
modularity.inferModulePath.set(true)
|
||||
}
|
||||
|
||||
compileJava {
|
||||
sourceCompatibility = JavaVersion.VERSION_11
|
||||
targetCompatibility = JavaVersion.VERSION_11
|
||||
}
|
||||
|
||||
compileTestJava {
|
||||
sourceCompatibility = JavaVersion.VERSION_11
|
||||
targetCompatibility = JavaVersion.VERSION_11
|
||||
}
|
||||
|
||||
jar {
|
||||
manifest {
|
||||
attributes('Implementation-Version': project.version)
|
||||
}
|
||||
}
|
||||
|
||||
task sourcesJar(type: Jar, dependsOn: classes) {
|
||||
classifier 'sources'
|
||||
from sourceSets.main.allSource
|
||||
}
|
||||
|
||||
task javadocJar(type: Jar, dependsOn: javadoc) {
|
||||
classifier 'javadoc'
|
||||
}
|
||||
|
||||
artifacts {
|
||||
archives sourcesJar, javadocJar
|
||||
}
|
55
gradle/documentation/asciidoc.gradle
Normal file
55
gradle/documentation/asciidoc.gradle
Normal file
|
@ -0,0 +1,55 @@
|
|||
apply plugin: 'org.xbib.gradle.plugin.asciidoctor'
|
||||
|
||||
configurations {
|
||||
asciidoclet
|
||||
}
|
||||
|
||||
dependencies {
|
||||
asciidoclet "org.asciidoctor:asciidoclet:${project.property('asciidoclet.version')}"
|
||||
}
|
||||
|
||||
|
||||
asciidoctor {
|
||||
backends 'html5'
|
||||
outputDir = file("${rootProject.projectDir}/docs")
|
||||
separateOutputDirs = false
|
||||
attributes 'source-highlighter': 'coderay',
|
||||
idprefix: '',
|
||||
idseparator: '-',
|
||||
toc: 'left',
|
||||
doctype: 'book',
|
||||
icons: 'font',
|
||||
encoding: 'utf-8',
|
||||
sectlink: true,
|
||||
sectanchors: true,
|
||||
linkattrs: true,
|
||||
imagesdir: 'img',
|
||||
stylesheet: "${projectDir}/src/docs/asciidoc/css/foundation.css"
|
||||
}
|
||||
|
||||
|
||||
/*javadoc {
|
||||
options.docletpath = configurations.asciidoclet.files.asType(List)
|
||||
options.doclet = 'org.asciidoctor.Asciidoclet'
|
||||
//options.overview = "src/docs/asciidoclet/overview.adoc"
|
||||
options.addStringOption "-base-dir", "${projectDir}"
|
||||
options.addStringOption "-attribute",
|
||||
"name=${project.name},version=${project.version},title-link=https://github.com/xbib/${project.name}"
|
||||
configure(options) {
|
||||
noTimestamp = true
|
||||
}
|
||||
}*/
|
||||
|
||||
|
||||
/*javadoc {
|
||||
options.docletpath = configurations.asciidoclet.files.asType(List)
|
||||
options.doclet = 'org.asciidoctor.Asciidoclet'
|
||||
options.overview = "${rootProject.projectDir}/src/docs/asciidoclet/overview.adoc"
|
||||
options.addStringOption "-base-dir", "${projectDir}"
|
||||
options.addStringOption "-attribute",
|
||||
"name=${project.name},version=${project.version},title-link=https://github.com/xbib/${project.name}"
|
||||
options.destinationDirectory(file("${projectDir}/docs/javadoc"))
|
||||
configure(options) {
|
||||
noTimestamp = true
|
||||
}
|
||||
}*/
|
13
gradle/ide/idea.gradle
Normal file
13
gradle/ide/idea.gradle
Normal file
|
@ -0,0 +1,13 @@
|
|||
apply plugin: 'idea'
|
||||
|
||||
idea {
|
||||
module {
|
||||
outputDir file('build/classes/java/main')
|
||||
testOutputDir file('build/classes/java/test')
|
||||
}
|
||||
}
|
||||
|
||||
if (project.convention.findPlugin(JavaPluginConvention)) {
|
||||
//sourceSets.main.output.classesDirs = file("build/classes/java/main")
|
||||
//sourceSets.test.output.classesDirs = file("build/classes/java/test")
|
||||
}
|
64
gradle/publishing/publication.gradle
Normal file
64
gradle/publishing/publication.gradle
Normal file
|
@ -0,0 +1,64 @@
|
|||
|
||||
apply plugin: "de.marcphilipp.nexus-publish"
|
||||
|
||||
publishing {
|
||||
publications {
|
||||
mavenJava(MavenPublication) {
|
||||
from components.java
|
||||
artifact sourcesJar
|
||||
artifact javadocJar
|
||||
pom {
|
||||
name = project.name
|
||||
description = rootProject.ext.description
|
||||
url = rootProject.ext.url
|
||||
inceptionYear = rootProject.ext.inceptionYear
|
||||
packaging = 'jar'
|
||||
organization {
|
||||
name = 'xbib'
|
||||
url = 'https://xbib.org'
|
||||
}
|
||||
developers {
|
||||
developer {
|
||||
id = 'jprante'
|
||||
name = 'Jörg Prante'
|
||||
email = 'joergprante@gmail.com'
|
||||
url = 'https://github.com/jprante'
|
||||
}
|
||||
}
|
||||
scm {
|
||||
url = rootProject.ext.scmUrl
|
||||
connection = rootProject.ext.scmConnection
|
||||
developerConnection = rootProject.ext.scmDeveloperConnection
|
||||
}
|
||||
issueManagement {
|
||||
system = rootProject.ext.issueManagementSystem
|
||||
url = rootProject.ext.issueManagementUrl
|
||||
}
|
||||
licenses {
|
||||
license {
|
||||
name = rootProject.ext.licenseName
|
||||
url = rootProject.ext.licenseUrl
|
||||
distribution = 'repo'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (project.hasProperty("signing.keyId")) {
|
||||
apply plugin: 'signing'
|
||||
signing {
|
||||
sign publishing.publications.mavenJava
|
||||
}
|
||||
}
|
||||
|
||||
nexusPublishing {
|
||||
repositories {
|
||||
sonatype {
|
||||
username = project.property('ossrhUsername')
|
||||
password = project.property('ossrhPassword')
|
||||
packageGroup = "org.xbib"
|
||||
}
|
||||
}
|
||||
}
|
11
gradle/publishing/sonatype.gradle
Normal file
11
gradle/publishing/sonatype.gradle
Normal file
|
@ -0,0 +1,11 @@
|
|||
|
||||
if (project.hasProperty('ossrhUsername') && project.hasProperty('ossrhPassword')) {
|
||||
|
||||
apply plugin: 'io.codearte.nexus-staging'
|
||||
|
||||
nexusStaging {
|
||||
username = project.property('ossrhUsername')
|
||||
password = project.property('ossrhPassword')
|
||||
packageGroup = "org.xbib"
|
||||
}
|
||||
}
|
27
gradle/test/junit5.gradle
Normal file
27
gradle/test/junit5.gradle
Normal file
|
@ -0,0 +1,27 @@
|
|||
|
||||
def junitVersion = project.hasProperty('junit.version')?project.property('junit.version'):'5.6.2'
|
||||
def hamcrestVersion = project.hasProperty('hamcrest.version')?project.property('hamcrest.version'):'2.2'
|
||||
|
||||
dependencies {
|
||||
testImplementation "org.junit.jupiter:junit-jupiter-api:${junitVersion}"
|
||||
testImplementation "org.junit.jupiter:junit-jupiter-params:${junitVersion}"
|
||||
testImplementation "org.hamcrest:hamcrest-library:${hamcrestVersion}"
|
||||
testRuntimeOnly "org.junit.jupiter:junit-jupiter-engine:${junitVersion}"
|
||||
}
|
||||
|
||||
test {
|
||||
useJUnitPlatform()
|
||||
failFast = true
|
||||
testLogging {
|
||||
events 'STARTED', 'PASSED', 'FAILED', 'SKIPPED'
|
||||
}
|
||||
afterSuite { desc, result ->
|
||||
if (!desc.parent) {
|
||||
println "\nTest result: ${result.resultType}"
|
||||
println "Test summary: ${result.testCount} tests, " +
|
||||
"${result.successfulTestCount} succeeded, " +
|
||||
"${result.failedTestCount} failed, " +
|
||||
"${result.skippedTestCount} skipped"
|
||||
}
|
||||
}
|
||||
}
|
BIN
gradle/wrapper/gradle-wrapper.jar
vendored
Normal file
BIN
gradle/wrapper/gradle-wrapper.jar
vendored
Normal file
Binary file not shown.
5
gradle/wrapper/gradle-wrapper.properties
vendored
Normal file
5
gradle/wrapper/gradle-wrapper.properties
vendored
Normal file
|
@ -0,0 +1,5 @@
|
|||
distributionBase=GRADLE_USER_HOME
|
||||
distributionPath=wrapper/dists
|
||||
distributionUrl=https\://services.gradle.org/distributions/gradle-6.4.1-all.zip
|
||||
zipStoreBase=GRADLE_USER_HOME
|
||||
zipStorePath=wrapper/dists
|
185
gradlew
vendored
Executable file
185
gradlew
vendored
Executable file
|
@ -0,0 +1,185 @@
|
|||
#!/usr/bin/env sh
|
||||
|
||||
#
|
||||
# Copyright 2015 the original author or authors.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# https://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
|
||||
##############################################################################
|
||||
##
|
||||
## Gradle start up script for UN*X
|
||||
##
|
||||
##############################################################################
|
||||
|
||||
# Attempt to set APP_HOME
|
||||
# Resolve links: $0 may be a link
|
||||
PRG="$0"
|
||||
# Need this for relative symlinks.
|
||||
while [ -h "$PRG" ] ; do
|
||||
ls=`ls -ld "$PRG"`
|
||||
link=`expr "$ls" : '.*-> \(.*\)$'`
|
||||
if expr "$link" : '/.*' > /dev/null; then
|
||||
PRG="$link"
|
||||
else
|
||||
PRG=`dirname "$PRG"`"/$link"
|
||||
fi
|
||||
done
|
||||
SAVED="`pwd`"
|
||||
cd "`dirname \"$PRG\"`/" >/dev/null
|
||||
APP_HOME="`pwd -P`"
|
||||
cd "$SAVED" >/dev/null
|
||||
|
||||
APP_NAME="Gradle"
|
||||
APP_BASE_NAME=`basename "$0"`
|
||||
|
||||
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
|
||||
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
|
||||
|
||||
# Use the maximum available, or set MAX_FD != -1 to use that value.
|
||||
MAX_FD="maximum"
|
||||
|
||||
warn () {
|
||||
echo "$*"
|
||||
}
|
||||
|
||||
die () {
|
||||
echo
|
||||
echo "$*"
|
||||
echo
|
||||
exit 1
|
||||
}
|
||||
|
||||
# OS specific support (must be 'true' or 'false').
|
||||
cygwin=false
|
||||
msys=false
|
||||
darwin=false
|
||||
nonstop=false
|
||||
case "`uname`" in
|
||||
CYGWIN* )
|
||||
cygwin=true
|
||||
;;
|
||||
Darwin* )
|
||||
darwin=true
|
||||
;;
|
||||
MINGW* )
|
||||
msys=true
|
||||
;;
|
||||
NONSTOP* )
|
||||
nonstop=true
|
||||
;;
|
||||
esac
|
||||
|
||||
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
|
||||
|
||||
|
||||
# Determine the Java command to use to start the JVM.
|
||||
if [ -n "$JAVA_HOME" ] ; then
|
||||
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
|
||||
# IBM's JDK on AIX uses strange locations for the executables
|
||||
JAVACMD="$JAVA_HOME/jre/sh/java"
|
||||
else
|
||||
JAVACMD="$JAVA_HOME/bin/java"
|
||||
fi
|
||||
if [ ! -x "$JAVACMD" ] ; then
|
||||
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
|
||||
|
||||
Please set the JAVA_HOME variable in your environment to match the
|
||||
location of your Java installation."
|
||||
fi
|
||||
else
|
||||
JAVACMD="java"
|
||||
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
|
||||
|
||||
Please set the JAVA_HOME variable in your environment to match the
|
||||
location of your Java installation."
|
||||
fi
|
||||
|
||||
# Increase the maximum file descriptors if we can.
|
||||
if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
|
||||
MAX_FD_LIMIT=`ulimit -H -n`
|
||||
if [ $? -eq 0 ] ; then
|
||||
if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
|
||||
MAX_FD="$MAX_FD_LIMIT"
|
||||
fi
|
||||
ulimit -n $MAX_FD
|
||||
if [ $? -ne 0 ] ; then
|
||||
warn "Could not set maximum file descriptor limit: $MAX_FD"
|
||||
fi
|
||||
else
|
||||
warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
|
||||
fi
|
||||
fi
|
||||
|
||||
# For Darwin, add options to specify how the application appears in the dock
|
||||
if $darwin; then
|
||||
GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
|
||||
fi
|
||||
|
||||
# For Cygwin or MSYS, switch paths to Windows format before running java
|
||||
if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then
|
||||
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
|
||||
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
|
||||
|
||||
JAVACMD=`cygpath --unix "$JAVACMD"`
|
||||
|
||||
# We build the pattern for arguments to be converted via cygpath
|
||||
ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
|
||||
SEP=""
|
||||
for dir in $ROOTDIRSRAW ; do
|
||||
ROOTDIRS="$ROOTDIRS$SEP$dir"
|
||||
SEP="|"
|
||||
done
|
||||
OURCYGPATTERN="(^($ROOTDIRS))"
|
||||
# Add a user-defined pattern to the cygpath arguments
|
||||
if [ "$GRADLE_CYGPATTERN" != "" ] ; then
|
||||
OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
|
||||
fi
|
||||
# Now convert the arguments - kludge to limit ourselves to /bin/sh
|
||||
i=0
|
||||
for arg in "$@" ; do
|
||||
CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
|
||||
CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
|
||||
|
||||
if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
|
||||
eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
|
||||
else
|
||||
eval `echo args$i`="\"$arg\""
|
||||
fi
|
||||
i=`expr $i + 1`
|
||||
done
|
||||
case $i in
|
||||
0) set -- ;;
|
||||
1) set -- "$args0" ;;
|
||||
2) set -- "$args0" "$args1" ;;
|
||||
3) set -- "$args0" "$args1" "$args2" ;;
|
||||
4) set -- "$args0" "$args1" "$args2" "$args3" ;;
|
||||
5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
|
||||
6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
|
||||
7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
|
||||
8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
|
||||
9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
|
||||
esac
|
||||
fi
|
||||
|
||||
# Escape application args
|
||||
save () {
|
||||
for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
|
||||
echo " "
|
||||
}
|
||||
APP_ARGS=`save "$@"`
|
||||
|
||||
# Collect all arguments for the java command, following the shell quoting and substitution rules
|
||||
eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
|
||||
|
||||
exec "$JAVACMD" "$@"
|
104
gradlew.bat
vendored
Normal file
104
gradlew.bat
vendored
Normal file
|
@ -0,0 +1,104 @@
|
|||
@rem
|
||||
@rem Copyright 2015 the original author or authors.
|
||||
@rem
|
||||
@rem Licensed under the Apache License, Version 2.0 (the "License");
|
||||
@rem you may not use this file except in compliance with the License.
|
||||
@rem You may obtain a copy of the License at
|
||||
@rem
|
||||
@rem https://www.apache.org/licenses/LICENSE-2.0
|
||||
@rem
|
||||
@rem Unless required by applicable law or agreed to in writing, software
|
||||
@rem distributed under the License is distributed on an "AS IS" BASIS,
|
||||
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
@rem See the License for the specific language governing permissions and
|
||||
@rem limitations under the License.
|
||||
@rem
|
||||
|
||||
@if "%DEBUG%" == "" @echo off
|
||||
@rem ##########################################################################
|
||||
@rem
|
||||
@rem Gradle startup script for Windows
|
||||
@rem
|
||||
@rem ##########################################################################
|
||||
|
||||
@rem Set local scope for the variables with windows NT shell
|
||||
if "%OS%"=="Windows_NT" setlocal
|
||||
|
||||
set DIRNAME=%~dp0
|
||||
if "%DIRNAME%" == "" set DIRNAME=.
|
||||
set APP_BASE_NAME=%~n0
|
||||
set APP_HOME=%DIRNAME%
|
||||
|
||||
@rem Resolve any "." and ".." in APP_HOME to make it shorter.
|
||||
for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
|
||||
|
||||
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
|
||||
set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
|
||||
|
||||
@rem Find java.exe
|
||||
if defined JAVA_HOME goto findJavaFromJavaHome
|
||||
|
||||
set JAVA_EXE=java.exe
|
||||
%JAVA_EXE% -version >NUL 2>&1
|
||||
if "%ERRORLEVEL%" == "0" goto init
|
||||
|
||||
echo.
|
||||
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
|
||||
echo.
|
||||
echo Please set the JAVA_HOME variable in your environment to match the
|
||||
echo location of your Java installation.
|
||||
|
||||
goto fail
|
||||
|
||||
:findJavaFromJavaHome
|
||||
set JAVA_HOME=%JAVA_HOME:"=%
|
||||
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
|
||||
|
||||
if exist "%JAVA_EXE%" goto init
|
||||
|
||||
echo.
|
||||
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
|
||||
echo.
|
||||
echo Please set the JAVA_HOME variable in your environment to match the
|
||||
echo location of your Java installation.
|
||||
|
||||
goto fail
|
||||
|
||||
:init
|
||||
@rem Get command-line arguments, handling Windows variants
|
||||
|
||||
if not "%OS%" == "Windows_NT" goto win9xME_args
|
||||
|
||||
:win9xME_args
|
||||
@rem Slurp the command line arguments.
|
||||
set CMD_LINE_ARGS=
|
||||
set _SKIP=2
|
||||
|
||||
:win9xME_args_slurp
|
||||
if "x%~1" == "x" goto execute
|
||||
|
||||
set CMD_LINE_ARGS=%*
|
||||
|
||||
:execute
|
||||
@rem Setup the command line
|
||||
|
||||
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
|
||||
|
||||
|
||||
@rem Execute Gradle
|
||||
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
|
||||
|
||||
:end
|
||||
@rem End local scope for the variables with windows NT shell
|
||||
if "%ERRORLEVEL%"=="0" goto mainEnd
|
||||
|
||||
:fail
|
||||
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
|
||||
rem the _cmd.exe /c_ return code!
|
||||
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
|
||||
exit /b 1
|
||||
|
||||
:mainEnd
|
||||
if "%OS%"=="Windows_NT" endlocal
|
||||
|
||||
:omega
|
3
io-archive-ar/build.gradle
Normal file
3
io-archive-ar/build.gradle
Normal file
|
@ -0,0 +1,3 @@
|
|||
dependencies {
|
||||
api project(':io-archive')
|
||||
}
|
4
io-archive-ar/src/main/java/module-info.java
Normal file
4
io-archive-ar/src/main/java/module-info.java
Normal file
|
@ -0,0 +1,4 @@
|
|||
module org.xbib.io.archive.ar {
|
||||
exports org.xbib.io.archive.ar;
|
||||
requires org.xbib.io.archive;
|
||||
}
|
|
@ -0,0 +1,181 @@
|
|||
package org.xbib.io.archive.ar;
|
||||
|
||||
import org.xbib.io.archive.entry.ArchiveEntry;
|
||||
import java.io.File;
|
||||
import java.util.Date;
|
||||
|
||||
/**
|
||||
* Represents an archive entry in the "ar" format.
|
||||
* Each AR archive starts with "!<arch>" followed by a LF. After these 8 bytes
|
||||
* the archive entries are listed. The format of an entry header is as it follows:
|
||||
* <pre>
|
||||
* START BYTE END BYTE NAME FORMAT LENGTH
|
||||
* 0 15 File name ASCII 16
|
||||
* 16 27 Modification timestamp Decimal 12
|
||||
* 28 33 Owner ID Decimal 6
|
||||
* 34 39 Group ID Decimal 6
|
||||
* 40 47 File mode Octal 8
|
||||
* 48 57 File size (bytes) Decimal 10
|
||||
* 58 59 File magic \140\012 2
|
||||
* </pre>
|
||||
* This specifies that an ar archive entry header contains 60 bytes.
|
||||
* Due to the limitation of the file name length to 16 bytes GNU and
|
||||
* BSD has their own variants of this format. Currently this code
|
||||
* can read but not write the GNU variant and doesn't support
|
||||
* the BSD variant at all.
|
||||
*
|
||||
* <a href="http://www.freebsd.org/cgi/man.cgi?query=ar&sektion=5">ar man page</a>
|
||||
*/
|
||||
public class ArArchiveEntry implements ArchiveEntry {
|
||||
|
||||
/**
|
||||
* The header for each entry
|
||||
*/
|
||||
public static final String HEADER = "!<arch>\n";
|
||||
|
||||
/**
|
||||
* The trailer for each entry
|
||||
*/
|
||||
public static final String TRAILER = "`\012";
|
||||
|
||||
private static final int DEFAULT_MODE = 33188; // = (octal) 0100644
|
||||
|
||||
/**
|
||||
* SVR4/GNU adds a trailing / to names; BSD does not.
|
||||
* They also vary in how names longer than 16 characters are represented.
|
||||
* (Not yet fully supported by this implementation)
|
||||
*/
|
||||
private String name;
|
||||
|
||||
private int userId;
|
||||
|
||||
private int groupId;
|
||||
|
||||
private int mode;
|
||||
|
||||
private long lastModified;
|
||||
|
||||
private long length;
|
||||
|
||||
public ArArchiveEntry() {
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new instance using a couple of default values.
|
||||
* Sets userId and groupId to 0, the octal file mode to 644 and
|
||||
* the last modified time to the current time.
|
||||
*
|
||||
* @param name name of the entry
|
||||
* @param length length of the entry in bytes
|
||||
*/
|
||||
public ArArchiveEntry(String name, long length) {
|
||||
this(name, length, 0, 0, DEFAULT_MODE,
|
||||
System.currentTimeMillis() / 1000);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new instance.
|
||||
*
|
||||
* @param name name of the entry
|
||||
* @param length length of the entry in bytes
|
||||
* @param userId numeric user id
|
||||
* @param groupId numeric group id
|
||||
* @param mode file mode
|
||||
* @param lastModified last modified time in seconds since the epoch
|
||||
*/
|
||||
public ArArchiveEntry(String name, long length, int userId, int groupId,
|
||||
int mode, long lastModified) {
|
||||
this.name = name;
|
||||
this.length = length;
|
||||
this.userId = userId;
|
||||
this.groupId = groupId;
|
||||
this.mode = mode;
|
||||
this.lastModified = lastModified;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new instance using the attributes of the given file
|
||||
*/
|
||||
public ArArchiveEntry(File inputFile, String entryName) {
|
||||
// TODO sort out mode
|
||||
this(entryName, inputFile.isFile() ? inputFile.length() : 0,
|
||||
0, 0, DEFAULT_MODE, inputFile.lastModified() / 1000);
|
||||
}
|
||||
|
||||
public ArArchiveEntry setEntrySize(long size) {
|
||||
this.length = size;
|
||||
return this;
|
||||
}
|
||||
|
||||
public long getEntrySize() {
|
||||
return this.getLength();
|
||||
}
|
||||
|
||||
public ArArchiveEntry setName(String name) {
|
||||
this.name = name;
|
||||
return this;
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public int getUserId() {
|
||||
return userId;
|
||||
}
|
||||
|
||||
public int getGroupId() {
|
||||
return groupId;
|
||||
}
|
||||
|
||||
public int getMode() {
|
||||
return mode;
|
||||
}
|
||||
|
||||
public ArArchiveEntry setLastModified(Date date) {
|
||||
this.lastModified = date.getTime() / 1000;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Last modified time in seconds since the epoch.
|
||||
*/
|
||||
public Date getLastModified() {
|
||||
return new Date(1000 * lastModified);
|
||||
}
|
||||
|
||||
public long getLength() {
|
||||
return length;
|
||||
}
|
||||
|
||||
public boolean isDirectory() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
final int prime = 31;
|
||||
int result = 1;
|
||||
result = prime * result + ((name == null) ? 0 : name.hashCode());
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (this == obj) {
|
||||
return true;
|
||||
}
|
||||
if (obj == null || getClass() != obj.getClass()) {
|
||||
return false;
|
||||
}
|
||||
ArArchiveEntry other = (ArArchiveEntry) obj;
|
||||
if (name == null) {
|
||||
if (other.name != null) {
|
||||
return false;
|
||||
}
|
||||
} else if (!name.equals(other.name)) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,329 @@
|
|||
package org.xbib.io.archive.ar;
|
||||
|
||||
import org.xbib.io.archive.stream.ArchiveInputStream;
|
||||
import org.xbib.io.archive.util.ArchiveUtils;
|
||||
|
||||
import java.io.EOFException;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
|
||||
/**
|
||||
* Implements the "ar" archive format as an input stream.
|
||||
*/
|
||||
public class ArArchiveInputStream extends ArchiveInputStream<ArArchiveEntry> {
|
||||
|
||||
static final String BSD_LONGNAME_PREFIX = "#1/";
|
||||
|
||||
private static final int BSD_LONGNAME_PREFIX_LEN =
|
||||
BSD_LONGNAME_PREFIX.length();
|
||||
|
||||
private static final String BSD_LONGNAME_PATTERN =
|
||||
"^" + BSD_LONGNAME_PREFIX + "\\d+";
|
||||
|
||||
private final InputStream input;
|
||||
private long offset = 0;
|
||||
private boolean closed;
|
||||
|
||||
/*
|
||||
* If getNextEnxtry has been called, the entry metadata is stored in
|
||||
* currentEntry.
|
||||
*/
|
||||
private ArArchiveEntry currentEntry = null;
|
||||
|
||||
// Storage area for extra long names (GNU ar)
|
||||
private byte[] namebuffer = null;
|
||||
|
||||
/*
|
||||
* The offset where the current entry started. -1 if no entry has been
|
||||
* called
|
||||
*/
|
||||
private long entryOffset = -1;
|
||||
|
||||
/**
|
||||
* Constructs an Ar input stream with the referenced stream
|
||||
*
|
||||
* @param pInput the ar input stream
|
||||
*/
|
||||
public ArArchiveInputStream(final InputStream pInput) {
|
||||
input = pInput;
|
||||
closed = false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the next AR entry in this stream.
|
||||
*
|
||||
* @return the next AR entry.
|
||||
* @throws java.io.IOException if the entry could not be read
|
||||
*/
|
||||
public ArArchiveEntry getNextArEntry() throws IOException {
|
||||
if (currentEntry != null) {
|
||||
final long entryEnd = entryOffset + currentEntry.getLength();
|
||||
while (offset < entryEnd) {
|
||||
int x = read();
|
||||
if (x == -1) {
|
||||
// hit EOF before previous entry was complete
|
||||
// TODO: throw an exception instead?
|
||||
return null;
|
||||
}
|
||||
}
|
||||
currentEntry = null;
|
||||
}
|
||||
|
||||
if (offset == 0) {
|
||||
final byte[] expected = ArchiveUtils.toAsciiBytes(ArArchiveEntry.HEADER);
|
||||
final byte[] realized = new byte[expected.length];
|
||||
final int read = read(realized);
|
||||
if (read != expected.length) {
|
||||
throw new IOException("failed to read header");
|
||||
}
|
||||
for (int i = 0; i < expected.length; i++) {
|
||||
if (expected[i] != realized[i]) {
|
||||
throw new IOException("invalid header " + ArchiveUtils.toAsciiString(realized));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (offset % 2 != 0 && read() < 0) {
|
||||
// hit eof
|
||||
return null;
|
||||
}
|
||||
|
||||
if (input.available() == 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
final byte[] name = new byte[16];
|
||||
final byte[] lastmodified = new byte[12];
|
||||
final byte[] userid = new byte[6];
|
||||
final byte[] groupid = new byte[6];
|
||||
final byte[] filemode = new byte[8];
|
||||
final byte[] length = new byte[10];
|
||||
|
||||
read(name);
|
||||
read(lastmodified);
|
||||
read(userid);
|
||||
read(groupid);
|
||||
read(filemode);
|
||||
read(length);
|
||||
|
||||
{
|
||||
final byte[] expected = ArchiveUtils.toAsciiBytes(ArArchiveEntry.TRAILER);
|
||||
final byte[] realized = new byte[expected.length];
|
||||
final int read = read(realized);
|
||||
if (read != expected.length) {
|
||||
throw new IOException("failed to read entry trailer");
|
||||
}
|
||||
for (int i = 0; i < expected.length; i++) {
|
||||
if (expected[i] != realized[i]) {
|
||||
throw new IOException("invalid entry trailer. not read the content?");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
entryOffset = offset;
|
||||
|
||||
// GNU ar uses a '/' to mark the end of the filename; this allows for the use of spaces without the use of an extended filename.
|
||||
|
||||
// entry name is stored as ASCII string
|
||||
String temp = ArchiveUtils.toAsciiString(name).trim();
|
||||
long len = asLong(length);
|
||||
|
||||
if (isGNUStringTable(temp)) { // GNU extended filenames entry
|
||||
currentEntry = readGNUStringTable(length);
|
||||
return getNextArEntry();
|
||||
} else if (temp.endsWith("/")) { // GNU terminator
|
||||
temp = temp.substring(0, temp.length() - 1);
|
||||
} else if (isGNULongName(temp)) {
|
||||
int offset = Integer.parseInt(temp.substring(1));// get the offset
|
||||
temp = getExtendedName(offset); // convert to the long name
|
||||
} else if (isBSDLongName(temp)) {
|
||||
temp = getBSDLongName(temp);
|
||||
// entry length contained the length of the file name in
|
||||
// addition to the real length of the entry.
|
||||
// assume file name was ASCII, there is no "standard" otherwise
|
||||
int nameLen = temp.length();
|
||||
len -= nameLen;
|
||||
entryOffset += nameLen;
|
||||
}
|
||||
|
||||
currentEntry = new ArArchiveEntry(temp, len, asInt(userid, true),
|
||||
asInt(groupid, true), asInt(filemode, 8),
|
||||
asLong(lastmodified));
|
||||
return currentEntry;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get an extended name from the GNU extended name buffer.
|
||||
*
|
||||
* @param offset pointer to entry within the buffer
|
||||
* @return the extended file name; without trailing "/" if present.
|
||||
* @throws java.io.IOException if name not found or buffer not set up
|
||||
*/
|
||||
private String getExtendedName(int offset) throws IOException {
|
||||
if (namebuffer == null) {
|
||||
throw new IOException("Cannot process GNU long filename as no // record was found");
|
||||
}
|
||||
for (int i = offset; i < namebuffer.length; i++) {
|
||||
if (namebuffer[i] == '\012') {
|
||||
if (namebuffer[i - 1] == '/') {
|
||||
i--; // drop trailing /
|
||||
}
|
||||
return ArchiveUtils.toAsciiString(namebuffer, offset, i - offset);
|
||||
}
|
||||
}
|
||||
throw new IOException("Failed to read entry: " + offset);
|
||||
}
|
||||
|
||||
private long asLong(byte[] input) {
|
||||
return Long.parseLong(ArchiveUtils.toAsciiString(input).trim());
|
||||
}
|
||||
|
||||
private int asInt(byte[] input) {
|
||||
return asInt(input, 10, false);
|
||||
}
|
||||
|
||||
private int asInt(byte[] input, boolean treatBlankAsZero) {
|
||||
return asInt(input, 10, treatBlankAsZero);
|
||||
}
|
||||
|
||||
private int asInt(byte[] input, int base) {
|
||||
return asInt(input, base, false);
|
||||
}
|
||||
|
||||
private int asInt(byte[] input, int base, boolean treatBlankAsZero) {
|
||||
String string = ArchiveUtils.toAsciiString(input).trim();
|
||||
if (string.length() == 0 && treatBlankAsZero) {
|
||||
return 0;
|
||||
}
|
||||
return Integer.parseInt(string, base);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ArArchiveEntry getNextEntry() throws IOException {
|
||||
return getNextArEntry();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() throws IOException {
|
||||
if (!closed) {
|
||||
closed = true;
|
||||
input.close();
|
||||
}
|
||||
currentEntry = null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int read(byte[] b, final int off, final int len) throws IOException {
|
||||
int toRead = len;
|
||||
if (currentEntry != null) {
|
||||
final long entryEnd = entryOffset + currentEntry.getLength();
|
||||
if (len > 0 && entryEnd > offset) {
|
||||
toRead = (int) Math.min(len, entryEnd - offset);
|
||||
} else {
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
final int ret = this.input.read(b, off, toRead);
|
||||
offset += (ret > 0 ? ret : 0);
|
||||
return ret;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Does the name look like it is a long name (or a name containing
|
||||
* spaces) as encoded by BSD ar?
|
||||
* <p/>
|
||||
* <p>From the FreeBSD ar(5) man page:</p>
|
||||
* <pre>
|
||||
* BSD In the BSD variant, names that are shorter than 16
|
||||
* characters and without embedded spaces are stored
|
||||
* directly in this field. If a name has an embedded
|
||||
* space, or if it is longer than 16 characters, then
|
||||
* the string "#1/" followed by the decimal represen-
|
||||
* tation of the length of the file name is placed in
|
||||
* this field. The actual file name is stored immedi-
|
||||
* ately after the archive header. The content of the
|
||||
* archive member follows the file name. The ar_size
|
||||
* field of the header (see below) will then hold the
|
||||
* sum of the size of the file name and the size of
|
||||
* the member.
|
||||
* </pre>
|
||||
*/
|
||||
private static boolean isBSDLongName(String name) {
|
||||
return name != null && name.matches(BSD_LONGNAME_PATTERN);
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads the real name from the current stream assuming the very
|
||||
* first bytes to be read are the real file name.
|
||||
*
|
||||
* @see #isBSDLongName
|
||||
*/
|
||||
private String getBSDLongName(String bsdLongName) throws IOException {
|
||||
int nameLen =
|
||||
Integer.parseInt(bsdLongName.substring(BSD_LONGNAME_PREFIX_LEN));
|
||||
byte[] name = new byte[nameLen];
|
||||
int read = 0, readNow = 0;
|
||||
while ((readNow = input.read(name, read, nameLen - read)) >= 0) {
|
||||
read += readNow;
|
||||
if (read == nameLen) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (read != nameLen) {
|
||||
throw new EOFException();
|
||||
}
|
||||
return ArchiveUtils.toAsciiString(name);
|
||||
}
|
||||
|
||||
private static final String GNU_STRING_TABLE_NAME = "//";
|
||||
|
||||
/**
|
||||
* Is this the name of the "Archive String Table" as used by
|
||||
* SVR4/GNU to store long file names?
|
||||
* <p/>
|
||||
* <p>GNU ar stores multiple extended filenames in the data section
|
||||
* of a file with the name "//", this record is referred to by
|
||||
* future headers.</p>
|
||||
* <p/>
|
||||
* <p>A header references an extended filename by storing a "/"
|
||||
* followed by a decimal offset to the start of the filename in
|
||||
* the extended filename data section.</p>
|
||||
* <p/>
|
||||
* <p>The format of the "//" file itself is simply a list of the
|
||||
* long filenames, each separated by one or more LF
|
||||
* characters. Note that the decimal offsets are number of
|
||||
* characters, not line or string number within the "//" file.</p>
|
||||
*/
|
||||
private static boolean isGNUStringTable(String name) {
|
||||
return GNU_STRING_TABLE_NAME.equals(name);
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads the GNU archive String Table.
|
||||
*
|
||||
* @see #isGNUStringTable
|
||||
*/
|
||||
private ArArchiveEntry readGNUStringTable(byte[] length) throws IOException {
|
||||
int bufflen = asInt(length); // Assume length will fit in an int
|
||||
namebuffer = new byte[bufflen];
|
||||
int read = read(namebuffer, 0, bufflen);
|
||||
if (read != bufflen) {
|
||||
throw new IOException("Failed to read complete // record: expected="
|
||||
+ bufflen + " read=" + read);
|
||||
}
|
||||
return new ArArchiveEntry(GNU_STRING_TABLE_NAME, bufflen);
|
||||
}
|
||||
|
||||
private static final String GNU_LONGNAME_PATTERN = "^/\\d+";
|
||||
|
||||
/**
|
||||
* Does the name look like it is a long name (or a name containing
|
||||
* spaces) as encoded by SVR4/GNU ar?
|
||||
*
|
||||
* @see #isGNUStringTable
|
||||
*/
|
||||
private boolean isGNULongName(String name) {
|
||||
return name != null && name.matches(GNU_LONGNAME_PATTERN);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,215 @@
|
|||
package org.xbib.io.archive.ar;
|
||||
|
||||
import org.xbib.io.archive.stream.ArchiveOutputStream;
|
||||
import org.xbib.io.archive.util.ArchiveUtils;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.OutputStream;
|
||||
|
||||
/**
|
||||
* Implements the "ar" archive format as an output stream.
|
||||
*/
|
||||
public class ArArchiveOutputStream extends ArchiveOutputStream<ArArchiveEntry> {
|
||||
/**
|
||||
* Fail if a long file name is required in the archive.
|
||||
*/
|
||||
public static final int LONGFILE_ERROR = 0;
|
||||
|
||||
/**
|
||||
* BSD ar extensions are used to store long file names in the archive.
|
||||
*/
|
||||
public static final int LONGFILE_BSD = 1;
|
||||
|
||||
private final OutputStream out;
|
||||
|
||||
private long entryOffset = 0;
|
||||
|
||||
private ArArchiveEntry prevEntry;
|
||||
|
||||
private boolean haveUnclosedEntry = false;
|
||||
|
||||
private int longFileMode = LONGFILE_ERROR;
|
||||
|
||||
/**
|
||||
* indicates if this archive is finished
|
||||
*/
|
||||
private boolean finished = false;
|
||||
|
||||
public ArArchiveOutputStream(final OutputStream pOut) {
|
||||
this.out = pOut;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the long file mode.
|
||||
* This can be LONGFILE_ERROR(0) or LONGFILE_BSD(1).
|
||||
* This specifies the treatment of long file names (names >= 16).
|
||||
* Default is LONGFILE_ERROR.
|
||||
*
|
||||
* @param longFileMode the mode to use
|
||||
*/
|
||||
public void setLongFileMode(int longFileMode) {
|
||||
this.longFileMode = longFileMode;
|
||||
}
|
||||
|
||||
private long writeArchiveHeader() throws IOException {
|
||||
byte[] header = ArchiveUtils.toAsciiBytes(ArArchiveEntry.HEADER);
|
||||
out.write(header);
|
||||
return header.length;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void closeArchiveEntry() throws IOException {
|
||||
if (finished) {
|
||||
throw new IOException("Stream has already been finished");
|
||||
}
|
||||
if (prevEntry == null || !haveUnclosedEntry) {
|
||||
throw new IOException("No current entry to close");
|
||||
}
|
||||
if ((entryOffset % 2) != 0) {
|
||||
out.write('\n'); // Pad byte
|
||||
}
|
||||
haveUnclosedEntry = false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ArArchiveEntry newArchiveEntry() {
|
||||
return new ArArchiveEntry();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void putArchiveEntry(final ArArchiveEntry pEntry) throws IOException {
|
||||
if (finished) {
|
||||
throw new IOException("Stream has already been finished");
|
||||
}
|
||||
if (prevEntry == null) {
|
||||
writeArchiveHeader();
|
||||
} else {
|
||||
if (prevEntry.getLength() != entryOffset) {
|
||||
throw new IOException("length does not match entry (" + prevEntry.getLength() + " != " + entryOffset);
|
||||
}
|
||||
|
||||
if (haveUnclosedEntry) {
|
||||
closeArchiveEntry();
|
||||
}
|
||||
}
|
||||
|
||||
prevEntry = pEntry;
|
||||
|
||||
writeEntryHeader(pEntry);
|
||||
|
||||
entryOffset = 0;
|
||||
haveUnclosedEntry = true;
|
||||
}
|
||||
|
||||
private long fill(final long pOffset, final long pNewOffset, final char pFill) throws IOException {
|
||||
final long diff = pNewOffset - pOffset;
|
||||
|
||||
if (diff > 0) {
|
||||
for (int i = 0; i < diff; i++) {
|
||||
write(pFill);
|
||||
}
|
||||
}
|
||||
|
||||
return pNewOffset;
|
||||
}
|
||||
|
||||
private long write(final String data) throws IOException {
|
||||
final byte[] bytes = data.getBytes("ascii");
|
||||
write(bytes);
|
||||
return bytes.length;
|
||||
}
|
||||
|
||||
private long writeEntryHeader(final ArArchiveEntry pEntry) throws IOException {
|
||||
|
||||
long offset = 0;
|
||||
boolean mustAppendName = false;
|
||||
|
||||
final String n = pEntry.getName();
|
||||
if (LONGFILE_ERROR == longFileMode && n.length() > 16) {
|
||||
throw new IOException("filename too long, > 16 chars: " + n);
|
||||
}
|
||||
if (LONGFILE_BSD == longFileMode &&
|
||||
(n.length() > 16 || n.indexOf(" ") > -1)) {
|
||||
mustAppendName = true;
|
||||
offset += write(ArArchiveInputStream.BSD_LONGNAME_PREFIX
|
||||
+ String.valueOf(n.length()));
|
||||
} else {
|
||||
offset += write(n);
|
||||
}
|
||||
|
||||
offset = fill(offset, 16, ' ');
|
||||
final String m = "" + (pEntry.getLastModified());
|
||||
if (m.length() > 12) {
|
||||
throw new IOException("modified too long");
|
||||
}
|
||||
offset += write(m);
|
||||
|
||||
offset = fill(offset, 28, ' ');
|
||||
final String u = "" + pEntry.getUserId();
|
||||
if (u.length() > 6) {
|
||||
throw new IOException("userid too long");
|
||||
}
|
||||
offset += write(u);
|
||||
|
||||
offset = fill(offset, 34, ' ');
|
||||
final String g = "" + pEntry.getGroupId();
|
||||
if (g.length() > 6) {
|
||||
throw new IOException("groupid too long");
|
||||
}
|
||||
offset += write(g);
|
||||
|
||||
offset = fill(offset, 40, ' ');
|
||||
final String fm = "" + Integer.toString(pEntry.getMode(), 8);
|
||||
if (fm.length() > 8) {
|
||||
throw new IOException("filemode too long");
|
||||
}
|
||||
offset += write(fm);
|
||||
|
||||
offset = fill(offset, 48, ' ');
|
||||
final String s =
|
||||
String.valueOf(pEntry.getLength()
|
||||
+ (mustAppendName ? n.length() : 0));
|
||||
if (s.length() > 10) {
|
||||
throw new IOException("size too long");
|
||||
}
|
||||
offset += write(s);
|
||||
|
||||
offset = fill(offset, 58, ' ');
|
||||
|
||||
offset += write(ArArchiveEntry.TRAILER);
|
||||
|
||||
if (mustAppendName) {
|
||||
offset += write(n);
|
||||
}
|
||||
|
||||
return offset;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void write(byte[] b, int off, int len) throws IOException {
|
||||
out.write(b, off, len);
|
||||
entryOffset += len;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calls finish if necessary, and then closes the OutputStream
|
||||
*/
|
||||
@Override
|
||||
public void close() throws IOException {
|
||||
if (!finished) {
|
||||
finish();
|
||||
}
|
||||
out.close();
|
||||
prevEntry = null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void finish() throws IOException {
|
||||
if (haveUnclosedEntry) {
|
||||
throw new IOException("This archive contains unclosed entries.");
|
||||
} else if (finished) {
|
||||
throw new IOException("This archive has already been finished");
|
||||
}
|
||||
finished = true;
|
||||
}
|
||||
}
|
3
io-archive-cpio/build.gradle
Normal file
3
io-archive-cpio/build.gradle
Normal file
|
@ -0,0 +1,3 @@
|
|||
dependencies {
|
||||
api project(':io-archive')
|
||||
}
|
4
io-archive-cpio/src/main/java/module-info.java
Normal file
4
io-archive-cpio/src/main/java/module-info.java
Normal file
|
@ -0,0 +1,4 @@
|
|||
module org.xbib.io.archive.cpio {
|
||||
exports org.xbib.io.archive.cpio;
|
||||
requires org.xbib.io.archive;
|
||||
}
|
|
@ -0,0 +1,55 @@
|
|||
|
||||
package org.xbib.io.archive.cpio;
|
||||
|
||||
import java.io.FilterOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.OutputStream;
|
||||
|
||||
/**
|
||||
* Stream that tracks the number of bytes read.
|
||||
*/
|
||||
public class CountingOutputStream extends FilterOutputStream {
|
||||
private long bytesWritten = 0;
|
||||
|
||||
public CountingOutputStream(final OutputStream out) {
|
||||
super(out);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void write(int b) throws IOException {
|
||||
out.write(b);
|
||||
count(1);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void write(byte[] b) throws IOException {
|
||||
write(b, 0, b.length);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void write(byte[] b, int off, int len) throws IOException {
|
||||
out.write(b, off, len);
|
||||
count(len);
|
||||
}
|
||||
|
||||
/**
|
||||
* Increments the counter of already written bytes.
|
||||
* Doesn't increment if the EOF has been hit (written == -1)
|
||||
*
|
||||
* @param written the number of bytes written
|
||||
*/
|
||||
protected void count(long written) {
|
||||
if (written != -1) {
|
||||
bytesWritten += written;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the current number of bytes written to this stream.
|
||||
*
|
||||
* @return the number of written bytes
|
||||
*/
|
||||
public long getBytesWritten() {
|
||||
return bytesWritten;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,805 @@
|
|||
package org.xbib.io.archive.cpio;
|
||||
|
||||
import org.xbib.io.archive.entry.ArchiveEntry;
|
||||
|
||||
import java.io.File;
|
||||
import java.util.Date;
|
||||
|
||||
/**
|
||||
* A cpio archive consists of a sequence of files. There are several types of
|
||||
* headers defided in two categories of new and old format. The headers are
|
||||
* recognized by magic numbers:
|
||||
*
|
||||
* <ul>
|
||||
* <li>"070701" ASCII for new portable format</li>
|
||||
* <li>"070702" ASCII for new portable format with CRC format</li>
|
||||
* <li>"070707" ASCII for old ascii (also known as Portable ASCII, odc or old
|
||||
* character format</li>
|
||||
* <li>070707 binary for old binary</li>
|
||||
* </ul>
|
||||
* The old binary format is limited to 16 bits for user id, group
|
||||
* id, device, and inode numbers. It is limited to 4 gigabyte file
|
||||
* sizes.
|
||||
* The old ASCII format is limited to 18 bits for the user id, group
|
||||
* id, device, and inode numbers. It is limited to 8 gigabyte file
|
||||
* sizes.
|
||||
* The new ASCII format is limited to 4 gigabyte file sizes.
|
||||
* CPIO 2.5 knows also about tar, but it is not recognized here.
|
||||
* OLD FORMAT
|
||||
* Each file has a 76 (ascii) / 26 (binary) byte header, a variable
|
||||
* length, NUL terminated filename, and variable length file data. A
|
||||
* header for a filename "TRAILER!!!" indicates the end of the
|
||||
* archive.
|
||||
* All the fields in the header are ISO 646 (approximately ASCII)
|
||||
* strings of octal numbers, left padded, not NUL terminated.
|
||||
* <pre>
|
||||
* FIELDNAME NOTES
|
||||
* c_magic The integer value octal 070707. This value can be used to deter-
|
||||
* mine whether this archive is written with little-endian or big-
|
||||
* endian integers.
|
||||
* c_dev Device that contains a directory entry for this file
|
||||
* c_ino I-node number that identifies the input file to the file system
|
||||
* c_mode The mode specifies both the regular permissions and the file type.
|
||||
* c_uid Numeric User ID of the owner of the input file
|
||||
* c_gid Numeric Group ID of the owner of the input file
|
||||
* c_nlink Number of links that are connected to the input file
|
||||
* c_rdev For block special and character special entries, this field
|
||||
* contains the associated device number. For all other entry types,
|
||||
* it should be set to zero by writers and ignored by readers.
|
||||
* c_mtime[2] Modification time of the file, indicated as the number of seconds
|
||||
* since the start of the epoch, 00:00:00 UTC January 1, 1970. The
|
||||
* four-byte integer is stored with the most-significant 16 bits
|
||||
* first followed by the least-significant 16 bits. Each of the two
|
||||
* 16 bit values are stored in machine-native byte order.
|
||||
* c_namesize Length of the path name, including the terminating null byte
|
||||
* c_filesize[2] Length of the file in bytes. This is the length of the data
|
||||
* section that follows the header structure. Must be 0 for
|
||||
* FIFOs and directories
|
||||
*
|
||||
* All fields are unsigned short fields with 16-bit integer values
|
||||
* apart from c_mtime and c_filesize which are 32-bit integer values
|
||||
* </pre>
|
||||
* If necessary, the filename and file data are padded with a NUL byte to an even length
|
||||
* Special files, directories, and the trailer are recorded with
|
||||
* the h_filesize field equal to 0.
|
||||
* In the ASCII version of this format, the 16-bit entries are represented as 6-byte octal numbers,
|
||||
* and the 32-bit entries are represented as 11-byte octal numbers. No padding is added.
|
||||
* NEW FORMAT
|
||||
* Each file has a 110 byte header, a variable length, NUL
|
||||
* terminated filename, and variable length file data. A header for a
|
||||
* filename "TRAILER!!!" indicates the end of the archive. All the
|
||||
* fields in the header are ISO 646 (approximately ASCII) strings of
|
||||
* hexadecimal numbers, left padded, not NUL terminated.
|
||||
* <pre>
|
||||
* FIELDNAME NOTES
|
||||
* c_magic[6] The string 070701 for new ASCII, the string 070702 for new ASCII with CRC
|
||||
* c_ino[8]
|
||||
* c_mode[8]
|
||||
* c_uid[8]
|
||||
* c_gid[8]
|
||||
* c_nlink[8]
|
||||
* c_mtim[8]
|
||||
* c_filesize[8] must be 0 for FIFOs and directories
|
||||
* c_maj[8]
|
||||
* c_min[8]
|
||||
* c_rmaj[8] only valid for chr and blk special files
|
||||
* c_rmin[8] only valid for chr and blk special files
|
||||
* c_namesize[8] count includes terminating NUL in pathname
|
||||
* c_check[8] 0 for "new" portable format; for CRC format
|
||||
* the sum of all the bytes in the file
|
||||
* </pre>
|
||||
* New ASCII Format The "new" ASCII format uses 8-byte hexadecimal
|
||||
* fields for all numbers and separates device numbers into separate
|
||||
* fields for major and minor numbers.
|
||||
* The pathname is followed by NUL bytes so that the total size of
|
||||
* the fixed header plus pathname is a multiple of four. Likewise, the
|
||||
* file data is padded to a multiple of four bytes.
|
||||
* This class uses mutable fields and is not considered to be
|
||||
* threadsafe.
|
||||
* Based on code from the jRPM project (http://jrpm.sourceforge.net)
|
||||
* The MAGIC numbers and other constants are defined in {@link CpioConstants}
|
||||
* N.B. does not handle the cpio "tar" format
|
||||
*
|
||||
* <a href="http://people.freebsd.org/~kientzle/libarchive/man/cpio.5.txt">CPIO man page</a>
|
||||
*/
|
||||
public class CpioArchiveEntry implements CpioConstants, ArchiveEntry {
|
||||
|
||||
/**
|
||||
* See constructor documenation for possible values.
|
||||
*/
|
||||
private short fileFormat;
|
||||
|
||||
/**
|
||||
* The number of bytes in each header record; depends on the file format
|
||||
*/
|
||||
private int headerSize;
|
||||
|
||||
/**
|
||||
* The boundary to which the header and data elements are aligned: 0, 2 or 4 bytes
|
||||
*/
|
||||
private int alignmentBoundary;
|
||||
|
||||
// Header fields
|
||||
|
||||
private long chksum = 0;
|
||||
|
||||
/**
|
||||
* Number of bytes in the file
|
||||
*/
|
||||
private long filesize = 0;
|
||||
|
||||
private long gid = 0;
|
||||
|
||||
private long inode = 0;
|
||||
|
||||
private long maj = 0;
|
||||
|
||||
private long min = 0;
|
||||
|
||||
private long mode = 0;
|
||||
|
||||
private long mtime = 0;
|
||||
|
||||
private String name;
|
||||
|
||||
private long nlink = 0;
|
||||
|
||||
private long rmaj = 0;
|
||||
|
||||
private long rmin = 0;
|
||||
|
||||
private long uid = 0;
|
||||
|
||||
public CpioArchiveEntry() {
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a CPIOArchiveEntry with a specified format.
|
||||
*
|
||||
* @param format The cpio format for this entry.
|
||||
* Possible format values are:
|
||||
* CpioConstants.FORMAT_NEW
|
||||
* CpioConstants.FORMAT_NEW_CRC
|
||||
* CpioConstants.FORMAT_OLD_BINARY
|
||||
* CpioConstants.FORMAT_OLD_ASCII
|
||||
*/
|
||||
public CpioArchiveEntry(final short format) {
|
||||
switch (format) {
|
||||
case FORMAT_NEW:
|
||||
this.headerSize = 110;
|
||||
this.alignmentBoundary = 4;
|
||||
break;
|
||||
case FORMAT_NEW_CRC:
|
||||
this.headerSize = 110;
|
||||
this.alignmentBoundary = 4;
|
||||
break;
|
||||
case FORMAT_OLD_ASCII:
|
||||
this.headerSize = 76;
|
||||
this.alignmentBoundary = 0;
|
||||
break;
|
||||
case FORMAT_OLD_BINARY:
|
||||
this.headerSize = 26;
|
||||
this.alignmentBoundary = 2;
|
||||
break;
|
||||
default:
|
||||
throw new IllegalArgumentException("Unknown header type");
|
||||
}
|
||||
this.fileFormat = format;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a CPIOArchiveEntry with a specified name. The format of
|
||||
* this entry will be the new format.
|
||||
*
|
||||
* @param name The name of this entry.
|
||||
*/
|
||||
public CpioArchiveEntry(final String name) {
|
||||
this(FORMAT_NEW, name);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a CPIOArchiveEntry with a specified name.
|
||||
*
|
||||
* @param format The cpio format for this entry.
|
||||
* @param name The name of this entry.
|
||||
* Possible format values are:
|
||||
* CpioConstants.FORMAT_NEW
|
||||
* CpioConstants.FORMAT_NEW_CRC
|
||||
* CpioConstants.FORMAT_OLD_BINARY
|
||||
* CpioConstants.FORMAT_OLD_ASCII
|
||||
*/
|
||||
public CpioArchiveEntry(final short format, final String name) {
|
||||
this(format);
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a CPIOArchiveEntry with a specified name. The format of
|
||||
* this entry will be the new format.
|
||||
*
|
||||
* @param name The name of this entry.
|
||||
* @param size The size of this entry
|
||||
*/
|
||||
public CpioArchiveEntry(final String name, final long size) {
|
||||
this(name);
|
||||
setEntrySize(size);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a CPIOArchiveEntry with a specified name.
|
||||
*
|
||||
* @param format The cpio format for this entry.
|
||||
* @param name The name of this entry.
|
||||
* @param size The size of this entry
|
||||
* Possible format values are:
|
||||
* CpioConstants.FORMAT_NEW
|
||||
* CpioConstants.FORMAT_NEW_CRC
|
||||
* CpioConstants.FORMAT_OLD_BINARY
|
||||
* CpioConstants.FORMAT_OLD_ASCII
|
||||
*/
|
||||
public CpioArchiveEntry(final short format, final String name,
|
||||
final long size) {
|
||||
this(format, name);
|
||||
setEntrySize(size);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a CPIOArchiveEntry with a specified name for a
|
||||
* specified file. The format of this entry will be the new
|
||||
* format.
|
||||
*
|
||||
* @param inputFile The file to gather information from.
|
||||
* @param entryName The name of this entry.
|
||||
*/
|
||||
public CpioArchiveEntry(File inputFile, String entryName) {
|
||||
this(FORMAT_NEW, inputFile, entryName);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a CPIOArchiveEntry with a specified name for a
|
||||
* specified file.
|
||||
*
|
||||
* @param format The cpio format for this entry.
|
||||
* @param inputFile The file to gather information from.
|
||||
* @param entryName The name of this entry.
|
||||
* Possible format values are:
|
||||
* CpioConstants.FORMAT_NEW
|
||||
* CpioConstants.FORMAT_NEW_CRC
|
||||
* CpioConstants.FORMAT_OLD_BINARY
|
||||
* CpioConstants.FORMAT_OLD_ASCII
|
||||
*/
|
||||
public CpioArchiveEntry(final short format, File inputFile,
|
||||
String entryName) {
|
||||
this(format, entryName, inputFile.isFile() ? inputFile.length() : 0);
|
||||
long mode = 0;
|
||||
if (inputFile.isDirectory()) {
|
||||
mode |= C_ISDIR;
|
||||
} else if (inputFile.isFile()) {
|
||||
mode |= C_ISREG;
|
||||
} else {
|
||||
throw new IllegalArgumentException("Cannot determine type of file "
|
||||
+ inputFile.getName());
|
||||
}
|
||||
setMode(mode);
|
||||
setTime(inputFile.lastModified() / 1000);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if the method is allowed for the defined format.
|
||||
*/
|
||||
private void checkNewFormat() {
|
||||
if ((this.fileFormat & FORMAT_NEW_MASK) == 0) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if the method is allowed for the defined format.
|
||||
*/
|
||||
private void checkOldFormat() {
|
||||
if ((this.fileFormat & FORMAT_OLD_MASK) == 0) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the checksum.
|
||||
* Only supported for the new formats.
|
||||
*
|
||||
* @return Returns the checksum.
|
||||
* @throws UnsupportedOperationException if the format is not a new format
|
||||
*/
|
||||
public long getChksum() {
|
||||
checkNewFormat();
|
||||
return this.chksum;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the device id.
|
||||
*
|
||||
* @return Returns the device id.
|
||||
* @throws UnsupportedOperationException if this method is called for a CPIOArchiveEntry with a new
|
||||
* format.
|
||||
*/
|
||||
public long getDevice() {
|
||||
checkOldFormat();
|
||||
return this.min;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the major device id.
|
||||
*
|
||||
* @return Returns the major device id.
|
||||
* @throws UnsupportedOperationException if this method is called for a CPIOArchiveEntry with an old
|
||||
* format.
|
||||
*/
|
||||
public long getDeviceMaj() {
|
||||
checkNewFormat();
|
||||
return this.maj;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the minor device id
|
||||
*
|
||||
* @return Returns the minor device id.
|
||||
* @throws UnsupportedOperationException if format is not a new format
|
||||
*/
|
||||
public long getDeviceMin() {
|
||||
checkNewFormat();
|
||||
return this.min;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the filesize.
|
||||
*
|
||||
* @return Returns the filesize.
|
||||
*/
|
||||
public long getEntrySize() {
|
||||
return this.filesize;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the format for this entry.
|
||||
*
|
||||
* @return Returns the format.
|
||||
*/
|
||||
public short getFormat() {
|
||||
return this.fileFormat;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the group id.
|
||||
*
|
||||
* @return Returns the group id.
|
||||
*/
|
||||
public long getGID() {
|
||||
return this.gid;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the header size for this CPIO format
|
||||
*
|
||||
* @return Returns the header size in bytes.
|
||||
*/
|
||||
public int getHeaderSize() {
|
||||
return this.headerSize;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the alignment boundary for this CPIO format
|
||||
*
|
||||
* @return Returns the alignment boundary (0, 2, 4) in bytes
|
||||
*/
|
||||
public int getAlignmentBoundary() {
|
||||
return this.alignmentBoundary;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the number of bytes needed to pad the header to the alignment boundary.
|
||||
*
|
||||
* @return the number of bytes needed to pad the header (0,1,2,3)
|
||||
*/
|
||||
public int getHeaderPadCount() {
|
||||
if (this.alignmentBoundary == 0) {
|
||||
return 0;
|
||||
}
|
||||
int size = this.headerSize + this.name.length() + 1; // Name has terminating null
|
||||
int remain = size % this.alignmentBoundary;
|
||||
if (remain > 0) {
|
||||
return this.alignmentBoundary - remain;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the number of bytes needed to pad the data to the alignment boundary.
|
||||
*
|
||||
* @return the number of bytes needed to pad the data (0,1,2,3)
|
||||
*/
|
||||
public int getDataPadCount() {
|
||||
if (this.alignmentBoundary == 0) {
|
||||
return 0;
|
||||
}
|
||||
long size = this.filesize;
|
||||
int remain = (int) (size % this.alignmentBoundary);
|
||||
if (remain > 0) {
|
||||
return this.alignmentBoundary - remain;
|
||||
}
|
||||
return 0;
|
||||
}
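// A worked example of the padding arithmetic above (a sketch, assuming the new
// ASCII format with a 110-byte header and a 4-byte alignment boundary): for an
// entry named "foo" the header occupies 110 + 3 + 1 = 114 bytes, 114 % 4 = 2,
// so getHeaderPadCount() returns 4 - 2 = 2; for a 5-byte file, 5 % 4 = 1, so
// getDataPadCount() returns 4 - 1 = 3.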
|
||||
|
||||
/**
|
||||
* Get the inode.
|
||||
*
|
||||
* @return Returns the inode.
|
||||
*/
|
||||
public long getInode() {
|
||||
return this.inode;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the mode of this entry (e.g. directory, regular file).
|
||||
*
|
||||
* @return Returns the mode.
|
||||
*/
|
||||
public long getMode() {
|
||||
return mode == 0 && !CPIO_TRAILER.equals(name) ? C_ISREG : mode;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the name.
|
||||
*
|
||||
* @return Returns the name.
|
||||
*/
|
||||
public String getName() {
|
||||
return this.name;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the number of links.
|
||||
*
|
||||
* @return Returns the number of links.
|
||||
*/
|
||||
public long getNumberOfLinks() {
|
||||
return nlink == 0 ?
|
||||
(isDirectory() ? 2 : 1)
|
||||
: nlink;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the remote device id.
|
||||
*
|
||||
* @return Returns the remote device id.
|
||||
* @throws UnsupportedOperationException if this method is called for a CPIOArchiveEntry with a new
|
||||
* format.
|
||||
*/
|
||||
public long getRemoteDevice() {
|
||||
checkOldFormat();
|
||||
return this.rmin;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the remote major device id.
|
||||
*
|
||||
* @return Returns the remote major device id.
|
||||
* @throws UnsupportedOperationException if this method is called for a CPIOArchiveEntry with an old
|
||||
* format.
|
||||
*/
|
||||
public long getRemoteDeviceMaj() {
|
||||
checkNewFormat();
|
||||
return this.rmaj;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the remote minor device id.
|
||||
*
|
||||
* @return Returns the remote minor device id.
|
||||
* @throws UnsupportedOperationException if this method is called for a CPIOArchiveEntry with an old
|
||||
* format.
|
||||
*/
|
||||
public long getRemoteDeviceMin() {
|
||||
checkNewFormat();
|
||||
return this.rmin;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the time in seconds.
|
||||
*
|
||||
* @return Returns the time.
|
||||
*/
|
||||
public long getTime() {
|
||||
return this.mtime;
|
||||
}
|
||||
|
||||
public CpioArchiveEntry setLastModified(Date date) {
|
||||
setTime(date.getTime() / 1000);
|
||||
return this;
|
||||
}
|
||||
|
||||
public Date getLastModified() {
|
||||
return new Date(1000 * getTime());
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the user id.
|
||||
*
|
||||
* @return Returns the user id.
|
||||
*/
|
||||
public long getUID() {
|
||||
return this.uid;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if this entry represents a block device.
|
||||
*
|
||||
* @return TRUE if this entry is a block device.
|
||||
*/
|
||||
public boolean isBlockDevice() {
|
||||
return (this.mode & S_IFMT) == C_ISBLK;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if this entry represents a character device.
|
||||
*
|
||||
* @return TRUE if this entry is a character device.
|
||||
*/
|
||||
public boolean isCharacterDevice() {
|
||||
return (this.mode & S_IFMT) == C_ISCHR;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if this entry represents a directory.
|
||||
*
|
||||
* @return TRUE if this entry is a directory.
|
||||
*/
|
||||
public boolean isDirectory() {
|
||||
return (this.mode & S_IFMT) == C_ISDIR;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if this entry represents a network device.
|
||||
*
|
||||
* @return TRUE if this entry is a network device.
|
||||
*/
|
||||
public boolean isNetwork() {
|
||||
return (this.mode & S_IFMT) == C_ISNWK;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if this entry represents a pipe.
|
||||
*
|
||||
* @return TRUE if this entry is a pipe.
|
||||
*/
|
||||
public boolean isPipe() {
|
||||
return (this.mode & S_IFMT) == C_ISFIFO;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if this entry represents a regular file.
|
||||
*
|
||||
* @return TRUE if this entry is a regular file.
|
||||
*/
|
||||
public boolean isRegularFile() {
|
||||
return (this.mode & S_IFMT) == C_ISREG;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if this entry represents a socket.
|
||||
*
|
||||
* @return TRUE if this entry is a socket.
|
||||
*/
|
||||
public boolean isSocket() {
|
||||
return (this.mode & S_IFMT) == C_ISSOCK;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if this entry represents a symbolic link.
|
||||
*
|
||||
* @return TRUE if this entry is a symbolic link.
|
||||
*/
|
||||
public boolean isSymbolicLink() {
|
||||
return (this.mode & S_IFMT) == C_ISLNK;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the checksum. The checksum is calculated by adding all bytes of a
|
||||
* file to transfer (crc += buf[pos] & 0xFF).
|
||||
*
|
||||
* @param chksum The checksum to set.
|
||||
*/
|
||||
public void setChksum(final long chksum) {
|
||||
checkNewFormat();
|
||||
this.chksum = chksum;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the device id.
|
||||
*
|
||||
* @param device The device id to set.
|
||||
* @throws UnsupportedOperationException if this method is called for a CPIOArchiveEntry with a new
|
||||
* format.
|
||||
*/
|
||||
public void setDevice(final long device) {
|
||||
checkOldFormat();
|
||||
this.min = device;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set major device id.
|
||||
*
|
||||
* @param maj The major device id to set.
|
||||
*/
|
||||
public void setDeviceMaj(final long maj) {
|
||||
checkNewFormat();
|
||||
this.maj = maj;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the minor device id
|
||||
*
|
||||
* @param min The minor device id to set.
|
||||
*/
|
||||
public void setDeviceMin(final long min) {
|
||||
checkNewFormat();
|
||||
this.min = min;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the filesize.
|
||||
*
|
||||
* @param size The filesize to set.
|
||||
*/
|
||||
public CpioArchiveEntry setEntrySize(final long size) {
|
||||
if (size < 0 || size > 0xFFFFFFFFL) {
|
||||
throw new IllegalArgumentException("invalid entry size <" + size
|
||||
+ ">");
|
||||
}
|
||||
this.filesize = size;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the group id.
|
||||
*
|
||||
* @param gid The group id to set.
|
||||
*/
|
||||
public void setGID(final long gid) {
|
||||
this.gid = gid;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the inode.
|
||||
*
|
||||
* @param inode The inode to set.
|
||||
*/
|
||||
public void setInode(final long inode) {
|
||||
this.inode = inode;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the mode of this entry (e.g. directory, regular file).
|
||||
*
|
||||
* @param mode The mode to set.
|
||||
*/
|
||||
public void setMode(final long mode) {
|
||||
final long maskedMode = mode & S_IFMT;
|
||||
switch ((int) maskedMode) {
|
||||
case C_ISDIR:
|
||||
case C_ISLNK:
|
||||
case C_ISREG:
|
||||
case C_ISFIFO:
|
||||
case C_ISCHR:
|
||||
case C_ISBLK:
|
||||
case C_ISSOCK:
|
||||
case C_ISNWK:
|
||||
break;
|
||||
default:
|
||||
throw new IllegalArgumentException(
|
||||
"Unknown mode. "
|
||||
+ "Full: " + Long.toHexString(mode)
|
||||
+ " Masked: " + Long.toHexString(maskedMode));
|
||||
}
|
||||
|
||||
this.mode = mode;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the name.
|
||||
*
|
||||
* @param name The name to set.
|
||||
*/
|
||||
public CpioArchiveEntry setName(final String name) {
|
||||
this.name = name;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the number of links.
|
||||
*
|
||||
* @param nlink The number of links to set.
|
||||
*/
|
||||
public void setNumberOfLinks(final long nlink) {
|
||||
this.nlink = nlink;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the remote device id.
|
||||
*
|
||||
* @param device The remote device id to set.
|
||||
* @throws UnsupportedOperationException if this method is called for a CPIOArchiveEntry with a new
|
||||
* format.
|
||||
*/
|
||||
public void setRemoteDevice(final long device) {
|
||||
checkOldFormat();
|
||||
this.rmin = device;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the remote major device id.
|
||||
*
|
||||
* @param rmaj The remote major device id to set.
|
||||
* @throws UnsupportedOperationException if this method is called for a CPIOArchiveEntry with an old
|
||||
* format.
|
||||
*/
|
||||
public void setRemoteDeviceMaj(final long rmaj) {
|
||||
checkNewFormat();
|
||||
this.rmaj = rmaj;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the remote minor device id.
|
||||
*
|
||||
* @param rmin The remote minor device id to set.
|
||||
* @throws UnsupportedOperationException if this method is called for a CPIOArchiveEntry with an old
|
||||
* format.
|
||||
*/
|
||||
public void setRemoteDeviceMin(final long rmin) {
|
||||
checkNewFormat();
|
||||
this.rmin = rmin;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the time in seconds.
|
||||
*
|
||||
* @param time The time to set.
|
||||
*/
|
||||
public void setTime(final long time) {
|
||||
this.mtime = time;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the user id.
|
||||
*
|
||||
* @param uid The user id to set.
|
||||
*/
|
||||
public void setUID(final long uid) {
|
||||
this.uid = uid;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
final int prime = 31;
|
||||
int result = 1;
|
||||
result = prime * result + ((name == null) ? 0 : name.hashCode());
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (this == obj) {
|
||||
return true;
|
||||
}
|
||||
if (obj == null || getClass() != obj.getClass()) {
|
||||
return false;
|
||||
}
|
||||
CpioArchiveEntry other = (CpioArchiveEntry) obj;
|
||||
if (name == null) {
|
||||
if (other.name != null) {
|
||||
return false;
|
||||
}
|
||||
} else if (!name.equals(other.name)) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
}
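A minimal usage sketch for CpioArchiveEntry as defined above, assuming the entry names are arbitrary examples; a temporary file is created so that the File-based constructor can detect a regular file.

import org.xbib.io.archive.cpio.CpioArchiveEntry;
import org.xbib.io.archive.cpio.CpioConstants;

import java.io.File;
import java.io.IOException;

public class CpioEntryExample {
    public static void main(String[] args) throws IOException {
        // derive size, type and mtime from a real file
        File tmp = File.createTempFile("cpio-entry", ".bin");
        tmp.deleteOnExit();
        CpioArchiveEntry fromFile = new CpioArchiveEntry(tmp, "data.bin");

        // or build an entry by hand: a 5 byte regular file, readable by everyone
        CpioArchiveEntry byHand = new CpioArchiveEntry("notes.txt", 5);
        byHand.setMode(CpioConstants.C_ISREG | CpioConstants.C_IRUSR | CpioConstants.C_IWUSR
                | CpioConstants.C_IRGRP | CpioConstants.C_IROTH);
        byHand.setTime(System.currentTimeMillis() / 1000); // cpio stores seconds

        System.out.println(fromFile.getName() + " " + fromFile.getEntrySize() + " bytes");
        System.out.println(byHand.getName() + " mode " + Long.toOctalString(byHand.getMode()));
    }
}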
|
|
@ -0,0 +1,387 @@
|
|||
package org.xbib.io.archive.cpio;
|
||||
|
||||
import org.xbib.io.archive.entry.ArchiveEntry;
|
||||
import org.xbib.io.archive.stream.ArchiveInputStream;
|
||||
import org.xbib.io.archive.util.ArchiveUtils;
|
||||
|
||||
import java.io.EOFException;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
|
||||
/**
|
||||
* CpioArchiveInputStream is a stream for reading CPIO streams. All formats of
|
||||
* cpio are supported (old ascii, old binary, new portable format and the new
|
||||
* portable format with crc).
|
||||
* The stream can be read by extracting a cpio entry (containing all
|
||||
* information about an entry) and afterwards reading from the stream the file
|
||||
* specified by the entry.
|
||||
* <pre><code>
|
||||
* CpioArchiveInputStream cpioIn = new CpioArchiveInputStream(
|
||||
* new FileInputStream(new File("test.cpio")));
|
||||
* CpioArchiveEntry cpioEntry;
|
||||
* while ((cpioEntry = cpioIn.getNextCPIOEntry()) != null) {
|
||||
* System.out.println(cpioEntry.getName());
|
||||
* int tmp;
|
||||
* StringBuilder buf = new StringBuilder();
|
||||
* while ((tmp = cpioIn.read()) != -1) {
|
||||
* buf.append((char) tmp);
|
||||
* }
|
||||
* System.out.println(buf.toString());
|
||||
* }
|
||||
* cpioIn.close();
|
||||
* </code></pre>
|
||||
* Note: This implementation should be compatible with cpio 2.5
|
||||
*/
|
||||
|
||||
public class CpioArchiveInputStream extends ArchiveInputStream implements CpioConstants {
|
||||
|
||||
private boolean closed = false;
|
||||
|
||||
private CpioArchiveEntry entry;
|
||||
|
||||
private long entryBytesRead = 0;
|
||||
|
||||
private boolean entryEOF = false;
|
||||
|
||||
private final byte tmpbuf[] = new byte[4096];
|
||||
|
||||
private long crc = 0;
|
||||
|
||||
private final InputStream in;
|
||||
|
||||
/**
|
||||
* Construct the cpio input stream
|
||||
*
|
||||
* @param in The cpio stream
|
||||
*/
|
||||
public CpioArchiveInputStream(final InputStream in) {
|
||||
this.in = in;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns 0 after EOF has been reached for the current entry data, otherwise
|
||||
* always return 1.
|
||||
* Programs should not count on this method to return the actual number of
|
||||
* bytes that could be read without blocking.
|
||||
*
|
||||
* @return 1 before EOF and 0 after EOF has been reached for the current entry.
|
||||
* @throws java.io.IOException if an I/O error has occurred or if a CPIO file error has
|
||||
* occurred
|
||||
*/
|
||||
@Override
|
||||
public int available() throws IOException {
|
||||
ensureOpen();
|
||||
if (this.entryEOF) {
|
||||
return 0;
|
||||
}
|
||||
return 1;
|
||||
}
|
||||
|
||||
/**
|
||||
* Closes the CPIO input stream.
|
||||
*
|
||||
* @throws java.io.IOException if an I/O error has occurred
|
||||
*/
|
||||
@Override
|
||||
public void close() throws IOException {
|
||||
if (!this.closed) {
|
||||
in.close();
|
||||
this.closed = true;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Closes the current CPIO entry and positions the stream for reading the
|
||||
* next entry.
|
||||
*
|
||||
* @throws java.io.IOException if an I/O error has occurred or if a CPIO file error has
|
||||
* occurred
|
||||
*/
|
||||
private void closeEntry() throws IOException {
|
||||
ensureOpen();
|
||||
while (read(this.tmpbuf, 0, this.tmpbuf.length) != -1) {
|
||||
// do nothing
|
||||
}
|
||||
|
||||
this.entryEOF = true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check to make sure that this stream has not been closed
|
||||
*
|
||||
* @throws java.io.IOException if the stream is already closed
|
||||
*/
|
||||
private void ensureOpen() throws IOException {
|
||||
if (this.closed) {
|
||||
throw new IOException("stream closed");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads the next CPIO file entry and positions the stream at the beginning of
|
||||
* the entry data.
|
||||
*
|
||||
* @return the CpioArchiveEntry just read
|
||||
* @throws java.io.IOException if an I/O error has occurred or if a CPIO file error has
|
||||
* occurred
|
||||
*/
|
||||
public CpioArchiveEntry getNextCPIOEntry() throws IOException {
|
||||
ensureOpen();
|
||||
if (this.entry != null) {
|
||||
closeEntry();
|
||||
}
|
||||
byte magic[] = new byte[2];
|
||||
readFully(magic, 0, magic.length);
|
||||
if (CpioUtil.byteArray2long(magic, false) == MAGIC_OLD_BINARY) {
|
||||
this.entry = readOldBinaryEntry(false);
|
||||
} else if (CpioUtil.byteArray2long(magic, true) == MAGIC_OLD_BINARY) {
|
||||
this.entry = readOldBinaryEntry(true);
|
||||
} else {
|
||||
byte more_magic[] = new byte[4];
|
||||
readFully(more_magic, 0, more_magic.length);
|
||||
byte tmp[] = new byte[6];
|
||||
System.arraycopy(magic, 0, tmp, 0, magic.length);
|
||||
System.arraycopy(more_magic, 0, tmp, magic.length,
|
||||
more_magic.length);
|
||||
String magicString = ArchiveUtils.toAsciiString(tmp);
|
||||
if (magicString.equals(MAGIC_NEW)) {
|
||||
this.entry = readNewEntry(false);
|
||||
} else if (magicString.equals(MAGIC_NEW_CRC)) {
|
||||
this.entry = readNewEntry(true);
|
||||
} else if (magicString.equals(MAGIC_OLD_ASCII)) {
|
||||
this.entry = readOldAsciiEntry();
|
||||
} else {
|
||||
throw new IOException("Unknown magic [" + magicString + "]");
|
||||
}
|
||||
}
|
||||
|
||||
this.entryBytesRead = 0;
|
||||
this.entryEOF = false;
|
||||
this.crc = 0;
|
||||
|
||||
if (this.entry.getName().equals(CPIO_TRAILER)) {
|
||||
this.entryEOF = true;
|
||||
return null;
|
||||
}
|
||||
return this.entry;
|
||||
}
|
||||
|
||||
private void skip(int bytes) throws IOException {
|
||||
final byte[] buff = new byte[4]; // Cannot be more than 3 bytes
|
||||
if (bytes > 0) {
|
||||
readFully(buff, 0, bytes);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads from the current CPIO entry into an array of bytes. Blocks until
|
||||
* some input is available.
|
||||
*
|
||||
* @param b the buffer into which the data is read
|
||||
* @param off the start offset of the data
|
||||
* @param len the maximum number of bytes read
|
||||
* @return the actual number of bytes read, or -1 if the end of the entry is
|
||||
* reached
|
||||
* @throws java.io.IOException if an I/O error has occurred or if a CPIO file error has
|
||||
* occurred
|
||||
*/
|
||||
@Override
|
||||
public int read(final byte[] b, final int off, final int len)
|
||||
throws IOException {
|
||||
ensureOpen();
|
||||
if (off < 0 || len < 0 || off > b.length - len) {
|
||||
throw new IndexOutOfBoundsException();
|
||||
} else if (len == 0) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (this.entry == null || this.entryEOF) {
|
||||
return -1;
|
||||
}
|
||||
if (this.entryBytesRead == this.entry.getEntrySize()) {
|
||||
skip(entry.getDataPadCount());
|
||||
this.entryEOF = true;
|
||||
if (this.entry.getFormat() == FORMAT_NEW_CRC
|
||||
&& this.crc != this.entry.getChksum()) {
|
||||
throw new IOException("CRC Error");
|
||||
}
|
||||
return -1; // EOF for this entry
|
||||
}
|
||||
int tmplength = (int) Math.min(len, this.entry.getEntrySize()
|
||||
- this.entryBytesRead);
|
||||
if (tmplength < 0) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
int tmpread = readFully(b, off, tmplength);
|
||||
if (this.entry.getFormat() == FORMAT_NEW_CRC) {
|
||||
for (int pos = 0; pos < tmpread; pos++) {
|
||||
this.crc += b[off + pos] & 0xFF; // data was read into b starting at off
|
||||
}
|
||||
}
|
||||
this.entryBytesRead += tmpread;
|
||||
|
||||
return tmpread;
|
||||
}
|
||||
|
||||
private int readFully(final byte[] b, final int off, final int len)
|
||||
throws IOException {
|
||||
if (len < 0) {
|
||||
throw new IndexOutOfBoundsException();
|
||||
}
|
||||
int n = 0;
|
||||
while (n < len) {
|
||||
int count = this.in.read(b, off + n, len - n);
|
||||
if (count < 0) {
|
||||
throw new EOFException();
|
||||
}
|
||||
n += count;
|
||||
}
|
||||
return n;
|
||||
}
|
||||
|
||||
private long readBinaryLong(final int length, final boolean swapHalfWord)
|
||||
throws IOException {
|
||||
byte tmp[] = new byte[length];
|
||||
readFully(tmp, 0, tmp.length);
|
||||
return CpioUtil.byteArray2long(tmp, swapHalfWord);
|
||||
}
|
||||
|
||||
private long readAsciiLong(final int length, final int radix)
|
||||
throws IOException {
|
||||
byte tmpBuffer[] = new byte[length];
|
||||
readFully(tmpBuffer, 0, tmpBuffer.length);
|
||||
return Long.parseLong(ArchiveUtils.toAsciiString(tmpBuffer), radix);
|
||||
}
|
||||
|
||||
private CpioArchiveEntry readNewEntry(final boolean hasCrc)
|
||||
throws IOException {
|
||||
CpioArchiveEntry ret;
|
||||
if (hasCrc) {
|
||||
ret = new CpioArchiveEntry(FORMAT_NEW_CRC);
|
||||
} else {
|
||||
ret = new CpioArchiveEntry(FORMAT_NEW);
|
||||
}
|
||||
|
||||
ret.setInode(readAsciiLong(8, 16));
|
||||
long mode = readAsciiLong(8, 16);
|
||||
if (mode != 0) { // mode is initialised to 0
|
||||
ret.setMode(mode);
|
||||
}
|
||||
ret.setUID(readAsciiLong(8, 16));
|
||||
ret.setGID(readAsciiLong(8, 16));
|
||||
ret.setNumberOfLinks(readAsciiLong(8, 16));
|
||||
ret.setTime(readAsciiLong(8, 16));
|
||||
ret.setEntrySize(readAsciiLong(8, 16));
|
||||
ret.setDeviceMaj(readAsciiLong(8, 16));
|
||||
ret.setDeviceMin(readAsciiLong(8, 16));
|
||||
ret.setRemoteDeviceMaj(readAsciiLong(8, 16));
|
||||
ret.setRemoteDeviceMin(readAsciiLong(8, 16));
|
||||
long namesize = readAsciiLong(8, 16);
|
||||
ret.setChksum(readAsciiLong(8, 16));
|
||||
String name = readCString((int) namesize);
|
||||
ret.setName(name);
|
||||
if (mode == 0 && !name.equals(CPIO_TRAILER)) {
|
||||
throw new IOException("Mode 0 only allowed in the trailer. Found entry name: " + name);
|
||||
}
|
||||
skip(ret.getHeaderPadCount());
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
private CpioArchiveEntry readOldAsciiEntry() throws IOException {
|
||||
CpioArchiveEntry ret = new CpioArchiveEntry(FORMAT_OLD_ASCII);
|
||||
|
||||
ret.setDevice(readAsciiLong(6, 8));
|
||||
ret.setInode(readAsciiLong(6, 8));
|
||||
final long mode = readAsciiLong(6, 8);
|
||||
if (mode != 0) {
|
||||
ret.setMode(mode);
|
||||
}
|
||||
ret.setUID(readAsciiLong(6, 8));
|
||||
ret.setGID(readAsciiLong(6, 8));
|
||||
ret.setNumberOfLinks(readAsciiLong(6, 8));
|
||||
ret.setRemoteDevice(readAsciiLong(6, 8));
|
||||
ret.setTime(readAsciiLong(11, 8));
|
||||
long namesize = readAsciiLong(6, 8);
|
||||
ret.setEntrySize(readAsciiLong(11, 8));
|
||||
final String name = readCString((int) namesize);
|
||||
ret.setName(name);
|
||||
if (mode == 0 && !name.equals(CPIO_TRAILER)) {
|
||||
throw new IOException("Mode 0 only allowed in the trailer. Found entry: " + name);
|
||||
}
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
private CpioArchiveEntry readOldBinaryEntry(final boolean swapHalfWord)
|
||||
throws IOException {
|
||||
CpioArchiveEntry ret = new CpioArchiveEntry(FORMAT_OLD_BINARY);
|
||||
|
||||
ret.setDevice(readBinaryLong(2, swapHalfWord));
|
||||
ret.setInode(readBinaryLong(2, swapHalfWord));
|
||||
final long mode = readBinaryLong(2, swapHalfWord);
|
||||
if (mode != 0) {
|
||||
ret.setMode(mode);
|
||||
}
|
||||
ret.setUID(readBinaryLong(2, swapHalfWord));
|
||||
ret.setGID(readBinaryLong(2, swapHalfWord));
|
||||
ret.setNumberOfLinks(readBinaryLong(2, swapHalfWord));
|
||||
ret.setRemoteDevice(readBinaryLong(2, swapHalfWord));
|
||||
ret.setTime(readBinaryLong(4, swapHalfWord));
|
||||
long namesize = readBinaryLong(2, swapHalfWord);
|
||||
ret.setEntrySize(readBinaryLong(4, swapHalfWord));
|
||||
final String name = readCString((int) namesize);
|
||||
ret.setName(name);
|
||||
if (mode == 0 && !name.equals(CPIO_TRAILER)) {
|
||||
throw new IOException("Mode 0 only allowed in the trailer. Found entry: " + name);
|
||||
}
|
||||
skip(ret.getHeaderPadCount());
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
private String readCString(final int length) throws IOException {
|
||||
byte[] tmpBuffer = new byte[length];
|
||||
readFully(tmpBuffer, 0, tmpBuffer.length);
|
||||
return new String(tmpBuffer, 0, tmpBuffer.length - 1);
|
||||
}
|
||||
|
||||
/**
|
||||
* Skips the specified number of bytes in the current CPIO entry.
|
||||
*
|
||||
* @param n the number of bytes to skip
|
||||
* @return the actual number of bytes skipped
|
||||
* @throws java.io.IOException if an I/O error has occurred
|
||||
* @throws IllegalArgumentException if n < 0
|
||||
*/
|
||||
@Override
|
||||
public long skip(final long n) throws IOException {
|
||||
if (n < 0) {
|
||||
throw new IllegalArgumentException("negative skip length");
|
||||
}
|
||||
ensureOpen();
|
||||
int max = (int) Math.min(n, Integer.MAX_VALUE);
|
||||
int total = 0;
|
||||
|
||||
while (total < max) {
|
||||
int len = max - total;
|
||||
if (len > this.tmpbuf.length) {
|
||||
len = this.tmpbuf.length;
|
||||
}
|
||||
len = read(this.tmpbuf, 0, len);
|
||||
if (len == -1) {
|
||||
this.entryEOF = true;
|
||||
break;
|
||||
}
|
||||
total += len;
|
||||
}
|
||||
return total;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ArchiveEntry getNextEntry() throws IOException {
|
||||
return getNextCPIOEntry();
|
||||
}
|
||||
|
||||
}
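A sketch of reading every entry with the stream above, assuming ArchiveInputStream is a java.io.InputStream; read() may return fewer bytes than requested, so the inner loop drains each entry until it signals -1. The archive path is a made-up example.

import org.xbib.io.archive.cpio.CpioArchiveEntry;
import org.xbib.io.archive.cpio.CpioArchiveInputStream;

import java.io.ByteArrayOutputStream;
import java.io.FileInputStream;
import java.io.IOException;

public class CpioReadExample {
    public static void main(String[] args) throws IOException {
        try (CpioArchiveInputStream in =
                     new CpioArchiveInputStream(new FileInputStream("test.cpio"))) {
            CpioArchiveEntry entry;
            while ((entry = in.getNextCPIOEntry()) != null) {
                ByteArrayOutputStream data = new ByteArrayOutputStream();
                byte[] buf = new byte[4096];
                int n;
                while ((n = in.read(buf, 0, buf.length)) != -1) { // -1 ends this entry, not the archive
                    data.write(buf, 0, n);
                }
                System.out.println(entry.getName() + ": " + data.size() + " bytes");
            }
        }
    }
}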
|
|
@ -0,0 +1,430 @@
|
|||
package org.xbib.io.archive.cpio;
|
||||
|
||||
import org.xbib.io.archive.stream.ArchiveOutputStream;
|
||||
import org.xbib.io.archive.util.ArchiveUtils;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.OutputStream;
|
||||
import java.util.HashMap;
|
||||
|
||||
/**
|
||||
* CpioArchiveOutputStream is a stream for writing CPIO streams. All formats of
|
||||
* CPIO are supported (old ASCII, old binary, new portable format and the new
|
||||
* portable format with CRC).
|
||||
* An entry can be written by creating an instance of CpioArchiveEntry, filling
|
||||
* it with the necessary values and putting it into the CPIO stream. Afterwards,
|
||||
* write the contents of the file into the CPIO stream. Either close the stream
|
||||
* by calling finish() or put a next entry into the cpio stream.
|
||||
* <pre><code>
|
||||
* CpioArchiveOutputStream out = new CpioArchiveOutputStream(
|
||||
* new FileOutputStream(new File("test.cpio")));
|
||||
* CpioArchiveEntry entry = new CpioArchiveEntry();
|
||||
* entry.setName("testfile");
|
||||
* String contents = "12345";
|
||||
* entry.setEntrySize(contents.length());
|
||||
* entry.setMode(CpioConstants.C_ISREG); // regular file
|
||||
* ... set other attributes, e.g. time, number of links
|
||||
* out.putArchiveEntry(entry);
|
||||
* out.write(contents.getBytes());
|
||||
* out.closeArchiveEntry();
|
||||
* out.close();
|
||||
* </code></pre>
|
||||
* Note: This implementation should be compatible with cpio 2.5
|
||||
*/
|
||||
public class CpioArchiveOutputStream extends ArchiveOutputStream<CpioArchiveEntry> implements CpioConstants {
|
||||
|
||||
private CpioArchiveEntry entry;
|
||||
|
||||
private boolean closed = false;
|
||||
|
||||
private boolean finished;
|
||||
|
||||
private final short entryFormat;
|
||||
|
||||
private final HashMap<String, CpioArchiveEntry> names = new HashMap<>();
|
||||
|
||||
private long crc = 0;
|
||||
|
||||
private long written;
|
||||
|
||||
private final CountingOutputStream out;
|
||||
|
||||
private final int blockSize;
|
||||
|
||||
private long nextArtificalDeviceAndInode = 1;
|
||||
|
||||
/**
|
||||
* Construct the cpio output stream with a specified format and a
|
||||
* blocksize of {@link CpioConstants#BLOCK_SIZE BLOCK_SIZE}.
|
||||
*
|
||||
* @param out The cpio stream
|
||||
* @param format The format of the stream
|
||||
*/
|
||||
public CpioArchiveOutputStream(OutputStream out, final short format) {
|
||||
this(out, format, BLOCK_SIZE);
|
||||
}
|
||||
|
||||
/**
|
||||
* Construct the cpio output stream with a specified format
|
||||
*
|
||||
* @param out The cpio stream
|
||||
* @param format The format of the stream
|
||||
* @param blockSize The block size of the archive.
|
||||
*/
|
||||
public CpioArchiveOutputStream(final OutputStream out, final short format,
|
||||
final int blockSize) {
|
||||
this.out = new CountingOutputStream(out);
|
||||
switch (format) {
|
||||
case FORMAT_NEW:
|
||||
case FORMAT_NEW_CRC:
|
||||
case FORMAT_OLD_ASCII:
|
||||
case FORMAT_OLD_BINARY:
|
||||
break;
|
||||
default:
|
||||
throw new IllegalArgumentException("Unknown format: " + format);
|
||||
|
||||
}
|
||||
this.entryFormat = format;
|
||||
this.blockSize = blockSize;
|
||||
}
|
||||
|
||||
/**
|
||||
* Construct the cpio output stream. The format for this CPIO stream is the
|
||||
* "new" format
|
||||
*
|
||||
* @param out The cpio stream
|
||||
*/
|
||||
public CpioArchiveOutputStream(final OutputStream out) {
|
||||
this(out, FORMAT_NEW);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check to make sure that this stream has not been closed
|
||||
*
|
||||
* @throws java.io.IOException if the stream is already closed
|
||||
*/
|
||||
private void ensureOpen() throws IOException {
|
||||
if (this.closed) {
|
||||
throw new IOException("Stream closed");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public CpioArchiveEntry newArchiveEntry() {
|
||||
return new CpioArchiveEntry();
|
||||
}
|
||||
|
||||
/**
|
||||
* Begins writing a new CPIO file entry and positions the stream to the
|
||||
* start of the entry data. Closes the current entry if still active. The
|
||||
* current time will be used if the entry has no set modification time and
|
||||
* the default header format will be used if no other format is specified in
|
||||
* the entry.
|
||||
*
|
||||
* @param entry the CPIO cpioEntry to be written
|
||||
* @throws java.io.IOException if an I/O error has occurred or if a CPIO file error has
|
||||
* occurred
|
||||
*/
|
||||
@Override
|
||||
public void putArchiveEntry(CpioArchiveEntry entry) throws IOException {
|
||||
if (finished) {
|
||||
throw new IOException("Stream has already been finished");
|
||||
}
|
||||
ensureOpen();
|
||||
if (this.entry != null) {
|
||||
closeArchiveEntry(); // close previous entry
|
||||
}
|
||||
if (entry.getTime() == -1) {
|
||||
entry.setTime(System.currentTimeMillis() / 1000);
|
||||
}
|
||||
|
||||
final short format = entry.getFormat();
|
||||
if (format != this.entryFormat) {
|
||||
throw new IOException("Header format: " + format + " does not match existing format: " + this.entryFormat);
|
||||
}
|
||||
|
||||
if (this.names.put(entry.getName(), entry) != null) {
|
||||
throw new IOException("duplicate entry: " + entry.getName());
|
||||
}
|
||||
|
||||
writeHeader(entry);
|
||||
this.entry = entry;
|
||||
this.written = 0;
|
||||
}
|
||||
|
||||
private void writeHeader(final CpioArchiveEntry e) throws IOException {
|
||||
switch (e.getFormat()) {
|
||||
case FORMAT_NEW:
|
||||
out.write(ArchiveUtils.toAsciiBytes(MAGIC_NEW));
|
||||
writeNewEntry(e);
|
||||
break;
|
||||
case FORMAT_NEW_CRC:
|
||||
out.write(ArchiveUtils.toAsciiBytes(MAGIC_NEW_CRC));
|
||||
writeNewEntry(e);
|
||||
break;
|
||||
case FORMAT_OLD_ASCII:
|
||||
out.write(ArchiveUtils.toAsciiBytes(MAGIC_OLD_ASCII));
|
||||
writeOldAsciiEntry(e);
|
||||
break;
|
||||
case FORMAT_OLD_BINARY:
|
||||
boolean swapHalfWord = true;
|
||||
writeBinaryLong(MAGIC_OLD_BINARY, 2, swapHalfWord);
|
||||
writeOldBinaryEntry(e, swapHalfWord);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
private void writeNewEntry(final CpioArchiveEntry entry) throws IOException {
|
||||
long inode = entry.getInode();
|
||||
long devMin = entry.getDeviceMin();
|
||||
if (CPIO_TRAILER.equals(entry.getName())) {
|
||||
inode = devMin = 0;
|
||||
} else {
|
||||
if (inode == 0 && devMin == 0) {
|
||||
inode = nextArtificalDeviceAndInode & 0xFFFFFFFFL; // long literal so the mask keeps only the low 32 bits
|
||||
devMin = (nextArtificalDeviceAndInode++ >> 32) & 0xFFFFFFFFL;
|
||||
} else {
|
||||
nextArtificalDeviceAndInode =
|
||||
Math.max(nextArtificalDeviceAndInode,
|
||||
inode + 0x100000000L * devMin) + 1;
|
||||
}
|
||||
}
|
||||
|
||||
writeAsciiLong(inode, 8, 16);
|
||||
writeAsciiLong(entry.getMode(), 8, 16);
|
||||
writeAsciiLong(entry.getUID(), 8, 16);
|
||||
writeAsciiLong(entry.getGID(), 8, 16);
|
||||
writeAsciiLong(entry.getNumberOfLinks(), 8, 16);
|
||||
writeAsciiLong(entry.getTime(), 8, 16);
|
||||
writeAsciiLong(entry.getEntrySize(), 8, 16);
|
||||
writeAsciiLong(entry.getDeviceMaj(), 8, 16);
|
||||
writeAsciiLong(devMin, 8, 16);
|
||||
writeAsciiLong(entry.getRemoteDeviceMaj(), 8, 16);
|
||||
writeAsciiLong(entry.getRemoteDeviceMin(), 8, 16);
|
||||
writeAsciiLong(entry.getName().length() + 1, 8, 16);
|
||||
writeAsciiLong(entry.getChksum(), 8, 16);
|
||||
writeCString(entry.getName());
|
||||
pad(entry.getHeaderPadCount());
|
||||
}
|
||||
|
||||
private void writeOldAsciiEntry(final CpioArchiveEntry entry)
|
||||
throws IOException {
|
||||
long inode = entry.getInode();
|
||||
long device = entry.getDevice();
|
||||
if (CPIO_TRAILER.equals(entry.getName())) {
|
||||
inode = device = 0;
|
||||
} else {
|
||||
if (inode == 0 && device == 0) {
|
||||
inode = nextArtificalDeviceAndInode & 0777777;
|
||||
device = (nextArtificalDeviceAndInode++ >> 18) & 0777777;
|
||||
} else {
|
||||
nextArtificalDeviceAndInode =
|
||||
Math.max(nextArtificalDeviceAndInode,
|
||||
inode + 01000000 * device) + 1;
|
||||
}
|
||||
}
|
||||
|
||||
writeAsciiLong(device, 6, 8);
|
||||
writeAsciiLong(inode, 6, 8);
|
||||
writeAsciiLong(entry.getMode(), 6, 8);
|
||||
writeAsciiLong(entry.getUID(), 6, 8);
|
||||
writeAsciiLong(entry.getGID(), 6, 8);
|
||||
writeAsciiLong(entry.getNumberOfLinks(), 6, 8);
|
||||
writeAsciiLong(entry.getRemoteDevice(), 6, 8);
|
||||
writeAsciiLong(entry.getTime(), 11, 8);
|
||||
writeAsciiLong(entry.getName().length() + 1, 6, 8);
|
||||
writeAsciiLong(entry.getEntrySize(), 11, 8);
|
||||
writeCString(entry.getName());
|
||||
}
|
||||
|
||||
private void writeOldBinaryEntry(final CpioArchiveEntry entry,
|
||||
final boolean swapHalfWord) throws IOException {
|
||||
long inode = entry.getInode();
|
||||
long device = entry.getDevice();
|
||||
if (CPIO_TRAILER.equals(entry.getName())) {
|
||||
inode = device = 0;
|
||||
} else {
|
||||
if (inode == 0 && device == 0) {
|
||||
inode = nextArtificalDeviceAndInode & 0xFFFF;
|
||||
device = (nextArtificalDeviceAndInode++ >> 16) & 0xFFFF;
|
||||
} else {
|
||||
nextArtificalDeviceAndInode =
|
||||
Math.max(nextArtificalDeviceAndInode,
|
||||
inode + 0x10000 * device) + 1;
|
||||
}
|
||||
}
|
||||
|
||||
writeBinaryLong(device, 2, swapHalfWord);
|
||||
writeBinaryLong(inode, 2, swapHalfWord);
|
||||
writeBinaryLong(entry.getMode(), 2, swapHalfWord);
|
||||
writeBinaryLong(entry.getUID(), 2, swapHalfWord);
|
||||
writeBinaryLong(entry.getGID(), 2, swapHalfWord);
|
||||
writeBinaryLong(entry.getNumberOfLinks(), 2, swapHalfWord);
|
||||
writeBinaryLong(entry.getRemoteDevice(), 2, swapHalfWord);
|
||||
writeBinaryLong(entry.getTime(), 4, swapHalfWord);
|
||||
writeBinaryLong(entry.getName().length() + 1, 2, swapHalfWord);
|
||||
writeBinaryLong(entry.getEntrySize(), 4, swapHalfWord);
|
||||
writeCString(entry.getName());
|
||||
pad(entry.getHeaderPadCount());
|
||||
}
|
||||
|
||||
@Override
|
||||
public void closeArchiveEntry() throws IOException {
|
||||
if (finished) {
|
||||
throw new IOException("Stream has already been finished");
|
||||
}
|
||||
|
||||
ensureOpen();
|
||||
|
||||
if (entry == null) {
|
||||
throw new IOException("Trying to close non-existent entry");
|
||||
}
|
||||
|
||||
if (this.entry.getEntrySize() != this.written) {
|
||||
throw new IOException("invalid entry size (expected "
|
||||
+ this.entry.getEntrySize() + " but got " + this.written
|
||||
+ " bytes)");
|
||||
}
|
||||
pad(this.entry.getDataPadCount());
|
||||
if (this.entry.getFormat() == FORMAT_NEW_CRC
|
||||
&& this.crc != this.entry.getChksum()) {
|
||||
throw new IOException("CRC Error");
|
||||
}
|
||||
this.entry = null;
|
||||
this.crc = 0;
|
||||
this.written = 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Writes an array of bytes to the current CPIO entry data. This method will
|
||||
* block until all the bytes are written.
|
||||
*
|
||||
* @param b the data to be written
|
||||
* @param off the start offset in the data
|
||||
* @param len the number of bytes that are written
|
||||
* @throws java.io.IOException if an I/O error has occurred or if a CPIO file error has
|
||||
* occurred
|
||||
*/
|
||||
@Override
|
||||
public void write(final byte[] b, final int off, final int len)
|
||||
throws IOException {
|
||||
ensureOpen();
|
||||
if (off < 0 || len < 0 || off > b.length - len) {
|
||||
throw new IndexOutOfBoundsException();
|
||||
} else if (len == 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (this.entry == null) {
|
||||
throw new IOException("no current CPIO entry");
|
||||
}
|
||||
if (this.written + len > this.entry.getEntrySize()) {
|
||||
throw new IOException("attempt to write past end of STORED entry");
|
||||
}
|
||||
out.write(b, off, len);
|
||||
this.written += len;
|
||||
if (this.entry.getFormat() == FORMAT_NEW_CRC) {
|
||||
for (int pos = 0; pos < len; pos++) {
|
||||
this.crc += b[off + pos] & 0xFF; // the caller's data starts at off
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Finishes writing the contents of the CPIO output stream without closing
|
||||
* the underlying stream. Use this method when applying multiple filters in
|
||||
* succession to the same output stream.
|
||||
*
|
||||
* @throws IOException if an I/O exception has occurred or if a CPIO file error has occurred
|
||||
*/
|
||||
@Override
|
||||
public void finish() throws IOException {
|
||||
ensureOpen();
|
||||
if (finished) {
|
||||
throw new IOException("This archive has already been finished");
|
||||
}
|
||||
|
||||
if (this.entry != null) {
|
||||
throw new IOException("This archive contains unclosed entries.");
|
||||
}
|
||||
this.entry = new CpioArchiveEntry(this.entryFormat);
|
||||
this.entry.setName(CPIO_TRAILER);
|
||||
this.entry.setNumberOfLinks(1);
|
||||
writeHeader(this.entry);
|
||||
closeArchiveEntry();
|
||||
int lengthOfLastBlock = (int) (out.getBytesWritten() % blockSize);
|
||||
if (lengthOfLastBlock != 0) {
|
||||
pad(blockSize - lengthOfLastBlock);
|
||||
}
|
||||
finished = true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Closes the CPIO output stream as well as the stream being filtered.
|
||||
*
|
||||
* @throws java.io.IOException if an I/O error has occurred or if a CPIO file error has
|
||||
* occurred
|
||||
*/
|
||||
@Override
|
||||
public void close() throws IOException {
|
||||
if (!finished) {
|
||||
finish();
|
||||
}
|
||||
|
||||
if (!this.closed) {
|
||||
out.close();
|
||||
this.closed = true;
|
||||
}
|
||||
}
|
||||
|
||||
private void pad(int count) throws IOException {
|
||||
if (count > 0) {
|
||||
byte buff[] = new byte[count];
|
||||
out.write(buff);
|
||||
}
|
||||
}
|
||||
|
||||
private void writeBinaryLong(final long number, final int length,
|
||||
final boolean swapHalfWord) throws IOException {
|
||||
byte tmp[] = CpioUtil.long2byteArray(number, length, swapHalfWord);
|
||||
out.write(tmp);
|
||||
}
|
||||
|
||||
private void writeAsciiLong(final long number, final int length,
|
||||
final int radix) throws IOException {
|
||||
StringBuilder tmp = new StringBuilder();
|
||||
String tmpStr;
|
||||
if (radix == 16) {
|
||||
tmp.append(Long.toHexString(number));
|
||||
} else if (radix == 8) {
|
||||
tmp.append(Long.toOctalString(number));
|
||||
} else {
|
||||
tmp.append(Long.toString(number));
|
||||
}
|
||||
|
||||
if (tmp.length() <= length) {
|
||||
long insertLength = length - tmp.length();
|
||||
for (int pos = 0; pos < insertLength; pos++) {
|
||||
tmp.insert(0, "0");
|
||||
}
|
||||
tmpStr = tmp.toString();
|
||||
} else {
|
||||
tmpStr = tmp.substring(tmp.length() - length);
|
||||
}
|
||||
byte[] b = ArchiveUtils.toAsciiBytes(tmpStr);
|
||||
out.write(b);
|
||||
}
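// For example: writeAsciiLong(420, 8, 16) appends "000001a4", since
// Long.toHexString(420) is "1a4" and five leading zeros fill the field;
// a value with more digits than the field width keeps only its trailing digits.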
|
||||
|
||||
/**
|
||||
* Writes an ASCII string to the stream followed by \0
|
||||
*
|
||||
* @param str the String to write
|
||||
* @throws java.io.IOException if the string couldn't be written
|
||||
*/
|
||||
private void writeCString(final String str) throws IOException {
|
||||
byte[] b = ArchiveUtils.toAsciiBytes(str);
|
||||
out.write(b);
|
||||
out.write('\0');
|
||||
}
|
||||
|
||||
}
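A minimal sketch of writing a FORMAT_NEW_CRC archive with the stream above, assuming ArchiveOutputStream is a java.io.OutputStream; closeArchiveEntry() compares its running sum with the entry's checksum, so the checksum (the sum of all data bytes) is set before the entry is put. The output path is a made-up example.

import org.xbib.io.archive.cpio.CpioArchiveEntry;
import org.xbib.io.archive.cpio.CpioArchiveOutputStream;
import org.xbib.io.archive.cpio.CpioConstants;

import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

public class CpioCrcWriteExample {
    public static void main(String[] args) throws IOException {
        byte[] content = "hello cpio".getBytes(StandardCharsets.US_ASCII);
        long checksum = 0;
        for (byte b : content) {
            checksum += b & 0xFF; // same definition as setChksum() documents
        }
        try (CpioArchiveOutputStream out = new CpioArchiveOutputStream(
                new FileOutputStream("test-crc.cpio"), CpioConstants.FORMAT_NEW_CRC)) {
            CpioArchiveEntry entry =
                    new CpioArchiveEntry(CpioConstants.FORMAT_NEW_CRC, "hello.txt", content.length);
            entry.setMode(CpioConstants.C_ISREG | CpioConstants.C_IRUSR | CpioConstants.C_IWUSR);
            entry.setChksum(checksum);
            out.putArchiveEntry(entry);
            out.write(content, 0, content.length);
            out.closeArchiveEntry();
        } // close() runs finish(), which appends the trailer and pads the last block
    }
}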
|
|
@ -0,0 +1,184 @@
|
|||
package org.xbib.io.archive.cpio;
|
||||
|
||||
/**
|
||||
* All constants needed by CPIO.
|
||||
*/
|
||||
public interface CpioConstants {
|
||||
/**
|
||||
* magic number of a cpio entry in the new format
|
||||
*/
|
||||
final String MAGIC_NEW = "070701";
|
||||
|
||||
/**
|
||||
* magic number of a cpio entry in the new format with crc
|
||||
*/
|
||||
final String MAGIC_NEW_CRC = "070702";
|
||||
|
||||
/**
|
||||
* magic number of a cpio entry in the old ascii format
|
||||
*/
|
||||
final String MAGIC_OLD_ASCII = "070707";
|
||||
|
||||
/**
|
||||
* magic number of a cpio entry in the old binary format
|
||||
*/
|
||||
final int MAGIC_OLD_BINARY = 070707;
|
||||
|
||||
// These FORMAT_ constants are internal to the code
|
||||
|
||||
/**
|
||||
* write/read a CPIOArchiveEntry in the new format
|
||||
*/
|
||||
final short FORMAT_NEW = 1;
|
||||
|
||||
/**
|
||||
* write/read a CPIOArchiveEntry in the new format with crc
|
||||
*/
|
||||
final short FORMAT_NEW_CRC = 2;
|
||||
|
||||
/**
|
||||
* write/read a CPIOArchiveEntry in the old ascii format
|
||||
*/
|
||||
final short FORMAT_OLD_ASCII = 4;
|
||||
|
||||
/**
|
||||
* write/read a CPIOArchiveEntry in the old binary format
|
||||
*/
|
||||
final short FORMAT_OLD_BINARY = 8;
|
||||
|
||||
/**
|
||||
* Mask for both new formats
|
||||
*/
|
||||
final short FORMAT_NEW_MASK = 3;
|
||||
|
||||
/**
|
||||
* Mask for both old formats
|
||||
*/
|
||||
final short FORMAT_OLD_MASK = 12;
|
||||
|
||||
/*
|
||||
* Constants for the MODE bits
|
||||
*/
|
||||
|
||||
/**
|
||||
* Mask for all file type bits.
|
||||
*/
|
||||
final int S_IFMT = 0170000;
|
||||
|
||||
// http://www.opengroup.org/onlinepubs/9699919799/basedefs/cpio.h.html
|
||||
// has a list of the C_xxx constants
|
||||
|
||||
/**
|
||||
* Defines a socket
|
||||
*/
|
||||
final int C_ISSOCK = 0140000;
|
||||
|
||||
/**
|
||||
* Defines a symbolic link
|
||||
*/
|
||||
final int C_ISLNK = 0120000;
|
||||
|
||||
/**
|
||||
* HP/UX network special (C_ISCTG)
|
||||
*/
|
||||
final int C_ISNWK = 0110000;
|
||||
|
||||
/**
|
||||
* Defines a regular file
|
||||
*/
|
||||
final int C_ISREG = 0100000;
|
||||
|
||||
/**
|
||||
* Defines a block device
|
||||
*/
|
||||
final int C_ISBLK = 0060000;
|
||||
|
||||
/**
|
||||
* Defines a directory
|
||||
*/
|
||||
final int C_ISDIR = 0040000;
|
||||
|
||||
/**
|
||||
* Defines a character device
|
||||
*/
|
||||
final int C_ISCHR = 0020000;
|
||||
|
||||
/**
|
||||
* Defines a pipe
|
||||
*/
|
||||
final int C_ISFIFO = 0010000;
|
||||
|
||||
|
||||
/**
|
||||
* Set user ID
|
||||
*/
|
||||
final int C_ISUID = 0004000;
|
||||
|
||||
/**
|
||||
* Set group ID
|
||||
*/
|
||||
final int C_ISGID = 0002000;
|
||||
|
||||
/**
|
||||
* On directories, restricted deletion flag.
|
||||
*/
|
||||
final int C_ISVTX = 0001000;
|
||||
|
||||
|
||||
/**
|
||||
* Permits the owner of a file to read the file
|
||||
*/
|
||||
final int C_IRUSR = 0000400;
|
||||
|
||||
/**
|
||||
* Permits the owner of a file to write to the file
|
||||
*/
|
||||
final int C_IWUSR = 0000200;
|
||||
|
||||
/**
|
||||
* Permits the owner of a file to execute the file or to search the directory
|
||||
*/
|
||||
final int C_IXUSR = 0000100;
|
||||
|
||||
|
||||
/**
|
||||
* Permits a file's group to read the file
|
||||
*/
|
||||
final int C_IRGRP = 0000040;
|
||||
|
||||
/**
|
||||
* Permits a file's group to write to the file
|
||||
*/
|
||||
final int C_IWGRP = 0000020;
|
||||
|
||||
/**
|
||||
* Permits a file's group to execute the file or to search the directory
|
||||
*/
|
||||
final int C_IXGRP = 0000010;
|
||||
|
||||
|
||||
/**
|
||||
* Permits others to read the file
|
||||
*/
|
||||
final int C_IROTH = 0000004;
|
||||
|
||||
/**
|
||||
* Permits others to write to the file
|
||||
*/
|
||||
final int C_IWOTH = 0000002;
|
||||
|
||||
/**
|
||||
* Permits others to execute the file or to search the directory
|
||||
*/
|
||||
final int C_IXOTH = 0000001;
|
||||
|
||||
/**
|
||||
* The special trailer marker
|
||||
*/
|
||||
final String CPIO_TRAILER = "TRAILER!!!";
|
||||
|
||||
/**
|
||||
* The default block size.
|
||||
*/
|
||||
final int BLOCK_SIZE = 512;
|
||||
}
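A short sketch of how the constants above combine into a mode value: the file type occupies the S_IFMT bits and the permissions the low nine bits, so a plain rw-r--r-- file is C_ISREG plus the read/write bits (octal 100644).

import static org.xbib.io.archive.cpio.CpioConstants.*;

public class CpioModeExample {
    public static void main(String[] args) {
        long mode = C_ISREG | C_IRUSR | C_IWUSR | C_IRGRP | C_IROTH;
        System.out.println("regular file: " + ((mode & S_IFMT) == C_ISREG)); // true
        System.out.println("octal mode: " + Long.toOctalString(mode));       // 100644
    }
}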
|
|
@ -0,0 +1,81 @@
|
|||
|
||||
package org.xbib.io.archive.cpio;
|
||||
|
||||
/**
|
||||
* Package private utility class for Cpio
|
||||
*/
|
||||
class CpioUtil {
|
||||
/**
|
||||
* Converts a byte array to a long. Halfwords can be swapped by setting
|
||||
* swapHalfWord=true.
|
||||
*
|
||||
* @param number An array of bytes containing a number
|
||||
* @param swapHalfWord Swap halfwords ([0][1][2][3]->[1][0][3][2])
|
||||
* @return The long value
|
||||
* @throws UnsupportedOperationException if number length is not a multiple of 2
|
||||
*/
|
||||
static long byteArray2long(final byte[] number, final boolean swapHalfWord) {
|
||||
if (number.length % 2 != 0) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
long ret = 0;
|
||||
int pos = 0;
|
||||
byte tmp_number[] = new byte[number.length];
|
||||
System.arraycopy(number, 0, tmp_number, 0, number.length);
|
||||
|
||||
if (!swapHalfWord) {
|
||||
byte tmp = 0;
|
||||
for (pos = 0; pos < tmp_number.length; pos++) {
|
||||
tmp = tmp_number[pos];
|
||||
tmp_number[pos++] = tmp_number[pos];
|
||||
tmp_number[pos] = tmp;
|
||||
}
|
||||
}
|
||||
|
||||
ret = tmp_number[0] & 0xFF;
|
||||
for (pos = 1; pos < tmp_number.length; pos++) {
|
||||
ret <<= 8;
|
||||
ret |= tmp_number[pos] & 0xFF;
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts a long number to a byte array
|
||||
* Halfwords can be swapped by setting swapHalfWord=true.
|
||||
*
|
||||
* @param number the input long number to be converted
|
||||
* @param length The length of the returned array
|
||||
* @param swapHalfWord Swap halfwords ([0][1][2][3]->[1][0][3][2])
|
||||
* @return The byte array
|
||||
* @throws UnsupportedOperationException if the length is not a positive multiple of two
|
||||
*/
|
||||
static byte[] long2byteArray(final long number, final int length,
|
||||
final boolean swapHalfWord) {
|
||||
byte[] ret = new byte[length];
|
||||
int pos = 0;
|
||||
long tmp_number = 0;
|
||||
|
||||
if (length % 2 != 0 || length < 2) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
tmp_number = number;
|
||||
for (pos = length - 1; pos >= 0; pos--) {
|
||||
ret[pos] = (byte) (tmp_number & 0xFF);
|
||||
tmp_number >>= 8;
|
||||
}
|
||||
|
||||
if (!swapHalfWord) {
|
||||
byte tmp = 0;
|
||||
for (pos = 0; pos < length; pos++) {
|
||||
tmp = ret[pos];
|
||||
ret[pos++] = ret[pos];
|
||||
ret[pos] = tmp;
|
||||
}
|
||||
}
|
||||
|
||||
return ret;
|
||||
}
|
||||
}
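A worked round trip for the two helpers above; since CpioUtil is package private, the sketch assumes it is compiled into the same org.xbib.io.archive.cpio package (for example as a test). With swapHalfWord=false the helpers swap each adjacent pair of bytes, as the loops above show.

package org.xbib.io.archive.cpio;

public class CpioUtilRoundTrip {
    public static void main(String[] args) {
        // 070707 octal (the old binary magic) is 0x71C7
        byte[] encoded = CpioUtil.long2byteArray(070707L, 2, false);
        System.out.printf("encoded: %02x %02x%n", encoded[0] & 0xFF, encoded[1] & 0xFF); // c7 71
        long decoded = CpioUtil.byteArray2long(encoded, false);
        System.out.println(decoded == 070707L); // true
    }
}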
|
3
io-archive-dump/build.gradle
Normal file
3
io-archive-dump/build.gradle
Normal file
|
@ -0,0 +1,3 @@
|
|||
dependencies {
|
||||
api project(':io-archive')
|
||||
}
|
4
io-archive-dump/src/main/java/module-info.java
Normal file
4
io-archive-dump/src/main/java/module-info.java
Normal file
|
@ -0,0 +1,4 @@
|
|||
module org.xbib.io.archive.dump {
|
||||
exports org.xbib.io.archive.dump;
|
||||
requires org.xbib.io.archive;
|
||||
}
|
|
@ -0,0 +1,70 @@
|
|||
package org.xbib.io.archive.dump;
|
||||
|
||||
/**
|
||||
* Directory entry.
|
||||
*/
|
||||
class Dirent {
|
||||
private int ino;
|
||||
private int parentIno;
|
||||
private int type;
|
||||
private String name;
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
*
|
||||
* @param ino the i-node number of this entry
|
||||
* @param parentIno the i-node number of the parent directory
|
||||
* @param type the entry type
|
||||
* @param name the name of the directory entry
|
||||
*/
|
||||
Dirent(int ino, int parentIno, int type, String name) {
|
||||
this.ino = ino;
|
||||
this.parentIno = parentIno;
|
||||
this.type = type;
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get ino.
|
||||
*
|
||||
* @return the i-node
|
||||
*/
|
||||
int getIno() {
|
||||
return ino;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get ino of parent directory.
|
||||
*
|
||||
* @return the parent i-node
|
||||
*/
|
||||
int getParentIno() {
|
||||
return parentIno;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get entry type.
|
||||
*
|
||||
* @return the entry type
|
||||
*/
|
||||
int getType() {
|
||||
return type;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get name of directory entry.
|
||||
*
|
||||
* @return the directory name
|
||||
*/
|
||||
String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
/**
|
||||
* @see Object#toString()
|
||||
*/
|
||||
@Override
|
||||
public String toString() {
|
||||
return String.format("[%d]: %s", ino, name);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,73 @@
|
|||
package org.xbib.io.archive.dump;
|
||||
|
||||
/**
|
||||
* Various constants associated with dump archives.
|
||||
*/
|
||||
public final class DumpArchiveConstants {
|
||||
public static final int TP_SIZE = 1024;
|
||||
public static final int NTREC = 10;
|
||||
public static final int HIGH_DENSITY_NTREC = 32;
|
||||
public static final int OFS_MAGIC = 60011;
|
||||
public static final int NFS_MAGIC = 60012;
|
||||
public static final int FS_UFS2_MAGIC = 0x19540119;
|
||||
public static final int CHECKSUM = 84446;
|
||||
public static final int LBLSIZE = 16;
|
||||
public static final int NAMELEN = 64;
|
||||
|
||||
/* do not instantiate */
|
||||
private DumpArchiveConstants() {
|
||||
}
|
||||
|
||||
/**
|
||||
* The type of tape segment.
|
||||
*/
|
||||
public enum SEGMENT_TYPE {
|
||||
TAPE(1),
|
||||
INODE(2),
|
||||
BITS(3),
|
||||
ADDR(4),
|
||||
END(5),
|
||||
CLRI(6);
|
||||
|
||||
int code;
|
||||
|
||||
SEGMENT_TYPE(int code) {
|
||||
this.code = code;
|
||||
}
|
||||
|
||||
public static SEGMENT_TYPE find(int code) {
|
||||
for (SEGMENT_TYPE t : values()) {
|
||||
if (t.code == code) {
|
||||
return t;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* The type of compression.
|
||||
*/
|
||||
public enum COMPRESSION_TYPE {
|
||||
ZLIB(0),
|
||||
BZLIB(1),
|
||||
LZO(2);
|
||||
|
||||
int code;
|
||||
|
||||
COMPRESSION_TYPE(int code) {
|
||||
this.code = code;
|
||||
}
|
||||
|
||||
public static COMPRESSION_TYPE find(int code) {
|
||||
for (COMPRESSION_TYPE t : values()) {
|
||||
if (t.code == code) {
|
||||
return t;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
}
|
||||
}
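Both find() methods above return null for an unknown code rather than throwing, so callers are expected to check the result; a brief sketch with a made-up code value:

import org.xbib.io.archive.dump.DumpArchiveConstants;

public class SegmentTypeLookup {
    public static void main(String[] args) {
        int code = 2; // e.g. read from a tape segment header
        DumpArchiveConstants.SEGMENT_TYPE type = DumpArchiveConstants.SEGMENT_TYPE.find(code);
        if (type == null) {
            throw new IllegalStateException("unsupported segment type code " + code);
        }
        System.out.println(type); // INODE
    }
}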
|
|
@ -0,0 +1,797 @@
|
|||
package org.xbib.io.archive.dump;
|
||||
|
||||
import org.xbib.io.archive.entry.ArchiveEntry;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.Date;
|
||||
import java.util.EnumSet;
|
||||
import java.util.HashSet;
|
||||
import java.util.Set;
|
||||
|
||||
/**
|
||||
* This class represents an entry in a Dump archive. It consists
|
||||
* of the entry's header, the entry's File and any extended attributes.
|
||||
* DumpEntries that are created from the header bytes read from
|
||||
* an archive are instantiated with the DumpArchiveEntry( byte[] )
|
||||
* constructor. These entries will be used when extracting from
|
||||
* or listing the contents of an archive. These entries have their
|
||||
* header filled in using the header bytes. They also set the File
|
||||
* to null, since they reference an archive entry not a file.
|
||||
* DumpEntries can also be constructed from nothing but a name.
|
||||
* This allows the programmer to construct the entry by hand, for
|
||||
* instance when only an InputStream is available for writing to
|
||||
* the archive, and the header information is constructed from
|
||||
* other information. In this case the header fields are set to
|
||||
* defaults and the File is set to null.
|
||||
* The C structure for a Dump Entry's header is:
|
||||
* <pre>
|
||||
* #define TP_BSIZE 1024 // size of each file block
|
||||
* #define NTREC 10 // number of blocks to write at once
|
||||
* #define HIGHDENSITYTREC 32 // number of blocks to write on high-density tapes
|
||||
* #define TP_NINDIR (TP_BSIZE/2) // number if indirect inodes in record
|
||||
* #define TP_NINOS (TP_NINDIR / sizeof (int32_t))
|
||||
* #define LBLSIZE 16
|
||||
* #define NAMELEN 64
|
||||
*
|
||||
* #define OFS_MAGIC (int)60011 // old format magic value
|
||||
* #define NFS_MAGIC (int)60012 // new format magic value
|
||||
* #define FS_UFS2_MAGIC (int)0x19540119
|
||||
* #define CHECKSUM (int)84446 // constant used in checksum algorithm
|
||||
*
|
||||
* struct s_spcl {
|
||||
* int32_t c_type; // record type (see below)
|
||||
* int32_t c_date; // date of this dump
|
||||
* int32_t c_ddate; // date of previous dump
|
||||
* int32_t c_volume; // dump volume number
|
||||
* u_int32_t c_tapea; // logical block of this record
|
||||
* dump_ino_t c_ino; // number of inode
|
||||
* int32_t c_magic; // magic number (see above)
|
||||
* int32_t c_checksum; // record checksum
|
||||
* #ifdef __linux__
|
||||
* struct new_bsd_inode c_dinode;
|
||||
* #else
|
||||
* #ifdef sunos
|
||||
* struct new_bsd_inode c_dinode;
|
||||
* #else
|
||||
* struct dinode c_dinode; // ownership and mode of inode
|
||||
* #endif
|
||||
* #endif
|
||||
* int32_t c_count; // number of valid c_addr entries
|
||||
* union u_data c_data; // see above
|
||||
* char c_label[LBLSIZE]; // dump label
|
||||
* int32_t c_level; // level of this dump
|
||||
* char c_filesys[NAMELEN]; // name of dumpped file system
|
||||
* char c_dev[NAMELEN]; // name of dumpped device
|
||||
* char c_host[NAMELEN]; // name of dumpped host
|
||||
* int32_t c_flags; // additional information (see below)
|
||||
* int32_t c_firstrec; // first record on volume
|
||||
* int32_t c_ntrec; // blocksize on volume
|
||||
* int32_t c_extattributes; // additional inode info (see below)
|
||||
* int32_t c_spare[30]; // reserved for future uses
|
||||
* } s_spcl;
|
||||
*
|
||||
* //
|
||||
* // flag values
|
||||
* //
|
||||
* #define DR_NEWHEADER 0x0001 // new format tape header
|
||||
* #define DR_NEWINODEFMT 0x0002 // new format inodes on tape
|
||||
* #define DR_COMPRESSED 0x0080 // dump tape is compressed
|
||||
* #define DR_METAONLY 0x0100 // only the metadata of the inode has been dumped
|
||||
* #define DR_INODEINFO 0x0002 // [SIC] TS_END header contains c_inos information
|
||||
* #define DR_EXTATTRIBUTES 0x8000
|
||||
*
|
||||
* //
|
||||
* // extattributes inode info
|
||||
* //
|
||||
* #define EXT_REGULAR 0
|
||||
* #define EXT_MACOSFNDRINFO 1
|
||||
* #define EXT_MACOSRESFORK 2
|
||||
* #define EXT_XATTR 3
|
||||
*
|
||||
* // used for EA on tape
|
||||
* #define EXT2_GOOD_OLD_INODE_SIZE 128
|
||||
* #define EXT2_XATTR_MAGIC 0xEA020000 // block EA
|
||||
* #define EXT2_XATTR_MAGIC2 0xEA020001 // in inode EA
|
||||
* </pre>
|
||||
* The C structure for the inode (file) information is:
|
||||
* <pre>
|
||||
* struct bsdtimeval { // **** alpha-*-linux is deviant
|
||||
* __u32 tv_sec;
|
||||
* __u32 tv_usec;
|
||||
* };
|
||||
*
|
||||
* #define NDADDR 12
|
||||
* #define NIADDR 3
|
||||
*
|
||||
* //
|
||||
* // This is the new (4.4) BSD inode structure
|
||||
* // copied from the FreeBSD 2.0 ufs/ufs/dinode.h include file
|
||||
* //
|
||||
* struct new_bsd_inode {
|
||||
* __u16 di_mode; // file type, standard Unix permissions
|
||||
* __s16 di_nlink; // number of hard links to file.
|
||||
* union {
|
||||
* __u16 oldids[2];
|
||||
* __u32 inumber;
|
||||
* } di_u;
|
||||
* u_quad_t di_size; // file size
|
||||
* struct bsdtimeval di_atime; // time file was last accessed
|
||||
* struct bsdtimeval di_mtime; // time file was last modified
|
||||
* struct bsdtimeval di_ctime; // time file was created
|
||||
* __u32 di_db[NDADDR];
|
||||
* __u32 di_ib[NIADDR];
|
||||
* __u32 di_flags; //
|
||||
* __s32 di_blocks; // number of disk blocks
|
||||
* __s32 di_gen; // generation number
|
||||
* __u32 di_uid; // user id (see /etc/passwd)
|
||||
* __u32 di_gid; // group id (see /etc/group)
|
||||
* __s32 di_spare[2]; // unused
|
||||
* };
|
||||
* </pre>
|
||||
* It is important to note that the header DOES NOT have the name of the
|
||||
* file. It can't since hard links mean that you may have multiple filenames
|
||||
* for a single physical file. You must read the contents of the directory
|
||||
* entries to learn the mapping(s) from filename to inode.
|
||||
* The C structure that indicates if a specific block is a real block
|
||||
* that contains data or is a sparse block that is not persisted to the
|
||||
* disk is:
|
||||
* <pre>
|
||||
* #define TP_BSIZE 1024
|
||||
* #define TP_NINDIR (TP_BSIZE/2)
|
||||
*
|
||||
* union u_data {
|
||||
* char s_addrs[TP_NINDIR]; // 1 => data; 0 => hole in inode
|
||||
* int32_t s_inos[TP_NINOS]; // table of first inode on each volume
|
||||
* } u_data;
|
||||
* </pre>
|
||||
*/
|
||||
public class DumpArchiveEntry implements ArchiveEntry {
|
||||
|
||||
private String name;
|
||||
|
||||
private TYPE type = TYPE.UNKNOWN;
|
||||
|
||||
private int mode;
|
||||
|
||||
private Set<PERMISSION> permissions = Collections.emptySet();
|
||||
|
||||
private long size;
|
||||
|
||||
private long atime;
|
||||
|
||||
private long mtime;
|
||||
|
||||
private int uid;
|
||||
|
||||
private int gid;
|
||||
|
||||
/**
|
||||
* Currently unused
|
||||
*/
|
||||
private DumpArchiveSummary summary = null;
|
||||
|
||||
// this information is available from standard index.
|
||||
private TapeSegmentHeader header = new TapeSegmentHeader();
|
||||
|
||||
private String simpleName;
|
||||
|
||||
private String originalName;
|
||||
|
||||
// this information is available from QFA index
|
||||
private int volume;
|
||||
|
||||
private long offset;
|
||||
|
||||
private int ino;
|
||||
|
||||
private int nlink;
|
||||
|
||||
private long ctime;
|
||||
|
||||
private int generation;
|
||||
|
||||
private boolean isDeleted;
|
||||
|
||||
/**
|
||||
* Default constructor.
|
||||
*/
|
||||
public DumpArchiveEntry() {
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor taking only filename.
|
||||
*
|
||||
* @param name pathname
|
||||
* @param simpleName actual filename.
|
||||
*/
|
||||
public DumpArchiveEntry(String name, String simpleName) {
|
||||
setName(name);
|
||||
this.simpleName = simpleName;
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor taking name, inode and type.
|
||||
*
|
||||
* @param name pathname
|
||||
* @param simpleName actual filename
|
||||
* @param ino inode number
|
||||
* @param type entry type
|
||||
*/
|
||||
protected DumpArchiveEntry(String name, String simpleName, int ino,
|
||||
TYPE type) {
|
||||
setType(type);
|
||||
setName(name);
|
||||
this.simpleName = simpleName;
|
||||
this.ino = ino;
|
||||
this.offset = 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor taking tape buffer.
|
||||
* @param buffer
|
||||
* @param offset
|
||||
*/
|
||||
|
||||
/**
|
||||
* Returns the path of the entry.
|
||||
*
|
||||
* @return the path of the entry.
|
||||
*/
|
||||
public String getSimpleName() {
|
||||
return simpleName;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the path of the entry.
|
||||
*/
|
||||
protected void setSimpleName(String simpleName) {
|
||||
this.simpleName = simpleName;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the ino of the entry.
|
||||
*/
|
||||
public int getIno() {
|
||||
return header.getIno();
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the number of hard links to the entry.
|
||||
*/
|
||||
public int getNlink() {
|
||||
return nlink;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the number of hard links.
|
||||
*/
|
||||
public void setNlink(int nlink) {
|
||||
this.nlink = nlink;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get file creation time.
|
||||
*/
|
||||
public Date getCreationTime() {
|
||||
return new Date(ctime);
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the file creation time.
|
||||
*/
|
||||
public void setCreationTime(Date ctime) {
|
||||
this.ctime = ctime.getTime();
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the generation of the file.
|
||||
*/
|
||||
public int getGeneration() {
|
||||
return generation;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the generation of the file.
|
||||
*/
|
||||
public void setGeneration(int generation) {
|
||||
this.generation = generation;
|
||||
}
|
||||
|
||||
/**
|
||||
* Has this file been deleted? (Only valid on incremental dumps.)
|
||||
*/
|
||||
public boolean isDeleted() {
|
||||
return isDeleted;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set whether this file has been deleted.
|
||||
*/
|
||||
public void setDeleted(boolean isDeleted) {
|
||||
this.isDeleted = isDeleted;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the offset within the archive
|
||||
*/
|
||||
public long getOffset() {
|
||||
return offset;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the offset within the archive.
|
||||
*/
|
||||
public void setOffset(long offset) {
|
||||
this.offset = offset;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the tape volume where this file is located.
|
||||
*/
|
||||
public int getVolume() {
|
||||
return volume;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the tape volume.
|
||||
*/
|
||||
public void setVolume(int volume) {
|
||||
this.volume = volume;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the type of the tape segment header.
|
||||
*/
|
||||
public DumpArchiveConstants.SEGMENT_TYPE getHeaderType() {
|
||||
return header.getType();
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the number of records in this segment.
|
||||
*/
|
||||
public int getHeaderCount() {
|
||||
return header.getCount();
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the number of sparse records in this segment.
|
||||
*/
|
||||
public int getHeaderHoles() {
|
||||
return header.getHoles();
|
||||
}
|
||||
|
||||
/**
|
||||
* Is this a sparse record?
|
||||
*/
|
||||
public boolean isSparseRecord(int idx) {
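// per the s_addrs map in the segment header: a non-zero byte marks a data block, zero marks a hole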
|
||||
return (header.getCdata(idx) & 0x01) == 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* @see Object#hashCode()
|
||||
*/
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return ino;
|
||||
}
|
||||
|
||||
/**
|
||||
* @see Object#equals(Object)
|
||||
*/
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (o == this) {
|
||||
return true;
|
||||
} else if (o == null || !o.getClass().equals(getClass())) {
|
||||
return false;
|
||||
}
|
||||
|
||||
DumpArchiveEntry rhs = (DumpArchiveEntry) o;
|
||||
|
||||
if ((header == null) || (rhs.header == null)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (ino != rhs.ino) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if ((summary == null && rhs.summary != null)
|
||||
|| (summary != null && !summary.equals(rhs.summary))) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* @see Object#toString()
|
||||
*/
|
||||
@Override
|
||||
public String toString() {
|
||||
return getName();
|
||||
}
|
||||
|
||||
/**
|
||||
* Populate the dump archive entry and tape segment header with
|
||||
* the contents of the buffer.
|
||||
*
|
||||
* @param buffer the tape record buffer
|
||||
* @return the populated entry
|
||||
*/
|
||||
static DumpArchiveEntry parse(byte[] buffer) {
|
||||
DumpArchiveEntry entry = new DumpArchiveEntry();
|
||||
TapeSegmentHeader header = entry.header;
|
||||
|
||||
header.type = DumpArchiveConstants.SEGMENT_TYPE.find(DumpArchiveUtil.convert32(
|
||||
buffer, 0));
|
||||
|
||||
//header.dumpDate = new Date(1000L * DumpArchiveUtil.convert32(buffer, 4));
|
||||
//header.previousDumpDate = new Date(1000L * DumpArchiveUtil.convert32(
|
||||
// buffer, 8));
|
||||
header.volume = DumpArchiveUtil.convert32(buffer, 12);
|
||||
//header.tapea = DumpArchiveUtil.convert32(buffer, 16);
|
||||
entry.ino = header.ino = DumpArchiveUtil.convert32(buffer, 20);
|
||||
|
||||
//header.magic = DumpArchiveUtil.convert32(buffer, 24);
|
||||
//header.checksum = DumpArchiveUtil.convert32(buffer, 28);
|
||||
int m = DumpArchiveUtil.convert16(buffer, 32);
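// m is di_mode: the file type lives in the high 4 bits, Unix permission bits in the low 12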
|
||||
|
||||
// determine the type of the file.
|
||||
entry.setType(TYPE.find((m >> 12) & 0x0F));
|
||||
|
||||
// determine the standard permissions
|
||||
entry.setMode(m);
|
||||
|
||||
entry.nlink = DumpArchiveUtil.convert16(buffer, 34);
|
||||
// inumber, oldids?
|
||||
entry.setEntrySize(DumpArchiveUtil.convert64(buffer, 40));
|
||||
|
||||
long t = (1000L * DumpArchiveUtil.convert32(buffer, 48)) +
|
||||
(DumpArchiveUtil.convert32(buffer, 52) / 1000);
|
||||
entry.setAccessTime(new Date(t));
|
||||
t = (1000L * DumpArchiveUtil.convert32(buffer, 56)) +
|
||||
(DumpArchiveUtil.convert32(buffer, 60) / 1000);
|
||||
entry.setLastModified(new Date(t));
|
||||
t = (1000L * DumpArchiveUtil.convert32(buffer, 64)) +
|
||||
(DumpArchiveUtil.convert32(buffer, 68) / 1000);
|
||||
entry.ctime = t;
|
||||
|
||||
// db: 72-119 - direct blocks
|
||||
// id: 120-131 - indirect blocks
|
||||
//entry.flags = DumpArchiveUtil.convert32(buffer, 132);
|
||||
//entry.blocks = DumpArchiveUtil.convert32(buffer, 136);
|
||||
entry.generation = DumpArchiveUtil.convert32(buffer, 140);
|
||||
entry.setUserId(DumpArchiveUtil.convert32(buffer, 144));
|
||||
entry.setGroupId(DumpArchiveUtil.convert32(buffer, 148));
|
||||
// two 32-bit spare values.
|
||||
header.count = DumpArchiveUtil.convert32(buffer, 160);
|
||||
|
||||
header.holes = 0;
|
||||
|
||||
for (int i = 0; (i < 512) && (i < header.count); i++) {
|
||||
if (buffer[164 + i] == 0) {
|
||||
header.holes++;
|
||||
}
|
||||
}
|
||||
|
||||
System.arraycopy(buffer, 164, header.cdata, 0, 512);
|
||||
|
||||
entry.volume = header.getVolume();
|
||||
|
||||
//entry.isSummaryOnly = false;
|
||||
return entry;
|
||||
}
|
||||
|
||||
/**
|
||||
* Update entry with information from next tape segment header.
|
||||
*/
|
||||
void update(byte[] buffer) {
|
||||
header.volume = DumpArchiveUtil.convert32(buffer, 16);
|
||||
header.count = DumpArchiveUtil.convert32(buffer, 160);
|
||||
|
||||
header.holes = 0;
|
||||
|
||||
for (int i = 0; (i < 512) && (i < header.count); i++) {
|
||||
if (buffer[164 + i] == 0) {
|
||||
header.holes++;
|
||||
}
|
||||
}
|
||||
|
||||
System.arraycopy(buffer, 164, header.cdata, 0, 512);
|
||||
}
|
||||
|
||||
/**
|
||||
* Archive entry as stored on tape. There is one TSH for (at most)
|
||||
* every 512k in the file.
|
||||
*/
|
||||
static class TapeSegmentHeader {
|
||||
private DumpArchiveConstants.SEGMENT_TYPE type;
|
||||
private int volume;
|
||||
private int ino;
|
||||
private int count;
|
||||
private int holes;
|
||||
private byte[] cdata = new byte[512]; // map of any 'holes'
|
||||
|
||||
public DumpArchiveConstants.SEGMENT_TYPE getType() {
|
||||
return type;
|
||||
}
|
||||
|
||||
public int getVolume() {
|
||||
return volume;
|
||||
}
|
||||
|
||||
public int getIno() {
|
||||
return ino;
|
||||
}
|
||||
|
||||
void setIno(int ino) {
|
||||
this.ino = ino;
|
||||
}
|
||||
|
||||
public int getCount() {
|
||||
return count;
|
||||
}
|
||||
|
||||
public int getHoles() {
|
||||
return holes;
|
||||
}
|
||||
|
||||
public int getCdata(int idx) {
|
||||
return cdata[idx];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the name of the entry.
|
||||
*
|
||||
* @return the name of the entry.
|
||||
*/
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the unmodified name of the entry.
|
||||
*
|
||||
* @return the name of the entry.
|
||||
*/
|
||||
String getOriginalName() {
|
||||
return originalName;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the name of the entry.
|
||||
*/
|
||||
public DumpArchiveEntry setName(String name) {
|
||||
this.originalName = name;
|
||||
if (name != null) {
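// normalize: directory names get a trailing '/', a leading "./" is stripped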
|
||||
if (isDirectory() && !name.endsWith("/")) {
|
||||
name += "/";
|
||||
}
|
||||
if (name.startsWith("./")) {
|
||||
name = name.substring(2);
|
||||
}
|
||||
}
|
||||
this.name = name;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Date getLastModifiedDate() {
|
||||
return new Date(mtime);
|
||||
}
|
||||
|
||||
/**
|
||||
* Is this a directory?
|
||||
*/
|
||||
public boolean isDirectory() {
|
||||
return type == TYPE.DIRECTORY;
|
||||
}
|
||||
|
||||
/**
|
||||
* Is this a regular file?
|
||||
*/
|
||||
public boolean isFile() {
|
||||
return type == TYPE.FILE;
|
||||
}
|
||||
|
||||
/**
|
||||
* Is this a socket?
|
||||
*/
|
||||
public boolean isSocket() {
|
||||
return type == TYPE.SOCKET;
|
||||
}
|
||||
|
||||
/**
|
||||
* Is this a character device?
|
||||
*/
|
||||
public boolean isChrDev() {
|
||||
return type == TYPE.CHRDEV;
|
||||
}
|
||||
|
||||
/**
|
||||
* Is this a block device?
|
||||
*/
|
||||
public boolean isBlkDev() {
|
||||
return type == TYPE.BLKDEV;
|
||||
}
|
||||
|
||||
/**
|
||||
* Is this a fifo/pipe?
|
||||
*/
|
||||
public boolean isFifo() {
|
||||
return type == TYPE.FIFO;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the type of the entry.
|
||||
*/
|
||||
public TYPE getType() {
|
||||
return type;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the type of the entry.
|
||||
*/
|
||||
public void setType(TYPE type) {
|
||||
this.type = type;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the access permissions on the entry.
|
||||
*/
|
||||
public int getMode() {
|
||||
return mode;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the access permissions on the entry.
|
||||
*/
|
||||
public void setMode(int mode) {
|
||||
this.mode = mode & 07777;
|
||||
this.permissions = PERMISSION.find(mode);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the permissions on the entry.
|
||||
*/
|
||||
public Set<PERMISSION> getPermissions() {
|
||||
return permissions;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the size of the entry as read from the archive.
|
||||
*/
|
||||
public long getEntrySize() {
|
||||
return isDirectory() ? ArchiveEntry.SIZE_UNKNOWN : size;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the size of the entry.
|
||||
*/
|
||||
public DumpArchiveEntry setEntrySize(long size) {
|
||||
this.size = size;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the time the file was last modified.
|
||||
*/
|
||||
public DumpArchiveEntry setLastModified(Date mtime) {
|
||||
this.mtime = mtime.getTime();
|
||||
return this;
|
||||
}
|
||||
|
||||
public Date getLastModified() {
|
||||
return new Date(mtime);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the time the file was last accessed.
|
||||
*/
|
||||
public Date getAccessTime() {
|
||||
return new Date(atime);
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the time the file was last accessed.
|
||||
*/
|
||||
public void setAccessTime(Date atime) {
|
||||
this.atime = atime.getTime();
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the user id.
|
||||
*/
|
||||
public int getUserId() {
|
||||
return uid;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the user id.
|
||||
*/
|
||||
public void setUserId(int uid) {
|
||||
this.uid = uid;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the group id
|
||||
*/
|
||||
public int getGroupId() {
|
||||
return gid;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the group id.
|
||||
*/
|
||||
public void setGroupId(int gid) {
|
||||
this.gid = gid;
|
||||
}
|
||||
|
||||
public enum TYPE {
|
||||
WHITEOUT(14),
|
||||
SOCKET(12),
|
||||
LINK(10),
|
||||
FILE(8),
|
||||
BLKDEV(6),
|
||||
DIRECTORY(4),
|
||||
CHRDEV(2),
|
||||
FIFO(1),
|
||||
UNKNOWN(15);
|
||||
|
||||
private int code;
|
||||
|
||||
TYPE(int code) {
|
||||
this.code = code;
|
||||
}
|
||||
|
||||
public static TYPE find(int code) {
|
||||
TYPE type = UNKNOWN;
|
||||
|
||||
for (TYPE t : TYPE.values()) {
|
||||
if (code == t.code) {
|
||||
type = t;
|
||||
}
|
||||
}
|
||||
|
||||
return type;
|
||||
}
|
||||
}
|
||||
|
||||
public enum PERMISSION {
|
||||
SETUID(04000),
|
||||
SETGUI(02000),
|
||||
STICKY(01000),
|
||||
USER_READ(00400),
|
||||
USER_WRITE(00200),
|
||||
USER_EXEC(00100),
|
||||
GROUP_READ(00040),
|
||||
GROUP_WRITE(00020),
|
||||
GROUP_EXEC(00010),
|
||||
WORLD_READ(00004),
|
||||
WORLD_WRITE(00002),
|
||||
WORLD_EXEC(00001);
|
||||
|
||||
private int code;
|
||||
|
||||
PERMISSION(int code) {
|
||||
this.code = code;
|
||||
}
|
||||
|
||||
public static Set<PERMISSION> find(int code) {
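// decode Unix mode bits into a set, e.g. find(0644) yields {USER_READ, USER_WRITE, GROUP_READ, WORLD_READ}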
|
||||
Set<PERMISSION> set = new HashSet<>();
|
||||
|
||||
for (PERMISSION p : PERMISSION.values()) {
|
||||
if ((code & p.code) == p.code) {
|
||||
set.add(p);
|
||||
}
|
||||
}
|
||||
|
||||
if (set.isEmpty()) {
|
||||
return Collections.emptySet();
|
||||
}
|
||||
|
||||
return EnumSet.copyOf(set);
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,19 @@
|
|||
package org.xbib.io.archive.dump;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
|
||||
/**
|
||||
* Dump Archive Exception
|
||||
*/
|
||||
public class DumpArchiveException extends IOException {
|
||||
|
||||
public DumpArchiveException(String msg) {
|
||||
super(msg);
|
||||
}
|
||||
|
||||
public DumpArchiveException(String msg, Throwable cause) {
|
||||
super(msg);
|
||||
initCause(cause);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,490 @@
|
|||
package org.xbib.io.archive.dump;
|
||||
|
||||
import org.xbib.io.archive.stream.ArchiveInputStream;
|
||||
|
||||
import java.io.EOFException;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.Arrays;
|
||||
import java.util.Comparator;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.PriorityQueue;
|
||||
import java.util.Queue;
|
||||
import java.util.Stack;
|
||||
|
||||
/**
|
||||
* The DumpArchiveInputStream reads a UNIX dump archive as an InputStream.
|
||||
* Methods are provided to position at each successive entry in
|
||||
* the archive, and then read each entry as a normal input stream
|
||||
* using read().
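* A minimal usage sketch (the file name and buffer size are illustrative only, not part of this API):
* <pre>
* try (InputStream in = new FileInputStream("backup.dump")) {
*     DumpArchiveInputStream dump = new DumpArchiveInputStream(in);
*     DumpArchiveEntry entry;
*     byte[] buf = new byte[1024];
*     while ((entry = dump.getNextDumpEntry()) != null) {
*         System.out.println(entry.getName() + " " + entry.getEntrySize());
*         while (dump.read(buf, 0, buf.length) != -1) {
*             // drain or copy the entry's data
*         }
*     }
* }
* </pre>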
|
||||
*/
|
||||
public class DumpArchiveInputStream extends ArchiveInputStream {
|
||||
|
||||
private DumpArchiveSummary summary;
|
||||
|
||||
private DumpArchiveEntry active;
|
||||
|
||||
private boolean isClosed;
|
||||
|
||||
private boolean hasHitEOF;
|
||||
|
||||
private long entrySize;
|
||||
|
||||
private long entryOffset;
|
||||
|
||||
private int readIdx;
|
||||
|
||||
private byte[] readBuf = new byte[DumpArchiveConstants.TP_SIZE];
|
||||
|
||||
private byte[] blockBuffer;
|
||||
|
||||
private int recordOffset;
|
||||
|
||||
private long filepos;
|
||||
|
||||
protected TapeInputStream raw;
|
||||
|
||||
// map of ino -> dirent entry. We can use this to reconstruct full paths.
|
||||
private Map<Integer, Dirent> names = new HashMap<Integer, Dirent>();
|
||||
|
||||
// map of ino -> (directory) entry when we're missing one or more elements in the path.
|
||||
private Map<Integer, DumpArchiveEntry> pending = new HashMap<Integer, DumpArchiveEntry>();
|
||||
|
||||
// queue of (directory) entries where we now have the full path.
|
||||
private Queue<DumpArchiveEntry> queue;
|
||||
|
||||
/**
|
||||
* Constructor.
|
||||
*
|
||||
* @param is the input stream to read the archive from
|
||||
*/
|
||||
public DumpArchiveInputStream(InputStream is) throws IOException {
|
||||
this.raw = new TapeInputStream(is);
|
||||
this.hasHitEOF = false;
|
||||
|
||||
// read header, verify it's a dump archive.
|
||||
byte[] headerBytes = raw.readRecord();
|
||||
|
||||
if (!DumpArchiveUtil.verify(headerBytes)) {
|
||||
throw new UnrecognizedFormatException();
|
||||
}
|
||||
|
||||
// get summary information
|
||||
summary = new DumpArchiveSummary(headerBytes);
|
||||
|
||||
// reset buffer with actual block size.
|
||||
raw.resetBlockSize(summary.getNTRec(), summary.isCompressed());
|
||||
|
||||
// allocate our read buffer.
|
||||
blockBuffer = new byte[4 * DumpArchiveConstants.TP_SIZE];
|
||||
|
||||
// skip past CLRI and BITS segments since we don't handle them yet.
|
||||
readCLRI();
|
||||
readBITS();
|
||||
|
||||
// put in a dummy record for the root node.
|
||||
Dirent root = new Dirent(2, 2, 4, ".");
|
||||
names.put(2, root);
|
||||
|
||||
// use a priority queue to ensure parent directories are
|
||||
// released first.
|
||||
queue = new PriorityQueue<DumpArchiveEntry>(10,
|
||||
new Comparator<DumpArchiveEntry>() {
|
||||
public int compare(DumpArchiveEntry p, DumpArchiveEntry q) {
|
||||
if ((p.getOriginalName() == null) || (q.getOriginalName() == null)) {
|
||||
return Integer.MAX_VALUE;
|
||||
}
|
||||
|
||||
return p.getOriginalName().compareTo(q.getOriginalName());
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the archive summary information.
|
||||
*/
|
||||
public DumpArchiveSummary getSummary() {
|
||||
return summary;
|
||||
}
|
||||
|
||||
/**
|
||||
* Read CLRI (deleted inode) segment.
|
||||
*/
|
||||
private void readCLRI() throws IOException {
|
||||
byte[] readBuf = raw.readRecord();
|
||||
|
||||
if (!DumpArchiveUtil.verify(readBuf)) {
|
||||
throw new InvalidFormatException();
|
||||
}
|
||||
|
||||
active = DumpArchiveEntry.parse(readBuf);
|
||||
|
||||
if (DumpArchiveConstants.SEGMENT_TYPE.CLRI != active.getHeaderType()) {
|
||||
throw new InvalidFormatException();
|
||||
}
|
||||
|
||||
// we don't do anything with this yet.
|
||||
if (raw.skip(DumpArchiveConstants.TP_SIZE * active.getHeaderCount())
|
||||
== -1) {
|
||||
throw new EOFException();
|
||||
}
|
||||
readIdx = active.getHeaderCount();
|
||||
}
|
||||
|
||||
/**
|
||||
* Read BITS segment.
|
||||
*/
|
||||
private void readBITS() throws IOException {
|
||||
byte[] readBuf = raw.readRecord();
|
||||
|
||||
if (!DumpArchiveUtil.verify(readBuf)) {
|
||||
throw new InvalidFormatException();
|
||||
}
|
||||
|
||||
active = DumpArchiveEntry.parse(readBuf);
|
||||
|
||||
if (DumpArchiveConstants.SEGMENT_TYPE.BITS != active.getHeaderType()) {
|
||||
throw new InvalidFormatException();
|
||||
}
|
||||
|
||||
// we don't do anything with this yet.
|
||||
if (raw.skip(DumpArchiveConstants.TP_SIZE * active.getHeaderCount())
|
||||
== -1) {
|
||||
throw new EOFException();
|
||||
}
|
||||
readIdx = active.getHeaderCount();
|
||||
}
|
||||
|
||||
/**
|
||||
* Read the next entry.
|
||||
*/
|
||||
public DumpArchiveEntry getNextDumpEntry() throws IOException {
|
||||
return getNextEntry();
|
||||
}
|
||||
|
||||
/**
|
||||
* Read the next entry.
|
||||
*/
|
||||
@Override
|
||||
public DumpArchiveEntry getNextEntry() throws IOException {
|
||||
DumpArchiveEntry entry = null;
|
||||
String path = null;
|
||||
|
||||
// is there anything in the queue?
|
||||
if (!queue.isEmpty()) {
|
||||
return queue.remove();
|
||||
}
|
||||
|
||||
while (entry == null) {
|
||||
if (hasHitEOF) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// skip any remaining records in this segment for prior file.
|
||||
// we might still have holes... easiest to do it
|
||||
// block by block. We may want to revisit this if
|
||||
// the unnecessary decompression time adds up.
|
||||
while (readIdx < active.getHeaderCount()) {
|
||||
if (!active.isSparseRecord(readIdx++)
|
||||
&& raw.skip(DumpArchiveConstants.TP_SIZE) == -1) {
|
||||
throw new EOFException();
|
||||
}
|
||||
}
|
||||
|
||||
readIdx = 0;
|
||||
filepos = raw.getBytesRead();
|
||||
|
||||
byte[] headerBytes = raw.readRecord();
|
||||
|
||||
if (!DumpArchiveUtil.verify(headerBytes)) {
|
||||
throw new InvalidFormatException();
|
||||
}
|
||||
|
||||
active = DumpArchiveEntry.parse(headerBytes);
|
||||
|
||||
// skip any remaining segments for prior file.
|
||||
while (DumpArchiveConstants.SEGMENT_TYPE.ADDR == active.getHeaderType()) {
|
||||
if (raw.skip(DumpArchiveConstants.TP_SIZE
|
||||
* (active.getHeaderCount()
|
||||
- active.getHeaderHoles())) == -1) {
|
||||
throw new EOFException();
|
||||
}
|
||||
|
||||
filepos = raw.getBytesRead();
|
||||
headerBytes = raw.readRecord();
|
||||
|
||||
if (!DumpArchiveUtil.verify(headerBytes)) {
|
||||
throw new InvalidFormatException();
|
||||
}
|
||||
|
||||
active = DumpArchiveEntry.parse(headerBytes);
|
||||
}
|
||||
|
||||
// check if this is an end-of-volume marker.
|
||||
if (DumpArchiveConstants.SEGMENT_TYPE.END == active.getHeaderType()) {
|
||||
hasHitEOF = true;
|
||||
isClosed = true;
|
||||
raw.close();
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
entry = active;
|
||||
|
||||
if (entry.isDirectory()) {
|
||||
readDirectoryEntry(active);
|
||||
|
||||
// now we create an empty InputStream.
|
||||
entryOffset = 0;
|
||||
entrySize = 0;
|
||||
readIdx = active.getHeaderCount();
|
||||
} else {
|
||||
entryOffset = 0;
|
||||
entrySize = active.getEntrySize();
|
||||
readIdx = 0;
|
||||
}
|
||||
|
||||
recordOffset = readBuf.length;
|
||||
|
||||
path = getPath(entry);
|
||||
|
||||
if (path == null) {
|
||||
entry = null;
|
||||
}
|
||||
}
|
||||
|
||||
entry.setName(path);
|
||||
entry.setSimpleName(names.get(entry.getIno()).getName());
|
||||
entry.setOffset(filepos);
|
||||
|
||||
return entry;
|
||||
}
|
||||
|
||||
/**
|
||||
* Read directory entry.
|
||||
*/
|
||||
private void readDirectoryEntry(DumpArchiveEntry entry)
|
||||
throws IOException {
|
||||
long size = entry.getEntrySize();
|
||||
boolean first = true;
|
||||
|
||||
while (first ||
|
||||
(DumpArchiveConstants.SEGMENT_TYPE.ADDR == entry.getHeaderType())) {
|
||||
// read the header that we just peeked at.
|
||||
if (!first) {
|
||||
raw.readRecord();
|
||||
}
|
||||
|
||||
if (!names.containsKey(entry.getIno()) &&
|
||||
(DumpArchiveConstants.SEGMENT_TYPE.INODE == entry.getHeaderType())) {
|
||||
pending.put(entry.getIno(), entry);
|
||||
}
|
||||
|
||||
int datalen = DumpArchiveConstants.TP_SIZE * entry.getHeaderCount();
|
||||
|
||||
if (blockBuffer.length < datalen) {
|
||||
blockBuffer = new byte[datalen];
|
||||
}
|
||||
|
||||
if (raw.read(blockBuffer, 0, datalen) != datalen) {
|
||||
throw new EOFException();
|
||||
}
|
||||
|
||||
int reclen = 0;
|
||||
|
||||
for (int i = 0; (i < (datalen - 8)) && (i < (size - 8));
|
||||
i += reclen) {
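// each directory record: 4-byte inode, 2-byte record length, 1-byte type,
// 1-byte name length, then the name itself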
|
||||
int ino = DumpArchiveUtil.convert32(blockBuffer, i);
|
||||
reclen = DumpArchiveUtil.convert16(blockBuffer, i + 4);
|
||||
|
||||
byte type = blockBuffer[i + 6];
|
||||
|
||||
String name = new String(blockBuffer, i + 8, blockBuffer[i + 7]); // TODO default charset?
|
||||
|
||||
if (".".equals(name) || "src/test".equals(name)) {
|
||||
// do nothing...
|
||||
continue;
|
||||
}
|
||||
|
||||
Dirent d = new Dirent(ino, entry.getIno(), type, name);
|
||||
|
||||
|
||||
names.put(Integer.valueOf(ino), d);
|
||||
|
||||
// check whether this allows us to fill anything in the pending list.
|
||||
for (Map.Entry<Integer, DumpArchiveEntry> e : pending.entrySet()) {
|
||||
String path = getPath(e.getValue());
|
||||
|
||||
if (path != null) {
|
||||
e.getValue().setName(path);
|
||||
e.getValue()
|
||||
.setSimpleName(names.get(e.getKey()).getName());
|
||||
queue.add(e.getValue());
|
||||
}
|
||||
}
|
||||
|
||||
// remove anything that we found. (We can't do it earlier
|
||||
// because of concurrent modification exceptions.)
|
||||
for (DumpArchiveEntry e : queue) {
|
||||
pending.remove(Integer.valueOf(e.getIno()));
|
||||
}
|
||||
}
|
||||
|
||||
byte[] peekBytes = raw.peek();
|
||||
|
||||
if (!DumpArchiveUtil.verify(peekBytes)) {
|
||||
throw new InvalidFormatException();
|
||||
}
|
||||
|
||||
entry = DumpArchiveEntry.parse(peekBytes);
|
||||
first = false;
|
||||
size -= DumpArchiveConstants.TP_SIZE;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get full path for specified archive entry, or null if there's a gap.
|
||||
*
|
||||
* @param entry
|
||||
* @return full path for specified archive entry, or null if there's a gap.
|
||||
*/
|
||||
private String getPath(DumpArchiveEntry entry) {
|
||||
// build the stack of elements. It's possible that we're
|
||||
// still missing an intermediate value, and if so we defer this entry until the gap is filled.
|
||||
Stack<String> elements = new Stack<String>();
|
||||
Dirent dirent = null;
|
||||
|
||||
for (int i = entry.getIno(); ; i = dirent.getParentIno()) {
|
||||
if (!names.containsKey(Integer.valueOf(i))) {
|
||||
elements.clear();
|
||||
break;
|
||||
}
|
||||
|
||||
dirent = names.get(Integer.valueOf(i));
|
||||
elements.push(dirent.getName());
|
||||
|
||||
if (dirent.getIno() == dirent.getParentIno()) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// if an element is missing defer the work and read next entry.
|
||||
if (elements.isEmpty()) {
|
||||
pending.put(Integer.valueOf(entry.getIno()), entry);
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
// generate full path from stack of elements.
|
||||
StringBuilder sb = new StringBuilder(elements.pop());
|
||||
|
||||
while (!elements.isEmpty()) {
|
||||
sb.append('/');
|
||||
sb.append(elements.pop());
|
||||
}
|
||||
|
||||
return sb.toString();
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads bytes from the current dump archive entry.
|
||||
* This method is aware of the boundaries of the current
|
||||
* entry in the archive and will deal with them as if they
|
||||
* were this stream's start and EOF.
|
||||
*
|
||||
* @param buf The buffer into which to place bytes read.
|
||||
* @param off The offset at which to place bytes read.
|
||||
* @param len The number of bytes to read.
|
||||
* @return The number of bytes read, or -1 at EOF.
|
||||
* @throws java.io.IOException on error
|
||||
*/
|
||||
@Override
|
||||
public int read(byte[] buf, int off, int len) throws IOException {
|
||||
int totalRead = 0;
|
||||
|
||||
if (isClosed || (entryOffset >= entrySize)) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
if ((len + entryOffset) > entrySize) {
|
||||
len = (int) (entrySize - entryOffset);
|
||||
}
|
||||
|
||||
while (len > 0) {
|
||||
int sz = (len > (readBuf.length - recordOffset))
|
||||
? (readBuf.length - recordOffset) : len;
|
||||
|
||||
// copy any data we have
|
||||
if ((recordOffset + sz) <= readBuf.length) {
|
||||
System.arraycopy(readBuf, recordOffset, buf, off, sz);
|
||||
totalRead += sz;
|
||||
recordOffset += sz;
|
||||
len -= sz;
|
||||
off += sz;
|
||||
}
|
||||
|
||||
// load next block if necessary.
|
||||
if (len > 0) {
|
||||
if (readIdx >= 512) {
|
||||
byte[] headerBytes = raw.readRecord();
|
||||
|
||||
if (!DumpArchiveUtil.verify(headerBytes)) {
|
||||
throw new InvalidFormatException();
|
||||
}
|
||||
|
||||
active = DumpArchiveEntry.parse(headerBytes);
|
||||
readIdx = 0;
|
||||
}
|
||||
|
||||
if (!active.isSparseRecord(readIdx++)) {
|
||||
int r = raw.read(readBuf, 0, readBuf.length);
|
||||
if (r != readBuf.length) {
|
||||
throw new EOFException();
|
||||
}
|
||||
} else {
|
||||
Arrays.fill(readBuf, (byte) 0);
|
||||
}
|
||||
|
||||
recordOffset = 0;
|
||||
}
|
||||
}
|
||||
|
||||
entryOffset += totalRead;
|
||||
|
||||
return totalRead;
|
||||
}
|
||||
|
||||
/**
|
||||
* Closes the stream for this entry.
|
||||
*/
|
||||
@Override
|
||||
public void close() throws IOException {
|
||||
if (!isClosed) {
|
||||
isClosed = true;
|
||||
raw.close();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Look at the first few bytes of the file to decide if it's a dump
|
||||
* archive. With 32 bytes we can look at the magic value, with a full
|
||||
* 1k we can verify the checksum.
|
||||
*/
|
||||
public static boolean matches(byte[] buffer, int length) {
|
||||
// do we have enough of the header?
|
||||
if (length < 32) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// this is the best test
|
||||
if (length >= DumpArchiveConstants.TP_SIZE) {
|
||||
return DumpArchiveUtil.verify(buffer);
|
||||
}
|
||||
|
||||
// this will work in a pinch.
|
||||
return DumpArchiveConstants.NFS_MAGIC == DumpArchiveUtil.convert32(buffer,
|
||||
24);
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,333 @@
|
|||
package org.xbib.io.archive.dump;
|
||||
|
||||
import java.util.Date;
|
||||
|
||||
|
||||
/**
|
||||
* This class represents identifying information about a Dump archive volume.
|
||||
* It consists of the archive's dump date, label, hostname, device name and possibly the
|
||||
* last mount point, plus the volume's volume id and first record number.
|
||||
* For the corresponding C structure see the header of {@link DumpArchiveEntry}.
|
||||
*/
|
||||
public class DumpArchiveSummary {
|
||||
private long dumpDate;
|
||||
private long previousDumpDate;
|
||||
private int volume;
|
||||
private String label;
|
||||
private int level;
|
||||
private String filesys;
|
||||
private String devname;
|
||||
private String hostname;
|
||||
private int flags;
|
||||
private int firstrec;
|
||||
private int ntrec;
|
||||
|
||||
DumpArchiveSummary(byte[] buffer) {
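// field offsets follow the s_spcl summary record documented in the DumpArchiveEntry class comment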
|
||||
dumpDate = 1000L * DumpArchiveUtil.convert32(buffer, 4);
|
||||
previousDumpDate = 1000L * DumpArchiveUtil.convert32(buffer, 8);
|
||||
volume = DumpArchiveUtil.convert32(buffer, 12);
|
||||
label = new String(buffer, 676, DumpArchiveConstants.LBLSIZE).trim(); // TODO default charset?
|
||||
level = DumpArchiveUtil.convert32(buffer, 692);
|
||||
filesys = new String(buffer, 696, DumpArchiveConstants.NAMELEN).trim(); // TODO default charset?
|
||||
devname = new String(buffer, 760, DumpArchiveConstants.NAMELEN).trim(); // TODO default charset?
|
||||
hostname = new String(buffer, 824, DumpArchiveConstants.NAMELEN).trim(); // TODO default charset?
|
||||
flags = DumpArchiveUtil.convert32(buffer, 888);
|
||||
firstrec = DumpArchiveUtil.convert32(buffer, 892);
|
||||
ntrec = DumpArchiveUtil.convert32(buffer, 896);
|
||||
|
||||
//extAttributes = DumpArchiveUtil.convert32(buffer, 900);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the date of this dump.
|
||||
*
|
||||
* @return the date of this dump.
|
||||
*/
|
||||
public Date getDumpDate() {
|
||||
return new Date(dumpDate);
|
||||
}
|
||||
|
||||
/**
|
||||
* Set dump date.
|
||||
*/
|
||||
public void setDumpDate(Date dumpDate) {
|
||||
this.dumpDate = dumpDate.getTime();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the date of the previous dump at a lower level.
|
||||
*
|
||||
* @return the previous dump date; may be null
|
||||
*/
|
||||
public Date getPreviousDumpDate() {
|
||||
return new Date(previousDumpDate);
|
||||
}
|
||||
|
||||
/**
|
||||
* Set previous dump date.
|
||||
*/
|
||||
public void setPreviousDumpDate(Date previousDumpDate) {
|
||||
this.previousDumpDate = previousDumpDate.getTime();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get volume (tape) number.
|
||||
*
|
||||
* @return volume (tape) number.
|
||||
*/
|
||||
public int getVolume() {
|
||||
return volume;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set volume (tape) number.
|
||||
*/
|
||||
public void setVolume(int volume) {
|
||||
this.volume = volume;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the level of this dump. This is a number between 0 and 9, inclusive,
|
||||
* and a level 0 dump is a complete dump of the partition. For any other dump
|
||||
* 'n' this dump contains all files that have changed since the last dump
|
||||
* at this level or lower. This is used to support different levels of
|
||||
* incremental backups.
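* For example, after a level 0 and a level 1 dump, a level 2 dump contains only
* the files changed since the level 1 dump.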
|
||||
*
|
||||
* @return dump level
|
||||
*/
|
||||
public int getLevel() {
|
||||
return level;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set level.
|
||||
*/
|
||||
public void setLevel(int level) {
|
||||
this.level = level;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get dump label. This may be autogenerated or it may be specified
|
||||
* by the user.
|
||||
*
|
||||
* @return dump label
|
||||
*/
|
||||
public String getLabel() {
|
||||
return label;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set dump label.
|
||||
*
|
||||
* @param label
|
||||
*/
|
||||
public void setLabel(String label) {
|
||||
this.label = label;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the last mountpoint, e.g., /home.
|
||||
*
|
||||
* @return last mountpoint
|
||||
*/
|
||||
public String getFilesystem() {
|
||||
return filesys;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the last mountpoint.
|
||||
*/
|
||||
public void setFilesystem(String filesystem) {
|
||||
this.filesys = filesystem;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the device name, e.g., /dev/sda3 or /dev/mapper/vg0-home.
|
||||
*
|
||||
* @return device name
|
||||
*/
|
||||
public String getDevname() {
|
||||
return devname;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the device name.
|
||||
*
|
||||
* @param devname
|
||||
*/
|
||||
public void setDevname(String devname) {
|
||||
this.devname = devname;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the hostname of the system where the dump was performed.
|
||||
*
|
||||
* @return hostname
|
||||
*/
|
||||
public String getHostname() {
|
||||
return hostname;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the hostname.
|
||||
*/
|
||||
public void setHostname(String hostname) {
|
||||
this.hostname = hostname;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the miscellaneous flags. See the DR_* flag values documented in {@link DumpArchiveEntry}.
|
||||
*
|
||||
* @return flags
|
||||
*/
|
||||
public int getFlags() {
|
||||
return flags;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the miscellaneous flags.
|
||||
*
|
||||
* @param flags
|
||||
*/
|
||||
public void setFlags(int flags) {
|
||||
this.flags = flags;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the inode of the first record on this volume.
|
||||
*
|
||||
* @return inode of the first record on this volume.
|
||||
*/
|
||||
public int getFirstRecord() {
|
||||
return firstrec;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the inode of the first record.
|
||||
*
|
||||
* @param firstrec
|
||||
*/
|
||||
public void setFirstRecord(int firstrec) {
|
||||
this.firstrec = firstrec;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the number of records per tape block. This is typically
|
||||
* between 10 and 32.
|
||||
*
|
||||
* @return the number of records per tape block
|
||||
*/
|
||||
public int getNTRec() {
|
||||
return ntrec;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the number of records per tape block.
|
||||
*/
|
||||
public void setNTRec(int ntrec) {
|
||||
this.ntrec = ntrec;
|
||||
}
|
||||
|
||||
/**
|
||||
* Is this the new header format? (We do not currently support the
|
||||
* old format.)
|
||||
*
|
||||
* @return true if using new header format
|
||||
*/
|
||||
public boolean isNewHeader() {
|
||||
return (flags & 0x0001) == 0x0001;
|
||||
}
|
||||
|
||||
/**
|
||||
* Is this the new inode format? (We do not currently support the
|
||||
* old format.)
|
||||
*
|
||||
* @return true if using new inode format
|
||||
*/
|
||||
public boolean isNewInode() {
|
||||
return (flags & 0x0002) == 0x0002;
|
||||
}
|
||||
|
||||
/**
|
||||
* Is this volume compressed? N.B., individual blocks may or may not be compressed.
|
||||
* The first block is never compressed.
|
||||
*
|
||||
* @return true if volume is compressed
|
||||
*/
|
||||
public boolean isCompressed() {
|
||||
return (flags & 0x0080) == 0x0080;
|
||||
}
|
||||
|
||||
/**
|
||||
* Does this volume only contain metadata?
|
||||
*
|
||||
* @return true if volume only contains meta-data
|
||||
*/
|
||||
public boolean isMetaDataOnly() {
|
||||
return (flags & 0x0100) == 0x0100;
|
||||
}
|
||||
|
||||
/**
|
||||
* Does this volume contain extended attributes?
|
||||
*
|
||||
* @return true if the volume contains extended attributes.
|
||||
*/
|
||||
public boolean isExtendedAttributes() {
|
||||
return (flags & 0x8000) == 0x8000;
|
||||
}
|
||||
|
||||
/**
|
||||
* @see Object#hashCode()
|
||||
*/
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int hash = 17;
|
||||
|
||||
if (label != null) {
|
||||
hash = label.hashCode();
|
||||
}
|
||||
|
||||
hash += 31 * dumpDate;
|
||||
|
||||
if (hostname != null) {
|
||||
hash = (31 * hostname.hashCode()) + 17;
|
||||
}
|
||||
|
||||
if (devname != null) {
|
||||
hash = (31 * devname.hashCode()) + 17;
|
||||
}
|
||||
|
||||
return hash;
|
||||
}
|
||||
|
||||
/**
|
||||
* @see Object#equals(Object)
|
||||
*/
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (o == null || !o.getClass().equals(getClass())) {
|
||||
return false;
|
||||
}
|
||||
|
||||
DumpArchiveSummary rhs = (DumpArchiveSummary) o;
|
||||
|
||||
if (dumpDate != rhs.dumpDate) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if ((getHostname() == null) ||
|
||||
!getHostname().equals(rhs.getHostname())) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if ((getDevname() == null) || !getDevname().equals(rhs.getDevname())) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,116 @@
|
|||
|
||||
package org.xbib.io.archive.dump;
|
||||
|
||||
|
||||
/**
|
||||
* Various utilities for dump archives.
|
||||
*/
|
||||
class DumpArchiveUtil {
|
||||
/**
|
||||
* Private constructor to prevent instantiation.
|
||||
*/
|
||||
private DumpArchiveUtil() {
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate checksum for buffer.
|
||||
*
|
||||
* @param buffer buffer containing tape segment header
|
||||
* @return checksum
|
||||
*/
|
||||
public static int calculateChecksum(byte[] buffer) {
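// the 256 32-bit words of a valid tape segment header (checksum field included) sum to CHECKSUM;
// return the checksum value that would make the sum come out right, for comparison in verify()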
|
||||
int calc = 0;
|
||||
|
||||
for (int i = 0; i < 256; i++) {
|
||||
calc += DumpArchiveUtil.convert32(buffer, 4 * i);
|
||||
}
|
||||
|
||||
return DumpArchiveConstants.CHECKSUM -
|
||||
(calc - DumpArchiveUtil.convert32(buffer, 28));
|
||||
}
|
||||
|
||||
/**
|
||||
* Verify that the buffer contains a tape segment header.
|
||||
*
|
||||
* @param buffer
|
||||
*/
|
||||
public static final boolean verify(byte[] buffer) {
|
||||
// verify magic. for now only accept NFS_MAGIC.
|
||||
int magic = convert32(buffer, 24);
|
||||
|
||||
if (magic != DumpArchiveConstants.NFS_MAGIC) {
|
||||
return false;
|
||||
}
|
||||
|
||||
//verify checksum...
|
||||
int checksum = convert32(buffer, 28);
|
||||
|
||||
if (checksum != calculateChecksum(buffer)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the ino associated with this buffer.
|
||||
*
|
||||
* @param buffer
|
||||
*/
|
||||
public static final int getIno(byte[] buffer) {
|
||||
return convert32(buffer, 20);
|
||||
}
|
||||
|
||||
/**
|
||||
* Read an 8-byte little-endian integer from the buffer.
|
||||
*
|
||||
* @param buffer
|
||||
* @param offset
|
||||
* @return the 8-byte entry as a long
|
||||
*/
|
||||
public static final long convert64(byte[] buffer, int offset) {
|
||||
long i = 0;
|
||||
i += (((long) buffer[offset + 7]) << 56);
|
||||
i += (((long) buffer[offset + 6] << 48) & 0x00FF000000000000L);
|
||||
i += (((long) buffer[offset + 5] << 40) & 0x0000FF0000000000L);
|
||||
i += (((long) buffer[offset + 4] << 32) & 0x000000FF00000000L);
|
||||
i += (((long) buffer[offset + 3] << 24) & 0x00000000FF000000L);
|
||||
i += (((long) buffer[offset + 2] << 16) & 0x0000000000FF0000L);
|
||||
i += (((long) buffer[offset + 1] << 8) & 0x000000000000FF00L);
|
||||
i += (buffer[offset] & 0x00000000000000FFL);
|
||||
|
||||
return i;
|
||||
}
|
||||
|
||||
/**
|
||||
* Read a 4-byte little-endian integer from the buffer.
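* For example, the byte sequence {0x78, 0x56, 0x34, 0x12} decodes to 0x12345678.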
|
||||
*
|
||||
* @param buffer
|
||||
* @param offset
|
||||
* @return the 4-byte entry as an int
|
||||
*/
|
||||
public static final int convert32(byte[] buffer, int offset) {
|
||||
int i = 0;
|
||||
i = buffer[offset + 3] << 24;
|
||||
i += (buffer[offset + 2] << 16) & 0x00FF0000;
|
||||
i += (buffer[offset + 1] << 8) & 0x0000FF00;
|
||||
i += buffer[offset] & 0x000000FF;
|
||||
|
||||
return i;
|
||||
}
|
||||
|
||||
/**
|
||||
* Read a 2-byte little-endian integer from the buffer.
|
||||
*
|
||||
* @param buffer
|
||||
* @param offset
|
||||
* @return the 2-byte entry as an int
|
||||
*/
|
||||
public static final int convert16(byte[] buffer, int offset) {
|
||||
int i = 0;
|
||||
i += (buffer[offset + 1] << 8) & 0x0000FF00;
|
||||
i += buffer[offset] & 0x000000FF;
|
||||
|
||||
return i;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,20 @@
|
|||
|
||||
package org.xbib.io.archive.dump;
|
||||
|
||||
|
||||
/**
|
||||
* Invalid Format Exception. There was an error decoding a
|
||||
* tape segment header.
|
||||
*/
|
||||
public class InvalidFormatException extends DumpArchiveException {
|
||||
|
||||
protected long offset;
|
||||
|
||||
public InvalidFormatException() {
|
||||
super("there was an error decoding a tape segment");
|
||||
}
|
||||
|
||||
public long getOffset() {
|
||||
return offset;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,15 @@
|
|||
|
||||
package org.xbib.io.archive.dump;
|
||||
|
||||
|
||||
/**
|
||||
* Short File Exception. There was an unexpected EOF when reading
|
||||
* the input stream.
|
||||
*/
|
||||
public class ShortFileException extends DumpArchiveException {
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
public ShortFileException() {
|
||||
super("unexpected EOF");
|
||||
}
|
||||
}
|
|
@ -0,0 +1,331 @@
|
|||
|
||||
package org.xbib.io.archive.dump;
|
||||
|
||||
import java.io.FilterInputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.Arrays;
|
||||
import java.util.zip.DataFormatException;
|
||||
import java.util.zip.Inflater;
|
||||
|
||||
|
||||
/**
|
||||
* Filter stream that mimics a physical tape drive capable of compressing
|
||||
* the data stream
|
||||
*/
|
||||
class TapeInputStream extends FilterInputStream {
|
||||
private byte[] blockBuffer = new byte[DumpArchiveConstants.TP_SIZE];
|
||||
private int currBlkIdx = -1;
|
||||
private int blockSize = DumpArchiveConstants.TP_SIZE;
|
||||
private int recordSize = DumpArchiveConstants.TP_SIZE;
|
||||
private int readOffset = DumpArchiveConstants.TP_SIZE;
|
||||
private boolean isCompressed = false;
|
||||
private long bytesRead = 0;
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
*/
|
||||
public TapeInputStream(InputStream in) {
|
||||
super(in);
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the DumpArchive Buffer's block size. We need to sync the block size with the
|
||||
* dump archive's actual block size since compression is handled at the
|
||||
* block level.
|
||||
*
|
||||
* @param recsPerBlock records per block
|
||||
* @param isCompressed true if the archive is compressed
|
||||
* @throws java.io.IOException if more than one block has already been read or
|
||||
* there was an error reading additional blocks.
|
||||
*/
|
||||
public void resetBlockSize(int recsPerBlock, boolean isCompressed)
|
||||
throws IOException {
|
||||
this.isCompressed = isCompressed;
|
||||
|
||||
blockSize = recordSize * recsPerBlock;
|
||||
|
||||
// save first block in case we need it again
|
||||
byte[] oldBuffer = blockBuffer;
|
||||
|
||||
// read rest of new block
|
||||
blockBuffer = new byte[blockSize];
|
||||
System.arraycopy(oldBuffer, 0, blockBuffer, 0, recordSize);
|
||||
readFully(blockBuffer, recordSize, blockSize - recordSize);
|
||||
|
||||
this.currBlkIdx = 0;
|
||||
this.readOffset = recordSize;
|
||||
}
|
||||
|
||||
/**
|
||||
* @see java.io.InputStream#available
|
||||
*/
|
||||
@Override
|
||||
public int available() throws IOException {
|
||||
if (readOffset < blockSize) {
|
||||
return blockSize - readOffset;
|
||||
}
|
||||
|
||||
return in.available();
|
||||
}
|
||||
|
||||
/**
|
||||
* @see java.io.InputStream#read()
|
||||
*/
|
||||
@Override
|
||||
public int read() throws IOException {
|
||||
throw new IllegalArgumentException(
|
||||
"all reads must be multiple of record size (" + recordSize +
|
||||
" bytes.");
|
||||
}
|
||||
|
||||
/**
|
||||
* <p>reads the full given length unless EOF is reached.</p>
|
||||
*
|
||||
* @param len length to read, must be a multiple of the stream's
|
||||
* record size
|
||||
*/
|
||||
@Override
|
||||
public int read(byte[] b, int off, int len) throws IOException {
|
||||
if ((len % recordSize) != 0) {
|
||||
throw new IllegalArgumentException(
|
||||
"all reads must be multiple of record size (" + recordSize +
|
||||
" bytes.");
|
||||
}
|
||||
|
||||
int bytes = 0;
|
||||
|
||||
while (bytes < len) {
|
||||
// we need to read from the underlying stream.
|
||||
// this will reset readOffset value.
|
||||
// return -1 if there's a problem.
|
||||
if ((readOffset == blockSize) && !readBlock(true)) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
int n = 0;
|
||||
|
||||
if ((readOffset + (len - bytes)) <= blockSize) {
|
||||
// we can read entirely from the buffer.
|
||||
n = len - bytes;
|
||||
} else {
|
||||
// copy what we can from the buffer.
|
||||
n = blockSize - readOffset;
|
||||
}
|
||||
|
||||
// copy data, increment counters.
|
||||
System.arraycopy(blockBuffer, readOffset, b, off, n);
|
||||
readOffset += n;
|
||||
bytes += n;
|
||||
off += n;
|
||||
}
|
||||
|
||||
return bytes;
|
||||
}
|
||||
|
||||
/**
|
||||
* Skip bytes. Same as read but without the arraycopy.
|
||||
* <p/>
|
||||
* <p>skips the full given length unless EOF is reached.</p>
|
||||
*
|
||||
* @param len length to skip, must be a multiple of the stream's
|
||||
* record size
|
||||
*/
|
||||
@Override
|
||||
public long skip(long len) throws IOException {
|
||||
if ((len % recordSize) != 0) {
|
||||
throw new IllegalArgumentException(
|
||||
"all reads must be multiple of record size (" + recordSize +
|
||||
" bytes.");
|
||||
}
|
||||
|
||||
long bytes = 0;
|
||||
|
||||
while (bytes < len) {
|
||||
// we need to read from the underlying stream.
|
||||
// this will reset readOffset value. We do not perform
|
||||
// any decompression if we won't eventually read the data.
|
||||
// return -1 if there's a problem.
|
||||
if ((readOffset == blockSize) &&
|
||||
!readBlock((len - bytes) < blockSize)) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
long n = 0;
|
||||
|
||||
if ((readOffset + (len - bytes)) <= blockSize) {
|
||||
// we can read entirely from the buffer.
|
||||
n = len - bytes;
|
||||
} else {
|
||||
// copy what we can from the buffer.
|
||||
n = blockSize - readOffset;
|
||||
}
|
||||
|
||||
// do not copy data but still increment counters.
|
||||
readOffset += n;
|
||||
bytes += n;
|
||||
}
|
||||
|
||||
return bytes;
|
||||
}
|
||||
|
||||
/**
|
||||
* Close the input stream.
|
||||
*
|
||||
* @throws java.io.IOException on error
|
||||
*/
|
||||
@Override
|
||||
public void close() throws IOException {
|
||||
if (in != null && in != System.in) {
|
||||
in.close();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Peek at the next record from the input stream and return the data.
|
||||
*
|
||||
* @return The record data.
|
||||
* @throws java.io.IOException on error
|
||||
*/
|
||||
public byte[] peek() throws IOException {
|
||||
// we need to read from the underlying stream. This
|
||||
// isn't a problem since it would be the first step in
|
||||
// any subsequent read() anyway.
|
||||
if ((readOffset == blockSize) && !readBlock(true)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// copy data, increment counters.
|
||||
byte[] b = new byte[recordSize];
|
||||
System.arraycopy(blockBuffer, readOffset, b, 0, b.length);
|
||||
|
||||
return b;
|
||||
}
|
||||
|
||||
/**
|
||||
* Read a record from the input stream and return the data.
|
||||
*
|
||||
* @return The record data.
|
||||
* @throws java.io.IOException on error
|
||||
*/
|
||||
public byte[] readRecord() throws IOException {
|
||||
byte[] result = new byte[recordSize];
|
||||
|
||||
if (-1 == read(result, 0, result.length)) {
|
||||
throw new ShortFileException();
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Read next block. All decompression is handled here.
|
||||
*
|
||||
* @param decompress if false the buffer will not be decompressed.
|
||||
* This is an optimization for longer seeks.
|
||||
* @return false if End-Of-File, else true
|
||||
*/
|
||||
private boolean readBlock(boolean decompress) throws IOException {
|
||||
boolean success = true;
|
||||
|
||||
if (in == null) {
|
||||
throw new IOException("input buffer is closed");
|
||||
}
|
||||
|
||||
if (!isCompressed || (currBlkIdx == -1)) {
|
||||
// file is not compressed
|
||||
success = readFully(blockBuffer, 0, blockSize);
|
||||
bytesRead += blockSize;
|
||||
} else {
|
||||
if (!readFully(blockBuffer, 0, 4)) {
|
||||
return false;
|
||||
}
|
||||
bytesRead += 4;
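// the 4 bytes just read form the per-block header: bit 0 = this block is compressed,
// bits 1-3 = compression algorithm, bits 4-31 = compressed length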
|
||||
|
||||
int h = DumpArchiveUtil.convert32(blockBuffer, 0);
|
||||
boolean compressed = (h & 0x01) == 0x01;
|
||||
|
||||
if (!compressed) {
|
||||
// file is compressed but this block is not.
|
||||
success = readFully(blockBuffer, 0, blockSize);
|
||||
bytesRead += blockSize;
|
||||
} else {
|
||||
// this block is compressed.
|
||||
int flags = (h >> 1) & 0x07;
|
||||
int length = (h >> 4) & 0x0FFFFFFF;
|
||||
byte[] compBuffer = new byte[length];
|
||||
success = readFully(compBuffer, 0, length);
|
||||
bytesRead += length;
|
||||
|
||||
if (!decompress) {
|
||||
// just in case someone reads the data.
|
||||
Arrays.fill(blockBuffer, (byte) 0);
|
||||
} else {
|
||||
switch (DumpArchiveConstants.COMPRESSION_TYPE.find(flags &
|
||||
0x03)) {
|
||||
case ZLIB:
|
||||
|
||||
try {
|
||||
Inflater inflator = new Inflater();
|
||||
inflator.setInput(compBuffer, 0, compBuffer.length);
|
||||
length = inflator.inflate(blockBuffer);
|
||||
|
||||
if (length != blockSize) {
|
||||
throw new ShortFileException();
|
||||
}
|
||||
|
||||
inflator.end();
|
||||
} catch (DataFormatException e) {
|
||||
throw new DumpArchiveException("bad data", e);
|
||||
}
|
||||
|
||||
break;
|
||||
|
||||
case BZLIB:
|
||||
throw new UnsupportedCompressionAlgorithmException(
|
||||
"BZLIB2");
|
||||
|
||||
case LZO:
|
||||
throw new UnsupportedCompressionAlgorithmException(
|
||||
"LZO");
|
||||
|
||||
default:
|
||||
throw new UnsupportedCompressionAlgorithmException();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
currBlkIdx++;
|
||||
readOffset = 0;
|
||||
|
||||
return success;
|
||||
}
|
||||
|
||||
/**
|
||||
* Read buffer
|
||||
*/
|
||||
private boolean readFully(byte[] b, int off, int len)
|
||||
throws IOException {
|
||||
int count = 0;
|
||||
|
||||
while (count < len) {
|
||||
int n = in.read(b, off + count, len - count);
|
||||
|
||||
if (n == -1) {
|
||||
throw new ShortFileException();
|
||||
}
|
||||
|
||||
count += n;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get number of bytes read.
|
||||
*/
|
||||
public long getBytesRead() {
|
||||
return bytesRead;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,14 @@
|
|||
|
||||
package org.xbib.io.archive.dump;
|
||||
|
||||
/**
|
||||
* Unrecognized Format Exception. This is either not a recognized dump archive or there's
|
||||
* a bad tape segment header.
|
||||
*/
|
||||
public class UnrecognizedFormatException extends DumpArchiveException {
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
public UnrecognizedFormatException() {
|
||||
super("this is not a recognized format.");
|
||||
}
|
||||
}
|
|
@ -0,0 +1,20 @@
|
|||
|
||||
package org.xbib.io.archive.dump;
|
||||
|
||||
/**
|
||||
* Unsupported compression algorithm. The dump archive uses an unsupported
|
||||
* compression algorithm (BZLIB2 or LZO).
|
||||
*/
|
||||
public class UnsupportedCompressionAlgorithmException
|
||||
extends DumpArchiveException {
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
public UnsupportedCompressionAlgorithmException() {
|
||||
super("this file uses an unsupported compression algorithm.");
|
||||
}
|
||||
|
||||
public UnsupportedCompressionAlgorithmException(String alg) {
|
||||
super("this file uses an unsupported compression algorithm: " + alg +
|
||||
".");
|
||||
}
|
||||
}
|
4
io-archive-jar/build.gradle
Normal file
4
io-archive-jar/build.gradle
Normal file
|
@ -0,0 +1,4 @@
|
|||
dependencies {
|
||||
api project(':io-archive')
|
||||
api project(':io-archive-zip')
|
||||
}
|
5
io-archive-jar/src/main/java/module-info.java
Normal file
5
io-archive-jar/src/main/java/module-info.java
Normal file
|
@ -0,0 +1,5 @@
|
|||
module org.xbib.io.archive.jar {
|
||||
exports org.xbib.io.archive.jar;
|
||||
requires org.xbib.io.archive;
|
||||
requires org.xbib.io.archive.zip;
|
||||
}
|
|
@ -0,0 +1,60 @@
|
|||
package org.xbib.io.archive.jar;
|
||||
|
||||
import org.xbib.io.archive.zip.ZipArchiveEntry;
|
||||
|
||||
import java.security.cert.Certificate;
|
||||
import java.util.jar.Attributes;
|
||||
import java.util.jar.JarEntry;
|
||||
import java.util.zip.ZipEntry;
|
||||
import java.util.zip.ZipException;
|
||||
|
||||
public class JarArchiveEntry extends ZipArchiveEntry {
|
||||
|
||||
private Attributes manifestAttributes = null;
|
||||
|
||||
private Certificate[] certificates = null;
|
||||
|
||||
public JarArchiveEntry() {
|
||||
super();
|
||||
}
|
||||
|
||||
public JarArchiveEntry(ZipEntry entry) throws ZipException {
|
||||
super(entry);
|
||||
}
|
||||
|
||||
public JarArchiveEntry(String name) {
|
||||
super(name);
|
||||
}
|
||||
|
||||
public JarArchiveEntry(ZipArchiveEntry entry) throws ZipException {
|
||||
super(entry);
|
||||
}
|
||||
|
||||
public JarArchiveEntry(JarEntry entry) throws ZipException {
|
||||
super(entry);
|
||||
|
||||
}
|
||||
|
||||
public Attributes getManifestAttributes() {
|
||||
return manifestAttributes;
|
||||
}
|
||||
|
||||
public Certificate[] getCertificates() {
|
||||
if (certificates != null) {
|
||||
Certificate[] certs = new Certificate[certificates.length];
|
||||
System.arraycopy(certificates, 0, certs, 0, certs.length);
|
||||
return certs;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
return super.equals(o);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return super.hashCode();
|
||||
}
|
||||
}
|
|
@ -0,0 +1,30 @@
|
|||
|
||||
package org.xbib.io.archive.jar;
|
||||
|
||||
import org.xbib.io.archive.entry.ArchiveEntry;
|
||||
import org.xbib.io.archive.zip.ZipArchiveEntry;
|
||||
import org.xbib.io.archive.zip.ZipArchiveInputStream;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
|
||||
/**
|
||||
* Implements an input stream that can read entries from jar files.
|
||||
*/
|
||||
public class JarArchiveInputStream extends ZipArchiveInputStream {
|
||||
|
||||
public JarArchiveInputStream(final InputStream inputStream) {
|
||||
super(inputStream);
|
||||
}
|
||||
|
||||
public JarArchiveEntry getNextJarEntry() throws IOException {
|
||||
ZipArchiveEntry entry = getNextZipEntry();
|
||||
return entry == null ? null : new JarArchiveEntry(entry);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ArchiveEntry getNextEntry() throws IOException {
|
||||
return getNextJarEntry();
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,31 @@
|
|||
|
||||
package org.xbib.io.archive.jar;
|
||||
|
||||
import org.xbib.io.archive.zip.JarMarker;
|
||||
import org.xbib.io.archive.zip.ZipArchiveOutputStream;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.OutputStream;
|
||||
|
||||
/**
|
||||
* Subclass that adds a special extra field to the very first entry
|
||||
* which allows the created archive to be used as an executable jar on
|
||||
* Solaris.
|
||||
*/
|
||||
public class JarArchiveOutputStream extends ZipArchiveOutputStream<JarArchiveEntry> {
|
||||
|
||||
private boolean jarMarkerAdded = false;
|
||||
|
||||
public JarArchiveOutputStream(final OutputStream out) {
|
||||
super(out);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void putArchiveEntry(JarArchiveEntry ze) throws IOException {
|
||||
if (!jarMarkerAdded) {
|
||||
ze.addAsFirstExtraField(JarMarker.getInstance());
|
||||
jarMarkerAdded = true;
|
||||
}
|
||||
super.putArchiveEntry(ze);
|
||||
}
|
||||
}
|
|
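The output stream above only overrides putArchiveEntry to attach the JarMarker extra field to the first entry written. A hedged write sketch, not part of this commit: only putArchiveEntry is visible in this diff, so write, closeArchiveEntry, and finish are assumptions carried over from the Commons Compress style of the Zip base class.

import org.xbib.io.archive.jar.JarArchiveEntry;
import org.xbib.io.archive.jar.JarArchiveOutputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

public class JarWriteExample {
    public static void main(String[] args) throws IOException {
        try (FileOutputStream out = new FileOutputStream("example.jar")) {
            JarArchiveOutputStream jar = new JarArchiveOutputStream(out);
            JarArchiveEntry entry = new JarArchiveEntry("hello.txt");
            jar.putArchiveEntry(entry);          // JarMarker is added here, first entry only
            byte[] data = "hello".getBytes(StandardCharsets.UTF_8);
            jar.write(data);                     // assumed OutputStream-style write
            jar.closeArchiveEntry();             // assumed API, not shown in this diff
            jar.finish();                        // assumed API, not shown in this diff
        }
    }
}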
@ -0,0 +1,24 @@
|
|||
package org.xbib.io.archive.jar;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import java.io.InputStream;
|
||||
|
||||
public class JarTest {
|
||||
|
||||
@Test
|
||||
public void testJar() throws Exception {
|
||||
InputStream in = getClass().getResourceAsStream("test.jar");
|
||||
JarArchiveInputStream jarArchiveInputStream = new JarArchiveInputStream(in);
|
||||
byte[] buffer = new byte[1024];
|
||||
long total = 0L;
|
||||
while ((jarArchiveInputStream.getNextEntry()) != null) {
|
||||
int len = 0;
|
||||
while ((len = jarArchiveInputStream.read(buffer)) > 0) {
|
||||
total += len;
|
||||
}
|
||||
}
|
||||
assertEquals(1813L, total);
|
||||
jarArchiveInputStream.close();
|
||||
}
|
||||
}
|
Binary file not shown.
3
io-archive-tar/build.gradle
Normal file
|
@ -0,0 +1,3 @@
|
|||
dependencies {
|
||||
api project(':io-archive')
|
||||
}
|
4
io-archive-tar/src/main/java/module-info.java
Normal file
|
@ -0,0 +1,4 @@
|
|||
module org.xbib.io.archive.tar {
|
||||
exports org.xbib.io.archive.tar;
|
||||
requires org.xbib.io.archive;
|
||||
}
|
|
@ -0,0 +1,842 @@
|
|||
package org.xbib.io.archive.tar;
|
||||
|
||||
import org.xbib.io.archive.entry.ArchiveEntry;
|
||||
import org.xbib.io.archive.util.ArchiveUtils;
|
||||
import org.xbib.io.archive.entry.ArchiveEntryEncoding;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.math.BigInteger;
|
||||
import java.util.Date;
|
||||
|
||||
/**
|
||||
* This class represents an entry in a Tar archive.
|
||||
*/
|
||||
public class TarArchiveEntry implements TarConstants, ArchiveEntry {
|
||||
|
||||
/**
|
||||
* Maximum length of a user's name in the tar file
|
||||
*/
|
||||
private static final int MAX_NAMELEN = 31;
|
||||
|
||||
/**
|
||||
* Default permissions bits for directories
|
||||
*/
|
||||
private static final int DEFAULT_DIR_MODE = 040755;
|
||||
|
||||
/**
|
||||
* Default permissions bits for files
|
||||
*/
|
||||
private static final int DEFAULT_FILE_MODE = 0100644;
|
||||
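// Added note, not in the original commit: these defaults combine POSIX file-type bits
// with permission bits, both written in octal:
//   040755  = 040000  (directory)    + 0755 (rwxr-xr-x)
//   0100644 = 0100000 (regular file) + 0644 (rw-r--r--)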
|
||||
/**
|
||||
* Convert millis to seconds
|
||||
*/
|
||||
private static final int MILLIS_PER_SECOND = 1000;
|
||||
|
||||
/**
|
||||
* The entry's name.
|
||||
*/
|
||||
private String name;
|
||||
|
||||
/**
|
||||
* The entry's permission mode.
|
||||
*/
|
||||
private int mode;
|
||||
|
||||
/**
|
||||
* The entry's user id.
|
||||
*/
|
||||
private int userId;
|
||||
|
||||
/**
|
||||
* The entry's group id.
|
||||
*/
|
||||
private int groupId;
|
||||
|
||||
/**
|
||||
* The entry's size.
|
||||
*/
|
||||
private long size;
|
||||
|
||||
/**
|
||||
* The entry's modification time.
|
||||
*/
|
||||
private long modTime;
|
||||
|
||||
/**
|
||||
* The entry's link flag.
|
||||
*/
|
||||
private byte linkFlag;
|
||||
|
||||
/**
|
||||
* The entry's link name.
|
||||
*/
|
||||
private String linkName;
|
||||
|
||||
/**
|
||||
* The version of the format
|
||||
*/
|
||||
private String version;
|
||||
|
||||
/**
|
||||
* The entry's user name.
|
||||
*/
|
||||
private String userName;
|
||||
|
||||
/**
|
||||
* The entry's group name.
|
||||
*/
|
||||
private String groupName;
|
||||
|
||||
/**
|
||||
* The entry's major device number.
|
||||
*/
|
||||
private int devMajor;
|
||||
|
||||
/**
|
||||
* The entry's minor device number.
|
||||
*/
|
||||
private int devMinor;
|
||||
|
||||
/**
|
||||
* If an extension sparse header follows.
|
||||
*/
|
||||
private boolean isExtended;
|
||||
|
||||
/**
|
||||
* The entry's real size in case of a sparse file.
|
||||
*/
|
||||
private long realSize;
|
||||
|
||||
private boolean isDir;
|
||||
|
||||
/**
|
||||
* Construct an empty entry and prepare the header values.
|
||||
*/
|
||||
public TarArchiveEntry() {
|
||||
this.version = VERSION_POSIX;
|
||||
this.name = "";
|
||||
this.linkName = "";
|
||||
this.linkFlag = LF_GNUTYPE_LONGNAME;
|
||||
String user = System.getProperty("user.name", "");
|
||||
if (user.length() > MAX_NAMELEN) {
|
||||
user = user.substring(0, MAX_NAMELEN);
|
||||
}
|
||||
this.userName = user;
|
||||
this.groupName = "";
|
||||
this.userId = 0;
|
||||
this.groupId = 0;
|
||||
this.mode = DEFAULT_FILE_MODE;
|
||||
}
|
||||
|
||||
/**
|
||||
* Construct an entry with only a name. This allows the programmer
|
||||
* to construct the entry's header "by hand".
|
||||
*
|
||||
* @param name the entry name
|
||||
*/
|
||||
public TarArchiveEntry(String name) {
|
||||
this(name, false);
|
||||
}
|
||||
|
||||
/**
|
||||
* Construct an entry with only a name. This allows the programmer
|
||||
* to construct the entry's header "by hand".
|
||||
*
|
||||
* @param name the entry name
|
||||
* @param preserveLeadingSlashes whether to allow leading slashes
|
||||
* in the name.
|
||||
*/
|
||||
public TarArchiveEntry(String name, boolean preserveLeadingSlashes) {
|
||||
this();
|
||||
name = ArchiveUtils.normalizeFileName(name, preserveLeadingSlashes);
|
||||
this.name = name;
|
||||
boolean isDir = name.endsWith("/");
|
||||
this.mode = isDir ? DEFAULT_DIR_MODE : DEFAULT_FILE_MODE;
|
||||
this.linkFlag = isDir ? LF_DIR : LF_NORMAL;
|
||||
this.devMajor = 0;
|
||||
this.devMinor = 0;
|
||||
this.userId = 0;
|
||||
this.groupId = 0;
|
||||
this.size = 0;
|
||||
this.modTime = (new Date()).getTime() / MILLIS_PER_SECOND;
|
||||
this.linkName = "";
|
||||
this.userName = "";
|
||||
this.groupName = "";
|
||||
}
|
||||
|
||||
/**
|
||||
* Construct an entry with a name and a link flag.
|
||||
*
|
||||
* @param name the entry name
|
||||
* @param linkFlag the entry link flag.
|
||||
*/
|
||||
public TarArchiveEntry(String name, byte linkFlag) {
|
||||
this(name);
|
||||
this.linkFlag = linkFlag;
|
||||
if (linkFlag == LF_GNUTYPE_LONGNAME) {
|
||||
version = VERSION_GNU_SPACE;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Construct an entry from an archive's header bytes.
|
||||
*
|
||||
*
|
||||
* @param headerBuf The header bytes from a tar archive entry.
|
||||
* @param encoding encoding to use for file names
|
||||
* @throws IllegalArgumentException if any of the numeric fields have an invalid format
|
||||
*/
|
||||
public TarArchiveEntry(byte[] headerBuf, ArchiveEntryEncoding encoding) throws IOException {
|
||||
this();
|
||||
parseTarHeader(headerBuf, encoding);
|
||||
}
|
||||
|
||||
/**
|
||||
* Determine if the two entries are equal. Equality is determined
|
||||
* by the header names being equal.
|
||||
*
|
||||
* @param it Entry to be checked for equality.
|
||||
* @return True if the entries are equal.
|
||||
*/
|
||||
public boolean equals(TarArchiveEntry it) {
|
||||
return getName().equals(it.getName());
|
||||
}
|
||||
|
||||
/**
|
||||
* Determine if the two entries are equal. Equality is determined
|
||||
* by the header names being equal.
|
||||
*
|
||||
* @param it Entry to be checked for equality.
|
||||
* @return True if the entries are equal.
|
||||
*/
|
||||
@Override
|
||||
public boolean equals(Object it) {
|
||||
return !(it == null || getClass() != it.getClass()) && equals((TarArchiveEntry) it);
|
||||
}
|
||||
|
||||
/**
|
||||
* Hashcodes are based on entry names.
|
||||
*
|
||||
* @return the entry hashcode
|
||||
*/
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return getName().hashCode();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get this entry's name.
|
||||
*
|
||||
* @return This entry's name.
|
||||
*/
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set this entry's name.
|
||||
*
|
||||
* @param name This entry's new name.
|
||||
*/
|
||||
public TarArchiveEntry setName(String name) {
|
||||
this.name = ArchiveUtils.normalizeFileName(name, false);
|
||||
this.isDir = name.endsWith("/");
|
||||
this.mode = isDir ? DEFAULT_DIR_MODE : DEFAULT_FILE_MODE;
|
||||
this.linkFlag = isDir ? LF_DIR : LF_NORMAL;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set this entry's modification time
|
||||
*
|
||||
* @param date This entry's new modification time
|
||||
*/
|
||||
public TarArchiveEntry setLastModified(Date date) {
|
||||
modTime = date.getTime() / MILLIS_PER_SECOND;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Date getLastModified() {
|
||||
return new Date(modTime * MILLIS_PER_SECOND);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isDirectory() {
|
||||
return isDir;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set this entry's file size.
|
||||
*
|
||||
* @param size This entry's new file size.
|
||||
* @throws IllegalArgumentException if the size is < 0.
|
||||
*/
|
||||
public TarArchiveEntry setEntrySize(long size) {
|
||||
if (size < 0) {
|
||||
throw new IllegalArgumentException("size is out of range: " + size);
|
||||
}
|
||||
this.size = size;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get this entry's file size.
|
||||
*
|
||||
* @return This entry's file size.
|
||||
*/
|
||||
public long getEntrySize() {
|
||||
return size;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the mode for this entry
|
||||
*
|
||||
* @param mode the mode for this entry
|
||||
*/
|
||||
public void setMode(int mode) {
|
||||
this.mode = mode;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get this entry's link name.
|
||||
*
|
||||
* @return This entry's link name.
|
||||
*/
|
||||
public String getLinkName() {
|
||||
return linkName;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set this entry's link name.
|
||||
*
|
||||
* @param link the link name to use.
|
||||
*/
|
||||
public void setLinkName(String link) {
|
||||
this.linkName = link;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get this entry's user id.
|
||||
*
|
||||
* @return This entry's user id.
|
||||
*/
|
||||
public int getUserId() {
|
||||
return userId;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set this entry's user id.
|
||||
*
|
||||
* @param userId This entry's new user id.
|
||||
*/
|
||||
public void setUserId(int userId) {
|
||||
this.userId = userId;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get this entry's group id.
|
||||
*
|
||||
* @return This entry's group id.
|
||||
*/
|
||||
public int getGroupId() {
|
||||
return groupId;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set this entry's group id.
|
||||
*
|
||||
* @param groupId This entry's new group id.
|
||||
*/
|
||||
public void setGroupId(int groupId) {
|
||||
this.groupId = groupId;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get this entry's user name.
|
||||
*
|
||||
* @return This entry's user name.
|
||||
*/
|
||||
public String getUserName() {
|
||||
return userName;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set this entry's user name.
|
||||
*
|
||||
* @param userName This entry's new user name.
|
||||
*/
|
||||
public void setUserName(String userName) {
|
||||
this.userName = userName;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get this entry's group name.
|
||||
*
|
||||
* @return This entry's group name.
|
||||
*/
|
||||
public String getGroupName() {
|
||||
return groupName;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set this entry's group name.
|
||||
*
|
||||
* @param groupName This entry's new group name.
|
||||
*/
|
||||
public void setGroupName(String groupName) {
|
||||
this.groupName = groupName;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get this entry's mode.
|
||||
*
|
||||
* @return This entry's mode.
|
||||
*/
|
||||
public int getMode() {
|
||||
return mode;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Get this entry's major device number.
|
||||
*
|
||||
* @return This entry's major device number.
|
||||
*/
|
||||
public int getDevMajor() {
|
||||
return devMajor;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set this entry's major device number.
|
||||
*
|
||||
* @param devNo This entry's major device number.
|
||||
* @throws IllegalArgumentException if the devNo is < 0.
|
||||
*/
|
||||
public void setDevMajor(int devNo) {
|
||||
if (devNo < 0) {
|
||||
throw new IllegalArgumentException("Major device number is out of "
|
||||
+ "range: " + devNo);
|
||||
}
|
||||
this.devMajor = devNo;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get this entry's minor device number.
|
||||
*
|
||||
* @return This entry's minor device number.
|
||||
*/
|
||||
public int getDevMinor() {
|
||||
return devMinor;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set this entry's minor device number.
|
||||
*
|
||||
* @param devNo This entry's minor device number.
|
||||
* @throws IllegalArgumentException if the devNo is < 0.
|
||||
*/
|
||||
public void setDevMinor(int devNo) {
|
||||
if (devNo < 0) {
|
||||
throw new IllegalArgumentException("Minor device number is out of " + "range: " + devNo);
|
||||
}
|
||||
this.devMinor = devNo;
|
||||
}
|
||||
|
||||
/**
|
||||
* Indicates, for a sparse file, whether an extension sparse header
|
||||
* follows.
|
||||
*
|
||||
* @return true if an extension sparse header follows.
|
||||
*/
|
||||
public boolean isExtended() {
|
||||
return isExtended;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get this entry's real file size in case of a sparse file.
|
||||
*
|
||||
* @return This entry's real file size.
|
||||
*/
|
||||
public long getRealSize() {
|
||||
return realSize;
|
||||
}
|
||||
|
||||
/**
|
||||
* Indicate if this entry is a GNU sparse block
|
||||
*
|
||||
* @return true if this is a sparse extension provided by GNU tar
|
||||
*/
|
||||
public boolean isGNUSparse() {
|
||||
return linkFlag == LF_GNUTYPE_SPARSE;
|
||||
}
|
||||
|
||||
/**
|
||||
* Indicate if this entry is a GNU long name block
|
||||
*
|
||||
* @return true if this is a long name extension provided by GNU tar
|
||||
*/
|
||||
public boolean isGNULongNameEntry() {
|
||||
return linkFlag == LF_GNUTYPE_LONGNAME && GNU_LONGLINK.equals(name);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if this is a Pax header.
|
||||
*
|
||||
* @return {@code true} if this is a Pax header.
|
||||
*/
|
||||
public boolean isPaxHeader() {
|
||||
return linkFlag == LF_PAX_EXTENDED_HEADER_LC || linkFlag == LF_PAX_EXTENDED_HEADER_UC;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if this is a global Pax header.
|
||||
*
|
||||
* @return {@code true} if this is a global Pax header.
|
||||
*/
|
||||
public boolean isGlobalPaxHeader() {
|
||||
return linkFlag == LF_PAX_GLOBAL_EXTENDED_HEADER;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if this is a symbolic link entry.
|
||||
*/
|
||||
public boolean isSymbolicLink() {
|
||||
return linkFlag == LF_SYMLINK;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if this is a link entry.
|
||||
*/
|
||||
public boolean isLink() {
|
||||
return linkFlag == LF_LINK;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if this is a character device entry.
|
||||
*/
|
||||
public boolean isCharacterDevice() {
|
||||
return linkFlag == LF_CHR;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if this is a block device entry.
|
||||
*/
|
||||
public boolean isBlockDevice() {
|
||||
return linkFlag == LF_BLK;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if this is a FIFO (pipe) entry.
|
||||
*/
|
||||
public boolean isFIFO() {
|
||||
return linkFlag == LF_FIFO;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse an entry's header information from a header buffer.
|
||||
*
|
||||
* @param header The tar entry header buffer to get information from.
|
||||
* @param encoding encoding to use for file names
|
||||
* @throws IllegalArgumentException if any of the numeric fields
|
||||
* have an invalid format
|
||||
*/
|
||||
public void parseTarHeader(byte[] header, ArchiveEntryEncoding encoding)
|
||||
throws IOException {
|
||||
parseTarHeader(header, encoding, false);
|
||||
}
|
||||
|
||||
private void parseTarHeader(byte[] header, ArchiveEntryEncoding encoding, final boolean oldStyle)
|
||||
throws IOException {
|
||||
int offset = 0;
|
||||
int type = evaluateType(header);
|
||||
name = parseFileName(header);
|
||||
offset += NAMELEN;
|
||||
mode = (int) parseOctalOrBinary(header, offset, MODELEN);
|
||||
offset += MODELEN;
|
||||
userId = (int) parseOctalOrBinary(header, offset, UIDLEN);
|
||||
offset += UIDLEN;
|
||||
groupId = (int) parseOctalOrBinary(header, offset, GIDLEN);
|
||||
offset += GIDLEN;
|
||||
if (type == GNU_FORMAT) {
|
||||
size = getSize(header, offset, SIZELEN);
|
||||
} else {
|
||||
size = parseOctalOrBinary(header, offset, SIZELEN);
|
||||
}
|
||||
offset += SIZELEN;
|
||||
modTime = parseOctalOrBinary(header, offset, MODTIMELEN);
|
||||
offset += MODTIMELEN;
|
||||
offset += CHKSUMLEN;
|
||||
linkFlag = header[offset++];
|
||||
linkName = oldStyle ? parseName(header, offset, NAMELEN) : parseName(header, offset, NAMELEN, encoding);
|
||||
offset += NAMELEN;
|
||||
switch (type) {
|
||||
case UNIX_FORMAT: {
|
||||
offset += ATIMELEN_GNU;
|
||||
offset += CTIMELEN_GNU;
|
||||
offset += OFFSETLEN_GNU;
|
||||
offset += LONGNAMESLEN_GNU;
|
||||
offset += PAD2LEN_GNU;
|
||||
offset += SPARSELEN_GNU;
|
||||
isExtended = parseBoolean(header, offset);
|
||||
offset += ISEXTENDEDLEN_GNU;
|
||||
realSize = parseOctal(header, offset, REALSIZELEN_GNU);
|
||||
offset += REALSIZELEN_GNU;
|
||||
break;
|
||||
}
|
||||
case POSIX_FORMAT: {
|
||||
parseName(header, offset, MAGICLEN); // magic
|
||||
offset += MAGICLEN;
|
||||
version = parseName(header, offset, VERSIONLEN);
|
||||
offset += VERSIONLEN;
|
||||
userName = oldStyle ? parseName(header, offset, UNAMELEN) : parseName(header, offset, UNAMELEN, encoding);
|
||||
offset += UNAMELEN;
|
||||
groupName = oldStyle ? parseName(header, offset, GNAMELEN) : parseName(header, offset, GNAMELEN, encoding);
|
||||
offset += GNAMELEN;
|
||||
devMajor = (int) parseOctalOrBinary(header, offset, DEVLEN);
|
||||
offset += DEVLEN;
|
||||
devMinor = (int) parseOctalOrBinary(header, offset, DEVLEN);
|
||||
offset += DEVLEN;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Evaluate an entry's header format from a header buffer.
|
||||
*
|
||||
* @param header The tar entry header buffer to evaluate the format for.
|
||||
* @return format type
|
||||
*/
|
||||
private int evaluateType(byte[] header) {
|
||||
if (ArchiveUtils.matchAsciiBuffer(MAGIC_UNIX, header, MAGIC_OFFSET, MAGICLEN)) {
|
||||
return UNIX_FORMAT;
|
||||
}
|
||||
if (ArchiveUtils.matchAsciiBuffer(MAGIC_POSIX, header, MAGIC_OFFSET, MAGICLEN)) {
|
||||
return POSIX_FORMAT;
|
||||
}
|
||||
if (ArchiveUtils.matchAsciiBuffer(MAGIC_GNU, header, MAGIC_OFFSET, MAGICLEN)) {
|
||||
return GNU_FORMAT;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse an octal string from a buffer.
|
||||
* <p>Leading spaces are ignored.
|
||||
* The buffer must contain a trailing space or NUL,
|
||||
* and may contain an additional trailing space or NUL.</p>
|
||||
* <p>The input buffer is allowed to contain all NULs,
|
||||
* in which case the method returns 0L
|
||||
* (this allows for missing fields).</p>
|
||||
* <p>To work around some tar implementations that insert a
|
||||
* leading NUL this method returns 0 if it detects a leading NUL.</p>
|
||||
*
|
||||
* @param buffer The buffer from which to parse.
|
||||
* @param offset The offset into the buffer from which to parse.
|
||||
* @param length The maximum number of bytes to parse - must be at least 2 bytes.
|
||||
* @return The long value of the octal string.
|
||||
* @throws IllegalArgumentException if the trailing space/NUL is missing or if an invalid byte is detected.
|
||||
*/
|
||||
private long parseOctal(final byte[] buffer, final int offset, final int length) {
|
||||
long result = 0;
|
||||
int end = offset + length;
|
||||
int start = offset;
|
||||
if (length < 2) {
|
||||
throw new IllegalArgumentException("Length " + length + " must be at least 2");
|
||||
}
|
||||
if (buffer[start] == 0) {
|
||||
return 0L;
|
||||
}
|
||||
while (start < end) {
|
||||
if (buffer[start] == ' ') {
|
||||
start++;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
byte trailer;
|
||||
trailer = buffer[end - 1];
|
||||
if (trailer == 0 || trailer == ' ') {
|
||||
end--;
|
||||
} else {
|
||||
throw new IllegalArgumentException(exceptionMessage(buffer, offset, length, end - 1, trailer));
|
||||
}
|
||||
trailer = buffer[end - 1];
|
||||
if (trailer == 0 || trailer == ' ') {
|
||||
end--;
|
||||
}
|
||||
for (; start < end; start++) {
|
||||
final byte currentByte = buffer[start];
|
||||
if (currentByte < '0' || currentByte > '7') {
|
||||
throw new IllegalArgumentException(
|
||||
exceptionMessage(buffer, offset, length, start, currentByte));
|
||||
}
|
||||
result = (result << 3) + (currentByte - '0'); // convert from ASCII
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
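// Worked example, added comment not in the original commit: the ASCII field
// "0000755 " parses as octal 755 = 7*64 + 5*8 + 5 = 493 (rwxr-xr-x); the loop above
// first skips leading spaces and strips the trailing space/NUL.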
|
||||
/**
|
||||
* Compute the value contained in a byte buffer. If the most
|
||||
* significant bit of the first byte in the buffer is set, this
|
||||
* bit is ignored and the rest of the buffer is interpreted as a
|
||||
* binary number. Otherwise, the buffer is interpreted as an
|
||||
* octal number as per the parseOctal function above.
|
||||
*
|
||||
* @param buffer The buffer from which to parse.
|
||||
* @param offset The offset into the buffer from which to parse.
|
||||
* @param length The maximum number of bytes to parse.
|
||||
* @return The long value of the octal or binary string.
|
||||
* @throws IllegalArgumentException if the trailing space/NUL is
|
||||
* missing or an invalid byte is detected in an octal number, or
|
||||
* if a binary number would exceed the size of a signed long
|
||||
* 64-bit integer.
|
||||
*/
|
||||
private long parseOctalOrBinary(final byte[] buffer, final int offset, final int length) {
|
||||
if ((buffer[offset] & 0x80) == 0) {
|
||||
return parseOctal(buffer, offset, length);
|
||||
}
|
||||
final boolean negative = buffer[offset] == (byte) 0xff;
|
||||
if (length < 9) {
|
||||
return parseBinaryLong(buffer, offset, length, negative);
|
||||
}
|
||||
return parseBinaryBigInteger(buffer, offset, length, negative);
|
||||
}
|
||||
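// Worked example, added comment not in the original commit: a field whose first byte has
// the high bit set (e.g. 0x80) is base-256 encoded rather than octal. For an 8-byte field
// {0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00}, parseBinaryLong reads the remaining
// seven bytes big-endian and yields 256; fields of nine or more bytes are routed through
// parseBinaryBigInteger instead.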
|
||||
private long parseBinaryLong(final byte[] buffer, final int offset, final int length, final boolean negative) {
|
||||
if (length >= 9) {
|
||||
throw new IllegalArgumentException("At offset " + offset + ", "
|
||||
+ length + " byte binary number"
|
||||
+ " exceeds maximum signed long"
|
||||
+ " value");
|
||||
}
|
||||
long val = 0;
|
||||
for (int i = 1; i < length; i++) {
|
||||
val = (val << 8) + (buffer[offset + i] & 0xff);
|
||||
}
|
||||
if (negative) {
|
||||
// 2's complement
|
||||
val--;
|
||||
val ^= ((long) Math.pow(2, (length - 1) * 8) - 1);
|
||||
}
|
||||
return negative ? -val : val;
|
||||
}
|
||||
|
||||
private long parseBinaryBigInteger(final byte[] buffer, final int offset, final int length, final boolean negative) {
|
||||
byte[] remainder = new byte[length - 1];
|
||||
System.arraycopy(buffer, offset + 1, remainder, 0, length - 1);
|
||||
BigInteger val = new BigInteger(remainder);
|
||||
if (negative) {
|
||||
// 2's complement
|
||||
val = val.add(BigInteger.valueOf(-1)).not();
|
||||
}
|
||||
if (val.bitLength() > 63) {
|
||||
throw new IllegalArgumentException("At offset " + offset + ", "
|
||||
+ length + " byte binary number"
|
||||
+ " exceeds maximum signed long"
|
||||
+ " value");
|
||||
}
|
||||
return negative ? -val.longValue() : val.longValue();
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse a boolean byte from a buffer.
|
||||
* Only the byte at the given offset is inspected: a value of 1 means {@code true},
|
||||
* anything else means {@code false}.
|
||||
*
|
||||
* @param buffer The buffer from which to parse.
|
||||
* @param offset The offset into the buffer from which to parse.
|
||||
* @return The boolean value of the bytes.
|
||||
*
|
||||
*/
|
||||
private boolean parseBoolean(final byte[] buffer, final int offset) {
|
||||
return buffer[offset] == 1;
|
||||
}
|
||||
|
||||
private String exceptionMessage(byte[] buffer, final int offset, final int length, int current, final byte currentByte) {
|
||||
String string = new String(buffer, offset, length); // TODO default charset?
|
||||
string = string.replaceAll("\0", "{NUL}"); // Replace NULs to allow string to be printed
|
||||
return "Invalid byte " + currentByte + " at offset " + (current - offset) + " in '" + string + "' len=" + length;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse an entry name from a buffer.
|
||||
* Parsing stops when a NUL is found
|
||||
* or the buffer length is reached.
|
||||
*
|
||||
* @param buffer The buffer from which to parse.
|
||||
* @param offset The offset into the buffer from which to parse.
|
||||
* @param length The maximum number of bytes to parse.
|
||||
* @return The entry name.
|
||||
*/
|
||||
private String parseName(byte[] buffer, final int offset, final int length) {
|
||||
try {
|
||||
return parseName(buffer, offset, length, ArchiveUtils.DEFAULT_ENCODING);
|
||||
} catch (IOException ex) {
|
||||
try {
|
||||
return parseName(buffer, offset, length, ArchiveUtils.FALLBACK_ENCODING);
|
||||
} catch (IOException ex2) {
|
||||
// impossible
|
||||
throw new RuntimeException(ex2);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse an entry name from a buffer.
|
||||
* Parsing stops when a NUL is found
|
||||
* or the buffer length is reached.
|
||||
*
|
||||
* @param buffer The buffer from which to parse.
|
||||
* @param offset The offset into the buffer from which to parse.
|
||||
* @param length The maximum number of bytes to parse.
|
||||
* @param encoding name of the encoding to use for file names
|
||||
* @return The entry name.
|
||||
*/
|
||||
private String parseName(byte[] buffer, final int offset, final int length, final ArchiveEntryEncoding encoding) throws IOException {
|
||||
int len = length;
|
||||
for (; len > 0; len--) {
|
||||
if (buffer[offset + len - 1] != 0) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (len > 0) {
|
||||
byte[] b = new byte[len];
|
||||
System.arraycopy(buffer, offset, b, 0, len);
|
||||
return encoding.decode(b);
|
||||
}
|
||||
return "";
|
||||
}
|
||||
|
||||
private long getSize(byte[] header, int offset, int length) {
|
||||
long test = parseOctal(header, offset, length);
|
||||
if (test <= 0 && header[offset] == (byte) 128) {
|
||||
byte[] last = new byte[length];
|
||||
System.arraycopy(header, offset, last, 0, length);
|
||||
last[0] = (byte) 0;
|
||||
long rSize = new BigInteger(last).longValue();
|
||||
last = null;
|
||||
return rSize;
|
||||
}
|
||||
return test;
|
||||
}
|
||||
|
||||
private String parseFileName(byte[] header) {
|
||||
StringBuilder result = new StringBuilder(256);
|
||||
// If header[345] is not equal to zero, then it is the "prefix"
|
||||
// that 'ustar' defines. It must be prepended to the "normal"
|
||||
// name field. We are responsible for the separating '/'.
|
||||
if (header[345] != 0) {
|
||||
for (int i = 345; i < 500 && header[i] != 0; ++i) {
|
||||
result.append((char) header[i]);
|
||||
}
|
||||
result.append("/");
|
||||
}
|
||||
for (int i = 0; i < 100 && header[i] != 0; ++i) {
|
||||
result.append((char) header[i]);
|
||||
}
|
||||
return result.toString();
|
||||
}
|
||||
}
|
||||
|
|
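A short usage sketch for the entry class above, not part of this commit; the path and metadata values are illustrative:

import org.xbib.io.archive.tar.TarArchiveEntry;

public class TarEntryExample {
    public static TarArchiveEntry describe() {
        TarArchiveEntry entry = new TarArchiveEntry("docs/readme.txt");
        entry.setEntrySize(1024L);                    // negative sizes throw IllegalArgumentException
        entry.setLastModified(new java.util.Date()); // stored with one-second resolution
        entry.setUserName("builder");
        entry.setGroupName("users");
        entry.setMode(0100644);                       // regular file, rw-r--r--
        return entry;
    }
}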
@ -0,0 +1,434 @@
|
|||
package org.xbib.io.archive.tar;
|
||||
|
||||
import org.xbib.io.archive.stream.ArchiveInputStream;
|
||||
import org.xbib.io.archive.entry.ArchiveEntryEncoding;
|
||||
import org.xbib.io.archive.entry.ArchiveEntryEncodingHelper;
|
||||
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.nio.charset.Charset;
|
||||
import java.util.Arrays;
|
||||
import java.util.Date;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
|
||||
public class TarArchiveInputStream extends ArchiveInputStream<TarArchiveEntry> implements TarConstants {
|
||||
|
||||
private final ArchiveEntryEncoding encoding;
|
||||
|
||||
private final InputStream inStream;
|
||||
|
||||
private final int blockSize;
|
||||
|
||||
private final int recordSize;
|
||||
|
||||
private final int recsPerBlock;
|
||||
|
||||
private final byte[] blockBuffer;
|
||||
|
||||
private byte[] readBuf;
|
||||
|
||||
private boolean hasHitEOF;
|
||||
|
||||
private long entrySize;
|
||||
|
||||
private long entryOffset;
|
||||
|
||||
private TarArchiveEntry entry;
|
||||
|
||||
private int currRecIdx;
|
||||
|
||||
/**
|
||||
* Constructor for TarArchiveInputStream.
|
||||
*
|
||||
* @param is the input stream to use
|
||||
*/
|
||||
public TarArchiveInputStream(InputStream is) {
|
||||
this.encoding = ArchiveEntryEncodingHelper.getEncoding(null);
|
||||
this.readBuf = null;
|
||||
this.hasHitEOF = false;
|
||||
this.inStream = is;
|
||||
this.blockSize = DEFAULT_BLOCK_SIZE;
|
||||
this.recordSize = DEFAULT_RECORD_SIZE;
|
||||
this.recsPerBlock = this.blockSize / this.recordSize;
|
||||
this.blockBuffer = new byte[this.blockSize];
|
||||
this.currRecIdx = this.recsPerBlock;
|
||||
}
|
||||
|
||||
/**
|
||||
* Closes this stream
|
||||
*
|
||||
* @throws IOException on error
|
||||
*/
|
||||
@Override
|
||||
public void close() throws IOException {
|
||||
if (inStream != null) {
|
||||
if (inStream != System.in) {
|
||||
inStream.close();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the record size
|
||||
*
|
||||
* @return the record size.
|
||||
*/
|
||||
public int getRecordSize() {
|
||||
return recordSize;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the available data that can be read from the current
|
||||
* entry in the archive. This does not indicate how much data
|
||||
* is left in the entire archive, only in the current entry.
|
||||
* This value is determined from the entry's size header field
|
||||
* and the amount of data already read from the current entry.
|
||||
* Integer.MAX_VALUE is returned in case more than Integer.MAX_VALUE
|
||||
* bytes are left in the current entry in the archive.
|
||||
*
|
||||
* @return The number of available bytes for the current entry.
|
||||
* @throws IOException
|
||||
*/
|
||||
@Override
|
||||
public int available() throws IOException {
|
||||
if (entrySize - entryOffset > Integer.MAX_VALUE) {
|
||||
return Integer.MAX_VALUE;
|
||||
}
|
||||
return (int) (entrySize - entryOffset);
|
||||
}
|
||||
|
||||
/**
|
||||
* Skip bytes in the input buffer. This skips bytes in the
|
||||
* current entry's data, not the entire archive, and will
|
||||
* stop at the end of the current entry's data if the number
|
||||
* to skip extends beyond that point.
|
||||
*
|
||||
* @param numToSkip The number of bytes to skip.
|
||||
* @return the number actually skipped
|
||||
* @throws IOException on error
|
||||
*/
|
||||
@Override
|
||||
public long skip(long numToSkip) throws IOException {
|
||||
// REVIEW
|
||||
// This is horribly inefficient, but it ensures that we
|
||||
// properly skip over bytes
|
||||
//
|
||||
byte[] skipBuf = new byte[1024];
|
||||
long skip = numToSkip;
|
||||
while (skip > 0) {
|
||||
int realSkip = (int) (skip > skipBuf.length ? skipBuf.length : skip);
|
||||
int numRead = read(skipBuf, 0, realSkip);
|
||||
if (numRead == -1) {
|
||||
break;
|
||||
}
|
||||
skip -= numRead;
|
||||
}
|
||||
return (numToSkip - skip);
|
||||
}
|
||||
|
||||
/**
|
||||
* Since we do not support marking just yet, we do nothing.
|
||||
*/
|
||||
@Override
|
||||
public void reset() {
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the next entry in this tar archive. This will skip
|
||||
* over any remaining data in the current entry, if there
|
||||
* is one, and place the input stream at the header of the
|
||||
* next entry, and read the header and instantiate a new
|
||||
* TarArchiveEntry from the header bytes and return that entry.
|
||||
* If there are no more entries in the archive, null will
|
||||
* be returned to indicate that the end of the archive has
|
||||
* been reached.
|
||||
*
|
||||
* @return The next TarEntry in the archive, or null.
|
||||
* @throws IOException on error
|
||||
*/
|
||||
public synchronized TarArchiveEntry getNextTarEntry() throws IOException {
|
||||
if (hasHitEOF) {
|
||||
return null;
|
||||
}
|
||||
if (entry != null) {
|
||||
long numToSkip = entrySize - entryOffset;
|
||||
while (numToSkip > 0) {
|
||||
long skipped = skip(numToSkip);
|
||||
if (skipped <= 0) {
|
||||
throw new RuntimeException("failed to skip current tar entry");
|
||||
}
|
||||
numToSkip -= skipped;
|
||||
}
|
||||
readBuf = null;
|
||||
}
|
||||
byte[] headerBuf = getRecord();
|
||||
if (hasHitEOF) {
|
||||
entry = null;
|
||||
return null;
|
||||
}
|
||||
try {
|
||||
this.entry = new TarArchiveEntry(headerBuf, encoding);
|
||||
this.entryOffset = 0;
|
||||
this.entrySize = this.entry.getEntrySize();
|
||||
} catch (IllegalArgumentException e) {
|
||||
throw new IOException("error detected parsing the header", e);
|
||||
}
|
||||
if (entry.isGNULongNameEntry()) {
|
||||
StringBuilder longName = new StringBuilder();
|
||||
byte[] buf = new byte[SMALL_BUFFER_SIZE];
|
||||
int length;
|
||||
while ((length = read(buf)) >= 0) {
|
||||
longName.append(new String(buf, 0, length));
|
||||
}
|
||||
getNextEntry();
|
||||
if (entry == null) {
|
||||
return null;
|
||||
}
|
||||
if (longName.length() > 0 && longName.charAt(longName.length() - 1) == 0) {
|
||||
longName.deleteCharAt(longName.length() - 1);
|
||||
}
|
||||
entry.setName(longName.toString());
|
||||
}
|
||||
if (entry.isPaxHeader()) {
|
||||
paxHeaders();
|
||||
}
|
||||
return entry;
|
||||
}
|
||||
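// Typical consumption pattern, added comment not in the original commit:
//
//   TarArchiveInputStream tar = new TarArchiveInputStream(inputStream);
//   TarArchiveEntry entry;
//   while ((entry = tar.getNextTarEntry()) != null) {
//       // read the entry data via read(...) until it returns -1, or simply call
//       // getNextTarEntry() again: it skips whatever is left of the current entry.
//   }
//   tar.close();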
|
||||
/**
|
||||
* Get the next record in this tar archive. This will skip
|
||||
* over any remaining data in the current entry, if there
|
||||
* is one, and place the input stream at the header of the
|
||||
* next entry.
|
||||
* If there are no more entries in the archive, null will
|
||||
* be returned to indicate that the end of the archive has
|
||||
* been reached.
|
||||
*
|
||||
* @return The next header in the archive, or null.
|
||||
* @throws IOException on error
|
||||
*/
|
||||
private byte[] getRecord() throws IOException {
|
||||
if (hasHitEOF) {
|
||||
return null;
|
||||
}
|
||||
byte[] headerBuf = readRecord();
|
||||
if (headerBuf == null) {
|
||||
hasHitEOF = true;
|
||||
} else if (isEOFRecord(headerBuf)) {
|
||||
hasHitEOF = true;
|
||||
}
|
||||
return hasHitEOF ? null : headerBuf;
|
||||
}
|
||||
|
||||
/**
|
||||
* Read a record from the input stream and return the data.
|
||||
*
|
||||
* @return The record data.
|
||||
* @throws IOException on error
|
||||
*/
|
||||
private byte[] readRecord() throws IOException {
|
||||
if (currRecIdx >= recsPerBlock && !readBlock()) {
|
||||
return null;
|
||||
}
|
||||
byte[] result = new byte[recordSize];
|
||||
System.arraycopy(blockBuffer, (currRecIdx * recordSize), result, 0, recordSize);
|
||||
currRecIdx++;
|
||||
return result;
|
||||
}
|
||||
|
||||
private boolean readBlock() throws IOException {
|
||||
currRecIdx = 0;
|
||||
int offset = 0;
|
||||
int bytesNeeded = blockSize;
|
||||
while (bytesNeeded > 0) {
|
||||
long numBytes = inStream.read(blockBuffer, offset, bytesNeeded);
|
||||
if (numBytes == -1) {
|
||||
if (offset == 0) {
|
||||
return false;
|
||||
}
|
||||
Arrays.fill(blockBuffer, offset, offset + bytesNeeded, (byte) 0);
|
||||
break;
|
||||
}
|
||||
offset += numBytes;
|
||||
bytesNeeded -= numBytes;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
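// Added note, not in the original commit: readRecord() and readBlock() implement the classic
// tar blocking scheme. DEFAULT_RECORD_SIZE and DEFAULT_BLOCK_SIZE come from TarConstants,
// which is not part of this hunk; conventionally they are 512 bytes per record and
// 10240 bytes per block (a blocking factor of 20). A short read at end of stream is
// zero-padded to a full block above.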
|
||||
/**
|
||||
* Determine if an archive record indicates End of Archive. End of
|
||||
* archive is indicated by a record that consists entirely of null bytes.
|
||||
*
|
||||
* @param record The record data to check.
|
||||
* @return true if the record data is an End of Archive
|
||||
*/
|
||||
private boolean isEOFRecord(byte[] record) {
|
||||
for (int i = 0, sz = getRecordSize(); i < sz; ++i) {
|
||||
if (record[i] != 0) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
private void paxHeaders() throws IOException {
|
||||
Map<String, String> headers = parsePaxHeaders(this);
|
||||
getNextEntry(); // Get the actual file entry
|
||||
applyPaxHeadersToCurrentEntry(headers);
|
||||
}
|
||||
|
||||
private Map<String, String> parsePaxHeaders(InputStream i) throws IOException {
|
||||
Map<String, String> headers = new HashMap<String, String>();
|
||||
// Format is "length keyword=value\n";
|
||||
while (true) { // get length
|
||||