initial commit

commit 6905991002

548 changed files with 243884 additions and 0 deletions
.gitignore (vendored, Normal file, 16 lines)
@@ -0,0 +1,16 @@
/data
/work
/logs
/.idea
/target
.DS_Store
*.iml
/.settings
/.classpath
/.project
/.gradle
build
/plugins
/sessions
*~
*.MARC
.travis.yml (Normal file, 12 lines)
@@ -0,0 +1,12 @@
language: java
sudo: required
jdk:
  - oraclejdk8
cache:
  directories:
    - $HOME/.m2
after_success:
  - ./gradlew sonarqube -Dsonar.host.url=https://sonarqube.com -Dsonar.login=$SONAR_TOKEN
env:
  global:
secure: n1Ai4q/yMLn/Pg5pA4lTavoJoe7mQYB1PSKnZAqwbgyla94ySzK6iyBCBiNs/foMPisB/x+DHvmUXTsjvquw9Ay48ZITCV3xhcWzD0eZM2TMoG19CpRAEe8L8LNuYiti9k89ijDdUGZ5ifsvQNTGNHksouayAuApC3PrTUejJfR6SYrp1ZsQTbsMlr+4XU3p7QknK5rGgOwATIMP28F+bVnB05WJtlJA3b0SeucCurn3wJ4FGBQXRYmdlT7bQhNE4QgZM1VzcUFD/K0TBxzzq/otb/lNRSifyoekktDmJwQnaT9uQ4R8R6KdQ2Kb38Rvgjur+TKm5i1G8qS2+6LnIxQJG1aw3JvKK6W0wWCgnAVVRrXaCLday9NuY59tuh1mfjQ10UcsMNKcTdcKEMrLow506wSETcXc7L/LEnneWQyJJeV4vhPqR7KJfsBbeqgz3yIfsCn1GZVWFlfegzYCN52YTl0Y0uRD2Z+TnzQu+Bf4DzaWXLge1rz31xkhyeNNspub4h024+XqBjcMm6M9mlMzmmK8t2DIwPy/BlQbFBUyhrxziuR/5/2NEDPyHltvWkRb4AUIa25WJqkV0gTBegbMadZ9DyOo6Ea7aoVFBae2WGR08F1kzABsWrd1S7UJmWxW35iyMEtoAIayXphIK98qO5aCutwZ+3iOQazxbAs=
LICENSE.txt (Normal file, 202 lines)
@@ -0,0 +1,202 @@
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
build.gradle (Normal file, 111 lines)
@@ -0,0 +1,111 @@
plugins {
    id "org.sonarqube" version "2.1-rc1"
    id "org.ajoberstar.github-pages" version "1.6.0-rc.1"
    id "org.xbib.gradle.plugin.jbake" version "1.1.0"
}

println "Host: " + java.net.InetAddress.getLocalHost()
println "Gradle: " + gradle.gradleVersion + " JVM: " + org.gradle.internal.jvm.Jvm.current() + " Groovy: " + GroovySystem.getVersion()
println "Build: group: '${project.group}', name: '${project.name}', version: '${project.version}'"

allprojects {

    apply plugin: 'java'
    apply plugin: 'maven'
    apply plugin: 'signing'
    apply plugin: 'findbugs'
    apply plugin: 'pmd'
    apply plugin: 'checkstyle'
    apply plugin: "jacoco"

    repositories {
        mavenLocal()
        mavenCentral()
    }

    configurations {
        wagon
        provided
        testCompile.extendsFrom(provided)
    }

    dependencies {
        testCompile 'junit:junit:4.12'
        wagon 'org.apache.maven.wagon:wagon-ssh-external:2.10'
    }

    sourceCompatibility = JavaVersion.VERSION_1_8
    targetCompatibility = JavaVersion.VERSION_1_8

    [compileJava, compileTestJava]*.options*.encoding = 'UTF-8'
    tasks.withType(JavaCompile) {
        options.compilerArgs << "-Xlint:all" << "-profile" << "compact2"
    }
    test {
        classpath += configurations.provided
        testLogging {
            showStandardStreams = false
            exceptionFormat = 'full'
        }
    }
    tasks.withType(FindBugs) {
        ignoreFailures = true
        reports {
            xml.enabled = true
            html.enabled = false
        }
    }
    tasks.withType(Pmd) {
        ignoreFailures = true
        reports {
            xml.enabled = true
            html.enabled = true
        }
    }
    tasks.withType(Checkstyle) {
        ignoreFailures = true
        reports {
            xml.enabled = true
            html.enabled = true
        }
    }
    jacocoTestReport {
        reports {
            xml.enabled true
            csv.enabled false
            xml.destination "${buildDir}/reports/jacoco-xml"
            html.destination "${buildDir}/reports/jacoco-html"
        }
    }

    sonarqube {
        properties {
            property "sonar.projectName", "xbib content"
            property "sonar.sourceEncoding", "UTF-8"
            property "sonar.tests", "src/test/java"
            property "sonar.scm.provider", "git"
            property "sonar.java.coveragePlugin", "jacoco"
            property "sonar.junit.reportsPath", "build/test-results/test/"
        }
    }

    task sourcesJar(type: Jar, dependsOn: classes) {
        classifier 'sources'
        from sourceSets.main.allSource
    }
    task javadocJar(type: Jar, dependsOn: javadoc) {
        classifier 'javadoc'
    }
    artifacts {
        archives sourcesJar, javadocJar
    }
    if (project.hasProperty('signing.keyId')) {
        signing {
            sign configurations.archives
        }
    }

    apply from: "${rootProject.projectDir}/gradle/ext.gradle"
    apply from: "${rootProject.projectDir}/gradle/publish.gradle"

}
content-core/build.gradle (Normal file, 3 lines)
@@ -0,0 +1,3 @@
dependencies {
    compile "com.fasterxml.jackson.core:jackson-core:2.8.3"
}
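The content-core module's only compile-time dependency is jackson-core, the low-level streaming JSON library that the XContent generator/parser abstractions in this commit sit on top of. As a rough, self-contained illustration of what that dependency provides (plain jackson-core streaming API, not this project's classes; the field names are invented for the example):

    import com.fasterxml.jackson.core.JsonFactory;
    import com.fasterxml.jackson.core.JsonGenerator;

    import java.io.ByteArrayOutputStream;
    import java.io.IOException;

    public class JacksonStreamingSketch {
        public static void main(String[] args) throws IOException {
            JsonFactory factory = new JsonFactory();
            ByteArrayOutputStream out = new ByteArrayOutputStream();
            // Streaming generation, the same write-by-write style the XContent interfaces expose.
            try (JsonGenerator gen = factory.createGenerator(out)) {
                gen.writeStartObject();
                gen.writeStringField("name", "content");  // invented field
                gen.writeNumberField("modules", 1);       // invented field
                gen.writeEndObject();
            }
            System.out.println(out.toString("UTF-8"));    // {"name":"content","modules":1}
        }
    }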
content-core/config/checkstyle/checkstyle.xml (Normal file, 323 lines)
@@ -0,0 +1,323 @@
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE module PUBLIC
|
||||
"-//Puppy Crawl//DTD Check Configuration 1.3//EN"
|
||||
"http://www.puppycrawl.com/dtds/configuration_1_3.dtd">
|
||||
|
||||
<!-- This is a checkstyle configuration file. For descriptions of
|
||||
what the following rules do, please see the checkstyle configuration
|
||||
page at http://checkstyle.sourceforge.net/config.html -->
|
||||
|
||||
<module name="Checker">
|
||||
|
||||
<module name="FileTabCharacter">
|
||||
<!-- Checks that there are no tab characters in the file.
|
||||
-->
|
||||
</module>
|
||||
|
||||
<module name="NewlineAtEndOfFile">
|
||||
<property name="lineSeparator" value="lf"/>
|
||||
</module>
|
||||
|
||||
<module name="RegexpSingleline">
|
||||
<!-- Checks that FIXME is not used in comments. TODO is preferred.
|
||||
-->
|
||||
<property name="format" value="((//.*)|(\*.*))FIXME" />
|
||||
<property name="message" value='TODO is preferred to FIXME. e.g. "TODO(johndoe): Refactor when v2 is released."' />
|
||||
</module>
|
||||
|
||||
<module name="RegexpSingleline">
|
||||
<!-- Checks that TODOs are named. (Actually, just that they are followed
|
||||
by an open paren.)
|
||||
-->
|
||||
<property name="format" value="((//.*)|(\*.*))TODO[^(]" />
|
||||
<property name="message" value='All TODOs should be named. e.g. "TODO(johndoe): Refactor when v2 is released."' />
|
||||
</module>
|
||||
|
||||
<module name="JavadocPackage">
|
||||
<!-- Checks that each Java package has a Javadoc file used for commenting.
|
||||
Only allows a package-info.java, not package.html. -->
|
||||
</module>
|
||||
|
||||
<!-- All Java AST specific tests live under TreeWalker module. -->
|
||||
<module name="TreeWalker">
|
||||
|
||||
<!--
|
||||
|
||||
IMPORT CHECKS
|
||||
|
||||
-->
|
||||
|
||||
<module name="RedundantImport">
|
||||
<!-- Checks for redundant import statements. -->
|
||||
<property name="severity" value="error"/>
|
||||
</module>
|
||||
|
||||
<module name="ImportOrder">
|
||||
<!-- Checks for out of order import statements. -->
|
||||
|
||||
<property name="severity" value="warning"/>
|
||||
<property name="groups" value="com,junit,net,org,java,javax"/>
|
||||
<!-- This ensures that static imports go first. -->
|
||||
<property name="option" value="top"/>
|
||||
<property name="tokens" value="STATIC_IMPORT, IMPORT"/>
|
||||
</module>
|
||||
|
||||
<!--
|
||||
|
||||
JAVADOC CHECKS
|
||||
|
||||
-->
|
||||
|
||||
<!-- Checks for Javadoc comments. -->
|
||||
<!-- See http://checkstyle.sf.net/config_javadoc.html -->
|
||||
<module name="JavadocMethod">
|
||||
<property name="scope" value="protected"/>
|
||||
<property name="severity" value="warning"/>
|
||||
<property name="allowMissingJavadoc" value="true"/>
|
||||
<property name="allowMissingParamTags" value="true"/>
|
||||
<property name="allowMissingReturnTag" value="true"/>
|
||||
<property name="allowMissingThrowsTags" value="true"/>
|
||||
<property name="allowThrowsTagsForSubclasses" value="true"/>
|
||||
<property name="allowUndeclaredRTE" value="true"/>
|
||||
</module>
|
||||
|
||||
<module name="JavadocType">
|
||||
<property name="scope" value="protected"/>
|
||||
<property name="severity" value="error"/>
|
||||
</module>
|
||||
|
||||
<module name="JavadocStyle">
|
||||
<property name="severity" value="warning"/>
|
||||
</module>
|
||||
|
||||
<!--
|
||||
|
||||
NAMING CHECKS
|
||||
|
||||
-->
|
||||
|
||||
<!-- Item 38 - Adhere to generally accepted naming conventions -->
|
||||
|
||||
<module name="PackageName">
|
||||
<!-- Validates identifiers for package names against the
|
||||
supplied expression. -->
|
||||
<!-- Here the default checkstyle rule restricts package name parts to
|
||||
seven characters, this is not in line with common practice at Google.
|
||||
-->
|
||||
<property name="format" value="^[a-z]+(\.[a-z][a-z0-9]{1,})*$"/>
|
||||
<property name="severity" value="warning"/>
|
||||
</module>
|
||||
|
||||
<module name="TypeNameCheck">
|
||||
<!-- Validates static, final fields against the
|
||||
expression "^[A-Z][a-zA-Z0-9]*$". -->
|
||||
<metadata name="altname" value="TypeName"/>
|
||||
<property name="severity" value="warning"/>
|
||||
</module>
|
||||
|
||||
<module name="ConstantNameCheck">
|
||||
<!-- Validates non-private, static, final fields against the supplied
|
||||
public/package final fields "^[A-Z][A-Z0-9]*(_[A-Z0-9]+)*$". -->
|
||||
<metadata name="altname" value="ConstantName"/>
|
||||
<property name="applyToPublic" value="true"/>
|
||||
<property name="applyToProtected" value="true"/>
|
||||
<property name="applyToPackage" value="true"/>
|
||||
<property name="applyToPrivate" value="false"/>
|
||||
<property name="format" value="^([A-Z][A-Z0-9]*(_[A-Z0-9]+)*|FLAG_.*)$"/>
|
||||
<message key="name.invalidPattern"
|
||||
value="Variable ''{0}'' should be in ALL_CAPS (if it is a constant) or be private (otherwise)."/>
|
||||
<property name="severity" value="warning"/>
|
||||
</module>
|
||||
|
||||
<module name="StaticVariableNameCheck">
|
||||
<!-- Validates static, non-final fields against the supplied
|
||||
expression "^[a-z][a-zA-Z0-9]*_?$". -->
|
||||
<metadata name="altname" value="StaticVariableName"/>
|
||||
<property name="applyToPublic" value="true"/>
|
||||
<property name="applyToProtected" value="true"/>
|
||||
<property name="applyToPackage" value="true"/>
|
||||
<property name="applyToPrivate" value="true"/>
|
||||
<property name="format" value="^[a-z][a-zA-Z0-9]*_?$"/>
|
||||
<property name="severity" value="warning"/>
|
||||
</module>
|
||||
|
||||
<module name="MemberNameCheck">
|
||||
<!-- Validates non-static members against the supplied expression. -->
|
||||
<metadata name="altname" value="MemberName"/>
|
||||
<property name="applyToPublic" value="true"/>
|
||||
<property name="applyToProtected" value="true"/>
|
||||
<property name="applyToPackage" value="true"/>
|
||||
<property name="applyToPrivate" value="true"/>
|
||||
<property name="format" value="^[a-z][a-zA-Z0-9]*$"/>
|
||||
<property name="severity" value="warning"/>
|
||||
</module>
|
||||
|
||||
<module name="MethodNameCheck">
|
||||
<!-- Validates identifiers for method names. -->
|
||||
<metadata name="altname" value="MethodName"/>
|
||||
<property name="format" value="^[a-z][a-zA-Z0-9]*(_[a-zA-Z0-9]+)*$"/>
|
||||
<property name="severity" value="warning"/>
|
||||
</module>
|
||||
|
||||
<module name="ParameterName">
|
||||
<!-- Validates identifiers for method parameters against the
|
||||
expression "^[a-z][a-zA-Z0-9]*$". -->
|
||||
<property name="severity" value="warning"/>
|
||||
</module>
|
||||
|
||||
<module name="LocalFinalVariableName">
|
||||
<!-- Validates identifiers for local final variables against the
|
||||
expression "^[a-z][a-zA-Z0-9]*$". -->
|
||||
<property name="severity" value="warning"/>
|
||||
</module>
|
||||
|
||||
<module name="LocalVariableName">
|
||||
<!-- Validates identifiers for local variables against the
|
||||
expression "^[a-z][a-zA-Z0-9]*$". -->
|
||||
<property name="severity" value="warning"/>
|
||||
</module>
|
||||
|
||||
|
||||
<!--
|
||||
|
||||
LENGTH and CODING CHECKS
|
||||
|
||||
-->
|
||||
|
||||
<module name="LineLength">
|
||||
<!-- Checks if a line is too long. -->
|
||||
<property name="max" value="${com.puppycrawl.tools.checkstyle.checks.sizes.LineLength.max}" default="128"/>
|
||||
<property name="severity" value="error"/>
|
||||
|
||||
<!--
|
||||
The default ignore pattern exempts the following elements:
|
||||
- import statements
|
||||
- long URLs inside comments
|
||||
-->
|
||||
|
||||
<property name="ignorePattern"
|
||||
value="${com.puppycrawl.tools.checkstyle.checks.sizes.LineLength.ignorePattern}"
|
||||
default="^(package .*;\s*)|(import .*;\s*)|( *(\*|//).*https?://.*)$"/>
|
||||
</module>
|
||||
|
||||
<module name="LeftCurly">
|
||||
<!-- Checks for placement of the left curly brace ('{'). -->
|
||||
<property name="severity" value="warning"/>
|
||||
</module>
|
||||
|
||||
<module name="RightCurly">
|
||||
<!-- Checks right curlies on CATCH, ELSE, and TRY blocks are on
|
||||
the same line. e.g., the following example is fine:
|
||||
<pre>
|
||||
if {
|
||||
...
|
||||
} else
|
||||
</pre>
|
||||
-->
|
||||
<!-- This next example is not fine:
|
||||
<pre>
|
||||
if {
|
||||
...
|
||||
}
|
||||
else
|
||||
</pre>
|
||||
-->
|
||||
<property name="option" value="same"/>
|
||||
<property name="severity" value="warning"/>
|
||||
</module>
|
||||
|
||||
<!-- Checks for braces around if and else blocks -->
|
||||
<module name="NeedBraces">
|
||||
<property name="severity" value="warning"/>
|
||||
<property name="tokens" value="LITERAL_IF, LITERAL_ELSE, LITERAL_FOR, LITERAL_WHILE, LITERAL_DO"/>
|
||||
</module>
|
||||
|
||||
<module name="UpperEll">
|
||||
<!-- Checks that long constants are defined with an upper ell.-->
|
||||
<property name="severity" value="error"/>
|
||||
</module>
|
||||
|
||||
<module name="FallThrough">
|
||||
<!-- Warn about falling through to the next case statement. Similar to
|
||||
javac -Xlint:fallthrough, but the check is suppressed if a single-line comment
|
||||
on the last non-blank line preceding the fallen-into case contains 'fall through' (or
|
||||
some other variants which we don't publicize, to promote consistency).
|
||||
-->
|
||||
<property name="reliefPattern"
|
||||
value="fall through|Fall through|fallthru|Fallthru|falls through|Falls through|fallthrough|Fallthrough|No break|NO break|no break|continue on"/>
|
||||
<property name="severity" value="error"/>
|
||||
</module>
|
||||
|
||||
|
||||
<!--
|
||||
|
||||
MODIFIERS CHECKS
|
||||
|
||||
-->
|
||||
|
||||
<module name="ModifierOrder">
|
||||
<!-- Warn if modifier order is inconsistent with JLS3 8.1.1, 8.3.1, and
|
||||
8.4.3. The prescribed order is:
|
||||
public, protected, private, abstract, static, final, transient, volatile,
|
||||
synchronized, native, strictfp
|
||||
-->
|
||||
</module>
|
||||
|
||||
|
||||
<!--
|
||||
|
||||
WHITESPACE CHECKS
|
||||
|
||||
-->
|
||||
|
||||
<module name="WhitespaceAround">
|
||||
<!-- Checks that various tokens are surrounded by whitespace.
|
||||
This includes most binary operators and keywords followed
|
||||
by regular or curly braces.
|
||||
-->
|
||||
<property name="tokens" value="ASSIGN, BAND, BAND_ASSIGN, BOR,
|
||||
BOR_ASSIGN, BSR, BSR_ASSIGN, BXOR, BXOR_ASSIGN, COLON, DIV, DIV_ASSIGN,
|
||||
EQUAL, GE, GT, LAND, LE, LITERAL_CATCH, LITERAL_DO, LITERAL_ELSE,
|
||||
LITERAL_FINALLY, LITERAL_FOR, LITERAL_IF, LITERAL_RETURN,
|
||||
LITERAL_SYNCHRONIZED, LITERAL_TRY, LITERAL_WHILE, LOR, LT, MINUS,
|
||||
MINUS_ASSIGN, MOD, MOD_ASSIGN, NOT_EQUAL, PLUS, PLUS_ASSIGN, QUESTION,
|
||||
SL, SL_ASSIGN, SR_ASSIGN, STAR, STAR_ASSIGN"/>
|
||||
<property name="severity" value="error"/>
|
||||
</module>
|
||||
|
||||
<module name="WhitespaceAfter">
|
||||
<!-- Checks that commas, semicolons and typecasts are followed by
|
||||
whitespace.
|
||||
-->
|
||||
<property name="tokens" value="COMMA, SEMI, TYPECAST"/>
|
||||
</module>
|
||||
|
||||
<module name="NoWhitespaceAfter">
|
||||
<!-- Checks that there is no whitespace after various unary operators.
|
||||
Linebreaks are allowed.
|
||||
-->
|
||||
<property name="tokens" value="BNOT, DEC, DOT, INC, LNOT, UNARY_MINUS,
|
||||
UNARY_PLUS"/>
|
||||
<property name="allowLineBreaks" value="true"/>
|
||||
<property name="severity" value="error"/>
|
||||
</module>
|
||||
|
||||
<module name="NoWhitespaceBefore">
|
||||
<!-- Checks that there is no whitespace before various unary operators.
|
||||
Linebreaks are allowed.
|
||||
-->
|
||||
<property name="tokens" value="SEMI, DOT, POST_DEC, POST_INC"/>
|
||||
<property name="allowLineBreaks" value="true"/>
|
||||
<property name="severity" value="error"/>
|
||||
</module>
|
||||
|
||||
<module name="ParenPad">
|
||||
<!-- Checks that there is no whitespace before close parens or after
|
||||
open parens.
|
||||
-->
|
||||
<property name="severity" value="warning"/>
|
||||
</module>
|
||||
|
||||
</module>
|
||||
</module>
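The FallThrough rule above only suppresses its error when the fallen-into case is preceded by a comment matching the reliefPattern. A small, hypothetical example of code this configuration accepts:

    // Hypothetical snippet: the deliberate fall-through is tolerated by the
    // FallThrough check because of the "fall through" relief comment.
    class FallThroughExample {
        static int weight(char c) {
            switch (c) {
                case 'x':
                    System.out.println("x seen");
                    // fall through
                case 'y':
                    return 2;
                default:
                    return 0;
            }
        }
    }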
content-core/src/main/java/org/xbib/content/AbstractXContentGenerator.java (246 lines)
@@ -0,0 +1,246 @@
package org.xbib.content;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.OutputStream;
|
||||
import java.math.BigDecimal;
|
||||
import java.math.BigInteger;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public abstract class AbstractXContentGenerator implements XContentGenerator {
|
||||
|
||||
protected XContentGenerator generator;
|
||||
|
||||
public AbstractXContentGenerator setGenerator(XContentGenerator generator) {
|
||||
this.generator = generator;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeStartArray() throws IOException {
|
||||
generator.writeStartArray();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeEndArray() throws IOException {
|
||||
generator.writeEndArray();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeStartObject() throws IOException {
|
||||
generator.writeStartObject();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeEndObject() throws IOException {
|
||||
generator.writeEndObject();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeFieldName(String name) throws IOException {
|
||||
generator.writeFieldName(name);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeFieldName(XContentString name) throws IOException {
|
||||
generator.writeFieldName(name);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeString(String text) throws IOException {
|
||||
generator.writeString(text);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeString(char[] text, int offset, int len) throws IOException {
|
||||
generator.writeString(text, offset, len);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeUTF8String(byte[] text, int offset, int length) throws IOException {
|
||||
generator.writeUTF8String(text, offset, length);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeBinary(byte[] data, int offset, int len) throws IOException {
|
||||
generator.writeBinary(data, offset, len);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeBinary(byte[] data) throws IOException {
|
||||
generator.writeBinary(data);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeNumber(int v) throws IOException {
|
||||
generator.writeNumber(v);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeNumber(long v) throws IOException {
|
||||
generator.writeNumber(v);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeNumber(double d) throws IOException {
|
||||
generator.writeNumber(d);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeNumber(float f) throws IOException {
|
||||
generator.writeNumber(f);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeNumber(BigInteger bi) throws IOException {
|
||||
generator.writeNumber(bi);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeNumber(BigDecimal bd) throws IOException {
|
||||
generator.writeNumber(bd);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeBoolean(boolean b) throws IOException {
|
||||
generator.writeBoolean(b);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeNull() throws IOException {
|
||||
generator.writeNull();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeStringField(String fieldName, String value) throws IOException {
|
||||
generator.writeStringField(fieldName, value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeStringField(XContentString fieldName, String value) throws IOException {
    generator.writeFieldName(fieldName);
    generator.writeString(value);
}
|
||||
|
||||
@Override
|
||||
public void writeBooleanField(String fieldName, boolean value) throws IOException {
|
||||
generator.writeBooleanField(fieldName, value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeBooleanField(XContentString fieldName, boolean value) throws IOException {
|
||||
generator.writeFieldName(fieldName);
|
||||
generator.writeBoolean(value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeNullField(String fieldName) throws IOException {
|
||||
generator.writeNullField(fieldName);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeNumberField(String fieldName, int value) throws IOException {
|
||||
generator.writeNumberField(fieldName, value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeNumberField(XContentString fieldName, int value) throws IOException {
|
||||
generator.writeFieldName(fieldName);
|
||||
generator.writeNumber(value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeNumberField(String fieldName, long value) throws IOException {
|
||||
generator.writeNumberField(fieldName, value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeNumberField(XContentString fieldName, long value) throws IOException {
|
||||
generator.writeFieldName(fieldName);
|
||||
generator.writeNumber(value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeNumberField(String fieldName, double value) throws IOException {
|
||||
generator.writeNumberField(fieldName, value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeNumberField(XContentString fieldName, double value) throws IOException {
|
||||
generator.writeFieldName(fieldName);
|
||||
generator.writeNumber(value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeNumberField(String fieldName, float value) throws IOException {
|
||||
generator.writeNumberField(fieldName, value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeNumberField(XContentString fieldName, float value) throws IOException {
|
||||
generator.writeFieldName(fieldName);
|
||||
generator.writeNumber(value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeNumberField(String fieldName, BigInteger value) throws IOException {
|
||||
generator.writeFieldName(fieldName);
|
||||
generator.writeNumber(value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeNumberField(XContentString fieldName, BigInteger value) throws IOException {
|
||||
generator.writeFieldName(fieldName);
|
||||
generator.writeNumber(value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeNumberField(String fieldName, BigDecimal value) throws IOException {
|
||||
generator.writeNumberField(fieldName, value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeNumberField(XContentString fieldName, BigDecimal value) throws IOException {
|
||||
generator.writeFieldName(fieldName);
|
||||
generator.writeNumber(value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeBinaryField(String fieldName, byte[] data) throws IOException {
|
||||
generator.writeBinaryField(fieldName, data);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeBinaryField(XContentString fieldName, byte[] data) throws IOException {
|
||||
generator.writeFieldName(fieldName);
|
||||
generator.writeBinary(data);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeArrayFieldStart(String fieldName) throws IOException {
|
||||
generator.writeArrayFieldStart(fieldName);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeArrayFieldStart(XContentString fieldName) throws IOException {
|
||||
generator.writeFieldName(fieldName);
|
||||
generator.writeStartArray();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeObjectFieldStart(String fieldName) throws IOException {
|
||||
generator.writeObjectFieldStart(fieldName);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeObjectFieldStart(XContentString fieldName) throws IOException {
|
||||
generator.writeFieldName(fieldName);
|
||||
generator.writeStartObject();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void copy(XContentBuilder builder, OutputStream outputStream) throws IOException {
|
||||
flush();
|
||||
builder.bytes().streamOutput(outputStream);
|
||||
}
|
||||
}
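AbstractXContentGenerator above is a pure forwarding base: every write is delegated to the XContentGenerator supplied via setGenerator(), and the XContentString field overloads are decomposed into writeFieldName() plus the matching value write. A compact, self-contained sketch of that shape, with illustrative names that are not part of this commit:

    import java.io.IOException;

    // Illustrative only: the same forward-to-delegate shape used by AbstractXContentGenerator.
    abstract class ForwardingGenerator {

        protected ForwardingGenerator delegate;

        ForwardingGenerator setGenerator(ForwardingGenerator delegate) {
            this.delegate = delegate;
            return this;
        }

        void writeFieldName(String name) throws IOException {
            delegate.writeFieldName(name);
        }

        void writeString(String value) throws IOException {
            delegate.writeString(value);
        }

        // Composite writes are built from the two primitives above,
        // mirroring writeStringField(XContentString, String) in the class above.
        void writeStringField(String name, String value) throws IOException {
            writeFieldName(name);
            writeString(value);
        }
    }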
content-core/src/main/java/org/xbib/content/AbstractXContentParser.java (265 lines)
@@ -0,0 +1,265 @@
package org.xbib.content;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public abstract class AbstractXContentParser implements XContentParser {
|
||||
|
||||
private static final MapFactory SIMPLE_MAP_FACTORY = HashMap::new;
|
||||
private static final MapFactory ORDERED_MAP_FACTORY = LinkedHashMap::new;
|
||||
private boolean losslessDecimals;
|
||||
private boolean base16Checks;
|
||||
|
||||
private static Map<String, Object> readMap(XContentParser parser) throws IOException {
|
||||
return readMap(parser, SIMPLE_MAP_FACTORY);
|
||||
}
|
||||
|
||||
private static Map<String, Object> readOrderedMap(XContentParser parser) throws IOException {
|
||||
return readMap(parser, ORDERED_MAP_FACTORY);
|
||||
}
|
||||
|
||||
private static Map<String, Object> readMap(XContentParser parser, MapFactory mapFactory) throws IOException {
|
||||
Map<String, Object> map = mapFactory.newMap();
|
||||
XContentParser.Token t = parser.currentToken();
|
||||
if (t == null) {
|
||||
t = parser.nextToken();
|
||||
}
|
||||
if (t == XContentParser.Token.START_OBJECT) {
|
||||
t = parser.nextToken();
|
||||
}
|
||||
for (; t == XContentParser.Token.FIELD_NAME; t = parser.nextToken()) {
|
||||
String fieldName = parser.currentName();
|
||||
t = parser.nextToken();
|
||||
Object value = readValue(parser, mapFactory, t);
|
||||
map.put(fieldName, value);
|
||||
}
|
||||
return map;
|
||||
}
|
||||
|
||||
private static List<Object> readList(XContentParser parser, MapFactory mapFactory) throws IOException {
|
||||
ArrayList<Object> list = new ArrayList<>();
|
||||
Token t;
|
||||
while ((t = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
|
||||
list.add(readValue(parser, mapFactory, t));
|
||||
}
|
||||
return list;
|
||||
}
|
||||
|
||||
private static Object readValue(XContentParser parser, MapFactory mapFactory, XContentParser.Token t) throws IOException {
|
||||
if (t == XContentParser.Token.VALUE_NULL) {
|
||||
return null;
|
||||
} else if (t == XContentParser.Token.VALUE_STRING) {
|
||||
if (parser.isBase16Checks()) {
|
||||
return XContentHelper.parseBase16(parser.text());
|
||||
}
|
||||
return parser.text();
|
||||
} else if (t == XContentParser.Token.VALUE_NUMBER) {
|
||||
XContentParser.NumberType numberType = parser.numberType();
|
||||
if (numberType == XContentParser.NumberType.INT) {
|
||||
return parser.isLosslessDecimals() ? parser.bigIntegerValue() : parser.intValue();
|
||||
} else if (numberType == XContentParser.NumberType.LONG) {
|
||||
return parser.isLosslessDecimals() ? parser.bigIntegerValue() : parser.longValue();
|
||||
} else if (numberType == XContentParser.NumberType.FLOAT) {
|
||||
return parser.isLosslessDecimals() ? parser.bigDecimalValue() : parser.floatValue();
|
||||
} else if (numberType == XContentParser.NumberType.DOUBLE) {
|
||||
return parser.isLosslessDecimals() ? parser.bigDecimalValue() : parser.doubleValue();
|
||||
} else if (numberType == NumberType.BIG_INTEGER) {
|
||||
return parser.bigIntegerValue();
|
||||
} else if (numberType == NumberType.BIG_DECIMAL) {
|
||||
return parser.bigDecimalValue();
|
||||
}
|
||||
} else if (t == XContentParser.Token.VALUE_BOOLEAN) {
|
||||
return parser.booleanValue();
|
||||
} else if (t == XContentParser.Token.START_OBJECT) {
|
||||
return readMap(parser, mapFactory);
|
||||
} else if (t == XContentParser.Token.START_ARRAY) {
|
||||
return readList(parser, mapFactory);
|
||||
} else if (t == XContentParser.Token.VALUE_EMBEDDED_OBJECT) {
|
||||
return parser.binaryValue();
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isBooleanValue() throws IOException {
|
||||
switch (currentToken()) {
|
||||
case VALUE_BOOLEAN:
|
||||
return true;
|
||||
case VALUE_NUMBER:
|
||||
NumberType numberType = numberType();
|
||||
return numberType == NumberType.LONG || numberType == NumberType.INT;
|
||||
case VALUE_STRING:
|
||||
return isBoolean(textCharacters(), textOffset(), textLength());
|
||||
default:
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean booleanValue() throws IOException {
|
||||
Token token = currentToken();
|
||||
if (token == Token.VALUE_NUMBER) {
|
||||
return intValue() != 0;
|
||||
} else if (token == Token.VALUE_STRING) {
|
||||
String s = new String(textCharacters(), textOffset(), textLength());
|
||||
return Boolean.parseBoolean(s);
|
||||
}
|
||||
return doBooleanValue();
|
||||
}
|
||||
|
||||
protected abstract boolean doBooleanValue() throws IOException;
|
||||
|
||||
@Override
|
||||
public short shortValue() throws IOException {
|
||||
Token token = currentToken();
|
||||
if (token == Token.VALUE_STRING) {
|
||||
return Short.parseShort(text());
|
||||
}
|
||||
return doShortValue();
|
||||
}
|
||||
|
||||
protected abstract short doShortValue() throws IOException;
|
||||
|
||||
@Override
|
||||
public int intValue() throws IOException {
|
||||
Token token = currentToken();
|
||||
if (token == Token.VALUE_STRING) {
|
||||
return Integer.parseInt(text());
|
||||
}
|
||||
return doIntValue();
|
||||
}
|
||||
|
||||
protected abstract int doIntValue() throws IOException;
|
||||
|
||||
@Override
|
||||
public long longValue() throws IOException {
|
||||
Token token = currentToken();
|
||||
if (token == Token.VALUE_STRING) {
|
||||
return Long.parseLong(text());
|
||||
}
|
||||
return doLongValue();
|
||||
}
|
||||
|
||||
protected abstract long doLongValue() throws IOException;
|
||||
|
||||
@Override
|
||||
public float floatValue() throws IOException {
|
||||
Token token = currentToken();
|
||||
if (token == Token.VALUE_STRING) {
|
||||
return Float.parseFloat(text());
|
||||
}
|
||||
return doFloatValue();
|
||||
}
|
||||
|
||||
protected abstract float doFloatValue() throws IOException;
|
||||
|
||||
@Override
|
||||
public double doubleValue() throws IOException {
|
||||
Token token = currentToken();
|
||||
if (token == Token.VALUE_STRING) {
|
||||
return Double.parseDouble(text());
|
||||
}
|
||||
return doDoubleValue();
|
||||
}
|
||||
|
||||
protected abstract double doDoubleValue() throws IOException;
|
||||
|
||||
@Override
|
||||
public XContentParser losslessDecimals(boolean losslessDecimals) {
|
||||
this.losslessDecimals = losslessDecimals;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isLosslessDecimals() {
|
||||
return losslessDecimals;
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentParser enableBase16Checks(boolean base16Checks) {
|
||||
this.base16Checks = base16Checks;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isBase16Checks() {
|
||||
return base16Checks;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String textOrNull() throws IOException {
|
||||
if (currentToken() == Token.VALUE_NULL) {
|
||||
return null;
|
||||
}
|
||||
return text();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<String, Object> map() throws IOException {
|
||||
return readMap(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<String, Object> mapOrdered() throws IOException {
|
||||
return readOrderedMap(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<String, Object> mapAndClose() throws IOException {
|
||||
try {
|
||||
return map();
|
||||
} finally {
|
||||
close();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<String, Object> mapOrderedAndClose() throws IOException {
|
||||
try {
|
||||
return mapOrdered();
|
||||
} finally {
|
||||
close();
|
||||
}
|
||||
}
|
||||
|
||||
@FunctionalInterface
|
||||
interface MapFactory {
|
||||
Map<String, Object> newMap();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns true if the sequence of chars is one of "true", "false", "on", "off", "yes", "no", "0", "1".
|
||||
*
|
||||
* @param text sequence to check
|
||||
* @param offset offset to start
|
||||
* @param length length to check
|
||||
* @return true if it is a boolean
|
||||
*/
|
||||
private static boolean isBoolean(char[] text, int offset, int length) {
|
||||
if (text == null || length == 0) {
|
||||
return false;
|
||||
}
|
||||
if (length == 1) {
|
||||
return text[offset] == '0' || text[offset] == '1';
|
||||
}
|
||||
if (length == 2) {
|
||||
return (text[offset] == 'n' && text[offset + 1] == 'o') || (text[offset] == 'o' && text[offset + 1] == 'n');
|
||||
}
|
||||
if (length == 3) {
|
||||
return (text[offset] == 'o' && text[offset + 1] == 'f' && text[offset + 2] == 'f') ||
|
||||
(text[offset] == 'y' && text[offset + 1] == 'e' && text[offset + 2] == 's');
|
||||
}
|
||||
if (length == 4) {
|
||||
return text[offset] == 't' && text[offset + 1] == 'r' && text[offset + 2] == 'u' && text[offset + 3] == 'e';
|
||||
}
|
||||
return length == 5 && (text[offset] == 'f' && text[offset + 1] == 'a' && text[offset + 2] == 'l'
|
||||
&& text[offset + 3] == 's' && text[offset + 4] == 'e');
|
||||
}
|
||||
}
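AbstractXContentParser converts the token stream into plain Java collections: readMap()/readList()/readValue() walk the tokens recursively, numeric tokens are widened to BigInteger/BigDecimal when losslessDecimals(true) is set, and string tokens can be base16-decoded when enableBase16Checks(true) is enabled. A hypothetical usage sketch, where xContent stands for any concrete XContent implementation (none is shown in this excerpt):

    import org.xbib.content.XContent;
    import org.xbib.content.XContentParser;

    import java.io.IOException;
    import java.util.Map;

    public class ParserSketch {
        // Parse a content string into an insertion-ordered map, keeping big numbers lossless.
        static Map<String, Object> parse(XContent xContent, String content) throws IOException {
            XContentParser parser = xContent.createParser(content).losslessDecimals(true);
            return parser.mapOrderedAndClose();
        }
    }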
content-core/src/main/java/org/xbib/content/ToXContent.java (Normal file, 36 lines)
@@ -0,0 +1,36 @@
package org.xbib.content;

import java.io.IOException;

/**
 * An interface allowing to transfer an object to content using an {@link XContentBuilder}.
 */
@FunctionalInterface
public interface ToXContent {

    XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException;

    /**
     *
     */
    interface Params {
        String param(String key);

        String param(String key, String defaultValue);

    }

    Params EMPTY_PARAMS = new Params() {

        @Override
        public String param(String key) {
            return null;
        }

        @Override
        public String param(String key, String defaultValue) {
            return defaultValue;
        }

    };
}
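ToXContent is the single-method callback through which a domain object writes itself into an XContentBuilder. A minimal, hypothetical implementation (the Author class is invented; the builder methods used are the ones added by this commit):

    import org.xbib.content.ToXContent;
    import org.xbib.content.XContentBuilder;

    import java.io.IOException;

    // Hypothetical domain object implementing ToXContent.
    public class Author implements ToXContent {

        private final String name;
        private final int year;

        public Author(String name, int year) {
            this.name = name;
            this.year = year;
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            return builder.startObject()
                    .field("name", name)
                    .field("year", year)
                    .endObject();
        }
    }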
content-core/src/main/java/org/xbib/content/XContent.java (Normal file, 98 lines)
@@ -0,0 +1,98 @@
package org.xbib.content;

import org.xbib.content.io.BytesReference;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.Reader;
import java.io.Writer;

/**
 * A generic abstraction on top of handling content, inspired by JSON and pull parsing.
 */
public interface XContent {

    String name();

    /**
     * Creates a new generator using the provided output stream.
     *
     * @param outputStream output stream
     * @return content generator
     * @throws IOException if creation fails
     */
    XContentGenerator createGenerator(OutputStream outputStream) throws IOException;

    /**
     * Creates a new generator using the provided writer.
     *
     * @param writer writer
     * @return content generator
     * @throws IOException if creation fails
     */
    XContentGenerator createGenerator(Writer writer) throws IOException;

    /**
     * Creates a parser over the provided input stream.
     *
     * @param inputStream input stream
     * @return content parser
     * @throws IOException if creation fails
     */
    XContentParser createParser(InputStream inputStream) throws IOException;

    /**
     * Creates a parser over the provided reader.
     *
     * @param reader reader
     * @return content parser
     * @throws IOException if creation fails
     */
    XContentParser createParser(Reader reader) throws IOException;

    /**
     * Creates a parser over the provided string content.
     *
     * @param content string
     * @return content parser
     * @throws IOException if creation fails
     */
    XContentParser createParser(String content) throws IOException;

    /**
     * Creates a parser over the provided bytes.
     *
     * @param bytes bytes
     * @return content parser
     * @throws IOException if creation fails
     */
    XContentParser createParser(byte[] bytes) throws IOException;

    /**
     * Creates a parser over the provided bytes.
     *
     * @param bytes bytes
     * @param offset offset
     * @param length length
     * @return content parser
     * @throws IOException if creation fails
     */
    XContentParser createParser(byte[] bytes, int offset, int length) throws IOException;

    /**
     * Creates a parser over the provided bytes.
     *
     * @param bytes bytes
     * @return content parser
     * @throws IOException if creation fails
     */
    XContentParser createParser(BytesReference bytes) throws IOException;

    /**
     * Returns true if content can be parsed/generated.
     *
     * @param bytes bytes
     * @return true if content can be parsed/generated.
     */
    boolean isXContent(BytesReference bytes);
}
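XContent is the format-neutral entry point: a concrete implementation hands out generators and parsers for streams, readers, strings and byte references. A hedged round-trip sketch, where xContent again stands for some concrete implementation provided elsewhere:

    import org.xbib.content.XContent;
    import org.xbib.content.XContentBuilder;

    import java.io.IOException;
    import java.util.Map;

    public class RoundTripSketch {
        // Build a small object and parse it straight back into a Map.
        static Map<String, Object> roundTrip(XContent xContent) throws IOException {
            XContentBuilder builder = XContentBuilder.builder(xContent)
                    .startObject()
                    .field("title", "initial commit")
                    .field("files", 548)
                    .endObject();
            return xContent.createParser(builder.bytes()).mapAndClose();
        }
    }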
content-core/src/main/java/org/xbib/content/XContentBuilder.java (Normal file, 924 lines)
@@ -0,0 +1,924 @@
package org.xbib.content;
|
||||
|
||||
import org.xbib.content.io.BytesReference;
|
||||
import org.xbib.content.io.BytesStreamOutput;
|
||||
import org.xbib.content.util.geo.GeoPoint;
|
||||
|
||||
import java.io.Closeable;
|
||||
import java.io.Flushable;
|
||||
import java.io.IOException;
|
||||
import java.io.OutputStream;
|
||||
import java.math.BigDecimal;
|
||||
import java.math.BigInteger;
|
||||
import java.time.Instant;
|
||||
import java.time.ZoneId;
|
||||
import java.time.ZonedDateTime;
|
||||
import java.time.format.DateTimeFormatter;
|
||||
import java.util.Calendar;
|
||||
import java.util.Collection;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.logging.Level;
|
||||
import java.util.logging.Logger;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public final class XContentBuilder implements ToXContent, Flushable, Closeable {
|
||||
|
||||
private static final Logger logger = Logger.getLogger(XContentBuilder.class.getName());
|
||||
|
||||
private final OutputStream outputStream;
|
||||
private final XContentGenerator generator;
|
||||
|
||||
/**
|
||||
* Constructs a new builder using the provided xcontent and an OutputStream. Make sure
|
||||
* to call {@link #close()} when the builder is done with.
|
||||
* @param xContent content
|
||||
* @param outputStream output stream
|
||||
* @throws IOException if construction fails
|
||||
*/
|
||||
public XContentBuilder(XContent xContent, OutputStream outputStream) throws IOException {
|
||||
this.outputStream = outputStream;
|
||||
this.generator = xContent.createGenerator(outputStream);
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs a new builder using a fresh {@link org.xbib.content.io.BytesStreamOutput}.
|
||||
* @param xContent the content
|
||||
* @return content builder
|
||||
* @throws IOException exception
|
||||
*/
|
||||
public static XContentBuilder builder(XContent xContent) throws IOException {
|
||||
return new XContentBuilder(xContent, new BytesStreamOutput());
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs a new content builder.
|
||||
* @param xContent the content
|
||||
* @param out out
|
||||
* @return content builder
|
||||
* @throws IOException if build fails
|
||||
*/
|
||||
public static XContentBuilder builder(XContent xContent, OutputStream out) throws IOException {
|
||||
return new XContentBuilder(xContent, out);
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
return builder.copy(this);
|
||||
}
|
||||
|
||||
public XContent content() {
|
||||
return generator.content();
|
||||
}
|
||||
|
||||
public XContentGenerator generator() {
|
||||
return generator;
|
||||
}
|
||||
|
||||
public XContentBuilder prettyPrint() {
|
||||
generator.usePrettyPrint();
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder field(String name, ToXContent xContent) throws IOException {
|
||||
field(name);
|
||||
xContent.toXContent(this, ToXContent.EMPTY_PARAMS);
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder field(String name, ToXContent xContent, ToXContent.Params params) throws IOException {
|
||||
field(name);
|
||||
xContent.toXContent(this, params);
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder startObject(String name) throws IOException {
|
||||
field(name);
|
||||
startObject();
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder startObject(XContentBuilderString name) throws IOException {
|
||||
field(name);
|
||||
startObject();
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder startObject() throws IOException {
|
||||
generator.writeStartObject();
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder endObject() throws IOException {
|
||||
generator.writeEndObject();
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder array(String name, Collection<?> values) throws IOException {
|
||||
startArray(name);
|
||||
for (Object value : values) {
|
||||
value(value);
|
||||
}
|
||||
endArray();
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder array(String name, String... values) throws IOException {
|
||||
startArray(name);
|
||||
for (String value : values) {
|
||||
value(value);
|
||||
}
|
||||
endArray();
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder array(String name, Object... values) throws IOException {
|
||||
startArray(name);
|
||||
for (Object value : values) {
|
||||
value(value);
|
||||
}
|
||||
endArray();
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder startArray(String name) throws IOException {
|
||||
field(name);
|
||||
startArray();
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder startArray(XContentBuilderString name) throws IOException {
|
||||
field(name);
|
||||
startArray();
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder startArray() throws IOException {
|
||||
generator.writeStartArray();
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder endArray() throws IOException {
|
||||
generator.writeEndArray();
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder field(XContentBuilderString name) throws IOException {
|
||||
generator.writeFieldName(name.string());
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder field(String name) throws IOException {
|
||||
Objects.requireNonNull(name);
|
||||
generator.writeFieldName(name);
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder field(String name, char[] value, int offset, int length) throws IOException {
|
||||
field(name);
|
||||
if (value == null) {
|
||||
generator.writeNull();
|
||||
} else {
|
||||
generator.writeString(value, offset, length);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder field(String name, String value) throws IOException {
|
||||
field(name);
|
||||
if (value == null) {
|
||||
generator.writeNull();
|
||||
} else {
|
||||
generator.writeString(value);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder field(XContentBuilderString name, String value) throws IOException {
|
||||
field(name);
|
||||
if (value == null) {
|
||||
generator.writeNull();
|
||||
} else {
|
||||
generator.writeString(value);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder fieldIfNotNull(String name, String value) throws IOException {
|
||||
if (value != null) {
|
||||
field(name);
|
||||
generator.writeString(value);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder field(String name, Integer value) throws IOException {
|
||||
field(name);
|
||||
if (value == null) {
|
||||
generator.writeNull();
|
||||
} else {
|
||||
generator.writeNumber(value);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder fieldIfNotNull(String name, Integer value) throws IOException {
|
||||
if (value != null) {
|
||||
field(name);
|
||||
generator.writeNumber(value);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder field(XContentBuilderString name, Integer value) throws IOException {
|
||||
field(name);
|
||||
if (value == null) {
|
||||
generator.writeNull();
|
||||
} else {
|
||||
generator.writeNumber(value);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder field(String name, int value) throws IOException {
|
||||
field(name);
|
||||
generator.writeNumber(value);
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder field(XContentBuilderString name, int value) throws IOException {
|
||||
field(name);
|
||||
generator.writeNumber(value);
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder field(String name, Long value) throws IOException {
|
||||
field(name);
|
||||
if (value == null) {
|
||||
generator.writeNull();
|
||||
} else {
|
||||
generator.writeNumber(value);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder field(XContentBuilderString name, Long value) throws IOException {
|
||||
field(name);
|
||||
if (value == null) {
|
||||
generator.writeNull();
|
||||
} else {
|
||||
generator.writeNumber(value);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder fieldIfNotNull(String name, Long value) throws IOException {
|
||||
if (value != null) {
|
||||
field(name);
|
||||
generator.writeNumber(value);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder field(String name, long value) throws IOException {
|
||||
field(name);
|
||||
generator.writeNumber(value);
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder field(XContentBuilderString name, long value) throws IOException {
|
||||
field(name);
|
||||
generator.writeNumber(value);
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder field(String name, Float value) throws IOException {
|
||||
field(name);
|
||||
if (value == null) {
|
||||
generator.writeNull();
|
||||
} else {
|
||||
generator.writeNumber(value);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder field(XContentBuilderString name, Float value) throws IOException {
|
||||
field(name);
|
||||
if (value == null) {
|
||||
generator.writeNull();
|
||||
} else {
|
||||
generator.writeNumber(value);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder fieldIfNotNull(String name, Float value) throws IOException {
|
||||
if (value != null) {
|
||||
field(name);
|
||||
generator.writeNumber(value);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder field(String name, float value) throws IOException {
|
||||
field(name);
|
||||
generator.writeNumber(value);
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder field(XContentBuilderString name, float value) throws IOException {
|
||||
field(name);
|
||||
generator.writeNumber(value);
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder field(String name, Double value) throws IOException {
|
||||
field(name);
|
||||
if (value == null) {
|
||||
generator.writeNull();
|
||||
} else {
|
||||
generator.writeNumber(value);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder fieldIfNotNull(String name, Double value) throws IOException {
|
||||
if (value != null) {
|
||||
field(name);
|
||||
generator.writeNumber(value);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder field(XContentBuilderString name, Double value) throws IOException {
|
||||
field(name);
|
||||
if (value == null) {
|
||||
generator.writeNull();
|
||||
} else {
|
||||
generator.writeNumber(value);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder field(String name, double value) throws IOException {
|
||||
field(name);
|
||||
generator.writeNumber(value);
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder field(XContentBuilderString name, double value) throws IOException {
|
||||
field(name);
|
||||
generator.writeNumber(value);
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder field(String name, BigInteger value) throws IOException {
|
||||
field(name);
|
||||
generator.writeNumber(value);
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder field(XContentBuilderString name, BigInteger value) throws IOException {
|
||||
field(name);
|
||||
generator.writeNumber(value);
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder field(String name, BigDecimal value) throws IOException {
|
||||
field(name);
|
||||
generator.writeNumber(value);
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder field(XContentBuilderString name, BigDecimal value) throws IOException {
|
||||
field(name);
|
||||
generator.writeNumber(value);
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder field(String name, BytesReference value) throws IOException {
|
||||
field(name);
|
||||
byte[] b = value.toBytes();
|
||||
generator.writeBinary(b, 0, b.length);
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder field(XContentBuilderString name, BytesReference value) throws IOException {
|
||||
field(name);
|
||||
byte[] b = value.toBytes();
|
||||
generator.writeBinary(b, 0, b.length);
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder field(String name, byte[] value, int offset, int length) throws IOException {
|
||||
field(name);
|
||||
generator.writeBinary(value, offset, length);
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder field(String name, Map<String, Object> value) throws IOException {
|
||||
field(name);
|
||||
value(value);
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder field(XContentBuilderString name, Map<String, Object> value) throws IOException {
|
||||
field(name);
|
||||
value(value);
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder field(String name, Iterable<?> value) throws IOException {
|
||||
startArray(name);
|
||||
for (Object o : value) {
|
||||
value(o);
|
||||
}
|
||||
endArray();
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder field(XContentBuilderString name, Iterable<?> value) throws IOException {
|
||||
startArray(name);
|
||||
for (Object o : value) {
|
||||
value(o);
|
||||
}
|
||||
endArray();
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder field(String name, String... value) throws IOException {
|
||||
startArray(name);
|
||||
for (String o : value) {
|
||||
value(o);
|
||||
}
|
||||
endArray();
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder field(XContentBuilderString name, String... value) throws IOException {
|
||||
startArray(name);
|
||||
for (String o : value) {
|
||||
value(o);
|
||||
}
|
||||
endArray();
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder field(String name, Object... value) throws IOException {
|
||||
startArray(name);
|
||||
for (Object o : value) {
|
||||
value(o);
|
||||
}
|
||||
endArray();
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder field(XContentBuilderString name, Object... value) throws IOException {
|
||||
startArray(name);
|
||||
for (Object o : value) {
|
||||
value(o);
|
||||
}
|
||||
endArray();
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder field(String name, int... value) throws IOException {
|
||||
startArray(name);
|
||||
for (Object o : value) {
|
||||
value(o);
|
||||
}
|
||||
endArray();
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder field(XContentBuilderString name, int offset, int length, int... value) throws IOException {
|
||||
startArray(name);
|
||||
for (int i = offset; i < offset + length; i++) {
|
||||
value(value[i]);
|
||||
}
|
||||
endArray();
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder field(XContentBuilderString name, int... value) throws IOException {
|
||||
startArray(name);
|
||||
for (Object o : value) {
|
||||
value(o);
|
||||
}
|
||||
endArray();
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder field(String name, long... value) throws IOException {
|
||||
startArray(name);
|
||||
for (Object o : value) {
|
||||
value(o);
|
||||
}
|
||||
endArray();
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder field(XContentBuilderString name, long... value) throws IOException {
|
||||
startArray(name);
|
||||
for (Object o : value) {
|
||||
value(o);
|
||||
}
|
||||
endArray();
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder field(String name, float... value) throws IOException {
|
||||
startArray(name);
|
||||
for (Object o : value) {
|
||||
value(o);
|
||||
}
|
||||
endArray();
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder field(XContentBuilderString name, float... value) throws IOException {
|
||||
startArray(name);
|
||||
for (Object o : value) {
|
||||
value(o);
|
||||
}
|
||||
endArray();
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder field(String name, double... value) throws IOException {
|
||||
startArray(name);
|
||||
for (Object o : value) {
|
||||
value(o);
|
||||
}
|
||||
endArray();
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder field(XContentBuilderString name, double... value) throws IOException {
|
||||
startArray(name);
|
||||
for (Object o : value) {
|
||||
value(o);
|
||||
}
|
||||
endArray();
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder field(String name, Object value) throws IOException {
|
||||
field(name);
|
||||
writeValue(value);
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder field(XContentBuilderString name, Object value) throws IOException {
|
||||
field(name);
|
||||
writeValue(value);
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder fieldIfNotNull(String name, Object value) throws IOException {
|
||||
if (value != null) {
|
||||
return field(name, value);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder value(Object value) throws IOException {
|
||||
writeValue(value);
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder field(String name, boolean value) throws IOException {
|
||||
field(name);
|
||||
generator.writeBoolean(value);
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder field(String name, byte[] value) throws IOException {
|
||||
field(name);
|
||||
if (value == null) {
|
||||
generator.writeNull();
|
||||
} else {
|
||||
generator.writeBinary(value);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder nullField(String name) throws IOException {
|
||||
generator.writeNullField(name);
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder nullValue() throws IOException {
|
||||
generator.writeNull();
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder rawField(String fieldName, byte[] content) throws IOException {
|
||||
generator.writeRawField(fieldName, content, outputStream);
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder rawField(String fieldName, byte[] content, int offset, int length) throws IOException {
|
||||
generator.writeRawField(fieldName, content, offset, length, outputStream);
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder value(XContentBuilder builder) throws IOException {
|
||||
generator.writeValue(builder);
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder copy(XContentBuilder builder) throws IOException {
|
||||
generator.copy(builder, outputStream);
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder copy(List<XContentBuilder> builder) throws IOException {
|
||||
for (int i = 0; i < builder.size(); i++) {
|
||||
if (i > 0) {
|
||||
outputStream.write(',');
|
||||
}
|
||||
generator.copy(builder.get(i), outputStream);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder value(Boolean value) throws IOException {
|
||||
if (value == null) {
|
||||
return nullValue();
|
||||
}
|
||||
return value(value.booleanValue());
|
||||
}
|
||||
|
||||
public XContentBuilder value(boolean value) throws IOException {
|
||||
generator.writeBoolean(value);
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder value(Integer value) throws IOException {
|
||||
if (value == null) {
|
||||
return nullValue();
|
||||
}
|
||||
return value(value.intValue());
|
||||
}
|
||||
|
||||
public XContentBuilder value(int value) throws IOException {
|
||||
generator.writeNumber(value);
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder value(Long value) throws IOException {
|
||||
if (value == null) {
|
||||
return nullValue();
|
||||
}
|
||||
return value(value.longValue());
|
||||
}
|
||||
|
||||
public XContentBuilder value(long value) throws IOException {
|
||||
generator.writeNumber(value);
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder value(Float value) throws IOException {
|
||||
if (value == null) {
|
||||
return nullValue();
|
||||
}
|
||||
return value(value.floatValue());
|
||||
}
|
||||
|
||||
public XContentBuilder value(float value) throws IOException {
|
||||
generator.writeNumber(value);
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder value(Double value) throws IOException {
|
||||
if (value == null) {
|
||||
return nullValue();
|
||||
}
|
||||
return value(value.doubleValue());
|
||||
}
|
||||
|
||||
public XContentBuilder value(double value) throws IOException {
|
||||
generator.writeNumber(value);
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder value(BigInteger bi) throws IOException {
|
||||
generator.writeNumber(bi);
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder value(BigDecimal bd) throws IOException {
|
||||
generator.writeNumber(bd);
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder value(String value) throws IOException {
|
||||
if (value == null) {
|
||||
return nullValue();
|
||||
}
|
||||
generator.writeString(value);
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder value(byte[] value) throws IOException {
|
||||
if (value == null) {
|
||||
return nullValue();
|
||||
}
|
||||
generator.writeBinary(value);
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder value(byte[] value, int offset, int length) throws IOException {
|
||||
if (value == null) {
|
||||
return nullValue();
|
||||
}
|
||||
generator.writeBinary(value, offset, length);
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder value(BytesReference value) throws IOException {
|
||||
if (value == null) {
|
||||
return nullValue();
|
||||
}
|
||||
byte[] b = value.toBytes();
|
||||
generator.writeBinary(b, 0, b.length);
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder map(Map<String, Object> map) throws IOException {
|
||||
if (map == null) {
|
||||
return nullValue();
|
||||
}
|
||||
writeMap(map);
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder value(Map<String, Object> map) throws IOException {
|
||||
if (map == null) {
|
||||
return nullValue();
|
||||
}
|
||||
writeMap(map);
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder value(Iterable<?> value) throws IOException {
|
||||
if (value == null) {
|
||||
return nullValue();
|
||||
}
|
||||
startArray();
|
||||
for (Object o : value) {
|
||||
value(o);
|
||||
}
|
||||
endArray();
|
||||
return this;
|
||||
}
|
||||
|
||||
public XContentBuilder copyCurrentStructure(XContentParser parser) throws IOException {
|
||||
generator.copyCurrentStructure(parser);
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void flush() throws IOException {
|
||||
generator.flush();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() throws IOException {
|
||||
generator.close();
|
||||
}
|
||||
|
||||
public BytesReference bytes() {
|
||||
try {
|
||||
generator.close();
|
||||
} catch (IOException e) {
|
||||
logger.log(Level.FINE, e.getMessage(), e);
|
||||
}
|
||||
return ((BytesStreamOutput) outputStream).bytes();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a string representation of the builder output (only applicable for text-based content
|
||||
* and only when the builder is constructed with a {@link BytesStreamOutput}).
|
||||
* @return string
|
||||
*/
|
||||
public String string() {
|
||||
return bytes().toUtf8();
|
||||
}
|
||||
|
||||
private void writeMap(Map<String, Object> map) throws IOException {
|
||||
generator.writeStartObject();
|
||||
for (Map.Entry<String, Object> entry : map.entrySet()) {
|
||||
field(entry.getKey());
|
||||
Object value = entry.getValue();
|
||||
if (value == null) {
|
||||
generator.writeNull();
|
||||
} else {
|
||||
writeValue(value);
|
||||
}
|
||||
}
|
||||
generator.writeEndObject();
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private void writeValue(Object value) throws IOException {
|
||||
if (value == null) {
|
||||
generator.writeNull();
|
||||
return;
|
||||
}
|
||||
Class<?> type = value.getClass();
|
||||
if (type == String.class) {
|
||||
generator.writeString((String) value);
|
||||
} else if (type == Integer.class) {
|
||||
generator.writeNumber((Integer) value);
|
||||
} else if (type == Long.class) {
|
||||
generator.writeNumber((Long) value);
|
||||
} else if (type == Float.class) {
|
||||
generator.writeNumber((Float) value);
|
||||
} else if (type == Double.class) {
|
||||
generator.writeNumber((Double) value);
|
||||
} else if (type == Short.class) {
|
||||
generator.writeNumber((Short) value);
|
||||
} else if (type == Boolean.class) {
|
||||
generator.writeBoolean((Boolean) value);
|
||||
} else if (type == GeoPoint.class) {
|
||||
generator.writeStartObject();
|
||||
generator.writeNumberField("lat", ((GeoPoint) value).lat());
|
||||
generator.writeNumberField("lon", ((GeoPoint) value).lon());
|
||||
generator.writeEndObject();
|
||||
} else if (value instanceof Map) {
|
||||
writeMap((Map) value);
|
||||
} else if (value instanceof Iterable) {
|
||||
generator.writeStartArray();
|
||||
for (Object v : (Iterable) value) {
|
||||
writeValue(v);
|
||||
}
|
||||
generator.writeEndArray();
|
||||
} else if (value instanceof Object[]) {
|
||||
generator.writeStartArray();
|
||||
for (Object v : (Object[]) value) {
|
||||
writeValue(v);
|
||||
}
|
||||
generator.writeEndArray();
|
||||
} else if (type == byte[].class) {
|
||||
generator.writeBinary((byte[]) value);
|
||||
} else if (value instanceof Date) {
|
||||
Date date = (Date) value;
|
||||
Instant instant = Instant.ofEpochMilli(date.getTime());
|
||||
ZonedDateTime zdt = ZonedDateTime.ofInstant(instant, ZoneId.systemDefault());
|
||||
generator.writeString(zdt.format(DateTimeFormatter.ISO_INSTANT));
|
||||
} else if (value instanceof Calendar) {
|
||||
Calendar calendar = (Calendar) value;
|
||||
Instant instant = Instant.ofEpochMilli(calendar.getTimeInMillis());
|
||||
ZonedDateTime zdt = ZonedDateTime.ofInstant(instant, ZoneId.systemDefault());
|
||||
generator.writeString(zdt.format(DateTimeFormatter.ISO_INSTANT));
|
||||
} else if (value instanceof BytesReference) {
|
||||
BytesReference bytes = (BytesReference) value;
|
||||
byte[] b = bytes.toBytes();
|
||||
generator.writeBinary(b, 0, b.length);
|
||||
} else if (value instanceof XContentBuilder) {
|
||||
value((XContentBuilder) value);
|
||||
} else if (value instanceof ToXContent) {
|
||||
((ToXContent) value).toXContent(this, ToXContent.EMPTY_PARAMS);
|
||||
} else if (value instanceof double[]) {
|
||||
generator.writeStartArray();
|
||||
for (double v : (double[]) value) {
|
||||
generator.writeNumber(v);
|
||||
}
|
||||
generator.writeEndArray();
|
||||
} else if (value instanceof long[]) {
|
||||
generator.writeStartArray();
|
||||
for (long v : (long[]) value) {
|
||||
generator.writeNumber(v);
|
||||
}
|
||||
generator.writeEndArray();
|
||||
} else if (value instanceof int[]) {
|
||||
generator.writeStartArray();
|
||||
for (int v : (int[]) value) {
|
||||
generator.writeNumber(v);
|
||||
}
|
||||
generator.writeEndArray();
|
||||
} else if (value instanceof float[]) {
|
||||
generator.writeStartArray();
|
||||
for (float v : (float[]) value) {
|
||||
generator.writeNumber(v);
|
||||
}
|
||||
generator.writeEndArray();
|
||||
} else if (value instanceof short[]) {
|
||||
generator.writeStartArray();
|
||||
for (short v : (short[]) value) {
|
||||
generator.writeNumber(v);
|
||||
}
|
||||
generator.writeEndArray();
|
||||
} else {
|
||||
// if this is a "value" object, like an enum or DistanceUnit, just call toString() on it
|
||||
// yes, toString() on an arbitrary Java class can be misleading, but Jackson should be used in such cases
|
||||
generator.writeString(value.toString());
|
||||
}
|
||||
}
|
||||
}
|
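The fluent API above chains structure and field calls and renders the result with string(). A minimal usage sketch (hypothetical, assuming the JsonXContent implementation from this module and a builder backed by the default BytesStreamOutput, so that string() applies):

import org.xbib.content.XContentBuilder;
import org.xbib.content.json.JsonXContent;

public class XContentBuilderExample {
    public static void main(String[] args) throws Exception {
        try (XContentBuilder builder = JsonXContent.contentBuilder()) {
            builder.startObject()
                    .field("title", "Hello")       // String field
                    .field("count", 42)            // primitive int field
                    .array("tags", "a", "b", "c")  // varargs array
                    .nullField("comment")
                    .endObject();
            // string() closes the generator and decodes the buffered bytes as UTF-8
            System.out.println(builder.string());
        }
    }
}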
|
@ -0,0 +1,18 @@
|
|||
package org.xbib.content;
|
||||
|
||||
/**
|
||||
* A builder field name, backed by a pre-encoded {@link XContentString}.
|
||||
*/
|
||||
public class XContentBuilderString {
|
||||
|
||||
private final XContentString string;
|
||||
|
||||
public XContentBuilderString(String value) {
|
||||
string = new XContentString(value);
|
||||
}
|
||||
|
||||
public XContentString string() {
|
||||
return string;
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,121 @@
|
|||
package org.xbib.content;
|
||||
|
||||
import org.xbib.content.io.BytesReference;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.OutputStream;
|
||||
import java.math.BigDecimal;
|
||||
import java.math.BigInteger;
|
||||
|
||||
/**
|
||||
* A low-level, streaming write interface for structured content; {@link XContentBuilder} delegates to it.
|
||||
*/
|
||||
public interface XContentGenerator {
|
||||
|
||||
XContent content();
|
||||
|
||||
void usePrettyPrint();
|
||||
|
||||
void writeStartArray() throws IOException;
|
||||
|
||||
void writeEndArray() throws IOException;
|
||||
|
||||
void writeStartObject() throws IOException;
|
||||
|
||||
void writeEndObject() throws IOException;
|
||||
|
||||
void writeFieldName(String name) throws IOException;
|
||||
|
||||
void writeFieldName(XContentString name) throws IOException;
|
||||
|
||||
void writeString(String text) throws IOException;
|
||||
|
||||
void writeString(char[] text, int offset, int len) throws IOException;
|
||||
|
||||
void writeUTF8String(byte[] text, int offset, int length) throws IOException;
|
||||
|
||||
void writeBinary(byte[] data, int offset, int len) throws IOException;
|
||||
|
||||
void writeBinary(byte[] data) throws IOException;
|
||||
|
||||
void writeNumber(int v) throws IOException;
|
||||
|
||||
void writeNumber(long v) throws IOException;
|
||||
|
||||
void writeNumber(double d) throws IOException;
|
||||
|
||||
void writeNumber(float f) throws IOException;
|
||||
|
||||
void writeNumber(BigDecimal bd) throws IOException;
|
||||
|
||||
void writeNumber(BigInteger bi) throws IOException;
|
||||
|
||||
void writeBoolean(boolean state) throws IOException;
|
||||
|
||||
void writeNull() throws IOException;
|
||||
|
||||
void writeStringField(String fieldName, String value) throws IOException;
|
||||
|
||||
void writeStringField(XContentString fieldName, String value) throws IOException;
|
||||
|
||||
void writeBooleanField(String fieldName, boolean value) throws IOException;
|
||||
|
||||
void writeBooleanField(XContentString fieldName, boolean value) throws IOException;
|
||||
|
||||
void writeNullField(String fieldName) throws IOException;
|
||||
|
||||
void writeNumberField(String fieldName, int value) throws IOException;
|
||||
|
||||
void writeNumberField(XContentString fieldName, int value) throws IOException;
|
||||
|
||||
void writeNumberField(String fieldName, long value) throws IOException;
|
||||
|
||||
void writeNumberField(XContentString fieldName, long value) throws IOException;
|
||||
|
||||
void writeNumberField(String fieldName, double value) throws IOException;
|
||||
|
||||
void writeNumberField(XContentString fieldName, double value) throws IOException;
|
||||
|
||||
void writeNumberField(String fieldName, float value) throws IOException;
|
||||
|
||||
void writeNumberField(XContentString fieldName, float value) throws IOException;
|
||||
|
||||
void writeNumberField(String fieldName, BigInteger value) throws IOException;
|
||||
|
||||
void writeNumberField(XContentString fieldName, BigInteger value) throws IOException;
|
||||
|
||||
void writeNumberField(String fieldName, BigDecimal value) throws IOException;
|
||||
|
||||
void writeNumberField(XContentString fieldName, BigDecimal value) throws IOException;
|
||||
|
||||
void writeBinaryField(String fieldName, byte[] data) throws IOException;
|
||||
|
||||
void writeBinaryField(XContentString fieldName, byte[] data) throws IOException;
|
||||
|
||||
void writeArrayFieldStart(String fieldName) throws IOException;
|
||||
|
||||
void writeArrayFieldStart(XContentString fieldName) throws IOException;
|
||||
|
||||
void writeObjectFieldStart(String fieldName) throws IOException;
|
||||
|
||||
void writeObjectFieldStart(XContentString fieldName) throws IOException;
|
||||
|
||||
void writeRawField(String fieldName, byte[] content, OutputStream outputStream)
|
||||
throws IOException;
|
||||
|
||||
void writeRawField(String fieldName, byte[] content, int offset, int length, OutputStream outputStream)
|
||||
throws IOException;
|
||||
|
||||
void writeRawField(String fieldName, BytesReference content, OutputStream outputStream)
|
||||
throws IOException;
|
||||
|
||||
void writeValue(XContentBuilder builder) throws IOException;
|
||||
|
||||
void copy(XContentBuilder builder, OutputStream outputStream) throws IOException;
|
||||
|
||||
void copyCurrentStructure(XContentParser parser) throws IOException;
|
||||
|
||||
void flush() throws IOException;
|
||||
|
||||
void close() throws IOException;
|
||||
}
|
252
content-core/src/main/java/org/xbib/content/XContentHelper.java
Normal file
|
@ -0,0 +1,252 @@
|
|||
package org.xbib.content;
|
||||
|
||||
import org.xbib.content.io.BytesReference;
|
||||
import org.xbib.content.json.JsonXContent;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.Reader;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.Map;
|
||||
import java.util.logging.Level;
|
||||
import java.util.logging.Logger;
|
||||
|
||||
/**
|
||||
* Static helpers for creating parsers and converting structured content to maps and JSON.
|
||||
*/
|
||||
public class XContentHelper {
|
||||
|
||||
private static final Logger logger = Logger.getLogger(XContentHelper.class.getName());
|
||||
|
||||
private static final String UNKNOWN_FORMAT = "unknown format";
|
||||
|
||||
private XContentHelper() {
|
||||
}
|
||||
|
||||
public static XContentParser createParser(BytesReference bytes) throws IOException {
|
||||
XContent content = XContentService.xContent(bytes);
|
||||
if (content == null) {
|
||||
throw new IOException(UNKNOWN_FORMAT);
|
||||
}
|
||||
return content.createParser(bytes.streamInput());
|
||||
}
|
||||
|
||||
public static XContentParser createParser(byte[] data, int offset, int length) throws IOException {
|
||||
return XContentService.xContent(data, offset, length).createParser(data, offset, length);
|
||||
}
|
||||
|
||||
public static Map<String, Object> convertFromJsonToMap(Reader reader) {
|
||||
try {
|
||||
return JsonXContent.jsonContent().createParser(reader).mapOrderedAndClose();
|
||||
} catch (IOException e) {
|
||||
throw new IllegalArgumentException("failed to parse content to map", e);
|
||||
}
|
||||
}
|
||||
|
||||
public static Map<String, Object> convertToMap(String data) {
|
||||
try {
|
||||
XContent content = XContentService.xContent(data);
|
||||
return content.createParser(data).mapOrderedAndClose();
|
||||
} catch (IOException e) {
|
||||
throw new IllegalArgumentException("failed to parse content to map", e);
|
||||
}
|
||||
}
|
||||
|
||||
public static Map<String, Object> convertToMap(BytesReference bytes, boolean ordered) {
|
||||
XContent content = XContentService.xContent(bytes);
|
||||
if (content == null) {
|
||||
throw new IllegalArgumentException(UNKNOWN_FORMAT);
|
||||
}
|
||||
try {
|
||||
XContentParser parser = content.createParser(bytes.streamInput());
|
||||
if (ordered) {
|
||||
return parser.mapOrderedAndClose();
|
||||
} else {
|
||||
return parser.mapAndClose();
|
||||
}
|
||||
} catch (IOException e) {
|
||||
throw new IllegalArgumentException("failed to parse content to map", e);
|
||||
}
|
||||
}
|
||||
|
||||
public static Map<String, Object> convertToMap(byte[] data, boolean ordered) throws IOException {
|
||||
return convertToMap(data, 0, data.length, ordered);
|
||||
}
|
||||
|
||||
public static Map<String, Object> convertToMap(byte[] data, int offset, int length, boolean ordered) throws IOException {
|
||||
XContent content = XContentService.xContent(data, offset, length);
|
||||
XContentParser parser = content.createParser(data, offset, length);
|
||||
if (ordered) {
|
||||
return parser.mapOrderedAndClose();
|
||||
} else {
|
||||
return parser.mapAndClose();
|
||||
}
|
||||
}
|
||||
|
||||
public static String convertToJson(BytesReference bytes, boolean reformatJson, boolean prettyPrint) throws IOException {
|
||||
XContent xContent = XContentService.xContent(bytes);
|
||||
if (xContent == null) {
|
||||
throw new IOException(UNKNOWN_FORMAT);
|
||||
}
|
||||
if (xContent == JsonXContent.jsonContent() && !reformatJson) {
|
||||
return bytes.toUtf8();
|
||||
}
|
||||
try (XContentParser parser = xContent.createParser(bytes.streamInput());
|
||||
XContentBuilder builder = XContentBuilder.builder(JsonXContent.jsonContent())) {
|
||||
parser.nextToken();
|
||||
if (prettyPrint) {
|
||||
builder.prettyPrint();
|
||||
}
|
||||
builder.copyCurrentStructure(parser);
|
||||
return builder.string();
|
||||
}
|
||||
}
|
||||
|
||||
public static String convertToJson(byte[] data, int offset, int length, boolean reformatJson, boolean prettyPrint)
|
||||
throws IOException {
|
||||
XContent xContent = XContentService.xContent(data, offset, length);
|
||||
if (xContent == JsonXContent.jsonContent() && !reformatJson) {
|
||||
return new String(data, offset, length, StandardCharsets.UTF_8);
|
||||
}
|
||||
try (XContentParser parser = xContent.createParser(data, offset, length);
|
||||
XContentBuilder builder = XContentBuilder.builder(JsonXContent.jsonContent())) {
|
||||
parser.nextToken();
|
||||
if (prettyPrint) {
|
||||
builder.prettyPrint();
|
||||
}
|
||||
builder.copyCurrentStructure(parser);
|
||||
return builder.string();
|
||||
}
|
||||
}
|
||||
|
||||
public static void copyCurrentStructure(XContentGenerator generator, XContentParser parser) throws IOException {
|
||||
XContentParser.Token t = parser.currentToken();
|
||||
if (t == XContentParser.Token.FIELD_NAME) {
|
||||
generator.writeFieldName(parser.currentName());
|
||||
t = parser.nextToken();
|
||||
}
|
||||
switch (t) {
|
||||
case START_ARRAY:
|
||||
generator.writeStartArray();
|
||||
while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
|
||||
copyCurrentStructure(generator, parser);
|
||||
}
|
||||
generator.writeEndArray();
|
||||
break;
|
||||
case START_OBJECT:
|
||||
generator.writeStartObject();
|
||||
while (parser.nextToken() != XContentParser.Token.END_OBJECT) {
|
||||
copyCurrentStructure(generator, parser);
|
||||
}
|
||||
generator.writeEndObject();
|
||||
break;
|
||||
default:
|
||||
copyCurrentEvent(generator, parser);
|
||||
}
|
||||
}
|
||||
|
||||
private static void copyCurrentEvent(XContentGenerator generator, XContentParser parser) throws IOException {
|
||||
switch (parser.currentToken()) {
|
||||
case START_OBJECT:
|
||||
generator.writeStartObject();
|
||||
break;
|
||||
case END_OBJECT:
|
||||
generator.writeEndObject();
|
||||
break;
|
||||
case START_ARRAY:
|
||||
generator.writeStartArray();
|
||||
break;
|
||||
case END_ARRAY:
|
||||
generator.writeEndArray();
|
||||
break;
|
||||
case FIELD_NAME:
|
||||
generator.writeFieldName(parser.currentName());
|
||||
break;
|
||||
case VALUE_STRING:
|
||||
if (parser.hasTextCharacters()) {
|
||||
generator.writeString(parser.textCharacters(), parser.textOffset(), parser.textLength());
|
||||
} else {
|
||||
if (parser.isBase16Checks()) {
|
||||
try {
|
||||
generator.writeBinary(parseBase16(parser.text()));
|
||||
} catch (IllegalArgumentException e) {
|
||||
logger.log(Level.FINE, e.getMessage(), e);
|
||||
generator.writeString(parser.text());
|
||||
}
|
||||
} else {
|
||||
generator.writeString(parser.text());
|
||||
}
|
||||
}
|
||||
break;
|
||||
case VALUE_NUMBER:
|
||||
switch (parser.numberType()) {
|
||||
case INT:
|
||||
generator.writeNumber(parser.intValue());
|
||||
break;
|
||||
case LONG:
|
||||
generator.writeNumber(parser.longValue());
|
||||
break;
|
||||
case FLOAT:
|
||||
generator.writeNumber(parser.floatValue());
|
||||
break;
|
||||
case DOUBLE:
|
||||
if (parser.isLosslessDecimals()) {
|
||||
generator.writeNumber(parser.bigDecimalValue());
|
||||
} else {
|
||||
generator.writeNumber(parser.doubleValue());
|
||||
}
|
||||
break;
|
||||
case BIG_INTEGER:
|
||||
generator.writeNumber(parser.bigIntegerValue());
|
||||
break;
|
||||
case BIG_DECIMAL:
|
||||
generator.writeNumber(parser.bigDecimalValue());
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
break;
|
||||
case VALUE_BOOLEAN:
|
||||
generator.writeBoolean(parser.booleanValue());
|
||||
break;
|
||||
case VALUE_NULL:
|
||||
generator.writeNull();
|
||||
break;
|
||||
case VALUE_EMBEDDED_OBJECT:
|
||||
generator.writeBinary(parser.binaryValue());
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
static byte[] parseBase16(String s) {
|
||||
final int len = s.length();
|
||||
if (len % 2 != 0) {
|
||||
throw new IllegalArgumentException("hex string needs to be of even length: " + s);
|
||||
}
|
||||
byte[] out = new byte[len / 2];
|
||||
for (int i = 0; i < len; i += 2) {
|
||||
int h = hexToBin(s.charAt(i));
|
||||
int l = hexToBin(s.charAt(i + 1));
|
||||
if (h == -1 || l == -1) {
|
||||
throw new IllegalArgumentException("contains illegal character for hex string: " + s);
|
||||
}
|
||||
out[i / 2] = (byte) (h * 16 + l);
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
private static int hexToBin(char ch) {
|
||||
if ('0' <= ch && ch <= '9') {
|
||||
return ch - '0';
|
||||
}
|
||||
if ('A' <= ch && ch <= 'F') {
|
||||
return ch - 'A' + 10;
|
||||
}
|
||||
if ('a' <= ch && ch <= 'f') {
|
||||
return ch - 'a' + 10;
|
||||
}
|
||||
return -1;
|
||||
}
|
||||
|
||||
}
|
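A minimal usage sketch of the helpers above (hypothetical; it assumes JsonXContent is registered with XContentService through the service loader, so format detection succeeds):

import org.xbib.content.XContentHelper;

import java.nio.charset.StandardCharsets;
import java.util.Map;

public class XContentHelperExample {
    public static void main(String[] args) throws Exception {
        String json = "{\"a\":1,\"b\":{\"c\":true}}";
        // parse the JSON into an ordered map
        Map<String, Object> map = XContentHelper.convertToMap(json);
        // re-serialize the same bytes with reformatting and pretty printing enabled
        byte[] bytes = json.getBytes(StandardCharsets.UTF_8);
        String pretty = XContentHelper.convertToJson(bytes, 0, bytes.length, true, true);
        System.out.println(map);
        System.out.println(pretty);
    }
}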
167
content-core/src/main/java/org/xbib/content/XContentParser.java
Normal file
|
@ -0,0 +1,167 @@
|
|||
package org.xbib.content;
|
||||
|
||||
import java.io.Closeable;
|
||||
import java.io.IOException;
|
||||
import java.math.BigDecimal;
|
||||
import java.math.BigInteger;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* A streaming pull parser for structured content.
|
||||
*/
|
||||
public interface XContentParser extends Closeable {
|
||||
|
||||
XContent content();
|
||||
|
||||
Token nextToken() throws IOException;
|
||||
|
||||
void skipChildren() throws IOException;
|
||||
|
||||
Token currentToken();
|
||||
|
||||
String currentName() throws IOException;
|
||||
|
||||
Map<String, Object> map() throws IOException;
|
||||
|
||||
Map<String, Object> mapOrdered() throws IOException;
|
||||
|
||||
Map<String, Object> mapAndClose() throws IOException;
|
||||
|
||||
Map<String, Object> mapOrderedAndClose() throws IOException;
|
||||
|
||||
String text() throws IOException;
|
||||
|
||||
String textOrNull() throws IOException;
|
||||
|
||||
boolean hasTextCharacters();
|
||||
|
||||
char[] textCharacters() throws IOException;
|
||||
|
||||
int textLength() throws IOException;
|
||||
|
||||
int textOffset() throws IOException;
|
||||
|
||||
Number numberValue() throws IOException;
|
||||
|
||||
NumberType numberType() throws IOException;
|
||||
|
||||
/**
|
||||
* Indicates whether the number type is estimated or not (i.e. an int might actually be a long;
|
||||
* it is just small enough to fit into an int).
|
||||
* @return true if number is estimated
|
||||
*/
|
||||
boolean estimatedNumberType();
|
||||
|
||||
short shortValue() throws IOException;
|
||||
|
||||
int intValue() throws IOException;
|
||||
|
||||
long longValue() throws IOException;
|
||||
|
||||
float floatValue() throws IOException;
|
||||
|
||||
double doubleValue() throws IOException;
|
||||
|
||||
XContentParser losslessDecimals(boolean b);
|
||||
|
||||
boolean isLosslessDecimals();
|
||||
|
||||
BigInteger bigIntegerValue() throws IOException;
|
||||
|
||||
BigDecimal bigDecimalValue() throws IOException;
|
||||
|
||||
boolean isBooleanValue() throws IOException;
|
||||
|
||||
boolean booleanValue() throws IOException;
|
||||
|
||||
byte[] binaryValue() throws IOException;
|
||||
|
||||
XContentParser enableBase16Checks(boolean b);
|
||||
|
||||
boolean isBase16Checks();
|
||||
|
||||
/**
|
||||
* The token types emitted by the parser.
|
||||
*/
|
||||
enum Token {
|
||||
START_OBJECT {
|
||||
@Override
|
||||
public boolean isValue() {
|
||||
return false;
|
||||
}
|
||||
},
|
||||
|
||||
END_OBJECT {
|
||||
@Override
|
||||
public boolean isValue() {
|
||||
return false;
|
||||
}
|
||||
},
|
||||
|
||||
START_ARRAY {
|
||||
@Override
|
||||
public boolean isValue() {
|
||||
return false;
|
||||
}
|
||||
},
|
||||
|
||||
END_ARRAY {
|
||||
@Override
|
||||
public boolean isValue() {
|
||||
return false;
|
||||
}
|
||||
},
|
||||
|
||||
FIELD_NAME {
|
||||
@Override
|
||||
public boolean isValue() {
|
||||
return false;
|
||||
}
|
||||
},
|
||||
|
||||
VALUE_STRING {
|
||||
@Override
|
||||
public boolean isValue() {
|
||||
return true;
|
||||
}
|
||||
},
|
||||
|
||||
VALUE_NUMBER {
|
||||
@Override
|
||||
public boolean isValue() {
|
||||
return true;
|
||||
}
|
||||
},
|
||||
|
||||
VALUE_BOOLEAN {
|
||||
@Override
|
||||
public boolean isValue() {
|
||||
return true;
|
||||
}
|
||||
},
|
||||
|
||||
// usually a binary value
|
||||
VALUE_EMBEDDED_OBJECT {
|
||||
@Override
|
||||
public boolean isValue() {
|
||||
return true;
|
||||
}
|
||||
},
|
||||
|
||||
VALUE_NULL {
|
||||
@Override
|
||||
public boolean isValue() {
|
||||
return false;
|
||||
}
|
||||
};
|
||||
|
||||
public abstract boolean isValue();
|
||||
}
|
||||
|
||||
/**
|
||||
* The number types reported by {@link #numberType()}.
|
||||
*/
|
||||
enum NumberType {
|
||||
INT, LONG, FLOAT, DOUBLE, BIG_DECIMAL, BIG_INTEGER
|
||||
}
|
||||
}
|
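A minimal pull-parsing sketch against this interface (hypothetical; it assumes nextToken() returns null at the end of input, as the Jackson-backed parser in this module does):

import org.xbib.content.XContentParser;

import java.io.IOException;

public final class TokenWalker {

    private TokenWalker() {
    }

    public static void walk(XContentParser parser) throws IOException {
        XContentParser.Token token;
        while ((token = parser.nextToken()) != null) {
            if (token == XContentParser.Token.FIELD_NAME) {
                System.out.println("field: " + parser.currentName());
            } else if (token.isValue()) {
                // text() yields a string view of scalar values
                System.out.println("value: " + parser.text());
            }
        }
    }
}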
|
@ -0,0 +1,63 @@
|
|||
package org.xbib.content;
|
||||
|
||||
import org.xbib.content.io.BytesArray;
|
||||
import org.xbib.content.io.BytesReference;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.ServiceLoader;
|
||||
import java.util.logging.Level;
|
||||
import java.util.logging.Logger;
|
||||
|
||||
/**
|
||||
* A registry of {@link XContent} implementations discovered via {@link java.util.ServiceLoader}.
|
||||
*/
|
||||
public class XContentService {
|
||||
|
||||
private static final Logger logger = Logger.getLogger(XContentService.class.getName());
|
||||
|
||||
private static final Map<String, XContent> xcontents = new HashMap<>();
|
||||
|
||||
private static final XContentService instance = new XContentService();
|
||||
|
||||
private XContentService() {
|
||||
try {
|
||||
ServiceLoader<XContent> loader = ServiceLoader.load(XContent.class);
|
||||
for (XContent xContent : loader) {
|
||||
if (!xcontents.containsKey(xContent.name())) {
|
||||
xcontents.put(xContent.name(), xContent);
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
logger.log(Level.SEVERE, e.getMessage(), e);
|
||||
}
|
||||
}
|
||||
|
||||
public static XContentService getInstance() {
|
||||
return instance;
|
||||
}
|
||||
|
||||
public static XContentBuilder builder(String name) throws IOException {
|
||||
return xcontents.containsKey(name) ? XContentBuilder.builder(xcontents.get(name)) : null;
|
||||
}
|
||||
|
||||
public static XContent xContent(byte[] data, int offset, int length) {
|
||||
return xContent(new BytesArray(data, offset, length));
|
||||
}
|
||||
|
||||
public static XContent xContent(String charSequence) {
|
||||
return xContent(new BytesArray(charSequence.getBytes(StandardCharsets.UTF_8)));
|
||||
}
|
||||
|
||||
public static XContent xContent(BytesReference bytes) {
|
||||
for (XContent xcontent : xcontents.values()) {
|
||||
if (xcontent.isXContent(bytes)) {
|
||||
return xcontent;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
}
|
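A minimal sketch of content detection and lookup through the service (hypothetical; it assumes JsonXContent is registered under META-INF/services, and that builders obtained by name write to a BytesStreamOutput so that string() applies):

import org.xbib.content.XContent;
import org.xbib.content.XContentBuilder;
import org.xbib.content.XContentService;

import java.nio.charset.StandardCharsets;

public class XContentServiceExample {
    public static void main(String[] args) throws Exception {
        byte[] data = "{\"hello\":\"world\"}".getBytes(StandardCharsets.UTF_8);
        // sniff the format of the raw bytes
        XContent content = XContentService.xContent(data, 0, data.length);
        System.out.println(content != null ? content.name() : "unknown format");
        // look up a builder by content name; null if nothing is registered under that name
        XContentBuilder builder = XContentService.builder("json");
        if (builder != null) {
            builder.startObject().field("ok", true).endObject();
            System.out.println(builder.string());
        }
    }
}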
|
@ -0,0 +1,15 @@
|
|||
package org.xbib.content;
|
||||
|
||||
import com.fasterxml.jackson.core.io.SerializedString;
|
||||
|
||||
/**
|
||||
* A pre-serialized content string, based on Jackson's {@link SerializedString}.
|
||||
*/
|
||||
public class XContentString extends SerializedString {
|
||||
|
||||
private static final long serialVersionUID = -127711532459894341L;
|
||||
|
||||
public XContentString(String v) {
|
||||
super(v);
|
||||
}
|
||||
}
|
101
content-core/src/main/java/org/xbib/content/io/BytesArray.java
Normal file
|
@ -0,0 +1,101 @@
|
|||
package org.xbib.content.io;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.OutputStream;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.Arrays;
|
||||
|
||||
/**
|
||||
* A byte array, wrapped in a {@link BytesReference}.
|
||||
*/
|
||||
public class BytesArray implements BytesReference {
|
||||
|
||||
private static final String EMPTY_STRING = "";
|
||||
|
||||
private byte[] bytes;
|
||||
|
||||
private int offset;
|
||||
|
||||
private int length;
|
||||
|
||||
/**
|
||||
* Create {@link BytesArray} from a byte array.
|
||||
* @param bytes the byte array
|
||||
*/
|
||||
public BytesArray(byte[] bytes) {
|
||||
this.bytes = bytes;
|
||||
this.offset = 0;
|
||||
this.length = bytes.length;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create {@link BytesArray} from a part of a byte array.
|
||||
* @param bytes the byte array
|
||||
* @param offset the offset
|
||||
* @param length the length
|
||||
*/
|
||||
public BytesArray(byte[] bytes, int offset, int length) {
|
||||
this.bytes = bytes;
|
||||
this.offset = offset;
|
||||
this.length = length;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public byte get(int index) {
|
||||
return bytes[offset + index];
|
||||
}
|
||||
|
||||
@Override
|
||||
public int length() {
|
||||
return length;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int indexOf(byte b, int offset, int len) {
|
||||
if (offset < 0 || (offset + len) > this.length) {
|
||||
throw new IllegalArgumentException();
|
||||
}
|
||||
for (int i = offset; i < offset + len; i++) {
|
||||
if (bytes[i] == b) {
|
||||
return i;
|
||||
}
|
||||
}
|
||||
return -1;
|
||||
}
|
||||
|
||||
@Override
|
||||
public BytesReference slice(int from, int length) {
|
||||
if (from < 0 || (from + length) > this.length) {
|
||||
throw new IllegalArgumentException("can't slice a buffer with length [" + this.length +
|
||||
"], with slice parameters from [" + from + "], length [" + length + "]");
|
||||
}
|
||||
return new BytesArray(bytes, offset + from, length);
|
||||
}
|
||||
|
||||
@Override
|
||||
public byte[] toBytes() {
|
||||
if (offset == 0 && bytes.length == length) {
|
||||
return bytes;
|
||||
}
|
||||
return Arrays.copyOfRange(bytes, offset, offset + length);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toUtf8() {
|
||||
if (length == 0) {
|
||||
return EMPTY_STRING;
|
||||
}
|
||||
return new String(bytes, offset, length, StandardCharsets.UTF_8);
|
||||
}
|
||||
|
||||
@Override
|
||||
public BytesStreamInput streamInput() {
|
||||
return new BytesStreamInput(bytes, offset, length);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void streamOutput(OutputStream os) throws IOException {
|
||||
os.write(bytes, offset, length);
|
||||
}
|
||||
}
|
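A minimal sketch of the wrapper in use (hypothetical):

import org.xbib.content.io.BytesArray;
import org.xbib.content.io.BytesReference;

import java.nio.charset.StandardCharsets;

public class BytesArrayExample {
    public static void main(String[] args) {
        BytesReference ref = new BytesArray("hello,world".getBytes(StandardCharsets.UTF_8));
        // locate the separator, then slice off everything after it
        int comma = ref.indexOf((byte) ',', 0, ref.length());
        BytesReference tail = ref.slice(comma + 1, ref.length() - comma - 1);
        System.out.println(tail.toUtf8()); // prints "world"
    }
}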
|
@ -0,0 +1,61 @@
|
|||
package org.xbib.content.io;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.OutputStream;
|
||||
|
||||
/**
|
||||
* A reference to bytes.
|
||||
*/
|
||||
public interface BytesReference {
|
||||
|
||||
/**
|
||||
* Returns the byte at the specified index. Need to be between 0 and length.
|
||||
*
|
||||
* @param index index
|
||||
* @return byte at specified index
|
||||
*/
|
||||
byte get(int index);
|
||||
|
||||
/**
|
||||
* The length.
|
||||
*
|
||||
* @return length
|
||||
*/
|
||||
int length();
|
||||
|
||||
/**
|
||||
* Find the index of a given byte, in the given area.
|
||||
* @param b the byte
|
||||
* @param offset offset
|
||||
* @param len len
|
||||
* @return -1 if not found, otherwise the position, counting from offset
|
||||
*/
|
||||
int indexOf(byte b, int offset, int len);
|
||||
|
||||
/**
|
||||
* Slice the bytes from the <tt>from</tt> index up to <tt>length</tt>.
|
||||
*
|
||||
* @param from from
|
||||
* @param length length
|
||||
* @return bytes reference
|
||||
*/
|
||||
BytesReference slice(int from, int length);
|
||||
|
||||
/**
|
||||
* Returns the bytes as a single byte array.
|
||||
*
|
||||
* @return bytes
|
||||
*/
|
||||
byte[] toBytes();
|
||||
|
||||
/**
|
||||
* Converts to a string based on utf8.
|
||||
*
|
||||
* @return UTF-8 encoded string
|
||||
*/
|
||||
String toUtf8();
|
||||
|
||||
BytesStreamInput streamInput();
|
||||
|
||||
void streamOutput(OutputStream outputStream) throws IOException;
|
||||
}
|
|
@ -0,0 +1,72 @@
|
|||
package org.xbib.content.io;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
|
||||
/**
|
||||
* An {@link InputStream} over a byte array, supporting skip and reset.
|
||||
*/
|
||||
public class BytesStreamInput extends InputStream {
|
||||
|
||||
private byte[] buf;
|
||||
private int pos;
|
||||
private int count;
|
||||
|
||||
public BytesStreamInput(byte[] buf) {
|
||||
this(buf, 0, buf.length);
|
||||
}
|
||||
|
||||
public BytesStreamInput(byte[] buf, int offset, int length) {
|
||||
this.buf = buf;
|
||||
this.pos = offset;
|
||||
this.count = Math.min(offset + length, buf.length);
|
||||
}
|
||||
|
||||
@Override
|
||||
public long skip(long n) throws IOException {
|
||||
long res = n;
|
||||
if (pos + res > count) {
|
||||
res = (long) count - pos;
|
||||
}
|
||||
if (res < 0) {
|
||||
return 0;
|
||||
}
|
||||
pos += res;
|
||||
return res;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int read() throws IOException {
|
||||
return pos < count ? buf[pos++] & 0xff : -1;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int read(byte[] b, int off, int len) throws IOException {
|
||||
if (off < 0 || len < 0 || len > b.length - off) {
|
||||
throw new IndexOutOfBoundsException();
|
||||
}
|
||||
if (pos >= count) {
|
||||
return -1;
|
||||
}
|
||||
int l = len;
|
||||
if (pos + l > count) {
|
||||
l = count - pos;
|
||||
}
|
||||
if (l <= 0) {
|
||||
return 0;
|
||||
}
|
||||
System.arraycopy(buf, pos, b, off, l);
|
||||
pos += l;
|
||||
return l;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void reset() throws IOException {
|
||||
pos = 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() throws IOException {
|
||||
// nothing to do
|
||||
}
|
||||
}
|
|
@ -0,0 +1,207 @@
|
|||
package org.xbib.content.io;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.OutputStream;
|
||||
import java.util.Arrays;
|
||||
|
||||
/**
|
||||
* A growable stream of bytes, with random access methods.
|
||||
*/
|
||||
public class BytesStreamOutput extends OutputStream {
|
||||
|
||||
/**
|
||||
* Small default buffer size, to reduce heap pressure.
|
||||
*/
|
||||
private static final int DEFAULT_BUFFER_SIZE = 1024;
|
||||
|
||||
private static final boolean JRE_IS_64BIT;
|
||||
|
||||
static {
|
||||
String oaarch = System.getProperty("os.arch");
|
||||
String sunarch = System.getProperty("sun.arch.data.model");
|
||||
JRE_IS_64BIT = sunarch != null ? sunarch.contains("64") :
|
||||
oaarch != null && oaarch.contains("64");
|
||||
}
|
||||
|
||||
/**
|
||||
* The buffer where data is stored.
|
||||
*/
|
||||
private byte[] buf;
|
||||
|
||||
/**
|
||||
* The number of valid bytes in the buffer.
|
||||
*/
|
||||
private int count;
|
||||
|
||||
/**
|
||||
* Create a new {@code BytesStreamOutput} with default buffer size.
|
||||
*/
|
||||
public BytesStreamOutput() {
|
||||
this(DEFAULT_BUFFER_SIZE);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new {@code BytesStreamOutput} with given buffer size.
|
||||
* @param size size
|
||||
*/
|
||||
public BytesStreamOutput(int size) {
|
||||
this.buf = new byte[size];
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the position in the stream.
|
||||
* @return the position
|
||||
*/
|
||||
public long position() {
|
||||
return count;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set to new position in stream. Must be in the current buffer.
|
||||
* @param position the new position.
|
||||
*/
|
||||
public void seek(long position) {
|
||||
if (position > Integer.MAX_VALUE) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
count = (int) position;
|
||||
}
|
||||
|
||||
/**
|
||||
* Write an integer.
|
||||
*
|
||||
* @param b int
|
||||
* @throws IOException if write fails
|
||||
*/
|
||||
@Override
|
||||
public void write(int b) throws IOException {
|
||||
int newcount = count + 1;
|
||||
if (newcount > buf.length) {
|
||||
buf = Arrays.copyOf(buf, oversize(newcount));
|
||||
}
|
||||
buf[count] = (byte) b;
|
||||
count = newcount;
|
||||
}
|
||||
|
||||
/**
|
||||
* Write byte array.
|
||||
*
|
||||
* @param b byte array
|
||||
* @param offset offset
|
||||
* @param length length
|
||||
* @throws IOException if write fails
|
||||
*/
|
||||
@Override
|
||||
public void write(byte[] b, int offset, int length) throws IOException {
|
||||
if (length == 0) {
|
||||
return;
|
||||
}
|
||||
int newcount = count + length;
|
||||
if (newcount > buf.length) {
|
||||
buf = Arrays.copyOf(buf, oversize(newcount));
|
||||
}
|
||||
System.arraycopy(b, offset, buf, count, length);
|
||||
count = newcount;
|
||||
}
|
||||
|
||||
/**
|
||||
* Skip a number of bytes.
|
||||
* @param length the number of bytes to skip.
|
||||
*/
|
||||
public void skip(int length) {
|
||||
int newcount = count + length;
|
||||
if (newcount > buf.length) {
|
||||
buf = Arrays.copyOf(buf, oversize(newcount));
|
||||
}
|
||||
count = newcount;
|
||||
}
|
||||
|
||||
/**
|
||||
* Seek to absolute position. Must be in buffer.
|
||||
* @param pos the position.
|
||||
*/
|
||||
public void seek(int pos) {
|
||||
count = pos;
|
||||
}
|
||||
|
||||
public void reset() {
|
||||
count = 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void flush() throws IOException {
|
||||
// nothing to do there
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() throws IOException {
|
||||
// nothing to do here
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a {@link BytesReference} to the buffer of this output stream.
|
||||
* @return the bytes reference
|
||||
*/
|
||||
public BytesReference bytes() {
|
||||
return new BytesArray(buf, 0, count);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the current size of the buffer.
|
||||
*
|
||||
* @return the value of the <code>count</code> field, which is the number
|
||||
* of valid bytes in this output stream.
|
||||
* @see java.io.ByteArrayOutputStream#count
|
||||
*/
|
||||
public int size() {
|
||||
return count;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an array size >= minTargetSize, generally
|
||||
* over-allocating exponentially to achieve amortized
|
||||
* linear-time cost as the array grows.
|
||||
* NOTE: this was originally borrowed from Python 2.4.2
|
||||
* listobject.c sources (attribution in LICENSE.txt), but
|
||||
* has now been substantially changed based on
|
||||
* discussions from java-dev thread with subject "Dynamic
|
||||
* array reallocation algorithms", started on Jan 12
|
||||
* 2010.
|
||||
*
|
||||
* @param minTargetSize Minimum required value to be returned.
|
||||
* @return int
|
||||
*/
|
||||
private static int oversize(int minTargetSize) {
|
||||
if (minTargetSize < 0) {
|
||||
// catch usage that accidentally overflows int
|
||||
throw new IllegalArgumentException("invalid array size " + minTargetSize);
|
||||
}
|
||||
if (minTargetSize == 0) {
|
||||
// wait until at least one element is requested
|
||||
return 0;
|
||||
}
|
||||
// asymptotic exponential growth by 1/8th, favors
|
||||
// spending a bit more CPU to not tie up too much wasted
|
||||
// RAM:
|
||||
int extra = minTargetSize >> 3;
|
||||
if (extra < 3) {
|
||||
// for very small arrays, where constant overhead of
|
||||
// realloc is presumably relatively high, we grow
|
||||
// faster
|
||||
extra = 3;
|
||||
}
|
||||
int newSize = minTargetSize + extra;
|
||||
// add 7 to allow for worst case byte alignment addition below:
|
||||
if (newSize + 7 < 0) {
|
||||
// int overflowed -- return max allowed array size
|
||||
return Integer.MAX_VALUE;
|
||||
}
|
||||
if (JRE_IS_64BIT) {
|
||||
// round up to multiple of 8
|
||||
return (newSize + 7) & 0x7ffffff8;
|
||||
} else {
|
||||
// round up to multiple of 4
|
||||
return (newSize + 3) & 0x7ffffffc;
|
||||
}
|
||||
}
|
||||
}
|
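A minimal sketch of the growable buffer in use (hypothetical):

import org.xbib.content.io.BytesReference;
import org.xbib.content.io.BytesStreamOutput;

import java.nio.charset.StandardCharsets;

public class BytesStreamOutputExample {
    public static void main(String[] args) throws Exception {
        // start small; the buffer grows via oversize() as more bytes are written
        BytesStreamOutput out = new BytesStreamOutput(8);
        out.write("hello ".getBytes(StandardCharsets.UTF_8));
        out.write("content".getBytes(StandardCharsets.UTF_8));
        // bytes() exposes the valid region of the internal buffer without copying
        BytesReference bytes = out.bytes();
        System.out.println(bytes.toUtf8() + " (" + out.size() + " bytes)");
    }
}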
|
@ -0,0 +1,4 @@
|
|||
/**
|
||||
* Classes for input/output with content.
|
||||
*/
|
||||
package org.xbib.content.io;
|
|
@ -0,0 +1,32 @@
|
|||
package org.xbib.content.json;
|
||||
|
||||
import org.xbib.content.XContent;
|
||||
import org.xbib.content.settings.AbstractSettingsLoader;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.Set;
|
||||
|
||||
/**
|
||||
* Settings loader that loads (parses) settings in JSON format by flattening them
|
||||
* into a map.
|
||||
*/
|
||||
public class JsonSettingsLoader extends AbstractSettingsLoader {
|
||||
|
||||
private static final Set<String> JSON_SUFFIXES = new HashSet<>(Collections.singletonList("json"));
|
||||
|
||||
@Override
|
||||
public XContent content() {
|
||||
return JsonXContent.jsonContent();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Set<String> suffixes() {
|
||||
return JSON_SUFFIXES;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean canLoad(String source) {
|
||||
return source.indexOf('{') != -1 && source.indexOf('}') != -1;
|
||||
}
|
||||
}
|
|
@@ -0,0 +1,115 @@
package org.xbib.content.json;

import com.fasterxml.jackson.core.JsonEncoding;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonParser;
import org.xbib.content.XContent;
import org.xbib.content.XContentBuilder;
import org.xbib.content.XContentGenerator;
import org.xbib.content.XContentParser;
import org.xbib.content.io.BytesReference;
import org.xbib.content.io.BytesStreamInput;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.Reader;
import java.io.Writer;
import java.nio.charset.StandardCharsets;

/**
 * A JSON content implementation using Jackson.
 */
public class JsonXContent implements XContent {

    private static final JsonXContent jsonXContent;
    private static final JsonFactory jsonFactory;

    static {
        jsonFactory = new JsonFactory();
        jsonFactory.configure(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES, true);
        jsonFactory.configure(JsonGenerator.Feature.QUOTE_FIELD_NAMES, true);
        jsonXContent = new JsonXContent();
    }

    public JsonXContent() {
        // nothing to do
    }

    public static JsonXContent jsonContent() {
        return jsonXContent;
    }

    public static XContentBuilder contentBuilder() throws IOException {
        return XContentBuilder.builder(jsonXContent);
    }

    public static XContentBuilder contentBuilder(OutputStream outputStream) throws IOException {
        return XContentBuilder.builder(jsonXContent, outputStream);
    }

    @Override
    public String name() {
        return "json";
    }

    @Override
    public XContentGenerator createGenerator(OutputStream outputStream) throws IOException {
        return new JsonXContentGenerator(jsonFactory.createGenerator(outputStream, JsonEncoding.UTF8));
    }

    @Override
    public XContentGenerator createGenerator(Writer writer) throws IOException {
        return new JsonXContentGenerator(jsonFactory.createGenerator(writer));
    }

    @Override
    public XContentParser createParser(String content) throws IOException {
        return new JsonXContentParser(jsonFactory
                .createParser(new BytesStreamInput(content.getBytes(StandardCharsets.UTF_8))));
    }

    @Override
    public XContentParser createParser(InputStream inputStream) throws IOException {
        return new JsonXContentParser(jsonFactory.createParser(inputStream));
    }

    @Override
    public XContentParser createParser(byte[] data) throws IOException {
        return new JsonXContentParser(jsonFactory.createParser(data));
    }

    @Override
    public XContentParser createParser(byte[] data, int offset, int length) throws IOException {
        return new JsonXContentParser(jsonFactory.createParser(data, offset, length));
    }

    @Override
    public XContentParser createParser(BytesReference bytes) throws IOException {
        return createParser(bytes.streamInput());
    }

    @Override
    public XContentParser createParser(Reader reader) throws IOException {
        return new JsonXContentParser(jsonFactory.createParser(reader));
    }

    @Override
    public boolean isXContent(BytesReference bytes) {
        int length = bytes.length() < 20 ? bytes.length() : 20;
        if (length == 0) {
            return false;
        }
        byte first = bytes.get(0);
        if (first == '{') {
            return true;
        }
        for (int i = 0; i < length; i++) {
            if (bytes.get(i) == '{') {
                return true;
            }
        }
        return false;
    }
}

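For orientation, a minimal round-trip sketch of the class above. It is illustrative only (the example class and variable names are invented), and it uses only calls that appear elsewhere in this commit: contentBuilder(), map(), string(), jsonContent(), createParser(), nextToken(), currentName() and text().

import java.util.HashMap;
import java.util.Map;
import org.xbib.content.XContentParser;
import org.xbib.content.json.JsonXContent;

public class JsonXContentExample {
    public static void main(String[] args) throws Exception {
        // build a small JSON document from a map, then parse it back token by token
        Map<String, Object> doc = new HashMap<>();
        doc.put("key", "value");
        String json = JsonXContent.contentBuilder().map(doc).string();
        try (XContentParser parser = JsonXContent.jsonContent().createParser(json)) {
            XContentParser.Token token;
            while ((token = parser.nextToken()) != null) {
                if (token == XContentParser.Token.FIELD_NAME) {
                    System.out.print(parser.currentName() + " = ");
                } else if (token == XContentParser.Token.VALUE_STRING) {
                    System.out.println(parser.text());
                }
            }
        }
    }
}
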
@@ -0,0 +1,380 @@
package org.xbib.content.json;

import com.fasterxml.jackson.core.JsonGenerator;
import org.xbib.content.AbstractXContentGenerator;
import org.xbib.content.XContent;
import org.xbib.content.XContentBuilder;
import org.xbib.content.XContentGenerator;
import org.xbib.content.XContentHelper;
import org.xbib.content.XContentParser;
import org.xbib.content.XContentString;
import org.xbib.content.io.BytesReference;

import java.io.IOException;
import java.io.OutputStream;
import java.math.BigDecimal;
import java.math.BigInteger;

/**
 * JSON implementation of {@link XContentGenerator}, delegating to a Jackson {@link JsonGenerator}.
 */
public class JsonXContentGenerator extends AbstractXContentGenerator {

    private final JsonGeneratorDelegate delegate;

    public JsonXContentGenerator(JsonGenerator generator) {
        this.delegate = new JsonGeneratorDelegate(generator);
        super.setGenerator(delegate);
    }

    @Override
    public XContent content() {
        return delegate.content();
    }

    @Override
    public void usePrettyPrint() {
        delegate.usePrettyPrint();
    }

    @Override
    public void writeRawField(String fieldName, byte[] content, OutputStream outputStream) throws IOException {
        delegate.writeRawField(fieldName, content, outputStream);
    }

    @Override
    public void writeRawField(String fieldName, byte[] content, int offset, int length, OutputStream outputStream)
            throws IOException {
        delegate.writeRawField(fieldName, content, offset, length, outputStream);
    }

    @Override
    public void writeRawField(String fieldName, BytesReference content, OutputStream outputStream) throws IOException {
        delegate.writeRawField(fieldName, content, outputStream);
    }

    @Override
    public void writeValue(XContentBuilder builder) throws IOException {
        delegate.writeValue(builder);
    }

    @Override
    public void copyCurrentStructure(XContentParser parser) throws IOException {
        delegate.copyCurrentStructure(parser);
    }

    @Override
    public void flush() throws IOException {
        delegate.flush();
    }

    @Override
    public void close() throws IOException {
        delegate.close();
    }

    private static class JsonGeneratorDelegate implements XContentGenerator {

        final JsonGenerator jsonGenerator;

        JsonGeneratorDelegate(JsonGenerator jsonGenerator) {
            this.jsonGenerator = jsonGenerator;
        }

        @Override
        public XContent content() {
            return JsonXContent.jsonContent();
        }

        @Override
        public void usePrettyPrint() {
            jsonGenerator.useDefaultPrettyPrinter();
        }

        @Override
        public void writeStartArray() throws IOException {
            jsonGenerator.writeStartArray();
        }

        @Override
        public void writeEndArray() throws IOException {
            jsonGenerator.writeEndArray();
        }

        @Override
        public void writeStartObject() throws IOException {
            jsonGenerator.writeStartObject();
        }

        @Override
        public void writeEndObject() throws IOException {
            jsonGenerator.writeEndObject();
        }

        @Override
        public void writeFieldName(String name) throws IOException {
            jsonGenerator.writeFieldName(name);
        }

        @Override
        public void writeFieldName(XContentString name) throws IOException {
            jsonGenerator.writeFieldName(name);
        }

        @Override
        public void writeString(String text) throws IOException {
            jsonGenerator.writeString(text);
        }

        @Override
        public void writeString(char[] text, int offset, int len) throws IOException {
            jsonGenerator.writeString(text, offset, len);
        }

        @Override
        public void writeUTF8String(byte[] text, int offset, int length) throws IOException {
            jsonGenerator.writeUTF8String(text, offset, length);
        }

        @Override
        public void writeBinary(byte[] data, int offset, int len) throws IOException {
            jsonGenerator.writeBinary(data, offset, len);
        }

        @Override
        public void writeBinary(byte[] data) throws IOException {
            jsonGenerator.writeBinary(data);
        }

        @Override
        public void writeNumber(int v) throws IOException {
            jsonGenerator.writeNumber(v);
        }

        @Override
        public void writeNumber(long v) throws IOException {
            jsonGenerator.writeNumber(v);
        }

        @Override
        public void writeNumber(double d) throws IOException {
            jsonGenerator.writeNumber(d);
        }

        @Override
        public void writeNumber(float f) throws IOException {
            jsonGenerator.writeNumber(f);
        }

        @Override
        public void writeNumber(BigInteger bi) throws IOException {
            jsonGenerator.writeNumber(bi);
        }

        @Override
        public void writeNumber(BigDecimal bd) throws IOException {
            jsonGenerator.writeNumber(bd);
        }

        @Override
        public void writeBoolean(boolean b) throws IOException {
            jsonGenerator.writeBoolean(b);
        }

        @Override
        public void writeNull() throws IOException {
            jsonGenerator.writeNull();
        }

        @Override
        public void writeStringField(String fieldName, String value) throws IOException {
            jsonGenerator.writeStringField(fieldName, value);
        }

        @Override
        public void writeStringField(XContentString fieldName, String value) throws IOException {
            jsonGenerator.writeFieldName(fieldName);
            // write the value as well, not only the field name
            jsonGenerator.writeString(value);
        }

        @Override
        public void writeBooleanField(String fieldName, boolean value) throws IOException {
            jsonGenerator.writeBooleanField(fieldName, value);
        }

        @Override
        public void writeBooleanField(XContentString fieldName, boolean value) throws IOException {
            jsonGenerator.writeFieldName(fieldName);
            jsonGenerator.writeBoolean(value);
        }

        @Override
        public void writeNullField(String fieldName) throws IOException {
            jsonGenerator.writeNullField(fieldName);
        }

        @Override
        public void writeNumberField(String fieldName, int value) throws IOException {
            jsonGenerator.writeNumberField(fieldName, value);
        }

        @Override
        public void writeNumberField(XContentString fieldName, int value) throws IOException {
            jsonGenerator.writeFieldName(fieldName);
            jsonGenerator.writeNumber(value);
        }

        @Override
        public void writeNumberField(String fieldName, long value) throws IOException {
            jsonGenerator.writeNumberField(fieldName, value);
        }

        @Override
        public void writeNumberField(XContentString fieldName, long value) throws IOException {
            jsonGenerator.writeFieldName(fieldName);
            jsonGenerator.writeNumber(value);
        }

        @Override
        public void writeNumberField(String fieldName, double value) throws IOException {
            jsonGenerator.writeNumberField(fieldName, value);
        }

        @Override
        public void writeNumberField(XContentString fieldName, double value) throws IOException {
            jsonGenerator.writeFieldName(fieldName);
            jsonGenerator.writeNumber(value);
        }

        @Override
        public void writeNumberField(String fieldName, float value) throws IOException {
            jsonGenerator.writeNumberField(fieldName, value);
        }

        @Override
        public void writeNumberField(XContentString fieldName, float value) throws IOException {
            jsonGenerator.writeFieldName(fieldName);
            jsonGenerator.writeNumber(value);
        }

        @Override
        public void writeNumberField(String fieldName, BigInteger value) throws IOException {
            jsonGenerator.writeFieldName(fieldName);
            jsonGenerator.writeNumber(value);
        }

        @Override
        public void writeNumberField(XContentString fieldName, BigInteger value) throws IOException {
            jsonGenerator.writeFieldName(fieldName);
            jsonGenerator.writeNumber(value);
        }

        @Override
        public void writeNumberField(String fieldName, BigDecimal value) throws IOException {
            jsonGenerator.writeNumberField(fieldName, value);
        }

        @Override
        public void writeNumberField(XContentString fieldName, BigDecimal value) throws IOException {
            jsonGenerator.writeFieldName(fieldName);
            jsonGenerator.writeNumber(value);
        }

        @Override
        public void writeBinaryField(String fieldName, byte[] data) throws IOException {
            jsonGenerator.writeBinaryField(fieldName, data);
        }

        @Override
        public void writeBinaryField(XContentString fieldName, byte[] data) throws IOException {
            jsonGenerator.writeFieldName(fieldName);
            jsonGenerator.writeBinary(data);
        }

        @Override
        public void writeArrayFieldStart(String fieldName) throws IOException {
            jsonGenerator.writeArrayFieldStart(fieldName);
        }

        @Override
        public void writeArrayFieldStart(XContentString fieldName) throws IOException {
            jsonGenerator.writeFieldName(fieldName);
            jsonGenerator.writeStartArray();
        }

        @Override
        public void writeObjectFieldStart(String fieldName) throws IOException {
            jsonGenerator.writeObjectFieldStart(fieldName);
        }

        @Override
        public void writeObjectFieldStart(XContentString fieldName) throws IOException {
            jsonGenerator.writeFieldName(fieldName);
            jsonGenerator.writeStartObject();
        }

        @Override
        public void writeRawField(String fieldName, byte[] content, OutputStream outputStream) throws IOException {
            jsonGenerator.writeRaw(",\"");
            jsonGenerator.writeRaw(fieldName);
            jsonGenerator.writeRaw("\":");
            flush();
            outputStream.write(content);
        }

        @Override
        public void writeRawField(String fieldName, byte[] content, int offset, int length, OutputStream outputStream)
                throws IOException {
            jsonGenerator.writeRaw(",\"");
            jsonGenerator.writeRaw(fieldName);
            jsonGenerator.writeRaw("\":");
            flush();
            outputStream.write(content, offset, length);
        }

        @Override
        public void writeRawField(String fieldName, BytesReference content, OutputStream outputStream) throws IOException {
            jsonGenerator.writeRaw(",\"");
            jsonGenerator.writeRaw(fieldName);
            jsonGenerator.writeRaw("\":");
            flush();
            content.streamOutput(outputStream);
        }

        @Override
        public void writeValue(XContentBuilder builder) throws IOException {
            jsonGenerator.writeRawValue(builder.string());
        }

        @Override
        public void copy(XContentBuilder builder, OutputStream outputStream) throws IOException {
            flush();
            builder.bytes().streamOutput(outputStream);
        }

        @Override
        public void copyCurrentStructure(XContentParser parser) throws IOException {
            if (parser.currentToken() == null) {
                parser.nextToken();
            }
            if (parser instanceof JsonXContentParser) {
                jsonGenerator.copyCurrentStructure(((JsonXContentParser) parser).parser);
            } else {
                XContentHelper.copyCurrentStructure(this, parser);
            }
        }

        @Override
        public void flush() throws IOException {
            jsonGenerator.flush();
        }

        @Override
        public void close() throws IOException {
            if (jsonGenerator.isClosed()) {
                return;
            }
            jsonGenerator.close();
        }
    }
}

@@ -0,0 +1,191 @@
package org.xbib.content.json;

import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;
import org.xbib.content.AbstractXContentParser;
import org.xbib.content.XContent;
import org.xbib.content.XContentParser;

import java.io.IOException;
import java.math.BigDecimal;
import java.math.BigInteger;

/**
 * JSON implementation of {@link XContentParser}, backed by a Jackson {@link JsonParser}.
 */
public class JsonXContentParser extends AbstractXContentParser {

    protected final JsonParser parser;

    public JsonXContentParser(JsonParser parser) {
        this.parser = parser;
    }

    @Override
    public XContent content() {
        return JsonXContent.jsonContent();
    }

    @Override
    public XContentParser.Token nextToken() throws IOException {
        return convertToken(parser.nextToken());
    }

    @Override
    public void skipChildren() throws IOException {
        parser.skipChildren();
    }

    @Override
    public XContentParser.Token currentToken() {
        return convertToken(parser.getCurrentToken());
    }

    @Override
    public XContentParser.NumberType numberType() throws IOException {
        return convertNumberType(parser.getNumberType());
    }

    @Override
    public boolean estimatedNumberType() {
        return true;
    }

    @Override
    public String currentName() throws IOException {
        return parser.getCurrentName();
    }

    @Override
    protected boolean doBooleanValue() throws IOException {
        return parser.getBooleanValue();
    }

    @Override
    public String text() throws IOException {
        return parser.getText();
    }

    @Override
    public boolean hasTextCharacters() {
        return parser.hasTextCharacters();
    }

    @Override
    public char[] textCharacters() throws IOException {
        return parser.getTextCharacters();
    }

    @Override
    public int textLength() throws IOException {
        return parser.getTextLength();
    }

    @Override
    public int textOffset() throws IOException {
        return parser.getTextOffset();
    }

    @Override
    public Number numberValue() throws IOException {
        return parser.getNumberValue();
    }

    @Override
    public BigInteger bigIntegerValue() throws IOException {
        return parser.getBigIntegerValue();
    }

    @Override
    public BigDecimal bigDecimalValue() throws IOException {
        return parser.getDecimalValue();
    }

    @Override
    public short doShortValue() throws IOException {
        return parser.getShortValue();
    }

    @Override
    public int doIntValue() throws IOException {
        return parser.getIntValue();
    }

    @Override
    public long doLongValue() throws IOException {
        return parser.getLongValue();
    }

    @Override
    public float doFloatValue() throws IOException {
        return parser.getFloatValue();
    }

    @Override
    public double doDoubleValue() throws IOException {
        return parser.getDoubleValue();
    }

    @Override
    public byte[] binaryValue() throws IOException {
        return parser.getBinaryValue();
    }

    @Override
    public void close() throws IOException {
        parser.close();
    }

    private NumberType convertNumberType(JsonParser.NumberType numberType) {
        switch (numberType) {
            case INT:
                return NumberType.INT;
            case LONG:
                return NumberType.LONG;
            case FLOAT:
                return NumberType.FLOAT;
            case DOUBLE:
                return NumberType.DOUBLE;
            case BIG_DECIMAL:
                return NumberType.BIG_DECIMAL;
            case BIG_INTEGER:
                return NumberType.BIG_INTEGER;
            default:
                break;
        }
        throw new IllegalStateException("No matching token for number_type [" + numberType + "]");
    }

    private Token convertToken(JsonToken token) {
        if (token == null) {
            return null;
        }
        switch (token) {
            case FIELD_NAME:
                return Token.FIELD_NAME;
            case VALUE_FALSE:
            case VALUE_TRUE:
                return Token.VALUE_BOOLEAN;
            case VALUE_STRING:
                return Token.VALUE_STRING;
            case VALUE_NUMBER_INT:
            case VALUE_NUMBER_FLOAT:
                return Token.VALUE_NUMBER;
            case VALUE_NULL:
                return Token.VALUE_NULL;
            case START_OBJECT:
                return Token.START_OBJECT;
            case END_OBJECT:
                return Token.END_OBJECT;
            case START_ARRAY:
                return Token.START_ARRAY;
            case END_ARRAY:
                return Token.END_ARRAY;
            case VALUE_EMBEDDED_OBJECT:
                return Token.VALUE_EMBEDDED_OBJECT;
            default:
                break;
        }
        throw new IllegalStateException("No matching token for json_token [" + token + "]");
    }
}

@@ -0,0 +1,4 @@
/**
 * Classes for JSON content.
 */
package org.xbib.content.json;

@@ -0,0 +1,4 @@
/**
 * Classes for content parsing and generating.
 */
package org.xbib.content;

@@ -0,0 +1,91 @@
package org.xbib.content.settings;

import org.xbib.content.XContent;
import org.xbib.content.XContentParser;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Settings loader that loads (parses) settings in an XContent format by flattening them
 * into a map.
 */
public abstract class AbstractSettingsLoader implements SettingsLoader {

    public abstract XContent content();

    @Override
    public Map<String, String> load(String source) throws IOException {
        try (XContentParser parser = content().createParser(source)) {
            return load(parser);
        }
    }

    public Map<String, String> load(XContentParser xContentParser) throws IOException {
        StringBuilder sb = new StringBuilder();
        Map<String, String> map = new HashMap<>();
        List<String> path = new ArrayList<>();
        XContentParser.Token token = xContentParser.nextToken();
        if (token == null) {
            return map;
        }
        parseObject(map, sb, path, xContentParser, null);
        return map;
    }

    private void parseObject(Map<String, String> settings, StringBuilder sb, List<String> path,
                             XContentParser parser, String objFieldName) throws IOException {
        if (objFieldName != null) {
            path.add(objFieldName);
        }
        String currentFieldName = null;
        XContentParser.Token token;
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.START_OBJECT) {
                parseObject(settings, sb, path, parser, currentFieldName);
            } else if (token == XContentParser.Token.START_ARRAY) {
                parseArray(settings, sb, path, parser, currentFieldName);
            } else if (token == XContentParser.Token.FIELD_NAME) {
                currentFieldName = parser.currentName();
            } else {
                parseValue(settings, sb, path, parser, currentFieldName);
            }
        }
        if (objFieldName != null) {
            path.remove(path.size() - 1);
        }
    }

    private void parseArray(Map<String, String> settings, StringBuilder sb, List<String> path,
                            XContentParser parser, String name) throws IOException {
        XContentParser.Token token;
        int counter = 0;
        String fieldName = name;
        while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
            if (token == XContentParser.Token.START_OBJECT) {
                parseObject(settings, sb, path, parser, fieldName + '.' + (counter++));
            } else if (token == XContentParser.Token.START_ARRAY) {
                parseArray(settings, sb, path, parser, fieldName + '.' + (counter++));
            } else if (token == XContentParser.Token.FIELD_NAME) {
                fieldName = parser.currentName();
            } else {
                parseValue(settings, sb, path, parser, fieldName + '.' + (counter++));
            }
        }
    }

    private void parseValue(Map<String, String> settings, StringBuilder sb, List<String> path,
                            XContentParser parser, String fieldName) throws IOException {
        sb.setLength(0);
        for (String s : path) {
            sb.append(s).append('.');
        }
        sb.append(fieldName);
        settings.put(sb.toString(), parser.text());
    }
}

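A concrete example of the flattening described in the class comment above, as a fragment only (assuming JsonSettingsLoader, whose tail is shown at the top of this excerpt, is the JSON subclass of this class):

// nested objects become dotted keys, array elements are numbered
Map<String, String> flat = new JsonSettingsLoader()
        .load("{\"index\":{\"name\":\"test\",\"replica\":[\"a\",\"b\"]}}");
// flat now contains: index.name=test, index.replica.0=a, index.replica.1=b
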
@@ -0,0 +1,836 @@
package org.xbib.content.settings;

import static org.xbib.content.util.unit.ByteSizeValue.parseBytesSizeValue;
import static org.xbib.content.util.unit.TimeValue.parseTimeValue;

import org.xbib.content.XContentBuilder;
import org.xbib.content.json.JsonSettingsLoader;
import org.xbib.content.json.JsonXContent;
import org.xbib.content.util.unit.ByteSizeValue;
import org.xbib.content.util.unit.TimeValue;

import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.io.StringWriter;
import java.io.Writer;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.time.DateTimeException;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * A settings container holding a flat map of string keys and values, with typed accessors,
 * prefix and group views, and a {@link Builder} for loading from strings, maps, streams and URLs.
 */
public class Settings {

    private static final Logger logger = Logger.getLogger(Settings.class.getName());

    public static final Settings EMPTY_SETTINGS = new Builder().build();
    public static final String[] EMPTY_ARRAY = new String[0];
    public static final int BUFFER_SIZE = 1024 * 8;

    private final Map<String, String> settings;

    public Settings() {
        this(new HashMap<>());
    }

    public Settings(Map<String, String> settings) {
        this.settings = new HashMap<>(settings);
    }

    public static Settings readSettingsFromMap(Map<String, Object> map) throws IOException {
        Builder builder = new Builder();
        for (Map.Entry<String, Object> entry : map.entrySet()) {
            builder.put(entry.getKey(), entry.getValue() != null ? entry.getValue().toString() : null);
        }
        return builder.build();
    }

    public static void writeSettingsToMap(Settings settings, Map<String, Object> map) throws IOException {
        for (String key : settings.getAsMap().keySet()) {
            map.put(key, settings.get(key));
        }
    }

    /**
     * Returns a builder to be used in order to build settings.
     * @return a builder
     */
    public static Builder settingsBuilder() {
        return new Builder();
    }

    public static String[] splitStringByCommaToArray(final String s) {
        return splitStringToArray(s, ',');
    }

    public static String[] splitStringToArray(final String s, final char c) {
        if (s.length() == 0) {
            return EMPTY_ARRAY;
        }
        final char[] chars = s.toCharArray();
        int count = 1;
        for (final char x : chars) {
            if (x == c) {
                count++;
            }
        }
        final String[] result = new String[count];
        final int len = chars.length;
        int start = 0;
        int pos = 0;
        int i = 0;
        for (; pos < len; pos++) {
            if (chars[pos] == c) {
                int size = pos - start;
                if (size > 0) {
                    result[i++] = new String(chars, start, size);
                }
                start = pos + 1;
            }
        }
        int size = pos - start;
        if (size > 0) {
            result[i++] = new String(chars, start, size);
        }
        if (i != count) {
            String[] result1 = new String[i];
            System.arraycopy(result, 0, result1, 0, i);
            return result1;
        }
        return result;
    }

    public static String copyToString(Reader in) throws IOException {
        StringWriter out = new StringWriter();
        copy(in, out);
        return out.toString();
    }

    public static int copy(final Reader in, final Writer out) throws IOException {
        try (Reader reader = in; Writer writer = out) {
            int byteCount = 0;
            char[] buffer = new char[BUFFER_SIZE];
            int bytesRead;
            while ((bytesRead = reader.read(buffer)) != -1) {
                writer.write(buffer, 0, bytesRead);
                byteCount += bytesRead;
            }
            writer.flush();
            return byteCount;
        }
    }

    public Map<String, String> getAsMap() {
        return this.settings;
    }

    public Map<String, Object> getAsStructuredMap() {
        Map<String, Object> map = new HashMap<>(2);
        for (Map.Entry<String, String> entry : settings.entrySet()) {
            processSetting(map, "", entry.getKey(), entry.getValue());
        }
        for (Map.Entry<String, Object> entry : map.entrySet()) {
            if (entry.getValue() instanceof Map) {
                @SuppressWarnings("unchecked")
                Map<String, Object> valMap = (Map<String, Object>) entry.getValue();
                entry.setValue(convertMapsToArrays(valMap));
            }
        }
        return map;
    }

    public Settings getByPrefix(String prefix) {
        Builder builder = new Builder();
        for (Map.Entry<String, String> entry : settings.entrySet()) {
            if (entry.getKey().startsWith(prefix)) {
                if (entry.getKey().length() < prefix.length()) {
                    continue;
                }
                builder.put(entry.getKey().substring(prefix.length()), entry.getValue());
            }
        }
        return builder.build();
    }

    public Settings getAsSettings(String setting) {
        return getByPrefix(setting + ".");
    }

    public boolean containsSetting(String setting) {
        if (settings.containsKey(setting)) {
            return true;
        }
        for (Map.Entry<String, String> entry : settings.entrySet()) {
            if (entry.getKey().startsWith(setting)) {
                return true;
            }
        }
        return false;
    }

    public String get(String setting) {
        return settings.get(setting);
    }

    public String get(String setting, String defaultValue) {
        String retVal = settings.get(setting);
        return retVal == null ? defaultValue : retVal;
    }

    public Float getAsFloat(String setting, Float defaultValue) {
        String sValue = get(setting);
        if (sValue == null) {
            return defaultValue;
        }
        try {
            return Float.parseFloat(sValue);
        } catch (NumberFormatException e) {
            throw new SettingsException("Failed to parse float setting [" + setting + "] with value [" + sValue + "]", e);
        }
    }

    public Double getAsDouble(String setting, Double defaultValue) {
        String sValue = get(setting);
        if (sValue == null) {
            return defaultValue;
        }
        try {
            return Double.parseDouble(sValue);
        } catch (NumberFormatException e) {
            throw new SettingsException("Failed to parse double setting [" + setting + "] with value [" + sValue + "]", e);
        }
    }

    public Integer getAsInt(String setting, Integer defaultValue) {
        String sValue = get(setting);
        if (sValue == null) {
            return defaultValue;
        }
        try {
            return Integer.parseInt(sValue);
        } catch (NumberFormatException e) {
            throw new SettingsException("Failed to parse int setting [" + setting + "] with value [" + sValue + "]", e);
        }
    }

    public Long getAsLong(String setting, Long defaultValue) {
        String sValue = get(setting);
        if (sValue == null) {
            return defaultValue;
        }
        try {
            return Long.parseLong(sValue);
        } catch (NumberFormatException e) {
            throw new SettingsException("Failed to parse long setting [" + setting + "] with value [" + sValue + "]", e);
        }
    }

    public Boolean getAsBoolean(String setting, Boolean defaultValue) {
        String value = get(setting);
        if (value == null) {
            return defaultValue;
        }
        return !("false".equals(value) || "0".equals(value) || "off".equals(value) || "no".equals(value));
    }

    public TimeValue getAsTime(String setting, TimeValue defaultValue) {
        return parseTimeValue(get(setting), defaultValue);
    }

    public ByteSizeValue getAsBytesSize(String setting, ByteSizeValue defaultValue) throws SettingsException {
        return parseBytesSizeValue(get(setting), defaultValue);
    }

    public String[] getAsArray(String settingPrefix) throws SettingsException {
        return getAsArray(settingPrefix, EMPTY_ARRAY);
    }

    public String[] getAsArray(String settingPrefix, String[] defaultArray) throws SettingsException {
        List<String> result = new ArrayList<>();
        if (get(settingPrefix) != null) {
            String[] strings = splitStringByCommaToArray(get(settingPrefix));
            if (strings.length > 0) {
                for (String string : strings) {
                    result.add(string.trim());
                }
            }
        }
        int counter = 0;
        while (true) {
            String value = get(settingPrefix + '.' + (counter++));
            if (value == null) {
                break;
            }
            result.add(value.trim());
        }
        if (result.isEmpty()) {
            return defaultArray;
        }
        return result.toArray(new String[result.size()]);
    }

    public Map<String, Settings> getGroups(String prefix) throws SettingsException {
        String settingPrefix = prefix;
        if (settingPrefix.charAt(settingPrefix.length() - 1) != '.') {
            settingPrefix = settingPrefix + ".";
        }
        // we don't really care that it might happen twice
        Map<String, Map<String, String>> map = new LinkedHashMap<>();
        for (Object o : settings.keySet()) {
            String setting = (String) o;
            if (setting.startsWith(settingPrefix)) {
                String nameValue = setting.substring(settingPrefix.length());
                int dotIndex = nameValue.indexOf('.');
                if (dotIndex == -1) {
                    throw new SettingsException("Failed to get setting group for ["
                            + settingPrefix
                            + "] setting prefix and setting [" + setting + "] because of a missing '.'");
                }
                String name = nameValue.substring(0, dotIndex);
                String value = nameValue.substring(dotIndex + 1);
                Map<String, String> groupSettings = map.get(name);
                if (groupSettings == null) {
                    groupSettings = new LinkedHashMap<>();
                    map.put(name, groupSettings);
                }
                groupSettings.put(value, get(setting));
            }
        }
        Map<String, Settings> retVal = new LinkedHashMap<>();
        for (Map.Entry<String, Map<String, String>> entry : map.entrySet()) {
            retVal.put(entry.getKey(), new Settings(Collections.unmodifiableMap(entry.getValue())));
        }
        return Collections.unmodifiableMap(retVal);
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        Settings that = (Settings) o;
        return settings != null ? settings.equals(that.settings) : that.settings == null;
    }

    @Override
    public int hashCode() {
        return settings != null ? settings.hashCode() : 0;
    }

    private void processSetting(Map<String, Object> map, String prefix, String setting, String value) {
        int prefixLength = setting.indexOf('.');
        if (prefixLength == -1) {
            @SuppressWarnings("unchecked")
            Map<String, Object> innerMap = (Map<String, Object>) map.get(prefix + setting);
            if (innerMap != null) {
                for (Map.Entry<String, Object> entry : innerMap.entrySet()) {
                    map.put(prefix + setting + "." + entry.getKey(), entry.getValue());
                }
            }
            map.put(prefix + setting, value);
        } else {
            String key = setting.substring(0, prefixLength);
            String rest = setting.substring(prefixLength + 1);
            Object existingValue = map.get(prefix + key);
            if (existingValue == null) {
                Map<String, Object> newMap = new HashMap<>(2);
                processSetting(newMap, "", rest, value);
                map.put(key, newMap);
            } else {
                if (existingValue instanceof Map) {
                    @SuppressWarnings("unchecked")
                    Map<String, Object> innerMap = (Map<String, Object>) existingValue;
                    processSetting(innerMap, "", rest, value);
                    map.put(key, innerMap);
                } else {
                    processSetting(map, prefix + key + ".", rest, value);
                }
            }
        }
    }

    private Object convertMapsToArrays(Map<String, Object> map) {
        if (map.isEmpty()) {
            return map;
        }
        boolean isArray = true;
        int maxIndex = -1;
        for (Map.Entry<String, Object> entry : map.entrySet()) {
            if (isArray) {
                try {
                    int index = Integer.parseInt(entry.getKey());
                    if (index >= 0) {
                        maxIndex = Math.max(maxIndex, index);
                    } else {
                        isArray = false;
                    }
                } catch (NumberFormatException ex) {
                    isArray = false;
                }
            }
            if (entry.getValue() instanceof Map) {
                @SuppressWarnings("unchecked")
                Map<String, Object> valMap = (Map<String, Object>) entry.getValue();
                entry.setValue(convertMapsToArrays(valMap));
            }
        }
        if (isArray && (maxIndex + 1) == map.size()) {
            ArrayList<Object> newValue = new ArrayList<>(maxIndex + 1);
            for (int i = 0; i <= maxIndex; i++) {
                Object obj = map.get(Integer.toString(i));
                if (obj == null) {
                    return map;
                }
                newValue.add(obj);
            }
            return newValue;
        }
        return map;
    }

    /**
     * A builder for {@link Settings}.
     */
    public static class Builder {

        private final Map<String, String> map = new LinkedHashMap<>();

        private Builder() {
        }

        public Map<String, String> internalMap() {
            return this.map;
        }

        public String remove(String key) {
            return map.remove(key);
        }

        public String get(String key) {
            return map.get(key);
        }

        /**
         * Sets a setting with the provided setting key and value.
         *
         * @param key   The setting key
         * @param value The setting value
         * @return The builder
         */
        public Builder put(String key, String value) {
            map.put(key, value);
            return this;
        }

        /**
         * Sets a setting with the provided setting key and class as value.
         *
         * @param key   The setting key
         * @param clazz The setting class value
         * @return The builder
         */
        public Builder put(String key, Class<?> clazz) {
            map.put(key, clazz.getName());
            return this;
        }

        /**
         * Sets the setting with the provided setting key and the boolean value.
         *
         * @param setting The setting key
         * @param value   The boolean value
         * @return The builder
         */
        public Builder put(String setting, boolean value) {
            put(setting, String.valueOf(value));
            return this;
        }

        /**
         * Sets the setting with the provided setting key and the int value.
         *
         * @param setting The setting key
         * @param value   The int value
         * @return The builder
         */
        public Builder put(String setting, int value) {
            put(setting, String.valueOf(value));
            return this;
        }

        /**
         * Sets the setting with the provided setting key and the long value.
         *
         * @param setting The setting key
         * @param value   The long value
         * @return The builder
         */
        public Builder put(String setting, long value) {
            put(setting, String.valueOf(value));
            return this;
        }

        /**
         * Sets the setting with the provided setting key and the float value.
         *
         * @param setting The setting key
         * @param value   The float value
         * @return The builder
         */
        public Builder put(String setting, float value) {
            put(setting, String.valueOf(value));
            return this;
        }

        /**
         * Sets the setting with the provided setting key and the double value.
         *
         * @param setting The setting key
         * @param value   The double value
         * @return The builder
         */
        public Builder put(String setting, double value) {
            put(setting, String.valueOf(value));
            return this;
        }

        /**
         * Sets the setting with the provided setting key and an array of values.
         *
         * @param setting The setting key
         * @param values  The values
         * @return The builder
         */
        public Builder putArray(String setting, String... values) {
            remove(setting);
            int counter = 0;
            while (true) {
                String value = map.remove(setting + '.' + (counter++));
                if (value == null) {
                    break;
                }
            }
            for (int i = 0; i < values.length; i++) {
                put(setting + '.' + i, values[i]);
            }
            return this;
        }

        /**
         * Sets the setting with the provided setting key and an array of values.
         *
         * @param setting The setting key
         * @param values  The values
         * @return The builder
         */
        public Builder putArray(String setting, List<String> values) {
            remove(setting);
            int counter = 0;
            while (true) {
                String value = map.remove(setting + '.' + (counter++));
                if (value == null) {
                    break;
                }
            }
            for (int i = 0; i < values.size(); i++) {
                put(setting + '.' + i, values.get(i));
            }
            return this;
        }

        /**
         * Sets the setting group.
         * @param settingPrefix setting prefix
         * @param groupName group name
         * @param settings settings
         * @param values values
         * @return a builder
         * @throws SettingsException if setting fails
         */
        public Builder put(String settingPrefix, String groupName, String[] settings, String[] values)
                throws SettingsException {
            if (settings.length != values.length) {
                throw new SettingsException("The settings length must match the value length");
            }
            for (int i = 0; i < settings.length; i++) {
                if (values[i] == null) {
                    continue;
                }
                // join prefix, group name and setting with dots so that getGroups() can find the group
                put(settingPrefix + "." + groupName + "." + settings[i], values[i]);
            }
            return this;
        }

        /**
         * Sets all the provided settings.
         * @param settings settings
         * @return builder
         */
        public Builder put(Settings settings) {
            map.putAll(settings.getAsMap());
            return this;
        }

        /**
         * Sets all the provided settings.
         *
         * @param settings settings
         * @return a builder
         */
        public Builder put(Map<String, String> settings) {
            map.putAll(settings);
            return this;
        }

        /**
         * Loads settings from the actual string content that represents them using the
         * {@link SettingsLoaderService#loaderFromString(String)}.
         *
         * @param source source
         * @return builder
         */
        public Builder loadFromString(String source) {
            SettingsLoader settingsLoader = SettingsLoaderService.loaderFromString(source);
            try {
                Map<String, String> loadedSettings = settingsLoader.load(source);
                put(loadedSettings);
            } catch (Exception e) {
                throw new SettingsException("Failed to load settings from [" + source + "]", e);
            }
            return this;
        }

        /**
         * Loads settings from a map.
         * @param map map
         * @return builder
         */
        public Builder loadFromMap(Map<String, Object> map) {
            try (XContentBuilder builder = JsonXContent.contentBuilder()) {
                put(new JsonSettingsLoader().load(builder.map(map).string()));
            } catch (Exception e) {
                throw new SettingsException("Failed to load settings from [" + map + "]", e);
            }
            return this;
        }

        /**
         * Loads settings from a URL.
         * @param url url
         * @return builder
         */
        public Builder loadFromUrl(URL url) throws SettingsException {
            try {
                return loadFromStream(url.toExternalForm(), url.openStream());
            } catch (IOException e) {
                throw new SettingsException("Failed to open stream for url [" + url.toExternalForm() + "]", e);
            }
        }

        /**
         * Loads settings from a stream.
         * @param resourceName resource name
         * @param inputStream input stream
         * @return builder
         */
        public Builder loadFromStream(String resourceName, InputStream inputStream) throws SettingsException {
            SettingsLoader settingsLoader = SettingsLoaderService.loaderFromResource(resourceName);
            try {
                Map<String, String> loadedSettings = settingsLoader
                        .load(copyToString(new InputStreamReader(inputStream, StandardCharsets.UTF_8)));
                put(loadedSettings);
            } catch (Exception e) {
                throw new SettingsException("Failed to load settings from [" + resourceName + "]", e);
            }
            return this;
        }

        /**
         * Runs across all the settings set on this builder and replaces <tt>${...}</tt> elements in
         * each setting value according to the following logic:
         *
         * First, tries to resolve it against a system property ({@link System#getProperty(String)}), next,
         * tries to resolve it against an environment variable ({@link System#getenv(String)}), next,
         * tries to resolve it against a date pattern for the current date,
         * and last, tries to replace it with another setting already set on this builder.
         * @return builder
         */
        public Builder replacePropertyPlaceholders() {
            PropertyPlaceholder propertyPlaceholder = new PropertyPlaceholder("${", "}", false);
            PropertyPlaceholder.PlaceholderResolver placeholderResolver = placeholderName -> {
                String value = System.getProperty(placeholderName);
                if (value != null) {
                    return value;
                }
                value = System.getenv(placeholderName);
                if (value != null) {
                    return value;
                }
                try {
                    return DateTimeFormatter.ofPattern(placeholderName).format(LocalDate.now());
                } catch (IllegalArgumentException | DateTimeException e) {
                    logger.log(Level.FINER, e.getMessage(), e);
                    return map.get(placeholderName);
                }
            };
            for (Map.Entry<String, String> entry : map.entrySet()) {
                map.put(entry.getKey(), propertyPlaceholder.replacePlaceholders(entry.getValue(), placeholderResolver));
            }
            return this;
        }

        public Settings build() {
            return new Settings(map);
        }
    }

    private static class PropertyPlaceholder {

        private final String placeholderPrefix;

        private final String placeholderSuffix;

        private final boolean ignoreUnresolvablePlaceholders;

        /**
         * Creates a new <code>PropertyPlaceholder</code> that uses the supplied prefix and suffix.
         *
         * @param placeholderPrefix the prefix that denotes the start of a placeholder.
         * @param placeholderSuffix the suffix that denotes the end of a placeholder.
         * @param ignoreUnresolvablePlaceholders indicates whether unresolvable placeholders should be ignored
         * (<code>true</code>) or cause an exception (<code>false</code>).
         */
        public PropertyPlaceholder(String placeholderPrefix, String placeholderSuffix,
                                   boolean ignoreUnresolvablePlaceholders) {
            this.placeholderPrefix = placeholderPrefix;
            this.placeholderSuffix = placeholderSuffix;
            this.ignoreUnresolvablePlaceholders = ignoreUnresolvablePlaceholders;
        }

        /**
         * Replaces all placeholders of format <code>${name}</code> with the value returned from the supplied
         * {@link PlaceholderResolver}.
         *
         * @param value the value containing the placeholders to be replaced.
         * @param placeholderResolver the <code>PlaceholderResolver</code> to use for replacement.
         * @return the supplied value with placeholders replaced inline.
         */
        public String replacePlaceholders(String value, PlaceholderResolver placeholderResolver) {
            return parseStringValue(value, placeholderResolver, new HashSet<String>());
        }

        protected String parseStringValue(String strVal, PlaceholderResolver placeholderResolver,
                                          Set<String> visitedPlaceholders) {
            StringBuilder buf = new StringBuilder(strVal);
            int startIndex = strVal.indexOf(this.placeholderPrefix);
            while (startIndex != -1) {
                int endIndex = findPlaceholderEndIndex(buf, startIndex);
                if (endIndex != -1) {
                    String placeholder = buf.substring(startIndex + this.placeholderPrefix.length(), endIndex);
                    if (!visitedPlaceholders.add(placeholder)) {
                        throw new IllegalArgumentException(
                                "Circular placeholder reference '" + placeholder + "' in property definitions");
                    }
                    // Recursive invocation, parsing placeholders contained in the placeholder key.
                    placeholder = parseStringValue(placeholder, placeholderResolver, visitedPlaceholders);

                    // Now obtain the value for the fully resolved key...
                    int defaultValueIdx = placeholder.indexOf(':');
                    String defaultValue = null;
                    if (defaultValueIdx != -1) {
                        defaultValue = placeholder.substring(defaultValueIdx + 1);
                        placeholder = placeholder.substring(0, defaultValueIdx);
                    }
                    String propVal = placeholderResolver.resolvePlaceholder(placeholder);
                    if (propVal == null) {
                        propVal = defaultValue;
                    }
                    if (propVal != null) {
                        // Recursive invocation, parsing placeholders contained in the
                        // previously resolved placeholder value.
                        propVal = parseStringValue(propVal, placeholderResolver, visitedPlaceholders);
                        buf.replace(startIndex, endIndex + this.placeholderSuffix.length(), propVal);
                        startIndex = buf.indexOf(this.placeholderPrefix, startIndex + propVal.length());
                    } else if (this.ignoreUnresolvablePlaceholders) {
                        // Proceed with unprocessed value.
                        startIndex = buf.indexOf(this.placeholderPrefix, endIndex + this.placeholderSuffix.length());
                    } else {
                        throw new IllegalArgumentException("Could not resolve placeholder '" + placeholder + "'");
                    }
                    visitedPlaceholders.remove(placeholder);
                } else {
                    startIndex = -1;
                }
            }
            return buf.toString();
        }

        private int findPlaceholderEndIndex(CharSequence buf, int startIndex) {
            int index = startIndex + this.placeholderPrefix.length();
            int withinNestedPlaceholder = 0;
            while (index < buf.length()) {
                if (substringMatch(buf, index, this.placeholderSuffix)) {
                    if (withinNestedPlaceholder > 0) {
                        withinNestedPlaceholder--;
                        index = index + this.placeholderPrefix.length() - 1;
                    } else {
                        return index;
                    }
                } else if (substringMatch(buf, index, this.placeholderPrefix)) {
                    withinNestedPlaceholder++;
                    index = index + this.placeholderPrefix.length();
                } else {
                    index++;
                }
            }
            return -1;
        }

        private boolean substringMatch(CharSequence str, int index, CharSequence substring) {
            for (int j = 0; j < substring.length(); j++) {
                int i = index + j;
                if (i >= str.length() || str.charAt(i) != substring.charAt(j)) {
                    return false;
                }
            }
            return true;
        }

        /**
         * Strategy interface used to resolve replacement values for placeholders contained in strings.
         */
        @FunctionalInterface
        public interface PlaceholderResolver {

            /**
             * Resolves the supplied placeholder name into the replacement value.
             *
             * @param placeholderName the name of the placeholder to resolve.
             * @return the replacement value or <code>null</code> if no replacement is to be made.
             */
            String resolvePlaceholder(String placeholderName);
        }
    }
}

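A usage sketch of the builder and the typed accessors above. The keys and values are made up for illustration, and the ${LOG_DIR:logs} placeholder falls back to "logs" when neither a system property nor an environment variable of that name is set:

import org.xbib.content.settings.Settings;

public class SettingsExample {
    public static void main(String[] args) {
        Settings settings = Settings.settingsBuilder()
                .put("host", "localhost")
                .put("port", 9200)
                .putArray("cluster.seeds", "alpha", "beta")
                .put("path.logs", "${LOG_DIR:logs}")
                .replacePropertyPlaceholders()
                .build();
        int port = settings.getAsInt("port", 9300);            // 9200
        String[] seeds = settings.getAsArray("cluster.seeds"); // ["alpha", "beta"]
        Settings cluster = settings.getByPrefix("cluster.");   // seeds.0=alpha, seeds.1=beta
        System.out.println(port + " " + seeds.length + " " + cluster.getAsMap());
    }
}
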
@@ -0,0 +1,17 @@
package org.xbib.content.settings;

/**
 * A generic failure to handle settings.
 */
public class SettingsException extends RuntimeException {

    private static final long serialVersionUID = -1833327708622505101L;

    public SettingsException(String message) {
        super(message);
    }

    public SettingsException(String message, Throwable cause) {
        super(message, cause);
    }
}

@@ -0,0 +1,24 @@
package org.xbib.content.settings;

import java.io.IOException;
import java.util.Map;
import java.util.Set;

/**
 * Provides the ability to load settings from
 * the actual source content that represents them.
 */
public interface SettingsLoader {

    Set<String> suffixes();

    /**
     * Loads the settings from a source string.
     * @param source the source
     * @return a Map
     * @throws IOException if load fails
     */
    Map<String, String> load(String source) throws IOException;

    boolean canLoad(String source);
}

@@ -0,0 +1,63 @@
package org.xbib.content.settings;

import org.xbib.content.json.JsonSettingsLoader;

import java.util.HashMap;
import java.util.Map;
import java.util.ServiceLoader;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * A settings loader service for loading {@link SettingsLoader} implementations.
 */
public final class SettingsLoaderService {

    private static final Logger logger = Logger.getLogger(SettingsLoaderService.class.getName());

    private static final Map<Set<String>, SettingsLoader> settingsLoaderMap = new HashMap<>();

    static {
        // register the available settings loaders once, keyed by their suffixes
        try {
            ServiceLoader<SettingsLoader> serviceLoader = ServiceLoader.load(SettingsLoader.class);
            for (SettingsLoader settingsLoader : serviceLoader) {
                if (!settingsLoaderMap.containsKey(settingsLoader.suffixes())) {
                    settingsLoaderMap.put(settingsLoader.suffixes(), settingsLoader);
                }
            }
        } catch (Exception e) {
            logger.log(Level.SEVERE, e.getMessage(), e);
        }
    }

    private SettingsLoaderService() {
    }

    /**
     * Returns a {@link SettingsLoader} based on the resource name.
     * @param resourceName the resource
     * @return the settings loader
     */
    public static SettingsLoader loaderFromResource(String resourceName) {
        for (Map.Entry<Set<String>, SettingsLoader> entry : settingsLoaderMap.entrySet()) {
            Set<String> suffixes = entry.getKey();
            for (String suffix : suffixes) {
                if (resourceName.endsWith("." + suffix)) {
                    return entry.getValue();
                }
            }
        }
        return new JsonSettingsLoader();
    }

    /**
     * Returns a {@link SettingsLoader} based on the actual source.
     * @param source the source
     * @return the settings loader
     */
    public static SettingsLoader loaderFromString(String source) {
        for (SettingsLoader loader : settingsLoaderMap.values()) {
            if (loader.canLoad(source)) {
                return loader;
            }
        }
        return new JsonSettingsLoader();
    }
}

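A short fragment showing how a loader is selected. Loader implementations are discovered through the JDK ServiceLoader mechanism; their registration as a service resource is assumed here and not shown in this excerpt, and JSON is the fallback in both lookup paths:

// pick by resource suffix, or by sniffing the source; both fall back to JsonSettingsLoader
SettingsLoader bySuffix = SettingsLoaderService.loaderFromResource("settings.json");
SettingsLoader bySource = SettingsLoaderService.loaderFromString("{\"cluster\":{\"name\":\"test\"}}");
Map<String, String> flat = bySource.load("{\"cluster\":{\"name\":\"test\"}}"); // cluster.name=test
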
@@ -0,0 +1,4 @@
/**
 * Classes for settings.
 */
package org.xbib.content.settings;

@ -0,0 +1,470 @@
|
|||
package org.xbib.content.util.geo;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Iterator;
|
||||
|
||||
/**
|
||||
* Utilities for encoding and decoding geohashes. Based on
|
||||
* http://en.wikipedia.org/wiki/Geohash.
|
||||
*/
|
||||
public class GeoHashUtils {
|
||||
|
||||
public static final int PRECISION = 12;
|
||||
private static final char[] BASE_32 = {'0', '1', '2', '3', '4', '5', '6',
|
||||
'7', '8', '9', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'j', 'k', 'm', 'n',
|
||||
'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z'};
|
||||
private static final int[] BITS = {16, 8, 4, 2, 1};
|
||||
|
||||
private GeoHashUtils() {
|
||||
}
|
||||
|
||||
public static String encode(double latitude, double longitude) {
|
||||
return encode(latitude, longitude, PRECISION);
|
||||
}
|
||||
|
||||
/**
|
||||
* Encodes the given latitude and longitude into a geohash.
|
||||
*
|
||||
* @param latitude Latitude to encode
|
||||
* @param longitude Longitude to encode
|
||||
* @param precision precision
|
||||
* @return Geohash encoding of the longitude and latitude
|
||||
*/
|
||||
public static String encode(double latitude, double longitude, int precision) {
|
||||
double latInterval0 = -90.0;
|
||||
double latInterval1 = 90.0;
|
||||
double lngInterval0 = -180.0;
|
||||
double lngInterval1 = 180.0;
|
||||
final StringBuilder geohash = new StringBuilder();
|
||||
boolean isEven = true;
|
||||
int bit = 0;
|
||||
int ch = 0;
|
||||
while (geohash.length() < precision) {
|
||||
double mid;
|
||||
if (isEven) {
|
||||
mid = (lngInterval0 + lngInterval1) / 2D;
|
||||
if (longitude > mid) {
|
||||
ch |= BITS[bit];
|
||||
lngInterval0 = mid;
|
||||
} else {
|
||||
lngInterval1 = mid;
|
||||
}
|
||||
} else {
|
||||
mid = (latInterval0 + latInterval1) / 2D;
|
||||
if (latitude > mid) {
|
||||
ch |= BITS[bit];
|
||||
latInterval0 = mid;
|
||||
} else {
|
||||
latInterval1 = mid;
|
||||
}
|
||||
}
|
||||
isEven = !isEven;
|
||||
if (bit < 4) {
|
||||
bit++;
|
||||
} else {
|
||||
geohash.append(BASE_32[ch]);
|
||||
bit = 0;
|
||||
ch = 0;
|
||||
}
|
||||
}
|
||||
return geohash.toString();
|
||||
}
|
||||
|
||||
private static char encode(int x, int y) {
|
||||
return BASE_32[((x & 1) + ((y & 1) * 2) + ((x & 2) * 2) + ((y & 2) * 4) + ((x & 4) * 4)) % 32];
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate all neighbors of a given geohash cell.
|
||||
*
|
||||
* @param geohash Geohash of the defined cell
|
||||
* @return geohashes of all neighbor cells
|
||||
*/
|
||||
public static Collection<? extends CharSequence> neighbors(String geohash) {
|
||||
return addNeighbors(geohash, geohash.length(), new ArrayList<CharSequence>(8));
|
||||
}
|
||||
|
||||
/**
|
||||
* Create an {@link Iterable} which allows to iterate over the cells that
|
||||
* contain a given geohash.
|
||||
*
|
||||
* @param geohash Geohash of a cell
|
||||
* @return {@link Iterable} of path
|
||||
*/
|
||||
public static Iterable<String> path(final String geohash) {
|
||||
return new Iterable<String>() {
|
||||
@Override
|
||||
public Iterator<String> iterator() {
|
||||
return new GeohashPathIterator(geohash);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate the geohash of a neighbor of a geohash.
|
||||
*
|
||||
* @param geohash the geohash of a cell
|
||||
* @param level non-negative level of the geohash
|
||||
* @param dx delta of the first grid coordinate (must be -1, 0 or +1)
|
||||
* @param dy delta of the second grid coordinate (must be -1, 0 or +1)
|
||||
* @return geohash of the defined cell
|
||||
*/
|
||||
private static String neighbor(String geohash, int level, int dx, int dy) {
|
||||
int cell = decode(geohash.charAt(level - 1));
|
||||
|
||||
// Decoding the Geohash bit pattern to determine grid coordinates
|
||||
int x0 = cell & 1; // first bit of x
|
||||
int y0 = cell & 2; // first bit of y
|
||||
int x1 = cell & 4; // second bit of x
|
||||
int y1 = cell & 8; // second bit of y
|
||||
int x2 = cell & 16; // third bit of x
|
||||
|
||||
// combine the bit pattern into grid coordinates.
|
||||
// note that the semantics of x and y swap
|
||||
// on each level
|
||||
int x = x0 + (x1 / 2) + (x2 / 4);
|
||||
int y = (y0 / 2) + (y1 / 4);
|
||||
|
||||
if (level == 1) {
|
||||
// Root cells at north (namely "bcfguvyz") or at
|
||||
// south (namely "0145hjnp") do not have neighbors
|
||||
// in north/south direction
|
||||
if ((dy < 0 && y == 0) || (dy > 0 && y == 3)) {
|
||||
return null;
|
||||
} else {
|
||||
return Character.toString(encode(x + dx, y + dy));
|
||||
}
|
||||
} else if (level > 1) {
|
||||
boolean odd = (level % 2) != 0;
|
||||
// define grid coordinates for next level
|
||||
final int nx = odd ? (x + dx) : (x + dy);
|
||||
final int ny = odd ? (y + dy) : (y + dx);
|
||||
|
||||
boolean even = (level % 2) == 0;
|
||||
// define grid limits for current level
|
||||
final int xLimit = even ? 7 : 3;
|
||||
final int yLimit = even ? 3 : 7;
|
||||
|
||||
// if the defined neighbor has the same parent as the current cell
|
||||
// encode the cell directly. Otherwise find the cell next to this
|
||||
// cell recursively. Since encoding wraps around within a cell
|
||||
// it can be encoded here.
|
||||
if (nx >= 0 && nx <= xLimit && ny >= 0 && ny <= yLimit) {
|
||||
return geohash.substring(0, level - 1) + encode(nx, ny);
|
||||
} else {
|
||||
String neighbor = neighbor(geohash, level - 1, dx, dy);
|
||||
if (neighbor != null) {
|
||||
return neighbor + encode(nx, ny);
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add all geohashes of the cells next to a given geohash to a list.
|
||||
*
|
||||
* @param geohash Geohash of a specified cell
|
||||
* @param neighbors list to add the neighbors to
|
||||
* @param <E> the neighbor type
|
||||
* @return the given list
|
||||
*/
|
||||
public static <E extends Collection<? super String>> E addNeighbors(String geohash, E neighbors) {
|
||||
return addNeighbors(geohash, geohash.length(), neighbors);
|
||||
}
|
||||
|
||||
/**
|
||||
* Add all geohashes of the cells next to a given geohash to a list.
|
||||
*
|
||||
* @param geohash Geohash of a specified cell
|
||||
* @param length level of the given geohash
|
||||
* @param neighbors list to add the neighbors to
|
||||
* @param <E> the neighbor type
|
||||
* @return the given list
|
||||
*/
|
||||
public static <E extends Collection<? super String>> E addNeighbors(String geohash, int length, E neighbors) {
|
||||
String south = neighbor(geohash, length, 0, -1);
|
||||
String north = neighbor(geohash, length, 0, +1);
|
||||
if (north != null) {
|
||||
neighbors.add(neighbor(north, length, -1, 0));
|
||||
neighbors.add(north);
|
||||
neighbors.add(neighbor(north, length, +1, 0));
|
||||
}
|
||||
neighbors.add(neighbor(geohash, length, -1, 0));
|
||||
neighbors.add(neighbor(geohash, length, +1, 0));
|
||||
if (south != null) {
|
||||
neighbors.add(neighbor(south, length, -1, 0));
|
||||
neighbors.add(south);
|
||||
neighbors.add(neighbor(south, length, +1, 0));
|
||||
}
|
||||
return neighbors;
|
||||
}
|
||||
|
||||
private static int decode(char geo) {
|
||||
switch (geo) {
|
||||
case '0':
|
||||
return 0;
|
||||
case '1':
|
||||
return 1;
|
||||
case '2':
|
||||
return 2;
|
||||
case '3':
|
||||
return 3;
|
||||
case '4':
|
||||
return 4;
|
||||
case '5':
|
||||
return 5;
|
||||
case '6':
|
||||
return 6;
|
||||
case '7':
|
||||
return 7;
|
||||
case '8':
|
||||
return 8;
|
||||
case '9':
|
||||
return 9;
|
||||
case 'b':
|
||||
return 10;
|
||||
case 'c':
|
||||
return 11;
|
||||
case 'd':
|
||||
return 12;
|
||||
case 'e':
|
||||
return 13;
|
||||
case 'f':
|
||||
return 14;
|
||||
case 'g':
|
||||
return 15;
|
||||
case 'h':
|
||||
return 16;
|
||||
case 'j':
|
||||
return 17;
|
||||
case 'k':
|
||||
return 18;
|
||||
case 'm':
|
||||
return 19;
|
||||
case 'n':
|
||||
return 20;
|
||||
case 'p':
|
||||
return 21;
|
||||
case 'q':
|
||||
return 22;
|
||||
case 'r':
|
||||
return 23;
|
||||
case 's':
|
||||
return 24;
|
||||
case 't':
|
||||
return 25;
|
||||
case 'u':
|
||||
return 26;
|
||||
case 'v':
|
||||
return 27;
|
||||
case 'w':
|
||||
return 28;
|
||||
case 'x':
|
||||
return 29;
|
||||
case 'y':
|
||||
return 30;
|
||||
case 'z':
|
||||
return 31;
|
||||
default:
|
||||
throw new IllegalArgumentException("the character '" + geo + "' is not a valid geohash character");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Decodes the given geohash.
|
||||
*
|
||||
* @param geohash Geohash to decode
|
||||
* @return {@link GeoPoint} at the center of cell, given by the geohash
|
||||
*/
|
||||
public static GeoPoint decode(String geohash) {
|
||||
return decode(geohash, new GeoPoint());
|
||||
}
|
||||
|
||||
/**
|
||||
* Decodes the given geohash into a latitude and longitude.
|
||||
*
|
||||
* @param geohash Geohash to decode
|
||||
* @param ret the geo point to reset
|
||||
* @return the given {@link GeoPoint} reset to the center of
|
||||
* the cell given by the geohash
|
||||
*/
|
||||
public static GeoPoint decode(String geohash, GeoPoint ret) {
|
||||
double[] interval = decodeCell(geohash);
|
||||
return ret.reset((interval[0] + interval[1]) / 2D, (interval[2] + interval[3]) / 2D);
|
||||
}
|
||||
|
||||
/**
|
||||
* Decodes the given geohash into a geohash cell defined by the points northWest and southEast.
|
||||
*
|
||||
* @param geohash Geohash to decode
|
||||
* @param northWest the point north/west of the cell
|
||||
* @param southEast the point south/east of the cell
|
||||
*/
|
||||
public static void decodeCell(String geohash, GeoPoint northWest, GeoPoint southEast) {
|
||||
double[] interval = decodeCell(geohash);
|
||||
northWest.reset(interval[1], interval[2]);
|
||||
southEast.reset(interval[0], interval[3]);
|
||||
}
|
||||
|
||||
private static double[] decodeCell(String geohash) {
|
||||
double[] interval = {-90.0, 90.0, -180.0, 180.0};
|
||||
boolean isEven = true;
|
||||
for (int i = 0; i < geohash.length(); i++) {
|
||||
final int cd = decode(geohash.charAt(i));
|
||||
for (int mask : BITS) {
|
||||
if (isEven) {
|
||||
if ((cd & mask) != 0) {
|
||||
interval[2] = (interval[2] + interval[3]) / 2D;
|
||||
} else {
|
||||
interval[3] = (interval[2] + interval[3]) / 2D;
|
||||
}
|
||||
} else {
|
||||
if ((cd & mask) != 0) {
|
||||
interval[0] = (interval[0] + interval[1]) / 2D;
|
||||
} else {
|
||||
interval[1] = (interval[0] + interval[1]) / 2D;
|
||||
}
|
||||
}
|
||||
isEven = !isEven;
|
||||
}
|
||||
}
|
||||
return interval;
|
||||
}
|
||||
|
||||
/**
|
||||
* Encodes latitude and longitude information into a single long with variable precision.
|
||||
* Up to 12 levels of precision are supported which should offer sub-metre resolution.
|
||||
*
|
||||
* @param latitude latitude
|
||||
* @param longitude longitude
|
||||
* @param precision The required precision between 1 and 12
|
||||
* @return A single long where 4 bits are used for holding the precision and the remaining
|
||||
* 60 bits are reserved for 5 bit cell identifiers giving up to 12 layers.
|
||||
*/
|
||||
public static long encodeAsLong(double latitude, double longitude, int precision) {
|
||||
if ((precision > 12) || (precision < 1)) {
|
||||
throw new IllegalArgumentException("Illegal precision length of " + precision +
|
||||
". Long-based geohashes only support precisions between 1 and 12");
|
||||
}
|
||||
double latInterval0 = -90.0;
|
||||
double latInterval1 = 90.0;
|
||||
double lngInterval0 = -180.0;
|
||||
double lngInterval1 = 180.0;
|
||||
long geohash = 0L;
|
||||
boolean isEven = true;
|
||||
int bit = 0;
|
||||
int ch = 0;
|
||||
int geohashLength = 0;
|
||||
while (geohashLength < precision) {
|
||||
double mid;
|
||||
if (isEven) {
|
||||
mid = (lngInterval0 + lngInterval1) / 2D;
|
||||
if (longitude > mid) {
|
||||
ch |= BITS[bit];
|
||||
lngInterval0 = mid;
|
||||
} else {
|
||||
lngInterval1 = mid;
|
||||
}
|
||||
} else {
|
||||
mid = (latInterval0 + latInterval1) / 2D;
|
||||
if (latitude > mid) {
|
||||
ch |= BITS[bit];
|
||||
latInterval0 = mid;
|
||||
} else {
|
||||
latInterval1 = mid;
|
||||
}
|
||||
}
|
||||
|
||||
isEven = !isEven;
|
||||
|
||||
if (bit < 4) {
|
||||
bit++;
|
||||
} else {
|
||||
geohashLength++;
|
||||
geohash |= ch;
|
||||
if (geohashLength < precision) {
|
||||
geohash <<= 5;
|
||||
}
|
||||
bit = 0;
|
||||
ch = 0;
|
||||
}
|
||||
}
|
||||
geohash <<= 4;
|
||||
geohash |= precision;
|
||||
return geohash;
|
||||
}
|
||||
|
||||
/**
|
||||
* Formats a geohash held as a long as a more conventional
|
||||
* String-based geohash.
|
||||
*
|
||||
* @param geohashAsLong a geohash encoded as a long
|
||||
* @return A traditional base32-based String representation of a geohash
|
||||
*/
|
||||
public static String toString(long geohashAsLong) {
|
||||
int precision = (int) (geohashAsLong & 15);
|
||||
char[] chars = new char[precision];
|
||||
long l = geohashAsLong;
|
||||
l >>= 4;
|
||||
for (int i = precision - 1; i >= 0; i--) {
|
||||
chars[i] = BASE_32[(int) (l & 31)];
|
||||
l >>= 5;
|
||||
}
|
||||
return new String(chars);
|
||||
}
|
||||
|
||||
|
||||
public static GeoPoint decode(long geohash) {
|
||||
GeoPoint point = new GeoPoint();
|
||||
decode(geohash, point);
|
||||
return point;
|
||||
}
|
||||
|
||||
/**
|
||||
* Decodes the given long-format geohash into a latitude and longitude.
|
||||
*
|
||||
* @param geohash long format Geohash to decode
|
||||
* @param ret The GeoPoint into which the latitude and longitude will be stored
|
||||
*/
|
||||
public static void decode(long geohash, GeoPoint ret) {
|
||||
double[] interval = decodeCell(geohash);
|
||||
ret.reset((interval[0] + interval[1]) / 2D, (interval[2] + interval[3]) / 2D);
|
||||
|
||||
}
|
||||
|
||||
private static double[] decodeCell(long geohash) {
|
||||
double[] interval = {-90.0, 90.0, -180.0, 180.0};
|
||||
boolean isEven = true;
|
||||
int precision = (int) (geohash & 15);
|
||||
long l = geohash;
|
||||
l >>= 4;
|
||||
int[] cds = new int[precision];
|
||||
for (int i = precision - 1; i >= 0; i--) {
|
||||
cds[i] = (int) (l & 31);
|
||||
l >>= 5;
|
||||
}
|
||||
for (final int cd : cds) {
|
||||
for (int mask : BITS) {
|
||||
if (isEven) {
|
||||
if ((cd & mask) != 0) {
|
||||
interval[2] = (interval[2] + interval[3]) / 2D;
|
||||
} else {
|
||||
interval[3] = (interval[2] + interval[3]) / 2D;
|
||||
}
|
||||
} else {
|
||||
if ((cd & mask) != 0) {
|
||||
interval[0] = (interval[0] + interval[1]) / 2D;
|
||||
} else {
|
||||
interval[1] = (interval[0] + interval[1]) / 2D;
|
||||
}
|
||||
}
|
||||
isEven = !isEven;
|
||||
}
|
||||
}
|
||||
return interval;
|
||||
}
|
||||
}
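A minimal usage sketch for the GeoHashUtils class above, assuming it is on the classpath together with GeoPoint; the example class name and the sample coordinates are illustrative only:

import org.xbib.content.util.geo.GeoHashUtils;
import org.xbib.content.util.geo.GeoPoint;

public class GeoHashUtilsExample {
    public static void main(String[] args) {
        // encode a latitude/longitude pair into a base-32 geohash (default precision is 12)
        String hash = GeoHashUtils.encode(52.516, 13.378);
        // decode the geohash back to the center of its cell
        GeoPoint center = GeoHashUtils.decode(hash);
        // pack the same point into a single long: 5 bits per cell plus 4 bits of precision
        long packed = GeoHashUtils.encodeAsLong(52.516, 13.378, 8);
        System.out.println(hash + " -> " + center + " -> " + GeoHashUtils.toString(packed));
    }
}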
|
|
@ -0,0 +1,123 @@
|
|||
package org.xbib.content.util.geo;
|
||||
|
||||
/**
|
||||
* A geographical point, given by latitude and longitude.
|
||||
*/
|
||||
public final class GeoPoint {
|
||||
|
||||
private double lat;
|
||||
private double lon;
|
||||
|
||||
public GeoPoint() {
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new GeoPoint from a string. The string must either be a geohash
|
||||
* or a lat-lon tuple.
|
||||
*
|
||||
* @param value String to create the point from
|
||||
*/
|
||||
public GeoPoint(String value) {
|
||||
this.resetFromString(value);
|
||||
}
|
||||
|
||||
public GeoPoint(double lat, double lon) {
|
||||
this.lat = lat;
|
||||
this.lon = lon;
|
||||
}
|
||||
|
||||
public static GeoPoint parseFromLatLon(String latLon) {
|
||||
GeoPoint point = new GeoPoint();
|
||||
point.resetFromString(latLon);
|
||||
return point;
|
||||
}
|
||||
|
||||
public GeoPoint reset(double lat, double lon) {
|
||||
this.lat = lat;
|
||||
this.lon = lon;
|
||||
return this;
|
||||
}
|
||||
|
||||
public GeoPoint resetLat(double lat) {
|
||||
this.lat = lat;
|
||||
return this;
|
||||
}
|
||||
|
||||
public GeoPoint resetLon(double lon) {
|
||||
this.lon = lon;
|
||||
return this;
|
||||
}
|
||||
|
||||
public GeoPoint resetFromString(String value) {
|
||||
int comma = value.indexOf(',');
|
||||
if (comma != -1) {
|
||||
lat = Double.parseDouble(value.substring(0, comma).trim());
|
||||
lon = Double.parseDouble(value.substring(comma + 1).trim());
|
||||
} else {
|
||||
resetFromGeoHash(value);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
public GeoPoint resetFromGeoHash(String hash) {
|
||||
GeoHashUtils.decode(hash, this);
|
||||
return this;
|
||||
}
|
||||
|
||||
void latlon(double lat, double lon) {
|
||||
this.lat = lat;
|
||||
this.lon = lon;
|
||||
}
|
||||
|
||||
public final double lat() {
|
||||
return this.lat;
|
||||
}
|
||||
|
||||
public final double getLat() {
|
||||
return this.lat;
|
||||
}
|
||||
|
||||
public final double lon() {
|
||||
return this.lon;
|
||||
}
|
||||
|
||||
public final double getLon() {
|
||||
return this.lon;
|
||||
}
|
||||
|
||||
public final String geohash() {
|
||||
return GeoHashUtils.encode(lat, lon);
|
||||
}
|
||||
|
||||
public final String getGeohash() {
|
||||
return GeoHashUtils.encode(lat, lon);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) {
|
||||
return true;
|
||||
}
|
||||
if (o == null || getClass() != o.getClass()) {
|
||||
return false;
|
||||
}
|
||||
GeoPoint geoPoint = (GeoPoint) o;
|
||||
return Double.compare(geoPoint.lat, lat) == 0 && Double.compare(geoPoint.lon, lon) == 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int result;
|
||||
long temp;
|
||||
temp = Double.compare(lat, 0.0d) == 0 ? 0L : Double.doubleToLongBits(lat);
|
||||
result = (int) (temp ^ (temp >>> 32));
|
||||
temp = Double.compare(lon, 0.0d) == 0 ? 0L : Double.doubleToLongBits(lon);
|
||||
result = 31 * result + (int) (temp ^ (temp >>> 32));
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "[" + lat + ", " + lon + "]";
|
||||
}
|
||||
}
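A short sketch of how the GeoPoint class above can be used; the class name GeoPointExample and the coordinates are made up for illustration:

import org.xbib.content.util.geo.GeoPoint;

public class GeoPointExample {
    public static void main(String[] args) {
        // a "lat,lon" tuple is parsed directly; anything without a comma is treated as a geohash
        GeoPoint p1 = new GeoPoint("52.516,13.378");
        // round-trip through the geohash representation (center of the enclosing cell)
        GeoPoint p2 = new GeoPoint().resetFromGeoHash(p1.geohash());
        System.out.println(p1 + " ~ " + p2 + " (" + p1.geohash() + ")");
    }
}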
|
|
@ -0,0 +1,45 @@
|
|||
package org.xbib.content.util.geo;
|
||||
|
||||
import java.util.Iterator;
|
||||
import java.util.NoSuchElementException;
|
||||
|
||||
/**
|
||||
* This class iterates over the cells of a given geohash. Assuming geohashes
|
||||
* form a tree, this iterator traverses the tree from a leaf (the actual geohash)
|
||||
* to the root (the geohash of length 1).
|
||||
*/
|
||||
public final class GeohashPathIterator implements Iterator<String> {
|
||||
|
||||
private final String geohash;
|
||||
private int currentLength;
|
||||
|
||||
/**
|
||||
* Create a new {@link GeohashPathIterator} for a given geohash.
|
||||
*
|
||||
* @param geohash The geohash to traverse
|
||||
*/
|
||||
public GeohashPathIterator(String geohash) {
|
||||
this.geohash = geohash;
|
||||
this.currentLength = geohash.length();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasNext() {
|
||||
return currentLength > 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String next() {
|
||||
if (currentLength <= 0) {
|
||||
throw new NoSuchElementException();
|
||||
}
|
||||
String result = geohash.substring(0, currentLength);
|
||||
currentLength--;
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void remove() {
|
||||
throw new UnsupportedOperationException("unable to remove a geohash from this path");
|
||||
}
|
||||
}
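The iterator above is reached through GeoHashUtils.path(...); a brief sketch (the geohash value is arbitrary):

import org.xbib.content.util.geo.GeoHashUtils;

public class GeohashPathExample {
    public static void main(String[] args) {
        // walks from the full geohash up to its one-character root cell
        for (String cell : GeoHashUtils.path("u33db2m")) {
            System.out.println(cell); // u33db2m, u33db2, u33db, ..., u
        }
    }
}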
|
|
@ -0,0 +1,4 @@
|
|||
/**
|
||||
* Utility classes for geo points and geo hashes.
|
||||
*/
|
||||
package org.xbib.content.util.geo;
|
|
@ -0,0 +1,4 @@
|
|||
/**
|
||||
* Utility classes for content.
|
||||
*/
|
||||
package org.xbib.content.util;
|
|
@ -0,0 +1,233 @@
|
|||
package org.xbib.content.util.unit;
|
||||
|
||||
/**
|
||||
* A <tt>ByteSizeUnit</tt> represents size at a given unit of
|
||||
* granularity and provides utility methods to convert across units.
|
||||
* A <tt>ByteSizeUnit</tt> does not maintain size information, but only
|
||||
* helps organize and use size representations that may be maintained
|
||||
* separately across various contexts.
|
||||
*/
|
||||
public enum ByteSizeUnit {
|
||||
BYTES {
|
||||
@Override
|
||||
public long toBytes(long size) {
|
||||
return size;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toKB(long size) {
|
||||
return size / (C1 / C0);
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toMB(long size) {
|
||||
return size / (C2 / C0);
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toGB(long size) {
|
||||
return size / (C3 / C0);
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toTB(long size) {
|
||||
return size / (C4 / C0);
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toPB(long size) {
|
||||
return size / (C5 / C0);
|
||||
}
|
||||
},
|
||||
KB {
|
||||
@Override
|
||||
public long toBytes(long size) {
|
||||
return x(size, C1 / C0, MAX / (C1 / C0));
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toKB(long size) {
|
||||
return size;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toMB(long size) {
|
||||
return size / (C2 / C1);
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toGB(long size) {
|
||||
return size / (C3 / C1);
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toTB(long size) {
|
||||
return size / (C4 / C1);
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toPB(long size) {
|
||||
return size / (C5 / C1);
|
||||
}
|
||||
},
|
||||
MB {
|
||||
@Override
|
||||
public long toBytes(long size) {
|
||||
return x(size, C2 / C0, MAX / (C2 / C0));
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toKB(long size) {
|
||||
return x(size, C2 / C1, MAX / (C2 / C1));
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toMB(long size) {
|
||||
return size;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toGB(long size) {
|
||||
return size / (C3 / C2);
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toTB(long size) {
|
||||
return size / (C4 / C2);
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toPB(long size) {
|
||||
return size / (C5 / C2);
|
||||
}
|
||||
},
|
||||
GB {
|
||||
@Override
|
||||
public long toBytes(long size) {
|
||||
return x(size, C3 / C0, MAX / (C3 / C0));
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toKB(long size) {
|
||||
return x(size, C3 / C1, MAX / (C3 / C1));
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toMB(long size) {
|
||||
return x(size, C3 / C2, MAX / (C3 / C2));
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toGB(long size) {
|
||||
return size;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toTB(long size) {
|
||||
return size / (C4 / C3);
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toPB(long size) {
|
||||
return size / (C5 / C3);
|
||||
}
|
||||
},
|
||||
TB {
|
||||
@Override
|
||||
public long toBytes(long size) {
|
||||
return x(size, C4 / C0, MAX / (C4 / C0));
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toKB(long size) {
|
||||
return x(size, C4 / C1, MAX / (C4 / C1));
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toMB(long size) {
|
||||
return x(size, C4 / C2, MAX / (C4 / C2));
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toGB(long size) {
|
||||
return x(size, C4 / C3, MAX / (C4 / C3));
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toTB(long size) {
|
||||
return size;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toPB(long size) {
|
||||
return size / (C5 / C4);
|
||||
}
|
||||
},
|
||||
PB {
|
||||
@Override
|
||||
public long toBytes(long size) {
|
||||
return x(size, C5 / C0, MAX / (C5 / C0));
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toKB(long size) {
|
||||
return x(size, C5 / C1, MAX / (C5 / C1));
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toMB(long size) {
|
||||
return x(size, C5 / C2, MAX / (C5 / C2));
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toGB(long size) {
|
||||
return x(size, C5 / C3, MAX / (C5 / C3));
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toTB(long size) {
|
||||
return x(size, C5 / C4, MAX / (C5 / C4));
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toPB(long size) {
|
||||
return size;
|
||||
}
|
||||
};
|
||||
|
||||
static final long C0 = 1L;
|
||||
static final long C1 = C0 * 1024L;
|
||||
static final long C2 = C1 * 1024L;
|
||||
static final long C3 = C2 * 1024L;
|
||||
static final long C4 = C3 * 1024L;
|
||||
static final long C5 = C4 * 1024L;
|
||||
|
||||
static final long MAX = Long.MAX_VALUE;
|
||||
|
||||
/**
|
||||
* Scale d by m, checking for overflow.
|
||||
* This has a short name to make the above code more readable.
|
||||
*/
|
||||
static long x(long d, long m, long over) {
|
||||
if (d > over) {
|
||||
return Long.MAX_VALUE;
|
||||
}
|
||||
if (d < -over) {
|
||||
return Long.MIN_VALUE;
|
||||
}
|
||||
return d * m;
|
||||
}
|
||||
|
||||
|
||||
public abstract long toBytes(long size);
|
||||
|
||||
public abstract long toKB(long size);
|
||||
|
||||
public abstract long toMB(long size);
|
||||
|
||||
public abstract long toGB(long size);
|
||||
|
||||
public abstract long toTB(long size);
|
||||
|
||||
public abstract long toPB(long size);
|
||||
}
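A small sketch of the 1024-based conversions defined above; the example class name and numbers are illustrative:

import org.xbib.content.util.unit.ByteSizeUnit;

public class ByteSizeUnitExample {
    public static void main(String[] args) {
        long bytes = ByteSizeUnit.MB.toBytes(5);        // 5 * 1024 * 1024 = 5242880
        long kb = ByteSizeUnit.GB.toKB(2);              // 2 * 1024 * 1024 = 2097152
        long gb = ByteSizeUnit.BYTES.toGB(3221225472L); // truncating division -> 3
        System.out.println(bytes + " " + kb + " " + gb);
    }
}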
|
|
@ -0,0 +1,228 @@
|
|||
package org.xbib.content.util.unit;
|
||||
|
||||
import java.util.Locale;
|
||||
|
||||
/**
|
||||
* A size value, composed of a size and a {@link ByteSizeUnit}.
|
||||
*/
|
||||
public class ByteSizeValue {
|
||||
|
||||
private long size;
|
||||
|
||||
private ByteSizeUnit sizeUnit;
|
||||
|
||||
private ByteSizeValue() {
|
||||
}
|
||||
|
||||
public ByteSizeValue(long size, ByteSizeUnit sizeUnit) {
|
||||
this.size = size;
|
||||
this.sizeUnit = sizeUnit;
|
||||
}
|
||||
|
||||
/**
|
||||
* Format the double value with a single decimal point, trimming a trailing '.0'.
|
||||
* @param value value
|
||||
* @param suffix suffix
|
||||
* @return formatted decimal
|
||||
*/
|
||||
public static String format1Decimals(double value, String suffix) {
|
||||
String p = String.valueOf(value);
|
||||
int ix = p.indexOf('.') + 1;
|
||||
int ex = p.indexOf('E');
|
||||
char fraction = p.charAt(ix);
|
||||
if (fraction == '0') {
|
||||
if (ex != -1) {
|
||||
return p.substring(0, ix - 1) + p.substring(ex) + suffix;
|
||||
} else {
|
||||
return p.substring(0, ix - 1) + suffix;
|
||||
}
|
||||
} else {
|
||||
if (ex != -1) {
|
||||
return p.substring(0, ix) + fraction + p.substring(ex) + suffix;
|
||||
} else {
|
||||
return p.substring(0, ix) + fraction + suffix;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public static ByteSizeValue parseBytesSizeValue(String sValue) {
|
||||
return parseBytesSizeValue(sValue, null);
|
||||
}
|
||||
|
||||
public static ByteSizeValue parseBytesSizeValue(String sValue, ByteSizeValue defaultValue) {
|
||||
if (sValue == null) {
|
||||
return defaultValue;
|
||||
}
|
||||
long bytes;
|
||||
try {
|
||||
String lastTwoChars = sValue.substring(sValue.length() - Math.min(2, sValue.length())).toLowerCase(Locale.ROOT);
|
||||
if (lastTwoChars.endsWith("k")) {
|
||||
bytes = (long) (Double.parseDouble(sValue.substring(0, sValue.length() - 1)) * ByteSizeUnit.C1);
|
||||
} else if (lastTwoChars.endsWith("kb")) {
|
||||
bytes = (long) (Double.parseDouble(sValue.substring(0, sValue.length() - 2)) * ByteSizeUnit.C1);
|
||||
} else if (lastTwoChars.endsWith("m")) {
|
||||
bytes = (long) (Double.parseDouble(sValue.substring(0, sValue.length() - 1)) * ByteSizeUnit.C2);
|
||||
} else if (lastTwoChars.endsWith("mb")) {
|
||||
bytes = (long) (Double.parseDouble(sValue.substring(0, sValue.length() - 2)) * ByteSizeUnit.C2);
|
||||
} else if (lastTwoChars.endsWith("g")) {
|
||||
bytes = (long) (Double.parseDouble(sValue.substring(0, sValue.length() - 1)) * ByteSizeUnit.C3);
|
||||
} else if (lastTwoChars.endsWith("gb")) {
|
||||
bytes = (long) (Double.parseDouble(sValue.substring(0, sValue.length() - 2)) * ByteSizeUnit.C3);
|
||||
} else if (lastTwoChars.endsWith("t")) {
|
||||
bytes = (long) (Double.parseDouble(sValue.substring(0, sValue.length() - 1)) * ByteSizeUnit.C4);
|
||||
} else if (lastTwoChars.endsWith("tb")) {
|
||||
bytes = (long) (Double.parseDouble(sValue.substring(0, sValue.length() - 2)) * ByteSizeUnit.C4);
|
||||
} else if (lastTwoChars.endsWith("p")) {
|
||||
bytes = (long) (Double.parseDouble(sValue.substring(0, sValue.length() - 1)) * ByteSizeUnit.C5);
|
||||
} else if (lastTwoChars.endsWith("pb")) {
|
||||
bytes = (long) (Double.parseDouble(sValue.substring(0, sValue.length() - 2)) * ByteSizeUnit.C5);
|
||||
} else if (lastTwoChars.endsWith("b")) {
|
||||
bytes = Long.parseLong(sValue.substring(0, sValue.length() - 1));
|
||||
} else {
|
||||
bytes = Long.parseLong(sValue);
|
||||
}
|
||||
} catch (NumberFormatException e) {
|
||||
return defaultValue;
|
||||
}
|
||||
return new ByteSizeValue(bytes, ByteSizeUnit.BYTES);
|
||||
}
|
||||
|
||||
public int bytesAsInt() throws IllegalArgumentException {
|
||||
long bytes = bytes();
|
||||
if (bytes > Integer.MAX_VALUE) {
|
||||
throw new IllegalArgumentException("size [" + toString() + "] is bigger than max int");
|
||||
}
|
||||
return (int) bytes;
|
||||
}
|
||||
|
||||
public long bytes() {
|
||||
return sizeUnit.toBytes(size);
|
||||
}
|
||||
|
||||
public long getBytes() {
|
||||
return bytes();
|
||||
}
|
||||
|
||||
public long kb() {
|
||||
return sizeUnit.toKB(size);
|
||||
}
|
||||
|
||||
public long getKb() {
|
||||
return kb();
|
||||
}
|
||||
|
||||
public long mb() {
|
||||
return sizeUnit.toMB(size);
|
||||
}
|
||||
|
||||
public long getMb() {
|
||||
return mb();
|
||||
}
|
||||
|
||||
public long gb() {
|
||||
return sizeUnit.toGB(size);
|
||||
}
|
||||
|
||||
public long getGb() {
|
||||
return gb();
|
||||
}
|
||||
|
||||
public long tb() {
|
||||
return sizeUnit.toTB(size);
|
||||
}
|
||||
|
||||
public long getTb() {
|
||||
return tb();
|
||||
}
|
||||
|
||||
public long pb() {
|
||||
return sizeUnit.toPB(size);
|
||||
}
|
||||
|
||||
public long getPb() {
|
||||
return pb();
|
||||
}
|
||||
|
||||
public double kbFrac() {
|
||||
return ((double) bytes()) / ByteSizeUnit.C1;
|
||||
}
|
||||
|
||||
public double getKbFrac() {
|
||||
return kbFrac();
|
||||
}
|
||||
|
||||
public double mbFrac() {
|
||||
return ((double) bytes()) / ByteSizeUnit.C2;
|
||||
}
|
||||
|
||||
public double getMbFrac() {
|
||||
return mbFrac();
|
||||
}
|
||||
|
||||
public double gbFrac() {
|
||||
return ((double) bytes()) / ByteSizeUnit.C3;
|
||||
}
|
||||
|
||||
public double getGbFrac() {
|
||||
return gbFrac();
|
||||
}
|
||||
|
||||
public double tbFrac() {
|
||||
return ((double) bytes()) / ByteSizeUnit.C4;
|
||||
}
|
||||
|
||||
public double getTbFrac() {
|
||||
return tbFrac();
|
||||
}
|
||||
|
||||
public double pbFrac() {
|
||||
return ((double) bytes()) / ByteSizeUnit.C5;
|
||||
}
|
||||
|
||||
public double getPbFrac() {
|
||||
return pbFrac();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
long bytes = bytes();
|
||||
double value = bytes;
|
||||
String suffix = "b";
|
||||
if (bytes >= ByteSizeUnit.C5) {
|
||||
value = pbFrac();
|
||||
suffix = "pb";
|
||||
} else if (bytes >= ByteSizeUnit.C4) {
|
||||
value = tbFrac();
|
||||
suffix = "tb";
|
||||
} else if (bytes >= ByteSizeUnit.C3) {
|
||||
value = gbFrac();
|
||||
suffix = "gb";
|
||||
} else if (bytes >= ByteSizeUnit.C2) {
|
||||
value = mbFrac();
|
||||
suffix = "mb";
|
||||
} else if (bytes >= ByteSizeUnit.C1) {
|
||||
value = kbFrac();
|
||||
suffix = "kb";
|
||||
}
|
||||
return format1Decimals(value, suffix);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) {
|
||||
return true;
|
||||
}
|
||||
if (o == null || getClass() != o.getClass()) {
|
||||
return false;
|
||||
}
|
||||
ByteSizeValue sizeValue = (ByteSizeValue) o;
|
||||
return size == sizeValue.size && sizeUnit == sizeValue.sizeUnit;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int result = (int) (size ^ (size >>> 32));
|
||||
result = 31 * result + (sizeUnit != null ? sizeUnit.hashCode() : 0);
|
||||
return result;
|
||||
}
|
||||
}
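A sketch of parsing and printing a size with the ByteSizeValue class above; the input string and default value are illustrative:

import org.xbib.content.util.unit.ByteSizeUnit;
import org.xbib.content.util.unit.ByteSizeValue;

public class ByteSizeValueExample {
    public static void main(String[] args) {
        // parse a human-readable size; the default value is returned for unparseable input
        ByteSizeValue value = ByteSizeValue.parseBytesSizeValue("1.5gb",
                new ByteSizeValue(0, ByteSizeUnit.BYTES));
        System.out.println(value.bytes()); // 1610612736
        System.out.println(value);         // 1.5gb
    }
}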
|
|
@ -0,0 +1,229 @@
|
|||
package org.xbib.content.util.unit;
|
||||
|
||||
/**
|
||||
* A <tt>SizeUnit</tt> represents a count at a given decimal (1000-based) unit of granularity.
|
||||
*/
|
||||
public enum SizeUnit {
|
||||
SCALAR {
|
||||
@Override
|
||||
public long toScalar(long size) {
|
||||
return size;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toKilo(long size) {
|
||||
return size / (C1 / C0);
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toMega(long size) {
|
||||
return size / (C2 / C0);
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toGiga(long size) {
|
||||
return size / (C3 / C0);
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toTera(long size) {
|
||||
return size / (C4 / C0);
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toPeta(long size) {
|
||||
return size / (C5 / C0);
|
||||
}
|
||||
},
|
||||
KILO {
|
||||
@Override
|
||||
public long toScalar(long size) {
|
||||
return x(size, C1 / C0, MAX / (C1 / C0));
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toKilo(long size) {
|
||||
return size;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toMega(long size) {
|
||||
return size / (C2 / C1);
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toGiga(long size) {
|
||||
return size / (C3 / C1);
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toTera(long size) {
|
||||
return size / (C4 / C1);
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toPeta(long size) {
|
||||
return size / (C5 / C1);
|
||||
}
|
||||
},
|
||||
MEGA {
|
||||
@Override
|
||||
public long toScalar(long size) {
|
||||
return x(size, C2 / C0, MAX / (C2 / C0));
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toKilo(long size) {
|
||||
return x(size, C2 / C1, MAX / (C2 / C1));
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toMega(long size) {
|
||||
return size;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toGiga(long size) {
|
||||
return size / (C3 / C2);
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toTera(long size) {
|
||||
return size / (C4 / C2);
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toPeta(long size) {
|
||||
return size / (C5 / C2);
|
||||
}
|
||||
},
|
||||
GIGA {
|
||||
@Override
|
||||
public long toScalar(long size) {
|
||||
return x(size, C3 / C0, MAX / (C3 / C0));
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toKilo(long size) {
|
||||
return x(size, C3 / C1, MAX / (C3 / C1));
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toMega(long size) {
|
||||
return x(size, C3 / C2, MAX / (C3 / C2));
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toGiga(long size) {
|
||||
return size;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toTera(long size) {
|
||||
return size / (C4 / C3);
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toPeta(long size) {
|
||||
return size / (C5 / C3);
|
||||
}
|
||||
},
|
||||
TERA {
|
||||
@Override
|
||||
public long toScalar(long size) {
|
||||
return x(size, C4 / C0, MAX / (C4 / C0));
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toKilo(long size) {
|
||||
return x(size, C4 / C1, MAX / (C4 / C1));
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toMega(long size) {
|
||||
return x(size, C4 / C2, MAX / (C4 / C2));
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toGiga(long size) {
|
||||
return x(size, C4 / C3, MAX / (C4 / C3));
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toTera(long size) {
|
||||
return size;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toPeta(long size) {
|
||||
return size / (C5 / C4);
|
||||
}
|
||||
},
|
||||
PETA {
|
||||
@Override
|
||||
public long toScalar(long size) {
|
||||
return x(size, C5 / C0, MAX / (C5 / C0));
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toKilo(long size) {
|
||||
return x(size, C5 / C1, MAX / (C5 / C1));
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toMega(long size) {
|
||||
return x(size, C5 / C2, MAX / (C5 / C2));
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toGiga(long size) {
|
||||
return x(size, C5 / C3, MAX / (C5 / C3));
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toTera(long size) {
|
||||
return x(size, C5 / C4, MAX / (C5 / C4));
|
||||
}
|
||||
|
||||
@Override
|
||||
public long toPeta(long size) {
|
||||
return size;
|
||||
}
|
||||
};
|
||||
|
||||
static final long C0 = 1L;
|
||||
static final long C1 = C0 * 1000L;
|
||||
static final long C2 = C1 * 1000L;
|
||||
static final long C3 = C2 * 1000L;
|
||||
static final long C4 = C3 * 1000L;
|
||||
static final long C5 = C4 * 1000L;
|
||||
|
||||
static final long MAX = Long.MAX_VALUE;
|
||||
|
||||
/**
|
||||
* Scale d by m, checking for overflow.
|
||||
* This has a short name to make the above code more readable.
|
||||
*/
|
||||
static long x(long d, long m, long over) {
|
||||
if (d > over) {
|
||||
return Long.MAX_VALUE;
|
||||
}
|
||||
if (d < -over) {
|
||||
return Long.MIN_VALUE;
|
||||
}
|
||||
return d * m;
|
||||
}
|
||||
|
||||
|
||||
public abstract long toScalar(long size);
|
||||
|
||||
public abstract long toKilo(long size);
|
||||
|
||||
public abstract long toMega(long size);
|
||||
|
||||
public abstract long toGiga(long size);
|
||||
|
||||
public abstract long toTera(long size);
|
||||
|
||||
public abstract long toPeta(long size);
|
||||
}
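SizeUnit mirrors ByteSizeUnit but with 1000-based factors; a minimal sketch (names and numbers are illustrative):

import org.xbib.content.util.unit.SizeUnit;

public class SizeUnitExample {
    public static void main(String[] args) {
        long scalar = SizeUnit.MEGA.toScalar(3); // 3 * 1000 * 1000 = 3000000
        long kilo = SizeUnit.GIGA.toKilo(1);     // 1 * 1000 * 1000 = 1000000
        System.out.println(scalar + " " + kilo);
    }
}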
|
|
@ -0,0 +1,259 @@
|
|||
package org.xbib.content.util.unit;
|
||||
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
/**
|
||||
* A time value, composed of a duration and a {@link TimeUnit}.
|
||||
*/
|
||||
public class TimeValue {
|
||||
|
||||
private static final long C0 = 1L;
|
||||
private static final long C1 = C0 * 1000L;
|
||||
private static final long C2 = C1 * 1000L;
|
||||
private static final long C3 = C2 * 1000L;
|
||||
private static final long C4 = C3 * 60L;
|
||||
private static final long C5 = C4 * 60L;
|
||||
private static final long C6 = C5 * 24L;
|
||||
private long duration;
|
||||
private TimeUnit timeUnit;
|
||||
|
||||
private TimeValue() {
|
||||
}
|
||||
|
||||
public TimeValue(long millis) {
|
||||
this(millis, TimeUnit.MILLISECONDS);
|
||||
}
|
||||
|
||||
public TimeValue(long duration, TimeUnit timeUnit) {
|
||||
this.duration = duration;
|
||||
this.timeUnit = timeUnit;
|
||||
}
|
||||
|
||||
public static TimeValue timeValueNanos(long nanos) {
|
||||
return new TimeValue(nanos, TimeUnit.NANOSECONDS);
|
||||
}
|
||||
|
||||
public static TimeValue timeValueMillis(long millis) {
|
||||
return new TimeValue(millis, TimeUnit.MILLISECONDS);
|
||||
}
|
||||
|
||||
public static TimeValue timeValueSeconds(long seconds) {
|
||||
return new TimeValue(seconds, TimeUnit.SECONDS);
|
||||
}
|
||||
|
||||
public static TimeValue timeValueMinutes(long minutes) {
|
||||
return new TimeValue(minutes, TimeUnit.MINUTES);
|
||||
}
|
||||
|
||||
public static TimeValue timeValueHours(long hours) {
|
||||
return new TimeValue(hours, TimeUnit.HOURS);
|
||||
}
|
||||
|
||||
/**
|
||||
* Format the double value with a single decimal point, trimming a trailing '.0'.
|
||||
*
|
||||
* @param value value
|
||||
* @param suffix suffix
|
||||
* @return string
|
||||
*/
|
||||
public static String format1Decimals(double value, String suffix) {
|
||||
String p = String.valueOf(value);
|
||||
int ix = p.indexOf('.') + 1;
|
||||
int ex = p.indexOf('E');
|
||||
char fraction = p.charAt(ix);
|
||||
if (fraction == '0') {
|
||||
if (ex != -1) {
|
||||
return p.substring(0, ix - 1) + p.substring(ex) + suffix;
|
||||
} else {
|
||||
return p.substring(0, ix - 1) + suffix;
|
||||
}
|
||||
} else {
|
||||
if (ex != -1) {
|
||||
return p.substring(0, ix) + fraction + p.substring(ex) + suffix;
|
||||
} else {
|
||||
return p.substring(0, ix) + fraction + suffix;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public static TimeValue parseTimeValue(String sValue, TimeValue defaultValue) {
|
||||
if (sValue == null) {
|
||||
return defaultValue;
|
||||
}
|
||||
long millis;
|
||||
if (sValue.endsWith("S")) {
|
||||
millis = Long.parseLong(sValue.substring(0, sValue.length() - 1));
|
||||
} else if (sValue.endsWith("ms")) {
|
||||
millis = (long) (Double.parseDouble(sValue.substring(0, sValue.length() - 2)));
|
||||
} else if (sValue.endsWith("s")) {
|
||||
millis = (long) (Double.parseDouble(sValue.substring(0, sValue.length() - 1)) * 1000);
|
||||
} else if (sValue.endsWith("m")) {
|
||||
millis = (long) (Double.parseDouble(sValue.substring(0, sValue.length() - 1)) * 60 * 1000);
|
||||
} else if (sValue.endsWith("H") || sValue.endsWith("h")) {
|
||||
millis = (long) (Double.parseDouble(sValue.substring(0, sValue.length() - 1)) * 60 * 60 * 1000);
|
||||
} else if (sValue.endsWith("d")) {
|
||||
millis = (long) (Double.parseDouble(sValue.substring(0, sValue.length() - 1)) * 24 * 60 * 60 * 1000);
|
||||
} else if (sValue.endsWith("w")) {
|
||||
millis = (long) (Double.parseDouble(sValue.substring(0, sValue.length() - 1)) * 7 * 24 * 60 * 60 * 1000);
|
||||
} else {
|
||||
millis = Long.parseLong(sValue);
|
||||
}
|
||||
return new TimeValue(millis, TimeUnit.MILLISECONDS);
|
||||
}
|
||||
|
||||
public long nanos() {
|
||||
return timeUnit.toNanos(duration);
|
||||
}
|
||||
|
||||
public long getNanos() {
|
||||
return nanos();
|
||||
}
|
||||
|
||||
public long micros() {
|
||||
return timeUnit.toMicros(duration);
|
||||
}
|
||||
|
||||
public long getMicros() {
|
||||
return micros();
|
||||
}
|
||||
|
||||
public long millis() {
|
||||
return timeUnit.toMillis(duration);
|
||||
}
|
||||
|
||||
public long getMillis() {
|
||||
return millis();
|
||||
}
|
||||
|
||||
public long seconds() {
|
||||
return timeUnit.toSeconds(duration);
|
||||
}
|
||||
|
||||
public long getSeconds() {
|
||||
return seconds();
|
||||
}
|
||||
|
||||
public long minutes() {
|
||||
return timeUnit.toMinutes(duration);
|
||||
}
|
||||
|
||||
public long getMinutes() {
|
||||
return minutes();
|
||||
}
|
||||
|
||||
public long hours() {
|
||||
return timeUnit.toHours(duration);
|
||||
}
|
||||
|
||||
public long getHours() {
|
||||
return hours();
|
||||
}
|
||||
|
||||
public long days() {
|
||||
return timeUnit.toDays(duration);
|
||||
}
|
||||
|
||||
public long getDays() {
|
||||
return days();
|
||||
}
|
||||
|
||||
public double microsFrac() {
|
||||
return ((double) nanos()) / C1;
|
||||
}
|
||||
|
||||
public double getMicrosFrac() {
|
||||
return microsFrac();
|
||||
}
|
||||
|
||||
public double millisFrac() {
|
||||
return ((double) nanos()) / C2;
|
||||
}
|
||||
|
||||
public double getMillisFrac() {
|
||||
return millisFrac();
|
||||
}
|
||||
|
||||
public double secondsFrac() {
|
||||
return ((double) nanos()) / C3;
|
||||
}
|
||||
|
||||
public double getSecondsFrac() {
|
||||
return secondsFrac();
|
||||
}
|
||||
|
||||
public double minutesFrac() {
|
||||
return ((double) nanos()) / C4;
|
||||
}
|
||||
|
||||
public double getMinutesFrac() {
|
||||
return minutesFrac();
|
||||
}
|
||||
|
||||
public double hoursFrac() {
|
||||
return ((double) nanos()) / C5;
|
||||
}
|
||||
|
||||
public double getHoursFrac() {
|
||||
return hoursFrac();
|
||||
}
|
||||
|
||||
public double daysFrac() {
|
||||
return ((double) nanos()) / C6;
|
||||
}
|
||||
|
||||
public double getDaysFrac() {
|
||||
return daysFrac();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
if (duration < 0) {
|
||||
return Long.toString(duration);
|
||||
}
|
||||
long nanos = nanos();
|
||||
if (nanos == 0) {
|
||||
return "0s";
|
||||
}
|
||||
double value = nanos;
|
||||
String suffix = "nanos";
|
||||
if (nanos >= C6) {
|
||||
value = daysFrac();
|
||||
suffix = "d";
|
||||
} else if (nanos >= C5) {
|
||||
value = hoursFrac();
|
||||
suffix = "h";
|
||||
} else if (nanos >= C4) {
|
||||
value = minutesFrac();
|
||||
suffix = "m";
|
||||
} else if (nanos >= C3) {
|
||||
value = secondsFrac();
|
||||
suffix = "s";
|
||||
} else if (nanos >= C2) {
|
||||
value = millisFrac();
|
||||
suffix = "ms";
|
||||
} else if (nanos >= C1) {
|
||||
value = microsFrac();
|
||||
suffix = "micros";
|
||||
}
|
||||
return format1Decimals(value, suffix);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) {
|
||||
return true;
|
||||
}
|
||||
if (o == null || getClass() != o.getClass()) {
|
||||
return false;
|
||||
}
|
||||
TimeValue timeValue = (TimeValue) o;
|
||||
return duration == timeValue.duration && timeUnit == timeValue.timeUnit;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int result = (int) (duration ^ (duration >>> 32));
|
||||
result = 31 * result + (timeUnit != null ? timeUnit.hashCode() : 0);
|
||||
return result;
|
||||
}
|
||||
}
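A sketch of the TimeValue parsing and formatting shown above; the duration string and the fallback value are illustrative:

import org.xbib.content.util.unit.TimeValue;

public class TimeValueExample {
    public static void main(String[] args) {
        // suffixes such as s, m, h/H, d, w and ms are recognized; a bare number is taken as milliseconds
        TimeValue t = TimeValue.parseTimeValue("90s", TimeValue.timeValueSeconds(30));
        System.out.println(t.millis()); // 90000
        System.out.println(t);          // 1.5m
    }
}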
|
|
@ -0,0 +1,4 @@
|
|||
/**
|
||||
* Classes for units.
|
||||
*/
|
||||
package org.xbib.content.util.unit;
|
|
@ -0,0 +1 @@
|
|||
org.xbib.content.json.JsonXContent
|
|
@ -0,0 +1 @@
|
|||
org.xbib.content.json.JsonSettingsLoader
|
|
@ -0,0 +1,150 @@
|
|||
package org.xbib.content;
|
||||
|
||||
import static org.xbib.content.json.JsonXContent.contentBuilder;
|
||||
|
||||
import org.junit.Assert;
|
||||
import org.junit.Test;
|
||||
import org.xbib.content.json.JsonXContent;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.math.BigDecimal;
|
||||
import java.math.BigInteger;
|
||||
import java.util.Arrays;
|
||||
import java.util.Date;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* Tests for {@link XContentBuilder}.
|
||||
*/
|
||||
public class XContentBuilderTest extends Assert {
|
||||
|
||||
@Test
|
||||
public void testCopy() throws IOException {
|
||||
XContentBuilder builder = contentBuilder();
|
||||
builder.startObject().field("hello", "world").endObject();
|
||||
builder.close();
|
||||
|
||||
XContentBuilder builder2 = contentBuilder();
|
||||
builder2.copy(builder);
|
||||
builder2.close();
|
||||
assertEquals(builder2.string(), "{\"hello\":\"world\"}");
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testCopyList() throws IOException {
|
||||
XContentBuilder builder1 = contentBuilder();
|
||||
builder1.startObject().field("hello", "world").endObject();
|
||||
builder1.close();
|
||||
XContentBuilder builder2 = contentBuilder();
|
||||
builder2.startObject().field("hello", "world").endObject();
|
||||
builder2.close();
|
||||
|
||||
XContentBuilder builder = contentBuilder();
|
||||
builder.startObject().startArray("list");
|
||||
builder.copy(Arrays.asList(builder1, builder2));
|
||||
builder.endArray().endObject();
|
||||
builder.close();
|
||||
|
||||
assertEquals(builder.string(), "{\"list\":[{\"hello\":\"world\"},{\"hello\":\"world\"}]}");
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testBuilderAsXContentList() throws IOException {
|
||||
XContentBuilder builder1 = contentBuilder();
|
||||
builder1.startObject().field("hello", "world").endObject();
|
||||
builder1.close();
|
||||
|
||||
XContentBuilder builder2 = contentBuilder();
|
||||
builder2.startObject().field("hello", "world").endObject();
|
||||
builder2.close();
|
||||
|
||||
XContentBuilder builder = contentBuilder();
|
||||
builder.startObject().array("list", builder1, builder2).endObject();
|
||||
builder.close();
|
||||
|
||||
assertEquals(builder.string(), "{\"list\":[{\"hello\":\"world\"},{\"hello\":\"world\"}]}");
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testBigDecimal() throws IOException {
|
||||
XContentBuilder builder = contentBuilder();
|
||||
builder.startObject().field("value", new BigDecimal("57683974591503.00")).endObject();
|
||||
assertEquals(builder.string(), "{\"value\":57683974591503.00}");
|
||||
|
||||
XContent content = XContentService.xContent(builder.string());
|
||||
Map<String, Object> map = content
|
||||
.createParser(builder.string())
|
||||
.losslessDecimals(true)
|
||||
.mapAndClose();
|
||||
assertEquals(map.toString(), "{value=57683974591503.00}");
|
||||
assertEquals(map.get("value").getClass().toString(), "class java.math.BigDecimal");
|
||||
|
||||
map = content
|
||||
.createParser(builder.string())
|
||||
.losslessDecimals(false)
|
||||
.mapAndClose();
|
||||
assertEquals(map.toString(), "{value=5.7683974591503E13}");
|
||||
assertEquals(map.get("value").getClass().toString(), "class java.lang.Double");
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testBigInteger() throws IOException {
|
||||
XContentBuilder builder = contentBuilder();
|
||||
builder.startObject().field("value", new BigInteger("1234567891234567890123456789")).endObject();
|
||||
assertEquals(builder.string(), "{\"value\":1234567891234567890123456789}");
|
||||
|
||||
XContent content = XContentService.xContent(builder.string());
|
||||
Map<String, Object> map = content
|
||||
.createParser(builder.string())
|
||||
.losslessDecimals(true)
|
||||
.mapAndClose();
|
||||
assertEquals(map.toString(), "{value=1234567891234567890123456789}");
|
||||
assertEquals(map.get("value").getClass().toString(), "class java.math.BigInteger");
|
||||
|
||||
map = content
|
||||
.createParser(builder.string())
|
||||
.losslessDecimals(false)
|
||||
.mapAndClose();
|
||||
assertEquals(map.toString(), "{value=1234567891234567890123456789}");
|
||||
assertEquals(map.get("value").getClass().toString(), "class java.math.BigInteger");
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testDateFormatting() throws IOException {
|
||||
XContentBuilder builder = contentBuilder();
|
||||
Date d = new Date();
|
||||
d.setTime(1398175311488L);
|
||||
builder.startObject().field("value", d).endObject();
|
||||
Map<String, Object> map = JsonXContent.jsonContent()
|
||||
.createParser(builder.string())
|
||||
.losslessDecimals(false)
|
||||
.mapAndClose();
|
||||
assertEquals("{value=2014-04-22T14:01:51.488Z}", map.toString());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testBase16() throws IOException {
|
||||
XContentBuilder builder = contentBuilder();
|
||||
builder.startObject().field("value", "4AC3B67267").endObject();
|
||||
assertEquals(builder.string(), "{\"value\":\"4AC3B67267\"}");
|
||||
|
||||
XContent content = XContentService.xContent(builder.string());
|
||||
Map<String, Object> map = content
|
||||
.createParser(builder.string())
|
||||
.enableBase16Checks(true)
|
||||
.mapAndClose();
|
||||
assertEquals(new String((byte[]) map.get("value")), "Jörg");
|
||||
|
||||
map = content.createParser(builder.string())
|
||||
.enableBase16Checks(false)
|
||||
.mapAndClose();
|
||||
assertEquals(map.toString(), "{value=4AC3B67267}");
|
||||
}
|
||||
|
||||
@Test(expected = NullPointerException.class)
|
||||
public void testNullKey() throws IOException {
|
||||
XContentBuilder builder = contentBuilder();
|
||||
builder.field((String) null);
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,4 @@
|
|||
/**
|
||||
* Classes for testing content parsing and generating.
|
||||
*/
|
||||
package org.xbib.content;
|
|
@ -0,0 +1,75 @@
|
|||
package org.xbib.content.settings;
|
||||
|
||||
import static org.xbib.content.settings.Settings.settingsBuilder;
|
||||
|
||||
import org.junit.Assert;
|
||||
import org.junit.Test;
|
||||
import org.xbib.content.XContentHelper;
|
||||
|
||||
import java.io.StringReader;
|
||||
import java.util.Arrays;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* Tests for {@link Settings}.
|
||||
*/
|
||||
public class SettingsTest extends Assert {
|
||||
|
||||
@Test
|
||||
public void testArray() {
|
||||
Settings settings = Settings.settingsBuilder()
|
||||
.putArray("input", Arrays.asList("a", "b", "c")).build();
|
||||
assertEquals("a", settings.getAsArray("input")[0]);
|
||||
assertEquals("b", settings.getAsArray("input")[1]);
|
||||
assertEquals("c", settings.getAsArray("input")[2]);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGroups() {
|
||||
Settings settings = Settings.settingsBuilder()
|
||||
.put("prefix.group1.k1", "v1")
|
||||
.put("prefix.group1.k2", "v2")
|
||||
.put("prefix.group1.k3", "v3")
|
||||
.put("prefix.group2.k1", "v1")
|
||||
.put("prefix.group2.k2", "v2")
|
||||
.put("prefix.group2.k3", "v3")
|
||||
.build();
|
||||
Map<String, Settings> groups = settings.getGroups("prefix");
|
||||
assertEquals("[group1, group2]", groups.keySet().toString());
|
||||
assertTrue(groups.get("group1").getAsMap().containsKey("k1"));
|
||||
assertTrue(groups.get("group1").getAsMap().containsKey("k2"));
|
||||
assertTrue(groups.get("group1").getAsMap().containsKey("k3"));
|
||||
assertTrue(groups.get("group2").getAsMap().containsKey("k1"));
|
||||
assertTrue(groups.get("group2").getAsMap().containsKey("k2"));
|
||||
assertTrue(groups.get("group2").getAsMap().containsKey("k3"));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testMapForSettings() {
|
||||
Map<String, Object> map = new HashMap<>();
|
||||
map.put("hello", "world");
|
||||
Map<String, Object> settingsMap = new HashMap<>();
|
||||
settingsMap.put("map", map);
|
||||
Settings settings = settingsBuilder().loadFromMap(settingsMap).build();
|
||||
assertEquals("{map.hello=world}", settings.getAsMap().toString());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testMapSettingsFromReader() {
|
||||
StringReader reader = new StringReader("{\"map\":{\"hello\":\"world\"}}");
|
||||
Map<String, Object> spec = XContentHelper.convertFromJsonToMap(reader);
|
||||
Settings settings = settingsBuilder().loadFromMap(spec).build();
|
||||
assertEquals("{map.hello=world}", settings.getAsMap().toString());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testCurrentYearInSettings() {
|
||||
Settings settings = Settings.settingsBuilder()
|
||||
.put("date", "${yyyy}")
|
||||
.replacePropertyPlaceholders()
|
||||
.build();
|
||||
assertTrue(Integer.parseInt(settings.get("date")) > 2000);
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,4 @@
|
|||
/**
|
||||
* Classes for testing settings parsing and generating.
|
||||
*/
|
||||
package org.xbib.content.settings;
|
13
content-core/src/test/resources/log4j2.xml
Normal file
|
@ -0,0 +1,13 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<configuration status="OFF">
|
||||
<appenders>
|
||||
<Console name="Console" target="SYSTEM_OUT">
|
||||
<PatternLayout pattern="[%d{ABSOLUTE}][%-5p][%-25c][%t] %m%n"/>
|
||||
</Console>
|
||||
</appenders>
|
||||
<Loggers>
|
||||
<Root level="info">
|
||||
<AppenderRef ref="Console"/>
|
||||
</Root>
|
||||
</Loggers>
|
||||
</configuration>
|
323
content-csv/config/checkstyle/checkstyle.xml
Normal file
|
@ -0,0 +1,323 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE module PUBLIC
|
||||
"-//Puppy Crawl//DTD Check Configuration 1.3//EN"
|
||||
"http://www.puppycrawl.com/dtds/configuration_1_3.dtd">
|
||||
|
||||
<!-- This is a checkstyle configuration file. For descriptions of
|
||||
what the following rules do, please see the checkstyle configuration
|
||||
page at http://checkstyle.sourceforge.net/config.html -->
|
||||
|
||||
<module name="Checker">
|
||||
|
||||
<module name="FileTabCharacter">
|
||||
<!-- Checks that there are no tab characters in the file.
|
||||
-->
|
||||
</module>
|
||||
|
||||
<module name="NewlineAtEndOfFile">
|
||||
<property name="lineSeparator" value="lf"/>
|
||||
</module>
|
||||
|
||||
<module name="RegexpSingleline">
|
||||
<!-- Checks that FIXME is not used in comments. TODO is preferred.
|
||||
-->
|
||||
<property name="format" value="((//.*)|(\*.*))FIXME" />
|
||||
<property name="message" value='TODO is preferred to FIXME. e.g. "TODO(johndoe): Refactor when v2 is released."' />
|
||||
</module>
|
||||
|
||||
<module name="RegexpSingleline">
|
||||
<!-- Checks that TODOs are named. (Actually, just that they are followed
|
||||
by an open paren.)
|
||||
-->
|
||||
<property name="format" value="((//.*)|(\*.*))TODO[^(]" />
|
||||
<property name="message" value='All TODOs should be named. e.g. "TODO(johndoe): Refactor when v2 is released."' />
|
||||
</module>
|
||||
|
||||
<module name="JavadocPackage">
|
||||
<!-- Checks that each Java package has a Javadoc file used for commenting.
|
||||
Only allows a package-info.java, not package.html. -->
|
||||
</module>
|
||||
|
||||
<!-- All Java AST specific tests live under TreeWalker module. -->
|
||||
<module name="TreeWalker">
|
||||
|
||||
<!--
|
||||
|
||||
IMPORT CHECKS
|
||||
|
||||
-->
|
||||
|
||||
<module name="RedundantImport">
|
||||
<!-- Checks for redundant import statements. -->
|
||||
<property name="severity" value="error"/>
|
||||
</module>
|
||||
|
||||
<module name="ImportOrder">
|
||||
<!-- Checks for out of order import statements. -->
|
||||
|
||||
<property name="severity" value="warning"/>
|
||||
<property name="groups" value="com,junit,net,org,java,javax"/>
|
||||
<!-- This ensures that static imports go first. -->
|
||||
<property name="option" value="top"/>
|
||||
<property name="tokens" value="STATIC_IMPORT, IMPORT"/>
|
||||
</module>
|
||||
|
||||
<!--
|
||||
|
||||
JAVADOC CHECKS
|
||||
|
||||
-->
|
||||
|
||||
<!-- Checks for Javadoc comments. -->
|
||||
<!-- See http://checkstyle.sf.net/config_javadoc.html -->
|
||||
<module name="JavadocMethod">
|
||||
<property name="scope" value="protected"/>
|
||||
<property name="severity" value="warning"/>
|
||||
<property name="allowMissingJavadoc" value="true"/>
|
||||
<property name="allowMissingParamTags" value="true"/>
|
||||
<property name="allowMissingReturnTag" value="true"/>
|
||||
<property name="allowMissingThrowsTags" value="true"/>
|
||||
<property name="allowThrowsTagsForSubclasses" value="true"/>
|
||||
<property name="allowUndeclaredRTE" value="true"/>
|
||||
</module>
|
||||
|
||||
<module name="JavadocType">
|
||||
<property name="scope" value="protected"/>
|
||||
<property name="severity" value="error"/>
|
||||
</module>
|
||||
|
||||
<module name="JavadocStyle">
|
||||
<property name="severity" value="warning"/>
|
||||
</module>
|
||||
|
||||
<!--
|
||||
|
||||
NAMING CHECKS
|
||||
|
||||
-->
|
||||
|
||||
<!-- Item 38 - Adhere to generally accepted naming conventions -->
|
||||
|
||||
<module name="PackageName">
|
||||
<!-- Validates identifiers for package names against the
|
||||
supplied expression. -->
|
||||
<!-- Here the default checkstyle rule restricts package name parts to
|
||||
seven characters, this is not in line with common practice at Google.
|
||||
-->
|
||||
<property name="format" value="^[a-z]+(\.[a-z][a-z0-9]{1,})*$"/>
|
||||
<property name="severity" value="warning"/>
|
||||
</module>
|
||||
|
||||
<module name="TypeNameCheck">
|
||||
<!-- Validates static, final fields against the
|
||||
expression "^[A-Z][a-zA-Z0-9]*$". -->
|
||||
<metadata name="altname" value="TypeName"/>
|
||||
<property name="severity" value="warning"/>
|
||||
</module>
|
||||
|
||||
<module name="ConstantNameCheck">
|
||||
<!-- Validates non-private, static, final fields against the supplied
|
||||
public/package final fields "^[A-Z][A-Z0-9]*(_[A-Z0-9]+)*$". -->
|
||||
<metadata name="altname" value="ConstantName"/>
|
||||
<property name="applyToPublic" value="true"/>
|
||||
<property name="applyToProtected" value="true"/>
|
||||
<property name="applyToPackage" value="true"/>
|
||||
<property name="applyToPrivate" value="false"/>
|
||||
<property name="format" value="^([A-Z][A-Z0-9]*(_[A-Z0-9]+)*|FLAG_.*)$"/>
|
||||
<message key="name.invalidPattern"
|
||||
value="Variable ''{0}'' should be in ALL_CAPS (if it is a constant) or be private (otherwise)."/>
|
||||
<property name="severity" value="warning"/>
|
||||
</module>
|
||||
|
||||
<module name="StaticVariableNameCheck">
|
||||
<!-- Validates static, non-final fields against the supplied
|
||||
expression "^[a-z][a-zA-Z0-9]*_?$". -->
|
||||
<metadata name="altname" value="StaticVariableName"/>
|
||||
<property name="applyToPublic" value="true"/>
|
||||
<property name="applyToProtected" value="true"/>
|
||||
<property name="applyToPackage" value="true"/>
|
||||
<property name="applyToPrivate" value="true"/>
|
||||
<property name="format" value="^[a-z][a-zA-Z0-9]*_?$"/>
|
||||
<property name="severity" value="warning"/>
|
||||
</module>
|
||||
|
||||
<module name="MemberNameCheck">
|
||||
<!-- Validates non-static members against the supplied expression. -->
|
||||
<metadata name="altname" value="MemberName"/>
|
||||
<property name="applyToPublic" value="true"/>
|
||||
<property name="applyToProtected" value="true"/>
|
||||
<property name="applyToPackage" value="true"/>
|
||||
<property name="applyToPrivate" value="true"/>
|
||||
<property name="format" value="^[a-z][a-zA-Z0-9]*$"/>
|
||||
<property name="severity" value="warning"/>
|
||||
</module>
|
||||
|
||||
<module name="MethodNameCheck">
|
||||
<!-- Validates identifiers for method names. -->
|
||||
<metadata name="altname" value="MethodName"/>
|
||||
<property name="format" value="^[a-z][a-zA-Z0-9]*(_[a-zA-Z0-9]+)*$"/>
|
||||
<property name="severity" value="warning"/>
|
||||
</module>
|
||||
|
||||
<module name="ParameterName">
|
||||
<!-- Validates identifiers for method parameters against the
|
||||
expression "^[a-z][a-zA-Z0-9]*$". -->
|
||||
<property name="severity" value="warning"/>
|
||||
</module>
|
||||
|
||||
<module name="LocalFinalVariableName">
|
||||
<!-- Validates identifiers for local final variables against the
|
||||
expression "^[a-z][a-zA-Z0-9]*$". -->
|
||||
<property name="severity" value="warning"/>
|
||||
</module>
|
||||
|
||||
<module name="LocalVariableName">
|
||||
<!-- Validates identifiers for local variables against the
|
||||
expression "^[a-z][a-zA-Z0-9]*$". -->
|
||||
<property name="severity" value="warning"/>
|
||||
</module>
|
||||
|
||||
|
||||
<!--
|
||||
|
||||
LENGTH and CODING CHECKS
|
||||
|
||||
-->
|
||||
|
||||
<module name="LineLength">
|
||||
<!-- Checks if a line is too long. -->
|
||||
<property name="max" value="${com.puppycrawl.tools.checkstyle.checks.sizes.LineLength.max}" default="128"/>
|
||||
<property name="severity" value="error"/>
|
||||
|
||||
<!--
|
||||
The default ignore pattern exempts the following elements:
|
||||
- package and import statements
|
||||
- long URLs inside comments
|
||||
-->
|
||||
|
||||
<property name="ignorePattern"
|
||||
value="${com.puppycrawl.tools.checkstyle.checks.sizes.LineLength.ignorePattern}"
|
||||
default="^(package .*;\s*)|(import .*;\s*)|( *(\*|//).*https?://.*)$"/>
|
||||
</module>
|
||||
|
||||
<module name="LeftCurly">
|
||||
<!-- Checks for placement of the left curly brace ('{'). -->
|
||||
<property name="severity" value="warning"/>
|
||||
</module>
|
||||
|
||||
<module name="RightCurly">
|
||||
<!-- Checks that right curlies on CATCH, ELSE, and TRY blocks are on
|
||||
the same line. e.g., the following example is fine:
|
||||
<pre>
|
||||
if {
|
||||
...
|
||||
} else
|
||||
</pre>
|
||||
-->
|
||||
<!-- This next example is not fine:
|
||||
<pre>
|
||||
if {
|
||||
...
|
||||
}
|
||||
else
|
||||
</pre>
|
||||
-->
|
||||
<property name="option" value="same"/>
|
||||
<property name="severity" value="warning"/>
|
||||
</module>
|
||||
|
||||
<!-- Checks for braces around if/else, for, while, and do blocks -->
|
||||
<module name="NeedBraces">
|
||||
<property name="severity" value="warning"/>
|
||||
<property name="tokens" value="LITERAL_IF, LITERAL_ELSE, LITERAL_FOR, LITERAL_WHILE, LITERAL_DO"/>
|
||||
</module>
|
||||
|
||||
<module name="UpperEll">
|
||||
<!-- Checks that long constants are defined with an upper ell ('L' rather than 'l'). -->
|
||||
<property name="severity" value="error"/>
|
||||
</module>
|
||||
|
||||
<module name="FallThrough">
|
||||
<!-- Warn about falling through to the next case statement. Similar to
|
||||
javac -Xlint:fallthrough, but the check is suppressed if a single-line comment
|
||||
on the last non-blank line preceding the fallen-into case contains 'fall through' (or
|
||||
some other variants, which we don't publicize, to promote consistency).
|
||||
-->
|
||||
<property name="reliefPattern"
|
||||
value="fall through|Fall through|fallthru|Fallthru|falls through|Falls through|fallthrough|Fallthrough|No break|NO break|no break|continue on"/>
|
||||
<property name="severity" value="error"/>
|
||||
</module>
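<!-- A minimal sketch (hypothetical code, not from this project) of the relief pattern
     above: a matching comment on the last non-blank line before the fallen-into case
     suppresses the error.
<pre>
switch (code) {
case 1:
    handleOne();
    // fall through
case 2:
    handleOneOrTwo();   // reached for both 1 and 2; no error because of the comment
    break;
default:
    break;
}
</pre>
-->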
|
||||
|
||||
|
||||
<!--
|
||||
|
||||
MODIFIERS CHECKS
|
||||
|
||||
-->
|
||||
|
||||
<module name="ModifierOrder">
|
||||
<!-- Warn if modifier order is inconsistent with JLS3 8.1.1, 8.3.1, and
|
||||
8.4.3. The prescribed order is:
|
||||
public, protected, private, abstract, static, final, transient, volatile,
|
||||
synchronized, native, strictfp
|
||||
-->
|
||||
</module>
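<!-- Illustrative only (hypothetical field): the same declaration written in the
     prescribed order and in an order this check warns about.
<pre>
public static final int LIMIT = 10;   // ok: public, static, final
static public final int LIMIT = 10;   // warned: "static" placed before "public"
</pre>
-->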
|
||||
|
||||
|
||||
<!--
|
||||
|
||||
WHITESPACE CHECKS
|
||||
|
||||
-->
|
||||
|
||||
<module name="WhitespaceAround">
|
||||
<!-- Checks that various tokens are surrounded by whitespace.
|
||||
This includes most binary operators and keywords followed
|
||||
by parentheses or curly braces.
|
||||
-->
|
||||
<property name="tokens" value="ASSIGN, BAND, BAND_ASSIGN, BOR,
|
||||
BOR_ASSIGN, BSR, BSR_ASSIGN, BXOR, BXOR_ASSIGN, COLON, DIV, DIV_ASSIGN,
|
||||
EQUAL, GE, GT, LAND, LE, LITERAL_CATCH, LITERAL_DO, LITERAL_ELSE,
|
||||
LITERAL_FINALLY, LITERAL_FOR, LITERAL_IF, LITERAL_RETURN,
|
||||
LITERAL_SYNCHRONIZED, LITERAL_TRY, LITERAL_WHILE, LOR, LT, MINUS,
|
||||
MINUS_ASSIGN, MOD, MOD_ASSIGN, NOT_EQUAL, PLUS, PLUS_ASSIGN, QUESTION,
|
||||
SL, SL_ASSIGN, SR_ASSIGN, STAR, STAR_ASSIGN"/>
|
||||
<property name="severity" value="error"/>
|
||||
</module>
|
||||
|
||||
<module name="WhitespaceAfter">
|
||||
<!-- Checks that commas, semicolons and typecasts are followed by
|
||||
whitespace.
|
||||
-->
|
||||
<property name="tokens" value="COMMA, SEMI, TYPECAST"/>
|
||||
</module>
|
||||
|
||||
<module name="NoWhitespaceAfter">
|
||||
<!-- Checks that there is no whitespace after the dot operator or various unary operators.
|
||||
Linebreaks are allowed.
|
||||
-->
|
||||
<property name="tokens" value="BNOT, DEC, DOT, INC, LNOT, UNARY_MINUS,
|
||||
UNARY_PLUS"/>
|
||||
<property name="allowLineBreaks" value="true"/>
|
||||
<property name="severity" value="error"/>
|
||||
</module>
|
||||
|
||||
<module name="NoWhitespaceBefore">
|
||||
<!-- Checks that there is no whitespace before semicolons, dots, or postfix increment/decrement operators.
|
||||
Linebreaks are allowed.
|
||||
-->
|
||||
<property name="tokens" value="SEMI, DOT, POST_DEC, POST_INC"/>
|
||||
<property name="allowLineBreaks" value="true"/>
|
||||
<property name="severity" value="error"/>
|
||||
</module>
|
||||
|
||||
<module name="ParenPad">
|
||||
<!-- Checks that there is no whitespace before close parens or after
|
||||
open parens.
|
||||
-->
|
||||
<property name="severity" value="warning"/>
|
||||
</module>
|
||||
|
||||
</module>
|
||||
</module>
|
||||
|
|
@ -0,0 +1,16 @@
|
|||
package org.xbib.content.csv;
|
||||
|
||||
/**
|
||||
* Constants shared by the CSV classes: control characters, the comma delimiter, and the quote/escape character.
|
||||
*/
|
||||
interface CSVConstants {
|
||||
|
||||
char BACKSPACE = '\b';
|
||||
char CR = '\r';
|
||||
char FF = '\f';
|
||||
char LF = '\n';
|
||||
char TAB = '\t';
|
||||
char COMMA = ',';
|
||||
char QUOTE = '\"';
|
||||
char ESCAPE_CHARACTER = '\"';
|
||||
}
|
|
@ -0,0 +1,99 @@
|
|||
package org.xbib.content.csv;
|
||||
|
||||
import java.io.Closeable;
|
||||
import java.io.Flushable;
|
||||
import java.io.IOException;
|
||||
import java.io.Writer;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* A simple CSV generator that writes values to an underlying {@link java.io.Writer}, quoting and escaping values that contain special characters.
|
||||
*/
|
||||
public class CSVGenerator implements CSVConstants, Closeable, Flushable {
|
||||
|
||||
private static final String LF = System.getProperty("line.separator");
|
||||
|
||||
private Writer writer;
|
||||
|
||||
private int col;
|
||||
|
||||
private int row;
|
||||
|
||||
private List<String> keys;
|
||||
|
||||
public CSVGenerator(Writer writer) {
|
||||
this.writer = writer;
|
||||
this.col = 0;
|
||||
this.keys = new ArrayList<>();
|
||||
}
|
||||
|
||||
public CSVGenerator keys(List<String> keys) {
|
||||
this.keys = keys;
|
||||
return this;
|
||||
}
|
||||
|
||||
public CSVGenerator writeKeys() throws IOException {
|
||||
for (String k : keys) {
|
||||
write(k);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
public CSVGenerator write(String value) throws IOException {
|
||||
if (col > 0) {
|
||||
writer.write(COMMA);
|
||||
}
|
||||
if (value != null) {
|
||||
writer.write(escape(value));
|
||||
}
|
||||
col++;
|
||||
if (col > keys.size()) {
|
||||
writer.write(LF);
|
||||
row++;
|
||||
col = 0;
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
public int getColumn() {
|
||||
return col;
|
||||
}
|
||||
|
||||
public int getRow() {
|
||||
return row;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() throws IOException {
|
||||
writer.close();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void flush() throws IOException {
|
||||
writer.flush();
|
||||
}
|
||||
|
||||
private String escape(String value) {
|
||||
if (value.indexOf(QUOTE) < 0
|
||||
&& value.indexOf(ESCAPE_CHARACTER) < 0
|
||||
&& value.indexOf(COMMA) < 0
|
||||
&& value.indexOf(TAB) < 0
|
||||
&& !value.contains(LF)) {
|
||||
return value;
|
||||
}
|
||||
int length = value.length();
|
||||
StringBuilder sb = new StringBuilder(length + 2);
|
||||
sb.append(QUOTE);
|
||||
for (int i = 0; i < length; i++) {
|
||||
char ch = value.charAt(i);
|
||||
if (ch == QUOTE) {
|
||||
sb.append(QUOTE);
|
||||
}
|
||||
sb.append(ch);
|
||||
}
|
||||
sb.append(QUOTE);
|
||||
return sb.toString();
|
||||
}
|
||||
|
||||
}
|
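A minimal usage sketch for the generator above (illustrative only, not part of the commit;
the key names are made up). It shows the intended call sequence: set the column keys, emit
them, then write cell values, letting write() insert commas, quoting and line breaks
according to its column bookkeeping.

package org.xbib.content.csv;

import java.io.IOException;
import java.io.StringWriter;
import java.util.Arrays;

public class CSVGeneratorUsage {
    public static void main(String[] args) throws IOException {
        StringWriter out = new StringWriter();
        try (CSVGenerator gen = new CSVGenerator(out)) {
            gen.keys(Arrays.asList("id", "name"));   // hypothetical column keys
            gen.writeKeys();                         // emit the key cells
            gen.write("1");
            gen.write("say \"hello\", world");       // contains quote and comma, so it gets quoted
        }
        System.out.println(out);
    }
}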
257
content-csv/src/main/java/org/xbib/content/csv/CSVLexer.java
Normal file
|
@ -0,0 +1,257 @@
|
|||
package org.xbib.content.csv;
|
||||
|
||||
import java.io.Closeable;
|
||||
import java.io.IOException;
|
||||
|
||||
/**
|
||||
* A lexer that reads characters from a {@link CSVLookAheadReader} and produces CSV tokens for the parser.
|
||||
*/
|
||||
class CSVLexer implements CSVConstants, Closeable {
|
||||
|
||||
private final CSVLookAheadReader reader;
|
||||
|
||||
private final char delimiter;
|
||||
|
||||
private final char escape;
|
||||
|
||||
private final char quoteChar;
|
||||
|
||||
private final char commentStart;
|
||||
|
||||
private final boolean ignoreSurroundingSpaces;
|
||||
|
||||
private final boolean ignoreEmptyLines;
|
||||
|
||||
CSVLexer(CSVLookAheadReader reader, char delimiter, char escape, char quoteChar, char commentStart,
|
||||
boolean ignoreSurroundingSpaces, boolean ignoreEmptyLines) {
|
||||
this.reader = reader;
|
||||
this.delimiter = delimiter;
|
||||
this.escape = escape;
|
||||
this.quoteChar = quoteChar;
|
||||
this.commentStart = commentStart;
|
||||
this.ignoreSurroundingSpaces = ignoreSurroundingSpaces;
|
||||
this.ignoreEmptyLines = ignoreEmptyLines;
|
||||
}
|
||||
|
||||
CSVToken nextToken(final CSVToken csvToken) throws IOException {
|
||||
int lastChar = reader.getLastChar();
|
||||
int c = reader.read();
|
||||
boolean eol = readEndOfLine(c);
|
||||
if (ignoreEmptyLines) {
|
||||
while (eol && isStartOfLine(lastChar)) {
|
||||
lastChar = c;
|
||||
c = reader.read();
|
||||
eol = readEndOfLine(c);
|
||||
if (isEndOfFile(c)) {
|
||||
csvToken.type = CSVToken.Type.EOF;
|
||||
return csvToken;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (isEndOfFile(lastChar) || (!isDelimiter(lastChar) && isEndOfFile(c))) {
|
||||
csvToken.type = CSVToken.Type.EOF;
|
||||
return csvToken;
|
||||
}
|
||||
if (isStartOfLine(lastChar) && isCommentStart(c)) {
|
||||
final String line = reader.readLine();
|
||||
if (line == null) {
|
||||
csvToken.type = CSVToken.Type.EOF;
|
||||
return csvToken;
|
||||
}
|
||||
final String comment = line.trim();
|
||||
csvToken.content.append(comment);
|
||||
csvToken.type = CSVToken.Type.COMMENT;
|
||||
return csvToken;
|
||||
}
|
||||
while (csvToken.type == CSVToken.Type.INVALID) {
|
||||
if (ignoreSurroundingSpaces) {
|
||||
while (isWhitespace(c) && !eol) {
|
||||
c = reader.read();
|
||||
eol = readEndOfLine(c);
|
||||
}
|
||||
}
|
||||
if (isDelimiter(c)) {
|
||||
csvToken.type = CSVToken.Type.TOKEN;
|
||||
} else if (eol) {
|
||||
csvToken.type = CSVToken.Type.EORECORD;
|
||||
} else if (isQuoteChar(c)) {
|
||||
parseEncapsulatedToken(csvToken);
|
||||
} else if (isEndOfFile(c)) {
|
||||
csvToken.type = CSVToken.Type.EOF;
|
||||
csvToken.isReady = true;
|
||||
} else {
|
||||
parseSimpleToken(csvToken, c);
|
||||
}
|
||||
}
|
||||
return csvToken;
|
||||
}
|
||||
|
||||
private CSVToken parseSimpleToken(final CSVToken csvToken, int c) throws IOException {
|
||||
int ch = c;
|
||||
while (true) {
|
||||
if (readEndOfLine(ch)) {
|
||||
csvToken.type = CSVToken.Type.EORECORD;
|
||||
break;
|
||||
} else if (isEndOfFile(ch)) {
|
||||
csvToken.type = CSVToken.Type.EOF;
|
||||
csvToken.isReady = true;
|
||||
break;
|
||||
} else if (isDelimiter(ch)) {
|
||||
csvToken.type = CSVToken.Type.TOKEN;
|
||||
break;
|
||||
} else if (isEscape(ch)) {
|
||||
final int unescaped = readEscape();
|
||||
if (unescaped == -1) {
|
||||
csvToken.content.append((char) ch).append((char) reader.getLastChar());
|
||||
} else {
|
||||
csvToken.content.append((char) unescaped);
|
||||
}
|
||||
ch = reader.read();
|
||||
} else {
|
||||
csvToken.content.append((char) ch);
|
||||
ch = reader.read();
|
||||
}
|
||||
}
|
||||
if (ignoreSurroundingSpaces) {
|
||||
trimTrailingSpaces(csvToken.content);
|
||||
}
|
||||
return csvToken;
|
||||
}
|
||||
|
||||
private CSVToken parseEncapsulatedToken(final CSVToken csvToken) throws IOException {
|
||||
final long startLineNumber = getCurrentLineNumber();
|
||||
int c;
|
||||
while (true) {
|
||||
c = reader.read();
|
||||
if (isEscape(c)) {
|
||||
final int unescaped = readEscape();
|
||||
if (unescaped == -1) {
|
||||
csvToken.content.append((char) c).append((char) reader.getLastChar());
|
||||
} else {
|
||||
csvToken.content.append((char) unescaped);
|
||||
}
|
||||
} else if (isQuoteChar(c)) {
|
||||
if (isQuoteChar(reader.lookAhead())) {
|
||||
c = reader.read();
|
||||
csvToken.content.append((char) c);
|
||||
} else {
|
||||
while (true) {
|
||||
c = reader.read();
|
||||
if (isDelimiter(c)) {
|
||||
csvToken.type = CSVToken.Type.TOKEN;
|
||||
return csvToken;
|
||||
} else if (isEndOfFile(c)) {
|
||||
csvToken.type = CSVToken.Type.EOF;
|
||||
csvToken.isReady = true;
|
||||
return csvToken;
|
||||
} else if (readEndOfLine(c)) {
|
||||
csvToken.type = CSVToken.Type.EORECORD;
|
||||
return csvToken;
|
||||
} else if (!isWhitespace(c)) {
|
||||
throw new IOException("(line "
|
||||
+ getCurrentLineNumber()
|
||||
+ ") invalid char between encapsulated token and delimiter");
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if (isEndOfFile(c)) {
|
||||
throw new IOException("(startline "
|
||||
+ startLineNumber
|
||||
+ ") EOF reached before encapsulated token finished");
|
||||
} else {
|
||||
csvToken.content.append((char) c);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
long getCurrentLineNumber() {
|
||||
return reader.getCurrentLineNumber();
|
||||
}
|
||||
|
||||
private int readEscape() throws IOException {
|
||||
final int ch = reader.read();
|
||||
switch (ch) {
|
||||
case 'r':
|
||||
return CR;
|
||||
case 'n':
|
||||
return LF;
|
||||
case 't':
|
||||
return TAB;
|
||||
case 'b':
|
||||
return BACKSPACE;
|
||||
case 'f':
|
||||
return FF;
|
||||
case CR:
|
||||
case LF:
|
||||
case FF:
|
||||
case TAB:
|
||||
case BACKSPACE:
|
||||
return ch;
|
||||
case -1:
|
||||
throw new IOException("EOF whilst processing escape sequence");
|
||||
default:
|
||||
if (isMetaChar(ch)) {
|
||||
return ch;
|
||||
}
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
|
||||
private void trimTrailingSpaces(final StringBuilder buffer) {
|
||||
int length = buffer.length();
|
||||
while (length > 0 && Character.isWhitespace(buffer.charAt(length - 1))) {
|
||||
length = length - 1;
|
||||
}
|
||||
if (length != buffer.length()) {
|
||||
buffer.setLength(length);
|
||||
}
|
||||
}
|
||||
|
||||
private boolean readEndOfLine(final int ch) throws IOException {
|
||||
int c = ch;
|
||||
if (c == CR && reader.lookAhead() == LF) {
|
||||
c = reader.read();
|
||||
}
|
||||
return c == LF || c == CR;
|
||||
}
|
||||
|
||||
private boolean isWhitespace(final int ch) {
|
||||
return !isDelimiter(ch) && Character.isWhitespace((char) ch);
|
||||
}
|
||||
|
||||
private boolean isStartOfLine(final int ch) {
|
||||
return ch == LF || ch == CR || ch == -2;
|
||||
}
|
||||
|
||||
private boolean isEndOfFile(final int ch) {
|
||||
return ch == -1;
|
||||
}
|
||||
|
||||
private boolean isDelimiter(final int ch) {
|
||||
return ch == delimiter;
|
||||
}
|
||||
|
||||
private boolean isEscape(final int ch) {
|
||||
return ch == escape;
|
||||
}
|
||||
|
||||
private boolean isQuoteChar(final int ch) {
|
||||
return ch == quoteChar;
|
||||
}
|
||||
|
||||
private boolean isCommentStart(final int ch) {
|
||||
return ch == commentStart;
|
||||
}
|
||||
|
||||
private boolean isMetaChar(final int ch) {
|
||||
return ch == delimiter ||
|
||||
ch == escape ||
|
||||
ch == quoteChar ||
|
||||
ch == commentStart;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() throws IOException {
|
||||
reader.close();
|
||||
}
|
||||
}
|
|
@ -0,0 +1,91 @@
|
|||
package org.xbib.content.csv;
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.IOException;
|
||||
import java.io.Reader;
|
||||
|
||||
/**
|
||||
* A buffered reader with single-character look-ahead and line counting, used by the CSV lexer.
|
||||
*/
|
||||
class CSVLookAheadReader extends BufferedReader implements CSVConstants {
|
||||
|
||||
private int lastChar = -2;
|
||||
|
||||
private long eolCounter = 0;
|
||||
|
||||
CSVLookAheadReader(Reader reader) {
|
||||
super(reader);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int read() throws IOException {
|
||||
final int current = super.read();
|
||||
if (current == CR || (current == LF && lastChar != CR)) {
|
||||
eolCounter++;
|
||||
}
|
||||
lastChar = current;
|
||||
return lastChar;
|
||||
}
|
||||
|
||||
int getLastChar() {
|
||||
return lastChar;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int read(final char[] buf, final int offset, final int length) throws IOException {
|
||||
if (length == 0) {
|
||||
return 0;
|
||||
}
|
||||
final int len = super.read(buf, offset, length);
|
||||
if (len > 0) {
|
||||
for (int i = offset; i < offset + len; i++) {
|
||||
final char ch = buf[i];
|
||||
if (ch == LF) {
|
||||
if (CR != (i > 0 ? buf[i - 1] : lastChar)) {
|
||||
eolCounter++;
|
||||
}
|
||||
} else if (ch == CR) {
|
||||
eolCounter++;
|
||||
}
|
||||
}
|
||||
lastChar = buf[offset + len - 1];
|
||||
} else if (len == -1) {
|
||||
lastChar = -1;
|
||||
}
|
||||
return len;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String readLine() throws IOException {
|
||||
final String line = super.readLine();
|
||||
if (line != null) {
|
||||
lastChar = LF;
|
||||
eolCounter++;
|
||||
} else {
|
||||
lastChar = -1;
|
||||
}
|
||||
return line;
|
||||
}
|
||||
|
||||
int lookAhead() throws IOException {
|
||||
super.mark(1);
|
||||
final int c = super.read();
|
||||
super.reset();
|
||||
|
||||
return c;
|
||||
}
|
||||
|
||||
long getCurrentLineNumber() {
|
||||
if (lastChar == CR || lastChar == LF || lastChar == -2 || lastChar == -1) {
|
||||
return eolCounter;
|
||||
}
|
||||
return eolCounter + 1;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() throws IOException {
|
||||
lastChar = -1;
|
||||
super.close();
|
||||
}
|
||||
|
||||
}
|
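A small illustration (not part of the commit) of the peek semantics above: lookAhead()
marks the stream, reads one character and resets, so a following read() returns the same
character, and a CR/LF pair is counted as a single line boundary.

package org.xbib.content.csv;

import java.io.IOException;
import java.io.StringReader;

public class LookAheadDemo {
    public static void main(String[] args) throws IOException {
        try (CSVLookAheadReader r = new CSVLookAheadReader(new StringReader("a\r\nb"))) {
            int peeked = r.lookAhead();   // 'a', not consumed
            int consumed = r.read();      // 'a' again, now consumed
            System.out.println(peeked == consumed);       // true
            r.read();                     // CR
            r.read();                     // LF (counted together with the CR)
            System.out.println(r.getCurrentLineNumber()); // 1
        }
    }
}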
118
content-csv/src/main/java/org/xbib/content/csv/CSVParser.java
Normal file
|
@ -0,0 +1,118 @@
|
|||
package org.xbib.content.csv;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.Reader;
|
||||
import java.util.Iterator;
|
||||
import java.util.LinkedList;
|
||||
import java.util.List;
|
||||
import java.util.NoSuchElementException;
|
||||
import java.util.logging.Level;
|
||||
import java.util.logging.Logger;
|
||||
|
||||
/**
|
||||
* A CSV parser that exposes parsed records as an iterator over lists of strings.
|
||||
*/
|
||||
public class CSVParser {
|
||||
|
||||
private static final Logger logger = Logger.getLogger(CSVParser.class.getName());
|
||||
|
||||
private final CSVLexer lexer;
|
||||
|
||||
private final List<String> row;
|
||||
|
||||
private final CSVToken reusableCSVToken;
|
||||
|
||||
public CSVParser(Reader reader) throws IOException {
|
||||
lexer = new CSVLexer(new CSVLookAheadReader(reader), ',', '\\', '"', '#', true, true);
|
||||
row = new LinkedList<>();
|
||||
reusableCSVToken = new CSVToken();
|
||||
}
|
||||
|
||||
public CSVParser(Reader reader, char sep) throws IOException {
|
||||
lexer = new CSVLexer(new CSVLookAheadReader(reader), sep, '\\', '"', '#', true, true);
|
||||
row = new LinkedList<>();
|
||||
reusableCSVToken = new CSVToken();
|
||||
}
|
||||
|
||||
public void close() throws IOException {
|
||||
lexer.close();
|
||||
}
|
||||
|
||||
public long getCurrentLineNumber() {
|
||||
return lexer.getCurrentLineNumber();
|
||||
}
|
||||
|
||||
public Iterator<List<String>> iterator() {
|
||||
return new Iterator<List<String>>() {
|
||||
private List<String> current;
|
||||
|
||||
private List<String> getNextRow() throws IOException {
|
||||
return CSVParser.this.nextRow();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasNext() {
|
||||
if (current == null) {
|
||||
try {
|
||||
current = getNextRow();
|
||||
} catch (IOException e) {
|
||||
logger.log(Level.FINE, e.getMessage(), e);
|
||||
throw new NoSuchElementException(e.getMessage());
|
||||
}
|
||||
}
|
||||
return current != null && !current.isEmpty();
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<String> next() {
|
||||
if (current == null || current.isEmpty()) {
|
||||
throw new NoSuchElementException();
|
||||
}
|
||||
List<String> list = current;
|
||||
current = null;
|
||||
return list;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void remove() {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
protected List<String> nextRow() throws IOException {
|
||||
row.clear();
|
||||
StringBuilder sb = null;
|
||||
do {
|
||||
reusableCSVToken.reset();
|
||||
lexer.nextToken(reusableCSVToken);
|
||||
String s = reusableCSVToken.content.toString();
|
||||
switch (reusableCSVToken.type) {
|
||||
case TOKEN:
|
||||
case EORECORD:
|
||||
row.add(s);
|
||||
break;
|
||||
case EOF:
|
||||
if (reusableCSVToken.isReady) {
|
||||
row.add(s);
|
||||
}
|
||||
break;
|
||||
case INVALID:
|
||||
throw new IOException("(line " + getCurrentLineNumber() + ") invalid parse sequence");
|
||||
case COMMENT:
|
||||
if (sb == null) {
|
||||
sb = new StringBuilder();
|
||||
} else {
|
||||
sb.append(CSVConstants.LF);
|
||||
}
|
||||
sb.append(reusableCSVToken.content);
|
||||
reusableCSVToken.type = CSVToken.Type.TOKEN;
|
||||
break;
|
||||
default:
|
||||
throw new IllegalStateException("unexpected token type: " + reusableCSVToken.type);
|
||||
}
|
||||
} while (reusableCSVToken.type == CSVToken.Type.TOKEN);
|
||||
return row;
|
||||
}
|
||||
|
||||
}
|
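A minimal usage sketch for the parser above (illustrative only, not part of the commit):
rows are pulled through the iterator, with comment lines (starting with '#') skipped and
surrounding whitespace trimmed, as configured in the constructor.

package org.xbib.content.csv;

import java.io.IOException;
import java.io.StringReader;
import java.util.Iterator;
import java.util.List;

public class CSVParserUsage {
    public static void main(String[] args) throws IOException {
        String csv = "# a comment line\n" +
                "id,name\n" +
                "1,\"Smith, John\"\n";
        CSVParser parser = new CSVParser(new StringReader(csv));
        Iterator<List<String>> it = parser.iterator();
        while (it.hasNext()) {
            System.out.println(it.next());   // [id, name] then [1, Smith, John]
        }
        parser.close();
    }
}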
30
content-csv/src/main/java/org/xbib/content/csv/CSVToken.java
Normal file
|
@ -0,0 +1,30 @@
|
|||
package org.xbib.content.csv;
|
||||
|
||||
/**
|
||||
* A reusable token produced by the CSV lexer, carrying a type, its content, and a ready flag.
|
||||
*/
|
||||
final class CSVToken {
|
||||
|
||||
private static final int INITIAL_TOKEN_LENGTH = 50;
|
||||
|
||||
enum Type {
|
||||
INVALID,
|
||||
TOKEN,
|
||||
EOF,
|
||||
EORECORD,
|
||||
COMMENT
|
||||
}
|
||||
|
||||
CSVToken.Type type = Type.INVALID;
|
||||
|
||||
StringBuilder content = new StringBuilder(INITIAL_TOKEN_LENGTH);
|
||||
|
||||
boolean isReady;
|
||||
|
||||
void reset() {
|
||||
content.setLength(0);
|
||||
type = Type.INVALID;
|
||||
isReady = false;
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,4 @@
|
|||
/**
|
||||
* Classes for CSV content.
|
||||
*/
|
||||
package org.xbib.content.csv;
|
|
@ -0,0 +1,26 @@
|
|||
package org.xbib.content.csv;
|
||||
|
||||
import org.junit.Test;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.StringWriter;
|
||||
import java.util.Arrays;
|
||||
|
||||
/**
|
||||
* Tests for {@link CSVGenerator}.
|
||||
*/
|
||||
public class CSVGeneratorTest {
|
||||
|
||||
@Test
|
||||
public void test() throws IOException {
|
||||
StringWriter writer = new StringWriter();
|
||||
CSVGenerator gen = new CSVGenerator(writer);
|
||||
gen.keys(Arrays.asList("a", "b", "c"));
|
||||
for (int i = 0; i < 10; i++) {
|
||||
gen.write("val" + i);
|
||||
gen.write("\"Hello, World\"");
|
||||
gen.write("hey look a line seperator \n");
|
||||
}
|
||||
gen.close();
|
||||
}
|
||||
}
|
|
@ -0,0 +1,56 @@
|
|||
package org.xbib.content.csv;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
|
||||
import org.junit.Test;
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.InputStreamReader;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.text.MessageFormat;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.logging.Level;
|
||||
import java.util.logging.Logger;
|
||||
|
||||
/**
|
||||
* Tests for {@link CSVParser}.
|
||||
*/
|
||||
public class CSVParserTest {
|
||||
|
||||
private static final Logger logger = Logger.getLogger(CSVParserTest.class.getName());
|
||||
|
||||
@Test
|
||||
public void testCommaSeparated() throws IOException {
|
||||
InputStream in = getClass().getResourceAsStream("test.csv");
|
||||
int count = 0;
|
||||
try (BufferedReader reader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8))) {
|
||||
CSVParser csvParser = new CSVParser(reader);
|
||||
Iterator<List<String>> it = csvParser.iterator();
|
||||
while (it.hasNext()) {
|
||||
List<String> row = it.next();
|
||||
//logger.log(Level.INFO, MessageFormat.format("count={0} row={1}", count, row));
|
||||
count++;
|
||||
}
|
||||
}
|
||||
assertEquals(2, count);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testLargeFile() throws IOException {
|
||||
InputStream in = getClass().getResourceAsStream("titleFile.csv");
|
||||
int count = 0;
|
||||
try (BufferedReader reader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8))) {
|
||||
CSVParser csvParser = new CSVParser(reader);
|
||||
Iterator<List<String>> it = csvParser.iterator();
|
||||
while (it.hasNext()) {
|
||||
List<String> row = it.next();
|
||||
//logger.log(Level.INFO, MessageFormat.format("count={0} row={1}", count, row));
|
||||
count++;
|
||||
}
|
||||
}
|
||||
assertEquals(44447, count);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,47 @@
|
|||
package org.xbib.content.csv;
|
||||
|
||||
import org.junit.Test;
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.InputStreamReader;
|
||||
|
||||
/**
|
||||
* Test for reading tab-separated values.
|
||||
*/
|
||||
public class TSVParserTest {
|
||||
|
||||
@Test
|
||||
public void testTabSeparated() throws IOException {
|
||||
InputStream in = getClass().getResourceAsStream("2076831-X-web.txt");
|
||||
InputStreamReader r = new InputStreamReader(in, "UTF-8");
|
||||
BufferedReader reader = new BufferedReader(r);
|
||||
// skip 3 lines
|
||||
reader.readLine();
|
||||
reader.readLine();
|
||||
reader.readLine();
|
||||
String line;
|
||||
while ((line = reader.readLine()) != null) {
|
||||
String[] s = line.split("\\t");
|
||||
//logger.info("len={} line={}", s.length, Arrays.asList(s));
|
||||
int i = 0;
|
||||
String sigel = i < s.length ? s[i++] : "";
|
||||
String isil = i < s.length ? s[i++] : "";
|
||||
String name = i < s.length ? s[i++] : ""; // unused
|
||||
String code1 = i < s.length ? s[i++] : "";
|
||||
String code2 = i < s.length ? s[i++] : "";
|
||||
String code3 = i < s.length ? s[i++] : "";
|
||||
String comment = i < s.length ? s[i++] : "";
|
||||
String firstDate = i < s.length ? s[i++] : "";
|
||||
String firstVolume = i < s.length ? s[i++] : "";
|
||||
String firstIssue = i < s.length ? s[i++] : "";
|
||||
String lastDate = i < s.length ? s[i++] : "";
|
||||
String lastVolume = i < s.length ? s[i++] : "";
|
||||
String lastIssue = i < s.length ? s[i++] : "";
|
||||
String movingWall = i < s.length ? s[i] : "";
|
||||
//logger.info("lastDate={}", lastDate);
|
||||
}
|
||||
|
||||
}
|
||||
}
|
|
@ -0,0 +1,4 @@
|
|||
/**
|
||||
* Classes for testing CSV content.
|
||||
*/
|
||||
package org.xbib.content.csv;
|
13
content-csv/src/test/resources/log4j2.xml
Normal file
|
@ -0,0 +1,13 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<configuration status="OFF">
|
||||
<appenders>
|
||||
<Console name="Console" target="SYSTEM_OUT">
|
||||
<PatternLayout pattern="[%d{ABSOLUTE}][%-5p][%-25c][%t] %m%n"/>
|
||||
</Console>
|
||||
</appenders>
|
||||
<Loggers>
|
||||
<Root level="info">
|
||||
<AppenderRef ref="Console" />
|
||||
</Root>
|
||||
</Loggers>
|
||||
</configuration>
|
|
@ -0,0 +1,615 @@
|
|||
ZDB-Id: 2076831-X
|
||||
Treffer: 612
|
||||
|
||||
B 785 DE-B785 Berlin Abgeordnetenhaus k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Po 62 DE-Po62 Potsdam MPI Gravitationsphys. k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
B 33 DE-B33 Potsdam Astrophysik k n 1998 117 0 0 0
|
||||
B 33 DE-B33 Potsdam Astrophysik k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Bv 2 DE-Bv2 Bremerhaven A.-Wegener-Inst. k p 2004 123 0 2012 131 0
|
||||
Bv 2 DE-Bv2 Bremerhaven A.-Wegener-Inst. k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
D 161 DE-D161 Dresden Berufsakademie k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
D 275 DE-D275 Dresden EHS k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Gla 1 DE-Gla1 Glauchau Berufsakademie k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Hed 2 DE-Hed2 Heidenheim DHBW Bibliothek k p 2003 122 0 0 0
|
||||
Hed 2 DE-Hed2 Heidenheim DHBW Bibliothek k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Hed 2 DE-Hed2 Heidenheim DHBW Bibliothek k p 1998 117 0 2014 133 0
|
||||
Bn 3 DE-Bn3 Bautzen Berufsakademie k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Lör 2 DE-Loer2 Lörrach Berufsakademie k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Lör 2 DE-Loer2 Lörrach Berufsakademie k p 2003 122 0 0 0
|
||||
B 43 DE-B43 Berlin BAM k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Mh 35 DE-Mh35 Mannheim Berufsakademie k p 2003 122 0 0 0
|
||||
Mh 35 DE-Mh35 Mannheim Berufsakademie k p 1998 117 0 2014 133 0
|
||||
Mh 35 DE-Mh35 Mannheim Berufsakademie k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
941 DE-941 Mosbach Duale Hochschule k p 2003 122 0 0 0
|
||||
941 DE-941 Mosbach Duale Hochschule k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Rs 1 DE-Rs1 Riesa Berufsakademie k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Rav 1 DE-Rav1 Ravensburg Berufsakademie k p 2003 122 0 0 0
|
||||
Rav 1 DE-Rav1 Ravensburg Berufsakademie k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Rav 1 DE-Rav1 Ravensburg Berufsakademie k p 1998 117 0 2014 133 0
|
||||
Vil 2 DE-Vil2 Villingen-S. Berufsakad. k p 2003 122 0 0 0
|
||||
Vil 2 DE-Vil2 Villingen-S. Berufsakad. k p 1998 117 0 2014 133 0
|
||||
B 4	DE-B4	Berlin AdW B	k	p	n	keine Fernleihe an kommerzielle Bibliotheken liefern	1857	1	0	2002	121	0
|
||||
H 140 DE-H140 Hamburg vTI, FIZ Fischerei k p 1857 1 1 0 0
|
||||
H 140 DE-H140 Hamburg vTI, FIZ Fischerei k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
B 1512 DE-B1512 Bonn BfArM k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Ki 29 DE-Ki29 Kiel MRI, BID k p 1857 1 1 0 0
|
||||
Ka 51 DE-Ka51 Karlsruhe MRI, BID k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Ku 1 DE-Ku1 Kulmbach MRI, BID k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Ka 51 DE-Ka51 Karlsruhe MRI, BID k p 1857 1 1 0 0
|
||||
Det 2 DE-Det2 Detmold MRI, BID k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Ku 1 DE-Ku1 Kulmbach MRI, BID k p 1857 1 1 0 0
|
||||
Ki 29 DE-Ki29 Kiel MRI, BID k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Det 2 DE-Det2 Detmold MRI, BID k p 1857 1 1 0 0
|
||||
H 105 DE-H105 Hamburg vTI, FIZ Wald k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
H 105 DE-H105 Hamburg vTI, FIZ Wald k p 1857 1 1 0 0
|
||||
208 DE-208 Karlsruhe BGH k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
B 12 DE-B12 Berlin BI Risikobewertung k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
B 12 DE-B12 Berlin BI Risikobewertung k p 1857 1 1 0 0
|
||||
B 1509 DE-B1509 Bonn BI f. Berufsbildung k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Lan 1 DE-Lan1 Landau UB k p 1997 0 0 0 0
|
||||
Lan 1 DE-Lan1 Landau UB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Kob 7 DE-Kob7 Koblenz UB k p 1997 0 0 0 0
|
||||
Kob 7 DE-Kob7 Koblenz UB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
715 DE-715 Oldenburg IBIT k n 1997 116 0 0 0
|
||||
715 DE-715 Oldenburg IBIT k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
31 DE-31 Karlsruhe LB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
12 DE-12 München BSB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
12 DE-12 München BSB k 1998 117 0 0 0
|
||||
12 DE-12 München BSB k 2002 121 0 2003 122 0
|
||||
82 DE-82 Aachen BTH k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Bo 408 DE-Bo408 Bonn Stiftung caesar k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
D 267 DE-D267 Dresden MPI Mol.Zellbiol. k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
D 210 DE-D210 Dresden MPI Chem. Physik k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
982 DE-982 Wadern IBF für Informatik WGL k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
M 491 DE-M491 München ArchäologInst k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Ha 2 DE-Ha2 Halle/S Dt. Akad. Naturfor. k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
B 1503 DE-B1503 Bonn Inst. Entwicklpol. k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Lg 3 DE-Lg3 Ludwigsburg Dt.-Franz. Inst. k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Y 7 DE-Y7 Paris DFK k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
90/8 DE-90-8 Fachbibliothek der Dualen Hochschule Baden-Württemberg Karlsruhe (FBD k p 2003 122 0 0 0
|
||||
Po 6 DE-Po6 Nuthetal/Bergh.-Rehbr.DIfE WGL k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
B 1547 DE-B1547 Berlin InstMenschenrechte k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
B 554 DE-B554 Berlin DIW WGL k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Mar 1 DE-Mar1 Marbach Dt. Literaturarchiv k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
210 DE-210 München Deutsches Museum WGL k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
B 23 DE-B23 Offenbach Meteorolog. Bibl. k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
201 DE-201 München PatAmt k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
B 1531 DE-B1531 Berlin Dt. RheumaforschZ k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
1126 DE-1126 Idstein HS Fresenius k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
1126 DE-1126 Idstein HS Fresenius k p n 1998 117 0 0 0
|
||||
1052 DE-1052 Nürnberg Evang. HS k 1998 117 0 0 0
|
||||
521 DE-521 Frankfurt/O UB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Dm 1 DE-Dm1 Dortmund MPI f. mol. Physiol. k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
H 256 DE-H256 Hamburg Führungsakad.Bundesw. k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
253 DE-253 Braunschweig vTI,FIZ LändlRäum k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
253 DE-253 Braunschweig vTI,FIZ LändlRäum k p 1857 1 1 0 0
|
||||
90/4 DE-90-4 Karlsruhe KIT-B./FB TuW k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
90/4 DE-90-4 Karlsruhe KIT-B./FB TuW k p 2003 122 0 0 0
|
||||
90/4 DE-90-4 Karlsruhe KIT-B./FB TuW k p 1998 117 0 2014 133 0
|
||||
R 48 DE-R48 Dummerstorf Inst Nutztierbiol. k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Bo 133 DE-Bo133 Bonn F.-Ebert-Stiftung k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
944 DE-944 Aalen HS k p 1998 117 0 2014 133 0
|
||||
944 DE-944 Aalen HS k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
944 DE-944 Aalen HS k p 2003 122 0 0 0
|
||||
A 96 DE-A96 Aachen FHB k n 0 0 2014 0
|
||||
A 96 DE-A96 Aachen FHB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
1043 DE-1043 Aschaffenburg HS k 1998 117 0 0 0
|
||||
1102 DE-1102 Ansbach HS k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
1047 DE-1047 Weiden FH Amberg k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
1046 DE-1046 Amberg/Oberpfalz HS k 1998 117 0 0 0
|
||||
1047 DE-1047 Weiden FH Amberg k 1998 117 0 0 0
|
||||
1046 DE-1046 Amberg/Oberpfalz HS k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
949 DE-949 Biberach HBC k p 2003 122 0 0 0
|
||||
949 DE-949 Biberach HBC k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
949 DE-949 Biberach HBC k p 1998 117 0 2014 133 0
|
||||
858 DE-858 Coburg FH k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
858 DE-858 Coburg FH k 1998 117 0 0 0
|
||||
Dm 13 DE-Dm13 Dortmund FH k n 0 0 2014 0
|
||||
Dm 13 DE-Dm13 Dortmund FH k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
1010 DE-1010 Gelsenkirchen FH k n 0 0 2014 0
|
||||
1010 DE-1010 Gelsenkirchen FH k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
1051 DE-1051 Hof HS k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Bi 10 DE-Bi10 Bielefeld FH k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Bi 10 DE-Bi10 Bielefeld FH k n 0 0 2014 0
|
||||
573 DE-573 Ingolstadt HS k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
573 DE-573 Ingolstadt HS k 1998 117 0 0 0
|
||||
832 DE-832 Köln FH k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
859 DE-859 Kempten FH k 1998 117 0 0 0
|
||||
859 DE-859 Kempten FH k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
860 DE-860 Landshut FH k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
860 DE-860 Landshut FH k 1998 117 0 0 0
|
||||
836 DE-836 Münster FH k n 0 0 2014 0
|
||||
836 DE-836 Münster FH k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
1049 DE-1049 Neu-Ulm FH k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Ofb 1 DE-Ofb1 Offenburg HS k p 2003 122 0 0 0
|
||||
Ofb 1 DE-Ofb1 Offenburg HS k p 1998 117 0 2014 133 0
|
||||
Ofb 1 DE-Ofb1 Offenburg HS k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
898 DE-898 Regensburg HSBR k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
898 DE-898 Regensburg HSBR k 1998 117 0 0 0
|
||||
522 DE-522 Brandenburg FH k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
861 DE-861 Rosenheim HSB k 1998 117 0 0 0
|
||||
1044 DE-1044 St. Augustin FH k n 0 0 2014 0
|
||||
1044 DE-1044 St. Augustin FH k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
862 DE-862 Schweinfurt FH Würzburg k 1998 117 0 0 0
|
||||
863 DE-863 Würzburg FH k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
863 DE-863 Würzburg FH k 1998 117 0 0 0
|
||||
862 DE-862 Schweinfurt FH Würzburg k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Dü 62 DE-Due62 Düsseldorf FH k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Dü 62 DE-Due62 Düsseldorf FH k n 0 0 2014 0
|
||||
1050 DE-1050 Deggendorf HochschulB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
546 DE-546 Erfurt FH k p n 1998 117 1 0 0
|
||||
546 DE-546 Erfurt FH k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
946 DE-946 Frankfurt/M FH k p n 1998 117 0 0 0
|
||||
946 DE-946 Frankfurt/M FH k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
66 DE-66 Fulda HLB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
66 DE-66 Fulda HLB k p n 1998 117 0 0 0
|
||||
Fn 1/VS DE-Fn1-VS Villingen-S. HS Furtwangen k p 2003 122 0 0 0
|
||||
Fn 1/VS DE-Fn1-VS Villingen-S. HS Furtwangen k p 1998 117 0 2014 133 0
|
||||
Fn 1 DE-Fn1 Furtwangen HS k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Fn 1/TUT DE-Fn1-TUT Tuttlingen HS Furtwangen k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Fn 1 DE-Fn1 Furtwangen HS k p 2003 122 0 0 0
|
||||
Fn 1/VS DE-Fn1-VS Villingen-S. HS Furtwangen k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Fn 1 DE-Fn1 Furtwangen HS k p 1998 117 0 2014 133 0
|
||||
Fn 1/TUT DE-Fn1-TUT Tuttlingen HS Furtwangen k p 2003 122 0 0 0
|
||||
Fn 1/TUT DE-Fn1-TUT Tuttlingen HS Furtwangen k p 1998 117 0 2014 133 0
|
||||
974 DE-974 Gießen FH k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
974 DE-974 Gießen FH k p n 1998 117 0 0 0
|
||||
960/1 DE-960-1 Hannover HS ZentralB Linden k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
960/1 DE-960-1 Hannover HS ZentralB Linden k n 1997 116 0 0 0
|
||||
840 DE-840 Heilbronn HS k p 2003 122 0 0 0
|
||||
840 DE-840 Heilbronn HS k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
840 DE-840 Heilbronn HS k p 1998 117 0 2014 133 0
|
||||
B 113 DE-B113 Berlin F.-Haber-Inst. k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
J 59 DE-J59 Jena FH k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Ki 95 DE-Ki95 Kiel FH k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Ki 95 DE-Ki95 Kiel FH k p n 1998 117 1 0 0
|
||||
Kon 4 DE-Kon4 Konstanz HS Techn.Wirt.Gestalt k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Kon 4 DE-Kon4 Konstanz HS Techn.Wirt.Gestalt k p 2003 122 0 0 0
|
||||
Kon 4 DE-Kon4 Konstanz HS Techn.Wirt.Gestalt k p 1998 117 0 2014 133 0
|
||||
1147 DE-1147 Ludwigsburg HVF k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
1147 DE-1147 Ludwigsburg HVF k p 2003 122 0 0 0
|
||||
743 DE-743 Lemgo HS OWL S(KIM) k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
743 DE-743 Lemgo HS OWL S(KIM) k n 0 0 2014 0
|
||||
M 347 DE-M347 München HMB k 1998 117 0 0 0
|
||||
M 347 DE-M347 München HMB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
953 DE-953 Mannheim Hochschule k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
953 DE-953 Mannheim Hochschule k p 1998 117 0 2014 133 0
|
||||
953 DE-953 Mannheim Hochschule k p 2003 122 0 0 0
|
||||
542 DE-542 Merseburg FH k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
542 DE-542 Merseburg FH k p n 1998 117 1 0 0
|
||||
92 DE-92 Nürnberg Ohm-HSB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
92 DE-92 Nürnberg Ohm-HSB k 1998 117 0 0 0
|
||||
519 DE-519 Neubrandenburg HS k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
755 DE-755 Emden FH Emden/Leer k n 1997 116 0 0 0
|
||||
755 DE-755 Emden FH Emden/Leer k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
959 DE-959 Osnabrück HS ZentralB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
959 DE-959 Osnabrück HS ZentralB k n 1997 116 0 0 0
|
||||
525 DE-525 Potsdam FH k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Rt 2 DE-Rt2 Reutlingen HSB k p 1998 117 0 2014 133 0
|
||||
Rt 2 DE-Rt2 Reutlingen HSB k p 2003 122 0 0 0
|
||||
Rt 2 DE-Rt2 Reutlingen HSB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Shm 2 DE-Shm2 Schmalkalden FH k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
943 DE-943 Ulm Hochschule k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
943 DE-943 Ulm Hochschule k p 1998 117 0 2014 133 0
|
||||
943 DE-943 Ulm Hochschule k p 2003 122 0 0 0
|
||||
1029 DE-1029 Triesdorf HSWT k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
1028 DE-1028 Freising HSWT k 1998 117 0 0 0
|
||||
1028 DE-1028 Freising HSWT k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
1029 DE-1029 Triesdorf HSWT k 1998 117 0 0 0
|
||||
916 DE-916 Wolfenbüttel FH k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
916 DE-916 Wolfenbüttel FH k n 1997 116 0 0 0
|
||||
1117 DE-1117 Worms FH k p 1997 0 0 0 0
|
||||
1117 DE-1117 Worms FH k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
897/1 DE-897-1 Elsfleth Jade Hochschule k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
839 DE-839 Wilhelmshaven Jade Hochschule k n 1997 116 0 0 0
|
||||
897 DE-897 Oldenburg Jade Hochschule k n 1997 116 0 0 0
|
||||
897/1 DE-897-1 Elsfleth Jade Hochschule k n 1997 116 0 0 0
|
||||
839 DE-839 Wilhelmshaven Jade Hochschule k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
897 DE-897 Oldenburg Jade Hochschule k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Stg 191 DE-Stg191 Stuttgart FHG:IAO k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
188/e DE-188-e Berlin FU E-Medien k p 1998 117 0 0 0
|
||||
188/e DE-188-e Berlin FU E-Medien k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
188 DE-188 Berlin UBFU k p 1998 117 0 0 0
|
||||
188 DE-188 Berlin UBFU k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
708 DE-708 Hagen FernUB k n 0 0 2014 0
|
||||
708 DE-708 Hagen FernUB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Ki 131 DE-Ki131 Kiel FA Bundesw.f.Wasserschall k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Jül 1 DE-Juel1 Jülich ForschZ k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Jül 1 DE-Juel1 Jülich ForschZ k p 2003 122 0 0 0
|
||||
Ka 85 DE-Ka85 Karlsruhe KIT Campus Nord k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
D 120 DE-D120 Dresden HZDR k p 2004 123 0 2012 131 0
|
||||
D 120 DE-D120 Dresden HZDR k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Bs 80 DE-Bs80 Braunschweig HZI k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Bs 80 DE-Bs80 Braunschweig HZI k p 2004 123 0 2012 131 0
|
||||
Bs 78 DE-Bs78 Braunschweig Georg-Eckert-Inst k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
H 221 DE-H221 Hamburg GIGA FB Afrika k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
H 222 DE-H222 Hamburg GIGA FB Asien k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Gt 1 DE-Gt1 Geesthacht HZG k p 2004 123 0 2012 131 0
|
||||
Gt 1 DE-Gt1 Geesthacht HZG k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Ds 200 DE-Ds200 Darmstadt GSI k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
35 DE-35 Hannover NLB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
185 DE-185 Leipzig Inst. Länderkunde WGL k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
18/285 DE-18-285 Hamburg FB Design k p n 1998 117 1 0 0
|
||||
18/287 DE-18-287 Hamburg FB Soziale Arbeit k p n 1998 117 1 0 0
|
||||
18/302 DE-18-302 Hamburg HAW FB Technik Wirtsch k p n 1998 117 1 0 0
|
||||
18/284 DE-18-284 Hamburg FB Life Sciences k p n 1998 117 1 0 0
|
||||
Hil 3/1 DE-Hil3-1 Göttingen HAWK Bibliothek N k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Hil 3/1 DE-Hil3-1 Göttingen HAWK Bibliothek N k n 1997 116 0 0 0
|
||||
753 DE-753 Esslingen HS k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
972 DE-972 Göppingen HS Esslingen k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
753 DE-753 Esslingen HS k p 1998 117 0 2014 133 0
|
||||
972 DE-972 Göppingen HS Esslingen k p 1998 117 0 2014 133 0
|
||||
753 DE-753 Esslingen HS k p 2003 122 0 0 0
|
||||
972 DE-972 Göppingen HS Esslingen k p 2003 122 0 0 0
|
||||
551 DE-551 Magdeburg FHB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
551 DE-551 Magdeburg FHB k p 1998 117 1 0 0
|
||||
747 DE-747 Weingarten HSB k p 2003 122 0 0 0
|
||||
747 DE-747 Weingarten HSB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
747 DE-747 Weingarten HSB k p 1998 117 0 2014 133 0
|
||||
1373 DE-1373 Hamburg HC,Uni Baukunst k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Fl 3 DE-Fl3 Flensburg UB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
958 DE-958 Stuttgart HdM Nobelstraße k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
958 DE-958 Stuttgart HdM Nobelstraße k p 2003 122 0 0 0
|
||||
Y 2 DE-Y2 Rom Hertziana MPI k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Po 75 DE-Po75 Potsdam Filmuniversität k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
1866 DE-1866 Bochum HSG k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
1866 DE-1866 Bochum HSG k n 0 0 2014 0
|
||||
955 DE-955 Rottenburg HS Forstw k p 1998 117 0 2014 133 0
|
||||
955 DE-955 Rottenburg HS Forstw k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
955 DE-955 Rottenburg HS Forstw k p 2003 122 0 0 0
|
||||
1033 DE-1033 Stuttgart HS Technik k p 1998 117 0 2014 133 0
|
||||
1033 DE-1033 Stuttgart HS Technik k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
1033 DE-1033 Stuttgart HS Technik k p 2003 122 0 0 0
|
||||
950 DE-950 Nürtingen HS Wirts.Umwelt k p 2003 122 0 0 0
|
||||
1090 DE-1090 Geislingen HS Wirtsch.Nürt.-G. k p 2003 122 0 0 0
|
||||
950 DE-950 Nürtingen HS Wirts.Umwelt k p 1998 117 0 2014 133 0
|
||||
950 DE-950 Nürtingen HS Wirts.Umwelt k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
1090 DE-1090 Geislingen HS Wirtsch.Nürt.-G. k p 1998 117 0 2014 133 0
|
||||
1090 DE-1090 Geislingen HS Wirtsch.Nürt.-G. k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
43 DE-43 Wiesbaden LB k p n 1998 117 0 0 0
|
||||
43 DE-43 Wiesbaden LB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
B 1505 DE-B1505 Berlin Helmholtz-Zentrum k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
B 1505 DE-B1505 Berlin Helmholtz-Zentrum k p 2004 123 0 2012 131 0
|
||||
L 152 DE-L152 Leipzig HS Musik k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
M 29 DE-M29 München, HS f. Musik u.Theater k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
1393 DE-1393 Mülheim/Ruhr HS Ruhr West k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
991 DE-991 Sigmaringen HS Albstadt-Sigmar k p 1998 117 0 2014 133 0
|
||||
991 DE-991 Sigmaringen HS Albstadt-Sigmar k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
991 DE-991 Sigmaringen HS Albstadt-Sigmar k p 2003 122 0 0 0
|
||||
Kt 1 DE-Kt1 Köthen FH k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
751 DE-751 Karlsruhe HSB k p 1998 117 0 2014 133 0
|
||||
751 DE-751 Karlsruhe HSB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
751 DE-751 Karlsruhe HSB k p 2003 122 0 0 0
|
||||
747 DE-747 Weingarten HSB k p 1998 117 0 2014 133 0
|
||||
747 DE-747 Weingarten HSB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
747 DE-747 Weingarten HSB k p 2003 122 0 0 0
|
||||
B 1570 DE-B1570 Berlin HertieSchool k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
527 DE-527 Wernigerode HS Harz k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Wis 1 DE-Wis1 Wismar HS k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
829 DE-829 Mönchengladbach HS Niederrhein k n 0 0 2014 0
|
||||
829 DE-829 Mönchengladbach HS Niederrhein k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
523 DE-523 Berlin HTW k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
520 DE-520 Dresden HS TuW k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
L 189 DE-L189 Leipzig FH HTWK k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Mit 1 DE-Mit1 Mittweida HS k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Sa 16 DE-Sa16 Saarbrücken HTW/Goebenstr k p 1997 0 0 0 0
|
||||
Sa 16 DE-Sa16 Saarbrücken HTW/Goebenstr k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
206 H DE-206H Hamburg ZBW WGL k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Zi 4 DE-Zi4 Zittau/Görlitz HS k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
N 38 DE-N38 Nürnberg Fachb.Arbeitsmarktfor k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Mh 39 DE-Mh39 Mannheim IDS WGL k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Dm 21 DE-Dm21 Dortmund IfADo WGL k n 1998 117 0 0 0
|
||||
Dm 21 DE-Dm21 Dortmund IfADo WGL k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Ki 109 DE-Ki109 Kiel GEOMAR Bibl.West k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Ki 130 DE-Ki130 Kiel GEOMAR Bibl.Ost k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
M 352 DE-M352 München Inst.f.Zeitgeschichte k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
B 259 DE-B259 Berlin Gewässerökologie WGL k n 1998 117 0 0 0
|
||||
B 259 DE-B259 Berlin Gewässerökologie WGL k n 1998 117 0 0 0
|
||||
B 259 DE-B259 Berlin Gewässerökologie WGL k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
291/415 DE-291-415 Saarbrücken INM WGL k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Wa 1 DE-Wa1 Rostock Inst.f.Ostseefor. WGL k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Stg 183 DE-Stg183 Stuttgart Fraunhofer IPA k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Ha 93 DE-Ha93 Halle/S Pflanzenbiochemie k n 1998 117 0 0 0
|
||||
Ha 93 DE-Ha93 Halle/S Pflanzenbiochemie k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Ha 93 DE-Ha93 Halle/S Pflanzenbiochemie k n 1998 117 0 0 0
|
||||
Gat 1 DE-Gat1 Gatersleben Pflanzengenetik k n 1998 117 0 0 0
|
||||
Gat 1 DE-Gat1 Gatersleben Pflanzengenetik k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
M 382 DE-M382 München MPIs Im.GüterR/SteuerR k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Ki 128 DE-Ki128 Kiel IPN k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
1826 DE-1826 Berlin Bibliothek der IPU k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
B 567 DE-B567 Erkner IRS WGL k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
918 DE-918 St.Augustin K-Adenauer-Stiftg. k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Ha 125 DE-Ha125 Halle/S HS Kunst k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Y 3 DE-Y3 Florenz Kunsthist. Inst. k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
1032 DE-1032 Köln Kath. HS k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
90 DE-90 Karlsruhe KIT-Bibliothek k p 1857 1 1 0 0
|
||||
Ka 85 DE-Ka85 Karlsruhe KIT Campus Nord k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Ka 93 DE-Ka93 Karlsruher Institut für Technologie, KIT-Archiv k p 1998 117 0 2014 133 0
|
||||
90 DE-90 Karlsruhe KIT-Bibliothek k p 2003 122 0 0 0
|
||||
Ka 85 DE-Ka85 Karlsruhe KIT Campus Nord k p 1857 1 1 0 0
|
||||
Ka 93 DE-Ka93 Karlsruher Institut für Technologie, KIT-Archiv k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Ka 85 DE-Ka85 Karlsruhe KIT Campus Nord k p 2003 122 0 0 0
|
||||
Ka 93 DE-Ka93 Karlsruher Institut für Technologie, KIT-Archiv k p 1857 1 1 0 0
|
||||
Ka 93 DE-Ka93 Karlsruher Institut für Technologie, KIT-Archiv k p 2003 122 0 0 0
|
||||
90 DE-90 Karlsruhe KIT-Bibliothek k p 1998 117 0 2014 133 0
|
||||
90 DE-90 Karlsruhe KIT-Bibliothek k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Ka 85 DE-Ka85 Karlsruhe KIT Campus Nord k p 1998 117 0 2014 133 0
|
||||
70 DE-70 Coburg LB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
33 DE-33 Schwerin LBMV k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
45 DE-45 Oldenburg LB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Ma 45 DE-Ma45 Magdeburg Inst. Neurobiologie k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
51 DE-51 Detmold LB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
578/M DE-578-M Berlin Charité Med.Bib.Magazin k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
578/821 DE-578-821 Berlin Charité Med.Bib. ZMK k n 1857 1 1 0 0
|
||||
578/M DE-578-M Berlin Charité Med.Bib.Magazin k n 1857 1 1 0 0
|
||||
578/e DE-578-e Berlin Charité Med.Bibl.eZsn k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
578/3 DE-578-3 Berlin Charité Med. Bibl. CVK k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
578/e DE-578-e Berlin Charité Med.Bibl.eZsn k n 1857 1 1 0 0
|
||||
578/821 DE-578-821 Berlin Charité Med.Bib. ZMK k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
578/3 DE-578-3 Berlin Charité Med. Bibl. CVK k n 1857 1 1 0 0
|
||||
Hag 4/4 DE-Hag4-4 Soest FH Südwestfalen k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Hag 4/2 DE-Hag4-2 Iserlohn FH Südwestfalen k n 0 0 2014 0
|
||||
Hag 4/E DE-Hag4-E Hagen FH Südwestf.E-Ressourcen k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Hag 4/4 DE-Hag4-4 Soest FH Südwestfalen k n 0 0 2014 0
|
||||
Hag 4/3 DE-Hag4-3 Meschede FH Südwestfalen k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Hag 4/E DE-Hag4-E Hagen FH Südwestf.E-Ressourcen k n 0 0 2014 0
|
||||
Hag 4 DE-Hag4 Hagen FH Südwestfalen k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Hag 4/3 DE-Hag4-3 Meschede FH Südwestfalen k n 0 0 2014 0
|
||||
Hag 4/2 DE-Hag4-2 Iserlohn FH Südwestfalen k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Hag 4 DE-Hag4 Hagen FH Südwestfalen k n 0 0 2014 0
|
||||
B 16 DE-B16 Berlin Museum für Naturkunde k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Frei 3c DE-Frei3c Oberwolfach Math. FI k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
354 DE-354 Hannover MedHS k n 1997 116 0 0 0
|
||||
354 DE-354 Hannover MedHS k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Trs 1 DE-Trs1 Trossingen HS Musik k p 2003 122 0 0 0
|
||||
B 787 DE-B787 Berlin MPI Molek.Genetik k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Lin 1b DE-Lin1b Katlenburg MPI Sonnensystem k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
B 161a DE-B161a Garching AstroB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Frei 85 DE-Frei85 Freiburg MPI Ausländ.Recht k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Star 1 DE-Star1 Seewiesen MPI Ornithologie k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
2177 DE-2177 Frankfurt/M MPI f.emp.Ästhetik k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
He 81 DE-He81 Heidelberg MPI f. Astronomie k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
B 1532 DE-B1532 Berlin MPI Bildungsforschung k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Gö 116 DE-Goe116 Göttingen Otto-Hahn-B k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Mz 4 DE-Mz4 Mainz MPI Chemie k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Ma 54 DE-Ma54 Magdeburg MPI DKTS k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Dü 57 DE-Due57 Düsseldorf MPI Eisenforschung k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Ha 163 DE-Ha163 Halle/S. MPI Ethnol. Forschung k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Gö 134 DE-Goe134 Göttingen MPI exp. Med. k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
F 137 DE-F137 Frankfurt/M MPI Rechtgesch. k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
J 152 DE-J152 Jena MPI Menschheitsgeschichte k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Kn 188 DE-Kn188 Köln MPI Gesellschaftsforsch. k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Bo 146 DE-Bo146 Bonn MPI Radioastronomie k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Gö 164 DE-Goe164 Göttingen MPI Erforsch.multi. k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Sa 18 DE-Sa18 Saarbrücken MPI Informatik k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Frei 106 DE-Frei106 Freiburg MPI f. Immunbiologie k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Po 81 DE-Po81 Potsdam MPI Grenzflächenforsch k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
He 74 DE-He74 Heidelberg MPI Kernphysik k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Pn 1 DE-Pn1 Plön MPI Evolutionsbiologie k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Bo 206 DE-Bo206 Bonn MPI Mathematik k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
He 43 DE-He43 Heidelberg MPI Med.Forschung k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Bre 11 DE-Bre11 Bremen MPI Marine Mikrobiol. k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
L 323 DE-L323 Leipzig MPI Mathematik Naturw. k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Po 80 DE-Po80 Potsdam MPI Pflanzenphysiolog. k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Ha 94 DE-Ha94 Halle/S MPI Mikrostrukturph. k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
L 322 DE-L322 Leipzig MPI Neurowiss. k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Mz 116 DE-Mz116 Mainz MPI Polymerforschung k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Nau 1 DE-Nau1 Bad Nauheim MPI Herz/Lungen k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Y 4 DE-Y4 Nijmegen MPI Psycholinguistik k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
M 359 DE-M359 Garching MPI Plasmaphysik k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
B 212 DE-B212 Hamburg MPI IntPrivatrecht k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
M 477 DE-M477 München MPI Sozialrecht k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
B 107 DE-B107 Tübingen MP-Haus k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Mb 105 DE-Mb105 Marburg MPI f. terre Mikrob. k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
B 208 DE-B208 Heidelberg MPI Völkerrecht k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Vol 1 DE-Vol1 Köln MPI Züchtungsforschung k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Bo 407 DE-Bo407 Bonn MPI Gemeinschaftsgüter k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
M 484 DE-M484 Garching MPI Quantenoptik k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Mh 31 DE-Mh31 Mannheim HS Musik k p 2003 122 0 0 0
|
||||
1933 DE-1933 Mannheim Popakademie k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Frei 129 DE-Frei129 Freiburg PH k p 2003 122 0 0 0
|
||||
Frei 129 DE-Frei129 Freiburg PH k p 1998 117 0 2014 133 0
|
||||
Frei 129 DE-Frei129 Freiburg PH k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
752 DE-752 Schwäbisch Gmünd PH k p 2003 122 0 0 0
|
||||
752 DE-752 Schwäbisch Gmünd PH k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
He 76 DE-He76 Heidelberg PH k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
He 76 DE-He76 Heidelberg PH k p 1998 117 0 2014 133 0
|
||||
He 76 DE-He76 Heidelberg PH k p 2003 122 0 0 0
|
||||
Lg 1 DE-Lg1 Ludwigsburg PH k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Lg 1 DE-Lg1 Ludwigsburg PH k p 2003 122 0 0 0
|
||||
D 206 DE-D206 Dresden MPI Phys.komplex.Syst. k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
107 DE-107 Speyer Pfälzische LB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
107 DE-107 Speyer Pfälzische LB k p 1997 0 0 0 0
|
||||
B 106 DE-B106 Berlin R. Koch-Inst k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
929 DE-929 Koblenz LB k p 1997 0 0 0 0
|
||||
929 DE-929 Koblenz LB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
294 DE-294 Bochum UB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
36 DE-36 Mainz StBi k p 1997 0 0 0 0
|
||||
36 DE-36 Mainz StBi k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
150 DE-150 Neuburg/Donau SB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
14 DE-14 Dresden SLUB, ZB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Stg 265 DE-Stg265 Stuttgart Mus.f.Naturkunde k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
L 229 DE-L229 Leipzig Berufsakademie k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
155 DE-155 Regensburg SB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
282 DE-282 Wiesbaden StaBA k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
7 DE-7 Göttingen SUB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
7 DE-7 Göttingen SUB k n 1998 117 0 0 0
|
||||
18 DE-18 Hamburg SUB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
18 DE-18 Hamburg SUB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1998 117 1 0 0
|
||||
291 DE-291 Saarbrücken SULB k p 1997 0 0 0 0
|
||||
291 DE-291 Saarbrücken SULB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
37 DE-37 Augsburg SuStB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
B 768 DE-B768 Berlin Technische FH k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
B 768 DE-B768 Berlin Technische FH k n 1857 1 1 0 0
|
||||
526 DE-526 Wildau TH k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
27 DE-27 Jena UuLB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
95 DE-95 Hannover TierHS k n 1997 116 0 0 0
|
||||
95 DE-95 Hannover TierHS k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Wim 7 DE-Wim7 Weimar TLDA k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
83 DE-83 Berlin UBTU k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
105 DE-105 Freiberg TU BA k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
84 DE-84 Braunschweig UB k n 1997 116 0 0 0
|
||||
84 DE-84 Braunschweig UB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Co 1 DE-Co1 Cottbus BTU k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Ch 1 DE-Ch1 Chemnitz UB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
17 DE-17 Darmstadt ULB k p n 1998 117 0 0 0
|
||||
17 DE-17 Darmstadt ULB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
830 DE-830 Hamburg TU k p Nur für den persönlichen Gebrauch oder für wissenschaftliche, Bildungs- oder Forschungszwecke, nicht jedoch zu gewerblichen Zwecken. 1998 117 1 0 0
|
||||
830 DE-830 Hamburg TU k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
91 S DE-91S München TU/TeilB Straubing k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
M 49 DE-M49 Freising TU München/Weihenst k 1998 117 0 0 0
|
||||
M 49 DE-M49 Freising TU München/Weihenst k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
91 DE-91 München UBTU k 1998 117 0 0 0
|
||||
91 G DE-91G München TU/TeilB Garching k 1998 117 0 0 0
|
||||
91 DE-91 München UBTU k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
91 S DE-91S München TU/TeilB Straubing k 1998 117 0 0 0
|
||||
91 G DE-91G München TU/TeilB Garching k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
384 DE-384 Augsburg UB k 1998 117 0 0 0
|
||||
384 DE-384 Augsburg UB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
473 DE-473 Bamberg UB k 1998 117 0 0 0
|
||||
473 DE-473 Bamberg UB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
703 DE-703 Bayreuth UB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
703 DE-703 Bayreuth UB k 1998 117 0 0 0
|
||||
361 DE-361 Bielefeld UB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
361 DE-361 Bielefeld UB k n 0 0 2014 0
|
||||
Wim 2 DE-Wim2 Weimar UB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
104 DE-104 Clausthal-Z. UB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
104 DE-104 Clausthal-Z. UB k n 1997 116 0 0 0
|
||||
290 DE-290 Dortmund UB k n 0 0 2014 0
|
||||
290 DE-290 Dortmund UB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
464 DE-464 Duisburg UB k n 0 0 2014 0
|
||||
464 DE-464 Duisburg UB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
824 DE-824 Eichstätt UB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
945 DE-945 Ingolstadt UB Eichstätt/Wirt. k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
N 2 DE-N2 Nürnberg WSZB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
29 DE-29 Erlangen-N UB k 1998 117 0 0 0
|
||||
N 32 DE-N32 Nürnberg UB Erlangen-N/Erzwiss k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
N 2 DE-N2 Nürnberg WSZB k 1998 117 0 0 0
|
||||
29 T DE-29T Erlangen-N UB Technik/Naturw. k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
N 32 DE-N32 Nürnberg UB Erlangen-N/Erzwiss k 1998 117 0 0 0
|
||||
29 T DE-29T Erlangen-N UB Technik/Naturw. k 1998 117 0 0 0
|
||||
29 DE-29 Erlangen-N UB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
F 21 DE-F21 Frankfurt/M Med. HauptBi k p n 1998 117 0 0 0
|
||||
30 DE-30 Frankfurt/M UB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
F 21 DE-F21 Frankfurt/M Med. HauptBi k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
30 DE-30 Frankfurt/M UB k p n 1998 117 0 0 0
|
||||
25 DE-25 Freiburg UB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
25 DE-25 Freiburg UB k p 2003 122 0 0 0
|
||||
25 DE-25 Freiburg UB k p 1998 117 0 2014 133 0
|
||||
26 DE-26 Gießen UB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
26 DE-26 Gießen UB k p n 1998 117 0 0 0
|
||||
9 DE-9 Greifswald UB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
16 DE-16 Heidelberg UB k p 2003 122 0 0 0
|
||||
16 DE-16 Heidelberg UB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
16 DE-16 Heidelberg UB k p 1998 117 0 2014 133 0
|
||||
Hil 2 DE-Hil2 Hildesheim UB k n 1997 116 0 0 0
|
||||
Hil 2 DE-Hil2 Hildesheim UB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
841 DE-841 Lübeck ZHB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
100 DE-100 Stuttgart-Hoh. KIM k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
100 DE-100 Stuttgart-Hoh. KIM k p 2003 122 0 0 0
|
||||
11/10 DE-11-10 Berlin UBHU, Elektron. Res. k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
11/10 DE-11-10 Berlin UBHU, Elektron. Res. k n 1857 1 1 0 0
|
||||
11 DE-11 Berlin UB Humboldt k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
11 DE-11 Berlin UB Humboldt k n 1857 1 1 0 0
|
||||
Ilm 1 DE-Ilm1 Ilmenau UB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
8 DE-8 Kiel UB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
8 DE-8 Kiel UB k p n 1998 117 1 0 0
|
||||
34 DE-34 Kassel UB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
34 DE-34 Kassel UB k p n 1998 117 0 0 0
|
||||
386 DE-386 Kaiserslautern UB k p 1997 0 0 0 0
|
||||
386 DE-386 Kaiserslautern UB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
352 DE-352 Konstanz UB k p 1998 117 0 2014 133 0
|
||||
352 DE-352 Konstanz UB k p 2003 122 0 0 0
|
||||
352 DE-352 Konstanz UB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
15 DE-15 Leipzig UB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
15/292 DE-15-292 Leipzig ZB Medizin k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Lün 4 DE-Luen4 Lüneburg UB k n 1997 116 0 0 0
|
||||
Lün 4 DE-Luen4 Lüneburg UB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
19 DE-19 München UB k 1998 117 0 0 0
|
||||
19 DE-19 München UB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
4 DE-4 Marburg UB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
4 DE-4 Marburg UB k p n 1998 117 0 0 0
|
||||
180 DE-180 Mannheim UB k p 2003 122 0 0 0
|
||||
180 DE-180 Mannheim UB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
180 DE-180 Mannheim UB k p 1998 117 0 2014 133 0
|
||||
Ma 9 DE-Ma9 Magdeburg UB k p 1857 1 1 0 0
|
||||
Ma 9 DE-Ma9 Magdeburg UB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
77 DE-77 Mainz UB k p n 1998 117 0 0 0
|
||||
Mz 19 DE-Mz19 Mainz FB TSK k p n 1998 117 0 0 0
|
||||
77 DE-77 Mainz UB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Mz 19 DE-Mz19 Mainz FB TSK k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
700 DE-700 Osnabrück UB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
700 DE-700 Osnabrück UB k n 1997 116 0 0 0
|
||||
739 DE-739 Passau UB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
739 DE-739 Passau UB k 1998 117 0 0 0
|
||||
466 DE-466 Paderborn UB k n 0 0 2014 0
|
||||
466 DE-466 Paderborn UB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
517 DE-517 Potsdam UB k n 1857 1 1 0 0
|
||||
517 DE-517 Potsdam UB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
355 DE-355 Regensburg UB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
355 DE-355 Regensburg UB k 1998 117 0 0 0
|
||||
28 DE-28 Rostock UB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
93 DE-93 Stuttgart UB k p 2003 122 0 0 0
|
||||
93 DE-93 Stuttgart UB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
93 DE-93 Stuttgart UB k p 1998 117 0 2014 133 0
|
||||
467 DE-467 Siegen UB k n 0 0 2014 0
|
||||
467 DE-467 Siegen UB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
89 DE-89 Hannover TIB/UB k n 1997 116 0 0 0
|
||||
89 DE-89 Hannover TIB/UB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
385 DE-385 Trier UB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
385 DE-385 Trier UB k p 1997 0 0 0 0
|
||||
21 DE-21 Tübingen UB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
21 DE-21 Tübingen UB k p 2003 122 0 0 0
|
||||
21 DE-21 Tübingen UB k p 1998 117 0 2014 133 0
|
||||
289 DE-289 Ulm UB k p 1998 117 0 2014 133 0
|
||||
289 DE-289 Ulm UB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
289 DE-289 Ulm UB k p 2003 122 0 0 0
|
||||
Va 1 DE-Va1 Vechta UB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Va 1 DE-Va1 Vechta UB k n 1997 116 0 0 0
|
||||
20 DE-20 Würzburg UB k keine Fernleihe an kommerzielle Bibliotheken liefern 2002 121 1 2003 122 4
|
||||
20 DE-20 Würzburg UB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
705 DE-705 Hamburg HSU k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
706 DE-706 Neubiberg UniBundeswehr k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
468 DE-468 Wuppertal UB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
468 DE-468 Wuppertal UB k n 0 0 2014 0
|
||||
L 97 DE-L97 Leipzig Helmholtz-Zentrum UFZ k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
L 97 DE-L97 Leipzig Helmholtz-Zentrum UFZ k p 2004 123 0 2012 131 0
|
||||
465 DE-465 Essen UB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
464 DE-464 Duisburg UB k n 0 0 2014 0
|
||||
464 DE-464 Duisburg UB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
465 M DE-465M Essen Duisburg-Essen FB Med. k n 0 0 2014 0
|
||||
465 M DE-465M Essen Duisburg-Essen FB Med. k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
465 DE-465 Essen UB k n 0 0 2014 0
|
||||
18/64 DE-18-64 Hamburg Ärztl. ZB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1998 117 1 0 0
|
||||
18/64 DE-18-64 Hamburg Ärztl. ZB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
5 DE-5 Bonn ULB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
61 DE-61 Düsseldorf UuLB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
61 DE-61 Düsseldorf UuLB k n 0 0 2014 0
|
||||
3 DE-3 Halle/S UuLB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
6 DE-6 Münster UuLB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
6 DE-6 Münster UuLB k n 0 0 2014 0
|
||||
1018 DE-1018 Witten UB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
38 DE-38 Köln USB k n 0 0 2014 0
|
||||
38 DE-38 Köln USB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Zwi 2 DE-Zwi2 Zwickau HS k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
B 513 DE-B513 Berlin Weierstraß-Inst. k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
24 DE-24 Stuttgart WLB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
B 1543 DE-B1543 Berlin WZB WGL k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
38 M DE-38M Köln ZBMedizin k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
98 DE-98 Bonn ZB MED k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
38 M DE-38M Köln ZBMedizin k p n non-commercial libraries<br> (Springer) 1857 1 1 0 0
|
||||
98 DE-98 Bonn ZB MED k p n non-commercial libraries<br> (Springer) 1857 1 1 0 0
|
||||
Kn 41 DE-Kn41 Köln SportHS ZB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Kn 41 DE-Kn41 Köln SportHS ZB k p 1857 1 1 2011 130 6
|
||||
206 DE-206 Kiel ZBW WGL k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Mh 36 DE-Mh36 Mannheim ZEW k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
255 DE-255 München ZI Kunstgeschichte k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Mh 32 DE-Mh32 Mannheim ZI Seel. Gesundh. k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
109 DE-109 Berlin ZLB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
B 2138 DE-B2138 Berlin Zentrum Modern. Orient k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Bre 14 DE-Bre14 Bremen Zentr. Tropenökologie k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Bre 14 DE-Bre14 Bremen Zentr. Tropenökologie k n 1998 117 0 0 0
|
||||
Bre 14 DE-Bre14 Bremen Zentr. Tropenökologie k n 1998 117 0 0 0
|
||||
B 1536 DE-B1536 Berlin ZIB k n 1857 1 1 0 0
|
||||
B 1536 DE-B1536 Berlin ZIB k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
||||
Po 82 DE-Po82 Potsdam Zeithist.Forsch. k p n keine Fernleihe an kommerzielle Bibliotheken liefern 1857 1 0 2002 121 0
|
|
@ -0,0 +1,2 @@
|
|||
a,b,c
|
||||
d,e,f
|
|
44447
content-csv/src/test/resources/org/xbib/content/csv/titleFile.csv
Normal file
File diff suppressed because it is too large
27
content-json/CREDITS.txt
Normal file
|
@ -0,0 +1,27 @@
|
|||
|
||||
Credits
|
||||
|
||||
org.xbib.json.jackson is originally based on com.github.fge.jackson
|
||||
org.xbib.json.pointer is originally based on com.github.fge.jackson.jsonpointer
|
||||
org.xbib.json.patch is originally based on com.github.fge.jsonpatch
|
||||
|
||||
Original copyright notice:
|
||||
|
||||
/*
|
||||
* Copyright (c) 2014, Francis Galiegue (fgaliegue@gmail.com)
|
||||
*
|
||||
* This software is dual-licensed under:
|
||||
*
|
||||
* - the Lesser General Public License (LGPL) version 3.0 or, at your option, any
|
||||
* later version;
|
||||
* - the Apache Software License (ASL) version 2.0.
|
||||
*
|
||||
* The text of this file and of both licenses is available at the root of this
|
||||
* project or, if you have the jar distribution, in directory META-INF/, under
|
||||
* the names LGPL-3.0.txt and ASL-2.0.txt respectively.
|
||||
*
|
||||
* Direct link to the sources:
|
||||
*
|
||||
* - LGPL 3.0: https://www.gnu.org/licenses/lgpl-3.0.txt
|
||||
* - ASL 2.0: http://www.apache.org/licenses/LICENSE-2.0.txt
|
||||
*/
|
10
content-json/build.gradle
Normal file
|
@ -0,0 +1,10 @@
|
|||
dependencies {
|
||||
compile "com.fasterxml.jackson.core:jackson-databind:2.8.3"
|
||||
testCompile('junit:junit:4.12') {
|
||||
exclude group: 'org.hamcrest'
|
||||
}
|
||||
testCompile('org.mockito:mockito-core:1.9.5') {
|
||||
exclude group: 'org.hamcrest'
|
||||
}
|
||||
testCompile 'org.hamcrest:hamcrest-all:1.3'
|
||||
}
|
323
content-json/config/checkstyle/checkstyle.xml
Normal file
|
@ -0,0 +1,323 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE module PUBLIC
|
||||
"-//Puppy Crawl//DTD Check Configuration 1.3//EN"
|
||||
"http://www.puppycrawl.com/dtds/configuration_1_3.dtd">
|
||||
|
||||
<!-- This is a checkstyle configuration file. For descriptions of
|
||||
what the following rules do, please see the checkstyle configuration
|
||||
page at http://checkstyle.sourceforge.net/config.html -->
|
||||
|
||||
<module name="Checker">
|
||||
|
||||
<module name="FileTabCharacter">
|
||||
<!-- Checks that there are no tab characters in the file.
|
||||
-->
|
||||
</module>
|
||||
|
||||
<module name="NewlineAtEndOfFile">
|
||||
<property name="lineSeparator" value="lf"/>
|
||||
</module>
|
||||
|
||||
<module name="RegexpSingleline">
|
||||
<!-- Checks that FIXME is not used in comments. TODO is preferred.
|
||||
-->
|
||||
<property name="format" value="((//.*)|(\*.*))FIXME" />
|
||||
<property name="message" value='TODO is preferred to FIXME. e.g. "TODO(johndoe): Refactor when v2 is released."' />
|
||||
</module>
|
||||
|
||||
<module name="RegexpSingleline">
|
||||
<!-- Checks that TODOs are named. (Actually, just that they are followed
|
||||
by an open paren.)
|
||||
-->
|
||||
<property name="format" value="((//.*)|(\*.*))TODO[^(]" />
|
||||
<property name="message" value='All TODOs should be named. e.g. "TODO(johndoe): Refactor when v2 is released."' />
|
||||
</module>
|
||||
|
||||
<module name="JavadocPackage">
|
||||
<!-- Checks that each Java package has a Javadoc file used for commenting.
|
||||
Only allows a package-info.java, not package.html. -->
|
||||
</module>
|
||||
|
||||
<!-- All Java AST specific tests live under TreeWalker module. -->
|
||||
<module name="TreeWalker">
|
||||
|
||||
<!--
|
||||
|
||||
IMPORT CHECKS
|
||||
|
||||
-->
|
||||
|
||||
<module name="RedundantImport">
|
||||
<!-- Checks for redundant import statements. -->
|
||||
<property name="severity" value="error"/>
|
||||
</module>
|
||||
|
||||
<module name="ImportOrder">
|
||||
<!-- Checks for out of order import statements. -->
|
||||
|
||||
<property name="severity" value="warning"/>
|
||||
<property name="groups" value="com,junit,net,org,java,javax"/>
|
||||
<!-- This ensures that static imports go first. -->
|
||||
<property name="option" value="top"/>
|
||||
<property name="tokens" value="STATIC_IMPORT, IMPORT"/>
|
||||
</module>
|
||||
|
||||
<!--
|
||||
|
||||
JAVADOC CHECKS
|
||||
|
||||
-->
|
||||
|
||||
<!-- Checks for Javadoc comments. -->
|
||||
<!-- See http://checkstyle.sf.net/config_javadoc.html -->
|
||||
<module name="JavadocMethod">
|
||||
<property name="scope" value="protected"/>
|
||||
<property name="severity" value="warning"/>
|
||||
<property name="allowMissingJavadoc" value="true"/>
|
||||
<property name="allowMissingParamTags" value="true"/>
|
||||
<property name="allowMissingReturnTag" value="true"/>
|
||||
<property name="allowMissingThrowsTags" value="true"/>
|
||||
<property name="allowThrowsTagsForSubclasses" value="true"/>
|
||||
<property name="allowUndeclaredRTE" value="true"/>
|
||||
</module>
|
||||
|
||||
<module name="JavadocType">
|
||||
<property name="scope" value="protected"/>
|
||||
<property name="severity" value="error"/>
|
||||
</module>
|
||||
|
||||
<module name="JavadocStyle">
|
||||
<property name="severity" value="warning"/>
|
||||
</module>
|
||||
|
||||
<!--
|
||||
|
||||
NAMING CHECKS
|
||||
|
||||
-->
|
||||
|
||||
<!-- Item 38 - Adhere to generally accepted naming conventions -->
|
||||
|
||||
<module name="PackageName">
|
||||
<!-- Validates identifiers for package names against the
|
||||
supplied expression. -->
|
||||
<!-- Here the default checkstyle rule restricts package name parts to
|
||||
seven characters; this is not in line with common practice at Google.
|
||||
-->
|
||||
<property name="format" value="^[a-z]+(\.[a-z][a-z0-9]{1,})*$"/>
|
||||
<property name="severity" value="warning"/>
|
||||
</module>
|
||||
|
||||
<module name="TypeNameCheck">
|
||||
<!-- Validates static, final fields against the
|
||||
expression "^[A-Z][a-zA-Z0-9]*$". -->
|
||||
<metadata name="altname" value="TypeName"/>
|
||||
<property name="severity" value="warning"/>
|
||||
</module>
|
||||
|
||||
<module name="ConstantNameCheck">
|
||||
<!-- Validates non-private, static, final fields against the supplied
|
||||
public/package final fields "^[A-Z][A-Z0-9]*(_[A-Z0-9]+)*$". -->
|
||||
<metadata name="altname" value="ConstantName"/>
|
||||
<property name="applyToPublic" value="true"/>
|
||||
<property name="applyToProtected" value="true"/>
|
||||
<property name="applyToPackage" value="true"/>
|
||||
<property name="applyToPrivate" value="false"/>
|
||||
<property name="format" value="^([A-Z][A-Z0-9]*(_[A-Z0-9]+)*|FLAG_.*)$"/>
|
||||
<message key="name.invalidPattern"
|
||||
value="Variable ''{0}'' should be in ALL_CAPS (if it is a constant) or be private (otherwise)."/>
|
||||
<property name="severity" value="warning"/>
|
||||
</module>
|
||||
|
||||
<module name="StaticVariableNameCheck">
|
||||
<!-- Validates static, non-final fields against the supplied
|
||||
expression "^[a-z][a-zA-Z0-9]*_?$". -->
|
||||
<metadata name="altname" value="StaticVariableName"/>
|
||||
<property name="applyToPublic" value="true"/>
|
||||
<property name="applyToProtected" value="true"/>
|
||||
<property name="applyToPackage" value="true"/>
|
||||
<property name="applyToPrivate" value="true"/>
|
||||
<property name="format" value="^[a-z][a-zA-Z0-9]*_?$"/>
|
||||
<property name="severity" value="warning"/>
|
||||
</module>
|
||||
|
||||
<module name="MemberNameCheck">
|
||||
<!-- Validates non-static members against the supplied expression. -->
|
||||
<metadata name="altname" value="MemberName"/>
|
||||
<property name="applyToPublic" value="true"/>
|
||||
<property name="applyToProtected" value="true"/>
|
||||
<property name="applyToPackage" value="true"/>
|
||||
<property name="applyToPrivate" value="true"/>
|
||||
<property name="format" value="^[a-z][a-zA-Z0-9]*$"/>
|
||||
<property name="severity" value="warning"/>
|
||||
</module>
|
||||
|
||||
<module name="MethodNameCheck">
|
||||
<!-- Validates identifiers for method names. -->
|
||||
<metadata name="altname" value="MethodName"/>
|
||||
<property name="format" value="^[a-z][a-zA-Z0-9]*(_[a-zA-Z0-9]+)*$"/>
|
||||
<property name="severity" value="warning"/>
|
||||
</module>
|
||||
|
||||
<module name="ParameterName">
|
||||
<!-- Validates identifiers for method parameters against the
|
||||
expression "^[a-z][a-zA-Z0-9]*$". -->
|
||||
<property name="severity" value="warning"/>
|
||||
</module>
|
||||
|
||||
<module name="LocalFinalVariableName">
|
||||
<!-- Validates identifiers for local final variables against the
|
||||
expression "^[a-z][a-zA-Z0-9]*$". -->
|
||||
<property name="severity" value="warning"/>
|
||||
</module>
|
||||
|
||||
<module name="LocalVariableName">
|
||||
<!-- Validates identifiers for local variables against the
|
||||
expression "^[a-z][a-zA-Z0-9]*$". -->
|
||||
<property name="severity" value="warning"/>
|
||||
</module>
|
||||
|
||||
|
||||
<!--
|
||||
|
||||
LENGTH and CODING CHECKS
|
||||
|
||||
-->
|
||||
|
||||
<module name="LineLength">
|
||||
<!-- Checks if a line is too long. -->
|
||||
<property name="max" value="${com.puppycrawl.tools.checkstyle.checks.sizes.LineLength.max}" default="128"/>
|
||||
<property name="severity" value="error"/>
|
||||
|
||||
<!--
|
||||
The default ignore pattern exempts the following elements:
|
||||
- import statements
|
||||
- long URLs inside comments
|
||||
-->
|
||||
|
||||
<property name="ignorePattern"
|
||||
value="${com.puppycrawl.tools.checkstyle.checks.sizes.LineLength.ignorePattern}"
|
||||
default="^(package .*;\s*)|(import .*;\s*)|( *(\*|//).*https?://.*)$"/>
|
||||
</module>
|
||||
|
||||
<module name="LeftCurly">
|
||||
<!-- Checks for placement of the left curly brace ('{'). -->
|
||||
<property name="severity" value="warning"/>
|
||||
</module>
|
||||
|
||||
<module name="RightCurly">
|
||||
<!-- Checks right curlies on CATCH, ELSE, and TRY blocks are on
|
||||
the same line. e.g., the following example is fine:
|
||||
<pre>
|
||||
if {
|
||||
...
|
||||
} else
|
||||
</pre>
|
||||
-->
|
||||
<!-- This next example is not fine:
|
||||
<pre>
|
||||
if {
|
||||
...
|
||||
}
|
||||
else
|
||||
</pre>
|
||||
-->
|
||||
<property name="option" value="same"/>
|
||||
<property name="severity" value="warning"/>
|
||||
</module>
|
||||
|
||||
<!-- Checks for braces around if and else blocks -->
|
||||
<module name="NeedBraces">
|
||||
<property name="severity" value="warning"/>
|
||||
<property name="tokens" value="LITERAL_IF, LITERAL_ELSE, LITERAL_FOR, LITERAL_WHILE, LITERAL_DO"/>
|
||||
</module>
|
||||
|
||||
<module name="UpperEll">
|
||||
<!-- Checks that long constants are defined with an upper ell.-->
|
||||
<property name="severity" value="error"/>
|
||||
</module>
|
||||
|
||||
<module name="FallThrough">
|
||||
<!-- Warn about falling through to the next case statement. Similar to
|
||||
javac -Xlint:fallthrough, but the check is suppressed if a single-line comment
|
||||
on the last non-blank line preceding the fallen-into case contains 'fall through' (or
|
||||
some other variants which we don't publicize, to promote consistency).
|
||||
-->
|
||||
<property name="reliefPattern"
|
||||
value="fall through|Fall through|fallthru|Fallthru|falls through|Falls through|fallthrough|Fallthrough|No break|NO break|no break|continue on"/>
|
||||
<property name="severity" value="error"/>
|
||||
</module>
|
||||
|
||||
|
||||
<!--
|
||||
|
||||
MODIFIERS CHECKS
|
||||
|
||||
-->
|
||||
|
||||
<module name="ModifierOrder">
|
||||
<!-- Warn if modifier order is inconsistent with JLS3 8.1.1, 8.3.1, and
|
||||
8.4.3. The prescribed order is:
|
||||
public, protected, private, abstract, static, final, transient, volatile,
|
||||
synchronized, native, strictfp
|
||||
-->
|
||||
</module>
|
||||
|
||||
|
||||
<!--
|
||||
|
||||
WHITESPACE CHECKS
|
||||
|
||||
-->
|
||||
|
||||
<module name="WhitespaceAround">
|
||||
<!-- Checks that various tokens are surrounded by whitespace.
|
||||
This includes most binary operators and keywords followed
|
||||
by regular or curly braces.
|
||||
-->
|
||||
<property name="tokens" value="ASSIGN, BAND, BAND_ASSIGN, BOR,
|
||||
BOR_ASSIGN, BSR, BSR_ASSIGN, BXOR, BXOR_ASSIGN, COLON, DIV, DIV_ASSIGN,
|
||||
EQUAL, GE, GT, LAND, LE, LITERAL_CATCH, LITERAL_DO, LITERAL_ELSE,
|
||||
LITERAL_FINALLY, LITERAL_FOR, LITERAL_IF, LITERAL_RETURN,
|
||||
LITERAL_SYNCHRONIZED, LITERAL_TRY, LITERAL_WHILE, LOR, LT, MINUS,
|
||||
MINUS_ASSIGN, MOD, MOD_ASSIGN, NOT_EQUAL, PLUS, PLUS_ASSIGN, QUESTION,
|
||||
SL, SL_ASSIGN, SR_ASSIGN, STAR, STAR_ASSIGN"/>
|
||||
<property name="severity" value="error"/>
|
||||
</module>
|
||||
|
||||
<module name="WhitespaceAfter">
|
||||
<!-- Checks that commas, semicolons and typecasts are followed by
|
||||
whitespace.
|
||||
-->
|
||||
<property name="tokens" value="COMMA, SEMI, TYPECAST"/>
|
||||
</module>
|
||||
|
||||
<module name="NoWhitespaceAfter">
|
||||
<!-- Checks that there is no whitespace after various unary operators.
|
||||
Linebreaks are allowed.
|
||||
-->
|
||||
<property name="tokens" value="BNOT, DEC, DOT, INC, LNOT, UNARY_MINUS,
|
||||
UNARY_PLUS"/>
|
||||
<property name="allowLineBreaks" value="true"/>
|
||||
<property name="severity" value="error"/>
|
||||
</module>
|
||||
|
||||
<module name="NoWhitespaceBefore">
|
||||
<!-- Checks that there is no whitespace before various unary operators.
|
||||
Linebreaks are allowed.
|
||||
-->
|
||||
<property name="tokens" value="SEMI, DOT, POST_DEC, POST_INC"/>
|
||||
<property name="allowLineBreaks" value="true"/>
|
||||
<property name="severity" value="error"/>
|
||||
</module>
|
||||
|
||||
<module name="ParenPad">
|
||||
<!-- Checks that there is no whitespace before close parens or after
|
||||
open parens.
|
||||
-->
|
||||
<property name="severity" value="warning"/>
|
||||
</module>
|
||||
|
||||
</module>
|
||||
</module>
|
||||
|
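The checkstyle configuration above encodes a handful of conventions: named TODOs, an explicit "fall through" relief comment before a case that is fallen into, upper-ell long literals, ALL_CAPS constants, and whitespace around binary operators. As an illustration only — not part of this commit, and with made-up class, package, and author names — a small class that would pass those checks could look like this:

package org.xbib.content.example; // hypothetical package; lower-case parts as required by PackageName

/**
 * Minimal example kept compliant with the checkstyle rules configured above.
 */
public final class StyleExample {

    public static final long TIMEOUT_MILLIS = 30000L; // upper ell and ALL_CAPS constant name

    private StyleExample() {
    }

    static int classify(final int code) {
        int weight = 0;
        switch (code) {
            case 0:
                weight += 1;
                // fall through
            case 1:
                weight += 1;
                break;
            default:
                // TODO(jdoe): refine once more status codes are defined.
                weight = -1;
        }
        return weight;
    }
}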
159
content-json/src/main/java/org/xbib/content/json/diff/Diff.java
Normal file
|
@ -0,0 +1,159 @@
|
|||
package org.xbib.content.json.diff;
|
||||
|
||||
import com.fasterxml.jackson.databind.JsonNode;
|
||||
import com.fasterxml.jackson.databind.node.ObjectNode;
|
||||
import org.xbib.content.json.jackson.JsonNumEquals;
|
||||
import org.xbib.content.json.jackson.Wrapper;
|
||||
import org.xbib.content.json.pointer.JsonPointer;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Objects;
|
||||
|
||||
/**
|
||||
* Difference representation. Captures diff information required to
|
||||
* generate JSON patch operations and factorize differences.
|
||||
*/
|
||||
final class Diff {
|
||||
|
||||
final JsonNode value;
|
||||
DiffOperation operation;
|
||||
JsonPointer path;
|
||||
JsonPointer arrayPath;
|
||||
int firstArrayIndex;
|
||||
int secondArrayIndex;
|
||||
JsonPointer fromPath;
|
||||
|
||||
Diff pairedDiff;
|
||||
|
||||
boolean firstOfPair;
|
||||
|
||||
private Diff(final DiffOperation operation, final JsonPointer path,
|
||||
final JsonNode value) {
|
||||
this.operation = operation;
|
||||
this.path = path;
|
||||
this.value = value;
|
||||
}
|
||||
|
||||
private Diff(final DiffOperation operation, final JsonPointer arrayPath,
|
||||
final int firstArrayIndex, final int secondArrayIndex,
|
||||
final JsonNode value) {
|
||||
this.operation = operation;
|
||||
this.arrayPath = arrayPath;
|
||||
this.firstArrayIndex = firstArrayIndex;
|
||||
this.secondArrayIndex = secondArrayIndex;
|
||||
this.value = value;
|
||||
}
|
||||
|
||||
static Diff simpleDiff(final DiffOperation operation,
|
||||
final JsonPointer path, final JsonNode value) {
|
||||
return new Diff(operation, path, value.deepCopy());
|
||||
}
|
||||
|
||||
/*
|
||||
* "Stateless" removal of a given node from an array given a base path (the
|
||||
* immediate parent of an array) and an array index; as the name suggests,
|
||||
* this factory method is called only when a node is removed from the tail
|
||||
* of a target array; in other words, the source node has extra elements.
|
||||
*/
|
||||
static Diff tailArrayRemove(final JsonPointer basePath, final int index,
|
||||
final int removeIndex, final JsonNode victim) {
|
||||
return new Diff(DiffOperation.REMOVE, basePath, index, removeIndex,
|
||||
victim.deepCopy());
|
||||
}
|
||||
|
||||
static Diff arrayRemove(final JsonPointer basePath,
|
||||
final IndexedJsonArray array1, final IndexedJsonArray array2) {
|
||||
return new Diff(DiffOperation.REMOVE, basePath, array1.getIndex(),
|
||||
array2.getIndex(), array1.getElement().deepCopy());
|
||||
}
|
||||
|
||||
static Diff arrayAdd(final JsonPointer basePath, final JsonNode node) {
|
||||
return new Diff(DiffOperation.ADD, basePath, -1, -1, node.deepCopy());
|
||||
}
|
||||
|
||||
static Diff arrayInsert(final JsonPointer basePath,
|
||||
final IndexedJsonArray array1, final IndexedJsonArray array2) {
|
||||
return new Diff(DiffOperation.ADD, basePath, array1.getIndex(),
|
||||
array2.getIndex(), array2.getElement().deepCopy());
|
||||
}
|
||||
|
||||
JsonNode asJsonPatch() {
|
||||
final JsonPointer ptr = arrayPath != null ? getSecondArrayPath()
|
||||
: path;
|
||||
final ObjectNode patch = operation.newOp(ptr);
|
||||
/*
|
||||
* A remove only has a path
|
||||
*/
|
||||
if (operation == DiffOperation.REMOVE) {
|
||||
return patch;
|
||||
}
|
||||
/*
|
||||
* A move has a "source path" (the "from" member), other defined
|
||||
* operations (add and replace) have a value instead.
|
||||
*/
|
||||
if (operation == DiffOperation.MOVE
|
||||
|| operation == DiffOperation.COPY) {
|
||||
patch.put("from", fromPath.toString());
|
||||
} else {
|
||||
patch.set("value", value);
|
||||
}
|
||||
return patch;
|
||||
}
|
||||
|
||||
JsonPointer getSecondArrayPath() {
|
||||
// compute path from array path and index
|
||||
if (secondArrayIndex != -1) {
|
||||
return arrayPath.append(secondArrayIndex);
|
||||
}
|
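||||
// "-" is the JSON Pointer token for "append past the last array element" (RFC 6902 add operations)
|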
||||
return arrayPath.append("-");
|
||||
}
|
||||
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return hashCode(operation, path, arrayPath, firstArrayIndex,
|
||||
secondArrayIndex, new Wrapper<>(JsonNumEquals.getInstance(), value),
|
||||
fromPath, pairedDiff != null, firstOfPair);
|
||||
}
|
||||
|
||||
private int hashCode(Object... objects) {
|
||||
return Arrays.hashCode(objects);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(final Object obj) {
|
||||
if (obj == null) {
|
||||
return false;
|
||||
}
|
||||
if (getClass() != obj.getClass()) {
|
||||
return false;
|
||||
}
|
||||
final Diff other = (Diff) obj;
|
||||
return operation == other.operation
|
||||
&& Objects.equals(path, other.path)
|
||||
&& Objects.equals(arrayPath, other.arrayPath)
|
||||
&& firstArrayIndex == other.firstArrayIndex
|
||||
&& secondArrayIndex == other.secondArrayIndex
|
||||
&& JsonNumEquals.getInstance().equivalent(value, other.value)
|
||||
&& Objects.equals(fromPath, other.fromPath)
|
||||
&& Objects.equals(pairedDiff != null, other.pairedDiff != null)
|
||||
&& firstOfPair == other.firstOfPair;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
StringBuilder sb = new StringBuilder();
|
||||
sb.append(getClass().getName())
|
||||
.append("{op=").append(operation)
|
||||
.append("}{path=").append(path)
|
||||
.append("}{arrayPath=").append(arrayPath)
|
||||
.append("}{firstArrayIndex=").append(firstArrayIndex)
|
||||
.append("}{secondArrayIndex").append(secondArrayIndex)
|
||||
.append("}{value").append(value)
|
||||
.append("}{fromPath").append(fromPath)
|
||||
.append("}{paired").append(pairedDiff != null)
|
||||
.append("}{firstOfPair").append(firstOfPair);
|
||||
return Objects.toString(sb.toString());
|
||||
}
|
||||
}
|
|
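Diff.asJsonPatch() emits one RFC 6902 operation object per difference: a remove carries only "op" and "path", a move or copy additionally carries "from", and add/replace carry "value". A hedged sketch of those shapes, built with the Jackson API that content-json already depends on; the class name, paths, and values are invented for illustration and the operation objects are written out by hand rather than produced by the library:

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;

public final class PatchShapeDemo {

    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        ArrayNode patch = mapper.createArrayNode();
        // a remove only has a path
        patch.addObject().put("op", "remove").put("path", "/tags/3");
        // add and replace carry a value
        patch.addObject().put("op", "add").put("path", "/name").put("value", "example");
        // move and copy carry a "from" path instead of a value
        patch.addObject().put("op", "move").put("path", "/title").put("from", "/name");
        System.out.println(mapper.writerWithDefaultPrettyPrinter().writeValueAsString(patch));
    }
}

A path produced by getSecondArrayPath() for a missing second index would end in "/-" here, meaning "append to the target array".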
@ -0,0 +1,285 @@
|
|||
package org.xbib.content.json.diff;
|
||||
|
||||
import static org.xbib.content.json.diff.DiffOperation.ADD;
|
||||
import static org.xbib.content.json.diff.DiffOperation.REMOVE;
|
||||
|
||||
import com.fasterxml.jackson.databind.JsonNode;
|
||||
import org.xbib.content.json.jackson.Equivalence;
|
||||
import org.xbib.content.json.jackson.JsonNumEquals;
|
||||
import org.xbib.content.json.pointer.JsonPointer;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
|
||||
final class DiffFactorizer {
|
||||
|
||||
private static final Equivalence<JsonNode> EQUIVALENCE = JsonNumEquals.getInstance();
|
||||
|
||||
private DiffFactorizer() {
|
||||
}
|
||||
|
||||
/**
|
||||
* Factorize list of ordered differences. Where removed values are
|
||||
* equivalent to added values, merge add and remove to move
|
||||
* differences. Because remove differences are relocated in the
|
||||
* process of merging, other differences can be side effected.
|
||||
* Add differences with equivalent values to previous add
|
||||
* differences are converted to copy differences.
|
||||
*
|
||||
* @param diffs list of ordered differences.
|
||||
*/
|
||||
public static void factorizeDiffs(final List<Diff> diffs) {
|
||||
findPairs(diffs);
|
||||
factorizePairs(diffs);
|
||||
|
||||
// Factorize add diffs with equivalent non-empty object or array
|
||||
// values into copy diffs; from paths for copy diffs can be set using
|
||||
// previous add diff paths and/or array paths because diff order is
|
||||
// acyclic and immutable for this factorization. The only exception
|
||||
// to this rule are adds that append to arrays: these have no concrete
|
||||
// path that can serve as a copy diff from path.
|
||||
final List<Diff> addDiffs = new ArrayList<>();
|
||||
for (final Diff diff : diffs) {
|
||||
/*
|
||||
* Ignore non add operations
|
||||
*/
|
||||
if (diff.operation != ADD) {
|
||||
continue;
|
||||
}
|
||||
/*
|
||||
* Skip value nodes or empty objects/arrays
|
||||
*/
|
||||
if (diff.value.size() == 0) {
|
||||
continue;
|
||||
}
|
||||
// check add diff value against list of previous add diffs
|
||||
Diff addDiff = null;
|
||||
for (final Diff testAddDiff : addDiffs) {
|
||||
if (EQUIVALENCE.equivalent(diff.value, testAddDiff.value)) {
|
||||
addDiff = testAddDiff;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// if not found previously, save add diff, (if not appending
|
||||
// to an array which can have no concrete from path), and continue
|
||||
if (addDiff == null) {
|
||||
if (diff.arrayPath == null || diff.secondArrayIndex != -1) {
|
||||
addDiffs.add(diff);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
// previous add diff found by value: convert add diff to copy
|
||||
// diff with from path set to concrete add diff path
|
||||
diff.operation = DiffOperation.COPY;
|
||||
diff.fromPath = addDiff.arrayPath != null
|
||||
? addDiff.getSecondArrayPath() : addDiff.path;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Find additions/removal pairs
|
||||
* <p>
|
||||
* <p>Find addition operations which can be paired value-wise with removal
|
||||
* operations.</p>
|
||||
* <p>
|
||||
* <p>Note that only the first pair is considered.</p>
|
||||
*
|
||||
* @param diffs the list of diffs
|
||||
*/
|
||||
private static void findPairs(final List<Diff> diffs) {
|
||||
final int diffsSize = diffs.size();
|
||||
|
||||
Diff addition, removal;
|
||||
|
||||
for (int addIndex = 0; addIndex < diffsSize; addIndex++) {
|
||||
addition = diffs.get(addIndex);
|
||||
if (addition.operation != ADD) {
|
||||
continue;
|
||||
}
|
||||
/*
|
||||
* Found an addition: try and find a matching removal
|
||||
*/
|
||||
for (int removeIndex = 0; removeIndex < diffsSize; removeIndex++) {
|
||||
removal = diffs.get(removeIndex);
|
||||
if (removal.operation == REMOVE && EQUIVALENCE.equivalent(removal.value, addition.value)) {
|
||||
addition.pairedDiff = removal;
|
||||
addition.firstOfPair = addIndex < removeIndex;
|
||||
removal.pairedDiff = addition;
|
||||
removal.firstOfPair = removeIndex < addIndex;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Factorize additions/removals
|
||||
* <p>Removals, when paired with additions, are removed from the list.</p>
|
||||
* <p>
|
||||
* <p>Special care must be taken for additions/removal pairs happening
|
||||
* within the same array, so that array indices can be adjusted properly.
|
||||
* </p>
|
||||
*
|
||||
* @param diffs the list of diffs
|
||||
*/
|
||||
private static void factorizePairs(final List<Diff> diffs) {
|
||||
/*
|
||||
* Those two arrays will hold array removals seen before or after their
|
||||
* paired additions.
|
||||
*/
|
||||
final List<Diff> seenBefore = new ArrayList<>();
|
||||
final List<Diff> seenAfter = new ArrayList<>();
|
||||
final Iterator<Diff> iterator = diffs.iterator();
|
||||
|
||||
Diff diff;
|
||||
while (iterator.hasNext()) {
|
||||
diff = iterator.next();
|
||||
if (diff.pairedDiff != null) {
|
||||
if (diff.operation == REMOVE) {
|
||||
/*
|
||||
* If removal is from an array and we reach this point,
|
||||
* it means the matching addition has not been seen yet.
|
||||
* Add this diff to the relevant list.
|
||||
*/
|
||||
if (diff.arrayPath != null && diff.firstOfPair) {
|
||||
seenBefore.add(diff);
|
||||
}
|
||||
// remove paired remove and continue
|
||||
iterator.remove();
|
||||
} else if (diff.operation == ADD) {
|
||||
/*
|
||||
* Turn paired additions into move operations
|
||||
*/
|
||||
transformAddition(seenBefore, seenAfter, diff);
|
||||
} else {
|
||||
throw new IllegalStateException();
|
||||
}
|
||||
}
|
||||
// adjust secondary index for all array diffs with matching
|
||||
// deferred array removes; note: all non remove array diffs
|
||||
// have a valid second array index
|
||||
if (diff.arrayPath != null) {
|
||||
diff.secondArrayIndex = adjustSecondArrayIndex(seenBefore,
|
||||
diff.arrayPath, diff.secondArrayIndex);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static void transformAddition(final List<Diff> seenBefore,
|
||||
final List<Diff> seenAfter, final Diff diff) {
|
||||
final Diff removal = diff.pairedDiff;
|
||||
// convert paired add diff into a move
|
||||
diff.operation = DiffOperation.MOVE;
|
||||
diff.pairedDiff = null;
|
||||
|
||||
/*
|
||||
* Compute the "from" path of this move operation
|
||||
*/
|
||||
if (removal.arrayPath == null) {
|
||||
/*
|
||||
* If removal was not from an array, we just need to grab
|
||||
* the path of this remove operation as the origin path
|
||||
* for this move
|
||||
*/
|
||||
diff.fromPath = removal.path;
|
||||
return;
|
||||
}
|
||||
|
||||
if (diff.firstOfPair) {
|
||||
// move diff is first of pair: remove will be advanced
|
||||
// and will use original first indexes into array
|
||||
int removeIndex = removal.firstArrayIndex;
|
||||
// adjust remove index for operations on arrays with
|
||||
// matching advanced array removes
|
||||
removeIndex = adjustFirstArrayIndex(seenAfter, removal.arrayPath,
|
||||
removeIndex);
|
||||
// if move diff and remove diff are from the same array,
|
||||
// remove index must be based on an original index offset
|
||||
// from the move diff secondary index; this is reflecting
|
||||
// the fact that array diff operations up to the move diff
|
||||
// have been applied, but those following the move diff to
|
||||
// the remove diff have not and thus require original
|
||||
// first array index adjustments
|
||||
if (removal.arrayPath.equals(diff.arrayPath)) {
|
||||
final int moveSecondArrayIndex = adjustSecondArrayIndex(
|
||||
seenBefore, diff.arrayPath, diff.secondArrayIndex);
|
||||
final int moveFirstArrayIndex = adjustFirstArrayIndex(seenAfter,
|
||||
diff.arrayPath, diff.firstArrayIndex);
|
||||
removeIndex += moveSecondArrayIndex - moveFirstArrayIndex;
|
||||
}
|
||||
// set move diff from using adjusted remove index
|
||||
diff.fromPath = removal.arrayPath.append(removeIndex);
|
||||
// track advanced array removes
|
||||
seenAfter.add(removal);
|
||||
} else {
|
||||
// remove diff is first of pair: remove has been deferred
|
||||
// for this move; remove tracked deferred array remove
|
||||
seenBefore.remove(removal);
|
||||
// remove can now be moved using second index
|
||||
int removeIndex = removal.secondArrayIndex;
|
||||
// adjust remove index for operations on arrays with
|
||||
// matching deferred array removes
|
||||
removeIndex = adjustSecondArrayIndex(seenBefore, removal.arrayPath,
|
||||
removeIndex);
|
||||
// set move diff from using adjusted remove index
|
||||
diff.fromPath = removal.arrayPath.append(removeIndex);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Adjust array index based on array removals seen after their matching
|
||||
* additions (advanced array removes).
|
||||
*
|
||||
* @param seenAfter list of removals seen after their matching additions
|
||||
* @param arrayPath array path of array index to adjust
|
||||
* @param arrayIndex index to adjust and upper range of removes
|
||||
* @return index adjusted by advanced array removes in range
|
||||
*/
|
||||
private static int adjustFirstArrayIndex(final List<Diff> seenAfter,
|
||||
final JsonPointer arrayPath, final int arrayIndex) {
|
||||
/*
|
||||
* Adjust index of removal operations on arrays with matching advanced
|
||||
* array removes: for each advanced remove, decrement the index assuming
|
||||
* remove will have been done before remaining diffs on array
|
||||
*/
|
||||
int arrayRemoves = 0;
|
||||
for (final Diff removal : seenAfter) {
|
||||
if (arrayPath.equals(removal.arrayPath)
|
||||
&& arrayIndex > removal.firstArrayIndex) {
|
||||
arrayRemoves++;
|
||||
}
|
||||
}
|
||||
return arrayIndex - arrayRemoves;
|
||||
}
|
||||
|
||||
/**
|
||||
* Adjust array index based on array removals seen before their matching
|
||||
* additions. Missing second array indexes (-1) are not adjusted.
|
||||
*
|
||||
* @param seenAfter list of removals seen before their matching additions
|
||||
* @param arrayPath array path of array index to adjust
|
||||
* @param arrayIndex index to adjust and upper range of removes
|
||||
* @return index adjusted by deferred array removes in range
|
||||
*/
|
||||
private static int adjustSecondArrayIndex(final List<Diff> seenAfter,
|
||||
final JsonPointer arrayPath, final int arrayIndex) {
|
||||
if (arrayIndex == -1) {
|
||||
return arrayIndex;
|
||||
}
|
||||
/*
|
||||
* adjust secondary index for operations on arrays with matching
|
||||
* deferred array removes: for each deferred remove, increment the index
|
||||
* assuming remove will not be done until the move diff is performed
|
||||
*/
|
||||
int arrayRemoves = 0;
|
||||
for (final Diff removal : seenAfter) {
|
||||
if (arrayPath.equals(removal.arrayPath)
|
||||
&& arrayIndex >= removal.secondArrayIndex) {
|
||||
arrayRemoves++;
|
||||
}
|
||||
}
|
||||
return arrayIndex + arrayRemoves;
|
||||
}
|
||||
}
|
|
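To make the pairing and index bookkeeping above concrete, here is a minimal sketch (not part of this commit) that diffs a reordered array. It relies on JsonDiff and JsonLoader, which are added later in this commit, and the class name FactorizedMoveExample is illustrative; the exact indices in the emitted operation depend on the Diff bookkeeping, so the expected output is indicative only.

import com.fasterxml.jackson.databind.JsonNode;
import org.xbib.content.json.diff.JsonDiff;
import org.xbib.content.json.jackson.JsonLoader;

public final class FactorizedMoveExample {
    public static void main(String[] args) throws Exception {
        JsonNode source = JsonLoader.fromString("[\"a\",\"b\",\"c\"]");
        JsonNode target = JsonLoader.fromString("[\"b\",\"c\",\"a\"]");

        // Without factorization this would be a remove of "a" plus an add of "a";
        // the pairing above should merge them into a single "move" operation.
        System.out.println(JsonDiff.asJson(source, target));
    }
}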
@ -0,0 +1,36 @@
|
|||
package org.xbib.content.json.diff;
|
||||
|
||||
import com.fasterxml.jackson.databind.node.ObjectNode;
|
||||
import org.xbib.content.json.jackson.JacksonUtils;
|
||||
import org.xbib.content.json.pointer.JsonPointer;
|
||||
|
||||
/**
|
||||
* Difference operation types. Add, remove, and replace operations
|
||||
* are directly generated by node comparison. Move operations are
|
||||
* the result of factorized add and remove operations.
|
||||
*/
|
||||
enum DiffOperation {
|
||||
ADD("add"),
|
||||
REMOVE("remove"),
|
||||
REPLACE("replace"),
|
||||
MOVE("move"),
|
||||
COPY("copy");
|
||||
|
||||
private final String opName;
|
||||
|
||||
DiffOperation(final String opName) {
|
||||
this.opName = opName;
|
||||
}
|
||||
|
||||
ObjectNode newOp(final JsonPointer ptr) {
|
||||
final ObjectNode ret = JacksonUtils.nodeFactory().objectNode();
|
||||
ret.put("op", opName);
|
||||
ret.put("path", ptr.toString());
|
||||
return ret;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return opName;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,46 @@
|
|||
package org.xbib.content.json.diff;
|
||||
|
||||
import com.fasterxml.jackson.databind.JsonNode;
|
||||
import com.fasterxml.jackson.databind.node.ArrayNode;
|
||||
import org.xbib.content.json.jackson.JacksonUtils;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
final class IndexedJsonArray {
|
||||
private final int size;
|
||||
private final JsonNode node;
|
||||
|
||||
private int index = 0;
|
||||
|
||||
IndexedJsonArray(final JsonNode node) {
|
||||
this.node = node;
|
||||
size = node.size();
|
||||
}
|
||||
|
||||
IndexedJsonArray(final List<JsonNode> list) {
|
||||
final ArrayNode arrayNode = JacksonUtils.nodeFactory().arrayNode();
|
||||
arrayNode.addAll(list);
|
||||
node = arrayNode;
|
||||
size = arrayNode.size();
|
||||
}
|
||||
|
||||
int getIndex() {
|
||||
return isEmpty() ? -1 : index;
|
||||
}
|
||||
|
||||
void shift() {
|
||||
index++;
|
||||
}
|
||||
|
||||
JsonNode getElement() {
|
||||
return node.get(index);
|
||||
}
|
||||
|
||||
boolean isEmpty() {
|
||||
return index >= size;
|
||||
}
|
||||
|
||||
int size() {
|
||||
return size;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,401 @@
|
|||
package org.xbib.content.json.diff;
|
||||
|
||||
import com.fasterxml.jackson.databind.JsonNode;
|
||||
import com.fasterxml.jackson.databind.node.ArrayNode;
|
||||
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
|
||||
import org.xbib.content.json.jackson.Equivalence;
|
||||
import org.xbib.content.json.jackson.JacksonUtils;
|
||||
import org.xbib.content.json.jackson.JsonNumEquals;
|
||||
import org.xbib.content.json.jackson.NodeType;
|
||||
import org.xbib.content.json.pointer.JsonPointer;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* "Reverse" factorizing JSON Patch implementation.
|
||||
*
|
||||
* This class only has one method, {@link #asJson(com.fasterxml.jackson.databind.JsonNode,
|
||||
* com.fasterxml.jackson.databind.JsonNode)}, which
|
||||
* takes two JSON values as arguments and returns a patch as a {@link com.fasterxml.jackson.databind.JsonNode}.
|
||||
* This generated patch can then be used in {@link
|
||||
* org.xbib.content.json.patch.JsonPatch#fromJson(com.fasterxml.jackson.databind.JsonNode)}.
|
||||
*
|
||||
* Numeric equivalence is respected. Operations are always generated in the
|
||||
* following order:
|
||||
*
|
||||
* <ul>
|
||||
* <li>additions,</li>
|
||||
* <li>removals,</li>
|
||||
* <li>replacements.</li>
|
||||
* </ul>
|
||||
*
|
||||
* Array values generate operations in the order of elements. Factorizing is
|
||||
* done to merge add and remove into move operations and convert duplicate add
|
||||
* to copy operations if values are equivalent. No test operations are
|
||||
* generated (they don't really make sense for diffs anyway).
|
||||
*
|
||||
* Note that due to the way {@link com.fasterxml.jackson.databind.JsonNode} is implemented, this class is
|
||||
* inherently not thread safe (since {@code JsonNode} is mutable). It is
|
||||
* therefore the responsibility of the caller to ensure that the calling context
|
||||
* is safe (by ensuring, for instance, that only the diff operation has
|
||||
* references to the values to be diff'ed).
|
||||
*/
|
||||
public final class JsonDiff {
|
||||
private static final JsonNodeFactory FACTORY = JacksonUtils.nodeFactory();
|
||||
|
||||
private static final Equivalence<JsonNode> EQUIVALENCE
|
||||
= JsonNumEquals.getInstance();
|
||||
|
||||
private JsonDiff() {
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate a JSON patch for transforming the source node into the target node.
|
||||
*
|
||||
* @param source the node to be patched
|
||||
* @param target the expected result after applying the patch
|
||||
* @return the patch as a {@link com.fasterxml.jackson.databind.JsonNode}
|
||||
*/
|
||||
public static JsonNode asJson(final JsonNode source, final JsonNode target) {
|
||||
final List<Diff> diffs = new ArrayList<>();
|
||||
generateDiffs(diffs, JsonPointer.empty(), source, target);
|
||||
DiffFactorizer.factorizeDiffs(diffs);
|
||||
final ArrayNode patch = FACTORY.arrayNode();
|
||||
for (final Diff diff : diffs) {
|
||||
patch.add(diff.asJsonPatch());
|
||||
}
|
||||
return patch;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate differences between source and target node.
|
||||
*
|
||||
* @param diffs list of differences (in order)
|
||||
* @param path parent path for both nodes
|
||||
* @param source source node
|
||||
* @param target target node
|
||||
*/
|
||||
private static void generateDiffs(final List<Diff> diffs,
|
||||
final JsonPointer path, final JsonNode source, final JsonNode target) {
|
||||
/*
|
||||
* If both nodes are equivalent, there is nothing to do
|
||||
*/
|
||||
if (EQUIVALENCE.equivalent(source, target)) {
|
||||
return;
|
||||
}
|
||||
|
||||
/*
|
||||
* Get both node types. We shortcut to a simple replace operation in the
|
||||
* following scenarios:
|
||||
*
|
||||
* - nodes are not the same type; or
|
||||
* - they are the same type, but are not containers (ie, they are
|
||||
* neither objects nor arrays).
|
||||
*/
|
||||
final NodeType sourceType = NodeType.getNodeType(source);
|
||||
final NodeType targetType = NodeType.getNodeType(target);
|
||||
if (sourceType != targetType || !source.isContainerNode()) {
|
||||
diffs.add(Diff.simpleDiff(DiffOperation.REPLACE, path, target));
|
||||
return;
|
||||
}
|
||||
if (sourceType == NodeType.OBJECT) {
|
||||
generateObjectDiffs(diffs, path, source, target);
|
||||
} else {
|
||||
generateArrayDiffs(diffs, path, source, target);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate differences between two object nodes.
|
||||
*
|
||||
* Differences are generated in the following order: added members,
|
||||
* removed members, modified members.
|
||||
*
|
||||
* @param diffs list of differences (modified)
|
||||
* @param path parent path common to both nodes
|
||||
* @param source node to patch
|
||||
* @param target node to attain
|
||||
*/
|
||||
private static void generateObjectDiffs(final List<Diff> diffs,
|
||||
final JsonPointer path, final JsonNode source, final JsonNode target) {
|
||||
final List<String> inFirst = new ArrayList<>();
|
||||
Iterator<String> it = source.fieldNames();
|
||||
while (it.hasNext()) {
|
||||
inFirst.add(it.next());
|
||||
}
|
||||
final List<String> inSecond = new ArrayList<>();
|
||||
it = target.fieldNames();
|
||||
while (it.hasNext()) {
|
||||
inSecond.add(it.next());
|
||||
}
|
||||
List<String> fields;
|
||||
fields = new ArrayList<>(inSecond);
|
||||
fields.removeAll(inFirst);
|
||||
for (final String s : fields) {
|
||||
diffs.add(Diff.simpleDiff(DiffOperation.ADD, path.append(s), target.get(s)));
|
||||
}
|
||||
fields = new ArrayList<>(inFirst);
|
||||
fields.removeAll(inSecond);
|
||||
for (final String s : fields) {
|
||||
diffs.add(Diff.simpleDiff(DiffOperation.REMOVE, path.append(s), source.get(s)));
|
||||
}
|
||||
fields = new ArrayList<>(inFirst);
|
||||
fields.retainAll(inSecond);
|
||||
for (final String s : fields) {
|
||||
generateDiffs(diffs, path.append(s), source.get(s), target.get(s));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate differences between two array nodes.
|
||||
* Differences are generated in order by comparing elements against the
|
||||
* longest common subsequence of elements in both arrays.
|
||||
*
|
||||
* @param diffs list of differences (modified)
|
||||
* @param path parent pointer of both array nodes
|
||||
* @param source array node to be patched
|
||||
* @param target target node after patching
|
||||
* @see LeastCommonSubsequence#getLCS(com.fasterxml.jackson.databind.JsonNode, com.fasterxml.jackson.databind.JsonNode)
|
||||
*/
|
||||
private static void generateArrayDiffs(final List<Diff> diffs,
|
||||
final JsonPointer path, final JsonNode source, final JsonNode target) {
|
||||
// compare array elements linearly using longest common subsequence
|
||||
// algorithm applied to the array elements
|
||||
final IndexedJsonArray src = new IndexedJsonArray(source);
|
||||
final IndexedJsonArray dst = new IndexedJsonArray(target);
|
||||
final IndexedJsonArray lcs = LeastCommonSubsequence.doLCS(source, target);
|
||||
|
||||
preLCS(diffs, path, lcs, src, dst);
|
||||
inLCS(diffs, path, lcs, src, dst);
|
||||
postLCS(diffs, path, src, dst);
|
||||
}
|
||||
|
||||
/*
|
||||
* First method entered when computing array diffs. It will exit early if
|
||||
* the LCS is empty.
|
||||
*
|
||||
* If the LCS is not empty, it means that both the source and target arrays
|
||||
* have at least one element left. In such a situation, this method will run
|
||||
* until elements extracted from both arrays are equivalent to the first
|
||||
* element of the LCS.
|
||||
*/
|
||||
private static void preLCS(final List<Diff> diffs, final JsonPointer path,
|
||||
final IndexedJsonArray lcs, final IndexedJsonArray source,
|
||||
final IndexedJsonArray target) {
|
||||
if (lcs.isEmpty()) {
|
||||
return;
|
||||
}
|
||||
/*
|
||||
* This is our sentinel: if nodes from both the first array and the
|
||||
* second array are equivalent to this node, we are done.
|
||||
*/
|
||||
final JsonNode sentinel = lcs.getElement();
|
||||
|
||||
/*
|
||||
* Those two variables hold nodes for the first and second array in the
|
||||
* main loop.
|
||||
*/
|
||||
JsonNode srcNode;
|
||||
JsonNode dstNode;
|
||||
|
||||
/*
|
||||
* This records the number of equivalences between the LCS node and
|
||||
* nodes from the source and target arrays.
|
||||
*/
|
||||
int nrEquivalences;
|
||||
|
||||
while (true) {
|
||||
/*
|
||||
* At each step, we reset the number of equivalences to 0.
|
||||
*/
|
||||
nrEquivalences = 0;
|
||||
srcNode = source.getElement();
|
||||
dstNode = target.getElement();
|
||||
if (EQUIVALENCE.equivalent(sentinel, srcNode)) {
|
||||
nrEquivalences++;
|
||||
}
|
||||
if (EQUIVALENCE.equivalent(sentinel, dstNode)) {
|
||||
nrEquivalences++;
|
||||
}
|
||||
/*
|
||||
* If both srcNode and dstNode are equivalent to our sentinel, we
|
||||
* are done; this is our exit condition.
|
||||
*/
|
||||
if (nrEquivalences == 2) {
|
||||
return;
|
||||
}
|
||||
/*
|
||||
* If none of them are equivalent to the LCS node, compute diffs
|
||||
* in the first array so that the element at this index is
|
||||
* transformed into the matching element in the second array; then
|
||||
* restart the loop.
|
||||
*
|
||||
* Note that since we are using an LCS, and no element of either
|
||||
* array is equivalent to the first element of the LCS (our
|
||||
* sentinel), a consequence is that indices in both arrays are
|
||||
* equal. In the path below, we could have equally used the index
|
||||
* from the target array.
|
||||
*/
|
||||
if (nrEquivalences == 0) {
|
||||
generateDiffs(diffs, path.append(source.getIndex()), srcNode,
|
||||
dstNode);
|
||||
source.shift();
|
||||
target.shift();
|
||||
continue;
|
||||
}
|
||||
/*
|
||||
* If we reach this point, one array has to catch up in order to
|
||||
* reach the first element of the LCS. The logic is as follows:
|
||||
*
|
||||
* <ul>
|
||||
* <li>if the source array has to catch up, it means its elements have
|
||||
* been removed from the target array;</li>
|
||||
* <li>if the target array has to catch up, it means the source
|
||||
* array's elements are being inserted into the target array.</li>
|
||||
* </ul>
|
||||
*/
|
||||
if (!EQUIVALENCE.equivalent(sentinel, srcNode)) {
|
||||
diffs.add(Diff.arrayRemove(path, source, target));
|
||||
source.shift();
|
||||
} else {
|
||||
diffs.add(Diff.arrayInsert(path, source, target));
|
||||
target.shift();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* This method is called after preLCS(). Its role is to deplete the LCS.
|
||||
*
|
||||
* One particularity of using LCS is that as long as the LCS is not empty,
|
||||
* we can be sure that there is at least one element left in both the source
|
||||
* and target array.
|
||||
*/
|
||||
private static void inLCS(final List<Diff> diffs, final JsonPointer path,
|
||||
final IndexedJsonArray lcs, final IndexedJsonArray source,
|
||||
final IndexedJsonArray target) {
|
||||
JsonNode sourceNode;
|
||||
JsonNode targetNode;
|
||||
JsonNode lcsNode;
|
||||
|
||||
boolean sourceMatch;
|
||||
boolean targetMatch;
|
||||
|
||||
while (!lcs.isEmpty()) {
|
||||
sourceNode = source.getElement();
|
||||
targetNode = target.getElement();
|
||||
lcsNode = lcs.getElement();
|
||||
sourceMatch = EQUIVALENCE.equivalent(sourceNode, lcsNode);
|
||||
targetMatch = EQUIVALENCE.equivalent(targetNode, lcsNode);
|
||||
|
||||
if (!sourceMatch) {
|
||||
/*
|
||||
* At this point, the first element of our source array has
|
||||
* failed to "reach" a matching element in the target array.
|
||||
*
|
||||
* Such an element therefore needs to be removed from the target
|
||||
* array. We generate a "remove event", shift the
|
||||
* source array and restart the loop.
|
||||
*/
|
||||
diffs.add(Diff.arrayRemove(path, source, target));
|
||||
source.shift();
|
||||
continue;
|
||||
}
|
||||
/*
|
||||
* When we reach this point, we know that the element extracted
|
||||
* from the source array is equivalent to the LCS element.
|
||||
*
|
||||
* Note that from this point on, whatever the target element is, we
|
||||
* need to shift our target array; there are two different scenarios
|
||||
* we must account for:
|
||||
*
|
||||
* <ul>
|
||||
* <li>if the target element is equivalent to the LCS element, we have
|
||||
* a common subsequence element (remember that the source element
|
||||
* is also equivalent to this same LCS element at this point); no
|
||||
* mutation of the target array takes place; we must therefore
|
||||
* shift all three arrays (source, target, LCS);</li>
|
||||
* <li>otherwise (target element is not equivalent to the LCS
|
||||
* element), we need to emit an insertion event of the target
|
||||
* element, and advance the target array only.</li>
|
||||
* </ul>
|
||||
*/
|
||||
if (targetMatch) {
|
||||
source.shift();
|
||||
lcs.shift();
|
||||
} else {
|
||||
diffs.add(Diff.arrayInsert(path, source, target));
|
||||
}
|
||||
/*
|
||||
* Shift/advance the target array; always performed, see above
|
||||
*/
|
||||
target.shift();
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* This function is run once the LCS has been exhausted.
|
||||
*
|
||||
* Since the LCS has been exhausted, it means that for whatever nodes node1
|
||||
* and node2 extracted from source and target, they can never be equal.
|
||||
*
|
||||
* The algorithm is therefore as follows:
|
||||
*
|
||||
* <ul>
|
||||
* <li>as long as both are not empty, grab both elements from both arrays and
|
||||
* generate diff operations on them recursively;</li>
|
||||
* <li>when we are out of this loop, add any elements remaining in the second
|
||||
* array (if any), and remove any elements remaining in the first array
|
||||
* (if any).</li>
|
||||
* </ul>
|
||||
*
|
||||
* Note that at the second step, only one of the two input arrays will ever
|
||||
* have any elements left; it is therefore safe to call the appropriate
|
||||
* functions for _both_ possibilities since only one will ever produce any
|
||||
* results.
|
||||
*/
|
||||
private static void postLCS(final List<Diff> diffs, final JsonPointer path,
|
||||
final IndexedJsonArray source, final IndexedJsonArray target) {
|
||||
JsonNode src, dst;
|
||||
|
||||
while (!(source.isEmpty() || target.isEmpty())) {
|
||||
src = source.getElement();
|
||||
dst = target.getElement();
|
||||
generateDiffs(diffs, path.append(source.getIndex()), src, dst);
|
||||
source.shift();
|
||||
target.shift();
|
||||
}
|
||||
addRemaining(diffs, path, target);
|
||||
removeRemaining(diffs, path, source, target.size());
|
||||
}
|
||||
|
||||
private static void addRemaining(final List<Diff> diffs,
|
||||
final JsonPointer path, final IndexedJsonArray array) {
|
||||
Diff diff;
|
||||
JsonNode node;
|
||||
|
||||
while (!array.isEmpty()) {
|
||||
node = array.getElement().deepCopy();
|
||||
diff = Diff.arrayAdd(path, node);
|
||||
diffs.add(diff);
|
||||
array.shift();
|
||||
}
|
||||
}
|
||||
|
||||
private static void removeRemaining(final List<Diff> diffs,
|
||||
final JsonPointer path, final IndexedJsonArray array,
|
||||
final int removeIndex) {
|
||||
Diff diff;
|
||||
JsonNode node;
|
||||
|
||||
while (!array.isEmpty()) {
|
||||
node = array.getElement();
|
||||
diff = Diff.tailArrayRemove(path, array.getIndex(),
|
||||
removeIndex, node);
|
||||
diffs.add(diff);
|
||||
array.shift();
|
||||
}
|
||||
}
|
||||
}
|
|
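A minimal usage sketch for JsonDiff.asJson (not part of this commit; the class name JsonDiffExample is illustrative). The comment reflects the operation order documented above: additions, then removals, then replacements.

import com.fasterxml.jackson.databind.JsonNode;
import org.xbib.content.json.diff.JsonDiff;
import org.xbib.content.json.jackson.JsonLoader;

public final class JsonDiffExample {
    public static void main(String[] args) throws Exception {
        JsonNode source = JsonLoader.fromString("{\"name\":\"foo\",\"age\":1}");
        JsonNode target = JsonLoader.fromString("{\"name\":\"bar\",\"tags\":[]}");

        JsonNode patch = JsonDiff.asJson(source, target);
        // Expected shape: add /tags, then remove /age, then replace /name
        System.out.println(patch);
    }
}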
@ -0,0 +1,167 @@
|
|||
package org.xbib.content.json.diff;
|
||||
|
||||
import com.fasterxml.jackson.databind.JsonNode;
|
||||
import org.xbib.content.json.jackson.Equivalence;
|
||||
import org.xbib.content.json.jackson.JsonNumEquals;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* Longest common subsequence algorithm implementation
|
||||
* <p>
|
||||
* <p>This is an adaptation of the code found at <a
|
||||
* href="http://rosettacode.org/wiki/Longest_common_subsequence#Dynamic_Programming_2">Rosetta
|
||||
* Code</a> for {@link com.fasterxml.jackson.databind.node.ArrayNode} instances.</p>
|
||||
* <p>
|
||||
* <p>For instance, given these two arrays:</p>
|
||||
* <p>
|
||||
* <ul>
|
||||
* <li>{@code [ 1, 2, 3, 4, 5, 6, 7, 8, 9 ]},</li>
|
||||
* <li>{@code [ 1, 2, 10, 11, 5, 12, 8, 9 ]}</li>
|
||||
* </ul>
|
||||
* <p>
|
||||
* <p>this code will return {@code [ 1, 2, 5, 8, 9 ]}.</p>
|
||||
*/
|
||||
final class LeastCommonSubsequence {
|
||||
|
||||
private static final Equivalence<JsonNode> EQUIVALENCE = JsonNumEquals.getInstance();
|
||||
|
||||
private LeastCommonSubsequence() {
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the longest common subsequence of elements of two array nodes
|
||||
* <p>
|
||||
* <p>This is an implementation of the classic 'diff' algorithm often used
|
||||
* to compare text files line by line.</p>
|
||||
*
|
||||
* @param first first array node to compare
|
||||
* @param second second array node to compare
|
||||
*/
|
||||
static List<JsonNode> getLCS(final JsonNode first, final JsonNode second) {
|
||||
final int minSize = Math.min(first.size(), second.size());
|
||||
|
||||
List<JsonNode> l1 = new ArrayList<>(); first.forEach(l1::add);
|
||||
List<JsonNode> l2 = new ArrayList<>(); second.forEach(l2::add);
|
||||
|
||||
final List<JsonNode> ret = head(l1, l2);
|
||||
final int headSize = ret.size();
|
||||
|
||||
l1 = l1.subList(headSize, l1.size());
|
||||
l2 = l2.subList(headSize, l2.size());
|
||||
|
||||
final List<JsonNode> tail = tail(l1, l2);
|
||||
final int trim = tail.size();
|
||||
|
||||
l1 = l1.subList(0, l1.size() - trim);
|
||||
l2 = l2.subList(0, l2.size() - trim);
|
||||
|
||||
if (headSize < minSize) {
|
||||
ret.addAll(doLCS(l1, l2));
|
||||
}
|
||||
ret.addAll(tail);
|
||||
return ret;
|
||||
}
|
||||
|
||||
static IndexedJsonArray doLCS(final JsonNode first, final JsonNode second) {
|
||||
return new IndexedJsonArray(getLCS(first, second));
|
||||
}
|
||||
|
||||
/**
|
||||
* Compute longest common subsequence out of two lists
|
||||
* <p>
|
||||
* <p>When entering this function, both lists are trimmed from their
|
||||
* common leading and trailing nodes.</p>
|
||||
*
|
||||
* @param l1 the first list
|
||||
* @param l2 the second list
|
||||
* @return the longest common subsequence
|
||||
*/
|
||||
private static List<JsonNode> doLCS(final List<JsonNode> l1,
|
||||
final List<JsonNode> l2) {
|
||||
final List<JsonNode> lcs = new ArrayList<>();
|
||||
// construct LCS lengths matrix
|
||||
final int size1 = l1.size();
|
||||
final int size2 = l2.size();
|
||||
final int[][] lengths = new int[size1 + 1][size2 + 1];
|
||||
|
||||
JsonNode node1;
|
||||
JsonNode node2;
|
||||
int len;
|
||||
|
||||
for (int i = 0; i < size1; i++) {
|
||||
for (int j = 0; j < size2; j++) {
|
||||
node1 = l1.get(i);
|
||||
node2 = l2.get(j);
|
||||
len = EQUIVALENCE.equivalent(node1, node2) ? lengths[i][j] + 1
|
||||
: Math.max(lengths[i + 1][j], lengths[i][j + 1]);
|
||||
lengths[i + 1][j + 1] = len;
|
||||
}
|
||||
}
|
||||
|
||||
// return result out of the LCS lengths matrix
|
||||
int x = size1, y = size2;
|
||||
while (x > 0 && y > 0) {
|
||||
if (lengths[x][y] == lengths[x - 1][y]) {
|
||||
x--;
|
||||
} else if (lengths[x][y] == lengths[x][y - 1]) {
|
||||
y--;
|
||||
} else {
|
||||
lcs.add(l1.get(x - 1));
|
||||
x--;
|
||||
y--;
|
||||
}
|
||||
}
|
||||
Collections.reverse(lcs);
|
||||
return lcs;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a list with common head elements of two lists
|
||||
* <p>
|
||||
* <p>Note that the arguments are NOT altered.</p>
|
||||
*
|
||||
* @param l1 first list
|
||||
* @param l2 second list
|
||||
* @return a list of common head elements
|
||||
*/
|
||||
private static List<JsonNode> head(final List<JsonNode> l1,
|
||||
final List<JsonNode> l2) {
|
||||
final List<JsonNode> ret = new ArrayList<>();
|
||||
final int len = Math.min(l1.size(), l2.size());
|
||||
|
||||
JsonNode node;
|
||||
|
||||
for (int index = 0; index < len; index++) {
|
||||
node = l1.get(index);
|
||||
if (!EQUIVALENCE.equivalent(node, l2.get(index))) {
|
||||
break;
|
||||
}
|
||||
ret.add(node);
|
||||
}
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the list of common tail elements of two lists
|
||||
* <p>
|
||||
* <p>Note that the arguments are NOT altered. Elements are returned in
|
||||
* their order of appearance.</p>
|
||||
*
|
||||
* @param l1 first list
|
||||
* @param l2 second list
|
||||
* @return a list of common tail elements
|
||||
*/
|
||||
private static List<JsonNode> tail(final List<JsonNode> l1,
|
||||
final List<JsonNode> l2) {
|
||||
// copy before reversing so that the argument lists are not altered
|
||||
final List<JsonNode> r1 = new ArrayList<>(l1);
|
||||
final List<JsonNode> r2 = new ArrayList<>(l2);
|
||||
Collections.reverse(r1);
|
||||
Collections.reverse(r2);
|
||||
final List<JsonNode> l = head(r1, r2);
|
||||
Collections.reverse(l);
|
||||
return l;
|
||||
}
|
||||
}
|
|
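A small sketch reproducing the element-level example from the class javadoc (not part of this commit). getLCS is package-private, so the snippet assumes it lives in the same package; the class name LcsExample is illustrative.

package org.xbib.content.json.diff;

import com.fasterxml.jackson.databind.JsonNode;
import org.xbib.content.json.jackson.JsonLoader;

import java.util.List;

final class LcsExample {
    public static void main(String[] args) throws Exception {
        JsonNode first = JsonLoader.fromString("[1, 2, 3, 4, 5, 6, 7, 8, 9]");
        JsonNode second = JsonLoader.fromString("[1, 2, 10, 11, 5, 12, 8, 9]");

        List<JsonNode> lcs = LeastCommonSubsequence.getLCS(first, second);
        System.out.println(lcs); // expected: [1, 2, 5, 8, 9]
    }
}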
@ -0,0 +1,4 @@
|
|||
/**
|
||||
* Classes for JSON diff operation.
|
||||
*/
|
||||
package org.xbib.content.json.diff;
|
|
@ -0,0 +1,45 @@
|
|||
package org.xbib.content.json.jackson;
|
||||
|
||||
/**
|
||||
* A strategy for determining whether two instances are considered equivalent.
|
||||
* @param <T> type parameter
|
||||
*/
|
||||
public interface Equivalence<T> {
|
||||
/**
|
||||
* Returns {@code true} if the given objects are considered equivalent.
|
||||
*
|
||||
* The <code>equivalent</code> method implements an equivalence relation on non-null object
|
||||
* references:
|
||||
* <ul>
|
||||
* <li>It is <i>reflexive</i>: for any non-null reference value {@code x}, {@code x.equals(x)}
|
||||
* should return {@code true}.
|
||||
* <li>It is <i>symmetric</i>: for any non-null reference values {@code x} and {@code y}, {@code
|
||||
* x.equals(y)} should return {@code true} if and only if {@code y.equals(x)} returns {@code
|
||||
* true}.
|
||||
* <li>It is <i>transitive</i>: for any non-null reference values {@code x}, {@code y}, and {@code
|
||||
* z}, if {@code x.equals(y)} returns {@code true} and {@code y.equals(z)} returns {@code
|
||||
* true}, then {@code x.equals(z)} should return {@code true}.
|
||||
* <li>It is <i>consistent</i>: for any non-null reference values {@code x} and {@code y},
|
||||
* multiple invocations of {@code x.equals(y)} consistently return {@code true} or
|
||||
* consistently return {@code false}, provided no information used in {@code equals}
|
||||
* comparisons on the objects is modified.
|
||||
* <li>For any non-null reference value {@code x}, {@code x.equals(null)} should return {@code
|
||||
* false}.
|
||||
* </ul>
|
||||
* @param a a
|
||||
* @param b b
|
||||
* @return true if a and b are equivalent
|
||||
*/
|
||||
boolean equivalent(T a, T b);
|
||||
|
||||
/**
|
||||
* Returns a hash code for {@code object}. This function must return the same value for
|
||||
* any two instances which are {@link #equivalent}, and should as often as possible return a
|
||||
* distinct value for instances which are not equivalent.
|
||||
*
|
||||
* @param t the object of type t
|
||||
* @return hash code
|
||||
* @see Object#hashCode the same contractual obligations apply here
|
||||
*/
|
||||
int hash(T t);
|
||||
}
|
|
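A hypothetical implementation sketch (not part of this commit) showing how the two methods are kept consistent: strings are compared through their lower-cased form, and hash() hashes that same form so equivalent values always hash alike.

import org.xbib.content.json.jackson.Equivalence;

final class CaseInsensitiveEquivalence implements Equivalence<String> {

    @Override
    public boolean equivalent(final String a, final String b) {
        // compare via the same normalization that hash() uses
        return a.toLowerCase().equals(b.toLowerCase());
    }

    @Override
    public int hash(final String t) {
        return t.toLowerCase().hashCode();
    }
}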
@ -0,0 +1,124 @@
|
|||
package org.xbib.content.json.jackson;
|
||||
|
||||
import com.fasterxml.jackson.databind.DeserializationFeature;
|
||||
import com.fasterxml.jackson.databind.JsonNode;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.fasterxml.jackson.databind.ObjectReader;
|
||||
import com.fasterxml.jackson.databind.ObjectWriter;
|
||||
import com.fasterxml.jackson.databind.SerializationFeature;
|
||||
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.StringWriter;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.Iterator;
|
||||
import java.util.Map;
|
||||
import java.util.logging.Level;
|
||||
import java.util.logging.Logger;
|
||||
|
||||
/**
|
||||
* Utility class for Jackson.
|
||||
*
|
||||
* This class provides utility methods to get a {@link com.fasterxml.jackson.databind.node.JsonNodeFactory} and
|
||||
* a preconfigured {@link com.fasterxml.jackson.databind.ObjectReader}. It can also be used to return
|
||||
* preconfigured instances of {@link com.fasterxml.jackson.databind.ObjectMapper} (see {@link #newMapper()}).
|
||||
*
|
||||
*/
|
||||
public final class JacksonUtils {
|
||||
|
||||
private static final Logger logger = Logger.getLogger(JacksonUtils.class.getName());
|
||||
|
||||
private static final JsonNodeFactory FACTORY = JsonNodeFactory.instance;
|
||||
|
||||
private static final ObjectReader READER;
|
||||
private static final ObjectWriter WRITER;
|
||||
|
||||
static {
|
||||
final ObjectMapper mapper = newMapper();
|
||||
READER = mapper.reader();
|
||||
WRITER = mapper.writer();
|
||||
}
|
||||
|
||||
private JacksonUtils() {
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a preconfigured {@link com.fasterxml.jackson.databind.ObjectReader} to read JSON inputs.
|
||||
*
|
||||
* @return the reader
|
||||
* @see #newMapper()
|
||||
*/
|
||||
public static ObjectReader getReader() {
|
||||
return READER;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a preconfigured {@link com.fasterxml.jackson.databind.node.JsonNodeFactory} to generate JSON data as
|
||||
* {@link com.fasterxml.jackson.databind.JsonNode}s.
|
||||
*
|
||||
* @return the factory
|
||||
*/
|
||||
public static JsonNodeFactory nodeFactory() {
|
||||
return FACTORY;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a map out of an object's members.
|
||||
* If the node given as an argument is not a map, an empty map is
|
||||
* returned.
|
||||
*
|
||||
* @param node the node
|
||||
* @return a map
|
||||
*/
|
||||
public static Map<String, JsonNode> asMap(final JsonNode node) {
|
||||
if (!node.isObject()) {
|
||||
return Collections.emptyMap();
|
||||
}
|
||||
final Iterator<Map.Entry<String, JsonNode>> iterator = node.fields();
|
||||
final Map<String, JsonNode> ret = new HashMap<>();
|
||||
Map.Entry<String, JsonNode> entry;
|
||||
while (iterator.hasNext()) {
|
||||
entry = iterator.next();
|
||||
ret.put(entry.getKey(), entry.getValue());
|
||||
}
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
/**
|
||||
* Pretty print a JSON value.
|
||||
*
|
||||
* @param node the JSON value to print
|
||||
* @return the pretty printed value as a string
|
||||
* @see #newMapper()
|
||||
*/
|
||||
public static String prettyPrint(final JsonNode node) {
|
||||
final StringWriter writer = new StringWriter();
|
||||
|
||||
try {
|
||||
WRITER.writeValue(writer, node);
|
||||
writer.flush();
|
||||
} catch (IOException e) {
|
||||
logger.log(Level.FINE, e.getMessage(), e);
|
||||
}
|
||||
return writer.toString();
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a preconfigured {@link com.fasterxml.jackson.databind.ObjectMapper}.
|
||||
* The returned mapper will have the following features enabled:
|
||||
* <ul>
|
||||
* <li>{@link com.fasterxml.jackson.databind.DeserializationFeature#USE_BIG_DECIMAL_FOR_FLOATS};</li>
|
||||
* <li>{@link com.fasterxml.jackson.databind.SerializationFeature#INDENT_OUTPUT}.</li>
|
||||
* </ul>
|
||||
* This returns a new instance each time.
|
||||
*
|
||||
* @return an {@link com.fasterxml.jackson.databind.ObjectMapper}
|
||||
*/
|
||||
public static ObjectMapper newMapper() {
|
||||
return new ObjectMapper().setNodeFactory(FACTORY)
|
||||
.enable(DeserializationFeature.USE_BIG_DECIMAL_FOR_FLOATS)
|
||||
.enable(SerializationFeature.INDENT_OUTPUT);
|
||||
}
|
||||
}
|
|
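A minimal usage sketch (not part of this commit; the class name is illustrative) showing the shared node factory, the pretty printer, and the effect of USE_BIG_DECIMAL_FOR_FLOATS on a mapper obtained from newMapper().

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.xbib.content.json.jackson.JacksonUtils;

public final class JacksonUtilsExample {
    public static void main(String[] args) throws Exception {
        // Build a node with the shared factory and pretty print it (INDENT_OUTPUT).
        ObjectNode node = JacksonUtils.nodeFactory().objectNode();
        node.put("pi", 3.14);
        System.out.println(JacksonUtils.prettyPrint(node));

        // Floating point numbers are read as BigDecimal by the preconfigured mapper.
        ObjectMapper mapper = JacksonUtils.newMapper();
        JsonNode parsed = mapper.readTree("{\"x\": 1.10}");
        System.out.println(parsed.get("x").isBigDecimal()); // true
    }
}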
@ -0,0 +1,84 @@
|
|||
package org.xbib.content.json.jackson;
|
||||
|
||||
import com.fasterxml.jackson.databind.JsonNode;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.Reader;
|
||||
import java.io.StringReader;
|
||||
import java.net.URL;
|
||||
|
||||
/**
|
||||
* Utility class to load JSON values from various sources as {@link com.fasterxml.jackson.databind.JsonNode}s.
|
||||
*
|
||||
* This class uses a {@link JsonNodeReader} to parse JSON inputs.
|
||||
*
|
||||
* @see JsonNodeReader
|
||||
*/
|
||||
public final class JsonLoader {
|
||||
/**
|
||||
* The reader.
|
||||
*/
|
||||
private static final JsonNodeReader READER = new JsonNodeReader();
|
||||
|
||||
private JsonLoader() {
|
||||
}
|
||||
|
||||
/**
|
||||
* Read a {@link com.fasterxml.jackson.databind.JsonNode} from a resource path.
|
||||
*
|
||||
* This method first tries to load the resource using {@link
|
||||
* Class#getResource(String)}; if not found, it tries to use the context
|
||||
* classloader and if this is not found, this class's classloader.
|
||||
*
|
||||
* This method throws an {@link java.io.IOException} if the resource does not
|
||||
* exist.
|
||||
*
|
||||
* @param classLoader the class loader
|
||||
* @param resource the path to the resource (must begin
|
||||
* with a {@code /})
|
||||
* @return the JSON document at the resource
|
||||
* @throws IllegalArgumentException resource path does not begin with a
|
||||
* {@code /}
|
||||
* @throws java.io.IOException there was a problem loading the resource, or the JSON
|
||||
* document is invalid
|
||||
*/
|
||||
public static JsonNode fromResource(ClassLoader classLoader, final String resource)
|
||||
throws IOException {
|
||||
URL url = JsonLoader.class.getResource(resource);
|
||||
InputStream in = url != null ? url.openStream() : classLoader.getResourceAsStream(resource);
|
||||
final JsonNode ret;
|
||||
try {
|
||||
ret = READER.fromInputStream(in);
|
||||
} finally {
|
||||
if (in != null) {
|
||||
in.close();
|
||||
}
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
/**
|
||||
* Read a {@link com.fasterxml.jackson.databind.JsonNode} from a user supplied {@link java.io.Reader}.
|
||||
*
|
||||
* @param reader The reader
|
||||
* @return the document
|
||||
* @throws java.io.IOException if the reader has problems
|
||||
*/
|
||||
public static JsonNode fromReader(final Reader reader)
|
||||
throws IOException {
|
||||
return READER.fromReader(reader);
|
||||
}
|
||||
|
||||
/**
|
||||
* Read a {@link com.fasterxml.jackson.databind.JsonNode} from a string input.
|
||||
*
|
||||
* @param json the JSON as a string
|
||||
* @return the document
|
||||
* @throws java.io.IOException could not read from string
|
||||
*/
|
||||
public static JsonNode fromString(final String json)
|
||||
throws IOException {
|
||||
return fromReader(new StringReader(json));
|
||||
}
|
||||
}
|
|
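A minimal usage sketch (not part of this commit; the class name is illustrative). The resource variant is only mentioned in a comment because it needs an actual classpath resource to run.

import com.fasterxml.jackson.databind.JsonNode;
import org.xbib.content.json.jackson.JsonLoader;

import java.io.StringReader;

public final class JsonLoaderExample {
    public static void main(String[] args) throws Exception {
        JsonNode fromString = JsonLoader.fromString("{\"hello\":\"world\"}");
        System.out.println(fromString.get("hello").asText()); // world

        JsonNode fromReader = JsonLoader.fromReader(new StringReader("[1, 2, 3]"));
        System.out.println(fromReader.size()); // 3

        // For classpath resources: JsonLoader.fromResource(someClassLoader, "/data.json");
        // the path must start with "/" and "/data.json" is a placeholder name.
    }
}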
@ -0,0 +1,142 @@
|
|||
package org.xbib.content.json.jackson;
|
||||
|
||||
import com.fasterxml.jackson.core.JsonLocation;
|
||||
import com.fasterxml.jackson.core.JsonParseException;
|
||||
import com.fasterxml.jackson.core.JsonParser;
|
||||
import com.fasterxml.jackson.databind.JsonNode;
|
||||
import com.fasterxml.jackson.databind.MappingIterator;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.fasterxml.jackson.databind.ObjectReader;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.Reader;
|
||||
import java.util.logging.Level;
|
||||
import java.util.logging.Logger;
|
||||
|
||||
/**
|
||||
* Class dedicated to reading JSON values from {@link java.io.InputStream}s and {@link
|
||||
* java.io.Reader}s.
|
||||
* This class wraps a Jackson {@link com.fasterxml.jackson.databind.ObjectMapper} so that it reads one, and
|
||||
* only one, JSON text from a source. By default, when you read and map an
|
||||
* input source, Jackson will stop after it has read the first valid JSON text;
|
||||
* this means, for instance, that with this as an input:
|
||||
* <pre>
|
||||
* []]]
|
||||
* </pre>
|
||||
* it will read the initial empty array ({@code []}) and stop there. This
|
||||
* class, instead, will peek to see whether anything is after the initial array,
|
||||
* and throw an exception if it finds anything.
|
||||
* Note: the input sources are closed by the read methods.
|
||||
*
|
||||
* @see com.fasterxml.jackson.databind.ObjectMapper#readValues(com.fasterxml.jackson.core.JsonParser, Class)
|
||||
*/
|
||||
public final class JsonNodeReader {
|
||||
|
||||
private static final Logger logger = Logger.getLogger(JsonNodeReader.class.getName());
|
||||
|
||||
private final ObjectReader reader;
|
||||
|
||||
public JsonNodeReader(final ObjectMapper mapper) {
|
||||
reader = mapper.configure(JsonParser.Feature.AUTO_CLOSE_SOURCE, true)
|
||||
.readerFor(JsonNode.class);
|
||||
}
|
||||
|
||||
public JsonNodeReader() {
|
||||
this(JacksonUtils.newMapper());
|
||||
}
|
||||
|
||||
private static JsonNode readNode(final MappingIterator<JsonNode> iterator) throws IOException {
|
||||
final Object source = iterator.getParser().getInputSource();
|
||||
final JsonParseExceptionBuilder builder = new JsonParseExceptionBuilder(null, source);
|
||||
if (!iterator.hasNextValue()) {
|
||||
throw builder.build();
|
||||
}
|
||||
final JsonNode ret = iterator.nextValue();
|
||||
builder.setLocation(iterator.getCurrentLocation());
|
||||
try {
|
||||
if (iterator.hasNextValue()) {
|
||||
throw builder.build();
|
||||
}
|
||||
} catch (JsonParseException e) {
|
||||
logger.log(Level.FINE, e.getMessage(), e);
|
||||
throw builder.setLocation(e.getLocation()).build();
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
/**
|
||||
* Read a JSON value from an {@link java.io.InputStream}.
|
||||
*
|
||||
* @param in the input stream
|
||||
* @return the value
|
||||
* @throws java.io.IOException malformed input, or problem encountered when reading
|
||||
* from the stream
|
||||
*/
|
||||
public JsonNode fromInputStream(final InputStream in) throws IOException {
|
||||
JsonParser parser = null;
|
||||
MappingIterator<JsonNode> iterator = null;
|
||||
|
||||
try {
|
||||
parser = reader.getFactory().createParser(in);
|
||||
iterator = reader.readValues(parser);
|
||||
return readNode(iterator);
|
||||
} finally {
|
||||
if (parser != null) {
|
||||
parser.close();
|
||||
}
|
||||
if (iterator != null) {
|
||||
iterator.close();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Read a JSON value from a {@link java.io.Reader}.
|
||||
*
|
||||
* @param r the reader
|
||||
* @return the value
|
||||
* @throws java.io.IOException malformed input, or problem encountered when reading
|
||||
* from the reader
|
||||
*/
|
||||
public JsonNode fromReader(final Reader r)
|
||||
throws IOException {
|
||||
JsonParser parser = null;
|
||||
MappingIterator<JsonNode> iterator = null;
|
||||
|
||||
try {
|
||||
parser = reader.getFactory().createParser(r);
|
||||
iterator = reader.readValues(parser);
|
||||
return readNode(iterator);
|
||||
} finally {
|
||||
if (parser != null) {
|
||||
parser.close();
|
||||
}
|
||||
if (iterator != null) {
|
||||
iterator.close();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Small helper to build {@link com.fasterxml.jackson.core.JsonParseException}s carrying the input source and a parse location.
|
||||
*/
|
||||
private static final class JsonParseExceptionBuilder {
|
||||
private JsonParser jsonParser;
|
||||
private JsonLocation location;
|
||||
|
||||
private JsonParseExceptionBuilder(final JsonParser jsonParser, final Object source) {
|
||||
this.jsonParser = jsonParser;
|
||||
location = new JsonLocation(source, 0L, 1, 1);
|
||||
}
|
||||
|
||||
private JsonParseExceptionBuilder setLocation(final JsonLocation location) {
|
||||
this.location = location;
|
||||
return this;
|
||||
}
|
||||
|
||||
public JsonParseException build() {
|
||||
return new JsonParseException(jsonParser, "", location);
|
||||
}
|
||||
}
|
||||
}
|
|
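A minimal sketch (not part of this commit; the class name is illustrative) of the behaviour described in the class javadoc: a single JSON text is read normally, while trailing data such as []]] is rejected.

import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.databind.JsonNode;
import org.xbib.content.json.jackson.JsonNodeReader;

import java.io.StringReader;

public final class JsonNodeReaderExample {
    public static void main(String[] args) throws Exception {
        JsonNodeReader reader = new JsonNodeReader();

        JsonNode node = reader.fromReader(new StringReader("[]"));
        System.out.println(node); // []

        // A plain ObjectMapper would silently stop after "[]"; this reader does not.
        try {
            reader.fromReader(new StringReader("[]]]"));
        } catch (JsonParseException e) {
            System.out.println("trailing data rejected");
        }
    }
}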
@ -0,0 +1,193 @@
|
|||
package org.xbib.content.json.jackson;
|
||||
|
||||
import com.fasterxml.jackson.databind.JsonNode;
|
||||
|
||||
import java.util.HashSet;
|
||||
import java.util.Iterator;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
/**
|
||||
* An equivalence strategy for JSON Schema equality
|
||||
* {@link com.fasterxml.jackson.databind.JsonNode} does a pretty good job of obeying the {@link
|
||||
* Object#equals(Object) equals()}/{@link Object#hashCode() hashCode()}
|
||||
* contract. And in fact, it does it too well for JSON Schema.
|
||||
*
|
||||
* For instance, it considers numeric nodes {@code 1} and {@code 1.0} to be
|
||||
* different nodes, which is true. But some IETF RFCs and drafts (among them,
|
||||
* JSON Schema and JSON Patch) mandate that numeric JSON values be considered
|
||||
* equal if their mathematical value is the same. This class implements this
|
||||
* kind of equality.
|
||||
*/
|
||||
public final class JsonNumEquals implements Equivalence<JsonNode> {
|
||||
private static final Equivalence<JsonNode> INSTANCE = new JsonNumEquals();
|
||||
|
||||
private JsonNumEquals() {
|
||||
}
|
||||
|
||||
public static Equivalence<JsonNode> getInstance() {
|
||||
return INSTANCE;
|
||||
}
|
||||
|
||||
private static boolean numEquals(final JsonNode a, final JsonNode b) {
|
||||
/*
|
||||
* If both numbers are integers, delegate to JsonNode.
|
||||
*/
|
||||
if (a.isIntegralNumber() && b.isIntegralNumber()) {
|
||||
return a.equals(b);
|
||||
}
|
||||
|
||||
/*
|
||||
* Otherwise, compare decimal values.
|
||||
*/
|
||||
return a.decimalValue().compareTo(b.decimalValue()) == 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equivalent(final JsonNode a, final JsonNode b) {
|
||||
/*
|
||||
* If both are numbers, delegate to the helper method
|
||||
*/
|
||||
if (a.isNumber() && b.isNumber()) {
|
||||
return numEquals(a, b);
|
||||
}
|
||||
|
||||
final NodeType typeA = NodeType.getNodeType(a);
|
||||
final NodeType typeB = NodeType.getNodeType(b);
|
||||
|
||||
/*
|
||||
* If they are of different types, no dice
|
||||
*/
|
||||
if (typeA != typeB) {
|
||||
return false;
|
||||
}
|
||||
|
||||
/*
|
||||
* For all other primitive types than numbers, trust JsonNode
|
||||
*/
|
||||
if (!a.isContainerNode()) {
|
||||
return a.equals(b);
|
||||
}
|
||||
|
||||
/*
|
||||
* OK, so they are containers (either both arrays or objects due to the
|
||||
* test on types above). They are obviously not equal if they do not
|
||||
* have the same number of elements/members.
|
||||
*/
|
||||
if (a.size() != b.size()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
/*
|
||||
* Delegate to the appropriate method according to their type.
|
||||
*/
|
||||
return typeA == NodeType.ARRAY ? arrayEquals(a, b) : objectEquals(a, b);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hash(final JsonNode t) {
|
||||
/*
|
||||
* If this is a numeric node, we want the same hashcode for the same
|
||||
* mathematical values. Go with double, its range is good enough for
|
||||
* 99+% of use cases.
|
||||
*/
|
||||
if (t.isNumber()) {
|
||||
return Double.valueOf(t.doubleValue()).hashCode();
|
||||
}
|
||||
|
||||
/*
|
||||
* If this is a primitive type (other than numbers, handled above),
|
||||
* delegate to JsonNode.
|
||||
*/
|
||||
if (!t.isContainerNode()) {
|
||||
return t.hashCode();
|
||||
}
|
||||
|
||||
/*
|
||||
* The following hash calculations work, yes, but they are poor at best.
|
||||
* And probably slow, too.
|
||||
*/
|
||||
int ret = 0;
|
||||
|
||||
/*
|
||||
* If the container is empty, just return
|
||||
*/
|
||||
if (t.size() == 0) {
|
||||
return ret;
|
||||
}
|
||||
|
||||
/*
|
||||
* Array
|
||||
*/
|
||||
if (t.isArray()) {
|
||||
for (final JsonNode element : t) {
|
||||
ret = 31 * ret + hash(element);
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
/*
|
||||
* Not an array? An object.
|
||||
*/
|
||||
final Iterator<Map.Entry<String, JsonNode>> iterator = t.fields();
|
||||
|
||||
Map.Entry<String, JsonNode> entry;
|
||||
|
||||
while (iterator.hasNext()) {
|
||||
entry = iterator.next();
|
||||
ret = 31 * ret + (entry.getKey().hashCode() ^ hash(entry.getValue()));
|
||||
}
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
private boolean arrayEquals(final JsonNode a, final JsonNode b) {
|
||||
/*
|
||||
* We are guaranteed here that arrays are the same size.
|
||||
*/
|
||||
final int size = a.size();
|
||||
|
||||
for (int i = 0; i < size; i++) {
|
||||
if (!equivalent(a.get(i), b.get(i))) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
private boolean objectEquals(final JsonNode a, final JsonNode b) {
|
||||
/*
|
||||
* Grab the key set from the first node
|
||||
*/
|
||||
final Set<String> keys = new HashSet<>();
|
||||
Iterator<String> it = a.fieldNames();
|
||||
while (it.hasNext()) {
|
||||
keys.add(it.next());
|
||||
}
|
||||
|
||||
/*
|
||||
* Grab the key set from the second node, and see if both sets are the
|
||||
* same. If not, objects are not equal, no need to check for children.
|
||||
*/
|
||||
final Set<String> set = new HashSet<>();
|
||||
it = b.fieldNames();
|
||||
while (it.hasNext()) {
|
||||
set.add(it.next());
|
||||
}
|
||||
if (!set.equals(keys)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
/*
|
||||
* Test each member individually.
|
||||
*/
|
||||
for (final String key : keys) {
|
||||
if (!equivalent(a.get(key), b.get(key))) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
}
|
|
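A minimal sketch (not part of this commit; the class name is illustrative) of the numeric equivalence described above: JsonNode itself treats 1 and 1.0 as different values, JsonNumEquals does not.

import com.fasterxml.jackson.databind.JsonNode;
import org.xbib.content.json.jackson.Equivalence;
import org.xbib.content.json.jackson.JsonLoader;
import org.xbib.content.json.jackson.JsonNumEquals;

public final class JsonNumEqualsExample {
    public static void main(String[] args) throws Exception {
        Equivalence<JsonNode> eq = JsonNumEquals.getInstance();

        JsonNode a = JsonLoader.fromString("{\"x\": 1}");
        JsonNode b = JsonLoader.fromString("{\"x\": 1.0}");

        System.out.println(a.equals(b));         // false: 1 and 1.0 are different nodes
        System.out.println(eq.equivalent(a, b)); // true: same mathematical value
    }
}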
@ -0,0 +1,109 @@
|
|||
package org.xbib.content.json.jackson;
|
||||
|
||||
import com.fasterxml.jackson.core.JsonToken;
|
||||
import com.fasterxml.jackson.databind.JsonNode;
|
||||
|
||||
import java.util.EnumMap;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* Enumeration for the different types of JSON instances which can be
|
||||
* encountered.
|
||||
* In addition to what the JSON RFC defines, JSON Schema has an {@code
|
||||
* integer} type, which is a numeric value without any fraction or exponent
|
||||
* part.
|
||||
*/
|
||||
public enum NodeType {
|
||||
/**
|
||||
* Array nodes.
|
||||
*/
|
||||
ARRAY("array"),
|
||||
/**
|
||||
* Boolean nodes.
|
||||
*/
|
||||
BOOLEAN("boolean"),
|
||||
/**
|
||||
* Integer nodes.
|
||||
*/
|
||||
INTEGER("integer"),
|
||||
/**
|
||||
* Null nodes.
|
||||
*/
|
||||
NULL("null"),
|
||||
/**
|
||||
* Number nodes (ie, decimal numbers).
|
||||
*/
|
||||
NUMBER("number"),
|
||||
/**
|
||||
* Object nodes.
|
||||
*/
|
||||
OBJECT("object"),
|
||||
/**
|
||||
* String nodes.
|
||||
*/
|
||||
STRING("string");
|
||||
|
||||
/**
|
||||
* Reverse map to find a node type out of this type's name.
|
||||
*/
|
||||
private static final Map<String, NodeType> NAME_MAP
|
||||
= new HashMap<String, NodeType>();
|
||||
/**
|
||||
* Mapping of {@link com.fasterxml.jackson.core.JsonToken} back to node types (used in {@link
|
||||
* #getNodeType(com.fasterxml.jackson.databind.JsonNode)}).
|
||||
*/
|
||||
private static final Map<JsonToken, NodeType> TOKEN_MAP
|
||||
= new EnumMap<JsonToken, NodeType>(JsonToken.class);
|
||||
|
||||
static {
|
||||
TOKEN_MAP.put(JsonToken.START_ARRAY, ARRAY);
|
||||
TOKEN_MAP.put(JsonToken.VALUE_TRUE, BOOLEAN);
|
||||
TOKEN_MAP.put(JsonToken.VALUE_FALSE, BOOLEAN);
|
||||
TOKEN_MAP.put(JsonToken.VALUE_NUMBER_INT, INTEGER);
|
||||
TOKEN_MAP.put(JsonToken.VALUE_NUMBER_FLOAT, NUMBER);
|
||||
TOKEN_MAP.put(JsonToken.VALUE_NULL, NULL);
|
||||
TOKEN_MAP.put(JsonToken.START_OBJECT, OBJECT);
|
||||
TOKEN_MAP.put(JsonToken.VALUE_STRING, STRING);
|
||||
|
||||
for (final NodeType type : NodeType.values()) {
|
||||
NAME_MAP.put(type.name, type);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* The name for this type, as encountered in a JSON schema.
|
||||
*/
|
||||
private final String name;
|
||||
|
||||
NodeType(final String name) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
/**
|
||||
* Given a type name, return the corresponding node type.
|
||||
*
|
||||
* @param name the type name
|
||||
* @return the node type, or null if not found
|
||||
*/
|
||||
public static NodeType fromName(final String name) {
|
||||
return NAME_MAP.get(name);
|
||||
}
|
||||
|
||||
/**
|
||||
* Given a {@link com.fasterxml.jackson.databind.JsonNode} as an argument, return its type. The argument
|
||||
* MUST NOT BE NULL, and MUST NOT be a {@link com.fasterxml.jackson.databind.node.MissingNode}.
|
||||
*
|
||||
* @param node the node to determine the type of
|
||||
* @return the type for this node
|
||||
*/
|
||||
public static NodeType getNodeType(final JsonNode node) {
|
||||
final JsonToken token = node.asToken();
|
||||
return TOKEN_MAP.get(token);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return name;
|
||||
}
|
||||
}
|
|
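A minimal sketch (not part of this commit; the class name is illustrative) showing how getNodeType distinguishes the container and scalar types, including the JSON Schema style integer/number split.

import com.fasterxml.jackson.databind.JsonNode;
import org.xbib.content.json.jackson.JsonLoader;
import org.xbib.content.json.jackson.NodeType;

public final class NodeTypeExample {
    public static void main(String[] args) throws Exception {
        JsonNode node = JsonLoader.fromString("{\"a\": [1, 2.5, null, \"s\"]}");

        System.out.println(NodeType.getNodeType(node));                  // object
        System.out.println(NodeType.getNodeType(node.get("a")));         // array
        System.out.println(NodeType.getNodeType(node.get("a").get(0)));  // integer
        System.out.println(NodeType.getNodeType(node.get("a").get(1)));  // number
        System.out.println(NodeType.fromName("string"));                 // string
    }
}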
@ -0,0 +1,66 @@
|
|||
package org.xbib.content.json.jackson;
|
||||
|
||||
/**
|
||||
* Wraps a reference together with an {@link Equivalence} so that equality and hashing are delegated to that equivalence.
|
||||
* @param <T> the type parameter
|
||||
*/
|
||||
public final class Wrapper<T> {
|
||||
|
||||
private final Equivalence<? super T> equivalence;
|
||||
private final T reference;
|
||||
|
||||
public Wrapper(Equivalence<? super T> equivalence, T reference) {
|
||||
this.equivalence = equivalence;
|
||||
this.reference = reference;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the (possibly null) reference wrapped by this instance.
|
||||
* @return the reference
|
||||
*/
|
||||
public T get() {
|
||||
return reference;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns {@code true} if {@link Equivalence#equivalent(Object, Object)} applied to the wrapped
|
||||
* references is {@code true} and both wrappers use the {@link Object#equals(Object) same}
|
||||
* equivalence.
|
||||
*/
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (obj == this) {
|
||||
return true;
|
||||
} else if (obj instanceof Wrapper) {
|
||||
Wrapper<?> that = (Wrapper<?>) obj;
|
||||
/*
|
||||
* We cast to Equivalence<Object> here because we can't check the type of the reference held
|
||||
* by the other wrapper. But, by checking that the Equivalences are equal, we know that
|
||||
* whatever type it is, it is assignable to the type handled by this wrapper's equivalence.
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
Equivalence<Object> equivalence = (Equivalence<Object>) this.equivalence;
|
||||
return equivalence.equals(that.equivalence)
|
||||
&& equivalence.equivalent(this.reference, that.reference);
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the result of {@link Equivalence#hash(Object)} applied to the wrapped reference.
|
||||
*/
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return equivalence.hash(reference);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a string representation for this equivalence wrapper. The form of this string
|
||||
* representation is not specified.
|
||||
*/
|
||||
@Override
|
||||
public String toString() {
|
||||
return equivalence + ".wrap(" + reference + ")";
|
||||
}
|
||||
}
|
|
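A minimal sketch (not part of this commit; the class name is illustrative) of the intended use: wrapping JsonNodes with JsonNumEquals lets a HashSet treat 1 and 1.0 as the same element.

import com.fasterxml.jackson.databind.JsonNode;
import org.xbib.content.json.jackson.JsonLoader;
import org.xbib.content.json.jackson.JsonNumEquals;
import org.xbib.content.json.jackson.Wrapper;

import java.util.HashSet;
import java.util.Set;

public final class WrapperExample {
    public static void main(String[] args) throws Exception {
        Set<Wrapper<JsonNode>> set = new HashSet<>();
        set.add(new Wrapper<>(JsonNumEquals.getInstance(), JsonLoader.fromString("1")));

        // equals() and hashCode() are delegated to JsonNumEquals, so 1.0 matches 1.
        boolean seen = set.contains(
                new Wrapper<>(JsonNumEquals.getInstance(), JsonLoader.fromString("1.0")));
        System.out.println(seen); // true
    }
}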
@ -0,0 +1,4 @@
|
|||
/**
|
||||
* Classes for Jackson support.
|
||||
*/
|
||||
package org.xbib.content.json.jackson;
|
|
@ -0,0 +1,21 @@
|
|||
package org.xbib.content.json.mergepatch;
|
||||
|
||||
import com.fasterxml.jackson.databind.JsonNode;
|
||||
|
||||
/**
|
||||
* Merge patch whose content is a JSON array: applying it replaces the input entirely with the array, null elements removed.
|
||||
*/
|
||||
final class ArrayMergePatch extends JsonMergePatch {
|
||||
|
||||
private final JsonNode content;
|
||||
|
||||
ArrayMergePatch(final JsonNode content) {
|
||||
super(content);
|
||||
this.content = clearNulls(content);
|
||||
}
|
||||
|
||||
@Override
|
||||
public JsonNode apply(final JsonNode input) {
|
||||
return content;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,123 @@
|
|||
package org.xbib.content.json.mergepatch;
|
||||
|
||||
import com.fasterxml.jackson.core.JsonGenerator;
|
||||
import com.fasterxml.jackson.databind.JsonNode;
|
||||
import com.fasterxml.jackson.databind.JsonSerializable;
|
||||
import com.fasterxml.jackson.databind.SerializerProvider;
|
||||
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
|
||||
import com.fasterxml.jackson.databind.jsontype.TypeSerializer;
|
||||
import com.fasterxml.jackson.databind.node.ArrayNode;
|
||||
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
|
||||
import com.fasterxml.jackson.databind.node.ObjectNode;
|
||||
import org.xbib.content.json.jackson.JacksonUtils;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Iterator;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* Implementation of <a href="http://tools.ietf.org/html/draft-ietf-appsawg-json-merge-patch-02">JSON merge patch</a>.
|
||||
* Unlike JSON Patch, JSON Merge Patch only applies to JSON Objects or JSON
|
||||
* arrays.
|
||||
*/
|
||||
@JsonDeserialize(using = JsonMergePatchDeserializer.class)
|
||||
public abstract class JsonMergePatch implements JsonSerializable {
|
||||
|
||||
protected static final JsonNodeFactory FACTORY = JacksonUtils.nodeFactory();
|
||||
|
||||
protected final JsonNode origPatch;
|
||||
|
||||
/**
|
||||
* Protected constructor.
|
||||
*
|
||||
* Only necessary for serialization purposes. The patching process
|
||||
* itself never requires the full node to operate.
|
||||
*
|
||||
* @param node the original patch node
|
||||
*/
|
||||
protected JsonMergePatch(final JsonNode node) {
|
||||
origPatch = node;
|
||||
}
|
||||
|
||||
public static JsonMergePatch fromJson(final JsonNode input) {
|
||||
return input.isArray() ? new ArrayMergePatch(input)
|
||||
: new ObjectMergePatch(input);
|
||||
}
|
||||
|
||||
/**
|
||||
* Clear "null values" from a JSON value.
|
||||
* Non container values are unchanged. For arrays, null elements are
|
||||
* removed. From objects, members whose values are null are removed.
|
||||
* This method is recursive, therefore arrays within objects, or objects
|
||||
* within arrays, or arrays within arrays etc are also affected.
|
||||
*
|
||||
* @param node the original JSON value
|
||||
* @return a JSON value without null values (see description)
|
||||
*/
|
||||
protected static JsonNode clearNulls(final JsonNode node) {
|
||||
if (!node.isContainerNode()) {
|
||||
return node;
|
||||
}
|
||||
|
||||
return node.isArray() ? clearNullsFromArray(node)
|
||||
: clearNullsFromObject(node);
|
||||
}
|
||||
|
||||
private static JsonNode clearNullsFromArray(final JsonNode node) {
|
||||
final ArrayNode ret = FACTORY.arrayNode();
|
||||
|
||||
/*
|
||||
* Cycle through array elements. If the element is a null node itself,
|
||||
* skip it. Otherwise, add a "cleaned up" element to the result.
|
||||
*/
|
||||
for (final JsonNode element : node) {
|
||||
if (!element.isNull()) {
|
||||
ret.add(clearNulls(element));
|
||||
}
|
||||
}
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
private static JsonNode clearNullsFromObject(final JsonNode node) {
|
||||
final ObjectNode ret = FACTORY.objectNode();
|
||||
final Iterator<Map.Entry<String, JsonNode>> iterator
|
||||
= node.fields();
|
||||
|
||||
Map.Entry<String, JsonNode> entry;
|
||||
JsonNode value;
|
||||
|
||||
/*
|
||||
* When faced with an object, cycle through this object's entries.
|
||||
*
|
||||
* If the value of the entry is a JSON null, don't include it in the
|
||||
* result. If not, include a "cleaned up" value for this key instead of
|
||||
* the original element.
|
||||
*/
|
||||
while (iterator.hasNext()) {
|
||||
entry = iterator.next();
|
||||
value = entry.getValue();
|
||||
if (!value.isNull()) {
|
||||
ret.set(entry.getKey(), clearNulls(value));
|
||||
}
|
||||
}
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
public abstract JsonNode apply(final JsonNode input);
|
||||
|
||||
@Override
|
||||
public final void serialize(final JsonGenerator jgen,
|
||||
final SerializerProvider provider)
|
||||
throws IOException {
|
||||
jgen.writeTree(origPatch);
|
||||
}
|
||||
|
||||
@Override
|
||||
public final void serializeWithType(final JsonGenerator jgen,
|
||||
final SerializerProvider provider, final TypeSerializer typeSer)
|
||||
throws IOException {
|
||||
serialize(jgen, provider);
|
||||
}
|
||||
}
|
|
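A minimal usage sketch (not part of this commit; the class name is illustrative). The object patch exercises the documented semantics, null members remove keys and other members replace them, while the array patch simply replaces the input with the null-cleaned array; member order in the printed results may vary.

import com.fasterxml.jackson.databind.JsonNode;
import org.xbib.content.json.jackson.JsonLoader;
import org.xbib.content.json.mergepatch.JsonMergePatch;

public final class JsonMergePatchExample {
    public static void main(String[] args) throws Exception {
        // Object patch: "draft" is removed, "title" is replaced, "id" is untouched.
        JsonMergePatch patch = JsonMergePatch.fromJson(
                JsonLoader.fromString("{\"title\":\"Hello!\",\"draft\":null}"));
        JsonNode result = patch.apply(
                JsonLoader.fromString("{\"title\":\"Goodbye!\",\"draft\":true,\"id\":1}"));
        System.out.println(result); // expected: {"title":"Hello!","id":1}

        // Array patch: the input is replaced by the array, nulls stripped.
        JsonMergePatch arrayPatch = JsonMergePatch.fromJson(JsonLoader.fromString("[1, null, 2]"));
        System.out.println(arrayPatch.apply(result)); // expected: [1,2]
    }
}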
@ -0,0 +1,30 @@
|
|||
package org.xbib.content.json.mergepatch;
|
||||
|
||||
import com.fasterxml.jackson.core.JsonParser;
|
||||
import com.fasterxml.jackson.databind.DeserializationContext;
|
||||
import com.fasterxml.jackson.databind.JsonDeserializer;
|
||||
import com.fasterxml.jackson.databind.JsonNode;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
/**
|
||||
* Custom {@link com.fasterxml.jackson.databind.JsonDeserializer} for {@link JsonMergePatch} instances.
|
||||
* Unlike "real" JSON Patches (ie, as defined by RFC 6902), JSON merge patch
|
||||
* instances are "free form", they can be either JSON arrays or JSON objects
|
||||
* without any restriction on the contents; only the content itself may guide
|
||||
* the patching process (null elements in arrays, null values in objects).
|
||||
* Jackson does not provide a deserializer for such a case; we therefore
|
||||
* write our own here.
|
||||
*/
|
||||
public final class JsonMergePatchDeserializer extends JsonDeserializer<JsonMergePatch> {
|
||||
@Override
|
||||
public JsonMergePatch deserialize(final JsonParser jp, final DeserializationContext ctxt)
|
||||
throws IOException {
|
||||
final JsonNode node = jp.readValueAs(JsonNode.class);
|
||||
if (!node.isContainerNode()) {
|
||||
throw new IOException("expected either an array or an object");
|
||||
}
|
||||
return node.isArray() ? new ArrayMergePatch(node)
|
||||
: new ObjectMergePatch(node);
|
||||
}
|
||||
}
|
|
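Because JsonMergePatch is annotated with @JsonDeserialize(using = JsonMergePatchDeserializer.class), a plain ObjectMapper can read a merge patch directly. A hypothetical sketch (class name and sample JSON invented for illustration):

import com.fasterxml.jackson.databind.ObjectMapper;
import org.xbib.content.json.mergepatch.JsonMergePatch;

public class MergePatchDeserializationSketch {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        // the custom deserializer accepts any JSON object or array...
        JsonMergePatch patch = mapper.readValue("{\"obsolete\":null}", JsonMergePatch.class);
        // ...and rejects scalars: readValue("42", JsonMergePatch.class) would throw IOException
        System.out.println(mapper.writeValueAsString(patch)); // {"obsolete":null}
    }
}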
@ -0,0 +1,83 @@
|
|||
package org.xbib.content.json.mergepatch;
|
||||
|
||||
import com.fasterxml.jackson.databind.JsonNode;
|
||||
import com.fasterxml.jackson.databind.node.ObjectNode;
|
||||
import org.xbib.content.json.jackson.JacksonUtils;
|
||||
|
||||
import java.util.HashSet;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
/**
|
||||
* Merge patch for a JSON object.
|
||||
*
|
||||
* <p>This only takes care of the top level, and delegates to other {@link
|
||||
* JsonMergePatch} instances for deeper levels.</p>
|
||||
*/
|
||||
final class ObjectMergePatch extends JsonMergePatch {
|
||||
private final Map<String, JsonNode> fields;
|
||||
private final Set<String> removals;
|
||||
|
||||
ObjectMergePatch(final JsonNode content) {
|
||||
super(content);
|
||||
fields = JacksonUtils.asMap(content);
|
||||
removals = new HashSet<>();
|
||||
for (final Map.Entry<String, JsonNode> entry : fields.entrySet()) {
|
||||
if (entry.getValue().isNull()) {
|
||||
removals.add(entry.getKey());
|
||||
}
|
||||
}
|
||||
|
||||
fields.keySet().removeAll(removals);
|
||||
}
|
||||
|
||||
private static JsonNode mapToNode(final Map<String, JsonNode> map) {
|
||||
final ObjectNode ret = FACTORY.objectNode();
|
||||
return ret.setAll(map);
|
||||
}
|
||||
|
||||
@Override
|
||||
public JsonNode apply(final JsonNode input) {
|
||||
if (!input.isObject()) {
|
||||
return mapToNode(fields);
|
||||
}
|
||||
|
||||
final Map<String, JsonNode> map = JacksonUtils.asMap(input);
|
||||
|
||||
// Remove all entries which must be removed
|
||||
map.keySet().removeAll(removals);
|
||||
|
||||
// Now cycle through what is left
|
||||
String memberName;
|
||||
JsonNode patchNode;
|
||||
|
||||
for (final Map.Entry<String, JsonNode> entry : map.entrySet()) {
|
||||
memberName = entry.getKey();
|
||||
patchNode = fields.get(memberName);
|
||||
|
||||
// Leave untouched if no mention in the patch
|
||||
if (patchNode == null) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// If the patch node is a primitive type, replace in the result.
|
||||
// Reminder: there cannot be a JSON null anymore
|
||||
if (!patchNode.isContainerNode()) {
|
||||
entry.setValue(patchNode); // no need for .deepCopy()
|
||||
continue;
|
||||
}
|
||||
|
||||
final JsonMergePatch patch = JsonMergePatch.fromJson(patchNode);
|
||||
entry.setValue(patch.apply(entry.getValue()));
|
||||
}
|
||||
|
||||
// Finally, if there are members in the patch not present in the input,
|
||||
// fill in members
|
||||
fields.keySet().removeAll(map.keySet());
|
||||
for (Map.Entry<String, JsonNode> entry : fields.entrySet()) {
|
||||
map.put(entry.getKey(), clearNulls(entry.getValue()));
|
||||
}
|
||||
|
||||
return mapToNode(map);
|
||||
}
|
||||
}
|
|
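To illustrate the delegation to deeper levels described above, a hypothetical sketch (invented class name and data; output member order may vary):

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.xbib.content.json.mergepatch.JsonMergePatch;

public class NestedMergePatchSketch {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        JsonNode source = mapper.readTree("{\"user\":{\"name\":\"jane\",\"temp\":1}}");
        JsonNode patchNode = mapper.readTree("{\"user\":{\"temp\":null,\"age\":42}}");
        // the top level keeps "user"; the nested object is handled by a delegated merge patch
        JsonNode patched = JsonMergePatch.fromJson(patchNode).apply(source);
        System.out.println(patched); // {"user":{"name":"jane","age":42}}
    }
}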
@ -0,0 +1,4 @@
|
|||
/**
|
||||
* Classes for the JSON merge patch operation.
|
||||
*/
|
||||
package org.xbib.content.json.mergepatch;
|
|
@ -0,0 +1,105 @@
|
|||
package org.xbib.content.json.patch;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonCreator;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import com.fasterxml.jackson.databind.JsonNode;
|
||||
import com.fasterxml.jackson.databind.node.ArrayNode;
|
||||
import com.fasterxml.jackson.databind.node.ObjectNode;
|
||||
import org.xbib.content.json.pointer.JsonPointer;
|
||||
import org.xbib.content.json.pointer.ReferenceToken;
|
||||
import org.xbib.content.json.pointer.TokenResolver;
|
||||
|
||||
/**
|
||||
* JSON Patch {@code add} operation.
|
||||
* For this operation, {@code path} is the JSON Pointer where the value
|
||||
* should be added, and {@code value} is the value to add.
|
||||
*
|
||||
* Note that if the target value pointed to by {@code path} already exists,
|
||||
* it is replaced. In this case, {@code add} is equivalent to {@code replace}.
|
||||
*
|
||||
*
|
||||
* Note also that a value will be created at the target path <b>if and only
|
||||
* if</b> the immediate parent of that value exists (and is of the correct
|
||||
* type).
|
||||
*
|
||||
* Finally, if the last reference token of the JSON Pointer is {@code -} and
|
||||
* the immediate parent is an array, the given value is added at the end of the
|
||||
* array. For instance, applying:
|
||||
*
|
||||
* <pre>
|
||||
* { "op": "add", "path": "/-", "value": 3 }
|
||||
* </pre>
|
||||
*
|
||||
* to
|
||||
*
|
||||
* <pre>
|
||||
* [ 1, 2 ]
|
||||
* </pre>
|
||||
*
|
||||
* will give
|
||||
*
|
||||
* <pre>
|
||||
* [ 1, 2, 3 ]
|
||||
* </pre>
|
||||
*/
|
||||
public final class AddOperation extends PathValueOperation {
|
||||
|
||||
private static final ReferenceToken LAST_ARRAY_ELEMENT = ReferenceToken.fromRaw("-");
|
||||
|
||||
@JsonCreator
|
||||
public AddOperation(@JsonProperty("path") final JsonPointer path,
|
||||
@JsonProperty("value") final JsonNode value) {
|
||||
super("add", path, value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public JsonNode apply(final JsonNode node) throws JsonPatchException {
|
||||
if (path.isEmpty()) {
|
||||
return value;
|
||||
}
|
||||
|
||||
/*
|
||||
* Check the parent node: it must exist and be a container (i.e., an array
|
||||
* or an object) for the add operation to work.
|
||||
*/
|
||||
final JsonNode parentNode = path.parent().path(node);
|
||||
if (parentNode.isMissingNode()) {
|
||||
throw new JsonPatchException("no such parent");
|
||||
}
|
||||
if (!parentNode.isContainerNode()) {
|
||||
throw new JsonPatchException("parent not container");
|
||||
}
|
||||
return parentNode.isArray()
|
||||
? addToArray(path, node)
|
||||
: addToObject(path, node);
|
||||
}
|
||||
|
||||
private JsonNode addToArray(final JsonPointer path, final JsonNode node) throws JsonPatchException {
|
||||
final JsonNode ret = node.deepCopy();
|
||||
final ArrayNode target = (ArrayNode) path.parent().get(ret);
|
||||
final TokenResolver<JsonNode> token = path.getLast();
|
||||
if (token.getToken().equals(LAST_ARRAY_ELEMENT)) {
|
||||
target.add(value);
|
||||
return ret;
|
||||
}
|
||||
final int size = target.size();
|
||||
final int index;
|
||||
try {
|
||||
index = Integer.parseInt(token.toString());
|
||||
} catch (NumberFormatException ignored) {
|
||||
throw new JsonPatchException("not an index: " + token.toString());
|
||||
}
|
||||
if (index < 0 || index > size) {
|
||||
throw new JsonPatchException("no such index: " + index);
|
||||
}
|
||||
target.insert(index, value);
|
||||
return ret;
|
||||
}
|
||||
|
||||
private JsonNode addToObject(final JsonPointer path, final JsonNode node) {
|
||||
final JsonNode ret = node.deepCopy();
|
||||
final ObjectNode target = (ObjectNode) path.parent().get(ret);
|
||||
target.set(path.getLast().getToken().getRaw(), value);
|
||||
return ret;
|
||||
}
|
||||
}
|
|
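The javadoc example above can be exercised end to end through JsonPatch.fromJson(), defined later in this commit. A hypothetical sketch (class name invented):

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.xbib.content.json.patch.JsonPatch;

public class AddOperationSketch {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        JsonNode target = mapper.readTree("[ 1, 2 ]");
        JsonNode patch = mapper.readTree("[ { \"op\": \"add\", \"path\": \"/-\", \"value\": 3 } ]");
        // "-" as the last reference token appends to the parent array
        System.out.println(JsonPatch.fromJson(patch).apply(target)); // [1,2,3]
    }
}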
@ -0,0 +1,38 @@
|
|||
package org.xbib.content.json.patch;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonCreator;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import com.fasterxml.jackson.databind.JsonNode;
|
||||
import org.xbib.content.json.pointer.JsonPointer;
|
||||
|
||||
/**
|
||||
* JSON Patch {@code copy} operation.
|
||||
* For this operation, {@code from} is the JSON Pointer of the value to copy,
|
||||
* and {@code path} is the destination where the value should be copied.
|
||||
* As for {@code add}:
|
||||
*
|
||||
* <ul>
|
||||
* <li>the value at the destination path is either created or replaced;</li>
|
||||
* <li>it is created only if the immediate parent exists;</li>
|
||||
* <li>{@code -} appends at the end of an array.</li>
|
||||
* </ul>
|
||||
*
|
||||
* It is an error if {@code from} fails to resolve to a JSON value.
|
||||
*/
|
||||
public final class CopyOperation extends DualPathOperation {
|
||||
@JsonCreator
|
||||
public CopyOperation(@JsonProperty("from") final JsonPointer from,
|
||||
@JsonProperty("path") final JsonPointer path) {
|
||||
super("copy", from, path);
|
||||
}
|
||||
|
||||
@Override
|
||||
public JsonNode apply(final JsonNode node)
|
||||
throws JsonPatchException {
|
||||
final JsonNode dupData = from.path(node).deepCopy();
|
||||
if (dupData.isMissingNode()) {
|
||||
throw new JsonPatchException("no such path");
|
||||
}
|
||||
return new AddOperation(path, dupData).apply(node);
|
||||
}
|
||||
}
|
|
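A hypothetical sketch of the copy operation (class name and data invented), again driven through JsonPatch.fromJson():

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.xbib.content.json.patch.JsonPatch;

public class CopyOperationSketch {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        JsonNode target = mapper.readTree("{\"title\":\"Hello\"}");
        JsonNode patch = mapper.readTree(
            "[ { \"op\": \"copy\", \"from\": \"/title\", \"path\": \"/headline\" } ]");
        // the value at "from" is deep-copied, then added at "path"
        System.out.println(JsonPatch.fromJson(patch).apply(target));
        // {"title":"Hello","headline":"Hello"}
    }
}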
@ -0,0 +1,55 @@
|
|||
package org.xbib.content.json.patch;
|
||||
|
||||
import com.fasterxml.jackson.core.JsonGenerator;
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
import com.fasterxml.jackson.databind.SerializerProvider;
|
||||
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
|
||||
import com.fasterxml.jackson.databind.jsontype.TypeSerializer;
|
||||
import com.fasterxml.jackson.databind.ser.std.ToStringSerializer;
|
||||
import org.xbib.content.json.pointer.JsonPointer;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
/**
|
||||
* Base class for JSON Patch operations taking two JSON Pointers as arguments.
|
||||
*/
|
||||
public abstract class DualPathOperation extends JsonPatchOperation {
|
||||
@JsonSerialize(using = ToStringSerializer.class)
|
||||
protected final JsonPointer from;
|
||||
|
||||
/**
|
||||
* Protected constructor.
|
||||
*
|
||||
* @param op operation name
|
||||
* @param from source path
|
||||
* @param path destination path
|
||||
*/
|
||||
protected DualPathOperation(final String op, final JsonPointer from,
|
||||
final JsonPointer path) {
|
||||
super(op, path);
|
||||
this.from = from;
|
||||
}
|
||||
|
||||
@Override
|
||||
public final void serialize(final JsonGenerator jgen,
|
||||
final SerializerProvider provider)
|
||||
throws IOException, JsonProcessingException {
|
||||
jgen.writeStartObject();
|
||||
jgen.writeStringField("op", op);
|
||||
jgen.writeStringField("path", path.toString());
|
||||
jgen.writeStringField("from", from.toString());
|
||||
jgen.writeEndObject();
|
||||
}
|
||||
|
||||
@Override
|
||||
public final void serializeWithType(final JsonGenerator jgen,
|
||||
final SerializerProvider provider, final TypeSerializer typeSer)
|
||||
throws IOException, JsonProcessingException {
|
||||
serialize(jgen, provider);
|
||||
}
|
||||
|
||||
@Override
|
||||
public final String toString() {
|
||||
return "op: " + op + "; from: \"" + from + "\"; path: \"" + path + '"';
|
||||
}
|
||||
}
|
|
@ -0,0 +1,135 @@
|
|||
package org.xbib.content.json.patch;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonCreator;
|
||||
import com.fasterxml.jackson.core.JsonGenerator;
|
||||
import com.fasterxml.jackson.databind.JsonNode;
|
||||
import com.fasterxml.jackson.databind.JsonSerializable;
|
||||
import com.fasterxml.jackson.databind.SerializerProvider;
|
||||
import com.fasterxml.jackson.databind.jsontype.TypeSerializer;
|
||||
import org.xbib.content.json.jackson.JacksonUtils;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* Implementation of JSON Patch.
|
||||
* <a href="http://tools.ietf.org/html/draft-ietf-appsawg-json-patch-10">JSON
|
||||
* Patch</a>, as its name implies, is an IETF specification (now RFC 6902) describing a mechanism to
|
||||
* apply a patch to any JSON value. This implementation covers all operations
|
||||
* according to the specification; however, there are some subtle differences
|
||||
* with regard to some operations, which are covered in these operations'
|
||||
* respective documentation.
|
||||
* An example of a JSON Patch is as follows:
|
||||
* <code>
|
||||
* [
|
||||
* {
|
||||
* "op": "add",
|
||||
* "path": "/-",
|
||||
* "value": {
|
||||
* "productId": 19,
|
||||
* "name": "Duvel",
|
||||
* "type": "beer"
|
||||
* }
|
||||
* }
|
||||
* ]
|
||||
* </code>
|
||||
* This patch contains a single operation which adds an item at the end of
|
||||
* an array. A JSON Patch can contain more than one operation; in this case, all
|
||||
* operations are applied to the input JSON value in their order of appearance,
|
||||
* until all operations are applied or an error condition is encountered.
|
||||
* The main point where this implementation differs from the specification
|
||||
* is initial JSON parsing. The draft says:
|
||||
*
|
||||
* <pre>
|
||||
* Operation objects MUST have exactly one "op" member
|
||||
* </pre>
|
||||
*
|
||||
* and
|
||||
*
|
||||
* <pre>
|
||||
* Additionally, operation objects MUST have exactly one "path" member.
|
||||
* </pre>
|
||||
*
|
||||
* However, obeying these to the letter forces constraints on the JSON
|
||||
* parser. Here, these constraints are not enforced, which means:
|
||||
*
|
||||
* <pre>
|
||||
* [ { "op": "add", "op": "remove", "path": "/x" } ]
|
||||
* </pre>
|
||||
*
|
||||
* is parsed (as a {@code remove} operation, since it appears last).
|
||||
* IMPORTANT NOTE: the JSON Patch is supposed to be VALID when the
|
||||
* factory method of this class ({@link JsonPatch#fromJson(com.fasterxml.jackson.databind.JsonNode)}) is used.
|
||||
*/
|
||||
public final class JsonPatch
|
||||
implements JsonSerializable {
|
||||
/**
|
||||
* List of operations.
|
||||
*/
|
||||
private final List<JsonPatchOperation> operations;
|
||||
|
||||
/**
|
||||
* Package-visible constructor.
|
||||
*
|
||||
* Visible only for testing purposes. Also used for deserialization.
|
||||
*
|
||||
* @param operations the list of operations for this patch
|
||||
* @see JsonPatchOperation
|
||||
*/
|
||||
@JsonCreator
|
||||
JsonPatch(final List<JsonPatchOperation> operations) {
|
||||
this.operations = new ArrayList<>(operations);
|
||||
}
|
||||
|
||||
/**
|
||||
* Static factory method to build a JSON Patch out of a JSON representation.
|
||||
*
|
||||
* @param node the JSON representation of the generated JSON Patch
|
||||
* @return a JSON Patch
|
||||
* @throws java.io.IOException if the input is not a valid JSON Patch
|
||||
*/
|
||||
public static JsonPatch fromJson(final JsonNode node) throws IOException {
|
||||
return JacksonUtils.getReader().forType(JsonPatch.class)
|
||||
.readValue(node);
|
||||
}
|
||||
|
||||
/**
|
||||
* Apply this patch to a JSON value.
|
||||
*
|
||||
* @param node the value to apply the patch to
|
||||
* @return the patched JSON value
|
||||
* @throws JsonPatchException if patch fails
|
||||
*/
|
||||
public JsonNode apply(final JsonNode node) throws JsonPatchException {
|
||||
JsonNode ret = node;
|
||||
for (final JsonPatchOperation operation : operations) {
|
||||
ret = operation.apply(ret);
|
||||
}
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return operations.toString();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void serialize(final JsonGenerator jgen,
|
||||
final SerializerProvider provider)
|
||||
throws IOException {
|
||||
jgen.writeStartArray();
|
||||
for (final JsonPatchOperation op : operations) {
|
||||
op.serialize(jgen, provider);
|
||||
}
|
||||
jgen.writeEndArray();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void serializeWithType(final JsonGenerator jgen,
|
||||
final SerializerProvider provider, final TypeSerializer typeSer)
|
||||
throws IOException {
|
||||
serialize(jgen, provider);
|
||||
}
|
||||
}
|
|
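A hypothetical end-to-end sketch of the class above, applying two operations in order (class name and data invented; the result follows from the operation implementations in this commit):

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.xbib.content.json.patch.JsonPatch;

public class JsonPatchSketch {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        JsonNode target = mapper.readTree("{\"name\":\"old\",\"obsolete\":true}");
        JsonNode patchNode = mapper.readTree(
            "[ { \"op\": \"replace\", \"path\": \"/name\", \"value\": \"new\" },"
            + " { \"op\": \"remove\", \"path\": \"/obsolete\" } ]");
        JsonPatch patch = JsonPatch.fromJson(patchNode);
        // operations are applied in their order of appearance
        System.out.println(patch.apply(target)); // {"name":"new"}
    }
}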
@ -0,0 +1,12 @@
|
|||
package org.xbib.content.json.patch;
|
||||
|
||||
/**
|
||||
* Exception thrown when a JSON Patch operation cannot be applied.
|
||||
*/
|
||||
public final class JsonPatchException extends Exception {
|
||||
private static final long serialVersionUID = -7086990008150217950L;
|
||||
|
||||
public JsonPatchException(final String message) {
|
||||
super(message);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,70 @@
|
|||
package org.xbib.content.json.patch;
|
||||
|
||||
import static com.fasterxml.jackson.annotation.JsonSubTypes.Type;
|
||||
import static com.fasterxml.jackson.annotation.JsonTypeInfo.As;
|
||||
import static com.fasterxml.jackson.annotation.JsonTypeInfo.Id;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
|
||||
import com.fasterxml.jackson.annotation.JsonSubTypes;
|
||||
import com.fasterxml.jackson.annotation.JsonTypeInfo;
|
||||
import com.fasterxml.jackson.databind.JsonNode;
|
||||
import com.fasterxml.jackson.databind.JsonSerializable;
|
||||
import org.xbib.content.json.pointer.JsonPointer;
|
||||
|
||||
/**
|
||||
* Base abstract class for a single JSON Patch operation.
|
||||
*
|
||||
* <p>Two more abstract classes extend this one according to the arguments of
|
||||
* the operation:</p>
|
||||
*
|
||||
* <ul>
|
||||
* <li>{@link DualPathOperation} for operations taking a second pointer as
|
||||
* an argument ({@code copy} and {@code move});</li>
|
||||
* <li>{@link PathValueOperation} for operations taking a value as an
|
||||
* argument ({@code add}, {@code replace} and {@code test}).</li>
|
||||
* </ul>
|
||||
*/
|
||||
@JsonTypeInfo(use = Id.NAME, include = As.PROPERTY, property = "op")
|
||||
@JsonSubTypes({
|
||||
@Type(name = "add", value = AddOperation.class),
|
||||
@Type(name = "copy", value = CopyOperation.class),
|
||||
@Type(name = "move", value = MoveOperation.class),
|
||||
@Type(name = "remove", value = RemoveOperation.class),
|
||||
@Type(name = "replace", value = ReplaceOperation.class),
|
||||
@Type(name = "test", value = TestOperation.class)
|
||||
})
|
||||
@JsonIgnoreProperties(ignoreUnknown = true)
|
||||
public abstract class JsonPatchOperation implements JsonSerializable {
|
||||
protected final String op;
|
||||
|
||||
/*
|
||||
* Note: no need for a custom deserializer; Jackson will try to find a
|
||||
* constructor with a single string argument and use it.
|
||||
*
|
||||
* However, we need to serialize using .toString().
|
||||
*/
|
||||
protected final JsonPointer path;
|
||||
|
||||
/**
|
||||
* Constructor.
|
||||
*
|
||||
* @param op the operation name
|
||||
* @param path the JSON Pointer for this operation
|
||||
*/
|
||||
protected JsonPatchOperation(final String op, final JsonPointer path) {
|
||||
this.op = op;
|
||||
this.path = path;
|
||||
}
|
||||
|
||||
/**
|
||||
* Apply this operation to a JSON value.
|
||||
*
|
||||
* @param node the value to patch
|
||||
* @return the patched value
|
||||
* @throws JsonPatchException if patch fails
|
||||
*/
|
||||
public abstract JsonNode apply(final JsonNode node) throws JsonPatchException;
|
||||
|
||||
@Override
|
||||
public abstract String toString();
|
||||
}
|
|
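The @JsonTypeInfo/@JsonSubTypes declarations above let Jackson pick the concrete operation class from the "op" member. A hypothetical sketch (class name invented; the exact printed form depends on the concrete operation's toString() and JsonPointer's toString()):

import com.fasterxml.jackson.databind.ObjectMapper;
import org.xbib.content.json.patch.JsonPatchOperation;

public class OperationBindingSketch {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        // "op": "remove" selects RemoveOperation through the subtype mapping
        JsonPatchOperation op = mapper.readValue(
            "{ \"op\": \"remove\", \"path\": \"/obsolete\" }", JsonPatchOperation.class);
        System.out.println(op); // prints something like: op: remove; path: "/obsolete"
    }
}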
@ -0,0 +1,56 @@
|
|||
package org.xbib.content.json.patch;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonCreator;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import com.fasterxml.jackson.databind.JsonNode;
|
||||
import org.xbib.content.json.pointer.JsonPointer;
|
||||
|
||||
/**
|
||||
* JSON Patch {@code move} operation.
|
||||
* For this operation, {@code from} points to the value to move, and {@code
|
||||
* path} points to the new location of the moved value.
|
||||
* As for {@code add}:
|
||||
* <ul>
|
||||
* <li>the value at the destination path is either created or replaced;</li>
|
||||
* <li>it is created only if the immediate parent exists;</li>
|
||||
* <li>{@code -} appends at the end of an array.</li>
|
||||
* </ul>
|
||||
* It is an error condition if {@code from} does not point to a JSON value.
|
||||
* The specification adds another rule that the {@code from} path must not be
|
||||
* an immediate parent of {@code path}. This implementation does not enforce that rule, since such a move can still succeed.
|
||||
* Consider this patch:
|
||||
* <pre>
|
||||
* { "op": "move", "from": "/0", "path": "/0/x" }
|
||||
* </pre>
|
||||
* Even though {@code /0} is an immediate parent of {@code /0/x}, when this
|
||||
* patch is applied to:
|
||||
* <pre>
|
||||
* [ "victim", {} ]
|
||||
* </pre>
|
||||
* it actually succeeds and results in the patched value:
|
||||
* <pre>
|
||||
* [ { "x": "victim" } ]
|
||||
* </pre>
|
||||
*/
|
||||
public final class MoveOperation extends DualPathOperation {
|
||||
|
||||
@JsonCreator
|
||||
public MoveOperation(@JsonProperty("from") final JsonPointer from,
|
||||
@JsonProperty("path") final JsonPointer path) {
|
||||
super("move", from, path);
|
||||
}
|
||||
|
||||
@Override
|
||||
public JsonNode apply(final JsonNode node) throws JsonPatchException {
|
||||
if (from.equals(path)) {
|
||||
return node.deepCopy();
|
||||
}
|
||||
final JsonNode movedNode = from.path(node);
|
||||
if (movedNode.isMissingNode()) {
|
||||
throw new JsonPatchException("no such path: " + from);
|
||||
}
|
||||
final JsonPatchOperation remove = new RemoveOperation(from);
|
||||
final JsonPatchOperation add = new AddOperation(path, movedNode);
|
||||
return add.apply(remove.apply(node));
|
||||
}
|
||||
}
|
|
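The move example from the javadoc above, as a hypothetical runnable sketch (class name invented):

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.xbib.content.json.patch.JsonPatch;

public class MoveOperationSketch {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        JsonNode target = mapper.readTree("[ \"victim\", {} ]");
        JsonNode patch = mapper.readTree(
            "[ { \"op\": \"move\", \"from\": \"/0\", \"path\": \"/0/x\" } ]");
        // implemented as a remove of "from" followed by an add at "path"
        System.out.println(JsonPatch.fromJson(patch).apply(target)); // [{"x":"victim"}]
    }
}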
@ -0,0 +1,57 @@
|
|||
package org.xbib.content.json.patch;
|
||||
|
||||
import com.fasterxml.jackson.core.JsonGenerator;
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
import com.fasterxml.jackson.databind.JsonNode;
|
||||
import com.fasterxml.jackson.databind.SerializerProvider;
|
||||
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
|
||||
import com.fasterxml.jackson.databind.jsontype.TypeSerializer;
|
||||
import org.xbib.content.json.pointer.JsonPointer;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
/**
|
||||
* Base class for patch operations taking a value in addition to a path.
|
||||
*/
|
||||
public abstract class PathValueOperation
|
||||
extends JsonPatchOperation {
|
||||
@JsonSerialize
|
||||
protected final JsonNode value;
|
||||
|
||||
/**
|
||||
* Protected constructor.
|
||||
*
|
||||
* @param op operation name
|
||||
* @param path affected path
|
||||
* @param value JSON value
|
||||
*/
|
||||
protected PathValueOperation(final String op, final JsonPointer path,
|
||||
final JsonNode value) {
|
||||
super(op, path);
|
||||
this.value = value.deepCopy();
|
||||
}
|
||||
|
||||
@Override
|
||||
public final void serialize(final JsonGenerator jgen,
|
||||
final SerializerProvider provider)
|
||||
throws IOException {
|
||||
jgen.writeStartObject();
|
||||
jgen.writeStringField("op", op);
|
||||
jgen.writeStringField("path", path.toString());
|
||||
jgen.writeFieldName("value");
|
||||
jgen.writeTree(value);
|
||||
jgen.writeEndObject();
|
||||
}
|
||||
|
||||
@Override
|
||||
public final void serializeWithType(final JsonGenerator jgen,
|
||||
final SerializerProvider provider, final TypeSerializer typeSer)
|
||||
throws IOException, JsonProcessingException {
|
||||
serialize(jgen, provider);
|
||||
}
|
||||
|
||||
@Override
|
||||
public final String toString() {
|
||||
return "op: " + op + "; path: \"" + path + "\"; value: " + value;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,69 @@
|
|||
package org.xbib.content.json.patch;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonCreator;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import com.fasterxml.jackson.core.JsonGenerator;
|
||||
import com.fasterxml.jackson.databind.JsonNode;
|
||||
import com.fasterxml.jackson.databind.SerializerProvider;
|
||||
import com.fasterxml.jackson.databind.jsontype.TypeSerializer;
|
||||
import com.fasterxml.jackson.databind.node.ArrayNode;
|
||||
import com.fasterxml.jackson.databind.node.MissingNode;
|
||||
import com.fasterxml.jackson.databind.node.ObjectNode;
|
||||
import org.xbib.content.json.pointer.JsonPointer;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
/**
|
||||
* JSON Patch {@code remove} operation.
|
||||
* This operation only takes one pointer ({@code path}) as an argument. It
|
||||
* is an error condition if no JSON value exists at that pointer.
|
||||
*/
|
||||
public final class RemoveOperation
|
||||
extends JsonPatchOperation {
|
||||
@JsonCreator
|
||||
public RemoveOperation(@JsonProperty("path") final JsonPointer path) {
|
||||
super("remove", path);
|
||||
}
|
||||
|
||||
@Override
|
||||
public JsonNode apply(final JsonNode node)
|
||||
throws JsonPatchException {
|
||||
if (path.isEmpty()) {
|
||||
return MissingNode.getInstance();
|
||||
}
|
||||
if (path.path(node).isMissingNode()) {
|
||||
throw new JsonPatchException("no such path");
|
||||
}
|
||||
final JsonNode ret = node.deepCopy();
|
||||
final JsonNode parentNode = path.parent().get(ret);
|
||||
final String raw = path.getLast().getToken().getRaw();
|
||||
if (parentNode.isObject()) {
|
||||
((ObjectNode) parentNode).remove(raw);
|
||||
} else {
|
||||
((ArrayNode) parentNode).remove(Integer.parseInt(raw));
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void serialize(final JsonGenerator jgen,
|
||||
final SerializerProvider provider)
|
||||
throws IOException {
|
||||
jgen.writeStartObject();
|
||||
jgen.writeStringField("op", "remove");
|
||||
jgen.writeStringField("path", path.toString());
|
||||
jgen.writeEndObject();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void serializeWithType(final JsonGenerator jgen,
|
||||
final SerializerProvider provider, final TypeSerializer typeSer)
|
||||
throws IOException {
|
||||
serialize(jgen, provider);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "op: " + op + "; path: \"" + path + '"';
|
||||
}
|
||||
}
|
|
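A hypothetical sketch of the remove operation on an array element (class name and data invented):

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.xbib.content.json.patch.JsonPatch;

public class RemoveOperationSketch {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        JsonNode target = mapper.readTree("[ \"a\", \"b\", \"c\" ]");
        JsonNode patch = mapper.readTree("[ { \"op\": \"remove\", \"path\": \"/1\" } ]");
        // removing an array index shifts the remaining elements left
        System.out.println(JsonPatch.fromJson(patch).apply(target)); // ["a","c"]
    }
}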
@ -0,0 +1,45 @@
|
|||
package org.xbib.content.json.patch;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonCreator;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
import com.fasterxml.jackson.databind.JsonNode;
|
||||
import com.fasterxml.jackson.databind.node.ArrayNode;
|
||||
import com.fasterxml.jackson.databind.node.ObjectNode;
|
||||
import org.xbib.content.json.pointer.JsonPointer;
|
||||
|
||||
/**
|
||||
* JSON Patch {@code replace} operation
|
||||
* For this operation, {@code path} points to the value to replace, and
|
||||
* {@code value} is the replacement value.
|
||||
* It is an error condition if {@code path} does not point to an actual JSON
|
||||
* value.
|
||||
*/
|
||||
public final class ReplaceOperation
|
||||
extends PathValueOperation {
|
||||
@JsonCreator
|
||||
public ReplaceOperation(@JsonProperty("path") final JsonPointer path,
|
||||
@JsonProperty("value") final JsonNode value) {
|
||||
super("replace", path, value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public JsonNode apply(final JsonNode node)
|
||||
throws JsonPatchException {
|
||||
if (path.path(node).isMissingNode()) {
|
||||
throw new JsonPatchException("no such path");
|
||||
}
|
||||
final JsonNode replacement = value.deepCopy();
|
||||
if (path.isEmpty()) {
|
||||
return replacement;
|
||||
}
|
||||
final JsonNode ret = node.deepCopy();
|
||||
final JsonNode parent = path.parent().get(ret);
|
||||
final String rawToken = path.getLast().getToken().getRaw();
|
||||
if (parent.isObject()) {
|
||||
((ObjectNode) parent).set(rawToken, replacement);
|
||||
} else {
|
||||
((ArrayNode) parent).set(Integer.parseInt(rawToken), replacement);
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
}
|
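A hypothetical sketch of the replace operation on a nested member (class name and data invented):

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.xbib.content.json.patch.JsonPatch;

public class ReplaceOperationSketch {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        JsonNode target = mapper.readTree("{\"settings\":{\"limit\":10}}");
        JsonNode patch = mapper.readTree(
            "[ { \"op\": \"replace\", \"path\": \"/settings/limit\", \"value\": 20 } ]");
        // the path must already resolve to a value; otherwise a JsonPatchException is thrown
        System.out.println(JsonPatch.fromJson(patch).apply(target)); // {"settings":{"limit":20}}
    }
}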