initial commit
commit 83e75bec05
158 changed files with 18850 additions and 0 deletions
14
.gitignore
vendored
Normal file
@@ -0,0 +1,14 @@
/.settings
/.classpath
/.project
/.gradle
**/data
**/work
**/logs
**/.idea
**/target
**/out
**/build
.DS_Store
*.iml
*~
202
LICENSE.txt
Normal file
@@ -0,0 +1,202 @@
|
||||||
|
|
||||||
|
Apache License
|
||||||
|
Version 2.0, January 2004
|
||||||
|
http://www.apache.org/licenses/
|
||||||
|
|
||||||
|
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||||
|
|
||||||
|
1. Definitions.
|
||||||
|
|
||||||
|
"License" shall mean the terms and conditions for use, reproduction,
|
||||||
|
and distribution as defined by Sections 1 through 9 of this document.
|
||||||
|
|
||||||
|
"Licensor" shall mean the copyright owner or entity authorized by
|
||||||
|
the copyright owner that is granting the License.
|
||||||
|
|
||||||
|
"Legal Entity" shall mean the union of the acting entity and all
|
||||||
|
other entities that control, are controlled by, or are under common
|
||||||
|
control with that entity. For the purposes of this definition,
|
||||||
|
"control" means (i) the power, direct or indirect, to cause the
|
||||||
|
direction or management of such entity, whether by contract or
|
||||||
|
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||||
|
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||||
|
|
||||||
|
"You" (or "Your") shall mean an individual or Legal Entity
|
||||||
|
exercising permissions granted by this License.
|
||||||
|
|
||||||
|
"Source" form shall mean the preferred form for making modifications,
|
||||||
|
including but not limited to software source code, documentation
|
||||||
|
source, and configuration files.
|
||||||
|
|
||||||
|
"Object" form shall mean any form resulting from mechanical
|
||||||
|
transformation or translation of a Source form, including but
|
||||||
|
not limited to compiled object code, generated documentation,
|
||||||
|
and conversions to other media types.
|
||||||
|
|
||||||
|
"Work" shall mean the work of authorship, whether in Source or
|
||||||
|
Object form, made available under the License, as indicated by a
|
||||||
|
copyright notice that is included in or attached to the work
|
||||||
|
(an example is provided in the Appendix below).
|
||||||
|
|
||||||
|
"Derivative Works" shall mean any work, whether in Source or Object
|
||||||
|
form, that is based on (or derived from) the Work and for which the
|
||||||
|
editorial revisions, annotations, elaborations, or other modifications
|
||||||
|
represent, as a whole, an original work of authorship. For the purposes
|
||||||
|
of this License, Derivative Works shall not include works that remain
|
||||||
|
separable from, or merely link (or bind by name) to the interfaces of,
|
||||||
|
the Work and Derivative Works thereof.
|
||||||
|
|
||||||
|
"Contribution" shall mean any work of authorship, including
|
||||||
|
the original version of the Work and any modifications or additions
|
||||||
|
to that Work or Derivative Works thereof, that is intentionally
|
||||||
|
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||||
|
or by an individual or Legal Entity authorized to submit on behalf of
|
||||||
|
the copyright owner. For the purposes of this definition, "submitted"
|
||||||
|
means any form of electronic, verbal, or written communication sent
|
||||||
|
to the Licensor or its representatives, including but not limited to
|
||||||
|
communication on electronic mailing lists, source code control systems,
|
||||||
|
and issue tracking systems that are managed by, or on behalf of, the
|
||||||
|
Licensor for the purpose of discussing and improving the Work, but
|
||||||
|
excluding communication that is conspicuously marked or otherwise
|
||||||
|
designated in writing by the copyright owner as "Not a Contribution."
|
||||||
|
|
||||||
|
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||||
|
on behalf of whom a Contribution has been received by Licensor and
|
||||||
|
subsequently incorporated within the Work.
|
||||||
|
|
||||||
|
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||||
|
this License, each Contributor hereby grants to You a perpetual,
|
||||||
|
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||||
|
copyright license to reproduce, prepare Derivative Works of,
|
||||||
|
publicly display, publicly perform, sublicense, and distribute the
|
||||||
|
Work and such Derivative Works in Source or Object form.
|
||||||
|
|
||||||
|
3. Grant of Patent License. Subject to the terms and conditions of
|
||||||
|
this License, each Contributor hereby grants to You a perpetual,
|
||||||
|
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||||
|
(except as stated in this section) patent license to make, have made,
|
||||||
|
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||||
|
where such license applies only to those patent claims licensable
|
||||||
|
by such Contributor that are necessarily infringed by their
|
||||||
|
Contribution(s) alone or by combination of their Contribution(s)
|
||||||
|
with the Work to which such Contribution(s) was submitted. If You
|
||||||
|
institute patent litigation against any entity (including a
|
||||||
|
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||||
|
or a Contribution incorporated within the Work constitutes direct
|
||||||
|
or contributory patent infringement, then any patent licenses
|
||||||
|
granted to You under this License for that Work shall terminate
|
||||||
|
as of the date such litigation is filed.
|
||||||
|
|
||||||
|
4. Redistribution. You may reproduce and distribute copies of the
|
||||||
|
Work or Derivative Works thereof in any medium, with or without
|
||||||
|
modifications, and in Source or Object form, provided that You
|
||||||
|
meet the following conditions:
|
||||||
|
|
||||||
|
(a) You must give any other recipients of the Work or
|
||||||
|
Derivative Works a copy of this License; and
|
||||||
|
|
||||||
|
(b) You must cause any modified files to carry prominent notices
|
||||||
|
stating that You changed the files; and
|
||||||
|
|
||||||
|
(c) You must retain, in the Source form of any Derivative Works
|
||||||
|
that You distribute, all copyright, patent, trademark, and
|
||||||
|
attribution notices from the Source form of the Work,
|
||||||
|
excluding those notices that do not pertain to any part of
|
||||||
|
the Derivative Works; and
|
||||||
|
|
||||||
|
(d) If the Work includes a "NOTICE" text file as part of its
|
||||||
|
distribution, then any Derivative Works that You distribute must
|
||||||
|
include a readable copy of the attribution notices contained
|
||||||
|
within such NOTICE file, excluding those notices that do not
|
||||||
|
pertain to any part of the Derivative Works, in at least one
|
||||||
|
of the following places: within a NOTICE text file distributed
|
||||||
|
as part of the Derivative Works; within the Source form or
|
||||||
|
documentation, if provided along with the Derivative Works; or,
|
||||||
|
within a display generated by the Derivative Works, if and
|
||||||
|
wherever such third-party notices normally appear. The contents
|
||||||
|
of the NOTICE file are for informational purposes only and
|
||||||
|
do not modify the License. You may add Your own attribution
|
||||||
|
notices within Derivative Works that You distribute, alongside
|
||||||
|
or as an addendum to the NOTICE text from the Work, provided
|
||||||
|
that such additional attribution notices cannot be construed
|
||||||
|
as modifying the License.
|
||||||
|
|
||||||
|
You may add Your own copyright statement to Your modifications and
|
||||||
|
may provide additional or different license terms and conditions
|
||||||
|
for use, reproduction, or distribution of Your modifications, or
|
||||||
|
for any such Derivative Works as a whole, provided Your use,
|
||||||
|
reproduction, and distribution of the Work otherwise complies with
|
||||||
|
the conditions stated in this License.
|
||||||
|
|
||||||
|
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||||
|
any Contribution intentionally submitted for inclusion in the Work
|
||||||
|
by You to the Licensor shall be under the terms and conditions of
|
||||||
|
this License, without any additional terms or conditions.
|
||||||
|
Notwithstanding the above, nothing herein shall supersede or modify
|
||||||
|
the terms of any separate license agreement you may have executed
|
||||||
|
with Licensor regarding such Contributions.
|
||||||
|
|
||||||
|
6. Trademarks. This License does not grant permission to use the trade
|
||||||
|
names, trademarks, service marks, or product names of the Licensor,
|
||||||
|
except as required for reasonable and customary use in describing the
|
||||||
|
origin of the Work and reproducing the content of the NOTICE file.
|
||||||
|
|
||||||
|
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||||
|
agreed to in writing, Licensor provides the Work (and each
|
||||||
|
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||||
|
implied, including, without limitation, any warranties or conditions
|
||||||
|
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||||
|
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||||
|
appropriateness of using or redistributing the Work and assume any
|
||||||
|
risks associated with Your exercise of permissions under this License.
|
||||||
|
|
||||||
|
8. Limitation of Liability. In no event and under no legal theory,
|
||||||
|
whether in tort (including negligence), contract, or otherwise,
|
||||||
|
unless required by applicable law (such as deliberate and grossly
|
||||||
|
negligent acts) or agreed to in writing, shall any Contributor be
|
||||||
|
liable to You for damages, including any direct, indirect, special,
|
||||||
|
incidental, or consequential damages of any character arising as a
|
||||||
|
result of this License or out of the use or inability to use the
|
||||||
|
Work (including but not limited to damages for loss of goodwill,
|
||||||
|
work stoppage, computer failure or malfunction, or any and all
|
||||||
|
other commercial damages or losses), even if such Contributor
|
||||||
|
has been advised of the possibility of such damages.
|
||||||
|
|
||||||
|
9. Accepting Warranty or Additional Liability. While redistributing
|
||||||
|
the Work or Derivative Works thereof, You may choose to offer,
|
||||||
|
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||||
|
or other liability obligations and/or rights consistent with this
|
||||||
|
License. However, in accepting such obligations, You may act only
|
||||||
|
on Your own behalf and on Your sole responsibility, not on behalf
|
||||||
|
of any other Contributor, and only if You agree to indemnify,
|
||||||
|
defend, and hold each Contributor harmless for any liability
|
||||||
|
incurred by, or claims asserted against, such Contributor by reason
|
||||||
|
of your accepting any such warranty or additional liability.
|
||||||
|
|
||||||
|
END OF TERMS AND CONDITIONS
|
||||||
|
|
||||||
|
APPENDIX: How to apply the Apache License to your work.
|
||||||
|
|
||||||
|
To apply the Apache License to your work, attach the following
|
||||||
|
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||||
|
replaced with your own identifying information. (Don't include
|
||||||
|
the brackets!) The text should be enclosed in the appropriate
|
||||||
|
comment syntax for the file format. We also recommend that a
|
||||||
|
file or class name and description of purpose be included on the
|
||||||
|
same "printed page" as the copyright notice for easier
|
||||||
|
identification within third-party archives.
|
||||||
|
|
||||||
|
Copyright [yyyy] [name of copyright owner]
|
||||||
|
|
||||||
|
Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
you may not use this file except in compliance with the License.
|
||||||
|
You may obtain a copy of the License at
|
||||||
|
|
||||||
|
http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
|
||||||
|
Unless required by applicable law or agreed to in writing, software
|
||||||
|
distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
See the License for the specific language governing permissions and
|
||||||
|
limitations under the License.
|
40
build.gradle
Normal file
@@ -0,0 +1,40 @@
plugins {
    id "de.marcphilipp.nexus-publish" version "0.4.0"
    id "io.codearte.nexus-staging" version "0.21.1"
}

wrapper {
    gradleVersion = "${project.property('gradle.wrapper.version')}"
    distributionType = Wrapper.DistributionType.ALL
}
ext {
    user = 'jprante'
    name = 'event'
    description = 'Event framework for Java (NIO paths, files, timers, journals)'
    inceptionYear = '2021'
    url = 'https://github.com/' + user + '/' + name
    scmUrl = 'https://github.com/' + user + '/' + name
    scmConnection = 'scm:git:git://github.com/' + user + '/' + name + '.git'
    scmDeveloperConnection = 'scm:git:ssh://git@github.com:' + user + '/' + name + '.git'
    issueManagementSystem = 'Github'
    issueManagementUrl = ext.scmUrl + '/issues'
    licenseName = 'The Apache License, Version 2.0'
    licenseUrl = 'http://www.apache.org/licenses/LICENSE-2.0.txt'
}

apply plugin: 'java-library'
apply from: rootProject.file('gradle/ide/idea.gradle')
apply from: rootProject.file('gradle/compile/java.gradle')
apply from: rootProject.file('gradle/test/junit5.gradle')
apply from: rootProject.file('gradle/repositories/maven.gradle')
apply from: rootProject.file('gradle/publishing/publication.gradle')
apply from: rootProject.file('gradle/publishing/sonatype.gradle')

dependencies {
    api "org.xbib:settings-api:${project.property('xbib-content.version')}"
    implementation "org.xbib:guava:${project.property('xbib-guava.version')}"
    implementation "org.xbib:time:${project.property('xbib-time.version')}"
    implementation "org.xbib:datastructures-json-tiny:${project.property('xbib-datastructures.version')}"
    implementation "org.reactivestreams:reactive-streams:${project.property('reactivestreams.version')}"
    testImplementation "io.reactivex.rxjava3:rxjava:${project.property('rxjava3.version')}"
}
12
gradle.properties
Normal file
@@ -0,0 +1,12 @@
group = org.xbib
name = event
version = 0.0.1

org.gradle.warning.mode = ALL
gradle.wrapper.version = 7.3.2
xbib-guava.version = 30.1
xbib-content.version = 4.0.0
xbib-time.version = 2.1.1
xbib-datastructures.version = 1.0.0
reactivestreams.version = 1.0.3
rxjava3.version = 3.0.3
43
gradle/compile/java.gradle
Normal file
@@ -0,0 +1,43 @@

apply plugin: 'java-library'

java {
    modularity.inferModulePath.set(true)
}

compileJava {
    sourceCompatibility = JavaVersion.VERSION_11
    targetCompatibility = JavaVersion.VERSION_11
}

compileTestJava {
    sourceCompatibility = JavaVersion.VERSION_11
    targetCompatibility = JavaVersion.VERSION_11
}

jar {
    manifest {
        attributes('Implementation-Version': project.version)
    }
}

task sourcesJar(type: Jar, dependsOn: classes) {
    classifier 'sources'
    from sourceSets.main.allSource
}

task javadocJar(type: Jar, dependsOn: javadoc) {
    classifier 'javadoc'
}

artifacts {
    archives sourcesJar, javadocJar
}

tasks.withType(JavaCompile) {
    options.compilerArgs << '-Xlint:all,-fallthrough'
}

javadoc {
    options.addStringOption('Xdoclint:none', '-quiet')
}
55
gradle/documentation/asciidoc.gradle
Normal file
@@ -0,0 +1,55 @@
apply plugin: 'org.xbib.gradle.plugin.asciidoctor'

configurations {
    asciidoclet
}

dependencies {
    asciidoclet "org.asciidoctor:asciidoclet:${project.property('asciidoclet.version')}"
}


asciidoctor {
    backends 'html5'
    outputDir = file("${rootProject.projectDir}/docs")
    separateOutputDirs = false
    attributes 'source-highlighter': 'coderay',
            idprefix: '',
            idseparator: '-',
            toc: 'left',
            doctype: 'book',
            icons: 'font',
            encoding: 'utf-8',
            sectlink: true,
            sectanchors: true,
            linkattrs: true,
            imagesdir: 'img',
            stylesheet: "${projectDir}/src/docs/asciidoc/css/foundation.css"
}


/*javadoc {
    options.docletpath = configurations.asciidoclet.files.asType(List)
    options.doclet = 'org.asciidoctor.Asciidoclet'
    //options.overview = "src/docs/asciidoclet/overview.adoc"
    options.addStringOption "-base-dir", "${projectDir}"
    options.addStringOption "-attribute",
            "name=${project.name},version=${project.version},title-link=https://github.com/xbib/${project.name}"
    configure(options) {
        noTimestamp = true
    }
}*/


/*javadoc {
    options.docletpath = configurations.asciidoclet.files.asType(List)
    options.doclet = 'org.asciidoctor.Asciidoclet'
    options.overview = "${rootProject.projectDir}/src/docs/asciidoclet/overview.adoc"
    options.addStringOption "-base-dir", "${projectDir}"
    options.addStringOption "-attribute",
            "name=${project.name},version=${project.version},title-link=https://github.com/xbib/${project.name}"
    options.destinationDirectory(file("${projectDir}/docs/javadoc"))
    configure(options) {
        noTimestamp = true
    }
}*/
13
gradle/ide/idea.gradle
Normal file
@@ -0,0 +1,13 @@
apply plugin: 'idea'

idea {
    module {
        outputDir file('build/classes/java/main')
        testOutputDir file('build/classes/java/test')
    }
}

if (project.convention.findPlugin(JavaPluginConvention)) {
    //sourceSets.main.output.classesDirs = file("build/classes/java/main")
    //sourceSets.test.output.classesDirs = file("build/classes/java/test")
}
66
gradle/publishing/publication.gradle
Normal file
@@ -0,0 +1,66 @@

apply plugin: "de.marcphilipp.nexus-publish"

publishing {
    publications {
        mavenJava(MavenPublication) {
            from components.java
            artifact sourcesJar
            artifact javadocJar
            pom {
                name = project.name
                description = rootProject.ext.description
                url = rootProject.ext.url
                inceptionYear = rootProject.ext.inceptionYear
                packaging = 'jar'
                organization {
                    name = 'xbib'
                    url = 'https://xbib.org'
                }
                developers {
                    developer {
                        id = 'jprante'
                        name = 'Jörg Prante'
                        email = 'joergprante@gmail.com'
                        url = 'https://github.com/jprante'
                    }
                }
                scm {
                    url = rootProject.ext.scmUrl
                    connection = rootProject.ext.scmConnection
                    developerConnection = rootProject.ext.scmDeveloperConnection
                }
                issueManagement {
                    system = rootProject.ext.issueManagementSystem
                    url = rootProject.ext.issueManagementUrl
                }
                licenses {
                    license {
                        name = rootProject.ext.licenseName
                        url = rootProject.ext.licenseUrl
                        distribution = 'repo'
                    }
                }
            }
        }
    }
}

if (project.hasProperty("signing.keyId")) {
    apply plugin: 'signing'
    signing {
        sign publishing.publications.mavenJava
    }
}

if (project.hasProperty("ossrhUsername")) {
    nexusPublishing {
        repositories {
            sonatype {
                username = project.property('ossrhUsername')
                password = project.property('ossrhPassword')
                packageGroup = "org.xbib"
            }
        }
    }
}
11
gradle/publishing/sonatype.gradle
Normal file
@@ -0,0 +1,11 @@

if (project.hasProperty('ossrhUsername') && project.hasProperty('ossrhPassword')) {

    apply plugin: 'io.codearte.nexus-staging'

    nexusStaging {
        username = project.property('ossrhUsername')
        password = project.property('ossrhPassword')
        packageGroup = "org.xbib"
    }
}
4
gradle/repositories/maven.gradle
Normal file
@@ -0,0 +1,4 @@
repositories {
    mavenLocal()
    mavenCentral()
}
27
gradle/test/junit5.gradle
Normal file
@@ -0,0 +1,27 @@

def junitVersion = project.hasProperty('junit.version')?project.property('junit.version'):'5.6.2'
def hamcrestVersion = project.hasProperty('hamcrest.version')?project.property('hamcrest.version'):'2.2'

dependencies {
    testImplementation "org.junit.jupiter:junit-jupiter-api:${junitVersion}"
    testImplementation "org.junit.jupiter:junit-jupiter-params:${junitVersion}"
    testImplementation "org.hamcrest:hamcrest-library:${hamcrestVersion}"
    testRuntimeOnly "org.junit.jupiter:junit-jupiter-engine:${junitVersion}"
}

test {
    useJUnitPlatform()
    failFast = true
    testLogging {
        events 'STARTED', 'PASSED', 'FAILED', 'SKIPPED'
    }
    afterSuite { desc, result ->
        if (!desc.parent) {
            println "\nTest result: ${result.resultType}"
            println "Test summary: ${result.testCount} tests, " +
                    "${result.successfulTestCount} succeeded, " +
                    "${result.failedTestCount} failed, " +
                    "${result.skippedTestCount} skipped"
        }
    }
}
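
The test script above wires JUnit Jupiter and Hamcrest onto the test classpath and runs them on the JUnit Platform. A minimal test shaped to that configuration might look like the following sketch (the class and method names are illustrative, not part of this commit):

package org.xbib.event.test;

import org.junit.jupiter.api.Test;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;

class EventSmokeTest {

    @Test
    void smoke() {
        // any Jupiter test class on the test source set is picked up by useJUnitPlatform()
        assertThat(1 + 1, is(2));
    }
}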
BIN
gradle/wrapper/gradle-wrapper.jar
vendored
Normal file
Binary file not shown.
5
gradle/wrapper/gradle-wrapper.properties
vendored
Normal file
@@ -0,0 +1,5 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-7.3.2-all.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
234
gradlew
vendored
Executable file
@@ -0,0 +1,234 @@
|
||||||
|
#!/bin/sh
|
||||||
|
|
||||||
|
#
|
||||||
|
# Copyright © 2015-2021 the original authors.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# https://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
#
|
||||||
|
|
||||||
|
##############################################################################
|
||||||
|
#
|
||||||
|
# Gradle start up script for POSIX generated by Gradle.
|
||||||
|
#
|
||||||
|
# Important for running:
|
||||||
|
#
|
||||||
|
# (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is
|
||||||
|
# noncompliant, but you have some other compliant shell such as ksh or
|
||||||
|
# bash, then to run this script, type that shell name before the whole
|
||||||
|
# command line, like:
|
||||||
|
#
|
||||||
|
# ksh Gradle
|
||||||
|
#
|
||||||
|
# Busybox and similar reduced shells will NOT work, because this script
|
||||||
|
# requires all of these POSIX shell features:
|
||||||
|
# * functions;
|
||||||
|
# * expansions «$var», «${var}», «${var:-default}», «${var+SET}»,
|
||||||
|
# «${var#prefix}», «${var%suffix}», and «$( cmd )»;
|
||||||
|
# * compound commands having a testable exit status, especially «case»;
|
||||||
|
# * various built-in commands including «command», «set», and «ulimit».
|
||||||
|
#
|
||||||
|
# Important for patching:
|
||||||
|
#
|
||||||
|
# (2) This script targets any POSIX shell, so it avoids extensions provided
|
||||||
|
# by Bash, Ksh, etc; in particular arrays are avoided.
|
||||||
|
#
|
||||||
|
# The "traditional" practice of packing multiple parameters into a
|
||||||
|
# space-separated string is a well documented source of bugs and security
|
||||||
|
# problems, so this is (mostly) avoided, by progressively accumulating
|
||||||
|
# options in "$@", and eventually passing that to Java.
|
||||||
|
#
|
||||||
|
# Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS,
|
||||||
|
# and GRADLE_OPTS) rely on word-splitting, this is performed explicitly;
|
||||||
|
# see the in-line comments for details.
|
||||||
|
#
|
||||||
|
# There are tweaks for specific operating systems such as AIX, CygWin,
|
||||||
|
# Darwin, MinGW, and NonStop.
|
||||||
|
#
|
||||||
|
# (3) This script is generated from the Groovy template
|
||||||
|
# https://github.com/gradle/gradle/blob/master/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
|
||||||
|
# within the Gradle project.
|
||||||
|
#
|
||||||
|
# You can find Gradle at https://github.com/gradle/gradle/.
|
||||||
|
#
|
||||||
|
##############################################################################
|
||||||
|
|
||||||
|
# Attempt to set APP_HOME
|
||||||
|
|
||||||
|
# Resolve links: $0 may be a link
|
||||||
|
app_path=$0
|
||||||
|
|
||||||
|
# Need this for daisy-chained symlinks.
|
||||||
|
while
|
||||||
|
APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path
|
||||||
|
[ -h "$app_path" ]
|
||||||
|
do
|
||||||
|
ls=$( ls -ld "$app_path" )
|
||||||
|
link=${ls#*' -> '}
|
||||||
|
case $link in #(
|
||||||
|
/*) app_path=$link ;; #(
|
||||||
|
*) app_path=$APP_HOME$link ;;
|
||||||
|
esac
|
||||||
|
done
|
||||||
|
|
||||||
|
APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit
|
||||||
|
|
||||||
|
APP_NAME="Gradle"
|
||||||
|
APP_BASE_NAME=${0##*/}
|
||||||
|
|
||||||
|
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
|
||||||
|
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
|
||||||
|
|
||||||
|
# Use the maximum available, or set MAX_FD != -1 to use that value.
|
||||||
|
MAX_FD=maximum
|
||||||
|
|
||||||
|
warn () {
|
||||||
|
echo "$*"
|
||||||
|
} >&2
|
||||||
|
|
||||||
|
die () {
|
||||||
|
echo
|
||||||
|
echo "$*"
|
||||||
|
echo
|
||||||
|
exit 1
|
||||||
|
} >&2
|
||||||
|
|
||||||
|
# OS specific support (must be 'true' or 'false').
|
||||||
|
cygwin=false
|
||||||
|
msys=false
|
||||||
|
darwin=false
|
||||||
|
nonstop=false
|
||||||
|
case "$( uname )" in #(
|
||||||
|
CYGWIN* ) cygwin=true ;; #(
|
||||||
|
Darwin* ) darwin=true ;; #(
|
||||||
|
MSYS* | MINGW* ) msys=true ;; #(
|
||||||
|
NONSTOP* ) nonstop=true ;;
|
||||||
|
esac
|
||||||
|
|
||||||
|
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
|
||||||
|
|
||||||
|
|
||||||
|
# Determine the Java command to use to start the JVM.
|
||||||
|
if [ -n "$JAVA_HOME" ] ; then
|
||||||
|
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
|
||||||
|
# IBM's JDK on AIX uses strange locations for the executables
|
||||||
|
JAVACMD=$JAVA_HOME/jre/sh/java
|
||||||
|
else
|
||||||
|
JAVACMD=$JAVA_HOME/bin/java
|
||||||
|
fi
|
||||||
|
if [ ! -x "$JAVACMD" ] ; then
|
||||||
|
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
|
||||||
|
|
||||||
|
Please set the JAVA_HOME variable in your environment to match the
|
||||||
|
location of your Java installation."
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
JAVACMD=java
|
||||||
|
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
|
||||||
|
|
||||||
|
Please set the JAVA_HOME variable in your environment to match the
|
||||||
|
location of your Java installation."
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Increase the maximum file descriptors if we can.
|
||||||
|
if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
|
||||||
|
case $MAX_FD in #(
|
||||||
|
max*)
|
||||||
|
MAX_FD=$( ulimit -H -n ) ||
|
||||||
|
warn "Could not query maximum file descriptor limit"
|
||||||
|
esac
|
||||||
|
case $MAX_FD in #(
|
||||||
|
'' | soft) :;; #(
|
||||||
|
*)
|
||||||
|
ulimit -n "$MAX_FD" ||
|
||||||
|
warn "Could not set maximum file descriptor limit to $MAX_FD"
|
||||||
|
esac
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Collect all arguments for the java command, stacking in reverse order:
|
||||||
|
# * args from the command line
|
||||||
|
# * the main class name
|
||||||
|
# * -classpath
|
||||||
|
# * -D...appname settings
|
||||||
|
# * --module-path (only if needed)
|
||||||
|
# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables.
|
||||||
|
|
||||||
|
# For Cygwin or MSYS, switch paths to Windows format before running java
|
||||||
|
if "$cygwin" || "$msys" ; then
|
||||||
|
APP_HOME=$( cygpath --path --mixed "$APP_HOME" )
|
||||||
|
CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" )
|
||||||
|
|
||||||
|
JAVACMD=$( cygpath --unix "$JAVACMD" )
|
||||||
|
|
||||||
|
# Now convert the arguments - kludge to limit ourselves to /bin/sh
|
||||||
|
for arg do
|
||||||
|
if
|
||||||
|
case $arg in #(
|
||||||
|
-*) false ;; # don't mess with options #(
|
||||||
|
/?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath
|
||||||
|
[ -e "$t" ] ;; #(
|
||||||
|
*) false ;;
|
||||||
|
esac
|
||||||
|
then
|
||||||
|
arg=$( cygpath --path --ignore --mixed "$arg" )
|
||||||
|
fi
|
||||||
|
# Roll the args list around exactly as many times as the number of
|
||||||
|
# args, so each arg winds up back in the position where it started, but
|
||||||
|
# possibly modified.
|
||||||
|
#
|
||||||
|
# NB: a `for` loop captures its iteration list before it begins, so
|
||||||
|
# changing the positional parameters here affects neither the number of
|
||||||
|
# iterations, nor the values presented in `arg`.
|
||||||
|
shift # remove old arg
|
||||||
|
set -- "$@" "$arg" # push replacement arg
|
||||||
|
done
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Collect all arguments for the java command;
|
||||||
|
# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of
|
||||||
|
# shell script including quotes and variable substitutions, so put them in
|
||||||
|
# double quotes to make sure that they get re-expanded; and
|
||||||
|
# * put everything else in single quotes, so that it's not re-expanded.
|
||||||
|
|
||||||
|
set -- \
|
||||||
|
"-Dorg.gradle.appname=$APP_BASE_NAME" \
|
||||||
|
-classpath "$CLASSPATH" \
|
||||||
|
org.gradle.wrapper.GradleWrapperMain \
|
||||||
|
"$@"
|
||||||
|
|
||||||
|
# Use "xargs" to parse quoted args.
|
||||||
|
#
|
||||||
|
# With -n1 it outputs one arg per line, with the quotes and backslashes removed.
|
||||||
|
#
|
||||||
|
# In Bash we could simply go:
|
||||||
|
#
|
||||||
|
# readarray ARGS < <( xargs -n1 <<<"$var" ) &&
|
||||||
|
# set -- "${ARGS[@]}" "$@"
|
||||||
|
#
|
||||||
|
# but POSIX shell has neither arrays nor command substitution, so instead we
|
||||||
|
# post-process each arg (as a line of input to sed) to backslash-escape any
|
||||||
|
# character that might be a shell metacharacter, then use eval to reverse
|
||||||
|
# that process (while maintaining the separation between arguments), and wrap
|
||||||
|
# the whole thing up as a single "set" statement.
|
||||||
|
#
|
||||||
|
# This will of course break if any of these variables contains a newline or
|
||||||
|
# an unmatched quote.
|
||||||
|
#
|
||||||
|
|
||||||
|
eval "set -- $(
|
||||||
|
printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" |
|
||||||
|
xargs -n1 |
|
||||||
|
sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' |
|
||||||
|
tr '\n' ' '
|
||||||
|
)" '"$@"'
|
||||||
|
|
||||||
|
exec "$JAVACMD" "$@"
|
89
gradlew.bat
vendored
Normal file
@@ -0,0 +1,89 @@
|
||||||
|
@rem
|
||||||
|
@rem Copyright 2015 the original author or authors.
|
||||||
|
@rem
|
||||||
|
@rem Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
@rem you may not use this file except in compliance with the License.
|
||||||
|
@rem You may obtain a copy of the License at
|
||||||
|
@rem
|
||||||
|
@rem https://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
@rem
|
||||||
|
@rem Unless required by applicable law or agreed to in writing, software
|
||||||
|
@rem distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
@rem See the License for the specific language governing permissions and
|
||||||
|
@rem limitations under the License.
|
||||||
|
@rem
|
||||||
|
|
||||||
|
@if "%DEBUG%" == "" @echo off
|
||||||
|
@rem ##########################################################################
|
||||||
|
@rem
|
||||||
|
@rem Gradle startup script for Windows
|
||||||
|
@rem
|
||||||
|
@rem ##########################################################################
|
||||||
|
|
||||||
|
@rem Set local scope for the variables with windows NT shell
|
||||||
|
if "%OS%"=="Windows_NT" setlocal
|
||||||
|
|
||||||
|
set DIRNAME=%~dp0
|
||||||
|
if "%DIRNAME%" == "" set DIRNAME=.
|
||||||
|
set APP_BASE_NAME=%~n0
|
||||||
|
set APP_HOME=%DIRNAME%
|
||||||
|
|
||||||
|
@rem Resolve any "." and ".." in APP_HOME to make it shorter.
|
||||||
|
for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
|
||||||
|
|
||||||
|
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
|
||||||
|
set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
|
||||||
|
|
||||||
|
@rem Find java.exe
|
||||||
|
if defined JAVA_HOME goto findJavaFromJavaHome
|
||||||
|
|
||||||
|
set JAVA_EXE=java.exe
|
||||||
|
%JAVA_EXE% -version >NUL 2>&1
|
||||||
|
if "%ERRORLEVEL%" == "0" goto execute
|
||||||
|
|
||||||
|
echo.
|
||||||
|
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
|
||||||
|
echo.
|
||||||
|
echo Please set the JAVA_HOME variable in your environment to match the
|
||||||
|
echo location of your Java installation.
|
||||||
|
|
||||||
|
goto fail
|
||||||
|
|
||||||
|
:findJavaFromJavaHome
|
||||||
|
set JAVA_HOME=%JAVA_HOME:"=%
|
||||||
|
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
|
||||||
|
|
||||||
|
if exist "%JAVA_EXE%" goto execute
|
||||||
|
|
||||||
|
echo.
|
||||||
|
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
|
||||||
|
echo.
|
||||||
|
echo Please set the JAVA_HOME variable in your environment to match the
|
||||||
|
echo location of your Java installation.
|
||||||
|
|
||||||
|
goto fail
|
||||||
|
|
||||||
|
:execute
|
||||||
|
@rem Setup the command line
|
||||||
|
|
||||||
|
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
|
||||||
|
|
||||||
|
|
||||||
|
@rem Execute Gradle
|
||||||
|
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*
|
||||||
|
|
||||||
|
:end
|
||||||
|
@rem End local scope for the variables with windows NT shell
|
||||||
|
if "%ERRORLEVEL%"=="0" goto mainEnd
|
||||||
|
|
||||||
|
:fail
|
||||||
|
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
|
||||||
|
rem the _cmd.exe /c_ return code!
|
||||||
|
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
|
||||||
|
exit /b 1
|
||||||
|
|
||||||
|
:mainEnd
|
||||||
|
if "%OS%"=="Windows_NT" endlocal
|
||||||
|
|
||||||
|
:omega
|
0
settings.gradle
Normal file
14
src/main/java/org/xbib/event/Event.java
Normal file
@@ -0,0 +1,14 @@
package org.xbib.event;

import java.util.Map;

public interface Event {

    void setKey(String key);

    String getKey();

    void setMap(Map<String, Object> map);

    Map<String, Object> getMap();
}
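
Event is a plain mutable key/map carrier. A minimal sketch of an implementing class (the name MapEvent is hypothetical and not part of this commit):

package org.xbib.event;

import java.util.LinkedHashMap;
import java.util.Map;

public class MapEvent implements Event {

    private String key;

    private Map<String, Object> map = new LinkedHashMap<>();

    @Override
    public void setKey(String key) {
        this.key = key;
    }

    @Override
    public String getKey() {
        return key;
    }

    @Override
    public void setMap(Map<String, Object> map) {
        this.map = map;
    }

    @Override
    public Map<String, Object> getMap() {
        return map;
    }
}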
4
src/main/java/org/xbib/event/EventConsumer.java
Normal file
@@ -0,0 +1,4 @@
package org.xbib.event;

public interface EventConsumer {
}
4
src/main/java/org/xbib/event/EventService.java
Normal file
@@ -0,0 +1,4 @@
package org.xbib.event;

public class EventService {
}
4
src/main/java/org/xbib/event/FileFollowEvent.java
Normal file
@@ -0,0 +1,4 @@
package org.xbib.event;

public class FileFollowEvent {
}
173
src/main/java/org/xbib/event/async/AbstractAsyncFileReaderLines.java
Normal file
@@ -0,0 +1,173 @@
package org.xbib.event.async;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.AsynchronousFileChannel;
import java.nio.channels.CompletionHandler;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.security.InvalidParameterException;
import java.util.function.ObjIntConsumer;

import static java.nio.channels.AsynchronousFileChannel.open;

/**
 * Asynchronous non-blocking read operations that use an underlying AsynchronousFileChannel.
 */
public abstract class AbstractAsyncFileReaderLines {
    static final int BUFFER_SIZE = 4096 * 8; // the transfer buffer size
    private static final int MAX_LINE_SIZE = 4096;
    private static final int LF = '\n';
    private static final int CR = '\r';
    //
    // This flag will track whether this `Subscription` is to be considered cancelled or not.
    private boolean cancelled = false;

    /**
     * Asynchronous read chunk operation, callback based.
     */
    private static void readBytes(
            AsynchronousFileChannel asyncFile,
            long position, // current read or write position in file
            byte[] data, // buffer for current producing line
            int size,
            ObjIntConsumer<Throwable> completed) {
        if (completed == null)
            throw new InvalidParameterException("callback can't be null!");
        if (size > data.length)
            size = data.length;
        if (size == 0) {
            completed.accept(null, 0);
            return;
        }
        ByteBuffer buf = ByteBuffer.wrap(data, 0, size);
        CompletionHandler<Integer, Object> readCompleted =
                new CompletionHandler<>() {
                    @Override
                    public void completed(Integer result, Object attachment) {
                        completed.accept(null, result);
                    }

                    @Override
                    public void failed(Throwable exc, Object attachment) {
                        completed.accept(exc, 0);
                    }
                };
        asyncFile.read(buf, position, null, readCompleted);
    }

    protected abstract void onError(Throwable error);

    protected abstract void onComplete();

    protected abstract void onProduceLine(String line);

    public final void cancel() {
        cancelled = true;
    }

    public final boolean isCancelled() {
        return cancelled;
    }

    final void readLines(Path file) throws IOException {
        readLines(open(file, StandardOpenOption.READ), BUFFER_SIZE);
    }

    /**
     * Read bytes from an {@code AsynchronousFileChannel}, which are decoded into characters
     * using the UTF-8 charset.
     * The resulting characters are parsed by line and passed to the destination buffer.
     *
     * @param asyncFile the nio associated file channel.
     * @param bufferSize
     */
    final void readLines(
            AsynchronousFileChannel asyncFile,
            int bufferSize) {
        readLines(asyncFile, 0, 0, 0, new byte[bufferSize], new byte[MAX_LINE_SIZE], 0);
    }

    /**
     * There is a recursion on `readLines()` establishing a serial order among:
     * `readLines()` -> `produceLine()` -> `onProduceLine()` -> `readLines()` -> and so on.
     * It finishes with a call to `close()`.
     *
     * @param asyncFile the nio associated file channel.
     * @param position current read or write position in file.
     * @param bufpos read position in buffer.
     * @param bufsize total bytes in buffer.
     * @param buffer buffer for current producing line.
     * @param auxline the transfer buffer.
     * @param linepos current position in producing line.
     */
    private void readLines(
            AsynchronousFileChannel asyncFile,
            long position, // current read or write position in file
            int bufpos, // read position in buffer
            int bufsize, // total bytes in buffer
            byte[] buffer, // buffer for current producing line
            byte[] auxline, // the transfer buffer
            int linepos) // current position in producing line
    {
        while (bufpos < bufsize) {
            if (buffer[bufpos] == LF) {
                if (linepos > 0 && auxline[linepos - 1] == CR) linepos--;
                bufpos++;
                produceLine(auxline, linepos);
                linepos = 0;
            } else if (linepos == MAX_LINE_SIZE - 1) {
                produceLine(auxline, linepos);
                linepos = 0;
            } else auxline[linepos++] = buffer[bufpos++];
        }
        int lastLinePos = linepos; // we need a final variable captured in the next lambda
        if (!isCancelled()) readBytes(asyncFile, position, buffer, buffer.length, (err, res) -> {
            if (isCancelled())
                return;
            if (err != null) {
                onError(err);
                close(asyncFile);
                return;
            }
            if (res <= 0) {
                // needed for last line that doesn't end with LF
                if (lastLinePos > 0) {
                    produceLine(auxline, lastLinePos);
                }
                // Following it will invoke onComplete()
                close(asyncFile);
            } else {
                readLines(asyncFile, position + res, 0, res, buffer, auxline, lastLinePos);
            }
        });
    }

    /**
     * Performed from the IO background thread when it reached the end of the file.
     *
     * @param asyncFile
     */
    private void close(AsynchronousFileChannel asyncFile) {
        try {
            asyncFile.close();
        } catch (IOException e) {
            onError(e); // Failed terminal state.
            // Emission has finished. Does not propagate error on CompletableFuture.
        } finally {
            onComplete();
        }
    }

    /**
     * This is called only from readLines() callback and performed from a background IO thread.
     *
     * @param auxline the transfer buffer.
     * @param linepos current position in producing line.
     */
    private void produceLine(byte[] auxline, int linepos) {
        String line = new String(auxline, 0, linepos, StandardCharsets.UTF_8);
        onProduceLine(line);
    }
}
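
The reader above is callback-driven: subclasses receive each decoded line via onProduceLine and the terminal signals via onError/onComplete, while readLines(Path) starts the asynchronous read loop. A sketch of a concrete subclass that collects all lines into a future follows; the class name is hypothetical and it sits in the same package because readLines(Path) is package-private:

package org.xbib.event.async;

import java.io.IOException;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;

public class CollectingLineReader extends AbstractAsyncFileReaderLines {

    private final List<String> lines = new ArrayList<>();

    private final CompletableFuture<List<String>> future = new CompletableFuture<>();

    public static CompletableFuture<List<String>> collect(Path file) throws IOException {
        CollectingLineReader reader = new CollectingLineReader();
        reader.readLines(file); // starts the asynchronous read loop on the channel's IO thread
        return reader.future;
    }

    @Override
    protected void onProduceLine(String line) {
        lines.add(line); // called serially from the IO background thread
    }

    @Override
    protected void onError(Throwable error) {
        future.completeExceptionally(error);
    }

    @Override
    protected void onComplete() {
        future.complete(lines); // no-op if the future already failed in onError
    }
}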
20
src/main/java/org/xbib/event/async/AddOnComplete.java
Normal file
@@ -0,0 +1,20 @@
package org.xbib.event.async;

import org.reactivestreams.Subscriber;

public class AddOnComplete<T> extends SubscriberBuilder<T> {

    private final Runnable action;

    public AddOnComplete(Runnable action, Subscriber<T> sub) {

        super(sub);
        this.action = action;
    }

    @Override
    public void onComplete() {
        super.onComplete();
        action.run();
    }
}
30
src/main/java/org/xbib/event/async/AddOnError.java
Normal file
@@ -0,0 +1,30 @@
package org.xbib.event.async;

import org.reactivestreams.Subscriber;

import java.util.function.Consumer;

public class AddOnError<T> extends SubscriberBuilder<T> implements Subscriber<T> {

    private final Consumer<Throwable> cons;

    public AddOnError(Consumer<Throwable> cons, Subscriber<T> sub) {
        super(sub);
        this.cons = cons;
    }

    @Override
    public void onNext(T item) {
        try {
            super.onNext(item);
        } catch (Exception err) {
            this.onError(err);
        }
    }

    @Override
    public void onError(Throwable throwable) {
        super.onError(throwable);
        cons.accept(throwable);
    }
}
20
src/main/java/org/xbib/event/async/AddOnNext.java
Normal file
@@ -0,0 +1,20 @@
package org.xbib.event.async;

import org.reactivestreams.Subscriber;

import java.util.function.Consumer;

public class AddOnNext<T> extends SubscriberBuilder<T> implements Subscriber<T> {

    private final Consumer<T> cons;

    public AddOnNext(Consumer<T> cons) {
        super(new EmptySubscriber<>());
        this.cons = cons;
    }

    @Override
    public void onNext(T item) {
        cons.accept(item);
    }
}
22
src/main/java/org/xbib/event/async/AddOnSubscribe.java
Normal file
@@ -0,0 +1,22 @@
package org.xbib.event.async;

import org.reactivestreams.Subscriber;
import org.reactivestreams.Subscription;

import java.util.function.Consumer;

public class AddOnSubscribe<T> extends SubscriberBuilder<T> implements Subscriber<T> {

    private final Consumer<Subscription> cons;

    public AddOnSubscribe(Consumer<Subscription> cons, Subscriber<T> sub) {
        super(sub);
        this.cons = cons;
    }

    @Override
    public void onSubscribe(Subscription subscription) {
        super.onSubscribe(subscription);
        cons.accept(subscription);
    }
}
74
src/main/java/org/xbib/event/async/AsyncFileQuery.java
Normal file
@@ -0,0 +1,74 @@
package org.xbib.event.async;

import org.xbib.event.yield.AsyncQuery;

import java.io.IOException;
import java.nio.file.Path;
import java.util.concurrent.CompletableFuture;
import java.util.function.BiConsumer;
import java.util.function.Consumer;

public class AsyncFileQuery extends AsyncQuery<String> {

    private final Path file;

    public AsyncFileQuery(Path file) {
        this.file = file;
    }

    @Override
    public CompletableFuture<Void> subscribe(BiConsumer<? super String, ? super Throwable> cons) {
        /*
         * The following CF is used bidirectionally from the Reader to the user and vice-versa.
         * The Reader may notify the user about end completion, and the user may tell the Reader
         * to stop reading and invoke the callback when the CF is cancelled or completed by the user.
         */
        CompletableFuture<Void> cf = new CompletableFuture<>();
        try {
            ReaderToCallback
                    .of(cons, () -> {
                        if (!cf.isDone()) cf.complete(null);
                    })
                    .apply(reader -> cf.whenComplete((nothing, err) -> reader.cancel()))
                    .readLines(file);
        } catch (IOException e) {
            cf.completeExceptionally(e);
        }
        return cf;
    }


    private static class ReaderToCallback extends AbstractAsyncFileReaderLines {
        private final BiConsumer<? super String, ? super Throwable> cons;
        private final Runnable doOnComplete;

        public ReaderToCallback(BiConsumer<? super String, ? super Throwable> cons, Runnable doOnComplete) {
            this.cons = cons;
            this.doOnComplete = doOnComplete;
        }

        public static ReaderToCallback of(BiConsumer<? super String, ? super Throwable> cons, Runnable doOnComplete) {
            return new ReaderToCallback(cons, doOnComplete);
        }

        public final AbstractAsyncFileReaderLines apply(Consumer<AbstractAsyncFileReaderLines> cons) {
            cons.accept(this);
            return this;
        }

        @Override
        public void onProduceLine(String line) {
            cons.accept(line, null);
        }

        @Override
        public void onError(Throwable err) {
            cons.accept(null, err);
        }

        @Override
        public void onComplete() {
            doOnComplete.run();
        }
    }
}
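
AsyncFileQuery adapts the line reader to a callback/CompletableFuture style: each line (or error) is passed to the BiConsumer, the returned future completes at end of file, and completing or cancelling it from the caller's side cancels the underlying reader. A usage sketch (the file name is illustrative):

import java.nio.file.Path;
import java.util.concurrent.CompletableFuture;

import org.xbib.event.async.AsyncFileQuery;

public class AsyncFileQueryExample {

    public static void main(String[] args) {
        CompletableFuture<Void> done = new AsyncFileQuery(Path.of("access.log"))
                .subscribe((line, err) -> {
                    if (err != null) {
                        err.printStackTrace();
                    } else {
                        System.out.println(line);
                    }
                });
        done.join(); // completes when the reader reaches the end of the file
    }
}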
64
src/main/java/org/xbib/event/async/AsyncFileReaderBytes.java
Normal file
@@ -0,0 +1,64 @@
package org.xbib.event.async;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.ByteBuffer;
import java.nio.channels.AsynchronousFileChannel;
import java.nio.channels.CompletionHandler;
import java.util.concurrent.CompletableFuture;

import static java.util.concurrent.CompletableFuture.completedFuture;

public class AsyncFileReaderBytes {

    private AsyncFileReaderBytes() {
    }

    static CompletableFuture<Integer> readAllBytes(
            AsynchronousFileChannel asyncFile,
            ByteBuffer buffer,
            int position,
            ByteArrayOutputStream out) {
        return readToByteArrayStream(asyncFile, buffer, position, out)
                .thenCompose(index ->
                        index < 0
                                ? completedFuture(position)
                                : readAllBytes(asyncFile, buffer.clear(), position + index, out));

    }

    static CompletableFuture<Integer> readToByteArrayStream(
            AsynchronousFileChannel asyncFile,
            ByteBuffer buffer,
            int position,
            ByteArrayOutputStream out) {
        CompletableFuture<Integer> promise = new CompletableFuture<>();
        asyncFile.read(buffer, position, buffer, new CompletionHandler<Integer, ByteBuffer>() {
            @Override
            public void completed(Integer result, ByteBuffer attachment) {
                if (result > 0) {
                    attachment.flip();
                    byte[] data = new byte[attachment.limit()]; // limit = result
                    attachment.get(data);
                    write(out, data);
                }
                promise.complete(result);
            }

            @Override
            public void failed(Throwable exc, ByteBuffer attachment) {
                promise.completeExceptionally(exc);
            }
        });
        return promise;
    }

    static void write(ByteArrayOutputStream out, byte[] data) {
        try {
            out.write(data);
        } catch (IOException e) {
            throw new UncheckedIOException(e);
        }
    }
}
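
readAllBytes drives repeated asynchronous reads through readToByteArrayStream until a read reports end of stream (a negative count), accumulating everything in the ByteArrayOutputStream. A sketch of how it could be driven follows; the class and file name are hypothetical, and it sits in the same package because the methods are package-private:

package org.xbib.event.async;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.AsynchronousFileChannel;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;

public class AsyncFileReaderBytesExample {

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        try (AsynchronousFileChannel channel =
                     AsynchronousFileChannel.open(Path.of("data.bin"), StandardOpenOption.READ)) {
            AsyncFileReaderBytes
                    .readAllBytes(channel, ByteBuffer.allocate(8192), 0, out)
                    .join(); // resolves to the final file position once end of stream is reached
        }
        System.out.println(out.size() + " bytes read");
    }
}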
178
src/main/java/org/xbib/event/async/AsyncFileReaderLines.java
Normal file
178
src/main/java/org/xbib/event/async/AsyncFileReaderLines.java
Normal file
|
@ -0,0 +1,178 @@
package org.xbib.event.async;

import org.reactivestreams.Subscriber;
import org.reactivestreams.Subscription;

import java.nio.channels.AsynchronousFileChannel;
import java.util.concurrent.ConcurrentLinkedDeque;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * Asynchronous non-blocking read operations with a reactive based API
 * implementing the Subscription proposal from reactivestreams.
 */
public class AsyncFileReaderLines extends AbstractAsyncFileReaderLines implements Subscription {
    //
    // We need a reference to the `Subscriber` so we can talk to it.
    private final Subscriber<? super String> sub;
    //
    // We are using this `AtomicInteger` to make sure that this `Subscription` doesn't run concurrently with itself,
    // which would violate rule 1.3 among others (no concurrent notifications).
    // Possible states: 0 (not emitting), 1 (emitting lines) and 3 (evaluating conditions, i.e. requests and lines).
    // onEmit changes between states 0 -> 1 <-> 3 -> 0.
    // It never changes from 1 to 0 directly. It must first pass through state 3.
    private final AtomicInteger onEmit = new AtomicInteger(0);
    //
    // This `ConcurrentLinkedDeque` will track the `onNext` signals that will be sent to the `Subscriber`.
    private final ConcurrentLinkedDeque<String> lines = new ConcurrentLinkedDeque<>();
    //
    // Here we track the current demand, i.e. what has been requested but not yet delivered.
    private final AtomicLong requests = new AtomicLong();
    //
    // Need to keep track of End-of-Stream.
    private boolean hasNext = true;

    AsyncFileReaderLines(Subscriber<? super String> sub, AsynchronousFileChannel asyncFile, int bufferSize) {
        this.sub = sub;
        this.readLines(asyncFile, bufferSize);
    }

    @Override
    protected void onError(Throwable error) {
        sub.onError(error);
    }

    /**
     * Performed from the IO background thread when it reached the end of the file.
     */
    @Override
    protected void onComplete() {
        hasNext = false;
        tryFlushPendingLines();
    }

    /**
     * This is called only from the readLines() callback and performed from a background IO thread.
     */
    @Override
    protected void onProduceLine(String line) {
        /*
         * Always put the new line into lines because a concurrent request
         * may be asking for new lines and we should ensure the total order.
         */
        lines.offer(line);
        /*
         * It only emits lines if the subscription is not cancelled yet and there are still
         * pending requests.
         */
        while (!isCancelled() // This makes sure that rule 1.8 is upheld, i.e. we need to stop signalling "eventually"
                && requests.get() > 0 // This makes sure that rule 1.1 is upheld (sending more than was demanded)
                && !lines.isEmpty()) {
            emitLine();
        }
    }

    /**
     * Emit a line to the Subscriber and decrement the number of pending requests.
     */
    private void emitLine() {
        String line = lines.poll();
        if (line != null) {
            sub.onNext(line);
            requests.decrementAndGet();
        } else {
            terminateDueTo(new IllegalStateException("Unexpected race occurred on lines offer. No other thread should be taking lines concurrently!"));
        }
    }

    /**
     * Implementation of `Subscription.request` registers that more elements are in demand.
     * This request() only tries to emit lines after the End-of-Stream, in which case we
     * must control mutual exclusion around emitLine().
     *
     * @param l the number of requested elements
     */
    @Override
    public void request(long l) {
        if (isCancelled()) return;
        doRequest(l);
        if (!hasNext) {
            tryFlushPendingLines();
        }
    }

    /**
     * This method may be invoked by request() in case of End-of-Stream or by close().
     * Here the `onEmit` controls mutual exclusion to the emitLine() call.
     */
    private void tryFlushPendingLines() {
        int state;
        // Spin try for emission entry.
        // If it successfully changes state from 0 to 1, then it will proceed to try to emit lines.
        // It quits if someone else is already emitting lines (in state 1).
        // If someone else is evaluating the conditions (state 3) then it will spin and retry.
        while ((state = onEmit.compareAndExchange(0, 1)) > 0)
            if (state == 1) return; // give up
        //
        // Start emission (in state 1)
        while (toContinue()) {
            emitLine();
        }
        // End emission (in state 3)
        // Another thread entering request() at this moment increments requests, and
        // this thread does not see the new value of requests and does not emit pending lines.
        // Yet, the other thread will spin until onEmit changes from 3 to 0 and then
        // it will change onEmit to 1 and proceed emitting pending lines.
        onEmit.set(0); // release onEmit
        if (lines.isEmpty()) {
            cancel(); // We need to consider this `Subscription` as cancelled as per rule 1.6
            sub.onComplete(); // Then we signal `onComplete` as per rule 1.2 and 1.5
        }
    }

    /**
     * Here it will change to state 3 (on evaluation) and then to state 1 (emitting)
     * if there are pending requests and lines to be emitted.
     */
    private boolean toContinue() {
        // First, change to state 3 corresponding to evaluation of requests and lines.
        onEmit.set(3);
        boolean cont = !isCancelled() // This makes sure that rule 1.8 is upheld, i.e. we need to stop signalling "eventually"
                && requests.get() > 0 // This makes sure that rule 1.1 is upheld (sending more than was demanded)
                && !lines.isEmpty();
        // If there are pending requests and lines to be emitted, then change to
        // state 1, in which it will emit those lines.
        if (cont) onEmit.set(1);
        return cont;
    }

    /**
     * This method will register inbound demand from our `Subscriber` and validate it against rule 3.9 and rule 3.17
     */
    private void doRequest(final long n) {
        if (n < 1)
            terminateDueTo(new IllegalArgumentException(sub + " violated the Reactive Streams rule 3.9 by requesting a non-positive number of elements."));
        else if (requests.get() + n < 1) {
            // As governed by rule 3.17, when demand overflows `Long.MAX_VALUE` we treat the signalled demand as "effectively unbounded"
            requests.set(Long.MAX_VALUE); // Here we protect from the overflow and treat it as "effectively unbounded"
        } else {
            requests.addAndGet(n); // Here we record the downstream demand
        }
    }

    /**
     * This is a helper method to ensure that we always `cancel` when we signal `onError` as per rule 1.6
     */
    private void terminateDueTo(final Throwable t) {
        cancel(); // When we signal onError, the subscription must be considered as cancelled, as per rule 1.6
        try {
            sub.onError(t); // Then we signal the error downstream, to the `Subscriber`
        } catch (final Exception t2) { // If `onError` throws an exception, this is a spec violation according to rule 2.13, and all we can do is to log it.
            Throwable ex = new IllegalStateException(sub + " violated the Reactive Streams rule 2.13 by throwing an exception from onError.", t2);
            Logger.getGlobal().log(Level.SEVERE, "Violated the Reactive Streams rule 2.13", ex);
        }
    }
}
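To illustrate how the demand-driven state machine above is meant to be consumed, here is a small, hypothetical subscriber sketch (not part of this commit) that requests one line at a time from the `lines` publisher of `AsyncFiles` shown further below; the file path is a placeholder.

import org.reactivestreams.Subscriber;
import org.reactivestreams.Subscription;
import org.xbib.event.async.AsyncFiles;

import java.util.concurrent.CountDownLatch;

public class OneByOneLineSubscriber implements Subscriber<String> {

    private final CountDownLatch done = new CountDownLatch(1);

    private Subscription subscription;

    @Override
    public void onSubscribe(Subscription subscription) {
        this.subscription = subscription;
        subscription.request(1); // signal initial demand (rule 2.1)
    }

    @Override
    public void onNext(String line) {
        System.out.println(line);
        subscription.request(1); // ask for the next line only after consuming this one
    }

    @Override
    public void onError(Throwable throwable) {
        throwable.printStackTrace();
        done.countDown();
    }

    @Override
    public void onComplete() {
        done.countDown();
    }

    public static void main(String[] args) throws InterruptedException {
        OneByOneLineSubscriber subscriber = new OneByOneLineSubscriber();
        AsyncFiles.lines("/tmp/sample.txt").subscribe(subscriber); // placeholder path
        subscriber.done.await(); // keep the main thread alive until the file has been read
    }
}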
119
src/main/java/org/xbib/event/async/AsyncFileWriter.java
Normal file
@@ -0,0 +1,119 @@
package org.xbib.event.async;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.AsynchronousFileChannel;
import java.nio.channels.CompletionHandler;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.util.concurrent.CompletableFuture;

import static java.nio.ByteBuffer.wrap;
import static java.nio.channels.AsynchronousFileChannel.open;

/**
 * Asynchronous non-blocking write operations with a {@code CompletableFuture} based API.
 * These operations use an underlying {@code AsynchronousFileChannel}.
 * All methods are asynchronous, including {@code close()}, which chains a continuation
 * on the last resulting write {@code CompletableFuture} to close the {@code AsynchronousFileChannel} on completion.
 */
public class AsyncFileWriter implements AutoCloseable {

    final AsynchronousFileChannel asyncFile;

    /**
     * File position after last write operation completion.
     */
    private CompletableFuture<Integer> currentPosition = CompletableFuture.completedFuture(0);

    public AsyncFileWriter(AsynchronousFileChannel asyncFile) {
        this.asyncFile = asyncFile;
    }

    public AsyncFileWriter(Path file, StandardOpenOption... options) throws IOException {
        this(open(file, options));
    }

    static CompletableFuture<Integer> write(
            AsynchronousFileChannel asyncFile,
            ByteBuffer buf,
            int position) {
        CompletableFuture<Integer> promise = new CompletableFuture<>();
        asyncFile.write(buf, position, null, new CompletionHandler<Integer, Object>() {
            @Override
            public void completed(Integer result, Object attachment) {
                promise.complete(result);
            }

            @Override
            public void failed(Throwable exc, Object attachment) {
                promise.completeExceptionally(exc);
            }
        });
        return promise;
    }

    public CompletableFuture<Integer> getPosition() {
        return currentPosition;
    }

    /**
     * Writes the given String appended with a newline separator
     * and returns a CompletableFuture of the final file index
     * after the completion of the corresponding write operation.
     */
    public CompletableFuture<Integer> writeLine(CharSequence str) {
        return write(str + System.lineSeparator());
    }

    /**
     * Writes the given String and returns a CompletableFuture of
     * the final file index after the completion of the corresponding
     * write operation.
     */
    public CompletableFuture<Integer> write(String str) {
        return write(str.getBytes());
    }

    /**
     * Writes the given byte array and returns a CompletableFuture of
     * the final file index after the completion of the corresponding
     * write operation.
     */
    public CompletableFuture<Integer> write(byte[] bytes) {
        return write(wrap(bytes));
    }

    /**
     * Writes the given byte buffer and returns a CompletableFuture of
     * the final file index after the completion of the corresponding
     * write operation.
     */
    public CompletableFuture<Integer> write(ByteBuffer bytes) {
        /*
         * We need to update the currentPosition field to keep track of the position.
         * The currentPosition field is used by the close() method, which chains
         * a continuation to close the AsynchronousFileChannel.
         */
        currentPosition = currentPosition.thenCompose(index -> {
            CompletableFuture<Integer> size = write(asyncFile, bytes, index);
            return size.thenApply(length -> length + index);
        });
        return currentPosition;
    }

    /**
     * Asynchronous close operation.
     * Chains a continuation on the CompletableFuture resulting from the last write operation,
     * which closes the AsynchronousFileChannel on completion.
     *
     * @throws IOException if an I/O error occurs
     */
    @Override
    public void close() throws IOException {
        if (asyncFile != null) {
            currentPosition.whenComplete((res, ex) ->
                    AsyncFiles.closeAfc(asyncFile)
            );
        }
    }
}
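A minimal usage sketch for the writer above, with a placeholder path; because close() only chains the channel close onto the last pending write, the returned future is joined before the program exits.

import org.xbib.event.async.AsyncFileWriter;

import java.io.IOException;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.util.concurrent.CompletableFuture;

public class AsyncFileWriterExample {
    public static void main(String[] args) throws IOException {
        CompletableFuture<Integer> position;
        try (AsyncFileWriter writer = new AsyncFileWriter(Paths.get("/tmp/out.txt"), // placeholder path
                StandardOpenOption.CREATE, StandardOpenOption.WRITE)) {
            writer.writeLine("first line");
            writer.writeLine("second line");
            position = writer.getPosition(); // final file index after both writes
        }
        System.out.println("bytes written: " + position.join()); // wait for completion
    }
}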
256
src/main/java/org/xbib/event/async/AsyncFiles.java
Normal file
@@ -0,0 +1,256 @@
package org.xbib.event.async;

import org.reactivestreams.Publisher;
import org.xbib.event.yield.AsyncQuery;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.ByteBuffer;
import java.nio.channels.AsynchronousFileChannel;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.util.concurrent.CompletableFuture;
import java.util.function.BiConsumer;

import static java.nio.channels.AsynchronousFileChannel.open;
import static java.nio.charset.StandardCharsets.UTF_8;

/**
 * Asynchronous non-blocking read and write operations with a reactive based API.
 * Read operations return a CompletableFuture with a single String or a Publisher
 * of strings corresponding to lines.
 * Write methods return a CompletableFuture with the final file index after the
 * completion of the corresponding write operation.
 * These operations use an underlying AsynchronousFileChannel.
 */
public class AsyncFiles {

    private AsyncFiles() {
    }

    public static AsyncQuery<String> asyncQuery(Path file) {
        return new AsyncFileQuery(file);
    }

    /**
     * Reads the given file from the beginning using an AsyncFileChannel
     * with a ByteBuffer of {@link AbstractAsyncFileReaderLines#BUFFER_SIZE BUFFER_SIZE} capacity.
     */
    public static Publisher<String> lines(String file) {
        return lines(Paths.get(file));
    }

    /**
     * Reads the given file from the beginning using an AsyncFileChannel
     * with a ByteBuffer of {@link AbstractAsyncFileReaderLines#BUFFER_SIZE BUFFER_SIZE} capacity.
     */
    public static Publisher<String> lines(Path file) {
        return lines(AbstractAsyncFileReaderLines.BUFFER_SIZE, file);
    }

    /**
     * Reads the given file from the beginning using
     * an AsyncFileChannel with a ByteBuffer of
     * the specified bufferSize capacity.
     */
    public static Publisher<String> lines(int bufferSize, String file) {
        return lines(bufferSize, Paths.get(file));
    }

    /**
     * Reads the given file from the beginning using
     * an AsyncFileChannel with a ByteBuffer of
     * the specified bufferSize capacity.
     */
    public static Publisher<String> lines(int bufferSize, Path file) {
        return lines(bufferSize, file, StandardOpenOption.READ);
    }

    /**
     * Reads the given file from the beginning using
     * an AsyncFileChannel with a ByteBuffer of
     * the specified bufferSize capacity.
     */
    public static Publisher<String> lines(int bufferSize, Path file, StandardOpenOption... options) {
        return sub -> {
            AsyncFileReaderLines reader = null;
            try {
                AsynchronousFileChannel asyncFile = open(file, options);
                reader = new AsyncFileReaderLines(sub, asyncFile, bufferSize);
            } catch (IOException e) {
                sub.onSubscribe(reader);
                sub.onError(e);
                return;
            }
            sub.onSubscribe(reader);
        };
    }

    /**
     * Reads the file from the beginning using an AsyncFileChannel
     * with a ByteBuffer of {@link AbstractAsyncFileReaderLines#BUFFER_SIZE BUFFER_SIZE} capacity.
     * It automatically closes the underlying AsyncFileChannel when read is complete.
     */
    public static CompletableFuture<String> readAll(String file) {
        return readAll(Paths.get(file));
    }

    /**
     * A callback based version of readAll().
     * Reads the file from the beginning using an AsyncFileChannel
     * with a ByteBuffer of {@link AbstractAsyncFileReaderLines#BUFFER_SIZE BUFFER_SIZE} capacity.
     * It automatically closes the underlying AsyncFileChannel when read is complete.
     */
    public static void readAll(String file, BiConsumer<Throwable, String> callback) {
        readAll(file, AbstractAsyncFileReaderLines.BUFFER_SIZE)
                .whenComplete((data, err) -> {
                    if (err != null) callback.accept(err, null);
                    else callback.accept(null, data);
                });
    }

    /**
     * Reads the file from the beginning using
     * an AsyncFileChannel with a ByteBuffer of
     * the specified bufferSize capacity.
     * It automatically closes the underlying AsyncFileChannel
     * when read is complete.
     */
    public static CompletableFuture<String> readAll(String file, int bufferSize) {
        return readAll(Paths.get(file), bufferSize);
    }

    /**
     * Reads the file from the beginning using an AsyncFileChannel
     * with a ByteBuffer of {@link AbstractAsyncFileReaderLines#BUFFER_SIZE BUFFER_SIZE} capacity.
     * It automatically closes the underlying AsyncFileChannel
     * when read is complete.
     */
    public static CompletableFuture<String> readAll(Path file) {
        return readAll(file, AbstractAsyncFileReaderLines.BUFFER_SIZE);
    }

    /**
     * Reads the file from the beginning using
     * an AsyncFileChannel with a ByteBuffer of
     * the specified bufferSize capacity.
     * It automatically closes the underlying AsyncFileChannel
     * when read is complete.
     */
    public static CompletableFuture<String> readAll(Path file, int bufferSize) {
        return readAllBytes(file, bufferSize)
                .thenApply(bytes -> new String(bytes, UTF_8));
    }

    /**
     * Reads all bytes from the beginning of the file using an AsyncFileChannel
     * with a ByteBuffer of {@link AbstractAsyncFileReaderLines#BUFFER_SIZE BUFFER_SIZE} capacity.
     */
    public static CompletableFuture<byte[]> readAllBytes(Path file) {
        return readAllBytes(file, AbstractAsyncFileReaderLines.BUFFER_SIZE);
    }

    /**
     * Reads all bytes from the beginning of the file
     * using an AsyncFileChannel with a ByteBuffer of
     * the specified bufferSize capacity.
     */
    public static CompletableFuture<byte[]> readAllBytes(Path file, int bufferSize, StandardOpenOption... options) {
        try {
            ByteBuffer buffer = ByteBuffer.allocate(bufferSize);
            ByteArrayOutputStream out = new ByteArrayOutputStream();
            AsynchronousFileChannel asyncFile = open(file, options);
            CompletableFuture<byte[]> bytes = AsyncFileReaderBytes
                    .readAllBytes(asyncFile, buffer, 0, out)
                    .thenApply(position -> out.toByteArray());
            /*
             * Deliberately chained in this way.
             * Code smell: if closeAfc throws an Exception it will be lost!
             */
            bytes.whenCompleteAsync((pos, ex) -> closeAfc(asyncFile));
            return bytes;
        } catch (IOException e) {
            return CompletableFuture.failedFuture(e);
        }
    }

    /**
     * Writes bytes to a file.
     * The options parameter specifies how the file is created or opened.
     * All bytes in the byte array are written to the file.
     * The method ensures that the file is closed when all bytes have been
     * written (or an I/O error or other runtime exception is thrown).
     * Returns a CompletableFuture with the final file index
     * after the completion of the corresponding write operation.
     * If an I/O error occurs then it may complete the resulting CompletableFuture
     * exceptionally.
     */
    public static CompletableFuture<Integer> writeBytes(Path path, byte[] bytes) {
        return writeBytes(path, bytes, StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE);
    }

    /**
     * Writes bytes to a file.
     * The options parameter specifies how the file is created or opened.
     * All bytes in the byte array are written to the file.
     * The method ensures that the underlying {@code AsynchronousFileChannel}
     * is closed when all bytes have been written (or an I/O error or any other
     * runtime exception is thrown).
     * Returns a {@code CompletableFuture} with the final file index
     * after the completion of the corresponding write operation.
     * If an I/O error occurs then it may complete the resulting CompletableFuture
     * exceptionally.
     */
    public static CompletableFuture<Integer> writeBytes(Path path, byte[] bytes, StandardOpenOption... options) {
        try (AsyncFileWriter writer = new AsyncFileWriter(path, options)) {
            writer.write(bytes);
            // The call to writer.close() is asynchronous and will chain
            // a continuation to close the AsynchronousFileChannel only after completion.
            return writer.getPosition();
        } catch (IOException e) {
            return CompletableFuture.failedFuture(e);
        }
    }

    /**
     * Write lines of text to a file. Each line is a char sequence and
     * is written to the file in sequence with each line terminated by
     * the platform's line separator, as defined by the system property
     * line.separator.
     * Returns a CompletableFuture with the final file index
     * after the completion of the corresponding write operation.
     */
    public static CompletableFuture<Integer> write(Path path, Iterable<? extends CharSequence> lines) {
        return write(path, lines, StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE);
    }

    /**
     * Write lines of text to a file. Each line is a char sequence and
     * is written to the file in sequence with each line terminated by
     * the platform's line separator, as defined by the system property
     * line.separator.
     * Returns a {@code CompletableFuture} with the final file index
     * after the completion of the corresponding write operation.
     */
    public static CompletableFuture<Integer> write(Path path, Iterable<? extends CharSequence> lines, StandardOpenOption... options) {
        try (AsyncFileWriter writer = new AsyncFileWriter(path, options)) {
            lines.forEach(writer::writeLine);
            // The call to writer.close() is asynchronous and will chain
            // a continuation to close the AsynchronousFileChannel only after completion.
            return writer.getPosition();
        } catch (Exception e) {
            return CompletableFuture.failedFuture(e);
        }
    }

    static void closeAfc(AsynchronousFileChannel asyncFile) {
        try {
            asyncFile.close();
        } catch (IOException e) {
            throw new UncheckedIOException(e);
        }
    }
}
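A short, hypothetical round trip through the static facade above (the path is a placeholder and must not exist yet, since write() opens with CREATE_NEW): write a few lines, read them back as one string, and obtain a line publisher.

import org.reactivestreams.Publisher;
import org.xbib.event.async.AsyncFiles;

import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;

public class AsyncFilesExample {
    public static void main(String[] args) {
        Path path = Paths.get("/tmp/async-files-demo.txt"); // placeholder path, must not exist yet
        AsyncFiles.write(path, List.of("alpha", "beta", "gamma")).join();
        String content = AsyncFiles.readAll(path).join();
        System.out.println(content);
        Publisher<String> lines = AsyncFiles.lines(path);
        // a Subscriber such as the sketch after AsyncFileReaderLines could be attached to `lines` here
    }
}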
28
src/main/java/org/xbib/event/async/EmptySubscriber.java
Normal file
@@ -0,0 +1,28 @@
package org.xbib.event.async;

import org.reactivestreams.Subscription;

/**
 * For subscribers pipeline composition.
 * This should be the leaf of the pipeline.
 * This avoids having to check if next is null.
 *
 * @param <T> the type of element signalled
 */
public class EmptySubscriber<T> implements org.reactivestreams.Subscriber<T> {
    @Override
    public void onSubscribe(Subscription subscription) {
    }

    @Override
    public void onNext(T t) {
    }

    @Override
    public void onError(Throwable throwable) {
    }

    @Override
    public void onComplete() {
    }
}
47
src/main/java/org/xbib/event/async/SubscriberBuilder.java
Normal file
@@ -0,0 +1,47 @@
package org.xbib.event.async;

import org.reactivestreams.Subscriber;
import org.reactivestreams.Subscription;

import java.util.function.Consumer;

public class SubscriberBuilder<T> implements Subscriber<T> {

    private final Subscriber<T> sub;

    public SubscriberBuilder(Subscriber<T> sub) {
        this.sub = sub;
    }

    public SubscriberBuilder<T> doOnSubscribe(Consumer<Subscription> cons) {
        return new AddOnSubscribe<>(cons, this);
    }

    public SubscriberBuilder<T> doOnError(Consumer<Throwable> cons) {
        return new AddOnError<>(cons, this);
    }

    public SubscriberBuilder<T> doOnComplete(Runnable action) {
        return new AddOnComplete<>(action, this);
    }

    @Override
    public void onSubscribe(Subscription subscription) {
        sub.onSubscribe(subscription);
    }

    @Override
    public void onNext(T item) {
        sub.onNext(item);
    }

    @Override
    public void onError(Throwable throwable) {
        sub.onError(throwable);
    }

    @Override
    public void onComplete() {
        sub.onComplete();
    }
}
13
src/main/java/org/xbib/event/async/Subscribers.java
Normal file
@@ -0,0 +1,13 @@
package org.xbib.event.async;

import java.util.function.Consumer;

public class Subscribers {

    private Subscribers() {
    }

    public static <T> SubscriberBuilder<T> doOnNext(Consumer<T> cons) {
        return new AddOnNext<>(cons);
    }
}
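The builder above composes small subscriber decorators (AddOnNext, AddOnSubscribe and friends, defined elsewhere in this commit). A hypothetical composition could look like the following sketch; it assumes the decorators simply delegate, and the doOnSubscribe step requests unbounded demand since otherwise nothing would flow.

import org.xbib.event.async.AsyncFiles;
import org.xbib.event.async.Subscribers;

public class SubscribersExample {
    public static void main(String[] args) {
        AsyncFiles.lines("/tmp/sample.txt") // placeholder path
                .subscribe(Subscribers.<String>doOnNext(System.out::println)
                        .doOnSubscribe(subscription -> subscription.request(Long.MAX_VALUE)) // unbounded demand
                        .doOnError(Throwable::printStackTrace)
                        .doOnComplete(() -> System.out.println("done")));
    }
}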
12
src/main/java/org/xbib/event/clock/ClockEvent.java
Normal file
@@ -0,0 +1,12 @@
package org.xbib.event.clock;

import org.xbib.event.Event;

import java.time.Instant;

public interface ClockEvent extends Event {

    void setInstant(Instant instant);

    Instant getInstant();
}
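The ClockEventService further below instantiates the configured event class reflectively through its no-args constructor, so a concrete implementation of this interface is needed. A minimal, hypothetical one could look like this sketch; the setKey/getKey/setMap/getMap members are assumed to come from the Event interface defined elsewhere in this commit, mirroring QueueEvent below.

package org.xbib.event.clock;

import java.time.Instant;
import java.util.Map;

public class SimpleClockEvent implements ClockEvent {

    private String key;

    private Map<String, Object> map;

    private Instant instant;

    @Override
    public void setKey(String key) {
        this.key = key;
    }

    @Override
    public String getKey() {
        return key;
    }

    @Override
    public void setMap(Map<String, Object> map) {
        this.map = map;
    }

    @Override
    public Map<String, Object> getMap() {
        return map;
    }

    @Override
    public void setInstant(Instant instant) {
        this.instant = instant;
    }

    @Override
    public Instant getInstant() {
        return instant;
    }
}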
18
src/main/java/org/xbib/event/clock/ClockEventConsumer.java
Normal file
@@ -0,0 +1,18 @@
package org.xbib.event.clock;

import com.google.common.eventbus.AllowConcurrentEvents;
import com.google.common.eventbus.Subscribe;
import org.xbib.event.EventConsumer;

import java.util.logging.Logger;

public class ClockEventConsumer implements EventConsumer {

    private static final Logger logger = Logger.getLogger(ClockEventConsumer.class.getName());

    @Subscribe
    @AllowConcurrentEvents
    void onEvent(ClockEvent event) {
        logger.info("received demo clock event, instant = " + event.getInstant());
    }
}
61
src/main/java/org/xbib/event/clock/ClockEventManager.java
Normal file
@@ -0,0 +1,61 @@
package org.xbib.event.clock;

import com.google.common.eventbus.EventBus;
import org.xbib.settings.Settings;
import org.xbib.time.schedule.CronExpression;
import org.xbib.time.schedule.CronSchedule;

import java.io.Closeable;
import java.io.IOException;
import java.util.Map;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ThreadFactory;
import java.util.logging.Logger;

public class ClockEventManager implements Closeable {

    private static final Logger logger = Logger.getLogger(ClockEventManager.class.getName());

    private final CronSchedule<Integer> cronSchedule;

    public ClockEventManager(Settings settings,
                             EventBus eventBus,
                             ClassLoader classLoader) {
        ThreadFactory threadFactory = new ThreadFactory() {
            int n = 1;

            @Override
            public Thread newThread(Runnable r) {
                return new Thread(r, "malva-clockevent-" + (n++));
            }
        };
        ScheduledExecutorService executorService =
                Executors.newScheduledThreadPool(settings.getAsInt("pool.size", 2), threadFactory);
        this.cronSchedule = new CronSchedule<>(executorService);
        for (Map.Entry<String, Settings> cronjobs : settings.getGroups("clock").entrySet()) {
            Settings entrySettings = cronjobs.getValue();
            if (entrySettings.getAsBoolean("enabled", true)) {
                String entry = entrySettings.get("entry");
                if (entry != null) {
                    String className = entrySettings.get("class", ClockEvent.class.getName());
                    try {
                        @SuppressWarnings("unchecked")
                        Class<? extends ClockEvent> eventClass = (Class<? extends ClockEvent>) classLoader.loadClass(className);
                        cronSchedule.add(className, CronExpression.parse(entry), new ClockEventService(eventBus, eventClass));
                        logger.info("cron job " + cronjobs.getKey() +
                                " scheduled on " + entry +
                                ", event class " + className);
                    } catch (Exception e) {
                        logger.warning("unable to schedule cron job " + cronjobs.getKey() + ", reason " + e.getMessage());
                    }
                }
            }
        }
        cronSchedule.start();
    }

    @Override
    public void close() throws IOException {
        cronSchedule.close();
    }
}
37
src/main/java/org/xbib/event/clock/ClockEventService.java
Normal file
@@ -0,0 +1,37 @@
package org.xbib.event.clock;

import com.google.common.eventbus.EventBus;

import java.time.Instant;
import java.util.concurrent.Callable;
import java.util.logging.Level;
import java.util.logging.Logger;

public class ClockEventService implements Callable<Integer> {

    private static final Logger logger = Logger.getLogger(ClockEventService.class.getName());

    private final EventBus eventBus;

    private final Class<? extends ClockEvent> eventClass;

    public ClockEventService(EventBus eventBus,
                             Class<? extends ClockEvent> eventClass) {
        this.eventBus = eventBus;
        this.eventClass = eventClass;
    }

    @Override
    public Integer call() throws Exception {
        try {
            logger.log(Level.FINE, "posting clock event " + eventClass.getName());
            ClockEvent clockEvent = eventClass.getDeclaredConstructor().newInstance();
            clockEvent.setInstant(Instant.now());
            eventBus.post(clockEvent);
            return 0;
        } catch (Throwable t) {
            logger.log(Level.WARNING, t.getMessage(), t);
            return 1;
        }
    }
}
@@ -0,0 +1,80 @@
package org.xbib.event.persistence;

import java.io.IOException;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

@SuppressWarnings("serial")
public class AbstractPersistenceStore extends LinkedHashMap<String, Object>
        implements PersistenceStore<String, Object> {

    private final Durability durability;

    private final Codec<Map<String, Object>> codec;

    private final String storeName;

    public AbstractPersistenceStore(Codec<Map<String, Object>> codec,
                                    Durability durability,
                                    String storeName) {
        super();
        this.codec = codec;
        this.durability = durability;
        this.storeName = storeName;
    }

    @Override
    public Durability getDurability() {
        return durability;
    }

    @Override
    public Codec<Map<String, Object>> getCodec() {
        return codec;
    }

    @Override
    public void load() throws IOException {
        clear();
        Map<String, Object> map = codec.read(storeName);
        if (map != null) {
            putAll(map);
        }
    }

    @Override
    public void begin() {
        // ignore
    }

    @Override
    public void commit() throws IOException {
        codec.write(storeName, this);
    }

    @Override
    public void rollback() {
        // ignore
    }

    @SuppressWarnings("unchecked")
    @Override
    public void insert(String key, Object value) throws IOException {
        putIfAbsent(key, new ArrayList<>());
        List<Object> list = (List<Object>) get(key);
        list.add(value);
        put(key, list);
        commit();
    }

    @SuppressWarnings("unchecked")
    void remove(String key, Object value) throws IOException {
        putIfAbsent(key, new ArrayList<>());
        List<Object> list = (List<Object>) get(key);
        list.remove(value);
        put(key, list);
        commit();
    }
}
10
src/main/java/org/xbib/event/persistence/Codec.java
Normal file
@@ -0,0 +1,10 @@
package org.xbib.event.persistence;

import java.io.IOException;

public interface Codec<D> {

    D read(String key) throws IOException;

    void write(String key, D data) throws IOException;
}
6
src/main/java/org/xbib/event/persistence/Durability.java
Normal file
@@ -0,0 +1,6 @@
package org.xbib.event.persistence;

public enum Durability {

    VOLATILE, EXPLICIT, CONTAINER_LIFETIME, APPLICATION_LIFETIME, PERMANENT
}
54
src/main/java/org/xbib/event/persistence/FileJsonCodec.java
Normal file
@@ -0,0 +1,54 @@
package org.xbib.event.persistence;

import org.xbib.datastructures.json.tiny.Json;

import java.io.IOException;
import java.io.Reader;
import java.io.Writer;
import java.net.URI;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Map;
import java.util.concurrent.locks.ReentrantReadWriteLock;

public class FileJsonCodec implements Codec<Map<String, Object>> {

    private final URI uri;

    private final ReentrantReadWriteLock readWriteLock;

    public FileJsonCodec(URI uri) {
        this.uri = uri;
        this.readWriteLock = new ReentrantReadWriteLock();
    }

    @Override
    public Map<String, Object> read(String key) throws IOException {
        try {
            readWriteLock.readLock().lock();
            Path p = openOrCreate(key);
            if (!Files.exists(p)) {
                return null;
            }
            // close the reader when done to avoid leaking the file handle
            try (Reader reader = Files.newBufferedReader(p)) {
                return Json.toMap(reader);
            }
        } finally {
            readWriteLock.readLock().unlock();
        }
    }

    @Override
    public void write(String key, Map<String, Object> data) throws IOException {
        try {
            readWriteLock.writeLock().lock();
            Path p = openOrCreate(key);
            try (Writer writer = Files.newBufferedWriter(p)) {
                writer.write(Json.toString(data));
            }
        } finally {
            readWriteLock.writeLock().unlock();
        }
    }

    private Path openOrCreate(String key) throws IOException {
        Path path = Paths.get(uri);
        Files.createDirectories(path);
        return path.resolve(key);
    }
}
@@ -0,0 +1,24 @@
package org.xbib.event.persistence;

import org.xbib.settings.Settings;

import java.net.URI;
import java.util.Map;

@SuppressWarnings("serial")
public class FilePersistenceStore extends AbstractPersistenceStore {

    public FilePersistenceStore(Settings settings, String name) {
        this(URI.create(settings.get("store.uri", "file:///var/tmp/malva/store")),
                Durability.valueOf(settings.get("store.durability", "PERMANENT")),
                settings.get("store.name", name));
    }

    public FilePersistenceStore(URI uri, Durability durability, String storeName) {
        this(new FileJsonCodec(uri), durability, storeName);
    }

    public FilePersistenceStore(Codec<Map<String, Object>> codec, Durability durability, String storeName) {
        super(codec, durability, storeName);
    }
}
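A small, hypothetical usage sketch of the file-backed store above; the URI and store name are placeholders. insert() commits immediately, and load() restores the map from the JSON file on the next start.

import org.xbib.event.persistence.Durability;
import org.xbib.event.persistence.FilePersistenceStore;

import java.io.IOException;
import java.net.URI;

public class FilePersistenceStoreExample {
    public static void main(String[] args) throws IOException {
        FilePersistenceStore store = new FilePersistenceStore(
                URI.create("file:///var/tmp/demo/store"), // placeholder location
                Durability.PERMANENT,
                "jobs.json");                             // placeholder store name
        store.load();                      // restore previously committed state, if any
        store.insert("pending", "job-42"); // appends to the list under "pending" and commits
        System.out.println(store.get("pending"));
    }
}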
@@ -0,0 +1,21 @@
package org.xbib.event.persistence;

import java.io.IOException;
import java.util.Map;

public interface PersistenceStore<K, V> extends Map<K, V> {

    Durability getDurability();

    Codec<Map<K, V>> getCodec();

    void insert(K k, V v) throws IOException;

    void load() throws IOException;

    void begin();

    void commit() throws IOException;

    void rollback();
}
38
src/main/java/org/xbib/event/queue/QueueEvent.java
Normal file
@@ -0,0 +1,38 @@
package org.xbib.event.queue;

import org.xbib.event.Event;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.nio.file.attribute.FileTime;
import java.time.Instant;
import java.util.Map;

public class QueueEvent implements Event {

    private String key;

    private Map<String, Object> map;

    @Override
    public void setKey(String key) {
        this.key = key;
    }

    @Override
    public String getKey() {
        return key;
    }

    @Override
    public void setMap(Map<String, Object> map) {
        this.map = map;
    }

    @Override
    public Map<String, Object> getMap() {
        return map;
    }
}
@@ -0,0 +1,61 @@
package org.xbib.event.queue.path.simple;

import org.xbib.event.queue.QueueEvent;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.nio.file.attribute.FileTime;
import java.time.Instant;

public class PathQueueEvent extends QueueEvent {

    static final String INCOMING = "incoming";

    static final String SUCCESS = "success";

    static final String FAIL = "fail";

    private Path path;

    private Path file;

    private String suffix;

    public void setFile(Path file) {
        this.file = file;
    }

    public Path getFile() {
        return file;
    }

    public void setPath(Path path) {
        this.path = path;
    }

    public Path getPath() {
        return path;
    }

    public void setSuffix(String suffix) {
        this.suffix = suffix;
    }

    public String getSuffix() {
        return suffix;
    }

    public void success() throws IOException {
        Files.setLastModifiedTime(file, FileTime.from(Instant.now()));
        Files.move(file, path.resolve(SUCCESS).resolve(file.getFileName()).toAbsolutePath(),
                StandardCopyOption.REPLACE_EXISTING);
    }

    public void fail() throws IOException {
        Files.setLastModifiedTime(file, FileTime.from(Instant.now()));
        Files.move(file, path.resolve(FAIL).resolve(file.getFileName()).toAbsolutePath(),
                StandardCopyOption.REPLACE_EXISTING);
    }
}
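A hypothetical Guava EventBus consumer for this event type, mirroring the ClockEventConsumer above; it would be registered on the same EventBus that the PathQueueEventManager below posts to, and it moves the incoming file to the success or fail directory depending on the outcome of the processing step.

import com.google.common.eventbus.AllowConcurrentEvents;
import com.google.common.eventbus.Subscribe;
import org.xbib.event.EventConsumer;
import org.xbib.event.queue.path.simple.PathQueueEvent;

import java.io.IOException;
import java.util.logging.Level;
import java.util.logging.Logger;

public class PathQueueEventConsumer implements EventConsumer {

    private static final Logger logger = Logger.getLogger(PathQueueEventConsumer.class.getName());

    @Subscribe
    @AllowConcurrentEvents
    void onEvent(PathQueueEvent event) throws IOException {
        try {
            logger.log(Level.INFO, "processing " + event.getKey() + " " + event.getMap());
            event.success(); // move the file to the success directory
        } catch (Exception e) {
            logger.log(Level.WARNING, e.getMessage(), e);
            event.fail();    // move the file to the fail directory
        }
    }
}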
@@ -0,0 +1,151 @@
package org.xbib.event.queue.path.simple;

import com.google.common.eventbus.EventBus;
import org.xbib.datastructures.api.TimeValue;
import org.xbib.datastructures.json.tiny.Json;
import org.xbib.settings.Settings;

import java.io.IOException;
import java.io.Writer;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.time.Instant;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.stream.Stream;

public class PathQueueEventManager {

    private static final Logger logger = Logger.getLogger(PathQueueEventManager.class.getName());

    private final Settings settings;

    private final Path path;

    private final Map<Path, PathQueueEventService> pathEventServiceMap;

    @SuppressWarnings("unchecked")
    public PathQueueEventManager(Settings settings,
                                 EventBus eventBus,
                                 ClassLoader classLoader) {
        this.settings = settings;
        this.pathEventServiceMap = new LinkedHashMap<>();
        this.path = Paths.get(settings.get("path", "/var/tmp/default/queues"));
        for (Map.Entry<String, Settings> entry : settings.getGroups("queues").entrySet()) {
            String name = entry.getKey();
            Settings definition = entry.getValue();
            String eventClassName = definition.get("class");
            if (eventClassName == null) {
                throw new IllegalArgumentException("no path event class name given for path event manager at " + path);
            }
            try {
                Class<? extends PathQueueEvent> eventClass = (Class<? extends PathQueueEvent>) classLoader.loadClass(eventClassName);
                Path p = path.resolve(name);
                if (!Files.exists(p)) {
                    logger.log(Level.INFO, "creating queue " + name + " at " + p);
                    Files.createDirectories(p);
                }
                for (String string : List.of(PathQueueEvent.INCOMING, PathQueueEvent.SUCCESS, PathQueueEvent.FAIL)) {
                    Path dir = p.resolve(string);
                    if (!Files.exists(dir)) {
                        logger.log(Level.INFO, "creating queue " + name + " dir = " + dir);
                        Files.createDirectories(dir);
                    }
                }
                if (!pathEventServiceMap.containsKey(p)) {
                    int maxFileSize = settings.getAsInt("maxfilesize", 10 * 1024 * 1024); // max. 10 MB
                    logger.log(Level.INFO, "creating path event service on " + p + " with class " + eventClassName + " max file size = " + maxFileSize);
                    PathQueueEventService pathEventService = new PathQueueEventService(eventBus, p, maxFileSize, eventClass);
                    pathEventServiceMap.put(p, pathEventService);
                }
            } catch (Exception e) {
                logger.log(Level.WARNING, "unable to create path event service " + eventClassName + " reason " + e.getMessage());
            }
        }
        logger.log(Level.INFO, pathEventServiceMap.size() + " path event services initialized");
    }

    public boolean put(String queue, String key, Map<String, Object> map) throws IOException {
        String keyFileName = key + ".json";
        Path queuePath = path.resolve(queue);
        if (Files.exists(queuePath.resolve(PathQueueEvent.INCOMING).resolve(keyFileName)) ||
                Files.exists(queuePath.resolve(PathQueueEvent.SUCCESS).resolve(keyFileName))) {
            logger.log(Level.WARNING, "key " + key + " already exists");
            return false;
        }
        Path file = queuePath.resolve(PathQueueEvent.INCOMING).resolve(keyFileName);
        try (Writer writer = Files.newBufferedWriter(file)) {
            writer.write(Json.toString(map));
        }
        pathEventServiceMap.get(queuePath).postEvent(key, file);
        purge(queue);
        return true;
    }

    public boolean put(String queue, String key, String value) throws IOException {
        String keyFileName = key + ".txt";
        Path queuePath = path.resolve(queue);
        if (Files.exists(queuePath.resolve(PathQueueEvent.INCOMING).resolve(keyFileName)) ||
                Files.exists(queuePath.resolve(PathQueueEvent.SUCCESS).resolve(keyFileName))) {
            logger.log(Level.WARNING, "key " + key + " already exists");
            return false;
        }
        Path file = queuePath.resolve(PathQueueEvent.INCOMING).resolve(keyFileName);
        try (Writer writer = Files.newBufferedWriter(file)) {
            writer.write(value);
        }
        pathEventServiceMap.get(queuePath).postEvent(key, file);
        purge(queue);
        return true;
    }

    public boolean exists(String queue, String key) {
        String keyFileName = key + ".json";
        Path queuePath = path.resolve(queue);
        return Files.exists(queuePath.resolve(PathQueueEvent.INCOMING).resolve(keyFileName)) ||
                Files.exists(queuePath.resolve(PathQueueEvent.SUCCESS).resolve(keyFileName));
    }

    public long sizeOfIncoming(String queue) throws IOException {
        Path queuePath = path.resolve(queue);
        Path incomingPath = queuePath.resolve(PathQueueEvent.INCOMING);
        return sizeOf(incomingPath);
    }

    public long sizeOfSuccess(String queue) throws IOException {
        Path queuePath = path.resolve(queue);
        Path successPath = queuePath.resolve(PathQueueEvent.SUCCESS);
        return sizeOf(successPath);
    }

    public static long sizeOf(Path path) throws IOException {
        Stream<Path> stream = Files.find(path, 1, (p, basicFileAttributes) -> Files.isRegularFile(p));
        return stream.count();
    }

    public void purge(String queue) throws IOException {
        List<String> purgeQueues = Arrays.asList(settings.getAsArray("purge"));
        if (purgeQueues.contains(queue)) {
            TimeValue lifetime = settings.getAsTime("lifetime", TimeValue.timeValueHours(72));
            Path queuePath = path.resolve(queue);
            Files.walkFileTree(queuePath, new SimpleFileVisitor<Path>() {
                @Override
                public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                    if (Instant.now().minusMillis(attrs.lastModifiedTime().toMillis()).toEpochMilli() > lifetime.millis()) {
                        logger.log(Level.WARNING, "lifetime " + lifetime + " exceeded, deleting " + file);
                        Files.delete(file);
                    }
                    return FileVisitResult.CONTINUE;
                }
            });
        }
    }
}
@@ -0,0 +1,102 @@
package org.xbib.event.queue.path.simple;

import com.google.common.eventbus.EventBus;
import org.xbib.datastructures.json.tiny.Json;

import java.io.IOException;
import java.io.Reader;
import java.lang.reflect.InvocationTargetException;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.LinkedHashMap;
import java.util.Objects;
import java.util.logging.Level;
import java.util.logging.Logger;

import static org.xbib.event.queue.path.simple.PathQueueEvent.INCOMING;

public class PathQueueEventService {

    private static final Logger logger = Logger.getLogger(PathQueueEventService.class.getName());

    private final EventBus eventBus;

    private final Path path;

    private final int maxFileSize;

    private final Class<? extends PathQueueEvent> pathEventClass;

    private int eventCount;

    public PathQueueEventService(EventBus eventBus,
                                 Path path,
                                 int maxFileSize,
                                 Class<? extends PathQueueEvent> pathEventClass) throws IOException {
        this.eventBus = eventBus;
        this.path = path;
        this.maxFileSize = maxFileSize;
        this.pathEventClass = pathEventClass;
        drainIncoming();
        logger.log(Level.INFO, "at startup: watch service created for incoming files at " + path +
                ", max file size = " + maxFileSize);
    }

    private void drainIncoming() throws IOException {
        try (DirectoryStream<Path> directoryStream = Files.newDirectoryStream(path.resolve(INCOMING))) {
            directoryStream.forEach(path -> {
                if (Files.isRegularFile(path)) {
                    String key = path.getFileName().toString();
                    logger.log(Level.INFO, "while draining found key=" + key + " path=" + path);
                    try {
                        postEvent(key, path);
                    } catch (IOException e) {
                        logger.log(Level.SEVERE, e.getMessage(), e);
                    }
                }
            });
        }
    }

    public void postEvent(String key, Path file) throws IOException {
        String base = getBase(key);
        String suffix = getSuffix(key);
        PathQueueEvent event;
        try {
            event = pathEventClass.getConstructor().newInstance();
        } catch (InstantiationException | IllegalAccessException | InvocationTargetException | NoSuchMethodException e) {
            logger.log(Level.SEVERE, e.getMessage(), e);
            return;
        }
        event.setKey(base);
        event.setFile(file);
        event.setSuffix(suffix);
        event.setMap(new LinkedHashMap<>());
        event.setPath(path); // remember directory for fail() and success()
        long fileSize = Files.size(file);
        if (fileSize < maxFileSize && Objects.equals(suffix, "json")) {
            // we read only small JSON files
            try (Reader reader = Files.newBufferedReader(file)) {
                event.setMap(Json.toMap(reader));
            } catch (Exception e) {
                logger.log(Level.WARNING, e.getMessage(), e);
            }
            logger.log(Level.INFO, "posting new event =" + event.getClass() + " " + event);
            eventBus.post(event);
            eventCount++;
        } else {
            logger.log(Level.SEVERE, "skipping post event because incoming file is too large, max file size = " + maxFileSize);
        }
    }

    public static String getBase(String name) {
        int pos = name.lastIndexOf('.');
        return pos >= 0 ? name.substring(0, pos) : name;
    }

    public static String getSuffix(String name) {
        int pos = name.lastIndexOf('.');
        return pos >= 0 ? name.substring(pos + 1) : null;
    }
}
@@ -0,0 +1,61 @@
package org.xbib.event.queue.path.watch;

import org.xbib.event.queue.QueueEvent;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.nio.file.attribute.FileTime;
import java.time.Instant;

public class PathQueueEvent extends QueueEvent {

    static final String INCOMING = "incoming";

    static final String SUCCESS = "success";

    static final String FAIL = "fail";

    private Path path;

    private Path file;

    private String suffix;

    public void setFile(Path file) {
        this.file = file;
    }

    public Path getFile() {
        return file;
    }

    public void setPath(Path path) {
        this.path = path;
    }

    public Path getPath() {
        return path;
    }

    public void setSuffix(String suffix) {
        this.suffix = suffix;
    }

    public String getSuffix() {
        return suffix;
    }

    public void success() throws IOException {
        Files.setLastModifiedTime(file, FileTime.from(Instant.now()));
        Files.move(file, path.resolve(SUCCESS).resolve(file.getFileName()).toAbsolutePath(),
                StandardCopyOption.REPLACE_EXISTING);
    }

    public void fail() throws IOException {
        Files.setLastModifiedTime(file, FileTime.from(Instant.now()));
        Files.move(file, path.resolve(FAIL).resolve(file.getFileName()).toAbsolutePath(),
                StandardCopyOption.REPLACE_EXISTING);
    }
}
@@ -0,0 +1,168 @@
package org.xbib.event.queue.path.watch;

import com.google.common.eventbus.EventBus;
import org.xbib.datastructures.api.TimeValue;
import org.xbib.datastructures.json.tiny.Json;
import org.xbib.settings.Settings;

import java.io.Closeable;
import java.io.IOException;
import java.io.Writer;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.stream.Stream;

public class PathQueueEventManager implements Closeable {

    private static final Logger logger = Logger.getLogger(PathQueueEventManager.class.getName());

    private final Settings settings;

    private final Path path;

    private final List<Future<?>> futures;

    private final Map<Path, PathQueueEventService> pathEventServiceMap;

    @SuppressWarnings("unchecked")
    public PathQueueEventManager(Settings settings,
                                 EventBus eventBus,
                                 ExecutorService executorService,
                                 ClassLoader classLoader) {
        this.settings = settings;
        this.futures = new ArrayList<>();
        this.pathEventServiceMap = new LinkedHashMap<>();
        this.path = Paths.get(settings.get("path", "/var/tmp/default/queues"));
        for (Map.Entry<String, Settings> entry : settings.getGroups("queues").entrySet()) {
            String name = entry.getKey();
            Settings definition = entry.getValue();
            String eventClassName = definition.get("class");
            if (eventClassName == null) {
                throw new IllegalArgumentException("no path event class name given for path event manager at " + path);
            }
            try {
                Class<? extends PathQueueEvent> eventClass = (Class<? extends PathQueueEvent>) classLoader.loadClass(eventClassName);
                Path p = path.resolve(name);
                if (!Files.exists(p)) {
                    logger.log(Level.INFO, "creating queue " + name + " at " + p);
                    Files.createDirectories(p);
                }
                for (String string : List.of(PathQueueEvent.INCOMING, PathQueueEvent.SUCCESS, PathQueueEvent.FAIL)) {
                    Path dir = p.resolve(string);
                    if (!Files.exists(dir)) {
                        logger.log(Level.INFO, "creating queue " + name + " dir = " + dir);
                        Files.createDirectories(dir);
                    }
                }
                if (!pathEventServiceMap.containsKey(p)) {
                    int maxFileSize = settings.getAsInt("maxfilesize", 10 * 1024 * 1024); // max. 10 MB
                    logger.log(Level.INFO, "creating path event service on " + p + " with class " + eventClassName + " max file size = " + maxFileSize);
                    PathQueueEventService pathEventService = new PathQueueEventService(eventBus, p, maxFileSize, eventClass);
                    pathEventServiceMap.put(p, pathEventService);
                    futures.add(executorService.submit(pathEventService));
                }
            } catch (Exception e) {
                logger.log(Level.WARNING, "unable to create path event service " + eventClassName + " reason " + e.getMessage());
            }
        }
        logger.log(Level.INFO, pathEventServiceMap.size() + " path event services initialized");
    }

    public boolean put(String queue, String key, Map<String, Object> map) throws IOException {
        String keyFileName = key + ".json";
        Path queuePath = path.resolve(queue);
        if (Files.exists(queuePath.resolve(PathQueueEvent.INCOMING).resolve(keyFileName)) ||
                Files.exists(queuePath.resolve(PathQueueEvent.SUCCESS).resolve(keyFileName))) {
            logger.log(Level.WARNING, "key " + key + " already exists");
            return false;
        }
        try (Writer writer = Files.newBufferedWriter(queuePath.resolve(PathQueueEvent.INCOMING).resolve(keyFileName))) {
|
||||||
|
writer.write(Json.toString(map));
|
||||||
|
}
|
||||||
|
purge(queue);
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
public boolean put(String queue, String key, String value) throws IOException {
|
||||||
|
String keyFileName = key + ".txt";
|
||||||
|
Path queuePath = path.resolve(queue);
|
||||||
|
if (Files.exists(queuePath.resolve(PathQueueEvent.INCOMING).resolve(keyFileName)) ||
|
||||||
|
Files.exists(queuePath.resolve(PathQueueEvent.SUCCESS).resolve(keyFileName))) {
|
||||||
|
logger.log(Level.WARNING, "key " + key + " already exists");
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
try (Writer writer = Files.newBufferedWriter(queuePath.resolve(PathQueueEvent.INCOMING).resolve(keyFileName))) {
|
||||||
|
writer.write(value);
|
||||||
|
}
|
||||||
|
purge(queue);
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
public boolean exists(String queue, String key) {
|
||||||
|
String keyFileName = key + ".json";
|
||||||
|
Path queuePath = path.resolve(queue);
|
||||||
|
return Files.exists(queuePath.resolve(PathQueueEvent.INCOMING).resolve(keyFileName)) ||
|
||||||
|
Files.exists(queuePath.resolve(PathQueueEvent.SUCCESS).resolve(keyFileName));
|
||||||
|
}
|
||||||
|
|
||||||
|
public long sizeOfIncoming(String queue) throws IOException {
|
||||||
|
Path queuePath = path.resolve(queue);
|
||||||
|
Path incomingPath = queuePath.resolve(PathQueueEvent.INCOMING);
|
||||||
|
return sizeOf(incomingPath);
|
||||||
|
}
|
||||||
|
|
||||||
|
public long sizeOfSuccess(String queue) throws IOException {
|
||||||
|
Path queuePath = path.resolve(queue);
|
||||||
|
Path successPath = queuePath.resolve(PathQueueEvent.SUCCESS);
|
||||||
|
return sizeOf(successPath);
|
||||||
|
}
|
||||||
|
|
||||||
|
public static long sizeOf(Path path) throws IOException {
|
||||||
|
Stream<Path> stream = Files.find(path, 1, (p, basicFileAttributes) -> Files.isRegularFile(p));
|
||||||
|
return stream.count();
|
||||||
|
}
|
||||||
|
|
||||||
|
public void purge(String queue) throws IOException {
|
||||||
|
List<String> purgeQueues = Arrays.asList(settings.getAsArray("purge"));
|
||||||
|
if (purgeQueues.contains(queue)) {
|
||||||
|
TimeValue lifetime = settings.getAsTime("lifetime", TimeValue.timeValueHours(72));
|
||||||
|
Path queuePath = path.resolve(queue);
|
||||||
|
Files.walkFileTree(queuePath, new SimpleFileVisitor<Path>() {
|
||||||
|
@Override
|
||||||
|
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
|
||||||
|
if (Instant.now().minusMillis(attrs.lastModifiedTime().toMillis()).toEpochMilli() > lifetime.millis()) {
|
||||||
|
logger.log(Level.WARNING, "lifetime " + lifetime + " exceeded, deleting " + file);
|
||||||
|
Files.delete(file);
|
||||||
|
}
|
||||||
|
return FileVisitResult.CONTINUE;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void close() throws IOException {
|
||||||
|
logger.log(Level.INFO, "closing all path event services");
|
||||||
|
for (PathQueueEventService service : pathEventServiceMap.values()) {
|
||||||
|
logger.log(Level.INFO, "closing path event service " + service);
|
||||||
|
service.close();
|
||||||
|
}
|
||||||
|
for (Future<?> future : futures) {
|
||||||
|
future.cancel(true);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
|
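A minimal usage sketch for the manager, assuming a Settings instance whose "queues" group is configured elsewhere; the executor, the queue name "orders" and the payload keys are illustrative:

    ExecutorService executor = Executors.newCachedThreadPool();
    EventBus eventBus = new EventBus();
    PathQueueEventManager manager = new PathQueueEventManager(settings, eventBus, executor,
            Thread.currentThread().getContextClassLoader());
    // writes orders/incoming/order-4711.json and lets the watch service post the event
    boolean accepted = manager.put("orders", "order-4711", Map.of("customer", "acme", "amount", 42));
    if (!accepted) {
        // a file for this key already exists in incoming/ or success/
    }
    manager.close();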
@@ -0,0 +1,155 @@
package org.xbib.event.queue.path.watch;

import com.google.common.eventbus.EventBus;
import org.xbib.datastructures.json.tiny.Json;

import java.io.Closeable;
import java.io.IOException;
import java.io.Reader;
import java.lang.reflect.InvocationTargetException;
import java.nio.file.ClosedWatchServiceException;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardWatchEventKinds;
import java.nio.file.WatchEvent;
import java.nio.file.WatchKey;
import java.nio.file.WatchService;
import java.util.LinkedHashMap;
import java.util.Objects;
import java.util.concurrent.Callable;
import java.util.logging.Level;
import java.util.logging.Logger;

import static org.xbib.event.queue.path.watch.PathQueueEvent.INCOMING;

public class PathQueueEventService implements Callable<Integer>, Closeable {

    private static final Logger logger = Logger.getLogger(PathQueueEventService.class.getName());

    private final EventBus eventBus;

    private final Path path;

    private final int maxFileSize;

    private final Class<? extends PathQueueEvent> pathEventClass;

    private final WatchService watchService;

    private int eventCount;

    private volatile boolean keepWatching;

    public PathQueueEventService(EventBus eventBus,
                                 Path path,
                                 int maxFileSize,
                                 Class<? extends PathQueueEvent> pathEventClass) throws IOException {
        this.eventBus = eventBus;
        this.path = path;
        this.maxFileSize = maxFileSize;
        this.pathEventClass = pathEventClass;
        drainIncoming();
        this.watchService = path.getFileSystem().newWatchService();
        WatchEvent.Kind<?>[] kinds = new WatchEvent.Kind<?>[] { StandardWatchEventKinds.ENTRY_CREATE };
        WatchKey watchKey = path.resolve(INCOMING).register(watchService, kinds);
        keepWatching = true;
        logger.log(Level.INFO, "at startup: watch service created for incoming files at " + path +
                ", max file size = " + maxFileSize);
    }

    @Override
    public Integer call() {
        try {
            logger.log(Level.INFO, "watch service running on " + path.resolve(INCOMING));
            while (keepWatching && watchService != null) {
                WatchKey watchKey = watchService.take();
                logger.log(Level.FINE, "received a watch key " + watchKey);
                for (WatchEvent<?> watchEvent : watchKey.pollEvents()) {
                    WatchEvent.Kind<?> kind = watchEvent.kind();
                    if (kind == StandardWatchEventKinds.OVERFLOW) {
                        continue;
                    }
                    // we sleep here a bit, to give time to the OS to complete file writing
                    Thread.sleep(1000L);
                    @SuppressWarnings("unchecked")
                    WatchEvent<Path> pathWatchEvent = (WatchEvent<Path>) watchEvent;
                    String watchEventContext = pathWatchEvent.context().toString();
                    Path p = path.resolve(INCOMING).resolve(watchEventContext);
                    logger.log(Level.FINE, "watch event " + pathWatchEvent + " key=" + watchEventContext + " path=" + p);
                    postEvent(watchEventContext, p);
                }
                watchKey.reset();
            }
        } catch (ClosedWatchServiceException e) {
            logger.log(Level.FINE, "closed watch key");
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            logger.log(Level.FINE, "interrupted: " + e.getMessage());
        } catch (Exception e) {
            logger.log(Level.SEVERE, e.getMessage(), e);
        }
        return eventCount;
    }

    @Override
    public void close() throws IOException {
        keepWatching = false;
        if (watchService != null) {
            logger.log(Level.FINE, "closing watch service " + watchService);
            watchService.close();
        }
    }

    private void drainIncoming() throws IOException {
        try (DirectoryStream<Path> directoryStream = Files.newDirectoryStream(path.resolve(INCOMING))) {
            directoryStream.forEach(path -> {
                if (Files.isRegularFile(path)) {
                    String key = path.getFileName().toString();
                    logger.log(Level.INFO, "while draining found key=" + key + " path=" + path);
                    try {
                        postEvent(key, path);
                    } catch (IOException | NoSuchMethodException | InvocationTargetException | InstantiationException | IllegalAccessException e) {
                        logger.log(Level.SEVERE, e.getMessage(), e);
                    }
                }
            });
        }
    }

    private void postEvent(String key, Path file)
            throws NoSuchMethodException, InvocationTargetException, InstantiationException, IllegalAccessException, IOException {
        String base = getBase(key);
        String suffix = getSuffix(key);
        PathQueueEvent event = pathEventClass.getConstructor().newInstance();
        event.setKey(base);
        event.setFile(file);
        event.setSuffix(suffix);
        event.setMap(new LinkedHashMap<>());
        event.setPath(path); // remember directory for fail() and success()
        long fileSize = Files.size(file);
        if (fileSize < maxFileSize && Objects.equals(suffix, "json")) {
            // we read only small JSON files
            try (Reader reader = Files.newBufferedReader(file)) {
                event.setMap(Json.toMap(reader));
            } catch (Exception e) {
                logger.log(Level.WARNING, e.getMessage(), e);
            }
            logger.log(Level.INFO, "posting new event =" + event.getClass() + " " + event);
            eventBus.post(event);
            eventCount++;
        } else {
            logger.log(Level.SEVERE, "skipping post event because incoming file is too large, max file size = " + maxFileSize);
        }
    }

    public static String getBase(String name) {
        int pos = name.lastIndexOf('.');
        return pos >= 0 ? name.substring(0, pos) : name;
    }

    public static String getSuffix(String name) {
        int pos = name.lastIndexOf('.');
        return pos >= 0 ? name.substring(pos + 1) : null;
    }
}
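The service only posts events to the Guava EventBus; consuming them is done by subscribers registered on the same bus. A hedged sketch, where OrderQueueEvent stands in for a project-specific PathQueueEvent subclass (it needs a public no-arg constructor because the service instantiates it reflectively):

    import com.google.common.eventbus.Subscribe;
    import java.io.IOException;
    import java.nio.file.Path;

    public class OrderQueueListener {

        @Subscribe
        public void onOrder(OrderQueueEvent event) throws IOException {
            try {
                handle(event.getFile());   // hypothetical processing of the incoming file
                event.success();           // move the file from incoming/ to success/
            } catch (Exception e) {
                event.fail();              // move the file from incoming/ to fail/
            }
        }

        private void handle(Path file) {
            // ... application logic ...
        }
    }

    // typically registered once at startup: eventBus.register(new OrderQueueListener());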
12
src/main/java/org/xbib/event/timer/TimerEvent.java
Normal file
@@ -0,0 +1,12 @@
package org.xbib.event.timer;

import org.xbib.event.Event;

import java.time.Instant;

public interface TimerEvent extends Event {

    void setInstant(Instant instant);

    Instant getInstant();
}
90
src/main/java/org/xbib/event/timer/TimerEventManager.java
Normal file
@@ -0,0 +1,90 @@
package org.xbib.event.timer;

import com.google.common.eventbus.EventBus;
import org.xbib.event.persistence.FilePersistenceStore;
import org.xbib.event.persistence.PersistenceStore;
import org.xbib.settings.Settings;
import org.xbib.time.chronic.Chronic;
import org.xbib.time.chronic.Span;

import java.io.Closeable;
import java.io.IOException;
import java.text.ParseException;
import java.time.Instant;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;

public class TimerEventManager implements Closeable {

    private static final Logger logger = Logger.getLogger(TimerEventManager.class.getName());

    private final Map<String, TimerEventService> services;

    @SuppressWarnings("unchecked")
    public TimerEventManager(Settings settings,
                             EventBus eventBus,
                             ClassLoader classLoader,
                             ZoneId zoneId) {
        this.services = new LinkedHashMap<>();
        for (Map.Entry<String, Settings> entry : settings.getGroups("timer").entrySet()) {
            String name = entry.getKey();
            Settings timerSettings = entry.getValue();
            String className = timerSettings.get("class", TimerEvent.class.getName());
            try {
                Class<? extends TimerEvent> eventClass = (Class<? extends TimerEvent>) classLoader.loadClass(className);
                PersistenceStore<String, Object> persistenceStore = new FilePersistenceStore(timerSettings, name);
                services.put(name, new TimerEventService(eventBus, name, eventClass, zoneId, persistenceStore));
                logger.log(Level.INFO, "timer " + name + " active for timer event class " + className);
            } catch (Exception e) {
                logger.log(Level.WARNING, "unable to activate timer " + name + ", reason " + e.getMessage());
            }
        }
    }

    public boolean put(String key, String timeSpec, Map<String, Object> map) throws ParseException, IOException {
        if (services.containsKey(key)) {
            Span span = Chronic.parse(timeSpec);
            if (span != null) {
                ZonedDateTime zonedDateTime = span.getBeginCalendar();
                services.get(key).schedule(zonedDateTime.toInstant(), map);
                logger.log(Level.INFO, "scheduled to " + zonedDateTime);
            } else {
                logger.log(Level.INFO, "timer event key " + key + ": can not understand time spec " + timeSpec);
                return false;
            }
            return true;
        } else {
            logger.log(Level.SEVERE, "unknown timer event key: " + key);
        }
        return false;
    }

    public boolean put(String key, Instant instant, Map<String, Object> map) throws IOException {
        if (services.containsKey(key)) {
            services.get(key).schedule(instant, map);
            return true;
        } else {
            logger.log(Level.SEVERE, "unknown timer event key: " + key);
        }
        return false;
    }

    public void purge() {
        for (Map.Entry<String, TimerEventService> entry : services.entrySet()) {
            logger.log(Level.INFO, "purging timer " + entry.getKey());
            entry.getValue().purge();
        }
    }

    @Override
    public void close() throws IOException {
        for (Map.Entry<String, TimerEventService> entry : services.entrySet()) {
            logger.log(Level.INFO, "closing timer " + entry.getKey());
            entry.getValue().close();
        }
    }
}
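A brief usage sketch for the timer manager; the timer name "reminder" must match a group under the "timer" settings, and the natural-language time spec is only an assumption about what Chronic will parse. Note that schedule() adds a "scheduled" entry to the task map, so the map must be mutable:

    TimerEventManager timers = new TimerEventManager(settings, eventBus, classLoader, ZoneId.systemDefault());
    Map<String, Object> payload = new LinkedHashMap<>();
    payload.put("note", "check queue");
    // schedule by explicit instant
    timers.put("reminder", Instant.now().plus(Duration.ofHours(1)), payload);
    // or schedule by a time spec; returns false if the spec cannot be parsed
    timers.put("reminder", "tomorrow 9:00", new LinkedHashMap<>(Map.of("note", "daily report")));
    timers.close();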
131
src/main/java/org/xbib/event/timer/TimerEventService.java
Normal file
@@ -0,0 +1,131 @@
package org.xbib.event.timer;

import com.google.common.eventbus.EventBus;
import org.xbib.event.persistence.PersistenceStore;

import java.io.Closeable;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.time.Instant;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Timer;
import java.util.TimerTask;
import java.util.logging.Level;
import java.util.logging.Logger;

class TimerEventService implements Closeable {

    private static final Logger logger = Logger.getLogger(TimerEventService.class.getName());

    private final EventBus eventBus;

    private final Class<? extends TimerEvent> eventClass;

    private final ZoneId zoneId;

    private final PersistenceStore<String, Object> persistenceStore;

    private final Timer timer;

    public TimerEventService(EventBus eventBus,
                             String name,
                             Class<? extends TimerEvent> eventClass,
                             ZoneId zoneId,
                             PersistenceStore<String, Object> persistenceStore) throws IOException {
        this.eventBus = eventBus;
        this.eventClass = eventClass;
        this.zoneId = zoneId;
        this.persistenceStore = persistenceStore;
        this.timer = new Timer();
        int tasknum = reschedule();
        logger.log(Level.INFO, "timer event service " + name + " loaded and rescheduled, " + tasknum + " timer tasks");
    }

    void schedule(Instant instant, Map<String, Object> task) throws IOException {
        ZonedDateTime zonedDateTime = instant.atZone(zoneId);
        task.put("scheduled", zonedDateTime.format(DateTimeFormatter.ISO_DATE_TIME));
        TimerEventTask timerEventTask = new TimerEventTask(task);
        Date date = Date.from(instant);
        timer.schedule(timerEventTask, date);
        persistenceStore.insert("tasks", task);
        logger.log(Level.INFO, "new task " + task + " added, scheduled at " + date);
    }

    @SuppressWarnings("unchecked")
    int reschedule() throws IOException {
        logger.log(Level.INFO, "rescheduling timer events");
        persistenceStore.load();
        List<Map<String, Object>> tasks = (List<Map<String, Object>>) persistenceStore.getOrDefault("tasks", new ArrayList<>());
        persistenceStore.clear();
        persistenceStore.commit();
        for (Map<String, Object> task : tasks) {
            ZonedDateTime scheduledDate = ZonedDateTime.parse((String) task.get("scheduled"), DateTimeFormatter.ISO_DATE_TIME);
            if (scheduledDate.isBefore(ZonedDateTime.now())) {
                logger.log(Level.WARNING, "scheduled timer task " + task + " date already passed");
            } else {
                schedule(scheduledDate.toInstant(), task);
            }
        }
        tasks = (List<Map<String, Object>>) persistenceStore.getOrDefault("tasks", new ArrayList<>());
        logger.log(Level.INFO, "rescheduling timer events complete: " + tasks.size() + " events");
        return tasks.size();
    }

    @Override
    public void close() throws IOException {
        timer.cancel();
        persistenceStore.commit();
    }

    void purge() {
        timer.purge();
    }

    public class TimerEventTask extends TimerTask {

        private final Map<String, Object> map;

        public TimerEventTask(Map<String, Object> map) {
            this.map = map;
        }

        @Override
        public void run() {
            TimerEvent timerEvent;
            try {
                timerEvent = eventClass.getDeclaredConstructor().newInstance();
            } catch (InstantiationException | IllegalAccessException | InvocationTargetException | NoSuchMethodException e) {
                logger.log(Level.SEVERE, e.getMessage(), e);
                return;
            }
            timerEvent.setInstant(Instant.now());
            timerEvent.setMap(map);
            logger.log(Level.FINE, "posting timer event " + timerEvent.getClass().getName() + " map = " + map);
            eventBus.post(timerEvent);
            persistenceStore.remove("tasks", this);
            logger.log(Level.FINE, "persistence after remove: " + persistenceStore.get("tasks"));
        }

        @Override
        public String toString() {
            return map.toString();
        }

        @Override
        public int hashCode() {
            return map.hashCode();
        }

        @Override
        public boolean equals(Object object) {
            // compare by the underlying task map; the original compared the map against the whole object, which could never be equal
            return object instanceof TimerEventTask && Objects.equals(map, ((TimerEventTask) object).map);
        }
    }
}
19
src/main/java/org/xbib/event/yield/Advancer.java
Normal file
@@ -0,0 +1,19 @@
package org.xbib.event.yield;

/**
 * Sequential traverser with both an internal (bulk) and an individual step approach.
 */
public interface Advancer<T> {
    /**
     * An Advancer object without elements.
     */
    static <R> Advancer<R> empty() {
        return action -> false;
    }

    /**
     * If a remaining element exists, yields that element through
     * the given action.
     */
    boolean tryAdvance(Yield<? super T> yield);
}
121
src/main/java/org/xbib/event/yield/AsyncQuery.java
Normal file
@@ -0,0 +1,121 @@
package org.xbib.event.yield;

import org.xbib.event.yield.async.AsyncQueryDistinct;
import org.xbib.event.yield.async.AsyncQueryFilter;
import org.xbib.event.yield.async.AsyncQueryFlatMapConcat;
import org.xbib.event.yield.async.AsyncQueryFlatMapMerge;
import org.xbib.event.yield.async.AsyncQueryFork;
import org.xbib.event.yield.async.AsyncQueryMap;
import org.xbib.event.yield.async.AsyncQueryOf;
import org.xbib.event.yield.async.AsyncQueryOfIterator;
import org.xbib.event.yield.async.AsyncQueryOnNext;
import org.xbib.event.yield.async.AsyncQuerySkip;
import org.xbib.event.yield.async.AsyncQueryTakeWhile;

import java.util.Iterator;
import java.util.function.BiConsumer;
import java.util.function.Function;
import java.util.function.Predicate;

/**
 * An asynchronous sequence of elements supporting sequential operations.
 * Query operations are composed into a pipeline to perform computation.
 */
public abstract class AsyncQuery<T> implements AsyncTraverser<T> {

    /**
     * Returns an asynchronous sequential ordered query whose elements
     * are the specified values in data parameter.
     */
    @SuppressWarnings("unchecked")
    public static <U> AsyncQuery<U> of(U... data) {
        return new AsyncQueryOf<>(data);
    }

    /**
     * Returns an asynchronous sequential ordered query whose elements
     * are the specified values in the Iterator parameter.
     */
    public static <U> AsyncQuery<U> of(Iterator<U> iter) {
        return new AsyncQueryOfIterator<>(iter);
    }

    /**
     * Returns an asynchronous sequential ordered query whose elements
     * are the specified values in data parameter running on thread pool.
     */
    @SuppressWarnings("unchecked")
    public static <U> AsyncQuery<U> fork(U... data) {
        return new AsyncQueryFork<>(data);
    }

    /**
     * Returns a new asynchronous query emitting the same items of this query,
     * additionally performing the provided action on each element as elements are consumed
     * from the resulting query.
     */
    public final AsyncQuery<T> onNext(BiConsumer<? super T, ? super Throwable> action) {
        return new AsyncQueryOnNext<>(this, action);
    }

    /**
     * Returns a new asynchronous query consisting of the remaining elements of
     * this query after discarding the first {@code n} elements of the query.
     */
    public final AsyncQuery<T> skip(int n) {
        return new AsyncQuerySkip<>(this, n);
    }

    /**
     * Returns an asynchronous query consisting of the elements of this query that match
     * the given predicate.
     */
    public final AsyncQuery<T> filter(Predicate<? super T> p) {
        return new AsyncQueryFilter<>(this, p);
    }

    /**
     * Returns an asynchronous query consisting of the results of applying the given
     * function to the elements of this query.
     */
    public final <R> AsyncQuery<R> map(Function<? super T, ? extends R> mapper) {
        return new AsyncQueryMap<>(this, mapper);
    }

    /**
     * Returns a query consisting of the distinct elements (according to
     * {@link Object#equals(Object)}) of this query.
     */
    public final AsyncQuery<T> distinct() {
        return new AsyncQueryDistinct<>(this);
    }

    /**
     * Returns a query consisting of the longest prefix of elements taken from
     * this query that match the given predicate.
     */
    public final AsyncQuery<T> takeWhile(Predicate<? super T> predicate) {
        return new AsyncQueryTakeWhile<>(this, predicate);
    }

    /**
     * Returns an asynchronous query consisting of the results of replacing each element of
     * this query with the contents of a mapped query produced by applying
     * the provided mapping function to each element.
     * It waits for the inner flow to complete before starting to collect the next one.
     */
    public final <R> AsyncQuery<R> flatMapConcat(Function<? super T, ? extends AsyncQuery<? extends R>> mapper) {
        return new AsyncQueryFlatMapConcat<>(this, mapper);
    }

    public final <R> AsyncQuery<R> flatMapMerge(Function<? super T, ? extends AsyncQuery<? extends R>> mapper) {
        return new AsyncQueryFlatMapMerge<>(this, mapper);
    }

    public final void blockingSubscribe() {
        this
            .subscribe((item, err) -> {
            })
            .join(); // the CompletableFuture returned by subscribe() raises an exception on join() if the traversal failed
    }
}
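A short usage sketch of the asynchronous pipeline, built only from the operators defined above; the values are illustrative:

    AsyncQuery.of("alpha", "beta", "alpha", "gamma")
            .distinct()
            .filter(s -> s.length() > 4)
            .map(String::toUpperCase)
            .subscribe((item, err) -> {
                if (err != null) {
                    err.printStackTrace();
                } else {
                    System.out.println(item);   // prints ALPHA and GAMMA
                }
            })
            .join();   // block until the traversal has completed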
23
src/main/java/org/xbib/event/yield/AsyncTraverser.java
Normal file
@@ -0,0 +1,23 @@
package org.xbib.event.yield;

import java.util.concurrent.CompletableFuture;
import java.util.function.BiConsumer;

/**
 * Asynchronous traversal.
 * We use the traverse method as the first choice to
 * implement AsyncQuery operations.
 * This is a special kind of traversal that disallows individual access.
 */
public interface AsyncTraverser<T> {
    /**
     * Yields elements sequentially until all elements have been
     * processed or an exception is thrown.
     * The given consumer is invoked with the result (or null if none)
     * and the exception (or null if none).
     *
     * @return A CompletableFuture to signal finish to enable cancellation
     * through its cancel() method.
     */
    CompletableFuture<Void> subscribe(BiConsumer<? super T, ? super Throwable> cons);
}
535
src/main/java/org/xbib/event/yield/Query.java
Normal file
@@ -0,0 +1,535 @@
package org.xbib.event.yield;

import org.xbib.event.yield.boxes.BoolBox;
import org.xbib.event.yield.boxes.Box;
import org.xbib.event.yield.ops.Concat;
import org.xbib.event.yield.ops.Distinct;
import org.xbib.event.yield.ops.DropWhile;
import org.xbib.event.yield.ops.Filter;
import org.xbib.event.yield.ops.FlatMap;
import org.xbib.event.yield.ops.FromArray;
import org.xbib.event.yield.ops.FromList;
import org.xbib.event.yield.ops.FromStream;
import org.xbib.event.yield.ops.Generate;
import org.xbib.event.yield.ops.Iterate;
import org.xbib.event.yield.ops.Limit;
import org.xbib.event.yield.ops.Mapping;
import org.xbib.event.yield.ops.Peek;
import org.xbib.event.yield.ops.Skip;
import org.xbib.event.yield.ops.TakeWhile;
import org.xbib.event.yield.ops.Zip;
import org.xbib.event.yield.primitives.dbl.DoubleAdvancer;
import org.xbib.event.yield.primitives.dbl.DoubleQuery;
import org.xbib.event.yield.primitives.dbl.DoubleTraverser;
import org.xbib.event.yield.primitives.intgr.IntAdvancer;
import org.xbib.event.yield.primitives.intgr.IntQuery;
import org.xbib.event.yield.primitives.intgr.IntTraverser;
import org.xbib.event.yield.primitives.lng.LongAdvancer;
import org.xbib.event.yield.primitives.lng.LongQuery;
import org.xbib.event.yield.primitives.lng.LongTraverser;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.Spliterator;
import java.util.Spliterators.AbstractSpliterator;
import java.util.function.BiConsumer;
import java.util.function.BiFunction;
import java.util.function.BinaryOperator;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.IntFunction;
import java.util.function.Predicate;
import java.util.function.Supplier;
import java.util.function.ToDoubleFunction;
import java.util.function.ToIntFunction;
import java.util.function.ToLongFunction;
import java.util.function.UnaryOperator;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;

/**
 * A sequence of elements supporting sequential operations.
 * Query operations are composed into a pipeline to perform
 * computation.
 */
public class Query<T> {

    private final Advancer<T> adv;

    private final Traverser<T> trav;

    public Query(Advancer<T> adv, Traverser<T> trav) {
        this.adv = adv;
        this.trav = trav;
    }

    /**
     * Returns a sequential ordered query whose elements
     * are the specified values in data parameter.
     */
    @SuppressWarnings("unchecked")
    public static <U> Query<U> of(U... data) {
        FromArray<U> adv = new FromArray<>(data);
        return new Query<>(adv, adv);
    }

    /**
     * Returns a sequential ordered query with elements
     * from the provided List data.
     */
    public static <U> Query<U> fromList(List<U> data) {
        FromList<U> adv = new FromList<>(data);
        return new Query<>(adv, adv);
    }

    /**
     * Returns a sequential ordered query with elements
     * from the provided stream data.
     */
    public static <U> Query<U> fromStream(Stream<U> data) {
        FromStream<U> adv = new FromStream<>(data);
        return new Query<>(adv, adv);
    }

    /**
     * Returns an infinite sequential ordered {@code Query} produced by iterative
     * application of a function {@code f} to an initial element {@code seed},
     * producing a {@code Query} consisting of {@code seed}, {@code f(seed)},
     * {@code f(f(seed))}, etc.
     */
    public static <U> Query<U> iterate(U seed, UnaryOperator<U> f) {
        Iterate<U> iter = new Iterate<>(seed, f);
        return new Query<>(iter, iter);
    }

    /**
     * Returns an infinite sequential unordered {@code Query}
     * where each element is generated by the provided Supplier.
     */
    public static <U> Query<U> generate(Supplier<U> s) {
        Generate<U> gen = new Generate<>(s);
        return new Query<>(gen, gen);
    }

    /**
     * Yields elements sequentially in the current thread,
     * until all elements have been processed or an
     * exception is thrown.
     */
    public final void traverse(Yield<? super T> yield) {
        this.trav.traverse(yield);
    }

    /**
     * If a remaining element exists, yields that element through
     * the given action.
     */
    public boolean tryAdvance(Yield<? super T> action) {
        return this.adv.tryAdvance(action);
    }

    /**
     * Yields elements sequentially in the current thread,
     * until all elements have been processed or the traversal
     * exited normally through the invocation of yield.bye().
     */
    public final void shortCircuit(Yield<T> yield) {
        try {
            this.trav.traverse(yield);
        } catch (TraversableFinishError e) {
            /* Proceed */
        }
    }

    /**
     * Returns a query consisting of the results of applying the given
     * function to the elements of this query.
     */
    public final <R> Query<R> map(Function<? super T, ? extends R> mapper) {
        Mapping<T, R> map = new Mapping<>(this, mapper);
        return new Query<>(map, map);
    }

    /**
     * Applies a specified function to the corresponding elements of two
     * sequences, producing a sequence of the results.
     */
    public final <U, R> Query<R> zip(Query<U> other, BiFunction<? super T, ? super U, ? extends R> zipper) {
        Zip<T, U, R> zip = new Zip<>(this, other, zipper);
        return new Query<>(zip, zip);
    }

    /**
     * Returns a {@link IntQuery} with the elements of this {@code Query} mapped by
     * a {@link ToIntFunction}
     *
     * @param mapper ToIntFunction used to map elements of this {@code Query} to int
     */
    public final IntQuery mapToInt(ToIntFunction<? super T> mapper) {
        return new IntQuery(IntAdvancer.from(adv, mapper), IntTraverser.from(trav, mapper));
    }

    /**
     * Returns a {@link LongQuery} with the elements of this {@code Query} mapped by
     * a {@link ToLongFunction}
     *
     * @param mapper ToLongFunction used to map elements of this {@code Query} to long
     */
    public final LongQuery mapToLong(ToLongFunction<? super T> mapper) {
        return new LongQuery(LongAdvancer.from(adv, mapper), LongTraverser.from(trav, mapper));
    }

    /**
     * Returns a {@link DoubleQuery} with the elements of this {@code Query} mapped by
     * a {@link ToDoubleFunction}
     *
     * @param mapper ToDoubleFunction used to map elements of this {@code Query} to double
     */
    public final DoubleQuery mapToDouble(ToDoubleFunction<? super T> mapper) {
        return new DoubleQuery(DoubleAdvancer.from(adv, mapper), DoubleTraverser.from(trav, mapper));
    }

    /**
     * Returns a query consisting of the elements of this query that match
     * the given predicate.
     */
    public final Query<T> filter(Predicate<? super T> p) {
        Filter<T> filter = new Filter<>(this, p);
        return new Query<>(filter, filter);
    }

    /**
     * Returns a query consisting of the remaining elements of this query
     * after discarding the first {@code n} elements of the query.
     */
    public final Query<T> skip(int n) {
        Skip<T> skip = new Skip<>(this, n);
        return new Query<>(skip, skip);
    }

    /**
     * Returns a query consisting of the elements of this query, truncated
     * to be no longer than {@code n} in length.
     */
    public final Query<T> limit(int n) {
        Limit<T> limit = new Limit<>(this, n);
        return new Query<>(limit, limit);
    }

    /**
     * Returns a query consisting of the distinct elements (according to
     * {@link Object#equals(Object)}) of this query.
     */
    public final Query<T> distinct() {
        Distinct<T> dis = new Distinct<>(this);
        return new Query<>(dis, dis);
    }

    /**
     * Returns a query consisting of the results of replacing each element of
     * this query with the contents of a mapped query produced by applying
     * the provided mapping function to each element.
     */
    public final <R> Query<R> flatMap(Function<? super T, ? extends Query<? extends R>> mapper) {
        FlatMap<T, R> map = new FlatMap<>(this, mapper);
        return new Query<>(map, map);
    }

    /**
     * Returns a query consisting of the elements of this query, additionally
     * performing the provided action on each element as elements are consumed
     * from the resulting query.
     */
    public final Query<T> peek(Consumer<? super T> action) {
        Peek<T> peek = new Peek<>(this, action);
        return new Query<>(peek, peek);
    }

    /**
     * Returns a query consisting of the longest prefix of elements taken from
     * this query that match the given predicate.
     */
    public final Query<T> takeWhile(Predicate<? super T> predicate) {
        TakeWhile<T> take = new TakeWhile<>(this, predicate);
        return new Query<>(take, take);
    }

    /**
     * The {@code then} operator lets you encapsulate a piece of an operator
     * chain into a function.
     * That function {@code next} is applied to this query to produce a new
     * {@code Traverser} object that is encapsulated in the resulting query.
     * On the other hand, the {@code nextAdv} is applied to this query to produce a new
     * {@code Advancer} object that is encapsulated in the resulting query.
     */
    public final <R> Query<R> then(Function<Query<T>, Advancer<R>> nextAdv, Function<Query<T>, Traverser<R>> next) {
        return new Query<>(nextAdv.apply(this), next.apply(this));
    }

    /**
     * The {@code then} operator lets you encapsulate a piece of an operator
     * chain into a function.
     * That function {@code next} is applied to this query to produce a new
     * {@code Traverser} object that is encapsulated in the resulting query.
     */
    public final <R> Query<R> then(Function<Query<T>, Traverser<R>> next) {
        Advancer<R> nextAdv = item -> {
            throw new UnsupportedOperationException(
                    "Missing tryAdvance() implementation! Use the overloaded then() providing both Advancer and Traverser!");
        };
        return new Query<>(nextAdv, next.apply(this));
    }

    /**
     * Returns a list containing the elements of this query.
     */
    public final List<T> toList() {
        List<T> data = new ArrayList<>();
        this.traverse(data::add);
        return data;
    }

    /**
     * Returns an array containing the elements of this query.
     */
    public final Object[] toArray() {
        return this.toArray(Object[]::new);
    }

    public final Stream<T> toStream() {
        Spliterator<T> iter = new AbstractSpliterator<T>(Long.MAX_VALUE, Spliterator.ORDERED) {
            @Override
            public boolean tryAdvance(Consumer<? super T> action) {
                return adv.tryAdvance(action::accept);
            }

            @Override
            public void forEachRemaining(Consumer<? super T> action) {
                trav.traverse(action::accept);
            }
        };
        return StreamSupport.stream(iter, false);
    }

    /**
     * Returns an {@link Optional} describing the first element of this query,
     * or an empty {@code Optional} if this query is empty.
     */
    public final Optional<T> findFirst() {
        Box<T> box = new Box<>();
        this.tryAdvance(box::turnPresent);
        return box.isPresent()
                ? Optional.of(box.getValue())
                : Optional.empty();
    }

    /**
     * Returns the maximum element of this query according to the provided
     * {@code Comparator}. This is a special case of a reduction.
     */
    public final Optional<T> max(Comparator<? super T> cmp) {
        class BoxMax extends Box<T> implements Yield<T> {
            @Override
            public final void ret(T item) {
                if (!isPresent()) turnPresent(item);
                else if (cmp.compare(item, value) > 0) value = item;
            }
        }
        BoxMax b = new BoxMax();
        this.traverse(b);
        return b.isPresent() ? Optional.of(b.getValue()) : Optional.empty();
    }

    /**
     * Returns whether any elements of this query match the provided
     * predicate. May not evaluate the predicate on all elements if not
     * necessary for determining the result. If the query is empty then
     * {@code false} is returned and the predicate is not evaluated.
     */
    public final boolean anyMatch(Predicate<? super T> p) {
        BoolBox found = new BoolBox();
        shortCircuit(item -> {
            if (p.test(item)) {
                found.set();
                Yield.bye();
            }
        });
        return found.isTrue();
    }

    /**
     * Returns whether all elements of this query match the provided
     * predicate. May not evaluate the predicate on all elements if not
     * necessary for determining the result. If the query is empty then
     * {@code true} is returned and the predicate is not evaluated.
     */
    public final boolean allMatch(Predicate<? super T> p) {
        BoolBox succeed = new BoolBox(true);
        shortCircuit(item -> {
            if (!p.test(item)) {
                succeed.set(false);
                Yield.bye();
            }
        });
        return succeed.isTrue();
    }

    /**
     * Returns the count of elements in this query.
     */
    public final long count() {
        class Counter implements Yield<T> {
            long n = 0;

            @Override
            public void ret(T item) {
                ++n;
            }
        }
        Counter c = new Counter();
        this.traverse(c);
        return c.n;
    }

    /**
     * Returns an {@link Optional} with the resulting reduction of the elements of this {@code Query},
     * if a reduction can be made, using the provided accumulator.
     */
    public Optional<T> reduce(BinaryOperator<T> accumulator) {
        Box<T> box = new Box<>();
        if (this.tryAdvance(box::setValue)) {
            return Optional.of(this.reduce(box.getValue(), accumulator));
        } else {
            return Optional.empty();
        }
    }

    /**
     * Returns the result of the reduction of the elements of this query,
     * using the provided identity value and accumulator.
     */
    public T reduce(T identity, BinaryOperator<T> accumulator) {
        class BoxAccumulator extends Box<T> implements Yield<T> {
            public BoxAccumulator(T identity) {
                super(identity);
            }

            @Override
            public final void ret(T item) {
                this.value = accumulator.apply(value, item);
            }
        }
        BoxAccumulator box = new BoxAccumulator(identity);
        this.traverse(box);
        return box.getValue();
    }

    /**
     * Yields elements sequentially in the current thread,
     * until all elements have been processed or an
     * exception is thrown.
     */
    public final void forEach(Yield<? super T> yield) {
        this.traverse(yield);
    }

    /**
     * Returns a {@link Set} containing the elements of this query.
     */
    public final Set<T> toSet() {
        Set<T> data = new HashSet<>();
        this.traverse(data::add);
        return data;
    }

    /**
     * Returns an array containing the elements of this query.
     */
    public final <U> U[] toArray(IntFunction<U[]> generator) {
        return this.toList().toArray(generator);
    }

    /**
     * Returns the concatenation of the input elements into a String, in encounter order.
     */
    public final String join() {
        return this.map(String::valueOf)
                .collect(StringBuilder::new, StringBuilder::append)
                .toString();
    }

    /**
     * Returns an {@link Optional} describing any element of this query,
     * or an empty {@code Optional} if this query is empty.
     */
    public final Optional<T> findAny() {
        return this.findFirst();
    }

    /**
     * Returns the minimum element of this query according to the provided
     * {@code Comparator}. This is a special case of a reduction.
     */
    public final Optional<T> min(Comparator<? super T> cmp) {
        return this.max((a, b) -> cmp.compare(a, b) * -1);
    }

    /**
     * Returns whether no elements of this query match the provided
     * predicate. May not evaluate the predicate on all elements if not
     * necessary for determining the result. If the query is empty then
     * {@code true} is returned and the predicate is not evaluated.
     */
    public final boolean noneMatch(Predicate<? super T> p) {
        return !this.anyMatch(p);
    }

    /**
     * Performs a mutable reduction operation on the elements of this {@code Query}.
     * A mutable reduction is one in which the reduced value is a mutable result container, such as an ArrayList,
     * and elements are incorporated by updating the state of the result rather than by replacing the result.
     */
    public <R> R collect(Supplier<R> supplier, BiConsumer<R, ? super T> accumulator) {
        R result = supplier.get();
        this.traverse(elem -> accumulator.accept(result, elem));
        return result;
    }

    /**
     * Creates a concatenated {@code Query} in which the elements are
     * all the elements of this {@code Query} followed by all the
     * elements of the other {@code Query}.
     */
    public final Query<T> concat(Query<T> other) {
        Concat<T> con = new Concat<>(this, other);
        return new Query<>(con, con);
    }

    /**
     * Returns a {@code Query} consisting of the elements of this {@code Query},
     * sorted according to the provided Comparator.
     * <p>
     * This is a stateful intermediate operation.
     */
    @SuppressWarnings("unchecked")
    public final Query<T> sorted(Comparator<T> comparator) {
        T[] state = (T[]) this.toArray();
        Arrays.sort(state, comparator);
        FromArray<T> sorted = new FromArray<>(state);
        return new Query<>(sorted, sorted);
    }

    /**
     * Returns a {@code Query} consisting of the remaining elements of this query
     * after discarding the first sequence of elements that match the given Predicate.
     */
    public final Query<T> dropWhile(Predicate<T> predicate) {
        DropWhile<T> drop = new DropWhile<>(this, predicate);
        return new Query<>(drop, drop);
    }
}
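A short usage sketch of the synchronous pipeline, using only the operations defined above; the values are illustrative:

    List<String> names = Query.of("ada", "grace", "alan", "barbara")
            .filter(s -> s.startsWith("a"))
            .map(String::toUpperCase)
            .toList();                                   // [ADA, ALAN]

    int sum = Query.iterate(1, n -> n + 1)               // 1, 2, 3, ...
            .limit(5)
            .reduce(0, Integer::sum);                    // 15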
@@ -0,0 +1,11 @@
package org.xbib.event.yield;

@SuppressWarnings("serial")
public class TraversableFinishError extends Error {

    public static final TraversableFinishError finishTraversal = new TraversableFinishError();

    private TraversableFinishError() {
        super("Auxiliary exception finishes traversal!", null, false, false);
    }
}
24
src/main/java/org/xbib/event/yield/Traverser.java
Normal file
@@ -0,0 +1,24 @@
package org.xbib.event.yield;

/**
 * Bulk traversal.
 * We use the traverse method as the first choice to
 * implement Query operations.
 * This is a special kind of traversal that disallows individual access.
 */
public interface Traverser<T> {
    /**
     * A Traverser object without elements.
     */
    static <R> Traverser<R> empty() {
        return action -> {
        };
    }

    /**
     * Yields elements sequentially in the current thread,
     * until all elements have been processed or an
     * exception is thrown.
     */
    void traverse(Yield<? super T> yield);
}
16
src/main/java/org/xbib/event/yield/Yield.java
Normal file
@@ -0,0 +1,16 @@
package org.xbib.event.yield;

/**
 * Equivalent to {@code Consumer<T>} with yield semantics.
 */
@FunctionalInterface
public interface Yield<T> {
    /**
     * Auxiliary function for traversal short circuit.
     */
    static void bye() {
        throw TraversableFinishError.finishTraversal;
    }

    void ret(T item);
}
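The Advancer, Traverser and Yield interfaces above are all that is needed to plug a custom source into Query; a minimal sketch (the RangeSource name and bounds are illustrative, not part of the library):

    class RangeSource implements Advancer<Integer>, Traverser<Integer> {

        private final int end;

        private int current;

        RangeSource(int start, int end) {
            this.current = start;
            this.end = end;
        }

        @Override
        public boolean tryAdvance(Yield<? super Integer> yield) {
            if (current >= end) {
                return false;
            }
            yield.ret(current++);
            return true;
        }

        @Override
        public void traverse(Yield<? super Integer> yield) {
            while (current < end) {
                yield.ret(current++);
            }
        }
    }

    RangeSource source = new RangeSource(1, 6);
    List<Integer> squares = new Query<>(source, source).map(n -> n * n).toList();   // [1, 4, 9, 16, 25]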
@@ -0,0 +1,28 @@
package org.xbib.event.yield.async;

import org.xbib.event.yield.AsyncQuery;

import java.util.HashSet;
import java.util.concurrent.CompletableFuture;
import java.util.function.BiConsumer;

public class AsyncQueryDistinct<T> extends AsyncQuery<T> {

    private final AsyncQuery<T> upstream;

    public AsyncQueryDistinct(AsyncQuery<T> upstream) {
        this.upstream = upstream;
    }

    @Override
    public CompletableFuture<Void> subscribe(BiConsumer<? super T, ? super Throwable> cons) {
        final HashSet<T> mem = new HashSet<>();
        return upstream.subscribe((item, err) -> {
            if (err != null) {
                cons.accept(null, err);
                return;
            }
            if (mem.add(item)) cons.accept(item, null);
        });
    }
}
@@ -0,0 +1,28 @@
package org.xbib.event.yield.async;

import org.xbib.event.yield.AsyncQuery;

import java.util.concurrent.CompletableFuture;
import java.util.function.BiConsumer;
import java.util.function.Predicate;

public class AsyncQueryFilter<T> extends AsyncQuery<T> {

    private final AsyncQuery<T> upstream;

    private final Predicate<? super T> p;

    public AsyncQueryFilter(AsyncQuery<T> upstream, Predicate<? super T> p) {
        this.upstream = upstream;
        this.p = p;
    }

    @Override
    public CompletableFuture<Void> subscribe(BiConsumer<? super T, ? super Throwable> cons) {
        return upstream.subscribe((item, err) -> {
            if (err != null) {
                cons.accept(null, err);
                return;
            }
            if (p.test(item)) cons.accept(item, null);
        });
    }
}
@@ -0,0 +1,32 @@
package org.xbib.event.yield.async;

import org.xbib.event.yield.AsyncQuery;

import java.util.concurrent.CompletableFuture;
import java.util.function.BiConsumer;
import java.util.function.Function;

public class AsyncQueryFlatMapConcat<T, R> extends AsyncQuery<R> {

    private final AsyncQuery<T> upstream;

    private final Function<? super T, ? extends AsyncQuery<? extends R>> mapper;

    public AsyncQueryFlatMapConcat(AsyncQuery<T> upstream, Function<? super T, ? extends AsyncQuery<? extends R>> mapper) {
        this.upstream = upstream;
        this.mapper = mapper;
    }

    @Override
    public CompletableFuture<Void> subscribe(BiConsumer<? super R, ? super Throwable> cons) {
        return upstream.subscribe((item, err) -> {
            if (err != null) {
                cons.accept(null, err);
                return;
            }
            mapper.apply(item)
                    .subscribe(cons)
                    .join();
        });
    }
}
@ -0,0 +1,38 @@
|
||||||
|
package org.xbib.event.yield.async;
|
||||||
|
|
||||||
|
|
||||||
|
import org.xbib.event.yield.AsyncQuery;
|
||||||
|
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.concurrent.CompletableFuture;
|
||||||
|
import java.util.function.BiConsumer;
|
||||||
|
import java.util.function.Function;
|
||||||
|
|
||||||
|
public class AsyncQueryFlatMapMerge<T, R> extends AsyncQuery<R> {
|
||||||
|
|
||||||
|
private final AsyncQuery<T> upstream;
|
||||||
|
|
||||||
|
private final Function<? super T, ? extends AsyncQuery<? extends R>> mapper;
|
||||||
|
|
||||||
|
public AsyncQueryFlatMapMerge(AsyncQuery<T> upstream, Function<? super T, ? extends AsyncQuery<? extends R>> mapper) {
|
||||||
|
this.upstream = upstream;
|
||||||
|
this.mapper = mapper;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public CompletableFuture<Void> subscribe(BiConsumer<? super R, ? super Throwable> cons) {
|
||||||
|
List<CompletableFuture<Void>> cfs = new ArrayList<>();
|
||||||
|
return upstream
|
||||||
|
.subscribe((item, err) -> {
|
||||||
|
if (err != null) {
|
||||||
|
cons.accept(null, err);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
cfs.add(mapper
|
||||||
|
.apply(item)
|
||||||
|
.subscribe(cons));
|
||||||
|
})
|
||||||
|
.thenCompose(ignore -> CompletableFuture.allOf(cfs.toArray(new CompletableFuture<?>[0])));
|
||||||
|
}
|
||||||
|
}
|
src/main/java/org/xbib/event/yield/async/AsyncQueryFork.java (new file, 25 lines)
|
@ -0,0 +1,25 @@
|
||||||
|
package org.xbib.event.yield.async;
|
||||||
|
|
||||||
|
import org.xbib.event.yield.AsyncQuery;
|
||||||
|
import org.xbib.event.yield.Query;
|
||||||
|
|
||||||
|
import java.util.concurrent.CompletableFuture;
|
||||||
|
import java.util.function.BiConsumer;
|
||||||
|
|
||||||
|
import static java.util.concurrent.CompletableFuture.runAsync;
|
||||||
|
|
||||||
|
public class AsyncQueryFork<U> extends AsyncQuery<U> {
|
||||||
|
|
||||||
|
private final U[] data;
|
||||||
|
|
||||||
|
public AsyncQueryFork(U[] data) {
|
||||||
|
this.data = data;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public CompletableFuture<Void> subscribe(BiConsumer<? super U, ? super Throwable> cons) {
|
||||||
|
return runAsync(() -> Query
|
||||||
|
.of(data)
|
||||||
|
.traverse(item -> cons.accept(item, null)));
|
||||||
|
}
|
||||||
|
}
|
src/main/java/org/xbib/event/yield/async/AsyncQueryMap.java (new file, 30 lines)
|
@ -0,0 +1,30 @@
|
||||||
|
package org.xbib.event.yield.async;
|
||||||
|
|
||||||
|
import org.xbib.event.yield.AsyncQuery;
|
||||||
|
|
||||||
|
import java.util.concurrent.CompletableFuture;
|
||||||
|
import java.util.function.BiConsumer;
|
||||||
|
import java.util.function.Function;
|
||||||
|
|
||||||
|
public class AsyncQueryMap<T, R> extends AsyncQuery<R> {
|
||||||
|
|
||||||
|
private final AsyncQuery<T> upstream;
|
||||||
|
|
||||||
|
private final Function<? super T, ? extends R> mapper;
|
||||||
|
|
||||||
|
public AsyncQueryMap(AsyncQuery<T> upstream, Function<? super T, ? extends R> mapper) {
|
||||||
|
this.upstream = upstream;
|
||||||
|
this.mapper = mapper;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public CompletableFuture<Void> subscribe(BiConsumer<? super R, ? super Throwable> cons) {
|
||||||
|
return upstream.subscribe((item, err) -> {
|
||||||
|
if (err != null) {
|
||||||
|
cons.accept(null, err);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
cons.accept(mapper.apply(item), null);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
src/main/java/org/xbib/event/yield/async/AsyncQueryOf.java (new file, 22 lines)
|
@ -0,0 +1,22 @@
|
||||||
|
package org.xbib.event.yield.async;
|
||||||
|
|
||||||
|
import org.xbib.event.yield.AsyncQuery;
|
||||||
|
import org.xbib.event.yield.Query;
|
||||||
|
|
||||||
|
import java.util.concurrent.CompletableFuture;
|
||||||
|
import java.util.function.BiConsumer;
|
||||||
|
|
||||||
|
public class AsyncQueryOf<U> extends AsyncQuery<U> {
|
||||||
|
|
||||||
|
private final U[] data;
|
||||||
|
|
||||||
|
public AsyncQueryOf(U[] data) {
|
||||||
|
this.data = data;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public CompletableFuture<Void> subscribe(BiConsumer<? super U, ? super Throwable> cons) {
|
||||||
|
Query.of(data).traverse(item -> cons.accept(item, null));
|
||||||
|
return CompletableFuture.completedFuture(null);
|
||||||
|
}
|
||||||
|
}
|
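A hedged wiring sketch for the push-based async operators (assumption: in normal use these are created through fluent methods on AsyncQuery rather than instantiated directly). Each subscriber is a BiConsumer that receives either an item or a Throwable.

import org.xbib.event.yield.AsyncQuery;
import org.xbib.event.yield.async.AsyncQueryDistinct;
import org.xbib.event.yield.async.AsyncQueryFilter;
import org.xbib.event.yield.async.AsyncQueryOf;

public class AsyncSketch {
    public static void main(String[] args) {
        AsyncQuery<String> source = new AsyncQueryOf<>(new String[]{"a", "b", "b", "c"});
        AsyncQuery<String> pipeline =
                new AsyncQueryFilter<>(new AsyncQueryDistinct<>(source), s -> !s.isEmpty());
        pipeline.subscribe((item, err) -> {
            if (err != null) {
                err.printStackTrace(); // errors are forwarded, not thrown
                return;
            }
            System.out.println(item); // a, b, c
        }).join();
    }
}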
|
@ -0,0 +1,24 @@
|
||||||
|
package org.xbib.event.yield.async;
|
||||||
|
|
||||||
|
import org.xbib.event.yield.AsyncQuery;
|
||||||
|
|
||||||
|
import java.util.Iterator;
|
||||||
|
import java.util.concurrent.CompletableFuture;
|
||||||
|
import java.util.function.BiConsumer;
|
||||||
|
|
||||||
|
public class AsyncQueryOfIterator<T> extends AsyncQuery<T> {
|
||||||
|
|
||||||
|
private final Iterator<T> iter;
|
||||||
|
|
||||||
|
public AsyncQueryOfIterator(Iterator<T> iter) {
|
||||||
|
this.iter = iter;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public CompletableFuture<Void> subscribe(BiConsumer<? super T, ? super Throwable> cons) {
|
||||||
|
while (iter.hasNext()) {
|
||||||
|
cons.accept(iter.next(), null);
|
||||||
|
}
|
||||||
|
return CompletableFuture.completedFuture(null);
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,26 @@
|
||||||
|
package org.xbib.event.yield.async;
|
||||||
|
|
||||||
|
import org.xbib.event.yield.AsyncQuery;
|
||||||
|
|
||||||
|
import java.util.concurrent.CompletableFuture;
|
||||||
|
import java.util.function.BiConsumer;
|
||||||
|
|
||||||
|
public class AsyncQueryOnNext<T> extends AsyncQuery<T> {
|
||||||
|
|
||||||
|
private final AsyncQuery<T> upstream;
|
||||||
|
|
||||||
|
private final BiConsumer<? super T, ? super Throwable> action;
|
||||||
|
|
||||||
|
public AsyncQueryOnNext(AsyncQuery<T> upstream, BiConsumer<? super T, ? super Throwable> action) {
|
||||||
|
this.upstream = upstream;
|
||||||
|
this.action = action;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public CompletableFuture<Void> subscribe(BiConsumer<? super T, ? super Throwable> cons) {
|
||||||
|
return upstream.subscribe((item, err) -> {
|
||||||
|
action.accept(item, err);
|
||||||
|
cons.accept(item, err);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
src/main/java/org/xbib/event/yield/async/AsyncQuerySkip.java (new file, 33 lines)
|
@ -0,0 +1,33 @@
|
||||||
|
package org.xbib.event.yield.async;
|
||||||
|
|
||||||
|
import org.xbib.event.yield.AsyncQuery;
|
||||||
|
import org.xbib.event.yield.AsyncTraverser;
|
||||||
|
|
||||||
|
import java.util.concurrent.CompletableFuture;
|
||||||
|
import java.util.function.BiConsumer;
|
||||||
|
|
||||||
|
public class AsyncQuerySkip<T> extends AsyncQuery<T> {
|
||||||
|
|
||||||
|
private final AsyncTraverser<T> upstream;
|
||||||
|
|
||||||
|
private final int skip;
|
||||||
|
|
||||||
|
private int count = 0;
|
||||||
|
|
||||||
|
public AsyncQuerySkip(AsyncTraverser<T> upstream, int skip) {
|
||||||
|
this.upstream = upstream;
|
||||||
|
this.skip = skip;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public CompletableFuture<Void> subscribe(BiConsumer<? super T, ? super Throwable> cons) {
|
||||||
|
return upstream.subscribe((item, err) -> {
|
||||||
|
if (err != null) {
|
||||||
|
cons.accept(null, err);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
if (count >= skip) cons.accept(item, err);
|
||||||
|
else count++;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,60 @@
|
||||||
|
package org.xbib.event.yield.async;
|
||||||
|
|
||||||
|
import org.xbib.event.yield.AsyncQuery;
|
||||||
|
|
||||||
|
import java.util.concurrent.CompletableFuture;
|
||||||
|
import java.util.function.BiConsumer;
|
||||||
|
import java.util.function.Predicate;
|
||||||
|
|
||||||
|
public class AsyncQueryTakeWhile<T> extends AsyncQuery<T> {
|
||||||
|
|
||||||
|
private final AsyncQuery<T> upstream;
|
||||||
|
|
||||||
|
private final Predicate<? super T> p;
|
||||||
|
|
||||||
|
private CompletableFuture<Void> subscription;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* After cancellation of the upstream subscription we may still receive updates on the consumer.
|
||||||
|
* To avoid propagation we must check if we have already cancelled the subscription.
|
||||||
|
* We need a flag separate from the subscription CompletableFuture because that field may not be
|
||||||
|
* initialized yet when the subscribe callback first checks it.
|
||||||
|
*/
|
||||||
|
private boolean finished = false;
|
||||||
|
|
||||||
|
public AsyncQueryTakeWhile(AsyncQuery<T> upstream, Predicate<? super T> p) {
|
||||||
|
this.upstream = upstream;
|
||||||
|
this.p = p;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public CompletableFuture<Void> subscribe(BiConsumer<? super T, ? super Throwable> cons) {
|
||||||
|
subscription = upstream.subscribe((item, err) -> {
|
||||||
|
/*
|
||||||
|
* After cancellation of the upstream subscription we may still receive updates on the consumer.
|
||||||
|
* To avoid propagation we must check if we have already cancelled the subscription.
|
||||||
|
*/
|
||||||
|
if (finished) {
|
||||||
|
if (subscription != null && !subscription.isDone())
|
||||||
|
subscription.complete(null);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
if (err != null) {
|
||||||
|
cons.accept(null, err);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
if (p.test(item)) cons.accept(item, null);
|
||||||
|
else {
|
||||||
|
if (!finished) {
|
||||||
|
finished = true;
|
||||||
|
// We need this guard because we could meet conditions
|
||||||
|
// to finish processing, yet the outer subscribe() invocation
|
||||||
|
// has not returned and the subscription is still null.
|
||||||
|
if (subscription != null)
|
||||||
|
subscription.complete(null);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
return subscription;
|
||||||
|
}
|
||||||
|
}
|
src/main/java/org/xbib/event/yield/boxes/BoolBox.java (new file, 31 lines)
|
@ -0,0 +1,31 @@
|
||||||
|
package org.xbib.event.yield.boxes;
|
||||||
|
|
||||||
|
public class BoolBox {
|
||||||
|
|
||||||
|
private boolean value;
|
||||||
|
|
||||||
|
public BoolBox(boolean value) {
|
||||||
|
this.value = value;
|
||||||
|
}
|
||||||
|
|
||||||
|
public BoolBox() {
|
||||||
|
this(false);
|
||||||
|
}
|
||||||
|
|
||||||
|
public boolean isTrue() {
|
||||||
|
return value;
|
||||||
|
}
|
||||||
|
|
||||||
|
public boolean isFalse() {
|
||||||
|
return !value;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void set() {
|
||||||
|
value = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void set(boolean val) {
|
||||||
|
value = val;
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
src/main/java/org/xbib/event/yield/boxes/Box.java (new file, 34 lines)
|
@ -0,0 +1,34 @@
|
||||||
|
package org.xbib.event.yield.boxes;
|
||||||
|
|
||||||
|
public class Box<T> {
|
||||||
|
|
||||||
|
protected T value;
|
||||||
|
|
||||||
|
private boolean isPresent;
|
||||||
|
|
||||||
|
public Box() {
|
||||||
|
}
|
||||||
|
|
||||||
|
public Box(T identity) {
|
||||||
|
this.value = identity;
|
||||||
|
this.isPresent = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
public final boolean isPresent() {
|
||||||
|
return isPresent;
|
||||||
|
}
|
||||||
|
|
||||||
|
public final T getValue() {
|
||||||
|
return value;
|
||||||
|
}
|
||||||
|
|
||||||
|
public final T setValue(T value) {
|
||||||
|
this.value = value;
|
||||||
|
return value;
|
||||||
|
}
|
||||||
|
|
||||||
|
public final void turnPresent(T e) {
|
||||||
|
this.setValue(e);
|
||||||
|
this.isPresent = true;
|
||||||
|
}
|
||||||
|
}
|
src/main/java/org/xbib/event/yield/boxes/DoubleBox.java (new file, 39 lines)
|
@ -0,0 +1,39 @@
|
||||||
|
package org.xbib.event.yield.boxes;
|
||||||
|
|
||||||
|
public class DoubleBox {
|
||||||
|
|
||||||
|
protected double value;
|
||||||
|
|
||||||
|
private boolean isPresent;
|
||||||
|
|
||||||
|
public DoubleBox() {
|
||||||
|
this(Double.MIN_VALUE, false);
|
||||||
|
}
|
||||||
|
|
||||||
|
public DoubleBox(double value, boolean isPresent) {
|
||||||
|
this.value = value;
|
||||||
|
this.isPresent = isPresent;
|
||||||
|
}
|
||||||
|
|
||||||
|
public DoubleBox(double identity) {
|
||||||
|
this.value = identity;
|
||||||
|
this.isPresent = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
public double getValue() {
|
||||||
|
return value;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setValue(double value) {
|
||||||
|
this.value = value;
|
||||||
|
}
|
||||||
|
|
||||||
|
public boolean isPresent() {
|
||||||
|
return isPresent;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void turnPresent(double value) {
|
||||||
|
this.value = value;
|
||||||
|
isPresent = true;
|
||||||
|
}
|
||||||
|
}
|
src/main/java/org/xbib/event/yield/boxes/IntBox.java (new file, 38 lines)
|
@ -0,0 +1,38 @@
|
||||||
|
package org.xbib.event.yield.boxes;
|
||||||
|
|
||||||
|
public class IntBox {
|
||||||
|
|
||||||
|
protected int value;
|
||||||
|
|
||||||
|
private boolean isPresent;
|
||||||
|
|
||||||
|
public IntBox() {
|
||||||
|
this(Integer.MIN_VALUE, false);
|
||||||
|
}
|
||||||
|
|
||||||
|
public IntBox(int value, boolean isPresent) {
|
||||||
|
this.value = value;
|
||||||
|
this.isPresent = isPresent;
|
||||||
|
}
|
||||||
|
|
||||||
|
public IntBox(int identity) {
|
||||||
|
this.value = identity;
// mark the value as present, consistent with Box(T) and DoubleBox(double)
this.isPresent = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
public int getValue() {
|
||||||
|
return value;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setValue(int value) {
|
||||||
|
this.value = value;
|
||||||
|
}
|
||||||
|
|
||||||
|
public boolean isPresent() {
|
||||||
|
return isPresent;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void turnPresent(int value) {
|
||||||
|
this.value = value;
|
||||||
|
isPresent = true;
|
||||||
|
}
|
||||||
|
}
|
src/main/java/org/xbib/event/yield/boxes/LongBox.java (new file, 38 lines)
|
@ -0,0 +1,38 @@
|
||||||
|
package org.xbib.event.yield.boxes;
|
||||||
|
|
||||||
|
public class LongBox {
|
||||||
|
|
||||||
|
protected long value;
|
||||||
|
|
||||||
|
private boolean isPresent;
|
||||||
|
|
||||||
|
public LongBox() {
|
||||||
|
this(Long.MIN_VALUE, false);
|
||||||
|
}
|
||||||
|
|
||||||
|
public LongBox(long value, boolean isPresent) {
|
||||||
|
this.value = value;
|
||||||
|
this.isPresent = isPresent;
|
||||||
|
}
|
||||||
|
|
||||||
|
public LongBox(long identity) {
|
||||||
|
this.value = identity;
// mark the value as present, consistent with Box(T) and DoubleBox(double)
this.isPresent = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
public long getValue() {
|
||||||
|
return value;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setValue(long value) {
|
||||||
|
this.value = value;
|
||||||
|
}
|
||||||
|
|
||||||
|
public boolean isPresent() {
|
||||||
|
return isPresent;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void turnPresent(long value) {
|
||||||
|
this.value = value;
|
||||||
|
isPresent = true;
|
||||||
|
}
|
||||||
|
}
|
src/main/java/org/xbib/event/yield/ops/Concat.java (new file, 29 lines)
|
@ -0,0 +1,29 @@
|
||||||
|
package org.xbib.event.yield.ops;
|
||||||
|
|
||||||
|
import org.xbib.event.yield.Advancer;
|
||||||
|
import org.xbib.event.yield.Query;
|
||||||
|
import org.xbib.event.yield.Traverser;
|
||||||
|
import org.xbib.event.yield.Yield;
|
||||||
|
|
||||||
|
public class Concat<T> implements Advancer<T>, Traverser<T> {
|
||||||
|
|
||||||
|
private final Query<T> first;
|
||||||
|
|
||||||
|
private final Query<T> second;
|
||||||
|
|
||||||
|
public Concat(Query<T> first, Query<T> second) {
|
||||||
|
this.first = first;
|
||||||
|
this.second = second;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void traverse(Yield<? super T> yield) {
|
||||||
|
this.first.traverse(yield);
|
||||||
|
this.second.traverse(yield);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public boolean tryAdvance(Yield<? super T> yield) {
|
||||||
|
return first.tryAdvance(yield) || second.tryAdvance(yield);
|
||||||
|
}
|
||||||
|
}
|
src/main/java/org/xbib/event/yield/ops/Distinct.java (new file, 38 lines)
|
@ -0,0 +1,38 @@
|
||||||
|
package org.xbib.event.yield.ops;
|
||||||
|
|
||||||
|
import org.xbib.event.yield.Advancer;
|
||||||
|
import org.xbib.event.yield.Query;
|
||||||
|
import org.xbib.event.yield.Traverser;
|
||||||
|
import org.xbib.event.yield.Yield;
|
||||||
|
import org.xbib.event.yield.boxes.BoolBox;
|
||||||
|
|
||||||
|
import java.util.HashSet;
|
||||||
|
|
||||||
|
public class Distinct<T> implements Advancer<T>, Traverser<T> {
|
||||||
|
|
||||||
|
final HashSet<T> mem = new HashSet<>();
|
||||||
|
private final Query<T> upstream;
|
||||||
|
|
||||||
|
public Distinct(Query<T> adv) {
|
||||||
|
this.upstream = adv;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void traverse(Yield<? super T> yield) {
|
||||||
|
upstream.traverse(item -> {
|
||||||
|
if (mem.add(item)) yield.ret(item);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public boolean tryAdvance(Yield<? super T> yield) {
|
||||||
|
final BoolBox found = new BoolBox();
|
||||||
|
while (found.isFalse() && upstream.tryAdvance(item -> {
|
||||||
|
if (mem.add(item)) {
|
||||||
|
yield.ret(item);
|
||||||
|
found.set();
|
||||||
|
}
|
||||||
|
})) {
// Intentionally empty. Action specified on yield statement of tryAdvance().
}
|
||||||
|
return found.isTrue();
|
||||||
|
}
|
||||||
|
}
|
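A hedged sketch of the two pull protocols on Distinct, assuming Query.of(...) is the varargs factory backed by FromArray shown later in this commit: traverse() consumes everything in bulk, while tryAdvance() yields at most one not-yet-seen element per call.

import org.xbib.event.yield.Query;
import org.xbib.event.yield.ops.Distinct;

public class DistinctSketch {
    public static void main(String[] args) {
        // Bulk traversal: prints 1, 2, 3
        new Distinct<>(Query.of(1, 1, 2, 2, 3)).traverse(System.out::println);

        // Element by element: the action runs inside tryAdvance
        Distinct<Integer> distinct = new Distinct<>(Query.of(1, 1, 2, 2, 3));
        while (distinct.tryAdvance(System.out::println)) {
            // intentionally empty
        }
    }
}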
src/main/java/org/xbib/event/yield/ops/DropWhile.java (new file, 55 lines)
|
@ -0,0 +1,55 @@
|
||||||
|
package org.xbib.event.yield.ops;
|
||||||
|
|
||||||
|
import org.xbib.event.yield.Advancer;
|
||||||
|
import org.xbib.event.yield.Query;
|
||||||
|
import org.xbib.event.yield.Traverser;
|
||||||
|
import org.xbib.event.yield.Yield;
|
||||||
|
|
||||||
|
import java.util.function.Predicate;
|
||||||
|
|
||||||
|
public class DropWhile<T> implements Advancer<T>, Traverser<T> {
|
||||||
|
|
||||||
|
private final Query<T> upstream;
|
||||||
|
|
||||||
|
private final Predicate<T> predicate;
|
||||||
|
|
||||||
|
private boolean dropped;
|
||||||
|
|
||||||
|
public DropWhile(Query<T> upstream, Predicate<T> predicate) {
|
||||||
|
this.upstream = upstream;
|
||||||
|
this.predicate = predicate;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void traverse(Yield<? super T> yield) {
|
||||||
|
upstream.traverse(item -> {
|
||||||
|
if (!dropped && !predicate.test(item)) {
|
||||||
|
dropped = true;
|
||||||
|
}
|
||||||
|
if (dropped) {
|
||||||
|
yield.ret(item);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public boolean tryAdvance(Yield<? super T> yield) {
|
||||||
|
if (dropped) {
|
||||||
|
return upstream.tryAdvance(yield);
|
||||||
|
} else {
|
||||||
|
while (!dropped && dropNext(yield)) {
|
||||||
|
// Intentionally empty. Action specified on yield statement of tryAdvance().
|
||||||
|
}
|
||||||
|
return dropped;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private boolean dropNext(Yield<? super T> yield) {
|
||||||
|
return upstream.tryAdvance(item -> {
|
||||||
|
if (!predicate.test(item)) {
|
||||||
|
dropped = true;
|
||||||
|
yield.ret(item);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
src/main/java/org/xbib/event/yield/ops/Filter.java (new file, 44 lines)
|
@ -0,0 +1,44 @@
|
||||||
|
package org.xbib.event.yield.ops;
|
||||||
|
|
||||||
|
import org.xbib.event.yield.Advancer;
|
||||||
|
import org.xbib.event.yield.Query;
|
||||||
|
import org.xbib.event.yield.Traverser;
|
||||||
|
import org.xbib.event.yield.Yield;
|
||||||
|
import org.xbib.event.yield.boxes.BoolBox;
|
||||||
|
|
||||||
|
import java.util.function.Predicate;
|
||||||
|
|
||||||
|
public class Filter<T> implements Advancer<T>, Traverser<T> {
|
||||||
|
|
||||||
|
private final Query<T> upstream;
|
||||||
|
|
||||||
|
private final Predicate<? super T> p;
|
||||||
|
|
||||||
|
public Filter(Query<T> adv, Predicate<? super T> p) {
|
||||||
|
this.upstream = adv;
|
||||||
|
this.p = p;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void traverse(Yield<? super T> yield) {
|
||||||
|
upstream.traverse(e -> {
|
||||||
|
if (p.test(e))
|
||||||
|
yield.ret(e);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public boolean tryAdvance(Yield<? super T> yield) {
|
||||||
|
BoolBox found = new BoolBox();
|
||||||
|
while (found.isFalse()) {
|
||||||
|
boolean hasNext = upstream.tryAdvance(item -> {
|
||||||
|
if (p.test(item)) {
|
||||||
|
yield.ret(item);
|
||||||
|
found.set();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
if (!hasNext) break;
|
||||||
|
}
|
||||||
|
return found.isTrue();
|
||||||
|
}
|
||||||
|
}
|
src/main/java/org/xbib/event/yield/ops/FlatMap.java (new file, 39 lines)
|
@ -0,0 +1,39 @@
|
||||||
|
package org.xbib.event.yield.ops;
|
||||||
|
|
||||||
|
import org.xbib.event.yield.Advancer;
|
||||||
|
import org.xbib.event.yield.Query;
|
||||||
|
import org.xbib.event.yield.Traverser;
|
||||||
|
import org.xbib.event.yield.Yield;
|
||||||
|
|
||||||
|
import java.util.function.Function;
|
||||||
|
|
||||||
|
public class FlatMap<T, R> implements Advancer<R>, Traverser<R> {
|
||||||
|
|
||||||
|
private final Query<T> upstream;
|
||||||
|
|
||||||
|
private final Function<? super T, ? extends Query<? extends R>> mapper;
|
||||||
|
|
||||||
|
private Query<? extends R> src;
|
||||||
|
|
||||||
|
public FlatMap(Query<T> query, Function<? super T, ? extends Query<? extends R>> mapper) {
|
||||||
|
this.upstream = query;
|
||||||
|
this.mapper = mapper;
|
||||||
|
src = new Query<>(Advancer.empty(), Traverser.empty());
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void traverse(Yield<? super R> yield) {
|
||||||
|
upstream.traverse(elem ->
|
||||||
|
mapper.apply(elem).traverse(yield));
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public boolean tryAdvance(Yield<? super R> yield) {
|
||||||
|
while (!src.tryAdvance(yield)) {
|
||||||
|
if (!upstream.tryAdvance(t -> src = mapper.apply(t)))
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
}
|
src/main/java/org/xbib/event/yield/ops/FromArray.java (new file, 42 lines)
|
@ -0,0 +1,42 @@
|
||||||
|
package org.xbib.event.yield.ops;
|
||||||
|
|
||||||
|
import org.xbib.event.yield.Advancer;
|
||||||
|
import org.xbib.event.yield.Traverser;
|
||||||
|
import org.xbib.event.yield.Yield;
|
||||||
|
|
||||||
|
public class FromArray<U> implements Advancer<U>, Traverser<U> {
|
||||||
|
|
||||||
|
private final U[] data;
|
||||||
|
|
||||||
|
private int current;
|
||||||
|
|
||||||
|
@SuppressWarnings("unchecked")
|
||||||
|
public FromArray(U... data) {
|
||||||
|
this.data = data;
|
||||||
|
this.current = 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
public boolean hasNext() {
|
||||||
|
return current < data.length;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Continues from the point where tryAdvance or next left the
|
||||||
|
* internal iteration.
|
||||||
|
*
|
||||||
|
* @param yield yield
|
||||||
|
*/
|
||||||
|
@Override
|
||||||
|
public void traverse(Yield<? super U> yield) {
|
||||||
|
for (int i = current; i < data.length; i++) {
|
||||||
|
yield.ret(data[i]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public boolean tryAdvance(Yield<? super U> yield) {
|
||||||
|
if (!hasNext()) return false;
|
||||||
|
yield.ret(data[current++]);
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
}
|
src/main/java/org/xbib/event/yield/ops/FromList.java (new file, 26 lines)
|
@ -0,0 +1,26 @@
|
||||||
|
package org.xbib.event.yield.ops;
|
||||||
|
|
||||||
|
import org.xbib.event.yield.Advancer;
|
||||||
|
import org.xbib.event.yield.Traverser;
|
||||||
|
import org.xbib.event.yield.Yield;
|
||||||
|
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Spliterator;
|
||||||
|
|
||||||
|
public class FromList<U> implements Advancer<U>, Traverser<U> {
|
||||||
|
private final Spliterator<U> current;
|
||||||
|
|
||||||
|
public FromList(List<U> data) {
|
||||||
|
this.current = data.spliterator();
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void traverse(Yield<? super U> yield) {
|
||||||
|
current.forEachRemaining(yield::ret);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public boolean tryAdvance(Yield<? super U> yield) {
|
||||||
|
return current.tryAdvance(yield::ret);
|
||||||
|
}
|
||||||
|
}
|
src/main/java/org/xbib/event/yield/ops/FromStream.java (new file, 28 lines)
|
@ -0,0 +1,28 @@
|
||||||
|
package org.xbib.event.yield.ops;
|
||||||
|
|
||||||
|
import org.xbib.event.yield.Advancer;
|
||||||
|
import org.xbib.event.yield.Traverser;
|
||||||
|
import org.xbib.event.yield.Yield;
|
||||||
|
|
||||||
|
import java.util.Spliterator;
|
||||||
|
import java.util.stream.Stream;
|
||||||
|
|
||||||
|
public class FromStream<U> implements Advancer<U>, Traverser<U> {
|
||||||
|
|
||||||
|
private final Spliterator<U> upstream;
|
||||||
|
|
||||||
|
public FromStream(Stream<U> data) {
|
||||||
|
this.upstream = data.spliterator();
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void traverse(Yield<? super U> yield) {
|
||||||
|
upstream.forEachRemaining(yield::ret);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public boolean tryAdvance(Yield<? super U> yield) {
|
||||||
|
return upstream.tryAdvance(yield::ret);
|
||||||
|
}
|
||||||
|
}
|
src/main/java/org/xbib/event/yield/ops/Generate.java (new file, 29 lines)
|
@ -0,0 +1,29 @@
|
||||||
|
package org.xbib.event.yield.ops;
|
||||||
|
|
||||||
|
import org.xbib.event.yield.Advancer;
|
||||||
|
import org.xbib.event.yield.Traverser;
|
||||||
|
import org.xbib.event.yield.Yield;
|
||||||
|
|
||||||
|
import java.util.function.Supplier;
|
||||||
|
|
||||||
|
public class Generate<U> implements Advancer<U>, Traverser<U> {
|
||||||
|
|
||||||
|
private final Supplier<U> s;
|
||||||
|
|
||||||
|
public Generate(Supplier<U> s) {
|
||||||
|
this.s = s;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void traverse(Yield<? super U> yield) {
|
||||||
|
while (true) {
|
||||||
|
yield.ret(s.get());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public boolean tryAdvance(Yield<? super U> yield) {
|
||||||
|
yield.ret(s.get());
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
}
|
src/main/java/org/xbib/event/yield/ops/Iterate.java (new file, 33 lines)
|
@ -0,0 +1,33 @@
|
||||||
|
package org.xbib.event.yield.ops;
|
||||||
|
|
||||||
|
import org.xbib.event.yield.Advancer;
|
||||||
|
import org.xbib.event.yield.Traverser;
|
||||||
|
import org.xbib.event.yield.Yield;
|
||||||
|
|
||||||
|
import java.util.function.UnaryOperator;
|
||||||
|
|
||||||
|
public class Iterate<U> implements Advancer<U>, Traverser<U> {
|
||||||
|
|
||||||
|
private final UnaryOperator<U> f;
|
||||||
|
|
||||||
|
private U prev;
|
||||||
|
|
||||||
|
public Iterate(U seed, UnaryOperator<U> f) {
|
||||||
|
this.f = f;
|
||||||
|
this.prev = seed;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void traverse(Yield<? super U> yield) {
|
||||||
|
for (U curr = prev; true; curr = f.apply(curr))
|
||||||
|
yield.ret(curr);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public boolean tryAdvance(Yield<? super U> yield) {
|
||||||
|
U curr = prev;
|
||||||
|
prev = f.apply(prev);
|
||||||
|
yield.ret(curr);
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
}
|
src/main/java/org/xbib/event/yield/ops/Limit.java (new file, 38 lines)
|
@ -0,0 +1,38 @@
|
||||||
|
package org.xbib.event.yield.ops;
|
||||||
|
|
||||||
|
import org.xbib.event.yield.Advancer;
|
||||||
|
import org.xbib.event.yield.Query;
|
||||||
|
import org.xbib.event.yield.Traverser;
|
||||||
|
import org.xbib.event.yield.Yield;
|
||||||
|
|
||||||
|
public class Limit<T> implements Advancer<T>, Traverser<T> {
|
||||||
|
|
||||||
|
private final Query<T> upstream;
|
||||||
|
|
||||||
|
private final int n;
|
||||||
|
|
||||||
|
int count;
|
||||||
|
|
||||||
|
public Limit(Query<T> upstream, int n) {
|
||||||
|
this.upstream = upstream;
|
||||||
|
this.n = n;
|
||||||
|
count = 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public boolean tryAdvance(Yield<? super T> yield) {
|
||||||
|
if (count >= n) return false;
|
||||||
|
count++;
|
||||||
|
return upstream.tryAdvance(yield);
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void traverse(Yield<? super T> yield) {
|
||||||
|
if (count >= n)
|
||||||
|
throw new IllegalStateException("Traverser has already been operated on or closed!");
|
||||||
|
while (this.tryAdvance(yield)) {
|
||||||
|
// Intentionally empty. Action specified on yield statement of tryAdvance().
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
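A hedged sketch combining the infinite Iterate source with the Limit operator above; the two-argument Query constructor is taken from its use in FlatMap earlier in this commit.

import org.xbib.event.yield.Query;
import org.xbib.event.yield.ops.Iterate;
import org.xbib.event.yield.ops.Limit;

public class IterateLimitSketch {
    public static void main(String[] args) {
        // Infinite source 0, 1, 2, ... exposed as a Query via its (Advancer, Traverser) constructor
        Iterate<Integer> naturals = new Iterate<>(0, n -> n + 1);
        Query<Integer> query = new Query<>(naturals, naturals);

        // Limit caps the pull-based iteration, so the infinite source terminates: prints 0..4
        new Limit<>(query, 5).traverse(System.out::println);
    }
}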
src/main/java/org/xbib/event/yield/ops/Mapping.java (new file, 30 lines)
|
@ -0,0 +1,30 @@
|
||||||
|
package org.xbib.event.yield.ops;
|
||||||
|
|
||||||
|
import org.xbib.event.yield.Advancer;
|
||||||
|
import org.xbib.event.yield.Query;
|
||||||
|
import org.xbib.event.yield.Traverser;
|
||||||
|
import org.xbib.event.yield.Yield;
|
||||||
|
|
||||||
|
import java.util.function.Function;
|
||||||
|
|
||||||
|
public class Mapping<T, R> implements Advancer<R>, Traverser<R> {
|
||||||
|
|
||||||
|
private final Query<T> upstream;
|
||||||
|
|
||||||
|
private final Function<? super T, ? extends R> mapper;
|
||||||
|
|
||||||
|
public Mapping(Query<T> adv, Function<? super T, ? extends R> mapper) {
|
||||||
|
this.upstream = adv;
|
||||||
|
this.mapper = mapper;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void traverse(Yield<? super R> yield) {
|
||||||
|
upstream.traverse(e -> yield.ret(mapper.apply(e)));
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public boolean tryAdvance(Yield<? super R> yield) {
|
||||||
|
return upstream.tryAdvance(item -> yield.ret(mapper.apply(item)));
|
||||||
|
}
|
||||||
|
}
|
src/main/java/org/xbib/event/yield/ops/Peek.java (new file, 36 lines)
|
@ -0,0 +1,36 @@
|
||||||
|
package org.xbib.event.yield.ops;
|
||||||
|
|
||||||
|
import org.xbib.event.yield.Advancer;
|
||||||
|
import org.xbib.event.yield.Query;
|
||||||
|
import org.xbib.event.yield.Traverser;
|
||||||
|
import org.xbib.event.yield.Yield;
|
||||||
|
|
||||||
|
import java.util.function.Consumer;
|
||||||
|
|
||||||
|
public class Peek<T> implements Advancer<T>, Traverser<T> {
|
||||||
|
|
||||||
|
private final Query<T> upstream;
|
||||||
|
|
||||||
|
private final Consumer<? super T> action;
|
||||||
|
|
||||||
|
public Peek(Query<T> adv, Consumer<? super T> action) {
|
||||||
|
this.upstream = adv;
|
||||||
|
this.action = action;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void traverse(Yield<? super T> yield) {
|
||||||
|
upstream.traverse(item -> {
|
||||||
|
action.accept(item);
|
||||||
|
yield.ret(item);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public boolean tryAdvance(Yield<? super T> yield) {
|
||||||
|
return upstream.tryAdvance(item -> {
|
||||||
|
action.accept(item);
|
||||||
|
yield.ret(item);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
src/main/java/org/xbib/event/yield/ops/Skip.java (new file, 43 lines)
|
@ -0,0 +1,43 @@
|
||||||
|
package org.xbib.event.yield.ops;
|
||||||
|
|
||||||
|
import org.xbib.event.yield.Advancer;
|
||||||
|
import org.xbib.event.yield.Query;
|
||||||
|
import org.xbib.event.yield.Traverser;
|
||||||
|
import org.xbib.event.yield.Yield;
|
||||||
|
|
||||||
|
public class Skip<T> implements Advancer<T>, Traverser<T> {
|
||||||
|
|
||||||
|
private final Query<T> upstream;
|
||||||
|
|
||||||
|
private final int n;
|
||||||
|
|
||||||
|
int index;
|
||||||
|
|
||||||
|
public Skip(Query<T> adv, int n) {
|
||||||
|
this.upstream = adv;
|
||||||
|
this.n = n;
|
||||||
|
index = 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Continues from the point where tryAdvance or next left the
|
||||||
|
* internal iteration.
|
||||||
|
*
|
||||||
|
* @param yield the yield
|
||||||
|
*/
|
||||||
|
@Override
|
||||||
|
public void traverse(Yield<? super T> yield) {
|
||||||
|
upstream.traverse(item -> {
|
||||||
|
if (index++ >= n)
|
||||||
|
yield.ret(item);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public boolean tryAdvance(Yield<? super T> yield) {
|
||||||
|
for (; index < n; index++)
|
||||||
|
upstream.tryAdvance(item -> {
|
||||||
|
});
|
||||||
|
return upstream.tryAdvance(yield);
|
||||||
|
}
|
||||||
|
}
|
src/main/java/org/xbib/event/yield/ops/TakeWhile.java (new file, 44 lines)
|
@ -0,0 +1,44 @@
|
||||||
|
package org.xbib.event.yield.ops;
|
||||||
|
|
||||||
|
import org.xbib.event.yield.Advancer;
|
||||||
|
import org.xbib.event.yield.Query;
|
||||||
|
import org.xbib.event.yield.Traverser;
|
||||||
|
import org.xbib.event.yield.Yield;
|
||||||
|
|
||||||
|
import java.util.function.Predicate;
|
||||||
|
|
||||||
|
public class TakeWhile<T> implements Advancer<T>, Traverser<T> {
|
||||||
|
|
||||||
|
private final Query<T> upstream;
|
||||||
|
|
||||||
|
private final Predicate<? super T> predicate;
|
||||||
|
|
||||||
|
private boolean hasNext;
|
||||||
|
|
||||||
|
public TakeWhile(Query<T> upstream, Predicate<? super T> predicate) {
|
||||||
|
this.upstream = upstream;
|
||||||
|
this.predicate = predicate;
|
||||||
|
this.hasNext = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public boolean tryAdvance(Yield<? super T> yield) {
|
||||||
|
if (!hasNext) return false; // Once predicate is false it finishes the iteration
|
||||||
|
Yield<T> takeWhile = item -> {
|
||||||
|
if (predicate.test(item)) {
|
||||||
|
yield.ret(item);
|
||||||
|
} else {
|
||||||
|
hasNext = false;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
return upstream.tryAdvance(takeWhile) && hasNext;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void traverse(Yield<? super T> yield) {
|
||||||
|
upstream.shortCircuit(item -> {
|
||||||
|
if (!predicate.test(item)) Yield.bye();
|
||||||
|
yield.ret(item);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
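A hedged sketch of the take-while short circuit (varargs Query.of assumed, as above): traverse() delegates to upstream.shortCircuit(), so the traversal stops cleanly at the first non-matching element.

import org.xbib.event.yield.Query;
import org.xbib.event.yield.ops.TakeWhile;

public class TakeWhileSketch {
    public static void main(String[] args) {
        // Prints 1, 2, 3 and stops: Yield.bye() aborts the upstream traversal at 9
        new TakeWhile<>(Query.of(1, 2, 3, 9, 1), n -> n < 5).traverse(System.out::println);
    }
}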
src/main/java/org/xbib/event/yield/ops/Zip.java (new file, 43 lines)
|
@ -0,0 +1,43 @@
|
||||||
|
package org.xbib.event.yield.ops;
|
||||||
|
|
||||||
|
import org.xbib.event.yield.Advancer;
|
||||||
|
import org.xbib.event.yield.Query;
|
||||||
|
import org.xbib.event.yield.Traverser;
|
||||||
|
import org.xbib.event.yield.Yield;
|
||||||
|
import org.xbib.event.yield.boxes.BoolBox;
|
||||||
|
|
||||||
|
import java.util.function.BiFunction;
|
||||||
|
|
||||||
|
public class Zip<T, U, R> implements Advancer<R>, Traverser<R> {
|
||||||
|
|
||||||
|
private final Query<T> upstream;
|
||||||
|
|
||||||
|
private final Query<U> other;
|
||||||
|
|
||||||
|
private final BiFunction<? super T, ? super U, ? extends R> zipper;
|
||||||
|
|
||||||
|
public Zip(Query<T> upstream, Query<U> other, BiFunction<? super T, ? super U, ? extends R> zipper) {
|
||||||
|
this.upstream = upstream;
|
||||||
|
this.other = other;
|
||||||
|
this.zipper = zipper;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public boolean tryAdvance(Yield<? super R> yield) {
|
||||||
|
BoolBox consumed = new BoolBox();
|
||||||
|
upstream.tryAdvance(e1 -> other.tryAdvance(e2 -> {
|
||||||
|
yield.ret(zipper.apply(e1, e2));
|
||||||
|
consumed.set();
|
||||||
|
}));
|
||||||
|
return consumed.isTrue();
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void traverse(Yield<? super R> yield) {
|
||||||
|
upstream.shortCircuit(e1 -> {
|
||||||
|
if (!other.tryAdvance(e2 -> yield.ret(zipper.apply(e1, e2))))
|
||||||
|
Yield.bye();
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
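A hedged zip sketch (varargs Query.of assumed): the shorter side ends the traversal through Yield.bye(), as implemented in Zip.traverse above.

import org.xbib.event.yield.Query;
import org.xbib.event.yield.ops.Zip;

public class ZipSketch {
    public static void main(String[] args) {
        Zip<Integer, String, String> zip =
                new Zip<>(Query.of(1, 2, 3), Query.of("a", "b"), (i, s) -> i + s);
        zip.traverse(System.out::println); // 1a, 2b -- the shorter query stops the zip
    }
}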
|
@ -0,0 +1,69 @@
|
||||||
|
package org.xbib.event.yield.primitives.dbl;
|
||||||
|
|
||||||
|
import org.xbib.event.yield.Advancer;
|
||||||
|
import org.xbib.event.yield.Yield;
|
||||||
|
import org.xbib.event.yield.primitives.intgr.IntAdvancer;
|
||||||
|
import org.xbib.event.yield.primitives.lng.LongAdvancer;
|
||||||
|
|
||||||
|
import java.util.function.DoubleToLongFunction;
|
||||||
|
import java.util.function.IntToDoubleFunction;
|
||||||
|
import java.util.function.LongToDoubleFunction;
|
||||||
|
import java.util.function.ToDoubleFunction;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Sequential traverser with both internal and external iteration approaches.
|
||||||
|
*/
|
||||||
|
public interface DoubleAdvancer extends Advancer<Double> {
|
||||||
|
/**
|
||||||
|
* A DoubleAdvancer object without elements.
|
||||||
|
*/
|
||||||
|
static DoubleAdvancer empty() {
|
||||||
|
return action -> false;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A DoubleAdvancer object from a generic {@link Advancer} mapped by a {@link ToDoubleFunction}.
|
||||||
|
*
|
||||||
|
* @param source {@link Advancer} with the source elements for this {@code DoubleAdvancer}.
|
||||||
|
* @param mapper {@link ToDoubleFunction} that specifies how to map the source elements into double values.
|
||||||
|
*/
|
||||||
|
static <T> DoubleAdvancer from(Advancer<T> source, ToDoubleFunction<? super T> mapper) {
|
||||||
|
return yield -> source.tryAdvance(item -> yield.ret(mapper.applyAsDouble(item)));
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A DoubleAdvancer object from a {@link LongAdvancer} mapped by a {@link LongToDoubleFunction}.
|
||||||
|
*
|
||||||
|
* @param source {@link LongAdvancer} with the source elements for this {@code DoubleAdvancer}.
|
||||||
|
* @param mapper {@link LongToDoubleFunction} that specifies how to map the source elements into double values.
|
||||||
|
*/
|
||||||
|
static DoubleAdvancer from(LongAdvancer source, LongToDoubleFunction mapper) {
|
||||||
|
return from((Advancer<Long>) source, mapper::applyAsDouble);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A DoubleAdvancer object from an {@link IntAdvancer} mapped by an {@link IntToDoubleFunction}.
|
||||||
|
*
|
||||||
|
* @param source {@link IntAdvancer} with the source elements for this {@code DoubleAdvancer}.
|
||||||
|
* @param mapper {@link IntToDoubleFunction} that specifies how to map the source elements into double values.
|
||||||
|
*/
|
||||||
|
static DoubleAdvancer from(IntAdvancer source, IntToDoubleFunction mapper) {
|
||||||
|
return from((Advancer<Integer>) source, mapper::applyAsDouble);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Default advance implementation that calls the
|
||||||
|
* primitive version of it
|
||||||
|
*/
|
||||||
|
@Override
|
||||||
|
default boolean tryAdvance(Yield<? super Double> yield) {
|
||||||
|
DoubleYield yld = yield::ret;
|
||||||
|
return this.tryAdvance(yld);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* If a remaining element exists, yields that element through
|
||||||
|
* the given action.
|
||||||
|
*/
|
||||||
|
boolean tryAdvance(DoubleYield yield);
|
||||||
|
}
|
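A hedged sketch of the from(...) adapters above, mapping a generic Advancer of strings into a DoubleAdvancer; the iterator-backed Advancer is illustrative only, and DoubleYield is assumed to live in the same primitives.dbl package.

import org.xbib.event.yield.Advancer;
import org.xbib.event.yield.primitives.dbl.DoubleAdvancer;
import org.xbib.event.yield.primitives.dbl.DoubleYield;

import java.util.Arrays;
import java.util.Iterator;

public class DoubleAdvancerSketch {
    public static void main(String[] args) {
        Iterator<String> words = Arrays.asList("a", "bb", "ccc").iterator();
        // Pull-based source of strings
        Advancer<String> source = y -> {
            if (!words.hasNext()) {
                return false;
            }
            y.ret(words.next());
            return true;
        };
        // Adapt it to a primitive double advancer via each word's length
        DoubleAdvancer lengths = DoubleAdvancer.from(source, String::length);
        DoubleYield printer = System.out::println;
        while (lengths.tryAdvance(printer)) {
            // prints 1.0, 2.0, 3.0
        }
    }
}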
|
@ -0,0 +1,583 @@
|
||||||
|
package org.xbib.event.yield.primitives.dbl;
|
||||||
|
|
||||||
|
import org.xbib.event.yield.Query;
|
||||||
|
import org.xbib.event.yield.TraversableFinishError;
|
||||||
|
import org.xbib.event.yield.Yield;
|
||||||
|
import org.xbib.event.yield.boxes.BoolBox;
|
||||||
|
import org.xbib.event.yield.boxes.DoubleBox;
|
||||||
|
import org.xbib.event.yield.primitives.dbl.ops.DoubleConcat;
|
||||||
|
import org.xbib.event.yield.primitives.dbl.ops.DoubleDistinct;
|
||||||
|
import org.xbib.event.yield.primitives.dbl.ops.DoubleDropWhile;
|
||||||
|
import org.xbib.event.yield.primitives.dbl.ops.DoubleFilter;
|
||||||
|
import org.xbib.event.yield.primitives.dbl.ops.DoubleFlatMap;
|
||||||
|
import org.xbib.event.yield.primitives.dbl.ops.DoubleGenerate;
|
||||||
|
import org.xbib.event.yield.primitives.dbl.ops.DoubleIterate;
|
||||||
|
import org.xbib.event.yield.primitives.dbl.ops.DoubleLimit;
|
||||||
|
import org.xbib.event.yield.primitives.dbl.ops.DoubleMapToObj;
|
||||||
|
import org.xbib.event.yield.primitives.dbl.ops.DoubleMapping;
|
||||||
|
import org.xbib.event.yield.primitives.dbl.ops.DoublePeek;
|
||||||
|
import org.xbib.event.yield.primitives.dbl.ops.DoubleSkip;
|
||||||
|
import org.xbib.event.yield.primitives.dbl.ops.DoubleTakeWhile;
|
||||||
|
import org.xbib.event.yield.primitives.dbl.ops.DoubleZip;
|
||||||
|
import org.xbib.event.yield.primitives.dbl.ops.FromDoubleArray;
|
||||||
|
import org.xbib.event.yield.primitives.dbl.ops.FromDoubleStream;
|
||||||
|
import org.xbib.event.yield.primitives.intgr.IntAdvancer;
|
||||||
|
import org.xbib.event.yield.primitives.intgr.IntQuery;
|
||||||
|
import org.xbib.event.yield.primitives.intgr.IntTraverser;
|
||||||
|
import org.xbib.event.yield.primitives.lng.LongAdvancer;
|
||||||
|
import org.xbib.event.yield.primitives.lng.LongQuery;
|
||||||
|
import org.xbib.event.yield.primitives.lng.LongTraverser;
|
||||||
|
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.Arrays;
|
||||||
|
import java.util.DoubleSummaryStatistics;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.OptionalDouble;
|
||||||
|
import java.util.Spliterator;
|
||||||
|
import java.util.Spliterators;
|
||||||
|
import java.util.function.DoubleBinaryOperator;
|
||||||
|
import java.util.function.DoubleConsumer;
|
||||||
|
import java.util.function.DoubleFunction;
|
||||||
|
import java.util.function.DoublePredicate;
|
||||||
|
import java.util.function.DoubleSupplier;
|
||||||
|
import java.util.function.DoubleToIntFunction;
|
||||||
|
import java.util.function.DoubleToLongFunction;
|
||||||
|
import java.util.function.DoubleUnaryOperator;
|
||||||
|
import java.util.function.Function;
|
||||||
|
import java.util.function.ObjDoubleConsumer;
|
||||||
|
import java.util.function.Supplier;
|
||||||
|
import java.util.stream.DoubleStream;
|
||||||
|
import java.util.stream.StreamSupport;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A sequence of primitive double-valued elements supporting sequential
|
||||||
|
* operations. This is the double primitive specialization of Query.
|
||||||
|
*/
|
||||||
|
public class DoubleQuery {
|
||||||
|
|
||||||
|
private final DoubleAdvancer adv;
|
||||||
|
private final DoubleTraverser trav;
|
||||||
|
|
||||||
|
public DoubleQuery(DoubleAdvancer adv, DoubleTraverser trav) {
|
||||||
|
this.adv = adv;
|
||||||
|
this.trav = trav;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns a sequential ordered {@code DoubleQuery} with elements
|
||||||
|
* from the provided {@link DoubleStream} data.
|
||||||
|
*/
|
||||||
|
public static DoubleQuery fromStream(DoubleStream src) {
|
||||||
|
FromDoubleStream strm = new FromDoubleStream(src);
|
||||||
|
return new DoubleQuery(strm, strm);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns an infinite sequential ordered {@code DoubleQuery} produced by iterative
|
||||||
|
* application of a function {@code f} to an initial element {@code seed},
|
||||||
|
* producing a {@code DoubleQuery} consisting of {@code seed}, {@code f(seed)},
|
||||||
|
* {@code f(f(seed))}, etc.
|
||||||
|
*/
|
||||||
|
public static DoubleQuery iterate(double seed, DoubleUnaryOperator f) {
|
||||||
|
DoubleIterate iter = new DoubleIterate(seed, f);
|
||||||
|
return new DoubleQuery(iter, iter);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns an infinite sequential unordered {@code DoubleQuery}
|
||||||
|
* where each element is generated by the provided Supplier.
|
||||||
|
*/
|
||||||
|
public static DoubleQuery generate(DoubleSupplier s) {
|
||||||
|
DoubleGenerate gen = new DoubleGenerate(s);
|
||||||
|
return new DoubleQuery(gen, gen);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns a sequential ordered {@code DoubleQuery} whose elements
|
||||||
|
* are the specified values in data parameter.
|
||||||
|
*/
|
||||||
|
public static DoubleQuery of(double... data) {
|
||||||
|
FromDoubleArray arr = new FromDoubleArray(data);
|
||||||
|
return new DoubleQuery(arr, arr);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Yields elements sequentially in the current thread,
|
||||||
|
* until all elements have been processed or an
|
||||||
|
* exception is thrown.
|
||||||
|
*/
|
||||||
|
public final void forEach(DoubleYield yield) {
|
||||||
|
this.traverse(yield);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Yields elements sequentially in the current thread,
|
||||||
|
* until all elements have been processed or an
|
||||||
|
* exception is thrown.
|
||||||
|
*/
|
||||||
|
public final void traverse(DoubleYield yield) {
|
||||||
|
this.trav.traverse(yield);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* If a remaining element exists, yields that element through
|
||||||
|
* the given action.
|
||||||
|
*/
|
||||||
|
public boolean tryAdvance(DoubleYield action) {
|
||||||
|
return this.adv.tryAdvance(action);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns a {@code DoubleQuery} consisting of the elements of this {@code DoubleQuery} that match
|
||||||
|
* the given predicate.
|
||||||
|
*/
|
||||||
|
public DoubleQuery filter(DoublePredicate p) {
|
||||||
|
DoubleFilter filter = new DoubleFilter(this, p);
|
||||||
|
return new DoubleQuery(filter, filter);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns a {@code DoubleQuery} consisting of the results of applying the given
|
||||||
|
* DoubleUnaryOperator to the elements of this {@code DoubleQuery}.
|
||||||
|
*
|
||||||
|
* @param op DoubleUnaryOperator used to map the elements of this DoubleQuery
|
||||||
|
*/
|
||||||
|
public DoubleQuery map(DoubleUnaryOperator op) {
|
||||||
|
DoubleMapping map = new DoubleMapping(this, op);
|
||||||
|
return new DoubleQuery(map, map);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns a {@code Query} consisting of the results of applying the given
|
||||||
|
* function to the elements of this {@code DoubleQuery}.
|
||||||
|
*
|
||||||
|
* @param function DoubleFunction used to map the elements of this DoubleQuery
|
||||||
|
*/
|
||||||
|
public <U> Query<U> mapToObj(DoubleFunction<? extends U> function) {
|
||||||
|
DoubleMapToObj<U> map = new DoubleMapToObj<>(this, function);
|
||||||
|
return new Query<>(map, map);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns a {@code DoubleQuery} consisting of the results of replacing each element of
|
||||||
|
* this {@code DoubleQuery} with the contents of a mapped {@code DoubleQuery} produced by applying
|
||||||
|
* the provided mapping function to each element.
|
||||||
|
*/
|
||||||
|
public DoubleQuery flatMap(DoubleFunction<? extends DoubleQuery> function) {
|
||||||
|
DoubleFlatMap map = new DoubleFlatMap(this, function);
|
||||||
|
return new DoubleQuery(map, map);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns a query consisting of the distinct elements (according to
|
||||||
|
* {@link Object#equals(Object)}) of this query.
|
||||||
|
*/
|
||||||
|
public DoubleQuery distinct() {
|
||||||
|
DoubleDistinct dist = new DoubleDistinct(this);
|
||||||
|
return new DoubleQuery(dist, dist);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns a {@code DoubleQuery} consisting of the elements of this {@code DoubleQuery},
|
||||||
|
* sorted according to the same logic as {@code Arrays.sort(double[] a)}.
|
||||||
|
* <p>
|
||||||
|
* This is a stateful intermediate operation.
|
||||||
|
*/
|
||||||
|
public DoubleQuery sorted() {
|
||||||
|
double[] state = this.toArray();
|
||||||
|
Arrays.sort(state);
|
||||||
|
FromDoubleArray arr = new FromDoubleArray(state);
|
||||||
|
return new DoubleQuery(arr, arr);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns an array containing the elements of this {@code DoubleQuery}.
|
||||||
|
*/
|
||||||
|
public double[] toArray() {
|
||||||
|
List<Double> list = toList();
|
||||||
|
double[] result = new double[list.size()];
|
||||||
|
for (int i = 0; i < list.size(); i++) {
|
||||||
|
result[i] = list.get(i);
|
||||||
|
}
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns a List containing the elements of this {@code DoubleQuery}.
|
||||||
|
*/
|
||||||
|
public List<Double> toList() {
|
||||||
|
ArrayList<Double> result = new ArrayList<>();
|
||||||
|
this.traverse(result::add);
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns a {@code DoubleQuery} consisting of the elements of this {@code DoubleQuery}, additionally
|
||||||
|
* performing the provided action on each element as elements are consumed
|
||||||
|
* from the resulting {@code DoubleQuery}.
|
||||||
|
*/
|
||||||
|
public DoubleQuery peek(DoubleConsumer action) {
|
||||||
|
DoublePeek peek = new DoublePeek(this, action);
|
||||||
|
return new DoubleQuery(peek, peek);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns a {@code DoubleQuery} consisting of the elements of this query, truncated
|
||||||
|
* to be no longer than {@code n} in length.
|
||||||
|
*
|
||||||
|
* @param n maximum amount of elements to retrieve from this {@code DoubleQuery}
|
||||||
|
*/
|
||||||
|
public DoubleQuery limit(int n) {
|
||||||
|
DoubleLimit lim = new DoubleLimit(this, n);
|
||||||
|
return new DoubleQuery(lim, lim);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns a {@code DoubleQuery} consisting of the remaining elements of this {@code DoubleQuery}
|
||||||
|
* after discarding the first {@code n} elements of the {@code DoubleQuery}.
|
||||||
|
*
|
||||||
|
* @param n number of elements to discard
|
||||||
|
*/
|
||||||
|
public DoubleQuery skip(int n) {
|
||||||
|
DoubleSkip skip = new DoubleSkip(this, n);
|
||||||
|
return new DoubleQuery(skip, skip);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns an {@link OptionalDouble} with the resulting reduction of the elements of this {@code DoubleQuery},
|
||||||
|
* if a reduction can be made, using the provided accumulator.
|
||||||
|
*/
|
||||||
|
public OptionalDouble reduce(DoubleBinaryOperator accumulator) {
|
||||||
|
DoubleBox box = new DoubleBox();
|
||||||
|
if (this.tryAdvance(box::setValue)) {
|
||||||
|
return OptionalDouble.of(this.reduce(box.getValue(), accumulator));
|
||||||
|
} else {
|
||||||
|
return OptionalDouble.empty();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns the result of the reduction of the elements of this {@code DoubleQuery},
|
||||||
|
* using the provided identity value and accumulator.
|
||||||
|
*/
|
||||||
|
public double reduce(double identity, DoubleBinaryOperator accumulator) {
|
||||||
|
class BoxAccumulator extends DoubleBox implements DoubleYield {
|
||||||
|
public BoxAccumulator(double identity) {
|
||||||
|
super(identity);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void ret(double item) {
|
||||||
|
this.value = accumulator.applyAsDouble(value, item);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
BoxAccumulator box = new BoxAccumulator(identity);
|
||||||
|
this.traverse(box);
|
||||||
|
return box.getValue();
|
||||||
|
}
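// Hedged usage note, not part of this class: with an identity the reduction never
// needs an OptionalDouble, e.g.
//   double total = DoubleQuery.of(1.5, 2.5, 4.0).reduce(0, Double::sum); // 8.0
// sum() below is defined exactly as this reduction with identity 0.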
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns the lowest double of this {@code DoubleQuery}.
|
||||||
|
*/
|
||||||
|
public OptionalDouble min() {
|
||||||
|
DoubleBox b = new DoubleBox();
|
||||||
|
this.traverse(e -> {
|
||||||
|
if (!b.isPresent()) {
|
||||||
|
b.turnPresent(e);
|
||||||
|
} else if (e < b.getValue()) {
|
||||||
|
b.setValue(e);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
return b.isPresent() ? OptionalDouble.of(b.getValue()) : OptionalDouble.empty();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns the highest double of this {@code DoubleQuery}.
|
||||||
|
*/
|
||||||
|
public OptionalDouble max() {
|
||||||
|
DoubleBox b = new DoubleBox();
|
||||||
|
this.traverse(e -> {
|
||||||
|
if (!b.isPresent()) {
|
||||||
|
b.turnPresent(e);
|
||||||
|
} else if (e > b.getValue()) {
|
||||||
|
b.setValue(e);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
return b.isPresent() ? OptionalDouble.of(b.getValue()) : OptionalDouble.empty();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns the count of elements in this {@code DoubleQuery}.
|
||||||
|
*/
|
||||||
|
public final long count() {
|
||||||
|
class Counter implements DoubleYield {
|
||||||
|
long n = 0;
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void ret(double item) {
|
||||||
|
++n;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Counter c = new Counter();
|
||||||
|
this.traverse(c);
|
||||||
|
return c.n;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns an OptionalDouble describing the arithmetic mean of elements of this {@code DoubleQuery},
|
||||||
|
* or an empty optional if this {@code DoubleQuery} is empty. This is a special case of a reduction.
|
||||||
|
* <p>
|
||||||
|
* This is a terminal operation.
|
||||||
|
*/
|
||||||
|
public OptionalDouble average() {
|
||||||
|
double[] data = this.toArray();
|
||||||
|
double count = data.length;
|
||||||
|
if (count == 0) {
|
||||||
|
return OptionalDouble.empty();
|
||||||
|
}
|
||||||
|
double sum = DoubleQuery.of(data).sum();
|
||||||
|
return OptionalDouble.of(sum / count);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns the sum of elements in this {@code DoubleQuery}.
|
||||||
|
* <p>
|
||||||
|
* This is a special case of a reduction.
|
||||||
|
*/
|
||||||
|
public double sum() {
|
||||||
|
return this.reduce(0, Double::sum);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Returns a DoubleSummaryStatistics describing various summary data about
|
||||||
|
* the elements of this {@code DoubleQuery}. This is a special case of a reduction.
|
||||||
|
* <p>
|
||||||
|
* This is a terminal operation.
|
||||||
|
*/
|
||||||
|
public DoubleSummaryStatistics summaryStatistics() {
|
||||||
|
return this.collect(DoubleSummaryStatistics::new, DoubleSummaryStatistics::accept);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Performs a mutable reduction operation on the elements of this {@code DoubleQuery}.
|
||||||
|
* A mutable reduction is one in which the reduced value is a mutable result container, such as an ArrayList,
|
||||||
|
* and elements are incorporated by updating the state of the result rather than by replacing the result.
|
||||||
|
*/
|
||||||
|
public <R> R collect(Supplier<R> supplier, ObjDoubleConsumer<R> accumulator) {
|
||||||
|
R result = supplier.get();
|
||||||
|
this.traverse(elem -> accumulator.accept(result, elem));
|
||||||
|
return result;
|
||||||
|
}

    /**
     * Returns whether all elements of this {@code DoubleQuery} match the provided
     * {@link DoublePredicate}. May not evaluate the predicate on all elements if not
     * necessary for determining the result. If the query is empty then
     * {@code true} is returned and the predicate is not evaluated.
     *
     * @param p DoublePredicate used to test elements of this {@code DoubleQuery}
     */
    public boolean allMatch(DoublePredicate p) {
        BoolBox succeed = new BoolBox(true);
        shortCircuit(item -> {
            if (!p.test(item)) {
                succeed.set(false);
                Yield.bye();
            }
        });
        return succeed.isTrue();
    }

    /**
     * Yields elements sequentially in the current thread,
     * until all elements have been processed or the traversal
     * is exited early through the invocation of Yield.bye().
     */
    public final void shortCircuit(DoubleYield yield) {
        try {
            this.trav.traverse(yield);
        } catch (TraversableFinishError e) {
            /* Proceed */
        }
    }

    /**
     * Returns whether no elements of this {@code DoubleQuery} match the provided
     * {@link DoublePredicate}. May not evaluate the predicate on all elements if not
     * necessary for determining the result. If the query is empty then
     * {@code true} is returned and the predicate is not evaluated.
     *
     * @param p DoublePredicate used to test elements of this {@code DoubleQuery}
     */
    public boolean noneMatch(DoublePredicate p) {
        return !this.anyMatch(p);
    }

    /**
     * Returns whether any elements of this {@code DoubleQuery} match the provided
     * {@link DoublePredicate}. May not evaluate the predicate on all elements if not
     * necessary for determining the result. If the query is empty then
     * {@code false} is returned and the predicate is not evaluated.
     *
     * @param p DoublePredicate used to test elements of this {@code DoubleQuery}
     */
    public boolean anyMatch(DoublePredicate p) {
        BoolBox found = new BoolBox();
        shortCircuit(item -> {
            if (p.test(item)) {
                found.set();
                Yield.bye();
            }
        });
        return found.isTrue();
    }
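
The match operations above all build on shortCircuit(): Yield.bye() throws TraversableFinishError, which shortCircuit() swallows, so traversal stops as soon as the answer is known. An illustrative sketch of the resulting behaviour, again assuming DoubleQuery.of(double[]):

    // anyMatch stops at the first matching element; allMatch stops at the first failure.
    boolean hasNegative = DoubleQuery.of(new double[]{1.0, -2.0, 3.0})
            .anyMatch(v -> v < 0);   // true: traversal stops as soon as -2.0 is seen
    boolean allPositive = DoubleQuery.of(new double[]{1.0, -2.0, 3.0})
            .allMatch(v -> v > 0);   // false: short-circuits at -2.0 via Yield.bye()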

    /**
     * Returns an {@link OptionalDouble} describing any element of this {@code DoubleQuery},
     * or an empty {@code OptionalDouble} if this {@code DoubleQuery} is empty.
     */
    public OptionalDouble findAny() {
        return this.findFirst();
    }

    /**
     * Returns an {@link OptionalDouble} describing the first element of this {@code DoubleQuery},
     * or an empty {@code OptionalDouble} if this {@code DoubleQuery} is empty.
     */
    public OptionalDouble findFirst() {
        DoubleBox box = new DoubleBox();
        this.tryAdvance(box::turnPresent);
        return box.isPresent()
                ? OptionalDouble.of(box.getValue())
                : OptionalDouble.empty();
    }

    /**
     * Returns a {@code LongQuery} consisting of the elements of this {@code DoubleQuery},
     * converted to long. It is equivalent to calling Math.round on every element.
     * <p>
     * This is an intermediate operation.
     */
    public LongQuery asLongQuery() {
        return this.mapToLong(Math::round);
    }

    /**
     * Returns a {@code LongQuery} consisting of the results of applying the given
     * function to the elements of this {@code DoubleQuery}.
     *
     * @param function DoubleToLongFunction used to map the elements of this DoubleQuery
     */
    public LongQuery mapToLong(DoubleToLongFunction function) {
        return new LongQuery(LongAdvancer.from(adv, function), LongTraverser.from(trav, function));
    }

    /**
     * Returns an {@code IntQuery} consisting of the elements of this {@code DoubleQuery},
     * converted to int. It is equivalent to rounding every element with Math.round and
     * narrowing the result to int.
     * <p>
     * This is an intermediate operation.
     */
    public IntQuery asIntQuery() {
        return this.mapToInt(v -> (int) Math.round(v));
    }

    /**
     * Returns an {@code IntQuery} consisting of the results of applying the given
     * function to the elements of this {@code DoubleQuery}.
     *
     * @param function DoubleToIntFunction used to map the elements of this DoubleQuery
     */
    public IntQuery mapToInt(DoubleToIntFunction function) {
        return new IntQuery(IntAdvancer.from(adv, function), IntTraverser.from(trav, function));
    }

    /**
     * Returns a {@code Query} consisting of the elements of this {@code DoubleQuery},
     * each boxed to a Double.
     */
    public Query<Double> boxed() {
        return new Query<>(adv, trav);
    }

    public DoubleStream toStream() {
        Spliterator.OfDouble iter = new Spliterators.AbstractDoubleSpliterator(Long.MAX_VALUE, Spliterator.ORDERED) {
            @Override
            public boolean tryAdvance(DoubleConsumer action) {
                return adv.tryAdvance(action::accept);
            }

            @Override
            public void forEachRemaining(DoubleConsumer action) {
                trav.traverse(action::accept);
            }
        };
        return StreamSupport.doubleStream(iter, false);
    }

    /**
     * The {@code then} operator lets you encapsulate a piece of an operator
     * chain into a function.
     * The function {@code next} is applied to this {@code DoubleQuery} to produce a new
     * {@code DoubleTraverser} object that is encapsulated in the resulting {@code DoubleQuery}.
     * On the other hand, {@code nextAdv} is applied to this query to produce a new
     * {@code DoubleAdvancer} object that is encapsulated in the resulting query.
     */
    public final DoubleQuery then(
            Function<DoubleQuery, DoubleAdvancer> nextAdv,
            Function<DoubleQuery, DoubleTraverser> next) {
        return new DoubleQuery(nextAdv.apply(this), next.apply(this));
    }

    /**
     * The {@code then} operator lets you encapsulate a piece of an operator
     * chain into a function.
     * The function {@code next} is applied to this {@code DoubleQuery} to produce a new
     * {@code DoubleTraverser} object that is encapsulated in the resulting {@code DoubleQuery}.
     */
    public final DoubleQuery then(Function<DoubleQuery, DoubleTraverser> next) {
        DoubleAdvancer nextAdv = item -> {
            throw new UnsupportedOperationException(
                    "Missing tryAdvance() implementation! Use the overloaded then() providing both Advancer and Traverser!");
        };
        return new DoubleQuery(nextAdv, next.apply(this));
    }
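
A sketch of a custom operation hosted by the single-argument then(), illustrative only: the function passed in produces the DoubleTraverser of the resulting query, so bulk traversal works while tryAdvance() on the result throws, exactly as the implementation above states.

    // Custom operation via then(): keep only strictly positive elements.
    // Terminal operations that rely on tryAdvance() (e.g. findFirst()) would throw here,
    // because only the single-argument overload was used.
    DoubleQuery positives = DoubleQuery.of(new double[]{1.0, -2.0, 3.0})
            .then(src -> out -> src.traverse(d -> {
                if (d > 0) {
                    out.ret(d);
                }
            }));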

    /**
     * Returns a {@code DoubleQuery} consisting of the longest prefix of elements taken from
     * this {@code DoubleQuery} that match the given predicate.
     */
    public final DoubleQuery takeWhile(DoublePredicate predicate) {
        DoubleTakeWhile take = new DoubleTakeWhile(this, predicate);
        return new DoubleQuery(take, take);
    }

    /**
     * Creates a concatenated {@code DoubleQuery} in which the elements are
     * all the elements of this {@code DoubleQuery} followed by all the
     * elements of the other {@code DoubleQuery}.
     */
    public final DoubleQuery concat(DoubleQuery other) {
        DoubleConcat cat = new DoubleConcat(this, other);
        return new DoubleQuery(cat, cat);
    }

    /**
     * Returns a {@code DoubleQuery} consisting of the remaining elements of this query
     * after discarding the first sequence of elements that match the given Predicate.
     */
    public final DoubleQuery dropWhile(DoublePredicate predicate) {
        DoubleDropWhile drop = new DoubleDropWhile(this, predicate);
        return new DoubleQuery(drop, drop);
    }

    /**
     * Applies a specified function to the corresponding elements of two
     * sequences, producing a sequence of the results.
     */
    public final DoubleQuery zip(DoubleQuery other, DoubleBinaryOperator zipper) {
        DoubleZip zip = new DoubleZip(this, other, zipper);
        return new DoubleQuery(zip, zip);
    }
}
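
Taken together, each intermediate operation above wraps this query in a new advancer/traverser pair, and the terminal operations drive one of the two. An end-to-end sketch, illustrative only and assuming DoubleQuery.of(double[]) as used earlier in the class:

    // zip() pairs elements positionally, takeWhile() cuts the tail, sum() reduces.
    double total = DoubleQuery.of(new double[]{1.0, 2.0, 3.0, 4.0})
            .zip(DoubleQuery.of(new double[]{10.0, 20.0, 30.0, 40.0}), Double::sum)  // 11, 22, 33, 44
            .takeWhile(v -> v < 40.0)                                                // 11, 22, 33
            .sum();
    // total == 66.0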

@@ -0,0 +1,74 @@
package org.xbib.event.yield.primitives.dbl;

import org.xbib.event.yield.Traverser;
import org.xbib.event.yield.Yield;
import org.xbib.event.yield.primitives.intgr.IntTraverser;
import org.xbib.event.yield.primitives.lng.LongTraverser;

import java.util.function.DoubleToLongFunction;
import java.util.function.IntToDoubleFunction;
import java.util.function.LongToDoubleFunction;
import java.util.function.ToDoubleFunction;

/**
 * Bulk traversal.
 * We use the traverse method as the first choice to
 * implement Query operations.
 * This is a special kind of traversal that disallows individual access.
 */
public interface DoubleTraverser extends Traverser<Double> {
    /**
     * A DoubleTraverser object without elements.
     */
    static DoubleTraverser empty() {
        return action -> {
        };
    }

    /**
     * A DoubleTraverser object from a generic {@link Traverser} mapped by a {@link ToDoubleFunction}.
     *
     * @param source {@link Traverser} with the source elements for this {@code DoubleTraverser}.
     * @param mapper {@link ToDoubleFunction} that specifies how to map the source elements into double values.
     */
    static <T> DoubleTraverser from(Traverser<T> source, ToDoubleFunction<? super T> mapper) {
        return yield -> source.traverse(item -> yield.ret(mapper.applyAsDouble(item)));
    }

    /**
     * A DoubleTraverser object from a {@link LongTraverser} mapped by a {@link LongToDoubleFunction}.
     *
     * @param source {@link LongTraverser} with the source elements for this {@code DoubleTraverser}.
     * @param mapper {@link LongToDoubleFunction} that specifies how to map the source elements into double values.
     */
    static DoubleTraverser from(LongTraverser source, LongToDoubleFunction mapper) {
        return from((Traverser<Long>) source, mapper::applyAsDouble);
    }

    /**
     * A DoubleTraverser object from an {@link IntTraverser} mapped by an {@link IntToDoubleFunction}.
     *
     * @param source {@link IntTraverser} with the source elements for this {@code DoubleTraverser}.
     * @param mapper {@link IntToDoubleFunction} that specifies how to map the source elements into double values.
     */
    static DoubleTraverser from(IntTraverser source, IntToDoubleFunction mapper) {
        return from((Traverser<Integer>) source, mapper::applyAsDouble);
    }

    /**
     * Default traverse implementation that delegates to the
     * primitive version.
     */
    @Override
    default void traverse(Yield<? super Double> yield) {
        DoubleYield yld = yield::ret;
        this.traverse(yld);
    }

    /**
     * Yields elements sequentially in the current thread,
     * until all elements have been processed or an
     * exception is thrown.
     */
    void traverse(DoubleYield yield);
}
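
A minimal hand-written DoubleTraverser, shown only as an illustration: the single abstract method receives a DoubleYield, and the boxed default overload bridges to it.

    // A traverser over a fixed array; yields every element to the given DoubleYield.
    double[] data = {1.5, 2.5, 3.5};
    DoubleTraverser traverser = out -> {
        for (double d : data) {
            out.ret(d);
        }
    };
    traverser.traverse((double d) -> System.out.println(d));   // prints 1.5, 2.5, 3.5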

@@ -0,0 +1,16 @@
package org.xbib.event.yield.primitives.dbl;

import org.xbib.event.yield.Yield;

/**
 * Equivalent to {@code DoubleConsumer} with yield semantics.
 */
@FunctionalInterface
public interface DoubleYield extends Yield<Double> {
    void ret(double item);

    @Override
    default void ret(Double item) {
        this.ret((double) item);
    }
}

@@ -0,0 +1,29 @@
package org.xbib.event.yield.primitives.dbl.ops;

import org.xbib.event.yield.primitives.dbl.DoubleAdvancer;
import org.xbib.event.yield.primitives.dbl.DoubleQuery;
import org.xbib.event.yield.primitives.dbl.DoubleTraverser;
import org.xbib.event.yield.primitives.dbl.DoubleYield;

public class DoubleConcat implements DoubleAdvancer, DoubleTraverser {

    private final DoubleQuery first;

    private final DoubleQuery second;

    public DoubleConcat(DoubleQuery first, DoubleQuery second) {
        this.first = first;
        this.second = second;
    }

    @Override
    public void traverse(DoubleYield yield) {
        this.first.traverse(yield);
        this.second.traverse(yield);
    }

    @Override
    public boolean tryAdvance(DoubleYield yield) {
        return first.tryAdvance(yield) || second.tryAdvance(yield);
    }
}

@@ -0,0 +1,41 @@
package org.xbib.event.yield.primitives.dbl.ops;

import org.xbib.event.yield.boxes.BoolBox;
import org.xbib.event.yield.primitives.dbl.DoubleAdvancer;
import org.xbib.event.yield.primitives.dbl.DoubleQuery;
import org.xbib.event.yield.primitives.dbl.DoubleTraverser;
import org.xbib.event.yield.primitives.dbl.DoubleYield;

import java.util.HashSet;

public class DoubleDistinct implements DoubleAdvancer, DoubleTraverser {

    final HashSet<Double> mem = new HashSet<>();

    private final DoubleQuery upstream;

    public DoubleDistinct(DoubleQuery adv) {
        this.upstream = adv;
    }

    @Override
    public void traverse(DoubleYield yield) {
        upstream.traverse(item -> {
            if (mem.add(item)) {
                yield.ret(item);
            }
        });
    }

    @Override
    public boolean tryAdvance(DoubleYield yield) {
        final BoolBox found = new BoolBox();
        while (found.isFalse() && upstream.tryAdvance(item -> {
            if (mem.add(item)) {
                yield.ret(item);
                found.set();
            }
        })) {
            // Intentionally empty: the work happens inside the lambda passed to tryAdvance().
        }
        return found.isTrue();
    }
}

@@ -0,0 +1,54 @@
package org.xbib.event.yield.primitives.dbl.ops;

import org.xbib.event.yield.primitives.dbl.DoubleAdvancer;
import org.xbib.event.yield.primitives.dbl.DoubleQuery;
import org.xbib.event.yield.primitives.dbl.DoubleTraverser;
import org.xbib.event.yield.primitives.dbl.DoubleYield;

import java.util.function.DoublePredicate;

public class DoubleDropWhile implements DoubleAdvancer, DoubleTraverser {

    private final DoubleQuery upstream;
    private final DoublePredicate predicate;
    private boolean dropped;

    public DoubleDropWhile(DoubleQuery upstream, DoublePredicate predicate) {
        this.upstream = upstream;
        this.predicate = predicate;
        this.dropped = false;
    }

    @Override
    public void traverse(DoubleYield yield) {
        upstream.traverse(item -> {
            if (!dropped && !predicate.test(item)) {
                dropped = true;
            }
            if (dropped) {
                yield.ret(item);
            }
        });
    }

    @Override
    public boolean tryAdvance(DoubleYield yield) {
        if (dropped) {
            return upstream.tryAdvance(yield);
        } else {
            while (!dropped && dropNext(yield)) {
                // Intentionally empty. The first retained element is yielded inside dropNext().
            }
            return dropped;
        }
    }

    private boolean dropNext(DoubleYield yield) {
        return upstream.tryAdvance(item -> {
            if (!predicate.test(item)) {
                dropped = true;
                yield.ret(item);
            }
        });
    }
}
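
A short behavioural sketch, illustrative only and assuming DoubleQuery.of(double[]) and toArray() as seen in DoubleQuery: dropWhile discards the leading run of matching elements and keeps everything from the first non-matching element on.

    // The prefix 1.0, 2.0 matches (< 5.0) and is dropped; 9.0 and the trailing 1.0 survive.
    double[] rest = DoubleQuery.of(new double[]{1.0, 2.0, 9.0, 1.0})
            .dropWhile(v -> v < 5.0)
            .toArray();   // {9.0, 1.0}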

@@ -0,0 +1,45 @@
package org.xbib.event.yield.primitives.dbl.ops;

import org.xbib.event.yield.boxes.BoolBox;
import org.xbib.event.yield.primitives.dbl.DoubleAdvancer;
import org.xbib.event.yield.primitives.dbl.DoubleQuery;
import org.xbib.event.yield.primitives.dbl.DoubleTraverser;
import org.xbib.event.yield.primitives.dbl.DoubleYield;

import java.util.function.DoublePredicate;

public class DoubleFilter implements DoubleAdvancer, DoubleTraverser {

    private final DoubleQuery upstream;

    private final DoublePredicate p;

    public DoubleFilter(DoubleQuery adv, DoublePredicate p) {
        this.upstream = adv;
        this.p = p;
    }

    @Override
    public void traverse(DoubleYield yield) {
        upstream.traverse(e -> {
            if (p.test(e)) {
                yield.ret(e);
            }
        });
    }

    @Override
    public boolean tryAdvance(DoubleYield yield) {
        BoolBox found = new BoolBox();
        while (found.isFalse()) {
            boolean hasNext = upstream.tryAdvance(item -> {
                if (p.test(item)) {
                    yield.ret(item);
                    found.set();
                }
            });
            if (!hasNext) {
                break;
            }
        }
        return found.isTrue();
    }
}
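
All of the ops classes in this commit follow the same pattern: one object implements both DoubleAdvancer and DoubleTraverser and is handed twice to a DoubleQuery. A hedged sketch of wiring DoubleFilter by hand, assuming the DoubleQuery(advancer, traverser) constructor is accessible as its use in then() suggests; a filter() convenience method on DoubleQuery presumably performs this wrapping but is not shown in this excerpt.

    // Manual wiring, mirroring takeWhile()/concat()/dropWhile() in DoubleQuery.
    DoubleQuery source = DoubleQuery.of(new double[]{-1.0, 2.0, -3.0, 4.0});
    DoubleFilter op = new DoubleFilter(source, v -> v > 0.0);
    DoubleQuery positives = new DoubleQuery(op, op);
    double sum = positives.sum();   // 6.0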