Merge branch '2.2' of alkmene:joerg/elx into 2.2

This commit is contained in: commit c8c8846bb2

121 changed files with 5774 additions and 2792 deletions

2  .gitignore (vendored)
@@ -9,5 +9,7 @@
/.project
/.gradle
build
out
plugins
*.iml
*~
@@ -1,7 +1,3 @@
language: java
sudo: required
jdk:
  - oraclejdk8
cache:
  directories:
    - $HOME/.m2
  - openjdk11

172  build.gradle
@@ -1,157 +1,41 @@
plugins {
    id "org.sonarqube" version "2.6.1"
    id "io.codearte.nexus-staging" version "0.11.0"
    id "com.github.spotbugs" version "1.6.9"
    id "org.xbib.gradle.plugin.asciidoctor" version "1.5.6.0.1"
    id "de.marcphilipp.nexus-publish" version "0.4.0"
    id "io.codearte.nexus-staging" version "0.21.1"
}

printf "Host: %s\nOS: %s %s %s\nJVM: %s %s %s %s\nGradle: %s Groovy: %s Java: %s\n" +
        "Build: group: ${project.group} name: ${project.name} version: ${project.version}\n",
        InetAddress.getLocalHost(),
        System.getProperty("os.name"),
        System.getProperty("os.arch"),
        System.getProperty("os.version"),
        System.getProperty("java.version"),
        System.getProperty("java.vm.version"),
        System.getProperty("java.vm.vendor"),
        System.getProperty("java.vm.name"),
        gradle.gradleVersion,
        GroovySystem.getVersion(),
        JavaVersion.current()

wrapper {
    gradleVersion = "${project.property('gradle.wrapper.version')}"
    distributionType = Wrapper.DistributionType.ALL
}

if (JavaVersion.current() < JavaVersion.VERSION_11) {
    throw new GradleException("This build must be run with java 11 or higher")

ext {
    user = 'jprante'
    name = 'elx'
    description = 'Extensions for Elasticsearch clients (node and transport)'
    inceptionYear = '2019'
    url = 'https://github.com/' + user + '/' + name
    scmUrl = 'https://github.com/' + user + '/' + name
    scmConnection = 'scm:git:git://github.com/' + user + '/' + name + '.git'
    scmDeveloperConnection = 'scm:git:ssh://git@github.com:' + user + '/' + name + '.git'
    issueManagementSystem = 'Github'
    issueManagementUrl = ext.scmUrl + '/issues'
    licenseName = 'The Apache License, Version 2.0'
    licenseUrl = 'http://www.apache.org/licenses/LICENSE-2.0.txt'
}

subprojects {
    apply plugin: 'java'
    apply plugin: 'maven'
    apply plugin: 'signing'
    apply plugin: 'com.github.spotbugs'
    apply plugin: 'pmd'
    apply plugin: 'checkstyle'
    apply plugin: 'org.xbib.gradle.plugin.asciidoctor'

    configurations {
        asciidoclet
        wagon
    }
    apply plugin: 'java-library'

    dependencies {
        testCompile "junit:junit:${project.property('junit.version')}"
        testCompile "org.apache.logging.log4j:log4j-core:${project.property('log4j.version')}"
        testCompile "org.apache.logging.log4j:log4j-slf4j-impl:${project.property('log4j.version')}"
        wagon "org.apache.maven.wagon:wagon-ssh:${project.property('wagon.version')}"
        testImplementation "org.apache.logging.log4j:log4j-core:${project.property('log4j.version')}"
        testImplementation "org.apache.logging.log4j:log4j-jul:${project.property('log4j.version')}"
        testImplementation "org.apache.logging.log4j:log4j-slf4j-impl:${project.property('log4j.version')}"
    }

    sourceCompatibility = JavaVersion.VERSION_1_8
    targetCompatibility = JavaVersion.VERSION_1_8

    tasks.withType(JavaCompile) {
        options.compilerArgs << "-Xlint:all"
        if (!options.compilerArgs.contains("-processor")) {
            options.compilerArgs << '-proc:none'
        }
    apply from: rootProject.file('gradle/ide/idea.gradle')
    apply from: rootProject.file('gradle/compile/java.gradle')
    apply from: rootProject.file('gradle/test/junit5.gradle')
    apply from: rootProject.file('gradle/publishing/publication.gradle')
    }

    test {
        jvmArgs = [
            '--add-exports=java.base/jdk.internal.ref=ALL-UNNAMED',
            '--add-opens=java.base/java.nio=ALL-UNNAMED'
        ]
        systemProperty 'jna.debug_load', 'true'
        testLogging {
            showStandardStreams = true
            exceptionFormat = 'full'
        }
    }

    clean {
        delete "data"
        delete "logs"
        delete "out"
    }

    /*javadoc {
        options.docletpath = configurations.asciidoclet.files.asType(List)
        options.doclet = 'org.asciidoctor.Asciidoclet'
        options.overview = "src/docs/asciidoclet/overview.adoc"
        options.addStringOption "-base-dir", "${projectDir}"
        options.addStringOption "-attribute",
                "name=${project.name},version=${project.version},title-link=https://github.com/xbib/${project.name}"
        configure(options) {
            noTimestamp = true
        }
    }*/

    task javadocJar(type: Jar, dependsOn: javadoc) {
        classifier 'javadoc'
    }

    task sourcesJar(type: Jar, dependsOn: classes) {
        from sourceSets.main.allSource
        classifier 'sources'
    }

    artifacts {
        archives javadocJar, sourcesJar
    }

    if (project.hasProperty('signing.keyId')) {
        signing {
            sign configurations.archives
        }
    }

    apply from: "${rootProject.projectDir}/gradle/publish.gradle"

    spotbugs {
        effort = "max"
        reportLevel = "low"
        //includeFilter = file("findbugs-exclude.xml")
    }

    tasks.withType(com.github.spotbugs.SpotBugsTask) {
        ignoreFailures = true
        reports {
            xml.enabled = false
            html.enabled = true
        }
    }

    tasks.withType(Pmd) {
        ignoreFailures = true
        reports {
            xml.enabled = true
            html.enabled = true
        }
    }
    tasks.withType(Checkstyle) {
        ignoreFailures = true
        reports {
            xml.enabled = true
            html.enabled = true
        }
    }

    pmd {
        toolVersion = '6.11.0'
        ruleSets = ['category/java/bestpractices.xml']
    }

    checkstyle {
        configFile = rootProject.file('config/checkstyle/checkstyle.xml')
        ignoreFailures = true
        showViolations = true
    }

    sonarqube {
        properties {
            property "sonar.projectName", "${project.group} ${project.name}"
            property "sonar.sourceEncoding", "UTF-8"
            property "sonar.tests", "src/test/java"
            property "sonar.scm.provider", "git"
            property "sonar.junit.reportsPath", "build/test-results/test/"
        }
    }
}
apply from: rootProject.file('gradle/publishing/sonatype.gradle')
@@ -1,19 +1,24 @@
dependencies {
    compile "org.xbib:metrics:${project.property('xbib-metrics.version')}"
    compile("org.elasticsearch:elasticsearch:${project.property('elasticsearch.version')}") {
        // exclude ES jackson yaml, cbor, smile versions
    api "org.xbib:metrics-common:${project.property('xbib-metrics.version')}"
    api("org.elasticsearch:elasticsearch:${project.property('elasticsearch.version')}") {
        // exclude original ES jackson yaml, cbor, smile version (2.6.2)
        exclude group: 'com.fasterxml.jackson.dataformat'
        // dependencies that are not meant for client
        // these dependencies are not meant for client applications
        exclude module: 'securesm'
        // we use log4j2, not log4j
        exclude group: 'log4j'
        // we use our own guava
        exclude group: 'com.google.guava'
    }
    // override log4j2 of Elastic with ours
    compile "org.apache.logging.log4j:log4j-core:${project.property('log4j.version')}"
    api "org.apache.logging.log4j:log4j-core:${project.property('log4j.version')}"
    // override ES jackson with our jackson version
    // for the Elasticsearch session, ES uses SMILE when encoding source for SearchRequest
    compile "com.fasterxml.jackson.dataformat:jackson-dataformat-smile:${project.property('jackson-dataformat.version')}"
    api "com.fasterxml.jackson.dataformat:jackson-dataformat-smile:${project.property('jackson.version')}"
    // CBOR is the default JSON content compression encoding in ES 2.2.1
    compile "com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:${project.property('jackson-dataformat.version')}"
    api "com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:${project.property('jackson.version')}"
    // not used here, but maybe in other projects
    compile "com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:${project.property('jackson-dataformat.version')}"
    api "com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:${project.property('jackson.version')}"
    // lift guava
    api "org.xbib:guava:${project.property('xbib-guava.version')}"
}
@@ -1,18 +0,0 @@
dependencies {
    compile("org.elasticsearch.client:transport:${rootProject.property('elasticsearch.version')}") {
        exclude group: 'org.elasticsearch', module: 'securesm'
        exclude group: 'org.elasticsearch.plugin', module: 'transport-netty3-client'
        exclude group: 'org.elasticsearch.plugin', module: 'reindex-client'
        exclude group: 'org.elasticsearch.plugin', module: 'percolator-client'
        exclude group: 'org.elasticsearch.plugin', module: 'lang-mustache-client'
    }
    // we try to override the Elasticsearch netty with our netty version, which might be more recent
    compile "io.netty:netty-buffer:${rootProject.property('netty.version')}"
    compile "io.netty:netty-codec-http:${rootProject.property('netty.version')}"
    compile "io.netty:netty-handler:${rootProject.property('netty.version')}"
}

jar {
    baseName "${rootProject.name}-api"
}
@@ -14,6 +14,10 @@ public interface BulkController extends Closeable, Flushable {

    void init(Settings settings);

    void inactivate();

    BulkMetric getBulkMetric();

    Throwable getLastBulkError();

    void startBulkMode(IndexDefinition indexDefinition) throws IOException;

@@ -21,16 +25,15 @@ public interface BulkController extends Closeable, Flushable {
    void startBulkMode(String indexName, long startRefreshIntervalInSeconds,
                       long stopRefreshIntervalInSeconds) throws IOException;

    void index(IndexRequest indexRequest);
    void bulkIndex(IndexRequest indexRequest);

    void delete(DeleteRequest deleteRequest);
    void bulkDelete(DeleteRequest deleteRequest);

    void update(UpdateRequest updateRequest);
    void bulkUpdate(UpdateRequest updateRequest);

    boolean waitForResponses(long timeout, TimeUnit timeUnit);
    boolean waitForBulkResponses(long timeout, TimeUnit timeUnit);

    void stopBulkMode(IndexDefinition indexDefinition) throws IOException;

    void stopBulkMode(String index, long timeout, TimeUnit timeUnit) throws IOException;

}
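For orientation, a minimal caller-side sketch of the renamed bulk methods; the index name, refresh intervals, ids, and document source here are hypothetical and not part of this commit:

    // Hedged sketch: drives the renamed bulk* methods of BulkController.
    BulkController controller = client.getBulkController();
    controller.startBulkMode("myindex", -1L, 30L); // hypothetical refresh intervals in seconds
    controller.bulkIndex(new IndexRequest().index("myindex").type("doc").id("1")
            .source("{\"field\":\"value\"}"));
    controller.bulkDelete(new DeleteRequest().index("myindex").type("doc").id("2"));
    if (!controller.waitForBulkResponses(30L, TimeUnit.SECONDS)) {
        Throwable t = controller.getLastBulkError(); // null if no bulk failure was recorded
    }
    controller.stopBulkMode("myindex", 30L, TimeUnit.SECONDS);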
43  elx-api/src/main/java/org/xbib/elx/api/BulkListener.java  Normal file
@@ -0,0 +1,43 @@
package org.xbib.elx.api;

import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;

public interface BulkListener {

    /**
     * Callback before the bulk is executed.
     *
     * @param executionId execution ID
     * @param request request
     */
    void beforeBulk(long executionId, BulkRequest request);

    /**
     * Callback after a successful execution of a bulk request.
     *
     * @param executionId execution ID
     * @param request request
     * @param response response
     */
    void afterBulk(long executionId, BulkRequest request, BulkResponse response);

    /**
     * Callback after a failed execution of a bulk request.
     *
     * Note that if an instance of <code>InterruptedException</code> is passed, which means that request
     * processing has been cancelled externally, the thread's interruption status has been restored prior
     * to calling this method.
     *
     * @param executionId execution ID
     * @param request request
     * @param failure failure
     */
    void afterBulk(long executionId, BulkRequest request, Throwable failure);

    /**
     * Get the last bulk error.
     * @return the last bulk error
     */
    Throwable getLastBulkError();
}
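A minimal sketch of an implementing class, assuming only the interface above; the class name and logging choices are illustrative, not part of this commit:

    // Hedged sketch: a logging BulkListener that records the last failure.
    import org.apache.logging.log4j.LogManager;
    import org.apache.logging.log4j.Logger;
    import org.elasticsearch.action.bulk.BulkRequest;
    import org.elasticsearch.action.bulk.BulkResponse;
    import org.xbib.elx.api.BulkListener;

    public class LoggingBulkListener implements BulkListener {

        private static final Logger logger = LogManager.getLogger(LoggingBulkListener.class);

        private volatile Throwable lastBulkError;

        @Override
        public void beforeBulk(long executionId, BulkRequest request) {
            logger.debug("bulk [{}] with {} actions about to execute", executionId, request.numberOfActions());
        }

        @Override
        public void afterBulk(long executionId, BulkRequest request, BulkResponse response) {
            if (response.hasFailures()) {
                logger.warn("bulk [{}] had failures: {}", executionId, response.buildFailureMessage());
            }
        }

        @Override
        public void afterBulk(long executionId, BulkRequest request, Throwable failure) {
            lastBulkError = failure;
            logger.error("bulk [{}] failed", executionId, failure);
        }

        @Override
        public Throwable getLastBulkError() {
            return lastBulkError;
        }
    }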
@@ -1,15 +1,12 @@
package org.xbib.elx.api;

import org.elasticsearch.common.settings.Settings;
import org.xbib.metrics.Count;
import org.xbib.metrics.Metered;
import org.xbib.metrics.api.Count;
import org.xbib.metrics.api.Metered;

import java.io.Closeable;

public interface BulkMetric extends Closeable {

    void init(Settings settings);

    Metered getTotalIngest();

    Count getTotalIngestSizeInBytes();
@@ -1,8 +1,6 @@
package org.xbib.elx.api;

import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;

import java.io.Closeable;
import java.io.Flushable;

@@ -10,55 +8,12 @@ import java.util.concurrent.TimeUnit;

public interface BulkProcessor extends Closeable, Flushable {

    BulkProcessor add(ActionRequest<?> request);

    BulkProcessor add(ActionRequest<?> request, Object payload);
    @SuppressWarnings("rawtypes")
    BulkProcessor add(ActionRequest request);

    boolean awaitFlush(long timeout, TimeUnit unit) throws InterruptedException;

    boolean awaitClose(long timeout, TimeUnit unit) throws InterruptedException;

    interface BulkRequestHandler {

        void execute(BulkRequest bulkRequest, long executionId);

        boolean close(long timeout, TimeUnit unit) throws InterruptedException;

    }

    /**
     * A listener for the execution.
     */
    public interface Listener {

        /**
         * Callback before the bulk is executed.
         *
         * @param executionId execution ID
         * @param request request
         */
        void beforeBulk(long executionId, BulkRequest request);

        /**
         * Callback after a successful execution of a bulk request.
         *
         * @param executionId execution ID
         * @param request request
         * @param response response
         */
        void afterBulk(long executionId, BulkRequest request, BulkResponse response);

        /**
         * Callback after a failed execution of a bulk request.
         *
         * Note that if an instance of <code>InterruptedException</code> is passed, which means that request
         * processing has been cancelled externally, the thread's interruption status has been restored prior
         * to calling this method.
         *
         * @param executionId execution ID
         * @param request request
         * @param failure failure
         */
        void afterBulk(long executionId, BulkRequest request, Throwable failure);
    }
    BulkListener getBulkListener();
}
@@ -0,0 +1,11 @@
package org.xbib.elx.api;

import org.elasticsearch.action.bulk.BulkRequest;
import java.util.concurrent.TimeUnit;

public interface BulkRequestHandler {

    void execute(BulkRequest bulkRequest, long executionId);

    boolean close(long timeout, TimeUnit unit) throws InterruptedException;
}
@@ -6,6 +6,7 @@ import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;

import java.io.Closeable;
import java.io.Flushable;

@@ -34,12 +35,6 @@ public interface ExtendedClient extends Flushable, Closeable {
     */
    ElasticsearchClient getClient();

    /**
     * Get bulk metric.
     * @return the bulk metric
     */
    BulkMetric getBulkMetric();

    /**
     * Get bulk control.
     * @return the bulk control

@@ -126,8 +121,9 @@ public interface ExtendedClient extends Flushable, Closeable {
     * @param id the id
     * @param source the source
     * @return this
     * @throws IOException if update fails
     */
    ExtendedClient update(String index, String id, BytesReference source);
    ExtendedClient update(String index, String id, BytesReference source) throws IOException;

    /**
     * Update document. Use with precaution! Does not work in all cases.

@@ -169,6 +165,16 @@ public interface ExtendedClient extends Flushable, Closeable {
     */
    ExtendedClient newIndex(String index, InputStream settings, InputStream mapping) throws IOException;

    /**
     * Create a new index.
     *
     * @param index index
     * @param settings settings
     * @return this
     * @throws IOException if settings is invalid or index creation fails
     */
    ExtendedClient newIndex(String index, Settings settings) throws IOException;

    /**
     * Create a new index.
     *

@@ -189,7 +195,18 @@ public interface ExtendedClient extends Flushable, Closeable {
     * @return this
     * @throws IOException if settings/mapping is invalid or index creation fails
     */
    ExtendedClient newIndex(String index, Settings settings, Map<String, Object> mapping) throws IOException;
    ExtendedClient newIndex(String index, Settings settings, XContentBuilder mapping) throws IOException;

    /**
     * Create a new index.
     *
     * @param index index
     * @param settings settings
     * @param mapping mapping
     * @return this
     * @throws IOException if settings/mapping is invalid or index creation fails
     */
    ExtendedClient newIndex(String index, Settings settings, Map<String, ?> mapping) throws IOException;

    /**
     * Create a new index.

@@ -364,9 +381,11 @@ public interface ExtendedClient extends Flushable, Closeable {
     * @param index the index
     * @param key the key of the value to be updated
     * @param value the new value
     * @param timeout timeout
     * @param timeUnit time unit
     * @throws IOException if updating the index setting failed
     */
    void updateIndexSetting(String index, String key, Object value) throws IOException;
    void updateIndexSetting(String index, String key, Object value, long timeout, TimeUnit timeUnit) throws IOException;

    /**
     * Resolve alias.

@@ -386,11 +405,11 @@ public interface ExtendedClient extends Flushable, Closeable {
    String resolveMostRecentIndex(String alias);

    /**
     * Get all index filters.
     * Get all index aliases.
     * @param index the index
     * @return map of index filters
     * @return map of index aliases
     */
    Map<String, String> getIndexFilters(String index);
    Map<String, String> getAliases(String index);

    /**
     * Shift from one index to another.
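A hedged usage sketch of the revised ExtendedClient API; the index name, settings values, and document source are examples, not taken from this commit, and exception handling is omitted:

    // Sketch only: create an index with explicit settings, index one document, flush.
    Settings settings = Settings.settingsBuilder()
            .put("index.number_of_shards", 1)   // hypothetical values
            .put("index.number_of_replicas", 0)
            .build();
    client.newIndex("myindex", settings, (Map<String, ?>) null);
    client.index("myindex", "1", true, "{\"field\":\"value\"}");
    client.flush();
    // the new overload takes an explicit timeout:
    client.updateIndexSetting("myindex", "number_of_replicas", 1, 30L, TimeUnit.SECONDS);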
@@ -1,9 +1,9 @@
package org.xbib.elx.api;

import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequestBuilder;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;

@FunctionalInterface
public interface IndexAliasAdder {

    void addIndexAlias(IndicesAliasesRequestBuilder builder, String index, String alias);
    void addIndexAlias(IndicesAliasesRequest request, String index, String alias);
}
@@ -4,7 +4,7 @@ import java.util.List;

public interface IndexPruneResult {

    enum State { NOTHING_TO_DO, SUCCESS, NONE };
    enum State { NOTHING_TO_DO, SUCCESS, NONE, FAIL };

    State getState();
25  elx-api/src/main/java/org/xbib/elx/api/ReadClient.java  Normal file
@@ -0,0 +1,25 @@
package org.xbib.elx.api;

import org.elasticsearch.action.ActionFuture;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.get.MultiGetRequest;
import org.elasticsearch.action.get.MultiGetResponse;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;

public interface ReadClient {

    ActionFuture<GetResponse> get(GetRequest getRequest);

    void get(GetRequest request, ActionListener<GetResponse> listener);

    ActionFuture<MultiGetResponse> multiGet(MultiGetRequest request);

    void multiGet(MultiGetRequest request, ActionListener<MultiGetResponse> listener);

    ActionFuture<SearchResponse> search(SearchRequest request);

    void search(SearchRequest request, ActionListener<SearchResponse> listener);
}
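A minimal sketch of how the new read-side interface might be used, both blocking and asynchronous; the client wiring, index, and id are hypothetical:

    // Blocking get via ActionFuture:
    GetResponse getResponse = readClient.get(new GetRequest("myindex", "doc", "1")).actionGet();
    // Asynchronous search via ActionListener:
    readClient.search(new SearchRequest().indices("myindex"), new ActionListener<SearchResponse>() {
        @Override
        public void onResponse(SearchResponse response) {
            // consume response.getHits()
        }
        @Override
        public void onFailure(Throwable e) {
            // handle the failure
        }
    });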
@@ -0,0 +1,6 @@
package org.xbib.elx.api;

public interface ReadClientProvider<C extends ReadClient> {

    C getReadClient();
}
@@ -1,4 +1,4 @@
/**
 * The API of the Elasticsearch extensions.
 * The API of the extended Elasticsearch clients.
 */
package org.xbib.elx.api;
@@ -1,9 +1,7 @@
dependencies {
    compile project(':elx-api')
    compile "org.xbib:guice:${project.property('xbib-guice.version')}"
    // add all dependencies to the runtime source set, even those which are excluded by the Elasticsearch jar,
    // for metaprogramming. We are in Groovyland.
    runtime "com.vividsolutions:jts:${project.property('jts.version')}"
    runtime "com.github.spullara.mustache.java:compiler:${project.property('mustache.version')}"
    runtime "net.java.dev.jna:jna:${project.property('jna.version')}"
    api project(':elx-api')
    implementation "org.xbib:guice:${project.property('xbib-guice.version')}"
    runtimeOnly "com.vividsolutions:jts:${project.property('jts.version')}"
    runtimeOnly "com.github.spullara.mustache.java:compiler:${project.property('mustache.version')}"
    runtimeOnly "net.java.dev.jna:jna:${project.property('jna.version')}"
}
@@ -1,65 +0,0 @@
buildscript {
    repositories {
        jcenter()
        maven {
            url 'http://xbib.org/repository'
        }
    }
    dependencies {
        classpath "org.xbib.elasticsearch:gradle-plugin-elasticsearch-build:6.2.2.0"
    }
}

apply plugin: 'org.xbib.gradle.plugin.elasticsearch.build'

configurations {
    main
    tests
}

dependencies {
    compile project(':api')
    compile "org.xbib:metrics:${project.property('xbib-metrics.version')}"
    compileOnly "org.apache.logging.log4j:log4j-api:${project.property('log4j.version')}"
    testCompile "org.xbib.elasticsearch:elasticsearch-test-framework:${project.property('elasticsearch-devkit.version')}"
    testRuntime "org.xbib.elasticsearch:elasticsearch-test-framework:${project.property('elasticsearch-devkit.version')}"
}

jar {
    baseName "${rootProject.name}-common"
}

/*
task testJar(type: Jar, dependsOn: testClasses) {
    baseName = "${project.archivesBaseName}-tests"
    from sourceSets.test.output
}
*/

artifacts {
    main jar
    tests testJar
    archives sourcesJar, javadocJar
}

test {
    enabled = false
    jvmArgs "-javaagent:" + configurations.alpnagent.asPath
    systemProperty 'path.home', project.buildDir.absolutePath
    testLogging {
        showStandardStreams = true
        exceptionFormat = 'full'
    }
}

randomizedTest {
    enabled = false
}

esTest {
    // test with the jars, not the classes, for security manager
    // classpath = files(configurations.testRuntime) + configurations.main.artifacts.files + configurations.tests.artifacts.files
    systemProperty 'tests.security.manager', 'true'
}
esTest.dependsOn jar, testJar
@@ -2,6 +2,7 @@ package org.xbib.elx.common;

import com.carrotsearch.hppc.cursors.ObjectCursor;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchTimeoutException;

@@ -9,33 +10,30 @@ import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.admin.cluster.state.ClusterStateAction;
import org.elasticsearch.action.admin.cluster.state.ClusterStateRequestBuilder;
import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest;
import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesAction;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequestBuilder;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesAction;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequestBuilder;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse;
import org.elasticsearch.action.admin.indices.create.CreateIndexAction;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequestBuilder;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexResponse;
import org.elasticsearch.action.admin.indices.flush.FlushAction;
import org.elasticsearch.action.admin.indices.flush.FlushRequest;
import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeAction;
import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequestBuilder;
import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest;
import org.elasticsearch.action.admin.indices.get.GetIndexAction;
import org.elasticsearch.action.admin.indices.get.GetIndexRequestBuilder;
import org.elasticsearch.action.admin.indices.get.GetIndexResponse;
import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsAction;
import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequestBuilder;
import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest;
import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
import org.elasticsearch.action.admin.indices.recovery.RecoveryAction;
import org.elasticsearch.action.admin.indices.recovery.RecoveryRequest;
import org.elasticsearch.action.admin.indices.recovery.RecoveryResponse;
import org.elasticsearch.action.admin.indices.refresh.RefreshAction;
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
import org.elasticsearch.action.admin.indices.settings.get.GetSettingsAction;

@@ -46,27 +44,31 @@ import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.search.SearchAction;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.client.transport.NoNodeAvailableException;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.cluster.metadata.AliasAction;
import org.elasticsearch.cluster.metadata.AliasMetaData;
import org.elasticsearch.cluster.metadata.AliasOrIndex;
import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.sort.SortBuilder;
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.search.sort.SortOrder;
import org.xbib.elx.api.BulkController;
import org.xbib.elx.api.BulkMetric;
import org.xbib.elx.api.ExtendedClient;
import org.xbib.elx.api.IndexAliasAdder;
import org.xbib.elx.api.IndexDefinition;

@@ -82,14 +84,14 @@ import java.time.LocalDate;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.concurrent.ExecutionException;
@@ -106,22 +108,11 @@ public abstract class AbstractExtendedClient implements ExtendedClient {

    private static final Logger logger = LogManager.getLogger(AbstractExtendedClient.class.getName());

    /**
     * The one and only index type name used in the extended client.
     * Note that Elasticsearch versions < 6.2.0 do not allow a prepended "_".
     */
    private static final String TYPE_NAME = "doc";

    /**
     * The Elasticsearch client.
     */
    private ElasticsearchClient client;

    private BulkMetric bulkMetric;

    private BulkController bulkController;

    private AtomicBoolean closed;
    private final AtomicBoolean closed;

    private static final IndexShiftResult EMPTY_INDEX_SHIFT_RESULT = new IndexShiftResult() {
        @Override

@@ -159,6 +150,8 @@ public abstract class AbstractExtendedClient implements ExtendedClient {

    protected abstract ElasticsearchClient createClient(Settings settings) throws IOException;

    protected abstract void closeClient() throws IOException;

    protected AbstractExtendedClient() {
        closed = new AtomicBoolean(false);
    }

@@ -166,9 +159,6 @@ public abstract class AbstractExtendedClient implements ExtendedClient {
    @Override
    public AbstractExtendedClient setClient(ElasticsearchClient client) {
        this.client = client;
        this.bulkMetric = new DefaultBulkMetric();
        bulkMetric.start();
        this.bulkController = new DefaultBulkController(this, bulkMetric);
        return this;
    }

@@ -177,11 +167,6 @@ public abstract class AbstractExtendedClient implements ExtendedClient {
        return client;
    }

    @Override
    public BulkMetric getBulkMetric() {
        return bulkMetric;
    }

    @Override
    public BulkController getBulkController() {
        return bulkController;

@@ -189,13 +174,12 @@ public abstract class AbstractExtendedClient implements ExtendedClient {

    @Override
    public AbstractExtendedClient init(Settings settings) throws IOException {
        logger.info("initializing with settings = " + settings.toDelimitedString(','));
        if (client == null) {
            client = createClient(settings);
        }
        if (bulkMetric != null) {
            bulkMetric.start();
        }
        if (bulkController != null) {
        if (bulkController == null) {
            this.bulkController = new DefaultBulkController(this);
            bulkController.init(settings);
        }
        return this;
@@ -210,27 +194,23 @@ public abstract class AbstractExtendedClient implements ExtendedClient {

    @Override
    public void close() throws IOException {
        ensureActive();
        ensureClient();
        if (closed.compareAndSet(false, true)) {
            if (bulkMetric != null) {
                logger.info("closing bulk metric before bulk controller (for precise measurement)");
                bulkMetric.close();
            }
            if (bulkController != null) {
                logger.info("closing bulk controller");
                bulkController.close();
            }
            logger.info("shutdown complete");
            closeClient();
        }
    }

    @Override
    public String getClusterName() {
        ensureActive();
        ensureClient();
        try {
            ClusterStateRequestBuilder clusterStateRequestBuilder =
                    new ClusterStateRequestBuilder(client, ClusterStateAction.INSTANCE).all();
            ClusterStateResponse clusterStateResponse = clusterStateRequestBuilder.execute().actionGet();
            ClusterStateRequest clusterStateRequest = new ClusterStateRequest().clear();
            ClusterStateResponse clusterStateResponse =
                    client.execute(ClusterStateAction.INSTANCE, clusterStateRequest).actionGet();
            return clusterStateResponse.getClusterName().value();
        } catch (ElasticsearchTimeoutException e) {
            logger.warn(e.getMessage(), e);

@@ -246,7 +226,7 @@ public abstract class AbstractExtendedClient implements ExtendedClient {

    @Override
    public ExtendedClient newIndex(IndexDefinition indexDefinition) throws IOException {
        ensureActive();
        ensureClient();
        waitForCluster("YELLOW", 30L, TimeUnit.SECONDS);
        URL indexSettings = indexDefinition.getSettingsUrl();
        if (indexSettings == null) {
@@ -280,42 +260,70 @@ public abstract class AbstractExtendedClient implements ExtendedClient {
    }

    @Override
    public ExtendedClient newIndex(String index) {
        return newIndex(index, Settings.EMPTY, (Map<String, Object>) null);
    public ExtendedClient newIndex(String index) throws IOException {
        return newIndex(index, Settings.EMPTY, (Map<String, ?>) null);
    }

    @Override
    public ExtendedClient newIndex(String index, InputStream settings, InputStream mapping) throws IOException {
        return newIndex(index,
                Settings.settingsBuilder().loadFromStream(".json", settings).build(),
                JsonXContent.jsonXContent.createParser(mapping).mapOrdered());
                mapping != null ? JsonXContent.jsonXContent.createParser(mapping).mapOrdered() : null);
    }

    @Override
    public ExtendedClient newIndex(String index, Settings settings) throws IOException {
        return newIndex(index, settings, (Map<String, ?>) null);
    }

    @Override
    public ExtendedClient newIndex(String index, Settings settings, String mapping) throws IOException {
        return newIndex(index, settings,
                JsonXContent.jsonXContent.createParser(mapping).mapOrdered());
                mapping != null ? JsonXContent.jsonXContent.createParser(mapping).mapOrdered() : null);
    }

    @Override
    public ExtendedClient newIndex(String index, Settings settings, Map<String, Object> mapping) {
        ensureActive();
    public ExtendedClient newIndex(String index, Settings settings, XContentBuilder mapping) {
        ensureClient();
        if (index == null) {
            logger.warn("no index name given to create index");
            return this;
        }
        CreateIndexRequestBuilder createIndexRequestBuilder =
                new CreateIndexRequestBuilder(client, CreateIndexAction.INSTANCE).setIndex(index);
        CreateIndexRequest createIndexRequest = new CreateIndexRequest().index(index);
        if (settings != null) {
            createIndexRequestBuilder.setSettings(settings);
            createIndexRequest.settings(settings);
        }
        if (mapping != null) {
            createIndexRequestBuilder.addMapping(TYPE_NAME, mapping);
            createIndexRequest.mapping("doc", mapping);
        }
        CreateIndexResponse createIndexResponse = createIndexRequestBuilder.execute().actionGet();
        logger.info("index {} created: {}", index, createIndexResponse);
        CreateIndexResponse createIndexResponse = client.execute(CreateIndexAction.INSTANCE, createIndexRequest).actionGet();
        if (createIndexResponse.isAcknowledged()) {
            return this;
        }
        throw new IllegalStateException("index creation not acknowledged: " + index);
    }

    @Override
    public ExtendedClient newIndex(String index, Settings settings, Map<String, ?> mapping) {
        ensureClient();
        if (index == null) {
            logger.warn("no index name given to create index");
            return this;
        }
        CreateIndexRequest createIndexRequest = new CreateIndexRequest().index(index);
        if (settings != null) {
            createIndexRequest.settings(settings);
        }
        if (mapping != null) {
            createIndexRequest.mapping("doc", mapping);
        }
        CreateIndexResponse createIndexResponse = client.execute(CreateIndexAction.INSTANCE, createIndexRequest).actionGet();
        if (createIndexResponse.isAcknowledged()) {
            return this;
        }
        throw new IllegalStateException("index creation not acknowledged: " + index);
    }

    @Override
    public ExtendedClient deleteIndex(IndexDefinition indexDefinition) {
@@ -324,14 +332,13 @@ public abstract class AbstractExtendedClient implements ExtendedClient {

    @Override
    public ExtendedClient deleteIndex(String index) {
        ensureActive();
        ensureClient();
        if (index == null) {
            logger.warn("no index name given to delete index");
            return this;
        }
        DeleteIndexRequestBuilder deleteIndexRequestBuilder =
                new DeleteIndexRequestBuilder(client, DeleteIndexAction.INSTANCE, index);
        deleteIndexRequestBuilder.execute().actionGet();
        DeleteIndexRequest deleteIndexRequest = new DeleteIndexRequest().indices(index);
        client.execute(DeleteIndexAction.INSTANCE, deleteIndexRequest).actionGet();
        return this;
    }

@@ -345,7 +352,7 @@ public abstract class AbstractExtendedClient implements ExtendedClient {
    public ExtendedClient startBulk(String index, long startRefreshIntervalSeconds, long stopRefreshIntervalSeconds)
            throws IOException {
        if (bulkController != null) {
            ensureActive();
            ensureClient();
            bulkController.startBulkMode(index, startRefreshIntervalSeconds, stopRefreshIntervalSeconds);
        }
        return this;

@@ -354,7 +361,7 @@ public abstract class AbstractExtendedClient implements ExtendedClient {
    @Override
    public ExtendedClient stopBulk(IndexDefinition indexDefinition) throws IOException {
        if (bulkController != null) {
            ensureActive();
            ensureClient();
            bulkController.stopBulkMode(indexDefinition);
        }
        return this;
@@ -363,92 +370,101 @@ public abstract class AbstractExtendedClient implements ExtendedClient {
    @Override
    public ExtendedClient stopBulk(String index, long timeout, TimeUnit timeUnit) throws IOException {
        if (bulkController != null) {
            ensureActive();
            ensureClient();
            bulkController.stopBulkMode(index, timeout, timeUnit);
        }
        return this;
    }

    @Override
    public ExtendedClient index(String index, String id, boolean create, BytesReference source) {
        return index(new IndexRequest(index, TYPE_NAME, id).create(create).source(source));
    public ExtendedClient index(String index, String id, boolean create, String source) {
        return index(new IndexRequest().index(index).type("doc").id(id).create(create)
                .source(source.getBytes(StandardCharsets.UTF_8)));
    }

    @Override
    public ExtendedClient index(String index, String id, boolean create, String source) {
        return index(new IndexRequest(index, TYPE_NAME, id).create(create).source(source.getBytes(StandardCharsets.UTF_8)));
    public ExtendedClient index(String index, String id, boolean create, BytesReference source) {
        return index(new IndexRequest().index(index).type("doc").id(id).create(create)
                .source(source));
    }

    @Override
    public ExtendedClient index(IndexRequest indexRequest) {
        ensureActive();
        bulkController.index(indexRequest);
        ensureClient();
        bulkController.bulkIndex(indexRequest);
        return this;
    }

    @Override
    public ExtendedClient delete(String index, String id) {
        return delete(new DeleteRequest(index, TYPE_NAME, id));
        return delete(new DeleteRequest().index(index).type("doc").id(id));
    }

    @Override
    public ExtendedClient delete(DeleteRequest deleteRequest) {
        ensureActive();
        bulkController.delete(deleteRequest);
        ensureClient();
        bulkController.bulkDelete(deleteRequest);
        return this;
    }

    @Override
    public ExtendedClient update(String index, String id, BytesReference source) {
        return update(new UpdateRequest(index, TYPE_NAME, id).doc(source));
        return update(new UpdateRequest().index(index).type("doc").id(id)
                .doc(source));
    }

    @Override
    public ExtendedClient update(String index, String id, String source) {
        return update(new UpdateRequest(index, TYPE_NAME, id).doc(source.getBytes(StandardCharsets.UTF_8)));
        return update(new UpdateRequest().index(index).type("doc").id(id)
                .doc(source.getBytes(StandardCharsets.UTF_8)));
    }

    @Override
    public ExtendedClient update(UpdateRequest updateRequest) {
        ensureActive();
        bulkController.update(updateRequest);
        ensureClient();
        bulkController.bulkUpdate(updateRequest);
        return this;
    }

    @Override
    public boolean waitForResponses(long timeout, TimeUnit timeUnit) {
        ensureActive();
        return bulkController.waitForResponses(timeout, timeUnit);
        ensureClient();
        return bulkController.waitForBulkResponses(timeout, timeUnit);
    }

    @Override
    public boolean waitForRecovery(String index, long maxWaitTime, TimeUnit timeUnit) {
        ensureActive();
        ensureClient();
        ensureIndexGiven(index);
        RecoveryResponse response = client.execute(RecoveryAction.INSTANCE, new RecoveryRequest(index)).actionGet();
        int shards = response.getTotalShards();
        GetSettingsRequest settingsRequest = new GetSettingsRequest();
        settingsRequest.indices(index);
        GetSettingsResponse settingsResponse = client.execute(GetSettingsAction.INSTANCE, settingsRequest).actionGet();
        int shards = settingsResponse.getIndexToSettings().get(index).getAsInt("index.number_of_shards", -1);
        if (shards > 0) {
            TimeValue timeout = toTimeValue(maxWaitTime, timeUnit);
            ClusterHealthRequest clusterHealthRequest = new ClusterHealthRequest()
                    .indices(new String[]{index})
                    .waitForActiveShards(shards)
                    .timeout(timeout);
            ClusterHealthResponse healthResponse =
                    client.execute(ClusterHealthAction.INSTANCE, new ClusterHealthRequest(index)
                            .waitForActiveShards(shards).timeout(timeout)).actionGet();
                    client.execute(ClusterHealthAction.INSTANCE, clusterHealthRequest).actionGet();
            if (healthResponse != null && healthResponse.isTimedOut()) {
                logger.error("timeout waiting for recovery");
                logger.warn("timeout waiting for recovery");
                return false;
            }
        }
        return true;
    }

    @Override
    public boolean waitForCluster(String statusString, long maxWaitTime, TimeUnit timeUnit) {
        ensureActive();
        ensureClient();
        ClusterHealthStatus status = ClusterHealthStatus.fromString(statusString);
        TimeValue timeout = toTimeValue(maxWaitTime, timeUnit);
        ClusterHealthResponse healthResponse = client.execute(ClusterHealthAction.INSTANCE,
                new ClusterHealthRequest().timeout(timeout).waitForStatus(status)).actionGet();
        if (healthResponse != null && healthResponse.isTimedOut()) {
            if (logger.isErrorEnabled()) {
                logger.error("timeout, cluster state is " + healthResponse.getStatus().name() + " and not " + status.name());
            }
            logger.warn("timeout, cluster state is " + healthResponse.getStatus().name() + " and not " + status.name());
            return false;
        }
        return true;
@@ -456,7 +472,7 @@ public abstract class AbstractExtendedClient implements ExtendedClient {

    @Override
    public String getHealthColor(long maxWaitTime, TimeUnit timeUnit) {
        ensureActive();
        ensureClient();
        try {
            TimeValue timeout = toTimeValue(maxWaitTime, timeUnit);
            ClusterHealthResponse healthResponse = client.execute(ClusterHealthAction.INSTANCE,

@@ -485,7 +501,7 @@ public abstract class AbstractExtendedClient implements ExtendedClient {
    public ExtendedClient updateReplicaLevel(String index, int level, long maxWaitTime, TimeUnit timeUnit) throws IOException {
        waitForCluster("YELLOW", maxWaitTime, timeUnit); // let cluster settle down from critical operations
        if (level > 0) {
            updateIndexSetting(index, "number_of_replicas", level);
            updateIndexSetting(index, "number_of_replicas", level, maxWaitTime, timeUnit);
            waitForRecovery(index, maxWaitTime, timeUnit);
        }
        return this;

@@ -513,7 +529,7 @@ public abstract class AbstractExtendedClient implements ExtendedClient {
    @Override
    public ExtendedClient flushIndex(String index) {
        if (index != null) {
            ensureActive();
            ensureClient();
            client.execute(FlushAction.INSTANCE, new FlushRequest(index)).actionGet();
        }
        return this;
@@ -522,31 +538,20 @@ public abstract class AbstractExtendedClient implements ExtendedClient {
    @Override
    public ExtendedClient refreshIndex(String index) {
        if (index != null) {
            ensureActive();
            ensureClient();
            client.execute(RefreshAction.INSTANCE, new RefreshRequest(index)).actionGet();
        }
        return this;
    }

    @Override
    public String resolveAlias(String alias) {
        ensureActive();
        GetAliasesRequestBuilder getAliasesRequestBuilder = new GetAliasesRequestBuilder(client, GetAliasesAction.INSTANCE);
        GetAliasesResponse getAliasesResponse = getAliasesRequestBuilder.setAliases(alias).execute().actionGet();
        if (!getAliasesResponse.getAliases().isEmpty()) {
            return getAliasesResponse.getAliases().keys().iterator().next().value;
        }
        return alias;
    }

    @Override
    public String resolveMostRecentIndex(String alias) {
        ensureActive();
        if (alias == null) {
            return null;
        }
        GetAliasesRequestBuilder getAliasesRequestBuilder = new GetAliasesRequestBuilder(client, GetAliasesAction.INSTANCE);
        GetAliasesResponse getAliasesResponse = getAliasesRequestBuilder.setAliases(alias).execute().actionGet();
        ensureClient();
        GetAliasesRequest getAliasesRequest = new GetAliasesRequest().aliases(alias);
        GetAliasesResponse getAliasesResponse = client.execute(GetAliasesAction.INSTANCE, getAliasesRequest).actionGet();
        Pattern pattern = Pattern.compile("^(.*?)(\\d+)$");
        Set<String> indices = new TreeSet<>(Collections.reverseOrder());
        for (ObjectCursor<String> indexName : getAliasesResponse.getAliases().keys()) {
@@ -558,10 +563,28 @@ public abstract class AbstractExtendedClient implements ExtendedClient {
        return indices.isEmpty() ? alias : indices.iterator().next();
    }

    public Map<String, String> getAliases(String index) {
        if (index == null) {
            return Collections.emptyMap();
        }
        GetAliasesRequest getAliasesRequest = new GetAliasesRequest().indices(index);
        return getFilters(client.execute(GetAliasesAction.INSTANCE, getAliasesRequest).actionGet());
    }

    @Override
    public Map<String, String> getIndexFilters(String index) {
        GetAliasesRequestBuilder getAliasesRequestBuilder = new GetAliasesRequestBuilder(client, GetAliasesAction.INSTANCE);
        return getFilters(getAliasesRequestBuilder.setIndices(index).execute().actionGet());
    public String resolveAlias(String alias) {
        ensureClient();
        ClusterStateRequest clusterStateRequest = new ClusterStateRequest();
        clusterStateRequest.blocks(false);
        clusterStateRequest.metaData(true);
        clusterStateRequest.nodes(false);
        clusterStateRequest.routingTable(false);
        clusterStateRequest.customs(false);
        ClusterStateResponse clusterStateResponse =
                client.execute(ClusterStateAction.INSTANCE, clusterStateRequest).actionGet();
        SortedMap<String, AliasOrIndex> map = clusterStateResponse.getState().getMetaData().getAliasAndIndexLookup();
        AliasOrIndex aliasOrIndex = map.get(alias);
        return aliasOrIndex != null ? aliasOrIndex.getIndices().iterator().next().getIndex() : null;
    }

    @Override
@ -592,61 +615,82 @@ public abstract class AbstractExtendedClient implements ExtendedClient {
|
|||
@Override
|
||||
public IndexShiftResult shiftIndex(String index, String fullIndexName,
|
||||
List<String> additionalAliases, IndexAliasAdder adder) {
|
||||
ensureActive();
|
||||
ensureClient();
|
||||
if (index == null) {
|
||||
return EMPTY_INDEX_SHIFT_RESULT; // nothing to shift to
|
||||
}
|
||||
if (index.equals(fullIndexName)) {
|
||||
return EMPTY_INDEX_SHIFT_RESULT; // nothing to shift to
|
||||
}
|
||||
// two situations: 1. there is a new alias 2. there is already an old index with the alias
|
||||
waitForCluster("YELLOW", 30L, TimeUnit.SECONDS);
|
||||
// two situations: 1. a new alias 2. there is already an old index with the alias
|
||||
String oldIndex = resolveAlias(index);
|
||||
final Map<String, String> oldFilterMap = oldIndex.equals(index) ? null : getIndexFilters(oldIndex);
|
||||
final List<String> newAliases = new LinkedList<>();
|
||||
final List<String> moveAliases = new LinkedList<>();
|
||||
IndicesAliasesRequestBuilder requestBuilder = new IndicesAliasesRequestBuilder(client, IndicesAliasesAction.INSTANCE);
|
||||
if (oldFilterMap == null || !oldFilterMap.containsKey(index)) {
|
||||
// never apply a filter for trunk index name
|
||||
requestBuilder.addAlias(fullIndexName, index);
|
||||
Map<String, String> oldAliasMap = index.equals(oldIndex) ? null : getAliases(oldIndex);
|
||||
logger.debug("old index = {} old alias map = {}", oldIndex, oldAliasMap);
|
||||
final List<String> newAliases = new ArrayList<>();
|
||||
final List<String> moveAliases = new ArrayList<>();
|
||||
IndicesAliasesRequest indicesAliasesRequest = new IndicesAliasesRequest();
|
||||
if (oldAliasMap == null || !oldAliasMap.containsKey(index)) {
|
||||
+                indicesAliasesRequest.addAliasAction(new IndicesAliasesRequest.AliasActions(AliasAction.Type.ADD,
+                        fullIndexName, index));
                 newAliases.add(index);
             }
             // move existing aliases
-            if (oldFilterMap != null) {
-                for (Map.Entry<String, String> entry : oldFilterMap.entrySet()) {
+            if (oldAliasMap != null) {
+                for (Map.Entry<String, String> entry : oldAliasMap.entrySet()) {
                     String alias = entry.getKey();
                     String filter = entry.getValue();
-                    requestBuilder.removeAlias(oldIndex, alias);
+                    indicesAliasesRequest.addAliasAction(new IndicesAliasesRequest.AliasActions(AliasAction.Type.REMOVE,
+                            oldIndex, alias));
                     if (filter != null) {
-                        requestBuilder.addAlias(fullIndexName, alias, filter);
+                        indicesAliasesRequest.addAliasAction(new IndicesAliasesRequest.AliasActions(AliasAction.Type.ADD,
+                                fullIndexName, alias).filter(filter));
                     } else {
-                        requestBuilder.addAlias(fullIndexName, alias);
+                        indicesAliasesRequest.addAliasAction(new IndicesAliasesRequest.AliasActions(AliasAction.Type.ADD,
+                                fullIndexName, alias));
                     }
                     moveAliases.add(alias);
                 }
             }
             // a list of aliases that should be added, check if new or old
             if (additionalAliases != null) {
-                for (String extraAlias : additionalAliases) {
-                    if (oldFilterMap == null || !oldFilterMap.containsKey(extraAlias)) {
+                for (String additionalAlias : additionalAliases) {
+                    if (oldAliasMap == null || !oldAliasMap.containsKey(additionalAlias)) {
                         // index alias adder only active on extra aliases, and if alias is new
                         if (adder != null) {
-                            adder.addIndexAlias(requestBuilder, fullIndexName, extraAlias);
+                            adder.addIndexAlias(indicesAliasesRequest, fullIndexName, additionalAlias);
                         } else {
-                            requestBuilder.addAlias(fullIndexName, extraAlias);
+                            indicesAliasesRequest.addAliasAction(new IndicesAliasesRequest.AliasActions(AliasAction.Type.ADD,
+                                    fullIndexName, additionalAlias));
                         }
-                        newAliases.add(extraAlias);
+                        newAliases.add(additionalAlias);
                     } else {
-                        String filter = oldFilterMap.get(extraAlias);
-                        requestBuilder.removeAlias(oldIndex, extraAlias);
+                        String filter = oldAliasMap.get(additionalAlias);
+                        indicesAliasesRequest.addAliasAction(new IndicesAliasesRequest.AliasActions(AliasAction.Type.REMOVE,
+                                oldIndex, additionalAlias));
                         if (filter != null) {
-                            requestBuilder.addAlias(fullIndexName, extraAlias, filter);
+                            indicesAliasesRequest.addAliasAction(new IndicesAliasesRequest.AliasActions(AliasAction.Type.ADD,
+                                    fullIndexName, additionalAlias).filter(filter));
                         } else {
-                            requestBuilder.addAlias(fullIndexName, extraAlias);
+                            indicesAliasesRequest.addAliasAction(new IndicesAliasesRequest.AliasActions(AliasAction.Type.ADD,
+                                    fullIndexName, additionalAlias));
                         }
-                        moveAliases.add(extraAlias);
+                        moveAliases.add(additionalAlias);
                     }
                 }
             }
-            if (!newAliases.isEmpty() || !moveAliases.isEmpty()) {
-                logger.info("new aliases = {}, moved aliases = {}", newAliases, moveAliases);
-                requestBuilder.execute().actionGet();
+            if (!indicesAliasesRequest.getAliasActions().isEmpty()) {
+                StringBuilder sb = new StringBuilder();
+                for (IndicesAliasesRequest.AliasActions aliasActions : indicesAliasesRequest.getAliasActions()) {
+                    sb.append("[").append(aliasActions.actionType().name())
+                            .append(",indices=").append(Arrays.asList(aliasActions.indices()))
+                            .append(",aliases=").append(Arrays.asList(aliasActions.aliases())).append("]");
+                }
+                logger.debug("indices alias request = {}", sb.toString());
+                IndicesAliasesResponse indicesAliasesResponse =
+                        client.execute(IndicesAliasesAction.INSTANCE, indicesAliasesRequest).actionGet();
+                logger.debug("response isAcknowledged = {}",
+                        indicesAliasesResponse.isAcknowledged());
             }
             return new SuccessIndexShiftResult(moveAliases, newAliases);
         }
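The block above moves every existing alias onto the new index, re-attaching an alias filter when one is present. A minimal standalone sketch of that move step, assuming illustrative client, index, and alias names (it uses the same AliasActions API as the hunk above):

    import org.elasticsearch.action.admin.indices.alias.IndicesAliasesAction;
    import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
    import org.elasticsearch.client.ElasticsearchClient;
    import org.elasticsearch.cluster.metadata.AliasAction;

    // Hedged sketch: move one (possibly filtered) alias from oldIndex to newIndex.
    // All names are illustrative, not taken from the commit.
    public class MoveAliasSketch {
        static void moveAlias(ElasticsearchClient client, String oldIndex, String newIndex,
                              String alias, String filter) {
            IndicesAliasesRequest request = new IndicesAliasesRequest();
            // detach the alias from the old index
            request.addAliasAction(new IndicesAliasesRequest.AliasActions(AliasAction.Type.REMOVE,
                    oldIndex, alias));
            IndicesAliasesRequest.AliasActions add =
                    new IndicesAliasesRequest.AliasActions(AliasAction.Type.ADD, newIndex, alias);
            if (filter != null) {
                add.filter(filter); // keep the alias filter when the alias carries one
            }
            request.addAliasAction(add);
            client.execute(IndicesAliasesAction.INSTANCE, request).actionGet();
        }
    }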
@@ -665,11 +709,11 @@ public abstract class AbstractExtendedClient implements ExtendedClient {
         if (index.equals(fullIndexName)) {
             return EMPTY_INDEX_PRUNE_RESULT;
         }
-        ensureActive();
+        ensureClient();
         GetIndexRequestBuilder getIndexRequestBuilder = new GetIndexRequestBuilder(client, GetIndexAction.INSTANCE);
         GetIndexResponse getIndexResponse = getIndexRequestBuilder.execute().actionGet();
         Pattern pattern = Pattern.compile("^(.*?)(\\d+)$");
-        logger.info("{} indices", getIndexResponse.getIndices().length);
+        logger.info("pruneIndex: total of {} indices", getIndexResponse.getIndices().length);
         List<String> candidateIndices = new ArrayList<>();
         for (String s : getIndexResponse.getIndices()) {
             Matcher m = pattern.matcher(s);
@@ -692,7 +736,7 @@ public abstract class AbstractExtendedClient implements ExtendedClient {
             if (m2.matches()) {
                 Integer i2 = Integer.parseInt(m2.group(2));
                 int kept = candidateIndices.size() - indicesToDelete.size();
-                if ((delta == 0 || (delta > 0 && i1 - i2 > delta)) && mintokeep <= kept) {
+                if ((delta == 0 || (delta > 0 && i1 - i2 >= delta)) && mintokeep <= kept) {
                     indicesToDelete.add(s);
                 }
             }
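The only functional change in this hunk is `>` becoming `>=`, so a candidate index whose numeric suffix lags the newest one by exactly `delta` is now eligible for deletion as well. A standalone sketch of the retention predicate with hypothetical sample values:

    // Hypothetical illustration of the pruning predicate above, outside the client class.
    public class PrunePredicateSketch {
        static boolean shouldDelete(int i1, int i2, int delta, int mintokeep, int kept) {
            // i1: numeric suffix of the current full index, i2: suffix of the candidate;
            // delta == 0 disables the distance check, otherwise delete when the candidate
            // is at least 'delta' generations behind, as long as 'mintokeep' indices survive.
            return (delta == 0 || (delta > 0 && i1 - i2 >= delta)) && mintokeep <= kept;
        }

        public static void main(String[] args) {
            // With the old '>' comparison, a gap of exactly delta was kept; now it is deleted.
            System.out.println(shouldDelete(20160105, 20160102, 3, 2, 4)); // true
            System.out.println(shouldDelete(20160105, 20160103, 3, 2, 4)); // false
        }
    }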
@@ -705,19 +749,30 @@ public abstract class AbstractExtendedClient implements ExtendedClient {
         DeleteIndexRequest deleteIndexRequest = new DeleteIndexRequest()
                 .indices(indicesToDelete.toArray(s));
         DeleteIndexResponse response = client.execute(DeleteIndexAction.INSTANCE, deleteIndexRequest).actionGet();
         if (response.isAcknowledged()) {
             logger.log(Level.INFO, "deletion of {} acknowledged, waiting for GREEN", Arrays.asList(s));
             waitForCluster("GREEN", 30L, TimeUnit.SECONDS);
             return new SuccessPruneResult(candidateIndices, indicesToDelete, response);
         } else {
             logger.log(Level.WARN, "deletion of {} not acknowledged", Arrays.asList(s));
             return new FailPruneResult(candidateIndices, indicesToDelete, response);
         }
     }

     @Override
     public Long mostRecentDocument(String index, String timestampfieldname) {
-        ensureActive();
-        SearchRequestBuilder searchRequestBuilder = new SearchRequestBuilder(client, SearchAction.INSTANCE);
-        SortBuilder sort = SortBuilders.fieldSort(timestampfieldname).order(SortOrder.DESC);
-        SearchResponse searchResponse = searchRequestBuilder.setIndices(index)
-                .addField(timestampfieldname)
-                .setSize(1)
-                .addSort(sort)
-                .execute().actionGet();
+        ensureClient();
+        SortBuilder sort = SortBuilders
+                .fieldSort(timestampfieldname)
+                .order(SortOrder.DESC);
+        SearchSourceBuilder sourceBuilder = new SearchSourceBuilder()
+                .sort(sort)
+                .field(timestampfieldname)
+                .size(1);
+        SearchRequest searchRequest = new SearchRequest()
+                .indices(index)
+                .source(sourceBuilder);
+        SearchResponse searchResponse = client.execute(SearchAction.INSTANCE, searchRequest).actionGet();
         if (searchResponse.getHits().getHits().length == 1) {
             SearchHit hit = searchResponse.getHits().getHits()[0];
             if (hit.getFields().get(timestampfieldname) != null) {
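Across these hunks the commit replaces `*RequestBuilder` helpers with a plain request object executed via `client.execute(Action.INSTANCE, request)`. A minimal sketch of that idiom in isolation, assuming an `ElasticsearchClient` and illustrative index/field names ("myindex", "timestamp"):

    import org.elasticsearch.action.search.SearchAction;
    import org.elasticsearch.action.search.SearchRequest;
    import org.elasticsearch.action.search.SearchResponse;
    import org.elasticsearch.client.ElasticsearchClient;
    import org.elasticsearch.search.builder.SearchSourceBuilder;
    import org.elasticsearch.search.sort.SortBuilders;
    import org.elasticsearch.search.sort.SortOrder;

    // The request-plus-action idiom this commit migrates to; names are illustrative.
    // Functionally equivalent to the mostRecentDocument(...) body above.
    public class MostRecentDocumentSketch {
        static SearchResponse newestByTimestamp(ElasticsearchClient client) {
            SearchSourceBuilder source = new SearchSourceBuilder()
                    .sort(SortBuilders.fieldSort("timestamp").order(SortOrder.DESC))
                    .field("timestamp")
                    .size(1); // only the newest hit is needed
            SearchRequest request = new SearchRequest().indices("myindex").source(source);
            return client.execute(SearchAction.INSTANCE, request).actionGet();
        }
    }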
@@ -741,11 +796,10 @@ public abstract class AbstractExtendedClient implements ExtendedClient {
     @Override
     public boolean forceMerge(String index, long maxWaitTime, TimeUnit timeUnit) {
         TimeValue timeout = toTimeValue(maxWaitTime, timeUnit);
-        ForceMergeRequestBuilder forceMergeRequestBuilder =
-                new ForceMergeRequestBuilder(client, ForceMergeAction.INSTANCE);
-        forceMergeRequestBuilder.setIndices(index);
+        ForceMergeRequest forceMergeRequest = new ForceMergeRequest();
+        forceMergeRequest.indices(index);
         try {
-            forceMergeRequestBuilder.execute().get(timeout.getMillis(), TimeUnit.MILLISECONDS);
+            client.execute(ForceMergeAction.INSTANCE, forceMergeRequest).get(timeout.getMillis(), TimeUnit.MILLISECONDS);
             return true;
         } catch (TimeoutException e) {
             logger.error("timeout");
@@ -794,8 +848,8 @@ public abstract class AbstractExtendedClient implements ExtendedClient {
     }

     @Override
-    public void updateIndexSetting(String index, String key, Object value) throws IOException {
-        ensureActive();
+    public void updateIndexSetting(String index, String key, Object value, long timeout, TimeUnit timeUnit) throws IOException {
+        ensureClient();
         if (index == null) {
             throw new IOException("no index name given");
         }
@@ -808,11 +862,11 @@ public abstract class AbstractExtendedClient implements ExtendedClient {
         Settings.Builder updateSettingsBuilder = Settings.settingsBuilder();
         updateSettingsBuilder.put(key, value.toString());
         UpdateSettingsRequest updateSettingsRequest = new UpdateSettingsRequest(index)
-                .settings(updateSettingsBuilder);
+                .settings(updateSettingsBuilder).timeout(toTimeValue(timeout, timeUnit));
         client.execute(UpdateSettingsAction.INSTANCE, updateSettingsRequest).actionGet();
     }

-    private void ensureActive() {
+    private void ensureClient() {
         if (this instanceof MockExtendedClient) {
             return;
         }
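The `updateIndexSetting` overload now threads an explicit timeout through to the settings update request. A hedged caller sketch; the index name and values are examples only:

    import java.io.IOException;
    import java.util.concurrent.TimeUnit;
    import org.xbib.elx.api.ExtendedClient;

    // Hypothetical usage of the new overload; "myindex" and "30s" are examples only.
    public class UpdateSettingSketch {
        static void raiseRefreshInterval(ExtendedClient client) throws IOException {
            // was: client.updateIndexSetting("myindex", "refresh_interval", "30s");
            client.updateIndexSetting("myindex", "refresh_interval", "30s", 30L, TimeUnit.SECONDS);
        }
    }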
@@ -844,10 +898,9 @@ public abstract class AbstractExtendedClient implements ExtendedClient {
     }

     public void checkMapping(String index) {
-        ensureActive();
-        GetMappingsRequestBuilder getMappingsRequestBuilder = new GetMappingsRequestBuilder(client, GetMappingsAction.INSTANCE)
-                .setIndices(index);
-        GetMappingsResponse getMappingsResponse = getMappingsRequestBuilder.execute().actionGet();
+        ensureClient();
+        GetMappingsRequest getMappingsRequest = new GetMappingsRequest().indices(index);
+        GetMappingsResponse getMappingsResponse = client.execute(GetMappingsAction.INSTANCE, getMappingsRequest).actionGet();
         ImmutableOpenMap<String, ImmutableOpenMap<String, MappingMetaData>> map = getMappingsResponse.getMappings();
         map.keys().forEach((Consumer<ObjectCursor<String>>) stringObjectCursor -> {
             ImmutableOpenMap<String, MappingMetaData> mappings = map.get(stringObjectCursor.value);
@@ -861,25 +914,24 @@ public abstract class AbstractExtendedClient implements ExtendedClient {

     private void checkMapping(String index, String type, MappingMetaData mappingMetaData) {
         try {
-            SearchRequestBuilder searchRequestBuilder = new SearchRequestBuilder(client, SearchAction.INSTANCE);
-            SearchResponse searchResponse = searchRequestBuilder.setSize(0)
-                    .setIndices(index)
-                    .setTypes(type)
-                    .setQuery(QueryBuilders.matchAllQuery())
-                    .execute()
-                    .actionGet();
+            SearchSourceBuilder builder = new SearchSourceBuilder()
+                    .query(QueryBuilders.matchAllQuery())
+                    .size(0);
+            SearchRequest searchRequest = new SearchRequest()
+                    .indices(index)
+                    .source(builder);
+            SearchResponse searchResponse =
+                    client.execute(SearchAction.INSTANCE, searchRequest).actionGet();
             long total = searchResponse.getHits().getTotalHits();
             if (total > 0L) {
                 Map<String, Long> fields = new TreeMap<>();
                 Map<String, Object> root = mappingMetaData.getSourceAsMap();
-                checkMapping(index, type, "", "", root, fields);
+                checkMapping(index, "", "", root, fields);
                 AtomicInteger empty = new AtomicInteger();
                 Map<String, Long> map = sortByValue(fields);
                 map.forEach((key, value) -> {
                     logger.info("{} {} {}",
-                            key,
-                            value,
-                            (double) value * 100 / total);
+                            key, value, (double) value * 100 / total);
                     if (value == 0) {
                         empty.incrementAndGet();
                     }
@@ -893,7 +945,7 @@ public abstract class AbstractExtendedClient implements ExtendedClient {
     }

     @SuppressWarnings("unchecked")
-    private void checkMapping(String index, String type,
+    private void checkMapping(String index,
             String pathDef, String fieldName, Map<String, Object> map,
             Map<String, Long> fields) {
         String path = pathDef;
@@ -918,26 +970,27 @@ public abstract class AbstractExtendedClient implements ExtendedClient {
                 String fieldType = o instanceof String ? o.toString() : null;
                 // do not recurse into our custom field mapper
                 if (!"standardnumber".equals(fieldType) && !"ref".equals(fieldType)) {
-                    checkMapping(index, type, path, key, child, fields);
+                    checkMapping(index, path, key, child, fields);
                 }
             } else if ("type".equals(key)) {
                 QueryBuilder filterBuilder = QueryBuilders.existsQuery(path);
                 QueryBuilder queryBuilder = QueryBuilders.constantScoreQuery(filterBuilder);
-                SearchRequestBuilder searchRequestBuilder = new SearchRequestBuilder(client, SearchAction.INSTANCE);
-                SearchResponse searchResponse = searchRequestBuilder.setSize(0)
-                        .setIndices(index)
-                        .setTypes(type)
-                        .setQuery(queryBuilder)
-                        .execute()
-                        .actionGet();
-                fields.put(path, searchResponse.getHits().totalHits());
+                SearchSourceBuilder builder = new SearchSourceBuilder()
+                        .query(queryBuilder)
+                        .size(0);
+                SearchRequest searchRequest = new SearchRequest()
+                        .indices(index)
+                        .source(builder);
+                SearchResponse searchResponse =
+                        client.execute(SearchAction.INSTANCE, searchRequest).actionGet();
+                fields.put(path, searchResponse.getHits().getTotalHits());
             }
         }
     }

     private static <K, V extends Comparable<? super V>> Map<K, V> sortByValue(Map<K, V> map) {
         Map<K, V> result = new LinkedHashMap<>();
-        map.entrySet().stream().sorted(Comparator.comparing(Map.Entry::getValue))
+        map.entrySet().stream().sorted(Map.Entry.comparingByValue())
                 .forEachOrdered(e -> result.put(e.getKey(), e.getValue()));
         return result;
     }
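`Map.Entry.comparingByValue()` expresses the same ordering as the removed `Comparator.comparing(Map.Entry::getValue)` while avoiding its raw-type inference trouble. The helper in isolation, with illustrative sample data:

    import java.util.LinkedHashMap;
    import java.util.Map;

    // Standalone sketch of the sortByValue helper as changed; sample data is illustrative.
    public class SortByValueSketch {
        static <K, V extends Comparable<? super V>> Map<K, V> sortByValue(Map<K, V> map) {
            Map<K, V> result = new LinkedHashMap<>();
            map.entrySet().stream().sorted(Map.Entry.comparingByValue())
                    .forEachOrdered(e -> result.put(e.getKey(), e.getValue()));
            return result;
        }

        public static void main(String[] args) {
            System.out.println(sortByValue(Map.of("a", 3L, "b", 1L, "c", 2L))); // {b=1, c=2, a=3}
        }
    }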
@@ -1021,6 +1074,42 @@ public abstract class AbstractExtendedClient implements ExtendedClient {
         }
     }

+    private static class FailPruneResult implements IndexPruneResult {
+
+        List<String> candidateIndices;
+
+        List<String> indicesToDelete;
+
+        DeleteIndexResponse response;
+
+        FailPruneResult(List<String> candidateIndices, List<String> indicesToDelete,
+                        DeleteIndexResponse response) {
+            this.candidateIndices = candidateIndices;
+            this.indicesToDelete = indicesToDelete;
+            this.response = response;
+        }
+
+        @Override
+        public IndexPruneResult.State getState() {
+            return IndexPruneResult.State.FAIL;
+        }
+
+        @Override
+        public List<String> getCandidateIndices() {
+            return candidateIndices;
+        }
+
+        @Override
+        public List<String> getDeletedIndices() {
+            return indicesToDelete;
+        }
+
+        @Override
+        public boolean isAcknowledged() {
+            return response.isAcknowledged();
+        }
+    }
+
     private static class NothingToDoPruneResult implements IndexPruneResult {

         List<String> candidateIndices;

@@ -1,5 +1,8 @@
 package org.xbib.elx.common;

+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.elasticsearch.Version;
 import org.elasticsearch.client.ElasticsearchClient;
 import org.elasticsearch.common.settings.Settings;

@@ -16,6 +19,8 @@ import java.util.ServiceLoader;
 @SuppressWarnings("rawtypes")
 public class ClientBuilder {

+    private static final Logger logger = LogManager.getLogger(ClientBuilder.class);
+
     private final ElasticsearchClient client;

     private final Settings.Builder settingsBuilder;

@@ -97,6 +102,10 @@ public class ClientBuilder {
         if (provider == null) {
             throw new IllegalArgumentException("no provider");
         }
-        return (C) providerMap.get(provider).getExtendedClient().setClient(client).init(settingsBuilder.build());
+        Settings settings = settingsBuilder.build();
+        logger.log(Level.INFO, "settings = " + settings.toDelimitedString(','));
+        return (C) providerMap.get(provider).getExtendedClient()
+                .setClient(client)
+                .init(settings);
     }
 }

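The builder now logs the effective settings before handing them to the provider. Usage as exercised by the new test later in this commit; the mock provider performs no cluster operations, so this runs without a cluster:

    import java.io.IOException;
    import org.xbib.elx.common.ClientBuilder;
    import org.xbib.elx.common.MockExtendedClient;
    import org.xbib.elx.common.MockExtendedClientProvider;

    // Builder usage taken from MockExtendedClientProviderTest in this commit.
    public class ClientBuilderSketch {
        public static void main(String[] args) throws IOException {
            MockExtendedClient client = ClientBuilder.builder()
                    .provider(MockExtendedClientProvider.class)
                    .build();
            System.out.println("client = " + client);
        }
    }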
@@ -2,17 +2,14 @@ package org.xbib.elx.common;

 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
-import org.elasticsearch.action.bulk.BulkItemResponse;
-import org.elasticsearch.action.bulk.BulkRequest;
-import org.elasticsearch.action.bulk.BulkResponse;
 import org.elasticsearch.action.delete.DeleteRequest;
 import org.elasticsearch.action.index.IndexRequest;
 import org.elasticsearch.action.update.UpdateRequest;
-import org.elasticsearch.client.Client;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.common.unit.TimeValue;
 import org.xbib.elx.api.BulkController;
+import org.xbib.elx.api.BulkListener;
 import org.xbib.elx.api.BulkMetric;
 import org.xbib.elx.api.BulkProcessor;
 import org.xbib.elx.api.ExtendedClient;
|
|||
|
||||
private final BulkMetric bulkMetric;
|
||||
|
||||
private BulkProcessor bulkProcessor;
|
||||
|
||||
private final List<String> indexNames;
|
||||
|
||||
private final Map<String, Long> startBulkRefreshIntervals;
|
||||
|
||||
private final Map<String, Long> stopBulkRefreshIntervals;
|
||||
|
||||
private long maxWaitTime;
|
||||
private final long maxWaitTime;
|
||||
|
||||
private TimeUnit maxWaitTimeUnit;
|
||||
private final TimeUnit maxWaitTimeUnit;
|
||||
|
||||
private BulkProcessor bulkProcessor;
|
||||
private final AtomicBoolean active;
|
||||
|
||||
private BulkListener bulkListener;
|
||||
|
||||
private AtomicBoolean active;
|
||||
|
||||
public DefaultBulkController(ExtendedClient client, BulkMetric bulkMetric) {
|
||||
public DefaultBulkController(ExtendedClient client) {
|
||||
this.client = client;
|
||||
this.bulkMetric = bulkMetric;
|
||||
this.bulkMetric = new DefaultBulkMetric();
|
||||
this.indexNames = new ArrayList<>();
|
||||
this.active = new AtomicBoolean(false);
|
||||
this.startBulkRefreshIntervals = new HashMap<>();
|
||||
|
@ -61,9 +56,14 @@ public class DefaultBulkController implements BulkController {
|
|||
this.maxWaitTimeUnit = TimeUnit.SECONDS;
|
||||
}
|
||||
|
||||
@Override
|
||||
public BulkMetric getBulkMetric() {
|
||||
return bulkMetric;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Throwable getLastBulkError() {
|
||||
return bulkListener.getLastBulkError();
|
||||
return bulkProcessor.getBulkListener().getLastBulkError();
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@@ -77,21 +77,29 @@ public class DefaultBulkController implements BulkController {
         ByteSizeValue maxVolumePerRequest = settings.getAsBytesSize(Parameters.MAX_VOLUME_PER_REQUEST.name(),
                 ByteSizeValue.parseBytesSizeValue(Parameters.DEFAULT_MAX_VOLUME_PER_REQUEST.getString(),
                         "maxVolumePerRequest"));
-        if (logger.isInfoEnabled()) {
-            logger.info("bulk processor up with maxActionsPerRequest = {} maxConcurrentRequests = {} " +
-                    "flushIngestInterval = {} maxVolumePerRequest = {}",
-                    maxActionsPerRequest, maxConcurrentRequests, flushIngestInterval, maxVolumePerRequest);
-        }
-        this.bulkListener = new BulkListener();
-        DefaultBulkProcessor.Builder builder = DefaultBulkProcessor.builder((Client) client.getClient(), bulkListener)
+        boolean enableBulkLogging = settings.getAsBoolean(Parameters.ENABLE_BULK_LOGGING.name(),
+                Parameters.ENABLE_BULK_LOGGING.getValue());
+        BulkListener bulkListener = new DefaultBulkListener(this, bulkMetric, enableBulkLogging);
+        this.bulkProcessor = DefaultBulkProcessor.builder(client.getClient(), bulkListener)
                 .setBulkActions(maxActionsPerRequest)
                 .setConcurrentRequests(maxConcurrentRequests)
                 .setFlushInterval(flushIngestInterval)
-                .setBulkSize(maxVolumePerRequest);
-        this.bulkProcessor = builder.build();
+                .setBulkSize(maxVolumePerRequest)
+                .build();
+        if (logger.isInfoEnabled()) {
+            logger.info("bulk processor up with maxActionsPerRequest = {} maxConcurrentRequests = {} " +
+                    "flushIngestInterval = {} maxVolumePerRequest = {}, bulk logging = {}",
+                    maxActionsPerRequest, maxConcurrentRequests, flushIngestInterval, maxVolumePerRequest,
+                    enableBulkLogging);
+        }
         this.active.set(true);
     }

     @Override
     public void inactivate() {
         this.active.set(false);
     }

     @Override
     public void startBulkMode(IndexDefinition indexDefinition) throws IOException {
         startBulkMode(indexDefinition.getFullIndexName(), indexDefinition.getStartRefreshInterval(),
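`init(Settings)` is now driven by the `Parameters` constants, keyed by their enum names. A hedged sketch of a settings object such code would consume; the keys mirror `Parameters.X.name()` and the values are illustrative:

    import org.elasticsearch.common.settings.Settings;

    // Hedged sketch; the key strings match Parameters enum constant names as used
    // via Parameters.X.name() in the init(...) code above, the values are examples.
    public class BulkSettingsSketch {
        public static void main(String[] args) {
            Settings settings = Settings.settingsBuilder()
                    .put("ENABLE_BULK_LOGGING", true)      // switches DefaultBulkListener logging on
                    .put("MAX_VOLUME_PER_REQUEST", "10m")  // parsed via settings.getAsBytesSize(...)
                    .build();
            System.out.println(settings.toDelimitedString(','));
        }
    }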
@@ -107,70 +115,60 @@ public class DefaultBulkController implements BulkController {
         startBulkRefreshIntervals.put(indexName, startRefreshIntervalInSeconds);
         stopBulkRefreshIntervals.put(indexName, stopRefreshIntervalInSeconds);
         if (startRefreshIntervalInSeconds != 0L) {
-            client.updateIndexSetting(indexName, "refresh_interval", startRefreshIntervalInSeconds + "s");
+            client.updateIndexSetting(indexName, "refresh_interval", startRefreshIntervalInSeconds + "s",
+                    30L, TimeUnit.SECONDS);
         }
             }
     }

     @Override
-    public void index(IndexRequest indexRequest) {
-        if (!active.get()) {
-            throw new IllegalStateException("inactive");
-        }
+    public void bulkIndex(IndexRequest indexRequest) {
+        ensureActiveAndBulk();
         try {
-            if (bulkMetric != null) {
             bulkMetric.getCurrentIngest().inc(indexRequest.index(), indexRequest.type(), indexRequest.id());
-            }
             bulkProcessor.add(indexRequest);
         } catch (Exception e) {
-            bulkListener.lastBulkError = e;
-            active.set(false);
             if (logger.isErrorEnabled()) {
                 logger.error("bulk add of index failed: " + e.getMessage(), e);
             }
+            inactivate();
         }
     }

     @Override
-    public void delete(DeleteRequest deleteRequest) {
+    public void bulkDelete(DeleteRequest deleteRequest) {
         if (!active.get()) {
             throw new IllegalStateException("inactive");
         }
         try {
-            if (bulkMetric != null) {
             bulkMetric.getCurrentIngest().inc(deleteRequest.index(), deleteRequest.type(), deleteRequest.id());
-            }
             bulkProcessor.add(deleteRequest);
         } catch (Exception e) {
-            bulkListener.lastBulkError = e;
-            active.set(false);
             if (logger.isErrorEnabled()) {
                 logger.error("bulk add of delete failed: " + e.getMessage(), e);
             }
+            inactivate();
         }
     }

     @Override
-    public void update(UpdateRequest updateRequest) {
+    public void bulkUpdate(UpdateRequest updateRequest) {
         if (!active.get()) {
             throw new IllegalStateException("inactive");
         }
         try {
-            if (bulkMetric != null) {
             bulkMetric.getCurrentIngest().inc(updateRequest.index(), updateRequest.type(), updateRequest.id());
-            }
             bulkProcessor.add(updateRequest);
         } catch (Exception e) {
-            bulkListener.lastBulkError = e;
-            active.set(false);
             if (logger.isErrorEnabled()) {
                 logger.error("bulk add of update failed: " + e.getMessage(), e);
             }
+            inactivate();
         }
     }

     @Override
-    public boolean waitForResponses(long timeout, TimeUnit timeUnit) {
+    public boolean waitForBulkResponses(long timeout, TimeUnit timeUnit) {
         try {
             return bulkProcessor.awaitFlush(timeout, timeUnit);
         } catch (InterruptedException e) {
@@ -189,11 +187,12 @@ public class DefaultBulkController implements BulkController {
     @Override
     public void stopBulkMode(String index, long timeout, TimeUnit timeUnit) throws IOException {
         flush();
-        if (waitForResponses(timeout, timeUnit)) {
+        if (waitForBulkResponses(timeout, timeUnit)) {
             if (indexNames.contains(index)) {
                 Long secs = stopBulkRefreshIntervals.get(index);
                 if (secs != null && secs != 0L) {
-                    client.updateIndexSetting(index, "refresh_interval", secs + "s");
+                    client.updateIndexSetting(index, "refresh_interval", secs + "s",
+                            30L, TimeUnit.SECONDS);
                 }
                 indexNames.remove(index);
             }
@@ -210,11 +209,13 @@ public class DefaultBulkController implements BulkController {
     @Override
     public void close() throws IOException {
         flush();
+        bulkMetric.close();
         if (client.waitForResponses(maxWaitTime, maxWaitTimeUnit)) {
             for (String index : indexNames) {
                 Long secs = stopBulkRefreshIntervals.get(index);
                 if (secs != null && secs != 0L)
-                    client.updateIndexSetting(index, "refresh_interval", secs + "s");
+                    client.updateIndexSetting(index, "refresh_interval", secs + "s",
+                            30L, TimeUnit.SECONDS);
             }
             indexNames.clear();
         }
@@ -223,87 +224,12 @@ public class DefaultBulkController implements BulkController {
         }
     }

-    private class BulkListener implements DefaultBulkProcessor.Listener {
-
-        private final Logger logger = LogManager.getLogger("org.xbib.elx.BulkProcessor.Listener");
-
-        private Throwable lastBulkError = null;
-
-        @Override
-        public void beforeBulk(long executionId, BulkRequest request) {
-            long l = 0;
-            if (bulkMetric != null) {
-                l = bulkMetric.getCurrentIngest().getCount();
-                bulkMetric.getCurrentIngest().inc();
-                int n = request.numberOfActions();
-                bulkMetric.getSubmitted().inc(n);
-                bulkMetric.getCurrentIngestNumDocs().inc(n);
-                bulkMetric.getTotalIngestSizeInBytes().inc(request.estimatedSizeInBytes());
+    private void ensureActiveAndBulk() {
+        if (!active.get()) {
+            throw new IllegalStateException("inactive");
         }
-            if (logger.isDebugEnabled()) {
-                logger.debug("before bulk [{}] [actions={}] [bytes={}] [concurrent requests={}]",
-                        executionId,
-                        request.numberOfActions(),
-                        request.estimatedSizeInBytes(),
-                        l);
-            }
-        }
-
-        @Override
-        public void afterBulk(long executionId, BulkRequest request, BulkResponse response) {
-            long l = 0;
-            if (bulkMetric != null) {
-                l = bulkMetric.getCurrentIngest().getCount();
-                bulkMetric.getCurrentIngest().dec();
-                bulkMetric.getSucceeded().inc(response.getItems().length);
-            }
-            int n = 0;
-            for (BulkItemResponse itemResponse : response.getItems()) {
-                if (bulkMetric != null) {
-                    bulkMetric.getCurrentIngest().dec(itemResponse.getIndex(), itemResponse.getType(), itemResponse.getId());
-                }
-                if (itemResponse.isFailed()) {
-                    n++;
-                    if (bulkMetric != null) {
-                        bulkMetric.getSucceeded().dec(1);
-                        bulkMetric.getFailed().inc(1);
-                    }
-                }
-            }
-            if (bulkMetric != null && logger.isDebugEnabled()) {
-                logger.debug("after bulk [{}] [succeeded={}] [failed={}] [{}ms] {} concurrent requests",
-                        executionId,
-                        bulkMetric.getSucceeded().getCount(),
-                        bulkMetric.getFailed().getCount(),
-                        response.getTook().millis(),
-                        l);
-            }
-            if (n > 0) {
-                if (logger.isErrorEnabled()) {
-                    logger.error("bulk [{}] failed with {} failed items, failure message = {}",
-                            executionId, n, response.buildFailureMessage());
-                }
-            } else {
-                if (bulkMetric != null) {
-                    bulkMetric.getCurrentIngestNumDocs().dec(response.getItems().length);
-                }
-            }
-        }
-
-        @Override
-        public void afterBulk(long executionId, BulkRequest request, Throwable failure) {
-            if (bulkMetric != null) {
-                bulkMetric.getCurrentIngest().dec();
-            }
-            lastBulkError = failure;
-            active.set(false);
-            if (logger.isErrorEnabled()) {
-                logger.error("after bulk [" + executionId + "] error", failure);
-            }
-        }
-
-        Throwable getLastBulkError() {
-            return lastBulkError;
+        if (bulkProcessor == null) {
+            throw new UnsupportedOperationException("bulk processor not present");
         }
     }
 }

@@ -0,0 +1,109 @@
+package org.xbib.elx.common;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.elasticsearch.action.bulk.BulkItemResponse;
+import org.elasticsearch.action.bulk.BulkRequest;
+import org.elasticsearch.action.bulk.BulkResponse;
+import org.xbib.elx.api.BulkController;
+import org.xbib.elx.api.BulkListener;
+import org.xbib.elx.api.BulkMetric;
+
+class DefaultBulkListener implements BulkListener {
+
+    private final Logger logger = LogManager.getLogger(BulkListener.class.getName());
+
+    private final BulkController bulkController;
+
+    private final BulkMetric bulkMetric;
+
+    private final boolean isBulkLoggingEnabled;
+
+    private Throwable lastBulkError = null;
+
+    public DefaultBulkListener(BulkController bulkController,
+                               BulkMetric bulkMetric,
+                               boolean isBulkLoggingEnabled) {
+        this.bulkController = bulkController;
+        this.bulkMetric = bulkMetric;
+        this.isBulkLoggingEnabled = isBulkLoggingEnabled;
+    }
+
+    @Override
+    public void beforeBulk(long executionId, BulkRequest request) {
+        long l = 0;
+        if (bulkMetric != null) {
+            l = bulkMetric.getCurrentIngest().getCount();
+            bulkMetric.getCurrentIngest().inc();
+            int n = request.numberOfActions();
+            bulkMetric.getSubmitted().inc(n);
+            bulkMetric.getCurrentIngestNumDocs().inc(n);
+            bulkMetric.getTotalIngestSizeInBytes().inc(request.estimatedSizeInBytes());
+        }
+        if (isBulkLoggingEnabled && logger.isDebugEnabled()) {
+            logger.debug("before bulk [{}] [actions={}] [bytes={}] [concurrent requests={}]",
+                    executionId,
+                    request.numberOfActions(),
+                    request.estimatedSizeInBytes(),
+                    l);
+        }
+    }
+
+    @Override
+    public void afterBulk(long executionId, BulkRequest request, BulkResponse response) {
+        long l = 0;
+        if (bulkMetric != null) {
+            l = bulkMetric.getCurrentIngest().getCount();
+            bulkMetric.getCurrentIngest().dec();
+            bulkMetric.getSucceeded().inc(response.getItems().length);
+        }
+        int n = 0;
+        for (BulkItemResponse itemResponse : response.getItems()) {
+            if (bulkMetric != null) {
+                bulkMetric.getCurrentIngest().dec(itemResponse.getIndex(), itemResponse.getType(), itemResponse.getId());
+            }
+            if (itemResponse.isFailed()) {
+                n++;
+                if (bulkMetric != null) {
+                    bulkMetric.getSucceeded().dec(1);
+                    bulkMetric.getFailed().inc(1);
+                }
+            }
+        }
+        if (isBulkLoggingEnabled && bulkMetric != null && logger.isDebugEnabled()) {
+            logger.debug("after bulk [{}] [succeeded={}] [failed={}] [{}ms] {} concurrent requests",
+                    executionId,
+                    bulkMetric.getSucceeded().getCount(),
+                    bulkMetric.getFailed().getCount(),
+                    response.getTook().millis(),
+                    l);
+        }
+        if (n > 0) {
+            if (isBulkLoggingEnabled && logger.isErrorEnabled()) {
+                logger.error("bulk [{}] failed with {} failed items, failure message = {}",
+                        executionId, n, response.buildFailureMessage());
+            }
+        } else {
+            if (bulkMetric != null) {
+                bulkMetric.getCurrentIngestNumDocs().dec(response.getItems().length);
+            }
+        }
+    }
+
+    @Override
+    public void afterBulk(long executionId, BulkRequest request, Throwable failure) {
+        if (bulkMetric != null) {
+            bulkMetric.getCurrentIngest().dec();
+        }
+        lastBulkError = failure;
+        if (logger.isErrorEnabled()) {
+            logger.error("after bulk [" + executionId + "] error", failure);
+        }
+        bulkController.inactivate();
+    }
+
+    @Override
+    public Throwable getLastBulkError() {
+        return lastBulkError;
+    }
+}

@@ -1,11 +1,10 @@
 package org.xbib.elx.common;

-import org.elasticsearch.common.settings.Settings;
 import org.xbib.elx.api.BulkMetric;
-import org.xbib.metrics.Count;
-import org.xbib.metrics.CountMetric;
-import org.xbib.metrics.Meter;
-import org.xbib.metrics.Metered;
+import org.xbib.metrics.api.Count;
+import org.xbib.metrics.api.Metered;
+import org.xbib.metrics.common.CountMetric;
+import org.xbib.metrics.common.Meter;

 import java.util.concurrent.Executors;

@@ -37,10 +36,6 @@ public class DefaultBulkMetric implements BulkMetric {
         submitted = new CountMetric();
         succeeded = new CountMetric();
         failed = new CountMetric();
     }

-    @Override
-    public void init(Settings settings) {
-        start();
-    }
-

@@ -5,14 +5,17 @@ import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.bulk.BulkAction;
 import org.elasticsearch.action.bulk.BulkRequest;
 import org.elasticsearch.action.bulk.BulkResponse;
-import org.elasticsearch.client.Client;
+import org.elasticsearch.client.ElasticsearchClient;
 import org.elasticsearch.common.unit.ByteSizeUnit;
 import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.util.concurrent.EsExecutors;
 import org.elasticsearch.common.util.concurrent.FutureUtils;
+import org.xbib.elx.api.BulkListener;
 import org.xbib.elx.api.BulkProcessor;
 import org.xbib.elx.api.BulkRequestHandler;

+import java.util.Objects;
 import java.util.concurrent.Executors;
 import java.util.concurrent.ScheduledFuture;
 import java.util.concurrent.ScheduledThreadPoolExecutor;
@@ -28,6 +31,8 @@ import java.util.concurrent.atomic.AtomicLong;
 */
 public class DefaultBulkProcessor implements BulkProcessor {

+    private final BulkListener bulkListener;
+
     private final int bulkActions;

     private final long bulkSize;

@@ -44,20 +49,25 @@ public class DefaultBulkProcessor implements BulkProcessor {

     private volatile boolean closed;

-    private DefaultBulkProcessor(Client client, Listener listener, String name, int concurrentRequests,
-                                 int bulkActions, ByteSizeValue bulkSize, TimeValue flushInterval) {
+    private DefaultBulkProcessor(ElasticsearchClient client,
+                                 BulkListener bulkListener,
+                                 String name,
+                                 int concurrentRequests,
+                                 int bulkActions,
+                                 ByteSizeValue bulkSize,
+                                 TimeValue flushInterval) {
+        this.bulkListener = bulkListener;
         this.executionIdGen = new AtomicLong();
         this.closed = false;
         this.bulkActions = bulkActions;
-        this.bulkSize = bulkSize.bytes();
+        this.bulkSize = bulkSize.getBytes();
         this.bulkRequest = new BulkRequest();
         this.bulkRequestHandler = concurrentRequests == 0 ?
-                new SyncBulkRequestHandler(client, listener) :
-                new AsyncBulkRequestHandler(client, listener, concurrentRequests);
+                new SyncBulkRequestHandler(client, bulkListener) :
+                new AsyncBulkRequestHandler(client, bulkListener, concurrentRequests);
         if (flushInterval != null) {
             this.scheduler = (ScheduledThreadPoolExecutor) Executors.newScheduledThreadPool(1,
-                    EsExecutors.daemonThreadFactory(client.settings(),
-                            name != null ? "[" + name + "]" : "" + "bulk_processor"));
+                    EsExecutors.daemonThreadFactory(name != null ? "[" + name + "]" : "" + "bulk_processor"));
             this.scheduler.setExecuteExistingDelayedTasksAfterShutdownPolicy(false);
             this.scheduler.setContinueExistingPeriodicTasksAfterShutdownPolicy(false);
             this.scheduledFuture = this.scheduler.scheduleWithFixedDelay(new Flush(), flushInterval.millis(),
@@ -68,13 +78,18 @@ public class DefaultBulkProcessor implements BulkProcessor {
         }
     }

-    public static Builder builder(Client client, Listener listener) {
-        if (client == null) {
-            throw new NullPointerException("The client you specified while building a BulkProcessor is null");
-        }
+    public static Builder builder(ElasticsearchClient client,
+                                  BulkListener listener) {
+        Objects.requireNonNull(client, "The client you specified while building a BulkProcessor is null");
+        Objects.requireNonNull(listener, "A listener for the BulkProcessor is required but null");
         return new Builder(client, listener);
     }

+    @Override
+    public BulkListener getBulkListener() {
+        return bulkListener;
+    }
+
     /**
      * Wait for bulk request handler with flush.
      * @param timeout the timeout value

@@ -84,6 +99,7 @@ public class DefaultBulkProcessor implements BulkProcessor {
      */
     @Override
     public synchronized boolean awaitFlush(long timeout, TimeUnit unit) throws InterruptedException {
+        Objects.requireNonNull(unit, "A time unit is required for awaitFlush() but null");
         if (closed) {
             return true;
         }
@@ -92,7 +108,7 @@ public class DefaultBulkProcessor implements BulkProcessor {
             execute();
         }
         // wait for all bulk responses
-        return this.bulkRequestHandler.close(timeout, unit);
+        return bulkRequestHandler.close(timeout, unit);
     }

     /**

@@ -112,18 +128,19 @@ public class DefaultBulkProcessor implements BulkProcessor {
      */
     @Override
     public synchronized boolean awaitClose(long timeout, TimeUnit unit) throws InterruptedException {
+        Objects.requireNonNull(unit, "A time unit is required for awaitClose() but null");
         if (closed) {
             return true;
         }
         closed = true;
-        if (this.scheduledFuture != null) {
-            FutureUtils.cancel(this.scheduledFuture);
-            this.scheduler.shutdown();
+        if (scheduledFuture != null) {
+            FutureUtils.cancel(scheduledFuture);
+            scheduler.shutdown();
         }
         if (bulkRequest.numberOfActions() > 0) {
             execute();
         }
-        return this.bulkRequestHandler.close(timeout, unit);
+        return bulkRequestHandler.close(timeout, unit);
     }

     /**
@@ -132,21 +149,10 @@ public class DefaultBulkProcessor implements BulkProcessor {
      * @param request request
      * @return this bulk processor
      */
     @SuppressWarnings("rawtypes")
     @Override
-    public DefaultBulkProcessor add(ActionRequest<?> request) {
-        return add(request, null);
-    }
-
-    /**
-     * Adds either a delete or an index request with a payload.
-     *
-     * @param request request
-     * @param payload payload
-     * @return this bulk processor
-     */
-    @Override
-    public DefaultBulkProcessor add(ActionRequest<?> request, Object payload) {
-        internalAdd(request, payload);
+    public DefaultBulkProcessor add(ActionRequest request) {
+        internalAdd(request);
         return this;
     }

@@ -176,13 +182,13 @@ public class DefaultBulkProcessor implements BulkProcessor {

     private void ensureOpen() {
         if (closed) {
-            throw new IllegalStateException("bulk process already closed");
+            throw new IllegalStateException("bulk processor already closed");
         }
     }

-    private synchronized void internalAdd(ActionRequest<?> request, Object payload) {
+    private synchronized void internalAdd(ActionRequest<?> request) {
         ensureOpen();
-        bulkRequest.add(request, payload);
+        bulkRequest.add(request);
         executeIfNeeded();
     }
@@ -204,8 +210,7 @@ public class DefaultBulkProcessor implements BulkProcessor {
     private boolean isOverTheLimit() {
         return bulkActions != -1 &&
                 bulkRequest.numberOfActions() >= bulkActions ||
-                bulkSize != -1 &&
-                bulkRequest.estimatedSizeInBytes() >= bulkSize;
+                bulkSize != -1 && bulkRequest.estimatedSizeInBytes() >= bulkSize;
     }

     /**

@@ -213,9 +218,9 @@ public class DefaultBulkProcessor implements BulkProcessor {
      */
     public static class Builder {

-        private final Client client;
+        private final ElasticsearchClient client;

-        private final Listener listener;
+        private final BulkListener bulkListener;

         private String name;

@@ -232,11 +237,11 @@ public class DefaultBulkProcessor implements BulkProcessor {
         * to be notified on the completion of bulk requests.
         *
         * @param client the client
-        * @param listener the listener
+        * @param bulkListener the listener
         */
-        Builder(Client client, Listener listener) {
+        Builder(ElasticsearchClient client, BulkListener bulkListener) {
            this.client = client;
-           this.listener = listener;
+           this.bulkListener = bulkListener;
        }

        /**
@@ -306,7 +311,7 @@ public class DefaultBulkProcessor implements BulkProcessor {
        * @return a bulk processor
        */
       public DefaultBulkProcessor build() {
-           return new DefaultBulkProcessor(client, listener, name, concurrentRequests, bulkActions, bulkSize, flushInterval);
+           return new DefaultBulkProcessor(client, bulkListener, name, concurrentRequests, bulkActions, bulkSize, flushInterval);
       }
    }

@@ -328,11 +333,11 @@ public class DefaultBulkProcessor implements BulkProcessor {

     private static class SyncBulkRequestHandler implements BulkRequestHandler {

-        private final Client client;
+        private final ElasticsearchClient client;

-        private final DefaultBulkProcessor.Listener listener;
+        private final BulkListener listener;

-        SyncBulkRequestHandler(Client client, DefaultBulkProcessor.Listener listener) {
+        SyncBulkRequestHandler(ElasticsearchClient client, BulkListener listener) {
             this.client = client;
             this.listener = listener;
         }

@@ -360,15 +365,15 @@ public class DefaultBulkProcessor implements BulkProcessor {

     private static class AsyncBulkRequestHandler implements BulkRequestHandler {

-        private final Client client;
+        private final ElasticsearchClient client;

-        private final DefaultBulkProcessor.Listener listener;
+        private final BulkListener listener;

         private final Semaphore semaphore;

         private final int concurrentRequests;

-        private AsyncBulkRequestHandler(Client client, DefaultBulkProcessor.Listener listener, int concurrentRequests) {
+        private AsyncBulkRequestHandler(ElasticsearchClient client, BulkListener listener, int concurrentRequests) {
             this.client = client;
             this.listener = listener;
             this.concurrentRequests = concurrentRequests;

@@ -409,7 +414,8 @@ public class DefaultBulkProcessor implements BulkProcessor {
         } catch (Exception e) {
             listener.afterBulk(executionId, bulkRequest, e);
         } finally {
-            if (!bulkRequestSetupSuccessful && acquired) { // if we fail on client.bulk() release the semaphore
+            if (!bulkRequestSetupSuccessful && acquired) {
+                // if we fail on client.bulk() release the semaphore
                 semaphore.release();
             }
         }

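Putting the new signatures together, a hedged sketch of how a caller wires up the processor; the concrete limits are illustrative, only the call chain mirrors `DefaultBulkController.init(...)` above:

    import org.elasticsearch.client.ElasticsearchClient;
    import org.elasticsearch.common.unit.ByteSizeUnit;
    import org.elasticsearch.common.unit.ByteSizeValue;
    import org.elasticsearch.common.unit.TimeValue;
    import org.xbib.elx.api.BulkListener;
    import org.xbib.elx.common.DefaultBulkProcessor;

    // Hedged wiring sketch; the values are examples, not project defaults.
    public class BulkProcessorWiringSketch {
        static DefaultBulkProcessor wire(ElasticsearchClient client, BulkListener listener) {
            return DefaultBulkProcessor.builder(client, listener)
                    .setBulkActions(1000)                                // actions per bulk request
                    .setConcurrentRequests(Runtime.getRuntime().availableProcessors())
                    .setFlushInterval(TimeValue.timeValueSeconds(30))    // periodic flush
                    .setBulkSize(new ByteSizeValue(10, ByteSizeUnit.MB)) // volume cap per request
                    .build();
        }
    }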
@@ -9,7 +9,7 @@ import org.elasticsearch.common.settings.Settings;
 import java.util.concurrent.TimeUnit;

 /**
- * Mock client, it does not perform any actions on a cluster. Useful for testing.
+ * A mocked client, it does not perform any actions on a cluster. Useful for testing.
 */
 public class MockExtendedClient extends AbstractExtendedClient {

@@ -28,6 +28,10 @@ public class MockExtendedClient extends AbstractExtendedClient {
         return null;
     }

+    @Override
+    protected void closeClient() {
+    }
+
     @Override
     public MockExtendedClient index(String index, String id, boolean create, String source) {
         return this;

@@ -2,6 +2,8 @@ package org.xbib.elx.common;

 public enum Parameters {

+    ENABLE_BULK_LOGGING(false),
+
     DEFAULT_MAX_ACTIONS_PER_REQUEST(1000),

     DEFAULT_MAX_CONCURRENT_REQUESTS(Runtime.getRuntime().availableProcessors()),

@@ -18,10 +20,16 @@ public enum Parameters {

     FLUSH_INTERVAL("flush_interval");

+    boolean flag;
+
     int num;

     String string;

+    Parameters(boolean flag) {
+        this.flag = flag;
+    }
+
     Parameters(int num) {
         this.num = num;
     }

@@ -30,6 +38,10 @@ public enum Parameters {
         this.string = string;
     }

+    boolean getValue() {
+        return flag;
+    }
+
     int getNum() {
         return num;
     }

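Each constant now carries a typed default next to its key, so call sites can write `settings.getAsBoolean(Parameters.ENABLE_BULK_LOGGING.name(), Parameters.ENABLE_BULK_LOGGING.getValue())` in one line. A generic sketch of the typed-default pattern (a sketch, not the project's enum):

    // Generic shape of the pattern used by Parameters above: each constant stores
    // one default value and exposes a typed getter for it.
    enum ParamsSketch {
        ENABLE_LOGGING(false),
        MAX_ACTIONS(1000);

        private boolean flag;
        private int num;

        ParamsSketch(boolean flag) { this.flag = flag; }
        ParamsSketch(int num) { this.num = num; }

        boolean getValue() { return flag; } // e.g. settings.getAsBoolean(name(), getValue())
        int getNum() { return num; }
    }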
@@ -1,34 +0,0 @@
-package org.elasticsearch.node;
-
-import org.elasticsearch.Version;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.node.internal.InternalSettingsPreparer;
-import org.elasticsearch.plugins.Plugin;
-
-import java.util.ArrayList;
-import java.util.Collection;
-
-public class MockNode extends Node {
-
-    public MockNode() {
-        super(Settings.EMPTY);
-    }
-
-    public MockNode(Settings settings) {
-        super(settings);
-    }
-
-    public MockNode(Settings settings, Collection<Class<? extends Plugin>> classpathPlugins) {
-        super(InternalSettingsPreparer.prepareEnvironment(settings, null), Version.CURRENT, classpathPlugins);
-    }
-
-    public MockNode(Settings settings, Class<? extends Plugin> classpathPlugin) {
-        this(settings, list(classpathPlugin));
-    }
-
-    private static Collection<Class<? extends Plugin>> list(Class<? extends Plugin> classpathPlugin) {
-        Collection<Class<? extends Plugin>> list = new ArrayList<>();
-        list.add(classpathPlugin);
-        return list;
-    }
-}

@@ -1 +0,0 @@
-package org.elasticsearch.node;

@@ -1,16 +0,0 @@
-package org.xbib.elx.common;
-
-import org.junit.Test;
-
-import java.io.IOException;
-
-import static org.junit.Assert.assertNotNull;
-
-public class MockExtendedClientProviderTest {
-
-    @Test
-    public void testMockExtendedProvider() throws IOException {
-        MockExtendedClient client = ClientBuilder.builder().provider(MockExtendedClientProvider.class).build();
-        assertNotNull(client);
-    }
-}

@@ -1,56 +0,0 @@
-package org.xbib.elx.common;
-
-import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
-import org.elasticsearch.action.bulk.BulkAction;
-import org.elasticsearch.action.bulk.BulkRequestBuilder;
-import org.elasticsearch.action.index.IndexRequest;
-import org.elasticsearch.action.search.SearchRequestBuilder;
-import org.elasticsearch.action.search.SearchResponse;
-import org.elasticsearch.client.Client;
-import org.elasticsearch.common.xcontent.XContentFactory;
-import org.elasticsearch.index.query.QueryBuilder;
-import org.elasticsearch.index.query.QueryBuilders;
-import org.elasticsearch.search.sort.SortOrder;
-import org.junit.Test;
-
-import static org.junit.Assert.assertTrue;
-
-public class SearchTest extends NodeTestUtils {
-
-    @Test
-    public void testSearch() throws Exception {
-        Client client = client("1");
-        BulkRequestBuilder builder = new BulkRequestBuilder(client, BulkAction.INSTANCE);
-        for (int i = 0; i < 1000; i++) {
-            IndexRequest indexRequest = new IndexRequest("pages", "row")
-                    .source(XContentFactory.jsonBuilder()
-                            .startObject()
-                            .field("user1", "joerg")
-                            .field("user2", "joerg")
-                            .field("user3", "joerg")
-                            .field("user4", "joerg")
-                            .field("user5", "joerg")
-                            .field("user6", "joerg")
-                            .field("user7", "joerg")
-                            .field("user8", "joerg")
-                            .field("user9", "joerg")
-                            .field("rowcount", i)
-                            .field("rs", 1234));
-            builder.add(indexRequest);
-        }
-        client.bulk(builder.request()).actionGet();
-        client.admin().indices().refresh(new RefreshRequest()).actionGet();
-
-        for (int i = 0; i < 100; i++) {
-            QueryBuilder queryStringBuilder = QueryBuilders.queryStringQuery("rs:" + 1234);
-            SearchRequestBuilder requestBuilder = client.prepareSearch()
-                    .setIndices("pages")
-                    .setTypes("row")
-                    .setQuery(queryStringBuilder)
-                    .addSort("rowcount", SortOrder.DESC)
-                    .setFrom(i * 10).setSize(10);
-            SearchResponse searchResponse = requestBuilder.execute().actionGet();
-            assertTrue(searchResponse.getHits().getTotalHits() > 0);
-        }
-    }
-}

@@ -1,57 +0,0 @@
-package org.xbib.elx.common;
-
-import static org.elasticsearch.common.settings.Settings.settingsBuilder;
-import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
-import static org.elasticsearch.index.query.QueryBuilders.matchQuery;
-import static org.junit.Assert.assertEquals;
-
-import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction;
-import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequestBuilder;
-import org.elasticsearch.action.index.IndexAction;
-import org.elasticsearch.action.index.IndexRequestBuilder;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.index.IndexNotFoundException;
-import org.junit.Test;
-
-public class SimpleTest extends NodeTestUtils {
-
-    protected Settings getNodeSettings() {
-        return settingsBuilder()
-                .put(super.getNodeSettings())
-                .put("index.analysis.analyzer.default.filter.0", "lowercase")
-                .put("index.analysis.analyzer.default.filter.1", "trim")
-                .put("index.analysis.analyzer.default.tokenizer", "keyword")
-                .build();
-    }
-
-    @Test
-    public void test() throws Exception {
-        try {
-            DeleteIndexRequestBuilder deleteIndexRequestBuilder =
-                    new DeleteIndexRequestBuilder(client("1"), DeleteIndexAction.INSTANCE, "test");
-            deleteIndexRequestBuilder.execute().actionGet();
-        } catch (IndexNotFoundException e) {
-            // ignore if index not found
-        }
-        IndexRequestBuilder indexRequestBuilder = new IndexRequestBuilder(client("1"), IndexAction.INSTANCE);
-        indexRequestBuilder
-                .setIndex("test")
-                .setType("test")
-                .setId("1")
-                .setSource(jsonBuilder().startObject().field("field",
-                        "1%2fPJJP3JV2C24iDfEu9XpHBaYxXh%2fdHTbmchB35SDznXO2g8Vz4D7GTIvY54iMiX_149c95f02a8").endObject())
-                .setRefresh(true)
-                .execute()
-                .actionGet();
-        String doc = client("1").prepareSearch("test")
-                .setTypes("test")
-                .setQuery(matchQuery("field",
-                        "1%2fPJJP3JV2C24iDfEu9XpHBaYxXh%2fdHTbmchB35SDznXO2g8Vz4D7GTIvY54iMiX_149c95f02a8"))
-                .execute()
-                .actionGet()
-                .getHits().getAt(0).getSourceAsString();
-
-        assertEquals(doc,
-                "{\"field\":\"1%2fPJJP3JV2C24iDfEu9XpHBaYxXh%2fdHTbmchB35SDznXO2g8Vz4D7GTIvY54iMiX_149c95f02a8\"}");
-    }
-}

@@ -1,62 +0,0 @@
-package org.xbib.elx.common;
-
-import org.elasticsearch.action.index.IndexRequest;
-import org.elasticsearch.client.Client;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.xcontent.XContentFactory;
-import org.elasticsearch.index.query.QueryBuilder;
-import org.elasticsearch.index.query.QueryBuilders;
-import org.junit.Test;
-
-import java.io.IOException;
-
-public class WildcardTest extends NodeTestUtils {
-
-    protected Settings getNodeSettings() {
-        return Settings.settingsBuilder()
-                .put(super.getNodeSettings())
-                .put("cluster.routing.allocation.disk.threshold_enabled", false)
-                .put("discovery.zen.multicast.enabled", false)
-                .put("http.enabled", false)
-                .put("index.number_of_shards", 1)
-                .put("index.number_of_replicas", 0)
-                .build();
-    }
-
-    @Test
-    public void testWildcard() throws Exception {
-        index(client("1"), "1", "010");
-        index(client("1"), "2", "0*0");
-        // exact
-        validateCount(client("1"), QueryBuilders.queryStringQuery("010").defaultField("field"), 1);
-        validateCount(client("1"), QueryBuilders.queryStringQuery("0\\*0").defaultField("field"), 1);
-        // pattern
-        validateCount(client("1"), QueryBuilders.queryStringQuery("0*0").defaultField("field"), 1); // 2?
-        validateCount(client("1"), QueryBuilders.queryStringQuery("0?0").defaultField("field"), 1); // 2?
-        validateCount(client("1"), QueryBuilders.queryStringQuery("0**0").defaultField("field"), 1); // 2?
-        validateCount(client("1"), QueryBuilders.queryStringQuery("0??0").defaultField("field"), 0);
-        validateCount(client("1"), QueryBuilders.queryStringQuery("*10").defaultField("field"), 1);
-        validateCount(client("1"), QueryBuilders.queryStringQuery("*1*").defaultField("field"), 1);
-        validateCount(client("1"), QueryBuilders.queryStringQuery("*\\*0").defaultField("field"), 0); // 1?
-        validateCount(client("1"), QueryBuilders.queryStringQuery("*\\**").defaultField("field"), 0); // 1?
-    }
-
-    private void index(Client client, String id, String fieldValue) throws IOException {
-        client.index(new IndexRequest("index", "type", id)
-                .source(XContentFactory.jsonBuilder().startObject().field("field", fieldValue).endObject())
-                .refresh(true)).actionGet();
-    }
-
-    private long count(Client client, QueryBuilder queryBuilder) {
-        return client.prepareSearch("index").setTypes("type")
-                .setQuery(queryBuilder)
-                .execute().actionGet().getHits().getTotalHits();
-    }
-
-    private void validateCount(Client client, QueryBuilder queryBuilder, long expectedHits) {
-        final long actualHits = count(client, queryBuilder);
-        if (actualHits != expectedHits) {
-            throw new RuntimeException("actualHits=" + actualHits + ", expectedHits=" + expectedHits);
-        }
-    }
-}

@@ -1 +0,0 @@
-package org.xbib.elx.common;

@ -1,21 +1,20 @@
|
|||
package org.xbib.elx.common;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
package org.xbib.elx.common.test;
|
||||
|
||||
import com.carrotsearch.hppc.cursors.ObjectCursor;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesAction;
|
||||
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
|
||||
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesAction;
|
||||
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
|
||||
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequestBuilder;
|
||||
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse;
|
||||
import org.elasticsearch.action.admin.indices.create.CreateIndexAction;
|
||||
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.client.ElasticsearchClient;
|
||||
import org.elasticsearch.cluster.metadata.AliasAction;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.junit.Test;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.Iterator;
|
||||
|
@ -24,56 +23,62 @@ import java.util.TreeSet;
|
|||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
-/**
- *
- */
-public class AliasTest extends NodeTestUtils {
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
+@ExtendWith(TestExtension.class)
+class AliasTest {

     private static final Logger logger = LogManager.getLogger(AliasTest.class.getName());

+    private final TestExtension.Helper helper;
+
+    AliasTest(TestExtension.Helper helper) {
+        this.helper = helper;
+    }
+
     @Test
-    public void testAlias() {
-        Client client = client("1");
-        CreateIndexRequest indexRequest = new CreateIndexRequest("test");
-        client.admin().indices().create(indexRequest).actionGet();
-        // put alias
+    void testAlias() {
+        ElasticsearchClient client = helper.client("1");
+        CreateIndexRequest indexRequest = new CreateIndexRequest("test_index");
+        client.execute(CreateIndexAction.INSTANCE, indexRequest).actionGet();
         IndicesAliasesRequest indicesAliasesRequest = new IndicesAliasesRequest();
-        String[] indices = new String[]{"test"};
+        String[] indices = new String[]{"test_index"};
         String[] aliases = new String[]{"test_alias"};
         IndicesAliasesRequest.AliasActions aliasAction =
                 new IndicesAliasesRequest.AliasActions(AliasAction.Type.ADD, indices, aliases);
         indicesAliasesRequest.addAliasAction(aliasAction);
-        client.admin().indices().aliases(indicesAliasesRequest).actionGet();
+        client.execute(IndicesAliasesAction.INSTANCE, indicesAliasesRequest).actionGet();
         // get alias
         GetAliasesRequest getAliasesRequest = new GetAliasesRequest(Strings.EMPTY_ARRAY);
         long t0 = System.nanoTime();
-        GetAliasesResponse getAliasesResponse = client.admin().indices().getAliases(getAliasesRequest).actionGet();
+        GetAliasesResponse getAliasesResponse =
+                client.execute(GetAliasesAction.INSTANCE, getAliasesRequest).actionGet();
         long t1 = (System.nanoTime() - t0) / 1000000;
         logger.info("{} time(ms) = {}", getAliasesResponse.getAliases(), t1);
         assertTrue(t1 >= 0);
     }

     @Test
-    public void testMostRecentIndex() {
-        Client client = client("1");
+    void testMostRecentIndex() {
+        ElasticsearchClient client = helper.client("1");
         String alias = "test";
         CreateIndexRequest indexRequest = new CreateIndexRequest("test20160101");
-        client.admin().indices().create(indexRequest).actionGet();
+        client.execute(CreateIndexAction.INSTANCE, indexRequest).actionGet();
         indexRequest = new CreateIndexRequest("test20160102");
-        client.admin().indices().create(indexRequest).actionGet();
+        client.execute(CreateIndexAction.INSTANCE, indexRequest).actionGet();
         indexRequest = new CreateIndexRequest("test20160103");
-        client.admin().indices().create(indexRequest).actionGet();
+        client.execute(CreateIndexAction.INSTANCE, indexRequest).actionGet();
         IndicesAliasesRequest indicesAliasesRequest = new IndicesAliasesRequest();
         String[] indices = new String[] { "test20160101", "test20160102", "test20160103" };
         String[] aliases = new String[] { alias };
         IndicesAliasesRequest.AliasActions aliasAction =
                 new IndicesAliasesRequest.AliasActions(AliasAction.Type.ADD, indices, aliases);
         indicesAliasesRequest.addAliasAction(aliasAction);
-        client.admin().indices().aliases(indicesAliasesRequest).actionGet();
-
-        GetAliasesRequestBuilder getAliasesRequestBuilder = new GetAliasesRequestBuilder(client,
-                GetAliasesAction.INSTANCE);
-        GetAliasesResponse getAliasesResponse = getAliasesRequestBuilder.setAliases(alias).execute().actionGet();
+        client.execute(IndicesAliasesAction.INSTANCE, indicesAliasesRequest).actionGet();
+        GetAliasesRequest getAliasesRequest = new GetAliasesRequest();
+        getAliasesRequest.aliases(alias);
+        GetAliasesResponse getAliasesResponse = client.execute(GetAliasesAction.INSTANCE, getAliasesRequest).actionGet();
         Pattern pattern = Pattern.compile("^(.*?)(\\d+)$");
         Set<String> result = new TreeSet<>(Collections.reverseOrder());
         for (ObjectCursor<String> indexName : getAliasesResponse.getAliases().keys()) {

@@ -90,5 +95,4 @@ public class AliasTest extends NodeTestUtils {
         assertEquals("test20160101", it.next());
         logger.info("success: result={}", result);
     }

 }
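The "most recent index" resolution above relies only on ordering: index names with a trailing date stamp sort newest-first in a reverse-ordered TreeSet. A minimal standalone sketch of that idea outside the test harness (the index names here are illustrative):

// Standalone sketch of the "most recent index" resolution used in the test,
// assuming index names carry a numeric date suffix (e.g. test20160103).
import java.util.Collections;
import java.util.Set;
import java.util.TreeSet;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class MostRecentIndex {
    public static void main(String[] args) {
        Pattern pattern = Pattern.compile("^(.*?)(\\d+)$");
        // reverse lexicographic order puts the highest date suffix first
        Set<String> result = new TreeSet<>(Collections.reverseOrder());
        for (String indexName : new String[]{"test20160101", "test20160103", "test20160102"}) {
            Matcher m = pattern.matcher(indexName);
            if (m.matches()) {
                result.add(indexName);
            }
        }
        System.out.println(result.iterator().next()); // prints test20160103
    }
}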
@@ -0,0 +1,19 @@
package org.xbib.elx.common.test;

import org.junit.jupiter.api.Test;
import org.xbib.elx.common.ClientBuilder;
import org.xbib.elx.common.MockExtendedClient;
import org.xbib.elx.common.MockExtendedClientProvider;

import java.io.IOException;

import static org.junit.jupiter.api.Assertions.assertNotNull;

class MockExtendedClientProviderTest {

    @Test
    void testMockExtendedProvider() throws IOException {
        MockExtendedClient client = ClientBuilder.builder().provider(MockExtendedClientProvider.class).build();
        assertNotNull(client);
    }
}
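The test exercises the provider indirection at the heart of ClientBuilder: the builder is handed a provider class and delegates construction of the concrete client to it. A hypothetical reduction of that pattern, not the actual elx-common implementation:

// Hypothetical reduction of the provider pattern; the real ClientBuilder differs.
interface ClientProvider<C> {
    C getClient();
}

final class MiniClientBuilder {
    // instantiate the provider reflectively, then let it supply the client
    static <C> C build(Class<? extends ClientProvider<C>> providerClass) throws Exception {
        return providerClass.getDeclaredConstructor().newInstance().getClient();
    }
}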
@@ -0,0 +1,12 @@
package org.xbib.elx.common.test;

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.node.Node;

public class MockNode extends Node {

    public MockNode(Settings settings) {
        super(settings);
    }
}
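MockNode appears to exist only to give test code a direct, constructor-based way to boot an embedded node, as TestExtension below does. A hedged usage sketch, with illustrative values:

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.node.Node;

class MockNodeUsage {
    public static void main(String[] args) throws Exception {
        Settings settings = Settings.builder()
                .put("cluster.name", "test-cluster")                      // illustrative values
                .put("path.home", System.getProperty("java.io.tmpdir"))
                .build();
        Node node = new MockNode(settings);
        node.start();
        // ... exercise node.client() here ...
        node.close();
    }
}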
@@ -1,21 +1,23 @@
-package org.xbib.elx.common;
+package org.xbib.elx.common.test;

 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
-import org.junit.Test;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Test;

 import java.net.InetAddress;
 import java.net.NetworkInterface;
 import java.util.Collections;
 import java.util.Enumeration;

-public class NetworkTest {
+// walk over all found interfaces (this is slow - multicast/pings are performed)
+@Disabled
+class NetworkTest {

     private static final Logger logger = LogManager.getLogger(NetworkTest.class);

     @Test
-    public void testNetwork() throws Exception {
-        // walk very slowly over all interfaces
+    void testNetwork() throws Exception {
         Enumeration<NetworkInterface> nets = NetworkInterface.getNetworkInterfaces();
         for (NetworkInterface netint : Collections.list(nets)) {
             System.out.println("checking network interface = " + netint.getName());
@@ -0,0 +1,70 @@
package org.xbib.elx.common.test;

import org.elasticsearch.action.admin.indices.refresh.RefreshAction;
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
import org.elasticsearch.action.bulk.BulkAction;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.search.SearchAction;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.sort.SortOrder;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;

import static org.junit.jupiter.api.Assertions.assertTrue;

@ExtendWith(TestExtension.class)
class SearchTest {

    private final TestExtension.Helper helper;

    SearchTest(TestExtension.Helper helper) {
        this.helper = helper;
    }

    @Test
    void testSearch() throws Exception {
        ElasticsearchClient client = helper.client("1");
        BulkRequestBuilder builder = new BulkRequestBuilder(client, BulkAction.INSTANCE);
        for (int i = 0; i < 1; i++) {
            IndexRequest indexRequest = new IndexRequest().index("pages").type("row")
                    .source(XContentFactory.jsonBuilder()
                            .startObject()
                            .field("user1", "joerg")
                            .field("user2", "joerg")
                            .field("user3", "joerg")
                            .field("user4", "joerg")
                            .field("user5", "joerg")
                            .field("user6", "joerg")
                            .field("user7", "joerg")
                            .field("user8", "joerg")
                            .field("user9", "joerg")
                            .field("rowcount", i)
                            .field("rs", 1234)
                            .endObject());
            builder.add(indexRequest);
        }
        client.execute(BulkAction.INSTANCE, builder.request()).actionGet();
        client.execute(RefreshAction.INSTANCE, new RefreshRequest()).actionGet();
        for (int i = 0; i < 1; i++) {
            QueryBuilder queryStringBuilder = QueryBuilders.queryStringQuery("rs:" + 1234);
            SearchSourceBuilder searchSource = new SearchSourceBuilder();
            searchSource.query(queryStringBuilder);
            searchSource.sort("rowcount", SortOrder.DESC);
            searchSource.from(i * 10);
            searchSource.size(10);
            SearchRequest searchRequest = new SearchRequest();
            searchRequest.indices("pages");
            searchRequest.types("row");
            searchRequest.source(searchSource);
            SearchResponse searchResponse = client.execute(SearchAction.INSTANCE, searchRequest).actionGet();
            assertTrue(searchResponse.getHits().getTotalHits() > 0);
        }
    }
}
@@ -0,0 +1,68 @@
package org.xbib.elx.common.test;

import static org.junit.jupiter.api.Assertions.assertEquals;

import org.elasticsearch.action.admin.indices.create.CreateIndexAction;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
import org.elasticsearch.action.admin.indices.refresh.RefreshAction;
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
import org.elasticsearch.action.index.IndexAction;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.search.SearchAction;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;

@ExtendWith(TestExtension.class)
class SimpleTest {

    private final TestExtension.Helper helper;

    SimpleTest(TestExtension.Helper helper) {
        this.helper = helper;
    }

    @Test
    void testSimple() throws Exception {
        try {
            DeleteIndexRequest deleteIndexRequest =
                    new DeleteIndexRequest().indices("test");
            helper.client("1").execute(DeleteIndexAction.INSTANCE, deleteIndexRequest).actionGet();
        } catch (IndexNotFoundException e) {
            // ignore if index not found
        }
        Settings indexSettings = Settings.settingsBuilder()
                .put("index.analysis.analyzer.default.filter.0", "lowercase")
                .put("index.analysis.analyzer.default.filter.1", "trim")
                .put("index.analysis.analyzer.default.tokenizer", "keyword")
                .build();
        CreateIndexRequest createIndexRequest = new CreateIndexRequest();
        createIndexRequest.index("test").settings(indexSettings);
        helper.client("1").execute(CreateIndexAction.INSTANCE, createIndexRequest).actionGet();
        IndexRequest indexRequest = new IndexRequest();
        indexRequest.index("test").type("test").id("1")
                .source(XContentFactory.jsonBuilder().startObject().field("field",
                        "1%2fPJJP3JV2C24iDfEu9XpHBaYxXh%2fdHTbmchB35SDznXO2g8Vz4D7GTIvY54iMiX_149c95f02a8").endObject());
        helper.client("1").execute(IndexAction.INSTANCE, indexRequest).actionGet();
        RefreshRequest refreshRequest = new RefreshRequest();
        refreshRequest.indices("test");
        helper.client("1").execute(RefreshAction.INSTANCE, refreshRequest).actionGet();
        SearchSourceBuilder builder = new SearchSourceBuilder();
        builder.query(QueryBuilders.matchQuery("field",
                "1%2fPJJP3JV2C24iDfEu9XpHBaYxXh%2fdHTbmchB35SDznXO2g8Vz4D7GTIvY54iMiX_149c95f02a8"));
        SearchRequest searchRequest = new SearchRequest();
        searchRequest.indices("test").types("test");
        searchRequest.source(builder);
        String doc = helper.client("1").execute(SearchAction.INSTANCE, searchRequest).actionGet()
                .getHits().getAt(0).getSourceAsString();
        assertEquals(doc,
                "{\"field\":\"1%2fPJJP3JV2C24iDfEu9XpHBaYxXh%2fdHTbmchB35SDznXO2g8Vz4D7GTIvY54iMiX_149c95f02a8\"}");
    }
}
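Why the exact match works: the index overrides the default analyzer with a keyword tokenizer plus lowercase and trim filters, so the whole field value is indexed as a single term, and the match query, analyzed the same way, produces the identical term. A conceptual sketch of what that analysis chain does (not Elasticsearch code):

import java.util.Locale;

class KeywordAnalyzerSketch {
    public static void main(String[] args) {
        String input = "1%2fPJJP3JV2C24iDfEu9XpHBaYxXh%2fdHTbmchB35SDznXO2g8Vz4D7GTIvY54iMiX_149c95f02a8";
        // keyword tokenizer: one token for the whole input; then lowercase + trim filters
        String indexedTerm = input.trim().toLowerCase(Locale.ROOT);
        String queryTerm = input.trim().toLowerCase(Locale.ROOT);
        System.out.println(indexedTerm.equals(queryTerm)); // true, hence the hit
    }
}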
@@ -0,0 +1,217 @@
package org.xbib.elx.common.test;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchTimeoutException;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoAction;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse;
import org.elasticsearch.action.admin.cluster.state.ClusterStateAction;
import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest;
import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.client.support.AbstractClient;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.node.Node;
import org.junit.jupiter.api.extension.AfterEachCallback;
import org.junit.jupiter.api.extension.BeforeEachCallback;
import org.junit.jupiter.api.extension.ExtensionContext;
import org.junit.jupiter.api.extension.ParameterContext;
import org.junit.jupiter.api.extension.ParameterResolutionException;
import org.junit.jupiter.api.extension.ParameterResolver;

import java.io.IOException;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.HashMap;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.atomic.AtomicInteger;

public class TestExtension implements ParameterResolver, BeforeEachCallback, AfterEachCallback {

    private static final Logger logger = LogManager.getLogger("test");

    private static final Random random = new Random();

    private static final char[] numbersAndLetters = ("0123456789abcdefghijklmnopqrstuvwxyz").toCharArray();

    private static final String key = "es-instance";

    private static final AtomicInteger count = new AtomicInteger(0);

    private static final ExtensionContext.Namespace ns =
            ExtensionContext.Namespace.create(TestExtension.class);

    @Override
    public boolean supportsParameter(ParameterContext parameterContext, ExtensionContext extensionContext)
            throws ParameterResolutionException {
        return parameterContext.getParameter().getType().equals(Helper.class);
    }

    @Override
    public Object resolveParameter(ParameterContext parameterContext, ExtensionContext extensionContext)
            throws ParameterResolutionException {
        // initialize new helper here, increase counter
        return extensionContext.getParent().get().getStore(ns)
                .getOrComputeIfAbsent(key + count.incrementAndGet(), key -> create(), Helper.class);
    }

    @Override
    public void beforeEach(ExtensionContext extensionContext) throws Exception {
        Helper helper = extensionContext.getParent().get().getStore(ns)
                .getOrComputeIfAbsent(key + count.get(), key -> create(), Helper.class);
        logger.info("starting cluster with helper " + helper + " at " + helper.getHome());
        helper.startNode("1");
        NodesInfoRequest nodesInfoRequest = new NodesInfoRequest().transport(true);
        NodesInfoResponse response = helper.client("1").execute(NodesInfoAction.INSTANCE, nodesInfoRequest).actionGet();
        Object obj = response.iterator().next().getTransport().getAddress()
                .publishAddress();
        String host = null;
        int port = 0;
        if (obj instanceof InetSocketTransportAddress) {
            InetSocketTransportAddress address = (InetSocketTransportAddress) obj;
            host = address.address().getHostName();
            port = address.address().getPort();
        }
        try {
            ClusterHealthResponse healthResponse = helper.client("1").execute(ClusterHealthAction.INSTANCE,
                    new ClusterHealthRequest().waitForStatus(ClusterHealthStatus.GREEN)
                            .timeout(TimeValue.timeValueSeconds(30))).actionGet();
            if (healthResponse != null && healthResponse.isTimedOut()) {
                throw new IOException("cluster state is " + healthResponse.getStatus().name()
                        + ", from here on, everything will fail!");
            }
        } catch (ElasticsearchTimeoutException e) {
            throw new IOException("cluster does not respond to health request, cowardly refusing to continue");
        }
        ClusterStateRequest clusterStateRequest = new ClusterStateRequest().all();
        ClusterStateResponse clusterStateResponse =
                helper.client("1").execute(ClusterStateAction.INSTANCE, clusterStateRequest).actionGet();
        logger.info("cluster name = {}", clusterStateResponse.getClusterName().value());
        logger.info("host = {} port = {}", host, port);
    }

    @Override
    public void afterEach(ExtensionContext extensionContext) throws Exception {
        Helper helper = extensionContext.getParent().get().getStore(ns)
                .getOrComputeIfAbsent(key + count.get(), key -> create(), Helper.class);
        closeNodes(helper);
        deleteFiles(Paths.get(helper.getHome() + "/data"));
        logger.info("data files wiped");
        Thread.sleep(2000L); // let OS commit changes
    }

    private void closeNodes(Helper helper) throws IOException {
        logger.info("closing all clients");
        for (AbstractClient client : helper.clients.values()) {
            client.close();
        }
        logger.info("closing all nodes");
        for (Node node : helper.nodes.values()) {
            if (node != null) {
                node.close();
            }
        }
        logger.info("all nodes closed");
    }

    private static void deleteFiles(Path directory) throws IOException {
        if (Files.exists(directory)) {
            Files.walkFileTree(directory, new SimpleFileVisitor<Path>() {
                @Override
                public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                    Files.delete(file);
                    return FileVisitResult.CONTINUE;
                }

                @Override
                public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException {
                    Files.delete(dir);
                    return FileVisitResult.CONTINUE;
                }
            });
        }
    }

    private Helper create() {
        Helper helper = new Helper();
        helper.setHome(System.getProperty("path.home") + "/" + helper.randomString(8));
        helper.setClusterName("test-cluster-" + helper.randomString(8));
        logger.info("cluster: " + helper.getClusterName() + " home: " + helper.getHome());
        return helper;
    }

    static class Helper {

        String home;

        String cluster;

        Map<String, Node> nodes = new HashMap<>();

        Map<String, AbstractClient> clients = new HashMap<>();

        void setHome(String home) {
            this.home = home;
        }

        String getHome() {
            return home;
        }

        void setClusterName(String cluster) {
            this.cluster = cluster;
        }

        String getClusterName() {
            return cluster;
        }

        Settings getNodeSettings() {
            return Settings.builder()
                    .put("cluster.name", getClusterName())
                    .put("path.home", getHome())
                    .build();
        }

        void startNode(String id) {
            buildNode(id).start();
        }

        ElasticsearchClient client(String id) {
            return clients.get(id);
        }

        String randomString(int len) {
            final char[] buf = new char[len];
            final int n = numbersAndLetters.length - 1;
            for (int i = 0; i < buf.length; i++) {
                buf[i] = numbersAndLetters[random.nextInt(n)];
            }
            return new String(buf);
        }

        private Node buildNode(String id) {
            Settings nodeSettings = Settings.builder()
                    .put(getNodeSettings())
                    .put("node.name", id)
                    .build();
            Node node = new MockNode(nodeSettings);
            AbstractClient client = (AbstractClient) node.client();
            nodes.put(id, node);
            clients.put(id, client);
            return node;
        }
    }
}
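Tests opt in with @ExtendWith and receive the Helper through the ParameterResolver above; the node is started in beforeEach and torn down, with its data directory wiped, in afterEach. The consuming side, mirroring the tests in this commit:

import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;

import static org.junit.jupiter.api.Assertions.assertNotNull;

@ExtendWith(TestExtension.class)
class MyElxTest {

    private final TestExtension.Helper helper;

    MyElxTest(TestExtension.Helper helper) {
        this.helper = helper; // resolved by TestExtension#resolveParameter
    }

    @Test
    void usesEmbeddedNode() {
        assertNotNull(helper.client("1")); // node "1" was started in beforeEach
    }
}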
@@ -0,0 +1,70 @@
package org.xbib.elx.common.test;

import org.elasticsearch.action.admin.indices.refresh.RefreshAction;
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
import org.elasticsearch.action.index.IndexAction;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.search.SearchAction;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;

import java.io.IOException;

@ExtendWith(TestExtension.class)
class WildcardTest {

    private final TestExtension.Helper helper;

    WildcardTest(TestExtension.Helper helper) {
        this.helper = helper;
    }

    @Test
    void testWildcard() throws Exception {
        ElasticsearchClient client = helper.client("1");
        index(client, "1", "010");
        index(client, "2", "0*0");
        // exact
        validateCount(client, QueryBuilders.queryStringQuery("010").defaultField("field"), 1);
        validateCount(client, QueryBuilders.queryStringQuery("0\\*0").defaultField("field"), 1);
        // pattern
        validateCount(client, QueryBuilders.queryStringQuery("0*0").defaultField("field"), 1); // 2?
        validateCount(client, QueryBuilders.queryStringQuery("0?0").defaultField("field"), 1); // 2?
        validateCount(client, QueryBuilders.queryStringQuery("0**0").defaultField("field"), 1); // 2?
        validateCount(client, QueryBuilders.queryStringQuery("0??0").defaultField("field"), 0);
        validateCount(client, QueryBuilders.queryStringQuery("*10").defaultField("field"), 1);
        validateCount(client, QueryBuilders.queryStringQuery("*1*").defaultField("field"), 1);
        validateCount(client, QueryBuilders.queryStringQuery("*\\*0").defaultField("field"), 0); // 1?
        validateCount(client, QueryBuilders.queryStringQuery("*\\**").defaultField("field"), 0); // 1?
    }

    private void index(ElasticsearchClient client, String id, String fieldValue) throws IOException {
        client.execute(IndexAction.INSTANCE, new IndexRequest().index("index").type("type").id(id)
                .source(XContentFactory.jsonBuilder().startObject().field("field", fieldValue).endObject()))
                .actionGet();
        client.execute(RefreshAction.INSTANCE, new RefreshRequest()).actionGet();
    }

    private long count(ElasticsearchClient client, QueryBuilder queryBuilder) {
        SearchSourceBuilder builder = new SearchSourceBuilder()
                .query(queryBuilder);
        SearchRequest searchRequest = new SearchRequest()
                .indices("index")
                .types("type")
                .source(builder);
        return client.execute(SearchAction.INSTANCE, searchRequest).actionGet().getHits().getTotalHits();
    }

    private void validateCount(ElasticsearchClient client, QueryBuilder queryBuilder, long expectedHits) {
        final long actualHits = count(client, queryBuilder);
        if (actualHits != expectedHits) {
            throw new RuntimeException("actualHits=" + actualHits + ", expectedHits=" + expectedHits);
        }
    }
}
@@ -0,0 +1 @@
package org.xbib.elx.common.test;

4
elx-http/build.gradle
Normal file

@@ -0,0 +1,4 @@
dependencies {
    api project(':elx-common')
    api "org.xbib:netty-http-client:${project.property('xbib-netty-http.version')}"
}
@@ -1,65 +0,0 @@
buildscript {
    repositories {
        jcenter()
        maven {
            url 'http://xbib.org/repository'
        }
    }
    dependencies {
        classpath "org.xbib.elasticsearch:gradle-plugin-elasticsearch-build:6.2.2.0"
    }
}

apply plugin: 'org.xbib.gradle.plugin.elasticsearch.build'

configurations {
    main
    tests
}

dependencies {
    compile project(':common')
    compile "org.xbib:netty-http-client:${project.property('xbib-netty-http-client.version')}"
    testCompile "org.xbib.elasticsearch:elasticsearch-test-framework:${project.property('elasticsearch-devkit.version')}"
    testRuntime "org.xbib.elasticsearch:elasticsearch-test-framework:${project.property('elasticsearch-devkit.version')}"
}

jar {
    baseName "${rootProject.name}-common"
}

/*
task testJar(type: Jar, dependsOn: testClasses) {
    baseName = "${project.archivesBaseName}-tests"
    from sourceSets.test.output
}
*/

artifacts {
    main jar
    tests testJar
    archives sourcesJar, javadocJar
}

test {
    enabled = true
    include '**/SimpleTest.*'
    testLogging {
        showStandardStreams = true
        exceptionFormat = 'full'
    }
}

randomizedTest {
    enabled = false
}

esTest {
    enabled = true
    // test with the jars, not the classes, for security manager
    // classpath = files(configurations.testRuntime) + configurations.main.artifacts.files + configurations.tests.artifacts.files
    systemProperty 'tests.security.manager', 'true'
    // maybe we like some extra security policy for our code
    systemProperty 'tests.security.policy', '/extra-security.policy'
}

esTest.dependsOn jar, testJar
127
elx-http/src/main/java/org/xbib/elx/http/ExtendedHttpClient.java
Normal file
@@ -0,0 +1,127 @@
package org.xbib.elx.http;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.Action;
import org.elasticsearch.action.ActionFuture;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestBuilder;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.GenericAction;
import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.threadpool.ThreadPool;
import org.xbib.elx.common.AbstractExtendedClient;
import org.xbib.netty.http.client.Client;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.ServiceLoader;

/**
 * Elasticsearch HTTP client.
 */
public class ExtendedHttpClient extends AbstractExtendedClient implements ElasticsearchClient {

    private static final Logger logger = LogManager.getLogger(ExtendedHttpClient.class);

    private Client nettyHttpClient;

    private final ClassLoader classLoader;

    @SuppressWarnings("rawtypes")
    private final Map<GenericAction, HttpAction> actionMap;

    private String url;

    public ExtendedHttpClient() {
        this.classLoader = ExtendedHttpClient.class.getClassLoader();
        this.actionMap = new HashMap<>();
    }

    @Override
    @SuppressWarnings({"unchecked", "rawtypes"})
    public ExtendedHttpClient init(Settings settings) throws IOException {
        super.init(settings);
        if (settings == null) {
            return null;
        }
        this.url = settings.get("url");
        ServiceLoader<HttpAction> httpActionServiceLoader = ServiceLoader.load(HttpAction.class, classLoader);
        for (HttpAction<? extends ActionRequest, ? extends ActionResponse> httpAction : httpActionServiceLoader) {
            httpAction.setSettings(settings);
            actionMap.put(httpAction.getActionInstance(), httpAction);
        }
        this.nettyHttpClient = Client.builder().enableDebug().build();
        logger.info("extended HTTP client initialized with {} actions", actionMap.size());
        return this;
    }

    public Client internalClient() {
        return nettyHttpClient;
    }

    @Override
    public ElasticsearchClient getClient() {
        return this;
    }

    @Override
    protected ElasticsearchClient createClient(Settings settings) {
        return this;
    }

    @Override
    protected void closeClient() throws IOException {
        nettyHttpClient.shutdownGracefully();
    }

    @Override
    public <Request extends ActionRequest, Response extends ActionResponse,
            RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> ActionFuture<Response>
    execute(Action<Request, Response, RequestBuilder> action, Request request) {
        PlainActionFuture<Response> actionFuture = PlainActionFuture.newFuture();
        execute(action, request, actionFuture);
        return actionFuture;
    }

    @Override
    public <Request extends ActionRequest, Response extends ActionResponse,
            RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> void
    execute(Action<Request, Response, RequestBuilder> action, Request request, ActionListener<Response> listener) {
        doExecute(action, request, listener);
    }

    @Override
    public <Request extends ActionRequest, Response extends ActionResponse,
            RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> RequestBuilder
    prepareExecute(Action<Request, Response, RequestBuilder> action) {
        return action.newRequestBuilder(this);
    }

    @Override
    public ThreadPool threadPool() {
        throw new UnsupportedOperationException();
    }

    @SuppressWarnings({"unchecked", "rawtypes"})
    private <R extends ActionRequest, T extends ActionResponse, B extends ActionRequestBuilder<R, T, B>>
    void doExecute(Action<R, T, B> action, R request, ActionListener<T> listener) {
        HttpAction httpAction = actionMap.get(action);
        if (httpAction == null) {
            throw new IllegalStateException("failed to find http action [" + action + "] to execute");
        }
        try {
            HttpActionContext httpActionContext = new HttpActionContext(this, request, url);
            if (logger.isDebugEnabled()) {
                logger.debug("submitting request {} to URL {}", request, url);
            }
            httpAction.execute(httpActionContext, listener);
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
        }
    }
}
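A hedged usage sketch of the client above: point it at a node's HTTP endpoint via the "url" setting, initialize so ServiceLoader registers the shipped actions, then execute any registered action (the endpoint below is illustrative):

import org.elasticsearch.action.get.GetAction;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.common.settings.Settings;
import org.xbib.elx.http.ExtendedHttpClient;

class ExtendedHttpClientUsage {
    public static void main(String[] args) throws Exception {
        Settings settings = Settings.builder()
                .put("url", "http://localhost:9200")   // illustrative endpoint
                .build();
        ExtendedHttpClient client = new ExtendedHttpClient().init(settings);
        GetRequest request = new GetRequest("test", "test", "1");
        // dispatched to HttpGetAction, which issues GET /test/test/1 over HTTP
        GetResponse response = client.execute(GetAction.INSTANCE, request).actionGet();
        System.out.println(response.getSourceAsString());
    }
}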
@@ -0,0 +1,10 @@
package org.xbib.elx.http;

import org.xbib.elx.api.ExtendedClientProvider;

public class ExtendedHttpClientProvider implements ExtendedClientProvider<ExtendedHttpClient> {
    @Override
    public ExtendedHttpClient getExtendedClient() {
        return new ExtendedHttpClient();
    }
}
169
elx-http/src/main/java/org/xbib/elx/http/HttpAction.java
Normal file
@@ -0,0 +1,169 @@
package org.xbib.elx.http;

import io.netty.buffer.ByteBuf;
import io.netty.handler.codec.http.HttpHeaderNames;
import io.netty.handler.codec.http.HttpMethod;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.ActionFuture;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.GenericAction;
import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.xbib.elx.http.util.CheckedFunction;
import org.xbib.netty.http.client.Request;
import org.xbib.netty.http.client.RequestBuilder;
import org.xbib.netty.http.client.transport.Transport;

import java.io.IOException;
import java.nio.charset.StandardCharsets;

/**
 * Base class for HTTP actions.
 *
 * @param <R> the request type
 * @param <T> the response type
 */
public abstract class HttpAction<R extends ActionRequest, T extends ActionResponse> {

    private final Logger logger = LogManager.getLogger(getClass().getName());

    private static final String APPLICATION_JSON = "application/json";

    private Settings settings;

    void setSettings(Settings settings) {
        this.settings = settings;
    }

    public abstract GenericAction<R, T> getActionInstance();

    public final ActionFuture<T> execute(HttpActionContext<R, T> httpActionContext) throws IOException {
        PlainActionFuture<T> future = PlainActionFuture.newFuture();
        execute(httpActionContext, future);
        return future;
    }

    public final void execute(HttpActionContext<R, T> httpActionContext, ActionListener<T> listener) throws IOException {
        try {
            ActionRequestValidationException validationException = httpActionContext.getRequest().validate();
            if (validationException != null) {
                listener.onFailure(validationException);
                return;
            }
            RequestBuilder httpRequestBuilder =
                    createHttpRequest(httpActionContext.getUrl(), httpActionContext.getRequest());
            httpRequestBuilder.setUserAgent("elx-http/1.0");
            Request httpRequest = httpRequestBuilder.build();
            if (logger.isTraceEnabled()) {
                logger.trace("action = {} request = {}", this.getClass().getName(), httpRequest.toString());
            }
            httpRequest.setResponseListener(fullHttpResponse -> {
                try {
                    if (logger.isTraceEnabled()) {
                        logger.trace("got HTTP response: status code = " + fullHttpResponse.status().code() +
                                " headers = " + fullHttpResponse.headers().entries() +
                                " content = " + fullHttpResponse.content().toString(StandardCharsets.UTF_8));
                    }
                    listener.onResponse(parseToResponse(httpActionContext.setHttpResponse(fullHttpResponse)));
                } catch (Exception e) {
                    listener.onFailure(e);
                }
            });
            Transport transport = httpActionContext.getExtendedHttpClient().internalClient().execute(httpRequest);
            httpActionContext.setHttpClientTransport(transport);
            if (transport.isFailed()) {
                listener.onFailure(transport.getFailure());
            }
        } catch (Throwable e) {
            if (listener != null) {
                listener.onFailure(e);
            }
            throw new IOException(e);
        }
    }

    protected RequestBuilder newGetRequest(String url, String path) {
        return Request.builder(HttpMethod.GET).url(url).uri(path);
    }

    protected RequestBuilder newGetRequest(String url, String path, BytesReference content) {
        return newRequest(HttpMethod.GET, url, path, content);
    }

    protected RequestBuilder newHeadRequest(String url, String path) {
        return newRequest(HttpMethod.HEAD, url, path);
    }

    protected RequestBuilder newPostRequest(String url, String path) {
        return newRequest(HttpMethod.POST, url, path);
    }

    protected RequestBuilder newPostRequest(String url, String path, BytesReference content) {
        return newRequest(HttpMethod.POST, url, path, content);
    }

    protected RequestBuilder newPostRequest(String url, String path, String content) {
        return newRequest(HttpMethod.POST, url, path, content);
    }

    protected RequestBuilder newPutRequest(String url, String path) {
        return newRequest(HttpMethod.PUT, url, path);
    }

    protected RequestBuilder newPutRequest(String url, String path, String content) {
        return newRequest(HttpMethod.PUT, url, path, content);
    }

    protected RequestBuilder newPutRequest(String url, String path, BytesReference content) {
        return newRequest(HttpMethod.PUT, url, path, content);
    }

    protected RequestBuilder newDeleteRequest(String url, String path, BytesReference content) {
        return newRequest(HttpMethod.DELETE, url, path, content);
    }

    protected RequestBuilder newRequest(HttpMethod method, String baseUrl, String path) {
        return Request.builder(method).url(baseUrl).uri(path);
    }

    protected RequestBuilder newRequest(HttpMethod method, String baseUrl, String path, BytesReference content) {
        return Request.builder(method).url(baseUrl).uri(path).content(content.toBytesRef().bytes, APPLICATION_JSON);
    }

    protected RequestBuilder newRequest(HttpMethod method, String baseUrl, String path, String content) {
        return Request.builder(method).url(baseUrl).uri(path).content(content, APPLICATION_JSON);
    }

    protected RequestBuilder newRequest(HttpMethod method, String baseUrl, String path, ByteBuf byteBuf) {
        return Request.builder(method).url(baseUrl).uri(path).content(byteBuf, APPLICATION_JSON);
    }

    protected T parseToResponse(HttpActionContext<R, T> httpActionContext) throws IOException {
        String mediaType = httpActionContext.getHttpResponse().headers().get(HttpHeaderNames.CONTENT_TYPE);
        // strip off "; charset=UTF-8"
        int pos = mediaType.indexOf(";");
        mediaType = pos >= 0 ? mediaType.substring(0, pos) : mediaType;
        XContentType xContentType = XContentType.fromRestContentType(mediaType);
        if (xContentType == null) {
            throw new IllegalStateException("unsupported content-type: " + mediaType);
        }
        String body = httpActionContext.getHttpResponse().content().toString(StandardCharsets.UTF_8);
        T t;
        try (XContentParser parser = xContentType.xContent().createParser(body)) {
            t = entityParser().apply(parser);
        }
        return t;
    }

    protected abstract RequestBuilder createHttpRequest(String baseUrl, R request) throws IOException;

    protected abstract CheckedFunction<XContentParser, T, IOException> entityParser();

}
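Concrete subclasses are not wired up in code: ExtendedHttpClient#init discovers them via java.util.ServiceLoader, so the module presumably ships a standard provider-configuration file listing its actions, along the lines of:

# src/main/resources/META-INF/services/org.xbib.elx.http.HttpAction
org.xbib.elx.http.action.get.HttpGetAction
org.xbib.elx.http.action.get.HttpMultiGetAction
org.xbib.elx.http.action.search.HttpSearchAction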
@@ -0,0 +1,60 @@
package org.xbib.elx.http;

import io.netty.handler.codec.http.FullHttpResponse;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionResponse;
import org.xbib.netty.http.client.transport.Transport;

/**
 * HTTP action context.
 *
 * @param <R> request type
 * @param <T> response type
 */
public class HttpActionContext<R extends ActionRequest, T extends ActionResponse> {

    private final ExtendedHttpClient extendedHttpClient;

    private final R request;

    private final String url;

    private Transport httpClientTransport;

    private FullHttpResponse httpResponse;

    HttpActionContext(ExtendedHttpClient extendedHttpClient, R request, String url) {
        this.extendedHttpClient = extendedHttpClient;
        this.request = request;
        this.url = url;
    }

    public ExtendedHttpClient getExtendedHttpClient() {
        return extendedHttpClient;
    }

    public R getRequest() {
        return request;
    }

    public String getUrl() {
        return url;
    }

    public void setHttpClientTransport(Transport httpClientTransport) {
        this.httpClientTransport = httpClientTransport;
    }

    public Transport getHttpClientTransport() {
        return httpClientTransport;
    }

    public HttpActionContext<R, T> setHttpResponse(FullHttpResponse fullHttpResponse) {
        this.httpResponse = fullHttpResponse;
        return this;
    }

    public FullHttpResponse getHttpResponse() {
        return httpResponse;
    }
}
@@ -0,0 +1,179 @@
package org.xbib.elx.http.action.get;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.GenericAction;
import org.elasticsearch.action.get.GetAction;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentLocation;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.get.GetField;
import org.elasticsearch.index.get.GetResult;
import org.elasticsearch.index.mapper.internal.SourceFieldMapper;
import org.xbib.elx.http.util.CheckedFunction;
import org.xbib.elx.http.HttpAction;
import org.xbib.netty.http.client.RequestBuilder;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.function.Supplier;

public class HttpGetAction extends HttpAction<GetRequest, GetResponse> {

    @Override
    public GenericAction<GetRequest, GetResponse> getActionInstance() {
        return GetAction.INSTANCE;
    }

    @Override
    protected RequestBuilder createHttpRequest(String url, GetRequest request) {
        return newGetRequest(url, request.index() + "/" + request.type() + "/" + request.id());
    }

    @Override
    protected CheckedFunction<XContentParser, GetResponse, IOException> entityParser() {
        return this::fromXContent;
    }

    public GetResponse fromXContent(XContentParser parser) throws IOException {
        GetResult getResult = Helper.fromXContent(parser);
        if (getResult.getIndex() == null && getResult.getType() == null && getResult.getId() == null) {
            throw new ElasticsearchException(parser.getTokenLocation() + ":" +
                    String.format(Locale.ROOT, "Missing required fields [%s,%s,%s]", "_index", "_type", "_id"));
        }
        return new GetResponse(getResult);
    }

    static class Helper {

        private static final Logger logger = LogManager.getLogger("helper");

        static final String _INDEX = "_index";
        static final String _TYPE = "_type";
        static final String _ID = "_id";
        private static final String _VERSION = "_version";
        private static final String FOUND = "found";
        private static final String FIELDS = "fields";

        static void ensureExpectedToken(XContentParser.Token expected, XContentParser.Token actual, Supplier location) {
            if (actual != expected) {
                String message = "Failed to parse object: expecting token of type [%s] but found [%s]";
                throw new ElasticsearchException(location.get() + ":" + String.format(Locale.ROOT, message, expected, actual));
            }
        }

        static GetResult fromXContent(XContentParser parser) throws IOException {
            XContentParser.Token token = parser.nextToken();
            ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser::getTokenLocation);
            return fromXContentEmbedded(parser);
        }

        static GetResult fromXContentEmbedded(XContentParser parser) throws IOException {
            XContentParser.Token token = parser.nextToken();
            ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation);
            return fromXContentEmbedded(parser, null, null, null);
        }

        static GetResult fromXContentEmbedded(XContentParser parser, String index, String type, String id) throws IOException {
            XContentParser.Token token = parser.currentToken();
            ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation);
            String currentFieldName = parser.currentName();
            long version = -1;
            Boolean found = null;
            BytesReference source = null;
            Map<String, GetField> fields = new HashMap<>();
            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                if (token == XContentParser.Token.FIELD_NAME) {
                    currentFieldName = parser.currentName();
                } else if (token.isValue()) {
                    if (_INDEX.equals(currentFieldName)) {
                        index = parser.text();
                    } else if (_TYPE.equals(currentFieldName)) {
                        type = parser.text();
                    } else if (_ID.equals(currentFieldName)) {
                        id = parser.text();
                    } else if (_VERSION.equals(currentFieldName)) {
                        version = parser.longValue();
                    } else if (FOUND.equals(currentFieldName)) {
                        found = parser.booleanValue();
                    } else {
                        fields.put(currentFieldName, new GetField(currentFieldName, Collections.singletonList(parser.objectText())));
                    }
                } else if (token == XContentParser.Token.START_OBJECT) {
                    if (SourceFieldMapper.NAME.equals(currentFieldName)) {
                        try (XContentBuilder builder = XContentBuilder.builder(parser.contentType().xContent())) {
                            builder.copyCurrentStructure(parser);
                            source = builder.bytes();
                        }
                    } else if (FIELDS.equals(currentFieldName)) {
                        while (parser.nextToken() != XContentParser.Token.END_OBJECT) {
                            GetField getField = getFieldFromXContent(parser);
                            fields.put(getField.getName(), getField);
                        }
                    } else {
                        parser.skipChildren();
                    }
                } else if (token == XContentParser.Token.START_ARRAY) {
                    if ("_ignored".equals(currentFieldName)) {
                        fields.put(currentFieldName, new GetField(currentFieldName, parser.list()));
                    } else {
                        parser.skipChildren();
                    }
                }
            }
            return new GetResult(index, type, id, version, found, source, fields);
        }

        static GetField getFieldFromXContent(XContentParser parser) throws IOException {
            ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser::getTokenLocation);
            String fieldName = parser.currentName();
            XContentParser.Token token = parser.nextToken();
            ensureExpectedToken(XContentParser.Token.START_ARRAY, token, parser::getTokenLocation);
            List<Object> values = new ArrayList<>();
            while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                values.add(parseFieldsValue(parser));
            }
            return new GetField(fieldName, values);
        }

        static Object parseFieldsValue(XContentParser parser) throws IOException {
            XContentParser.Token token = parser.currentToken();
            Object value = null;
            if (token == XContentParser.Token.VALUE_STRING) {
                // binary values will be parsed back and returned as base64 strings when reading from json and yaml
                value = parser.text();
            } else if (token == XContentParser.Token.VALUE_NUMBER) {
                value = parser.numberValue();
            } else if (token == XContentParser.Token.VALUE_BOOLEAN) {
                value = parser.booleanValue();
            } else if (token == XContentParser.Token.VALUE_EMBEDDED_OBJECT) {
                // binary values will be parsed back and returned as BytesArray when reading from cbor and smile
                value = new BytesArray(parser.binaryValue());
            } else if (token == XContentParser.Token.VALUE_NULL) {
                value = null;
            } else if (token == XContentParser.Token.START_OBJECT) {
                value = parser.mapOrdered();
            } else if (token == XContentParser.Token.START_ARRAY) {
                value = parser.listOrderedMap();
            } else {
                throwUnknownToken(token, parser.getTokenLocation());
            }
            return value;
        }

        static void throwUnknownToken(XContentParser.Token token, XContentLocation location) {
            String message = "Failed to parse object: unexpected token [%s] found";
            throw new ElasticsearchException(location + ":" + String.format(Locale.ROOT, message, token));
        }
    }
}
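For reference, the body this parser walks is the standard Elasticsearch GET response; its fields map one-to-one onto the _index/_type/_id/_version/found/fields keys handled above, e.g.:

{
  "_index": "test",
  "_type": "test",
  "_id": "1",
  "_version": 1,
  "found": true,
  "_source": { "field": "value" }
}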
|
|
@ -0,0 +1,255 @@
|
|||
package org.xbib.elx.http.action.get;
|
||||
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.action.GenericAction;
|
||||
import org.elasticsearch.action.get.GetResponse;
|
||||
import org.elasticsearch.action.get.MultiGetAction;
|
||||
import org.elasticsearch.action.get.MultiGetItemResponse;
|
||||
import org.elasticsearch.action.get.MultiGetRequest;
|
||||
import org.elasticsearch.action.get.MultiGetResponse;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.ParseFieldMatcher;
|
||||
import org.elasticsearch.common.bytes.BytesArray;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentLocation;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.get.GetField;
|
||||
import org.elasticsearch.index.get.GetResult;
|
||||
import org.elasticsearch.index.mapper.internal.SourceFieldMapper;
|
||||
import org.xbib.elx.http.HttpAction;
|
||||
import org.xbib.elx.http.action.search.HttpSearchAction;
|
||||
import org.xbib.elx.http.util.CheckedFunction;
|
||||
import org.xbib.netty.http.client.RequestBuilder;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
import java.util.function.Supplier;
|
||||
|
||||
public class HttpMultiGetAction extends HttpAction<MultiGetRequest, MultiGetResponse> {
|
||||
|
||||
@Override
|
||||
public GenericAction<MultiGetRequest, MultiGetResponse> getActionInstance() {
|
||||
return MultiGetAction.INSTANCE;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected RequestBuilder createHttpRequest(String url, MultiGetRequest request) throws IOException {
|
||||
XContentBuilder builder = XContentFactory.jsonBuilder().startObject().startArray("docs");
|
||||
for (MultiGetRequest.Item item : request.getItems()) {
|
||||
builder.startObject()
|
||||
.field("_index", item.index())
|
||||
.field("_type", item.type())
|
||||
.field("_id", item.id());
|
||||
if (item.fields() != null) {
|
||||
builder.array("fields", item.fields());
|
||||
}
|
||||
builder.endObject();
|
||||
}
|
||||
builder.endArray().endObject();
|
||||
return newPostRequest(url, "_mget", builder.bytes());
|
||||
}
|
||||
|
||||
@Override
|
||||
protected CheckedFunction<XContentParser, MultiGetResponse, IOException> entityParser() {
|
||||
return Helper::fromXContent;
|
||||
}
|
||||
|
||||
static class Helper {
|
||||
|
||||
private static final ParseField INDEX = new ParseField("_index");
|
||||
private static final ParseField TYPE = new ParseField("_type");
|
||||
private static final ParseField ID = new ParseField("_id");
|
||||
private static final ParseField ERROR = new ParseField("error");
|
||||
private static final ParseField DOCS = new ParseField("docs");
|
||||
|
||||
static final String _INDEX = "_index";
|
||||
static final String _TYPE = "_type";
|
||||
static final String _ID = "_id";
|
||||
private static final String _VERSION = "_version";
|
||||
private static final String FOUND = "found";
|
||||
private static final String FIELDS = "fields";
|
||||
|
||||
|
||||
static MultiGetResponse fromXContent(XContentParser parser) throws IOException {
|
||||
String currentFieldName = null;
|
||||
List<MultiGetItemResponse> items = new ArrayList<>();
|
||||
for (XContentParser.Token token = parser.nextToken(); token != XContentParser.Token.END_OBJECT; token = parser.nextToken()) {
|
||||
switch (token) {
|
||||
case FIELD_NAME:
|
||||
currentFieldName = parser.currentName();
|
||||
break;
|
||||
case START_ARRAY:
|
||||
if (DOCS.getPreferredName().equals(currentFieldName)) {
|
||||
for (token = parser.nextToken(); token != XContentParser.Token.END_ARRAY; token = parser.nextToken()) {
|
||||
if (token == XContentParser.Token.START_OBJECT) {
|
||||
items.add(parseItem(parser));
|
||||
}
|
||||
}
|
||||
}
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
}
|
||||
return new MultiGetResponse(items.toArray(new MultiGetItemResponse[0]));
|
||||
}
|
||||
|
||||
private static MultiGetItemResponse parseItem(XContentParser parser) throws IOException {
|
||||
String currentFieldName = null;
|
||||
String index = null;
|
||||
String type = null;
|
||||
String id = null;
|
||||
ElasticsearchException exception = null;
|
||||
GetResult getResult = null;
|
||||
ParseFieldMatcher matcher = new ParseFieldMatcher(Settings.EMPTY);
|
||||
for (XContentParser.Token token = parser.nextToken(); token != XContentParser.Token.END_OBJECT; token = parser.nextToken()) {
|
||||
switch (token) {
|
||||
case FIELD_NAME:
|
||||
currentFieldName = parser.currentName();
|
||||
getResult = fromXContentEmbedded(parser, index, type, id);
|
||||
break;
|
||||
case VALUE_STRING:
|
||||
if (matcher.match(currentFieldName, INDEX)) {
|
||||
index = parser.text();
|
||||
} else if (matcher.match(currentFieldName, TYPE)) {
|
||||
type = parser.text();
|
||||
} else if (matcher.match(currentFieldName, ID)) {
|
||||
id = parser.text();
|
||||
}
|
||||
break;
|
||||
case START_OBJECT:
|
||||
if (matcher.match(currentFieldName, ERROR)) {
|
||||
exception = HttpSearchAction.Helper.elasticsearchExceptionFromXContent(parser);
|
||||
}
|
||||
break;
|
||||
default:
|
||||
// If unknown tokens are encounter then these should be ignored, because
|
||||
// this is parsing logic on the client side.
|
||||
break;
|
||||
}
|
||||
if (getResult != null) {
|
||||
break;
                }
            }
            if (exception != null) {
                return new MultiGetItemResponse(null, new MultiGetResponse.Failure(index, type, id, exception));
            } else {
                GetResponse getResponse = new GetResponse(getResult);
                return new MultiGetItemResponse(getResponse, null);
            }
        }

        static void ensureExpectedToken(XContentParser.Token expected, XContentParser.Token actual, Supplier<XContentLocation> location) {
            if (actual != expected) {
                String message = "Failed to parse object: expecting token of type [%s] but found [%s]";
                throw new ElasticsearchException(location.get() + ":" + String.format(Locale.ROOT, message, expected, actual));
            }
        }

        static GetResult fromXContentEmbedded(XContentParser parser) throws IOException {
            XContentParser.Token token = parser.nextToken();
            ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation);
            return fromXContentEmbedded(parser, null, null, null);
        }

        static GetResult fromXContentEmbedded(XContentParser parser, String index, String type, String id) throws IOException {
            XContentParser.Token token = parser.currentToken();
            ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation);
            String currentFieldName = parser.currentName();
            long version = -1;
            Boolean found = null;
            BytesReference source = null;
            Map<String, GetField> fields = new HashMap<>();
            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                if (token == XContentParser.Token.FIELD_NAME) {
                    currentFieldName = parser.currentName();
                } else if (token.isValue()) {
                    if (_INDEX.equals(currentFieldName)) {
                        index = parser.text();
                    } else if (_TYPE.equals(currentFieldName)) {
                        type = parser.text();
                    } else if (_ID.equals(currentFieldName)) {
                        id = parser.text();
                    } else if (_VERSION.equals(currentFieldName)) {
                        version = parser.longValue();
                    } else if (FOUND.equals(currentFieldName)) {
                        found = parser.booleanValue();
                    } else {
                        fields.put(currentFieldName, new GetField(currentFieldName, Collections.singletonList(parser.objectText())));
                    }
                } else if (token == XContentParser.Token.START_OBJECT) {
                    if (SourceFieldMapper.NAME.equals(currentFieldName)) {
                        try (XContentBuilder builder = XContentBuilder.builder(parser.contentType().xContent())) {
                            builder.copyCurrentStructure(parser);
                            source = builder.bytes();
                        }
                    } else if (FIELDS.equals(currentFieldName)) {
                        while (parser.nextToken() != XContentParser.Token.END_OBJECT) {
                            GetField getField = getFieldFromXContent(parser);
                            fields.put(getField.getName(), getField);
                        }
                    } else {
                        parser.skipChildren();
                    }
                } else if (token == XContentParser.Token.START_ARRAY) {
                    if ("_ignored".equals(currentFieldName)) {
                        fields.put(currentFieldName, new GetField(currentFieldName, parser.list()));
                    } else {
                        parser.skipChildren();
                    }
                }
            }
            return new GetResult(index, type, id, version, found, source, fields);
        }

        static GetField getFieldFromXContent(XContentParser parser) throws IOException {
            ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser::getTokenLocation);
            String fieldName = parser.currentName();
            XContentParser.Token token = parser.nextToken();
            ensureExpectedToken(XContentParser.Token.START_ARRAY, token, parser::getTokenLocation);
            List<Object> values = new ArrayList<>();
            while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                values.add(parseFieldsValue(parser));
            }
            return new GetField(fieldName, values);
        }

        static Object parseFieldsValue(XContentParser parser) throws IOException {
            XContentParser.Token token = parser.currentToken();
            Object value = null;
            if (token == XContentParser.Token.VALUE_STRING) {
                // binary values will be parsed back and returned as base64 strings when reading from json and yaml
                value = parser.text();
            } else if (token == XContentParser.Token.VALUE_NUMBER) {
                value = parser.numberValue();
            } else if (token == XContentParser.Token.VALUE_BOOLEAN) {
                value = parser.booleanValue();
            } else if (token == XContentParser.Token.VALUE_EMBEDDED_OBJECT) {
                // binary values will be parsed back and returned as BytesArray when reading from cbor and smile
                value = new BytesArray(parser.binaryValue());
            } else if (token == XContentParser.Token.VALUE_NULL) {
                value = null;
            } else if (token == XContentParser.Token.START_OBJECT) {
                value = parser.mapOrdered();
            } else if (token == XContentParser.Token.START_ARRAY) {
                value = parser.listOrderedMap();
            } else {
                throwUnknownToken(token, parser.getTokenLocation());
            }
            return value;
        }

        static void throwUnknownToken(XContentParser.Token token, XContentLocation location) {
            String message = "Failed to parse object: unexpected token [%s] found";
            throw new ElasticsearchException(location + ":" + String.format(Locale.ROOT, message, token));
        }
    }
}
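For orientation, this is how the embedded-get helpers are meant to be driven: position the parser on the document's START_OBJECT and let fromXContentEmbedded consume the rest. A minimal sketch, assuming the stock Elasticsearch 2.x XContentFactory/XContentType API; the JSON body is made up:

import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;

// made-up document body
String json = "{\"_index\":\"test\",\"_type\":\"doc\",\"_id\":\"1\",\"_version\":1,"
        + "\"found\":true,\"_source\":{\"field\":\"value\"}}";
XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(json);
parser.nextToken(); // advance to START_OBJECT
GetResult result = fromXContentEmbedded(parser); // consumes the first field name and the rest of the object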
elx-http/src/main/java/org/xbib/elx/http/action/search/HttpSearchAction.java (new file)
@@ -0,0 +1,597 @@
package org.xbib.elx.http.action.search;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.util.SetOnce;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.search.SearchAction;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.ShardSearchFailure;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.internal.SourceFieldMapper;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHitField;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.highlight.HighlightField;
import org.elasticsearch.search.internal.InternalSearchHit;
import org.elasticsearch.search.internal.InternalSearchHitField;
import org.elasticsearch.search.internal.InternalSearchHits;
import org.elasticsearch.search.internal.InternalSearchResponse;
import org.elasticsearch.search.suggest.Suggest;
import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry;
import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry.Option;
import org.xbib.elx.http.HttpAction;
import org.xbib.elx.http.util.CheckedFunction;
import org.xbib.elx.http.util.ObjectParser;
import org.xbib.elx.http.util.XContentParserUtils;
import org.xbib.netty.http.client.RequestBuilder;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;

import static org.xbib.elx.http.util.ObjectParser.ValueType.STRING;
import static org.xbib.elx.http.util.XContentParserUtils.ensureExpectedToken;

public class HttpSearchAction extends HttpAction<SearchRequest, SearchResponse> {

    @Override
    public SearchAction getActionInstance() {
        return SearchAction.INSTANCE;
    }

    @Override
    protected RequestBuilder createHttpRequest(String url, SearchRequest request) {
        String index = request.indices() != null ? "/" + String.join(",", request.indices()) : "";
        return newPostRequest(url, index + "/_search", request.source());
    }

    @Override
    protected CheckedFunction<XContentParser, SearchResponse, IOException> entityParser() {
        return Helper::fromXContent;
    }

    public static class Helper {

        private static final Logger logger = LogManager.getLogger(HttpSearchAction.class.getName());

        private static final ParseField SCROLL_ID = new ParseField("_scroll_id");
        private static final ParseField TOOK = new ParseField("took");
        private static final ParseField TIMED_OUT = new ParseField("timed_out");
        private static final ParseField TERMINATED_EARLY = new ParseField("terminated_early");

        private static final ParseField _SHARDS_FIELD = new ParseField("_shards");
        private static final ParseField TOTAL_FIELD = new ParseField("total");
        private static final ParseField SUCCESSFUL_FIELD = new ParseField("successful");
        private static final ParseField SKIPPED_FIELD = new ParseField("skipped");
        private static final ParseField FAILED_FIELD = new ParseField("failed");
        private static final ParseField FAILURES_FIELD = new ParseField("failures");

        private static final String HITS = "hits";

        private static final String TOTAL = "total";
        private static final String MAX_SCORE = "max_score";

        private static final String _NESTED = "_nested";

        private static final String _INDEX = "_index";
        private static final String _TYPE = "_type";
        private static final String _ID = "_id";
        private static final String _VERSION = "_version";
        private static final String _SCORE = "_score";
        private static final String FIELDS = "fields";
        private static final String HIGHLIGHT = "highlight";
        private static final String SORT = "sort";
        private static final String MATCHED_QUERIES = "matched_queries";
        private static final String _EXPLANATION = "_explanation";
        private static final String INNER_HITS = "inner_hits";
        private static final String _SHARD = "_shard";
        private static final String _NODE = "_node";

        private static final String AGGREGATIONS_FIELD = "aggregations";

        private static final String TYPED_KEYS_DELIMITER = "#";

        private static final String SUGGEST_NAME = "suggest";

        private static final String REASON_FIELD = "reason";
        private static final String NODE_FIELD = "node";
        private static final String INDEX_FIELD = "index";
        private static final String SHARD_FIELD = "shard";

        private static final String TYPE = "type";
        private static final String REASON = "reason";
        private static final String CAUSED_BY = "caused_by";
        private static final String STACK_TRACE = "stack_trace";
        private static final String HEADER = "header";
        private static final String ROOT_CAUSE = "root_cause";

        private static final ObjectParser<Map<String, Object>, Void> MAP_PARSER =
                new ObjectParser<>("innerHitParser", true, HashMap::new);

        static {
            declareInnerHitsParseFields(MAP_PARSER);
        }

        public static SearchResponse fromXContent(XContentParser parser) throws IOException {
            ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);
            parser.nextToken();
            return innerFromXContent(parser);
        }

        static SearchResponse innerFromXContent(XContentParser parser) throws IOException {
            ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser::getTokenLocation);
            String currentFieldName = parser.currentName();
            InternalSearchHits hits = null;
            InternalAggregations aggs = null;
            Suggest suggest = null;
            boolean timedOut = false;
            Boolean terminatedEarly = null;
            long tookInMillis = -1;
            int successfulShards = -1;
            int totalShards = -1;
            String scrollId = null;
            List<ShardSearchFailure> failures = new ArrayList<>();
            ParseFieldMatcher matcher = new ParseFieldMatcher(Settings.EMPTY);
            for (XContentParser.Token token = parser.nextToken(); token != XContentParser.Token.END_OBJECT; token = parser.nextToken()) {
                if (token == XContentParser.Token.FIELD_NAME) {
                    currentFieldName = parser.currentName();
                } else if (token.isValue()) {
                    if (matcher.match(currentFieldName, SCROLL_ID)) {
                        scrollId = parser.text();
                    } else if (matcher.match(currentFieldName, TOOK)) {
                        tookInMillis = parser.longValue();
                    } else if (matcher.match(currentFieldName, TIMED_OUT)) {
                        timedOut = parser.booleanValue();
                    } else if (matcher.match(currentFieldName, TERMINATED_EARLY)) {
                        terminatedEarly = parser.booleanValue();
                    } else {
                        parser.skipChildren();
                    }
                } else if (token == XContentParser.Token.START_OBJECT) {
                    if (HITS.equals(currentFieldName)) {
                        logger.debug("searchHitsFromXContent");
                        hits = searchHitsFromXContent(parser);
                    } else if (AGGREGATIONS_FIELD.equals(currentFieldName)) {
                        aggs = aggregationsFromXContent(parser);
                    } else if (SUGGEST_NAME.equals(currentFieldName)) {
                        suggest = suggestFromXContent(parser);
                    } else if (matcher.match(currentFieldName, _SHARDS_FIELD)) {
                        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                            if (token == XContentParser.Token.FIELD_NAME) {
                                currentFieldName = parser.currentName();
                            } else if (token.isValue()) {
                                if (matcher.match(currentFieldName, FAILED_FIELD)) {
                                    parser.intValue(); // we don't need it but need to consume it
                                } else if (matcher.match(currentFieldName, SUCCESSFUL_FIELD)) {
                                    successfulShards = parser.intValue();
                                } else if (matcher.match(currentFieldName, TOTAL_FIELD)) {
                                    totalShards = parser.intValue();
                                } else {
                                    parser.skipChildren();
                                }
                            } else if (token == XContentParser.Token.START_ARRAY) {
                                if (matcher.match(currentFieldName, FAILURES_FIELD)) {
                                    while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                                        failures.add(shardSearchFailureFromXContent(parser));
                                    }
                                } else {
                                    parser.skipChildren();
                                }
                            } else {
                                parser.skipChildren();
                            }
                        }
                    } else {
                        parser.skipChildren();
                    }
                }
            }
            // TODO profileResults
            InternalSearchResponse internalResponse = new InternalSearchResponse(hits, aggs, suggest,
                    null, timedOut, terminatedEarly);
            return new SearchResponse(internalResponse, scrollId, totalShards, successfulShards, tookInMillis,
                    failures.toArray(ShardSearchFailure.EMPTY_ARRAY));
        }

        static InternalSearchHits searchHitsFromXContent(XContentParser parser) throws IOException {
            if (parser.currentToken() != XContentParser.Token.START_OBJECT) {
                parser.nextToken();
                ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation);
            }
            XContentParser.Token token = parser.currentToken();
            String currentFieldName = null;
            List<InternalSearchHit> hits = new ArrayList<>();
            long totalHits = -1L;
            float maxScore = 0f;
            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                if (token == XContentParser.Token.FIELD_NAME) {
                    currentFieldName = parser.currentName();
                } else if (token.isValue()) {
                    if (TOTAL.equals(currentFieldName)) {
                        totalHits = parser.longValue();
                    } else if (MAX_SCORE.equals(currentFieldName)) {
                        maxScore = parser.floatValue();
                    }
                } else if (token == XContentParser.Token.VALUE_NULL) {
                    if (MAX_SCORE.equals(currentFieldName)) {
                        maxScore = Float.NaN; // NaN gets rendered as null-field
                    }
                } else if (token == XContentParser.Token.START_ARRAY) {
                    if (HITS.equals(currentFieldName)) {
                        while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                            logger.debug("searchHitFromXContent");
                            hits.add(searchHitFromXContent(parser));
                        }
                    } else {
                        parser.skipChildren();
                    }
                }
            }
            InternalSearchHit[] internalSearchHits = hits.toArray(new InternalSearchHit[0]);
            return new InternalSearchHits(internalSearchHits, totalHits, maxScore);
        }

        static InternalSearchHit searchHitFromXContent(XContentParser parser) {
            return createFromMap(MAP_PARSER.apply(parser, null));
        }

        static InternalSearchHit createFromMap(Map<String, Object> values) {
            logger.debug("values = {}", values);
            String id = get(_ID, values, null);
            Text type = get(_TYPE, values, null);
            InternalSearchHit.InternalNestedIdentity nestedIdentity = get(_NESTED, values, null);
            Map<String, SearchHitField> fields = get(FIELDS, values, Collections.emptyMap());
            InternalSearchHit searchHit = new InternalSearchHit(-1, id, type, nestedIdentity, fields);
            String index = get(_INDEX, values, null);
            ShardId shardId = get(_SHARD, values, null);
            String nodeId = get(_NODE, values, null);
            if (shardId != null && nodeId != null) {
                assert shardId.index().getName().equals(index);
                searchHit.shard(new SearchShardTarget(nodeId, index, shardId.id()));
            }
            searchHit.score(get(_SCORE, values, Float.NaN));
            searchHit.version(get(_VERSION, values, -1L));
            searchHit.sortValues(get(SORT, values, new Object[0]));
            searchHit.highlightFields(get(HIGHLIGHT, values, null));
            searchHit.sourceRef(get(SourceFieldMapper.NAME, values, null));
            searchHit.explanation(get(_EXPLANATION, values, null));
            searchHit.setInnerHits(get(INNER_HITS, values, null));
            List<String> matchedQueries = get(MATCHED_QUERIES, values, null);
            if (matchedQueries != null) {
                searchHit.matchedQueries(matchedQueries.toArray(new String[0]));
            }
            return searchHit;
        }

        @SuppressWarnings("unchecked")
        private static <T> T get(String key, Map<String, Object> map, T defaultValue) {
            return (T) map.getOrDefault(key, defaultValue);
        }

        static InternalAggregations aggregationsFromXContent(XContentParser parser) throws IOException {
            final List<InternalAggregation> aggregations = new ArrayList<>();
            XContentParser.Token token;
            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                if (token == XContentParser.Token.START_OBJECT) {
                    SetOnce<InternalAggregation> typedAgg = new SetOnce<>();
                    String currentField = parser.currentName();
                    XContentParserUtils.parseTypedKeysObject(parser, TYPED_KEYS_DELIMITER, InternalAggregation.class, typedAgg::set);
                    if (typedAgg.get() != null) {
                        aggregations.add(typedAgg.get());
                    } else {
                        throw new ElasticsearchException(parser.getTokenLocation() + ":" +
                                String.format(Locale.ROOT, "Could not parse aggregation keyed as [%s]", currentField));
                    }
                }
            }
            return new InternalAggregations(aggregations);
        }

        static Suggest suggestFromXContent(XContentParser parser) throws IOException {
            ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation);
            List<Suggest.Suggestion<? extends Entry<? extends Option>>> suggestions = new ArrayList<>();
            while ((parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser::getTokenLocation);
                String currentField = parser.currentName();
                ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.nextToken(), parser::getTokenLocation);
                Suggest.Suggestion<? extends Entry<? extends Option>> suggestion = suggestionFromXContent(parser);
                if (suggestion != null) {
                    suggestions.add(suggestion);
                } else {
                    throw new ElasticsearchException(parser.getTokenLocation() + ":" +
                            String.format(Locale.ROOT, "Could not parse suggestion keyed as [%s]", currentField));
                }
            }
            return new Suggest(suggestions);
        }

        static Suggest.Suggestion<? extends Entry<? extends Option>> suggestionFromXContent(XContentParser parser) throws IOException {
            ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser::getTokenLocation);
            SetOnce<Suggest.Suggestion> suggestion = new SetOnce<>();
            XContentParserUtils.parseTypedKeysObject(parser, TYPED_KEYS_DELIMITER, Suggest.Suggestion.class, suggestion::set);
            return suggestion.get();
        }

        static ShardSearchFailure shardSearchFailureFromXContent(XContentParser parser) throws IOException {
            XContentParser.Token token;
            ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation);
            String currentFieldName = null;
            int shardId = -1;
            String indexName = null;
            String nodeId = null;
            ElasticsearchException exception = null;
            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                if (token == XContentParser.Token.FIELD_NAME) {
                    currentFieldName = parser.currentName();
                } else if (token.isValue()) {
                    if (SHARD_FIELD.equals(currentFieldName)) {
                        shardId = parser.intValue();
                    } else if (INDEX_FIELD.equals(currentFieldName)) {
                        indexName = parser.text();
                    } else if (NODE_FIELD.equals(currentFieldName)) {
                        nodeId = parser.text();
                    } else {
                        parser.skipChildren();
                    }
                } else if (token == XContentParser.Token.START_OBJECT) {
                    if (REASON_FIELD.equals(currentFieldName)) {
                        exception = elasticsearchExceptionFromXContent(parser);
                    } else {
                        parser.skipChildren();
                    }
                } else {
                    parser.skipChildren();
                }
            }
            SearchShardTarget searchShardTarget = null;
            if (nodeId != null) {
                searchShardTarget = new SearchShardTarget(nodeId, indexName, shardId);
            }
            return new ShardSearchFailure(exception, searchShardTarget);
        }

        public static ElasticsearchException elasticsearchExceptionFromXContent(XContentParser parser) throws IOException {
            XContentParser.Token token = parser.nextToken();
            ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation);
            return elasticsearchExceptionFromXContent(parser, false);
        }

        static ElasticsearchException elasticsearchExceptionFromXContent(XContentParser parser, boolean parseRootCauses)
                throws IOException {
            XContentParser.Token token = parser.currentToken();
            ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation);

            String type = null, reason = null, stack = null;
            ElasticsearchException cause = null;
            Map<String, List<String>> metadata = new HashMap<>();
            Map<String, List<String>> headers = new HashMap<>();
            List<ElasticsearchException> rootCauses = new ArrayList<>();

            for (; token == XContentParser.Token.FIELD_NAME; token = parser.nextToken()) {
                String currentFieldName = parser.currentName();
                token = parser.nextToken();

                if (token.isValue()) {
                    if (TYPE.equals(currentFieldName)) {
                        type = parser.text();
                    } else if (REASON.equals(currentFieldName)) {
                        reason = parser.text();
                    } else if (STACK_TRACE.equals(currentFieldName)) {
                        stack = parser.text();
                    } else if (token == XContentParser.Token.VALUE_STRING) {
                        metadata.put(currentFieldName, Collections.singletonList(parser.text()));
                    }
                } else if (token == XContentParser.Token.START_OBJECT) {
                    if (CAUSED_BY.equals(currentFieldName)) {
                        cause = elasticsearchExceptionFromXContent(parser);
                    } else if (HEADER.equals(currentFieldName)) {
                        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                            if (token == XContentParser.Token.FIELD_NAME) {
                                currentFieldName = parser.currentName();
                            } else {
                                List<String> values = headers.getOrDefault(currentFieldName, new ArrayList<>());
                                if (token == XContentParser.Token.VALUE_STRING) {
                                    values.add(parser.text());
                                } else if (token == XContentParser.Token.START_ARRAY) {
                                    while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                                        if (token == XContentParser.Token.VALUE_STRING) {
                                            values.add(parser.text());
                                        } else {
                                            parser.skipChildren();
                                        }
                                    }
                                } else if (token == XContentParser.Token.START_OBJECT) {
                                    parser.skipChildren();
                                }
                                headers.put(currentFieldName, values);
                            }
                        }
                    } else {
                        parser.skipChildren();
                    }
                } else if (token == XContentParser.Token.START_ARRAY) {
                    if (parseRootCauses && ROOT_CAUSE.equals(currentFieldName)) {
                        while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                            rootCauses.add(elasticsearchExceptionFromXContent(parser));
                        }
                    } else {
                        List<String> values = new ArrayList<>();
                        while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                            if (token == XContentParser.Token.VALUE_STRING) {
                                values.add(parser.text());
                            } else {
                                parser.skipChildren();
                            }
                        }
                        if (values.size() > 0) {
                            if (metadata.containsKey(currentFieldName)) {
                                values.addAll(metadata.get(currentFieldName));
                            }
                            metadata.put(currentFieldName, values);
                        }
                    }
                }
            }
            ElasticsearchException e = new ElasticsearchException(buildMessage(type, reason, stack), cause);
            for (Map.Entry<String, List<String>> header : headers.entrySet()) {
                e.addHeader(header.getKey(), header.getValue());
            }
            for (ElasticsearchException rootCause : rootCauses) {
                e.addSuppressed(rootCause);
            }
            return e;
        }

        static String buildMessage(String type, String reason, String stack) {
            StringBuilder message = new StringBuilder("Elasticsearch exception [");
            message.append(TYPE).append('=').append(type).append(", ");
            message.append(REASON).append('=').append(reason);
            if (stack != null) {
                message.append(", ").append(STACK_TRACE).append('=').append(stack);
            }
            message.append(']');
            return message.toString();
        }

        private static void declareInnerHitsParseFields(ObjectParser<Map<String, Object>, Void> parser) {
            declareMetaDataFields(parser);
            parser.declareString((map, value) -> map.put(_TYPE, new Text(value)), new ParseField(_TYPE));
            parser.declareString((map, value) -> map.put(_INDEX, value), new ParseField(_INDEX));
            parser.declareString((map, value) -> map.put(_ID, value), new ParseField(_ID));
            parser.declareString((map, value) -> map.put(_NODE, value), new ParseField(_NODE));
            parser.declareField((map, value) -> map.put(_SCORE, value), SearchHit::parseScore, new ParseField(_SCORE),
                    ObjectParser.ValueType.FLOAT_OR_NULL);
            parser.declareLong((map, value) -> map.put(_VERSION, value), new ParseField(_VERSION));
            parser.declareField((map, value) -> map.put(_SHARD, value), (p, c) -> ShardId.fromString(p.text()),
                    new ParseField(_SHARD), STRING);
            parser.declareObject((map, value) -> map.put(SourceFieldMapper.NAME, value), (p, c) -> parseSourceBytes(p),
                    new ParseField(SourceFieldMapper.NAME));
            parser.declareObject((map, value) -> map.put(HIGHLIGHT, value), (p, c) -> parseHighlightFields(p),
                    new ParseField(HIGHLIGHT));
            parser.declareObject((map, value) -> {
                Map<String, SearchHitField> fieldMap = get(FIELDS, map, new HashMap<String, SearchHitField>());
                fieldMap.putAll(value);
                map.put(FIELDS, fieldMap);
            }, (p, c) -> parseFields(p), new ParseField(FIELDS));
            parser.declareObject((map, value) -> map.put(_EXPLANATION, value), (p, c) -> parseExplanation(p),
                    new ParseField(_EXPLANATION));
            parser.declareObject((map, value) -> map.put(_NESTED, value), SearchHit.NestedIdentity::fromXContent,
                    new ParseField(_NESTED));
            parser.declareObject((map, value) -> map.put(INNER_HITS, value), (p, c) -> parseInnerHits(p),
                    new ParseField(INNER_HITS));
            parser.declareStringArray((map, list) -> map.put(MATCHED_QUERIES, list), new ParseField(MATCHED_QUERIES));
            parser.declareField((map, list) -> map.put(SORT, list), SearchSortValues::fromXContent, new ParseField(SORT),
                    ObjectParser.ValueType.OBJECT_ARRAY);
        }

        private static void declareMetaDataFields(ObjectParser<Map<String, Object>, Void> parser) {
            for (String metadatafield : MapperService.getAllMetaFields()) {
                if (!metadatafield.equals(_ID) && !metadatafield.equals(_INDEX) && !metadatafield.equals(_TYPE)) {
                    parser.declareField((map, field) -> {
                        @SuppressWarnings("unchecked")
                        Map<String, SearchHitField> fieldMap = (Map<String, SearchHitField>) map.computeIfAbsent(FIELDS,
                                v -> new HashMap<String, SearchHitField>());
                        fieldMap.put(field.getName(), field);
                    }, (p, c) -> {
                        List<Object> values = new ArrayList<>();
                        values.add(parseFieldsValue(p));
                        return new InternalSearchHitField(metadatafield, values);
                    }, new ParseField(metadatafield), ObjectParser.ValueType.VALUE);
                }
            }
        }

        private static Map<String, SearchHitField> parseFields(XContentParser parser) throws IOException {
            Map<String, SearchHitField> fields = new HashMap<>();
            while (parser.nextToken() != XContentParser.Token.END_OBJECT) {
                SearchHitField field = SearchHitField.fromXContent(parser);
                fields.put(field.getName(), field);
            }
            return fields;
        }

        private static Map<String, SearchHits> parseInnerHits(XContentParser parser) throws IOException {
            Map<String, SearchHits> innerHits = new HashMap<>();
            while ((parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser::getTokenLocation);
                String name = parser.currentName();
                ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);
                ensureFieldName(parser, parser.nextToken(), SearchHits.Fields.HITS);
                innerHits.put(name, SearchHits.fromXContent(parser));
                ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser::getTokenLocation);
            }
            return innerHits;
        }

        private static Map<String, HighlightField> parseHighlightFields(XContentParser parser) throws IOException {
            Map<String, HighlightField> highlightFields = new HashMap<>();
            while ((parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                HighlightField highlightField = HighlightField.fromXContent(parser);
                highlightFields.put(highlightField.getName(), highlightField);
            }
            return highlightFields;
        }

        private static Explanation parseExplanation(XContentParser parser) throws IOException {
            ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation);
            XContentParser.Token token;
            Float value = null;
            String description = null;
            List<Explanation> details = new ArrayList<>();
            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation);
                String currentFieldName = parser.currentName();
                token = parser.nextToken();
                if (Fields.VALUE.equals(currentFieldName)) {
                    value = parser.floatValue();
                } else if (Fields.DESCRIPTION.equals(currentFieldName)) {
                    description = parser.textOrNull();
                } else if (Fields.DETAILS.equals(currentFieldName)) {
                    ensureExpectedToken(XContentParser.Token.START_ARRAY, token, parser::getTokenLocation);
                    while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                        details.add(parseExplanation(parser));
                    }
                } else {
                    parser.skipChildren();
                }
            }
            if (value == null) {
                throw new ParsingException(parser.getTokenLocation(), "missing explanation value");
            }
            if (description == null) {
                throw new ParsingException(parser.getTokenLocation(), "missing explanation description");
            }
            return Explanation.match(value, description, details);
        }

        private void buildExplanation(XContentBuilder builder, Explanation explanation) throws IOException {
            builder.startObject();
            builder.field(Fields.VALUE, explanation.getValue());
            builder.field(Fields.DESCRIPTION, explanation.getDescription());
            Explanation[] innerExps = explanation.getDetails();
            if (innerExps != null) {
                builder.startArray(Fields.DETAILS);
                for (Explanation exp : innerExps) {
                    buildExplanation(builder, exp);
                }
                builder.endArray();
            }
            builder.endObject();
        }

    }
}
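Helper.fromXContent walks the raw JSON response token by token, mirroring the REST layout (took, timed_out, _shards, hits, aggregations, suggest). A minimal sketch of feeding it a response body, again assuming the Elasticsearch 2.x XContentFactory/XContentType API; the body is made up:

String body = "{\"took\":3,\"timed_out\":false,"
        + "\"_shards\":{\"total\":5,\"successful\":5,\"failed\":0},"
        + "\"hits\":{\"total\":1,\"max_score\":1.0,\"hits\":[]}}";
XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(body);
SearchResponse response = HttpSearchAction.Helper.fromXContent(parser);
// response.getHits().totalHits() == 1, response.getTookInMillis() == 3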
elx-http/src/main/java/org/xbib/elx/http/util/AbstractObjectParser.java (new file)
@@ -0,0 +1,217 @@
package org.xbib.elx.http.util;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.function.BiConsumer;
import java.util.function.BiFunction;
import java.util.function.Consumer;

public abstract class AbstractObjectParser<Value, Context>
        implements BiFunction<XContentParser, Context, Value>, ContextParser<Context, Value> {

    /**
     * Declare some field. Usually it is easier to use {@link #declareString(BiConsumer, ParseField)} or
     * {@link #declareObject(BiConsumer, ContextParser, ParseField)} rather than call this directly.
     */
    public abstract <T> void declareField(BiConsumer<Value, T> consumer, ContextParser<Context, T> parser, ParseField parseField,
                                          ObjectParser.ValueType type);

    /**
     * Declares named objects in the style of aggregations. These are named
     * inside an object like this:
     *
     * <pre>
     * <code>
     * {
     *   "aggregations": {
     *     "name_1": { "aggregation_type": {} },
     *     "name_2": { "aggregation_type": {} },
     *     "name_3": { "aggregation_type": {} }
     *   }
     * }
     * </code>
     * </pre>
     *
     * Unlike the other version of this method, "ordered" mode (arrays of
     * objects) is not supported.
     *
     * See NamedObjectHolder in ObjectParserTests for examples of how to invoke
     * this.
     *
     * @param consumer
     *            sets the values once they have been parsed
     * @param namedObjectParser
     *            parses each named object
     * @param parseField
     *            the field to parse
     */
    public abstract <T> void declareNamedObjects(BiConsumer<Value, List<T>> consumer,
                                                 ObjectParser.NamedObjectParser<T, Context> namedObjectParser,
                                                 ParseField parseField);

    /**
     * Declares named objects in the style of highlighting's field element.
     * These are usually named inside an object like this:
     *
     * <pre>
     * <code>
     * {
     *   "highlight": {
     *     "fields": {        <------ this one
     *       "title": {},
     *       "body": {},
     *       "category": {}
     *     }
     *   }
     * }
     * </code>
     * </pre>
     *
     * but, when order is important, some may be written this way:
     *
     * <pre>
     * <code>
     * {
     *   "highlight": {
     *     "fields": [        <------ this one
     *       {"title": {}},
     *       {"body": {}},
     *       {"category": {}}
     *     ]
     *   }
     * }
     * </code>
     * </pre>
     *
     * This is because JSON doesn't enforce ordering. Elasticsearch reads it in
     * the order sent, but tools that generate JSON are free to put object
     * members in an unordered Map, jumbling them. Thus, if you care about order,
     * you can send the object in the second way.
     *
     * See NamedObjectHolder in ObjectParserTests for examples of how to invoke
     * this.
     *
     * @param consumer
     *            sets the values once they have been parsed
     * @param namedObjectParser
     *            parses each named object
     * @param orderedModeCallback
     *            called when the named object is parsed using the "ordered"
     *            mode (the array of objects)
     * @param parseField
     *            the field to parse
     */
    public abstract <T> void declareNamedObjects(BiConsumer<Value, List<T>> consumer,
                                                 ObjectParser.NamedObjectParser<T, Context> namedObjectParser,
                                                 Consumer<Value> orderedModeCallback,
                                                 ParseField parseField);

    public abstract String getName();

    public <T> void declareField(BiConsumer<Value, T> consumer, CheckedFunction<XContentParser, T, IOException> parser,
                                 ParseField parseField, ObjectParser.ValueType type) {
        if (parser == null) {
            throw new IllegalArgumentException("[parser] is required");
        }
        declareField(consumer, (p, c) -> parser.apply(p), parseField, type);
    }

    public <T> void declareObject(BiConsumer<Value, T> consumer, ContextParser<Context, T> objectParser, ParseField field) {
        declareField(consumer, (p, c) -> objectParser.parse(p, c), field, ObjectParser.ValueType.OBJECT);
    }

    public void declareFloat(BiConsumer<Value, Float> consumer, ParseField field) {
        // Using a method reference here angers some compilers
        declareField(consumer, p -> p.floatValue(), field, ObjectParser.ValueType.FLOAT);
    }

    public void declareDouble(BiConsumer<Value, Double> consumer, ParseField field) {
        // Using a method reference here angers some compilers
        declareField(consumer, p -> p.doubleValue(), field, ObjectParser.ValueType.DOUBLE);
    }

    public void declareLong(BiConsumer<Value, Long> consumer, ParseField field) {
        // Using a method reference here angers some compilers
        declareField(consumer, p -> p.longValue(), field, ObjectParser.ValueType.LONG);
    }

    public void declareInt(BiConsumer<Value, Integer> consumer, ParseField field) {
        // Using a method reference here angers some compilers
        declareField(consumer, p -> p.intValue(), field, ObjectParser.ValueType.INT);
    }

    public void declareString(BiConsumer<Value, String> consumer, ParseField field) {
        declareField(consumer, XContentParser::text, field, ObjectParser.ValueType.STRING);
    }

    public void declareStringOrNull(BiConsumer<Value, String> consumer, ParseField field) {
        declareField(consumer, (p) -> p.currentToken() == XContentParser.Token.VALUE_NULL ? null : p.text(), field,
                ObjectParser.ValueType.STRING_OR_NULL);
    }

    public void declareBoolean(BiConsumer<Value, Boolean> consumer, ParseField field) {
        declareField(consumer, XContentParser::booleanValue, field, ObjectParser.ValueType.BOOLEAN);
    }

    public <T> void declareObjectArray(BiConsumer<Value, List<T>> consumer, ContextParser<Context, T> objectParser,
                                       ParseField field) {
        declareFieldArray(consumer, objectParser, field, ObjectParser.ValueType.OBJECT_ARRAY);
    }

    public void declareStringArray(BiConsumer<Value, List<String>> consumer, ParseField field) {
        declareFieldArray(consumer, (p, c) -> p.text(), field, ObjectParser.ValueType.STRING_ARRAY);
    }

    public void declareDoubleArray(BiConsumer<Value, List<Double>> consumer, ParseField field) {
        declareFieldArray(consumer, (p, c) -> p.doubleValue(), field, ObjectParser.ValueType.DOUBLE_ARRAY);
    }

    public void declareFloatArray(BiConsumer<Value, List<Float>> consumer, ParseField field) {
        declareFieldArray(consumer, (p, c) -> p.floatValue(), field, ObjectParser.ValueType.FLOAT_ARRAY);
    }

    public void declareLongArray(BiConsumer<Value, List<Long>> consumer, ParseField field) {
        declareFieldArray(consumer, (p, c) -> p.longValue(), field, ObjectParser.ValueType.LONG_ARRAY);
    }

    public void declareIntArray(BiConsumer<Value, List<Integer>> consumer, ParseField field) {
        declareFieldArray(consumer, (p, c) -> p.intValue(), field, ObjectParser.ValueType.INT_ARRAY);
    }

    /**
     * Declares a field that can contain an array of elements listed in the type ValueType enum.
     */
    public <T> void declareFieldArray(BiConsumer<Value, List<T>> consumer, ContextParser<Context, T> itemParser,
                                      ParseField field, ObjectParser.ValueType type) {
        declareField(consumer, (p, c) -> parseArray(p, () -> itemParser.parse(p, c)), field, type);
    }

    private interface IOSupplier<T> {
        T get() throws IOException;
    }

    private static <T> List<T> parseArray(XContentParser parser, IOSupplier<T> supplier) throws IOException {
        List<T> list = new ArrayList<>();
        if (parser.currentToken().isValue()
                || parser.currentToken() == XContentParser.Token.VALUE_NULL
                || parser.currentToken() == XContentParser.Token.START_OBJECT) {
            list.add(supplier.get()); // single value
        } else {
            while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
                if (parser.currentToken().isValue()
                        || parser.currentToken() == XContentParser.Token.VALUE_NULL
                        || parser.currentToken() == XContentParser.Token.START_OBJECT) {
                    list.add(supplier.get());
                } else {
                    throw new IllegalStateException("expected value but got [" + parser.currentToken() + "]");
                }
            }
        }
        return list;
    }
}
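Note the leniency built into parseArray above: the declare*Array helpers accept either a bare value or an array of values for the same field. A sketch (Doc and its setter are hypothetical):

ObjectParser<Doc, Void> parser = new ObjectParser<>("doc", Doc::new);
parser.declareStringArray(Doc::setTags, new ParseField("tags"));
// Both {"tags": "a"} and {"tags": ["a"]} now parse to the same single-element list.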
elx-http/src/main/java/org/xbib/elx/http/util/CheckedBiConsumer.java (new file)
@@ -0,0 +1,11 @@
package org.xbib.elx.http.util;

import java.util.function.BiConsumer;

/**
 * A {@link BiConsumer}-like interface which allows throwing checked exceptions.
 */
@FunctionalInterface
public interface CheckedBiConsumer<T, U, E extends Exception> {
    void accept(T t, U u) throws E;
}
elx-http/src/main/java/org/xbib/elx/http/util/CheckedFunction.java (new file)
@@ -0,0 +1,6 @@
package org.xbib.elx.http.util;

@FunctionalInterface
public interface CheckedFunction<T, R, E extends Exception> {
    R apply(T t) throws E;
}
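This mirrors java.util.function.Function but permits a checked exception; it is the shape HttpSearchAction.entityParser() returns. A short sketch:

CheckedFunction<XContentParser, SearchResponse, IOException> entityParser =
        HttpSearchAction.Helper::fromXContent;
SearchResponse response = entityParser.apply(parser); // may throw IOException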
elx-http/src/main/java/org/xbib/elx/http/util/ContextParser.java (new file)
@@ -0,0 +1,13 @@
package org.xbib.elx.http.util;

import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;

/**
 * Reads an object from a parser using some context.
 */
@FunctionalInterface
public interface ContextParser<Context, T> {
    T parse(XContentParser p, Context c) throws IOException;
}
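ContextParser is the hook that declareObject/declareField accept. Since ObjectParser itself implements ContextParser, an inner parser nests directly into an outer one. A sketch (Outer, Inner, and the setter are hypothetical):

ObjectParser<Inner, Void> innerParser = new ObjectParser<>("inner", Inner::new);
ObjectParser<Outer, Void> outerParser = new ObjectParser<>("outer", Outer::new);
outerParser.declareObject(Outer::setInner, innerParser, new ParseField("inner"));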
elx-http/src/main/java/org/xbib/elx/http/util/NamedObjectNotFoundException.java (new file)
@@ -0,0 +1,14 @@
package org.xbib.elx.http.util;

import org.elasticsearch.common.xcontent.XContentLocation;

public class NamedObjectNotFoundException extends XContentParseException {

    public NamedObjectNotFoundException(String message) {
        this(null, message);
    }

    public NamedObjectNotFoundException(XContentLocation location, String message) {
        super(location, message);
    }
}
elx-http/src/main/java/org/xbib/elx/http/util/NamedXContentRegistry.java (new file)
@@ -0,0 +1,101 @@
package org.xbib.elx.http.util;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;

import static java.util.Collections.emptyMap;
import static java.util.Collections.unmodifiableMap;

public class NamedXContentRegistry {

    public static class Entry {
        /** The class that this entry can read. */
        public final Class<?> categoryClass;

        /** A name for the entry which is unique within the {@link #categoryClass}. */
        public final ParseField name;

        /** A parser capable of parsing the entry's class. */
        private final ContextParser<Object, ?> parser;

        /** Creates a new entry which can be stored by the registry. */
        public <T> Entry(Class<T> categoryClass, ParseField name, CheckedFunction<XContentParser, ? extends T, IOException> parser) {
            this.categoryClass = Objects.requireNonNull(categoryClass);
            this.name = Objects.requireNonNull(name);
            this.parser = Objects.requireNonNull((p, c) -> parser.apply(p));
        }

        /**
         * Creates a new entry which can be stored by the registry.
         * Prefer {@link Entry#Entry(Class, ParseField, CheckedFunction)} unless you need a context to carry around while parsing.
         */
        public <T> Entry(Class<T> categoryClass, ParseField name, ContextParser<Object, ? extends T> parser) {
            this.categoryClass = Objects.requireNonNull(categoryClass);
            this.name = Objects.requireNonNull(name);
            this.parser = Objects.requireNonNull(parser);
        }
    }

    private final Map<Class<?>, Map<String, Entry>> registry;

    public NamedXContentRegistry(List<Entry> entries) {
        if (entries.isEmpty()) {
            registry = emptyMap();
            return;
        }
        entries = new ArrayList<>(entries);
        entries.sort(Comparator.comparing(e -> e.categoryClass.getName()));

        Map<Class<?>, Map<String, Entry>> registry = new HashMap<>();
        Map<String, Entry> parsers = null;
        Class<?> currentCategory = null;
        for (Entry entry : entries) {
            if (currentCategory != entry.categoryClass) {
                if (currentCategory != null) {
                    // we've seen the last of this category, put it into the big map
                    registry.put(currentCategory, unmodifiableMap(parsers));
                }
                parsers = new HashMap<>();
                currentCategory = entry.categoryClass;
            }

            for (String name : entry.name.getAllNamesIncludedDeprecated()) {
                Object old = parsers.put(name, entry);
                if (old != null) {
                    throw new IllegalArgumentException("NamedXContent [" + currentCategory.getName() + "][" + entry.name + "]" +
                            " is already registered for [" + old.getClass().getName() + "]," +
                            " cannot register [" + entry.parser.getClass().getName() + "]");
                }
            }
        }
        // handle the last category
        registry.put(currentCategory, unmodifiableMap(parsers));

        this.registry = unmodifiableMap(registry);
    }

    public <T, C> T parseNamedObject(Class<T> categoryClass, String name, XContentParser parser, C context) throws IOException {
        Map<String, Entry> parsers = registry.get(categoryClass);
        if (parsers == null) {
            if (registry.isEmpty()) {
                // The "empty" registry will never work so we throw a better exception as a hint.
                throw new NamedObjectNotFoundException("named objects are not supported for this parser");
            }
            throw new NamedObjectNotFoundException("unknown named object category [" + categoryClass.getName() + "]");
        }
        Entry entry = parsers.get(name);
        if (entry == null) {
            throw new NamedObjectNotFoundException(parser.getTokenLocation(), "unable to parse " + categoryClass.getSimpleName() +
                    " with name [" + name + "]: parser not found");
        }
        return categoryClass.cast(entry.parser.parse(parser, context));
    }

}
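Usage follows a register-then-dispatch pattern: entries are keyed by category class plus name, and parseNamedObject looks the parser up at read time. A sketch, assuming java.util.Collections and the Elasticsearch InternalAggregation/ParseField classes are in scope; parseMaxAggregation is a hypothetical parsing function:

NamedXContentRegistry registry = new NamedXContentRegistry(Collections.singletonList(
        new NamedXContentRegistry.Entry(InternalAggregation.class, new ParseField("max"),
                p -> parseMaxAggregation(p)))); // parseMaxAggregation is hypothetical
InternalAggregation agg = registry.parseNamedObject(InternalAggregation.class, "max", parser, null);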
elx-http/src/main/java/org/xbib/elx/http/util/ObjectParser.java (new file)
@@ -0,0 +1,441 @@
package org.xbib.elx.http.util;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;
import java.lang.reflect.Array;
import java.util.ArrayList;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.BiConsumer;
import java.util.function.BiFunction;
import java.util.function.Consumer;
import java.util.function.Supplier;

import static org.elasticsearch.common.xcontent.XContentParser.Token.START_ARRAY;
import static org.elasticsearch.common.xcontent.XContentParser.Token.START_OBJECT;
import static org.elasticsearch.common.xcontent.XContentParser.Token.VALUE_BOOLEAN;
import static org.elasticsearch.common.xcontent.XContentParser.Token.VALUE_EMBEDDED_OBJECT;
import static org.elasticsearch.common.xcontent.XContentParser.Token.VALUE_NULL;
import static org.elasticsearch.common.xcontent.XContentParser.Token.VALUE_NUMBER;
import static org.elasticsearch.common.xcontent.XContentParser.Token.VALUE_STRING;

/**
 * A declarative, stateless parser that turns XContent into setter calls. A single parser should be defined for each object being parsed;
 * nested elements can be added via {@link #declareObject(BiConsumer, ContextParser, ParseField)}, which should be satisfied where possible
 * by passing another instance of {@link ObjectParser}, this one customized for that object.
 * <p>
 * This class works well for objects that do not have constructor arguments or that can be built using information available from earlier
 * in the XContent.
 * </p>
 * <p>
 * Instances of {@link ObjectParser} should be set up by declaring a constant field for the parser and declaring all fields in a static
 * block just below the creation of the parser. Like this:
 * </p>
 * <pre>{@code
 *   private static final ObjectParser<Thing, SomeContext> PARSER = new ObjectParser<>("thing", Thing::new);
 *   static {
 *       PARSER.declareInt(Thing::setMineral, new ParseField("mineral"));
 *       PARSER.declareInt(Thing::setFruit, new ParseField("fruit"));
 *   }
 * }</pre>
 * It's highly recommended to use the high level declare methods like {@link #declareString(BiConsumer, ParseField)} instead of
 * {@link #declareField}, which can be used to implement exceptional parsing operations not covered by the high level methods.
 */
public final class ObjectParser<Value, Context> extends AbstractObjectParser<Value, Context> {

    private static final Logger logger = LogManager.getLogger(ObjectParser.class.getName());

    public static <Value, ElementValue> BiConsumer<Value, List<ElementValue>> fromList(Class<ElementValue> c,
                                                                                       BiConsumer<Value, ElementValue[]> consumer) {
        return (Value v, List<ElementValue> l) -> {
            @SuppressWarnings("unchecked")
            ElementValue[] array = (ElementValue[]) Array.newInstance(c, l.size());
            consumer.accept(v, l.toArray(array));
        };
    }

    private final Map<String, FieldParser> fieldParserMap = new HashMap<>();

    private final String name;

    private final Supplier<Value> valueSupplier;

    /**
     * Should this parser ignore unknown fields? This should generally be set to true only when parsing responses from external systems,
     * never when parsing requests from users.
     */
    private final boolean ignoreUnknownFields;

    /**
     * Creates a new ObjectParser instance with a name. This name is used to reference the parser in exceptions and messages.
     */
    public ObjectParser(String name) {
        this(name, null);
    }

    /**
     * Creates a new ObjectParser instance with a name.
     * @param name the parser's name, used to reference the parser in exceptions and messages.
     * @param valueSupplier a supplier that creates a new Value instance, used when the parser is used as an inner object parser.
     */
    public ObjectParser(String name, @Nullable Supplier<Value> valueSupplier) {
        this(name, false, valueSupplier);
    }

    /**
     * Creates a new ObjectParser instance with a name.
     * @param name the parser's name, used to reference the parser in exceptions and messages.
     * @param ignoreUnknownFields Should this parser ignore unknown fields? This should generally be set to true only when parsing
     *      responses from external systems, never when parsing requests from users.
     * @param valueSupplier a supplier that creates a new Value instance, used when the parser is used as an inner object parser.
     */
    public ObjectParser(String name, boolean ignoreUnknownFields, @Nullable Supplier<Value> valueSupplier) {
        this.name = name;
        this.valueSupplier = valueSupplier;
        this.ignoreUnknownFields = ignoreUnknownFields;
    }

    /**
     * Parses a Value from the given {@link XContentParser}.
     * @param parser the parser to build a value from
     * @param context context needed for parsing
     * @return a new value instance drawn from the provided value supplier on {@link #ObjectParser(String, Supplier)}
     * @throws IOException if an IOException occurs.
     */
    @Override
    public Value parse(XContentParser parser, Context context) throws IOException {
        if (valueSupplier == null) {
            throw new NullPointerException("valueSupplier is not set");
        }
        return parse(parser, valueSupplier.get(), context);
    }

    /**
     * Parses a Value from the given {@link XContentParser}.
     * @param parser the parser to build a value from
     * @param value the value to fill from the parser
     * @param context a context that is passed along to all declared field parsers
     * @return the parsed value
     * @throws IOException if an IOException occurs.
     */
    public Value parse(XContentParser parser, Value value, Context context) throws IOException {
        logger.debug("parse");
        XContentParser.Token token;
        if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
            token = parser.currentToken();
        } else {
            token = parser.nextToken();
            if (token != XContentParser.Token.START_OBJECT) {
                throw new XContentParseException(parser.getTokenLocation(), "[" + name + "] Expected START_OBJECT but was: " + token);
            }
        }
        FieldParser fieldParser = null;
        String currentFieldName = null;
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                currentFieldName = parser.currentName();
                fieldParser = getParser(currentFieldName, parser);
                logger.debug("currentFieldName={} fieldParser={}", currentFieldName, fieldParser);
            } else {
                if (currentFieldName == null) {
                    throw new XContentParseException(parser.getTokenLocation(), "[" + name + "] no field found");
                }
                if (fieldParser == null) {
                    assert ignoreUnknownFields : "this should only be possible if configured to ignore unknown fields";
                    parser.skipChildren(); // noop if parser points to a value, skips children if parser is start object or start array
                } else {
                    fieldParser.assertSupports(name, parser, currentFieldName);
                    parseSub(parser, fieldParser, currentFieldName, value, context);
                }
                fieldParser = null;
            }
        }
        return value;
    }

    @Override
    public Value apply(XContentParser parser, Context context) {
        if (valueSupplier == null) {
            throw new NullPointerException("valueSupplier is not set");
        }
        try {
            return parse(parser, valueSupplier.get(), context);
        } catch (IOException e) {
            throw new XContentParseException(parser.getTokenLocation(), "[" + name + "] failed to parse object", e);
        }
    }

    public interface Parser<Value, Context> {
        void parse(XContentParser parser, Value value, Context context) throws IOException;
    }

    public void declareField(Parser<Value, Context> p, ParseField parseField, ValueType type) {
        if (parseField == null) {
            throw new IllegalArgumentException("[parseField] is required");
        }
        if (type == null) {
            throw new IllegalArgumentException("[type] is required");
        }
        FieldParser fieldParser = new FieldParser(p, type.supportedTokens(), parseField, type);
        for (String fieldValue : parseField.getAllNamesIncludedDeprecated()) {
            fieldParserMap.putIfAbsent(fieldValue, fieldParser);
        }
    }

    @Override
    public <T> void declareField(BiConsumer<Value, T> consumer, ContextParser<Context, T> parser, ParseField parseField,
                                 ValueType type) {
        if (consumer == null) {
            throw new IllegalArgumentException("[consumer] is required");
        }
        if (parser == null) {
            throw new IllegalArgumentException("[parser] is required");
        }
        declareField((p, v, c) -> consumer.accept(v, parser.parse(p, c)), parseField, type);
    }

    public <T> void declareObjectOrDefault(BiConsumer<Value, T> consumer, BiFunction<XContentParser, Context, T> objectParser,
                                           Supplier<T> defaultValue, ParseField field) {
        declareField((p, v, c) -> {
            if (p.currentToken() == XContentParser.Token.VALUE_BOOLEAN) {
                if (p.booleanValue()) {
                    consumer.accept(v, defaultValue.get());
                }
            } else {
                consumer.accept(v, objectParser.apply(p, c));
            }
        }, field, ValueType.OBJECT_OR_BOOLEAN);
    }

    @Override
    public <T> void declareNamedObjects(BiConsumer<Value, List<T>> consumer, NamedObjectParser<T, Context> namedObjectParser,
                                        Consumer<Value> orderedModeCallback, ParseField field) {
        // This creates and parses the named object
        BiFunction<XContentParser, Context, T> objectParser = (XContentParser p, Context c) -> {
            if (p.currentToken() != XContentParser.Token.FIELD_NAME) {
                throw new XContentParseException(p.getTokenLocation(), "[" + field + "] can be a single object with any number of "
                        + "fields or an array where each entry is an object with a single field");
            }
            // This messy exception nesting has the nice side effect of telling the user which field failed to parse
            try {
                String name = p.currentName();
                try {
                    return namedObjectParser.parse(p, c, name);
                } catch (Exception e) {
                    throw new XContentParseException(p.getTokenLocation(), "[" + field + "] failed to parse field [" + name + "]", e);
                }
            } catch (IOException e) {
                throw new XContentParseException(p.getTokenLocation(), "[" + field + "] error while parsing", e);
            }
        };
        declareField((XContentParser p, Value v, Context c) -> {
            List<T> fields = new ArrayList<>();
            XContentParser.Token token;
            if (p.currentToken() == XContentParser.Token.START_OBJECT) {
                // Fields are just named entries in a single object
                while ((token = p.nextToken()) != XContentParser.Token.END_OBJECT) {
                    fields.add(objectParser.apply(p, c));
                }
            } else if (p.currentToken() == XContentParser.Token.START_ARRAY) {
                // Fields are objects in an array. Each object contains a named field.
                orderedModeCallback.accept(v);
                while ((token = p.nextToken()) != XContentParser.Token.END_ARRAY) {
                    if (token != XContentParser.Token.START_OBJECT) {
                        throw new XContentParseException(p.getTokenLocation(), "[" + field + "] can be a single object with any number of "
                                + "fields or an array where each entry is an object with a single field");
                    }
                    p.nextToken(); // Move to the first field in the object
                    fields.add(objectParser.apply(p, c));
                    p.nextToken(); // Move past the field; the parser should now be at the entry's END_OBJECT
                    if (p.currentToken() != XContentParser.Token.END_OBJECT) {
                        throw new XContentParseException(p.getTokenLocation(), "[" + field + "] can be a single object with any number of "
                                + "fields or an array where each entry is an object with a single field");
                    }
                }
            }
            consumer.accept(v, fields);
        }, field, ValueType.OBJECT_ARRAY);
    }

    @Override
    public <T> void declareNamedObjects(BiConsumer<Value, List<T>> consumer, NamedObjectParser<T, Context> namedObjectParser,
                                        ParseField field) {
        Consumer<Value> orderedModeCallback = (v) -> {
            throw new IllegalArgumentException("[" + field + "] doesn't support arrays. Use a single object with multiple fields.");
        };
        declareNamedObjects(consumer, namedObjectParser, orderedModeCallback, field);
    }

    /**
     * Functional interface for instantiating and parsing named objects. See ObjectParserTests#NamedObject for the canonical way to
     * implement this for objects that themselves have a parser.
     */
    @FunctionalInterface
    public interface NamedObjectParser<T, Context> {
        T parse(XContentParser p, Context c, String name) throws IOException;
    }

    /**
     * Get the name of the parser.
     */
    @Override
    public String getName() {
        return name;
    }

    private void parseArray(XContentParser parser, FieldParser fieldParser, String currentFieldName, Value value, Context context)
            throws IOException {
        assert parser.currentToken() == XContentParser.Token.START_ARRAY : "Token was: " + parser.currentToken();
        parseValue(parser, fieldParser, currentFieldName, value, context);
    }

    private void parseValue(XContentParser parser, FieldParser fieldParser, String currentFieldName, Value value, Context context)
            throws IOException {
        try {
            fieldParser.parser.parse(parser, value, context);
        } catch (Exception ex) {
            throw new XContentParseException(parser.getTokenLocation(),
                    "[" + name + "] failed to parse field [" + currentFieldName + "]", ex);
        }
    }

    private void parseSub(XContentParser parser, FieldParser fieldParser, String currentFieldName, Value value, Context context)
            throws IOException {
        final XContentParser.Token token = parser.currentToken();
        switch (token) {
            case START_OBJECT:
                parseValue(parser, fieldParser, currentFieldName, value, context);
                /*
                 * Well behaving parsers should consume the entire object but
                 * asserting that they do that is not something we can do
                 * efficiently here. Instead we can check that they end on an
                 * END_OBJECT. They could end on the *wrong* end object and
                 * this test won't catch them, but that is the price that we pay
                 * for having a cheap test.
                 */
                if (parser.currentToken() != XContentParser.Token.END_OBJECT) {
|
||||
throw new IllegalStateException("parser for [" + currentFieldName + "] did not end on END_OBJECT");
|
||||
}
|
||||
break;
|
||||
case START_ARRAY:
|
||||
parseArray(parser, fieldParser, currentFieldName, value, context);
|
||||
/*
|
||||
* Well behaving parsers should consume the entire array but
|
||||
* asserting that they do that is not something we can do
|
||||
* efficiently here. Instead we can check that they end on an
|
||||
* END_ARRAY. They could end on the *wrong* end array and
|
||||
* this test won't catch them, but that is the price that we pay
|
||||
* for having a cheap test.
|
||||
*/
|
||||
if (parser.currentToken() != XContentParser.Token.END_ARRAY) {
|
||||
throw new IllegalStateException("parser for [" + currentFieldName + "] did not end on END_ARRAY");
|
||||
}
|
||||
break;
|
||||
case END_OBJECT:
|
||||
case END_ARRAY:
|
||||
case FIELD_NAME:
|
||||
throw new XContentParseException(parser.getTokenLocation(), "[" + name + "]" + token + " is unexpected");
|
||||
case VALUE_STRING:
|
||||
case VALUE_NUMBER:
|
||||
case VALUE_BOOLEAN:
|
||||
case VALUE_EMBEDDED_OBJECT:
|
||||
case VALUE_NULL:
|
||||
parseValue(parser, fieldParser, currentFieldName, value, context);
|
||||
}
|
||||
}
|
||||
|
||||
private FieldParser getParser(String fieldName, XContentParser xContentParser) {
|
||||
FieldParser parser = fieldParserMap.get(fieldName);
|
||||
if (parser == null && false == ignoreUnknownFields) {
|
||||
throw new XContentParseException(xContentParser.getTokenLocation(),
|
||||
"[" + name + "] unknown field [" + fieldName + "], parser not found");
|
||||
}
|
||||
return parser;
|
||||
}
|
||||
|
||||
private class FieldParser {
|
||||
private final Parser<Value, Context> parser;
|
||||
private final EnumSet<XContentParser.Token> supportedTokens;
|
||||
private final ParseField parseField;
|
||||
private final ValueType type;
|
||||
|
||||
FieldParser(Parser<Value, Context> parser, EnumSet<XContentParser.Token> supportedTokens, ParseField parseField, ValueType type) {
|
||||
this.parser = parser;
|
||||
this.supportedTokens = supportedTokens;
|
||||
this.parseField = parseField;
|
||||
this.type = type;
|
||||
}
|
||||
|
||||
void assertSupports(String parserName, XContentParser parser, String currentFieldName) {
|
||||
if (!supportedTokens.contains(parser.currentToken())) {
|
||||
throw new XContentParseException(parser.getTokenLocation(),
|
||||
"[" + parserName + "] " + currentFieldName + " doesn't support values of type: " + parser.currentToken());
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "FieldParser{" +
|
||||
"preferred_name=" + parseField.getPreferredName() +
|
||||
", supportedTokens=" + supportedTokens +
|
||||
", type=" + type.name() +
|
||||
'}';
|
||||
}
|
||||
}
|
||||
|
||||
public enum ValueType {
|
||||
STRING(VALUE_STRING),
|
||||
STRING_OR_NULL(VALUE_STRING, VALUE_NULL),
|
||||
FLOAT(VALUE_NUMBER, VALUE_STRING),
|
||||
FLOAT_OR_NULL(VALUE_NUMBER, VALUE_STRING, VALUE_NULL),
|
||||
DOUBLE(VALUE_NUMBER, VALUE_STRING),
|
||||
DOUBLE_OR_NULL(VALUE_NUMBER, VALUE_STRING, VALUE_NULL),
|
||||
LONG(VALUE_NUMBER, VALUE_STRING),
|
||||
LONG_OR_NULL(VALUE_NUMBER, VALUE_STRING, VALUE_NULL),
|
||||
INT(VALUE_NUMBER, VALUE_STRING),
|
||||
INT_OR_NULL(VALUE_NUMBER, VALUE_STRING, VALUE_NULL),
|
||||
BOOLEAN(VALUE_BOOLEAN, VALUE_STRING),
|
||||
STRING_ARRAY(START_ARRAY, VALUE_STRING),
|
||||
FLOAT_ARRAY(START_ARRAY, VALUE_NUMBER, VALUE_STRING),
|
||||
DOUBLE_ARRAY(START_ARRAY, VALUE_NUMBER, VALUE_STRING),
|
||||
LONG_ARRAY(START_ARRAY, VALUE_NUMBER, VALUE_STRING),
|
||||
INT_ARRAY(START_ARRAY, VALUE_NUMBER, VALUE_STRING),
|
||||
BOOLEAN_ARRAY(START_ARRAY, VALUE_BOOLEAN),
|
||||
OBJECT(START_OBJECT),
|
||||
OBJECT_OR_NULL(START_OBJECT, VALUE_NULL),
|
||||
OBJECT_ARRAY(START_OBJECT, START_ARRAY),
|
||||
OBJECT_OR_BOOLEAN(START_OBJECT, VALUE_BOOLEAN),
|
||||
OBJECT_OR_STRING(START_OBJECT, VALUE_STRING),
|
||||
OBJECT_OR_LONG(START_OBJECT, VALUE_NUMBER),
|
||||
OBJECT_ARRAY_BOOLEAN_OR_STRING(START_OBJECT, START_ARRAY, VALUE_BOOLEAN, VALUE_STRING),
|
||||
OBJECT_ARRAY_OR_STRING(START_OBJECT, START_ARRAY, VALUE_STRING),
|
||||
VALUE(VALUE_BOOLEAN, VALUE_NULL, VALUE_EMBEDDED_OBJECT, VALUE_NUMBER, VALUE_STRING),
|
||||
VALUE_OBJECT_ARRAY(VALUE_BOOLEAN, VALUE_NULL, VALUE_EMBEDDED_OBJECT, VALUE_NUMBER, VALUE_STRING, START_OBJECT, START_ARRAY),
|
||||
VALUE_ARRAY(VALUE_BOOLEAN, VALUE_NULL, VALUE_NUMBER, VALUE_STRING, START_ARRAY);
|
||||
|
||||
private final EnumSet<XContentParser.Token> tokens;
|
||||
|
||||
ValueType(XContentParser.Token first, XContentParser.Token... rest) {
|
||||
this.tokens = EnumSet.of(first, rest);
|
||||
}
|
||||
|
||||
public EnumSet<XContentParser.Token> supportedTokens() {
|
||||
return this.tokens;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "ObjectParser{" +
|
||||
"name='" + name + '\'' +
|
||||
", fields=" + fieldParserMap.values() +
|
||||
'}';
|
||||
}
|
||||
}
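For orientation, a minimal usage sketch of this ObjectParser copy. The Doc class and field names are hypothetical and not part of this commit; the sketch relies only on API visible in this diff (the three-argument declareField above, declareLong as used by ParsedTerms, and the constructor and parse call used by ParsedStringTerms further down):

    // Hypothetical value class, for illustration only.
    public class Doc {
        String title;
        long count;
    }

    private static final ObjectParser<Doc, Void> DOC_PARSER = new ObjectParser<>("doc", true, Doc::new);

    static {
        // three-argument declareField with an explicit ValueType, as declared above
        DOC_PARSER.declareField((p, doc, c) -> doc.title = p.text(),
                new ParseField("title"), ObjectParser.ValueType.STRING);
        // declareLong, as used by ParsedTerms in this commit
        DOC_PARSER.declareLong((doc, v) -> doc.count = v, new ParseField("count"));
    }

    // Doc doc = DOC_PARSER.parse(xContentParser, null);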
@@ -0,0 +1,47 @@
package org.xbib.elx.http.util;

import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.xcontent.XContentLocation;

import java.util.Optional;

/**
 * Thrown when one of the XContent parsers cannot parse something.
 */
public class XContentParseException extends IllegalArgumentException {

    private final Optional<XContentLocation> location;

    public XContentParseException(String message) {
        this(null, message);
    }

    public XContentParseException(XContentLocation location, String message) {
        super(message);
        this.location = Optional.ofNullable(location);
    }

    public XContentParseException(XContentLocation location, String message, Exception cause) {
        super(message, cause);
        this.location = Optional.ofNullable(location);
    }

    public int getLineNumber() {
        return location.map(l -> l.lineNumber).orElse(-1);
    }

    public int getColumnNumber() {
        return location.map(l -> l.columnNumber).orElse(-1);
    }

    @Nullable
    public XContentLocation getLocation() {
        return location.orElse(null);
    }

    @Override
    public String getMessage() {
        return location.map(l -> "[" + l.toString() + "] ").orElse("") + super.getMessage();
    }
}
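For illustration only (not part of the commit): the Optional-based location gives callers positional context for free, assuming XContentLocation's usual line:column toString:

    XContentLocation loc = new XContentLocation(3, 17);
    XContentParseException e = new XContentParseException(loc, "[foo] failed to parse object");
    // e.getMessage() -> "[3:17] [foo] failed to parse object"
    // new XContentParseException("no location").getLineNumber() -> -1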
@@ -0,0 +1,68 @@
package org.xbib.elx.http.util;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.XContentLocation;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.Aggregation;
import org.xbib.elx.http.util.aggregations.ParsedStringTerms;

import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.function.Consumer;
import java.util.function.Supplier;
import java.util.stream.Collectors;

public class XContentParserUtils {

    private static final NamedXContentRegistry xContentRegistry = new NamedXContentRegistry(getDefaultNamedXContents());

    public static void ensureExpectedToken(XContentParser.Token expected, XContentParser.Token actual,
                                           Supplier<XContentLocation> location) {
        if (actual != expected) {
            String message = "Failed to parse object: expecting token of type [%s] but found [%s]";
            throw new ElasticsearchException(location.get() + ":" + String.format(Locale.ROOT, message, expected, actual));
        }
    }

    public static <T> void parseTypedKeysObject(XContentParser parser, String delimiter, Class<T> objectClass, Consumer<T> consumer)
            throws IOException {
        if (parser.currentToken() != XContentParser.Token.START_OBJECT && parser.currentToken() != XContentParser.Token.START_ARRAY) {
            throwUnknownToken(parser.currentToken(), parser.getTokenLocation());
        }
        String currentFieldName = parser.currentName();
        if (Strings.hasLength(currentFieldName)) {
            int position = currentFieldName.indexOf(delimiter);
            if (position > 0) {
                String type = currentFieldName.substring(0, position);
                String name = currentFieldName.substring(position + 1);
                consumer.accept(namedObject(parser, objectClass, type, name));
                return;
            }
            // if we didn't find a delimiter we ignore the object or array for forward compatibility instead of throwing an error
            parser.skipChildren();
        } else {
            throw new ElasticsearchException(parser.getTokenLocation() + ":" + "Failed to parse object: empty key");
        }
    }

    public static void throwUnknownToken(XContentParser.Token token, XContentLocation location) {
        String message = "Failed to parse object: unexpected token [%s] found";
        throw new ElasticsearchException(location + ":" + String.format(Locale.ROOT, message, token));
    }

    static <T> T namedObject(XContentParser parser, Class<T> categoryClass, String name, Object context) throws IOException {
        return xContentRegistry.parseNamedObject(categoryClass, name, parser, context);
    }

    public static List<NamedXContentRegistry.Entry> getDefaultNamedXContents() {
        Map<String, ContextParser<Object, ? extends Aggregation>> map = new HashMap<>();
        //map.put("terms", (p, c) -> ParsedStringTerms.fromXContent(p, (String) c));
        return map.entrySet().stream()
                .map(entry -> new NamedXContentRegistry.Entry(Aggregation.class, new ParseField(entry.getKey()), entry.getValue()))
                .collect(Collectors.toList());
    }
}
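A sketch of how parseTypedKeysObject is driven (the JSON and variable names are illustrative, not from this commit). With typed keys, Elasticsearch emits aggregation entries whose key fuses type and name:

    // "aggregations": { "terms#genres": { "buckets": [ ... ] } }
    // with the parser positioned on the entry's value, currentName() == "terms#genres":
    List<Aggregation> results = new ArrayList<>();
    XContentParserUtils.parseTypedKeysObject(parser, "#", Aggregation.class, results::add);
    // splits the key into type "terms" and name "genres", then delegates to the
    // parser registered for "terms" in the NamedXContentRegistry built above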
@@ -0,0 +1,18 @@
package org.xbib.elx.http.util.aggregations;

import org.elasticsearch.common.ParseField;

final class CommonFields {
    public static final ParseField META = new ParseField("meta");
    public static final ParseField BUCKETS = new ParseField("buckets");
    public static final ParseField VALUE = new ParseField("value");
    public static final ParseField VALUES = new ParseField("values");
    public static final ParseField VALUE_AS_STRING = new ParseField("value_as_string");
    public static final ParseField DOC_COUNT = new ParseField("doc_count");
    public static final ParseField KEY = new ParseField("key");
    public static final ParseField KEY_AS_STRING = new ParseField("key_as_string");
    public static final ParseField FROM = new ParseField("from");
    public static final ParseField FROM_AS_STRING = new ParseField("from_as_string");
    public static final ParseField TO = new ParseField("to");
    public static final ParseField TO_AS_STRING = new ParseField("to_as_string");
}
@@ -0,0 +1,40 @@
package org.xbib.elx.http.util.aggregations;

import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.search.aggregations.Aggregation;
import org.xbib.elx.http.util.ObjectParser;

import java.util.Collections;
import java.util.Map;

/**
 * An implementation of {@link Aggregation} that is parsed from a REST response.
 * Serves as a base class for all aggregation implementations that are parsed from REST.
 */
public abstract class ParsedAggregation implements Aggregation {

    protected static void declareAggregationFields(ObjectParser<? extends ParsedAggregation, Void> objectParser) {
        objectParser.declareObject((parsedAgg, metadata) -> parsedAgg.metadata = Collections.unmodifiableMap(metadata),
                (parser, context) -> parser.map(), CommonFields.META);
    }

    private String name;

    protected Map<String, Object> metadata;

    @Override
    public final String getName() {
        return name;
    }

    protected void setName(String name) {
        this.name = name;
    }

    @Override
    public final Map<String, Object> getMetaData() {
        return metadata;
    }
}
@@ -0,0 +1,149 @@
package org.xbib.elx.http.util.aggregations;

import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.Aggregations;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation;
import org.xbib.elx.http.util.CheckedBiConsumer;
import org.xbib.elx.http.util.CheckedFunction;
import org.xbib.elx.http.util.ObjectParser;
import org.xbib.elx.http.util.XContentParserUtils;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.function.Supplier;

import static org.xbib.elx.http.util.XContentParserUtils.ensureExpectedToken;

public abstract class ParsedMultiBucketAggregation<B extends ParsedMultiBucketAggregation.Bucket>
        extends ParsedAggregation implements MultiBucketsAggregation {

    protected final List<B> buckets = new ArrayList<>();

    protected boolean keyed = false;

    protected static void declareMultiBucketAggregationFields(final ObjectParser<? extends ParsedMultiBucketAggregation, Void> objectParser,
            final CheckedFunction<XContentParser, ParsedBucket, IOException> bucketParser,
            final CheckedFunction<XContentParser, ParsedBucket, IOException> keyedBucketParser) {
        declareAggregationFields(objectParser);
        objectParser.declareField((parser, aggregation, context) -> {
            XContentParser.Token token = parser.currentToken();
            if (token == XContentParser.Token.START_OBJECT) {
                aggregation.keyed = true;
                while (parser.nextToken() != XContentParser.Token.END_OBJECT) {
                    aggregation.buckets.add(keyedBucketParser.apply(parser));
                }
            } else if (token == XContentParser.Token.START_ARRAY) {
                aggregation.keyed = false;
                while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
                    aggregation.buckets.add(bucketParser.apply(parser));
                }
            }
        }, CommonFields.BUCKETS, ObjectParser.ValueType.OBJECT_ARRAY);
    }

    public abstract static class ParsedBucket implements MultiBucketsAggregation.Bucket {

        private Aggregations aggregations;

        private String keyAsString;

        private long docCount;

        private boolean keyed;

        protected void setKeyAsString(String keyAsString) {
            this.keyAsString = keyAsString;
        }

        @Override
        public String getKeyAsString() {
            return keyAsString;
        }

        protected void setDocCount(long docCount) {
            this.docCount = docCount;
        }

        @Override
        public long getDocCount() {
            return docCount;
        }

        public void setKeyed(boolean keyed) {
            this.keyed = keyed;
        }

        protected boolean isKeyed() {
            return keyed;
        }

        protected void setAggregations(Aggregations aggregations) {
            this.aggregations = aggregations;
        }

        @Override
        public Aggregations getAggregations() {
            return aggregations;
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            /*if (keyed) {
                builder.startObject(getKeyAsString());
            } else {
                builder.startObject();
            }
            if (keyAsString != null) {
                builder.field(CommonFields.KEY_AS_STRING.getPreferredName(), getKeyAsString());
            }
            keyToXContent(builder);
            builder.field(CommonFields.DOC_COUNT.getPreferredName(), docCount);
            aggregations.toXContentInternal(builder, params);
            builder.endObject();*/
            return builder;
        }

        protected XContentBuilder keyToXContent(XContentBuilder builder) throws IOException {
            return builder.field(CommonFields.KEY.getPreferredName(), getKey());
        }

        protected static <B extends ParsedBucket> B parseXContent(final XContentParser parser,
                                                                  final boolean keyed,
                                                                  final Supplier<B> bucketSupplier,
                                                                  final CheckedBiConsumer<XContentParser, B, IOException> keyConsumer)
                throws IOException {
            final B bucket = bucketSupplier.get();
            bucket.setKeyed(keyed);
            XContentParser.Token token = parser.currentToken();
            String currentFieldName = parser.currentName();
            if (keyed) {
                ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation);
                ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);
            }
            List<InternalAggregation> aggregations = new ArrayList<>();
            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                if (token == XContentParser.Token.FIELD_NAME) {
                    currentFieldName = parser.currentName();
                } else if (token.isValue()) {
                    if (CommonFields.KEY_AS_STRING.getPreferredName().equals(currentFieldName)) {
                        bucket.setKeyAsString(parser.text());
                    } else if (CommonFields.KEY.getPreferredName().equals(currentFieldName)) {
                        keyConsumer.accept(parser, bucket);
                    } else if (CommonFields.DOC_COUNT.getPreferredName().equals(currentFieldName)) {
                        bucket.setDocCount(parser.longValue());
                    }
                } else if (token == XContentParser.Token.START_OBJECT) {
                    if (CommonFields.KEY.getPreferredName().equals(currentFieldName)) {
                        keyConsumer.accept(parser, bucket);
                    } else {
                        XContentParserUtils.parseTypedKeysObject(parser, "#", InternalAggregation.class,
                                aggregations::add);
                    }
                }
            }
            bucket.setAggregations(new InternalAggregations(aggregations));
            return bucket;
        }
    }
}
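For reference, the two bucket layouts the declareField lambda above distinguishes (illustrative JSON, not from this commit):

    // keyed = false, START_ARRAY:
    //   "buckets": [ { "key": "a", "doc_count": 1 }, { "key": "b", "doc_count": 2 } ]
    // keyed = true, START_OBJECT:
    //   "buckets": { "a": { "doc_count": 1 }, "b": { "doc_count": 2 } }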
@@ -0,0 +1,103 @@
package org.xbib.elx.http.util.aggregations;

import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.xbib.elx.http.util.ObjectParser;

import java.io.IOException;
import java.nio.CharBuffer;
import java.util.List;

public class ParsedStringTerms extends ParsedTerms {

    public String getType() {
        return "terms";
    }

    private static final ObjectParser<ParsedStringTerms, Void> PARSER =
            new ObjectParser<>(ParsedStringTerms.class.getSimpleName(), true, ParsedStringTerms::new);

    static {
        declareParsedTermsFields(PARSER, ParsedBucket::fromXContent);
    }

    public static ParsedStringTerms fromXContent(XContentParser parser, String name) throws IOException {
        ParsedStringTerms aggregation = PARSER.parse(parser, null);
        aggregation.setName(name);
        return aggregation;
    }

    @Override
    public Object getProperty(String path) {
        throw new UnsupportedOperationException();
    }

    public static class ParsedBucket extends ParsedTerms.ParsedBucket {

        private BytesRef key;

        @Override
        public Object getKey() {
            return getKeyAsString();
        }

        @Override
        public String getKeyAsString() {
            String keyAsString = super.getKeyAsString();
            if (keyAsString != null) {
                return keyAsString;
            }
            if (key != null) {
                return key.utf8ToString();
            }
            return null;
        }

        @Override
        public Object getProperty(String containingAggName, List<String> path) {
            throw new UnsupportedOperationException();
        }

        public Number getKeyAsNumber() {
            if (key != null) {
                return Double.parseDouble(key.utf8ToString());
            }
            return null;
        }

        @Override
        protected XContentBuilder keyToXContent(XContentBuilder builder) throws IOException {
            return builder.field(CommonFields.KEY.getPreferredName(), getKey());
        }

        static ParsedBucket fromXContent(XContentParser parser) throws IOException {
            return parseTermsBucketXContent(parser, ParsedBucket::new, (p, bucket) -> {
                CharBuffer cb = charBufferOrNull(p);
                if (cb == null) {
                    bucket.key = null;
                } else {
                    bucket.key = new BytesRef(cb);
                }
            });
        }

        static CharBuffer charBufferOrNull(XContentParser parser) throws IOException {
            if (parser.currentToken() == XContentParser.Token.VALUE_NULL) {
                return null;
            }
            return CharBuffer.wrap(parser.textCharacters(), parser.textOffset(), parser.textLength());
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            throw new UnsupportedOperationException();
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            throw new UnsupportedOperationException();
        }
    }
}
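The intended wiring for this class mirrors the registration that is still commented out in XContentParserUtils.getDefaultNamedXContents() above; enabling it would look like this (an inference from that comment, not part of this commit):

    map.put("terms", (p, c) -> ParsedStringTerms.fromXContent(p, (String) c));
    // a response key such as "terms#genres" then resolves through
    // parseTypedKeysObject to ParsedStringTerms.fromXContent(parser, "genres")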
@@ -0,0 +1,118 @@
package org.xbib.elx.http.util.aggregations;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.xbib.elx.http.util.CheckedBiConsumer;
import org.xbib.elx.http.util.CheckedFunction;
import org.xbib.elx.http.util.ObjectParser;
import org.xbib.elx.http.util.XContentParserUtils;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.function.Supplier;

public abstract class ParsedTerms extends ParsedMultiBucketAggregation<ParsedTerms.ParsedBucket> implements Terms {

    protected static final ParseField DOC_COUNT_ERROR_UPPER_BOUND_FIELD_NAME = new ParseField("doc_count_error_upper_bound");

    protected static final ParseField SUM_OF_OTHER_DOC_COUNTS = new ParseField("sum_other_doc_count");

    protected long docCountErrorUpperBound;

    protected long sumOtherDocCount;

    @Override
    public long getDocCountError() {
        return docCountErrorUpperBound;
    }

    @Override
    public long getSumOfOtherDocCounts() {
        return sumOtherDocCount;
    }

    @Override
    public List<Terms.Bucket> getBuckets() {
        //return buckets;
        throw new UnsupportedOperationException();
    }

    @Override
    public Terms.Bucket getBucketByKey(String term) {
        for (Terms.Bucket bucket : getBuckets()) {
            if (bucket.getKeyAsString().equals(term)) {
                return bucket;
            }
        }
        return null;
    }

    static void declareParsedTermsFields(final ObjectParser<? extends ParsedTerms, Void> objectParser,
                                         final CheckedFunction<XContentParser, ParsedBucket, IOException> bucketParser) {
        declareMultiBucketAggregationFields(objectParser, bucketParser::apply, bucketParser::apply);
        objectParser.declareLong((parsedTerms, value) -> parsedTerms.docCountErrorUpperBound = value,
                DOC_COUNT_ERROR_UPPER_BOUND_FIELD_NAME);
        objectParser.declareLong((parsedTerms, value) -> parsedTerms.sumOtherDocCount = value,
                SUM_OF_OTHER_DOC_COUNTS);
    }

    public abstract static class ParsedBucket extends ParsedMultiBucketAggregation.ParsedBucket /*implements Terms.Bucket*/ {

        boolean showDocCountError = false;

        protected long docCountError;

        public long getDocCountError() {
            return docCountError;
        }

        @Override
        public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            /*builder.startObject();
            keyToXContent(builder);
            builder.field(CommonFields.DOC_COUNT.getPreferredName(), getDocCount());
            if (showDocCountError) {
                builder.field(DOC_COUNT_ERROR_UPPER_BOUND_FIELD_NAME.getPreferredName(), getDocCountError());
            }
            getAggregations().toXContentInternal(builder, params);
            builder.endObject();*/
            return builder;
        }

        static <B extends ParsedBucket> B parseTermsBucketXContent(final XContentParser parser, final Supplier<B> bucketSupplier,
                                                                   final CheckedBiConsumer<XContentParser, B, IOException> keyConsumer)
                throws IOException {
            final B bucket = bucketSupplier.get();
            final List<InternalAggregation> aggregations = new ArrayList<>();
            XContentParser.Token token;
            String currentFieldName = parser.currentName();
            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                if (token == XContentParser.Token.FIELD_NAME) {
                    currentFieldName = parser.currentName();
                } else if (token.isValue()) {
                    if (CommonFields.KEY_AS_STRING.getPreferredName().equals(currentFieldName)) {
                        bucket.setKeyAsString(parser.text());
                    } else if (CommonFields.KEY.getPreferredName().equals(currentFieldName)) {
                        keyConsumer.accept(parser, bucket);
                    } else if (CommonFields.DOC_COUNT.getPreferredName().equals(currentFieldName)) {
                        bucket.setDocCount(parser.longValue());
                    } else if (DOC_COUNT_ERROR_UPPER_BOUND_FIELD_NAME.getPreferredName().equals(currentFieldName)) {
                        bucket.docCountError = parser.longValue();
                        bucket.showDocCountError = true;
                    }
                } else if (token == XContentParser.Token.START_OBJECT) {
                    XContentParserUtils.parseTypedKeysObject(parser, "#", InternalAggregation.class,
                            aggregations::add);
                }
            }
            bucket.setAggregations(new InternalAggregations(aggregations));
            return bucket;
        }
    }
}
@@ -0,0 +1 @@
org.xbib.elx.http.ExtendedHttpClientProvider

@@ -0,0 +1,3 @@
org.xbib.elx.http.action.search.HttpSearchAction
org.xbib.elx.http.action.get.HttpGetAction
org.xbib.elx.http.action.get.HttpMultiGetAction
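These one-class-per-line files are standard java.util.ServiceLoader registrations; that they live under META-INF/services/ is an assumption here, since the diff does not show the file paths. A lookup sketch:

    // iterates all providers named in registration files on the classpath
    ServiceLoader<ExtendedHttpClientProvider> loader = ServiceLoader.load(ExtendedHttpClientProvider.class);
    for (ExtendedHttpClientProvider provider : loader) {
        // select or configure a provider
    }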
122 elx-http/src/test/java/org/xbib/elx/http/test/ClientTest.java Normal file
@@ -0,0 +1,122 @@
package org.xbib.elx.http.test;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.admin.indices.refresh.RefreshAction;
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
import org.elasticsearch.action.get.GetAction;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.get.MultiGetAction;
import org.elasticsearch.action.get.MultiGetRequest;
import org.elasticsearch.action.get.MultiGetResponse;
import org.elasticsearch.action.index.IndexAction;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.search.SearchAction;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.transport.NoNodeAvailableException;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.junit.Ignore;
import org.junit.Test;
import org.xbib.elx.common.ClientBuilder;
import org.xbib.elx.http.ExtendedHttpClient;
import org.xbib.elx.http.ExtendedHttpClientProvider;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

public class ClientTest extends TestBase {

    private static final Logger logger = LogManager.getLogger(ClientTest.class.getName());

    @Ignore
    @Test
    public void testGet() throws Exception {
        try (ExtendedHttpClient client = ClientBuilder.builder()
                .provider(ExtendedHttpClientProvider.class)
                .put("url", "http://" + host + ":" + httpPort)
                .build()) {
            IndexRequest indexRequest = new IndexRequest();
            indexRequest.index("test");
            indexRequest.type("test");
            indexRequest.id("1");
            indexRequest.source("test", "Hello Jörg");
            IndexResponse indexResponse = client("1").execute(IndexAction.INSTANCE, indexRequest).actionGet();
            client("1").execute(RefreshAction.INSTANCE, new RefreshRequest());

            GetRequest getRequest = new GetRequest();
            getRequest.index("test");
            getRequest.type("test");
            getRequest.id("1");

            GetResponse getResponse = client.execute(GetAction.INSTANCE, getRequest).actionGet();

            assertTrue(getResponse.isExists());
            assertEquals("{\"test\":\"Hello Jörg\"}", getResponse.getSourceAsString());

        } catch (NoNodeAvailableException e) {
            logger.warn("skipping, no node available");
        }
    }

    @Ignore
    @Test
    public void testMultiGet() throws Exception {
        try (ExtendedHttpClient client = ClientBuilder.builder()
                .provider(ExtendedHttpClientProvider.class)
                .put("url", "http://" + host + ":" + httpPort)
                .build()) {
            IndexRequest indexRequest = new IndexRequest();
            indexRequest.index("test");
            indexRequest.type("test");
            indexRequest.id("1");
            indexRequest.source("test", "Hello Jörg");
            IndexResponse indexResponse = client("1").execute(IndexAction.INSTANCE, indexRequest).actionGet();
            client("1").execute(RefreshAction.INSTANCE, new RefreshRequest());

            MultiGetRequest multiGetRequest = new MultiGetRequest();
            multiGetRequest.add("test", "test", "1");

            MultiGetResponse multiGetResponse = client.execute(MultiGetAction.INSTANCE, multiGetRequest).actionGet();

            assertEquals(1, multiGetResponse.getResponses().length);
            assertEquals("{\"test\":\"Hello Jörg\"}", multiGetResponse.getResponses()[0].getResponse().getSourceAsString());

        } catch (NoNodeAvailableException e) {
            logger.warn("skipping, no node available");
        }
    }

    @Test
    public void testSearchDoc() throws Exception {
        try (ExtendedHttpClient client = ClientBuilder.builder()
                .provider(ExtendedHttpClientProvider.class)
                .put("url", "http://" + host + ":" + httpPort)
                .build()) {
            IndexRequest indexRequest = new IndexRequest();
            indexRequest.index("test");
            indexRequest.type("test");
            indexRequest.id("1");
            indexRequest.source("test", "Hello Jörg");
            IndexResponse indexResponse = client("1").execute(IndexAction.INSTANCE, indexRequest).actionGet();
            client("1").execute(RefreshAction.INSTANCE, new RefreshRequest());

            SearchSourceBuilder builder = new SearchSourceBuilder();
            builder.query(QueryBuilders.matchAllQuery());
            SearchRequest searchRequest = new SearchRequest();
            searchRequest.indices("test");
            searchRequest.types("test");
            searchRequest.source(builder);
            SearchResponse searchResponse = client.execute(SearchAction.INSTANCE, searchRequest).actionGet();
            long hits = searchResponse.getHits().getTotalHits();
            assertEquals(1, hits);
            logger.info("hits = {} source = {}", hits, searchResponse.getHits().getHits()[0].getSourceAsString());
            assertEquals("{\"test\":\"Hello Jörg\"}", searchResponse.getHits().getHits()[0].getSourceAsString());
        } catch (NoNodeAvailableException e) {
            logger.warn("skipping, no node available");
        }
    }
}
12 elx-http/src/test/java/org/xbib/elx/http/test/MockNode.java Normal file
@@ -0,0 +1,12 @@
package org.xbib.elx.http.test;

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.node.Node;

public class MockNode extends Node {

    public MockNode(Settings settings) {
        super(settings);
    }
}
@@ -1,4 +1,4 @@
-package org.xbib.elx.common;
+package org.xbib.elx.http.test;
 
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
@@ -6,18 +6,21 @@ import org.elasticsearch.ElasticsearchTimeoutException;
 import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction;
 import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
 import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
+import org.elasticsearch.action.admin.cluster.node.info.NodeInfo;
 import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest;
 import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse;
+import org.elasticsearch.action.admin.cluster.state.ClusterStateAction;
+import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest;
+import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
 import org.elasticsearch.client.support.AbstractClient;
 import org.elasticsearch.cluster.health.ClusterHealthStatus;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.transport.InetSocketTransportAddress;
+import org.elasticsearch.common.transport.TransportAddress;
 import org.elasticsearch.common.unit.TimeValue;
-import org.elasticsearch.node.MockNode;
 import org.elasticsearch.node.Node;
 import org.junit.After;
 import org.junit.Before;
-import org.xbib.elx.common.util.NetworkUtils;
 
 import java.io.IOException;
 import java.nio.file.FileVisitResult;
@@ -29,59 +32,39 @@ import java.nio.file.attribute.BasicFileAttributes;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Random;
 import java.util.concurrent.atomic.AtomicInteger;
 
 import static org.elasticsearch.common.settings.Settings.settingsBuilder;
 
-public class NodeTestUtils {
+public class TestBase {
 
     private static final Logger logger = LogManager.getLogger("test");
 
-    private static Random random = new Random();
+    private static final Random random = new Random();
 
-    private static char[] numbersAndLetters = ("0123456789abcdefghijklmnopqrstuvwxyz").toCharArray();
+    private static final char[] numbersAndLetters = ("0123456789abcdefghijklmnopqrstuvwxyz").toCharArray();
 
     private Map<String, Node> nodes = new HashMap<>();
 
     private Map<String, AbstractClient> clients = new HashMap<>();
 
     private AtomicInteger counter = new AtomicInteger();
 
     private String cluster;
 
-    private String host;
+    protected String host;
 
-    private int port;
+    protected int port;
 
-    private static void deleteFiles() throws IOException {
-        Path directory = Paths.get(getHome() + "/data");
-        Files.walkFileTree(directory, new SimpleFileVisitor<Path>() {
-            @Override
-            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
-                Files.delete(file);
-                return FileVisitResult.CONTINUE;
-            }
-
-            @Override
-            public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException {
-                Files.delete(dir);
-                return FileVisitResult.CONTINUE;
-            }
-
-        });
-
-    }
+    protected int httpPort;
 
     @Before
     public void startNodes() {
         try {
             logger.info("starting");
-            setClusterName();
+            setClusterName("test-cluster-" + System.getProperty("user.name"));
             startNode("1");
             findNodeAddress();
             try {
                 ClusterHealthResponse healthResponse = client("1").execute(ClusterHealthAction.INSTANCE,
-                        new ClusterHealthRequest().waitForStatus(ClusterHealthStatus.GREEN)
+                        new ClusterHealthRequest().waitForStatus(ClusterHealthStatus.YELLOW)
                                 .timeout(TimeValue.timeValueSeconds(30))).actionGet();
                 if (healthResponse != null && healthResponse.isTimedOut()) {
                     throw new IOException("cluster state is " + healthResponse.getStatus().name()
@@ -90,6 +73,12 @@ public class NodeTestUtils {
             } catch (ElasticsearchTimeoutException e) {
                 throw new IOException("cluster does not respond to health request, cowardly refusing to continue");
             }
+            ClusterStateRequest clusterStateRequest = new ClusterStateRequest().all();
+            ClusterStateResponse clusterStateResponse =
+                    client("1").execute(ClusterStateAction.INSTANCE, clusterStateRequest).actionGet();
+            logger.info("cluster name = {}", clusterStateResponse.getClusterName().value());
+            logger.info("host = {} port = {}", host, port);
+
         } catch (Throwable t) {
             logger.error("startNodes failed", t);
         }
@@ -114,18 +103,15 @@
         }
     }
 
-    protected void setClusterName() {
-        this.cluster = "test-helper-cluster-"
-                + NetworkUtils.getLocalAddress().getHostName()
-                + "-" + System.getProperty("user.name")
-                + "-" + counter.incrementAndGet();
+    protected void setClusterName(String cluster) {
+        this.cluster = cluster;
     }
 
     protected String getClusterName() {
         return cluster;
    }
 
-    protected Settings getSettings() {
+    protected Settings getTransportSettings() {
         return settingsBuilder()
                 .put("host", host)
                 .put("port", port)
@@ -137,14 +123,6 @@
     protected Settings getNodeSettings() {
         return settingsBuilder()
                 .put("cluster.name", cluster)
-                .put("cluster.routing.schedule", "50ms")
-                .put("cluster.routing.allocation.disk.threshold_enabled", false)
-                .put("discovery.zen.multicast.enabled", true)
-                .put("discovery.zen.multicast.ping_timeout", "5s")
-                .put("http.enabled", true)
-                .put("threadpool.bulk.size", Runtime.getRuntime().availableProcessors())
-                .put("threadpool.bulk.queue_size", 16 * Runtime.getRuntime().availableProcessors()) // default is 50, too low
-                .put("index.number_of_replicas", 0)
                 .put("path.home", getHome())
                 .build();
     }
@@ -153,40 +131,30 @@
         return System.getProperty("path.home", System.getProperty("user.dir"));
     }
 
-    public void startNode(String id) {
+    protected void startNode(String id) {
         buildNode(id).start();
     }
 
-    public AbstractClient client(String id) {
+    protected AbstractClient client(String id) {
         return clients.get(id);
     }
 
-    private void closeNodes() {
-        logger.info("closing all clients");
-        for (AbstractClient client : clients.values()) {
-            client.close();
-        }
-        clients.clear();
-        logger.info("closing all nodes");
-        for (Node node : nodes.values()) {
-            if (node != null) {
-                node.close();
-            }
-        }
-        nodes.clear();
-        logger.info("all nodes closed");
-    }
-
     protected void findNodeAddress() {
         NodesInfoRequest nodesInfoRequest = new NodesInfoRequest().transport(true);
         NodesInfoResponse response = client("1").admin().cluster().nodesInfo(nodesInfoRequest).actionGet();
-        Object obj = response.iterator().next().getTransport().getAddress()
-                .publishAddress();
-        if (obj instanceof InetSocketTransportAddress) {
-            InetSocketTransportAddress address = (InetSocketTransportAddress) obj;
+        for (NodeInfo nodeInfo : response) {
+            TransportAddress transportAddress = nodeInfo.getTransport().getAddress().publishAddress();
+            if (transportAddress instanceof InetSocketTransportAddress) {
+                InetSocketTransportAddress address = (InetSocketTransportAddress) transportAddress;
                 host = address.address().getHostName();
                 port = address.address().getPort();
             }
+            transportAddress = nodeInfo.getHttp().getAddress().publishAddress();
+            if (transportAddress instanceof InetSocketTransportAddress) {
+                InetSocketTransportAddress address = (InetSocketTransportAddress) transportAddress;
+                httpPort = address.address().getPort();
+            }
+        }
     }
 
     private Node buildNode(String id) {
@@ -210,4 +178,37 @@
         }
         return new String(buf);
     }
+
+    private void closeNodes() {
+        logger.info("closing all clients");
+        for (AbstractClient client : clients.values()) {
+            client.close();
+        }
+        clients.clear();
+        logger.info("closing all nodes");
+        for (Node node : nodes.values()) {
+            if (node != null) {
+                node.close();
+            }
+        }
+        nodes.clear();
+        logger.info("all nodes closed");
+    }
+
+    private static void deleteFiles() throws IOException {
+        Path directory = Paths.get(getHome() + "/data");
+        Files.walkFileTree(directory, new SimpleFileVisitor<Path>() {
+            @Override
+            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
+                Files.delete(file);
+                return FileVisitResult.CONTINUE;
+            }
+
+            @Override
+            public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException {
+                Files.delete(dir);
+                return FileVisitResult.CONTINUE;
+            }
+        });
+    }
 }
13 elx-http/src/test/resources/log4j2.xml Normal file
@@ -0,0 +1,13 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration status="OFF">
    <appenders>
        <Console name="Console" target="SYSTEM_OUT">
            <PatternLayout pattern="[%d{ISO8601}][%-5p][%-25c][%t] %m%n"/>
        </Console>
    </appenders>
    <Loggers>
        <Root level="debug">
            <AppenderRef ref="Console" />
        </Root>
    </Loggers>
</configuration>
@@ -1,3 +1,3 @@
 dependencies {
-    compile project(':elx-common')
+    api project(':elx-common')
 }
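A note on this one-line change (general Gradle behavior, not specific to this commit): with the java-library plugin, api replaces the deprecated compile configuration and keeps elx-common on the compile classpath of downstream consumers, whereas implementation would hide it:

    dependencies {
        api project(':elx-common')               // exposed to consumers of this module
        // implementation project(':elx-common') // alternative: hidden from consumers' compile classpath
    }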
@@ -1,65 +0,0 @@
buildscript {
    repositories {
        jcenter()
        maven {
            url 'http://xbib.org/repository'
        }
    }
    dependencies {
        classpath "org.xbib.elasticsearch:gradle-plugin-elasticsearch-build:6.2.3.4"
    }
}

apply plugin: 'org.xbib.gradle.plugin.elasticsearch.build'

configurations {
    main
    tests
}

dependencies {
    compile project(':common')
    testCompile "org.xbib.elasticsearch:elasticsearch-test-framework:${project.property('elasticsearch-devkit.version')}"
    testRuntime "org.xbib.elasticsearch:elasticsearch-test-framework:${project.property('elasticsearch-devkit.version')}"
}

jar {
    baseName "${rootProject.name}-node"
}

/*
task testJar(type: Jar, dependsOn: testClasses) {
    baseName = "${project.archivesBaseName}-tests"
    from sourceSets.test.output
}
*/

artifacts {
    main jar
    tests testJar
    archives sourcesJar, javadocJar
}

test {
    enabled = false
    jvmArgs "-javaagent:" + configurations.alpnagent.asPath
    systemProperty 'path.home', projectDir.absolutePath
    testLogging {
        showStandardStreams = true
        exceptionFormat = 'full'
    }
}

randomizedTest {
    enabled = false
}

esTest {
    // test with the jars, not the classes, for security manager
    // classpath = files(configurations.testRuntime) + configurations.main.artifacts.files + configurations.tests.artifacts.files
    systemProperty 'tests.security.manager', 'true'
    // maybe we like some extra security policy for our code
    systemProperty 'tests.security.policy', '/extra-security.policy'
}
esTest.dependsOn jar, testJar
@@ -34,7 +34,7 @@ public class ExtendedNodeClient extends AbstractExtendedClient {
                 .put("node.data", false)
                 .build();
         logger.info("creating node client on {} with effective settings {}",
-                version, effectiveSettings.toString());
+                version, effectiveSettings.getAsMap());
         Collection<Class<? extends Plugin>> plugins = Collections.emptyList();
         this.node = new BulkNode(new Environment(effectiveSettings), plugins);
         try {
@@ -48,16 +48,11 @@ public class ExtendedNodeClient extends AbstractExtendedClient {
     }
 
     @Override
-    public void close() throws IOException {
-        super.close();
-        try {
+    protected void closeClient() {
         if (node != null) {
             logger.debug("closing node...");
             node.close();
         }
-        } catch (Exception e) {
-            logger.error(e.getMessage(), e);
-        }
     }
 
     private static class BulkNode extends Node {
@@ -1,30 +0,0 @@
package org.elasticsearch.node;

import org.elasticsearch.Version;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.node.internal.InternalSettingsPreparer;
import org.elasticsearch.plugins.Plugin;

import java.util.ArrayList;
import java.util.Collection;

public class MockNode extends Node {

    public MockNode(Settings settings) {
        super(settings);
    }

    public MockNode(Settings settings, Collection<Class<? extends Plugin>> classpathPlugins) {
        super(InternalSettingsPreparer.prepareEnvironment(settings, null), Version.CURRENT, classpathPlugins);
    }

    public MockNode(Settings settings, Class<? extends Plugin> classpathPlugin) {
        this(settings, list(classpathPlugin));
    }

    private static Collection<Class<? extends Plugin>> list(Class<? extends Plugin> classpathPlugin) {
        Collection<Class<? extends Plugin>> list = new ArrayList<>();
        list.add(classpathPlugin);
        return list;
    }
}
@@ -1,49 +0,0 @@
package org.xbib.elx.node;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;

public class ClusterBlockTest extends NodeTestUtils {

    private static final Logger logger = LogManager.getLogger("test");

    @Before
    public void startNodes() {
        try {
            setClusterName();
            startNode("1");
            // do not wait for green health state
            logger.info("ready");
        } catch (Throwable t) {
            logger.error("startNodes failed", t);
        }
    }

    @Override
    protected Settings getNodeSettings() {
        return Settings.settingsBuilder()
                .put(super.getNodeSettings())
                .put("discovery.zen.minimum_master_nodes", 2) // block until we have two nodes
                .build();
    }

    @Test(expected = ClusterBlockException.class)
    public void testClusterBlock() throws Exception {
        Client client = client("1");
        XContentBuilder builder = XContentFactory.jsonBuilder().startObject().field("field1", "value1").endObject();
        IndexRequestBuilder irb = client.prepareIndex("test", "test", "1").setSource(builder);
        BulkRequestBuilder brb = client.prepareBulk();
        brb.add(irb);
        brb.execute().actionGet();
    }
}
@@ -1,59 +0,0 @@
package org.xbib.elx.node;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.search.SearchAction;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.client.transport.NoNodeAvailableException;
import org.junit.Ignore;
import org.junit.Test;
import org.xbib.elx.common.ClientBuilder;
import org.xbib.elx.common.Parameters;

import java.util.concurrent.TimeUnit;

import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.junit.Assert.*;

public class DuplicateIDTest extends NodeTestUtils {

    private static final Logger logger = LogManager.getLogger(DuplicateIDTest.class.getSimpleName());

    private static final Long MAX_ACTIONS_PER_REQUEST = 1000L;

    private static final Long ACTIONS = 12345L;

    @Test
    public void testDuplicateDocIDs() throws Exception {
        long numactions = ACTIONS;
        final ExtendedNodeClient client = ClientBuilder.builder(client("1"))
                .provider(ExtendedNodeClientProvider.class)
                .put(Parameters.MAX_ACTIONS_PER_REQUEST.name(), MAX_ACTIONS_PER_REQUEST)
                .build();
        try {
            client.newIndex("test");
            for (int i = 0; i < ACTIONS; i++) {
                client.index("test", randomString(1), false, "{ \"name\" : \"" + randomString(32) + "\"}");
            }
            client.flush();
            client.waitForResponses(30L, TimeUnit.SECONDS);
            client.refreshIndex("test");
            SearchRequestBuilder searchRequestBuilder = new SearchRequestBuilder(client.getClient(), SearchAction.INSTANCE)
                    .setIndices("test")
                    .setTypes("test")
                    .setQuery(matchAllQuery());
            long hits = searchRequestBuilder.execute().actionGet().getHits().getTotalHits();
            logger.info("hits = {}", hits);
            assertTrue(hits < ACTIONS);
        } catch (NoNodeAvailableException e) {
            logger.warn("skipping, no node available");
        } finally {
            client.close();
            assertEquals(numactions, client.getBulkMetric().getSucceeded().getCount());
            if (client.getBulkController().getLastBulkError() != null) {
                logger.error("error", client.getBulkController().getLastBulkError());
            }
            assertNull(client.getBulkController().getLastBulkError());
        }
    }
}
@@ -1,77 +0,0 @@
package org.xbib.elx.node;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.client.transport.NoNodeAvailableException;
import org.elasticsearch.index.query.QueryBuilders;
import org.junit.Ignore;
import org.junit.Test;
import org.xbib.elx.common.ClientBuilder;

import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;

import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;

@Ignore
public class IndexShiftTest extends NodeTestUtils {

    private static final Logger logger = LogManager.getLogger(IndexShiftTest.class.getSimpleName());

    @Test
    public void testIndexShift() throws Exception {
        final ExtendedNodeClient client = ClientBuilder.builder(client("1"))
                .provider(ExtendedNodeClientProvider.class)
                .build();
        try {
            client.newIndex("test1234");
            for (int i = 0; i < 1; i++) {
                client.index("test1234", randomString(1), false, "{ \"name\" : \"" + randomString(32) + "\"}");
            }
            client.flush();
            client.refreshIndex("test1234");

            List<String> simpleAliases = Arrays.asList("a", "b", "c");
            client.shiftIndex("test", "test1234", simpleAliases);

            client.newIndex("test5678");
            for (int i = 0; i < 1; i++) {
                client.index("test5678", randomString(1), false, "{ \"name\" : \"" + randomString(32) + "\"}");
            }
            client.flush();
            client.refreshIndex("test5678");

            simpleAliases = Arrays.asList("d", "e", "f");
            client.shiftIndex("test", "test5678", simpleAliases, (builder, index, alias) ->
                    builder.addAlias(index, alias, QueryBuilders.termQuery("my_key", alias)));
            Map<String, String> indexFilters = client.getIndexFilters("test5678");
            logger.info("aliases of index test5678 = {}", indexFilters);
            assertTrue(indexFilters.containsKey("a"));
            assertTrue(indexFilters.containsKey("b"));
            assertTrue(indexFilters.containsKey("c"));
            assertTrue(indexFilters.containsKey("d"));
            assertTrue(indexFilters.containsKey("e"));

            Map<String, String> aliases = client.getIndexFilters(client.resolveAlias("test"));
            logger.info("aliases of alias test = {}", aliases);
            assertTrue(aliases.containsKey("a"));
            assertTrue(aliases.containsKey("b"));
            assertTrue(aliases.containsKey("c"));
            assertTrue(aliases.containsKey("d"));
            assertTrue(aliases.containsKey("e"));

        } catch (NoNodeAvailableException e) {
            logger.warn("skipping, no node available");
        } finally {
            client.waitForResponses(30L, TimeUnit.SECONDS);
            client.close();
            if (client.getBulkController().getLastBulkError() != null) {
                logger.error("error", client.getBulkController().getLastBulkError());
            }
            assertNull(client.getBulkController().getLastBulkError());
        }
    }
}
@@ -1,182 +0,0 @@
package org.xbib.elx.node;

import static org.elasticsearch.common.settings.Settings.settingsBuilder;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchTimeoutException;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse;
import org.elasticsearch.client.support.AbstractClient;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.node.MockNode;
import org.elasticsearch.node.Node;
import org.junit.After;
import org.junit.Before;
import org.xbib.elx.common.util.NetworkUtils;

import java.io.IOException;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.HashMap;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.atomic.AtomicInteger;

public class NodeTestUtils {

    private static final Logger logger = LogManager.getLogger("test");

    private static Random random = new Random();

    private static char[] numbersAndLetters = ("0123456789abcdefghijklmnopqrstuvwxyz").toCharArray();

    private Map<String, Node> nodes = new HashMap<>();

    private Map<String, AbstractClient> clients = new HashMap<>();

    private AtomicInteger counter = new AtomicInteger();

    protected String clusterName;

    private static void deleteFiles() throws IOException {
        Path directory = Paths.get(getHome() + "/data");
        Files.walkFileTree(directory, new SimpleFileVisitor<Path>() {
            @Override
            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                Files.delete(file);
                return FileVisitResult.CONTINUE;
            }

            @Override
            public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException {
                Files.delete(dir);
                return FileVisitResult.CONTINUE;
            }
        });
    }

    @Before
    public void startNodes() {
        try {
            logger.info("starting");
            setClusterName();
            startNode("1");
            try {
                ClusterHealthResponse healthResponse = client("1").execute(ClusterHealthAction.INSTANCE,
                        new ClusterHealthRequest().waitForStatus(ClusterHealthStatus.GREEN)
                                .timeout(TimeValue.timeValueSeconds(30))).actionGet();
                if (healthResponse != null && healthResponse.isTimedOut()) {
                    throw new IOException("cluster state is " + healthResponse.getStatus().name()
                            + ", from here on, everything will fail!");
                }
            } catch (ElasticsearchTimeoutException e) {
                throw new IOException("cluster does not respond to health request, cowardly refusing to continue");
            }
        } catch (Throwable t) {
            logger.error("startNodes failed", t);
        }
    }

    @After
    public void stopNodes() {
        try {
            closeNodes();
        } catch (Exception e) {
            logger.error("can not close nodes", e);
        } finally {
            try {
                deleteFiles();
                logger.info("data files wiped");
                Thread.sleep(2000L); // let OS commit changes
            } catch (IOException e) {
                logger.error(e.getMessage(), e);
            } catch (InterruptedException e) {
                // ignore
            }
        }
    }

    protected void setClusterName() {
        this.clusterName = "test-helper-cluster-"
                + NetworkUtils.getLocalAddress().getHostName()
                + "-" + System.getProperty("user.name")
                + "-" + counter.incrementAndGet();
    }

    protected Settings getNodeSettings() {
        return settingsBuilder()
                .put("cluster.name", clusterName)
                .put("cluster.routing.schedule", "50ms")
                .put("cluster.routing.allocation.disk.threshold_enabled", false)
                .put("discovery.zen.multicast.enabled", true)
                .put("discovery.zen.multicast.ping_timeout", "5s")
                .put("http.enabled", true)
                .put("threadpool.bulk.size", Runtime.getRuntime().availableProcessors())
                .put("threadpool.bulk.queue_size", 16 * Runtime.getRuntime().availableProcessors()) // default is 50, too low
                .put("index.number_of_replicas", 0)
                .put("path.home", getHome())
                .build();
    }

    protected static String getHome() {
        return System.getProperty("path.home", System.getProperty("user.dir"));
    }

    public void startNode(String id) {
        buildNode(id).start();
    }

    public AbstractClient client(String id) {
        return clients.get(id);
    }

    private void closeNodes() {
        logger.info("closing all clients");
        for (AbstractClient client : clients.values()) {
            client.close();
        }
        clients.clear();
        logger.info("closing all nodes");
        for (Node node : nodes.values()) {
            if (node != null) {
                node.close();
            }
        }
        nodes.clear();
        logger.info("all nodes closed");
    }

    private Node buildNode(String id) {
        Settings nodeSettings = settingsBuilder()
                .put(getNodeSettings())
                .put("name", id)
                .build();
        logger.info("settings={}", nodeSettings.getAsMap());
        Node node = new MockNode(nodeSettings);
        AbstractClient client = (AbstractClient) node.client();
        nodes.put(id, node);
        clients.put(id, client);
        logger.info("clients={}", clients);
        return node;
    }

    protected String randomString(int len) {
        final char[] buf = new char[len];
        final int n = numbersAndLetters.length - 1;
        for (int i = 0; i < buf.length; i++) {
            buf[i] = numbersAndLetters[random.nextInt(n)];
        }
        return new String(buf);
    }
}
@@ -1,149 +0,0 @@
package org.xbib.elx.node;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.admin.indices.stats.CommonStats;
import org.elasticsearch.action.admin.indices.stats.IndexShardStats;
import org.elasticsearch.action.admin.indices.stats.IndexStats;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequestBuilder;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
import org.elasticsearch.action.search.SearchAction;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.client.transport.NoNodeAvailableException;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.indexing.IndexingStats;
import org.junit.Ignore;
import org.junit.Test;
import org.xbib.elx.common.ClientBuilder;

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;

import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;

@Ignore
public class ReplicaTest extends NodeTestUtils {

    private static final Logger logger = LogManager.getLogger(ReplicaTest.class.getSimpleName());

    @Test
    public void testReplicaLevel() throws Exception {

        // we need nodes for replica levels
        startNode("2");
        startNode("3");
        startNode("4");

        Settings settingsTest1 = Settings.settingsBuilder()
                .put("index.number_of_shards", 2)
                .put("index.number_of_replicas", 3)
                .build();

        Settings settingsTest2 = Settings.settingsBuilder()
                .put("index.number_of_shards", 2)
                .put("index.number_of_replicas", 1)
                .build();

        final ExtendedNodeClient client = ClientBuilder.builder(client("1"))
                .provider(ExtendedNodeClientProvider.class)
                .build();

        try {
            client.newIndex("test1", settingsTest1, new HashMap<>())
                    .newIndex("test2", settingsTest2, new HashMap<>());
            client.waitForCluster("GREEN", 30L, TimeUnit.SECONDS);
            for (int i = 0; i < 1234; i++) {
                client.index("test1", null, false, "{ \"name\" : \"" + randomString(32) + "\"}");
            }
            for (int i = 0; i < 1234; i++) {
                client.index("test2", null, false, "{ \"name\" : \"" + randomString(32) + "\"}");
            }
            client.flush();
            client.waitForResponses(30L, TimeUnit.SECONDS);
        } catch (NoNodeAvailableException e) {
            logger.warn("skipping, no node available");
        } finally {
            logger.info("refreshing");
            client.refreshIndex("test1");
            client.refreshIndex("test2");
            SearchRequestBuilder searchRequestBuilder = new SearchRequestBuilder(client.getClient(), SearchAction.INSTANCE)
                    .setIndices("test1", "test2")
                    .setQuery(matchAllQuery());
            long hits = searchRequestBuilder.execute().actionGet().getHits().getTotalHits();
            logger.info("query total hits={}", hits);
            assertEquals(2468, hits);
            IndicesStatsRequestBuilder indicesStatsRequestBuilder = new IndicesStatsRequestBuilder(client.getClient(), IndicesStatsAction.INSTANCE)
                    .all();
            IndicesStatsResponse response = indicesStatsRequestBuilder.execute().actionGet();
            for (Map.Entry<String, IndexStats> m : response.getIndices().entrySet()) {
                IndexStats indexStats = m.getValue();
                CommonStats commonStats = indexStats.getTotal();
                IndexingStats indexingStats = commonStats.getIndexing();
                IndexingStats.Stats stats = indexingStats.getTotal();
                logger.info("index {}: count = {}", m.getKey(), stats.getIndexCount());
                for (Map.Entry<Integer, IndexShardStats> me : indexStats.getIndexShards().entrySet()) {
                    IndexShardStats indexShardStats = me.getValue();
                    CommonStats commonShardStats = indexShardStats.getTotal();
                    logger.info("shard {} count = {}", me.getKey(),
                            commonShardStats.getIndexing().getTotal().getIndexCount());
                }
            }
            try {
                client.deleteIndex("test1")
                        .deleteIndex("test2");
            } catch (Exception e) {
                logger.error("delete index failed, ignored. Reason:", e);
            }
            client.close();
            if (client.getBulkController().getLastBulkError() != null) {
                logger.error("error", client.getBulkController().getLastBulkError());
            }
            assertNull(client.getBulkController().getLastBulkError());
        }
    }

    @Test
    public void testUpdateReplicaLevel() throws Exception {

        long numberOfShards = 2;
        int replicaLevel = 3;

        // we need 3 nodes for replica level 3
        startNode("2");
        startNode("3");

        Settings settings = Settings.settingsBuilder()
                .put("index.number_of_shards", numberOfShards)
                .put("index.number_of_replicas", 0)
                .build();

        final ExtendedNodeClient client = ClientBuilder.builder(client("1"))
                .provider(ExtendedNodeClientProvider.class)
                .build();

        try {
            client.newIndex("replicatest", settings, new HashMap<>());
            client.waitForCluster("GREEN", 30L, TimeUnit.SECONDS);
            for (int i = 0; i < 12345; i++) {
                client.index("replicatest", null, false, "{ \"name\" : \"" + randomString(32) + "\"}");
            }
            client.flush();
            client.waitForResponses(30L, TimeUnit.SECONDS);
            client.updateReplicaLevel("replicatest", replicaLevel, 30L, TimeUnit.SECONDS);
            assertEquals(replicaLevel, client.getReplicaLevel("replicatest"));
        } catch (NoNodeAvailableException e) {
            logger.warn("skipping, no node available");
        } finally {
            client.close();
            if (client.getBulkController().getLastBulkError() != null) {
                logger.error("error", client.getBulkController().getLastBulkError());
            }
            assertNull(client.getBulkController().getLastBulkError());
        }
    }

}
@@ -1,9 +1,4 @@
-package org.xbib.elx.node;
-
-import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
+package org.xbib.elx.node.test;

 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
@@ -17,75 +12,55 @@ import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.util.concurrent.EsExecutors;
 import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.json.JsonXContent;
 import org.elasticsearch.index.query.QueryBuilders;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
 import org.xbib.elx.common.ClientBuilder;
 import org.xbib.elx.common.Parameters;
+import org.xbib.elx.node.ExtendedNodeClient;
+import org.xbib.elx.node.ExtendedNodeClientProvider;

 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;

-public class ClientTest extends NodeTestUtils {
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;

-    private static final Logger logger = LogManager.getLogger(ClientTest.class.getSimpleName());
+@ExtendWith(TestExtension.class)
+class ClientTest {

-    private static final Long ACTIONS = 25000L;
+    private static final Logger logger = LogManager.getLogger(ClientTest.class.getName());

-    private static final Long MAX_ACTIONS_PER_REQUEST = 1000L;
+    private static final Long ACTIONS = 1000L;

-    @Before
-    public void startNodes() {
-        try {
-            super.startNodes();
-            startNode("2");
-        } catch (Throwable t) {
-            logger.error("startNodes failed", t);
-        }
-    }
+    private static final Long MAX_ACTIONS_PER_REQUEST = 100L;
+
+    private final TestExtension.Helper helper;
+
+    ClientTest(TestExtension.Helper helper) {
+        this.helper = helper;
+    }

     @Test
-    public void testSingleDoc() throws Exception {
-        final ExtendedNodeClient client = ClientBuilder.builder(client("1"))
-                .provider(ExtendedNodeClientProvider.class)
-                .put(Parameters.MAX_ACTIONS_PER_REQUEST.name(), MAX_ACTIONS_PER_REQUEST)
-                .put(Parameters.FLUSH_INTERVAL.name(), TimeValue.timeValueSeconds(30))
-                .build();
-        try {
-            client.newIndex("test");
-            client.index("test", "1", true, "{ \"name\" : \"Hello World\"}"); // single doc ingest
-            client.flush();
-            client.waitForResponses(30L, TimeUnit.SECONDS);
-        } catch (NoNodeAvailableException e) {
-            logger.warn("skipping, no node available");
-        } finally {
-            assertEquals(1, client.getBulkMetric().getSucceeded().getCount());
-            if (client.getBulkController().getLastBulkError() != null) {
-                logger.error("error", client.getBulkController().getLastBulkError());
-            }
-            assertNull(client.getBulkController().getLastBulkError());
-            client.close();
-        }
-    }
-
-    @Test
-    public void testNewIndex() throws Exception {
-        final ExtendedNodeClient client = ClientBuilder.builder(client("1"))
+    void testNewIndex() throws Exception {
+        final ExtendedNodeClient client = ClientBuilder.builder(helper.client("1"))
                 .provider(ExtendedNodeClientProvider.class)
                 .put(Parameters.FLUSH_INTERVAL.name(), TimeValue.timeValueSeconds(5))
                 .build();
-        client.newIndex("test");
+        client.newIndex("test1");
         client.close();
     }

     @Test
-    public void testMapping() throws Exception {
-        final ExtendedNodeClient client = ClientBuilder.builder(client("1"))
+    void testMapping() throws Exception {
+        final ExtendedNodeClient client = ClientBuilder.builder(helper.client("1"))
                 .provider(ExtendedNodeClientProvider.class)
                 .put(Parameters.FLUSH_INTERVAL.name(), TimeValue.timeValueSeconds(5))
                 .build();
-        XContentBuilder builder = jsonBuilder()
+        XContentBuilder builder = JsonXContent.contentBuilder()
                 .startObject()
                 .startObject("doc")
                 .startObject("properties")
@@ -95,41 +70,68 @@ public class ClientTest extends NodeTestUtils {
                 .endObject()
                 .endObject()
                 .endObject();
-        client.newIndex("test", Settings.EMPTY, builder.string());
-        GetMappingsRequest getMappingsRequest = new GetMappingsRequest().indices("test");
+        client.newIndex("test2", Settings.EMPTY, builder.string());
+        GetMappingsRequest getMappingsRequest = new GetMappingsRequest().indices("test2");
         GetMappingsResponse getMappingsResponse =
                 client.getClient().execute(GetMappingsAction.INSTANCE, getMappingsRequest).actionGet();
         logger.info("mappings={}", getMappingsResponse.getMappings());
-        assertTrue(getMappingsResponse.getMappings().get("test").containsKey("doc"));
+        assertTrue(getMappingsResponse.getMappings().get("test2").containsKey("doc"));
         client.close();
     }

     @Test
-    public void testRandomDocs() throws Exception {
-        long numactions = ACTIONS;
-        final ExtendedNodeClient client = ClientBuilder.builder(client("1"))
+    void testSingleDoc() throws Exception {
+        final ExtendedNodeClient client = ClientBuilder.builder(helper.client("1"))
                 .provider(ExtendedNodeClientProvider.class)
                 .put(Parameters.MAX_ACTIONS_PER_REQUEST.name(), MAX_ACTIONS_PER_REQUEST)
-                .put(Parameters.FLUSH_INTERVAL.name(), TimeValue.timeValueSeconds(60))
+                .put(Parameters.FLUSH_INTERVAL.name(), TimeValue.timeValueSeconds(30))
                 .build();
         try {
-            client.newIndex("test");
-            for (int i = 0; i < ACTIONS; i++) {
-                client.index("test", null, false, "{ \"name\" : \"" + randomString(32) + "\"}");
-            }
+            client.newIndex("test3");
+            client.index("test3", "1", true, "{ \"name\" : \"Hello World\"}"); // single doc ingest
             client.flush();
             client.waitForResponses(30L, TimeUnit.SECONDS);
         } catch (NoNodeAvailableException e) {
             logger.warn("skipping, no node available");
         } finally {
-            assertEquals(numactions, client.getBulkMetric().getSucceeded().getCount());
+            assertEquals(1, client.getBulkController().getBulkMetric().getSucceeded().getCount());
             if (client.getBulkController().getLastBulkError() != null) {
                 logger.error("error", client.getBulkController().getLastBulkError());
             }
             assertNull(client.getBulkController().getLastBulkError());
-            client.refreshIndex("test");
             client.close();
         }
     }

+    @Test
+    void testRandomDocs() throws Exception {
+        long numactions = ACTIONS;
+        final ExtendedNodeClient client = ClientBuilder.builder(helper.client("1"))
+                .provider(ExtendedNodeClientProvider.class)
+                .put(Parameters.MAX_ACTIONS_PER_REQUEST.name(), MAX_ACTIONS_PER_REQUEST)
+                .put(Parameters.FLUSH_INTERVAL.name(), TimeValue.timeValueSeconds(60))
+                .build();
+        try {
+            client.newIndex("test4");
+            for (int i = 0; i < ACTIONS; i++) {
+                client.index("test4", null, false,
+                        "{ \"name\" : \"" + helper.randomString(32) + "\"}");
+            }
+            client.flush();
+            client.waitForResponses(60L, TimeUnit.SECONDS);
+        } catch (NoNodeAvailableException e) {
+            logger.warn("skipping, no node available");
+        } finally {
+            assertEquals(numactions, client.getBulkController().getBulkMetric().getSucceeded().getCount());
+            if (client.getBulkController().getLastBulkError() != null) {
+                logger.error("error", client.getBulkController().getLastBulkError());
+            }
+            assertNull(client.getBulkController().getLastBulkError());
+            client.refreshIndex("test4");
             SearchRequestBuilder searchRequestBuilder = new SearchRequestBuilder(client.getClient(), SearchAction.INSTANCE)
-                    .setQuery(QueryBuilders.matchAllQuery()).setSize(0);
+                    .setIndices("test4")
+                    .setQuery(QueryBuilders.matchAllQuery())
+                    .setSize(0);
             assertEquals(numactions,
                     searchRequestBuilder.execute().actionGet().getHits().getTotalHits());
             client.close();
@@ -137,37 +139,38 @@ public class ClientTest extends NodeTestUtils {
     }

     @Test
-    public void testThreadedRandomDocs() throws Exception {
+    void testThreadedRandomDocs() throws Exception {
         int maxthreads = Runtime.getRuntime().availableProcessors();
-        Long maxActionsPerRequest = MAX_ACTIONS_PER_REQUEST;
-        final Long actions = ACTIONS;
+        long maxActionsPerRequest = MAX_ACTIONS_PER_REQUEST;
+        final long actions = ACTIONS;
         logger.info("NodeClient max={} maxactions={} maxloop={}", maxthreads, maxActionsPerRequest, actions);
-        final ExtendedNodeClient client = ClientBuilder.builder(client("1"))
+        final ExtendedNodeClient client = ClientBuilder.builder(helper.client("1"))
                 .provider(ExtendedNodeClientProvider.class)
-                .put(Parameters.MAX_CONCURRENT_REQUESTS.name(), maxthreads * 2)
+                .put(Parameters.MAX_CONCURRENT_REQUESTS.name(), maxthreads)
                 .put(Parameters.MAX_ACTIONS_PER_REQUEST.name(), maxActionsPerRequest)
                 .put(Parameters.FLUSH_INTERVAL.name(), TimeValue.timeValueSeconds(60))
                 .build();
         try {
-            client.newIndex("test")
-                    .startBulk("test", -1, 1000);
-            ThreadPoolExecutor pool = EsExecutors.newFixed("bulk-nodeclient-test", maxthreads, 30,
-                    EsExecutors.daemonThreadFactory("bulk-nodeclient-test"));
+            client.newIndex("test5")
+                    .startBulk("test5", -1, 1000);
+            ThreadPoolExecutor pool = EsExecutors.newFixed("nodeclient-test", maxthreads, 30,
+                    EsExecutors.daemonThreadFactory("nodeclient-test"));
             final CountDownLatch latch = new CountDownLatch(maxthreads);
             for (int i = 0; i < maxthreads; i++) {
                 pool.execute(() -> {
                     for (int i1 = 0; i1 < actions; i1++) {
-                        client.index("test", null, false, "{ \"name\" : \"" + randomString(32) + "\"}");
+                        client.index("test5", null, false,
+                                "{ \"name\" : \"" + helper.randomString(32) + "\"}");
                     }
                     latch.countDown();
                 });
             }
             logger.info("waiting for latch...");
-            if (latch.await(5, TimeUnit.MINUTES)) {
+            if (latch.await(60, TimeUnit.SECONDS)) {
                 logger.info("flush...");
                 client.flush();
                 client.waitForResponses(60L, TimeUnit.SECONDS);
-                logger.info("got all responses, pool shutdown...");
+                logger.info("pool shutdown...");
                 pool.shutdown();
                 logger.info("pool is shut down");
             } else {
@@ -176,15 +179,17 @@ public class ClientTest extends NodeTestUtils {
         } catch (NoNodeAvailableException e) {
             logger.warn("skipping, no node available");
         } finally {
-            client.stopBulk("test", 30L, TimeUnit.SECONDS);
-            assertEquals(maxthreads * actions, client.getBulkMetric().getSucceeded().getCount());
+            client.stopBulk("test5", 60L, TimeUnit.SECONDS);
+            assertEquals(maxthreads * actions, client.getBulkController().getBulkMetric().getSucceeded().getCount());
             if (client.getBulkController().getLastBulkError() != null) {
                 logger.error("error", client.getBulkController().getLastBulkError());
             }
             assertNull(client.getBulkController().getLastBulkError());
-            client.refreshIndex("test");
+            client.refreshIndex("test5");
             SearchRequestBuilder searchRequestBuilder = new SearchRequestBuilder(client.getClient(), SearchAction.INSTANCE)
-                    .setQuery(QueryBuilders.matchAllQuery()).setSize(0);
+                    .setIndices("test5")
+                    .setQuery(QueryBuilders.matchAllQuery())
+                    .setSize(0);
             assertEquals(maxthreads * actions,
                     searchRequestBuilder.execute().actionGet().getHits().getTotalHits());
             client.close();
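The hunks above replace the JUnit 4 pattern (test classes extending NodeTestUtils) with JUnit 5 constructor injection. A minimal sketch of how a further test class would pick up the shared helper; the class name and index name are hypothetical, and the imports are the same ones ClientTest uses above — the injection itself is performed by the TestExtension added later in this commit:

    @ExtendWith(TestExtension.class)
    class AnotherClientTest {

        private final TestExtension.Helper helper;

        AnotherClientTest(TestExtension.Helper helper) {
            this.helper = helper; // resolved by TestExtension's ParameterResolver
        }

        @Test
        void testIndexing() throws Exception {
            final ExtendedNodeClient client = ClientBuilder.builder(helper.client("1"))
                    .provider(ExtendedNodeClientProvider.class)
                    .build();
            try {
                client.newIndex("another_test"); // hypothetical index name
                client.index("another_test", "1", true, "{ \"name\" : \"example\"}");
                client.flush();
                client.waitForResponses(30L, TimeUnit.SECONDS);
            } finally {
                client.close();
            }
        }
    }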
@@ -0,0 +1,73 @@
package org.xbib.elx.node.test;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.search.SearchAction;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.client.transport.NoNodeAvailableException;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.xbib.elx.common.ClientBuilder;
import org.xbib.elx.common.Parameters;
import org.xbib.elx.node.ExtendedNodeClient;
import org.xbib.elx.node.ExtendedNodeClientProvider;

import java.util.concurrent.TimeUnit;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;

@ExtendWith(TestExtension.class)
class DuplicateIDTest {

    private static final Logger logger = LogManager.getLogger(DuplicateIDTest.class.getName());

    private static final Long MAX_ACTIONS_PER_REQUEST = 10L;

    private static final Long ACTIONS = 50L;

    private final TestExtension.Helper helper;

    DuplicateIDTest(TestExtension.Helper helper) {
        this.helper = helper;
    }

    @Test
    void testDuplicateDocIDs() throws Exception {
        long numactions = ACTIONS;
        final ExtendedNodeClient client = ClientBuilder.builder(helper.client("1"))
                .provider(ExtendedNodeClientProvider.class)
                .put(Parameters.MAX_ACTIONS_PER_REQUEST.name(), MAX_ACTIONS_PER_REQUEST)
                .build();
        try {
            client.newIndex("test_dup");
            for (int i = 0; i < ACTIONS; i++) {
                client.index("test_dup", helper.randomString(1), false,
                        "{ \"name\" : \"" + helper.randomString(32) + "\"}");
            }
            client.flush();
            client.waitForResponses(30L, TimeUnit.SECONDS);
            client.refreshIndex("test_dup");
            SearchSourceBuilder builder = new SearchSourceBuilder();
            builder.query(QueryBuilders.matchAllQuery());
            SearchRequest searchRequest = new SearchRequest();
            searchRequest.indices("test_dup");
            searchRequest.source(builder);
            long hits = helper.client("1").execute(SearchAction.INSTANCE, searchRequest).actionGet().getHits().getTotalHits();
            logger.info("hits = {}", hits);
            assertTrue(hits < ACTIONS);
        } catch (NoNodeAvailableException e) {
            logger.warn("skipping, no node available");
        } finally {
            client.close();
            assertEquals(numactions, client.getBulkController().getBulkMetric().getSucceeded().getCount());
            if (client.getBulkController().getLastBulkError() != null) {
                logger.error("error", client.getBulkController().getLastBulkError());
            }
            assertNull(client.getBulkController().getLastBulkError());
        }
    }
}
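Why hits < ACTIONS must hold in the test above: document IDs come from helper.randomString(1), and the generator (shown in TestExtension below) draws from nextInt(numbersAndLetters.length - 1), so at most 35 distinct one-character IDs exist. Fifty index actions over at most 35 IDs necessarily collide, and later documents overwrite earlier ones. A short sanity check on that arithmetic:

    // 36 characters in the alphabet, but nextInt(36 - 1) never selects the last one.
    int idSpace = "0123456789abcdefghijklmnopqrstuvwxyz".length() - 1; // 35
    long actions = 50L;
    // pigeonhole principle: more actions than distinct IDs guarantees duplicates
    assert actions > idSpace;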
@@ -0,0 +1,85 @@
package org.xbib.elx.node.test;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsAction;
import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequest;
import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsResponse;
import org.elasticsearch.client.transport.NoNodeAvailableException;
import org.elasticsearch.common.settings.Settings;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.xbib.elx.api.IndexPruneResult;
import org.xbib.elx.common.ClientBuilder;
import org.xbib.elx.node.ExtendedNodeClient;
import org.xbib.elx.node.ExtendedNodeClientProvider;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;

@ExtendWith(TestExtension.class)
class IndexPruneTest {

    private static final Logger logger = LogManager.getLogger(IndexPruneTest.class.getName());

    private final TestExtension.Helper helper;

    IndexPruneTest(TestExtension.Helper helper) {
        this.helper = helper;
    }

    @Test
    void testPrune() throws IOException {
        final ExtendedNodeClient client = ClientBuilder.builder(helper.client("1"))
                .provider(ExtendedNodeClientProvider.class)
                .build();
        try {
            Settings settings = Settings.builder()
                    .put("index.number_of_shards", 1)
                    .put("index.number_of_replicas", 0)
                    .build();
            client.newIndex("test_prune1", settings);
            client.shiftIndex("test_prune", "test_prune1", Collections.emptyList());
            client.newIndex("test_prune2", settings);
            client.shiftIndex("test_prune", "test_prune2", Collections.emptyList());
            client.newIndex("test_prune3", settings);
            client.shiftIndex("test_prune", "test_prune3", Collections.emptyList());
            client.newIndex("test_prune4", settings);
            client.shiftIndex("test_prune", "test_prune4", Collections.emptyList());
            IndexPruneResult indexPruneResult =
                    client.pruneIndex("test_prune", "test_prune4", 2, 2, true);
            assertTrue(indexPruneResult.getDeletedIndices().contains("test_prune1"));
            assertTrue(indexPruneResult.getDeletedIndices().contains("test_prune2"));
            assertFalse(indexPruneResult.getDeletedIndices().contains("test_prune3"));
            assertFalse(indexPruneResult.getDeletedIndices().contains("test_prune4"));
            List<Boolean> list = new ArrayList<>();
            for (String index : Arrays.asList("test_prune1", "test_prune2", "test_prune3", "test_prune4")) {
                IndicesExistsRequest indicesExistsRequest = new IndicesExistsRequest();
                indicesExistsRequest.indices(new String[] { index });
                IndicesExistsResponse indicesExistsResponse =
                        client.getClient().execute(IndicesExistsAction.INSTANCE, indicesExistsRequest).actionGet();
                list.add(indicesExistsResponse.isExists());
            }
            logger.info(list);
            assertFalse(list.get(0));
            assertFalse(list.get(1));
            assertTrue(list.get(2));
            assertTrue(list.get(3));
        } catch (NoNodeAvailableException e) {
            logger.warn("skipping, no node available");
        } finally {
            client.close();
            if (client.getBulkController().getLastBulkError() != null) {
                logger.error("error", client.getBulkController().getLastBulkError());
            }
            assertNull(client.getBulkController().getLastBulkError());
        }
    }
}
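A reading of the pruneIndex call exercised above, inferred from the test's assertions rather than from API documentation: pruneIndex(alias, protectedIndex, delta, mintokeep, perform) walks the index generations behind the alias, keeps the protected index plus the newest survivors, and deletes the older generations. A sketch of that inferred contract:

    // Assumed semantics: with delta = 2 and mintokeep = 2, the two newest
    // generations test_prune3 and test_prune4 survive, the older ones are deleted.
    IndexPruneResult result = client.pruneIndex("test_prune", "test_prune4", 2, 2, true);
    assert result.getDeletedIndices().contains("test_prune1");
    assert result.getDeletedIndices().contains("test_prune2");
    assert !result.getDeletedIndices().contains("test_prune3");
    assert !result.getDeletedIndices().contains("test_prune4");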
@@ -0,0 +1,118 @@
package org.xbib.elx.node.test;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
import org.elasticsearch.client.transport.NoNodeAvailableException;
import org.elasticsearch.cluster.metadata.AliasAction;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.query.QueryBuilders;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.xbib.elx.api.IndexShiftResult;
import org.xbib.elx.common.ClientBuilder;
import org.xbib.elx.node.ExtendedNodeClient;
import org.xbib.elx.node.ExtendedNodeClientProvider;

import java.util.Arrays;
import java.util.Map;
import java.util.concurrent.TimeUnit;

import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;

@ExtendWith(TestExtension.class)
class IndexShiftTest {

    private static final Logger logger = LogManager.getLogger(IndexShiftTest.class.getName());

    private final TestExtension.Helper helper;

    IndexShiftTest(TestExtension.Helper helper) {
        this.helper = helper;
    }

    @Test
    void testIndexShift() throws Exception {
        final ExtendedNodeClient client = ClientBuilder.builder(helper.client("1"))
                .provider(ExtendedNodeClientProvider.class)
                .build();
        try {
            Settings settings = Settings.builder()
                    .put("index.number_of_shards", 1)
                    .put("index.number_of_replicas", 0)
                    .build();
            client.newIndex("test1234", settings);
            for (int i = 0; i < 1; i++) {
                client.index("test1234", helper.randomString(1), false,
                        "{ \"name\" : \"" + helper.randomString(32) + "\"}");
            }
            client.flush();
            client.waitForResponses(30L, TimeUnit.SECONDS);
            IndexShiftResult indexShiftResult =
                    client.shiftIndex("test_shift", "test1234", Arrays.asList("a", "b", "c"));
            assertTrue(indexShiftResult.getNewAliases().contains("a"));
            assertTrue(indexShiftResult.getNewAliases().contains("b"));
            assertTrue(indexShiftResult.getNewAliases().contains("c"));
            assertTrue(indexShiftResult.getMovedAliases().isEmpty());

            Map<String, String> aliases = client.getAliases("test1234");
            assertTrue(aliases.containsKey("a"));
            assertTrue(aliases.containsKey("b"));
            assertTrue(aliases.containsKey("c"));
            assertTrue(aliases.containsKey("test_shift"));

            String resolved = client.resolveAlias("test_shift");
            aliases = client.getAliases(resolved);
            assertTrue(aliases.containsKey("a"));
            assertTrue(aliases.containsKey("b"));
            assertTrue(aliases.containsKey("c"));
            assertTrue(aliases.containsKey("test_shift"));

            client.newIndex("test5678", settings);
            for (int i = 0; i < 1; i++) {
                client.index("test5678", helper.randomString(1), false,
                        "{ \"name\" : \"" + helper.randomString(32) + "\"}");
            }
            client.flush();
            client.waitForResponses(30L, TimeUnit.SECONDS);

            indexShiftResult = client.shiftIndex("test_shift", "test5678", Arrays.asList("d", "e", "f"),
                    (request, index, alias) -> request.addAliasAction(new IndicesAliasesRequest.AliasActions(AliasAction.Type.ADD,
                            index, alias).filter(QueryBuilders.termQuery("my_key", alias)))
            );
            assertTrue(indexShiftResult.getNewAliases().contains("d"));
            assertTrue(indexShiftResult.getNewAliases().contains("e"));
            assertTrue(indexShiftResult.getNewAliases().contains("f"));
            assertTrue(indexShiftResult.getMovedAliases().contains("a"));
            assertTrue(indexShiftResult.getMovedAliases().contains("b"));
            assertTrue(indexShiftResult.getMovedAliases().contains("c"));

            aliases = client.getAliases("test5678");
            assertTrue(aliases.containsKey("a"));
            assertTrue(aliases.containsKey("b"));
            assertTrue(aliases.containsKey("c"));
            assertTrue(aliases.containsKey("d"));
            assertTrue(aliases.containsKey("e"));
            assertTrue(aliases.containsKey("f"));

            resolved = client.resolveAlias("test_shift");
            aliases = client.getAliases(resolved);
            assertTrue(aliases.containsKey("a"));
            assertTrue(aliases.containsKey("b"));
            assertTrue(aliases.containsKey("c"));
            assertTrue(aliases.containsKey("d"));
            assertTrue(aliases.containsKey("e"));
            assertTrue(aliases.containsKey("f"));

        } catch (NoNodeAvailableException e) {
            logger.warn("skipping, no node available");
        } finally {
            client.close();
            if (client.getBulkController().getLastBulkError() != null) {
                logger.error("error", client.getBulkController().getLastBulkError());
            }
            assertNull(client.getBulkController().getLastBulkError());
        }
    }
}
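The four-argument shiftIndex overload above lets the caller attach filtered aliases while shifting. What that buys at query time is standard Elasticsearch filtered-alias behavior, sketched here as an assumed usage rather than part of this commit: searching through alias "d" applies the term filter my_key = "d" on top of whatever query is sent.

    // Assumed usage: the filtered alias restricts results to matching documents.
    SearchRequestBuilder srb = new SearchRequestBuilder(client.getClient(), SearchAction.INSTANCE)
            .setIndices("d") // alias carrying filter termQuery("my_key", "d")
            .setQuery(QueryBuilders.matchAllQuery());
    long filteredHits = srb.execute().actionGet().getHits().getTotalHits();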
12 elx-node/src/test/java/org/xbib/elx/node/test/MockNode.java Normal file

@@ -0,0 +1,12 @@
package org.xbib.elx.node.test;

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.node.Node;

public class MockNode extends Node {

    public MockNode(Settings settings) {
        super(settings);
    }

}
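The new MockNode exists only to give the test package a concrete, constructible Node. The Helper in TestExtension below boots it roughly like this (a sketch; the settings values here are assumptions, not taken from the commit):

    Settings settings = Settings.builder()
            .put("cluster.name", "test-cluster-abc")  // assumed cluster name
            .put("path.home", "/tmp/elx-test")        // assumed scratch directory
            .put("node.name", "1")
            .build();
    Node node = new MockNode(settings);
    node.start();                                     // boots the embedded node
    // node.client() is what then backs Helper.client("1")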
@@ -1,59 +1,61 @@
-package org.xbib.elx.node;
+package org.xbib.elx.node.test;

 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 import org.elasticsearch.client.transport.NoNodeAvailableException;
 import org.elasticsearch.common.settings.Settings;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
 import org.xbib.elx.common.ClientBuilder;
 import org.xbib.elx.api.IndexDefinition;
+import org.xbib.elx.node.ExtendedNodeClient;
+import org.xbib.elx.node.ExtendedNodeClientProvider;

 import java.util.concurrent.TimeUnit;

-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNull;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNull;

-public class SmokeTest extends NodeTestUtils {
+@ExtendWith(TestExtension.class)
+class SmokeTest {

-    private static final Logger logger = LogManager.getLogger(SmokeTest.class.getSimpleName());
+    private static final Logger logger = LogManager.getLogger(SmokeTest.class.getName());

+    private final TestExtension.Helper helper;
+
+    SmokeTest(TestExtension.Helper helper) {
+        this.helper = helper;
+    }

     @Test
-    public void smokeTest() throws Exception {
-        final ExtendedNodeClient client = ClientBuilder.builder(client("1"))
+    void smokeTest() throws Exception {
+        final ExtendedNodeClient client = ClientBuilder.builder(helper.client("1"))
                 .provider(ExtendedNodeClientProvider.class)
                 .build();
         try {
-            client.newIndex("test");
-            client.index("test", "1", true, "{ \"name\" : \"Hello World\"}"); // single doc ingest
+            client.newIndex("test_smoke");
+            client.index("test_smoke", "1", true, "{ \"name\" : \"Hello World\"}"); // single doc ingest
             client.flush();
             client.waitForResponses(30, TimeUnit.SECONDS);
-
-            assertEquals(clusterName, client.getClusterName());
-
-            client.checkMapping("test");
-
-            client.update("test", "1", "{ \"name\" : \"Another name\"}");
+            assertEquals(helper.getClusterName(), client.getClusterName());
+            client.checkMapping("test_smoke");
+            client.update("test_smoke", "1", "{ \"name\" : \"Another name\"}");
             client.flush();
-
-            client.waitForRecovery("test", 10L, TimeUnit.SECONDS);
-
-            client.delete("test", "1");
-            client.deleteIndex("test");
-
-            IndexDefinition indexDefinition = client.buildIndexDefinitionFromSettings("test2", Settings.settingsBuilder()
+            client.waitForRecovery("test_smoke", 10L, TimeUnit.SECONDS);
+            client.delete("test_smoke", "1");
+            client.deleteIndex("test_smoke");
+            IndexDefinition indexDefinition = client.buildIndexDefinitionFromSettings("test_smoke_2", Settings.settingsBuilder()
                     .build());
             assertEquals(0, indexDefinition.getReplicaLevel());
             client.newIndex(indexDefinition);
             client.index(indexDefinition.getFullIndexName(), "1", true, "{ \"name\" : \"Hello World\"}");
             client.flush();
             client.updateReplicaLevel(indexDefinition, 2);
-
             int replica = client.getReplicaLevel(indexDefinition);
             assertEquals(2, replica);
-
             client.deleteIndex(indexDefinition);
-            assertEquals(0, client.getBulkMetric().getFailed().getCount());
-            assertEquals(4, client.getBulkMetric().getSucceeded().getCount());
+            assertEquals(0, client.getBulkController().getBulkMetric().getFailed().getCount());
+            assertEquals(4, client.getBulkController().getBulkMetric().getSucceeded().getCount());
         } catch (NoNodeAvailableException e) {
             logger.warn("skipping, no node available");
         } finally {
217 elx-node/src/test/java/org/xbib/elx/node/test/TestExtension.java Normal file

@@ -0,0 +1,217 @@
package org.xbib.elx.node.test;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchTimeoutException;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoAction;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse;
import org.elasticsearch.action.admin.cluster.state.ClusterStateAction;
import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest;
import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.client.support.AbstractClient;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.node.Node;
import org.junit.jupiter.api.extension.AfterEachCallback;
import org.junit.jupiter.api.extension.BeforeEachCallback;
import org.junit.jupiter.api.extension.ExtensionContext;
import org.junit.jupiter.api.extension.ParameterContext;
import org.junit.jupiter.api.extension.ParameterResolutionException;
import org.junit.jupiter.api.extension.ParameterResolver;

import java.io.IOException;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.HashMap;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.atomic.AtomicInteger;

public class TestExtension implements ParameterResolver, BeforeEachCallback, AfterEachCallback {

    private static final Logger logger = LogManager.getLogger("test");

    private static final Random random = new Random();

    private static final char[] numbersAndLetters = ("0123456789abcdefghijklmnopqrstuvwxyz").toCharArray();

    private static final String key = "es-instance";

    private static final AtomicInteger count = new AtomicInteger(0);

    private static final ExtensionContext.Namespace ns =
            ExtensionContext.Namespace.create(TestExtension.class);

    @Override
    public boolean supportsParameter(ParameterContext parameterContext, ExtensionContext extensionContext)
            throws ParameterResolutionException {
        return parameterContext.getParameter().getType().equals(Helper.class);
    }

    @Override
    public Object resolveParameter(ParameterContext parameterContext, ExtensionContext extensionContext)
            throws ParameterResolutionException {
        // initialize new helper here, increase counter
        return extensionContext.getParent().get().getStore(ns)
                .getOrComputeIfAbsent(key + count.incrementAndGet(), key -> create(), Helper.class);
    }

    @Override
    public void beforeEach(ExtensionContext extensionContext) throws Exception {
        Helper helper = extensionContext.getParent().get().getStore(ns)
                .getOrComputeIfAbsent(key + count.get(), key -> create(), Helper.class);
        logger.info("starting cluster with helper " + helper + " at " + helper.getHome());
        helper.startNode("1");
        NodesInfoRequest nodesInfoRequest = new NodesInfoRequest().transport(true);
        NodesInfoResponse response = helper.client("1").execute(NodesInfoAction.INSTANCE, nodesInfoRequest).actionGet();
        Object obj = response.iterator().next().getTransport().getAddress()
                .publishAddress();
        String host = null;
        int port = 0;
        if (obj instanceof InetSocketTransportAddress) {
            InetSocketTransportAddress address = (InetSocketTransportAddress) obj;
            host = address.address().getHostName();
            port = address.address().getPort();
        }
        try {
            ClusterHealthResponse healthResponse = helper.client("1").execute(ClusterHealthAction.INSTANCE,
                    new ClusterHealthRequest().waitForStatus(ClusterHealthStatus.GREEN)
                            .timeout(TimeValue.timeValueSeconds(30))).actionGet();
            if (healthResponse != null && healthResponse.isTimedOut()) {
                throw new IOException("cluster state is " + healthResponse.getStatus().name()
                        + ", from here on, everything will fail!");
            }
        } catch (ElasticsearchTimeoutException e) {
            throw new IOException("cluster does not respond to health request, cowardly refusing to continue");
        }
        ClusterStateRequest clusterStateRequest = new ClusterStateRequest().all();
        ClusterStateResponse clusterStateResponse =
                helper.client("1").execute(ClusterStateAction.INSTANCE, clusterStateRequest).actionGet();
        logger.info("cluster name = {}", clusterStateResponse.getClusterName().value());
        logger.info("host = {} port = {}", host, port);
    }

    @Override
    public void afterEach(ExtensionContext extensionContext) throws Exception {
        Helper helper = extensionContext.getParent().get().getStore(ns)
                .getOrComputeIfAbsent(key + count.get(), key -> create(), Helper.class);
        closeNodes(helper);
        deleteFiles(Paths.get(helper.getHome() + "/data"));
        logger.info("data files wiped");
        Thread.sleep(2000L); // let OS commit changes
    }

    private void closeNodes(Helper helper) throws IOException {
        logger.info("closing all clients");
        for (AbstractClient client : helper.clients.values()) {
            client.close();
        }
        logger.info("closing all nodes");
        for (Node node : helper.nodes.values()) {
            if (node != null) {
                node.close();
            }
        }
        logger.info("all nodes closed");
    }

    private static void deleteFiles(Path directory) throws IOException {
        if (Files.exists(directory)) {
            Files.walkFileTree(directory, new SimpleFileVisitor<Path>() {
                @Override
                public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                    Files.delete(file);
                    return FileVisitResult.CONTINUE;
                }

                @Override
                public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException {
                    Files.delete(dir);
                    return FileVisitResult.CONTINUE;
                }
            });
        }
    }

    private Helper create() {
        Helper helper = new Helper();
        helper.setHome(System.getProperty("path.home") + "/" + helper.randomString(8));
        helper.setClusterName("test-cluster-" + helper.randomString(8));
        logger.info("cluster: " + helper.getClusterName() + " home: " + helper.getHome());
        return helper;
    }

    static class Helper {

        String home;

        String cluster;

        Map<String, Node> nodes = new HashMap<>();

        Map<String, AbstractClient> clients = new HashMap<>();

        void setHome(String home) {
            this.home = home;
        }

        String getHome() {
            return home;
        }

        void setClusterName(String cluster) {
            this.cluster = cluster;
        }

        String getClusterName() {
            return cluster;
        }

        Settings getNodeSettings() {
            return Settings.builder()
                    .put("cluster.name", getClusterName())
                    .put("path.home", getHome())
                    .build();
        }

        void startNode(String id) {
            buildNode(id).start();
        }

        ElasticsearchClient client(String id) {
            return clients.get(id);
        }

        String randomString(int len) {
            final char[] buf = new char[len];
            final int n = numbersAndLetters.length - 1;
            for (int i = 0; i < buf.length; i++) {
                buf[i] = numbersAndLetters[random.nextInt(n)];
            }
            return new String(buf);
        }

        private Node buildNode(String id) {
            Settings nodeSettings = Settings.builder()
                    .put(getNodeSettings())
                    .put("node.name", id)
                    .build();
            Node node = new MockNode(nodeSettings);
            AbstractClient client = (AbstractClient) node.client();
            nodes.put(id, node);
            clients.put(id, client);
            return node;
        }
    }
}
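One operational detail of TestExtension worth noting: create() derives each per-run scratch directory from the path.home system property, so the test JVM has to define it before the extension runs (the removed build script later in this diff did this with systemProperty 'path.home', projectDir.absolutePath). A sketch of an equivalent programmatic setup, with an assumed temp location:

    // Assumption: set before the JUnit platform instantiates TestExtension,
    // e.g. in a launcher, so helper homes land under a writable temp directory.
    System.setProperty("path.home",
            System.getProperty("java.io.tmpdir") + "/elx-node-tests");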
@@ -6,7 +6,7 @@
         </Console>
     </appenders>
     <Loggers>
-        <Root level="debug">
+        <Root level="info">
             <AppenderRef ref="Console" />
         </Root>
     </Loggers>
@@ -1,3 +1,3 @@
 dependencies {
-    compile project(':elx-common')
+    api project(':elx-common')
 }
@@ -1,63 +0,0 @@
buildscript {
    repositories {
        jcenter()
        maven {
            url 'http://xbib.org/repository'
        }
    }
    dependencies {
        classpath "org.xbib.elasticsearch:gradle-plugin-elasticsearch-build:6.2.2.0"
    }
}

apply plugin: 'org.xbib.gradle.plugin.elasticsearch.build'

configurations {
    main
    tests
}

dependencies {
    compile project(':common')
    testCompile "org.xbib.elasticsearch:elasticsearch-test-framework:${project.property('elasticsearch-devkit.version')}"
    testRuntime "org.xbib.elasticsearch:elasticsearch-test-framework:${project.property('elasticsearch-devkit.version')}"
}

jar {
    baseName "${rootProject.name}-transport"
}

task testJar(type: Jar, dependsOn: testClasses) {
    baseName = "${project.archivesBaseName}-tests"
    from sourceSets.test.output
}

artifacts {
    main jar
    tests testJar
    archives sourcesJar, javadocJar
}

esTest {
    enabled = true
    // test with the jars, not the classes, for security manager
    classpath = files(configurations.testRuntime) + configurations.main.artifacts.files + configurations.tests.artifacts.files
    systemProperty 'tests.security.manager', 'true'
    // maybe we like some extra security policy for our code
    systemProperty 'tests.security.policy', '/extra-security.policy'
}
esTest.dependsOn jar, testJar

randomizedTest {
    enabled = false
}

test {
    enabled = false
    jvmArgs "-javaagent:" + configurations.alpnagent.asPath
    systemProperty 'path.home', projectDir.absolutePath
    testLogging {
        showStandardStreams = true
        exceptionFormat = 'full'
    }
}
@@ -57,6 +57,15 @@ public class ExtendedTransportClient extends AbstractExtendedClient {
         return null;
     }

+    @Override
+    protected void closeClient() {
+        if (getClient() != null) {
+            TransportClient client = (TransportClient) getClient();
+            client.close();
+            client.threadPool().shutdown();
+        }
+    }
+
     @Override
     public ExtendedTransportClient init(Settings settings) throws IOException {
         super.init(settings);
@@ -73,18 +82,6 @@ public class ExtendedTransportClient extends AbstractExtendedClient {
         return this;
     }

-    @Override
-    public synchronized void close() throws IOException {
-        super.close();
-        logger.info("closing");
-        if (getClient() != null) {
-            TransportClient client = (TransportClient) getClient();
-            client.close();
-            client.threadPool().shutdown();
-        }
-        logger.info("close completed");
-    }
-
     private Collection<InetSocketTransportAddress> findAddresses(Settings settings) throws IOException {
         final int defaultPort = settings.getAsInt("port", 9300);
         Collection<InetSocketTransportAddress> addresses = new ArrayList<>();
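The two hunks above move transport shutdown out of a synchronized close() override into a closeClient() hook. The AbstractExtendedClient source is not part of this diff, so the following is only an assumed sketch of the template-method contract the refactor implies: the base close() runs the shared shutdown sequence and delegates client-specific teardown to the hook.

    // Hypothetical base-class shape; names other than closeClient() are invented.
    public synchronized void close() throws IOException {
        stopBulkProcessing();   // hypothetical shared bulk shutdown
        closeClient();          // transport-specific teardown shown above
    }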
@@ -1,34 +0,0 @@
package org.elasticsearch.node;

import org.elasticsearch.Version;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.node.internal.InternalSettingsPreparer;
import org.elasticsearch.plugins.Plugin;

import java.util.ArrayList;
import java.util.Collection;

public class MockNode extends Node {

    public MockNode() {
        super(Settings.EMPTY);
    }

    public MockNode(Settings settings) {
        super(settings);
    }

    public MockNode(Settings settings, Collection<Class<? extends Plugin>> classpathPlugins) {
        super(InternalSettingsPreparer.prepareEnvironment(settings, null), Version.CURRENT, classpathPlugins);
    }

    public MockNode(Settings settings, Class<? extends Plugin> classpathPlugin) {
        this(settings, list(classpathPlugin));
    }

    private static Collection<Class<? extends Plugin>> list(Class<? extends Plugin> classpathPlugin) {
        Collection<Class<? extends Plugin>> list = new ArrayList<>();
        list.add(classpathPlugin);
        return list;
    }
}
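This deleted helper sat in the org.elasticsearch.node package, presumably so it could reach Node's protected constructor taking an Environment, Version and plugin collection. A short usage sketch matching NodeTestUtils.buildNode() further down; the plugin variant is a hypothetical placeholder:

    // Sketch of typical MockNode use; SomePlugin would be hypothetical.
    import org.elasticsearch.client.Client;
    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.node.MockNode;
    import org.elasticsearch.node.Node;

    public class EmbeddedNodeSketch {
        public static void main(String[] args) throws Exception {
            Settings settings = Settings.settingsBuilder()
                    .put("cluster.name", "test-cluster")
                    .put("path.home", System.getProperty("user.dir"))
                    .build();
            Node node = new MockNode(settings); // or new MockNode(settings, SomePlugin.class)
            node.start();
            Client client = node.client();
            // ... issue requests against the embedded node ...
            node.close();
        }
    }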
@@ -1 +0,0 @@
package org.elasticsearch.node;
@@ -1,61 +0,0 @@
package org.xbib.elx.transport;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.search.SearchAction;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.client.transport.NoNodeAvailableException;
import org.junit.Test;
import org.xbib.elx.common.ClientBuilder;
import org.xbib.elx.common.Parameters;

import java.util.concurrent.TimeUnit;

import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;

public class DuplicateIDTest extends NodeTestUtils {

    private final static Logger logger = LogManager.getLogger(DuplicateIDTest.class.getSimpleName());

    private final static Long MAX_ACTIONS_PER_REQUEST = 1000L;

    private final static Long ACTIONS = 12345L;

    @Test
    public void testDuplicateDocIDs() throws Exception {
        long numactions = ACTIONS;
        final ExtendedTransportClient client = ClientBuilder.builder()
                .provider(ExtendedTransportClientProvider.class)
                .put(getSettings())
                .put(Parameters.MAX_ACTIONS_PER_REQUEST.name(), MAX_ACTIONS_PER_REQUEST)
                .build();
        try {
            client.newIndex("test");
            for (int i = 0; i < ACTIONS; i++) {
                client.index("test", randomString(1), false, "{ \"name\" : \"" + randomString(32) + "\"}");
            }
            client.flush();
            client.waitForResponses(30L, TimeUnit.SECONDS);
            client.refreshIndex("test");
            SearchRequestBuilder searchRequestBuilder = new SearchRequestBuilder(client.getClient(), SearchAction.INSTANCE)
                    .setIndices("test")
                    .setTypes("test")
                    .setQuery(matchAllQuery());
            long hits = searchRequestBuilder.execute().actionGet().getHits().getTotalHits();
            logger.info("hits = {}", hits);
            assertTrue(hits < ACTIONS);
        } catch (NoNodeAvailableException e) {
            logger.warn("skipping, no node available");
        } finally {
            client.close();
            assertEquals(numactions, client.getBulkMetric().getSucceeded().getCount());
            if (client.getBulkController().getLastBulkError() != null) {
                logger.error("error", client.getBulkController().getLastBulkError());
            }
            assertNull(client.getBulkController().getLastBulkError());
        }
    }
}
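The assertTrue(hits < ACTIONS) assertion holds because the document ids come from randomString(1): a single character from a 36-character alphabet, so at most 36 distinct ids exist and the 12345 index requests mostly overwrite one another. A standalone sanity check of that reasoning:

    // Not part of the diff: verifies the id-collision argument behind the test.
    import java.util.HashSet;
    import java.util.Random;
    import java.util.Set;

    public class IdCollisionCheck {
        public static void main(String[] args) {
            char[] alphabet = "0123456789abcdefghijklmnopqrstuvwxyz".toCharArray();
            Random random = new Random();
            Set<String> ids = new HashSet<>();
            for (int i = 0; i < 12345; i++) {
                ids.add(String.valueOf(alphabet[random.nextInt(alphabet.length)]));
            }
            // prints at most 36, far below 12345
            System.out.println("distinct ids: " + ids.size());
        }
    }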
@@ -1,75 +0,0 @@
package org.xbib.elx.transport;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.client.transport.NoNodeAvailableException;
import org.elasticsearch.index.query.QueryBuilders;
import org.junit.Test;
import org.xbib.elx.common.ClientBuilder;

import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;

import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;

public class IndexShiftTest extends NodeTestUtils {

    private static final Logger logger = LogManager.getLogger(IndexShiftTest.class.getSimpleName());

    @Test
    public void testIndexAlias() throws Exception {
        final ExtendedTransportClient client = ClientBuilder.builder()
                .provider(ExtendedTransportClientProvider.class)
                .put(getSettings()).build();
        try {
            client.newIndex("test1234");
            for (int i = 0; i < 1; i++) {
                client.index("test1234", randomString(1), false, "{ \"name\" : \"" + randomString(32) + "\"}");
            }
            client.flush();
            client.refreshIndex("test1234");

            List<String> simpleAliases = Arrays.asList("a", "b", "c");
            client.shiftIndex("test", "test1234", simpleAliases);

            client.newIndex("test5678");
            for (int i = 0; i < 1; i++) {
                client.index("test5678", randomString(1), false, "{ \"name\" : \"" + randomString(32) + "\"}");
            }
            client.flush();
            client.refreshIndex("test5678");

            simpleAliases = Arrays.asList("d", "e", "f");
            client.shiftIndex("test", "test5678", simpleAliases, (builder, index, alias) ->
                    builder.addAlias(index, alias, QueryBuilders.termQuery("my_key", alias)));
            Map<String, String> indexFilters = client.getIndexFilters("test5678");
            logger.info("index filters of index test5678 = {}", indexFilters);
            assertTrue(indexFilters.containsKey("a"));
            assertTrue(indexFilters.containsKey("b"));
            assertTrue(indexFilters.containsKey("c"));
            assertTrue(indexFilters.containsKey("d"));
            assertTrue(indexFilters.containsKey("e"));

            Map<String, String> aliases = client.getIndexFilters(client.resolveAlias("test"));
            logger.info("aliases of alias test = {}", aliases);
            assertTrue(aliases.containsKey("a"));
            assertTrue(aliases.containsKey("b"));
            assertTrue(aliases.containsKey("c"));
            assertTrue(aliases.containsKey("d"));
            assertTrue(aliases.containsKey("e"));

            client.waitForResponses(30L, TimeUnit.SECONDS);
        } catch (NoNodeAvailableException e) {
            logger.warn("skipping, no node available");
        } finally {
            if (client.getBulkController().getLastBulkError() != null) {
                logger.error("error", client.getBulkController().getLastBulkError());
            }
            assertNull(client.getBulkController().getLastBulkError());
            client.close();
        }
    }
}
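The second shiftIndex(...) call passes a lambda so each extra alias carries a filter; the assertions on "a" through "c" show that aliases already on the previous index travel along to the new one. A sketch of what the filtered aliases amount to against the raw client, assuming the builder handed to the lambda is an IndicesAliasesRequestBuilder (which the addAlias(index, alias, filter) signature suggests) and that client.getClient() exposes the underlying org.elasticsearch.client.Client:

    // Sketch, not project code; elx presumably performs this inside shiftIndex(...),
    // together with moving the aliases already present on the previous index.
    IndicesAliasesRequestBuilder aliasesBuilder =
            client.getClient().admin().indices().prepareAliases();
    for (String alias : Arrays.asList("d", "e", "f")) {
        aliasesBuilder.addAlias("test5678", alias, QueryBuilders.termQuery("my_key", alias));
    }
    aliasesBuilder.execute().actionGet();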
@@ -1,214 +0,0 @@
package org.xbib.elx.transport;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchTimeoutException;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse;
import org.elasticsearch.client.support.AbstractClient;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.node.MockNode;
import org.elasticsearch.node.Node;
import org.junit.After;
import org.junit.Before;
import org.xbib.elx.common.util.NetworkUtils;

import java.io.IOException;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.HashMap;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.atomic.AtomicInteger;

import static org.elasticsearch.common.settings.Settings.settingsBuilder;

public class NodeTestUtils {

    private static final Logger logger = LogManager.getLogger("test");

    private static Random random = new Random();

    private static char[] numbersAndLetters = "0123456789abcdefghijklmnopqrstuvwxyz".toCharArray();

    private Map<String, Node> nodes = new HashMap<>();

    private Map<String, AbstractClient> clients = new HashMap<>();

    private AtomicInteger counter = new AtomicInteger();

    private String cluster;

    private String host;

    private int port;

    private static void deleteFiles() throws IOException {
        Path directory = Paths.get(getHome() + "/data");
        Files.walkFileTree(directory, new SimpleFileVisitor<Path>() {
            @Override
            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                Files.delete(file);
                return FileVisitResult.CONTINUE;
            }

            @Override
            public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException {
                Files.delete(dir);
                return FileVisitResult.CONTINUE;
            }
        });
    }

    @Before
    public void startNodes() {
        try {
            logger.info("starting");
            setClusterName();
            startNode("1");
            findNodeAddress();
            try {
                ClusterHealthResponse healthResponse = client("1").execute(ClusterHealthAction.INSTANCE,
                        new ClusterHealthRequest().waitForStatus(ClusterHealthStatus.GREEN)
                                .timeout(TimeValue.timeValueSeconds(30))).actionGet();
                if (healthResponse != null && healthResponse.isTimedOut()) {
                    throw new IOException("cluster state is " + healthResponse.getStatus().name()
                            + ", from here on, everything will fail!");
                }
            } catch (ElasticsearchTimeoutException e) {
                throw new IOException("cluster does not respond to health request, cowardly refusing to continue");
            }
        } catch (Throwable t) {
            logger.error("startNodes failed", t);
        }
    }

    @After
    public void stopNodes() {
        try {
            closeNodes();
        } catch (Exception e) {
            logger.error("can not close nodes", e);
        } finally {
            try {
                deleteFiles();
                logger.info("data files wiped");
                Thread.sleep(2000L); // let the OS commit changes
            } catch (IOException e) {
                logger.error(e.getMessage(), e);
            } catch (InterruptedException e) {
                // ignore
            }
        }
    }

    protected void setClusterName() {
        this.cluster = "test-helper-cluster-"
                + NetworkUtils.getLocalAddress().getHostName()
                + "-" + System.getProperty("user.name")
                + "-" + counter.incrementAndGet();
    }

    protected String getClusterName() {
        return cluster;
    }

    protected Settings getSettings() {
        return settingsBuilder()
                .put("host", host)
                .put("port", port)
                .put("cluster.name", cluster)
                .put("path.home", getHome())
                .build();
    }

    protected Settings getNodeSettings() {
        return settingsBuilder()
                .put("cluster.name", cluster)
                .put("cluster.routing.schedule", "50ms")
                .put("cluster.routing.allocation.disk.threshold_enabled", false)
                .put("discovery.zen.multicast.enabled", true)
                .put("discovery.zen.multicast.ping_timeout", "5s")
                .put("http.enabled", true)
                .put("threadpool.bulk.size", Runtime.getRuntime().availableProcessors())
                .put("threadpool.bulk.queue_size", 16 * Runtime.getRuntime().availableProcessors()) // default is 50, too low
                .put("index.number_of_replicas", 0)
                .put("path.home", getHome())
                .build();
    }

    protected static String getHome() {
        return System.getProperty("path.home", System.getProperty("user.dir"));
    }

    public void startNode(String id) {
        buildNode(id).start();
    }

    public AbstractClient client(String id) {
        return clients.get(id);
    }

    private void closeNodes() {
        logger.info("closing all clients");
        for (AbstractClient client : clients.values()) {
            client.close();
        }
        clients.clear();
        logger.info("closing all nodes");
        for (Node node : nodes.values()) {
            if (node != null) {
                node.close();
            }
        }
        nodes.clear();
        logger.info("all nodes closed");
    }

    protected void findNodeAddress() {
        NodesInfoRequest nodesInfoRequest = new NodesInfoRequest().transport(true);
        NodesInfoResponse response = client("1").admin().cluster().nodesInfo(nodesInfoRequest).actionGet();
        Object obj = response.iterator().next().getTransport().getAddress()
                .publishAddress();
        if (obj instanceof InetSocketTransportAddress) {
            InetSocketTransportAddress address = (InetSocketTransportAddress) obj;
            host = address.address().getHostName();
            port = address.address().getPort();
        }
    }

    private Node buildNode(String id) {
        Settings nodeSettings = settingsBuilder()
                .put(getNodeSettings())
                .put("name", id)
                .build();
        logger.info("settings={}", nodeSettings.getAsMap());
        Node node = new MockNode(nodeSettings);
        AbstractClient client = (AbstractClient) node.client();
        nodes.put(id, node);
        clients.put(id, client);
        logger.info("clients={}", clients);
        return node;
    }

    protected String randomString(int len) {
        final char[] buf = new char[len];
        for (int i = 0; i < buf.length; i++) {
            // draw from the full alphabet; bounding by length - 1 would never pick 'z'
            buf[i] = numbersAndLetters[random.nextInt(numbersAndLetters.length)];
        }
        return new String(buf);
    }
}
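The deleted tests in this diff all follow the same pattern on top of this helper: inherit the @Before/@After node lifecycle and use only getSettings() and randomString(int). A condensed sketch of that pattern (index name and payload are illustrative; imports as in the tests above):

    public class MyTransportTest extends NodeTestUtils {

        @Test
        public void testIndexing() throws Exception {
            final ExtendedTransportClient client = ClientBuilder.builder()
                    .provider(ExtendedTransportClientProvider.class)
                    .put(getSettings()) // host, port and cluster.name of the node started in @Before
                    .build();
            try {
                client.newIndex("example");
                client.index("example", randomString(1), false,
                        "{ \"name\" : \"" + randomString(32) + "\"}");
                client.flush();
                client.waitForResponses(30L, TimeUnit.SECONDS);
            } finally {
                client.close();
            }
        }
    }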
@@ -1,150 +0,0 @@
package org.xbib.elx.transport;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.admin.indices.stats.CommonStats;
import org.elasticsearch.action.admin.indices.stats.IndexShardStats;
import org.elasticsearch.action.admin.indices.stats.IndexStats;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequestBuilder;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
import org.elasticsearch.action.search.SearchAction;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.client.transport.NoNodeAvailableException;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.indexing.IndexingStats;
import org.junit.Test;
import org.xbib.elx.common.ClientBuilder;

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;

import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;

public class ReplicaTest extends NodeTestUtils {

    private static final Logger logger = LogManager.getLogger(ReplicaTest.class.getSimpleName());

    @Test
    public void testReplicaLevel() throws Exception {

        // we need more nodes for the replica levels
        startNode("2");
        startNode("3");
        startNode("4");

        Settings settingsTest1 = Settings.settingsBuilder()
                .put("index.number_of_shards", 2)
                .put("index.number_of_replicas", 3)
                .build();

        Settings settingsTest2 = Settings.settingsBuilder()
                .put("index.number_of_shards", 2)
                .put("index.number_of_replicas", 1)
                .build();

        final ExtendedTransportClient client = ClientBuilder.builder()
                .provider(ExtendedTransportClientProvider.class)
                .put(getSettings())
                .build();

        try {
            client.newIndex("test1", settingsTest1, new HashMap<>())
                    .newIndex("test2", settingsTest2, new HashMap<>());
            client.waitForCluster("GREEN", 30L, TimeUnit.SECONDS);
            for (int i = 0; i < 1234; i++) {
                client.index("test1", null, false, "{ \"name\" : \"" + randomString(32) + "\"}");
            }
            for (int i = 0; i < 1234; i++) {
                client.index("test2", null, false, "{ \"name\" : \"" + randomString(32) + "\"}");
            }
            client.flush();
            client.waitForResponses(30L, TimeUnit.SECONDS);
            client.refreshIndex("test1");
            client.refreshIndex("test2");
        } catch (NoNodeAvailableException e) {
            logger.warn("skipping, no node available");
        } finally {
            SearchRequestBuilder searchRequestBuilder = new SearchRequestBuilder(client.getClient(), SearchAction.INSTANCE)
                    .setIndices("test1", "test2")
                    .setQuery(matchAllQuery());
            long hits = searchRequestBuilder.execute().actionGet().getHits().getTotalHits();
            logger.info("query total hits={}", hits);
            assertEquals(2468, hits);

            // TODO move to api
            IndicesStatsRequestBuilder indicesStatsRequestBuilder = new IndicesStatsRequestBuilder(client.getClient(),
                    IndicesStatsAction.INSTANCE).all();
            IndicesStatsResponse response = indicesStatsRequestBuilder.execute().actionGet();
            for (Map.Entry<String, IndexStats> m : response.getIndices().entrySet()) {
                IndexStats indexStats = m.getValue();
                CommonStats commonStats = indexStats.getTotal();
                IndexingStats indexingStats = commonStats.getIndexing();
                IndexingStats.Stats stats = indexingStats.getTotal();
                logger.info("index {}: count = {}", m.getKey(), stats.getIndexCount());
                for (Map.Entry<Integer, IndexShardStats> me : indexStats.getIndexShards().entrySet()) {
                    IndexShardStats indexShardStats = me.getValue();
                    CommonStats commonShardStats = indexShardStats.getTotal();
                    logger.info("shard {} count = {}", me.getKey(),
                            commonShardStats.getIndexing().getTotal().getIndexCount());
                }
            }
            try {
                client.deleteIndex("test1").deleteIndex("test2");
            } catch (Exception e) {
                logger.error("delete index failed, ignored. Reason:", e);
            }
            if (client.getBulkController().getLastBulkError() != null) {
                logger.error("error", client.getBulkController().getLastBulkError());
            }
            assertNull(client.getBulkController().getLastBulkError());
            client.close();
        }
    }

    @Test
    public void testUpdateReplicaLevel() throws Exception {

        long numberOfShards = 2;
        int replicaLevel = 3;

        // we need 3 nodes for replica level 3
        startNode("2");
        startNode("3");

        int shardsAfterReplica;

        final ExtendedTransportClient client = ClientBuilder.builder()
                .provider(ExtendedTransportClientProvider.class)
                .put(getSettings())
                .build();

        Settings settings = Settings.settingsBuilder()
                .put("index.number_of_shards", numberOfShards)
                .put("index.number_of_replicas", 0)
                .build();

        try {
            client.newIndex("replicatest", settings, new HashMap<>());
            client.waitForCluster("GREEN", 30L, TimeUnit.SECONDS);
            for (int i = 0; i < 12345; i++) {
                client.index("replicatest", null, false, "{ \"name\" : \"" + randomString(32) + "\"}");
            }
            client.flush();
            client.waitForResponses(30L, TimeUnit.SECONDS);
            client.updateReplicaLevel("replicatest", replicaLevel, 30L, TimeUnit.SECONDS);
            assertEquals(replicaLevel, client.getReplicaLevel("replicatest"));
        } catch (NoNodeAvailableException e) {
            logger.warn("skipping, no node available");
        } finally {
            client.close();
            if (client.getBulkController().getLastBulkError() != null) {
                logger.error("error", client.getBulkController().getLastBulkError());
            }
            assertNull(client.getBulkController().getLastBulkError());
        }
    }
}
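Why testReplicaLevel starts three extra nodes: together with node "1" from NodeTestUtils that makes four, and index.number_of_replicas = 3 means four copies of every shard (one primary plus three replicas), each of which must sit on a different node before the cluster can reach GREEN. As arithmetic:

    // Shard-copy arithmetic behind the node counts above.
    int numberOfReplicas = 3;
    int copiesPerShard = 1 + numberOfReplicas; // one primary plus three replicas = 4
    int minimumNodesForGreen = copiesPerShard; // copies of a shard may not share a node
    // hence startNode("2"), startNode("3"), startNode("4") in addition to node "1"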
@@ -1,40 +0,0 @@
package org.xbib.elx.transport;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.client.transport.NoNodeAvailableException;
import org.junit.Test;
import org.xbib.elx.common.ClientBuilder;

import java.util.concurrent.TimeUnit;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;

public class SmokeTest extends NodeTestUtils {

    private static final Logger logger = LogManager.getLogger(SmokeTest.class.getSimpleName());

    @Test
    public void testSingleDocNodeClient() throws Exception {
        final ExtendedTransportClient client = ClientBuilder.builder()
                .provider(ExtendedTransportClientProvider.class)
                .put(getSettings())
                .build();
        try {
            client.newIndex("test");
            client.index("test", "1", true, "{ \"name\" : \"Hello World\"}"); // single doc ingest
            client.flush();
            client.waitForResponses(30, TimeUnit.SECONDS);
        } catch (NoNodeAvailableException e) {
            logger.warn("skipping, no node available");
        } finally {
            assertEquals(1, client.getBulkMetric().getSucceeded().getCount());
            client.close();
            if (client.getBulkController().getLastBulkError() != null) {
                logger.error("error", client.getBulkController().getLastBulkError());
            }
            assertNull(client.getBulkController().getLastBulkError());
        }
    }
}
@@ -1 +0,0 @@
package org.xbib.elx.transport;
@@ -1,4 +1,4 @@
package org.xbib.elx.transport;
package org.xbib.elx.transport.test;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
@@ -13,88 +13,53 @@ import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.junit.Before;
import org.junit.Test;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.xbib.elx.common.ClientBuilder;
import org.xbib.elx.common.Parameters;
import org.xbib.elx.transport.ExtendedTransportClient;
import org.xbib.elx.transport.ExtendedTransportClientProvider;

import java.util.HashMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;

public class ClientTest extends NodeTestUtils {
@ExtendWith(TestExtension.class)
class ClientTest {

    private static final Logger logger = LogManager.getLogger(ClientTest.class.getSimpleName());
    private static final Logger logger = LogManager.getLogger(ClientTest.class.getName());

    private static final Long ACTIONS = 100L;

    private static final Long MAX_ACTIONS_PER_REQUEST = 1000L;

    private static final Long ACTIONS = 1234L;
    private final TestExtension.Helper helper;

    @Before
    public void startNodes() {
        try {
            super.startNodes();
            startNode("2");
        } catch (Throwable t) {
            logger.error("startNodes failed", t);
        }
    ClientTest(TestExtension.Helper helper) {
        this.helper = helper;
    }

    @Test
    public void testClientIndexOp() throws Exception {
    void testClientIndexOp() throws Exception {
        final ExtendedTransportClient client = ClientBuilder.builder()
                .provider(ExtendedTransportClientProvider.class)
                .put(getSettings())
                .put(helper.getTransportSettings())
                .put(Parameters.FLUSH_INTERVAL.name(), TimeValue.timeValueSeconds(60))
                .build();
        client.newIndex("test");
        try {
            client.deleteIndex("test")
                    .newIndex("test")
                    .deleteIndex("test");
        } catch (NoNodeAvailableException e) {
            logger.error("no node available");
        } finally {
            client.newIndex("test1");
            client.close();
        }
    }

    @Test
    public void testSingleDoc() throws Exception {
    void testMapping() throws Exception {
        final ExtendedTransportClient client = ClientBuilder.builder()
                .provider(ExtendedTransportClientProvider.class)
                .put(getSettings())
                .put(Parameters.MAX_ACTIONS_PER_REQUEST.name(), MAX_ACTIONS_PER_REQUEST)
                .put(Parameters.FLUSH_INTERVAL.name(), TimeValue.timeValueSeconds(60))
                .build();
        try {
            client.newIndex("test");
            client.index("test", "1", true, "{ \"name\" : \"Hello World\"}");
            client.flush();
            client.waitForResponses(30L, TimeUnit.SECONDS);
        } catch (NoNodeAvailableException e) {
            logger.warn("skipping, no node available");
        } finally {
            assertEquals(1, client.getBulkMetric().getSucceeded().getCount());
            if (client.getBulkController().getLastBulkError() != null) {
                logger.error("error", client.getBulkController().getLastBulkError());
            }
            assertNull(client.getBulkController().getLastBulkError());
            client.close();
        }
    }

    @Test
    public void testMapping() throws Exception {
        final ExtendedTransportClient client = ClientBuilder.builder()
                .provider(ExtendedTransportClientProvider.class)
                .put(getSettings())
                .put(helper.getTransportSettings())
                .put(Parameters.FLUSH_INTERVAL.name(), TimeValue.timeValueSeconds(5))
                .build();
        XContentBuilder builder = jsonBuilder()
@@ -107,35 +72,32 @@ public class ClientTest extends NodeTestUtils {
                .endObject()
                .endObject()
                .endObject();
        client.newIndex("test", Settings.EMPTY, builder.string());
        GetMappingsRequest getMappingsRequest = new GetMappingsRequest().indices("test");
        client.newIndex("test2", Settings.EMPTY, builder.string());
        GetMappingsRequest getMappingsRequest = new GetMappingsRequest().indices("test2");
        GetMappingsResponse getMappingsResponse =
                client.getClient().execute(GetMappingsAction.INSTANCE, getMappingsRequest).actionGet();
        logger.info("mappings={}", getMappingsResponse.getMappings());
        assertTrue(getMappingsResponse.getMappings().get("test").containsKey("doc"));
        assertTrue(getMappingsResponse.getMappings().get("test2").containsKey("doc"));
        client.close();
    }

    @Test
    public void testRandomDocs() throws Exception {
        long numactions = ACTIONS;
    void testSingleDoc() throws Exception {
        final ExtendedTransportClient client = ClientBuilder.builder()
                .provider(ExtendedTransportClientProvider.class)
                .put(getSettings())
                .put(helper.getTransportSettings())
                .put(Parameters.MAX_ACTIONS_PER_REQUEST.name(), MAX_ACTIONS_PER_REQUEST)
                .put(Parameters.FLUSH_INTERVAL.name(), TimeValue.timeValueSeconds(60))
                .build();
        try {
            client.newIndex("test");
            for (int i = 0; i < ACTIONS; i++) {
                client.index("test", null, false, "{ \"name\" : \"" + randomString(32) + "\"}");
            }
            client.newIndex("test3");
            client.index("test3", "1", true, "{ \"name\" : \"Hello World\"}");
            client.flush();
            client.waitForResponses(30L, TimeUnit.SECONDS);
        } catch (NoNodeAvailableException e) {
            logger.warn("skipping, no node available");
        } finally {
            assertEquals(numactions, client.getBulkMetric().getSucceeded().getCount());
            assertEquals(1, client.getBulkController().getBulkMetric().getSucceeded().getCount());
            if (client.getBulkController().getLastBulkError() != null) {
                logger.error("error", client.getBulkController().getLastBulkError());
            }
@@ -145,32 +107,63 @@ public class ClientTest extends NodeTestUtils {
    }

    @Test
    public void testThreadedRandomDocs() throws Exception {
    void testRandomDocs() throws Exception {
        long numactions = ACTIONS;
        final ExtendedTransportClient client = ClientBuilder.builder()
                .provider(ExtendedTransportClientProvider.class)
                .put(helper.getTransportSettings())
                .put(Parameters.MAX_ACTIONS_PER_REQUEST.name(), MAX_ACTIONS_PER_REQUEST)
                .put(Parameters.FLUSH_INTERVAL.name(), TimeValue.timeValueSeconds(60))
                .build();
        try {
            client.newIndex("test4");
            for (int i = 0; i < ACTIONS; i++) {
                client.index("test4", null, false,
                        "{ \"name\" : \"" + helper.randomString(32) + "\"}");
            }
            client.flush();
            client.waitForResponses(60L, TimeUnit.SECONDS);
        } catch (NoNodeAvailableException e) {
            logger.warn("skipping, no node available");
        } finally {
            assertEquals(numactions, client.getBulkController().getBulkMetric().getSucceeded().getCount());
            if (client.getBulkController().getLastBulkError() != null) {
                logger.error("error", client.getBulkController().getLastBulkError());
            }
            assertNull(client.getBulkController().getLastBulkError());
            client.refreshIndex("test4");
            SearchRequestBuilder searchRequestBuilder = new SearchRequestBuilder(client.getClient(), SearchAction.INSTANCE)
                    .setIndices("test4")
                    .setQuery(QueryBuilders.matchAllQuery())
                    .setSize(0);
            assertEquals(numactions,
                    searchRequestBuilder.execute().actionGet().getHits().getTotalHits());
            client.close();
        }
    }

    @Test
    void testThreadedRandomDocs() throws Exception {
        int maxthreads = Runtime.getRuntime().availableProcessors();
        long maxactions = MAX_ACTIONS_PER_REQUEST;
        final long maxloop = ACTIONS;

        Settings settingsForIndex = Settings.settingsBuilder()
                .put("index.number_of_shards", 2)
                .put("index.number_of_replicas", 1)
                .build();

        final ExtendedTransportClient client = ClientBuilder.builder()
                .provider(ExtendedTransportClientProvider.class)
                .put(getSettings())
                .put(helper.getTransportSettings())
                .put(Parameters.MAX_ACTIONS_PER_REQUEST.name(), maxactions)
                .put(Parameters.FLUSH_INTERVAL.name(), TimeValue.timeValueSeconds(60))
                .build();
        try {
            client.newIndex("test", settingsForIndex, new HashMap<>())
                    .startBulk("test", -1, 1000);
            ThreadPoolExecutor pool = EsExecutors.newFixed("bulkclient-test", maxthreads, 30,
                    EsExecutors.daemonThreadFactory("bulkclient-test"));
            client.newIndex("test5")
                    .startBulk("test5", -1, 1000);
            ThreadPoolExecutor pool = EsExecutors.newFixed("transportclient-test", maxthreads, 30,
                    EsExecutors.daemonThreadFactory("transportclient-test"));
            final CountDownLatch latch = new CountDownLatch(maxthreads);
            for (int i = 0; i < maxthreads; i++) {
                pool.execute(() -> {
                    for (int i1 = 0; i1 < maxloop; i1++) {
                        client.index("test", null, false, "{ \"name\" : \"" + randomString(32) + "\"}");
                        client.index("test5", null, false,
                                "{ \"name\" : \"" + helper.randomString(32) + "\"}");
                    }
                    latch.countDown();
                });
@@ -179,25 +172,25 @@ public class ClientTest extends NodeTestUtils {
            if (latch.await(60, TimeUnit.SECONDS)) {
                logger.info("flush ...");
                client.flush();
                client.waitForResponses(30L, TimeUnit.SECONDS);
                logger.info("pool to be shut down ...");
                client.waitForResponses(60L, TimeUnit.SECONDS);
                logger.info("pool shutdown ...");
                pool.shutdown();
                logger.info("pool shut down");
                logger.info("pool is shut down");
            } else {
                logger.warn("latch timeout");
            }
            client.stopBulk("test", 30L, TimeUnit.SECONDS);
            assertEquals(maxthreads * maxloop, client.getBulkMetric().getSucceeded().getCount());
        } catch (NoNodeAvailableException e) {
            logger.warn("skipping, no node available");
        } finally {
            client.stopBulk("test5", 60L, TimeUnit.SECONDS);
            assertEquals(maxthreads * maxloop, client.getBulkController().getBulkMetric().getSucceeded().getCount());
            if (client.getBulkController().getLastBulkError() != null) {
                logger.error("error", client.getBulkController().getLastBulkError());
            }
            assertNull(client.getBulkController().getLastBulkError());
            // extra search lookup
            client.refreshIndex("test");
            client.refreshIndex("test5");
            SearchRequestBuilder searchRequestBuilder = new SearchRequestBuilder(client.getClient(), SearchAction.INSTANCE)
                    // to avoid NPE at org.elasticsearch.action.search.SearchRequest.writeTo(SearchRequest.java:580)
                    .setIndices("_all")
                    .setIndices("test5")
                    .setQuery(QueryBuilders.matchAllQuery())
                    .setSize(0);
            assertEquals(maxthreads * maxloop,
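The rewritten ClientTest receives a TestExtension.Helper through its constructor, so TestExtension must act as a JUnit 5 ParameterResolver; since the @Before startNodes() override is gone, it presumably also manages the test cluster. TestExtension itself is not among the lines shown here; a minimal sketch of the shape such an extension could take, with the Helper internals being assumptions rather than the project's code:

    // Sketch only: one plausible shape for TestExtension; the real class is not in this diff.
    import org.elasticsearch.common.settings.Settings;
    import org.junit.jupiter.api.extension.ExtensionContext;
    import org.junit.jupiter.api.extension.ParameterContext;
    import org.junit.jupiter.api.extension.ParameterResolver;

    public class TestExtension implements ParameterResolver {

        /** Hypothetical carrier for per-test cluster state. */
        public static class Helper {

            public Settings getTransportSettings() {
                // placeholder: the real extension would expose the started node's
                // host, port and cluster.name here
                return Settings.EMPTY;
            }

            public String randomString(int len) {
                char[] chars = "0123456789abcdefghijklmnopqrstuvwxyz".toCharArray();
                java.util.Random random = new java.util.Random();
                char[] buf = new char[len];
                for (int i = 0; i < len; i++) {
                    buf[i] = chars[random.nextInt(chars.length)];
                }
                return new String(buf);
            }
        }

        @Override
        public boolean supportsParameter(ParameterContext parameterContext, ExtensionContext extensionContext) {
            return parameterContext.getParameter().getType().equals(Helper.class);
        }

        @Override
        public Object resolveParameter(ParameterContext parameterContext, ExtensionContext extensionContext) {
            return new Helper();
        }
    }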
Some files were not shown because too many files have changed in this diff.