From 3439386e026d16f254d5260a3655006580f4d54f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=CC=88rg=20Prante?= Date: Thu, 2 May 2019 19:46:21 +0200 Subject: [PATCH] switch to Junit 5 --- build.gradle | 58 +- elx-api/build.gradle | 11 +- .../java/org/xbib/elx/api/ReadClient.java | 25 + .../org/xbib/elx/api/ReadClientProvider.java | 7 + .../elx/common/DefaultBulkController.java | 20 +- .../xbib/elx/common/DefaultBulkProcessor.java | 24 +- .../org/xbib/elx/common/test/AliasTest.java | 58 +- .../elx/common/test/ClusterBlockTest.java | 48 -- .../test/MockExtendedClientProviderTest.java | 8 +- .../org/xbib/elx/common/test/NetworkTest.java | 10 +- .../org/xbib/elx/common/test/SearchTest.java | 51 +- .../org/xbib/elx/common/test/SimpleTest.java | 28 +- .../org/xbib/elx/common/test/TestBase.java | 212 ------- .../xbib/elx/common/test/TestExtension.java | 216 +++++++ .../xbib/elx/common/test/WildcardTest.java | 62 +- elx-http/build.gradle | 4 + .../org/xbib/elx/http/ExtendedHttpClient.java | 127 ++++ .../elx/http/ExtendedHttpClientProvider.java | 10 + .../java/org/xbib/elx/http/HttpAction.java | 169 +++++ .../org/xbib/elx/http/HttpActionContext.java | 60 ++ .../elx/http/action/get/HttpGetAction.java | 179 ++++++ .../http/action/get/HttpMultiGetAction.java | 255 ++++++++ .../http/action/search/HttpSearchAction.java | 597 ++++++++++++++++++ .../elx/http/util/AbstractObjectParser.java | 217 +++++++ .../xbib/elx/http/util/CheckedBiConsumer.java | 11 + .../xbib/elx/http/util/CheckedFunction.java | 6 + .../org/xbib/elx/http/util/ContextParser.java | 13 + .../util/NamedObjectNotFoundException.java | 14 + .../elx/http/util/NamedXContentRegistry.java | 101 +++ .../org/xbib/elx/http/util/ObjectParser.java | 441 +++++++++++++ .../elx/http/util/XContentParseException.java | 47 ++ .../elx/http/util/XContentParserUtils.java | 68 ++ .../http/util/aggregations/CommonFields.java | 18 + .../util/aggregations/ParsedAggregation.java | 40 ++ .../ParsedMultiBucketAggregation.java | 149 +++++ .../util/aggregations/ParsedStringTerms.java | 103 +++ .../http/util/aggregations/ParsedTerms.java | 118 ++++ .../org.xbib.elx.api.ExtendedClientProvider | 1 + .../services/org.xbib.elx.http.HttpAction | 3 + .../org/xbib/elx/http/test/ClientTest.java | 122 ++++ .../org/xbib/elx/http/test}/MockNode.java | 3 +- .../org/xbib/elx/http/test}/TestBase.java | 38 +- elx-http/src/test/resources/log4j2.xml | 13 + .../org/xbib/elx/node/test/ClientTest.java | 159 ++--- .../xbib/elx/node/test/DuplicateIDTest.java | 36 +- .../xbib/elx/node/test/IndexPruneTest.java | 53 +- .../xbib/elx/node/test/IndexShiftTest.java | 40 +- .../org/xbib/elx/node/test/ReplicaTest.java | 151 ----- .../org/xbib/elx/node/test/SmokeTest.java | 46 +- .../java/org/xbib/elx/node/test/TestBase.java | 212 ------- .../org/xbib/elx/node/test/TestExtension.java | 213 +++++++ .../org/xbib/elx/transport/ReplicaTest.java | 150 ----- .../org/xbib/elx/transport/package-info.java | 1 - .../elx/transport/{ => test}/ClientTest.java | 168 +++-- .../transport/{ => test}/DuplicateIDTest.java | 42 +- .../transport/{ => test}/IndexPruneTest.java | 54 +- .../transport/{ => test}/IndexShiftTest.java | 44 +- .../org/xbib/elx/transport/test/MockNode.java | 11 + .../elx/transport/{ => test}/SmokeTest.java | 47 +- .../elx/transport/test/TestExtension.java | 229 +++++++ .../xbib/elx/transport/test/package-info.java | 1 + gradle.properties | 7 +- gradle/wrapper/gradle-wrapper.jar | Bin 55190 -> 55616 bytes gradle/wrapper/gradle-wrapper.properties | 4 +- gradlew | 18 +- gradlew.bat | 18 +- settings.gradle | 1 - 67 files changed, 4210 insertions(+), 1260 deletions(-) create mode 100644 elx-api/src/main/java/org/xbib/elx/api/ReadClient.java create mode 100644 elx-api/src/main/java/org/xbib/elx/api/ReadClientProvider.java delete mode 100644 elx-common/src/test/java/org/xbib/elx/common/test/ClusterBlockTest.java delete mode 100644 elx-common/src/test/java/org/xbib/elx/common/test/TestBase.java create mode 100644 elx-common/src/test/java/org/xbib/elx/common/test/TestExtension.java create mode 100644 elx-http/build.gradle create mode 100644 elx-http/src/main/java/org/xbib/elx/http/ExtendedHttpClient.java create mode 100644 elx-http/src/main/java/org/xbib/elx/http/ExtendedHttpClientProvider.java create mode 100644 elx-http/src/main/java/org/xbib/elx/http/HttpAction.java create mode 100644 elx-http/src/main/java/org/xbib/elx/http/HttpActionContext.java create mode 100644 elx-http/src/main/java/org/xbib/elx/http/action/get/HttpGetAction.java create mode 100644 elx-http/src/main/java/org/xbib/elx/http/action/get/HttpMultiGetAction.java create mode 100644 elx-http/src/main/java/org/xbib/elx/http/action/search/HttpSearchAction.java create mode 100644 elx-http/src/main/java/org/xbib/elx/http/util/AbstractObjectParser.java create mode 100644 elx-http/src/main/java/org/xbib/elx/http/util/CheckedBiConsumer.java create mode 100644 elx-http/src/main/java/org/xbib/elx/http/util/CheckedFunction.java create mode 100644 elx-http/src/main/java/org/xbib/elx/http/util/ContextParser.java create mode 100644 elx-http/src/main/java/org/xbib/elx/http/util/NamedObjectNotFoundException.java create mode 100644 elx-http/src/main/java/org/xbib/elx/http/util/NamedXContentRegistry.java create mode 100644 elx-http/src/main/java/org/xbib/elx/http/util/ObjectParser.java create mode 100644 elx-http/src/main/java/org/xbib/elx/http/util/XContentParseException.java create mode 100644 elx-http/src/main/java/org/xbib/elx/http/util/XContentParserUtils.java create mode 100644 elx-http/src/main/java/org/xbib/elx/http/util/aggregations/CommonFields.java create mode 100644 elx-http/src/main/java/org/xbib/elx/http/util/aggregations/ParsedAggregation.java create mode 100644 elx-http/src/main/java/org/xbib/elx/http/util/aggregations/ParsedMultiBucketAggregation.java create mode 100644 elx-http/src/main/java/org/xbib/elx/http/util/aggregations/ParsedStringTerms.java create mode 100644 elx-http/src/main/java/org/xbib/elx/http/util/aggregations/ParsedTerms.java create mode 100644 elx-http/src/main/resources/META-INF/services/org.xbib.elx.api.ExtendedClientProvider create mode 100644 elx-http/src/main/resources/META-INF/services/org.xbib.elx.http.HttpAction create mode 100644 elx-http/src/test/java/org/xbib/elx/http/test/ClientTest.java rename {elx-transport/src/test/java/org/xbib/elx/transport => elx-http/src/test/java/org/xbib/elx/http/test}/MockNode.java (85%) rename {elx-transport/src/test/java/org/xbib/elx/transport => elx-http/src/test/java/org/xbib/elx/http/test}/TestBase.java (86%) create mode 100644 elx-http/src/test/resources/log4j2.xml delete mode 100644 elx-node/src/test/java/org/xbib/elx/node/test/ReplicaTest.java delete mode 100644 elx-node/src/test/java/org/xbib/elx/node/test/TestBase.java create mode 100644 elx-node/src/test/java/org/xbib/elx/node/test/TestExtension.java delete mode 100644 elx-transport/src/test/java/org/xbib/elx/transport/ReplicaTest.java delete mode 100644 elx-transport/src/test/java/org/xbib/elx/transport/package-info.java rename elx-transport/src/test/java/org/xbib/elx/transport/{ => test}/ClientTest.java (67%) rename elx-transport/src/test/java/org/xbib/elx/transport/{ => test}/DuplicateIDTest.java (60%) rename elx-transport/src/test/java/org/xbib/elx/transport/{ => test}/IndexPruneTest.java (62%) rename elx-transport/src/test/java/org/xbib/elx/transport/{ => test}/IndexShiftTest.java (73%) create mode 100644 elx-transport/src/test/java/org/xbib/elx/transport/test/MockNode.java rename elx-transport/src/test/java/org/xbib/elx/transport/{ => test}/SmokeTest.java (61%) create mode 100644 elx-transport/src/test/java/org/xbib/elx/transport/test/TestExtension.java create mode 100644 elx-transport/src/test/java/org/xbib/elx/transport/test/package-info.java diff --git a/build.gradle b/build.gradle index 2e44f5f..1333813 100644 --- a/build.gradle +++ b/build.gradle @@ -20,7 +20,7 @@ printf "Host: %s\nOS: %s %s %s\nJVM: %s %s %s %s\nGradle: %s Groovy: %s Java: %s JavaVersion.current() if (JavaVersion.current() < JavaVersion.VERSION_11) { - throw new GradleException("This build must be run with java 11 or higher") + throw new GradleException("The build must be run with Java 11") } subprojects { @@ -38,9 +38,11 @@ subprojects { } dependencies { - testCompile "junit:junit:${project.property('junit.version')}" - testCompile "org.apache.logging.log4j:log4j-core:${project.property('log4j.version')}" - testCompile "org.apache.logging.log4j:log4j-slf4j-impl:${project.property('log4j.version')}" + testImplementation "org.junit.jupiter:junit-jupiter-api:${project.property('junit.version')}" + testRuntimeOnly "org.junit.jupiter:junit-jupiter-engine:${project.property('junit.version')}" + testImplementation "org.apache.logging.log4j:log4j-core:${project.property('log4j.version')}" + testImplementation "org.apache.logging.log4j:log4j-jul:${project.property('log4j.version')}" + testImplementation "org.apache.logging.log4j:log4j-slf4j-impl:${project.property('log4j.version')}" asciidoclet "org.xbib:asciidoclet:${project.property('asciidoclet.version')}" wagon "org.apache.maven.wagon:wagon-ssh:${project.property('wagon.version')}" } @@ -63,43 +65,44 @@ subprojects { } test { - jvmArgs =[ - '--add-exports=java.base/jdk.internal.ref=ALL-UNNAMED', - '--add-exports=java.base/jdk.internal.misc=ALL-UNNAMED', - '--add-opens=java.base/java.nio=ALL-UNNAMED' - ] + useJUnitPlatform() + // we MUST use this hack because of Elasticsearch 2.2.1 Lucene 5.4.1 MMapDirectory unmap() hackery + doFirst { + jvmArgs = [ + '--add-exports=java.base/jdk.internal.ref=ALL-UNNAMED', + '--add-exports=java.base/jdk.internal.misc=ALL-UNNAMED', + '--add-opens=java.base/java.nio=ALL-UNNAMED' + ] + } + systemProperty 'java.util.logging.manager', 'org.apache.logging.log4j.jul.LogManager' systemProperty 'jna.debug_load', 'true' + systemProperty 'path.home', "${project.buildDir}" + failFast = false testLogging { - showStandardStreams = true - exceptionFormat = 'full' + events 'PASSED', 'FAILED', 'SKIPPED' + } + afterSuite { desc, result -> + if (!desc.parent) { + println "\nTest result: ${result.resultType}" + println "Test summary: ${result.testCount} tests, " + + "${result.successfulTestCount} succeeded, " + + "${result.failedTestCount} failed, " + + "${result.skippedTestCount} skipped" + } } } clean { - delete "data" - delete "logs" delete "out" } - /*javadoc { - options.docletpath = configurations.asciidoclet.files.asType(List) - options.doclet = 'org.asciidoctor.Asciidoclet' - options.overview = "src/docs/asciidoclet/overview.adoc" - options.addStringOption "-base-dir", "${projectDir}" - options.addStringOption "-attribute", - "name=${project.name},version=${project.version},title-link=https://github.com/jprante/${project.name}" - configure(options) { - noTimestamp = true - } - }*/ - task javadocJar(type: Jar, dependsOn: javadoc) { - classifier 'javadoc' + archiveClassifier.set('javadoc') } task sourcesJar(type: Jar, dependsOn: classes) { from sourceSets.main.allSource - classifier 'sources' + archiveClassifier.set('sources') } artifacts { @@ -135,6 +138,7 @@ subprojects { html.enabled = true } } + tasks.withType(Checkstyle) { ignoreFailures = true reports { diff --git a/elx-api/build.gradle b/elx-api/build.gradle index 47596cb..21f7734 100644 --- a/elx-api/build.gradle +++ b/elx-api/build.gradle @@ -1,19 +1,20 @@ dependencies { compile "org.xbib:metrics-common:${project.property('xbib-metrics.version')}" compile("org.elasticsearch:elasticsearch:${project.property('elasticsearch.version')}") { - // exclude ES jackson yaml, cbor, smile versions + // exclude original ES jackson yaml, cbor, smile version (2.6.2) exclude group: 'com.fasterxml.jackson.dataformat' - // dependencies that are not meant for client + // these dependencies that are not meant for client applications exclude module: 'securesm' // we use log4j2, not log4j exclude group: 'log4j' } // override log4j2 of Elastic with ours compile "org.apache.logging.log4j:log4j-core:${project.property('log4j.version')}" + // override ES jackson with our jackson version // for Elasticsearch session, ES uses SMILE when encoding source for SearchRequest - compile "com.fasterxml.jackson.dataformat:jackson-dataformat-smile:${project.property('jackson-dataformat.version')}" + compile "com.fasterxml.jackson.dataformat:jackson-dataformat-smile:${project.property('jackson.version')}" // CBOR ist default JSON content compression encoding in ES 2.2.1 - compile "com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:${project.property('jackson-dataformat.version')}" + compile "com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:${project.property('jackson.version')}" // not used, but maybe in other projects - compile "com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:${project.property('jackson-dataformat.version')}" + compile "com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:${project.property('jackson.version')}" } \ No newline at end of file diff --git a/elx-api/src/main/java/org/xbib/elx/api/ReadClient.java b/elx-api/src/main/java/org/xbib/elx/api/ReadClient.java new file mode 100644 index 0000000..2ecb857 --- /dev/null +++ b/elx-api/src/main/java/org/xbib/elx/api/ReadClient.java @@ -0,0 +1,25 @@ +package org.xbib.elx.api; + +import org.elasticsearch.action.ActionFuture; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.get.GetRequest; +import org.elasticsearch.action.get.GetResponse; +import org.elasticsearch.action.get.MultiGetRequest; +import org.elasticsearch.action.get.MultiGetResponse; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchResponse; + +public interface ReadClient { + + ActionFuture get(GetRequest getRequest); + + void get(GetRequest request, ActionListener listener); + + ActionFuture multiGet(MultiGetRequest request); + + void multiGet(MultiGetRequest request, ActionListener listener); + + ActionFuture search(SearchRequest request); + + void search(SearchRequest request, ActionListener listener); +} diff --git a/elx-api/src/main/java/org/xbib/elx/api/ReadClientProvider.java b/elx-api/src/main/java/org/xbib/elx/api/ReadClientProvider.java new file mode 100644 index 0000000..6640686 --- /dev/null +++ b/elx-api/src/main/java/org/xbib/elx/api/ReadClientProvider.java @@ -0,0 +1,7 @@ +package org.xbib.elx.api; + +@FunctionalInterface +public interface ReadClientProvider { + + C getReadClient(); +} diff --git a/elx-common/src/main/java/org/xbib/elx/common/DefaultBulkController.java b/elx-common/src/main/java/org/xbib/elx/common/DefaultBulkController.java index 30d5b52..f6af929 100644 --- a/elx-common/src/main/java/org/xbib/elx/common/DefaultBulkController.java +++ b/elx-common/src/main/java/org/xbib/elx/common/DefaultBulkController.java @@ -8,7 +8,6 @@ import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.update.UpdateRequest; -import org.elasticsearch.client.Client; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; @@ -83,12 +82,12 @@ public class DefaultBulkController implements BulkController { maxActionsPerRequest, maxConcurrentRequests, flushIngestInterval, maxVolumePerRequest); } this.bulkListener = new BulkListener(); - DefaultBulkProcessor.Builder builder = DefaultBulkProcessor.builder((Client) client.getClient(), bulkListener) + this.bulkProcessor = DefaultBulkProcessor.builder(client.getClient(), bulkListener) .setBulkActions(maxActionsPerRequest) .setConcurrentRequests(maxConcurrentRequests) .setFlushInterval(flushIngestInterval) - .setBulkSize(maxVolumePerRequest); - this.bulkProcessor = builder.build(); + .setBulkSize(maxVolumePerRequest) + .build(); this.active.set(true); } @@ -115,6 +114,7 @@ public class DefaultBulkController implements BulkController { @Override public void index(IndexRequest indexRequest) { + ensureActiveAndBulk(); if (!active.get()) { throw new IllegalStateException("inactive"); } @@ -226,6 +226,18 @@ public class DefaultBulkController implements BulkController { } } + private void ensureActiveAndBulk() { + if (!active.get()) { + throw new IllegalStateException("inactive"); + } + if (bulkProcessor == null) { + throw new UnsupportedOperationException("bulk processor not present"); + } + if (bulkListener == null) { + throw new UnsupportedOperationException("bulk listener not present"); + } + } + private class BulkListener implements DefaultBulkProcessor.Listener { private final Logger logger = LogManager.getLogger("org.xbib.elx.BulkProcessor.Listener"); diff --git a/elx-common/src/main/java/org/xbib/elx/common/DefaultBulkProcessor.java b/elx-common/src/main/java/org/xbib/elx/common/DefaultBulkProcessor.java index 436d375..e8048cc 100644 --- a/elx-common/src/main/java/org/xbib/elx/common/DefaultBulkProcessor.java +++ b/elx-common/src/main/java/org/xbib/elx/common/DefaultBulkProcessor.java @@ -5,7 +5,7 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; -import org.elasticsearch.client.Client; +import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; @@ -44,20 +44,22 @@ public class DefaultBulkProcessor implements BulkProcessor { private volatile boolean closed; - private DefaultBulkProcessor(Client client, Listener listener, String name, int concurrentRequests, + private DefaultBulkProcessor(ElasticsearchClient client, Listener listener, String name, int concurrentRequests, int bulkActions, ByteSizeValue bulkSize, TimeValue flushInterval) { this.executionIdGen = new AtomicLong(); this.closed = false; this.bulkActions = bulkActions; this.bulkSize = bulkSize.getBytes(); this.bulkRequest = new BulkRequest(); + if (listener == null) { + throw new IllegalArgumentException(); + } this.bulkRequestHandler = concurrentRequests == 0 ? new SyncBulkRequestHandler(client, listener) : new AsyncBulkRequestHandler(client, listener, concurrentRequests); if (flushInterval != null) { this.scheduler = (ScheduledThreadPoolExecutor) Executors.newScheduledThreadPool(1, - EsExecutors.daemonThreadFactory(client.settings(), - name != null ? "[" + name + "]" : "" + "bulk_processor")); + EsExecutors.daemonThreadFactory(name != null ? "[" + name + "]" : "" + "bulk_processor")); this.scheduler.setExecuteExistingDelayedTasksAfterShutdownPolicy(false); this.scheduler.setContinueExistingPeriodicTasksAfterShutdownPolicy(false); this.scheduledFuture = this.scheduler.scheduleWithFixedDelay(new Flush(), flushInterval.millis(), @@ -68,7 +70,7 @@ public class DefaultBulkProcessor implements BulkProcessor { } } - public static Builder builder(Client client, Listener listener) { + public static Builder builder(ElasticsearchClient client, Listener listener) { if (client == null) { throw new NullPointerException("The client you specified while building a BulkProcessor is null"); } @@ -215,7 +217,7 @@ public class DefaultBulkProcessor implements BulkProcessor { */ public static class Builder { - private final Client client; + private final ElasticsearchClient client; private final Listener listener; @@ -236,7 +238,7 @@ public class DefaultBulkProcessor implements BulkProcessor { * @param client the client * @param listener the listener */ - Builder(Client client, Listener listener) { + Builder(ElasticsearchClient client, Listener listener) { this.client = client; this.listener = listener; } @@ -330,11 +332,11 @@ public class DefaultBulkProcessor implements BulkProcessor { private static class SyncBulkRequestHandler implements BulkRequestHandler { - private final Client client; + private final ElasticsearchClient client; private final DefaultBulkProcessor.Listener listener; - SyncBulkRequestHandler(Client client, DefaultBulkProcessor.Listener listener) { + SyncBulkRequestHandler(ElasticsearchClient client, DefaultBulkProcessor.Listener listener) { this.client = client; this.listener = listener; } @@ -362,7 +364,7 @@ public class DefaultBulkProcessor implements BulkProcessor { private static class AsyncBulkRequestHandler implements BulkRequestHandler { - private final Client client; + private final ElasticsearchClient client; private final DefaultBulkProcessor.Listener listener; @@ -370,7 +372,7 @@ public class DefaultBulkProcessor implements BulkProcessor { private final int concurrentRequests; - private AsyncBulkRequestHandler(Client client, DefaultBulkProcessor.Listener listener, int concurrentRequests) { + private AsyncBulkRequestHandler(ElasticsearchClient client, DefaultBulkProcessor.Listener listener, int concurrentRequests) { this.client = client; this.listener = listener; this.concurrentRequests = concurrentRequests; diff --git a/elx-common/src/test/java/org/xbib/elx/common/test/AliasTest.java b/elx-common/src/test/java/org/xbib/elx/common/test/AliasTest.java index b1b7f95..928dbe6 100644 --- a/elx-common/src/test/java/org/xbib/elx/common/test/AliasTest.java +++ b/elx-common/src/test/java/org/xbib/elx/common/test/AliasTest.java @@ -1,20 +1,20 @@ package org.xbib.elx.common.test; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - import com.carrotsearch.hppc.cursors.ObjectCursor; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesAction; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesAction; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse; +import org.elasticsearch.action.admin.indices.create.CreateIndexAction; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; -import org.elasticsearch.client.Client; +import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.cluster.metadata.AliasAction; import org.elasticsearch.common.Strings; -import org.junit.Test; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import java.util.Collections; import java.util.Iterator; @@ -23,53 +23,58 @@ import java.util.TreeSet; import java.util.regex.Matcher; import java.util.regex.Pattern; -/** - * - */ -public class AliasTest extends TestBase { +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +@ExtendWith(TestExtension.class) +class AliasTest { private static final Logger logger = LogManager.getLogger(AliasTest.class.getName()); + private final TestExtension.Helper helper; + + AliasTest(TestExtension.Helper helper) { + this.helper = helper; + } + @Test - public void testAlias() { - Client client = client("1"); - CreateIndexRequest indexRequest = new CreateIndexRequest("test"); - client.admin().indices().create(indexRequest).actionGet(); - // put alias + void testAlias() { + ElasticsearchClient client = helper.client("1"); + CreateIndexRequest indexRequest = new CreateIndexRequest("test_index"); + client.execute(CreateIndexAction.INSTANCE, indexRequest).actionGet(); IndicesAliasesRequest indicesAliasesRequest = new IndicesAliasesRequest(); - String[] indices = new String[]{"test"}; + String[] indices = new String[]{"test_index"}; String[] aliases = new String[]{"test_alias"}; IndicesAliasesRequest.AliasActions aliasAction = new IndicesAliasesRequest.AliasActions(AliasAction.Type.ADD, indices, aliases); indicesAliasesRequest.addAliasAction(aliasAction); - client.admin().indices().aliases(indicesAliasesRequest).actionGet(); + client.execute(IndicesAliasesAction.INSTANCE, indicesAliasesRequest).actionGet(); // get alias GetAliasesRequest getAliasesRequest = new GetAliasesRequest(Strings.EMPTY_ARRAY); long t0 = System.nanoTime(); - GetAliasesResponse getAliasesResponse = client.admin().indices().getAliases(getAliasesRequest).actionGet(); + GetAliasesResponse getAliasesResponse = client.execute(GetAliasesAction.INSTANCE, getAliasesRequest).actionGet(); long t1 = (System.nanoTime() - t0) / 1000000; logger.info("{} time(ms) = {}", getAliasesResponse.getAliases(), t1); assertTrue(t1 >= 0); } @Test - public void testMostRecentIndex() { - Client client = client("1"); + void testMostRecentIndex() { + ElasticsearchClient client = helper.client("1"); String alias = "test"; CreateIndexRequest indexRequest = new CreateIndexRequest("test20160101"); - client.admin().indices().create(indexRequest).actionGet(); + client.execute(CreateIndexAction.INSTANCE, indexRequest).actionGet(); indexRequest = new CreateIndexRequest("test20160102"); - client.admin().indices().create(indexRequest).actionGet(); + client.execute(CreateIndexAction.INSTANCE, indexRequest).actionGet(); indexRequest = new CreateIndexRequest("test20160103"); - client.admin().indices().create(indexRequest).actionGet(); + client.execute(CreateIndexAction.INSTANCE, indexRequest).actionGet(); IndicesAliasesRequest indicesAliasesRequest = new IndicesAliasesRequest(); - String[] indices = new String[]{"test20160101", "test20160102", "test20160103"}; - String[] aliases = new String[]{alias}; + String[] indices = new String[] { "test20160101", "test20160102", "test20160103" }; + String[] aliases = new String[] { alias }; IndicesAliasesRequest.AliasActions aliasAction = new IndicesAliasesRequest.AliasActions(AliasAction.Type.ADD, indices, aliases); indicesAliasesRequest.addAliasAction(aliasAction); - client.admin().indices().aliases(indicesAliasesRequest).actionGet(); - + client.execute(IndicesAliasesAction.INSTANCE, indicesAliasesRequest).actionGet(); GetAliasesRequest getAliasesRequest = new GetAliasesRequest(); getAliasesRequest.aliases(alias); GetAliasesResponse getAliasesResponse = client.execute(GetAliasesAction.INSTANCE, getAliasesRequest).actionGet(); @@ -89,5 +94,4 @@ public class AliasTest extends TestBase { assertEquals("test20160101", it.next()); logger.info("success: result={}", result); } - } diff --git a/elx-common/src/test/java/org/xbib/elx/common/test/ClusterBlockTest.java b/elx-common/src/test/java/org/xbib/elx/common/test/ClusterBlockTest.java deleted file mode 100644 index dae869f..0000000 --- a/elx-common/src/test/java/org/xbib/elx/common/test/ClusterBlockTest.java +++ /dev/null @@ -1,48 +0,0 @@ -package org.xbib.elx.common.test; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.elasticsearch.action.bulk.BulkRequestBuilder; -import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.client.Client; -import org.elasticsearch.cluster.block.ClusterBlockException; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.junit.Before; -import org.junit.Test; - -public class ClusterBlockTest extends TestBase { - - private static final Logger logger = LogManager.getLogger("test"); - - @Before - public void startNodes() { - try { - setClusterName("test-cluster" + System.getProperty("user.name")); - startNode("1"); - // do not wait for green health state - logger.info("ready"); - } catch (Throwable t) { - logger.error("startNodes failed", t); - } - } - - @Override - protected Settings getNodeSettings() { - return Settings.settingsBuilder() - .put(super.getNodeSettings()) - .put("discovery.zen.minimum_master_nodes", 2) // block until we have two nodes - .build(); - } - - @Test(expected = ClusterBlockException.class) - public void testClusterBlock() throws Exception { - Client client = client("1"); - XContentBuilder builder = XContentFactory.jsonBuilder().startObject().field("field1", "value1").endObject(); - IndexRequestBuilder irb = client.prepareIndex("test", "test", "1").setSource(builder); - BulkRequestBuilder brb = client.prepareBulk(); - brb.add(irb); - brb.execute().actionGet(); - } -} diff --git a/elx-common/src/test/java/org/xbib/elx/common/test/MockExtendedClientProviderTest.java b/elx-common/src/test/java/org/xbib/elx/common/test/MockExtendedClientProviderTest.java index cbe7972..e21817c 100644 --- a/elx-common/src/test/java/org/xbib/elx/common/test/MockExtendedClientProviderTest.java +++ b/elx-common/src/test/java/org/xbib/elx/common/test/MockExtendedClientProviderTest.java @@ -1,18 +1,18 @@ package org.xbib.elx.common.test; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.xbib.elx.common.ClientBuilder; import org.xbib.elx.common.MockExtendedClient; import org.xbib.elx.common.MockExtendedClientProvider; import java.io.IOException; -import static org.junit.Assert.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNotNull; -public class MockExtendedClientProviderTest { +class MockExtendedClientProviderTest { @Test - public void testMockExtendedProvider() throws IOException { + void testMockExtendedProvider() throws IOException { MockExtendedClient client = ClientBuilder.builder().provider(MockExtendedClientProvider.class).build(); assertNotNull(client); } diff --git a/elx-common/src/test/java/org/xbib/elx/common/test/NetworkTest.java b/elx-common/src/test/java/org/xbib/elx/common/test/NetworkTest.java index 7933343..ef44d74 100644 --- a/elx-common/src/test/java/org/xbib/elx/common/test/NetworkTest.java +++ b/elx-common/src/test/java/org/xbib/elx/common/test/NetworkTest.java @@ -2,20 +2,22 @@ package org.xbib.elx.common.test; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.junit.Test; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; import java.net.InetAddress; import java.net.NetworkInterface; import java.util.Collections; import java.util.Enumeration; -public class NetworkTest { +// walk over all found interfaces (this is slow - multicast/pings are performed) +@Disabled +class NetworkTest { private static final Logger logger = LogManager.getLogger(NetworkTest.class); @Test - public void testNetwork() throws Exception { - // walk over all found interfaces (this is slow - multicast/pings are performed) + void testNetwork() throws Exception { Enumeration nets = NetworkInterface.getNetworkInterfaces(); for (NetworkInterface netint : Collections.list(nets)) { System.out.println("checking network interface = " + netint.getName()); diff --git a/elx-common/src/test/java/org/xbib/elx/common/test/SearchTest.java b/elx-common/src/test/java/org/xbib/elx/common/test/SearchTest.java index 6e23f0b..2b82483 100644 --- a/elx-common/src/test/java/org/xbib/elx/common/test/SearchTest.java +++ b/elx-common/src/test/java/org/xbib/elx/common/test/SearchTest.java @@ -1,27 +1,38 @@ package org.xbib.elx.common.test; +import org.elasticsearch.action.admin.indices.refresh.RefreshAction; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.search.SearchRequestBuilder; +import org.elasticsearch.action.search.SearchAction; +import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.client.Client; +import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.sort.SortOrder; -import org.junit.Test; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertTrue; -public class SearchTest extends TestBase { +@ExtendWith(TestExtension.class) +class SearchTest { + + private final TestExtension.Helper helper; + + SearchTest(TestExtension.Helper helper) { + this.helper = helper; + } @Test - public void testSearch() throws Exception { - Client client = client("1"); + void testSearch() throws Exception { + ElasticsearchClient client = helper.client("1"); BulkRequestBuilder builder = new BulkRequestBuilder(client, BulkAction.INSTANCE); - for (int i = 0; i < 1000; i++) { + for (int i = 0; i < 1; i++) { IndexRequest indexRequest = new IndexRequest("pages", "row") .source(XContentFactory.jsonBuilder() .startObject() @@ -39,18 +50,20 @@ public class SearchTest extends TestBase { .endObject()); builder.add(indexRequest); } - client.bulk(builder.request()).actionGet(); - client.admin().indices().refresh(new RefreshRequest()).actionGet(); - - for (int i = 0; i < 100; i++) { + client.execute(BulkAction.INSTANCE, builder.request()).actionGet(); + client.execute(RefreshAction.INSTANCE, new RefreshRequest()).actionGet(); + for (int i = 0; i < 1; i++) { QueryBuilder queryStringBuilder = QueryBuilders.queryStringQuery("rs:" + 1234); - SearchRequestBuilder requestBuilder = client.prepareSearch() - .setIndices("pages") - .setTypes("row") - .setQuery(queryStringBuilder) - .addSort("rowcount", SortOrder.DESC) - .setFrom(i * 10).setSize(10); - SearchResponse searchResponse = requestBuilder.execute().actionGet(); + SearchSourceBuilder searchSource = new SearchSourceBuilder(); + searchSource.query(queryStringBuilder); + searchSource.sort("rowcount", SortOrder.DESC); + searchSource.from(i * 10); + searchSource.size(10); + SearchRequest searchRequest = new SearchRequest(); + searchRequest.indices("pages"); + searchRequest.types("row"); + searchRequest.source(searchSource); + SearchResponse searchResponse = client.execute(SearchAction.INSTANCE, searchRequest).actionGet(); assertTrue(searchResponse.getHits().getTotalHits() > 0); } } diff --git a/elx-common/src/test/java/org/xbib/elx/common/test/SimpleTest.java b/elx-common/src/test/java/org/xbib/elx/common/test/SimpleTest.java index 5948daa..1ee6bfb 100644 --- a/elx-common/src/test/java/org/xbib/elx/common/test/SimpleTest.java +++ b/elx-common/src/test/java/org/xbib/elx/common/test/SimpleTest.java @@ -1,6 +1,6 @@ package org.xbib.elx.common.test; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import org.elasticsearch.action.admin.indices.create.CreateIndexAction; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; @@ -17,43 +17,51 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.junit.Test; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; -public class SimpleTest extends TestBase { +@ExtendWith(TestExtension.class) +class SimpleTest { + + private final TestExtension.Helper helper; + + SimpleTest(TestExtension.Helper helper) { + this.helper = helper; + } @Test - public void test() throws Exception { + void test() throws Exception { try { DeleteIndexRequest deleteIndexRequest = new DeleteIndexRequest().indices("test"); - client("1").execute(DeleteIndexAction.INSTANCE, deleteIndexRequest).actionGet(); + helper.client("1").execute(DeleteIndexAction.INSTANCE, deleteIndexRequest).actionGet(); } catch (IndexNotFoundException e) { // ignore if index not found } Settings indexSettings = Settings.settingsBuilder() - .put(super.getNodeSettings()) + .put(helper.getNodeSettings()) .put("index.analysis.analyzer.default.filter.0", "lowercase") .put("index.analysis.analyzer.default.filter.1", "trim") .put("index.analysis.analyzer.default.tokenizer", "keyword") .build(); CreateIndexRequest createIndexRequest = new CreateIndexRequest(); createIndexRequest.index("test").settings(indexSettings); - client("1").execute(CreateIndexAction.INSTANCE, createIndexRequest).actionGet(); + helper.client("1").execute(CreateIndexAction.INSTANCE, createIndexRequest).actionGet(); IndexRequest indexRequest = new IndexRequest(); indexRequest.index("test").type("test").id("1") .source(XContentFactory.jsonBuilder().startObject().field("field", "1%2fPJJP3JV2C24iDfEu9XpHBaYxXh%2fdHTbmchB35SDznXO2g8Vz4D7GTIvY54iMiX_149c95f02a8").endObject()); - client("1").execute(IndexAction.INSTANCE, indexRequest).actionGet(); + helper.client("1").execute(IndexAction.INSTANCE, indexRequest).actionGet(); RefreshRequest refreshRequest = new RefreshRequest(); refreshRequest.indices("test"); - client("1").execute(RefreshAction.INSTANCE, refreshRequest).actionGet(); + helper.client("1").execute(RefreshAction.INSTANCE, refreshRequest).actionGet(); SearchSourceBuilder builder = new SearchSourceBuilder(); builder.query(QueryBuilders.matchQuery("field", "1%2fPJJP3JV2C24iDfEu9XpHBaYxXh%2fdHTbmchB35SDznXO2g8Vz4D7GTIvY54iMiX_149c95f02a8")); SearchRequest searchRequest = new SearchRequest(); searchRequest.indices("test").types("test"); searchRequest.source(builder); - String doc = client("1").execute(SearchAction.INSTANCE, searchRequest).actionGet() + String doc = helper.client("1").execute(SearchAction.INSTANCE, searchRequest).actionGet() .getHits().getAt(0).getSourceAsString(); assertEquals(doc, "{\"field\":\"1%2fPJJP3JV2C24iDfEu9XpHBaYxXh%2fdHTbmchB35SDznXO2g8Vz4D7GTIvY54iMiX_149c95f02a8\"}"); diff --git a/elx-common/src/test/java/org/xbib/elx/common/test/TestBase.java b/elx-common/src/test/java/org/xbib/elx/common/test/TestBase.java deleted file mode 100644 index 12dc194..0000000 --- a/elx-common/src/test/java/org/xbib/elx/common/test/TestBase.java +++ /dev/null @@ -1,212 +0,0 @@ -package org.xbib.elx.common.test; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.elasticsearch.ElasticsearchTimeoutException; -import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction; -import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; -import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; -import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest; -import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; -import org.elasticsearch.action.admin.cluster.state.ClusterStateAction; -import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; -import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; -import org.elasticsearch.client.support.AbstractClient; -import org.elasticsearch.cluster.health.ClusterHealthStatus; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.InetSocketTransportAddress; -import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.node.Node; -import org.junit.After; -import org.junit.Before; - -import java.io.IOException; -import java.nio.file.FileVisitResult; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.nio.file.SimpleFileVisitor; -import java.nio.file.attribute.BasicFileAttributes; -import java.util.HashMap; -import java.util.Map; -import java.util.Random; - -import static org.elasticsearch.common.settings.Settings.settingsBuilder; - -public class TestBase { - - private static final Logger logger = LogManager.getLogger("test"); - - private static final Random random = new Random(); - - private static final char[] numbersAndLetters = ("0123456789abcdefghijklmnopqrstuvwxyz").toCharArray(); - - private Map nodes = new HashMap<>(); - - private Map clients = new HashMap<>(); - - private String cluster; - - private String host; - - private int port; - - @Before - public void startNodes() { - try { - logger.info("starting"); - setClusterName("test-cluster-" + System.getProperty("user.name")); - startNode("1"); - findNodeAddress(); - try { - ClusterHealthResponse healthResponse = client("1").execute(ClusterHealthAction.INSTANCE, - new ClusterHealthRequest().waitForStatus(ClusterHealthStatus.GREEN) - .timeout(TimeValue.timeValueSeconds(30))).actionGet(); - if (healthResponse != null && healthResponse.isTimedOut()) { - throw new IOException("cluster state is " + healthResponse.getStatus().name() - + ", from here on, everything will fail!"); - } - } catch (ElasticsearchTimeoutException e) { - throw new IOException("cluster does not respond to health request, cowardly refusing to continue"); - } - ClusterStateRequest clusterStateRequest = new ClusterStateRequest().all(); - ClusterStateResponse clusterStateResponse = - client("1").execute(ClusterStateAction.INSTANCE, clusterStateRequest).actionGet(); - logger.info("cluster name = {}", clusterStateResponse.getClusterName().value()); - logger.info("host = {} port = {}", host, port); - - } catch (Throwable t) { - logger.error("startNodes failed", t); - } - } - - @After - public void stopNodes() { - try { - closeNodes(); - } catch (Exception e) { - logger.error("can not close nodes", e); - } finally { - try { - deleteFiles(); - logger.info("data files wiped"); - Thread.sleep(2000L); // let OS commit changes - } catch (IOException e) { - logger.error(e.getMessage(), e); - } catch (InterruptedException e) { - // ignore - } - } - } - - protected void setClusterName(String cluster) { - this.cluster = cluster; - } - - protected String getClusterName() { - return cluster; - } - - protected Settings getTransportSettings() { - return settingsBuilder() - .put("host", host) - .put("port", port) - .put("cluster.name", cluster) - .put("path.home", getHome()) - .build(); - } - - protected Settings getNodeSettings() { - return settingsBuilder() - .put("cluster.name", cluster) - //.put("cluster.routing.schedule", "50ms") - //.put("cluster.routing.allocation.disk.threshold_enabled", false) - //.put("discovery.zen.multicast.enabled", true) - //.put("discovery.zen.multicast.ping_timeout", "5s") - //.put("http.enabled", true) - //.put("threadpool.bulk.size", Runtime.getRuntime().availableProcessors()) - //.put("threadpool.bulk.queue_size", 16 * Runtime.getRuntime().availableProcessors()) // default is 50, too low - //.put("index.number_of_replicas", 0) - .put("path.home", getHome()) - .build(); - } - - protected static String getHome() { - return System.getProperty("path.home", System.getProperty("user.dir")); - } - - protected void startNode(String id) { - buildNode(id).start(); - } - - protected AbstractClient client(String id) { - return clients.get(id); - } - - protected void findNodeAddress() { - NodesInfoRequest nodesInfoRequest = new NodesInfoRequest().transport(true); - NodesInfoResponse response = client("1").admin().cluster().nodesInfo(nodesInfoRequest).actionGet(); - Object obj = response.iterator().next().getTransport().getAddress() - .publishAddress(); - if (obj instanceof InetSocketTransportAddress) { - InetSocketTransportAddress address = (InetSocketTransportAddress) obj; - host = address.address().getHostName(); - port = address.address().getPort(); - } - } - - private Node buildNode(String id) { - Settings nodeSettings = settingsBuilder() - .put(getNodeSettings()) - .put("name", id) - .build(); - Node node = new MockNode(nodeSettings); - AbstractClient client = (AbstractClient) node.client(); - nodes.put(id, node); - clients.put(id, client); - logger.info("clients={}", clients); - return node; - } - - protected String randomString(int len) { - final char[] buf = new char[len]; - final int n = numbersAndLetters.length - 1; - for (int i = 0; i < buf.length; i++) { - buf[i] = numbersAndLetters[random.nextInt(n)]; - } - return new String(buf); - } - - private void closeNodes() { - logger.info("closing all clients"); - for (AbstractClient client : clients.values()) { - client.close(); - } - clients.clear(); - logger.info("closing all nodes"); - for (Node node : nodes.values()) { - if (node != null) { - node.close(); - } - } - nodes.clear(); - logger.info("all nodes closed"); - } - - private static void deleteFiles() throws IOException { - Path directory = Paths.get(getHome() + "/data"); - Files.walkFileTree(directory, new SimpleFileVisitor() { - @Override - public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { - Files.delete(file); - return FileVisitResult.CONTINUE; - } - - @Override - public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException { - Files.delete(dir); - return FileVisitResult.CONTINUE; - } - }); - } -} diff --git a/elx-common/src/test/java/org/xbib/elx/common/test/TestExtension.java b/elx-common/src/test/java/org/xbib/elx/common/test/TestExtension.java new file mode 100644 index 0000000..fdf65e6 --- /dev/null +++ b/elx-common/src/test/java/org/xbib/elx/common/test/TestExtension.java @@ -0,0 +1,216 @@ +package org.xbib.elx.common.test; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.ElasticsearchTimeoutException; +import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction; +import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; +import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; +import org.elasticsearch.action.admin.cluster.node.info.NodesInfoAction; +import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest; +import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; +import org.elasticsearch.action.admin.cluster.state.ClusterStateAction; +import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; +import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; +import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.client.support.AbstractClient; +import org.elasticsearch.cluster.health.ClusterHealthStatus; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.InetSocketTransportAddress; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.node.Node; +import org.junit.jupiter.api.extension.AfterAllCallback; +import org.junit.jupiter.api.extension.BeforeAllCallback; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.api.extension.ParameterContext; +import org.junit.jupiter.api.extension.ParameterResolutionException; +import org.junit.jupiter.api.extension.ParameterResolver; + +import java.io.IOException; +import java.nio.file.FileVisitResult; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.SimpleFileVisitor; +import java.nio.file.attribute.BasicFileAttributes; +import java.util.HashMap; +import java.util.Map; +import java.util.Random; + +import static org.elasticsearch.common.settings.Settings.settingsBuilder; + +public class TestExtension implements ParameterResolver, BeforeAllCallback, AfterAllCallback { + + private static final Logger logger = LogManager.getLogger("test"); + + private static final Random random = new Random(); + + private static final char[] numbersAndLetters = ("0123456789abcdefghijklmnopqrstuvwxyz").toCharArray(); + + private Map nodes = new HashMap<>(); + + private Map clients = new HashMap<>(); + + private String home; + + private String cluster; + + private String host; + + private int port; + + private static final String key = "es-instance"; + + private static final ExtensionContext.Namespace ns = + ExtensionContext.Namespace.create(TestExtension.class); + + @Override + public boolean supportsParameter(ParameterContext parameterContext, ExtensionContext extensionContext) + throws ParameterResolutionException { + return parameterContext.getParameter().getType().equals(Helper.class); + } + + @Override + public Object resolveParameter(ParameterContext parameterContext, ExtensionContext extensionContext) + throws ParameterResolutionException { + return extensionContext.getParent().get().getStore(ns).getOrComputeIfAbsent(key, key -> create()); + } + + @Override + public void beforeAll(ExtensionContext context) throws Exception { + Helper helper = context.getParent().get().getStore(ns).getOrComputeIfAbsent(key, key -> create(), Helper.class); + setHome(System.getProperty("path.home") + "/" + helper.randomString(8)); + setClusterName("test-cluster-" + System.getProperty("user.name")); + logger.info("starting cluster"); + deleteFiles(Paths.get(getHome() + "/data")); + logger.info("data files wiped"); + Thread.sleep(2000L); // let OS commit changes + helper.startNode("1"); + NodesInfoRequest nodesInfoRequest = new NodesInfoRequest().transport(true); + NodesInfoResponse response = helper.client("1"). execute(NodesInfoAction.INSTANCE, nodesInfoRequest).actionGet(); + Object obj = response.iterator().next().getTransport().getAddress() + .publishAddress(); + if (obj instanceof InetSocketTransportAddress) { + InetSocketTransportAddress address = (InetSocketTransportAddress) obj; + host = address.address().getHostName(); + port = address.address().getPort(); + } + try { + ClusterHealthResponse healthResponse = helper.client("1").execute(ClusterHealthAction.INSTANCE, + new ClusterHealthRequest().waitForStatus(ClusterHealthStatus.GREEN) + .timeout(TimeValue.timeValueSeconds(30))).actionGet(); + if (healthResponse != null && healthResponse.isTimedOut()) { + throw new IOException("cluster state is " + healthResponse.getStatus().name() + + ", from here on, everything will fail!"); + } + } catch (ElasticsearchTimeoutException e) { + throw new IOException("cluster does not respond to health request, cowardly refusing to continue"); + } + ClusterStateRequest clusterStateRequest = new ClusterStateRequest().all(); + ClusterStateResponse clusterStateResponse = + helper.client("1").execute(ClusterStateAction.INSTANCE, clusterStateRequest).actionGet(); + logger.info("cluster name = {}", clusterStateResponse.getClusterName().value()); + logger.info("host = {} port = {}", host, port); + } + + @Override + public void afterAll(ExtensionContext context) throws Exception { + closeNodes(); + deleteFiles(Paths.get(getHome() + "/data")); + } + + private void setClusterName(String cluster) { + this.cluster = cluster; + } + + private String getClusterName() { + return cluster; + } + + private void setHome(String home) { + this.home = home; + } + + private String getHome() { + return home; + } + + private void closeNodes() { + logger.info("closing all clients"); + for (AbstractClient client : clients.values()) { + client.close(); + } + clients.clear(); + logger.info("closing all nodes"); + for (Node node : nodes.values()) { + if (node != null) { + node.close(); + } + } + nodes.clear(); + logger.info("all nodes closed"); + } + + private static void deleteFiles(Path directory) throws IOException { + if (Files.exists(directory)) { + Files.walkFileTree(directory, new SimpleFileVisitor() { + @Override + public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { + Files.delete(file); + return FileVisitResult.CONTINUE; + } + + @Override + public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException { + Files.delete(dir); + return FileVisitResult.CONTINUE; + } + }); + } + } + + private Helper create() { + return new Helper(); + } + + class Helper { + + Settings getNodeSettings() { + return settingsBuilder() + .put("cluster.name", getClusterName()) + .put("path.home", getHome()) + .build(); + } + + void startNode(String id) { + buildNode(id).start(); + } + + private Node buildNode(String id) { + Settings nodeSettings = settingsBuilder() + .put(getNodeSettings()) + .put("name", id) + .build(); + Node node = new MockNode(nodeSettings); + AbstractClient client = (AbstractClient) node.client(); + nodes.put(id, node); + clients.put(id, client); + logger.info("clients={}", clients); + return node; + } + + String randomString(int len) { + final char[] buf = new char[len]; + final int n = numbersAndLetters.length - 1; + for (int i = 0; i < buf.length; i++) { + buf[i] = numbersAndLetters[random.nextInt(n)]; + } + return new String(buf); + } + + ElasticsearchClient client(String id) { + return clients.get(id); + } + + } +} diff --git a/elx-common/src/test/java/org/xbib/elx/common/test/WildcardTest.java b/elx-common/src/test/java/org/xbib/elx/common/test/WildcardTest.java index 1bb681c..4f298aa 100644 --- a/elx-common/src/test/java/org/xbib/elx/common/test/WildcardTest.java +++ b/elx-common/src/test/java/org/xbib/elx/common/test/WildcardTest.java @@ -6,53 +6,51 @@ import org.elasticsearch.action.index.IndexAction; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchAction; import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.client.Client; +import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.junit.Test; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import java.io.IOException; -public class WildcardTest extends TestBase { +@ExtendWith(TestExtension.class) +class WildcardTest { - /*protected Settings getNodeSettings() { - return Settings.settingsBuilder() - .put(super.getNodeSettings()) - .put("cluster.routing.allocation.disk.threshold_enabled", false) - .put("discovery.zen.multicast.enabled", false) - .put("http.enabled", false) - .put("index.number_of_shards", 1) - .put("index.number_of_replicas", 0) - .build(); - }*/ + private final TestExtension.Helper helper; - @Test - public void testWildcard() throws Exception { - index(client("1"), "1", "010"); - index(client("1"), "2", "0*0"); - // exact - validateCount(client("1"), QueryBuilders.queryStringQuery("010").defaultField("field"), 1); - validateCount(client("1"), QueryBuilders.queryStringQuery("0\\*0").defaultField("field"), 1); - // pattern - validateCount(client("1"), QueryBuilders.queryStringQuery("0*0").defaultField("field"), 1); // 2? - validateCount(client("1"), QueryBuilders.queryStringQuery("0?0").defaultField("field"), 1); // 2? - validateCount(client("1"), QueryBuilders.queryStringQuery("0**0").defaultField("field"), 1); // 2? - validateCount(client("1"), QueryBuilders.queryStringQuery("0??0").defaultField("field"), 0); - validateCount(client("1"), QueryBuilders.queryStringQuery("*10").defaultField("field"), 1); - validateCount(client("1"), QueryBuilders.queryStringQuery("*1*").defaultField("field"), 1); - validateCount(client("1"), QueryBuilders.queryStringQuery("*\\*0").defaultField("field"), 0); // 1? - validateCount(client("1"), QueryBuilders.queryStringQuery("*\\**").defaultField("field"), 0); // 1? + WildcardTest(TestExtension.Helper helper) { + this.helper = helper; } - private void index(Client client, String id, String fieldValue) throws IOException { + @Test + void testWildcard() throws Exception { + ElasticsearchClient client = helper.client("1"); + index(client, "1", "010"); + index(client, "2", "0*0"); + // exact + validateCount(client, QueryBuilders.queryStringQuery("010").defaultField("field"), 1); + validateCount(client, QueryBuilders.queryStringQuery("0\\*0").defaultField("field"), 1); + // pattern + validateCount(client, QueryBuilders.queryStringQuery("0*0").defaultField("field"), 1); // 2? + validateCount(client, QueryBuilders.queryStringQuery("0?0").defaultField("field"), 1); // 2? + validateCount(client, QueryBuilders.queryStringQuery("0**0").defaultField("field"), 1); // 2? + validateCount(client, QueryBuilders.queryStringQuery("0??0").defaultField("field"), 0); + validateCount(client, QueryBuilders.queryStringQuery("*10").defaultField("field"), 1); + validateCount(client, QueryBuilders.queryStringQuery("*1*").defaultField("field"), 1); + validateCount(client, QueryBuilders.queryStringQuery("*\\*0").defaultField("field"), 0); // 1? + validateCount(client, QueryBuilders.queryStringQuery("*\\**").defaultField("field"), 0); // 1? + } + + private void index(ElasticsearchClient client, String id, String fieldValue) throws IOException { client.execute(IndexAction.INSTANCE, new IndexRequest("index", "type", id) .source(XContentFactory.jsonBuilder().startObject().field("field", fieldValue).endObject())).actionGet(); client.execute(RefreshAction.INSTANCE, new RefreshRequest()).actionGet(); } - private long count(Client client, QueryBuilder queryBuilder) { + private long count(ElasticsearchClient client, QueryBuilder queryBuilder) { SearchSourceBuilder builder = new SearchSourceBuilder(); builder.query(queryBuilder); SearchRequest searchRequest = new SearchRequest(); @@ -62,7 +60,7 @@ public class WildcardTest extends TestBase { return client.execute(SearchAction.INSTANCE, searchRequest).actionGet().getHits().getTotalHits(); } - private void validateCount(Client client, QueryBuilder queryBuilder, long expectedHits) { + private void validateCount(ElasticsearchClient client, QueryBuilder queryBuilder, long expectedHits) { final long actualHits = count(client, queryBuilder); if (actualHits != expectedHits) { throw new RuntimeException("actualHits=" + actualHits + ", expectedHits=" + expectedHits); diff --git a/elx-http/build.gradle b/elx-http/build.gradle new file mode 100644 index 0000000..021d75d --- /dev/null +++ b/elx-http/build.gradle @@ -0,0 +1,4 @@ +dependencies { + compile project(':elx-common') + compile "org.xbib:netty-http-client:${project.property('xbib-netty-http.version')}" +} \ No newline at end of file diff --git a/elx-http/src/main/java/org/xbib/elx/http/ExtendedHttpClient.java b/elx-http/src/main/java/org/xbib/elx/http/ExtendedHttpClient.java new file mode 100644 index 0000000..1f51857 --- /dev/null +++ b/elx-http/src/main/java/org/xbib/elx/http/ExtendedHttpClient.java @@ -0,0 +1,127 @@ +package org.xbib.elx.http; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionFuture; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.GenericAction; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.threadpool.ThreadPool; +import org.xbib.elx.common.AbstractExtendedClient; +import org.xbib.netty.http.client.Client; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.ServiceLoader; + +/** + * Elasticsearch HTTP client. + */ +public class ExtendedHttpClient extends AbstractExtendedClient implements ElasticsearchClient { + + private static final Logger logger = LogManager.getLogger(ExtendedHttpClient.class); + + private Client nettyHttpClient; + + private final ClassLoader classLoader; + + @SuppressWarnings("rawtypes") + private final Map actionMap; + + private String url; + + public ExtendedHttpClient() { + this.classLoader = ExtendedHttpClient.class.getClassLoader(); + this.actionMap = new HashMap<>(); + } + + @Override + @SuppressWarnings({"unchecked", "rawtypes"}) + public ExtendedHttpClient init(Settings settings) throws IOException { + super.init(settings); + if (settings == null) { + return null; + } + this.url = settings.get("url"); + ServiceLoader httpActionServiceLoader = ServiceLoader.load(HttpAction.class, classLoader); + for (HttpAction httpAction : httpActionServiceLoader) { + httpAction.setSettings(settings); + actionMap.put(httpAction.getActionInstance(), httpAction); + } + this.nettyHttpClient = Client.builder().enableDebug().build(); + logger.info("extended HTTP client initialized with {} actions", actionMap.size()); + return this; + } + + public Client internalClient() { + return nettyHttpClient; + } + + @Override + public ElasticsearchClient getClient() { + return this; + } + + @Override + protected ElasticsearchClient createClient(Settings settings) { + return this; + } + + @Override + protected void closeClient() throws IOException { + nettyHttpClient.shutdownGracefully(); + } + + @Override + public > ActionFuture + execute(Action action, Request request) { + PlainActionFuture actionFuture = PlainActionFuture.newFuture(); + execute(action, request, actionFuture); + return actionFuture; + } + + @Override + public > void + execute(Action action, Request request, ActionListener listener) { + doExecute(action, request, listener); + } + + @Override + public > RequestBuilder + prepareExecute(Action action) { + return action.newRequestBuilder(this); + } + + @Override + public ThreadPool threadPool() { + throw new UnsupportedOperationException(); + } + + @SuppressWarnings({"unchecked", "rawtypes"}) + private > + void doExecute(Action action, R request, ActionListener listener) { + HttpAction httpAction = actionMap.get(action); + if (httpAction == null) { + throw new IllegalStateException("failed to find http action [" + action + "] to execute"); + } + try { + HttpActionContext httpActionContext = new HttpActionContext(this, request, url); + if (logger.isDebugEnabled()) { + logger.debug("submitting request {} to URL {}", request, url); + } + httpAction.execute(httpActionContext, listener); + } catch (Exception e) { + logger.error(e.getMessage(), e); + } + } +} diff --git a/elx-http/src/main/java/org/xbib/elx/http/ExtendedHttpClientProvider.java b/elx-http/src/main/java/org/xbib/elx/http/ExtendedHttpClientProvider.java new file mode 100644 index 0000000..e91c923 --- /dev/null +++ b/elx-http/src/main/java/org/xbib/elx/http/ExtendedHttpClientProvider.java @@ -0,0 +1,10 @@ +package org.xbib.elx.http; + +import org.xbib.elx.api.ExtendedClientProvider; + +public class ExtendedHttpClientProvider implements ExtendedClientProvider { + @Override + public ExtendedHttpClient getExtendedClient() { + return new ExtendedHttpClient(); + } +} diff --git a/elx-http/src/main/java/org/xbib/elx/http/HttpAction.java b/elx-http/src/main/java/org/xbib/elx/http/HttpAction.java new file mode 100644 index 0000000..1b6399b --- /dev/null +++ b/elx-http/src/main/java/org/xbib/elx/http/HttpAction.java @@ -0,0 +1,169 @@ +package org.xbib.elx.http; + +import io.netty.buffer.ByteBuf; +import io.netty.handler.codec.http.HttpHeaderNames; +import io.netty.handler.codec.http.HttpMethod; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.ActionFuture; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.GenericAction; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.xbib.elx.http.util.CheckedFunction; +import org.xbib.netty.http.client.Request; +import org.xbib.netty.http.client.RequestBuilder; +import org.xbib.netty.http.client.transport.Transport; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; + +/** + * Base class for HTTP actions. + * + * @param the request type + * @param the response type + */ +public abstract class HttpAction { + + private final Logger logger = LogManager.getLogger(getClass().getName()); + + private static final String APPLICATION_JSON = "application/json"; + + private Settings settings; + + void setSettings(Settings settings) { + this.settings = settings; + } + + public abstract GenericAction getActionInstance(); + + public final ActionFuture execute(HttpActionContext httpActionContext) throws IOException { + PlainActionFuture future = PlainActionFuture.newFuture(); + execute(httpActionContext, future); + return future; + } + + public final void execute(HttpActionContext httpActionContext, ActionListener listener) throws IOException { + try { + ActionRequestValidationException validationException = httpActionContext.getRequest().validate(); + if (validationException != null) { + listener.onFailure(validationException); + return; + } + RequestBuilder httpRequestBuilder = + createHttpRequest(httpActionContext.getUrl(), httpActionContext.getRequest()); + httpRequestBuilder.setUserAgent("elx-http/1.0"); + Request httpRequest = httpRequestBuilder.build(); + if (logger.isTraceEnabled()) { + logger.trace("action = {} request = {}", this.getClass().getName(), httpRequest.toString()); + } + httpRequest.setResponseListener(fullHttpResponse -> { + try { + if (logger.isTraceEnabled()) { + logger.trace("got HTTP response: status code = " + fullHttpResponse.status().code() + + " headers = " + fullHttpResponse.headers().entries() + + " content = " + fullHttpResponse.content().toString(StandardCharsets.UTF_8)); + } + listener.onResponse(parseToResponse(httpActionContext.setHttpResponse(fullHttpResponse))); + } catch (Exception e) { + listener.onFailure(e); + } + }); + Transport transport = httpActionContext.getExtendedHttpClient().internalClient().execute(httpRequest); + httpActionContext.setHttpClientTransport(transport); + if (transport.isFailed()) { + listener.onFailure(transport.getFailure()); + } + } catch (Throwable e) { + if (listener != null) { + listener.onFailure(e); + } + throw new IOException(e); + } + } + + protected RequestBuilder newGetRequest(String url, String path) { + return Request.builder(HttpMethod.GET).url(url).uri(path); + } + + protected RequestBuilder newGetRequest(String url, String path, BytesReference content) { + return newRequest(HttpMethod.GET, url, path, content); + } + + protected RequestBuilder newHeadRequest(String url, String path) { + return newRequest(HttpMethod.HEAD, url, path); + } + + protected RequestBuilder newPostRequest(String url, String path) { + return newRequest(HttpMethod.POST, url, path); + } + + protected RequestBuilder newPostRequest(String url, String path, BytesReference content) { + return newRequest(HttpMethod.POST, url, path, content); + } + + protected RequestBuilder newPostRequest(String url, String path, String content) { + return newRequest(HttpMethod.POST, url, path, content); + } + + protected RequestBuilder newPutRequest(String url, String path) { + return newRequest(HttpMethod.PUT, url, path); + } + + protected RequestBuilder newPutRequest(String url, String path, String content) { + return newRequest(HttpMethod.PUT, url, path, content); + } + + protected RequestBuilder newPutRequest(String url, String path, BytesReference content) { + return newRequest(HttpMethod.PUT, url, path, content); + } + + protected RequestBuilder newDeleteRequest(String url, String path, BytesReference content) { + return newRequest(HttpMethod.DELETE, url, path, content); + } + + protected RequestBuilder newRequest(HttpMethod method, String baseUrl, String path) { + return Request.builder(method).url(baseUrl).uri(path); + } + + protected RequestBuilder newRequest(HttpMethod method, String baseUrl, String path, BytesReference content) { + return Request.builder(method).url(baseUrl).uri(path).content(content.toBytesRef().bytes, APPLICATION_JSON); + } + + protected RequestBuilder newRequest(HttpMethod method, String baseUrl, String path, String content) { + return Request.builder(method).url(baseUrl).uri(path).content(content, APPLICATION_JSON); + } + + protected RequestBuilder newRequest(HttpMethod method, String baseUrl, String path, ByteBuf byteBuf) { + return Request.builder(method).url(baseUrl).uri(path).content(byteBuf, APPLICATION_JSON); + } + + protected T parseToResponse(HttpActionContext httpActionContext) throws IOException { + String mediaType = httpActionContext.getHttpResponse().headers().get(HttpHeaderNames.CONTENT_TYPE); + // strip off "; charset=UTF-8" + int pos = mediaType.indexOf(";"); + mediaType = pos >= 0 ? mediaType.substring(0, pos) : mediaType; + XContentType xContentType = XContentType.fromRestContentType(mediaType); + if (xContentType == null) { + throw new IllegalStateException("unsupported content-type: " + mediaType); + } + String body = httpActionContext.getHttpResponse().content().toString(StandardCharsets.UTF_8); + T t; + try (XContentParser parser = xContentType.xContent().createParser(body)) { + t = entityParser().apply(parser); + } + return t; + } + + protected abstract RequestBuilder createHttpRequest(String baseUrl, R request) throws IOException; + + protected abstract CheckedFunction entityParser(); + +} diff --git a/elx-http/src/main/java/org/xbib/elx/http/HttpActionContext.java b/elx-http/src/main/java/org/xbib/elx/http/HttpActionContext.java new file mode 100644 index 0000000..0a0abeb --- /dev/null +++ b/elx-http/src/main/java/org/xbib/elx/http/HttpActionContext.java @@ -0,0 +1,60 @@ +package org.xbib.elx.http; + +import io.netty.handler.codec.http.FullHttpResponse; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionResponse; +import org.xbib.netty.http.client.transport.Transport; + +/** + * HTTP action context. + * + * @param request type + * @param response type + */ +public class HttpActionContext { + + private final ExtendedHttpClient extendedHttpClient; + + private final R request; + + private final String url; + + private Transport httpClientTransport; + + private FullHttpResponse httpResponse; + + HttpActionContext(ExtendedHttpClient extendedHttpClient, R request, String url) { + this.extendedHttpClient = extendedHttpClient; + this.request = request; + this.url = url; + } + + public ExtendedHttpClient getExtendedHttpClient() { + return extendedHttpClient; + } + + public R getRequest() { + return request; + } + + public String getUrl() { + return url; + } + + public void setHttpClientTransport(Transport httpClientTransport) { + this.httpClientTransport = httpClientTransport; + } + + public Transport getHttpClientTransport() { + return httpClientTransport; + } + + public HttpActionContext setHttpResponse(FullHttpResponse fullHttpResponse) { + this.httpResponse = fullHttpResponse; + return this; + } + + public FullHttpResponse getHttpResponse() { + return httpResponse; + } +} diff --git a/elx-http/src/main/java/org/xbib/elx/http/action/get/HttpGetAction.java b/elx-http/src/main/java/org/xbib/elx/http/action/get/HttpGetAction.java new file mode 100644 index 0000000..dbb9e53 --- /dev/null +++ b/elx-http/src/main/java/org/xbib/elx/http/action/get/HttpGetAction.java @@ -0,0 +1,179 @@ +package org.xbib.elx.http.action.get; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.GenericAction; +import org.elasticsearch.action.get.GetAction; +import org.elasticsearch.action.get.GetRequest; +import org.elasticsearch.action.get.GetResponse; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentLocation; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.get.GetField; +import org.elasticsearch.index.get.GetResult; +import org.elasticsearch.index.mapper.internal.SourceFieldMapper; +import org.xbib.elx.http.util.CheckedFunction; +import org.xbib.elx.http.HttpAction; +import org.xbib.netty.http.client.RequestBuilder; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.function.Supplier; + +public class HttpGetAction extends HttpAction { + + @Override + public GenericAction getActionInstance() { + return GetAction.INSTANCE; + } + + @Override + protected RequestBuilder createHttpRequest(String url, GetRequest request) { + return newGetRequest(url, request.index() + "/" + request.type() + "/" + request.id()); + } + + @Override + protected CheckedFunction entityParser() { + return this::fromXContent; + } + + public GetResponse fromXContent(XContentParser parser) throws IOException { + GetResult getResult = Helper.fromXContent(parser); + if (getResult.getIndex() == null && getResult.getType() == null && getResult.getId() == null) { + throw new ElasticsearchException(parser.getTokenLocation() + ":" + + String.format(Locale.ROOT, "Missing required fields [%s,%s,%s]", "_index", "_type", "_id")); + } + return new GetResponse(getResult); + } + + static class Helper { + + private static final Logger logger = LogManager.getLogger("helper"); + + static final String _INDEX = "_index"; + static final String _TYPE = "_type"; + static final String _ID = "_id"; + private static final String _VERSION = "_version"; + private static final String FOUND = "found"; + private static final String FIELDS = "fields"; + + static void ensureExpectedToken(XContentParser.Token expected, XContentParser.Token actual, Supplier location) { + if (actual != expected) { + String message = "Failed to parse object: expecting token of type [%s] but found [%s]"; + throw new ElasticsearchException(location.get() + ":" + String.format(Locale.ROOT, message, expected, actual)); + } + } + + static GetResult fromXContent(XContentParser parser) throws IOException { + XContentParser.Token token = parser.nextToken(); + ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser::getTokenLocation); + return fromXContentEmbedded(parser); + } + + static GetResult fromXContentEmbedded(XContentParser parser) throws IOException { + XContentParser.Token token = parser.nextToken(); + ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation); + return fromXContentEmbedded(parser, null, null, null); + } + + static GetResult fromXContentEmbedded(XContentParser parser, String index, String type, String id) throws IOException { + XContentParser.Token token = parser.currentToken(); + ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation); + String currentFieldName = parser.currentName(); + long version = -1; + Boolean found = null; + BytesReference source = null; + Map fields = new HashMap<>(); + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (token.isValue()) { + if (_INDEX.equals(currentFieldName)) { + index = parser.text(); + } else if (_TYPE.equals(currentFieldName)) { + type = parser.text(); + } else if (_ID.equals(currentFieldName)) { + id = parser.text(); + } else if (_VERSION.equals(currentFieldName)) { + version = parser.longValue(); + } else if (FOUND.equals(currentFieldName)) { + found = parser.booleanValue(); + } else { + fields.put(currentFieldName, new GetField(currentFieldName, Collections.singletonList(parser.objectText()))); + } + } else if (token == XContentParser.Token.START_OBJECT) { + if (SourceFieldMapper.NAME.equals(currentFieldName)) { + try (XContentBuilder builder = XContentBuilder.builder(parser.contentType().xContent())) { + builder.copyCurrentStructure(parser); + source = builder.bytes(); + } + } else if (FIELDS.equals(currentFieldName)) { + while(parser.nextToken() != XContentParser.Token.END_OBJECT) { + GetField getField = getFieldFromXContent(parser); + fields.put(getField.getName(), getField); + } + } else { + parser.skipChildren(); + } + } else if (token == XContentParser.Token.START_ARRAY) { + if ("_ignored".equals(currentFieldName)) { + fields.put(currentFieldName, new GetField(currentFieldName, parser.list())); + } else { + parser.skipChildren(); + } + } + } + return new GetResult(index, type, id, version, found, source, fields); + } + + static GetField getFieldFromXContent(XContentParser parser) throws IOException { + ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser::getTokenLocation); + String fieldName = parser.currentName(); + XContentParser.Token token = parser.nextToken(); + ensureExpectedToken(XContentParser.Token.START_ARRAY, token, parser::getTokenLocation); + List values = new ArrayList<>(); + while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + values.add(parseFieldsValue(parser)); + } + return new GetField(fieldName, values); + } + + static Object parseFieldsValue(XContentParser parser) throws IOException { + XContentParser.Token token = parser.currentToken(); + Object value = null; + if (token == XContentParser.Token.VALUE_STRING) { + //binary values will be parsed back and returned as base64 strings when reading from json and yaml + value = parser.text(); + } else if (token == XContentParser.Token.VALUE_NUMBER) { + value = parser.numberValue(); + } else if (token == XContentParser.Token.VALUE_BOOLEAN) { + value = parser.booleanValue(); + } else if (token == XContentParser.Token.VALUE_EMBEDDED_OBJECT) { + //binary values will be parsed back and returned as BytesArray when reading from cbor and smile + value = new BytesArray(parser.binaryValue()); + } else if (token == XContentParser.Token.VALUE_NULL) { + value = null; + } else if (token == XContentParser.Token.START_OBJECT) { + value = parser.mapOrdered(); + } else if (token == XContentParser.Token.START_ARRAY) { + value = parser.listOrderedMap(); + } else { + throwUnknownToken(token, parser.getTokenLocation()); + } + return value; + } + + static void throwUnknownToken(XContentParser.Token token, XContentLocation location) { + String message = "Failed to parse object: unexpected token [%s] found"; + throw new ElasticsearchException(location + ":" + String.format(Locale.ROOT, message, token)); + } + } +} diff --git a/elx-http/src/main/java/org/xbib/elx/http/action/get/HttpMultiGetAction.java b/elx-http/src/main/java/org/xbib/elx/http/action/get/HttpMultiGetAction.java new file mode 100644 index 0000000..3e1c6d1 --- /dev/null +++ b/elx-http/src/main/java/org/xbib/elx/http/action/get/HttpMultiGetAction.java @@ -0,0 +1,255 @@ +package org.xbib.elx.http.action.get; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.GenericAction; +import org.elasticsearch.action.get.GetResponse; +import org.elasticsearch.action.get.MultiGetAction; +import org.elasticsearch.action.get.MultiGetItemResponse; +import org.elasticsearch.action.get.MultiGetRequest; +import org.elasticsearch.action.get.MultiGetResponse; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.ParseFieldMatcher; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentLocation; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.get.GetField; +import org.elasticsearch.index.get.GetResult; +import org.elasticsearch.index.mapper.internal.SourceFieldMapper; +import org.xbib.elx.http.HttpAction; +import org.xbib.elx.http.action.search.HttpSearchAction; +import org.xbib.elx.http.util.CheckedFunction; +import org.xbib.netty.http.client.RequestBuilder; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.function.Supplier; + +public class HttpMultiGetAction extends HttpAction { + + @Override + public GenericAction getActionInstance() { + return MultiGetAction.INSTANCE; + } + + @Override + protected RequestBuilder createHttpRequest(String url, MultiGetRequest request) throws IOException { + XContentBuilder builder = XContentFactory.jsonBuilder().startObject().startArray("docs"); + for (MultiGetRequest.Item item : request.getItems()) { + builder.startObject() + .field("_index", item.index()) + .field("_type", item.type()) + .field("_id", item.id()); + if (item.fields() != null) { + builder.array("fields", item.fields()); + } + builder.endObject(); + } + builder.endArray().endObject(); + return newPostRequest(url, "_mget", builder.bytes()); + } + + @Override + protected CheckedFunction entityParser() { + return Helper::fromXContent; + } + + static class Helper { + + private static final ParseField INDEX = new ParseField("_index"); + private static final ParseField TYPE = new ParseField("_type"); + private static final ParseField ID = new ParseField("_id"); + private static final ParseField ERROR = new ParseField("error"); + private static final ParseField DOCS = new ParseField("docs"); + + static final String _INDEX = "_index"; + static final String _TYPE = "_type"; + static final String _ID = "_id"; + private static final String _VERSION = "_version"; + private static final String FOUND = "found"; + private static final String FIELDS = "fields"; + + + static MultiGetResponse fromXContent(XContentParser parser) throws IOException { + String currentFieldName = null; + List items = new ArrayList<>(); + for (XContentParser.Token token = parser.nextToken(); token != XContentParser.Token.END_OBJECT; token = parser.nextToken()) { + switch (token) { + case FIELD_NAME: + currentFieldName = parser.currentName(); + break; + case START_ARRAY: + if (DOCS.getPreferredName().equals(currentFieldName)) { + for (token = parser.nextToken(); token != XContentParser.Token.END_ARRAY; token = parser.nextToken()) { + if (token == XContentParser.Token.START_OBJECT) { + items.add(parseItem(parser)); + } + } + } + break; + default: + break; + } + } + return new MultiGetResponse(items.toArray(new MultiGetItemResponse[0])); + } + + private static MultiGetItemResponse parseItem(XContentParser parser) throws IOException { + String currentFieldName = null; + String index = null; + String type = null; + String id = null; + ElasticsearchException exception = null; + GetResult getResult = null; + ParseFieldMatcher matcher = new ParseFieldMatcher(Settings.EMPTY); + for (XContentParser.Token token = parser.nextToken(); token != XContentParser.Token.END_OBJECT; token = parser.nextToken()) { + switch (token) { + case FIELD_NAME: + currentFieldName = parser.currentName(); + getResult = fromXContentEmbedded(parser, index, type, id); + break; + case VALUE_STRING: + if (matcher.match(currentFieldName, INDEX)) { + index = parser.text(); + } else if (matcher.match(currentFieldName, TYPE)) { + type = parser.text(); + } else if (matcher.match(currentFieldName, ID)) { + id = parser.text(); + } + break; + case START_OBJECT: + if (matcher.match(currentFieldName, ERROR)) { + exception = HttpSearchAction.Helper.elasticsearchExceptionFromXContent(parser); + } + break; + default: + // If unknown tokens are encounter then these should be ignored, because + // this is parsing logic on the client side. + break; + } + if (getResult != null) { + break; + } + } + if (exception != null) { + return new MultiGetItemResponse(null, new MultiGetResponse.Failure(index, type, id, exception)); + } else { + GetResponse getResponse = new GetResponse(getResult); + return new MultiGetItemResponse(getResponse, null); + } + } + + static void ensureExpectedToken(XContentParser.Token expected, XContentParser.Token actual, Supplier location) { + if (actual != expected) { + String message = "Failed to parse object: expecting token of type [%s] but found [%s]"; + throw new ElasticsearchException(location.get() + ":" + String.format(Locale.ROOT, message, expected, actual)); + } + } + + static GetResult fromXContentEmbedded(XContentParser parser) throws IOException { + XContentParser.Token token = parser.nextToken(); + ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation); + return fromXContentEmbedded(parser, null, null, null); + } + + static GetResult fromXContentEmbedded(XContentParser parser, String index, String type, String id) throws IOException { + XContentParser.Token token = parser.currentToken(); + ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation); + String currentFieldName = parser.currentName(); + long version = -1; + Boolean found = null; + BytesReference source = null; + Map fields = new HashMap<>(); + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (token.isValue()) { + if (_INDEX.equals(currentFieldName)) { + index = parser.text(); + } else if (_TYPE.equals(currentFieldName)) { + type = parser.text(); + } else if (_ID.equals(currentFieldName)) { + id = parser.text(); + } else if (_VERSION.equals(currentFieldName)) { + version = parser.longValue(); + } else if (FOUND.equals(currentFieldName)) { + found = parser.booleanValue(); + } else { + fields.put(currentFieldName, new GetField(currentFieldName, Collections.singletonList(parser.objectText()))); + } + } else if (token == XContentParser.Token.START_OBJECT) { + if (SourceFieldMapper.NAME.equals(currentFieldName)) { + try (XContentBuilder builder = XContentBuilder.builder(parser.contentType().xContent())) { + builder.copyCurrentStructure(parser); + source = builder.bytes(); + } + } else if (FIELDS.equals(currentFieldName)) { + while(parser.nextToken() != XContentParser.Token.END_OBJECT) { + GetField getField = getFieldFromXContent(parser); + fields.put(getField.getName(), getField); + } + } else { + parser.skipChildren(); + } + } else if (token == XContentParser.Token.START_ARRAY) { + if ("_ignored".equals(currentFieldName)) { + fields.put(currentFieldName, new GetField(currentFieldName, parser.list())); + } else { + parser.skipChildren(); + } + } + } + return new GetResult(index, type, id, version, found, source, fields); + } + + static GetField getFieldFromXContent(XContentParser parser) throws IOException { + ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser::getTokenLocation); + String fieldName = parser.currentName(); + XContentParser.Token token = parser.nextToken(); + ensureExpectedToken(XContentParser.Token.START_ARRAY, token, parser::getTokenLocation); + List values = new ArrayList<>(); + while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + values.add(parseFieldsValue(parser)); + } + return new GetField(fieldName, values); + } + + static Object parseFieldsValue(XContentParser parser) throws IOException { + XContentParser.Token token = parser.currentToken(); + Object value = null; + if (token == XContentParser.Token.VALUE_STRING) { + //binary values will be parsed back and returned as base64 strings when reading from json and yaml + value = parser.text(); + } else if (token == XContentParser.Token.VALUE_NUMBER) { + value = parser.numberValue(); + } else if (token == XContentParser.Token.VALUE_BOOLEAN) { + value = parser.booleanValue(); + } else if (token == XContentParser.Token.VALUE_EMBEDDED_OBJECT) { + //binary values will be parsed back and returned as BytesArray when reading from cbor and smile + value = new BytesArray(parser.binaryValue()); + } else if (token == XContentParser.Token.VALUE_NULL) { + value = null; + } else if (token == XContentParser.Token.START_OBJECT) { + value = parser.mapOrdered(); + } else if (token == XContentParser.Token.START_ARRAY) { + value = parser.listOrderedMap(); + } else { + throwUnknownToken(token, parser.getTokenLocation()); + } + return value; + } + + static void throwUnknownToken(XContentParser.Token token, XContentLocation location) { + String message = "Failed to parse object: unexpected token [%s] found"; + throw new ElasticsearchException(location + ":" + String.format(Locale.ROOT, message, token)); + } + } +} diff --git a/elx-http/src/main/java/org/xbib/elx/http/action/search/HttpSearchAction.java b/elx-http/src/main/java/org/xbib/elx/http/action/search/HttpSearchAction.java new file mode 100644 index 0000000..58a5040 --- /dev/null +++ b/elx-http/src/main/java/org/xbib/elx/http/action/search/HttpSearchAction.java @@ -0,0 +1,597 @@ +package org.xbib.elx.http.action.search; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.lucene.util.SetOnce; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.search.SearchAction; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.search.ShardSearchFailure; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.ParseFieldMatcher; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.text.Text; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.internal.SourceFieldMapper; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.SearchHitField; +import org.elasticsearch.search.SearchShardTarget; +import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.InternalAggregations; +import org.elasticsearch.search.internal.InternalSearchHit; +import org.elasticsearch.search.internal.InternalSearchHits; +import org.elasticsearch.search.internal.InternalSearchResponse; +import org.elasticsearch.search.suggest.Suggest; +import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry; +import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry.Option; +import org.xbib.elx.http.util.CheckedFunction; +import org.xbib.elx.http.HttpAction; +import org.xbib.elx.http.util.ObjectParser; +import org.xbib.elx.http.util.XContentParserUtils; +import org.xbib.netty.http.client.RequestBuilder; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; + +import static org.xbib.elx.http.util.ObjectParser.ValueType.STRING; +import static org.xbib.elx.http.util.XContentParserUtils.ensureExpectedToken; + +public class HttpSearchAction extends HttpAction { + + @Override + public SearchAction getActionInstance() { + return SearchAction.INSTANCE; + } + + @Override + protected RequestBuilder createHttpRequest(String url, SearchRequest request) { + String index = request.indices() != null ? "/" + String.join(",", request.indices()) : ""; + return newPostRequest(url, index + "/_search", request.source()); + } + + @Override + protected CheckedFunction entityParser() { + return Helper::fromXContent; + } + + public static class Helper { + + private static final Logger logger = LogManager.getLogger("helper"); + + private static final ParseField SCROLL_ID = new ParseField("_scroll_id"); + private static final ParseField TOOK = new ParseField("took"); + private static final ParseField TIMED_OUT = new ParseField("timed_out"); + private static final ParseField TERMINATED_EARLY = new ParseField("terminated_early"); + + private static final ParseField _SHARDS_FIELD = new ParseField("_shards"); + private static final ParseField TOTAL_FIELD = new ParseField("total"); + private static final ParseField SUCCESSFUL_FIELD = new ParseField("successful"); + private static final ParseField SKIPPED_FIELD = new ParseField("skipped"); + private static final ParseField FAILED_FIELD = new ParseField("failed"); + private static final ParseField FAILURES_FIELD = new ParseField("failures"); + + private static final String HITS = "hits"; + + private static final String TOTAL = "total"; + private static final String MAX_SCORE = "max_score"; + + private static final String _NESTED = "_nested"; + + private static final String _INDEX = "_index"; + private static final String _TYPE = "_type"; + private static final String _ID = "_id"; + private static final String _VERSION = "_version"; + private static final String _SCORE = "_score"; + private static final String FIELDS = "fields"; + private static final String HIGHLIGHT = "highlight"; + private static final String SORT = "sort"; + private static final String MATCHED_QUERIES = "matched_queries"; + private static final String _EXPLANATION = "_explanation"; + private static final String INNER_HITS = "inner_hits"; + private static final String _SHARD = "_shard"; + private static final String _NODE = "_node"; + + private static final String AGGREGATIONS_FIELD = "aggregations"; + + private static final String TYPED_KEYS_DELIMITER = "#"; + + private static final String SUGGEST_NAME = "suggest"; + + private static final String REASON_FIELD = "reason"; + private static final String NODE_FIELD = "node"; + private static final String INDEX_FIELD = "index"; + private static final String SHARD_FIELD = "shard"; + + private static final String TYPE = "type"; + private static final String REASON = "reason"; + private static final String CAUSED_BY = "caused_by"; + private static final String STACK_TRACE = "stack_trace"; + private static final String HEADER = "header"; + private static final String ROOT_CAUSE = "root_cause"; + + private static ObjectParser, Void> MAP_PARSER = + new ObjectParser<>("innerHitParser", true, HashMap::new); + + + static { + declareInnerHitsParseFields(MAP_PARSER); + } + + public static SearchResponse fromXContent(XContentParser parser) throws IOException { + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation); + parser.nextToken(); + return innerFromXContent(parser); + } + + static SearchResponse innerFromXContent(XContentParser parser) throws IOException { + ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser::getTokenLocation); + String currentFieldName = parser.currentName(); + InternalSearchHits hits = null; + InternalAggregations aggs = null; + Suggest suggest = null; + boolean timedOut = false; + Boolean terminatedEarly = null; + long tookInMillis = -1; + int successfulShards = -1; + int totalShards = -1; + String scrollId = null; + List failures = new ArrayList<>(); + ParseFieldMatcher matcher = new ParseFieldMatcher(Settings.EMPTY); + for (XContentParser.Token token = parser.nextToken(); token != XContentParser.Token.END_OBJECT; token = parser.nextToken()) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (token.isValue()) { + if (matcher.match(currentFieldName, SCROLL_ID)) { + scrollId = parser.text(); + } else if (matcher.match(currentFieldName, TOOK)) { + tookInMillis = parser.longValue(); + } else if (matcher.match(currentFieldName, TIMED_OUT)) { + timedOut = parser.booleanValue(); + } else if (matcher.match(currentFieldName, TERMINATED_EARLY)) { + terminatedEarly = parser.booleanValue(); + } else { + parser.skipChildren(); + } + } else if (token == XContentParser.Token.START_OBJECT) { + if (HITS.equals(currentFieldName)) { + logger.debug("searchHitsFromXContent"); + hits = searchHitsFromXContent(parser); + } else if (AGGREGATIONS_FIELD.equals(currentFieldName)) { + aggs = aggregationsFromXContent(parser); + } else if (SUGGEST_NAME.equals(currentFieldName)) { + suggest = suggestFromXContent(parser); + } else if (matcher.match(currentFieldName, _SHARDS_FIELD)) { + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (token.isValue()) { + if (matcher.match(currentFieldName, FAILED_FIELD)) { + parser.intValue(); // we don't need it but need to consume it + } else if (matcher.match(currentFieldName, SUCCESSFUL_FIELD)) { + successfulShards = parser.intValue(); + } else if (matcher.match(currentFieldName, TOTAL_FIELD)) { + totalShards = parser.intValue(); + } else { + parser.skipChildren(); + } + } else if (token == XContentParser.Token.START_ARRAY) { + if (matcher.match(currentFieldName, FAILURES_FIELD)) { + while((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + failures.add(shardSearchFailureFromXContent(parser)); + } + } else { + parser.skipChildren(); + } + } else { + parser.skipChildren(); + } + } + } else { + parser.skipChildren(); + } + } + } + // TODO profileResults + InternalSearchResponse internalResponse = new InternalSearchResponse(hits, aggs, suggest, + null, timedOut, terminatedEarly); + return new SearchResponse(internalResponse, scrollId, totalShards, successfulShards, tookInMillis, + failures.toArray(ShardSearchFailure.EMPTY_ARRAY)); + } + + static InternalSearchHits searchHitsFromXContent(XContentParser parser) throws IOException { + if (parser.currentToken() != XContentParser.Token.START_OBJECT) { + parser.nextToken(); + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation); + } + XContentParser.Token token = parser.currentToken(); + String currentFieldName = null; + List hits = new ArrayList<>(); + long totalHits = -1L; + float maxScore = 0f; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (token.isValue()) { + if (TOTAL.equals(currentFieldName)) { + totalHits = parser.longValue(); + } else if (MAX_SCORE.equals(currentFieldName)) { + maxScore = parser.floatValue(); + } + } else if (token == XContentParser.Token.VALUE_NULL) { + if (MAX_SCORE.equals(currentFieldName)) { + maxScore = Float.NaN; // NaN gets rendered as null-field + } + } else if (token == XContentParser.Token.START_ARRAY) { + if (HITS.equals(currentFieldName)) { + while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + logger.debug("searchHitFromXContent"); + hits.add(searchHitFromXContent(parser)); + } + } else { + parser.skipChildren(); + } + } + } + InternalSearchHit[] internalSearchHits = hits.toArray(new InternalSearchHit[0]); + return new InternalSearchHits(internalSearchHits, totalHits, maxScore); + } + + static InternalSearchHit searchHitFromXContent(XContentParser parser) { + return createFromMap(MAP_PARSER.apply(parser, null)); + } + + static InternalSearchHit createFromMap(Map values) { + logger.debug("values = {}", values); + String id = get(_ID, values, null); + Text type = get(_TYPE, values, null); + InternalSearchHit.InternalNestedIdentity nestedIdentity = get(_NESTED, values, null); + Map fields = get(FIELDS, values, Collections.emptyMap()); + InternalSearchHit searchHit = new InternalSearchHit(-1, id, type, nestedIdentity, fields); + String index = get(_INDEX, values, null); + ShardId shardId = get(_SHARD, values, null); + String nodeId = get(_NODE, values, null); + if (shardId != null && nodeId != null) { + assert shardId.index().getName().equals(index); + searchHit.shard(new SearchShardTarget(nodeId, index, shardId.id())); + } + searchHit.score(get(_SCORE, values, Float.NaN)); + searchHit.version(get(_VERSION, values, -1L)); + searchHit.sortValues(get(SORT, values, new Object[0])); + searchHit.highlightFields(get(HIGHLIGHT, values, null)); + searchHit.sourceRef(get(SourceFieldMapper.NAME, values, null)); + searchHit.explanation(get(_EXPLANATION, values, null)); + searchHit.setInnerHits(get(INNER_HITS, values, null)); + List matchedQueries = get(MATCHED_QUERIES, values, null); + if (matchedQueries != null) { + searchHit.matchedQueries(matchedQueries.toArray(new String[0])); + } + return searchHit; + } + + @SuppressWarnings("unchecked") + private static T get(String key, Map map, T defaultValue) { + return (T) map.getOrDefault(key, defaultValue); + } + + static InternalAggregations aggregationsFromXContent(XContentParser parser) throws IOException { + final List aggregations = new ArrayList<>(); + XContentParser.Token token; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.START_OBJECT) { + SetOnce typedAgg = new SetOnce<>(); + String currentField = parser.currentName(); + XContentParserUtils.parseTypedKeysObject(parser, TYPED_KEYS_DELIMITER, InternalAggregation.class, typedAgg::set); + if (typedAgg.get() != null) { + aggregations.add(typedAgg.get()); + } else { + throw new ElasticsearchException(parser.getTokenLocation() + ":" + + String.format(Locale.ROOT, "Could not parse aggregation keyed as [%s]", currentField)); + } + } + } + return new InternalAggregations(aggregations); + } + + static Suggest suggestFromXContent(XContentParser parser) throws IOException { + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation); + List>> suggestions = new ArrayList<>(); + while ((parser.nextToken()) != XContentParser.Token.END_OBJECT) { + ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser::getTokenLocation); + String currentField = parser.currentName(); + ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.nextToken(), parser::getTokenLocation); + Suggest.Suggestion> suggestion = suggestionFromXContent(parser); + if (suggestion != null) { + suggestions.add(suggestion); + } else { + throw new ElasticsearchException(parser.getTokenLocation() + ":" + + String.format(Locale.ROOT, "Could not parse suggestion keyed as [%s]", currentField)); + } + } + return new Suggest(suggestions); + } + + static Suggest.Suggestion> suggestionFromXContent(XContentParser parser) throws IOException { + ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser::getTokenLocation); + SetOnce suggestion = new SetOnce<>(); + XContentParserUtils.parseTypedKeysObject(parser, "#", Suggest.Suggestion.class, suggestion::set); + return suggestion.get(); + } + + static ShardSearchFailure shardSearchFailureFromXContent(XContentParser parser) throws IOException { + XContentParser.Token token; + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation); + String currentFieldName = null; + int shardId = -1; + String indexName = null; + String nodeId = null; + ElasticsearchException exception = null; + while((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (token.isValue()) { + if (SHARD_FIELD.equals(currentFieldName)) { + shardId = parser.intValue(); + } else if (INDEX_FIELD.equals(currentFieldName)) { + indexName = parser.text(); + } else if (NODE_FIELD.equals(currentFieldName)) { + nodeId = parser.text(); + } else { + parser.skipChildren(); + } + } else if (token == XContentParser.Token.START_OBJECT) { + if (REASON_FIELD.equals(currentFieldName)) { + exception = elasticsearchExceptionFromXContent(parser); + } else { + parser.skipChildren(); + } + } else { + parser.skipChildren(); + } + } + SearchShardTarget searchShardTarget = null; + if (nodeId != null) { + searchShardTarget = new SearchShardTarget(nodeId, indexName, shardId); + } + return new ShardSearchFailure(exception, searchShardTarget); + } + + public static ElasticsearchException elasticsearchExceptionFromXContent(XContentParser parser) throws IOException { + XContentParser.Token token = parser.nextToken(); + ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation); + return elasticsearchExceptionFromXContent(parser, false); + } + + static ElasticsearchException elasticsearchExceptionFromXContent(XContentParser parser, boolean parseRootCauses) + throws IOException { + XContentParser.Token token = parser.currentToken(); + ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation); + + String type = null, reason = null, stack = null; + ElasticsearchException cause = null; + Map> metadata = new HashMap<>(); + Map> headers = new HashMap<>(); + List rootCauses = new ArrayList<>(); + + for (; token == XContentParser.Token.FIELD_NAME; token = parser.nextToken()) { + String currentFieldName = parser.currentName(); + token = parser.nextToken(); + + if (token.isValue()) { + if (TYPE.equals(currentFieldName)) { + type = parser.text(); + } else if (REASON.equals(currentFieldName)) { + reason = parser.text(); + } else if (STACK_TRACE.equals(currentFieldName)) { + stack = parser.text(); + } else if (token == XContentParser.Token.VALUE_STRING) { + metadata.put(currentFieldName, Collections.singletonList(parser.text())); + } + } else if (token == XContentParser.Token.START_OBJECT) { + if (CAUSED_BY.equals(currentFieldName)) { + cause = elasticsearchExceptionFromXContent(parser); + } else if (HEADER.equals(currentFieldName)) { + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else { + List values = headers.getOrDefault(currentFieldName, new ArrayList<>()); + if (token == XContentParser.Token.VALUE_STRING) { + values.add(parser.text()); + } else if (token == XContentParser.Token.START_ARRAY) { + while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + if (token == XContentParser.Token.VALUE_STRING) { + values.add(parser.text()); + } else { + parser.skipChildren(); + } + } + } else if (token == XContentParser.Token.START_OBJECT) { + parser.skipChildren(); + } + headers.put(currentFieldName, values); + } + } + } else { + parser.skipChildren(); + } + } else if (token == XContentParser.Token.START_ARRAY) { + if (parseRootCauses && ROOT_CAUSE.equals(currentFieldName)) { + while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + rootCauses.add(elasticsearchExceptionFromXContent(parser)); + } + } else { + List values = new ArrayList<>(); + while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + if (token == XContentParser.Token.VALUE_STRING) { + values.add(parser.text()); + } else { + parser.skipChildren(); + } + } + if (values.size() > 0) { + if (metadata.containsKey(currentFieldName)) { + values.addAll(metadata.get(currentFieldName)); + } + metadata.put(currentFieldName, values); + } + } + } + } + ElasticsearchException e = new ElasticsearchException(buildMessage(type, reason, stack), cause); + for (Map.Entry> header : headers.entrySet()) { + e.addHeader(header.getKey(), header.getValue()); + } + for (ElasticsearchException rootCause : rootCauses) { + e.addSuppressed(rootCause); + } + return e; + } + + static String buildMessage(String type, String reason, String stack) { + StringBuilder message = new StringBuilder("Elasticsearch exception ["); + message.append(TYPE).append('=').append(type).append(", "); + message.append(REASON).append('=').append(reason); + if (stack != null) { + message.append(", ").append(STACK_TRACE).append('=').append(stack); + } + message.append(']'); + return message.toString(); + } + + private static void declareInnerHitsParseFields(ObjectParser, Void> parser) { + declareMetaDataFields(parser); + parser.declareString((map, value) -> map.put(_TYPE, new Text(value)), new ParseField(_TYPE)); + parser.declareString((map, value) -> map.put(_INDEX, value), new ParseField(_INDEX)); + parser.declareString((map, value) -> map.put(_ID, value), new ParseField(_ID)); + parser.declareString((map, value) -> map.put(_NODE, value), new ParseField(_NODE)); + parser.declareField((map, value) -> map.put(_SCORE, value), SearchHit::parseScore, new ParseField(_SCORE), + ObjectParser.ValueType.FLOAT_OR_NULL); + parser.declareLong((map, value) -> map.put(_VERSION, value), new ParseField(_VERSION)); + parser.declareField((map, value) -> map.put(_SHARD, value), (p, c) -> ShardId.fromString(p.text()), + new ParseField(_SHARD), STRING); + parser.declareObject((map, value) -> map.put(SourceFieldMapper.NAME, value), (p, c) -> parseSourceBytes(p), + new ParseField(SourceFieldMapper.NAME)); + parser.declareObject((map, value) -> map.put(HIGHLIGHT, value), (p, c) -> parseHighlightFields(p), + new ParseField(HIGHLIGHT)); + parser.declareObject((map, value) -> { + Map fieldMap = get(FIELDS, map, new HashMap()); + fieldMap.putAll(value); + map.put(FIELDS, fieldMap); + }, (p, c) -> parseFields(p), new ParseField(FIELDS)); + parser.declareObject((map, value) -> map.put(_EXPLANATION, value), (p, c) -> parseExplanation(p), + new ParseField(_EXPLANATION)); + parser.declareObject((map, value) -> map.put(_NESTED, value), SearchHit.NestedIdentity::fromXContent, + new ParseField(_NESTED)); + parser.declareObject((map, value) -> map.put(INNER_HITS, value), (p,c) -> parseInnerHits(p), + new ParseField(INNER_HITS)); + parser.declareStringArray((map, list) -> map.put(MATCHED_QUERIES, list), new ParseField(MATCHED_QUERIES)); + parser.declareField((map, list) -> map.put(SORT, list), SearchSortValues::fromXContent, new ParseField(SORT), + ObjectParser.ValueType.OBJECT_ARRAY); + } + + private static void declareMetaDataFields(ObjectParser, Void> parser) { + for (String metadatafield : MapperService.getAllMetaFields()) { + if (!metadatafield.equals(_ID) && !metadatafield.equals(_INDEX) && !metadatafield.equals(_TYPE)) { + parser.declareField((map, field) -> { + @SuppressWarnings("unchecked") + Map fieldMap = (Map) map.computeIfAbsent(FIELDS, + v -> new HashMap()); + fieldMap.put(field.getName(), field); + }, (p, c) -> { + List values = new ArrayList<>(); + values.add(parseFieldsValue(p)); + return new InternalSearchHit(metadatafield, values); + }, new ParseField(metadatafield), ObjectParser.ValueType.VALUE); + } + } + } + + private static Map parseFields(XContentParser parser) throws IOException { + Map fields = new HashMap<>(); + while (parser.nextToken() != XContentParser.Token.END_OBJECT) { + SearchHitField field = SearchHitField.fromXContent(parser); + fields.put(field.getName(), field); + } + return fields; + } + + private static Map parseInnerHits(XContentParser parser) throws IOException { + Map innerHits = new HashMap<>(); + while ((parser.nextToken()) != XContentParser.Token.END_OBJECT) { + ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser::getTokenLocation); + String name = parser.currentName(); + ensureExpectedToken(Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation); + ensureFieldName(parser, parser.nextToken(), SearchHits.Fields.HITS); + innerHits.put(name, SearchHits.fromXContent(parser)); + ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser::getTokenLocation); + } + return innerHits; + } + + private static Map parseHighlightFields(XContentParser parser) throws IOException { + Map highlightFields = new HashMap<>(); + while((parser.nextToken()) != XContentParser.Token.END_OBJECT) { + HighlightField highlightField = HighlightField.fromXContent(parser); + highlightFields.put(highlightField.getName(), highlightField); + } + return highlightFields; + } + + private static Explanation parseExplanation(XContentParser parser) throws IOException { + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation); + XContentParser.Token token; + Float value = null; + String description = null; + List details = new ArrayList<>(); + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation); + String currentFieldName = parser.currentName(); + token = parser.nextToken(); + if (Fields.VALUE.equals(currentFieldName)) { + value = parser.floatValue(); + } else if (Fields.DESCRIPTION.equals(currentFieldName)) { + description = parser.textOrNull(); + } else if (Fields.DETAILS.equals(currentFieldName)) { + ensureExpectedToken(XContentParser.Token.START_ARRAY, token, parser::getTokenLocation); + while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + details.add(parseExplanation(parser)); + } + } else { + parser.skipChildren(); + } + } + if (value == null) { + throw new ParsingException(parser.getTokenLocation(), "missing explanation value"); + } + if (description == null) { + throw new ParsingException(parser.getTokenLocation(), "missing explanation description"); + } + return Explanation.match(value, description, details); + } + + private void buildExplanation(XContentBuilder builder, Explanation explanation) throws IOException { + builder.startObject(); + builder.field(Fields.VALUE, explanation.getValue()); + builder.field(Fields.DESCRIPTION, explanation.getDescription()); + Explanation[] innerExps = explanation.getDetails(); + if (innerExps != null) { + builder.startArray(Fields.DETAILS); + for (Explanation exp : innerExps) { + buildExplanation(builder, exp); + } + builder.endArray(); + } + builder.endObject(); + } + + } +} + diff --git a/elx-http/src/main/java/org/xbib/elx/http/util/AbstractObjectParser.java b/elx-http/src/main/java/org/xbib/elx/http/util/AbstractObjectParser.java new file mode 100644 index 0000000..1f854f1 --- /dev/null +++ b/elx-http/src/main/java/org/xbib/elx/http/util/AbstractObjectParser.java @@ -0,0 +1,217 @@ +package org.xbib.elx.http.util; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.function.BiConsumer; +import java.util.function.BiFunction; +import java.util.function.Consumer; + +public abstract class AbstractObjectParser + implements BiFunction, ContextParser { + + /** + * Declare some field. Usually it is easier to use {@link #declareString(BiConsumer, ParseField)} or + * {@link #declareObject(BiConsumer, ContextParser, ParseField)} rather than call this directly. + */ + public abstract void declareField(BiConsumer consumer, ContextParser parser, ParseField parseField, + ObjectParser.ValueType type); + + /** + * Declares named objects in the style of aggregations. These are named + * inside and object like this: + * + *
+     * 
+     * {
+     *   "aggregations": {
+     *     "name_1": { "aggregation_type": {} },
+     *     "name_2": { "aggregation_type": {} },
+     *     "name_3": { "aggregation_type": {} }
+     *     }
+     *   }
+     * }
+     * 
+     * 
+ * + * Unlike the other version of this method, "ordered" mode (arrays of + * objects) is not supported. + * + * See NamedObjectHolder in ObjectParserTests for examples of how to invoke + * this. + * + * @param consumer + * sets the values once they have been parsed + * @param namedObjectParser + * parses each named object + * @param parseField + * the field to parse + */ + public abstract void declareNamedObjects(BiConsumer> consumer, + ObjectParser.NamedObjectParser namedObjectParser, + ParseField parseField); + + /** + * Declares named objects in the style of highlighting's field element. + * These are usually named inside and object like this: + * + *
+     * 
+     * {
+     *   "highlight": {
+     *     "fields": {        <------ this one
+     *       "title": {},
+     *       "body": {},
+     *       "category": {}
+     *     }
+     *   }
+     * }
+     * 
+     * 
+ * + * but, when order is important, some may be written this way: + * + *
+     * 
+     * {
+     *   "highlight": {
+     *     "fields": [        <------ this one
+     *       {"title": {}},
+     *       {"body": {}},
+     *       {"category": {}}
+     *     ]
+     *   }
+     * }
+     * 
+     * 
+ * + * This is because json doesn't enforce ordering. Elasticsearch reads it in + * the order sent but tools that generate json are free to put object + * members in an unordered Map, jumbling them. Thus, if you care about order + * you can send the object in the second way. + * + * See NamedObjectHolder in ObjectParserTests for examples of how to invoke + * this. + * + * @param consumer + * sets the values once they have been parsed + * @param namedObjectParser + * parses each named object + * @param orderedModeCallback + * called when the named object is parsed using the "ordered" + * mode (the array of objects) + * @param parseField + * the field to parse + */ + public abstract void declareNamedObjects(BiConsumer> consumer, + ObjectParser.NamedObjectParser namedObjectParser, + Consumer orderedModeCallback, + ParseField parseField); + + public abstract String getName(); + + public void declareField(BiConsumer consumer, CheckedFunction parser, + ParseField parseField, ObjectParser.ValueType type) { + if (parser == null) { + throw new IllegalArgumentException("[parser] is required"); + } + declareField(consumer, (p, c) -> parser.apply(p), parseField, type); + } + + public void declareObject(BiConsumer consumer, ContextParser objectParser, ParseField field) { + declareField(consumer, (p, c) -> objectParser.parse(p, c), field, ObjectParser.ValueType.OBJECT); + } + + public void declareFloat(BiConsumer consumer, ParseField field) { + // Using a method reference here angers some compilers + declareField(consumer, p -> p.floatValue(), field, ObjectParser.ValueType.FLOAT); + } + + public void declareDouble(BiConsumer consumer, ParseField field) { + // Using a method reference here angers some compilers + declareField(consumer, p -> p.doubleValue(), field, ObjectParser.ValueType.DOUBLE); + } + + public void declareLong(BiConsumer consumer, ParseField field) { + // Using a method reference here angers some compilers + declareField(consumer, p -> p.longValue(), field, ObjectParser.ValueType.LONG); + } + + public void declareInt(BiConsumer consumer, ParseField field) { + // Using a method reference here angers some compilers + declareField(consumer, p -> p.intValue(), field, ObjectParser.ValueType.INT); + } + + public void declareString(BiConsumer consumer, ParseField field) { + declareField(consumer, XContentParser::text, field, ObjectParser.ValueType.STRING); + } + + public void declareStringOrNull(BiConsumer consumer, ParseField field) { + declareField(consumer, (p) -> p.currentToken() == XContentParser.Token.VALUE_NULL ? null : p.text(), field, + ObjectParser.ValueType.STRING_OR_NULL); + } + + public void declareBoolean(BiConsumer consumer, ParseField field) { + declareField(consumer, XContentParser::booleanValue, field, ObjectParser.ValueType.BOOLEAN); + } + + public void declareObjectArray(BiConsumer> consumer, ContextParser objectParser, + ParseField field) { + declareFieldArray(consumer, objectParser, field, ObjectParser.ValueType.OBJECT_ARRAY); + } + + public void declareStringArray(BiConsumer> consumer, ParseField field) { + declareFieldArray(consumer, (p, c) -> p.text(), field, ObjectParser.ValueType.STRING_ARRAY); + } + + public void declareDoubleArray(BiConsumer> consumer, ParseField field) { + declareFieldArray(consumer, (p, c) -> p.doubleValue(), field, ObjectParser.ValueType.DOUBLE_ARRAY); + } + + public void declareFloatArray(BiConsumer> consumer, ParseField field) { + declareFieldArray(consumer, (p, c) -> p.floatValue(), field, ObjectParser.ValueType.FLOAT_ARRAY); + } + + public void declareLongArray(BiConsumer> consumer, ParseField field) { + declareFieldArray(consumer, (p, c) -> p.longValue(), field, ObjectParser.ValueType.LONG_ARRAY); + } + + public void declareIntArray(BiConsumer> consumer, ParseField field) { + declareFieldArray(consumer, (p, c) -> p.intValue(), field, ObjectParser.ValueType.INT_ARRAY); + } + + /** + * Declares a field that can contain an array of elements listed in the type ValueType enum + */ + public void declareFieldArray(BiConsumer> consumer, ContextParser itemParser, + ParseField field, ObjectParser.ValueType type) { + declareField(consumer, (p, c) -> parseArray(p, () -> itemParser.parse(p, c)), field, type); + } + + private interface IOSupplier { + T get() throws IOException; + } + + private static List parseArray(XContentParser parser, IOSupplier supplier) throws IOException { + List list = new ArrayList<>(); + if (parser.currentToken().isValue() + || parser.currentToken() == XContentParser.Token.VALUE_NULL + || parser.currentToken() == XContentParser.Token.START_OBJECT) { + list.add(supplier.get()); // single value + } else { + while (parser.nextToken() != XContentParser.Token.END_ARRAY) { + if (parser.currentToken().isValue() + || parser.currentToken() == XContentParser.Token.VALUE_NULL + || parser.currentToken() == XContentParser.Token.START_OBJECT) { + list.add(supplier.get()); + } else { + throw new IllegalStateException("expected value but got [" + parser.currentToken() + "]"); + } + } + } + return list; + } +} diff --git a/elx-http/src/main/java/org/xbib/elx/http/util/CheckedBiConsumer.java b/elx-http/src/main/java/org/xbib/elx/http/util/CheckedBiConsumer.java new file mode 100644 index 0000000..7213e9d --- /dev/null +++ b/elx-http/src/main/java/org/xbib/elx/http/util/CheckedBiConsumer.java @@ -0,0 +1,11 @@ +package org.xbib.elx.http.util; + +import java.util.function.BiConsumer; + +/** + * A {@link BiConsumer}-like interface which allows throwing checked exceptions. + */ +@FunctionalInterface +public interface CheckedBiConsumer { + void accept(T t, U u) throws E; +} diff --git a/elx-http/src/main/java/org/xbib/elx/http/util/CheckedFunction.java b/elx-http/src/main/java/org/xbib/elx/http/util/CheckedFunction.java new file mode 100644 index 0000000..a2e4d8f --- /dev/null +++ b/elx-http/src/main/java/org/xbib/elx/http/util/CheckedFunction.java @@ -0,0 +1,6 @@ +package org.xbib.elx.http.util; + +@FunctionalInterface +public interface CheckedFunction { + R apply(T t) throws E; +} diff --git a/elx-http/src/main/java/org/xbib/elx/http/util/ContextParser.java b/elx-http/src/main/java/org/xbib/elx/http/util/ContextParser.java new file mode 100644 index 0000000..08534aa --- /dev/null +++ b/elx-http/src/main/java/org/xbib/elx/http/util/ContextParser.java @@ -0,0 +1,13 @@ +package org.xbib.elx.http.util; + +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; + +/** + * Reads an object from a parser using some context. + */ +@FunctionalInterface +public interface ContextParser { + T parse(XContentParser p, Context c) throws IOException; +} diff --git a/elx-http/src/main/java/org/xbib/elx/http/util/NamedObjectNotFoundException.java b/elx-http/src/main/java/org/xbib/elx/http/util/NamedObjectNotFoundException.java new file mode 100644 index 0000000..1415beb --- /dev/null +++ b/elx-http/src/main/java/org/xbib/elx/http/util/NamedObjectNotFoundException.java @@ -0,0 +1,14 @@ +package org.xbib.elx.http.util; + +import org.elasticsearch.common.xcontent.XContentLocation; + +public class NamedObjectNotFoundException extends XContentParseException { + + public NamedObjectNotFoundException(String message) { + this(null, message); + } + + public NamedObjectNotFoundException(XContentLocation location, String message) { + super(location, message); + } +} diff --git a/elx-http/src/main/java/org/xbib/elx/http/util/NamedXContentRegistry.java b/elx-http/src/main/java/org/xbib/elx/http/util/NamedXContentRegistry.java new file mode 100644 index 0000000..ad5362c --- /dev/null +++ b/elx-http/src/main/java/org/xbib/elx/http/util/NamedXContentRegistry.java @@ -0,0 +1,101 @@ +package org.xbib.elx.http.util; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Comparator; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +import static java.util.Collections.emptyMap; +import static java.util.Collections.unmodifiableMap; + +public class NamedXContentRegistry { + + public static class Entry { + /** The class that this entry can read. */ + public final Class categoryClass; + + /** A name for the entry which is unique within the {@link #categoryClass}. */ + public final ParseField name; + + /** A parser capability of parser the entry's class. */ + private final ContextParser parser; + + /** Creates a new entry which can be stored by the registry. */ + public Entry(Class categoryClass, ParseField name, CheckedFunction parser) { + this.categoryClass = Objects.requireNonNull(categoryClass); + this.name = Objects.requireNonNull(name); + this.parser = Objects.requireNonNull((p, c) -> parser.apply(p)); + } + /** + * Creates a new entry which can be stored by the registry. + * Prefer {@link Entry#Entry(Class, ParseField, CheckedFunction)} unless you need a context to carry around while parsing. + */ + public Entry(Class categoryClass, ParseField name, ContextParser parser) { + this.categoryClass = Objects.requireNonNull(categoryClass); + this.name = Objects.requireNonNull(name); + this.parser = Objects.requireNonNull(parser); + } + } + + private final Map, Map> registry; + + public NamedXContentRegistry(List entries) { + if (entries.isEmpty()) { + registry = emptyMap(); + return; + } + entries = new ArrayList<>(entries); + entries.sort(Comparator.comparing(e -> e.categoryClass.getName())); + + Map, Map> registry = new HashMap<>(); + Map parsers = null; + Class currentCategory = null; + for (Entry entry : entries) { + if (currentCategory != entry.categoryClass) { + if (currentCategory != null) { + // we've seen the last of this category, put it into the big map + registry.put(currentCategory, unmodifiableMap(parsers)); + } + parsers = new HashMap<>(); + currentCategory = entry.categoryClass; + } + + for (String name : entry.name.getAllNamesIncludedDeprecated()) { + Object old = parsers.put(name, entry); + if (old != null) { + throw new IllegalArgumentException("NamedXContent [" + currentCategory.getName() + "][" + entry.name + "]" + + " is already registered for [" + old.getClass().getName() + "]," + + " cannot register [" + entry.parser.getClass().getName() + "]"); + } + } + } + // handle the last category + registry.put(currentCategory, unmodifiableMap(parsers)); + + this.registry = unmodifiableMap(registry); + } + + public T parseNamedObject(Class categoryClass, String name, XContentParser parser, C context) throws IOException { + Map parsers = registry.get(categoryClass); + if (parsers == null) { + if (registry.isEmpty()) { + // The "empty" registry will never work so we throw a better exception as a hint. + throw new NamedObjectNotFoundException("named objects are not supported for this parser"); + } + throw new NamedObjectNotFoundException("unknown named object category [" + categoryClass.getName() + "]"); + } + Entry entry = parsers.get(name); + if (entry == null) { + throw new NamedObjectNotFoundException(parser.getTokenLocation(), "unable to parse " + categoryClass.getSimpleName() + + " with name [" + name + "]: parser not found"); + } + return categoryClass.cast(entry.parser.parse(parser, context)); + } + +} diff --git a/elx-http/src/main/java/org/xbib/elx/http/util/ObjectParser.java b/elx-http/src/main/java/org/xbib/elx/http/util/ObjectParser.java new file mode 100644 index 0000000..febb64b --- /dev/null +++ b/elx-http/src/main/java/org/xbib/elx/http/util/ObjectParser.java @@ -0,0 +1,441 @@ +package org.xbib.elx.http.util; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.lang.reflect.Array; +import java.util.ArrayList; +import java.util.EnumSet; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.function.BiConsumer; +import java.util.function.BiFunction; +import java.util.function.Consumer; +import java.util.function.Supplier; + +import static org.elasticsearch.common.xcontent.XContentParser.Token.START_ARRAY; +import static org.elasticsearch.common.xcontent.XContentParser.Token.START_OBJECT; +import static org.elasticsearch.common.xcontent.XContentParser.Token.VALUE_BOOLEAN; +import static org.elasticsearch.common.xcontent.XContentParser.Token.VALUE_EMBEDDED_OBJECT; +import static org.elasticsearch.common.xcontent.XContentParser.Token.VALUE_NULL; +import static org.elasticsearch.common.xcontent.XContentParser.Token.VALUE_NUMBER; +import static org.elasticsearch.common.xcontent.XContentParser.Token.VALUE_STRING; + +/** + * A declarative, stateless parser that turns XContent into setter calls. A single parser should be defined for each object being parsed, + * nested elements can be added via {@link #declareObject(BiConsumer, ContextParser, ParseField)} which should be satisfied where possible + * by passing another instance of {@link ObjectParser}, this one customized for that Object. + *

+ * This class works well for object that do have a constructor argument or that can be built using information available from earlier in the + * XContent. + *

+ *

+ * Instances of {@link ObjectParser} should be setup by declaring a constant field for the parsers and declaring all fields in a static + * block just below the creation of the parser. Like this: + *

+ *
{@code
+ *   private static final ObjectParser PARSER = new ObjectParser<>("thing", Thing::new));
+ *   static {
+ *       PARSER.declareInt(Thing::setMineral, new ParseField("mineral"));
+ *       PARSER.declareInt(Thing::setFruit, new ParseField("fruit"));
+ *   }
+ * }
+ * It's highly recommended to use the high level declare methods like {@link #declareString(BiConsumer, ParseField)} instead of + * {@link #declareField} which can be used to implement exceptional parsing operations not covered by the high level methods. + */ +public final class ObjectParser extends AbstractObjectParser { + + private static final Logger logger = LogManager.getLogger(ObjectParser.class.getName()); + + public static BiConsumer> fromList(Class c, + BiConsumer consumer) { + return (Value v, List l) -> { + @SuppressWarnings("unchecked") + ElementValue[] array = (ElementValue[]) Array.newInstance(c, l.size()); + consumer.accept(v, l.toArray(array)); + }; + } + + private final Map fieldParserMap = new HashMap<>(); + + private final String name; + + private final Supplier valueSupplier; + + /** + * Should this parser ignore unknown fields? This should generally be set to true only when parsing responses from external systems, + * never when parsing requests from users. + */ + private final boolean ignoreUnknownFields; + + /** + * Creates a new ObjectParser instance with a name. This name is used to reference the parser in exceptions and messages. + */ + public ObjectParser(String name) { + this(name, null); + } + + /** + * Creates a new ObjectParser instance which a name. + * @param name the parsers name, used to reference the parser in exceptions and messages. + * @param valueSupplier a supplier that creates a new Value instance used when the parser is used as an inner object parser. + */ + public ObjectParser(String name, @Nullable Supplier valueSupplier) { + this(name, false, valueSupplier); + } + + /** + * Creates a new ObjectParser instance which a name. + * @param name the parsers name, used to reference the parser in exceptions and messages. + * @param ignoreUnknownFields Should this parser ignore unknown fields? This should generally be set to true only when parsing + * responses from external systems, never when parsing requests from users. + * @param valueSupplier a supplier that creates a new Value instance used when the parser is used as an inner object parser. + */ + public ObjectParser(String name, boolean ignoreUnknownFields, @Nullable Supplier valueSupplier) { + this.name = name; + this.valueSupplier = valueSupplier; + this.ignoreUnknownFields = ignoreUnknownFields; + } + + /** + * Parses a Value from the given {@link XContentParser} + * @param parser the parser to build a value from + * @param context context needed for parsing + * @return a new value instance drawn from the provided value supplier on {@link #ObjectParser(String, Supplier)} + * @throws IOException if an IOException occurs. + */ + @Override + public Value parse(XContentParser parser, Context context) throws IOException { + if (valueSupplier == null) { + throw new NullPointerException("valueSupplier is not set"); + } + return parse(parser, valueSupplier.get(), context); + } + + /** + * Parses a Value from the given {@link XContentParser} + * @param parser the parser to build a value from + * @param value the value to fill from the parser + * @param context a context that is passed along to all declared field parsers + * @return the parsed value + * @throws IOException if an IOException occurs. + */ + public Value parse(XContentParser parser, Value value, Context context) throws IOException { + logger.debug("parse"); + XContentParser.Token token; + if (parser.currentToken() == XContentParser.Token.START_OBJECT) { + token = parser.currentToken(); + } else { + token = parser.nextToken(); + if (token != XContentParser.Token.START_OBJECT) { + throw new XContentParseException(parser.getTokenLocation(), "[" + name + "] Expected START_OBJECT but was: " + token); + } + } + FieldParser fieldParser = null; + String currentFieldName = null; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + fieldParser = getParser(currentFieldName, parser); + logger.debug("currentFieldName={} fieldParser={}", currentFieldName, fieldParser); + } else { + if (currentFieldName == null) { + throw new XContentParseException(parser.getTokenLocation(), "[" + name + "] no field found"); + } + if (fieldParser == null) { + assert ignoreUnknownFields : "this should only be possible if configured to ignore known fields"; + parser.skipChildren(); // noop if parser points to a value, skips children if parser is start object or start array + } else { + fieldParser.assertSupports(name, parser, currentFieldName); + parseSub(parser, fieldParser, currentFieldName, value, context); + } + fieldParser = null; + } + } + return value; + } + + @Override + public Value apply(XContentParser parser, Context context) { + if (valueSupplier == null) { + throw new NullPointerException("valueSupplier is not set"); + } + try { + return parse(parser, valueSupplier.get(), context); + } catch (IOException e) { + throw new XContentParseException(parser.getTokenLocation(), "[" + name + "] failed to parse object", e); + } + } + + public interface Parser { + void parse(XContentParser parser, Value value, Context context) throws IOException; + } + + public void declareField(Parser p, ParseField parseField, ValueType type) { + if (parseField == null) { + throw new IllegalArgumentException("[parseField] is required"); + } + if (type == null) { + throw new IllegalArgumentException("[type] is required"); + } + FieldParser fieldParser = new FieldParser(p, type.supportedTokens(), parseField, type); + for (String fieldValue : parseField.getAllNamesIncludedDeprecated()) { + fieldParserMap.putIfAbsent(fieldValue, fieldParser); + } + } + + @Override + public void declareField(BiConsumer consumer, ContextParser parser, ParseField parseField, + ValueType type) { + if (consumer == null) { + throw new IllegalArgumentException("[consumer] is required"); + } + if (parser == null) { + throw new IllegalArgumentException("[parser] is required"); + } + declareField((p, v, c) -> consumer.accept(v, parser.parse(p, c)), parseField, type); + } + + public void declareObjectOrDefault(BiConsumer consumer, BiFunction objectParser, + Supplier defaultValue, ParseField field) { + declareField((p, v, c) -> { + if (p.currentToken() == XContentParser.Token.VALUE_BOOLEAN) { + if (p.booleanValue()) { + consumer.accept(v, defaultValue.get()); + } + } else { + consumer.accept(v, objectParser.apply(p, c)); + } + }, field, ValueType.OBJECT_OR_BOOLEAN); + } + + @Override + public void declareNamedObjects(BiConsumer> consumer, NamedObjectParser namedObjectParser, + Consumer orderedModeCallback, ParseField field) { + // This creates and parses the named object + BiFunction objectParser = (XContentParser p, Context c) -> { + if (p.currentToken() != XContentParser.Token.FIELD_NAME) { + throw new XContentParseException(p.getTokenLocation(), "[" + field + "] can be a single object with any number of " + + "fields or an array where each entry is an object with a single field"); + } + // This messy exception nesting has the nice side effect of telling the use which field failed to parse + try { + String name = p.currentName(); + try { + return namedObjectParser.parse(p, c, name); + } catch (Exception e) { + throw new XContentParseException(p.getTokenLocation(), "[" + field + "] failed to parse field [" + name + "]", e); + } + } catch (IOException e) { + throw new XContentParseException(p.getTokenLocation(), "[" + field + "] error while parsing", e); + } + }; + declareField((XContentParser p, Value v, Context c) -> { + List fields = new ArrayList<>(); + XContentParser.Token token; + if (p.currentToken() == XContentParser.Token.START_OBJECT) { + // Fields are just named entries in a single object + while ((token = p.nextToken()) != XContentParser.Token.END_OBJECT) { + fields.add(objectParser.apply(p, c)); + } + } else if (p.currentToken() == XContentParser.Token.START_ARRAY) { + // Fields are objects in an array. Each object contains a named field. + orderedModeCallback.accept(v); + while ((token = p.nextToken()) != XContentParser.Token.END_ARRAY) { + if (token != XContentParser.Token.START_OBJECT) { + throw new XContentParseException(p.getTokenLocation(), "[" + field + "] can be a single object with any number of " + + "fields or an array where each entry is an object with a single field"); + } + p.nextToken(); // Move to the first field in the object + fields.add(objectParser.apply(p, c)); + p.nextToken(); // Move past the object, should be back to into the array + if (p.currentToken() != XContentParser.Token.END_OBJECT) { + throw new XContentParseException(p.getTokenLocation(), "[" + field + "] can be a single object with any number of " + + "fields or an array where each entry is an object with a single field"); + } + } + } + consumer.accept(v, fields); + }, field, ValueType.OBJECT_ARRAY); + } + + @Override + public void declareNamedObjects(BiConsumer> consumer, NamedObjectParser namedObjectParser, + ParseField field) { + Consumer orderedModeCallback = (v) -> { + throw new IllegalArgumentException("[" + field + "] doesn't support arrays. Use a single object with multiple fields."); + }; + declareNamedObjects(consumer, namedObjectParser, orderedModeCallback, field); + } + + /** + * Functional interface for instantiating and parsing named objects. See ObjectParserTests#NamedObject for the canonical way to + * implement this for objects that themselves have a parser. + */ + @FunctionalInterface + public interface NamedObjectParser { + T parse(XContentParser p, Context c, String name) throws IOException; + } + + /** + * Get the name of the parser. + */ + @Override + public String getName() { + return name; + } + + private void parseArray(XContentParser parser, FieldParser fieldParser, String currentFieldName, Value value, Context context) + throws IOException { + assert parser.currentToken() == XContentParser.Token.START_ARRAY : "Token was: " + parser.currentToken(); + parseValue(parser, fieldParser, currentFieldName, value, context); + } + + private void parseValue(XContentParser parser, FieldParser fieldParser, String currentFieldName, Value value, Context context) + throws IOException { + try { + fieldParser.parser.parse(parser, value, context); + } catch (Exception ex) { + throw new XContentParseException(parser.getTokenLocation(), + "[" + name + "] failed to parse field [" + currentFieldName + "]", ex); + } + } + + private void parseSub(XContentParser parser, FieldParser fieldParser, String currentFieldName, Value value, Context context) + throws IOException { + final XContentParser.Token token = parser.currentToken(); + switch (token) { + case START_OBJECT: + parseValue(parser, fieldParser, currentFieldName, value, context); + /* + * Well behaving parsers should consume the entire object but + * asserting that they do that is not something we can do + * efficiently here. Instead we can check that they end on an + * END_OBJECT. They could end on the *wrong* end object and + * this test won't catch them, but that is the price that we pay + * for having a cheap test. + */ + if (parser.currentToken() != XContentParser.Token.END_OBJECT) { + throw new IllegalStateException("parser for [" + currentFieldName + "] did not end on END_OBJECT"); + } + break; + case START_ARRAY: + parseArray(parser, fieldParser, currentFieldName, value, context); + /* + * Well behaving parsers should consume the entire array but + * asserting that they do that is not something we can do + * efficiently here. Instead we can check that they end on an + * END_ARRAY. They could end on the *wrong* end array and + * this test won't catch them, but that is the price that we pay + * for having a cheap test. + */ + if (parser.currentToken() != XContentParser.Token.END_ARRAY) { + throw new IllegalStateException("parser for [" + currentFieldName + "] did not end on END_ARRAY"); + } + break; + case END_OBJECT: + case END_ARRAY: + case FIELD_NAME: + throw new XContentParseException(parser.getTokenLocation(), "[" + name + "]" + token + " is unexpected"); + case VALUE_STRING: + case VALUE_NUMBER: + case VALUE_BOOLEAN: + case VALUE_EMBEDDED_OBJECT: + case VALUE_NULL: + parseValue(parser, fieldParser, currentFieldName, value, context); + } + } + + private FieldParser getParser(String fieldName, XContentParser xContentParser) { + FieldParser parser = fieldParserMap.get(fieldName); + if (parser == null && false == ignoreUnknownFields) { + throw new XContentParseException(xContentParser.getTokenLocation(), + "[" + name + "] unknown field [" + fieldName + "], parser not found"); + } + return parser; + } + + private class FieldParser { + private final Parser parser; + private final EnumSet supportedTokens; + private final ParseField parseField; + private final ValueType type; + + FieldParser(Parser parser, EnumSet supportedTokens, ParseField parseField, ValueType type) { + this.parser = parser; + this.supportedTokens = supportedTokens; + this.parseField = parseField; + this.type = type; + } + + void assertSupports(String parserName, XContentParser parser, String currentFieldName) { + if (!supportedTokens.contains(parser.currentToken())) { + throw new XContentParseException(parser.getTokenLocation(), + "[" + parserName + "] " + currentFieldName + " doesn't support values of type: " + parser.currentToken()); + } + } + + @Override + public String toString() { + return "FieldParser{" + + "preferred_name=" + parseField.getPreferredName() + + ", supportedTokens=" + supportedTokens + + ", type=" + type.name() + + '}'; + } + } + + public enum ValueType { + STRING(VALUE_STRING), + STRING_OR_NULL(VALUE_STRING, VALUE_NULL), + FLOAT(VALUE_NUMBER, VALUE_STRING), + FLOAT_OR_NULL(VALUE_NUMBER, VALUE_STRING, VALUE_NULL), + DOUBLE(VALUE_NUMBER, VALUE_STRING), + DOUBLE_OR_NULL(VALUE_NUMBER, VALUE_STRING, VALUE_NULL), + LONG(VALUE_NUMBER, VALUE_STRING), + LONG_OR_NULL(VALUE_NUMBER, VALUE_STRING, VALUE_NULL), + INT(VALUE_NUMBER, VALUE_STRING), + INT_OR_NULL(VALUE_NUMBER, VALUE_STRING, VALUE_NULL), + BOOLEAN(VALUE_BOOLEAN, VALUE_STRING), + STRING_ARRAY(START_ARRAY, VALUE_STRING), + FLOAT_ARRAY(START_ARRAY, VALUE_NUMBER, VALUE_STRING), + DOUBLE_ARRAY(START_ARRAY, VALUE_NUMBER, VALUE_STRING), + LONG_ARRAY(START_ARRAY, VALUE_NUMBER, VALUE_STRING), + INT_ARRAY(START_ARRAY, VALUE_NUMBER, VALUE_STRING), + BOOLEAN_ARRAY(START_ARRAY, VALUE_BOOLEAN), + OBJECT(START_OBJECT), + OBJECT_OR_NULL(START_OBJECT, VALUE_NULL), + OBJECT_ARRAY(START_OBJECT, START_ARRAY), + OBJECT_OR_BOOLEAN(START_OBJECT, VALUE_BOOLEAN), + OBJECT_OR_STRING(START_OBJECT, VALUE_STRING), + OBJECT_OR_LONG(START_OBJECT, VALUE_NUMBER), + OBJECT_ARRAY_BOOLEAN_OR_STRING(START_OBJECT, START_ARRAY, VALUE_BOOLEAN, VALUE_STRING), + OBJECT_ARRAY_OR_STRING(START_OBJECT, START_ARRAY, VALUE_STRING), + VALUE(VALUE_BOOLEAN, VALUE_NULL, VALUE_EMBEDDED_OBJECT, VALUE_NUMBER, VALUE_STRING), + VALUE_OBJECT_ARRAY(VALUE_BOOLEAN, VALUE_NULL, VALUE_EMBEDDED_OBJECT, VALUE_NUMBER, VALUE_STRING, START_OBJECT, START_ARRAY), + VALUE_ARRAY(VALUE_BOOLEAN, VALUE_NULL, VALUE_NUMBER, VALUE_STRING, START_ARRAY); + + private final EnumSet tokens; + + ValueType(XContentParser.Token first, XContentParser.Token... rest) { + this.tokens = EnumSet.of(first, rest); + } + + public EnumSet supportedTokens() { + return this.tokens; + } + } + + @Override + public String toString() { + return "ObjectParser{" + + "name='" + name + '\'' + + ", fields=" + fieldParserMap.values() + + '}'; + } +} diff --git a/elx-http/src/main/java/org/xbib/elx/http/util/XContentParseException.java b/elx-http/src/main/java/org/xbib/elx/http/util/XContentParseException.java new file mode 100644 index 0000000..254179f --- /dev/null +++ b/elx-http/src/main/java/org/xbib/elx/http/util/XContentParseException.java @@ -0,0 +1,47 @@ +package org.xbib.elx.http.util; + +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.xcontent.XContentLocation; + +import java.util.Optional; + +/** + * Thrown when one of the XContent parsers cannot parse something. + */ +public class XContentParseException extends IllegalArgumentException { + + private final Optional location; + + public XContentParseException(String message) { + this(null, message); + } + + public XContentParseException(XContentLocation location, String message) { + super(message); + this.location = Optional.ofNullable(location); + } + + public XContentParseException(XContentLocation location, String message, Exception cause) { + super(message, cause); + this.location = Optional.ofNullable(location); + } + + public int getLineNumber() { + return location.map(l -> l.lineNumber).orElse(-1); + } + + public int getColumnNumber() { + return location.map(l -> l.columnNumber).orElse(-1); + } + + @Nullable + public XContentLocation getLocation() { + return location.orElse(null); + } + + @Override + public String getMessage() { + return location.map(l -> "[" + l.toString() + "] ").orElse("") + super.getMessage(); + } + +} diff --git a/elx-http/src/main/java/org/xbib/elx/http/util/XContentParserUtils.java b/elx-http/src/main/java/org/xbib/elx/http/util/XContentParserUtils.java new file mode 100644 index 0000000..fb78890 --- /dev/null +++ b/elx-http/src/main/java/org/xbib/elx/http/util/XContentParserUtils.java @@ -0,0 +1,68 @@ +package org.xbib.elx.http.util; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.XContentLocation; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.search.aggregations.Aggregation; +import org.xbib.elx.http.util.aggregations.ParsedStringTerms; + +import java.io.IOException; +import java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.function.Consumer; +import java.util.function.Supplier; +import java.util.stream.Collectors; + +public class XContentParserUtils { + + private static final NamedXContentRegistry xContentRegistry = new NamedXContentRegistry(getDefaultNamedXContents()); + + public static void ensureExpectedToken(XContentParser.Token expected, XContentParser.Token actual, Supplier location) { + if (actual != expected) { + String message = "Failed to parse object: expecting token of type [%s] but found [%s]"; + throw new ElasticsearchException(location.get() + ":" + String.format(Locale.ROOT, message, expected, actual)); + } + } + + public static void parseTypedKeysObject(XContentParser parser, String delimiter, Class objectClass, Consumer consumer) + throws IOException { + if (parser.currentToken() != XContentParser.Token.START_OBJECT && parser.currentToken() != XContentParser.Token.START_ARRAY) { + throwUnknownToken(parser.currentToken(), parser.getTokenLocation()); + } + String currentFieldName = parser.currentName(); + if (Strings.hasLength(currentFieldName)) { + int position = currentFieldName.indexOf(delimiter); + if (position > 0) { + String type = currentFieldName.substring(0, position); + String name = currentFieldName.substring(position + 1); + consumer.accept(namedObject(parser, objectClass, type, name)); + return; + } + // if we didn't find a delimiter we ignore the object or array for forward compatibility instead of throwing an error + parser.skipChildren(); + } else { + throw new ElasticsearchException(parser.getTokenLocation() + ":" + "Failed to parse object: empty key"); + } + } + + public static void throwUnknownToken(XContentParser.Token token, XContentLocation location) { + String message = "Failed to parse object: unexpected token [%s] found"; + throw new ElasticsearchException(location + ":" + String.format(Locale.ROOT, message, token)); + } + + static T namedObject(XContentParser parser, Class categoryClass, String name, Object context) throws IOException { + return xContentRegistry.parseNamedObject(categoryClass, name, parser, context); + } + + public static List getDefaultNamedXContents() { + Map> map = new HashMap<>(); + //map.put("terms", (p, c) -> ParsedStringTerms.fromXContent(p, (String) c)); + return map.entrySet().stream() + .map(entry -> new NamedXContentRegistry.Entry(Aggregation.class, new ParseField(entry.getKey()), entry.getValue())) + .collect(Collectors.toList()); + } +} diff --git a/elx-http/src/main/java/org/xbib/elx/http/util/aggregations/CommonFields.java b/elx-http/src/main/java/org/xbib/elx/http/util/aggregations/CommonFields.java new file mode 100644 index 0000000..27c1c55 --- /dev/null +++ b/elx-http/src/main/java/org/xbib/elx/http/util/aggregations/CommonFields.java @@ -0,0 +1,18 @@ +package org.xbib.elx.http.util.aggregations; + +import org.elasticsearch.common.ParseField; + +final class CommonFields { + public static final ParseField META = new ParseField("meta"); + public static final ParseField BUCKETS = new ParseField("buckets"); + public static final ParseField VALUE = new ParseField("value"); + public static final ParseField VALUES = new ParseField("values"); + public static final ParseField VALUE_AS_STRING = new ParseField("value_as_string"); + public static final ParseField DOC_COUNT = new ParseField("doc_count"); + public static final ParseField KEY = new ParseField("key"); + public static final ParseField KEY_AS_STRING = new ParseField("key_as_string"); + public static final ParseField FROM = new ParseField("from"); + public static final ParseField FROM_AS_STRING = new ParseField("from_as_string"); + public static final ParseField TO = new ParseField("to"); + public static final ParseField TO_AS_STRING = new ParseField("to_as_string"); +} diff --git a/elx-http/src/main/java/org/xbib/elx/http/util/aggregations/ParsedAggregation.java b/elx-http/src/main/java/org/xbib/elx/http/util/aggregations/ParsedAggregation.java new file mode 100644 index 0000000..b110aa6 --- /dev/null +++ b/elx-http/src/main/java/org/xbib/elx/http/util/aggregations/ParsedAggregation.java @@ -0,0 +1,40 @@ +package org.xbib.elx.http.util.aggregations; + +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentParser.Token; +import org.elasticsearch.search.aggregations.Aggregation; +import org.xbib.elx.http.util.ObjectParser; + +import java.util.Collections; +import java.util.Map; + +/** + * An implementation of {@link Aggregation} that is parsed from a REST response. + * Serves as a base class for all aggregation implementations that are parsed from REST. + */ +public abstract class ParsedAggregation implements Aggregation { + + protected static void declareAggregationFields(ObjectParser objectParser) { + objectParser.declareObject((parsedAgg, metadata) -> parsedAgg.metadata = Collections.unmodifiableMap(metadata), + (parser, context) -> parser.map(), CommonFields.META); + } + + private String name; + protected Map metadata; + + @Override + public final String getName() { + return name; + } + + protected void setName(String name) { + this.name = name; + } + + @Override + public final Map getMetaData() { + return metadata; + } +} \ No newline at end of file diff --git a/elx-http/src/main/java/org/xbib/elx/http/util/aggregations/ParsedMultiBucketAggregation.java b/elx-http/src/main/java/org/xbib/elx/http/util/aggregations/ParsedMultiBucketAggregation.java new file mode 100644 index 0000000..bd0c81d --- /dev/null +++ b/elx-http/src/main/java/org/xbib/elx/http/util/aggregations/ParsedMultiBucketAggregation.java @@ -0,0 +1,149 @@ +package org.xbib.elx.http.util.aggregations; + +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.InternalAggregations; +import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; +import org.xbib.elx.http.util.CheckedBiConsumer; +import org.xbib.elx.http.util.CheckedFunction; +import org.xbib.elx.http.util.ObjectParser; +import org.xbib.elx.http.util.XContentParserUtils; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.function.Supplier; + +import static org.xbib.elx.http.util.XContentParserUtils.ensureExpectedToken; + +public abstract class ParsedMultiBucketAggregation + extends ParsedAggregation implements MultiBucketsAggregation { + + protected final List buckets = new ArrayList<>(); + + protected boolean keyed = false; + + protected static void declareMultiBucketAggregationFields(final ObjectParser objectParser, + final CheckedFunction bucketParser, + final CheckedFunction keyedBucketParser) { + declareAggregationFields(objectParser); + objectParser.declareField((parser, aggregation, context) -> { + XContentParser.Token token = parser.currentToken(); + if (token == XContentParser.Token.START_OBJECT) { + aggregation.keyed = true; + while (parser.nextToken() != XContentParser.Token.END_OBJECT) { + aggregation.buckets.add(keyedBucketParser.apply(parser)); + } + } else if (token == XContentParser.Token.START_ARRAY) { + aggregation.keyed = false; + while (parser.nextToken() != XContentParser.Token.END_ARRAY) { + aggregation.buckets.add(bucketParser.apply(parser)); + } + } + }, CommonFields.BUCKETS, ObjectParser.ValueType.OBJECT_ARRAY); + } + + public abstract static class ParsedBucket implements MultiBucketsAggregation.Bucket { + + private Aggregations aggregations; + private String keyAsString; + private long docCount; + private boolean keyed; + + protected void setKeyAsString(String keyAsString) { + this.keyAsString = keyAsString; + } + + @Override + public String getKeyAsString() { + return keyAsString; + } + + protected void setDocCount(long docCount) { + this.docCount = docCount; + } + + @Override + public long getDocCount() { + return docCount; + } + + public void setKeyed(boolean keyed) { + this.keyed = keyed; + } + + protected boolean isKeyed() { + return keyed; + } + + protected void setAggregations(Aggregations aggregations) { + this.aggregations = aggregations; + } + + @Override + public Aggregations getAggregations() { + return aggregations; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + /*if (keyed) { + builder.startObject(getKeyAsString()); + } else { + builder.startObject(); + } + if (keyAsString != null) { + builder.field(CommonFields.KEY_AS_STRING.getPreferredName(), getKeyAsString()); + } + keyToXContent(builder); + builder.field(CommonFields.DOC_COUNT.getPreferredName(), docCount); + aggregations.toXContentInternal(builder, params); + builder.endObject();*/ + return builder; + } + + protected XContentBuilder keyToXContent(XContentBuilder builder) throws IOException { + return builder.field(CommonFields.KEY.getPreferredName(), getKey()); + } + + protected static B parseXContent(final XContentParser parser, + final boolean keyed, + final Supplier bucketSupplier, + final CheckedBiConsumer keyConsumer) + throws IOException { + final B bucket = bucketSupplier.get(); + bucket.setKeyed(keyed); + XContentParser.Token token = parser.currentToken(); + String currentFieldName = parser.currentName(); + if (keyed) { + ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation); + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation); + } + List aggregations = new ArrayList<>(); + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (token.isValue()) { + if (CommonFields.KEY_AS_STRING.getPreferredName().equals(currentFieldName)) { + bucket.setKeyAsString(parser.text()); + } else if (CommonFields.KEY.getPreferredName().equals(currentFieldName)) { + keyConsumer.accept(parser, bucket); + } else if (CommonFields.DOC_COUNT.getPreferredName().equals(currentFieldName)) { + bucket.setDocCount(parser.longValue()); + } + } else if (token == XContentParser.Token.START_OBJECT) { + if (CommonFields.KEY.getPreferredName().equals(currentFieldName)) { + keyConsumer.accept(parser, bucket); + } else { + XContentParserUtils.parseTypedKeysObject(parser, "#", InternalAggregation.class, + aggregations::add); + } + } + } + bucket.setAggregations(new InternalAggregations(aggregations)); + return bucket; + } + } +} diff --git a/elx-http/src/main/java/org/xbib/elx/http/util/aggregations/ParsedStringTerms.java b/elx-http/src/main/java/org/xbib/elx/http/util/aggregations/ParsedStringTerms.java new file mode 100644 index 0000000..b2f759b --- /dev/null +++ b/elx-http/src/main/java/org/xbib/elx/http/util/aggregations/ParsedStringTerms.java @@ -0,0 +1,103 @@ +package org.xbib.elx.http.util.aggregations; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.xbib.elx.http.util.ObjectParser; + +import java.io.IOException; +import java.nio.CharBuffer; +import java.util.List; + +public class ParsedStringTerms extends ParsedTerms { + + public String getType() { + return "terms"; + } + + private static ObjectParser PARSER = + new ObjectParser<>(ParsedStringTerms.class.getSimpleName(), true, ParsedStringTerms::new); + + static { + declareParsedTermsFields(PARSER, ParsedBucket::fromXContent); + } + + public static ParsedStringTerms fromXContent(XContentParser parser, String name) throws IOException { + ParsedStringTerms aggregation = PARSER.parse(parser, null); + aggregation.setName(name); + return aggregation; + } + + @Override + public Object getProperty(String path) { + throw new UnsupportedOperationException(); + } + + public static class ParsedBucket extends ParsedTerms.ParsedBucket { + + private BytesRef key; + + @Override + public Object getKey() { + return getKeyAsString(); + } + + @Override + public String getKeyAsString() { + String keyAsString = super.getKeyAsString(); + if (keyAsString != null) { + return keyAsString; + } + if (key != null) { + return key.utf8ToString(); + } + return null; + } + + @Override + public Object getProperty(String containingAggName, List path) { + throw new UnsupportedOperationException(); + } + + public Number getKeyAsNumber() { + if (key != null) { + return Double.parseDouble(key.utf8ToString()); + } + return null; + } + + protected XContentBuilder keyToXContent(XContentBuilder builder) throws IOException { + return builder.field(CommonFields.KEY.getPreferredName(), getKey()); + } + + static ParsedBucket fromXContent(XContentParser parser) throws IOException { + return parseTermsBucketXContent(parser, ParsedBucket::new, (p, bucket) -> { + CharBuffer cb = charBufferOrNull(p); + if (cb == null) { + bucket.key = null; + } else { + bucket.key = new BytesRef(cb); + } + }); + } + + static CharBuffer charBufferOrNull(XContentParser parser) throws IOException { + if (parser.currentToken() == XContentParser.Token.VALUE_NULL) { + return null; + } + return CharBuffer.wrap(parser.textCharacters(), parser.textOffset(), parser.textLength()); + } + + @Override + public void readFrom(StreamInput in) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + throw new UnsupportedOperationException(); + } + } +} diff --git a/elx-http/src/main/java/org/xbib/elx/http/util/aggregations/ParsedTerms.java b/elx-http/src/main/java/org/xbib/elx/http/util/aggregations/ParsedTerms.java new file mode 100644 index 0000000..fc34516 --- /dev/null +++ b/elx-http/src/main/java/org/xbib/elx/http/util/aggregations/ParsedTerms.java @@ -0,0 +1,118 @@ +package org.xbib.elx.http.util.aggregations; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.InternalAggregations; +import org.elasticsearch.search.aggregations.bucket.terms.Terms; +import org.xbib.elx.http.util.CheckedBiConsumer; +import org.xbib.elx.http.util.CheckedFunction; +import org.xbib.elx.http.util.ObjectParser; +import org.xbib.elx.http.util.XContentParserUtils; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.function.Supplier; + +public abstract class ParsedTerms extends ParsedMultiBucketAggregation implements Terms { + + protected static final ParseField DOC_COUNT_ERROR_UPPER_BOUND_FIELD_NAME = new ParseField("doc_count_error_upper_bound"); + + protected static final ParseField SUM_OF_OTHER_DOC_COUNTS = new ParseField("sum_other_doc_count"); + + protected long docCountErrorUpperBound; + + protected long sumOtherDocCount; + + @Override + public long getDocCountError() { + return docCountErrorUpperBound; + } + + @Override + public long getSumOfOtherDocCounts() { + return sumOtherDocCount; + } + + @Override + public List getBuckets() { + //return buckets; + throw new UnsupportedOperationException(); + } + + @Override + public Terms.Bucket getBucketByKey(String term) { + for (Terms.Bucket bucket : getBuckets()) { + if (bucket.getKeyAsString().equals(term)) { + return bucket; + } + } + return null; + } + + static void declareParsedTermsFields(final ObjectParser objectParser, + final CheckedFunction bucketParser) { + declareMultiBucketAggregationFields(objectParser, bucketParser::apply, bucketParser::apply); + objectParser.declareLong((parsedTerms, value) -> parsedTerms.docCountErrorUpperBound = value , + DOC_COUNT_ERROR_UPPER_BOUND_FIELD_NAME); + objectParser.declareLong((parsedTerms, value) -> parsedTerms.sumOtherDocCount = value, + SUM_OF_OTHER_DOC_COUNTS); + } + + public abstract static class ParsedBucket extends ParsedMultiBucketAggregation.ParsedBucket /*implements Terms.Bucket*/ { + + boolean showDocCountError = false; + protected long docCountError; + + public long getDocCountError() { + return docCountError; + } + + @Override + public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + /*builder.startObject(); + keyToXContent(builder); + builder.field(CommonFields.DOC_COUNT.getPreferredName(), getDocCount()); + if (showDocCountError) { + builder.field(DOC_COUNT_ERROR_UPPER_BOUND_FIELD_NAME.getPreferredName(), getDocCountError()); + } + getAggregations().toXContentInternal(builder, params); + builder.endObject();*/ + return builder; + } + + static B parseTermsBucketXContent(final XContentParser parser, final Supplier bucketSupplier, + final CheckedBiConsumer keyConsumer) + throws IOException { + + final B bucket = bucketSupplier.get(); + final List aggregations = new ArrayList<>(); + XContentParser.Token token; + String currentFieldName = parser.currentName(); + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (token.isValue()) { + if (CommonFields.KEY_AS_STRING.getPreferredName().equals(currentFieldName)) { + bucket.setKeyAsString(parser.text()); + } else if (CommonFields.KEY.getPreferredName().equals(currentFieldName)) { + keyConsumer.accept(parser, bucket); + } else if (CommonFields.DOC_COUNT.getPreferredName().equals(currentFieldName)) { + bucket.setDocCount(parser.longValue()); + } else if (DOC_COUNT_ERROR_UPPER_BOUND_FIELD_NAME.getPreferredName().equals(currentFieldName)) { + bucket.docCountError = parser.longValue(); + bucket.showDocCountError = true; + } + } else if (token == XContentParser.Token.START_OBJECT) { + XContentParserUtils.parseTypedKeysObject(parser, "#", InternalAggregation.class, + aggregations::add); + } + } + bucket.setAggregations(new InternalAggregations(aggregations)); + return bucket; + } + } +} diff --git a/elx-http/src/main/resources/META-INF/services/org.xbib.elx.api.ExtendedClientProvider b/elx-http/src/main/resources/META-INF/services/org.xbib.elx.api.ExtendedClientProvider new file mode 100644 index 0000000..0c75f14 --- /dev/null +++ b/elx-http/src/main/resources/META-INF/services/org.xbib.elx.api.ExtendedClientProvider @@ -0,0 +1 @@ +org.xbib.elx.http.ExtendedHttpClientProvider \ No newline at end of file diff --git a/elx-http/src/main/resources/META-INF/services/org.xbib.elx.http.HttpAction b/elx-http/src/main/resources/META-INF/services/org.xbib.elx.http.HttpAction new file mode 100644 index 0000000..3d3ea95 --- /dev/null +++ b/elx-http/src/main/resources/META-INF/services/org.xbib.elx.http.HttpAction @@ -0,0 +1,3 @@ +org.xbib.elx.http.action.search.HttpSearchAction +org.xbib.elx.http.action.get.HttpGetAction +org.xbib.elx.http.action.get.HttpMultiGetAction diff --git a/elx-http/src/test/java/org/xbib/elx/http/test/ClientTest.java b/elx-http/src/test/java/org/xbib/elx/http/test/ClientTest.java new file mode 100644 index 0000000..36f038a --- /dev/null +++ b/elx-http/src/test/java/org/xbib/elx/http/test/ClientTest.java @@ -0,0 +1,122 @@ +package org.xbib.elx.http.test; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.admin.indices.refresh.RefreshAction; +import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; +import org.elasticsearch.action.get.GetAction; +import org.elasticsearch.action.get.GetRequest; +import org.elasticsearch.action.get.GetResponse; +import org.elasticsearch.action.get.MultiGetAction; +import org.elasticsearch.action.get.MultiGetRequest; +import org.elasticsearch.action.get.MultiGetResponse; +import org.elasticsearch.action.index.IndexAction; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.action.search.SearchAction; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.client.transport.NoNodeAvailableException; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.junit.Ignore; +import org.junit.Test; +import org.xbib.elx.common.ClientBuilder; +import org.xbib.elx.http.ExtendedHttpClient; +import org.xbib.elx.http.ExtendedHttpClientProvider; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +public class ClientTest extends TestBase { + + private static final Logger logger = LogManager.getLogger(ClientTest.class.getName()); + + @Ignore + @Test + public void testGet() throws Exception { + try (ExtendedHttpClient client = ClientBuilder.builder() + .provider(ExtendedHttpClientProvider.class) + .put("url", "http://" + host + ":" + httpPort) + .build()) { + IndexRequest indexRequest = new IndexRequest(); + indexRequest.index("test"); + indexRequest.type("test"); + indexRequest.id("1"); + indexRequest.source("test", "Hello Jörg"); + IndexResponse indexResponse = client("1").execute(IndexAction.INSTANCE, indexRequest).actionGet(); + client("1").execute(RefreshAction.INSTANCE, new RefreshRequest()); + + GetRequest getRequest = new GetRequest(); + getRequest.index("test"); + getRequest.type("test"); + getRequest.id("1"); + + GetResponse getResponse = client.execute(GetAction.INSTANCE, getRequest).actionGet(); + + assertTrue(getResponse.isExists()); + assertEquals("{\"test\":\"Hello Jörg\"}", getResponse.getSourceAsString()); + + } catch (NoNodeAvailableException e) { + logger.warn("skipping, no node available"); + } + } + + @Ignore + @Test + public void testMultiGet() throws Exception { + try (ExtendedHttpClient client = ClientBuilder.builder() + .provider(ExtendedHttpClientProvider.class) + .put("url", "http://" + host + ":" + httpPort) + .build()) { + IndexRequest indexRequest = new IndexRequest(); + indexRequest.index("test"); + indexRequest.type("test"); + indexRequest.id("1"); + indexRequest.source("test", "Hello Jörg"); + IndexResponse indexResponse = client("1").execute(IndexAction.INSTANCE, indexRequest).actionGet(); + client("1").execute(RefreshAction.INSTANCE, new RefreshRequest()); + + MultiGetRequest multiGetRequest = new MultiGetRequest(); + multiGetRequest.add("test", "test", "1"); + + MultiGetResponse multiGetResponse = client.execute(MultiGetAction.INSTANCE, multiGetRequest).actionGet(); + + assertEquals(1, multiGetResponse.getResponses().length); + assertEquals("{\"test\":\"Hello Jörg\"}", multiGetResponse.getResponses()[0].getResponse().getSourceAsString()); + + } catch (NoNodeAvailableException e) { + logger.warn("skipping, no node available"); + } + } + + @Test + public void testSearchDoc() throws Exception { + try (ExtendedHttpClient client = ClientBuilder.builder() + .provider(ExtendedHttpClientProvider.class) + .put("url", "http://" + host + ":" + httpPort) + .build()) { + IndexRequest indexRequest = new IndexRequest(); + indexRequest.index("test"); + indexRequest.type("test"); + indexRequest.id("1"); + indexRequest.source("test", "Hello Jörg"); + IndexResponse indexResponse = client("1").execute(IndexAction.INSTANCE, indexRequest).actionGet(); + client("1").execute(RefreshAction.INSTANCE, new RefreshRequest()); + + SearchSourceBuilder builder = new SearchSourceBuilder(); + builder.query(QueryBuilders.matchAllQuery()); + SearchRequest searchRequest = new SearchRequest(); + searchRequest.indices("test"); + searchRequest.types("test"); + searchRequest.source(builder); + SearchResponse searchResponse = client.execute(SearchAction.INSTANCE, searchRequest).actionGet(); + long hits = searchResponse.getHits().getTotalHits(); + assertEquals(1, hits); + logger.info("hits = {} source = {}", hits, searchResponse.getHits().getHits()[0].getSourceAsString()); + assertEquals("{\"test\":\"Hello Jörg\"}", searchResponse.getHits().getHits()[0].getSourceAsString()); + } catch (NoNodeAvailableException e) { + logger.warn("skipping, no node available"); + } + } +} diff --git a/elx-transport/src/test/java/org/xbib/elx/transport/MockNode.java b/elx-http/src/test/java/org/xbib/elx/http/test/MockNode.java similarity index 85% rename from elx-transport/src/test/java/org/xbib/elx/transport/MockNode.java rename to elx-http/src/test/java/org/xbib/elx/http/test/MockNode.java index 747e333..a344fc7 100644 --- a/elx-transport/src/test/java/org/xbib/elx/transport/MockNode.java +++ b/elx-http/src/test/java/org/xbib/elx/http/test/MockNode.java @@ -1,4 +1,4 @@ -package org.xbib.elx.transport; +package org.xbib.elx.http.test; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.node.Node; @@ -8,4 +8,5 @@ public class MockNode extends Node { public MockNode(Settings settings) { super(settings); } + } diff --git a/elx-transport/src/test/java/org/xbib/elx/transport/TestBase.java b/elx-http/src/test/java/org/xbib/elx/http/test/TestBase.java similarity index 86% rename from elx-transport/src/test/java/org/xbib/elx/transport/TestBase.java rename to elx-http/src/test/java/org/xbib/elx/http/test/TestBase.java index 95cedb4..78a6485 100644 --- a/elx-transport/src/test/java/org/xbib/elx/transport/TestBase.java +++ b/elx-http/src/test/java/org/xbib/elx/http/test/TestBase.java @@ -1,4 +1,4 @@ -package org.xbib.elx.transport; +package org.xbib.elx.http.test; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -6,6 +6,7 @@ import org.elasticsearch.ElasticsearchTimeoutException; import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction; import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; +import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; import org.elasticsearch.action.admin.cluster.state.ClusterStateAction; @@ -15,6 +16,7 @@ import org.elasticsearch.client.support.AbstractClient; import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.InetSocketTransportAddress; +import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.node.Node; import org.junit.After; @@ -47,9 +49,11 @@ public class TestBase { private String cluster; - private String host; + protected String host; - private int port; + protected int port; + + protected int httpPort; @Before public void startNodes() { @@ -60,7 +64,7 @@ public class TestBase { findNodeAddress(); try { ClusterHealthResponse healthResponse = client("1").execute(ClusterHealthAction.INSTANCE, - new ClusterHealthRequest().waitForStatus(ClusterHealthStatus.GREEN) + new ClusterHealthRequest().waitForStatus(ClusterHealthStatus.YELLOW) .timeout(TimeValue.timeValueSeconds(30))).actionGet(); if (healthResponse != null && healthResponse.isTimedOut()) { throw new IOException("cluster state is " + healthResponse.getStatus().name() @@ -119,14 +123,6 @@ public class TestBase { protected Settings getNodeSettings() { return settingsBuilder() .put("cluster.name", cluster) - //.put("cluster.routing.schedule", "50ms") - //.put("cluster.routing.allocation.disk.threshold_enabled", false) - //.put("discovery.zen.multicast.enabled", true) - //.put("discovery.zen.multicast.ping_timeout", "5s") - //.put("http.enabled", true) - //.put("threadpool.bulk.size", Runtime.getRuntime().availableProcessors()) - //.put("threadpool.bulk.queue_size", 16 * Runtime.getRuntime().availableProcessors()) // default is 50, too low - //.put("index.number_of_replicas", 0) .put("path.home", getHome()) .build(); } @@ -146,12 +142,18 @@ public class TestBase { protected void findNodeAddress() { NodesInfoRequest nodesInfoRequest = new NodesInfoRequest().transport(true); NodesInfoResponse response = client("1").admin().cluster().nodesInfo(nodesInfoRequest).actionGet(); - Object obj = response.iterator().next().getTransport().getAddress() - .publishAddress(); - if (obj instanceof InetSocketTransportAddress) { - InetSocketTransportAddress address = (InetSocketTransportAddress) obj; - host = address.address().getHostName(); - port = address.address().getPort(); + for (NodeInfo nodeInfo : response) { + TransportAddress transportAddress = nodeInfo.getTransport().getAddress().publishAddress(); + if (transportAddress instanceof InetSocketTransportAddress) { + InetSocketTransportAddress address = (InetSocketTransportAddress) transportAddress; + host = address.address().getHostName(); + port = address.address().getPort(); + } + transportAddress = nodeInfo.getHttp().getAddress().publishAddress(); + if (transportAddress instanceof InetSocketTransportAddress) { + InetSocketTransportAddress address = (InetSocketTransportAddress) transportAddress; + httpPort = address.address().getPort(); + } } } diff --git a/elx-http/src/test/resources/log4j2.xml b/elx-http/src/test/resources/log4j2.xml new file mode 100644 index 0000000..1258d7f --- /dev/null +++ b/elx-http/src/test/resources/log4j2.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/elx-node/src/test/java/org/xbib/elx/node/test/ClientTest.java b/elx-node/src/test/java/org/xbib/elx/node/test/ClientTest.java index dc147b0..f922bcc 100644 --- a/elx-node/src/test/java/org/xbib/elx/node/test/ClientTest.java +++ b/elx-node/src/test/java/org/xbib/elx/node/test/ClientTest.java @@ -1,10 +1,5 @@ package org.xbib.elx.node.test; -import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; - import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsAction; @@ -17,9 +12,10 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.query.QueryBuilders; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.xbib.elx.common.ClientBuilder; import org.xbib.elx.common.Parameters; import org.xbib.elx.node.ExtendedNodeClient; @@ -29,34 +25,70 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; -public class ClientTest extends TestBase { +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; + +@ExtendWith(TestExtension.class) +class ClientTest { private static final Logger logger = LogManager.getLogger(ClientTest.class.getName()); - private static final Long ACTIONS = 25000L; + private static final Long ACTIONS = 1000L; - private static final Long MAX_ACTIONS_PER_REQUEST = 1000L; + private static final Long MAX_ACTIONS_PER_REQUEST = 100L; - @Before - public void startNodes() { - try { - super.startNodes(); - startNode("2"); - } catch (Throwable t) { - logger.error("startNodes failed", t); - } + private final TestExtension.Helper helper; + + ClientTest(TestExtension.Helper helper) { + this.helper = helper; } @Test - public void testSingleDoc() throws Exception { - final ExtendedNodeClient client = ClientBuilder.builder(client("1")) + void testNewIndex() throws Exception { + final ExtendedNodeClient client = ClientBuilder.builder(helper.client("1")) + .provider(ExtendedNodeClientProvider.class) + .put(Parameters.FLUSH_INTERVAL.name(), TimeValue.timeValueSeconds(5)) + .build(); + client.newIndex("test1"); + client.close(); + } + + @Test + void testMapping() throws Exception { + final ExtendedNodeClient client = ClientBuilder.builder(helper.client("1")) + .provider(ExtendedNodeClientProvider.class) + .put(Parameters.FLUSH_INTERVAL.name(), TimeValue.timeValueSeconds(5)) + .build(); + XContentBuilder builder = JsonXContent.contentBuilder() + .startObject() + .startObject("doc") + .startObject("properties") + .startObject("location") + .field("type", "geo_point") + .endObject() + .endObject() + .endObject() + .endObject(); + client.newIndex("test2", Settings.EMPTY, builder.string()); + GetMappingsRequest getMappingsRequest = new GetMappingsRequest().indices("test2"); + GetMappingsResponse getMappingsResponse = + client.getClient().execute(GetMappingsAction.INSTANCE, getMappingsRequest).actionGet(); + logger.info("mappings={}", getMappingsResponse.getMappings()); + assertTrue(getMappingsResponse.getMappings().get("test2").containsKey("doc")); + client.close(); + } + + @Test + void testSingleDoc() throws Exception { + final ExtendedNodeClient client = ClientBuilder.builder(helper.client("1")) .provider(ExtendedNodeClientProvider.class) .put(Parameters.MAX_ACTIONS_PER_REQUEST.name(), MAX_ACTIONS_PER_REQUEST) .put(Parameters.FLUSH_INTERVAL.name(), TimeValue.timeValueSeconds(30)) .build(); try { - client.newIndex("test"); - client.index("test", "1", true, "{ \"name\" : \"Hello World\"}"); // single doc ingest + client.newIndex("test3"); + client.index("test3", "1", true, "{ \"name\" : \"Hello World\"}"); // single doc ingest client.flush(); client.waitForResponses(30L, TimeUnit.SECONDS); } catch (NoNodeAvailableException e) { @@ -72,55 +104,21 @@ public class ClientTest extends TestBase { } @Test - public void testNewIndex() throws Exception { - final ExtendedNodeClient client = ClientBuilder.builder(client("1")) - .provider(ExtendedNodeClientProvider.class) - .put(Parameters.FLUSH_INTERVAL.name(), TimeValue.timeValueSeconds(5)) - .build(); - client.newIndex("test"); - client.close(); - } - - @Test - public void testMapping() throws Exception { - final ExtendedNodeClient client = ClientBuilder.builder(client("1")) - .provider(ExtendedNodeClientProvider.class) - .put(Parameters.FLUSH_INTERVAL.name(), TimeValue.timeValueSeconds(5)) - .build(); - XContentBuilder builder = jsonBuilder() - .startObject() - .startObject("doc") - .startObject("properties") - .startObject("location") - .field("type", "geo_point") - .endObject() - .endObject() - .endObject() - .endObject(); - client.newIndex("test", Settings.EMPTY, builder.string()); - GetMappingsRequest getMappingsRequest = new GetMappingsRequest().indices("test"); - GetMappingsResponse getMappingsResponse = - client.getClient().execute(GetMappingsAction.INSTANCE, getMappingsRequest).actionGet(); - logger.info("mappings={}", getMappingsResponse.getMappings()); - assertTrue(getMappingsResponse.getMappings().get("test").containsKey("doc")); - client.close(); - } - - @Test - public void testRandomDocs() throws Exception { + void testRandomDocs() throws Exception { long numactions = ACTIONS; - final ExtendedNodeClient client = ClientBuilder.builder(client("1")) + final ExtendedNodeClient client = ClientBuilder.builder(helper.client("1")) .provider(ExtendedNodeClientProvider.class) .put(Parameters.MAX_ACTIONS_PER_REQUEST.name(), MAX_ACTIONS_PER_REQUEST) .put(Parameters.FLUSH_INTERVAL.name(), TimeValue.timeValueSeconds(60)) .build(); try { - client.newIndex("test"); + client.newIndex("test4"); for (int i = 0; i < ACTIONS; i++) { - client.index("test", null, false, "{ \"name\" : \"" + randomString(32) + "\"}"); + client.index("test4", null, false, + "{ \"name\" : \"" + helper.randomString(32) + "\"}"); } client.flush(); - client.waitForResponses(30L, TimeUnit.SECONDS); + client.waitForResponses(60L, TimeUnit.SECONDS); } catch (NoNodeAvailableException e) { logger.warn("skipping, no node available"); } finally { @@ -129,9 +127,11 @@ public class ClientTest extends TestBase { logger.error("error", client.getBulkController().getLastBulkError()); } assertNull(client.getBulkController().getLastBulkError()); - client.refreshIndex("test"); + client.refreshIndex("test4"); SearchRequestBuilder searchRequestBuilder = new SearchRequestBuilder(client.getClient(), SearchAction.INSTANCE) - .setQuery(QueryBuilders.matchAllQuery()).setSize(0); + .setIndices("test4") + .setQuery(QueryBuilders.matchAllQuery()) + .setSize(0); assertEquals(numactions, searchRequestBuilder.execute().actionGet().getHits().getTotalHits()); client.close(); @@ -139,37 +139,38 @@ public class ClientTest extends TestBase { } @Test - public void testThreadedRandomDocs() throws Exception { + void testThreadedRandomDocs() throws Exception { int maxthreads = Runtime.getRuntime().availableProcessors(); - Long maxActionsPerRequest = MAX_ACTIONS_PER_REQUEST; - final Long actions = ACTIONS; + long maxActionsPerRequest = MAX_ACTIONS_PER_REQUEST; + final long actions = ACTIONS; logger.info("NodeClient max={} maxactions={} maxloop={}", maxthreads, maxActionsPerRequest, actions); - final ExtendedNodeClient client = ClientBuilder.builder(client("1")) + final ExtendedNodeClient client = ClientBuilder.builder(helper.client("1")) .provider(ExtendedNodeClientProvider.class) .put(Parameters.MAX_CONCURRENT_REQUESTS.name(), maxthreads) .put(Parameters.MAX_ACTIONS_PER_REQUEST.name(), maxActionsPerRequest) .put(Parameters.FLUSH_INTERVAL.name(), TimeValue.timeValueSeconds(60)) .build(); try { - client.newIndex("test") - .startBulk("test", -1, 1000); - ThreadPoolExecutor pool = EsExecutors.newFixed("bulk-nodeclient-test", maxthreads, 30, - EsExecutors.daemonThreadFactory("bulk-nodeclient-test")); + client.newIndex("test5") + .startBulk("test5", -1, 1000); + ThreadPoolExecutor pool = EsExecutors.newFixed("nodeclient-test", maxthreads, 30, + EsExecutors.daemonThreadFactory("nodeclient-test")); final CountDownLatch latch = new CountDownLatch(maxthreads); for (int i = 0; i < maxthreads; i++) { pool.execute(() -> { for (int i1 = 0; i1 < actions; i1++) { - client.index("test", null, false,"{ \"name\" : \"" + randomString(32) + "\"}"); + client.index("test5", null, false, + "{ \"name\" : \"" + helper.randomString(32) + "\"}"); } latch.countDown(); }); } logger.info("waiting for latch..."); - if (latch.await(5, TimeUnit.MINUTES)) { + if (latch.await(60, TimeUnit.SECONDS)) { logger.info("flush..."); client.flush(); client.waitForResponses(60L, TimeUnit.SECONDS); - logger.info("got all responses, pool shutdown..."); + logger.info("pool shutdown..."); pool.shutdown(); logger.info("pool is shut down"); } else { @@ -178,15 +179,17 @@ public class ClientTest extends TestBase { } catch (NoNodeAvailableException e) { logger.warn("skipping, no node available"); } finally { - client.stopBulk("test", 30L, TimeUnit.SECONDS); + client.stopBulk("test5", 60L, TimeUnit.SECONDS); assertEquals(maxthreads * actions, client.getBulkMetric().getSucceeded().getCount()); if (client.getBulkController().getLastBulkError() != null) { logger.error("error", client.getBulkController().getLastBulkError()); } assertNull(client.getBulkController().getLastBulkError()); - client.refreshIndex("test"); + client.refreshIndex("test5"); SearchRequestBuilder searchRequestBuilder = new SearchRequestBuilder(client.getClient(), SearchAction.INSTANCE) - .setQuery(QueryBuilders.matchAllQuery()).setSize(0); + .setIndices("test5") + .setQuery(QueryBuilders.matchAllQuery()) + .setSize(0); assertEquals(maxthreads * actions, searchRequestBuilder.execute().actionGet().getHits().getTotalHits()); client.close(); diff --git a/elx-node/src/test/java/org/xbib/elx/node/test/DuplicateIDTest.java b/elx-node/src/test/java/org/xbib/elx/node/test/DuplicateIDTest.java index 9ea5c40..0ea421c 100644 --- a/elx-node/src/test/java/org/xbib/elx/node/test/DuplicateIDTest.java +++ b/elx-node/src/test/java/org/xbib/elx/node/test/DuplicateIDTest.java @@ -7,7 +7,8 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.client.transport.NoNodeAvailableException; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.junit.Test; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.xbib.elx.common.ClientBuilder; import org.xbib.elx.common.Parameters; import org.xbib.elx.node.ExtendedNodeClient; @@ -15,38 +16,47 @@ import org.xbib.elx.node.ExtendedNodeClientProvider; import java.util.concurrent.TimeUnit; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; -public class DuplicateIDTest extends TestBase { +@ExtendWith(TestExtension.class) +class DuplicateIDTest { private static final Logger logger = LogManager.getLogger(DuplicateIDTest.class.getName()); - private static final Long MAX_ACTIONS_PER_REQUEST = 1000L; + private static final Long MAX_ACTIONS_PER_REQUEST = 10L; - private static final Long ACTIONS = 12345L; + private static final Long ACTIONS = 50L; + + private final TestExtension.Helper helper; + + DuplicateIDTest(TestExtension.Helper helper) { + this.helper = helper; + } @Test - public void testDuplicateDocIDs() throws Exception { + void testDuplicateDocIDs() throws Exception { long numactions = ACTIONS; - final ExtendedNodeClient client = ClientBuilder.builder(client("1")) + final ExtendedNodeClient client = ClientBuilder.builder(helper.client("1")) .provider(ExtendedNodeClientProvider.class) .put(Parameters.MAX_ACTIONS_PER_REQUEST.name(), MAX_ACTIONS_PER_REQUEST) .build(); try { - client.newIndex("test"); + client.newIndex("test_dup"); for (int i = 0; i < ACTIONS; i++) { - client.index("test", randomString(1), false, "{ \"name\" : \"" + randomString(32) + "\"}"); + client.index("test_dup", helper.randomString(1), false, + "{ \"name\" : \"" + helper.randomString(32) + "\"}"); } client.flush(); client.waitForResponses(30L, TimeUnit.SECONDS); - client.refreshIndex("test"); + client.refreshIndex("test_dup"); SearchSourceBuilder builder = new SearchSourceBuilder(); builder.query(QueryBuilders.matchAllQuery()); SearchRequest searchRequest = new SearchRequest(); - searchRequest.indices("test"); - searchRequest.types("test"); + searchRequest.indices("test_dup"); searchRequest.source(builder); - long hits = client("1").execute(SearchAction.INSTANCE, searchRequest).actionGet().getHits().getTotalHits(); + long hits = helper.client("1").execute(SearchAction.INSTANCE, searchRequest).actionGet().getHits().getTotalHits(); logger.info("hits = {}", hits); assertTrue(hits < ACTIONS); } catch (NoNodeAvailableException e) { diff --git a/elx-node/src/test/java/org/xbib/elx/node/test/IndexPruneTest.java b/elx-node/src/test/java/org/xbib/elx/node/test/IndexPruneTest.java index b0b6428..7a2c3fc 100644 --- a/elx-node/src/test/java/org/xbib/elx/node/test/IndexPruneTest.java +++ b/elx-node/src/test/java/org/xbib/elx/node/test/IndexPruneTest.java @@ -7,7 +7,8 @@ import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsReques import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsResponse; import org.elasticsearch.client.transport.NoNodeAvailableException; import org.elasticsearch.common.settings.Settings; -import org.junit.Test; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.xbib.elx.api.IndexPruneResult; import org.xbib.elx.common.ClientBuilder; import org.xbib.elx.node.ExtendedNodeClient; @@ -19,17 +20,24 @@ import java.util.Arrays; import java.util.Collections; import java.util.List; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; -public class IndexPruneTest extends TestBase { +@ExtendWith(TestExtension.class) +class IndexPruneTest { private static final Logger logger = LogManager.getLogger(IndexShiftTest.class.getName()); + private final TestExtension.Helper helper; + + IndexPruneTest(TestExtension.Helper helper) { + this.helper = helper; + } + @Test - public void testPrune() throws IOException { - final ExtendedNodeClient client = ClientBuilder.builder(client("1")) + void testPrune() throws IOException { + final ExtendedNodeClient client = ClientBuilder.builder(helper.client("1")) .provider(ExtendedNodeClientProvider.class) .build(); try { @@ -37,25 +45,22 @@ public class IndexPruneTest extends TestBase { .put("index.number_of_shards", 1) .put("index.number_of_replicas", 0) .build(); - client.newIndex("test1", settings); - client.shiftIndex("test", "test1", Collections.emptyList()); - client.newIndex("test2", settings); - client.shiftIndex("test", "test2", Collections.emptyList()); - client.newIndex("test3", settings); - client.shiftIndex("test", "test3", Collections.emptyList()); - client.newIndex("test4", settings); - client.shiftIndex("test", "test4", Collections.emptyList()); - + client.newIndex("test_prune1", settings); + client.shiftIndex("test_prune", "test_prune1", Collections.emptyList()); + client.newIndex("test_prune2", settings); + client.shiftIndex("test_prune", "test_prune2", Collections.emptyList()); + client.newIndex("test_prune3", settings); + client.shiftIndex("test_prune", "test_prune3", Collections.emptyList()); + client.newIndex("test_prune4", settings); + client.shiftIndex("test_prune", "test_prune4", Collections.emptyList()); IndexPruneResult indexPruneResult = - client.pruneIndex("test", "test4", 2, 2, true); - - assertTrue(indexPruneResult.getDeletedIndices().contains("test1")); - assertTrue(indexPruneResult.getDeletedIndices().contains("test2")); - assertFalse(indexPruneResult.getDeletedIndices().contains("test3")); - assertFalse(indexPruneResult.getDeletedIndices().contains("test4")); - + client.pruneIndex("test_prune", "test_prune4", 2, 2, true); + assertTrue(indexPruneResult.getDeletedIndices().contains("test_prune1")); + assertTrue(indexPruneResult.getDeletedIndices().contains("test_prune2")); + assertFalse(indexPruneResult.getDeletedIndices().contains("test_prune3")); + assertFalse(indexPruneResult.getDeletedIndices().contains("test_prune4")); List list = new ArrayList<>(); - for (String index : Arrays.asList("test1", "test2", "test3", "test4")) { + for (String index : Arrays.asList("test_prune1", "test_prune2", "test_prune3", "test_prune4")) { IndicesExistsRequest indicesExistsRequest = new IndicesExistsRequest(); indicesExistsRequest.indices(new String[] { index }); IndicesExistsResponse indicesExistsResponse = diff --git a/elx-node/src/test/java/org/xbib/elx/node/test/IndexShiftTest.java b/elx-node/src/test/java/org/xbib/elx/node/test/IndexShiftTest.java index 6c900e8..87d422c 100644 --- a/elx-node/src/test/java/org/xbib/elx/node/test/IndexShiftTest.java +++ b/elx-node/src/test/java/org/xbib/elx/node/test/IndexShiftTest.java @@ -7,7 +7,8 @@ import org.elasticsearch.client.transport.NoNodeAvailableException; import org.elasticsearch.cluster.metadata.AliasAction; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.query.QueryBuilders; -import org.junit.Test; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.xbib.elx.api.IndexShiftResult; import org.xbib.elx.common.ClientBuilder; import org.xbib.elx.node.ExtendedNodeClient; @@ -17,16 +18,23 @@ import java.util.Arrays; import java.util.Map; import java.util.concurrent.TimeUnit; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; -public class IndexShiftTest extends TestBase { +@ExtendWith(TestExtension.class) +class IndexShiftTest { private static final Logger logger = LogManager.getLogger(IndexShiftTest.class.getName()); + private final TestExtension.Helper helper; + + IndexShiftTest(TestExtension.Helper helper) { + this.helper = helper; + } + @Test - public void testIndexShift() throws Exception { - final ExtendedNodeClient client = ClientBuilder.builder(client("1")) + void testIndexShift() throws Exception { + final ExtendedNodeClient client = ClientBuilder.builder(helper.client("1")) .provider(ExtendedNodeClientProvider.class) .build(); try { @@ -36,14 +44,13 @@ public class IndexShiftTest extends TestBase { .build(); client.newIndex("test1234", settings); for (int i = 0; i < 1; i++) { - client.index("test1234", randomString(1), false, "{ \"name\" : \"" + randomString(32) + "\"}"); + client.index("test1234", helper.randomString(1), false, + "{ \"name\" : \"" + helper.randomString(32) + "\"}"); } client.flush(); client.waitForResponses(30L, TimeUnit.SECONDS); - IndexShiftResult indexShiftResult = - client.shiftIndex("test", "test1234", Arrays.asList("a", "b", "c")); - + client.shiftIndex("test_shift", "test1234", Arrays.asList("a", "b", "c")); assertTrue(indexShiftResult.getNewAliases().contains("a")); assertTrue(indexShiftResult.getNewAliases().contains("b")); assertTrue(indexShiftResult.getNewAliases().contains("c")); @@ -53,23 +60,24 @@ public class IndexShiftTest extends TestBase { assertTrue(aliases.containsKey("a")); assertTrue(aliases.containsKey("b")); assertTrue(aliases.containsKey("c")); - assertTrue(aliases.containsKey("test")); + assertTrue(aliases.containsKey("test_shift")); - String resolved = client.resolveAlias("test"); + String resolved = client.resolveAlias("test_shift"); aliases = client.getAliases(resolved); assertTrue(aliases.containsKey("a")); assertTrue(aliases.containsKey("b")); assertTrue(aliases.containsKey("c")); - assertTrue(aliases.containsKey("test")); + assertTrue(aliases.containsKey("test_shift")); client.newIndex("test5678", settings); for (int i = 0; i < 1; i++) { - client.index("test5678", randomString(1), false, "{ \"name\" : \"" + randomString(32) + "\"}"); + client.index("test5678", helper.randomString(1), false, + "{ \"name\" : \"" + helper.randomString(32) + "\"}"); } client.flush(); client.waitForResponses(30L, TimeUnit.SECONDS); - indexShiftResult = client.shiftIndex("test", "test5678", Arrays.asList("d", "e", "f"), + indexShiftResult = client.shiftIndex("test_shift", "test5678", Arrays.asList("d", "e", "f"), (request, index, alias) -> request.addAliasAction(new IndicesAliasesRequest.AliasActions(AliasAction.Type.ADD, index, alias).filter(QueryBuilders.termQuery("my_key", alias))) ); @@ -88,7 +96,7 @@ public class IndexShiftTest extends TestBase { assertTrue(aliases.containsKey("e")); assertTrue(aliases.containsKey("f")); - resolved = client.resolveAlias("test"); + resolved = client.resolveAlias("test_shift"); aliases = client.getAliases(resolved); assertTrue(aliases.containsKey("a")); assertTrue(aliases.containsKey("b")); diff --git a/elx-node/src/test/java/org/xbib/elx/node/test/ReplicaTest.java b/elx-node/src/test/java/org/xbib/elx/node/test/ReplicaTest.java deleted file mode 100644 index 78a83db..0000000 --- a/elx-node/src/test/java/org/xbib/elx/node/test/ReplicaTest.java +++ /dev/null @@ -1,151 +0,0 @@ -package org.xbib.elx.node.test; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.elasticsearch.action.admin.indices.stats.CommonStats; -import org.elasticsearch.action.admin.indices.stats.IndexShardStats; -import org.elasticsearch.action.admin.indices.stats.IndexStats; -import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction; -import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequestBuilder; -import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; -import org.elasticsearch.action.search.SearchAction; -import org.elasticsearch.action.search.SearchRequestBuilder; -import org.elasticsearch.client.transport.NoNodeAvailableException; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.indexing.IndexingStats; -import org.junit.Ignore; -import org.junit.Test; -import org.xbib.elx.common.ClientBuilder; -import org.xbib.elx.node.ExtendedNodeClient; -import org.xbib.elx.node.ExtendedNodeClientProvider; - -import java.util.HashMap; -import java.util.Map; -import java.util.concurrent.TimeUnit; - -import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; - -@Ignore -public class ReplicaTest extends TestBase { - - private static final Logger logger = LogManager.getLogger(ReplicaTest.class.getName()); - - @Test - public void testReplicaLevel() throws Exception { - - // we need nodes for replica levels - startNode("2"); - startNode("3"); - startNode("4"); - - Settings settingsTest1 = Settings.settingsBuilder() - .put("index.number_of_shards", 2) - .put("index.number_of_replicas", 3) - .build(); - - Settings settingsTest2 = Settings.settingsBuilder() - .put("index.number_of_shards", 2) - .put("index.number_of_replicas", 1) - .build(); - - final ExtendedNodeClient client = ClientBuilder.builder(client("1")) - .provider(ExtendedNodeClientProvider.class) - .build(); - - try { - client.newIndex("test1", settingsTest1, new HashMap<>()) - .newIndex("test2", settingsTest2, new HashMap<>()); - client.waitForCluster("GREEN", 30L, TimeUnit.SECONDS); - for (int i = 0; i < 1234; i++) { - client.index("test1", null, false, "{ \"name\" : \"" + randomString(32) + "\"}"); - } - for (int i = 0; i < 1234; i++) { - client.index("test2", null, false, "{ \"name\" : \"" + randomString(32) + "\"}"); - } - client.flush(); - client.waitForResponses(30L, TimeUnit.SECONDS); - } catch (NoNodeAvailableException e) { - logger.warn("skipping, no node available"); - } finally { - logger.info("refreshing"); - client.refreshIndex("test1"); - client.refreshIndex("test2"); - SearchRequestBuilder searchRequestBuilder = new SearchRequestBuilder(client.getClient(), SearchAction.INSTANCE) - .setIndices("test1", "test2") - .setQuery(matchAllQuery()); - long hits = searchRequestBuilder.execute().actionGet().getHits().getTotalHits(); - logger.info("query total hits={}", hits); - assertEquals(2468, hits); - IndicesStatsRequestBuilder indicesStatsRequestBuilder = new IndicesStatsRequestBuilder(client.getClient(), IndicesStatsAction.INSTANCE) - .all(); - IndicesStatsResponse response = indicesStatsRequestBuilder.execute().actionGet(); - for (Map.Entry m : response.getIndices().entrySet()) { - IndexStats indexStats = m.getValue(); - CommonStats commonStats = indexStats.getTotal(); - IndexingStats indexingStats = commonStats.getIndexing(); - IndexingStats.Stats stats = indexingStats.getTotal(); - logger.info("index {}: count = {}", m.getKey(), stats.getIndexCount()); - for (Map.Entry me : indexStats.getIndexShards().entrySet()) { - IndexShardStats indexShardStats = me.getValue(); - CommonStats commonShardStats = indexShardStats.getTotal(); - logger.info("shard {} count = {}", me.getKey(), - commonShardStats.getIndexing().getTotal().getIndexCount()); - } - } - try { - client.deleteIndex("test1") - .deleteIndex("test2"); - } catch (Exception e) { - logger.error("delete index failed, ignored. Reason:", e); - } - client.close(); - if (client.getBulkController().getLastBulkError() != null) { - logger.error("error", client.getBulkController().getLastBulkError()); - } - assertNull(client.getBulkController().getLastBulkError()); - } - } - - @Test - public void testUpdateReplicaLevel() throws Exception { - - long numberOfShards = 2; - int replicaLevel = 3; - - // we need 3 nodes for replica level 3 - startNode("2"); - startNode("3"); - - Settings settings = Settings.settingsBuilder() - .put("index.number_of_shards", numberOfShards) - .put("index.number_of_replicas", 0) - .build(); - - final ExtendedNodeClient client = ClientBuilder.builder(client("1")) - .provider(ExtendedNodeClientProvider.class) - .build(); - - try { - client.newIndex("replicatest", settings, new HashMap<>()); - client.waitForCluster("GREEN", 30L, TimeUnit.SECONDS); - for (int i = 0; i < 12345; i++) { - client.index("replicatest",null, false, "{ \"name\" : \"" + randomString(32) + "\"}"); - } - client.flush(); - client.waitForResponses(30L, TimeUnit.SECONDS); - client.updateReplicaLevel("replicatest", replicaLevel, 30L, TimeUnit.SECONDS); - assertEquals(replicaLevel, client.getReplicaLevel("replicatest")); - } catch (NoNodeAvailableException e) { - logger.warn("skipping, no node available"); - } finally { - client.close(); - if (client.getBulkController().getLastBulkError() != null) { - logger.error("error", client.getBulkController().getLastBulkError()); - } - assertNull(client.getBulkController().getLastBulkError()); - } - } - -} diff --git a/elx-node/src/test/java/org/xbib/elx/node/test/SmokeTest.java b/elx-node/src/test/java/org/xbib/elx/node/test/SmokeTest.java index 8000063..ae33bd9 100644 --- a/elx-node/src/test/java/org/xbib/elx/node/test/SmokeTest.java +++ b/elx-node/src/test/java/org/xbib/elx/node/test/SmokeTest.java @@ -4,7 +4,8 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.client.transport.NoNodeAvailableException; import org.elasticsearch.common.settings.Settings; -import org.junit.Test; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.xbib.elx.common.ClientBuilder; import org.xbib.elx.api.IndexDefinition; import org.xbib.elx.node.ExtendedNodeClient; @@ -12,47 +13,46 @@ import org.xbib.elx.node.ExtendedNodeClientProvider; import java.util.concurrent.TimeUnit; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; -public class SmokeTest extends TestBase { +@ExtendWith(TestExtension.class) +class SmokeTest { private static final Logger logger = LogManager.getLogger(SmokeTest.class.getName()); + private final TestExtension.Helper helper; + + SmokeTest(TestExtension.Helper helper) { + this.helper = helper; + } + @Test - public void smokeTest() throws Exception { - final ExtendedNodeClient client = ClientBuilder.builder(client("1")) + void smokeTest() throws Exception { + final ExtendedNodeClient client = ClientBuilder.builder(helper.client("1")) .provider(ExtendedNodeClientProvider.class) .build(); try { - client.newIndex("test"); - client.index("test", "1", true, "{ \"name\" : \"Hello World\"}"); // single doc ingest + client.newIndex("test_smoke"); + client.index("test_smoke", "1", true, "{ \"name\" : \"Hello World\"}"); // single doc ingest client.flush(); client.waitForResponses(30, TimeUnit.SECONDS); - - assertEquals(getClusterName(), client.getClusterName()); - - client.checkMapping("test"); - - client.update("test", "1", "{ \"name\" : \"Another name\"}"); + assertEquals(helper.getCluster(), client.getClusterName()); + client.checkMapping("test_smoke"); + client.update("test_smoke", "1", "{ \"name\" : \"Another name\"}"); client.flush(); - - client.waitForRecovery("test", 10L, TimeUnit.SECONDS); - - client.delete("test", "1"); - client.deleteIndex("test"); - - IndexDefinition indexDefinition = client.buildIndexDefinitionFromSettings("test2", Settings.settingsBuilder() + client.waitForRecovery("test_smoke", 10L, TimeUnit.SECONDS); + client.delete("test_smoke", "1"); + client.deleteIndex("test_smoke"); + IndexDefinition indexDefinition = client.buildIndexDefinitionFromSettings("test_smoke", Settings.settingsBuilder() .build()); assertEquals(0, indexDefinition.getReplicaLevel()); client.newIndex(indexDefinition); client.index(indexDefinition.getFullIndexName(), "1", true, "{ \"name\" : \"Hello World\"}"); client.flush(); client.updateReplicaLevel(indexDefinition, 2); - int replica = client.getReplicaLevel(indexDefinition); assertEquals(2, replica); - client.deleteIndex(indexDefinition); assertEquals(0, client.getBulkMetric().getFailed().getCount()); assertEquals(4, client.getBulkMetric().getSucceeded().getCount()); diff --git a/elx-node/src/test/java/org/xbib/elx/node/test/TestBase.java b/elx-node/src/test/java/org/xbib/elx/node/test/TestBase.java deleted file mode 100644 index 2c486e0..0000000 --- a/elx-node/src/test/java/org/xbib/elx/node/test/TestBase.java +++ /dev/null @@ -1,212 +0,0 @@ -package org.xbib.elx.node.test; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.elasticsearch.ElasticsearchTimeoutException; -import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction; -import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; -import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; -import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest; -import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; -import org.elasticsearch.action.admin.cluster.state.ClusterStateAction; -import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; -import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; -import org.elasticsearch.client.support.AbstractClient; -import org.elasticsearch.cluster.health.ClusterHealthStatus; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.InetSocketTransportAddress; -import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.node.Node; -import org.junit.After; -import org.junit.Before; - -import java.io.IOException; -import java.nio.file.FileVisitResult; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.nio.file.SimpleFileVisitor; -import java.nio.file.attribute.BasicFileAttributes; -import java.util.HashMap; -import java.util.Map; -import java.util.Random; - -import static org.elasticsearch.common.settings.Settings.settingsBuilder; - -public class TestBase { - - private static final Logger logger = LogManager.getLogger("test"); - - private static final Random random = new Random(); - - private static final char[] numbersAndLetters = ("0123456789abcdefghijklmnopqrstuvwxyz").toCharArray(); - - private Map nodes = new HashMap<>(); - - private Map clients = new HashMap<>(); - - private String cluster; - - private String host; - - private int port; - - @Before - public void startNodes() { - try { - logger.info("starting"); - setClusterName("test-cluster-" + System.getProperty("user.name")); - startNode("1"); - findNodeAddress(); - try { - ClusterHealthResponse healthResponse = client("1").execute(ClusterHealthAction.INSTANCE, - new ClusterHealthRequest().waitForStatus(ClusterHealthStatus.GREEN) - .timeout(TimeValue.timeValueSeconds(30))).actionGet(); - if (healthResponse != null && healthResponse.isTimedOut()) { - throw new IOException("cluster state is " + healthResponse.getStatus().name() - + ", from here on, everything will fail!"); - } - } catch (ElasticsearchTimeoutException e) { - throw new IOException("cluster does not respond to health request, cowardly refusing to continue"); - } - ClusterStateRequest clusterStateRequest = new ClusterStateRequest().all(); - ClusterStateResponse clusterStateResponse = - client("1").execute(ClusterStateAction.INSTANCE, clusterStateRequest).actionGet(); - logger.info("cluster name = {}", clusterStateResponse.getClusterName().value()); - logger.info("host = {} port = {}", host, port); - - } catch (Throwable t) { - logger.error("startNodes failed", t); - } - } - - @After - public void stopNodes() { - try { - closeNodes(); - } catch (Exception e) { - logger.error("can not close nodes", e); - } finally { - try { - deleteFiles(); - logger.info("data files wiped"); - Thread.sleep(2000L); // let OS commit changes - } catch (IOException e) { - logger.error(e.getMessage(), e); - } catch (InterruptedException e) { - // ignore - } - } - } - - protected void setClusterName(String cluster) { - this.cluster = cluster; - } - - protected String getClusterName() { - return cluster; - } - - protected Settings getTransportSettings() { - return settingsBuilder() - .put("host", host) - .put("port", port) - .put("cluster.name", cluster) - .put("path.home", getHome()) - .build(); - } - - protected Settings getNodeSettings() { - return settingsBuilder() - .put("cluster.name", cluster) - //.put("cluster.routing.schedule", "50ms") - //.put("cluster.routing.allocation.disk.threshold_enabled", false) - //.put("discovery.zen.multicast.enabled", true) - //.put("discovery.zen.multicast.ping_timeout", "5s") - //.put("http.enabled", true) - //.put("threadpool.bulk.size", Runtime.getRuntime().availableProcessors()) - //.put("threadpool.bulk.queue_size", 16 * Runtime.getRuntime().availableProcessors()) // default is 50, too low - //.put("index.number_of_replicas", 0) - .put("path.home", getHome()) - .build(); - } - - protected static String getHome() { - return System.getProperty("path.home", System.getProperty("user.dir")); - } - - protected void startNode(String id) { - buildNode(id).start(); - } - - protected AbstractClient client(String id) { - return clients.get(id); - } - - protected void findNodeAddress() { - NodesInfoRequest nodesInfoRequest = new NodesInfoRequest().transport(true); - NodesInfoResponse response = client("1").admin().cluster().nodesInfo(nodesInfoRequest).actionGet(); - Object obj = response.iterator().next().getTransport().getAddress() - .publishAddress(); - if (obj instanceof InetSocketTransportAddress) { - InetSocketTransportAddress address = (InetSocketTransportAddress) obj; - host = address.address().getHostName(); - port = address.address().getPort(); - } - } - - private Node buildNode(String id) { - Settings nodeSettings = settingsBuilder() - .put(getNodeSettings()) - .put("name", id) - .build(); - Node node = new MockNode(nodeSettings); - AbstractClient client = (AbstractClient) node.client(); - nodes.put(id, node); - clients.put(id, client); - logger.info("clients={}", clients); - return node; - } - - protected String randomString(int len) { - final char[] buf = new char[len]; - final int n = numbersAndLetters.length - 1; - for (int i = 0; i < buf.length; i++) { - buf[i] = numbersAndLetters[random.nextInt(n)]; - } - return new String(buf); - } - - private void closeNodes() { - logger.info("closing all clients"); - for (AbstractClient client : clients.values()) { - client.close(); - } - clients.clear(); - logger.info("closing all nodes"); - for (Node node : nodes.values()) { - if (node != null) { - node.close(); - } - } - nodes.clear(); - logger.info("all nodes closed"); - } - - private static void deleteFiles() throws IOException { - Path directory = Paths.get(getHome() + "/data"); - Files.walkFileTree(directory, new SimpleFileVisitor() { - @Override - public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { - Files.delete(file); - return FileVisitResult.CONTINUE; - } - - @Override - public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException { - Files.delete(dir); - return FileVisitResult.CONTINUE; - } - }); - } -} diff --git a/elx-node/src/test/java/org/xbib/elx/node/test/TestExtension.java b/elx-node/src/test/java/org/xbib/elx/node/test/TestExtension.java new file mode 100644 index 0000000..7d28686 --- /dev/null +++ b/elx-node/src/test/java/org/xbib/elx/node/test/TestExtension.java @@ -0,0 +1,213 @@ +package org.xbib.elx.node.test; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.ElasticsearchTimeoutException; +import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction; +import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; +import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; +import org.elasticsearch.action.admin.cluster.node.info.NodesInfoAction; +import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest; +import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; +import org.elasticsearch.action.admin.cluster.state.ClusterStateAction; +import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; +import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; +import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.client.support.AbstractClient; +import org.elasticsearch.cluster.health.ClusterHealthStatus; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.InetSocketTransportAddress; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.node.Node; +import org.junit.jupiter.api.extension.AfterAllCallback; +import org.junit.jupiter.api.extension.BeforeAllCallback; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.api.extension.ParameterContext; +import org.junit.jupiter.api.extension.ParameterResolutionException; +import org.junit.jupiter.api.extension.ParameterResolver; + +import java.io.IOException; +import java.nio.file.FileVisitResult; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.SimpleFileVisitor; +import java.nio.file.attribute.BasicFileAttributes; +import java.util.HashMap; +import java.util.Map; +import java.util.Random; + +import static org.elasticsearch.common.settings.Settings.settingsBuilder; + +public class TestExtension implements ParameterResolver, BeforeAllCallback, AfterAllCallback { + + private static final Logger logger = LogManager.getLogger("test"); + + private static final Random random = new Random(); + + private static final char[] numbersAndLetters = ("0123456789abcdefghijklmnopqrstuvwxyz").toCharArray(); + + private Map nodes = new HashMap<>(); + + private Map clients = new HashMap<>(); + + private String home; + + private String cluster; + + private String host; + + private int port; + + private static final String key = "es-instance"; + + private static final ExtensionContext.Namespace ns = + ExtensionContext.Namespace.create(TestExtension.class); + + @Override + public boolean supportsParameter(ParameterContext parameterContext, ExtensionContext extensionContext) + throws ParameterResolutionException { + return parameterContext.getParameter().getType().equals(Helper.class); + } + + @Override + public Object resolveParameter(ParameterContext parameterContext, ExtensionContext extensionContext) + throws ParameterResolutionException { + return extensionContext.getParent().get().getStore(ns).getOrComputeIfAbsent(key, key -> create()); + } + + @Override + public void beforeAll(ExtensionContext context) throws Exception { + Helper helper = context.getParent().get().getStore(ns).getOrComputeIfAbsent(key, key -> create(), Helper.class); + setHome(System.getProperty("path.home") + "/" + helper.randomString(8)); + setClusterName("test-cluster-" + System.getProperty("user.name")); + deleteFiles(Paths.get(getHome() + "/data")); + logger.info("data files wiped"); + Thread.sleep(2000L); // let OS commit changes + logger.info("starting cluster"); + helper.startNode("1"); + NodesInfoRequest nodesInfoRequest = new NodesInfoRequest().transport(true); + NodesInfoResponse response = helper.client("1"). execute(NodesInfoAction.INSTANCE, nodesInfoRequest).actionGet(); + Object obj = response.iterator().next().getTransport().getAddress() + .publishAddress(); + if (obj instanceof InetSocketTransportAddress) { + InetSocketTransportAddress address = (InetSocketTransportAddress) obj; + host = address.address().getHostName(); + port = address.address().getPort(); + } + try { + ClusterHealthResponse healthResponse = helper.client("1").execute(ClusterHealthAction.INSTANCE, + new ClusterHealthRequest().waitForStatus(ClusterHealthStatus.GREEN) + .timeout(TimeValue.timeValueSeconds(30))).actionGet(); + if (healthResponse != null && healthResponse.isTimedOut()) { + throw new IOException("cluster state is " + healthResponse.getStatus().name() + + ", from here on, everything will fail!"); + } + } catch (ElasticsearchTimeoutException e) { + throw new IOException("cluster does not respond to health request, cowardly refusing to continue"); + } + ClusterStateRequest clusterStateRequest = new ClusterStateRequest().all(); + ClusterStateResponse clusterStateResponse = + helper.client("1").execute(ClusterStateAction.INSTANCE, clusterStateRequest).actionGet(); + logger.info("cluster name = {}", clusterStateResponse.getClusterName().value()); + logger.info("host = {} port = {}", host, port); + } + + @Override + public void afterAll(ExtensionContext context) throws Exception { + closeNodes(); + deleteFiles(Paths.get(getHome() + "/data")); + } + + private void setClusterName(String cluster) { + this.cluster = cluster; + } + + private String getClusterName() { + return cluster; + } + + private void setHome(String home) { + this.home = home; + } + + private String getHome() { + return home; + } + + private void closeNodes() { + logger.info("closing all clients"); + for (AbstractClient client : clients.values()) { + client.close(); + } + clients.clear(); + logger.info("closing all nodes"); + for (Node node : nodes.values()) { + if (node != null) { + node.close(); + } + } + nodes.clear(); + logger.info("all nodes closed"); + } + + private static void deleteFiles(Path directory) throws IOException { + if (Files.exists(directory)) { + Files.walkFileTree(directory, new SimpleFileVisitor() { + @Override + public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { + Files.delete(file); + return FileVisitResult.CONTINUE; + } + + @Override + public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException { + Files.delete(dir); + return FileVisitResult.CONTINUE; + } + }); + } + } + + private Helper create() { + return new Helper(); + } + + class Helper { + + void startNode(String id) { + buildNode(id).start(); + } + + private Node buildNode(String id) { + Settings nodeSettings = settingsBuilder() + .put("cluster.name", getClusterName()) + .put("path.home", getHome()) + .put("name", id) + .build(); + Node node = new MockNode(nodeSettings); + AbstractClient client = (AbstractClient) node.client(); + nodes.put(id, node); + clients.put(id, client); + logger.info("clients={}", clients); + return node; + } + + String randomString(int len) { + final char[] buf = new char[len]; + final int n = numbersAndLetters.length - 1; + for (int i = 0; i < buf.length; i++) { + buf[i] = numbersAndLetters[random.nextInt(n)]; + } + return new String(buf); + } + + ElasticsearchClient client(String id) { + return clients.get(id); + } + + String getCluster() { + return getClusterName(); + } + } +} diff --git a/elx-transport/src/test/java/org/xbib/elx/transport/ReplicaTest.java b/elx-transport/src/test/java/org/xbib/elx/transport/ReplicaTest.java deleted file mode 100644 index c4f9af0..0000000 --- a/elx-transport/src/test/java/org/xbib/elx/transport/ReplicaTest.java +++ /dev/null @@ -1,150 +0,0 @@ -package org.xbib.elx.transport; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.elasticsearch.action.admin.indices.stats.CommonStats; -import org.elasticsearch.action.admin.indices.stats.IndexShardStats; -import org.elasticsearch.action.admin.indices.stats.IndexStats; -import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction; -import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequestBuilder; -import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; -import org.elasticsearch.action.search.SearchAction; -import org.elasticsearch.action.search.SearchRequestBuilder; -import org.elasticsearch.client.transport.NoNodeAvailableException; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.indexing.IndexingStats; -import org.junit.Test; -import org.xbib.elx.common.ClientBuilder; - -import java.util.HashMap; -import java.util.Map; -import java.util.concurrent.TimeUnit; - -import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; - -public class ReplicaTest extends TestBase { - - private static final Logger logger = LogManager.getLogger(ReplicaTest.class.getName()); - - @Test - public void testReplicaLevel() throws Exception { - - // we need nodes for replica levels - startNode("2"); - startNode("3"); - startNode("4"); - - Settings settingsTest1 = Settings.settingsBuilder() - .put("index.number_of_shards", 2) - .put("index.number_of_replicas", 3) - .build(); - - Settings settingsTest2 = Settings.settingsBuilder() - .put("index.number_of_shards", 2) - .put("index.number_of_replicas", 1) - .build(); - - final ExtendedTransportClient client = ClientBuilder.builder() - .provider(ExtendedTransportClientProvider.class) - .put(getTransportSettings()) - .build(); - - try { - client.newIndex("test1", settingsTest1, new HashMap<>()) - .newIndex("test2", settingsTest2, new HashMap<>()); - client.waitForCluster("GREEN", 30L, TimeUnit.SECONDS); - for (int i = 0; i < 1234; i++) { - client.index("test1", null, false, "{ \"name\" : \"" + randomString(32) + "\"}"); - } - for (int i = 0; i < 1234; i++) { - client.index("test2", null, false, "{ \"name\" : \"" + randomString(32) + "\"}"); - } - client.flush(); - client.waitForResponses(30L, TimeUnit.SECONDS); - client.refreshIndex("test1"); - client.refreshIndex("test2"); - } catch (NoNodeAvailableException e) { - logger.warn("skipping, no node available"); - } finally { - SearchRequestBuilder searchRequestBuilder = new SearchRequestBuilder(client.getClient(), SearchAction.INSTANCE) - .setIndices("test1", "test2") - .setQuery(matchAllQuery()); - long hits = searchRequestBuilder.execute().actionGet().getHits().getTotalHits(); - logger.info("query total hits={}", hits); - assertEquals(2468, hits); - - // TODO move to api - IndicesStatsRequestBuilder indicesStatsRequestBuilder = new IndicesStatsRequestBuilder(client.getClient(), - IndicesStatsAction.INSTANCE).all(); - IndicesStatsResponse response = indicesStatsRequestBuilder.execute().actionGet(); - for (Map.Entry m : response.getIndices().entrySet()) { - IndexStats indexStats = m.getValue(); - CommonStats commonStats = indexStats.getTotal(); - IndexingStats indexingStats = commonStats.getIndexing(); - IndexingStats.Stats stats = indexingStats.getTotal(); - logger.info("index {}: count = {}", m.getKey(), stats.getIndexCount()); - for (Map.Entry me : indexStats.getIndexShards().entrySet()) { - IndexShardStats indexShardStats = me.getValue(); - CommonStats commonShardStats = indexShardStats.getTotal(); - logger.info("shard {} count = {}", me.getKey(), - commonShardStats.getIndexing().getTotal().getIndexCount()); - } - } - try { - client.deleteIndex("test1").deleteIndex("test2"); - } catch (Exception e) { - logger.error("delete index failed, ignored. Reason:", e); - } - if (client.getBulkController().getLastBulkError() != null) { - logger.error("error", client.getBulkController().getLastBulkError()); - } - assertNull(client.getBulkController().getLastBulkError()); - client.close(); - } - } - - @Test - public void testUpdateReplicaLevel() throws Exception { - - long numberOfShards = 2; - int replicaLevel = 3; - - // we need 3 nodes for replica level 3 - startNode("2"); - startNode("3"); - - int shardsAfterReplica; - - final ExtendedTransportClient client = ClientBuilder.builder() - .provider(ExtendedTransportClientProvider.class) - .put(getTransportSettings()) - .build(); - - Settings settings = Settings.settingsBuilder() - .put("index.number_of_shards", numberOfShards) - .put("index.number_of_replicas", 0) - .build(); - - try { - client.newIndex("replicatest", settings, new HashMap<>()); - client.waitForCluster("GREEN", 30L, TimeUnit.SECONDS); - for (int i = 0; i < 12345; i++) { - client.index("replicatest", null, false, "{ \"name\" : \"" + randomString(32) + "\"}"); - } - client.flush(); - client.waitForResponses(30L, TimeUnit.SECONDS); - client.updateReplicaLevel("replicatest", replicaLevel, 30L, TimeUnit.SECONDS); - assertEquals(replicaLevel, client.getReplicaLevel("replicatest")); - } catch (NoNodeAvailableException e) { - logger.warn("skipping, no node available"); - } finally { - client.close(); - if (client.getBulkController().getLastBulkError() != null) { - logger.error("error", client.getBulkController().getLastBulkError()); - } - assertNull(client.getBulkController().getLastBulkError()); - } - } -} diff --git a/elx-transport/src/test/java/org/xbib/elx/transport/package-info.java b/elx-transport/src/test/java/org/xbib/elx/transport/package-info.java deleted file mode 100644 index 7abcc5a..0000000 --- a/elx-transport/src/test/java/org/xbib/elx/transport/package-info.java +++ /dev/null @@ -1 +0,0 @@ -package org.xbib.elx.transport; \ No newline at end of file diff --git a/elx-transport/src/test/java/org/xbib/elx/transport/ClientTest.java b/elx-transport/src/test/java/org/xbib/elx/transport/test/ClientTest.java similarity index 67% rename from elx-transport/src/test/java/org/xbib/elx/transport/ClientTest.java rename to elx-transport/src/test/java/org/xbib/elx/transport/test/ClientTest.java index c4dc4fa..54b68c5 100644 --- a/elx-transport/src/test/java/org/xbib/elx/transport/ClientTest.java +++ b/elx-transport/src/test/java/org/xbib/elx/transport/test/ClientTest.java @@ -1,4 +1,4 @@ -package org.xbib.elx.transport; +package org.xbib.elx.transport.test; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -13,69 +13,87 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.query.QueryBuilders; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.xbib.elx.common.ClientBuilder; import org.xbib.elx.common.Parameters; +import org.xbib.elx.transport.ExtendedTransportClient; +import org.xbib.elx.transport.ExtendedTransportClientProvider; -import java.util.HashMap; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; -public class ClientTest extends TestBase { +@ExtendWith(TestExtension.class) +class ClientTest { private static final Logger logger = LogManager.getLogger(ClientTest.class.getName()); + private static final Long ACTIONS = 100L; + private static final Long MAX_ACTIONS_PER_REQUEST = 1000L; - private static final Long ACTIONS = 1234L; + private final TestExtension.Helper helper; - @Before - public void startNodes() { - try { - super.startNodes(); - startNode("2"); - } catch (Throwable t) { - logger.error("startNodes failed", t); - } + ClientTest(TestExtension.Helper helper) { + this.helper = helper; + helper.startNode("2"); } @Test - public void testClientIndexOp() throws Exception { + void testClientIndexOp() throws Exception { final ExtendedTransportClient client = ClientBuilder.builder() .provider(ExtendedTransportClientProvider.class) - .put(getTransportSettings()) + .put(helper.getTransportSettings()) .put(Parameters.FLUSH_INTERVAL.name(), TimeValue.timeValueSeconds(60)) .build(); - client.newIndex("test"); - try { - client.deleteIndex("test") - .newIndex("test") - .deleteIndex("test"); - } catch (NoNodeAvailableException e) { - logger.error("no node available"); - } finally { - client.close(); - } + client.newIndex("test1"); + client.close(); } @Test - public void testSingleDoc() throws Exception { + void testMapping() throws Exception { final ExtendedTransportClient client = ClientBuilder.builder() .provider(ExtendedTransportClientProvider.class) - .put(getTransportSettings()) + .put(helper.getTransportSettings()) + .put(Parameters.FLUSH_INTERVAL.name(), TimeValue.timeValueSeconds(5)) + .build(); + XContentBuilder builder = jsonBuilder() + .startObject() + .startObject("doc") + .startObject("properties") + .startObject("location") + .field("type", "geo_point") + .endObject() + .endObject() + .endObject() + .endObject(); + client.newIndex("test2", Settings.EMPTY, builder.string()); + GetMappingsRequest getMappingsRequest = new GetMappingsRequest().indices("test2"); + GetMappingsResponse getMappingsResponse = + client.getClient().execute(GetMappingsAction.INSTANCE, getMappingsRequest).actionGet(); + logger.info("mappings={}", getMappingsResponse.getMappings()); + assertTrue(getMappingsResponse.getMappings().get("test2").containsKey("doc")); + client.close(); + } + + @Test + void testSingleDoc() throws Exception { + final ExtendedTransportClient client = ClientBuilder.builder() + .provider(ExtendedTransportClientProvider.class) + .put(helper.getTransportSettings()) .put(Parameters.MAX_ACTIONS_PER_REQUEST.name(), MAX_ACTIONS_PER_REQUEST) .put(Parameters.FLUSH_INTERVAL.name(), TimeValue.timeValueSeconds(60)) .build(); try { - client.newIndex("test"); - client.index("test", "1", true, "{ \"name\" : \"Hello World\"}"); + client.newIndex("test3"); + client.index("test3", "1", true, "{ \"name\" : \"Hello World\"}"); client.flush(); client.waitForResponses(30L, TimeUnit.SECONDS); } catch (NoNodeAvailableException e) { @@ -91,47 +109,22 @@ public class ClientTest extends TestBase { } @Test - public void testMapping() throws Exception { - final ExtendedTransportClient client = ClientBuilder.builder() - .provider(ExtendedTransportClientProvider.class) - .put(getTransportSettings()) - .put(Parameters.FLUSH_INTERVAL.name(), TimeValue.timeValueSeconds(5)) - .build(); - XContentBuilder builder = jsonBuilder() - .startObject() - .startObject("doc") - .startObject("properties") - .startObject("location") - .field("type", "geo_point") - .endObject() - .endObject() - .endObject() - .endObject(); - client.newIndex("test", Settings.EMPTY, builder.string()); - GetMappingsRequest getMappingsRequest = new GetMappingsRequest().indices("test"); - GetMappingsResponse getMappingsResponse = - client.getClient().execute(GetMappingsAction.INSTANCE, getMappingsRequest).actionGet(); - logger.info("mappings={}", getMappingsResponse.getMappings()); - assertTrue(getMappingsResponse.getMappings().get("test").containsKey("doc")); - client.close(); - } - - @Test - public void testRandomDocs() throws Exception { + void testRandomDocs() throws Exception { long numactions = ACTIONS; final ExtendedTransportClient client = ClientBuilder.builder() .provider(ExtendedTransportClientProvider.class) - .put(getTransportSettings()) + .put(helper.getTransportSettings()) .put(Parameters.MAX_ACTIONS_PER_REQUEST.name(), MAX_ACTIONS_PER_REQUEST) .put(Parameters.FLUSH_INTERVAL.name(), TimeValue.timeValueSeconds(60)) .build(); try { - client.newIndex("test"); + client.newIndex("test4"); for (int i = 0; i < ACTIONS; i++) { - client.index("test", null, false, "{ \"name\" : \"" + randomString(32) + "\"}"); + client.index("test4", null, false, + "{ \"name\" : \"" + helper.randomString(32) + "\"}"); } client.flush(); - client.waitForResponses(30L, TimeUnit.SECONDS); + client.waitForResponses(60L, TimeUnit.SECONDS); } catch (NoNodeAvailableException e) { logger.warn("skipping, no node available"); } finally { @@ -140,37 +133,40 @@ public class ClientTest extends TestBase { logger.error("error", client.getBulkController().getLastBulkError()); } assertNull(client.getBulkController().getLastBulkError()); + client.refreshIndex("test4"); + SearchRequestBuilder searchRequestBuilder = new SearchRequestBuilder(client.getClient(), SearchAction.INSTANCE) + .setIndices("test4") + .setQuery(QueryBuilders.matchAllQuery()) + .setSize(0); + assertEquals(numactions, + searchRequestBuilder.execute().actionGet().getHits().getTotalHits()); client.close(); } } @Test - public void testThreadedRandomDocs() throws Exception { + @Disabled + void testThreadedRandomDocs() throws Exception { int maxthreads = Runtime.getRuntime().availableProcessors(); long maxactions = MAX_ACTIONS_PER_REQUEST; final long maxloop = ACTIONS; - - Settings settingsForIndex = Settings.settingsBuilder() - .put("index.number_of_shards", 2) - .put("index.number_of_replicas", 1) - .build(); - final ExtendedTransportClient client = ClientBuilder.builder() .provider(ExtendedTransportClientProvider.class) - .put(getTransportSettings()) + .put(helper.getTransportSettings()) .put(Parameters.MAX_ACTIONS_PER_REQUEST.name(), maxactions) .put(Parameters.FLUSH_INTERVAL.name(), TimeValue.timeValueSeconds(60)) .build(); try { - client.newIndex("test", settingsForIndex, new HashMap<>()) - .startBulk("test", -1, 1000); - ThreadPoolExecutor pool = EsExecutors.newFixed("bulkclient-test", maxthreads, 30, - EsExecutors.daemonThreadFactory("bulkclient-test")); + client.newIndex("test5") + .startBulk("test5", -1, 1000); + ThreadPoolExecutor pool = EsExecutors.newFixed("transportclient-test", maxthreads, 30, + EsExecutors.daemonThreadFactory("transportclient-test")); final CountDownLatch latch = new CountDownLatch(maxthreads); for (int i = 0; i < maxthreads; i++) { pool.execute(() -> { for (int i1 = 0; i1 < maxloop; i1++) { - client.index("test",null, false, "{ \"name\" : \"" + randomString(32) + "\"}"); + client.index("test5",null, false, + "{ \"name\" : \"" + helper.randomString(32) + "\"}"); } latch.countDown(); }); @@ -179,25 +175,25 @@ public class ClientTest extends TestBase { if (latch.await(60, TimeUnit.SECONDS)) { logger.info("flush ..."); client.flush(); - client.waitForResponses(30L, TimeUnit.SECONDS); - logger.info("pool to be shut down ..."); + client.waitForResponses(60L, TimeUnit.SECONDS); + logger.info("pool shutdown ..."); pool.shutdown(); - logger.info("poot shut down"); + logger.info("poot is shut down"); + } else { + logger.warn("latch timeout"); } - client.stopBulk("test", 30L, TimeUnit.SECONDS); - assertEquals(maxthreads * maxloop, client.getBulkMetric().getSucceeded().getCount()); } catch (NoNodeAvailableException e) { logger.warn("skipping, no node available"); } finally { + client.stopBulk("test5", 60L, TimeUnit.SECONDS); + assertEquals(maxthreads * maxloop, client.getBulkMetric().getSucceeded().getCount()); if (client.getBulkController().getLastBulkError() != null) { logger.error("error", client.getBulkController().getLastBulkError()); } assertNull(client.getBulkController().getLastBulkError()); - // extra search lookup - client.refreshIndex("test"); + client.refreshIndex("test5"); SearchRequestBuilder searchRequestBuilder = new SearchRequestBuilder(client.getClient(), SearchAction.INSTANCE) - // to avoid NPE at org.elasticsearch.action.search.SearchRequest.writeTo(SearchRequest.java:580) - .setIndices("_all") + .setIndices("test5") .setQuery(QueryBuilders.matchAllQuery()) .setSize(0); assertEquals(maxthreads * maxloop, diff --git a/elx-transport/src/test/java/org/xbib/elx/transport/DuplicateIDTest.java b/elx-transport/src/test/java/org/xbib/elx/transport/test/DuplicateIDTest.java similarity index 60% rename from elx-transport/src/test/java/org/xbib/elx/transport/DuplicateIDTest.java rename to elx-transport/src/test/java/org/xbib/elx/transport/test/DuplicateIDTest.java index 279fb6b..8172f89 100644 --- a/elx-transport/src/test/java/org/xbib/elx/transport/DuplicateIDTest.java +++ b/elx-transport/src/test/java/org/xbib/elx/transport/test/DuplicateIDTest.java @@ -1,4 +1,4 @@ -package org.xbib.elx.transport; +package org.xbib.elx.transport.test; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -7,47 +7,57 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.client.transport.NoNodeAvailableException; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.junit.Test; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.xbib.elx.common.ClientBuilder; import org.xbib.elx.common.Parameters; +import org.xbib.elx.transport.ExtendedTransportClient; +import org.xbib.elx.transport.ExtendedTransportClientProvider; import java.util.concurrent.TimeUnit; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; -public class DuplicateIDTest extends TestBase { +@ExtendWith(TestExtension.class) +class DuplicateIDTest { private final static Logger logger = LogManager.getLogger(DuplicateIDTest.class.getName()); - private final static Long MAX_ACTIONS_PER_REQUEST = 1000L; + private final static Long MAX_ACTIONS_PER_REQUEST = 10L; - private final static Long ACTIONS = 12345L; + private final static Long ACTIONS = 5L; + + private final TestExtension.Helper helper; + + DuplicateIDTest(TestExtension.Helper helper) { + this.helper = helper; + } @Test - public void testDuplicateDocIDs() throws Exception { + void testDuplicateDocIDs() throws Exception { long numactions = ACTIONS; final ExtendedTransportClient client = ClientBuilder.builder() .provider(ExtendedTransportClientProvider.class) - .put(getTransportSettings()) + .put(helper.getTransportSettings()) .put(Parameters.MAX_ACTIONS_PER_REQUEST.name(), MAX_ACTIONS_PER_REQUEST) .build(); try { - client.newIndex("test"); + client.newIndex("test_dup"); for (int i = 0; i < ACTIONS; i++) { - client.index("test", randomString(1), false, "{ \"name\" : \"" + randomString(32) + "\"}"); + client.index("test_dup", helper.randomString(1), false, + "{ \"name\" : \"" + helper.randomString(32) + "\"}"); } client.flush(); client.waitForResponses(30L, TimeUnit.SECONDS); - client.refreshIndex("test"); + client.refreshIndex("test_dup"); SearchSourceBuilder builder = new SearchSourceBuilder(); builder.query(QueryBuilders.matchAllQuery()); SearchRequest searchRequest = new SearchRequest(); - searchRequest.indices("test"); - searchRequest.types("test"); + searchRequest.indices("test_dup"); searchRequest.source(builder); - long hits = client("1").execute(SearchAction.INSTANCE, searchRequest).actionGet().getHits().getTotalHits(); + long hits = helper.client("1").execute(SearchAction.INSTANCE, searchRequest).actionGet().getHits().getTotalHits(); logger.info("hits = {}", hits); assertTrue(hits < ACTIONS); } catch (NoNodeAvailableException e) { diff --git a/elx-transport/src/test/java/org/xbib/elx/transport/IndexPruneTest.java b/elx-transport/src/test/java/org/xbib/elx/transport/test/IndexPruneTest.java similarity index 62% rename from elx-transport/src/test/java/org/xbib/elx/transport/IndexPruneTest.java rename to elx-transport/src/test/java/org/xbib/elx/transport/test/IndexPruneTest.java index 4ce1843..de886d1 100644 --- a/elx-transport/src/test/java/org/xbib/elx/transport/IndexPruneTest.java +++ b/elx-transport/src/test/java/org/xbib/elx/transport/test/IndexPruneTest.java @@ -1,4 +1,4 @@ -package org.xbib.elx.transport; +package org.xbib.elx.transport.test; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -7,9 +7,12 @@ import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsReques import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsResponse; import org.elasticsearch.client.transport.NoNodeAvailableException; import org.elasticsearch.common.settings.Settings; -import org.junit.Test; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.xbib.elx.api.IndexPruneResult; import org.xbib.elx.common.ClientBuilder; +import org.xbib.elx.transport.ExtendedTransportClient; +import org.xbib.elx.transport.ExtendedTransportClientProvider; import java.io.IOException; import java.util.ArrayList; @@ -17,44 +20,51 @@ import java.util.Arrays; import java.util.Collections; import java.util.List; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; -public class IndexPruneTest extends TestBase { +@ExtendWith(TestExtension.class) +class IndexPruneTest { private static final Logger logger = LogManager.getLogger(IndexShiftTest.class.getName()); + private final TestExtension.Helper helper; + + IndexPruneTest(TestExtension.Helper helper) { + this.helper = helper; + } + @Test - public void testPrune() throws IOException { + void testPrune() throws IOException { final ExtendedTransportClient client = ClientBuilder.builder() .provider(ExtendedTransportClientProvider.class) - .put(getTransportSettings()) + .put(helper.getTransportSettings()) .build(); try { Settings settings = Settings.builder() .put("index.number_of_shards", 1) .put("index.number_of_replicas", 0) .build(); - client.newIndex("test1", settings); - client.shiftIndex("test", "test1", Collections.emptyList()); - client.newIndex("test2", settings); - client.shiftIndex("test", "test2", Collections.emptyList()); - client.newIndex("test3", settings); - client.shiftIndex("test", "test3", Collections.emptyList()); - client.newIndex("test4", settings); - client.shiftIndex("test", "test4", Collections.emptyList()); + client.newIndex("test_prune1", settings); + client.shiftIndex("test_prune", "test_prune1", Collections.emptyList()); + client.newIndex("test_prune2", settings); + client.shiftIndex("test_prune", "test_prune2", Collections.emptyList()); + client.newIndex("test_prune3", settings); + client.shiftIndex("test_prune", "test_prune3", Collections.emptyList()); + client.newIndex("test_prune4", settings); + client.shiftIndex("test_prune", "test_prune4", Collections.emptyList()); IndexPruneResult indexPruneResult = - client.pruneIndex("test", "test4", 2, 2, true); + client.pruneIndex("test_prune", "test_prune4", 2, 2, true); - assertTrue(indexPruneResult.getDeletedIndices().contains("test1")); - assertTrue(indexPruneResult.getDeletedIndices().contains("test2")); - assertFalse(indexPruneResult.getDeletedIndices().contains("test3")); - assertFalse(indexPruneResult.getDeletedIndices().contains("test4")); + assertTrue(indexPruneResult.getDeletedIndices().contains("test_prune1")); + assertTrue(indexPruneResult.getDeletedIndices().contains("test_prune2")); + assertFalse(indexPruneResult.getDeletedIndices().contains("test_prune3")); + assertFalse(indexPruneResult.getDeletedIndices().contains("test_prune4")); List list = new ArrayList<>(); - for (String index : Arrays.asList("test1", "test2", "test3", "test4")) { + for (String index : Arrays.asList("test_prune1", "test_prune2", "test_prune3", "test_prune4")) { IndicesExistsRequest indicesExistsRequest = new IndicesExistsRequest(); indicesExistsRequest.indices(new String[] { index }); IndicesExistsResponse indicesExistsResponse = diff --git a/elx-transport/src/test/java/org/xbib/elx/transport/IndexShiftTest.java b/elx-transport/src/test/java/org/xbib/elx/transport/test/IndexShiftTest.java similarity index 73% rename from elx-transport/src/test/java/org/xbib/elx/transport/IndexShiftTest.java rename to elx-transport/src/test/java/org/xbib/elx/transport/test/IndexShiftTest.java index 41388c7..1c57312 100644 --- a/elx-transport/src/test/java/org/xbib/elx/transport/IndexShiftTest.java +++ b/elx-transport/src/test/java/org/xbib/elx/transport/test/IndexShiftTest.java @@ -1,4 +1,4 @@ -package org.xbib.elx.transport; +package org.xbib.elx.transport.test; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -7,26 +7,36 @@ import org.elasticsearch.client.transport.NoNodeAvailableException; import org.elasticsearch.cluster.metadata.AliasAction; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.query.QueryBuilders; -import org.junit.Test; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.xbib.elx.api.IndexShiftResult; import org.xbib.elx.common.ClientBuilder; +import org.xbib.elx.transport.ExtendedTransportClient; +import org.xbib.elx.transport.ExtendedTransportClientProvider; import java.util.Arrays; import java.util.Map; import java.util.concurrent.TimeUnit; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; -public class IndexShiftTest extends TestBase { +@ExtendWith(TestExtension.class) +class IndexShiftTest { private static final Logger logger = LogManager.getLogger(IndexShiftTest.class.getName()); + private final TestExtension.Helper helper; + + IndexShiftTest(TestExtension.Helper helper) { + this.helper = helper; + } + @Test - public void testIndexAlias() throws Exception { + void testIndexAlias() throws Exception { final ExtendedTransportClient client = ClientBuilder.builder() .provider(ExtendedTransportClientProvider.class) - .put(getTransportSettings()).build(); + .put(helper.getTransportSettings()).build(); try { Settings settings = Settings.builder() .put("index.number_of_shards", 1) @@ -34,13 +44,14 @@ public class IndexShiftTest extends TestBase { .build(); client.newIndex("test1234", settings); for (int i = 0; i < 1; i++) { - client.index("test1234", randomString(1), false, "{ \"name\" : \"" + randomString(32) + "\"}"); + client.index("test1234", helper.randomString(1), false, + "{ \"name\" : \"" + helper.randomString(32) + "\"}"); } client.flush(); client.waitForResponses(30L, TimeUnit.SECONDS); IndexShiftResult indexShiftResult = - client.shiftIndex("test", "test1234", Arrays.asList("a", "b", "c")); + client.shiftIndex("test_shift", "test1234", Arrays.asList("a", "b", "c")); assertTrue(indexShiftResult.getNewAliases().contains("a")); assertTrue(indexShiftResult.getNewAliases().contains("b")); @@ -51,23 +62,24 @@ public class IndexShiftTest extends TestBase { assertTrue(aliases.containsKey("a")); assertTrue(aliases.containsKey("b")); assertTrue(aliases.containsKey("c")); - assertTrue(aliases.containsKey("test")); + assertTrue(aliases.containsKey("test_shift")); - String resolved = client.resolveAlias("test"); + String resolved = client.resolveAlias("test_shift"); aliases = client.getAliases(resolved); assertTrue(aliases.containsKey("a")); assertTrue(aliases.containsKey("b")); assertTrue(aliases.containsKey("c")); - assertTrue(aliases.containsKey("test")); + assertTrue(aliases.containsKey("test_shift")); client.newIndex("test5678", settings); for (int i = 0; i < 1; i++) { - client.index("test5678", randomString(1), false, "{ \"name\" : \"" + randomString(32) + "\"}"); + client.index("test5678", helper.randomString(1), false, + "{ \"name\" : \"" + helper.randomString(32) + "\"}"); } client.flush(); client.waitForResponses(30L, TimeUnit.SECONDS); - indexShiftResult = client.shiftIndex("test", "test5678", Arrays.asList("d", "e", "f"), + indexShiftResult = client.shiftIndex("test_shift", "test5678", Arrays.asList("d", "e", "f"), (request, index, alias) -> request.addAliasAction(new IndicesAliasesRequest.AliasActions(AliasAction.Type.ADD, index, alias).filter(QueryBuilders.termQuery("my_key", alias))) ); @@ -86,7 +98,7 @@ public class IndexShiftTest extends TestBase { assertTrue(aliases.containsKey("e")); assertTrue(aliases.containsKey("f")); - resolved = client.resolveAlias("test"); + resolved = client.resolveAlias("test_shift"); aliases = client.getAliases(resolved); assertTrue(aliases.containsKey("a")); assertTrue(aliases.containsKey("b")); @@ -98,11 +110,11 @@ public class IndexShiftTest extends TestBase { } catch (NoNodeAvailableException e) { logger.warn("skipping, no node available"); } finally { + client.close(); if (client.getBulkController().getLastBulkError() != null) { logger.error("error", client.getBulkController().getLastBulkError()); } assertNull(client.getBulkController().getLastBulkError()); - client.close(); } } } diff --git a/elx-transport/src/test/java/org/xbib/elx/transport/test/MockNode.java b/elx-transport/src/test/java/org/xbib/elx/transport/test/MockNode.java new file mode 100644 index 0000000..4127d22 --- /dev/null +++ b/elx-transport/src/test/java/org/xbib/elx/transport/test/MockNode.java @@ -0,0 +1,11 @@ +package org.xbib.elx.transport.test; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.node.Node; + +public class MockNode extends Node { + + public MockNode(Settings settings) { + super(settings); + } +} diff --git a/elx-transport/src/test/java/org/xbib/elx/transport/SmokeTest.java b/elx-transport/src/test/java/org/xbib/elx/transport/test/SmokeTest.java similarity index 61% rename from elx-transport/src/test/java/org/xbib/elx/transport/SmokeTest.java rename to elx-transport/src/test/java/org/xbib/elx/transport/test/SmokeTest.java index 3721157..0572ef5 100644 --- a/elx-transport/src/test/java/org/xbib/elx/transport/SmokeTest.java +++ b/elx-transport/src/test/java/org/xbib/elx/transport/test/SmokeTest.java @@ -1,46 +1,50 @@ -package org.xbib.elx.transport; +package org.xbib.elx.transport.test; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.client.transport.NoNodeAvailableException; import org.elasticsearch.common.settings.Settings; -import org.junit.Test; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.xbib.elx.api.IndexDefinition; import org.xbib.elx.common.ClientBuilder; +import org.xbib.elx.transport.ExtendedTransportClient; +import org.xbib.elx.transport.ExtendedTransportClientProvider; import java.util.concurrent.TimeUnit; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; -public class SmokeTest extends TestBase { +@ExtendWith(TestExtension.class) +class SmokeTest extends TestExtension { private static final Logger logger = LogManager.getLogger(SmokeTest.class.getName()); + private final TestExtension.Helper helper; + + SmokeTest(TestExtension.Helper helper) { + this.helper = helper; + } + @Test - public void testSingleDocNodeClient() throws Exception { + void testSingleDocNodeClient() throws Exception { final ExtendedTransportClient client = ClientBuilder.builder() .provider(ExtendedTransportClientProvider.class) - .put(getTransportSettings()) + .put(helper.getTransportSettings()) .build(); try { - client.newIndex("test"); - client.index("test", "1", true, "{ \"name\" : \"Hello World\"}"); // single doc ingest + client.newIndex("test_smoke"); + client.index("test_smoke", "1", true, "{ \"name\" : \"Hello World\"}"); // single doc ingest client.flush(); client.waitForResponses(30, TimeUnit.SECONDS); - - assertEquals(getClusterName(), client.getClusterName()); - - client.checkMapping("test"); - - client.update("test", "1", "{ \"name\" : \"Another name\"}"); + assertEquals(helper.getCluster(), client.getClusterName()); + client.checkMapping("test_smoke"); + client.update("test_smoke", "1", "{ \"name\" : \"Another name\"}"); client.flush(); - - client.waitForRecovery("test", 10L, TimeUnit.SECONDS); - - client.delete("test", "1"); - client.deleteIndex("test"); - + client.waitForRecovery("test_smoke", 10L, TimeUnit.SECONDS); + client.delete("test_smoke", "1"); + client.deleteIndex("test_smoke"); IndexDefinition indexDefinition = client.buildIndexDefinitionFromSettings("test2", Settings.settingsBuilder() .build()); assertEquals(0, indexDefinition.getReplicaLevel()); @@ -48,7 +52,6 @@ public class SmokeTest extends TestBase { client.index(indexDefinition.getFullIndexName(), "1", true, "{ \"name\" : \"Hello World\"}"); client.flush(); client.updateReplicaLevel(indexDefinition, 2); - int replica = client.getReplicaLevel(indexDefinition); assertEquals(2, replica); diff --git a/elx-transport/src/test/java/org/xbib/elx/transport/test/TestExtension.java b/elx-transport/src/test/java/org/xbib/elx/transport/test/TestExtension.java new file mode 100644 index 0000000..8c6fe19 --- /dev/null +++ b/elx-transport/src/test/java/org/xbib/elx/transport/test/TestExtension.java @@ -0,0 +1,229 @@ +package org.xbib.elx.transport.test; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.ElasticsearchTimeoutException; +import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction; +import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; +import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; +import org.elasticsearch.action.admin.cluster.node.info.NodesInfoAction; +import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest; +import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; +import org.elasticsearch.action.admin.cluster.state.ClusterStateAction; +import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; +import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; +import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.client.support.AbstractClient; +import org.elasticsearch.cluster.health.ClusterHealthStatus; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.InetSocketTransportAddress; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.node.Node; +import org.junit.jupiter.api.extension.AfterAllCallback; +import org.junit.jupiter.api.extension.BeforeAllCallback; +import org.junit.jupiter.api.extension.ExtensionContext; +import org.junit.jupiter.api.extension.ParameterContext; +import org.junit.jupiter.api.extension.ParameterResolutionException; +import org.junit.jupiter.api.extension.ParameterResolver; + +import java.io.IOException; +import java.nio.file.FileVisitResult; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.SimpleFileVisitor; +import java.nio.file.attribute.BasicFileAttributes; +import java.util.HashMap; +import java.util.Map; +import java.util.Random; + +import static org.elasticsearch.common.settings.Settings.settingsBuilder; + +public class TestExtension implements ParameterResolver, BeforeAllCallback, AfterAllCallback { + + private static final Logger logger = LogManager.getLogger("test"); + + private static final Random random = new Random(); + + private static final char[] numbersAndLetters = ("0123456789abcdefghijklmnopqrstuvwxyz").toCharArray(); + + private Map nodes = new HashMap<>(); + + private Map clients = new HashMap<>(); + + private String home; + + private String cluster; + + private String host; + + private int port; + + private static final String key = "es-instance"; + + private static final ExtensionContext.Namespace ns = + ExtensionContext.Namespace.create(TestExtension.class); + + @Override + public boolean supportsParameter(ParameterContext parameterContext, ExtensionContext extensionContext) + throws ParameterResolutionException { + return parameterContext.getParameter().getType().equals(Helper.class); + } + + @Override + public Object resolveParameter(ParameterContext parameterContext, ExtensionContext extensionContext) + throws ParameterResolutionException { + return extensionContext.getParent().get().getStore(ns).getOrComputeIfAbsent(key, key -> create()); + } + + @Override + public void beforeAll(ExtensionContext context) throws Exception { + Helper helper = context.getParent().get().getStore(ns).getOrComputeIfAbsent(key, key -> create(), Helper.class); + setHome(System.getProperty("path.home") + "/" + helper.randomString(8)); + setClusterName("test-cluster-" + System.getProperty("user.name")); + deleteFiles(Paths.get(getHome() + "/data")); + logger.info("data files wiped: " + getHome()); + Thread.sleep(2000L); // let OS commit changes + logger.info("starting cluster"); + helper.startNode("1"); + NodesInfoRequest nodesInfoRequest = new NodesInfoRequest().transport(true); + NodesInfoResponse response = helper.client("1"). execute(NodesInfoAction.INSTANCE, nodesInfoRequest).actionGet(); + Object obj = response.iterator().next().getTransport().getAddress() + .publishAddress(); + if (obj instanceof InetSocketTransportAddress) { + InetSocketTransportAddress address = (InetSocketTransportAddress) obj; + host = address.address().getHostName(); + port = address.address().getPort(); + } + try { + ClusterHealthResponse healthResponse = helper.client("1").execute(ClusterHealthAction.INSTANCE, + new ClusterHealthRequest().waitForStatus(ClusterHealthStatus.GREEN) + .timeout(TimeValue.timeValueSeconds(30))).actionGet(); + if (healthResponse != null && healthResponse.isTimedOut()) { + throw new IOException("cluster state is " + healthResponse.getStatus().name() + + ", from here on, everything will fail!"); + } + } catch (ElasticsearchTimeoutException e) { + throw new IOException("cluster does not respond to health request, cowardly refusing to continue"); + } + ClusterStateRequest clusterStateRequest = new ClusterStateRequest().all(); + ClusterStateResponse clusterStateResponse = + helper.client("1").execute(ClusterStateAction.INSTANCE, clusterStateRequest).actionGet(); + logger.info("cluster name = {}", clusterStateResponse.getClusterName().value()); + logger.info("host = {} port = {}", host, port); + } + + @Override + public void afterAll(ExtensionContext context) throws Exception { + closeNodes(); + deleteFiles(Paths.get(getHome() + "/data")); + logger.info("cluster stopped"); + } + + private void setClusterName(String cluster) { + this.cluster = cluster; + } + + private String getClusterName() { + return cluster; + } + + private void setHome(String home) { + this.home = home; + } + + private String getHome() { + return home; + } + + private void closeNodes() { + logger.info("closing all clients"); + for (AbstractClient client : clients.values()) { + client.close(); + } + clients.clear(); + logger.info("closing all nodes"); + for (Node node : nodes.values()) { + if (node != null) { + node.close(); + } + } + nodes.clear(); + logger.info("all nodes closed"); + } + + private static void deleteFiles(Path directory) throws IOException { + if (Files.exists(directory)) { + Files.walkFileTree(directory, new SimpleFileVisitor() { + @Override + public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { + Files.delete(file); + return FileVisitResult.CONTINUE; + } + + @Override + public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException { + Files.delete(dir); + return FileVisitResult.CONTINUE; + } + }); + } + } + + private Helper create() { + return new Helper(); + } + + class Helper { + + Settings getNodeSettings() { + return settingsBuilder() + .put("cluster.name", getClusterName()) + .put("path.home", getHome()) + .build(); + } + + Settings getTransportSettings() { + return settingsBuilder() + .put("host", host) + .put("port", port) + .put("cluster.name", getClusterName()) + .put("path.home", getHome() + "/transport") + .build(); + } + + void startNode(String id) { + buildNode(id).start(); + } + + private Node buildNode(String id) { + Settings nodeSettings = settingsBuilder() + .put(getNodeSettings()) + .put("name", id) + .build(); + Node node = new MockNode(nodeSettings); + AbstractClient client = (AbstractClient) node.client(); + nodes.put(id, node); + clients.put(id, client); + logger.info("clients={}", clients); + return node; + } + + String randomString(int len) { + final char[] buf = new char[len]; + final int n = numbersAndLetters.length - 1; + for (int i = 0; i < buf.length; i++) { + buf[i] = numbersAndLetters[random.nextInt(n)]; + } + return new String(buf); + } + + ElasticsearchClient client(String id) { + return clients.get(id); + } + + String getCluster() { + return getClusterName(); + } + } +} diff --git a/elx-transport/src/test/java/org/xbib/elx/transport/test/package-info.java b/elx-transport/src/test/java/org/xbib/elx/transport/test/package-info.java new file mode 100644 index 0000000..b038c6f --- /dev/null +++ b/elx-transport/src/test/java/org/xbib/elx/transport/test/package-info.java @@ -0,0 +1 @@ +package org.xbib.elx.transport.test; \ No newline at end of file diff --git a/gradle.properties b/gradle.properties index ae3020f..c904bfb 100644 --- a/gradle.properties +++ b/gradle.properties @@ -1,18 +1,19 @@ group = org.xbib name = elx -version = 2.2.1.7 +version = 2.2.1.8 xbib-metrics.version = 1.2.0 xbib-guice.version = 4.0.4 +xbib-netty-http.version = 4.1.35.0 elasticsearch.version = 2.2.1 +jackson.version = 2.6.7 jna.version = 4.5.2 log4j.version = 2.11.1 mustache.version = 0.9.5 jts.version = 1.13 -jackson-dataformat.version = 2.8.11 -junit.version = 4.12 +junit.version = 5.4.2 wagon.version = 3.0.0 asciidoclet.version = 1.5.4 diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar index 87b738cbd051603d91cc39de6cb000dd98fe6b02..5c2d1cf016b3885f6930543d57b744ea8c220a1a 100644 GIT binary patch delta 3320 zcmai0c|2768`iN!wwN(!Oxeo5?`tVU3{m#%jC~noTx!q_nHtNnR`zAgWC@krB#b55 znJk4YA);()+(!K-w|npJuix)IpYu7-^SqzuJ>T~|?;j_-ma(;-@!<_I_B>B@4FVej z11CRtM@$8afpkN^v*te{ycR9yTldxXJbmio?@}x{9}zaw&=aQt(a^ZXN9S3i8a+Z% zGc@&(5}jplZjJKk2wNlTp(mbeKL5J9Gjo==yT{-eVKj?*rT1%bQ@%#Xce~~1f{19^ zoD75QEoSzDVh@!9qG4yl`;9=Ysp?rRX=(8$VDRz=R+oA3>jLxjW-H!-2biNSYuy)U z7-B-qC5l;>qjMTg!DbWPY}h7qxi6xp)_T)_O2+*&NDg?v;RyY@5XtWHx%(ImQ_3E% zA%$s3xrxE0Fk>DhG!pG)4}I!pWJl~QtV_3Jl2W4PuWWssMq^UpGatK+4CING9pB#5 z_NDc)aonVrZuXsr5!RcE#?aXFZQjt2VMd)-p00K$EheT?H!m_D2Mdqq;0moaO=C&y zgJnvzgUn!wkx^{r049pU#gsIMhl`%{MDNl;}JRbneC zSTB=5f;o9=2Rt24_lt&%%f~m{Ts)zu8H9j`INrgMp>l-|k%Kj%U`OXL1J2e+CJHJxreHLD_#o*ZeuXE4uGDQAJS_PpEGt7hmd7psmLEBL^h zD#JbHiklZEXkk9(6uF$ErsUu^jg7c~1oRS&CuTq*Xg_cOvGw~FZ&1#p(6|jz9lJnP zSIJ)sX_W2$PSksX&}*_ejz+t*X)xK|JcakaMRGd%c*R)cQcT|?sM^#{fdjh5_I$iK zBX_d;wz+cf>b}r!i3yo6eaua)d`|Mi_|Q3mAz5Qn?#~xgE9In<;TwYN^~mtaYy#WU z*ffWtxwlk&!e@UfqQ$bn23RDFV3o-H_WM}44yQpYw;JuRf$at#XX-qmuVnKqg-Bo# zJjZE39)!{i$qJh?oJzVzWFDlSW;{Wf`Z)33Y$Fh^+qasrsEJsfy9yhyTFe?Lej&3n zEAS(D8WCt(ew(SGD z-J#7@l?KI*ZbS)AVQ23qV&{c=$@zUp0@6=kZp+5by+gnAWdB||7e=!yJ|WTpG0OC7 zKlKWFv6#(>nrEq@d1i-#L9SVxTDNb1DaY%2$=@)`k&3s8wz$M*;THa&!2Isj%6CQS zY>A4HtmWY3@9e@F)mCHJQzBz~Lt(wcJE{!CAr=wxn4|5n(jslTy)~IF?tNK zD^2#hTM0d6MDg>`9;s5*(4W1V8y}F8OT6Xap{`=h1XVKO3zrBh=;JnIs*RB>@7t5T zwV=G^T)L=(9P7tS={6`tEBBBm^u~_!-#m75G*h}y_Jj7|STtiY_LDR5UUHI@awWmB zDn6q9{2M-EHaTm53ln%ENJ$HpLwRcL>7^hUrM=}&`qmWTgtr{Ul*Lqcd_9S0xZ1s>F2dVd(s)3&$`gxFAu6jXYIS ze#M~w@=X@lm)sFI4EEiqKh7JxN=_?+}D=iHCc&S2<^VPZ6 zYKXZgvi(Yne9}k6o=ezgquABVB77}x$nKXh`@LjH&lQPqm_;MTL>4RGO|E#_7AS4@43rz=ij?gcMZalnd-JK4ILhL)Ee(3G zN}g99HmhxoBjHR~y@b>-7{f+`p zIZ<^8%d;wCA#xfwSc6$DNVPjAX6FCkb|MQ|6hFyz9UhoLF0^xUd#*^2Ofn zOJgmwDyb1=Z8T)ArRy|VQOM+BrhZ>W_ELJ6u(d^JTu|j%*6g8JKZ-ewoj)sXJCdS= zHOo?HscL;Z`H18}%WnE1&o42KZ+=fg(*VN>t>kRkcd{mP9NF6;MnzH&m2WsD)sX~h zbhv|Ux$w2avQwoI`IKiGMLrL;Z>R}Y_0K*L=63V z)ut+5tM74Glzb?92kbu5@3M#1Hi7K3$c)?TL$}`aKf0hC3`r!>Xy3!f{ z`}Y#@$`|mG1JlKzVE!vD04aX}x#hV*+AC>bQ|%XJ1<&;=0?uX!RM?CIB=+!tgkB-w zu*HF--^U4#nG1mXz0v^0@|UCs1lt}!1zTaTwoe+k?sPym`pyB-F25ivXx)#1|1%|e zJ7Vpujkk#Lu%U{v6xiQ5LW2`~QXrR`ja@*L=b0ejT977v%C)0WAik0gV7U z6a-7##p#p>>>3a{^Z}e3Z~?A|foBFU12bqaEE*0vqdCCVLFq%{;F%$Dkb6i8;Qo!C z&;zkU(!i5zbSMd)zQzg8(kU^HPQ^flVIzR)<^jwbwget09YD?zV*rx+mx@0IN{#S< zsB|8Ve>>sJI7sHE!@=(((ttqL0ks%C4M^r5!0H?rJ;MV|jtT)1cMl{|9xo_Okp@Ka ze^CzbCPf?IDFWLlE`V1FDDpZ0C@7~VMZt%!6%SFtxz{!Tb1UfBDEg~49x!4|2#_L! zX=6UXeh28_?VY*suC^Sy!?XXp?9-G{ zEbF`ELqycMcTK-$-pw|Jox9S^<_NX$7{PI7aX1p5N>aOyj&D01H#;3?=q^!=_mq@k zUHheWO_|CDYA~8r<-%q8&Gm$uPSx4S`reKPnv?Nif4kS)^smTg&m@kLYT87txGxGxw+Qc zTAi=`vzavOlyLrgf2A~;1~Gx$jcb|fkhfctRt6CjRooL|#wr)(*8D4n;2cBe>p9_T zCeJf!IgCH0h1m)UPLk3hZz120oe5YH$oXjSMHcPv@#wX;OP5bBSJMavm2}5Q8(V&# zXGA!+dAwOiXuQ)|+XwF2HW1@_MPm3*v{M86V_~+xk1K7cI7mxBKU5#bofCjZqqjs$ z(sipv#Ul%KJ)h?ua}a3Dg(6yaxeJ(HD-&`AT9kZJVLJTz?WIfgao$bYwEhXh+&GA= zkpI03HVxtWc*H!~z~9%DC;;Qej=WppOD!i1$MO1`&8LW%IWd2sbnS7j+<0b`v1%qx!owUU+ZIHJFp1yH9BFvUYI^up=ZYX$K_YM|Bn2fCG3sq#(EpRB$|A9~9*^M%Sq)EAjr0&W`hHyz96Z9h*odHK|Ju$JQ0c zO9oayZQv;2b{pLJo`T)C%yS@sAKO*WC%22XDmrdRTd;uFr*sb_{GDl=*Y`l*;>lNWh=XCbn#V}C&jmw3>t zNH(fnG%j@AI$TSggf(e3DxrpHjnpeKExsb|hC`kxjD4HUSmu)&aJNt&DtCWh#51*} zS!qfplP(f0`hJ)VHrXFD_uB7ia4#%U)3S8lGY9^(T1)M8xQxP*3w4&QJr~O`$A&N5 z_taom$34zt+reJDV?oZ*qr5ERUH7#~xm7)D(u#q#m`~~-F+TZ6Q*L)s_#T3GZUuZM zhCH9!{qXnD)9jln$|GDeDPqo=+D6#vQkAjdHtT>{VxU#AQJW-je=UWN5*R>v5vWF6 zK_6z?#thq>&%@fu5epvO$rfx`v9GojdOLGFaQ2V8?Ri z(?L2JBK(;G)bIF7r5T6Ahzst5k4j#hvhl3a`@Ksfyj3^Cx}zGE)vm$ecB$?~2`S&e zE)Nx6TiDO*JO6UmWWc+zLDmnII+)ROEvW3_{*%Fjs8Q^k4+Z&cJ0lp=@p*N!fw0>L zPSWrxar=HPDCwZnmN%orA-K2142{bJ0el>N{KM(xoHJu_HWSQihq^y%SEmj>CsBjl zj6)jxqm7NwiVHh-xQ`ex^02-y_ZO`A`P(1UwLK5G_T8=uI8@e%Kh31Xay z>H$7OG8cQ%>c_RjXhRA|Yh=93MnM)V0JlD#yP-1YNx}5`sg}-vE%slfve&}e$*L>+ zSAq_CMc5SYx6N)5h%-)?JOAhiVM5`TWT7?<9 zKKxMMb9GXHpQ1ajAr?!hxcauobJLf{IpvJ=9ny}FwdGCYmwgj?0qhIG{5zbTTVc2b zo+3h|{F_Yg96k{?rVn`m`%d??#avI-eh^XnTH2r*o>5n>`UuIsuCIeN5Br62W!Yy#8)0uWcVG%-QnMHczpWoe zftoSf-WJq~x8`|ws<-9{Va9@s#SoH3uw`>4!~uyB-(lV)SD9f(TPNa!o7JLL%!a)@gUmedno%~}$ z#zZLYah$5mf@Z2}a(oDDM^$qq>*nb;?aVn?D`($Om=?j+T%S?eSgR1t=zzwGw|kvM zt~WiOO&UVW=7N=8ERxM<4?Wbj4bPIP4z3=hjp(uuT}ne*E9ct0)Lsk?bG=1nNo=oB z0JEoKzAw45q-lB!IbJKsY=Lpru48qY6ql!Z#J13ywC&7??l&AtxiowZ|Cg(k*UE#@ zrJm|m^EV_6jz}f($PrOb`S;imdEwtu`#cCu3aMXBgUUH4t2j_qu=KmOO645(v(_DL z^G5PF%RR0@X5D{(V%x5L{xD1Sa>^wR+$0j(DeVfwk;tp3<@i$~qOsvx^uUy!zV8G0~0`$f?VV=?vm zOwYnZB>UV_b#sh6ibtN`5I+l%mTE9T%*J!xaz}cWisUNLg@>nEiKv4hgmv`5C)GIDbBOgq{?5K-!=>z{CLJ$wIBkL-~yV{}~e*^#eZ1f%)RR;DgcM zfOqnA#42!t$D;@!QT3n50ve1d0$Zl^m}ABc){bz2HDhq#o&{ZLlQ=*lO9Alv7y_uW z`bTL2KkVsP<{%6$`1yeL}DmCZuxPZRJp*( z*Kk1M23@g@UjhQ6PEZ{58CL@Aqv>cB0|#ltT;SR`95{}ptMe0@zz&v<>j{GNDt-bE zn5EFw?u0e)Ee+J0^aq@C>E_j>A%MyU^@?Rcohe{^TCd{d<=ub5$bWAh