switch to JUnit 5

Jörg Prante 5 years ago
parent 47a0ca64fb
commit 3439386e02

@@ -20,7 +20,7 @@ printf "Host: %s\nOS: %s %s %s\nJVM: %s %s %s %s\nGradle: %s Groovy: %s Java: %s
JavaVersion.current()
if (JavaVersion.current() < JavaVersion.VERSION_11) {
throw new GradleException("This build must be run with java 11 or higher")
throw new GradleException("The build must be run with Java 11")
}
subprojects {
@@ -38,9 +38,11 @@ subprojects {
}
dependencies {
testCompile "junit:junit:${project.property('junit.version')}"
testCompile "org.apache.logging.log4j:log4j-core:${project.property('log4j.version')}"
testCompile "org.apache.logging.log4j:log4j-slf4j-impl:${project.property('log4j.version')}"
testImplementation "org.junit.jupiter:junit-jupiter-api:${project.property('junit.version')}"
testRuntimeOnly "org.junit.jupiter:junit-jupiter-engine:${project.property('junit.version')}"
testImplementation "org.apache.logging.log4j:log4j-core:${project.property('log4j.version')}"
testImplementation "org.apache.logging.log4j:log4j-jul:${project.property('log4j.version')}"
testImplementation "org.apache.logging.log4j:log4j-slf4j-impl:${project.property('log4j.version')}"
asciidoclet "org.xbib:asciidoclet:${project.property('asciidoclet.version')}"
wagon "org.apache.maven.wagon:wagon-ssh:${project.property('wagon.version')}"
}
@@ -63,43 +65,44 @@ subprojects {
}
test {
jvmArgs = [
'--add-exports=java.base/jdk.internal.ref=ALL-UNNAMED',
'--add-exports=java.base/jdk.internal.misc=ALL-UNNAMED',
'--add-opens=java.base/java.nio=ALL-UNNAMED'
]
useJUnitPlatform()
// we MUST use this hack because of Elasticsearch 2.2.1 Lucene 5.4.1 MMapDirectory unmap() hackery
doFirst {
jvmArgs = [
'--add-exports=java.base/jdk.internal.ref=ALL-UNNAMED',
'--add-exports=java.base/jdk.internal.misc=ALL-UNNAMED',
'--add-opens=java.base/java.nio=ALL-UNNAMED'
]
}
systemProperty 'java.util.logging.manager', 'org.apache.logging.log4j.jul.LogManager'
systemProperty 'jna.debug_load', 'true'
systemProperty 'path.home', "${project.buildDir}"
failFast = false
testLogging {
showStandardStreams = true
exceptionFormat = 'full'
events 'PASSED', 'FAILED', 'SKIPPED'
}
afterSuite { desc, result ->
if (!desc.parent) {
println "\nTest result: ${result.resultType}"
println "Test summary: ${result.testCount} tests, " +
"${result.successfulTestCount} succeeded, " +
"${result.failedTestCount} failed, " +
"${result.skippedTestCount} skipped"
}
}
}
clean {
delete "data"
delete "logs"
delete "out"
}
/*javadoc {
options.docletpath = configurations.asciidoclet.files.asType(List)
options.doclet = 'org.asciidoctor.Asciidoclet'
options.overview = "src/docs/asciidoclet/overview.adoc"
options.addStringOption "-base-dir", "${projectDir}"
options.addStringOption "-attribute",
"name=${project.name},version=${project.version},title-link=https://github.com/jprante/${project.name}"
configure(options) {
noTimestamp = true
}
}*/
task javadocJar(type: Jar, dependsOn: javadoc) {
classifier 'javadoc'
archiveClassifier.set('javadoc')
}
task sourcesJar(type: Jar, dependsOn: classes) {
from sourceSets.main.allSource
classifier 'sources'
archiveClassifier.set('sources')
}
artifacts {
@@ -135,6 +138,7 @@ subprojects {
html.enabled = true
}
}
tasks.withType(Checkstyle) {
ignoreFailures = true
reports {

@@ -1,19 +1,20 @@
dependencies {
compile "org.xbib:metrics-common:${project.property('xbib-metrics.version')}"
compile("org.elasticsearch:elasticsearch:${project.property('elasticsearch.version')}") {
// exclude ES jackson yaml, cbor, smile versions
// exclude the original ES jackson yaml, cbor, smile versions (2.6.2)
exclude group: 'com.fasterxml.jackson.dataformat'
// dependencies that are not meant for client
// these dependencies are not meant for client applications
exclude module: 'securesm'
// we use log4j2, not log4j
exclude group: 'log4j'
}
// override log4j2 of Elastic with ours
compile "org.apache.logging.log4j:log4j-core:${project.property('log4j.version')}"
// override ES jackson with our jackson version
// in an Elasticsearch session, ES uses SMILE when encoding the source of a SearchRequest
compile "com.fasterxml.jackson.dataformat:jackson-dataformat-smile:${project.property('jackson-dataformat.version')}"
compile "com.fasterxml.jackson.dataformat:jackson-dataformat-smile:${project.property('jackson.version')}"
// CBOR is the default JSON content compression encoding in ES 2.2.1
compile "com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:${project.property('jackson-dataformat.version')}"
compile "com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:${project.property('jackson.version')}"
// not used, but maybe in other projects
compile "com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:${project.property('jackson-dataformat.version')}"
compile "com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:${project.property('jackson.version')}"
}

@@ -0,0 +1,25 @@
package org.xbib.elx.api;
import org.elasticsearch.action.ActionFuture;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.get.MultiGetRequest;
import org.elasticsearch.action.get.MultiGetResponse;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
public interface ReadClient {
ActionFuture<GetResponse> get(GetRequest getRequest);
void get(GetRequest request, ActionListener<GetResponse> listener);
ActionFuture<MultiGetResponse> multiGet(MultiGetRequest request);
void multiGet(MultiGetRequest request, ActionListener<MultiGetResponse> listener);
ActionFuture<SearchResponse> search(SearchRequest request);
void search(SearchRequest request, ActionListener<SearchResponse> listener);
}

@@ -0,0 +1,7 @@
package org.xbib.elx.api;
@FunctionalInterface
public interface ReadClientProvider<C extends ReadClient> {
C getReadClient();
}

@@ -8,7 +8,6 @@ import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
@@ -83,12 +82,12 @@ public class DefaultBulkController implements BulkController {
maxActionsPerRequest, maxConcurrentRequests, flushIngestInterval, maxVolumePerRequest);
}
this.bulkListener = new BulkListener();
DefaultBulkProcessor.Builder builder = DefaultBulkProcessor.builder((Client) client.getClient(), bulkListener)
this.bulkProcessor = DefaultBulkProcessor.builder(client.getClient(), bulkListener)
.setBulkActions(maxActionsPerRequest)
.setConcurrentRequests(maxConcurrentRequests)
.setFlushInterval(flushIngestInterval)
.setBulkSize(maxVolumePerRequest);
this.bulkProcessor = builder.build();
.setBulkSize(maxVolumePerRequest)
.build();
this.active.set(true);
}
@@ -115,6 +114,7 @@ public class DefaultBulkController implements BulkController {
@Override
public void index(IndexRequest indexRequest) {
ensureActiveAndBulk();
if (!active.get()) {
throw new IllegalStateException("inactive");
}
@@ -226,6 +226,18 @@ public class DefaultBulkController implements BulkController {
}
}
private void ensureActiveAndBulk() {
if (!active.get()) {
throw new IllegalStateException("inactive");
}
if (bulkProcessor == null) {
throw new UnsupportedOperationException("bulk processor not present");
}
if (bulkListener == null) {
throw new UnsupportedOperationException("bulk listener not present");
}
}
private class BulkListener implements DefaultBulkProcessor.Listener {
private final Logger logger = LogManager.getLogger("org.xbib.elx.BulkProcessor.Listener");

@@ -5,7 +5,7 @@ import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.bulk.BulkAction;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
@@ -44,20 +44,22 @@ public class DefaultBulkProcessor implements BulkProcessor {
private volatile boolean closed;
private DefaultBulkProcessor(Client client, Listener listener, String name, int concurrentRequests,
private DefaultBulkProcessor(ElasticsearchClient client, Listener listener, String name, int concurrentRequests,
int bulkActions, ByteSizeValue bulkSize, TimeValue flushInterval) {
this.executionIdGen = new AtomicLong();
this.closed = false;
this.bulkActions = bulkActions;
this.bulkSize = bulkSize.getBytes();
this.bulkRequest = new BulkRequest();
if (listener == null) {
throw new IllegalArgumentException();
}
this.bulkRequestHandler = concurrentRequests == 0 ?
new SyncBulkRequestHandler(client, listener) :
new AsyncBulkRequestHandler(client, listener, concurrentRequests);
if (flushInterval != null) {
this.scheduler = (ScheduledThreadPoolExecutor) Executors.newScheduledThreadPool(1,
EsExecutors.daemonThreadFactory(client.settings(),
name != null ? "[" + name + "]" : "" + "bulk_processor"));
EsExecutors.daemonThreadFactory((name != null ? "[" + name + "]" : "") + "bulk_processor"));
this.scheduler.setExecuteExistingDelayedTasksAfterShutdownPolicy(false);
this.scheduler.setContinueExistingPeriodicTasksAfterShutdownPolicy(false);
this.scheduledFuture = this.scheduler.scheduleWithFixedDelay(new Flush(), flushInterval.millis(),
@@ -68,7 +70,7 @@ public class DefaultBulkProcessor implements BulkProcessor {
}
}
public static Builder builder(Client client, Listener listener) {
public static Builder builder(ElasticsearchClient client, Listener listener) {
if (client == null) {
throw new NullPointerException("The client you specified while building a BulkProcessor is null");
}
@@ -215,7 +217,7 @@ public class DefaultBulkProcessor implements BulkProcessor {
*/
public static class Builder {
private final Client client;
private final ElasticsearchClient client;
private final Listener listener;
@@ -236,7 +238,7 @@ public class DefaultBulkProcessor implements BulkProcessor {
* @param client the client
* @param listener the listener
*/
Builder(Client client, Listener listener) {
Builder(ElasticsearchClient client, Listener listener) {
this.client = client;
this.listener = listener;
}
@@ -330,11 +332,11 @@ public class DefaultBulkProcessor implements BulkProcessor {
private static class SyncBulkRequestHandler implements BulkRequestHandler {
private final Client client;
private final ElasticsearchClient client;
private final DefaultBulkProcessor.Listener listener;
SyncBulkRequestHandler(Client client, DefaultBulkProcessor.Listener listener) {
SyncBulkRequestHandler(ElasticsearchClient client, DefaultBulkProcessor.Listener listener) {
this.client = client;
this.listener = listener;
}
@@ -362,7 +364,7 @@ public class DefaultBulkProcessor implements BulkProcessor {
private static class AsyncBulkRequestHandler implements BulkRequestHandler {
private final Client client;
private final ElasticsearchClient client;
private final DefaultBulkProcessor.Listener listener;
@@ -370,7 +372,7 @@ public class DefaultBulkProcessor implements BulkProcessor {
private final int concurrentRequests;
private AsyncBulkRequestHandler(Client client, DefaultBulkProcessor.Listener listener, int concurrentRequests) {
private AsyncBulkRequestHandler(ElasticsearchClient client, DefaultBulkProcessor.Listener listener, int concurrentRequests) {
this.client = client;
this.listener = listener;
this.concurrentRequests = concurrentRequests;

@@ -1,20 +1,20 @@
package org.xbib.elx.common.test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import com.carrotsearch.hppc.cursors.ObjectCursor;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesAction;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesAction;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse;
import org.elasticsearch.action.admin.indices.create.CreateIndexAction;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.cluster.metadata.AliasAction;
import org.elasticsearch.common.Strings;
import org.junit.Test;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import java.util.Collections;
import java.util.Iterator;
@@ -23,53 +23,58 @@ import java.util.TreeSet;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
*
*/
public class AliasTest extends TestBase {
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
@ExtendWith(TestExtension.class)
class AliasTest {
private static final Logger logger = LogManager.getLogger(AliasTest.class.getName());
private final TestExtension.Helper helper;
AliasTest(TestExtension.Helper helper) {
this.helper = helper;
}
@Test
public void testAlias() {
Client client = client("1");
CreateIndexRequest indexRequest = new CreateIndexRequest("test");
client.admin().indices().create(indexRequest).actionGet();
// put alias
void testAlias() {
ElasticsearchClient client = helper.client("1");
CreateIndexRequest indexRequest = new CreateIndexRequest("test_index");
client.execute(CreateIndexAction.INSTANCE, indexRequest).actionGet();
IndicesAliasesRequest indicesAliasesRequest = new IndicesAliasesRequest();
String[] indices = new String[]{"test"};
String[] indices = new String[]{"test_index"};
String[] aliases = new String[]{"test_alias"};
IndicesAliasesRequest.AliasActions aliasAction =
new IndicesAliasesRequest.AliasActions(AliasAction.Type.ADD, indices, aliases);
indicesAliasesRequest.addAliasAction(aliasAction);
client.admin().indices().aliases(indicesAliasesRequest).actionGet();
client.execute(IndicesAliasesAction.INSTANCE, indicesAliasesRequest).actionGet();
// get alias
GetAliasesRequest getAliasesRequest = new GetAliasesRequest(Strings.EMPTY_ARRAY);
long t0 = System.nanoTime();
GetAliasesResponse getAliasesResponse = client.admin().indices().getAliases(getAliasesRequest).actionGet();
GetAliasesResponse getAliasesResponse = client.execute(GetAliasesAction.INSTANCE, getAliasesRequest).actionGet();
long t1 = (System.nanoTime() - t0) / 1000000;
logger.info("{} time(ms) = {}", getAliasesResponse.getAliases(), t1);
assertTrue(t1 >= 0);
}
@Test
public void testMostRecentIndex() {
Client client = client("1");
void testMostRecentIndex() {
ElasticsearchClient client = helper.client("1");
String alias = "test";
CreateIndexRequest indexRequest = new CreateIndexRequest("test20160101");
client.admin().indices().create(indexRequest).actionGet();
client.execute(CreateIndexAction.INSTANCE, indexRequest).actionGet();
indexRequest = new CreateIndexRequest("test20160102");
client.admin().indices().create(indexRequest).actionGet();
client.execute(CreateIndexAction.INSTANCE, indexRequest).actionGet();
indexRequest = new CreateIndexRequest("test20160103");
client.admin().indices().create(indexRequest).actionGet();
client.execute(CreateIndexAction.INSTANCE, indexRequest).actionGet();
IndicesAliasesRequest indicesAliasesRequest = new IndicesAliasesRequest();
String[] indices = new String[]{"test20160101", "test20160102", "test20160103"};
String[] aliases = new String[]{alias};
String[] indices = new String[] { "test20160101", "test20160102", "test20160103" };
String[] aliases = new String[] { alias };
IndicesAliasesRequest.AliasActions aliasAction =
new IndicesAliasesRequest.AliasActions(AliasAction.Type.ADD, indices, aliases);
indicesAliasesRequest.addAliasAction(aliasAction);
client.admin().indices().aliases(indicesAliasesRequest).actionGet();
client.execute(IndicesAliasesAction.INSTANCE, indicesAliasesRequest).actionGet();
GetAliasesRequest getAliasesRequest = new GetAliasesRequest();
getAliasesRequest.aliases(alias);
GetAliasesResponse getAliasesResponse = client.execute(GetAliasesAction.INSTANCE, getAliasesRequest).actionGet();
@@ -89,5 +94,4 @@ public class AliasTest extends TestBase {
assertEquals("test20160101", it.next());
logger.info("success: result={}", result);
}
}

@@ -1,48 +0,0 @@
package org.xbib.elx.common.test;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.junit.Before;
import org.junit.Test;
public class ClusterBlockTest extends TestBase {
private static final Logger logger = LogManager.getLogger("test");
@Before
public void startNodes() {
try {
setClusterName("test-cluster" + System.getProperty("user.name"));
startNode("1");
// do not wait for green health state
logger.info("ready");
} catch (Throwable t) {
logger.error("startNodes failed", t);
}
}
@Override
protected Settings getNodeSettings() {
return Settings.settingsBuilder()
.put(super.getNodeSettings())
.put("discovery.zen.minimum_master_nodes", 2) // block until we have two nodes
.build();
}
@Test(expected = ClusterBlockException.class)
public void testClusterBlock() throws Exception {
Client client = client("1");
XContentBuilder builder = XContentFactory.jsonBuilder().startObject().field("field1", "value1").endObject();
IndexRequestBuilder irb = client.prepareIndex("test", "test", "1").setSource(builder);
BulkRequestBuilder brb = client.prepareBulk();
brb.add(irb);
brb.execute().actionGet();
}
}

@@ -1,18 +1,18 @@
package org.xbib.elx.common.test;
import org.junit.Test;
import org.junit.jupiter.api.Test;
import org.xbib.elx.common.ClientBuilder;
import org.xbib.elx.common.MockExtendedClient;
import org.xbib.elx.common.MockExtendedClientProvider;
import java.io.IOException;
import static org.junit.Assert.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNotNull;
public class MockExtendedClientProviderTest {
class MockExtendedClientProviderTest {
@Test
public void testMockExtendedProvider() throws IOException {
void testMockExtendedProvider() throws IOException {
MockExtendedClient client = ClientBuilder.builder().provider(MockExtendedClientProvider.class).build();
assertNotNull(client);
}

@@ -2,20 +2,22 @@ package org.xbib.elx.common.test;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.junit.Test;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import java.net.InetAddress;
import java.net.NetworkInterface;
import java.util.Collections;
import java.util.Enumeration;
public class NetworkTest {
// walk over all found interfaces (this is slow - multicast/pings are performed)
@Disabled
class NetworkTest {
private static final Logger logger = LogManager.getLogger(NetworkTest.class);
@Test
public void testNetwork() throws Exception {
// walk over all found interfaces (this is slow - multicast/pings are performed)
void testNetwork() throws Exception {
Enumeration<NetworkInterface> nets = NetworkInterface.getNetworkInterfaces();
for (NetworkInterface netint : Collections.list(nets)) {
System.out.println("checking network interface = " + netint.getName());

@@ -1,27 +1,38 @@
package org.xbib.elx.common.test;
import org.elasticsearch.action.admin.indices.refresh.RefreshAction;
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
import org.elasticsearch.action.bulk.BulkAction;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchAction;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.sort.SortOrder;
import org.junit.Test;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import static org.junit.Assert.assertTrue;
import static org.junit.jupiter.api.Assertions.assertTrue;
public class SearchTest extends TestBase {
@ExtendWith(TestExtension.class)
class SearchTest {
private final TestExtension.Helper helper;
SearchTest(TestExtension.Helper helper) {
this.helper = helper;
}
@Test
public void testSearch() throws Exception {
Client client = client("1");
void testSearch() throws Exception {
ElasticsearchClient client = helper.client("1");
BulkRequestBuilder builder = new BulkRequestBuilder(client, BulkAction.INSTANCE);
for (int i = 0; i < 1000; i++) {
for (int i = 0; i < 1; i++) {
IndexRequest indexRequest = new IndexRequest("pages", "row")
.source(XContentFactory.jsonBuilder()
.startObject()
@@ -39,18 +50,20 @@ public class SearchTest extends TestBase {
.endObject());
builder.add(indexRequest);
}
client.bulk(builder.request()).actionGet();
client.admin().indices().refresh(new RefreshRequest()).actionGet();
for (int i = 0; i < 100; i++) {
client.execute(BulkAction.INSTANCE, builder.request()).actionGet();
client.execute(RefreshAction.INSTANCE, new RefreshRequest()).actionGet();
for (int i = 0; i < 1; i++) {
QueryBuilder queryStringBuilder = QueryBuilders.queryStringQuery("rs:" + 1234);
SearchRequestBuilder requestBuilder = client.prepareSearch()
.setIndices("pages")
.setTypes("row")
.setQuery(queryStringBuilder)
.addSort("rowcount", SortOrder.DESC)
.setFrom(i * 10).setSize(10);
SearchResponse searchResponse = requestBuilder.execute().actionGet();
SearchSourceBuilder searchSource = new SearchSourceBuilder();
searchSource.query(queryStringBuilder);
searchSource.sort("rowcount", SortOrder.DESC);
searchSource.from(i * 10);
searchSource.size(10);
SearchRequest searchRequest = new SearchRequest();
searchRequest.indices("pages");
searchRequest.types("row");
searchRequest.source(searchSource);
SearchResponse searchResponse = client.execute(SearchAction.INSTANCE, searchRequest).actionGet();
assertTrue(searchResponse.getHits().getTotalHits() > 0);
}
}

@@ -1,6 +1,6 @@
package org.xbib.elx.common.test;
import static org.junit.Assert.assertEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import org.elasticsearch.action.admin.indices.create.CreateIndexAction;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
@@ -17,43 +17,51 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.junit.Test;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
public class SimpleTest extends TestBase {
@ExtendWith(TestExtension.class)
class SimpleTest {
private final TestExtension.Helper helper;
SimpleTest(TestExtension.Helper helper) {
this.helper = helper;
}
@Test
public void test() throws Exception {
void test() throws Exception {
try {
DeleteIndexRequest deleteIndexRequest =
new DeleteIndexRequest().indices("test");
client("1").execute(DeleteIndexAction.INSTANCE, deleteIndexRequest).actionGet();
helper.client("1").execute(DeleteIndexAction.INSTANCE, deleteIndexRequest).actionGet();
} catch (IndexNotFoundException e) {
// ignore if index not found
}
Settings indexSettings = Settings.settingsBuilder()
.put(super.getNodeSettings())
.put(helper.getNodeSettings())
.put("index.analysis.analyzer.default.filter.0", "lowercase")
.put("index.analysis.analyzer.default.filter.1", "trim")
.put("index.analysis.analyzer.default.tokenizer", "keyword")
.build();
CreateIndexRequest createIndexRequest = new CreateIndexRequest();
createIndexRequest.index("test").settings(indexSettings);
client("1").execute(CreateIndexAction.INSTANCE, createIndexRequest).actionGet();
helper.client("1").execute(CreateIndexAction.INSTANCE, createIndexRequest).actionGet();
IndexRequest indexRequest = new IndexRequest();
indexRequest.index("test").type("test").id("1")
.source(XContentFactory.jsonBuilder().startObject().field("field",
"1%2fPJJP3JV2C24iDfEu9XpHBaYxXh%2fdHTbmchB35SDznXO2g8Vz4D7GTIvY54iMiX_149c95f02a8").endObject());
client("1").execute(IndexAction.INSTANCE, indexRequest).actionGet();
helper.client("1").execute(IndexAction.INSTANCE, indexRequest).actionGet();
RefreshRequest refreshRequest = new RefreshRequest();
refreshRequest.indices("test");
client("1").execute(RefreshAction.INSTANCE, refreshRequest).actionGet();
helper.client("1").execute(RefreshAction.INSTANCE, refreshRequest).actionGet();
SearchSourceBuilder builder = new SearchSourceBuilder();
builder.query(QueryBuilders.matchQuery("field",
"1%2fPJJP3JV2C24iDfEu9XpHBaYxXh%2fdHTbmchB35SDznXO2g8Vz4D7GTIvY54iMiX_149c95f02a8"));
SearchRequest searchRequest = new SearchRequest();
searchRequest.indices("test").types("test");
searchRequest.source(builder);
String doc = client("1").execute(SearchAction.INSTANCE, searchRequest).actionGet()
String doc = helper.client("1").execute(SearchAction.INSTANCE, searchRequest).actionGet()
.getHits().getAt(0).getSourceAsString();
assertEquals(doc,
"{\"field\":\"1%2fPJJP3JV2C24iDfEu9XpHBaYxXh%2fdHTbmchB35SDznXO2g8Vz4D7GTIvY54iMiX_149c95f02a8\"}");

@@ -1,212 +0,0 @@
package org.xbib.elx.common.test;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchTimeoutException;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse;
import org.elasticsearch.action.admin.cluster.state.ClusterStateAction;
import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest;
import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
import org.elasticsearch.client.support.AbstractClient;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.node.Node;
import org.junit.After;
import org.junit.Before;
import java.io.IOException;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.HashMap;
import java.util.Map;
import java.util.Random;
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
public class TestBase {
private static final Logger logger = LogManager.getLogger("test");
private static final Random random = new Random();
private static final char[] numbersAndLetters = ("0123456789abcdefghijklmnopqrstuvwxyz").toCharArray();
private Map<String, Node> nodes = new HashMap<>();
private Map<String, AbstractClient> clients = new HashMap<>();
private String cluster;
private String host;
private int port;
@Before
public void startNodes() {
try {
logger.info("starting");
setClusterName("test-cluster-" + System.getProperty("user.name"));
startNode("1");
findNodeAddress();
try {
ClusterHealthResponse healthResponse = client("1").execute(ClusterHealthAction.INSTANCE,
new ClusterHealthRequest().waitForStatus(ClusterHealthStatus.GREEN)
.timeout(TimeValue.timeValueSeconds(30))).actionGet();
if (healthResponse != null && healthResponse.isTimedOut()) {
throw new IOException("cluster state is " + healthResponse.getStatus().name()
+ ", from here on, everything will fail!");
}
} catch (ElasticsearchTimeoutException e) {
throw new IOException("cluster does not respond to health request, cowardly refusing to continue");
}
ClusterStateRequest clusterStateRequest = new ClusterStateRequest().all();
ClusterStateResponse clusterStateResponse =
client("1").execute(ClusterStateAction.INSTANCE, clusterStateRequest).actionGet();
logger.info("cluster name = {}", clusterStateResponse.getClusterName().value());
logger.info("host = {} port = {}", host, port);
} catch (Throwable t) {
logger.error("startNodes failed", t);
}
}
@After
public void stopNodes() {
try {
closeNodes();
} catch (Exception e) {
logger.error("can not close nodes", e);
} finally {
try {
deleteFiles();
logger.info("data files wiped");
Thread.sleep(2000L); // let OS commit changes
} catch (IOException e) {
logger.error(e.getMessage(), e);
} catch (InterruptedException e) {
// ignore
}
}
}
protected void setClusterName(String cluster) {
this.cluster = cluster;
}
protected String getClusterName() {
return cluster;
}
protected Settings getTransportSettings() {
return settingsBuilder()
.put("host", host)
.put("port", port)
.put("cluster.name", cluster)
.put("path.home", getHome())
.build();
}
protected Settings getNodeSettings() {
return settingsBuilder()
.put("cluster.name", cluster)
//.put("cluster.routing.schedule", "50ms")
//.put("cluster.routing.allocation.disk.threshold_enabled", false)
//.put("discovery.zen.multicast.enabled", true)
//.put("discovery.zen.multicast.ping_timeout", "5s")
//.put("http.enabled", true)
//.put("threadpool.bulk.size", Runtime.getRuntime().availableProcessors())
//.put("threadpool.bulk.queue_size", 16 * Runtime.getRuntime().availableProcessors()) // default is 50, too low
//.put("index.number_of_replicas", 0)
.put("path.home", getHome())
.build();
}
protected static String getHome() {
return System.getProperty("path.home", System.getProperty("user.dir"));
}
protected void startNode(String id) {
buildNode(id).start();
}
protected AbstractClient client(String id) {
return clients.get(id);
}
protected void findNodeAddress() {
NodesInfoRequest nodesInfoRequest = new NodesInfoRequest().transport(true);
NodesInfoResponse response = client("1").admin().cluster().nodesInfo(nodesInfoRequest).actionGet();
Object obj = response.iterator().next().getTransport().getAddress()
.publishAddress();
if (obj instanceof InetSocketTransportAddress) {
InetSocketTransportAddress address = (InetSocketTransportAddress) obj;
host = address.address().getHostName();
port = address.address().getPort();
}
}
private Node buildNode(String id) {
Settings nodeSettings = settingsBuilder()
.put(getNodeSettings())
.put("name", id)
.build();
Node node = new MockNode(nodeSettings);
AbstractClient client = (AbstractClient) node.client();
nodes.put(id, node);
clients.put(id, client);
logger.info("clients={}", clients);
return node;
}
protected String randomString(int len) {
final char[] buf = new char[len];
final int n = numbersAndLetters.length - 1;
for (int i = 0; i < buf.length; i++) {
buf[i] = numbersAndLetters[random.nextInt(n)];
}
return new String(buf);
}
private void closeNodes() {
logger.info("closing all clients");
for (AbstractClient client : clients.values()) {
client.close();
}
clients.clear();
logger.info("closing all nodes");
for (Node node : nodes.values()) {
if (node != null) {
node.close();
}
}
nodes.clear();
logger.info("all nodes closed");
}
private static void deleteFiles() throws IOException {
Path directory = Paths.get(getHome() + "/data");
Files.walkFileTree(directory, new SimpleFileVisitor<Path>() {
@Override
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
Files.delete(file);
return FileVisitResult.CONTINUE;
}
@Override
public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException {
Files.delete(dir);
return FileVisitResult.CONTINUE;
}
});
}
}

@@ -0,0 +1,216 @@
package org.xbib.elx.common.test;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchTimeoutException;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoAction;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse;
import org.elasticsearch.action.admin.cluster.state.ClusterStateAction;
import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest;
import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.client.support.AbstractClient;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.node.Node;
import org.junit.jupiter.api.extension.AfterAllCallback;
import org.junit.jupiter.api.extension.BeforeAllCallback;
import org.junit.jupiter.api.extension.ExtensionContext;
import org.junit.jupiter.api.extension.ParameterContext;
import org.junit.jupiter.api.extension.ParameterResolutionException;
import org.junit.jupiter.api.extension.ParameterResolver;
import java.io.IOException;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.HashMap;
import java.util.Map;
import java.util.Random;
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
public class TestExtension implements ParameterResolver, BeforeAllCallback, AfterAllCallback {
private static final Logger logger = LogManager.getLogger("test");
private static final Random random = new Random();
private static final char[] numbersAndLetters = ("0123456789abcdefghijklmnopqrstuvwxyz").toCharArray();
private Map<String, Node> nodes = new HashMap<>();
private Map<String, AbstractClient> clients = new HashMap<>();
private String home;
private String cluster;
private String host;
private int port;
private static final String key = "es-instance";
private static final ExtensionContext.Namespace ns =
ExtensionContext.Namespace.create(TestExtension.class);
@Override
public boolean supportsParameter(ParameterContext parameterContext, ExtensionContext extensionContext)
throws ParameterResolutionException {
return parameterContext.getParameter().getType().equals(Helper.class);
}
@Override
public Object resolveParameter(ParameterContext parameterContext, ExtensionContext extensionContext)
throws ParameterResolutionException {
return extensionContext.getParent().get().getStore(ns).getOrComputeIfAbsent(key, k -> create());
}
@Override
public void beforeAll(ExtensionContext context) throws Exception {
Helper helper = context.getParent().get().getStore(ns).getOrComputeIfAbsent(key, k -> create(), Helper.class);
setHome(System.getProperty("path.home") + "/" + helper.randomString(8));
setClusterName("test-cluster-" + System.getProperty("user.name"));
logger.info("starting cluster");
deleteFiles(Paths.get(getHome() + "/data"));
logger.info("data files wiped");
Thread.sleep(2000L); // let OS commit changes
helper.startNode("1");
NodesInfoRequest nodesInfoRequest = new NodesInfoRequest().transport(true);
NodesInfoResponse response = helper.client("1"). execute(NodesInfoAction.INSTANCE, nodesInfoRequest).actionGet();
Object obj = response.iterator().next().getTransport().getAddress()
.publishAddress();
if (obj instanceof InetSocketTransportAddress) {
InetSocketTransportAddress address = (InetSocketTransportAddress) obj;
host = address.address().getHostName();
port = address.address().getPort();
}
try {
ClusterHealthResponse healthResponse = helper.client("1").execute(ClusterHealthAction.INSTANCE,
new ClusterHealthRequest().waitForStatus(ClusterHealthStatus.GREEN)
.timeout(TimeValue.timeValueSeconds(30))).actionGet();
if (healthResponse != null && healthResponse.isTimedOut()) {
throw new IOException("cluster state is " + healthResponse.getStatus().name()
+ ", from here on, everything will fail!");
}
} catch (ElasticsearchTimeoutException e) {
throw new IOException("cluster does not respond to health request, cowardly refusing to continue");
}
ClusterStateRequest clusterStateRequest = new ClusterStateRequest().all();
ClusterStateResponse clusterStateResponse =
helper.client("1").execute(ClusterStateAction.INSTANCE, clusterStateRequest).actionGet();
logger.info("cluster name = {}", clusterStateResponse.getClusterName().value());
logger.info("host = {} port = {}", host, port);
}
@Override
public void afterAll(ExtensionContext context) throws Exception {
closeNodes();
deleteFiles(Paths.get(getHome() + "/data"));
}
private void setClusterName(String cluster) {
this.cluster = cluster;
}
private String getClusterName() {
return cluster;
}
private void setHome(String home) {
this.home = home;
}
private String getHome() {
return home;
}
private void closeNodes() {
logger.info("closing all clients");
for (AbstractClient client : clients.values()) {
client.close();
}
clients.clear();
logger.info("closing all nodes");
for (Node node : nodes.values()) {
if (node != null) {
node.close();
}
}
nodes.clear();
logger.info("all nodes closed");
}
private static void deleteFiles(Path directory) throws IOException {
if (Files.exists(directory)) {
Files.walkFileTree(directory, new SimpleFileVisitor<Path>() {
@Override
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
Files.delete(file);
return FileVisitResult.CONTINUE;
}
@Override
public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException {
Files.delete(dir);
return FileVisitResult.CONTINUE;
}
});
}
}
private Helper create() {
return new Helper();
}
class Helper {
Settings getNodeSettings() {
return settingsBuilder()
.put("cluster.name", getClusterName())
.put("path.home", getHome())
.build();
}
void startNode(String id) {
buildNode(id).start();
}
private Node buildNode(String id) {
Settings nodeSettings = settingsBuilder()
.put(getNodeSettings())
.put("name", id)
.build();
Node node = new MockNode(nodeSettings);
AbstractClient client = (AbstractClient) node.client();
nodes.put(id, node);
clients.put(id, client);
logger.info("clients={}", clients);
return node;
}
String randomString(int len) {
final char[] buf = new char[len];
final int n = numbersAndLetters.length - 1;
for (int i = 0; i < buf.length; i++) {
buf[i] = numbersAndLetters[random.nextInt(n)];
}
return new String(buf);
}
ElasticsearchClient client(String id) {
return clients.get(id);
}
}
}

@@ -6,53 +6,51 @@ import org.elasticsearch.action.index.IndexAction;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.search.SearchAction;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.junit.Test;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import java.io.IOException;
public class WildcardTest extends TestBase {
@ExtendWith(TestExtension.class)
class WildcardTest {
/*protected Settings getNodeSettings() {
return Settings.settingsBuilder()
.put(super.getNodeSettings())
.put("cluster.routing.allocation.disk.threshold_enabled", false)
.put("discovery.zen.multicast.enabled", false)
.put("http.enabled", false)
.put("index.number_of_shards", 1)
.put("index.number_of_replicas", 0)
.build();
}*/
private final TestExtension.Helper helper;
WildcardTest(TestExtension.Helper helper) {
this.helper = helper;
}
@Test
public void testWildcard() throws Exception {
index(client("1"), "1", "010");
index(client("1"), "2", "0*0");
void testWildcard() throws Exception {
ElasticsearchClient client = helper.client("1");
index(client, "1", "010");
index(client, "2", "0*0");
// exact
validateCount(client("1"), QueryBuilders.queryStringQuery("010").defaultField("field"), 1);
validateCount(client("1"), QueryBuilders.queryStringQuery("0\\*0").defaultField("field"), 1);
validateCount(client, QueryBuilders.queryStringQuery("010").defaultField("field"), 1);
validateCount(client, QueryBuilders.queryStringQuery("0\\*0").defaultField("field"), 1);
// pattern
validateCount(client("1"), QueryBuilders.queryStringQuery("0*0").defaultField("field"), 1); // 2?
validateCount(client("1"), QueryBuilders.queryStringQuery("0?0").defaultField("field"), 1); // 2?
validateCount(client("1"), QueryBuilders.queryStringQuery("0**0").defaultField("field"), 1); // 2?
validateCount(client("1"), QueryBuilders.queryStringQuery("0??0").defaultField("field"), 0);
validateCount(client("1"), QueryBuilders.queryStringQuery("*10").defaultField("field"), 1);
validateCount(client("1"), QueryBuilders.queryStringQuery("*1*").defaultField("field"), 1);
validateCount(client("1"), QueryBuilders.queryStringQuery("*\\*0").defaultField("field"), 0); // 1?
validateCount(client("1"), QueryBuilders.queryStringQuery("*\\**").defaultField("field"), 0); // 1?
validateCount(client, QueryBuilders.queryStringQuery("0*0").defaultField("field"), 1); // 2?
validateCount(client, QueryBuilders.queryStringQuery("0?0").defaultField("field"), 1); // 2?
validateCount(client, QueryBuilders.queryStringQuery("0**0").defaultField("field"), 1); // 2?
validateCount(client, QueryBuilders.queryStringQuery("0??0").defaultField("field"), 0);
validateCount(client, QueryBuilders.queryStringQuery("*10").defaultField("field"), 1);
validateCount(client, QueryBuilders.queryStringQuery("*1*").defaultField("field"), 1);
validateCount(client, QueryBuilders.queryStringQuery("*\\*0").defaultField("field"), 0); // 1?
validateCount(client, QueryBuilders.queryStringQuery("*\\**").defaultField("field"), 0); // 1?
}
private void index(Client client, String id, String fieldValue) throws IOException {
private void index(ElasticsearchClient client, String id, String fieldValue) throws IOException {
client.execute(IndexAction.INSTANCE, new IndexRequest("index", "type", id)
.source(XContentFactory.jsonBuilder().startObject().field("field", fieldValue).endObject())).actionGet();
client.execute(RefreshAction.INSTANCE, new RefreshRequest()).actionGet();
}
private long count(Client client, QueryBuilder queryBuilder) {
private long count(ElasticsearchClient client, QueryBuilder queryBuilder) {
SearchSourceBuilder builder = new SearchSourceBuilder();
builder.query(queryBuilder);
SearchRequest searchRequest = new SearchRequest();
@@ -62,7 +60,7 @@ public class WildcardTest extends TestBase {
return client.execute(SearchAction.INSTANCE, searchRequest).actionGet().getHits().getTotalHits();
}
private void validateCount(Client client, QueryBuilder queryBuilder, long expectedHits) {
private void validateCount(ElasticsearchClient client, QueryBuilder queryBuilder, long expectedHits) {
final long actualHits = count(client, queryBuilder);
if (actualHits != expectedHits) {
throw new RuntimeException("actualHits=" + actualHits + ", expectedHits=" + expectedHits);

@@ -0,0 +1,4 @@
dependencies {
compile project(':elx-common')
compile "org.xbib:netty-http-client:${project.property('xbib-netty-http.version')}"
}

@@ -0,0 +1,127 @@
package org.xbib.elx.http;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.Action;
import org.elasticsearch.action.ActionFuture;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestBuilder;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.GenericAction;
import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.threadpool.ThreadPool;
import org.xbib.elx.common.AbstractExtendedClient;
import org.xbib.netty.http.client.Client;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.ServiceLoader;
/**
* Elasticsearch HTTP client.
*/
public class ExtendedHttpClient extends AbstractExtendedClient implements ElasticsearchClient {
private static final Logger logger = LogManager.getLogger(ExtendedHttpClient.class);
private Client nettyHttpClient;
private final ClassLoader classLoader;
@SuppressWarnings("rawtypes")
private final Map<GenericAction, HttpAction> actionMap;
private String url;
public ExtendedHttpClient() {
this.classLoader = ExtendedHttpClient.class.getClassLoader();
this.actionMap = new HashMap<>();
}
@Override
@SuppressWarnings({"unchecked", "rawtypes"})
public ExtendedHttpClient init(Settings settings) throws IOException {
super.init(settings);
if (settings == null) {
return null;
}
this.url = settings.get("url");
ServiceLoader<HttpAction> httpActionServiceLoader = ServiceLoader.load(HttpAction.class, classLoader);
for (HttpAction<? extends ActionRequest, ? extends ActionResponse> httpAction : httpActionServiceLoader) {
httpAction.setSettings(settings);
actionMap.put(httpAction.getActionInstance(), httpAction);
}
this.nettyHttpClient = Client.builder().enableDebug().build();
logger.info("extended HTTP client initialized with {} actions", actionMap.size());
return this;
}
public Client internalClient() {
return nettyHttpClient;
}
@Override
public ElasticsearchClient getClient() {
return this;
}
@Override
protected ElasticsearchClient createClient(Settings settings) {
return this;
}
@Override
protected void closeClient() throws IOException {
nettyHttpClient.shutdownGracefully();
}
@Override
public <Request extends ActionRequest, Response extends ActionResponse,
RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> ActionFuture<Response>
execute(Action<Request, Response, RequestBuilder> action, Request request) {
PlainActionFuture<Response> actionFuture = PlainActionFuture.newFuture();
execute(action, request, actionFuture);
return actionFuture;
}
@Override
public <Request extends ActionRequest, Response extends ActionResponse,
RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> void
execute(Action<Request, Response, RequestBuilder> action, Request request, ActionListener<Response> listener) {
doExecute(action, request, listener);
}
@Override
public <Request extends ActionRequest, Response extends ActionResponse,
RequestBuilder extends ActionRequestBuilder<Request, Response, RequestBuilder>> RequestBuilder
prepareExecute(Action<Request, Response, RequestBuilder> action) {
return action.newRequestBuilder(this);
}
@Override
public ThreadPool threadPool() {
throw new UnsupportedOperationException();
}
@SuppressWarnings({"unchecked", "rawtypes"})
private <R extends ActionRequest, T extends ActionResponse, B extends ActionRequestBuilder<R, T, B>>
void doExecute(Action<R, T, B> action, R request, ActionListener<T> listener) {
HttpAction httpAction = actionMap.get(action);
if (httpAction == null) {
throw new IllegalStateException("failed to find http action [" + action + "] to execute");
}
try {
HttpActionContext httpActionContext = new HttpActionContext(this, request, url);
if (logger.isDebugEnabled()) {
logger.debug("submitting request {} to URL {}", request, url);
}
httpAction.execute(httpActionContext, listener);
} catch (Exception e) {
logger.error(e.getMessage(), e);
}
}
}

@@ -0,0 +1,10 @@
package org.xbib.elx.http;
import org.xbib.elx.api.ExtendedClientProvider;
public class ExtendedHttpClientProvider implements ExtendedClientProvider<ExtendedHttpClient> {
@Override
public ExtendedHttpClient getExtendedClient() {
return new ExtendedHttpClient();
}
}

@@ -0,0 +1,169 @@
package org.xbib.elx.http;
import io.netty.buffer.ByteBuf;
import io.netty.handler.codec.http.HttpHeaderNames;
import io.netty.handler.codec.http.HttpMethod;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.ActionFuture;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.GenericAction;
import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.xbib.elx.http.util.CheckedFunction;
import org.xbib.netty.http.client.Request;
import org.xbib.netty.http.client.RequestBuilder;
import org.xbib.netty.http.client.transport.Transport;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
/**
* Base class for HTTP actions.
*
* @param <R> the request type
* @param <T> the response type
*/
public abstract class HttpAction<R extends ActionRequest, T extends ActionResponse> {
private final Logger logger = LogManager.getLogger(getClass().getName());
private static final String APPLICATION_JSON = "application/json";
private Settings settings;
void setSettings(Settings settings) {
this.settings = settings;
}
public abstract GenericAction<R, T> getActionInstance();
public final ActionFuture<T> execute(HttpActionContext<R, T> httpActionContext) throws IOException {
PlainActionFuture<T> future = PlainActionFuture.newFuture();
execute(httpActionContext, future);
return future;
}
public final void execute(HttpActionContext<R, T> httpActionContext, ActionListener<T> listener) throws IOException {
try {
ActionRequestValidationException validationException = httpActionContext.getRequest().validate();
if (validationException != null) {
listener.onFailure(validationException);
return;
}
RequestBuilder httpRequestBuilder =
createHttpRequest(httpActionContext.getUrl(), httpActionContext.getRequest());
httpRequestBuilder.setUserAgent("elx-http/1.0");
Request httpRequest = httpRequestBuilder.build();
if (logger.isTraceEnabled()) {
logger.trace("action = {} request = {}", this.getClass().getName(), httpRequest.toString());
}
httpRequest.setResponseListener(fullHttpResponse -> {
try {
if (logger.isTraceEnabled()) {
logger.trace("got HTTP response: status code = " + fullHttpResponse.status().code() +
" headers = " + fullHttpResponse.headers().entries() +
" content = " + fullHttpResponse.content().toString(StandardCharsets.UTF_8));
}
listener.onResponse(parseToResponse(httpActionContext.setHttpResponse(fullHttpResponse)));
} catch (Exception e) {
listener.onFailure(e);
}
});
Transport transport = httpActionContext.getExtendedHttpClient().internalClient().execute(httpRequest);
httpActionContext.setHttpClientTransport(transport);
if (transport.isFailed()) {
listener.onFailure(transport.getFailure());
}
} catch (Throwable e) {
if (listener != null) {
listener.onFailure(e);
}
throw new IOException(e);
}
}
protected RequestBuilder newGetRequest(String url, String path) {
return Request.builder(HttpMethod.GET).url(url).uri(path);
}
protected RequestBuilder newGetRequest(String url, String path, BytesReference content) {
return newRequest(HttpMethod.GET, url, path, content);
}
protected RequestBuilder newHeadRequest(String url, String path) {
return newRequest(HttpMethod.HEAD, url, path);
}
protected RequestBuilder newPostRequest(String url, String path) {
return newRequest(HttpMethod.POST, url, path);
}
protected RequestBuilder newPostRequest(String url, String path, BytesReference content) {
return newRequest(HttpMethod.POST, url, path, content);
}
protected RequestBuilder newPostRequest(String url, String path, String content) {
return newRequest(HttpMethod.POST, url, path, content);
}
protected RequestBuilder newPutRequest(String url, String path) {
return newRequest(HttpMethod.PUT, url, path);
}
protected RequestBuilder newPutRequest(String url, String path, String content) {
return newRequest(HttpMethod.PUT, url, path, content);
}
protected RequestBuilder newPutRequest(String url, String path, BytesReference content) {
return newRequest(HttpMethod.PUT, url, path, content);
}
protected RequestBuilder newDeleteRequest(String url, String path, BytesReference content) {
return newRequest(HttpMethod.DELETE, url, path, content);
}
protected RequestBuilder newRequest(HttpMethod method, String baseUrl, String path) {
return Request.builder(method).url(baseUrl).uri(path);
}
protected RequestBuilder newRequest(HttpMethod method, String baseUrl, String path, BytesReference content) {
return Request.builder(method).url(baseUrl).uri(path).content(content.toBytesRef().bytes, APPLICATION_JSON);
}
protected RequestBuilder newRequest(HttpMethod method, String baseUrl, String path, String content) {
return Request.builder(method).url(baseUrl).uri(path).content(content, APPLICATION_JSON);
}
protected RequestBuilder newRequest(HttpMethod method, String baseUrl, String path, ByteBuf byteBuf) {
return Request.builder(method).url(baseUrl).uri(path).content(byteBuf, APPLICATION_JSON);
}
protected T parseToResponse(HttpActionContext<R, T> httpActionContext) throws IOException {
String mediaType = httpActionContext.getHttpResponse().headers().get(HttpHeaderNames.CONTENT_TYPE);
// strip off "; charset=UTF-8"
int pos = mediaType.indexOf(";");
mediaType = pos >= 0 ? mediaType.substring(0, pos) : mediaType;
XContentType xContentType = XContentType.fromRestContentType(mediaType);
if (xContentType == null) {
throw new IllegalStateException("unsupported content-type: " + mediaType);
}
String body = httpActionContext.getHttpResponse().content().toString(StandardCharsets.UTF_8);
T t;
try (XContentParser parser = xContentType.xContent().createParser(body)) {
t = entityParser().apply(parser);
}
return t;
}
protected abstract RequestBuilder createHttpRequest(String baseUrl, R request) throws IOException;
protected abstract CheckedFunction<XContentParser, T, IOException> entityParser();
}

@@ -0,0 +1,60 @@
package org.xbib.elx.http;
import io.netty.handler.codec.http.FullHttpResponse;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionResponse;
import org.xbib.netty.http.client.transport.Transport;
/**
* HTTP action context.
*
* @param <R> request type
* @param <T> response type
*/
public class HttpActionContext<R extends ActionRequest, T extends ActionResponse> {
private final ExtendedHttpClient extendedHttpClient;
private final R request;
private final String url;
private Transport httpClientTransport;
private FullHttpResponse httpResponse;
HttpActionContext(ExtendedHttpClient extendedHttpClient, R request, String url) {
this.extendedHttpClient = extendedHttpClient;
this.request = request;
this.url = url;
}
public ExtendedHttpClient getExtendedHttpClient() {
return extendedHttpClient;
}
public R getRequest() {
return request;
}
public String getUrl() {
return url;
}
public void setHttpClientTransport(Transport httpClientTransport) {
this.httpClientTransport = httpClientTransport;
}
public Transport getHttpClientTransport() {
return httpClientTransport;
}
public HttpActionContext<R, T> setHttpResponse(FullHttpResponse fullHttpResponse) {
this.httpResponse = fullHttpResponse;
return this;
}
public FullHttpResponse getHttpResponse() {
return httpResponse;
}
}

@@ -0,0 +1,179 @@
package org.xbib.elx.http.action.get;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.GenericAction;
import org.elasticsearch.action.get.GetAction;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentLocation;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.get.GetField;
import org.elasticsearch.index.get.GetResult;
import org.elasticsearch.index.mapper.internal.SourceFieldMapper;
import org.xbib.elx.http.util.CheckedFunction;
import org.xbib.elx.http.HttpAction;
import org.xbib.netty.http.client.RequestBuilder;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.function.Supplier;
public class HttpGetAction extends HttpAction<GetRequest, GetResponse> {
@Override
public GenericAction<GetRequest, GetResponse> getActionInstance() {
return GetAction.INSTANCE;
}
@Override
protected RequestBuilder createHttpRequest(String url, GetRequest request) {
return newGetRequest(url, request.index() + "/" + request.type() + "/" + request.id());
}
@Override
protected CheckedFunction<XContentParser, GetResponse, IOException> entityParser() {
return this::fromXContent;
}
public GetResponse fromXContent(XContentParser parser) throws IOException {
GetResult getResult = Helper.fromXContent(parser);
if (getResult.getIndex() == null && getResult.getType() == null && getResult.getId() == null) {
throw new ElasticsearchException(parser.getTokenLocation() + ":" +
String.format(Locale.ROOT, "Missing required fields [%s,%s,%s]", "_index", "_type", "_id"));
}
return new GetResponse(getResult);
}
static class Helper {
private static final Logger logger = LogManager.getLogger("helper");
static final String _INDEX = "_index";
static final String _TYPE = "_type";
static final String _ID = "_id";
private static final String _VERSION = "_version";
private static final String FOUND = "found";
private static final String FIELDS = "fields";
static void ensureExpectedToken(XContentParser.Token expected, XContentParser.Token actual, Supplier<XContentLocation> location) {
if (actual != expected) {
String message = "Failed to parse object: expecting token of type [%s] but found [%s]";
throw new ElasticsearchException(location.get() + ":" + String.format(Locale.ROOT, message, expected, actual));
}
}
static GetResult fromXContent(XContentParser parser) throws IOException {
XContentParser.Token token = parser.nextToken();
ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser::getTokenLocation);
return fromXContentEmbedded(parser);
}
static GetResult fromXContentEmbedded(XContentParser parser) throws IOException {
XContentParser.Token token = parser.nextToken();
ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation);
return fromXContentEmbedded(parser, null, null, null);
}
static GetResult fromXContentEmbedded(XContentParser parser, String index, String type, String id) throws IOException {
XContentParser.Token token = parser.currentToken();
ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation);
String currentFieldName = parser.currentName();
long version = -1;
Boolean found = null;
BytesReference source = null;
Map<String, GetField> fields = new HashMap<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if (_INDEX.equals(currentFieldName)) {
index = parser.text();
} else if (_TYPE.equals(currentFieldName)) {
type = parser.text();
} else if (_ID.equals(currentFieldName)) {
id = parser.text();
} else if (_VERSION.equals(currentFieldName)) {
version = parser.longValue();
} else if (FOUND.equals(currentFieldName)) {
found = parser.booleanValue();
} else {
fields.put(currentFieldName, new GetField(currentFieldName, Collections.singletonList(parser.objectText())));
}
} else if (token == XContentParser.Token.START_OBJECT) {
if (SourceFieldMapper.NAME.equals(currentFieldName)) {
try (XContentBuilder builder = XContentBuilder.builder(parser.contentType().xContent())) {
builder.copyCurrentStructure(parser);
source = builder.bytes();
}
} else if (FIELDS.equals(currentFieldName)) {
while(parser.nextToken() != XContentParser.Token.END_OBJECT) {
GetField getField = getFieldFromXContent(parser);
fields.put(getField.getName(), getField);
}
} else {
parser.skipChildren();
}
} else if (token == XContentParser.Token.START_ARRAY) {
if ("_ignored".equals(currentFieldName)) {
fields.put(currentFieldName, new GetField(currentFieldName, parser.list()));
} else {
parser.skipChildren();
}
}
}
return new GetResult(index, type, id, version, found, source, fields);
}
static GetField getFieldFromXContent(XContentParser parser) throws IOException {
ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser::getTokenLocation);
String fieldName = parser.currentName();
XContentParser.Token token = parser.nextToken();
ensureExpectedToken(XContentParser.Token.START_ARRAY, token, parser::getTokenLocation);
List<Object> values = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
values.add(parseFieldsValue(parser));
}
return new GetField(fieldName, values);
}
static Object parseFieldsValue(XContentParser parser) throws IOException {
XContentParser.Token token = parser.currentToken();
Object value = null;
if (token == XContentParser.Token.VALUE_STRING) {
//binary values will be parsed back and returned as base64 strings when reading from json and yaml
value = parser.text();
} else if (token == XContentParser.Token.VALUE_NUMBER) {
value = parser.numberValue();
} else if (token == XContentParser.Token.VALUE_BOOLEAN) {
value = parser.booleanValue();
} else if (token == XContentParser.Token.VALUE_EMBEDDED_OBJECT) {
//binary values will be parsed back and returned as BytesArray when reading from cbor and smile
value = new BytesArray(parser.binaryValue());
} else if (token == XContentParser.Token.VALUE_NULL) {
value = null;
} else if (token == XContentParser.Token.START_OBJECT) {
value = parser.mapOrdered();
} else if (token == XContentParser.Token.START_ARRAY) {
value = parser.listOrderedMap();
} else {
throwUnknownToken(token, parser.getTokenLocation());
}
return value;
}
static void throwUnknownToken(XContentParser.Token token, XContentLocation location) {
String message = "Failed to parse object: unexpected token [%s] found";
throw new ElasticsearchException(location + ":" + String.format(Locale.ROOT, message, token));
}
}
}
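A minimal sketch of driving the parser above with a canned response body, assuming the same Elasticsearch 2.x XContent API (XContentType, XContentParser) used elsewhere in this module; the document values are made up:

// hypothetical test helper
static GetResponse parseCannedGetResponse() throws IOException {
    String body = "{\"_index\":\"myindex\",\"_type\":\"mytype\",\"_id\":\"1\","
            + "\"_version\":1,\"found\":true,\"_source\":{\"field\":\"value\"}}";
    try (XContentParser parser = XContentType.JSON.xContent().createParser(body)) {
        return new HttpGetAction().fromXContent(parser); // isExists() == true, getId() == "1"
    }
}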

@ -0,0 +1,255 @@
package org.xbib.elx.http.action.get;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.GenericAction;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.get.MultiGetAction;
import org.elasticsearch.action.get.MultiGetItemResponse;
import org.elasticsearch.action.get.MultiGetRequest;
import org.elasticsearch.action.get.MultiGetResponse;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentLocation;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.get.GetField;
import org.elasticsearch.index.get.GetResult;
import org.elasticsearch.index.mapper.internal.SourceFieldMapper;
import org.xbib.elx.http.HttpAction;
import org.xbib.elx.http.action.search.HttpSearchAction;
import org.xbib.elx.http.util.CheckedFunction;
import org.xbib.netty.http.client.RequestBuilder;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.function.Supplier;
public class HttpMultiGetAction extends HttpAction<MultiGetRequest, MultiGetResponse> {
@Override
public GenericAction<MultiGetRequest, MultiGetResponse> getActionInstance() {
return MultiGetAction.INSTANCE;
}
@Override
protected RequestBuilder createHttpRequest(String url, MultiGetRequest request) throws IOException {
XContentBuilder builder = XContentFactory.jsonBuilder().startObject().startArray("docs");
for (MultiGetRequest.Item item : request.getItems()) {
builder.startObject()
.field("_index", item.index())
.field("_type", item.type())
.field("_id", item.id());
if (item.fields() != null) {
builder.array("fields", item.fields());
}
builder.endObject();
}
builder.endArray().endObject();
return newPostRequest(url, "_mget", builder.bytes());
}
@Override
protected CheckedFunction<XContentParser, MultiGetResponse, IOException> entityParser() {
return Helper::fromXContent;
}
static class Helper {
private static final ParseField INDEX = new ParseField("_index");
private static final ParseField TYPE = new ParseField("_type");
private static final ParseField ID = new ParseField("_id");
private static final ParseField ERROR = new ParseField("error");
private static final ParseField DOCS = new ParseField("docs");
static final String _INDEX = "_index";
static final String _TYPE = "_type";
static final String _ID = "_id";
private static final String _VERSION = "_version";
private static final String FOUND = "found";
private static final String FIELDS = "fields";
static MultiGetResponse fromXContent(XContentParser parser) throws IOException {
String currentFieldName = null;
List<MultiGetItemResponse> items = new ArrayList<>();
for (XContentParser.Token token = parser.nextToken(); token != XContentParser.Token.END_OBJECT; token = parser.nextToken()) {
switch (token) {
case FIELD_NAME:
currentFieldName = parser.currentName();
break;
case START_ARRAY:
if (DOCS.getPreferredName().equals(currentFieldName)) {
for (token = parser.nextToken(); token != XContentParser.Token.END_ARRAY; token = parser.nextToken()) {
if (token == XContentParser.Token.START_OBJECT) {
items.add(parseItem(parser));
}
}
}
break;
default:
break;
}
}
return new MultiGetResponse(items.toArray(new MultiGetItemResponse[0]));
}
private static MultiGetItemResponse parseItem(XContentParser parser) throws IOException {
String currentFieldName = null;
String index = null;
String type = null;
String id = null;
ElasticsearchException exception = null;
GetResult getResult = null;
ParseFieldMatcher matcher = new ParseFieldMatcher(Settings.EMPTY);
for (XContentParser.Token token = parser.nextToken(); token != XContentParser.Token.END_OBJECT; token = parser.nextToken()) {
switch (token) {
case FIELD_NAME:
currentFieldName = parser.currentName();
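// the first field name starts the embedded get result: hand the parser over,
// and fromXContentEmbedded consumes the rest of the item object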
getResult = fromXContentEmbedded(parser, index, type, id);
break;
case VALUE_STRING:
if (matcher.match(currentFieldName, INDEX)) {
index = parser.text();
} else if (matcher.match(currentFieldName, TYPE)) {
type = parser.text();
} else if (matcher.match(currentFieldName, ID)) {
id = parser.text();
}
break;
case START_OBJECT:
if (matcher.match(currentFieldName, ERROR)) {
exception = HttpSearchAction.Helper.elasticsearchExceptionFromXContent(parser);
}
break;
default:
// If unknown tokens are encountered, they should be ignored, because
// this is lenient parsing logic on the client side.
break;
}
if (getResult != null) {
break;
}
}
if (exception != null) {
return new MultiGetItemResponse(null, new MultiGetResponse.Failure(index, type, id, exception));
} else {
GetResponse getResponse = new GetResponse(getResult);
return new MultiGetItemResponse(getResponse, null);
}
}
static void ensureExpectedToken(XContentParser.Token expected, XContentParser.Token actual, Supplier<XContentLocation> location) {
if (actual != expected) {
String message = "Failed to parse object: expecting token of type [%s] but found [%s]";
throw new ElasticsearchException(location.get() + ":" + String.format(Locale.ROOT, message, expected, actual));
}
}
static GetResult fromXContentEmbedded(XContentParser parser) throws IOException {
XContentParser.Token token = parser.nextToken();
ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation);
return fromXContentEmbedded(parser, null, null, null);
}
static GetResult fromXContentEmbedded(XContentParser parser, String index, String type, String id) throws IOException {
XContentParser.Token token = parser.currentToken();
ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation);
String currentFieldName = parser.currentName();
long version = -1;
Boolean found = null;
BytesReference source = null;
Map<String, GetField> fields = new HashMap<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if (_INDEX.equals(currentFieldName)) {
index = parser.text();
} else if (_TYPE.equals(currentFieldName)) {
type = parser.text();
} else if (_ID.equals(currentFieldName)) {
id = parser.text();
} else if (_VERSION.equals(currentFieldName)) {
version = parser.longValue();
} else if (FOUND.equals(currentFieldName)) {
found = parser.booleanValue();
} else {
fields.put(currentFieldName, new GetField(currentFieldName, Collections.singletonList(parser.objectText())));
}
} else if (token == XContentParser.Token.START_OBJECT) {
if (SourceFieldMapper.NAME.equals(currentFieldName)) {
try (XContentBuilder builder = XContentBuilder.builder(parser.contentType().xContent())) {
builder.copyCurrentStructure(parser);
source = builder.bytes();
}
} else if (FIELDS.equals(currentFieldName)) {
while(parser.nextToken() != XContentParser.Token.END_OBJECT) {
GetField getField = getFieldFromXContent(parser);
fields.put(getField.getName(), getField);
}
} else {
parser.skipChildren();
}
} else if (token == XContentParser.Token.START_ARRAY) {
if ("_ignored".equals(currentFieldName)) {
fields.put(currentFieldName, new GetField(currentFieldName, parser.list()));
} else {
parser.skipChildren();
}
}
}
return new GetResult(index, type, id, version, found, source, fields);
}
static GetField getFieldFromXContent(XContentParser parser) throws IOException {
ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser::getTokenLocation);
String fieldName = parser.currentName();
XContentParser.Token token = parser.nextToken();
ensureExpectedToken(XContentParser.Token.START_ARRAY, token, parser::getTokenLocation);
List<Object> values = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
values.add(parseFieldsValue(parser));
}
return new GetField(fieldName, values);
}
static Object parseFieldsValue(XContentParser parser) throws IOException {
XContentParser.Token token = parser.currentToken();
Object value = null;
if (token == XContentParser.Token.VALUE_STRING) {
//binary values will be parsed back and returned as base64 strings when reading from json and yaml
value = parser.text();
} else if (token == XContentParser.Token.VALUE_NUMBER) {
value = parser.numberValue();
} else if (token == XContentParser.Token.VALUE_BOOLEAN) {
value = parser.booleanValue();
} else if (token == XContentParser.Token.VALUE_EMBEDDED_OBJECT) {
//binary values will be parsed back and returned as BytesArray when reading from cbor and smile
value = new BytesArray(parser.binaryValue());
} else if (token == XContentParser.Token.VALUE_NULL) {
value = null;
} else if (token == XContentParser.Token.START_OBJECT) {
value = parser.mapOrdered();
} else if (token == XContentParser.Token.START_ARRAY) {
value = parser.listOrderedMap();
} else {
throwUnknownToken(token, parser.getTokenLocation());
}
return value;
}
static void throwUnknownToken(XContentParser.Token token, XContentLocation location) {
String message = "Failed to parse object: unexpected token [%s] found";
throw new ElasticsearchException(location + ":" + String.format(Locale.ROOT, message, token));
}
}
}
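For illustration, the body createHttpRequest above produces for a two-item multi-get (index, type, and ids are made up):

MultiGetRequest request = new MultiGetRequest()
        .add("myindex", "mytype", "1")
        .add("myindex", "mytype", "2");
// POST <url>/_mget
// {"docs":[{"_index":"myindex","_type":"mytype","_id":"1"},
//          {"_index":"myindex","_type":"mytype","_id":"2"}]}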

@ -0,0 +1,597 @@
package org.xbib.elx.http.action.search;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.util.SetOnce;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.search.SearchAction;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.ShardSearchFailure;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.internal.SourceFieldMapper;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHitField;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.highlight.HighlightField;
import org.elasticsearch.search.internal.InternalSearchHit;
import org.elasticsearch.search.internal.InternalSearchHitField;
import org.elasticsearch.search.internal.InternalSearchHits;
import org.elasticsearch.search.internal.InternalSearchResponse;
import org.elasticsearch.search.suggest.Suggest;
import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry;
import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry.Option;
import org.xbib.elx.http.util.CheckedFunction;
import org.xbib.elx.http.HttpAction;
import org.xbib.elx.http.util.ObjectParser;
import org.xbib.elx.http.util.XContentParserUtils;
import org.xbib.netty.http.client.RequestBuilder;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import static org.xbib.elx.http.util.ObjectParser.ValueType.STRING;
import static org.xbib.elx.http.util.XContentParserUtils.ensureExpectedToken;
public class HttpSearchAction extends HttpAction<SearchRequest, SearchResponse> {
@Override
public SearchAction getActionInstance() {
return SearchAction.INSTANCE;
}
@Override
protected RequestBuilder createHttpRequest(String url, SearchRequest request) {
String index = request.indices() != null ? "/" + String.join(",", request.indices()) : "";
return newPostRequest(url, index + "/_search", request.source());
}
@Override
protected CheckedFunction<XContentParser, SearchResponse, IOException> entityParser() {
return Helper::fromXContent;
}
public static class Helper {
private static final Logger logger = LogManager.getLogger("helper");
private static final ParseField SCROLL_ID = new ParseField("_scroll_id");
private static final ParseField TOOK = new ParseField("took");
private static final ParseField TIMED_OUT = new ParseField("timed_out");
private static final ParseField TERMINATED_EARLY = new ParseField("terminated_early");
private static final ParseField _SHARDS_FIELD = new ParseField("_shards");
private static final ParseField TOTAL_FIELD = new ParseField("total");
private static final ParseField SUCCESSFUL_FIELD = new ParseField("successful");
private static final ParseField SKIPPED_FIELD = new ParseField("skipped");
private static final ParseField FAILED_FIELD = new ParseField("failed");
private static final ParseField FAILURES_FIELD = new ParseField("failures");
private static final String HITS = "hits";
private static final String TOTAL = "total";
private static final String MAX_SCORE = "max_score";
private static final String _NESTED = "_nested";
private static final String _INDEX = "_index";
private static final String _TYPE = "_type";
private static final String _ID = "_id";
private static final String _VERSION = "_version";
private static final String _SCORE = "_score";
private static final String FIELDS = "fields";
private static final String HIGHLIGHT = "highlight";
private static final String SORT = "sort";
private static final String MATCHED_QUERIES = "matched_queries";
private static final String _EXPLANATION = "_explanation";
private static final String INNER_HITS = "inner_hits";
private static final String _SHARD = "_shard";
private static final String _NODE = "_node";
private static final String AGGREGATIONS_FIELD = "aggregations";
private static final String TYPED_KEYS_DELIMITER = "#";
private static final String SUGGEST_NAME = "suggest";
private static final String REASON_FIELD = "reason";
private static final String NODE_FIELD = "node";
private static final String INDEX_FIELD = "index";
private static final String SHARD_FIELD = "shard";
private static final String TYPE = "type";
private static final String REASON = "reason";
private static final String CAUSED_BY = "caused_by";
private static final String STACK_TRACE = "stack_trace";
private static final String HEADER = "header";
private static final String ROOT_CAUSE = "root_cause";
private static final ObjectParser<Map<String, Object>, Void> MAP_PARSER =
        new ObjectParser<>("innerHitParser", true, HashMap::new);
static {
declareInnerHitsParseFields(MAP_PARSER);
}
public static SearchResponse fromXContent(XContentParser parser) throws IOException {
ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);
parser.nextToken();
return innerFromXContent(parser);
}
static SearchResponse innerFromXContent(XContentParser parser) throws IOException {
ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser::getTokenLocation);
String currentFieldName = parser.currentName();
InternalSearchHits hits = null;
InternalAggregations aggs = null;
Suggest suggest = null;
boolean timedOut = false;
Boolean terminatedEarly = null;
long tookInMillis = -1;
int successfulShards = -1;
int totalShards = -1;
String scrollId = null;
List<ShardSearchFailure> failures = new ArrayList<>();
ParseFieldMatcher matcher = new ParseFieldMatcher(Settings.EMPTY);
for (XContentParser.Token token = parser.nextToken(); token != XContentParser.Token.END_OBJECT; token = parser.nextToken()) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if (matcher.match(currentFieldName, SCROLL_ID)) {
scrollId = parser.text();
} else if (matcher.match(currentFieldName, TOOK)) {
tookInMillis = parser.longValue();
} else if (matcher.match(currentFieldName, TIMED_OUT)) {
timedOut = parser.booleanValue();
} else if (matcher.match(currentFieldName, TERMINATED_EARLY)) {
terminatedEarly = parser.booleanValue();
} else {
parser.skipChildren();
}
} else if (token == XContentParser.Token.START_OBJECT) {
if (HITS.equals(currentFieldName)) {
logger.debug("searchHitsFromXContent");
hits = searchHitsFromXContent(parser);
} else if (AGGREGATIONS_FIELD.equals(currentFieldName)) {
aggs = aggregationsFromXContent(parser);
} else if (SUGGEST_NAME.equals(currentFieldName)) {
suggest = suggestFromXContent(parser);
} else if (matcher.match(currentFieldName, _SHARDS_FIELD)) {
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if (matcher.match(currentFieldName, FAILED_FIELD)) {
parser.intValue(); // we don't need it but need to consume it
} else if (matcher.match(currentFieldName, SUCCESSFUL_FIELD)) {
successfulShards = parser.intValue();
} else if (matcher.match(currentFieldName, TOTAL_FIELD)) {
totalShards = parser.intValue();
} else {
parser.skipChildren();
}
} else if (token == XContentParser.Token.START_ARRAY) {
if (matcher.match(currentFieldName, FAILURES_FIELD)) {
while((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
failures.add(shardSearchFailureFromXContent(parser));
}
} else {
parser.skipChildren();
}
} else {
parser.skipChildren();
}
}
} else {
parser.skipChildren();
}
}
}
// TODO profileResults
InternalSearchResponse internalResponse = new InternalSearchResponse(hits, aggs, suggest,
null, timedOut, terminatedEarly);
return new SearchResponse(internalResponse, scrollId, totalShards, successfulShards, tookInMillis,
failures.toArray(ShardSearchFailure.EMPTY_ARRAY));
}
static InternalSearchHits searchHitsFromXContent(XContentParser parser) throws IOException {
if (parser.currentToken() != XContentParser.Token.START_OBJECT) {
parser.nextToken();
ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation);
}
XContentParser.Token token = parser.currentToken();
String currentFieldName = null;
List<InternalSearchHit> hits = new ArrayList<>();
long totalHits = -1L;
float maxScore = 0f;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if (TOTAL.equals(currentFieldName)) {
totalHits = parser.longValue();
} else if (MAX_SCORE.equals(currentFieldName)) {
maxScore = parser.floatValue();
}
} else if (token == XContentParser.Token.VALUE_NULL) {
if (MAX_SCORE.equals(currentFieldName)) {
maxScore = Float.NaN; // NaN gets rendered as null-field
}
} else if (token == XContentParser.Token.START_ARRAY) {
if (HITS.equals(currentFieldName)) {
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
logger.debug("searchHitFromXContent");
hits.add(searchHitFromXContent(parser));
}
} else {
parser.skipChildren();
}
}
}
InternalSearchHit[] internalSearchHits = hits.toArray(new InternalSearchHit[0]);
return new InternalSearchHits(internalSearchHits, totalHits, maxScore);
}
static InternalSearchHit searchHitFromXContent(XContentParser parser) {
return createFromMap(MAP_PARSER.apply(parser, null));
}
static InternalSearchHit createFromMap(Map<String, Object> values) {
logger.debug("values = {}", values);
String id = get(_ID, values, null);
Text type = get(_TYPE, values, null);
InternalSearchHit.InternalNestedIdentity nestedIdentity = get(_NESTED, values, null);
Map<String, SearchHitField> fields = get(FIELDS, values, Collections.emptyMap());
InternalSearchHit searchHit = new InternalSearchHit(-1, id, type, nestedIdentity, fields);
String index = get(_INDEX, values, null);
ShardId shardId = get(_SHARD, values, null);
String nodeId = get(_NODE, values, null);
if (shardId != null && nodeId != null) {
assert shardId.index().getName().equals(index);
searchHit.shard(new SearchShardTarget(nodeId, index, shardId.id()));
}
searchHit.score(get(_SCORE, values, Float.NaN));
searchHit.version(get(_VERSION, values, -1L));
searchHit.sortValues(get(SORT, values, new Object[0]));
searchHit.highlightFields(get(HIGHLIGHT, values, null));
searchHit.sourceRef(get(SourceFieldMapper.NAME, values, null));
searchHit.explanation(get(_EXPLANATION, values, null));
searchHit.setInnerHits(get(INNER_HITS, values, null));
List<String> matchedQueries = get(MATCHED_QUERIES, values, null);
if (matchedQueries != null) {
searchHit.matchedQueries(matchedQueries.toArray(new String[0]));
}
return searchHit;
}
@SuppressWarnings("unchecked")
private static <T> T get(String key, Map<String, Object> map, T defaultValue) {
return (T) map.getOrDefault(key, defaultValue);
}
static InternalAggregations aggregationsFromXContent(XContentParser parser) throws IOException {
final List<InternalAggregation> aggregations = new ArrayList<>();
XContentParser.Token token;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.START_OBJECT) {
SetOnce<InternalAggregation> typedAgg = new SetOnce<>();
String currentField = parser.currentName();
XContentParserUtils.parseTypedKeysObject(parser, TYPED_KEYS_DELIMITER, InternalAggregation.class, typedAgg::set);
if (typedAgg.get() != null) {
aggregations.add(typedAgg.get());
} else {
throw new ElasticsearchException(parser.getTokenLocation() + ":" +
String.format(Locale.ROOT, "Could not parse aggregation keyed as [%s]", currentField));
}
}
}
return new InternalAggregations(aggregations);
}
static Suggest suggestFromXContent(XContentParser parser) throws IOException {
ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation);
List<Suggest.Suggestion<? extends Entry<? extends Option>>> suggestions = new ArrayList<>();
while ((parser.nextToken()) != XContentParser.Token.END_OBJECT) {
ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser::getTokenLocation);
String currentField = parser.currentName();
ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.nextToken(), parser::getTokenLocation);
Suggest.Suggestion<? extends Entry<? extends Option>> suggestion = suggestionFromXContent(parser);
if (suggestion != null) {
suggestions.add(suggestion);
} else {
throw new ElasticsearchException(parser.getTokenLocation() + ":" +
String.format(Locale.ROOT, "Could not parse suggestion keyed as [%s]", currentField));
}
}
return new Suggest(suggestions);
}
static Suggest.Suggestion<? extends Entry<? extends Option>> suggestionFromXContent(XContentParser parser) throws IOException {
ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser::getTokenLocation);
SetOnce<Suggest.Suggestion> suggestion = new SetOnce<>();
XContentParserUtils.parseTypedKeysObject(parser, "#", Suggest.Suggestion.class, suggestion::set);
return suggestion.get();
}
static ShardSearchFailure shardSearchFailureFromXContent(XContentParser parser) throws IOException {
XContentParser.Token token;
ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation);
String currentFieldName = null;
int shardId = -1;
String indexName = null;
String nodeId = null;
ElasticsearchException exception = null;
while((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if (SHARD_FIELD.equals(currentFieldName)) {
shardId = parser.intValue();
} else if (INDEX_FIELD.equals(currentFieldName)) {
indexName = parser.text();
} else if (NODE_FIELD.equals(currentFieldName)) {
nodeId = parser.text();
} else {
parser.skipChildren();
}
} else if (token == XContentParser.Token.START_OBJECT) {
if (REASON_FIELD.equals(currentFieldName)) {
exception = elasticsearchExceptionFromXContent(parser);
} else {
parser.skipChildren();
}
} else {
parser.skipChildren();
}
}
SearchShardTarget searchShardTarget = null;
if (nodeId != null) {
searchShardTarget = new SearchShardTarget(nodeId, indexName, shardId);
}
return new ShardSearchFailure(exception, searchShardTarget);
}
public static ElasticsearchException elasticsearchExceptionFromXContent(XContentParser parser) throws IOException {
XContentParser.Token token = parser.nextToken();
ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation);
return elasticsearchExceptionFromXContent(parser, false);
}
static ElasticsearchException elasticsearchExceptionFromXContent(XContentParser parser, boolean parseRootCauses)
throws IOException {
XContentParser.Token token = parser.currentToken();
ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation);
String type = null, reason = null, stack = null;
ElasticsearchException cause = null;
Map<String, List<String>> metadata = new HashMap<>();
Map<String, List<String>> headers = new HashMap<>();
List<ElasticsearchException> rootCauses = new ArrayList<>();
for (; token == XContentParser.Token.FIELD_NAME; token = parser.nextToken()) {
String currentFieldName = parser.currentName();
token = parser.nextToken();
if (token.isValue()) {
if (TYPE.equals(currentFieldName)) {
type = parser.text();
} else if (REASON.equals(currentFieldName)) {
reason = parser.text();
} else if (STACK_TRACE.equals(currentFieldName)) {
stack = parser.text();
} else if (token == XContentParser.Token.VALUE_STRING) {
metadata.put(currentFieldName, Collections.singletonList(parser.text()));
}
} else if (token == XContentParser.Token.START_OBJECT) {
if (CAUSED_BY.equals(currentFieldName)) {
cause = elasticsearchExceptionFromXContent(parser);
} else if (HEADER.equals(currentFieldName)) {
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else {
List<String> values = headers.getOrDefault(currentFieldName, new ArrayList<>());
if (token == XContentParser.Token.VALUE_STRING) {
values.add(parser.text());
} else if (token == XContentParser.Token.START_ARRAY) {
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
if (token == XContentParser.Token.VALUE_STRING) {
values.add(parser.text());
} else {
parser.skipChildren();
}
}
} else if (token == XContentParser.Token.START_OBJECT) {
parser.skipChildren();
}
headers.put(currentFieldName, values);
}
}
} else {
parser.skipChildren();
}
} else if (token == XContentParser.Token.START_ARRAY) {
if (parseRootCauses && ROOT_CAUSE.equals(currentFieldName)) {
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
rootCauses.add(elasticsearchExceptionFromXContent(parser));
}
} else {
List<String> values = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
if (token == XContentParser.Token.VALUE_STRING) {
values.add(parser.text());
} else {
parser.skipChildren();
}
}
if (values.size() > 0) {
if (metadata.containsKey(currentFieldName)) {
values.addAll(metadata.get(currentFieldName));
}
metadata.put(currentFieldName, values);
}
}
}
}
ElasticsearchException e = new ElasticsearchException(buildMessage(type, reason, stack), cause);
for (Map.Entry<String, List<String>> header : headers.entrySet()) {
e.addHeader(header.getKey(), header.getValue());
}
for (ElasticsearchException rootCause : rootCauses) {
e.addSuppressed(rootCause);
}
return e;
}
static String buildMessage(String type, String reason, String stack) {
StringBuilder message = new StringBuilder("Elasticsearch exception [");
message.append(TYPE).append('=').append(type).append(", ");
message.append(REASON).append('=').append(reason);
if (stack != null) {
message.append(", ").append(STACK_TRACE).append('=').append(stack);
}
message.append(']');
return message.toString();
}
private static void declareInnerHitsParseFields(ObjectParser<Map<String, Object>, Void> parser) {
declareMetaDataFields(parser);
parser.declareString((map, value) -> map.put(_TYPE, new Text(value)), new ParseField(_TYPE));
parser.declareString((map, value) -> map.put(_INDEX, value), new ParseField(_INDEX));
parser.declareString((map, value) -> map.put(_ID, value), new ParseField(_ID));
parser.declareString((map, value) -> map.put(_NODE, value), new ParseField(_NODE));
parser.declareField((map, value) -> map.put(_SCORE, value), SearchHit::parseScore, new ParseField(_SCORE),
ObjectParser.ValueType.FLOAT_OR_NULL);
parser.declareLong((map, value) -> map.put(_VERSION, value), new ParseField(_VERSION));
parser.declareField((map, value) -> map.put(_SHARD, value), (p, c) -> ShardId.fromString(p.text()),
new ParseField(_SHARD), STRING);
parser.declareObject((map, value) -> map.put(SourceFieldMapper.NAME, value), (p, c) -> parseSourceBytes(p),
new ParseField(SourceFieldMapper.NAME));
parser.declareObject((map, value) -> map.put(HIGHLIGHT, value), (p, c) -> parseHighlightFields(p),
new ParseField(HIGHLIGHT));
parser.declareObject((map, value) -> {
Map<String, SearchHitField> fieldMap = get(FIELDS, map, new HashMap<String, SearchHitField>());
fieldMap.putAll(value);
map.put(FIELDS, fieldMap);
}, (p, c) -> parseFields(p), new ParseField(FIELDS));
parser.declareObject((map, value) -> map.put(_EXPLANATION, value), (p, c) -> parseExplanation(p),
new ParseField(_EXPLANATION));
parser.declareObject((map, value) -> map.put(_NESTED, value), SearchHit.NestedIdentity::fromXContent,
new ParseField(_NESTED));
parser.declareObject((map, value) -> map.put(INNER_HITS, value), (p,c) -> parseInnerHits(p),
new ParseField(INNER_HITS));
parser.declareStringArray((map, list) -> map.put(MATCHED_QUERIES, list), new ParseField(MATCHED_QUERIES));
parser.declareField((map, list) -> map.put(SORT, list), SearchSortValues::fromXContent, new ParseField(SORT),
ObjectParser.ValueType.OBJECT_ARRAY);
}
private static void declareMetaDataFields(ObjectParser<Map<String, Object>, Void> parser) {
for (String metadatafield : MapperService.getAllMetaFields()) {
if (!metadatafield.equals(_ID) && !metadatafield.equals(_INDEX) && !metadatafield.equals(_TYPE)) {
parser.declareField((map, field) -> {
@SuppressWarnings("unchecked")
Map<String, SearchHitField> fieldMap = (Map<String, SearchHitField>) map.computeIfAbsent(FIELDS,
v -> new HashMap<String, SearchHitField>());
fieldMap.put(field.getName(), field);
}, (p, c) -> {
List<Object> values = new ArrayList<>();
values.add(parseFieldsValue(p));
return new InternalSearchHitField(metadatafield, values);
}, new ParseField(metadatafield), ObjectParser.ValueType.VALUE);
}
}
}
private static Map<String, SearchHitField> parseFields(XContentParser parser) throws IOException {
Map<String, SearchHitField> fields = new HashMap<>();
while (parser.nextToken() != XContentParser.Token.END_OBJECT) {
SearchHitField field = SearchHitField.fromXContent(parser);
fields.put(field.getName(), field);
}
return fields;
}
private static Map<String, SearchHits> parseInnerHits(XContentParser parser) throws IOException {
Map<String, SearchHits> innerHits = new HashMap<>();
while ((parser.nextToken()) != XContentParser.Token.END_OBJECT) {
ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser::getTokenLocation);
String name = parser.currentName();
ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);
XContentParserUtils.ensureFieldName(parser, parser.nextToken(), SearchHits.Fields.HITS);
innerHits.put(name, SearchHits.fromXContent(parser));
ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser::getTokenLocation);
}
return innerHits;
}
private static Map<String, HighlightField> parseHighlightFields(XContentParser parser) throws IOException {
Map<String, HighlightField> highlightFields = new HashMap<>();
while((parser.nextToken()) != XContentParser.Token.END_OBJECT) {
HighlightField highlightField = HighlightField.fromXContent(parser);
highlightFields.put(highlightField.getName(), highlightField);
}
return highlightFields;
}
private static Explanation parseExplanation(XContentParser parser) throws IOException {
ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation);
XContentParser.Token token;
Float value = null;
String description = null;
List<Explanation> details = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation);
String currentFieldName = parser.currentName();
token = parser.nextToken();
if (Fields.VALUE.equals(currentFieldName)) {
value = parser.floatValue();
} else if (Fields.DESCRIPTION.equals(currentFieldName)) {
description = parser.textOrNull();
} else if (Fields.DETAILS.equals(currentFieldName)) {
ensureExpectedToken(XContentParser.Token.START_ARRAY, token, parser::getTokenLocation);
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
details.add(parseExplanation(parser));
}
} else {
parser.skipChildren();
}
}
if (value == null) {
throw new ParsingException(parser.getTokenLocation(), "missing explanation value");
}
if (description == null) {
throw new ParsingException(parser.getTokenLocation(), "missing explanation description");
}
return Explanation.match(value, description, details);
}
private void buildExplanation(XContentBuilder builder, Explanation explanation) throws IOException {
builder.startObject();
builder.field(Fields.VALUE, explanation.getValue());
builder.field(Fields.DESCRIPTION, explanation.getDescription());
Explanation[] innerExps = explanation.getDetails();
if (innerExps != null) {
builder.startArray(Fields.DETAILS);
for (Explanation exp : innerExps) {
buildExplanation(builder, exp);
}
builder.endArray();
}
builder.endObject();
}
}
}
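A small sketch of the request side defined at the top of this class; the query is an arbitrary example, and SearchRequest#source(String) is the Elasticsearch 2.x setter assumed here:

SearchRequest request = new SearchRequest("myindex")
        .source("{\"query\":{\"match_all\":{}}}");
// createHttpRequest(url, request) posts this source to <url>/myindex/_search;
// entityParser() then rebuilds a SearchResponse from the JSON reply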

@ -0,0 +1,217 @@
package org.xbib.elx.http.util;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.function.BiConsumer;
import java.util.function.BiFunction;
import java.util.function.Consumer;
public abstract class AbstractObjectParser<Value, Context>
implements BiFunction<XContentParser, Context, Value>, ContextParser<Context, Value> {
/**
* Declare some field. Usually it is easier to use {@link #declareString(BiConsumer, ParseField)} or
* {@link #declareObject(BiConsumer, ContextParser, ParseField)} rather than call this directly.
*/
public abstract <T> void declareField(BiConsumer<Value, T> consumer, ContextParser<Context, T> parser, ParseField parseField,
ObjectParser.ValueType type);
/**
* Declares named objects in the style of aggregations. These are named
* inside an object like this:
*
* <pre>
* <code>
* {
* "aggregations": {
* "name_1": { "aggregation_type": {} },
* "name_2": { "aggregation_type": {} },
* "name_3": { "aggregation_type": {} }
* }
* }
* </code>
* </pre>
*
* Unlike the other version of this method, "ordered" mode (arrays of
* objects) is not supported.
*
* See NamedObjectHolder in ObjectParserTests for examples of how to invoke
* this.
*
* @param consumer
* sets the values once they have been parsed
* @param namedObjectParser
* parses each named object
* @param parseField
* the field to parse
*/
public abstract <T> void declareNamedObjects(BiConsumer<Value, List<T>> consumer,
ObjectParser.NamedObjectParser<T, Context> namedObjectParser,
ParseField parseField);
/**
* Declares named objects in the style of highlighting's field element.
* These are usually named inside an object like this:
*
* <pre>
* <code>
* {
* "highlight": {
* "fields": { &lt;------ this one
* "title": {},
* "body": {},
* "category": {}
* }
* }
* }
* </code>
* </pre>
*
* but, when order is important, some may be written this way:
*
* <pre>
* <code>
* {
* "highlight": {
* "fields": [ &lt;------ this one
* {"title": {}},
* {"body": {}},
* {"category": {}}
* ]
* }
* }
* </code>
* </pre>
*
* This is because JSON doesn't enforce ordering. Elasticsearch reads it in
* the order sent, but tools that generate JSON are free to put object
* members in an unordered Map, jumbling them. Thus, if you care about order,
* you can send the object in the second way.
*
* See NamedObjectHolder in ObjectParserTests for examples of how to invoke
* this.
*
* @param consumer
* sets the values once they have been parsed
* @param namedObjectParser
* parses each named object
* @param orderedModeCallback
* called when the named object is parsed using the "ordered"
* mode (the array of objects)
* @param parseField
* the field to parse
*/
public abstract <T> void declareNamedObjects(BiConsumer<Value, List<T>> consumer,
ObjectParser.NamedObjectParser<T, Context> namedObjectParser,
Consumer<Value> orderedModeCallback,
ParseField parseField);
public abstract String getName();
public <T> void declareField(BiConsumer<Value, T> consumer, CheckedFunction<XContentParser, T, IOException> parser,
ParseField parseField, ObjectParser.ValueType type) {
if (parser == null) {
throw new IllegalArgumentException("[parser] is required");
}
declareField(consumer, (p, c) -> parser.apply(p), parseField, type);
}
public <T> void declareObject(BiConsumer<Value, T> consumer, ContextParser<Context, T> objectParser, ParseField field) {
declareField(consumer, (p, c) -> objectParser.parse(p, c), field, ObjectParser.ValueType.OBJECT);
}
public void declareFloat(BiConsumer<Value, Float> consumer, ParseField field) {
// Using a method reference here angers some compilers
declareField(consumer, p -> p.floatValue(), field, ObjectParser.ValueType.FLOAT);
}
public void declareDouble(BiConsumer<Value, Double> consumer, ParseField field) {
// Using a method reference here angers some compilers
declareField(consumer, p -> p.doubleValue(), field, ObjectParser.ValueType.DOUBLE);
}
public void declareLong(BiConsumer<Value, Long> consumer, ParseField field) {
// Using a method reference here angers some compilers
declareField(consumer, p -> p.longValue(), field, ObjectParser.ValueType.LONG);
}
public void declareInt(BiConsumer<Value, Integer> consumer, ParseField field) {
// Using a method reference here angers some compilers
declareField(consumer, p -> p.intValue(), field, ObjectParser.ValueType.INT);
}
public void declareString(BiConsumer<Value, String> consumer, ParseField field) {
declareField(consumer, XContentParser::text, field, ObjectParser.ValueType.STRING);
}
public void declareStringOrNull(BiConsumer<Value, String> consumer, ParseField field) {
declareField(consumer, (p) -> p.currentToken() == XContentParser.Token.VALUE_NULL ? null : p.text(), field,
ObjectParser.ValueType.STRING_OR_NULL);
}
public void declareBoolean(BiConsumer<Value, Boolean> consumer, ParseField field) {
declareField(consumer, XContentParser::booleanValue, field, ObjectParser.ValueType.BOOLEAN);
}
public <T> void declareObjectArray(BiConsumer<Value, List<T>> consumer, ContextParser<Context, T> objectParser,
ParseField field) {
declareFieldArray(consumer, objectParser, field, ObjectParser.ValueType.OBJECT_ARRAY);
}
public void declareStringArray(BiConsumer<Value, List<String>> consumer, ParseField field) {
declareFieldArray(consumer, (p, c) -> p.text(), field, ObjectParser.ValueType.STRING_ARRAY);
}
public void declareDoubleArray(BiConsumer<Value, List<Double>> consumer, ParseField field) {
declareFieldArray(consumer, (p, c) -> p.doubleValue(), field, ObjectParser.ValueType.DOUBLE_ARRAY);
}
public void declareFloatArray(BiConsumer<Value, List<Float>> consumer, ParseField field) {
declareFieldArray(consumer, (p, c) -> p.floatValue(), field, ObjectParser.ValueType.FLOAT_ARRAY);
}
public void declareLongArray(BiConsumer<Value, List<Long>> consumer, ParseField field) {
declareFieldArray(consumer, (p, c) -> p.longValue(), field, ObjectParser.ValueType.LONG_ARRAY);
}
public void declareIntArray(BiConsumer<Value, List<Integer>> consumer, ParseField field) {
declareFieldArray(consumer, (p, c) -> p.intValue(), field, ObjectParser.ValueType.INT_ARRAY);
}
/**
* Declares a field that can contain an array of elements of one of the types listed in the {@link ObjectParser.ValueType} enum.
*/
public <T> void declareFieldArray(BiConsumer<Value, List<T>> consumer, ContextParser<Context, T> itemParser,
ParseField field, ObjectParser.ValueType type) {
declareField(consumer, (p, c) -> parseArray(p, () -> itemParser.parse(p, c)), field, type);
}
private interface IOSupplier<T> {
T get() throws IOException;
}
private static <T> List<T> parseArray(XContentParser parser, IOSupplier<T> supplier) throws IOException {
List<T> list = new ArrayList<>();
if (parser.currentToken().isValue()
|| parser.currentToken() == XContentParser.Token.VALUE_NULL
|| parser.currentToken() == XContentParser.Token.START_OBJECT) {
list.add(supplier.get()); // single value
} else {
while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
if (parser.currentToken().isValue()
|| parser.currentToken() == XContentParser.Token.VALUE_NULL
|| parser.currentToken() == XContentParser.Token.START_OBJECT) {
list.add(supplier.get());
} else {
throw new IllegalStateException("expected value but got [" + parser.currentToken() + "]");
}
}
}
return list;
}
}
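As a sketch of the single-value leniency in parseArray above (Thing and its setter are hypothetical; ObjectParser is the concrete subclass defined later in this package):

class Thing {
    List<String> tags;
    void setTags(List<String> tags) { this.tags = tags; }
}
ObjectParser<Thing, Void> parser = new ObjectParser<>("thing", Thing::new);
parser.declareStringArray(Thing::setTags, new ParseField("tags"));
// both {"tags":["a","b"]} and {"tags":"a"} now bind to setTags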

@ -0,0 +1,11 @@
package org.xbib.elx.http.util;
import java.util.function.BiConsumer;
/**
* A {@link BiConsumer}-like interface which allows throwing checked exceptions.
*/
@FunctionalInterface
public interface CheckedBiConsumer<T, U, E extends Exception> {
void accept(T t, U u) throws E;
}

@ -0,0 +1,6 @@
package org.xbib.elx.http.util;
@FunctionalInterface
public interface CheckedFunction<T, R, E extends Exception> {
R apply(T t) throws E;
}

@ -0,0 +1,13 @@
package org.xbib.elx.http.util;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
/**
* Reads an object from a parser using some context.
*/
@FunctionalInterface
public interface ContextParser<Context, T> {
T parse(XContentParser p, Context c) throws IOException;
}

@ -0,0 +1,14 @@
package org.xbib.elx.http.util;
import org.elasticsearch.common.xcontent.XContentLocation;
public class NamedObjectNotFoundException extends XContentParseException {
public NamedObjectNotFoundException(String message) {
this(null, message);
}
public NamedObjectNotFoundException(XContentLocation location, String message) {
super(location, message);
}
}

@ -0,0 +1,101 @@
package org.xbib.elx.http.util;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import static java.util.Collections.emptyMap;
import static java.util.Collections.unmodifiableMap;
public class NamedXContentRegistry {
public static class Entry {
/** The class that this entry can read. */
public final Class<?> categoryClass;
/** A name for the entry which is unique within the {@link #categoryClass}. */
public final ParseField name;
/** A parser capable of parsing the entry's class. */
private final ContextParser<Object, ?> parser;
/** Creates a new entry which can be stored by the registry. */
public <T> Entry(Class<T> categoryClass, ParseField name, CheckedFunction<XContentParser, ? extends T, IOException> parser) {
this.categoryClass = Objects.requireNonNull(categoryClass);
this.name = Objects.requireNonNull(name);
this.parser = Objects.requireNonNull((p, c) -> parser.apply(p));
}
/**
* Creates a new entry which can be stored by the registry.
* Prefer {@link Entry#Entry(Class, ParseField, CheckedFunction)} unless you need a context to carry around while parsing.
*/
public <T> Entry(Class<T> categoryClass, ParseField name, ContextParser<Object, ? extends T> parser) {
this.categoryClass = Objects.requireNonNull(categoryClass);
this.name = Objects.requireNonNull(name);
this.parser = Objects.requireNonNull(parser);
}
}
private final Map<Class<?>, Map<String, Entry>> registry;
public NamedXContentRegistry(List<Entry> entries) {
if (entries.isEmpty()) {
registry = emptyMap();
return;
}
entries = new ArrayList<>(entries);
entries.sort(Comparator.comparing(e -> e.categoryClass.getName()));
Map<Class<?>, Map<String, Entry>> registry = new HashMap<>();
Map<String, Entry> parsers = null;
Class<?> currentCategory = null;
for (Entry entry : entries) {
if (currentCategory != entry.categoryClass) {
if (currentCategory != null) {
// we've seen the last of this category, put it into the big map
registry.put(currentCategory, unmodifiableMap(parsers));
}
parsers = new HashMap<>();
currentCategory = entry.categoryClass;
}
for (String name : entry.name.getAllNamesIncludedDeprecated()) {
Object old = parsers.put(name, entry);
if (old != null) {
throw new IllegalArgumentException("NamedXContent [" + currentCategory.getName() + "][" + entry.name + "]" +
" is already registered for [" + old.getClass().getName() + "]," +
" cannot register [" + entry.parser.getClass().getName() + "]");
}
}
}
// handle the last category
registry.put(currentCategory, unmodifiableMap(parsers));
this.registry = unmodifiableMap(registry);
}
public <T, C> T parseNamedObject(Class<T> categoryClass, String name, XContentParser parser, C context) throws IOException {
Map<String, Entry> parsers = registry.get(categoryClass);
if (parsers == null) {
if (registry.isEmpty()) {
// The "empty" registry will never work so we throw a better exception as a hint.
throw new NamedObjectNotFoundException("named objects are not supported for this parser");
}
throw new NamedObjectNotFoundException("unknown named object category [" + categoryClass.getName() + "]");
}
Entry entry = parsers.get(name);
if (entry == null) {
throw new NamedObjectNotFoundException(parser.getTokenLocation(), "unable to parse " + categoryClass.getSimpleName() +
" with name [" + name + "]: parser not found");
}
return categoryClass.cast(entry.parser.parse(parser, context));
}
}
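A minimal sketch of registering and resolving a named parser; the String category and "greeting" name are made-up values:

NamedXContentRegistry registry = new NamedXContentRegistry(Collections.singletonList(
        new NamedXContentRegistry.Entry(String.class, new ParseField("greeting"), XContentParser::text)));
// while the parser is positioned on the value of a field named "greeting":
// String value = registry.parseNamedObject(String.class, "greeting", parser, null);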

@ -0,0 +1,441 @@
package org.xbib.elx.http.util;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.lang.reflect.Array;
import java.util.ArrayList;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.BiConsumer;
import java.util.function.BiFunction;
import java.util.function.Consumer;
import java.util.function.Supplier;
import static org.elasticsearch.common.xcontent.XContentParser.Token.START_ARRAY;
import static org.elasticsearch.common.xcontent.XContentParser.Token.START_OBJECT;
import static org.elasticsearch.common.xcontent.XContentParser.Token.VALUE_BOOLEAN;
import static org.elasticsearch.common.xcontent.XContentParser.Token.VALUE_EMBEDDED_OBJECT;
import static org.elasticsearch.common.xcontent.XContentParser.Token.VALUE_NULL;
import static org.elasticsearch.common.xcontent.XContentParser.Token.VALUE_NUMBER;
import static org.elasticsearch.common.xcontent.XContentParser.Token.VALUE_STRING;
/**
* A declarative, stateless parser that turns XContent into setter calls. A single parser should be defined for each object being parsed,
* nested elements can be added via {@link #declareObject(BiConsumer, ContextParser, ParseField)} which should be satisfied where possible
* by passing another instance of {@link ObjectParser}, this one customized for that Object.
* <p>
* This class works well for objects that can be created up front and filled in with setter calls, or that can be built using information
* available from earlier in the XContent.
* </p>
* <p>
* Instances of {@link ObjectParser} should be set up by declaring a constant field for the parser and declaring all fields in a static
* block just below the creation of the parser. Like this:
* </p>
* <pre>{@code
* private static final ObjectParser<Thing, SomeContext> PARSER = new ObjectParser<>("thing", Thing::new);
* static {
* PARSER.declareInt(Thing::setMineral, new ParseField("mineral"));
* PARSER.declareInt(Thing::setFruit, new ParseField("fruit"));
* }
* }</pre>
* It's highly recommended to use the high level declare methods like {@link #declareString(BiConsumer, ParseField)} instead of
* {@link #declareField} which can be used to implement exceptional parsing operations not covered by the high level methods.
*/
public final class ObjectParser<Value, Context> extends AbstractObjectParser<Value, Context> {
private static final Logger logger = LogManager.getLogger(ObjectParser.class.getName());
public static <Value, ElementValue> BiConsumer<Value, List<ElementValue>> fromList(Class<ElementValue> c,
BiConsumer<Value, ElementValue[]> consumer) {
return (Value v, List<ElementValue> l) -> {
@SuppressWarnings("unchecked")
ElementValue[] array = (ElementValue[]) Array.newInstance(c, l.size());
consumer.accept(v, l.toArray(array));
};
}
private final Map<String, FieldParser> fieldParserMap = new HashMap<>();
private final String name;
private final Supplier<Value> valueSupplier;
/**
* Should this parser ignore unknown fields? This should generally be set to true only when parsing responses from external systems,
* never when parsing requests from users.
*/
private final boolean ignoreUnknownFields;
/**
* Creates a new ObjectParser instance with a name. This name is used to reference the parser in exceptions and messages.
*/
public ObjectParser(String name) {
this(name, null);
}
/**
* Creates a new ObjectParser instance with a name.
* @param name the parser's name, used to reference the parser in exceptions and messages.
* @param valueSupplier a supplier that creates a new Value instance used when the parser is used as an inner object parser.
*/
public ObjectParser(String name, @Nullable Supplier<Value> valueSupplier) {
this(name, false, valueSupplier);
}
/**
* Creates a new ObjectParser instance with a name.
* @param name the parser's name, used to reference the parser in exceptions and messages.
* @param ignoreUnknownFields Should this parser ignore unknown fields? This should generally be set to true only when parsing
* responses from external systems, never when parsing requests from users.
* @param valueSupplier a supplier that creates a new Value instance used when the parser is used as an inner object parser.
*/
public ObjectParser(String name, boolean ignoreUnknownFields, @Nullable Supplier<Value> valueSupplier) {
this.name = name;
this.valueSupplier = valueSupplier;
this.ignoreUnknownFields = ignoreUnknownFields;
}
/**
* Parses a Value from the given {@link XContentParser}
* @param parser the parser to build a value from
* @param context context needed for parsing
* @return a new value instance drawn from the provided value supplier on {@link #ObjectParser(String, Supplier)}
* @throws IOException if an IOException occurs.
*/
@Override
public Value parse(XContentParser parser, Context context) throws IOException {
if (valueSupplier == null) {
throw new NullPointerException("valueSupplier is not set");
}
return parse(parser, valueSupplier.get(), context);
}
/**
* Parses a Value from the given {@link XContentParser}
* @param parser the parser to build a value from
* @param value the value to fill from the parser
* @param context a context that is passed along to all declared field parsers
* @return the parsed value
* @throws IOException if an IOException occurs.
*/
public Value parse(XContentParser parser, Value value, Context context) throws IOException {
logger.debug("parse");
XContentParser.Token token;
if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
token = parser.currentToken();
} else {
token = parser.nextToken();
if (token != XContentParser.Token.START_OBJECT) {
throw new XContentParseException(parser.getTokenLocation(), "[" + name + "] Expected START_OBJECT but was: " + token);
}
}
FieldParser fieldParser = null;
String currentFieldName = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
fieldParser = getParser(currentFieldName, parser);
logger.debug("currentFieldName={} fieldParser={}", currentFieldName, fieldParser);
} else {
if (currentFieldName == null) {
throw new XContentParseException(parser.getTokenLocation(), "[" + name + "] no field found");
}
if (fieldParser == null) {
                assert ignoreUnknownFields : "this should only be possible if configured to ignore unknown fields";
parser.skipChildren(); // noop if parser points to a value, skips children if parser is start object or start array
} else {
fieldParser.assertSupports(name, parser, currentFieldName);
parseSub(parser, fieldParser, currentFieldName, value, context);
}
fieldParser = null;
}
}
return value;
}
@Override
public Value apply(XContentParser parser, Context context) {
if (valueSupplier == null) {
throw new NullPointerException("valueSupplier is not set");
}
try {
return parse(parser, valueSupplier.get(), context);
} catch (IOException e) {
throw new XContentParseException(parser.getTokenLocation(), "[" + name + "] failed to parse object", e);
}
}
public interface Parser<Value, Context> {
void parse(XContentParser parser, Value value, Context context) throws IOException;
}
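    /**
     * Declares a field backed by a free-form {@link Parser}. A minimal sketch (the {@code raw} field
     * and {@code setRaw} setter are assumptions):
     * <pre>{@code
     * PARSER.declareField((p, v, c) -> v.setRaw(p.text()), new ParseField("raw"), ValueType.STRING);
     * }</pre>
     */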
public void declareField(Parser<Value, Context> p, ParseField parseField, ValueType type) {
if (parseField == null) {
throw new IllegalArgumentException("[parseField] is required");
}
if (type == null) {
throw new IllegalArgumentException("[type] is required");
}
FieldParser fieldParser = new FieldParser(p, type.supportedTokens(), parseField, type);
for (String fieldValue : parseField.getAllNamesIncludedDeprecated()) {
fieldParserMap.putIfAbsent(fieldValue, fieldParser);
}
}
@Override
public <T> void declareField(BiConsumer<Value, T> consumer, ContextParser<Context, T> parser, ParseField parseField,
ValueType type) {
if (consumer == null) {
throw new IllegalArgumentException("[consumer] is required");
}
if (parser == null) {
throw new IllegalArgumentException("[parser] is required");
}
declareField((p, v, c) -> consumer.accept(v, parser.parse(p, c)), parseField, type);
}
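    /**
     * Declares a field that may arrive either as an object or as a boolean: {@code true} installs the
     * supplied default value, {@code false} installs nothing, and an object is handed to the given
     * object parser. Illustrative payloads: {@code "thing": true} versus {@code "thing": { ... }}.
     */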
public <T> void declareObjectOrDefault(BiConsumer<Value, T> consumer, BiFunction<XContentParser, Context, T> objectParser,
Supplier<T> defaultValue, ParseField field) {
declareField((p, v, c) -> {
if (p.currentToken() == XContentParser.Token.VALUE_BOOLEAN) {
if (p.booleanValue()) {
consumer.accept(v, defaultValue.get());
}
} else {
consumer.accept(v, objectParser.apply(p, c));
}
}, field, ValueType.OBJECT_OR_BOOLEAN);
}
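    /**
     * Declares named objects in the two shapes this parser accepts (field names are illustrative):
     * a single object whose keys are the names, {@code "things": {"a": {...}, "b": {...}}}, or an
     * ordered array of single-field objects, {@code "things": [{"a": {...}}, {"b": {...}}]}, which
     * additionally triggers the {@code orderedModeCallback}.
     */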
@Override
public <T> void declareNamedObjects(BiConsumer<Value, List<T>> consumer, NamedObjectParser<T, Context> namedObjectParser,
Consumer<Value> orderedModeCallback, ParseField field) {
// This creates and parses the named object
BiFunction<XContentParser, Context, T> objectParser = (XContentParser p, Context c) -> {
if (p.currentToken() != XContentParser.Token.FIELD_NAME) {
throw new XContentParseException(p.getTokenLocation(), "[" + field + "] can be a single object with any number of "
+ "fields or an array where each entry is an object with a single field");
}
            // This messy exception nesting has the nice side effect of telling the user which field failed to parse
try {
String name = p.currentName();
try {
return namedObjectParser.parse(p, c, name);
} catch (Exception e) {
throw new XContentParseException(p.getTokenLocation(), "[" + field + "] failed to parse field [" + name + "]", e);
}
} catch (IOException e) {
throw new XContentParseException(p.getTokenLocation(), "[" + field + "] error while parsing", e);
}
};
declareField((XContentParser p, Value v, Context c) -> {
List<T> fields = new ArrayList<>();
XContentParser.Token token;
if (p.currentToken() == XContentParser.Token.START_OBJECT) {
// Fields are just named entries in a single object
while ((token = p.nextToken()) != XContentParser.Token.END_OBJECT) {
fields.add(objectParser.apply(p, c));
}
} else if (p.currentToken() == XContentParser.Token.START_ARRAY) {
// Fields are objects in an array. Each object contains a named field.
orderedModeCallback.accept(v);
while ((token = p.nextToken()) != XContentParser.Token.END_ARRAY) {
if (token != XContentParser.Token.START_OBJECT) {
throw new XContentParseException(p.getTokenLocation(), "[" + field + "] can be a single object with any number of "
+ "fields or an array where each entry is an object with a single field");
}
p.nextToken(); // Move to the first field in the object
fields.add(objectParser.apply(p, c));
                    p.nextToken(); // Move past the field, should now be at the end of the wrapper object
if (p.currentToken() != XContentParser.Token.END_OBJECT) {
throw new XContentParseException(p.getTokenLocation(), "[" + field + "] can be a single object with any number of "
+ "fields or an array where each entry is an object with a single field");
}
}
}
consumer.accept(v, fields);
}, field, ValueType.OBJECT_ARRAY);
}
@Override
public <T> void declareNamedObjects(BiConsumer<Value, List<T>> consumer, NamedObjectParser<T, Context> namedObjectParser,
ParseField field) {
Consumer<Value> orderedModeCallback = (v) -> {
throw new IllegalArgumentException("[" + field + "] doesn't support arrays. Use a single object with multiple fields.");
};
declareNamedObjects(consumer, namedObjectParser, orderedModeCallback, field);
}
/**
* Functional interface for instantiating and parsing named objects. See ObjectParserTests#NamedObject for the canonical way to
* implement this for objects that themselves have a parser.
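 * <p>
 * A minimal sketch as a lambda (the {@code NamedThing(String)} constructor and its
 * {@code NAMED_THING_PARSER} are assumptions):
 * </p>
 * <pre>{@code
 * NamedObjectParser<NamedThing, Void> parser = (p, c, name) -> {
 *     NamedThing thing = new NamedThing(name);
 *     NAMED_THING_PARSER.parse(p, thing, c);
 *     return thing;
 * };
 * }</pre>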
*/
@FunctionalInterface
public interface NamedObjectParser<T, Context> {
T parse(XContentParser p, Context c, String name) throws IOException;
}
/**
* Get the name of the parser.
*/
@Override
public String getName() {
return name;
}
private void parseArray(XContentParser parser, FieldParser fieldParser, String currentFieldName, Value value, Context context)
throws IOException {
assert parser.currentToken() == XContentParser.Token.START_ARRAY : "Token was: " + parser.currentToken();
parseValue(parser, fieldParser, currentFieldName, value, context);
}
private void parseValue(XContentParser parser, FieldParser fieldParser, String currentFieldName, Value value, Context context)
throws IOException {
try {
fieldParser.parser.parse(parser, value, context);
} catch (Exception ex) {
throw new XContentParseException(parser.getTokenLocation(),
"[" + name + "] failed to parse field [" + currentFieldName + "]", ex);
}
}
private void parseSub(XContentParser parser, FieldParser fieldParser, String currentFieldName, Value value, Context context)
throws IOException {
final XContentParser.Token token = parser.currentToken();
switch (token) {
case START_OBJECT:
parseValue(parser, fieldParser, currentFieldName, value, context);
/*
             * Well-behaved parsers should consume the entire object but
* asserting that they do that is not something we can do
* efficiently here. Instead we can check that they end on an
* END_OBJECT. They could end on the *wrong* end object and
* this test won't catch them, but that is the price that we pay
* for having a cheap test.
*/
if (parser.currentToken() != XContentParser.Token.END_OBJECT) {
throw new IllegalStateException("parser for [" + currentFieldName + "] did not end on END_OBJECT");
}
break;
case START_ARRAY:
parseArray(parser, fieldParser, currentFieldName, value, context);
/*
             * Well-behaved parsers should consume the entire array but
* asserting that they do that is not something we can do
* efficiently here. Instead we can check that they end on an
* END_ARRAY. They could end on the *wrong* end array and
* this test won't catch them, but that is the price that we pay
* for having a cheap test.
*/
if (parser.currentToken() != XContentParser.Token.END_ARRAY) {
throw new IllegalStateException("parser for [" + currentFieldName + "] did not end on END_ARRAY");
}
break;
case END_OBJECT:
case END_ARRAY:
case FIELD_NAME:
                throw new XContentParseException(parser.getTokenLocation(), "[" + name + "] " + token + " is unexpected");
case VALUE_STRING:
case VALUE_NUMBER:
case VALUE_BOOLEAN:
case VALUE_EMBEDDED_OBJECT:
case VALUE_NULL:
parseValue(parser, fieldParser, currentFieldName, value, context);
}
}
private FieldParser getParser(String fieldName, XContentParser xContentParser) {
FieldParser parser = fieldParserMap.get(fieldName);
if (parser == null && false == ignoreUnknownFields) {
throw new XContentParseException(xContentParser.getTokenLocation(),
"[" + name + "] unknown field [" + fieldName + "], parser not found");
}
return parser;
}
private class FieldParser {
private final Parser<Value, Context> parser;
private final EnumSet<XContentParser.Token> supportedTokens;
private final ParseField parseField;
private final ValueType type;
FieldParser(Parser<Value, Context> parser, EnumSet<XContentParser.Token> supportedTokens, ParseField parseField, ValueType type) {
this.parser = parser;
this.supportedTokens = supportedTokens;
this.parseField = parseField;
this.type = type;
}
void assertSupports(String parserName, XContentParser parser, String currentFieldName) {
if (!supportedTokens.contains(parser.currentToken())) {
throw new XContentParseException(parser.getTokenLocation(),
"[" + parserName + "] " + currentFieldName + " doesn't support values of type: " + parser.currentToken());
}
}
@Override
public String toString() {
return "FieldParser{" +
"preferred_name=" + parseField.getPreferredName() +
", supportedTokens=" + supportedTokens +
", type=" + type.name() +
'}';
}
}
public enum ValueType {
STRING(VALUE_STRING),
STRING_OR_NULL(VALUE_STRING, VALUE_NULL),
FLOAT(VALUE_NUMBER, VALUE_STRING),
FLOAT_OR_NULL(VALUE_NUMBER, VALUE_STRING, VALUE_NULL),
DOUBLE(VALUE_NUMBER, VALUE_STRING),
DOUBLE_OR_NULL(VALUE_NUMBER, VALUE_STRING, VALUE_NULL),
LONG(VALUE_NUMBER, VALUE_STRING),
LONG_OR_NULL(VALUE_NUMBER, VALUE_STRING, VALUE_NULL),
INT(VALUE_NUMBER, VALUE_STRING),
INT_OR_NULL(VALUE_NUMBER, VALUE_STRING, VALUE_NULL),
BOOLEAN(VALUE_BOOLEAN, VALUE_STRING),
STRING_ARRAY(START_ARRAY, VALUE_STRING),
FLOAT_ARRAY(START_ARRAY, VALUE_NUMBER, VALUE_STRING),
DOUBLE_ARRAY(START_ARRAY, VALUE_NUMBER, VALUE_STRING),
LONG_ARRAY(START_ARRAY, VALUE_NUMBER, VALUE_STRING),
INT_ARRAY(START_ARRAY, VALUE_NUMBER, VALUE_STRING),
BOOLEAN_ARRAY(START_ARRAY, VALUE_BOOLEAN),
OBJECT(START_OBJECT),
OBJECT_OR_NULL(START_OBJECT, VALUE_NULL),
OBJECT_ARRAY(START_OBJECT, START_ARRAY),
OBJECT_OR_BOOLEAN(START_OBJECT, VALUE_BOOLEAN),
OBJECT_OR_STRING(START_OBJECT, VALUE_STRING),
OBJECT_OR_LONG(START_OBJECT, VALUE_NUMBER),
OBJECT_ARRAY_BOOLEAN_OR_STRING(START_OBJECT, START_ARRAY, VALUE_BOOLEAN, VALUE_STRING),
OBJECT_ARRAY_OR_STRING(START_OBJECT, START_ARRAY, VALUE_STRING),
VALUE(VALUE_BOOLEAN, VALUE_NULL, VALUE_EMBEDDED_OBJECT, VALUE_NUMBER, VALUE_STRING),
VALUE_OBJECT_ARRAY(VALUE_BOOLEAN, VALUE_NULL, VALUE_EMBEDDED_OBJECT, VALUE_NUMBER, VALUE_STRING, START_OBJECT, START_ARRAY),
VALUE_ARRAY(VALUE_BOOLEAN, VALUE_NULL, VALUE_NUMBER, VALUE_STRING, START_ARRAY);
private final EnumSet<XContentParser.Token> tokens;
ValueType(XContentParser.Token first, XContentParser.Token... rest) {
this.tokens = EnumSet.of(first, rest);
}
public EnumSet<XContentParser.Token> supportedTokens() {
return this.tokens;
}
}
@Override
public String toString() {
return "ObjectParser{" +
"name='" + name + '\'' +
", fields=" + fieldParserMap.values() +
'}';
}
}

@ -0,0 +1,47 @@
package org.xbib.elx.http.util;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.xcontent.XContentLocation;
import java.util.Optional;
/**
* Thrown when one of the XContent parsers cannot parse something.
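 * The optional {@link XContentLocation} is prefixed to the message, so a typical message reads
 * {@code [3:17] [thing] failed to parse field [fruit]} (line, column, and names are illustrative).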
*/
public class XContentParseException extends IllegalArgumentException {
private final Optional<XContentLocation> location;
public XContentParseException(String message) {
this(null, message);
}
public XContentParseException(XContentLocation location, String message) {
super(message);
this.location = Optional.ofNullable(location);
}
public XContentParseException(XContentLocation location, String message, Exception cause) {
super(message, cause);
this.location = Optional.ofNullable(location);
}
public int getLineNumber() {
return location.map(l -> l.lineNumber).orElse(-1);
}
public int getColumnNumber() {
return location.map(l -> l.columnNumber).orElse(-1);
}
@Nullable
public XContentLocation getLocation() {
return location.orElse(null);
}
@Override
public String getMessage() {
return location.map(l -> "[" + l.toString() + "] ").orElse("") + super.getMessage();
}
}

@ -0,0 +1,68 @@
package org.xbib.elx.http.util;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.XContentLocation;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.Aggregation;
import org.xbib.elx.http.util.aggregations.ParsedStringTerms;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.function.Consumer;
import java.util.function.Supplier;
import java.util.stream.Collectors;
public class XContentParserUtils {
private static final NamedXContentRegistry xContentRegistry = new NamedXContentRegistry(getDefaultNamedXContents());
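    /**
     * Fails when the actual token differs from the expected one. A typical call site (as in the
     * bucket parsing code elsewhere in this package) looks like
     * {@code ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation)}.
     */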
    public static void ensureExpectedToken(XContentParser.Token expected, XContentParser.Token actual, Supplier<XContentLocation> location) {
if (actual != expected) {
String message = "Failed to parse object: expecting token of type [%s] but found [%s]";
throw new ElasticsearchException(location.get() + ":" + String.format(Locale.ROOT, message, expected, actual));
}
}
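    /**
     * Parses an object or array whose enclosing field name carries the type in front of the delimiter,
     * as produced by the REST {@code typed_keys} convention, e.g. {@code "sterms#my_terms"} (the key is
     * illustrative). The value is looked up by type in the registry and handed to the consumer; field
     * names without the delimiter are skipped for forward compatibility.
     */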
public static <T> void parseTypedKeysObject(XContentParser parser, String delimiter, Class<T> objectClass, Consumer<T> consumer)
throws IOException {
if (parser.currentToken() != XContentParser.Token.START_OBJECT && parser.currentToken() != XContentParser.Token.START_ARRAY) {
throwUnknownToken(parser.currentToken(), parser.getTokenLocation());
}
String currentFieldName = parser.currentName();
if (Strings.hasLength(currentFieldName)) {
int position = currentFieldName.indexOf(delimiter);
if (position > 0) {
String type = currentFieldName.substring(0, position);
String name = currentFieldName.substring(position + 1);
consumer.accept(namedObject(parser, objectClass, type, name));
return;
}
// if we didn't find a delimiter we ignore the object or array for forward compatibility instead of throwing an error
parser.skipChildren();
} else {
throw new ElasticsearchException(parser.getTokenLocation() + ":" + "Failed to parse object: empty key");
}
}
public static void throwUnknownToken(XContentParser.Token token, XContentLocation location) {
String message = "Failed to parse object: unexpected token [%s] found";
throw new ElasticsearchException(location + ":" + String.format(Locale.ROOT, message, token));
}
static <T> T namedObject(XContentParser parser, Class<T> categoryClass, String name, Object context) throws IOException {
return xContentRegistry.parseNamedObject(categoryClass, name, parser, context);
}
public static List<NamedXContentRegistry.Entry> getDefaultNamedXContents() {
Map<String, ContextParser<Object, ? extends Aggregation>> map = new HashMap<>();
//map.put("terms", (p, c) -> ParsedStringTerms.fromXContent(p, (String) c));
return map.entrySet().stream()
.map(entry -> new NamedXContentRegistry.Entry(Aggregation.class, new ParseField(entry.getKey()), entry.getValue()))
.collect(Collectors.toList());
}
}

@ -0,0 +1,18 @@
package org.xbib.elx.http.util.aggregations;
import org.elasticsearch.common.ParseField;
final class CommonFields {
public static final ParseField META = new ParseField("meta");
public static final ParseField BUCKETS = new ParseField("buckets");
public static final ParseField VALUE = new ParseField("value");
public static final ParseField VALUES = new ParseField("values");
public static final ParseField VALUE_AS_STRING = new ParseField("value_as_string");
public static final ParseField DOC_COUNT = new ParseField("doc_count");
public static final ParseField KEY = new ParseField("key");
public static final ParseField KEY_AS_STRING = new ParseField("key_as_string");
public static final ParseField FROM = new ParseField("from");
public static final ParseField FROM_AS_STRING = new ParseField("from_as_string");
public static final ParseField TO = new ParseField("to");
public static final ParseField TO_AS_STRING = new ParseField("to_as_string");
}

@ -0,0 +1,40 @@
package org.xbib.elx.http.util.aggregations;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.search.aggregations.Aggregation;
import org.xbib.elx.http.util.ObjectParser;
import java.util.Collections;
import java.util.Map;
/**
* An implementation of {@link Aggregation} that is parsed from a REST response.
* Serves as a base class for all aggregation implementations that are parsed from REST.
*/
public abstract class ParsedAggregation implements Aggregation {
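    /**
     * Declares the {@code meta} field common to all aggregations, e.g. {@code "meta": {"color": "blue"}}
     * (the metadata content is illustrative).
     */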
protected static void declareAggregationFields(ObjectParser<? extends ParsedAggregation, Void> objectParser) {
objectParser.declareObject((parsedAgg, metadata) -> parsedAgg.metadata = Collections.unmodifiableMap(metadata),
(parser, context) -> parser.map(), CommonFields.META);
}
private String name;
protected Map<String, Object> metadata;
@Override
public final String getName() {
return name;
}
protected void setName(String name) {
this.name = name;
}
@Override
public final Map<String, Object> getMetaData() {
return metadata;
}
}

@ -0,0 +1,149 @@
package org.xbib.elx.http.util.aggregations;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.Aggregations;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation;
import org.xbib.elx.http.util.CheckedBiConsumer;
import org.xbib.elx.http.util.CheckedFunction;
import org.xbib.elx.http.util.ObjectParser;
import org.xbib.elx.http.util.XContentParserUtils;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.function.Supplier;
import static org.xbib.elx.http.util.XContentParserUtils.ensureExpectedToken;
public abstract class ParsedMultiBucketAggregation<B extends ParsedMultiBucketAggregation.Bucket>
extends ParsedAggregation implements MultiBucketsAggregation {
protected final List<B> buckets = new ArrayList<>();
protected boolean keyed = false;
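    /**
     * Declares the {@code buckets} field in its two response shapes (bucket content is illustrative):
     * keyed buckets arrive as an object, {@code "buckets": {"a": {...}, "b": {...}}}, while unkeyed
     * buckets arrive as an array, {@code "buckets": [{...}, {...}]}.
     */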
protected static void declareMultiBucketAggregationFields(final ObjectParser<? extends ParsedMultiBucketAggregation, Void> objectParser,
final CheckedFunction<XContentParser, ParsedBucket, IOException> bucketParser,
final CheckedFunction<XContentParser, ParsedBucket, IOException> keyedBucketParser) {
declareAggregationFields(objectParser);
objectParser.declareField((parser, aggregation, context) -> {
XContentParser.Token token = parser.currentToken();
if (token == XContentParser.Token.START_OBJECT) {
aggregation.keyed = true;
while (parser.nextToken() != XContentParser.Token.END_OBJECT) {
aggregation.buckets.add(keyedBucketParser.apply(parser));
}
} else if (token == XContentParser.Token.START_ARRAY) {
aggregation.keyed = false;
while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
aggregation.buckets.add(bucketParser.apply(parser));
}
}
}, CommonFields.BUCKETS, ObjectParser.ValueType.OBJECT_ARRAY);
}
public abstract static class ParsedBucket implements MultiBucketsAggregation.Bucket {
private Aggregations aggregations;
private String keyAsString;
private long docCount;
private boolean keyed;
protected void setKeyAsString(String keyAsString) {
this.keyAsString = keyAsString;
}
@Override
public String getKeyAsString() {
return keyAsString;
}
protected void setDocCount(long docCount) {
this.docCount = docCount;
}
@Override
public long getDocCount() {
return docCount;
}
public void setKeyed(boolean keyed) {
this.keyed = keyed;
}
protected boolean isKeyed() {
return keyed;
}
protected void setAggregations(Aggregations aggregations) {
this.aggregations = aggregations;
}
@Override
public Aggregations getAggregations() {
return aggregations;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
/*if (keyed) {
builder.startObject(getKeyAsString());
} else {
builder.startObject();
}
if (keyAsString != null) {
builder.field(CommonFields.KEY_AS_STRING.getPreferredName(), getKeyAsString());
}
keyToXContent(builder);
builder.field(CommonFields.DOC_COUNT.getPreferredName(), docCount);
aggregations.toXContentInternal(builder, params);
builder.endObject();*/
return builder;
}
protected XContentBuilder keyToXContent(XContentBuilder builder) throws IOException {
return builder.field(CommonFields.KEY.getPreferredName(), getKey());
}
protected static <B extends ParsedBucket> B parseXContent(final XContentParser parser,
final boolean keyed,
final Supplier<B> bucketSupplier,
final CheckedBiConsumer<XContentParser, B, IOException> keyConsumer)
throws IOException {
final B bucket = bucketSupplier.get();
bucket.setKeyed(keyed);
XContentParser.Token token = parser.currentToken();
String currentFieldName = parser.currentName();
if (keyed) {
ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation);
ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);
}
List<InternalAggregation> aggregations = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if (CommonFields.KEY_AS_STRING.getPreferredName().equals(currentFieldName)) {
bucket.setKeyAsString(parser.text());
} else if (CommonFields.KEY.getPreferredName().equals(currentFieldName)) {
keyConsumer.accept(parser, bucket);
} else if (CommonFields.DOC_COUNT.getPreferredName().equals(currentFieldName)) {
bucket.setDocCount(parser.longValue());
}
} else if (token == XContentParser.Token.START_OBJECT) {
if (CommonFields.KEY.getPreferredName().equals(currentFieldName)) {
keyConsumer.accept(parser, bucket);
} else {
XContentParserUtils.parseTypedKeysObject(parser, "#", InternalAggregation.class,
aggregations::add);
}
}
}
bucket.setAggregations(new InternalAggregations(aggregations));
return bucket;
}
}
}

@ -0,0 +1,103 @@
package org.xbib.elx.http.util.aggregations;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.xbib.elx.http.util.ObjectParser;
import java.io.IOException;
import java.nio.CharBuffer;
import java.util.List;
public class ParsedStringTerms extends ParsedTerms {
public String getType() {
return "terms";
}
    private static final ObjectParser<ParsedStringTerms, Void> PARSER =
new ObjectParser<>(ParsedStringTerms.class.getSimpleName(), true, ParsedStringTerms::new);
static {
declareParsedTermsFields(PARSER, ParsedBucket::fromXContent);
}
public static ParsedStringTerms fromXContent(XContentParser parser, String name) throws IOException {
ParsedStringTerms aggregation = PARSER.parse(parser, null);
aggregation.setName(name);
return aggregation;
}
@Override
public Object getProperty(String path) {
throw new UnsupportedOperationException();
}
public static class ParsedBucket extends ParsedTerms.ParsedBucket {
private BytesRef key;
@Override
public Object getKey() {
return getKeyAsString();
}
@Override
public String getKeyAsString() {
String keyAsString = super.getKeyAsString();
if (keyAsString != null) {
return keyAsString;
}
if (key != null) {
return key.utf8ToString();
}
return null;
}
@Override
public Object getProperty(String containingAggName, List<String> path) {
throw new UnsupportedOperationException();
}
public Number getKeyAsNumber() {
if (key != null) {
return Double.parseDouble(key.utf8ToString());
}
return null;
}
protected XContentBuilder keyToXContent(XContentBuilder builder) throws IOException {
return builder.field(CommonFields.KEY.getPreferredName(), getKey());
}
static ParsedBucket fromXContent(XContentParser parser) throws IOException {
return parseTermsBucketXContent(parser, ParsedBucket::new, (p, bucket) -> {
CharBuffer cb = charBufferOrNull(p);
if (cb == null) {
bucket.key = null;
} else {
bucket.key = new BytesRef(cb);
}
});
}
static CharBuffer charBufferOrNull(XContentParser parser) throws IOException {
if (parser.currentToken() == XContentParser.Token.VALUE_NULL) {
return null;
}
return CharBuffer.wrap(parser.textCharacters(), parser.textOffset(), parser.textLength());
}
@Override
public void readFrom(StreamInput in) throws IOException {
throw new UnsupportedOperationException();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
throw new UnsupportedOperationException();
}
}
}

@ -0,0 +1,118 @@
package org.xbib.elx.http.util.aggregations;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.xbib.elx.http.util.CheckedBiConsumer;
import org.xbib.elx.http.util.CheckedFunction;
import org.xbib.elx.http.util.ObjectParser;
import org.xbib.elx.http.util.XContentParserUtils;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.function.Supplier;
public abstract class ParsedTerms extends ParsedMultiBucketAggregation<ParsedTerms.ParsedBucket> implements Terms {
protected static final ParseField DOC_COUNT_ERROR_UPPER_BOUND_FIELD_NAME = new ParseField("doc_count_error_upper_bound");
protected static final ParseField SUM_OF_OTHER_DOC_COUNTS = new ParseField("sum_other_doc_count");
protected long docCountErrorUpperBound;
protected long sumOtherDocCount;
@Override
public long getDocCountError() {
return docCountErrorUpperBound;
}
@Override
public long getSumOfOtherDocCounts() {
return sumOtherDocCount;
}
@Override
public List<Terms.Bucket> getBuckets() {
//return buckets;
throw new UnsupportedOperationException();
}
@Override
public Terms.Bucket getBucketByKey(String term) {
for (Terms.Bucket bucket : getBuckets()) {
if (bucket.getKeyAsString().equals(term)) {
return bucket;
}
}
return null;
}
static void declareParsedTermsFields(final ObjectParser<? extends ParsedTerms, Void> objectParser,
final CheckedFunction<XContentParser, ParsedBucket, IOException> bucketParser) {
declareMultiBucketAggregationFields(objectParser, bucketParser::apply, bucketParser::apply);
        objectParser.declareLong((parsedTerms, value) -> parsedTerms.docCountErrorUpperBound = value,
DOC_COUNT_ERROR_UPPER_BOUND_FIELD_NAME);
objectParser.declareLong((parsedTerms, value) -> parsedTerms.sumOtherDocCount = value,
SUM_OF_OTHER_DOC_COUNTS);
}
public abstract static class ParsedBucket extends ParsedMultiBucketAggregation.ParsedBucket /*implements Terms.Bucket*/ {
boolean showDocCountError = false;
protected long docCountError;
public long getDocCountError() {
return docCountError;
}
@Override
public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
/*builder.startObject();
keyToXContent(builder);
builder.field(CommonFields.DOC_COUNT.getPreferredName(), getDocCount());
if (showDocCountError) {
builder.field(DOC_COUNT_ERROR_UPPER_BOUND_FIELD_NAME.getPreferredName(), getDocCountError());
}
getAggregations().toXContentInternal(builder, params);
builder.endObject();*/
return builder;
}
static <B extends ParsedBucket> B parseTermsBucketXContent(final XContentParser parser, final Supplier<B> bucketSupplier,
final CheckedBiConsumer<XContentParser, B, IOException> keyConsumer)
throws IOException {
final B bucket = bucketSupplier.get();
final List<InternalAggregation> aggregations = new ArrayList<>();
XContentParser.Token token;
String currentFieldName = parser.currentName();
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if (CommonFields.KEY_AS_STRING.getPreferredName().equals(currentFieldName)) {
bucket.setKeyAsString(parser.text());
} else if (CommonFields.KEY.getPreferredName().equals(currentFieldName)) {
keyConsumer.accept(parser, bucket);
} else if (CommonFields.DOC_COUNT.getPreferredName().equals(currentFieldName)) {
bucket.setDocCount(parser.longValue());
} else if (DOC_COUNT_ERROR_UPPER_BOUND_FIELD_NAME.getPreferredName().equals(currentFieldName)) {
bucket.docCountError = parser.longValue();
bucket.showDocCountError = true;
}
} else if (token == XContentParser.Token.START_OBJECT) {
XContentParserUtils.parseTypedKeysObject(parser, "#", InternalAggregation.class,
aggregations::add);
}
}
bucket.setAggregations(new InternalAggregations(aggregations));
return bucket;
}
}
}

@ -0,0 +1,3 @@
org.xbib.elx.http.action.search.HttpSearchAction
org.xbib.elx.http.action.get.HttpGetAction
org.xbib.elx.http.action.get.HttpMultiGetAction

@ -0,0 +1,122 @@
package org.xbib.elx.http.test;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.admin.indices.refresh.RefreshAction;
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
import org.elasticsearch.action.get.GetAction;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.get.MultiGetAction;
import org.elasticsearch.action.get.MultiGetRequest;
import org.elasticsearch.action.get.MultiGetResponse;
import org.elasticsearch.action.index.IndexAction;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.search.SearchAction;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.transport.NoNodeAvailableException;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.junit.Ignore;
import org.junit.Test;
import org.xbib.elx.common.ClientBuilder;
import org.xbib.elx.http.ExtendedHttpClient;
import org.xbib.elx.http.ExtendedHttpClientProvider;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
public class ClientTest extends TestBase {
private static final Logger logger = LogManager.getLogger(ClientTest.class.getName());
@Ignore
@Test
public void testGet() throws Exception {
try (ExtendedHttpClient client = ClientBuilder.builder()
.provider(ExtendedHttpClientProvider.class)
.put("url", "http://" + host + ":" + httpPort)
.build()) {
IndexRequest indexRequest = new IndexRequest();
indexRequest.index("test");
indexRequest.type("test");
indexRequest.id("1");
indexRequest.source("test", "Hello Jörg");
IndexResponse indexResponse = client("1").execute(IndexAction.INSTANCE, indexRequest).actionGet();
client("1").execute(RefreshAction.INSTANCE, new RefreshRequest());
GetRequest getRequest = new GetRequest();
getRequest.index("test");
getRequest.type("test");
getRequest.id("1");
GetResponse getResponse = client.execute(GetAction.INSTANCE, getRequest).actionGet();
assertTrue(getResponse.isExists());
assertEquals("{\"test\":\"Hello Jörg\"}", getResponse.getSourceAsString());
} catch (NoNodeAvailableException e) {
logger.warn("skipping, no node available");
}
}
@Ignore
@Test
public void testMultiGet() throws Exception {
try (ExtendedHttpClient client = ClientBuilder.builder()
.provider(ExtendedHttpClientProvider.class)
.put("url", "http://" + host + ":" + httpPort)
.build()) {
IndexRequest indexRequest = new IndexRequest();
indexRequest.index("test");
indexRequest.type("test");
indexRequest.id("1");
indexRequest.source("test", "Hello Jörg");
IndexResponse indexResponse = client("1").execute(IndexAction.INSTANCE, indexRequest).actionGet();
client("1").execute(RefreshAction.INSTANCE, new RefreshRequest());
MultiGetRequest multiGetRequest = new MultiGetRequest();
multiGetRequest.add("test", "test", "1");
MultiGetResponse multiGetResponse = client.execute(MultiGetAction.INSTANCE, multiGetRequest).actionGet();
assertEquals(1, multiGetResponse.getResponses().length);
assertEquals("{\"test\":\"Hello Jörg\"}", multiGetResponse.getResponses()[0].getResponse().getSourceAsString());
} catch (NoNodeAvailableException e) {
logger.warn("skipping, no node available");
}
}
@Test
public void testSearchDoc() throws Exception {
try (ExtendedHttpClient client = ClientBuilder.builder()
.provider(ExtendedHttpClientProvider.class)
.put("url", "http://" + host + ":" + httpPort)
.build()) {
IndexRequest indexRequest = new IndexRequest();
indexRequest.index("test");
indexRequest.type("test");
indexRequest.id("1");
indexRequest.source("test", "Hello Jörg");
IndexResponse indexResponse = client("1").execute(IndexAction.INSTANCE, indexRequest).actionGet();
client("1").execute(RefreshAction.INSTANCE, new RefreshRequest());
SearchSourceBuilder builder = new SearchSourceBuilder();
builder.query(QueryBuilders.matchAllQuery());
SearchRequest searchRequest = new SearchRequest();
searchRequest.indices("test");
searchRequest.types("test");
searchRequest.source(builder);
SearchResponse searchResponse = client.execute(SearchAction.INSTANCE, searchRequest).actionGet();
long hits = searchResponse.getHits().getTotalHits();
assertEquals(1, hits);
logger.info("hits = {} source = {}", hits, searchResponse.getHits().getHits()[0].getSourceAsString());
assertEquals("{\"test\":\"Hello Jörg\"}", searchResponse.getHits().getHits()[0].getSourceAsString());
} catch (NoNodeAvailableException e) {
logger.warn("skipping, no node available");
}
}
}

@ -1,4 +1,4 @@
package org.xbib.elx.transport;
package org.xbib.elx.http.test;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.node.Node;
@ -8,4 +8,5 @@ public class MockNode extends Node {
public MockNode(Settings settings) {
super(settings);
}
}

@ -1,4 +1,4 @@
package org.xbib.elx.transport;
package org.xbib.elx.http.test;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
@ -6,6 +6,7 @@ import org.elasticsearch.ElasticsearchTimeoutException;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.admin.cluster.node.info.NodeInfo;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse;
import org.elasticsearch.action.admin.cluster.state.ClusterStateAction;
@ -15,6 +16,7 @@ import org.elasticsearch.client.support.AbstractClient;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.node.Node;
import org.junit.After;
@ -47,9 +49,11 @@ public class TestBase {
private String cluster;
private String host;
protected String host;
private int port;
protected int port;
protected int httpPort;
@Before
public void startNodes() {
@ -60,7 +64,7 @@ public class TestBase {
findNodeAddress();
try {
ClusterHealthResponse healthResponse = client("1").execute(ClusterHealthAction.INSTANCE,
new ClusterHealthRequest().waitForStatus(ClusterHealthStatus.GREEN)
new ClusterHealthRequest().waitForStatus(ClusterHealthStatus.YELLOW)
.timeout(TimeValue.timeValueSeconds(30))).actionGet();
if (healthResponse != null && healthResponse.isTimedOut()) {
throw new IOException("cluster state is " + healthResponse.getStatus().name()
@ -119,14 +123,6 @@ public class TestBase {
protected Settings getNodeSettings() {
return settingsBuilder()
.put("cluster.name", cluster)
//.put("cluster.routing.schedule", "50ms")
//.put("cluster.routing.allocation.disk.threshold_enabled", false)
//.put("discovery.zen.multicast.enabled", true)
//.put("discovery.zen.multicast.ping_timeout", "5s")
//.put("http.enabled", true)
//.put("threadpool.bulk.size", Runtime.getRuntime().availableProcessors())
//.put("threadpool.bulk.queue_size", 16 * Runtime.getRuntime().availableProcessors()) // default is 50, too low
//.put("index.number_of_replicas", 0)
.put("path.home", getHome())
.build();
}
@ -146,12 +142,18 @@ public class TestBase {
protected void findNodeAddress() {
NodesInfoRequest nodesInfoRequest = new NodesInfoRequest().transport(true);
NodesInfoResponse response = client("1").admin().cluster().nodesInfo(nodesInfoRequest).actionGet();
Object obj = response.iterator().next().getTransport().getAddress()
.publishAddress();
if (obj instanceof InetSocketTransportAddress) {
InetSocketTransportAddress address = (InetSocketTransportAddress) obj;
host = address.address().getHostName();
port = address.address().getPort();
for (NodeInfo nodeInfo : response) {
TransportAddress transportAddress = nodeInfo.getTransport().getAddress().publishAddress();
if (transportAddress instanceof InetSocketTransportAddress) {
InetSocketTransportAddress address = (InetSocketTransportAddress) transportAddress;
host = address.address().getHostName();
port = address.address().getPort();
}
transportAddress = nodeInfo.getHttp().getAddress().publishAddress();
if (transportAddress instanceof InetSocketTransportAddress) {
InetSocketTransportAddress address = (InetSocketTransportAddress) transportAddress;
httpPort = address.address().getPort();
}
}
}

@ -0,0 +1,13 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration status="OFF">
<appenders>
<Console name="Console" target="SYSTEM_OUT">
<PatternLayout pattern="[%d{ISO8601}][%-5p][%-25c][%t] %m%n"/>
</Console>
</appenders>
<Loggers>
<Root level="debug">
<AppenderRef ref="Console" />
</Root>
</Loggers>
</configuration>

@ -1,10 +1,5 @@
package org.xbib.elx.node.test;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsAction;
@ -17,9 +12,10 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.query.QueryBuilders;
import org.junit.Before;
import org.junit.Test;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.xbib.elx.common.ClientBuilder;
import org.xbib.elx.common.Parameters;
import org.xbib.elx.node.ExtendedNodeClient;
@ -29,65 +25,42 @@ import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
public class ClientTest extends TestBase {
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
@ExtendWith(TestExtension.class)
class ClientTest {
private static final Logger logger = LogManager.getLogger(ClientTest.class.getName());
private static final Long ACTIONS = 25000L;
private static final Long ACTIONS = 1000L;
private static final Long MAX_ACTIONS_PER_REQUEST = 1000L;
private static final Long MAX_ACTIONS_PER_REQUEST = 100L;
@Before
public void startNodes() {
try {
super.startNodes();
startNode("2");
} catch (Throwable t) {
logger.error("startNodes failed", t);
}
}
private final TestExtension.Helper helper;
@Test
public void testSingleDoc() throws Exception {
final ExtendedNodeClient client = ClientBuilder.builder(client("1"))
.provider(ExtendedNodeClientProvider.class)
.put(Parameters.MAX_ACTIONS_PER_REQUEST.name(), MAX_ACTIONS_PER_REQUEST)
.put(Parameters.FLUSH_INTERVAL.name(), TimeValue.timeValueSeconds(30))
.build();
try {
client.newIndex("test");
client.index("test", "1", true, "{ \"name\" : \"Hello World\"}"); // single doc ingest
client.flush();
client.waitForResponses(30L, TimeUnit.SECONDS);
} catch (NoNodeAvailableException e) {
logger.warn("skipping, no node available");
} finally {
assertEquals(1, client.getBulkMetric().getSucceeded().getCount());
if (client.getBulkController().getLastBulkError() != null) {
logger.error("error", client.getBulkController().getLastBulkError());
}
assertNull(client.getBulkController().getLastBulkError());
client.close();
}
ClientTest(TestExtension.Helper helper) {
this.helper = helper;
}
@Test
public void testNewIndex() throws Exception {
final ExtendedNodeClient client = ClientBuilder.builder(client("1"))
void testNewIndex() throws Exception {
final ExtendedNodeClient client = ClientBuilder.builder(helper.client("1"))
.provider(ExtendedNodeClientProvider.class)
.put(Parameters.FLUSH_INTERVAL.name(), TimeValue.timeValueSeconds(5))
.build();
client.newIndex("test");
client.newIndex("test1");
client.close();
}
@Test
public void testMapping() throws Exception {
final ExtendedNodeClient client = ClientBuilder.builder(client("1"))
void testMapping() throws Exception {
final ExtendedNodeClient client = ClientBuilder.builder(helper.client("1"))
.provider(ExtendedNodeClientProvider.class)
.put(Parameters.FLUSH_INTERVAL.name(), TimeValue.timeValueSeconds(5))
.build();
XContentBuilder builder = jsonBuilder()
XContentBuilder builder = JsonXContent.contentBuilder()
.startObject()
.startObject("doc")
.startObject("properties")
@ -97,30 +70,55 @@ public class ClientTest extends TestBase {
.endObject()
.endObject()
.endObject();
client.newIndex("test", Settings.EMPTY, builder.string());
GetMappingsRequest getMappingsRequest = new GetMappingsRequest().indices("test");
client.newIndex("test2", Settings.EMPTY, builder.string());
GetMappingsRequest getMappingsRequest = new GetMappingsRequest().indices("test2");
GetMappingsResponse getMappingsResponse =
client.getClient().execute(GetMappingsAction.INSTANCE, getMappingsRequest).actionGet();
logger.info("mappings={}", getMappingsResponse.getMappings());
assertTrue(getMappingsResponse.getMappings().get("test").containsKey("doc"));
assertTrue(getMappingsResponse.getMappings().get("test2").containsKey("doc"));
client.close();
}
@Test
public void testRandomDocs() throws Exception {
void testSingleDoc() throws Exception {
final ExtendedNodeClient client = ClientBuilder.builder(helper.client("1"))
.provider(ExtendedNodeClientProvider.class)
.put(Parameters.MAX_ACTIONS_PER_REQUEST.name(), MAX_ACTIONS_PER_REQUEST)
.put(Parameters.FLUSH_INTERVAL.name(), TimeValue.timeValueSeconds(30))
.build();
try {
client.newIndex("test3");
client.index("test3", "1", true, "{ \"name\" : \"Hello World\"}"); // single doc ingest
client.flush();
client.waitForResponses(30L, TimeUnit.SECONDS);
} catch (NoNodeAvailableException e) {
logger.warn("skipping, no node available");
} finally {
assertEquals(1, client.getBulkMetric().getSucceeded().getCount());
if (client.getBulkController().getLastBulkError() != null) {
logger.error("error", client.getBulkController().getLastBulkError());
}
assertNull(client.getBulkController().getLastBulkError());
client.close();
}
}
@Test
void testRandomDocs() throws Exception {
long numactions = ACTIONS;
final ExtendedNodeClient client = ClientBuilder.builder(client("1"))
final ExtendedNodeClient client = ClientBuilder.builder(helper.client("1"))
.provider(ExtendedNodeClientProvider.class)
.put(Parameters.MAX_ACTIONS_PER_REQUEST.name(), MAX_ACTIONS_PER_REQUEST)
.put(Parameters.FLUSH_INTERVAL.name(), TimeValue.timeValueSeconds(60))
.build();
try {
client.newIndex("test");
client.newIndex("test4");
for (int i = 0; i < ACTIONS; i++) {
client.index("test", null, false, "{ \"name\" : \"" + randomString(32) + "\"}");
client.index("test4", null, false,
"{ \"name\" : \"" + helper.randomString(32) + "\"}");
}
client.flush();
client.waitForResponses(30L, TimeUnit.SECONDS);
client.waitForResponses(60L, TimeUnit.SECONDS);
} catch (NoNodeAvailableException e) {
logger.warn("skipping, no node available");
} finally {
@ -129,9 +127,11 @@ public class ClientTest extends TestBase {
logger.error("error", client.getBulkController().getLastBulkError());
}
assertNull(client.getBulkController().getLastBulkError());
client.refreshIndex("test");
client.refreshIndex("test4");
SearchRequestBuilder searchRequestBuilder = new SearchRequestBuilder(client.getClient(), SearchAction.INSTANCE)
.setQuery(QueryBuilders.matchAllQuery()).setSize(0);
.setIndices("test4")
.setQuery(QueryBuilders.matchAllQuery())
.setSize(0);
assertEquals(numactions,
searchRequestBuilder.execute().actionGet().getHits().getTotalHits());
client.close();
@ -139,37 +139,38 @@ public class ClientTest extends TestBase {
}
@Test
public void testThreadedRandomDocs() throws Exception {
void testThreadedRandomDocs() throws Exception {
int maxthreads = Runtime.getRuntime().availableProcessors();
Long maxActionsPerRequest = MAX_ACTIONS_PER_REQUEST;
final Long actions = ACTIONS;
long maxActionsPerRequest = MAX_ACTIONS_PER_REQUEST;
final long actions = ACTIONS;
logger.info("NodeClient max={} maxactions={} maxloop={}", maxthreads, maxActionsPerRequest, actions);
final ExtendedNodeClient client = ClientBuilder.builder(client("1"))
final ExtendedNodeClient client = ClientBuilder.builder(helper.client("1"))
.provider(ExtendedNodeClientProvider.class)
.put(Parameters.MAX_CONCURRENT_REQUESTS.name(), maxthreads)
.put(Parameters.MAX_ACTIONS_PER_REQUEST.name(), maxActionsPerRequest)
.put(Parameters.FLUSH_INTERVAL.name(), TimeValue.timeValueSeconds(60))
.build();
try {
client.newIndex("test")
.startBulk("test", -1, 1000);
ThreadPoolExecutor pool = EsExecutors.newFixed("bulk-nodeclient-test", maxthreads, 30,
EsExecutors.daemonThreadFactory("bulk-nodeclient-test"));
client.newIndex("test5")
.startBulk("test5", -1, 1000);
ThreadPoolExecutor pool = EsExecutors.newFixed("nodeclient-test", maxthreads, 30,
EsExecutors.daemonThreadFactory("nodeclient-test"));
final CountDownLatch latch = new CountDownLatch(maxthreads);
for (int i = 0; i < maxthreads; i++) {
pool.execute(() -> {
for (int i1 = 0; i1 < actions; i1++) {
client.index("test", null, false,"{ \"name\" : \"" + randomString(32) + "\"}");
client.index("test5", null, false,
"{ \"name\" : \"" + helper.randomString(32) + "\"}");
}
latch.countDown();
});
}
logger.info("waiting for latch...");
if (latch.await(5, TimeUnit.MINUTES)) {
if (latch.await(60, TimeUnit.SECONDS)) {
logger.info("flush...");
client.flush();
client.waitForResponses(60L, TimeUnit.SECONDS);
logger.info("got all responses, pool shutdown...");
logger.info("pool shutdown...");
pool.shutdown();
logger.info("pool is shut down");
} else {
@ -178,15 +179,17 @@ public class ClientTest extends TestBase {
} catch (NoNodeAvailableException e) {
logger.warn("skipping, no node available");
} finally {
client.stopBulk("test", 30L, TimeUnit.SECONDS);
client.stopBulk("test5", 60L, TimeUnit.SECONDS);
assertEquals(maxthreads * actions, client.getBulkMetric().getSucceeded().getCount());
if (client.getBulkController().getLastBulkError() != null) {
logger.error("error", client.getBulkController().getLastBulkError());
}
assertNull(client.getBulkController().getLastBulkError());
client.refreshIndex("test");
client.refreshIndex("test5");
SearchRequestBuilder searchRequestBuilder = new SearchRequestBuilder(client.getClient(), SearchAction.INSTANCE)
.setQuery(QueryBuilders.matchAllQuery()).setSize(0);
.setIndices("test5")
.setQuery(QueryBuilders.matchAllQuery())
.setSize(0);
assertEquals(maxthreads * actions,
searchRequestBuilder.execute().actionGet().getHits().getTotalHits());
client.close();

@ -7,7 +7,8 @@ import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.client.transport.NoNodeAvailableException;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.junit.Test;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.xbib.elx.common.ClientBuilder;
import org.xbib.elx.common.Parameters;
import org.xbib.elx.node.ExtendedNodeClient;
@ -15,38 +16,47 @@ import org.xbib.elx.node.ExtendedNodeClientProvider;
import java.util.concurrent.TimeUnit;
import static org.junit.Assert.*;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
public class DuplicateIDTest extends TestBase {
@ExtendWith(TestExtension.class)
class DuplicateIDTest {
private static final Logger logger = LogManager.getLogger(DuplicateIDTest.class.getName());
private static final Long MAX_ACTIONS_PER_REQUEST = 1000L;
private static final Long MAX_ACTIONS_PER_REQUEST = 10L;
private static final Long ACTIONS = 12345L;
private static final Long ACTIONS = 50L;
private final TestExtension.Helper helper;
DuplicateIDTest(TestExtension.Helper helper) {
this.helper = helper;
}
@Test
public void testDuplicateDocIDs() throws Exception {
void testDuplicateDocIDs() throws Exception {
long numactions = ACTIONS;
final ExtendedNodeClient client = ClientBuilder.builder(client("1"))
final ExtendedNodeClient client = ClientBuilder.builder(helper.client("1"))
.provider(ExtendedNodeClientProvider.class)
.put(Parameters.MAX_ACTIONS_PER_REQUEST.name(), MAX_ACTIONS_PER_REQUEST)
.build();
try {
client.newIndex("test");
client.newIndex("test_dup");
for (int i = 0; i < ACTIONS; i++) {
client.index("test", randomString(1), false, "{ \"name\" : \"" + randomString(32) + "\"}");
client.index("test_dup", helper.randomString(1), false,
"{ \"name\" : \"" + helper.randomString(32) + "\"}");
}
client.flush();
client.waitForResponses(30L, TimeUnit.SECONDS);
client.refreshIndex("test");
client.refreshIndex("test_dup");
SearchSourceBuilder builder = new SearchSourceBuilder();
builder.query(QueryBuilders.matchAllQuery());
SearchRequest searchRequest = new SearchRequest();
searchRequest.indices("test");
searchRequest.types("test");
searchRequest.indices("test_dup");
searchRequest.source(builder);
long hits = client("1").execute(SearchAction.INSTANCE, searchRequest).actionGet().getHits().getTotalHits();
long hits = helper.client("1").execute(SearchAction.INSTANCE, searchRequest).actionGet().getHits().getTotalHits();
logger.info("hits = {}", hits);
assertTrue(hits < ACTIONS);
} catch (NoNodeAvailableException e) {

@ -7,7 +7,8 @@ import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsReques
import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsResponse;
import org.elasticsearch.client.transport.NoNodeAvailableException;
import org.elasticsearch.common.settings.Settings;
import org.junit.Test;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.xbib.elx.api.IndexPruneResult;
import org.xbib.elx.common.ClientBuilder;
import org.xbib.elx.node.ExtendedNodeClient;
@ -19,17 +20,24 @@ import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
public class IndexPruneTest extends TestBase {
@ExtendWith(TestExtension.class)
class IndexPruneTest {
private static final Logger logger = LogManager.getLogger(IndexShiftTest.class.getName());
private final TestExtension.Helper helper;
IndexPruneTest(TestExtension.Helper helper) {
this.helper = helper;
}
@Test
public void testPrune() throws IOException {
final ExtendedNodeClient client = ClientBuilder.builder(client("1"))
void testPrune() throws IOException {
final ExtendedNodeClient client = ClientBuilder.builder(helper.client("1"))
.provider(ExtendedNodeClientProvider.class)
.build();
try {
@ -37,25 +45,22 @@ public class IndexPruneTest extends TestBase {
.put("index.number_of_shards", 1)
.put("index.number_of_replicas", 0)
.build();
client.newIndex("test1", settings);
client.shiftIndex("test", "test1", Collections.emptyList());
client.newIndex("test2", settings);
client.shiftIndex("test", "test2", Collections.emptyList());
client.newIndex("test3", settings);
client.shiftIndex("test", "test3", Collections.emptyList());
client.newIndex("test4", settings);
client.shiftIndex("test", "test4", Collections.emptyList());
client.newIndex("test_prune1", settings);
client.shiftIndex("test_prune", "test_prune1", Collections.emptyList());
client.newIndex("test_prune2", settings);
client.shiftIndex("test_prune", "test_prune2", Collections.emptyList());
client.newIndex("test_prune3", settings);
client.shiftIndex("test_prune", "test_prune3", Collections.emptyList());
client.newIndex("test_prune4", settings);
client.shiftIndex("test_prune", "test_prune4", Collections.emptyList());
IndexPruneResult indexPruneResult =
client.pruneIndex("test", "test4", 2, 2, true);
assertTrue(indexPruneResult.getDeletedIndices().contains("test1"));
assertTrue(indexPruneResult.getDeletedIndices().contains("test2"));
assertFalse(indexPruneResult.getDeletedIndices().contains("test3"));
assertFalse(indexPruneResult.getDeletedIndices().contains("test4"));
client.pruneIndex("test_prune", "test_prune4", 2, 2, true);
assertTrue(indexPruneResult.getDeletedIndices().contains("test_prune1"));
assertTrue(indexPruneResult.getDeletedIndices().contains("test_prune2"));
assertFalse(indexPruneResult.getDeletedIndices().contains("test_prune3"));
assertFalse(indexPruneResult.getDeletedIndices().contains("test_prune4"));
List<Boolean> list = new ArrayList<>();
for (String index : Arrays.asList("test1", "test2", "test3", "test4")) {
for (String index : Arrays.asList("test_prune1", "test_prune2", "test_prune3", "test_prune4")) {
IndicesExistsRequest indicesExistsRequest = new IndicesExistsRequest();
indicesExistsRequest.indices(new String[] { index });
IndicesExistsResponse indicesExistsResponse =

@ -7,7 +7,8 @@ import org.elasticsearch.client.transport.NoNodeAvailableException;
import org.elasticsearch.cluster.metadata.AliasAction;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.query.QueryBuilders;
import org.junit.Test;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.xbib.elx.api.IndexShiftResult;
import org.xbib.elx.common.ClientBuilder;
import org.xbib.elx.node.ExtendedNodeClient;
@ -17,16 +18,23 @@ import java.util.Arrays;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
public class IndexShiftTest extends TestBase {
@ExtendWith(TestExtension.class)
class IndexShiftTest {
private static final Logger logger = LogManager.getLogger(IndexShiftTest.class.getName());
private final TestExtension.Helper helper;
IndexShiftTest(TestExtension.Helper helper) {
this.helper = helper;
}
@Test
public void testIndexShift() throws Exception {
final ExtendedNodeClient client = ClientBuilder.builder(client("1"))
void testIndexShift() throws Exception {
final ExtendedNodeClient client = ClientBuilder.builder(helper.client("1"))
.provider(ExtendedNodeClientProvider.class)
.build();
try {
@ -36,14 +44,13 @@ public class IndexShiftTest extends TestBase {
.build();
client.newIndex("test1234", settings);
for (int i = 0; i < 1; i++) {
client.index("test1234", randomString(1), false, "{ \"name\" : \"" + randomString(32) + "\"}");
client.index("test1234", helper.randomString(1), false,
"{ \"name\" : \"" + helper.randomString(32) + "\"}");
}
client.flush();
client.waitForResponses(30L, TimeUnit.SECONDS);
IndexShiftResult indexShiftResult =
client.shiftIndex("test", "test1234", Arrays.asList("a", "b", "c"));
client.shiftIndex("test_shift", "test1234", Arrays.asList("a", "b", "c"));
assertTrue(indexShiftResult.getNewAliases().contains("a"));
assertTrue(indexShiftResult.getNewAliases().contains("b"));
assertTrue(indexShiftResult.getNewAliases().contains("c"));
@ -53,23 +60,24 @@ public class IndexShiftTest extends TestBase {
assertTrue(aliases.containsKey("a"));
assertTrue(aliases.containsKey("b"));
assertTrue(aliases.containsKey("c"));
assertTrue(aliases.containsKey("test"));
assertTrue(aliases.containsKey("test_shift"));
String resolved = client.resolveAlias("test");
String resolved = client.resolveAlias("test_shift");
aliases = client.getAliases(resolved);
assertTrue(aliases.containsKey("a"));
assertTrue(aliases.containsKey("b"));
assertTrue(aliases.containsKey("c"));
assertTrue(aliases.containsKey("test"));
assertTrue(aliases.containsKey("test_shift"));
client.newIndex("test5678", settings);
for (int i = 0; i < 1; i++) {
client.index("test5678", randomString(1), false, "{ \"name\" : \"" + randomString(32) + "\"}");
client.index("test5678", helper.randomString(1), false,
"{ \"name\" : \"" + helper.randomString(32) + "\"}");
}
client.flush();
client.waitForResponses(30L, TimeUnit.SECONDS);
indexShiftResult = client.shiftIndex("test", "test5678", Arrays.asList("d", "e", "f"),
indexShiftResult = client.shiftIndex("test_shift", "test5678", Arrays.asList("d", "e", "f"),
(request, index, alias) -> request.addAliasAction(new IndicesAliasesRequest.AliasActions(AliasAction.Type.ADD,
index, alias).filter(QueryBuilders.termQuery("my_key", alias)))
);
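// the extra alias action attaches a term filter, so each new alias (d, e, f) only matches
// documents whose my_key field equals the alias name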
@ -88,7 +96,7 @@ public class IndexShiftTest extends TestBase {
assertTrue(aliases.containsKey("e"));
assertTrue(aliases.containsKey("f"));
resolved = client.resolveAlias("test");
resolved = client.resolveAlias("test_shift");
aliases = client.getAliases(resolved);
assertTrue(aliases.containsKey("a"));
assertTrue(aliases.containsKey("b"));

@ -1,151 +0,0 @@
package org.xbib.elx.node.test;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.admin.indices.stats.CommonStats;
import org.elasticsearch.action.admin.indices.stats.IndexShardStats;
import org.elasticsearch.action.admin.indices.stats.IndexStats;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequestBuilder;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
import org.elasticsearch.action.search.SearchAction;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.client.transport.NoNodeAvailableException;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.indexing.IndexingStats;
import org.junit.Ignore;
import org.junit.Test;
import org.xbib.elx.common.ClientBuilder;
import org.xbib.elx.node.ExtendedNodeClient;
import org.xbib.elx.node.ExtendedNodeClientProvider;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
@Ignore
public class ReplicaTest extends TestBase {
private static final Logger logger = LogManager.getLogger(ReplicaTest.class.getName());
@Test
public void testReplicaLevel() throws Exception {
// we need nodes for replica levels
startNode("2");
startNode("3");
startNode("4");
Settings settingsTest1 = Settings.settingsBuilder()
.put("index.number_of_shards", 2)
.put("index.number_of_replicas", 3)
.build();
Settings settingsTest2 = Settings.settingsBuilder()
.put("index.number_of_shards", 2)
.put("index.number_of_replicas", 1)
.build();
final ExtendedNodeClient client = ClientBuilder.builder(client("1"))
.provider(ExtendedNodeClientProvider.class)
.build();
try {
client.newIndex("test1", settingsTest1, new HashMap<>())
.newIndex("test2", settingsTest2, new HashMap<>());
client.waitForCluster("GREEN", 30L, TimeUnit.SECONDS);
for (int i = 0; i < 1234; i++) {
client.index("test1", null, false, "{ \"name\" : \"" + randomString(32) + "\"}");
}
for (int i = 0; i < 1234; i++) {
client.index("test2", null, false, "{ \"name\" : \"" + randomString(32) + "\"}");
}
client.flush();
client.waitForResponses(30L, TimeUnit.SECONDS);
} catch (NoNodeAvailableException e) {
logger.warn("skipping, no node available");
} finally {
logger.info("refreshing");
client.refreshIndex("test1");
client.refreshIndex("test2");
SearchRequestBuilder searchRequestBuilder = new SearchRequestBuilder(client.getClient(), SearchAction.INSTANCE)
.setIndices("test1", "test2")
.setQuery(matchAllQuery());
long hits = searchRequestBuilder.execute().actionGet().getHits().getTotalHits();
logger.info("query total hits={}", hits);
assertEquals(2468, hits);
IndicesStatsRequestBuilder indicesStatsRequestBuilder = new IndicesStatsRequestBuilder(client.getClient(), IndicesStatsAction.INSTANCE)
.all();
IndicesStatsResponse response = indicesStatsRequestBuilder.execute().actionGet();
for (Map.Entry<String, IndexStats> m : response.getIndices().entrySet()) {
IndexStats indexStats = m.getValue();
CommonStats commonStats = indexStats.getTotal();
IndexingStats indexingStats = commonStats.getIndexing();
IndexingStats.Stats stats = indexingStats.getTotal();
logger.info("index {}: count = {}", m.getKey(), stats.getIndexCount());
for (Map.Entry<Integer, IndexShardStats> me : indexStats.getIndexShards().entrySet()) {
IndexShardStats indexShardStats = me.getValue();
CommonStats commonShardStats = indexShardStats.getTotal();
logger.info("shard {} count = {}", me.getKey(),
commonShardStats.getIndexing().getTotal().getIndexCount());
}
}
try {
client.deleteIndex("test1")
.deleteIndex("test2");
} catch (Exception e) {
logger.error("delete index failed, ignored. Reason:", e);
}
client.close();
if (client.getBulkController().getLastBulkError() != null) {
logger.error("error", client.getBulkController().getLastBulkError());
}
assertNull(client.getBulkController().getLastBulkError());
}
}
@Test
public void testUpdateReplicaLevel() throws Exception {
long numberOfShards = 2;
int replicaLevel = 3;
// we need 3 nodes for replica level 3
startNode("2");
startNode("3");
Settings settings = Settings.settingsBuilder()
.put("index.number_of_shards", numberOfShards)
.put("index.number_of_replicas", 0)
.build();
final ExtendedNodeClient client = ClientBuilder.builder(client("1"))
.provider(ExtendedNodeClientProvider.class)
.build();
try {
client.newIndex("replicatest", settings, new HashMap<>());
client.waitForCluster("GREEN", 30L, TimeUnit.SECONDS);
for (int i = 0; i < 12345; i++) {
client.index("replicatest",null, false, "{ \"name\" : \"" + randomString(32) + "\"}");
}
client.flush();
client.waitForResponses(30L, TimeUnit.SECONDS);
client.updateReplicaLevel("replicatest", replicaLevel, 30L, TimeUnit.SECONDS);
assertEquals(replicaLevel, client.getReplicaLevel("replicatest"));
} catch (NoNodeAvailableException e) {
logger.warn("skipping, no node available");
} finally {
client.close();
if (client.getBulkController().getLastBulkError() != null) {
logger.error("error", client.getBulkController().getLastBulkError());
}
assertNull(client.getBulkController().getLastBulkError());
}
}
}

@ -4,7 +4,8 @@ import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.client.transport.NoNodeAvailableException;
import org.elasticsearch.common.settings.Settings;
import org.junit.Test;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.xbib.elx.common.ClientBuilder;
import org.xbib.elx.api.IndexDefinition;
import org.xbib.elx.node.ExtendedNodeClient;
@ -12,47 +13,46 @@ import org.xbib.elx.node.ExtendedNodeClientProvider;
import java.util.concurrent.TimeUnit;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
public class SmokeTest extends TestBase {
@ExtendWith(TestExtension.class)
class SmokeTest {
private static final Logger logger = LogManager.getLogger(SmokeTest.class.getName());
private final TestExtension.Helper helper;
SmokeTest(TestExtension.Helper helper) {
this.helper = helper;
}
@Test
public void smokeTest() throws Exception {
final ExtendedNodeClient client = ClientBuilder.builder(client("1"))
void smokeTest() throws Exception {
final ExtendedNodeClient client = ClientBuilder.builder(helper.client("1"))
.provider(ExtendedNodeClientProvider.class)
.build();
try {
client.newIndex("test");
client.index("test", "1", true, "{ \"name\" : \"Hello World\"}"); // single doc ingest
client.newIndex("test_smoke");
client.index("test_smoke", "1", true, "{ \"name\" : \"Hello World\"}"); // single doc ingest
client.flush();
client.waitForResponses(30, TimeUnit.SECONDS);
assertEquals(getClusterName(), client.getClusterName());
client.checkMapping("test");
client.update("test", "1", "{ \"name\" : \"Another name\"}");
assertEquals(helper.getCluster(), client.getClusterName());
client.checkMapping("test_smoke");
client.update("test_smoke", "1", "{ \"name\" : \"Another name\"}");
client.flush();
client.waitForRecovery("test", 10L, TimeUnit.SECONDS);
client.delete("test", "1");
client.deleteIndex("test");
IndexDefinition indexDefinition = client.buildIndexDefinitionFromSettings("test2", Settings.settingsBuilder()
client.waitForRecovery("test_smoke", 10L, TimeUnit.SECONDS);
client.delete("test_smoke", "1");
client.deleteIndex("test_smoke");
IndexDefinition indexDefinition = client.buildIndexDefinitionFromSettings("test_smoke", Settings.settingsBuilder()
.build());
assertEquals(0, indexDefinition.getReplicaLevel());
client.newIndex(indexDefinition);
client.index(indexDefinition.getFullIndexName(), "1", true, "{ \"name\" : \"Hello World\"}");
client.flush();
client.updateReplicaLevel(indexDefinition, 2);
int replica = client.getReplicaLevel(indexDefinition);
assertEquals(2, replica);
client.deleteIndex(indexDefinition);
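// four bulk actions were issued above: two single-doc indexes, one update, and one delete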
assertEquals(0, client.getBulkMetric().getFailed().getCount());
assertEquals(4, client.getBulkMetric().getSucceeded().getCount());

@ -1,212 +0,0 @@
package org.xbib.elx.node.test;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchTimeoutException;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse;
import org.elasticsearch.action.admin.cluster.state.ClusterStateAction;
import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest;
import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
import org.elasticsearch.client.support.AbstractClient;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.node.Node;
import org.junit.After;
import org.junit.Before;
import java.io.IOException;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.HashMap;
import java.util.Map;
import java.util.Random;
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
public class TestBase {
private static final Logger logger = LogManager.getLogger("test");
private static final Random random = new Random();
private static final char[] numbersAndLetters = ("0123456789abcdefghijklmnopqrstuvwxyz").toCharArray();
private Map<String, Node> nodes = new HashMap<>();
private Map<String, AbstractClient> clients = new HashMap<>();
private String cluster;
private String host;
private int port;
@Before
public void startNodes() {
try {
logger.info("starting");
setClusterName("test-cluster-" + System.getProperty("user.name"));
startNode("1");
findNodeAddress();
try {
ClusterHealthResponse healthResponse = client("1").execute(ClusterHealthAction.INSTANCE,
new ClusterHealthRequest().waitForStatus(ClusterHealthStatus.GREEN)
.timeout(TimeValue.timeValueSeconds(30))).actionGet();
if (healthResponse != null && healthResponse.isTimedOut()) {
throw new IOException("cluster state is " + healthResponse.getStatus().name()
+ ", from here on, everything will fail!");
}
} catch (ElasticsearchTimeoutException e) {
throw new IOException("cluster does not respond to health request, cowardly refusing to continue");
}
ClusterStateRequest clusterStateRequest = new ClusterStateRequest().all();
ClusterStateResponse clusterStateResponse =
client("1").execute(ClusterStateAction.INSTANCE, clusterStateRequest).actionGet();
logger.info("cluster name = {}", clusterStateResponse.getClusterName().value());
logger.info("host = {} port = {}", host, port);
} catch (Throwable t) {
logger.error("startNodes failed", t);
}
}
@After
public void stopNodes() {
try {
closeNodes();
} catch (Exception e) {
logger.error("can not close nodes", e);
} finally {
try {
deleteFiles();
logger.info("data files wiped");
Thread.sleep(2000L); // let OS commit changes
} catch (IOException e) {
logger.error(e.getMessage(), e);
} catch (InterruptedException e) {
// ignore
}
}
}
protected void setClusterName(String cluster) {
this.cluster = cluster;
}
protected String getClusterName() {
return cluster;
}
protected Settings getTransportSettings() {
return settingsBuilder()
.put("host", host)
.put("port", port)
.put("cluster.name", cluster)
.put("path.home", getHome())
.build();
}
protected Settings getNodeSettings() {
return settingsBuilder()
.put("cluster.name", cluster)
//.put("cluster.routing.schedule", "50ms")
//.put("cluster.routing.allocation.disk.threshold_enabled", false)
//.put("discovery.zen.multicast.enabled", true)
//.put("discovery.zen.multicast.ping_timeout", "5s")
//.put("http.enabled", true)
//.put("threadpool.bulk.size", Runtime.getRuntime().availableProcessors())
//.put("threadpool.bulk.queue_size", 16 * Runtime.getRuntime().availableProcessors()) // default is 50, too low
//.put("index.number_of_replicas", 0)
.put("path.home", getHome())
.build();
}
protected static String getHome() {
return System.getProperty("path.home", System.getProperty("user.dir"));
}
protected void startNode(String id) {
buildNode(id).start();
}
protected AbstractClient client(String id) {
return clients.get(id);
}
protected void findNodeAddress() {
NodesInfoRequest nodesInfoRequest = new NodesInfoRequest().transport(true);
NodesInfoResponse response = client("1").admin().cluster().nodesInfo(nodesInfoRequest).actionGet();
Object obj = response.iterator().next().getTransport().getAddress()
.publishAddress();
if (obj instanceof InetSocketTransportAddress) {
InetSocketTransportAddress address = (InetSocketTransportAddress) obj;
host = address.address().getHostName();
port = address.address().getPort();
}
}
private Node buildNode(String id) {
Settings nodeSettings = settingsBuilder()
.put(getNodeSettings())
.put("name", id)
.build();
Node node = new MockNode(nodeSettings);
AbstractClient client = (AbstractClient) node.client();
nodes.put(id, node);
clients.put(id, client);
logger.info("clients={}", clients);
return node;
}
protected String randomString(int len) {
final char[] buf = new char[len];
final int n = numbersAndLetters.length - 1;
for (int i = 0; i < buf.length; i++) {
buf[i] = numbersAndLetters[random.nextInt(n)];
}
return new String(buf);
}
private void closeNodes() {
logger.info("closing all clients");
for (AbstractClient client : clients.values()) {
client.close();
}
clients.clear();
logger.info("closing all nodes");
for (Node node : nodes.values()) {
if (node != null) {
node.close();
}
}
nodes.clear();
logger.info("all nodes closed");
}
private static void deleteFiles() throws IOException {
Path directory = Paths.get(getHome() + "/data");
Files.walkFileTree(directory, new SimpleFileVisitor<Path>() {
@Override
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
Files.delete(file);
return FileVisitResult.CONTINUE;
}
@Override
public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException {
Files.delete(dir);
return FileVisitResult.CONTINUE;
}
});
}
}

@ -0,0 +1,213 @@
package org.xbib.elx.node.test;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchTimeoutException;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoAction;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse;
import org.elasticsearch.action.admin.cluster.state.ClusterStateAction;
import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest;
import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.client.support.AbstractClient;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.node.Node;
import org.junit.jupiter.api.extension.AfterAllCallback;
import org.junit.jupiter.api.extension.BeforeAllCallback;
import org.junit.jupiter.api.extension.ExtensionContext;
import org.junit.jupiter.api.extension.ParameterContext;
import org.junit.jupiter.api.extension.ParameterResolutionException;
import org.junit.jupiter.api.extension.ParameterResolver;
import java.io.IOException;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.HashMap;
import java.util.Map;
import java.util.Random;
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
public class TestExtension implements ParameterResolver, BeforeAllCallback, AfterAllCallback {
private static final Logger logger = LogManager.getLogger("test");
private static final Random random = new Random();
private static final char[] numbersAndLetters = ("0123456789abcdefghijklmnopqrstuvwxyz").toCharArray();
private Map<String, Node> nodes = new HashMap<>();
private Map<String, AbstractClient> clients = new HashMap<>();
private String home;
private String cluster;
private String host;
private int port;
private static final String key = "es-instance";
private static final ExtensionContext.Namespace ns =
ExtensionContext.Namespace.create(TestExtension.class);
@Override
public boolean supportsParameter(ParameterContext parameterContext, ExtensionContext extensionContext)
throws ParameterResolutionException {
return parameterContext.getParameter().getType().equals(Helper.class);
}
@Override
public Object resolveParameter(ParameterContext parameterContext, ExtensionContext extensionContext)
throws ParameterResolutionException {
return extensionContext.getParent().get().getStore(ns).getOrComputeIfAbsent(key, key -> create());
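// store lookups fall back to parent contexts, so the Helper created in beforeAll() is the
// same instance that gets injected into test class constructors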
}
@Override
public void beforeAll(ExtensionContext context) throws Exception {
Helper helper = context.getParent().get().getStore(ns).getOrComputeIfAbsent(key, key -> create(), Helper.class);
setHome(System.getProperty("path.home") + "/" + helper.randomString(8));
setClusterName("test-cluster-" + System.getProperty("user.name"));
deleteFiles(Paths.get(getHome() + "/data"));
logger.info("data files wiped");
Thread.sleep(2000L); // let OS commit changes
logger.info("starting cluster");
helper.startNode("1");
NodesInfoRequest nodesInfoRequest = new NodesInfoRequest().transport(true);
NodesInfoResponse response = helper.client("1"). execute(NodesInfoAction.INSTANCE, nodesInfoRequest).actionGet();
Object obj = response.iterator().next().getTransport().getAddress()
.publishAddress();
if (obj instanceof InetSocketTransportAddress) {
InetSocketTransportAddress address = (InetSocketTransportAddress) obj;
host = address.address().getHostName();
port = address.address().getPort();
}
try {
ClusterHealthResponse healthResponse = helper.client("1").execute(ClusterHealthAction.INSTANCE,
new ClusterHealthRequest().waitForStatus(ClusterHealthStatus.GREEN)
.timeout(TimeValue.timeValueSeconds(30))).actionGet();
if (healthResponse != null && healthResponse.isTimedOut()) {
throw new IOException("cluster state is " + healthResponse.getStatus().name()
+ ", from here on, everything will fail!");
}
} catch (ElasticsearchTimeoutException e) {
throw new IOException("cluster does not respond to health request, cowardly refusing to continue");
}
ClusterStateRequest clusterStateRequest = new ClusterStateRequest().all();
ClusterStateResponse clusterStateResponse =
helper.client("1").execute(ClusterStateAction.INSTANCE, clusterStateRequest).actionGet();
logger.info("cluster name = {}", clusterStateResponse.getClusterName().value());
logger.info("host = {} port = {}", host, port);
}
@Override
public void afterAll(ExtensionContext context) throws Exception {
closeNodes();
deleteFiles(Paths.get(getHome() + "/data"));
}
private void setClusterName(String cluster) {
this.cluster = cluster;
}
private String getClusterName() {
return cluster;
}
private void setHome(String home) {
this.home = home;
}
private String getHome() {
return home;
}
private void closeNodes() {
logger.info("closing all clients");
for (AbstractClient client : clients.values()) {
client.close();
}
clients.clear();
logger.info("closing all nodes");
for (Node node : nodes.values()) {
if (node != null) {
node.close();
}
}
nodes.clear();
logger.info("all nodes closed");
}
private static void deleteFiles(Path directory) throws IOException {
if (Files.exists(directory)) {
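// depth-first walk: delete the files first, then each directory on the way back up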
Files.walkFileTree(directory, new SimpleFileVisitor<Path>() {
@Override
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
Files.delete(file);
return FileVisitResult.CONTINUE;
}
@Override
public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException {
Files.delete(dir);
return FileVisitResult.CONTINUE;
}
});
}
}
private Helper create() {
return new Helper();
}
class Helper {
void startNode(String id) {
buildNode(id).start();
}
private Node buildNode(String id) {
Settings nodeSettings = settingsBuilder()
.put("cluster.name", getClusterName())
.put("path.home", getHome())
.put("name", id)
.build();
Node node = new MockNode(nodeSettings);
AbstractClient client = (AbstractClient) node.client();
nodes.put(id, node);
clients.put(id, client);
logger.info("clients={}", clients);
return node;
}
String randomString(int len) {
final char[] buf = new char[len];
final int n = numbersAndLetters.length - 1;
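// note: nextInt(n) with n = length - 1 never yields the last character ('z')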
for (int i = 0; i < buf.length; i++) {
buf[i] = numbersAndLetters[random.nextInt(n)];
}
return new String(buf);
}
ElasticsearchClient client(String id) {
return clients.get(id);
}
String getCluster() {
return getClusterName();
}
}
}
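For reference, a minimal sketch of how a migrated test class consumes this extension; the class and method names here are illustrative, not part of the commit:

package org.xbib.elx.node.test;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
@ExtendWith(TestExtension.class)
class ExampleTest {
    private final TestExtension.Helper helper;
    // JUnit 5 resolves this parameter through TestExtension#resolveParameter
    ExampleTest(TestExtension.Helper helper) {
        this.helper = helper;
    }
    @Test
    void usesSharedCluster() {
        // the helper hands out clients for the node started in beforeAll()
        helper.client("1");
    }
}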

@ -1,150 +0,0 @@
package org.xbib.elx.transport;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.admin.indices.stats.CommonStats;
import org.elasticsearch.action.admin.indices.stats.IndexShardStats;
import org.elasticsearch.action.admin.indices.stats.IndexStats;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequestBuilder;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
import org.elasticsearch.action.search.SearchAction;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.client.transport.NoNodeAvailableException;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.indexing.IndexingStats;
import org.junit.Test;
import org.xbib.elx.common.ClientBuilder;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
public class ReplicaTest extends TestBase {
private static final Logger logger = LogManager.getLogger(ReplicaTest.class.getName());
@Test
public void testReplicaLevel() throws Exception {
// we need nodes for replica levels
startNode("2");
startNode("3");
startNode("4");
Settings settingsTest1 = Settings.settingsBuilder()
.put("index.number_of_shards", 2)
.put("index.number_of_replicas", 3)
.build();
Settings settingsTest2 = Settings.settingsBuilder()
.put("index.number_of_shards", 2)
.put("index.number_of_replicas", 1)
.build();
final ExtendedTransportClient client = ClientBuilder.builder()
.provider(ExtendedTransportClientProvider.class)
.put(getTransportSettings())
.build();
try {
client.newIndex("test1", settingsTest1, new HashMap<>())
.newIndex("test2", settingsTest2, new HashMap<>());
client.waitForCluster("GREEN", 30L, TimeUnit.SECONDS);
for (int i = 0; i < 1234; i++) {
client.index("test1", null, false, "{ \"name\" : \"" + randomString(32) + "\"}");
}
for (int i = 0; i < 1234; i++) {
client.index("test2", null, false, "{ \"name\" : \"" + randomString(32) + "\"}");
}
client.flush();
client.waitForResponses(30L, TimeUnit.SECONDS);
client.refreshIndex("test1");
client.refreshIndex("test2");
} catch (NoNodeAvailableException e) {
logger.warn("skipping, no node available");
} finally {
SearchRequestBuilder searchRequestBuilder = new SearchRequestBuilder(client.getClient(), SearchAction.INSTANCE)
.setIndices("test1", "test2")
.setQuery(matchAllQuery());
long hits = searchRequestBuilder.execute().actionGet().getHits().getTotalHits();
logger.info("query total hits={}", hits);
assertEquals(2468, hits);
// TODO move to api
IndicesStatsRequestBuilder indicesStatsRequestBuilder = new IndicesStatsRequestBuilder(client.getClient(),
IndicesStatsAction.INSTANCE).all();
IndicesStatsResponse response = indicesStatsRequestBuilder.execute().actionGet();
for (Map.Entry<String, IndexStats> m : response.getIndices().entrySet()) {
IndexStats indexStats = m.getValue();
CommonStats commonStats = indexStats.getTotal();
IndexingStats indexingStats = commonStats.getIndexing();
IndexingStats.Stats stats = indexingStats.getTotal();
logger.info("index {}: count = {}", m.getKey(), stats.getIndexCount());
for (Map.Entry<Integer, IndexShardStats> me : indexStats.getIndexShards().entrySet()) {
IndexShardStats indexShardStats = me.getValue();
CommonStats commonShardStats = indexShardStats.getTotal();
logger.info("shard {} count = {}", me.getKey(),
commonShardStats.getIndexing().getTotal().getIndexCount());
}
}
try {
client.deleteIndex("test1").deleteIndex("test2");
} catch (Exception e) {
logger.error("delete index failed, ignored. Reason:", e);
}
if (client.getBulkController().getLastBulkError() != null) {
logger.error("error", client.getBulkController().getLastBulkError());
}
assertNull(client.getBulkController().getLastBulkError());
client.close();
}
}
@Test
public void testUpdateReplicaLevel() throws Exception {
long numberOfShards = 2;
int replicaLevel = 3;
// we need 3 nodes for replica level 3
startNode("2");
startNode("3");
int shardsAfterReplica;
final ExtendedTransportClient client = ClientBuilder.builder()
.provider(ExtendedTransportClientProvider.class)
.put(getTransportSettings())
.build();
Settings settings = Settings.settingsBuilder()
.put("index.number_of_shards", numberOfShards)
.put("index.number_of_replicas", 0)
.build();
try {
client.newIndex("replicatest", settings, new HashMap<>());
client.waitForCluster("GREEN", 30L, TimeUnit.SECONDS);
for (int i = 0; i < 12345; i++) {
client.index("replicatest", null, false, "{ \"name\" : \"" + randomString(32) + "\"}");
}
client.flush();
client.waitForResponses(30L, TimeUnit.SECONDS);
client.updateReplicaLevel("replicatest", replicaLevel, 30L, TimeUnit.SECONDS);
assertEquals(replicaLevel, client.getReplicaLevel("replicatest"));
} catch (NoNodeAvailableException e) {
logger.warn("skipping, no node available");
} finally {
client.close();
if (client.getBulkController().getLastBulkError() != null) {
logger.error("error", client.getBulkController().getLastBulkError());
}
assertNull(client.getBulkController().getLastBulkError());
}
}
}

@ -1,4 +1,4 @@
package org.xbib.elx.transport;
package org.xbib.elx.transport.test;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
@ -13,88 +13,55 @@ import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.junit.Before;
import org.junit.Test;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.xbib.elx.common.ClientBuilder;
import org.xbib.elx.common.Parameters;
import org.xbib.elx.transport.ExtendedTransportClient;
import org.xbib.elx.transport.ExtendedTransportClientProvider;
import java.util.HashMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
public class ClientTest extends TestBase {
@ExtendWith(TestExtension.class)
class ClientTest {
private static final Logger logger = LogManager.getLogger(ClientTest.class.getName());
private static final Long MAX_ACTIONS_PER_REQUEST = 1000L;
private static final Long ACTIONS = 100L;
private static final Long ACTIONS = 1234L;
private static final Long MAX_ACTIONS_PER_REQUEST = 1000L;
@Before
public void startNodes() {
try {
super.startNodes();
startNode("2");
} catch (Throwable t) {
logger.error("startNodes failed", t);
}
}
private final TestExtension.Helper helper;
@Test
public void testClientIndexOp() throws Exception {
final ExtendedTransportClient client = ClientBuilder.builder()
.provider(ExtendedTransportClientProvider.class)
.put(getTransportSettings())
.put(Parameters.FLUSH_INTERVAL.name(), TimeValue.timeValueSeconds(60))
.build();
client.newIndex("test");
try {
client.deleteIndex("test")
.newIndex("test")
.deleteIndex("test");
} catch (NoNodeAvailableException e) {
logger.error("no node available");
} finally {
client.close();
}
ClientTest(TestExtension.Helper helper) {
this.helper = helper;
helper.startNode("2");
}
@Test
public void testSingleDoc() throws Exception {
void testClientIndexOp() throws Exception {
final ExtendedTransportClient client = ClientBuilder.builder()
.provider(ExtendedTransportClientProvider.class)
.put(getTransportSettings())
.put(Parameters.MAX_ACTIONS_PER_REQUEST.name(), MAX_ACTIONS_PER_REQUEST)
.put(helper.getTransportSettings())
.put(Parameters.FLUSH_INTERVAL.name(), TimeValue.timeValueSeconds(60))
.build();
try {
client.newIndex("test");
client.index("test", "1", true, "{ \"name\" : \"Hello World\"}");
client.flush();
client.waitForResponses(30L, TimeUnit.SECONDS);
} catch (NoNodeAvailableException e) {
logger.warn("skipping, no node available");
} finally {
assertEquals(1, client.getBulkMetric().getSucceeded().getCount());
if (client.getBulkController().getLastBulkError() != null) {
logger.error("error", client.getBulkController().getLastBulkError());
}
assertNull(client.getBulkController().getLastBulkError());
client.close();
}
client.newIndex("test1");
client.close();
}
@Test
public void testMapping() throws Exception {
void testMapping() throws Exception {
final ExtendedTransportClient client = ClientBuilder.builder()
.provider(ExtendedTransportClientProvider.class)
.put(getTransportSettings())
.put(helper.getTransportSettings())
.put(Parameters.FLUSH_INTERVAL.name(), TimeValue.timeValueSeconds(5))
.build();
XContentBuilder builder = jsonBuilder()
@ -107,31 +74,57 @@ public class ClientTest extends TestBase {
.endObject()
.endObject()
.endObject();
client.newIndex("test", Settings.EMPTY, builder.string());
GetMappingsRequest getMappingsRequest = new GetMappingsRequest().indices("test");
client.newIndex("test2", Settings.EMPTY, builder.string());
GetMappingsRequest getMappingsRequest = new GetMappingsRequest().indices("test2");
GetMappingsResponse getMappingsResponse =
client.getClient().execute(GetMappingsAction.INSTANCE, getMappingsRequest).actionGet();
logger.info("mappings={}", getMappingsResponse.getMappings());
assertTrue(getMappingsResponse.getMappings().get("test").containsKey("doc"));
assertTrue(getMappingsResponse.getMappings().get("test2").containsKey("doc"));
client.close();
}
@Test
public void testRandomDocs() throws Exception {
void testSingleDoc() throws Exception {
final ExtendedTransportClient client = ClientBuilder.builder()
.provider(ExtendedTransportClientProvider.class)
.put(helper.getTransportSettings())
.put(Parameters.MAX_ACTIONS_PER_REQUEST.name(), MAX_ACTIONS_PER_REQUEST)
.put(Parameters.FLUSH_INTERVAL.name(), TimeValue.timeValueSeconds(60))
.build();
try {
client.newIndex("test3");
client.index("test3", "1", true, "{ \"name\" : \"Hello World\"}");
client.flush();
client.waitForResponses(30L, TimeUnit.SECONDS);
} catch (NoNodeAvailableException e) {
logger.warn("skipping, no node available");
} finally {
assertEquals(1, client.getBulkMetric().getSucceeded().getCount());
if (client.getBulkController().getLastBulkError() != null) {
logger.error("error", client.getBulkController().getLastBulkError());
}
assertNull(client.getBulkController().getLastBulkError());
client.close();
}
}
@Test
void testRandomDocs() throws Exception {
long numactions = ACTIONS;
final ExtendedTransportClient client = ClientBuilder.builder()
.provider(ExtendedTransportClientProvider.class)
.put(getTransportSettings())
.put(helper.getTransportSettings())
.put(Parameters.MAX_ACTIONS_PER_REQUEST.name(), MAX_ACTIONS_PER_REQUEST)
.put(Parameters.FLUSH_INTERVAL.name(), TimeValue.timeValueSeconds(60))
.build();
try {
client.newIndex("test");
client.newIndex("test4");
for (int i = 0; i < ACTIONS; i++) {
client.index("test", null, false, "{ \"name\" : \"" + randomString(32) + "\"}");
client.index("test4", null, false,
"{ \"name\" : \"" + helper.randomString(32) + "\"}");
}
client.flush();
client.waitForResponses(30L, TimeUnit.SECONDS);
client.waitForResponses(60L, TimeUnit.SECONDS);
} catch (NoNodeAvailableException e) {
logger.warn("skipping, no node available");
} finally {
@ -140,37 +133,40 @@ public class ClientTest extends TestBase {
logger.error("error", client.getBulkController().getLastBulkError());
}
assertNull(client.getBulkController().getLastBulkError());
client.refreshIndex("test4");
SearchRequestBuilder searchRequestBuilder = new SearchRequestBuilder(client.getClient(), SearchAction.INSTANCE)
.setIndices("test4")
.setQuery(QueryBuilders.matchAllQuery())
.setSize(0);
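// size 0: only the total hit count is needed, not the documents themselves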
assertEquals(numactions,
searchRequestBuilder.execute().actionGet().getHits().getTotalHits());
client.close();
}
}
@Test
public void testThreadedRandomDocs() throws Exception {
@Disabled
void testThreadedRandomDocs() throws Exception {
int maxthreads = Runtime.getRuntime().availableProcessors();
long maxactions = MAX_ACTIONS_PER_REQUEST;
final long maxloop = ACTIONS;
Settings settingsForIndex = Settings.settingsBuilder()
.put("index.number_of_shards", 2)
.put("index.number_of_replicas", 1)
.build();
final ExtendedTransportClient client = ClientBuilder.builder()
.provider(ExtendedTransportClientProvider.class)
.put(getTransportSettings())
.put(helper.getTransportSettings())
.put(Parameters.MAX_ACTIONS_PER_REQUEST.name(), maxactions)
.put(Parameters.FLUSH_INTERVAL.name(), TimeValue.timeValueSeconds(60))
.build();
try {
client.newIndex("test", settingsForIndex, new HashMap<>())
.startBulk("test", -1, 1000);
ThreadPoolExecutor pool = EsExecutors.newFixed("bulkclient-test", maxthreads, 30,
EsExecutors.daemonThreadFactory("bulkclient-test"));
client.newIndex("test5")
.startBulk("test5", -1, 1000);
ThreadPoolExecutor pool = EsExecutors.newFixed("transportclient-test", maxthreads, 30,
EsExecutors.daemonThreadFactory("transportclient-test"));
final CountDownLatch latch = new CountDownLatch(maxthreads);
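// each worker indexes maxloop documents and then counts down; the main thread waits on the latch below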
for (int i = 0; i < maxthreads; i++) {
pool.execute(() -> {
for (int i1 = 0; i1 < maxloop; i1++) {
client.index("test",null, false, "{ \"name\" : \"" + randomString(32) + "\"}");
client.index("test5",null, false,
"{ \"name\" : \"" + helper.randomString(32) + "\"}");
}
latch.countDown();
});
@ -179,25 +175,25 @@ public class ClientTest extends TestBase {
if (latch.await(60, TimeUnit.SECONDS)) {
logger.info("flush ...");
client.flush();
client.waitForResponses(30L, TimeUnit.SECONDS);
logger.info("pool to be shut down ...");
client.waitForResponses(60L, TimeUnit.SECONDS);
logger.info("pool shutdown ...");
pool.shutdown();
logger.info("poot shut down");
logger.info("poot is shut down");
} else {
logger.warn("latch timeout");
}
client.stopBulk("test", 30L, TimeUnit.SECONDS);
assertEquals(maxthreads * maxloop, client.getBulkMetric().getSucceeded().getCount());
} catch (NoNodeAvailableException e) {
logger.warn("skipping, no node available");
} finally {
client.stopBulk("test5", 60L, TimeUnit.SECONDS);
assertEquals(maxthreads * maxloop, client.getBulkMetric().getSucceeded().getCount());
if (client.getBulkController().getLastBulkError() != null) {
logger.error("error", client.getBulkController().getLastBulkError());
}
assertNull(client.getBulkController().getLastBulkError());
// extra search lookup
client.refreshIndex("test");
client.refreshIndex("test5");
SearchRequestBuilder searchRequestBuilder = new SearchRequestBuilder(client.getClient(), SearchAction.INSTANCE)
// to avoid NPE at org.elasticsearch.action.search.SearchRequest.writeTo(SearchRequest.java:580)
.setIndices("_all")
.setIndices("test5")
.setQuery(QueryBuilders.matchAllQuery())
.setSize(0);
assertEquals(maxthreads * maxloop,

@ -1,4 +1,4 @@
package org.xbib.elx.transport;
package org.xbib.elx.transport.test;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
@ -7,47 +7,57 @@ import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.client.transport.NoNodeAvailableException;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.junit.Test;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.xbib.elx.common.ClientBuilder;
import org.xbib.elx.common.Parameters;
import org.xbib.elx.transport.ExtendedTransportClient;
import org.xbib.elx.transport.ExtendedTransportClientProvider;
import java.util.concurrent.TimeUnit;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
public class DuplicateIDTest extends TestBase {
@ExtendWith(TestExtension.class)
class DuplicateIDTest {
private final static Logger logger = LogManager.getLogger(DuplicateIDTest.class.getName());
private final static Long MAX_ACTIONS_PER_REQUEST = 1000L;
private final static Long MAX_ACTIONS_PER_REQUEST = 10L;
private final static Long ACTIONS = 12345L;
private final static Long ACTIONS = 5L;
private final TestExtension.Helper helper;
DuplicateIDTest(TestExtension.Helper helper) {
this.helper = helper;
}
@Test
public void testDuplicateDocIDs() throws Exception {
void testDuplicateDocIDs() throws Exception {
long numactions = ACTIONS;
final ExtendedTransportClient client = ClientBuilder.builder()
.provider(ExtendedTransportClientProvider.class)
.put(getTransportSettings())
.put(helper.getTransportSettings())
.put(Parameters.MAX_ACTIONS_PER_REQUEST.name(), MAX_ACTIONS_PER_REQUEST)
.build();
try {
client.newIndex("test");
client.newIndex("test_dup");
for (int i = 0; i < ACTIONS; i++) {
client.index("test", randomString(1), false, "{ \"name\" : \"" + randomString(32) + "\"}");
client.index("test_dup", helper.randomString(1), false,
"{ \"name\" : \"" + helper.randomString(32) + "\"}");
}
client.flush();
client.waitForResponses(30L, TimeUnit.SECONDS);
client.refreshIndex("test");
client.refreshIndex("test_dup");
SearchSourceBuilder builder = new SearchSourceBuilder();
builder.query(QueryBuilders.matchAllQuery());
SearchRequest searchRequest = new SearchRequest();
searchRequest.indices("test");
searchRequest.types("test");
searchRequest.indices("test_dup");
searchRequest.source(builder);
long hits = client("1").execute(SearchAction.INSTANCE, searchRequest).actionGet().getHits().getTotalHits();
long hits = helper.client("1").execute(SearchAction.INSTANCE, searchRequest).actionGet().getHits().getTotalHits();
logger.info("hits = {}", hits);
assertTrue(hits < ACTIONS);
} catch (NoNodeAvailableException e) {

@ -1,4 +1,4 @@
package org.xbib.elx.transport;
package org.xbib.elx.transport.test;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
@ -7,9 +7,12 @@ import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsReques
import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsResponse;
import org.elasticsearch.client.transport.NoNodeAvailableException;
import org.elasticsearch.common.settings.Settings;
import org.junit.Test;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.xbib.elx.api.IndexPruneResult;
import org.xbib.elx.common.ClientBuilder;
import org.xbib.elx.transport.ExtendedTransportClient;
import org.xbib.elx.transport.ExtendedTransportClientProvider;
import java.io.IOException;
import java.util.ArrayList;
@ -17,44 +20,51 @@ import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
public class IndexPruneTest extends TestBase {
@ExtendWith(TestExtension.class)
class IndexPruneTest {
private static final Logger logger = LogManager.getLogger(IndexPruneTest.class.getName());
private final TestExtension.Helper helper;
IndexPruneTest(TestExtension.Helper helper) {
this.helper = helper;
}
@Test
public void testPrune() throws IOException {
void testPrune() throws IOException {
final ExtendedTransportClient client = ClientBuilder.builder()
.provider(ExtendedTransportClientProvider.class)
.put(getTransportSettings())
.put(helper.getTransportSettings())
.build();
try {
Settings settings = Settings.builder()
.put("index.number_of_shards", 1)
.put("index.number_of_replicas", 0)
.build();
client.newIndex("test1", settings);
client.shiftIndex("test", "test1", Collections.emptyList());
client.newIndex("test2", settings);
client.shiftIndex("test", "test2", Collections.emptyList());
client.newIndex("test3", settings);
client.shiftIndex("test", "test3", Collections.emptyList());
client.newIndex("test4", settings);
client.shiftIndex("test", "test4", Collections.emptyList());
client.newIndex("test_prune1", settings);
client.shiftIndex("test_prune", "test_prune1", Collections.emptyList());
client.newIndex("test_prune2", settings);
client.shiftIndex("test_prune", "test_prune2", Collections.emptyList());
client.newIndex("test_prune3", settings);
client.shiftIndex("test_prune", "test_prune3", Collections.emptyList());
client.newIndex("test_prune4", settings);
client.shiftIndex("test_prune", "test_prune4", Collections.emptyList());
IndexPruneResult indexPruneResult =
client.pruneIndex("test", "test4", 2, 2, true);
client.pruneIndex("test_prune", "test_prune4", 2, 2, true);
assertTrue(indexPruneResult.getDeletedIndices().contains("test1"));
assertTrue(indexPruneResult.getDeletedIndices().contains("test2"));
assertFalse(indexPruneResult.getDeletedIndices().contains("test3"));
assertFalse(indexPruneResult.getDeletedIndices().contains("test4"));
assertTrue(indexPruneResult.getDeletedIndices().contains("test_prune1"));
assertTrue(indexPruneResult.getDeletedIndices().contains("test_prune2"));
assertFalse(indexPruneResult.getDeletedIndices().contains("test_prune3"));
assertFalse(indexPruneResult.getDeletedIndices().contains("test_prune4"));
List<Boolean> list = new ArrayList<>();
for (String index : Arrays.asList("test1", "test2", "test3", "test4")) {
for (String index : Arrays.asList("test_prune1", "test_prune2", "test_prune3", "test_prune4")) {
IndicesExistsRequest indicesExistsRequest = new IndicesExistsRequest();
indicesExistsRequest.indices(new String[] { index });
IndicesExistsResponse indicesExistsResponse =

@ -1,4 +1,4 @@
package org.xbib.elx.transport;
package org.xbib.elx.transport.test;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
@ -7,26 +7,36 @@ import org.elasticsearch.client.transport.NoNodeAvailableException;
import org.elasticsearch.cluster.metadata.AliasAction;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.query.QueryBuilders;
import org.junit.Test;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.xbib.elx.api.IndexShiftResult;
import org.xbib.elx.common.ClientBuilder;
import org.xbib.elx.transport.ExtendedTransportClient;
import org.xbib.elx.transport.ExtendedTransportClientProvider;
import java.util.Arrays;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
public class IndexShiftTest extends TestBase {
@ExtendWith(TestExtension.class)
class IndexShiftTest {
private static final Logger logger = LogManager.getLogger(IndexShiftTest.class.getName());
private final TestExtension.Helper helper;
IndexShiftTest(TestExtension.Helper helper) {
this.helper = helper;
}
@Test
public void testIndexAlias() throws Exception {
void testIndexAlias() throws Exception {
final ExtendedTransportClient client = ClientBuilder.builder()
.provider(ExtendedTransportClientProvider.class)
.put(getTransportSettings()).build();
.put(helper.getTransportSettings()).build();
try {
Settings settings = Settings.builder()
.put("index.number_of_shards", 1)
@ -34,13 +44,14 @@ public class IndexShiftTest extends TestBase {
.build();
client.newIndex("test1234", settings);
for (int i = 0; i < 1; i++) {
client.index("test1234", randomString(1), false, "{ \"name\" : \"" + randomString(32) + "\"}");
client.index("test1234", helper.randomString(1), false,
"{ \"name\" : \"" + helper.randomString(32) + "\"}");
}
client.flush();
client.waitForResponses(30L, TimeUnit.SECONDS);
IndexShiftResult indexShiftResult =
client.shiftIndex("test", "test1234", Arrays.asList("a", "b", "c"));
client.shiftIndex("test_shift", "test1234", Arrays.asList("a", "b", "c"));
assertTrue(indexShiftResult.getNewAliases().contains("a"));
assertTrue(indexShiftResult.getNewAliases().contains("b"));
@ -51,23 +62,24 @@ public class IndexShiftTest extends TestBase {
assertTrue(aliases.containsKey("a"));
assertTrue(aliases.containsKey("b"));
assertTrue(aliases.containsKey("c"));
assertTrue(aliases.containsKey("test"));
assertTrue(aliases.containsKey("test_shift"));
String resolved = client.resolveAlias("test");
String resolved = client.resolveAlias("test_shift");
aliases = client.getAliases(resolved);
assertTrue(aliases.containsKey("a"));
assertTrue(aliases.containsKey("b"));
assertTrue(aliases.containsKey("c"));
assertTrue(aliases.containsKey("test"));
assertTrue(aliases.containsKey("test_shift"));
client.newIndex("test5678", settings);
for (int i = 0; i < 1; i++) {
client.index("test5678", randomString(1), false, "{ \"name\" : \"" + randomString(32) + "\"}");
client.index("test5678", helper.randomString(1), false,
"{ \"name\" : \"" + helper.randomString(32) + "\"}");
}
client.flush();
client.waitForResponses(30L, TimeUnit.SECONDS);
indexShiftResult = client.shiftIndex("test", "test5678", Arrays.asList("d", "e", "f"),
indexShiftResult = client.shiftIndex("test_shift", "test5678", Arrays.asList("d", "e", "f"),
(request, index, alias) -> request.addAliasAction(new IndicesAliasesRequest.AliasActions(AliasAction.Type.ADD,
index, alias).filter(QueryBuilders.termQuery("my_key", alias)))
);
@ -86,7 +98,7 @@ public class IndexShiftTest extends TestBase {
assertTrue(aliases.containsKey("e"));
assertTrue(aliases.containsKey("f"));
resolved = client.resolveAlias("test");
resolved = client.resolveAlias("test_shift");
aliases = client.getAliases(resolved);
assertTrue(aliases.containsKey("a"));
assertTrue(aliases.containsKey("b"));
@ -98,11 +110,11 @@ public class IndexShiftTest extends TestBase {
} catch (NoNodeAvailableException e) {
logger.warn("skipping, no node available");
} finally {
client.close();
if (client.getBulkController().getLastBulkError() != null) {
logger.error("error", client.getBulkController().getLastBulkError());
}
assertNull(client.getBulkController().getLastBulkError());
client.close();
}
}
}

@ -0,0 +1,11 @@
package org.xbib.elx.transport.test;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.node.Node;
public class MockNode extends Node {
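// thin Node subclass used by the test harness (mirroring the node module's
// TestExtension#buildNode) to start an in-process node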
public MockNode(Settings settings) {
super(settings);
}
}

@ -1,46 +1,50 @@
package org.xbib.elx.transport;
package org.xbib.elx.transport.test;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.client.transport.NoNodeAvailableException;
import org.elasticsearch.common.settings.Settings;
import org.junit.Test;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.xbib.elx.api.IndexDefinition;
import org.xbib.elx.common.ClientBuilder;
import org.xbib.elx.transport.ExtendedTransportClient;
import org.xbib.elx.transport.ExtendedTransportClientProvider;
import java.util.concurrent.TimeUnit;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
public class SmokeTest extends TestBase {
@ExtendWith(TestExtension.class)
class SmokeTest {
private static final Logger logger = LogManager.getLogger(SmokeTest.class.getName());
private final TestExtension.Helper helper;
SmokeTest(TestExtension.Helper helper) {
this.helper = helper;
}
@Test
public void testSingleDocNodeClient() throws Exception {
void testSingleDocNodeClient() throws Exception {
final ExtendedTransportClient client = ClientBuilder.builder()
.provider(ExtendedTransportClientProvider.class)
.put(getTransportSettings())
.put(helper.getTransportSettings())
.build();
try {
client.newIndex("test");
client.index("test", "1", true, "{ \"name\" : \"Hello World\"}"); // single doc ingest
client.newIndex("test_smoke");
client.index("test_smoke", "1", true, "{ \"name\" : \"Hello World\"}"); // single doc ingest
client.flush();
client.waitForResponses(30, TimeUnit.SECONDS);
assertEquals(getClusterName(), client.getClusterName());
client.checkMapping("test");
client.update("test", "1", "{ \"name\" : \"Another name\"}");
assertEquals(helper.getCluster(), client.getClusterName());
client.checkMapping("test_smoke");
client.update("test_smoke", "1", "{ \"name\" : \"Another name\"}");
client.flush();
client.waitForRecovery("test", 10L, TimeUnit.SECONDS);
client.delete("test", "1");
client.deleteIndex("test");
client.waitForRecovery("test_smoke", 10L, TimeUnit.SECONDS);
client.delete("test_smoke", "1");
client.deleteIndex("test_smoke");
IndexDefinition indexDefinition = client.buildIndexDefinitionFromSettings("test2", Settings.settingsBuilder()
.build());
assertEquals(0, indexDefinition.getReplicaLevel());
@ -48,7 +52,6 @@ public class SmokeTest extends TestBase {
client.index(indexDefinition.getFullIndexName(), "1", true, "{ \"name\" : \"Hello World\"}");
client.flush();
client.updateReplicaLevel(indexDefinition, 2);
int replica = client.getReplicaLevel(indexDefinition);
assertEquals(2, replica);

@ -0,0 +1,229 @@
package org.xbib.elx.transport.test;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchTimeoutException;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoAction;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse;
import org.elasticsearch.action.admin.cluster.state.ClusterStateAction;
import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest;
import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.client.support.AbstractClient;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.node.Node;
import org.junit.jupiter.api.extension.AfterAllCallback;
import org.junit.jupiter.api.extension.BeforeAllCallback;
import org.junit.jupiter.api.extension.ExtensionContext;
import org.junit.jupiter.api.extension.ParameterContext;
import org.junit.jupiter.api.extension.ParameterResolutionException;
import org.junit.jupiter.api.extension.ParameterResolver;
import java.io.IOException;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.HashMap;
import java.util.Map;
import java.util.Random;
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
public class TestExtension implements ParameterResolver, BeforeAllCallback, AfterAllCallback {
private static final Logger logger = LogManager.getLogger("test");
private static final Random random = new Random();
private static final char[] numbersAndLetters = ("0123456789abcdefghijklmnopqrstuvwxyz").toCharArray();
private Map<String, Node> nodes = new HashMap<>();
private Map<String, AbstractClient> clients = new HashMap<>();
private String home;
private String cluster;
private String host;
private int port;
private static final String key = "es-instance";
private static final ExtensionContext.Namespace ns =
ExtensionContext.Namespace.create(TestExtension.class);
@Override
public boolean supportsParameter(ParameterContext parameterContext, ExtensionContext extensionContext)
throws ParameterResolutionException {
return parameterContext.getParameter().getType().equals(Helper.class);
}
@Override
public Object resolveParameter(ParameterContext parameterContext, ExtensionContext extensionContext)
throws ParameterResolutionException {
return extensionContext.getParent().get().getStore(ns).getOrComputeIfAbsent(key, k -> create());
}
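// Before all tests: wipe the data directory, start node "1", read its publish
// address, and wait up to 30 seconds for GREEN cluster health.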
@Override
public void beforeAll(ExtensionContext context) throws Exception {
Helper helper = context.getParent().get().getStore(ns).getOrComputeIfAbsent(key, k -> create(), Helper.class);
setHome(System.getProperty("path.home") + "/" + helper.randomString(8));
setClusterName("test-cluster-" + System.getProperty("user.name"));
deleteFiles(Paths.get(getHome() + "/data"));
logger.info("data files wiped: " + getHome());
Thread.sleep(2000L); // let OS commit changes
logger.info("starting cluster");
helper.startNode("1");
NodesInfoRequest nodesInfoRequest = new NodesInfoRequest().transport(true);
NodesInfoResponse response = helper.client("1").execute(NodesInfoAction.INSTANCE, nodesInfoRequest).actionGet();
Object obj = response.iterator().next().getTransport().getAddress()
.publishAddress();
if (obj instanceof InetSocketTransportAddress) {
InetSocketTransportAddress address = (InetSocketTransportAddress) obj;
host = address.address().getHostName();
port = address.address().getPort();
}
try {
ClusterHealthResponse healthResponse = helper.client("1").execute(ClusterHealthAction.INSTANCE,
new ClusterHealthRequest().waitForStatus(ClusterHealthStatus.GREEN)
.timeout(TimeValue.timeValueSeconds(30))).actionGet();
if (healthResponse != null && healthResponse.isTimedOut()) {
throw new IOException("cluster state is " + healthResponse.getStatus().name()
+ ", from here on, everything will fail!");
}
} catch (ElasticsearchTimeoutException e) {
throw new IOException("cluster does not respond to health request, cowardly refusing to continue");
}
ClusterStateRequest clusterStateRequest = new ClusterStateRequest().all();
ClusterStateResponse clusterStateResponse =
helper.client("1").execute(ClusterStateAction.INSTANCE, clusterStateRequest).actionGet();
logger.info("cluster name = {}", clusterStateResponse.getClusterName().value());
logger.info("host = {} port = {}", host, port);
}
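// After all tests: close clients and nodes, then delete the data directory.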
@Override
public void afterAll(ExtensionContext context) throws Exception {
closeNodes();
deleteFiles(Paths.get(getHome() + "/data"));
logger.info("cluster stopped");
}
private void setClusterName(String cluster) {
this.cluster = cluster;
}
private String getClusterName() {
return cluster;
}
private void setHome(String home) {
this.home = home;
}
private String getHome() {
return home;
}
private void closeNodes() {
logger.info("closing all clients");
for (AbstractClient client : clients.values()) {
client.close();
}
clients.clear();
logger.info("closing all nodes");
for (Node node : nodes.values()) {
if (node != null) {
node.close();
}
}
nodes.clear();
logger.info("all nodes closed");
}
private static void deleteFiles(Path directory) throws IOException {
if (Files.exists(directory)) {
Files.walkFileTree(directory, new SimpleFileVisitor<Path>() {
@Override
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
Files.delete(file);
return FileVisitResult.CONTINUE;
}
@Override
public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException {
Files.delete(dir);
return FileVisitResult.CONTINUE;
}
});
}
}
private Helper create() {
return new Helper();
}
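// Helper is what tests receive via parameter injection; it carries the node
// and transport settings and exposes the started nodes' clients.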
class Helper {
Settings getNodeSettings() {
return settingsBuilder()
.put("cluster.name", getClusterName())
.put("path.home", getHome())
.build();
}
Settings getTransportSettings() {
return settingsBuilder()
.put("host", host)
.put("port", port)
.put("cluster.name", getClusterName())
.put("path.home", getHome() + "/transport")
.build();
}
void startNode(String id) {
buildNode(id).start();
}
private Node buildNode(String id) {
Settings nodeSettings = settingsBuilder()
.put(getNodeSettings())
.put("name", id)
.build();
Node node = new MockNode(nodeSettings);
AbstractClient client = (AbstractClient) node.client();
nodes.put(id, node);
clients.put(id, client);
logger.info("clients={}", clients);
return node;
}
String randomString(int len) {
final char[] buf = new char[len];
for (int i = 0; i < buf.length; i++) {
// bound over the full alphabet; the previous length - 1 bound could never pick the last character
buf[i] = numbersAndLetters[random.nextInt(numbersAndLetters.length)];
}
return new String(buf);
}
ElasticsearchClient client(String id) {
return clients.get(id);
}
String getCluster() {
return getClusterName();
}
}
}

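For orientation, a test class would hook into this extension roughly as follows. This is a sketch under assumptions: the class name ClusterSmokeTest, the constructor injection, and the assertion are illustrative, not part of the commit.

package org.xbib.elx.transport.test;

import org.elasticsearch.client.ElasticsearchClient;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;

import static org.junit.jupiter.api.Assertions.assertNotNull;

@ExtendWith(TestExtension.class)
class ClusterSmokeTest {

    private final TestExtension.Helper helper;

    // Helper is injected by TestExtension's ParameterResolver (hypothetical usage)
    ClusterSmokeTest(TestExtension.Helper helper) {
        this.helper = helper;
    }

    @Test
    void clusterIsUp() {
        // node "1" is started in beforeAll(), so its client must be available
        ElasticsearchClient client = helper.client("1");
        assertNotNull(client);
    }
}

Constructor injection works here because JUnit Jupiter resolves constructor parameters through registered ParameterResolver extensions, and supportsParameter() above matches the Helper type.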
@ -1,18 +1,19 @@
group = org.xbib
name = elx
version = 2.2.1.7
version = 2.2.1.8
xbib-metrics.version = 1.2.0
xbib-guice.version = 4.0.4
xbib-netty-http.version = 4.1.35.0
elasticsearch.version = 2.2.1
jackson.version = 2.6.7
jna.version = 4.5.2
log4j.version = 2.11.1
mustache.version = 0.9.5
jts.version = 1.13
jackson-dataformat.version = 2.8.11
junit.version = 4.12
junit.version = 5.4.2
wagon.version = 3.0.0
asciidoclet.version = 1.5.4

Binary file not shown.

@ -1,6 +1,6 @@
#Fri Feb 15 11:59:10 CET 2019
#Thu May 02 09:39:03 CEST 2019
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-5.2.1-all.zip
distributionUrl=https\://services.gradle.org/distributions/gradle-5.3-all.zip
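A wrapper bump like this one is normally regenerated rather than edited by hand; with Gradle 5.x the following invocation (a sketch, assuming the stock wrapper task) rewrites gradle-wrapper.properties and the gradlew scripts in one step:

./gradlew wrapper --gradle-version 5.3 --distribution-type all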

gradlew vendored

@ -1,5 +1,21 @@
#!/usr/bin/env sh
#
# Copyright 2015 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
##############################################################################
##
## Gradle start up script for UN*X
@ -28,7 +44,7 @@ APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS='"-Xmx64m"'
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"

gradlew.bat vendored

@ -1,3 +1,19 @@
@rem
@rem Copyright 2015 the original author or authors.
@rem
@rem Licensed under the Apache License, Version 2.0 (the "License");
@rem you may not use this file except in compliance with the License.
@rem You may obtain a copy of the License at
@rem
@rem http://www.apache.org/licenses/LICENSE-2.0
@rem
@rem Unless required by applicable law or agreed to in writing, software
@rem distributed under the License is distributed on an "AS IS" BASIS,
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@rem See the License for the specific language governing permissions and
@rem limitations under the License.
@rem
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@ -14,7 +30,7 @@ set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS="-Xmx64m"
set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome

@ -2,4 +2,3 @@ include 'elx-api'
include 'elx-common'
include 'elx-node'
include 'elx-transport'
include 'elx-http'
