disable bulk logging by default, update to Gradle 5.3.1
This commit is contained in:
parent 7338f4bce1
commit d71c164174
8 changed files with 99 additions and 97 deletions
82 build.gradle
@@ -5,20 +5,6 @@ plugins {
    id "org.xbib.gradle.plugin.asciidoctor" version "1.5.6.0.1"
}

printf "Host: %s\nOS: %s %s %s\nJVM: %s %s %s %s\nGradle: %s Groovy: %s Java: %s\n" +
        "Build: group: ${project.group} name: ${project.name} version: ${project.version}\n",
        InetAddress.getLocalHost(),
        System.getProperty("os.name"),
        System.getProperty("os.arch"),
        System.getProperty("os.version"),
        System.getProperty("java.version"),
        System.getProperty("java.vm.version"),
        System.getProperty("java.vm.vendor"),
        System.getProperty("java.vm.name"),
        gradle.gradleVersion,
        GroovySystem.getVersion(),
        JavaVersion.current()

if (JavaVersion.current() < JavaVersion.VERSION_11) {
    throw new GradleException("The build must be run with Java 11")
}

@@ -34,7 +20,6 @@ subprojects {

    configurations {
        asciidoclet
        wagon
    }

    dependencies {

@@ -44,7 +29,6 @@ subprojects {
        testImplementation "org.apache.logging.log4j:log4j-jul:${project.property('log4j.version')}"
        testImplementation "org.apache.logging.log4j:log4j-slf4j-impl:${project.property('log4j.version')}"
        asciidoclet "org.xbib:asciidoclet:${project.property('asciidoclet.version')}"
        wagon "org.apache.maven.wagon:wagon-ssh:${project.property('wagon.version')}"
    }

    compileJava {

@@ -110,8 +94,6 @@ subprojects {
        archives javadocJar, sourcesJar
    }

    apply from: "${rootProject.projectDir}/gradle/publish.gradle"

    spotbugs {
        effort = "max"
        reportLevel = "low"

@@ -162,4 +144,68 @@ subprojects {
            property "sonar.junit.reportsPath", "build/test-results/test/"
        }
    }

    ext {
        description = 'Extensions for Elasticsearch clients (node and transport)'
        scmUrl = 'https://github.com/jprante/elx'
        scmConnection = 'scm:git:git://github.com/jprante/elx.git'
        scmDeveloperConnection = 'scm:git:git://github.com/jprante/elx.git'
    }

    task sonaTypeUpload(type: Upload, dependsOn: build) {
        group = 'publish'
        configuration = configurations.archives
        uploadDescriptor = true
        repositories {
            if (project.hasProperty('ossrhUsername')) {
                mavenDeployer {
                    beforeDeployment { MavenDeployment deployment -> signing.signPom(deployment) }
                    repository(url: 'https://oss.sonatype.org/service/local/staging/deploy/maven2') {
                        authentication(userName: ossrhUsername, password: ossrhPassword)
                    }
                    snapshotRepository(url: 'https://oss.sonatype.org/content/repositories/snapshots') {
                        authentication(userName: ossrhUsername, password: ossrhPassword)
                    }
                    pom.project {
                        groupId project.group
                        artifactId project.name
                        version project.version
                        name project.name
                        description description
                        packaging 'jar'
                        inceptionYear '2019'
                        url scmUrl
                        organization {
                            name 'xbib'
                            url 'http://xbib.org'
                        }
                        developers {
                            developer {
                                id 'xbib'
                                name 'Jörg Prante'
                                email 'joergprante@gmail.com'
                                url 'https://github.com/jprante'
                            }
                        }
                        scm {
                            url scmUrl
                            connection scmConnection
                            developerConnection scmDeveloperConnection
                        }
                        licenses {
                            license {
                                name 'The Apache License, Version 2.0'
                                url 'http://www.apache.org/licenses/LICENSE-2.0.txt'
                            }
                        }
                    }
                }
            }
        }
    }

    nexusStaging {
        packageGroup = "org.xbib"
    }

}
@@ -31,7 +31,7 @@ public interface BulkProcessor extends Closeable, Flushable {
    /**
     * A listener for the execution.
     */
    public interface Listener {
    interface Listener {

        /**
         * Callback before the bulk is executed.
@@ -49,6 +49,8 @@ public class DefaultBulkController implements BulkController {

    private AtomicBoolean active;

    private boolean enableBulkLogging;

    public DefaultBulkController(ExtendedClient client, BulkMetric bulkMetric) {
        this.client = client;
        this.bulkMetric = bulkMetric;

@@ -76,11 +78,8 @@ public class DefaultBulkController implements BulkController {
        ByteSizeValue maxVolumePerRequest = settings.getAsBytesSize(Parameters.MAX_VOLUME_PER_REQUEST.name(),
                ByteSizeValue.parseBytesSizeValue(Parameters.DEFAULT_MAX_VOLUME_PER_REQUEST.getString(),
                        "maxVolumePerRequest"));
        if (logger.isInfoEnabled()) {
            logger.info("bulk processor up with maxActionsPerRequest = {} maxConcurrentRequests = {} " +
                    "flushIngestInterval = {} maxVolumePerRequest = {}",
                    maxActionsPerRequest, maxConcurrentRequests, flushIngestInterval, maxVolumePerRequest);
        }
        this.enableBulkLogging = settings.getAsBoolean(Parameters.ENABLE_BULK_LOGGING.name(),
                Parameters.ENABLE_BULK_LOGGING.getValue());
        this.bulkListener = new BulkListener();
        this.bulkProcessor = DefaultBulkProcessor.builder(client.getClient(), bulkListener)
                .setBulkActions(maxActionsPerRequest)

@@ -89,6 +88,12 @@ public class DefaultBulkController implements BulkController {
                .setBulkSize(maxVolumePerRequest)
                .build();
        this.active.set(true);
        if (logger.isInfoEnabled()) {
            logger.info("bulk processor set up with maxActionsPerRequest = {} maxConcurrentRequests = {} " +
                    "flushIngestInterval = {} maxVolumePerRequest = {}, bulk logging = {}",
                    maxActionsPerRequest, maxConcurrentRequests, flushIngestInterval, maxVolumePerRequest,
                    enableBulkLogging);
        }
    }

    @Override

@@ -240,7 +245,7 @@ public class DefaultBulkController implements BulkController {

    private class BulkListener implements DefaultBulkProcessor.Listener {

        private final Logger logger = LogManager.getLogger("org.xbib.elx.BulkProcessor.Listener");
        private final Logger logger = LogManager.getLogger(BulkListener.class.getName());

        private Throwable lastBulkError = null;

@@ -255,7 +260,7 @@ public class DefaultBulkController implements BulkController {
                bulkMetric.getCurrentIngestNumDocs().inc(n);
                bulkMetric.getTotalIngestSizeInBytes().inc(request.estimatedSizeInBytes());
            }
            if (logger.isDebugEnabled()) {
            if (enableBulkLogging && logger.isDebugEnabled()) {
                logger.debug("before bulk [{}] [actions={}] [bytes={}] [concurrent requests={}]",
                        executionId,
                        request.numberOfActions(),

@@ -285,7 +290,7 @@ public class DefaultBulkController implements BulkController {
                    }
                }
            }
            if (bulkMetric != null && logger.isDebugEnabled()) {
            if (enableBulkLogging && bulkMetric != null && logger.isDebugEnabled()) {
                logger.debug("after bulk [{}] [succeeded={}] [failed={}] [{}ms] {} concurrent requests",
                        executionId,
                        bulkMetric.getSucceeded().getCount(),

@@ -294,7 +299,7 @@ public class DefaultBulkController implements BulkController {
                        l);
            }
            if (n > 0) {
                if (logger.isErrorEnabled()) {
                if (enableBulkLogging && logger.isErrorEnabled()) {
                    logger.error("bulk [{}] failed with {} failed items, failure message = {}",
                            executionId, n, response.buildFailureMessage());
                }

@@ -312,7 +317,7 @@ public class DefaultBulkController implements BulkController {
            }
            lastBulkError = failure;
            active.set(false);
            if (logger.isErrorEnabled()) {
            if (enableBulkLogging && logger.isErrorEnabled()) {
                logger.error("after bulk [" + executionId + "] error", failure);
            }
        }
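The change above boils down to one guard pattern: each log statement in the bulk listener now checks a configuration flag (read once from settings, false by default) in addition to the Log4j level, so bulk logging can be switched off entirely regardless of the Log4j configuration. A minimal standalone sketch of that pattern follows; the class and method names are stand-ins for illustration, not code from this commit.

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

// Stand-in class showing the guard used in the BulkListener callbacks:
// logging happens only if the feature flag AND the log level allow it.
class GuardedBulkLogger {

    private static final Logger logger = LogManager.getLogger(GuardedBulkLogger.class.getName());

    private final boolean enableBulkLogging;

    GuardedBulkLogger(boolean enableBulkLogging) {
        this.enableBulkLogging = enableBulkLogging;
    }

    void afterBulk(long executionId, long succeeded, long failed, long tookMillis) {
        // both conditions must hold: the feature flag and the debug level
        if (enableBulkLogging && logger.isDebugEnabled()) {
            logger.debug("after bulk [{}] [succeeded={}] [failed={}] [{}ms]",
                    executionId, succeeded, failed, tookMillis);
        }
    }
}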
@@ -6,7 +6,6 @@ import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.common.settings.Settings;

import java.io.IOException;
import java.util.concurrent.TimeUnit;

/**
@@ -2,6 +2,8 @@ package org.xbib.elx.common;

public enum Parameters {

    ENABLE_BULK_LOGGING(false),

    DEFAULT_MAX_ACTIONS_PER_REQUEST(1000),

    DEFAULT_MAX_CONCURRENT_REQUESTS(Runtime.getRuntime().availableProcessors()),

@@ -18,10 +20,16 @@ public enum Parameters {

    FLUSH_INTERVAL("flush_interval");

    boolean flag;

    int num;

    String string;

    Parameters(boolean flag) {
        this.flag = flag;
    }

    Parameters(int num) {
        this.num = num;
    }

@@ -30,6 +38,10 @@ public enum Parameters {
        this.string = string;
    }

    boolean getValue() {
        return flag;
    }

    int getNum() {
        return num;
    }
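The new boolean-valued parameter is what flips the default: ENABLE_BULK_LOGGING carries false, and DefaultBulkController looks the effective value up in its settings under the key ENABLE_BULK_LOGGING.name(), falling back to getValue(). The demo below is hypothetical and not part of this commit; it sits in org.xbib.elx.common only because getValue() is package-private in the diff above.

package org.xbib.elx.common;

// Hypothetical demo class: shows the settings key and default that
// DefaultBulkController now uses for bulk logging.
public class EnableBulkLoggingDemo {

    public static void main(String[] args) {
        String key = Parameters.ENABLE_BULK_LOGGING.name();           // "ENABLE_BULK_LOGGING"
        boolean fallback = Parameters.ENABLE_BULK_LOGGING.getValue(); // false after this commit
        System.out.println(key + " defaults to " + fallback);
        // callers who want the old verbose bulk logging back have to set this
        // key to true in the settings they hand to the client
    }
}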
@@ -1,10 +1,10 @@
group = org.xbib
name = elx
version = 2.2.1.9
version = 2.2.1.10

xbib-metrics.version = 1.2.0
xbib-guice.version = 4.0.4
xbib-netty-http.version = 4.1.35.0
xbib-netty-http.version = 4.1.36.7

elasticsearch.version = 2.2.1
jackson.version = 2.6.7

@@ -13,8 +13,10 @@ log4j.version = 2.11.1
mustache.version = 0.9.5
jts.version = 1.13

# test
junit.version = 5.4.2
wagon.version = 3.0.0

# docs
asciidoclet.version = 1.5.4

org.gradle.warning.mode = all
@@ -1,62 +0,0 @@
ext {
    description = 'Extensions for Elasticsearch clients (node and transport)'
    scmUrl = 'https://github.com/jprante/elx'
    scmConnection = 'scm:git:git://github.com/jprante/elx.git'
    scmDeveloperConnection = 'scm:git:git://github.com/jprante/elx.git'
}

task sonaTypeUpload(type: Upload, dependsOn: build) {
    group = 'publish'
    configuration = configurations.archives
    uploadDescriptor = true
    repositories {
        if (project.hasProperty('ossrhUsername')) {
            mavenDeployer {
                beforeDeployment { MavenDeployment deployment -> signing.signPom(deployment) }
                repository(url: 'https://oss.sonatype.org/service/local/staging/deploy/maven2') {
                    authentication(userName: ossrhUsername, password: ossrhPassword)
                }
                snapshotRepository(url: 'https://oss.sonatype.org/content/repositories/snapshots') {
                    authentication(userName: ossrhUsername, password: ossrhPassword)
                }
                pom.project {
                    groupId project.group
                    artifactId project.name
                    version project.version
                    name project.name
                    description description
                    packaging 'jar'
                    inceptionYear '2019'
                    url scmUrl
                    organization {
                        name 'xbib'
                        url 'http://xbib.org'
                    }
                    developers {
                        developer {
                            id 'xbib'
                            name 'Jörg Prante'
                            email 'joergprante@gmail.com'
                            url 'https://github.com/jprante'
                        }
                    }
                    scm {
                        url scmUrl
                        connection scmConnection
                        developerConnection scmDeveloperConnection
                    }
                    licenses {
                        license {
                            name 'The Apache License, Version 2.0'
                            url 'http://www.apache.org/licenses/LICENSE-2.0.txt'
                        }
                    }
                }
            }
        }
    }
}

nexusStaging {
    packageGroup = "org.xbib"
}
4 gradle/wrapper/gradle-wrapper.properties vendored
@@ -1,6 +1,6 @@
#Thu May 02 09:39:03 CEST 2019
#Tue Jul 23 11:33:24 CEST 2019
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-5.3-all.zip
distributionUrl=https\://services.gradle.org/distributions/gradle-5.3.1-all.zip