first version of elx for Elasticsearch 6.3.2

parent 0ba6b6d99d
commit fa84c61336

226 changed files with 6608 additions and 17962 deletions
@@ -1,7 +1,7 @@
language: java
sudo: required
jdk:
  - oraclejdk8
  - openjdk11
cache:
  directories:
    - $HOME/.m2
@@ -1,11 +0,0 @@
dependencies {
    compile "io.netty:netty-buffer:${rootProject.property('netty.version')}"
    compile "io.netty:netty-codec-http:${rootProject.property('netty.version')}"
    compile "io.netty:netty-handler:${rootProject.property('netty.version')}"
    compile "org.xbib.elasticsearch:elasticsearch:${rootProject.property('elasticsearch-server.version')}"
}

jar {
    baseName "${rootProject.name}-api"
}
@@ -1,321 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE module PUBLIC
    "-//Puppy Crawl//DTD Check Configuration 1.3//EN"
    "http://www.puppycrawl.com/dtds/configuration_1_3.dtd">

<!-- This is a checkstyle configuration file. For descriptions of
     what the following rules do, please see the checkstyle configuration
     page at http://checkstyle.sourceforge.net/config.html -->

<module name="Checker">

  <module name="FileTabCharacter">
    <!-- Checks that there are no tab characters in the file. -->
  </module>

  <module name="NewlineAtEndOfFile">
    <property name="lineSeparator" value="lf"/>
  </module>

  <module name="RegexpSingleline">
    <!-- Checks that FIXME is not used in comments. TODO is preferred. -->
    <property name="format" value="((//.*)|(\*.*))FIXME" />
    <property name="message" value='TODO is preferred to FIXME. e.g. "TODO(johndoe): Refactor when v2 is released."' />
  </module>

  <module name="RegexpSingleline">
    <!-- Checks that TODOs are named. (Actually, just that they are followed
         by an open paren.) -->
    <property name="format" value="((//.*)|(\*.*))TODO[^(]" />
    <property name="message" value='All TODOs should be named. e.g. "TODO(johndoe): Refactor when v2 is released."' />
  </module>

  <module name="JavadocPackage">
    <!-- Checks that each Java package has a Javadoc file used for commenting.
         Only allows a package-info.java, not package.html. -->
  </module>

  <!-- All Java AST specific tests live under TreeWalker module. -->
  <module name="TreeWalker">

    <!--
    IMPORT CHECKS
    -->

    <module name="RedundantImport">
      <!-- Checks for redundant import statements. -->
      <property name="severity" value="error"/>
    </module>

    <module name="ImportOrder">
      <!-- Checks for out of order import statements. -->
      <property name="severity" value="warning"/>
      <property name="groups" value="com,junit,net,org,java,javax"/>
      <!-- This ensures that static imports go first. -->
      <property name="option" value="top"/>
      <property name="tokens" value="STATIC_IMPORT, IMPORT"/>
    </module>

    <!--
    JAVADOC CHECKS
    -->

    <!-- Checks for Javadoc comments. -->
    <!-- See http://checkstyle.sf.net/config_javadoc.html -->
    <module name="JavadocMethod">
      <property name="scope" value="protected"/>
      <property name="severity" value="warning"/>
      <property name="allowMissingJavadoc" value="true"/>
      <property name="allowMissingParamTags" value="true"/>
      <property name="allowMissingReturnTag" value="true"/>
      <property name="allowMissingThrowsTags" value="true"/>
      <property name="allowThrowsTagsForSubclasses" value="true"/>
      <property name="allowUndeclaredRTE" value="true"/>
    </module>

    <module name="JavadocType">
      <property name="scope" value="protected"/>
      <property name="severity" value="error"/>
    </module>

    <module name="JavadocStyle">
      <property name="severity" value="warning"/>
    </module>

    <!--
    NAMING CHECKS
    -->

    <!-- Item 38 - Adhere to generally accepted naming conventions -->

    <module name="PackageName">
      <!-- Validates identifiers for package names against the
           supplied expression. -->
      <!-- Here the default checkstyle rule restricts package name parts to
           seven characters, this is not in line with common practice at Google. -->
      <property name="format" value="^[a-z]+(\.[a-z][a-z0-9]{1,})*$"/>
      <property name="severity" value="warning"/>
    </module>

    <module name="TypeNameCheck">
      <!-- Validates static, final fields against the
           expression "^[A-Z][a-zA-Z0-9]*$". -->
      <metadata name="altname" value="TypeName"/>
      <property name="severity" value="warning"/>
    </module>

    <module name="ConstantNameCheck">
      <!-- Validates non-private, static, final fields against the supplied
           public/package final fields "^[A-Z][A-Z0-9]*(_[A-Z0-9]+)*$". -->
      <metadata name="altname" value="ConstantName"/>
      <property name="applyToPublic" value="true"/>
      <property name="applyToProtected" value="true"/>
      <property name="applyToPackage" value="true"/>
      <property name="applyToPrivate" value="false"/>
      <property name="format" value="^([A-Z][A-Z0-9]*(_[A-Z0-9]+)*|FLAG_.*)$"/>
      <message key="name.invalidPattern"
               value="Variable ''{0}'' should be in ALL_CAPS (if it is a constant) or be private (otherwise)."/>
      <property name="severity" value="warning"/>
    </module>

    <module name="StaticVariableNameCheck">
      <!-- Validates static, non-final fields against the supplied
           expression "^[a-z][a-zA-Z0-9]*_?$". -->
      <metadata name="altname" value="StaticVariableName"/>
      <property name="applyToPublic" value="true"/>
      <property name="applyToProtected" value="true"/>
      <property name="applyToPackage" value="true"/>
      <property name="applyToPrivate" value="true"/>
      <property name="format" value="^[a-z][a-zA-Z0-9]*_?$"/>
      <property name="severity" value="warning"/>
    </module>

    <module name="MemberNameCheck">
      <!-- Validates non-static members against the supplied expression. -->
      <metadata name="altname" value="MemberName"/>
      <property name="applyToPublic" value="true"/>
      <property name="applyToProtected" value="true"/>
      <property name="applyToPackage" value="true"/>
      <property name="applyToPrivate" value="true"/>
      <property name="format" value="^[a-z][a-zA-Z0-9]*$"/>
      <property name="severity" value="warning"/>
    </module>

    <module name="MethodNameCheck">
      <!-- Validates identifiers for method names. -->
      <metadata name="altname" value="MethodName"/>
      <property name="format" value="^[a-z][a-zA-Z0-9]*(_[a-zA-Z0-9]+)*$"/>
      <property name="severity" value="warning"/>
    </module>

    <module name="ParameterName">
      <!-- Validates identifiers for method parameters against the
           expression "^[a-z][a-zA-Z0-9]*$". -->
      <property name="severity" value="warning"/>
    </module>

    <module name="LocalFinalVariableName">
      <!-- Validates identifiers for local final variables against the
           expression "^[a-z][a-zA-Z0-9]*$". -->
      <property name="severity" value="warning"/>
    </module>

    <module name="LocalVariableName">
      <!-- Validates identifiers for local variables against the
           expression "^[a-z][a-zA-Z0-9]*$". -->
      <property name="severity" value="warning"/>
    </module>

    <!--
    LENGTH and CODING CHECKS
    -->

    <module name="LineLength">
      <!-- Checks if a line is too long. -->
      <property name="max" value="${com.puppycrawl.tools.checkstyle.checks.sizes.LineLength.max}" default="128"/>
      <property name="severity" value="error"/>

      <!--
      The default ignore pattern exempts the following elements:
        - import statements
        - long URLs inside comments
      -->

      <property name="ignorePattern"
                value="${com.puppycrawl.tools.checkstyle.checks.sizes.LineLength.ignorePattern}"
                default="^(package .*;\s*)|(import .*;\s*)|( *(\*|//).*https?://.*)$"/>
    </module>

    <module name="LeftCurly">
      <!-- Checks for placement of the left curly brace ('{'). -->
      <property name="severity" value="warning"/>
    </module>

    <module name="RightCurly">
      <!-- Checks right curlies on CATCH, ELSE, and TRY blocks are on
           the same line. e.g., the following example is fine:
           <pre>
           if {
             ...
           } else
           </pre>
      -->
      <!-- This next example is not fine:
           <pre>
           if {
             ...
           }
           else
           </pre>
      -->
      <property name="option" value="same"/>
      <property name="severity" value="warning"/>
    </module>

    <!-- Checks for braces around if and else blocks -->
    <module name="NeedBraces">
      <property name="severity" value="warning"/>
      <property name="tokens" value="LITERAL_IF, LITERAL_ELSE, LITERAL_FOR, LITERAL_WHILE, LITERAL_DO"/>
    </module>

    <module name="UpperEll">
      <!-- Checks that long constants are defined with an upper ell. -->
      <property name="severity" value="error"/>
    </module>

    <module name="FallThrough">
      <!-- Warn about falling through to the next case statement. Similar to
           javac -Xlint:fallthrough, but the check is suppressed if a single-line comment
           on the last non-blank line preceding the fallen-into case contains 'fall through' (or
           some other variants which we don't publicized to promote consistency). -->
      <property name="reliefPattern"
                value="fall through|Fall through|fallthru|Fallthru|falls through|Falls through|fallthrough|Fallthrough|No break|NO break|no break|continue on"/>
      <property name="severity" value="error"/>
    </module>

    <!--
    MODIFIERS CHECKS
    -->

    <module name="ModifierOrder">
      <!-- Warn if modifier order is inconsistent with JLS3 8.1.1, 8.3.1, and
           8.4.3. The prescribed order is:
           public, protected, private, abstract, static, final, transient, volatile,
           synchronized, native, strictfp -->
    </module>

    <!--
    WHITESPACE CHECKS
    -->

    <module name="WhitespaceAround">
      <!-- Checks that various tokens are surrounded by whitespace.
           This includes most binary operators and keywords followed
           by regular or curly braces. -->
      <property name="tokens" value="ASSIGN, BAND, BAND_ASSIGN, BOR,
        BOR_ASSIGN, BSR, BSR_ASSIGN, BXOR, BXOR_ASSIGN, COLON, DIV, DIV_ASSIGN,
        EQUAL, GE, GT, LAND, LE, LITERAL_CATCH, LITERAL_DO, LITERAL_ELSE,
        LITERAL_FINALLY, LITERAL_FOR, LITERAL_IF, LITERAL_RETURN,
        LITERAL_SYNCHRONIZED, LITERAL_TRY, LITERAL_WHILE, LOR, LT, MINUS,
        MINUS_ASSIGN, MOD, MOD_ASSIGN, NOT_EQUAL, PLUS, PLUS_ASSIGN, QUESTION,
        SL, SL_ASSIGN, SR_ASSIGN, STAR, STAR_ASSIGN"/>
      <property name="severity" value="error"/>
    </module>

    <module name="WhitespaceAfter">
      <!-- Checks that commas, semicolons and typecasts are followed by
           whitespace. -->
      <property name="tokens" value="COMMA, SEMI, TYPECAST"/>
    </module>

    <module name="NoWhitespaceAfter">
      <!-- Checks that there is no whitespace after various unary operators.
           Linebreaks are allowed. -->
      <property name="tokens" value="BNOT, DEC, DOT, INC, LNOT, UNARY_MINUS,
        UNARY_PLUS"/>
      <property name="allowLineBreaks" value="true"/>
      <property name="severity" value="error"/>
    </module>

    <module name="NoWhitespaceBefore">
      <!-- Checks that there is no whitespace before various unary operators.
           Linebreaks are allowed. -->
      <property name="tokens" value="SEMI, DOT, POST_DEC, POST_INC"/>
      <property name="allowLineBreaks" value="true"/>
      <property name="severity" value="error"/>
    </module>

    <module name="ParenPad">
      <!-- Checks that there is no whitespace before close parens or after
           open parens. -->
      <property name="severity" value="warning"/>
    </module>

  </module>
</module>
684 api/src/docs/asciidoc/css/foundation.css (vendored)
@@ -1,684 +0,0 @@
/*! normalize.css v2.1.2 | MIT License | git.io/normalize */
/* ... remainder of the vendored foundation.css stylesheet (684 lines, deleted in this commit) ... */
@@ -1,4 +0,0 @@
= Elasticsearch Java client
Jörg Prante
Version 5.4.0.0
File diff suppressed because it is too large
131 build.gradle
@@ -1,27 +1,13 @@
import java.time.ZonedDateTime
import java.time.format.DateTimeFormatter

buildscript {
    repositories {
        jcenter()
        maven {
            url 'http://xbib.org/repository'
        }
    }
    dependencies {
        classpath "org.xbib.elasticsearch:gradle-plugin-elasticsearch-build:6.3.2.4"
    }
}

plugins {
    id "org.sonarqube" version "2.6.1"
    id "io.codearte.nexus-staging" version "0.11.0"
    id "com.github.spotbugs" version "1.6.9"
    id "org.xbib.gradle.plugin.asciidoctor" version "1.6.0.1"
}

printf "Date: %s\nHost: %s\nOS: %s %s %s\nJava: %s %s %s %s\nGradle: %s Groovy: %s Java: %s\n" +
printf "Host: %s\nOS: %s %s %s\nJava: %s %s %s %s\nGradle: %s Groovy: %s Java: %s\n" +
        "Build: group: ${project.group} name: ${project.name} version: ${project.version}\n",
        ZonedDateTime.now().format(DateTimeFormatter.ISO_DATE_TIME),
        InetAddress.getLocalHost(),
        System.getProperty("os.name"),
        System.getProperty("os.arch"),
@@ -33,31 +19,28 @@ printf "Date: %s\nHost: %s\nOS: %s %s %s\nJava: %s %s %s %s\nGradle: %s Groovy:
        gradle.gradleVersion, GroovySystem.getVersion(), JavaVersion.current()

apply plugin: "io.codearte.nexus-staging"
apply plugin: 'org.xbib.gradle.plugin.asciidoctor'

ext {
    user = 'jprante'
    name = 'elx'
    description = 'Elasticsearch extensions'
    scmUrl = 'https://github.com/' + user + '/' + name
    scmConnection = 'scm:git:git://github.com/' + user + '/' + name + '.git'
    scmDeveloperConnection = 'scm:git:git://github.com/' + user + '/' + name + '.git'
if (JavaVersion.current() < JavaVersion.VERSION_11) {
    throw new GradleException("This build must be run with java 11 or higher")
}

subprojects {
    apply plugin: 'java'
    apply plugin: 'maven'
    apply plugin: 'signing'
    apply plugin: 'com.github.spotbugs'
    apply plugin: 'pmd'
    apply plugin: 'checkstyle'
    apply plugin: 'org.xbib.gradle.plugin.asciidoctor'

    configurations {
        wagon
        alpnagent
        asciidoclet
        wagon
    }

    dependencies {
        alpnagent "org.mortbay.jetty.alpn:jetty-alpn-agent:${project.property('alpnagent.version')}"
        testCompile "junit:junit:${project.property('junit.version')}"
        testCompile "org.apache.logging.log4j:log4j-core:${project.property('log4j.version')}"
        testCompile "org.apache.logging.log4j:log4j-slf4j-impl:${project.property('log4j.version')}"
        asciidoclet "org.xbib:asciidoclet:${project.property('asciidoclet.version')}"
        wagon "org.apache.maven.wagon:wagon-ssh:${project.property('wagon.version')}"
    }
@ -71,10 +54,32 @@ subprojects {
|
|||
targetCompatibility = JavaVersion.VERSION_11
|
||||
}
|
||||
|
||||
jar {
|
||||
baseName "${rootProject.name}-${project.name}"
|
||||
tasks.withType(JavaCompile) {
|
||||
options.compilerArgs << "-Xlint:all"
|
||||
if (!options.compilerArgs.contains("-processor")) {
|
||||
options.compilerArgs << '-proc:none'
|
||||
}
|
||||
}
|
||||
|
||||
test {
|
||||
jvmArgs =[
|
||||
'--add-exports=java.base/jdk.internal.ref=ALL-UNNAMED',
|
||||
'--add-exports=java.base/jdk.internal.misc=ALL-UNNAMED',
|
||||
'--add-opens=java.base/java.nio=ALL-UNNAMED'
|
||||
]
|
||||
systemProperty 'jna.debug_load', 'true'
|
||||
testLogging {
|
||||
showStandardStreams = true
|
||||
exceptionFormat = 'full'
|
||||
}
|
||||
}
|
||||
|
||||
clean {
|
||||
delete "data"
|
||||
delete "logs"
|
||||
delete "out"
|
||||
}
|
||||
|
||||
/*javadoc {
|
||||
options.docletpath = configurations.asciidoclet.files.asType(List)
|
||||
options.doclet = 'org.xbib.asciidoclet.Asciidoclet'
|
||||
|
@ -105,16 +110,76 @@ subprojects {
|
|||
archives javadocJar, sourcesJar
|
||||
}*/
|
||||
|
||||
task javadocJar(type: Jar, dependsOn: javadoc) {
|
||||
classifier 'javadoc'
|
||||
}
|
||||
|
||||
task sourcesJar(type: Jar, dependsOn: classes) {
|
||||
from sourceSets.main.allSource
|
||||
classifier 'sources'
|
||||
}
|
||||
|
||||
artifacts {
|
||||
archives javadocJar, sourcesJar
|
||||
}
|
||||
|
||||
if (project.hasProperty('signing.keyId')) {
|
||||
signing {
|
||||
sign configurations.archives
|
||||
}
|
||||
}
|
||||
|
||||
apply from: "${rootProject.projectDir}/gradle/ext.gradle"
|
||||
apply from: "${rootProject.projectDir}/gradle/publish.gradle"
|
||||
//apply from: "${rootProject.projectDir}/gradle/sonarqube.gradle"
|
||||
|
||||
spotbugs {
|
||||
effort = "max"
|
||||
reportLevel = "low"
|
||||
//includeFilter = file("findbugs-exclude.xml")
|
||||
}
|
||||
|
||||
tasks.withType(com.github.spotbugs.SpotBugsTask) {
|
||||
ignoreFailures = true
|
||||
reports {
|
||||
xml.enabled = false
|
||||
html.enabled = true
|
||||
}
|
||||
}
|
||||
|
||||
tasks.withType(Pmd) {
|
||||
ignoreFailures = true
|
||||
reports {
|
||||
xml.enabled = true
|
||||
html.enabled = true
|
||||
}
|
||||
}
|
||||
tasks.withType(Checkstyle) {
|
||||
ignoreFailures = true
|
||||
reports {
|
||||
xml.enabled = true
|
||||
html.enabled = true
|
||||
}
|
||||
}
|
||||
|
||||
pmd {
|
||||
toolVersion = '6.11.0'
|
||||
ruleSets = ['category/java/bestpractices.xml']
|
||||
}
|
||||
|
||||
checkstyle {
|
||||
configFile = rootProject.file('config/checkstyle/checkstyle.xml')
|
||||
ignoreFailures = true
|
||||
showViolations = true
|
||||
}
|
||||
|
||||
sonarqube {
|
||||
properties {
|
||||
property "sonar.projectName", "${project.group} ${project.name}"
|
||||
property "sonar.sourceEncoding", "UTF-8"
|
||||
property "sonar.tests", "src/test/java"
|
||||
property "sonar.scm.provider", "git"
|
||||
property "sonar.junit.reportsPath", "build/test-results/test/"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/*asciidoctor {
|
||||
|
|
|
@@ -1,70 +0,0 @@
buildscript {
repositories {
jcenter()
maven {
url 'http://xbib.org/repository'
}
}
dependencies {
classpath "org.xbib.elasticsearch:gradle-plugin-elasticsearch-build:6.3.2.4"
}
}

apply plugin: 'org.xbib.gradle.plugin.elasticsearch.build'

configurations {
main
tests
}

dependencies {
compile project(':api')
compile "org.xbib:metrics:${project.property('xbib-metrics.version')}"
compileOnly "org.apache.logging.log4j:log4j-api:${project.property('log4j.version')}"
testCompile "org.xbib.elasticsearch:elasticsearch-test-framework:${project.property('elasticsearch-devkit.version')}"
testRuntime "org.xbib.elasticsearch:elasticsearch-test-framework:${project.property('elasticsearch-devkit.version')}"
}

jar {
baseName "${rootProject.name}-common"
}

/*
task testJar(type: Jar, dependsOn: testClasses) {
baseName = "${project.archivesBaseName}-tests"
from sourceSets.test.output
}
*/

artifacts {
main jar
tests testJar
archives sourcesJar, javadocJar
}

test {
enabled = false
//jvmArgs "-javaagent:" + configurations.alpnagent.asPath
systemProperty 'path.home', project.buildDir.absolutePath
testLogging {
showStandardStreams = true
exceptionFormat = 'full'
}
}

randomizedTest {
enabled = false
}

esTest {
// test with the jars, not the classes, for security manager
// classpath = files(configurations.testRuntime) + configurations.main.artifacts.files + configurations.tests.artifacts.files
systemProperty 'tests.security.manager', 'true'
}
esTest.dependsOn jar, testJar

dependencyLicenses.enabled = false

// we not like to examine Netty
thirdPartyAudit.enabled = false

@@ -1,321 +0,0 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE module PUBLIC
|
||||
"-//Puppy Crawl//DTD Check Configuration 1.3//EN"
|
||||
"http://www.puppycrawl.com/dtds/configuration_1_3.dtd">
|
||||
|
||||
<!-- This is a checkstyle configuration file. For descriptions of
|
||||
what the following rules do, please see the checkstyle configuration
|
||||
page at http://checkstyle.sourceforge.net/config.html -->
|
||||
|
||||
<module name="Checker">
|
||||
|
||||
<module name="FileTabCharacter">
|
||||
<!-- Checks that there are no tab characters in the file.
|
||||
-->
|
||||
</module>
|
||||
|
||||
<module name="NewlineAtEndOfFile">
|
||||
<property name="lineSeparator" value="lf"/>
|
||||
</module>
|
||||
|
||||
<module name="RegexpSingleline">
|
||||
<!-- Checks that FIXME is not used in comments. TODO is preferred.
|
||||
-->
|
||||
<property name="format" value="((//.*)|(\*.*))FIXME" />
|
||||
<property name="message" value='TODO is preferred to FIXME. e.g. "TODO(johndoe): Refactor when v2 is released."' />
|
||||
</module>
|
||||
|
||||
<module name="RegexpSingleline">
|
||||
<!-- Checks that TODOs are named. (Actually, just that they are followed
|
||||
by an open paren.)
|
||||
-->
|
||||
<property name="format" value="((//.*)|(\*.*))TODO[^(]" />
|
||||
<property name="message" value='All TODOs should be named. e.g. "TODO(johndoe): Refactor when v2 is released."' />
|
||||
</module>
|
||||
|
||||
<module name="JavadocPackage">
|
||||
<!-- Checks that each Java package has a Javadoc file used for commenting.
|
||||
Only allows a package-info.java, not package.html. -->
|
||||
</module>
|
||||
|
||||
<!-- All Java AST specific tests live under TreeWalker module. -->
|
||||
<module name="TreeWalker">
|
||||
|
||||
<!--
|
||||
IMPORT CHECKS
|
||||
-->
|
||||
|
||||
<module name="RedundantImport">
|
||||
<!-- Checks for redundant import statements. -->
|
||||
<property name="severity" value="error"/>
|
||||
</module>
|
||||
|
||||
<module name="ImportOrder">
|
||||
<!-- Checks for out of order import statements. -->
|
||||
|
||||
<property name="severity" value="warning"/>
|
||||
<property name="groups" value="com,junit,net,org,java,javax"/>
|
||||
<!-- This ensures that static imports go first. -->
|
||||
<property name="option" value="top"/>
|
||||
<property name="tokens" value="STATIC_IMPORT, IMPORT"/>
|
||||
</module>
|
||||
|
||||
<!--
|
||||
|
||||
JAVADOC CHECKS
|
||||
|
||||
-->
|
||||
|
||||
<!-- Checks for Javadoc comments. -->
|
||||
<!-- See http://checkstyle.sf.net/config_javadoc.html -->
|
||||
<module name="JavadocMethod">
|
||||
<property name="scope" value="protected"/>
|
||||
<property name="severity" value="warning"/>
|
||||
<property name="allowMissingJavadoc" value="true"/>
|
||||
<property name="allowMissingParamTags" value="true"/>
|
||||
<property name="allowMissingReturnTag" value="true"/>
|
||||
<property name="allowMissingThrowsTags" value="true"/>
|
||||
<property name="allowThrowsTagsForSubclasses" value="true"/>
|
||||
<property name="allowUndeclaredRTE" value="true"/>
|
||||
</module>
|
||||
|
||||
<module name="JavadocType">
|
||||
<property name="scope" value="protected"/>
|
||||
<property name="severity" value="error"/>
|
||||
</module>
|
||||
|
||||
<module name="JavadocStyle">
|
||||
<property name="severity" value="warning"/>
|
||||
</module>
|
||||
|
||||
<!--
|
||||
|
||||
NAMING CHECKS
|
||||
|
||||
-->
|
||||
|
||||
<!-- Item 38 - Adhere to generally accepted naming conventions -->
|
||||
|
||||
<module name="PackageName">
|
||||
<!-- Validates identifiers for package names against the
|
||||
supplied expression. -->
|
||||
<!-- Here the default checkstyle rule restricts package name parts to
|
||||
seven characters, this is not in line with common practice at Google.
|
||||
-->
|
||||
<property name="format" value="^[a-z]+(\.[a-z][a-z0-9]{1,})*$"/>
|
||||
<property name="severity" value="warning"/>
|
||||
</module>
|
||||
|
||||
<module name="TypeNameCheck">
|
||||
<!-- Validates static, final fields against the
|
||||
expression "^[A-Z][a-zA-Z0-9]*$". -->
|
||||
<metadata name="altname" value="TypeName"/>
|
||||
<property name="severity" value="warning"/>
|
||||
</module>
|
||||
|
||||
<module name="ConstantNameCheck">
|
||||
<!-- Validates non-private, static, final fields against the supplied
|
||||
public/package final fields "^[A-Z][A-Z0-9]*(_[A-Z0-9]+)*$". -->
|
||||
<metadata name="altname" value="ConstantName"/>
|
||||
<property name="applyToPublic" value="true"/>
|
||||
<property name="applyToProtected" value="true"/>
|
||||
<property name="applyToPackage" value="true"/>
|
||||
<property name="applyToPrivate" value="false"/>
|
||||
<property name="format" value="^([A-Z][A-Z0-9]*(_[A-Z0-9]+)*|FLAG_.*)$"/>
|
||||
<message key="name.invalidPattern"
|
||||
value="Variable ''{0}'' should be in ALL_CAPS (if it is a constant) or be private (otherwise)."/>
|
||||
<property name="severity" value="warning"/>
|
||||
</module>
|
||||
|
||||
<module name="StaticVariableNameCheck">
|
||||
<!-- Validates static, non-final fields against the supplied
|
||||
expression "^[a-z][a-zA-Z0-9]*_?$". -->
|
||||
<metadata name="altname" value="StaticVariableName"/>
|
||||
<property name="applyToPublic" value="true"/>
|
||||
<property name="applyToProtected" value="true"/>
|
||||
<property name="applyToPackage" value="true"/>
|
||||
<property name="applyToPrivate" value="true"/>
|
||||
<property name="format" value="^[a-z][a-zA-Z0-9]*_?$"/>
|
||||
<property name="severity" value="warning"/>
|
||||
</module>
|
||||
|
||||
<module name="MemberNameCheck">
|
||||
<!-- Validates non-static members against the supplied expression. -->
|
||||
<metadata name="altname" value="MemberName"/>
|
||||
<property name="applyToPublic" value="true"/>
|
||||
<property name="applyToProtected" value="true"/>
|
||||
<property name="applyToPackage" value="true"/>
|
||||
<property name="applyToPrivate" value="true"/>
|
||||
<property name="format" value="^[a-z][a-zA-Z0-9]*$"/>
|
||||
<property name="severity" value="warning"/>
|
||||
</module>
|
||||
|
||||
<module name="MethodNameCheck">
|
||||
<!-- Validates identifiers for method names. -->
|
||||
<metadata name="altname" value="MethodName"/>
|
||||
<property name="format" value="^[a-z][a-zA-Z0-9]*(_[a-zA-Z0-9]+)*$"/>
|
||||
<property name="severity" value="warning"/>
|
||||
</module>
|
||||
|
||||
<module name="ParameterName">
|
||||
<!-- Validates identifiers for method parameters against the
|
||||
expression "^[a-z][a-zA-Z0-9]*$". -->
|
||||
<property name="severity" value="warning"/>
|
||||
</module>
|
||||
|
||||
<module name="LocalFinalVariableName">
|
||||
<!-- Validates identifiers for local final variables against the
|
||||
expression "^[a-z][a-zA-Z0-9]*$". -->
|
||||
<property name="severity" value="warning"/>
|
||||
</module>
|
||||
|
||||
<module name="LocalVariableName">
|
||||
<!-- Validates identifiers for local variables against the
|
||||
expression "^[a-z][a-zA-Z0-9]*$". -->
|
||||
<property name="severity" value="warning"/>
|
||||
</module>
|
||||
|
||||
|
||||
<!--
|
||||
|
||||
LENGTH and CODING CHECKS
|
||||
|
||||
-->
|
||||
|
||||
<module name="LineLength">
|
||||
<!-- Checks if a line is too long. -->
|
||||
<property name="max" value="${com.puppycrawl.tools.checkstyle.checks.sizes.LineLength.max}" default="128"/>
|
||||
<property name="severity" value="error"/>
|
||||
|
||||
<!--
|
||||
The default ignore pattern exempts the following elements:
|
||||
- import statements
|
||||
- long URLs inside comments
|
||||
-->
|
||||
|
||||
<property name="ignorePattern"
|
||||
value="${com.puppycrawl.tools.checkstyle.checks.sizes.LineLength.ignorePattern}"
|
||||
default="^(package .*;\s*)|(import .*;\s*)|( *(\*|//).*https?://.*)$"/>
|
||||
</module>
|
||||
|
||||
<module name="LeftCurly">
|
||||
<!-- Checks for placement of the left curly brace ('{'). -->
|
||||
<property name="severity" value="warning"/>
|
||||
</module>
|
||||
|
||||
<module name="RightCurly">
|
||||
<!-- Checks right curlies on CATCH, ELSE, and TRY blocks are on
|
||||
the same line. e.g., the following example is fine:
|
||||
<pre>
|
||||
if {
|
||||
...
|
||||
} else
|
||||
</pre>
|
||||
-->
|
||||
<!-- This next example is not fine:
|
||||
<pre>
|
||||
if {
|
||||
...
|
||||
}
|
||||
else
|
||||
</pre>
|
||||
-->
|
||||
<property name="option" value="same"/>
|
||||
<property name="severity" value="warning"/>
|
||||
</module>
|
||||
|
||||
<!-- Checks for braces around if and else blocks -->
|
||||
<module name="NeedBraces">
|
||||
<property name="severity" value="warning"/>
|
||||
<property name="tokens" value="LITERAL_IF, LITERAL_ELSE, LITERAL_FOR, LITERAL_WHILE, LITERAL_DO"/>
|
||||
</module>
|
||||
|
||||
<module name="UpperEll">
|
||||
<!-- Checks that long constants are defined with an upper ell.-->
|
||||
<property name="severity" value="error"/>
|
||||
</module>
|
||||
|
||||
<module name="FallThrough">
|
||||
<!-- Warn about falling through to the next case statement. Similar to
|
||||
javac -Xlint:fallthrough, but the check is suppressed if a single-line comment
|
||||
on the last non-blank line preceding the fallen-into case contains 'fall through' (or
|
||||
some other variants which we don't publicized to promote consistency).
|
||||
-->
|
||||
<property name="reliefPattern"
|
||||
value="fall through|Fall through|fallthru|Fallthru|falls through|Falls through|fallthrough|Fallthrough|No break|NO break|no break|continue on"/>
|
||||
<property name="severity" value="error"/>
|
||||
</module>
|
||||
|
||||
|
||||
<!--
|
||||
|
||||
MODIFIERS CHECKS
|
||||
|
||||
-->
|
||||
|
||||
<module name="ModifierOrder">
|
||||
<!-- Warn if modifier order is inconsistent with JLS3 8.1.1, 8.3.1, and
|
||||
8.4.3. The prescribed order is:
|
||||
public, protected, private, abstract, static, final, transient, volatile,
|
||||
synchronized, native, strictfp
|
||||
-->
|
||||
</module>
|
||||
|
||||
|
||||
<!--
|
||||
|
||||
WHITESPACE CHECKS
|
||||
|
||||
-->
|
||||
|
||||
<module name="WhitespaceAround">
|
||||
<!-- Checks that various tokens are surrounded by whitespace.
|
||||
This includes most binary operators and keywords followed
|
||||
by regular or curly braces.
|
||||
-->
|
||||
<property name="tokens" value="ASSIGN, BAND, BAND_ASSIGN, BOR,
|
||||
BOR_ASSIGN, BSR, BSR_ASSIGN, BXOR, BXOR_ASSIGN, COLON, DIV, DIV_ASSIGN,
|
||||
EQUAL, GE, GT, LAND, LE, LITERAL_CATCH, LITERAL_DO, LITERAL_ELSE,
|
||||
LITERAL_FINALLY, LITERAL_FOR, LITERAL_IF, LITERAL_RETURN,
|
||||
LITERAL_SYNCHRONIZED, LITERAL_TRY, LITERAL_WHILE, LOR, LT, MINUS,
|
||||
MINUS_ASSIGN, MOD, MOD_ASSIGN, NOT_EQUAL, PLUS, PLUS_ASSIGN, QUESTION,
|
||||
SL, SL_ASSIGN, SR_ASSIGN, STAR, STAR_ASSIGN"/>
|
||||
<property name="severity" value="error"/>
|
||||
</module>
|
||||
|
||||
<module name="WhitespaceAfter">
|
||||
<!-- Checks that commas, semicolons and typecasts are followed by
|
||||
whitespace.
|
||||
-->
|
||||
<property name="tokens" value="COMMA, SEMI, TYPECAST"/>
|
||||
</module>
|
||||
|
||||
<module name="NoWhitespaceAfter">
|
||||
<!-- Checks that there is no whitespace after various unary operators.
|
||||
Linebreaks are allowed.
|
||||
-->
|
||||
<property name="tokens" value="BNOT, DEC, DOT, INC, LNOT, UNARY_MINUS,
|
||||
UNARY_PLUS"/>
|
||||
<property name="allowLineBreaks" value="true"/>
|
||||
<property name="severity" value="error"/>
|
||||
</module>
|
||||
|
||||
<module name="NoWhitespaceBefore">
|
||||
<!-- Checks that there is no whitespace before various unary operators.
|
||||
Linebreaks are allowed.
|
||||
-->
|
||||
<property name="tokens" value="SEMI, DOT, POST_DEC, POST_INC"/>
|
||||
<property name="allowLineBreaks" value="true"/>
|
||||
<property name="severity" value="error"/>
|
||||
</module>
|
||||
|
||||
<module name="ParenPad">
|
||||
<!-- Checks that there is no whitespace before close parens or after
|
||||
open parens.
|
||||
-->
|
||||
<property name="severity" value="warning"/>
|
||||
</module>
|
||||
|
||||
</module>
|
||||
</module>
|
||||
|
|
@@ -1 +0,0 @@
f14124d1557cd7c21742f09cd18913a861125e56

@@ -1 +0,0 @@
2bc144784abc748426b125a948b0bdd4fc4dd7d6

@@ -1 +0,0 @@
af8cf6c3e7de988bbb7e6e441a2235ba1df8eaf8

@@ -1,202 +0,0 @@
|
|||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
|
@@ -1 +0,0 @@
436454f1e6e821f6f18def7a2e4b467eeb341430

@@ -1 +0,0 @@
80ef947c9edfaacb261ee27e2c7fa5968b3eeaa6

@@ -1 +0,0 @@
abf31b393745f2a6e133819ee7485420d6bc5160

@@ -1 +0,0 @@
68463acec824eb54989fcecbe44074a41ee639e3

@@ -1 +0,0 @@
4c232fdaf23b8c7b1ff1ca1ba9b91fcc0fa01938

@@ -1 +0,0 @@
7834ee69f91a3360f17a31cf6a27b245a3a2f668

@@ -1 +0,0 @@
b10c7f51ab98e6f6f252c931534edbb632cb108e

@@ -1 +0,0 @@
824c180dc70fda00b70a146d2f2be9a8f36cfdbb

@@ -1 +0,0 @@
68fba4b570c4717cda49a3f187e2bfb909697fc8

@@ -1 +0,0 @@
e8949a50a223ab837edc312e34ee597febe86464

@@ -1 +0,0 @@
0d4fdb13d5832a0f348e4d855c71201a2b15d560

@@ -1 +0,0 @@
600762bf6861fa62b061782debb6fcdeff1f1984

@@ -1 +0,0 @@
ad557dffc0777b1b24558d6c57b77b0198dbb58d

@@ -1 +0,0 @@
398b725cbaca8c691b74759ae6c3d69b8eeb0574

@@ -1 +0,0 @@
3bcc2db64f7b0ebacba552aff319b41962c2df96

@@ -1 +0,0 @@
f3873f5ed509b5c169fb7cbaf34b694d8c748926

@@ -1 +0,0 @@
b9f9af72dfcd8464c16169670d52c6dc5fe65897

@@ -1 +0,0 @@
fe9c516ca4ead60f713eceb398e6f636b83d0a5b

@@ -1 +0,0 @@
b1bd19c1f50b6764f104cdcbfa3f01b1b3bb2045

@@ -1 +0,0 @@
ce2c501f3c72eb1099467d708b9c134ed0b7bb2a

@@ -1 +0,0 @@
62a48b60b17e6d2a823439a5e68f31ef196f11e7

@@ -1 +0,0 @@
02855ff60b4cecf9dd15e6e91e3cc0902d2e7eac

@@ -1 +0,0 @@
9d28517afc71abe5b7f224944280d5f03ed2f2cc

684 common/src/docs/asciidoc/css/foundation.css vendored

@@ -1,684 +0,0 @@
/*! normalize.css v2.1.2 | MIT License | git.io/normalize */
|
||||
/* ========================================================================== HTML5 display definitions ========================================================================== */
|
||||
/** Correct `block` display not defined in IE 8/9. */
|
||||
article, aside, details, figcaption, figure, footer, header, hgroup, main, nav, section, summary { display: block; }
|
||||
|
||||
/** Correct `inline-block` display not defined in IE 8/9. */
|
||||
audio, canvas, video { display: inline-block; }
|
||||
|
||||
/** Prevent modern browsers from displaying `audio` without controls. Remove excess height in iOS 5 devices. */
|
||||
audio:not([controls]) { display: none; height: 0; }
|
||||
|
||||
/** Address `[hidden]` styling not present in IE 8/9. Hide the `template` element in IE, Safari, and Firefox < 22. */
|
||||
[hidden], template { display: none; }
|
||||
|
||||
script { display: none !important; }
|
||||
|
||||
/* ========================================================================== Base ========================================================================== */
|
||||
/** 1. Set default font family to sans-serif. 2. Prevent iOS text size adjust after orientation change, without disabling user zoom. */
|
||||
html { font-family: sans-serif; /* 1 */ -ms-text-size-adjust: 100%; /* 2 */ -webkit-text-size-adjust: 100%; /* 2 */ }
|
||||
|
||||
/** Remove default margin. */
|
||||
body { margin: 0; }
|
||||
|
||||
/* ========================================================================== Links ========================================================================== */
|
||||
/** Remove the gray background color from active links in IE 10. */
|
||||
a { background: transparent; }
|
||||
|
||||
/** Address `outline` inconsistency between Chrome and other browsers. */
|
||||
a:focus { outline: thin dotted; }
|
||||
|
||||
/** Improve readability when focused and also mouse hovered in all browsers. */
|
||||
a:active, a:hover { outline: 0; }
|
||||
|
||||
/* ========================================================================== Typography ========================================================================== */
|
||||
/** Address variable `h1` font-size and margin within `section` and `article` contexts in Firefox 4+, Safari 5, and Chrome. */
|
||||
h1 { font-size: 2em; margin: 0.67em 0; }
|
||||
|
||||
/** Address styling not present in IE 8/9, Safari 5, and Chrome. */
|
||||
abbr[title] { border-bottom: 1px dotted; }
|
||||
|
||||
/** Address style set to `bolder` in Firefox 4+, Safari 5, and Chrome. */
|
||||
b, strong { font-weight: bold; }
|
||||
|
||||
/** Address styling not present in Safari 5 and Chrome. */
|
||||
dfn { font-style: italic; }
|
||||
|
||||
/** Address differences between Firefox and other browsers. */
|
||||
hr { -moz-box-sizing: content-box; box-sizing: content-box; height: 0; }
|
||||
|
||||
/** Address styling not present in IE 8/9. */
|
||||
mark { background: #ff0; color: #000; }
|
||||
|
||||
/** Correct font family set oddly in Safari 5 and Chrome. */
|
||||
code, kbd, pre, samp { font-family: monospace, serif; font-size: 1em; }
|
||||
|
||||
/** Improve readability of pre-formatted text in all browsers. */
|
||||
pre { white-space: pre-wrap; }
|
||||
|
||||
/** Set consistent quote types. */
|
||||
q { quotes: "\201C" "\201D" "\2018" "\2019"; }
|
||||
|
||||
/** Address inconsistent and variable font size in all browsers. */
|
||||
small { font-size: 80%; }
|
||||
|
||||
/** Prevent `sub` and `sup` affecting `line-height` in all browsers. */
|
||||
sub, sup { font-size: 75%; line-height: 0; position: relative; vertical-align: baseline; }
|
||||
|
||||
sup { top: -0.5em; }
|
||||
|
||||
sub { bottom: -0.25em; }
|
||||
|
||||
/* ========================================================================== Embedded content ========================================================================== */
|
||||
/** Remove border when inside `a` element in IE 8/9. */
|
||||
img { border: 0; }
|
||||
|
||||
/** Correct overflow displayed oddly in IE 9. */
|
||||
svg:not(:root) { overflow: hidden; }
|
||||
|
||||
/* ========================================================================== Figures ========================================================================== */
|
||||
/** Address margin not present in IE 8/9 and Safari 5. */
|
||||
figure { margin: 0; }
|
||||
|
||||
/* ========================================================================== Forms ========================================================================== */
|
||||
/** Define consistent border, margin, and padding. */
|
||||
fieldset { border: 1px solid #c0c0c0; margin: 0 2px; padding: 0.35em 0.625em 0.75em; }
|
||||
|
||||
/** 1. Correct `color` not being inherited in IE 8/9. 2. Remove padding so people aren't caught out if they zero out fieldsets. */
|
||||
legend { border: 0; /* 1 */ padding: 0; /* 2 */ }
|
||||
|
||||
/** 1. Correct font family not being inherited in all browsers. 2. Correct font size not being inherited in all browsers. 3. Address margins set differently in Firefox 4+, Safari 5, and Chrome. */
|
||||
button, input, select, textarea { font-family: inherit; /* 1 */ font-size: 100%; /* 2 */ margin: 0; /* 3 */ }
|
||||
|
||||
/** Address Firefox 4+ setting `line-height` on `input` using `!important` in the UA stylesheet. */
|
||||
button, input { line-height: normal; }
|
||||
|
||||
/** Address inconsistent `text-transform` inheritance for `button` and `select`. All other form control elements do not inherit `text-transform` values. Correct `button` style inheritance in Chrome, Safari 5+, and IE 8+. Correct `select` style inheritance in Firefox 4+ and Opera. */
|
||||
button, select { text-transform: none; }
|
||||
|
||||
/** 1. Avoid the WebKit bug in Android 4.0.* where (2) destroys native `audio` and `video` controls. 2. Correct inability to style clickable `input` types in iOS. 3. Improve usability and consistency of cursor style between image-type `input` and others. */
|
||||
button, html input[type="button"], input[type="reset"], input[type="submit"] { -webkit-appearance: button; /* 2 */ cursor: pointer; /* 3 */ }
|
||||
|
||||
/** Re-set default cursor for disabled elements. */
|
||||
button[disabled], html input[disabled] { cursor: default; }
|
||||
|
||||
/** 1. Address box sizing set to `content-box` in IE 8/9. 2. Remove excess padding in IE 8/9. */
|
||||
input[type="checkbox"], input[type="radio"] { box-sizing: border-box; /* 1 */ padding: 0; /* 2 */ }
|
||||
|
||||
/** 1. Address `appearance` set to `searchfield` in Safari 5 and Chrome. 2. Address `box-sizing` set to `border-box` in Safari 5 and Chrome (include `-moz` to future-proof). */
|
||||
input[type="search"] { -webkit-appearance: textfield; /* 1 */ -moz-box-sizing: content-box; -webkit-box-sizing: content-box; /* 2 */ box-sizing: content-box; }
|
||||
|
||||
/** Remove inner padding and search cancel button in Safari 5 and Chrome on OS X. */
|
||||
input[type="search"]::-webkit-search-cancel-button, input[type="search"]::-webkit-search-decoration { -webkit-appearance: none; }
|
||||
|
||||
/** Remove inner padding and border in Firefox 4+. */
|
||||
button::-moz-focus-inner, input::-moz-focus-inner { border: 0; padding: 0; }
|
||||
|
||||
/** 1. Remove default vertical scrollbar in IE 8/9. 2. Improve readability and alignment in all browsers. */
|
||||
textarea { overflow: auto; /* 1 */ vertical-align: top; /* 2 */ }
|
||||
|
||||
/* ========================================================================== Tables ========================================================================== */
|
||||
/** Remove most spacing between table cells. */
|
||||
table { border-collapse: collapse; border-spacing: 0; }
|
||||
|
||||
meta.foundation-mq-small { font-family: "only screen and (min-width: 768px)"; width: 768px; }
|
||||
|
||||
meta.foundation-mq-medium { font-family: "only screen and (min-width:1280px)"; width: 1280px; }
|
||||
|
||||
meta.foundation-mq-large { font-family: "only screen and (min-width:1440px)"; width: 1440px; }
|
||||
|
||||
*, *:before, *:after { -moz-box-sizing: border-box; -webkit-box-sizing: border-box; box-sizing: border-box; }
|
||||
|
||||
html, body { font-size: 100%; }
|
||||
|
||||
body { background: white; color: #222222; padding: 0; margin: 0; font-family: "Helvetica Neue", "Helvetica", Helvetica, Arial, sans-serif; font-weight: normal; font-style: normal; line-height: 1; position: relative; cursor: auto; }
|
||||
|
||||
a:hover { cursor: pointer; }
|
||||
|
||||
img, object, embed { max-width: 100%; height: auto; }
|
||||
|
||||
object, embed { height: 100%; }
|
||||
|
||||
img { -ms-interpolation-mode: bicubic; }
|
||||
|
||||
#map_canvas img, #map_canvas embed, #map_canvas object, .map_canvas img, .map_canvas embed, .map_canvas object { max-width: none !important; }
|
||||
|
||||
.left { float: left !important; }
|
||||
|
||||
.right { float: right !important; }
|
||||
|
||||
.text-left { text-align: left !important; }
|
||||
|
||||
.text-right { text-align: right !important; }
|
||||
|
||||
.text-center { text-align: center !important; }
|
||||
|
||||
.text-justify { text-align: justify !important; }
|
||||
|
||||
.hide { display: none; }
|
||||
|
||||
.antialiased { -webkit-font-smoothing: antialiased; }
|
||||
|
||||
img { display: inline-block; vertical-align: middle; }
|
||||
|
||||
textarea { height: auto; min-height: 50px; }
|
||||
|
||||
select { width: 100%; }
|
||||
|
||||
object, svg { display: inline-block; vertical-align: middle; }
|
||||
|
||||
.center { margin-left: auto; margin-right: auto; }
|
||||
|
||||
.spread { width: 100%; }
|
||||
|
||||
p.lead, .paragraph.lead > p, #preamble > .sectionbody > .paragraph:first-of-type p { font-size: 1.21875em; line-height: 1.6; }
|
||||
|
||||
.subheader, .admonitionblock td.content > .title, .audioblock > .title, .exampleblock > .title, .imageblock > .title, .listingblock > .title, .literalblock > .title, .stemblock > .title, .openblock > .title, .paragraph > .title, .quoteblock > .title, table.tableblock > .title, .verseblock > .title, .videoblock > .title, .dlist > .title, .olist > .title, .ulist > .title, .qlist > .title, .hdlist > .title { line-height: 1.4; color: #6f6f6f; font-weight: 300; margin-top: 0.2em; margin-bottom: 0.5em; }
|
||||
|
||||
/* Typography resets */
|
||||
div, dl, dt, dd, ul, ol, li, h1, h2, h3, #toctitle, .sidebarblock > .content > .title, h4, h5, h6, pre, form, p, blockquote, th, td { margin: 0; padding: 0; direction: ltr; }
|
||||
|
||||
/* Default Link Styles */
|
||||
a { color: #2ba6cb; text-decoration: none; line-height: inherit; }
|
||||
a:hover, a:focus { color: #2795b6; }
|
||||
a img { border: none; }
|
||||
|
||||
/* Default paragraph styles */
|
||||
p { font-family: inherit; font-weight: normal; font-size: 1em; line-height: 1.6; margin-bottom: 1.25em; text-rendering: optimizeLegibility; }
|
||||
p aside { font-size: 0.875em; line-height: 1.35; font-style: italic; }
|
||||
|
||||
/* Default header styles */
|
||||
h1, h2, h3, #toctitle, .sidebarblock > .content > .title, h4, h5, h6 { font-family: "Helvetica Neue", "Helvetica", Helvetica, Arial, sans-serif; font-weight: bold; font-style: normal; color: #222222; text-rendering: optimizeLegibility; margin-top: 1em; margin-bottom: 0.5em; line-height: 1.2125em; }
|
||||
h1 small, h2 small, h3 small, #toctitle small, .sidebarblock > .content > .title small, h4 small, h5 small, h6 small { font-size: 60%; color: #6f6f6f; line-height: 0; }
|
||||
|
||||
h1 { font-size: 2.125em; }
|
||||
|
||||
h2 { font-size: 1.6875em; }
|
||||
|
||||
h3, #toctitle, .sidebarblock > .content > .title { font-size: 1.375em; }
|
||||
|
||||
h4 { font-size: 1.125em; }
|
||||
|
||||
h5 { font-size: 1.125em; }
|
||||
|
||||
h6 { font-size: 1em; }
|
||||
|
||||
hr { border: solid #dddddd; border-width: 1px 0 0; clear: both; margin: 1.25em 0 1.1875em; height: 0; }
|
||||
|
||||
/* Helpful Typography Defaults */
|
||||
em, i { font-style: italic; line-height: inherit; }
|
||||
|
||||
strong, b { font-weight: bold; line-height: inherit; }
|
||||
|
||||
small { font-size: 60%; line-height: inherit; }
|
||||
|
||||
code { font-family: Consolas, "Liberation Mono", Courier, monospace; font-weight: bold; color: #7f0a0c; }
|
||||
|
||||
/* Lists */
|
||||
ul, ol, dl { font-size: 1em; line-height: 1.6; margin-bottom: 1.25em; list-style-position: outside; font-family: inherit; }
|
||||
|
||||
ul, ol { margin-left: 1.5em; }
|
||||
ul.no-bullet, ol.no-bullet { margin-left: 1.5em; }
|
||||
|
||||
/* Unordered Lists */
|
||||
ul li ul, ul li ol { margin-left: 1.25em; margin-bottom: 0; font-size: 1em; /* Override nested font-size change */ }
|
||||
ul.square li ul, ul.circle li ul, ul.disc li ul { list-style: inherit; }
|
||||
ul.square { list-style-type: square; }
|
||||
ul.circle { list-style-type: circle; }
|
||||
ul.disc { list-style-type: disc; }
|
||||
ul.no-bullet { list-style: none; }
|
||||
|
||||
/* Ordered Lists */
|
||||
ol li ul, ol li ol { margin-left: 1.25em; margin-bottom: 0; }
|
||||
|
||||
/* Definition Lists */
|
||||
dl dt { margin-bottom: 0.3125em; font-weight: bold; }
|
||||
dl dd { margin-bottom: 1.25em; }
|
||||
|
||||
/* Abbreviations */
|
||||
abbr, acronym { text-transform: uppercase; font-size: 90%; color: #222222; border-bottom: 1px dotted #dddddd; cursor: help; }
|
||||
|
||||
abbr { text-transform: none; }
|
||||
|
||||
/* Blockquotes */
|
||||
blockquote { margin: 0 0 1.25em; padding: 0.5625em 1.25em 0 1.1875em; border-left: 1px solid #dddddd; }
|
||||
blockquote cite { display: block; font-size: 0.8125em; color: #555555; }
|
||||
blockquote cite:before { content: "\2014 \0020"; }
|
||||
blockquote cite a, blockquote cite a:visited { color: #555555; }
|
||||
|
||||
blockquote, blockquote p { line-height: 1.6; color: #6f6f6f; }
|
||||
|
||||
/* Microformats */
|
||||
.vcard { display: inline-block; margin: 0 0 1.25em 0; border: 1px solid #dddddd; padding: 0.625em 0.75em; }
|
||||
.vcard li { margin: 0; display: block; }
|
||||
.vcard .fn { font-weight: bold; font-size: 0.9375em; }
|
||||
|
||||
.vevent .summary { font-weight: bold; }
|
||||
.vevent abbr { cursor: auto; text-decoration: none; font-weight: bold; border: none; padding: 0 0.0625em; }
|
||||
|
||||
@media only screen and (min-width: 768px) { h1, h2, h3, #toctitle, .sidebarblock > .content > .title, h4, h5, h6 { line-height: 1.4; }
|
||||
h1 { font-size: 2.75em; }
|
||||
h2 { font-size: 2.3125em; }
|
||||
h3, #toctitle, .sidebarblock > .content > .title { font-size: 1.6875em; }
|
||||
h4 { font-size: 1.4375em; } }
|
||||
/* Tables */
|
||||
table { background: white; margin-bottom: 1.25em; border: solid 1px #dddddd; }
|
||||
table thead, table tfoot { background: whitesmoke; font-weight: bold; }
|
||||
table thead tr th, table thead tr td, table tfoot tr th, table tfoot tr td { padding: 0.5em 0.625em 0.625em; font-size: inherit; color: #222222; text-align: left; }
|
||||
table tr th, table tr td { padding: 0.5625em 0.625em; font-size: inherit; color: #222222; }
|
||||
table tr.even, table tr.alt, table tr:nth-of-type(even) { background: #f9f9f9; }
|
||||
table thead tr th, table tfoot tr th, table tbody tr td, table tr td, table tfoot tr td { display: table-cell; line-height: 1.4; }
|
||||
|
||||
body { -moz-osx-font-smoothing: grayscale; -webkit-font-smoothing: antialiased; tab-size: 4; }
|
||||
|
||||
h1, h2, h3, #toctitle, .sidebarblock > .content > .title, h4, h5, h6 { line-height: 1.4; }
|
||||
|
||||
.clearfix:before, .clearfix:after, .float-group:before, .float-group:after { content: " "; display: table; }
|
||||
.clearfix:after, .float-group:after { clear: both; }
|
||||
|
||||
*:not(pre) > code { font-size: inherit; font-style: normal !important; letter-spacing: 0; padding: 0; line-height: inherit; word-wrap: break-word; }
|
||||
*:not(pre) > code.nobreak { word-wrap: normal; }
|
||||
*:not(pre) > code.nowrap { white-space: nowrap; }
|
||||
|
||||
pre, pre > code { line-height: 1.4; color: black; font-family: monospace, serif; font-weight: normal; }
|
||||
|
||||
em em { font-style: normal; }
|
||||
|
||||
strong strong { font-weight: normal; }
|
||||
|
||||
.keyseq { color: #555555; }
|
||||
|
||||
kbd { font-family: Consolas, "Liberation Mono", Courier, monospace; display: inline-block; color: #222222; font-size: 0.65em; line-height: 1.45; background-color: #f7f7f7; border: 1px solid #ccc; -webkit-border-radius: 3px; border-radius: 3px; -webkit-box-shadow: 0 1px 0 rgba(0, 0, 0, 0.2), 0 0 0 0.1em white inset; box-shadow: 0 1px 0 rgba(0, 0, 0, 0.2), 0 0 0 0.1em white inset; margin: 0 0.15em; padding: 0.2em 0.5em; vertical-align: middle; position: relative; top: -0.1em; white-space: nowrap; }
|
||||
|
||||
.keyseq kbd:first-child { margin-left: 0; }
|
||||
|
||||
.keyseq kbd:last-child { margin-right: 0; }
|
||||
|
||||
.menuseq, .menu { color: #090909; }
|
||||
|
||||
b.button:before, b.button:after { position: relative; top: -1px; font-weight: normal; }
|
||||
|
||||
b.button:before { content: "["; padding: 0 3px 0 2px; }
|
||||
|
||||
b.button:after { content: "]"; padding: 0 2px 0 3px; }
|
||||
|
||||
#header, #content, #footnotes, #footer { width: 100%; margin-left: auto; margin-right: auto; margin-top: 0; margin-bottom: 0; max-width: 62.5em; *zoom: 1; position: relative; padding-left: 0.9375em; padding-right: 0.9375em; }
|
||||
#header:before, #header:after, #content:before, #content:after, #footnotes:before, #footnotes:after, #footer:before, #footer:after { content: " "; display: table; }
|
||||
#header:after, #content:after, #footnotes:after, #footer:after { clear: both; }
|
||||
|
||||
#content { margin-top: 1.25em; }
|
||||
|
||||
#content:before { content: none; }
|
||||
|
||||
#header > h1:first-child { color: black; margin-top: 2.25rem; margin-bottom: 0; }
|
||||
#header > h1:first-child + #toc { margin-top: 8px; border-top: 1px solid #dddddd; }
|
||||
#header > h1:only-child, body.toc2 #header > h1:nth-last-child(2) { border-bottom: 1px solid #dddddd; padding-bottom: 8px; }
|
||||
#header .details { border-bottom: 1px solid #dddddd; line-height: 1.45; padding-top: 0.25em; padding-bottom: 0.25em; padding-left: 0.25em; color: #555555; display: -ms-flexbox; display: -webkit-flex; display: flex; -ms-flex-flow: row wrap; -webkit-flex-flow: row wrap; flex-flow: row wrap; }
|
||||
#header .details span:first-child { margin-left: -0.125em; }
|
||||
#header .details span.email a { color: #6f6f6f; }
|
||||
#header .details br { display: none; }
|
||||
#header .details br + span:before { content: "\00a0\2013\00a0"; }
|
||||
#header .details br + span.author:before { content: "\00a0\22c5\00a0"; color: #6f6f6f; }
|
||||
#header .details br + span#revremark:before { content: "\00a0|\00a0"; }
|
||||
#header #revnumber { text-transform: capitalize; }
|
||||
#header #revnumber:after { content: "\00a0"; }
|
||||
|
||||
#content > h1:first-child:not([class]) { color: black; border-bottom: 1px solid #dddddd; padding-bottom: 8px; margin-top: 0; padding-top: 1rem; margin-bottom: 1.25rem; }
|
||||
|
||||
#toc { border-bottom: 1px solid #dddddd; padding-bottom: 0.5em; }
|
||||
#toc > ul { margin-left: 0.125em; }
|
||||
#toc ul.sectlevel0 > li > a { font-style: italic; }
|
||||
#toc ul.sectlevel0 ul.sectlevel1 { margin: 0.5em 0; }
|
||||
#toc ul { font-family: "Helvetica Neue", "Helvetica", Helvetica, Arial, sans-serif; list-style-type: none; }
|
||||
#toc li { line-height: 1.3334; margin-top: 0.3334em; }
|
||||
#toc a { text-decoration: none; }
|
||||
#toc a:active { text-decoration: underline; }
|
||||
|
||||
#toctitle { color: #6f6f6f; font-size: 1.2em; }
|
||||
|
||||
@media only screen and (min-width: 768px) { #toctitle { font-size: 1.375em; }
|
||||
body.toc2 { padding-left: 15em; padding-right: 0; }
|
||||
#toc.toc2 { margin-top: 0 !important; background-color: #f2f2f2; position: fixed; width: 15em; left: 0; top: 0; border-right: 1px solid #dddddd; border-top-width: 0 !important; border-bottom-width: 0 !important; z-index: 1000; padding: 1.25em 1em; height: 100%; overflow: auto; }
|
||||
#toc.toc2 #toctitle { margin-top: 0; margin-bottom: 0.8rem; font-size: 1.2em; }
|
||||
#toc.toc2 > ul { font-size: 0.9em; margin-bottom: 0; }
|
||||
#toc.toc2 ul ul { margin-left: 0; padding-left: 1em; }
|
||||
#toc.toc2 ul.sectlevel0 ul.sectlevel1 { padding-left: 0; margin-top: 0.5em; margin-bottom: 0.5em; }
|
||||
body.toc2.toc-right { padding-left: 0; padding-right: 15em; }
|
||||
body.toc2.toc-right #toc.toc2 { border-right-width: 0; border-left: 1px solid #dddddd; left: auto; right: 0; } }
|
||||
@media only screen and (min-width: 1280px) { body.toc2 { padding-left: 20em; padding-right: 0; }
|
||||
#toc.toc2 { width: 20em; }
|
||||
#toc.toc2 #toctitle { font-size: 1.375em; }
|
||||
#toc.toc2 > ul { font-size: 0.95em; }
|
||||
#toc.toc2 ul ul { padding-left: 1.25em; }
|
||||
body.toc2.toc-right { padding-left: 0; padding-right: 20em; } }
|
||||
#content #toc { border-style: solid; border-width: 1px; border-color: #d9d9d9; margin-bottom: 1.25em; padding: 1.25em; background: #f2f2f2; -webkit-border-radius: 0; border-radius: 0; }
|
||||
#content #toc > :first-child { margin-top: 0; }
|
||||
#content #toc > :last-child { margin-bottom: 0; }
|
||||
|
||||
#footer { max-width: 100%; background-color: #222222; padding: 1.25em; }
|
||||
|
||||
#footer-text { color: #dddddd; line-height: 1.44; }
|
||||
|
||||
.sect1 { padding-bottom: 0.625em; }
|
||||
|
||||
@media only screen and (min-width: 768px) { .sect1 { padding-bottom: 1.25em; } }
|
||||
.sect1 + .sect1 { border-top: 1px solid #dddddd; }
|
||||
|
||||
#content h1 > a.anchor, h2 > a.anchor, h3 > a.anchor, #toctitle > a.anchor, .sidebarblock > .content > .title > a.anchor, h4 > a.anchor, h5 > a.anchor, h6 > a.anchor { position: absolute; z-index: 1001; width: 1.5ex; margin-left: -1.5ex; display: block; text-decoration: none !important; visibility: hidden; text-align: center; font-weight: normal; }
|
||||
#content h1 > a.anchor:before, h2 > a.anchor:before, h3 > a.anchor:before, #toctitle > a.anchor:before, .sidebarblock > .content > .title > a.anchor:before, h4 > a.anchor:before, h5 > a.anchor:before, h6 > a.anchor:before { content: "\00A7"; font-size: 0.85em; display: block; padding-top: 0.1em; }
|
||||
#content h1:hover > a.anchor, #content h1 > a.anchor:hover, h2:hover > a.anchor, h2 > a.anchor:hover, h3:hover > a.anchor, #toctitle:hover > a.anchor, .sidebarblock > .content > .title:hover > a.anchor, h3 > a.anchor:hover, #toctitle > a.anchor:hover, .sidebarblock > .content > .title > a.anchor:hover, h4:hover > a.anchor, h4 > a.anchor:hover, h5:hover > a.anchor, h5 > a.anchor:hover, h6:hover > a.anchor, h6 > a.anchor:hover { visibility: visible; }
|
||||
#content h1 > a.link, h2 > a.link, h3 > a.link, #toctitle > a.link, .sidebarblock > .content > .title > a.link, h4 > a.link, h5 > a.link, h6 > a.link { color: #222222; text-decoration: none; }
|
||||
#content h1 > a.link:hover, h2 > a.link:hover, h3 > a.link:hover, #toctitle > a.link:hover, .sidebarblock > .content > .title > a.link:hover, h4 > a.link:hover, h5 > a.link:hover, h6 > a.link:hover { color: #151515; }
|
||||
|
||||
.audioblock, .imageblock, .literalblock, .listingblock, .stemblock, .videoblock { margin-bottom: 1.25em; }
|
||||
|
||||
.admonitionblock td.content > .title, .audioblock > .title, .exampleblock > .title, .imageblock > .title, .listingblock > .title, .literalblock > .title, .stemblock > .title, .openblock > .title, .paragraph > .title, .quoteblock > .title, table.tableblock > .title, .verseblock > .title, .videoblock > .title, .dlist > .title, .olist > .title, .ulist > .title, .qlist > .title, .hdlist > .title { text-rendering: optimizeLegibility; text-align: left; }
|
||||
|
||||
table.tableblock > caption.title { white-space: nowrap; overflow: visible; max-width: 0; }
|
||||
|
||||
.paragraph.lead > p, #preamble > .sectionbody > .paragraph:first-of-type p { color: black; }
|
||||
|
||||
table.tableblock #preamble > .sectionbody > .paragraph:first-of-type p { font-size: inherit; }
|
||||
|
||||
.admonitionblock > table { border-collapse: separate; border: 0; background: none; width: 100%; }
|
||||
.admonitionblock > table td.icon { text-align: center; width: 80px; }
|
||||
.admonitionblock > table td.icon img { max-width: initial; }
|
||||
.admonitionblock > table td.icon .title { font-weight: bold; font-family: "Helvetica Neue", "Helvetica", Helvetica, Arial, sans-serif; text-transform: uppercase; }
|
||||
.admonitionblock > table td.content { padding-left: 1.125em; padding-right: 1.25em; border-left: 1px solid #dddddd; color: #555555; }
|
||||
.admonitionblock > table td.content > :last-child > :last-child { margin-bottom: 0; }
|
||||
|
||||
.exampleblock > .content { border-style: solid; border-width: 1px; border-color: #e6e6e6; margin-bottom: 1.25em; padding: 1.25em; background: white; -webkit-border-radius: 0; border-radius: 0; }
|
||||
.exampleblock > .content > :first-child { margin-top: 0; }
|
||||
.exampleblock > .content > :last-child { margin-bottom: 0; }
|
||||
|
||||
.sidebarblock { border-style: solid; border-width: 1px; border-color: #d9d9d9; margin-bottom: 1.25em; padding: 1.25em; background: #f2f2f2; -webkit-border-radius: 0; border-radius: 0; }
|
||||
.sidebarblock > :first-child { margin-top: 0; }
|
||||
.sidebarblock > :last-child { margin-bottom: 0; }
|
||||
.sidebarblock > .content > .title { color: #6f6f6f; margin-top: 0; }
|
||||
|
||||
.exampleblock > .content > :last-child > :last-child, .exampleblock > .content .olist > ol > li:last-child > :last-child, .exampleblock > .content .ulist > ul > li:last-child > :last-child, .exampleblock > .content .qlist > ol > li:last-child > :last-child, .sidebarblock > .content > :last-child > :last-child, .sidebarblock > .content .olist > ol > li:last-child > :last-child, .sidebarblock > .content .ulist > ul > li:last-child > :last-child, .sidebarblock > .content .qlist > ol > li:last-child > :last-child { margin-bottom: 0; }
|
||||
|
||||
.literalblock pre, .listingblock pre:not(.highlight), .listingblock pre[class="highlight"], .listingblock pre[class^="highlight "], .listingblock pre.CodeRay, .listingblock pre.prettyprint { background: #eeeeee; }
|
||||
.sidebarblock .literalblock pre, .sidebarblock .listingblock pre:not(.highlight), .sidebarblock .listingblock pre[class="highlight"], .sidebarblock .listingblock pre[class^="highlight "], .sidebarblock .listingblock pre.CodeRay, .sidebarblock .listingblock pre.prettyprint { background: #f2f1f1; }
|
||||
|
||||
.literalblock pre, .literalblock pre[class], .listingblock pre, .listingblock pre[class] { border: 1px solid #cccccc; -webkit-border-radius: 0; border-radius: 0; word-wrap: break-word; padding: 0.8em 0.8em 0.65em 0.8em; font-size: 0.8125em; }
|
||||
.literalblock pre.nowrap, .literalblock pre[class].nowrap, .listingblock pre.nowrap, .listingblock pre[class].nowrap { overflow-x: auto; white-space: pre; word-wrap: normal; }
|
||||
@media only screen and (min-width: 768px) { .literalblock pre, .literalblock pre[class], .listingblock pre, .listingblock pre[class] { font-size: 0.90625em; } }
|
||||
@media only screen and (min-width: 1280px) { .literalblock pre, .literalblock pre[class], .listingblock pre, .listingblock pre[class] { font-size: 1em; } }
|
||||
|
||||
.literalblock.output pre { color: #eeeeee; background-color: black; }
|
||||
|
||||
.listingblock pre.highlightjs { padding: 0; }
|
||||
.listingblock pre.highlightjs > code { padding: 0.8em 0.8em 0.65em 0.8em; -webkit-border-radius: 0; border-radius: 0; }
|
||||
|
||||
.listingblock > .content { position: relative; }
|
||||
|
||||
.listingblock code[data-lang]:before { display: none; content: attr(data-lang); position: absolute; font-size: 0.75em; top: 0.425rem; right: 0.5rem; line-height: 1; text-transform: uppercase; color: #999; }
|
||||
|
||||
.listingblock:hover code[data-lang]:before { display: block; }
|
||||
|
||||
.listingblock.terminal pre .command:before { content: attr(data-prompt); padding-right: 0.5em; color: #999; }
|
||||
|
||||
.listingblock.terminal pre .command:not([data-prompt]):before { content: "$"; }
|
||||
|
||||
table.pyhltable { border-collapse: separate; border: 0; margin-bottom: 0; background: none; }
|
||||
|
||||
table.pyhltable td { vertical-align: top; padding-top: 0; padding-bottom: 0; line-height: 1.4; }
|
||||
|
||||
table.pyhltable td.code { padding-left: .75em; padding-right: 0; }
|
||||
|
||||
pre.pygments .lineno, table.pyhltable td:not(.code) { color: #999; padding-left: 0; padding-right: .5em; border-right: 1px solid #dddddd; }
|
||||
|
||||
pre.pygments .lineno { display: inline-block; margin-right: .25em; }
|
||||
|
||||
table.pyhltable .linenodiv { background: none !important; padding-right: 0 !important; }
|
||||
|
||||
.quoteblock { margin: 0 1em 1.25em 1.5em; display: table; }
|
||||
.quoteblock > .title { margin-left: -1.5em; margin-bottom: 0.75em; }
|
||||
.quoteblock blockquote, .quoteblock blockquote p { color: #6f6f6f; font-size: 1.15rem; line-height: 1.75; word-spacing: 0.1em; letter-spacing: 0; font-style: italic; text-align: justify; }
|
||||
.quoteblock blockquote { margin: 0; padding: 0; border: 0; }
|
||||
.quoteblock blockquote:before { content: "\201c"; float: left; font-size: 2.75em; font-weight: bold; line-height: 0.6em; margin-left: -0.6em; color: #6f6f6f; text-shadow: 0 1px 2px rgba(0, 0, 0, 0.1); }
|
||||
.quoteblock blockquote > .paragraph:last-child p { margin-bottom: 0; }
|
||||
.quoteblock .attribution { margin-top: 0.5em; margin-right: 0.5ex; text-align: right; }
|
||||
.quoteblock .quoteblock { margin-left: 0; margin-right: 0; padding: 0.5em 0; border-left: 3px solid #555555; }
|
||||
.quoteblock .quoteblock blockquote { padding: 0 0 0 0.75em; }
|
||||
.quoteblock .quoteblock blockquote:before { display: none; }
|
||||
|
||||
.verseblock { margin: 0 1em 1.25em 1em; }
|
||||
.verseblock pre { font-family: "Open Sans", "DejaVu Sans", sans; font-size: 1.15rem; color: #6f6f6f; font-weight: 300; text-rendering: optimizeLegibility; }
|
||||
.verseblock pre strong { font-weight: 400; }
|
||||
.verseblock .attribution { margin-top: 1.25rem; margin-left: 0.5ex; }
|
||||
|
||||
.quoteblock .attribution, .verseblock .attribution { font-size: 0.8125em; line-height: 1.45; font-style: italic; }
|
||||
.quoteblock .attribution br, .verseblock .attribution br { display: none; }
|
||||
.quoteblock .attribution cite, .verseblock .attribution cite { display: block; letter-spacing: -0.025em; color: #555555; }
|
||||
|
||||
.quoteblock.abstract { margin: 0 0 1.25em 0; display: block; }
|
||||
.quoteblock.abstract blockquote, .quoteblock.abstract blockquote p { text-align: left; word-spacing: 0; }
|
||||
.quoteblock.abstract blockquote:before, .quoteblock.abstract blockquote p:first-of-type:before { display: none; }
|
||||
|
||||
table.tableblock { max-width: 100%; border-collapse: separate; }
|
||||
table.tableblock td > .paragraph:last-child p > p:last-child, table.tableblock th > p:last-child, table.tableblock td > p:last-child { margin-bottom: 0; }
|
||||
|
||||
table.tableblock, th.tableblock, td.tableblock { border: 0 solid #dddddd; }
|
||||
|
||||
table.grid-all th.tableblock, table.grid-all td.tableblock { border-width: 0 1px 1px 0; }
|
||||
|
||||
table.grid-all tfoot > tr > th.tableblock, table.grid-all tfoot > tr > td.tableblock { border-width: 1px 1px 0 0; }
|
||||
|
||||
table.grid-cols th.tableblock, table.grid-cols td.tableblock { border-width: 0 1px 0 0; }
|
||||
|
||||
table.grid-all * > tr > .tableblock:last-child, table.grid-cols * > tr > .tableblock:last-child { border-right-width: 0; }
|
||||
|
||||
table.grid-rows th.tableblock, table.grid-rows td.tableblock { border-width: 0 0 1px 0; }
|
||||
|
||||
table.grid-all tbody > tr:last-child > th.tableblock, table.grid-all tbody > tr:last-child > td.tableblock, table.grid-all thead:last-child > tr > th.tableblock, table.grid-rows tbody > tr:last-child > th.tableblock, table.grid-rows tbody > tr:last-child > td.tableblock, table.grid-rows thead:last-child > tr > th.tableblock { border-bottom-width: 0; }
|
||||
|
||||
table.grid-rows tfoot > tr > th.tableblock, table.grid-rows tfoot > tr > td.tableblock { border-width: 1px 0 0 0; }
|
||||
|
||||
table.frame-all { border-width: 1px; }
|
||||
|
||||
table.frame-sides { border-width: 0 1px; }
|
||||
|
||||
table.frame-topbot { border-width: 1px 0; }
|
||||
|
||||
th.halign-left, td.halign-left { text-align: left; }
|
||||
|
||||
th.halign-right, td.halign-right { text-align: right; }
|
||||
|
||||
th.halign-center, td.halign-center { text-align: center; }
|
||||
|
||||
th.valign-top, td.valign-top { vertical-align: top; }
|
||||
|
||||
th.valign-bottom, td.valign-bottom { vertical-align: bottom; }
|
||||
|
||||
th.valign-middle, td.valign-middle { vertical-align: middle; }
|
||||
|
||||
table thead th, table tfoot th { font-weight: bold; }
|
||||
|
||||
tbody tr th { display: table-cell; line-height: 1.4; background: whitesmoke; }
|
||||
|
||||
tbody tr th, tbody tr th p, tfoot tr th, tfoot tr th p { color: #222222; font-weight: bold; }
|
||||
|
||||
p.tableblock > code:only-child { background: none; padding: 0; }
|
||||
|
||||
p.tableblock { font-size: 1em; }
|
||||
|
||||
td > div.verse { white-space: pre; }
|
||||
|
||||
ol { margin-left: 1.75em; }
|
||||
|
||||
ul li ol { margin-left: 1.5em; }
|
||||
|
||||
dl dd { margin-left: 1.125em; }
|
||||
|
||||
dl dd:last-child, dl dd:last-child > :last-child { margin-bottom: 0; }
|
||||
|
||||
ol > li p, ul > li p, ul dd, ol dd, .olist .olist, .ulist .ulist, .ulist .olist, .olist .ulist { margin-bottom: 0.625em; }
|
||||
|
||||
ul.unstyled, ol.unnumbered, ul.checklist, ul.none { list-style-type: none; }
|
||||
|
||||
ul.unstyled, ol.unnumbered, ul.checklist { margin-left: 0.625em; }
|
||||
|
||||
ul.checklist li > p:first-child > .fa-square-o:first-child, ul.checklist li > p:first-child > .fa-check-square-o:first-child { width: 1em; font-size: 0.85em; }
|
||||
|
||||
ul.checklist li > p:first-child > input[type="checkbox"]:first-child { width: 1em; position: relative; top: 1px; }
|
||||
|
||||
ul.inline { margin: 0 auto 0.625em auto; margin-left: -1.375em; margin-right: 0; padding: 0; list-style: none; overflow: hidden; }
|
||||
ul.inline > li { list-style: none; float: left; margin-left: 1.375em; display: block; }
|
||||
ul.inline > li > * { display: block; }
|
||||
|
||||
.unstyled dl dt { font-weight: normal; font-style: normal; }
|
||||
|
||||
ol.arabic { list-style-type: decimal; }
|
||||
|
||||
ol.decimal { list-style-type: decimal-leading-zero; }
|
||||
|
||||
ol.loweralpha { list-style-type: lower-alpha; }
|
||||
|
||||
ol.upperalpha { list-style-type: upper-alpha; }
|
||||
|
||||
ol.lowerroman { list-style-type: lower-roman; }
|
||||
|
||||
ol.upperroman { list-style-type: upper-roman; }
|
||||
|
||||
ol.lowergreek { list-style-type: lower-greek; }
|
||||
|
||||
.hdlist > table, .colist > table { border: 0; background: none; }
|
||||
.hdlist > table > tbody > tr, .colist > table > tbody > tr { background: none; }
|
||||
|
||||
td.hdlist1, td.hdlist2 { vertical-align: top; padding: 0 0.625em; }
|
||||
|
||||
td.hdlist1 { font-weight: bold; padding-bottom: 1.25em; }
|
||||
|
||||
.literalblock + .colist, .listingblock + .colist { margin-top: -0.5em; }
|
||||
|
||||
.colist > table tr > td:first-of-type { padding: 0 0.75em; line-height: 1; }
|
||||
.colist > table tr > td:first-of-type img { max-width: initial; }
|
||||
.colist > table tr > td:last-of-type { padding: 0.25em 0; }
|
||||
|
||||
.thumb, .th { line-height: 0; display: inline-block; border: solid 4px white; -webkit-box-shadow: 0 0 0 1px #dddddd; box-shadow: 0 0 0 1px #dddddd; }
|
||||
|
||||
.imageblock.left, .imageblock[style*="float: left"] { margin: 0.25em 0.625em 1.25em 0; }
|
||||
.imageblock.right, .imageblock[style*="float: right"] { margin: 0.25em 0 1.25em 0.625em; }
|
||||
.imageblock > .title { margin-bottom: 0; }
|
||||
.imageblock.thumb, .imageblock.th { border-width: 6px; }
|
||||
.imageblock.thumb > .title, .imageblock.th > .title { padding: 0 0.125em; }
|
||||
|
||||
.image.left, .image.right { margin-top: 0.25em; margin-bottom: 0.25em; display: inline-block; line-height: 0; }
|
||||
.image.left { margin-right: 0.625em; }
|
||||
.image.right { margin-left: 0.625em; }
|
||||
|
||||
a.image { text-decoration: none; display: inline-block; }
|
||||
a.image object { pointer-events: none; }
|
||||
|
||||
sup.footnote, sup.footnoteref { font-size: 0.875em; position: static; vertical-align: super; }
|
||||
sup.footnote a, sup.footnoteref a { text-decoration: none; }
|
||||
sup.footnote a:active, sup.footnoteref a:active { text-decoration: underline; }
|
||||
|
||||
#footnotes { padding-top: 0.75em; padding-bottom: 0.75em; margin-bottom: 0.625em; }
|
||||
#footnotes hr { width: 20%; min-width: 6.25em; margin: -0.25em 0 0.75em 0; border-width: 1px 0 0 0; }
|
||||
#footnotes .footnote { padding: 0 0.375em 0 0.225em; line-height: 1.3334; font-size: 0.875em; margin-left: 1.2em; text-indent: -1.05em; margin-bottom: 0.2em; }
|
||||
#footnotes .footnote a:first-of-type { font-weight: bold; text-decoration: none; }
|
||||
#footnotes .footnote:last-of-type { margin-bottom: 0; }
|
||||
#content #footnotes { margin-top: -0.625em; margin-bottom: 0; padding: 0.75em 0; }
|
||||
|
||||
.gist .file-data > table { border: 0; background: #fff; width: 100%; margin-bottom: 0; }
|
||||
.gist .file-data > table td.line-data { width: 99%; }
|
||||
|
||||
div.unbreakable { page-break-inside: avoid; }
|
||||
|
||||
.big { font-size: larger; }
|
||||
|
||||
.small { font-size: smaller; }
|
||||
|
||||
.underline { text-decoration: underline; }
|
||||
|
||||
.overline { text-decoration: overline; }
|
||||
|
||||
.line-through { text-decoration: line-through; }
|
||||
|
||||
.aqua { color: #00bfbf; }
|
||||
|
||||
.aqua-background { background-color: #00fafa; }
|
||||
|
||||
.black { color: black; }
|
||||
|
||||
.black-background { background-color: black; }
|
||||
|
||||
.blue { color: #0000bf; }
|
||||
|
||||
.blue-background { background-color: #0000fa; }
|
||||
|
||||
.fuchsia { color: #bf00bf; }
|
||||
|
||||
.fuchsia-background { background-color: #fa00fa; }
|
||||
|
||||
.gray { color: #606060; }
|
||||
|
||||
.gray-background { background-color: #7d7d7d; }
|
||||
|
||||
.green { color: #006000; }
|
||||
|
||||
.green-background { background-color: #007d00; }
|
||||
|
||||
.lime { color: #00bf00; }
|
||||
|
||||
.lime-background { background-color: #00fa00; }
|
||||
|
||||
.maroon { color: #600000; }
|
||||
|
||||
.maroon-background { background-color: #7d0000; }
|
||||
|
||||
.navy { color: #000060; }
|
||||
|
||||
.navy-background { background-color: #00007d; }
|
||||
|
||||
.olive { color: #606000; }
|
||||
|
||||
.olive-background { background-color: #7d7d00; }
|
||||
|
||||
.purple { color: #600060; }
|
||||
|
||||
.purple-background { background-color: #7d007d; }
|
||||
|
||||
.red { color: #bf0000; }
|
||||
|
||||
.red-background { background-color: #fa0000; }
|
||||
|
||||
.silver { color: #909090; }
|
||||
|
||||
.silver-background { background-color: #bcbcbc; }
|
||||
|
||||
.teal { color: #006060; }
|
||||
|
||||
.teal-background { background-color: #007d7d; }
|
||||
|
||||
.white { color: #bfbfbf; }
|
||||
|
||||
.white-background { background-color: #fafafa; }
|
||||
|
||||
.yellow { color: #bfbf00; }
|
||||
|
||||
.yellow-background { background-color: #fafa00; }
|
||||
|
||||
span.icon > .fa { cursor: default; }
|
||||
|
||||
.admonitionblock td.icon [class^="fa icon-"] { font-size: 2.5em; text-shadow: 1px 1px 2px rgba(0, 0, 0, 0.5); cursor: default; }
|
||||
.admonitionblock td.icon .icon-note:before { content: "\f05a"; color: #207c98; }
|
||||
.admonitionblock td.icon .icon-tip:before { content: "\f0eb"; text-shadow: 1px 1px 2px rgba(155, 155, 0, 0.8); color: #111; }
|
||||
.admonitionblock td.icon .icon-warning:before { content: "\f071"; color: #bf6900; }
|
||||
.admonitionblock td.icon .icon-caution:before { content: "\f06d"; color: #bf3400; }
|
||||
.admonitionblock td.icon .icon-important:before { content: "\f06a"; color: #bf0000; }
|
||||
|
||||
.conum[data-value] { display: inline-block; color: #fff !important; background-color: #222222; -webkit-border-radius: 100px; border-radius: 100px; text-align: center; font-size: 0.75em; width: 1.67em; height: 1.67em; line-height: 1.67em; font-family: "Open Sans", "DejaVu Sans", sans-serif; font-style: normal; font-weight: bold; }
|
||||
.conum[data-value] * { color: #fff !important; }
|
||||
.conum[data-value] + b { display: none; }
|
||||
.conum[data-value]:after { content: attr(data-value); }
|
||||
pre .conum[data-value] { position: relative; top: -0.125em; }
|
||||
|
||||
b.conum * { color: inherit !important; }
|
||||
|
||||
.conum:not([data-value]):empty { display: none; }
|
||||
|
||||
.literalblock pre, .listingblock pre { background: #eeeeee; }
|
|
@@ -1,4 +0,0 @@
= Elasticsearch Java client
Jörg Prante
Version 5.4.0.0
|
|
@@ -1,925 +0,0 @@
|
|||
package org.xbib.elasticsearch.client;
|
||||
|
||||
import com.carrotsearch.hppc.cursors.ObjectCursor;
|
||||
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.ElasticsearchTimeoutException;
|
||||
import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction;
|
||||
import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
|
||||
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
|
||||
import org.elasticsearch.action.admin.cluster.state.ClusterStateAction;
|
||||
import org.elasticsearch.action.admin.cluster.state.ClusterStateRequestBuilder;
|
||||
import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
|
||||
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesAction;
|
||||
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequestBuilder;
|
||||
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesAction;
|
||||
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequestBuilder;
|
||||
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse;
|
||||
import org.elasticsearch.action.admin.indices.create.CreateIndexAction;
|
||||
import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
|
||||
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
|
||||
import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction;
|
||||
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequestBuilder;
|
||||
import org.elasticsearch.action.admin.indices.delete.DeleteIndexResponse;
|
||||
import org.elasticsearch.action.admin.indices.flush.FlushAction;
|
||||
import org.elasticsearch.action.admin.indices.flush.FlushRequest;
|
||||
import org.elasticsearch.action.admin.indices.get.GetIndexAction;
|
||||
import org.elasticsearch.action.admin.indices.get.GetIndexRequestBuilder;
|
||||
import org.elasticsearch.action.admin.indices.get.GetIndexResponse;
|
||||
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingAction;
|
||||
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
|
||||
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequestBuilder;
|
||||
import org.elasticsearch.action.admin.indices.recovery.RecoveryAction;
|
||||
import org.elasticsearch.action.admin.indices.recovery.RecoveryRequest;
|
||||
import org.elasticsearch.action.admin.indices.recovery.RecoveryResponse;
|
||||
import org.elasticsearch.action.admin.indices.refresh.RefreshAction;
|
||||
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
|
||||
import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsAction;
|
||||
import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest;
|
||||
import org.elasticsearch.action.bulk.BulkItemResponse;
|
||||
import org.elasticsearch.action.bulk.BulkRequest;
|
||||
import org.elasticsearch.action.bulk.BulkResponse;
|
||||
import org.elasticsearch.action.delete.DeleteRequest;
|
||||
import org.elasticsearch.action.index.IndexRequest;
|
||||
import org.elasticsearch.action.search.SearchAction;
|
||||
import org.elasticsearch.action.search.SearchRequestBuilder;
|
||||
import org.elasticsearch.action.search.SearchResponse;
|
||||
import org.elasticsearch.action.update.UpdateRequest;
|
||||
import org.elasticsearch.client.ElasticsearchClient;
|
||||
import org.elasticsearch.client.transport.NoNodeAvailableException;
|
||||
import org.elasticsearch.cluster.health.ClusterHealthStatus;
|
||||
import org.elasticsearch.cluster.metadata.AliasMetaData;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.io.Streams;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.ByteSizeValue;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.search.SearchHit;
|
||||
import org.elasticsearch.search.sort.SortBuilder;
|
||||
import org.elasticsearch.search.sort.SortBuilders;
|
||||
import org.elasticsearch.search.sort.SortOrder;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.InputStreamReader;
|
||||
import java.io.StringWriter;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.LinkedList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.TreeSet;
|
||||
import java.util.concurrent.ExecutionException;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
public abstract class AbstractClient implements ClientMethods {
|
||||
|
||||
private static final Logger logger = LogManager.getLogger(AbstractClient.class.getName());
|
||||
|
||||
private Settings.Builder settingsBuilder;
|
||||
|
||||
private Settings settings;
|
||||
|
||||
private Map<String, String> mappings;
|
||||
|
||||
private ElasticsearchClient client;
|
||||
|
||||
protected BulkProcessor bulkProcessor;
|
||||
|
||||
protected BulkMetric metric;
|
||||
|
||||
protected BulkControl control;
|
||||
|
||||
protected Throwable throwable;
|
||||
|
||||
protected boolean closed;
|
||||
|
||||
protected int maxActionsPerRequest = DEFAULT_MAX_ACTIONS_PER_REQUEST;
|
||||
|
||||
protected int maxConcurrentRequests = DEFAULT_MAX_CONCURRENT_REQUESTS;
|
||||
|
||||
protected String maxVolumePerRequest = DEFAULT_MAX_VOLUME_PER_REQUEST;
|
||||
|
||||
protected String flushIngestInterval = DEFAULT_FLUSH_INTERVAL;
|
||||
|
||||
@Override
|
||||
public AbstractClient init(ElasticsearchClient client, Settings settings,
|
||||
final BulkMetric metric, final BulkControl control) {
|
||||
this.client = client;
|
||||
this.mappings = new HashMap<>();
|
||||
if (settings == null) {
|
||||
settings = findSettings();
|
||||
}
|
||||
if (client == null && settings != null) {
|
||||
try {
|
||||
this.client = createClient(settings);
|
||||
} catch (IOException e) {
|
||||
logger.error(e.getMessage(), e);
|
||||
}
|
||||
}
|
||||
this.metric = metric;
|
||||
this.control = control;
|
||||
if (metric != null) {
|
||||
metric.start();
|
||||
}
|
||||
resetSettings();
|
||||
BulkProcessor.Listener listener = new BulkProcessor.Listener() {
|
||||
|
||||
private final Logger logger = LogManager.getLogger(getClass().getName() + ".Listener");
|
||||
|
||||
@Override
|
||||
public void beforeBulk(long executionId, BulkRequest request) {
|
||||
long l = -1;
|
||||
if (metric != null) {
|
||||
metric.getCurrentIngest().inc();
|
||||
l = metric.getCurrentIngest().getCount();
|
||||
int n = request.numberOfActions();
|
||||
metric.getSubmitted().inc(n);
|
||||
metric.getCurrentIngestNumDocs().inc(n);
|
||||
metric.getTotalIngestSizeInBytes().inc(request.estimatedSizeInBytes());
|
||||
}
|
||||
logger.debug("before bulk [{}] [actions={}] [bytes={}] [concurrent requests={}]",
|
||||
executionId,
|
||||
request.numberOfActions(),
|
||||
request.estimatedSizeInBytes(),
|
||||
l);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void afterBulk(long executionId, BulkRequest request, BulkResponse response) {
|
||||
long l = -1;
|
||||
if (metric != null) {
|
||||
metric.getCurrentIngest().dec();
|
||||
l = metric.getCurrentIngest().getCount();
|
||||
metric.getSucceeded().inc(response.getItems().length);
|
||||
}
|
||||
int n = 0;
|
||||
for (BulkItemResponse itemResponse : response.getItems()) {
|
||||
if (metric != null) {
|
||||
metric.getCurrentIngest().dec(itemResponse.getIndex(), itemResponse.getType(), itemResponse.getId());
|
||||
}
|
||||
if (itemResponse.isFailed()) {
|
||||
n++;
|
||||
if (metric != null) {
|
||||
metric.getSucceeded().dec(1);
|
||||
metric.getFailed().inc(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (metric != null) {
|
||||
logger.debug("after bulk [{}] [succeeded={}] [failed={}] [{}ms] {} concurrent requests",
|
||||
executionId,
|
||||
metric.getSucceeded().getCount(),
|
||||
metric.getFailed().getCount(),
|
||||
response.getTook().millis(),
|
||||
l);
|
||||
}
|
||||
if (n > 0) {
|
||||
logger.error("bulk [{}] failed with {} failed items, failure message = {}",
|
||||
executionId, n, response.buildFailureMessage());
|
||||
} else {
|
||||
if (metric != null) {
|
||||
metric.getCurrentIngestNumDocs().dec(response.getItems().length);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void afterBulk(long executionId, BulkRequest request, Throwable failure) {
|
||||
if (metric != null) {
|
||||
metric.getCurrentIngest().dec();
|
||||
}
|
||||
throwable = failure;
|
||||
closed = true;
|
||||
logger.error("after bulk [" + executionId + "] error", failure);
|
||||
}
|
||||
};
|
||||
if (this.client != null) {
|
||||
BulkProcessor.Builder builder = BulkProcessor.builder(this.client, listener)
|
||||
.setBulkActions(maxActionsPerRequest)
|
||||
.setConcurrentRequests(maxConcurrentRequests)
|
||||
.setFlushInterval(TimeValue.parseTimeValue(flushIngestInterval, "flushIngestInterval"));
|
||||
if (maxVolumePerRequest != null) {
|
||||
builder.setBulkSize(ByteSizeValue.parseBytesSizeValue(maxVolumePerRequest, "maxVolumePerRequest"));
|
||||
}
|
||||
this.bulkProcessor = builder.build();
|
||||
}
|
||||
this.closed = false;
|
||||
return this;
|
||||
}
|
||||
|
||||
protected abstract ElasticsearchClient createClient(Settings settings) throws IOException;
|
||||
|
||||
@Override
|
||||
public ElasticsearchClient client() {
|
||||
return client;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ClientMethods maxActionsPerRequest(int maxActionsPerRequest) {
|
||||
this.maxActionsPerRequest = maxActionsPerRequest;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ClientMethods maxConcurrentRequests(int maxConcurrentRequests) {
|
||||
this.maxConcurrentRequests = maxConcurrentRequests;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ClientMethods maxVolumePerRequest(String maxVolumePerRequest) {
|
||||
this.maxVolumePerRequest = maxVolumePerRequest;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ClientMethods flushIngestInterval(String flushIngestInterval) {
|
||||
this.flushIngestInterval = flushIngestInterval;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public BulkMetric getMetric() {
|
||||
return metric;
|
||||
}
|
||||
|
||||
public void resetSettings() {
|
||||
this.settingsBuilder = Settings.builder();
|
||||
settings = null;
|
||||
mappings = new HashMap<>();
|
||||
}
|
||||
|
||||
public void setSettings(Settings settings) {
|
||||
this.settings = settings;
|
||||
}
|
||||
|
||||
public void setting(String key, String value) {
|
||||
if (settingsBuilder == null) {
|
||||
settingsBuilder = Settings.builder();
|
||||
}
|
||||
settingsBuilder.put(key, value);
|
||||
}
|
||||
|
||||
public void setting(String key, Boolean value) {
|
||||
if (settingsBuilder == null) {
|
||||
settingsBuilder = Settings.builder();
|
||||
}
|
||||
settingsBuilder.put(key, value);
|
||||
}
|
||||
|
||||
public void setting(String key, Integer value) {
|
||||
if (settingsBuilder == null) {
|
||||
settingsBuilder = Settings.builder();
|
||||
}
|
||||
settingsBuilder.put(key, value);
|
||||
}
|
||||
|
||||
public void setting(InputStream in) throws IOException {
|
||||
settingsBuilder = Settings.builder().loadFromStream(".json", in, true);
|
||||
}
|
||||
|
||||
public Settings.Builder settingsBuilder() {
|
||||
return settingsBuilder != null ? settingsBuilder : Settings.builder();
|
||||
}
|
||||
|
||||
public Settings settings() {
|
||||
if (settings != null) {
|
||||
return settings;
|
||||
}
|
||||
if (settingsBuilder == null) {
|
||||
settingsBuilder = Settings.builder();
|
||||
}
|
||||
return settingsBuilder.build();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void mapping(String type, String mapping) throws IOException {
|
||||
mappings.put(type, mapping);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void mapping(String type, InputStream in) throws IOException {
|
||||
if (type == null) {
|
||||
return;
|
||||
}
|
||||
StringWriter sw = new StringWriter();
|
||||
Streams.copy(new InputStreamReader(in, StandardCharsets.UTF_8), sw);
|
||||
mappings.put(type, sw.toString());
|
||||
}
|
||||
|
||||
@Override
|
||||
public ClientMethods index(String index, String type, String id, boolean create, BytesReference source) {
|
||||
return indexRequest(new IndexRequest(index).type(type).id(id).create(create).source(source, XContentType.JSON));
|
||||
}
|
||||
|
||||
@Override
|
||||
public ClientMethods index(String index, String type, String id, boolean create, String source) {
|
||||
return indexRequest(new IndexRequest(index).type(type).id(id).create(create).source(source, XContentType.JSON));
|
||||
}
|
||||
|
||||
@Override
|
||||
public ClientMethods indexRequest(IndexRequest indexRequest) {
|
||||
if (closed) {
|
||||
throwClose();
|
||||
}
|
||||
try {
|
||||
if (metric != null) {
|
||||
metric.getCurrentIngest().inc(indexRequest.index(), indexRequest.type(), indexRequest.id());
|
||||
}
|
||||
bulkProcessor.add(indexRequest);
|
||||
} catch (Exception e) {
|
||||
throwable = e;
|
||||
closed = true;
|
||||
logger.error("bulk add of index request failed: " + e.getMessage(), e);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ClientMethods delete(String index, String type, String id) {
|
||||
return deleteRequest(new DeleteRequest(index).type(type).id(id));
|
||||
}
|
||||
|
||||
@Override
|
||||
public ClientMethods deleteRequest(DeleteRequest deleteRequest) {
|
||||
if (closed) {
|
||||
throwClose();
|
||||
}
|
||||
try {
|
||||
if (metric != null) {
|
||||
metric.getCurrentIngest().inc(deleteRequest.index(), deleteRequest.type(), deleteRequest.id());
|
||||
}
|
||||
bulkProcessor.add(deleteRequest);
|
||||
} catch (Exception e) {
|
||||
throwable = e;
|
||||
closed = true;
|
||||
logger.error("bulk add of delete failed: " + e.getMessage(), e);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ClientMethods update(String index, String type, String id, BytesReference source) {
|
||||
return updateRequest(new UpdateRequest().index(index).type(type).id(id).upsert(source, XContentType.JSON));
|
||||
}
|
||||
|
||||
@Override
|
||||
public ClientMethods update(String index, String type, String id, String source) {
|
||||
return updateRequest(new UpdateRequest().index(index).type(type).id(id).upsert(source, XContentType.JSON));
|
||||
}
|
||||
|
||||
@Override
|
||||
public ClientMethods updateRequest(UpdateRequest updateRequest) {
|
||||
if (closed) {
|
||||
throwClose();
|
||||
}
|
||||
try {
|
||||
if (metric != null) {
|
||||
metric.getCurrentIngest().inc(updateRequest.index(), updateRequest.type(), updateRequest.id());
|
||||
}
|
||||
bulkProcessor.add(updateRequest);
|
||||
} catch (Exception e) {
|
||||
throwable = e;
|
||||
closed = true;
|
||||
logger.error("bulk add of update request failed: " + e.getMessage(), e);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ClientMethods startBulk(String index, long startRefreshIntervalSeconds, long stopRefreshIntervalSeconds)
|
||||
throws IOException {
|
||||
if (control == null) {
|
||||
return this;
|
||||
}
|
||||
if (!control.isBulk(index) && startRefreshIntervalSeconds > 0L && stopRefreshIntervalSeconds > 0L) {
|
||||
control.startBulk(index, startRefreshIntervalSeconds, stopRefreshIntervalSeconds);
|
||||
updateIndexSetting(index, "refresh_interval", startRefreshIntervalSeconds + "s");
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ClientMethods stopBulk(String index) throws IOException {
|
||||
if (control == null) {
|
||||
return this;
|
||||
}
|
||||
if (control.isBulk(index)) {
|
||||
long secs = control.getStopBulkRefreshIntervals().get(index);
|
||||
if (secs > 0L) {
|
||||
updateIndexSetting(index, "refresh_interval", secs + "s");
|
||||
}
|
||||
control.finishBulk(index);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ClientMethods flushIngest() {
|
||||
if (closed) {
|
||||
throwClose();
|
||||
}
|
||||
logger.debug("flushing bulk processor");
|
||||
bulkProcessor.flush();
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public synchronized void shutdown() throws IOException {
|
||||
if (closed) {
|
||||
throwClose();
|
||||
}
|
||||
if (bulkProcessor != null) {
|
||||
logger.info("closing bulk processor...");
|
||||
bulkProcessor.close();
|
||||
}
|
||||
if (metric != null) {
|
||||
logger.info("stopping metric");
|
||||
metric.stop();
|
||||
}
|
||||
if (control != null && control.indices() != null && !control.indices().isEmpty()) {
|
||||
logger.info("stopping bulk mode for indices {}...", control.indices());
|
||||
for (String index : control.indices()) {
|
||||
stopBulk(index);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public ClientMethods newIndex(String index) {
|
||||
if (closed) {
|
||||
throwClose();
|
||||
}
|
||||
return newIndex(index, null, null);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ClientMethods newIndex(String index, String type, InputStream settings, InputStream mappings) throws IOException {
|
||||
resetSettings();
|
||||
setting(settings);
|
||||
mapping(type, mappings);
|
||||
return newIndex(index, settings(), this.mappings);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ClientMethods newIndex(String index, Settings settings, Map<String, String> mappings) {
|
||||
if (closed) {
|
||||
throwClose();
|
||||
}
|
||||
if (client() == null) {
|
||||
logger.warn("no client for create index");
|
||||
return this;
|
||||
}
|
||||
if (index == null) {
|
||||
logger.warn("no index name given to create index");
|
||||
return this;
|
||||
}
|
||||
CreateIndexRequestBuilder createIndexRequestBuilder =
|
||||
new CreateIndexRequestBuilder(client(), CreateIndexAction.INSTANCE).setIndex(index);
|
||||
if (settings != null) {
|
||||
logger.info("found settings {}", settings.toString());
|
||||
createIndexRequestBuilder.setSettings(settings);
|
||||
}
|
||||
if (mappings != null) {
|
||||
for (Map.Entry<String, String> entry : mappings.entrySet()) {
|
||||
String type = entry.getKey();
|
||||
String mapping = entry.getValue();
|
||||
logger.info("found mapping for {}", type);
|
||||
createIndexRequestBuilder.addMapping(type, mapping, XContentType.JSON);
|
||||
}
|
||||
}
|
||||
CreateIndexResponse createIndexResponse = createIndexRequestBuilder.execute().actionGet();
|
||||
logger.info("index {} created: {}", index, createIndexResponse);
|
||||
return this;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public ClientMethods newMapping(String index, String type, Map<String, Object> mapping) {
|
||||
PutMappingRequestBuilder putMappingRequestBuilder =
|
||||
new PutMappingRequestBuilder(client(), PutMappingAction.INSTANCE)
|
||||
.setIndices(index)
|
||||
.setType(type)
|
||||
.setSource(mapping);
|
||||
putMappingRequestBuilder.execute().actionGet();
|
||||
logger.info("mapping created for index {} and type {}", index, type);
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ClientMethods deleteIndex(String index) {
|
||||
if (closed) {
|
||||
throwClose();
|
||||
}
|
||||
if (client == null) {
|
||||
logger.warn("no client");
|
||||
return this;
|
||||
}
|
||||
if (index == null) {
|
||||
logger.warn("no index name given to delete index");
|
||||
return this;
|
||||
}
|
||||
DeleteIndexRequestBuilder deleteIndexRequestBuilder =
|
||||
new DeleteIndexRequestBuilder(client(), DeleteIndexAction.INSTANCE, index);
|
||||
deleteIndexRequestBuilder.execute().actionGet();
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ClientMethods waitForResponses(String maxWaitTime) throws InterruptedException, ExecutionException {
|
||||
if (closed) {
|
||||
throwClose();
|
||||
}
|
||||
long millis = TimeValue.parseTimeValue(maxWaitTime, "millis").getMillis();
|
||||
while (!bulkProcessor.awaitClose(millis, TimeUnit.MILLISECONDS)) {
|
||||
logger.warn("still waiting for responses");
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
public void waitForRecovery() throws IOException {
|
||||
if (client() == null) {
|
||||
return;
|
||||
}
|
||||
client().execute(RecoveryAction.INSTANCE, new RecoveryRequest()).actionGet();
|
||||
}
|
||||
|
||||
@Override
|
||||
public int waitForRecovery(String index) throws IOException {
|
||||
if (client() == null) {
|
||||
return -1;
|
||||
}
|
||||
if (index == null) {
|
||||
throw new IOException("unable to waitfor recovery, index not set");
|
||||
}
|
||||
RecoveryResponse response = client().execute(RecoveryAction.INSTANCE, new RecoveryRequest(index)).actionGet();
|
||||
int shards = response.getTotalShards();
|
||||
client().execute(ClusterHealthAction.INSTANCE, new ClusterHealthRequest(index)
|
||||
.waitForActiveShards(shards)).actionGet();
|
||||
return shards;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void waitForCluster(String statusString, String timeout) throws IOException {
|
||||
if (client() == null) {
|
||||
return;
|
||||
}
|
||||
ClusterHealthStatus status = ClusterHealthStatus.fromString(statusString);
|
||||
ClusterHealthResponse healthResponse =
|
||||
client().execute(ClusterHealthAction.INSTANCE, new ClusterHealthRequest()
|
||||
.waitForStatus(status).timeout(timeout)).actionGet();
|
||||
if (healthResponse != null && healthResponse.isTimedOut()) {
|
||||
throw new IOException("cluster state is " + healthResponse.getStatus().name()
|
||||
+ " and not " + status.name()
|
||||
+ ", from here on, everything will fail!");
|
||||
}
|
||||
}
|
||||
|
||||
public String fetchClusterName() {
|
||||
if (client() == null) {
|
||||
return null;
|
||||
}
|
||||
try {
|
||||
ClusterStateRequestBuilder clusterStateRequestBuilder =
|
||||
new ClusterStateRequestBuilder(client(), ClusterStateAction.INSTANCE).all();
|
||||
ClusterStateResponse clusterStateResponse = clusterStateRequestBuilder.execute().actionGet();
|
||||
String name = clusterStateResponse.getClusterName().value();
|
||||
int nodeCount = clusterStateResponse.getState().getNodes().getSize();
|
||||
return name + " (" + nodeCount + " nodes connected)";
|
||||
} catch (ElasticsearchTimeoutException e) {
|
||||
logger.warn(e.getMessage(), e);
|
||||
return "TIMEOUT";
|
||||
} catch (NoNodeAvailableException e) {
|
||||
logger.warn(e.getMessage(), e);
|
||||
return "DISCONNECTED";
|
||||
} catch (Exception e) {
|
||||
logger.warn(e.getMessage(), e);
|
||||
return "[" + e.getMessage() + "]";
|
||||
}
|
||||
}
|
||||
|
||||
public String healthColor() {
|
||||
if (client() == null) {
|
||||
return null;
|
||||
}
|
||||
try {
|
||||
ClusterHealthResponse healthResponse =
|
||||
client().execute(ClusterHealthAction.INSTANCE,
|
||||
new ClusterHealthRequest().timeout(TimeValue.timeValueSeconds(30))).actionGet();
|
||||
ClusterHealthStatus status = healthResponse.getStatus();
|
||||
return status.name();
|
||||
} catch (ElasticsearchTimeoutException e) {
|
||||
logger.warn(e.getMessage(), e);
|
||||
return "TIMEOUT";
|
||||
} catch (NoNodeAvailableException e) {
|
||||
logger.warn(e.getMessage(), e);
|
||||
return "DISCONNECTED";
|
||||
} catch (Exception e) {
|
||||
logger.warn(e.getMessage(), e);
|
||||
return "[" + e.getMessage() + "]";
|
||||
}
|
||||
}
|
||||
|
||||
public int updateReplicaLevel(String index, int level) throws IOException {
|
||||
waitForCluster("YELLOW","30s");
|
||||
updateIndexSetting(index, "number_of_replicas", level);
|
||||
return waitForRecovery(index);
|
||||
}
|
||||
|
||||
public void flushIndex(String index) {
|
||||
if (client() == null) {
|
||||
return;
|
||||
}
|
||||
if (index != null) {
|
||||
client().execute(FlushAction.INSTANCE, new FlushRequest(index)).actionGet();
|
||||
}
|
||||
}
|
||||
|
||||
public void refreshIndex(String index) {
|
||||
if (client() == null) {
|
||||
return;
|
||||
}
|
||||
if (index != null) {
|
||||
client().execute(RefreshAction.INSTANCE, new RefreshRequest(index)).actionGet();
|
||||
}
|
||||
}
|
||||
|
||||
public void putMapping(String index) {
|
||||
if (client() == null) {
|
||||
return;
|
||||
}
|
||||
if (!mappings.isEmpty()) {
|
||||
for (Map.Entry<String, String> me : mappings.entrySet()) {
|
||||
client().execute(PutMappingAction.INSTANCE,
|
||||
new PutMappingRequest(index).type(me.getKey()).source(me.getValue(), XContentType.JSON)).actionGet();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public String resolveAlias(String alias) {
|
||||
if (client() == null) {
|
||||
return alias;
|
||||
}
|
||||
GetAliasesRequestBuilder getAliasesRequestBuilder = new GetAliasesRequestBuilder(client(), GetAliasesAction.INSTANCE);
|
||||
GetAliasesResponse getAliasesResponse = getAliasesRequestBuilder.setAliases(alias).execute().actionGet();
|
||||
if (!getAliasesResponse.getAliases().isEmpty()) {
|
||||
return getAliasesResponse.getAliases().keys().iterator().next().value;
|
||||
}
|
||||
return alias;
|
||||
}
|
||||
|
||||
public String resolveMostRecentIndex(String alias) {
|
||||
if (client() == null) {
|
||||
return alias;
|
||||
}
|
||||
if (alias == null) {
|
||||
return null;
|
||||
}
|
||||
GetAliasesRequestBuilder getAliasesRequestBuilder = new GetAliasesRequestBuilder(client(), GetAliasesAction.INSTANCE);
|
||||
GetAliasesResponse getAliasesResponse = getAliasesRequestBuilder.setAliases(alias).execute().actionGet();
|
||||
Pattern pattern = Pattern.compile("^(.*?)(\\d+)$");
|
||||
Set<String> indices = new TreeSet<>(Collections.reverseOrder());
|
||||
for (ObjectCursor<String> indexName : getAliasesResponse.getAliases().keys()) {
|
||||
Matcher m = pattern.matcher(indexName.value);
|
||||
if (m.matches() && alias.equals(m.group(1))) {
|
||||
indices.add(indexName.value);
|
||||
}
|
||||
}
|
||||
return indices.isEmpty() ? alias : indices.iterator().next();
|
||||
}
|
||||
|
||||
public Map<String, String> getAliasFilters(String alias) {
|
||||
GetAliasesRequestBuilder getAliasesRequestBuilder = new GetAliasesRequestBuilder(client(), GetAliasesAction.INSTANCE);
|
||||
return getFilters(getAliasesRequestBuilder.setIndices(resolveAlias(alias)).execute().actionGet());
|
||||
}
|
||||
|
||||
public Map<String, String> getIndexFilters(String index) {
|
||||
GetAliasesRequestBuilder getAliasesRequestBuilder = new GetAliasesRequestBuilder(client(), GetAliasesAction.INSTANCE);
|
||||
return getFilters(getAliasesRequestBuilder.setIndices(index).execute().actionGet());
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public void switchAliases(String index, String concreteIndex, List<String> extraAliases) {
|
||||
switchAliases(index, concreteIndex, extraAliases, null);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void switchAliases(String index, String concreteIndex,
|
||||
List<String> extraAliases, IndexAliasAdder adder) {
|
||||
if (client() == null) {
|
||||
return;
|
||||
}
|
||||
if (index.equals(concreteIndex)) {
|
||||
return;
|
||||
}
|
||||
// two situations: 1. there is a new alias 2. there is already an old index with the alias
|
||||
String oldIndex = resolveAlias(index);
|
||||
final Map<String, String> oldFilterMap = oldIndex.equals(index) ? null : getIndexFilters(oldIndex);
|
||||
final List<String> newAliases = new LinkedList<>();
|
||||
final List<String> switchAliases = new LinkedList<>();
|
||||
IndicesAliasesRequestBuilder requestBuilder = new IndicesAliasesRequestBuilder(client(), IndicesAliasesAction.INSTANCE);
|
||||
if (oldFilterMap == null || !oldFilterMap.containsKey(index)) {
|
||||
// never apply a filter for the plain (unversioned) index name
|
||||
requestBuilder.addAlias(concreteIndex, index);
|
||||
newAliases.add(index);
|
||||
}
|
||||
// switch existing aliases
|
||||
if (oldFilterMap != null) {
|
||||
for (Map.Entry<String, String> entry : oldFilterMap.entrySet()) {
|
||||
String alias = entry.getKey();
|
||||
String filter = entry.getValue();
|
||||
requestBuilder.removeAlias(oldIndex, alias);
|
||||
if (filter != null) {
|
||||
requestBuilder.addAlias(concreteIndex, alias, filter);
|
||||
} else {
|
||||
requestBuilder.addAlias(concreteIndex, alias);
|
||||
}
|
||||
switchAliases.add(alias);
|
||||
}
|
||||
}
|
||||
// a list of aliases that should be added, check if new or old
|
||||
if (extraAliases != null) {
|
||||
for (String extraAlias : extraAliases) {
|
||||
if (oldFilterMap == null || !oldFilterMap.containsKey(extraAlias)) {
|
||||
// index alias adder only active on extra aliases, and if alias is new
|
||||
if (adder != null) {
|
||||
adder.addIndexAlias(requestBuilder, concreteIndex, extraAlias);
|
||||
} else {
|
||||
requestBuilder.addAlias(concreteIndex, extraAlias);
|
||||
}
|
||||
newAliases.add(extraAlias);
|
||||
} else {
|
||||
String filter = oldFilterMap.get(extraAlias);
|
||||
requestBuilder.removeAlias(oldIndex, extraAlias);
|
||||
if (filter != null) {
|
||||
requestBuilder.addAlias(concreteIndex, extraAlias, filter);
|
||||
} else {
|
||||
requestBuilder.addAlias(concreteIndex, extraAlias);
|
||||
}
|
||||
switchAliases.add(extraAlias);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (!newAliases.isEmpty() || !switchAliases.isEmpty()) {
|
||||
logger.info("new aliases = {}, switch aliases = {}", newAliases, switchAliases);
|
||||
requestBuilder.execute().actionGet();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void performRetentionPolicy(String index, String concreteIndex, int timestampdiff, int mintokeep) {
|
||||
if (client() == null) {
|
||||
return;
|
||||
}
|
||||
if (index.equals(concreteIndex)) {
|
||||
return;
|
||||
}
|
||||
GetIndexRequestBuilder getIndexRequestBuilder = new GetIndexRequestBuilder(client(), GetIndexAction.INSTANCE);
|
||||
GetIndexResponse getIndexResponse = getIndexRequestBuilder.execute().actionGet();
|
||||
Pattern pattern = Pattern.compile("^(.*?)(\\d+)$");
|
||||
Set<String> indices = new TreeSet<>();
|
||||
logger.info("{} indices", getIndexResponse.getIndices().length);
|
||||
for (String s : getIndexResponse.getIndices()) {
|
||||
Matcher m = pattern.matcher(s);
|
||||
if (m.matches() && index.equals(m.group(1)) && !s.equals(concreteIndex)) {
|
||||
indices.add(s);
|
||||
}
|
||||
}
|
||||
if (indices.isEmpty()) {
|
||||
logger.info("no indices found, retention policy skipped");
|
||||
return;
|
||||
}
|
||||
if (mintokeep > 0 && indices.size() <= mintokeep) {
|
||||
logger.info("{} indices found, not enough for retention policy ({}), skipped",
|
||||
indices.size(), mintokeep);
|
||||
return;
|
||||
} else {
|
||||
logger.info("candidates for deletion = {}", indices);
|
||||
}
|
||||
List<String> indicesToDelete = new ArrayList<>();
|
||||
// our index
|
||||
Matcher m1 = pattern.matcher(concreteIndex);
|
||||
if (m1.matches()) {
|
||||
Integer i1 = Integer.parseInt(m1.group(2));
|
||||
for (String s : indices) {
|
||||
Matcher m2 = pattern.matcher(s);
|
||||
if (m2.matches()) {
|
||||
Integer i2 = Integer.parseInt(m2.group(2));
|
||||
int kept = indices.size() - indicesToDelete.size();
|
||||
if ((timestampdiff == 0 || (timestampdiff > 0 && i1 - i2 > timestampdiff)) && mintokeep <= kept) {
|
||||
indicesToDelete.add(s);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
logger.info("indices to delete = {}", indicesToDelete);
|
||||
if (indicesToDelete.isEmpty()) {
|
||||
logger.info("not enough indices found to delete, retention policy complete");
|
||||
return;
|
||||
}
|
||||
String[] s = indicesToDelete.toArray(new String[indicesToDelete.size()]);
|
||||
DeleteIndexRequestBuilder requestBuilder = new DeleteIndexRequestBuilder(client(), DeleteIndexAction.INSTANCE, s);
|
||||
DeleteIndexResponse response = requestBuilder.execute().actionGet();
|
||||
if (!response.isAcknowledged()) {
|
||||
logger.warn("retention delete index operation was not acknowledged");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Long mostRecentDocument(String index, String timestampfieldname) {
|
||||
if (client() == null) {
|
||||
return null;
|
||||
}
|
||||
SearchRequestBuilder searchRequestBuilder = new SearchRequestBuilder(client(), SearchAction.INSTANCE);
|
||||
SortBuilder<?> sort = SortBuilders.fieldSort(timestampfieldname).order(SortOrder.DESC);
|
||||
SearchResponse searchResponse = searchRequestBuilder.setIndices(index)
|
||||
.addStoredField(timestampfieldname)
|
||||
.setSize(1)
|
||||
.addSort(sort)
|
||||
.execute().actionGet();
|
||||
if (searchResponse.getHits().getHits().length == 1) {
|
||||
SearchHit hit = searchResponse.getHits().getHits()[0];
|
||||
if (hit.getFields().get(timestampfieldname) != null) {
|
||||
return hit.getFields().get(timestampfieldname).getValue();
|
||||
} else {
|
||||
return 0L;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasThrowable() {
|
||||
return throwable != null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Throwable getThrowable() {
|
||||
return throwable;
|
||||
}
|
||||
|
||||
protected static void throwClose() {
|
||||
throw new ElasticsearchException("client is closed");
|
||||
}
|
||||
|
||||
|
||||
protected void updateIndexSetting(String index, String key, Object value) throws IOException {
|
||||
if (client() == null) {
|
||||
return;
|
||||
}
|
||||
if (index == null) {
|
||||
throw new IOException("no index name given");
|
||||
}
|
||||
if (key == null) {
|
||||
throw new IOException("no key given");
|
||||
}
|
||||
if (value == null) {
|
||||
throw new IOException("no value given");
|
||||
}
|
||||
Settings.Builder updateSettingsBuilder = Settings.builder();
|
||||
updateSettingsBuilder.put(key, value.toString());
|
||||
UpdateSettingsRequest updateSettingsRequest = new UpdateSettingsRequest(index)
|
||||
.settings(updateSettingsBuilder);
|
||||
client().execute(UpdateSettingsAction.INSTANCE, updateSettingsRequest).actionGet();
|
||||
}
|
||||
|
||||
private Map<String, String> getFilters(GetAliasesResponse getAliasesResponse) {
|
||||
Map<String, String> result = new HashMap<>();
|
||||
for (ObjectObjectCursor<String, List<AliasMetaData>> object : getAliasesResponse.getAliases()) {
|
||||
List<AliasMetaData> aliasMetaDataList = object.value;
|
||||
for (AliasMetaData aliasMetaData : aliasMetaDataList) {
|
||||
if (aliasMetaData.filteringRequired()) {
|
||||
String metaData = new String(aliasMetaData.getFilter().uncompressed(), StandardCharsets.UTF_8);
|
||||
result.put(aliasMetaData.alias(), metaData);
|
||||
} else {
|
||||
result.put(aliasMetaData.alias(), null);
|
||||
}
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
private Settings findSettings() {
|
||||
Settings.Builder settingsBuilder = Settings.builder();
|
||||
settingsBuilder.put("host", "localhost");
|
||||
try {
|
||||
String hostname = NetworkUtils.getLocalAddress().getHostName();
|
||||
logger.debug("the hostname is {}", hostname);
|
||||
settingsBuilder.put("host", hostname)
|
||||
.put("port", 9300);
|
||||
} catch (Exception e) {
|
||||
logger.warn(e.getMessage(), e);
|
||||
}
|
||||
return settingsBuilder.build();
|
||||
}
|
||||
}
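Note: resolveMostRecentIndex() and performRetentionPolicy() above both rely on the convention that a concrete index is named as the bare alias/base name followed by a numeric (typically timestamp) suffix, matched by the pattern ^(.*?)(\d+)$. The following standalone sketch only illustrates that matching and ordering; the index names in it are hypothetical.

import java.util.Collections;
import java.util.Set;
import java.util.TreeSet;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class IndexNamePatternDemo {

    public static void main(String[] args) {
        Pattern pattern = Pattern.compile("^(.*?)(\\d+)$");
        String alias = "myindex"; // hypothetical alias
        // hypothetical concrete indices behind the alias
        String[] candidates = { "myindex20180601", "myindex20180701", "otherindex20180701" };
        Set<String> indices = new TreeSet<>(Collections.reverseOrder());
        for (String name : candidates) {
            Matcher m = pattern.matcher(name);
            // keep only indices whose base name equals the alias, as resolveMostRecentIndex does
            if (m.matches() && alias.equals(m.group(1))) {
                indices.add(name);
            }
        }
        // the reverse-ordered set puts the highest (most recent) suffix first
        System.out.println(indices.isEmpty() ? alias : indices.iterator().next()); // prints myindex20180701
    }
}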
|
|
@@ -1,19 +0,0 @@
|
|||
package org.xbib.elasticsearch.client;
|
||||
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
public interface BulkControl {
|
||||
|
||||
void startBulk(String indexName, long startRefreshInterval, long stopRefreshInterval);
|
||||
|
||||
boolean isBulk(String indexName);
|
||||
|
||||
void finishBulk(String indexName);
|
||||
|
||||
Set<String> indices();
|
||||
|
||||
Map<String, Long> getStartBulkRefreshIntervals();
|
||||
|
||||
Map<String, Long> getStopBulkRefreshIntervals();
|
||||
}
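No BulkControl implementation appears in this hunk. A minimal in-memory sketch consistent with the interface above could look as follows; the class name SimpleBulkControl is a placeholder, and it is assumed to live in the same org.xbib.elasticsearch.client package as the interface.

package org.xbib.elasticsearch.client;

import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;

/**
 * Minimal in-memory bulk control: tracks which indices are in bulk mode
 * together with their start/stop refresh intervals.
 */
public class SimpleBulkControl implements BulkControl {

    private final Map<String, Long> startIntervals = new ConcurrentHashMap<>();

    private final Map<String, Long> stopIntervals = new ConcurrentHashMap<>();

    @Override
    public void startBulk(String indexName, long startRefreshInterval, long stopRefreshInterval) {
        startIntervals.put(indexName, startRefreshInterval);
        stopIntervals.put(indexName, stopRefreshInterval);
    }

    @Override
    public boolean isBulk(String indexName) {
        return startIntervals.containsKey(indexName);
    }

    @Override
    public void finishBulk(String indexName) {
        startIntervals.remove(indexName);
        stopIntervals.remove(indexName);
    }

    @Override
    public Set<String> indices() {
        return startIntervals.keySet();
    }

    @Override
    public Map<String, Long> getStartBulkRefreshIntervals() {
        return startIntervals;
    }

    @Override
    public Map<String, Long> getStopBulkRefreshIntervals() {
        return stopIntervals;
    }
}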
|
|
@@ -1,100 +0,0 @@
|
|||
package org.xbib.elasticsearch.client;
|
||||
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.ByteSizeValue;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.ServiceLoader;
|
||||
|
||||
public final class ClientBuilder implements Parameters {
|
||||
|
||||
private final Settings.Builder settingsBuilder;
|
||||
|
||||
private Map<Class<? extends ClientMethods>, ClientMethods> clientMethodsMap;
|
||||
|
||||
private BulkMetric metric;
|
||||
|
||||
private BulkControl control;
|
||||
|
||||
public ClientBuilder() {
|
||||
this(Thread.currentThread().getContextClassLoader());
|
||||
}
|
||||
|
||||
public ClientBuilder(ClassLoader classLoader) {
|
||||
this.settingsBuilder = Settings.builder();
|
||||
//settingsBuilder.put("node.name", "clientnode");
|
||||
this.clientMethodsMap = new HashMap<>();
|
||||
ServiceLoader<ClientMethods> serviceLoader = ServiceLoader.load(ClientMethods.class,
|
||||
classLoader != null ? classLoader : Thread.currentThread().getContextClassLoader());
|
||||
for (ClientMethods clientMethods : serviceLoader) {
|
||||
clientMethodsMap.put(clientMethods.getClass(), clientMethods);
|
||||
}
|
||||
}
|
||||
|
||||
public static ClientBuilder builder() {
|
||||
return new ClientBuilder();
|
||||
}
|
||||
|
||||
public ClientBuilder put(String key, String value) {
|
||||
settingsBuilder.put(key, value);
|
||||
return this;
|
||||
}
|
||||
|
||||
public ClientBuilder put(String key, Integer value) {
|
||||
settingsBuilder.put(key, value);
|
||||
return this;
|
||||
}
|
||||
|
||||
public ClientBuilder put(String key, Long value) {
|
||||
settingsBuilder.put(key, value);
|
||||
return this;
|
||||
}
|
||||
|
||||
public ClientBuilder put(String key, Double value) {
|
||||
settingsBuilder.put(key, value);
|
||||
return this;
|
||||
}
|
||||
|
||||
public ClientBuilder put(String key, ByteSizeValue value) {
|
||||
settingsBuilder.put(key, value);
|
||||
return this;
|
||||
}
|
||||
|
||||
public ClientBuilder put(String key, TimeValue value) {
|
||||
settingsBuilder.put(key, value);
|
||||
return this;
|
||||
}
|
||||
|
||||
public ClientBuilder put(Settings settings) {
|
||||
settingsBuilder.put(settings);
|
||||
return this;
|
||||
}
|
||||
|
||||
public ClientBuilder setMetric(BulkMetric metric) {
|
||||
this.metric = metric;
|
||||
return this;
|
||||
}
|
||||
|
||||
public ClientBuilder setControl(BulkControl control) {
|
||||
this.control = control;
|
||||
return this;
|
||||
}
|
||||
|
||||
public <C extends ClientMethods> C getClient(Class<C> clientClass) {
|
||||
return getClient(null, clientClass);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public <C extends ClientMethods> C getClient(Client client, Class<C> clientClass) {
|
||||
Settings settings = settingsBuilder.build();
|
||||
return (C) clientMethodsMap.get(clientClass)
|
||||
.maxActionsPerRequest(settings.getAsInt(MAX_ACTIONS_PER_REQUEST, DEFAULT_MAX_ACTIONS_PER_REQUEST))
|
||||
.maxConcurrentRequests(settings.getAsInt(MAX_CONCURRENT_REQUESTS, DEFAULT_MAX_CONCURRENT_REQUESTS))
|
||||
.maxVolumePerRequest(settings.get(MAX_VOLUME_PER_REQUEST, DEFAULT_MAX_VOLUME_PER_REQUEST))
|
||||
.flushIngestInterval(settings.get(FLUSH_INTERVAL, DEFAULT_FLUSH_INTERVAL))
|
||||
.init(client, settings, metric, control);
|
||||
}
|
||||
}
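As a hedged illustration of how the builder and the ClientMethods API fit together, a usage flow might look like the sketch below. TransportBulkClient and SimpleBulkMetric are placeholder names for a concrete ClientMethods implementation and a BulkMetric implementation (neither is shown in this diff); SimpleBulkControl refers to the in-memory sketch above. Only the builder and interface calls themselves are taken from the code in this commit.

import org.xbib.elasticsearch.client.ClientBuilder;
import org.xbib.elasticsearch.client.ClientMethods;

public final class IngestExample {

    public static void main(String[] args) throws Exception {
        ClientMethods client = ClientBuilder.builder()
                .put("host", "localhost")              // forwarded into the Settings passed to init()
                .setMetric(new SimpleBulkMetric())     // hypothetical BulkMetric implementation
                .setControl(new SimpleBulkControl())   // e.g. the in-memory sketch above
                .getClient(TransportBulkClient.class); // hypothetical ClientMethods implementation
        try {
            client.newIndex("example");
            client.index("example", "doc", "1", false, "{\"text\":\"Hello World\"}"); // queued for bulking
            client.flushIngest();           // force submission of the queued requests
            client.waitForResponses("30s"); // block until outstanding bulk responses arrive
        } finally {
            if (client.hasThrowable()) {
                client.getThrowable().printStackTrace();
            }
            client.shutdown();              // closes the bulk processor and stops the metric
        }
    }
}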
|
|
@@ -1,402 +0,0 @@
|
|||
package org.xbib.elasticsearch.client;
|
||||
|
||||
import org.elasticsearch.action.delete.DeleteRequest;
|
||||
import org.elasticsearch.action.index.IndexRequest;
|
||||
import org.elasticsearch.action.update.UpdateRequest;
|
||||
import org.elasticsearch.client.ElasticsearchClient;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.ExecutionException;
|
||||
|
||||
/**
|
||||
* Interface for providing convenient administrative methods for ingesting data into Elasticsearch.
|
||||
*/
|
||||
public interface ClientMethods extends Parameters {
|
||||
|
||||
ClientMethods init(ElasticsearchClient client, Settings settings, BulkMetric metric, BulkControl control);
|
||||
|
||||
/**
|
||||
* Return Elasticsearch client.
|
||||
*
|
||||
* @return Elasticsearch client
|
||||
*/
|
||||
ElasticsearchClient client();
|
||||
|
||||
/**
|
||||
* Bulked index request. Each request will be added to a queue for bulking requests.
|
||||
* The request is submitted when bulk limits are exceeded.
|
||||
*
|
||||
* @param index the index
|
||||
* @param type the type
|
||||
* @param id the id
|
||||
* @param create true if document must be created
|
||||
* @param source the source
|
||||
* @return this
|
||||
*/
|
||||
ClientMethods index(String index, String type, String id, boolean create, BytesReference source);
|
||||
|
||||
/**
|
||||
* Bulked index request. Each request will be added to a queue for bulking requests.
|
||||
* The request is submitted when bulk limits are exceeded.
|
||||
*
|
||||
* @param index the index
|
||||
* @param type the type
|
||||
* @param id the id
|
||||
* @param create true if document must be created
|
||||
* @param source the source
|
||||
* @return this
|
||||
*/
|
||||
ClientMethods index(String index, String type, String id, boolean create, String source);
|
||||
|
||||
/**
|
||||
* Bulked index request. Each request will be added to a queue for bulking requests.
|
||||
* The request is submitted when bulk limits are exceeded.
|
||||
*
|
||||
* @param indexRequest the index request to add
|
||||
* @return this ingest
|
||||
*/
|
||||
ClientMethods indexRequest(IndexRequest indexRequest);
|
||||
|
||||
/**
|
||||
* Delete document.
|
||||
*
|
||||
* @param index the index
|
||||
* @param type the type
|
||||
* @param id the id
|
||||
* @return this ingest
|
||||
*/
|
||||
ClientMethods delete(String index, String type, String id);
|
||||
|
||||
/**
|
||||
* Bulked delete request. Each request will be added to a queue for bulking requests.
|
||||
* Submitting request will be done when bulk limits are exceeded.
|
||||
*
|
||||
* @param deleteRequest the delete request to add
|
||||
* @return this ingest
|
||||
*/
|
||||
ClientMethods deleteRequest(DeleteRequest deleteRequest);
|
||||
|
||||
/**
|
||||
* Bulked update request. Each request will be added to a queue for bulking requests.
|
||||
* Submitting request will be done when bulk limits are exceeded.
|
||||
* Note that updates only work correctly when all operations between nodes are synchronized.
|
||||
*
|
||||
* @param index the index
|
||||
* @param type the type
|
||||
* @param id the id
|
||||
* @param source the source
|
||||
* @return this
|
||||
*/
|
||||
ClientMethods update(String index, String type, String id, BytesReference source);
|
||||
|
||||
/**
|
||||
* Bulked update request. Each request will be added to a queue for bulking requests.
|
||||
* Submitting request will be done when bulk limits are exceeded.
|
||||
* Note that updates only work correctly when all operations between nodes are synchronized.
|
||||
*
|
||||
* @param index the index
|
||||
* @param type the type
|
||||
* @param id the id
|
||||
* @param source the source
|
||||
* @return this
|
||||
*/
|
||||
ClientMethods update(String index, String type, String id, String source);
|
||||
|
||||
/**
|
||||
* Bulked update request. Each request will be added to a queue for bulking requests.
|
||||
* Submitting request will be done when bulk limits are exceeded.
|
||||
* Note that updates only work correctly when all operations between nodes are synchronized.
|
||||
*
|
||||
* @param updateRequest the update request to add
|
||||
* @return this ingest
|
||||
*/
|
||||
ClientMethods updateRequest(UpdateRequest updateRequest);
|
||||
|
||||
/**
|
||||
* Set the maximum number of actions per request.
|
||||
*
|
||||
* @param maxActionsPerRequest maximum number of actions per request
|
||||
* @return this ingest
|
||||
*/
|
||||
ClientMethods maxActionsPerRequest(int maxActionsPerRequest);
|
||||
|
||||
/**
|
||||
* Set the maximum number of concurrent requests.
|
||||
*
|
||||
* @param maxConcurrentRequests maximum number of concurrent ingest requests
|
||||
* @return this Ingest
|
||||
*/
|
||||
ClientMethods maxConcurrentRequests(int maxConcurrentRequests);
|
||||
|
||||
/**
|
||||
* Set the maximum volume for request before flush.
|
||||
*
|
||||
* @param maxVolume maximum volume
|
||||
* @return this ingest
|
||||
*/
|
||||
ClientMethods maxVolumePerRequest(String maxVolume);
|
||||
|
||||
/**
|
||||
* Set the flush interval for automatic flushing outstanding ingest requests.
|
||||
*
|
||||
* @param flushInterval the flush interval, default is 30 seconds
|
||||
* @return this ingest
|
||||
*/
|
||||
ClientMethods flushIngestInterval(String flushInterval);
|
||||
|
||||
/**
|
||||
* Set mapping.
|
||||
*
|
||||
* @param type mapping type
|
||||
* @param in mapping definition as input stream
|
||||
* @throws IOException if mapping could not be added
|
||||
*/
|
||||
void mapping(String type, InputStream in) throws IOException;
|
||||
|
||||
/**
|
||||
* Set mapping.
|
||||
*
|
||||
* @param type mapping type
|
||||
* @param mapping mapping definition as input stream
|
||||
* @throws IOException if mapping could not be added
|
||||
*/
|
||||
void mapping(String type, String mapping) throws IOException;
|
||||
|
||||
/**
|
||||
* Put mapping.
|
||||
*
|
||||
* @param index index
|
||||
*/
|
||||
void putMapping(String index);
|
||||
|
||||
/**
|
||||
* Create a new index.
|
||||
*
|
||||
* @param index index
|
||||
* @return this ingest
|
||||
*/
|
||||
ClientMethods newIndex(String index);
|
||||
|
||||
/**
|
||||
* Create a new index.
|
||||
*
|
||||
* @param index index
|
||||
* @param type type
|
||||
* @param settings settings
|
||||
* @param mappings mappings
|
||||
* @return this ingest
|
||||
* @throws IOException if new index creation fails
|
||||
*/
|
||||
ClientMethods newIndex(String index, String type, InputStream settings, InputStream mappings) throws IOException;
|
||||
|
||||
/**
|
||||
* Create a new index.
|
||||
*
|
||||
* @param index index
|
||||
* @param settings settings
|
||||
* @param mappings mappings
|
||||
* @return this ingest
|
||||
*/
|
||||
ClientMethods newIndex(String index, Settings settings, Map<String, String> mappings);
|
||||
|
||||
/**
|
||||
* Create new mapping.
|
||||
*
|
||||
* @param index index
|
||||
* @param type index type
|
||||
* @param mapping mapping
|
||||
* @return this ingest
|
||||
*/
|
||||
ClientMethods newMapping(String index, String type, Map<String, Object> mapping);
|
||||
|
||||
/**
|
||||
* Delete index.
|
||||
*
|
||||
* @param index index
|
||||
* @return this ingest
|
||||
*/
|
||||
ClientMethods deleteIndex(String index);
|
||||
|
||||
/**
|
||||
* Start bulk mode.
|
||||
*
|
||||
* @param index index
|
||||
* @param startRefreshIntervalSeconds refresh interval before bulk
|
||||
* @param stopRefreshIntervalSeconds refresh interval after bulk
|
||||
* @return this ingest
|
||||
* @throws IOException if bulk could not be started
|
||||
*/
|
||||
ClientMethods startBulk(String index, long startRefreshIntervalSeconds, long stopRefreshIntervalSeconds) throws IOException;
|
||||
|
||||
/**
|
||||
* Stops bulk mode.
|
||||
*
|
||||
* @param index index
|
||||
* @return this Ingest
|
||||
* @throws IOException if bulk could not be stopped
|
||||
*/
|
||||
ClientMethods stopBulk(String index) throws IOException;
|
||||
|
||||
/**
|
||||
* Flush ingest, move all pending documents to the cluster.
|
||||
*
|
||||
* @return this
|
||||
*/
|
||||
ClientMethods flushIngest();
|
||||
|
||||
/**
|
||||
* Wait for all outstanding responses.
|
||||
*
|
||||
* @param maxWaitTime maximum wait time
|
||||
* @return this ingest
|
||||
* @throws InterruptedException if wait is interrupted
|
||||
* @throws ExecutionException if execution failed
|
||||
*/
|
||||
ClientMethods waitForResponses(String maxWaitTime) throws InterruptedException, ExecutionException;
|
||||
|
||||
/**
|
||||
* Refresh the index.
|
||||
*
|
||||
* @param index index
|
||||
*/
|
||||
void refreshIndex(String index);
|
||||
|
||||
/**
|
||||
* Flush the index.
|
||||
*
|
||||
* @param index index
|
||||
*/
|
||||
void flushIndex(String index);
|
||||
|
||||
/**
|
||||
* Update replica level.
|
||||
*
|
||||
* @param index index
|
||||
* @param level the replica level
|
||||
* @return number of shards after updating replica level
|
||||
* @throws IOException if replica could not be updated
|
||||
*/
|
||||
int updateReplicaLevel(String index, int level) throws IOException;
|
||||
|
||||
/**
|
||||
* Wait for cluster being healthy.
|
||||
*
|
||||
* @param healthColor cluster health color to wait for
|
||||
* @param timeValue time value
|
||||
* @throws IOException if wait failed
|
||||
*/
|
||||
void waitForCluster(String healthColor, String timeValue) throws IOException;
|
||||
|
||||
/**
|
||||
* Get current health color.
|
||||
*
|
||||
* @return the cluster health color
|
||||
*/
|
||||
String healthColor();
|
||||
|
||||
/**
|
||||
* Wait for index recovery (after replica change).
|
||||
*
|
||||
* @param index index
|
||||
* @return number of shards found
|
||||
* @throws IOException if wait failed
|
||||
*/
|
||||
int waitForRecovery(String index) throws IOException;
|
||||
|
||||
/**
|
||||
* Resolve alias.
|
||||
*
|
||||
* @param alias the alias
|
||||
* @return one index name behind the alias or the alias if there is no index
|
||||
*/
|
||||
String resolveAlias(String alias);
|
||||
|
||||
/**
|
||||
* Resolve alias to all connected indices, sort index names with most recent timestamp on top, return this index
|
||||
* name.
|
||||
*
|
||||
* @param alias the alias
|
||||
* @return the most recent index name pointing to the alias
|
||||
*/
|
||||
String resolveMostRecentIndex(String alias);
|
||||
|
||||
/**
|
||||
* Get all alias filters.
|
||||
*
|
||||
* @param index index
|
||||
* @return map of alias filters
|
||||
*/
|
||||
Map<String, String> getAliasFilters(String index);
|
||||
|
||||
/**
|
||||
* Switch aliases from one index to another.
|
||||
*
|
||||
* @param index the index name
|
||||
* @param concreteIndex the index name with timestamp
|
||||
* @param extraAliases a list of names that should be set as index aliases
|
||||
*/
|
||||
void switchAliases(String index, String concreteIndex, List<String> extraAliases);
|
||||
|
||||
/**
|
||||
* Switch aliases from one index to another.
|
||||
*
|
||||
* @param index the index name
|
||||
* @param concreteIndex the index name with timestamp
|
||||
* @param extraAliases a list of names that should be set as index aliases
|
||||
* @param adder an adder method to create alias term queries
|
||||
*/
|
||||
void switchAliases(String index, String concreteIndex, List<String> extraAliases, IndexAliasAdder adder);
|
||||
|
||||
/**
|
||||
* Retention policy for an index. All indices before timestampdiff should be deleted,
|
||||
* but mintokeep indices must be kept.
|
||||
*
|
||||
* @param index index name
|
||||
* @param concreteIndex index name with timestamp
|
||||
* @param timestampdiff timestamp delta (for index timestamps)
|
||||
* @param mintokeep minimum number of indices to keep
|
||||
*/
|
||||
void performRetentionPolicy(String index, String concreteIndex, int timestampdiff, int mintokeep);
|
||||
|
||||
/**
|
||||
* Find the timestamp of the most recently indexed document in the index.
|
||||
*
|
||||
* @param index the index name
|
||||
* @param timestampfieldname the timestamp field name
|
||||
* @return millis UTC millis of the most recent document
|
||||
* @throws IOException if the most recent document cannot be found
|
||||
*/
|
||||
Long mostRecentDocument(String index, String timestampfieldname) throws IOException;
|
||||
|
||||
/**
|
||||
* Get metric.
|
||||
*
|
||||
* @return metric
|
||||
*/
|
||||
BulkMetric getMetric();
|
||||
|
||||
/**
|
||||
* Returns true if a throwable exists.
|
||||
*
|
||||
* @return true if a Throwable exists
|
||||
*/
|
||||
boolean hasThrowable();
|
||||
|
||||
/**
|
||||
* Return last throwable if exists.
|
||||
*
|
||||
* @return last throwable
|
||||
*/
|
||||
Throwable getThrowable();
|
||||
|
||||
/**
|
||||
* Shutdown the ingesting.
|
||||
* @throws IOException if shutdown fails
|
||||
*/
|
||||
void shutdown() throws IOException;
|
||||
}
|
|
@ -1,9 +0,0 @@
package org.xbib.elasticsearch.client;

import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequestBuilder;

@FunctionalInterface
public interface IndexAliasAdder {

    void addIndexAlias(IndicesAliasesRequestBuilder builder, String index, String alias);
}

@ -1,20 +0,0 @@
package org.xbib.elasticsearch.client;

public interface Parameters {

    int DEFAULT_MAX_ACTIONS_PER_REQUEST = 1000;

    int DEFAULT_MAX_CONCURRENT_REQUESTS = Runtime.getRuntime().availableProcessors();

    String DEFAULT_MAX_VOLUME_PER_REQUEST = "10mb";

    String DEFAULT_FLUSH_INTERVAL = "30s";

    String MAX_ACTIONS_PER_REQUEST = "max_actions_per_request";

    String MAX_CONCURRENT_REQUESTS = "max_concurrent_requests";

    String MAX_VOLUME_PER_REQUEST = "max_volume_per_request";

    String FLUSH_INTERVAL = "flush_interval";
}

@ -1,52 +0,0 @@
package org.xbib.elasticsearch.client;

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

public class SimpleBulkControl implements BulkControl {

    private final Set<String> indexNames = new HashSet<>();

    private final Map<String, Long> startBulkRefreshIntervals = new HashMap<>();

    private final Map<String, Long> stopBulkRefreshIntervals = new HashMap<>();

    @Override
    public void startBulk(String indexName, long startRefreshInterval, long stopRefreshInterval) {
        synchronized (indexNames) {
            indexNames.add(indexName);
            startBulkRefreshIntervals.put(indexName, startRefreshInterval);
            stopBulkRefreshIntervals.put(indexName, stopRefreshInterval);
        }
    }

    @Override
    public boolean isBulk(String indexName) {
        return indexNames.contains(indexName);
    }

    @Override
    public void finishBulk(String indexName) {
        synchronized (indexNames) {
            indexNames.remove(indexName);
        }
    }

    @Override
    public Set<String> indices() {
        return indexNames;
    }

    @Override
    public Map<String, Long> getStartBulkRefreshIntervals() {
        return startBulkRefreshIntervals;
    }

    @Override
    public Map<String, Long> getStopBulkRefreshIntervals() {
        return stopBulkRefreshIntervals;
    }

}

@ -1,4 +0,0 @@
/**
 * Classes for Elasticsearch client.
 */
package org.xbib.elasticsearch.client;

@ -1,60 +0,0 @@
|
|||
package org.xbib.elasticsearch.client.common;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.elasticsearch.action.bulk.BulkAction;
|
||||
import org.elasticsearch.action.bulk.BulkRequestBuilder;
|
||||
import org.elasticsearch.action.search.SearchRequestBuilder;
|
||||
import org.elasticsearch.action.search.SearchResponse;
|
||||
import org.elasticsearch.client.Requests;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.index.query.QueryBuilder;
|
||||
import org.elasticsearch.index.query.QueryBuilders;
|
||||
import org.elasticsearch.search.sort.SortOrder;
|
||||
import org.elasticsearch.testframework.ESSingleNodeTestCase;
|
||||
|
||||
public class SearchTests extends ESSingleNodeTestCase {
|
||||
|
||||
private static final Logger logger = LogManager.getLogger(SearchTests.class.getName());
|
||||
|
||||
public void testSearch() throws Exception {
|
||||
long t0 = System.currentTimeMillis();
|
||||
BulkRequestBuilder builder = new BulkRequestBuilder(client(), BulkAction.INSTANCE);
|
||||
for (int i = 0; i < 1000; i++) {
|
||||
builder.add(Requests.indexRequest()
|
||||
.index("pages").type("row")
|
||||
.source(XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.field("user1", "kimchy")
|
||||
.field("user2", "kimchy")
|
||||
.field("user3", "kimchy")
|
||||
.field("user4", "kimchy")
|
||||
.field("user5", "kimchy")
|
||||
.field("user6", "kimchy")
|
||||
.field("user7", "kimchy")
|
||||
.field("user8", "kimchy")
|
||||
.field("user9", "kimchy")
|
||||
.field("rowcount", i)
|
||||
.field("rs", 1234)
|
||||
.endObject()));
|
||||
}
|
||||
client().bulk(builder.request()).actionGet();
|
||||
client().admin().indices().refresh(Requests.refreshRequest()).actionGet();
|
||||
long t1 = System.currentTimeMillis();
|
||||
logger.info("t1-t0 = {}", t1 - t0);
|
||||
for (int i = 0; i < 100; i++) {
|
||||
t1 = System.currentTimeMillis();
|
||||
QueryBuilder queryStringBuilder =
|
||||
QueryBuilders.queryStringQuery("rs:" + 1234);
|
||||
SearchRequestBuilder requestBuilder = client().prepareSearch()
|
||||
.setIndices("pages")
|
||||
.setTypes("row")
|
||||
.setQuery(queryStringBuilder)
|
||||
.addSort("rowcount", SortOrder.DESC)
|
||||
.setFrom(i * 10).setSize(10);
|
||||
SearchResponse response = requestBuilder.execute().actionGet();
|
||||
long t2 = System.currentTimeMillis();
|
||||
logger.info("t2-t1 = {}", t2 - t1);
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,51 +0,0 @@
|
|||
package org.xbib.elasticsearch.client.common;
|
||||
|
||||
import org.elasticsearch.action.support.WriteRequest;
|
||||
import org.elasticsearch.client.Requests;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.index.query.QueryBuilder;
|
||||
import org.elasticsearch.index.query.QueryBuilders;
|
||||
import org.elasticsearch.testframework.ESSingleNodeTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
public class WildcardTests extends ESSingleNodeTestCase {
|
||||
|
||||
public void testWildcard() throws Exception {
|
||||
index("1", "010");
|
||||
index("2", "0*0");
|
||||
// exact
|
||||
validateCount(QueryBuilders.queryStringQuery("010").defaultField("field"), 1);
|
||||
validateCount(QueryBuilders.queryStringQuery("0\\*0").defaultField("field"), 1);
|
||||
// pattern
|
||||
validateCount(QueryBuilders.queryStringQuery("0*0").defaultField("field"), 1); // 2?
|
||||
validateCount(QueryBuilders.queryStringQuery("0?0").defaultField("field"), 1); // 2?
|
||||
validateCount(QueryBuilders.queryStringQuery("0**0").defaultField("field"), 1); // 2?
|
||||
validateCount(QueryBuilders.queryStringQuery("0??0").defaultField("field"), 0);
|
||||
validateCount(QueryBuilders.queryStringQuery("*10").defaultField("field"), 1);
|
||||
validateCount(QueryBuilders.queryStringQuery("*1*").defaultField("field"), 1);
|
||||
validateCount(QueryBuilders.queryStringQuery("*\\*0").defaultField("field"), 0); // 1?
|
||||
validateCount(QueryBuilders.queryStringQuery("*\\**").defaultField("field"), 0); // 1?
|
||||
}
|
||||
|
||||
private void index(String id, String fieldValue) throws IOException {
|
||||
client().index(Requests.indexRequest()
|
||||
.index("index").type("type").id(id)
|
||||
.source(XContentFactory.jsonBuilder().startObject().field("field", fieldValue).endObject())
|
||||
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE))
|
||||
.actionGet();
|
||||
}
|
||||
|
||||
private void validateCount(QueryBuilder queryBuilder, long expectedHits) {
|
||||
final long actualHits = count(queryBuilder);
|
||||
if (actualHits != expectedHits) {
|
||||
throw new RuntimeException("actualHits=" + actualHits + ", expectedHits=" + expectedHits);
|
||||
}
|
||||
}
|
||||
|
||||
private long count(QueryBuilder queryBuilder) {
|
||||
return client().prepareSearch("index").setTypes("type")
|
||||
.setQuery(queryBuilder)
|
||||
.execute().actionGet().getHits().getTotalHits();
|
||||
}
|
||||
}
|
|
@ -1,4 +0,0 @@
/**
 * Classes to test Elasticsearch clients.
 */
package org.xbib.elasticsearch.client.common;

4
elx-api/build.gradle
Normal file

@ -0,0 +1,4 @@
dependencies {
    compile "org.xbib:metrics:${project.property('xbib-metrics.version')}"
    compile "org.xbib.elasticsearch:elasticsearch:${rootProject.property('elasticsearch-server.version')}"
}

36
elx-api/src/main/java/org/xbib/elx/api/BulkController.java
Normal file

@ -0,0 +1,36 @@
package org.xbib.elx.api;

import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.common.settings.Settings;

import java.io.Closeable;
import java.io.Flushable;
import java.io.IOException;
import java.util.concurrent.TimeUnit;

public interface BulkController extends Closeable, Flushable {

    void init(Settings settings);

    Throwable getLastBulkError();

    void startBulkMode(IndexDefinition indexDefinition) throws IOException;

    void startBulkMode(String indexName, long startRefreshIntervalInSeconds,
                       long stopRefreshIntervalInSeconds) throws IOException;

    void index(IndexRequest indexRequest);

    void delete(DeleteRequest deleteRequest);

    void update(UpdateRequest updateRequest);

    boolean waitForResponses(long timeout, TimeUnit timeUnit);

    void stopBulkMode(IndexDefinition indexDefinition) throws IOException;

    void stopBulkMode(String index, long timeout, TimeUnit timeUnit) throws IOException;

}

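To illustrate the intended call sequence, here is a minimal, hedged usage sketch; controller stands for any BulkController implementation (for example the DefaultBulkController added further below), and the index name, document, and settings values are invented for this example.

// Illustrative only: "articles", the payload, and the settings value are placeholders.
Settings settings = Settings.builder()
        .put("MAX_ACTIONS_PER_REQUEST", 1000)   // key name as read by DefaultBulkController below
        .build();
controller.init(settings);
controller.startBulkMode("articles", 0L, 30L);
controller.index(new IndexRequest("articles", "doc", "1")
        .source("{\"title\":\"Hello\"}", XContentType.JSON));
controller.flush();
if (!controller.waitForResponses(30L, TimeUnit.SECONDS)) {
    // timed out; inspect controller.getLastBulkError()
}
controller.stopBulkMode("articles", 30L, TimeUnit.SECONDS);
controller.close();
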
@ -1,9 +1,14 @@
|
|||
package org.xbib.elasticsearch.client;
|
||||
package org.xbib.elx.api;
|
||||
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.xbib.metrics.Count;
|
||||
import org.xbib.metrics.Metered;
|
||||
|
||||
public interface BulkMetric {
|
||||
import java.io.Closeable;
|
||||
|
||||
public interface BulkMetric extends Closeable {
|
||||
|
||||
void init(Settings settings);
|
||||
|
||||
Metered getTotalIngest();
|
||||
|
||||
|
@ -19,9 +24,9 @@ public interface BulkMetric {
|
|||
|
||||
Count getFailed();
|
||||
|
||||
long elapsed();
|
||||
|
||||
void start();
|
||||
|
||||
void stop();
|
||||
|
||||
long elapsed();
|
||||
}
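A hedged sketch of how calling code might read these metrics around a bulk run; metric is any BulkMetric implementation, and settings and logger are assumed to exist in the caller.

// Illustrative metric reporting around a bulk run.
metric.init(settings);
metric.start();
// ... run bulk indexing ...
metric.stop();
logger.info("submitted={} succeeded={} failed={} elapsed={}",
        metric.getSubmitted().getCount(),
        metric.getSucceeded().getCount(),
        metric.getFailed().getCount(),
        metric.elapsed());
metric.close();
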
|
64
elx-api/src/main/java/org/xbib/elx/api/BulkProcessor.java
Normal file

@ -0,0 +1,64 @@
package org.xbib.elx.api;

import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;

import java.io.Closeable;
import java.io.Flushable;
import java.util.concurrent.TimeUnit;

public interface BulkProcessor extends Closeable, Flushable {

    BulkProcessor add(ActionRequest request);

    BulkProcessor add(ActionRequest request, Object payload);

    boolean awaitFlush(long timeout, TimeUnit unit) throws InterruptedException;

    boolean awaitClose(long timeout, TimeUnit unit) throws InterruptedException;

    interface BulkRequestHandler {

        void execute(BulkRequest bulkRequest, long executionId);

        boolean close(long timeout, TimeUnit unit) throws InterruptedException;
    }

    /**
     * A listener for the execution.
     */
    public interface Listener {

        /**
         * Callback before the bulk is executed.
         *
         * @param executionId execution ID
         * @param request request
         */
        void beforeBulk(long executionId, BulkRequest request);

        /**
         * Callback after a successful execution of a bulk request.
         *
         * @param executionId execution ID
         * @param request request
         * @param response response
         */
        void afterBulk(long executionId, BulkRequest request, BulkResponse response);

        /**
         * Callback after a failed execution of a bulk request.
         *
         * Note that if an instance of <code>InterruptedException</code> is passed, request processing has been
         * cancelled externally, and the thread's interruption status has been restored prior to calling this method.
         *
         * @param executionId execution ID
         * @param request request
         * @param failure failure
         */
        void afterBulk(long executionId, BulkRequest request, Throwable failure);
    }
}

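A minimal listener sketch, e.g. for logging; DefaultBulkController further below installs a richer one that also feeds the metrics. The logger variable is assumed to exist in the enclosing class.

// Hedged example of a BulkProcessor.Listener implementation.
BulkProcessor.Listener listener = new BulkProcessor.Listener() {

    @Override
    public void beforeBulk(long executionId, BulkRequest request) {
        logger.debug("bulk [{}] with {} actions", executionId, request.numberOfActions());
    }

    @Override
    public void afterBulk(long executionId, BulkRequest request, BulkResponse response) {
        if (response.hasFailures()) {
            logger.warn("bulk [{}] had failures: {}", executionId, response.buildFailureMessage());
        }
    }

    @Override
    public void afterBulk(long executionId, BulkRequest request, Throwable failure) {
        logger.error("bulk [" + executionId + "] failed", failure);
    }
};
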
480
elx-api/src/main/java/org/xbib/elx/api/ExtendedClient.java
Normal file
|
@ -0,0 +1,480 @@
|
|||
package org.xbib.elx.api;
|
||||
|
||||
import org.elasticsearch.action.delete.DeleteRequest;
|
||||
import org.elasticsearch.action.index.IndexRequest;
|
||||
import org.elasticsearch.action.update.UpdateRequest;
|
||||
import org.elasticsearch.client.ElasticsearchClient;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
|
||||
import java.io.Closeable;
|
||||
import java.io.Flushable;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
/**
|
||||
* Interface for extended managing and indexing methods of an Elasticsearch client.
|
||||
*/
|
||||
public interface ExtendedClient extends Flushable, Closeable {
|
||||
|
||||
/**
|
||||
* Set an Elasticsearch client to extend. May be null for a TransportClient.
|
||||
* @param client client
|
||||
* @return this client
|
||||
*/
|
||||
ExtendedClient setClient(ElasticsearchClient client);
|
||||
|
||||
/**
|
||||
* Return Elasticsearch client.
|
||||
*
|
||||
* @return Elasticsearch client
|
||||
*/
|
||||
ElasticsearchClient getClient();
|
||||
|
||||
/**
|
||||
* Get bulk metric.
|
||||
* @return the bulk metric
|
||||
*/
|
||||
BulkMetric getBulkMetric();
|
||||
|
||||
/**
|
||||
* Get bulk control.
|
||||
* @return the bulk control
|
||||
*/
|
||||
BulkController getBulkController();
|
||||
|
||||
/**
|
||||
* Initialize the extended client, the bulk metric, and the bulk controller;
* create instances and connect to the cluster if required.
|
||||
*
|
||||
* @param settings settings
|
||||
* @return this client
|
||||
* @throws IOException if init fails
|
||||
*/
|
||||
ExtendedClient init(Settings settings) throws IOException;
|
||||
|
||||
/**
|
||||
* Build index definition from settings.
|
||||
*
|
||||
* @param index the index name
|
||||
* @param settings the settings for the index
|
||||
* @return index definition
|
||||
* @throws IOException if settings/mapping URL is invalid/malformed
|
||||
*/
|
||||
IndexDefinition buildIndexDefinitionFromSettings(String index, Settings settings) throws IOException;
|
||||
|
||||
/**
|
||||
* Add index request. Each request will be added to a queue for bulking requests.
|
||||
* Submitting request will be done when limits are exceeded.
|
||||
*
|
||||
* @param index the index
|
||||
* @param id the id
|
||||
* @param create true if document must be created
|
||||
* @param source the source
|
||||
* @return this
|
||||
*/
|
||||
ExtendedClient index(String index, String id, boolean create, BytesReference source);
|
||||
|
||||
/**
|
||||
* Index request. Each request will be added to a queue for bulking requests.
|
||||
* Submitting request will be done when limits are exceeded.
|
||||
*
|
||||
* @param index the index
|
||||
* @param id the id
|
||||
* @param create true if document is to be created, false otherwise
|
||||
* @param source the source
|
||||
* @return this client methods
|
||||
*/
|
||||
ExtendedClient index(String index, String id, boolean create, String source);
|
||||
|
||||
/**
|
||||
* Index request. Each request will be added to a queue for bulking requests.
|
||||
* Submitting request will be done when bulk limits are exceeded.
|
||||
*
|
||||
* @param indexRequest the index request to add
|
||||
* @return this
|
||||
*/
|
||||
ExtendedClient index(IndexRequest indexRequest);
|
||||
|
||||
/**
|
||||
* Delete request.
|
||||
*
|
||||
* @param index the index
|
||||
* @param id the id
|
||||
* @return this
|
||||
*/
|
||||
ExtendedClient delete(String index, String id);
|
||||
|
||||
/**
|
||||
* Delete request. Each request will be added to a queue for bulking requests.
|
||||
* Submitting request will be done when bulk limits are exceeded.
|
||||
*
|
||||
* @param deleteRequest the delete request to add
|
||||
* @return this
|
||||
*/
|
||||
ExtendedClient delete(DeleteRequest deleteRequest);
|
||||
|
||||
/**
|
||||
* Bulked update request. Each request will be added to a queue for bulking requests.
|
||||
* Submitting request will be done when bulk limits are exceeded.
|
||||
* Note that updates only work correctly when all operations between nodes are synchronized.
|
||||
*
|
||||
* @param index the index
|
||||
* @param id the id
|
||||
* @param source the source
|
||||
* @return this
|
||||
*/
|
||||
ExtendedClient update(String index, String id, BytesReference source);
|
||||
|
||||
/**
|
||||
* Update document. Use with precaution! Does not work in all cases.
|
||||
*
|
||||
* @param index the index
|
||||
* @param id the id
|
||||
* @param source the source
|
||||
* @return this
|
||||
*/
|
||||
ExtendedClient update(String index, String id, String source);
|
||||
|
||||
/**
|
||||
* Bulked update request. Each request will be added to a queue for bulking requests.
|
||||
* Submitting request will be done when bulk limits are exceeded.
|
||||
* Note that updates only work correctly when all operations between nodes are synchronized.
|
||||
*
|
||||
* @param updateRequest the update request to add
|
||||
* @return this
|
||||
*/
|
||||
ExtendedClient update(UpdateRequest updateRequest);
|
||||
|
||||
/**
|
||||
* Create a new index.
|
||||
*
|
||||
* @param index index
|
||||
* @return this
|
||||
* @throws IOException if new index creation fails
|
||||
*/
|
||||
ExtendedClient newIndex(String index) throws IOException;
|
||||
|
||||
/**
|
||||
* Create a new index.
|
||||
*
|
||||
* @param index index
|
||||
* @param settings settings
|
||||
* @param mapping mapping
|
||||
* @return this
|
||||
* @throws IOException if settings/mapping is invalid or index creation fails
|
||||
*/
|
||||
ExtendedClient newIndex(String index, InputStream settings, InputStream mapping) throws IOException;
|
||||
|
||||
/**
|
||||
* Create a new index.
|
||||
*
|
||||
* @param index index
|
||||
* @param settings settings
|
||||
* @return this
|
||||
* @throws IOException if settings is invalid or index creation fails
|
||||
*/
|
||||
ExtendedClient newIndex(String index, Settings settings) throws IOException;
|
||||
|
||||
/**
|
||||
* Create a new index.
|
||||
*
|
||||
* @param index index
|
||||
* @param settings settings
|
||||
* @param mapping mapping
|
||||
* @return this
|
||||
* @throws IOException if settings/mapping is invalid or index creation fails
|
||||
*/
|
||||
ExtendedClient newIndex(String index, Settings settings, String mapping) throws IOException;
|
||||
|
||||
/**
|
||||
* Create a new index.
|
||||
*
|
||||
* @param index index
|
||||
* @param settings settings
|
||||
* @param mapping mapping
|
||||
* @return this
|
||||
* @throws IOException if settings/mapping is invalid or index creation fails
|
||||
*/
|
||||
ExtendedClient newIndex(String index, Settings settings, Map<String, Object> mapping) throws IOException;
|
||||
|
||||
/**
|
||||
* Create a new index.
|
||||
* @param indexDefinition the index definition
|
||||
* @return this
|
||||
* @throws IOException if settings/mapping is invalid or index creation fails
|
||||
*/
|
||||
ExtendedClient newIndex(IndexDefinition indexDefinition) throws IOException;
|
||||
|
||||
/**
|
||||
* Delete an index.
|
||||
* @param indexDefinition the index definition
|
||||
* @return this
|
||||
*/
|
||||
ExtendedClient deleteIndex(IndexDefinition indexDefinition);
|
||||
|
||||
/**
|
||||
* Delete an index.
|
||||
*
|
||||
* @param index index
|
||||
* @return this
|
||||
*/
|
||||
ExtendedClient deleteIndex(String index);
|
||||
|
||||
/**
|
||||
* Start bulk mode for indexes.
|
||||
* @param indexDefinition index definition
|
||||
* @return this
|
||||
* @throws IOException if bulk could not be started
|
||||
*/
|
||||
ExtendedClient startBulk(IndexDefinition indexDefinition) throws IOException;
|
||||
|
||||
/**
|
||||
* Start bulk mode.
|
||||
*
|
||||
* @param index index
|
||||
* @param startRefreshIntervalSeconds refresh interval before bulk
|
||||
* @param stopRefreshIntervalSeconds refresh interval after bulk
|
||||
* @return this
|
||||
* @throws IOException if bulk could not be started
|
||||
*/
|
||||
ExtendedClient startBulk(String index, long startRefreshIntervalSeconds,
|
||||
long stopRefreshIntervalSeconds) throws IOException;
|
||||
|
||||
/**
|
||||
* Stop bulk mode.
|
||||
*
|
||||
* @param indexDefinition index definition
|
||||
* @return this
|
||||
* @throws IOException if bulk could not be stopped
|
||||
*/
|
||||
ExtendedClient stopBulk(IndexDefinition indexDefinition) throws IOException;
|
||||
|
||||
/**
|
||||
* Stops bulk mode.
|
||||
*
|
||||
* @param index index
|
||||
* @param timeout maximum wait time
|
||||
* @param timeUnit time unit for timeout
|
||||
* @return this
|
||||
* @throws IOException if bulk could not be stopped
|
||||
*/
|
||||
ExtendedClient stopBulk(String index, long timeout, TimeUnit timeUnit) throws IOException;
|
||||
|
||||
/**
|
||||
* Update replica level.
|
||||
* @param indexDefinition the index definition
|
||||
* @param level the replica level
|
||||
* @return this
|
||||
* @throws IOException if replica setting could not be updated
|
||||
*/
|
||||
ExtendedClient updateReplicaLevel(IndexDefinition indexDefinition, int level) throws IOException;
|
||||
|
||||
/**
|
||||
* Update replica level.
|
||||
*
|
||||
* @param index index
|
||||
* @param level the replica level
|
||||
* @param maxWaitTime maximum wait time
|
||||
* @param timeUnit time unit
|
||||
* @return this
|
||||
* @throws IOException if replica setting could not be updated
|
||||
*/
|
||||
ExtendedClient updateReplicaLevel(String index, int level, long maxWaitTime, TimeUnit timeUnit) throws IOException;
|
||||
|
||||
/**
|
||||
* Get replica level.
|
||||
* @param indexDefinition the index name
|
||||
* @return the replica level of the index
|
||||
*/
|
||||
int getReplicaLevel(IndexDefinition indexDefinition);
|
||||
|
||||
/**
|
||||
* Get replica level.
|
||||
* @param index the index name
|
||||
* @return the replica level of the index
|
||||
*/
|
||||
int getReplicaLevel(String index);
|
||||
|
||||
/**
|
||||
* Refresh the index.
|
||||
*
|
||||
* @param index index
|
||||
* @return this
|
||||
*/
|
||||
ExtendedClient refreshIndex(String index);
|
||||
|
||||
/**
|
||||
* Flush the index. The cluster clears cache and completes indexing.
|
||||
*
|
||||
* @param index index
|
||||
* @return this
|
||||
*/
|
||||
ExtendedClient flushIndex(String index);
|
||||
|
||||
/**
|
||||
* Force segment merge of an index.
|
||||
* @param indexDefinition the index definition
|
||||
* @return this
|
||||
*/
|
||||
boolean forceMerge(IndexDefinition indexDefinition);
|
||||
|
||||
/**
|
||||
* Force segment merge of an index.
|
||||
* @param index the index
|
||||
* @param maxWaitTime maximum wait time
|
||||
* @param timeUnit time unit
|
||||
* @return this
|
||||
*/
|
||||
boolean forceMerge(String index, long maxWaitTime, TimeUnit timeUnit);
|
||||
|
||||
/**
|
||||
* Wait for all outstanding bulk responses.
|
||||
*
|
||||
* @param timeout maximum wait time
|
||||
* @param timeUnit unit of timeout value
|
||||
* @return true if wait succeeded, false if wait timed out
|
||||
*/
|
||||
boolean waitForResponses(long timeout, TimeUnit timeUnit);
|
||||
|
||||
/**
|
||||
* Wait for cluster being healthy.
|
||||
*
|
||||
* @param healthColor cluster health color to wait for
|
||||
* @param maxWaitTime time value
|
||||
* @param timeUnit time unit
|
||||
* @return true if wait succeeded, false if wait timed out
|
||||
*/
|
||||
boolean waitForCluster(String healthColor, long maxWaitTime, TimeUnit timeUnit);
|
||||
|
||||
/**
|
||||
* Get current health color.
|
||||
*
|
||||
* @param maxWaitTime maximum wait time
|
||||
* @param timeUnit time unit
|
||||
* @return the cluster health color
|
||||
*/
|
||||
String getHealthColor(long maxWaitTime, TimeUnit timeUnit);
|
||||
|
||||
/**
|
||||
* Wait for index recovery (after replica change).
|
||||
*
|
||||
* @param index index
|
||||
* @param maxWaitTime maximum wait time
|
||||
* @param timeUnit time unit
|
||||
* @return true if wait succeeded, false if wait timed out
|
||||
*/
|
||||
boolean waitForRecovery(String index, long maxWaitTime, TimeUnit timeUnit);
|
||||
|
||||
/**
|
||||
* Update index setting.
|
||||
* @param index the index
|
||||
* @param key the key of the value to be updated
|
||||
* @param value the new value
|
||||
* @param timeout timeout
|
||||
* @param timeUnit time unit
|
||||
* @throws IOException if update index setting failed
|
||||
*/
|
||||
void updateIndexSetting(String index, String key, Object value, long timeout, TimeUnit timeUnit) throws IOException;
|
||||
|
||||
/**
|
||||
* Resolve alias.
|
||||
*
|
||||
* @param alias the alias
|
||||
* @return the index name behind the alias, or the alias if there is no index
|
||||
*/
|
||||
String resolveAlias(String alias);
|
||||
|
||||
/**
|
||||
* Resolve alias to all connected indices, sort index names with most recent timestamp on top, return this index
|
||||
* name.
|
||||
*
|
||||
* @param alias the alias
|
||||
* @return the most recent index name pointing to the alias
|
||||
*/
|
||||
String resolveMostRecentIndex(String alias);
|
||||
|
||||
/**
|
||||
* Get all index filters.
|
||||
* @param index the index
|
||||
* @return map of index filters
|
||||
*/
|
||||
Map<String, String> getAliases(String index);
|
||||
|
||||
/**
|
||||
* Shift from one index to another.
|
||||
* @param indexDefinition the index definition
|
||||
* @param additionalAliases new aliases
|
||||
* @return this
|
||||
*/
|
||||
IndexShiftResult shiftIndex(IndexDefinition indexDefinition, List<String> additionalAliases);
|
||||
|
||||
/**
|
||||
* Shift from one index to another.
|
||||
* @param indexDefinition the index definition
|
||||
* @param additionalAliases new aliases
|
||||
* @param indexAliasAdder method to add aliases
|
||||
* @return this
|
||||
*/
|
||||
IndexShiftResult shiftIndex(IndexDefinition indexDefinition, List<String> additionalAliases,
|
||||
IndexAliasAdder indexAliasAdder);
|
||||
|
||||
/**
|
||||
* Shift from one index to another.
|
||||
* @param index the index name
|
||||
* @param fullIndexName the index name with timestamp
|
||||
* @param additionalAliases a list of names that should be set as index aliases
|
||||
* @return this
|
||||
*/
|
||||
IndexShiftResult shiftIndex(String index, String fullIndexName, List<String> additionalAliases);
|
||||
|
||||
/**
|
||||
* Shift from one index to another.
|
||||
* @param index the index name
|
||||
* @param fullIndexName the index name with timestamp
|
||||
* @param additionalAliases a list of names that should be set as index aliases
|
||||
* @param adder an adder method to create alias term queries
|
||||
* @return this
|
||||
*/
|
||||
IndexShiftResult shiftIndex(String index, String fullIndexName, List<String> additionalAliases,
|
||||
IndexAliasAdder adder);
|
||||
|
||||
/**
|
||||
* Prune index.
|
||||
* @param indexDefinition the index definition
|
||||
* @return the index prune result
|
||||
*/
|
||||
IndexPruneResult pruneIndex(IndexDefinition indexDefinition);
|
||||
|
||||
/**
|
||||
* Apply retention policy to prune indices. All indices before delta should be deleted,
|
||||
* but the number of mintokeep indices must be kept.
|
||||
*
|
||||
* @param index index name
|
||||
* @param fullIndexName index name with timestamp
|
||||
* @param delta timestamp delta (for index timestamps)
|
||||
* @param mintokeep minimum number of indices to keep
|
||||
* @param perform true if pruning should be executed, false if not
|
||||
* @return the index prune result
|
||||
*/
|
||||
IndexPruneResult pruneIndex(String index, String fullIndexName, int delta, int mintokeep, boolean perform);
|
||||
|
||||
/**
|
||||
* Find the timestamp of the most recently indexed document in the index.
|
||||
*
|
||||
* @param index the index name
|
||||
* @param timestampfieldname the timestamp field name
|
||||
* @return millis UTC millis of the most recent document
|
||||
* @throws IOException if the most recent document cannot be found
|
||||
*/
|
||||
Long mostRecentDocument(String index, String timestampfieldname) throws IOException;
|
||||
|
||||
/**
|
||||
* Get cluster name.
|
||||
* @return the cluster name
|
||||
*/
|
||||
String getClusterName();
|
||||
}
|
|
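As a rough sketch of the intended call flow: index names, document source, and the concrete provider class are placeholders, and IOException handling is omitted.

ExtendedClient client = ClientBuilder.builder()
        .provider(MyExtendedClientProvider.class)   // placeholder provider, see ExtendedClientProvider below
        .put("cluster.name", "elasticsearch")
        .build();
client.newIndex("articles");
client.startBulk("articles", 0L, 30L);
client.index("articles", "1", true, "{\"title\":\"Hello\"}");
client.flush();
client.waitForResponses(30L, TimeUnit.SECONDS);
client.stopBulk("articles", 30L, TimeUnit.SECONDS);
client.refreshIndex("articles");
client.close();
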
@ -0,0 +1,7 @@
package org.xbib.elx.api;

@FunctionalInterface
public interface ExtendedClientProvider<C extends ExtendedClient> {

    C getExtendedClient();
}

@ -0,0 +1,9 @@
package org.xbib.elx.api;

import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;

@FunctionalInterface
public interface IndexAliasAdder {

    void addIndexAlias(IndicesAliasesRequest request, String index, String alias);
}

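Concrete client modules plug in through this provider interface; the following is a hedged sketch (class names are invented) of a provider plus the ServiceLoader registration that ClientBuilder below relies on.

// Hypothetical provider for some ExtendedClient implementation.
public class MyExtendedClientProvider implements ExtendedClientProvider<MyExtendedClient> {

    @Override
    public MyExtendedClient getExtendedClient() {
        return new MyExtendedClient();
    }
}

// Registered on the classpath in:
//   META-INF/services/org.xbib.elx.api.ExtendedClientProvider
// containing the fully qualified name of MyExtendedClientProvider.
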
70
elx-api/src/main/java/org/xbib/elx/api/IndexDefinition.java
Normal file

@ -0,0 +1,70 @@
package org.xbib.elx.api;

import java.net.MalformedURLException;
import java.net.URL;
import java.util.concurrent.TimeUnit;

public interface IndexDefinition {

    IndexDefinition setIndex(String index);

    String getIndex();

    IndexDefinition setFullIndexName(String fullIndexName);

    String getFullIndexName();

    IndexDefinition setSettingsUrl(String settingsUrlString) throws MalformedURLException;

    IndexDefinition setSettingsUrl(URL settingsUrl);

    URL getSettingsUrl();

    IndexDefinition setMappingsUrl(String mappingsUrlString) throws MalformedURLException;

    IndexDefinition setMappingsUrl(URL mappingsUrl);

    URL getMappingsUrl();

    IndexDefinition setDateTimePattern(String timeWindow);

    String getDateTimePattern();

    IndexDefinition setEnabled(boolean enabled);

    boolean isEnabled();

    IndexDefinition setIgnoreErrors(boolean ignoreErrors);

    boolean ignoreErrors();

    IndexDefinition setShift(boolean shift);

    boolean isShiftEnabled();

    IndexDefinition setForceMerge(boolean hasForceMerge);

    boolean hasForceMerge();

    IndexDefinition setReplicaLevel(int replicaLevel);

    int getReplicaLevel();

    IndexDefinition setRetention(IndexRetention indexRetention);

    IndexRetention getRetention();

    IndexDefinition setMaxWaitTime(long maxWaitTime, TimeUnit timeUnit);

    long getMaxWaitTime();

    TimeUnit getMaxWaitTimeUnit();

    IndexDefinition setStartRefreshInterval(long seconds);

    long getStartRefreshInterval();

    IndexDefinition setStopRefreshInterval(long seconds);

    long getStopRefreshInterval();
}

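For orientation, a hedged configuration sketch; def is any IndexDefinition implementation (no concrete class is shown in this excerpt) and all values are illustrative.

// All setters return the definition, so calls can be chained.
def.setIndex("articles")
   .setFullIndexName("articles20190101")
   .setDateTimePattern("yyyyMMdd")
   .setShift(true)
   .setForceMerge(true)
   .setReplicaLevel(1)
   .setStartRefreshInterval(0L)
   .setStopRefreshInterval(30L)
   .setMaxWaitTime(30L, TimeUnit.SECONDS);
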
16
elx-api/src/main/java/org/xbib/elx/api/IndexPruneResult.java
Normal file

@ -0,0 +1,16 @@
package org.xbib.elx.api;

import java.util.List;

public interface IndexPruneResult {

    enum State { NOTHING_TO_DO, SUCCESS, NONE }

    State getState();

    List<String> getCandidateIndices();

    List<String> getDeletedIndices();

    boolean isAcknowledged();
}

13
elx-api/src/main/java/org/xbib/elx/api/IndexRetention.java
Normal file

@ -0,0 +1,13 @@
package org.xbib.elx.api;

public interface IndexRetention {

    IndexRetention setDelta(int delta);

    int getDelta();

    IndexRetention setMinToKeep(int minToKeep);

    int getMinToKeep();

}

10
elx-api/src/main/java/org/xbib/elx/api/IndexShiftResult.java
Normal file

@ -0,0 +1,10 @@
package org.xbib.elx.api;

import java.util.List;

public interface IndexShiftResult {

    List<String> getMovedAliases();

    List<String> getNewAliases();
}

4
elx-api/src/main/java/org/xbib/elx/api/package-info.java
Normal file

@ -0,0 +1,4 @@
/**
 * The API of the extended Elasticsearch clients.
 */
package org.xbib.elx.api;

5
elx-common/build.gradle
Normal file

@ -0,0 +1,5 @@
dependencies {
    compile project(':elx-api')
    testCompile "org.xbib.elasticsearch:elasticsearch-analysis-common:${rootProject.property('elasticsearch-server.version')}"
    testCompile "org.xbib.elasticsearch:transport-netty4:${rootProject.property('elasticsearch-server.version')}"
}

File diff suppressed because it is too large

102
elx-common/src/main/java/org/xbib/elx/common/ClientBuilder.java
Normal file
|
@ -0,0 +1,102 @@
|
|||
package org.xbib.elx.common;
|
||||
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.client.ElasticsearchClient;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.ByteSizeValue;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.xbib.elx.api.ExtendedClient;
|
||||
import org.xbib.elx.api.ExtendedClientProvider;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.ServiceLoader;
|
||||
|
||||
@SuppressWarnings("rawtypes")
|
||||
public class ClientBuilder {
|
||||
|
||||
private final ElasticsearchClient client;
|
||||
|
||||
private final Settings.Builder settingsBuilder;
|
||||
|
||||
private Map<Class<? extends ExtendedClientProvider>, ExtendedClientProvider> providerMap;
|
||||
|
||||
private Class<? extends ExtendedClientProvider> provider;
|
||||
|
||||
public ClientBuilder() {
|
||||
this(null);
|
||||
}
|
||||
|
||||
public ClientBuilder(ElasticsearchClient client) {
|
||||
this(client, Thread.currentThread().getContextClassLoader());
|
||||
}
|
||||
|
||||
public ClientBuilder(ElasticsearchClient client, ClassLoader classLoader) {
|
||||
this.client = client;
|
||||
this.settingsBuilder = Settings.builder();
|
||||
settingsBuilder.put("node.name", "elx-client-" + Version.CURRENT);
|
||||
this.providerMap = new HashMap<>();
|
||||
ServiceLoader<ExtendedClientProvider> serviceLoader = ServiceLoader.load(ExtendedClientProvider.class,
|
||||
classLoader != null ? classLoader : Thread.currentThread().getContextClassLoader());
|
||||
for (ExtendedClientProvider provider : serviceLoader) {
|
||||
providerMap.put(provider.getClass(), provider);
|
||||
}
|
||||
}
|
||||
|
||||
public static ClientBuilder builder() {
|
||||
return new ClientBuilder();
|
||||
}
|
||||
|
||||
public static ClientBuilder builder(ElasticsearchClient client) {
|
||||
return new ClientBuilder(client);
|
||||
}
|
||||
|
||||
public ClientBuilder provider(Class<? extends ExtendedClientProvider> provider) {
|
||||
this.provider = provider;
|
||||
return this;
|
||||
}
|
||||
|
||||
public ClientBuilder put(String key, String value) {
|
||||
settingsBuilder.put(key, value);
|
||||
return this;
|
||||
}
|
||||
|
||||
public ClientBuilder put(String key, Integer value) {
|
||||
settingsBuilder.put(key, value);
|
||||
return this;
|
||||
}
|
||||
|
||||
public ClientBuilder put(String key, Long value) {
|
||||
settingsBuilder.put(key, value);
|
||||
return this;
|
||||
}
|
||||
|
||||
public ClientBuilder put(String key, Double value) {
|
||||
settingsBuilder.put(key, value);
|
||||
return this;
|
||||
}
|
||||
|
||||
public ClientBuilder put(String key, ByteSizeValue value) {
|
||||
settingsBuilder.put(key, value);
|
||||
return this;
|
||||
}
|
||||
|
||||
public ClientBuilder put(String key, TimeValue value) {
|
||||
settingsBuilder.put(key, value);
|
||||
return this;
|
||||
}
|
||||
|
||||
public ClientBuilder put(Settings settings) {
|
||||
settingsBuilder.put(settings);
|
||||
return this;
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public <C extends ExtendedClient> C build() throws IOException {
|
||||
if (provider == null) {
|
||||
throw new IllegalArgumentException("no provider");
|
||||
}
|
||||
return (C) providerMap.get(provider).getExtendedClient().setClient(client).init(settingsBuilder.build());
|
||||
}
|
||||
}
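Putting it together, construction might look like the following; the provider class is a placeholder, IOException handling is omitted, and the parameter keys follow the enum names read by DefaultBulkController below.

// Hedged sketch: tune bulk behaviour through the generic put() methods.
ExtendedClient client = ClientBuilder.builder()
        .provider(MyExtendedClientProvider.class)                   // placeholder provider
        .put("cluster.name", "elasticsearch")
        .put(Parameters.MAX_ACTIONS_PER_REQUEST.name(), 10000)
        .put(Parameters.MAX_CONCURRENT_REQUESTS.name(), 4)
        .put(Parameters.FLUSH_INTERVAL.name(), TimeValue.timeValueSeconds(30))
        .put(Parameters.MAX_VOLUME_PER_REQUEST.name(), new ByteSizeValue(10, ByteSizeUnit.MB))
        .build();
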
|
|
@ -0,0 +1,312 @@
|
|||
package org.xbib.elx.common;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.elasticsearch.action.bulk.BulkItemResponse;
|
||||
import org.elasticsearch.action.bulk.BulkRequest;
|
||||
import org.elasticsearch.action.bulk.BulkResponse;
|
||||
import org.elasticsearch.action.delete.DeleteRequest;
|
||||
import org.elasticsearch.action.index.IndexRequest;
|
||||
import org.elasticsearch.action.update.UpdateRequest;
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.ByteSizeValue;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.xbib.elx.api.BulkController;
|
||||
import org.xbib.elx.api.BulkMetric;
|
||||
import org.xbib.elx.api.BulkProcessor;
|
||||
import org.xbib.elx.api.ExtendedClient;
|
||||
import org.xbib.elx.api.IndexDefinition;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.concurrent.atomic.AtomicBoolean;
|
||||
|
||||
public class DefaultBulkController implements BulkController {
|
||||
|
||||
private static final Logger logger = LogManager.getLogger(DefaultBulkController.class);
|
||||
|
||||
private final ExtendedClient client;
|
||||
|
||||
private final BulkMetric bulkMetric;
|
||||
|
||||
private final List<String> indexNames;
|
||||
|
||||
private final Map<String, Long> startBulkRefreshIntervals;
|
||||
|
||||
private final Map<String, Long> stopBulkRefreshIntervals;
|
||||
|
||||
private long maxWaitTime;
|
||||
|
||||
private TimeUnit maxWaitTimeUnit;
|
||||
|
||||
private BulkProcessor bulkProcessor;
|
||||
|
||||
private BulkListener bulkListener;
|
||||
|
||||
private AtomicBoolean active;
|
||||
|
||||
public DefaultBulkController(ExtendedClient client, BulkMetric bulkMetric) {
|
||||
this.client = client;
|
||||
this.bulkMetric = bulkMetric;
|
||||
this.indexNames = new ArrayList<>();
|
||||
this.active = new AtomicBoolean(false);
|
||||
this.startBulkRefreshIntervals = new HashMap<>();
|
||||
this.stopBulkRefreshIntervals = new HashMap<>();
|
||||
this.maxWaitTime = 30L;
|
||||
this.maxWaitTimeUnit = TimeUnit.SECONDS;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Throwable getLastBulkError() {
|
||||
return bulkListener.getLastBulkError();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void init(Settings settings) {
|
||||
int maxActionsPerRequest = settings.getAsInt(Parameters.MAX_ACTIONS_PER_REQUEST.name(),
|
||||
Parameters.DEFAULT_MAX_ACTIONS_PER_REQUEST.getNum());
|
||||
int maxConcurrentRequests = settings.getAsInt(Parameters.MAX_CONCURRENT_REQUESTS.name(),
|
||||
Parameters.DEFAULT_MAX_CONCURRENT_REQUESTS.getNum());
|
||||
TimeValue flushIngestInterval = settings.getAsTime(Parameters.FLUSH_INTERVAL.name(),
|
||||
TimeValue.timeValueSeconds(Parameters.DEFAULT_FLUSH_INTERVAL.getNum()));
|
||||
ByteSizeValue maxVolumePerRequest = settings.getAsBytesSize(Parameters.MAX_VOLUME_PER_REQUEST.name(),
|
||||
ByteSizeValue.parseBytesSizeValue(Parameters.DEFAULT_MAX_VOLUME_PER_REQUEST.getString(),
|
||||
"maxVolumePerRequest"));
|
||||
if (logger.isInfoEnabled()) {
|
||||
logger.info("bulk processor up with maxActionsPerRequest = {} maxConcurrentRequests = {} " +
|
||||
"flushIngestInterval = {} maxVolumePerRequest = {}",
|
||||
maxActionsPerRequest, maxConcurrentRequests, flushIngestInterval, maxVolumePerRequest);
|
||||
}
|
||||
this.bulkListener = new BulkListener();
|
||||
DefaultBulkProcessor.Builder builder = DefaultBulkProcessor.builder((Client) client.getClient(), bulkListener)
|
||||
.setBulkActions(maxActionsPerRequest)
|
||||
.setConcurrentRequests(maxConcurrentRequests)
|
||||
.setFlushInterval(flushIngestInterval)
|
||||
.setBulkSize(maxVolumePerRequest);
|
||||
this.bulkProcessor = builder.build();
|
||||
this.active.set(true);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void startBulkMode(IndexDefinition indexDefinition) throws IOException {
|
||||
startBulkMode(indexDefinition.getFullIndexName(), indexDefinition.getStartRefreshInterval(),
|
||||
indexDefinition.getStopRefreshInterval());
|
||||
}
|
||||
|
||||
@Override
|
||||
public void startBulkMode(String indexName,
|
||||
long startRefreshIntervalInSeconds,
|
||||
long stopRefreshIntervalInSeconds) throws IOException {
|
||||
if (!indexNames.contains(indexName)) {
|
||||
indexNames.add(indexName);
|
||||
startBulkRefreshIntervals.put(indexName, startRefreshIntervalInSeconds);
|
||||
stopBulkRefreshIntervals.put(indexName, stopRefreshIntervalInSeconds);
|
||||
if (startRefreshIntervalInSeconds != 0L) {
|
||||
client.updateIndexSetting(indexName, "refresh_interval", startRefreshIntervalInSeconds + "s",
|
||||
30L, TimeUnit.SECONDS);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void index(IndexRequest indexRequest) {
|
||||
if (!active.get()) {
|
||||
throw new IllegalStateException("inactive");
|
||||
}
|
||||
try {
|
||||
if (bulkMetric != null) {
|
||||
bulkMetric.getCurrentIngest().inc(indexRequest.index(), indexRequest.type(), indexRequest.id());
|
||||
}
|
||||
bulkProcessor.add(indexRequest);
|
||||
} catch (Exception e) {
|
||||
bulkListener.lastBulkError = e;
|
||||
active.set(false);
|
||||
if (logger.isErrorEnabled()) {
|
||||
logger.error("bulk add of index failed: " + e.getMessage(), e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void delete(DeleteRequest deleteRequest) {
|
||||
if (!active.get()) {
|
||||
throw new IllegalStateException("inactive");
|
||||
}
|
||||
try {
|
||||
if (bulkMetric != null) {
|
||||
bulkMetric.getCurrentIngest().inc(deleteRequest.index(), deleteRequest.type(), deleteRequest.id());
|
||||
}
|
||||
bulkProcessor.add(deleteRequest);
|
||||
} catch (Exception e) {
|
||||
bulkListener.lastBulkError = e;
|
||||
active.set(false);
|
||||
if (logger.isErrorEnabled()) {
|
||||
logger.error("bulk add of delete failed: " + e.getMessage(), e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void update(UpdateRequest updateRequest) {
|
||||
if (!active.get()) {
|
||||
throw new IllegalStateException("inactive");
|
||||
}
|
||||
try {
|
||||
if (bulkMetric != null) {
|
||||
bulkMetric.getCurrentIngest().inc(updateRequest.index(), updateRequest.type(), updateRequest.id());
|
||||
}
|
||||
bulkProcessor.add(updateRequest);
|
||||
} catch (Exception e) {
|
||||
bulkListener.lastBulkError = e;
|
||||
active.set(false);
|
||||
if (logger.isErrorEnabled()) {
|
||||
logger.error("bulk add of update failed: " + e.getMessage(), e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean waitForResponses(long timeout, TimeUnit timeUnit) {
|
||||
try {
|
||||
return bulkProcessor.awaitFlush(timeout, timeUnit);
|
||||
} catch (InterruptedException e) {
|
||||
Thread.currentThread().interrupt();
|
||||
logger.error("interrupted");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void stopBulkMode(IndexDefinition indexDefinition) throws IOException {
|
||||
stopBulkMode(indexDefinition.getFullIndexName(),
|
||||
indexDefinition.getMaxWaitTime(), indexDefinition.getMaxWaitTimeUnit());
|
||||
}
|
||||
|
||||
@Override
|
||||
public void stopBulkMode(String index, long timeout, TimeUnit timeUnit) throws IOException {
|
||||
flush();
|
||||
if (waitForResponses(timeout, timeUnit)) {
|
||||
if (indexNames.contains(index)) {
|
||||
Long secs = stopBulkRefreshIntervals.get(index);
|
||||
if (secs != null && secs != 0L) {
|
||||
client.updateIndexSetting(index, "refresh_interval", secs + "s",
|
||||
30L, TimeUnit.SECONDS);
|
||||
}
|
||||
indexNames.remove(index);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void flush() throws IOException {
|
||||
if (bulkProcessor != null) {
|
||||
bulkProcessor.flush();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() throws IOException {
|
||||
flush();
|
||||
if (client.waitForResponses(maxWaitTime, maxWaitTimeUnit)) {
|
||||
for (String index : indexNames) {
|
||||
Long secs = stopBulkRefreshIntervals.get(index);
|
||||
if (secs != null && secs != 0L)
|
||||
client.updateIndexSetting(index, "refresh_interval", secs + "s",
|
||||
30L, TimeUnit.SECONDS);
|
||||
}
|
||||
indexNames.clear();
|
||||
}
|
||||
if (bulkProcessor != null) {
|
||||
bulkProcessor.close();
|
||||
}
|
||||
}
|
||||
|
||||
private class BulkListener implements DefaultBulkProcessor.Listener {
|
||||
|
||||
private final Logger logger = LogManager.getLogger("org.xbib.elx.BulkProcessor.Listener");
|
||||
|
||||
private Throwable lastBulkError = null;
|
||||
|
||||
@Override
|
||||
public void beforeBulk(long executionId, BulkRequest request) {
|
||||
long l = 0;
|
||||
if (bulkMetric != null) {
|
||||
l = bulkMetric.getCurrentIngest().getCount();
|
||||
bulkMetric.getCurrentIngest().inc();
|
||||
int n = request.numberOfActions();
|
||||
bulkMetric.getSubmitted().inc(n);
|
||||
bulkMetric.getCurrentIngestNumDocs().inc(n);
|
||||
bulkMetric.getTotalIngestSizeInBytes().inc(request.estimatedSizeInBytes());
|
||||
}
|
||||
if (logger.isDebugEnabled()) {
|
||||
logger.debug("before bulk [{}] [actions={}] [bytes={}] [concurrent requests={}]",
|
||||
executionId,
|
||||
request.numberOfActions(),
|
||||
request.estimatedSizeInBytes(),
|
||||
l);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void afterBulk(long executionId, BulkRequest request, BulkResponse response) {
|
||||
long l = 0;
|
||||
if (bulkMetric != null) {
|
||||
l = bulkMetric.getCurrentIngest().getCount();
|
||||
bulkMetric.getCurrentIngest().dec();
|
||||
bulkMetric.getSucceeded().inc(response.getItems().length);
|
||||
}
|
||||
int n = 0;
|
||||
for (BulkItemResponse itemResponse : response.getItems()) {
|
||||
if (bulkMetric != null) {
|
||||
bulkMetric.getCurrentIngest().dec(itemResponse.getIndex(), itemResponse.getType(), itemResponse.getId());
|
||||
}
|
||||
if (itemResponse.isFailed()) {
|
||||
n++;
|
||||
if (bulkMetric != null) {
|
||||
bulkMetric.getSucceeded().dec(1);
|
||||
bulkMetric.getFailed().inc(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (bulkMetric != null && logger.isDebugEnabled()) {
|
||||
logger.debug("after bulk [{}] [succeeded={}] [failed={}] [{}ms] {} concurrent requests",
|
||||
executionId,
|
||||
bulkMetric.getSucceeded().getCount(),
|
||||
bulkMetric.getFailed().getCount(),
|
||||
response.getTook().millis(),
|
||||
l);
|
||||
}
|
||||
if (n > 0) {
|
||||
if (logger.isErrorEnabled()) {
|
||||
logger.error("bulk [{}] failed with {} failed items, failure message = {}",
|
||||
executionId, n, response.buildFailureMessage());
|
||||
}
|
||||
} else {
|
||||
if (bulkMetric != null) {
|
||||
bulkMetric.getCurrentIngestNumDocs().dec(response.getItems().length);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void afterBulk(long executionId, BulkRequest request, Throwable failure) {
|
||||
if (bulkMetric != null) {
|
||||
bulkMetric.getCurrentIngest().dec();
|
||||
}
|
||||
lastBulkError = failure;
|
||||
active.set(false);
|
||||
if (logger.isErrorEnabled()) {
|
||||
logger.error("after bulk [" + executionId + "] error", failure);
|
||||
}
|
||||
}
|
||||
|
||||
Throwable getLastBulkError() {
|
||||
return lastBulkError;
|
||||
}
|
||||
}
|
||||
}
|
|
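For orientation, a minimal sketch of a custom listener against the DefaultBulkProcessor.Listener callbacks used by the BulkListener above; the three callback signatures are taken from this commit, the class name and log messages are illustrative only.

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.xbib.elx.common.DefaultBulkProcessor;

// hypothetical example class, not part of the commit
public class LoggingBulkListener implements DefaultBulkProcessor.Listener {

    private static final Logger logger = LogManager.getLogger(LoggingBulkListener.class);

    @Override
    public void beforeBulk(long executionId, BulkRequest request) {
        logger.debug("bulk [{}] submitted with {} actions", executionId, request.numberOfActions());
    }

    @Override
    public void afterBulk(long executionId, BulkRequest request, BulkResponse response) {
        logger.debug("bulk [{}] completed, {} items, failures={}",
                executionId, response.getItems().length, response.hasFailures());
    }

    @Override
    public void afterBulk(long executionId, BulkRequest request, Throwable failure) {
        logger.error("bulk [" + executionId + "] failed", failure);
    }
}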
@@ -1,16 +1,15 @@
|
|||
package org.xbib.elasticsearch.client;
|
||||
package org.xbib.elx.common;
|
||||
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.xbib.elx.api.BulkMetric;
|
||||
import org.xbib.metrics.Count;
|
||||
import org.xbib.metrics.CountMetric;
|
||||
import org.xbib.metrics.Meter;
|
||||
import org.xbib.metrics.Metered;
|
||||
|
||||
import java.util.concurrent.Executors;
|
||||
import java.util.concurrent.ScheduledExecutorService;
|
||||
|
||||
public class SimpleBulkMetric implements BulkMetric {
|
||||
|
||||
private final ScheduledExecutorService executorService;
|
||||
public class DefaultBulkMetric implements BulkMetric {
|
||||
|
||||
private final Meter totalIngest;
|
||||
|
||||
|
@ -30,13 +29,8 @@ public class SimpleBulkMetric implements BulkMetric {
|
|||
|
||||
private Long stopped;
|
||||
|
||||
public SimpleBulkMetric() {
|
||||
this(Executors.newSingleThreadScheduledExecutor());
|
||||
}
|
||||
|
||||
public SimpleBulkMetric(ScheduledExecutorService executorService) {
|
||||
this.executorService = executorService;
|
||||
totalIngest = new Meter(executorService);
|
||||
public DefaultBulkMetric() {
|
||||
totalIngest = new Meter(Executors.newSingleThreadScheduledExecutor());
|
||||
totalIngestSizeInBytes = new CountMetric();
|
||||
currentIngest = new CountMetric();
|
||||
currentIngestNumDocs = new CountMetric();
|
||||
|
@ -45,6 +39,11 @@ public class SimpleBulkMetric implements BulkMetric {
|
|||
failed = new CountMetric();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void init(Settings settings) {
|
||||
start();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Metered getTotalIngest() {
|
||||
return totalIngest;
|
||||
|
@ -80,6 +79,11 @@ public class SimpleBulkMetric implements BulkMetric {
|
|||
return failed;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long elapsed() {
|
||||
return started != null ? ((stopped != null ? stopped : System.nanoTime()) - started) : -1L;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void start() {
|
||||
this.started = System.nanoTime();
|
||||
|
@ -90,12 +94,11 @@ public class SimpleBulkMetric implements BulkMetric {
|
|||
public void stop() {
|
||||
this.stopped = System.nanoTime();
|
||||
totalIngest.stop();
|
||||
executorService.shutdownNow();
|
||||
}
|
||||
|
||||
@Override
|
||||
public long elapsed() {
|
||||
return (stopped != null ? stopped : System.nanoTime()) - started;
|
||||
public void close() {
|
||||
stop();
|
||||
totalIngest.shutdown();
|
||||
}
|
||||
|
||||
}
|
|
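A minimal sketch of how the metric above might be sampled after a bulk run; start(), stop(), elapsed() (nanoseconds) and the counter getters appear in this commit, while the reporting class and the throughput arithmetic are only an illustration.

import org.xbib.elx.api.BulkMetric;

// hypothetical example, not part of the commit
public class MetricReport {

    static void report(BulkMetric metric) {
        long succeeded = metric.getSucceeded().getCount();
        long failed = metric.getFailed().getCount();
        double seconds = metric.elapsed() / 1_000_000_000.0d;
        System.out.printf("bulk done: %d ok, %d failed, %.1f docs/s%n",
                succeeded, failed, seconds > 0 ? succeeded / seconds : 0.0d);
    }
}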
@@ -1,19 +1,21 @@
|
|||
package org.xbib.elasticsearch.client;
|
||||
package org.xbib.elx.common;
|
||||
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.ActionRequest;
|
||||
import org.elasticsearch.action.bulk.BulkAction;
|
||||
import org.elasticsearch.action.bulk.BulkRequest;
|
||||
import org.elasticsearch.action.bulk.BulkResponse;
|
||||
import org.elasticsearch.action.delete.DeleteRequest;
|
||||
import org.elasticsearch.action.index.IndexRequest;
|
||||
import org.elasticsearch.action.update.UpdateRequest;
|
||||
import org.elasticsearch.client.ElasticsearchClient;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.common.unit.ByteSizeUnit;
|
||||
import org.elasticsearch.common.unit.ByteSizeValue;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.common.util.concurrent.EsExecutors;
|
||||
import org.elasticsearch.common.util.concurrent.FutureUtils;
|
||||
import org.xbib.elx.api.BulkProcessor;
|
||||
|
||||
import java.io.Closeable;
|
||||
import java.util.concurrent.Executors;
|
||||
import java.util.concurrent.ScheduledFuture;
|
||||
import java.util.concurrent.ScheduledThreadPoolExecutor;
|
||||
|
@ -27,36 +29,38 @@ import java.util.concurrent.atomic.AtomicLong;
|
|||
* requests allowed to be executed in parallel.
|
||||
* In order to create a new bulk processor, use the {@link Builder}.
|
||||
*/
|
||||
public class BulkProcessor implements Closeable {
|
||||
public class DefaultBulkProcessor implements BulkProcessor {
|
||||
|
||||
private final int maximumBulkActionsPerRequest;
|
||||
private final int bulkActions;
|
||||
|
||||
private final long maximumBulkRequestByteSize;
|
||||
private final long bulkSize;
|
||||
|
||||
private final ScheduledThreadPoolExecutor scheduler;
|
||||
|
||||
private final ScheduledFuture<?> scheduledFuture;
|
||||
|
||||
private final AtomicLong executionIdGen = new AtomicLong();
|
||||
private final AtomicLong executionIdGen;
|
||||
|
||||
private final BulkExecutor bulkExecutor;
|
||||
private final BulkRequestHandler bulkRequestHandler;
|
||||
|
||||
private BulkRequest bulkRequest;
|
||||
|
||||
private volatile boolean closed = false;
|
||||
private volatile boolean closed;
|
||||
|
||||
private BulkProcessor(ElasticsearchClient client, Listener listener, int maximumConcurrentBulkRequests,
|
||||
int maximumBulkActionsPerRequest, ByteSizeValue maximumBulkRequestByteSize,
|
||||
@Nullable TimeValue flushInterval) {
|
||||
this.maximumBulkActionsPerRequest = maximumBulkActionsPerRequest;
|
||||
this.maximumBulkRequestByteSize = maximumBulkRequestByteSize.getBytes();
|
||||
private DefaultBulkProcessor(Client client, Listener listener, String name, int concurrentRequests,
|
||||
int bulkActions, ByteSizeValue bulkSize, TimeValue flushInterval) {
|
||||
this.executionIdGen = new AtomicLong();
|
||||
this.closed = false;
|
||||
this.bulkActions = bulkActions;
|
||||
this.bulkSize = bulkSize.getBytes();
|
||||
this.bulkRequest = new BulkRequest();
|
||||
this.bulkExecutor = maximumConcurrentBulkRequests == 0 ?
|
||||
new SyncBulkExecutor(client, listener) :
|
||||
new AsyncBulkExecutor(client, listener, maximumConcurrentBulkRequests);
|
||||
|
||||
this.bulkRequestHandler = concurrentRequests == 0 ?
|
||||
new SyncBulkRequestHandler(client, listener) :
|
||||
new AsyncBulkRequestHandler(client, listener, concurrentRequests);
|
||||
if (flushInterval != null) {
|
||||
this.scheduler = (ScheduledThreadPoolExecutor) Executors.newScheduledThreadPool(1);
|
||||
this.scheduler = (ScheduledThreadPoolExecutor) Executors.newScheduledThreadPool(1,
|
||||
EsExecutors.daemonThreadFactory(client.settings(),
|
||||
name != null ? "[" + name + "]" : "" + "bulk_processor"));
|
||||
this.scheduler.setExecuteExistingDelayedTasksAfterShutdownPolicy(false);
|
||||
this.scheduler.setContinueExistingPeriodicTasksAfterShutdownPolicy(false);
|
||||
this.scheduledFuture = this.scheduler.scheduleWithFixedDelay(new Flush(), flushInterval.millis(),
|
||||
|
@ -67,7 +71,7 @@ public class BulkProcessor implements Closeable {
|
|||
}
|
||||
}
|
||||
|
||||
public static Builder builder(ElasticsearchClient client, Listener listener) {
|
||||
public static Builder builder(Client client, Listener listener) {
|
||||
if (client == null) {
|
||||
throw new NullPointerException("The client you specified while building a BulkProcessor is null");
|
||||
}
|
||||
|
@ -75,20 +79,28 @@ public class BulkProcessor implements Closeable {
|
|||
}
|
||||
|
||||
/**
* Closes the processor. If flushing by time is enabled, then it's shutdown. Any remaining bulk actions are flushed.
* Wait for the bulk request handler to flush.
* @param timeout the timeout value
* @param unit the timeout unit
* @return true if the method was successful, false on timeout
* @throws InterruptedException if interrupted while waiting
*/
|
||||
@Override
|
||||
public void close() {
|
||||
try {
|
||||
awaitClose(0, TimeUnit.NANOSECONDS);
|
||||
} catch (InterruptedException exc) {
|
||||
Thread.currentThread().interrupt();
|
||||
public synchronized boolean awaitFlush(long timeout, TimeUnit unit) throws InterruptedException {
|
||||
if (closed) {
|
||||
return true;
|
||||
}
|
||||
// flush
|
||||
if (bulkRequest.numberOfActions() > 0) {
|
||||
execute();
|
||||
}
|
||||
// wait for all bulk responses
|
||||
return this.bulkRequestHandler.close(timeout, unit);
|
||||
}
|
||||
|
||||
/**
* Closes the processor. If flushing by time is enabled, then it's shutdown. Any remaining bulk actions are
* flushed.
* Closes the processor. Any remaining bulk actions are flushed and then closed. This method can only be called
* once, as the last action on a bulk processor.
*
|
||||
* If concurrent requests are not enabled, returns {@code true} immediately.
|
||||
* If concurrent requests are enabled, waits for up to the specified timeout for all bulk requests to complete then
|
||||
|
@ -101,98 +113,50 @@ public class BulkProcessor implements Closeable {
|
|||
* bulk requests completed
|
||||
* @throws InterruptedException If the current thread is interrupted
|
||||
*/
|
||||
@Override
|
||||
public synchronized boolean awaitClose(long timeout, TimeUnit unit) throws InterruptedException {
|
||||
if (closed) {
|
||||
return true;
|
||||
}
|
||||
closed = true;
|
||||
if (this.scheduledFuture != null) {
|
||||
this.scheduledFuture.cancel(false);
|
||||
FutureUtils.cancel(this.scheduledFuture);
|
||||
this.scheduler.shutdown();
|
||||
}
|
||||
if (bulkRequest.numberOfActions() > 0) {
|
||||
execute();
|
||||
}
|
||||
return bulkExecutor.awaitClose(timeout, unit);
|
||||
return this.bulkRequestHandler.close(timeout, unit);
|
||||
}
|
||||
|
||||
/**
* Adds an {@link IndexRequest} to the list of actions to execute. Follows the same behavior of {@link IndexRequest}
* (for example, if no id is provided, one will be generated, or usage of the create flag).
* Adds either a delete or an index request.
*
* @param request request
* @return this bulk processor
*/
|
||||
public synchronized BulkProcessor add(IndexRequest request) {
|
||||
if (request == null) {
|
||||
return this;
|
||||
}
|
||||
ensureOpen();
|
||||
bulkRequest.add(request);
|
||||
if (isOverTheLimit()) {
|
||||
execute();
|
||||
}
|
||||
return this;
|
||||
@Override
|
||||
public DefaultBulkProcessor add(ActionRequest request) {
|
||||
return add(request, null);
|
||||
}
|
||||
|
||||
/**
* Adds a {@link DeleteRequest} to the list of actions to execute.
* Adds either a delete or an index request with a payload.
*
* @param request request
* @param payload payload
* @return this bulk processor
*/
|
||||
public synchronized BulkProcessor add(DeleteRequest request) {
|
||||
if (request == null) {
|
||||
return this;
|
||||
}
|
||||
ensureOpen();
|
||||
bulkRequest.add(request);
|
||||
if (isOverTheLimit()) {
|
||||
execute();
|
||||
}
|
||||
@Override
|
||||
public DefaultBulkProcessor add(ActionRequest request, Object payload) {
|
||||
internalAdd(request, payload);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
* Adds an {@link UpdateRequest} to the list of actions to execute.
*
* @param request request
* @return this bulk processor
*/
|
||||
public synchronized BulkProcessor add(UpdateRequest request) {
|
||||
if (request == null) {
|
||||
return this;
|
||||
}
|
||||
ensureOpen();
|
||||
bulkRequest.add(request);
|
||||
if (isOverTheLimit()) {
|
||||
execute();
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
private void ensureOpen() {
|
||||
if (closed) {
|
||||
throw new IllegalStateException("bulk process already closed");
|
||||
}
|
||||
}
|
||||
|
||||
private boolean isOverTheLimit() {
|
||||
final int count = bulkRequest.numberOfActions();
|
||||
return count > 0 &&
|
||||
(maximumBulkActionsPerRequest != -1 && count >= maximumBulkActionsPerRequest) ||
|
||||
(maximumBulkRequestByteSize != -1 && bulkRequest.estimatedSizeInBytes() >= maximumBulkRequestByteSize);
|
||||
}
|
||||
|
||||
private void execute() {
|
||||
final BulkRequest myBulkRequest = this.bulkRequest;
|
||||
bulkExecutor.execute(myBulkRequest, executionIdGen.incrementAndGet());
|
||||
this.bulkRequest = new BulkRequest();
|
||||
}
|
||||
|
||||
/**
|
||||
* Flush pending delete or index requests.
|
||||
*/
|
||||
@Override
|
||||
public synchronized void flush() {
|
||||
ensureOpen();
|
||||
if (bulkRequest.numberOfActions() > 0) {
|
||||
|
@ -201,39 +165,58 @@ public class BulkProcessor implements Closeable {
|
|||
}
|
||||
|
||||
/**
|
||||
* A listener for the execution.
|
||||
* Closes the processor. If flushing by time is enabled, then it's shutdown. Any remaining bulk actions are flushed.
|
||||
*/
|
||||
public interface Listener {
|
||||
@Override
|
||||
public void close() {
|
||||
try {
|
||||
// 0 = immediate close
|
||||
awaitClose(0, TimeUnit.NANOSECONDS);
|
||||
} catch (InterruptedException exc) {
|
||||
Thread.currentThread().interrupt();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Callback before the bulk is executed.
|
||||
*
|
||||
* @param executionId execution ID
|
||||
* @param request request
|
||||
*/
|
||||
void beforeBulk(long executionId, BulkRequest request);
|
||||
private void ensureOpen() {
|
||||
if (closed) {
|
||||
throw new IllegalStateException("bulk processor already closed");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Callback after a successful execution of bulk request.
|
||||
*
|
||||
* @param executionId execution ID
|
||||
* @param request request
|
||||
* @param response response
|
||||
*/
|
||||
void afterBulk(long executionId, BulkRequest request, BulkResponse response);
|
||||
private synchronized void internalAdd(ActionRequest request, Object payload) {
|
||||
ensureOpen();
|
||||
if (request instanceof IndexRequest) {
|
||||
bulkRequest.add((IndexRequest) request, payload);
|
||||
} else if (request instanceof DeleteRequest) {
|
||||
bulkRequest.add((DeleteRequest) request, payload);
|
||||
} else if (request instanceof UpdateRequest) {
|
||||
bulkRequest.add((UpdateRequest) request, payload);
|
||||
} else {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
executeIfNeeded();
|
||||
}
|
||||
|
||||
/**
|
||||
* Callback after a failed execution of bulk request.
|
||||
*
|
||||
* Note that in case an instance of <code>InterruptedException</code> is passed, which means that request
|
||||
* processing has been
|
||||
* cancelled externally, the thread's interruption status has been restored prior to calling this method.
|
||||
*
|
||||
* @param executionId execution ID
|
||||
* @param request request
|
||||
* @param failure failure
|
||||
*/
|
||||
void afterBulk(long executionId, BulkRequest request, Throwable failure);
|
||||
private void executeIfNeeded() {
|
||||
ensureOpen();
|
||||
if (!isOverTheLimit()) {
|
||||
return;
|
||||
}
|
||||
execute();
|
||||
}
|
||||
|
||||
private void execute() {
|
||||
final BulkRequest myBulkRequest = this.bulkRequest;
|
||||
final long executionId = executionIdGen.incrementAndGet();
|
||||
this.bulkRequest = new BulkRequest();
|
||||
this.bulkRequestHandler.execute(myBulkRequest, executionId);
|
||||
}
|
||||
|
||||
private boolean isOverTheLimit() {
|
||||
return bulkActions != -1 &&
|
||||
bulkRequest.numberOfActions() >= bulkActions ||
|
||||
bulkSize != -1 &&
|
||||
bulkRequest.estimatedSizeInBytes() >= bulkSize;
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -241,11 +224,18 @@ public class BulkProcessor implements Closeable {
|
|||
*/
|
||||
public static class Builder {
|
||||
|
||||
private final ElasticsearchClient client;
|
||||
private final Client client;
|
||||
|
||||
private final Listener listener;
|
||||
|
||||
private String name;
|
||||
|
||||
private int concurrentRequests = 1;
|
||||
|
||||
private int bulkActions = 1000;
|
||||
private ByteSizeValue bulkSize = new ByteSizeValue(5, ByteSizeUnit.MB);
|
||||
|
||||
private ByteSizeValue bulkSize = new ByteSizeValue(10, ByteSizeUnit.MB);
|
||||
|
||||
private TimeValue flushInterval = null;
|
||||
|
||||
/**
|
||||
|
@ -255,11 +245,22 @@ public class BulkProcessor implements Closeable {
|
|||
* @param client the client
|
||||
* @param listener the listener
|
||||
*/
|
||||
Builder(ElasticsearchClient client, Listener listener) {
|
||||
Builder(Client client, Listener listener) {
|
||||
this.client = client;
|
||||
this.listener = listener;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets an optional name to identify this bulk processor.
|
||||
*
|
||||
* @param name name
|
||||
* @return this builder
|
||||
*/
|
||||
public Builder setName(String name) {
|
||||
this.name = name;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the number of concurrent requests allowed to be executed. A value of 0 means that only a single
|
||||
* request will be allowed to be executed. A value of 1 means 1 concurrent request is allowed to be executed
|
||||
|
@ -277,7 +278,7 @@ public class BulkProcessor implements Closeable {
|
|||
* Sets when to flush a new bulk request based on the number of actions currently added. Defaults to
|
||||
* {@code 1000}. Can be set to {@code -1} to disable it.
|
||||
*
|
||||
* @param bulkActions mbulk actions
|
||||
* @param bulkActions bulk actions
|
||||
* @return this builder
|
||||
*/
|
||||
public Builder setBulkActions(int bulkActions) {
|
||||
|
@ -299,7 +300,7 @@ public class BulkProcessor implements Closeable {
|
|||
|
||||
/**
|
||||
* Sets a flush interval flushing *any* bulk actions pending if the interval passes. Defaults to not set.
|
||||
* Note, both {@link #setBulkActions(int)} and {@link #setBulkSize(ByteSizeValue)}
|
||||
* Note, both {@link #setBulkActions(int)} and {@link #setBulkSize(org.elasticsearch.common.unit.ByteSizeValue)}
|
||||
* can be set to {@code -1} with the flush interval set allowing for complete async processing of bulk actions.
|
||||
*
|
||||
* @param flushInterval flush interval
|
||||
|
@ -315,8 +316,8 @@ public class BulkProcessor implements Closeable {
|
|||
*
|
||||
* @return a bulk processor
|
||||
*/
|
||||
public BulkProcessor build() {
|
||||
return new BulkProcessor(client, listener, concurrentRequests, bulkActions, bulkSize, flushInterval);
|
||||
public DefaultBulkProcessor build() {
|
||||
return new DefaultBulkProcessor(client, listener, name, concurrentRequests, bulkActions, bulkSize, flushInterval);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -324,32 +325,25 @@ public class BulkProcessor implements Closeable {
|
|||
|
||||
@Override
|
||||
public void run() {
|
||||
synchronized (BulkProcessor.this) {
|
||||
synchronized (DefaultBulkProcessor.this) {
|
||||
if (closed) {
|
||||
return;
|
||||
}
|
||||
if (bulkRequest.numberOfActions() > 0) {
|
||||
execute();
|
||||
if (bulkRequest.numberOfActions() == 0) {
|
||||
return;
|
||||
}
|
||||
execute();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
interface BulkExecutor {
|
||||
private static class SyncBulkRequestHandler implements BulkRequestHandler {
|
||||
|
||||
void execute(BulkRequest bulkRequest, long executionId);
|
||||
private final Client client;
|
||||
|
||||
boolean awaitClose(long timeout, TimeUnit unit) throws InterruptedException;
|
||||
private final DefaultBulkProcessor.Listener listener;
|
||||
|
||||
}
|
||||
|
||||
private static class SyncBulkExecutor implements BulkExecutor {
|
||||
|
||||
private final ElasticsearchClient client;
|
||||
|
||||
private final BulkProcessor.Listener listener;
|
||||
|
||||
SyncBulkExecutor(ElasticsearchClient client, BulkProcessor.Listener listener) {
|
||||
SyncBulkRequestHandler(Client client, DefaultBulkProcessor.Listener listener) {
|
||||
this.client = client;
|
||||
this.listener = listener;
|
||||
}
|
||||
|
@ -370,22 +364,22 @@ public class BulkProcessor implements Closeable {
|
|||
}
|
||||
|
||||
@Override
|
||||
public boolean awaitClose(long timeout, TimeUnit unit) throws InterruptedException {
|
||||
public boolean close(long timeout, TimeUnit unit) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
private static class AsyncBulkExecutor implements BulkExecutor {
|
||||
private static class AsyncBulkRequestHandler implements BulkRequestHandler {
|
||||
|
||||
private final ElasticsearchClient client;
|
||||
private final Client client;
|
||||
|
||||
private final BulkProcessor.Listener listener;
|
||||
private final DefaultBulkProcessor.Listener listener;
|
||||
|
||||
private final Semaphore semaphore;
|
||||
|
||||
private final int concurrentRequests;
|
||||
|
||||
private AsyncBulkExecutor(ElasticsearchClient client, BulkProcessor.Listener listener, int concurrentRequests) {
|
||||
private AsyncBulkRequestHandler(Client client, DefaultBulkProcessor.Listener listener, int concurrentRequests) {
|
||||
this.client = client;
|
||||
this.listener = listener;
|
||||
this.concurrentRequests = concurrentRequests;
|
||||
|
@ -400,7 +394,7 @@ public class BulkProcessor implements Closeable {
|
|||
listener.beforeBulk(executionId, bulkRequest);
|
||||
semaphore.acquire();
|
||||
acquired = true;
|
||||
client.execute(BulkAction.INSTANCE, bulkRequest, new ActionListener<BulkResponse>() {
|
||||
client.execute(BulkAction.INSTANCE, bulkRequest, new ActionListener<>() {
|
||||
@Override
|
||||
public void onResponse(BulkResponse response) {
|
||||
try {
|
||||
|
@ -433,9 +427,9 @@ public class BulkProcessor implements Closeable {
|
|||
}
|
||||
|
||||
@Override
|
||||
public boolean awaitClose(long timeout, TimeUnit unit) throws InterruptedException {
|
||||
if (semaphore.tryAcquire(this.concurrentRequests, timeout, unit)) {
|
||||
semaphore.release(this.concurrentRequests);
|
||||
public boolean close(long timeout, TimeUnit unit) throws InterruptedException {
|
||||
if (semaphore.tryAcquire(concurrentRequests, timeout, unit)) {
|
||||
semaphore.release(concurrentRequests);
|
||||
return true;
|
||||
}
|
||||
return false;
|
|
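A sketch of how the rewritten processor above might be wired up and used; builder(Client, Listener), setName, setBulkActions, setBulkSize, add, flush, awaitFlush and close all appear in this commit, while the flush-interval setter name, the listener instance and the sample document are assumptions.

import java.util.concurrent.TimeUnit;

import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentType;
import org.xbib.elx.common.DefaultBulkProcessor;

// hypothetical example, not part of the commit
public class BulkProcessorUsage {

    static void indexOneDocument(Client client, DefaultBulkProcessor.Listener listener) throws Exception {
        DefaultBulkProcessor bulkProcessor = DefaultBulkProcessor.builder(client, listener)
                .setName("example-bulk")
                .setBulkActions(1000)
                .setBulkSize(new ByteSizeValue(10, ByteSizeUnit.MB))
                .setFlushInterval(TimeValue.timeValueSeconds(30)) // assumed setter name
                .build();
        bulkProcessor.add(new IndexRequest("example", "doc", "1")
                .source("{\"field\":\"value\"}", XContentType.JSON));
        bulkProcessor.flush();
        bulkProcessor.awaitFlush(30L, TimeUnit.SECONDS);
        bulkProcessor.close();
    }
}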
@@ -0,0 +1,214 @@
|
|||
package org.xbib.elx.common;
|
||||
|
||||
import org.xbib.elx.api.IndexDefinition;
|
||||
import org.xbib.elx.api.IndexRetention;
|
||||
|
||||
import java.net.MalformedURLException;
|
||||
import java.net.URL;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
public class DefaultIndexDefinition implements IndexDefinition {
|
||||
|
||||
private String index;
|
||||
|
||||
private String fullIndexName;
|
||||
|
||||
private String dateTimePattern;
|
||||
|
||||
private URL settingsUrl;
|
||||
|
||||
private URL mappingsUrl;
|
||||
|
||||
private boolean enabled;
|
||||
|
||||
private boolean ignoreErrors;
|
||||
|
||||
private boolean switchAliases;
|
||||
|
||||
private boolean hasForceMerge;
|
||||
|
||||
private int replicaLevel;
|
||||
|
||||
private IndexRetention indexRetention;
|
||||
|
||||
private long maxWaitTime;
|
||||
|
||||
private TimeUnit maxWaitTimeUnit;
|
||||
|
||||
private long startRefreshInterval;
|
||||
|
||||
private long stopRefreshInterval;
|
||||
|
||||
@Override
|
||||
public IndexDefinition setIndex(String index) {
|
||||
this.index = index;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getIndex() {
|
||||
return index;
|
||||
}
|
||||
|
||||
@Override
|
||||
public IndexDefinition setFullIndexName(String fullIndexName) {
|
||||
this.fullIndexName = fullIndexName;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getFullIndexName() {
|
||||
return fullIndexName;
|
||||
}
|
||||
|
||||
@Override
|
||||
public IndexDefinition setSettingsUrl(String settingsUrlString) throws MalformedURLException {
|
||||
this.settingsUrl = settingsUrlString != null ? new URL(settingsUrlString) : null;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public IndexDefinition setSettingsUrl(URL settingsUrl) {
|
||||
this.settingsUrl = settingsUrl;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public URL getSettingsUrl() {
|
||||
return settingsUrl;
|
||||
}
|
||||
|
||||
@Override
|
||||
public IndexDefinition setMappingsUrl(String mappingsUrlString) throws MalformedURLException {
|
||||
this.mappingsUrl = mappingsUrlString != null ? new URL(mappingsUrlString) : null;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public IndexDefinition setMappingsUrl(URL mappingsUrl) {
|
||||
this.mappingsUrl = mappingsUrl;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public URL getMappingsUrl() {
|
||||
return mappingsUrl;
|
||||
}
|
||||
|
||||
@Override
|
||||
public IndexDefinition setDateTimePattern(String timeWindow) {
|
||||
this.dateTimePattern = timeWindow;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getDateTimePattern() {
|
||||
return dateTimePattern;
|
||||
}
|
||||
|
||||
@Override
|
||||
public IndexDefinition setEnabled(boolean enabled) {
|
||||
this.enabled = enabled;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isEnabled() {
|
||||
return enabled;
|
||||
}
|
||||
|
||||
@Override
|
||||
public IndexDefinition setIgnoreErrors(boolean ignoreErrors) {
|
||||
this.ignoreErrors = ignoreErrors;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean ignoreErrors() {
|
||||
return ignoreErrors;
|
||||
}
|
||||
|
||||
@Override
|
||||
public IndexDefinition setShift(boolean switchAliases) {
|
||||
this.switchAliases = switchAliases;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isShiftEnabled() {
|
||||
return switchAliases;
|
||||
}
|
||||
|
||||
@Override
|
||||
public IndexDefinition setForceMerge(boolean hasForceMerge) {
|
||||
this.hasForceMerge = hasForceMerge;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasForceMerge() {
|
||||
return hasForceMerge;
|
||||
}
|
||||
|
||||
@Override
|
||||
public IndexDefinition setReplicaLevel(int replicaLevel) {
|
||||
this.replicaLevel = replicaLevel;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getReplicaLevel() {
|
||||
return replicaLevel;
|
||||
}
|
||||
|
||||
@Override
|
||||
public IndexDefinition setRetention(IndexRetention indexRetention) {
|
||||
this.indexRetention = indexRetention;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public IndexRetention getRetention() {
|
||||
return indexRetention;
|
||||
}
|
||||
|
||||
@Override
|
||||
public IndexDefinition setMaxWaitTime(long maxWaitTime, TimeUnit timeUnit) {
|
||||
this.maxWaitTime = maxWaitTime;
|
||||
this.maxWaitTimeUnit = timeUnit;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getMaxWaitTime() {
|
||||
return maxWaitTime;
|
||||
}
|
||||
|
||||
@Override
|
||||
public TimeUnit getMaxWaitTimeUnit() {
|
||||
return maxWaitTimeUnit;
|
||||
}
|
||||
|
||||
@Override
|
||||
public IndexDefinition setStartRefreshInterval(long seconds) {
|
||||
this.startRefreshInterval = seconds;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getStartRefreshInterval() {
|
||||
return startRefreshInterval;
|
||||
}
|
||||
|
||||
@Override
|
||||
public IndexDefinition setStopRefreshInterval(long seconds) {
|
||||
this.stopRefreshInterval = seconds;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getStopRefreshInterval() {
|
||||
return stopRefreshInterval;
|
||||
}
|
||||
|
||||
}
|
|
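A sketch of filling the index definition above; every setter used here appears in this commit, the concrete values are examples only, and the classpath: URLs assume the ClasspathURLStreamHandler introduced further down has been registered.

import java.net.MalformedURLException;
import java.util.concurrent.TimeUnit;

import org.xbib.elx.api.IndexDefinition;
import org.xbib.elx.common.DefaultIndexDefinition;

// hypothetical example, not part of the commit
public class IndexDefinitionUsage {

    static IndexDefinition exampleDefinition() throws MalformedURLException {
        return new DefaultIndexDefinition()
                .setIndex("example")
                .setFullIndexName("example20190101")
                .setDateTimePattern("yyyyMMdd")
                .setSettingsUrl("classpath:example-settings.json")
                .setMappingsUrl("classpath:example-mapping.json")
                .setShift(true)
                .setForceMerge(true)
                .setReplicaLevel(1)
                .setStartRefreshInterval(-1L)
                .setStopRefreshInterval(30L)
                .setMaxWaitTime(30L, TimeUnit.SECONDS)
                .setEnabled(true);
    }
}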
@@ -0,0 +1,32 @@
|
|||
package org.xbib.elx.common;
|
||||
|
||||
import org.xbib.elx.api.IndexRetention;
|
||||
|
||||
public class DefaultIndexRetention implements IndexRetention {
|
||||
|
||||
private int delta;
|
||||
|
||||
private int minToKeep;
|
||||
|
||||
@Override
|
||||
public IndexRetention setDelta(int delta) {
|
||||
this.delta = delta;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getDelta() {
|
||||
return delta;
|
||||
}
|
||||
|
||||
@Override
|
||||
public IndexRetention setMinToKeep(int minToKeep) {
|
||||
this.minToKeep = minToKeep;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getMinToKeep() {
|
||||
return minToKeep;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,129 @@
|
|||
package org.xbib.elx.common;
|
||||
|
||||
import org.elasticsearch.action.delete.DeleteRequest;
|
||||
import org.elasticsearch.action.index.IndexRequest;
|
||||
import org.elasticsearch.action.update.UpdateRequest;
|
||||
import org.elasticsearch.client.ElasticsearchClient;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
/**
* A mocked client that does not perform any actions on a cluster. Useful for testing.
*/
|
||||
public class MockExtendedClient extends AbstractExtendedClient {
|
||||
|
||||
@Override
|
||||
public ElasticsearchClient getClient() {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public MockExtendedClient init(Settings settings) {
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected ElasticsearchClient createClient(Settings settings) {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void closeClient() {
|
||||
}
|
||||
|
||||
@Override
|
||||
public MockExtendedClient index(String index, String id, boolean create, String source) {
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public MockExtendedClient delete(String index, String id) {
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public MockExtendedClient update(String index, String id, String source) {
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public MockExtendedClient index(IndexRequest indexRequest) {
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public MockExtendedClient delete(DeleteRequest deleteRequest) {
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public MockExtendedClient update(UpdateRequest updateRequest) {
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public MockExtendedClient startBulk(String index, long startRefreshInterval, long stopRefreshInterval) {
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public MockExtendedClient stopBulk(String index, long maxWaitTime, TimeUnit timeUnit) {
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public MockExtendedClient newIndex(String index) {
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public MockExtendedClient deleteIndex(String index) {
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public MockExtendedClient refreshIndex(String index) {
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public MockExtendedClient flushIndex(String index) {
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean forceMerge(String index, long maxWaitTime, TimeUnit timeUnit) {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean waitForCluster(String healthColor, long timeValue, TimeUnit timeUnit) {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean waitForResponses(long maxWaitTime, TimeUnit timeUnit) {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean waitForRecovery(String index, long maxWaitTime, TimeUnit timeUnit) {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public MockExtendedClient updateReplicaLevel(String index, int level, long maxWaitTime, TimeUnit timeUnit) {
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void flush() {
|
||||
// nothing to do
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() {
|
||||
// nothing to do
|
||||
}
|
||||
}
|
|
@ -0,0 +1,10 @@
|
|||
package org.xbib.elx.common;
|
||||
|
||||
import org.xbib.elx.api.ExtendedClientProvider;
|
||||
|
||||
public class MockExtendedClientProvider implements ExtendedClientProvider<MockExtendedClient> {
|
||||
@Override
|
||||
public MockExtendedClient getExtendedClient() {
|
||||
return new MockExtendedClient();
|
||||
}
|
||||
}
|
elx-common/src/main/java/org/xbib/elx/common/Parameters.java (new file, 40 lines)
|
@@ -0,0 +1,40 @@
package org.xbib.elx.common;

public enum Parameters {

    DEFAULT_MAX_ACTIONS_PER_REQUEST(1000),

    DEFAULT_MAX_CONCURRENT_REQUESTS(Runtime.getRuntime().availableProcessors()),

    DEFAULT_MAX_VOLUME_PER_REQUEST("10mb"),

    DEFAULT_FLUSH_INTERVAL(30),

    MAX_ACTIONS_PER_REQUEST("max_actions_per_request"),

    MAX_CONCURRENT_REQUESTS("max_concurrent_requests"),

    MAX_VOLUME_PER_REQUEST("max_volume_per_request"),

    FLUSH_INTERVAL("flush_interval");

    int num;

    String string;

    Parameters(int num) {
        this.num = num;
    }

    Parameters(String string) {
        this.string = string;
    }

    int getNum() {
        return num;
    }

    String getString() {
        return string;
    }
}
|
|
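A sketch of how the parameter keys and defaults above might be resolved against an Elasticsearch Settings object; Settings.getAsInt is standard Elasticsearch API, while the surrounding class is illustrative and assumes it lives in the same package, since the enum accessors are package-private.

package org.xbib.elx.common;

import org.elasticsearch.common.settings.Settings;

// hypothetical example, not part of the commit
public class ParametersUsage {

    static int maxActionsPerRequest(Settings settings) {
        return settings.getAsInt(Parameters.MAX_ACTIONS_PER_REQUEST.getString(),
                Parameters.DEFAULT_MAX_ACTIONS_PER_REQUEST.getNum());
    }

    static int maxConcurrentRequests(Settings settings) {
        return settings.getAsInt(Parameters.MAX_CONCURRENT_REQUESTS.getString(),
                Parameters.DEFAULT_MAX_CONCURRENT_REQUESTS.getNum());
    }
}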
@@ -0,0 +1,25 @@
package org.xbib.elx.common.io;

import java.io.IOException;
import java.net.URL;
import java.net.URLConnection;
import java.net.URLStreamHandler;

public class ClasspathURLStreamHandler extends URLStreamHandler {

    private final ClassLoader classLoader;

    public ClasspathURLStreamHandler() {
        this.classLoader = getClass().getClassLoader();
    }

    public ClasspathURLStreamHandler(ClassLoader classLoader) {
        this.classLoader = classLoader;
    }

    @Override
    protected URLConnection openConnection(URL u) throws IOException {
        final URL resourceUrl = classLoader.getResource(u.getPath());
        return resourceUrl != null ? resourceUrl.openConnection() : null;
    }
}

@@ -0,0 +1,12 @@
package org.xbib.elx.common.io;

import java.net.URLStreamHandler;
import java.net.URLStreamHandlerFactory;

public class ClasspathURLStreamHandlerFactory implements URLStreamHandlerFactory {

    @Override
    public URLStreamHandler createURLStreamHandler(String protocol) {
        return "classpath".equals(protocol) ? new ClasspathURLStreamHandler() : null;
    }
}
|
|
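A sketch of registering the factory above once per JVM and resolving a classpath: resource; URL.setURLStreamHandlerFactory, openStream and readAllBytes are standard JDK API, the resource name and the wrapper class are examples only.

import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.nio.charset.StandardCharsets;

import org.xbib.elx.common.io.ClasspathURLStreamHandlerFactory;

// hypothetical example, not part of the commit
public class ClasspathUrlUsage {

    static String readResource() throws IOException {
        // the factory may only be registered once per JVM
        URL.setURLStreamHandlerFactory(new ClasspathURLStreamHandlerFactory());
        URL url = new URL("classpath:example-settings.json");
        try (InputStream in = url.openStream()) {
            return new String(in.readAllBytes(), StandardCharsets.UTF_8);
        }
    }
}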
@@ -0,0 +1,4 @@
/**
 *
 */
package org.xbib.elx.common.io;

@@ -0,0 +1,4 @@
/**
 *
 */
package org.xbib.elx.common;
|
|
@ -1,4 +1,4 @@
|
|||
package org.xbib.elasticsearch.client;
|
||||
package org.xbib.elx.common.util;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
|
@ -16,6 +16,9 @@ import java.util.Enumeration;
|
|||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public class NetworkUtils {
|
||||
|
||||
private static final Logger logger = LogManager.getLogger(NetworkUtils.class.getName());
|
||||
|
@ -100,10 +103,8 @@ public class NetworkUtils {
|
|||
NetworkInterface networkInterface = interfaces.nextElement();
|
||||
allInterfaces.add(networkInterface);
|
||||
Enumeration<NetworkInterface> subInterfaces = networkInterface.getSubInterfaces();
|
||||
if (subInterfaces.hasMoreElements()) {
|
||||
while (subInterfaces.hasMoreElements()) {
|
||||
allInterfaces.add(subInterfaces.nextElement());
|
||||
}
|
||||
while (subInterfaces.hasMoreElements()) {
|
||||
allInterfaces.add(subInterfaces.nextElement());
|
||||
}
|
||||
}
|
||||
sortInterfaces(allInterfaces);
|
||||
|
@ -221,10 +222,8 @@ public class NetworkUtils {
|
|||
NetworkInterface networkInterface = interfaces.nextElement();
|
||||
networkInterfaces.add(networkInterface);
|
||||
Enumeration<NetworkInterface> subInterfaces = networkInterface.getSubInterfaces();
|
||||
if (subInterfaces.hasMoreElements()) {
|
||||
while (subInterfaces.hasMoreElements()) {
|
||||
networkInterfaces.add(subInterfaces.nextElement());
|
||||
}
|
||||
while (subInterfaces.hasMoreElements()) {
|
||||
networkInterfaces.add(subInterfaces.nextElement());
|
||||
}
|
||||
}
|
||||
sortInterfaces(networkInterfaces);
|
||||
|
@ -250,6 +249,9 @@ public class NetworkUtils {
|
|||
return left.length - right.length;
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public enum ProtocolVersion {
|
||||
IPV4, IPV6, IPV46, NONE
|
||||
}
|
|
@ -0,0 +1,4 @@
|
|||
/**
|
||||
*
|
||||
*/
|
||||
package org.xbib.elx.common.util;
|
|
@ -0,0 +1 @@
|
|||
org.xbib.elx.common.io.ClasspathURLStreamHandlerFactory
|
|
@ -0,0 +1 @@
|
|||
org.xbib.elx.common.MockExtendedClientProvider
|
|
@ -1,4 +1,7 @@
|
|||
package org.xbib.elasticsearch.client.common;
|
||||
package org.xbib.elx.common.test;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
|
||||
import com.carrotsearch.hppc.cursors.ObjectCursor;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
|
@ -9,8 +12,9 @@ import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
|
|||
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequestBuilder;
|
||||
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse;
|
||||
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.testframework.ESSingleNodeTestCase;
|
||||
import org.junit.Test;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.Iterator;
|
||||
|
@ -19,58 +23,72 @@ import java.util.TreeSet;
|
|||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
public class AliasTests extends ESSingleNodeTestCase {
|
||||
public class AliasTest extends TestBase {
|
||||
|
||||
private static final Logger logger = LogManager.getLogger(AliasTests.class.getName());
|
||||
private static final Logger logger = LogManager.getLogger(AliasTest.class.getName());
|
||||
|
||||
@Test
|
||||
public void testAlias() {
|
||||
Client client = client("1");
|
||||
CreateIndexRequest indexRequest = new CreateIndexRequest("test");
|
||||
client().admin().indices().create(indexRequest).actionGet();
|
||||
client.admin().indices().create(indexRequest).actionGet();
|
||||
// put alias
|
||||
IndicesAliasesRequest indicesAliasesRequest = new IndicesAliasesRequest();
|
||||
indicesAliasesRequest.addAliasAction(IndicesAliasesRequest.AliasActions.add()
|
||||
.index("test").alias("test_alias")
|
||||
);
|
||||
client().admin().indices().aliases(indicesAliasesRequest).actionGet();
|
||||
String[] indices = new String[]{"test"};
|
||||
String[] aliases = new String[]{"test_alias"};
|
||||
IndicesAliasesRequest.AliasActions aliasAction =
|
||||
new IndicesAliasesRequest.AliasActions(IndicesAliasesRequest.AliasActions.Type.ADD)
|
||||
.indices(indices)
|
||||
.aliases(aliases);
|
||||
indicesAliasesRequest.addAliasAction(aliasAction);
|
||||
client.admin().indices().aliases(indicesAliasesRequest).actionGet();
|
||||
// get alias
|
||||
GetAliasesRequest getAliasesRequest = new GetAliasesRequest(Strings.EMPTY_ARRAY);
|
||||
long t0 = System.nanoTime();
|
||||
GetAliasesResponse getAliasesResponse = client().admin().indices().getAliases(getAliasesRequest).actionGet();
|
||||
GetAliasesResponse getAliasesResponse = client.admin().indices().getAliases(getAliasesRequest).actionGet();
|
||||
long t1 = (System.nanoTime() - t0) / 1000000;
|
||||
logger.info("{} time(ms) = {}", getAliasesResponse.getAliases(), t1);
|
||||
assertTrue(t1 >= 0);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testMostRecentIndex() {
|
||||
Client client = client("1");
|
||||
String alias = "test";
|
||||
CreateIndexRequest indexRequest = new CreateIndexRequest("test20160101");
|
||||
client().admin().indices().create(indexRequest).actionGet();
|
||||
client.admin().indices().create(indexRequest).actionGet();
|
||||
indexRequest = new CreateIndexRequest("test20160102");
|
||||
client().admin().indices().create(indexRequest).actionGet();
|
||||
client.admin().indices().create(indexRequest).actionGet();
|
||||
indexRequest = new CreateIndexRequest("test20160103");
|
||||
client().admin().indices().create(indexRequest).actionGet();
|
||||
client.admin().indices().create(indexRequest).actionGet();
|
||||
IndicesAliasesRequest indicesAliasesRequest = new IndicesAliasesRequest();
|
||||
indicesAliasesRequest.addAliasAction(IndicesAliasesRequest.AliasActions.add()
|
||||
.indices("test20160101", "test20160102", "test20160103")
|
||||
.alias(alias)
|
||||
);
|
||||
client().admin().indices().aliases(indicesAliasesRequest).actionGet();
|
||||
String[] indices = new String[]{"test20160101", "test20160102", "test20160103"};
|
||||
String[] aliases = new String[]{alias};
|
||||
IndicesAliasesRequest.AliasActions aliasAction =
|
||||
new IndicesAliasesRequest.AliasActions(IndicesAliasesRequest.AliasActions.Type.ADD)
|
||||
.indices(indices)
|
||||
.aliases(aliases);
|
||||
indicesAliasesRequest.addAliasAction(aliasAction);
|
||||
client.admin().indices().aliases(indicesAliasesRequest).actionGet();
|
||||
|
||||
GetAliasesRequestBuilder getAliasesRequestBuilder = new GetAliasesRequestBuilder(client(),
|
||||
GetAliasesRequestBuilder getAliasesRequestBuilder = new GetAliasesRequestBuilder(client,
|
||||
GetAliasesAction.INSTANCE);
|
||||
GetAliasesResponse getAliasesResponse = getAliasesRequestBuilder.setAliases(alias).execute().actionGet();
|
||||
Pattern pattern = Pattern.compile("^(.*?)(\\d+)$");
|
||||
Set<String> result = new TreeSet<>(Collections.reverseOrder());
|
||||
for (ObjectCursor<String> indexName : getAliasesResponse.getAliases().keys()) {
|
||||
Matcher m = pattern.matcher(indexName.value);
|
||||
if (m.matches() && alias.equals(m.group(1))) {
|
||||
result.add(indexName.value);
|
||||
if (m.matches()) {
|
||||
if (alias.equals(m.group(1))) {
|
||||
result.add(indexName.value);
|
||||
}
|
||||
}
|
||||
}
|
||||
Iterator<String> it = result.iterator();
|
||||
assertEquals("test20160103", it.next());
|
||||
assertEquals("test20160102", it.next());
|
||||
assertEquals("test20160101", it.next());
|
||||
logger.info("result={}", result);
|
||||
logger.info("success: result={}", result);
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,50 @@
|
|||
package org.xbib.elx.common.test;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.elasticsearch.action.bulk.BulkRequestBuilder;
|
||||
import org.elasticsearch.action.index.IndexRequestBuilder;
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.cluster.block.ClusterBlockException;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.junit.Before;
|
||||
import org.junit.Ignore;
|
||||
import org.junit.Test;
|
||||
|
||||
@Ignore
|
||||
public class ClusterBlockTest extends TestBase {
|
||||
|
||||
private static final Logger logger = LogManager.getLogger("test");
|
||||
|
||||
@Before
|
||||
public void startNodes() {
|
||||
try {
|
||||
setClusterName("test-cluster");
|
||||
startNode("1");
|
||||
// do not wait for green health state
|
||||
logger.info("ready");
|
||||
} catch (Throwable t) {
|
||||
logger.error("startNodes failed", t);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Settings getNodeSettings() {
|
||||
return Settings.builder()
|
||||
.put(super.getNodeSettings())
|
||||
.put("discovery.zen.minimum_master_nodes", 2) // block until we have two nodes
|
||||
.build();
|
||||
}
|
||||
|
||||
@Test(expected = ClusterBlockException.class)
|
||||
public void testClusterBlock() throws Exception {
|
||||
Client client = client("1");
|
||||
XContentBuilder builder = XContentFactory.jsonBuilder().startObject().field("field1", "value1").endObject();
|
||||
IndexRequestBuilder irb = client.prepareIndex("test", "test", "1").setSource(builder);
|
||||
BulkRequestBuilder brb = client.prepareBulk();
|
||||
brb.add(irb);
|
||||
brb.execute().actionGet();
|
||||
}
|
||||
}
|
|
@ -0,0 +1,19 @@
|
|||
package org.xbib.elx.common.test;
|
||||
|
||||
import org.junit.Test;
|
||||
import org.xbib.elx.common.ClientBuilder;
|
||||
import org.xbib.elx.common.MockExtendedClient;
|
||||
import org.xbib.elx.common.MockExtendedClientProvider;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import static org.junit.Assert.assertNotNull;
|
||||
|
||||
public class MockExtendedClientProviderTest {
|
||||
|
||||
@Test
|
||||
public void testMockExtendedProvider() throws IOException {
|
||||
MockExtendedClient client = ClientBuilder.builder().provider(MockExtendedClientProvider.class).build();
|
||||
assertNotNull(client);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,15 @@
|
|||
package org.xbib.elx.common.test;
|
||||
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.node.InternalSettingsPreparer;
|
||||
import org.elasticsearch.node.Node;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
public class MockNode extends Node {
|
||||
|
||||
public MockNode(Settings settings, List<Class<? extends Plugin>> classpathPlugins) {
|
||||
super(InternalSettingsPreparer.prepareEnvironment(settings, null), classpathPlugins);
|
||||
}
|
||||
}
|
|
@ -1,4 +1,4 @@
|
|||
package org.xbib.elasticsearch.client.common;
|
||||
package org.xbib.elx.common.test;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
|
@ -13,17 +13,12 @@ public class NetworkTest {
|
|||
|
||||
private static final Logger logger = LogManager.getLogger(NetworkTest.class);
|
||||
|
||||
/**
* Demonstrates the slowness of Java network interface lookup in certain environments.
* May be a killer for ES node startup - so avoid automatic traversal of NICs at all costs.
*
* @throws Exception if the test fails
*/
|
||||
@Test
|
||||
public void testNetwork() throws Exception {
|
||||
// walk over all found interfaces (this is slow - multicast/pings are performed)
|
||||
Enumeration<NetworkInterface> nets = NetworkInterface.getNetworkInterfaces();
|
||||
for (NetworkInterface netint : Collections.list(nets)) {
|
||||
logger.info("checking network interface = " + netint.getName());
|
||||
System.out.println("checking network interface = " + netint.getName());
|
||||
Enumeration<InetAddress> inetAddresses = netint.getInetAddresses();
|
||||
for (InetAddress addr : Collections.list(inetAddresses)) {
|
||||
logger.info("found address = " + addr.getHostAddress()
|
|
@ -0,0 +1,58 @@
|
|||
package org.xbib.elx.common.test;
|
||||
|
||||
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
|
||||
import org.elasticsearch.action.bulk.BulkAction;
|
||||
import org.elasticsearch.action.bulk.BulkRequestBuilder;
|
||||
import org.elasticsearch.action.index.IndexRequest;
|
||||
import org.elasticsearch.action.search.SearchRequestBuilder;
|
||||
import org.elasticsearch.action.search.SearchResponse;
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.index.query.QueryBuilder;
|
||||
import org.elasticsearch.index.query.QueryBuilders;
|
||||
import org.elasticsearch.search.sort.SortOrder;
|
||||
import org.junit.Test;
|
||||
|
||||
import static org.junit.Assert.assertTrue;
|
||||
|
||||
public class SearchTest extends TestBase {
|
||||
|
||||
@Test
|
||||
public void testSearch() throws Exception {
|
||||
Client client = client("1");
|
||||
BulkRequestBuilder builder = new BulkRequestBuilder(client, BulkAction.INSTANCE);
|
||||
for (int i = 0; i < 1000; i++) {
|
||||
IndexRequest indexRequest = new IndexRequest("pages", "row")
|
||||
.source(XContentFactory.jsonBuilder()
|
||||
.startObject()
|
||||
.field("user1", "joerg")
|
||||
.field("user2", "joerg")
|
||||
.field("user3", "joerg")
|
||||
.field("user4", "joerg")
|
||||
.field("user5", "joerg")
|
||||
.field("user6", "joerg")
|
||||
.field("user7", "joerg")
|
||||
.field("user8", "joerg")
|
||||
.field("user9", "joerg")
|
||||
.field("rowcount", i)
|
||||
.field("rs", 1234)
|
||||
.endObject()
|
||||
);
|
||||
builder.add(indexRequest);
|
||||
}
|
||||
client.bulk(builder.request()).actionGet();
|
||||
client.admin().indices().refresh(new RefreshRequest()).actionGet();
|
||||
|
||||
for (int i = 0; i < 100; i++) {
|
||||
QueryBuilder queryStringBuilder = QueryBuilders.queryStringQuery("rs:" + 1234);
|
||||
SearchRequestBuilder requestBuilder = client.prepareSearch()
|
||||
.setIndices("pages")
|
||||
.setTypes("row")
|
||||
.setQuery(queryStringBuilder)
|
||||
.addSort("rowcount", SortOrder.DESC)
|
||||
.setFrom(i * 10).setSize(10);
|
||||
SearchResponse searchResponse = requestBuilder.execute().actionGet();
|
||||
assertTrue(searchResponse.getHits().getTotalHits() > 0);
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,53 +1,53 @@
|
|||
package org.xbib.elasticsearch.client.common;
|
||||
package org.xbib.elx.common.test;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.elasticsearch.action.admin.indices.create.CreateIndexAction;
|
||||
import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
|
||||
import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction;
|
||||
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequestBuilder;
|
||||
import org.elasticsearch.action.admin.indices.refresh.RefreshAction;
|
||||
import org.elasticsearch.action.admin.indices.refresh.RefreshRequestBuilder;
|
||||
import org.elasticsearch.action.index.IndexAction;
|
||||
import org.elasticsearch.action.index.IndexRequestBuilder;
|
||||
import org.elasticsearch.action.support.WriteRequest;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.index.IndexNotFoundException;
|
||||
import org.elasticsearch.index.query.QueryBuilders;
|
||||
import org.elasticsearch.testframework.ESSingleNodeTestCase;
|
||||
import org.junit.Test;
|
||||
|
||||
public class SimpleTests extends ESSingleNodeTestCase {
|
||||
|
||||
private static final Logger logger = LogManager.getLogger(SimpleTests.class.getName());
|
||||
public class SimpleTest extends TestBase {
|
||||
|
||||
@Test
|
||||
public void test() throws Exception {
|
||||
try {
|
||||
DeleteIndexRequestBuilder deleteIndexRequestBuilder =
|
||||
new DeleteIndexRequestBuilder(client(), DeleteIndexAction.INSTANCE, "test");
|
||||
new DeleteIndexRequestBuilder(client("1"), DeleteIndexAction.INSTANCE, "test");
|
||||
deleteIndexRequestBuilder.execute().actionGet();
|
||||
} catch (Exception e) {
|
||||
logger.warn(e.getMessage(), e);
|
||||
} catch (IndexNotFoundException e) {
|
||||
// ignore if index not found
|
||||
}
|
||||
CreateIndexRequestBuilder createIndexRequestBuilder = new CreateIndexRequestBuilder(client(),
|
||||
CreateIndexAction.INSTANCE)
|
||||
.setIndex("test")
|
||||
.setSettings(Settings.builder()
|
||||
.put("index.analysis.analyzer.default.filter.0", "lowercase")
|
||||
// where is the trim token filter???
|
||||
//.put("index.analysis.analyzer.default.filter.1", "trim")
|
||||
.put("index.analysis.analyzer.default.tokenizer", "keyword")
|
||||
.build());
|
||||
createIndexRequestBuilder.execute().actionGet();
|
||||
Settings indexSettings = Settings.builder()
|
||||
.put("index.analysis.analyzer.default.filter.0", "lowercase")
|
||||
.put("index.analysis.analyzer.default.filter.1", "trim")
|
||||
.put("index.analysis.analyzer.default.tokenizer", "keyword")
|
||||
.build();
|
||||
CreateIndexRequestBuilder createIndexRequestBuilder = new CreateIndexRequestBuilder(client("1"), CreateIndexAction.INSTANCE);
|
||||
createIndexRequestBuilder.setIndex("test")
|
||||
.setSettings(indexSettings).execute().actionGet();
|
||||
|
||||
IndexRequestBuilder indexRequestBuilder = new IndexRequestBuilder(client(), IndexAction.INSTANCE);
|
||||
IndexRequestBuilder indexRequestBuilder = new IndexRequestBuilder(client("1"), IndexAction.INSTANCE);
|
||||
indexRequestBuilder
|
||||
.setIndex("test")
|
||||
.setType("test")
|
||||
.setId("1")
|
||||
.setSource(XContentFactory.jsonBuilder().startObject().field("field",
|
||||
"1%2fPJJP3JV2C24iDfEu9XpHBaYxXh%2fdHTbmchB35SDznXO2g8Vz4D7GTIvY54iMiX_149c95f02a8").endObject())
|
||||
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
|
||||
.execute()
|
||||
.actionGet();
|
||||
String doc = client().prepareSearch("test")
|
||||
RefreshRequestBuilder refreshRequestBuilder = new RefreshRequestBuilder(client("1"), RefreshAction.INSTANCE);
|
||||
refreshRequestBuilder.setIndices("test").execute().actionGet();
|
||||
String doc = client("1").prepareSearch("test")
|
||||
.setTypes("test")
|
||||
.setQuery(QueryBuilders.matchQuery("field",
|
||||
"1%2fPJJP3JV2C24iDfEu9XpHBaYxXh%2fdHTbmchB35SDznXO2g8Vz4D7GTIvY54iMiX_149c95f02a8"))
|
elx-common/src/test/java/org/xbib/elx/common/test/TestBase.java (new file, 206 lines)
|
@@ -0,0 +1,206 @@
|
|||
package org.xbib.elx.common.test;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchTimeoutException;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse;
import org.elasticsearch.action.admin.cluster.state.ClusterStateAction;
import org.elasticsearch.action.admin.cluster.state.ClusterStateRequestBuilder;
import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
import org.elasticsearch.analysis.common.CommonAnalysisPlugin;
import org.elasticsearch.client.support.AbstractClient;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.node.Node;
import org.elasticsearch.node.NodeValidationException;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.transport.netty4.Netty4Plugin;
import org.junit.After;
import org.junit.Before;

import java.io.IOException;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;

public class TestBase {

    private static final Logger logger = LogManager.getLogger("test");

    private static final Random random = new Random();

    private static final char[] numbersAndLetters = ("0123456789abcdefghijklmnopqrstuvwxyz").toCharArray();

    private Map<String, Node> nodes = new HashMap<>();

    private Map<String, AbstractClient> clients = new HashMap<>();

    private String cluster;

    private String host;

    private int port;

    @Before
    public void startNodes() {
        try {
            logger.info("starting");
            setClusterName("test-cluster");
            startNode("1");
            findNodeAddress();
            try {
                ClusterHealthResponse healthResponse = client("1").execute(ClusterHealthAction.INSTANCE,
                        new ClusterHealthRequest().waitForStatus(ClusterHealthStatus.GREEN)
                                .timeout(TimeValue.timeValueSeconds(30))).actionGet();
                if (healthResponse != null && healthResponse.isTimedOut()) {
                    throw new IOException("cluster state is " + healthResponse.getStatus().name()
                            + ", from here on, everything will fail!");
                }
            } catch (ElasticsearchTimeoutException e) {
                throw new IOException("cluster does not respond to health request, cowardly refusing to continue");
            }
            ClusterStateRequestBuilder clusterStateRequestBuilder =
                    new ClusterStateRequestBuilder(client("1"), ClusterStateAction.INSTANCE).all();
            ClusterStateResponse clusterStateResponse = clusterStateRequestBuilder.execute().actionGet();
            logger.info("cluster name = {}", clusterStateResponse.getClusterName().value());
            logger.info("host = {} port = {}", host, port);
        } catch (Throwable t) {
            logger.error(t.getMessage(), t);
        }
    }

    @After
    public void stopNodes() {
        try {
            closeNodes();
        } catch (Exception e) {
            logger.error("can not close nodes", e);
        } finally {
            try {
                deleteFiles();
                logger.info("data files wiped");
                Thread.sleep(2000L); // let OS commit changes
            } catch (IOException e) {
                logger.error(e.getMessage(), e);
            } catch (InterruptedException e) {
                // ignore
            }
        }
    }

    protected Settings getTransportSettings() {
        return Settings.builder()
                .put("host", host)
                .put("port", port)
                .put("cluster.name", cluster)
                .put("path.home", getHome())
                .build();
    }

    protected Settings getNodeSettings() {
        return Settings.builder()
                .put("cluster.name", cluster)
                .put("transport.type", Netty4Plugin.NETTY_TRANSPORT_NAME)
                .put("path.home", getHome())
                .build();
    }

    protected static String getHome() {
        return System.getProperty("path.home", System.getProperty("user.dir"));
    }

    protected void startNode(String id) throws NodeValidationException {
        buildNode(id).start();
    }

    protected AbstractClient client(String id) {
        return clients.get(id);
    }

    protected void setClusterName(String cluster) {
        this.cluster = cluster;
    }

    protected String getClusterName() {
        return cluster;
    }

    protected String randomString(int len) {
        final char[] buf = new char[len];
        final int n = numbersAndLetters.length - 1;
        for (int i = 0; i < buf.length; i++) {
            buf[i] = numbersAndLetters[random.nextInt(n)];
        }
        return new String(buf);
    }

    private void closeNodes() throws IOException {
        logger.info("closing all clients");
        for (AbstractClient client : clients.values()) {
            client.close();
        }
        clients.clear();
        logger.info("closing all nodes");
        for (Node node : nodes.values()) {
            if (node != null) {
                node.close();
            }
        }
        nodes.clear();
        logger.info("all nodes closed");
    }

    private void findNodeAddress() {
        NodesInfoRequest nodesInfoRequest = new NodesInfoRequest().transport(true);
        NodesInfoResponse response = client("1").admin().cluster().nodesInfo(nodesInfoRequest).actionGet();
        TransportAddress address = response.getNodes().iterator().next().getTransport().getAddress()
                .publishAddress();
        host = address.address().getHostName();
        port = address.address().getPort();
    }

    private Node buildNode(String id) {
        Settings nodeSettings = Settings.builder()
                .put(getNodeSettings())
                .put("node.name", id)
                .build();
        List<Class<? extends Plugin>> plugins = Arrays.asList(CommonAnalysisPlugin.class, Netty4Plugin.class);
        Node node = new MockNode(nodeSettings, plugins);
        AbstractClient client = (AbstractClient) node.client();
        nodes.put(id, node);
        clients.put(id, client);
        logger.info("clients={}", clients);
        return node;
    }

    private static void deleteFiles() throws IOException {
        Path directory = Paths.get(getHome() + "/data");
        Files.walkFileTree(directory, new SimpleFileVisitor<>() {
            @Override
            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                Files.delete(file);
                return FileVisitResult.CONTINUE;
            }

            @Override
            public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException {
                Files.delete(dir);
                return FileVisitResult.CONTINUE;
            }
        });
    }
}
@@ -0,0 +1,52 @@
package org.xbib.elx.common.test;

import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.junit.Test;

import java.io.IOException;

public class WildcardTest extends TestBase {

    @Test
    public void testWildcard() throws Exception {
        index(client("1"), "1", "010");
        index(client("1"), "2", "0*0");
        // exact
        validateCount(client("1"), QueryBuilders.queryStringQuery("010").defaultField("field"), 1);
        validateCount(client("1"), QueryBuilders.queryStringQuery("0\\*0").defaultField("field"), 1);
        // pattern
        validateCount(client("1"), QueryBuilders.queryStringQuery("0*0").defaultField("field"), 1); // 2?
        validateCount(client("1"), QueryBuilders.queryStringQuery("0?0").defaultField("field"), 1); // 2?
        validateCount(client("1"), QueryBuilders.queryStringQuery("0**0").defaultField("field"), 1); // 2?
        validateCount(client("1"), QueryBuilders.queryStringQuery("0??0").defaultField("field"), 0);
        validateCount(client("1"), QueryBuilders.queryStringQuery("*10").defaultField("field"), 1);
        validateCount(client("1"), QueryBuilders.queryStringQuery("*1*").defaultField("field"), 1);
        validateCount(client("1"), QueryBuilders.queryStringQuery("*\\*0").defaultField("field"), 0); // 1?
        validateCount(client("1"), QueryBuilders.queryStringQuery("*\\**").defaultField("field"), 0); // 1?
    }

    private void index(Client client, String id, String fieldValue) throws IOException {
        client.index(new IndexRequest("index", "type", id)
                .source(XContentFactory.jsonBuilder().startObject().field("field", fieldValue).endObject()))
                .actionGet();
        client.admin().indices().refresh(new RefreshRequest()).actionGet();
    }

    private long count(Client client, QueryBuilder queryBuilder) {
        return client.prepareSearch("index").setTypes("type")
                .setQuery(queryBuilder)
                .execute().actionGet().getHits().getTotalHits();
    }

    private void validateCount(Client client, QueryBuilder queryBuilder, long expectedHits) {
        final long actualHits = count(client, queryBuilder);
        if (actualHits != expectedHits) {
            throw new RuntimeException("actualHits=" + actualHits + ", expectedHits=" + expectedHits);
        }
    }
}
@@ -0,0 +1,4 @@
/**
 * Tests for the common extended client classes.
 */
package org.xbib.elx.common.test;
@@ -2,11 +2,11 @@
<configuration status="OFF">
    <appenders>
        <Console name="Console" target="SYSTEM_OUT">
            <PatternLayout pattern="[%d{ABSOLUTE}][%-5p][%-25c][%t] %m%n"/>
            <PatternLayout pattern="[%d{ISO8601}][%-5p][%-25c][%t] %m%n"/>
        </Console>
    </appenders>
    <Loggers>
        <Root level="debug">
        <Root level="info">
            <AppenderRef ref="Console" />
        </Root>
    </Loggers>
6 elx-http/build.gradle Normal file
@@ -0,0 +1,6 @@
dependencies {
    compile project(':elx-common')
    compile "org.xbib.elasticsearch:transport-netty4:${rootProject.property('elasticsearch-server.version')}"
    compile "org.xbib:netty-http-client:${project.property('xbib-netty-http.version')}"
    testCompile "org.xbib.elasticsearch:elasticsearch-analysis-common:${rootProject.property('elasticsearch-server.version')}"
}
@@ -1,4 +1,4 @@
package org.xbib.elasticsearch.client.http;
package org.xbib.elx.http;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
@@ -13,21 +13,16 @@ import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.node.Node;
import org.elasticsearch.threadpool.ThreadPool;
import org.xbib.elasticsearch.client.AbstractClient;
import org.xbib.elasticsearch.client.BulkControl;
import org.xbib.elasticsearch.client.BulkMetric;
import org.xbib.elx.common.AbstractExtendedClient;
import org.xbib.netty.http.client.Client;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.ServiceLoader;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
@@ -35,52 +30,44 @@ import java.util.stream.Stream;
/**
 * Elasticsearch HTTP client.
 */
public class HttpClient extends AbstractClient implements ElasticsearchClient {
public class ExtendedHttpClient extends AbstractExtendedClient implements ElasticsearchClient {

    private static final Logger logger = LogManager.getLogger(HttpClient.class);
    private static final Logger logger = LogManager.getLogger(ExtendedHttpClient.class);

    private Client client;
    private Client nettyHttpClient;

    private NamedXContentRegistry registry;
    private final ClassLoader classLoader;

    private final NamedXContentRegistry registry;

    @SuppressWarnings("rawtypes")
    private Map<GenericAction, HttpAction> actionMap;
    private final Map<GenericAction, HttpAction> actionMap;

    private List<String> urls;
    private String url;

    //private ThreadPool threadPool;

    @Override
    public HttpClient init(ElasticsearchClient client, Settings settings, BulkMetric metric, BulkControl control) {
        init(client, settings, metric, control, null, Collections.emptyList());
        return this;
    public ExtendedHttpClient(List<NamedXContentRegistry.Entry> namedXContentEntries, ClassLoader classLoader) {
        this.registry = new NamedXContentRegistry(Stream.of(getNamedXContents().stream(),
                namedXContentEntries.stream()).flatMap(Function.identity()).collect(Collectors.toList()));
        this.classLoader = classLoader != null ? classLoader : Thread.currentThread().getContextClassLoader();
        this.actionMap = new HashMap<>();
    }

    @Override
    @SuppressWarnings({"unchecked", "rawtypes"})
    private void init(ElasticsearchClient client, Settings settings, BulkMetric metric, BulkControl control,
                      ClassLoader classLoader, List<NamedXContentRegistry.Entry> namedXContentEntries) {
        //super.init(client, settings, metric, control);
        this.urls = settings.getAsList("urls");
        if (urls.isEmpty()) {
            throw new IllegalArgumentException("no urls given");
    public ExtendedHttpClient init(Settings settings) throws IOException {
        super.init(settings);
        if (settings == null) {
            return null;
        }
        this.registry = new NamedXContentRegistry(Stream.of(getNamedXContents().stream(),
                namedXContentEntries.stream()
        ).flatMap(Function.identity()).collect(Collectors.toList()));
        this.actionMap = new HashMap<>();
        ServiceLoader<HttpAction> httpActionServiceLoader = ServiceLoader.load(HttpAction.class,
                classLoader != null ? classLoader : Thread.currentThread().getContextClassLoader());
        this.url = settings.get("url");
        ServiceLoader<HttpAction> httpActionServiceLoader = ServiceLoader.load(HttpAction.class, classLoader);
        for (HttpAction<? extends ActionRequest, ? extends ActionResponse> httpAction : httpActionServiceLoader) {
            httpAction.setSettings(settings);
            actionMap.put(httpAction.getActionInstance(), httpAction);
        }
        this.client = Client.builder().enableDebug().build();
        Settings threadPoolsettings = Settings.builder()
                .put(settings)
                .put(Node.NODE_NAME_SETTING.getKey(), "httpclient")
                .build();
        //this.threadPool = threadPool != null ? threadPool : new ThreadPool(threadPoolsettings);
        logger.info("HTTP client initialized with {} actions", actionMap.size());
        this.nettyHttpClient = Client.builder().enableDebug().build();
        logger.info("extended HTTP client initialized with {} actions", actionMap.size());
        return this;
    }

    private static List<NamedXContentRegistry.Entry> getNamedXContents() {
@@ -91,28 +78,23 @@ public class HttpClient extends AbstractClient implements ElasticsearchClient {
        return registry;
    }

    public static Builder builder() {
        return new Builder();
    }

    public Client internalClient() {
        return client;
        return nettyHttpClient;
    }

    @Override
    public ElasticsearchClient client() {
    public ElasticsearchClient getClient() {
        return this;
    }

    @Override
    protected ElasticsearchClient createClient(Settings settings) throws IOException {
    protected ElasticsearchClient createClient(Settings settings) {
        return this;
    }

    @Override
    public void shutdown() throws IOException {
        client.shutdownGracefully();
        //threadPool.close();
    protected void closeClient() throws IOException {
        nettyHttpClient.shutdownGracefully();
    }

    @Override
@@ -142,68 +124,22 @@ public class HttpClient extends AbstractClient implements ElasticsearchClient {
    @Override
    public ThreadPool threadPool() {
        logger.info("returning null for threadPool() request");
        return null; //threadPool;
        return null;
    }

    @SuppressWarnings({"unchecked", "rawtypes"})
    public <R extends ActionRequest, T extends ActionResponse, B extends ActionRequestBuilder<R, T, B>>
    private <R extends ActionRequest, T extends ActionResponse, B extends ActionRequestBuilder<R, T, B>>
            void doExecute(Action<R, T, B> action, R request, ActionListener<T> listener) {
        HttpAction httpAction = actionMap.get(action);
        if (httpAction == null) {
            throw new IllegalStateException("failed to find http action [" + action + "] to execute");
        }
        logger.info("http action = " + httpAction);
        String url = urls.get(0); // TODO
        try {
            logger.info("submitting to URL {}", url);
            HttpActionContext httpActionContext = new HttpActionContext(this, request, url);
            httpAction.execute(httpActionContext, listener);
            logger.info("submitted to URL {}", url);
            logger.debug("submitted to URL {}", url);
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
        }
    }

    /**
     * The Builder for HTTP client.
     */
    public static class Builder {

        private final Settings.Builder settingsBuilder = Settings.builder();

        private ClassLoader classLoader;

        private List<NamedXContentRegistry.Entry> namedXContentEntries;

        private ThreadPool threadPool = null;

        public Builder settings(Settings settings) {
            this.settingsBuilder.put(settings);
            return this;
        }

        public Builder classLoader(ClassLoader classLoader) {
            this.classLoader = classLoader;
            return this;
        }

        public Builder namedXContentEntries(List<NamedXContentRegistry.Entry> namedXContentEntries) {
            this.namedXContentEntries = namedXContentEntries;
            return this;
        }

        public Builder threadPool(ThreadPool threadPool) {
            this.threadPool = threadPool;
            return this;
        }

        @SuppressWarnings({"unchecked", "rawtypes"})
        public HttpClient build() {
            Settings settings = settingsBuilder.build();
            HttpClient httpClient = new HttpClient();
            httpClient.init(null, settings, null, null,
                    classLoader, namedXContentEntries);
            return httpClient;
        }
    }
}
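A minimal construction sketch based only on the constructor and init(Settings) signatures visible in this diff; the example package, class name, and URL value are illustrative assumptions, and AbstractExtendedClient.init is not shown here, so additional settings may be required.

package org.xbib.elx.http.example;

import org.elasticsearch.common.settings.Settings;
import org.xbib.elx.http.ExtendedHttpClient;

import java.io.IOException;
import java.util.Collections;

// Hypothetical example class, not part of this commit.
public class ExtendedHttpClientExample {

    public static void main(String[] args) throws IOException {
        ExtendedHttpClient client = new ExtendedHttpClient(Collections.emptyList(),
                Thread.currentThread().getContextClassLoader());
        // init(Settings) above reads the "url" setting and registers the
        // ServiceLoader-discovered HttpAction implementations; requests issued
        // through the ElasticsearchClient interface are then routed via doExecute(...)
        client.init(Settings.builder().put("url", "http://localhost:9200").build());
    }
}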
@@ -0,0 +1,12 @@
package org.xbib.elx.http;

import org.xbib.elx.api.ExtendedClientProvider;

import java.util.Collections;

public class ExtendedHttpClientProvider implements ExtendedClientProvider<ExtendedHttpClient> {
    @Override
    public ExtendedHttpClient getExtendedClient() {
        return new ExtendedHttpClient(Collections.emptyList(), Thread.currentThread().getContextClassLoader());
    }
}
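How this provider is discovered is not shown in this excerpt; a plausible reading, mirroring the ServiceLoader lookup that ExtendedHttpClient.init(Settings) uses for HttpAction implementations, is that ExtendedClientProvider implementations are registered under META-INF/services and looked up as sketched below. This is a hypothetical sketch, not the project's actual bootstrap code.

package org.xbib.elx.http.example;

import org.xbib.elx.api.ExtendedClientProvider;

import java.util.ServiceLoader;

// Hypothetical lookup sketch; assumes a META-INF/services entry for
// org.xbib.elx.api.ExtendedClientProvider pointing at ExtendedHttpClientProvider.
public class ProviderLookupExample {

    public static void main(String[] args) {
        for (ExtendedClientProvider<?> provider : ServiceLoader.load(ExtendedClientProvider.class)) {
            System.out.println(provider.getExtendedClient().getClass().getName());
        }
    }
}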
@@ -1,4 +1,4 @@
package org.xbib.elasticsearch.client.http;
package org.xbib.elx.http;

import io.netty.buffer.ByteBuf;
import io.netty.handler.codec.http.HttpHeaderNames;
@@ -68,7 +68,7 @@ public abstract class HttpAction<R extends ActionRequest, T extends ActionRespon
                    " content = " + fullHttpResponse.content().toString(StandardCharsets.UTF_8));
            listener.onResponse(parseToResponse(httpActionContext.setHttpResponse(fullHttpResponse)));
        });
        Transport transport = httpActionContext.getHttpClient().internalClient().execute(httpRequest);
        Transport transport = httpActionContext.getExtendedHttpClient().internalClient().execute(httpRequest);
        logger.info("transport = " + transport);
        httpActionContext.setHttpClientTransport(transport);
        if (transport.isFailed()) {
@@ -143,7 +143,8 @@ public abstract class HttpAction<R extends ActionRequest, T extends ActionRespon
        if (xContentType == null) {
            throw new IllegalStateException("unsupported content-type: " + mediaType);
        }
        try (XContentParser parser = xContentType.xContent().createParser(httpActionContext.getHttpClient().getRegistry(),
        try (XContentParser parser = xContentType.xContent()
                .createParser(httpActionContext.getExtendedHttpClient().getRegistry(),
                DeprecationHandler.THROW_UNSUPPORTED_OPERATION,
                httpActionContext.getHttpResponse().content().array())) {
            return entityParser().apply(parser);
@@ -1,4 +1,4 @@
package org.xbib.elasticsearch.client.http;
package org.xbib.elx.http;

import io.netty.handler.codec.http.FullHttpResponse;
import org.elasticsearch.action.ActionRequest;
@@ -13,7 +13,7 @@ import org.xbib.netty.http.client.transport.Transport;
 */
public class HttpActionContext<R extends ActionRequest, T extends ActionResponse> {

    private final HttpClient httpClient;
    private final ExtendedHttpClient extendedHttpClient;

    private final R request;

@@ -23,14 +23,14 @@ public class HttpActionContext<R extends ActionRequest, T extends ActionResponse

    private FullHttpResponse httpResponse;

    HttpActionContext(HttpClient httpClient, R request, String url) {
        this.httpClient = httpClient;
    HttpActionContext(ExtendedHttpClient extendedHttpClient, R request, String url) {
        this.extendedHttpClient = extendedHttpClient;
        this.request = request;
        this.url = url;
    }

    public HttpClient getHttpClient() {
        return httpClient;
    public ExtendedHttpClient getExtendedHttpClient() {
        return extendedHttpClient;
    }

    public R getRequest() {
@@ -1,4 +1,4 @@
package org.xbib.elasticsearch.client.http;
package org.xbib.elx.http;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchTimeoutException;
@@ -13,8 +13,6 @@ import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

/**
 */
public class HttpActionFuture<T, L> extends BaseFuture<T> implements ActionFuture<T>, ActionListener<L> {

    private Transport httpClientTransport;
@@ -0,0 +1,135 @@
package org.xbib.elx.http.action.admin.cluster.health;

import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.cluster.health.ClusterIndexHealth;
import org.elasticsearch.common.CheckedFunction;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import org.xbib.elx.http.HttpAction;
import org.xbib.netty.http.client.RequestBuilder;

import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static java.util.Collections.emptyMap;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;

public class HttpClusterHealthAction extends HttpAction<ClusterHealthRequest, ClusterHealthResponse> {

    @Override
    public ClusterHealthAction getActionInstance() {
        return ClusterHealthAction.INSTANCE;
    }

    @Override
    protected RequestBuilder createHttpRequest(String url, ClusterHealthRequest request) {
        return newPutRequest(url, "/_cluster/health");
    }

    @Override
    protected CheckedFunction<XContentParser, ClusterHealthResponse, IOException> entityParser() {
        throw new UnsupportedOperationException();
    }

    private static final String CLUSTER_NAME = "cluster_name";
    private static final String STATUS = "status";
    private static final String TIMED_OUT = "timed_out";
    private static final String NUMBER_OF_NODES = "number_of_nodes";
    private static final String NUMBER_OF_DATA_NODES = "number_of_data_nodes";
    private static final String NUMBER_OF_PENDING_TASKS = "number_of_pending_tasks";
    private static final String NUMBER_OF_IN_FLIGHT_FETCH = "number_of_in_flight_fetch";
    private static final String DELAYED_UNASSIGNED_SHARDS = "delayed_unassigned_shards";
    private static final String TASK_MAX_WAIT_TIME_IN_QUEUE = "task_max_waiting_in_queue";
    private static final String TASK_MAX_WAIT_TIME_IN_QUEUE_IN_MILLIS = "task_max_waiting_in_queue_millis";
    private static final String ACTIVE_SHARDS_PERCENT_AS_NUMBER = "active_shards_percent_as_number";
    private static final String ACTIVE_SHARDS_PERCENT = "active_shards_percent";
    private static final String ACTIVE_PRIMARY_SHARDS = "active_primary_shards";
    private static final String ACTIVE_SHARDS = "active_shards";
    private static final String RELOCATING_SHARDS = "relocating_shards";
    private static final String INITIALIZING_SHARDS = "initializing_shards";
    private static final String UNASSIGNED_SHARDS = "unassigned_shards";
    private static final String INDICES = "indices";

    private static final ConstructingObjectParser<ClusterHealthResponse, Void> PARSER =
            new ConstructingObjectParser<>("cluster_health_response", true,
                    parsedObjects -> {
                        int i = 0;
                        // ClusterStateHealth fields
                        int numberOfNodes = (int) parsedObjects[i++];
                        int numberOfDataNodes = (int) parsedObjects[i++];
                        int activeShards = (int) parsedObjects[i++];
                        int relocatingShards = (int) parsedObjects[i++];
                        int activePrimaryShards = (int) parsedObjects[i++];
                        int initializingShards = (int) parsedObjects[i++];
                        int unassignedShards = (int) parsedObjects[i++];
                        double activeShardsPercent = (double) parsedObjects[i++];
                        String statusStr = (String) parsedObjects[i++];
                        ClusterHealthStatus status = ClusterHealthStatus.fromString(statusStr);
                        @SuppressWarnings("unchecked") List<ClusterIndexHealth> indexList =
                                (List<ClusterIndexHealth>) parsedObjects[i++];
                        final Map<String, ClusterIndexHealth> indices;
                        if (indexList == null || indexList.isEmpty()) {
                            indices = emptyMap();
                        } else {
                            indices = new HashMap<>(indexList.size());
                            for (ClusterIndexHealth indexHealth : indexList) {
                                indices.put(indexHealth.getIndex(), indexHealth);
                            }
                        }
                        /*ClusterStateHealth stateHealth = new ClusterStateHealth(activePrimaryShards, activeShards, relocatingShards,
                                initializingShards, unassignedShards, numberOfNodes, numberOfDataNodes, activeShardsPercent, status,
                                indices);*/
                        //ClusterState clusterState = new ClusterState();
                        //ClusterStateHealth clusterStateHealth = new ClusterStateHealth(clusterState, concreteIndices);

                        // ClusterHealthResponse fields
                        String clusterName = (String) parsedObjects[i++];
                        int numberOfPendingTasks = (int) parsedObjects[i++];
                        int numberOfInFlightFetch = (int) parsedObjects[i++];
                        int delayedUnassignedShards = (int) parsedObjects[i++];
                        long taskMaxWaitingTimeMillis = (long) parsedObjects[i++];
                        boolean timedOut = (boolean) parsedObjects[i];

                        return new ClusterHealthResponse(clusterName, null, null, numberOfPendingTasks,
                                numberOfInFlightFetch, delayedUnassignedShards,
                                TimeValue.timeValueMillis(taskMaxWaitingTimeMillis));
                        /*return new ClusterHealthResponse(clusterName, numberOfPendingTasks, numberOfInFlightFetch,
                                delayedUnassignedShards,
                                TimeValue.timeValueMillis(taskMaxWaitingTimeMillis), timedOut, stateHealth);*/
                    });

    // private static final ObjectParser.NamedObjectParser<ClusterIndexHealth, Void> INDEX_PARSER =
    //     (XContentParser parser, Void context, String index) -> ClusterIndexHealth.innerFromXContent(parser, index);

    static {
        // ClusterStateHealth fields
        PARSER.declareInt(constructorArg(), new ParseField(NUMBER_OF_NODES));
        PARSER.declareInt(constructorArg(), new ParseField(NUMBER_OF_DATA_NODES));
        PARSER.declareInt(constructorArg(), new ParseField(ACTIVE_SHARDS));
        PARSER.declareInt(constructorArg(), new ParseField(RELOCATING_SHARDS));
        PARSER.declareInt(constructorArg(), new ParseField(ACTIVE_PRIMARY_SHARDS));
        PARSER.declareInt(constructorArg(), new ParseField(INITIALIZING_SHARDS));
        PARSER.declareInt(constructorArg(), new ParseField(UNASSIGNED_SHARDS));
        PARSER.declareDouble(constructorArg(), new ParseField(ACTIVE_SHARDS_PERCENT_AS_NUMBER));
        PARSER.declareString(constructorArg(), new ParseField(STATUS));
        // Can be absent if LEVEL == 'cluster'
        //PARSER.declareNamedObjects(optionalConstructorArg(), INDEX_PARSER, new ParseField(INDICES));

        // ClusterHealthResponse fields
        PARSER.declareString(constructorArg(), new ParseField(CLUSTER_NAME));
        PARSER.declareInt(constructorArg(), new ParseField(NUMBER_OF_PENDING_TASKS));
        PARSER.declareInt(constructorArg(), new ParseField(NUMBER_OF_IN_FLIGHT_FETCH));
        PARSER.declareInt(constructorArg(), new ParseField(DELAYED_UNASSIGNED_SHARDS));
        PARSER.declareLong(constructorArg(), new ParseField(TASK_MAX_WAIT_TIME_IN_QUEUE_IN_MILLIS));
        PARSER.declareBoolean(constructorArg(), new ParseField(TIMED_OUT));
    }

}
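The ConstructingObjectParser declared above is not used yet: entityParser() still throws UnsupportedOperationException, and the ClusterHealthResponse construction inside the parser lambda is partly commented out. Once that is resolved, a plausible wiring, shown here only as an assumption and not as part of this commit, would delegate to the parser from inside this class:

    // possible body for entityParser() inside HttpClusterHealthAction (sketch):
    @Override
    protected CheckedFunction<XContentParser, ClusterHealthResponse, IOException> entityParser() {
        return parser -> PARSER.apply(parser, null);
    }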
@@ -1,8 +1,13 @@
package org.elasticsearch.action.admin.cluster.node.info;
package org.xbib.elx.http.action.admin.cluster.node.info;

import org.elasticsearch.Build;
import org.elasticsearch.Version;
import org.elasticsearch.action.FailedNodeException;
import org.elasticsearch.action.admin.cluster.node.info.NodeInfo;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoAction;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse;
import org.elasticsearch.action.admin.cluster.node.info.PluginsAndModules;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.CheckedFunction;
@@ -16,8 +21,8 @@ import org.elasticsearch.monitor.os.OsInfo;
import org.elasticsearch.monitor.process.ProcessInfo;
import org.elasticsearch.threadpool.ThreadPoolInfo;
import org.elasticsearch.transport.TransportInfo;
import org.xbib.elasticsearch.client.http.HttpAction;
import org.xbib.elasticsearch.client.http.HttpActionContext;
import org.xbib.elx.http.HttpAction;
import org.xbib.elx.http.HttpActionContext;
import org.xbib.netty.http.client.RequestBuilder;

import java.io.IOException;
@@ -106,11 +111,11 @@ public class HttpNodesInfoAction extends HttpAction<NodesInfoRequest, NodesInfoR
            String nodeId = entry.getKey();
            String ephemeralId = null;
            Map<String, Object> map2 = (Map<String, Object>) entry.getValue();
            String nodeName = (String)map2.get("name");
            String hostName = (String)map2.get("host");
            String hostAddress = (String)map2.get("ip");
            String nodeName = (String) map2.get("name");
            String hostName = (String) map2.get("host");
            String hostAddress = (String) map2.get("ip");
            // <host>[/<ip>][:<port>]
            String transportAddressString = (String)map2.get("transport_address");
            String transportAddressString = (String) map2.get("transport_address");
            int pos = transportAddressString.indexOf(':');
            String host = pos > 0 ? transportAddressString.substring(0, pos) : transportAddressString;
            int port = Integer.parseInt(pos > 0 ? transportAddressString.substring(pos + 1) : "0");
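The address parsing in this hunk takes everything before the first ':' as the host and parses the remainder as the port. A stand-alone sketch of the same logic, with an illustrative value:

package org.xbib.elx.http.example;

// Hypothetical stand-alone check of the parsing above; the address value is illustrative.
public class TransportAddressParseExample {

    public static void main(String[] args) {
        String transportAddressString = "127.0.0.1:9300";
        int pos = transportAddressString.indexOf(':');
        String host = pos > 0 ? transportAddressString.substring(0, pos) : transportAddressString;
        int port = Integer.parseInt(pos > 0 ? transportAddressString.substring(pos + 1) : "0");
        System.out.println(host + " -> " + port); // prints 127.0.0.1 -> 9300
    }
}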
@@ -121,8 +126,8 @@ public class HttpNodesInfoAction extends HttpAction<NodesInfoRequest, NodesInfoR
            TransportAddress transportAddress = new TransportAddress(inetAddresses[0], port);
            Build build = new Build(Build.Flavor.OSS, Build.Type.TAR,
                    (String) map2.get("build"),
                    (String)map2.get("date"),
                    (Boolean)map2.get("snapshot"));
                    (String) map2.get("date"),
                    (Boolean) map2.get("snapshot"));
            Map<String, String> attributes = Collections.emptyMap();
            Set<DiscoveryNode.Role> roles = new HashSet<>();
            Version version = Version.fromString((String) map2.get("version"));
@@ -1,11 +1,14 @@
package org.elasticsearch.action.admin.cluster.settings;
package org.xbib.elx.http.action.admin.cluster.settings;

import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsAction;
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse;
import org.elasticsearch.common.CheckedFunction;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.xbib.elasticsearch.client.http.HttpAction;
import org.xbib.elx.http.HttpAction;
import org.xbib.netty.http.client.RequestBuilder;

import java.io.IOException;
@@ -13,9 +16,6 @@ import java.io.UncheckedIOException;

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;

/**
 *
 */
public class HttpClusterUpdateSettingsAction extends HttpAction<ClusterUpdateSettingsRequest, ClusterUpdateSettingsResponse> {

    @Override
@@ -41,9 +41,6 @@ public class HttpClusterUpdateSettingsAction extends HttpAction<ClusterUpdateSet

    @Override
    protected CheckedFunction<XContentParser, ClusterUpdateSettingsResponse, IOException> entityParser() {
        return parser -> {
            // TODO(jprante)
            return new ClusterUpdateSettingsResponse();
        };
        return ClusterUpdateSettingsResponse::fromXContent;
    }
}
Some files were not shown because too many files have changed in this diff.