Dataset columns (types and observed value ranges):

  id                 int32   0 .. 165k
  repo               string  lengths 7 .. 58
  path               string  lengths 12 .. 218
  func_name          string  lengths 3 .. 140
  original_string    string  lengths 73 .. 34.1k
  language           string  1 class ("java")
  code               string  lengths 73 .. 34.1k
  code_tokens        list
  docstring          string  lengths 3 .. 16k
  docstring_tokens   list
  sha                string  lengths 40 .. 40
  url                string  lengths 105 .. 339
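The columns above can be mirrored in a small value type. The Java record below is a hypothetical model of one row of this dump; the field names follow the column names, and the Java types are assumptions inferred from the schema rather than anything defined by the dataset itself.

import java.util.List;

// Hypothetical row model inferred from the column listing above.
public record CodeSearchRow(
        int id,                       // int32, 0 .. 165k
        String repo,                  // e.g. "lettuce-io/lettuce-core"
        String path,                  // file path inside the repo
        String funcName,              // e.g. "RoundRobin.next"
        String originalString,        // full function source
        String language,              // single class: "java"
        String code,                  // same content as originalString in these rows
        List<String> codeTokens,      // tokenized source
        String docstring,             // leading Javadoc text
        List<String> docstringTokens, // tokenized docstring
        String sha,                   // 40-character commit hash
        String url) {                 // GitHub permalink with line range
}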
14,800
lettuce-io/lettuce-core
src/main/java/io/lettuce/core/cluster/RoundRobin.java
RoundRobin.next
public V next() { Collection<? extends V> collection = this.collection; V offset = this.offset; if (offset != null) { boolean accept = false; for (V element : collection) { if (element == offset) { accept = true; continue; } if (accept) { return this.offset = element; } } } return this.offset = collection.iterator().next(); }
java
public V next() { Collection<? extends V> collection = this.collection; V offset = this.offset; if (offset != null) { boolean accept = false; for (V element : collection) { if (element == offset) { accept = true; continue; } if (accept) { return this.offset = element; } } } return this.offset = collection.iterator().next(); }
[ "public", "V", "next", "(", ")", "{", "Collection", "<", "?", "extends", "V", ">", "collection", "=", "this", ".", "collection", ";", "V", "offset", "=", "this", ".", "offset", ";", "if", "(", "offset", "!=", "null", ")", "{", "boolean", "accept", "=", "false", ";", "for", "(", "V", "element", ":", "collection", ")", "{", "if", "(", "element", "==", "offset", ")", "{", "accept", "=", "true", ";", "continue", ";", "}", "if", "(", "accept", ")", "{", "return", "this", ".", "offset", "=", "element", ";", "}", "}", "}", "return", "this", ".", "offset", "=", "collection", ".", "iterator", "(", ")", ".", "next", "(", ")", ";", "}" ]
Returns the next item. @return the next item
[ "Returns", "the", "next", "item", "." ]
b6de74e384dea112e3656684ca3f50cdfd6c8e0d
https://github.com/lettuce-io/lettuce-core/blob/b6de74e384dea112e3656684ca3f50cdfd6c8e0d/src/main/java/io/lettuce/core/cluster/RoundRobin.java#L64-L84
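The method above advances to the element that follows the previously returned one and wraps to the first element on the first call or when the end is reached. RoundRobinSketch below is a hypothetical standalone class that reproduces only that advance logic for illustration; it is not the lettuce class itself.

import java.util.Collection;
import java.util.List;

class RoundRobinSketch<V> {
    private final Collection<? extends V> collection;
    private V offset; // last element handed out, or null before the first call

    RoundRobinSketch(Collection<? extends V> collection) {
        this.collection = collection;
    }

    V next() {
        if (offset != null) {
            boolean accept = false;
            for (V element : collection) {
                if (element == offset) { accept = true; continue; }
                if (accept) { return offset = element; } // element right after the previous one
            }
        }
        return offset = collection.iterator().next(); // first call, or wrap around to the start
    }

    public static void main(String[] args) {
        RoundRobinSketch<String> rr = new RoundRobinSketch<>(List.of("a", "b", "c"));
        System.out.println(rr.next() + rr.next() + rr.next() + rr.next()); // abca
    }
}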
14,801
lettuce-io/lettuce-core
src/main/java/io/lettuce/core/dynamic/intercept/InvocationProxyFactory.java
InvocationProxyFactory.addInterface
public void addInterface(Class<?> ifc) { LettuceAssert.notNull(ifc, "Interface type must not be null"); LettuceAssert.isTrue(ifc.isInterface(), "Type must be an interface"); this.interfaces.add(ifc); }
java
public void addInterface(Class<?> ifc) { LettuceAssert.notNull(ifc, "Interface type must not be null"); LettuceAssert.isTrue(ifc.isInterface(), "Type must be an interface"); this.interfaces.add(ifc); }
[ "public", "void", "addInterface", "(", "Class", "<", "?", ">", "ifc", ")", "{", "LettuceAssert", ".", "notNull", "(", "ifc", ",", "\"Interface type must not be null\"", ")", ";", "LettuceAssert", ".", "isTrue", "(", "ifc", ".", "isInterface", "(", ")", ",", "\"Type must be an interface\"", ")", ";", "this", ".", "interfaces", ".", "add", "(", "ifc", ")", ";", "}" ]
Add a interface type that should be implemented by the resulting invocation proxy. @param ifc must not be {@literal null} and must be an interface type.
[ "Add", "a", "interface", "type", "that", "should", "be", "implemented", "by", "the", "resulting", "invocation", "proxy", "." ]
b6de74e384dea112e3656684ca3f50cdfd6c8e0d
https://github.com/lettuce-io/lettuce-core/blob/b6de74e384dea112e3656684ca3f50cdfd6c8e0d/src/main/java/io/lettuce/core/dynamic/intercept/InvocationProxyFactory.java#L68-L74
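The factory collects interface types and later builds an invocation proxy over them; the rest of its API is not shown in this row. The JDK-only sketch below, using java.lang.reflect.Proxy with a made-up Greeter interface, only illustrates the kind of proxy such a factory typically produces, and is an assumption rather than lettuce's implementation.

import java.lang.reflect.Proxy;

class ProxySketch {
    interface Greeter {
        String greet(String name);
    }

    public static void main(String[] args) {
        // Every call on the proxy is routed through a single InvocationHandler lambda.
        Greeter greeter = (Greeter) Proxy.newProxyInstance(
                Greeter.class.getClassLoader(),
                new Class<?>[] { Greeter.class },
                (proxy, method, methodArgs) -> "Hello, " + methodArgs[0]);
        System.out.println(greeter.greet("lettuce")); // Hello, lettuce
    }
}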
14,802
lettuce-io/lettuce-core
src/main/java/io/lettuce/core/dynamic/ReactiveTypeAdapters.java
ReactiveTypeAdapters.registerIn
static void registerIn(ConversionService conversionService) { LettuceAssert.notNull(conversionService, "ConversionService must not be null!"); if (ReactiveTypes.isAvailable(ReactiveLibrary.PROJECT_REACTOR)) { if (ReactiveTypes.isAvailable(ReactiveLibrary.RXJAVA1)) { conversionService.addConverter(PublisherToRxJava1CompletableAdapter.INSTANCE); conversionService.addConverter(RxJava1CompletableToPublisherAdapter.INSTANCE); conversionService.addConverter(RxJava1CompletableToMonoAdapter.INSTANCE); conversionService.addConverter(PublisherToRxJava1SingleAdapter.INSTANCE); conversionService.addConverter(RxJava1SingleToPublisherAdapter.INSTANCE); conversionService.addConverter(RxJava1SingleToMonoAdapter.INSTANCE); conversionService.addConverter(RxJava1SingleToFluxAdapter.INSTANCE); conversionService.addConverter(PublisherToRxJava1ObservableAdapter.INSTANCE); conversionService.addConverter(RxJava1ObservableToPublisherAdapter.INSTANCE); conversionService.addConverter(RxJava1ObservableToMonoAdapter.INSTANCE); conversionService.addConverter(RxJava1ObservableToFluxAdapter.INSTANCE); } if (ReactiveTypes.isAvailable(ReactiveLibrary.RXJAVA2)) { conversionService.addConverter(PublisherToRxJava2CompletableAdapter.INSTANCE); conversionService.addConverter(RxJava2CompletableToPublisherAdapter.INSTANCE); conversionService.addConverter(RxJava2CompletableToMonoAdapter.INSTANCE); conversionService.addConverter(PublisherToRxJava2SingleAdapter.INSTANCE); conversionService.addConverter(RxJava2SingleToPublisherAdapter.INSTANCE); conversionService.addConverter(RxJava2SingleToMonoAdapter.INSTANCE); conversionService.addConverter(RxJava2SingleToFluxAdapter.INSTANCE); conversionService.addConverter(PublisherToRxJava2ObservableAdapter.INSTANCE); conversionService.addConverter(RxJava2ObservableToPublisherAdapter.INSTANCE); conversionService.addConverter(RxJava2ObservableToMonoAdapter.INSTANCE); conversionService.addConverter(RxJava2ObservableToFluxAdapter.INSTANCE); conversionService.addConverter(PublisherToRxJava2FlowableAdapter.INSTANCE); conversionService.addConverter(RxJava2FlowableToPublisherAdapter.INSTANCE); conversionService.addConverter(PublisherToRxJava2MaybeAdapter.INSTANCE); conversionService.addConverter(RxJava2MaybeToPublisherAdapter.INSTANCE); conversionService.addConverter(RxJava2MaybeToMonoAdapter.INSTANCE); conversionService.addConverter(RxJava2MaybeToFluxAdapter.INSTANCE); } conversionService.addConverter(PublisherToMonoAdapter.INSTANCE); conversionService.addConverter(PublisherToFluxAdapter.INSTANCE); if (ReactiveTypes.isAvailable(ReactiveLibrary.RXJAVA1)) { conversionService.addConverter(RxJava1SingleToObservableAdapter.INSTANCE); conversionService.addConverter(RxJava1ObservableToSingleAdapter.INSTANCE); } if (ReactiveTypes.isAvailable(ReactiveLibrary.RXJAVA2)) { conversionService.addConverter(RxJava2SingleToObservableAdapter.INSTANCE); conversionService.addConverter(RxJava2ObservableToSingleAdapter.INSTANCE); conversionService.addConverter(RxJava2ObservableToMaybeAdapter.INSTANCE); } } }
java
static void registerIn(ConversionService conversionService) { LettuceAssert.notNull(conversionService, "ConversionService must not be null!"); if (ReactiveTypes.isAvailable(ReactiveLibrary.PROJECT_REACTOR)) { if (ReactiveTypes.isAvailable(ReactiveLibrary.RXJAVA1)) { conversionService.addConverter(PublisherToRxJava1CompletableAdapter.INSTANCE); conversionService.addConverter(RxJava1CompletableToPublisherAdapter.INSTANCE); conversionService.addConverter(RxJava1CompletableToMonoAdapter.INSTANCE); conversionService.addConverter(PublisherToRxJava1SingleAdapter.INSTANCE); conversionService.addConverter(RxJava1SingleToPublisherAdapter.INSTANCE); conversionService.addConverter(RxJava1SingleToMonoAdapter.INSTANCE); conversionService.addConverter(RxJava1SingleToFluxAdapter.INSTANCE); conversionService.addConverter(PublisherToRxJava1ObservableAdapter.INSTANCE); conversionService.addConverter(RxJava1ObservableToPublisherAdapter.INSTANCE); conversionService.addConverter(RxJava1ObservableToMonoAdapter.INSTANCE); conversionService.addConverter(RxJava1ObservableToFluxAdapter.INSTANCE); } if (ReactiveTypes.isAvailable(ReactiveLibrary.RXJAVA2)) { conversionService.addConverter(PublisherToRxJava2CompletableAdapter.INSTANCE); conversionService.addConverter(RxJava2CompletableToPublisherAdapter.INSTANCE); conversionService.addConverter(RxJava2CompletableToMonoAdapter.INSTANCE); conversionService.addConverter(PublisherToRxJava2SingleAdapter.INSTANCE); conversionService.addConverter(RxJava2SingleToPublisherAdapter.INSTANCE); conversionService.addConverter(RxJava2SingleToMonoAdapter.INSTANCE); conversionService.addConverter(RxJava2SingleToFluxAdapter.INSTANCE); conversionService.addConverter(PublisherToRxJava2ObservableAdapter.INSTANCE); conversionService.addConverter(RxJava2ObservableToPublisherAdapter.INSTANCE); conversionService.addConverter(RxJava2ObservableToMonoAdapter.INSTANCE); conversionService.addConverter(RxJava2ObservableToFluxAdapter.INSTANCE); conversionService.addConverter(PublisherToRxJava2FlowableAdapter.INSTANCE); conversionService.addConverter(RxJava2FlowableToPublisherAdapter.INSTANCE); conversionService.addConverter(PublisherToRxJava2MaybeAdapter.INSTANCE); conversionService.addConverter(RxJava2MaybeToPublisherAdapter.INSTANCE); conversionService.addConverter(RxJava2MaybeToMonoAdapter.INSTANCE); conversionService.addConverter(RxJava2MaybeToFluxAdapter.INSTANCE); } conversionService.addConverter(PublisherToMonoAdapter.INSTANCE); conversionService.addConverter(PublisherToFluxAdapter.INSTANCE); if (ReactiveTypes.isAvailable(ReactiveLibrary.RXJAVA1)) { conversionService.addConverter(RxJava1SingleToObservableAdapter.INSTANCE); conversionService.addConverter(RxJava1ObservableToSingleAdapter.INSTANCE); } if (ReactiveTypes.isAvailable(ReactiveLibrary.RXJAVA2)) { conversionService.addConverter(RxJava2SingleToObservableAdapter.INSTANCE); conversionService.addConverter(RxJava2ObservableToSingleAdapter.INSTANCE); conversionService.addConverter(RxJava2ObservableToMaybeAdapter.INSTANCE); } } }
[ "static", "void", "registerIn", "(", "ConversionService", "conversionService", ")", "{", "LettuceAssert", ".", "notNull", "(", "conversionService", ",", "\"ConversionService must not be null!\"", ")", ";", "if", "(", "ReactiveTypes", ".", "isAvailable", "(", "ReactiveLibrary", ".", "PROJECT_REACTOR", ")", ")", "{", "if", "(", "ReactiveTypes", ".", "isAvailable", "(", "ReactiveLibrary", ".", "RXJAVA1", ")", ")", "{", "conversionService", ".", "addConverter", "(", "PublisherToRxJava1CompletableAdapter", ".", "INSTANCE", ")", ";", "conversionService", ".", "addConverter", "(", "RxJava1CompletableToPublisherAdapter", ".", "INSTANCE", ")", ";", "conversionService", ".", "addConverter", "(", "RxJava1CompletableToMonoAdapter", ".", "INSTANCE", ")", ";", "conversionService", ".", "addConverter", "(", "PublisherToRxJava1SingleAdapter", ".", "INSTANCE", ")", ";", "conversionService", ".", "addConverter", "(", "RxJava1SingleToPublisherAdapter", ".", "INSTANCE", ")", ";", "conversionService", ".", "addConverter", "(", "RxJava1SingleToMonoAdapter", ".", "INSTANCE", ")", ";", "conversionService", ".", "addConverter", "(", "RxJava1SingleToFluxAdapter", ".", "INSTANCE", ")", ";", "conversionService", ".", "addConverter", "(", "PublisherToRxJava1ObservableAdapter", ".", "INSTANCE", ")", ";", "conversionService", ".", "addConverter", "(", "RxJava1ObservableToPublisherAdapter", ".", "INSTANCE", ")", ";", "conversionService", ".", "addConverter", "(", "RxJava1ObservableToMonoAdapter", ".", "INSTANCE", ")", ";", "conversionService", ".", "addConverter", "(", "RxJava1ObservableToFluxAdapter", ".", "INSTANCE", ")", ";", "}", "if", "(", "ReactiveTypes", ".", "isAvailable", "(", "ReactiveLibrary", ".", "RXJAVA2", ")", ")", "{", "conversionService", ".", "addConverter", "(", "PublisherToRxJava2CompletableAdapter", ".", "INSTANCE", ")", ";", "conversionService", ".", "addConverter", "(", "RxJava2CompletableToPublisherAdapter", ".", "INSTANCE", ")", ";", "conversionService", ".", "addConverter", "(", "RxJava2CompletableToMonoAdapter", ".", "INSTANCE", ")", ";", "conversionService", ".", "addConverter", "(", "PublisherToRxJava2SingleAdapter", ".", "INSTANCE", ")", ";", "conversionService", ".", "addConverter", "(", "RxJava2SingleToPublisherAdapter", ".", "INSTANCE", ")", ";", "conversionService", ".", "addConverter", "(", "RxJava2SingleToMonoAdapter", ".", "INSTANCE", ")", ";", "conversionService", ".", "addConverter", "(", "RxJava2SingleToFluxAdapter", ".", "INSTANCE", ")", ";", "conversionService", ".", "addConverter", "(", "PublisherToRxJava2ObservableAdapter", ".", "INSTANCE", ")", ";", "conversionService", ".", "addConverter", "(", "RxJava2ObservableToPublisherAdapter", ".", "INSTANCE", ")", ";", "conversionService", ".", "addConverter", "(", "RxJava2ObservableToMonoAdapter", ".", "INSTANCE", ")", ";", "conversionService", ".", "addConverter", "(", "RxJava2ObservableToFluxAdapter", ".", "INSTANCE", ")", ";", "conversionService", ".", "addConverter", "(", "PublisherToRxJava2FlowableAdapter", ".", "INSTANCE", ")", ";", "conversionService", ".", "addConverter", "(", "RxJava2FlowableToPublisherAdapter", ".", "INSTANCE", ")", ";", "conversionService", ".", "addConverter", "(", "PublisherToRxJava2MaybeAdapter", ".", "INSTANCE", ")", ";", "conversionService", ".", "addConverter", "(", "RxJava2MaybeToPublisherAdapter", ".", "INSTANCE", ")", ";", "conversionService", ".", "addConverter", "(", "RxJava2MaybeToMonoAdapter", ".", "INSTANCE", ")", ";", "conversionService", ".", "addConverter", "(", 
"RxJava2MaybeToFluxAdapter", ".", "INSTANCE", ")", ";", "}", "conversionService", ".", "addConverter", "(", "PublisherToMonoAdapter", ".", "INSTANCE", ")", ";", "conversionService", ".", "addConverter", "(", "PublisherToFluxAdapter", ".", "INSTANCE", ")", ";", "if", "(", "ReactiveTypes", ".", "isAvailable", "(", "ReactiveLibrary", ".", "RXJAVA1", ")", ")", "{", "conversionService", ".", "addConverter", "(", "RxJava1SingleToObservableAdapter", ".", "INSTANCE", ")", ";", "conversionService", ".", "addConverter", "(", "RxJava1ObservableToSingleAdapter", ".", "INSTANCE", ")", ";", "}", "if", "(", "ReactiveTypes", ".", "isAvailable", "(", "ReactiveLibrary", ".", "RXJAVA2", ")", ")", "{", "conversionService", ".", "addConverter", "(", "RxJava2SingleToObservableAdapter", ".", "INSTANCE", ")", ";", "conversionService", ".", "addConverter", "(", "RxJava2ObservableToSingleAdapter", ".", "INSTANCE", ")", ";", "conversionService", ".", "addConverter", "(", "RxJava2ObservableToMaybeAdapter", ".", "INSTANCE", ")", ";", "}", "}", "}" ]
Register adapters in the conversion service. @param conversionService
[ "Register", "adapters", "in", "the", "conversion", "service", "." ]
b6de74e384dea112e3656684ca3f50cdfd6c8e0d
https://github.com/lettuce-io/lettuce-core/blob/b6de74e384dea112e3656684ca3f50cdfd6c8e0d/src/main/java/io/lettuce/core/dynamic/ReactiveTypeAdapters.java#L46-L108
14,803
lettuce-io/lettuce-core
src/main/java/io/lettuce/core/Consumer.java
Consumer.from
public static <K> Consumer<K> from(K group, K name) { LettuceAssert.notNull(group, "Group must not be null"); LettuceAssert.notNull(name, "Name must not be null"); return new Consumer<>(group, name); }
java
public static <K> Consumer<K> from(K group, K name) { LettuceAssert.notNull(group, "Group must not be null"); LettuceAssert.notNull(name, "Name must not be null"); return new Consumer<>(group, name); }
[ "public", "static", "<", "K", ">", "Consumer", "<", "K", ">", "from", "(", "K", "group", ",", "K", "name", ")", "{", "LettuceAssert", ".", "notNull", "(", "group", ",", "\"Group must not be null\"", ")", ";", "LettuceAssert", ".", "notNull", "(", "name", ",", "\"Name must not be null\"", ")", ";", "return", "new", "Consumer", "<>", "(", "group", ",", "name", ")", ";", "}" ]
Create a new consumer. @param group name of the consumer group, must not be {@literal null} or empty. @param name name of the consumer, must not be {@literal null} or empty. @return the consumer {@link Consumer} object.
[ "Create", "a", "new", "consumer", "." ]
b6de74e384dea112e3656684ca3f50cdfd6c8e0d
https://github.com/lettuce-io/lettuce-core/blob/b6de74e384dea112e3656684ca3f50cdfd6c8e0d/src/main/java/io/lettuce/core/Consumer.java#L47-L53
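Since from(...) is a public static factory, usage is a one-liner; the group and consumer names below are placeholders.

import io.lettuce.core.Consumer;

class ConsumerExample {
    public static void main(String[] args) {
        // Both arguments are checked for null by the factory before the Consumer is created.
        Consumer<String> consumer = Consumer.from("my-group", "consumer-1");
        System.out.println(consumer);
    }
}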
14,804
ogaclejapan/SmartTabLayout
utils-v4/src/main/java/com/ogaclejapan/smarttablayout/utils/v4/Bundler.java
Bundler.putSize
@TargetApi(21) public Bundler putSize(String key, Size value) { bundle.putSize(key, value); return this; }
java
@TargetApi(21) public Bundler putSize(String key, Size value) { bundle.putSize(key, value); return this; }
[ "@", "TargetApi", "(", "21", ")", "public", "Bundler", "putSize", "(", "String", "key", ",", "Size", "value", ")", "{", "bundle", ".", "putSize", "(", "key", ",", "value", ")", ";", "return", "this", ";", "}" ]
Inserts a Size value into the mapping of this Bundle, replacing any existing value for the given key. Either key or value may be null. @param key a String, or null @param value a Size object, or null @return this
[ "Inserts", "a", "Size", "value", "into", "the", "mapping", "of", "this", "Bundle", "replacing", "any", "existing", "value", "for", "the", "given", "key", ".", "Either", "key", "or", "value", "may", "be", "null", "." ]
712e81a92f1e12a3c33dcbda03d813e0162e8589
https://github.com/ogaclejapan/SmartTabLayout/blob/712e81a92f1e12a3c33dcbda03d813e0162e8589/utils-v4/src/main/java/com/ogaclejapan/smarttablayout/utils/v4/Bundler.java#L153-L157
14,805
ogaclejapan/SmartTabLayout
utils-v4/src/main/java/com/ogaclejapan/smarttablayout/utils/v4/Bundler.java
Bundler.putSizeF
@TargetApi(21) public Bundler putSizeF(String key, SizeF value) { bundle.putSizeF(key, value); return this; }
java
@TargetApi(21) public Bundler putSizeF(String key, SizeF value) { bundle.putSizeF(key, value); return this; }
[ "@", "TargetApi", "(", "21", ")", "public", "Bundler", "putSizeF", "(", "String", "key", ",", "SizeF", "value", ")", "{", "bundle", ".", "putSizeF", "(", "key", ",", "value", ")", ";", "return", "this", ";", "}" ]
Inserts a SizeF value into the mapping of this Bundle, replacing any existing value for the given key. Either key or value may be null. @param key a String, or null @param value a SizeF object, or null @return this
[ "Inserts", "a", "SizeF", "value", "into", "the", "mapping", "of", "this", "Bundle", "replacing", "any", "existing", "value", "for", "the", "given", "key", ".", "Either", "key", "or", "value", "may", "be", "null", "." ]
712e81a92f1e12a3c33dcbda03d813e0162e8589
https://github.com/ogaclejapan/SmartTabLayout/blob/712e81a92f1e12a3c33dcbda03d813e0162e8589/utils-v4/src/main/java/com/ogaclejapan/smarttablayout/utils/v4/Bundler.java#L167-L171
14,806
ogaclejapan/SmartTabLayout
utils-v4/src/main/java/com/ogaclejapan/smarttablayout/utils/v4/Bundler.java
Bundler.into
public <T extends Fragment> T into(T fragment) { fragment.setArguments(get()); return fragment; }
java
public <T extends Fragment> T into(T fragment) { fragment.setArguments(get()); return fragment; }
[ "public", "<", "T", "extends", "Fragment", ">", "T", "into", "(", "T", "fragment", ")", "{", "fragment", ".", "setArguments", "(", "get", "(", ")", ")", ";", "return", "fragment", ";", "}" ]
Set the argument of Fragment. @param fragment a fragment @return a fragment
[ "Set", "the", "argument", "of", "Fragment", "." ]
712e81a92f1e12a3c33dcbda03d813e0162e8589
https://github.com/ogaclejapan/SmartTabLayout/blob/712e81a92f1e12a3c33dcbda03d813e0162e8589/utils-v4/src/main/java/com/ogaclejapan/smarttablayout/utils/v4/Bundler.java#L515-L518
14,807
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog2/shared/rest/documentation/generator/Generator.java
Generator.cleanRoute
private String cleanRoute(String route) { if (!route.startsWith("/")) { route = "/" + route; } if (route.endsWith("/")) { route = route.substring(0, route.length() - 1); } return route; }
java
private String cleanRoute(String route) { if (!route.startsWith("/")) { route = "/" + route; } if (route.endsWith("/")) { route = route.substring(0, route.length() - 1); } return route; }
[ "private", "String", "cleanRoute", "(", "String", "route", ")", "{", "if", "(", "!", "route", ".", "startsWith", "(", "\"/\"", ")", ")", "{", "route", "=", "\"/\"", "+", "route", ";", "}", "if", "(", "route", ".", "endsWith", "(", "\"/\"", ")", ")", "{", "route", "=", "route", ".", "substring", "(", "0", ",", "route", ".", "length", "(", ")", "-", "1", ")", ";", "}", "return", "route", ";", "}" ]
Leading slash but no trailing.
[ "Leading", "slash", "but", "no", "trailing", "." ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog2/shared/rest/documentation/generator/Generator.java#L369-L379
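The docstring is terse ("Leading slash but no trailing."): the method normalizes a route so it always starts with "/" and never ends with one. A minimal standalone sketch of the same normalization, with example inputs:

class RouteNormalizer {
    static String cleanRoute(String route) {
        if (!route.startsWith("/")) {
            route = "/" + route;                            // ensure leading slash
        }
        if (route.endsWith("/")) {
            route = route.substring(0, route.length() - 1); // drop trailing slash
        }
        return route;
    }

    public static void main(String[] args) {
        System.out.println(cleanRoute("system/inputs/")); // /system/inputs
        System.out.println(cleanRoute("/cluster"));       // /cluster
    }
}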
14,808
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog2/shared/buffers/RawMessageEvent.java
RawMessageEvent.getMessageId
public UUID getMessageId() { final ByteBuffer wrap = ByteBuffer.wrap(messageIdBytes); return new UUID(wrap.asLongBuffer().get(0), wrap.asLongBuffer().get(1)); }
java
public UUID getMessageId() { final ByteBuffer wrap = ByteBuffer.wrap(messageIdBytes); return new UUID(wrap.asLongBuffer().get(0), wrap.asLongBuffer().get(1)); }
[ "public", "UUID", "getMessageId", "(", ")", "{", "final", "ByteBuffer", "wrap", "=", "ByteBuffer", ".", "wrap", "(", "messageIdBytes", ")", ";", "return", "new", "UUID", "(", "wrap", ".", "asLongBuffer", "(", ")", ".", "get", "(", "0", ")", ",", "wrap", ".", "asLongBuffer", "(", ")", ".", "get", "(", "1", ")", ")", ";", "}" ]
performance doesn't matter, it's only being called during tracing
[ "performance", "doesn", "t", "matter", "it", "s", "only", "being", "called", "during", "tracing" ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog2/shared/buffers/RawMessageEvent.java#L83-L86
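The getter rebuilds a UUID from a 16-byte array by viewing it as two longs. A self-contained sketch of that conversion (plus the matching encode step) using only the JDK:

import java.nio.ByteBuffer;
import java.util.UUID;

class UuidBytes {
    static byte[] toBytes(UUID uuid) {
        return ByteBuffer.allocate(16)
                .putLong(uuid.getMostSignificantBits())
                .putLong(uuid.getLeastSignificantBits())
                .array();
    }

    static UUID fromBytes(byte[] bytes) {
        final ByteBuffer wrap = ByteBuffer.wrap(bytes);
        // Long 0 holds the most significant bits, long 1 the least significant bits.
        return new UUID(wrap.asLongBuffer().get(0), wrap.asLongBuffer().get(1));
    }

    public static void main(String[] args) {
        UUID id = UUID.randomUUID();
        System.out.println(id.equals(fromBytes(toBytes(id)))); // true
    }
}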
14,809
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog/plugins/pipelineprocessor/parser/PipelineRuleParser.java
PipelineRuleParser.parseRule
public Rule parseRule(String id, String rule, boolean silent, PipelineClassloader ruleClassLoader) throws ParseException { final ParseContext parseContext = new ParseContext(silent); final SyntaxErrorListener errorListener = new SyntaxErrorListener(parseContext); final RuleLangLexer lexer = new RuleLangLexer(new ANTLRInputStream(rule)); lexer.removeErrorListeners(); lexer.addErrorListener(errorListener); final RuleLangParser parser = new RuleLangParser(new CommonTokenStream(lexer)); parser.setErrorHandler(new DefaultErrorStrategy()); parser.removeErrorListeners(); parser.addErrorListener(errorListener); final RuleLangParser.RuleDeclarationContext ruleDeclaration = parser.ruleDeclaration(); // parsing stages: // 1. build AST nodes, checks for invalid var, function refs // 2. type annotator: infer type information from var refs, func refs // 3. checker: static type check w/ coercion nodes // 4. optimizer: TODO WALKER.walk(new RuleAstBuilder(parseContext), ruleDeclaration); WALKER.walk(new RuleTypeAnnotator(parseContext), ruleDeclaration); WALKER.walk(new RuleTypeChecker(parseContext), ruleDeclaration); if (parseContext.getErrors().isEmpty()) { Rule parsedRule = parseContext.getRules().get(0).withId(id); if (ruleClassLoader != null && ConfigurationStateUpdater.isAllowCodeGeneration()) { try { final Class<? extends GeneratedRule> generatedClass = codeGenerator.generateCompiledRule(parsedRule, ruleClassLoader); if (generatedClass != null) { parsedRule = parsedRule.toBuilder().generatedRuleClass(generatedClass).build(); } } catch (Exception e) { log.warn("Unable to compile rule {} to native code, falling back to interpreting it: {}", parsedRule.name(), e.getMessage()); } } return parsedRule; } throw new ParseException(parseContext.getErrors()); }
java
public Rule parseRule(String id, String rule, boolean silent, PipelineClassloader ruleClassLoader) throws ParseException { final ParseContext parseContext = new ParseContext(silent); final SyntaxErrorListener errorListener = new SyntaxErrorListener(parseContext); final RuleLangLexer lexer = new RuleLangLexer(new ANTLRInputStream(rule)); lexer.removeErrorListeners(); lexer.addErrorListener(errorListener); final RuleLangParser parser = new RuleLangParser(new CommonTokenStream(lexer)); parser.setErrorHandler(new DefaultErrorStrategy()); parser.removeErrorListeners(); parser.addErrorListener(errorListener); final RuleLangParser.RuleDeclarationContext ruleDeclaration = parser.ruleDeclaration(); // parsing stages: // 1. build AST nodes, checks for invalid var, function refs // 2. type annotator: infer type information from var refs, func refs // 3. checker: static type check w/ coercion nodes // 4. optimizer: TODO WALKER.walk(new RuleAstBuilder(parseContext), ruleDeclaration); WALKER.walk(new RuleTypeAnnotator(parseContext), ruleDeclaration); WALKER.walk(new RuleTypeChecker(parseContext), ruleDeclaration); if (parseContext.getErrors().isEmpty()) { Rule parsedRule = parseContext.getRules().get(0).withId(id); if (ruleClassLoader != null && ConfigurationStateUpdater.isAllowCodeGeneration()) { try { final Class<? extends GeneratedRule> generatedClass = codeGenerator.generateCompiledRule(parsedRule, ruleClassLoader); if (generatedClass != null) { parsedRule = parsedRule.toBuilder().generatedRuleClass(generatedClass).build(); } } catch (Exception e) { log.warn("Unable to compile rule {} to native code, falling back to interpreting it: {}", parsedRule.name(), e.getMessage()); } } return parsedRule; } throw new ParseException(parseContext.getErrors()); }
[ "public", "Rule", "parseRule", "(", "String", "id", ",", "String", "rule", ",", "boolean", "silent", ",", "PipelineClassloader", "ruleClassLoader", ")", "throws", "ParseException", "{", "final", "ParseContext", "parseContext", "=", "new", "ParseContext", "(", "silent", ")", ";", "final", "SyntaxErrorListener", "errorListener", "=", "new", "SyntaxErrorListener", "(", "parseContext", ")", ";", "final", "RuleLangLexer", "lexer", "=", "new", "RuleLangLexer", "(", "new", "ANTLRInputStream", "(", "rule", ")", ")", ";", "lexer", ".", "removeErrorListeners", "(", ")", ";", "lexer", ".", "addErrorListener", "(", "errorListener", ")", ";", "final", "RuleLangParser", "parser", "=", "new", "RuleLangParser", "(", "new", "CommonTokenStream", "(", "lexer", ")", ")", ";", "parser", ".", "setErrorHandler", "(", "new", "DefaultErrorStrategy", "(", ")", ")", ";", "parser", ".", "removeErrorListeners", "(", ")", ";", "parser", ".", "addErrorListener", "(", "errorListener", ")", ";", "final", "RuleLangParser", ".", "RuleDeclarationContext", "ruleDeclaration", "=", "parser", ".", "ruleDeclaration", "(", ")", ";", "// parsing stages:", "// 1. build AST nodes, checks for invalid var, function refs", "// 2. type annotator: infer type information from var refs, func refs", "// 3. checker: static type check w/ coercion nodes", "// 4. optimizer: TODO", "WALKER", ".", "walk", "(", "new", "RuleAstBuilder", "(", "parseContext", ")", ",", "ruleDeclaration", ")", ";", "WALKER", ".", "walk", "(", "new", "RuleTypeAnnotator", "(", "parseContext", ")", ",", "ruleDeclaration", ")", ";", "WALKER", ".", "walk", "(", "new", "RuleTypeChecker", "(", "parseContext", ")", ",", "ruleDeclaration", ")", ";", "if", "(", "parseContext", ".", "getErrors", "(", ")", ".", "isEmpty", "(", ")", ")", "{", "Rule", "parsedRule", "=", "parseContext", ".", "getRules", "(", ")", ".", "get", "(", "0", ")", ".", "withId", "(", "id", ")", ";", "if", "(", "ruleClassLoader", "!=", "null", "&&", "ConfigurationStateUpdater", ".", "isAllowCodeGeneration", "(", ")", ")", "{", "try", "{", "final", "Class", "<", "?", "extends", "GeneratedRule", ">", "generatedClass", "=", "codeGenerator", ".", "generateCompiledRule", "(", "parsedRule", ",", "ruleClassLoader", ")", ";", "if", "(", "generatedClass", "!=", "null", ")", "{", "parsedRule", "=", "parsedRule", ".", "toBuilder", "(", ")", ".", "generatedRuleClass", "(", "generatedClass", ")", ".", "build", "(", ")", ";", "}", "}", "catch", "(", "Exception", "e", ")", "{", "log", ".", "warn", "(", "\"Unable to compile rule {} to native code, falling back to interpreting it: {}\"", ",", "parsedRule", ".", "name", "(", ")", ",", "e", ".", "getMessage", "(", ")", ")", ";", "}", "}", "return", "parsedRule", ";", "}", "throw", "new", "ParseException", "(", "parseContext", ".", "getErrors", "(", ")", ")", ";", "}" ]
Parses the given rule source and optionally generates a Java class for it if the classloader is not null. @param id the id of the rule, necessary to generate code @param rule rule source code @param silent don't emit status messages during parsing @param ruleClassLoader the classloader to load the generated code into (can be null) @return the parse rule @throws ParseException if a one or more parse errors occur
[ "Parses", "the", "given", "rule", "source", "and", "optionally", "generates", "a", "Java", "class", "for", "it", "if", "the", "classloader", "is", "not", "null", "." ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog/plugins/pipelineprocessor/parser/PipelineRuleParser.java#L147-L188
14,810
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog/plugins/pipelineprocessor/processors/PipelineInterpreter.java
PipelineInterpreter.process
public Messages process(Messages messages, InterpreterListener interpreterListener, State state) { interpreterListener.startProcessing(); // message id + stream id final Set<Tuple2<String, String>> processingBlacklist = Sets.newHashSet(); final List<Message> toProcess = Lists.newArrayList(messages); final List<Message> fullyProcessed = Lists.newArrayListWithExpectedSize(toProcess.size()); while (!toProcess.isEmpty()) { final MessageCollection currentSet = new MessageCollection(toProcess); // we'll add them back below toProcess.clear(); for (Message message : currentSet) { final String msgId = message.getId(); // this makes a copy of the list, which is mutated later in updateStreamBlacklist // it serves as a worklist, to keep track of which <msg, stream> tuples need to be re-run again final Set<String> initialStreamIds = message.getStreams().stream().map(Stream::getId).collect(Collectors.toSet()); final ImmutableSet<Pipeline> pipelinesToRun = selectPipelines(interpreterListener, processingBlacklist, message, initialStreamIds, state.getStreamPipelineConnections()); toProcess.addAll(processForResolvedPipelines(message, msgId, pipelinesToRun, interpreterListener, state)); // add each processed message-stream combination to the blacklist set and figure out if the processing // has added a stream to the message, in which case we need to cycle and determine whether to process // its pipeline connections, too boolean addedStreams = updateStreamBlacklist(processingBlacklist, message, initialStreamIds); potentiallyDropFilteredMessage(message); // go to 1 and iterate over all messages again until no more streams are being assigned if (!addedStreams || message.getFilterOut()) { log.debug("[{}] no new streams matches or dropped message, not running again", msgId); fullyProcessed.add(message); } else { // process again, we've added a stream log.debug("[{}] new streams assigned, running again for those streams", msgId); toProcess.add(message); } } } interpreterListener.finishProcessing(); // 7. return the processed messages return new MessageCollection(fullyProcessed); }
java
public Messages process(Messages messages, InterpreterListener interpreterListener, State state) { interpreterListener.startProcessing(); // message id + stream id final Set<Tuple2<String, String>> processingBlacklist = Sets.newHashSet(); final List<Message> toProcess = Lists.newArrayList(messages); final List<Message> fullyProcessed = Lists.newArrayListWithExpectedSize(toProcess.size()); while (!toProcess.isEmpty()) { final MessageCollection currentSet = new MessageCollection(toProcess); // we'll add them back below toProcess.clear(); for (Message message : currentSet) { final String msgId = message.getId(); // this makes a copy of the list, which is mutated later in updateStreamBlacklist // it serves as a worklist, to keep track of which <msg, stream> tuples need to be re-run again final Set<String> initialStreamIds = message.getStreams().stream().map(Stream::getId).collect(Collectors.toSet()); final ImmutableSet<Pipeline> pipelinesToRun = selectPipelines(interpreterListener, processingBlacklist, message, initialStreamIds, state.getStreamPipelineConnections()); toProcess.addAll(processForResolvedPipelines(message, msgId, pipelinesToRun, interpreterListener, state)); // add each processed message-stream combination to the blacklist set and figure out if the processing // has added a stream to the message, in which case we need to cycle and determine whether to process // its pipeline connections, too boolean addedStreams = updateStreamBlacklist(processingBlacklist, message, initialStreamIds); potentiallyDropFilteredMessage(message); // go to 1 and iterate over all messages again until no more streams are being assigned if (!addedStreams || message.getFilterOut()) { log.debug("[{}] no new streams matches or dropped message, not running again", msgId); fullyProcessed.add(message); } else { // process again, we've added a stream log.debug("[{}] new streams assigned, running again for those streams", msgId); toProcess.add(message); } } } interpreterListener.finishProcessing(); // 7. return the processed messages return new MessageCollection(fullyProcessed); }
[ "public", "Messages", "process", "(", "Messages", "messages", ",", "InterpreterListener", "interpreterListener", ",", "State", "state", ")", "{", "interpreterListener", ".", "startProcessing", "(", ")", ";", "// message id + stream id", "final", "Set", "<", "Tuple2", "<", "String", ",", "String", ">", ">", "processingBlacklist", "=", "Sets", ".", "newHashSet", "(", ")", ";", "final", "List", "<", "Message", ">", "toProcess", "=", "Lists", ".", "newArrayList", "(", "messages", ")", ";", "final", "List", "<", "Message", ">", "fullyProcessed", "=", "Lists", ".", "newArrayListWithExpectedSize", "(", "toProcess", ".", "size", "(", ")", ")", ";", "while", "(", "!", "toProcess", ".", "isEmpty", "(", ")", ")", "{", "final", "MessageCollection", "currentSet", "=", "new", "MessageCollection", "(", "toProcess", ")", ";", "// we'll add them back below", "toProcess", ".", "clear", "(", ")", ";", "for", "(", "Message", "message", ":", "currentSet", ")", "{", "final", "String", "msgId", "=", "message", ".", "getId", "(", ")", ";", "// this makes a copy of the list, which is mutated later in updateStreamBlacklist", "// it serves as a worklist, to keep track of which <msg, stream> tuples need to be re-run again", "final", "Set", "<", "String", ">", "initialStreamIds", "=", "message", ".", "getStreams", "(", ")", ".", "stream", "(", ")", ".", "map", "(", "Stream", "::", "getId", ")", ".", "collect", "(", "Collectors", ".", "toSet", "(", ")", ")", ";", "final", "ImmutableSet", "<", "Pipeline", ">", "pipelinesToRun", "=", "selectPipelines", "(", "interpreterListener", ",", "processingBlacklist", ",", "message", ",", "initialStreamIds", ",", "state", ".", "getStreamPipelineConnections", "(", ")", ")", ";", "toProcess", ".", "addAll", "(", "processForResolvedPipelines", "(", "message", ",", "msgId", ",", "pipelinesToRun", ",", "interpreterListener", ",", "state", ")", ")", ";", "// add each processed message-stream combination to the blacklist set and figure out if the processing", "// has added a stream to the message, in which case we need to cycle and determine whether to process", "// its pipeline connections, too", "boolean", "addedStreams", "=", "updateStreamBlacklist", "(", "processingBlacklist", ",", "message", ",", "initialStreamIds", ")", ";", "potentiallyDropFilteredMessage", "(", "message", ")", ";", "// go to 1 and iterate over all messages again until no more streams are being assigned", "if", "(", "!", "addedStreams", "||", "message", ".", "getFilterOut", "(", ")", ")", "{", "log", ".", "debug", "(", "\"[{}] no new streams matches or dropped message, not running again\"", ",", "msgId", ")", ";", "fullyProcessed", ".", "add", "(", "message", ")", ";", "}", "else", "{", "// process again, we've added a stream", "log", ".", "debug", "(", "\"[{}] new streams assigned, running again for those streams\"", ",", "msgId", ")", ";", "toProcess", ".", "add", "(", "message", ")", ";", "}", "}", "}", "interpreterListener", ".", "finishProcessing", "(", ")", ";", "// 7. return the processed messages", "return", "new", "MessageCollection", "(", "fullyProcessed", ")", ";", "}" ]
Evaluates all pipelines that apply to the given messages, based on the current stream routing of the messages. The processing loops on each single message (passed in or created by pipelines) until the set of streams does not change anymore. No cycle detection is performed. @param messages the messages to process through the pipelines @param interpreterListener a listener which gets called for each processing stage (e.g. to trace execution) @param state the pipeline/stage/rule/stream connection state to use during processing @return the processed messages
[ "Evaluates", "all", "pipelines", "that", "apply", "to", "the", "given", "messages", "based", "on", "the", "current", "stream", "routing", "of", "the", "messages", "." ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog/plugins/pipelineprocessor/processors/PipelineInterpreter.java#L117-L168
14,811
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog/plugins/pipelineprocessor/processors/PipelineInterpreter.java
PipelineInterpreter.processForPipelines
public List<Message> processForPipelines(Message message, Set<String> pipelineIds, InterpreterListener interpreterListener, State state) { final Map<String, Pipeline> currentPipelines = state.getCurrentPipelines(); final ImmutableSet<Pipeline> pipelinesToRun = pipelineIds.stream() .map(currentPipelines::get) .filter(Objects::nonNull) .collect(ImmutableSet.toImmutableSet()); return processForResolvedPipelines(message, message.getId(), pipelinesToRun, interpreterListener, state); }
java
public List<Message> processForPipelines(Message message, Set<String> pipelineIds, InterpreterListener interpreterListener, State state) { final Map<String, Pipeline> currentPipelines = state.getCurrentPipelines(); final ImmutableSet<Pipeline> pipelinesToRun = pipelineIds.stream() .map(currentPipelines::get) .filter(Objects::nonNull) .collect(ImmutableSet.toImmutableSet()); return processForResolvedPipelines(message, message.getId(), pipelinesToRun, interpreterListener, state); }
[ "public", "List", "<", "Message", ">", "processForPipelines", "(", "Message", "message", ",", "Set", "<", "String", ">", "pipelineIds", ",", "InterpreterListener", "interpreterListener", ",", "State", "state", ")", "{", "final", "Map", "<", "String", ",", "Pipeline", ">", "currentPipelines", "=", "state", ".", "getCurrentPipelines", "(", ")", ";", "final", "ImmutableSet", "<", "Pipeline", ">", "pipelinesToRun", "=", "pipelineIds", ".", "stream", "(", ")", ".", "map", "(", "currentPipelines", "::", "get", ")", ".", "filter", "(", "Objects", "::", "nonNull", ")", ".", "collect", "(", "ImmutableSet", ".", "toImmutableSet", "(", ")", ")", ";", "return", "processForResolvedPipelines", "(", "message", ",", "message", ".", "getId", "(", ")", ",", "pipelinesToRun", ",", "interpreterListener", ",", "state", ")", ";", "}" ]
Given a set of pipeline ids, process the given message according to the passed state. This method returns the list of messages produced by the configuration in state, it does not look at the database or any other external resource besides what is being passed as parameters. This can be used to simulate pipelines without having to store them in the database. @param message the message to process @param pipelineIds the ids of the pipelines to resolve and run the message through @param interpreterListener the listener tracing the execution @param state the pipeline/stage/rule state to interpret @return the list of messages created during the interpreter run
[ "Given", "a", "set", "of", "pipeline", "ids", "process", "the", "given", "message", "according", "to", "the", "passed", "state", "." ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog/plugins/pipelineprocessor/processors/PipelineInterpreter.java#L233-L244
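The id-to-pipeline resolution step is a map lookup that silently drops unknown ids. A JDK-only sketch of the same pattern, substituting an unmodifiable JDK set for Guava's ImmutableSet and plain strings for pipeline objects:

import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;

class PipelineResolution {
    public static void main(String[] args) {
        Map<String, String> currentPipelines = Map.of("p1", "Pipeline One", "p2", "Pipeline Two");
        Set<String> requestedIds = Set.of("p1", "p2", "missing");

        // Unknown ids resolve to null and are filtered out rather than failing.
        Set<String> pipelinesToRun = requestedIds.stream()
                .map(currentPipelines::get)
                .filter(Objects::nonNull)
                .collect(Collectors.toUnmodifiableSet());

        System.out.println(pipelinesToRun); // [Pipeline One, Pipeline Two] in some order
    }
}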
14,812
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog2/streams/StreamRouterEngine.java
StreamRouterEngine.match
public List<Stream> match(Message message) { final Set<Stream> result = Sets.newHashSet(); final Set<Stream> blackList = Sets.newHashSet(); for (final Rule rule : rulesList) { if (blackList.contains(rule.getStream())) { continue; } final StreamRule streamRule = rule.getStreamRule(); final StreamRuleType streamRuleType = streamRule.getType(); final Stream.MatchingType matchingType = rule.getMatchingType(); if (!ruleTypesNotNeedingFieldPresence.contains(streamRuleType) && !message.hasField(streamRule.getField())) { if (matchingType == Stream.MatchingType.AND) { result.remove(rule.getStream()); // blacklist stream because it can't match anymore blackList.add(rule.getStream()); } continue; } final Stream stream; if (streamRuleType != StreamRuleType.REGEX) { stream = rule.match(message); } else { stream = rule.matchWithTimeOut(message, streamProcessingTimeout, TimeUnit.MILLISECONDS); } if (stream == null) { if (matchingType == Stream.MatchingType.AND) { result.remove(rule.getStream()); // blacklist stream because it can't match anymore blackList.add(rule.getStream()); } } else { result.add(stream); if (matchingType == Stream.MatchingType.OR) { // blacklist stream because it is already matched blackList.add(rule.getStream()); } } } final Stream defaultStream = defaultStreamProvider.get(); boolean alreadyRemovedDefaultStream = false; for (Stream stream : result) { streamMetrics.markIncomingMeter(stream.getId()); if (stream.getRemoveMatchesFromDefaultStream()) { if (alreadyRemovedDefaultStream || message.removeStream(defaultStream)) { alreadyRemovedDefaultStream = true; if (LOG.isTraceEnabled()) { LOG.trace("Successfully removed default stream <{}> from message <{}>", defaultStream.getId(), message.getId()); } } else { if (LOG.isWarnEnabled()) { LOG.warn("Couldn't remove default stream <{}> from message <{}>", defaultStream.getId(), message.getId()); } } } } // either the message stayed on the default stream, in which case we mark that stream's throughput, // or someone removed it, in which case we don't mark it. if (!alreadyRemovedDefaultStream) { streamMetrics.markIncomingMeter(defaultStream.getId()); } return ImmutableList.copyOf(result); }
java
public List<Stream> match(Message message) { final Set<Stream> result = Sets.newHashSet(); final Set<Stream> blackList = Sets.newHashSet(); for (final Rule rule : rulesList) { if (blackList.contains(rule.getStream())) { continue; } final StreamRule streamRule = rule.getStreamRule(); final StreamRuleType streamRuleType = streamRule.getType(); final Stream.MatchingType matchingType = rule.getMatchingType(); if (!ruleTypesNotNeedingFieldPresence.contains(streamRuleType) && !message.hasField(streamRule.getField())) { if (matchingType == Stream.MatchingType.AND) { result.remove(rule.getStream()); // blacklist stream because it can't match anymore blackList.add(rule.getStream()); } continue; } final Stream stream; if (streamRuleType != StreamRuleType.REGEX) { stream = rule.match(message); } else { stream = rule.matchWithTimeOut(message, streamProcessingTimeout, TimeUnit.MILLISECONDS); } if (stream == null) { if (matchingType == Stream.MatchingType.AND) { result.remove(rule.getStream()); // blacklist stream because it can't match anymore blackList.add(rule.getStream()); } } else { result.add(stream); if (matchingType == Stream.MatchingType.OR) { // blacklist stream because it is already matched blackList.add(rule.getStream()); } } } final Stream defaultStream = defaultStreamProvider.get(); boolean alreadyRemovedDefaultStream = false; for (Stream stream : result) { streamMetrics.markIncomingMeter(stream.getId()); if (stream.getRemoveMatchesFromDefaultStream()) { if (alreadyRemovedDefaultStream || message.removeStream(defaultStream)) { alreadyRemovedDefaultStream = true; if (LOG.isTraceEnabled()) { LOG.trace("Successfully removed default stream <{}> from message <{}>", defaultStream.getId(), message.getId()); } } else { if (LOG.isWarnEnabled()) { LOG.warn("Couldn't remove default stream <{}> from message <{}>", defaultStream.getId(), message.getId()); } } } } // either the message stayed on the default stream, in which case we mark that stream's throughput, // or someone removed it, in which case we don't mark it. if (!alreadyRemovedDefaultStream) { streamMetrics.markIncomingMeter(defaultStream.getId()); } return ImmutableList.copyOf(result); }
[ "public", "List", "<", "Stream", ">", "match", "(", "Message", "message", ")", "{", "final", "Set", "<", "Stream", ">", "result", "=", "Sets", ".", "newHashSet", "(", ")", ";", "final", "Set", "<", "Stream", ">", "blackList", "=", "Sets", ".", "newHashSet", "(", ")", ";", "for", "(", "final", "Rule", "rule", ":", "rulesList", ")", "{", "if", "(", "blackList", ".", "contains", "(", "rule", ".", "getStream", "(", ")", ")", ")", "{", "continue", ";", "}", "final", "StreamRule", "streamRule", "=", "rule", ".", "getStreamRule", "(", ")", ";", "final", "StreamRuleType", "streamRuleType", "=", "streamRule", ".", "getType", "(", ")", ";", "final", "Stream", ".", "MatchingType", "matchingType", "=", "rule", ".", "getMatchingType", "(", ")", ";", "if", "(", "!", "ruleTypesNotNeedingFieldPresence", ".", "contains", "(", "streamRuleType", ")", "&&", "!", "message", ".", "hasField", "(", "streamRule", ".", "getField", "(", ")", ")", ")", "{", "if", "(", "matchingType", "==", "Stream", ".", "MatchingType", ".", "AND", ")", "{", "result", ".", "remove", "(", "rule", ".", "getStream", "(", ")", ")", ";", "// blacklist stream because it can't match anymore", "blackList", ".", "add", "(", "rule", ".", "getStream", "(", ")", ")", ";", "}", "continue", ";", "}", "final", "Stream", "stream", ";", "if", "(", "streamRuleType", "!=", "StreamRuleType", ".", "REGEX", ")", "{", "stream", "=", "rule", ".", "match", "(", "message", ")", ";", "}", "else", "{", "stream", "=", "rule", ".", "matchWithTimeOut", "(", "message", ",", "streamProcessingTimeout", ",", "TimeUnit", ".", "MILLISECONDS", ")", ";", "}", "if", "(", "stream", "==", "null", ")", "{", "if", "(", "matchingType", "==", "Stream", ".", "MatchingType", ".", "AND", ")", "{", "result", ".", "remove", "(", "rule", ".", "getStream", "(", ")", ")", ";", "// blacklist stream because it can't match anymore", "blackList", ".", "add", "(", "rule", ".", "getStream", "(", ")", ")", ";", "}", "}", "else", "{", "result", ".", "add", "(", "stream", ")", ";", "if", "(", "matchingType", "==", "Stream", ".", "MatchingType", ".", "OR", ")", "{", "// blacklist stream because it is already matched", "blackList", ".", "add", "(", "rule", ".", "getStream", "(", ")", ")", ";", "}", "}", "}", "final", "Stream", "defaultStream", "=", "defaultStreamProvider", ".", "get", "(", ")", ";", "boolean", "alreadyRemovedDefaultStream", "=", "false", ";", "for", "(", "Stream", "stream", ":", "result", ")", "{", "streamMetrics", ".", "markIncomingMeter", "(", "stream", ".", "getId", "(", ")", ")", ";", "if", "(", "stream", ".", "getRemoveMatchesFromDefaultStream", "(", ")", ")", "{", "if", "(", "alreadyRemovedDefaultStream", "||", "message", ".", "removeStream", "(", "defaultStream", ")", ")", "{", "alreadyRemovedDefaultStream", "=", "true", ";", "if", "(", "LOG", ".", "isTraceEnabled", "(", ")", ")", "{", "LOG", ".", "trace", "(", "\"Successfully removed default stream <{}> from message <{}>\"", ",", "defaultStream", ".", "getId", "(", ")", ",", "message", ".", "getId", "(", ")", ")", ";", "}", "}", "else", "{", "if", "(", "LOG", ".", "isWarnEnabled", "(", ")", ")", "{", "LOG", ".", "warn", "(", "\"Couldn't remove default stream <{}> from message <{}>\"", ",", "defaultStream", ".", "getId", "(", ")", ",", "message", ".", "getId", "(", ")", ")", ";", "}", "}", "}", "}", "// either the message stayed on the default stream, in which case we mark that stream's throughput,", "// or someone removed it, in which case we don't mark it.", "if", "(", "!", "alreadyRemovedDefaultStream", ")", "{", 
"streamMetrics", ".", "markIncomingMeter", "(", "defaultStream", ".", "getId", "(", ")", ")", ";", "}", "return", "ImmutableList", ".", "copyOf", "(", "result", ")", ";", "}" ]
Returns a list of matching streams for the given message. @param message the message @return the list of matching streams
[ "Returns", "a", "list", "of", "matching", "streams", "for", "the", "given", "message", "." ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog2/streams/StreamRouterEngine.java#L164-L233
14,813
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog/plugins/pipelineprocessor/ast/Stage.java
Stage.registerMetrics
public void registerMetrics(MetricRegistry metricRegistry, String pipelineId) { meterName = name(Pipeline.class, pipelineId, "stage", String.valueOf(stage()), "executed"); executed = metricRegistry.meter(meterName); }
java
public void registerMetrics(MetricRegistry metricRegistry, String pipelineId) { meterName = name(Pipeline.class, pipelineId, "stage", String.valueOf(stage()), "executed"); executed = metricRegistry.meter(meterName); }
[ "public", "void", "registerMetrics", "(", "MetricRegistry", "metricRegistry", ",", "String", "pipelineId", ")", "{", "meterName", "=", "name", "(", "Pipeline", ".", "class", ",", "pipelineId", ",", "\"stage\"", ",", "String", ".", "valueOf", "(", "stage", "(", ")", ")", ",", "\"executed\"", ")", ";", "executed", "=", "metricRegistry", ".", "meter", "(", "meterName", ")", ";", "}" ]
Register the metrics attached to this stage. @param metricRegistry the registry to add the metrics to
[ "Register", "the", "metrics", "attached", "to", "this", "stage", "." ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog/plugins/pipelineprocessor/ast/Stage.java#L65-L68
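The method builds a dotted, hierarchical metric name and registers a meter for it. Assuming the Dropwizard Metrics API that the code appears to use (MetricRegistry.name and MetricRegistry.meter), a minimal sketch with placeholder name segments looks like this:

import com.codahale.metrics.Meter;
import com.codahale.metrics.MetricRegistry;
import static com.codahale.metrics.MetricRegistry.name;

class StageMetricsSketch {
    public static void main(String[] args) {
        MetricRegistry metricRegistry = new MetricRegistry();
        // Produces a dotted name like "Pipeline.pipeline-id.stage.0.executed".
        String meterName = name("Pipeline", "pipeline-id", "stage", "0", "executed");
        Meter executed = metricRegistry.meter(meterName);
        executed.mark(); // counted once per stage execution
        System.out.println(meterName + " count=" + executed.getCount());
    }
}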
14,814
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog2/plugin/Version.java
Version.greaterMinor
@Deprecated public boolean greaterMinor(Version other) { return other.major < this.major || other.major == this.major && other.minor < this.minor; }
java
@Deprecated public boolean greaterMinor(Version other) { return other.major < this.major || other.major == this.major && other.minor < this.minor; }
[ "@", "Deprecated", "public", "boolean", "greaterMinor", "(", "Version", "other", ")", "{", "return", "other", ".", "major", "<", "this", ".", "major", "||", "other", ".", "major", "==", "this", ".", "major", "&&", "other", ".", "minor", "<", "this", ".", "minor", ";", "}" ]
Check if this version is higher than the passed other version. Only taking major and minor version number in account. @param other {@link Version} to compare
[ "Check", "if", "this", "version", "is", "higher", "than", "the", "passed", "other", "version", ".", "Only", "taking", "major", "and", "minor", "version", "number", "in", "account", "." ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog2/plugin/Version.java#L272-L275
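The deprecated check compares only the major and minor components: it returns true when this version is strictly higher by major, or has the same major and a higher minor. The record V below is a hypothetical stand-in for the Version class, shown only to make the comparison semantics concrete:

class VersionCompareSketch {
    record V(int major, int minor) {
        // true when "this" is higher than "other", looking at major/minor only
        boolean greaterMinor(V other) {
            return other.major < this.major
                    || (other.major == this.major && other.minor < this.minor);
        }
    }

    public static void main(String[] args) {
        System.out.println(new V(3, 2).greaterMinor(new V(2, 9))); // true  (higher major)
        System.out.println(new V(3, 2).greaterMinor(new V(3, 2))); // false (equal)
        System.out.println(new V(3, 2).greaterMinor(new V(3, 3))); // false (lower minor)
    }
}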
14,815
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog2/shared/utilities/ByteBufferUtils.java
ByteBufferUtils.readBytes
public static byte[] readBytes(ByteBuffer buffer, int offset, int size) { final byte[] dest = new byte[size]; buffer.get(dest, offset, size); return dest; }
java
public static byte[] readBytes(ByteBuffer buffer, int offset, int size) { final byte[] dest = new byte[size]; buffer.get(dest, offset, size); return dest; }
[ "public", "static", "byte", "[", "]", "readBytes", "(", "ByteBuffer", "buffer", ",", "int", "offset", ",", "int", "size", ")", "{", "final", "byte", "[", "]", "dest", "=", "new", "byte", "[", "size", "]", ";", "buffer", ".", "get", "(", "dest", ",", "offset", ",", "size", ")", ";", "return", "dest", ";", "}" ]
Read a byte array from the given offset and size in the buffer This will <em>consume</em> the given {@link ByteBuffer}.
[ "Read", "a", "byte", "array", "from", "the", "given", "offset", "and", "size", "in", "the", "buffer" ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog2/shared/utilities/ByteBufferUtils.java#L36-L40
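One subtlety worth noting: in the relative bulk get used above, ByteBuffer.get(dst, offset, length) treats offset as an offset into the destination array, not into the buffer, and reading always starts at the buffer's current position (which is what "consumes" it). The sketch below, a separate illustration rather than the Graylog helper, reads from an explicit buffer offset by positioning the buffer first:

import java.nio.ByteBuffer;
import java.util.Arrays;

class ByteBufferReadSketch {
    // Reads `size` bytes starting at `offset` *within the buffer*.
    static byte[] readBytesAt(ByteBuffer buffer, int offset, int size) {
        final byte[] dest = new byte[size];
        buffer.position(offset);   // move to the requested buffer offset
        buffer.get(dest, 0, size); // fill the destination array from its index 0
        return dest;
    }

    public static void main(String[] args) {
        ByteBuffer buffer = ByteBuffer.wrap(new byte[] {10, 20, 30, 40, 50});
        System.out.println(Arrays.toString(readBytesAt(buffer, 2, 3))); // [30, 40, 50]
    }
}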
14,816
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog2/utilities/ConfigurationMapConverter.java
ConfigurationMapConverter.convertValues
public static Map<String, Object> convertValues(final Map<String, Object> data, final ConfigurationRequest configurationRequest) throws ValidationException { final Map<String, Object> configuration = Maps.newHashMapWithExpectedSize(data.size()); final Map<String, Map<String, Object>> configurationFields = configurationRequest.asList(); for (final Map.Entry<String, Object> entry : data.entrySet()) { final String field = entry.getKey(); final Map<String, Object> fieldDescription = configurationFields.get(field); if (fieldDescription == null || fieldDescription.isEmpty()) { throw new ValidationException(field, "Unknown configuration field description for field \"" + field + "\""); } final String type = (String) fieldDescription.get("type"); // Decide what to cast to. (string, bool, number) Object value; switch (type) { case "text": case "dropdown": value = entry.getValue() == null ? "" : String.valueOf(entry.getValue()); break; case "number": try { value = Integer.parseInt(String.valueOf(entry.getValue())); } catch (NumberFormatException e) { // If a numeric field is optional and not provided, use null as value if ("true".equals(String.valueOf(fieldDescription.get("is_optional")))) { value = null; } else { throw new ValidationException(field, e.getMessage()); } } break; case "boolean": value = "true".equalsIgnoreCase(String.valueOf(entry.getValue())); break; case "list": final List<?> valueList = entry.getValue() == null ? Collections.emptyList() : (List<?>) entry.getValue(); value = valueList.stream() .filter(o -> o != null && o instanceof String) .map(String::valueOf) .collect(Collectors.toList()); break; default: throw new ValidationException(field, "Unknown configuration field type \"" + type + "\""); } configuration.put(field, value); } return configuration; }
java
public static Map<String, Object> convertValues(final Map<String, Object> data, final ConfigurationRequest configurationRequest) throws ValidationException { final Map<String, Object> configuration = Maps.newHashMapWithExpectedSize(data.size()); final Map<String, Map<String, Object>> configurationFields = configurationRequest.asList(); for (final Map.Entry<String, Object> entry : data.entrySet()) { final String field = entry.getKey(); final Map<String, Object> fieldDescription = configurationFields.get(field); if (fieldDescription == null || fieldDescription.isEmpty()) { throw new ValidationException(field, "Unknown configuration field description for field \"" + field + "\""); } final String type = (String) fieldDescription.get("type"); // Decide what to cast to. (string, bool, number) Object value; switch (type) { case "text": case "dropdown": value = entry.getValue() == null ? "" : String.valueOf(entry.getValue()); break; case "number": try { value = Integer.parseInt(String.valueOf(entry.getValue())); } catch (NumberFormatException e) { // If a numeric field is optional and not provided, use null as value if ("true".equals(String.valueOf(fieldDescription.get("is_optional")))) { value = null; } else { throw new ValidationException(field, e.getMessage()); } } break; case "boolean": value = "true".equalsIgnoreCase(String.valueOf(entry.getValue())); break; case "list": final List<?> valueList = entry.getValue() == null ? Collections.emptyList() : (List<?>) entry.getValue(); value = valueList.stream() .filter(o -> o != null && o instanceof String) .map(String::valueOf) .collect(Collectors.toList()); break; default: throw new ValidationException(field, "Unknown configuration field type \"" + type + "\""); } configuration.put(field, value); } return configuration; }
[ "public", "static", "Map", "<", "String", ",", "Object", ">", "convertValues", "(", "final", "Map", "<", "String", ",", "Object", ">", "data", ",", "final", "ConfigurationRequest", "configurationRequest", ")", "throws", "ValidationException", "{", "final", "Map", "<", "String", ",", "Object", ">", "configuration", "=", "Maps", ".", "newHashMapWithExpectedSize", "(", "data", ".", "size", "(", ")", ")", ";", "final", "Map", "<", "String", ",", "Map", "<", "String", ",", "Object", ">", ">", "configurationFields", "=", "configurationRequest", ".", "asList", "(", ")", ";", "for", "(", "final", "Map", ".", "Entry", "<", "String", ",", "Object", ">", "entry", ":", "data", ".", "entrySet", "(", ")", ")", "{", "final", "String", "field", "=", "entry", ".", "getKey", "(", ")", ";", "final", "Map", "<", "String", ",", "Object", ">", "fieldDescription", "=", "configurationFields", ".", "get", "(", "field", ")", ";", "if", "(", "fieldDescription", "==", "null", "||", "fieldDescription", ".", "isEmpty", "(", ")", ")", "{", "throw", "new", "ValidationException", "(", "field", ",", "\"Unknown configuration field description for field \\\"\"", "+", "field", "+", "\"\\\"\"", ")", ";", "}", "final", "String", "type", "=", "(", "String", ")", "fieldDescription", ".", "get", "(", "\"type\"", ")", ";", "// Decide what to cast to. (string, bool, number)", "Object", "value", ";", "switch", "(", "type", ")", "{", "case", "\"text\"", ":", "case", "\"dropdown\"", ":", "value", "=", "entry", ".", "getValue", "(", ")", "==", "null", "?", "\"\"", ":", "String", ".", "valueOf", "(", "entry", ".", "getValue", "(", ")", ")", ";", "break", ";", "case", "\"number\"", ":", "try", "{", "value", "=", "Integer", ".", "parseInt", "(", "String", ".", "valueOf", "(", "entry", ".", "getValue", "(", ")", ")", ")", ";", "}", "catch", "(", "NumberFormatException", "e", ")", "{", "// If a numeric field is optional and not provided, use null as value", "if", "(", "\"true\"", ".", "equals", "(", "String", ".", "valueOf", "(", "fieldDescription", ".", "get", "(", "\"is_optional\"", ")", ")", ")", ")", "{", "value", "=", "null", ";", "}", "else", "{", "throw", "new", "ValidationException", "(", "field", ",", "e", ".", "getMessage", "(", ")", ")", ";", "}", "}", "break", ";", "case", "\"boolean\"", ":", "value", "=", "\"true\"", ".", "equalsIgnoreCase", "(", "String", ".", "valueOf", "(", "entry", ".", "getValue", "(", ")", ")", ")", ";", "break", ";", "case", "\"list\"", ":", "final", "List", "<", "?", ">", "valueList", "=", "entry", ".", "getValue", "(", ")", "==", "null", "?", "Collections", ".", "emptyList", "(", ")", ":", "(", "List", "<", "?", ">", ")", "entry", ".", "getValue", "(", ")", ";", "value", "=", "valueList", ".", "stream", "(", ")", ".", "filter", "(", "o", "->", "o", "!=", "null", "&&", "o", "instanceof", "String", ")", ".", "map", "(", "String", "::", "valueOf", ")", ".", "collect", "(", "Collectors", ".", "toList", "(", ")", ")", ";", "break", ";", "default", ":", "throw", "new", "ValidationException", "(", "field", ",", "\"Unknown configuration field type \\\"\"", "+", "type", "+", "\"\\\"\"", ")", ";", "}", "configuration", ".", "put", "(", "field", ",", "value", ")", ";", "}", "return", "configuration", ";", "}" ]
Converts the values in the map to the requested types. This has been copied from the Graylog web interface and should be removed once we have better configuration objects.
[ "Converts", "the", "values", "in", "the", "map", "to", "the", "requested", "types", ".", "This", "has", "been", "copied", "from", "the", "Graylog", "web", "interface", "and", "should", "be", "removed", "once", "we", "have", "better", "configuration", "objects", "." ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog2/utilities/ConfigurationMapConverter.java#L33-L83
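A minimal usage sketch for the convertValues record above. The ConfigurationRequest wiring, field names, and default values are assumptions made up for illustration; only convertValues itself comes from the record.

import java.util.Map;

import org.graylog2.plugin.configuration.ConfigurationRequest;
import org.graylog2.plugin.configuration.fields.NumberField;
import org.graylog2.plugin.configuration.fields.TextField;
import org.graylog2.utilities.ConfigurationMapConverter;

import com.google.common.collect.ImmutableMap;

public class ConvertValuesSketch {
    public static void main(String[] args) throws Exception {
        // Declare the expected fields; names and defaults are hypothetical.
        final ConfigurationRequest request = new ConfigurationRequest();
        request.addField(new TextField("bind_address", "Bind address", "0.0.0.0", "Address to listen on"));
        request.addField(new NumberField("port", "Port", 514, "Port to listen on"));

        // Raw values as they typically arrive from an API call, everything stringly typed.
        final Map<String, Object> data = ImmutableMap.of(
                "bind_address", "127.0.0.1",
                "port", "1514");

        // "port" is parsed into an Integer, "bind_address" stays a String.
        System.out.println(ConfigurationMapConverter.convertValues(data, request));
    }
}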
14,817
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog2/indexer/searches/Searches.java
Searches.extractStreamId
public static Optional<String> extractStreamId(String filter) { if (isNullOrEmpty(filter)) { return Optional.empty(); } final Matcher streamIdMatcher = filterStreamIdPattern.matcher(filter); if (streamIdMatcher.find()) { return Optional.of(streamIdMatcher.group(2)); } return Optional.empty(); }
java
public static Optional<String> extractStreamId(String filter) { if (isNullOrEmpty(filter)) { return Optional.empty(); } final Matcher streamIdMatcher = filterStreamIdPattern.matcher(filter); if (streamIdMatcher.find()) { return Optional.of(streamIdMatcher.group(2)); } return Optional.empty(); }
[ "public", "static", "Optional", "<", "String", ">", "extractStreamId", "(", "String", "filter", ")", "{", "if", "(", "isNullOrEmpty", "(", "filter", ")", ")", "{", "return", "Optional", ".", "empty", "(", ")", ";", "}", "final", "Matcher", "streamIdMatcher", "=", "filterStreamIdPattern", ".", "matcher", "(", "filter", ")", ";", "if", "(", "streamIdMatcher", ".", "find", "(", ")", ")", "{", "return", "Optional", ".", "of", "(", "streamIdMatcher", ".", "group", "(", "2", ")", ")", ";", "}", "return", "Optional", ".", "empty", "(", ")", ";", "}" ]
Extracts the last stream id from the filter string passed as part of the elasticsearch query. This is used later to pass to possibly existing message decorators for stream-specific configurations. The assumption is that usually (when listing/searching messages for a stream) only a single stream filter is passed. When this is not the case, only the last stream id will be taken into account. This is currently a workaround. A better solution would be to pass the stream id which is supposed to be the scope for a search query as a separate parameter. @param filter the filter string like "streams:xxxyyyzzz" @return the optional stream id
[ "Extracts", "the", "last", "stream", "id", "from", "the", "filter", "string", "passed", "as", "part", "of", "the", "elasticsearch", "query", ".", "This", "is", "used", "later", "to", "pass", "to", "possibly", "existing", "message", "decorators", "for", "stream", "-", "specific", "configurations", "." ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog2/indexer/searches/Searches.java#L880-L889
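A quick sketch of how extractStreamId from the record above behaves; the filter strings are made-up examples and the exact syntax accepted depends on filterStreamIdPattern.

import java.util.Optional;

import org.graylog2.indexer.searches.Searches;

public class ExtractStreamIdSketch {
    public static void main(String[] args) {
        // A stream-scoped filter as the search layer would build it.
        final Optional<String> streamId = Searches.extractStreamId("streams:5b3983a2c9e77bb6c80b6d99");
        System.out.println(streamId.orElse("<none>"));

        // Null or empty filters short-circuit to an empty Optional.
        System.out.println(Searches.extractStreamId(null).isPresent()); // false
    }
}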
14,818
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog/plugins/pipelineprocessor/ast/Rule.java
Rule.invokableCopy
public Rule invokableCopy(FunctionRegistry functionRegistry) { final Builder builder = toBuilder(); final Class<? extends GeneratedRule> ruleClass = generatedRuleClass(); if (ruleClass != null) { try { //noinspection unchecked final Set<Constructor> constructors = ReflectionUtils.getConstructors(ruleClass); final Constructor onlyElement = Iterables.getOnlyElement(constructors); final GeneratedRule instance = (GeneratedRule) onlyElement.newInstance(functionRegistry); builder.generatedRule(instance); } catch (IllegalAccessException | InstantiationException | InvocationTargetException e) { LOG.warn("Unable to generate code for rule {}: {}", id(), e); } } return builder.build(); }
java
public Rule invokableCopy(FunctionRegistry functionRegistry) { final Builder builder = toBuilder(); final Class<? extends GeneratedRule> ruleClass = generatedRuleClass(); if (ruleClass != null) { try { //noinspection unchecked final Set<Constructor> constructors = ReflectionUtils.getConstructors(ruleClass); final Constructor onlyElement = Iterables.getOnlyElement(constructors); final GeneratedRule instance = (GeneratedRule) onlyElement.newInstance(functionRegistry); builder.generatedRule(instance); } catch (IllegalAccessException | InstantiationException | InvocationTargetException e) { LOG.warn("Unable to generate code for rule {}: {}", id(), e); } } return builder.build(); }
[ "public", "Rule", "invokableCopy", "(", "FunctionRegistry", "functionRegistry", ")", "{", "final", "Builder", "builder", "=", "toBuilder", "(", ")", ";", "final", "Class", "<", "?", "extends", "GeneratedRule", ">", "ruleClass", "=", "generatedRuleClass", "(", ")", ";", "if", "(", "ruleClass", "!=", "null", ")", "{", "try", "{", "//noinspection unchecked", "final", "Set", "<", "Constructor", ">", "constructors", "=", "ReflectionUtils", ".", "getConstructors", "(", "ruleClass", ")", ";", "final", "Constructor", "onlyElement", "=", "Iterables", ".", "getOnlyElement", "(", "constructors", ")", ";", "final", "GeneratedRule", "instance", "=", "(", "GeneratedRule", ")", "onlyElement", ".", "newInstance", "(", "functionRegistry", ")", ";", "builder", ".", "generatedRule", "(", "instance", ")", ";", "}", "catch", "(", "IllegalAccessException", "|", "InstantiationException", "|", "InvocationTargetException", "e", ")", "{", "LOG", ".", "warn", "(", "\"Unable to generate code for rule {}: {}\"", ",", "id", "(", ")", ",", "e", ")", ";", "}", "}", "return", "builder", ".", "build", "(", ")", ";", "}" ]
Creates a copy of this Rule with a new instance of the generated rule class if present. This prevents sharing instances across threads, which is not supported for performance reasons. Otherwise the generated code would need to be thread safe, adding to the runtime overhead. Instead we buy speed by spending more memory. @param functionRegistry the registered functions of the system @return a copy of this rule with a new instance of its generated code
[ "Creates", "a", "copy", "of", "this", "Rule", "with", "a", "new", "instance", "of", "the", "generated", "rule", "class", "if", "present", "." ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog/plugins/pipelineprocessor/ast/Rule.java#L178-L193
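The docstring above argues for copying the generated rule per pipeline instead of making it thread safe. The following standalone sketch illustrates that trade-off with a trivial stand-in class; it is not Graylog code, just the copy-per-worker pattern the record describes.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class CopyPerWorkerSketch {
    // Stands in for a generated rule: mutable state, cheap to copy, not thread safe.
    static class Accumulator {
        private long total;
        void add(long value) { total += value; }
        Accumulator freshCopy() { return new Accumulator(); }
    }

    public static void main(String[] args) throws InterruptedException {
        final Accumulator prototype = new Accumulator();
        final ExecutorService pool = Executors.newFixedThreadPool(4);
        for (int worker = 0; worker < 4; worker++) {
            // Each worker gets its own copy, so no locking is needed inside add().
            final Accumulator own = prototype.freshCopy();
            pool.submit(() -> {
                for (int i = 0; i < 1_000; i++) {
                    own.add(i);
                }
            });
        }
        pool.shutdown();
        pool.awaitTermination(5, TimeUnit.SECONDS);
    }
}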
14,819
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog2/indexer/fieldtypes/IndexFieldTypePoller.java
IndexFieldTypePoller.pollIndex
public Optional<IndexFieldTypesDTO> pollIndex(final String indexName, final String indexSetId) { final GetMapping getMapping = new GetMapping.Builder() .addIndex(indexName) .build(); final JestResult result; try (final Timer.Context ignored = pollTimer.time()) { result = JestUtils.execute(jestClient, getMapping, () -> "Unable to get index mapping for index: " + indexName); } catch (Exception e) { if (LOG.isDebugEnabled()) { LOG.error("Couldn't get mapping for index <{}>", indexName, e); } else { LOG.error("Couldn't get mapping for index <{}>: {}", indexName, ExceptionUtils.getRootCauseMessage(e)); } return Optional.empty(); } final JsonNode properties = result.getJsonObject() .path(indexName) .path("mappings") .path("message") // TODO: Hardcoded index type name .path("properties"); if (properties.isMissingNode()) { LOG.error("Invalid mapping response: {}", result.getJsonString()); return Optional.empty(); } final Spliterator<Map.Entry<String, JsonNode>> fieldSpliterator = Spliterators.spliteratorUnknownSize(properties.fields(), Spliterator.IMMUTABLE); final Set<FieldTypeDTO> fieldsMap = StreamSupport.stream(fieldSpliterator, false) .map(field -> Maps.immutableEntry(field.getKey(), field.getValue().path("type").asText())) // The "type" value is empty if we deal with a nested data type // TODO: Figure out how to handle nested fields, for now we only support the top-level fields .filter(field -> !field.getValue().isEmpty()) .map(field -> FieldTypeDTO.create(field.getKey(), field.getValue())) .collect(Collectors.toSet()); return Optional.of(IndexFieldTypesDTO.create(indexSetId, indexName, fieldsMap)); }
java
public Optional<IndexFieldTypesDTO> pollIndex(final String indexName, final String indexSetId) { final GetMapping getMapping = new GetMapping.Builder() .addIndex(indexName) .build(); final JestResult result; try (final Timer.Context ignored = pollTimer.time()) { result = JestUtils.execute(jestClient, getMapping, () -> "Unable to get index mapping for index: " + indexName); } catch (Exception e) { if (LOG.isDebugEnabled()) { LOG.error("Couldn't get mapping for index <{}>", indexName, e); } else { LOG.error("Couldn't get mapping for index <{}>: {}", indexName, ExceptionUtils.getRootCauseMessage(e)); } return Optional.empty(); } final JsonNode properties = result.getJsonObject() .path(indexName) .path("mappings") .path("message") // TODO: Hardcoded index type name .path("properties"); if (properties.isMissingNode()) { LOG.error("Invalid mapping response: {}", result.getJsonString()); return Optional.empty(); } final Spliterator<Map.Entry<String, JsonNode>> fieldSpliterator = Spliterators.spliteratorUnknownSize(properties.fields(), Spliterator.IMMUTABLE); final Set<FieldTypeDTO> fieldsMap = StreamSupport.stream(fieldSpliterator, false) .map(field -> Maps.immutableEntry(field.getKey(), field.getValue().path("type").asText())) // The "type" value is empty if we deal with a nested data type // TODO: Figure out how to handle nested fields, for now we only support the top-level fields .filter(field -> !field.getValue().isEmpty()) .map(field -> FieldTypeDTO.create(field.getKey(), field.getValue())) .collect(Collectors.toSet()); return Optional.of(IndexFieldTypesDTO.create(indexSetId, indexName, fieldsMap)); }
[ "public", "Optional", "<", "IndexFieldTypesDTO", ">", "pollIndex", "(", "final", "String", "indexName", ",", "final", "String", "indexSetId", ")", "{", "final", "GetMapping", "getMapping", "=", "new", "GetMapping", ".", "Builder", "(", ")", ".", "addIndex", "(", "indexName", ")", ".", "build", "(", ")", ";", "final", "JestResult", "result", ";", "try", "(", "final", "Timer", ".", "Context", "ignored", "=", "pollTimer", ".", "time", "(", ")", ")", "{", "result", "=", "JestUtils", ".", "execute", "(", "jestClient", ",", "getMapping", ",", "(", ")", "->", "\"Unable to get index mapping for index: \"", "+", "indexName", ")", ";", "}", "catch", "(", "Exception", "e", ")", "{", "if", "(", "LOG", ".", "isDebugEnabled", "(", ")", ")", "{", "LOG", ".", "error", "(", "\"Couldn't get mapping for index <{}>\"", ",", "indexName", ",", "e", ")", ";", "}", "else", "{", "LOG", ".", "error", "(", "\"Couldn't get mapping for index <{}>: {}\"", ",", "indexName", ",", "ExceptionUtils", ".", "getRootCauseMessage", "(", "e", ")", ")", ";", "}", "return", "Optional", ".", "empty", "(", ")", ";", "}", "final", "JsonNode", "properties", "=", "result", ".", "getJsonObject", "(", ")", ".", "path", "(", "indexName", ")", ".", "path", "(", "\"mappings\"", ")", ".", "path", "(", "\"message\"", ")", "// TODO: Hardcoded index type name", ".", "path", "(", "\"properties\"", ")", ";", "if", "(", "properties", ".", "isMissingNode", "(", ")", ")", "{", "LOG", ".", "error", "(", "\"Invalid mapping response: {}\"", ",", "result", ".", "getJsonString", "(", ")", ")", ";", "return", "Optional", ".", "empty", "(", ")", ";", "}", "final", "Spliterator", "<", "Map", ".", "Entry", "<", "String", ",", "JsonNode", ">", ">", "fieldSpliterator", "=", "Spliterators", ".", "spliteratorUnknownSize", "(", "properties", ".", "fields", "(", ")", ",", "Spliterator", ".", "IMMUTABLE", ")", ";", "final", "Set", "<", "FieldTypeDTO", ">", "fieldsMap", "=", "StreamSupport", ".", "stream", "(", "fieldSpliterator", ",", "false", ")", ".", "map", "(", "field", "->", "Maps", ".", "immutableEntry", "(", "field", ".", "getKey", "(", ")", ",", "field", ".", "getValue", "(", ")", ".", "path", "(", "\"type\"", ")", ".", "asText", "(", ")", ")", ")", "// The \"type\" value is empty if we deal with a nested data type", "// TODO: Figure out how to handle nested fields, for now we only support the top-level fields", ".", "filter", "(", "field", "->", "!", "field", ".", "getValue", "(", ")", ".", "isEmpty", "(", ")", ")", ".", "map", "(", "field", "->", "FieldTypeDTO", ".", "create", "(", "field", ".", "getKey", "(", ")", ",", "field", ".", "getValue", "(", ")", ")", ")", ".", "collect", "(", "Collectors", ".", "toSet", "(", ")", ")", ";", "return", "Optional", ".", "of", "(", "IndexFieldTypesDTO", ".", "create", "(", "indexSetId", ",", "indexName", ",", "fieldsMap", ")", ")", ";", "}" ]
Returns the index field types for the given index. @param indexName index name to poll types for @param indexSetId index set ID of the given index @return the polled index field type data for the given index
[ "Returns", "the", "index", "field", "types", "for", "the", "given", "index", "." ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog2/indexer/fieldtypes/IndexFieldTypePoller.java#L95-L134
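The interesting part of pollIndex above is walking the mapping JSON and keeping only leaf fields that carry a "type". A self-contained Jackson sketch of that traversal, using a hypothetical index name and fields:

import java.util.LinkedHashMap;
import java.util.Map;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class MappingTraversalSketch {
    public static void main(String[] args) throws Exception {
        // Trimmed-down mapping response; "nested_thing" has no "type" and is skipped, as in the poller.
        final String json = "{\"graylog_0\":"
                + "{\"mappings\":"
                + "{\"message\":"
                + "{\"properties\":{"
                + "\"source\":{\"type\":\"keyword\"},"
                + "\"took_ms\":{\"type\":\"long\"},"
                + "\"nested_thing\":{\"properties\":{\"a\":{\"type\":\"keyword\"}}}"
                + "}}}}}";

        final JsonNode properties = new ObjectMapper().readTree(json)
                .path("graylog_0").path("mappings").path("message").path("properties");

        final Map<String, String> fieldTypes = new LinkedHashMap<>();
        properties.fields().forEachRemaining(field -> {
            final String type = field.getValue().path("type").asText();
            if (!type.isEmpty()) {
                fieldTypes.put(field.getKey(), type);
            }
        });
        System.out.println(fieldTypes); // {source=keyword, took_ms=long}
    }
}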
14,820
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog/plugins/netflow/v9/NetFlowV9Parser.java
NetFlowV9Parser.parseHeader
public static NetFlowV9Header parseHeader(ByteBuf bb) { final int version = bb.readUnsignedShort(); if (version != 9) { throw new InvalidFlowVersionException(version); } final int count = bb.readUnsignedShort(); final long sysUptime = bb.readUnsignedInt(); final long unixSecs = bb.readUnsignedInt(); final long sequence = bb.readUnsignedInt(); final long sourceId = bb.readUnsignedInt(); return NetFlowV9Header.create(version, count, sysUptime, unixSecs, sequence, sourceId); }
java
public static NetFlowV9Header parseHeader(ByteBuf bb) { final int version = bb.readUnsignedShort(); if (version != 9) { throw new InvalidFlowVersionException(version); } final int count = bb.readUnsignedShort(); final long sysUptime = bb.readUnsignedInt(); final long unixSecs = bb.readUnsignedInt(); final long sequence = bb.readUnsignedInt(); final long sourceId = bb.readUnsignedInt(); return NetFlowV9Header.create(version, count, sysUptime, unixSecs, sequence, sourceId); }
[ "public", "static", "NetFlowV9Header", "parseHeader", "(", "ByteBuf", "bb", ")", "{", "final", "int", "version", "=", "bb", ".", "readUnsignedShort", "(", ")", ";", "if", "(", "version", "!=", "9", ")", "{", "throw", "new", "InvalidFlowVersionException", "(", "version", ")", ";", "}", "final", "int", "count", "=", "bb", ".", "readUnsignedShort", "(", ")", ";", "final", "long", "sysUptime", "=", "bb", ".", "readUnsignedInt", "(", ")", ";", "final", "long", "unixSecs", "=", "bb", ".", "readUnsignedInt", "(", ")", ";", "final", "long", "sequence", "=", "bb", ".", "readUnsignedInt", "(", ")", ";", "final", "long", "sourceId", "=", "bb", ".", "readUnsignedInt", "(", ")", ";", "return", "NetFlowV9Header", ".", "create", "(", "version", ",", "count", ",", "sysUptime", ",", "unixSecs", ",", "sequence", ",", "sourceId", ")", ";", "}" ]
Flow Header Format <pre> | 0-1 | version | NetFlow export format version number | |-------|------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | 2-3 | count | Number of flow sets exported in this packet, both template and data (1-30). | | 4-7 | sys_uptime | Current time in milliseconds since the export device booted. | | 8-11 | unix_secs | Current count of seconds since 0000 UTC 1970. | | 12-15 | package_sequence | Sequence counter of all export packets sent by the export device. Note: This is a change from the Version 5 and Version 8 headers, where this number represented total flows. | | 16-19 | source_id | A 32-bit value that is used to guarantee uniqueness for all flows exported from a particular device. (The Source ID field is the equivalent of the engine type and engine ID fields found in the NetFlow Version 5 and Version 8 headers). The format of this field is vendor specific. In Cisco's implementation, the first two bytes are reserved for future expansion, and will always be zero. Byte 3 provides uniqueness with respect to the routing engine on the exporting device. Byte 4 provides uniqueness with respect to the particular line card or Versatile Interface Processor on the exporting device. Collector devices should use the combination of the source IP address plus the Source ID field to associate an incoming NetFlow export packet with a unique instance of NetFlow on a particular device. | </pre>
[ "Flow", "Header", "Format" ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog/plugins/netflow/v9/NetFlowV9Parser.java#L95-L108
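A runnable sketch for the header record above: hand-build the 20 byte packet header described in the table and feed it to parseHeader. The field values are arbitrary.

import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;

import org.graylog.plugins.netflow.v9.NetFlowV9Parser;

public class ParseHeaderSketch {
    public static void main(String[] args) {
        final ByteBuf bb = Unpooled.buffer(20);
        bb.writeShort(9);            // version, must be 9
        bb.writeShort(2);            // count of flow sets in the packet
        bb.writeInt(123_456);        // sys_uptime in milliseconds
        bb.writeInt(1_500_000_000);  // unix_secs
        bb.writeInt(42);             // package_sequence
        bb.writeInt(7);              // source_id
        System.out.println(NetFlowV9Parser.parseHeader(bb));
        bb.release();
    }
}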
14,821
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog/plugins/netflow/v9/NetFlowV9Parser.java
NetFlowV9Parser.parseTemplates
public static List<NetFlowV9Template> parseTemplates(ByteBuf bb, NetFlowV9FieldTypeRegistry typeRegistry) { final ImmutableList.Builder<NetFlowV9Template> templates = ImmutableList.builder(); int len = bb.readUnsignedShort(); int p = 4; // flow set id and length field itself while (p < len) { final NetFlowV9Template template = parseTemplate(bb, typeRegistry); templates.add(template); p += 4 + template.fieldCount() * 4; } return templates.build(); }
java
public static List<NetFlowV9Template> parseTemplates(ByteBuf bb, NetFlowV9FieldTypeRegistry typeRegistry) { final ImmutableList.Builder<NetFlowV9Template> templates = ImmutableList.builder(); int len = bb.readUnsignedShort(); int p = 4; // flow set id and length field itself while (p < len) { final NetFlowV9Template template = parseTemplate(bb, typeRegistry); templates.add(template); p += 4 + template.fieldCount() * 4; } return templates.build(); }
[ "public", "static", "List", "<", "NetFlowV9Template", ">", "parseTemplates", "(", "ByteBuf", "bb", ",", "NetFlowV9FieldTypeRegistry", "typeRegistry", ")", "{", "final", "ImmutableList", ".", "Builder", "<", "NetFlowV9Template", ">", "templates", "=", "ImmutableList", ".", "builder", "(", ")", ";", "int", "len", "=", "bb", ".", "readUnsignedShort", "(", ")", ";", "int", "p", "=", "4", ";", "// flow set id and length field itself", "while", "(", "p", "<", "len", ")", "{", "final", "NetFlowV9Template", "template", "=", "parseTemplate", "(", "bb", ",", "typeRegistry", ")", ";", "templates", ".", "add", "(", "template", ")", ";", "p", "+=", "4", "+", "template", ".", "fieldCount", "(", ")", "*", "4", ";", "}", "return", "templates", ".", "build", "(", ")", ";", "}" ]
Template FlowSet Format <pre> | FIELD | DESCRIPTION | |--------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | flowset_id | The flowset_id is used to distinguish template records from data records. A template record always has a flowset_id in the range of 0-255. Currently template record that describes flow fields has a flowset_id of zero and the template record that describes option fields (described below) has a flowset_id of 1. A data record always has a nonzero flowset_id greater than 255. | | length | Length refers to the total length of this FlowSet. Because an individual template FlowSet may contain multiple template IDs (as illustrated above), the length value should be used to determine the position of the next FlowSet record, which could be either a template or a data FlowSet. Length is expressed in type/length/value (TLV) format, meaning that the value includes the bytes used for the flowset_id and the length bytes themselves, as well as the combined lengths of all template records included in this FlowSet. | | template_id | As a router generates different template FlowSets to match the type of NetFlow data it will be exporting, each template is given a unique ID. This uniqueness is local to the router that generated the template_id. Templates that define data record formats begin numbering at 256 since 0-255 are reserved for FlowSet IDs. | | field_count | This field gives the number of fields in this template record. Because a template FlowSet may contain multiple template records, this field allows the parser to determine the end of the current template record and the start of the next. | | field_type | This numeric value represents the type of the field. The possible values of the field type are vendor specific. Cisco supplied values are consistent across all platforms that support NetFlow Version 9. At the time of the initial release of the NetFlow Version 9 code (and after any subsequent changes that could add new field-type definitions), Cisco provides a file that defines the known field types and their lengths. The currently defined field types are detailed below. | | field_length | This number gives the length of the above-defined field, in bytes. | </pre>
[ "Template", "FlowSet", "Format" ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog/plugins/netflow/v9/NetFlowV9Parser.java#L124-L136
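A sketch of parseTemplates on a hand-built template flowset (one template, id 256, with the standard IN_BYTES and IN_PKTS fields). It assumes the flowset_id has already been consumed by the caller and that NetFlowV9FieldTypeRegistry exposes a create() factory for the bundled field types; both assumptions mirror how the parser appears to be used but are not taken from this record.

import java.util.List;

import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;

import org.graylog.plugins.netflow.v9.NetFlowV9FieldTypeRegistry;
import org.graylog.plugins.netflow.v9.NetFlowV9Parser;
import org.graylog.plugins.netflow.v9.NetFlowV9Template;

public class ParseTemplatesSketch {
    public static void main(String[] args) throws Exception {
        final NetFlowV9FieldTypeRegistry typeRegistry = NetFlowV9FieldTypeRegistry.create(); // assumed factory

        final ByteBuf bb = Unpooled.buffer();
        bb.writeShort(16);   // flowset length: 4 header bytes plus one 12 byte template record
        bb.writeShort(256);  // template_id, data templates start at 256
        bb.writeShort(2);    // field_count
        bb.writeShort(1);    // field 1 type (IN_BYTES)
        bb.writeShort(4);    // field 1 length
        bb.writeShort(2);    // field 2 type (IN_PKTS)
        bb.writeShort(4);    // field 2 length

        final List<NetFlowV9Template> templates = NetFlowV9Parser.parseTemplates(bb, typeRegistry);
        System.out.println(templates);
        bb.release();
    }
}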
14,822
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog/plugins/netflow/v9/NetFlowV9Parser.java
NetFlowV9Parser.parseTemplatesShallow
public static List<Map.Entry<Integer, byte[]>> parseTemplatesShallow(ByteBuf bb) { final ImmutableList.Builder<Map.Entry<Integer, byte[]>> templates = ImmutableList.builder(); int len = bb.readUnsignedShort(); int p = 4; // flow set id and length field itself while (p < len) { final int start = bb.readerIndex(); final int templateId = bb.readUnsignedShort(); final int fieldCount = bb.readUnsignedShort(); final ImmutableList.Builder<NetFlowV9FieldDef> fieldDefs = ImmutableList.builder(); for (int i = 0; i < fieldCount; i++) { int fieldType = bb.readUnsignedShort(); int fieldLength = bb.readUnsignedShort(); } final byte[] bytes = ByteBufUtil.getBytes(bb, start, bb.readerIndex() - start); final Map.Entry<Integer, byte[]> template = Maps.immutableEntry(templateId, bytes); templates.add(template); p += 4 + fieldCount * 4; } return templates.build(); }
java
public static List<Map.Entry<Integer, byte[]>> parseTemplatesShallow(ByteBuf bb) { final ImmutableList.Builder<Map.Entry<Integer, byte[]>> templates = ImmutableList.builder(); int len = bb.readUnsignedShort(); int p = 4; // flow set id and length field itself while (p < len) { final int start = bb.readerIndex(); final int templateId = bb.readUnsignedShort(); final int fieldCount = bb.readUnsignedShort(); final ImmutableList.Builder<NetFlowV9FieldDef> fieldDefs = ImmutableList.builder(); for (int i = 0; i < fieldCount; i++) { int fieldType = bb.readUnsignedShort(); int fieldLength = bb.readUnsignedShort(); } final byte[] bytes = ByteBufUtil.getBytes(bb, start, bb.readerIndex() - start); final Map.Entry<Integer, byte[]> template = Maps.immutableEntry(templateId, bytes); templates.add(template); p += 4 + fieldCount * 4; } return templates.build(); }
[ "public", "static", "List", "<", "Map", ".", "Entry", "<", "Integer", ",", "byte", "[", "]", ">", ">", "parseTemplatesShallow", "(", "ByteBuf", "bb", ")", "{", "final", "ImmutableList", ".", "Builder", "<", "Map", ".", "Entry", "<", "Integer", ",", "byte", "[", "]", ">", ">", "templates", "=", "ImmutableList", ".", "builder", "(", ")", ";", "int", "len", "=", "bb", ".", "readUnsignedShort", "(", ")", ";", "int", "p", "=", "4", ";", "// flow set id and length field itself", "while", "(", "p", "<", "len", ")", "{", "final", "int", "start", "=", "bb", ".", "readerIndex", "(", ")", ";", "final", "int", "templateId", "=", "bb", ".", "readUnsignedShort", "(", ")", ";", "final", "int", "fieldCount", "=", "bb", ".", "readUnsignedShort", "(", ")", ";", "final", "ImmutableList", ".", "Builder", "<", "NetFlowV9FieldDef", ">", "fieldDefs", "=", "ImmutableList", ".", "builder", "(", ")", ";", "for", "(", "int", "i", "=", "0", ";", "i", "<", "fieldCount", ";", "i", "++", ")", "{", "int", "fieldType", "=", "bb", ".", "readUnsignedShort", "(", ")", ";", "int", "fieldLength", "=", "bb", ".", "readUnsignedShort", "(", ")", ";", "}", "final", "byte", "[", "]", "bytes", "=", "ByteBufUtil", ".", "getBytes", "(", "bb", ",", "start", ",", "bb", ".", "readerIndex", "(", ")", "-", "start", ")", ";", "final", "Map", ".", "Entry", "<", "Integer", ",", "byte", "[", "]", ">", "template", "=", "Maps", ".", "immutableEntry", "(", "templateId", ",", "bytes", ")", ";", "templates", ".", "add", "(", "template", ")", ";", "p", "+=", "4", "+", "fieldCount", "*", "4", ";", "}", "return", "templates", ".", "build", "(", ")", ";", "}" ]
Like above, but only retrieves the bytes and template ids
[ "Like", "above", "but", "only", "retrieves", "the", "bytes", "and", "template", "ids" ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog/plugins/netflow/v9/NetFlowV9Parser.java#L163-L185
14,823
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog/plugins/netflow/v9/NetFlowV9Parser.java
NetFlowV9Parser.parseOptionTemplate
public static NetFlowV9OptionTemplate parseOptionTemplate(ByteBuf bb, NetFlowV9FieldTypeRegistry typeRegistry) { int length = bb.readUnsignedShort(); final int templateId = bb.readUnsignedShort(); int optionScopeLength = bb.readUnsignedShort(); int optionLength = bb.readUnsignedShort(); int p = bb.readerIndex(); int endOfScope = p + optionScopeLength; int endOfOption = endOfScope + optionLength; int endOfTemplate = p - 10 + length; final ImmutableList.Builder<NetFlowV9ScopeDef> scopeDefs = ImmutableList.builder(); while (bb.readerIndex() < endOfScope) { int scopeType = bb.readUnsignedShort(); int scopeLength = bb.readUnsignedShort(); scopeDefs.add(NetFlowV9ScopeDef.create(scopeType, scopeLength)); } // skip padding bb.readerIndex(endOfScope); final ImmutableList.Builder<NetFlowV9FieldDef> optionDefs = ImmutableList.builder(); while (bb.readerIndex() < endOfOption) { int optType = bb.readUnsignedShort(); int optLength = bb.readUnsignedShort(); NetFlowV9FieldType t = typeRegistry.get(optType); if (t == null) { t = NetFlowV9FieldType.create(optType, NetFlowV9FieldType.ValueType.byLength(optLength), "option_" + optType); } optionDefs.add(NetFlowV9FieldDef.create(t, optLength)); } // skip padding bb.readerIndex(endOfTemplate); return NetFlowV9OptionTemplate.create(templateId, scopeDefs.build(), optionDefs.build()); }
java
public static NetFlowV9OptionTemplate parseOptionTemplate(ByteBuf bb, NetFlowV9FieldTypeRegistry typeRegistry) { int length = bb.readUnsignedShort(); final int templateId = bb.readUnsignedShort(); int optionScopeLength = bb.readUnsignedShort(); int optionLength = bb.readUnsignedShort(); int p = bb.readerIndex(); int endOfScope = p + optionScopeLength; int endOfOption = endOfScope + optionLength; int endOfTemplate = p - 10 + length; final ImmutableList.Builder<NetFlowV9ScopeDef> scopeDefs = ImmutableList.builder(); while (bb.readerIndex() < endOfScope) { int scopeType = bb.readUnsignedShort(); int scopeLength = bb.readUnsignedShort(); scopeDefs.add(NetFlowV9ScopeDef.create(scopeType, scopeLength)); } // skip padding bb.readerIndex(endOfScope); final ImmutableList.Builder<NetFlowV9FieldDef> optionDefs = ImmutableList.builder(); while (bb.readerIndex() < endOfOption) { int optType = bb.readUnsignedShort(); int optLength = bb.readUnsignedShort(); NetFlowV9FieldType t = typeRegistry.get(optType); if (t == null) { t = NetFlowV9FieldType.create(optType, NetFlowV9FieldType.ValueType.byLength(optLength), "option_" + optType); } optionDefs.add(NetFlowV9FieldDef.create(t, optLength)); } // skip padding bb.readerIndex(endOfTemplate); return NetFlowV9OptionTemplate.create(templateId, scopeDefs.build(), optionDefs.build()); }
[ "public", "static", "NetFlowV9OptionTemplate", "parseOptionTemplate", "(", "ByteBuf", "bb", ",", "NetFlowV9FieldTypeRegistry", "typeRegistry", ")", "{", "int", "length", "=", "bb", ".", "readUnsignedShort", "(", ")", ";", "final", "int", "templateId", "=", "bb", ".", "readUnsignedShort", "(", ")", ";", "int", "optionScopeLength", "=", "bb", ".", "readUnsignedShort", "(", ")", ";", "int", "optionLength", "=", "bb", ".", "readUnsignedShort", "(", ")", ";", "int", "p", "=", "bb", ".", "readerIndex", "(", ")", ";", "int", "endOfScope", "=", "p", "+", "optionScopeLength", ";", "int", "endOfOption", "=", "endOfScope", "+", "optionLength", ";", "int", "endOfTemplate", "=", "p", "-", "10", "+", "length", ";", "final", "ImmutableList", ".", "Builder", "<", "NetFlowV9ScopeDef", ">", "scopeDefs", "=", "ImmutableList", ".", "builder", "(", ")", ";", "while", "(", "bb", ".", "readerIndex", "(", ")", "<", "endOfScope", ")", "{", "int", "scopeType", "=", "bb", ".", "readUnsignedShort", "(", ")", ";", "int", "scopeLength", "=", "bb", ".", "readUnsignedShort", "(", ")", ";", "scopeDefs", ".", "add", "(", "NetFlowV9ScopeDef", ".", "create", "(", "scopeType", ",", "scopeLength", ")", ")", ";", "}", "// skip padding", "bb", ".", "readerIndex", "(", "endOfScope", ")", ";", "final", "ImmutableList", ".", "Builder", "<", "NetFlowV9FieldDef", ">", "optionDefs", "=", "ImmutableList", ".", "builder", "(", ")", ";", "while", "(", "bb", ".", "readerIndex", "(", ")", "<", "endOfOption", ")", "{", "int", "optType", "=", "bb", ".", "readUnsignedShort", "(", ")", ";", "int", "optLength", "=", "bb", ".", "readUnsignedShort", "(", ")", ";", "NetFlowV9FieldType", "t", "=", "typeRegistry", ".", "get", "(", "optType", ")", ";", "if", "(", "t", "==", "null", ")", "{", "t", "=", "NetFlowV9FieldType", ".", "create", "(", "optType", ",", "NetFlowV9FieldType", ".", "ValueType", ".", "byLength", "(", "optLength", ")", ",", "\"option_\"", "+", "optType", ")", ";", "}", "optionDefs", ".", "add", "(", "NetFlowV9FieldDef", ".", "create", "(", "t", ",", "optLength", ")", ")", ";", "}", "// skip padding", "bb", ".", "readerIndex", "(", "endOfTemplate", ")", ";", "return", "NetFlowV9OptionTemplate", ".", "create", "(", "templateId", ",", "scopeDefs", ".", "build", "(", ")", ",", "optionDefs", ".", "build", "(", ")", ")", ";", "}" ]
Options Template Format <pre> | FIELD | DESCRIPTION | |-----------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | flowset_id = 1 | The flowset_id is used to distinguish template records from data records. A template record always has a flowset_id of 1. A data record always has a nonzero flowset_id which is greater than 255. | | length | This field gives the total length of this FlowSet. Because an individual template FlowSet may contain multiple template IDs, the length value should be used to determine the position of the next FlowSet record, which could be either a template or a data FlowSet. Length is expressed in TLV format, meaning that the value includes the bytes used for the flowset_id and the length bytes themselves, as well as the combined lengths of all template records included in this FlowSet. | | template_id | As a router generates different template FlowSets to match the type of NetFlow data it will be exporting, each template is given a unique ID. This uniqueness is local to the router that generated the template_id. The template_id is greater than 255. Template IDs inferior to 255 are reserved. | | option_scope_length | This field gives the length in bytes of any scope fields contained in this options template (the use of scope is described below). | | options_length | This field gives the length (in bytes) of any Options field definitions contained in this options template. | | scope_field_N_type | This field gives the relevant portion of the NetFlow process to which the options record refers. Currently defined values follow: | | | * 0x0001 System | | | * 0x0002 Interface | | | * 0x0003 Line Card | | | * 0x0004 NetFlow Cache | | | * 0x0005 Template | | | | | | For example, sampled NetFlow can be implemented on a per-interface basis, so if the options record were reporting on how sampling is configured, the scope for the report would be 0x0002 (interface). | | scope_field_N_length | This field gives the length (in bytes) of the Scope field, as it would appear in an options record. | | option_field_N_type | This numeric value represents the type of the field that appears in the options record. Possible values are detailed in template flow set format (above). | | option_field_N_length | This number is the length (in bytes) of the field, as it would appear in an options record. | | padding | Padding should be inserted to align the end of the FlowSet on a 32 bit boundary. Pay attention that the length field will include those padding bits. | </pre>
[ "Options", "Template", "Format" ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog/plugins/netflow/v9/NetFlowV9Parser.java#L212-L249
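The trickiest part of parseOptionTemplate above is the offset bookkeeping, in particular endOfTemplate = p - 10 + length: the flowset_id (2 bytes) plus the four shorts read so far put the reader 10 bytes into the flowset, so subtracting 10 points back at its start. A small worked-arithmetic sketch with assumed sizes:

public class OptionTemplateOffsets {
    public static void main(String[] args) {
        final int flowsetStart = 0;       // index of the flowset_id field (consumed by the caller)
        final int length = 26;            // total flowset length, including flowset_id and length fields
        final int optionScopeLength = 4;  // one scope entry (type + length)
        final int optionLength = 8;       // two option entries (4 bytes each)

        final int p = flowsetStart + 10;                   // reader position after the four shorts
        final int endOfScope = p + optionScopeLength;      // 14
        final int endOfOption = endOfScope + optionLength; // 22
        final int endOfTemplate = p - 10 + length;         // 26, i.e. flowsetStart + length, past any padding
        System.out.printf("scope ends at %d, options at %d, template at %d%n",
                endOfScope, endOfOption, endOfTemplate);
    }
}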
14,824
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog/plugins/netflow/v9/NetFlowV9Parser.java
NetFlowV9Parser.parseRecords
public static List<NetFlowV9BaseRecord> parseRecords(ByteBuf bb, Map<Integer, NetFlowV9Template> cache, NetFlowV9OptionTemplate optionTemplate) { List<NetFlowV9BaseRecord> records = new ArrayList<>(); int flowSetId = bb.readUnsignedShort(); int length = bb.readUnsignedShort(); int end = bb.readerIndex() - 4 + length; List<NetFlowV9FieldDef> defs; boolean isOptionTemplate = optionTemplate != null && optionTemplate.templateId() == flowSetId; if (isOptionTemplate) { defs = optionTemplate.optionDefs(); } else { NetFlowV9Template t = cache.get(flowSetId); if (t == null) { return Collections.emptyList(); } defs = t.definitions(); } // calculate record unit size int unitSize = 0; for (NetFlowV9FieldDef def : defs) { unitSize += def.length(); } while (bb.readerIndex() < end && bb.readableBytes() >= unitSize) { final ImmutableMap.Builder<String, Object> fields = ImmutableMap.builder(); for (NetFlowV9FieldDef def : defs) { final String key = def.type().name().toLowerCase(Locale.ROOT); final Optional<Object> optValue = def.parse(bb); optValue.ifPresent(value -> fields.put(key, value)); } if (isOptionTemplate) { final ImmutableMap.Builder<Integer, Object> scopes = ImmutableMap.builder(); for (NetFlowV9ScopeDef def : optionTemplate.scopeDefs()) { int t = def.type(); int len = def.length(); long l = 0; for (int i = 0; i < len; i++) { l <<= 8; l |= bb.readUnsignedByte(); } scopes.put(t, l); } records.add(NetFlowV9OptionRecord.create(fields.build(), scopes.build())); } else { records.add(NetFlowV9Record.create(fields.build())); } // This flowset cannot contain another record, treat as padding if (end - bb.readerIndex() < unitSize) { break; } } bb.readerIndex(end); return records; }
java
public static List<NetFlowV9BaseRecord> parseRecords(ByteBuf bb, Map<Integer, NetFlowV9Template> cache, NetFlowV9OptionTemplate optionTemplate) { List<NetFlowV9BaseRecord> records = new ArrayList<>(); int flowSetId = bb.readUnsignedShort(); int length = bb.readUnsignedShort(); int end = bb.readerIndex() - 4 + length; List<NetFlowV9FieldDef> defs; boolean isOptionTemplate = optionTemplate != null && optionTemplate.templateId() == flowSetId; if (isOptionTemplate) { defs = optionTemplate.optionDefs(); } else { NetFlowV9Template t = cache.get(flowSetId); if (t == null) { return Collections.emptyList(); } defs = t.definitions(); } // calculate record unit size int unitSize = 0; for (NetFlowV9FieldDef def : defs) { unitSize += def.length(); } while (bb.readerIndex() < end && bb.readableBytes() >= unitSize) { final ImmutableMap.Builder<String, Object> fields = ImmutableMap.builder(); for (NetFlowV9FieldDef def : defs) { final String key = def.type().name().toLowerCase(Locale.ROOT); final Optional<Object> optValue = def.parse(bb); optValue.ifPresent(value -> fields.put(key, value)); } if (isOptionTemplate) { final ImmutableMap.Builder<Integer, Object> scopes = ImmutableMap.builder(); for (NetFlowV9ScopeDef def : optionTemplate.scopeDefs()) { int t = def.type(); int len = def.length(); long l = 0; for (int i = 0; i < len; i++) { l <<= 8; l |= bb.readUnsignedByte(); } scopes.put(t, l); } records.add(NetFlowV9OptionRecord.create(fields.build(), scopes.build())); } else { records.add(NetFlowV9Record.create(fields.build())); } // This flowset cannot contain another record, treat as padding if (end - bb.readerIndex() < unitSize) { break; } } bb.readerIndex(end); return records; }
[ "public", "static", "List", "<", "NetFlowV9BaseRecord", ">", "parseRecords", "(", "ByteBuf", "bb", ",", "Map", "<", "Integer", ",", "NetFlowV9Template", ">", "cache", ",", "NetFlowV9OptionTemplate", "optionTemplate", ")", "{", "List", "<", "NetFlowV9BaseRecord", ">", "records", "=", "new", "ArrayList", "<>", "(", ")", ";", "int", "flowSetId", "=", "bb", ".", "readUnsignedShort", "(", ")", ";", "int", "length", "=", "bb", ".", "readUnsignedShort", "(", ")", ";", "int", "end", "=", "bb", ".", "readerIndex", "(", ")", "-", "4", "+", "length", ";", "List", "<", "NetFlowV9FieldDef", ">", "defs", ";", "boolean", "isOptionTemplate", "=", "optionTemplate", "!=", "null", "&&", "optionTemplate", ".", "templateId", "(", ")", "==", "flowSetId", ";", "if", "(", "isOptionTemplate", ")", "{", "defs", "=", "optionTemplate", ".", "optionDefs", "(", ")", ";", "}", "else", "{", "NetFlowV9Template", "t", "=", "cache", ".", "get", "(", "flowSetId", ")", ";", "if", "(", "t", "==", "null", ")", "{", "return", "Collections", ".", "emptyList", "(", ")", ";", "}", "defs", "=", "t", ".", "definitions", "(", ")", ";", "}", "// calculate record unit size", "int", "unitSize", "=", "0", ";", "for", "(", "NetFlowV9FieldDef", "def", ":", "defs", ")", "{", "unitSize", "+=", "def", ".", "length", "(", ")", ";", "}", "while", "(", "bb", ".", "readerIndex", "(", ")", "<", "end", "&&", "bb", ".", "readableBytes", "(", ")", ">=", "unitSize", ")", "{", "final", "ImmutableMap", ".", "Builder", "<", "String", ",", "Object", ">", "fields", "=", "ImmutableMap", ".", "builder", "(", ")", ";", "for", "(", "NetFlowV9FieldDef", "def", ":", "defs", ")", "{", "final", "String", "key", "=", "def", ".", "type", "(", ")", ".", "name", "(", ")", ".", "toLowerCase", "(", "Locale", ".", "ROOT", ")", ";", "final", "Optional", "<", "Object", ">", "optValue", "=", "def", ".", "parse", "(", "bb", ")", ";", "optValue", ".", "ifPresent", "(", "value", "->", "fields", ".", "put", "(", "key", ",", "value", ")", ")", ";", "}", "if", "(", "isOptionTemplate", ")", "{", "final", "ImmutableMap", ".", "Builder", "<", "Integer", ",", "Object", ">", "scopes", "=", "ImmutableMap", ".", "builder", "(", ")", ";", "for", "(", "NetFlowV9ScopeDef", "def", ":", "optionTemplate", ".", "scopeDefs", "(", ")", ")", "{", "int", "t", "=", "def", ".", "type", "(", ")", ";", "int", "len", "=", "def", ".", "length", "(", ")", ";", "long", "l", "=", "0", ";", "for", "(", "int", "i", "=", "0", ";", "i", "<", "len", ";", "i", "++", ")", "{", "l", "<<=", "8", ";", "l", "|=", "bb", ".", "readUnsignedByte", "(", ")", ";", "}", "scopes", ".", "put", "(", "t", ",", "l", ")", ";", "}", "records", ".", "add", "(", "NetFlowV9OptionRecord", ".", "create", "(", "fields", ".", "build", "(", ")", ",", "scopes", ".", "build", "(", ")", ")", ")", ";", "}", "else", "{", "records", ".", "add", "(", "NetFlowV9Record", ".", "create", "(", "fields", ".", "build", "(", ")", ")", ")", ";", "}", "// This flowset cannot contain another record, treat as padding", "if", "(", "end", "-", "bb", ".", "readerIndex", "(", ")", "<", "unitSize", ")", "{", "break", ";", "}", "}", "bb", ".", "readerIndex", "(", "end", ")", ";", "return", "records", ";", "}" ]
Data FlowSet Format <pre> | FIELD | DESCRIPTION | |------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | flowset_id | A FlowSet ID precedes each group of records within a NetFlow Version 9 data FlowSet. The FlowSet ID maps to a (previously received) template_id. The collector and display applications should use the flowset_id to map the appropriate type and length to any field values that follow. | | length | This field gives the length of the data FlowSet. Length is expressed in TLV format, meaning that the value includes the bytes used for the flowset_id and the length bytes themselves, as well as the combined lengths of any included data records. | | record_N—field_M | The remainder of the Version 9 data FlowSet is a collection of field values. The type and length of the fields have been previously defined in the template record referenced by the flowset_id/template_id. | | padding | Padding should be inserted to align the end of the FlowSet on a 32 bit boundary. Pay attention that the length field will include those padding bits. | </pre>
[ "Data", "FlowSet", "Format" ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog/plugins/netflow/v9/NetFlowV9Parser.java#L296-L356
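A worked example of the record sizing in parseRecords above, with assumed field lengths: unitSize is the width of one data record, and once fewer than unitSize bytes remain before the end of the flowset, the rest is treated as padding.

public class DataFlowsetSizing {
    public static void main(String[] args) {
        final int[] fieldLengths = {4, 4, 2};   // lengths from the referenced template (assumed)
        int unitSize = 0;
        for (final int len : fieldLengths) {
            unitSize += len;                    // one record is 10 bytes wide
        }
        final int flowsetLength = 4 + 3 * unitSize + 2; // id + length fields, three records, 2 padding bytes
        final int payload = flowsetLength - 4;
        System.out.printf("unitSize=%d, full records=%d, padding=%d byte(s)%n",
                unitSize, payload / unitSize, payload % unitSize);
    }
}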
14,825
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog/plugins/netflow/v9/NetFlowV9Parser.java
NetFlowV9Parser.parseRecordShallow
public static Integer parseRecordShallow(ByteBuf bb) { final int start = bb.readerIndex(); int usedTemplateId = bb.readUnsignedShort(); int length = bb.readUnsignedShort(); int end = bb.readerIndex() - 4 + length; bb.readerIndex(end); return usedTemplateId; }
java
public static Integer parseRecordShallow(ByteBuf bb) { final int start = bb.readerIndex(); int usedTemplateId = bb.readUnsignedShort(); int length = bb.readUnsignedShort(); int end = bb.readerIndex() - 4 + length; bb.readerIndex(end); return usedTemplateId; }
[ "public", "static", "Integer", "parseRecordShallow", "(", "ByteBuf", "bb", ")", "{", "final", "int", "start", "=", "bb", ".", "readerIndex", "(", ")", ";", "int", "usedTemplateId", "=", "bb", ".", "readUnsignedShort", "(", ")", ";", "int", "length", "=", "bb", ".", "readUnsignedShort", "(", ")", ";", "int", "end", "=", "bb", ".", "readerIndex", "(", ")", "-", "4", "+", "length", ";", "bb", ".", "readerIndex", "(", "end", ")", ";", "return", "usedTemplateId", ";", "}" ]
like above, but contains all records for the template id as raw bytes
[ "like", "above", "but", "contains", "all", "records", "for", "the", "template", "id", "as", "raw", "bytes" ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog/plugins/netflow/v9/NetFlowV9Parser.java#L359-L366
14,826
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog2/migrations/V20170607164210_MigrateReopenedIndicesToAliases.java
V20170607164210_MigrateReopenedIndicesToAliases.upgrade
@Override public void upgrade() { this.indexSetService.findAll() .stream() .map(mongoIndexSetFactory::create) .flatMap(indexSet -> getReopenedIndices(indexSet).stream()) .map(indexName -> { LOG.debug("Marking index {} to be reopened using alias.", indexName); return indexName; }) .forEach(indices::markIndexReopened); }
java
@Override public void upgrade() { this.indexSetService.findAll() .stream() .map(mongoIndexSetFactory::create) .flatMap(indexSet -> getReopenedIndices(indexSet).stream()) .map(indexName -> { LOG.debug("Marking index {} to be reopened using alias.", indexName); return indexName; }) .forEach(indices::markIndexReopened); }
[ "@", "Override", "public", "void", "upgrade", "(", ")", "{", "this", ".", "indexSetService", ".", "findAll", "(", ")", ".", "stream", "(", ")", ".", "map", "(", "mongoIndexSetFactory", "::", "create", ")", ".", "flatMap", "(", "indexSet", "->", "getReopenedIndices", "(", "indexSet", ")", ".", "stream", "(", ")", ")", ".", "map", "(", "indexName", "->", "{", "LOG", ".", "debug", "(", "\"Marking index {} to be reopened using alias.\"", ",", "indexName", ")", ";", "return", "indexName", ";", "}", ")", ".", "forEach", "(", "indices", "::", "markIndexReopened", ")", ";", "}" ]
Create aliases for legacy reopened indices.
[ "Create", "aliases", "for", "legacy", "reopened", "indices", "." ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog2/migrations/V20170607164210_MigrateReopenedIndicesToAliases.java#L74-L82
14,827
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog2/alerts/AbstractAlertCondition.java
AbstractAlertCondition.buildQueryFilter
protected String buildQueryFilter(String streamId, String query) { checkArgument(streamId != null, "streamId parameter cannot be null"); final String trimmedStreamId = streamId.trim(); checkArgument(!trimmedStreamId.isEmpty(), "streamId parameter cannot be empty"); final StringBuilder builder = new StringBuilder().append("streams:").append(trimmedStreamId); if (query != null) { final String trimmedQuery = query.trim(); if (!trimmedQuery.isEmpty() && !"*".equals(trimmedQuery)) { builder.append(" AND (").append(trimmedQuery).append(")"); } } return builder.toString(); }
java
protected String buildQueryFilter(String streamId, String query) { checkArgument(streamId != null, "streamId parameter cannot be null"); final String trimmedStreamId = streamId.trim(); checkArgument(!trimmedStreamId.isEmpty(), "streamId parameter cannot be empty"); final StringBuilder builder = new StringBuilder().append("streams:").append(trimmedStreamId); if (query != null) { final String trimmedQuery = query.trim(); if (!trimmedQuery.isEmpty() && !"*".equals(trimmedQuery)) { builder.append(" AND (").append(trimmedQuery).append(")"); } } return builder.toString(); }
[ "protected", "String", "buildQueryFilter", "(", "String", "streamId", ",", "String", "query", ")", "{", "checkArgument", "(", "streamId", "!=", "null", ",", "\"streamId parameter cannot be null\"", ")", ";", "final", "String", "trimmedStreamId", "=", "streamId", ".", "trim", "(", ")", ";", "checkArgument", "(", "!", "trimmedStreamId", ".", "isEmpty", "(", ")", ",", "\"streamId parameter cannot be empty\"", ")", ";", "final", "StringBuilder", "builder", "=", "new", "StringBuilder", "(", ")", ".", "append", "(", "\"streams:\"", ")", ".", "append", "(", "trimmedStreamId", ")", ";", "if", "(", "query", "!=", "null", ")", "{", "final", "String", "trimmedQuery", "=", "query", ".", "trim", "(", ")", ";", "if", "(", "!", "trimmedQuery", ".", "isEmpty", "(", ")", "&&", "!", "\"*\"", ".", "equals", "(", "trimmedQuery", ")", ")", "{", "builder", ".", "append", "(", "\" AND (\"", ")", ".", "append", "(", "trimmedQuery", ")", ".", "append", "(", "\")\"", ")", ";", "}", "}", "return", "builder", ".", "toString", "(", ")", ";", "}" ]
Combines the given stream ID and query string into a single filter string. @param streamId the stream ID @param query the query string (might be null or empty) @return the combined filter string
[ "Combines", "the", "given", "stream", "ID", "and", "query", "string", "into", "a", "single", "filter", "string", "." ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog2/alerts/AbstractAlertCondition.java#L170-L187
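buildQueryFilter above is protected, so the following is a standalone re-implementation of the same combination rule, purely to show the resulting filter strings; the stream id and queries are made up.

public class QueryFilterSketch {
    static String buildQueryFilter(String streamId, String query) {
        final StringBuilder builder = new StringBuilder("streams:").append(streamId.trim());
        if (query != null) {
            final String trimmed = query.trim();
            if (!trimmed.isEmpty() && !"*".equals(trimmed)) {
                builder.append(" AND (").append(trimmed).append(")");
            }
        }
        return builder.toString();
    }

    public static void main(String[] args) {
        System.out.println(buildQueryFilter("5b3983a2c9e77bb6c80b6d99", "source:gateway01"));
        // streams:5b3983a2c9e77bb6c80b6d99 AND (source:gateway01)
        System.out.println(buildQueryFilter("5b3983a2c9e77bb6c80b6d99", " * "));
        // streams:5b3983a2c9e77bb6c80b6d99
    }
}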
14,828
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog2/inputs/syslog/tcp/SyslogOctetCountFrameDecoder.java
SyslogOctetCountFrameDecoder.findFrameSizeValueLength
private int findFrameSizeValueLength(final ByteBuf buffer) { final int readerIndex = buffer.readerIndex(); int index = buffer.forEachByte(BYTE_PROCESSOR); if (index >= 0) { return index - readerIndex; } else { return -1; } }
java
private int findFrameSizeValueLength(final ByteBuf buffer) { final int readerIndex = buffer.readerIndex(); int index = buffer.forEachByte(BYTE_PROCESSOR); if (index >= 0) { return index - readerIndex; } else { return -1; } }
[ "private", "int", "findFrameSizeValueLength", "(", "final", "ByteBuf", "buffer", ")", "{", "final", "int", "readerIndex", "=", "buffer", ".", "readerIndex", "(", ")", ";", "int", "index", "=", "buffer", ".", "forEachByte", "(", "BYTE_PROCESSOR", ")", ";", "if", "(", "index", ">=", "0", ")", "{", "return", "index", "-", "readerIndex", ";", "}", "else", "{", "return", "-", "1", ";", "}", "}" ]
Find the byte length of the frame length value. @param buffer The channel buffer @return The length of the frame length value
[ "Find", "the", "byte", "length", "of", "the", "frame", "length", "value", "." ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog2/inputs/syslog/tcp/SyslogOctetCountFrameDecoder.java#L70-L79
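A self-contained sketch of the octet-counted framing this decoder handles (RFC 6587): the digits before the first space announce the frame length. It uses Netty's ByteProcessor.FIND_LINEAR_WHITESPACE, which may or may not be the exact processor behind BYTE_PROCESSOR in the record above.

import java.nio.charset.StandardCharsets;

import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.util.ByteProcessor;

public class OctetCountFramingSketch {
    public static void main(String[] args) {
        // "12" announces that twelve bytes of syslog payload follow the space.
        final ByteBuf buffer = Unpooled.copiedBuffer("12 <13>example!", StandardCharsets.US_ASCII);

        final int whitespaceIndex = buffer.forEachByte(ByteProcessor.FIND_LINEAR_WHITESPACE);
        final int digits = whitespaceIndex - buffer.readerIndex(); // 2, same idea as findFrameSizeValueLength

        final int frameLength = Integer.parseInt(
                buffer.readCharSequence(digits, StandardCharsets.US_ASCII).toString());
        buffer.skipBytes(1); // the separating space
        System.out.println(buffer.readCharSequence(frameLength, StandardCharsets.US_ASCII)); // <13>example!
        buffer.release();
    }
}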
14,829
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog2/database/MongoConnectionImpl.java
MongoConnectionImpl.connect
@Override public synchronized Mongo connect() { if (m == null) { final String dbName = mongoClientURI.getDatabase(); if (isNullOrEmpty(dbName)) { LOG.error("The MongoDB database name must not be null or empty (mongodb_uri was: {})", mongoClientURI); throw new RuntimeException("MongoDB database name is missing."); } m = new MongoClient(mongoClientURI); db = m.getDB(dbName); db.setWriteConcern(WriteConcern.ACKNOWLEDGED); mongoDatabase = m.getDatabase(dbName).withWriteConcern(WriteConcern.ACKNOWLEDGED); } try { db.command("{ ping: 1 }"); } catch (MongoCommandException e) { if (e.getCode() == 18) { throw new MongoException("Couldn't connect to MongoDB. Please check the authentication credentials.", e); } else { throw new MongoException("Couldn't connect to MongoDB: " + e.getMessage(), e); } } final Version mongoVersion = getMongoVersion(m.getDB("admin")); if (mongoVersion != null && mongoVersion.lessThan(MINIMUM_MONGODB_VERSION)) { LOG.warn("You're running MongoDB {} but Graylog requires at least MongoDB {}. Please upgrade.", mongoVersion, MINIMUM_MONGODB_VERSION); } return m; }
java
@Override public synchronized Mongo connect() { if (m == null) { final String dbName = mongoClientURI.getDatabase(); if (isNullOrEmpty(dbName)) { LOG.error("The MongoDB database name must not be null or empty (mongodb_uri was: {})", mongoClientURI); throw new RuntimeException("MongoDB database name is missing."); } m = new MongoClient(mongoClientURI); db = m.getDB(dbName); db.setWriteConcern(WriteConcern.ACKNOWLEDGED); mongoDatabase = m.getDatabase(dbName).withWriteConcern(WriteConcern.ACKNOWLEDGED); } try { db.command("{ ping: 1 }"); } catch (MongoCommandException e) { if (e.getCode() == 18) { throw new MongoException("Couldn't connect to MongoDB. Please check the authentication credentials.", e); } else { throw new MongoException("Couldn't connect to MongoDB: " + e.getMessage(), e); } } final Version mongoVersion = getMongoVersion(m.getDB("admin")); if (mongoVersion != null && mongoVersion.lessThan(MINIMUM_MONGODB_VERSION)) { LOG.warn("You're running MongoDB {} but Graylog requires at least MongoDB {}. Please upgrade.", mongoVersion, MINIMUM_MONGODB_VERSION); } return m; }
[ "@", "Override", "public", "synchronized", "Mongo", "connect", "(", ")", "{", "if", "(", "m", "==", "null", ")", "{", "final", "String", "dbName", "=", "mongoClientURI", ".", "getDatabase", "(", ")", ";", "if", "(", "isNullOrEmpty", "(", "dbName", ")", ")", "{", "LOG", ".", "error", "(", "\"The MongoDB database name must not be null or empty (mongodb_uri was: {})\"", ",", "mongoClientURI", ")", ";", "throw", "new", "RuntimeException", "(", "\"MongoDB database name is missing.\"", ")", ";", "}", "m", "=", "new", "MongoClient", "(", "mongoClientURI", ")", ";", "db", "=", "m", ".", "getDB", "(", "dbName", ")", ";", "db", ".", "setWriteConcern", "(", "WriteConcern", ".", "ACKNOWLEDGED", ")", ";", "mongoDatabase", "=", "m", ".", "getDatabase", "(", "dbName", ")", ".", "withWriteConcern", "(", "WriteConcern", ".", "ACKNOWLEDGED", ")", ";", "}", "try", "{", "db", ".", "command", "(", "\"{ ping: 1 }\"", ")", ";", "}", "catch", "(", "MongoCommandException", "e", ")", "{", "if", "(", "e", ".", "getCode", "(", ")", "==", "18", ")", "{", "throw", "new", "MongoException", "(", "\"Couldn't connect to MongoDB. Please check the authentication credentials.\"", ",", "e", ")", ";", "}", "else", "{", "throw", "new", "MongoException", "(", "\"Couldn't connect to MongoDB: \"", "+", "e", ".", "getMessage", "(", ")", ",", "e", ")", ";", "}", "}", "final", "Version", "mongoVersion", "=", "getMongoVersion", "(", "m", ".", "getDB", "(", "\"admin\"", ")", ")", ";", "if", "(", "mongoVersion", "!=", "null", "&&", "mongoVersion", ".", "lessThan", "(", "MINIMUM_MONGODB_VERSION", ")", ")", "{", "LOG", ".", "warn", "(", "\"You're running MongoDB {} but Graylog requires at least MongoDB {}. Please upgrade.\"", ",", "mongoVersion", ",", "MINIMUM_MONGODB_VERSION", ")", ";", "}", "return", "m", ";", "}" ]
Connect the instance.
[ "Connect", "the", "instance", "." ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog2/database/MongoConnectionImpl.java#L67-L100
14,830
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog2/database/PaginatedDbService.java
PaginatedDbService.getSortBuilder
protected DBSort.SortBuilder getSortBuilder(String order, String field) { DBSort.SortBuilder sortBuilder; if ("desc".equalsIgnoreCase(order)) { sortBuilder = DBSort.desc(field); } else { sortBuilder = DBSort.asc(field); } return sortBuilder; }
java
protected DBSort.SortBuilder getSortBuilder(String order, String field) { DBSort.SortBuilder sortBuilder; if ("desc".equalsIgnoreCase(order)) { sortBuilder = DBSort.desc(field); } else { sortBuilder = DBSort.asc(field); } return sortBuilder; }
[ "protected", "DBSort", ".", "SortBuilder", "getSortBuilder", "(", "String", "order", ",", "String", "field", ")", "{", "DBSort", ".", "SortBuilder", "sortBuilder", ";", "if", "(", "\"desc\"", ".", "equalsIgnoreCase", "(", "order", ")", ")", "{", "sortBuilder", "=", "DBSort", ".", "desc", "(", "field", ")", ";", "}", "else", "{", "sortBuilder", "=", "DBSort", ".", "asc", "(", "field", ")", ";", "}", "return", "sortBuilder", ";", "}" ]
Returns a sort builder for the given order and field name. @param order the order. either "asc" or "desc" @param field the field to sort on @return the sort builder
[ "Returns", "a", "sort", "builder", "for", "the", "given", "order", "and", "field", "name", "." ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog2/database/PaginatedDbService.java#L226-L234
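getSortBuilder above is protected, so this sketch shows the same decision with mongojack's DBSort directly; the field name is hypothetical.

import org.mongojack.DBSort;

public class SortBuilderSketch {
    public static void main(String[] args) {
        final String order = "DESC"; // anything but "desc" (case-insensitive) sorts ascending
        final DBSort.SortBuilder sort = "desc".equalsIgnoreCase(order)
                ? DBSort.desc("created_at")
                : DBSort.asc("created_at");
        System.out.println(sort);
    }
}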
14,831
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog2/lookup/adapters/DnsLookupDataAdapter.java
DnsLookupDataAdapter.assignMinimumTTL
private void assignMinimumTTL(List<? extends DnsAnswer> dnsAnswers, LookupResult.Builder builder) { if (config.hasOverrideTTL()) { builder.cacheTTL(config.getCacheTTLOverrideMillis()); } else { // Deduce minimum TTL on all TXT records. A TTL will always be returned by DNS server. builder.cacheTTL(dnsAnswers.stream() .map(DnsAnswer::dnsTTL) .min(Comparator.comparing(Long::valueOf)).get() * 1000); } }
java
private void assignMinimumTTL(List<? extends DnsAnswer> dnsAnswers, LookupResult.Builder builder) { if (config.hasOverrideTTL()) { builder.cacheTTL(config.getCacheTTLOverrideMillis()); } else { // Deduce minimum TTL on all TXT records. A TTL will always be returned by DNS server. builder.cacheTTL(dnsAnswers.stream() .map(DnsAnswer::dnsTTL) .min(Comparator.comparing(Long::valueOf)).get() * 1000); } }
[ "private", "void", "assignMinimumTTL", "(", "List", "<", "?", "extends", "DnsAnswer", ">", "dnsAnswers", ",", "LookupResult", ".", "Builder", "builder", ")", "{", "if", "(", "config", ".", "hasOverrideTTL", "(", ")", ")", "{", "builder", ".", "cacheTTL", "(", "config", ".", "getCacheTTLOverrideMillis", "(", ")", ")", ";", "}", "else", "{", "// Deduce minimum TTL on all TXT records. A TTL will always be returned by DNS server.", "builder", ".", "cacheTTL", "(", "dnsAnswers", ".", "stream", "(", ")", ".", "map", "(", "DnsAnswer", "::", "dnsTTL", ")", ".", "min", "(", "Comparator", ".", "comparing", "(", "Long", "::", "valueOf", ")", ")", ".", "get", "(", ")", "*", "1000", ")", ";", "}", "}" ]
Assigns the minimum TTL found in the supplied DnsAnswers. The minimum makes sense, because this is the least amount of time that at least one of the records is valid for.
[ "Assigns", "the", "minimum", "TTL", "found", "in", "the", "supplied", "DnsAnswers", ".", "The", "minimum", "makes", "sense", "because", "this", "is", "the", "least", "amount", "of", "time", "that", "at", "least", "one", "of", "the", "records", "is", "valid", "for", "." ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog2/lookup/adapters/DnsLookupDataAdapter.java#L373-L383
14,832
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog2/contentpacks/ContentPackInstallationPersistenceService.java
ContentPackInstallationPersistenceService.countInstallationOfEntityById
public long countInstallationOfEntityById(ModelId entityId) { final String field = String.format(Locale.ROOT, "%s.%s", ContentPackInstallation.FIELD_ENTITIES, NativeEntityDescriptor.FIELD_META_ID); return dbCollection.getCount(DBQuery.is(field, entityId)); }
java
public long countInstallationOfEntityById(ModelId entityId) { final String field = String.format(Locale.ROOT, "%s.%s", ContentPackInstallation.FIELD_ENTITIES, NativeEntityDescriptor.FIELD_META_ID); return dbCollection.getCount(DBQuery.is(field, entityId)); }
[ "public", "long", "countInstallationOfEntityById", "(", "ModelId", "entityId", ")", "{", "final", "String", "field", "=", "String", ".", "format", "(", "Locale", ".", "ROOT", ",", "\"%s.%s\"", ",", "ContentPackInstallation", ".", "FIELD_ENTITIES", ",", "NativeEntityDescriptor", ".", "FIELD_META_ID", ")", ";", "return", "dbCollection", ".", "getCount", "(", "DBQuery", ".", "is", "(", "field", ",", "entityId", ")", ")", ";", "}" ]
Returns the number of installations the given content pack entity ID is used in. @param entityId the native entity ID @return number of installations
[ "Returns", "the", "number", "of", "installations", "the", "given", "content", "pack", "entity", "ID", "is", "used", "in", "." ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog2/contentpacks/ContentPackInstallationPersistenceService.java#L139-L143
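Usage sketch; the entity ID value is a placeholder and the ModelId.of(...) factory is assumed from the surrounding content pack code.
    // How many content pack installations reference this native entity?
    final long installations = contentPackInstallationPersistenceService
            .countInstallationOfEntityById(ModelId.of("5d2f8530dc5726fa9b000001"));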
14,833
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog2/plugin/Message.java
Message.addStream
public void addStream(Stream stream) { indexSets.add(stream.getIndexSet()); if (streams.add(stream)) { sizeCounter.inc(8); if (LOG.isTraceEnabled()) { LOG.trace("[Message size update][{}] stream added: {}", getId(), sizeCounter.getCount()); } } }
java
public void addStream(Stream stream) { indexSets.add(stream.getIndexSet()); if (streams.add(stream)) { sizeCounter.inc(8); if (LOG.isTraceEnabled()) { LOG.trace("[Message size update][{}] stream added: {}", getId(), sizeCounter.getCount()); } } }
[ "public", "void", "addStream", "(", "Stream", "stream", ")", "{", "indexSets", ".", "add", "(", "stream", ".", "getIndexSet", "(", ")", ")", ";", "if", "(", "streams", ".", "add", "(", "stream", ")", ")", "{", "sizeCounter", ".", "inc", "(", "8", ")", ";", "if", "(", "LOG", ".", "isTraceEnabled", "(", ")", ")", "{", "LOG", ".", "trace", "(", "\"[Message size update][{}] stream added: {}\"", ",", "getId", "(", ")", ",", "sizeCounter", ".", "getCount", "(", ")", ")", ";", "}", "}", "}" ]
Assign the given stream to this message. @param stream the stream to route this message into
[ "Assign", "the", "given", "stream", "to", "this", "message", "." ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog2/plugin/Message.java#L567-L575
14,834
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog2/plugin/Message.java
Message.removeStream
public boolean removeStream(Stream stream) { final boolean removed = streams.remove(stream); if (removed) { indexSets.clear(); for (Stream s : streams) { indexSets.add(s.getIndexSet()); } sizeCounter.dec(8); if (LOG.isTraceEnabled()) { LOG.trace("[Message size update][{}] stream removed: {}", getId(), sizeCounter.getCount()); } } return removed; }
java
public boolean removeStream(Stream stream) { final boolean removed = streams.remove(stream); if (removed) { indexSets.clear(); for (Stream s : streams) { indexSets.add(s.getIndexSet()); } sizeCounter.dec(8); if (LOG.isTraceEnabled()) { LOG.trace("[Message size update][{}] stream removed: {}", getId(), sizeCounter.getCount()); } } return removed; }
[ "public", "boolean", "removeStream", "(", "Stream", "stream", ")", "{", "final", "boolean", "removed", "=", "streams", ".", "remove", "(", "stream", ")", ";", "if", "(", "removed", ")", "{", "indexSets", ".", "clear", "(", ")", ";", "for", "(", "Stream", "s", ":", "streams", ")", "{", "indexSets", ".", "add", "(", "s", ".", "getIndexSet", "(", ")", ")", ";", "}", "sizeCounter", ".", "dec", "(", "8", ")", ";", "if", "(", "LOG", ".", "isTraceEnabled", "(", ")", ")", "{", "LOG", ".", "trace", "(", "\"[Message size update][{}] stream removed: {}\"", ",", "getId", "(", ")", ",", "sizeCounter", ".", "getCount", "(", ")", ")", ";", "}", "}", "return", "removed", ";", "}" ]
Remove the stream assignment from this message. @param stream the stream assignment to remove this message from @return <tt>true</tt> if this message was assigned to the stream
[ "Remove", "the", "stream", "assignment", "from", "this", "message", "." ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog2/plugin/Message.java#L592-L607
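Illustrative sketch of the stream-routing calls documented in the two Message records above; the message and stream instances are assumed to come from the processing pipeline.
    message.addStream(stream);                               // routes the message and registers the stream's index set
    final boolean wasRouted = message.removeStream(stream);  // true if the message had been assigned to the stream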
14,835
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog2/indexer/searches/timeranges/TimeRanges.java
TimeRanges.toSeconds
public static int toSeconds(TimeRange timeRange) { if (timeRange.getFrom() == null || timeRange.getTo() == null) { return 0; } try { return Seconds.secondsBetween(timeRange.getFrom(), timeRange.getTo()).getSeconds(); } catch (IllegalArgumentException e) { return 0; } }
java
public static int toSeconds(TimeRange timeRange) { if (timeRange.getFrom() == null || timeRange.getTo() == null) { return 0; } try { return Seconds.secondsBetween(timeRange.getFrom(), timeRange.getTo()).getSeconds(); } catch (IllegalArgumentException e) { return 0; } }
[ "public", "static", "int", "toSeconds", "(", "TimeRange", "timeRange", ")", "{", "if", "(", "timeRange", ".", "getFrom", "(", ")", "==", "null", "||", "timeRange", ".", "getTo", "(", ")", "==", "null", ")", "{", "return", "0", ";", "}", "try", "{", "return", "Seconds", ".", "secondsBetween", "(", "timeRange", ".", "getFrom", "(", ")", ",", "timeRange", ".", "getTo", "(", ")", ")", ".", "getSeconds", "(", ")", ";", "}", "catch", "(", "IllegalArgumentException", "e", ")", "{", "return", "0", ";", "}", "}" ]
Calculate the number of seconds in the given time range. @param timeRange the {@link TimeRange} @return the number of seconds in the given time range or 0 if an error occurred.
[ "Calculate", "the", "number", "of", "seconds", "in", "the", "given", "time", "range", "." ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog2/indexer/searches/timeranges/TimeRanges.java#L32-L42
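Usage sketch; timeRange is assumed to be any TimeRange instance built elsewhere.
    // A five-minute range yields 300; open-ended or invalid ranges yield 0.
    final int seconds = TimeRanges.toSeconds(timeRange);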
14,836
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog2/indexer/fieldtypes/IndexFieldTypePollerPeriodical.java
IndexFieldTypePollerPeriodical.handleIndexSetCreation
@Subscribe public void handleIndexSetCreation(final IndexSetCreatedEvent event) { final String indexSetId = event.indexSet().id(); // We are NOT using IndexSetRegistry#get(String) here because of this: https://github.com/Graylog2/graylog2-server/issues/4625 final Optional<IndexSetConfig> optionalIndexSet = indexSetService.get(indexSetId); if (optionalIndexSet.isPresent()) { schedule(mongoIndexSetFactory.create(optionalIndexSet.get())); } else { LOG.warn("Couldn't find newly created index set <{}>", indexSetId); } }
java
@Subscribe public void handleIndexSetCreation(final IndexSetCreatedEvent event) { final String indexSetId = event.indexSet().id(); // We are NOT using IndexSetRegistry#get(String) here because of this: https://github.com/Graylog2/graylog2-server/issues/4625 final Optional<IndexSetConfig> optionalIndexSet = indexSetService.get(indexSetId); if (optionalIndexSet.isPresent()) { schedule(mongoIndexSetFactory.create(optionalIndexSet.get())); } else { LOG.warn("Couldn't find newly created index set <{}>", indexSetId); } }
[ "@", "Subscribe", "public", "void", "handleIndexSetCreation", "(", "final", "IndexSetCreatedEvent", "event", ")", "{", "final", "String", "indexSetId", "=", "event", ".", "indexSet", "(", ")", ".", "id", "(", ")", ";", "// We are NOT using IndexSetRegistry#get(String) here because of this: https://github.com/Graylog2/graylog2-server/issues/4625", "final", "Optional", "<", "IndexSetConfig", ">", "optionalIndexSet", "=", "indexSetService", ".", "get", "(", "indexSetId", ")", ";", "if", "(", "optionalIndexSet", ".", "isPresent", "(", ")", ")", "{", "schedule", "(", "mongoIndexSetFactory", ".", "create", "(", "optionalIndexSet", ".", "get", "(", ")", ")", ")", ";", "}", "else", "{", "LOG", ".", "warn", "(", "\"Couldn't find newly created index set <{}>\"", ",", "indexSetId", ")", ";", "}", "}" ]
Creates a new field type polling job for the newly created index set. @param event index set creation event
[ "Creates", "a", "new", "field", "type", "polling", "job", "for", "the", "newly", "created", "index", "set", "." ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog2/indexer/fieldtypes/IndexFieldTypePollerPeriodical.java#L138-L149
14,837
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog2/indexer/fieldtypes/IndexFieldTypePollerPeriodical.java
IndexFieldTypePollerPeriodical.handleIndexSetDeletion
@Subscribe public void handleIndexSetDeletion(final IndexSetDeletedEvent event) { final String indexSetId = event.id(); LOG.debug("Disable field type updating for index set <{}>", indexSetId); cancel(futures.remove(indexSetId)); }
java
@Subscribe public void handleIndexSetDeletion(final IndexSetDeletedEvent event) { final String indexSetId = event.id(); LOG.debug("Disable field type updating for index set <{}>", indexSetId); cancel(futures.remove(indexSetId)); }
[ "@", "Subscribe", "public", "void", "handleIndexSetDeletion", "(", "final", "IndexSetDeletedEvent", "event", ")", "{", "final", "String", "indexSetId", "=", "event", ".", "id", "(", ")", ";", "LOG", ".", "debug", "(", "\"Disable field type updating for index set <{}>\"", ",", "indexSetId", ")", ";", "cancel", "(", "futures", ".", "remove", "(", "indexSetId", ")", ")", ";", "}" ]
Removes the field type polling job for the now deleted index set. @param event index set deletion event
[ "Removes", "the", "field", "type", "polling", "job", "for", "the", "now", "deleted", "index", "set", "." ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog2/indexer/fieldtypes/IndexFieldTypePollerPeriodical.java#L155-L161
14,838
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog2/indexer/fieldtypes/IndexFieldTypePollerPeriodical.java
IndexFieldTypePollerPeriodical.handleIndexDeletion
@Subscribe public void handleIndexDeletion(final IndicesDeletedEvent event) { event.indices().forEach(indexName -> { LOG.debug("Removing field type information for deleted index <{}>", indexName); dbService.delete(indexName); }); }
java
@Subscribe public void handleIndexDeletion(final IndicesDeletedEvent event) { event.indices().forEach(indexName -> { LOG.debug("Removing field type information for deleted index <{}>", indexName); dbService.delete(indexName); }); }
[ "@", "Subscribe", "public", "void", "handleIndexDeletion", "(", "final", "IndicesDeletedEvent", "event", ")", "{", "event", ".", "indices", "(", ")", ".", "forEach", "(", "indexName", "->", "{", "LOG", ".", "debug", "(", "\"Removing field type information for deleted index <{}>\"", ",", "indexName", ")", ";", "dbService", ".", "delete", "(", "indexName", ")", ";", "}", ")", ";", "}" ]
Removes the index field type data for the deleted index. @param event index deletion event
[ "Removes", "the", "index", "field", "type", "data", "for", "the", "deleted", "index", "." ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog2/indexer/fieldtypes/IndexFieldTypePollerPeriodical.java#L167-L173
14,839
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog2/indexer/fieldtypes/IndexFieldTypePollerPeriodical.java
IndexFieldTypePollerPeriodical.schedule
private void schedule(final IndexSet indexSet) { final String indexSetId = indexSet.getConfig().id(); final String indexSetTitle = indexSet.getConfig().title(); final Duration refreshInterval = indexSet.getConfig().fieldTypeRefreshInterval(); if (Duration.ZERO.equals(refreshInterval)) { LOG.debug("Skipping index set with ZERO refresh interval <{}/{}>", indexSetTitle, indexSetId); return; } if (!indexSet.getConfig().isWritable()) { LOG.debug("Skipping non-writable index set <{}/{}>", indexSetTitle, indexSetId); return; } // Make sure there is no existing polling job running for this index set cancel(futures.get(indexSetId)); LOG.debug("Schedule index field type updating for index set <{}/{}> every {} ms", indexSetId, indexSetTitle, refreshInterval.getMillis()); final ScheduledFuture<?> future = scheduler.scheduleAtFixedRate(() -> { try { // Only check the active write index on a regular basis, the others don't change anymore final String activeWriteIndex = indexSet.getActiveWriteIndex(); if (activeWriteIndex != null) { LOG.debug("Updating index field types for active write index <{}> in index set <{}/{}>", activeWriteIndex, indexSetTitle, indexSetId); poller.pollIndex(activeWriteIndex, indexSetId).ifPresent(dbService::upsert); } else { LOG.warn("Active write index for index set \"{}\" ({}) doesn't exist yet", indexSetTitle, indexSetId); } } catch (TooManyAliasesException e) { LOG.error("Couldn't get active write index", e); } catch (Exception e) { LOG.error("Couldn't update field types for index set <{}/{}>", indexSetTitle, indexSetId, e); } }, 0, refreshInterval.getMillis(), TimeUnit.MILLISECONDS); futures.put(indexSetId, future); }
java
private void schedule(final IndexSet indexSet) { final String indexSetId = indexSet.getConfig().id(); final String indexSetTitle = indexSet.getConfig().title(); final Duration refreshInterval = indexSet.getConfig().fieldTypeRefreshInterval(); if (Duration.ZERO.equals(refreshInterval)) { LOG.debug("Skipping index set with ZERO refresh interval <{}/{}>", indexSetTitle, indexSetId); return; } if (!indexSet.getConfig().isWritable()) { LOG.debug("Skipping non-writable index set <{}/{}>", indexSetTitle, indexSetId); return; } // Make sure there is no existing polling job running for this index set cancel(futures.get(indexSetId)); LOG.debug("Schedule index field type updating for index set <{}/{}> every {} ms", indexSetId, indexSetTitle, refreshInterval.getMillis()); final ScheduledFuture<?> future = scheduler.scheduleAtFixedRate(() -> { try { // Only check the active write index on a regular basis, the others don't change anymore final String activeWriteIndex = indexSet.getActiveWriteIndex(); if (activeWriteIndex != null) { LOG.debug("Updating index field types for active write index <{}> in index set <{}/{}>", activeWriteIndex, indexSetTitle, indexSetId); poller.pollIndex(activeWriteIndex, indexSetId).ifPresent(dbService::upsert); } else { LOG.warn("Active write index for index set \"{}\" ({}) doesn't exist yet", indexSetTitle, indexSetId); } } catch (TooManyAliasesException e) { LOG.error("Couldn't get active write index", e); } catch (Exception e) { LOG.error("Couldn't update field types for index set <{}/{}>", indexSetTitle, indexSetId, e); } }, 0, refreshInterval.getMillis(), TimeUnit.MILLISECONDS); futures.put(indexSetId, future); }
[ "private", "void", "schedule", "(", "final", "IndexSet", "indexSet", ")", "{", "final", "String", "indexSetId", "=", "indexSet", ".", "getConfig", "(", ")", ".", "id", "(", ")", ";", "final", "String", "indexSetTitle", "=", "indexSet", ".", "getConfig", "(", ")", ".", "title", "(", ")", ";", "final", "Duration", "refreshInterval", "=", "indexSet", ".", "getConfig", "(", ")", ".", "fieldTypeRefreshInterval", "(", ")", ";", "if", "(", "Duration", ".", "ZERO", ".", "equals", "(", "refreshInterval", ")", ")", "{", "LOG", ".", "debug", "(", "\"Skipping index set with ZERO refresh interval <{}/{}>\"", ",", "indexSetTitle", ",", "indexSetId", ")", ";", "return", ";", "}", "if", "(", "!", "indexSet", ".", "getConfig", "(", ")", ".", "isWritable", "(", ")", ")", "{", "LOG", ".", "debug", "(", "\"Skipping non-writable index set <{}/{}>\"", ",", "indexSetTitle", ",", "indexSetId", ")", ";", "return", ";", "}", "// Make sure there is no existing polling job running for this index set", "cancel", "(", "futures", ".", "get", "(", "indexSetId", ")", ")", ";", "LOG", ".", "debug", "(", "\"Schedule index field type updating for index set <{}/{}> every {} ms\"", ",", "indexSetId", ",", "indexSetTitle", ",", "refreshInterval", ".", "getMillis", "(", ")", ")", ";", "final", "ScheduledFuture", "<", "?", ">", "future", "=", "scheduler", ".", "scheduleAtFixedRate", "(", "(", ")", "->", "{", "try", "{", "// Only check the active write index on a regular basis, the others don't change anymore", "final", "String", "activeWriteIndex", "=", "indexSet", ".", "getActiveWriteIndex", "(", ")", ";", "if", "(", "activeWriteIndex", "!=", "null", ")", "{", "LOG", ".", "debug", "(", "\"Updating index field types for active write index <{}> in index set <{}/{}>\"", ",", "activeWriteIndex", ",", "indexSetTitle", ",", "indexSetId", ")", ";", "poller", ".", "pollIndex", "(", "activeWriteIndex", ",", "indexSetId", ")", ".", "ifPresent", "(", "dbService", "::", "upsert", ")", ";", "}", "else", "{", "LOG", ".", "warn", "(", "\"Active write index for index set \\\"{}\\\" ({}) doesn't exist yet\"", ",", "indexSetTitle", ",", "indexSetId", ")", ";", "}", "}", "catch", "(", "TooManyAliasesException", "e", ")", "{", "LOG", ".", "error", "(", "\"Couldn't get active write index\"", ",", "e", ")", ";", "}", "catch", "(", "Exception", "e", ")", "{", "LOG", ".", "error", "(", "\"Couldn't update field types for index set <{}/{}>\"", ",", "indexSetTitle", ",", "indexSetId", ",", "e", ")", ";", "}", "}", ",", "0", ",", "refreshInterval", ".", "getMillis", "(", ")", ",", "TimeUnit", ".", "MILLISECONDS", ")", ";", "futures", ".", "put", "(", "indexSetId", ",", "future", ")", ";", "}" ]
Creates a new polling job for the given index set to keep the active write index information up to date. @param indexSet index set
[ "Creates", "a", "new", "polling", "job", "for", "the", "given", "index", "set", "to", "keep", "the", "active", "write", "index", "information", "up", "to", "date", "." ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog2/indexer/fieldtypes/IndexFieldTypePollerPeriodical.java#L179-L217
14,840
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog2/shared/plugins/PluginProperties.java
PluginProperties.fromJarFile
public static PluginProperties fromJarFile(final String filename) { final Properties properties = new Properties(); try { final JarFile jarFile = new JarFile(requireNonNull(filename)); final Optional<String> propertiesPath = getPropertiesPath(jarFile); if (propertiesPath.isPresent()) { LOG.debug("Loading <{}> from <{}>", propertiesPath.get(), filename); final ZipEntry entry = jarFile.getEntry(propertiesPath.get()); if (entry != null) { properties.load(jarFile.getInputStream(entry)); } else { LOG.debug("Plugin properties <{}> are missing in <{}>", propertiesPath.get(), filename); } } } catch (Exception e) { LOG.debug("Unable to load properties from plugin <{}>", filename, e); } return new PluginProperties(properties); }
java
public static PluginProperties fromJarFile(final String filename) { final Properties properties = new Properties(); try { final JarFile jarFile = new JarFile(requireNonNull(filename)); final Optional<String> propertiesPath = getPropertiesPath(jarFile); if (propertiesPath.isPresent()) { LOG.debug("Loading <{}> from <{}>", propertiesPath.get(), filename); final ZipEntry entry = jarFile.getEntry(propertiesPath.get()); if (entry != null) { properties.load(jarFile.getInputStream(entry)); } else { LOG.debug("Plugin properties <{}> are missing in <{}>", propertiesPath.get(), filename); } } } catch (Exception e) { LOG.debug("Unable to load properties from plugin <{}>", filename, e); } return new PluginProperties(properties); }
[ "public", "static", "PluginProperties", "fromJarFile", "(", "final", "String", "filename", ")", "{", "final", "Properties", "properties", "=", "new", "Properties", "(", ")", ";", "try", "{", "final", "JarFile", "jarFile", "=", "new", "JarFile", "(", "requireNonNull", "(", "filename", ")", ")", ";", "final", "Optional", "<", "String", ">", "propertiesPath", "=", "getPropertiesPath", "(", "jarFile", ")", ";", "if", "(", "propertiesPath", ".", "isPresent", "(", ")", ")", "{", "LOG", ".", "debug", "(", "\"Loading <{}> from <{}>\"", ",", "propertiesPath", ".", "get", "(", ")", ",", "filename", ")", ";", "final", "ZipEntry", "entry", "=", "jarFile", ".", "getEntry", "(", "propertiesPath", ".", "get", "(", ")", ")", ";", "if", "(", "entry", "!=", "null", ")", "{", "properties", ".", "load", "(", "jarFile", ".", "getInputStream", "(", "entry", ")", ")", ";", "}", "else", "{", "LOG", ".", "debug", "(", "\"Plugin properties <{}> are missing in <{}>\"", ",", "propertiesPath", ".", "get", "(", ")", ",", "filename", ")", ";", "}", "}", "}", "catch", "(", "Exception", "e", ")", "{", "LOG", ".", "debug", "(", "\"Unable to load properties from plugin <{}>\"", ",", "filename", ",", "e", ")", ";", "}", "return", "new", "PluginProperties", "(", "properties", ")", ";", "}" ]
Loads the Graylog plugin properties file from the given JAR file. The path to the properties file resource inside the JAR file is stored in the "Graylog-Plugin-Properties-Path" attribute of the JAR manifest. (Example: {@code org.graylog.plugins.graylog-plugin-map-widget}) If the plugin properties file does not exist or cannot be found (like in older plugins) a default {@link PluginProperties} object will be returned. @param filename path to the JAR file @return the plugin properties object
[ "Loads", "the", "Graylog", "plugin", "properties", "file", "from", "the", "given", "JAR", "file", "." ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog2/shared/plugins/PluginProperties.java#L58-L79
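Usage sketch with a placeholder JAR path; a missing or unreadable properties file simply produces the default PluginProperties object.
    final PluginProperties properties = PluginProperties.fromJarFile("/opt/graylog/plugin/graylog-plugin-example.jar");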
14,841
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog2/indexer/cluster/Cluster.java
Cluster.isConnected
public boolean isConnected() { final Health request = new Health.Builder() .local() .timeout(Ints.saturatedCast(requestTimeout.toSeconds())) .build(); try { final JestResult result = JestUtils.execute(jestClient, request, () -> "Couldn't check connection status of Elasticsearch"); final int numberOfDataNodes = result.getJsonObject().path("number_of_data_nodes").asInt(); return numberOfDataNodes > 0; } catch (ElasticsearchException e) { if (LOG.isDebugEnabled()) { LOG.error(e.getMessage(), e); } return false; } }
java
public boolean isConnected() { final Health request = new Health.Builder() .local() .timeout(Ints.saturatedCast(requestTimeout.toSeconds())) .build(); try { final JestResult result = JestUtils.execute(jestClient, request, () -> "Couldn't check connection status of Elasticsearch"); final int numberOfDataNodes = result.getJsonObject().path("number_of_data_nodes").asInt(); return numberOfDataNodes > 0; } catch (ElasticsearchException e) { if (LOG.isDebugEnabled()) { LOG.error(e.getMessage(), e); } return false; } }
[ "public", "boolean", "isConnected", "(", ")", "{", "final", "Health", "request", "=", "new", "Health", ".", "Builder", "(", ")", ".", "local", "(", ")", ".", "timeout", "(", "Ints", ".", "saturatedCast", "(", "requestTimeout", ".", "toSeconds", "(", ")", ")", ")", ".", "build", "(", ")", ";", "try", "{", "final", "JestResult", "result", "=", "JestUtils", ".", "execute", "(", "jestClient", ",", "request", ",", "(", ")", "->", "\"Couldn't check connection status of Elasticsearch\"", ")", ";", "final", "int", "numberOfDataNodes", "=", "result", ".", "getJsonObject", "(", ")", ".", "path", "(", "\"number_of_data_nodes\"", ")", ".", "asInt", "(", ")", ";", "return", "numberOfDataNodes", ">", "0", ";", "}", "catch", "(", "ElasticsearchException", "e", ")", "{", "if", "(", "LOG", ".", "isDebugEnabled", "(", ")", ")", "{", "LOG", ".", "error", "(", "e", ".", "getMessage", "(", ")", ",", "e", ")", ";", "}", "return", "false", ";", "}", "}" ]
Check if Elasticsearch is available and that there are data nodes in the cluster. @return {@code true} if the Elasticsearch client is up and the cluster contains data nodes, {@code false} otherwise
[ "Check", "if", "Elasticsearch", "is", "available", "and", "that", "there", "are", "data", "nodes", "in", "the", "cluster", "." ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog2/indexer/cluster/Cluster.java#L165-L181
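Usage sketch; cluster is assumed to be an injected org.graylog2.indexer.cluster.Cluster instance.
    if (!cluster.isConnected()) {
        // No Elasticsearch data nodes reachable - postpone index maintenance work.
    }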
14,842
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog2/indexer/cluster/Cluster.java
Cluster.waitForConnectedAndDeflectorHealthy
public void waitForConnectedAndDeflectorHealthy(long timeout, TimeUnit unit) throws InterruptedException, TimeoutException { LOG.debug("Waiting until the write-active index is healthy again, checking once per second."); final CountDownLatch latch = new CountDownLatch(1); final ScheduledFuture<?> scheduledFuture = scheduler.scheduleAtFixedRate(() -> { try { if (isConnected() && isDeflectorHealthy()) { LOG.debug("Write-active index is healthy again, unblocking waiting threads."); latch.countDown(); } } catch (Exception ignore) { } // to not cancel the schedule }, 0, 1, TimeUnit.SECONDS); // TODO should this be configurable? final boolean waitSuccess = latch.await(timeout, unit); scheduledFuture.cancel(true); // Make sure to cancel the task to avoid task leaks! if (!waitSuccess) { throw new TimeoutException("Write-active index didn't get healthy within timeout"); } }
java
public void waitForConnectedAndDeflectorHealthy(long timeout, TimeUnit unit) throws InterruptedException, TimeoutException { LOG.debug("Waiting until the write-active index is healthy again, checking once per second."); final CountDownLatch latch = new CountDownLatch(1); final ScheduledFuture<?> scheduledFuture = scheduler.scheduleAtFixedRate(() -> { try { if (isConnected() && isDeflectorHealthy()) { LOG.debug("Write-active index is healthy again, unblocking waiting threads."); latch.countDown(); } } catch (Exception ignore) { } // to not cancel the schedule }, 0, 1, TimeUnit.SECONDS); // TODO should this be configurable? final boolean waitSuccess = latch.await(timeout, unit); scheduledFuture.cancel(true); // Make sure to cancel the task to avoid task leaks! if (!waitSuccess) { throw new TimeoutException("Write-active index didn't get healthy within timeout"); } }
[ "public", "void", "waitForConnectedAndDeflectorHealthy", "(", "long", "timeout", ",", "TimeUnit", "unit", ")", "throws", "InterruptedException", ",", "TimeoutException", "{", "LOG", ".", "debug", "(", "\"Waiting until the write-active index is healthy again, checking once per second.\"", ")", ";", "final", "CountDownLatch", "latch", "=", "new", "CountDownLatch", "(", "1", ")", ";", "final", "ScheduledFuture", "<", "?", ">", "scheduledFuture", "=", "scheduler", ".", "scheduleAtFixedRate", "(", "(", ")", "->", "{", "try", "{", "if", "(", "isConnected", "(", ")", "&&", "isDeflectorHealthy", "(", ")", ")", "{", "LOG", ".", "debug", "(", "\"Write-active index is healthy again, unblocking waiting threads.\"", ")", ";", "latch", ".", "countDown", "(", ")", ";", "}", "}", "catch", "(", "Exception", "ignore", ")", "{", "}", "// to not cancel the schedule", "}", ",", "0", ",", "1", ",", "TimeUnit", ".", "SECONDS", ")", ";", "// TODO should this be configurable?", "final", "boolean", "waitSuccess", "=", "latch", ".", "await", "(", "timeout", ",", "unit", ")", ";", "scheduledFuture", ".", "cancel", "(", "true", ")", ";", "// Make sure to cancel the task to avoid task leaks!", "if", "(", "!", "waitSuccess", ")", "{", "throw", "new", "TimeoutException", "(", "\"Write-active index didn't get healthy within timeout\"", ")", ";", "}", "}" ]
Blocks until the Elasticsearch cluster and current write index is healthy again or the given timeout fires. @param timeout the timeout value @param unit the timeout unit @throws InterruptedException @throws TimeoutException
[ "Blocks", "until", "the", "Elasticsearch", "cluster", "and", "current", "write", "index", "is", "healthy", "again", "or", "the", "given", "timeout", "fires", "." ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog2/indexer/cluster/Cluster.java#L215-L235
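Usage sketch showing the blocking wait with an explicit timeout, following the signature documented in the record above:
    try {
        cluster.waitForConnectedAndDeflectorHealthy(30, TimeUnit.SECONDS);
    } catch (TimeoutException e) {
        // The write-active index did not become healthy within 30 seconds.
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
    }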
14,843
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog2/system/shutdown/GracefulShutdownService.java
GracefulShutdownService.register
public void register(GracefulShutdownHook shutdownHook) { if (isShuttingDown.get()) { // Avoid any changes to the shutdown hooks set when the shutdown is already in progress throw new IllegalStateException("Couldn't register shutdown hook because shutdown is already in progress"); } shutdownHooks.add(requireNonNull(shutdownHook, "shutdownHook cannot be null")); }
java
public void register(GracefulShutdownHook shutdownHook) { if (isShuttingDown.get()) { // Avoid any changes to the shutdown hooks set when the shutdown is already in progress throw new IllegalStateException("Couldn't register shutdown hook because shutdown is already in progress"); } shutdownHooks.add(requireNonNull(shutdownHook, "shutdownHook cannot be null")); }
[ "public", "void", "register", "(", "GracefulShutdownHook", "shutdownHook", ")", "{", "if", "(", "isShuttingDown", ".", "get", "(", ")", ")", "{", "// Avoid any changes to the shutdown hooks set when the shutdown is already in progress", "throw", "new", "IllegalStateException", "(", "\"Couldn't register shutdown hook because shutdown is already in progress\"", ")", ";", "}", "shutdownHooks", ".", "add", "(", "requireNonNull", "(", "shutdownHook", ",", "\"shutdownHook cannot be null\"", ")", ")", ";", "}" ]
Register a shutdown hook with the service. @param shutdownHook a class that implements {@link GracefulShutdownHook} @throws IllegalStateException if the server shutdown is already in progress and the hook cannot be registered @throws NullPointerException if the shutdown hook argument is null
[ "Register", "a", "shutdown", "hook", "with", "the", "service", "." ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog2/system/shutdown/GracefulShutdownService.java#L104-L110
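Illustrative sketch of a component registering itself; the callback name doGracefulShutdown() is assumed from the GracefulShutdownHook interface and is not shown in this record.
    public class MyOutputBuffer implements GracefulShutdownHook {
        public MyOutputBuffer(GracefulShutdownService shutdownService) {
            shutdownService.register(this);   // throws IllegalStateException once shutdown has started
        }

        @Override
        public void doGracefulShutdown() throws Exception {
            // flush pending messages, close connections, ...
        }
    }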
14,844
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog2/web/PluginAssets.java
PluginAssets.sortedJsFiles
List<String> sortedJsFiles() { return jsFiles().stream() .sorted((file1, file2) -> { // Vendor JS scripts go first if (vendorJsFiles.contains(file1)) { return -1; } if (vendorJsFiles.contains(file2)) { return 1; } // Polyfill JS script goes second if (file1.equals(polyfillJsFile)) { return -1; } if (file2.equals(polyfillJsFile)) { return 1; } // Builtins JS script goes third if (file1.equals(builtinsJsFile)) { return -1; } if (file2.equals(builtinsJsFile)) { return 1; } // App JS script goes last, as plugins need to be loaded before return file2.compareTo(file1); }) .collect(Collectors.toList()); }
java
List<String> sortedJsFiles() { return jsFiles().stream() .sorted((file1, file2) -> { // Vendor JS scripts go first if (vendorJsFiles.contains(file1)) { return -1; } if (vendorJsFiles.contains(file2)) { return 1; } // Polyfill JS script goes second if (file1.equals(polyfillJsFile)) { return -1; } if (file2.equals(polyfillJsFile)) { return 1; } // Builtins JS script goes third if (file1.equals(builtinsJsFile)) { return -1; } if (file2.equals(builtinsJsFile)) { return 1; } // App JS script goes last, as plugins need to be loaded before return file2.compareTo(file1); }) .collect(Collectors.toList()); }
[ "List", "<", "String", ">", "sortedJsFiles", "(", ")", "{", "return", "jsFiles", "(", ")", ".", "stream", "(", ")", ".", "sorted", "(", "(", "file1", ",", "file2", ")", "->", "{", "// Vendor JS scripts go first", "if", "(", "vendorJsFiles", ".", "contains", "(", "file1", ")", ")", "{", "return", "-", "1", ";", "}", "if", "(", "vendorJsFiles", ".", "contains", "(", "file2", ")", ")", "{", "return", "1", ";", "}", "// Polyfill JS script goes second", "if", "(", "file1", ".", "equals", "(", "polyfillJsFile", ")", ")", "{", "return", "-", "1", ";", "}", "if", "(", "file2", ".", "equals", "(", "polyfillJsFile", ")", ")", "{", "return", "1", ";", "}", "// Builtins JS script goes third", "if", "(", "file1", ".", "equals", "(", "builtinsJsFile", ")", ")", "{", "return", "-", "1", ";", "}", "if", "(", "file2", ".", "equals", "(", "builtinsJsFile", ")", ")", "{", "return", "1", ";", "}", "// App JS script goes last, as plugins need to be loaded before", "return", "file2", ".", "compareTo", "(", "file1", ")", ";", "}", ")", ".", "collect", "(", "Collectors", ".", "toList", "(", ")", ")", ";", "}" ]
Sort JS files in the intended load order, so templates don't need to care about it.
[ "Sort", "JS", "files", "in", "the", "intended", "load", "order", "so", "templates", "don", "t", "need", "to", "care", "about", "it", "." ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog2/web/PluginAssets.java#L102-L133
14,845
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog2/utilities/Graphs.java
Graphs.singletonDirectedGraph
public static <N> ImmutableGraph<N> singletonDirectedGraph(N node) { final MutableGraph<N> graph = GraphBuilder.directed().build(); graph.addNode(node); return ImmutableGraph.copyOf(graph); }
java
public static <N> ImmutableGraph<N> singletonDirectedGraph(N node) { final MutableGraph<N> graph = GraphBuilder.directed().build(); graph.addNode(node); return ImmutableGraph.copyOf(graph); }
[ "public", "static", "<", "N", ">", "ImmutableGraph", "<", "N", ">", "singletonDirectedGraph", "(", "N", "node", ")", "{", "final", "MutableGraph", "<", "N", ">", "graph", "=", "GraphBuilder", ".", "directed", "(", ")", ".", "build", "(", ")", ";", "graph", ".", "addNode", "(", "node", ")", ";", "return", "ImmutableGraph", ".", "copyOf", "(", "graph", ")", ";", "}" ]
Returns an immutable directed graph, containing only the specified node. @param node The single node in the returned graph @param <N> The class of the nodes @return an immutable directed graph with a single node
[ "Returns", "an", "immutable", "directed", "graph", "containing", "only", "the", "specified", "node", "." ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog2/utilities/Graphs.java#L73-L77
14,846
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog2/utilities/Graphs.java
Graphs.singletonUndirectedGraph
public static <N> ImmutableGraph<N> singletonUndirectedGraph(N node) { final MutableGraph<N> graph = GraphBuilder.undirected().build(); graph.addNode(node); return ImmutableGraph.copyOf(graph); }
java
public static <N> ImmutableGraph<N> singletonUndirectedGraph(N node) { final MutableGraph<N> graph = GraphBuilder.undirected().build(); graph.addNode(node); return ImmutableGraph.copyOf(graph); }
[ "public", "static", "<", "N", ">", "ImmutableGraph", "<", "N", ">", "singletonUndirectedGraph", "(", "N", "node", ")", "{", "final", "MutableGraph", "<", "N", ">", "graph", "=", "GraphBuilder", ".", "undirected", "(", ")", ".", "build", "(", ")", ";", "graph", ".", "addNode", "(", "node", ")", ";", "return", "ImmutableGraph", ".", "copyOf", "(", "graph", ")", ";", "}" ]
Returns an immutable undirected graph, containing only the specified node. @param node The single node in the returned graph @param <N> The class of the nodes @return an immutable undirected graph with a single node
[ "Returns", "an", "immutable", "undirected", "graph", "containing", "only", "the", "specified", "node", "." ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog2/utilities/Graphs.java#L86-L90
14,847
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog2/utilities/Graphs.java
Graphs.singletonGraph
public static <N> ImmutableGraph<N> singletonGraph(Graph<N> graph, N node) { final MutableGraph<N> mutableGraph = GraphBuilder.from(graph).build(); mutableGraph.addNode(node); return ImmutableGraph.copyOf(mutableGraph); }
java
public static <N> ImmutableGraph<N> singletonGraph(Graph<N> graph, N node) { final MutableGraph<N> mutableGraph = GraphBuilder.from(graph).build(); mutableGraph.addNode(node); return ImmutableGraph.copyOf(mutableGraph); }
[ "public", "static", "<", "N", ">", "ImmutableGraph", "<", "N", ">", "singletonGraph", "(", "Graph", "<", "N", ">", "graph", ",", "N", "node", ")", "{", "final", "MutableGraph", "<", "N", ">", "mutableGraph", "=", "GraphBuilder", ".", "from", "(", "graph", ")", ".", "build", "(", ")", ";", "mutableGraph", ".", "addNode", "(", "node", ")", ";", "return", "ImmutableGraph", ".", "copyOf", "(", "mutableGraph", ")", ";", "}" ]
Returns an immutable graph, containing only the specified node. @param graph The graph to use as template for the created graph @param node The single node in the returned graph @param <N> The class of the nodes @return an immutable graph with a single node @see GraphBuilder#from(Graph)
[ "Returns", "an", "immutable", "graph", "containing", "only", "the", "specified", "node", "." ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog2/utilities/Graphs.java#L101-L105
14,848
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog2/utilities/Graphs.java
Graphs.merge
public static <N> void merge(MutableGraph<N> graph1, Graph<N> graph2) { for (N node : graph2.nodes()) { graph1.addNode(node); } for (EndpointPair<N> edge : graph2.edges()) { graph1.putEdge(edge.nodeU(), edge.nodeV()); } }
java
public static <N> void merge(MutableGraph<N> graph1, Graph<N> graph2) { for (N node : graph2.nodes()) { graph1.addNode(node); } for (EndpointPair<N> edge : graph2.edges()) { graph1.putEdge(edge.nodeU(), edge.nodeV()); } }
[ "public", "static", "<", "N", ">", "void", "merge", "(", "MutableGraph", "<", "N", ">", "graph1", ",", "Graph", "<", "N", ">", "graph2", ")", "{", "for", "(", "N", "node", ":", "graph2", ".", "nodes", "(", ")", ")", "{", "graph1", ".", "addNode", "(", "node", ")", ";", "}", "for", "(", "EndpointPair", "<", "N", ">", "edge", ":", "graph2", ".", "edges", "(", ")", ")", "{", "graph1", ".", "putEdge", "(", "edge", ".", "nodeU", "(", ")", ",", "edge", ".", "nodeV", "(", ")", ")", ";", "}", "}" ]
Merge all nodes and edges of two graphs. @param graph1 A {@link MutableGraph} into which all nodes and edges of {@literal graph2} will be merged @param graph2 The {@link Graph} whose nodes and edges will be merged into {@literal graph1} @param <N> The class of the nodes
[ "Merge", "all", "nodes", "and", "edges", "of", "two", "graphs", "." ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog2/utilities/Graphs.java#L114-L121
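Short sketch combining the Graphs helpers from the records above with Guava's graph API:
    // Start from a mutable directed graph and merge two single-node graphs into it.
    final MutableGraph<String> target = GraphBuilder.directed().build();
    Graphs.merge(target, Graphs.singletonDirectedGraph("a"));
    Graphs.merge(target, Graphs.singletonDirectedGraph("b"));
    target.putEdge("a", "b");   // the merged nodes can now be connected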
14,849
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog2/security/ldap/LdapConnector.java
LdapConnector.normalizedDn
@Nullable private String normalizedDn(String dn) { if (isNullOrEmpty(dn)) { return dn; } else { try { return new Dn(dn).getNormName(); } catch (LdapInvalidDnException e) { LOG.debug("Invalid DN", e); return dn; } } }
java
@Nullable private String normalizedDn(String dn) { if (isNullOrEmpty(dn)) { return dn; } else { try { return new Dn(dn).getNormName(); } catch (LdapInvalidDnException e) { LOG.debug("Invalid DN", e); return dn; } } }
[ "@", "Nullable", "private", "String", "normalizedDn", "(", "String", "dn", ")", "{", "if", "(", "isNullOrEmpty", "(", "dn", ")", ")", "{", "return", "dn", ";", "}", "else", "{", "try", "{", "return", "new", "Dn", "(", "dn", ")", ".", "getNormName", "(", ")", ";", "}", "catch", "(", "LdapInvalidDnException", "e", ")", "{", "LOG", ".", "debug", "(", "\"Invalid DN\"", ",", "e", ")", ";", "return", "dn", ";", "}", "}", "}" ]
When the given string is a DN, the method ensures that the DN gets normalized so it can be used in string comparison. If the string is not a DN, the method just returns it. Examples: String is a DN: input = "cn=John Doe, ou=groups, ou=system" output = "cn=John Doe,ou=groups,ou=system" String is not a DN: input = "john" output = "john" This behavior is needed because for some values we don't know if the value is a DN or not. (i.e. group member values) See: https://github.com/Graylog2/graylog2-server/issues/1790 @param dn denormalized DN string @return normalized DN string
[ "When", "the", "given", "string", "is", "a", "DN", "the", "method", "ensures", "that", "the", "DN", "gets", "normalized", "so", "it", "can", "be", "used", "in", "string", "comparison", "." ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog2/security/ldap/LdapConnector.java#L325-L337
14,850
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog2/inputs/converters/NumericConverter.java
NumericConverter.convert
@Override public Object convert(String value) { if (value == null || value.isEmpty()) { return value; } Object result = Ints.tryParse(value); if (result != null) { return result; } result = Longs.tryParse(value); if (result != null) { return result; } result = Doubles.tryParse(value); if (result != null) { return result; } return value; }
java
@Override public Object convert(String value) { if (value == null || value.isEmpty()) { return value; } Object result = Ints.tryParse(value); if (result != null) { return result; } result = Longs.tryParse(value); if (result != null) { return result; } result = Doubles.tryParse(value); if (result != null) { return result; } return value; }
[ "@", "Override", "public", "Object", "convert", "(", "String", "value", ")", "{", "if", "(", "value", "==", "null", "||", "value", ".", "isEmpty", "(", ")", ")", "{", "return", "value", ";", "}", "Object", "result", "=", "Ints", ".", "tryParse", "(", "value", ")", ";", "if", "(", "result", "!=", "null", ")", "{", "return", "result", ";", "}", "result", "=", "Longs", ".", "tryParse", "(", "value", ")", ";", "if", "(", "result", "!=", "null", ")", "{", "return", "result", ";", "}", "result", "=", "Doubles", ".", "tryParse", "(", "value", ")", ";", "if", "(", "result", "!=", "null", ")", "{", "return", "result", ";", "}", "return", "value", ";", "}" ]
Attempts to convert the provided string value to a numeric type, trying Integer, Long and Double in order until successful.
[ "Attempts", "to", "convert", "the", "provided", "string", "value", "to", "a", "numeric", "type", "trying", "Integer", "Long", "and", "Double", "in", "order", "until", "successful", "." ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog2/inputs/converters/NumericConverter.java#L39-L64
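Behaviour sketch; the constructor argument is assumed from the usual Graylog converter pattern (a config map) and may differ.
    final NumericConverter converter = new NumericConverter(Collections.<String, Object>emptyMap());
    converter.convert("42");          // Integer 42
    converter.convert("9999999999");  // Long, since the value is too large for an int
    converter.convert("3.14");        // Double 3.14
    converter.convert("n/a");         // returned unchanged as a String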
14,851
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog2/inputs/codecs/gelf/GELFMessage.java
GELFMessage.getJSON
public String getJSON(long maxBytes) { try { switch (getGELFType()) { case ZLIB: return Tools.decompressZlib(payload, maxBytes); case GZIP: return Tools.decompressGzip(payload, maxBytes); case UNCOMPRESSED: return new String(payload, StandardCharsets.UTF_8); case CHUNKED: case UNSUPPORTED: throw new IllegalStateException("Unknown GELF type. Not supported."); } } catch (final IOException e) { // Note that the UnsupportedEncodingException thrown by 'new String' can never happen because UTF-8 // is a mandatory JRE encoding which is always present. So we only need to mention the decompress exceptions here. throw new IllegalStateException("Failed to decompress the GELF message payload", e); } return null; }
java
public String getJSON(long maxBytes) { try { switch (getGELFType()) { case ZLIB: return Tools.decompressZlib(payload, maxBytes); case GZIP: return Tools.decompressGzip(payload, maxBytes); case UNCOMPRESSED: return new String(payload, StandardCharsets.UTF_8); case CHUNKED: case UNSUPPORTED: throw new IllegalStateException("Unknown GELF type. Not supported."); } } catch (final IOException e) { // Note that the UnsupportedEncodingException thrown by 'new String' can never happen because UTF-8 // is a mandatory JRE encoding which is always present. So we only need to mention the decompress exceptions here. throw new IllegalStateException("Failed to decompress the GELF message payload", e); } return null; }
[ "public", "String", "getJSON", "(", "long", "maxBytes", ")", "{", "try", "{", "switch", "(", "getGELFType", "(", ")", ")", "{", "case", "ZLIB", ":", "return", "Tools", ".", "decompressZlib", "(", "payload", ",", "maxBytes", ")", ";", "case", "GZIP", ":", "return", "Tools", ".", "decompressGzip", "(", "payload", ",", "maxBytes", ")", ";", "case", "UNCOMPRESSED", ":", "return", "new", "String", "(", "payload", ",", "StandardCharsets", ".", "UTF_8", ")", ";", "case", "CHUNKED", ":", "case", "UNSUPPORTED", ":", "throw", "new", "IllegalStateException", "(", "\"Unknown GELF type. Not supported.\"", ")", ";", "}", "}", "catch", "(", "final", "IOException", "e", ")", "{", "// Note that the UnsupportedEncodingException thrown by 'new String' can never happen because UTF-8", "// is a mandatory JRE encoding which is always present. So we only need to mention the decompress exceptions here.", "throw", "new", "IllegalStateException", "(", "\"Failed to decompress the GELF message payload\"", ",", "e", ")", ";", "}", "return", "null", ";", "}" ]
Return the JSON payload of the GELF message. @param maxBytes The maximum number of bytes to read from a compressed GELF payload. {@code -1} means unlimited. @return The extracted JSON payload of the GELF message. @see Tools#decompressGzip(byte[], long) @see Tools#decompressZlib(byte[], long)
[ "Return", "the", "JSON", "payload", "of", "the", "GELF", "message", "." ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog2/inputs/codecs/gelf/GELFMessage.java#L70-L89
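Usage sketch; gelfMessage is assumed to be a GELFMessage built from a received payload.
    // Decode the payload, capping decompression at 2 MB to guard against compression bombs.
    final String json = gelfMessage.getJSON(2L * 1024 * 1024);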
14,852
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog2/indexer/fieldtypes/MongoFieldTypeLookup.java
MongoFieldTypeLookup.get
@Override public Map<String, FieldTypes> get(final Collection<String> fieldNames, Collection<String> indexNames) { // Shortcut - if we don't select any fields we don't have to do any database query if (fieldNames.isEmpty()) { return Collections.emptyMap(); } // We have to transform the field type database entries to make them usable for the user. // // [ // { // "index_name": "graylog_0", // "fields": [ // {"field_name": "message", "physical_type": "text"}, // {"field_name": "source", "physical_type": "keyword"} // ] // }, // { // "index_name": "graylog_1", // "fields": [ // {"field_name": "message", "physical_type": "text"}, // ] // } // ] // // gets transformed into // // { // "message": { // "field_name": "message", // "types": [ // { // "type": "string", // "properties": ["full-text-search"], // "index_names": ["graylog_0", "graylog_1"] // ] // }, // "source": { // "field_name": "source", // "types": [ // { // "type": "string", // "properties": ["enumerable"], // "index_names": ["graylog_0"] // ] // } // } // field-name -> {physical-type -> [index-name, ...]} final Map<String, SetMultimap<String, String>> fields = new HashMap<>(); // Convert the data from the database to be indexed by field name and physical type getTypesStream(fieldNames, indexNames).forEach(types -> { final String indexName = types.indexName(); types.fields().forEach(fieldType -> { final String fieldName = fieldType.fieldName(); final String physicalType = fieldType.physicalType(); if (fieldNames.contains(fieldName)) { if (indexNames.isEmpty() || indexNames.contains(indexName)) { if (!fields.containsKey(fieldName)) { fields.put(fieldName, HashMultimap.create()); } fields.get(fieldName).put(physicalType, indexName); } } }); }); final ImmutableMap.Builder<String, FieldTypes> result = ImmutableMap.builder(); for (Map.Entry<String, SetMultimap<String, String>> fieldNameEntry : fields.entrySet()) { final String fieldName = fieldNameEntry.getKey(); final Map<String, Collection<String>> physicalTypes = fieldNameEntry.getValue().asMap(); // Use the field type mapper to do the conversion between the Elasticsearch type and our logical type final Set<FieldTypes.Type> types = physicalTypes.entrySet().stream() .map(entry -> { final String physicalType = entry.getKey(); final Set<String> indices = ImmutableSet.copyOf(entry.getValue()); return typeMapper.mapType(physicalType).map(t -> t.withIndexNames(indices)); }) .filter(Optional::isPresent) .map(Optional::get) .collect(Collectors.toSet()); result.put(fieldName, FieldTypes.create(fieldName, types)); } return result.build(); }
java
@Override public Map<String, FieldTypes> get(final Collection<String> fieldNames, Collection<String> indexNames) { // Shortcut - if we don't select any fields we don't have to do any database query if (fieldNames.isEmpty()) { return Collections.emptyMap(); } // We have to transform the field type database entries to make them usable for the user. // // [ // { // "index_name": "graylog_0", // "fields": [ // {"field_name": "message", "physical_type": "text"}, // {"field_name": "source", "physical_type": "keyword"} // ] // }, // { // "index_name": "graylog_1", // "fields": [ // {"field_name": "message", "physical_type": "text"}, // ] // } // ] // // gets transformed into // // { // "message": { // "field_name": "message", // "types": [ // { // "type": "string", // "properties": ["full-text-search"], // "index_names": ["graylog_0", "graylog_1"] // ] // }, // "source": { // "field_name": "source", // "types": [ // { // "type": "string", // "properties": ["enumerable"], // "index_names": ["graylog_0"] // ] // } // } // field-name -> {physical-type -> [index-name, ...]} final Map<String, SetMultimap<String, String>> fields = new HashMap<>(); // Convert the data from the database to be indexed by field name and physical type getTypesStream(fieldNames, indexNames).forEach(types -> { final String indexName = types.indexName(); types.fields().forEach(fieldType -> { final String fieldName = fieldType.fieldName(); final String physicalType = fieldType.physicalType(); if (fieldNames.contains(fieldName)) { if (indexNames.isEmpty() || indexNames.contains(indexName)) { if (!fields.containsKey(fieldName)) { fields.put(fieldName, HashMultimap.create()); } fields.get(fieldName).put(physicalType, indexName); } } }); }); final ImmutableMap.Builder<String, FieldTypes> result = ImmutableMap.builder(); for (Map.Entry<String, SetMultimap<String, String>> fieldNameEntry : fields.entrySet()) { final String fieldName = fieldNameEntry.getKey(); final Map<String, Collection<String>> physicalTypes = fieldNameEntry.getValue().asMap(); // Use the field type mapper to do the conversion between the Elasticsearch type and our logical type final Set<FieldTypes.Type> types = physicalTypes.entrySet().stream() .map(entry -> { final String physicalType = entry.getKey(); final Set<String> indices = ImmutableSet.copyOf(entry.getValue()); return typeMapper.mapType(physicalType).map(t -> t.withIndexNames(indices)); }) .filter(Optional::isPresent) .map(Optional::get) .collect(Collectors.toSet()); result.put(fieldName, FieldTypes.create(fieldName, types)); } return result.build(); }
[ "@", "Override", "public", "Map", "<", "String", ",", "FieldTypes", ">", "get", "(", "final", "Collection", "<", "String", ">", "fieldNames", ",", "Collection", "<", "String", ">", "indexNames", ")", "{", "// Shortcut - if we don't select any fields we don't have to do any database query", "if", "(", "fieldNames", ".", "isEmpty", "(", ")", ")", "{", "return", "Collections", ".", "emptyMap", "(", ")", ";", "}", "// We have to transform the field type database entries to make them usable for the user.", "//", "// [", "// {", "// \"index_name\": \"graylog_0\",", "// \"fields\": [", "// {\"field_name\": \"message\", \"physical_type\": \"text\"},", "// {\"field_name\": \"source\", \"physical_type\": \"keyword\"}", "// ]", "// },", "// {", "// \"index_name\": \"graylog_1\",", "// \"fields\": [", "// {\"field_name\": \"message\", \"physical_type\": \"text\"},", "// ]", "// }", "// ]", "//", "// gets transformed into", "//", "// {", "// \"message\": {", "// \"field_name\": \"message\",", "// \"types\": [", "// {", "// \"type\": \"string\",", "// \"properties\": [\"full-text-search\"],", "// \"index_names\": [\"graylog_0\", \"graylog_1\"]", "// ]", "// },", "// \"source\": {", "// \"field_name\": \"source\",", "// \"types\": [", "// {", "// \"type\": \"string\",", "// \"properties\": [\"enumerable\"],", "// \"index_names\": [\"graylog_0\"]", "// ]", "// }", "// }", "// field-name -> {physical-type -> [index-name, ...]}", "final", "Map", "<", "String", ",", "SetMultimap", "<", "String", ",", "String", ">", ">", "fields", "=", "new", "HashMap", "<>", "(", ")", ";", "// Convert the data from the database to be indexed by field name and physical type", "getTypesStream", "(", "fieldNames", ",", "indexNames", ")", ".", "forEach", "(", "types", "->", "{", "final", "String", "indexName", "=", "types", ".", "indexName", "(", ")", ";", "types", ".", "fields", "(", ")", ".", "forEach", "(", "fieldType", "->", "{", "final", "String", "fieldName", "=", "fieldType", ".", "fieldName", "(", ")", ";", "final", "String", "physicalType", "=", "fieldType", ".", "physicalType", "(", ")", ";", "if", "(", "fieldNames", ".", "contains", "(", "fieldName", ")", ")", "{", "if", "(", "indexNames", ".", "isEmpty", "(", ")", "||", "indexNames", ".", "contains", "(", "indexName", ")", ")", "{", "if", "(", "!", "fields", ".", "containsKey", "(", "fieldName", ")", ")", "{", "fields", ".", "put", "(", "fieldName", ",", "HashMultimap", ".", "create", "(", ")", ")", ";", "}", "fields", ".", "get", "(", "fieldName", ")", ".", "put", "(", "physicalType", ",", "indexName", ")", ";", "}", "}", "}", ")", ";", "}", ")", ";", "final", "ImmutableMap", ".", "Builder", "<", "String", ",", "FieldTypes", ">", "result", "=", "ImmutableMap", ".", "builder", "(", ")", ";", "for", "(", "Map", ".", "Entry", "<", "String", ",", "SetMultimap", "<", "String", ",", "String", ">", ">", "fieldNameEntry", ":", "fields", ".", "entrySet", "(", ")", ")", "{", "final", "String", "fieldName", "=", "fieldNameEntry", ".", "getKey", "(", ")", ";", "final", "Map", "<", "String", ",", "Collection", "<", "String", ">", ">", "physicalTypes", "=", "fieldNameEntry", ".", "getValue", "(", ")", ".", "asMap", "(", ")", ";", "// Use the field type mapper to do the conversion between the Elasticsearch type and our logical type", "final", "Set", "<", "FieldTypes", ".", "Type", ">", "types", "=", "physicalTypes", ".", "entrySet", "(", ")", ".", "stream", "(", ")", ".", "map", "(", "entry", "->", "{", "final", "String", "physicalType", "=", "entry", ".", "getKey", "(", ")", 
";", "final", "Set", "<", "String", ">", "indices", "=", "ImmutableSet", ".", "copyOf", "(", "entry", ".", "getValue", "(", ")", ")", ";", "return", "typeMapper", ".", "mapType", "(", "physicalType", ")", ".", "map", "(", "t", "->", "t", ".", "withIndexNames", "(", "indices", ")", ")", ";", "}", ")", ".", "filter", "(", "Optional", "::", "isPresent", ")", ".", "map", "(", "Optional", "::", "get", ")", ".", "collect", "(", "Collectors", ".", "toSet", "(", ")", ")", ";", "result", ".", "put", "(", "fieldName", ",", "FieldTypes", ".", "create", "(", "fieldName", ",", "types", ")", ")", ";", "}", "return", "result", ".", "build", "(", ")", ";", "}" ]
Returns a map of field names to the corresponding field types. @param fieldNames a collection of field names to get the types for @param indexNames a collection of index names to filter the results @return map of field names to field type objects
[ "Returns", "a", "map", "of", "field", "names", "to", "the", "corresponding", "field", "types", "." ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog2/indexer/fieldtypes/MongoFieldTypeLookup.java#L77-L169
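Not part of the dataset entry above: a minimal standalone sketch of the same grouping idea using plain java.util collections instead of Guava's SetMultimap. The class name, the input shape (index name -> field name -> physical type) and the sample values are made up for illustration.

import java.util.*;

class FieldTypeGrouping {
    // field-name -> (physical-type -> set of index names), mirroring the intermediate
    // structure the lookup method builds before mapping physical types to logical ones
    static Map<String, Map<String, Set<String>>> group(Map<String, Map<String, String>> typesPerIndex) {
        Map<String, Map<String, Set<String>>> fields = new HashMap<>();
        typesPerIndex.forEach((indexName, fieldTypes) ->
                fieldTypes.forEach((fieldName, physicalType) ->
                        fields.computeIfAbsent(fieldName, k -> new HashMap<>())
                              .computeIfAbsent(physicalType, k -> new HashSet<>())
                              .add(indexName)));
        return fields;
    }

    public static void main(String[] args) {
        Map<String, Map<String, String>> input = new HashMap<>();
        input.put("graylog_0", Map.of("message", "text", "source", "keyword"));
        input.put("graylog_1", Map.of("message", "text"));
        // e.g. {message={text=[graylog_0, graylog_1]}, source={keyword=[graylog_0]}} (ordering may vary)
        System.out.println(group(input));
    }
}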
14,853
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog2/indexer/fieldtypes/FieldTypeMapper.java
FieldTypeMapper.mapType
public Optional<FieldTypes.Type> mapType(String typeName) { return Optional.ofNullable(TYPE_MAP.get(typeName)); }
java
public Optional<FieldTypes.Type> mapType(String typeName) { return Optional.ofNullable(TYPE_MAP.get(typeName)); }
[ "public", "Optional", "<", "FieldTypes", ".", "Type", ">", "mapType", "(", "String", "typeName", ")", "{", "return", "Optional", ".", "ofNullable", "(", "TYPE_MAP", ".", "get", "(", "typeName", ")", ")", ";", "}" ]
Map the given Elasticsearch field type to a Graylog type. @param typeName Elasticsearch type name @return the Graylog type object
[ "Map", "the", "given", "Elasticsearch", "field", "type", "to", "a", "Graylog", "type", "." ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog2/indexer/fieldtypes/FieldTypeMapper.java#L78-L80
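Illustrative only, not from the Graylog codebase: the same Optional.ofNullable map-lookup pattern as mapType(), with a made-up physical-to-logical mapping so the snippet compiles on its own.

import java.util.Map;
import java.util.Optional;

class TypeLookup {
    // Hypothetical mapping; the real TYPE_MAP lives in FieldTypeMapper.
    private static final Map<String, String> TYPE_MAP = Map.of(
            "text", "string",
            "keyword", "string",
            "long", "long");

    // Unknown keys simply become Optional.empty(), which callers can filter out.
    static Optional<String> mapType(String physicalType) {
        return Optional.ofNullable(TYPE_MAP.get(physicalType));
    }

    public static void main(String[] args) {
        System.out.println(mapType("text"));      // Optional[string]
        System.out.println(mapType("geo_point")); // Optional.empty
    }
}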
14,854
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog2/inputs/codecs/GelfChunkAggregator.java
GelfChunkAggregator.checkForCompletion
@Nullable private ByteBuf checkForCompletion(GELFMessage gelfMessage) { if (!chunks.isEmpty() && log.isDebugEnabled()) { log.debug("Dumping GELF chunk map [chunks for {} messages]:\n{}", chunks.size(), humanReadableChunkMap()); } final GELFMessageChunk chunk = new GELFMessageChunk(gelfMessage, null); // TODO second parameter final int sequenceCount = chunk.getSequenceCount(); final String messageId = chunk.getId(); ChunkEntry entry = new ChunkEntry(sequenceCount, chunk.getArrival(), messageId); final ChunkEntry existing = chunks.putIfAbsent(messageId, entry); if (existing == null) { // add this chunk entry to the eviction set waitingMessages.inc(); sortedEvictionSet.add(entry); } else { // the entry is already in the eviction set and chunk map entry = existing; } final int sequenceNumber = chunk.getSequenceNumber(); if (!entry.payloadArray.compareAndSet(sequenceNumber, null, chunk)) { log.error("Received duplicate chunk {} for message {} from {}", sequenceNumber, messageId, gelfMessage.getSourceAddress()); duplicateChunks.inc(); return null; } final int chunkWatermark = entry.chunkSlotsWritten.incrementAndGet(); if (chunkWatermark > MAX_CHUNKS) { getAndCleanupEntry(messageId); throw new IllegalStateException("Maximum number of chunks reached, discarding message"); } if (chunkWatermark == sequenceCount) { // message is complete by chunk count, assemble and return it. // it might still be corrupt etc, but we've seen enough chunks // remove before operating on it, to avoid racing too much with the clean up job, some race is inevitable, though. entry = getAndCleanupEntry(messageId); final byte[] allChunks[] = new byte[sequenceCount][]; for (int i = 0; i < entry.payloadArray.length(); i++) { final GELFMessageChunk messageChunk = entry.payloadArray.get(i); if (messageChunk == null) { log.debug("Couldn't read chunk {} of message {}, skipping this chunk.", i, messageId); } else { allChunks[i] = messageChunk.getData(); } } completeMessages.inc(); return Unpooled.wrappedBuffer(allChunks); } // message isn't complete yet, check if we should remove the other parts as well if (isOutdated(entry)) { // chunks are outdated, the oldest came in over 5 seconds ago, clean them all up log.debug("Not all chunks of <{}> arrived within {}ms. Dropping chunks.", messageId, VALIDITY_PERIOD); expireEntry(messageId); } return null; }
java
@Nullable private ByteBuf checkForCompletion(GELFMessage gelfMessage) { if (!chunks.isEmpty() && log.isDebugEnabled()) { log.debug("Dumping GELF chunk map [chunks for {} messages]:\n{}", chunks.size(), humanReadableChunkMap()); } final GELFMessageChunk chunk = new GELFMessageChunk(gelfMessage, null); // TODO second parameter final int sequenceCount = chunk.getSequenceCount(); final String messageId = chunk.getId(); ChunkEntry entry = new ChunkEntry(sequenceCount, chunk.getArrival(), messageId); final ChunkEntry existing = chunks.putIfAbsent(messageId, entry); if (existing == null) { // add this chunk entry to the eviction set waitingMessages.inc(); sortedEvictionSet.add(entry); } else { // the entry is already in the eviction set and chunk map entry = existing; } final int sequenceNumber = chunk.getSequenceNumber(); if (!entry.payloadArray.compareAndSet(sequenceNumber, null, chunk)) { log.error("Received duplicate chunk {} for message {} from {}", sequenceNumber, messageId, gelfMessage.getSourceAddress()); duplicateChunks.inc(); return null; } final int chunkWatermark = entry.chunkSlotsWritten.incrementAndGet(); if (chunkWatermark > MAX_CHUNKS) { getAndCleanupEntry(messageId); throw new IllegalStateException("Maximum number of chunks reached, discarding message"); } if (chunkWatermark == sequenceCount) { // message is complete by chunk count, assemble and return it. // it might still be corrupt etc, but we've seen enough chunks // remove before operating on it, to avoid racing too much with the clean up job, some race is inevitable, though. entry = getAndCleanupEntry(messageId); final byte[] allChunks[] = new byte[sequenceCount][]; for (int i = 0; i < entry.payloadArray.length(); i++) { final GELFMessageChunk messageChunk = entry.payloadArray.get(i); if (messageChunk == null) { log.debug("Couldn't read chunk {} of message {}, skipping this chunk.", i, messageId); } else { allChunks[i] = messageChunk.getData(); } } completeMessages.inc(); return Unpooled.wrappedBuffer(allChunks); } // message isn't complete yet, check if we should remove the other parts as well if (isOutdated(entry)) { // chunks are outdated, the oldest came in over 5 seconds ago, clean them all up log.debug("Not all chunks of <{}> arrived within {}ms. Dropping chunks.", messageId, VALIDITY_PERIOD); expireEntry(messageId); } return null; }
[ "@", "Nullable", "private", "ByteBuf", "checkForCompletion", "(", "GELFMessage", "gelfMessage", ")", "{", "if", "(", "!", "chunks", ".", "isEmpty", "(", ")", "&&", "log", ".", "isDebugEnabled", "(", ")", ")", "{", "log", ".", "debug", "(", "\"Dumping GELF chunk map [chunks for {} messages]:\\n{}\"", ",", "chunks", ".", "size", "(", ")", ",", "humanReadableChunkMap", "(", ")", ")", ";", "}", "final", "GELFMessageChunk", "chunk", "=", "new", "GELFMessageChunk", "(", "gelfMessage", ",", "null", ")", ";", "// TODO second parameter", "final", "int", "sequenceCount", "=", "chunk", ".", "getSequenceCount", "(", ")", ";", "final", "String", "messageId", "=", "chunk", ".", "getId", "(", ")", ";", "ChunkEntry", "entry", "=", "new", "ChunkEntry", "(", "sequenceCount", ",", "chunk", ".", "getArrival", "(", ")", ",", "messageId", ")", ";", "final", "ChunkEntry", "existing", "=", "chunks", ".", "putIfAbsent", "(", "messageId", ",", "entry", ")", ";", "if", "(", "existing", "==", "null", ")", "{", "// add this chunk entry to the eviction set", "waitingMessages", ".", "inc", "(", ")", ";", "sortedEvictionSet", ".", "add", "(", "entry", ")", ";", "}", "else", "{", "// the entry is already in the eviction set and chunk map", "entry", "=", "existing", ";", "}", "final", "int", "sequenceNumber", "=", "chunk", ".", "getSequenceNumber", "(", ")", ";", "if", "(", "!", "entry", ".", "payloadArray", ".", "compareAndSet", "(", "sequenceNumber", ",", "null", ",", "chunk", ")", ")", "{", "log", ".", "error", "(", "\"Received duplicate chunk {} for message {} from {}\"", ",", "sequenceNumber", ",", "messageId", ",", "gelfMessage", ".", "getSourceAddress", "(", ")", ")", ";", "duplicateChunks", ".", "inc", "(", ")", ";", "return", "null", ";", "}", "final", "int", "chunkWatermark", "=", "entry", ".", "chunkSlotsWritten", ".", "incrementAndGet", "(", ")", ";", "if", "(", "chunkWatermark", ">", "MAX_CHUNKS", ")", "{", "getAndCleanupEntry", "(", "messageId", ")", ";", "throw", "new", "IllegalStateException", "(", "\"Maximum number of chunks reached, discarding message\"", ")", ";", "}", "if", "(", "chunkWatermark", "==", "sequenceCount", ")", "{", "// message is complete by chunk count, assemble and return it.", "// it might still be corrupt etc, but we've seen enough chunks", "// remove before operating on it, to avoid racing too much with the clean up job, some race is inevitable, though.", "entry", "=", "getAndCleanupEntry", "(", "messageId", ")", ";", "final", "byte", "[", "]", "allChunks", "[", "]", "=", "new", "byte", "[", "sequenceCount", "]", "[", "", "]", ";", "for", "(", "int", "i", "=", "0", ";", "i", "<", "entry", ".", "payloadArray", ".", "length", "(", ")", ";", "i", "++", ")", "{", "final", "GELFMessageChunk", "messageChunk", "=", "entry", ".", "payloadArray", ".", "get", "(", "i", ")", ";", "if", "(", "messageChunk", "==", "null", ")", "{", "log", ".", "debug", "(", "\"Couldn't read chunk {} of message {}, skipping this chunk.\"", ",", "i", ",", "messageId", ")", ";", "}", "else", "{", "allChunks", "[", "i", "]", "=", "messageChunk", ".", "getData", "(", ")", ";", "}", "}", "completeMessages", ".", "inc", "(", ")", ";", "return", "Unpooled", ".", "wrappedBuffer", "(", "allChunks", ")", ";", "}", "// message isn't complete yet, check if we should remove the other parts as well", "if", "(", "isOutdated", "(", "entry", ")", ")", "{", "// chunks are outdated, the oldest came in over 5 seconds ago, clean them all up", "log", ".", "debug", "(", "\"Not all chunks of <{}> arrived within {}ms. 
Dropping chunks.\"", ",", "messageId", ",", "VALIDITY_PERIOD", ")", ";", "expireEntry", "(", "messageId", ")", ";", "}", "return", "null", ";", "}" ]
Checks whether the presented gelf message chunk completes the incoming raw message and returns it if it does. If the message isn't complete, it adds the chunk to the internal buffer and waits for more incoming messages. Outdated chunks are being purged regularly. @param gelfMessage the gelf message chunk @return null or a {@link org.graylog2.plugin.journal.RawMessage raw message} object
[ "Checks", "whether", "the", "presented", "gelf", "message", "chunk", "completes", "the", "incoming", "raw", "message", "and", "returns", "it", "if", "it", "does", ".", "If", "the", "message", "isn", "t", "complete", "it", "adds", "the", "chunk", "to", "the", "internal", "buffer", "and", "waits", "for", "more", "incoming", "messages", ".", "Outdated", "chunks", "are", "being", "purged", "regularly", "." ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog2/inputs/codecs/GelfChunkAggregator.java#L129-L192
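A minimal sketch (standard library only, not the Graylog implementation) of the chunk-assembly idea used above: each chunk is written into its slot with compareAndSet so duplicates are rejected, and the payload is concatenated once the slot counter reaches the expected sequence count. Eviction of stale entries and the MAX_CHUNKS guard are omitted.

import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReferenceArray;

class ChunkAssembler {
    private final AtomicReferenceArray<byte[]> slots;
    private final AtomicInteger written = new AtomicInteger();

    ChunkAssembler(int sequenceCount) {
        this.slots = new AtomicReferenceArray<>(sequenceCount);
    }

    // Returns the assembled payload once the last chunk arrives, otherwise null.
    byte[] add(int sequenceNumber, byte[] chunk) {
        if (!slots.compareAndSet(sequenceNumber, null, chunk)) {
            return null; // duplicate chunk, ignore it
        }
        if (written.incrementAndGet() < slots.length()) {
            return null; // not complete yet
        }
        int total = 0;
        for (int i = 0; i < slots.length(); i++) {
            total += slots.get(i).length;
        }
        byte[] payload = new byte[total];
        int offset = 0;
        for (int i = 0; i < slots.length(); i++) {
            byte[] part = slots.get(i);
            System.arraycopy(part, 0, payload, offset, part.length);
            offset += part.length;
        }
        return payload;
    }

    public static void main(String[] args) {
        ChunkAssembler assembler = new ChunkAssembler(2);
        System.out.println(assembler.add(0, "GEL".getBytes()) == null);  // true: still incomplete
        System.out.println(new String(assembler.add(1, "F".getBytes()))); // GELF
    }
}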
14,855
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog2/inputs/gelf/tcp/GELFTCPInput.java
GELFTCPInput.overrideDelimiter
private static Configuration overrideDelimiter(Configuration configuration) { configuration.setBoolean(TcpTransport.CK_USE_NULL_DELIMITER, true); return configuration; }
java
private static Configuration overrideDelimiter(Configuration configuration) { configuration.setBoolean(TcpTransport.CK_USE_NULL_DELIMITER, true); return configuration; }
[ "private", "static", "Configuration", "overrideDelimiter", "(", "Configuration", "configuration", ")", "{", "configuration", ".", "setBoolean", "(", "TcpTransport", ".", "CK_USE_NULL_DELIMITER", ",", "true", ")", ";", "return", "configuration", ";", "}" ]
Overrides the delimiter configuration and forces the null character delimiter, because an existing input configuration may have been created with the wrong value.
[ "has", "been", "created", "with", "the", "wrong", "value", "." ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog2/inputs/gelf/tcp/GELFTCPInput.java#L49-L53
14,856
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog2/lookup/adapters/dnslookup/DnsClient.java
DnsClient.decodeDnsRecord
private static ADnsAnswer decodeDnsRecord(DnsRecord dnsRecord, boolean includeIpVersion) { if (dnsRecord == null) { return null; } LOG.trace("Attempting to decode DNS record [{}]", dnsRecord); /* Read data from DNS record response. The data is a binary representation of the IP address * IPv4 address: 32 bits, IPv6 address: 128 bits */ byte[] ipAddressBytes; final DefaultDnsRawRecord dnsRawRecord = (DefaultDnsRawRecord) dnsRecord; try { final ByteBuf byteBuf = dnsRawRecord.content(); ipAddressBytes = new byte[byteBuf.readableBytes()]; int readerIndex = byteBuf.readerIndex(); byteBuf.getBytes(readerIndex, ipAddressBytes); } finally { /* Must manually release references on dnsRawRecord object since the DefaultDnsRawRecord class * extends ReferenceCounted. This also releases the above ByteBuf, since DefaultDnsRawRecord is * the holder for it. */ dnsRawRecord.release(); } LOG.trace("The IP address has [{}] bytes", ipAddressBytes.length); InetAddress ipAddress; try { ipAddress = InetAddress.getByAddress(ipAddressBytes); // Takes care of correctly creating an IPv4 or IPv6 address. } catch (UnknownHostException e) { // This should not happen. LOG.error("Could not extract IP address from DNS entry [{}]. Cause [{}]", dnsRecord.toString(), ExceptionUtils.getRootCauseMessage(e)); return null; } LOG.trace("The resulting IP address is [{}]", ipAddress.getHostAddress()); final ADnsAnswer.Builder builder = ADnsAnswer.builder() .ipAddress(ipAddress.getHostAddress()) .dnsTTL(dnsRecord.timeToLive()); if (includeIpVersion) { builder.ipVersion(ipAddress instanceof Inet4Address ? IP_4_VERSION : IP_6_VERSION); } return builder.build(); }
java
private static ADnsAnswer decodeDnsRecord(DnsRecord dnsRecord, boolean includeIpVersion) { if (dnsRecord == null) { return null; } LOG.trace("Attempting to decode DNS record [{}]", dnsRecord); /* Read data from DNS record response. The data is a binary representation of the IP address * IPv4 address: 32 bits, IPv6 address: 128 bits */ byte[] ipAddressBytes; final DefaultDnsRawRecord dnsRawRecord = (DefaultDnsRawRecord) dnsRecord; try { final ByteBuf byteBuf = dnsRawRecord.content(); ipAddressBytes = new byte[byteBuf.readableBytes()]; int readerIndex = byteBuf.readerIndex(); byteBuf.getBytes(readerIndex, ipAddressBytes); } finally { /* Must manually release references on dnsRawRecord object since the DefaultDnsRawRecord class * extends ReferenceCounted. This also releases the above ByteBuf, since DefaultDnsRawRecord is * the holder for it. */ dnsRawRecord.release(); } LOG.trace("The IP address has [{}] bytes", ipAddressBytes.length); InetAddress ipAddress; try { ipAddress = InetAddress.getByAddress(ipAddressBytes); // Takes care of correctly creating an IPv4 or IPv6 address. } catch (UnknownHostException e) { // This should not happen. LOG.error("Could not extract IP address from DNS entry [{}]. Cause [{}]", dnsRecord.toString(), ExceptionUtils.getRootCauseMessage(e)); return null; } LOG.trace("The resulting IP address is [{}]", ipAddress.getHostAddress()); final ADnsAnswer.Builder builder = ADnsAnswer.builder() .ipAddress(ipAddress.getHostAddress()) .dnsTTL(dnsRecord.timeToLive()); if (includeIpVersion) { builder.ipVersion(ipAddress instanceof Inet4Address ? IP_4_VERSION : IP_6_VERSION); } return builder.build(); }
[ "private", "static", "ADnsAnswer", "decodeDnsRecord", "(", "DnsRecord", "dnsRecord", ",", "boolean", "includeIpVersion", ")", "{", "if", "(", "dnsRecord", "==", "null", ")", "{", "return", "null", ";", "}", "LOG", ".", "trace", "(", "\"Attempting to decode DNS record [{}]\"", ",", "dnsRecord", ")", ";", "/* Read data from DNS record response. The data is a binary representation of the IP address\n * IPv4 address: 32 bits, IPv6 address: 128 bits */", "byte", "[", "]", "ipAddressBytes", ";", "final", "DefaultDnsRawRecord", "dnsRawRecord", "=", "(", "DefaultDnsRawRecord", ")", "dnsRecord", ";", "try", "{", "final", "ByteBuf", "byteBuf", "=", "dnsRawRecord", ".", "content", "(", ")", ";", "ipAddressBytes", "=", "new", "byte", "[", "byteBuf", ".", "readableBytes", "(", ")", "]", ";", "int", "readerIndex", "=", "byteBuf", ".", "readerIndex", "(", ")", ";", "byteBuf", ".", "getBytes", "(", "readerIndex", ",", "ipAddressBytes", ")", ";", "}", "finally", "{", "/* Must manually release references on dnsRawRecord object since the DefaultDnsRawRecord class\n * extends ReferenceCounted. This also releases the above ByteBuf, since DefaultDnsRawRecord is\n * the holder for it. */", "dnsRawRecord", ".", "release", "(", ")", ";", "}", "LOG", ".", "trace", "(", "\"The IP address has [{}] bytes\"", ",", "ipAddressBytes", ".", "length", ")", ";", "InetAddress", "ipAddress", ";", "try", "{", "ipAddress", "=", "InetAddress", ".", "getByAddress", "(", "ipAddressBytes", ")", ";", "// Takes care of correctly creating an IPv4 or IPv6 address.", "}", "catch", "(", "UnknownHostException", "e", ")", "{", "// This should not happen.", "LOG", ".", "error", "(", "\"Could not extract IP address from DNS entry [{}]. Cause [{}]\"", ",", "dnsRecord", ".", "toString", "(", ")", ",", "ExceptionUtils", ".", "getRootCauseMessage", "(", "e", ")", ")", ";", "return", "null", ";", "}", "LOG", ".", "trace", "(", "\"The resulting IP address is [{}]\"", ",", "ipAddress", ".", "getHostAddress", "(", ")", ")", ";", "final", "ADnsAnswer", ".", "Builder", "builder", "=", "ADnsAnswer", ".", "builder", "(", ")", ".", "ipAddress", "(", "ipAddress", ".", "getHostAddress", "(", ")", ")", ".", "dnsTTL", "(", "dnsRecord", ".", "timeToLive", "(", ")", ")", ";", "if", "(", "includeIpVersion", ")", "{", "builder", ".", "ipVersion", "(", "ipAddress", "instanceof", "Inet4Address", "?", "IP_4_VERSION", ":", "IP_6_VERSION", ")", ";", "}", "return", "builder", ".", "build", "(", ")", ";", "}" ]
Picks out the IP address and TTL from the answer response for each record.
[ "Picks", "out", "the", "IP", "address", "and", "TTL", "from", "the", "answer", "response", "for", "each", "record", "." ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog2/lookup/adapters/dnslookup/DnsClient.java#L173-L219
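For context on the InetAddress.getByAddress() call above, a self-contained example (plain JDK, no Netty) showing how a raw 4-byte or 16-byte array is turned into an IPv4 or IPv6 address and how the version can be detected with instanceof, as the decoder does:

import java.net.Inet4Address;
import java.net.InetAddress;
import java.net.UnknownHostException;

class RawAddressDecoding {
    public static void main(String[] args) throws UnknownHostException {
        byte[] v4 = {(byte) 192, (byte) 168, 1, 1}; // 4 bytes -> Inet4Address
        byte[] v6 = new byte[16];                   // 16 bytes -> Inet6Address
        v6[15] = 1;                                 // ::1
        InetAddress a = InetAddress.getByAddress(v4);
        InetAddress b = InetAddress.getByAddress(v6);
        System.out.println(a.getHostAddress() + " is IPv" + (a instanceof Inet4Address ? 4 : 6)); // 192.168.1.1 is IPv4
        System.out.println(b.getHostAddress() + " is IPv" + (b instanceof Inet4Address ? 4 : 6)); // 0:0:0:0:0:0:0:1 is IPv6
    }
}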
14,857
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog2/plugin/utilities/FileInfo.java
FileInfo.forPath
@NotNull public static FileInfo forPath(Path path) { try { final BasicFileAttributes attributes = Files.readAttributes(path, BasicFileAttributes.class); return FileInfo.builder() .path(path) .key(attributes.fileKey()) .size(attributes.size()) .modificationTime(attributes.lastModifiedTime()) .build(); } catch (Exception e) { LOG.error("Couldn't get file info for path: {}", path, e); return EMPTY_FILE_INFO.toBuilder().path(path).build(); } }
java
@NotNull public static FileInfo forPath(Path path) { try { final BasicFileAttributes attributes = Files.readAttributes(path, BasicFileAttributes.class); return FileInfo.builder() .path(path) .key(attributes.fileKey()) .size(attributes.size()) .modificationTime(attributes.lastModifiedTime()) .build(); } catch (Exception e) { LOG.error("Couldn't get file info for path: {}", path, e); return EMPTY_FILE_INFO.toBuilder().path(path).build(); } }
[ "@", "NotNull", "public", "static", "FileInfo", "forPath", "(", "Path", "path", ")", "{", "try", "{", "final", "BasicFileAttributes", "attributes", "=", "Files", ".", "readAttributes", "(", "path", ",", "BasicFileAttributes", ".", "class", ")", ";", "return", "FileInfo", ".", "builder", "(", ")", ".", "path", "(", "path", ")", ".", "key", "(", "attributes", ".", "fileKey", "(", ")", ")", ".", "size", "(", "attributes", ".", "size", "(", ")", ")", ".", "modificationTime", "(", "attributes", ".", "lastModifiedTime", "(", ")", ")", ".", "build", "(", ")", ";", "}", "catch", "(", "Exception", "e", ")", "{", "LOG", ".", "error", "(", "\"Couldn't get file info for path: {}\"", ",", "path", ",", "e", ")", ";", "return", "EMPTY_FILE_INFO", ".", "toBuilder", "(", ")", ".", "path", "(", "path", ")", ".", "build", "(", ")", ";", "}", "}" ]
Create a file info for the given path. @param path the path must exist, otherwise an IllegalArgumentException is thrown @return the file info object
[ "Create", "a", "file", "info", "for", "the", "given", "path", "." ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog2/plugin/utilities/FileInfo.java#L73-L87
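The attributes read by forPath() come straight from java.nio; a small standalone example (temp file and class name are made up) showing the same Files.readAttributes() call and the three values the builder copies:

import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.attribute.BasicFileAttributes;

class FileAttributesDemo {
    public static void main(String[] args) throws Exception {
        Path path = Files.createTempFile("fileinfo", ".tmp"); // temp file keeps the example self-contained
        BasicFileAttributes attributes = Files.readAttributes(path, BasicFileAttributes.class);
        System.out.println("key:      " + attributes.fileKey());        // may be null on some platforms
        System.out.println("size:     " + attributes.size());
        System.out.println("modified: " + attributes.lastModifiedTime());
        Files.delete(path);
    }
}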
14,858
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog2/alarmcallbacks/EmailAlarmCallback.java
EmailAlarmCallback.getConfigurationRequest
private ConfigurationRequest getConfigurationRequest(Map<String, String> userNames) { ConfigurationRequest configurationRequest = new ConfigurationRequest(); configurationRequest.addField(new TextField("sender", "Sender", "graylog@example.org", "The sender of sent out mail alerts", ConfigurationField.Optional.OPTIONAL)); configurationRequest.addField(new TextField("subject", "E-Mail Subject", "Graylog alert for stream: ${stream.title}: ${check_result.resultDescription}", "The subject of sent out mail alerts", ConfigurationField.Optional.NOT_OPTIONAL)); configurationRequest.addField(new TextField("body", "E-Mail Body", FormattedEmailAlertSender.bodyTemplate, "The template to generate the body from", ConfigurationField.Optional.OPTIONAL, TextField.Attribute.TEXTAREA)); configurationRequest.addField(new ListField(CK_USER_RECEIVERS, "User Receivers", Collections.emptyList(), userNames, "Graylog usernames that should receive this alert", ConfigurationField.Optional.OPTIONAL)); configurationRequest.addField(new ListField(CK_EMAIL_RECEIVERS, "E-Mail Receivers", Collections.emptyList(), Collections.emptyMap(), "E-Mail addresses that should receive this alert", ConfigurationField.Optional.OPTIONAL, ListField.Attribute.ALLOW_CREATE)); return configurationRequest; }
java
private ConfigurationRequest getConfigurationRequest(Map<String, String> userNames) { ConfigurationRequest configurationRequest = new ConfigurationRequest(); configurationRequest.addField(new TextField("sender", "Sender", "graylog@example.org", "The sender of sent out mail alerts", ConfigurationField.Optional.OPTIONAL)); configurationRequest.addField(new TextField("subject", "E-Mail Subject", "Graylog alert for stream: ${stream.title}: ${check_result.resultDescription}", "The subject of sent out mail alerts", ConfigurationField.Optional.NOT_OPTIONAL)); configurationRequest.addField(new TextField("body", "E-Mail Body", FormattedEmailAlertSender.bodyTemplate, "The template to generate the body from", ConfigurationField.Optional.OPTIONAL, TextField.Attribute.TEXTAREA)); configurationRequest.addField(new ListField(CK_USER_RECEIVERS, "User Receivers", Collections.emptyList(), userNames, "Graylog usernames that should receive this alert", ConfigurationField.Optional.OPTIONAL)); configurationRequest.addField(new ListField(CK_EMAIL_RECEIVERS, "E-Mail Receivers", Collections.emptyList(), Collections.emptyMap(), "E-Mail addresses that should receive this alert", ConfigurationField.Optional.OPTIONAL, ListField.Attribute.ALLOW_CREATE)); return configurationRequest; }
[ "private", "ConfigurationRequest", "getConfigurationRequest", "(", "Map", "<", "String", ",", "String", ">", "userNames", ")", "{", "ConfigurationRequest", "configurationRequest", "=", "new", "ConfigurationRequest", "(", ")", ";", "configurationRequest", ".", "addField", "(", "new", "TextField", "(", "\"sender\"", ",", "\"Sender\"", ",", "\"graylog@example.org\"", ",", "\"The sender of sent out mail alerts\"", ",", "ConfigurationField", ".", "Optional", ".", "OPTIONAL", ")", ")", ";", "configurationRequest", ".", "addField", "(", "new", "TextField", "(", "\"subject\"", ",", "\"E-Mail Subject\"", ",", "\"Graylog alert for stream: ${stream.title}: ${check_result.resultDescription}\"", ",", "\"The subject of sent out mail alerts\"", ",", "ConfigurationField", ".", "Optional", ".", "NOT_OPTIONAL", ")", ")", ";", "configurationRequest", ".", "addField", "(", "new", "TextField", "(", "\"body\"", ",", "\"E-Mail Body\"", ",", "FormattedEmailAlertSender", ".", "bodyTemplate", ",", "\"The template to generate the body from\"", ",", "ConfigurationField", ".", "Optional", ".", "OPTIONAL", ",", "TextField", ".", "Attribute", ".", "TEXTAREA", ")", ")", ";", "configurationRequest", ".", "addField", "(", "new", "ListField", "(", "CK_USER_RECEIVERS", ",", "\"User Receivers\"", ",", "Collections", ".", "emptyList", "(", ")", ",", "userNames", ",", "\"Graylog usernames that should receive this alert\"", ",", "ConfigurationField", ".", "Optional", ".", "OPTIONAL", ")", ")", ";", "configurationRequest", ".", "addField", "(", "new", "ListField", "(", "CK_EMAIL_RECEIVERS", ",", "\"E-Mail Receivers\"", ",", "Collections", ".", "emptyList", "(", ")", ",", "Collections", ".", "emptyMap", "(", ")", ",", "\"E-Mail addresses that should receive this alert\"", ",", "ConfigurationField", ".", "Optional", ".", "OPTIONAL", ",", "ListField", ".", "Attribute", ".", "ALLOW_CREATE", ")", ")", ";", "return", "configurationRequest", ";", "}" ]
I am truly sorry about this, but leaking the user list is not okay...
[ "I", "am", "truly", "sorry", "about", "this", "but", "leaking", "the", "user", "list", "is", "not", "okay", "..." ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog2/alarmcallbacks/EmailAlarmCallback.java#L173-L210
14,859
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog2/plugin/inputs/transports/ThrottleableTransport.java
ThrottleableTransport.updateThrottleState
@Subscribe public void updateThrottleState(ThrottleState throttleState) { // Only run if throttling is enabled. if (!throttlingAllowed) { return; } // check if we are throttled final boolean throttled = determineIfThrottled(throttleState); if (currentlyThrottled.get()) { // no need to unblock if (throttled) { return; } // sanity check if (blockLatch == null) { log.error("Expected to see a transport throttle latch, but it is missing. This is a bug, continuing anyway."); return; } currentlyThrottled.set(false); handleChangedThrottledState(false); blockLatch.countDown(); } else if (throttled) { currentlyThrottled.set(true); handleChangedThrottledState(true); blockLatch = new CountDownLatch(1); } }
java
@Subscribe public void updateThrottleState(ThrottleState throttleState) { // Only run if throttling is enabled. if (!throttlingAllowed) { return; } // check if we are throttled final boolean throttled = determineIfThrottled(throttleState); if (currentlyThrottled.get()) { // no need to unblock if (throttled) { return; } // sanity check if (blockLatch == null) { log.error("Expected to see a transport throttle latch, but it is missing. This is a bug, continuing anyway."); return; } currentlyThrottled.set(false); handleChangedThrottledState(false); blockLatch.countDown(); } else if (throttled) { currentlyThrottled.set(true); handleChangedThrottledState(true); blockLatch = new CountDownLatch(1); } }
[ "@", "Subscribe", "public", "void", "updateThrottleState", "(", "ThrottleState", "throttleState", ")", "{", "// Only run if throttling is enabled.", "if", "(", "!", "throttlingAllowed", ")", "{", "return", ";", "}", "// check if we are throttled", "final", "boolean", "throttled", "=", "determineIfThrottled", "(", "throttleState", ")", ";", "if", "(", "currentlyThrottled", ".", "get", "(", ")", ")", "{", "// no need to unblock", "if", "(", "throttled", ")", "{", "return", ";", "}", "// sanity check", "if", "(", "blockLatch", "==", "null", ")", "{", "log", ".", "error", "(", "\"Expected to see a transport throttle latch, but it is missing. This is a bug, continuing anyway.\"", ")", ";", "return", ";", "}", "currentlyThrottled", ".", "set", "(", "false", ")", ";", "handleChangedThrottledState", "(", "false", ")", ";", "blockLatch", ".", "countDown", "(", ")", ";", "}", "else", "if", "(", "throttled", ")", "{", "currentlyThrottled", ".", "set", "(", "true", ")", ";", "handleChangedThrottledState", "(", "true", ")", ";", "blockLatch", "=", "new", "CountDownLatch", "(", "1", ")", ";", "}", "}" ]
Only executed if the Allow Throttling checkbox is set in the input's configuration. @param throttleState current processing system state
[ "Only", "executed", "if", "the", "Allow", "Throttling", "checkbox", "is", "set", "in", "the", "input", "s", "configuration", "." ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog2/plugin/inputs/transports/ThrottleableTransport.java#L120-L146
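A reduced sketch of the state machine in updateThrottleState(), stripped of the event bus and metrics: entering the throttled state creates a fresh latch, leaving it releases whoever is blocked on that latch. Class and method names are invented for the example.

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicBoolean;

class ThrottleGate {
    private final AtomicBoolean currentlyThrottled = new AtomicBoolean(false);
    private volatile CountDownLatch blockLatch;

    void update(boolean throttled) {
        if (currentlyThrottled.get()) {
            if (throttled) {
                return;                 // still throttled, nothing to do
            }
            currentlyThrottled.set(false);
            blockLatch.countDown();     // release blocked readers
        } else if (throttled) {
            currentlyThrottled.set(true);
            blockLatch = new CountDownLatch(1); // new latch for this throttling episode
        }
    }

    public static void main(String[] args) {
        ThrottleGate gate = new ThrottleGate();
        gate.update(true);  // pressure builds up -> throttle
        gate.update(false); // pressure relieved -> latch released
    }
}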
14,860
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog2/plugin/inputs/transports/ThrottleableTransport.java
ThrottleableTransport.blockUntilUnthrottled
public boolean blockUntilUnthrottled(long timeout, TimeUnit unit) { // sanity: if there's no latch, don't try to access it if (blockLatch == null) { return false; } // purposely allow interrupts as a means to let the caller check if it should exit its run loop try { return blockLatch.await(timeout, unit); } catch (InterruptedException e) { return false; } }
java
public boolean blockUntilUnthrottled(long timeout, TimeUnit unit) { // sanity: if there's no latch, don't try to access it if (blockLatch == null) { return false; } // purposely allow interrupts as a means to let the caller check if it should exit its run loop try { return blockLatch.await(timeout, unit); } catch (InterruptedException e) { return false; } }
[ "public", "boolean", "blockUntilUnthrottled", "(", "long", "timeout", ",", "TimeUnit", "unit", ")", "{", "// sanity: if there's no latch, don't try to access it", "if", "(", "blockLatch", "==", "null", ")", "{", "return", "false", ";", "}", "// purposely allow interrupts as a means to let the caller check if it should exit its run loop", "try", "{", "return", "blockLatch", ".", "await", "(", "timeout", ",", "unit", ")", ";", "}", "catch", "(", "InterruptedException", "e", ")", "{", "return", "false", ";", "}", "}" ]
Blocks until the blockLatch is released or until the timeout is exceeded. @param timeout the maximum time to wait @param unit the time unit for the {@code timeout} argument. @return {@code true} if the blockLatch was released before the {@code timeout} elapsed, and {@code false} if the {@code timeout} was exceeded before the blockLatch was released.
[ "Blocks", "until", "the", "blockLatch", "is", "released", "or", "until", "the", "timeout", "is", "exceeded", "." ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog2/plugin/inputs/transports/ThrottleableTransport.java#L239-L250
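The consumer side of the same pattern, again as a standalone sketch rather than the real transport code: await the latch with a timeout so the caller regularly gets control back and can check its own stop flag.

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

class LatchWait {
    // Same shape as blockUntilUnthrottled(): true means the latch was released in time,
    // false means the timeout elapsed (or the thread was interrupted).
    static boolean await(CountDownLatch latch, long timeout, TimeUnit unit) {
        if (latch == null) {
            return false; // never throttled so far
        }
        try {
            return latch.await(timeout, unit);
        } catch (InterruptedException e) {
            return false; // caller is expected to re-check its run loop condition next
        }
    }

    public static void main(String[] args) {
        CountDownLatch latch = new CountDownLatch(1);
        System.out.println(await(latch, 100, TimeUnit.MILLISECONDS)); // false: nobody released it
        latch.countDown();
        System.out.println(await(latch, 100, TimeUnit.MILLISECONDS)); // true: already released
    }
}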
14,861
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog2/plugin/Tools.java
Tools.getSystemInformation
public static String getSystemInformation() { String ret = System.getProperty("java.vendor"); ret += " " + System.getProperty("java.version"); ret += " on " + System.getProperty("os.name"); ret += " " + System.getProperty("os.version"); return ret; }
java
public static String getSystemInformation() { String ret = System.getProperty("java.vendor"); ret += " " + System.getProperty("java.version"); ret += " on " + System.getProperty("os.name"); ret += " " + System.getProperty("os.version"); return ret; }
[ "public", "static", "String", "getSystemInformation", "(", ")", "{", "String", "ret", "=", "System", ".", "getProperty", "(", "\"java.vendor\"", ")", ";", "ret", "+=", "\" \"", "+", "System", ".", "getProperty", "(", "\"java.version\"", ")", ";", "ret", "+=", "\" on \"", "+", "System", ".", "getProperty", "(", "\"os.name\"", ")", ";", "ret", "+=", "\" \"", "+", "System", ".", "getProperty", "(", "\"os.version\"", ")", ";", "return", "ret", ";", "}" ]
Get a String containing version information of JRE, OS, ... @return Descriptive string of JRE and OS
[ "Get", "a", "String", "containing", "version", "information", "of", "JRE", "OS", "..." ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog2/plugin/Tools.java#L187-L193
14,862
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog2/plugin/Tools.java
Tools.timeFormatterWithOptionalMilliseconds
public static DateTimeFormatter timeFormatterWithOptionalMilliseconds() { // This is the .SSS part DateTimeParser ms = new DateTimeFormatterBuilder() .appendLiteral(".") .appendFractionOfSecond(1, 3) .toParser(); return new DateTimeFormatterBuilder() .append(DateTimeFormat.forPattern(ES_DATE_FORMAT_NO_MS).withZoneUTC()) .appendOptional(ms) .toFormatter(); }
java
public static DateTimeFormatter timeFormatterWithOptionalMilliseconds() { // This is the .SSS part DateTimeParser ms = new DateTimeFormatterBuilder() .appendLiteral(".") .appendFractionOfSecond(1, 3) .toParser(); return new DateTimeFormatterBuilder() .append(DateTimeFormat.forPattern(ES_DATE_FORMAT_NO_MS).withZoneUTC()) .appendOptional(ms) .toFormatter(); }
[ "public", "static", "DateTimeFormatter", "timeFormatterWithOptionalMilliseconds", "(", ")", "{", "// This is the .SSS part", "DateTimeParser", "ms", "=", "new", "DateTimeFormatterBuilder", "(", ")", ".", "appendLiteral", "(", "\".\"", ")", ".", "appendFractionOfSecond", "(", "1", ",", "3", ")", ".", "toParser", "(", ")", ";", "return", "new", "DateTimeFormatterBuilder", "(", ")", ".", "append", "(", "DateTimeFormat", ".", "forPattern", "(", "ES_DATE_FORMAT_NO_MS", ")", ".", "withZoneUTC", "(", ")", ")", ".", "appendOptional", "(", "ms", ")", ".", "toFormatter", "(", ")", ";", "}" ]
Accepts our ElasticSearch time formats without milliseconds. @return A DateTimeFormatter suitable to parse an ES_DATE_FORMAT formatted string to a DateTime Object even if it contains no milliseconds.
[ "Accepts", "our", "ElasticSearch", "time", "formats", "without", "milliseconds", "." ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog2/plugin/Tools.java#L346-L357
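A runnable Joda-Time illustration of the optional-milliseconds trick used above. The base pattern "yyyy-MM-dd HH:mm:ss" is an assumption standing in for the ES_DATE_FORMAT_NO_MS constant, which is defined elsewhere in Tools.

import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.DateTimeFormatterBuilder;
import org.joda.time.format.DateTimeParser;

class OptionalMillisFormatter {
    public static void main(String[] args) {
        // Optional ".SSS" part
        DateTimeParser ms = new DateTimeFormatterBuilder()
                .appendLiteral(".")
                .appendFractionOfSecond(1, 3)
                .toParser();
        DateTimeFormatter formatter = new DateTimeFormatterBuilder()
                .append(DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss").withZoneUTC())
                .appendOptional(ms)
                .toFormatter();
        // Both inputs parse; the fraction is simply absent in the first one.
        System.out.println(DateTime.parse("2019-01-01 12:00:00", formatter));
        System.out.println(DateTime.parse("2019-01-01 12:00:00.123", formatter));
    }
}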
14,863
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog2/plugin/Tools.java
Tools.elasticSearchTimeFormatToISO8601
public static String elasticSearchTimeFormatToISO8601(String time) { try { DateTime dt = DateTime.parse(time, ES_DATE_FORMAT_FORMATTER); return getISO8601String(dt); } catch (IllegalArgumentException e) { return time; } }
java
public static String elasticSearchTimeFormatToISO8601(String time) { try { DateTime dt = DateTime.parse(time, ES_DATE_FORMAT_FORMATTER); return getISO8601String(dt); } catch (IllegalArgumentException e) { return time; } }
[ "public", "static", "String", "elasticSearchTimeFormatToISO8601", "(", "String", "time", ")", "{", "try", "{", "DateTime", "dt", "=", "DateTime", ".", "parse", "(", "time", ",", "ES_DATE_FORMAT_FORMATTER", ")", ";", "return", "getISO8601String", "(", "dt", ")", ";", "}", "catch", "(", "IllegalArgumentException", "e", ")", "{", "return", "time", ";", "}", "}" ]
Try to parse a date in ES_DATE_FORMAT format considering it is in UTC and convert it to an ISO8601 date. If an error is encountered in the process, it will return the original string.
[ "Try", "to", "parse", "a", "date", "in", "ES_DATE_FORMAT", "format", "considering", "it", "is", "in", "UTC", "and", "convert", "it", "to", "an", "ISO8601", "date", ".", "If", "an", "error", "is", "encountered", "in", "the", "process", "it", "will", "return", "the", "original", "string", "." ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog2/plugin/Tools.java#L380-L387
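The parse-or-pass-through behaviour described in the docstring, shown as a hedged standalone sketch: the pattern below is assumed, and getISO8601String() is replaced by Joda's ISODateTimeFormat for the sake of a self-contained example.

import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.ISODateTimeFormat;

class ParseWithFallback {
    private static final DateTimeFormatter ES_FORMAT =
            DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss.SSS").withZoneUTC(); // assumed pattern

    static String toIso8601(String time) {
        try {
            return ISODateTimeFormat.dateTime().print(DateTime.parse(time, ES_FORMAT));
        } catch (IllegalArgumentException e) {
            return time; // unparseable input is passed through unchanged
        }
    }

    public static void main(String[] args) {
        System.out.println(toIso8601("2019-01-01 12:00:00.000")); // ISO 8601 form
        System.out.println(toIso8601("not a timestamp"));         // returned as-is
    }
}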
14,864
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog2/plugin/Tools.java
Tools.silenceUncaughtExceptionsInThisThread
public static void silenceUncaughtExceptionsInThisThread() { Thread.currentThread().setUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() { @Override public void uncaughtException(Thread ignored, Throwable ignored1) { } }); }
java
public static void silenceUncaughtExceptionsInThisThread() { Thread.currentThread().setUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() { @Override public void uncaughtException(Thread ignored, Throwable ignored1) { } }); }
[ "public", "static", "void", "silenceUncaughtExceptionsInThisThread", "(", ")", "{", "Thread", ".", "currentThread", "(", ")", ".", "setUncaughtExceptionHandler", "(", "new", "Thread", ".", "UncaughtExceptionHandler", "(", ")", "{", "@", "Override", "public", "void", "uncaughtException", "(", "Thread", "ignored", ",", "Throwable", "ignored1", ")", "{", "}", "}", ")", ";", "}" ]
The default uncaught exception handler will print to STDERR, which we don't always want for threads. Using this utility method you can avoid writing to STDERR on a per-thread basis
[ "The", "default", "uncaught", "exception", "handler", "will", "print", "to", "STDERR", "which", "we", "don", "t", "always", "want", "for", "threads", ".", "Using", "this", "utility", "method", "you", "can", "avoid", "writing", "to", "STDERR", "on", "a", "per", "-", "thread", "basis" ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog2/plugin/Tools.java#L622-L628
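A tiny runnable demonstration of the same idea with a lambda handler (the JDK's Thread.UncaughtExceptionHandler is a functional interface); without the handler the exception's stack trace would land on STDERR:

class SilencedThread {
    public static void main(String[] args) throws InterruptedException {
        Thread t = new Thread(() -> {
            Thread.currentThread().setUncaughtExceptionHandler((thread, throwable) -> { /* swallow */ });
            throw new IllegalStateException("boom");
        });
        t.start();
        t.join(); // the thread dies quietly, nothing is printed
    }
}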
14,865
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog2/indexer/indices/Indices.java
Indices.exists
public boolean exists(String indexName) { try { final JestResult result = jestClient.execute(new GetSettings.Builder().addIndex(indexName).build()); return result.isSucceeded() && Iterators.contains(result.getJsonObject().fieldNames(), indexName); } catch (IOException e) { throw new ElasticsearchException("Couldn't check existence of index " + indexName, e); } }
java
public boolean exists(String indexName) { try { final JestResult result = jestClient.execute(new GetSettings.Builder().addIndex(indexName).build()); return result.isSucceeded() && Iterators.contains(result.getJsonObject().fieldNames(), indexName); } catch (IOException e) { throw new ElasticsearchException("Couldn't check existence of index " + indexName, e); } }
[ "public", "boolean", "exists", "(", "String", "indexName", ")", "{", "try", "{", "final", "JestResult", "result", "=", "jestClient", ".", "execute", "(", "new", "GetSettings", ".", "Builder", "(", ")", ".", "addIndex", "(", "indexName", ")", ".", "build", "(", ")", ")", ";", "return", "result", ".", "isSucceeded", "(", ")", "&&", "Iterators", ".", "contains", "(", "result", ".", "getJsonObject", "(", ")", ".", "fieldNames", "(", ")", ",", "indexName", ")", ";", "}", "catch", "(", "IOException", "e", ")", "{", "throw", "new", "ElasticsearchException", "(", "\"Couldn't check existence of index \"", "+", "indexName", ",", "e", ")", ";", "}", "}" ]
Check if a given name is an existing index. @param indexName Name of the index to check presence for. @return {@code true} if indexName is an existing index, {@code false} if it is non-existing or an alias.
[ "Check", "if", "a", "given", "name", "is", "an", "existing", "index", "." ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog2/indexer/indices/Indices.java#L275-L282
14,866
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog2/indexer/indices/Indices.java
Indices.aliasExists
public boolean aliasExists(String alias) { try { final JestResult result = jestClient.execute(new GetSettings.Builder().addIndex(alias).build()); return result.isSucceeded() && !Iterators.contains(result.getJsonObject().fieldNames(), alias); } catch (IOException e) { throw new ElasticsearchException("Couldn't check existence of alias " + alias, e); } }
java
public boolean aliasExists(String alias) { try { final JestResult result = jestClient.execute(new GetSettings.Builder().addIndex(alias).build()); return result.isSucceeded() && !Iterators.contains(result.getJsonObject().fieldNames(), alias); } catch (IOException e) { throw new ElasticsearchException("Couldn't check existence of alias " + alias, e); } }
[ "public", "boolean", "aliasExists", "(", "String", "alias", ")", "{", "try", "{", "final", "JestResult", "result", "=", "jestClient", ".", "execute", "(", "new", "GetSettings", ".", "Builder", "(", ")", ".", "addIndex", "(", "alias", ")", ".", "build", "(", ")", ")", ";", "return", "result", ".", "isSucceeded", "(", ")", "&&", "!", "Iterators", ".", "contains", "(", "result", ".", "getJsonObject", "(", ")", ".", "fieldNames", "(", ")", ",", "alias", ")", ";", "}", "catch", "(", "IOException", "e", ")", "{", "throw", "new", "ElasticsearchException", "(", "\"Couldn't check existence of alias \"", "+", "alias", ",", "e", ")", ";", "}", "}" ]
Check if a given name is an existing alias. @param alias Name of the alias to check presence for. @return {@code true} if alias is an existing alias, {@code false} if it is non-existing or an index.
[ "Check", "if", "a", "given", "name", "is", "an", "existing", "alias", "." ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog2/indexer/indices/Indices.java#L290-L297
14,867
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog2/indexer/indices/Indices.java
Indices.getIndexNamesAndAliases
@NotNull public Map<String, Set<String>> getIndexNamesAndAliases(String indexPattern) { // only request indices matching the name or pattern in `indexPattern` and only get the alias names for each index, // not the settings or mappings final GetAliases request = new GetAliases.Builder() .addIndex(indexPattern) // ES 6 changed the "expand_wildcards" default value for the /_alias API from "open" to "all". // Since our code expects only open indices to be returned, we have to explicitly set the parameter now. .setParameter("expand_wildcards", "open") .build(); final JestResult jestResult = JestUtils.execute(jestClient, request, () -> "Couldn't collect aliases for index pattern " + indexPattern); final ImmutableMap.Builder<String, Set<String>> indexAliasesBuilder = ImmutableMap.builder(); final Iterator<Map.Entry<String, JsonNode>> it = jestResult.getJsonObject().fields(); while (it.hasNext()) { final Map.Entry<String, JsonNode> entry = it.next(); final String indexName = entry.getKey(); final JsonNode aliasMetaData = entry.getValue().path("aliases"); if (aliasMetaData.isObject()) { final ImmutableSet<String> aliasNames = ImmutableSet.copyOf(aliasMetaData.fieldNames()); indexAliasesBuilder.put(indexName, aliasNames); } } return indexAliasesBuilder.build(); }
java
@NotNull public Map<String, Set<String>> getIndexNamesAndAliases(String indexPattern) { // only request indices matching the name or pattern in `indexPattern` and only get the alias names for each index, // not the settings or mappings final GetAliases request = new GetAliases.Builder() .addIndex(indexPattern) // ES 6 changed the "expand_wildcards" default value for the /_alias API from "open" to "all". // Since our code expects only open indices to be returned, we have to explicitly set the parameter now. .setParameter("expand_wildcards", "open") .build(); final JestResult jestResult = JestUtils.execute(jestClient, request, () -> "Couldn't collect aliases for index pattern " + indexPattern); final ImmutableMap.Builder<String, Set<String>> indexAliasesBuilder = ImmutableMap.builder(); final Iterator<Map.Entry<String, JsonNode>> it = jestResult.getJsonObject().fields(); while (it.hasNext()) { final Map.Entry<String, JsonNode> entry = it.next(); final String indexName = entry.getKey(); final JsonNode aliasMetaData = entry.getValue().path("aliases"); if (aliasMetaData.isObject()) { final ImmutableSet<String> aliasNames = ImmutableSet.copyOf(aliasMetaData.fieldNames()); indexAliasesBuilder.put(indexName, aliasNames); } } return indexAliasesBuilder.build(); }
[ "@", "NotNull", "public", "Map", "<", "String", ",", "Set", "<", "String", ">", ">", "getIndexNamesAndAliases", "(", "String", "indexPattern", ")", "{", "// only request indices matching the name or pattern in `indexPattern` and only get the alias names for each index,", "// not the settings or mappings", "final", "GetAliases", "request", "=", "new", "GetAliases", ".", "Builder", "(", ")", ".", "addIndex", "(", "indexPattern", ")", "// ES 6 changed the \"expand_wildcards\" default value for the /_alias API from \"open\" to \"all\".", "// Since our code expects only open indices to be returned, we have to explicitly set the parameter now.", ".", "setParameter", "(", "\"expand_wildcards\"", ",", "\"open\"", ")", ".", "build", "(", ")", ";", "final", "JestResult", "jestResult", "=", "JestUtils", ".", "execute", "(", "jestClient", ",", "request", ",", "(", ")", "->", "\"Couldn't collect aliases for index pattern \"", "+", "indexPattern", ")", ";", "final", "ImmutableMap", ".", "Builder", "<", "String", ",", "Set", "<", "String", ">", ">", "indexAliasesBuilder", "=", "ImmutableMap", ".", "builder", "(", ")", ";", "final", "Iterator", "<", "Map", ".", "Entry", "<", "String", ",", "JsonNode", ">", ">", "it", "=", "jestResult", ".", "getJsonObject", "(", ")", ".", "fields", "(", ")", ";", "while", "(", "it", ".", "hasNext", "(", ")", ")", "{", "final", "Map", ".", "Entry", "<", "String", ",", "JsonNode", ">", "entry", "=", "it", ".", "next", "(", ")", ";", "final", "String", "indexName", "=", "entry", ".", "getKey", "(", ")", ";", "final", "JsonNode", "aliasMetaData", "=", "entry", ".", "getValue", "(", ")", ".", "path", "(", "\"aliases\"", ")", ";", "if", "(", "aliasMetaData", ".", "isObject", "(", ")", ")", "{", "final", "ImmutableSet", "<", "String", ">", "aliasNames", "=", "ImmutableSet", ".", "copyOf", "(", "aliasMetaData", ".", "fieldNames", "(", ")", ")", ";", "indexAliasesBuilder", ".", "put", "(", "indexName", ",", "aliasNames", ")", ";", "}", "}", "return", "indexAliasesBuilder", ".", "build", "(", ")", ";", "}" ]
Returns index names and their aliases. This only returns indices which actually have an alias.
[ "Returns", "index", "names", "and", "their", "aliases", ".", "This", "only", "returns", "indices", "which", "actually", "have", "an", "alias", "." ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog2/indexer/indices/Indices.java#L302-L328
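The response handling above boils down to iterating the fields of a Jackson JsonNode. A self-contained sketch with a hand-written _alias-style JSON document (the shape is an assumption for illustration, not a captured Elasticsearch response):

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;

class AliasResponseParsing {
    public static void main(String[] args) throws Exception {
        String json = "{\"graylog_0\":{\"aliases\":{\"graylog_deflector\":{}}},"
                + "\"graylog_1\":{\"aliases\":{}}}";
        JsonNode root = new ObjectMapper().readTree(json);

        Map<String, Set<String>> indexAliases = new HashMap<>();
        Iterator<Map.Entry<String, JsonNode>> it = root.fields();
        while (it.hasNext()) {
            Map.Entry<String, JsonNode> entry = it.next();
            JsonNode aliases = entry.getValue().path("aliases");
            if (aliases.isObject()) {
                Set<String> names = new HashSet<>();
                aliases.fieldNames().forEachRemaining(names::add);
                indexAliases.put(entry.getKey(), names);
            }
        }
        System.out.println(indexAliases); // graylog_0 -> [graylog_deflector], graylog_1 -> []
    }
}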
14,868
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog2/indexer/indices/Indices.java
Indices.getIndexTemplate
public Map<String, Object> getIndexTemplate(IndexSet indexSet) { final String indexWildcard = indexSet.getIndexWildcard(); final String analyzer = indexSet.getConfig().indexAnalyzer(); return indexMappingFactory.createIndexMapping().messageTemplate(indexWildcard, analyzer); }
java
public Map<String, Object> getIndexTemplate(IndexSet indexSet) { final String indexWildcard = indexSet.getIndexWildcard(); final String analyzer = indexSet.getConfig().indexAnalyzer(); return indexMappingFactory.createIndexMapping().messageTemplate(indexWildcard, analyzer); }
[ "public", "Map", "<", "String", ",", "Object", ">", "getIndexTemplate", "(", "IndexSet", "indexSet", ")", "{", "final", "String", "indexWildcard", "=", "indexSet", ".", "getIndexWildcard", "(", ")", ";", "final", "String", "analyzer", "=", "indexSet", ".", "getConfig", "(", ")", ".", "indexAnalyzer", "(", ")", ";", "return", "indexMappingFactory", ".", "createIndexMapping", "(", ")", ".", "messageTemplate", "(", "indexWildcard", ",", "analyzer", ")", ";", "}" ]
Returns the generated Elasticsearch index template for the given index set. @param indexSet the index set @return the generated index template
[ "Returns", "the", "generated", "Elasticsearch", "index", "template", "for", "the", "given", "index", "set", "." ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog2/indexer/indices/Indices.java#L381-L386
14,869
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog2/indexer/indices/Indices.java
Indices.indexRangeStatsOfIndex
public IndexRangeStats indexRangeStatsOfIndex(String index) { final FilterAggregationBuilder builder = AggregationBuilders.filter("agg", QueryBuilders.existsQuery(Message.FIELD_TIMESTAMP)) .subAggregation(AggregationBuilders.min("ts_min").field(Message.FIELD_TIMESTAMP)) .subAggregation(AggregationBuilders.max("ts_max").field(Message.FIELD_TIMESTAMP)) .subAggregation(AggregationBuilders.terms("streams").field(Message.FIELD_STREAMS)); final String query = searchSource() .aggregation(builder) .size(0) .toString(); final Search request = new Search.Builder(query) .addIndex(index) .setSearchType(SearchType.DFS_QUERY_THEN_FETCH) .ignoreUnavailable(true) .build(); if (LOG.isDebugEnabled()) { String data = "{}"; try { data = request.getData(objectMapper.copy().enable(SerializationFeature.INDENT_OUTPUT)); } catch (IOException e) { LOG.debug("Couldn't pretty print request payload", e); } LOG.debug("Index range query: _search/{}: {}", index, data); } final SearchResult result = JestUtils.execute(jestClient, request, () -> "Couldn't build index range of index " + index); final FilterAggregation f = result.getAggregations().getFilterAggregation("agg"); if (f == null) { throw new IndexNotFoundException("Couldn't build index range of index " + index + " because it doesn't exist."); } else if (f.getCount() == 0L) { LOG.debug("No documents with attribute \"timestamp\" found in index <{}>", index); return IndexRangeStats.EMPTY; } final MinAggregation minAgg = f.getMinAggregation("ts_min"); final DateTime min = new DateTime(minAgg.getMin().longValue(), DateTimeZone.UTC); final MaxAggregation maxAgg = f.getMaxAggregation("ts_max"); final DateTime max = new DateTime(maxAgg.getMax().longValue(), DateTimeZone.UTC); // make sure we return an empty list, so we can differentiate between old indices that don't have this information // and newer ones that simply have no streams. final TermsAggregation streams = f.getTermsAggregation("streams"); final List<String> streamIds = streams.getBuckets().stream() .map(TermsAggregation.Entry::getKeyAsString) .collect(toList()); return IndexRangeStats.create(min, max, streamIds); }
java
public IndexRangeStats indexRangeStatsOfIndex(String index) { final FilterAggregationBuilder builder = AggregationBuilders.filter("agg", QueryBuilders.existsQuery(Message.FIELD_TIMESTAMP)) .subAggregation(AggregationBuilders.min("ts_min").field(Message.FIELD_TIMESTAMP)) .subAggregation(AggregationBuilders.max("ts_max").field(Message.FIELD_TIMESTAMP)) .subAggregation(AggregationBuilders.terms("streams").field(Message.FIELD_STREAMS)); final String query = searchSource() .aggregation(builder) .size(0) .toString(); final Search request = new Search.Builder(query) .addIndex(index) .setSearchType(SearchType.DFS_QUERY_THEN_FETCH) .ignoreUnavailable(true) .build(); if (LOG.isDebugEnabled()) { String data = "{}"; try { data = request.getData(objectMapper.copy().enable(SerializationFeature.INDENT_OUTPUT)); } catch (IOException e) { LOG.debug("Couldn't pretty print request payload", e); } LOG.debug("Index range query: _search/{}: {}", index, data); } final SearchResult result = JestUtils.execute(jestClient, request, () -> "Couldn't build index range of index " + index); final FilterAggregation f = result.getAggregations().getFilterAggregation("agg"); if (f == null) { throw new IndexNotFoundException("Couldn't build index range of index " + index + " because it doesn't exist."); } else if (f.getCount() == 0L) { LOG.debug("No documents with attribute \"timestamp\" found in index <{}>", index); return IndexRangeStats.EMPTY; } final MinAggregation minAgg = f.getMinAggregation("ts_min"); final DateTime min = new DateTime(minAgg.getMin().longValue(), DateTimeZone.UTC); final MaxAggregation maxAgg = f.getMaxAggregation("ts_max"); final DateTime max = new DateTime(maxAgg.getMax().longValue(), DateTimeZone.UTC); // make sure we return an empty list, so we can differentiate between old indices that don't have this information // and newer ones that simply have no streams. final TermsAggregation streams = f.getTermsAggregation("streams"); final List<String> streamIds = streams.getBuckets().stream() .map(TermsAggregation.Entry::getKeyAsString) .collect(toList()); return IndexRangeStats.create(min, max, streamIds); }
[ "public", "IndexRangeStats", "indexRangeStatsOfIndex", "(", "String", "index", ")", "{", "final", "FilterAggregationBuilder", "builder", "=", "AggregationBuilders", ".", "filter", "(", "\"agg\"", ",", "QueryBuilders", ".", "existsQuery", "(", "Message", ".", "FIELD_TIMESTAMP", ")", ")", ".", "subAggregation", "(", "AggregationBuilders", ".", "min", "(", "\"ts_min\"", ")", ".", "field", "(", "Message", ".", "FIELD_TIMESTAMP", ")", ")", ".", "subAggregation", "(", "AggregationBuilders", ".", "max", "(", "\"ts_max\"", ")", ".", "field", "(", "Message", ".", "FIELD_TIMESTAMP", ")", ")", ".", "subAggregation", "(", "AggregationBuilders", ".", "terms", "(", "\"streams\"", ")", ".", "field", "(", "Message", ".", "FIELD_STREAMS", ")", ")", ";", "final", "String", "query", "=", "searchSource", "(", ")", ".", "aggregation", "(", "builder", ")", ".", "size", "(", "0", ")", ".", "toString", "(", ")", ";", "final", "Search", "request", "=", "new", "Search", ".", "Builder", "(", "query", ")", ".", "addIndex", "(", "index", ")", ".", "setSearchType", "(", "SearchType", ".", "DFS_QUERY_THEN_FETCH", ")", ".", "ignoreUnavailable", "(", "true", ")", ".", "build", "(", ")", ";", "if", "(", "LOG", ".", "isDebugEnabled", "(", ")", ")", "{", "String", "data", "=", "\"{}\"", ";", "try", "{", "data", "=", "request", ".", "getData", "(", "objectMapper", ".", "copy", "(", ")", ".", "enable", "(", "SerializationFeature", ".", "INDENT_OUTPUT", ")", ")", ";", "}", "catch", "(", "IOException", "e", ")", "{", "LOG", ".", "debug", "(", "\"Couldn't pretty print request payload\"", ",", "e", ")", ";", "}", "LOG", ".", "debug", "(", "\"Index range query: _search/{}: {}\"", ",", "index", ",", "data", ")", ";", "}", "final", "SearchResult", "result", "=", "JestUtils", ".", "execute", "(", "jestClient", ",", "request", ",", "(", ")", "->", "\"Couldn't build index range of index \"", "+", "index", ")", ";", "final", "FilterAggregation", "f", "=", "result", ".", "getAggregations", "(", ")", ".", "getFilterAggregation", "(", "\"agg\"", ")", ";", "if", "(", "f", "==", "null", ")", "{", "throw", "new", "IndexNotFoundException", "(", "\"Couldn't build index range of index \"", "+", "index", "+", "\" because it doesn't exist.\"", ")", ";", "}", "else", "if", "(", "f", ".", "getCount", "(", ")", "==", "0L", ")", "{", "LOG", ".", "debug", "(", "\"No documents with attribute \\\"timestamp\\\" found in index <{}>\"", ",", "index", ")", ";", "return", "IndexRangeStats", ".", "EMPTY", ";", "}", "final", "MinAggregation", "minAgg", "=", "f", ".", "getMinAggregation", "(", "\"ts_min\"", ")", ";", "final", "DateTime", "min", "=", "new", "DateTime", "(", "minAgg", ".", "getMin", "(", ")", ".", "longValue", "(", ")", ",", "DateTimeZone", ".", "UTC", ")", ";", "final", "MaxAggregation", "maxAgg", "=", "f", ".", "getMaxAggregation", "(", "\"ts_max\"", ")", ";", "final", "DateTime", "max", "=", "new", "DateTime", "(", "maxAgg", ".", "getMax", "(", ")", ".", "longValue", "(", ")", ",", "DateTimeZone", ".", "UTC", ")", ";", "// make sure we return an empty list, so we can differentiate between old indices that don't have this information", "// and newer ones that simply have no streams.", "final", "TermsAggregation", "streams", "=", "f", ".", "getTermsAggregation", "(", "\"streams\"", ")", ";", "final", "List", "<", "String", ">", "streamIds", "=", "streams", ".", "getBuckets", "(", ")", ".", "stream", "(", ")", ".", "map", "(", "TermsAggregation", ".", "Entry", "::", "getKeyAsString", ")", ".", "collect", "(", "toList", "(", ")", ")", ";", 
"return", "IndexRangeStats", ".", "create", "(", "min", ",", "max", ",", "streamIds", ")", ";", "}" ]
Calculate min and max message timestamps in the given index. @param index Name of the index to query. @return the timestamp stats in the given index, or {@code null} if they couldn't be calculated. @see org.elasticsearch.search.aggregations.metrics.stats.Stats
[ "Calculate", "min", "and", "max", "message", "timestamps", "in", "the", "given", "index", "." ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog2/indexer/indices/Indices.java#L715-L764
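The indexRangeStatsOfIndex record above aggregates the min/max timestamps plus the stream ids of one index. A minimal caller sketch (the injected Indices instance, the index name, and the min()/max()/streamIds() accessor names on IndexRangeStats are assumptions for illustration, not part of this record):

    // Hypothetical caller; accessor names are assumed AutoValue-style getters.
    final IndexRangeStats stats = indices.indexRangeStatsOfIndex("graylog_23");
    if (stats != IndexRangeStats.EMPTY) {
        LOG.info("Index spans {} to {}, streams: {}", stats.min(), stats.max(), stats.streamIds());
    }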
14,870
Graylog2/graylog2-server
graylog2-server/src/main/java/org/graylog2/cluster/NodeServiceImpl.java
NodeServiceImpl.markAsAlive
@Override public void markAsAlive(Node node, boolean isMaster, String restTransportAddress) { node.getFields().put("last_seen", Tools.getUTCTimestamp()); node.getFields().put("is_master", isMaster); node.getFields().put("transport_address", restTransportAddress); try { save(node); } catch (ValidationException e) { throw new RuntimeException("Validation failed.", e); } }
java
@Override public void markAsAlive(Node node, boolean isMaster, String restTransportAddress) { node.getFields().put("last_seen", Tools.getUTCTimestamp()); node.getFields().put("is_master", isMaster); node.getFields().put("transport_address", restTransportAddress); try { save(node); } catch (ValidationException e) { throw new RuntimeException("Validation failed.", e); } }
[ "@", "Override", "public", "void", "markAsAlive", "(", "Node", "node", ",", "boolean", "isMaster", ",", "String", "restTransportAddress", ")", "{", "node", ".", "getFields", "(", ")", ".", "put", "(", "\"last_seen\"", ",", "Tools", ".", "getUTCTimestamp", "(", ")", ")", ";", "node", ".", "getFields", "(", ")", ".", "put", "(", "\"is_master\"", ",", "isMaster", ")", ";", "node", ".", "getFields", "(", ")", ".", "put", "(", "\"transport_address\"", ",", "restTransportAddress", ")", ";", "try", "{", "save", "(", "node", ")", ";", "}", "catch", "(", "ValidationException", "e", ")", "{", "throw", "new", "RuntimeException", "(", "\"Validation failed.\"", ",", "e", ")", ";", "}", "}" ]
Mark this node as alive and probably update some settings that may have changed since last server boot. @param isMaster @param restTransportAddress
[ "Mark", "this", "node", "as", "alive", "and", "probably", "update", "some", "settings", "that", "may", "have", "changed", "since", "last", "server", "boot", "." ]
50b565dcead6e0a372236d5c2f8530dc5726fa9b
https://github.com/Graylog2/graylog2-server/blob/50b565dcead6e0a372236d5c2f8530dc5726fa9b/graylog2-server/src/main/java/org/graylog2/cluster/NodeServiceImpl.java#L129-L139
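A one-line hypothetical heartbeat call for the markAsAlive record above; the node instance and the transport address are placeholders, not taken from this record:

    // Hypothetical usage; "node" is assumed to have been looked up elsewhere.
    nodeService.markAsAlive(node, isMaster, "http://10.0.0.1:9000/api/");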
14,871
tobato/FastDFS_Client
src/main/java/com/github/tobato/fastdfs/domain/proto/mapper/BytesUtil.java
BytesUtil.buff2long
public static long buff2long(byte[] bs, int offset) { return (((long) (bs[offset] >= 0 ? bs[offset] : 256 + bs[offset])) << 56) | (((long) (bs[offset + 1] >= 0 ? bs[offset + 1] : 256 + bs[offset + 1])) << 48) | (((long) (bs[offset + 2] >= 0 ? bs[offset + 2] : 256 + bs[offset + 2])) << 40) | (((long) (bs[offset + 3] >= 0 ? bs[offset + 3] : 256 + bs[offset + 3])) << 32) | (((long) (bs[offset + 4] >= 0 ? bs[offset + 4] : 256 + bs[offset + 4])) << 24) | (((long) (bs[offset + 5] >= 0 ? bs[offset + 5] : 256 + bs[offset + 5])) << 16) | (((long) (bs[offset + 6] >= 0 ? bs[offset + 6] : 256 + bs[offset + 6])) << 8) | (bs[offset + 7] >= 0 ? bs[offset + 7] : 256 + bs[offset + 7]); }
java
public static long buff2long(byte[] bs, int offset) { return (((long) (bs[offset] >= 0 ? bs[offset] : 256 + bs[offset])) << 56) | (((long) (bs[offset + 1] >= 0 ? bs[offset + 1] : 256 + bs[offset + 1])) << 48) | (((long) (bs[offset + 2] >= 0 ? bs[offset + 2] : 256 + bs[offset + 2])) << 40) | (((long) (bs[offset + 3] >= 0 ? bs[offset + 3] : 256 + bs[offset + 3])) << 32) | (((long) (bs[offset + 4] >= 0 ? bs[offset + 4] : 256 + bs[offset + 4])) << 24) | (((long) (bs[offset + 5] >= 0 ? bs[offset + 5] : 256 + bs[offset + 5])) << 16) | (((long) (bs[offset + 6] >= 0 ? bs[offset + 6] : 256 + bs[offset + 6])) << 8) | (bs[offset + 7] >= 0 ? bs[offset + 7] : 256 + bs[offset + 7]); }
[ "public", "static", "long", "buff2long", "(", "byte", "[", "]", "bs", ",", "int", "offset", ")", "{", "return", "(", "(", "(", "long", ")", "(", "bs", "[", "offset", "]", ">=", "0", "?", "bs", "[", "offset", "]", ":", "256", "+", "bs", "[", "offset", "]", ")", ")", "<<", "56", ")", "|", "(", "(", "(", "long", ")", "(", "bs", "[", "offset", "+", "1", "]", ">=", "0", "?", "bs", "[", "offset", "+", "1", "]", ":", "256", "+", "bs", "[", "offset", "+", "1", "]", ")", ")", "<<", "48", ")", "|", "(", "(", "(", "long", ")", "(", "bs", "[", "offset", "+", "2", "]", ">=", "0", "?", "bs", "[", "offset", "+", "2", "]", ":", "256", "+", "bs", "[", "offset", "+", "2", "]", ")", ")", "<<", "40", ")", "|", "(", "(", "(", "long", ")", "(", "bs", "[", "offset", "+", "3", "]", ">=", "0", "?", "bs", "[", "offset", "+", "3", "]", ":", "256", "+", "bs", "[", "offset", "+", "3", "]", ")", ")", "<<", "32", ")", "|", "(", "(", "(", "long", ")", "(", "bs", "[", "offset", "+", "4", "]", ">=", "0", "?", "bs", "[", "offset", "+", "4", "]", ":", "256", "+", "bs", "[", "offset", "+", "4", "]", ")", ")", "<<", "24", ")", "|", "(", "(", "(", "long", ")", "(", "bs", "[", "offset", "+", "5", "]", ">=", "0", "?", "bs", "[", "offset", "+", "5", "]", ":", "256", "+", "bs", "[", "offset", "+", "5", "]", ")", ")", "<<", "16", ")", "|", "(", "(", "(", "long", ")", "(", "bs", "[", "offset", "+", "6", "]", ">=", "0", "?", "bs", "[", "offset", "+", "6", "]", ":", "256", "+", "bs", "[", "offset", "+", "6", "]", ")", ")", "<<", "8", ")", "|", "(", "bs", "[", "offset", "+", "7", "]", ">=", "0", "?", "bs", "[", "offset", "+", "7", "]", ":", "256", "+", "bs", "[", "offset", "+", "7", "]", ")", ";", "}" ]
buff convert to long @param bs the buffer (big-endian) @param offset the start position based 0 @return long number
[ "buff", "convert", "to", "long" ]
8e3bfe712f1739028beed7f3a6b2cc4579a231e4
https://github.com/tobato/FastDFS_Client/blob/8e3bfe712f1739028beed7f3a6b2cc4579a231e4/src/main/java/com/github/tobato/fastdfs/domain/proto/mapper/BytesUtil.java#L42-L51
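A worked example makes the big-endian layout in the buff2long record above concrete (the byte values are illustrative only):

    // The byte at `offset` becomes the most significant byte of the result.
    byte[] header = {0, 0, 0, 0, 0, 0, 1, 2};
    long n = BytesUtil.buff2long(header, 0);   // (1 << 8) | 2 == 258
    // Negative bytes are remapped to 0..255 first, so (byte) 0xFF contributes 255, not -1.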
14,872
tobato/FastDFS_Client
src/main/java/com/github/tobato/fastdfs/domain/proto/mapper/BytesUtil.java
BytesUtil.buff2int
public static int buff2int(byte[] bs, int offset) { return ((bs[offset] >= 0 ? bs[offset] : 256 + bs[offset]) << 24) | ((bs[offset + 1] >= 0 ? bs[offset + 1] : 256 + bs[offset + 1]) << 16) | ((bs[offset + 2] >= 0 ? bs[offset + 2] : 256 + bs[offset + 2]) << 8) | (bs[offset + 3] >= 0 ? bs[offset + 3] : 256 + bs[offset + 3]); }
java
public static int buff2int(byte[] bs, int offset) { return ((bs[offset] >= 0 ? bs[offset] : 256 + bs[offset]) << 24) | ((bs[offset + 1] >= 0 ? bs[offset + 1] : 256 + bs[offset + 1]) << 16) | ((bs[offset + 2] >= 0 ? bs[offset + 2] : 256 + bs[offset + 2]) << 8) | (bs[offset + 3] >= 0 ? bs[offset + 3] : 256 + bs[offset + 3]); }
[ "public", "static", "int", "buff2int", "(", "byte", "[", "]", "bs", ",", "int", "offset", ")", "{", "return", "(", "(", "bs", "[", "offset", "]", ">=", "0", "?", "bs", "[", "offset", "]", ":", "256", "+", "bs", "[", "offset", "]", ")", "<<", "24", ")", "|", "(", "(", "bs", "[", "offset", "+", "1", "]", ">=", "0", "?", "bs", "[", "offset", "+", "1", "]", ":", "256", "+", "bs", "[", "offset", "+", "1", "]", ")", "<<", "16", ")", "|", "(", "(", "bs", "[", "offset", "+", "2", "]", ">=", "0", "?", "bs", "[", "offset", "+", "2", "]", ":", "256", "+", "bs", "[", "offset", "+", "2", "]", ")", "<<", "8", ")", "|", "(", "bs", "[", "offset", "+", "3", "]", ">=", "0", "?", "bs", "[", "offset", "+", "3", "]", ":", "256", "+", "bs", "[", "offset", "+", "3", "]", ")", ";", "}" ]
buff convert to int @param bs the buffer (big-endian) @param offset the start position based 0 @return int number
[ "buff", "convert", "to", "int" ]
8e3bfe712f1739028beed7f3a6b2cc4579a231e4
https://github.com/tobato/FastDFS_Client/blob/8e3bfe712f1739028beed7f3a6b2cc4579a231e4/src/main/java/com/github/tobato/fastdfs/domain/proto/mapper/BytesUtil.java#L60-L65
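Likewise for the four-byte variant above (values chosen for illustration):

    byte[] bs = {0x01, 0x02, 0x03, 0x04};
    int v = BytesUtil.buff2int(bs, 0);   // 0x01020304 == 16909060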
14,873
lightbend/config
config/src/main/java/com/typesafe/config/impl/SimpleConfig.java
SimpleConfig.parsePeriod
public static Period parsePeriod(String input, ConfigOrigin originForException, String pathForException) { String s = ConfigImplUtil.unicodeTrim(input); String originalUnitString = getUnits(s); String unitString = originalUnitString; String numberString = ConfigImplUtil.unicodeTrim(s.substring(0, s.length() - unitString.length())); ChronoUnit units; // this would be caught later anyway, but the error message // is more helpful if we check it here. if (numberString.length() == 0) throw new ConfigException.BadValue(originForException, pathForException, "No number in period value '" + input + "'"); if (unitString.length() > 2 && !unitString.endsWith("s")) unitString = unitString + "s"; // note that this is deliberately case-sensitive if (unitString.equals("") || unitString.equals("d") || unitString.equals("days")) { units = ChronoUnit.DAYS; } else if (unitString.equals("w") || unitString.equals("weeks")) { units = ChronoUnit.WEEKS; } else if (unitString.equals("m") || unitString.equals("mo") || unitString.equals("months")) { units = ChronoUnit.MONTHS; } else if (unitString.equals("y") || unitString.equals("years")) { units = ChronoUnit.YEARS; } else { throw new ConfigException.BadValue(originForException, pathForException, "Could not parse time unit '" + originalUnitString + "' (try d, w, mo, y)"); } try { return periodOf(Integer.parseInt(numberString), units); } catch (NumberFormatException e) { throw new ConfigException.BadValue(originForException, pathForException, "Could not parse duration number '" + numberString + "'"); } }
java
public static Period parsePeriod(String input, ConfigOrigin originForException, String pathForException) { String s = ConfigImplUtil.unicodeTrim(input); String originalUnitString = getUnits(s); String unitString = originalUnitString; String numberString = ConfigImplUtil.unicodeTrim(s.substring(0, s.length() - unitString.length())); ChronoUnit units; // this would be caught later anyway, but the error message // is more helpful if we check it here. if (numberString.length() == 0) throw new ConfigException.BadValue(originForException, pathForException, "No number in period value '" + input + "'"); if (unitString.length() > 2 && !unitString.endsWith("s")) unitString = unitString + "s"; // note that this is deliberately case-sensitive if (unitString.equals("") || unitString.equals("d") || unitString.equals("days")) { units = ChronoUnit.DAYS; } else if (unitString.equals("w") || unitString.equals("weeks")) { units = ChronoUnit.WEEKS; } else if (unitString.equals("m") || unitString.equals("mo") || unitString.equals("months")) { units = ChronoUnit.MONTHS; } else if (unitString.equals("y") || unitString.equals("years")) { units = ChronoUnit.YEARS; } else { throw new ConfigException.BadValue(originForException, pathForException, "Could not parse time unit '" + originalUnitString + "' (try d, w, mo, y)"); } try { return periodOf(Integer.parseInt(numberString), units); } catch (NumberFormatException e) { throw new ConfigException.BadValue(originForException, pathForException, "Could not parse duration number '" + numberString + "'"); } }
[ "public", "static", "Period", "parsePeriod", "(", "String", "input", ",", "ConfigOrigin", "originForException", ",", "String", "pathForException", ")", "{", "String", "s", "=", "ConfigImplUtil", ".", "unicodeTrim", "(", "input", ")", ";", "String", "originalUnitString", "=", "getUnits", "(", "s", ")", ";", "String", "unitString", "=", "originalUnitString", ";", "String", "numberString", "=", "ConfigImplUtil", ".", "unicodeTrim", "(", "s", ".", "substring", "(", "0", ",", "s", ".", "length", "(", ")", "-", "unitString", ".", "length", "(", ")", ")", ")", ";", "ChronoUnit", "units", ";", "// this would be caught later anyway, but the error message", "// is more helpful if we check it here.", "if", "(", "numberString", ".", "length", "(", ")", "==", "0", ")", "throw", "new", "ConfigException", ".", "BadValue", "(", "originForException", ",", "pathForException", ",", "\"No number in period value '\"", "+", "input", "+", "\"'\"", ")", ";", "if", "(", "unitString", ".", "length", "(", ")", ">", "2", "&&", "!", "unitString", ".", "endsWith", "(", "\"s\"", ")", ")", "unitString", "=", "unitString", "+", "\"s\"", ";", "// note that this is deliberately case-sensitive", "if", "(", "unitString", ".", "equals", "(", "\"\"", ")", "||", "unitString", ".", "equals", "(", "\"d\"", ")", "||", "unitString", ".", "equals", "(", "\"days\"", ")", ")", "{", "units", "=", "ChronoUnit", ".", "DAYS", ";", "}", "else", "if", "(", "unitString", ".", "equals", "(", "\"w\"", ")", "||", "unitString", ".", "equals", "(", "\"weeks\"", ")", ")", "{", "units", "=", "ChronoUnit", ".", "WEEKS", ";", "}", "else", "if", "(", "unitString", ".", "equals", "(", "\"m\"", ")", "||", "unitString", ".", "equals", "(", "\"mo\"", ")", "||", "unitString", ".", "equals", "(", "\"months\"", ")", ")", "{", "units", "=", "ChronoUnit", ".", "MONTHS", ";", "}", "else", "if", "(", "unitString", ".", "equals", "(", "\"y\"", ")", "||", "unitString", ".", "equals", "(", "\"years\"", ")", ")", "{", "units", "=", "ChronoUnit", ".", "YEARS", ";", "}", "else", "{", "throw", "new", "ConfigException", ".", "BadValue", "(", "originForException", ",", "pathForException", ",", "\"Could not parse time unit '\"", "+", "originalUnitString", "+", "\"' (try d, w, mo, y)\"", ")", ";", "}", "try", "{", "return", "periodOf", "(", "Integer", ".", "parseInt", "(", "numberString", ")", ",", "units", ")", ";", "}", "catch", "(", "NumberFormatException", "e", ")", "{", "throw", "new", "ConfigException", ".", "BadValue", "(", "originForException", ",", "pathForException", ",", "\"Could not parse duration number '\"", "+", "numberString", "+", "\"'\"", ")", ";", "}", "}" ]
Parses a period string. If no units are specified in the string, it is assumed to be in days. The returned period is in days. The purpose of this function is to implement the period-related methods in the ConfigObject interface. @param input the string to parse @param originForException origin of the value being parsed @param pathForException path to include in exceptions @return duration in days @throws ConfigException if string is invalid
[ "Parses", "a", "period", "string", ".", "If", "no", "units", "are", "specified", "in", "the", "string", "it", "is", "assumed", "to", "be", "in", "days", ".", "The", "returned", "period", "is", "in", "days", ".", "The", "purpose", "of", "this", "function", "is", "to", "implement", "the", "period", "-", "related", "methods", "in", "the", "ConfigObject", "interface", "." ]
68cebfde5e861e9a5fdc75ceff366ed95e17d475
https://github.com/lightbend/config/blob/68cebfde5e861e9a5fdc75ceff366ed95e17d475/config/src/main/java/com/typesafe/config/impl/SimpleConfig.java#L621-L667
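The parsePeriod record above accepts a bare number (interpreted as days) or the units d/days, w/weeks, m/mo/months, y/years. A sketch of how this typically surfaces through the public API (Config.getPeriod is assumed here; it is not part of this record):

    Config config = ConfigFactory.parseString("backup.retention = 12 weeks");
    Period retention = config.getPeriod("backup.retention");  // 12 weeks
    // "30" -> 30 days (no unit), "18 months" or "18mo" -> months, "2y" -> years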
14,874
lightbend/config
config/src/main/java/com/typesafe/config/impl/SimpleConfig.java
SimpleConfig.addMissing
static void addMissing(List<ConfigException.ValidationProblem> accumulator, ConfigValueType refType, Path path, ConfigOrigin origin) { addMissing(accumulator, getDesc(refType), path, origin); }
java
static void addMissing(List<ConfigException.ValidationProblem> accumulator, ConfigValueType refType, Path path, ConfigOrigin origin) { addMissing(accumulator, getDesc(refType), path, origin); }
[ "static", "void", "addMissing", "(", "List", "<", "ConfigException", ".", "ValidationProblem", ">", "accumulator", ",", "ConfigValueType", "refType", ",", "Path", "path", ",", "ConfigOrigin", "origin", ")", "{", "addMissing", "(", "accumulator", ",", "getDesc", "(", "refType", ")", ",", "path", ",", "origin", ")", ";", "}" ]
JavaBean stuff uses this
[ "JavaBean", "stuff", "uses", "this" ]
68cebfde5e861e9a5fdc75ceff366ed95e17d475
https://github.com/lightbend/config/blob/68cebfde5e861e9a5fdc75ceff366ed95e17d475/config/src/main/java/com/typesafe/config/impl/SimpleConfig.java#L931-L934
14,875
lightbend/config
config/src/main/java/com/typesafe/config/impl/SimpleConfig.java
SimpleConfig.checkValidObject
private static void checkValidObject(Path path, AbstractConfigObject reference, AbstractConfigObject value, List<ConfigException.ValidationProblem> accumulator) { for (Map.Entry<String, ConfigValue> entry : reference.entrySet()) { String key = entry.getKey(); Path childPath; if (path != null) childPath = Path.newKey(key).prepend(path); else childPath = Path.newKey(key); AbstractConfigValue v = value.get(key); if (v == null) { addMissing(accumulator, entry.getValue(), childPath, value.origin()); } else { checkValid(childPath, entry.getValue(), v, accumulator); } } }
java
private static void checkValidObject(Path path, AbstractConfigObject reference, AbstractConfigObject value, List<ConfigException.ValidationProblem> accumulator) { for (Map.Entry<String, ConfigValue> entry : reference.entrySet()) { String key = entry.getKey(); Path childPath; if (path != null) childPath = Path.newKey(key).prepend(path); else childPath = Path.newKey(key); AbstractConfigValue v = value.get(key); if (v == null) { addMissing(accumulator, entry.getValue(), childPath, value.origin()); } else { checkValid(childPath, entry.getValue(), v, accumulator); } } }
[ "private", "static", "void", "checkValidObject", "(", "Path", "path", ",", "AbstractConfigObject", "reference", ",", "AbstractConfigObject", "value", ",", "List", "<", "ConfigException", ".", "ValidationProblem", ">", "accumulator", ")", "{", "for", "(", "Map", ".", "Entry", "<", "String", ",", "ConfigValue", ">", "entry", ":", "reference", ".", "entrySet", "(", ")", ")", "{", "String", "key", "=", "entry", ".", "getKey", "(", ")", ";", "Path", "childPath", ";", "if", "(", "path", "!=", "null", ")", "childPath", "=", "Path", ".", "newKey", "(", "key", ")", ".", "prepend", "(", "path", ")", ";", "else", "childPath", "=", "Path", ".", "newKey", "(", "key", ")", ";", "AbstractConfigValue", "v", "=", "value", ".", "get", "(", "key", ")", ";", "if", "(", "v", "==", "null", ")", "{", "addMissing", "(", "accumulator", ",", "entry", ".", "getValue", "(", ")", ",", "childPath", ",", "value", ".", "origin", "(", ")", ")", ";", "}", "else", "{", "checkValid", "(", "childPath", ",", "entry", ".", "getValue", "(", ")", ",", "v", ",", "accumulator", ")", ";", "}", "}", "}" ]
path is null if we're at the root
[ "path", "is", "null", "if", "we", "re", "at", "the", "root" ]
68cebfde5e861e9a5fdc75ceff366ed95e17d475
https://github.com/lightbend/config/blob/68cebfde5e861e9a5fdc75ceff366ed95e17d475/config/src/main/java/com/typesafe/config/impl/SimpleConfig.java#L1002-L1021
14,876
lightbend/config
config/src/main/java/com/typesafe/config/impl/SimpleConfig.java
SimpleConfig.checkValid
static void checkValid(Path path, ConfigValueType referenceType, AbstractConfigValue value, List<ConfigException.ValidationProblem> accumulator) { if (haveCompatibleTypes(referenceType, value)) { if (referenceType == ConfigValueType.LIST && value instanceof SimpleConfigObject) { // attempt conversion of indexed object to list AbstractConfigValue listValue = DefaultTransformer.transform(value, ConfigValueType.LIST); if (!(listValue instanceof SimpleConfigList)) addWrongType(accumulator, referenceType, value, path); } } else { addWrongType(accumulator, referenceType, value, path); } }
java
static void checkValid(Path path, ConfigValueType referenceType, AbstractConfigValue value, List<ConfigException.ValidationProblem> accumulator) { if (haveCompatibleTypes(referenceType, value)) { if (referenceType == ConfigValueType.LIST && value instanceof SimpleConfigObject) { // attempt conversion of indexed object to list AbstractConfigValue listValue = DefaultTransformer.transform(value, ConfigValueType.LIST); if (!(listValue instanceof SimpleConfigList)) addWrongType(accumulator, referenceType, value, path); } } else { addWrongType(accumulator, referenceType, value, path); } }
[ "static", "void", "checkValid", "(", "Path", "path", ",", "ConfigValueType", "referenceType", ",", "AbstractConfigValue", "value", ",", "List", "<", "ConfigException", ".", "ValidationProblem", ">", "accumulator", ")", "{", "if", "(", "haveCompatibleTypes", "(", "referenceType", ",", "value", ")", ")", "{", "if", "(", "referenceType", "==", "ConfigValueType", ".", "LIST", "&&", "value", "instanceof", "SimpleConfigObject", ")", "{", "// attempt conversion of indexed object to list", "AbstractConfigValue", "listValue", "=", "DefaultTransformer", ".", "transform", "(", "value", ",", "ConfigValueType", ".", "LIST", ")", ";", "if", "(", "!", "(", "listValue", "instanceof", "SimpleConfigList", ")", ")", "addWrongType", "(", "accumulator", ",", "referenceType", ",", "value", ",", "path", ")", ";", "}", "}", "else", "{", "addWrongType", "(", "accumulator", ",", "referenceType", ",", "value", ",", "path", ")", ";", "}", "}" ]
Used by the JavaBean-based validator
[ "Used", "by", "the", "JavaBean", "-", "based", "validator" ]
68cebfde5e861e9a5fdc75ceff366ed95e17d475
https://github.com/lightbend/config/blob/68cebfde5e861e9a5fdc75ceff366ed95e17d475/config/src/main/java/com/typesafe/config/impl/SimpleConfig.java#L1043-L1056
14,877
lightbend/config
config/src/main/java/com/typesafe/config/impl/SerializedConfigValue.java
SerializedConfigValue.writeOriginField
private static void writeOriginField(DataOutput out, SerializedField code, Object v) throws IOException { switch (code) { case ORIGIN_DESCRIPTION: out.writeUTF((String) v); break; case ORIGIN_LINE_NUMBER: out.writeInt((Integer) v); break; case ORIGIN_END_LINE_NUMBER: out.writeInt((Integer) v); break; case ORIGIN_TYPE: out.writeByte((Integer) v); break; case ORIGIN_URL: out.writeUTF((String) v); break; case ORIGIN_RESOURCE: out.writeUTF((String) v); break; case ORIGIN_COMMENTS: @SuppressWarnings("unchecked") List<String> list = (List<String>) v; int size = list.size(); out.writeInt(size); for (String s : list) { out.writeUTF(s); } break; case ORIGIN_NULL_URL: // FALL THRU case ORIGIN_NULL_RESOURCE: // FALL THRU case ORIGIN_NULL_COMMENTS: // nothing to write out besides code and length break; default: throw new IOException("Unhandled field from origin: " + code); } }
java
private static void writeOriginField(DataOutput out, SerializedField code, Object v) throws IOException { switch (code) { case ORIGIN_DESCRIPTION: out.writeUTF((String) v); break; case ORIGIN_LINE_NUMBER: out.writeInt((Integer) v); break; case ORIGIN_END_LINE_NUMBER: out.writeInt((Integer) v); break; case ORIGIN_TYPE: out.writeByte((Integer) v); break; case ORIGIN_URL: out.writeUTF((String) v); break; case ORIGIN_RESOURCE: out.writeUTF((String) v); break; case ORIGIN_COMMENTS: @SuppressWarnings("unchecked") List<String> list = (List<String>) v; int size = list.size(); out.writeInt(size); for (String s : list) { out.writeUTF(s); } break; case ORIGIN_NULL_URL: // FALL THRU case ORIGIN_NULL_RESOURCE: // FALL THRU case ORIGIN_NULL_COMMENTS: // nothing to write out besides code and length break; default: throw new IOException("Unhandled field from origin: " + code); } }
[ "private", "static", "void", "writeOriginField", "(", "DataOutput", "out", ",", "SerializedField", "code", ",", "Object", "v", ")", "throws", "IOException", "{", "switch", "(", "code", ")", "{", "case", "ORIGIN_DESCRIPTION", ":", "out", ".", "writeUTF", "(", "(", "String", ")", "v", ")", ";", "break", ";", "case", "ORIGIN_LINE_NUMBER", ":", "out", ".", "writeInt", "(", "(", "Integer", ")", "v", ")", ";", "break", ";", "case", "ORIGIN_END_LINE_NUMBER", ":", "out", ".", "writeInt", "(", "(", "Integer", ")", "v", ")", ";", "break", ";", "case", "ORIGIN_TYPE", ":", "out", ".", "writeByte", "(", "(", "Integer", ")", "v", ")", ";", "break", ";", "case", "ORIGIN_URL", ":", "out", ".", "writeUTF", "(", "(", "String", ")", "v", ")", ";", "break", ";", "case", "ORIGIN_RESOURCE", ":", "out", ".", "writeUTF", "(", "(", "String", ")", "v", ")", ";", "break", ";", "case", "ORIGIN_COMMENTS", ":", "@", "SuppressWarnings", "(", "\"unchecked\"", ")", "List", "<", "String", ">", "list", "=", "(", "List", "<", "String", ">", ")", "v", ";", "int", "size", "=", "list", ".", "size", "(", ")", ";", "out", ".", "writeInt", "(", "size", ")", ";", "for", "(", "String", "s", ":", "list", ")", "{", "out", ".", "writeUTF", "(", "s", ")", ";", "}", "break", ";", "case", "ORIGIN_NULL_URL", ":", "// FALL THRU", "case", "ORIGIN_NULL_RESOURCE", ":", "// FALL THRU", "case", "ORIGIN_NULL_COMMENTS", ":", "// nothing to write out besides code and length", "break", ";", "default", ":", "throw", "new", "IOException", "(", "\"Unhandled field from origin: \"", "+", "code", ")", ";", "}", "}" ]
outer stream instead of field.data
[ "outer", "stream", "instead", "of", "field", ".", "data" ]
68cebfde5e861e9a5fdc75ceff366ed95e17d475
https://github.com/lightbend/config/blob/68cebfde5e861e9a5fdc75ceff366ed95e17d475/config/src/main/java/com/typesafe/config/impl/SerializedConfigValue.java#L168-L206
14,878
lightbend/config
config/src/main/java/com/typesafe/config/impl/SerializedConfigValue.java
SerializedConfigValue.writeOrigin
static void writeOrigin(DataOutput out, SimpleConfigOrigin origin, SimpleConfigOrigin baseOrigin) throws IOException { Map<SerializedField, Object> m; // to serialize a null origin, we write out no fields at all if (origin != null) m = origin.toFieldsDelta(baseOrigin); else m = Collections.emptyMap(); for (Map.Entry<SerializedField, Object> e : m.entrySet()) { FieldOut field = new FieldOut(e.getKey()); Object v = e.getValue(); writeOriginField(field.data, field.code, v); writeField(out, field); } writeEndMarker(out); }
java
static void writeOrigin(DataOutput out, SimpleConfigOrigin origin, SimpleConfigOrigin baseOrigin) throws IOException { Map<SerializedField, Object> m; // to serialize a null origin, we write out no fields at all if (origin != null) m = origin.toFieldsDelta(baseOrigin); else m = Collections.emptyMap(); for (Map.Entry<SerializedField, Object> e : m.entrySet()) { FieldOut field = new FieldOut(e.getKey()); Object v = e.getValue(); writeOriginField(field.data, field.code, v); writeField(out, field); } writeEndMarker(out); }
[ "static", "void", "writeOrigin", "(", "DataOutput", "out", ",", "SimpleConfigOrigin", "origin", ",", "SimpleConfigOrigin", "baseOrigin", ")", "throws", "IOException", "{", "Map", "<", "SerializedField", ",", "Object", ">", "m", ";", "// to serialize a null origin, we write out no fields at all", "if", "(", "origin", "!=", "null", ")", "m", "=", "origin", ".", "toFieldsDelta", "(", "baseOrigin", ")", ";", "else", "m", "=", "Collections", ".", "emptyMap", "(", ")", ";", "for", "(", "Map", ".", "Entry", "<", "SerializedField", ",", "Object", ">", "e", ":", "m", ".", "entrySet", "(", ")", ")", "{", "FieldOut", "field", "=", "new", "FieldOut", "(", "e", ".", "getKey", "(", ")", ")", ";", "Object", "v", "=", "e", ".", "getValue", "(", ")", ";", "writeOriginField", "(", "field", ".", "data", ",", "field", ".", "code", ",", "v", ")", ";", "writeField", "(", "out", ",", "field", ")", ";", "}", "writeEndMarker", "(", "out", ")", ";", "}" ]
not private because we use it to serialize ConfigException
[ "not", "private", "because", "we", "use", "it", "to", "serialize", "ConfigException" ]
68cebfde5e861e9a5fdc75ceff366ed95e17d475
https://github.com/lightbend/config/blob/68cebfde5e861e9a5fdc75ceff366ed95e17d475/config/src/main/java/com/typesafe/config/impl/SerializedConfigValue.java#L209-L224
14,879
lightbend/config
config/src/main/java/com/typesafe/config/impl/SerializedConfigValue.java
SerializedConfigValue.readOrigin
static SimpleConfigOrigin readOrigin(DataInput in, SimpleConfigOrigin baseOrigin) throws IOException { Map<SerializedField, Object> m = new EnumMap<SerializedField, Object>(SerializedField.class); while (true) { Object v = null; SerializedField field = readCode(in); switch (field) { case END_MARKER: return SimpleConfigOrigin.fromBase(baseOrigin, m); case ORIGIN_DESCRIPTION: in.readInt(); // discard length v = in.readUTF(); break; case ORIGIN_LINE_NUMBER: in.readInt(); // discard length v = in.readInt(); break; case ORIGIN_END_LINE_NUMBER: in.readInt(); // discard length v = in.readInt(); break; case ORIGIN_TYPE: in.readInt(); // discard length v = in.readUnsignedByte(); break; case ORIGIN_URL: in.readInt(); // discard length v = in.readUTF(); break; case ORIGIN_RESOURCE: in.readInt(); // discard length v = in.readUTF(); break; case ORIGIN_COMMENTS: in.readInt(); // discard length int size = in.readInt(); List<String> list = new ArrayList<String>(size); for (int i = 0; i < size; ++i) { list.add(in.readUTF()); } v = list; break; case ORIGIN_NULL_URL: // FALL THRU case ORIGIN_NULL_RESOURCE: // FALL THRU case ORIGIN_NULL_COMMENTS: // nothing to read besides code and length in.readInt(); // discard length v = ""; // just something non-null to put in the map break; case ROOT_VALUE: case ROOT_WAS_CONFIG: case VALUE_DATA: case VALUE_ORIGIN: throw new IOException("Not expecting this field here: " + field); case UNKNOWN: // skip unknown field skipField(in); break; } if (v != null) m.put(field, v); } }
java
static SimpleConfigOrigin readOrigin(DataInput in, SimpleConfigOrigin baseOrigin) throws IOException { Map<SerializedField, Object> m = new EnumMap<SerializedField, Object>(SerializedField.class); while (true) { Object v = null; SerializedField field = readCode(in); switch (field) { case END_MARKER: return SimpleConfigOrigin.fromBase(baseOrigin, m); case ORIGIN_DESCRIPTION: in.readInt(); // discard length v = in.readUTF(); break; case ORIGIN_LINE_NUMBER: in.readInt(); // discard length v = in.readInt(); break; case ORIGIN_END_LINE_NUMBER: in.readInt(); // discard length v = in.readInt(); break; case ORIGIN_TYPE: in.readInt(); // discard length v = in.readUnsignedByte(); break; case ORIGIN_URL: in.readInt(); // discard length v = in.readUTF(); break; case ORIGIN_RESOURCE: in.readInt(); // discard length v = in.readUTF(); break; case ORIGIN_COMMENTS: in.readInt(); // discard length int size = in.readInt(); List<String> list = new ArrayList<String>(size); for (int i = 0; i < size; ++i) { list.add(in.readUTF()); } v = list; break; case ORIGIN_NULL_URL: // FALL THRU case ORIGIN_NULL_RESOURCE: // FALL THRU case ORIGIN_NULL_COMMENTS: // nothing to read besides code and length in.readInt(); // discard length v = ""; // just something non-null to put in the map break; case ROOT_VALUE: case ROOT_WAS_CONFIG: case VALUE_DATA: case VALUE_ORIGIN: throw new IOException("Not expecting this field here: " + field); case UNKNOWN: // skip unknown field skipField(in); break; } if (v != null) m.put(field, v); } }
[ "static", "SimpleConfigOrigin", "readOrigin", "(", "DataInput", "in", ",", "SimpleConfigOrigin", "baseOrigin", ")", "throws", "IOException", "{", "Map", "<", "SerializedField", ",", "Object", ">", "m", "=", "new", "EnumMap", "<", "SerializedField", ",", "Object", ">", "(", "SerializedField", ".", "class", ")", ";", "while", "(", "true", ")", "{", "Object", "v", "=", "null", ";", "SerializedField", "field", "=", "readCode", "(", "in", ")", ";", "switch", "(", "field", ")", "{", "case", "END_MARKER", ":", "return", "SimpleConfigOrigin", ".", "fromBase", "(", "baseOrigin", ",", "m", ")", ";", "case", "ORIGIN_DESCRIPTION", ":", "in", ".", "readInt", "(", ")", ";", "// discard length", "v", "=", "in", ".", "readUTF", "(", ")", ";", "break", ";", "case", "ORIGIN_LINE_NUMBER", ":", "in", ".", "readInt", "(", ")", ";", "// discard length", "v", "=", "in", ".", "readInt", "(", ")", ";", "break", ";", "case", "ORIGIN_END_LINE_NUMBER", ":", "in", ".", "readInt", "(", ")", ";", "// discard length", "v", "=", "in", ".", "readInt", "(", ")", ";", "break", ";", "case", "ORIGIN_TYPE", ":", "in", ".", "readInt", "(", ")", ";", "// discard length", "v", "=", "in", ".", "readUnsignedByte", "(", ")", ";", "break", ";", "case", "ORIGIN_URL", ":", "in", ".", "readInt", "(", ")", ";", "// discard length", "v", "=", "in", ".", "readUTF", "(", ")", ";", "break", ";", "case", "ORIGIN_RESOURCE", ":", "in", ".", "readInt", "(", ")", ";", "// discard length", "v", "=", "in", ".", "readUTF", "(", ")", ";", "break", ";", "case", "ORIGIN_COMMENTS", ":", "in", ".", "readInt", "(", ")", ";", "// discard length", "int", "size", "=", "in", ".", "readInt", "(", ")", ";", "List", "<", "String", ">", "list", "=", "new", "ArrayList", "<", "String", ">", "(", "size", ")", ";", "for", "(", "int", "i", "=", "0", ";", "i", "<", "size", ";", "++", "i", ")", "{", "list", ".", "add", "(", "in", ".", "readUTF", "(", ")", ")", ";", "}", "v", "=", "list", ";", "break", ";", "case", "ORIGIN_NULL_URL", ":", "// FALL THRU", "case", "ORIGIN_NULL_RESOURCE", ":", "// FALL THRU", "case", "ORIGIN_NULL_COMMENTS", ":", "// nothing to read besides code and length", "in", ".", "readInt", "(", ")", ";", "// discard length", "v", "=", "\"\"", ";", "// just something non-null to put in the map", "break", ";", "case", "ROOT_VALUE", ":", "case", "ROOT_WAS_CONFIG", ":", "case", "VALUE_DATA", ":", "case", "VALUE_ORIGIN", ":", "throw", "new", "IOException", "(", "\"Not expecting this field here: \"", "+", "field", ")", ";", "case", "UNKNOWN", ":", "// skip unknown field", "skipField", "(", "in", ")", ";", "break", ";", "}", "if", "(", "v", "!=", "null", ")", "m", ".", "put", "(", "field", ",", "v", ")", ";", "}", "}" ]
not private because we use it to deserialize ConfigException
[ "not", "private", "because", "we", "use", "it", "to", "deserialize", "ConfigException" ]
68cebfde5e861e9a5fdc75ceff366ed95e17d475
https://github.com/lightbend/config/blob/68cebfde5e861e9a5fdc75ceff366ed95e17d475/config/src/main/java/com/typesafe/config/impl/SerializedConfigValue.java#L227-L289
14,880
lightbend/config
config/src/main/java/com/typesafe/config/impl/Token.java
Token.newWithoutOrigin
static Token newWithoutOrigin(TokenType tokenType, String debugString, String tokenText) { return new Token(tokenType, null, tokenText, debugString); }
java
static Token newWithoutOrigin(TokenType tokenType, String debugString, String tokenText) { return new Token(tokenType, null, tokenText, debugString); }
[ "static", "Token", "newWithoutOrigin", "(", "TokenType", "tokenType", ",", "String", "debugString", ",", "String", "tokenText", ")", "{", "return", "new", "Token", "(", "tokenType", ",", "null", ",", "tokenText", ",", "debugString", ")", ";", "}" ]
this is used for singleton tokens like COMMA or OPEN_CURLY
[ "this", "is", "used", "for", "singleton", "tokens", "like", "COMMA", "or", "OPEN_CURLY" ]
68cebfde5e861e9a5fdc75ceff366ed95e17d475
https://github.com/lightbend/config/blob/68cebfde5e861e9a5fdc75ceff366ed95e17d475/config/src/main/java/com/typesafe/config/impl/Token.java#L31-L33
14,881
lightbend/config
config/src/main/java/com/typesafe/config/impl/ResolveContext.java
ResolveContext.restrict
ResolveContext restrict(Path restrictTo) { if (restrictTo == restrictToChild) return this; else return new ResolveContext(memos, options, restrictTo, resolveStack, cycleMarkers); }
java
ResolveContext restrict(Path restrictTo) { if (restrictTo == restrictToChild) return this; else return new ResolveContext(memos, options, restrictTo, resolveStack, cycleMarkers); }
[ "ResolveContext", "restrict", "(", "Path", "restrictTo", ")", "{", "if", "(", "restrictTo", "==", "restrictToChild", ")", "return", "this", ";", "else", "return", "new", "ResolveContext", "(", "memos", ",", "options", ",", "restrictTo", ",", "resolveStack", ",", "cycleMarkers", ")", ";", "}" ]
restrictTo may be null to unrestrict
[ "restrictTo", "may", "be", "null", "to", "unrestrict" ]
68cebfde5e861e9a5fdc75ceff366ed95e17d475
https://github.com/lightbend/config/blob/68cebfde5e861e9a5fdc75ceff366ed95e17d475/config/src/main/java/com/typesafe/config/impl/ResolveContext.java#L90-L95
14,882
lightbend/config
config/src/main/java/com/typesafe/config/impl/Path.java
Path.hasFunkyChars
static boolean hasFunkyChars(String s) { int length = s.length(); if (length == 0) return false; for (int i = 0; i < length; ++i) { char c = s.charAt(i); if (Character.isLetterOrDigit(c) || c == '-' || c == '_') continue; else return true; } return false; }
java
static boolean hasFunkyChars(String s) { int length = s.length(); if (length == 0) return false; for (int i = 0; i < length; ++i) { char c = s.charAt(i); if (Character.isLetterOrDigit(c) || c == '-' || c == '_') continue; else return true; } return false; }
[ "static", "boolean", "hasFunkyChars", "(", "String", "s", ")", "{", "int", "length", "=", "s", ".", "length", "(", ")", ";", "if", "(", "length", "==", "0", ")", "return", "false", ";", "for", "(", "int", "i", "=", "0", ";", "i", "<", "length", ";", "++", "i", ")", "{", "char", "c", "=", "s", ".", "charAt", "(", "i", ")", ";", "if", "(", "Character", ".", "isLetterOrDigit", "(", "c", ")", "||", "c", "==", "'", "'", "||", "c", "==", "'", "'", ")", "continue", ";", "else", "return", "true", ";", "}", "return", "false", ";", "}" ]
noise from quotes in the rendered path for average cases
[ "noise", "from", "quotes", "in", "the", "rendered", "path", "for", "average", "cases" ]
68cebfde5e861e9a5fdc75ceff366ed95e17d475
https://github.com/lightbend/config/blob/68cebfde5e861e9a5fdc75ceff366ed95e17d475/config/src/main/java/com/typesafe/config/impl/Path.java#L178-L193
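Per the hasFunkyChars record above, only letters, digits, '-' and '_' keep a path element unquoted when rendered; a few illustrative cases (the method is package-private, so this sketch assumes it runs inside com.typesafe.config.impl):

    Path.hasFunkyChars("log-level_1");    // false: letters, digits, '-' and '_' only
    Path.hasFunkyChars("akka.loglevel");  // true: '.' means the key gets quoted when rendered
    Path.hasFunkyChars("");               // false, by the explicit zero-length check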
14,883
lightbend/config
config/src/main/java/com/typesafe/config/impl/Parseable.java
Parseable.newReader
public static Parseable newReader(Reader reader, ConfigParseOptions options) { return new ParseableReader(doNotClose(reader), options); }
java
public static Parseable newReader(Reader reader, ConfigParseOptions options) { return new ParseableReader(doNotClose(reader), options); }
[ "public", "static", "Parseable", "newReader", "(", "Reader", "reader", ",", "ConfigParseOptions", "options", ")", "{", "return", "new", "ParseableReader", "(", "doNotClose", "(", "reader", ")", ",", "options", ")", ";", "}" ]
is complete.
[ "is", "complete", "." ]
68cebfde5e861e9a5fdc75ceff366ed95e17d475
https://github.com/lightbend/config/blob/68cebfde5e861e9a5fdc75ceff366ed95e17d475/config/src/main/java/com/typesafe/config/impl/Parseable.java#L445-L448
14,884
lightbend/config
config/src/main/java/com/typesafe/config/impl/Parseable.java
Parseable.convertResourceName
private static String convertResourceName(Class<?> klass, String resource) { if (resource.startsWith("/")) { // "absolute" resource, chop the slash return resource.substring(1); } else { String className = klass.getName(); int i = className.lastIndexOf('.'); if (i < 0) { // no package return resource; } else { // need to be relative to the package String packageName = className.substring(0, i); String packagePath = packageName.replace('.', '/'); return packagePath + "/" + resource; } } }
java
private static String convertResourceName(Class<?> klass, String resource) { if (resource.startsWith("/")) { // "absolute" resource, chop the slash return resource.substring(1); } else { String className = klass.getName(); int i = className.lastIndexOf('.'); if (i < 0) { // no package return resource; } else { // need to be relative to the package String packageName = className.substring(0, i); String packagePath = packageName.replace('.', '/'); return packagePath + "/" + resource; } } }
[ "private", "static", "String", "convertResourceName", "(", "Class", "<", "?", ">", "klass", ",", "String", "resource", ")", "{", "if", "(", "resource", ".", "startsWith", "(", "\"/\"", ")", ")", "{", "// \"absolute\" resource, chop the slash", "return", "resource", ".", "substring", "(", "1", ")", ";", "}", "else", "{", "String", "className", "=", "klass", ".", "getName", "(", ")", ";", "int", "i", "=", "className", ".", "lastIndexOf", "(", "'", "'", ")", ";", "if", "(", "i", "<", "0", ")", "{", "// no package", "return", "resource", ";", "}", "else", "{", "// need to be relative to the package", "String", "packageName", "=", "className", ".", "substring", "(", "0", ",", "i", ")", ";", "String", "packagePath", "=", "packageName", ".", "replace", "(", "'", "'", ",", "'", "'", ")", ";", "return", "packagePath", "+", "\"/\"", "+", "resource", ";", "}", "}", "}" ]
use ClassLoader directly.
[ "use", "ClassLoader", "directly", "." ]
68cebfde5e861e9a5fdc75ceff366ed95e17d475
https://github.com/lightbend/config/blob/68cebfde5e861e9a5fdc75ceff366ed95e17d475/config/src/main/java/com/typesafe/config/impl/Parseable.java#L807-L824
14,885
lightbend/config
config/src/main/java/com/typesafe/config/impl/SimpleConfigObject.java
SimpleConfigObject.withOnlyPathOrNull
@Override protected SimpleConfigObject withOnlyPathOrNull(Path path) { String key = path.first(); Path next = path.remainder(); AbstractConfigValue v = value.get(key); if (next != null) { if (v != null && (v instanceof AbstractConfigObject)) { v = ((AbstractConfigObject) v).withOnlyPathOrNull(next); } else { // if the path has more elements but we don't have an object, // then the rest of the path does not exist. v = null; } } if (v == null) { return null; } else { return new SimpleConfigObject(origin(), Collections.singletonMap(key, v), v.resolveStatus(), ignoresFallbacks); } }
java
@Override protected SimpleConfigObject withOnlyPathOrNull(Path path) { String key = path.first(); Path next = path.remainder(); AbstractConfigValue v = value.get(key); if (next != null) { if (v != null && (v instanceof AbstractConfigObject)) { v = ((AbstractConfigObject) v).withOnlyPathOrNull(next); } else { // if the path has more elements but we don't have an object, // then the rest of the path does not exist. v = null; } } if (v == null) { return null; } else { return new SimpleConfigObject(origin(), Collections.singletonMap(key, v), v.resolveStatus(), ignoresFallbacks); } }
[ "@", "Override", "protected", "SimpleConfigObject", "withOnlyPathOrNull", "(", "Path", "path", ")", "{", "String", "key", "=", "path", ".", "first", "(", ")", ";", "Path", "next", "=", "path", ".", "remainder", "(", ")", ";", "AbstractConfigValue", "v", "=", "value", ".", "get", "(", "key", ")", ";", "if", "(", "next", "!=", "null", ")", "{", "if", "(", "v", "!=", "null", "&&", "(", "v", "instanceof", "AbstractConfigObject", ")", ")", "{", "v", "=", "(", "(", "AbstractConfigObject", ")", "v", ")", ".", "withOnlyPathOrNull", "(", "next", ")", ";", "}", "else", "{", "// if the path has more elements but we don't have an object,", "// then the rest of the path does not exist.", "v", "=", "null", ";", "}", "}", "if", "(", "v", "==", "null", ")", "{", "return", "null", ";", "}", "else", "{", "return", "new", "SimpleConfigObject", "(", "origin", "(", ")", ",", "Collections", ".", "singletonMap", "(", "key", ",", "v", ")", ",", "v", ".", "resolveStatus", "(", ")", ",", "ignoresFallbacks", ")", ";", "}", "}" ]
"a" object.
[ "a", "object", "." ]
68cebfde5e861e9a5fdc75ceff366ed95e17d475
https://github.com/lightbend/config/blob/68cebfde5e861e9a5fdc75ceff366ed95e17d475/config/src/main/java/com/typesafe/config/impl/SimpleConfigObject.java#L71-L93
14,886
lightbend/config
config/src/main/java/com/typesafe/config/impl/PathParser.java
PathParser.looksUnsafeForFastParser
private static boolean looksUnsafeForFastParser(String s) { boolean lastWasDot = true; // start of path is also a "dot" int len = s.length(); if (s.isEmpty()) return true; if (s.charAt(0) == '.') return true; if (s.charAt(len - 1) == '.') return true; for (int i = 0; i < len; ++i) { char c = s.charAt(i); if ((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || c == '_') { lastWasDot = false; continue; } else if (c == '.') { if (lastWasDot) return true; // ".." means we need to throw an error lastWasDot = true; } else if (c == '-') { if (lastWasDot) return true; continue; } else { return true; } } if (lastWasDot) return true; return false; }
java
private static boolean looksUnsafeForFastParser(String s) { boolean lastWasDot = true; // start of path is also a "dot" int len = s.length(); if (s.isEmpty()) return true; if (s.charAt(0) == '.') return true; if (s.charAt(len - 1) == '.') return true; for (int i = 0; i < len; ++i) { char c = s.charAt(i); if ((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || c == '_') { lastWasDot = false; continue; } else if (c == '.') { if (lastWasDot) return true; // ".." means we need to throw an error lastWasDot = true; } else if (c == '-') { if (lastWasDot) return true; continue; } else { return true; } } if (lastWasDot) return true; return false; }
[ "private", "static", "boolean", "looksUnsafeForFastParser", "(", "String", "s", ")", "{", "boolean", "lastWasDot", "=", "true", ";", "// start of path is also a \"dot\"", "int", "len", "=", "s", ".", "length", "(", ")", ";", "if", "(", "s", ".", "isEmpty", "(", ")", ")", "return", "true", ";", "if", "(", "s", ".", "charAt", "(", "0", ")", "==", "'", "'", ")", "return", "true", ";", "if", "(", "s", ".", "charAt", "(", "len", "-", "1", ")", "==", "'", "'", ")", "return", "true", ";", "for", "(", "int", "i", "=", "0", ";", "i", "<", "len", ";", "++", "i", ")", "{", "char", "c", "=", "s", ".", "charAt", "(", "i", ")", ";", "if", "(", "(", "c", ">=", "'", "'", "&&", "c", "<=", "'", "'", ")", "||", "(", "c", ">=", "'", "'", "&&", "c", "<=", "'", "'", ")", "||", "c", "==", "'", "'", ")", "{", "lastWasDot", "=", "false", ";", "continue", ";", "}", "else", "if", "(", "c", "==", "'", "'", ")", "{", "if", "(", "lastWasDot", ")", "return", "true", ";", "// \"..\" means we need to throw an error", "lastWasDot", "=", "true", ";", "}", "else", "if", "(", "c", "==", "'", "'", ")", "{", "if", "(", "lastWasDot", ")", "return", "true", ";", "continue", ";", "}", "else", "{", "return", "true", ";", "}", "}", "if", "(", "lastWasDot", ")", "return", "true", ";", "return", "false", ";", "}" ]
that might require the full parser to deal with.
[ "that", "might", "require", "the", "full", "parser", "to", "deal", "with", "." ]
68cebfde5e861e9a5fdc75ceff366ed95e17d475
https://github.com/lightbend/config/blob/68cebfde5e861e9a5fdc75ceff366ed95e17d475/config/src/main/java/com/typesafe/config/impl/PathParser.java#L224-L256
14,887
lightbend/config
config/src/main/java/com/typesafe/config/impl/SimpleIncluder.java
SimpleIncluder.clearForInclude
static ConfigParseOptions clearForInclude(ConfigParseOptions options) { // the class loader and includer are inherited, but not this other // stuff. return options.setSyntax(null).setOriginDescription(null).setAllowMissing(true); }
java
static ConfigParseOptions clearForInclude(ConfigParseOptions options) { // the class loader and includer are inherited, but not this other // stuff. return options.setSyntax(null).setOriginDescription(null).setAllowMissing(true); }
[ "static", "ConfigParseOptions", "clearForInclude", "(", "ConfigParseOptions", "options", ")", "{", "// the class loader and includer are inherited, but not this other", "// stuff.", "return", "options", ".", "setSyntax", "(", "null", ")", ".", "setOriginDescription", "(", "null", ")", ".", "setAllowMissing", "(", "true", ")", ";", "}" ]
ConfigIncludeContext does this for us on its options
[ "ConfigIncludeContext", "does", "this", "for", "us", "on", "its", "options" ]
68cebfde5e861e9a5fdc75ceff366ed95e17d475
https://github.com/lightbend/config/blob/68cebfde5e861e9a5fdc75ceff366ed95e17d475/config/src/main/java/com/typesafe/config/impl/SimpleIncluder.java#L33-L37
14,888
lightbend/config
config/src/main/java/com/typesafe/config/impl/SimpleIncluder.java
SimpleIncluder.include
@Override public ConfigObject include(final ConfigIncludeContext context, String name) { ConfigObject obj = includeWithoutFallback(context, name); // now use the fallback includer if any and merge // its result. if (fallback != null) { return obj.withFallback(fallback.include(context, name)); } else { return obj; } }
java
@Override public ConfigObject include(final ConfigIncludeContext context, String name) { ConfigObject obj = includeWithoutFallback(context, name); // now use the fallback includer if any and merge // its result. if (fallback != null) { return obj.withFallback(fallback.include(context, name)); } else { return obj; } }
[ "@", "Override", "public", "ConfigObject", "include", "(", "final", "ConfigIncludeContext", "context", ",", "String", "name", ")", "{", "ConfigObject", "obj", "=", "includeWithoutFallback", "(", "context", ",", "name", ")", ";", "// now use the fallback includer if any and merge", "// its result.", "if", "(", "fallback", "!=", "null", ")", "{", "return", "obj", ".", "withFallback", "(", "fallback", ".", "include", "(", "context", ",", "name", ")", ")", ";", "}", "else", "{", "return", "obj", ";", "}", "}" ]
this is the heuristic includer
[ "this", "is", "the", "heuristic", "includer" ]
68cebfde5e861e9a5fdc75ceff366ed95e17d475
https://github.com/lightbend/config/blob/68cebfde5e861e9a5fdc75ceff366ed95e17d475/config/src/main/java/com/typesafe/config/impl/SimpleIncluder.java#L40-L51
14,889
lightbend/config
config/src/main/java/com/typesafe/config/impl/SimpleIncluder.java
SimpleIncluder.includeWithoutFallback
static ConfigObject includeWithoutFallback(final ConfigIncludeContext context, String name) { // the heuristic is valid URL then URL, else relative to including file; // relativeTo in a file falls back to classpath inside relativeTo(). URL url; try { url = new URL(name); } catch (MalformedURLException e) { url = null; } if (url != null) { return includeURLWithoutFallback(context, url); } else { NameSource source = new RelativeNameSource(context); return fromBasename(source, name, context.parseOptions()); } }
java
static ConfigObject includeWithoutFallback(final ConfigIncludeContext context, String name) { // the heuristic is valid URL then URL, else relative to including file; // relativeTo in a file falls back to classpath inside relativeTo(). URL url; try { url = new URL(name); } catch (MalformedURLException e) { url = null; } if (url != null) { return includeURLWithoutFallback(context, url); } else { NameSource source = new RelativeNameSource(context); return fromBasename(source, name, context.parseOptions()); } }
[ "static", "ConfigObject", "includeWithoutFallback", "(", "final", "ConfigIncludeContext", "context", ",", "String", "name", ")", "{", "// the heuristic is valid URL then URL, else relative to including file;", "// relativeTo in a file falls back to classpath inside relativeTo().", "URL", "url", ";", "try", "{", "url", "=", "new", "URL", "(", "name", ")", ";", "}", "catch", "(", "MalformedURLException", "e", ")", "{", "url", "=", "null", ";", "}", "if", "(", "url", "!=", "null", ")", "{", "return", "includeURLWithoutFallback", "(", "context", ",", "url", ")", ";", "}", "else", "{", "NameSource", "source", "=", "new", "RelativeNameSource", "(", "context", ")", ";", "return", "fromBasename", "(", "source", ",", "name", ",", "context", ".", "parseOptions", "(", ")", ")", ";", "}", "}" ]
the heuristic includer in static form
[ "the", "heuristic", "includer", "in", "static", "form" ]
68cebfde5e861e9a5fdc75ceff366ed95e17d475
https://github.com/lightbend/config/blob/68cebfde5e861e9a5fdc75ceff366ed95e17d475/config/src/main/java/com/typesafe/config/impl/SimpleIncluder.java#L54-L71
14,890
lightbend/config
config/src/main/java/com/typesafe/config/ConfigException.java
ConfigException.writeObject
private void writeObject(java.io.ObjectOutputStream out) throws IOException { out.defaultWriteObject(); ConfigImplUtil.writeOrigin(out, origin); }
java
private void writeObject(java.io.ObjectOutputStream out) throws IOException { out.defaultWriteObject(); ConfigImplUtil.writeOrigin(out, origin); }
[ "private", "void", "writeObject", "(", "java", ".", "io", ".", "ObjectOutputStream", "out", ")", "throws", "IOException", "{", "out", ".", "defaultWriteObject", "(", ")", ";", "ConfigImplUtil", ".", "writeOrigin", "(", "out", ",", "origin", ")", ";", "}" ]
support it)
[ "support", "it", ")" ]
68cebfde5e861e9a5fdc75ceff366ed95e17d475
https://github.com/lightbend/config/blob/68cebfde5e861e9a5fdc75ceff366ed95e17d475/config/src/main/java/com/typesafe/config/ConfigException.java#L56-L59
14,891
lightbend/config
config/src/main/java/com/typesafe/config/ConfigException.java
ConfigException.setOriginField
private static <T> void setOriginField(T hasOriginField, Class<T> clazz, ConfigOrigin origin) throws IOException { // circumvent "final" Field f; try { f = clazz.getDeclaredField("origin"); } catch (NoSuchFieldException e) { throw new IOException(clazz.getSimpleName() + " has no origin field?", e); } catch (SecurityException e) { throw new IOException("unable to fill out origin field in " + clazz.getSimpleName(), e); } f.setAccessible(true); try { f.set(hasOriginField, origin); } catch (IllegalArgumentException e) { throw new IOException("unable to set origin field", e); } catch (IllegalAccessException e) { throw new IOException("unable to set origin field", e); } }
java
private static <T> void setOriginField(T hasOriginField, Class<T> clazz, ConfigOrigin origin) throws IOException { // circumvent "final" Field f; try { f = clazz.getDeclaredField("origin"); } catch (NoSuchFieldException e) { throw new IOException(clazz.getSimpleName() + " has no origin field?", e); } catch (SecurityException e) { throw new IOException("unable to fill out origin field in " + clazz.getSimpleName(), e); } f.setAccessible(true); try { f.set(hasOriginField, origin); } catch (IllegalArgumentException e) { throw new IOException("unable to set origin field", e); } catch (IllegalAccessException e) { throw new IOException("unable to set origin field", e); } }
[ "private", "static", "<", "T", ">", "void", "setOriginField", "(", "T", "hasOriginField", ",", "Class", "<", "T", ">", "clazz", ",", "ConfigOrigin", "origin", ")", "throws", "IOException", "{", "// circumvent \"final\"", "Field", "f", ";", "try", "{", "f", "=", "clazz", ".", "getDeclaredField", "(", "\"origin\"", ")", ";", "}", "catch", "(", "NoSuchFieldException", "e", ")", "{", "throw", "new", "IOException", "(", "clazz", ".", "getSimpleName", "(", ")", "+", "\" has no origin field?\"", ",", "e", ")", ";", "}", "catch", "(", "SecurityException", "e", ")", "{", "throw", "new", "IOException", "(", "\"unable to fill out origin field in \"", "+", "clazz", ".", "getSimpleName", "(", ")", ",", "e", ")", ";", "}", "f", ".", "setAccessible", "(", "true", ")", ";", "try", "{", "f", ".", "set", "(", "hasOriginField", ",", "origin", ")", ";", "}", "catch", "(", "IllegalArgumentException", "e", ")", "{", "throw", "new", "IOException", "(", "\"unable to set origin field\"", ",", "e", ")", ";", "}", "catch", "(", "IllegalAccessException", "e", ")", "{", "throw", "new", "IOException", "(", "\"unable to set origin field\"", ",", "e", ")", ";", "}", "}" ]
For deserialization - uses reflection to set the final origin field on the object
[ "For", "deserialization", "-", "uses", "reflection", "to", "set", "the", "final", "origin", "field", "on", "the", "object" ]
68cebfde5e861e9a5fdc75ceff366ed95e17d475
https://github.com/lightbend/config/blob/68cebfde5e861e9a5fdc75ceff366ed95e17d475/config/src/main/java/com/typesafe/config/ConfigException.java#L62-L82
14,892
lightbend/config
config/src/main/java/com/typesafe/config/impl/AbstractConfigObject.java
AbstractConfigObject.peekAssumingResolved
protected final AbstractConfigValue peekAssumingResolved(String key, Path originalPath) { try { return attemptPeekWithPartialResolve(key); } catch (ConfigException.NotResolved e) { throw ConfigImpl.improveNotResolved(originalPath, e); } }
java
protected final AbstractConfigValue peekAssumingResolved(String key, Path originalPath) { try { return attemptPeekWithPartialResolve(key); } catch (ConfigException.NotResolved e) { throw ConfigImpl.improveNotResolved(originalPath, e); } }
[ "protected", "final", "AbstractConfigValue", "peekAssumingResolved", "(", "String", "key", ",", "Path", "originalPath", ")", "{", "try", "{", "return", "attemptPeekWithPartialResolve", "(", "key", ")", ";", "}", "catch", "(", "ConfigException", ".", "NotResolved", "e", ")", "{", "throw", "ConfigImpl", ".", "improveNotResolved", "(", "originalPath", ",", "e", ")", ";", "}", "}" ]
This looks up the key with no transformation or type conversion of any kind, and returns null if the key is not present. The object must be resolved along the nodes needed to get the key or ConfigException.NotResolved will be thrown. @param key @return the unmodified raw value or null
[ "This", "looks", "up", "the", "key", "with", "no", "transformation", "or", "type", "conversion", "of", "any", "kind", "and", "returns", "null", "if", "the", "key", "is", "not", "present", ".", "The", "object", "must", "be", "resolved", "along", "the", "nodes", "needed", "to", "get", "the", "key", "or", "ConfigException", ".", "NotResolved", "will", "be", "thrown", "." ]
68cebfde5e861e9a5fdc75ceff366ed95e17d475
https://github.com/lightbend/config/blob/68cebfde5e861e9a5fdc75ceff366ed95e17d475/config/src/main/java/com/typesafe/config/impl/AbstractConfigObject.java#L64-L70
14,893
lightbend/config
config/src/main/java/com/typesafe/config/parser/ConfigDocumentFactory.java
ConfigDocumentFactory.parseReader
public static ConfigDocument parseReader(Reader reader, ConfigParseOptions options) { return Parseable.newReader(reader, options).parseConfigDocument(); }
java
public static ConfigDocument parseReader(Reader reader, ConfigParseOptions options) { return Parseable.newReader(reader, options).parseConfigDocument(); }
[ "public", "static", "ConfigDocument", "parseReader", "(", "Reader", "reader", ",", "ConfigParseOptions", "options", ")", "{", "return", "Parseable", ".", "newReader", "(", "reader", ",", "options", ")", ".", "parseConfigDocument", "(", ")", ";", "}" ]
Parses a Reader into a ConfigDocument instance. @param reader the reader to parse @param options parse options to control how the reader is interpreted @return the parsed configuration @throws com.typesafe.config.ConfigException on IO or parse errors
[ "Parses", "a", "Reader", "into", "a", "ConfigDocument", "instance", "." ]
68cebfde5e861e9a5fdc75ceff366ed95e17d475
https://github.com/lightbend/config/blob/68cebfde5e861e9a5fdc75ceff366ed95e17d475/config/src/main/java/com/typesafe/config/parser/ConfigDocumentFactory.java#L26-L28
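As a usage sketch for the factory method above: the Reader content, the HOCON snippet, and the class name below are illustrative assumptions, not taken from the repository; ConfigSyntax.CONF simply tells the parser to treat the input as HOCON.

import com.typesafe.config.ConfigParseOptions;
import com.typesafe.config.ConfigSyntax;
import com.typesafe.config.parser.ConfigDocument;
import com.typesafe.config.parser.ConfigDocumentFactory;

import java.io.StringReader;

public class ParseReaderExample {
    public static void main(String[] args) {
        ConfigParseOptions options = ConfigParseOptions.defaults()
                .setSyntax(ConfigSyntax.CONF);          // parse the input as HOCON
        ConfigDocument doc = ConfigDocumentFactory.parseReader(
                new StringReader("app { name = demo }"), options);
        // ConfigDocument preserves the original formatting, so render()
        // reproduces the text that was read
        System.out.println(doc.render());
    }
}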
14,894
lightbend/config
config/src/main/java/com/typesafe/config/parser/ConfigDocumentFactory.java
ConfigDocumentFactory.parseFile
public static ConfigDocument parseFile(File file, ConfigParseOptions options) { return Parseable.newFile(file, options).parseConfigDocument(); }
java
public static ConfigDocument parseFile(File file, ConfigParseOptions options) { return Parseable.newFile(file, options).parseConfigDocument(); }
[ "public", "static", "ConfigDocument", "parseFile", "(", "File", "file", ",", "ConfigParseOptions", "options", ")", "{", "return", "Parseable", ".", "newFile", "(", "file", ",", "options", ")", ".", "parseConfigDocument", "(", ")", ";", "}" ]
Parses a file into a ConfigDocument instance. @param file the file to parse @param options parse options to control how the file is interpreted @return the parsed configuration @throws com.typesafe.config.ConfigException on IO or parse errors
[ "Parses", "a", "file", "into", "a", "ConfigDocument", "instance", "." ]
68cebfde5e861e9a5fdc75ceff366ed95e17d475
https://github.com/lightbend/config/blob/68cebfde5e861e9a5fdc75ceff366ed95e17d475/config/src/main/java/com/typesafe/config/parser/ConfigDocumentFactory.java#L54-L56
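A sketch of editing a file through the document API; the file name application.conf and the app.timeout setting are assumptions for illustration, and withValueText/render are used on the premise that the returned ConfigDocument keeps the file's layout and comments.

import com.typesafe.config.ConfigParseOptions;
import com.typesafe.config.parser.ConfigDocument;
import com.typesafe.config.parser.ConfigDocumentFactory;

import java.io.File;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;

public class ParseFileExample {
    public static void main(String[] args) throws Exception {
        ConfigDocument doc = ConfigDocumentFactory.parseFile(
                new File("application.conf"), ConfigParseOptions.defaults());
        // edits return a new, immutable document; comments and whitespace survive
        ConfigDocument edited = doc.withValueText("app.timeout", "30s");
        Files.write(Paths.get("application.conf"),
                edited.render().getBytes(StandardCharsets.UTF_8));
    }
}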
14,895
lightbend/config
config/src/main/java/com/typesafe/config/parser/ConfigDocumentFactory.java
ConfigDocumentFactory.parseString
public static ConfigDocument parseString(String s, ConfigParseOptions options) { return Parseable.newString(s, options).parseConfigDocument(); }
java
public static ConfigDocument parseString(String s, ConfigParseOptions options) { return Parseable.newString(s, options).parseConfigDocument(); }
[ "public", "static", "ConfigDocument", "parseString", "(", "String", "s", ",", "ConfigParseOptions", "options", ")", "{", "return", "Parseable", ".", "newString", "(", "s", ",", "options", ")", ".", "parseConfigDocument", "(", ")", ";", "}" ]
Parses a string which should be valid HOCON or JSON. @param s string to parse @param options parse options @return the parsed configuration
[ "Parses", "a", "string", "which", "should", "be", "valid", "HOCON", "or", "JSON", "." ]
68cebfde5e861e9a5fdc75ceff366ed95e17d475
https://github.com/lightbend/config/blob/68cebfde5e861e9a5fdc75ceff366ed95e17d475/config/src/main/java/com/typesafe/config/parser/ConfigDocumentFactory.java#L79-L81
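A sketch for the string variant; the HOCON text and the port path are made up for the example, and hasPath/withoutPath are assumed from the ConfigDocument interface.

import com.typesafe.config.ConfigParseOptions;
import com.typesafe.config.parser.ConfigDocument;
import com.typesafe.config.parser.ConfigDocumentFactory;

public class ParseStringExample {
    public static void main(String[] args) {
        ConfigDocument doc = ConfigDocumentFactory.parseString(
                "# connection settings\nhost = localhost\nport = 8080\n",
                ConfigParseOptions.defaults());
        System.out.println(doc.hasPath("port"));               // true
        // removing a path leaves the rest of the text, including the comment, intact
        System.out.println(doc.withoutPath("port").render());
    }
}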
14,896
lightbend/config
config/src/main/java/com/typesafe/config/impl/AbstractConfigValue.java
AbstractConfigValue.withFallback
@Override public AbstractConfigValue withFallback(ConfigMergeable mergeable) { if (ignoresFallbacks()) { return this; } else { ConfigValue other = ((MergeableValue) mergeable).toFallbackValue(); if (other instanceof Unmergeable) { return mergedWithTheUnmergeable((Unmergeable) other); } else if (other instanceof AbstractConfigObject) { return mergedWithObject((AbstractConfigObject) other); } else { return mergedWithNonObject((AbstractConfigValue) other); } } }
java
@Override public AbstractConfigValue withFallback(ConfigMergeable mergeable) { if (ignoresFallbacks()) { return this; } else { ConfigValue other = ((MergeableValue) mergeable).toFallbackValue(); if (other instanceof Unmergeable) { return mergedWithTheUnmergeable((Unmergeable) other); } else if (other instanceof AbstractConfigObject) { return mergedWithObject((AbstractConfigObject) other); } else { return mergedWithNonObject((AbstractConfigValue) other); } } }
[ "@", "Override", "public", "AbstractConfigValue", "withFallback", "(", "ConfigMergeable", "mergeable", ")", "{", "if", "(", "ignoresFallbacks", "(", ")", ")", "{", "return", "this", ";", "}", "else", "{", "ConfigValue", "other", "=", "(", "(", "MergeableValue", ")", "mergeable", ")", ".", "toFallbackValue", "(", ")", ";", "if", "(", "other", "instanceof", "Unmergeable", ")", "{", "return", "mergedWithTheUnmergeable", "(", "(", "Unmergeable", ")", "other", ")", ";", "}", "else", "if", "(", "other", "instanceof", "AbstractConfigObject", ")", "{", "return", "mergedWithObject", "(", "(", "AbstractConfigObject", ")", "other", ")", ";", "}", "else", "{", "return", "mergedWithNonObject", "(", "(", "AbstractConfigValue", ")", "other", ")", ";", "}", "}", "}" ]
this is only overridden to change the return type
[ "this", "is", "only", "overridden", "to", "change", "the", "return", "type" ]
68cebfde5e861e9a5fdc75ceff366ed95e17d475
https://github.com/lightbend/config/blob/68cebfde5e861e9a5fdc75ceff366ed95e17d475/config/src/main/java/com/typesafe/config/impl/AbstractConfigValue.java#L269-L284
14,897
lightbend/config
examples/java/simple-lib/src/main/java/simplelib/SimpleLibContext.java
SimpleLibContext.printSetting
public void printSetting(String path) { System.out.println("The setting '" + path + "' is: " + config.getString(path)); }
java
public void printSetting(String path) { System.out.println("The setting '" + path + "' is: " + config.getString(path)); }
[ "public", "void", "printSetting", "(", "String", "path", ")", "{", "System", ".", "out", ".", "println", "(", "\"The setting '\"", "+", "path", "+", "\"' is: \"", "+", "config", ".", "getString", "(", "path", ")", ")", ";", "}" ]
this is the amazing functionality provided by simple-lib
[ "this", "is", "the", "amazing", "functionality", "provided", "by", "simple", "-", "lib" ]
68cebfde5e861e9a5fdc75ceff366ed95e17d475
https://github.com/lightbend/config/blob/68cebfde5e861e9a5fdc75ceff366ed95e17d475/examples/java/simple-lib/src/main/java/simplelib/SimpleLibContext.java#L31-L33
14,898
lightbend/config
config/src/main/java/com/typesafe/config/ConfigResolveOptions.java
ConfigResolveOptions.appendResolver
public ConfigResolveOptions appendResolver(ConfigResolver value) { if (value == null) { throw new ConfigException.BugOrBroken("null resolver passed to appendResolver"); } else if (value == this.resolver) { return this; } else { return new ConfigResolveOptions(useSystemEnvironment, allowUnresolved, this.resolver.withFallback(value)); } }
java
public ConfigResolveOptions appendResolver(ConfigResolver value) { if (value == null) { throw new ConfigException.BugOrBroken("null resolver passed to appendResolver"); } else if (value == this.resolver) { return this; } else { return new ConfigResolveOptions(useSystemEnvironment, allowUnresolved, this.resolver.withFallback(value)); } }
[ "public", "ConfigResolveOptions", "appendResolver", "(", "ConfigResolver", "value", ")", "{", "if", "(", "value", "==", "null", ")", "{", "throw", "new", "ConfigException", ".", "BugOrBroken", "(", "\"null resolver passed to appendResolver\"", ")", ";", "}", "else", "if", "(", "value", "==", "this", ".", "resolver", ")", "{", "return", "this", ";", "}", "else", "{", "return", "new", "ConfigResolveOptions", "(", "useSystemEnvironment", ",", "allowUnresolved", ",", "this", ".", "resolver", ".", "withFallback", "(", "value", ")", ")", ";", "}", "}" ]
Returns options where the given resolver is used as a fallback if a reference cannot be otherwise resolved. This resolver will only be called after resolution has failed to substitute with a value from within the config itself and with any other resolvers that have been appended before this one. Multiple resolvers can be added using: <pre> ConfigResolveOptions options = ConfigResolveOptions.defaults() .appendResolver(primary) .appendResolver(secondary) .appendResolver(tertiary); </pre> With this config, unresolved references will first be resolved with the primary resolver; if that fails, then with the secondary; and finally, if that also fails, with the tertiary. If all fallbacks fail to return a substitution, "allow unresolved" determines whether resolution fails or continues. @param value the resolver to fall back to @return options that use the given resolver as a fallback @since 1.3.2
[ "Returns", "options", "where", "the", "given", "resolver", "used", "as", "a", "fallback", "if", "a", "reference", "cannot", "be", "otherwise", "resolved", ".", "This", "resolver", "will", "only", "be", "called", "after", "resolution", "has", "failed", "to", "substitute", "with", "a", "value", "from", "within", "the", "config", "itself", "and", "with", "any", "other", "resolvers", "that", "have", "been", "appended", "before", "this", "one", ".", "Multiple", "resolvers", "can", "be", "added", "using" ]
68cebfde5e861e9a5fdc75ceff366ed95e17d475
https://github.com/lightbend/config/blob/68cebfde5e861e9a5fdc75ceff366ed95e17d475/config/src/main/java/com/typesafe/config/ConfigResolveOptions.java#L125-L134
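To make the fallback chain concrete, here is a hedged sketch of a custom ConfigResolver appended via appendResolver; the MapResolver class, the greeting/who keys, and the backing map are assumptions invented for the example.

import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import com.typesafe.config.ConfigResolveOptions;
import com.typesafe.config.ConfigResolver;
import com.typesafe.config.ConfigValue;
import com.typesafe.config.ConfigValueFactory;

import java.util.Collections;
import java.util.Map;

public class ResolverExample {
    // Resolves otherwise-unresolved substitutions from a plain map.
    static class MapResolver implements ConfigResolver {
        private final Map<String, String> values;
        MapResolver(Map<String, String> values) { this.values = values; }

        @Override
        public ConfigValue lookup(String path) {
            // null means "no answer here": resolution moves on to the next
            // fallback, or fails/continues depending on "allow unresolved"
            String v = values.get(path);
            return v == null ? null : ConfigValueFactory.fromAnyRef(v);
        }

        @Override
        public ConfigResolver withFallback(ConfigResolver fallback) {
            // only invoked if a further resolver is appended after this one
            MapResolver first = this;
            return new ConfigResolver() {
                @Override
                public ConfigValue lookup(String path) {
                    ConfigValue v = first.lookup(path);
                    return v != null ? v : fallback.lookup(path);
                }

                @Override
                public ConfigResolver withFallback(ConfigResolver next) {
                    return this; // deeper chaining omitted in this sketch
                }
            };
        }
    }

    public static void main(String[] args) {
        Config config = ConfigFactory.parseString("greeting = ${who}");
        ConfigResolveOptions options = ConfigResolveOptions.defaults()
                .appendResolver(new MapResolver(Collections.singletonMap("who", "world")));
        System.out.println(config.resolve(options).getString("greeting")); // world
    }
}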
14,899
lightbend/config
config/src/main/java/com/typesafe/config/impl/ResolveResult.java
ResolveResult.asObjectResult
@SuppressWarnings("unchecked") ResolveResult<AbstractConfigObject> asObjectResult() { if (!(value instanceof AbstractConfigObject)) throw new ConfigException.BugOrBroken("Expecting a resolve result to be an object, but it was " + value); Object o = this; return (ResolveResult<AbstractConfigObject>) o; }
java
@SuppressWarnings("unchecked") ResolveResult<AbstractConfigObject> asObjectResult() { if (!(value instanceof AbstractConfigObject)) throw new ConfigException.BugOrBroken("Expecting a resolve result to be an object, but it was " + value); Object o = this; return (ResolveResult<AbstractConfigObject>) o; }
[ "@", "SuppressWarnings", "(", "\"unchecked\"", ")", "ResolveResult", "<", "AbstractConfigObject", ">", "asObjectResult", "(", ")", "{", "if", "(", "!", "(", "value", "instanceof", "AbstractConfigObject", ")", ")", "throw", "new", "ConfigException", ".", "BugOrBroken", "(", "\"Expecting a resolve result to be an object, but it was \"", "+", "value", ")", ";", "Object", "o", "=", "this", ";", "return", "(", "ResolveResult", "<", "AbstractConfigObject", ">", ")", "o", ";", "}" ]
better option? we don't have variance
[ "better", "option?", "we", "don", "t", "have", "variance" ]
68cebfde5e861e9a5fdc75ceff366ed95e17d475
https://github.com/lightbend/config/blob/68cebfde5e861e9a5fdc75ceff366ed95e17d475/config/src/main/java/com/typesafe/config/impl/ResolveResult.java#L20-L26