index
int64
repo_id
string
file_path
string
content
string
0
java-sources/ai/swim/swim-xml/3.10.0/swim
java-sources/ai/swim/swim-xml/3.10.0/swim/xml/CDataSectionParser.java
// Copyright 2015-2019 SWIM.AI inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package swim.xml; import swim.codec.Diagnostic; import swim.codec.Input; import swim.codec.Output; import swim.codec.Parser; final class CDataSectionParser extends Parser<Object> { final XmlParser<?, ?> xml; final Output<?> output; final int step; CDataSectionParser(XmlParser<?, ?> xml, Output<?> output, int step) { this.xml = xml; this.output = output; this.step = step; } @Override public Parser<Object> feed(Input input) { return parse(input, this.xml, this.output, this.step); } static Parser<Object> parse(Input input, XmlParser<?, ?> xml, Output<?> output, int step) { int c = 0; while (step >= 1 && step <= 9) { if (input.isCont()) { if (input.head() == "<![CDATA[".charAt(step - 1)) { input = input.step(); step += 1; continue; } else { return error(Diagnostic.expected("<![CDATA[".charAt(step - 1), input)); } } else if (input.isDone()) { return error(Diagnostic.expected("<![CDATA[".charAt(step - 1), input)); } break; } do { if (step == 10) { while (input.isCont()) { c = input.head(); if (Xml.isChar(c) && c != ']') { input = input.step(); output = output.write(c); } else { break; } } if (input.isCont()) { if (c == ']') { input = input.step(); step = 11; } else { return error(Diagnostic.unexpected(input)); } } else if (input.isDone()) { return error(Diagnostic.unexpected(input)); } } if (step == 11) { if (input.isCont()) { c = input.head(); if (c == ']') { input = input.step(); step = 12; 
} else { output = output.write(']'); step = 10; continue; } } else if (input.isDone()) { return error(Diagnostic.unexpected(input)); } } if (step == 12) { if (input.isCont()) { c = input.head(); if (c == '>') { input = input.step(); return done(); } else { output = output.write(']'); output = output.write(']'); step = 10; continue; } } else if (input.isDone()) { return error(Diagnostic.unexpected(input)); } } break; } while (true); if (input.isError()) { return error(input.trap()); } return new CDataSectionParser(xml, output, step); } static Parser<Object> parse(Input input, XmlParser<?, ?> xml, Output<?> output) { return parse(input, xml, output, 1); } static Parser<Object> parseRest(Input input, XmlParser<?, ?> xml, Output<?> output) { return parse(input, xml, output, 3); } }
0
java-sources/ai/swim/swim-xml/3.10.0/swim
java-sources/ai/swim/swim-xml/3.10.0/swim/xml/CommentOutput.java
// Copyright 2015-2019 SWIM.AI inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package swim.xml; import swim.codec.Output; import swim.codec.OutputSettings; final class CommentOutput<I> extends Output<I> { final XmlParser<I, ?> xml; final StringBuilder builder; OutputSettings settings; CommentOutput(XmlParser<I, ?> xml, StringBuilder builder, OutputSettings settings) { this.xml = xml; this.builder = builder; this.settings = settings; } CommentOutput(XmlParser<I, ?> xml) { this(xml, new StringBuilder(), OutputSettings.standard()); } @Override public boolean isCont() { return true; } @Override public boolean isFull() { return false; } @Override public boolean isDone() { return false; } @Override public boolean isError() { return false; } @Override public boolean isPart() { return false; } @Override public Output<I> isPart(boolean isPart) { return this; } @Override public Output<I> write(int codePoint) { this.builder.appendCodePoint(codePoint); return this; } @Override public Output<I> write(String string) { this.builder.append(string); return this; } @Override public Output<I> writeln(String string) { this.builder.append(string).append(this.settings.lineSeparator()); return this; } @Override public Output<I> writeln() { this.builder.append(this.settings.lineSeparator()); return this; } @Override public OutputSettings settings() { return this.settings; } @Override public Output<I> settings(OutputSettings settings) { this.settings = settings; return this; } @Override 
public I bind() { return xml.comment(this.builder.toString()); } @Override public Output<I> clone() { return new CommentOutput<I>(this.xml, new StringBuilder(this.builder.toString()), this.settings); } @Override public String toString() { return this.builder.toString(); } }
0
java-sources/ai/swim/swim-xml/3.10.0/swim
java-sources/ai/swim/swim-xml/3.10.0/swim/xml/CommentParser.java
// Copyright 2015-2019 SWIM.AI inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package swim.xml; import swim.codec.Diagnostic; import swim.codec.Input; import swim.codec.Output; import swim.codec.Parser; final class CommentParser<I> extends Parser<I> { final XmlParser<I, ?> xml; final Output<I> output; final int step; CommentParser(XmlParser<I, ?> xml, Output<I> output, int step) { this.xml = xml; this.output = output; this.step = step; } @Override public Parser<I> feed(Input input) { return parse(input, this.xml, this.output, this.step); } static <I> Parser<I> parse(Input input, XmlParser<I, ?> xml, Output<I> output, int step) { int c = 0; while (step >= 1 && step <= 4) { if (input.isCont()) { if (input.head() == "<!--".charAt(step - 1)) { input = input.step(); step += 1; continue; } else { return error(Diagnostic.expected("<!--".charAt(step - 1), input)); } } else if (input.isDone()) { return error(Diagnostic.expected("<!--".charAt(step - 1), input)); } break; } do { if (step == 5) { while (input.isCont()) { c = input.head(); if (Xml.isChar(c) && c != '-') { input = input.step(); if (output == null) { output = xml.commentOutput(); } output = output.write(c); } else { break; } } if (input.isCont()) { if (c == '-') { input = input.step(); step = 6; } else { return error(Diagnostic.unexpected(input)); } } else if (input.isDone()) { return error(Diagnostic.unexpected(input)); } } if (step == 6) { if (input.isCont()) { c = input.head(); if (c == '-') { input = 
input.step(); step = 7; } else { if (output == null) { output = xml.commentOutput(); } output = output.write('-'); step = 5; continue; } } else if (input.isDone()) { return error(Diagnostic.unexpected(input)); } } if (step == 7) { if (input.isCont()) { c = input.head(); if (output == null) { output = xml.commentOutput(); } if (c == '>') { input = input.step(); return done(output.bind()); } else { output = output.write('-'); output = output.write('-'); step = 5; continue; } } else if (input.isDone()) { return error(Diagnostic.unexpected(input)); } } break; } while (true); if (input.isError()) { return error(input.trap()); } return new CommentParser<I>(xml, output, step); } static <I> Parser<I> parse(Input input, XmlParser<I, ?> xml) { return parse(input, xml, null, 1); } static <I> Parser<I> parseRest(Input input, XmlParser<I, ?> xml) { return parse(input, xml, null, 3); } }
0
java-sources/ai/swim/swim-xml/3.10.0/swim
java-sources/ai/swim/swim-xml/3.10.0/swim/xml/DoctypeDeclParser.java
// Copyright 2015-2019 SWIM.AI inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package swim.xml; import swim.codec.Diagnostic; import swim.codec.Input; import swim.codec.Output; import swim.codec.Parser; import swim.codec.Unicode; final class DoctypeDeclParser<I, V> extends Parser<I> { final XmlParser<I, V> xml; final Parser<String> nameParser; final Output<String> publicId; final Output<String> systemId; final Parser<V> markupDeclParser; final int quote; final int step; DoctypeDeclParser(XmlParser<I, V> xml, Parser<String> nameParser, Output<String> publicId, Output<String> systemId, Parser<V> markupDeclParser, int quote, int step) { this.xml = xml; this.nameParser = nameParser; this.publicId = publicId; this.systemId = systemId; this.markupDeclParser = markupDeclParser; this.quote = quote; this.step = step; } @Override public Parser<I> feed(Input input) { return parse(input, this.xml, this.nameParser, this.publicId, this.systemId, this.markupDeclParser, this.quote, this.step); } static <I, V> Parser<I> parse(Input input, XmlParser<I, V> xml, Parser<String> nameParser, Output<String> publicId, Output<String> systemId, Parser<V> markupDeclParser, int quote, int step) { int c = 0; while (step >= 1 && step <= 9) { if (input.isCont()) { if (input.head() == "<!DOCTYPE".charAt(step - 1)) { input = input.step(); step += 1; continue; } else { return error(Diagnostic.expected("<!DOCTYPE".charAt(step - 1), input)); } } else if (input.isDone()) { return 
error(Diagnostic.expected("<!DOCTYPE".charAt(step - 1), input)); } break; } if (step == 10) { if (input.isCont()) { if (Xml.isWhitespace(input.head())) { input = input.step(); step = 11; } else { return error(Diagnostic.expected("space", input)); } } else if (input.isDone()) { return error(Diagnostic.expected("space", input)); } } if (step == 11) { if (nameParser == null) { nameParser = xml.parseName(input); } else { nameParser = nameParser.feed(input); } if (nameParser.isDone()) { step = 12; } else if (nameParser.isError()) { return nameParser.asError(); } } if (step == 12) { if (input.isCont()) { if (Xml.isWhitespace(input.head())) { input = input.step(); step = 13; } else { step = 32; } } else if (input.isDone()) { return error(Diagnostic.unexpected(input)); } } if (step == 13) { if (input.isCont()) { c = input.head(); if (c == 'P') { step = 14; } else if (c == 'S') { step = 23; } else if (c == '[') { input = input.step(); step = 34; } else { step = 37; } } else if (input.isDone()) { return error(Diagnostic.unexpected(input)); } } while (step >= 14 && step <= 19) { if (input.isCont()) { if (input.head() == "PUBLIC".charAt(step - 14)) { input = input.step(); step += 1; continue; } else { return error(Diagnostic.expected("PUBLIC".charAt(step - 14), input)); } } else if (input.isDone()) { return error(Diagnostic.expected("PUBLIC".charAt(step - 14), input)); } break; } if (step == 20) { if (input.isCont()) { if (Xml.isWhitespace(input.head())) { input = input.step(); step = 21; } else { return error(Diagnostic.expected("space", input)); } } else if (input.isDone()) { return error(Diagnostic.expected("space", input)); } } if (step == 21) { if (input.isCont()) { c = input.head(); if (c == '"' || c == '\'') { input = input.step(); publicId = Unicode.stringOutput(); quote = c; step = 22; } else { return error(Diagnostic.expected("quote", input)); } } else if (input.isDone()) { return error(Diagnostic.expected("quote", input)); } } if (step == 22) { while 
(input.isCont()) { c = input.head(); if (Xml.isPubidChar(c) && c != quote) { input = input.step(); publicId.write(c); } else { break; } } if (input.isCont()) { if (c == quote) { input = input.step(); quote = 0; step = 29; } else { return error(Diagnostic.expected(c, input)); } } else if (input.isDone()) { return error(Diagnostic.expected(c, input)); } } while (step >= 23 && step <= 28) { if (input.isCont()) { if (input.head() == "SYSTEM".charAt(step - 23)) { input = input.step(); step += 1; continue; } else { return error(Diagnostic.expected("SYSTEM".charAt(step - 23), input)); } } else if (input.isDone()) { return error(Diagnostic.expected("SYSTEM".charAt(step - 23), input)); } break; } if (step == 29) { if (input.isCont()) { if (Xml.isWhitespace(input.head())) { input = input.step(); step = 30; } else { return error(Diagnostic.expected("space", input)); } } else if (input.isDone()) { return error(Diagnostic.expected("space", input)); } } if (step == 30) { if (input.isCont()) { c = input.head(); if (c == '"' || c == '\'') { input = input.step(); systemId = Unicode.stringOutput(); quote = c; step = 31; } else { return error(Diagnostic.expected("quote", input)); } } else if (input.isDone()) { return error(Diagnostic.expected("quote", input)); } } if (step == 31) { while (input.isCont()) { c = input.head(); if (Xml.isChar(c) && c != quote) { input = input.step(); systemId.write(c); } else { break; } } if (input.isCont()) { if (c == quote) { input = input.step(); quote = 0; step = 32; } else { return error(Diagnostic.expected(c, input)); } } else if (input.isDone()) { return error(Diagnostic.expected(c, input)); } } if (step == 32) { if (input.isCont()) { if (Xml.isWhitespace(input.head())) { input = input.step(); } step = 33; } else if (input.isDone()) { return error(Diagnostic.unexpected(input)); } } if (step == 33) { if (input.isCont()) { c = input.head(); if (c == '[') { input = input.step(); step = 34; } else { step = 37; } } else if (input.isDone()) { return 
error(Diagnostic.unexpected(input)); } } if (step == 34) { if (markupDeclParser == null) { markupDeclParser = xml.parseMarkupDecl(input); } else { markupDeclParser = markupDeclParser.feed(input); } if (markupDeclParser.isDone()) { step = 35; } else if (markupDeclParser.isError()) { return markupDeclParser.asError(); } } if (step == 35) { if (input.isCont()) { c = input.head(); if (c == ']') { input = input.step(); step = 36; } else { return error(Diagnostic.expected(']', input)); } } else if (input.isDone()) { return error(Diagnostic.expected(']', input)); } } if (step == 36) { if (input.isCont()) { if (Xml.isWhitespace(input.head())) { input = input.step(); } step = 37; } else if (input.isDone()) { return error(Diagnostic.unexpected(input)); } } if (step == 37) { if (input.isCont()) { c = input.head(); if (c == '>') { input = input.step(); if (publicId == null && systemId == null) { return done(xml.doctype(nameParser.bind())); } else if (publicId == null) { return done(xml.doctype(nameParser.bind(), systemId.bind())); } else { return done(xml.doctype(nameParser.bind(), publicId.bind(), systemId.bind())); } } else { return error(Diagnostic.expected('>', input)); } } else if (input.isDone()) { return error(Diagnostic.expected('>', input)); } } if (input.isError()) { return error(input.trap()); } return new DoctypeDeclParser<I, V>(xml, nameParser, publicId, systemId, markupDeclParser, quote, step); } static <I, V> Parser<I> parse(Input input, XmlParser<I, V> xml) { return parse(input, xml, null, null, null, null, 0, 1); } static <I, V> Parser<I> parseRest(Input input, XmlParser<I, V> xml) { return parse(input, xml, null, null, null, null, 0, 3); } }
0
java-sources/ai/swim/swim-xml/3.10.0/swim
java-sources/ai/swim/swim-xml/3.10.0/swim/xml/DocumentParser.java
// Copyright 2015-2019 SWIM.AI inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package swim.xml; import swim.codec.Diagnostic; import swim.codec.Input; import swim.codec.Parser; import swim.util.Builder; final class DocumentParser<I, V> extends Parser<V> { final XmlParser<I, V> xml; final Builder<I, V> builder; final Parser<String> targetParser; final Parser<I> miscParser; final Parser<V> tagParser; final int step; DocumentParser(XmlParser<I, V> xml, Builder<I, V> builder, Parser<String> targetParser, Parser<I> miscParser, Parser<V> tagParser, int step) { this.xml = xml; this.builder = builder; this.targetParser = targetParser; this.miscParser = miscParser; this.tagParser = tagParser; this.step = step; } DocumentParser(XmlParser<I, V> xml) { this(xml, null, null, null, null, 1); } @Override public Parser<V> feed(Input input) { return parse(input, this.xml, this.builder, this.targetParser, this.miscParser, this.tagParser, this.step); } static <I, V> Parser<V> parse(Input input, XmlParser<I, V> xml, Builder<I, V> builder, Parser<String> targetParser, Parser<I> miscParser, Parser<V> tagParser, int step) { int c = 0; do { if (step == 1) { while (input.isCont()) { c = input.head(); if (Xml.isWhitespace(c)) { input = input.step(); } else { break; } } if (input.isCont()) { if (c == '<') { input = input.step(); step = 2; } else { return error(Diagnostic.expected('<', input)); } } else if (input.isError()) { return error(input.trap()); } else if (input.isDone()) { if 
(builder == null) { builder = xml.documentBuilder(); } return done(builder.bind()); } } if (step == 2) { if (input.isCont()) { c = input.head(); if (c == '?') { input = input.step(); step = 3; } else if (c == '!') { input = input.step(); step = 4; } else if (Xml.isNameStartChar(c)) { if (builder == null) { builder = xml.documentBuilder(); } tagParser = xml.parseTagStartRest(input, builder); step = 6; } else { return error(Diagnostic.unexpected(input)); } } else if (input.isDone()) { return error(Diagnostic.unexpected(input)); } } if (step == 3) { if (targetParser == null) { targetParser = xml.parsePITarget(input); } else { targetParser = targetParser.feed(input); } if (targetParser.isDone()) { final String target = targetParser.bind(); targetParser = null; if ("xml".equalsIgnoreCase(target)) { miscParser = xml.parseXmlDeclRest(input); step = 5; } else { miscParser = xml.parsePITargetRest(input, target); step = 5; } } else if (targetParser.isError()) { return targetParser.asError(); } } if (step == 4) { if (input.isCont()) { c = input.head(); if (c == '-') { miscParser = xml.parseCommentRest(input); step = 5; } else { miscParser = xml.parseDoctypeDeclRest(input); step = 5; } } else if (input.isDone()) { return error(Diagnostic.unexpected(input)); } } if (step == 5) { while (miscParser.isCont() && !input.isEmpty()) { miscParser = miscParser.feed(input); } if (miscParser.isDone()) { final I misc = miscParser.bind(); if (misc != null) { if (builder == null) { builder = xml.documentBuilder(); } builder.add(misc); } miscParser = null; step = 1; continue; } else if (miscParser.isError()) { return miscParser.asError(); } } if (step == 6) { while (tagParser.isCont() && !input.isEmpty()) { tagParser = tagParser.feed(input); } if (tagParser.isDone()) { tagParser = null; step = 1; continue; } else if (tagParser.isError()) { return tagParser.asError(); } } break; } while (true); if (input.isError()) { return error(input.trap()); } return new DocumentParser<I, V>(xml, builder, 
targetParser, miscParser, tagParser, step); } static <I, V> Parser<V> parse(Input input, XmlParser<I, V> xml, Builder<I, V> builder) { return parse(input, xml, builder, null, null, null, 1); } static <I, V> Parser<V> parse(Input input, XmlParser<I, V> xml) { return parse(input, xml, null, null, null, null, 1); } }
0
java-sources/ai/swim/swim-xml/3.10.0/swim
java-sources/ai/swim/swim-xml/3.10.0/swim/xml/NameParser.java
// Copyright 2015-2019 SWIM.AI inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package swim.xml; import swim.codec.Diagnostic; import swim.codec.Input; import swim.codec.Output; import swim.codec.Parser; final class NameParser extends Parser<String> { final XmlParser<?, ?> xml; final Output<String> output; final int step; NameParser(XmlParser<?, ?> xml, Output<String> output, int step) { this.xml = xml; this.output = output; this.step = step; } @Override public Parser<String> feed(Input input) { return parse(input, this.xml, this.output, this.step); } static Parser<String> parse(Input input, XmlParser<?, ?> xml, Output<String> output, int step) { int c = 0; if (step == 1) { if (input.isCont()) { c = input.head(); if (Xml.isNameStartChar(c)) { if (output == null) { output = xml.nameOutput(); } input = input.step(); output = output.write(c); step = 2; } else { return error(Diagnostic.expected("name", input)); } } else if (input.isDone()) { return error(Diagnostic.expected("name", input)); } } if (step == 2) { while (input.isCont()) { c = input.head(); if (Xml.isNameChar(c)) { input = input.step(); output = output.write(c); } else { break; } } if (!input.isEmpty()) { return done(xml.name(output.bind())); } } if (input.isError()) { return error(input.trap()); } return new NameParser(xml, output, step); } static Parser<String> parse(Input input, XmlParser<?, ?> xml, Output<String> output) { return parse(input, xml, output, 1); } static Parser<String> parse(Input input, 
XmlParser<?, ?> xml) { return parse(input, xml, null, 1); } }
0
java-sources/ai/swim/swim-xml/3.10.0/swim
java-sources/ai/swim/swim-xml/3.10.0/swim/xml/PIOutput.java
// Copyright 2015-2019 SWIM.AI inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package swim.xml; import swim.codec.Output; import swim.codec.OutputSettings; final class PIOutput<I> extends Output<I> { final XmlParser<I, ?> xml; final String target; final StringBuilder builder; OutputSettings settings; PIOutput(XmlParser<I, ?> xml, String target, StringBuilder builder, OutputSettings settings) { this.xml = xml; this.target = target; this.builder = builder; this.settings = settings; } PIOutput(XmlParser<I, ?> xml, String target) { this(xml, target, new StringBuilder(), OutputSettings.standard()); } @Override public boolean isCont() { return true; } @Override public boolean isFull() { return false; } @Override public boolean isDone() { return false; } @Override public boolean isError() { return false; } @Override public boolean isPart() { return false; } @Override public Output<I> isPart(boolean isPart) { return this; } @Override public Output<I> write(int codePoint) { this.builder.appendCodePoint(codePoint); return this; } @Override public Output<I> write(String string) { this.builder.append(string); return this; } @Override public Output<I> writeln(String string) { this.builder.append(string).append(this.settings.lineSeparator()); return this; } @Override public Output<I> writeln() { this.builder.append(this.settings.lineSeparator()); return this; } @Override public OutputSettings settings() { return this.settings; } @Override public Output<I> 
settings(OutputSettings settings) { this.settings = settings; return this; } @Override public I bind() { return xml.pi(this.target, this.builder.toString()); } @Override public Output<I> clone() { return new PIOutput<I>(this.xml, this.target, new StringBuilder(this.builder.toString()), this.settings); } @Override public String toString() { return this.builder.toString(); } }
0
java-sources/ai/swim/swim-xml/3.10.0/swim
java-sources/ai/swim/swim-xml/3.10.0/swim/xml/PIParser.java
// Copyright 2015-2019 SWIM.AI inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package swim.xml; import swim.codec.Diagnostic; import swim.codec.Input; import swim.codec.Output; import swim.codec.Parser; final class PIParser<I> extends Parser<I> { final XmlParser<I, ?> xml; final Parser<String> targetParser; final Output<I> output; final int step; PIParser(XmlParser<I, ?> xml, Parser<String> targetParser, Output<I> output, int step) { this.xml = xml; this.targetParser = targetParser; this.output = output; this.step = step; } @Override public Parser<I> feed(Input input) { return parse(input, this.xml, this.targetParser, this.output, this.step); } static <I> Parser<I> parse(Input input, XmlParser<I, ?> xml, Parser<String> targetParser, Output<I> output, int step) { int c = 0; if (step == 1) { if (input.isCont()) { if (input.head() == '<') { input = input.step(); step = 2; } else { return error(Diagnostic.expected('<', input)); } } else if (input.isDone()) { return error(Diagnostic.expected('<', input)); } } if (step == 2) { if (input.isCont()) { if (input.head() == '?') { input = input.step(); step = 3; } else { return error(Diagnostic.expected('?', input)); } } else if (input.isDone()) { return error(Diagnostic.expected('?', input)); } } if (step == 3) { if (targetParser == null) { targetParser = xml.parsePITarget(input); } else { targetParser = targetParser.feed(input); } if (targetParser.isDone()) { final String target = targetParser.bind(); if 
(!"xml".equalsIgnoreCase(target)) { return xml.parsePITargetRest(input, target); } else { return error(Diagnostic.message("illegal processing instruction target: " + target, input)); } } else if (targetParser.isError()) { return targetParser.asError(); } } if (step == 4) { if (input.isCont()) { if (Xml.isWhitespace(input.head())) { input = input.step(); if (output == null) { output = xml.piOutput(targetParser.bind()); } step = 5; } else { return error(Diagnostic.expected("space", input)); } } else if (input.isDone()) { return error(Diagnostic.expected("space", input)); } } do { if (step == 5) { while (input.isCont()) { c = input.head(); if (Xml.isChar(c) && c != '?') { input = input.step(); output = output.write(c); } else { break; } } if (input.isCont()) { if (c == '?') { input = input.step(); step = 6; } else { return error(Diagnostic.unexpected(input)); } } else if (input.isDone()) { return error(Diagnostic.unexpected(input)); } else { break; } } if (step == 6) { if (input.isCont()) { c = input.head(); if (c == '>') { input = input.step(); return done(output.bind()); } else { output = output.write('?'); step = 5; continue; } } else if (input.isDone()) { return error(Diagnostic.unexpected(input)); } else { break; } } break; } while (true); if (input.isError()) { return error(input.trap()); } return new PIParser<I>(xml, targetParser, output, step); } static <I> Parser<I> parse(Input input, XmlParser<I, ?> xml) { return parse(input, xml, null, null, 1); } static <I> Parser<I> parseRest(Input input, XmlParser<I, ?> xml) { return parse(input, xml, null, null, 3); } static <I> Parser<I> parseTargetRest(Input input, XmlParser<I, ?> xml, String target) { return parse(input, xml, done(target), null, 4); } }
0
java-sources/ai/swim/swim-xml/3.10.0/swim
java-sources/ai/swim/swim-xml/3.10.0/swim/xml/ReferenceParser.java
// Copyright 2015-2019 SWIM.AI inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package swim.xml;

import swim.codec.Base10;
import swim.codec.Base16;
import swim.codec.Diagnostic;
import swim.codec.Input;
import swim.codec.Output;
import swim.codec.Parser;

/**
 * Incremental parser for an XML reference: an entity reference ({@code &name;})
 * or a character reference ({@code &#ddd;} / {@code &#xhhh;}). The expanded
 * text is written to {@code output}; the parser itself binds no value.
 *
 * Step map: 1 = '&'; 2 = dispatch on '#'; 3 = entity name; 4 = ';' after name;
 * 5 = dispatch on 'x'; 6/7 = first/subsequent hex digits; 8/9 = first/subsequent
 * decimal digits; 10 = ';' after a numeric code point.
 */
final class ReferenceParser extends Parser<Object> {
  final XmlParser<?, ?> xml;          // factory (supplies name parsing and entity expansion)
  final Output<?> output;             // destination for the expanded reference
  final Parser<String> nameParser;    // in-progress entity name parse, or null
  final int code;                     // accumulated numeric code point
  final int step;                     // resume point in the state machine

  ReferenceParser(XmlParser<?, ?> xml, Output<?> output, Parser<String> nameParser,
                  int code, int step) {
    this.xml = xml;
    this.output = output;
    this.nameParser = nameParser;
    this.code = code;
    this.step = step;
  }

  @Override
  public Parser<Object> feed(Input input) {
    return parse(input, this.xml, this.output, this.nameParser, this.code, this.step);
  }

  static Parser<Object> parse(Input input, XmlParser<?, ?> xml, Output<?> output,
                              Parser<String> nameParser, int code, int step) {
    int c = 0;
    if (step == 1) {
      if (input.isCont()) {
        c = input.head();
        if (c == '&') {
          input = input.step();
          step = 2;
        } else {
          return error(Diagnostic.expected('&', input));
        }
      } else if (input.isDone()) {
        return error(Diagnostic.expected('&', input));
      }
    }
    if (step == 2) {
      if (input.isCont()) {
        c = input.head();
        if (c == '#') {
          input = input.step();
          step = 5; // numeric character reference
        } else {
          step = 3; // named entity reference; '#' char not consumed
        }
      } else if (input.isDone()) {
        return error(Diagnostic.unexpected(input));
      }
    }
    if (step == 3) {
      if (nameParser == null) {
        nameParser = xml.parseEntityName(input);
      } else {
        nameParser = nameParser.feed(input);
      }
      if (nameParser.isDone()) {
        step = 4;
      } else if (nameParser.isError()) {
        return nameParser.asError();
      }
    }
    if (step == 4) {
      if (input.isCont()) {
        c = input.head();
        if (c == ';') {
          input = input.step();
          // Delegate expansion to the factory (predefined entities by default).
          final boolean expanded = xml.expandEntityRef(nameParser.bind(), output);
          if (expanded) {
            return done();
          } else {
            return error(Diagnostic.message("unrecognized entity: " + nameParser.bind(), input));
          }
        } else {
          return error(Diagnostic.expected(';', input));
        }
      } else if (input.isDone()) {
        return error(Diagnostic.expected(';', input));
      }
    }
    if (step == 5) {
      if (input.isCont()) {
        c = input.head();
        if (c == 'x') {
          input = input.step();
          step = 6; // hexadecimal form &#x...;
        } else {
          step = 8; // decimal form &#...; first digit not consumed yet
        }
      } else if (input.isDone()) {
        return error(Diagnostic.unexpected(input));
      }
    }
    if (step == 6) {
      if (input.isCont()) {
        c = input.head();
        if (Base16.isDigit(c)) {
          input = input.step();
          code = Base16.decodeDigit(c);
          step = 7;
        } else {
          return error(Diagnostic.expected("hex digit", input));
        }
      } else if (input.isDone()) {
        return error(Diagnostic.expected("hex digit", input));
      }
    }
    if (step == 7) {
      do {
        if (input.isCont()) {
          c = input.head();
          if (Base16.isDigit(c)) {
            input = input.step();
            code = 16 * code + Base16.decodeDigit(c);
          } else {
            step = 10; // first non-digit should be ';'
            break;
          }
        } else if (input.isDone()) {
          return error(Diagnostic.unexpected(input));
        } else {
          break; // suspend mid-number
        }
      } while (true);
    }
    if (step == 8) {
      if (input.isCont()) {
        c = input.head();
        if (Base10.isDigit(c)) {
          input = input.step();
          code = Base10.decodeDigit(c);
          step = 9;
        } else {
          return error(Diagnostic.expected("digit", input));
        }
      } else if (input.isDone()) {
        return error(Diagnostic.expected("digit", input));
      }
    }
    if (step == 9) {
      do {
        if (input.isCont()) {
          c = input.head();
          if (Base10.isDigit(c)) {
            input = input.step();
            code = 10 * code + Base10.decodeDigit(c);
          } else {
            step = 10;
            break;
          }
        } else if (input.isDone()) {
          return error(Diagnostic.unexpected(input));
        } else {
          break;
        }
      } while (true);
    }
    if (step == 10) {
      if (input.isCont()) {
        c = input.head();
        if (c == ';') {
          input = input.step();
          // Only legal XML characters may be produced by a character reference.
          if (Xml.isChar(code)) {
            output = output.write(code);
          } else {
            return error(Diagnostic.message("illegal character reference: " + code, input));
          }
          return done();
        } else {
          return error(Diagnostic.expected(';', input));
        }
      } else if (input.isDone()) {
        return error(Diagnostic.expected(';', input));
      }
    }
    if (input.isError()) {
      return error(input.trap());
    }
    // Suspend: return a continuation capturing the current state.
    return new ReferenceParser(xml, output, nameParser, code, step);
  }

  static Parser<Object> parse(Input input, XmlParser<?, ?> xml, Output<?> output) {
    return parse(input, xml, output, null, 0, 1);
  }
}
0
java-sources/ai/swim/swim-xml/3.10.0/swim
java-sources/ai/swim/swim-xml/3.10.0/swim/xml/TagContentParser.java
// Copyright 2015-2019 SWIM.AI inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package swim.xml;

import swim.codec.Diagnostic;
import swim.codec.Input;
import swim.codec.Output;
import swim.codec.Parser;
import swim.util.Builder;

/**
 * Incremental parser for the content of an element whose start tag {@code tag}
 * has already been parsed: character data, references, nested elements,
 * comments, processing instructions, and CDATA sections, up to and including
 * the matching end tag. Parsed items accumulate in {@code builder}.
 *
 * Step map: 1 = character data; 2/3 = lookahead for the forbidden "]]>"
 * sequence in character data; 4 = dispatch after '<'; 5 = dispatch after "<!";
 * 6 = feeding a comment/PI sub-parser (binds an I); 7 = feeding a nested
 * element sub-parser (binds a V); 8 = feeding a reference/CDATA sub-parser
 * that writes directly into {@code text}.
 */
final class TagContentParser<I, V> extends Parser<V> {
  final XmlParser<I, V> xml;      // factory used to build parse-tree items
  final String tag;               // name the end tag must match
  final Builder<I, V> builder;    // accumulates this element's content items
  final Output<V> text;           // pending character data; null when none buffered
  final Parser<?> nodeParser;     // active sub-parser for steps 6-8, or null
  final int step;                 // resume point in the state machine

  TagContentParser(XmlParser<I, V> xml, String tag, Builder<I, V> builder,
                   Output<V> text, Parser<?> nodeParser, int step) {
    this.xml = xml;
    this.tag = tag;
    this.builder = builder;
    this.text = text;
    this.nodeParser = nodeParser;
    this.step = step;
  }

  @Override
  public Parser<V> feed(Input input) {
    return parse(input, this.xml, this.tag, this.builder, this.text, this.nodeParser, this.step);
  }

  @SuppressWarnings("unchecked")
  static <I, V> Parser<V> parse(Input input, XmlParser<I, V> xml, String tag,
                                Builder<I, V> builder, Output<V> text,
                                Parser<?> nodeParser, int step) {
    int c = 0;
    do {
      if (step == 1) {
        // Accumulate plain character data until a structural character.
        while (input.isCont()) {
          c = input.head();
          if (Xml.isChar(c) && c != ']' && c != '<' && c != '&') {
            input = input.step();
            if (text == null) {
              text = xml.textOutput();
            }
            // NOTE(review): write's return value is ignored here, unlike the
            // output = output.write(c) pattern elsewhere — presumably
            // textOutput() mutates in place; confirm against Output semantics.
            text.write(c);
          } else {
            break;
          }
        }
        if (input.isCont()) {
          if (c == ']') {
            input = input.step();
            step = 2; // check for illegal "]]>"
          } else if (c == '<') {
            input = input.step();
            step = 4;
          } else if (c == '&') {
            if (text == null) {
              text = xml.textOutput();
            }
            // Reference expands directly into the pending text output.
            nodeParser = xml.parseReference(input, text);
            step = 8;
          } else {
            return error(Diagnostic.unexpected(input));
          }
        } else if (input.isDone()) {
          // Content must be terminated by the end tag, never by end of input.
          return error(Diagnostic.unexpected(input));
        }
      }
      if (step == 2) {
        if (input.isCont()) {
          c = input.head();
          if (c == ']') {
            input = input.step();
            step = 3;
          } else {
            // Lone ']' is ordinary character data.
            if (text == null) {
              text = xml.textOutput();
            }
            text.write(']');
            step = 1;
            continue;
          }
        } else if (input.isDone()) {
          return error(Diagnostic.unexpected(input));
        }
      }
      if (step == 3) {
        if (input.isCont()) {
          c = input.head();
          if (c == '>') {
            // "]]>" may not appear literally in character data.
            return error(Diagnostic.message("unexpected ]]>", input));
          } else {
            if (text == null) {
              text = xml.textOutput();
            }
            text.write(']');
            text.write(']');
            step = 1;
            continue;
          }
        } else if (input.isDone()) {
          return error(Diagnostic.unexpected(input));
        }
      }
      if (step == 4) {
        if (input.isCont()) {
          c = input.head();
          if (Xml.isNameStartChar(c)) {
            // Nested element: flush buffered text, then parse the start tag.
            if (text != null) {
              builder.add(xml.item(text.bind()));
              text = null;
            }
            nodeParser = xml.parseTagStartRest(input);
            step = 7;
          } else if (c == '/') {
            input = input.step();
            if (text != null) {
              builder.add(xml.item(text.bind()));
              text = null;
            }
            // End tag finishes this element; delegate and return.
            return xml.parseTagEndRest(input, tag, builder);
          } else if (c == '?') {
            input = input.step();
            if (text != null) {
              builder.add(xml.item(text.bind()));
              text = null;
            }
            nodeParser = xml.parsePIRest(input);
            step = 6;
          } else if (c == '!') {
            input = input.step();
            step = 5;
          } else {
            return error(Diagnostic.expected("end tag, processing instruction, comment, or CDATA section", input));
          }
        } else if (input.isDone()) {
          return error(Diagnostic.expected("end tag, processing instruction, comment, or CDATA section", input));
        }
      }
      if (step == 5) {
        if (input.isCont()) {
          c = input.head();
          if (c == '-') {
            if (text != null) {
              builder.add(xml.item(text.bind()));
              text = null;
            }
            nodeParser = xml.parseCommentRest(input);
            step = 6;
          } else if (c == '[') {
            // CDATA appends to the pending text rather than forming a node.
            if (text == null) {
              text = xml.textOutput();
            }
            nodeParser = xml.parseCDataSectionRest(input, text);
            step = 8;
          } else {
            return error(Diagnostic.expected("comment or CDATA section", input));
          }
        } else if (input.isDone()) {
          return error(Diagnostic.expected("comment or CDATA section", input));
        }
      }
      if (step == 6) {
        // Drive the comment/PI sub-parser; it binds an item (I) or null.
        while (nodeParser.isCont() && !input.isEmpty()) {
          nodeParser = nodeParser.feed(input);
        }
        if (nodeParser.isDone()) {
          final I node = (I) nodeParser.bind();
          if (node != null) {
            builder.add(node);
          }
          nodeParser = null;
          step = 1;
          continue;
        } else if (nodeParser.isError()) {
          return nodeParser.asError();
        }
      }
      if (step == 7) {
        // Drive the nested-element sub-parser; it binds a value (V).
        while (nodeParser.isCont() && !input.isEmpty()) {
          nodeParser = nodeParser.feed(input);
        }
        if (nodeParser.isDone()) {
          final V node = (V) nodeParser.bind();
          if (node != null) {
            builder.add(xml.item(node));
          }
          nodeParser = null;
          step = 1;
          continue;
        } else if (nodeParser.isError()) {
          return nodeParser.asError();
        }
      }
      if (step == 8) {
        // Drive a sub-parser that writes into text; its bound value is unused.
        while (nodeParser.isCont() && !input.isEmpty()) {
          nodeParser = nodeParser.feed(input);
        }
        if (nodeParser.isDone()) {
          nodeParser = null;
          step = 1;
          continue;
        } else if (nodeParser.isError()) {
          return nodeParser.asError();
        }
      }
      break;
    } while (true);
    if (input.isError()) {
      return error(input.trap());
    }
    // Suspend: return a continuation capturing the current state.
    return new TagContentParser<I, V>(xml, tag, builder, text, nodeParser, step);
  }

  static <I, V> Parser<V> parse(Input input, XmlParser<I, V> xml, String tag,
                                Builder<I, V> builder) {
    return parse(input, xml, tag, builder, null, null, 1);
  }
}
0
java-sources/ai/swim/swim-xml/3.10.0/swim
java-sources/ai/swim/swim-xml/3.10.0/swim/xml/TagEndParser.java
// Copyright 2015-2019 SWIM.AI inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package swim.xml;

import swim.codec.Diagnostic;
import swim.codec.Input;
import swim.codec.Parser;
import swim.util.Builder;

/**
 * Incremental parser for an end tag ({@code </tag>}) that must match the given
 * {@code tag} name exactly. On success, binds the value produced by
 * {@code builder}, which holds the element's accumulated content.
 *
 * Step map: 1 = '<'; 2 = '/'; 3..(3+tag.length()-1) = matching the tag name
 * character by character, where (step - 3) is the char offset into {@code tag};
 * 3+tag.length() = optional whitespace then '>'.
 */
final class TagEndParser<I, V> extends Parser<V> {
  final XmlParser<I, V> xml;    // factory (unused directly; kept for continuations)
  final String tag;             // expected end-tag name
  final Builder<I, V> builder;  // bound as the result when the end tag closes
  final int step;               // resume point in the state machine

  TagEndParser(XmlParser<I, V> xml, String tag, Builder<I, V> builder, int step) {
    this.xml = xml;
    this.tag = tag;
    this.builder = builder;
    this.step = step;
  }

  @Override
  public Parser<V> feed(Input input) {
    return parse(input, this.xml, this.tag, this.builder, this.step);
  }

  static <I, V> Parser<V> parse(Input input, XmlParser<I, V> xml, String tag,
                                Builder<I, V> builder, int step) {
    int c = 0;
    if (step == 1) {
      if (input.isCont()) {
        c = input.head();
        if (c == '<') {
          input = input.step();
          step = 2;
        } else {
          return error(Diagnostic.expected('<', input));
        }
      } else if (input.isDone()) {
        return error(Diagnostic.expected('<', input));
      }
    }
    if (step == 2) {
      if (input.isCont()) {
        c = input.head();
        if (c == '/') {
          input = input.step();
          step = 3;
        } else {
          return error(Diagnostic.expected('/', input));
        }
      } else if (input.isDone()) {
        return error(Diagnostic.expected('/', input));
      }
    }
    // Match the tag name; code-point-aware stepping handles supplementary chars.
    while (step >= 3 && step - 3 < tag.length()) {
      if (input.isCont()) {
        if (input.head() == tag.codePointAt(step - 3)) {
          input = input.step();
          step = 3 + tag.offsetByCodePoints(step - 3, 1);
          continue;
        } else {
          return error(Diagnostic.expected("</" + tag + ">", input));
        }
      } else if (input.isDone()) {
        return error(Diagnostic.expected("</" + tag + ">", input));
      }
      break; // input exhausted but not done; suspend
    }
    if (step == 3 + tag.length()) {
      // Optional whitespace is allowed between the name and '>'.
      while (input.isCont()) {
        c = input.head();
        if (Xml.isWhitespace(c)) {
          input = input.step();
        } else {
          break;
        }
      }
      if (input.isCont()) {
        if (c == '>') {
          input = input.step();
          return done(builder.bind());
        } else {
          return error(Diagnostic.expected('>', input));
        }
      } else if (input.isDone()) {
        return error(Diagnostic.expected('>', input));
      }
    }
    if (input.isError()) {
      return error(input.trap());
    }
    // Suspend: return a continuation capturing the current state.
    return new TagEndParser<I, V>(xml, tag, builder, step);
  }

  static <I, V> Parser<V> parse(Input input, XmlParser<I, V> xml, String tag,
                                Builder<I, V> builder) {
    return parse(input, xml, tag, builder, 1);
  }

  // Entry point when "</" has already been consumed.
  static <I, V> Parser<V> parseRest(Input input, XmlParser<I, V> xml, String tag,
                                    Builder<I, V> builder) {
    return parse(input, xml, tag, builder, 3);
  }
}
0
java-sources/ai/swim/swim-xml/3.10.0/swim
java-sources/ai/swim/swim-xml/3.10.0/swim/xml/TagStartParser.java
// Copyright 2015-2019 SWIM.AI inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package swim.xml;

import swim.codec.Diagnostic;
import swim.codec.Input;
import swim.codec.Parser;
import swim.util.Builder;

/**
 * Incremental parser for a start tag ({@code <tag attr="v" ...>}) or an
 * empty-element tag ({@code <tag .../>}). A non-empty tag continues into
 * {@link TagContentParser} via {@code xml.parseTagContent}; an empty tag
 * binds the builder's value immediately.
 *
 * Step map: 1 = leading whitespace then '<'; 2 = tag name; 3/4 = whitespace
 * before a possible attribute; 5 = attribute name; 6 = '='; 7 = opening quote;
 * 8 = attribute value; 9 = '/' or '>' after attributes; 10 = '>' of an
 * empty-element tag.
 */
final class TagStartParser<I, V> extends Parser<V> {
  final XmlParser<I, V> xml;        // factory used to build parse-tree items
  final Builder<I, V> builder;      // enclosing builder, or null to create one per tag
  final Parser<String> tagParser;   // in-progress tag name parse, or null
  final Builder<I, V> attributes;   // accumulates attribute items; null until first attr
  final Parser<String> nameParser;  // in-progress attribute name parse, or null
  final Parser<V> valueParser;      // in-progress attribute value parse, or null
  final int step;                   // resume point in the state machine

  TagStartParser(XmlParser<I, V> xml, Builder<I, V> builder, Parser<String> tagParser,
                 Builder<I, V> attributes, Parser<String> nameParser,
                 Parser<V> valueParser, int step) {
    this.xml = xml;
    this.builder = builder;
    this.tagParser = tagParser;
    this.attributes = attributes;
    this.nameParser = nameParser;
    this.valueParser = valueParser;
    this.step = step;
  }

  @Override
  public Parser<V> feed(Input input) {
    return parse(input, this.xml, this.builder, this.tagParser, this.attributes,
                 this.nameParser, this.valueParser, this.step);
  }

  static <I, V> Parser<V> parse(Input input, XmlParser<I, V> xml, Builder<I, V> builder,
                                Parser<String> tagParser, Builder<I, V> attributes,
                                Parser<String> nameParser, Parser<V> valueParser, int step) {
    int c = 0;
    if (step == 1) {
      while (input.isCont()) {
        c = input.head();
        if (Xml.isWhitespace(c)) {
          input = input.step();
        } else {
          break;
        }
      }
      if (input.isCont()) {
        if (c == '<') {
          input = input.step();
          step = 2;
        } else {
          return error(Diagnostic.expected('<', input));
        }
      } else if (input.isDone()) {
        return error(Diagnostic.expected('<', input));
      }
    }
    if (step == 2) {
      if (tagParser == null) {
        tagParser = xml.parseName(input);
      } else {
        tagParser = tagParser.feed(input);
      }
      if (tagParser.isDone()) {
        step = 3;
      } else if (tagParser.isError()) {
        return tagParser.asError();
      }
    }
    // Attribute loop: steps 3-8 parse one name="value" pair per iteration.
    do {
      if (step == 3) {
        if (input.isCont()) {
          if (Xml.isWhitespace(input.head())) {
            input = input.step();
            step = 4;
          } else {
            step = 9; // no whitespace: tag must be closing
            break;
          }
        } else if (input.isDone()) {
          return error(Diagnostic.unexpected(input));
        }
      }
      if (step == 4) {
        while (input.isCont()) {
          c = input.head();
          if (Xml.isWhitespace(c)) {
            input = input.step();
          } else {
            break;
          }
        }
        if (input.isCont()) {
          if (Xml.isNameStartChar(c)) {
            step = 5; // another attribute follows
          } else {
            step = 9; // '/' or '>' expected next
            break;
          }
        } else if (input.isDone()) {
          return error(Diagnostic.unexpected(input));
        }
      }
      if (step == 5) {
        if (nameParser == null) {
          nameParser = xml.parseName(input);
        } else {
          nameParser = nameParser.feed(input);
        }
        if (nameParser.isDone()) {
          step = 6;
        } else if (nameParser.isError()) {
          return nameParser.asError();
        }
      }
      if (step == 6) {
        while (input.isCont()) {
          c = input.head();
          if (Xml.isWhitespace(c)) {
            input = input.step();
          } else {
            break;
          }
        }
        if (input.isCont()) {
          if (c == '=') {
            input = input.step();
            step = 7;
          } else {
            return error(Diagnostic.expected('=', input));
          }
        } else if (input.isDone()) {
          return error(Diagnostic.expected('=', input));
        }
      }
      if (step == 7) {
        while (input.isCont()) {
          c = input.head();
          if (Xml.isWhitespace(c)) {
            input = input.step();
          } else {
            break;
          }
        }
        if (input.isCont()) {
          // Quote is not consumed here; the value parser handles both quotes.
          if (c == '"' || c == '\'') {
            step = 8;
          } else {
            return error(Diagnostic.expected("attribute value", input));
          }
        } else if (input.isDone()) {
          return error(Diagnostic.expected("attribute value", input));
        }
      }
      if (step == 8) {
        if (valueParser == null) {
          valueParser = xml.parseAttributeValue(input);
        } else {
          valueParser = valueParser.feed(input);
        }
        if (valueParser.isDone()) {
          if (attributes == null) {
            attributes = xml.attributesBuilder();
          }
          attributes.add(xml.attribute(nameParser.bind(), valueParser.bind()));
          nameParser = null;
          valueParser = null;
          step = 3; // loop exits below; the continuation resumes at step 3
        } else if (valueParser.isError()) {
          return valueParser.asError();
        }
      }
      break;
    } while (true);
    if (step == 9) {
      while (input.isCont()) {
        c = input.head();
        if (Xml.isWhitespace(c)) {
          input = input.step();
        } else {
          break;
        }
      }
      if (input.isCont()) {
        if (c == '/') {
          input = input.step();
          step = 10; // empty-element tag
        } else if (c == '>') {
          input = input.step();
          // Materialize the tag into the builder, then parse element content.
          if (builder == null) {
            if (attributes == null) {
              builder = xml.tagBuilder(tagParser.bind());
            } else {
              builder = xml.tagBuilder(tagParser.bind(), attributes.bind());
            }
          } else if (attributes == null) {
            builder.add(xml.tag(tagParser.bind()));
          } else {
            builder.add(xml.tag(tagParser.bind(), attributes.bind()));
          }
          return xml.parseTagContent(input, tagParser.bind(), builder);
        } else {
          return error(Diagnostic.expected("'/' or '>'", input));
        }
      } else if (input.isDone()) {
        return error(Diagnostic.expected("'/' or '>'", input));
      }
    }
    if (step == 10) {
      if (input.isCont()) {
        c = input.head();
        if (c == '>') {
          input = input.step();
          // Empty-element tag: materialize and bind immediately.
          if (builder == null) {
            if (attributes == null) {
              builder = xml.tagBuilder(tagParser.bind());
            } else {
              builder = xml.tagBuilder(tagParser.bind(), attributes.bind());
            }
          } else if (attributes == null) {
            builder.add(xml.tag(tagParser.bind()));
          } else {
            builder.add(xml.tag(tagParser.bind(), attributes.bind()));
          }
          return done(builder.bind());
        } else {
          return error(Diagnostic.expected('>', input));
        }
      } else if (input.isDone()) {
        return error(Diagnostic.expected('>', input));
      }
    }
    if (input.isError()) {
      return error(input.trap());
    }
    // Suspend: return a continuation capturing the current state.
    return new TagStartParser<I, V>(xml, builder, tagParser, attributes,
                                    nameParser, valueParser, step);
  }

  static <I, V> Parser<V> parse(Input input, XmlParser<I, V> xml) {
    return parse(input, xml, null, null, null, null, null, 1);
  }

  // Entry points when '<' has already been consumed.
  static <I, V> Parser<V> parseRest(Input input, XmlParser<I, V> xml,
                                    Builder<I, V> builder) {
    return parse(input, xml, builder, null, null, null, null, 2);
  }

  static <I, V> Parser<V> parseRest(Input input, XmlParser<I, V> xml) {
    return parse(input, xml, null, null, null, null, null, 2);
  }
}
0
java-sources/ai/swim/swim-xml/3.10.0/swim
java-sources/ai/swim/swim-xml/3.10.0/swim/xml/Xml.java
// Copyright 2015-2019 SWIM.AI inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package swim.xml; import swim.codec.Parser; import swim.structure.Item; import swim.structure.Value; /** * Factory for constructing XML parsers and writers. */ public final class Xml { private Xml() { // stub } static boolean isSpace(int c) { return c == 0x20 || c == 0x9; } static boolean isNewline(int c) { return c == 0xa || c == 0xd; } static boolean isWhitespace(int c) { return isSpace(c) || isNewline(c); } static boolean isChar(int c) { return c >= 0x1 && c <= 0xd7ff || c >= 0xe000 && c <= 0xfffd || c >= 0x10000 && c <= 0x10ffff; } static boolean isRestrictedChar(int c) { return c >= 0x1 && c <= 0x8 || c >= 0xb && c <= 0xc || c >= 0xe && c <= 0x1f || c >= 0x7f && c <= 0x84 || c >= 0x86 && c <= 0x9f; } static boolean isNameStartChar(int c) { return c == ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z' || c >= 0xc0 && c <= 0xd6 || c >= 0xd8 && c <= 0xf6 || c >= 0xf8 && c <= 0x2ff || c >= 0x370 && c <= 0x37d || c >= 0x37f && c <= 0x1fff || c >= 0x200c && c <= 0x200d || c >= 0x2070 && c <= 0x218f || c >= 0x2c00 && c <= 0x2fef || c >= 0x3001 && c <= 0xd7ff || c >= 0xf900 && c <= 0xfdcf || c >= 0xfdf0 && c <= 0xfffd || c >= 0x10000 && c <= 0xeffff; } static boolean isNameChar(int c) { return c == '-' || c == '.' 
|| c >= '0' && c <= '9' || c == ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z' || c == 0xb7 || c >= 0xc0 && c <= 0xd6 || c >= 0xd8 && c <= 0xf6 || c >= 0xf8 && c <= 0x37d || c >= 0x37f && c <= 0x1fff || c >= 0x200c && c <= 0x200d || c >= 0x203f && c <= 0x2040 || c >= 0x2070 && c <= 0x218f || c >= 0x2c00 && c <= 0x2fef || c >= 0x3001 && c <= 0xd7ff || c >= 0xf900 && c <= 0xfdcf || c >= 0xfdf0 && c <= 0xfffd || c >= 0x10000 && c <= 0xeffff; } static boolean isPubidChar(int c) { return c == 0xa || c == 0xd || c == 0x20 || c == '!' || c == '#' || c == '$' || c == '%' || c == '\'' || c == '(' || c == ')' || c == '*' || c == '+' || c == ',' || c == '-' || c == '.' || c == '/' || c >= '0' && c <= '9' || c == ':' || c == ';' || c == '=' || c == '?' || c == '@' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z'; } private static XmlParser<Item, Value> structureParser; //private static XmlWriter<Item, Value> structureWriter; public static XmlParser<Item, Value> structureParser() { if (structureParser == null) { structureParser = new XmlStructureParser(); } return structureParser; } //public static XmlWriter<Item, Value> structureWriter() { // if (structureWriter == null) { // structureWriter = new XmlStructureWriter(); // } // return structureWriter; //} public static Value parse(String xml) { return structureParser().parseDocumentString(xml); } public static Value parseFragment(String xml) { return structureParser().parseFragmentString(xml); } public static Parser<Value> parser() { return structureParser().documentParser(); } }
0
java-sources/ai/swim/swim-xml/3.10.0/swim
java-sources/ai/swim/swim-xml/3.10.0/swim/xml/XmlDeclParser.java
// Copyright 2015-2019 SWIM.AI inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package swim.xml;

import swim.codec.Diagnostic;
import swim.codec.Input;
import swim.codec.Parser;
import swim.util.Builder;

/**
 * Incremental parser for an XML declaration
 * ({@code <?xml version="1.0" encoding="..."?>}). Pseudo-attributes are
 * collected generically and handed to {@code xml.xml(attributes)} to build
 * the declaration item.
 *
 * Step map: 1-5 = the literal prefix "&lt;?xml", one char per step;
 * 6/7 = whitespace before a possible pseudo-attribute; 8 = attribute name;
 * 9 = '='; 10 = opening quote; 11 = attribute value; 12 = '?'; 13 = '>'.
 */
final class XmlDeclParser<I, V> extends Parser<I> {
  final XmlParser<I, V> xml;        // factory used to build parse-tree items
  final Builder<I, V> attributes;   // accumulates pseudo-attributes; null until first
  final Parser<String> nameParser;  // in-progress attribute name parse, or null
  final Parser<V> valueParser;      // in-progress attribute value parse, or null
  final int step;                   // resume point in the state machine

  XmlDeclParser(XmlParser<I, V> xml, Builder<I, V> attributes, Parser<String> nameParser,
                Parser<V> valueParser, int step) {
    this.xml = xml;
    this.attributes = attributes;
    this.nameParser = nameParser;
    this.valueParser = valueParser;
    this.step = step;
  }

  @Override
  public Parser<I> feed(Input input) {
    return parse(input, this.xml, this.attributes, this.nameParser, this.valueParser, this.step);
  }

  static <I, V> Parser<I> parse(Input input, XmlParser<I, V> xml, Builder<I, V> attributes,
                                Parser<String> nameParser, Parser<V> valueParser, int step) {
    int c = 0;
    // Steps 1-5 match the "<?xml" prefix literally, one character per step.
    while (step >= 1 && step <= 5) {
      if (input.isCont()) {
        if (input.head() == "<?xml".charAt(step - 1)) {
          input = input.step();
          step += 1;
          continue;
        } else {
          return error(Diagnostic.expected("<?xml".charAt(step - 1), input));
        }
      } else if (input.isDone()) {
        return error(Diagnostic.expected("<?xml".charAt(step - 1), input));
      }
      break; // input exhausted but not done; suspend
    }
    // Pseudo-attribute loop: steps 6-11 parse one name="value" pair per pass.
    do {
      if (step == 6) {
        if (input.isCont()) {
          if (Xml.isWhitespace(input.head())) {
            input = input.step();
            step = 7;
          } else {
            step = 12; // no whitespace: declaration must be closing
            break;
          }
        } else if (input.isDone()) {
          return error(Diagnostic.unexpected(input));
        }
      }
      if (step == 7) {
        while (input.isCont()) {
          c = input.head();
          if (Xml.isWhitespace(c)) {
            input = input.step();
          } else {
            break;
          }
        }
        if (input.isCont()) {
          if (Xml.isNameStartChar(c)) {
            step = 8; // another pseudo-attribute follows
          } else {
            step = 12;
            break;
          }
        } else if (input.isDone()) {
          return error(Diagnostic.unexpected(input));
        }
      }
      if (step == 8) {
        if (nameParser == null) {
          nameParser = xml.parseName(input);
        } else {
          nameParser = nameParser.feed(input);
        }
        if (nameParser.isDone()) {
          step = 9;
        } else if (nameParser.isError()) {
          return nameParser.asError();
        } else {
          break;
        }
      }
      if (step == 9) {
        while (input.isCont()) {
          c = input.head();
          if (Xml.isWhitespace(c)) {
            input = input.step();
          } else {
            break;
          }
        }
        if (input.isCont()) {
          if (c == '=') {
            input = input.step();
            step = 10;
          } else {
            return error(Diagnostic.expected('=', input));
          }
        } else if (input.isDone()) {
          return error(Diagnostic.expected('=', input));
        }
      }
      if (step == 10) {
        while (input.isCont()) {
          c = input.head();
          if (Xml.isWhitespace(c)) {
            input = input.step();
          } else {
            break;
          }
        }
        if (input.isCont()) {
          // Quote is not consumed here; the value parser handles both quotes.
          if (c == '"' || c == '\'') {
            step = 11;
          } else {
            return error(Diagnostic.expected("attribute value", input));
          }
        } else if (input.isDone()) {
          return error(Diagnostic.expected("attribute value", input));
        }
      }
      if (step == 11) {
        if (valueParser == null) {
          valueParser = xml.parseAttributeValue(input);
        } else {
          valueParser = valueParser.feed(input);
        }
        if (valueParser.isDone()) {
          if (attributes == null) {
            attributes = xml.attributesBuilder();
          }
          attributes.add(xml.attribute(nameParser.bind(), valueParser.bind()));
          nameParser = null;
          valueParser = null;
          step = 6; // loop exits below; the continuation resumes at step 6
        } else if (valueParser.isError()) {
          return valueParser.asError();
        }
      }
      break;
    } while (true);
    if (step == 12) {
      while (input.isCont()) {
        c = input.head();
        if (Xml.isWhitespace(c)) {
          input = input.step();
        } else {
          break;
        }
      }
      if (input.isCont()) {
        if (c == '?') {
          input = input.step();
          step = 13;
        } else {
          return error(Diagnostic.expected('?', input));
        }
      } else if (input.isDone()) {
        return error(Diagnostic.expected('?', input));
      }
    }
    if (step == 13) {
      if (input.isCont()) {
        c = input.head();
        if (c == '>') {
          input = input.step();
          if (attributes == null) {
            return done(xml.xml(xml.attributes()));
          } else {
            return done(xml.xml(attributes.bind()));
          }
        } else {
          return error(Diagnostic.expected('>', input));
        }
      } else if (input.isDone()) {
        return error(Diagnostic.expected('>', input));
      }
    }
    if (input.isError()) {
      return error(input.trap());
    }
    // Suspend: return a continuation capturing the current state.
    return new XmlDeclParser<I, V>(xml, attributes, nameParser, valueParser, step);
  }

  static <I, V> Parser<I> parse(Input input, XmlParser<I, V> xml) {
    return parse(input, xml, null, null, null, 1);
  }

  // Entry point when "<?xml" has already been consumed.
  static <I, V> Parser<I> parseRest(Input input, XmlParser<I, V> xml) {
    return parse(input, xml, null, null, null, 6);
  }
}
0
java-sources/ai/swim/swim-xml/3.10.0/swim
java-sources/ai/swim/swim-xml/3.10.0/swim/xml/XmlParser.java
// Copyright 2015-2019 SWIM.AI inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package swim.xml;

import swim.codec.Diagnostic;
import swim.codec.Input;
import swim.codec.Output;
import swim.codec.Parser;
import swim.codec.Unicode;
import swim.util.Builder;

/**
 * Factory for constructing XML parsers and parse trees.
 *
 * <p>Concrete subclasses decide how parsed XML is materialized by implementing
 * the abstract factory methods; the {@code parse*} methods wire input into the
 * corresponding incremental parser classes in this package.
 *
 * @param <I> the type of individual parse tree items (tags, attributes,
 *            comments, processing instructions) produced by this factory
 * @param <V> the type of aggregate parse tree values (attribute sets,
 *            documents, fragments) produced by this factory
 */
public abstract class XmlParser<I, V> {

  /** Converts an aggregate value into a parse tree item. */
  public abstract I item(V value);

  /** Converts a parsed XML name into the form stored in the parse tree. */
  public abstract String name(String name);

  /** Constructs an attribute item from a parsed name and value. */
  public abstract I attribute(String name, V value);

  /** Returns the value representing an empty attribute set. */
  public abstract V attributes();

  /** Constructs an item for an XML declaration with the given attributes. */
  public abstract I xml(V attributes);

  /** Constructs a doctype item with a root element name only. */
  public abstract I doctype(String name);

  /** Constructs a doctype item with a root element name and SYSTEM id. */
  public abstract I doctype(String name, String systemId);

  /** Constructs a doctype item with a root element name, PUBLIC id, and system id. */
  public abstract I doctype(String name, String publicId, String systemId);

  /** Constructs an item for a start tag with no attributes. */
  public abstract I tag(String name);

  /** Constructs an item for a start tag with the given attributes. */
  public abstract I tag(String name, V attributes);

  /** Constructs an item for an XML comment with the given text. */
  public abstract I comment(String value);

  /** Constructs an item for a processing instruction with the given target and content. */
  public abstract I pi(String target, String value);

  /** Returns a fresh output into which parsed XML names are written. */
  public abstract Output<String> nameOutput();

  /** Returns a fresh output into which parsed character data is written. */
  public abstract Output<V> textOutput();

  /** Returns an output that collects comment text and binds it via {@link #comment(String)}. */
  public Output<I> commentOutput() {
    return new CommentOutput<I>(this);
  }

  /** Returns an output that collects PI content for {@code target} and binds it via {@link #pi(String, String)}. */
  public Output<I> piOutput(String target) {
    return new PIOutput<I>(this, target);
  }

  /** Returns a fresh builder that accumulates attribute items into an attribute set value. */
  public abstract Builder<I, V> attributesBuilder();

  /** Returns a fresh builder for the content of the named tag. */
  public abstract Builder<I, V> tagBuilder(String tag);

  /** Returns a fresh builder for the content of the named tag with the given attributes. */
  public abstract Builder<I, V> tagBuilder(String tag, V attributes);

  /** Returns a fresh builder that accumulates a complete document. */
  public abstract Builder<I, V> documentBuilder();

  /** Returns a fresh builder that accumulates a document fragment. */
  public abstract Builder<I, V> fragmentBuilder();

  /**
   * Expands the entity reference with the given {@code name} into
   * {@code output}, returning {@code true} if the entity was recognized.
   * The base implementation handles only the five predefined XML entities
   * ({@code amp}, {@code lt}, {@code gt}, {@code apos}, {@code quot});
   * subclasses may override to support additional entities.
   */
  public boolean expandEntityRef(String name, Output<?> output) {
    if ("amp".equals(name)) {
      output = output.write('&');
      return true;
    } else if ("lt".equals(name)) {
      output = output.write('<');
      return true;
    } else if ("gt".equals(name)) {
      output = output.write('>');
      return true;
    } else if ("apos".equals(name)) {
      output = output.write('\'');
      return true;
    } else if ("quot".equals(name)) {
      output = output.write('"');
      return true;
    }
    return false;
  }

  /** Parses an XML name from {@code input}. */
  public Parser<String> parseName(Input input) {
    return NameParser.parse(input, this);
  }

  /** Parses a quoted attribute value from {@code input}. */
  public Parser<V> parseAttributeValue(Input input) {
    return AttributeValueParser.parse(input, this);
  }

  /** Parses an entity name from {@code input}; delegates to the same parser as {@link #parseName(Input)}. */
  public Parser<String> parseEntityName(Input input) {
    return NameParser.parse(input, this);
  }

  /** Parses an entity or character reference from {@code input}, writing its expansion into {@code text}. */
  public Parser<?> parseReference(Input input, Output<?> text) {
    return ReferenceParser.parse(input, this, text);
  }

  /** Parses a complete XML document from {@code input}. */
  public Parser<V> parseDocument(Input input) {
    return DocumentParser.parse(input, this);
  }

  /** Parses an XML fragment from {@code input}, accumulating it with {@link #fragmentBuilder()}. */
  public Parser<V> parseFragment(Input input) {
    return DocumentParser.parse(input, this, fragmentBuilder());
  }

  /** Parses an XML declaration ({@code <?xml ...?>}) from {@code input}. */
  public Parser<I> parseXmlDecl(Input input) {
    return XmlDeclParser.parse(input, this);
  }

  /** Parses the remainder of an XML declaration whose {@code <?xml} prefix was already consumed. */
  public Parser<I> parseXmlDeclRest(Input input) {
    return XmlDeclParser.parseRest(input, this);
  }

  /** Parses a doctype declaration from {@code input}. */
  public Parser<I> parseDoctypeDecl(Input input) {
    return DoctypeDeclParser.parse(input, this);
  }

  /** Parses the remainder of a doctype declaration whose prefix was already consumed. */
  public Parser<I> parseDoctypeDeclRest(Input input) {
    return DoctypeDeclParser.parseRest(input, this);
  }

  /** Parses an internal-subset markup declaration; not yet implemented, so always yields an error parser. */
  public Parser<V> parseMarkupDecl(Input input) {
    // TODO: MarkupDeclParser.parse(input, this);
    return Parser.error(Diagnostic.message("unsupported markup decl", input));
  }

  /** Parses a start tag from {@code input}. */
  public Parser<V> parseTagStart(Input input) {
    return TagStartParser.parse(input, this);
  }

  /** Parses the remainder of a start tag whose {@code <} was already consumed. */
  public Parser<V> parseTagStartRest(Input input) {
    return TagStartParser.parseRest(input, this);
  }

  /** Parses the remainder of a start tag, accumulating content into {@code builder}. */
  public Parser<V> parseTagStartRest(Input input, Builder<I, V> builder) {
    return TagStartParser.parseRest(input, this, builder);
  }

  /** Parses the content of the open tag named {@code tag}, accumulating into {@code builder}. */
  public Parser<V> parseTagContent(Input input, String tag, Builder<I, V> builder) {
    return TagContentParser.parse(input, this, tag, builder);
  }

  /** Parses the end tag matching {@code tag}, completing {@code builder}. */
  public Parser<V> parseTagEnd(Input input, String tag, Builder<I, V> builder) {
    return TagEndParser.parse(input, this, tag, builder);
  }

  /** Parses the remainder of an end tag whose {@code </} prefix was already consumed. */
  public Parser<V> parseTagEndRest(Input input, String tag, Builder<I, V> builder) {
    return TagEndParser.parseRest(input, this, tag, builder);
  }

  /** Parses a CDATA section from {@code input}, writing its character data into {@code text}. */
  public Parser<?> parseCDataSection(Input input, Output<?> text) {
    return CDataSectionParser.parse(input, this, text);
  }

  /** Parses the remainder of a CDATA section whose {@code <![CDATA[} prefix was already consumed. */
  public Parser<?> parseCDataSectionRest(Input input, Output<?> text) {
    return CDataSectionParser.parseRest(input, this, text);
  }

  /** Parses a comment from {@code input}. */
  public Parser<I> parseComment(Input input) {
    return CommentParser.parse(input, this);
  }

  /** Parses the remainder of a comment whose {@code <!--} prefix was already consumed. */
  public Parser<I> parseCommentRest(Input input) {
    return CommentParser.parseRest(input, this);
  }

  /** Parses a processing instruction from {@code input}. */
  public Parser<I> parsePI(Input input) {
    return PIParser.parse(input, this);
  }

  /** Parses the remainder of a processing instruction whose {@code <?} prefix was already consumed. */
  public Parser<I> parsePIRest(Input input) {
    return PIParser.parseRest(input, this);
  }

  /** Parses a processing instruction target name from {@code input}. */
  public Parser<String> parsePITarget(Input input) {
    return NameParser.parse(input, this);
  }

  /** Parses the remainder of a processing instruction whose {@code target} was already parsed. */
  public Parser<I> parsePITargetRest(Input input, String target) {
    return PIParser.parseTargetRest(input, this, target);
  }

  /** Returns a fresh, unfed document parser bound to this factory. */
  public Parser<V> documentParser() {
    return new DocumentParser<I, V>(this);
  }

  /**
   * Parses {@code string} as a complete XML document and returns the bound
   * result. Leading and trailing whitespace is skipped; any unconsumed
   * non-whitespace input is reported as an "unexpected" parse error.
   */
  public V parseDocumentString(String string) {
    Input input = Unicode.stringInput(string);
    while (input.isCont() && Xml.isWhitespace(input.head())) {
      input = input.step();
    }
    Parser<V> parser = parseDocument(input);
    if (parser.isDone()) {
      // Consume trailing whitespace after a successful parse.
      while (input.isCont() && Xml.isWhitespace(input.head())) {
        input = input.step();
      }
    }
    if (input.isCont() && !parser.isError()) {
      parser = Parser.error(Diagnostic.unexpected(input));
    } else if (input.isError()) {
      parser = Parser.error(input.trap());
    }
    return parser.bind();
  }

  /**
   * Parses {@code string} as an XML fragment and returns the bound result.
   * Same whitespace and trailing-input handling as
   * {@link #parseDocumentString(String)}.
   */
  public V parseFragmentString(String string) {
    Input input = Unicode.stringInput(string);
    while (input.isCont() && Xml.isWhitespace(input.head())) {
      input = input.step();
    }
    Parser<V> parser = parseFragment(input);
    if (parser.isDone()) {
      // Consume trailing whitespace after a successful parse.
      while (input.isCont() && Xml.isWhitespace(input.head())) {
        input = input.step();
      }
    }
    if (input.isCont() && !parser.isError()) {
      parser = Parser.error(Diagnostic.unexpected(input));
    } else if (input.isError()) {
      parser = Parser.error(input.trap());
    }
    return parser.bind();
  }
}
0
java-sources/ai/swim/swim-xml/3.10.0/swim
java-sources/ai/swim/swim-xml/3.10.0/swim/xml/XmlStructureParser.java
// Copyright 2015-2019 SWIM.AI inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package swim.xml; import swim.codec.Output; import swim.codec.Unicode; import swim.structure.Attr; import swim.structure.Item; import swim.structure.Record; import swim.structure.Slot; import swim.structure.Text; import swim.structure.Value; import swim.util.Builder; public class XmlStructureParser extends XmlParser<Item, Value> { @Override public Item item(Value value) { return value; } @Override public String name(String name) { return name; } @Override public Item attribute(String name, Value value) { return Slot.of(name, value); } @Override public Value attributes() { return Value.extant(); } @Override public Item xml(Value attributes) { return Attr.of(XML_TAG, attributes); } @Override public Item doctype(String name) { return Attr.of(XML_DOCTYPE_TAG, name); } @Override public Item doctype(String name, String systemId) { return Attr.of(XML_DOCTYPE_TAG, Record.of(Slot.of("name", name), Slot.of("system", systemId))); } @Override public Item doctype(String name, String publicId, String systemId) { return Attr.of(XML_DOCTYPE_TAG, Record.of(Slot.of("name", name), Slot.of("public", publicId), Slot.of("system", systemId))); } @Override public Item tag(String name) { return Attr.of(name); } @Override public Item tag(String name, Value attributes) { return Attr.of(name, attributes); } @Override public Item comment(String value) { return Attr.of(XML_COMMENT_TAG, value); } @Override public 
Item pi(String target, String value) { return Attr.of(XML_PI_TAG, Record.of(target, value)); } @Override public Output<String> nameOutput() { return Unicode.stringOutput(); } @SuppressWarnings("unchecked") @Override public Output<Value> textOutput() { return (Output<Value>) (Output<?>) Text.output(); } @SuppressWarnings("unchecked") @Override public Builder<Item, Value> attributesBuilder() { return (Builder<Item, Value>) (Builder<?, ?>) Record.create(); } @SuppressWarnings("unchecked") @Override public Builder<Item, Value> tagBuilder(String name) { final Builder<Item, Record> builder = Record.create(); builder.add(tag(name)); return (Builder<Item, Value>) (Builder<?, ?>) builder; } @SuppressWarnings("unchecked") @Override public Builder<Item, Value> tagBuilder(String name, Value attributes) { final Builder<Item, Record> builder = Record.create(); builder.add(tag(name, attributes)); return (Builder<Item, Value>) (Builder<?, ?>) builder; } @SuppressWarnings("unchecked") @Override public Builder<Item, Value> documentBuilder() { return (Builder<Item, Value>) (Builder<?, ?>) Record.create(); } @SuppressWarnings("unchecked") @Override public Builder<Item, Value> fragmentBuilder() { return (Builder<Item, Value>) (Builder<?, ?>) Record.create(); } static final Text XML_TAG = Text.from("xml"); static final Text XML_DOCTYPE_TAG = Text.from("xml:doctype"); static final Text XML_COMMENT_TAG = Text.from("xml:comment"); static final Text XML_PI_TAG = Text.from("xml:pi"); }
0
java-sources/ai/swim/swim-xml/3.10.0/swim
java-sources/ai/swim/swim-xml/3.10.0/swim/xml/package-info.java
// Copyright 2015-2019 SWIM.AI inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /** * eXtensible Markup Language (XML) codec. */ package swim.xml;
0
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk/auth/Credentials.java
package io.taskmonk.auth;

/**
 * Marker interface for authentication credential types.
 *
 * <p>NOTE(review): no implementers are visible in this file; presumably
 * credential classes such as {@code OAuthClientCredentials} are meant to
 * implement it — confirm against the rest of the package.
 */
interface Credentials {
}
0
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk/auth/OAuthClientCredentials.java
package io.taskmonk.auth; public class OAuthClientCredentials { String clientId; String clientSecret; public OAuthClientCredentials(String clientId, String clientSecret) { this.clientId = clientId; this.clientSecret = clientSecret; } public String getClientId() { return clientId; } public String getClientSecret() { return clientSecret; } }
0
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk/auth/TokenResponse.java
package io.taskmonk.auth; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.time.LocalDate; import java.time.LocalDateTime; import java.util.Date; public class TokenResponse { private static final Logger logger = LoggerFactory.getLogger(TokenResponse.class); String token_type; String access_token; String refresh_token; Long expires_in; LocalDateTime expires_at = LocalDateTime.now().minusDays(1); public Boolean isExpired() { LocalDateTime now = LocalDateTime.now(); if (now.isAfter(expires_at)) { return true; } return false; } public String getToken_type() { return token_type; } public void setToken_type(String token_type) { this.token_type = token_type; } public String getAccess_token() { return access_token; } public void setAccess_token(String access_token) { this.access_token = access_token; } public String getRefresh_token() { return refresh_token; } public void setRefresh_token(String refresh_token) { this.refresh_token = refresh_token; } public Long getExpires_in() { return expires_in; } public void setExpires_in(Long expires_in) { this.expires_in = expires_in; setExpiry(); } public TokenResponse() { } public TokenResponse(String token_type, String access_token, String refresh_token, Long expires_in) { this.token_type = token_type; this.access_token = access_token; this.refresh_token = refresh_token; this.expires_in = expires_in; setExpiry(); } private void setExpiry() { LocalDateTime now = LocalDateTime.now(); expires_at = now.plusSeconds(expires_in - 30); } @Override public String toString() { return "access_token = " + access_token + "; refresh_token = {}" + refresh_token + "; expires_in = " + expires_in; } }
0
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk/client/TaskMonkClient.java
package io.taskmonk.client; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import io.taskmonk.auth.OAuthClientCredentials; import io.taskmonk.auth.TokenResponse; import io.taskmonk.clientexceptions.*; import io.taskmonk.entities.*; import io.taskmonk.http.RedirectStrategy; import org.apache.commons.io.FilenameUtils; import org.apache.http.HttpHost; import org.apache.http.HttpResponse; import org.apache.http.NameValuePair; import org.apache.http.client.entity.GzipCompressingEntity; import org.apache.http.client.entity.GzipDecompressingEntity; import org.apache.http.client.entity.UrlEncodedFormEntity; import org.apache.http.client.methods.*; import org.apache.http.client.utils.URIBuilder; import org.apache.http.client.utils.URLEncodedUtils; import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClients; import org.apache.http.impl.client.LaxRedirectStrategy; import org.apache.http.impl.nio.client.CloseableHttpAsyncClient; import org.apache.http.impl.nio.client.HttpAsyncClients; import org.apache.http.message.BasicNameValuePair; import org.apache.http.util.EntityUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileOutputStream; import java.io.InputStream; import java.net.URL; import java.nio.channels.Channels; import java.nio.channels.ReadableByteChannel; import java.nio.file.Files; import java.util.*; import java.util.concurrent.Future; import java.util.zip.GZIPOutputStream; import static org.apache.http.HttpHeaders.ACCEPT_ENCODING; public class TaskMonkClient { private static final Logger logger = LoggerFactory.getLogger(TaskMonkClient.class); HttpHost httpHost; HttpHost proxyHost; TokenResponse tokenResponse; OAuthClientCredentials credentials; private TokenResponse refreshToken() throws 
Exception { URIBuilder builder = new URIBuilder(httpHost.toString() + "/api/oauth2/token").addParameter("grant_type", "client_credentials") .addParameter("client_id", credentials.getClientId()) .addParameter("client_secret", credentials.getClientSecret()); HttpPost post = new HttpPost(builder.build()); return invoke(post, TokenResponse.class, false); } private TokenResponse getTokenResponse() throws Exception { if (tokenResponse == null || tokenResponse.isExpired()) { tokenResponse = refreshToken(); } return tokenResponse; } private void downloadFile(String url, String localPath) throws Exception { ReadableByteChannel rbc = Channels.newChannel(new URL(url).openStream()); FileOutputStream fos = new FileOutputStream(localPath); fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE); } private HttpPost getHttpPost(String path, List<NameValuePair> parameters, ContentType contentType) throws Exception { URIBuilder builder = new URIBuilder(httpHost.toString() + path); builder.addParameters(parameters); HttpPost post = new HttpPost(builder.build()); post.addHeader("Content-type", contentType.toString()); return post; } private HttpGet getHttpGet(String path) throws Exception { return getHttpGet(path, new ArrayList<NameValuePair>()); } private HttpGet getHttpGet(String path, List<NameValuePair> parameters) throws Exception { URIBuilder builder = new URIBuilder(httpHost.toString() + path) .addParameters(parameters); HttpGet get = new HttpGet(builder.build()); return get; } /** * Create a new batch in an existing project with parameters set * @param newBatchData {@link NewBatchData} New batch information * @return {@link String} returns the id of the batch * @throws io.taskmonk.clientexceptions.ForbiddenException if the access is unauthorized * @throws io.taskmonk.clientexceptions.NotFoundException if object not found * @throws io.taskmonk.clientexceptions.InternalServerError if an internal server error occurs * @throws io.taskmonk.clientexceptions.UnhandledException if 
unhandled exception occurs */ public String createBatch(String projectId, NewBatchData newBatchData) throws Exception { logger.debug("Creating new batch " + newBatchData); URIBuilder builder = new URIBuilder(httpHost.toString() + "/api/project/" + projectId + "/batch"); HttpPost post = new HttpPost(builder.build()); post.addHeader("Content-type", "application/json"); ObjectMapper mapper = new ObjectMapper(); String body = mapper.writeValueAsString(newBatchData); logger.trace("batch create content = {}", body); StringEntity stringEntity = new StringEntity(body); post.setEntity(stringEntity); return invoke(post, Id.class).id; } /** * Create a new batch in an existing project * @param projectId . projectId for the project the batch has to be created * @param batchName . name of the batch to be created * @return {@link String} returns the id of the batch * @throws io.taskmonk.clientexceptions.ForbiddenException if the access is unauthorized * @throws io.taskmonk.clientexceptions.NotFoundException if object not found * @throws io.taskmonk.clientexceptions.InternalServerError if an internal server error occurs * @throws io.taskmonk.clientexceptions.UnhandledException if unhandled exception occurs */ public String createBatch(String projectId, String batchName) throws Exception { URIBuilder builder = new URIBuilder(httpHost.toString() + "/api/project/" + projectId + "/batch"); HttpPost post = new HttpPost(builder.build()); post.addHeader("Content-type", "application/json"); NewBatchData newBatchData = new NewBatchData(batchName); ObjectMapper mapper = new ObjectMapper(); String body = mapper.writeValueAsString(newBatchData); logger.trace("batch create content = {}", body); StringEntity stringEntity = new StringEntity(body); post.setEntity(stringEntity); return invoke(post, Id.class).id; } private <T> T invoke(HttpUriRequest request, Class<T> clazz) throws Exception { return invoke(request, clazz, true); } private <T> T invoke(HttpUriRequest request, Class<T> clazz, 
Boolean addAuthorization) throws Exception { if (addAuthorization) { request.addHeader("Authorization", "Bearer " + getTokenResponse().getAccess_token()); } CloseableHttpAsyncClient httpclient = HttpAsyncClients.custom() .setProxy(proxyHost) .useSystemProperties() .setRedirectStrategy(new RedirectStrategy()) .build(); httpclient.start(); Future<HttpResponse> httpResponse = httpclient.execute(request, null); logger.trace("Invoking : {} ", request); HttpResponse response = httpResponse.get(); try { if (response.getStatusLine().getStatusCode() == StatusConstants.StatusCode.OK.getCode() || response.getStatusLine().getStatusCode() == StatusConstants.StatusCode.CREATED.getCode()) { String content = EntityUtils.toString(response.getEntity()); logger.trace("content = {}", content); ObjectMapper mapper = new ObjectMapper(); T result = mapper.readValue(content, clazz); logger.trace("result {}", result); return result; } else { throw handleException(response.getStatusLine().getStatusCode(), response.getStatusLine().getReasonPhrase()); } } finally { httpclient.close(); } } /** * update the details of an existing batch * @param batchId - id of an existing batch * @param batchName - name of new batch * @param priority - priority of batch * @param comments - comments in new batch * @param notifications - notifications of a new batch * @return {@link ApiResponse} - returns the id of the updated batch * @throws io.taskmonk.clientexceptions.ForbiddenException if the access is unauthorized * @throws io.taskmonk.clientexceptions.NotFoundException if object not found * @throws io.taskmonk.clientexceptions.InternalServerError if an internal server error occurs * @throws io.taskmonk.clientexceptions.UnhandledException if unhandled exception occurs */ public ApiResponse editBatch(String batchId, String batchName, Integer priority, String comments, List<Notification> notifications) throws Exception{ URIBuilder builder = new URIBuilder(httpHost.toString() + "/api/batch/" + batchId + 
"/edit"); HttpPut put = new HttpPut(builder.build()); put.addHeader("Content-type", "application/json"); NewBatchData newBatchData = new NewBatchData(batchName); newBatchData.setComments(comments); newBatchData.setNotifications(notifications); newBatchData.setPriority(priority); ObjectMapper mapper = new ObjectMapper(); String body = mapper.writeValueAsString(newBatchData); logger.debug("update batch content {} ", body); StringEntity entity = new StringEntity(body); put.setEntity(entity); return invoke(put, ApiResponse.class); } /** * update the state for a batch * @param batchId - id of an existing batch * @param priority - new priority for the existing batch. * @return {@link ApiResponse} - returns the id of the updated batch * @throws io.taskmonk.clientexceptions.ForbiddenException if the access is unauthorized * @throws io.taskmonk.clientexceptions.NotFoundException if object not found * @throws io.taskmonk.clientexceptions.InternalServerError if an internal server error occurs * @throws io.taskmonk.clientexceptions.UnhandledException if unhandled exception occurs */ public String updateBatchPriority(String batchId, Integer priority) throws Exception { URIBuilder builder = new URIBuilder(httpHost.toString() + "/api/batch/" + batchId + "/edit"); HttpPut put = new HttpPut(builder.build()); put.addHeader("Content-type", "application/json"); logger.debug("update batch priority {} ", priority); NewBatchData newBatchData = new NewBatchData(); ObjectMapper mapper = new ObjectMapper(); newBatchData.setPriority(priority); String body = mapper.writeValueAsString(newBatchData); logger.debug("update batch content {} ", body); StringEntity entity = new StringEntity(body); put.setEntity(entity); return invoke(put, Id.class).id; } /** * update the state for a batch * @param batchId - id of an existing batch * @param state - new state for the String. 
Allowed - CANCELLED * @return {@link ApiResponse} - returns the id of the updated batch * @throws io.taskmonk.clientexceptions.ForbiddenException if the access is unauthorized * @throws io.taskmonk.clientexceptions.NotFoundException if object not found * @throws io.taskmonk.clientexceptions.InternalServerError if an internal server error occurs * @throws io.taskmonk.clientexceptions.UnhandledException if unhandled exception occurs */ public ApiResponse updateBatchState(String batchId, BatchState state) throws Exception{ URIBuilder builder = new URIBuilder(httpHost.toString() + "/api/batch/" + batchId + "/state"); HttpPut put = new HttpPut(builder.build()); put.addHeader("Content-type", "application/json"); Map<String, String> params = new HashMap<String, String>(); params.put("state", state.name()); ObjectMapper mapper = new ObjectMapper(); String body = mapper.writeValueAsString(params); logger.debug("update batch content {} ", body); StringEntity entity = new StringEntity(body); put.setEntity(entity); return invoke(put, ApiResponse.class); } /** * Create a new batch in an existing project and add tasks to it * @param batchName - name of the batch to be created * @param file - file of the tasks to be added * @return {@link TaskImportResponse} returns the task import response * @throws io.taskmonk.clientexceptions.ForbiddenException if the access is unauthorized * @throws io.taskmonk.clientexceptions.NotFoundException if object not found * @throws io.taskmonk.clientexceptions.InternalServerError if an internal server error occurs * @throws io.taskmonk.clientexceptions.UnhandledException if unhandled exception occurs */ public TaskImportResponse uploadTasks(String projectId ,String batchName, File file) throws Exception { logger.debug("Uploading tasks to batch {}", batchName); String batchId = createBatch(projectId, batchName); String path = file.getAbsolutePath(); String fileType = FilenameUtils.getExtension(path); byte[] bytes = Files.readAllBytes(file.toPath()); 
ByteArrayOutputStream arrOutputStream = new ByteArrayOutputStream(); GZIPOutputStream zipOutputStream = new GZIPOutputStream(arrOutputStream); zipOutputStream.write(bytes); zipOutputStream.close(); arrOutputStream.close(); byte[] output = arrOutputStream.toByteArray(); String encoded = Base64.getEncoder().encodeToString(output); URIBuilder builder = new URIBuilder(httpHost.toString() + "/api/batch/" + batchId + "/tasks/import"); builder.addParameter("fileType", fileType); HttpPost post = new HttpPost(builder.build()); StringEntity stringEntity = new StringEntity(encoded); post.setEntity(stringEntity); TaskImportResponse result = invoke(post, TaskImportResponse.class); logger.debug("Upload task id = {}", result.job_id); result.setBatchId(batchId); return result; } /** * Create a new batch in an existing project and add tasks to it from an accessible url * @param batchName - name of the batch to be created * @param taskUrl - url of the file from which the tasks will be imported * @param fileType - type of file from which the tasks would be fetched * @return {@link TaskImportResponse} - returns the task import response * @throws io.taskmonk.clientexceptions.ForbiddenException if the access is unauthorized * @throws io.taskmonk.clientexceptions.NotFoundException if object not found * @throws io.taskmonk.clientexceptions.InternalServerError if an internal server error occurs * @throws io.taskmonk.clientexceptions.UnhandledException if unhandled exception occurs */ public TaskImportResponse uploadTasksUrl(String projectId, String batchName, String taskUrl, String fileType) throws Exception { String batchId = createBatch(projectId, batchName); URIBuilder builder = new URIBuilder(httpHost.toString() + "/api/project/" + projectId + "/batch/" + batchId + "/tasks/import/url"); HttpPost post = new HttpPost(builder.build()); post.addHeader("Content-type", "application/json"); ImportUrl importUrl = new ImportUrl(taskUrl, fileType); ObjectMapper mapper = new ObjectMapper(); 
StringEntity entity = new StringEntity(mapper.writeValueAsString(importUrl)); post.setEntity(entity); TaskImportResponse result = invoke(post, TaskImportResponse.class); result.setBatchId(batchId); return result; } /** * Add tasks to an existing batch * @param batchId - batch id of an existing batch to which the tasks are to be added * @param file - file of the tasks to be added * @return {@link TaskImportResponse} - returns the task import response * @throws io.taskmonk.clientexceptions.ForbiddenException if the access is unauthorized * @throws io.taskmonk.clientexceptions.NotFoundException if object not found * @throws io.taskmonk.clientexceptions.InternalServerError if an internal server error occurs * @throws io.taskmonk.clientexceptions.UnhandledException if unhandled exception occurs */ public TaskImportResponse uploadTasksToBatch(String batchId, File file) throws Exception { String path = file.getAbsolutePath(); String fileType = FilenameUtils.getExtension(path); byte[] bytes = Files.readAllBytes(file.toPath()); ByteArrayOutputStream arrOutputStream = new ByteArrayOutputStream(); GZIPOutputStream zipOutputStream = new GZIPOutputStream(arrOutputStream); zipOutputStream.write(bytes); zipOutputStream.close(); arrOutputStream.close(); byte[] output = arrOutputStream.toByteArray(); String encoded = Base64.getEncoder().encodeToString(output); URIBuilder builder = new URIBuilder(httpHost.toString() + "/api/batch/" + batchId + "/tasks/import"); builder.addParameter("fileType", fileType); HttpPost post = new HttpPost(builder.build()); StringEntity stringEntity = new StringEntity(encoded); post.setEntity(stringEntity); TaskImportResponse result = invoke(post, TaskImportResponse.class); logger.debug("Task upload job id = {}", result.job_id); return result; } /** * Add tasks to an existing batch from an accessible url * @param batchId - batch id of the batch to which the tasks are to be added * @param taskUrl - url of the file from which the tasks would be imported * 
@param fileType - type of file from which the tasks would be fetched * @return {@link TaskImportResponse} - returns the task import response * @throws io.taskmonk.clientexceptions.ForbiddenException if the access is unauthorized * @throws io.taskmonk.clientexceptions.NotFoundException if object not found * @throws io.taskmonk.clientexceptions.InternalServerError if an internal server error occurs * @throws io.taskmonk.clientexceptions.UnhandledException if unhandled exception occurs * */ public TaskImportResponse uploadTasksUrlToBatch(String batchId, String taskUrl, String fileType) throws Exception { URIBuilder builder = new URIBuilder(httpHost.toString() + "/api/batch/" + batchId + "/tasks/import/url"); HttpPost post = new HttpPost(builder.build()); post.addHeader("Content-type", "application/json"); ObjectMapper mapper = new ObjectMapper(); ImportUrl importUrl = new ImportUrl(taskUrl, fileType); StringEntity entity = new StringEntity(mapper.writeValueAsString(importUrl)); post.setEntity(entity); return invoke(post, TaskImportResponse.class); } /** * Add an external task * @param batchId * @param task {@link Task} - the task to be added * @return task id of the newly created task * @throws io.taskmonk.clientexceptions.ForbiddenException if the access is unauthorized * @throws io.taskmonk.clientexceptions.NotFoundException if object not found * @throws io.taskmonk.clientexceptions.InternalServerError if an internal server error occurs * @throws io.taskmonk.clientexceptions.UnhandledException if unhandled exception occurs * */ public String addTask(String batchId, Task task) throws Exception { URIBuilder builder = new URIBuilder(httpHost.toString() + "/api/batch/" + batchId + "/task"); ObjectMapper mapper = new ObjectMapper(); String body = mapper.writeValueAsString(task); StringEntity entity = new StringEntity(body); HttpPost post = new HttpPost(builder.build()); post.addHeader("Content-type", "application/json"); post.setEntity(entity); return invoke(post, 
Id.class).id; } /** * Add tasks to batch * @param batchId * @param tasks {@link Task} list of tasks * @return Job Id for the import task * @throws Exception */ public String uploadTasks(String batchId, List<Task> tasks) throws Exception { String url = "/api/batch/"+ batchId + "/tasks/import/dictionary"; URIBuilder builder = new URIBuilder(httpHost.toString() + url); ObjectMapper mapper = new ObjectMapper(); String input = mapper.writeValueAsString(tasks); StringEntity entity = new StringEntity(input); GzipCompressingEntity body = new GzipCompressingEntity(entity); HttpPost post = new HttpPost(builder.build()); post.setEntity(body); post.addHeader("Authorization", "Bearer " + getTokenResponse().getAccess_token()); CloseableHttpClient httpclient = HttpClients.custom() .setProxy(proxyHost) .useSystemProperties() .setRedirectStrategy(new LaxRedirectStrategy()) .build(); HttpResponse response = httpclient.execute(post); logger.trace("Invoking : {} ", post); try { if (response.getStatusLine().getStatusCode() == StatusConstants.StatusCode.OK.getCode() || response.getStatusLine().getStatusCode() == StatusConstants.StatusCode.CREATED.getCode()) { String content = EntityUtils.toString(response.getEntity()); Map<String, String> inputResult = mapper.reader() .forType(new TypeReference<Map<String, String>>() {}) .readValue(content); logger.debug("Input ids = " + inputResult); return inputResult.get("job_id"); } else { throw handleException(response.getStatusLine().getStatusCode(), response.getStatusLine().getReasonPhrase()); } } finally { httpclient.close(); } } private void waitForCompletion(String jobId) throws Exception { JobProgressResponse jobProgressResponse = getJobProgress(jobId); while (!jobProgressResponse.isCompleted()) { Thread.sleep(5000); jobProgressResponse = getJobProgress(jobId); } return; } /** * Get the batch output in a local file path * @param batchId - batch id of an existing batch * @param outputFormat output format for the file - "CSV" or "Excel" * 
@param outputPath - path where the output file should be created * @throws io.taskmonk.clientexceptions.ForbiddenException if the access is unauthorized * @throws io.taskmonk.clientexceptions.NotFoundException if object not found * @throws io.taskmonk.clientexceptions.InternalServerError if an internal server error occurs * @throws io.taskmonk.clientexceptions.UnhandledException if unhandled exception occurs * */ public void getBatchOutput(String batchId, String outputFormat, String outputPath) throws Exception { String url = "/api/batch/" + batchId + "/output"; List<NameValuePair> parameters = new ArrayList<NameValuePair>(); parameters.add(new BasicNameValuePair("output_format", outputFormat)); Map<String, List<String>> fieldNames = new HashMap<String, List<String>>(); fieldNames.put("field_names", new ArrayList<String>()); ObjectMapper mapper = new ObjectMapper(); String content = mapper.writeValueAsString(fieldNames); HttpPost post = getHttpPost(url, parameters, ContentType.APPLICATION_JSON); post.setEntity(new StringEntity(content)); BatchOutput batchOutput = invoke(post, BatchOutput.class); waitForCompletion(batchOutput.getJobId()); downloadFile(batchOutput.getFileUrl(), outputPath); } /** * * Get the batch output as a dictionary . The output is page of tasks. Each task is a Map with the * field and its value. * @param batchId * @return {@link Page} A page of task and query params. Default of 500 tasks are returned * @throws Exception */ public Page<Task> getBatchOutputDictionary(String batchId) throws Exception { return getBatchOutputDictionary(batchId, null); } /** * * Get the batch output as a dictionary . The output is page of tasks. Each task is a Map with the * field and its value. 
* @param batchId * @param taskQueryParams {@link TaskQueryParams} query params for task retrieval * @return {@link Page} A list of tasks with the field and values * @throws Exception */ public Page<Task> getBatchOutputDictionary(String batchId, TaskQueryParams taskQueryParams) throws Exception { String url = "/api/batch/"+ batchId + "/output/dictionary"; URIBuilder builder = new URIBuilder(httpHost.toString() + url); List<NameValuePair> parameters = new ArrayList<NameValuePair>(); if (taskQueryParams != null) { parameters = taskQueryParams.getParameters(); } logger.trace("Parameters = " + parameters); HttpPost post = getHttpPost(url, parameters, ContentType.APPLICATION_FORM_URLENCODED); // String input = mapper.writeValueAsString(parameters); // StringEntity entity = new StringEntity(input); // logger.trace("Entity = " + entity); // HttpPost post = new HttpPost(builder.build()); post.setEntity(new UrlEncodedFormEntity(parameters)); // post.setEntity(entity); post.addHeader("Authorization", "Bearer " + getTokenResponse().getAccess_token()); // post.addHeader("Content-Type", URLEncodedUtils.CONTENT_TYPE); post.addHeader(ACCEPT_ENCODING, "gzip"); CloseableHttpAsyncClient httpclient = HttpAsyncClients.custom() .setProxy(proxyHost) .useSystemProperties() .setRedirectStrategy(new LaxRedirectStrategy()) .build(); httpclient.start(); Future<HttpResponse> httpResponse = httpclient.execute(post, null); logger.trace("Invoking : {} ", post); HttpResponse response = httpResponse.get(); try { if (response.getStatusLine().getStatusCode() == StatusConstants.StatusCode.OK.getCode() || response.getStatusLine().getStatusCode() == StatusConstants.StatusCode.CREATED.getCode()) { GzipDecompressingEntity gzipEntity = new GzipDecompressingEntity(response.getEntity()); InputStream content = gzipEntity.getContent(); logger.trace("Response content = " + content ); ObjectMapper mapper = new ObjectMapper(); Page<Task> exportResult = mapper.reader() .forType(new TypeReference<Page<Task>>() {}) 
.readValue(content); logger.trace("Export result = " + exportResult.getItems()); return exportResult; } else { throw handleException(response.getStatusLine().getStatusCode(), response.getStatusLine().getReasonPhrase()); } } finally { httpclient.close(); } } private Exception handleException(int statusCode, String reasonPhrase) { logger.error("status code = {}; reason = {}", statusCode, reasonPhrase); if(StatusConstants.StatusCode.FORBIDDEN.getCode() == statusCode) return new ForbiddenException(reasonPhrase); else if(StatusConstants.StatusCode.UNAUTHORIZED.getCode() == statusCode) return new ForbiddenException(reasonPhrase); else if(StatusConstants.StatusCode.INTERNALSERVERERROR.getCode() == statusCode) return new InternalServerError(reasonPhrase); else if(StatusConstants.StatusCode.NOTFOUND.getCode() == statusCode) return new NotFoundException(reasonPhrase); else return new UnhandledException("Got status code : " + statusCode + "; reason : " + reasonPhrase); } /** * Get the progress of a job * @param jobId - job id of the job * @return {@link JobProgressResponse} - returns the job progress response * @throws io.taskmonk.clientexceptions.ForbiddenException if the access is unauthorized * @throws io.taskmonk.clientexceptions.NotFoundException if object not found * @throws io.taskmonk.clientexceptions.InternalServerError if an internal server error occurs * @throws io.taskmonk.clientexceptions.UnhandledException if unhandled exception occurs * */ public JobProgressResponse getJobProgress(String jobId) throws Exception { String url = "/api/job/" + jobId + "/status"; HttpGet httpGet = getHttpGet(url); JobProgressResponse getResponse = invoke(httpGet, JobProgressResponse.class); return getResponse; } /** * Get the progress of batch * @param batchId - id of the batch * @return {@link JobProgressResponse} - returns the job progress response * @throws io.taskmonk.clientexceptions.ForbiddenException if the access is unauthorized * @throws 
io.taskmonk.clientexceptions.NotFoundException if object not found * @throws io.taskmonk.clientexceptions.InternalServerError if an internal server error occurs * @throws io.taskmonk.clientexceptions.UnhandledException if unhandled exception occurs * */ public JobProgressResponse getJobProgressBatch(String batchId) throws Exception { String url = "/api/batch/" + batchId + "/job/status"; List<NameValuePair> parameters = new ArrayList<NameValuePair>(); parameters.add(new BasicNameValuePair("input_type", "batch")); HttpGet httpGet = getHttpGet(url, parameters); JobProgressResponse getResponse = invoke(httpGet, JobProgressResponse.class); return getResponse; } /** * Get the status of the batch * @param batchId - id of the batch * @return {@link BatchStatus} - returns the batch status * @throws io.taskmonk.clientexceptions.ForbiddenException if the access is unauthorized * @throws io.taskmonk.clientexceptions.NotFoundException if object not found * @throws io.taskmonk.clientexceptions.InternalServerError if an internal server error occurs * @throws io.taskmonk.clientexceptions.UnhandledException if unhandled exception occurs * */ public BatchStatus getBatchStatus(String batchId) throws Exception { String url = "/api/batch/" + batchId + "/status"; HttpGet httpGet = getHttpGet(url); BatchStatus getResponse = invoke(httpGet, BatchStatus.class); logger.trace("batch status = {}", getResponse); return getResponse; } /** * To check if a process is complete * @param batchId - id of the batch * @return {@link Boolean} - returns true or false depending on completion of process * @throws io.taskmonk.clientexceptions.ForbiddenException if the access is unauthorized * @throws io.taskmonk.clientexceptions.NotFoundException if object not found * @throws io.taskmonk.clientexceptions.InternalServerError if an internal server error occurs * @throws io.taskmonk.clientexceptions.UnhandledException if unhandled exception occurs * */ public Boolean isProcessComplete(String batchId) throws 
Exception { BatchStatus batchStatus = getBatchStatus(batchId); return batchStatus.getCompleted().equals(batchStatus.getTotal()); } /** * To check if the upload is complete or not * @param batchId - id of the batch * @return {@link Boolean} - returns true or false depending upon upload status * @throws io.taskmonk.clientexceptions.ForbiddenException if the access is unauthorized * @throws io.taskmonk.clientexceptions.NotFoundException if object not found * @throws io.taskmonk.clientexceptions.InternalServerError if an intfieldNamesernal server error occurs * @throws io.taskmonk.clientexceptions.UnhandledException if unhandled exception occurs * */ public Boolean isUploadComplete(String batchId) throws Exception { JobProgressResponse jobResponse = getJobProgressBatch(batchId); return jobResponse.isCompleted(); } /** * To delete a specific batch * @param batchId - id of the batch * @return {@link Boolean} - returns the deleted batchId * @throws Exception * */ public Boolean deleteBatch(String batchId) throws Exception { String url = "/api/batch/" + batchId + "/delete"; URIBuilder builder = new URIBuilder(httpHost.toString() + url); HttpDelete delete = new HttpDelete(builder.build()); delete.addHeader("Content-type", "application/json"); String id = invoke(delete, Id.class).id; if (id.equalsIgnoreCase(batchId)) { return true; } return false; } /** * Gets the history of batch state changes. 
* @param batchId : T * @return BatchHistory * @throws Exception */ public List getBatchHistory(String batchId) throws Exception { String url = "/api/batch/" + batchId + "/status/history"; URIBuilder builder = new URIBuilder(httpHost.toString() + url); HttpGet httpGet = new HttpGet(builder.build()); return invoke(httpGet, Page.class).getItems(); } public TaskMonkClient(String server, OAuthClientCredentials credentials) { this.credentials = credentials; this.httpHost = HttpHost.create(server); } public TaskMonkClient(String server, String proxy, OAuthClientCredentials credentials) { this.credentials = credentials; this.httpHost = HttpHost.create(server); if (proxy != null) { this.proxyHost = HttpHost.create(proxy); } } }
0
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk/clientexceptions/ForbiddenException.java
package io.taskmonk.clientexceptions; public class ForbiddenException extends Exception { String message; public ForbiddenException(String message) { this.message= message; } }
0
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk/clientexceptions/InternalServerError.java
package io.taskmonk.clientexceptions; public class InternalServerError extends Exception { String message; public InternalServerError(String message) { this.message= message; } }
0
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk/clientexceptions/NotFoundException.java
package io.taskmonk.clientexceptions; public class NotFoundException extends Exception { String message; public NotFoundException(String message) { this.message= message; } }
0
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk/clientexceptions/StatusConstants.java
package io.taskmonk.clientexceptions; public class StatusConstants { public enum StatusCode { FORBIDDEN(403, "Forbidden"), UNAUTHORIZED(401, "Unauthorised Access"), INTERNALSERVERERROR(500, "An internal server error occured"), NOTFOUND(404, " Object not found"), UNHANDLED(00, "Unhandled exception"), OK(200,"Ok"), CREATED(201, "Created") ; private final int code; private final String display; StatusCode(int code, String display){ this.code = code; this.display = display; } public int getCode() { return this.code; } public String getDisplay() { return this.display; } } }
0
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk/clientexceptions/UnhandledException.java
package io.taskmonk.clientexceptions; public class UnhandledException extends Exception { String message; public UnhandledException(String message) { this.message= message; } }
0
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk/entities/ApiResponse.java
package io.taskmonk.entities;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;

/**
 * Generic API response bean carrying a success flag and a message.
 */
@JsonIgnoreProperties(ignoreUnknown = true)
public class ApiResponse {

    /** Whether the request succeeded. */
    public Boolean status;
    /** Server-supplied message accompanying the status. */
    public String message;

    /** No-arg constructor required for Jackson deserialization. */
    public ApiResponse() {
    }

    public ApiResponse(Boolean status, String message) {
        this.status = status;
        this.message = message;
    }

    @Override
    public String toString() {
        return "status = " + status + "; message = " + message;
    }
}
0
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk/entities/BatchHistory.java
package io.taskmonk.entities;

/**
 * A single entry in the history of state changes of a batch.
 */
public class BatchHistory {
    /** Id of the batch this history entry belongs to. */
    String batch_id;
    /** The batch state recorded by this entry. */
    String state;
    /** Time at which the state change was recorded. (Presumably epoch millis — TODO confirm.) */
    Long last_updated;
    /** The userId of the user, if they performed an explicit update. */
    String user_id;
    /** Any comment recorded at the time of updating the batch. */
    String comment;

    /** No-arg constructor required for deserialization. */
    public BatchHistory() {
    }

    public BatchHistory(String batch_id, String state, Long last_updated, String user_id, String comment) {
        this.batch_id = batch_id;
        this.state = state;
        this.last_updated = last_updated;
        // Fix: these two parameters were previously accepted but never assigned.
        this.user_id = user_id;
        this.comment = comment;
    }
}
0
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk/entities/BatchOutput.java
package io.taskmonk.entities;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;

/**
 * Response of a batch-output request: the job to poll and the URL of the generated file.
 */
@JsonIgnoreProperties(ignoreUnknown = true)
public class BatchOutput {

    // URL from which the exported file can be downloaded.
    String fileUrl;
    // Id of the export job, used to poll for completion.
    String jobId;

    /** No-arg constructor required for Jackson deserialization. */
    public BatchOutput() {
    }

    @JsonProperty("job_id")
    public String getJobId() {
        return jobId;
    }

    @JsonProperty("job_id")
    public void setJobId(String jobId) {
        this.jobId = jobId;
    }

    @JsonProperty("file_url")
    public String getFileUrl() {
        return fileUrl;
    }

    @JsonProperty("file_url")
    public void setFileUrl(String fileUrl) {
        this.fileUrl = fileUrl;
    }

    @Override
    public String toString() {
        return "file_url = " + fileUrl + "; job_id = " + jobId;
    }
}
0
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk/entities/BatchState.java
package io.taskmonk.entities; /** * * Returns a state for the batch. The supported state are: *<p></p> * "ACTIVE" - The batch is being worked on by the analysts *<p></p> * "PENDING" - Work has not started on the batch *<p></p> * "SCHEDULED" - A start time for the batch has been set and will be worked on after that date * <p></p> * "CANCELLED" - The execution of the batch has been cancelled by the customer * <p></p> * "INACTIVE" - The execution of the batch has been set to inactive * <p></p> * "DELETED" - The batch has been deleted and cannot be worked upon anymore * <p></p> * "ARCHIVED" - The batch has been archived and cannot be worked upon anymore * <p></p> * "COMPLETED" - Work on this batch has been completed */ public enum BatchState { PENDING, SCHEDULED, ACTIVE, CANCELLED, INACTIVE, DELETED, ARCHIVED, COMPLETED }
0
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk/entities/BatchStatus.java
package io.taskmonk.entities;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;

/**
 * Snapshot of the progress of a batch: task counts by state plus the overall {@link BatchState}.
 */
@JsonIgnoreProperties(ignoreUnknown = true)
public class BatchStatus {

    /** Number of tasks currently in progress. */
    Integer in_progress;
    /** Number of tasks in the batch that have been completed. */
    Integer completed;
    /** Number of tasks rejected due to missing mandatory fields. */
    Integer rejected;
    /** Total number of valid tasks in the batch. */
    Integer total;
    /** Number of tasks pending QC. */
    Integer qc_pending;
    /** Current {@link BatchState} of the batch. */
    BatchState state;
    /** Task completion percentage. */
    Integer percentage_completed;

    /** No-arg constructor required for Jackson deserialization. */
    public BatchStatus() {
    }

    public BatchStatus(Integer in_progress, Integer completed, Integer total, Integer qc_pending,
                       Integer rejected, Integer percentage_completed) {
        this.in_progress = in_progress;
        this.completed = completed;
        this.total = total;
        this.qc_pending = qc_pending;
        this.rejected = rejected;
        this.percentage_completed = percentage_completed;
    }

    public Integer getCompleted() {
        return completed;
    }

    public void setCompleted(Integer completed) {
        this.completed = completed;
    }

    public Integer getTotal() {
        return total;
    }

    public void setTotal(Integer total) {
        this.total = total;
    }

    public BatchState getState() {
        return state;
    }

    public void setState(BatchState state) {
        this.state = state;
    }

    public Integer getIn_progress() {
        return in_progress;
    }

    public Integer getQc_pending() {
        return qc_pending;
    }

    public Integer getRejected() {
        return rejected;
    }

    public Integer getPercentage_completed() {
        return percentage_completed;
    }

    public void setPercentage_completed(Integer percentage_completed) {
        this.percentage_completed = percentage_completed;
    }

    @Override
    public String toString() {
        return "Total = " + total + "; Completed = " + completed + "; In Progress = " + in_progress
                + "; state = " + state + "; Complete Percentage = " + percentage_completed + ";";
    }
}
0
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk/entities/Id.java
package io.taskmonk.entities;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;

/**
 * Minimal response bean wrapping a single server-assigned id.
 */
@JsonIgnoreProperties(ignoreUnknown = true)
public class Id {

    public String id;

    /** No-arg constructor required for Jackson deserialization. */
    public Id() {
    }

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    @Override
    public String toString() {
        return "id = " + id;
    }
}
0
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk/entities/ImportUrl.java
package io.taskmonk.entities;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;

/**
 * Request body for importing tasks from a remote file URL.
 */
@JsonIgnoreProperties(ignoreUnknown = true)
public class ImportUrl {

    private String file_url;
    private String file_type;

    /**
     * Sets the import settings.
     * @param fileUrl  - path to url with the input file
     * @param fileType - file type - CSV or Excel
     */
    public ImportUrl(String fileUrl, String fileType) {
        this.file_url = fileUrl;
        this.file_type = fileType;
    }

    public String getFile_url() {
        return file_url;
    }

    public void setFile_url(String file_url) {
        this.file_url = file_url;
    }

    public String getFile_type() {
        return file_type;
    }

    public void setFile_type(String file_type) {
        this.file_type = file_type;
    }
}
0
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk/entities/JobProgressResponse.java
package io.taskmonk.entities;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;

/**
 * Bean providing the progress of a server-side job.
 */
@JsonIgnoreProperties(ignoreUnknown = true)
public class JobProgressResponse {

    /** Number of items processed so far. */
    public Integer completed;
    /** Total number of items in the job. */
    public Integer total;
    /** Percentage of the job completed. */
    public Integer percentage;
    /** Number of items rejected. */
    public Integer rejected;

    /** No-arg constructor required for Jackson deserialization. */
    public JobProgressResponse() {
    }

    public JobProgressResponse(Integer completed, Integer total, Integer percentage, Integer rejected) {
        this.completed = completed;
        this.total = total;
        this.percentage = percentage;
        this.rejected = rejected;
    }

    /**
     * A job is complete when the completed count equals the total.
     * NOTE(review): throws NullPointerException if {@code total} is null — confirm the
     * server always populates it.
     */
    public Boolean isCompleted() {
        return total.equals(completed);
    }

    @Override
    public String toString() {
        return "completed = " + completed + "; total = " + total + "; rejected = " + rejected
                + "; percentage = " + percentage;
    }
}
0
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk/entities/NewBatchData.java
package io.taskmonk.entities;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

/**
 * Bean used for creating a new batch and editing batch properties.
 * Setters return {@code this} so calls can be chained fluently.
 */
@JsonIgnoreProperties(ignoreUnknown = true)
public class NewBatchData {

    /** The name of the batch. Mandatory when creating a new batch. */
    String batch_name;
    /** Optional priority for the batch. Higher priority batches are executed first. */
    Short priority;
    /** Instruction displayed to the analysts as they work on tasks in the batch. */
    String comments = "";
    /** A start time in the future for when work should start on the batch. */
    Date startTime = new Date();
    /** A set of notifications for milestones on the batch. */
    public List<Notification> notifications = new ArrayList<Notification>();

    public NewBatchData(String batch_name) {
        this.batch_name = batch_name;
    }

    /** No-arg constructor required for Jackson deserialization. */
    public NewBatchData() {
    }

    @JsonProperty("batch_name")
    public String getBatchName() {
        return batch_name;
    }

    @JsonProperty("batch_name")
    public NewBatchData setBatchName(String batch_name) {
        this.batch_name = batch_name;
        return this;
    }

    @JsonProperty("start_time")
    public Date getStartTime() {
        return startTime;
    }

    @JsonProperty("start_time")
    public NewBatchData setStartTime(Date startTime) {
        this.startTime = startTime;
        return this;
    }

    public Short getPriority() {
        return priority;
    }

    public NewBatchData setPriority(Integer priority) {
        this.priority = priority.shortValue();
        return this;
    }

    public String getComments() {
        return comments;
    }

    public NewBatchData setComments(String comments) {
        this.comments = comments;
        return this;
    }

    public List<Notification> getNotifications() {
        return notifications;
    }

    public NewBatchData setNotifications(List<Notification> notifications) {
        this.notifications = notifications;
        return this;
    }
}
0
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk/entities/Notification.java
package io.taskmonk.entities;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import java.util.Map;

/**
 * A milestone notification attached to a batch: a type plus free-form metadata.
 */
@JsonIgnoreProperties(ignoreUnknown = true)
public class Notification {

    String notificationType;
    Map<String, String> metaData;

    public Notification(String notificationType, Map<String, String> metaData) {
        this.notificationType = notificationType;
        this.metaData = metaData;
    }

    public String getNotificationType() {
        return notificationType;
    }

    public void setNotificationType(String notificationType) {
        this.notificationType = notificationType;
    }

    public Map<String, String> getMetaData() {
        return metaData;
    }

    public void setMetaData(Map<String, String> metaData) {
        this.metaData = metaData;
    }
}
0
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk/entities/Page.java
package io.taskmonk.entities;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.List;

/**
 * One page of a paginated query result.
 * @param <T> the type of the items on the page
 */
@JsonIgnoreProperties(ignoreUnknown = true)
public class Page<T> {

    /** The page number for the paginated result. The first page number is 1. */
    Integer page;
    /**
     * The page size that was set in the query. Can be used to check whether more items
     * are present by matching against the items size.
     */
    Integer pageSize;
    /** The total number of items that matched the query. */
    Integer total;
    /** The list of items returned for the query. */
    List<T> items;

    @JsonProperty("page_number")
    public Integer getPageNumber() {
        return page;
    }

    @JsonProperty("page_number")
    public void setPageNumber(Integer pageNumber) {
        this.page = pageNumber;
    }

    @JsonProperty("page_size")
    public Integer getPageSize() {
        return pageSize;
    }

    @JsonProperty("page_size")
    public void setPageSize(Integer pageSize) {
        this.pageSize = pageSize;
    }

    public Integer getTotal() {
        return total;
    }

    public void setTotal(Integer total) {
        this.total = total;
    }

    public List<T> getItems() {
        return items;
    }

    public void setItems(List<T> items) {
        this.items = items;
    }
}
0
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk/entities/Task.java
package io.taskmonk.entities;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;

/**
 * Holds the data for a single Task. This is used for both uploading data to Taskmonk and for
 * receiving updates from Taskmonk.
 */
@JsonIgnoreProperties(ignoreUnknown = true)
public class Task {

    static final Logger logger = LoggerFactory.getLogger(Task.class);

    /**
     * The batch id for the task. This is set only when retrieving tasks from multiple projects. This
     * need not be set when uploading tasks to Taskmonk.
     */
    public String batchId;

    /**
     * The external id is used for correlation when sending and receiving tasks. The same externalId will be set
     * when Taskmonk sends an update for the task back. This is optional.
     */
    public String externalId;

    /**
     * The data for the task. The key would be the field name and the value would be the field value. When Taskmonk
     * sends back the updated task, this would contain the output field values also.
     */
    public Map<String, String> data;

    /**
     * This flag indicates that the data is encoded. Taskmonk will use this to determine whether the data needs to be
     * decoded.
     */
    public boolean isEncoded;

    /** No-arg constructor required for Jackson deserialization. */
    public Task() {
    }

    /**
     * Builds a task, URL-encoding each data value when {@code encode} is true.
     * {@code isEncoded} mirrors the {@code encode} argument.
     */
    public Task(String batchId, String externalId, Map<String, String> data, Boolean encode) {
        this.batchId = batchId;
        this.externalId = externalId;
        this.data = (encode) ? encodeData(data) : data;
        this.isEncoded = encode;
    }

    /** Convenience constructor: data is always URL-encoded ({@code encode} = true). */
    public Task(String batchId, String externalId, Map<String, String> data) {
        this(batchId, externalId, data, true);
    }

    /**
     * Builds a task without a batch id. The data is stored as-is (NOT encoded) and
     * {@code isEncoded} stays false.
     */
    public Task(String externalId, Map<String, String> data) {
        this.externalId = externalId;
        this.data = data;
    }

    @JsonProperty("batch_id")
    public String getBatchId() {
        return batchId;
    }

    @JsonProperty("batch_id")
    public void setBatchId(String batchId) {
        this.batchId = batchId;
    }

    @JsonProperty("external_id")
    public String getExternalId() {
        return externalId;
    }

    @JsonProperty("external_id")
    public void setExternalId(String externalId) {
        this.externalId = externalId;
    }

    public Map<String, String> getData() {
        return data;
    }

    // NOTE(review): this setter always URL-encodes the values but does not update
    // isEncoded. Jackson also invokes it during deserialization, so data received
    // from the server would be re-encoded here — confirm this is intentional.
    public void setData(Map<String, String> data) {
        this.data = encodeData(data);
    }

    @Override
    public String toString() {
        return "Task: " + batchId + " : " + externalId;
    }

    // URL-encodes every value in the map using UTF-8. An entry whose value fails to
    // encode is logged and silently omitted from the result.
    private Map<String, String> encodeData(Map<String, String> data) {
        Map<String, String> escapedData = new HashMap<String, String>();
        for (Map.Entry<String, String> entry: data.entrySet()) {
            try {
                escapedData.put(entry.getKey(), URLEncoder.encode(entry.getValue(), StandardCharsets.UTF_8.toString()));
            } catch (UnsupportedEncodingException e) {
                logger.error(entry.getValue(), e);
            }
        }
        return escapedData;
    }
}
0
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk/entities/TaskImportResponse.java
package io.taskmonk.entities; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonProperty; /** * The response object on importing tasks to a batch. Contains the job_id which should be used in a consequent * {@link io.taskmonk.client.TaskMonkClient#getJobProgress(String)} to monitor status of the upload */ @JsonIgnoreProperties(ignoreUnknown = true) public class TaskImportResponse { public String job_id; public String batch_id; // For backward compatibility public String batchId; @JsonProperty("batch_id") public String getBatchId() { return batch_id; } @JsonProperty("batch_id") public void setBatchId(String batch_id) { this.batch_id = batch_id; this.batchId = batch_id; } @JsonProperty("job_id") public String getJobId() { return job_id; } @JsonProperty("job_id") public void setJobId(String job_id) { this.job_id = job_id; } public TaskImportResponse() { } public TaskImportResponse(String job_id) { this.job_id = job_id; } public TaskImportResponse(String job_id, String batch_id) { this.job_id = job_id; this.batchId = batch_id; this.batch_id = batch_id; } @Override public String toString() { return "job_id = " + job_id + "; batchId = {}" + batchId + "; batch_id = " + batch_id; } }
0
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk/entities/TaskQueryParams.java
package io.taskmonk.entities;

import org.apache.commons.lang3.StringUtils;
import org.apache.http.NameValuePair;
import org.apache.http.message.BasicNameValuePair;

import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

/**
 * Describes which tasks should be retrieved from a batch. Setters return {@code this}
 * so calls can be chained fluently; {@link #getParameters()} converts the populated
 * filters into HTTP query parameters.
 */
public class TaskQueryParams {

    /** The list of fields to be retrieved. If empty, all fields are returned. */
    List<String> fields = new ArrayList<String>();

    /** Only tasks completed after this time are retrieved. Null means no lower bound. */
    Date taskCompleteStart;

    /** Only tasks completed before this time are retrieved. Null means no upper bound. */
    Date taskCompleteEnd;

    /**
     * Retrieve tasks that match the state. Allowed values are:
     * <p></p>
     * "COMPLETED"
     * <p></p>
     * "PENDING"
     * <p></p>
     * If not set, all tasks are returned.
     */
    String taskState;

    /** The number of tasks to be returned. Defaults to 500 when unset. */
    Integer pageSize;

    /**
     * The page number of the tasks to be returned. Defaults to 1 when unset; set it to one
     * more than the page number returned in the previous query result.
     */
    Integer pageNumber;

    // Date pattern used when serializing the completion-time bounds.
    String format = "dd-MM-yyyy";
    SimpleDateFormat df = new SimpleDateFormat(format);

    public List<String> getFields() {
        return fields;
    }

    public TaskQueryParams setFields(List<String> fields_names) {
        this.fields = fields_names;
        return this;
    }

    public Date getTaskCompleteStart() {
        return taskCompleteStart;
    }

    public TaskQueryParams setTaskCompleteStart(Date taskCompleteStart) {
        this.taskCompleteStart = taskCompleteStart;
        return this;
    }

    public Date getTaskCompleteEnd() {
        return taskCompleteEnd;
    }

    public TaskQueryParams setTaskCompleteEnd(Date taskCompleteEnd) {
        this.taskCompleteEnd = taskCompleteEnd;
        return this;
    }

    public String getTaskState() {
        return taskState;
    }

    /**
     * Retrieve tasks that match the state. Allowed values are:
     * <p></p>
     * "COMPLETED"
     * <p></p>
     * "PENDING"
     * <p></p>
     * If not set, all tasks are returned.
     */
    public TaskQueryParams setTaskState(String taskState) {
        this.taskState = taskState;
        return this;
    }

    public Integer getPageSize() {
        return pageSize;
    }

    public TaskQueryParams setPageSize(Integer pageSize) {
        this.pageSize = pageSize;
        return this;
    }

    public Integer getPageNumber() {
        return pageNumber;
    }

    public TaskQueryParams setPageNumber(Integer pageNumber) {
        this.pageNumber = pageNumber;
        return this;
    }

    /**
     * Converts the populated filters into HTTP query parameters; unset filters are omitted.
     */
    public List<NameValuePair> getParameters() {
        List<NameValuePair> params = new ArrayList<NameValuePair>();
        if (fields != null && !fields.isEmpty()) {
            for (String fieldName : fields) {
                params.add(new BasicNameValuePair("field_names[]", fieldName));
            }
        }
        if (pageSize != null) {
            params.add(new BasicNameValuePair("page_size", pageSize.toString()));
        }
        if (pageNumber != null) {
            params.add(new BasicNameValuePair("page_no", pageNumber.toString()));
        }
        if (taskCompleteStart != null) {
            params.add(new BasicNameValuePair("task_complete_start", df.format(taskCompleteStart)));
        }
        if (taskCompleteEnd != null) {
            params.add(new BasicNameValuePair("task_complete_end", df.format(taskCompleteEnd)));
        }
        if (taskState != null) {
            params.add(new BasicNameValuePair("task_state", taskState));
        }
        return params;
    }
}
0
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk/http/RedirectStrategy.java
package io.taskmonk.http;

/*
 * ====================================================================
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 * ====================================================================
 *
 * This software consists of voluntary contributions made by many
 * individuals on behalf of the Apache Software Foundation.  For more
 * information on the Apache Software Foundation, please see
 * <http://www.apache.org/>.
 *
 */

import org.apache.http.annotation.Contract;
import org.apache.http.annotation.ThreadingBehavior;
import org.apache.http.client.methods.*;
import org.apache.http.impl.client.DefaultRedirectStrategy;

/**
 * Lax {@link org.apache.http.client.RedirectStrategy} implementation that automatically
 * redirects HEAD, GET, POST, PUT, and DELETE requests. This strategy relaxes restrictions
 * on automatic redirection of non-GET methods imposed by the HTTP specification.
 *
 * @since 4.2
 */
@Contract(threading = ThreadingBehavior.IMMUTABLE)
public class RedirectStrategy extends DefaultRedirectStrategy {

    /** Shared singleton instance; safe to reuse because the class is stateless and immutable. */
    public static final io.taskmonk.http.RedirectStrategy INSTANCE = new io.taskmonk.http.RedirectStrategy();

    /**
     * Redirectable methods. Unlike {@link DefaultRedirectStrategy}, this list also
     * includes PUT and DELETE.
     */
    private static final String[] REDIRECT_METHODS = new String[] {
            HttpGet.METHOD_NAME,
            HttpPost.METHOD_NAME,
            HttpHead.METHOD_NAME,
            HttpDelete.METHOD_NAME,
            HttpPut.METHOD_NAME
    };

    /**
     * Returns {@code true} when the given HTTP method (case-insensitive) is one of the
     * {@link #REDIRECT_METHODS} eligible for automatic redirection.
     */
    @Override
    protected boolean isRedirectable(final String method) {
        for (final String m: REDIRECT_METHODS) {
            if (m.equalsIgnoreCase(method)) {
                return true;
            }
        }
        return false;
    }
}
0
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk/streaming/MessageAction.java
package io.taskmonk.streaming; /** * An indication of how the message should be handled in the queue * <p> * COMPLETE - The message processing is complete and can be removed from the queue *<p> * ABANDON - The message processing failed. The message remains in the queue and will be attempted again */ public enum MessageAction { COMPLETE, ABANDON }
0
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk/streaming/MessageListener.java
package io.taskmonk.streaming; import io.taskmonk.entities.BatchStatus; import io.taskmonk.entities.Task; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Interface for handling messages from Taskmonk */ public interface MessageListener { public static final Logger logger = LoggerFactory.getLogger(MessageListener.class); /** * Handle task update * @param task {@link Task} * @return {@link MessageAction} The action to take on handling message */ public MessageAction onTaskUpdate(Task task); /** * Handle batchStatus * @param batchStatus {@link BatchStatus} * @return {@link MessageAction} The action to take on handling message */ public MessageAction onBatchStatus(BatchStatus batchStatus); /** * Handle any generic message * @param message * @return {@link MessageAction} The action to take on handling message */ public MessageAction onGenericMessage(String message); }
0
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk/streaming/StreamListener.java
package io.taskmonk.streaming;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.microsoft.azure.servicebus.primitives.ServiceBusException;
import io.taskmonk.entities.BatchStatus;
import io.taskmonk.entities.Task;
import io.taskmonk.streaming.azure.MessageHandler;
import io.taskmonk.streaming.impl.MessageStreamListener;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;

/**
 * Class to receive update messages from Taskmonk. The client should add a {@link MessageListener}
 * to process each of the messages. On completion of message processing, the handler should return
 * a {@link MessageAction} which will determine the action to be taken on the message. The
 * semantics provide for At-least-once processing and so the client might receive duplicate
 * messages.
 */
public class StreamListener {

    private static final Logger logger = LoggerFactory.getLogger(StreamListener.class);

    // ObjectMapper is thread-safe once configured; share a single instance instead of
    // allocating a new mapper for every incoming message.
    private static final ObjectMapper MAPPER = new ObjectMapper();

    String queueName;
    String accessKey;
    MessageStreamListener messageStreamListener;

    /**
     * The queue name and access key are specific to a project and will be provided by Taskmonk.
     *
     * @param queueName the queue to listen on
     * @param accessKey shared access key for the queue
     */
    public StreamListener(String queueName, String accessKey) {
        this.queueName = queueName;
        this.accessKey = accessKey;
        messageStreamListener = new MessageStreamListener(queueName, accessKey);
    }

    /**
     * Add a listener for messages from Taskmonk.
     *
     * @param listener {@link MessageListener} an implementation to handle messages received
     * @return true on successfully adding the listener
     * @throws ServiceBusException
     * @throws InterruptedException
     */
    public Boolean addListener(MessageListener listener) throws ServiceBusException, InterruptedException {
        return messageStreamListener.addMessageHandler(new MessageHandler() {
            @Override
            public MessageAction handle(String message) throws IOException {
                // readTree parses the String directly; the previous message.getBytes()
                // depended on the platform default charset.
                JsonNode rootNode = MAPPER.readTree(message);
                JsonNode typeNode = rootNode.get("message_type");
                if (typeNode == null) {
                    // Previously this NPE'd; treat a missing discriminator as a generic message.
                    logger.error("Message has no message_type field");
                    return listener.onGenericMessage(message);
                }
                String messageType = typeNode.asText();
                if (messageType.equalsIgnoreCase("task_update")) {
                    logger.trace("Handling task_update message");
                    JsonNode taskNode = rootNode.get("task");
                    Task task = MAPPER.treeToValue(taskNode, Task.class);
                    return listener.onTaskUpdate(task);
                } else if (messageType.equalsIgnoreCase("batch_status")) {
                    logger.trace("Handling batch_status message");
                    JsonNode node = rootNode.get("batch");
                    BatchStatus batchStatus = MAPPER.treeToValue(node, BatchStatus.class);
                    return listener.onBatchStatus(batchStatus);
                } else {
                    logger.error("Unrecognised message {}", messageType);
                    return listener.onGenericMessage(message);
                }
            }
        });
    }
}
0
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk/streaming/StreamWriter.java
package io.taskmonk.streaming; import com.fasterxml.jackson.databind.ObjectMapper; import io.taskmonk.entities.Task; import io.taskmonk.streaming.impl.MessageStreamWriter; import io.taskmonk.streaming.impl.NewTasks; import java.util.List; import java.util.Map; import java.util.UUID; /** * Class to send messages upstream to TaskMonk */ public class StreamWriter { String queueName; String accessKey; MessageStreamWriter messageStreamWriter; public StreamWriter(String queueName, String accessKey) { this.queueName = queueName; this.accessKey = accessKey; messageStreamWriter = new MessageStreamWriter(queueName, accessKey); } /** * Send a list of new tasks to TaskMonk * @param projectId * @param batchId * @param tasks - Map of key value pairs for the tasks * @return A correlation id for the upload * @throws Exception */ public String send(String projectId, String batchId, List<Task> tasks) throws Exception { NewTasks newTasks = new NewTasks(); newTasks.project_id = projectId; newTasks.batch_id = batchId; newTasks.tasks = tasks; String messageId = UUID.randomUUID().toString(); String label = String.format("%s:%s", projectId, batchId); String content = new ObjectMapper().writeValueAsString(newTasks); messageStreamWriter.send(messageId, label, content); return messageId; } }
0
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk/streaming
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk/streaming/azure/AzureQueueMessageHandler.java
package io.taskmonk.streaming.azure; import com.microsoft.azure.servicebus.*; import io.taskmonk.streaming.MessageAction; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.nio.charset.StandardCharsets; import java.util.concurrent.CompletableFuture; public class AzureQueueMessageHandler implements IMessageHandler { private static final Logger logger = LoggerFactory.getLogger(AzureQueueMessageHandler.class); MessageHandler messageHandler; QueueClient queueClient; AzureQueueMessageHandler(QueueClient queueClient, MessageHandler messageHandler) { this.messageHandler = messageHandler; this.queueClient = queueClient; } @Override public CompletableFuture<Void> onMessageAsync(IMessage message) { logger.trace("Processing {}", message.getLockToken()); switch (message.getMessageBody().getBodyType()) { case BINARY: MessageAction result = null; try { result = messageHandler.handle( new String(message.getMessageBody().getBinaryData().get(0), StandardCharsets.UTF_8)); switch (result) { case ABANDON: logger.trace("Abandoning {}", message.getLockToken()); queueClient.abandon(message.getLockToken()); break; case COMPLETE: logger.trace("Completing {}", message.getLockToken()); queueClient.complete(message.getLockToken()); break; default: logger.trace("Invalid action {}", message.getLockToken()); queueClient.complete(message.getLockToken()); break; } } catch (Exception ex) { logger.error(message.getLockToken().toString(), ex); try { queueClient.deadLetter(message.getLockToken()); } catch (Exception e) { logger.error(message.getLockToken().toString(), e); } } break; default: logger.error("Unhandled message type : " + message.getMessageBody().getBodyType()); try { queueClient.deadLetter(message.getLockToken()); } catch (Exception e) { logger.error(message.getLockToken().toString(), e); } } return CompletableFuture.completedFuture(null); } @Override public void notifyException(Throwable exception, ExceptionPhase phase) { logger.error(phase.toString(), exception); } }
0
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk/streaming
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk/streaming/azure/MessageHandler.java
package io.taskmonk.streaming.azure; import io.taskmonk.streaming.MessageAction; import java.io.IOException; public interface MessageHandler { public MessageAction handle(String message) throws IOException; }
0
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk/streaming
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk/streaming/azure/ServiceBusListener.java
package io.taskmonk.streaming.azure; import com.microsoft.azure.servicebus.IMessageHandler; import com.microsoft.azure.servicebus.MessageHandlerOptions; import com.microsoft.azure.servicebus.QueueClient; import com.microsoft.azure.servicebus.ReceiveMode; import com.microsoft.azure.servicebus.primitives.ConnectionStringBuilder; import com.microsoft.azure.servicebus.primitives.ServiceBusException; import java.time.Duration; public class ServiceBusListener { String queueName; String accessKey; public ServiceBusListener(String queueName, String accessKey) { this.queueName = queueName; this.accessKey = accessKey; } public Boolean addMessageHandler(MessageHandler messageHandler) throws ServiceBusException, InterruptedException { String accessKeyName = "Client"; String connectionString = String.format("Endpoint=sb://taskmonk.servicebus.windows.net/;SharedAccessKeyName=%s;SharedAccessKey=%s;EntityPath=%s", accessKeyName, accessKey, queueName); QueueClient receiveClient = new QueueClient(new ConnectionStringBuilder(connectionString, queueName), ReceiveMode.PEEKLOCK); IMessageHandler azureMessageHandler = new AzureQueueMessageHandler(receiveClient, messageHandler); receiveClient.registerMessageHandler(azureMessageHandler, new MessageHandlerOptions(1, false, Duration.ofMinutes(1))); return true; } }
0
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk/streaming
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk/streaming/azure/ServiceBusSendInterface.java
package io.taskmonk.integrations.azure; import com.microsoft.azure.servicebus.Message; import com.microsoft.azure.servicebus.QueueClient; import com.microsoft.azure.servicebus.ReceiveMode; import com.microsoft.azure.servicebus.primitives.ConnectionStringBuilder; import com.microsoft.azure.servicebus.primitives.ServiceBusException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.time.Duration; import java.util.concurrent.CompletableFuture; public class ServiceBusSendInterface { private static final Logger logger = LoggerFactory.getLogger(ServiceBusSendInterface.class); String queueName; String accessKey; String accessKeyName = "Client"; String connectionString; public ServiceBusSendInterface(String queueName, String accessKey) { this.queueName = queueName; this.accessKey = accessKey; connectionString = String.format("Endpoint=sb://taskmonk.servicebus.windows.net/;SharedAccessKeyName=%s;SharedAccessKey=%s;EntityPath=%s", accessKeyName, accessKey, queueName); } public CompletableFuture<Void> send(String messageId, String label, String content) throws ServiceBusException, InterruptedException { QueueClient sendClient = new QueueClient(new ConnectionStringBuilder(connectionString, queueName), ReceiveMode.PEEKLOCK); Message message = new Message(content); message.setContentType("application/json"); message.setLabel(label); message.setMessageId(messageId); message.setTimeToLive(Duration.ofDays(14)); logger.debug("\nMessage sending: Id = {}", message.getMessageId()); return sendClient.sendAsync(message); } }
0
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk/streaming
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk/streaming/impl/MessageStreamListener.java
package io.taskmonk.streaming.impl; import io.taskmonk.streaming.azure.ServiceBusListener; public class MessageStreamListener extends ServiceBusListener { public MessageStreamListener(String queueName, String accessKey) { super(queueName, accessKey); } }
0
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk/streaming
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk/streaming/impl/MessageStreamWriter.java
package io.taskmonk.streaming.impl; public class MessageStreamWriter extends io.taskmonk.integrations.azure.ServiceBusSendInterface { public MessageStreamWriter(String queueName, String accessKey) { super(queueName, accessKey); } }
0
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk/streaming
java-sources/ai/taskmonk/taskmonk-sdk/1.16.6/io/taskmonk/streaming/impl/NewTasks.java
package io.taskmonk.streaming.impl; import io.taskmonk.entities.Task; import java.util.List; import java.util.Map; public class NewTasks { public String project_id; public String batch_id; public List<Task> tasks; }
0
java-sources/ai/tecton/java-client/0.9.3/ai/tecton
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client/TectonClient.java
package ai.tecton.client;

import ai.tecton.client.exceptions.BadRequestException;
import ai.tecton.client.exceptions.ForbiddenException;
import ai.tecton.client.exceptions.GatewayTimeoutException;
import ai.tecton.client.exceptions.InternalServerErrorException;
import ai.tecton.client.exceptions.ResourceExhaustedException;
import ai.tecton.client.exceptions.ResourceNotFoundException;
import ai.tecton.client.exceptions.ServiceUnavailableException;
import ai.tecton.client.exceptions.TectonClientException;
import ai.tecton.client.exceptions.TectonServiceException;
import ai.tecton.client.exceptions.UnauthorizedException;
import ai.tecton.client.request.AbstractTectonRequest;
import ai.tecton.client.request.GetFeatureServiceMetadataRequest;
import ai.tecton.client.request.GetFeaturesBatchRequest;
import ai.tecton.client.request.GetFeaturesRequest;
import ai.tecton.client.response.GetFeatureServiceMetadataResponse;
import ai.tecton.client.response.GetFeaturesBatchResponse;
import ai.tecton.client.response.GetFeaturesResponse;
import ai.tecton.client.transport.HttpResponse;
import ai.tecton.client.transport.TectonHttpClient;
import java.time.Duration;
import java.util.List;
import java.util.stream.Collectors;
import okhttp3.OkHttpClient;

/**
 * A client for interacting with the Tecton FeatureService API. The client provides several methods
 * that make HTTP requests to the corresponding API endpoint and returns the response as a Java
 * object.
 *
 * <p>Note: Each method throws TectonServiceException when it receives an error response from the
 * API and a TectonClientException when an error or exception is encountered by the client. The
 * message included in the exception will provide more information about the error.
 */
public class TectonClient {

  // All request/response traffic is delegated to this transport; TectonClient itself is stateless.
  private final TectonHttpClient tectonHttpClient;

  /**
   * Constructor for a simple Tecton Client
   *
   * @param url The Tecton Base Url
   * @param apiKey API Key for authenticating with the FeatureService API. See <a
   *     href="https://docs.tecton.ai/docs/reading-feature-data/reading-feature-data-for-inference/reading-online-features-for-inference-using-the-http-api#creating-an-api-key-to-authenticate-to-the-http-api">Authenticating
   *     with an API key</a> for more information
   */
  public TectonClient(String url, String apiKey) {
    this.tectonHttpClient =
        new TectonHttpClient(url, apiKey, new TectonClientOptions.Builder().build());
  }

  /**
   * Constructor for a Tecton Client with custom configurations
   *
   * @param url The Tecton Base Url
   * @param apiKey API Key for authenticating with the FeatureService API. See <a
   *     href="https://docs.tecton.ai/docs/reading-feature-data/reading-feature-data-for-inference/reading-online-features-for-inference-using-the-http-api#creating-an-api-key-to-authenticate-to-the-http-api">Authenticating
   *     with an API key</a> for more information
   * @param tectonClientOptions A {@link TectonClientOptions} object with custom configurations
   */
  public TectonClient(String url, String apiKey, TectonClientOptions tectonClientOptions) {
    this.tectonHttpClient = new TectonHttpClient(url, apiKey, tectonClientOptions);
  }

  /**
   * Constructor for a Tecton Client with a custom OkHttpClient
   *
   * @param url The Tecton Base Url
   * @param apiKey API Key for authenticating with the FeatureService API. See <a
   *     href="https://docs.tecton.ai/docs/reading-feature-data/reading-feature-data-for-inference/reading-online-features-for-inference-using-the-http-api#creating-an-api-key-to-authenticate-to-the-http-api">Authenticating
   *     with an API key</a> for more information
   * @param httpClient An OkHttpClient for making requests and receiving responses from the Feature
   *     Service API. Please refer to <a
   *     href="https://square.github.io/okhttp/4.x/okhttp/okhttp3/-ok-http-client/">OkHttp
   *     Documentation</a> for recommendations on creating and maintaining an OkHttp Client in your
   *     application. Tecton recommends configuring the <a
   *     href="https://square.github.io/okhttp/4.x/okhttp/okhttp3/-connection-pool">ConnectionPool</a>
   *     in the OkHttpClient for efficiently managing HTTP connections. If you intend to use the
   *     {@link GetFeaturesBatchRequest} to send parallel requests to Tecton, please also configure
   *     the <a
   *     href="https://square.github.io/okhttp/4.x/okhttp/okhttp3/-dispatcher/max-requests-per-host">maxRequestsPerHost</a>
   *     in the client's Dispatcher.
   */
  public TectonClient(String url, String apiKey, OkHttpClient httpClient) {
    this.tectonHttpClient = new TectonHttpClient(url, apiKey, httpClient);
  }

  /**
   * Makes a request to the /get-features endpoint and returns the response in the form of a {@link
   * GetFeaturesResponse} object
   *
   * @param getFeaturesRequest A {@link GetFeaturesRequest} object with the request parameters
   * @return {@link GetFeaturesResponse} object representing the response from the HTTP API
   * @throws TectonClientException when the client is already closed or encounters an error while
   *     building the request or parsing the response
   * @throws BadRequestException If the HTTP status code is 400 (Bad Request).
   * @throws UnauthorizedException If the HTTP status code is 401 (Unauthorized).
   * @throws ForbiddenException If the HTTP status code is 403 (Forbidden).
   * @throws ResourceNotFoundException If the HTTP status code is 404 (Resource Not Found).
   * @throws ResourceExhaustedException If the HTTP status code is 429 (Resource Exhausted).
   * @throws InternalServerErrorException If HTTP the status code is 500 (Internal Server Error).
   * @throws ServiceUnavailableException If HTTP the status code is 503 (Service Unavailable).
   * @throws GatewayTimeoutException If HTTP the status code is 504 (Gateway Timeout).
   * @throws TectonServiceException If Tecton returns an error response with other status codes
   */
  public GetFeaturesResponse getFeatures(GetFeaturesRequest getFeaturesRequest)
      throws TectonClientException, TectonServiceException {
    HttpResponse httpResponse = getHttpResponse(getFeaturesRequest);
    // NOTE(review): getResponseBody().get() is an unchecked Optional.get(); presumably
    // validateResponse() (called in getHttpResponse) guarantees a body is present — confirm,
    // otherwise this throws NoSuchElementException rather than a TectonClientException.
    return new GetFeaturesResponse(
        httpResponse.getResponseBody().get(), httpResponse.getRequestDuration());
  }

  /**
   * Makes a request to the /metadata endpoint and returns the response in the form of a {@link
   * GetFeatureServiceMetadataResponse} object
   *
   * @param getFeatureServiceMetadataRequest A {@link GetFeatureServiceMetadataRequest} object with
   *     the request parameters
   * @return {@link GetFeatureServiceMetadataResponse} object representing the response from the
   *     HTTP API
   * @throws TectonClientException when the client is already closed or encounters an error while
   *     building the request or parsing the response
   * @throws BadRequestException If the HTTP status code is 400 (Bad Request).
   * @throws UnauthorizedException If the HTTP status code is 401 (Unauthorized).
   * @throws ForbiddenException If the HTTP status code is 403 (Forbidden).
   * @throws ResourceNotFoundException If the HTTP status code is 404 (Resource Not Found).
   * @throws ResourceExhaustedException If the HTTP status code is 429 (Resource Exhausted).
   * @throws InternalServerErrorException If HTTP the status code is 500 (Internal Server Error).
   * @throws ServiceUnavailableException If HTTP the status code is 503 (Service Unavailable).
   * @throws GatewayTimeoutException If HTTP the status code is 504 (Gateway Timeout).
   * @throws TectonServiceException If Tecton returns an error response with other status codes
   */
  public GetFeatureServiceMetadataResponse getFeatureServiceMetadata(
      GetFeatureServiceMetadataRequest getFeatureServiceMetadataRequest)
      throws TectonClientException, TectonServiceException {
    HttpResponse httpResponse = getHttpResponse(getFeatureServiceMetadataRequest);
    // NOTE(review): same unchecked Optional.get() as in getFeatures — see note there.
    return new GetFeatureServiceMetadataResponse(
        httpResponse.getResponseBody().get(), httpResponse.getRequestDuration());
  }

  /**
   * Makes a batch request to retrieve a list of feature vector and metadata for a given workspace
   * and feature service
   *
   * @param batchRequest The {@link GetFeaturesRequest} object with the request parameters
   * @return {@link GetFeaturesBatchResponse} object with the list of feature vector and metadata
   *     (if requested)
   * @throws TectonClientException when the client is already closed or the client encounters an
   *     error while building the request or parsing the response
   * @throws BadRequestException If the HTTP status code is 400 (Bad Request).
   * @throws UnauthorizedException If the HTTP status code is 401 (Unauthorized).
   * @throws ForbiddenException If the HTTP status code is 403 (Forbidden).
   * @throws ResourceNotFoundException If the HTTP status code is 404 (Resource Not Found).
   * @throws ResourceExhaustedException If the HTTP status code is 429 (Resource Exhausted).
   * @throws InternalServerErrorException If HTTP the status code is 500 (Internal Server Error).
   * @throws ServiceUnavailableException If HTTP the status code is 503 (Service Unavailable).
   * @throws GatewayTimeoutException If HTTP the status code is 504 (Gateway Timeout).
   * @throws TectonServiceException If Tecton returns an error response with other status codes
   */
  public GetFeaturesBatchResponse getFeaturesBatch(GetFeaturesBatchRequest batchRequest)
      throws TectonClientException, TectonServiceException {
    // Serialize batch request into list of JSON request
    List<String> requestList =
        batchRequest.getRequestList().stream()
            .map(AbstractTectonRequest::requestToJson)
            .collect(Collectors.toList());

    // Perform parallel batch call, timing the whole batch (not individual requests).
    long start = System.currentTimeMillis();
    List<HttpResponse> httpResponseList =
        tectonHttpClient.performParallelRequests(
            batchRequest.getEndpoint(),
            batchRequest.getMethod(),
            requestList,
            batchRequest.getTimeout());
    long stop = System.currentTimeMillis();
    Duration totalTime = Duration.ofMillis(stop - start);

    // Deserialize list of JSON responses into a GetFeaturesBatchResponse
    return new GetFeaturesBatchResponse(
        httpResponseList, totalTime, batchRequest.getMicroBatchSize());
  }

  /**
   * Releases all resources (connection pool, threads) held by the Tecton Client. Once the client
   * is closed, all future calls with the client will fail.
   */
  public void close() {
    // Idempotent: closing an already-closed client is a no-op.
    if (!this.isClosed()) {
      this.tectonHttpClient.close();
    }
  }

  /**
   * Returns true if the Tecton Client has been closed, false otherwise
   *
   * @return boolean
   */
  public boolean isClosed() {
    return tectonHttpClient.isClosed();
  }

  // Shared request path for the single-request endpoints: performs the HTTP call and
  // validates the response (validateResponse() throws the status-specific exceptions
  // documented on the public methods).
  private HttpResponse getHttpResponse(AbstractTectonRequest tectonRequest) {
    // Perform request and get HttpResponse
    HttpResponse httpResponse =
        tectonHttpClient.performRequest(
            tectonRequest.getEndpoint(), tectonRequest.getMethod(), tectonRequest.requestToJson());
    httpResponse.validateResponse();
    return httpResponse;
  }

  /**
   * A Builder class for creating an instance of {@link TectonClient} object with specific
   * configurations
   */
  public static class Builder {
    private String url;
    private String apiKey;
    private TectonClientOptions tectonClientOptions;
    private OkHttpClient httpClient;

    /**
     * Setter for url
     *
     * @param url The Tecton Base Url
     * @return this Builder
     */
    public Builder url(String url) {
      this.url = url;
      return this;
    }

    /**
     * Setter for apiKey
     *
     * @param apiKey API Key for authenticating with the FeatureService API. See <a
     *     href="https://docs.tecton.ai/docs/reading-feature-data/reading-feature-data-for-inference/reading-online-features-for-inference-using-the-http-api#creating-an-api-key-to-authenticate-to-the-http-api">Authenticating
     *     with an API key</a> for more information
     * @return this Builder
     */
    public Builder apiKey(String apiKey) {
      this.apiKey = apiKey;
      return this;
    }

    /**
     * Setter for tectonClientOptions
     *
     * @param tectonClientOptions A {@link TectonClientOptions} object with custom configurations
     * @return this Builder
     */
    public Builder tectonClientOptions(TectonClientOptions tectonClientOptions) {
      this.tectonClientOptions = tectonClientOptions;
      return this;
    }

    /**
     * Setter for httpClient
     *
     * @param httpClient An OkHttpClient for making requests and receiving responses from the
     *     Feature Service API. Please refer to <a
     *     href="https://square.github.io/okhttp/4.x/okhttp/okhttp3/-ok-http-client/">OkHttp
     *     Documentation</a> for recommendations on creating and maintaining an OkHttp Client in
     *     your application. Tecton recommends configuring the <a
     *     href="https://square.github.io/okhttp/4.x/okhttp/okhttp3/-connection-pool">ConnectionPool</a>
     *     in the OkHttpClient for efficiently managing HTTP connections. If you intend to use the
     *     {@link GetFeaturesBatchRequest} to send parallel requests to Tecton, please also
     *     configure the <a
     *     href="https://square.github.io/okhttp/4.x/okhttp/okhttp3/-dispatcher/max-requests-per-host">maxRequestsPerHost</a>
     *     in the client's Dispatcher.
     * @return this Builder
     */
    public Builder httpClient(OkHttpClient httpClient) {
      this.httpClient = httpClient;
      return this;
    }

    /**
     * Build a {@link TectonClient} object from the Builder
     *
     * @return {@link TectonClient}
     */
    public TectonClient build() {
      // Precedence: an explicit OkHttpClient wins over TectonClientOptions; if neither
      // was provided, fall back to default options.
      if (this.httpClient != null) {
        return new TectonClient(url, apiKey, httpClient);
      } else if (this.tectonClientOptions != null) {
        return new TectonClient(url, apiKey, tectonClientOptions);
      }
      return new TectonClient(url, apiKey);
    }
  }
}
0
java-sources/ai/tecton/java-client/0.9.3/ai/tecton
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client/TectonClientOptions.java
package ai.tecton.client;

import java.time.Duration;

/**
 * A class that provides custom configuration options for the underlying Http Client. The Client
 * currently supports customizing the following configurations:
 *
 * <ul>
 *   <li><a
 *       href="https://square.github.io/okhttp/4.x/okhttp/okhttp3/-ok-http-client/-builder/read-timeout/">Read
 *       Timeout</a> - Default = 2s
 *   <li><a
 *       href="https://square.github.io/okhttp/4.x/okhttp/okhttp3/-ok-http-client/-builder/connect-timeout/">Connect
 *       Timeout</a> - Default = 2s
 *   <li><a href="https://square.github.io/okhttp/3.x/okhttp/okhttp3/ConnectionPool.html/">Max Idle
 *       Connections</a> - Default = 5
 *   <li><a href="https://square.github.io/okhttp/3.x/okhttp/okhttp3/ConnectionPool.html/">Keep
 *       Alive Duration</a> - Default = 5mins
 *   <li><a
 *       href="https://square.github.io/okhttp/4.x/okhttp/okhttp3/-dispatcher/max-requests-per-host//">Max
 *       Parallel Requests</a> - Default = 5
 * </ul>
 */
public class TectonClientOptions {

  // Fix: the javadoc previously advertised 5s read/connect timeout defaults while these
  // constants have always been 2s; the documentation now matches the actual defaults.
  private static final Duration DEFAULT_READ_TIMEOUT = Duration.ofSeconds(2);
  private static final Duration DEFAULT_CONNECT_TIMEOUT = Duration.ofSeconds(2);
  private static final int DEFAULT_MAX_IDLE_CONNECTIONS = 5;
  private static final Duration DEFAULT_KEEPALIVE_DURATION = Duration.ofMinutes(5);
  private static final int DEFAULT_MAX_PARALLEL_REQUESTS = 5;

  private final Duration readTimeout;
  private final Duration connectTimeout;
  private final int maxIdleConnections;
  private final Duration keepAliveDuration;
  private final int maxParallelRequests;

  /**
   * Constructor that instantiates a TectonClientOptions with default configurations
   *
   * <p>Read Timeout = 2 seconds, Connect Timeout = 2 seconds, Max Idle Connections = 5, Keep Alive
   * Duration = 5 minutes, Max Parallel Requests = 5
   */
  public TectonClientOptions() {
    this.readTimeout = DEFAULT_READ_TIMEOUT;
    this.connectTimeout = DEFAULT_CONNECT_TIMEOUT;
    this.maxIdleConnections = DEFAULT_MAX_IDLE_CONNECTIONS;
    this.keepAliveDuration = DEFAULT_KEEPALIVE_DURATION;
    this.maxParallelRequests = DEFAULT_MAX_PARALLEL_REQUESTS;
  }

  /**
   * Constructor that configures the TectonClientOptions with specified configurations
   *
   * @param readTimeout read timeout for new connections
   * @param connectTimeout connect timeout for new connections
   * @param maxIdleConnections maximum number of idle connections kept in the pool
   * @param keepAliveDuration how long idle connections are kept alive before closing
   * @param maxParallelRequests maximum number of requests to execute concurrently
   */
  public TectonClientOptions(
      Duration readTimeout,
      Duration connectTimeout,
      int maxIdleConnections,
      Duration keepAliveDuration,
      int maxParallelRequests) {
    this.readTimeout = readTimeout;
    this.connectTimeout = connectTimeout;
    this.maxIdleConnections = maxIdleConnections;
    this.keepAliveDuration = keepAliveDuration;
    this.maxParallelRequests = maxParallelRequests;
  }

  /**
   * Returns the connectTimeout configuration for the client
   *
   * @return connectTimeout represented as a {@link java.time.Duration}
   */
  public Duration getConnectTimeout() {
    return connectTimeout;
  }

  /**
   * Returns the keepAliveDuration configuration for the client's ConnectionPool
   *
   * @return keepAliveDuration represented as a {@link java.time.Duration}
   */
  public Duration getKeepAliveDuration() {
    return keepAliveDuration;
  }

  /**
   * Returns the readTimeout configuration for the client
   *
   * @return readTimeout represented as a {@link java.time.Duration}
   */
  public Duration getReadTimeout() {
    return readTimeout;
  }

  /**
   * Returns the maxIdleConnections configuration for the client's ConnectionPool
   *
   * @return maxIdleConnections
   */
  public int getMaxIdleConnections() {
    return maxIdleConnections;
  }

  /**
   * Returns the maxParallelRequests configuration for the client
   *
   * @return maxParallelRequests
   */
  public int getMaxParallelRequests() {
    return maxParallelRequests;
  }

  /**
   * A Builder class for creating an instance of {@link TectonClientOptions} object with specific
   * configurations
   */
  public static class Builder {
    private Duration readTimeout = DEFAULT_READ_TIMEOUT;
    private Duration connectTimeout = DEFAULT_CONNECT_TIMEOUT;
    private int maxIdleConnections = DEFAULT_MAX_IDLE_CONNECTIONS;
    private Duration keepAliveDuration = DEFAULT_KEEPALIVE_DURATION;
    private int maxParallelRequests = DEFAULT_MAX_PARALLEL_REQUESTS;

    /**
     * Setter for the readTimeout value for new connections. A value of 0 means no timeout,
     * otherwise values must be between 1 and Integer.MAX_VALUE when converted to milliseconds
     *
     * @param readTimeout readTimeout as a {@link java.time.Duration}
     * @return this Builder
     */
    public Builder readTimeout(Duration readTimeout) {
      this.readTimeout = readTimeout;
      return this;
    }

    /**
     * Setter for the connectTimeout value for new connections. A value of 0 means no timeout,
     * otherwise values must be between 1 and Integer.MAX_VALUE when converted to milliseconds
     *
     * @param connectTimeout connectTimeout as a {@link java.time.Duration}
     * @return this Builder
     */
    public Builder connectTimeout(Duration connectTimeout) {
      this.connectTimeout = connectTimeout;
      return this;
    }

    /**
     * Setter for the maximum number of idle connections to keep in the pool. If not set, the client
     * will use a default value of 5
     *
     * @param maxIdleConnections int value, must be between 1 and Integer.MAX_VALUE
     * @return this builder
     */
    public Builder maxIdleConnections(int maxIdleConnections) {
      this.maxIdleConnections = maxIdleConnections;
      return this;
    }

    /**
     * Setter for the time to keep an idle connection alive in the pool before closing it
     *
     * @param keepAliveDuration as a {@link java.time.Duration}
     * @return this Builder
     */
    public Builder keepAliveDuration(Duration keepAliveDuration) {
      this.keepAliveDuration = keepAliveDuration;
      return this;
    }

    /**
     * Setter for the maximum number of requests to execute concurrently. Above this requests queue
     * in memory, waiting for the running calls to complete. Default value is 5
     *
     * @param maxParallelRequests int value, must be between 1 and Integer.MAX_VALUE
     * @return this Builder
     */
    public Builder maxParallelRequests(int maxParallelRequests) {
      this.maxParallelRequests = maxParallelRequests;
      return this;
    }

    /**
     * Build a {@link TectonClientOptions} object from the Builder
     *
     * @return {@link TectonClientOptions}
     */
    public TectonClientOptions build() {
      return new TectonClientOptions(
          this.readTimeout,
          this.connectTimeout,
          this.maxIdleConnections,
          this.keepAliveDuration,
          this.maxParallelRequests);
    }
  }
}
0
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client/exceptions/BadRequestException.java
package ai.tecton.client.exceptions; /** * An exception class representing a client error caused by a bad request. This exception is * typically thrown when the client's request is malformed or contains invalid data, resulting in a * "400 Bad Request" HTTP response status. It extends the {@link TectonClientException} class. */ public class BadRequestException extends TectonClientException { public BadRequestException(final String errorMessage, final int statusCode) { super("Bad Request: " + errorMessage, statusCode); } }
0
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client/exceptions/ForbiddenException.java
package ai.tecton.client.exceptions; /** * An exception class representing a client error caused by forbidden access. This exception is * typically thrown when the API Key in the request is authenticated, but the Service Account * associated with the API Key is not authorized to access the workspace, resulting in a "403 * Forbidden" HTTP response status. It extends the {@link TectonClientException} class. */ public class ForbiddenException extends TectonClientException { public ForbiddenException(final String errorMessage, final int statusCode) { super(errorMessage, statusCode); } }
0
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client/exceptions/GatewayTimeoutException.java
package ai.tecton.client.exceptions; /** * An exception class representing a client error caused by a gateway timeout. This exception is * typically thrown when a server acting as a gateway or proxy did not receive a timely response * from the Feature Server, resulting in a "504 Gateway Timeout" HTTP response status. It extends * the {@link TectonServiceException} class. */ public class GatewayTimeoutException extends TectonServiceException { public GatewayTimeoutException(final String errorMessage, final int statusCode) { super(errorMessage, statusCode); } }
0
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client/exceptions/HttpStatusExceptionFactory.java
package ai.tecton.client.exceptions; import java.util.Optional; /** An exception factory that maps HTTP Status code to a custom Exception */ public class HttpStatusExceptionFactory { public static Optional<TectonException> createException(int statusCode, String errorMessage) { switch (statusCode) { case 400: return Optional.of(new BadRequestException(errorMessage, statusCode)); case 401: return Optional.of(new UnauthorizedException(errorMessage, statusCode)); case 403: return Optional.of(new ForbiddenException(errorMessage, statusCode)); case 404: return Optional.of(new ResourceNotFoundException(errorMessage, statusCode)); case 429: return Optional.of(new ResourceExhaustedException(errorMessage, statusCode)); case 500: return Optional.of(new InternalServerErrorException(errorMessage, statusCode)); case 503: return Optional.of(new ServiceUnavailableException(errorMessage, statusCode)); case 504: return Optional.of(new GatewayTimeoutException(errorMessage, statusCode)); default: return Optional.empty(); } } }
0
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client/exceptions/InternalServerErrorException.java
package ai.tecton.client.exceptions; /** * An exception class representing a server error caused by an internal server failure. This * exception is typically thrown when an unexpected error occurs on the server, resulting in a "500 * Internal Server Error" HTTP response status. It extends the {@link TectonServiceException} class. */ public class InternalServerErrorException extends TectonServiceException { public InternalServerErrorException(final String errorMessage, final int statusCode) { super(errorMessage, statusCode); } }
0
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client/exceptions/InvalidRequestParameterException.java
package ai.tecton.client.exceptions; /** * An exception class representing a client error caused by a misconfigured request, such as missing * workspace name, missing feature service name etc. It extends the {@link TectonClientException} * class. */ public class InvalidRequestParameterException extends TectonClientException { public InvalidRequestParameterException(final String errorMessage) { super(errorMessage); } }
0
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client/exceptions/ResourceExhaustedException.java
package ai.tecton.client.exceptions; /** * An exception class representing a client error caused by resource exhaustion. This exception * indicates that request rate exceeds the concurrent request limit set for your deployment, * resulting in a "429 Too Many Requests" HTTP response status. It extends the {@link * TectonClientException} class. */ public class ResourceExhaustedException extends TectonClientException { public ResourceExhaustedException(final String errorMessage, final int statusCode) { super(errorMessage, statusCode); } }
0
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client/exceptions/ResourceNotFoundException.java
package ai.tecton.client.exceptions; /** * An exception class representing a client error caused by a resource not being found. This * exception is typically thrown when the request references a workspace, feature service or other * resources that do not exist in Tecton, resulting in a "404 Not Found" HTTP response status. It * extends the {@link TectonClientException} class. */ public class ResourceNotFoundException extends TectonClientException { public ResourceNotFoundException(final String errorMessage, final int statusCode) { super("Not Found: " + errorMessage, statusCode); } }
0
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client/exceptions/ServiceUnavailableException.java
package ai.tecton.client.exceptions; /** * An exception class representing a server error caused by a temporarily unavailable service. This * exception is typically thrown when the Tecton is temporarily unable to handle the client's * request due to being overloaded or undergoing maintenance, resulting in a "503 Service * Unavailable" HTTP response status. It extends the {@link TectonServiceException} class. */ public class ServiceUnavailableException extends TectonServiceException { public ServiceUnavailableException(final String errorMessage, final int statusCode) { super(errorMessage, statusCode); } }
0
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client/exceptions/TectonClientException.java
package ai.tecton.client.exceptions; /** * Exception class that is used to represent various client side errors, such as bad request * parameters, unauthorized requests etc. */ public class TectonClientException extends TectonException { public TectonClientException(String errorMessage) { super(errorMessage); } public TectonClientException(String errorMessage, int statusCode) { super(errorMessage, statusCode); } public TectonClientException(String message, Throwable t) { super(message, t); } public TectonClientException(Throwable t) { super(t); } }
0
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client/exceptions/TectonErrorMessage.java
package ai.tecton.client.exceptions;

/**
 * Class that declares all the different error messages included in the {@link
 * TectonClientException}
 */
public class TectonErrorMessage {

  // Constants holder; not meant to be instantiated.
  private TectonErrorMessage() {}

  public static final String INVALID_KEY = "API Key cannot be empty";
  public static final String INVALID_URL = "Cannot connect to Tecton because the URL is invalid";
  public static final String INVALID_KEY_VALUE = "Key cannot be null or empty.";

  // %s is filled with the underlying failure description.
  public static final String CALL_FAILURE = "Unable to perform call. %s";
  public static final String ERROR_RESPONSE =
      "Received Error Response from Tecton with code %s and error message: %s";

  public static final String INVALID_WORKSPACENAME = "Workspace Name cannot be null or empty";
  public static final String INVALID_FEATURESERVICENAME =
      "FeatureService Name cannot be null or empty";
  public static final String EMPTY_REQUEST_MAPS =
      "Both Join Key map and Request Context Map cannot be empty";

  public static final String INVALID_GET_FEATURE_REQUEST =
      "The parameters passed to the GetFeatureRequest are invalid. %s";
  public static final String INVALID_GET_FEATURE_BATCH_REQUEST =
      "The parameters passed to the GetFeaturesBatchRequest are invalid. %s";
  // Reconstructed: this literal was split across a line break in the source dump.
  public static final String INVALID_GET_SERVICE_METADATA_REQUEST =
      "The parameters passed to the GetFeatureServiceMetadataRequest are invalid. %s";

  public static final String INVALID_RESPONSE_FORMAT = "Unable to parse JSON response from Tecton";
  public static final String EMPTY_RESPONSE = "Received empty response body from Tecton";
  public static final String EMPTY_FEATURE_VECTOR = "Received empty feature vector from Tecton";

  public static final String MISSING_EXPECTED_METADATA =
      "Required metadata %s is missing in the response";
  public static final String UNKNOWN_DATA_TYPE = "Unknown Data Type %s in response";
  public static final String INVALID_DATA_TYPE =
      "Unable to cast response for field %s to expected type %s (value: %s)";
  public static final String MISMATCHED_TYPE = "Invalid method used to access value of type %s";
  public static final String UNSUPPORTED_LIST_DATA_TYPE =
      "Unsupported data type detected for array feature values";
  public static final String UNKNOWN_DATETIME_FORMAT =
      "Unable to parse effectiveTime in the response metadata";

  public static final String INVALID_MICRO_BATCH_SIZE =
      "The microBatchSize is out of bounds and should be in the range [ %s , %s ]";
  public static final String INVALID_REQUEST_DATA_LIST =
      "The list of GetFeaturesRequestData objects cannot be null or empty";
}
0
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client/exceptions/TectonException.java
package ai.tecton.client.exceptions; import java.util.Optional; /** Base class that is used to represent various errors encountered by the Java client. */ public class TectonException extends RuntimeException { int statusCode; public TectonException(String errorMessage) { super(errorMessage); } public TectonException(String errorMessage, int statusCode) { super(errorMessage); this.statusCode = statusCode; } public TectonException(String message, Throwable t) { super(message, t); } public TectonException(Throwable t) { super(t); } /** * Returns the HTTP Status Code associated with the exception * * @return HTTP Status Code if present, Optional.empty() otherwise */ public Optional<Integer> getStatusCode() { return Optional.of(statusCode); } }
0
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client/exceptions/TectonServiceException.java
package ai.tecton.client.exceptions; /** * Exception class that is used to represent server side errors. This includes Internal Server * Errors, Gateway Timeouts etc */ public class TectonServiceException extends TectonException { public TectonServiceException(String errorMessage) { super(errorMessage); } public TectonServiceException(String errorMessage, int statusCode) { super(errorMessage, statusCode); } public TectonServiceException(String message, Throwable t) { super(message, t); } public TectonServiceException(Throwable t) { super(t); } }
0
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client/exceptions/UnauthorizedException.java
package ai.tecton.client.exceptions; /** * An exception class representing a client error caused by unauthorized access. This exception is * typically thrown when the API Key in the request is missing or invalid, resulting in a "401 * Unauthorized" HTTP response status. It extends the {@link TectonClientException} class. */ public class UnauthorizedException extends TectonClientException { public UnauthorizedException(final String errorMessage, final int statusCode) { super("Unauthorized: " + errorMessage, statusCode); } }
0
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client/exceptions/package-info.java
/** This package contains exception classes with error messages for Tecton Client */ package ai.tecton.client.exceptions;
0
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client/model/FeatureServiceMetadata.java
package ai.tecton.client.model;

import ai.tecton.client.request.GetFeaturesRequest;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.function.Function;
import java.util.stream.Collectors;

/**
 * Class that encapsulates metadata for a FeatureService, including the schema for join keys and
 * request context in GetFeaturesRequestData, as well as the in-order output schema of feature
 * values in GetFeaturesResponse.
 */
public class FeatureServiceMetadata {

  private final List<NameAndType> inputJoinKeys;
  private final List<NameAndType> inputRequestContextKeys;
  private final List<NameAndType> featureValues;

  public FeatureServiceMetadata(
      List<NameAndType> inputJoinKeys,
      List<NameAndType> inputRequestContextKeys,
      List<NameAndType> featureValues) {
    this.inputJoinKeys = inputJoinKeys;
    this.inputRequestContextKeys = inputRequestContextKeys;
    this.featureValues = featureValues;
  }

  /**
   * Get the input join keys that are expected to be passed in the joinKeyMap parameter for a {@link
   * GetFeaturesRequest} for the feature service
   *
   * @return Returns a {@link java.util.List} of {@link NameAndType} representing the input join
   *     keys
   */
  public List<NameAndType> getInputJoinKeys() {
    return this.inputJoinKeys;
  }

  /**
   * Get the request context keys that are expected to be passed in the requestContextMap parameter
   * for a {@link GetFeaturesRequest} for the feature service
   *
   * @return Returns a {@link java.util.List} of {@link NameAndType} representing the input request
   *     context keys
   */
  public List<NameAndType> getInputRequestContextKeys() {
    return this.inputRequestContextKeys;
  }

  /**
   * Get metadata for feature values to be returned in the GetFeaturesResponse.
   *
   * <p>The order of returned features will match the order returned by GetFeaturesResponse
   *
   * @return Returns a {@link java.util.List} of {@link NameAndType} representing the feature
   *     metadata
   */
  public List<NameAndType> getFeatureValues() {
    return this.featureValues;
  }

  // Shared helper: index a schema list by each entry's name.
  private static Map<String, NameAndType> indexByName(List<NameAndType> schema) {
    return schema.stream().collect(Collectors.toMap(NameAndType::getName, Function.identity()));
  }

  /**
   * Returns the input join keys as a {@link java.util.Map} with the join key name as the key.
   *
   * @return map of join key name to its {@link NameAndType} entry
   */
  public Map<String, NameAndType> getInputJoinKeysAsMap() {
    return indexByName(this.inputJoinKeys);
  }

  /**
   * Returns the request context keys as a {@link java.util.Map} with the request context name as
   * the key.
   *
   * @return map of request context name to its {@link NameAndType} entry
   */
  public Map<String, NameAndType> getInputRequestContextKeysAsMap() {
    return indexByName(this.inputRequestContextKeys);
  }

  /**
   * Returns the feature metadata as a {@link java.util.Map} with the (featureNamespace.featureName)
   * as the key.
   *
   * @return map of fully qualified feature name to its {@link NameAndType} entry
   */
  public Map<String, NameAndType> getFeatureValuesAsMap() {
    return indexByName(this.featureValues);
  }

  /** Overrides <i>equals()</i> in class {@link Object} */
  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    FeatureServiceMetadata other = (FeatureServiceMetadata) o;
    return Objects.equals(inputJoinKeys, other.inputJoinKeys)
        && Objects.equals(inputRequestContextKeys, other.inputRequestContextKeys)
        && Objects.equals(featureValues, other.featureValues);
  }

  /** Overrides <i>hashCode()</i> in class {@link Object} */
  @Override
  public int hashCode() {
    return Objects.hash(inputJoinKeys, inputRequestContextKeys, featureValues);
  }
}
0
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client/model/FeatureStatus.java
package ai.tecton.client.model; import java.util.Arrays; import java.util.Optional; import org.apache.commons.lang3.StringUtils; /** * Enum that represents the different feature statuses that can be returned when requesting feature * values. */ public enum FeatureStatus { /** Feature value is present and returned with no errors. */ PRESENT("PRESENT"), /** * Either the join keys requested are missing in the online store or the feature value is outside * ttl. */ MISSING_DATA("MISSING_DATA"), /** Unable to infer feature status. */ CACHED_PRESENT("CACHED_PRESENT"), CACHED_UNKNOWN("CACHED_UNKNOWN"), CACHED_MISSING_DATA("CACHED_MISSING_DATA"), UNKNOWN("UNKNOWN"); final String status; FeatureStatus(String status) { this.status = status; } String getStatus() { return this.status; } /** * Returns the Status that matches the String representation passed as a parameter * * @param name The String representation of the Status * @return Optional&lt;{@link ValueType}&gt; if a match is found, Optional.empty() otherwise */ public static Optional<FeatureStatus> fromString(String name) { // Map string to the corresponding ValueType enum return Arrays.stream(FeatureStatus.values()) .filter(val -> StringUtils.equalsIgnoreCase(val.getStatus(), name)) .findAny(); } }
0
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client/model/FeatureValue.java
package ai.tecton.client.model;

import ai.tecton.client.exceptions.TectonClientException;
import ai.tecton.client.exceptions.TectonErrorMessage;
import java.time.Instant;
import java.time.OffsetDateTime;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import org.apache.commons.lang3.StringUtils;

/**
 * Class that represents each feature value in the feature vector returned in the
 * GetFeaturesResponse
 */
public class FeatureValue {

  private final String featureNamespace;
  private final String featureName;
  private Instant effectiveTime;
  private final Value value;
  private final Optional<FeatureStatus> featureStatus;
  private final String featureDescription;
  private final Map<String, String> featureTags;

  public FeatureValue(
      Object featureObject,
      String name,
      ValueType valueType,
      Optional<ValueType> elementValueType,
      String effectiveTime,
      Optional<FeatureStatus> featureStatus,
      String featureDescription,
      Map<String, String> featureTags) {
    // Split name into feature namespace and feature name.
    // NOTE(review): assumes "name" always contains a '.' separator; a name without one would
    // throw ArrayIndexOutOfBoundsException here — confirm against the response schema.
    String[] split = StringUtils.split(name, ".");
    featureNamespace = split[0];
    featureName = split[1];
    this.featureStatus = featureStatus;
    this.featureDescription = featureDescription;
    this.featureTags = featureTags;
    // Parse effective_time if present
    try {
      if (StringUtils.isNotEmpty(effectiveTime)) {
        this.effectiveTime = OffsetDateTime.parse(effectiveTime).toInstant();
      }
    } catch (Exception e) {
      throw new TectonClientException(TectonErrorMessage.UNKNOWN_DATETIME_FORMAT);
    }
    try {
      // Create Value using valueType; ARRAY additionally needs the element type.
      switch (valueType) {
        case ARRAY:
          this.value = new Value(featureObject, valueType, elementValueType.get());
          break;
        case STRING:
        case INT64:
        case BOOLEAN:
        case FLOAT32:
        case FLOAT64:
        default:
          this.value = new Value(featureObject, valueType);
      }
    } catch (Exception e) {
      throw new TectonClientException(
          String.format(
              TectonErrorMessage.INVALID_DATA_TYPE, name, valueType.getName(), featureObject));
    }
  }

  /**
   * Returns the ValueType representing the Tecton data_type for the feature value. Currently
   * supported types are ARRAY, STRING, INT64, BOOLEAN and FLOAT64
   *
   * @return {@link ValueType} of the feature value
   */
  public ValueType getValueType() {
    return value.valueType;
  }

  /**
   * Returns the individual array element type if the feature valueType is ARRAY.
   *
   * @return {@link ValueType} of the feature values in the array wrapped in {@link
   *     java.util.Optional} if the getValueType() is ARRAY, Optional.empty() otherwise
   */
  public Optional<ValueType> getListElementType() {
    // Fix: listValue is null for non-ARRAY values, so the previous unconditional dereference
    // threw a NullPointerException instead of returning Optional.empty() as documented.
    if (this.value.listValue == null) {
      return Optional.empty();
    }
    return Optional.ofNullable(this.value.listValue.listElementType);
  }

  /**
   * Returns the feature status of the feature value. PRESENT if the feature value is retrieved and
   * present in the online store or MISSING_DATA if the feature value is missing or outside TTL
   *
   * @return Optional&lt;FeatureStatus&gt; of the feature value status {@link java.util.Optional}.
   */
  public Optional<FeatureStatus> getFeatureStatus() {
    return this.featureStatus;
  }

  /**
   * Returns the description of the feature
   *
   * @return String
   */
  public String getFeatureDescription() {
    return this.featureDescription;
  }

  /** Returns the tags of the feature */
  public Map<String, String> getFeatureTags() {
    return this.featureTags;
  }

  /**
   * Returns the effective serving time for this feature. This is the most recent time that's
   * aligned to the interval for which a full aggregation is available for this feature. Note: Only
   * present if MetadataOption.EFFECTIVE_TIME is included in the GetFeaturesRequest
   *
   * @return Optional&lt;Instant&gt; representing the effectiveTime if present, Optional.empty()
   *     otherwise
   */
  public Optional<Instant> getEffectiveTime() {
    return Optional.ofNullable(effectiveTime);
  }

  /** Returns the feature service name */
  public String getFeatureName() {
    return featureName;
  }

  /** Returns the feature service namespace */
  public String getFeatureNamespace() {
    return featureNamespace;
  }

  // Internal tagged union: exactly one of the typed fields below is populated, selected by
  // valueType.
  class Value {
    private final ValueType valueType;
    private String stringValue;
    private Long int64Value;
    private Boolean booleanValue;
    private Double float64Value;
    private ListDataType listValue;

    @Override
    public boolean equals(Object o) {
      if (this == o) return true;
      if (o == null || getClass() != o.getClass()) return false;
      Value value = (Value) o;
      return valueType == value.valueType
          && Objects.equals(stringValue, value.stringValue)
          && Objects.equals(int64Value, value.int64Value)
          && Objects.equals(booleanValue, value.booleanValue)
          && Objects.equals(float64Value, value.float64Value)
          && Objects.equals(listValue, value.listValue);
    }

    @Override
    public int hashCode() {
      return Objects.hash(
          valueType, stringValue, int64Value, booleanValue, float64Value, listValue);
    }

    // Primitive types
    // Float32 is currently not a supported type for feature values in Tecton. Refer here for all
    // supported types :
    // https://docs.tecton.ai/docs/faqs/creating_and_managing_features/#what-data-types-are-supported-for-feature-values
    public Value(Object featureObject, ValueType valueType) {
      this.valueType = valueType;
      switch (valueType) {
        case BOOLEAN:
          this.booleanValue = (Boolean) featureObject;
          break;
        case STRING:
          this.stringValue = (String) featureObject;
          break;
        case INT64:
          // Tecton represents all Int64 feature values as JSON strings in the response.
          String stringValue = (String) featureObject;
          if (stringValue != null) {
            this.int64Value = Long.parseLong(stringValue);
          }
          break;
        case FLOAT64:
          // Tecton also represents all double feature values as JSON numbers in the response,
          // except for the special values below which arrive as strings and are mapped to null.
          if (featureObject instanceof String) {
            String doubleString = (String) featureObject;
            if (doubleString.equals("null")
                || doubleString.equals("NaN")
                || doubleString.equals("Infinity")
                || doubleString.equals("-Infinity")) {
              this.float64Value = null;
            } else {
              this.float64Value = Double.valueOf(doubleString);
            }
          } else {
            this.float64Value = (Double) featureObject;
          }
          break;
        default:
          throw new TectonClientException(
              String.format(TectonErrorMessage.UNKNOWN_DATA_TYPE, valueType.getName()));
      }
    }

    // Array type
    Value(Object featureObject, ValueType valueType, ValueType listElementType) {
      this.valueType = valueType;
      this.listValue = new ListDataType(listElementType, featureObject);
    }
  }

  /**
   * A Feature Value of type String
   *
   * @return feature value cast to java.lang.String
   * @throws TectonClientException if the method is called on a value whose ValueType is not STRING
   */
  public String stringValue() throws TectonClientException {
    validateValueType(ValueType.STRING);
    return this.value.stringValue;
  }

  /**
   * A Feature Value of type int64 (Long)
   *
   * @return feature value cast to java.lang.Long
   * @throws TectonClientException if the method is called on a value whose ValueType is not INT64
   */
  public Long int64value() throws TectonClientException {
    validateValueType(ValueType.INT64);
    return this.value.int64Value;
  }

  /**
   * A Feature Value of type Boolean
   *
   * @return feature value cast to java.lang.Boolean
   * @throws TectonClientException if the method is called on a value whose ValueType is not BOOLEAN
   */
  public Boolean booleanValue() throws TectonClientException {
    validateValueType(ValueType.BOOLEAN);
    return this.value.booleanValue;
  }

  /**
   * A Feature Value of type Float64 (Double)
   *
   * @return feature value cast to java.lang.Double
   * @throws TectonClientException if the method is called on a value whose ValueType is not FLOAT64
   */
  public Double float64Value() throws TectonClientException {
    validateValueType(ValueType.FLOAT64);
    return this.value.float64Value;
  }

  /**
   * A Feature Value of type ARRAY with FLOAT64 values
   *
   * @return feature value cast to List&lt;Double&gt;
   * @throws TectonClientException if the method is called on a value whose valueType is not ARRAY
   *     or listElementType is not FLOAT64
   */
  public List<Double> float64ArrayValue() throws TectonClientException {
    validateValueType(ValueType.ARRAY, ValueType.FLOAT64);
    return this.value.listValue.float64List;
  }

  /**
   * A Feature Value of type ARRAY with FLOAT32 values
   *
   * @return feature value cast to List&lt;Float&gt;
   * @throws TectonClientException if the method is called on a value whose valueType is not ARRAY
   *     or listElementType is not FLOAT32
   */
  public List<Float> float32ArrayValue() throws TectonClientException {
    validateValueType(ValueType.ARRAY, ValueType.FLOAT32);
    return this.value.listValue.float32List;
  }

  /**
   * A Feature Value of type ARRAY with INT64 values
   *
   * @return feature value cast to List&lt;Long&gt;
   * @throws TectonClientException if the method is called on a value whose valueType is not ARRAY
   *     or listElementType is not INT64
   */
  public List<Long> int64ArrayValue() throws TectonClientException {
    validateValueType(ValueType.ARRAY, ValueType.INT64);
    return this.value.listValue.int64List;
  }

  /**
   * A Feature Value of type ARRAY with String values
   *
   * @return feature value cast to List&lt;String&gt;
   * @throws TectonClientException if the method is called on a value whose valueType is not ARRAY
   *     or listElementType is not STRING
   */
  public List<String> stringArrayValue() throws TectonClientException {
    validateValueType(ValueType.ARRAY, ValueType.STRING);
    return this.value.listValue.stringList;
  }

  // Rejects accessor calls whose expected type does not match the stored value type.
  private void validateValueType(ValueType valueType) {
    if (this.value.valueType != valueType) {
      throw new TectonClientException(
          String.format(TectonErrorMessage.MISMATCHED_TYPE, value.valueType.getName()));
    }
  }

  // Rejects array accessor calls whose expected element type does not match the stored one.
  private void validateValueType(ValueType valueType, ValueType elementType) {
    validateValueType(valueType);
    if (this.value.listValue.listElementType != elementType) {
      throw new TectonClientException(
          String.format(
              TectonErrorMessage.MISMATCHED_TYPE, value.listValue.listElementType.getName()));
    }
  }

  /** Overrides <i>equals()</i> in class {@link Object} */
  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    FeatureValue that = (FeatureValue) o;
    return Objects.equals(featureNamespace, that.featureNamespace)
        && Objects.equals(featureName, that.featureName)
        && Objects.equals(effectiveTime, that.effectiveTime)
        && Objects.equals(value, that.value)
        && Objects.equals(featureStatus, that.featureStatus);
  }

  /** Overrides <i>hashCode()</i> in class {@link Object} */
  @Override
  public int hashCode() {
    return Objects.hash(featureNamespace, featureName, effectiveTime, value, featureStatus);
  }
}
0
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client/model/ListDataType.java
package ai.tecton.client.model;

import ai.tecton.client.exceptions.TectonClientException;
import ai.tecton.client.exceptions.TectonErrorMessage;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;

/**
 * Internal holder for a feature value of type ARRAY. Exactly one of the typed lists is populated,
 * selected by {@code listElementType}; the remaining lists stay {@code null}.
 */
class ListDataType {

  List<String> stringList;
  List<Float> float32List;
  List<Double> float64List;
  List<Long> int64List;
  ValueType listElementType;

  /**
   * Parses the raw feature object into a list of the corresponding Java type.
   *
   * @param listElementType the declared element type of the array feature
   * @param featureObject the raw JSON-decoded value; expected to be a {@code List} or {@code null}
   * @throws TectonClientException if {@code listElementType} is not a supported array element type
   */
  // Suppression scoped to the constructor (the only place the unchecked cast occurs) instead of
  // the whole class.
  @SuppressWarnings("unchecked")
  ListDataType(ValueType listElementType, Object featureObject) {
    // Cast to the List interface rather than ArrayList so any List implementation produced by the
    // JSON decoder is accepted (the previous ArrayList cast would throw ClassCastException for
    // other List implementations, e.g. unmodifiable or linked lists).
    List<Object> featureObjectList = (List<Object>) featureObject;
    this.listElementType = listElementType;
    // Parse List of Object to List of corresponding Java type
    switch (listElementType) {
      case INT64:
        if (featureObjectList != null) {
          // int64 elements appear to arrive as decimal strings in the response JSON (presumably
          // to avoid precision loss in transit) — TODO confirm against the FeatureService API.
          this.int64List =
              featureObjectList.stream()
                  .map(
                      obj -> {
                        String stringValue = (String) obj;
                        return (stringValue != null) ? Long.parseLong(stringValue) : null;
                      })
                  .collect(Collectors.toList());
        } else {
          this.int64List = null;
        }
        break;
      case FLOAT32:
        if (featureObjectList != null) {
          this.float32List = new ArrayList<>(featureObjectList.size());
          featureObjectList.forEach(obj -> this.float32List.add((Float) obj));
        } else {
          this.float32List = null;
        }
        break;
      case FLOAT64:
        if (featureObjectList != null) {
          this.float64List = new ArrayList<>(featureObjectList.size());
          featureObjectList.forEach(obj -> this.float64List.add((Double) obj));
        } else {
          this.float64List = null;
        }
        break;
      case STRING:
        if (featureObjectList != null) {
          this.stringList = new ArrayList<>(featureObjectList.size());
          featureObjectList.forEach(obj -> this.stringList.add((String) obj));
        } else {
          this.stringList = null;
        }
        break;
      default:
        throw new TectonClientException(TectonErrorMessage.UNSUPPORTED_LIST_DATA_TYPE);
    }
  }

  /** Overrides <i>equals()</i> in class {@link Object} */
  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    ListDataType that = (ListDataType) o;
    return Objects.equals(stringList, that.stringList)
        && Objects.equals(float32List, that.float32List)
        && Objects.equals(float64List, that.float64List)
        && Objects.equals(int64List, that.int64List)
        && Objects.equals(listElementType, that.listElementType);
  }

  /** Overrides <i>hashCode()</i> in class {@link Object} */
  @Override
  public int hashCode() {
    return Objects.hash(stringList, float32List, float64List, int64List, listElementType);
  }
}
0
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client/model/MetadataOption.java
package ai.tecton.client.model;

import org.apache.commons.lang3.StringUtils;

/**
 * Enum representing options for different metadata information that can be requested from the
 * FeatureService API.
 */
public enum MetadataOption {
  /** Include feature name in the response */
  NAME("include_names"),
  /** Include feature effective_time in the response */
  EFFECTIVE_TIME("include_effective_times"),
  /** Include feature data_type in the response. */
  DATA_TYPE("include_data_types"),
  /** Include SLO Info in the response */
  SLO_INFO("include_slo_info"),
  /** Include feature status of the feature value in the response */
  FEATURE_STATUS("include_serving_status"),
  /** Include user defined description of the feature in the response */
  FEATURE_DESCRIPTION("include_feature_descriptions"),
  /** Include user defined tags of the feature in the response */
  FEATURE_TAGS("include_feature_tags"),
  /** Include all metadata in the response */
  ALL(),
  /**
   * Include no metadata in the response. Note that the default metadata options - NAME and
   * DATA_TYPE will still be included
   */
  NONE();

  // JSON field name sent to the API; empty for the pseudo-options ALL and NONE, which have no
  // wire representation of their own.
  private final String jsonName;

  MetadataOption() {
    this(StringUtils.EMPTY);
  }

  MetadataOption(String name) {
    this.jsonName = name;
  }

  /** Returns the JSON field name used for this option in API requests. */
  public String getJsonName() {
    return this.jsonName;
  }
}
0
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client/model/NameAndType.java
package ai.tecton.client.model;

import java.util.Objects;
import java.util.Optional;

/** Class that represents the return types for parameters of FeatureServiceMetadata */
public class NameAndType {

  String name;
  ValueType dataType;
  ValueType listElementType;

  /**
   * Creates a NameAndType with the specified name and dataType.
   *
   * @param name Name
   * @param dataType one of {@link ValueType} values
   */
  public NameAndType(String name, ValueType dataType) {
    this.name = name;
    this.dataType = dataType;
  }

  /**
   * Creates a NameAndType with the specified name, dataType and listElementType.
   *
   * @param name Name
   * @param dataType dataType
   * @param listElementType array element type when dataType is ARRAY
   */
  public NameAndType(String name, ValueType dataType, ValueType listElementType) {
    // Delegate the shared initialization to the two-argument constructor.
    this(name, dataType);
    this.listElementType = listElementType;
  }

  /**
   * Returns the name.
   *
   * @return name
   */
  public String getName() {
    return this.name;
  }

  /**
   * Returns the Tecton data type as a {@link ValueType}.
   *
   * @return {@link ValueType}
   */
  public ValueType getDataType() {
    return this.dataType;
  }

  /**
   * Returns the array element type if present, Optional.empty() otherwise.
   *
   * @return Optional&lt;{@link ValueType}&gt; if present, Optional.empty() otherwise
   */
  public Optional<ValueType> getListElementType() {
    return Optional.ofNullable(this.listElementType);
  }

  /** Overrides <i>equals()</i> in class {@link Object} */
  @Override
  public boolean equals(Object other) {
    if (this == other) {
      return true;
    }
    if (other == null || getClass() != other.getClass()) {
      return false;
    }
    NameAndType nameAndType = (NameAndType) other;
    return Objects.equals(name, nameAndType.name)
        && Objects.equals(dataType, nameAndType.dataType)
        && Objects.equals(listElementType, nameAndType.listElementType);
  }

  /** Overrides <i>hashCode()</i> in class {@link Object} */
  @Override
  public int hashCode() {
    return Objects.hash(name, dataType, listElementType);
  }
}
0
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client/model/SloInformation.java
package ai.tecton.client.model;

import java.util.Collections;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;

/**
 * Class that represents SLO Info provided by Tecton when serving feature values. All values
 * returned are wrapped in {@link java.util.Optional}.
 */
public class SloInformation {

  // All fields are nullable (the server may omit any of them) and immutable after construction.
  private final Boolean sloEligible;
  private final Double serverTimeSeconds;
  private final Double sloServerTimeSeconds;
  private final Integer storeResponseSizeBytes;
  private final Set<SloIneligibilityReason> sloIneligibilityReasons;
  private final Double storeMaxLatency;

  public SloInformation(
      Boolean isSloEligible,
      Double serverTimeSeconds,
      Double sloServerTimeSeconds,
      Integer storeResponseSizeBytes,
      Set<SloIneligibilityReason> sloIneligibilityReasons,
      Double storeMaxLatency) {
    this.sloEligible = isSloEligible;
    this.serverTimeSeconds = serverTimeSeconds;
    this.sloServerTimeSeconds = sloServerTimeSeconds;
    this.sloIneligibilityReasons = sloIneligibilityReasons;
    this.storeResponseSizeBytes = storeResponseSizeBytes;
    this.storeMaxLatency = storeMaxLatency;
  }

  /**
   * Returns true if the response was eligible for SLO, false otherwise.
   *
   * @return Optional&lt;Boolean&gt; if present, Optional.empty() otherwise
   */
  public Optional<Boolean> isSloEligible() {
    return Optional.ofNullable(sloEligible);
  }

  /**
   * Reasons for the response not being eligible for SLO.
   *
   * @return Set&lt;{@link SloInformation.SloIneligibilityReason}&gt;; never null — an empty set is
   *     returned when no reasons were reported
   */
  public Set<SloIneligibilityReason> getSloIneligibilityReasons() {
    return sloIneligibilityReasons == null ? Collections.emptySet() : sloIneligibilityReasons;
  }

  /**
   * This includes the total time spent in the feature server including online transforms and store
   * latency.
   *
   * @return Optional&lt;Double&gt; if present, Optional.empty() otherwise
   */
  public Optional<Double> getServerTimeSeconds() {
    return Optional.ofNullable(serverTimeSeconds);
  }

  /**
   * Max latency observed by the request from the store in seconds.
   *
   * @return Optional&lt;Double&gt; if present, Optional.empty() otherwise
   */
  public Optional<Double> getStoreMaxLatency() {
    return Optional.ofNullable(storeMaxLatency);
  }

  /**
   * Total store response size bytes.
   *
   * @return Optional&lt;Integer&gt; if present, Optional.empty() otherwise
   */
  public Optional<Integer> getStoreResponseSizeBytes() {
    return Optional.ofNullable(storeResponseSizeBytes);
  }

  /**
   * The server time minus any time spent waiting on line transforms to finish after all table
   * transforms have finished.
   *
   * @return Optional&lt;Double&gt; if present, Optional.empty() otherwise
   */
  public Optional<Double> getSloServerTimeSeconds() {
    return Optional.ofNullable(sloServerTimeSeconds);
  }

  /** Reasons due to which the Feature Serving Response can be ineligible for SLO */
  public enum SloIneligibilityReason {
    UNKNOWN,
    DYNAMODB_RESPONSE_SIZE_LIMIT_EXCEEDED,
    REDIS_RESPONSE_SIZE_LIMIT_EXCEEDED,
    REDIS_LATENCY_LIMIT_EXCEEDED;
  }

  /** A static builder for SloInformation. */
  public static class Builder {
    Boolean isSloEligible;
    Double serverTimeSeconds;
    Double sloServerTimeSeconds;
    Integer storeResponseSizeBytes;
    Set<SloIneligibilityReason> sloIneligibilityReasons;
    Double storeMaxLatency;

    public Builder isSloEligible(boolean isSloEligible) {
      this.isSloEligible = isSloEligible;
      return this;
    }

    public Builder serverTimeSeconds(Double serverTimeSeconds) {
      this.serverTimeSeconds = serverTimeSeconds;
      return this;
    }

    public Builder sloServerTimeSeconds(Double sloServerTimeSeconds) {
      this.sloServerTimeSeconds = sloServerTimeSeconds;
      return this;
    }

    public Builder storeResponseSizeBytes(Integer storeResponseSizeBytes) {
      this.storeResponseSizeBytes = storeResponseSizeBytes;
      return this;
    }

    public Builder sloIneligibilityReasons(Set<SloIneligibilityReason> sloIneligibilityReasons) {
      this.sloIneligibilityReasons = sloIneligibilityReasons;
      return this;
    }

    public Builder storeMaxLatency(Double storeMaxLatency) {
      this.storeMaxLatency = storeMaxLatency;
      return this;
    }

    /** Builds an immutable {@link SloInformation} from the configured values. */
    public SloInformation build() {
      return new SloInformation(
          this.isSloEligible,
          this.serverTimeSeconds,
          this.sloServerTimeSeconds,
          this.storeResponseSizeBytes,
          this.sloIneligibilityReasons,
          this.storeMaxLatency);
    }
  }

  /** Overrides <i>equals()</i> in class {@link Object} */
  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    SloInformation that = (SloInformation) o;
    return Objects.equals(sloEligible, that.sloEligible)
        && Objects.equals(serverTimeSeconds, that.serverTimeSeconds)
        && Objects.equals(sloServerTimeSeconds, that.sloServerTimeSeconds)
        && Objects.equals(storeResponseSizeBytes, that.storeResponseSizeBytes)
        && Objects.equals(sloIneligibilityReasons, that.sloIneligibilityReasons)
        && Objects.equals(storeMaxLatency, that.storeMaxLatency);
  }

  /** Overrides <i>hashCode()</i> in class {@link Object} */
  @Override
  public int hashCode() {
    return Objects.hash(
        sloEligible,
        serverTimeSeconds,
        sloServerTimeSeconds,
        storeResponseSizeBytes,
        sloIneligibilityReasons,
        storeMaxLatency);
  }
}
0
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client/model/ValueType.java
package ai.tecton.client.model;

import java.util.Arrays;
import java.util.Optional;
import org.apache.commons.lang3.StringUtils;

/** Enum that represents the different data types (primitive or custom) of feature values */
public enum ValueType {
  /** java.lang.Boolean */
  BOOLEAN("boolean"),
  /** java.lang.Long */
  INT64("int64"),
  /** java.lang.String */
  STRING("string"),
  /** java.lang.Float */
  FLOAT32("float32"),
  /** java.lang.Double */
  FLOAT64("float64"),
  /** java.util.List */
  ARRAY("array");

  final String name;

  ValueType(String name) {
    this.name = name;
  }

  String getName() {
    return this.name;
  }

  /**
   * Returns the ValueType that matches the String representation passed as a parameter.
   *
   * @param name The String representation of the ValueType
   * @return Optional&lt;{@link ValueType}&gt; if a match is found, Optional.empty() otherwise
   */
  public static Optional<ValueType> fromString(String name) {
    // Linear scan over the enum constants; StringUtils handles a null name safely.
    for (ValueType valueType : ValueType.values()) {
      if (StringUtils.equalsIgnoreCase(valueType.getName(), name)) {
        return Optional.of(valueType);
      }
    }
    return Optional.empty();
  }
}
0
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client/model/package-info.java
/**
 * This package contains the model classes that represent the various result types returned by the
 * FeatureService API.
 */
package ai.tecton.client.model;
0
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client/request/AbstractGetFeaturesRequest.java
package ai.tecton.client.request;

import static java.lang.annotation.RetentionPolicy.RUNTIME;

import ai.tecton.client.exceptions.InvalidRequestParameterException;
import ai.tecton.client.exceptions.TectonErrorMessage;
import ai.tecton.client.model.MetadataOption;
import ai.tecton.client.transport.TectonHttpClient.HttpMethod;
import com.squareup.moshi.JsonAdapter;
import com.squareup.moshi.JsonQualifier;
import com.squareup.moshi.Moshi;
import com.squareup.moshi.Types;
import java.lang.annotation.Annotation;
import java.lang.annotation.Retention;
import java.lang.reflect.Type;
import java.util.HashSet;
import java.util.Objects;
import java.util.Set;
import org.jetbrains.annotations.Nullable;

/**
 * Parent class for GetFeaturesRequest and GetFeaturesBatchRequest and extends AbstractTectonRequest
 */
public abstract class AbstractGetFeaturesRequest extends AbstractTectonRequest {

  private static final HttpMethod httpMethod = HttpMethod.POST;
  final Set<MetadataOption> metadataOptions;

  AbstractGetFeaturesRequest(
      String workspaceName,
      String featureServiceName,
      String endpoint,
      Set<MetadataOption> metadataOptions)
      throws InvalidRequestParameterException {
    super(endpoint, httpMethod, workspaceName, featureServiceName);
    if (metadataOptions == null || metadataOptions.isEmpty()) {
      this.metadataOptions = RequestConstants.DEFAULT_METADATA_OPTIONS;
    } else {
      this.metadataOptions = getMetadataOptions(metadataOptions);
    }
  }

  Set<MetadataOption> getMetadataOptions() {
    return this.metadataOptions;
  }

  /**
   * Resolves the caller-supplied options into the effective metadata option set, expanding ALL and
   * NONE and always including the default options.
   *
   * <p>The result is always a fresh set: the previous implementation called {@code addAll}
   * directly on the shared {@code RequestConstants} sets (and on the caller's set), mutating
   * shared constants on every request.
   */
  static Set<MetadataOption> getMetadataOptions(Set<MetadataOption> metadataOptions) {
    Set<MetadataOption> finalMetadataOptionSet;
    if (metadataOptions.contains(MetadataOption.ALL)) {
      // Add everything except ALL and NONE from MetadataOption
      finalMetadataOptionSet = new HashSet<>(RequestConstants.ALL_METADATA_OPTIONS);
    } else if (metadataOptions.contains(MetadataOption.NONE)) {
      finalMetadataOptionSet = new HashSet<>(RequestConstants.NONE_METADATA_OPTIONS);
    } else {
      finalMetadataOptionSet = new HashSet<>(metadataOptions);
    }
    // add default metadata options
    finalMetadataOptionSet.addAll(RequestConstants.DEFAULT_METADATA_OPTIONS);
    return finalMetadataOptionSet;
  }

  // A get-features request must carry at least one join key or request-context entry.
  static void validateRequestParameters(GetFeaturesRequestData getFeaturesRequestData) {
    if (getFeaturesRequestData.isEmptyJoinKeyMap()
        && getFeaturesRequestData.isEmptyRequestContextMap()) {
      throw new InvalidRequestParameterException(TectonErrorMessage.EMPTY_REQUEST_MAPS);
    }
  }

  /** Moshi qualifier that forces serialization of null values for annotated fields. */
  @Retention(RUNTIME)
  @JsonQualifier
  public @interface SerializeNulls {
    JsonAdapter.Factory JSON_ADAPTER_FACTORY =
        new JsonAdapter.Factory() {
          @Nullable
          @Override
          public JsonAdapter<?> create(
              Type type, Set<? extends Annotation> annotations, Moshi moshi) {
            Set<? extends Annotation> nextAnnotations =
                Types.nextAnnotations(annotations, SerializeNulls.class);
            if (nextAnnotations == null) {
              return null;
            }
            return moshi.nextAdapter(this, type, nextAnnotations).serializeNulls();
          }
        };
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    if (!super.equals(o)) return false;
    AbstractGetFeaturesRequest that = (AbstractGetFeaturesRequest) o;
    return Objects.equals(metadataOptions, that.metadataOptions);
  }

  @Override
  public int hashCode() {
    return Objects.hash(super.hashCode(), metadataOptions);
  }
}
0
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client/request/AbstractTectonRequest.java
package ai.tecton.client.request;

import ai.tecton.client.exceptions.InvalidRequestParameterException;
import ai.tecton.client.exceptions.TectonClientException;
import ai.tecton.client.exceptions.TectonErrorMessage;
import ai.tecton.client.transport.TectonHttpClient;
import java.util.Objects;
import org.apache.commons.lang3.Validate;

/** An abstract parent class for Tecton FeatureService API Request subclasses */
public abstract class AbstractTectonRequest {

  private final String endpoint;
  private final TectonHttpClient.HttpMethod method;
  private final String workspaceName;
  private final String featureServiceName;

  /**
   * Parent class constructor that configures the request endpoint, request method, workspaceName
   * and featureServiceName.
   *
   * @param endpoint the API endpoint path the request is sent to
   * @param method the HTTP method used by the request
   * @param workspaceName the name of the workspace the feature service belongs to
   * @param featureServiceName the name of the feature service being queried
   * @throws TectonClientException when workspaceName or featureServiceName is null or empty
   */
  public AbstractTectonRequest(
      String endpoint,
      TectonHttpClient.HttpMethod method,
      String workspaceName,
      String featureServiceName)
      throws TectonClientException {
    validateRequestParameters(workspaceName, featureServiceName);
    this.endpoint = endpoint;
    this.method = method;
    this.workspaceName = workspaceName;
    this.featureServiceName = featureServiceName;
  }

  /**
   * Returns the endpoint for request type. This endpoint will be appended to the base URL provided
   * to the TectonClient
   */
  public String getEndpoint() {
    return endpoint;
  }

  /** Returns the Http Method used by the request type. */
  public TectonHttpClient.HttpMethod getMethod() {
    return method;
  }

  /** Returns the featureServiceName set for the request */
  public String getFeatureServiceName() {
    return this.featureServiceName;
  }

  /** Returns the workspaceName set for the request */
  public String getWorkspaceName() {
    return this.workspaceName;
  }

  /** Serializes this request to the JSON body sent to the FeatureService API. */
  public abstract String requestToJson();

  static void validateRequestParameters(String workspaceName, String featureServiceName) {
    try {
      Validate.notEmpty(workspaceName, TectonErrorMessage.INVALID_WORKSPACENAME);
      Validate.notEmpty(featureServiceName, TectonErrorMessage.INVALID_FEATURESERVICENAME);
    } catch (NullPointerException | IllegalArgumentException e) {
      // Validate.notEmpty throws NPE for null and IAE for empty input; catch exactly those rather
      // than a blanket Exception so unrelated failures are not silently converted.
      throw new InvalidRequestParameterException(e.getMessage());
    }
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    AbstractTectonRequest that = (AbstractTectonRequest) o;
    return endpoint.equals(that.endpoint)
        && method == that.method
        && workspaceName.equals(that.workspaceName)
        && featureServiceName.equals(that.featureServiceName);
  }

  @Override
  public int hashCode() {
    return Objects.hash(endpoint, method, workspaceName, featureServiceName);
  }
}
0
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client/request/GetFeatureServiceMetadataRequest.java
package ai.tecton.client.request; import ai.tecton.client.exceptions.InvalidRequestParameterException; import ai.tecton.client.exceptions.TectonErrorMessage; import ai.tecton.client.transport.TectonHttpClient; import com.squareup.moshi.JsonAdapter; import com.squareup.moshi.Moshi; /** * A subclass of {@link AbstractTectonRequest} that represents a request to the * <i>feature-service/metadata</i> endpoint to retrieve metadata about a FeatureService, including * the schema for join keys and request context, and the in-order output schema of returned feature * values. */ public class GetFeatureServiceMetadataRequest extends AbstractTectonRequest { private static final TectonHttpClient.HttpMethod method = TectonHttpClient.HttpMethod.POST; private static final String ENDPOINT = "/api/v1/feature-service/metadata"; private static final String DEFAULT_WORKSPACE = "prod"; private final JsonAdapter<GetFeatureServiceMetadataJson> jsonAdapter; /** * Constructor that creates a new GetFeatureServiceMetadataRequest with the specified * workspaceName and featureServiceName * * @param featureServiceName Name of the Feature Service for which the metadata is being requested * @param workspaceName Name of the workspace in which the Feature Service is defined */ public GetFeatureServiceMetadataRequest(String featureServiceName, String workspaceName) { super(ENDPOINT, method, workspaceName, featureServiceName); Moshi moshi = new Moshi.Builder().build(); jsonAdapter = moshi.adapter(GetFeatureServiceMetadataJson.class); } /** * Constructor that creates a new GetFeatureServiceMetadataRequest with default workspaceName * "prod" and featureServiceName provided. 
* * @param featureServiceName Name of the Feature Service for which the metadata is being requested */ public GetFeatureServiceMetadataRequest(String featureServiceName) { super(ENDPOINT, method, DEFAULT_WORKSPACE, featureServiceName); Moshi moshi = new Moshi.Builder().build(); jsonAdapter = moshi.adapter(GetFeatureServiceMetadataJson.class); } static class GetFeatureServiceMetadataJson { GetFeatureServiceMetadataFields params; GetFeatureServiceMetadataJson(GetFeatureServiceMetadataFields params) { this.params = params; } } static class GetFeatureServiceMetadataFields { String feature_service_name; String workspace_name; } /** * Get the JSON representation of the request that will be sent to the /metadata endpoint. * * @return JSON String representation of {@link GetFeatureServiceMetadataRequest} */ @Override public String requestToJson() { GetFeatureServiceMetadataFields serviceMetadataFields = new GetFeatureServiceMetadataFields(); serviceMetadataFields.feature_service_name = super.getFeatureServiceName(); serviceMetadataFields.workspace_name = super.getWorkspaceName(); try { return jsonAdapter.toJson(new GetFeatureServiceMetadataJson(serviceMetadataFields)); } catch (Exception e) { throw new InvalidRequestParameterException( String.format(TectonErrorMessage.INVALID_GET_SERVICE_METADATA_REQUEST, e.getMessage())); } } /** * A Builder class for building instances of {@link GetFeatureServiceMetadataRequest} objects from * values configured by setters */ public static final class Builder { private String workspaceName; private String featureServiceName; /** * Setter for workspaceName * * @param workspaceName Name of the workspace to fetch metadata for the FeatureService from * @return this Builder */ public Builder workspaceName(String workspaceName) { this.workspaceName = workspaceName; return this; } /** * Setter for featureServiceName * * @param featureServiceName Name of the Feature Service for which the metadata is being * requested * @return this Builder */ 
public Builder featureServiceName(String featureServiceName) { this.featureServiceName = featureServiceName; return this; } /** * Returns an instance of {@link GetFeatureServiceMetadataRequest} * * @return {@link GetFeatureServiceMetadataRequest} object * @throws InvalidRequestParameterException when workspaceName and/or featureServiceName is null * or empty */ public GetFeatureServiceMetadataRequest build() { return new GetFeatureServiceMetadataRequest(featureServiceName, workspaceName); } } }
0
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client/request/GetFeaturesBatchRequest.java
package ai.tecton.client.request;

import ai.tecton.client.exceptions.InvalidRequestParameterException;
import ai.tecton.client.exceptions.TectonClientException;
import ai.tecton.client.exceptions.TectonErrorMessage;
import ai.tecton.client.model.MetadataOption;
import ai.tecton.client.transport.TectonHttpClient;
import com.squareup.moshi.JsonAdapter;
import com.squareup.moshi.Moshi;
import java.time.Duration;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
import org.apache.commons.collections4.ListUtils;

/**
 * A class that represents a batch request to retrieve a list of feature vectors from the feature
 * server, for a given workspaceName and featureServiceName. The class can be used to make parallel
 * requests to retrieve multiple feature vectors from the feature server API. The actual number of
 * concurrent calls depends on the ConnectionPool size and {@code maxParallelRequests}
 * configurations in the {@link ai.tecton.client.TectonClientOptions}.
 *
 * <p>GetFeaturesBatchRequest uses either the /get-features or the /get-features-batch endpoint
 * depending on the configuration {@code microBatchSize}. By default, the microBatchSize is set to
 * {@link RequestConstants#DEFAULT_MICRO_BATCH_SIZE}. It can be configured to any value in the range
 * [ 1, {@link RequestConstants#MAX_MICRO_BATCH_SIZE} ].
 *
 * <p>For a GetFeaturesBatchRequest with a {@link GetFeaturesRequestData} list of size {@code n} and
 * a {@code microBatchSize} of 1, the client enqueues {@code n} HTTP calls to be sent in parallel to
 * the /get-features endpoint. For a {@code microBatchSize} of k in the range [ 1, {@link
 * RequestConstants#MAX_MICRO_BATCH_SIZE} ], the client enqueues Math.ceil(n/k) microbatch requests
 * to be sent in parallel to the /get-features-batch endpoint. In both cases the client waits until
 * all calls are complete or the configured {@code timeout} has elapsed and returns a {@link List}
 * of {@link ai.tecton.client.response.GetFeaturesResponse} objects of size {@code n}.
 */
public class GetFeaturesBatchRequest {

  private static final String BATCH_ENDPOINT = "/api/v1/feature-service/get-features-batch";

  // Moshi and its generated adapters are thread-safe and expensive to create, so build them once.
  // (Previously the adapter was a mutable static re-assigned from the constructor, which was racy
  // when requests were constructed concurrently and left the adapter null on the non-batch path.)
  private static final Moshi MOSHI =
      new Moshi.Builder()
          .add(AbstractGetFeaturesRequest.SerializeNulls.JSON_ADAPTER_FACTORY)
          .build();
  private static final JsonAdapter<GetFeaturesMicroBatchRequest.GetFeaturesRequestBatchJson>
      jsonAdapter = MOSHI.adapter(GetFeaturesMicroBatchRequest.GetFeaturesRequestBatchJson.class);

  private final List<? extends AbstractGetFeaturesRequest> requestList;
  private final int microBatchSize;
  private final Duration timeout;
  private final RequestOptions requestOptions;
  private final String endpoint;
  private final TectonHttpClient.HttpMethod method;

  /**
   * Constructor that creates a new GetFeaturesBatchRequest with the specified parameters. {@code
   * metadataOptions} defaults to {@link RequestConstants#DEFAULT_METADATA_OPTIONS}, {@code
   * microBatchSize} defaults to {@link RequestConstants#DEFAULT_MICRO_BATCH_SIZE} and {@code
   * timeout} defaults to None.
   *
   * @param workspaceName Name of the workspace in which the Feature Service is defined
   * @param featureServiceName Name of the Feature Service for which the feature vectors are being
   *     requested
   * @param requestDataList a {@link List} of {@link GetFeaturesRequestData} objects with joinKeyMap
   *     and/or requestContextMap
   * @throws InvalidRequestParameterException when workspaceName or featureServiceName is empty or
   *     null
   * @throws InvalidRequestParameterException when requestDataList is invalid (null/empty or
   *     contains null/empty elements)
   */
  public GetFeaturesBatchRequest(
      String workspaceName,
      String featureServiceName,
      List<GetFeaturesRequestData> requestDataList) {
    this(
        workspaceName,
        featureServiceName,
        requestDataList,
        RequestConstants.DEFAULT_METADATA_OPTIONS,
        RequestConstants.DEFAULT_MICRO_BATCH_SIZE,
        RequestConstants.NONE_TIMEOUT,
        null);
  }

  /**
   * Constructor that creates a new GetFeaturesBatchRequest with the specified parameters. {@code
   * microBatchSize} defaults to {@link RequestConstants#DEFAULT_MICRO_BATCH_SIZE} and {@code
   * timeout} defaults to None.
   *
   * @param workspaceName Name of the workspace in which the Feature Service is defined
   * @param featureServiceName Name of the Feature Service for which the feature vectors are being
   *     requested
   * @param requestDataList a {@link List} of {@link GetFeaturesRequestData} objects with joinKeyMap
   *     and/or requestContextMap
   * @param metadataOptions A {@link Set} of {@link MetadataOption} for retrieving additional
   *     metadata about the feature values. Use {@link RequestConstants#ALL_METADATA_OPTIONS} to
   *     request all metadata and {@link RequestConstants#NONE_METADATA_OPTIONS} to request no
   *     metadata respectively. By default, {@link RequestConstants#DEFAULT_METADATA_OPTIONS} will
   *     be added to each request
   * @throws InvalidRequestParameterException when workspaceName or featureServiceName is empty or
   *     null
   * @throws InvalidRequestParameterException when requestDataList is invalid (null/empty or
   *     contains null/empty elements)
   */
  public GetFeaturesBatchRequest(
      String workspaceName,
      String featureServiceName,
      List<GetFeaturesRequestData> requestDataList,
      Set<MetadataOption> metadataOptions) {
    this(
        workspaceName,
        featureServiceName,
        requestDataList,
        metadataOptions,
        RequestConstants.DEFAULT_MICRO_BATCH_SIZE,
        RequestConstants.NONE_TIMEOUT,
        null);
  }

  /**
   * Constructor that creates a new GetFeaturesBatchRequest with the specified parameters. {@code
   * timeout} defaults to None.
   *
   * @param workspaceName Name of the workspace in which the Feature Service is defined
   * @param featureServiceName Name of the Feature Service for which the feature vectors are being
   *     requested
   * @param requestDataList a {@link List} of {@link GetFeaturesRequestData} objects with joinKeyMap
   *     and/or requestContextMap
   * @param metadataOptions A {@link Set} of {@link MetadataOption} for retrieving additional
   *     metadata about the feature values
   * @param microBatchSize an int value between 1 and {@value
   *     RequestConstants#MAX_MICRO_BATCH_SIZE}. The client splits the GetFeaturesBatchRequest into
   *     multiple micro batches of this size and executes them in parallel
   * @throws InvalidRequestParameterException when workspaceName or featureServiceName is empty or
   *     null
   * @throws InvalidRequestParameterException when requestDataList is invalid (null/empty or
   *     contains null/empty elements)
   */
  public GetFeaturesBatchRequest(
      String workspaceName,
      String featureServiceName,
      List<GetFeaturesRequestData> requestDataList,
      Set<MetadataOption> metadataOptions,
      int microBatchSize) {
    this(
        workspaceName,
        featureServiceName,
        requestDataList,
        metadataOptions,
        microBatchSize,
        RequestConstants.NONE_TIMEOUT,
        null);
  }

  /**
   * Constructor that creates a new GetFeaturesBatchRequest with the specified parameters.
   *
   * @param workspaceName Name of the workspace in which the Feature Service is defined
   * @param featureServiceName Name of the Feature Service for which the feature vectors are being
   *     requested
   * @param requestDataList a {@link List} of {@link GetFeaturesRequestData} objects with joinKeyMap
   *     and/or requestContextMap
   * @param metadataOptions A {@link Set} of {@link MetadataOption} for retrieving additional
   *     metadata about the feature values
   * @param microBatchSize an int value between 1 and {@value
   *     RequestConstants#MAX_MICRO_BATCH_SIZE}
   * @param timeout The max time in {@link Duration} for which the client waits for the batch
   *     requests to complete before canceling the operation and returning the partial list of
   *     results
   * @throws InvalidRequestParameterException when workspaceName or featureServiceName is empty or
   *     null
   * @throws InvalidRequestParameterException when requestDataList is invalid (null/empty or
   *     contains null/empty elements)
   * @throws InvalidRequestParameterException when the microBatchSize is out of bounds of [ 1,
   *     {@value RequestConstants#MAX_MICRO_BATCH_SIZE} ]
   */
  public GetFeaturesBatchRequest(
      String workspaceName,
      String featureServiceName,
      List<GetFeaturesRequestData> requestDataList,
      Set<MetadataOption> metadataOptions,
      int microBatchSize,
      Duration timeout) {
    this(
        workspaceName,
        featureServiceName,
        requestDataList,
        metadataOptions,
        microBatchSize,
        timeout,
        null);
  }

  /**
   * Constructor that creates a new GetFeaturesBatchRequest with the specified parameters including
   * requestOptions.
   *
   * @param workspaceName Name of the workspace in which the Feature Service is defined
   * @param featureServiceName Name of the Feature Service for which the feature vectors are being
   *     requested
   * @param requestDataList a {@link List} of {@link GetFeaturesRequestData} objects with joinKeyMap
   *     and/or requestContextMap
   * @param metadataOptions A {@link Set} of {@link MetadataOption} for retrieving additional
   *     metadata about the feature values. Use {@link RequestConstants#ALL_METADATA_OPTIONS} to
   *     request all metadata and {@link RequestConstants#NONE_METADATA_OPTIONS} to request no
   *     metadata respectively. By default, {@link RequestConstants#DEFAULT_METADATA_OPTIONS} will
   *     be added to each request
   * @param microBatchSize an int value between 1 and {@value
   *     RequestConstants#MAX_MICRO_BATCH_SIZE}. The client splits the GetFeaturesBatchRequest into
   *     multiple micro batches of this size and executes them in parallel. By default, the
   *     microBatchSize is set to {@value RequestConstants#DEFAULT_MICRO_BATCH_SIZE}
   * @param timeout The max time in {@link Duration} for which the client waits for the batch
   *     requests to complete before canceling the operation and returning the partial list of
   *     results
   * @param requestOptions {@link RequestOptions} object with request-level options to control
   *     feature server behavior
   * @throws InvalidRequestParameterException when workspaceName or featureServiceName is empty or
   *     null
   * @throws InvalidRequestParameterException when requestDataList is invalid (null/empty or
   *     contains null/empty elements)
   * @throws InvalidRequestParameterException when the microBatchSize is out of bounds of [ 1,
   *     {@value RequestConstants#MAX_MICRO_BATCH_SIZE} ]
   */
  public GetFeaturesBatchRequest(
      String workspaceName,
      String featureServiceName,
      List<GetFeaturesRequestData> requestDataList,
      Set<MetadataOption> metadataOptions,
      int microBatchSize,
      Duration timeout,
      RequestOptions requestOptions) {
    validateParameters(workspaceName, featureServiceName, requestDataList, microBatchSize);
    this.timeout = timeout;
    this.requestOptions = requestOptions;
    this.microBatchSize = microBatchSize;
    // Both the single and the batch endpoint use POST.
    this.method = TectonHttpClient.HttpMethod.POST;
    if (microBatchSize > 1 && requestDataList.size() > 1) {
      // Batch mode: partition requestDataList into sublists of size microBatchSize and create one
      // GetFeaturesMicroBatchRequest per sublist, targeting /get-features-batch.
      this.requestList =
          ListUtils.partition(requestDataList, microBatchSize).stream()
              .map(
                  requestData ->
                      new GetFeaturesMicroBatchRequest(
                          workspaceName,
                          featureServiceName,
                          requestData,
                          metadataOptions,
                          requestOptions))
              .collect(Collectors.toList());
      this.endpoint = BATCH_ENDPOINT;
    } else {
      // Single mode (microBatchSize == 1 or a single entry): one GetFeaturesRequest per entry,
      // targeting /get-features.
      this.requestList =
          requestDataList.stream()
              .map(
                  requestData ->
                      new GetFeaturesRequest(
                          workspaceName,
                          featureServiceName,
                          requestData,
                          metadataOptions,
                          requestOptions))
              .collect(Collectors.toList());
      this.endpoint = GetFeaturesRequest.ENDPOINT;
    }
  }

  /**
   * Return Batch Request List
   *
   * @return {@link List} of {@link AbstractGetFeaturesRequest} representing the list of batch
   *     requests
   */
  public List<? extends AbstractGetFeaturesRequest> getRequestList() {
    return this.requestList;
  }

  /**
   * Getter for timeout
   *
   * @return timeout in {@link Duration}
   */
  public Duration getTimeout() {
    return timeout;
  }

  /**
   * Getter for microBatchSize
   *
   * @return microBatchSize ({@value RequestConstants#DEFAULT_MICRO_BATCH_SIZE} if not set)
   */
  public int getMicroBatchSize() {
    return this.microBatchSize;
  }

  public TectonHttpClient.HttpMethod getMethod() {
    return method;
  }

  public String getEndpoint() {
    return endpoint;
  }

  /**
   * A Builder class for building instances of {@link GetFeaturesBatchRequest} objects from values
   * configured by setters
   */
  public static class Builder {
    private String workspaceName;
    private String featureServiceName;
    private List<GetFeaturesRequestData> requestDataList;
    private Set<MetadataOption> metadataOptionList = RequestConstants.DEFAULT_METADATA_OPTIONS;
    private int microBatchSize = RequestConstants.DEFAULT_MICRO_BATCH_SIZE;
    private Duration timeout = RequestConstants.NONE_TIMEOUT;
    private RequestOptions requestOptions;

    /** Constructs an empty Builder */
    public Builder() {
      this.requestDataList = new ArrayList<>();
    }

    /**
     * Setter for workspaceName
     *
     * @param workspaceName Name of the workspace in which the Feature Service is defined
     * @return this Builder
     */
    public Builder workspaceName(String workspaceName) {
      this.workspaceName = workspaceName;
      return this;
    }

    /**
     * Setter for featureServiceName
     *
     * @param featureServiceName Name of the Feature Service for which feature vectors are being
     *     requested
     * @return this Builder
     */
    public Builder featureServiceName(String featureServiceName) {
      this.featureServiceName = featureServiceName;
      return this;
    }

    /**
     * Setter for a {@link java.util.List} of {@link GetFeaturesRequestData}
     *
     * @param requestDataList {@link java.util.List} of {@link GetFeaturesRequestData} objects with
     *     joinKeyMap and/or requestContextMap
     * @return this Builder
     * @throws TectonClientException when requestDataList is null or empty
     */
    public Builder requestDataList(List<GetFeaturesRequestData> requestDataList)
        throws TectonClientException {
      this.requestDataList = requestDataList;
      return this;
    }

    /**
     * Adds a single {@link GetFeaturesRequestData} object to the List
     *
     * @param requestData {@link GetFeaturesRequestData} object with joinKeyMap and/or
     *     requestContextMap
     * @return this Builder
     */
    public Builder addRequestData(GetFeaturesRequestData requestData) {
      this.requestDataList.add(requestData);
      return this;
    }

    /**
     * Setter for {@link MetadataOption}
     *
     * @param metadataOptions A {@link Set} of {@link MetadataOption} for retrieving additional
     *     metadata about the feature values. Use {@link RequestConstants#ALL_METADATA_OPTIONS} to
     *     request all metadata and {@link RequestConstants#NONE_METADATA_OPTIONS} to request no
     *     metadata respectively. By default, {@link RequestConstants#DEFAULT_METADATA_OPTIONS}
     *     will be added to each request
     * @return this Builder
     */
    public Builder metadataOptions(Set<MetadataOption> metadataOptions) {
      this.metadataOptionList = metadataOptions;
      return this;
    }

    /**
     * Setter for microBatchSize
     *
     * @param microBatchSize an int value between 1 and {@value
     *     RequestConstants#MAX_MICRO_BATCH_SIZE}. The client splits the GetFeaturesBatchRequest
     *     into multiple micro batches of this size and executes them in parallel. By default, the
     *     microBatchSize is set to {@value RequestConstants#DEFAULT_MICRO_BATCH_SIZE}
     * @return this Builder
     * @throws InvalidRequestParameterException when the microBatchSize is out of bounds of [ 1,
     *     {@value RequestConstants#MAX_MICRO_BATCH_SIZE} ]
     */
    public Builder microBatchSize(int microBatchSize) throws TectonClientException {
      this.microBatchSize = microBatchSize;
      return this;
    }

    /**
     * @param timeout The max time in {@link Duration} for which the client waits for the batch
     *     requests to complete before canceling the operation and returning the partial list of
     *     results
     * @return this Builder
     */
    public Builder timeout(Duration timeout) {
      this.timeout = timeout;
      return this;
    }

    /**
     * Setter for {@link RequestOptions}
     *
     * @param requestOptions {@link RequestOptions} object with request-level options to control
     *     feature server behavior
     * @return this Builder
     */
    public Builder requestOptions(RequestOptions requestOptions) {
      this.requestOptions = requestOptions;
      return this;
    }

    /**
     * Returns an instance of {@link GetFeaturesBatchRequest} created from the fields set on this
     * builder
     *
     * @return {@link GetFeaturesBatchRequest} object
     * @throws TectonClientException when requestDataList is invalid (when the requestDataList is
     *     null or empty, or any joinKeyMap or requestContextMap is null or empty)
     * @throws InvalidRequestParameterException when microBatchSize is out of bounds of [ 1,
     *     {@value RequestConstants#MAX_MICRO_BATCH_SIZE} ]
     */
    public GetFeaturesBatchRequest build() throws TectonClientException {
      return new GetFeaturesBatchRequest(
          workspaceName,
          featureServiceName,
          requestDataList,
          metadataOptionList,
          microBatchSize,
          timeout,
          requestOptions);
    }
  }

  // Validate request parameters shared by all constructors; throws on the first invalid value.
  private static void validateParameters(
      String workspaceName,
      String featureServiceName,
      List<GetFeaturesRequestData> requestDataList,
      int microBatchSize) {
    AbstractTectonRequest.validateRequestParameters(workspaceName, featureServiceName);
    if (requestDataList == null || requestDataList.isEmpty()) {
      throw new InvalidRequestParameterException(TectonErrorMessage.INVALID_REQUEST_DATA_LIST);
    }
    // Sequential validation keeps exception propagation deterministic.
    requestDataList.forEach(AbstractGetFeaturesRequest::validateRequestParameters);
    if (microBatchSize > RequestConstants.MAX_MICRO_BATCH_SIZE || microBatchSize < 1) {
      throw new InvalidRequestParameterException(
          String.format(
              TectonErrorMessage.INVALID_MICRO_BATCH_SIZE,
              1,
              RequestConstants.MAX_MICRO_BATCH_SIZE));
    }
  }

  /** A micro batch of request data serialized as a single call to /get-features-batch. */
  static class GetFeaturesMicroBatchRequest extends AbstractGetFeaturesRequest {
    private final List<GetFeaturesRequestData> requestDataList;
    private final RequestOptions requestOptions;

    GetFeaturesMicroBatchRequest(
        String workspaceName,
        String featureServiceName,
        List<GetFeaturesRequestData> requestDataList,
        Set<MetadataOption> metadataOptions) {
      this(workspaceName, featureServiceName, requestDataList, metadataOptions, null);
    }

    GetFeaturesMicroBatchRequest(
        String workspaceName,
        String featureServiceName,
        List<GetFeaturesRequestData> requestDataList,
        Set<MetadataOption> metadataOptions,
        RequestOptions requestOptions) {
      super(workspaceName, featureServiceName, BATCH_ENDPOINT, metadataOptions);
      this.requestDataList = requestDataList;
      this.requestOptions = requestOptions;
    }

    // Moshi wrapper type: the request body is serialized as {"params": {...}}.
    static class GetFeaturesRequestBatchJson {
      GetFeaturesBatchFields params;

      GetFeaturesRequestBatchJson(GetFeaturesBatchFields params) {
        this.params = params;
      }
    }

    // Field names intentionally use snake_case so they match the JSON keys of the batch endpoint.
    static class GetFeaturesBatchFields {
      String feature_service_name;
      String workspace_name;
      List<RequestDataField> request_data;
      Map<String, Boolean> metadata_options;
      Map<String, Object> request_options;
    }

    static class RequestDataField {
      @SerializeNulls
      Map<String, String> join_key_map;

      Map<String, Object> request_context_map;
    }

    List<GetFeaturesRequestData> getFeaturesRequestData() {
      return this.requestDataList;
    }

    /**
     * Get the JSON representation of this micro batch that will be sent to the
     * /get-features-batch endpoint.
     *
     * @return JSON String representation of this micro batch request
     */
    @Override
    public String requestToJson() {
      GetFeaturesBatchFields getFeaturesFields = new GetFeaturesBatchFields();
      getFeaturesFields.feature_service_name = this.getFeatureServiceName();
      getFeaturesFields.workspace_name = this.getWorkspaceName();
      getFeaturesFields.request_data = new ArrayList<>(this.requestDataList.size());
      this.requestDataList.forEach(
          requestData -> {
            RequestDataField requestDataField = new RequestDataField();
            // Empty maps are omitted from the payload entirely.
            if (!requestData.isEmptyJoinKeyMap()) {
              requestDataField.join_key_map = requestData.getJoinKeyMap();
            }
            if (!requestData.isEmptyRequestContextMap()) {
              requestDataField.request_context_map = requestData.getRequestContextMap();
            }
            getFeaturesFields.request_data.add(requestDataField);
          });
      if (!metadataOptions.isEmpty()) {
        getFeaturesFields.metadata_options =
            metadataOptions.stream()
                .collect(Collectors.toMap(MetadataOption::getJsonName, (a) -> Boolean.TRUE));
      }
      if (requestOptions != null && !requestOptions.isEmpty()) {
        getFeaturesFields.request_options = requestOptions.getOptions();
      }
      GetFeaturesRequestBatchJson getFeaturesRequestJson =
          new GetFeaturesRequestBatchJson(getFeaturesFields);
      try {
        return jsonAdapter.toJson(getFeaturesRequestJson);
      } catch (Exception e) {
        throw new InvalidRequestParameterException(
            String.format(TectonErrorMessage.INVALID_GET_FEATURE_BATCH_REQUEST, e.getMessage()));
      }
    }

    @Override
    public boolean equals(Object o) {
      if (this == o) return true;
      if (o == null || getClass() != o.getClass()) return false;
      if (!super.equals(o)) return false;
      GetFeaturesMicroBatchRequest that = (GetFeaturesMicroBatchRequest) o;
      // requestOptions participates in equality, matching GetFeaturesRequest.equals.
      return requestDataList.equals(that.requestDataList)
          && Objects.equals(requestOptions, that.requestOptions);
    }

    @Override
    public int hashCode() {
      return Objects.hash(super.hashCode(), requestDataList, requestOptions);
    }
  }

  /** Overrides <i>equals()</i> in class {@link Object} */
  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    GetFeaturesBatchRequest that = (GetFeaturesBatchRequest) o;
    return microBatchSize == that.microBatchSize
        && Objects.equals(requestList, that.requestList)
        && Objects.equals(timeout, that.timeout)
        && Objects.equals(endpoint, that.endpoint)
        && method == that.method;
  }

  /** Overrides <i>hashCode()</i> in class {@link Object} */
  @Override
  public int hashCode() {
    return Objects.hash(requestList, microBatchSize, timeout, endpoint, method);
  }
}
0
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client/request/GetFeaturesRequest.java
package ai.tecton.client.request;

import ai.tecton.client.exceptions.InvalidRequestParameterException;
import ai.tecton.client.exceptions.TectonErrorMessage;
import ai.tecton.client.model.MetadataOption;
import com.squareup.moshi.JsonAdapter;
import com.squareup.moshi.Moshi;
import java.util.HashSet;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;

/**
 * A subclass of {@link AbstractTectonRequest} that represents a request to the <i>/get-features</i>
 * endpoint to retrieve feature values from Tecton's online store
 */
public class GetFeaturesRequest extends AbstractGetFeaturesRequest {

  static final String ENDPOINT = "/api/v1/feature-service/get-features";

  private final JsonAdapter<GetFeaturesRequestJson> jsonAdapter;
  private final GetFeaturesRequestData getFeaturesRequestData;
  private final RequestOptions requestOptions;
  private final Moshi moshi = new Moshi.Builder().add(SerializeNulls.JSON_ADAPTER_FACTORY).build();

  /**
   * Constructor that creates a new GetFeaturesRequest with specified parameters. {@code
   * metadataOptions} will default to {@link RequestConstants#DEFAULT_METADATA_OPTIONS}
   *
   * @param workspaceName Name of the workspace in which the Feature Service is defined
   * @param featureServiceName Name of the Feature Service for which the feature vector is being
   *     requested
   * @param getFeaturesRequestData {@link GetFeaturesRequestData} object with joinKeyMap and/or
   *     requestContextMap
   */
  public GetFeaturesRequest(
      String workspaceName,
      String featureServiceName,
      GetFeaturesRequestData getFeaturesRequestData) {
    this(
        workspaceName,
        featureServiceName,
        getFeaturesRequestData,
        RequestConstants.DEFAULT_METADATA_OPTIONS,
        null);
  }

  /**
   * Constructor that creates a new GetFeaturesRequest with the specified parameters
   *
   * @param workspaceName Name of the workspace in which the Feature Service is defined
   * @param featureServiceName Name of the Feature Service for which the feature vector is being
   *     requested
   * @param getFeaturesRequestData {@link GetFeaturesRequestData} object with joinKeyMap and/or
   *     requestContextMap
   * @param metadataOptions A {@link Set} of {@link MetadataOption} for retrieving additional
   *     metadata about the feature values. Use {@link RequestConstants#ALL_METADATA_OPTIONS} to
   *     request all metadata and {@link RequestConstants#NONE_METADATA_OPTIONS} to request no
   *     metadata respectively. By default, {@link RequestConstants#DEFAULT_METADATA_OPTIONS} will
   *     be added to each request
   */
  public GetFeaturesRequest(
      String workspaceName,
      String featureServiceName,
      GetFeaturesRequestData getFeaturesRequestData,
      Set<MetadataOption> metadataOptions) {
    this(workspaceName, featureServiceName, getFeaturesRequestData, metadataOptions, null);
  }

  /**
   * Constructor that creates a new GetFeaturesRequest with the specified parameters including
   * requestOptions
   *
   * @param workspaceName Name of the workspace in which the Feature Service is defined
   * @param featureServiceName Name of the Feature Service for which the feature vector is being
   *     requested
   * @param getFeaturesRequestData {@link GetFeaturesRequestData} object with joinKeyMap and/or
   *     requestContextMap
   * @param metadataOptions A {@link Set} of {@link MetadataOption} for retrieving additional
   *     metadata about the feature values. Use {@link RequestConstants#ALL_METADATA_OPTIONS} to
   *     request all metadata and {@link RequestConstants#NONE_METADATA_OPTIONS} to request no
   *     metadata respectively. By default, {@link RequestConstants#DEFAULT_METADATA_OPTIONS} will
   *     be added to each request
   * @param requestOptions {@link RequestOptions} object with request-level options to control
   *     feature server behavior
   */
  public GetFeaturesRequest(
      String workspaceName,
      String featureServiceName,
      GetFeaturesRequestData getFeaturesRequestData,
      Set<MetadataOption> metadataOptions,
      RequestOptions requestOptions) {
    super(workspaceName, featureServiceName, ENDPOINT, metadataOptions);
    validateRequestParameters(getFeaturesRequestData);
    this.getFeaturesRequestData = getFeaturesRequestData;
    this.requestOptions = requestOptions;
    jsonAdapter = moshi.adapter(GetFeaturesRequestJson.class);
  }

  GetFeaturesRequestData getFeaturesRequestData() {
    return this.getFeaturesRequestData;
  }

  RequestOptions getRequestOptions() {
    return this.requestOptions;
  }

  // Moshi wrapper type: the request body is serialized as {"params": {...}}.
  static class GetFeaturesRequestJson {
    GetFeaturesFields params;

    GetFeaturesRequestJson(GetFeaturesFields params) {
      this.params = params;
    }
  }

  // Field names intentionally use snake_case so they match the JSON keys expected by the endpoint.
  static class GetFeaturesFields {
    String feature_service_name;
    String workspace_name;

    @SerializeNulls
    Map<String, String> join_key_map;

    @SerializeNulls
    Map<String, Object> request_context_map;

    Map<String, Boolean> metadata_options;
    Map<String, Object> request_options;
  }

  /**
   * Get the JSON representation of the request that will be sent to the /get-features endpoint.
   *
   * @return JSON String representation of {@link GetFeaturesRequest}
   */
  @Override
  public String requestToJson() {
    GetFeaturesFields getFeaturesFields = new GetFeaturesFields();
    getFeaturesFields.feature_service_name = this.getFeatureServiceName();
    getFeaturesFields.workspace_name = this.getWorkspaceName();
    // Empty maps are omitted from the payload entirely.
    if (!getFeaturesRequestData().isEmptyJoinKeyMap()) {
      getFeaturesFields.join_key_map = getFeaturesRequestData().getJoinKeyMap();
    }
    if (!getFeaturesRequestData().isEmptyRequestContextMap()) {
      getFeaturesFields.request_context_map = getFeaturesRequestData().getRequestContextMap();
    }
    if (!metadataOptions.isEmpty()) {
      getFeaturesFields.metadata_options =
          metadataOptions.stream()
              .collect(Collectors.toMap(MetadataOption::getJsonName, (a) -> Boolean.TRUE));
    }
    if (requestOptions != null && !requestOptions.isEmpty()) {
      getFeaturesFields.request_options = requestOptions.getOptions();
    }
    GetFeaturesRequestJson getFeaturesRequestJson = new GetFeaturesRequestJson(getFeaturesFields);
    try {
      return jsonAdapter.toJson(getFeaturesRequestJson);
    } catch (Exception e) {
      throw new InvalidRequestParameterException(
          String.format(TectonErrorMessage.INVALID_GET_FEATURE_REQUEST, e.getMessage()));
    }
  }

  /** Overrides <i>equals()</i> in class {@link Object} */
  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    if (!super.equals(o)) return false;
    GetFeaturesRequest that = (GetFeaturesRequest) o;
    return getFeaturesRequestData.equals(that.getFeaturesRequestData)
        && Objects.equals(requestOptions, that.requestOptions);
  }

  /** Overrides <i>hashCode()</i> in class {@link Object} */
  @Override
  public int hashCode() {
    return Objects.hash(super.hashCode(), getFeaturesRequestData, requestOptions);
  }

  /**
   * A Builder class for building instances of {@link GetFeaturesRequest} objects from values
   * configured by setters
   */
  public static final class Builder {
    Set<MetadataOption> metadataOptions;
    private String workspaceName;
    private String featureServiceName;
    private GetFeaturesRequestData getFeaturesRequestData;
    private RequestOptions requestOptions;

    /** Constructor for instantiating an empty Builder */
    public Builder() {
      this.metadataOptions = new HashSet<>();
    }

    /**
     * Setter for metadataOptions
     *
     * @param metadataOptions A {@link Set} of {@link MetadataOption} for retrieving additional
     *     metadata about the feature values. Use {@link RequestConstants#ALL_METADATA_OPTIONS} to
     *     request all metadata and {@link RequestConstants#NONE_METADATA_OPTIONS} to request no
     *     metadata respectively. By default, {@link RequestConstants#DEFAULT_METADATA_OPTIONS}
     *     will be added to each request
     * @return this Builder
     */
    public Builder metadataOptions(Set<MetadataOption> metadataOptions) {
      this.metadataOptions = metadataOptions;
      return this;
    }

    /**
     * Setter for workspaceName
     *
     * @param workspaceName Name of the workspace in which the Feature Service is defined
     * @return this Builder
     */
    public Builder workspaceName(String workspaceName) {
      this.workspaceName = workspaceName;
      return this;
    }

    /**
     * Setter for featureServiceName
     *
     * @param featureServiceName Name of the Feature Service for which the feature vector is being
     *     requested
     * @return this Builder
     */
    public Builder featureServiceName(String featureServiceName) {
      this.featureServiceName = featureServiceName;
      return this;
    }

    /**
     * Setter for {@link GetFeaturesRequestData}
     *
     * @param getFeaturesRequestData {@link GetFeaturesRequestData} object with joinKeyMap and/or
     *     requestContextMap
     * @return this Builder
     */
    public Builder getFeaturesRequestData(GetFeaturesRequestData getFeaturesRequestData) {
      this.getFeaturesRequestData = getFeaturesRequestData;
      return this;
    }

    /**
     * Setter for {@link RequestOptions}
     *
     * @param requestOptions {@link RequestOptions} object with request-level options to control
     *     feature server behavior
     * @return this Builder
     */
    public Builder requestOptions(RequestOptions requestOptions) {
      this.requestOptions = requestOptions;
      return this;
    }

    /**
     * Returns an instance of {@link GetFeaturesRequest} created from the fields set on this builder
     *
     * @return {@link GetFeaturesRequest} object
     * @throws InvalidRequestParameterException when workspaceName and/or featureServiceName is
     *     null or empty
     */
    public GetFeaturesRequest build() {
      // BUG FIX: the previous implementation silently dropped configured metadataOptions whenever
      // requestOptions was not set, always falling back to the defaults. All combinations now
      // route through the most specific constructor.
      Set<MetadataOption> options =
          (this.metadataOptions == null || this.metadataOptions.isEmpty())
              ? RequestConstants.DEFAULT_METADATA_OPTIONS
              : this.metadataOptions;
      return new GetFeaturesRequest(
          workspaceName, featureServiceName, getFeaturesRequestData, options, requestOptions);
    }
  }
}
0
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client/request/GetFeaturesRequestData.java
package ai.tecton.client.request;

import ai.tecton.client.exceptions.InvalidRequestParameterException;
import ai.tecton.client.exceptions.TectonErrorMessage;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import org.apache.commons.lang3.Validate;

/** Class that represents the map parameters passed to a {@link GetFeaturesRequest} */
public class GetFeaturesRequestData {

  private Map<String, String> joinKeyMap;
  private Map<String, Object> requestContextMap;

  /**
   * Constructor that creates a new GetFeaturesRequestData object with an empty joinKeyMap and
   * requestContextMap
   */
  public GetFeaturesRequestData() {
    this.joinKeyMap = new HashMap<>();
    this.requestContextMap = new HashMap<>();
  }

  /**
   * Setter method for joinKeyMap
   *
   * @param joinKeyMap Join keys used for table-based FeatureViews.
   *     <p>The key of this map is the join key name and the value is the join key value for this
   *     request
   *     <p>For string keys, the value should be a string
   *     <p>For int64 (Long) keys, the value should be a string of the decimal representation of the
   *     integer
   * @return Returns the GetFeaturesRequestData object after setting joinKeyMap
   * @throws InvalidRequestParameterException when joinKeyMap is null or empty, or any key in the
   *     map is null or empty
   */
  public GetFeaturesRequestData addJoinKeyMap(Map<String, String> joinKeyMap)
      throws InvalidRequestParameterException {
    // Wrap the commons-lang check so a null/empty map surfaces as the
    // InvalidRequestParameterException documented above, instead of leaking the raw
    // NullPointerException/IllegalArgumentException thrown by Validate.notEmpty.
    // This matches the convention already used by validateKeyValue below.
    try {
      Validate.notEmpty(joinKeyMap);
    } catch (Exception e) {
      throw new InvalidRequestParameterException(e.getMessage());
    }
    joinKeyMap.forEach(this::validateKeyValue);
    this.joinKeyMap = joinKeyMap;
    return this;
  }

  /**
   * Setter method for requestContextMap
   *
   * @param requestContextMap Request context used for OnDemand FeatureViews.
   *     <p>The key of this map is the join key name and the value is the join key value for this
   *     request
   *     <p>For string values, the value should be a java.lang.String
   *     <p>For int64 values, the value should be a java.lang.String of the decimal representation
   *     of the integer
   *     <p>For double values, the value should be a java.lang.Double
   * @return Returns the GetFeaturesRequestData object after setting requestContextMap
   * @throws InvalidRequestParameterException when requestContextMap is null or empty, or any key in
   *     the map is null or empty
   */
  public GetFeaturesRequestData addRequestContextMap(Map<String, Object> requestContextMap)
      throws InvalidRequestParameterException {
    // Same contract alignment as addJoinKeyMap: map-level validation failures are
    // reported with the documented exception type.
    try {
      Validate.notEmpty(requestContextMap);
    } catch (Exception e) {
      throw new InvalidRequestParameterException(e.getMessage());
    }
    requestContextMap.forEach(this::validateKeyValue);
    this.requestContextMap = requestContextMap;
    return this;
  }

  /**
   * Add a String join key value to the joinKeyMap
   *
   * @param key join key name
   * @param value String join value
   * @return Returns the GetFeaturesRequestData object after adding the join key value
   * @throws InvalidRequestParameterException when the join key is null or empty
   */
  public GetFeaturesRequestData addJoinKey(String key, String value)
      throws InvalidRequestParameterException {
    validateKeyValue(key, value);
    joinKeyMap.put(key, value);
    return this;
  }

  /**
   * Add an int64 join key value to the joinKeyMap
   *
   * @param key join key name
   * @param value int64 (Long) join value
   * @return Returns the GetFeaturesRequestData object after adding the join key value
   * @throws InvalidRequestParameterException when the join key is null or empty
   */
  public GetFeaturesRequestData addJoinKey(String key, Long value)
      throws InvalidRequestParameterException {
    // int64 join values are transported as their decimal string representation.
    String joinKeyValue = (value == null) ? null : value.toString();
    validateKeyValue(key, joinKeyValue);
    joinKeyMap.put(key, joinKeyValue);
    return this;
  }

  /**
   * Add a String request context value to the requestContextMap
   *
   * @param key request context name
   * @param value String request context value
   * @return Returns the GetFeaturesRequestData object after adding the request context key value
   * @throws InvalidRequestParameterException when the request context key is null or empty
   */
  public GetFeaturesRequestData addRequestContext(String key, String value)
      throws InvalidRequestParameterException {
    validateKeyValue(key, value);
    requestContextMap.put(key, value);
    return this;
  }

  /**
   * Add an int64 request context value to the requestContextMap
   *
   * @param key request context name
   * @param value int64 (Long) request context value
   *     <p>Note: The int64 value is converted to a String of the decimal representation of the
   *     integer
   * @return Returns the GetFeaturesRequestData object after adding the request context key value
   * @throws InvalidRequestParameterException when the request context key is null or empty
   */
  public GetFeaturesRequestData addRequestContext(String key, Long value)
      throws InvalidRequestParameterException {
    validateKeyValue(key, value);
    String requestContextValue = (value == null) ? null : value.toString();
    requestContextMap.put(key, requestContextValue);
    return this;
  }

  /**
   * Add a Double request context value to the requestContextMap
   *
   * @param key request context name
   * @param value Double request context value
   * @return Returns the GetFeaturesRequestData object after adding the request context key value
   * @throws InvalidRequestParameterException when the request context key is null or empty
   */
  public GetFeaturesRequestData addRequestContext(String key, Double value)
      throws InvalidRequestParameterException {
    validateKeyValue(key, value);
    requestContextMap.put(key, value);
    return this;
  }

  /** Returns a read-only view of the joinKeyMap. */
  public Map<String, String> getJoinKeyMap() {
    return Collections.unmodifiableMap(this.joinKeyMap);
  }

  /** Returns a read-only view of the requestContextMap. */
  public Map<String, Object> getRequestContextMap() {
    return Collections.unmodifiableMap(this.requestContextMap);
  }

  /** Returns true when no join keys have been added. */
  public boolean isEmptyJoinKeyMap() {
    return this.joinKeyMap.isEmpty();
  }

  /** Returns true when no request context values have been added. */
  public boolean isEmptyRequestContextMap() {
    return this.requestContextMap.isEmpty();
  }

  // Validates a single entry: the key must be non-empty, and String values must be
  // non-empty. Any validation failure is rethrown as InvalidRequestParameterException.
  private void validateKeyValue(String key, Object value) {
    try {
      Validate.notEmpty(key, TectonErrorMessage.INVALID_KEY_VALUE);
      if (value instanceof String) {
        Validate.notEmpty((String) value, TectonErrorMessage.INVALID_KEY_VALUE);
      }
    } catch (Exception e) {
      throw new InvalidRequestParameterException(e.getMessage());
    }
  }

  /** A Builder class for creating an instance of {@link GetFeaturesRequestData} object */
  public static class Builder {
    private GetFeaturesRequestData getFeaturesRequestData;

    /** Instantiates a new Builder */
    public Builder() {
      getFeaturesRequestData = new GetFeaturesRequestData();
    }

    /**
     * Setter for joinKeyMap
     *
     * @param joinKeyMap Join keys used for table-based FeatureViews.
     *     <p>The key of this map is the join key name and the value is the join key value for this
     *     request
     *     <p>For string keys, the value should be a string
     *     <p>For int64 (Long) keys, the value should be a string of the decimal representation of
     *     the integer
     * @return this Builder
     */
    public Builder joinKeyMap(Map<String, String> joinKeyMap) {
      getFeaturesRequestData = getFeaturesRequestData.addJoinKeyMap(joinKeyMap);
      return this;
    }

    /**
     * Setter for requestContextMap
     *
     * @param requestContextMap Request context used for OnDemand FeatureViews.
     *     <p>The key of this map is the join key name and the value is the join key value for this
     *     request
     *     <p>For string values, the value should be a java.lang.String
     *     <p>For int64 values, the value should be a java.lang.String of the decimal representation
     *     of the integer
     *     <p>For double values, the value should be a java.lang.Double
     * @return this Builder
     */
    public Builder requestContextMap(Map<String, Object> requestContextMap) {
      getFeaturesRequestData = getFeaturesRequestData.addRequestContextMap(requestContextMap);
      return this;
    }

    /**
     * Build a {@link GetFeaturesRequestData} object from the Builder
     *
     * @return {@link GetFeaturesRequestData}
     */
    public GetFeaturesRequestData build() {
      return this.getFeaturesRequestData;
    }
  }

  /** Overrides <i>equals()</i> in class {@link Object} */
  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    GetFeaturesRequestData that = (GetFeaturesRequestData) o;
    return Objects.equals(joinKeyMap, that.joinKeyMap)
        && Objects.equals(requestContextMap, that.requestContextMap);
  }

  /** Overrides <i>hashCode()</i> in class {@link Object} */
  @Override
  public int hashCode() {
    return Objects.hash(joinKeyMap, requestContextMap);
  }
}
0
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client/request/RequestConstants.java
package ai.tecton.client.request;

import ai.tecton.client.model.MetadataOption;
import java.time.Duration;
import java.util.Arrays;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.Set;

/**
 * Class that provides static constants that can be used as default parameters to various Request
 * objects
 */
public final class RequestConstants {

  // Constants holder: prevent instantiation.
  private RequestConstants() {}

  /**
   * The maximum microBatchSize supported by Tecton for a {@link GetFeaturesBatchRequest}, set to
   * {@value #MAX_MICRO_BATCH_SIZE}
   */
  public static final int MAX_MICRO_BATCH_SIZE = 5;

  /**
   * The default microBatchSize for a {@link GetFeaturesBatchRequest}, set to {@value
   * #DEFAULT_MICRO_BATCH_SIZE}
   */
  public static final int DEFAULT_MICRO_BATCH_SIZE = 1;

  /** The default parameter for None timeout for a {@link GetFeaturesBatchRequest} */
  public static final Duration NONE_TIMEOUT = Duration.ofMillis(Long.MAX_VALUE);

  /**
   * The default set of MetadataOptions for a {@link GetFeaturesRequest} and a {@link
   * GetFeaturesBatchRequest}, includes {@link MetadataOption#NAME} and {@link
   * MetadataOption#DATA_TYPE}
   */
  // final + unmodifiable: these are shared constants, so callers must not be able to
  // reassign or mutate them (the previous non-final mutable HashSet allowed both).
  public static final Set<MetadataOption> DEFAULT_METADATA_OPTIONS =
      Collections.unmodifiableSet(
          new HashSet<>(Arrays.asList(MetadataOption.NAME, MetadataOption.DATA_TYPE)));

  /**
   * The set of all {@link MetadataOption} for a {@link GetFeaturesRequest} and a {@link
   * GetFeaturesBatchRequest}
   */
  public static final Set<MetadataOption> ALL_METADATA_OPTIONS =
      Collections.unmodifiableSet(
          EnumSet.complementOf(EnumSet.of(MetadataOption.ALL, MetadataOption.NONE)));

  /**
   * An empty set representing None MetadataOptions for a {@link GetFeaturesRequest} and a {@link
   * GetFeaturesBatchRequest}
   */
  public static final Set<MetadataOption> NONE_METADATA_OPTIONS =
      Collections.unmodifiableSet(EnumSet.noneOf(MetadataOption.class));
}
0
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client/request/RequestOptions.java
package ai.tecton.client.request; import java.util.HashMap; import java.util.Map; import java.util.Objects; /** * Class that represents request-level options to control feature server behavior. These options are * sent as part of the requestOptions field in GetFeatures and GetFeaturesBatch requests. Option * values must be either Integer or Boolean. */ public class RequestOptions { private Map<String, Object> options; /** Constructor that creates a new RequestOptions object with default empty options */ public RequestOptions() { this.options = new HashMap<>(); } /** * Sets a request option with the given key and value. * * @param key the option key * @param value the option value (must be either Integer or Boolean) * @return Returns the RequestOptions object for method chaining * @throws IllegalArgumentException if value is not Integer or Boolean */ public RequestOptions setOption(String key, Object value) { if (value == null) { throw new IllegalArgumentException("Option value must not be null: " + key); } this.options.put(key, value); return this; } /** * Gets a specific option value by key. * * @param key the option key * @return the option value, or null if not set */ public Object getOption(String key) { return this.options.get(key); } /** * Gets all options as an unmodifiable map. * * @return Map containing all request options */ public Map<String, Object> getOptions() { return new HashMap<>(this.options); } /** * Checks if any options are set. * * @return true if no options are set, false otherwise */ public boolean isEmpty() { return this.options.isEmpty(); } /** A Builder class for creating an instance of {@link RequestOptions} object */ public static class Builder { private RequestOptions requestOptions; /** Instantiates a new Builder */ public Builder() { requestOptions = new RequestOptions(); } /** * Sets a request option with the given key and value. 
* * @param key the option key * @param value the option value (must be either Integer or Boolean) * @return this Builder * @throws IllegalArgumentException if value is not Integer or Boolean */ public Builder option(String key, Object value) { requestOptions.setOption(key, value); return this; } /** * Build a {@link RequestOptions} object from the Builder * * @return {@link RequestOptions} */ public RequestOptions build() { return this.requestOptions; } } /** Overrides <i>equals()</i> in class {@link Object} */ @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; RequestOptions that = (RequestOptions) o; return Objects.equals(options, that.options); } /** Overrides <i>hashCode()</i> in class {@link Object} */ @Override public int hashCode() { return Objects.hash(options); } @Override public String toString() { return "RequestOptions{" + "options=" + options + '}'; } }
0
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client/request/package-info.java
/** This package contains classes modeling the different requests to the FeatureService API */ package ai.tecton.client.request;
0
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client/response/AbstractTectonResponse.java
package ai.tecton.client.response;

import ai.tecton.client.model.ValueType;
import java.time.Duration;
import java.util.NoSuchElementException;
import java.util.Optional;

/** An abstract parent class for Tecton FeatureService API Response subclasses. */
abstract class AbstractTectonResponse {

  private final Duration requestLatency;

  AbstractTectonResponse(Duration requestLatency) {
    this.requestLatency = requestLatency;
  }

  /** Parses the raw JSON response body and populates the subclass's fields. */
  abstract void buildResponseFromJson(String response);

  /** Moshi mapping for the {@code dataType} element of a feature's metadata in the response. */
  static class ResponseDataType {
    String type;
    ResponseDataType elementType;

    /**
     * Resolves the type string sent by the server to a {@link ValueType}.
     *
     * @throws NoSuchElementException if the server sent an unrecognized type string (same
     *     exception type the previous unchecked {@code Optional.get()} threw, but with a
     *     descriptive message)
     */
    ValueType getDataType() {
      return ValueType.fromString(type)
          .orElseThrow(() -> new NoSuchElementException("Unknown Tecton data type: " + type));
    }

    /**
     * Returns the element type for ARRAY-typed features, or {@code Optional.empty()} when the
     * response carried no {@code elementType} node.
     */
    Optional<ValueType> getListElementType() {
      if (elementType != null) {
        return ValueType.fromString(elementType.type);
      }
      return Optional.empty();
    }
  }

  /**
   * Returns the response time (network latency + online store latency) as provided by the
   * underlying Http Client
   *
   * @return response time as {@link java.time.Duration}
   */
  public Duration getRequestLatency() {
    return requestLatency;
  }
}
0
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client/response/GetFeatureServiceMetadataResponse.java
package ai.tecton.client.response;

import ai.tecton.client.exceptions.TectonClientException;
import ai.tecton.client.exceptions.TectonErrorMessage;
import ai.tecton.client.model.FeatureServiceMetadata;
import ai.tecton.client.model.NameAndType;
import ai.tecton.client.model.ValueType;
import com.squareup.moshi.JsonAdapter;
import com.squareup.moshi.Moshi;
import java.io.IOException;
import java.time.Duration;
import java.util.ArrayList;
import java.util.List;

/**
 * A class that represents the response from the HTTP API for a call to the <i>/metadata</i>
 * endpoint.
 */
public class GetFeatureServiceMetadataResponse extends AbstractTectonResponse {

  private FeatureServiceMetadata featureServiceMetadata;
  private final JsonAdapter<GetFeatureServiceMetadataJson> jsonAdapter;

  /**
   * Parses the raw JSON body of a /metadata response.
   *
   * @param response raw JSON response body
   * @param requestLatency round-trip latency reported by the HTTP client
   */
  public GetFeatureServiceMetadataResponse(String response, Duration requestLatency) {
    super(requestLatency);
    Moshi moshi = new Moshi.Builder().build();
    jsonAdapter = moshi.adapter(GetFeatureServiceMetadataJson.class);
    buildResponseFromJson(response);
  }

  /** Returns a {@link FeatureServiceMetadata} object that represents the metadata returned */
  public FeatureServiceMetadata getFeatureServiceMetadata() {
    return this.featureServiceMetadata;
  }

  @Override
  void buildResponseFromJson(String response) {
    GetFeatureServiceMetadataJson featureServiceMetadataJson;
    try {
      featureServiceMetadataJson = jsonAdapter.fromJson(response);
      this.featureServiceMetadata =
          new FeatureServiceMetadata(
              parseNameAndType(featureServiceMetadataJson.inputJoinKeys),
              parseNameAndType(featureServiceMetadataJson.inputRequestContextKeys),
              parseNameAndType(featureServiceMetadataJson.featureValues));
      // NOTE(review): NullPointerException is deliberately caught here as a proxy for
      // "response JSON was missing expected fields"; both malformed-JSON cases are
      // reported uniformly as INVALID_RESPONSE_FORMAT.
    } catch (IOException | NullPointerException e) {
      throw new TectonClientException(TectonErrorMessage.INVALID_RESPONSE_FORMAT);
    }
  }

  // Moshi mapping of the top-level /metadata JSON payload. Fields not consumed below
  // (featureServiceType, outputJoinKeys, type) exist so Moshi can bind the full payload.
  static class GetFeatureServiceMetadataJson {
    private String featureServiceType;
    private List<NameAndTypeJson> inputJoinKeys;
    private List<NameAndTypeJson> inputRequestContextKeys;
    private List<NameAndTypeJson> outputJoinKeys;
    private List<NameAndTypeJson> featureValues;
    private String type;
  }

  // Moshi mapping of a single name/dataType entry in the metadata payload.
  static class NameAndTypeJson {
    String name;
    ResponseDataType dataType = new ResponseDataType();
  }

  // Converts the raw JSON entries into NameAndType models; returns an empty list when the
  // corresponding section was absent from the response.
  private List<NameAndType> parseNameAndType(List<NameAndTypeJson> nameAndTypeJson) {
    List<NameAndType> nameAndTypeList = new ArrayList<>();
    if (nameAndTypeJson != null) {
      nameAndTypeJson.forEach(
          nt -> {
            ValueType dataValueType = nt.dataType.getDataType();
            if (dataValueType == ValueType.ARRAY) {
              // NOTE(review): unchecked Optional.get() — assumes an ARRAY type always
              // carries an elementType in the response; if the server omitted it this
              // throws NoSuchElementException rather than INVALID_RESPONSE_FORMAT.
              // TODO confirm the server guarantees elementType for ARRAY.
              ValueType elementValueType = nt.dataType.getListElementType().get();
              nameAndTypeList.add(new NameAndType(nt.name, dataValueType, elementValueType));
            } else {
              nameAndTypeList.add(new NameAndType(nt.name, dataValueType));
            }
          });
    }
    return nameAndTypeList;
  }
}
0
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client/response/GetFeaturesBatchResponse.java
package ai.tecton.client.response;

import ai.tecton.client.exceptions.TectonClientException;
import ai.tecton.client.exceptions.TectonErrorMessage;
import ai.tecton.client.model.FeatureValue;
import ai.tecton.client.model.SloInformation;
import ai.tecton.client.response.GetFeaturesResponseUtils.FeatureMetadata;
import ai.tecton.client.response.GetFeaturesResponseUtils.FeatureVectorJson;
import ai.tecton.client.transport.HttpResponse;
import com.squareup.moshi.JsonAdapter;
import com.squareup.moshi.Moshi;
import java.io.IOException;
import java.time.Duration;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.OptionalDouble;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

/**
 * A class that represents the response from the HTTP API for when fetching batch features. The
 * class provides methods to access the list of feature vector returned, along with its metadata, if
 * present.
 *
 * <p>The {@link List} of {@link GetFeaturesResponse} objects represents the list of response, each
 * of which encapsulates a feature vector and its metadata. Note: The list may contain nulls for any
 * request that was never completed, due to a timeout.
 *
 * <p>The batchSloInformation is only present for batch requests to the /get-features-batch endpoint
 * (i.e. microBatchSize&gt;1)
 */
public class GetFeaturesBatchResponse {

  private final List<GetFeaturesResponse> batchResponseList;
  private SloInformation batchSloInfo;
  private Duration requestLatency;
  // NOTE(review): this adapter is a *static* field reassigned inside each constructor call.
  // Two GetFeaturesBatchResponse instances constructed concurrently race on it; it appears
  // benign only because every constructor builds an identical, thread-safe Moshi adapter.
  // It is static because the static nested GetFeaturesMicroBatchResponse reads it —
  // consider making it a final static initialized once. TODO confirm before changing.
  private static JsonAdapter<GetFeaturesMicroBatchResponse.GetFeaturesBatchResponseJson>
      jsonAdapter;

  /**
   * Builds the batch response from the per-microbatch HTTP responses.
   *
   * @param httpResponseList one HttpResponse per microbatch; null entries represent timeouts
   * @param totalDuration total wall-clock duration of the whole batch call
   * @param microBatchSize number of feature vectors per microbatch request
   */
  public GetFeaturesBatchResponse(
      List<HttpResponse> httpResponseList, Duration totalDuration, int microBatchSize) {
    Moshi moshi = new Moshi.Builder().build();
    jsonAdapter = moshi.adapter(GetFeaturesMicroBatchResponse.GetFeaturesBatchResponseJson.class);

    // Serialize list of HttpResponse into list of GetFeaturesMicroBatchResponse
    List<GetFeaturesMicroBatchResponse> microBatchResponses =
        httpResponseList
            .parallelStream()
            .map(httpResponse -> parseSingleHttpResponse(httpResponse, microBatchSize))
            .collect(Collectors.toList());

    // Concatenate list of GetFeaturesResponse objects from each microbatch into a single list
    // Maintain ordering (parallelStream().collect preserves encounter order)
    this.batchResponseList =
        microBatchResponses
            .parallelStream()
            .map(microBatch -> microBatch.microBatchResponseList)
            .flatMap(List::stream)
            .collect(Collectors.toList());

    // Compute Batch SLO Information, if present
    List<SloInformation> microBatchSloInfoList =
        microBatchResponses.stream()
            .filter(
                microBatchResponse -> microBatchResponse.getMicroBatchSloInformation().isPresent())
            .map(microBatchResponse -> microBatchResponse.getMicroBatchSloInformation().get())
            .collect(Collectors.toList());
    if (!microBatchSloInfoList.isEmpty()) {
      this.batchSloInfo = computeBatchSloInfo(microBatchSloInfoList);
    }
    this.requestLatency = totalDuration;
  }

  /**
   * Returns a list of {@link GetFeaturesResponse} objects, each encapsulating a feature vector and
   * its metadata
   *
   * @return {@link List} of {@link GetFeaturesResponse}
   */
  public List<GetFeaturesResponse> getBatchResponseList() {
    return batchResponseList;
  }

  /**
   * Returns the response time (network latency + online store latency) as provided by the
   * underlying Http Client
   *
   * @return response time as {@link java.time.Duration}
   */
  public Duration getRequestLatency() {
    return this.requestLatency;
  }

  /**
   * Returns an {@link SloInformation} object wrapped in {@link java.util.Optional} if present in
   * the response received from the HTTP API, Optional.empty() otherwise
   *
   * @return {@link SloInformation} for the batch request
   */
  public Optional<SloInformation> getBatchSloInformation() {
    return Optional.ofNullable(this.batchSloInfo);
  }

  /** Overrides <i>equals()</i> in class {@link Object} */
  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    GetFeaturesBatchResponse that = (GetFeaturesBatchResponse) o;
    return Objects.equals(batchResponseList, that.batchResponseList)
        && Objects.equals(batchSloInfo, that.batchSloInfo);
  }

  /** Overrides <i>hashCode()</i> in class {@link Object} */
  @Override
  public int hashCode() {
    return Objects.hash(batchResponseList, batchSloInfo);
  }

  // Parse a single HttpResponse and extract GetFeaturesResponse, SloInformation
  // This method is called parallely for all responses in the list
  private GetFeaturesMicroBatchResponse parseSingleHttpResponse(
      HttpResponse httpResponse, int microBatchSize) {
    // Null HttpResponse represents a timeout and so all the individual responses in the microbatch
    // will be null
    if (httpResponse == null)
      return new GetFeaturesMicroBatchResponse(Collections.nCopies(microBatchSize, null), null);

    // For an error response, throw TectonServiceException
    httpResponse.validateResponse();
    String responseJson = httpResponse.getResponseBody().get();
    if (microBatchSize == 1) {
      // microBatchSize == 1 goes to /get-features, whose payload is a single
      // GetFeaturesResponse rather than a batch envelope.
      return new GetFeaturesMicroBatchResponse(
          Collections.singletonList(
              new GetFeaturesResponse(responseJson, httpResponse.getRequestDuration())),
          null);
    } else {
      return new GetFeaturesMicroBatchResponse(responseJson, httpResponse.getRequestDuration());
    }
  }

  // One microbatch's worth of parsed responses plus its (optional) SLO info.
  private static class GetFeaturesMicroBatchResponse extends AbstractTectonResponse {
    private List<GetFeaturesResponse> microBatchResponseList;
    private SloInformation microBatchSloInfo;

    // Parse a /get-features-batch JSON payload.
    GetFeaturesMicroBatchResponse(String response, Duration requestLatency) {
      super(requestLatency);
      buildResponseFromJson(response);
    }

    // Wrap an already-built response list (single-response or timeout microbatch).
    GetFeaturesMicroBatchResponse(
        List<GetFeaturesResponse> microBatchResponseList, Duration requestDuration) {
      super(requestDuration);
      this.microBatchResponseList = microBatchResponseList;
    }

    Optional<SloInformation> getMicroBatchSloInformation() {
      return Optional.ofNullable(this.microBatchSloInfo);
    }

    // Moshi Json Classes
    static class GetFeaturesBatchResponseJson {
      List<GetFeaturesResponseUtils.FeatureVectorJson> result;
      ResponseMetadataJson metadata;

      static class ResponseMetadataJson {
        List<GetFeaturesResponseUtils.FeatureMetadata> features;
        List<SloInformation> sloInfo;
        SloInformation batchSloInfo;
      }
    }

    @Override
    void buildResponseFromJson(String response) {
      if (response != null) {
        GetFeaturesBatchResponseJson responseJson;
        try {
          responseJson = jsonAdapter.fromJson(response);
        } catch (IOException e) {
          throw new TectonClientException(TectonErrorMessage.INVALID_RESPONSE_FORMAT);
        }
        List<FeatureVectorJson> featureVectorJson = responseJson.result;
        List<FeatureMetadata> featureMetadata = responseJson.metadata.features;
        List<SloInformation> sloInformationList = responseJson.metadata.sloInfo;

        // Parallel Stream to map each feature vector and sloInfo (if present) in the response to a
        // corresponding
        // GetFeaturesResponse object and collect to a List
        // Preserves ordering
        this.microBatchResponseList =
            IntStream.range(0, responseJson.result.size())
                .parallel()
                .mapToObj(
                    i ->
                        generateGetFeaturesResponse(
                            featureVectorJson,
                            featureMetadata,
                            sloInformationList,
                            this.getRequestLatency(),
                            i))
                .collect(Collectors.toList());
        this.microBatchSloInfo = responseJson.metadata.batchSloInfo;
      }
    }
  }

  // Generate a single GetFeaturesResponse
  private static GetFeaturesResponse generateGetFeaturesResponse(
      List<FeatureVectorJson> featureVectorJson,
      List<FeatureMetadata> featureMetadata,
      List<SloInformation> sloInformationList,
      Duration requestLatency,
      int index) {
    List<FeatureValue> featureValues =
        GetFeaturesResponseUtils.constructFeatureVector(
            featureVectorJson.get(index).features, featureMetadata, index);
    GetFeaturesResponse getFeaturesResponse =
        new GetFeaturesResponse(featureValues, requestLatency);
    if (sloInformationList != null) {
      getFeaturesResponse.setSloInformation(sloInformationList.get(index));
    }
    return getFeaturesResponse;
  }

  // Compute Batch SLO Information by aggregating per-microbatch SLO info: the batch is
  // SLO-eligible only if every microbatch was, and the numeric fields take the max.
  // NOTE(review): removeAll mutates the caller's list in place — safe today only because
  // the sole caller (the constructor) passes a locally built list it never reuses.
  SloInformation computeBatchSloInfo(List<SloInformation> batchSloInformation) {
    batchSloInformation.removeAll(Collections.singleton(null));
    boolean isSloEligibleBatch =
        batchSloInformation.stream()
            .noneMatch(
                sloInfo -> sloInfo.isSloEligible().isPresent() && !sloInfo.isSloEligible().get());
    Double maxSloServerTimeSeconds =
        getMaxValueFromOptionalList(
            batchSloInformation.stream()
                .map(SloInformation::getSloServerTimeSeconds)
                .collect(Collectors.toList()));
    Double storeMaxLatency =
        getMaxValueFromOptionalList(
            batchSloInformation.stream()
                .map(SloInformation::getStoreMaxLatency)
                .collect(Collectors.toList()));
    Double maxServerTimeSeconds =
        getMaxValueFromOptionalList(
            batchSloInformation.stream()
                .map(SloInformation::getServerTimeSeconds)
                .collect(Collectors.toList()));
    Set<SloInformation.SloIneligibilityReason> sloIneligibilityReasons =
        batchSloInformation.stream()
            .map(SloInformation::getSloIneligibilityReasons)
            .flatMap(Collection::stream)
            .collect(Collectors.toSet());
    return new SloInformation.Builder()
        .isSloEligible(isSloEligibleBatch)
        .serverTimeSeconds(maxServerTimeSeconds)
        .sloServerTimeSeconds(maxSloServerTimeSeconds)
        .sloIneligibilityReasons(sloIneligibilityReasons)
        .storeMaxLatency(storeMaxLatency)
        .build();
  }

  // Max over the present values; returns null (not empty Optional) when none are present.
  private Double getMaxValueFromOptionalList(List<Optional<Double>> values) {
    OptionalDouble doubleVal =
        values.stream().filter(Optional::isPresent).mapToDouble(Optional::get).max();
    return doubleVal.isPresent() ? doubleVal.getAsDouble() : null;
  }
}
0
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client/response/GetFeaturesResponse.java
package ai.tecton.client.response;

import ai.tecton.client.exceptions.TectonClientException;
import ai.tecton.client.exceptions.TectonErrorMessage;
import ai.tecton.client.model.FeatureStatus;
import ai.tecton.client.model.FeatureValue;
import ai.tecton.client.model.SloInformation;
import com.squareup.moshi.JsonAdapter;
import com.squareup.moshi.Moshi;
import java.io.IOException;
import java.time.Duration;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import org.apache.commons.lang3.StringUtils;

/**
 * A class that represents the response from the HTTP API for a call to the <i>/get-features</i>
 * endpoint. The class provides methods to access the feature vector returned, along with its
 * metadata, if present.
 */
public class GetFeaturesResponse extends AbstractTectonResponse {

  private final List<FeatureValue> featureValues;
  private SloInformation sloInformation;
  private final JsonAdapter<GetFeaturesResponseJson> jsonAdapter;

  // Keys used in validation error messages.
  private static final String NAME = "Name";
  private static final String DATA_TYPE = "Data Type";

  /**
   * Parses a raw JSON response body from the /get-features endpoint.
   *
   * @param response the raw JSON response body
   * @param requestLatency observed duration of the HTTP request
   * @throws TectonClientException if the response cannot be parsed or fails validation
   */
  public GetFeaturesResponse(String response, Duration requestLatency)
      throws TectonClientException {
    super(requestLatency);
    Moshi moshi = new Moshi.Builder().build();
    jsonAdapter = moshi.adapter(GetFeaturesResponseJson.class);
    this.featureValues = new ArrayList<>();
    buildResponseFromJson(response);
  }

  // Package-private constructor used when the feature vector has already been parsed
  // (e.g. by the batch-response path); no JSON parsing is performed.
  GetFeaturesResponse(List<FeatureValue> featureValues, Duration requestLatency) {
    super(requestLatency);
    this.featureValues = featureValues;
    Moshi moshi = new Moshi.Builder().build();
    jsonAdapter = moshi.adapter(GetFeaturesResponseJson.class);
  }

  /**
   * Returns the feature vector as a List of {@link FeatureValue} objects
   *
   * @return List of {@link FeatureValue}
   */
  public List<FeatureValue> getFeatureValues() {
    return featureValues;
  }

  /**
   * Returns the feature vector as a Map, with the fully qualified feature name
   * ({@code namespace.name}) as the key.
   */
  public Map<String, FeatureValue> getFeatureValuesAsMap() {
    Map<String, FeatureValue> featureMap = new HashMap<>();
    featureValues.forEach(
        featureValue ->
            featureMap.put(
                StringUtils.join(
                    featureValue.getFeatureNamespace(), ".", featureValue.getFeatureName()),
                featureValue));
    return featureMap;
  }

  /**
   * Returns an {@link SloInformation} object wrapped in {@link java.util.Optional} if present in
   * the response received from the HTTP API, Optional.empty() otherwise
   */
  public Optional<SloInformation> getSloInformation() {
    return Optional.ofNullable(sloInformation);
  }

  // Moshi binding classes mirroring the /get-features JSON response shape.
  static class GetFeaturesResponseJson {
    Result result;
    ResponseMetadata metadata;

    static class Result {
      List<Object> features;
    }

    static class ResponseMetadata {
      List<FeatureMetadata> features;
      SloInformation sloInfo;
    }

    static class FeatureMetadata {
      String name;
      String effectiveTime;
      ResponseDataType dataType = new ResponseDataType();
      String status;
      String description;
      Map<String, String> tags;
    }
  }

  @Override
  void buildResponseFromJson(String response) {
    GetFeaturesResponseJson responseJson;
    try {
      responseJson = jsonAdapter.fromJson(response);
    } catch (IOException e) {
      throw new TectonClientException(TectonErrorMessage.INVALID_RESPONSE_FORMAT);
    }
    // Moshi returns null (it does not throw) for a literal "null" document; treat that
    // and missing top-level sections as a malformed response instead of NPE-ing below.
    if (responseJson == null || responseJson.result == null || responseJson.metadata == null) {
      throw new TectonClientException(TectonErrorMessage.INVALID_RESPONSE_FORMAT);
    }
    List<Object> featureVector = responseJson.result.features;
    List<GetFeaturesResponseJson.FeatureMetadata> featureMetadata = responseJson.metadata.features;
    validateResponse(featureVector, featureMetadata);

    // Construct one FeatureValue per entry; values and metadata are positionally aligned.
    for (int i = 0; i < featureVector.size(); i++) {
      GetFeaturesResponseJson.FeatureMetadata metadata = featureMetadata.get(i);
      FeatureValue value =
          new FeatureValue(
              featureVector.get(i),
              metadata.name,
              metadata.dataType.getDataType(),
              metadata.dataType.getListElementType(),
              metadata.effectiveTime,
              FeatureStatus.fromString(metadata.status),
              metadata.description,
              metadata.tags);
      this.featureValues.add(value);
    }
    // SLO information is optional in the response.
    if (responseJson.metadata.sloInfo != null) {
      this.sloInformation = responseJson.metadata.sloInfo;
    }
  }

  void setSloInformation(SloInformation sloInformation) {
    this.sloInformation = sloInformation;
  }

  // Rejects responses with an empty vector or metadata entries missing a name/data type.
  private void validateResponse(
      List<Object> featureVector, List<GetFeaturesResponseJson.FeatureMetadata> featureMetadata) {
    if (featureVector.isEmpty()) {
      throw new TectonClientException(TectonErrorMessage.EMPTY_FEATURE_VECTOR);
    }
    for (GetFeaturesResponseJson.FeatureMetadata metadata : featureMetadata) {
      if (StringUtils.isEmpty(metadata.name)) {
        throw new TectonClientException(
            String.format(TectonErrorMessage.MISSING_EXPECTED_METADATA, NAME));
      }
      if (StringUtils.isEmpty(metadata.dataType.type)) {
        throw new TectonClientException(
            String.format(TectonErrorMessage.MISSING_EXPECTED_METADATA, DATA_TYPE));
      }
    }
  }

  /** Overrides <i>equals()</i> in class {@link Object} */
  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    GetFeaturesResponse that = (GetFeaturesResponse) o;
    return Objects.equals(featureValues, that.featureValues)
        && Objects.equals(sloInformation, that.sloInformation);
  }

  /** Overrides <i>hashCode()</i> in class {@link Object} */
  @Override
  public int hashCode() {
    return Objects.hash(featureValues, sloInformation);
  }
}
0
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client/response/GetFeaturesResponseUtils.java
package ai.tecton.client.response;

import ai.tecton.client.exceptions.TectonClientException;
import ai.tecton.client.exceptions.TectonErrorMessage;
import ai.tecton.client.model.FeatureStatus;
import ai.tecton.client.model.FeatureValue;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang3.StringUtils;

/**
 * Shared helpers for turning raw feature-server JSON structures into {@link FeatureValue}
 * vectors, used by the single and batch response classes.
 */
class GetFeaturesResponseUtils {

  // Keys used in validation error messages.
  private static final String NAME = "Name";
  private static final String DATA_TYPE = "Data Type";

  /**
   * Constructs a feature vector from raw values and positionally aligned metadata.
   *
   * @param features raw feature values, one per metadata entry
   * @param featureMetadata metadata for each feature (name, type, per-row status, ...)
   * @param index row index used to select the per-row status from each metadata entry
   * @return the constructed list of {@link FeatureValue}
   * @throws TectonClientException if the vector is empty or metadata is incomplete
   */
  static List<FeatureValue> constructFeatureVector(
      List<Object> features, List<FeatureMetadata> featureMetadata, int index) {
    validateResponse(features, featureMetadata);
    List<FeatureValue> featureValues = new ArrayList<>(features.size());
    for (int i = 0; i < features.size(); i++) {
      FeatureMetadata metadata = featureMetadata.get(i);
      FeatureValue value =
          new FeatureValue(
              features.get(i),
              metadata.name,
              metadata.dataType.getDataType(),
              metadata.dataType.getListElementType(),
              metadata.effectiveTime,
              // Status lists are optional in the response; null means "no status reported".
              (metadata.status != null)
                  ? FeatureStatus.fromString(metadata.status.get(index))
                  : null,
              metadata.description,
              metadata.tags);
      featureValues.add(value);
    }
    return featureValues;
  }

  /**
   * Validates a feature-server response: the vector must be non-empty and every metadata
   * entry must carry a name and a data type.
   *
   * @throws TectonClientException on the first violation found
   */
  static void validateResponse(List<Object> featureVector, List<FeatureMetadata> featureMetadata) {
    if (featureVector.isEmpty()) {
      throw new TectonClientException(TectonErrorMessage.EMPTY_FEATURE_VECTOR);
    }
    for (FeatureMetadata metadata : featureMetadata) {
      if (StringUtils.isEmpty(metadata.name)) {
        throw new TectonClientException(
            String.format(TectonErrorMessage.MISSING_EXPECTED_METADATA, NAME));
      }
      if (StringUtils.isEmpty(metadata.dataType.type)) {
        throw new TectonClientException(
            String.format(TectonErrorMessage.MISSING_EXPECTED_METADATA, DATA_TYPE));
      }
    }
  }

  // Common JSON response classes for Moshi serialization.
  static class FeatureMetadata {
    String name;
    String effectiveTime;
    AbstractTectonResponse.ResponseDataType dataType =
        new AbstractTectonResponse.ResponseDataType();
    List<String> status;
    String description;
    Map<String, String> tags;
  }

  static class FeatureVectorJson {
    List<Object> features;
  }
}
0
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client/response/package-info.java
/** This package contains classes representing the high-level responses from the FeatureService API. */
package ai.tecton.client.response;
0
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client/transport/HttpRequest.java
package ai.tecton.client.transport; import okhttp3.HttpUrl; import org.apache.commons.lang3.StringUtils; class HttpRequest { HttpUrl url; TectonHttpClient.HttpMethod method; String jsonBody; String apiKey; HttpRequest( String baseUrl, String endpoint, TectonHttpClient.HttpMethod method, String apiKey, String jsonBody) { url = HttpUrl.parse(baseUrl); if (endpoint != null && !endpoint.isEmpty()) { // Paths with leading backslash results in a URL with double backslash. // See https://github.com/square/okhttp/issues/2399#issuecomment-195354749 url = url.newBuilder().addPathSegments(StringUtils.stripStart(endpoint, "/")).build(); } this.method = method; this.apiKey = apiKey; this.jsonBody = jsonBody; } HttpUrl getUrl() { return this.url; } TectonHttpClient.HttpMethod getMethod() { return method; } String getApiKey() { return apiKey; } String getJsonBody() { return jsonBody; } }
0
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client/transport/HttpResponse.java
package ai.tecton.client.transport;

import ai.tecton.client.exceptions.HttpStatusExceptionFactory;
import ai.tecton.client.exceptions.TectonClientException;
import ai.tecton.client.exceptions.TectonErrorMessage;
import ai.tecton.client.exceptions.TectonException;
import ai.tecton.client.exceptions.TectonServiceException;
import com.squareup.moshi.JsonAdapter;
import com.squareup.moshi.Moshi;
import java.time.Duration;
import java.util.Optional;
import okhttp3.Headers;
import okhttp3.Response;
import okhttp3.ResponseBody;

/**
 * Immutable snapshot of an OkHttp {@link Response}: status, headers, body text, request
 * duration and a best-effort error message extracted from the Tecton error payload.
 */
public class HttpResponse {

  private final boolean isSuccessful;
  private final int responseCode;
  private final String message;
  private final String body;
  private final Headers headers;
  private final Duration requestDuration;

  // Moshi adapter for the Tecton error payload; cached since adapters are thread-safe.
  private static final Moshi moshi = new Moshi.Builder().build();
  private static final JsonAdapter<ErrorResponseJson> errorResponseJsonAdapter =
      moshi.adapter(ErrorResponseJson.class);

  HttpResponse(Response response) throws Exception {
    this(response, response.body());
  }

  HttpResponse(Response response, ResponseBody responseBody) throws Exception {
    this.responseCode = response.code();
    this.headers = response.headers();
    this.isSuccessful = response.isSuccessful();
    this.requestDuration =
        Duration.ofMillis(response.receivedResponseAtMillis() - response.sentRequestAtMillis());
    this.body = responseBody.string();
    // Prefer the Tecton error message embedded in the body (e.g. an invalid 'Tecton-key'
    // authorization header). When none is present — e.g. the request timed out — fall back
    // to the plain HTTP status message (Forbidden, Not Found, ...).
    this.message =
        this.isSuccessful ? response.message() : parseErrorResponse(this.body, response.message());
  }

  public boolean isSuccessful() {
    return isSuccessful;
  }

  public int getResponseCode() {
    return responseCode;
  }

  public Duration getRequestDuration() {
    return requestDuration;
  }

  public String getMessage() {
    return message;
  }

  public Optional<String> getResponseBody() {
    return Optional.ofNullable(this.body);
  }

  /**
   * Throws a typed Tecton exception for an unsuccessful or empty response; no-op otherwise.
   */
  public void validateResponse() {
    if (!isSuccessful) {
      // Map known HTTP status codes to specific exception types when possible.
      Optional<TectonException> mapped =
          HttpStatusExceptionFactory.createException(responseCode, message);
      if (mapped.isPresent()) {
        throw mapped.get();
      }
      throw new TectonServiceException(
          String.format(TectonErrorMessage.ERROR_RESPONSE, responseCode, message));
    }
    if (body == null) {
      throw new TectonClientException(TectonErrorMessage.EMPTY_RESPONSE);
    }
  }

  // Best-effort extraction of the "message" field from a Tecton error body; falls back to
  // the supplied HTTP status message on any parse failure.
  private static String parseErrorResponse(String responseBody, String fallbackMessage) {
    try {
      return errorResponseJsonAdapter.fromJson(responseBody).message;
    } catch (Exception e) {
      return fallbackMessage;
    }
  }

  // Moshi binding for the Tecton error payload.
  static class ErrorResponseJson {
    String error;
    int code;
    String message;
  }
}
0
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client/transport/TectonHttpClient.java
package ai.tecton.client.transport;

import ai.tecton.client.TectonClientOptions;
import ai.tecton.client.exceptions.TectonClientException;
import ai.tecton.client.exceptions.TectonErrorMessage;
import ai.tecton.client.exceptions.TectonServiceException;
import ai.tecton.client.version.Version;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.stream.Collectors;
import okhttp3.Call;
import okhttp3.Callback;
import okhttp3.ConnectionPool;
import okhttp3.Dispatcher;
import okhttp3.HttpUrl;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.RequestBody;
import okhttp3.Response;
import okhttp3.ResponseBody;
import org.apache.commons.lang3.Validate;

/**
 * Thin wrapper over OkHttp that performs single and parallel JSON requests against the
 * Tecton API, attaching default headers and the Tecton-key Authorization header.
 */
public class TectonHttpClient {

  private HttpUrl url;
  private final String apiKey;
  private final OkHttpClient client;
  private final AtomicBoolean isClosed;

  private static final String API_KEY_PREFIX = "Tecton-key ";
  private static final String USER_AGENT_STRING_PREFIX = "tecton-http-java-client ";

  // Headers sent with every request.
  private static final Map<String, String> defaultHeaders =
      new HashMap<String, String>() {
        {
          put(HttpHeader.CONTENT_TYPE.getName(), MediaType.APPLICATION_JSON.getName());
          put(HttpHeader.ACCEPT.getName(), MediaType.APPLICATION_JSON.getName());
          put(HttpHeader.USER_AGENT.getName(), USER_AGENT_STRING_PREFIX + Version.VERSION);
        }
      };

  /**
   * Creates a client with an internally configured OkHttpClient.
   *
   * @param url Tecton base URL
   * @param apiKey Tecton API key; must be non-empty
   * @param tectonClientOptions timeouts, connection-pool and parallelism settings
   * @throws TectonClientException if the URL or key is invalid
   */
  public TectonHttpClient(String url, String apiKey, TectonClientOptions tectonClientOptions) {
    validateUrl(url);
    validateApiKey(apiKey);
    this.apiKey = apiKey;
    Dispatcher dispatcher = new Dispatcher();
    dispatcher.setMaxRequestsPerHost(tectonClientOptions.getMaxParallelRequests());
    dispatcher.setMaxRequests(tectonClientOptions.getMaxParallelRequests());
    OkHttpClient.Builder builder =
        new OkHttpClient.Builder()
            .readTimeout(tectonClientOptions.getReadTimeout().getSeconds(), TimeUnit.SECONDS)
            .connectTimeout(tectonClientOptions.getConnectTimeout().getSeconds(), TimeUnit.SECONDS)
            .dispatcher(dispatcher);
    ConnectionPool connectionPool =
        new ConnectionPool(
            tectonClientOptions.getMaxIdleConnections(),
            tectonClientOptions.getKeepAliveDuration().getSeconds(),
            TimeUnit.SECONDS);
    builder.connectionPool(connectionPool);
    client = builder.build();
    isClosed = new AtomicBoolean(false);
  }

  /**
   * Creates a client using a caller-supplied OkHttpClient. The API key may be null here;
   * it is only validated when present.
   */
  public TectonHttpClient(String url, String apiKey, OkHttpClient httpClient) {
    validateUrl(url);
    if (apiKey != null) {
      validateApiKey(apiKey);
    }
    this.client = httpClient;
    this.apiKey = apiKey;
    this.isClosed = new AtomicBoolean(false);
  }

  /** Shuts down the dispatcher executor and evicts pooled connections. Idempotent. */
  public void close() {
    if (isClosed.compareAndSet(false, true)) {
      client.dispatcher().executorService().shutdown();
      client.connectionPool().evictAll();
    }
  }

  public boolean isClosed() {
    return isClosed.get();
  }

  /**
   * Performs a single synchronous request.
   *
   * @throws TectonClientException wrapping any transport-level failure
   */
  public HttpResponse performRequest(String endpoint, HttpMethod method, String requestBody) {
    HttpRequest httpRequest =
        new HttpRequest(url.url().toString(), endpoint, method, apiKey, requestBody);
    Request request = buildRequestWithDefaultHeaders(httpRequest);
    Call call = client.newCall(request);
    try (Response response = call.execute()) {
      return new HttpResponse(response);
    } catch (Exception e) {
      throw new TectonClientException(e.getMessage());
    }
  }

  /**
   * Performs the given requests in parallel and returns responses in the same order as
   * {@code requestBodyList}. Waits until all calls complete or {@code timeout} elapses;
   * on timeout, entries for unfinished calls are left null (pre-existing contract).
   *
   * @throws TectonClientException if any call fails, or the wait is interrupted
   */
  public List<HttpResponse> performParallelRequests(
      String endpoint, HttpMethod method, List<String> requestBodyList, Duration timeout)
      throws TectonClientException {
    // Pre-sized response holder; results are placed at the index of their request.
    ParallelCallHandler parallelCallHandler = new ParallelCallHandler(requestBodyList.size());

    // Map request bodies to OkHttp Requests; list order matches requestBodyList.
    List<Request> requestList =
        requestBodyList.stream()
            .map(
                requestBody ->
                    new HttpRequest(url.url().toString(), endpoint, method, apiKey, requestBody))
            .map(this::buildRequestWithDefaultHeaders)
            .collect(Collectors.toList());

    // One latch count per call; await() below blocks until all calls finish or timeout.
    CountDownLatch countDownLatch = new CountDownLatch(requestBodyList.size());
    Callback callback =
        new Callback() {
          @Override
          public void onFailure(Call call, IOException e) {
            // On timeout, the executor rejects all pending calls. This can surface as an
            // InterruptedIOException for in-flight calls, which is expected; only record
            // other failures such as network issues.
            if (!(e instanceof InterruptedIOException)) {
              parallelCallHandler.logCallFailure(e.getMessage());
            }
            // Count down even on failure, otherwise the latch never releases and the
            // caller always blocks for the full timeout.
            countDownLatch.countDown();
          }

          @Override
          public void onResponse(Call call, Response response) {
            // Request does not override equals(), so indexOf matches by identity — safe
            // because the callback receives the exact Request objects enqueued above.
            try (ResponseBody responseBody = response.body()) {
              parallelCallHandler.set(
                  requestList.indexOf(call.request()), new HttpResponse(response, responseBody));
            } catch (Exception e) {
              throw new TectonServiceException(e.getMessage());
            } finally {
              // The body is closed by try-with-resources; only the latch is released here
              // so a close() failure can never leave the latch hanging.
              countDownLatch.countDown();
            }
          }
        };

    // Enqueue all calls; OkHttp's dispatcher runs them asynchronously.
    requestList.forEach(request -> client.newCall(request).enqueue(callback));

    // Wait until A) all calls have completed or B) the specified timeout has elapsed.
    // The boolean result is deliberately ignored: on timeout the partially filled
    // response list is returned, preserving the existing contract.
    try {
      countDownLatch.await(timeout.toMillis(), TimeUnit.MILLISECONDS);
      if (!parallelCallHandler.failureMessageList.isEmpty()) {
        throw new TectonClientException(
            String.format(
                TectonErrorMessage.CALL_FAILURE, parallelCallHandler.failureMessageList.get(0)));
      }
      return parallelCallHandler.responseList;
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt(); // restore the interrupt flag for the caller
      throw new TectonClientException(e.getMessage());
    }
  }

  /** Builds an OkHttp Request with the default headers plus the Authorization header. */
  public Request buildRequestWithDefaultHeaders(HttpRequest httpRequest) {
    Request.Builder requestBuilder = new Request.Builder().url(httpRequest.getUrl());
    for (Map.Entry<String, String> header : defaultHeaders.entrySet()) {
      requestBuilder.header(header.getKey(), header.getValue());
    }
    String apiKeyHeader = API_KEY_PREFIX + httpRequest.getApiKey();
    requestBuilder.header(HttpHeader.AUTHORIZATION.getName(), apiKeyHeader);
    // Attach the JSON body; every supported method currently posts.
    switch (httpRequest.getMethod()) {
      case POST:
      default:
        {
          okhttp3.MediaType mediaType =
              okhttp3.MediaType.parse(MediaType.APPLICATION_JSON.getName());
          RequestBody requestBody = RequestBody.create(httpRequest.getJsonBody(), mediaType);
          requestBuilder.post(requestBody);
        }
    }
    return requestBuilder.build();
  }

  Duration getReadTimeout() {
    return Duration.ofMillis(client.readTimeoutMillis());
  }

  Duration getConnectTimeout() {
    return Duration.ofMillis(client.connectTimeoutMillis());
  }

  int getMaxParallelRequests() {
    return client.dispatcher().getMaxRequestsPerHost();
  }

  private void validateApiKey(String apiKey) {
    try {
      Validate.notEmpty(apiKey);
    } catch (Exception e) {
      throw new TectonClientException(TectonErrorMessage.INVALID_KEY);
    }
  }

  private void validateUrl(String url) {
    try {
      Validate.notEmpty(url);
      this.url = HttpUrl.parse(url);
      // HttpUrl.parse returns null (does not throw) for a malformed URL; reject it here
      // instead of failing later with an NPE.
      Validate.notNull(this.url);
    } catch (Exception e) {
      throw new TectonClientException(TectonErrorMessage.INVALID_URL);
    }
  }

  public enum HttpMethod {
    GET,
    POST,
    PUT,
    DELETE;
  }

  enum HttpHeader {
    CONTENT_TYPE("Content-Type"),
    ACCEPT("Accept"),
    AUTHORIZATION("Authorization"),
    USER_AGENT("User-Agent");

    private final String name;

    HttpHeader(String headerName) {
      this.name = headerName;
    }

    public String getName() {
      return name;
    }
  }

  enum MediaType {
    APPLICATION_JSON("application/json"),
    PLAIN_TEXT("text/plain");

    private final String name;

    MediaType(String name) {
      this.name = name;
    }

    public String getName() {
      return this.name;
    }
  }

  // Holder shared between callback threads and the caller; responseList visibility is
  // guaranteed by the countDown()/await() happens-before edge.
  static class ParallelCallHandler {
    List<HttpResponse> responseList;
    List<String> failureMessageList;

    ParallelCallHandler(int numberOfCalls) {
      this.responseList = new ArrayList<>(Collections.nCopies(numberOfCalls, null));
      this.failureMessageList = new ArrayList<>(numberOfCalls);
    }

    void set(int index, HttpResponse httpResponse) {
      this.responseList.set(index, httpResponse);
    }

    // Synchronized: multiple OkHttp callback threads may report failures concurrently.
    // Currently only the first message is surfaced, but all are kept for future per-call
    // error handling.
    synchronized void logCallFailure(String failureMessage) {
      this.failureMessageList.add(failureMessage);
    }
  }
}
0
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client/transport/package-info.java
/**
 * This package contains the implementation of an HTTP client using the OkHttp library from Square.
 *
 * @see <a href="https://square.github.io/okhttp/">https://square.github.io/okhttp/</a>
 */
package ai.tecton.client.transport;
0
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client
java-sources/ai/tecton/java-client/0.9.3/ai/tecton/client/version/Version.java
// Generated by Maven templating plugin. Do not modify this file directly. package ai.tecton.client.version; public class Version { public static final String VERSION = "0.9.3"; }
0
java-sources/ai/test/sdk/test-ai-appium/0.1.0/ai/test
java-sources/ai/test/sdk/test-ai-appium/0.1.0/ai/test/sdk/CollectionUtils.java
package ai.test.sdk;

import java.util.HashMap;

/**
 * Shared classes and methods enhancing collections functionality.
 *
 * @author Alexander Wu (alec@test.ai)
 *
 */
final class CollectionUtils
{
	/**
	 * Constructor, prevents instantiation of this utility class.
	 */
	private CollectionUtils()
	{
	}

	/**
	 * Builds a {@code HashMap} out of a list of {@code String}s. Pass in values such that {@code [ k1, v1, k2, v2, k3, v3... ]}.
	 * 
	 * @param sl The {@code String}s to use. Must contain an even number of elements.
	 * @return A {@code HashMap} derived from the values in {@code sl}
	 * @throws IllegalArgumentException If {@code sl} contains an odd number of elements (a key without a value).
	 */
	public static HashMap<String, String> keyValuesToHM(String... sl)
	{
		// Fail fast with a clear message instead of an ArrayIndexOutOfBoundsException mid-loop.
		if (sl.length % 2 != 0)
			throw new IllegalArgumentException("keyValuesToHM expects an even number of arguments, got " + sl.length);

		HashMap<String, String> m = new HashMap<>();
		for (int i = 0; i < sl.length; i += 2)
			m.put(sl[i], sl[i + 1]);

		return m;
	}

	/**
	 * Simple Tuple implementation. A Tuple is an immutable two-pair of values. It may consist of any two Objects, which may or may not be in of the same type.
	 * 
	 * @author Alexander Wu (alec@test.ai)
	 *
	 * @param <K> The type of Object allowed for the first Object in the tuple.
	 * @param <V> The type of Object allowed for the second Object in the tuple.
	 */
	public static class Tuple<K, V>
	{
		/**
		 * The k value of the tuple
		 */
		public final K k;

		/**
		 * The v value of the tuple
		 */
		public final V v;

		/**
		 * Constructor, creates a new Tuple from the specified values.
		 * 
		 * @param k The first entry in the Tuple.
		 * @param v The second entry in the Tuple.
		 */
		public Tuple(K k, V v)
		{
			this.k = k;
			this.v = v;
		}
	}
}
0
java-sources/ai/test/sdk/test-ai-appium/0.1.0/ai/test
java-sources/ai/test/sdk/test-ai-appium/0.1.0/ai/test/sdk/JsonUtils.java
package ai.test.sdk; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.gson.JsonObject; import com.google.gson.JsonParser; import okhttp3.Response; /** * Shared utility methods for common tasks * * @author Alexander Wu (alec@test.ai) * */ final class JsonUtils { /** * The logger for this class */ private static Logger log = LoggerFactory.getLogger(JsonUtils.class); /** * Convenience method, extract the body of a {@code Response} as a {@code JsonObject}. * * @param r The Response object to use * @return The body of {@code r} as a {@code JsonObject}. */ public static JsonObject responseAsJson(Response r) { try { String body = r.body().string(); log.debug("Status: {} ----- Body: {}", r.code(), body); return JsonParser.parseString(body).getAsJsonObject(); } catch (Throwable e) { e.printStackTrace(); return null; } } /** * Convenience method, extract a String value associated with the specified key on a JsonObject. * * @param jo The JsonObject to extract a String from * @param key The key associated with the value to extract * @return The value associated with {@code key}, or the empty String if {@code key} was not in {@code jo}. */ public static String stringFromJson(JsonObject jo, String key) { return jo.has(key) ? jo.get(key).getAsString() : ""; } /** * Convenience method, extract a double value associated with the specified key on a JsonObject. * * @param jo The JsonObject to extract a double from * @param key The key associated with the value to extract * @return The value associated with {@code key}, or 0.0 if {@code key} was not in {@code jo}. */ public static double doubleFromJson(JsonObject jo, String key) { return jo.has(key) ? jo.get(key).getAsDouble() : 0; } /** * Convenience method, extract an int value associated with the specified key on a JsonObject. 
* * @param jo The JsonObject to extract an int from * @param key The key associated with the value to extract * @return The value associated with {@code key}, or 0 if {@code key} was not in {@code jo}. */ public static int intFromJson(JsonObject jo, String key) { return jo.has(key) ? jo.get(key).getAsInt() : 0; } /** * Convenience method, extract a boolean value associated with the specified key on a JsonObject. * * @param jo The JsonObject to extract a boolean from * @param key The key associated with the value to extract * @return The value associated with {@code key}, or false if {@code key} was not in {@code jo}. */ public static boolean booleanFromJson(JsonObject jo, String key) { return jo.has(key) ? jo.get(key).getAsBoolean() : false; } }