index int64 | repo_id string | file_path string | content string |
|---|---|---|---|
0 | java-sources/ai/stapi/schema/0.3.2/ai/stapi/schema | java-sources/ai/stapi/schema/0.3.2/ai/stapi/schema/structuredefinition/ElementDefinition.java | package ai.stapi.schema.structuredefinition;
import ai.stapi.serialization.SerializableObject;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.List;
import org.jetbrains.annotations.Nullable;
public class ElementDefinition implements SerializableObject {
public static final String SERIALIZATION_TYPE = "ElementDefinition";
private String path;
@Nullable
private Integer min;
@Nullable
private String max;
@Nullable
private String shortDescription;
@Nullable
private String definition;
@Nullable
private String comment;
@Nullable
private String contentReference;
private List<ElementDefinitionType> type;
protected ElementDefinition() {
}
public ElementDefinition(
String path,
List<ElementDefinitionType> type,
@Nullable Integer min,
@Nullable String max,
@Nullable String shortDescription,
@Nullable String definition,
@Nullable String comment
) {
this.path = path;
this.type = type;
this.min = min;
this.max = max;
this.shortDescription = shortDescription;
this.definition = definition;
this.comment = comment;
}
public ElementDefinition(
String path,
List<ElementDefinitionType> type,
@Nullable Integer min,
@Nullable String max,
@Nullable String shortDescription,
@Nullable String definition,
@Nullable String comment,
@Nullable String contentReference
) {
this.path = path;
this.type = type;
this.min = min;
this.max = max;
this.shortDescription = shortDescription;
this.definition = definition;
this.comment = comment;
this.contentReference = contentReference;
}
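// A minimal construction sketch (hypothetical values) showing how the two constructors
// differ: the longer overload additionally carries a contentReference, which FHIR-style
// definitions use to point at another element instead of listing types.
//
//   var element = new ElementDefinition(
//       "Patient.name",                                   // path
//       List.of(new ElementDefinitionType("HumanName")),  // type
//       0, "*",                                           // min / max cardinality
//       "Patient name", "A name associated with the patient", null);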
public String getPath() {
return path;
}
@Nullable
public Integer getMin() {
return min;
}
@Nullable
public String getMax() {
return max;
}
@Nullable
@JsonProperty("short")
public String getShortDescription() {
return shortDescription;
}
@Nullable
public String getDefinition() {
return definition;
}
@Nullable
public String getComment() {
return comment;
}
public List<ElementDefinitionType> getType() {
return this.type;
}
@Nullable
public String getContentReference() {
return contentReference;
}
@Override
public String toString() {
return "ElementDefinition{" +
"path='" + path + '\'' +
", min=" + min +
", max='" + max + '\'' +
", shortDescription='" + shortDescription + '\'' +
", definition='" + definition + '\'' +
", comment='" + comment + '\'' +
", type=" + type +
'}';
}
@Override
public String getSerializationType() {
return SERIALIZATION_TYPE;
}
} |
0 | java-sources/ai/stapi/schema/0.3.2/ai/stapi/schema | java-sources/ai/stapi/schema/0.3.2/ai/stapi/schema/structuredefinition/ElementDefinitionType.java | package ai.stapi.schema.structuredefinition;
import ai.stapi.identity.UniqueIdentifier;
import java.util.ArrayList;
import java.util.List;
public class ElementDefinitionType {
private String code;
private UniqueIdentifier codeRef;
private List<String> targetProfile;
private List<UniqueIdentifier> targetProfileRef;
protected ElementDefinitionType() {
}
public ElementDefinitionType(
String code,
UniqueIdentifier codeRef,
List<String> targetProfile,
List<UniqueIdentifier> targetProfileRef
) {
this.code = code;
this.codeRef = codeRef;
this.targetProfile = targetProfile;
this.targetProfileRef = targetProfileRef;
}
public ElementDefinitionType(String code, UniqueIdentifier codeRef) {
this.code = code;
this.codeRef = codeRef;
this.targetProfile = new ArrayList<>();
this.targetProfileRef = new ArrayList<>();
}
public ElementDefinitionType(String code) {
this.code = code;
this.codeRef = new UniqueIdentifier(code);
this.targetProfile = new ArrayList<>();
this.targetProfileRef = new ArrayList<>();
}
public ElementDefinitionType(
String code,
List<String> targetProfile
) {
this.code = code;
this.codeRef = new UniqueIdentifier(code);
this.targetProfile = targetProfile;
this.targetProfileRef = targetProfile.stream().map(UniqueIdentifier::new).toList();
}
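// Construction sketch (hypothetical codes): the convenience constructors derive the
// ref fields from the raw strings, so callers only pass codes and profile URLs. Note
// that, unlike StructureDefinitionNormalizer, this class does not strip URL prefixes.
//
//   var scalar = new ElementDefinitionType("string");
//   var reference = new ElementDefinitionType(
//       "Reference", List.of("http://hl7.org/fhir/StructureDefinition/Patient"));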
public String getCode() {
return code;
}
public UniqueIdentifier getCodeRef() {
return codeRef;
}
public List<String> getTargetProfile() {
return targetProfile;
}
public List<UniqueIdentifier> getTargetProfileRef() {
return targetProfileRef;
}
}
|
0 | java-sources/ai/stapi/schema/0.3.2/ai/stapi/schema | java-sources/ai/stapi/schema/0.3.2/ai/stapi/schema/structuredefinition/RawStructureDefinitionData.java | package ai.stapi.schema.structuredefinition;
import ai.stapi.serialization.SerializableObject;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.ArrayList;
public class RawStructureDefinitionData implements SerializableObject {
public static final String SERIALIZATION_TYPE = "RawStructureDefinitionData";
private String id;
private String url;
private String status;
private String description;
private String kind;
private Boolean isAbstract;
private String type;
private String baseDefinition;
private Differential differential;
protected RawStructureDefinitionData() {
}
public RawStructureDefinitionData(
String id,
String url,
String status,
String description,
String kind,
Boolean isAbstract,
String type,
String baseDefinition,
Differential differential
) {
this.id = id;
this.url = url;
this.status = status;
this.description = description;
this.kind = kind;
this.isAbstract = isAbstract;
this.type = type;
this.baseDefinition = baseDefinition;
this.differential = differential;
}
public String getId() {
return id;
}
public String getUrl() {
return url;
}
public String getStatus() {
return status;
}
public String getDescription() {
return description;
}
public String getKind() {
return kind;
}
@JsonProperty("abstract")
public Boolean getIsAbstract() {
return isAbstract;
}
public String getType() {
return type;
}
public String getBaseDefinition() {
return baseDefinition;
}
public Differential getDifferential() {
return differential;
}
@Override
public String getSerializationType() {
return SERIALIZATION_TYPE;
}
public static class Differential {
private ArrayList<RawStructureDefinitionElementDefinition> element;
protected Differential() {
}
public Differential(ArrayList<RawStructureDefinitionElementDefinition> element) {
this.element = element;
}
public ArrayList<RawStructureDefinitionElementDefinition> getElement() {
return element;
}
}
}
|
0 | java-sources/ai/stapi/schema/0.3.2/ai/stapi/schema | java-sources/ai/stapi/schema/0.3.2/ai/stapi/schema/structuredefinition/RawStructureDefinitionElementDefinition.java | package ai.stapi.schema.structuredefinition;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.ArrayList;
import java.util.List;
import org.jetbrains.annotations.Nullable;
public class RawStructureDefinitionElementDefinition {
private String path;
@Nullable
private Integer min;
@Nullable
private String max;
@Nullable
private String shortDescription;
@Nullable
private String definition;
@Nullable
private String comment;
@Nullable
private String contentReference;
private ArrayList<ElementDefinitionType> type;
protected RawStructureDefinitionElementDefinition() {
}
public RawStructureDefinitionElementDefinition(
String path,
@Nullable Integer min,
@Nullable String max,
@Nullable String shortDescription,
@Nullable String definition,
@Nullable String comment,
ArrayList<ElementDefinitionType> type
) {
this.path = path;
this.min = min;
this.max = max;
this.shortDescription = shortDescription;
this.definition = definition;
this.comment = comment;
this.type = type;
}
public RawStructureDefinitionElementDefinition(
String path,
@Nullable Integer min,
@Nullable String max,
@Nullable String shortDescription,
@Nullable String definition,
@Nullable String comment,
@Nullable String contentReference,
ArrayList<ElementDefinitionType> type
) {
this.path = path;
this.min = min;
this.max = max;
this.shortDescription = shortDescription;
this.definition = definition;
this.comment = comment;
this.type = type;
this.contentReference = contentReference;
}
public String getPath() {
return path;
}
@Nullable
public Integer getMin() {
return min;
}
@Nullable
public String getMax() {
return max;
}
@Nullable
@JsonProperty("short")
public String getShortDescription() {
return shortDescription;
}
@Nullable
public String getDefinition() {
return definition;
}
@Nullable
public String getComment() {
return comment;
}
@Nullable
public String getContentReference() {
return contentReference;
}
public ArrayList<ElementDefinitionType> getType() {
return type;
}
public static class ElementDefinitionType {
private String code;
private List<String> targetProfile = new ArrayList<>();
protected ElementDefinitionType() {
}
public ElementDefinitionType(
String code,
List<String> targetProfile
) {
this.code = code;
this.targetProfile = targetProfile;
}
public String getCode() {
return code;
}
public List<String> getTargetProfile() {
return targetProfile;
}
}
} |
0 | java-sources/ai/stapi/schema/0.3.2/ai/stapi/schema | java-sources/ai/stapi/schema/0.3.2/ai/stapi/schema/structuredefinition/StructureDefinitionData.java | package ai.stapi.schema.structuredefinition;
import ai.stapi.identity.UniqueIdentifier;
import ai.stapi.serialization.SerializableObject;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.List;
public class StructureDefinitionData implements SerializableObject {
public static final String SERIALIZATION_TYPE = "StructureDefinition";
private String id;
private String url;
private String status;
private String description;
private String kind;
private Boolean isAbstract;
private String type;
private String baseDefinition;
private UniqueIdentifier baseDefinitionRef;
private Differential differential;
protected StructureDefinitionData() {
}
public StructureDefinitionData(
String id,
String url,
String status,
String description,
String kind,
Boolean isAbstract,
String type,
String baseDefinition,
UniqueIdentifier baseDefinitionRef,
Differential differential
) {
this.id = id;
this.url = url;
this.status = status;
this.description = description;
this.kind = kind;
this.isAbstract = isAbstract;
this.type = type;
this.baseDefinition = baseDefinition;
this.baseDefinitionRef = baseDefinitionRef;
this.differential = differential;
}
public StructureDefinitionData(
String id,
String url,
String status,
String description,
String kind,
Boolean isAbstract,
String type,
String baseDefinition,
UniqueIdentifier baseDefinitionRef,
List<ElementDefinition> differential
) {
this.id = id;
this.url = url;
this.status = status;
this.description = description;
this.kind = kind;
this.isAbstract = isAbstract;
this.type = type;
this.baseDefinition = baseDefinition;
this.baseDefinitionRef = baseDefinitionRef;
this.differential = new Differential(differential);
}
public String getId() {
return id;
}
public String getUrl() {
return url;
}
public String getStatus() {
return status;
}
public String getDescription() {
return description;
}
public String getKind() {
return kind;
}
@JsonProperty("abstract")
public Boolean getIsAbstract() {
return isAbstract;
}
public String getType() {
return type;
}
public Differential getDifferential() {
return differential;
}
public UniqueIdentifier getBaseDefinitionRef() {
return baseDefinitionRef;
}
public String getBaseDefinition() {
return baseDefinition;
}
@Override
@JsonIgnore
public String getSerializationType() {
return StructureDefinitionData.SERIALIZATION_TYPE;
}
@Override
public String toString() {
return "StructureDefinitionDTO{" +
"id='" + id + '\'' +
", url='" + url + '\'' +
", status='" + status + '\'' +
", description='" + description + '\'' +
", kind='" + kind + '\'' +
", isAbstract=" + isAbstract +
", type='" + type + '\'' +
", baseDefinition=" + baseDefinitionRef +
", differential=" + differential +
'}';
}
public static class Differential implements SerializableObject {
public static final String SERIALIZATION_TYPE = "StructureDefinitionDifferential";
private List<ElementDefinition> element;
private Differential() {
}
public Differential(List<ElementDefinition> element) {
this.element = element;
}
public List<ElementDefinition> getElement() {
return element;
}
@Override
public String toString() {
return "Differential{" +
"element=" + element +
'}';
}
@Override
public String getSerializationType() {
return SERIALIZATION_TYPE;
}
}
}
|
0 | java-sources/ai/stapi/schema/0.3.2/ai/stapi/schema | java-sources/ai/stapi/schema/0.3.2/ai/stapi/schema/structuredefinition/StructureDefinitionDataMerger.java | package ai.stapi.schema.structuredefinition;
import java.util.ArrayList;
import java.util.List;
public class StructureDefinitionDataMerger {
private StructureDefinitionDataMerger() {
}
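// Merge semantics: for scalar fields the master value wins whenever it is non-null;
// for the differential, all master elements are kept and slave elements are appended
// only when no master element shares the same path. A hedged usage sketch
// (hypothetical variables):
//
//   StructureDefinitionData merged = StructureDefinitionDataMerger.merge(
//       lowerPriorityDefinition,  // slave
//       overridingDefinition);    // master, must have the same id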
public static StructureDefinitionData merge(
StructureDefinitionData slave,
StructureDefinitionData master
) {
if (!slave.getId().equals(master.getId())) {
throw new RuntimeException(
"Cannot merge two different structure definitions." +
"\nSlave id: " + slave.getId() +
"Master id: " + master.getId()
);
}
var masterDifferential = master.getDifferential().getElement();
var slaveDifferential = slave.getDifferential().getElement();
var mergedDifferential = new ArrayList<>(masterDifferential);
slaveDifferential.stream()
.filter(
slaveElement -> !StructureDefinitionDataMerger.isContainedInMaster(
masterDifferential,
slaveElement
))
.forEach(mergedDifferential::add);
return new StructureDefinitionData(
master.getId() != null ? master.getId() : slave.getId(),
master.getUrl() != null ? master.getUrl() : slave.getUrl(),
master.getStatus() != null ? master.getStatus() : slave.getStatus(),
master.getDescription() != null ? master.getDescription() : slave.getDescription(),
master.getKind() != null ? master.getKind() : slave.getKind(),
master.getIsAbstract() != null ? master.getIsAbstract() : slave.getIsAbstract(),
master.getType() != null ? master.getType() : slave.getType(),
master.getBaseDefinition() != null ? master.getBaseDefinition() : slave.getBaseDefinition(),
master.getBaseDefinitionRef() != null ? master.getBaseDefinitionRef() : slave.getBaseDefinitionRef(),
mergedDifferential
);
}
private static boolean isContainedInMaster(
List<ElementDefinition> masterDifferential,
ElementDefinition slaveElement
) {
return masterDifferential.stream().anyMatch(
masterElement -> masterElement.getPath().equals(slaveElement.getPath())
);
}
} |
0 | java-sources/ai/stapi/schema/0.3.2/ai/stapi/schema | java-sources/ai/stapi/schema/0.3.2/ai/stapi/schema/structuredefinition/StructureDefinitionId.java | package ai.stapi.schema.structuredefinition;
import ai.stapi.identity.UniqueIdentifier;
public class StructureDefinitionId extends UniqueIdentifier {
private StructureDefinitionId() {
}
public StructureDefinitionId(String id) {
super(id);
}
} |
0 | java-sources/ai/stapi/schema/0.3.2/ai/stapi/schema | java-sources/ai/stapi/schema/0.3.2/ai/stapi/schema/structuredefinition/StructureDefinitionNormalizer.java | package ai.stapi.schema.structuredefinition;
import ai.stapi.identity.UniqueIdentifier;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import org.apache.commons.lang3.StringUtils;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
public class StructureDefinitionNormalizer {
private StructureDefinitionNormalizer() {
}
public static StructureDefinitionData normalize(RawStructureDefinitionData imported) {
/*
Sometimes, when a structure definition has an element of a primitive type, the type is referenced by URL rather than by ID.
*/
var fixedElements = getFixedElements(imported);
String replaced = null;
var baseDefinition = imported.getBaseDefinition();
if (baseDefinition != null) {
replaced = baseDefinition.replace(
"http://hl7.org/fhir/StructureDefinition/",
""
);
}
return new StructureDefinitionData(
imported.getId(),
imported.getUrl(),
imported.getStatus(),
imported.getDescription(),
imported.getKind(),
imported.getIsAbstract(),
imported.getType(),
baseDefinition,
baseDefinition == null ? null : new StructureDefinitionId(replaced),
fixedElements == null ? null : new StructureDefinitionData.Differential(fixedElements)
);
}
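// Example of the normalization above (hypothetical input): a baseDefinition of
// "http://hl7.org/fhir/StructureDefinition/DomainResource" yields a
// StructureDefinitionId of "DomainResource", and an element type code of
// "http://hl7.org/fhirpath/System.String" is resolved to the codeRef "string".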
public static Map<String, Object> normalize(Map<String, Object> structureDefinition) {
/*
Sometimes, when a structure definition has an element of a primitive type, the type is referenced by URL rather than by ID.
*/
var fixedElements = getFixedElements(structureDefinition);
String replaced = null;
var baseDefinition = (String) structureDefinition.get("baseDefinition");
if (baseDefinition != null) {
replaced = baseDefinition.replace(
"http://hl7.org/fhir/StructureDefinition/",
""
);
}
var newMap = new HashMap<>(structureDefinition);
if (baseDefinition != null) {
newMap.put(
"baseDefinitionRef",
new StructureDefinitionId(replaced)
);
}
if (fixedElements != null) {
newMap.put(
"differential",
new HashMap<>(Map.of(
"element", fixedElements
))
);
}
newMap.remove("snapshot");
return newMap;
}
@Nullable
private static ArrayList<ElementDefinition> getFixedElements(
RawStructureDefinitionData imported
) {
if (imported.getDifferential() == null) {
return null;
}
return imported.getDifferential().getElement().stream().map(element -> {
ArrayList<ElementDefinitionType> fixedTypes = new ArrayList<>();
if (element.getType() != null) {
fixedTypes = element.getType().stream()
.map(type -> {
if (type.getCode().contains("http://hl7.org/fhirpath/System.")) {
var fixedCode = StringUtils.uncapitalize(
type.getCode().replace(
"http://hl7.org/fhirpath/System.",
""
)
);
return new ElementDefinitionType(
type.getCode(),
new UniqueIdentifier(fixedCode),
type.getTargetProfile(),
fixTargetProfile(type.getTargetProfile())
);
}
return new ElementDefinitionType(
type.getCode(),
new UniqueIdentifier(type.getCode()),
type.getTargetProfile(),
fixTargetProfile(type.getTargetProfile())
);
}).collect(Collectors.toCollection(ArrayList::new));
}
return new ElementDefinition(
element.getPath(),
fixedTypes,
element.getMin(),
element.getMax(),
element.getShortDescription(),
element.getDefinition(),
element.getComment(),
element.getContentReference()
);
}).collect(Collectors.toCollection(ArrayList::new));
}
@Nullable
private static ArrayList<Map<String, Object>> getFixedElements(
Map<String, Object> structureDefinition
) {
var differential = (Map<String, Object>) structureDefinition.get("differential");
if (differential == null) {
return null;
}
var elements = (List<Map<String, Object>>) differential.get("element");
return elements.stream().map(element -> {
ArrayList<Map<String, Object>> fixedTypes = new ArrayList<>();
var types = (List<Map<String, Object>>) element.get("type");
if (types != null) {
fixedTypes = types.stream().map(type -> {
var code = (String) type.get("code");
var fixedType = new HashMap<>(type);
if (code.contains("http://hl7.org/fhirpath/System.")) {
var fixedCode = StringUtils.uncapitalize(
code.replace(
"http://hl7.org/fhirpath/System.",
""
)
);
fixedType.put("codeRef", new UniqueIdentifier(fixedCode));
} else {
fixedType.put("codeRef", new UniqueIdentifier(code));
}
var targetProfile = (List<String>) type.get("targetProfile");
if (targetProfile != null) {
fixedType.put("targetProfileRef", fixTargetProfile(targetProfile));
}
return fixedType;
}).collect(Collectors.toCollection(ArrayList::new));
}
var fixedElement = new HashMap<>(element);
fixedElement.put(
"type",
fixedTypes
);
var id = (String) element.get("id");
if (id != null) {
fixedElement.put(
"id",
id.replace("[x]", "")
);
}
return fixedElement;
}).collect(Collectors.toCollection(ArrayList::new));
}
@NotNull
private static List<UniqueIdentifier> fixTargetProfile(List<String> targetProfile) {
return targetProfile.stream().map(profile -> profile.replace(
"http://hl7.org/fhir/StructureDefinition/",
""
)).map(UniqueIdentifier::new).toList();
}
}
|
0 | java-sources/ai/stapi/schema/0.3.2/ai/stapi/schema/structuredefinition | java-sources/ai/stapi/schema/0.3.2/ai/stapi/schema/structuredefinition/loader/AdHocStructureDefinitionLoader.java | package ai.stapi.schema.structuredefinition.loader;
import ai.stapi.schema.adHocLoaders.GenericAdHocModelDefinitionsLoader;
import ai.stapi.schema.scopeProvider.ScopeCacher;
import ai.stapi.schema.scopeProvider.ScopeOptions;
import ai.stapi.schema.structureSchema.AbstractStructureType;
import ai.stapi.schema.structuredefinition.RawStructureDefinitionData;
import ai.stapi.schema.structuredefinition.StructureDefinitionData;
import ai.stapi.schema.structuredefinition.StructureDefinitionNormalizer;
import java.util.ArrayList;
import java.util.List;
import org.jetbrains.annotations.NotNull;
public class AdHocStructureDefinitionLoader implements StructureDefinitionLoader {
private final GenericAdHocModelDefinitionsLoader genericAdHocModelDefinitionsLoader;
private final ScopeCacher scopeCacher;
public AdHocStructureDefinitionLoader(
GenericAdHocModelDefinitionsLoader genericAdHocModelDefinitionsLoader,
ScopeCacher scopeCacher
) {
this.genericAdHocModelDefinitionsLoader = genericAdHocModelDefinitionsLoader;
this.scopeCacher = scopeCacher;
}
@Override
public List<StructureDefinitionData> load() {
return this.scopeCacher.getCachedOrCompute(
AdHocStructureDefinitionLoader.class,
this::load
);
}
private List<StructureDefinitionData> load(ScopeOptions scopeOptions) {
var rawStructure = this.genericAdHocModelDefinitionsLoader.load(
scopeOptions,
RawStructureDefinitionData.SERIALIZATION_TYPE,
RawStructureDefinitionData.class
).stream()
.map(StructureDefinitionNormalizer::normalize)
.toList();
var structureDefinition = this.genericAdHocModelDefinitionsLoader.load(
scopeOptions,
StructureDefinitionData.SERIALIZATION_TYPE,
StructureDefinitionData.class
);
var finalStructures = new ArrayList<>(rawStructure);
finalStructures.addAll(structureDefinition);
return this.sortDefinitionsByKind(finalStructures);
}
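// Definitions are ordered primitive -> complex -> resource, presumably so that each
// kind is loaded after the kinds it may depend on.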
@NotNull
private ArrayList<StructureDefinitionData> sortDefinitionsByKind(
List<StructureDefinitionData> definitionDTOs
) {
var sortedList = new ArrayList<StructureDefinitionData>();
sortedList.addAll(
definitionDTOs.stream()
.filter(dto -> dto.getKind().equals(AbstractStructureType.PRIMITIVE_TYPE))
.toList()
);
sortedList.addAll(
definitionDTOs.stream()
.filter(dto -> dto.getKind().equals(AbstractStructureType.COMPLEX_TYPE))
.toList()
);
sortedList.addAll(
definitionDTOs.stream()
.filter(dto -> dto.getKind().equals(AbstractStructureType.RESOURCE))
.toList()
);
return sortedList;
}
}
|
0 | java-sources/ai/stapi/schema/0.3.2/ai/stapi/schema/structuredefinition | java-sources/ai/stapi/schema/0.3.2/ai/stapi/schema/structuredefinition/loader/NullStructureDefinitionLoader.java | package ai.stapi.schema.structuredefinition.loader;
import ai.stapi.schema.structuredefinition.StructureDefinitionData;
import java.util.ArrayList;
import java.util.List;
public class NullStructureDefinitionLoader implements StructureDefinitionLoader {
@Override
public List<StructureDefinitionData> load() {
return new ArrayList<>();
}
}
|
0 | java-sources/ai/stapi/schema/0.3.2/ai/stapi/schema/structuredefinition | java-sources/ai/stapi/schema/0.3.2/ai/stapi/schema/structuredefinition/loader/StructureDefinitionLoader.java | package ai.stapi.schema.structuredefinition.loader;
import ai.stapi.schema.structuredefinition.StructureDefinitionData;
import java.util.List;
public interface StructureDefinitionLoader {
List<StructureDefinitionData> load();
}
|
0 | java-sources/ai/starlake/starlake-core_2.12 | java-sources/ai/starlake/starlake-core_2.12/1.3.2/Setup.java | import javax.net.ssl.*;
import java.io.*;
import java.net.*;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.security.KeyManagementException;
import java.security.NoSuchAlgorithmException;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.function.BiConsumer;
import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
public class Setup extends ProxySelector implements X509TrustManager {
private static class UserPwdAuth extends Authenticator {
@Override
protected PasswordAuthentication getPasswordAuthentication() {
return new PasswordAuthentication(username, password.toCharArray());
}
}
private static class ResourceDependency {
private final String[] urls;
private final String artefactName;
public ResourceDependency(String artefactName, String... url) {
this.urls = url;
this.artefactName = artefactName;
}
public List<String> getUrlNames() {
return Arrays.stream(urls).map(this::getUrlName).collect(Collectors.toList());
}
public String getUrlName(String url) {
return url.substring(url.lastIndexOf("/") + 1);
}
}
private static String protocol = null;
private static String host = null;
private static int port = 0;
private static String username = null;
private static String password = null;
private static String httpsProxy = getEnv("https_proxy").orElse("");
private static String httpProxy = getEnv("http_proxy").orElse("");
private static String noProxy = getEnv("no_proxy").orElse("").replaceAll(",", "|");
private static Proxy proxy = Proxy.NO_PROXY;
private static HttpClient client = null;
private static boolean isWindowsOs() {
String os = System.getProperty("os.name").toLowerCase();
return os.startsWith("windows");
}
private static void parseProxy(String proxy) {
if (proxy.isEmpty()) {
return;
}
final Pattern pattern = Pattern.compile("(https?|socks5?):\\/\\/([^:].+)", Pattern.CASE_INSENSITIVE);
final Matcher m = pattern.matcher(proxy);
if (m.matches()) {
protocol = m.group(1).toLowerCase();
final String hostAndPortWithMaybeCredentials = m.group(2);
if (hostAndPortWithMaybeCredentials.contains("@")) {
final String[] hostAndPortWithCredentials = hostAndPortWithMaybeCredentials.split("@");
final String[] credentials = hostAndPortWithCredentials[0].split(":");
assert(credentials.length == 2): "Invalid credentials format, expecting 'username:password'";
username = credentials[0];
password = credentials[1];
final String[] hostAndPort = hostAndPortWithCredentials[1].split(":");
host = hostAndPort[0];
if (hostAndPort.length > 1) {
port = Integer.parseInt(hostAndPort[1]);
}
} else {
final String[] hostAndPort = hostAndPortWithMaybeCredentials.split(":");
host = hostAndPort[0];
if (hostAndPort.length > 1) {
port = Integer.parseInt(hostAndPort[1]);
}
}
} else {
throw new IllegalArgumentException("Invalid proxy format: " + proxy);
}
}
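// Examples of accepted proxy strings (hypothetical hosts):
//   http://proxy.local:3128             -> protocol=http, host=proxy.local, port=3128
//   https://bob:secret@proxy.local:8443 -> additionally sets username/password
//   socks5://proxy.local                -> port defaults to 1080 later, in setProxy()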
private static void setProxy() {
if (!httpsProxy.isEmpty()) {
parseProxy(httpsProxy);
} else if (!httpProxy.isEmpty()) {
parseProxy(httpProxy);
}
if (host != null) {
if (port == 0) {
if (protocol.equals("https")) {
port = 443;
} else if (protocol.startsWith("socks")) {
port = 1080;
} else {
port = 80;
}
}
Proxy.Type proxyType = Proxy.Type.HTTP;
if (protocol.startsWith("socks")) {
proxyType = Proxy.Type.SOCKS;
}
proxy = new Proxy(proxyType, new InetSocketAddress(host, port));
}
if (!noProxy.isEmpty()) {
System.setProperty("http.nonProxyHosts", noProxy);
}
}
// ENV VARS
public static boolean ENABLE_ALL = envIsTrue("ENABLE_ALL");
public static boolean ENABLE_BIGQUERY = ENABLE_ALL || envIsTrue("ENABLE_BIGQUERY");
public static boolean ENABLE_AZURE = ENABLE_ALL || envIsTrue("ENABLE_AZURE");
public static boolean ENABLE_SNOWFLAKE = ENABLE_ALL || envIsTrue("ENABLE_SNOWFLAKE");
public static boolean ENABLE_REDSHIFT = ENABLE_ALL || envIsTrue("ENABLE_REDSHIFT");
public static boolean ENABLE_POSTGRESQL = ENABLE_ALL || envIsTrue("ENABLE_POSTGRESQL");
public static boolean ENABLE_DUCKDB = ENABLE_ALL || envIsTrue("ENABLE_DUCKDB");
private static final boolean[] ALL_ENABLERS = new boolean[] {
ENABLE_BIGQUERY,
ENABLE_AZURE,
ENABLE_SNOWFLAKE,
ENABLE_REDSHIFT,
ENABLE_POSTGRESQL,
ENABLE_DUCKDB
};
// SCALA_VERSION defaults to 2.13; for Scala 2.13 the spark-redshift connector is only available as a snapshot (see SPARK_REDSHIFT_VERSION()), so set SCALA_VERSION=2.12 to use the released connector
private static final String SCALA_VERSION = getEnv("SCALA_VERSION").orElse("2.13");
// STARLAKE
private static final String SL_VERSION = getEnv("SL_VERSION").orElse("1.3.0");
// SPARK
private static final String SPARK_VERSION = getEnv("SPARK_VERSION").orElse("3.5.3");
private static final String HADOOP_VERSION = getEnv("HADOOP_VERSION").orElse("3");
// BIGQUERY
private static final String SPARK_BQ_VERSION = getEnv("SPARK_BQ_VERSION").orElse("0.41.1");
// deltalake
private static final String DELTA_SPARK = getEnv("SPARK_DELTA").orElse("3.2.1");
private static final String HADOOP_AZURE_VERSION = getEnv("HADOOP_AZURE_VERSION").orElse("3.3.5");
private static final String AZURE_STORAGE_VERSION = getEnv("AZURE_STORAGE_VERSION").orElse("8.6.6");
private static final String JETTY_VERSION = getEnv("JETTY_VERSION").orElse("9.4.51.v20230217");
// HADOOP_LIB ON WINDOWS
private static final ResourceDependency[] HADOOP_LIBS = new ResourceDependency[]{
new ResourceDependency("winutils", "https://raw.githubusercontent.com/cdarlint/winutils/master/hadoop-3.3.5/bin/winutils.exe"),
new ResourceDependency("hadoop.dll", "https://raw.githubusercontent.com/cdarlint/winutils/master/hadoop-3.3.5/bin/hadoop.dll")
};
// SNOWFLAKE
private static final String SNOWFLAKE_JDBC_VERSION = getEnv("SNOWFLAKE_JDBC_VERSION").orElse("3.21.0");
private static final String SPARK_SNOWFLAKE_VERSION = getEnv("SPARK_SNOWFLAKE_VERSION").orElse("3.1.1");
// POSTGRESQL
private static final String POSTGRESQL_VERSION = getEnv("POSTGRESQL_VERSION").orElse("42.5.4");
// DUCKDB
private static final String DUCKDB_VERSION = getEnv("DUCKDB_VERSION").orElse("1.1.3");
// REDSHIFT
private static final String AWS_JAVA_SDK_VERSION = getEnv("AWS_JAVA_SDK_VERSION").orElse("1.12.780");
private static final String HADOOP_AWS_VERSION = getEnv("HADOOP_AWS_VERSION").orElse("3.3.4");
private static final String REDSHIFT_JDBC_VERSION = getEnv("REDSHIFT_JDBC_VERSION").orElse("2.1.0.32");
private static String SPARK_REDSHIFT_VERSION() {
if (SCALA_VERSION.equals("2.13")) {
return getEnv("SPARK_REDSHIFT_VERSION").orElse("6.3.0-spark_3.5-SNAPSHOT");
} else {
return getEnv("SPARK_REDSHIFT_VERSION").orElse("6.3.0-spark_3.5");
}
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// DEPENDENCY DOWNLOAD URLS
private static final ResourceDependency SPARK_JAR = new ResourceDependency("spark", "https://archive.apache.org/dist/spark/spark-" + SPARK_VERSION + "/spark-" + SPARK_VERSION + "-bin-hadoop" + HADOOP_VERSION + ".tgz");
private static final ResourceDependency SPARK_JAR_213 = new ResourceDependency("spark", "https://archive.apache.org/dist/spark/spark-" + SPARK_VERSION + "/spark-" + SPARK_VERSION + "-bin-hadoop" + HADOOP_VERSION + "-scala2.13.tgz");
private static final ResourceDependency SPARK_BQ_JAR = new ResourceDependency("spark-bigquery-with-dependencies",
"https://repo1.maven.org/maven2/com/google/cloud/spark/spark-bigquery-with-dependencies_" + SCALA_VERSION + "/" +
SPARK_BQ_VERSION + "/" +
"spark-bigquery-with-dependencies_" + SCALA_VERSION + "-" + SPARK_BQ_VERSION + ".jar");
private static final ResourceDependency DELTA_SPARK_JAR = new ResourceDependency("delta-spark",
"https://repo1.maven.org/maven2/io/delta/delta-spark_" + SCALA_VERSION + "/" + DELTA_SPARK + "/delta-spark_" + SCALA_VERSION + "-" + DELTA_SPARK + ".jar");
private static final ResourceDependency DELTA_STORAGE_JAR = new ResourceDependency("delta-storage",
"https://repo1.maven.org/maven2/io/delta/delta-storage" + "/" + DELTA_SPARK + "/delta-storage" +"-" + DELTA_SPARK + ".jar");
private static final ResourceDependency HADOOP_AZURE_JAR = new ResourceDependency("hadoop-azure", "https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-azure/" + HADOOP_AZURE_VERSION + "/hadoop-azure-" + HADOOP_AZURE_VERSION + ".jar");
private static final ResourceDependency AZURE_STORAGE_JAR = new ResourceDependency("azure-storage", "https://repo1.maven.org/maven2/com/microsoft/azure/azure-storage/" + AZURE_STORAGE_VERSION + "/azure-storage-" + AZURE_STORAGE_VERSION + ".jar");
private static final ResourceDependency JETTY_SERVER_JAR = new ResourceDependency("jetty-server", "https://repo1.maven.org/maven2/org/eclipse/jetty/jetty-server/" + JETTY_VERSION + "/jetty-server-" + JETTY_VERSION + ".jar");
private static final ResourceDependency SNOWFLAKE_JDBC_JAR = new ResourceDependency("snowflake-jdbc", "https://repo1.maven.org/maven2/net/snowflake/snowflake-jdbc/" + SNOWFLAKE_JDBC_VERSION + "/snowflake-jdbc-" + SNOWFLAKE_JDBC_VERSION + ".jar");
private static final ResourceDependency SPARK_SNOWFLAKE_JAR = new ResourceDependency("spark-snowflake", "https://repo1.maven.org/maven2/net/snowflake/spark-snowflake_" + SCALA_VERSION +
"/" + SPARK_SNOWFLAKE_VERSION + "/spark-snowflake_" + SCALA_VERSION + "-" + SPARK_SNOWFLAKE_VERSION + ".jar");
private static final ResourceDependency POSTGRESQL_JAR = new ResourceDependency("postgresql", "https://repo1.maven.org/maven2/org/postgresql/postgresql/" + POSTGRESQL_VERSION + "/postgresql-" + POSTGRESQL_VERSION + ".jar");
private static final ResourceDependency DUCKDB_JAR = new ResourceDependency("duckdb_jdbc", "https://repo1.maven.org/maven2/org/duckdb/duckdb_jdbc/" + DUCKDB_VERSION + "/duckdb_jdbc-" + DUCKDB_VERSION + ".jar");
private static final ResourceDependency AWS_JAVA_SDK_JAR = new ResourceDependency("aws-java-sdk-bundle", "https://repo1.maven.org/maven2/com/amazonaws/aws-java-sdk-bundle/" + AWS_JAVA_SDK_VERSION + "/aws-java-sdk-bundle-" + AWS_JAVA_SDK_VERSION + ".jar");
private static final ResourceDependency HADOOP_AWS_JAR = new ResourceDependency("hadoop-aws", "https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-aws/" + HADOOP_AWS_VERSION + "/hadoop-aws-" + HADOOP_AWS_VERSION + ".jar");
private static final ResourceDependency REDSHIFT_JDBC_JAR = new ResourceDependency("redshift-jdbc42", "https://repo1.maven.org/maven2/com/amazon/redshift/redshift-jdbc42/" + REDSHIFT_JDBC_VERSION + "/redshift-jdbc42-" + REDSHIFT_JDBC_VERSION + ".jar");
private static ResourceDependency SPARK_REDSHIFT_JAR() {
if (SCALA_VERSION.equals("2.13")) {
return new ResourceDependency("spark-redshift", "https://s01.oss.sonatype.org/content/repositories/snapshots/ai/starlake/spark-redshift_" + SCALA_VERSION +
"/" + SPARK_REDSHIFT_VERSION() + "/spark-redshift_" + SCALA_VERSION + "-" + SPARK_REDSHIFT_VERSION() + ".jar");
}
else {
return new ResourceDependency("spark-redshift", "https://repo1.maven.org/maven2/io/github/spark-redshift-community/spark-redshift_" + SCALA_VERSION +
"/" + SPARK_REDSHIFT_VERSION() + "/spark-redshift_" + SCALA_VERSION + "-" + SPARK_REDSHIFT_VERSION() + ".jar");
}
}
private static final ResourceDependency STARLAKE_SNAPSHOT_JAR = new ResourceDependency("starlake-core", "https://s01.oss.sonatype.org/content/repositories/snapshots/ai/starlake/starlake-core" + "_" + SCALA_VERSION + "/" + SL_VERSION + "/starlake-core"+ "_" + SCALA_VERSION + "-" + SL_VERSION + "-assembly.jar");
private static final ResourceDependency STARLAKE_RELEASE_JAR = new ResourceDependency("starlake-core", "https://s01.oss.sonatype.org/content/repositories/releases/ai/starlake/starlake-core" + "_" + SCALA_VERSION + "/" + SL_VERSION + "/starlake-core" + "_" + SCALA_VERSION + "-" + SL_VERSION + "-assembly.jar", "https://s01.oss.sonatype.org/content/repositories/releases/ai/starlake/starlake-spark3" + "_" + SCALA_VERSION + "/" + SL_VERSION + "/starlake-spark3" + "_" + SCALA_VERSION + "-" + SL_VERSION + "-assembly.jar");
private static final ResourceDependency[] snowflakeDependencies = {
SNOWFLAKE_JDBC_JAR,
SPARK_SNOWFLAKE_JAR
};
private static final ResourceDependency[] redshiftDependencies = {
AWS_JAVA_SDK_JAR,
HADOOP_AWS_JAR,
REDSHIFT_JDBC_JAR,
SPARK_REDSHIFT_JAR()
};
private static final ResourceDependency[] azureDependencies = {
HADOOP_AZURE_JAR,
AZURE_STORAGE_JAR,
JETTY_SERVER_JAR
};
private static final ResourceDependency[] postgresqlDependencies = {
POSTGRESQL_JAR
};
private static final ResourceDependency[] duckDbDependencies = {
DUCKDB_JAR
};
private static final ResourceDependency[] bigqueryDependencies = {
SPARK_BQ_JAR
};
private static final ResourceDependency[] sparkDependencies = {
DELTA_SPARK_JAR,
DELTA_STORAGE_JAR
};
private static Optional<String> getEnv(String env) {
// consider empty env variables as not set
return Optional.ofNullable(System.getenv(env)).filter(s -> !s.isEmpty());
}
private static boolean envIsTrue(String env) {
String value = getEnv(env).orElse("false");
return !value.equals("false") && !value.equals("0");
}
private static void generateUnixVersions(File targetDir) throws IOException {
generateVersions(targetDir, "versions.sh", "#!/bin/bash\nset -e\n\n",
(writer) -> (variableName, value) -> {
try {
writer.write(variableName + "=" + "${" + variableName + ":-" + value + "}\n");
} catch (IOException e) {
throw new RuntimeException(e);
}
});
}
private static void generateWindowsVersions(File targetDir) throws IOException {
generateVersions(targetDir, "versions.cmd", "@ECHO OFF\n\n",
(writer) -> (variableName, value) -> {
try {
writer.write(
"if \"%" + variableName + "%\"==\"\" (\n" +
" SET " + variableName + "=" + value + "\n" +
")\n");
} catch (IOException e) {
throw new RuntimeException(e);
}
});
}
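// Sample generated output (actual values depend on the resolved versions):
//   versions.sh  ->  SL_VERSION=${SL_VERSION:-1.3.0}
//   versions.cmd ->  if "%SL_VERSION%"=="" (
//                      SET SL_VERSION=1.3.0
//                    )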
// A Function returning a BiConsumer is used because the JDK has no TriConsumer; this avoids defining a new functional interface.
private static void generateVersions(File targetDir, String versionsFileName, String fileHeader, Function<BufferedWriter, BiConsumer<String, String>> variableWriter) throws IOException {
File versionFile = new File(targetDir, versionsFileName);
deleteFile(versionFile);
BufferedWriter writer = new BufferedWriter(new FileWriter(versionFile));
try {
writer.write(fileHeader);
variableWriter.apply(writer).accept("ENABLE_BIGQUERY", String.valueOf(ENABLE_BIGQUERY));
variableWriter.apply(writer).accept("ENABLE_AZURE", String.valueOf(ENABLE_AZURE));
variableWriter.apply(writer).accept("ENABLE_SNOWFLAKE", String.valueOf(ENABLE_SNOWFLAKE));
variableWriter.apply(writer).accept("ENABLE_POSTGRESQL", String.valueOf(ENABLE_POSTGRESQL));
variableWriter.apply(writer).accept("ENABLE_REDSHIFT", String.valueOf(ENABLE_REDSHIFT));
variableWriter.apply(writer).accept("SL_VERSION", SL_VERSION);
variableWriter.apply(writer).accept("SCALA_VERSION", SCALA_VERSION);
variableWriter.apply(writer).accept("SPARK_VERSION", SPARK_VERSION);
variableWriter.apply(writer).accept("HADOOP_VERSION", HADOOP_VERSION);
variableWriter.apply(writer).accept("DUCKDB_VERSION", DUCKDB_VERSION);
if (ENABLE_BIGQUERY || !anyDependencyEnabled()) {
variableWriter.apply(writer).accept("SPARK_BQ_VERSION", SPARK_BQ_VERSION);
}
if (ENABLE_AZURE || !anyDependencyEnabled()) {
variableWriter.apply(writer).accept("HADOOP_AZURE_VERSION", HADOOP_AZURE_VERSION);
variableWriter.apply(writer).accept("AZURE_STORAGE_VERSION", AZURE_STORAGE_VERSION);
variableWriter.apply(writer).accept("JETTY_VERSION", JETTY_VERSION);
}
if (ENABLE_SNOWFLAKE || !anyDependencyEnabled()) {
variableWriter.apply(writer).accept("SPARK_SNOWFLAKE_VERSION", SPARK_SNOWFLAKE_VERSION);
variableWriter.apply(writer).accept("SNOWFLAKE_JDBC_VERSION", SNOWFLAKE_JDBC_VERSION);
}
if (ENABLE_POSTGRESQL || !anyDependencyEnabled()) {
variableWriter.apply(writer).accept("POSTGRESQL_VERSION", POSTGRESQL_VERSION);
}
if (ENABLE_REDSHIFT || !anyDependencyEnabled()) {
variableWriter.apply(writer).accept("AWS_JAVA_SDK_VERSION", AWS_JAVA_SDK_VERSION);
variableWriter.apply(writer).accept("HADOOP_AWS_VERSION", HADOOP_AWS_VERSION);
variableWriter.apply(writer).accept("REDSHIFT_JDBC_VERSION", REDSHIFT_JDBC_VERSION);
variableWriter.apply(writer).accept("SPARK_REDSHIFT_VERSION", SPARK_REDSHIFT_VERSION());
}
} finally {
writer.close();
}
System.out.println(versionFile.getAbsolutePath() + " created");
}
private static void generateVersions(File targetDir, boolean unix) throws IOException {
if (isWindowsOs() && !unix) {
generateWindowsVersions(targetDir);
} else {
generateUnixVersions(targetDir);
}
}
private static boolean anyDependencyEnabled() {
for (boolean enabled : ALL_ENABLERS) {
if (enabled) {
return true;
}
}
return ENABLE_ALL;
}
@Override
public void checkClientTrusted(X509Certificate[] chain, String authType) throws CertificateException {
}
@Override
public void checkServerTrusted(X509Certificate[] chain, String authType) throws CertificateException {
}
@Override
public X509Certificate[] getAcceptedIssuers() {
return new X509Certificate[0];
}
@Override
public List<Proxy> select(URI uri) {
return Collections.singletonList(proxy);
}
@Override
public void connectFailed(URI uri, SocketAddress sa, IOException ioe) {
throw new RuntimeException("Failed to connect to " + uri + " using proxy " + sa);
}
private static final Setup instance = new Setup();
private static final TrustManager alwaysTrustManager = instance;
private static final ProxySelector proxySelector = instance;
private static void setHttpClient() throws NoSuchAlgorithmException, KeyManagementException {
setProxy();
HttpClient.Builder clientBuilder = HttpClient.newBuilder();
clientBuilder.proxy(proxySelector);
if (username != null && password != null) {
Authenticator authenticator = new UserPwdAuth();
clientBuilder.authenticator(authenticator);
}
if (host != null && envIsTrue("SL_INSECURE")) {
System.out.println("Enabling insecure mode for SSL connections using proxy " + protocol + "://" + host + ":" + port);
// Create a trust manager that does not validate certificate chains
TrustManager[] trustAllCerts = new TrustManager[]{alwaysTrustManager};
// Install the all-trusting trust manager
SSLContext sc = SSLContext.getInstance("SSL");
sc.init(null, trustAllCerts, new java.security.SecureRandom());
clientBuilder.sslContext(sc);
}
client = clientBuilder.build();
}
private static void updateSparkLog4j2Properties(File sparkDir) {
File log4jFile = new File(new File(sparkDir, "conf"), "log4j2.properties");
try {
BufferedReader reader = new BufferedReader(new FileReader(log4jFile));
StringBuilder sb = new StringBuilder();
String line;
while ((line = reader.readLine()) != null) {
if (line.startsWith("rootLogger.level =")|| line.startsWith("rootLogger.level=")) {
line = "rootLogger.level = ${env:SL_LOG_LEVEL:-error}";
}
sb.append(line).append("\n");
}
reader.close();
sb.append("logger.shutdown.name=org.apache.spark.util.ShutdownHookManager").append("\n");
sb.append("logger.shutdown.level=OFF").append("\n");
sb.append("logger.env.name=org.apache.spark.SparkEnv").append("\n");
sb.append("logger.env.level=error").append("\n");
BufferedWriter writer = new BufferedWriter(new FileWriter(log4jFile));
writer.write(sb.toString());
writer.close();
} catch (IOException e) {
System.out.println("Failed to update log4j.properties");
e.printStackTrace();
}
}
private static void askUserWhichConfigToEnable() {
if (!anyDependencyEnabled()) {
System.out.print("Do you want to enable all datawarehouse configurations [y/n] ? ");
try {
BufferedReader reader = new BufferedReader(new InputStreamReader(System.in));
String answer = reader.readLine();
if (answer.equalsIgnoreCase("y")) {
ENABLE_AZURE = true;
ENABLE_BIGQUERY = true;
ENABLE_SNOWFLAKE = true;
ENABLE_REDSHIFT = true;
ENABLE_POSTGRESQL = true;
ENABLE_DUCKDB = true;
} else {
System.out.println("Please enable the configurations you want to use by setting the corresponding environment variables below");
System.out.println("ENABLE_BIGQUERY, ENABLE_DATABRICKS, ENABLE_AZURE, ENABLE_SNOWFLAKE, ENABLE_REDSHIFT, ENABLE_POSTGRESQL, ENABLE_ANY_JDBC");
System.exit(1);
}
} catch (IOException e) {
System.out.println("Failed to read user input");
e.printStackTrace();
}
}
}
public static void main(String[] args) throws IOException {
try {
if (args.length == 0) {
System.out.println("Please specify the target directory");
System.exit(1);
}
askUserWhichConfigToEnable();
final File targetDir = new File(args[0]);
if (!targetDir.exists()) {
targetDir.mkdirs();
System.out.println("Created target directory " + targetDir.getAbsolutePath());
}
setHttpClient();
if (!anyDependencyEnabled()) {
ENABLE_AZURE = true;
ENABLE_BIGQUERY = true;
ENABLE_SNOWFLAKE = true;
ENABLE_REDSHIFT = true;
ENABLE_POSTGRESQL = true;
ENABLE_DUCKDB = true;
}
final File binDir = new File(targetDir, "bin");
if (isWindowsOs()) {
final File hadoopDir = new File(binDir, "hadoop");
final File hadoopBinDir = new File(hadoopDir, "bin");
if (!hadoopBinDir.exists()) {
hadoopBinDir.mkdirs();
}
for (ResourceDependency lib : HADOOP_LIBS) {
downloadAndDisplayProgress(lib, (resource, url) -> new File(hadoopBinDir, resource.getUrlName(url)));
}
} else {
System.out.println("Unix OS detected");
}
File slDir = new File(binDir, "sl");
if (SL_VERSION.endsWith("SNAPSHOT")) {
STARLAKE_SNAPSHOT_JAR.getUrlNames().forEach(urlName -> deleteFile(new File(slDir, urlName)));
downloadAndDisplayProgress(new ResourceDependency[]{STARLAKE_SNAPSHOT_JAR}, slDir, false);
} else {
STARLAKE_RELEASE_JAR.getUrlNames().forEach(urlName -> deleteFile(new File(slDir, urlName)));
downloadAndDisplayProgress(new ResourceDependency[]{STARLAKE_RELEASE_JAR}, slDir, false);
}
File sparkDir = new File(binDir, "spark");
if (!sparkDir.exists()) {
downloadSpark(binDir);
}
File depsDir = new File(binDir, "deps");
downloadAndDisplayProgress(sparkDependencies, depsDir, true);
updateSparkLog4j2Properties(sparkDir);
downloadAndDisplayProgress(duckDbDependencies, depsDir, true);
if (ENABLE_BIGQUERY) {
downloadAndDisplayProgress(bigqueryDependencies, depsDir, true);
} else {
deleteDependencies(bigqueryDependencies, depsDir);
}
if (ENABLE_AZURE) {
downloadAndDisplayProgress(azureDependencies, depsDir, true);
} else {
deleteDependencies(azureDependencies, depsDir);
}
if (ENABLE_SNOWFLAKE) {
downloadAndDisplayProgress(snowflakeDependencies, depsDir, true);
} else {
deleteDependencies(snowflakeDependencies, depsDir);
}
if (ENABLE_REDSHIFT) {
downloadAndDisplayProgress(redshiftDependencies, depsDir, true);
} else {
deleteDependencies(redshiftDependencies, depsDir);
}
if (ENABLE_POSTGRESQL) {
downloadAndDisplayProgress(postgresqlDependencies, depsDir, true);
} else {
deleteDependencies(postgresqlDependencies, depsDir);
}
boolean unix = args.length > 1 && args[1].equalsIgnoreCase("unix");
generateVersions(targetDir, unix);
} catch (Exception e) {
System.out.println("Failed to download dependency: " + e.getMessage());
e.printStackTrace();
System.exit(1);
}
}
public static void downloadSpark(File binDir) throws IOException, InterruptedException {
ResourceDependency sparkJar = SPARK_JAR;
if (!SCALA_VERSION.equals("2.12")) {
sparkJar = SPARK_JAR_213;
}
downloadAndDisplayProgress(new ResourceDependency[]{sparkJar}, binDir, false);
sparkJar.getUrlNames().stream().map(tgzName -> new File(binDir, tgzName)).filter(File::exists).forEach(sparkFile -> {
String tgzName = sparkFile.getName();
ProcessBuilder builder = new ProcessBuilder("tar", "-xzf", sparkFile.getAbsolutePath(), "-C", binDir.getAbsolutePath()).inheritIO();
try {
Process process = builder.start();
process.waitFor();
} catch (InterruptedException | IOException e) {
System.out.println("Failed to extract spark tarball");
e.printStackTrace();
}
sparkFile.delete();
File sparkDir = new File(binDir, tgzName.substring(0, tgzName.lastIndexOf(".")));
sparkDir.renameTo(new File(binDir, "spark"));
sparkDir = new File(binDir, "spark");
File log4j2File = new File(sparkDir, "conf/log4j2.properties.template");
log4j2File.renameTo(new File(sparkDir, "conf/log4j2.properties"));
});
}
private static void downloadAndDisplayProgress(ResourceDependency[] dependencies, File targetDir, boolean replaceJar) throws IOException, InterruptedException {
if (!targetDir.exists()) {
targetDir.mkdirs();
}
if (replaceJar) {
deleteDependencies(dependencies, targetDir);
}
for (ResourceDependency dependency : dependencies) {
downloadAndDisplayProgress(dependency, (resource, url) -> new File(targetDir, resource.getUrlName(url)));
}
}
private static void deleteDependencies(ResourceDependency[] dependencies, File targetDir) {
if (targetDir.exists()) {
for (ResourceDependency dependency : dependencies) {
File[] files = targetDir.listFiles(f -> f.getName().startsWith(dependency.artefactName));
if (files != null) {
for (File file : files) {
deleteFile(file);
}
}
}
}
}
private static void deleteFile(File file) {
if (file.exists()) {
if (file.delete()) {
System.out.println(file.getAbsolutePath() + " deleted");
}
}
}
private static void downloadAndDisplayProgress(ResourceDependency resource, BiFunction<ResourceDependency, String, File> fileProducer) throws IOException, InterruptedException {
boolean successfullyDownloaded = false;
List<String> triedUrlList = new ArrayList<>();
System.out.println("Downloading " + resource.artefactName + "...");
for (String urlStr : resource.urls) {
File file = fileProducer.apply(resource, urlStr);
final int CHUNK_SIZE = 1024;
int filePartIndex = urlStr.lastIndexOf("/") + 1;
String urlFolder = urlStr.substring(0, filePartIndex);
HttpRequest request = HttpRequest.newBuilder()
.uri(URI.create(urlStr))
.build();
HttpResponse<InputStream> response = client.send(request, HttpResponse.BodyHandlers.ofInputStream());
if (response.statusCode() == 200) {
long lengthOfFile = response.headers().firstValueAsLong("Content-Length").orElse(0L);
InputStream input = new BufferedInputStream(response.body());
OutputStream output = new FileOutputStream(file);
byte[] data = new byte[CHUNK_SIZE];
long total = 0;
int count;
int loop = 0;
int sbLen = 0;
long lastTime = System.currentTimeMillis();
while ((count = input.read(data)) != -1) {
total += count;
output.write(data, 0, count);
loop++;
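// Report progress every 1000 chunks; the MB/sec figure below assumes all 1000 reads
// since the last report returned full CHUNK_SIZE buffers, so it is an estimate.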
if (loop % 1000 == 0) {
StringBuilder sb = new StringBuilder(" " + (total / 1024 / 1024) + "/" + (lengthOfFile / 1024 / 1024) + " MB");
if (lengthOfFile > 0) {
sb.append(" (");
sb.append(total * 100 / lengthOfFile);
sb.append("%)");
}
long currentTime = System.currentTimeMillis();
long timeDiff = currentTime - lastTime;
double bytesPerMilliSec = (CHUNK_SIZE * 1000.0 / timeDiff);
double bytesPerSec = bytesPerMilliSec * 1000;
double mbPerSec = bytesPerSec / 1024 / 1024;
sb.append(" ");
sb.append(String.format("[%.2f MB/sec]", mbPerSec));
lastTime = currentTime;
sbLen = sb.length();
for (int cnt = 0; cnt < sbLen; cnt++) {
System.out.print("\b");
}
System.out.print(sb);
}
}
for (int cnt = 0; cnt < sbLen; cnt++) {
System.out.print("\b");
}
System.out.print(file.getAbsolutePath() + " successfully downloaded from " + urlFolder);
System.out.println();
output.flush();
output.close();
input.close();
successfullyDownloaded = true;
break;
} else {
triedUrlList.add(urlStr + " (" + response.statusCode() + ")");
}
}
if (!successfullyDownloaded) {
String triedUrls = String.join(" and ", triedUrlList);
throw new RuntimeException("Failed to fetch " + resource.artefactName + " from " + triedUrls);
}
}
}
|
0 | java-sources/ai/starlake/starlake-core_2.12/1.3.2/ai/starlake/utils | java-sources/ai/starlake/starlake-core_2.12/1.3.2/ai/starlake/utils/repackaged/BigQuerySchemaConverters.java | package ai.starlake.utils.repackaged;
/*
* Copyright 2018 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Included here only to avoid a repackaging conflict.
*/
import ai.starlake.job.Main;
import com.google.cloud.bigquery.Field;
import com.google.cloud.bigquery.FieldList;
import com.google.cloud.bigquery.LegacySQLTypeName;
import com.google.cloud.bigquery.Schema;
import com.google.cloud.bigquery.StandardTableDefinition;
import com.google.cloud.bigquery.TableDefinition;
import com.google.cloud.bigquery.TableInfo;
import com.google.cloud.bigquery.TimePartitioning;
import com.google.cloud.bigquery.connector.common.BigQueryUtil;
import com.google.cloud.spark.bigquery.SchemaConvertersConfiguration;
import com.google.cloud.spark.bigquery.SparkBigQueryUtil;
import com.google.cloud.spark.bigquery.SupportedCustomDataType;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.util.Utf8;
import org.apache.spark.sql.catalyst.InternalRow;
import org.apache.spark.sql.catalyst.expressions.GenericInternalRow;
import org.apache.spark.sql.catalyst.util.GenericArrayData;
import org.apache.spark.sql.types.*;
import org.apache.spark.unsafe.types.UTF8String;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.nio.ByteBuffer;
import java.util.*;
import java.util.function.Function;
import java.util.stream.Collectors;
// Copied from com.google.cloud.spark.bigquery.SchemaConverters
// Last update: 0.22.0
// Known differences: b3f1946f Fix timestamp conversion between parquet converted type and BigQuery data type (#427)
@SuppressWarnings("all")
public class BigQuerySchemaConverters {
static final DecimalType NUMERIC_SPARK_TYPE =
DataTypes.createDecimalType(
BigQueryUtil.DEFAULT_NUMERIC_PRECISION, BigQueryUtil.DEFAULT_NUMERIC_SCALE);
// The maximum nesting depth of a BigQuery RECORD:
static final int MAX_BIGQUERY_NESTED_DEPTH = 15;
// Numeric cannot have more than 29 digits left of the dot. For more details
// https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#parameterized_decimal_type
private static final int NUMERIC_MAX_LEFT_OF_DOT_DIGITS = 29;
/**
* Convert a BigQuery schema to a Spark schema
*/
public static StructType toSpark(Schema schema) {
List<StructField> fieldList =
schema.getFields().stream().map(BigQuerySchemaConverters::convert).collect(Collectors.toList());
StructType structType = new StructType(fieldList.toArray(new StructField[0]));
return structType;
}
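// Hedged usage sketch (hypothetical schema): converting a one-column BigQuery schema
// to its Spark counterpart.
//
//   Schema bq = Schema.of(Field.of("name", LegacySQLTypeName.STRING));
//   StructType sparkSchema = BigQuerySchemaConverters.toSpark(bq);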
/**
* Retrieves and returns BigQuery Schema from TableInfo. If the table support pseudo columns, they
* are added to schema before schema is returned to the caller.
*/
public static Schema getSchemaWithPseudoColumns(TableInfo tableInfo) {
TimePartitioning timePartitioning = null;
TableDefinition tableDefinition = tableInfo.getDefinition();
if (tableDefinition instanceof StandardTableDefinition) {
timePartitioning = ((StandardTableDefinition) tableDefinition).getTimePartitioning();
}
boolean tableSupportsPseudoColumns =
timePartitioning != null
&& timePartitioning.getField() == null
&& timePartitioning.getType() != null;
Schema schema = tableDefinition.getSchema();
if (tableSupportsPseudoColumns) {
ArrayList<Field> fields = new ArrayList<Field>(schema.getFields());
fields.add(
createBigQueryFieldBuilder(
"_PARTITIONTIME", LegacySQLTypeName.TIMESTAMP, Field.Mode.NULLABLE, null)
.build());
// Issue #748: _PARTITIONDATE exists only when partition type is day (not hour/month/year)
if (timePartitioning.getType().equals(TimePartitioning.Type.DAY)) {
fields.add(
createBigQueryFieldBuilder(
"_PARTITIONDATE", LegacySQLTypeName.DATE, Field.Mode.NULLABLE, null)
.build());
}
schema = Schema.of(fields);
}
return schema;
}
public static InternalRow convertToInternalRow(
Schema schema,
List<String> namesInOrder,
GenericRecord record,
Optional<StructType> userProvidedSchema) {
List<StructField> userProvidedFieldList =
Arrays.stream(userProvidedSchema.orElse(new StructType()).fields())
.collect(Collectors.toList());
return convertAll(schema.getFields(), record, namesInOrder, userProvidedFieldList);
}
static Object convert(Field field, Object value, StructField userProvidedField) {
if (value == null) {
return null;
}
if (field.getMode() == Field.Mode.REPEATED) {
// rather than recursing down, we strip off the repeated mode
// Due to serialization issues, reconstruct the type using reflection:
// See: https://github.com/googleapis/google-cloud-java/issues/3942
LegacySQLTypeName fType = LegacySQLTypeName.valueOfStrict(field.getType().name());
Field nestedField =
Field.newBuilder(field.getName(), fType, field.getSubFields())
// As long as this is not repeated it works, but technically arrays cannot contain
// nulls, so select required instead of nullable.
.setMode(Field.Mode.REQUIRED)
.build();
List<Object> valueList = (List<Object>) value;
return new GenericArrayData(
valueList.stream()
.map(v -> convert(nestedField, v, getStructFieldForRepeatedMode(userProvidedField)))
.collect(Collectors.toList()));
}
return convertByBigQueryType(field, value, userProvidedField);
}
private static StructField getStructFieldForRepeatedMode(StructField field) {
StructField nestedField = null;
if (field != null) {
ArrayType arrayType = ((ArrayType) field.dataType());
nestedField =
new StructField(
field.name(),
arrayType.elementType(),
arrayType.containsNull(),
Metadata.empty()); // safe to pass empty metadata as it is not used anywhere
}
return nestedField;
}
static Object convertByBigQueryType(Field bqField, Object value, StructField userProvidedField) {
if (LegacySQLTypeName.INTEGER.equals(bqField.getType())) {
if (userProvidedField != null) {
DataType userProvidedType = userProvidedField.dataType();
if (userProvidedType.equals(DataTypes.IntegerType)) {
return Integer.valueOf(((Number) value).intValue());
}
if (userProvidedType.equals(DataTypes.ShortType)) {
return Short.valueOf(((Number) value).shortValue());
}
if (userProvidedType.equals(DataTypes.ByteType)) {
return Byte.valueOf(((Number) value).byteValue());
}
}
// regular long value
return value;
}
if (LegacySQLTypeName.FLOAT.equals(bqField.getType())
|| LegacySQLTypeName.BOOLEAN.equals(bqField.getType())
|| LegacySQLTypeName.DATE.equals(bqField.getType())
|| LegacySQLTypeName.TIME.equals(bqField.getType())
|| LegacySQLTypeName.TIMESTAMP.equals(bqField.getType())) {
return value;
}
if (LegacySQLTypeName.STRING.equals(bqField.getType())
|| LegacySQLTypeName.DATETIME.equals(bqField.getType())
|| LegacySQLTypeName.GEOGRAPHY.equals(bqField.getType())
|| LegacySQLTypeName.JSON.equals(bqField.getType())) {
return UTF8String.fromBytes(((Utf8) value).getBytes());
}
if (LegacySQLTypeName.BYTES.equals(bqField.getType())) {
return getBytes((ByteBuffer) value);
}
if (LegacySQLTypeName.NUMERIC.equals(bqField.getType())
|| LegacySQLTypeName.BIGNUMERIC.equals(bqField.getType())) {
byte[] bytes = getBytes((ByteBuffer) value);
int scale =
Optional.ofNullable(bqField.getScale())
.map(Long::intValue)
.orElse(BigQueryUtil.DEFAULT_NUMERIC_SCALE);
BigDecimal b = new BigDecimal(new BigInteger(bytes), scale);
int precision =
Optional.ofNullable(bqField.getPrecision())
.map(Long::intValue)
.orElse(BigQueryUtil.DEFAULT_NUMERIC_PRECISION);
Decimal d = Decimal.apply(b, precision, scale);
return d;
}
if (LegacySQLTypeName.RECORD.equals(bqField.getType())) {
List<String> namesInOrder = null;
List<StructField> structList = null;
if (userProvidedField != null) {
StructType userStructType =
(StructType) SupportedCustomDataType.toSqlType(userProvidedField.dataType());
structList = Arrays.stream(userStructType.fields()).collect(Collectors.toList());
namesInOrder = structList.stream().map(StructField::name).collect(Collectors.toList());
} else {
namesInOrder =
bqField.getSubFields().stream().map(Field::getName).collect(Collectors.toList());
}
return convertAll(bqField.getSubFields(), (GenericRecord) value, namesInOrder, structList);
}
throw new IllegalStateException("Unexpected type: " + bqField.getType());
}
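// Editor's illustrative sketch (hypothetical): NUMERIC values arrive from Avro as the
// big-endian bytes of the unscaled BigInteger; with the default scale of 9, the bytes of
// 1_500_000_000 decode to the decimal 1.500000000.
private static void numericConversionSketch() {
  byte[] unscaled = BigInteger.valueOf(1_500_000_000L).toByteArray();
  Field numericField = Field.of("amount", LegacySQLTypeName.NUMERIC);
  Object decimal = convertByBigQueryType(numericField, ByteBuffer.wrap(unscaled), null);
  System.out.println(decimal); // 1.500000000, a Spark Decimal with the default (38, 9)
}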
private static byte[] getBytes(ByteBuffer buf) {
byte[] bytes = new byte[buf.remaining()];
buf.get(bytes);
return bytes;
}
// Schema conversion is not recursive here, so add a helper for a sequence of fields
static GenericInternalRow convertAll(
FieldList fieldList,
GenericRecord record,
List<String> namesInOrder,
List<StructField> userProvidedFieldList) {
Map<String, Object> fieldMap = new HashMap<>();
Map<String, StructField> userProvidedFieldMap =
userProvidedFieldList == null
? new HashMap<>()
: userProvidedFieldList.stream()
.collect(Collectors.toMap(StructField::name, Function.identity()));
fieldList.stream()
.forEach(
field ->
fieldMap.put(
field.getName(),
convert(
field,
record.get(field.getName()),
userProvidedFieldMap.get(field.getName()))));
Object[] values = new Object[namesInOrder.size()];
for (int i = 0; i < namesInOrder.size(); i++) {
values[i] = fieldMap.get(namesInOrder.get(i));
}
return new GenericInternalRow(values);
}
/**
* Convert a single BigQuery field to a Spark SQL StructField.
*
* <p>The conversion is based on the BigQuery schema, not the Avro schema, because the Avro
* schema is very painful to use.
*
* <p>Not guaranteed to be stable across all versions of Spark.
*/
private static StructField convert(Field field) {
DataType dataType = getDataType(field);
boolean nullable = true;
if (field.getMode() == Field.Mode.REQUIRED) {
nullable = false;
} else if (field.getMode() == Field.Mode.REPEATED) {
dataType = new ArrayType(dataType, true);
}
MetadataBuilder metadataBuilder = new MetadataBuilder();
if (field.getDescription() != null) {
metadataBuilder.putString("description", field.getDescription());
metadataBuilder.putString("comment", field.getDescription());
}
// JSON
if (LegacySQLTypeName.JSON.equals(field.getType())) {
metadataBuilder.putString("sqlType", "JSON");
}
Metadata metadata = metadataBuilder.build();
return convertMap(field, metadata) //
.orElse(new StructField(field.getName(), dataType, nullable, metadata));
}
static Optional<StructField> convertMap(Field field, Metadata metadata) {
if (field.getMode() != Field.Mode.REPEATED) {
return Optional.empty();
}
if (field.getType() != LegacySQLTypeName.RECORD) {
return Optional.empty();
}
FieldList subFields = field.getSubFields();
if (subFields.size() != 2) {
return Optional.empty();
}
Set<String> subFieldNames = subFields.stream().map(Field::getName).collect(Collectors.toSet());
if (!subFieldNames.contains("key") || !subFieldNames.contains("value")) {
// no "key" or "value" fields
return Optional.empty();
}
Field key = subFields.get("key");
Field value = subFields.get("value");
MapType mapType = DataTypes.createMapType(convert(key).dataType(), convert(value).dataType());
// The returned type is not nullable because the original field is a REPEATED, not NULLABLE.
// There are some compromises we need to do as BigQuery has no native MAP type
return Optional.of(new StructField(field.getName(), mapType, /* nullable */ false, metadata));
}
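// Editor's illustrative sketch (hypothetical): a REPEATED RECORD with exactly "key" and
// "value" sub-fields is surfaced to Spark as a MapType rather than an array of structs.
private static void convertMapUsageSketch() {
  Field mapLike =
      Field.newBuilder(
              "labels",
              LegacySQLTypeName.RECORD,
              Field.of("key", LegacySQLTypeName.STRING),
              Field.of("value", LegacySQLTypeName.STRING))
          .setMode(Field.Mode.REPEATED)
          .build();
  // Optional[StructField(labels,MapType(StringType,StringType,true),false,{})]
  System.out.println(convertMap(mapLike, Metadata.empty()));
}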
private static DataType getDataType(Field field) {
return getCustomDataType(field)
.map(udt -> (DataType) udt)
.orElseGet(() -> getStandardDataType(field));
}
@VisibleForTesting
static Optional<DataType> getCustomDataType(Field field) {
// metadata is kept in the description
String description = field.getDescription();
if (description != null) {
// All supported types are serialized to records
if (LegacySQLTypeName.RECORD.equals(field.getType())) {
// we don't have many types, so we keep parsing to a minimum
return SupportedCustomDataType.forDescription(description)
.map(SupportedCustomDataType::getSparkDataType);
}
}
return Optional.empty();
}
private static DataType getStandardDataType(Field field) {
if (LegacySQLTypeName.INTEGER.equals(field.getType())) {
return DataTypes.LongType;
} else if (LegacySQLTypeName.FLOAT.equals(field.getType())) {
return DataTypes.DoubleType;
} else if (LegacySQLTypeName.NUMERIC.equals(field.getType())) {
return createDecimalTypeFromNumericField(
field,
LegacySQLTypeName.NUMERIC,
BigQueryUtil.DEFAULT_NUMERIC_PRECISION,
BigQueryUtil.DEFAULT_NUMERIC_SCALE);
} else if (LegacySQLTypeName.BIGNUMERIC.equals(field.getType())) {
int precision =
Optional.ofNullable(field.getPrecision())
.map(Long::intValue)
.orElse(BigQueryUtil.DEFAULT_BIG_NUMERIC_PRECISION);
if (precision > DecimalType.MAX_PRECISION()) {
throw new IllegalArgumentException(
String.format(
"BigNumeric precision is too wide (%d), Spark can only handle decimal types with max precision of %d",
precision, DecimalType.MAX_PRECISION()));
}
return createDecimalTypeFromNumericField(
field,
LegacySQLTypeName.BIGNUMERIC,
BigQueryUtil.DEFAULT_BIG_NUMERIC_PRECISION,
BigQueryUtil.DEFAULT_BIG_NUMERIC_SCALE);
} else if (LegacySQLTypeName.STRING.equals(field.getType())) {
return DataTypes.StringType;
} else if (LegacySQLTypeName.BOOLEAN.equals(field.getType())) {
return DataTypes.BooleanType;
} else if (LegacySQLTypeName.BYTES.equals(field.getType())) {
return DataTypes.BinaryType;
} else if (LegacySQLTypeName.DATE.equals(field.getType())) {
return DataTypes.DateType;
} else if (LegacySQLTypeName.TIMESTAMP.equals(field.getType())) {
return DataTypes.TimestampType;
} else if (LegacySQLTypeName.TIME.equals(field.getType())) {
return DataTypes.LongType;
// TODO(#5): add a timezone to allow parsing to timestamp
// The DATETIME below could be cast to TimestampType, but doing so causes the date to be
// inferred as the current date. It's safer to leave it as a stable string and give the
// user the option of casting themselves.
} else if (LegacySQLTypeName.DATETIME.equals(field.getType())) {
return DataTypes.StringType;
} else if (LegacySQLTypeName.RECORD.equals(field.getType())) {
List<StructField> structFields =
field.getSubFields().stream().map(BigQuerySchemaConverters::convert).collect(Collectors.toList());
return new StructType(structFields.toArray(new StructField[0]));
} else if (LegacySQLTypeName.GEOGRAPHY.equals(field.getType())) {
return DataTypes.StringType;
} else if (LegacySQLTypeName.JSON.equals(field.getType())) {
return DataTypes.StringType;
} else {
throw new IllegalStateException("Unexpected type: " + field.getType());
}
}
@VisibleForTesting
static DecimalType createDecimalTypeFromNumericField(
Field field, LegacySQLTypeName expectedType, int defaultPrecision, int defaultScale) {
Preconditions.checkArgument(
field.getType().equals(expectedType),
"Field %s must be of type NUMERIC, instead it is of type %s",
field.getName(),
field.getType());
Optional<Integer> precisionOpt = Optional.ofNullable(field.getPrecision()).map(Long::intValue);
Optional<Integer> scaleOpt = Optional.ofNullable(field.getScale()).map(Long::intValue);
// Both exist
if (precisionOpt.isPresent() && scaleOpt.isPresent()) {
return DataTypes.createDecimalType(precisionOpt.get(), scaleOpt.get());
}
// Both missing
if (!precisionOpt.isPresent() && !scaleOpt.isPresent()) {
return DataTypes.createDecimalType(defaultPrecision, defaultScale);
}
// Either precision or scale exists, but not both
int maxLeftOfDotDigits = defaultPrecision - defaultScale;
if (precisionOpt.isPresent()) {
Integer precision = precisionOpt.get().intValue();
return DataTypes.createDecimalType(precision, Math.max(0, precision - maxLeftOfDotDigits));
}
// only scale exists
Integer scale = scaleOpt.get();
return DataTypes.createDecimalType(scale + maxLeftOfDotDigits, scale);
}
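// Editor's illustrative sketch (hypothetical): the three defaulting branches above, using
// NUMERIC's defaults of precision 38 and scale 9 (hence 29 digits left of the dot).
private static void decimalDefaultingSketch() {
  Field bare = Field.of("n", LegacySQLTypeName.NUMERIC);
  // neither precision nor scale set -> DecimalType(38,9)
  System.out.println(createDecimalTypeFromNumericField(bare, LegacySQLTypeName.NUMERIC, 38, 9));
  Field precisionOnly = Field.newBuilder("n", LegacySQLTypeName.NUMERIC).setPrecision(10L).build();
  // precision 10 only -> scale = max(0, 10 - 29) = 0 -> DecimalType(10,0)
  System.out.println(
      createDecimalTypeFromNumericField(precisionOnly, LegacySQLTypeName.NUMERIC, 38, 9));
}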
/** Spark ==> BigQuery schema converter utilities. */
public static Schema toBigQuerySchema(StructType sparkSchema) {
FieldList bigQueryFields = sparkToBigQueryFields(sparkSchema, 0);
return Schema.of(bigQueryFields);
}
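// Editor's illustrative sketch (hypothetical): converting a small Spark schema in the
// other direction; non-nullable columns become REQUIRED and arrays become REPEATED.
private static void toBigQuerySchemaUsageSketch() {
  StructType sparkSchema =
      new StructType()
          .add("id", DataTypes.LongType, false) // -> INTEGER, REQUIRED
          .add("tags", DataTypes.createArrayType(DataTypes.StringType)); // -> STRING, REPEATED
  Schema bqSchema = toBigQuerySchema(sparkSchema);
  bqSchema.getFields().forEach(f -> System.out.println(f.getName() + " " + f.getMode()));
}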
/**
* Returns a FieldList of all the Spark StructField objects, converted to BigQuery Field objects
*/
private static FieldList sparkToBigQueryFields(StructType sparkStruct, int depth) {
Preconditions.checkArgument(
depth < MAX_BIGQUERY_NESTED_DEPTH, "Spark Schema exceeds BigQuery maximum nesting depth.");
List<Field> bqFields = new ArrayList<>();
for (StructField field : sparkStruct.fields()) {
bqFields.add(createBigQueryColumn(field, depth));
}
return FieldList.of(bqFields);
}
/** Converts a single StructField to a BigQuery Field (column). */
@VisibleForTesting
protected static Field createBigQueryColumn(StructField sparkField, int depth) {
DataType sparkType = sparkField.dataType();
String fieldName = sparkField.name();
Field.Mode fieldMode = (sparkField.nullable()) ? Field.Mode.NULLABLE : Field.Mode.REQUIRED;
FieldList subFields = null;
LegacySQLTypeName fieldType;
OptionalLong scale = OptionalLong.empty();
long precision = 0;
if (sparkType instanceof ArrayType) {
ArrayType arrayType = (ArrayType) sparkType;
fieldMode = Field.Mode.REPEATED;
sparkType = arrayType.elementType();
}
Optional<SupportedCustomDataType> supportedCustomDataTypeOptional =
SupportedCustomDataType.of(sparkType);
// not using a lambda as we need to mutate method-level variables
if (supportedCustomDataTypeOptional.isPresent()) {
SupportedCustomDataType supportedCustomDataType = supportedCustomDataTypeOptional.get();
sparkType = supportedCustomDataType.getSqlType();
}
if (sparkType instanceof StructType) {
subFields = sparkToBigQueryFields((StructType) sparkType, depth + 1);
fieldType = LegacySQLTypeName.RECORD;
} else if (sparkType instanceof MapType) {
MapType mapType = (MapType) sparkType;
fieldMode = Field.Mode.REPEATED;
fieldType = LegacySQLTypeName.RECORD;
subFields =
FieldList.of(
Field.newBuilder("key", toBigQueryType(mapType.keyType(), sparkField.metadata()))
.setMode(Field.Mode.REQUIRED)
.build(),
Field.newBuilder("value", toBigQueryType(mapType.valueType(), sparkField.metadata()))
.setMode(mapType.valueContainsNull() ? Field.Mode.NULLABLE : Field.Mode.REQUIRED)
.build());
} else if (sparkType instanceof DecimalType) {
DecimalType decimalType = (DecimalType) sparkType;
int leftOfDotDigits = decimalType.precision() - decimalType.scale();
fieldType =
(decimalType.scale() > BigQueryUtil.DEFAULT_NUMERIC_SCALE
|| leftOfDotDigits > NUMERIC_MAX_LEFT_OF_DOT_DIGITS)
? LegacySQLTypeName.BIGNUMERIC
: LegacySQLTypeName.NUMERIC;
scale = OptionalLong.of(decimalType.scale());
precision = decimalType.precision();
} else {
fieldType = toBigQueryType(sparkType, sparkField.metadata());
}
Field.Builder fieldBuilder =
createBigQueryFieldBuilder(fieldName, fieldType, fieldMode, subFields);
Optional<String> description =
getDescriptionOrCommentOfField(sparkField, supportedCustomDataTypeOptional);
if (description.isPresent()) {
fieldBuilder.setDescription(description.get());
}
// if this is a decimal type
if (scale.isPresent()) {
fieldBuilder.setPrecision(precision);
fieldBuilder.setScale(scale.getAsLong());
}
return fieldBuilder.build();
}
public static Optional<String> getDescriptionOrCommentOfField(
StructField field, Optional<SupportedCustomDataType> supportedCustomDataTypeOptional) {
Optional<String> description = Optional.empty();
if (!field.getComment().isEmpty()) {
description = Optional.of(field.getComment().get());
} else if (field.metadata().contains("description")
&& field.metadata().getString("description") != null) {
description = Optional.of(field.metadata().getString("description"));
}
Optional<String> marker =
supportedCustomDataTypeOptional.map(SupportedCustomDataType::getTypeMarker);
// skipping some lambdas for readability
if (description.isPresent()) {
String descriptionString = description.get();
return Optional.of(
marker.map(value -> descriptionString + " " + value).orElse(descriptionString));
}
// no description, so the field marker determines the result
return marker;
}
@VisibleForTesting
protected static LegacySQLTypeName toBigQueryType(DataType elementType, Metadata metadata) {
if (elementType instanceof BinaryType) {
return LegacySQLTypeName.BYTES;
}
if (elementType instanceof ByteType
|| elementType instanceof ShortType
|| elementType instanceof IntegerType
|| elementType instanceof LongType) {
return LegacySQLTypeName.INTEGER;
}
if (elementType instanceof BooleanType) {
return LegacySQLTypeName.BOOLEAN;
}
if (elementType instanceof FloatType || elementType instanceof DoubleType) {
return LegacySQLTypeName.FLOAT;
}
if (elementType instanceof StringType || elementType instanceof VarcharType) {
if (SparkBigQueryUtil.isJson(metadata)) {
return LegacySQLTypeName.JSON;
}
return LegacySQLTypeName.STRING;
}
if (elementType instanceof TimestampType) {
return LegacySQLTypeName.TIMESTAMP;
}
if (elementType instanceof DateType) {
return LegacySQLTypeName.DATE;
}
throw new IllegalArgumentException("Data type not expected: " + elementType.simpleString());
}
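// Editor's illustrative sketch (hypothetical): all Spark integral widths collapse to
// BigQuery INTEGER, and both floating-point widths collapse to FLOAT.
private static void toBigQueryTypeUsageSketch() {
  System.out.println(toBigQueryType(DataTypes.ShortType, Metadata.empty())); // INTEGER
  System.out.println(toBigQueryType(DataTypes.DoubleType, Metadata.empty())); // FLOAT
}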
private static Field.Builder createBigQueryFieldBuilder(
String name, LegacySQLTypeName type, Field.Mode mode, FieldList subFields) {
return Field.newBuilder(name, type, subFields).setMode(mode);
}
}
|
0 | java-sources/ai/starlake/starlake-core_2.12/1.3.2/ai/starlake/utils | java-sources/ai/starlake/starlake-core_2.12/1.3.2/ai/starlake/utils/repackaged/SupportedCustomDataType.java | package ai.starlake.utils.repackaged;
import com.google.common.base.Preconditions;
import org.apache.spark.ml.linalg.SQLDataTypes;
import org.apache.spark.sql.types.DataType;
import java.util.Optional;
import java.util.stream.Stream;
/*
* Included here only to avoid repackaged conflict
*/
public enum SupportedCustomDataType {
SPARK_ML_VECTOR("vector", SQLDataTypes.VectorType()),
SPARK_ML_MATRIX("matrix", SQLDataTypes.MatrixType());
private final String typeMarker;
private final DataType sparkDataType;
SupportedCustomDataType(String typeMarker, DataType sparkDataType) {
this.typeMarker = "{spark.type=" + typeMarker + "}";
this.sparkDataType = sparkDataType;
}
public DataType getSparkDataType() {
return sparkDataType;
}
public String getTypeMarker() {
return typeMarker;
}
public static Optional<SupportedCustomDataType> of(DataType dataType) {
Preconditions.checkNotNull(dataType);
return Stream.of(values())
.filter(supportedCustomDataType -> supportedCustomDataType.sparkDataType.equals(dataType))
.findFirst();
}
public static Optional<SupportedCustomDataType> forDescription(String description) {
Preconditions.checkNotNull(description, "description cannot be null");
return Stream.of(values())
.filter(dataType -> description.endsWith(dataType.typeMarker))
.findFirst();
}
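// Editor's illustrative sketch (hypothetical description text): the marker round-trip. A
// BigQuery field whose description ends with "{spark.type=vector}" resolves back to
// SPARK_ML_VECTOR, whose Spark type is the ML VectorType.
public static void markerSketch() {
  System.out.println(SPARK_ML_VECTOR.getTypeMarker()); // {spark.type=vector}
  System.out.println(forDescription("user embedding {spark.type=vector}")); // Optional[SPARK_ML_VECTOR]
}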
}
|
0 | java-sources/ai/starlake/starlake-core_2.13 | java-sources/ai/starlake/starlake-core_2.13/1.5.2/Setup.java | import javax.net.ssl.*;
import java.io.*;
import java.net.*;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.security.KeyManagementException;
import java.security.NoSuchAlgorithmException;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.function.BiConsumer;
import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
public class Setup extends ProxySelector implements X509TrustManager {
private static class UserPwdAuth extends Authenticator {
@Override
protected PasswordAuthentication getPasswordAuthentication() {
return new PasswordAuthentication(username, password.toCharArray());
}
}
private static class ResourceDependency {
private final String[] urls;
private final String artefactName;
public ResourceDependency(String artefactName, String... url) {
this.urls = url;
this.artefactName = artefactName;
}
public List<String> getUrlNames() {
return Arrays.stream(urls).map(this::getUrlName).collect(Collectors.toList());
}
public String getUrlName(String url) {
return url.substring(url.lastIndexOf("/") + 1);
}
}
private static String protocol = null;
private static String host = null;
private static int port = 0;
private static String username = null;
private static String password = null;
private static String httpsProxy = getEnv("https_proxy").orElse("");
private static String httpProxy = getEnv("http_proxy").orElse("");
private static String noProxy = getEnv("no_proxy").orElse("").replaceAll(",", "|");
private static Proxy proxy = Proxy.NO_PROXY;
private static HttpClient.Builder clientBuilder = HttpClient.newBuilder();
private static HttpClient client = null;
private static boolean isWindowsOs() {
String os = System.getProperty("os.name").toLowerCase();
return os.startsWith("windows");
}
private static void parseProxy(String proxy) {
if (proxy.isEmpty()) {
return;
}
final Pattern pattern = Pattern.compile("(https?|socks5?):\\/\\/([^:].+)", Pattern.CASE_INSENSITIVE);
final Matcher m = pattern.matcher(proxy);
if (m.matches()) {
protocol = m.group(1).toLowerCase();
final String hostAndPortWithMaybeCredentials = m.group(2);
if (hostAndPortWithMaybeCredentials.contains("@")) {
final String[] hostAndPortWithCredentials = hostAndPortWithMaybeCredentials.split("@");
final String[] credentials = hostAndPortWithCredentials[0].split(":");
assert(credentials.length == 2): "Invalid credentials format, expecting 'username:password'";
username = credentials[0];
password = credentials[1];
final String[] hostAndPort = hostAndPortWithCredentials[1].split(":");
host = hostAndPort[0];
if (hostAndPort.length > 1) {
port = Integer.parseInt(hostAndPort[1]);
}
} else {
final String[] hostAndPort = hostAndPortWithMaybeCredentials.split(":");
host = hostAndPort[0];
if (hostAndPort.length > 1) {
port = Integer.parseInt(hostAndPort[1]);
}
}
} else {
throw new IllegalArgumentException("Invalid proxy format: " + proxy);
}
}
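// Editor's illustrative sketch (hypothetical hosts and credentials): the proxy forms
// parseProxy accepts; this helper is never invoked by the installer itself.
private static void parseProxySketch() {
    parseProxy("http://proxy.example.com:3128");      // protocol=http, explicit port
    parseProxy("https://user:pwd@proxy.example.com"); // credentials captured, port defaulted in setProxy()
    parseProxy("socks5://10.0.0.1:1080");             // selects Proxy.Type.SOCKS in setProxy()
}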
private static void setProxy() {
if (!httpsProxy.isEmpty()) {
parseProxy(httpsProxy);
} else if (!httpProxy.isEmpty()) {
parseProxy(httpProxy);
}
if (host != null) {
if (port == 0) {
if (protocol.equals("https")) {
port = 443;
} else if (protocol.startsWith("socks")) {
port = 1080;
} else {
port = 80;
}
}
Proxy.Type proxyType = Proxy.Type.HTTP;
if (protocol.startsWith("socks")) {
proxyType = Proxy.Type.SOCKS;
}
proxy = new Proxy(proxyType, new InetSocketAddress(host, port));
}
if (!noProxy.isEmpty()) {
System.setProperty("http.nonProxyHosts", noProxy);
}
}
// ENV VARS
public static boolean ENABLE_ALL = envIsTrue("ENABLE_ALL");
public static boolean ENABLE_BIGQUERY = ENABLE_ALL || envIsTrue("ENABLE_BIGQUERY");
public static boolean ENABLE_AZURE = ENABLE_ALL || envIsTrue("ENABLE_AZURE");
public static boolean ENABLE_SNOWFLAKE = ENABLE_ALL || envIsTrue("ENABLE_SNOWFLAKE");
public static boolean ENABLE_REDSHIFT = ENABLE_ALL || envIsTrue("ENABLE_REDSHIFT");
public static boolean ENABLE_POSTGRESQL = ENABLE_ALL || envIsTrue("ENABLE_POSTGRESQL");
public static boolean ENABLE_DUCKDB = ENABLE_ALL || envIsTrue("ENABLE_DUCKDB");
public static boolean ENABLE_KAFKA = ENABLE_ALL || envIsTrue("ENABLE_KAFKA");
public static boolean ENABLE_MARIADB = ENABLE_ALL || envIsTrue("ENABLE_MARIADB");
public static boolean ENABLE_CLICKHOUSE = ENABLE_ALL || envIsTrue("ENABLE_CLICKHOUSE");
private static final boolean[] ALL_ENABLERS = new boolean[] {
ENABLE_BIGQUERY,
ENABLE_AZURE,
ENABLE_SNOWFLAKE,
ENABLE_REDSHIFT,
ENABLE_POSTGRESQL,
ENABLE_DUCKDB,
ENABLE_KAFKA,
ENABLE_MARIADB,
ENABLE_CLICKHOUSE
};
private static final boolean ENABLE_API = envIsTrueWithDefaultTrue("ENABLE_API");
private static final String SL_API_VERSION = getEnv("SL_API_VERSION").orElse("0.1.0-SNAPSHOT");
private static final String SCALA_VERSION = getEnv("SCALA_VERSION").orElse("2.13");
// STARLAKE
private static final String SL_VERSION = getEnv("SL_VERSION").orElse("1.4.0");
// SPARK
private static final String SPARK_VERSION = getEnv("SPARK_VERSION").orElse("3.5.6");
private static final String HADOOP_VERSION = getEnv("HADOOP_VERSION").orElse("3");
// BIGQUERY
private static final String SPARK_BQ_VERSION = getEnv("SPARK_BQ_VERSION").orElse("0.42.3");
// deltalake
private static final String DELTA_SPARK = getEnv("SPARK_DELTA").orElse("3.3.2");
// ICEBERG
private static final String ICEBERG_SPARK = getEnv("SPARK_ICEBERG").orElse("1.9.0");
private static final String HADOOP_AZURE_VERSION = getEnv("HADOOP_AZURE_VERSION").orElse("3.3.5");
private static final String AZURE_STORAGE_VERSION = getEnv("AZURE_STORAGE_VERSION").orElse("8.6.6");
private static final String JETTY_VERSION = getEnv("JETTY_VERSION").orElse("9.4.51.v20230217");
// HADOOP_LIB ON WINDOWS
private static final ResourceDependency[] HADOOP_LIBS = new ResourceDependency[]{
new ResourceDependency("winutils", "https://raw.githubusercontent.com/cdarlint/winutils/master/hadoop-3.3.5/bin/winutils.exe"),
new ResourceDependency("hadoop.dll", "https://raw.githubusercontent.com/cdarlint/winutils/master/hadoop-3.3.5/bin/hadoop.dll")
};
// SNOWFLAKE
private static final String SNOWFLAKE_JDBC_VERSION = getEnv("SNOWFLAKE_JDBC_VERSION").orElse("3.24.2");
private static final String SPARK_SNOWFLAKE_VERSION = getEnv("SPARK_SNOWFLAKE_VERSION").orElse("3.1.3");
// POSTGRESQL
private static final String POSTGRESQL_VERSION = getEnv("POSTGRESQL_VERSION").orElse("42.7.6");
// MARIADB
private static final String MARIADB_VERSION = getEnv("MARIADB_VERSION").orElse("3.5.4");
// CLICKHOUSE
private static final String CLICKHOUSE_VERSION = getEnv("CLICKHOUSE_VERSION").orElse("0.9.0");
// DUCKDB
private static final String DUCKDB_VERSION = getEnv("DUCKDB_VERSION").orElse("1.3.0.0");
// REDSHIFT
private static final String AWS_JAVA_SDK_VERSION = getEnv("AWS_JAVA_SDK_VERSION").orElse("1.12.787");
private static final String HADOOP_AWS_VERSION = getEnv("HADOOP_AWS_VERSION").orElse("3.3.4");
private static final String REDSHIFT_JDBC_VERSION = getEnv("REDSHIFT_JDBC_VERSION").orElse("2.1.0.33");
private static final String SPARK_REDSHIFT_VERSION = getEnv("SPARK_REDSHIFT_VERSION").orElse("6.3.0-spark_3.5-SNAPSHOT");
private static final String CONFLUENT_VERSION = getEnv("CONFLUENT_VERSION").orElse("7.7.2");
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// API
private static final ResourceDependency SL_API_ZIP = new ResourceDependency("starlake-api", "https://central.sonatype.com/repository/maven-snapshots/ai/starlake/starlake-api" + "_" + SCALA_VERSION + "/" + SL_API_VERSION + "/starlake-api"+ "_" + SCALA_VERSION + "-" + SL_API_VERSION + ".zip");
// SPARK
private static final ResourceDependency SPARK_JAR = new ResourceDependency("dist/spark", "https://archive.apache.org/dist/spark/spark-" + SPARK_VERSION + "/spark-" + SPARK_VERSION + "-bin-hadoop" + HADOOP_VERSION + "-scala2.13.tgz");
private static final ResourceDependency SPARK_BQ_JAR = new ResourceDependency("bigquery-with-dependencies",
"https://repo1.maven.org/maven2/com/google/cloud/spark/spark-bigquery-with-dependencies_" + SCALA_VERSION + "/" +
SPARK_BQ_VERSION + "/" +
"spark-bigquery-with-dependencies_" + SCALA_VERSION + "-" + SPARK_BQ_VERSION + ".jar");
private static final ResourceDependency DELTA_SPARK_JAR = new ResourceDependency("delta-spark",
"https://repo1.maven.org/maven2/io/delta/delta-spark_" + SCALA_VERSION + "/" + DELTA_SPARK + "/delta-spark_" + SCALA_VERSION + "-" + DELTA_SPARK + ".jar");
private static final ResourceDependency ICEBERG_SPARK_JAR = new ResourceDependency("iceberg-spark",
"https://repo1.maven.org/maven2/org/apache/iceberg/iceberg-spark-runtime-3.5_" + SCALA_VERSION + "/" + ICEBERG_SPARK + "/iceberg-spark-runtime-3.5_" + SCALA_VERSION + "-" + ICEBERG_SPARK + ".jar");
private static final ResourceDependency DELTA_STORAGE_JAR = new ResourceDependency("delta-storage",
"https://repo1.maven.org/maven2/io/delta/delta-storage" + "/" + DELTA_SPARK + "/delta-storage" +"-" + DELTA_SPARK + ".jar");
private static final ResourceDependency HADOOP_AZURE_JAR = new ResourceDependency("hadoop-azure", "https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-azure/" + HADOOP_AZURE_VERSION + "/hadoop-azure-" + HADOOP_AZURE_VERSION + ".jar");
private static final ResourceDependency AZURE_STORAGE_JAR = new ResourceDependency("azure-storage", "https://repo1.maven.org/maven2/com/microsoft/azure/azure-storage/" + AZURE_STORAGE_VERSION + "/azure-storage-" + AZURE_STORAGE_VERSION + ".jar");
private static final ResourceDependency JETTY_SERVER_JAR = new ResourceDependency("jetty-server", "https://repo1.maven.org/maven2/org/eclipse/jetty/jetty-server/" + JETTY_VERSION + "/jetty-server-" + JETTY_VERSION + ".jar");
private static final ResourceDependency SNOWFLAKE_JDBC_JAR = new ResourceDependency("snowflake-jdbc", "https://repo1.maven.org/maven2/net/snowflake/snowflake-jdbc/" + SNOWFLAKE_JDBC_VERSION + "/snowflake-jdbc-" + SNOWFLAKE_JDBC_VERSION + ".jar");
private static final ResourceDependency SPARK_SNOWFLAKE_JAR = new ResourceDependency("spark-snowflake", "https://repo1.maven.org/maven2/net/snowflake/spark-snowflake_" + SCALA_VERSION +
"/" + SPARK_SNOWFLAKE_VERSION + "/spark-snowflake_" + SCALA_VERSION + "-" + SPARK_SNOWFLAKE_VERSION + ".jar");
private static final ResourceDependency POSTGRESQL_JAR = new ResourceDependency("postgresql", "https://repo1.maven.org/maven2/org/postgresql/postgresql/" + POSTGRESQL_VERSION + "/postgresql-" + POSTGRESQL_VERSION + ".jar");
private static final ResourceDependency DUCKDB_JAR = new ResourceDependency("duckdb_jdbc", "https://repo1.maven.org/maven2/org/duckdb/duckdb_jdbc/" + DUCKDB_VERSION + "/duckdb_jdbc-" + DUCKDB_VERSION + ".jar");
private static final ResourceDependency AWS_JAVA_SDK_JAR = new ResourceDependency("aws-java-sdk-bundle", "https://repo1.maven.org/maven2/com/amazonaws/aws-java-sdk-bundle/" + AWS_JAVA_SDK_VERSION + "/aws-java-sdk-bundle-" + AWS_JAVA_SDK_VERSION + ".jar");
private static final ResourceDependency HADOOP_AWS_JAR = new ResourceDependency("hadoop-aws", "https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-aws/" + HADOOP_AWS_VERSION + "/hadoop-aws-" + HADOOP_AWS_VERSION + ".jar");
private static final ResourceDependency REDSHIFT_JDBC_JAR = new ResourceDependency("redshift-jdbc42", "https://repo1.maven.org/maven2/com/amazon/redshift/redshift-jdbc42/" + REDSHIFT_JDBC_VERSION + "/redshift-jdbc42-" + REDSHIFT_JDBC_VERSION + ".jar");
private static ResourceDependency SPARK_REDSHIFT_JAR = new ResourceDependency("spark-redshift", "https://central.sonatype.com/repository/maven-snapshots/ai/starlake/spark-redshift_" + SCALA_VERSION + "/" + SPARK_REDSHIFT_VERSION + "/spark-redshift_" + SCALA_VERSION + "-" + SPARK_REDSHIFT_VERSION + ".jar");
private static final ResourceDependency STARLAKE_SNAPSHOT_JAR = new ResourceDependency("starlake-core", "https://central.sonatype.com/repository/maven-snapshots/ai/starlake/starlake-core" + "_" + SCALA_VERSION + "/" + SL_VERSION + "/starlake-core"+ "_" + SCALA_VERSION + "-" + SL_VERSION + "-assembly.jar");
private static final ResourceDependency STARLAKE_RELEASE_JAR = new ResourceDependency("starlake-core", "https://central.sonatype.com/repository/maven-releases/ai/starlake/starlake-core" + "_" + SCALA_VERSION + "/" + SL_VERSION + "/starlake-core" + "_" + SCALA_VERSION + "-" + SL_VERSION + "-assembly.jar");
private static final ResourceDependency CONFLUENT_KAFKA_SCHEMA_REGISTRY_CLIENT = new ResourceDependency("kafka-schema-registry-client", "https://packages.confluent.io/maven/io/confluent/kafka-schema-registry-client/" + CONFLUENT_VERSION + "/kafka-schema-registry-client-" + CONFLUENT_VERSION + ".jar");
private static final ResourceDependency CONFLUENT_KAFKA_AVRO_SERIALIZER = new ResourceDependency("kafka-avro-serializer", "https://packages.confluent.io/maven/io/confluent/kafka-avro-serializer/" + CONFLUENT_VERSION + "/kafka-avro-serializer-" + CONFLUENT_VERSION + ".jar");
private static final ResourceDependency MARIADB_JAR = new ResourceDependency("mariadb-java-client", "https://repo1.maven.org/maven2/org/mariadb/jdbc/mariadb-java-client/" + MARIADB_VERSION + "/mariadb-java-client-" + MARIADB_VERSION + ".jar");
private static final ResourceDependency CLICKHOUSE_JAR = new ResourceDependency("clickhouse-jdbc", "https://repo1.maven.org/maven2/com/clickhouse/clickhouse-jdbc/"+ CLICKHOUSE_VERSION + "/clickhouse-jdbc-" + CLICKHOUSE_VERSION + "-all.jar");
private static final ResourceDependency[] snowflakeDependencies = {
SNOWFLAKE_JDBC_JAR,
SPARK_SNOWFLAKE_JAR
};
private static final ResourceDependency[] redshiftDependencies = {
AWS_JAVA_SDK_JAR,
HADOOP_AWS_JAR,
REDSHIFT_JDBC_JAR,
SPARK_REDSHIFT_JAR
};
private static final ResourceDependency[] azureDependencies = {
HADOOP_AZURE_JAR,
AZURE_STORAGE_JAR,
JETTY_SERVER_JAR
};
private static final ResourceDependency[] postgresqlDependencies = {
POSTGRESQL_JAR
};
private static final ResourceDependency[] duckDbDependencies = {
DUCKDB_JAR
};
private static final ResourceDependency[] bigqueryDependencies = {
SPARK_BQ_JAR
};
private static final ResourceDependency[] deltaSparkDependencies = {
DELTA_SPARK_JAR,
DELTA_STORAGE_JAR
};
private static final ResourceDependency[] icebergSparkDependencies = {
ICEBERG_SPARK_JAR
};
private static final ResourceDependency[] confluentDependencies = {
CONFLUENT_KAFKA_SCHEMA_REGISTRY_CLIENT,
CONFLUENT_KAFKA_AVRO_SERIALIZER
};
private static final ResourceDependency[] mariadbDependencies = {
MARIADB_JAR
};
private static final ResourceDependency[] clickhouseDependencies = {
// CLICKHOUSE_JAR
};
private static Optional<String> getEnv(String env) {
// consider empty env variables as not set
return Optional.ofNullable(System.getenv(env)).filter(s -> !s.isEmpty());
}
private static boolean envIsTrue(String env) {
String value = getEnv(env).orElse("false");
return !value.equals("false") && !value.equals("0");
}
private static boolean envIsTrueWithDefaultTrue(String env) {
String value = getEnv(env).orElse("true");
return !value.equals("false") && !value.equals("0");
}
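// Editor's illustrative sketch (hypothetical values): the truthiness rules above. Any
// value other than "false" or "0" counts as true, and empty variables count as unset.
//   ENABLE_BIGQUERY=1 or =yes        -> envIsTrue returns true
//   ENABLE_BIGQUERY=0, =false, unset -> envIsTrue returns false
//   ENABLE_API unset                 -> envIsTrueWithDefaultTrue returns true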
private static void generateUnixVersions(File targetDir) throws IOException {
generateVersions(targetDir, "versions.sh", "#!/bin/bash\nset -e\n\n",
(writer) -> (variableName, value) -> {
try {
writer.write(variableName + "=" + "${" + variableName + ":-" + value + "}\n");
} catch (IOException e) {
throw new RuntimeException(e);
}
});
}
private static void generateWindowsVersions(File targetDir) throws IOException {
generateVersions(targetDir, "versions.cmd", "@ECHO OFF\n\n",
(writer) -> (variableName, value) -> {
try {
writer.write(
"if \"%" + variableName + "%\"==\"\" (\n" +
" SET " + variableName + "=" + value + "\n" +
")\n");
} catch (IOException e) {
throw new RuntimeException(e);
}
});
}
// Uses a BiConsumer produced by a Function because a TriConsumer doesn't exist natively; this avoids creating a new type
private static void generateVersions(File targetDir, String versionsFileName, String fileHeader, Function<BufferedWriter, BiConsumer<String, String>> variableWriter) throws IOException {
File versionFile = new File(targetDir, versionsFileName);
deleteFile(versionFile);
BufferedWriter writer = new BufferedWriter(new FileWriter(versionFile));
try {
writer.write(fileHeader);
variableWriter.apply(writer).accept("ENABLE_BIGQUERY", String.valueOf(ENABLE_BIGQUERY));
variableWriter.apply(writer).accept("ENABLE_AZURE", String.valueOf(ENABLE_AZURE));
variableWriter.apply(writer).accept("ENABLE_SNOWFLAKE", String.valueOf(ENABLE_SNOWFLAKE));
variableWriter.apply(writer).accept("ENABLE_POSTGRESQL", String.valueOf(ENABLE_POSTGRESQL));
variableWriter.apply(writer).accept("ENABLE_MARIADB", String.valueOf(ENABLE_MARIADB));
variableWriter.apply(writer).accept("ENABLE_REDSHIFT", String.valueOf(ENABLE_REDSHIFT));
variableWriter.apply(writer).accept("ENABLE_KAFKA", String.valueOf(ENABLE_KAFKA));
variableWriter.apply(writer).accept("ENABLE_DUCKDB", String.valueOf(ENABLE_DUCKDB));
variableWriter.apply(writer).accept("SL_VERSION", SL_VERSION);
variableWriter.apply(writer).accept("SCALA_VERSION", SCALA_VERSION);
variableWriter.apply(writer).accept("SPARK_VERSION", SPARK_VERSION);
variableWriter.apply(writer).accept("HADOOP_VERSION", HADOOP_VERSION);
variableWriter.apply(writer).accept("DUCKDB_VERSION", DUCKDB_VERSION);
if (ENABLE_BIGQUERY || !anyDependencyEnabled()) {
variableWriter.apply(writer).accept("SPARK_BQ_VERSION", SPARK_BQ_VERSION);
}
if (ENABLE_AZURE || !anyDependencyEnabled()) {
variableWriter.apply(writer).accept("HADOOP_AZURE_VERSION", HADOOP_AZURE_VERSION);
variableWriter.apply(writer).accept("AZURE_STORAGE_VERSION", AZURE_STORAGE_VERSION);
variableWriter.apply(writer).accept("JETTY_VERSION", JETTY_VERSION);
}
if (ENABLE_SNOWFLAKE || !anyDependencyEnabled()) {
variableWriter.apply(writer).accept("SPARK_SNOWFLAKE_VERSION", SPARK_SNOWFLAKE_VERSION);
variableWriter.apply(writer).accept("SNOWFLAKE_JDBC_VERSION", SNOWFLAKE_JDBC_VERSION);
}
if (ENABLE_POSTGRESQL || !anyDependencyEnabled()) {
variableWriter.apply(writer).accept("POSTGRESQL_VERSION", POSTGRESQL_VERSION);
}
if (ENABLE_REDSHIFT || !anyDependencyEnabled()) {
variableWriter.apply(writer).accept("AWS_JAVA_SDK_VERSION", AWS_JAVA_SDK_VERSION);
variableWriter.apply(writer).accept("HADOOP_AWS_VERSION", HADOOP_AWS_VERSION);
variableWriter.apply(writer).accept("REDSHIFT_JDBC_VERSION", REDSHIFT_JDBC_VERSION);
variableWriter.apply(writer).accept("SPARK_REDSHIFT_VERSION", SPARK_REDSHIFT_VERSION);
}
if (ENABLE_KAFKA || !anyDependencyEnabled()) {
variableWriter.apply(writer).accept("CONFLUENT_VERSION", CONFLUENT_VERSION);
}
if (ENABLE_DUCKDB || !anyDependencyEnabled()) {
variableWriter.apply(writer).accept("DUCKDB_VERSION", DUCKDB_VERSION);
}
} finally {
writer.close();
}
System.out.println(versionFile.getAbsolutePath() + " created");
}
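// Editor's illustrative sketch (showing one variable only): the line each writer emits for
// SL_VERSION, letting a pre-set environment variable win over the pinned default.
//   versions.sh:   SL_VERSION=${SL_VERSION:-1.4.0}
//   versions.cmd:  if "%SL_VERSION%"=="" (
//                    SET SL_VERSION=1.4.0
//                  )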
private static void generateVersions(File targetDir, boolean unix) throws IOException {
if (isWindowsOs() && !unix) {
generateWindowsVersions(targetDir);
} else {
generateUnixVersions(targetDir);
}
}
private static boolean anyDependencyEnabled() {
for (boolean enabled : ALL_ENABLERS) {
if (enabled) {
return true;
}
}
return ENABLE_ALL;
}
@Override
public void checkClientTrusted(X509Certificate[] chain, String authType) throws CertificateException {
}
@Override
public void checkServerTrusted(X509Certificate[] chain, String authType) throws CertificateException {
}
@Override
public X509Certificate[] getAcceptedIssuers() {
return new X509Certificate[0];
}
@Override
public List<Proxy> select(URI uri) {
return Collections.singletonList(proxy);
}
@Override
public void connectFailed(URI uri, SocketAddress sa, IOException ioe) {
throw new RuntimeException("Failed to connect to " + uri + " using proxy " + sa);
}
private static final Setup instance = new Setup();
private static final TrustManager alwaysTrustManager = instance;
private static final ProxySelector proxySelector = instance;
private static void setHttpClient() throws NoSuchAlgorithmException, KeyManagementException {
setProxy();
clientBuilder.proxy(proxySelector);
if (username != null && password != null) {
Authenticator authenticator = new UserPwdAuth();
clientBuilder.authenticator(authenticator);
}
if (host != null && envIsTrue("SL_INSECURE")) {
System.out.println("Enabling insecure mode for SSL connections using proxy " + protocol + "://" + host + ":" + port);
// Create a trust manager that does not validate certificate chains
TrustManager[] trustAllCerts = new TrustManager[]{alwaysTrustManager};
// Install the all-trusting trust manager
SSLContext sc = SSLContext.getInstance("SSL");
sc.init(null, trustAllCerts, new java.security.SecureRandom());
clientBuilder.sslContext(sc);
}
}
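// Editor's illustrative sketch (hypothetical invocation): running the installer behind a
// TLS-intercepting corporate proxy. SL_INSECURE installs the all-trusting manager above;
// without it the default JVM trust store is used.
//   https_proxy=http://user:pwd@proxy.example.com:3128 SL_INSECURE=1 java Setup /opt/starlake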
private static void updateSparkLog4j2Properties(File sparkDir) {
File log4jFile = new File(new File(sparkDir, "conf"), "log4j2.properties");
try {
BufferedReader reader = new BufferedReader(new FileReader(log4jFile));
StringBuilder sb = new StringBuilder();
String line;
while ((line = reader.readLine()) != null) {
if (line.startsWith("rootLogger.level =")|| line.startsWith("rootLogger.level=")) {
line = "rootLogger.level = ${env:SL_LOG_LEVEL:-error}";
}
sb.append(line).append("\n");
}
reader.close();
sb.append("logger.shutdown.name=org.apache.spark.util.ShutdownHookManager").append("\n");
sb.append("logger.shutdown.level=OFF").append("\n");
sb.append("logger.env.name=org.apache.spark.SparkEnv").append("\n");
sb.append("logger.env.level=error").append("\n");
BufferedWriter writer = new BufferedWriter(new FileWriter(log4jFile));
writer.write(sb.toString());
writer.close();
} catch (IOException e) {
System.out.println("Failed to update log4j.properties");
e.printStackTrace();
}
}
private static void enableAllDependencies() {
ENABLE_AZURE = true;
ENABLE_BIGQUERY = true;
ENABLE_SNOWFLAKE = true;
ENABLE_REDSHIFT = true;
ENABLE_POSTGRESQL = true;
ENABLE_MARIADB = true;
ENABLE_CLICKHOUSE = false;
ENABLE_DUCKDB = true;
ENABLE_KAFKA = true;
}
private static void askUserWhichConfigToEnable() {
System.out.println("Please enable at least one of the following profiles to download the required dependencies:");
System.out.println("Note: You may install more dependencies later by copying them to the bin/deps directory");
System.out.println("1) Azure");
System.out.println("2) BigQuery");
System.out.println("3) Snowflake");
System.out.println("4) Redshift ");
System.out.println("5) Postgres ");
System.out.println("6) DuckDB ");
System.out.println("7) Spark ");
System.out.println("8) Kafka ");
System.out.println("9) Mariadb ");
// System.out.println("10) ClickHouse");
System.out.println("A) All ");
System.out.println("N) None ");
System.out.print("Please enter your choice(s) separated by commas (e.g. 1,2,3): ");
try {
BufferedReader reader = new BufferedReader(new InputStreamReader(System.in));
String answer = reader.readLine();
if (answer.equalsIgnoreCase("n")) {
System.out.println("Please enable the configurations you want to use by setting the corresponding environment variables below");
System.out.println("ENABLE_BIGQUERY, ENABLE_DATABRICKS, ENABLE_AZURE, ENABLE_SNOWFLAKE, ENABLE_DUCKDB, ENABLE_REDSHIFT, ENABLE_POSTGRESQL, ENABLE_ANY_JDBC, ENABLE_KAFKA, ENABLE_MARIADB");
System.exit(1);
}
else if (answer.equalsIgnoreCase("a")) {
enableAllDependencies();
}
else {
String[] choices = answer.split(",");
for (String choice : choices) {
switch (choice.trim()) {
case "1":
ENABLE_AZURE = true;
break;
case "2":
ENABLE_BIGQUERY = true;
break;
case "3":
ENABLE_SNOWFLAKE = true;
break;
case "4":
ENABLE_REDSHIFT = true;
break;
case "5":
ENABLE_POSTGRESQL = true;
break;
case "6":
ENABLE_DUCKDB = true;
break;
case "7":
break;
case "8":
ENABLE_KAFKA = true;
break;
case "9":
ENABLE_MARIADB = true;
break;
// case "10":
// ENABLE_CLICKHOUSE = true;
// break;
default:
enableAllDependencies();
System.out.println("Installing All dependencies.");
}
}
}
} catch (IOException e) {
System.out.println("Failed to read user input");
e.printStackTrace();
}
}
public static void main(String[] args) throws IOException {
try {
if (args.length == 0) {
System.out.println("Please specify the target directory");
System.exit(1);
}
if (!anyDependencyEnabled()) {
askUserWhichConfigToEnable();
}
final File targetDir = new File(args[0]);
if (!targetDir.exists()) {
targetDir.mkdirs();
System.out.println("Created target directory " + targetDir.getAbsolutePath());
}
setHttpClient();
final File binDir = new File(targetDir, "bin");
if (isWindowsOs()) {
final File hadoopDir = new File(binDir, "hadoop");
final File hadoopBinDir = new File(hadoopDir, "bin");
deleteRecursively(hadoopDir);
hadoopBinDir.mkdirs();
for (ResourceDependency lib : HADOOP_LIBS) {
downloadAndDisplayProgress(lib, (resource, url) -> new File(hadoopBinDir, resource.getUrlName(url)));
}
}
File slDir = new File(binDir, "sl");
deleteRecursively(slDir);
if (SL_VERSION.endsWith("SNAPSHOT")) {
downloadAndDisplayProgress(new ResourceDependency[]{STARLAKE_SNAPSHOT_JAR}, slDir, false);
} else {
downloadAndDisplayProgress(new ResourceDependency[]{STARLAKE_RELEASE_JAR}, slDir, false);
}
if (ENABLE_API) {
downloadApi(targetDir);
}
File sparkDir = new File(binDir, "spark");
// deleteRecursively(sparkDir);
if (!sparkDir.exists()) {
downloadSpark(binDir);
}
File depsDir = new File(binDir, "deps");
deleteDependencies(deltaSparkDependencies, depsDir);
downloadAndDisplayProgress(deltaSparkDependencies, depsDir, true);
deleteDependencies(icebergSparkDependencies, depsDir);
downloadAndDisplayProgress(icebergSparkDependencies, depsDir, true);
updateSparkLog4j2Properties(sparkDir);
deleteDependencies(duckDbDependencies, depsDir);
if (ENABLE_DUCKDB) {
downloadAndDisplayProgress(duckDbDependencies, depsDir, true);
}
deleteDependencies(confluentDependencies, depsDir);
if (ENABLE_KAFKA) {
downloadAndDisplayProgress(confluentDependencies, depsDir, true);
}
deleteDependencies(redshiftDependencies, depsDir);
if (ENABLE_REDSHIFT) {
downloadAndDisplayProgress(redshiftDependencies, depsDir, true);
}
deleteDependencies(bigqueryDependencies, depsDir);
if (ENABLE_BIGQUERY) {
downloadAndDisplayProgress(bigqueryDependencies, depsDir, true);
}
deleteDependencies(azureDependencies, depsDir);
if (ENABLE_AZURE) {
downloadAndDisplayProgress(azureDependencies, depsDir, true);
}
deleteDependencies(snowflakeDependencies, depsDir);
if (ENABLE_SNOWFLAKE) {
downloadAndDisplayProgress(snowflakeDependencies, depsDir, true);
}
deleteDependencies(postgresqlDependencies, depsDir);
if (ENABLE_POSTGRESQL) {
downloadAndDisplayProgress(postgresqlDependencies, depsDir, true);
}
deleteDependencies(mariadbDependencies, depsDir);
if (ENABLE_MARIADB) {
downloadAndDisplayProgress(mariadbDependencies, depsDir, true);
}
deleteDependencies(clickhouseDependencies, depsDir);
if (ENABLE_CLICKHOUSE) {
downloadAndDisplayProgress(clickhouseDependencies, depsDir, true);
}
boolean unix = args.length > 1 && args[1].equalsIgnoreCase("unix");
generateVersions(targetDir, unix);
} catch (Exception e) {
System.out.println("Failed to download dependency: " + e.getMessage());
e.printStackTrace();
System.exit(1);
}
}
public static void downloadApi(File targetDir) throws IOException, InterruptedException {
final File projectsDir = new File(targetDir, "projects");
final File demoDir = new File(projectsDir, "demo");
final File binDir = new File(targetDir, "bin");
File apiDir = new File(binDir, "api");
deleteRecursively(apiDir);
ResourceDependency apiZip = SL_API_ZIP;
downloadAndDisplayProgress(new ResourceDependency[]{apiZip}, binDir, false);
apiZip.getUrlNames().stream().map(zipName -> new File(binDir, zipName)).filter(File::exists).forEach(zipFile -> {
try {
unzip(zipFile, binDir);
} catch (IOException e) {
System.out.println("Failed to extract API: " + e.getMessage());
e.printStackTrace();
}
zipFile.delete();
// Rename extracted directory
File extractedDir = new File(binDir, apiZip.artefactName + "-" + SL_API_VERSION);
File renamedDir = new File(binDir, "api");
if (extractedDir.exists()) {
System.out.println("Renaming " + extractedDir.getAbsolutePath() + " to " + renamedDir.getAbsolutePath());
extractedDir.renameTo(renamedDir);
}
});
File starbakeZip = new File(apiDir, "starbake.zip");
final File demoZip = new File(demoDir, "starbake.zip");
demoDir.mkdirs();
if (starbakeZip.exists() && !starbakeZip.renameTo(demoZip)) {
System.out.println("Failed to rename " + starbakeZip.getAbsolutePath() + " to " + demoZip.getAbsolutePath());
}
}
public static void downloadSpark(File binDir) throws IOException, InterruptedException {
ResourceDependency sparkJar = SPARK_JAR;
downloadAndDisplayProgress(new ResourceDependency[]{sparkJar}, binDir, false);
sparkJar.getUrlNames().stream().map(tgzName -> new File(binDir, tgzName)).filter(File::exists).forEach(sparkFile -> {
String tgzName = sparkFile.getName();
ProcessBuilder builder = new ProcessBuilder("tar", "-xzf", sparkFile.getAbsolutePath(), "-C", binDir.getAbsolutePath()).inheritIO();
try {
Process process = builder.start();
process.waitFor();
} catch (InterruptedException | IOException e) {
System.out.println("Failed to extract spark tarball");
e.printStackTrace();
}
sparkFile.delete();
File sparkDir = new File(binDir, tgzName.substring(0, tgzName.lastIndexOf(".")));
sparkDir.renameTo(new File(binDir, "spark"));
sparkDir = new File(binDir, "spark");
File log4j2File = new File(sparkDir, "conf/log4j2.properties.template");
log4j2File.renameTo(new File(sparkDir, "conf/log4j2.properties"));
});
}
private static void downloadAndDisplayProgress(ResourceDependency[] dependencies, File targetDir, boolean replaceJar) throws IOException, InterruptedException {
if (!targetDir.exists()) {
targetDir.mkdirs();
}
if (replaceJar) {
deleteDependencies(dependencies, targetDir);
}
for (ResourceDependency dependency : dependencies) {
downloadAndDisplayProgress(dependency, (resource, url) -> new File(targetDir, resource.getUrlName(url)));
}
}
private static void deleteDependencies(ResourceDependency[] dependencies, File targetDir) {
if (targetDir.exists()) {
for (ResourceDependency dependency : dependencies) {
File[] files = targetDir.listFiles(f -> f.getPath().contains(dependency.artefactName));
if (files != null) {
for (File file : files) {
deleteFile(file);
}
}
}
}
}
private static void deleteRecursively(File directoryToBeDeleted) {
File[] allContents = directoryToBeDeleted.listFiles();
if (allContents != null) {
for (File file : allContents) {
deleteRecursively(file);
}
}
directoryToBeDeleted.delete();
}
private static void deleteFile(File file) {
if (file.exists()) {
if (file.delete()) {
System.out.println(file.getAbsolutePath() + " deleted");
}
}
}
private static void downloadAndDisplayProgress(ResourceDependency resource, BiFunction<ResourceDependency, String, File> fileProducer) throws IOException, InterruptedException {
try {
client = clientBuilder.build();
boolean successfullyDownloaded = false;
List<String> triedUrlList = new ArrayList<>();
System.out.println("Downloading " + resource.artefactName + "...");
for (String urlStr : resource.urls) {
System.out.println("from " + urlStr);
File file = fileProducer.apply(resource, urlStr);
final int CHUNK_SIZE = 1024;
int filePartIndex = urlStr.lastIndexOf("/") + 1;
String name = urlStr.substring(filePartIndex);
String urlFolder = urlStr.substring(0, filePartIndex);
HttpRequest request = HttpRequest.newBuilder()
.uri(URI.create(urlStr))
.build();
HttpResponse<InputStream> response = client.send(request, HttpResponse.BodyHandlers.ofInputStream());
if (response.statusCode() == 200) {
long lengthOfFile = response.headers().firstValueAsLong("Content-Length").orElse(0L);
InputStream input = new BufferedInputStream(response.body());
OutputStream output = new FileOutputStream(file);
byte[] data = new byte[CHUNK_SIZE];
long total = 0;
int count;
int loop = 0;
int sbLen = 0;
long lastTime = System.currentTimeMillis();
while ((count = input.read(data)) != -1) {
total += count;
output.write(data, 0, count);
loop++;
if (loop % 1000 == 0) {
StringBuilder sb = new StringBuilder(" " + (total / 1024 / 1024) + "/" + (lengthOfFile / 1024 / 1024) + " MB");
if (lengthOfFile > 0) {
sb.append(" (");
sb.append(total * 100 / lengthOfFile);
sb.append("%)");
}
long currentTime = System.currentTimeMillis();
long timeDiff = currentTime - lastTime;
double bytesPerMilliSec = (CHUNK_SIZE * 1000.0 / timeDiff);
double bytesPerSec = bytesPerMilliSec * 1000;
double mbPerSec = bytesPerSec / 1024 / 1024;
sb.append(" ");
sb.append(String.format("[%.2f MB/sec]", mbPerSec));
lastTime = currentTime;
sbLen = sb.length();
for (int cnt = 0; cnt < sbLen; cnt++) {
System.out.print("\b");
}
System.out.print(sb);
}
}
for (int cnt = 0; cnt < sbLen; cnt++) {
System.out.print("\b");
}
System.out.print(file.getAbsolutePath() + " successfully downloaded from " + urlFolder);
System.out.println();
output.flush();
output.close();
input.close();
successfullyDownloaded = true;
break;
} else {
triedUrlList.add(urlStr + " (" + response.statusCode() + ")");
}
}
if (!successfullyDownloaded) {
String triedUrls = String.join(" and ", triedUrlList);
throw new RuntimeException("Failed to fetch " + resource.artefactName + " from " + triedUrls);
}
} catch (IOException | InterruptedException e) {
System.out.println("Failed to download " + resource.artefactName + " from " + resource.urls);
throw e;
}
finally {
client = null; // Drop the reference; HttpClient has no close() on Java 11, so let it be garbage collected
}
}
// Utility method to unzip using Java
private static void unzip(File zipFile, File destDir) throws IOException {
try (ZipInputStream zipIn = new ZipInputStream(new FileInputStream(zipFile))) {
ZipEntry entry = zipIn.getNextEntry();
while (entry != null) {
File filePath = new File(destDir, entry.getName());
if (entry.isDirectory()) {
filePath.mkdirs();
} else {
// Create parent dirs if they don't exist
filePath.getParentFile().mkdirs();
try (BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(filePath))) {
byte[] buffer = new byte[4096];
int bytesRead;
while ((bytesRead = zipIn.read(buffer)) != -1) {
bos.write(buffer, 0, bytesRead);
}
}
}
zipIn.closeEntry();
entry = zipIn.getNextEntry();
}
}
}
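// Editor's illustrative sketch (hypothetical paths): extracting a downloaded archive with
// the helper above.
private static void unzipSketch() throws IOException {
    unzip(new File("/tmp/starbake.zip"), new File("/tmp/starbake"));
}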
}
|
0 | java-sources/ai/starlake/starlake-spark2_2.11/0.3.26/ai/starlake/utils | java-sources/ai/starlake/starlake-spark2_2.11/0.3.26/ai/starlake/utils/repackaged/BigQuerySchemaConverters.java | package ai.starlake.utils.repackaged;
/*
* Copyright 2018 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Included here only to avoid repackaged conflict
*/
import com.google.cloud.bigquery.Field;
import com.google.cloud.bigquery.FieldList;
import com.google.cloud.bigquery.LegacySQLTypeName;
import com.google.cloud.bigquery.Schema;
import com.google.cloud.bigquery.StandardTableDefinition;
import com.google.cloud.bigquery.TableDefinition;
import com.google.cloud.bigquery.TableInfo;
import com.google.cloud.bigquery.TimePartitioning;
import com.google.cloud.spark.bigquery.SupportedCustomDataType;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.util.Utf8;
import org.apache.spark.sql.catalyst.InternalRow;
import org.apache.spark.sql.catalyst.expressions.GenericInternalRow;
import org.apache.spark.sql.catalyst.util.GenericArrayData;
import org.apache.spark.sql.types.ArrayType;
import org.apache.spark.sql.types.BinaryType;
import org.apache.spark.sql.types.BooleanType;
import org.apache.spark.sql.types.ByteType;
import org.apache.spark.sql.types.DataType;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.DateType;
import org.apache.spark.sql.types.Decimal;
import org.apache.spark.sql.types.DecimalType;
import org.apache.spark.sql.types.DoubleType;
import org.apache.spark.sql.types.FloatType;
import org.apache.spark.sql.types.IntegerType;
import org.apache.spark.sql.types.LongType;
import org.apache.spark.sql.types.MapType;
import org.apache.spark.sql.types.Metadata;
import org.apache.spark.sql.types.MetadataBuilder;
import org.apache.spark.sql.types.ShortType;
import org.apache.spark.sql.types.StringType;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;
import org.apache.spark.sql.types.TimestampType;
import org.apache.spark.sql.types.UserDefinedType;
import org.apache.spark.unsafe.types.UTF8String;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.function.Function;
import java.util.stream.Collectors;
// Copied from com.google.cloud.spark.bigquery.SchemaConverters
// Last update : 0.22.0
// Known differences : b3f1946f Fix timestamp conversion between parquet converted type and BigQuery data type (#427)
@SuppressWarnings("all")
public class BigQuerySchemaConverters {
// Numeric is a fixed precision Decimal Type with 38 digits of precision and 9 digits of scale.
// See https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#numeric-type
static final int BQ_NUMERIC_PRECISION = 38;
static final int BQ_NUMERIC_SCALE = 9;
static final int BQ_BIG_NUMERIC_SCALE = 38;
private static final DecimalType NUMERIC_SPARK_TYPE =
DataTypes.createDecimalType(BQ_NUMERIC_PRECISION, BQ_NUMERIC_SCALE);
// The maximum nesting depth of a BigQuery RECORD:
static final int MAX_BIGQUERY_NESTED_DEPTH = 15;
static final String MAPTYPE_ERROR_MESSAGE = "MapType is unsupported.";
/**
* Convert a BigQuery schema to a Spark schema
*/
public static StructType toSpark(Schema schema) {
List<StructField> fieldList =
schema.getFields().stream().map(BigQuerySchemaConverters::convert).collect(Collectors.toList());
StructType structType = new StructType(fieldList.toArray(new StructField[0]));
return structType;
}
/**
* Retrieves and returns the BigQuery schema from the TableInfo. If the table supports pseudo
* columns, they are added to the schema before it is returned to the caller.
*/
public static Schema getSchemaWithPseudoColumns(TableInfo tableInfo) {
TimePartitioning timePartitioning = null;
TableDefinition tableDefinition = tableInfo.getDefinition();
if (tableDefinition instanceof StandardTableDefinition) {
timePartitioning = ((StandardTableDefinition) tableDefinition).getTimePartitioning();
}
boolean tableSupportsPseudoColumns =
timePartitioning != null
&& timePartitioning.getField() == null
&& timePartitioning.getType() != null;
Schema schema = tableDefinition.getSchema();
if (tableSupportsPseudoColumns) {
ArrayList<Field> fields = new ArrayList<Field>(schema.getFields());
fields.add(
createBigQueryFieldBuilder(
"_PARTITIONTIME", LegacySQLTypeName.TIMESTAMP, Field.Mode.NULLABLE, null)
.build());
fields.add(
createBigQueryFieldBuilder(
"_PARTITIONDATE", LegacySQLTypeName.DATE, Field.Mode.NULLABLE, null)
.build());
schema = Schema.of(fields);
}
return schema;
}
public static InternalRow convertToInternalRow(
Schema schema,
List<String> namesInOrder,
GenericRecord record,
Optional<StructType> userProvidedSchema) {
List<StructField> userProvidedFieldList =
Arrays.stream(userProvidedSchema.orElse(new StructType()).fields())
.collect(Collectors.toList());
return convertAll(schema.getFields(), record, namesInOrder, userProvidedFieldList);
}
static Object convert(Field field, Object value, StructField userProvidedField) {
if (value == null) {
return null;
}
if (field.getMode() == Field.Mode.REPEATED) {
// rather than recursing down, we strip off the repeated mode
// Due to serialization issues, reconstruct the type using reflection:
// See: https://github.com/googleapis/google-cloud-java/issues/3942
LegacySQLTypeName fType = LegacySQLTypeName.valueOfStrict(field.getType().name());
Field nestedField =
Field.newBuilder(field.getName(), fType, field.getSubFields())
// As long as this is not repeated it works, but technically arrays cannot contain
// nulls, so select required instead of nullable.
.setMode(Field.Mode.REQUIRED)
.build();
List<Object> valueList = (List<Object>) value;
return new GenericArrayData(
valueList.stream()
.map(v -> convert(nestedField, v, getStructFieldForRepeatedMode(userProvidedField)))
.collect(Collectors.toList()));
}
Object datum = convertByBigQueryType(field, value, userProvidedField);
Optional<Object> customDatum =
getCustomDataType(field).map(dt -> ((UserDefinedType) dt).deserialize(datum));
return customDatum.orElse(datum);
}
private static StructField getStructFieldForRepeatedMode(StructField field) {
StructField nestedField = null;
if (field != null) {
ArrayType arrayType = ((ArrayType) field.dataType());
nestedField =
new StructField(
field.name(),
arrayType.elementType(),
arrayType.containsNull(),
Metadata.empty()); // safe to pass empty metadata as it is not used anywhere
}
return nestedField;
}
static Object convertByBigQueryType(Field bqField, Object value, StructField userProvidedField) {
if (LegacySQLTypeName.INTEGER.equals(bqField.getType())
|| LegacySQLTypeName.FLOAT.equals(bqField.getType())
|| LegacySQLTypeName.BOOLEAN.equals(bqField.getType())
|| LegacySQLTypeName.DATE.equals(bqField.getType())
|| LegacySQLTypeName.TIME.equals(bqField.getType())
|| LegacySQLTypeName.TIMESTAMP.equals(bqField.getType())) {
return value;
}
if (LegacySQLTypeName.STRING.equals(bqField.getType())
|| LegacySQLTypeName.DATETIME.equals(bqField.getType())
|| LegacySQLTypeName.GEOGRAPHY.equals(bqField.getType())) {
return UTF8String.fromBytes(((Utf8) value).getBytes());
}
if (LegacySQLTypeName.BYTES.equals(bqField.getType())) {
return getBytes((ByteBuffer) value);
}
if (LegacySQLTypeName.NUMERIC.equals(bqField.getType())) {
byte[] bytes = getBytes((ByteBuffer) value);
BigDecimal b = new BigDecimal(new BigInteger(bytes), BQ_NUMERIC_SCALE);
Decimal d = Decimal.apply(b, BQ_NUMERIC_PRECISION, BQ_NUMERIC_SCALE);
return d;
}
// TODO : uncomment after updating google-cloud-bigquery
// if (LegacySQLTypeName.BIGNUMERIC.equals(bqField.getType())) {
// byte[] bytes = getBytes((ByteBuffer) value);
// BigDecimal bigDecimal = new BigDecimal(new BigInteger(bytes), BQ_BIG_NUMERIC_SCALE);
// return UTF8String.fromString(bigDecimal.toString());
// }
if (LegacySQLTypeName.RECORD.equals(bqField.getType())) {
List<String> namesInOrder = null;
List<StructField> structList = null;
if (userProvidedField != null) {
structList =
Arrays.stream(((StructType) userProvidedField.dataType()).fields())
.collect(Collectors.toList());
namesInOrder = structList.stream().map(StructField::name).collect(Collectors.toList());
} else {
namesInOrder =
bqField.getSubFields().stream().map(Field::getName).collect(Collectors.toList());
}
return convertAll(bqField.getSubFields(), (GenericRecord) value, namesInOrder, structList);
}
throw new IllegalStateException("Unexpected type: " + bqField.getType());
}
private static byte[] getBytes(ByteBuffer buf) {
byte[] bytes = new byte[buf.remaining()];
buf.get(bytes);
return bytes;
}
// The Schema type is not recursive, so add a helper for a sequence of fields
static GenericInternalRow convertAll(
FieldList fieldList,
GenericRecord record,
List<String> namesInOrder,
List<StructField> userProvidedFieldList) {
Map<String, Object> fieldMap = new HashMap<>();
Map<String, StructField> userProvidedFieldMap =
userProvidedFieldList == null
? new HashMap<>()
: userProvidedFieldList.stream()
.collect(Collectors.toMap(StructField::name, Function.identity()));
fieldList.stream()
.forEach(
field ->
fieldMap.put(
field.getName(),
convert(
field,
record.get(field.getName()),
userProvidedFieldMap.get(field.getName()))));
Object[] values = new Object[namesInOrder.size()];
for (int i = 0; i < namesInOrder.size(); i++) {
values[i] = fieldMap.get(namesInOrder.get(i));
}
return new GenericInternalRow(values);
}
/**
* Convert a single BigQuery field to a Spark SQL StructField.
*
* <p>REQUIRED fields become non-nullable, REPEATED fields become an ArrayType, and the field
* description is carried into the Spark metadata under both "description" and "comment".
*
* <p>Not guaranteed to be stable across all versions of Spark.
*/
private static StructField convert(Field field) {
DataType dataType = getDataType(field);
boolean nullable = true;
if (field.getMode() == Field.Mode.REQUIRED) {
nullable = false;
} else if (field.getMode() == Field.Mode.REPEATED) {
dataType = new ArrayType(dataType, true);
}
MetadataBuilder metadata = new MetadataBuilder();
if (field.getDescription() != null) {
metadata.putString("description", field.getDescription());
metadata.putString("comment", field.getDescription());
}
return new StructField(field.getName(), dataType, nullable, metadata.build());
}
private static DataType getDataType(Field field) {
return getCustomDataType(field).orElseGet(() -> getStandardDataType(field));
}
@VisibleForTesting
static Optional<DataType> getCustomDataType(Field field) {
// metadata is kept in the description
String description = field.getDescription();
if (description != null) {
// All supported types are serialized to records
if (LegacySQLTypeName.RECORD.equals(field.getType())) {
// we don't have many types, so we keep parsing to a minimum
return com.google.cloud.spark.bigquery.SupportedCustomDataType.forDescription(description)
.map(SupportedCustomDataType::getSparkDataType);
}
}
return Optional.empty();
}
private static DataType getStandardDataType(Field field) {
if (LegacySQLTypeName.INTEGER.equals(field.getType())) {
return DataTypes.LongType;
} else if (LegacySQLTypeName.FLOAT.equals(field.getType())) {
return DataTypes.DoubleType;
} else if (LegacySQLTypeName.NUMERIC.equals(field.getType())) {
return NUMERIC_SPARK_TYPE;
// TODO : uncomment after updating google-cloud-bigquery
// } else if (LegacySQLTypeName.BIGNUMERIC.equals(field.getType())) {
// return BigQueryDataTypes.BigNumericType;
} else if (LegacySQLTypeName.STRING.equals(field.getType())) {
return DataTypes.StringType;
} else if (LegacySQLTypeName.BOOLEAN.equals(field.getType())) {
return DataTypes.BooleanType;
} else if (LegacySQLTypeName.BYTES.equals(field.getType())) {
return DataTypes.BinaryType;
} else if (LegacySQLTypeName.DATE.equals(field.getType())) {
return DataTypes.DateType;
} else if (LegacySQLTypeName.TIMESTAMP.equals(field.getType())) {
return DataTypes.TimestampType;
} else if (LegacySQLTypeName.TIME.equals(field.getType())) {
return DataTypes.LongType;
// TODO(#5): add a timezone to allow parsing to timestamp
// This can be safely cast to TimestampType, but doing so causes the date to be inferred
// as the current date. It's safer to leave as a stable string and give the user the
// option of casting themselves.
} else if (LegacySQLTypeName.DATETIME.equals(field.getType())) {
return DataTypes.StringType;
} else if (LegacySQLTypeName.RECORD.equals(field.getType())) {
List<StructField> structFields =
field.getSubFields().stream().map(BigQuerySchemaConverters::convert).collect(Collectors.toList());
return new StructType(structFields.toArray(new StructField[0]));
} else if (LegacySQLTypeName.GEOGRAPHY.equals(field.getType())) {
return DataTypes.StringType;
} else {
throw new IllegalStateException("Unexpected type: " + field.getType());
}
}
/**
* Spark ==> BigQuery Schema Converter utils:
*/
public static Schema toBigQuerySchema(StructType sparkSchema) {
FieldList bigQueryFields = sparkToBigQueryFields(sparkSchema, 0);
return Schema.of(bigQueryFields);
}
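// A minimal sketch of the Spark -> BigQuery direction, with hypothetical
// column names: LongType maps to INTEGER, StringType to STRING, and
// nullability maps to Field.Mode.NULLABLE / REQUIRED.
private static Schema toBigQuerySchemaExample() {
StructType sparkSchema = new StructType(new StructField[]{
new StructField("id", DataTypes.LongType, false, Metadata.empty()),
new StructField("name", DataTypes.StringType, true, Metadata.empty())
});
return toBigQuerySchema(sparkSchema);
}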
/**
* Returns a FieldList of all the Spark StructField objects, converted to BigQuery Field objects
*/
private static FieldList sparkToBigQueryFields(StructType sparkStruct, int depth) {
Preconditions.checkArgument(
depth < MAX_BIGQUERY_NESTED_DEPTH, "Spark Schema exceeds BigQuery maximum nesting depth.");
List<Field> bqFields = new ArrayList<>();
for (StructField field : sparkStruct.fields()) {
bqFields.add(createBigQueryColumn(field, depth));
}
return FieldList.of(bqFields);
}
/**
* Converts a single StructField to a BigQuery Field (column).
*/
@VisibleForTesting
protected static Field createBigQueryColumn(StructField sparkField, int depth) {
DataType sparkType = sparkField.dataType();
String fieldName = sparkField.name();
Field.Mode fieldMode = (sparkField.nullable()) ? Field.Mode.NULLABLE : Field.Mode.REQUIRED;
FieldList subFields = null;
LegacySQLTypeName fieldType;
if (sparkType instanceof ArrayType) {
ArrayType arrayType = (ArrayType) sparkType;
fieldMode = Field.Mode.REPEATED;
sparkType = arrayType.elementType();
}
if (sparkType instanceof StructType) {
subFields = sparkToBigQueryFields((StructType) sparkType, depth + 1);
fieldType = LegacySQLTypeName.RECORD;
} else {
fieldType = toBigQueryType(sparkType);
}
Field.Builder fieldBuilder =
createBigQueryFieldBuilder(fieldName, fieldType, fieldMode, subFields);
Optional<String> description = getDescriptionOrCommentOfField(sparkField);
if (description.isPresent()) {
fieldBuilder.setDescription(description.get());
}
return fieldBuilder.build();
}
public static Optional<String> getDescriptionOrCommentOfField(StructField field) {
if (!field.getComment().isEmpty()) {
return Optional.of(field.getComment().get());
}
if (field.metadata().contains("description")
&& field.metadata().getString("description") != null) {
return Optional.of(field.metadata().getString("description"));
}
return Optional.empty();
}
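// A sketch of the precedence above: an explicit Spark comment wins over the
// "description" metadata key; with neither present, Optional.empty() is
// returned. The column name and metadata value below are hypothetical.
private static Optional<String> descriptionPrecedenceSketch() {
Metadata md = new MetadataBuilder().putString("description", "from metadata").build();
StructField field = new StructField("col", DataTypes.StringType, true, md);
// No comment is set, so the "description" metadata is used: "from metadata"
return getDescriptionOrCommentOfField(field);
}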
@VisibleForTesting
protected static LegacySQLTypeName toBigQueryType(DataType elementType) {
if (elementType instanceof BinaryType) {
return LegacySQLTypeName.BYTES;
}
if (elementType instanceof ByteType
|| elementType instanceof ShortType
|| elementType instanceof IntegerType
|| elementType instanceof LongType) {
return LegacySQLTypeName.INTEGER;
}
if (elementType instanceof BooleanType) {
return LegacySQLTypeName.BOOLEAN;
}
if (elementType instanceof FloatType || elementType instanceof DoubleType) {
return LegacySQLTypeName.FLOAT;
}
if (elementType instanceof DecimalType) {
DecimalType decimalType = (DecimalType) elementType;
if (decimalType.precision() <= BQ_NUMERIC_PRECISION
&& decimalType.scale() <= BQ_NUMERIC_SCALE) {
return LegacySQLTypeName.NUMERIC;
} else {
throw new IllegalArgumentException(
"Decimal type is too wide to fit in BigQuery Numeric format");
}
}
if (elementType instanceof StringType) {
return LegacySQLTypeName.STRING;
}
if (elementType instanceof TimestampType) {
// The direct TIMESTAMP mapping is in effect here; an upstream FIXME had
// deferred it until the Vortex backend gained microsecond support.
return LegacySQLTypeName.TIMESTAMP;
}
if (elementType instanceof DateType) {
return LegacySQLTypeName.DATE;
}
if (elementType instanceof MapType) {
throw new IllegalArgumentException(MAPTYPE_ERROR_MESSAGE);
} else {
throw new IllegalArgumentException("Data type not expected: " + elementType.simpleString());
}
}
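// A sketch of the Decimal rule above: DecimalType(38, 9) fits BigQuery
// NUMERIC exactly, while e.g. DecimalType(38, 10) exceeds the scale and throws.
private static LegacySQLTypeName decimalMappingSketch() {
return toBigQueryType(DataTypes.createDecimalType(BQ_NUMERIC_PRECISION, BQ_NUMERIC_SCALE));
}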
private static Field.Builder createBigQueryFieldBuilder(
String name, LegacySQLTypeName type, Field.Mode mode, FieldList subFields) {
return Field.newBuilder(name, type, subFields).setMode(mode);
}
}
|
0 | java-sources/ai/starlake/starlake-spark3_2.12 | java-sources/ai/starlake/starlake-spark3_2.12/1.3.0/Setup.java | import javax.net.ssl.*;
import java.io.*;
import java.net.*;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.security.KeyManagementException;
import java.security.NoSuchAlgorithmException;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.function.BiConsumer;
import java.util.function.Function;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class Setup extends ProxySelector implements X509TrustManager {
private static class UserPwdAuth extends Authenticator {
@Override
protected PasswordAuthentication getPasswordAuthentication() {
return new PasswordAuthentication(username, password.toCharArray());
}
}
private static class JarDependency {
private final String url;
private final String artefactName;
public JarDependency(String artefactName, String url) {
this.url = url;
this.artefactName = artefactName;
}
public String getUrlName() {
return url.substring(url.lastIndexOf("/") + 1);
}
}
private static String protocol = null;
private static String host = null;
private static int port = 0;
private static String username = null;
private static String password = null;
private static String httpsProxy = getEnv("https_proxy").orElse("");
private static String httpProxy = getEnv("http_proxy").orElse("");
private static String noProxy = getEnv("no_proxy").orElse("").replaceAll(",", "|");
private static Proxy proxy = Proxy.NO_PROXY;
private static HttpClient client = null;
private static boolean isWindowsOs() {
String os = System.getProperty("os.name").toLowerCase();
return os.startsWith("windows");
}
private static void parseProxy(String proxy) {
if (proxy.isEmpty()) {
return;
}
final Pattern pattern = Pattern.compile("(https?|socks5?):\\/\\/([^:].+)", Pattern.CASE_INSENSITIVE);
final Matcher m = pattern.matcher(proxy);
if (m.matches()) {
protocol = m.group(1).toLowerCase();
final String hostAndPortWithMaybeCredentials = m.group(2);
if (hostAndPortWithMaybeCredentials.contains("@")) {
final String[] hostAndPortWithCredentials = hostAndPortWithMaybeCredentials.split("@");
final String[] credentials = hostAndPortWithCredentials[0].split(":");
assert(credentials.length == 2): "Invalid credentials format, expecting 'username:password'";
username = credentials[0];
password = credentials[1];
final String[] hostAndPort = hostAndPortWithCredentials[1].split(":");
host = hostAndPort[0];
if (hostAndPort.length > 1) {
port = Integer.parseInt(hostAndPort[1]);
}
} else {
final String[] hostAndPort = hostAndPortWithMaybeCredentials.split(":");
host = hostAndPort[0];
if (hostAndPort.length > 1) {
port = Integer.parseInt(hostAndPort[1]);
}
}
} else {
throw new IllegalArgumentException("Invalid proxy format: " + proxy);
}
}
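// A sketch (hypothetical proxy addresses) of the formats parseProxy accepts;
// missing ports are filled in afterwards by setProxy().
private static void parseProxySketch() {
parseProxy("http://proxy.local:3128"); // host=proxy.local, port=3128
parseProxy("https://bob:secret@proxy.local"); // credentials parsed, port later defaults to 443
parseProxy("socks5://proxy.local"); // SOCKS proxy, port later defaults to 1080
}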
private static void setProxy() {
if (!httpsProxy.isEmpty()) {
parseProxy(httpsProxy);
} else if (!httpProxy.isEmpty()) {
parseProxy(httpProxy);
}
if (host != null) {
if (port == 0) {
if (protocol.equals("https")) {
port = 443;
} else if (protocol.startsWith("socks")) {
port = 1080;
} else {
port = 80;
}
}
Proxy.Type proxyType = Proxy.Type.HTTP;
if (protocol.startsWith("socks")) {
proxyType = Proxy.Type.SOCKS;
}
proxy = new Proxy(proxyType, new InetSocketAddress(host, port));
}
if (!noProxy.isEmpty()) {
System.setProperty("http.nonProxyHosts", noProxy);
}
}
// ENV VARS
public static boolean ENABLE_ALL = envIsTrue("ENABLE_ALL");
public static boolean ENABLE_BIGQUERY = ENABLE_ALL || envIsTrue("ENABLE_BIGQUERY");
public static boolean ENABLE_AZURE = ENABLE_ALL || envIsTrue("ENABLE_AZURE");
public static boolean ENABLE_SNOWFLAKE = ENABLE_ALL || envIsTrue("ENABLE_SNOWFLAKE");
public static boolean ENABLE_REDSHIFT = ENABLE_ALL || envIsTrue("ENABLE_REDSHIFT");
public static boolean ENABLE_POSTGRESQL = ENABLE_ALL || envIsTrue("ENABLE_POSTGRESQL");
public static boolean ENABLE_DUCKDB = ENABLE_ALL || envIsTrue("ENABLE_DUCKDB");
private static final boolean[] ALL_ENABLERS = new boolean[] {
ENABLE_BIGQUERY,
ENABLE_AZURE,
ENABLE_SNOWFLAKE,
ENABLE_REDSHIFT,
ENABLE_POSTGRESQL,
ENABLE_DUCKDB
};
// SCALA 2.13 by default; for 2.13, spark-redshift is only available as a snapshot (see SPARK_REDSHIFT_VERSION)
private static final String SCALA_VERSION = getEnv("SCALA_VERSION").orElse("2.13");
// STARLAKE
private static final String SL_VERSION = getEnv("SL_VERSION").orElse("1.2.0-SNAPSHOT");
// SPARK
private static final String SPARK_VERSION = getEnv("SPARK_VERSION").orElse("3.5.3");
private static final String SPARK_MAJOR_VERSION = SPARK_VERSION.split("\\.")[0];
private static final String HADOOP_VERSION = getEnv("HADOOP_VERSION").orElse("3");
// BIGQUERY
private static final String SPARK_BQ_VERSION = getEnv("SPARK_BQ_VERSION").orElse("0.40.0");
// deltalake
private static final String DELTA_SPARK = getEnv("SPARK_DELTA").orElse("3.2.0");
private static final String HADOOP_AZURE_VERSION = getEnv("HADOOP_AZURE_VERSION").orElse("3.3.5");
private static final String AZURE_STORAGE_VERSION = getEnv("AZURE_STORAGE_VERSION").orElse("8.6.6");
private static final String JETTY_VERSION = getEnv("JETTY_VERSION").orElse("9.4.51.v20230217");
// HADOOP_LIB ON WINDOWS
private static final String[] HADOOP_LIBS = new String[]{
"https://raw.githubusercontent.com/cdarlint/winutils/master/hadoop-3.3.5/bin/winutils.exe",
"https://raw.githubusercontent.com/cdarlint/winutils/master/hadoop-3.3.5/bin/hadoop.dll",
};
// SNOWFLAKE
private static final String SNOWFLAKE_JDBC_VERSION = getEnv("SNOWFLAKE_JDBC_VERSION").orElse("3.18.0");
private static final String SPARK_SNOWFLAKE_VERSION = getEnv("SPARK_SNOWFLAKE_VERSION").orElse("3.0.0");
// POSTGRESQL
private static final String POSTGRESQL_VERSION = getEnv("POSTGRESQL_VERSION").orElse("42.5.4");
// DUCKDB
private static final String DUCKDB_VERSION = getEnv("DUCKDB_VERSION").orElse("1.1.0");
// REDSHIFT
private static final String AWS_JAVA_SDK_VERSION = getEnv("AWS_JAVA_SDK_VERSION").orElse("1.12.595");
private static final String HADOOP_AWS_VERSION = getEnv("HADOOP_AWS_VERSION").orElse("3.3.4");
private static final String REDSHIFT_JDBC_VERSION = getEnv("REDSHIFT_JDBC_VERSION").orElse("2.1.0.30");
private static String SPARK_REDSHIFT_VERSION() {
if (SCALA_VERSION.equals("2.13")) {
return getEnv("SPARK_REDSHIFT_VERSION").orElse("6.3.0-spark_3.5-SNAPSHOT");
} else {
return getEnv("SPARK_REDSHIFT_VERSION").orElse("6.3.0-spark_3.5");
}
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// DEPENDENCY DOWNLOAD URLS
private static final JarDependency SPARK_JAR = new JarDependency("spark", "https://archive.apache.org/dist/spark/spark-" + SPARK_VERSION + "/spark-" + SPARK_VERSION + "-bin-hadoop" + HADOOP_VERSION + ".tgz");
private static final JarDependency SPARK_JAR_213 = new JarDependency("spark", "https://archive.apache.org/dist/spark/spark-" + SPARK_VERSION + "/spark-" + SPARK_VERSION + "-bin-hadoop" + HADOOP_VERSION + "-scala2.13.tgz");
private static final JarDependency SPARK_BQ_JAR = new JarDependency("spark-bigquery-with-dependencies",
"https://repo1.maven.org/maven2/com/google/cloud/spark/spark-bigquery-with-dependencies_" + SCALA_VERSION + "/" +
SPARK_BQ_VERSION + "/" +
"spark-bigquery-with-dependencies_" + SCALA_VERSION + "-" + SPARK_BQ_VERSION + ".jar");
private static final JarDependency DELTA_SPARK_JAR = new JarDependency("delta-spark",
"https://repo1.maven.org/maven2/io/delta/delta-spark_" + SCALA_VERSION + "/" + DELTA_SPARK + "/delta-spark_" + SCALA_VERSION + "-" + DELTA_SPARK + ".jar");
private static final JarDependency DELTA_STORAGE_JAR = new JarDependency("delta-storage",
"https://repo1.maven.org/maven2/io/delta/delta-storage" + "/" + DELTA_SPARK + "/delta-storage" +"-" + DELTA_SPARK + ".jar");
private static final JarDependency HADOOP_AZURE_JAR = new JarDependency("hadoop-azure", "https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-azure/" + HADOOP_AZURE_VERSION + "/hadoop-azure-" + HADOOP_AZURE_VERSION + ".jar");
private static final JarDependency AZURE_STORAGE_JAR = new JarDependency("azure-storage", "https://repo1.maven.org/maven2/com/microsoft/azure/azure-storage/" + AZURE_STORAGE_VERSION + "/azure-storage-" + AZURE_STORAGE_VERSION + ".jar");
private static final JarDependency JETTY_SERVER_JAR = new JarDependency("jetty-server", "https://repo1.maven.org/maven2/org/eclipse/jetty/jetty-server/" + JETTY_VERSION + "/jetty-server-" + JETTY_VERSION + ".jar");
private static final JarDependency SNOWFLAKE_JDBC_JAR = new JarDependency("snowflake-jdbc", "https://repo1.maven.org/maven2/net/snowflake/snowflake-jdbc/" + SNOWFLAKE_JDBC_VERSION + "/snowflake-jdbc-" + SNOWFLAKE_JDBC_VERSION + ".jar");
private static final JarDependency SPARK_SNOWFLAKE_JAR = new JarDependency("spark-snowflake", "https://repo1.maven.org/maven2/net/snowflake/spark-snowflake_" + SCALA_VERSION +
"/" + SPARK_SNOWFLAKE_VERSION + "/spark-snowflake_" + SCALA_VERSION + "-" + SPARK_SNOWFLAKE_VERSION + ".jar");
private static final JarDependency POSTGRESQL_JAR = new JarDependency("postgresql", "https://repo1.maven.org/maven2/org/postgresql/postgresql/" + POSTGRESQL_VERSION + "/postgresql-" + POSTGRESQL_VERSION + ".jar");
private static final JarDependency DUCKDB_JAR = new JarDependency("duckdb_jdbc", "https://repo1.maven.org/maven2/org/duckdb/duckdb_jdbc/" + DUCKDB_VERSION + "/duckdb_jdbc-" + DUCKDB_VERSION + ".jar");
private static final JarDependency AWS_JAVA_SDK_JAR = new JarDependency("aws-java-sdk-bundle", "https://repo1.maven.org/maven2/com/amazonaws/aws-java-sdk-bundle/" + AWS_JAVA_SDK_VERSION + "/aws-java-sdk-bundle-" + AWS_JAVA_SDK_VERSION + ".jar");
private static final JarDependency HADOOP_AWS_JAR = new JarDependency("hadoop-aws", "https://repo1.maven.org/maven2/org/apache/hadoop/hadoop-aws/" + HADOOP_AWS_VERSION + "/hadoop-aws-" + HADOOP_AWS_VERSION + ".jar");
private static final JarDependency REDSHIFT_JDBC_JAR = new JarDependency("redshift-jdbc42", "https://repo1.maven.org/maven2/com/amazon/redshift/redshift-jdbc42/" + REDSHIFT_JDBC_VERSION + "/redshift-jdbc42-" + REDSHIFT_JDBC_VERSION + ".jar");
private static JarDependency SPARK_REDSHIFT_JAR() {
if (SCALA_VERSION.equals("2.13")) {
return new JarDependency("spark-redshift", "https://s01.oss.sonatype.org/content/repositories/snapshots/ai/starlake/spark-redshift_" + SCALA_VERSION +
"/" + SPARK_REDSHIFT_VERSION() + "/spark-redshift_" + SCALA_VERSION + "-" + SPARK_REDSHIFT_VERSION() + ".jar");
}
else {
return new JarDependency("spark-redshift", "https://repo1.maven.org/maven2/io/github/spark-redshift-community/spark-redshift_" + SCALA_VERSION +
"/" + SPARK_REDSHIFT_VERSION() + "/spark-redshift_" + SCALA_VERSION + "-" + SPARK_REDSHIFT_VERSION() + ".jar");
}
}
private static final JarDependency STARLAKE_SNAPSHOT_JAR = new JarDependency("starlake-spark", "https://s01.oss.sonatype.org/content/repositories/snapshots/ai/starlake/starlake-spark" + SPARK_MAJOR_VERSION + "_" + SCALA_VERSION + "/" + SL_VERSION + "/starlake-spark" + SPARK_MAJOR_VERSION + "_" + SCALA_VERSION + "-" + SL_VERSION + "-assembly.jar");
private static final JarDependency STARLAKE_RELEASE_JAR = new JarDependency("starlake-spark", "https://s01.oss.sonatype.org/content/repositories/releases/ai/starlake/starlake-spark" + SPARK_MAJOR_VERSION + "_" + SCALA_VERSION + "/" + SL_VERSION + "/starlake-spark" + SPARK_MAJOR_VERSION + "_" + SCALA_VERSION + "-" + SL_VERSION + "-assembly.jar");
private static final JarDependency[] snowflakeDependencies = {
SNOWFLAKE_JDBC_JAR,
SPARK_SNOWFLAKE_JAR
};
private static final JarDependency[] redshiftDependencies = {
AWS_JAVA_SDK_JAR,
HADOOP_AWS_JAR,
REDSHIFT_JDBC_JAR,
SPARK_REDSHIFT_JAR()
};
private static final JarDependency[] azureDependencies = {
HADOOP_AZURE_JAR,
AZURE_STORAGE_JAR,
JETTY_SERVER_JAR
};
private static final JarDependency[] postgresqlDependencies = {
POSTGRESQL_JAR
};
private static final JarDependency[] duckDbDependencies = {
DUCKDB_JAR
};
private static final JarDependency[] bigqueryDependencies = {
SPARK_BQ_JAR
};
private static final JarDependency[] sparkDependencies = {
DELTA_SPARK_JAR,
DELTA_STORAGE_JAR
};
private static Optional<String> getEnv(String env) {
// consider empty env variables as not set
return Optional.ofNullable(System.getenv(env)).filter(s -> !s.isEmpty());
}
private static boolean envIsTrue(String env) {
String value = getEnv(env).orElse("false");
return !value.equals("false") && !value.equals("0");
}
private static void generateUnixVersions(File targetDir) throws IOException {
generateVersions(targetDir, "versions.sh", "#!/bin/bash\nset -e\n\n",
(writer) -> (variableName, value) -> {
try {
writer.write(variableName + "=" + "${" + variableName + ":-" + value + "}\n");
} catch (IOException e) {
throw new RuntimeException(e);
}
});
}
private static void generateWindowsVersions(File targetDir) throws IOException {
generateVersions(targetDir, "versions.cmd", "@ECHO OFF\n\n",
(writer) -> (variableName, value) -> {
try {
writer.write(
"if \"%" + variableName + "%\"==\"\" (\n" +
" SET " + variableName + "=" + value + "\n" +
")\n");
} catch (IOException e) {
throw new RuntimeException(e);
}
});
}
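// For illustration, the same SPARK_VERSION entry in each generated file
// (the value shown is the current default):
// versions.sh : SPARK_VERSION=${SPARK_VERSION:-3.5.3}
// versions.cmd: if "%SPARK_VERSION%"=="" (
// SET SPARK_VERSION=3.5.3
// )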
// Uses a Function that returns a BiConsumer because no TriConsumer exists natively; this avoids defining a new functional interface
private static void generateVersions(File targetDir, String versionsFileName, String fileHeader, Function<BufferedWriter, BiConsumer<String, String>> variableWriter) throws IOException {
File versionFile = new File(targetDir, versionsFileName);
deleteFile(versionFile);
BufferedWriter writer = new BufferedWriter(new FileWriter(versionFile));
try {
writer.write(fileHeader);
variableWriter.apply(writer).accept("ENABLE_BIGQUERY", String.valueOf(ENABLE_BIGQUERY));
variableWriter.apply(writer).accept("ENABLE_AZURE", String.valueOf(ENABLE_AZURE));
variableWriter.apply(writer).accept("ENABLE_SNOWFLAKE", String.valueOf(ENABLE_SNOWFLAKE));
variableWriter.apply(writer).accept("ENABLE_POSTGRESQL", String.valueOf(ENABLE_POSTGRESQL));
variableWriter.apply(writer).accept("ENABLE_REDSHIFT", String.valueOf(ENABLE_REDSHIFT));
variableWriter.apply(writer).accept("SL_VERSION", SL_VERSION);
variableWriter.apply(writer).accept("SCALA_VERSION", SCALA_VERSION);
variableWriter.apply(writer).accept("SPARK_VERSION", SPARK_VERSION);
variableWriter.apply(writer).accept("HADOOP_VERSION", HADOOP_VERSION);
variableWriter.apply(writer).accept("DUCKDB_VERSION", DUCKDB_VERSION);
if (ENABLE_BIGQUERY || !anyDependencyEnabled()) {
variableWriter.apply(writer).accept("SPARK_BQ_VERSION", SPARK_BQ_VERSION);
}
if (ENABLE_AZURE || !anyDependencyEnabled()) {
variableWriter.apply(writer).accept("HADOOP_AZURE_VERSION", HADOOP_AZURE_VERSION);
variableWriter.apply(writer).accept("AZURE_STORAGE_VERSION", AZURE_STORAGE_VERSION);
variableWriter.apply(writer).accept("JETTY_VERSION", JETTY_VERSION);
}
if (ENABLE_SNOWFLAKE || !anyDependencyEnabled()) {
variableWriter.apply(writer).accept("SPARK_SNOWFLAKE_VERSION", SPARK_SNOWFLAKE_VERSION);
variableWriter.apply(writer).accept("SNOWFLAKE_JDBC_VERSION", SNOWFLAKE_JDBC_VERSION);
}
if (ENABLE_POSTGRESQL || !anyDependencyEnabled()) {
variableWriter.apply(writer).accept("POSTGRESQL_VERSION", POSTGRESQL_VERSION);
}
if (ENABLE_REDSHIFT || !anyDependencyEnabled()) {
variableWriter.apply(writer).accept("AWS_JAVA_SDK_VERSION", AWS_JAVA_SDK_VERSION);
variableWriter.apply(writer).accept("HADOOP_AWS_VERSION", HADOOP_AWS_VERSION);
variableWriter.apply(writer).accept("REDSHIFT_JDBC_VERSION", REDSHIFT_JDBC_VERSION);
variableWriter.apply(writer).accept("SPARK_REDSHIFT_VERSION", SPARK_REDSHIFT_VERSION());
}
} finally {
writer.close();
}
System.out.println(versionFile.getAbsolutePath() + " created");
}
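// A minimal sketch of the currying trick above: the Function binds the
// writer once, then each accept(name, value) emits one variable line.
private static void variableWriterSketch(BufferedWriter writer,
Function<BufferedWriter, BiConsumer<String, String>> variableWriter) {
BiConsumer<String, String> emit = variableWriter.apply(writer);
emit.accept("SPARK_VERSION", SPARK_VERSION);
emit.accept("SCALA_VERSION", SCALA_VERSION);
}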
private static void generateVersions(File targetDir, boolean unix) throws IOException {
if (isWindowsOs() && !unix) {
generateWindowsVersions(targetDir);
} else {
generateUnixVersions(targetDir);
}
}
private static boolean anyDependencyEnabled() {
for (boolean enabled : ALL_ENABLERS) {
if (enabled) {
return true;
}
}
return ENABLE_ALL;
}
@Override
public void checkClientTrusted(X509Certificate[] chain, String authType) throws CertificateException {
}
@Override
public void checkServerTrusted(X509Certificate[] chain, String authType) throws CertificateException {
}
@Override
public X509Certificate[] getAcceptedIssuers() {
return new X509Certificate[0];
}
@Override
public List<Proxy> select(URI uri) {
return Collections.singletonList(proxy);
}
@Override
public void connectFailed(URI uri, SocketAddress sa, IOException ioe) {
throw new RuntimeException("Failed to connect to " + uri + " using proxy " + sa);
}
private static final Setup instance = new Setup();
private static final TrustManager alwaysTrustManager = instance;
private static final ProxySelector proxySelector = instance;
private static void setHttpClient() throws NoSuchAlgorithmException, KeyManagementException {
setProxy();
HttpClient.Builder clientBuilder = HttpClient.newBuilder();
clientBuilder.proxy(proxySelector);
if (username != null && password != null) {
Authenticator authenticator = new UserPwdAuth();
clientBuilder.authenticator(authenticator);
}
if (host != null && envIsTrue("SL_INSECURE")) {
System.out.println("Enabling insecure mode for SSL connections using proxy " + protocol + "://" + host + ":" + port);
// Create a trust manager that does not validate certificate chains
TrustManager[] trustAllCerts = new TrustManager[]{alwaysTrustManager};
// Install the all-trusting trust manager
SSLContext sc = SSLContext.getInstance("SSL");
sc.init(null, trustAllCerts, new java.security.SecureRandom());
clientBuilder.sslContext(sc);
}
client = clientBuilder.build();
}
private static void updateSparkLog4j2Properties(File sparkDir) {
File log4jFile = new File(new File(sparkDir, "conf"), "log4j2.properties");
try {
BufferedReader reader = new BufferedReader(new FileReader(log4jFile));
StringBuilder sb = new StringBuilder();
String line;
while ((line = reader.readLine()) != null) {
if (line.startsWith("rootLogger.level =")|| line.startsWith("rootLogger.level=")) {
line = "rootLogger.level = ${env:SL_LOG_LEVEL:-error}";
}
sb.append(line).append("\n");
}
reader.close();
sb.append("logger.shutdown.name=org.apache.spark.util.ShutdownHookManager").append("\n");
sb.append("logger.shutdown.level=OFF").append("\n");
sb.append("logger.env.name=org.apache.spark.SparkEnv").append("\n");
sb.append("logger.env.level=error").append("\n");
BufferedWriter writer = new BufferedWriter(new FileWriter(log4jFile));
writer.write(sb.toString());
writer.close();
} catch (IOException e) {
System.out.println("Failed to update log4j.properties");
e.printStackTrace();
}
}
private static void askUserWhichConfigToEnable() {
if (!anyDependencyEnabled()) {
System.out.println("Do you want to enable all datawarehouse configurations ? [y/n]");
try {
BufferedReader reader = new BufferedReader(new InputStreamReader(System.in));
String answer = reader.readLine();
if ("y".equalsIgnoreCase(answer)) {
ENABLE_AZURE = true;
ENABLE_BIGQUERY = true;
ENABLE_SNOWFLAKE = true;
ENABLE_REDSHIFT = true;
ENABLE_POSTGRESQL = true;
ENABLE_DUCKDB = true;
} else {
System.out.println("Please enable the configurations you want to use by setting the corresponding environment variables below");
System.out.println("ENABLE_BIGQUERY, ENABLE_DATABRICKS, ENABLE_AZURE, ENABLE_SNOWFLAKE, ENABLE_REDSHIFT, ENABLE_POSTGRESQL, ENABLE_ANY_JDBC");
System.exit(1);
}
} catch (IOException e) {
System.out.println("Failed to read user input");
e.printStackTrace();
}
}
}
public static void main(String[] args) throws IOException {
try {
if (args.length == 0) {
System.out.println("Please specify the target directory");
System.exit(1);
}
askUserWhichConfigToEnable();
final File targetDir = new File(args[0]);
if (!targetDir.exists()) {
targetDir.mkdirs();
System.out.println("Created target directory " + targetDir.getAbsolutePath());
}
setHttpClient();
if (!anyDependencyEnabled()) {
ENABLE_AZURE = true;
ENABLE_BIGQUERY = true;
ENABLE_SNOWFLAKE = true;
ENABLE_REDSHIFT = true;
ENABLE_POSTGRESQL = true;
ENABLE_DUCKDB = true;
}
final File binDir = new File(targetDir, "bin");
if (isWindowsOs()) {
final File hadoopDir = new File(binDir, "hadoop");
final File hadoopBinDir = new File(hadoopDir, "bin");
if (!hadoopBinDir.exists()) {
hadoopBinDir.mkdirs();
}
for (String lib : HADOOP_LIBS) {
final File libFile = new File(hadoopBinDir, lib.substring(lib.lastIndexOf("/") + 1));
downloadAndDisplayProgress(lib, libFile.getAbsolutePath());
}
} else {
System.out.println("Unix OS detected");
}
File slDir = new File(binDir, "sl");
if (SL_VERSION.endsWith("SNAPSHOT")) {
deleteFile(new File(slDir, STARLAKE_SNAPSHOT_JAR.getUrlName()));
downloadAndDisplayProgress(new JarDependency[]{STARLAKE_SNAPSHOT_JAR}, slDir, false);
} else {
deleteFile(new File(slDir, STARLAKE_RELEASE_JAR.getUrlName()));
downloadAndDisplayProgress(new JarDependency[]{STARLAKE_RELEASE_JAR}, slDir, false);
}
File sparkDir = new File(binDir, "spark");
if (!sparkDir.exists()) {
downloadSpark(binDir);
}
File depsDir = new File(binDir, "deps");
downloadAndDisplayProgress(sparkDependencies, depsDir, true);
updateSparkLog4j2Properties(sparkDir);
downloadAndDisplayProgress(duckDbDependencies, depsDir, true);
if (ENABLE_BIGQUERY) {
downloadAndDisplayProgress(bigqueryDependencies, depsDir, true);
} else {
deleteDependencies(bigqueryDependencies, depsDir);
}
if (ENABLE_AZURE) {
downloadAndDisplayProgress(azureDependencies, depsDir, true);
} else {
deleteDependencies(azureDependencies, depsDir);
}
if (ENABLE_SNOWFLAKE) {
downloadAndDisplayProgress(snowflakeDependencies, depsDir, true);
} else {
deleteDependencies(snowflakeDependencies, depsDir);
}
if (ENABLE_REDSHIFT) {
downloadAndDisplayProgress(redshiftDependencies, depsDir, true);
} else {
deleteDependencies(redshiftDependencies, depsDir);
}
if (ENABLE_POSTGRESQL) {
downloadAndDisplayProgress(postgresqlDependencies, depsDir, true);
} else {
deleteDependencies(postgresqlDependencies, depsDir);
}
boolean unix = args.length > 1 && args[1].equalsIgnoreCase("unix");
generateVersions(targetDir, unix);
} catch (Exception e) {
System.out.println("Failed to download dependency" + e.getMessage());
e.printStackTrace();
System.exit(1);
}
}
public static void downloadSpark(File binDir) throws IOException, InterruptedException {
JarDependency sparkJar = SPARK_JAR;
if (!SCALA_VERSION.equals("2.12")) {
sparkJar = SPARK_JAR_213;
}
downloadAndDisplayProgress(new JarDependency[]{sparkJar}, binDir, false);
String tgzName = sparkJar.getUrlName();
final File sparkFile = new File(binDir, tgzName);
ProcessBuilder builder = new ProcessBuilder("tar", "-xzf", sparkFile.getAbsolutePath(), "-C", binDir.getAbsolutePath()).inheritIO();
Process process = builder.start();
try {
process.waitFor();
} catch (InterruptedException e) {
System.out.println("Failed to extract spark tarball");
e.printStackTrace();
}
sparkFile.delete();
File sparkDir = new File(binDir, tgzName.substring(0, tgzName.lastIndexOf(".")));
sparkDir.renameTo(new File(binDir, "spark"));
sparkDir = new File(binDir, "spark");
File log4j2File = new File(sparkDir, "conf/log4j2.properties.template");
log4j2File.renameTo(new File(sparkDir, "conf/log4j2.properties"));
}
private static void downloadAndDisplayProgress(JarDependency[] dependencies, File targetDir, boolean replaceJar) throws IOException, InterruptedException {
if (!targetDir.exists()) {
targetDir.mkdirs();
}
if (replaceJar) {
deleteDependencies(dependencies, targetDir);
}
for (JarDependency dependency : dependencies) {
final File targetFile = new File(targetDir, dependency.getUrlName());
downloadAndDisplayProgress(dependency.url, targetFile.getAbsolutePath());
}
}
private static void deleteDependencies(JarDependency[] dependencies, File targetDir) {
if (targetDir.exists()) {
for (JarDependency dependency : dependencies) {
File[] files = targetDir.listFiles(f -> f.getName().startsWith(dependency.artefactName));
if (files != null) {
for (File file : files) {
deleteFile(file);
}
}
}
}
}
private static void deleteFile(File file) {
if (file.exists()) {
if (file.delete()) {
System.out.println(file.getAbsolutePath() + " deleted");
}
}
}
private static void downloadAndDisplayProgress(String urlStr, String file) throws IOException, InterruptedException {
final int CHUNK_SIZE = 1024;
int filePartIndex = urlStr.lastIndexOf("/") + 1;
String name = urlStr.substring(filePartIndex);
String urlFolder = urlStr.substring(0, filePartIndex);
System.out.println("Downloading to " + file + " from " + urlFolder + " ...");
HttpRequest request = HttpRequest.newBuilder()
.uri(URI.create(urlStr))
.build();
HttpResponse<InputStream> response = client.send(request, HttpResponse.BodyHandlers.ofInputStream());
long lengthOfFile = response.headers().firstValueAsLong("Content-Length").orElse(0L);
InputStream input = new BufferedInputStream(response.body());
OutputStream output = new FileOutputStream(file);
byte[] data = new byte[CHUNK_SIZE];
long total = 0;
int count;
int loop = 0;
int sbLen = 0;
long lastTime = System.currentTimeMillis();
while ((count = input.read(data)) != -1) {
total += count;
output.write(data, 0, count);
loop++;
if (loop % 1000 == 0) {
StringBuilder sb = new StringBuilder(" " + (total / 1024 / 1024) + "/" + (lengthOfFile / 1024 / 1024) + " MB");
if (lengthOfFile > 0) {
sb.append(" (");
sb.append(total * 100 / lengthOfFile);
sb.append("%)");
}
long currentTime = System.currentTimeMillis();
long timeDiff = currentTime - lastTime;
double bytesPerMilliSec = (CHUNK_SIZE * 1000.0 / timeDiff);
double bytesPerSec = bytesPerMilliSec * 1000;
double mbPerSec = bytesPerSec / 1024 / 1024;
sb.append(" ");
sb.append(String.format("[%.2f MB/sec]", mbPerSec));
lastTime = currentTime;
sbLen = sb.length();
for (int cnt = 0; cnt < sbLen; cnt++) {
System.out.print("\b");
}
System.out.print(sb);
}
}
for (int cnt = 0; cnt < sbLen; cnt++) {
System.out.print("\b");
}
System.out.print(name + " downloaded");
System.out.println();
output.flush();
output.close();
input.close();
}
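// Worked example of the progress math above, with illustrative numbers:
// 1000 chunks of 1024 bytes read in 250 ms
// -> 1024 * 1000 / 250 = 4096 bytes/ms
// -> 4096 * 1000 = 4,096,000 bytes/sec
// -> 4,096,000 / 1024 / 1024 ≈ 3.91 MB/sec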
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/DeleteResult.java | package art.starrynift;
import lombok.Data;
/**
* A response when deleting an object
*/
@Data
public class DeleteResult {
/**
* The id of the object.
*/
String id;
/**
* The type of object deleted, for example "file" or "model"
*/
String object;
/**
* True if successfully deleted
*/
boolean deleted;
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/ListSearchParameters.java | package art.starrynift;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
* Common options when getting a list of objects
*/
@Builder
@NoArgsConstructor
@AllArgsConstructor
@Data
public class ListSearchParameters {
/**
* A limit on the number of objects to be returned.
* Limit can range between 1 and 100, and the default is 20
*/
Integer limit;
/**
* Sort order by the 'created_at' timestamp of the objects.
* 'asc' for ascending order and 'desc' for descending order.
*/
Order order;
/**
* A cursor for use in pagination. after is an object ID that defines your place in the list.
* For instance, if you make a list request and receive 100 objects, ending with obj_foo,
* your subsequent call can include after=obj_foo in order to fetch the next page of the list
*/
String after;
/**
* A cursor for use in pagination. before is an object ID that defines your place in the list.
* For instance, if you make a list request and receive 100 objects, ending with obj_foo,
* your subsequent call can include before=obj_foo in order to fetch the previous page of the list.
*/
String before;
public enum Order {
@JsonProperty("asc")
ASCENDING,
@JsonProperty("desc")
DESCENDING
}
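// A minimal pagination sketch; the object id "obj_foo" is a hypothetical
// example taken from the field documentation above.
static ListSearchParameters nextPageExample() {
return ListSearchParameters.builder()
.limit(20)
.order(Order.ASCENDING)
.after("obj_foo") // resume after the last id of the previous page
.build();
}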
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/OpenAiError.java | package art.starrynift;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
* Represents the error body when an OpenAI request fails
*/
@Data
@NoArgsConstructor
@AllArgsConstructor
public class OpenAiError {
public OpenAiErrorDetails error;
@Data
@NoArgsConstructor
@AllArgsConstructor
public static class OpenAiErrorDetails {
/**
* Human-readable error message
*/
String message;
/**
* OpenAI error type, for example "invalid_request_error"
* https://platform.openai.com/docs/guides/error-codes/python-library-error-types
*/
String type;
String param;
/**
* OpenAI error code, for example "invalid_api_key"
*/
String code;
}
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/OpenAiHttpException.java | package art.starrynift;
public class OpenAiHttpException extends RuntimeException {
/**
* HTTP status code
*/
public final int statusCode;
/**
* OpenAI error code, for example "invalid_api_key"
*/
public final String code;
public final String param;
/**
* OpenAI error type, for example "invalid_request_error"
* https://platform.openai.com/docs/guides/error-codes/python-library-error-types
*/
public final String type;
public OpenAiHttpException(OpenAiError error, Exception parent, int statusCode) {
super(error.error.message, parent);
this.statusCode = statusCode;
this.code = error.error.code;
this.param = error.error.param;
this.type = error.error.type;
}
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/OpenAiResponse.java | package art.starrynift;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;
import java.util.List;
/**
* A wrapper class to fit the OpenAI engine and search endpoints
*/
@Data
public class OpenAiResponse<T> {
/**
* A list containing the actual results
*/
public List<T> data;
/**
* The type of object returned, should be "list"
*/
public String object;
/**
* The first id included
*/
@JsonProperty("first_id")
public String firstId;
/**
* The last id included
*/
@JsonProperty("last_id")
public String lastId;
/**
* True if there are objects after lastId
*/
@JsonProperty("has_more")
public boolean hasMore;
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/Usage.java | package art.starrynift;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;
/**
* The OpenAI resources used by a request
*/
@Data
public class Usage {
/**
* The number of prompt tokens used.
*/
@JsonProperty("prompt_tokens")
long promptTokens;
/**
* The number of completion tokens used.
*/
@JsonProperty("completion_tokens")
long completionTokens;
/**
* The number of total tokens used
*/
@JsonProperty("total_tokens")
long totalTokens;
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/assistants/Assistant.java | package art.starrynift.assistants;
import java.util.List;
import java.util.Map;
import javax.tools.Tool;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.NonNull;
@Builder
@NoArgsConstructor
@AllArgsConstructor
@Data
public class Assistant {
/**
* The identifier, which can be referenced in API endpoints.
*/
String id;
/**
* The object type, which is always 'assistant'
*/
String object;
/**
* The Unix timestamp (in seconds) for when the assistant was created
*/
@JsonProperty("created_at")
Integer createdAt;
/**
* The name of the assistant. The maximum length is 256
*/
String name;
/**
* The description of the assistant.
*/
String description;
/**
* ID of the model to use
*/
@NonNull
String model;
/**
* The system instructions that the assistant uses.
*/
String instructions;
/**
* A list of tools enabled on the assistant.
*/
List<Tool> tools;
/**
* A list of file IDs attached to this assistant.
*/
@JsonProperty("file_ids")
List<String> fileIds;
/**
* Set of 16 key-value pairs that can be attached to an object.
*/
Map<String, String> metadata;
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/assistants/AssistantFile.java | package art.starrynift.assistants;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;
@Data
public class AssistantFile {
/**
* The identifier of the Assistant File
*/
String id;
/**
* The object type, which is always assistant.file.
*/
String object;
/**
* The Unix timestamp (in seconds) for when the assistant file was created.
*/
@JsonProperty("created_at")
String createdAt;
/**
* The assistant ID that the file is attached to
*/
@JsonProperty("assistant_id")
String assistantId;
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/assistants/AssistantFileRequest.java | package art.starrynift.assistants;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
@Builder
@NoArgsConstructor
@AllArgsConstructor
@Data
public class AssistantFileRequest {
@JsonProperty("file_id")
String fileId;
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/assistants/AssistantFunction.java | package art.starrynift.assistants;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.Map;
@Builder
@NoArgsConstructor
@AllArgsConstructor
@Data
public class AssistantFunction {
private String description;
private String name;
private Map<String, Object> parameters;
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/assistants/AssistantRequest.java | package art.starrynift.assistants;
import java.util.List;
import java.util.Map;
import javax.tools.Tool;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.NonNull;
@Builder
@NoArgsConstructor
@AllArgsConstructor
@Data
public class AssistantRequest {
/**
* ID of the model to use
*/
@NonNull
String model;
/**
* The name of the assistant. The maximum length is 256
*/
String name;
/**
* The description of the assistant.
*/
String description;
/**
* The system instructions that the assistant uses.
*/
String instructions;
/**
* A list of tools enabled on the assistant.
*/
List<Tool> tools;
/**
* A list of file IDs attached to this assistant.
*/
@JsonProperty("file_ids")
List<String> fileIds;
/**
* Set of 16 key-value pairs that can be attached to an object.
*/
Map<String, String> metadata;
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/assistants/AssistantSortOrder.java | package art.starrynift.assistants;
import com.fasterxml.jackson.annotation.JsonProperty;
public enum AssistantSortOrder {
@JsonProperty("asc")
ASC,
@JsonProperty("desc")
DESC
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/assistants/AssistantToolsEnum.java | package art.starrynift.assistants;
import com.fasterxml.jackson.annotation.JsonProperty;
public enum AssistantToolsEnum {
@JsonProperty("code_interpreter")
CODE_INTERPRETER,
@JsonProperty("function")
FUNCTION,
@JsonProperty("retrieval")
RETRIEVAL
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/assistants/ModifyAssistantRequest.java | package art.starrynift.assistants;
import java.util.List;
import java.util.Map;
import javax.tools.Tool;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
@Builder
@NoArgsConstructor
@AllArgsConstructor
@Data
public class ModifyAssistantRequest {
/**
* ID of the model to use
*/
String model;
/**
* The name of the assistant. The maximum length is 256
*/
String name;
/**
* The description of the assistant.
*/
String description;
/**
* The system instructions that the assistant uses.
*/
String instructions;
/**
* A list of tools enabled on the assistant.
*/
List<Tool> tools;
/**
* A list of file IDs attached to this assistant.
*/
@JsonProperty("file_ids")
List<String> fileIds;
/**
* Set of 16 key-value pairs that can be attached to an object.
*/
Map<String, String> metadata;
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/audio/CreateSpeechRequest.java | package art.starrynift.audio;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.NonNull;
@Builder
@NoArgsConstructor
@AllArgsConstructor
@Data
public class CreateSpeechRequest {
/**
* The name of the model to use.
*/
@NonNull
String model;
/**
* The text to generate audio for. The maximum length is 4096 characters.
*/
@NonNull
String input;
/**
* The voice to use when generating the audio.
*/
@NonNull
String voice;
/**
* The format of the generated audio. Supported formats are mp3, opus, aac, and flac. Defaults to mp3.
*/
@JsonProperty("response_format")
String responseFormat;
/**
* The speed of the generated audio. Select a value from 0.25 to 4.0. Defaults to 1.0.
*/
Double speed;
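// A minimal request sketch; the model and voice names are assumed example
// values, not verified against the API.
static CreateSpeechRequest example() {
return CreateSpeechRequest.builder()
.model("tts-1")
.input("Hello world")
.voice("alloy")
.responseFormat("mp3") // optional, defaults to mp3
.speed(1.0) // optional, 0.25 to 4.0
.build();
}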
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/audio/CreateTranscriptionRequest.java | package art.starrynift.audio;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.NonNull;
/**
* A request for OpenAI to create a transcription from an audio file.
* All fields except model are optional.
*
* https://platform.openai.com/docs/api-reference/audio/create
*/
@Builder
@NoArgsConstructor
@AllArgsConstructor
@Data
public class CreateTranscriptionRequest {
/**
* The name of the model to use.
*/
@NonNull
String model;
/**
* An optional text to guide the model's style or continue a previous audio segment. The prompt should match the audio language.
*/
String prompt;
/**
* The format of the transcript output, in one of these options: json or verbose_json
*/
@JsonProperty("response_format")
String responseFormat;
/**
* The sampling temperature, between 0 and 1.
* Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic.
* If set to 0, the model will use log probability to automatically increase the temperature until certain thresholds are hit.
*/
Double temperature;
/**
* The language of the input audio. Supplying the input language in ISO-639-1 format will improve accuracy and latency.
*/
String language;
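// A minimal request sketch; the model name is an assumed example value.
static CreateTranscriptionRequest example() {
return CreateTranscriptionRequest.builder()
.model("whisper-1")
.responseFormat("verbose_json") // enables segment-level detail
.language("en") // ISO-639-1 code
.build();
}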
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/audio/CreateTranslationRequest.java | package art.starrynift.audio;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.*;
/**
* A request for OpenAI to create an English translation from an audio file.
* All fields except model are optional.
*
* https://platform.openai.com/docs/api-reference/audio/create
*/
@Builder
@NoArgsConstructor
@AllArgsConstructor
@Data
public class CreateTranslationRequest {
/**
* The name of the model to use.
*/
@NonNull
String model;
/**
* An optional text to guide the model's style or continue a previous audio segment. The prompt should be in English.
*/
String prompt;
/**
* The format of the translated output, in one of these options: json or verbose_json
*/
@JsonProperty("response_format")
String responseFormat;
/**
* The sampling temperature, between 0 and 1.
* Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic.
* If set to 0, the model will use log probability to automatically increase the temperature until certain thresholds are hit.
*/
Double temperature;
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/audio/TranscriptionResult.java | package art.starrynift.audio;
import lombok.Data;
import java.util.List;
/**
* An object with the text transcription
*
* https://platform.openai.com/docs/api-reference/audio/create
*/
@Data
public class TranscriptionResult {
/**
* The text transcription.
*/
String text;
/**
* Task name
* @apiNote verbose_json response format only
*/
String task;
/**
* Speech language
* @apiNote verbose_json response format only
*/
String language;
/**
* Speech duration
* @apiNote verbose_json response format only
*/
Double duration;
/**
* List of segments
* @apiNote verbose_json response format only
*/
List<TranscriptionSegment> segments;
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/audio/TranscriptionSegment.java | package art.starrynift.audio;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;
import java.util.List;
/**
* An object representing a transcription segment
*
* https://platform.openai.com/docs/api-reference/audio/create
*/
@Data
public class TranscriptionSegment {
Integer id;
Integer seek;
Double start;
Double end;
String text;
List<Integer> tokens;
Double temperature;
@JsonProperty("avg_logprob")
Double averageLogProb;
@JsonProperty("compression_ratio")
Double compressionRatio;
@JsonProperty("no_speech_prob")
Double noSpeechProb;
@JsonProperty("transient")
Boolean transientFlag;
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/audio/TranslationResult.java | package art.starrynift.audio;
import lombok.Data;
import java.util.List;
/**
* An object with the English translation
*
* https://platform.openai.com/docs/api-reference/audio/create
*/
@Data
public class TranslationResult {
/**
* Translated text.
*/
String text;
/**
* Task name
* @apiNote verbose_json response format only
*/
String task;
/**
* Translated language
* @apiNote verbose_json response format only
*/
String language;
/**
* Speech duration
* @apiNote verbose_json response format only
*/
Double duration;
/**
* List of segments
* @apiNote verbose_json response format only
*/
List<TranscriptionSegment> segments;
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/billing/BillingUsage.java | package art.starrynift.billing;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;
import java.math.BigDecimal;
import java.util.List;
/**
* Usage (cost) information for the account
*
*/
@Data
public class BillingUsage {
@JsonProperty("object")
private String object;
/**
* Account expenditure details
*/
@JsonProperty("daily_costs")
private List<DailyCost> dailyCosts;
/**
* Total usage amount: cents
*/
@JsonProperty("total_usage")
private BigDecimal totalUsage;
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/billing/CreditGrantsResponse.java | package art.starrynift.billing;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;
import java.io.Serializable;
import java.math.BigDecimal;
/**
* Response of the credit grants (balance inquiry) endpoint
*
*/
@Data
public class CreditGrantsResponse implements Serializable {
private String object;
/**
* Total amount: US dollars
*/
@JsonProperty("total_granted")
private BigDecimal totalGranted;
/**
* Total usage amount: US dollars
*/
@JsonProperty("total_used")
private BigDecimal totalUsed;
/**
* Total remaining amount: US dollars
*/
@JsonProperty("total_available")
private BigDecimal totalAvailable;
/**
* Balance details
*/
private Grants grants;
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/billing/DailyCost.java | package art.starrynift.billing;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;
import java.util.List;
/**
* Consumption details for a single day
*/
@Data
public class DailyCost {
/**
* The Unix timestamp of the day this cost entry covers
*/
@JsonProperty("timestamp")
private long timestamp;
/**
* Model consumption amount details
*/
@JsonProperty("line_items")
private List<LineItem> lineItems;
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/billing/Datum.java | package art.starrynift.billing;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;
import java.math.BigDecimal;
/**
* A single credit grant entry.
*/
@Data
public class Datum {
private String object;
private String id;
/**
* Granted amount: US dollars
*/
@JsonProperty("grant_amount")
private BigDecimal grantAmount;
/**
* Usage amount: US dollars
*/
@JsonProperty("used_amount")
private BigDecimal usedAmount;
/**
* Effective timestamp
*/
@JsonProperty("effective_at")
private Long effectiveAt;
/**
* Expiration timestamp
*/
@JsonProperty("expires_at")
private Long expiresAt;
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/billing/Grants.java | package art.starrynift.billing;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;
import java.util.List;
/**
* Collection of credit grants.
*/
@Data
public class Grants {
private String object;
@JsonProperty("data")
private List<Datum> data;
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/billing/LineItem.java | package art.starrynift.billing;
import lombok.Data;
import java.math.BigDecimal;
/**
* Per-model cost line item within a day's consumption
*
*/
@Data
public class LineItem {
/**
* Model name
*/
private String name;
/**
* Expenditure amount
*/
private BigDecimal cost;
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/billing/Plan.java | package art.starrynift.billing;
import lombok.Data;
/**
* Subscription plan information.
*/
@Data
public class Plan {
private String title;
private String id;
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/billing/Subscription.java | package art.starrynift.billing;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;
/**
* Account (subscription) information
*/
@Data
public class Subscription {
@JsonProperty("object")
private String object;
@JsonProperty("has_payment_method")
private boolean hasPaymentMethod;
@JsonProperty("canceled")
private boolean canceled;
@JsonProperty("canceled_at")
private Object canceledAt;
@JsonProperty("delinquent")
private Object delinquent;
@JsonProperty("access_until")
private long accessUntil;
@JsonProperty("soft_limit")
private long softLimit;
@JsonProperty("hard_limit")
private long hardLimit;
@JsonProperty("system_hard_limit")
private long systemHardLimit;
@JsonProperty("soft_limit_usd")
private double softLimitUsd;
@JsonProperty("hard_limit_usd")
private double hardLimitUsd;
@JsonProperty("system_hard_limit_usd")
private double systemHardLimitUsd;
@JsonProperty("plan")
private Plan plan;
@JsonProperty("account_name")
private String accountName;
@JsonProperty("po_number")
private Object poNumber;
@JsonProperty("billing_email")
private Object billingEmail;
@JsonProperty("tax_ids")
private Object taxIds;
@JsonProperty("billing_address")
private Object billingAddress;
@JsonProperty("business_address")
private Object businessAddress;
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/claude/ClaudeClient.java | package art.starrynift.claude;
import java.io.IOException;
import java.net.Proxy;
import java.time.Duration;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.PropertyNamingStrategy;
import art.starrynift.claude.Interceptor.ClaudeAuthenticationInterceptor;
import art.starrynift.claude.api.ClaudeApi;
import art.starrynift.claude.entity.ChatFunction;
import art.starrynift.claude.entity.ChatFunctionMixIn;
import art.starrynift.claude.entity.ResponseBodyCallback;
import art.starrynift.claude.entity.SSE;
import art.starrynift.claude.error.ChatResponseError;
import art.starrynift.claude.exception.VacSdkException;
import art.starrynift.claude.request.ChatRequest;
import art.starrynift.claude.response.ChatResponse;
import art.starrynift.claude.response.StreamChatResponse;
import io.reactivex.BackpressureStrategy;
import io.reactivex.Flowable;
import io.reactivex.Single;
import okhttp3.Authenticator;
import okhttp3.ConnectionPool;
import okhttp3.Credentials;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.Response;
import okhttp3.ResponseBody;
import okhttp3.Route;
import retrofit2.Call;
import retrofit2.HttpException;
import retrofit2.Retrofit;
import retrofit2.adapter.rxjava2.RxJava2CallAdapterFactory;
import retrofit2.converter.jackson.JacksonConverterFactory;
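/**
 * Entry point for calling the Anthropic Messages API.
 *
 * A minimal usage sketch (the token and model name are placeholders, and
 * ChatMessage is assumed to expose a (role, content) constructor as its
 * commented-out local variant suggests):
 * <pre>{@code
 * ClaudeClient client = new ClaudeClient("sk-ant-...");
 * ChatRequest request = ChatRequest.builder()
 *     .model("claude-3-haiku-20240307")
 *     .maxTokens(1024)
 *     .messages(Arrays.asList(new ChatMessage("user", "Hello")))
 *     .build();
 * ChatResponse response = client.chat(request);
 * }</pre>
 */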
public class ClaudeClient {
private static final String BASE_URL = "https://api.anthropic.com";
private static final Duration DEFAULT_TIMEOUT = Duration.ofSeconds(10);
private static final ObjectMapper mapper = defaultObjectMapper();
private final ClaudeApi api;
private final ExecutorService executorService;
public ClaudeClient(final String token) {
ObjectMapper mapper = defaultObjectMapper();
OkHttpClient client = defaultClient(token, DEFAULT_TIMEOUT);
Retrofit retrofit = defaultRetrofit(client, mapper, null);
this.api = retrofit.create(ClaudeApi.class);
this.executorService = client.dispatcher().executorService();
}
public ClaudeClient(final String token, final Duration timeout) {
ObjectMapper mapper = defaultObjectMapper();
OkHttpClient client = defaultClient(token, timeout);
Retrofit retrofit = defaultRetrofit(client, mapper, null);
this.api = retrofit.create(ClaudeApi.class);
this.executorService = client.dispatcher().executorService();
}
public ClaudeClient(final String token, final Duration timeout, String baseUrl) {
ObjectMapper mapper = defaultObjectMapper();
OkHttpClient client = defaultClient(token, timeout);
Retrofit retrofit = defaultRetrofit(client, mapper, baseUrl);
this.api = retrofit.create(ClaudeApi.class);
this.executorService = client.dispatcher().executorService();
}
public ClaudeClient(ClaudeApi api) {
this.api = api;
this.executorService = null;
}
public ClaudeClient(final String token, final Duration timeout, Proxy proxy) {
ObjectMapper mapper = defaultObjectMapper();
OkHttpClient httpClient = defaultClient(token, timeout)
.newBuilder()
.proxy(proxy)
.build();
Retrofit retrofit = defaultRetrofit(httpClient, mapper, null);
this.api = retrofit.create(ClaudeApi.class);
this.executorService = httpClient.dispatcher().executorService();
}
public ClaudeClient(final String token, final Duration timeout, Proxy proxy, String proxyUsername, String proxyPassword) {
Authenticator proxyAuthenticator = new Authenticator() {
@Override
public Request authenticate(Route route, Response response) throws IOException {
String credential = Credentials.basic(proxyUsername, proxyPassword);
return response.request().newBuilder()
.header("Proxy-Authorization", credential)
.build();
}
};
ObjectMapper mapper = defaultObjectMapper();
OkHttpClient httpClient = defaultClient(token, timeout)
.newBuilder()
.proxy(proxy)
.proxyAuthenticator(proxyAuthenticator)
.build();
Retrofit retrofit = defaultRetrofit(httpClient, mapper, null);
this.api = retrofit.create(ClaudeApi.class);
this.executorService = httpClient.dispatcher().executorService();
}
public ClaudeClient(final String token, final Duration timeout, Proxy proxy, Authenticator proxyAuthenticator) {
ObjectMapper mapper = defaultObjectMapper();
OkHttpClient httpClient = defaultClient(token, timeout)
.newBuilder()
.proxy(proxy)
.proxyAuthenticator(proxyAuthenticator)
.build();
Retrofit retrofit = defaultRetrofit(httpClient, mapper, null);
this.api = retrofit.create(ClaudeApi.class);
this.executorService = httpClient.dispatcher().executorService();
}
public static ObjectMapper defaultObjectMapper() {
ObjectMapper mapper = new ObjectMapper();
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
mapper.setPropertyNamingStrategy(PropertyNamingStrategy.SNAKE_CASE);
mapper.addMixIn(ChatFunction.class, ChatFunctionMixIn.class);
return mapper;
}
public static OkHttpClient defaultClient(String apiKey, Duration timeout) {
return new OkHttpClient.Builder()
.addInterceptor(new ClaudeAuthenticationInterceptor(apiKey))
.connectionPool(new ConnectionPool(5, 1, TimeUnit.SECONDS))
.readTimeout(timeout.toMillis(), TimeUnit.MILLISECONDS)
.build();
}
public static Retrofit defaultRetrofit(OkHttpClient client, ObjectMapper mapper, String baseUrl) {
if (baseUrl == null || "".equals(baseUrl)) {
baseUrl = BASE_URL;
}
return new Retrofit.Builder()
.baseUrl(baseUrl)
.client(client)
.addConverterFactory(JacksonConverterFactory.create(mapper))
.addCallAdapterFactory(RxJava2CallAdapterFactory.create())
.build();
}
public static <T> T execute(Single<T> apiCall) {
try {
return apiCall.blockingGet();
} catch (HttpException e) {
try {
if (e.response() == null || e.response().errorBody() == null) {
throw e;
}
String errorBody = e.response().errorBody().string();
ChatResponseError error = defaultObjectMapper().readValue(errorBody, ChatResponseError.class);
throw new VacSdkException("-1", "error", error);
} catch (IOException ex) {
// couldn't parse error
throw e;
}
}
}
public ChatResponse chat(ChatRequest request) {
return execute(api.chat(request));
}
public Flowable<StreamChatResponse> streamChat(ChatRequest request) {
request.setStream(true);
return stream(api.streamChat(request), StreamChatResponse.class);
}
public static <T> Flowable<T> stream(Call<ResponseBody> apiCall, Class<T> cl) {
// RxJava2 operators must never emit null, so empty SSE payloads are filtered
// out instead of being mapped to null (which would throw a NullPointerException).
return stream(apiCall)
.filter(sse -> sse.getData() != null && !"".equals(sse.getData()))
.map(sse -> mapper.readValue(sse.getData(), cl));
}
public static Flowable<SSE> stream(Call<ResponseBody> apiCall) {
return stream(apiCall, false);
}
public static Flowable<SSE> stream(Call<ResponseBody> apiCall, boolean emitDone) {
return Flowable.create(emitter -> apiCall.enqueue(new ResponseBodyCallback(emitter, emitDone)), BackpressureStrategy.BUFFER);
}
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/claude | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/claude/Interceptor/ClaudeAuthenticationInterceptor.java | package art.starrynift.claude.Interceptor;
import okhttp3.Interceptor;
import okhttp3.Request;
import okhttp3.Response;
import java.io.IOException;
import java.util.Objects;
/**
* OkHttp Interceptor that adds an authorization token header
*/
public class ClaudeAuthenticationInterceptor implements Interceptor {
private final String apiKey;
public ClaudeAuthenticationInterceptor(String apiKey) {
Objects.requireNonNull(apiKey, "Claude token required");
this.apiKey = apiKey;
}
@Override
public Response intercept(Chain chain) throws IOException {
Request request = chain.request()
.newBuilder()
.header("x-api-key", apiKey)
.header("anthropic-version", "2023-06-01")
.header("content-type", "application/json")
.build();
return chain.proceed(request);
}
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/claude | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/claude/api/ClaudeApi.java | package art.starrynift.claude.api;
import art.starrynift.claude.request.ChatRequest;
import art.starrynift.claude.response.ChatResponse;
import io.reactivex.Single;
import okhttp3.ResponseBody;
import retrofit2.Call;
import retrofit2.http.Body;
import retrofit2.http.Headers;
import retrofit2.http.POST;
import retrofit2.http.Streaming;
public interface ClaudeApi {
@Headers({"anthropic-version: 2023-06-01"})
@POST("v1/messages")
Single<ChatResponse> chat(@Body ChatRequest request);
@Headers({"anthropic-version: 2023-06-01"})
@Streaming
@POST("v1/messages")
Call<ResponseBody> streamChat(@Body ChatRequest request);
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/claude | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/claude/entity/ChatFunction.java | package art.starrynift.claude.entity;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.NonNull;
import java.util.function.Function;
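/**
 * Tool (function) definition passed to the model.
 *
 * A minimal builder sketch; {@code WeatherRequest} and {@code lookupWeather}
 * are hypothetical stand-ins for an application-defined parameter class and
 * handler:
 * <pre>{@code
 * ChatFunction fn = ChatFunction.builder()
 *     .name("get_weather")
 *     .description("Get the current weather for a city")
 *     .executor(WeatherRequest.class, req -> lookupWeather(req))
 *     .build();
 * }</pre>
 */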
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class ChatFunction {
@NonNull
private String name;
private String description;
@JsonProperty("input_schema")
private Class<?> parametersClass;
@JsonIgnore
private Function<Object, Object> executor;
// This hand-written builder takes precedence over the one Lombok's @Builder
// would otherwise generate: Lombok skips members that already exist.
public static Builder builder() {
return new Builder();
}
public static class Builder {
private String name;
private String description;
private Class<?> parameters;
private Function<Object, Object> executor;
public Builder name(String name) {
this.name = name;
return this;
}
public Builder description(String description) {
this.description = description;
return this;
}
public <T> Builder executor(Class<T> requestClass, Function<T, Object> executor) {
this.parameters = requestClass;
this.executor = (Function<Object, Object>) executor;
return this;
}
public ChatFunction build() {
ChatFunction chatFunction = new ChatFunction();
chatFunction.setName(name);
chatFunction.setDescription(description);
chatFunction.setParametersClass(parameters);
chatFunction.setExecutor(executor);
return chatFunction;
}
}
} |
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/claude | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/claude/entity/ChatFunctionMixIn.java | package art.starrynift.claude.entity;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
public abstract class ChatFunctionMixIn {
@JsonSerialize(using = ChatFunctionParametersSerializerAndDeserializer.Serializer.class)
@JsonDeserialize(using = ChatFunctionParametersSerializerAndDeserializer.Deserializer.class)
abstract Class<?> getParametersClass();
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/claude | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/claude/entity/ChatFunctionParametersSerializerAndDeserializer.java | package art.starrynift.claude.entity;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.JsonDeserializer;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.kjetland.jackson.jsonSchema.JsonSchemaConfig;
import com.kjetland.jackson.jsonSchema.JsonSchemaGenerator;
import java.io.IOException;
public class ChatFunctionParametersSerializerAndDeserializer {
private final static ObjectMapper MAPPER = new ObjectMapper();
private final static JsonSchemaConfig config = JsonSchemaConfig.vanillaJsonSchemaDraft4();
private final static JsonSchemaGenerator jsonSchemaGenerator = new JsonSchemaGenerator(MAPPER, config);
private ChatFunctionParametersSerializerAndDeserializer() {
}
public static class Serializer extends JsonSerializer<Class<?>> {
private Serializer() {
}
@Override
public void serialize(Class<?> value, JsonGenerator gen, SerializerProvider serializers) throws IOException {
if (value == null) {
gen.writeNull();
} else {
try {
JsonNode schema = jsonSchemaGenerator.generateJsonSchema(value);
gen.writeObject(schema);
} catch (Exception e) {
throw new RuntimeException("Failed to generate JSON Schema", e);
}
}
}
}
public static class Deserializer extends JsonDeserializer<Class<?>> {
private Deserializer() {
}
@Override
public Class<?> deserialize(JsonParser p, DeserializationContext ctxt) throws IOException {
// TODO: deserializing a JSON Schema back into a Class is not supported yet.
return null;
}
}
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/claude | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/claude/entity/ChatMessage.java | package art.starrynift.claude.entity;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import java.util.List;
// Superseded: the Claude module reuses art.starrynift.completion.chat.ChatMessage
// (see ChatRequest and FunctionExecutor), so this local variant stays commented out.
//@Data
//@Builder
//@AllArgsConstructor
//public class ChatMessage {
//
// private String role;
//
// private Object content;
//
// public ChatMessage(String role, String msg) {
// this.role = role;
// this.content = msg;
// }
//
// public ChatMessage(String role, List<ChatMessageContent> contents) {
// this.role = role;
// this.content = contents;
// }
//
//}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/claude | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/claude/entity/ChatMessageContent.java | package art.starrynift.claude.entity;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.JsonNode;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
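/**
 * A single content block inside a chat message (text, image, tool use or
 * tool result).
 *
 * A minimal sketch of building a plain text block:
 * <pre>{@code
 * ChatMessageContent text = ChatMessageContent.builder()
 *     .type("text")
 *     .text("Hello")
 *     .build();
 * }</pre>
 */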
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class ChatMessageContent {
private String type;
private String text;
private Object image_url; // snake_case name kept so Jackson maps the wire field without an annotation
private ContentSource source;
@JsonProperty("tool_use_id")
private String toolUseId;
private String content;
// only in function response
private String id;
private String name;
private JsonNode input;
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public static class ContentSource {
private String type;
@JsonProperty("media_type")
private String mediaType;
private String data;
}
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public static class ImageUrl {
private String url;
private String detail;
}
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/claude | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/claude/entity/ResponseBodyCallback.java | package art.starrynift.claude.entity;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import com.fasterxml.jackson.databind.ObjectMapper;
import art.starrynift.claude.ClaudeClient;
import art.starrynift.claude.error.ChatResponseError;
import art.starrynift.claude.exception.VacSdkException;
import io.reactivex.FlowableEmitter;
import okhttp3.ResponseBody;
import retrofit2.Call;
import retrofit2.Callback;
import retrofit2.HttpException;
import retrofit2.Response;
/**
* Callback to parse Server Sent Events (SSE) from raw InputStream and
* emit the events with io.reactivex.FlowableEmitter to allow streaming of
* SSE.
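*
* Expected wire format (events are separated by a blank line), e.g.:
* <pre>
* event: content_block_delta
* data: {"type":"content_block_delta","delta":{"type":"text_delta","text":"Hi"}}
* </pre>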
*/
public class ResponseBodyCallback implements Callback<ResponseBody> {
private static final ObjectMapper mapper = ClaudeClient.defaultObjectMapper();
private FlowableEmitter<SSE> emitter;
private boolean emitDone;
public ResponseBodyCallback(FlowableEmitter<SSE> emitter, boolean emitDone) {
this.emitter = emitter;
this.emitDone = emitDone;
}
@Override
public void onResponse(Call<ResponseBody> call, Response<ResponseBody> response) {
BufferedReader reader = null;
try {
if (!response.isSuccessful()) {
HttpException e = new HttpException(response);
ResponseBody errorBody = response.errorBody();
if (errorBody == null) {
throw e;
} else {
ChatResponseError error = mapper.readValue(
errorBody.string(),
ChatResponseError.class
);
throw new VacSdkException("-1", "stream error", error);
}
}
InputStream in = response.body().byteStream();
reader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8));
String line;
SSE sse = null;
while (!emitter.isCancelled() && (line = reader.readLine()) != null) {
if (line.startsWith("data:")) {
String data = line.substring(6).trim();
sse = new SSE(data);
} else if (line.equals("") && sse != null) {
emitter.onNext(sse);
sse = null;
} else if (line.startsWith("event:")) {
String event = line.substring(7).trim();
}else {
throw new SSEFormatException("Invalid sse format! " + line);
}
}
emitter.onComplete();
} catch (Throwable t) {
onFailure(call, t);
} finally {
if (reader != null) {
try {
reader.close();
} catch (IOException e) {
// do nothing
}
}
}
}
@Override
public void onFailure(Call<ResponseBody> call, Throwable t) {
emitter.onError(t);
}
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/claude | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/claude/entity/SSE.java | package art.starrynift.claude.entity;
/**
* Simple Server Sent Event representation
*/
public class SSE {
// Sentinel values marking the end of a stream: "[DONE]" in the OpenAI-style
// protocol, "message_stop" as the Anthropic terminal event name.
private static final String DONE_DATA = "[DONE]";
private static final String DONE_EVENT = "message_stop";
private final String data;
public SSE(String data){
this.data = data;
}
public String getData(){
return this.data;
}
public byte[] toBytes(){
return String.format("data: %s\n\n", this.data).getBytes();
}
} |
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/claude | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/claude/entity/SSEFormatException.java | package art.starrynift.claude.entity;
/**
* Exception indicating a SSE format error
*/
public class SSEFormatException extends Exception {
public SSEFormatException(String msg){
super(msg);
}
} |
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/claude | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/claude/error/ChatResponseError.java | package art.starrynift.claude.error;
import lombok.Data;
@Data
public class ChatResponseError {
private String type;
private ChatResponseErrorDetail error;
@Data
public static class ChatResponseErrorDetail{
private String type;
private String message;
}
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/claude | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/claude/exception/VacSdkException.java | package art.starrynift.claude.exception;
import java.io.Serializable;
import art.starrynift.claude.error.ChatResponseError;
import lombok.Data;
@Data
public class VacSdkException extends RuntimeException implements Serializable {
private String code;
private String msg;
private ChatResponseError detail;
public VacSdkException(String code, String msg) {
super(msg);
this.code = code;
this.msg = msg;
}
public VacSdkException(String code, String msg, ChatResponseError detail) {
super(msg);
this.code = code;
this.msg = msg;
this.detail = detail;
}
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/claude | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/claude/request/ChatRequest.java | package art.starrynift.claude.request;
import java.util.List;
import java.util.Map;
import com.fasterxml.jackson.annotation.JsonProperty;
import art.starrynift.claude.entity.ChatFunction;
import art.starrynift.completion.chat.ChatMessage;
import lombok.Builder;
import lombok.Data;
import lombok.NonNull;
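/**
 * Request body for the Anthropic Messages API. The only hard requirement is
 * {@code maxTokens}, which the builder enforces via {@code @NonNull}.
 */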
@Data
@Builder
public class ChatRequest {
private String model;
private List<ChatMessage> messages;
private String system;
@NonNull
@JsonProperty("max_tokens")
private Integer maxTokens;
private Map<String, Object> metadata;
@JsonProperty("stop_sequences")
private List<String> stopSequences;
private boolean stream;
private float temperature;
@JsonProperty("top_p")
private float topP;
@JsonProperty("top_k")
private Integer topK;
private List<ChatFunction> tools;
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/claude | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/claude/response/ChatResponse.java | package art.starrynift.claude.response;
import java.util.List;
import java.util.Map;
import com.fasterxml.jackson.annotation.JsonProperty;
import art.starrynift.claude.entity.ChatMessageContent;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
@Data
@Builder
@AllArgsConstructor
@NoArgsConstructor
public class ChatResponse {
private String id;
private String type;
private String role;
private List<ChatMessageContent> content;
private String model;
@JsonProperty("stop_reason")
private String stopReason;
@JsonProperty("stop_sequence")
private String stopSequence;
private Map<String, Object> usage;
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/claude | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/claude/response/StreamChatResponse.java | package art.starrynift.claude.response;
import art.starrynift.claude.entity.ChatMessageContent;
import art.starrynift.claude.error.ChatResponseError;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
@Data
@Builder
@AllArgsConstructor
@NoArgsConstructor
public class StreamChatResponse {
private String type;
private ChatMessageContent delta;
private ChatResponseError.ChatResponseErrorDetail error;
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/claude | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/claude/service/FunctionExecutor.java | package art.starrynift.claude.service;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.fasterxml.jackson.databind.node.TextNode;
import art.starrynift.claude.entity.ChatFunction;
import art.starrynift.claude.entity.ChatMessageContent;
import art.starrynift.completion.chat.ChatMessage;
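/**
 * Dispatches tool-use requests from the model to registered ChatFunction
 * handlers and converts results back into chat messages.
 *
 * A minimal sketch of the tool-call round trip; {@code functions} and
 * {@code messages} come from application code, and {@code response} is a
 * prior model reply:
 * <pre>{@code
 * FunctionExecutor executor = new FunctionExecutor(functions);
 * for (ChatMessageContent block : response.getContent()) {
 *     if ("tool_use".equals(block.getType())) {
 *         messages.add(executor.executeAndConvertToMessageHandlingExceptions(block));
 *     }
 * }
 * }</pre>
 */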
public class FunctionExecutor {
public static final String FUNCTION_ROLE = "tool";
private ObjectMapper MAPPER = new ObjectMapper();
private final Map<String, ChatFunction> FUNCTIONS = new HashMap<>();
public FunctionExecutor(List<ChatFunction> functions) {
setFunctions(functions);
}
public FunctionExecutor(List<ChatFunction> functions, ObjectMapper objectMapper) {
setFunctions(functions);
setObjectMapper(objectMapper);
}
public Optional<ChatMessage> executeAndConvertToMessageSafely(ChatMessageContent call) {
try {
return Optional.ofNullable(executeAndConvertToMessage(call));
} catch (Exception ignored) {
return Optional.empty();
}
}
public ChatMessage executeAndConvertToMessageHandlingExceptions(ChatMessageContent call) {
try {
return executeAndConvertToMessage(call);
} catch (Exception exception) {
exception.printStackTrace();
return convertExceptionToMessage(exception, call.getId());
}
}
public ChatMessage convertExceptionToMessage(Exception exception, String toolCallId) {
ChatMessageContent content = new ChatMessageContent();
content.setType("tool_result");
content.setToolUseId(toolCallId);
String error = exception.getMessage() == null ? exception.toString() : exception.getMessage();
content.setContent("{\"error\": \"" + error + "\"}");
return new ChatMessage("user", Arrays.asList(content));
}
public ChatMessage executeAndConvertToMessage(ChatMessageContent call) {
ChatMessageContent content = new ChatMessageContent();
content.setType("tool_result");
content.setToolUseId(call.getId());
content.setContent(executeAndConvertToJson(call).toPrettyString());
return new ChatMessage("user", Arrays.asList(content));
}
public JsonNode executeAndConvertToJson(ChatMessageContent call) {
try {
Object execution = execute(call);
if (execution instanceof TextNode) {
JsonNode objectNode = MAPPER.readTree(((TextNode) execution).asText());
if (objectNode.isMissingNode())
return (JsonNode) execution;
return objectNode;
}
if (execution instanceof ObjectNode) {
return (JsonNode) execution;
}
if (execution instanceof String) {
JsonNode objectNode = MAPPER.readTree((String) execution);
if (objectNode.isMissingNode())
throw new RuntimeException("Parsing exception");
return objectNode;
}
return MAPPER.readValue(MAPPER.writeValueAsString(execution), JsonNode.class);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
@SuppressWarnings("unchecked")
public <T> T execute(ChatMessageContent call) {
ChatFunction function = FUNCTIONS.get(call.getName());
Object obj;
try {
JsonNode arguments = call.getInput();
obj = MAPPER.readValue(arguments instanceof TextNode ? arguments.asText() : arguments.toPrettyString(), function.getParametersClass());
} catch (JsonProcessingException e) {
obj = null;
}
return (T) function.getExecutor().apply(obj);
}
public List<ChatFunction> getFunctions() {
return new ArrayList<>(FUNCTIONS.values());
}
public void setFunctions(List<ChatFunction> functions) {
this.FUNCTIONS.clear();
functions.forEach(f -> this.FUNCTIONS.put(f.getName(), f));
}
public void setObjectMapper(ObjectMapper objectMapper) {
this.MAPPER = objectMapper;
}
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/client/AuthenticationInterceptor.java | package art.starrynift.client;
import java.io.IOException;
import okhttp3.Interceptor;
import okhttp3.Request;
import okhttp3.Response;
/**
* OkHttp Interceptor that adds an authorization token header
*/
public class AuthenticationInterceptor implements Interceptor {
private final String token;
protected AuthenticationInterceptor(String token) {
this.token = token;
}
@Override
public Response intercept(Chain chain) throws IOException {
Request request = chain.request()
.newBuilder()
.header("Authorization", "Bearer " + token)
.build();
return chain.proceed(request);
}
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/client/DeepseekApi.java | package art.starrynift.client;
import art.starrynift.DeleteResult;
import art.starrynift.OpenAiResponse;
import art.starrynift.assistants.*;
import art.starrynift.audio.CreateSpeechRequest;
import art.starrynift.audio.TranscriptionResult;
import art.starrynift.audio.TranslationResult;
import art.starrynift.billing.BillingUsage;
import art.starrynift.billing.Subscription;
import art.starrynift.completion.CompletionRequest;
import art.starrynift.completion.CompletionResult;
import art.starrynift.completion.chat.ChatCompletionRequest;
import art.starrynift.completion.chat.ChatCompletionResult;
import art.starrynift.edit.EditRequest;
import art.starrynift.edit.EditResult;
import art.starrynift.embedding.EmbeddingRequest;
import art.starrynift.embedding.EmbeddingResult;
import art.starrynift.engine.Engine;
import art.starrynift.file.File;
import art.starrynift.fine_tuning.FineTuningEvent;
import art.starrynift.fine_tuning.FineTuningJob;
import art.starrynift.fine_tuning.FineTuningJobRequest;
import art.starrynift.image.CreateImageRequest;
import art.starrynift.image.ImageResult;
import art.starrynift.messages.Message;
import art.starrynift.messages.MessageFile;
import art.starrynift.messages.MessageRequest;
import art.starrynift.messages.ModifyMessageRequest;
import art.starrynift.model.Model;
import art.starrynift.moderation.ModerationRequest;
import art.starrynift.moderation.ModerationResult;
import art.starrynift.runs.*;
import art.starrynift.threads.ThreadRequest;
import io.reactivex.Single;
import okhttp3.MultipartBody;
import okhttp3.RequestBody;
import okhttp3.ResponseBody;
import retrofit2.Call;
import retrofit2.http.*;
import java.time.LocalDate;
import java.util.Map;
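/**
 * Retrofit definition of the DeepSeek HTTP API. Note that chat completions
 * are served at {@code /chat/completions} (no {@code /v1} prefix), matching
 * DeepSeek's endpoint layout, while the remaining routes mirror the
 * OpenAI-compatible paths in {@link OpenAiApi}.
 */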
public interface DeepseekApi {
@GET("v1/models")
Single<OpenAiResponse<Model>> listModels();
@GET("/v1/models/{model_id}")
Single<Model> getModel(@Path("model_id") String modelId);
@POST("/v1/completions")
Single<CompletionResult> createCompletion(@Body CompletionRequest request);
@Streaming
@POST("/v1/completions")
Call<ResponseBody> createCompletionStream(@Body CompletionRequest request);
@POST("/chat/completions")
Single<ChatCompletionResult> createChatCompletion(@Body ChatCompletionRequest request);
@Streaming
@POST("/chat/completions")
Call<ResponseBody> createChatCompletionStream(@Body ChatCompletionRequest request);
@Deprecated
@POST("/v1/engines/{engine_id}/completions")
Single<CompletionResult> createCompletion(@Path("engine_id") String engineId, @Body CompletionRequest request);
@POST("/v1/edits")
Single<EditResult> createEdit(@Body EditRequest request);
@Deprecated
@POST("/v1/engines/{engine_id}/edits")
Single<EditResult> createEdit(@Path("engine_id") String engineId, @Body EditRequest request);
@POST("/v1/embeddings")
Single<EmbeddingResult> createEmbeddings(@Body EmbeddingRequest request);
@Deprecated
@POST("/v1/engines/{engine_id}/embeddings")
Single<EmbeddingResult> createEmbeddings(@Path("engine_id") String engineId, @Body EmbeddingRequest request);
@GET("/v1/files")
Single<OpenAiResponse<File>> listFiles();
@Multipart
@POST("/v1/files")
Single<File> uploadFile(@Part("purpose") RequestBody purpose, @Part MultipartBody.Part file);
@DELETE("/v1/files/{file_id}")
Single<DeleteResult> deleteFile(@Path("file_id") String fileId);
@GET("/v1/files/{file_id}")
Single<File> retrieveFile(@Path("file_id") String fileId);
@Streaming
@GET("/v1/files/{file_id}/content")
Single<ResponseBody> retrieveFileContent(@Path("file_id") String fileId);
@POST("/v1/fine_tuning/jobs")
Single<FineTuningJob> createFineTuningJob(@Body FineTuningJobRequest request);
@GET("/v1/fine_tuning/jobs")
Single<OpenAiResponse<FineTuningJob>> listFineTuningJobs();
@GET("/v1/fine_tuning/jobs/{fine_tuning_job_id}")
Single<FineTuningJob> retrieveFineTuningJob(@Path("fine_tuning_job_id") String fineTuningJobId);
@POST("/v1/fine_tuning/jobs/{fine_tuning_job_id}/cancel")
Single<FineTuningJob> cancelFineTuningJob(@Path("fine_tuning_job_id") String fineTuningJobId);
@GET("/v1/fine_tuning/jobs/{fine_tuning_job_id}/events")
Single<OpenAiResponse<FineTuningEvent>> listFineTuningJobEvents(@Path("fine_tuning_job_id") String fineTuningJobId);
@POST("/v1/completions")
Single<CompletionResult> createFineTuneCompletion(@Body CompletionRequest request);
@DELETE("/v1/models/{fine_tune_id}")
Single<DeleteResult> deleteFineTune(@Path("fine_tune_id") String fineTuneId);
@POST("/v1/images/generations")
Single<ImageResult> createImage(@Body CreateImageRequest request);
@POST("/v1/images/edits")
Single<ImageResult> createImageEdit(@Body RequestBody requestBody);
@POST("/v1/images/variations")
Single<ImageResult> createImageVariation(@Body RequestBody requestBody);
@POST("/v1/audio/transcriptions")
Single<TranscriptionResult> createTranscription(@Body RequestBody requestBody);
@POST("/v1/audio/translations")
Single<TranslationResult> createTranslation(@Body RequestBody requestBody);
@POST("/v1/audio/speech")
Single<ResponseBody> createSpeech(@Body CreateSpeechRequest requestBody);
@POST("/v1/moderations")
Single<ModerationResult> createModeration(@Body ModerationRequest request);
@Deprecated
@GET("v1/engines")
Single<OpenAiResponse<Engine>> getEngines();
@Deprecated
@GET("/v1/engines/{engine_id}")
Single<Engine> getEngine(@Path("engine_id") String engineId);
/**
* Account information inquiry: contains the total amount (in US dollars) and other details.
*
* @return Account subscription information.
*/
@Deprecated
@GET("v1/dashboard/billing/subscription")
Single<Subscription> subscription();
/**
* Queries the account's API consumption amount.
* totalUsage = total amount used by the account (in US cents).
*
* @param startDate start date of the query window
* @param endDate end date of the query window
* @return Consumption amount information.
*/
@Deprecated
@GET("v1/dashboard/billing/usage")
Single<BillingUsage> billingUsage(@Query("start_date") LocalDate startDate, @Query("end_date") LocalDate endDate);
@Headers({"OpenAI-Beta: assistants=v1"})
@POST("/v1/assistants")
Single<Assistant> createAssistant(@Body AssistantRequest request);
@Headers({"OpenAI-Beta: assistants=v1"})
@GET("/v1/assistants/{assistant_id}")
Single<Assistant> retrieveAssistant(@Path("assistant_id") String assistantId);
@Headers({"OpenAI-Beta: assistants=v1"})
@POST("/v1/assistants/{assistant_id}")
Single<Assistant> modifyAssistant(@Path("assistant_id") String assistantId, @Body ModifyAssistantRequest request);
@Headers({"OpenAI-Beta: assistants=v1"})
@DELETE("/v1/assistants/{assistant_id}")
Single<DeleteResult> deleteAssistant(@Path("assistant_id") String assistantId);
@Headers({"OpenAI-Beta: assistants=v1"})
@GET("/v1/assistants")
Single<OpenAiResponse<Assistant>> listAssistants(@QueryMap Map<String, Object> filterRequest);
@Headers({"OpenAI-Beta: assistants=v1"})
@POST("/v1/assistants/{assistant_id}/files")
Single<AssistantFile> createAssistantFile(@Path("assistant_id") String assistantId, @Body AssistantFileRequest fileRequest);
@Headers({"OpenAI-Beta: assistants=v1"})
@GET("/v1/assistants/{assistant_id}/files/{file_id}")
Single<AssistantFile> retrieveAssistantFile(@Path("assistant_id") String assistantId, @Path("file_id") String fileId);
@Headers({"OpenAI-Beta: assistants=v1"})
@DELETE("/v1/assistants/{assistant_id}/files/{file_id}")
Single<DeleteResult> deleteAssistantFile(@Path("assistant_id") String assistantId, @Path("file_id") String fileId);
@Headers({"OpenAI-Beta: assistants=v1"})
@GET("/v1/assistants/{assistant_id}/files")
Single<OpenAiResponse<AssistantFile>> listAssistantFiles(@Path("assistant_id") String assistantId, @QueryMap Map<String, Object> filterRequest);
@Headers({"OpenAI-Beta: assistants=v1"})
@POST("/v1/threads")
Single<Thread> createThread(@Body ThreadRequest request);
@Headers({"OpenAI-Beta: assistants=v1"})
@GET("/v1/threads/{thread_id}")
Single<Thread> retrieveThread(@Path("thread_id") String threadId);
@Headers({"OpenAI-Beta: assistants=v1"})
@POST("/v1/threads/{thread_id}")
Single<Thread> modifyThread(@Path("thread_id") String threadId, @Body ThreadRequest request);
@Headers({"OpenAI-Beta: assistants=v1"})
@DELETE("/v1/threads/{thread_id}")
Single<DeleteResult> deleteThread(@Path("thread_id") String threadId);
@Headers({"OpenAI-Beta: assistants=v1"})
@POST("/v1/threads/{thread_id}/messages")
Single<Message> createMessage(@Path("thread_id") String threadId, @Body MessageRequest request);
@Headers({"OpenAI-Beta: assistants=v1"})
@GET("/v1/threads/{thread_id}/messages/{message_id}")
Single<Message> retrieveMessage(@Path("thread_id") String threadId, @Path("message_id") String messageId);
@Headers({"OpenAI-Beta: assistants=v1"})
@POST("/v1/threads/{thread_id}/messages/{message_id}")
Single<Message> modifyMessage(@Path("thread_id") String threadId, @Path("message_id") String messageId, @Body ModifyMessageRequest request);
@Headers({"OpenAI-Beta: assistants=v1"})
@GET("/v1/threads/{thread_id}/messages")
Single<OpenAiResponse<Message>> listMessages(@Path("thread_id") String threadId);
@Headers({"OpenAI-Beta: assistants=v1"})
@GET("/v1/threads/{thread_id}/messages")
Single<OpenAiResponse<Message>> listMessages(@Path("thread_id") String threadId, @QueryMap Map<String, Object> filterRequest);
@Headers({"OpenAI-Beta: assistants=v1"})
@GET("/v1/threads/{thread_id}/messages/{message_id}/files/{file_id}")
Single<MessageFile> retrieveMessageFile(@Path("thread_id") String threadId, @Path("message_id") String messageId, @Path("file_id") String fileId);
@Headers({"OpenAI-Beta: assistants=v1"})
@GET("/v1/threads/{thread_id}/messages/{message_id}/files")
Single<OpenAiResponse<MessageFile>> listMessageFiles(@Path("thread_id") String threadId, @Path("message_id") String messageId);
@Headers({"OpenAI-Beta: assistants=v1"})
@GET("/v1/threads/{thread_id}/messages/{message_id}/files")
Single<OpenAiResponse<MessageFile>> listMessageFiles(@Path("thread_id") String threadId, @Path("message_id") String messageId, @QueryMap Map<String, Object> filterRequest);
@Headers("OpenAI-Beta: assistants=v1")
@POST("/v1/threads/{thread_id}/runs")
Single<Run> createRun(@Path("thread_id") String threadId, @Body RunCreateRequest runCreateRequest);
@Headers("OpenAI-Beta: assistants=v1")
@GET("/v1/threads/{thread_id}/runs/{run_id}")
Single<Run> retrieveRun(@Path("thread_id") String threadId, @Path("run_id") String runId);
@Headers("OpenAI-Beta: assistants=v1")
@POST("/v1/threads/{thread_id}/runs/{run_id}")
Single<Run> modifyRun(@Path("thread_id") String threadId, @Path("run_id") String runId, @Body Map<String, String> metadata);
@Headers("OpenAI-Beta: assistants=v1")
@GET("/v1/threads/{thread_id}/runs")
Single<OpenAiResponse<Run>> listRuns(@Path("thread_id") String threadId, @QueryMap Map<String, String> listSearchParameters);
@Headers("OpenAI-Beta: assistants=v1")
@POST("/v1/threads/{thread_id}/runs/{run_id}/submit_tool_outputs")
Single<Run> submitToolOutputs(@Path("thread_id") String threadId, @Path("run_id") String runId, @Body SubmitToolOutputsRequest submitToolOutputsRequest);
@Headers("OpenAI-Beta: assistants=v1")
@POST("/v1/threads/{thread_id}/runs/{run_id}/cancel")
Single<Run> cancelRun(@Path("thread_id") String threadId, @Path("run_id") String runId);
@Headers("OpenAI-Beta: assistants=v1")
@POST("/v1/threads/runs")
Single<Run> createThreadAndRun(@Body CreateThreadAndRunRequest createThreadAndRunRequest);
@Headers("OpenAI-Beta: assistants=v1")
@GET("/v1/threads/{thread_id}/runs/{run_id}/steps/{step_id}")
Single<RunStep> retrieveRunStep(@Path("thread_id") String threadId, @Path("run_id") String runId, @Path("step_id") String stepId);
@Headers("OpenAI-Beta: assistants=v1")
@GET("/v1/threads/{thread_id}/runs/{run_id}/steps")
Single<OpenAiResponse<RunStep>> listRunSteps(@Path("thread_id") String threadId, @Path("run_id") String runId, @QueryMap Map<String, String> listSearchParameters);
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/client/OpenAiApi.java | package art.starrynift.client;
import java.time.LocalDate;
import java.util.Map;
import art.starrynift.DeleteResult;
import art.starrynift.OpenAiResponse;
import art.starrynift.assistants.Assistant;
import art.starrynift.assistants.AssistantFile;
import art.starrynift.assistants.AssistantFileRequest;
import art.starrynift.assistants.AssistantRequest;
import art.starrynift.assistants.ModifyAssistantRequest;
import art.starrynift.audio.CreateSpeechRequest;
import art.starrynift.audio.TranscriptionResult;
import art.starrynift.audio.TranslationResult;
import art.starrynift.billing.BillingUsage;
import art.starrynift.billing.Subscription;
import art.starrynift.completion.CompletionRequest;
import art.starrynift.completion.CompletionResult;
import art.starrynift.completion.chat.ChatCompletionRequest;
import art.starrynift.completion.chat.ChatCompletionResult;
import art.starrynift.edit.EditRequest;
import art.starrynift.edit.EditResult;
import art.starrynift.embedding.EmbeddingRequest;
import art.starrynift.embedding.EmbeddingResult;
import art.starrynift.engine.Engine;
import art.starrynift.file.File;
import art.starrynift.fine_tuning.FineTuningEvent;
import art.starrynift.fine_tuning.FineTuningJob;
import art.starrynift.fine_tuning.FineTuningJobRequest;
import art.starrynift.image.CreateImageRequest;
import art.starrynift.image.ImageResult;
import art.starrynift.messages.Message;
import art.starrynift.messages.MessageFile;
import art.starrynift.messages.MessageRequest;
import art.starrynift.messages.ModifyMessageRequest;
import art.starrynift.model.Model;
import art.starrynift.moderation.ModerationRequest;
import art.starrynift.moderation.ModerationResult;
import art.starrynift.runs.CreateThreadAndRunRequest;
import art.starrynift.runs.Run;
import art.starrynift.runs.RunCreateRequest;
import art.starrynift.runs.RunStep;
import art.starrynift.runs.SubmitToolOutputsRequest;
import art.starrynift.threads.ThreadRequest;
import io.reactivex.Single;
import okhttp3.MultipartBody;
import okhttp3.RequestBody;
import okhttp3.ResponseBody;
import retrofit2.Call;
import retrofit2.http.Body;
import retrofit2.http.DELETE;
import retrofit2.http.GET;
import retrofit2.http.Headers;
import retrofit2.http.Multipart;
import retrofit2.http.POST;
import retrofit2.http.Part;
import retrofit2.http.Path;
import retrofit2.http.Query;
import retrofit2.http.QueryMap;
import retrofit2.http.Streaming;
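/**
 * Retrofit definition of the OpenAI HTTP API.
 *
 * A minimal wiring sketch, assuming a Retrofit instance configured with a
 * Jackson converter and RxJava2 adapter (as ClaudeClient.defaultRetrofit
 * does for the Claude API):
 * <pre>{@code
 * OpenAiApi api = retrofit.create(OpenAiApi.class);
 * OpenAiResponse<Model> models = api.listModels().blockingGet();
 * }</pre>
 */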
public interface OpenAiApi {
@GET("v1/models")
Single<OpenAiResponse<Model>> listModels();
@GET("/v1/models/{model_id}")
Single<Model> getModel(@Path("model_id") String modelId);
@POST("/v1/completions")
Single<CompletionResult> createCompletion(@Body CompletionRequest request);
@Streaming
@POST("/v1/completions")
Call<ResponseBody> createCompletionStream(@Body CompletionRequest request);
@POST("/v1/chat/completions")
Single<ChatCompletionResult> createChatCompletion(@Body ChatCompletionRequest request);
@Streaming
@POST("/v1/chat/completions")
Call<ResponseBody> createChatCompletionStream(@Body ChatCompletionRequest request);
@Deprecated
@POST("/v1/engines/{engine_id}/completions")
Single<CompletionResult> createCompletion(@Path("engine_id") String engineId, @Body CompletionRequest request);
@POST("/v1/edits")
Single<EditResult> createEdit(@Body EditRequest request);
@Deprecated
@POST("/v1/engines/{engine_id}/edits")
Single<EditResult> createEdit(@Path("engine_id") String engineId, @Body EditRequest request);
@POST("/v1/embeddings")
Single<EmbeddingResult> createEmbeddings(@Body EmbeddingRequest request);
@Deprecated
@POST("/v1/engines/{engine_id}/embeddings")
Single<EmbeddingResult> createEmbeddings(@Path("engine_id") String engineId, @Body EmbeddingRequest request);
@GET("/v1/files")
Single<OpenAiResponse<File>> listFiles();
@Multipart
@POST("/v1/files")
Single<File> uploadFile(@Part("purpose") RequestBody purpose, @Part MultipartBody.Part file);
@DELETE("/v1/files/{file_id}")
Single<DeleteResult> deleteFile(@Path("file_id") String fileId);
@GET("/v1/files/{file_id}")
Single<File> retrieveFile(@Path("file_id") String fileId);
@Streaming
@GET("/v1/files/{file_id}/content")
Single<ResponseBody> retrieveFileContent(@Path("file_id") String fileId);
@POST("/v1/fine_tuning/jobs")
Single<FineTuningJob> createFineTuningJob(@Body FineTuningJobRequest request);
@GET("/v1/fine_tuning/jobs")
Single<OpenAiResponse<FineTuningJob>> listFineTuningJobs();
@GET("/v1/fine_tuning/jobs/{fine_tuning_job_id}")
Single<FineTuningJob> retrieveFineTuningJob(@Path("fine_tuning_job_id") String fineTuningJobId);
@POST("/v1/fine_tuning/jobs/{fine_tuning_job_id}/cancel")
Single<FineTuningJob> cancelFineTuningJob(@Path("fine_tuning_job_id") String fineTuningJobId);
@GET("/v1/fine_tuning/jobs/{fine_tuning_job_id}/events")
Single<OpenAiResponse<FineTuningEvent>> listFineTuningJobEvents(@Path("fine_tuning_job_id") String fineTuningJobId);
@POST("/v1/completions")
Single<CompletionResult> createFineTuneCompletion(@Body CompletionRequest request);
@DELETE("/v1/models/{fine_tune_id}")
Single<DeleteResult> deleteFineTune(@Path("fine_tune_id") String fineTuneId);
@POST("/v1/images/generations")
Single<ImageResult> createImage(@Body CreateImageRequest request);
@POST("/v1/images/edits")
Single<ImageResult> createImageEdit(@Body RequestBody requestBody);
@POST("/v1/images/variations")
Single<ImageResult> createImageVariation(@Body RequestBody requestBody);
@POST("/v1/audio/transcriptions")
Single<TranscriptionResult> createTranscription(@Body RequestBody requestBody);
@POST("/v1/audio/translations")
Single<TranslationResult> createTranslation(@Body RequestBody requestBody);
@POST("/v1/audio/speech")
Single<ResponseBody> createSpeech(@Body CreateSpeechRequest requestBody);
@POST("/v1/moderations")
Single<ModerationResult> createModeration(@Body ModerationRequest request);
@Deprecated
@GET("v1/engines")
Single<OpenAiResponse<Engine>> getEngines();
@Deprecated
@GET("/v1/engines/{engine_id}")
Single<Engine> getEngine(@Path("engine_id") String engineId);
/**
* Account information inquiry: contains the total amount (in US dollars) and other details.
*
* @return Account subscription information.
*/
@Deprecated
@GET("v1/dashboard/billing/subscription")
Single<Subscription> subscription();
/**
* Queries the account's API consumption amount.
* totalUsage = total amount used by the account (in US cents).
*
* @param startDate start date of the query window
* @param endDate end date of the query window
* @return Consumption amount information.
*/
@Deprecated
@GET("v1/dashboard/billing/usage")
Single<BillingUsage> billingUsage(@Query("start_date") LocalDate startDate, @Query("end_date") LocalDate endDate);
@Headers({"OpenAI-Beta: assistants=v1"})
@POST("/v1/assistants")
Single<Assistant> createAssistant(@Body AssistantRequest request);
@Headers({"OpenAI-Beta: assistants=v1"})
@GET("/v1/assistants/{assistant_id}")
Single<Assistant> retrieveAssistant(@Path("assistant_id") String assistantId);
@Headers({"OpenAI-Beta: assistants=v1"})
@POST("/v1/assistants/{assistant_id}")
Single<Assistant> modifyAssistant(@Path("assistant_id") String assistantId, @Body ModifyAssistantRequest request);
@Headers({"OpenAI-Beta: assistants=v1"})
@DELETE("/v1/assistants/{assistant_id}")
Single<DeleteResult> deleteAssistant(@Path("assistant_id") String assistantId);
@Headers({"OpenAI-Beta: assistants=v1"})
@GET("/v1/assistants")
Single<OpenAiResponse<Assistant>> listAssistants(@QueryMap Map<String, Object> filterRequest);
@Headers({"OpenAI-Beta: assistants=v1"})
@POST("/v1/assistants/{assistant_id}/files")
Single<AssistantFile> createAssistantFile(@Path("assistant_id") String assistantId, @Body AssistantFileRequest fileRequest);
@Headers({"OpenAI-Beta: assistants=v1"})
@GET("/v1/assistants/{assistant_id}/files/{file_id}")
Single<AssistantFile> retrieveAssistantFile(@Path("assistant_id") String assistantId, @Path("file_id") String fileId);
@Headers({"OpenAI-Beta: assistants=v1"})
@DELETE("/v1/assistants/{assistant_id}/files/{file_id}")
Single<DeleteResult> deleteAssistantFile(@Path("assistant_id") String assistantId, @Path("file_id") String fileId);
@Headers({"OpenAI-Beta: assistants=v1"})
@GET("/v1/assistants/{assistant_id}/files")
Single<OpenAiResponse<AssistantFile>> listAssistantFiles(@Path("assistant_id") String assistantId, @QueryMap Map<String, Object> filterRequest);
@Headers({"OpenAI-Beta: assistants=v1"})
@POST("/v1/threads")
Single<Thread> createThread(@Body ThreadRequest request);
@Headers({"OpenAI-Beta: assistants=v1"})
@GET("/v1/threads/{thread_id}")
Single<Thread> retrieveThread(@Path("thread_id") String threadId);
@Headers({"OpenAI-Beta: assistants=v1"})
@POST("/v1/threads/{thread_id}")
Single<Thread> modifyThread(@Path("thread_id") String threadId, @Body ThreadRequest request);
@Headers({"OpenAI-Beta: assistants=v1"})
@DELETE("/v1/threads/{thread_id}")
Single<DeleteResult> deleteThread(@Path("thread_id") String threadId);
@Headers({"OpenAI-Beta: assistants=v1"})
@POST("/v1/threads/{thread_id}/messages")
Single<Message> createMessage(@Path("thread_id") String threadId, @Body MessageRequest request);
@Headers({"OpenAI-Beta: assistants=v1"})
@GET("/v1/threads/{thread_id}/messages/{message_id}")
Single<Message> retrieveMessage(@Path("thread_id") String threadId, @Path("message_id") String messageId);
@Headers({"OpenAI-Beta: assistants=v1"})
@POST("/v1/threads/{thread_id}/messages/{message_id}")
Single<Message> modifyMessage(@Path("thread_id") String threadId, @Path("message_id") String messageId, @Body ModifyMessageRequest request);
@Headers({"OpenAI-Beta: assistants=v1"})
@GET("/v1/threads/{thread_id}/messages")
Single<OpenAiResponse<Message>> listMessages(@Path("thread_id") String threadId);
@Headers({"OpenAI-Beta: assistants=v1"})
@GET("/v1/threads/{thread_id}/messages")
Single<OpenAiResponse<Message>> listMessages(@Path("thread_id") String threadId, @QueryMap Map<String, Object> filterRequest);
@Headers({"OpenAI-Beta: assistants=v1"})
@GET("/v1/threads/{thread_id}/messages/{message_id}/files/{file_id}")
Single<MessageFile> retrieveMessageFile(@Path("thread_id") String threadId, @Path("message_id") String messageId, @Path("file_id") String fileId);
@Headers({"OpenAI-Beta: assistants=v1"})
@GET("/v1/threads/{thread_id}/messages/{message_id}/files")
Single<OpenAiResponse<MessageFile>> listMessageFiles(@Path("thread_id") String threadId, @Path("message_id") String messageId);
@Headers({"OpenAI-Beta: assistants=v1"})
@GET("/v1/threads/{thread_id}/messages/{message_id}/files")
Single<OpenAiResponse<MessageFile>> listMessageFiles(@Path("thread_id") String threadId, @Path("message_id") String messageId, @QueryMap Map<String, Object> filterRequest);
@Headers("OpenAI-Beta: assistants=v1")
@POST("/v1/threads/{thread_id}/runs")
Single<Run> createRun(@Path("thread_id") String threadId, @Body RunCreateRequest runCreateRequest);
@Headers("OpenAI-Beta: assistants=v1")
@GET("/v1/threads/{thread_id}/runs/{run_id}")
Single<Run> retrieveRun(@Path("thread_id") String threadId, @Path("run_id") String runId);
@Headers("OpenAI-Beta: assistants=v1")
@POST("/v1/threads/{thread_id}/runs/{run_id}")
Single<Run> modifyRun(@Path("thread_id") String threadId, @Path("run_id") String runId, @Body Map<String, String> metadata);
@Headers("OpenAI-Beta: assistants=v1")
@GET("/v1/threads/{thread_id}/runs")
Single<OpenAiResponse<Run>> listRuns(@Path("thread_id") String threadId, @QueryMap Map<String, String> listSearchParameters);
@Headers("OpenAI-Beta: assistants=v1")
@POST("/v1/threads/{thread_id}/runs/{run_id}/submit_tool_outputs")
Single<Run> submitToolOutputs(@Path("thread_id") String threadId, @Path("run_id") String runId, @Body SubmitToolOutputsRequest submitToolOutputsRequest);
@Headers("OpenAI-Beta: assistants=v1")
@POST("/v1/threads/{thread_id}/runs/{run_id}/cancel")
Single<Run> cancelRun(@Path("thread_id") String threadId, @Path("run_id") String runId);
@Headers("OpenAI-Beta: assistants=v1")
@POST("/v1/threads/runs")
Single<Run> createThreadAndRun(@Body CreateThreadAndRunRequest createThreadAndRunRequest);
@Headers("OpenAI-Beta: assistants=v1")
@GET("/v1/threads/{thread_id}/runs/{run_id}/steps/{step_id}")
Single<RunStep> retrieveRunStep(@Path("thread_id") String threadId, @Path("run_id") String runId, @Path("step_id") String stepId);
@Headers("OpenAI-Beta: assistants=v1")
@GET("/v1/threads/{thread_id}/runs/{run_id}/steps")
Single<OpenAiResponse<RunStep>> listRunSteps(@Path("thread_id") String threadId, @Path("run_id") String runId, @QueryMap Map<String, String> listSearchParameters);
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/common/LastError.java | package art.starrynift.common;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class LastError {
private String code;
private String message;
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/completion/CompletionChoice.java | package art.starrynift.completion;
import lombok.Data;
/**
* A completion generated by OpenAI
*
* https://beta.openai.com/docs/api-reference/completions/create
*/
@Data
public class CompletionChoice {
/**
* The generated text. Will include the prompt if {@link CompletionRequest#echo } is true
*/
String text;
/**
* The index of this completion in the returned list.
*/
Integer index;
/**
* The log probabilities of the chosen tokens and the top {@link CompletionRequest#logprobs} tokens
*/
LogProbResult logprobs;
/**
* The reason why GPT stopped generating, for example "length".
*/
String finish_reason;
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/completion/CompletionChunk.java | package art.starrynift.completion;
import lombok.Data;
import java.util.List;
/**
* Object containing a response chunk from the completions streaming api.
*
* https://beta.openai.com/docs/api-reference/completions/create
*/
@Data
public class CompletionChunk {
/**
* A unique id assigned to this completion.
*/
String id;
/**
* The type of object returned; should be "text_completion".
* https://beta.openai.com/docs/api-reference/create-completion
*/
String object;
/**
* The creation time in epoch seconds.
*/
long created;
/**
* The model used.
*/
String model;
/**
* A list of generated completions.
*/
List<CompletionChoice> choices;
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/completion/CompletionRequest.java | package art.starrynift.completion;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.List;
import java.util.Map;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
* A request for OpenAi to generate a predicted completion for a prompt.
* All fields are nullable.
*
* https://beta.openai.com/docs/api-reference/completions/create
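*
* A minimal builder sketch (model and prompt are illustrative values):
* <pre>{@code
* CompletionRequest request = CompletionRequest.builder()
*     .model("gpt-3.5-turbo-instruct")
*     .prompt("Say hello")
*     .maxTokens(16)
*     .build();
* }</pre>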
*/
@Builder
@NoArgsConstructor
@AllArgsConstructor
@Data
public class CompletionRequest {
/**
* The name of the model to use.
* Required if specifying a fine-tuned model or if using the new v1/completions endpoint.
*/
String model;
/**
* An optional prompt to complete from
*/
String prompt;
/**
* The suffix that comes after a completion of inserted text.
*/
String suffix;
/**
* The maximum number of tokens to generate.
* Requests can use up to 2048 tokens shared between prompt and completion.
* (One token is roughly 4 characters for normal English text)
*/
@JsonProperty("max_tokens")
Integer maxTokens;
/**
* What sampling temperature to use. Higher values mean the model will take more risks.
* Try 0.9 for more creative applications, and 0 (argmax sampling) for ones with a well-defined answer.
*
* We generally recommend using this or {@link CompletionRequest#topP} but not both.
*/
Double temperature;
/**
* An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of
* the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are
* considered.
*
* We generally recommend using this or {@link CompletionRequest#temperature} but not both.
*/
@JsonProperty("top_p")
Double topP;
/**
* How many completions to generate for each prompt.
*
* Because this parameter generates many completions, it can quickly consume your token quota.
* Use carefully and ensure that you have reasonable settings for {@link CompletionRequest#maxTokens} and {@link CompletionRequest#stop}.
*/
Integer n;
/**
* Whether to stream back partial progress.
* If set, tokens will be sent as data-only server-sent events as they become available,
* with the stream terminated by a data: DONE message.
*/
Boolean stream;
/**
* Include the log probabilities on the logprobs most likely tokens, as well as the chosen tokens.
* For example, if logprobs is 10, the API will return a list of the 10 most likely tokens.
* The API will always return the logprob of the sampled token,
* so there may be up to logprobs+1 elements in the response.
*/
Integer logprobs;
/**
* Echo back the prompt in addition to the completion
*/
Boolean echo;
/**
* Up to 4 sequences where the API will stop generating further tokens.
* The returned text will not contain the stop sequence.
*/
List<String> stop;
/**
* Number between -2.0 and 2.0 (default 0) that penalizes new tokens based on whether they appear in the text so far.
* Increases the model's likelihood to talk about new topics.
*/
@JsonProperty("presence_penalty")
Double presencePenalty;
/**
* Number between -2.0 and 2.0 (default 0) that penalizes new tokens based on their existing frequency in the text so far.
* Decreases the model's likelihood to repeat the same line verbatim.
*/
@JsonProperty("frequency_penalty")
Double frequencyPenalty;
/**
* Generates best_of completions server-side and returns the "best"
* (the one with the highest log probability per token).
* Results cannot be streamed.
*
* When used with {@link CompletionRequest#n}, best_of controls the number of candidate completions and n specifies how many to return;
* best_of must be greater than n.
*/
@JsonProperty("best_of")
Integer bestOf;
/**
* Modify the likelihood of specified tokens appearing in the completion.
*
* Maps tokens (specified by their token ID in the GPT tokenizer) to an associated bias value from -100 to 100.
*
* https://beta.openai.com/docs/api-reference/completions/create#completions/create-logit_bias
*/
@JsonProperty("logit_bias")
Map<String, Integer> logitBias;
/**
* A unique identifier representing your end-user, which will help OpenAI to monitor and detect abuse.
*/
String user;
}
|
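A minimal builder sketch for this request type; the model name and parameter values are illustrative, not prescribed by these sources.

import java.util.Map;

CompletionRequest request = CompletionRequest.builder()
        .model("text-davinci-003")
        .prompt("Write a haiku about the sea.")
        .maxTokens(64)
        .temperature(0.7)                 // prefer temperature or topP, not both
        .n(1)
        .logitBias(Map.of("50256", -100)) // a bias of -100 effectively bans a token
        .user("user-1234")
        .build();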
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/completion/CompletionResult.java | package art.starrynift.completion;
import java.util.List;
import art.starrynift.Usage;
import lombok.Data;
/**
* An object containing a response from the completion api
*
* https://beta.openai.com/docs/api-reference/completions/create
*/
@Data
public class CompletionResult {
/**
* A unique id assigned to this completion.
*/
String id;
/**
* The type of object returned, should be "text_completion".
* See https://beta.openai.com/docs/api-reference/create-completion
*/
String object;
/**
* The creation time in epoch seconds.
*/
long created;
/**
* The GPT model used.
*/
String model;
/**
* A list of generated completions.
*/
List<CompletionChoice> choices;
/**
* The API usage for this request
*/
Usage usage;
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/completion/LogProbResult.java | package art.starrynift.completion;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;
import java.util.List;
import java.util.Map;
/**
* Log probabilities of different token options.
* Returned if {@link CompletionRequest#logprobs} is greater than zero.
*
* https://beta.openai.com/docs/api-reference/create-completion
*/
@Data
public class LogProbResult {
/**
* The tokens chosen by the completion api
*/
List<String> tokens;
/**
* The log probability of each token in {@link #tokens}
*/
@JsonProperty("token_logprobs")
List<Double> tokenLogprobs;
/**
* A map for each index in the completion result.
* The map contains the top {@link CompletionRequest#logprobs} tokens and their probabilities
*/
@JsonProperty("top_logprobs")
List<Map<String, Double>> topLogprobs;
/**
* The character offset from the start of the returned text for each of the chosen tokens.
*/
@JsonProperty("text_offset")
List<Integer> textOffset;
}
|
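The returned values are natural-log probabilities; a short sketch of converting them back to plain probabilities, assuming choice is a CompletionChoice taken from a prior response:

// logprobs are natural logs: Math.exp(-0.105) is roughly 0.90, i.e. a ~90% token.
LogProbResult logprobs = choice.getLogprobs();
for (int i = 0; i < logprobs.getTokens().size(); i++) {
    String token = logprobs.getTokens().get(i);
    double p = Math.exp(logprobs.getTokenLogprobs().get(i));
    System.out.printf("%-12s %.3f%n", token, p);
}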
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/completion | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/completion/chat/ChatCompletionChoice.java | package art.starrynift.completion.chat;
import com.fasterxml.jackson.annotation.JsonAlias;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;
/**
* A chat completion generated by OpenAI
*/
@Data
public class ChatCompletionChoice {
/**
* The index of this completion in the returned list.
*/
Integer index;
/**
* The {@link ChatMessageRole#assistant} message or delta (when streaming) which was generated
*/
@JsonAlias("delta")
ChatMessage message;
/**
* The reason why GPT stopped generating, for example "length".
*/
@JsonProperty("finish_reason")
String finishReason;
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/completion | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/completion/chat/ChatCompletionChunk.java | package art.starrynift.completion.chat;
import lombok.Data;
import java.util.List;
/**
* Object containing a response chunk from the chat completions streaming api.
*/
@Data
public class ChatCompletionChunk {
/**
* Unique id assigned to this chat completion.
*/
String id;
/**
* The type of object returned, should be "chat.completion.chunk"
*/
String object;
/**
* The creation time in epoch seconds.
*/
long created;
/**
* The model used.
*/
String model;
/**
* A list of all generated completions.
*/
List<ChatCompletionChoice> choices;
} |
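When streaming, each chunk carries only a delta, so the client concatenates them itself. A sketch, assuming chunks is an RxJava Flowable<ChatCompletionChunk> obtained from the streaming endpoint:

StringBuilder answer = new StringBuilder();
chunks.blockingForEach(chunk -> {
    // @JsonAlias("delta") maps the streamed delta onto the message field.
    ChatMessage delta = chunk.getChoices().get(0).getMessage();
    if (delta.getContent() != null) {
        answer.append(delta.getContent());
    }
});
System.out.println(answer);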
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/completion | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/completion/chat/ChatCompletionRequest.java | package art.starrynift.completion.chat;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.List;
import java.util.Map;
@Data
@Builder
@AllArgsConstructor
@NoArgsConstructor
public class ChatCompletionRequest {
/**
* ID of the model to use.
*/
String model;
/**
* The messages to generate chat completions for, in the <a
* href="https://platform.openai.com/docs/guides/chat/introduction">chat format</a>.<br>
* see {@link ChatMessage}
*/
List<ChatMessage> messages;
/**
* What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower
* values like 0.2 will make it more focused and deterministic.<br>
* We generally recommend altering this or top_p but not both.
*/
Double temperature;
/**
* An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens
* with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered.<br>
* We generally recommend altering this or temperature but not both.
*/
@JsonProperty("top_p")
Double topP;
/**
* How many chat completion chatCompletionChoices to generate for each input message.
*/
Integer n;
/**
* If set, partial message deltas will be sent, like in ChatGPT. Tokens will be sent as data-only <a
* href="https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#Event_stream_format">server-sent
* events</a> as they become available, with the stream terminated by a data: [DONE] message.
*/
Boolean stream;
/**
* Up to 4 sequences where the API will stop generating further tokens.
*/
List<String> stop;
/**
* The maximum number of tokens allowed for the generated answer. By default, the number of tokens the model can return will
* be (4096 - prompt tokens).
*/
@JsonProperty("max_tokens")
Integer maxTokens;
/**
* Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far,
* increasing the model's likelihood to talk about new topics.
*/
@JsonProperty("presence_penalty")
Double presencePenalty;
/**
* Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far,
* decreasing the model's likelihood to repeat the same line verbatim.
*/
@JsonProperty("frequency_penalty")
Double frequencyPenalty;
/**
* Accepts a json object that maps tokens (specified by their token ID in the tokenizer) to an associated bias value from -100
* to 100. Mathematically, the bias is added to the logits generated by the model prior to sampling. The exact effect will
* vary per model, but values between -1 and 1 should decrease or increase likelihood of selection; values like -100 or 100
* should result in a ban or exclusive selection of the relevant token.
*/
@JsonProperty("logit_bias")
Map<String, Integer> logitBias;
/**
* A unique identifier representing your end-user, which will help OpenAI to monitor and detect abuse.
*/
String user;
/**
* A list of the available functions.
*/
List<?> functions;
/**
* Controls how the model responds to function calls, as specified in the <a href="https://platform.openai.com/docs/api-reference/chat/create#chat/create-function_call">OpenAI documentation</a>.
*/
@JsonProperty("function_call")
ChatCompletionRequestFunctionCall functionCall;
/**
* A list of tools the model may call.
*/
List<?> tools;
/**
* Controls which (if any) tool is called by the model.
*/
@JsonProperty("tool_choice")
Object toolChoice;
@Data
@Builder
@AllArgsConstructor
@NoArgsConstructor
public static class ChatCompletionRequestFunctionCall {
String name;
public static ChatCompletionRequestFunctionCall of(String name) {
return new ChatCompletionRequestFunctionCall(name);
}
}
}
|
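A minimal builder sketch for a chat request; the model name and values are illustrative.

List<ChatMessage> messages = List.of(
        new ChatMessage(ChatMessageRole.SYSTEM.value(), "You are a helpful assistant."),
        new ChatMessage(ChatMessageRole.USER.value(), "Summarize RxJava in one sentence."));
ChatCompletionRequest request = ChatCompletionRequest.builder()
        .model("gpt-3.5-turbo")
        .messages(messages)
        .temperature(0.2)   // low temperature for a focused, more deterministic answer
        .maxTokens(128)
        .build();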
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/completion | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/completion/chat/ChatCompletionResult.java | package art.starrynift.completion.chat;
import java.util.List;
import art.starrynift.Usage;
import lombok.Data;
/**
* Object containing a response from the chat completions api.
*/
@Data
public class ChatCompletionResult {
/**
* Unique id assigned to this chat completion.
*/
String id;
/**
* The type of object returned, should be "chat.completion"
*/
String object;
/**
* The creation time in epoch seconds.
*/
long created;
/**
* The GPT model used.
*/
String model;
/**
* A list of all generated completions.
*/
List<ChatCompletionChoice> choices;
/**
* The API usage for this request.
*/
Usage usage;
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/completion | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/completion/chat/ChatFunction.java | package art.starrynift.completion.chat;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.NonNull;
import java.util.function.Function;
@Data
@NoArgsConstructor
public class ChatFunction {
/**
* The name of the function being called.
*/
@NonNull
private String name;
/**
* A description of what the function does, used by the model to choose when and how to call the function.
*/
private String description;
/**
* The parameters the functions accepts.
*/
@JsonProperty("parameters")
private Class<?> parametersClass;
@JsonIgnore
private Function<Object, Object> executor;
public static Builder builder() {
return new Builder();
}
public static class Builder {
private String name;
private String description;
private Class<?> parameters;
private Function<Object, Object> executor;
public Builder name(String name) {
this.name = name;
return this;
}
public Builder description(String description) {
this.description = description;
return this;
}
@SuppressWarnings("unchecked") // safe: the executor is only ever invoked with a T
public <T> Builder executor(Class<T> requestClass, Function<T, Object> executor) {
this.parameters = requestClass;
this.executor = (Function<Object, Object>) executor;
return this;
}
public ChatFunction build() {
ChatFunction chatFunction = new ChatFunction();
chatFunction.setName(name);
chatFunction.setDescription(description);
chatFunction.setParametersClass(parameters);
chatFunction.setExecutor(executor);
return chatFunction;
}
}
} |
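A sketch of registering a statically typed function; WeatherQuery is a hypothetical parameter class whose fields define the JSON schema the model fills in.

// Hypothetical parameter type; its fields become the function's JSON schema.
class WeatherQuery {
    public String location;
}

ChatFunction getWeather = ChatFunction.builder()
        .name("get_weather")
        .description("Look up the current weather for a location")
        .executor(WeatherQuery.class, q -> "Sunny in " + q.location)
        .build();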
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/completion | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/completion/chat/ChatFunctionCall.java | package art.starrynift.completion.chat;
import com.fasterxml.jackson.databind.JsonNode;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
@Data
@AllArgsConstructor
@NoArgsConstructor
public class ChatFunctionCall {
/**
* The name of the function being called
*/
String name;
/**
* The arguments of the call produced by the model, represented as a JsonNode for easy manipulation.
*/
JsonNode arguments;
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/completion | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/completion/chat/ChatFunctionDynamic.java | package art.starrynift.completion.chat;
import lombok.Data;
import lombok.NonNull;
@Data
public class ChatFunctionDynamic {
/**
* The name of the function being called.
*/
@NonNull
private String name;
/**
* A description of what the function does, used by the model to choose when and how to call the function.
*/
private String description;
/**
* The parameters the functions accepts.
*/
private ChatFunctionParameters parameters;
public static Builder builder() {
return new Builder();
}
public static class Builder {
private String name;
private String description;
private ChatFunctionParameters parameters = new ChatFunctionParameters();
public Builder name(String name) {
this.name = name;
return this;
}
public Builder description(String description) {
this.description = description;
return this;
}
public Builder parameters(ChatFunctionParameters parameters) {
this.parameters = parameters;
return this;
}
public Builder addProperty(ChatFunctionProperty property) {
this.parameters.addProperty(property);
return this;
}
public ChatFunctionDynamic build() {
ChatFunctionDynamic chatFunction = new ChatFunctionDynamic(name);
chatFunction.setDescription(description);
chatFunction.setParameters(parameters);
return chatFunction;
}
}
}
|
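The dynamic variant builds the same schema at runtime instead of deriving it from a class; a sketch with illustrative names:

ChatFunctionDynamic getWeather = ChatFunctionDynamic.builder()
        .name("get_weather")
        .description("Look up the current weather for a location")
        .addProperty(ChatFunctionProperty.builder()
                .name("location")
                .type("string")
                .description("City and country, e.g. \"Paris, France\"")
                .required(true)   // lands in the schema's "required" array
                .build())
        .build();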
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/completion | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/completion/chat/ChatFunctionParameters.java | package art.starrynift.completion.chat;
import lombok.Data;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
@Data
public class ChatFunctionParameters {
private final String type = "object";
private final HashMap<String, ChatFunctionProperty> properties = new HashMap<>();
private List<String> required;
public void addProperty(ChatFunctionProperty property) {
properties.put(property.getName(), property);
if (Boolean.TRUE.equals(property.getRequired())) {
if (this.required == null) {
this.required = new ArrayList<>();
}
this.required.add(property.getName());
}
}
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/completion | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/completion/chat/ChatFunctionProperty.java | package art.starrynift.completion.chat;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Builder;
import lombok.Data;
import lombok.NonNull;
import java.util.Set;
@Data
@Builder
public class ChatFunctionProperty {
@NonNull
@JsonIgnore
private String name;
@NonNull
private String type;
@JsonIgnore
private Boolean required;
private String description;
private ChatFunctionProperty items;
@JsonProperty("enum")
private Set<?> enumValues;
} |
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/completion | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/completion/chat/ChatMessage.java | package art.starrynift.completion.chat;
import java.util.List;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.NonNull;
import lombok.RequiredArgsConstructor;
/**
* <p>Each object has a role (either "system", "user", or "assistant") and content (the content of the message). Conversations can be as short as 1 message or fill many pages.</p>
* <p>Typically, a conversation is formatted with a system message first, followed by alternating user and assistant messages.</p>
* <p>The system message helps set the behavior of the assistant. In the example above, the assistant was instructed with "You are a helpful assistant."<br>
* The user messages help instruct the assistant. They can be generated by the end users of an application, or set by a developer as an instruction.<br>
* The assistant messages help store prior responses. They can also be written by a developer to help give examples of desired behavior.
* </p>
*
* see <a href="https://platform.openai.com/docs/guides/chat/introduction">OpenAi documentation</a>
*/
@Data
@NoArgsConstructor(force = true)
@RequiredArgsConstructor
@AllArgsConstructor
public class ChatMessage {
/**
* Must be either 'system', 'user', 'assistant' or 'function'.<br>
* You may use {@link ChatMessageRole} enum.
*/
@NonNull
String role;
@JsonInclude() // content should always exist in the call, even if it is null
Object content;
/**
* The optional name of the author of this message. May contain a-z, A-Z, 0-9, and underscores, with a maximum length of 64 characters.
*/
String name;
@JsonProperty("function_call")
ChatFunctionCall functionCall;
@JsonProperty("tool_calls")
List<ToolCalls> toolCalls;
public ChatMessage(String role, Object content) {
this.role = role;
this.content = content;
}
public ChatMessage(String role, String content, String name) {
this.role = role;
this.content = content;
this.name = name;
}
}
|
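A sketch of the less obvious case, reporting a locally executed function's output back to the model; the function name and payload are illustrative.

// Uses the three-argument constructor (role, content, name).
ChatMessage functionResult = new ChatMessage(
        ChatMessageRole.FUNCTION.value(),
        "{\"temperature\": 18, \"unit\": \"celsius\"}",
        "get_weather");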
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/completion | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/completion/chat/ChatMessageRole.java | package art.starrynift.completion.chat;
/**
* see {@link ChatMessage} documentation.
*/
public enum ChatMessageRole {
SYSTEM("system"),
USER("user"),
ASSISTANT("assistant"),
FUNCTION("function");
private final String value;
ChatMessageRole(final String value) {
this.value = value;
}
public String value() {
return value;
}
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/completion | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/completion/chat/ToolCalls.java | package art.starrynift.completion.chat;
import com.fasterxml.jackson.databind.JsonNode;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
@Data
@AllArgsConstructor
@NoArgsConstructor
public class ToolCalls {
/**
* The unique id of this tool call.
*/
String id;
/**
* The type of the tool. Currently only "function" is supported.
*/
String type;
ChatFunctionCall function;
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/edit/EditChoice.java | package art.starrynift.edit;
import lombok.Data;
/**
* An edit generated by OpenAi
*
* https://beta.openai.com/docs/api-reference/edits/create
*/
@Data
public class EditChoice {
/**
* The edited text.
*/
String text;
/**
* The index of this completion in the returned list.
*/
Integer index;
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/edit/EditRequest.java | package art.starrynift.edit;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.*;
/**
* Given a prompt and an instruction, OpenAi will return an edited version of the prompt
*
* https://beta.openai.com/docs/api-reference/edits/create
*/
@Builder
@NoArgsConstructor
@AllArgsConstructor
@Data
public class EditRequest {
/**
* The name of the model to use.
* Required if using the new v1/edits endpoint.
*/
String model;
/**
* The input text to use as a starting point for the edit.
*/
String input;
/**
* The instruction that tells the model how to edit the prompt.
* For example, "Fix the spelling mistakes"
*/
@NonNull
String instruction;
/**
* How many edits to generate for the input and instruction.
*/
Integer n;
/**
* What sampling temperature to use. Higher values means the model will take more risks.
* Try 0.9 for more creative applications, and 0 (argmax sampling) for ones with a well-defined answer.
*
* We generally recommend altering this or {@link EditRequest#topP} but not both.
*/
Double temperature;
/**
* An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of
* the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are
* considered.
*
* We generally recommend altering this or {@link EditRequest#temperature} but not both.
*/
@JsonProperty("top_p")
Double topP;
}
|
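A minimal builder sketch for an edit request; the model name is the one commonly used with the edits endpoint and the inputs are illustrative.

EditRequest request = EditRequest.builder()
        .model("text-davinci-edit-001")
        .input("What day of the wek is it?")
        .instruction("Fix the spelling mistakes")
        .build();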
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/edit/EditResult.java | package art.starrynift.edit;
import java.util.List;
import art.starrynift.Usage;
import lombok.Data;
/**
* A list of edits generated by OpenAI
*
* https://beta.openai.com/docs/api-reference/edits/create
*/
@Data
public class EditResult {
/**
* The type of object returned, should be "edit"
*/
public String object;
/**
* The creation time in epoch milliseconds.
*/
public long created;
/**
* A list of generated edits.
*/
public List<EditChoice> choices;
/**
* The API usage for this request
*/
public Usage usage;
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/embedding/Embedding.java | package art.starrynift.embedding;
import lombok.Data;
import java.util.List;
/**
* Represents an embedding returned by the embedding api
*
* https://beta.openai.com/docs/api-reference/embeddings/create
*/
@Data
public class Embedding {
/**
* The type of object returned, should be "embedding"
*/
String object;
/**
* The embedding vector
*/
List<Double> embedding;
/**
* The position of this embedding in the list
*/
Integer index;
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/embedding/EmbeddingRequest.java | package art.starrynift.embedding;
import lombok.*;
import java.util.List;
/**
* Creates an embedding vector representing the input text.
*
* https://beta.openai.com/docs/api-reference/embeddings/create
*/
@Builder
@NoArgsConstructor
@AllArgsConstructor
@Data
public class EmbeddingRequest {
/**
* The name of the model to use.
* Required if using the new v1/embeddings endpoint.
*/
String model;
/**
* Input text to get embeddings for, encoded as a string or array of tokens.
* To get embeddings for multiple inputs in a single request, pass an array of strings or array of token arrays.
* Each input must not exceed 2048 tokens in length.
* <p>
* Unless you are embedding code, we suggest replacing newlines (\n) in your input with a single space,
* as we have observed inferior results when newlines are present.
*/
@NonNull
List<String> input;
/**
* A unique identifier representing your end-user, which will help OpenAI to monitor and detect abuse.
*/
String user;
}
|
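A minimal builder sketch; the model name and input text are illustrative.

EmbeddingRequest request = EmbeddingRequest.builder()
        .model("text-embedding-ada-002")
        .input(List.of("The food was delicious and the waiter was friendly."))
        .build();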
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/embedding/EmbeddingResult.java | package art.starrynift.embedding;
import java.util.List;
import art.starrynift.Usage;
import lombok.Data;
/**
* An object containing a response from the embeddings api
*
* https://beta.openai.com/docs/api-reference/embeddings/create
*/
@Data
public class EmbeddingResult {
/**
* The GPT model used for generating embeddings
*/
String model;
/**
* The type of object returned, should be "list"
*/
String object;
/**
* A list of the calculated embeddings
*/
List<Embedding> data;
/**
* The API usage for this request
*/
Usage usage;
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/engine/Engine.java | package art.starrynift.engine;
import lombok.Data;
/**
* GPT-3 engine details
*
* https://beta.openai.com/docs/api-reference/retrieve-engine
*/
@Deprecated
@Data
public class Engine {
/**
* An identifier for this engine, used to specify an engine for completions or searching.
*/
public String id;
/**
* The type of object returned, should be "engine"
*/
public String object;
/**
* The owner of the GPT-3 engine, typically "openai"
*/
public String owner;
/**
* Whether the engine is ready to process requests or not
*/
public boolean ready;
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/file/File.java | package art.starrynift.file;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;
/**
* A file uploaded to OpenAi
*
* https://beta.openai.com/docs/api-reference/files
*/
@Data
public class File {
/**
* The unique id of this file.
*/
String id;
/**
* The type of object returned, should be "file".
*/
String object;
/**
* File size in bytes.
*/
Long bytes;
/**
* The creation time in epoch seconds.
*/
@JsonProperty("created_at")
Long createdAt;
/**
* The name of the file.
*/
String filename;
/**
* Description of the file's purpose.
*/
String purpose;
/**
* The current status of the file, which can be either uploaded, processed, pending, error, deleting or deleted.
*/
String status;
/**
* Additional details about the status of the file.
* If the file is in the error state, this will include a message describing the error.
*/
@JsonProperty("status_details")
String statusDetails;
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/fine_tuning/FineTuningEvent.java | package art.starrynift.fine_tuning;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;
/**
* An object representing an event in the lifecycle of a fine-tuning job
*
* https://platform.openai.com/docs/api-reference/fine-tuning/list-events
*/
@Data
public class FineTuningEvent {
/**
* The type of object returned, should be "fine_tuning.job.event".
*/
String object;
/**
* The ID of the fine-tuning event.
*/
String id;
/**
* The creation time in epoch seconds.
*/
@JsonProperty("created_at")
Long createdAt;
/**
* The log level of this message.
*/
String level;
/**
* The event message.
*/
String message;
/**
* The type of event, e.g. "message".
*/
String type;
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/fine_tuning/FineTuningJob.java | package art.starrynift.fine_tuning;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;
import java.util.List;
/**
* Fine-tuning job
* https://platform.openai.com/docs/api-reference/fine-tuning/object
*/
@Data
public class FineTuningJob {
/**
* The object identifier, which can be referenced in the API endpoints.
*/
String id;
/**
* The object type, which is always "fine_tuning.job".
*/
String object;
/**
* The unix timestamp for when the fine-tuning job was created.
*/
@JsonProperty("created_at")
Long createdAt;
/**
* The unix timestamp for when the fine-tuning job was finished.
*/
@JsonProperty("finished_at")
Long finishedAt;
/**
* The base model that is being fine-tuned.
*/
String model;
/**
* The name of the fine-tuned model that is being created.
* Can be null if no fine-tuned model is created yet.
*/
@JsonProperty("fine_tuned_model")
String fineTunedModel;
/**
* The organization that owns the fine-tuning job.
*/
@JsonProperty("organization_id")
String organizationId;
/**
* The current status of the fine-tuning job.
* Can be either created, pending, running, succeeded, failed, or cancelled.
*/
String status;
/**
* The hyperparameters used for the fine-tuning job.
* See the fine-tuning guide for more details.
*/
Hyperparameters hyperparameters;
/**
* The file ID used for training.
*/
@JsonProperty("training_file")
String trainingFile;
/**
* The file ID used for validation.
* Can be null if validation is not used.
*/
@JsonProperty("validation_file")
String validationFile;
/**
* The compiled results files for the fine-tuning job.
*/
@JsonProperty("result_files")
List<String> resultFiles;
/**
* The total number of billable tokens processed by this fine-tuning job.
*/
@JsonProperty("trained_tokens")
Integer trainedTokens;
} |
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/fine_tuning/FineTuningJobRequest.java | package art.starrynift.fine_tuning;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.*;
/**
* Request to create a fine tuning job
* https://platform.openai.com/docs/api-reference/fine-tuning/create
*/
@Builder
@NoArgsConstructor
@AllArgsConstructor
@Data
public class FineTuningJobRequest {
/**
* The ID of an uploaded file that contains training data.
*/
@NonNull
@JsonProperty("training_file")
String trainingFile;
/**
* The ID of an uploaded file that contains validation data.
* Optional.
*/
@JsonProperty("validation_file")
String validationFile;
/**
* The name of the model to fine-tune.
*/
@NonNull
String model;
/**
* The hyperparameters used for the fine-tuning job.
*/
Hyperparameters hyperparameters;
/**
* A string of up to 40 characters that will be added to your fine-tuned model name.
*/
String suffix;
}
|
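A minimal builder sketch for starting a fine-tuning job; the file ID, model, and suffix are illustrative.

FineTuningJobRequest request = FineTuningJobRequest.builder()
        .trainingFile("file-abc123")   // ID of a previously uploaded JSONL file
        .model("gpt-3.5-turbo")
        .hyperparameters(Hyperparameters.builder().nEpochs(3).build())
        .suffix("customer-support")
        .build();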
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/fine_tuning/Hyperparameters.java | package art.starrynift.fine_tuning;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
* Hyperparameters for a fine-tuning job
* https://platform.openai.com/docs/api-reference/fine-tuning/object#hyperparameters
*/
@Builder
@NoArgsConstructor
@AllArgsConstructor
@Data
public class Hyperparameters {
/**
* The number of epochs to train the model for.
* An epoch refers to one full cycle through the training dataset.
* "Auto" decides the optimal number of epochs based on the size of the dataset.
* If setting the number manually, we support any number between 1 and 50 epochs.
*/
@JsonProperty("n_epochs")
Integer nEpochs;
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/image/CreateImageEditRequest.java | package art.starrynift.image;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.*;
/**
* A request for OpenAi to edit an image based on a prompt
* All fields except prompt are optional
*
* https://beta.openai.com/docs/api-reference/images/create-edit
*/
@Builder
@NoArgsConstructor
@AllArgsConstructor
@Data
public class CreateImageEditRequest {
/**
* A text description of the desired image(s). The maximum length is 1000 characters.
*/
@NonNull
String prompt;
/**
* The model to use for image generation. Only dall-e-2 is supported at this time. Defaults to dall-e-2.
*/
String model;
/**
* The number of images to generate. Must be between 1 and 10. Defaults to 1.
*/
Integer n;
/**
* The size of the generated images. Must be one of "256x256", "512x512", or "1024x1024". Defaults to "1024x1024".
*/
String size;
/**
* The format in which the generated images are returned. Must be one of url or b64_json. Defaults to url.
*/
@JsonProperty("response_format")
String responseFormat;
/**
* A unique identifier representing your end-user, which will help OpenAI to monitor and detect abuse.
*/
String user;
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/image/CreateImageRequest.java | package art.starrynift.image;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.*;
/**
* A request for OpenAi to create an image based on a prompt
* All fields except prompt are optional
*
* https://beta.openai.com/docs/api-reference/images/create
*/
@Builder
@NoArgsConstructor
@AllArgsConstructor
@Data
public class CreateImageRequest {
/**
* A text description of the desired image(s). The maximum length is 1000 characters for dall-e-2 and 4000 characters for dall-e-3.
*/
@NonNull
String prompt;
/**
* The model to use for image generation. Defaults to "dall-e-2".
*/
String model;
/**
* The number of images to generate. Must be between 1 and 10. For dall-e-3, only n=1 is supported. Defaults to 1.
*/
Integer n;
/**
* The quality of the image that will be generated. "hd" creates images with finer details and greater consistency across the image. This param is only supported for dall-e-3. Defaults to "standard".
*/
String quality;
/**
* The size of the generated images. Must be one of 256x256, 512x512, or 1024x1024 for dall-e-2. Must be one of 1024x1024, 1792x1024, or 1024x1792 for dall-e-3 models. Defaults to 1024x1024.
*/
String size;
/**
* The format in which the generated images are returned. Must be one of url or b64_json. Defaults to url.
*/
@JsonProperty("response_format")
String responseFormat;
/**
* The style of the generated images. Must be one of vivid or natural. Vivid causes the model to lean towards generating hyper-real and dramatic images. Natural causes the model to produce more natural, less hyper-real looking images. This param is only supported for dall-e-3. Defaults to vivid.
*/
String style;
/**
* A unique identifier representing your end-user, which will help OpenAI to monitor and detect abuse.
*/
String user;
}
|
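A minimal builder sketch for an image generation request; the prompt and settings are illustrative.

CreateImageRequest request = CreateImageRequest.builder()
        .prompt("A watercolor painting of a lighthouse at dawn")
        .model("dall-e-3")
        .n(1)                  // dall-e-3 supports only n=1
        .size("1024x1024")
        .quality("hd")
        .responseFormat("url")
        .build();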
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/image/CreateImageVariationRequest.java | package art.starrynift.image;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.*;
/**
* A request for OpenAi to create a variation of an image
* All fields are optional
*
* https://beta.openai.com/docs/api-reference/images/create-variation
*/
@Builder
@NoArgsConstructor
@AllArgsConstructor
@Data
public class CreateImageVariationRequest {
/**
* The number of images to generate. Must be between 1 and 10. Defaults to 1.
*/
Integer n;
/**
* The model to use for image generation. Only dall-e-2 is supported at this time. Defaults to dall-e-2.
*/
String model;
/**
* The size of the generated images. Must be one of "256x256", "512x512", or "1024x1024". Defaults to "1024x1024".
*/
String size;
/**
* The format in which the generated images are returned. Must be one of url or b64_json. Defaults to url.
*/
@JsonProperty("response_format")
String responseFormat;
/**
* A unique identifier representing your end-user, which will help OpenAI to monitor and detect abuse.
*/
String user;
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/image/Image.java | package art.starrynift.image;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;
/**
* An object containing either a URL or a base 64 encoded image.
*
* https://beta.openai.com/docs/api-reference/images
*/
@Data
public class Image {
/**
* The URL where the image can be accessed.
*/
String url;
/**
* Base64 encoded image string.
*/
@JsonProperty("b64_json")
String b64Json;
/**
* The prompt that was used to generate the image, if there was any revision to the prompt.
*/
@JsonProperty("revised_prompt")
String revisedPrompt;
}
|
0 | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift | java-sources/ai/starrynift/aisdk/starry-ai-sdk/1.0.3/art/starrynift/image/ImageResult.java | package art.starrynift.image;
import lombok.Data;
import java.util.List;
/**
* An object with a list of image results.
*
* https://beta.openai.com/docs/api-reference/images
*/
@Data
public class ImageResult {
/**
* The creation time in epoch seconds.
*/
Long created;
/**
* List of image results.
*/
List<Image> data;
}
|