id
int32
0
165k
repo
stringlengths
7
58
path
stringlengths
12
218
func_name
stringlengths
3
140
original_string
stringlengths
73
34.1k
language
stringclasses
1 value
code
stringlengths
73
34.1k
code_tokens
list
docstring
stringlengths
3
16k
docstring_tokens
list
sha
stringlengths
40
40
url
stringlengths
105
339
25,900
apache/incubator-gobblin
gobblin-modules/gobblin-kafka-09/src/main/java/org/apache/gobblin/source/extractor/extract/kafka/KafkaSimpleStreamingExtractor.java
KafkaSimpleStreamingExtractor.readRecordEnvelopeImpl
@Override public RecordEnvelope<D> readRecordEnvelopeImpl() throws DataRecordException, IOException { if (!_isStarted.get()) { throw new IOException("Streaming extractor has not been started."); } while ((_records == null) || (!_records.hasNext())) { synchronized (_consumer) { if (_close.get()) { throw new ClosedChannelException(); } _records = _consumer.poll(this.fetchTimeOut).iterator(); } } ConsumerRecord<S, D> record = _records.next(); _rowCount.getAndIncrement(); return new RecordEnvelope<D>(record.value(), new KafkaWatermark(_partition, new LongWatermark(record.offset()))); }
java
/**
 * Returns the next record when available. Blocks (re-polling in a loop) until a record
 * arrives, so it never times out -- this is a streaming source.
 *
 * @return a {@link RecordEnvelope} carrying the record value and a per-partition
 *         offset watermark
 * @throws IOException if the extractor has not been started
 * @throws java.nio.channels.ClosedChannelException if the extractor was closed while waiting
 */
@Override public RecordEnvelope<D> readRecordEnvelopeImpl() throws DataRecordException, IOException {
  if (!_isStarted.get()) {
    throw new IOException("Streaming extractor has not been started.");
  }
  // Keep polling until at least one record is buffered. The consumer is shared with the
  // close path, so each poll holds the _consumer lock, and the _close flag is re-checked
  // inside the critical section so we never poll a consumer that is being shut down.
  while ((_records == null) || (!_records.hasNext())) {
    synchronized (_consumer) {
      if (_close.get()) {
        throw new ClosedChannelException();
      }
      _records = _consumer.poll(this.fetchTimeOut).iterator();
    }
  }
  ConsumerRecord<S, D> record = _records.next();
  _rowCount.getAndIncrement();
  // The watermark records the offset of the record just consumed for this partition.
  return new RecordEnvelope<D>(record.value(), new KafkaWatermark(_partition, new LongWatermark(record.offset())));
}
[ "@", "Override", "public", "RecordEnvelope", "<", "D", ">", "readRecordEnvelopeImpl", "(", ")", "throws", "DataRecordException", ",", "IOException", "{", "if", "(", "!", "_isStarted", ".", "get", "(", ")", ")", "{", "throw", "new", "IOException", "(", "\"Streaming extractor has not been started.\"", ")", ";", "}", "while", "(", "(", "_records", "==", "null", ")", "||", "(", "!", "_records", ".", "hasNext", "(", ")", ")", ")", "{", "synchronized", "(", "_consumer", ")", "{", "if", "(", "_close", ".", "get", "(", ")", ")", "{", "throw", "new", "ClosedChannelException", "(", ")", ";", "}", "_records", "=", "_consumer", ".", "poll", "(", "this", ".", "fetchTimeOut", ")", ".", "iterator", "(", ")", ";", "}", "}", "ConsumerRecord", "<", "S", ",", "D", ">", "record", "=", "_records", ".", "next", "(", ")", ";", "_rowCount", ".", "getAndIncrement", "(", ")", ";", "return", "new", "RecordEnvelope", "<", "D", ">", "(", "record", ".", "value", "(", ")", ",", "new", "KafkaWatermark", "(", "_partition", ",", "new", "LongWatermark", "(", "record", ".", "offset", "(", ")", ")", ")", ")", ";", "}" ]
Return the next record when available. Will never time out since this is a streaming source.
[ "Return", "the", "next", "record", "when", "available", ".", "Will", "never", "time", "out", "since", "this", "is", "a", "streaming", "source", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-modules/gobblin-kafka-09/src/main/java/org/apache/gobblin/source/extractor/extract/kafka/KafkaSimpleStreamingExtractor.java#L208-L225
25,901
apache/incubator-gobblin
gobblin-runtime/src/main/java/org/apache/gobblin/runtime/job_catalog/FSPathAlterationListenerAdaptor.java
FSPathAlterationListenerAdaptor.onFileDelete
@Override public void onFileDelete(Path rawPath) { URI jobSpecUri = this.converter.computeURI(rawPath); // TODO: fix version listeners.onDeleteJob(jobSpecUri, null); }
java
@Override public void onFileDelete(Path rawPath) { URI jobSpecUri = this.converter.computeURI(rawPath); // TODO: fix version listeners.onDeleteJob(jobSpecUri, null); }
[ "@", "Override", "public", "void", "onFileDelete", "(", "Path", "rawPath", ")", "{", "URI", "jobSpecUri", "=", "this", ".", "converter", ".", "computeURI", "(", "rawPath", ")", ";", "// TODO: fix version", "listeners", ".", "onDeleteJob", "(", "jobSpecUri", ",", "null", ")", ";", "}" ]
For an already-deleted job configuration file, the only identifier is its path; it doesn't make sense to call loadJobConfig here. @param rawPath This could be the complete path to the newly-deleted configuration file.
[ "For", "already", "deleted", "job", "configuration", "file", "the", "only", "identifier", "is", "path", "it", "doesn", "t", "make", "sense", "to", "loadJobConfig", "Here", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-runtime/src/main/java/org/apache/gobblin/runtime/job_catalog/FSPathAlterationListenerAdaptor.java#L70-L75
25,902
apache/incubator-gobblin
gobblin-runtime/src/main/java/org/apache/gobblin/scheduler/JobScheduler.java
JobScheduler.scheduleJobImmediately
public Future<?> scheduleJobImmediately(Properties jobProps, JobListener jobListener, JobLauncher jobLauncher) { Callable<Void> callable = new Callable<Void>() { @Override public Void call() throws JobException { try { runJob(jobProps, jobListener, jobLauncher); } catch (JobException je) { LOG.error("Failed to run job " + jobProps.getProperty(ConfigurationKeys.JOB_NAME_KEY), je); throw je; } return null; } }; final Future<?> future = this.jobExecutor.submit(callable); return new Future() { @Override public boolean cancel(boolean mayInterruptIfRunning) { if (!cancelRequested) { return false; } boolean result = true; try { jobLauncher.cancelJob(jobListener); } catch (JobException e) { LOG.error("Failed to cancel job " + jobProps.getProperty(ConfigurationKeys.JOB_NAME_KEY), e); result = false; } if (mayInterruptIfRunning) { result &= future.cancel(true); } return result; } @Override public boolean isCancelled() { return future.isCancelled(); } @Override public boolean isDone() { return future.isDone(); } @Override public Object get() throws InterruptedException, ExecutionException { return future.get(); } @Override public Object get(long timeout, TimeUnit unit) throws InterruptedException, ExecutionException, TimeoutException { return future.get(timeout, unit); } }; }
java
/**
 * Schedules a job to run immediately on this scheduler's executor.
 *
 * @param jobProps    job configuration properties
 * @param jobListener {@link JobListener} used for callback, can be <em>null</em> if no
 *                    callback is needed
 * @param jobLauncher {@link JobLauncher} used to launch and, on request, cancel the job
 * @return a {@link Future} whose {@code cancel} also asks the {@link JobLauncher} to
 *         cancel the running job, not just the executor task
 */
public Future<?> scheduleJobImmediately(Properties jobProps, JobListener jobListener, JobLauncher jobLauncher) {
  Callable<Void> callable = new Callable<Void>() {
    @Override
    public Void call() throws JobException {
      try {
        runJob(jobProps, jobListener, jobLauncher);
      } catch (JobException je) {
        LOG.error("Failed to run job " + jobProps.getProperty(ConfigurationKeys.JOB_NAME_KEY), je);
        throw je;
      }
      return null;
    }
  };
  final Future<?> future = this.jobExecutor.submit(callable);
  // Generified (was a raw Future) so the anonymous class compiles without unchecked
  // warnings; Future<Object> is assignable to the declared Future<?> return type.
  return new Future<Object>() {
    @Override
    public boolean cancel(boolean mayInterruptIfRunning) {
      // NOTE(review): cancelRequested is a field declared elsewhere in this class;
      // presumably it gates whether cancellation is currently allowed -- confirm.
      if (!cancelRequested) {
        return false;
      }
      boolean result = true;
      try {
        jobLauncher.cancelJob(jobListener);
      } catch (JobException e) {
        LOG.error("Failed to cancel job " + jobProps.getProperty(ConfigurationKeys.JOB_NAME_KEY), e);
        result = false;
      }
      if (mayInterruptIfRunning) {
        result &= future.cancel(true);
      }
      return result;
    }

    @Override
    public boolean isCancelled() {
      return future.isCancelled();
    }

    @Override
    public boolean isDone() {
      return future.isDone();
    }

    @Override
    public Object get() throws InterruptedException, ExecutionException {
      return future.get();
    }

    @Override
    public Object get(long timeout, TimeUnit unit)
        throws InterruptedException, ExecutionException, TimeoutException {
      return future.get(timeout, unit);
    }
  };
}
[ "public", "Future", "<", "?", ">", "scheduleJobImmediately", "(", "Properties", "jobProps", ",", "JobListener", "jobListener", ",", "JobLauncher", "jobLauncher", ")", "{", "Callable", "<", "Void", ">", "callable", "=", "new", "Callable", "<", "Void", ">", "(", ")", "{", "@", "Override", "public", "Void", "call", "(", ")", "throws", "JobException", "{", "try", "{", "runJob", "(", "jobProps", ",", "jobListener", ",", "jobLauncher", ")", ";", "}", "catch", "(", "JobException", "je", ")", "{", "LOG", ".", "error", "(", "\"Failed to run job \"", "+", "jobProps", ".", "getProperty", "(", "ConfigurationKeys", ".", "JOB_NAME_KEY", ")", ",", "je", ")", ";", "throw", "je", ";", "}", "return", "null", ";", "}", "}", ";", "final", "Future", "<", "?", ">", "future", "=", "this", ".", "jobExecutor", ".", "submit", "(", "callable", ")", ";", "return", "new", "Future", "(", ")", "{", "@", "Override", "public", "boolean", "cancel", "(", "boolean", "mayInterruptIfRunning", ")", "{", "if", "(", "!", "cancelRequested", ")", "{", "return", "false", ";", "}", "boolean", "result", "=", "true", ";", "try", "{", "jobLauncher", ".", "cancelJob", "(", "jobListener", ")", ";", "}", "catch", "(", "JobException", "e", ")", "{", "LOG", ".", "error", "(", "\"Failed to cancel job \"", "+", "jobProps", ".", "getProperty", "(", "ConfigurationKeys", ".", "JOB_NAME_KEY", ")", ",", "e", ")", ";", "result", "=", "false", ";", "}", "if", "(", "mayInterruptIfRunning", ")", "{", "result", "&=", "future", ".", "cancel", "(", "true", ")", ";", "}", "return", "result", ";", "}", "@", "Override", "public", "boolean", "isCancelled", "(", ")", "{", "return", "future", ".", "isCancelled", "(", ")", ";", "}", "@", "Override", "public", "boolean", "isDone", "(", ")", "{", "return", "future", ".", "isDone", "(", ")", ";", "}", "@", "Override", "public", "Object", "get", "(", ")", "throws", "InterruptedException", ",", "ExecutionException", "{", "return", "future", ".", "get", "(", ")", ";", "}", "@", "Override", 
"public", "Object", "get", "(", "long", "timeout", ",", "TimeUnit", "unit", ")", "throws", "InterruptedException", ",", "ExecutionException", ",", "TimeoutException", "{", "return", "future", ".", "get", "(", "timeout", ",", "unit", ")", ";", "}", "}", ";", "}" ]
Schedule a job immediately. <p> This method calls the Quartz scheduler to scheduler the job. </p> @param jobProps Job configuration properties @param jobListener {@link JobListener} used for callback, can be <em>null</em> if no callback is needed. @throws JobException when there is anything wrong with scheduling the job
[ "Schedule", "a", "job", "immediately", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-runtime/src/main/java/org/apache/gobblin/scheduler/JobScheduler.java#L258-L312
25,903
apache/incubator-gobblin
gobblin-runtime/src/main/java/org/apache/gobblin/scheduler/JobScheduler.java
JobScheduler.unscheduleJob
public void unscheduleJob(String jobName) throws JobException { if (this.scheduledJobs.containsKey(jobName)) { try { this.scheduler.getScheduler().deleteJob(this.scheduledJobs.remove(jobName)); } catch (SchedulerException se) { LOG.error("Failed to unschedule and delete job " + jobName, se); throw new JobException("Failed to unschedule and delete job " + jobName, se); } } }
java
/**
 * Unschedules and deletes a job. A no-op if the job is not currently scheduled.
 *
 * @param jobName job name
 * @throws JobException when there is anything wrong unscheduling the job
 */
public void unscheduleJob(String jobName) throws JobException {
  if (!this.scheduledJobs.containsKey(jobName)) {
    return;
  }
  try {
    this.scheduler.getScheduler().deleteJob(this.scheduledJobs.remove(jobName));
  } catch (SchedulerException se) {
    String message = "Failed to unschedule and delete job " + jobName;
    LOG.error(message, se);
    throw new JobException(message, se);
  }
}
[ "public", "void", "unscheduleJob", "(", "String", "jobName", ")", "throws", "JobException", "{", "if", "(", "this", ".", "scheduledJobs", ".", "containsKey", "(", "jobName", ")", ")", "{", "try", "{", "this", ".", "scheduler", ".", "getScheduler", "(", ")", ".", "deleteJob", "(", "this", ".", "scheduledJobs", ".", "remove", "(", "jobName", ")", ")", ";", "}", "catch", "(", "SchedulerException", "se", ")", "{", "LOG", ".", "error", "(", "\"Failed to unschedule and delete job \"", "+", "jobName", ",", "se", ")", ";", "throw", "new", "JobException", "(", "\"Failed to unschedule and delete job \"", "+", "jobName", ",", "se", ")", ";", "}", "}", "}" ]
Unschedule and delete a job. @param jobName Job name @throws JobException when there is anything wrong unschedule the job
[ "Unschedule", "and", "delete", "a", "job", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-runtime/src/main/java/org/apache/gobblin/scheduler/JobScheduler.java#L408-L418
25,904
apache/incubator-gobblin
gobblin-runtime/src/main/java/org/apache/gobblin/scheduler/JobScheduler.java
JobScheduler.scheduleGeneralConfiguredJobs
private void scheduleGeneralConfiguredJobs() throws ConfigurationException, JobException, IOException { LOG.info("Scheduling configured jobs"); for (Properties jobProps : loadGeneralJobConfigs()) { if (!jobProps.containsKey(ConfigurationKeys.JOB_SCHEDULE_KEY)) { // A job without a cron schedule is considered a one-time job jobProps.setProperty(ConfigurationKeys.JOB_RUN_ONCE_KEY, "true"); } boolean runOnce = Boolean.valueOf(jobProps.getProperty(ConfigurationKeys.JOB_RUN_ONCE_KEY, "false")); scheduleJob(jobProps, runOnce ? new RunOnceJobListener() : new EmailNotificationJobListener()); this.listener.addToJobNameMap(jobProps); } }
java
private void scheduleGeneralConfiguredJobs() throws ConfigurationException, JobException, IOException { LOG.info("Scheduling configured jobs"); for (Properties jobProps : loadGeneralJobConfigs()) { if (!jobProps.containsKey(ConfigurationKeys.JOB_SCHEDULE_KEY)) { // A job without a cron schedule is considered a one-time job jobProps.setProperty(ConfigurationKeys.JOB_RUN_ONCE_KEY, "true"); } boolean runOnce = Boolean.valueOf(jobProps.getProperty(ConfigurationKeys.JOB_RUN_ONCE_KEY, "false")); scheduleJob(jobProps, runOnce ? new RunOnceJobListener() : new EmailNotificationJobListener()); this.listener.addToJobNameMap(jobProps); } }
[ "private", "void", "scheduleGeneralConfiguredJobs", "(", ")", "throws", "ConfigurationException", ",", "JobException", ",", "IOException", "{", "LOG", ".", "info", "(", "\"Scheduling configured jobs\"", ")", ";", "for", "(", "Properties", "jobProps", ":", "loadGeneralJobConfigs", "(", ")", ")", "{", "if", "(", "!", "jobProps", ".", "containsKey", "(", "ConfigurationKeys", ".", "JOB_SCHEDULE_KEY", ")", ")", "{", "// A job without a cron schedule is considered a one-time job", "jobProps", ".", "setProperty", "(", "ConfigurationKeys", ".", "JOB_RUN_ONCE_KEY", ",", "\"true\"", ")", ";", "}", "boolean", "runOnce", "=", "Boolean", ".", "valueOf", "(", "jobProps", ".", "getProperty", "(", "ConfigurationKeys", ".", "JOB_RUN_ONCE_KEY", ",", "\"false\"", ")", ")", ";", "scheduleJob", "(", "jobProps", ",", "runOnce", "?", "new", "RunOnceJobListener", "(", ")", ":", "new", "EmailNotificationJobListener", "(", ")", ")", ";", "this", ".", "listener", ".", "addToJobNameMap", "(", "jobProps", ")", ";", "}", "}" ]
Schedule Gobblin jobs in general position
[ "Schedule", "Gobblin", "jobs", "in", "general", "position" ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-runtime/src/main/java/org/apache/gobblin/scheduler/JobScheduler.java#L505-L519
25,905
apache/incubator-gobblin
gobblin-runtime/src/main/java/org/apache/gobblin/scheduler/JobScheduler.java
JobScheduler.startGeneralJobConfigFileMonitor
private void startGeneralJobConfigFileMonitor() throws Exception { SchedulerUtils.addPathAlterationObserver(this.pathAlterationDetector, this.listener, jobConfigFileDirPath); this.pathAlterationDetector.start(); this.closer.register(new Closeable() { @Override public void close() throws IOException { try { pathAlterationDetector.stop(1000); } catch (InterruptedException e) { throw new IOException(e); } } }); }
java
/**
 * Starts the job configuration file monitor using the generic file system API, and
 * registers a {@link Closeable} that stops the detector when the scheduler shuts down.
 *
 * @throws Exception if the path alteration detector fails to start
 */
private void startGeneralJobConfigFileMonitor() throws Exception {
  SchedulerUtils.addPathAlterationObserver(this.pathAlterationDetector, this.listener, jobConfigFileDirPath);
  this.pathAlterationDetector.start();
  this.closer.register(new Closeable() {
    @Override
    public void close() throws IOException {
      try {
        // Allow up to 1000 ms for the detector thread to stop.
        pathAlterationDetector.stop(1000);
      } catch (InterruptedException e) {
        // Restore the interrupt status so callers up the stack can observe it
        // (the original code swallowed the interrupt when wrapping it).
        Thread.currentThread().interrupt();
        throw new IOException(e);
      }
    }
  });
}
[ "private", "void", "startGeneralJobConfigFileMonitor", "(", ")", "throws", "Exception", "{", "SchedulerUtils", ".", "addPathAlterationObserver", "(", "this", ".", "pathAlterationDetector", ",", "this", ".", "listener", ",", "jobConfigFileDirPath", ")", ";", "this", ".", "pathAlterationDetector", ".", "start", "(", ")", ";", "this", ".", "closer", ".", "register", "(", "new", "Closeable", "(", ")", "{", "@", "Override", "public", "void", "close", "(", ")", "throws", "IOException", "{", "try", "{", "pathAlterationDetector", ".", "stop", "(", "1000", ")", ";", "}", "catch", "(", "InterruptedException", "e", ")", "{", "throw", "new", "IOException", "(", "e", ")", ";", "}", "}", "}", ")", ";", "}" ]
Start the job configuration file monitor using generic file system API. <p> The job configuration file monitor currently only supports monitoring the following types of changes: <ul> <li>New job configuration files.</li> <li>Changes to existing job configuration files.</li> <li>Changes to existing common properties file with a .properties extension.</li> <li>Deletion to existing job configuration files.</li> <li>Deletion to existing common properties file with a .properties extension.</li> </ul> </p> <p> This monitor has one limitation: in case more than one file including at least one common properties file are changed between two adjacent checks, the reloading of affected job configuration files may be intermixed and applied in an order that is not desirable. This is because the order the listener is called on the changes is not controlled by Gobblin, but instead by the monitor itself. </p>
[ "Start", "the", "job", "configuration", "file", "monitor", "using", "generic", "file", "system", "API", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-runtime/src/main/java/org/apache/gobblin/scheduler/JobScheduler.java#L553-L567
25,906
apache/incubator-gobblin
gobblin-data-management/src/main/java/org/apache/gobblin/data/management/copy/hive/HiveDatasetFinder.java
HiveDatasetFinder.getTables
public Collection<DbAndTable> getTables() throws IOException { List<DbAndTable> tables = Lists.newArrayList(); try (AutoReturnableObject<IMetaStoreClient> client = this.clientPool.getClient()) { Iterable<String> databases = Iterables.filter(client.get().getAllDatabases(), new Predicate<String>() { @Override public boolean apply(String db) { return HiveDatasetFinder.this.whitelistBlacklist.acceptDb(db); } }); for (final String db : databases) { Iterable<String> tableNames = Iterables.filter(client.get().getAllTables(db), new Predicate<String>() { @Override public boolean apply(String table) { return HiveDatasetFinder.this.whitelistBlacklist.acceptTable(db, table); } }); for (String tableName : tableNames) { tables.add(new DbAndTable(db, tableName)); } } } catch (Exception exc) { throw new IOException(exc); } return tables; }
java
/**
 * Lists every db/table pair in the metastore that passes the whitelist/blacklist filter.
 *
 * @return all accepted tables across all accepted databases
 * @throws IOException if the metastore cannot be queried
 */
public Collection<DbAndTable> getTables() throws IOException {
  List<DbAndTable> tables = Lists.newArrayList();
  try (AutoReturnableObject<IMetaStoreClient> client = this.clientPool.getClient()) {
    // Filter databases, then tables within each accepted database.
    Iterable<String> acceptedDbs =
        Iterables.filter(client.get().getAllDatabases(), db -> this.whitelistBlacklist.acceptDb(db));
    for (String db : acceptedDbs) {
      Iterable<String> acceptedTables =
          Iterables.filter(client.get().getAllTables(db), table -> this.whitelistBlacklist.acceptTable(db, table));
      for (String tableName : acceptedTables) {
        tables.add(new DbAndTable(db, tableName));
      }
    }
  } catch (Exception exc) {
    // Wrap any metastore failure in an IOException for the caller.
    throw new IOException(exc);
  }
  return tables;
}
[ "public", "Collection", "<", "DbAndTable", ">", "getTables", "(", ")", "throws", "IOException", "{", "List", "<", "DbAndTable", ">", "tables", "=", "Lists", ".", "newArrayList", "(", ")", ";", "try", "(", "AutoReturnableObject", "<", "IMetaStoreClient", ">", "client", "=", "this", ".", "clientPool", ".", "getClient", "(", ")", ")", "{", "Iterable", "<", "String", ">", "databases", "=", "Iterables", ".", "filter", "(", "client", ".", "get", "(", ")", ".", "getAllDatabases", "(", ")", ",", "new", "Predicate", "<", "String", ">", "(", ")", "{", "@", "Override", "public", "boolean", "apply", "(", "String", "db", ")", "{", "return", "HiveDatasetFinder", ".", "this", ".", "whitelistBlacklist", ".", "acceptDb", "(", "db", ")", ";", "}", "}", ")", ";", "for", "(", "final", "String", "db", ":", "databases", ")", "{", "Iterable", "<", "String", ">", "tableNames", "=", "Iterables", ".", "filter", "(", "client", ".", "get", "(", ")", ".", "getAllTables", "(", "db", ")", ",", "new", "Predicate", "<", "String", ">", "(", ")", "{", "@", "Override", "public", "boolean", "apply", "(", "String", "table", ")", "{", "return", "HiveDatasetFinder", ".", "this", ".", "whitelistBlacklist", ".", "acceptTable", "(", "db", ",", "table", ")", ";", "}", "}", ")", ";", "for", "(", "String", "tableName", ":", "tableNames", ")", "{", "tables", ".", "add", "(", "new", "DbAndTable", "(", "db", ",", "tableName", ")", ")", ";", "}", "}", "}", "catch", "(", "Exception", "exc", ")", "{", "throw", "new", "IOException", "(", "exc", ")", ";", "}", "return", "tables", ";", "}" ]
Get all tables in db with given table pattern.
[ "Get", "all", "tables", "in", "db", "with", "given", "table", "pattern", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-data-management/src/main/java/org/apache/gobblin/data/management/copy/hive/HiveDatasetFinder.java#L194-L221
25,907
apache/incubator-gobblin
gobblin-data-management/src/main/java/org/apache/gobblin/data/management/copy/replication/ConfigBasedDatasetsFinder.java
ConfigBasedDatasetsFinder.getValidDatasetURIsHelper
protected static Set<URI> getValidDatasetURIsHelper(Collection<URI> allDatasetURIs, Set<URI> disabledURISet, Path datasetCommonRoot){ if (allDatasetURIs == null || allDatasetURIs.isEmpty()) { return ImmutableSet.of(); } Comparator<URI> pathLengthComparator = new Comparator<URI>() { public int compare(URI c1, URI c2) { return c1.getPath().length() - c2.getPath().length(); } }; List<URI> sortedDatasetsList = new ArrayList<URI>(allDatasetURIs); // sort the URI based on the path length to make sure the parent path appear before children Collections.sort(sortedDatasetsList, pathLengthComparator); TreeSet<URI> uriSet = new TreeSet<URI>(); Set<URI> noneLeaf = new HashSet<URI>(); for (URI u : sortedDatasetsList) { // filter out none common root if (PathUtils.isAncestor(datasetCommonRoot, new Path(u.getPath()))) { URI floor = uriSet.floor(u); // check for ancestor Paths if (floor != null && PathUtils.isAncestor(new Path(floor.getPath()), new Path(u.getPath()))) { noneLeaf.add(floor); } uriSet.add(u); } } // only get the leaf nodes Set<URI> validURISet = new HashSet<URI>(); for (URI u : uriSet) { if (!noneLeaf.contains(u)) { validURISet.add(u); } } // remove disabled URIs for (URI disable : disabledURISet) { if (validURISet.remove(disable)) { log.info("skip disabled dataset " + disable); } else { log.info("There's no URI " + disable + " available in validURISet."); } } return validURISet; }
java
protected static Set<URI> getValidDatasetURIsHelper(Collection<URI> allDatasetURIs, Set<URI> disabledURISet, Path datasetCommonRoot){ if (allDatasetURIs == null || allDatasetURIs.isEmpty()) { return ImmutableSet.of(); } Comparator<URI> pathLengthComparator = new Comparator<URI>() { public int compare(URI c1, URI c2) { return c1.getPath().length() - c2.getPath().length(); } }; List<URI> sortedDatasetsList = new ArrayList<URI>(allDatasetURIs); // sort the URI based on the path length to make sure the parent path appear before children Collections.sort(sortedDatasetsList, pathLengthComparator); TreeSet<URI> uriSet = new TreeSet<URI>(); Set<URI> noneLeaf = new HashSet<URI>(); for (URI u : sortedDatasetsList) { // filter out none common root if (PathUtils.isAncestor(datasetCommonRoot, new Path(u.getPath()))) { URI floor = uriSet.floor(u); // check for ancestor Paths if (floor != null && PathUtils.isAncestor(new Path(floor.getPath()), new Path(u.getPath()))) { noneLeaf.add(floor); } uriSet.add(u); } } // only get the leaf nodes Set<URI> validURISet = new HashSet<URI>(); for (URI u : uriSet) { if (!noneLeaf.contains(u)) { validURISet.add(u); } } // remove disabled URIs for (URI disable : disabledURISet) { if (validURISet.remove(disable)) { log.info("skip disabled dataset " + disable); } else { log.info("There's no URI " + disable + " available in validURISet."); } } return validURISet; }
[ "protected", "static", "Set", "<", "URI", ">", "getValidDatasetURIsHelper", "(", "Collection", "<", "URI", ">", "allDatasetURIs", ",", "Set", "<", "URI", ">", "disabledURISet", ",", "Path", "datasetCommonRoot", ")", "{", "if", "(", "allDatasetURIs", "==", "null", "||", "allDatasetURIs", ".", "isEmpty", "(", ")", ")", "{", "return", "ImmutableSet", ".", "of", "(", ")", ";", "}", "Comparator", "<", "URI", ">", "pathLengthComparator", "=", "new", "Comparator", "<", "URI", ">", "(", ")", "{", "public", "int", "compare", "(", "URI", "c1", ",", "URI", "c2", ")", "{", "return", "c1", ".", "getPath", "(", ")", ".", "length", "(", ")", "-", "c2", ".", "getPath", "(", ")", ".", "length", "(", ")", ";", "}", "}", ";", "List", "<", "URI", ">", "sortedDatasetsList", "=", "new", "ArrayList", "<", "URI", ">", "(", "allDatasetURIs", ")", ";", "// sort the URI based on the path length to make sure the parent path appear before children", "Collections", ".", "sort", "(", "sortedDatasetsList", ",", "pathLengthComparator", ")", ";", "TreeSet", "<", "URI", ">", "uriSet", "=", "new", "TreeSet", "<", "URI", ">", "(", ")", ";", "Set", "<", "URI", ">", "noneLeaf", "=", "new", "HashSet", "<", "URI", ">", "(", ")", ";", "for", "(", "URI", "u", ":", "sortedDatasetsList", ")", "{", "// filter out none common root", "if", "(", "PathUtils", ".", "isAncestor", "(", "datasetCommonRoot", ",", "new", "Path", "(", "u", ".", "getPath", "(", ")", ")", ")", ")", "{", "URI", "floor", "=", "uriSet", ".", "floor", "(", "u", ")", ";", "// check for ancestor Paths", "if", "(", "floor", "!=", "null", "&&", "PathUtils", ".", "isAncestor", "(", "new", "Path", "(", "floor", ".", "getPath", "(", ")", ")", ",", "new", "Path", "(", "u", ".", "getPath", "(", ")", ")", ")", ")", "{", "noneLeaf", ".", "add", "(", "floor", ")", ";", "}", "uriSet", ".", "add", "(", "u", ")", ";", "}", "}", "// only get the leaf nodes", "Set", "<", "URI", ">", "validURISet", "=", "new", "HashSet", "<", "URI", ">", "(", ")", ";", "for", 
"(", "URI", "u", ":", "uriSet", ")", "{", "if", "(", "!", "noneLeaf", ".", "contains", "(", "u", ")", ")", "{", "validURISet", ".", "add", "(", "u", ")", ";", "}", "}", "// remove disabled URIs", "for", "(", "URI", "disable", ":", "disabledURISet", ")", "{", "if", "(", "validURISet", ".", "remove", "(", "disable", ")", ")", "{", "log", ".", "info", "(", "\"skip disabled dataset \"", "+", "disable", ")", ";", "}", "else", "{", "log", ".", "info", "(", "\"There's no URI \"", "+", "disable", "+", "\" available in validURISet.\"", ")", ";", "}", "}", "return", "validURISet", ";", "}" ]
Extended signature for testing convenience.
[ "Extended", "signature", "for", "testing", "convenience", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-data-management/src/main/java/org/apache/gobblin/data/management/copy/replication/ConfigBasedDatasetsFinder.java#L187-L235
25,908
apache/incubator-gobblin
gobblin-utility/src/main/java/org/apache/gobblin/util/recordcount/CompactionRecordCountProvider.java
CompactionRecordCountProvider.getRecordCount
@Override public long getRecordCount(Path filepath) { String filename = filepath.getName(); Preconditions.checkArgument(filename.startsWith(M_OUTPUT_FILE_PREFIX) || filename.startsWith(MR_OUTPUT_FILE_PREFIX), String.format("%s is not a supported filename, which should start with %s, or %s.", filename, M_OUTPUT_FILE_PREFIX, MR_OUTPUT_FILE_PREFIX)); String prefixWithCounts = filename.split(Pattern.quote(SEPARATOR))[0]; if (filename.startsWith(M_OUTPUT_FILE_PREFIX)) { return Long.parseLong(prefixWithCounts.substring(M_OUTPUT_FILE_PREFIX.length())); } return Long.parseLong(prefixWithCounts.substring(MR_OUTPUT_FILE_PREFIX.length())); }
java
/**
 * Extracts the record count encoded in a compaction output file name.
 *
 * @param filepath path whose file name starts with either the map-only or the
 *                 map-reduce output prefix, followed by the record count
 * @return the record count parsed from the file name
 */
@Override
public long getRecordCount(Path filepath) {
  String filename = filepath.getName();
  boolean isMapOnly = filename.startsWith(M_OUTPUT_FILE_PREFIX);
  Preconditions.checkArgument(isMapOnly || filename.startsWith(MR_OUTPUT_FILE_PREFIX),
      String.format("%s is not a supported filename, which should start with %s, or %s.", filename,
          M_OUTPUT_FILE_PREFIX, MR_OUTPUT_FILE_PREFIX));
  // The count immediately follows the prefix in the first SEPARATOR-delimited segment.
  String prefixWithCounts = filename.split(Pattern.quote(SEPARATOR))[0];
  String prefix = isMapOnly ? M_OUTPUT_FILE_PREFIX : MR_OUTPUT_FILE_PREFIX;
  return Long.parseLong(prefixWithCounts.substring(prefix.length()));
}
[ "@", "Override", "public", "long", "getRecordCount", "(", "Path", "filepath", ")", "{", "String", "filename", "=", "filepath", ".", "getName", "(", ")", ";", "Preconditions", ".", "checkArgument", "(", "filename", ".", "startsWith", "(", "M_OUTPUT_FILE_PREFIX", ")", "||", "filename", ".", "startsWith", "(", "MR_OUTPUT_FILE_PREFIX", ")", ",", "String", ".", "format", "(", "\"%s is not a supported filename, which should start with %s, or %s.\"", ",", "filename", ",", "M_OUTPUT_FILE_PREFIX", ",", "MR_OUTPUT_FILE_PREFIX", ")", ")", ";", "String", "prefixWithCounts", "=", "filename", ".", "split", "(", "Pattern", ".", "quote", "(", "SEPARATOR", ")", ")", "[", "0", "]", ";", "if", "(", "filename", ".", "startsWith", "(", "M_OUTPUT_FILE_PREFIX", ")", ")", "{", "return", "Long", ".", "parseLong", "(", "prefixWithCounts", ".", "substring", "(", "M_OUTPUT_FILE_PREFIX", ".", "length", "(", ")", ")", ")", ";", "}", "return", "Long", ".", "parseLong", "(", "prefixWithCounts", ".", "substring", "(", "MR_OUTPUT_FILE_PREFIX", ".", "length", "(", ")", ")", ")", ";", "}" ]
Get the record count through filename.
[ "Get", "the", "record", "count", "through", "filename", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-utility/src/main/java/org/apache/gobblin/util/recordcount/CompactionRecordCountProvider.java#L76-L87
25,909
apache/incubator-gobblin
gobblin-service/src/main/java/org/apache/gobblin/service/modules/flow/MultiHopsFlowToJobSpecCompiler.java
MultiHopsFlowToJobSpecCompiler.findPath
private void findPath(Map<Spec, SpecExecutor> specExecutorInstanceMap, Spec spec) { inMemoryWeightGraphGenerator(); FlowSpec flowSpec = (FlowSpec) spec; if (optionalUserSpecifiedPath.isPresent()) { log.info("Starting to evaluate user's specified path ... "); if (userSpecifiedPathVerificator(specExecutorInstanceMap, flowSpec)) { log.info("User specified path[ " + optionalUserSpecifiedPath.get() + "] successfully verified."); return; } else { log.error("Will not execute user specified path[ " + optionalUserSpecifiedPath.get() + "]"); log.info("Start to execute FlowCompiler's algorithm for valid data movement path"); } } ServiceNode sourceNode = new BaseServiceNodeImpl(flowSpec.getConfig().getString(ServiceConfigKeys.FLOW_SOURCE_IDENTIFIER_KEY)); ServiceNode targetNode = new BaseServiceNodeImpl(flowSpec.getConfig().getString(ServiceConfigKeys.FLOW_DESTINATION_IDENTIFIER_KEY)); List<FlowEdge> resultEdgePath = dijkstraBasedPathFindingHelper(sourceNode, targetNode, this.weightedGraph); for (int i = 0; i < resultEdgePath.size() ; i++) { FlowEdge tmpFlowEdge = resultEdgePath.get(i); ServiceNode edgeSrcNode = ((LoadBasedFlowEdgeImpl) tmpFlowEdge).getSourceNode(); ServiceNode edgeTgtNode = ((LoadBasedFlowEdgeImpl) tmpFlowEdge).getTargetNode(); specExecutorInstanceMap.put(convertHopToJobSpec(edgeSrcNode, edgeTgtNode, flowSpec), ((LoadBasedFlowEdgeImpl) (resultEdgePath.get(i))).getSpecExecutorInstance()); } }
java
private void findPath(Map<Spec, SpecExecutor> specExecutorInstanceMap, Spec spec) { inMemoryWeightGraphGenerator(); FlowSpec flowSpec = (FlowSpec) spec; if (optionalUserSpecifiedPath.isPresent()) { log.info("Starting to evaluate user's specified path ... "); if (userSpecifiedPathVerificator(specExecutorInstanceMap, flowSpec)) { log.info("User specified path[ " + optionalUserSpecifiedPath.get() + "] successfully verified."); return; } else { log.error("Will not execute user specified path[ " + optionalUserSpecifiedPath.get() + "]"); log.info("Start to execute FlowCompiler's algorithm for valid data movement path"); } } ServiceNode sourceNode = new BaseServiceNodeImpl(flowSpec.getConfig().getString(ServiceConfigKeys.FLOW_SOURCE_IDENTIFIER_KEY)); ServiceNode targetNode = new BaseServiceNodeImpl(flowSpec.getConfig().getString(ServiceConfigKeys.FLOW_DESTINATION_IDENTIFIER_KEY)); List<FlowEdge> resultEdgePath = dijkstraBasedPathFindingHelper(sourceNode, targetNode, this.weightedGraph); for (int i = 0; i < resultEdgePath.size() ; i++) { FlowEdge tmpFlowEdge = resultEdgePath.get(i); ServiceNode edgeSrcNode = ((LoadBasedFlowEdgeImpl) tmpFlowEdge).getSourceNode(); ServiceNode edgeTgtNode = ((LoadBasedFlowEdgeImpl) tmpFlowEdge).getTargetNode(); specExecutorInstanceMap.put(convertHopToJobSpec(edgeSrcNode, edgeTgtNode, flowSpec), ((LoadBasedFlowEdgeImpl) (resultEdgePath.get(i))).getSpecExecutorInstance()); } }
[ "private", "void", "findPath", "(", "Map", "<", "Spec", ",", "SpecExecutor", ">", "specExecutorInstanceMap", ",", "Spec", "spec", ")", "{", "inMemoryWeightGraphGenerator", "(", ")", ";", "FlowSpec", "flowSpec", "=", "(", "FlowSpec", ")", "spec", ";", "if", "(", "optionalUserSpecifiedPath", ".", "isPresent", "(", ")", ")", "{", "log", ".", "info", "(", "\"Starting to evaluate user's specified path ... \"", ")", ";", "if", "(", "userSpecifiedPathVerificator", "(", "specExecutorInstanceMap", ",", "flowSpec", ")", ")", "{", "log", ".", "info", "(", "\"User specified path[ \"", "+", "optionalUserSpecifiedPath", ".", "get", "(", ")", "+", "\"] successfully verified.\"", ")", ";", "return", ";", "}", "else", "{", "log", ".", "error", "(", "\"Will not execute user specified path[ \"", "+", "optionalUserSpecifiedPath", ".", "get", "(", ")", "+", "\"]\"", ")", ";", "log", ".", "info", "(", "\"Start to execute FlowCompiler's algorithm for valid data movement path\"", ")", ";", "}", "}", "ServiceNode", "sourceNode", "=", "new", "BaseServiceNodeImpl", "(", "flowSpec", ".", "getConfig", "(", ")", ".", "getString", "(", "ServiceConfigKeys", ".", "FLOW_SOURCE_IDENTIFIER_KEY", ")", ")", ";", "ServiceNode", "targetNode", "=", "new", "BaseServiceNodeImpl", "(", "flowSpec", ".", "getConfig", "(", ")", ".", "getString", "(", "ServiceConfigKeys", ".", "FLOW_DESTINATION_IDENTIFIER_KEY", ")", ")", ";", "List", "<", "FlowEdge", ">", "resultEdgePath", "=", "dijkstraBasedPathFindingHelper", "(", "sourceNode", ",", "targetNode", ",", "this", ".", "weightedGraph", ")", ";", "for", "(", "int", "i", "=", "0", ";", "i", "<", "resultEdgePath", ".", "size", "(", ")", ";", "i", "++", ")", "{", "FlowEdge", "tmpFlowEdge", "=", "resultEdgePath", ".", "get", "(", "i", ")", ";", "ServiceNode", "edgeSrcNode", "=", "(", "(", "LoadBasedFlowEdgeImpl", ")", "tmpFlowEdge", ")", ".", "getSourceNode", "(", ")", ";", "ServiceNode", "edgeTgtNode", "=", "(", "(", "LoadBasedFlowEdgeImpl", ")", "tmpFlowEdge", ")", 
".", "getTargetNode", "(", ")", ";", "specExecutorInstanceMap", ".", "put", "(", "convertHopToJobSpec", "(", "edgeSrcNode", ",", "edgeTgtNode", ",", "flowSpec", ")", ",", "(", "(", "LoadBasedFlowEdgeImpl", ")", "(", "resultEdgePath", ".", "get", "(", "i", ")", ")", ")", ".", "getSpecExecutorInstance", "(", ")", ")", ";", "}", "}" ]
that a topologySpec not being reflected in findPath.
[ "that", "a", "topologySpec", "not", "being", "reflected", "in", "findPath", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-service/src/main/java/org/apache/gobblin/service/modules/flow/MultiHopsFlowToJobSpecCompiler.java#L179-L207
25,910
apache/incubator-gobblin
gobblin-service/src/main/java/org/apache/gobblin/service/modules/flow/MultiHopsFlowToJobSpecCompiler.java
MultiHopsFlowToJobSpecCompiler.populateEdgeTemplateMap
@Override protected void populateEdgeTemplateMap() { if (templateCatalog.isPresent()) { for (FlowEdge flowEdge : this.weightedGraph.edgeSet()) { edgeTemplateMap.put(flowEdge.getEdgeIdentity(), templateCatalog.get(). getAllTemplates(). stream().map(jobTemplate -> jobTemplate.getUri()).collect(Collectors.toList())); } } }
java
@Override protected void populateEdgeTemplateMap() { if (templateCatalog.isPresent()) { for (FlowEdge flowEdge : this.weightedGraph.edgeSet()) { edgeTemplateMap.put(flowEdge.getEdgeIdentity(), templateCatalog.get(). getAllTemplates(). stream().map(jobTemplate -> jobTemplate.getUri()).collect(Collectors.toList())); } } }
[ "@", "Override", "protected", "void", "populateEdgeTemplateMap", "(", ")", "{", "if", "(", "templateCatalog", ".", "isPresent", "(", ")", ")", "{", "for", "(", "FlowEdge", "flowEdge", ":", "this", ".", "weightedGraph", ".", "edgeSet", "(", ")", ")", "{", "edgeTemplateMap", ".", "put", "(", "flowEdge", ".", "getEdgeIdentity", "(", ")", ",", "templateCatalog", ".", "get", "(", ")", ".", "getAllTemplates", "(", ")", ".", "stream", "(", ")", ".", "map", "(", "jobTemplate", "->", "jobTemplate", ".", "getUri", "(", ")", ")", ".", "collect", "(", "Collectors", ".", "toList", "(", ")", ")", ")", ";", "}", "}", "}" ]
As the base implementation here, all templates will be considered for each edge.
[ "As", "the", "base", "implementation", "here", "all", "templates", "will", "be", "considered", "for", "each", "edge", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-service/src/main/java/org/apache/gobblin/service/modules/flow/MultiHopsFlowToJobSpecCompiler.java#L212-L221
25,911
apache/incubator-gobblin
gobblin-service/src/main/java/org/apache/gobblin/service/modules/flow/MultiHopsFlowToJobSpecCompiler.java
MultiHopsFlowToJobSpecCompiler.userSpecifiedPathVerificator
private boolean userSpecifiedPathVerificator(Map<Spec, SpecExecutor> specExecutorInstanceMap, FlowSpec flowSpec) { Map<Spec, SpecExecutor> tmpSpecExecutorInstanceMap = new HashMap<>(); List<String> userSpecfiedPath = Arrays.asList(optionalUserSpecifiedPath.get().split(",")); for (int i = 0; i < userSpecfiedPath.size() - 1; i++) { ServiceNode sourceNode = new BaseServiceNodeImpl(userSpecfiedPath.get(i)); ServiceNode targetNode = new BaseServiceNodeImpl(userSpecfiedPath.get(i + 1)); if (weightedGraph.containsVertex(sourceNode) && weightedGraph.containsVertex(targetNode) && weightedGraph.containsEdge(sourceNode, targetNode)) { tmpSpecExecutorInstanceMap.put(convertHopToJobSpec(sourceNode, targetNode, flowSpec), (((LoadBasedFlowEdgeImpl) weightedGraph.getEdge(sourceNode, targetNode)).getSpecExecutorInstance())); } else { log.error("User Specified Path is invalid"); return false; } } specExecutorInstanceMap.putAll(tmpSpecExecutorInstanceMap); return true; }
java
private boolean userSpecifiedPathVerificator(Map<Spec, SpecExecutor> specExecutorInstanceMap, FlowSpec flowSpec) { Map<Spec, SpecExecutor> tmpSpecExecutorInstanceMap = new HashMap<>(); List<String> userSpecfiedPath = Arrays.asList(optionalUserSpecifiedPath.get().split(",")); for (int i = 0; i < userSpecfiedPath.size() - 1; i++) { ServiceNode sourceNode = new BaseServiceNodeImpl(userSpecfiedPath.get(i)); ServiceNode targetNode = new BaseServiceNodeImpl(userSpecfiedPath.get(i + 1)); if (weightedGraph.containsVertex(sourceNode) && weightedGraph.containsVertex(targetNode) && weightedGraph.containsEdge(sourceNode, targetNode)) { tmpSpecExecutorInstanceMap.put(convertHopToJobSpec(sourceNode, targetNode, flowSpec), (((LoadBasedFlowEdgeImpl) weightedGraph.getEdge(sourceNode, targetNode)).getSpecExecutorInstance())); } else { log.error("User Specified Path is invalid"); return false; } } specExecutorInstanceMap.putAll(tmpSpecExecutorInstanceMap); return true; }
[ "private", "boolean", "userSpecifiedPathVerificator", "(", "Map", "<", "Spec", ",", "SpecExecutor", ">", "specExecutorInstanceMap", ",", "FlowSpec", "flowSpec", ")", "{", "Map", "<", "Spec", ",", "SpecExecutor", ">", "tmpSpecExecutorInstanceMap", "=", "new", "HashMap", "<>", "(", ")", ";", "List", "<", "String", ">", "userSpecfiedPath", "=", "Arrays", ".", "asList", "(", "optionalUserSpecifiedPath", ".", "get", "(", ")", ".", "split", "(", "\",\"", ")", ")", ";", "for", "(", "int", "i", "=", "0", ";", "i", "<", "userSpecfiedPath", ".", "size", "(", ")", "-", "1", ";", "i", "++", ")", "{", "ServiceNode", "sourceNode", "=", "new", "BaseServiceNodeImpl", "(", "userSpecfiedPath", ".", "get", "(", "i", ")", ")", ";", "ServiceNode", "targetNode", "=", "new", "BaseServiceNodeImpl", "(", "userSpecfiedPath", ".", "get", "(", "i", "+", "1", ")", ")", ";", "if", "(", "weightedGraph", ".", "containsVertex", "(", "sourceNode", ")", "&&", "weightedGraph", ".", "containsVertex", "(", "targetNode", ")", "&&", "weightedGraph", ".", "containsEdge", "(", "sourceNode", ",", "targetNode", ")", ")", "{", "tmpSpecExecutorInstanceMap", ".", "put", "(", "convertHopToJobSpec", "(", "sourceNode", ",", "targetNode", ",", "flowSpec", ")", ",", "(", "(", "(", "LoadBasedFlowEdgeImpl", ")", "weightedGraph", ".", "getEdge", "(", "sourceNode", ",", "targetNode", ")", ")", ".", "getSpecExecutorInstance", "(", ")", ")", ")", ";", "}", "else", "{", "log", ".", "error", "(", "\"User Specified Path is invalid\"", ")", ";", "return", "false", ";", "}", "}", "specExecutorInstanceMap", ".", "putAll", "(", "tmpSpecExecutorInstanceMap", ")", ";", "return", "true", ";", "}" ]
else return true.
[ "else", "return", "true", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-service/src/main/java/org/apache/gobblin/service/modules/flow/MultiHopsFlowToJobSpecCompiler.java#L225-L242
25,912
apache/incubator-gobblin
gobblin-service/src/main/java/org/apache/gobblin/service/modules/flow/MultiHopsFlowToJobSpecCompiler.java
MultiHopsFlowToJobSpecCompiler.weightGraphGenerateHelper
private void weightGraphGenerateHelper(TopologySpec topologySpec) { try { Map<ServiceNode, ServiceNode> capabilities = topologySpec.getSpecExecutor().getCapabilities().get(); for (Map.Entry<ServiceNode, ServiceNode> capability : capabilities.entrySet()) { BaseServiceNodeImpl sourceNode = new BaseServiceNodeImpl(capability.getKey().getNodeName()); BaseServiceNodeImpl targetNode = new BaseServiceNodeImpl(capability.getValue().getNodeName()); if (!weightedGraph.containsVertex(sourceNode)) { weightedGraph.addVertex(sourceNode); } if (!weightedGraph.containsVertex(targetNode)) { weightedGraph.addVertex(targetNode); } FlowEdge flowEdge = new LoadBasedFlowEdgeImpl(sourceNode, targetNode, defaultFlowEdgeProps, topologySpec.getSpecExecutor()); // In Multi-Graph if flowEdge existed, just skip it. if (!weightedGraph.containsEdge(flowEdge)) { weightedGraph.addEdge(sourceNode, targetNode, flowEdge); } } } catch (InterruptedException | ExecutionException e) { Instrumented.markMeter(this.flowCompilationFailedMeter); throw new RuntimeException("Cannot determine topology capabilities", e); } }
java
private void weightGraphGenerateHelper(TopologySpec topologySpec) { try { Map<ServiceNode, ServiceNode> capabilities = topologySpec.getSpecExecutor().getCapabilities().get(); for (Map.Entry<ServiceNode, ServiceNode> capability : capabilities.entrySet()) { BaseServiceNodeImpl sourceNode = new BaseServiceNodeImpl(capability.getKey().getNodeName()); BaseServiceNodeImpl targetNode = new BaseServiceNodeImpl(capability.getValue().getNodeName()); if (!weightedGraph.containsVertex(sourceNode)) { weightedGraph.addVertex(sourceNode); } if (!weightedGraph.containsVertex(targetNode)) { weightedGraph.addVertex(targetNode); } FlowEdge flowEdge = new LoadBasedFlowEdgeImpl(sourceNode, targetNode, defaultFlowEdgeProps, topologySpec.getSpecExecutor()); // In Multi-Graph if flowEdge existed, just skip it. if (!weightedGraph.containsEdge(flowEdge)) { weightedGraph.addEdge(sourceNode, targetNode, flowEdge); } } } catch (InterruptedException | ExecutionException e) { Instrumented.markMeter(this.flowCompilationFailedMeter); throw new RuntimeException("Cannot determine topology capabilities", e); } }
[ "private", "void", "weightGraphGenerateHelper", "(", "TopologySpec", "topologySpec", ")", "{", "try", "{", "Map", "<", "ServiceNode", ",", "ServiceNode", ">", "capabilities", "=", "topologySpec", ".", "getSpecExecutor", "(", ")", ".", "getCapabilities", "(", ")", ".", "get", "(", ")", ";", "for", "(", "Map", ".", "Entry", "<", "ServiceNode", ",", "ServiceNode", ">", "capability", ":", "capabilities", ".", "entrySet", "(", ")", ")", "{", "BaseServiceNodeImpl", "sourceNode", "=", "new", "BaseServiceNodeImpl", "(", "capability", ".", "getKey", "(", ")", ".", "getNodeName", "(", ")", ")", ";", "BaseServiceNodeImpl", "targetNode", "=", "new", "BaseServiceNodeImpl", "(", "capability", ".", "getValue", "(", ")", ".", "getNodeName", "(", ")", ")", ";", "if", "(", "!", "weightedGraph", ".", "containsVertex", "(", "sourceNode", ")", ")", "{", "weightedGraph", ".", "addVertex", "(", "sourceNode", ")", ";", "}", "if", "(", "!", "weightedGraph", ".", "containsVertex", "(", "targetNode", ")", ")", "{", "weightedGraph", ".", "addVertex", "(", "targetNode", ")", ";", "}", "FlowEdge", "flowEdge", "=", "new", "LoadBasedFlowEdgeImpl", "(", "sourceNode", ",", "targetNode", ",", "defaultFlowEdgeProps", ",", "topologySpec", ".", "getSpecExecutor", "(", ")", ")", ";", "// In Multi-Graph if flowEdge existed, just skip it.", "if", "(", "!", "weightedGraph", ".", "containsEdge", "(", "flowEdge", ")", ")", "{", "weightedGraph", ".", "addEdge", "(", "sourceNode", ",", "targetNode", ",", "flowEdge", ")", ";", "}", "}", "}", "catch", "(", "InterruptedException", "|", "ExecutionException", "e", ")", "{", "Instrumented", ".", "markMeter", "(", "this", ".", "flowCompilationFailedMeter", ")", ";", "throw", "new", "RuntimeException", "(", "\"Cannot determine topology capabilities\"", ",", "e", ")", ";", "}", "}" ]
Helper function for transform TopologySpecMap into a weightedDirectedGraph.
[ "Helper", "function", "for", "transform", "TopologySpecMap", "into", "a", "weightedDirectedGraph", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-service/src/main/java/org/apache/gobblin/service/modules/flow/MultiHopsFlowToJobSpecCompiler.java#L245-L272
25,913
apache/incubator-gobblin
gobblin-service/src/main/java/org/apache/gobblin/service/modules/flow/MultiHopsFlowToJobSpecCompiler.java
MultiHopsFlowToJobSpecCompiler.convertHopToJobSpec
private JobSpec convertHopToJobSpec (ServiceNode sourceNode, ServiceNode targetNode, FlowSpec flowSpec) { FlowEdge flowEdge = weightedGraph.getAllEdges(sourceNode, targetNode).iterator().next(); URI templateURI = getTemplateURI (sourceNode, targetNode, flowSpec, flowEdge); return buildJobSpec(sourceNode, targetNode, templateURI, flowSpec); }
java
private JobSpec convertHopToJobSpec (ServiceNode sourceNode, ServiceNode targetNode, FlowSpec flowSpec) { FlowEdge flowEdge = weightedGraph.getAllEdges(sourceNode, targetNode).iterator().next(); URI templateURI = getTemplateURI (sourceNode, targetNode, flowSpec, flowEdge); return buildJobSpec(sourceNode, targetNode, templateURI, flowSpec); }
[ "private", "JobSpec", "convertHopToJobSpec", "(", "ServiceNode", "sourceNode", ",", "ServiceNode", "targetNode", ",", "FlowSpec", "flowSpec", ")", "{", "FlowEdge", "flowEdge", "=", "weightedGraph", ".", "getAllEdges", "(", "sourceNode", ",", "targetNode", ")", ".", "iterator", "(", ")", ".", "next", "(", ")", ";", "URI", "templateURI", "=", "getTemplateURI", "(", "sourceNode", ",", "targetNode", ",", "flowSpec", ",", "flowEdge", ")", ";", "return", "buildJobSpec", "(", "sourceNode", ",", "targetNode", ",", "templateURI", ",", "flowSpec", ")", ";", "}" ]
A naive implementation of resolving templates in each JobSpec among Multi-hop FlowSpec. Handle the case when edge is not specified. Always select the first available template.
[ "A", "naive", "implementation", "of", "resolving", "templates", "in", "each", "JobSpec", "among", "Multi", "-", "hop", "FlowSpec", ".", "Handle", "the", "case", "when", "edge", "is", "not", "specified", ".", "Always", "select", "the", "first", "available", "template", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-service/src/main/java/org/apache/gobblin/service/modules/flow/MultiHopsFlowToJobSpecCompiler.java#L327-L331
25,914
apache/incubator-gobblin
gobblin-service/src/main/java/org/apache/gobblin/service/modules/flow/MultiHopsFlowToJobSpecCompiler.java
MultiHopsFlowToJobSpecCompiler.jobSpecURIGenerator
@Override public URI jobSpecURIGenerator(Object... objects) { FlowSpec flowSpec = (FlowSpec) objects[0]; ServiceNode sourceNode = (ServiceNode) objects[1]; ServiceNode targetNode = (ServiceNode) objects[2]; try { return new URI(JobSpec.Builder.DEFAULT_JOB_CATALOG_SCHEME, flowSpec.getUri().getAuthority(), StringUtils.appendIfMissing(StringUtils.prependIfMissing(flowSpec.getUri().getPath(), "/"),"/") + sourceNode.getNodeName() + "-" + targetNode.getNodeName(), null); } catch (URISyntaxException e) { log.error( "URI construction failed when jobSpec from " + sourceNode.getNodeName() + " to " + targetNode.getNodeName()); throw new RuntimeException(); } }
java
@Override public URI jobSpecURIGenerator(Object... objects) { FlowSpec flowSpec = (FlowSpec) objects[0]; ServiceNode sourceNode = (ServiceNode) objects[1]; ServiceNode targetNode = (ServiceNode) objects[2]; try { return new URI(JobSpec.Builder.DEFAULT_JOB_CATALOG_SCHEME, flowSpec.getUri().getAuthority(), StringUtils.appendIfMissing(StringUtils.prependIfMissing(flowSpec.getUri().getPath(), "/"),"/") + sourceNode.getNodeName() + "-" + targetNode.getNodeName(), null); } catch (URISyntaxException e) { log.error( "URI construction failed when jobSpec from " + sourceNode.getNodeName() + " to " + targetNode.getNodeName()); throw new RuntimeException(); } }
[ "@", "Override", "public", "URI", "jobSpecURIGenerator", "(", "Object", "...", "objects", ")", "{", "FlowSpec", "flowSpec", "=", "(", "FlowSpec", ")", "objects", "[", "0", "]", ";", "ServiceNode", "sourceNode", "=", "(", "ServiceNode", ")", "objects", "[", "1", "]", ";", "ServiceNode", "targetNode", "=", "(", "ServiceNode", ")", "objects", "[", "2", "]", ";", "try", "{", "return", "new", "URI", "(", "JobSpec", ".", "Builder", ".", "DEFAULT_JOB_CATALOG_SCHEME", ",", "flowSpec", ".", "getUri", "(", ")", ".", "getAuthority", "(", ")", ",", "StringUtils", ".", "appendIfMissing", "(", "StringUtils", ".", "prependIfMissing", "(", "flowSpec", ".", "getUri", "(", ")", ".", "getPath", "(", ")", ",", "\"/\"", ")", ",", "\"/\"", ")", "+", "sourceNode", ".", "getNodeName", "(", ")", "+", "\"-\"", "+", "targetNode", ".", "getNodeName", "(", ")", ",", "null", ")", ";", "}", "catch", "(", "URISyntaxException", "e", ")", "{", "log", ".", "error", "(", "\"URI construction failed when jobSpec from \"", "+", "sourceNode", ".", "getNodeName", "(", ")", "+", "\" to \"", "+", "targetNode", ".", "getNodeName", "(", ")", ")", ";", "throw", "new", "RuntimeException", "(", ")", ";", "}", "}" ]
A naive implementation of generating a jobSpec's URI within a multi-hop logical Flow.
[ "A", "naive", "implementation", "of", "generating", "a", "jobSpec", "s", "URI", "within", "a", "multi", "-", "hop", "logical", "Flow", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-service/src/main/java/org/apache/gobblin/service/modules/flow/MultiHopsFlowToJobSpecCompiler.java#L343-L357
25,915
apache/incubator-gobblin
gobblin-core/src/main/java/org/apache/gobblin/source/DatePartitionedNestedRetriever.java
DatePartitionedNestedRetriever.getFileFilter
private PathFilter getFileFilter() { final String extension = (this.expectedExtension.startsWith(".")) ? this.expectedExtension : "." + this.expectedExtension; return new PathFilter() { @Override public boolean accept(Path path) { return path.getName().endsWith(extension) && !(schemaInSourceDir && path.getName().equals(schemaFile)) ; } }; }
java
private PathFilter getFileFilter() { final String extension = (this.expectedExtension.startsWith(".")) ? this.expectedExtension : "." + this.expectedExtension; return new PathFilter() { @Override public boolean accept(Path path) { return path.getName().endsWith(extension) && !(schemaInSourceDir && path.getName().equals(schemaFile)) ; } }; }
[ "private", "PathFilter", "getFileFilter", "(", ")", "{", "final", "String", "extension", "=", "(", "this", ".", "expectedExtension", ".", "startsWith", "(", "\".\"", ")", ")", "?", "this", ".", "expectedExtension", ":", "\".\"", "+", "this", ".", "expectedExtension", ";", "return", "new", "PathFilter", "(", ")", "{", "@", "Override", "public", "boolean", "accept", "(", "Path", "path", ")", "{", "return", "path", ".", "getName", "(", ")", ".", "endsWith", "(", "extension", ")", "&&", "!", "(", "schemaInSourceDir", "&&", "path", ".", "getName", "(", ")", ".", "equals", "(", "schemaFile", ")", ")", ";", "}", "}", ";", "}" ]
This method is to filter out files that don't need to be processed by extension @return the pathFilter
[ "This", "method", "is", "to", "filter", "out", "files", "that", "don", "t", "need", "to", "be", "processed", "by", "extension" ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-core/src/main/java/org/apache/gobblin/source/DatePartitionedNestedRetriever.java#L214-L226
25,916
apache/incubator-gobblin
gobblin-hive-registration/src/main/java/org/apache/gobblin/hive/metastore/HiveMetaStoreUtils.java
HiveMetaStoreUtils.getDeserializer
private static Deserializer getDeserializer(HiveRegistrationUnit unit) { Optional<String> serdeClass = unit.getSerDeType(); if (!serdeClass.isPresent()) { return null; } String serde = serdeClass.get(); HiveConf hiveConf; Deserializer deserializer; try { hiveConf = SharedResourcesBrokerFactory .getImplicitBroker().getSharedResource(new HiveConfFactory<>(), SharedHiveConfKey.INSTANCE); deserializer = ReflectionUtils.newInstance(hiveConf.getClassByName(serde).asSubclass(Deserializer.class), hiveConf); } catch (ClassNotFoundException e) { LOG.warn("Serde class " + serde + " not found!", e); return null; } catch (NotConfiguredException nce) { LOG.error("Implicit broker is not configured properly", nce); return null; } Properties props = new Properties(); props.putAll(unit.getProps().getProperties()); props.putAll(unit.getStorageProps().getProperties()); props.putAll(unit.getSerDeProps().getProperties()); try { SerDeUtils.initializeSerDe(deserializer, hiveConf, props, null); // Temporary check that's needed until Gobblin is upgraded to Hive 1.1.0+, which includes the improved error // handling in AvroSerDe added in HIVE-7868. if (deserializer instanceof AvroSerDe) { try { inVokeDetermineSchemaOrThrowExceptionMethod(props, new Configuration()); } catch (SchemaParseException | InvocationTargetException | NoSuchMethodException | IllegalAccessException e) { LOG.warn("Failed to initialize AvroSerDe."); throw new SerDeException(e); } } } catch (SerDeException e) { LOG.warn("Failed to initialize serde " + serde + " with properties " + props + " for table " + unit.getDbName() + "." + unit.getTableName()); return null; } return deserializer; }
java
private static Deserializer getDeserializer(HiveRegistrationUnit unit) { Optional<String> serdeClass = unit.getSerDeType(); if (!serdeClass.isPresent()) { return null; } String serde = serdeClass.get(); HiveConf hiveConf; Deserializer deserializer; try { hiveConf = SharedResourcesBrokerFactory .getImplicitBroker().getSharedResource(new HiveConfFactory<>(), SharedHiveConfKey.INSTANCE); deserializer = ReflectionUtils.newInstance(hiveConf.getClassByName(serde).asSubclass(Deserializer.class), hiveConf); } catch (ClassNotFoundException e) { LOG.warn("Serde class " + serde + " not found!", e); return null; } catch (NotConfiguredException nce) { LOG.error("Implicit broker is not configured properly", nce); return null; } Properties props = new Properties(); props.putAll(unit.getProps().getProperties()); props.putAll(unit.getStorageProps().getProperties()); props.putAll(unit.getSerDeProps().getProperties()); try { SerDeUtils.initializeSerDe(deserializer, hiveConf, props, null); // Temporary check that's needed until Gobblin is upgraded to Hive 1.1.0+, which includes the improved error // handling in AvroSerDe added in HIVE-7868. if (deserializer instanceof AvroSerDe) { try { inVokeDetermineSchemaOrThrowExceptionMethod(props, new Configuration()); } catch (SchemaParseException | InvocationTargetException | NoSuchMethodException | IllegalAccessException e) { LOG.warn("Failed to initialize AvroSerDe."); throw new SerDeException(e); } } } catch (SerDeException e) { LOG.warn("Failed to initialize serde " + serde + " with properties " + props + " for table " + unit.getDbName() + "." + unit.getTableName()); return null; } return deserializer; }
[ "private", "static", "Deserializer", "getDeserializer", "(", "HiveRegistrationUnit", "unit", ")", "{", "Optional", "<", "String", ">", "serdeClass", "=", "unit", ".", "getSerDeType", "(", ")", ";", "if", "(", "!", "serdeClass", ".", "isPresent", "(", ")", ")", "{", "return", "null", ";", "}", "String", "serde", "=", "serdeClass", ".", "get", "(", ")", ";", "HiveConf", "hiveConf", ";", "Deserializer", "deserializer", ";", "try", "{", "hiveConf", "=", "SharedResourcesBrokerFactory", ".", "getImplicitBroker", "(", ")", ".", "getSharedResource", "(", "new", "HiveConfFactory", "<>", "(", ")", ",", "SharedHiveConfKey", ".", "INSTANCE", ")", ";", "deserializer", "=", "ReflectionUtils", ".", "newInstance", "(", "hiveConf", ".", "getClassByName", "(", "serde", ")", ".", "asSubclass", "(", "Deserializer", ".", "class", ")", ",", "hiveConf", ")", ";", "}", "catch", "(", "ClassNotFoundException", "e", ")", "{", "LOG", ".", "warn", "(", "\"Serde class \"", "+", "serde", "+", "\" not found!\"", ",", "e", ")", ";", "return", "null", ";", "}", "catch", "(", "NotConfiguredException", "nce", ")", "{", "LOG", ".", "error", "(", "\"Implicit broker is not configured properly\"", ",", "nce", ")", ";", "return", "null", ";", "}", "Properties", "props", "=", "new", "Properties", "(", ")", ";", "props", ".", "putAll", "(", "unit", ".", "getProps", "(", ")", ".", "getProperties", "(", ")", ")", ";", "props", ".", "putAll", "(", "unit", ".", "getStorageProps", "(", ")", ".", "getProperties", "(", ")", ")", ";", "props", ".", "putAll", "(", "unit", ".", "getSerDeProps", "(", ")", ".", "getProperties", "(", ")", ")", ";", "try", "{", "SerDeUtils", ".", "initializeSerDe", "(", "deserializer", ",", "hiveConf", ",", "props", ",", "null", ")", ";", "// Temporary check that's needed until Gobblin is upgraded to Hive 1.1.0+, which includes the improved error", "// handling in AvroSerDe added in HIVE-7868.", "if", "(", "deserializer", "instanceof", "AvroSerDe", ")", "{", "try", "{", 
"inVokeDetermineSchemaOrThrowExceptionMethod", "(", "props", ",", "new", "Configuration", "(", ")", ")", ";", "}", "catch", "(", "SchemaParseException", "|", "InvocationTargetException", "|", "NoSuchMethodException", "|", "IllegalAccessException", "e", ")", "{", "LOG", ".", "warn", "(", "\"Failed to initialize AvroSerDe.\"", ")", ";", "throw", "new", "SerDeException", "(", "e", ")", ";", "}", "}", "}", "catch", "(", "SerDeException", "e", ")", "{", "LOG", ".", "warn", "(", "\"Failed to initialize serde \"", "+", "serde", "+", "\" with properties \"", "+", "props", "+", "\" for table \"", "+", "unit", ".", "getDbName", "(", ")", "+", "\".\"", "+", "unit", ".", "getTableName", "(", ")", ")", ";", "return", "null", ";", "}", "return", "deserializer", ";", "}" ]
Returns a Deserializer from HiveRegistrationUnit if present and successfully initialized. Else returns null.
[ "Returns", "a", "Deserializer", "from", "HiveRegistrationUnit", "if", "present", "and", "successfully", "initialized", ".", "Else", "returns", "null", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-hive-registration/src/main/java/org/apache/gobblin/hive/metastore/HiveMetaStoreUtils.java#L371-L418
25,917
apache/incubator-gobblin
gobblin-runtime/src/main/java/org/apache/gobblin/runtime/job_catalog/ImmutableFSJobCatalog.java
ImmutableFSJobCatalog.getJobs
@Override public synchronized List<JobSpec> getJobs() { return Lists.transform(Lists.newArrayList( loader.loadPullFilesRecursively(loader.getRootDirectory(), this.sysConfig, shouldLoadGlobalConf())), this.converter); }
java
@Override public synchronized List<JobSpec> getJobs() { return Lists.transform(Lists.newArrayList( loader.loadPullFilesRecursively(loader.getRootDirectory(), this.sysConfig, shouldLoadGlobalConf())), this.converter); }
[ "@", "Override", "public", "synchronized", "List", "<", "JobSpec", ">", "getJobs", "(", ")", "{", "return", "Lists", ".", "transform", "(", "Lists", ".", "newArrayList", "(", "loader", ".", "loadPullFilesRecursively", "(", "loader", ".", "getRootDirectory", "(", ")", ",", "this", ".", "sysConfig", ",", "shouldLoadGlobalConf", "(", ")", ")", ")", ",", "this", ".", "converter", ")", ";", "}" ]
Fetch all the job files under the jobConfDirPath @return A collection of JobSpec
[ "Fetch", "all", "the", "job", "files", "under", "the", "jobConfDirPath" ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-runtime/src/main/java/org/apache/gobblin/runtime/job_catalog/ImmutableFSJobCatalog.java#L164-L169
25,918
apache/incubator-gobblin
gobblin-runtime/src/main/java/org/apache/gobblin/runtime/job_catalog/ImmutableFSJobCatalog.java
ImmutableFSJobCatalog.getJobSpec
@Override public synchronized JobSpec getJobSpec(URI uri) throws JobSpecNotFoundException { try { Path targetJobSpecFullPath = getPathForURI(this.jobConfDirPath, uri); return this.converter.apply(loader.loadPullFile(targetJobSpecFullPath, this.sysConfig, shouldLoadGlobalConf())); } catch (FileNotFoundException e) { throw new JobSpecNotFoundException(uri); } catch (IOException e) { throw new RuntimeException("IO exception thrown on loading single job configuration file:" + e.getMessage()); } }
java
@Override public synchronized JobSpec getJobSpec(URI uri) throws JobSpecNotFoundException { try { Path targetJobSpecFullPath = getPathForURI(this.jobConfDirPath, uri); return this.converter.apply(loader.loadPullFile(targetJobSpecFullPath, this.sysConfig, shouldLoadGlobalConf())); } catch (FileNotFoundException e) { throw new JobSpecNotFoundException(uri); } catch (IOException e) { throw new RuntimeException("IO exception thrown on loading single job configuration file:" + e.getMessage()); } }
[ "@", "Override", "public", "synchronized", "JobSpec", "getJobSpec", "(", "URI", "uri", ")", "throws", "JobSpecNotFoundException", "{", "try", "{", "Path", "targetJobSpecFullPath", "=", "getPathForURI", "(", "this", ".", "jobConfDirPath", ",", "uri", ")", ";", "return", "this", ".", "converter", ".", "apply", "(", "loader", ".", "loadPullFile", "(", "targetJobSpecFullPath", ",", "this", ".", "sysConfig", ",", "shouldLoadGlobalConf", "(", ")", ")", ")", ";", "}", "catch", "(", "FileNotFoundException", "e", ")", "{", "throw", "new", "JobSpecNotFoundException", "(", "uri", ")", ";", "}", "catch", "(", "IOException", "e", ")", "{", "throw", "new", "RuntimeException", "(", "\"IO exception thrown on loading single job configuration file:\"", "+", "e", ".", "getMessage", "(", ")", ")", ";", "}", "}" ]
Fetch single job file based on its URI, return null requested URI not existed @param uri The relative Path to the target job configuration. @return
[ "Fetch", "single", "job", "file", "based", "on", "its", "URI", "return", "null", "requested", "URI", "not", "existed" ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-runtime/src/main/java/org/apache/gobblin/runtime/job_catalog/ImmutableFSJobCatalog.java#L212-L223
25,919
apache/incubator-gobblin
gobblin-modules/gobblin-kafka-common/src/main/java/org/apache/gobblin/source/extractor/extract/kafka/workunit/packer/KafkaAvgRecordTimeBasedWorkUnitSizeEstimator.java
KafkaAvgRecordTimeBasedWorkUnitSizeEstimator.readPrevAvgRecordMillis
private void readPrevAvgRecordMillis(SourceState state) { Map<String, List<Double>> prevAvgMillis = Maps.newHashMap(); for (WorkUnitState workUnitState : state.getPreviousWorkUnitStates()) { List<KafkaPartition> partitions = KafkaUtils.getPartitions(workUnitState); for (KafkaPartition partition : partitions) { if (KafkaUtils.containsPartitionAvgRecordMillis(workUnitState, partition)) { double prevAvgMillisForPartition = KafkaUtils.getPartitionAvgRecordMillis(workUnitState, partition); if (prevAvgMillis.containsKey(partition.getTopicName())) { prevAvgMillis.get(partition.getTopicName()).add(prevAvgMillisForPartition); } else { prevAvgMillis.put(partition.getTopicName(), Lists.newArrayList(prevAvgMillisForPartition)); } } } } this.estAvgMillis.clear(); if (prevAvgMillis.isEmpty()) { this.avgEstAvgMillis = 1.0; } else { List<Double> allEstAvgMillis = Lists.newArrayList(); for (Map.Entry<String, List<Double>> entry : prevAvgMillis.entrySet()) { String topic = entry.getKey(); List<Double> prevAvgMillisForPartitions = entry.getValue(); // If a topic has k partitions, and in the previous run, each partition recorded its avg time to pull // a record, then use the geometric mean of these k numbers as the estimated avg time to pull // a record in this run. double estAvgMillisForTopic = geometricMean(prevAvgMillisForPartitions); this.estAvgMillis.put(topic, estAvgMillisForTopic); LOG.info(String.format("Estimated avg time to pull a record for topic %s is %f milliseconds", topic, estAvgMillisForTopic)); allEstAvgMillis.add(estAvgMillisForTopic); } // If a topic was not pulled in the previous run, use this.avgEstAvgMillis as the estimated avg time // to pull a record in this run, which is the geometric mean of all topics whose avg times to pull // a record in the previous run are known. 
this.avgEstAvgMillis = geometricMean(allEstAvgMillis); } LOG.info("For all topics not pulled in the previous run, estimated avg time to pull a record is " + this.avgEstAvgMillis + " milliseconds"); }
java
private void readPrevAvgRecordMillis(SourceState state) { Map<String, List<Double>> prevAvgMillis = Maps.newHashMap(); for (WorkUnitState workUnitState : state.getPreviousWorkUnitStates()) { List<KafkaPartition> partitions = KafkaUtils.getPartitions(workUnitState); for (KafkaPartition partition : partitions) { if (KafkaUtils.containsPartitionAvgRecordMillis(workUnitState, partition)) { double prevAvgMillisForPartition = KafkaUtils.getPartitionAvgRecordMillis(workUnitState, partition); if (prevAvgMillis.containsKey(partition.getTopicName())) { prevAvgMillis.get(partition.getTopicName()).add(prevAvgMillisForPartition); } else { prevAvgMillis.put(partition.getTopicName(), Lists.newArrayList(prevAvgMillisForPartition)); } } } } this.estAvgMillis.clear(); if (prevAvgMillis.isEmpty()) { this.avgEstAvgMillis = 1.0; } else { List<Double> allEstAvgMillis = Lists.newArrayList(); for (Map.Entry<String, List<Double>> entry : prevAvgMillis.entrySet()) { String topic = entry.getKey(); List<Double> prevAvgMillisForPartitions = entry.getValue(); // If a topic has k partitions, and in the previous run, each partition recorded its avg time to pull // a record, then use the geometric mean of these k numbers as the estimated avg time to pull // a record in this run. double estAvgMillisForTopic = geometricMean(prevAvgMillisForPartitions); this.estAvgMillis.put(topic, estAvgMillisForTopic); LOG.info(String.format("Estimated avg time to pull a record for topic %s is %f milliseconds", topic, estAvgMillisForTopic)); allEstAvgMillis.add(estAvgMillisForTopic); } // If a topic was not pulled in the previous run, use this.avgEstAvgMillis as the estimated avg time // to pull a record in this run, which is the geometric mean of all topics whose avg times to pull // a record in the previous run are known. 
this.avgEstAvgMillis = geometricMean(allEstAvgMillis); } LOG.info("For all topics not pulled in the previous run, estimated avg time to pull a record is " + this.avgEstAvgMillis + " milliseconds"); }
[ "private", "void", "readPrevAvgRecordMillis", "(", "SourceState", "state", ")", "{", "Map", "<", "String", ",", "List", "<", "Double", ">", ">", "prevAvgMillis", "=", "Maps", ".", "newHashMap", "(", ")", ";", "for", "(", "WorkUnitState", "workUnitState", ":", "state", ".", "getPreviousWorkUnitStates", "(", ")", ")", "{", "List", "<", "KafkaPartition", ">", "partitions", "=", "KafkaUtils", ".", "getPartitions", "(", "workUnitState", ")", ";", "for", "(", "KafkaPartition", "partition", ":", "partitions", ")", "{", "if", "(", "KafkaUtils", ".", "containsPartitionAvgRecordMillis", "(", "workUnitState", ",", "partition", ")", ")", "{", "double", "prevAvgMillisForPartition", "=", "KafkaUtils", ".", "getPartitionAvgRecordMillis", "(", "workUnitState", ",", "partition", ")", ";", "if", "(", "prevAvgMillis", ".", "containsKey", "(", "partition", ".", "getTopicName", "(", ")", ")", ")", "{", "prevAvgMillis", ".", "get", "(", "partition", ".", "getTopicName", "(", ")", ")", ".", "add", "(", "prevAvgMillisForPartition", ")", ";", "}", "else", "{", "prevAvgMillis", ".", "put", "(", "partition", ".", "getTopicName", "(", ")", ",", "Lists", ".", "newArrayList", "(", "prevAvgMillisForPartition", ")", ")", ";", "}", "}", "}", "}", "this", ".", "estAvgMillis", ".", "clear", "(", ")", ";", "if", "(", "prevAvgMillis", ".", "isEmpty", "(", ")", ")", "{", "this", ".", "avgEstAvgMillis", "=", "1.0", ";", "}", "else", "{", "List", "<", "Double", ">", "allEstAvgMillis", "=", "Lists", ".", "newArrayList", "(", ")", ";", "for", "(", "Map", ".", "Entry", "<", "String", ",", "List", "<", "Double", ">", ">", "entry", ":", "prevAvgMillis", ".", "entrySet", "(", ")", ")", "{", "String", "topic", "=", "entry", ".", "getKey", "(", ")", ";", "List", "<", "Double", ">", "prevAvgMillisForPartitions", "=", "entry", ".", "getValue", "(", ")", ";", "// If a topic has k partitions, and in the previous run, each partition recorded its avg time to pull", "// a record, then use the geometric mean of these 
k numbers as the estimated avg time to pull", "// a record in this run.", "double", "estAvgMillisForTopic", "=", "geometricMean", "(", "prevAvgMillisForPartitions", ")", ";", "this", ".", "estAvgMillis", ".", "put", "(", "topic", ",", "estAvgMillisForTopic", ")", ";", "LOG", ".", "info", "(", "String", ".", "format", "(", "\"Estimated avg time to pull a record for topic %s is %f milliseconds\"", ",", "topic", ",", "estAvgMillisForTopic", ")", ")", ";", "allEstAvgMillis", ".", "add", "(", "estAvgMillisForTopic", ")", ";", "}", "// If a topic was not pulled in the previous run, use this.avgEstAvgMillis as the estimated avg time", "// to pull a record in this run, which is the geometric mean of all topics whose avg times to pull", "// a record in the previous run are known.", "this", ".", "avgEstAvgMillis", "=", "geometricMean", "(", "allEstAvgMillis", ")", ";", "}", "LOG", ".", "info", "(", "\"For all topics not pulled in the previous run, estimated avg time to pull a record is \"", "+", "this", ".", "avgEstAvgMillis", "+", "\" milliseconds\"", ")", ";", "}" ]
Get avg time to pull a record in the previous run for all topics, each of which is the geometric mean of the avg time to pull a record of all partitions of the topic. If a topic was not pulled in the previous run (e.g., it's a new topic), it will use the geometric mean of avg record time of topics that were pulled in the previous run. If no topic was pulled in the previous run, 1.0 will be used for all topics.
[ "Get", "avg", "time", "to", "pull", "a", "record", "in", "the", "previous", "run", "for", "all", "topics", "each", "of", "which", "is", "the", "geometric", "mean", "of", "the", "avg", "time", "to", "pull", "a", "record", "of", "all", "partitions", "of", "the", "topic", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-modules/gobblin-kafka-common/src/main/java/org/apache/gobblin/source/extractor/extract/kafka/workunit/packer/KafkaAvgRecordTimeBasedWorkUnitSizeEstimator.java#L109-L151
25,920
apache/incubator-gobblin
gobblin-data-management/src/main/java/org/apache/gobblin/data/management/copy/replication/ConfigBasedDataset.java
ConfigBasedDataset.initDataFileVersionStrategy
private Optional<DataFileVersionStrategy> initDataFileVersionStrategy(EndPoint endPoint, ReplicationConfiguration rc, Properties props) { // rc is the dataset config??? if (!(endPoint instanceof HadoopFsEndPoint)) { log.warn("Data file version currently only handle the Hadoop Fs EndPoint replication"); return Optional.absent(); } Configuration conf = HadoopUtils.newConfiguration(); try { HadoopFsEndPoint hEndpoint = (HadoopFsEndPoint) endPoint; FileSystem fs = FileSystem.get(hEndpoint.getFsURI(), conf); // If configStore doesn't contain the strategy, check from job properties. // If no strategy is found, default to the modification time strategy. this.versionStrategyFromCS = rc.getVersionStrategyFromConfigStore(); String nonEmptyStrategy = versionStrategyFromCS.isPresent()? versionStrategyFromCS.get() : props.getProperty(DataFileVersionStrategy.DATA_FILE_VERSION_STRATEGY_KEY, DataFileVersionStrategy.DEFAULT_DATA_FILE_VERSION_STRATEGY); Config versionStrategyConfig = ConfigFactory.parseMap(ImmutableMap.of( DataFileVersionStrategy.DATA_FILE_VERSION_STRATEGY_KEY, nonEmptyStrategy)); DataFileVersionStrategy strategy = DataFileVersionStrategy.instantiateDataFileVersionStrategy(fs, versionStrategyConfig); log.debug("{} has version strategy {}", hEndpoint.getClusterName(), strategy.getClass().getName()); return Optional.of(strategy); } catch (IOException e) { log.error("Version strategy cannot be created due to {}", e); return Optional.absent(); } }
java
private Optional<DataFileVersionStrategy> initDataFileVersionStrategy(EndPoint endPoint, ReplicationConfiguration rc, Properties props) { // rc is the dataset config??? if (!(endPoint instanceof HadoopFsEndPoint)) { log.warn("Data file version currently only handle the Hadoop Fs EndPoint replication"); return Optional.absent(); } Configuration conf = HadoopUtils.newConfiguration(); try { HadoopFsEndPoint hEndpoint = (HadoopFsEndPoint) endPoint; FileSystem fs = FileSystem.get(hEndpoint.getFsURI(), conf); // If configStore doesn't contain the strategy, check from job properties. // If no strategy is found, default to the modification time strategy. this.versionStrategyFromCS = rc.getVersionStrategyFromConfigStore(); String nonEmptyStrategy = versionStrategyFromCS.isPresent()? versionStrategyFromCS.get() : props.getProperty(DataFileVersionStrategy.DATA_FILE_VERSION_STRATEGY_KEY, DataFileVersionStrategy.DEFAULT_DATA_FILE_VERSION_STRATEGY); Config versionStrategyConfig = ConfigFactory.parseMap(ImmutableMap.of( DataFileVersionStrategy.DATA_FILE_VERSION_STRATEGY_KEY, nonEmptyStrategy)); DataFileVersionStrategy strategy = DataFileVersionStrategy.instantiateDataFileVersionStrategy(fs, versionStrategyConfig); log.debug("{} has version strategy {}", hEndpoint.getClusterName(), strategy.getClass().getName()); return Optional.of(strategy); } catch (IOException e) { log.error("Version strategy cannot be created due to {}", e); return Optional.absent(); } }
[ "private", "Optional", "<", "DataFileVersionStrategy", ">", "initDataFileVersionStrategy", "(", "EndPoint", "endPoint", ",", "ReplicationConfiguration", "rc", ",", "Properties", "props", ")", "{", "// rc is the dataset config???", "if", "(", "!", "(", "endPoint", "instanceof", "HadoopFsEndPoint", ")", ")", "{", "log", ".", "warn", "(", "\"Data file version currently only handle the Hadoop Fs EndPoint replication\"", ")", ";", "return", "Optional", ".", "absent", "(", ")", ";", "}", "Configuration", "conf", "=", "HadoopUtils", ".", "newConfiguration", "(", ")", ";", "try", "{", "HadoopFsEndPoint", "hEndpoint", "=", "(", "HadoopFsEndPoint", ")", "endPoint", ";", "FileSystem", "fs", "=", "FileSystem", ".", "get", "(", "hEndpoint", ".", "getFsURI", "(", ")", ",", "conf", ")", ";", "// If configStore doesn't contain the strategy, check from job properties.", "// If no strategy is found, default to the modification time strategy.", "this", ".", "versionStrategyFromCS", "=", "rc", ".", "getVersionStrategyFromConfigStore", "(", ")", ";", "String", "nonEmptyStrategy", "=", "versionStrategyFromCS", ".", "isPresent", "(", ")", "?", "versionStrategyFromCS", ".", "get", "(", ")", ":", "props", ".", "getProperty", "(", "DataFileVersionStrategy", ".", "DATA_FILE_VERSION_STRATEGY_KEY", ",", "DataFileVersionStrategy", ".", "DEFAULT_DATA_FILE_VERSION_STRATEGY", ")", ";", "Config", "versionStrategyConfig", "=", "ConfigFactory", ".", "parseMap", "(", "ImmutableMap", ".", "of", "(", "DataFileVersionStrategy", ".", "DATA_FILE_VERSION_STRATEGY_KEY", ",", "nonEmptyStrategy", ")", ")", ";", "DataFileVersionStrategy", "strategy", "=", "DataFileVersionStrategy", ".", "instantiateDataFileVersionStrategy", "(", "fs", ",", "versionStrategyConfig", ")", ";", "log", ".", "debug", "(", "\"{} has version strategy {}\"", ",", "hEndpoint", ".", "getClusterName", "(", ")", ",", "strategy", ".", "getClass", "(", ")", ".", "getName", "(", ")", ")", ";", "return", "Optional", ".", "of", "(", 
"strategy", ")", ";", "}", "catch", "(", "IOException", "e", ")", "{", "log", ".", "error", "(", "\"Version strategy cannot be created due to {}\"", ",", "e", ")", ";", "return", "Optional", ".", "absent", "(", ")", ";", "}", "}" ]
Get the version strategy that can retrieve the data file version from the end point. @return the version strategy. Empty value when the version is not supported for this end point.
[ "Get", "the", "version", "strategy", "that", "can", "retrieve", "the", "data", "file", "version", "from", "the", "end", "point", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-data-management/src/main/java/org/apache/gobblin/data/management/copy/replication/ConfigBasedDataset.java#L122-L149
25,921
apache/incubator-gobblin
gobblin-core/src/main/java/org/apache/gobblin/source/extractor/filebased/FileBasedExtractor.java
FileBasedExtractor.closeCurrentFile
public void closeCurrentFile() { try { this.closer.close(); } catch (IOException e) { if (this.currentFile != null) { LOG.error("Failed to close file: " + this.currentFile, e); } } }
java
public void closeCurrentFile() { try { this.closer.close(); } catch (IOException e) { if (this.currentFile != null) { LOG.error("Failed to close file: " + this.currentFile, e); } } }
[ "public", "void", "closeCurrentFile", "(", ")", "{", "try", "{", "this", ".", "closer", ".", "close", "(", ")", ";", "}", "catch", "(", "IOException", "e", ")", "{", "if", "(", "this", ".", "currentFile", "!=", "null", ")", "{", "LOG", ".", "error", "(", "\"Failed to close file: \"", "+", "this", ".", "currentFile", ",", "e", ")", ";", "}", "}", "}" ]
Closes the current file being read.
[ "Closes", "the", "current", "file", "being", "read", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-core/src/main/java/org/apache/gobblin/source/extractor/filebased/FileBasedExtractor.java#L217-L225
25,922
apache/incubator-gobblin
gobblin-hive-registration/src/main/java/org/apache/gobblin/hive/metastore/HiveMetaStoreBasedRegister.java
HiveMetaStoreBasedRegister.ensureHiveTableExistenceBeforeAlternation
private boolean ensureHiveTableExistenceBeforeAlternation(String tableName, String dbName, IMetaStoreClient client, Table table, HiveSpec spec) throws TException{ try (AutoCloseableLock lock = this.locks.getTableLock(dbName, tableName)) { try { try (Timer.Context context = this.metricContext.timer(CREATE_HIVE_TABLE).time()) { client.createTable(getTableWithCreateTimeNow(table)); log.info(String.format("Created Hive table %s in db %s", tableName, dbName)); return true; } catch (AlreadyExistsException e) { } }catch (TException e) { log.error( String.format("Unable to create Hive table %s in db %s: " + e.getMessage(), tableName, dbName), e); throw e; } log.info("Table {} already exists in db {}.", tableName, dbName); try { HiveTable existingTable; try (Timer.Context context = this.metricContext.timer(GET_HIVE_TABLE).time()) { existingTable = HiveMetaStoreUtils.getHiveTable(client.getTable(dbName, tableName)); } if (needToUpdateTable(existingTable, spec.getTable())) { try (Timer.Context context = this.metricContext.timer(ALTER_TABLE).time()) { client.alter_table(dbName, tableName, getNewTblByMergingExistingTblProps(table, existingTable)); } log.info(String.format("updated Hive table %s in db %s", tableName, dbName)); } } catch (TException e2) { log.error( String.format("Unable to create or alter Hive table %s in db %s: " + e2.getMessage(), tableName, dbName), e2); throw e2; } // When the logic up to here it means table already existed in db and alteration happen. Return false. return false; } }
java
private boolean ensureHiveTableExistenceBeforeAlternation(String tableName, String dbName, IMetaStoreClient client, Table table, HiveSpec spec) throws TException{ try (AutoCloseableLock lock = this.locks.getTableLock(dbName, tableName)) { try { try (Timer.Context context = this.metricContext.timer(CREATE_HIVE_TABLE).time()) { client.createTable(getTableWithCreateTimeNow(table)); log.info(String.format("Created Hive table %s in db %s", tableName, dbName)); return true; } catch (AlreadyExistsException e) { } }catch (TException e) { log.error( String.format("Unable to create Hive table %s in db %s: " + e.getMessage(), tableName, dbName), e); throw e; } log.info("Table {} already exists in db {}.", tableName, dbName); try { HiveTable existingTable; try (Timer.Context context = this.metricContext.timer(GET_HIVE_TABLE).time()) { existingTable = HiveMetaStoreUtils.getHiveTable(client.getTable(dbName, tableName)); } if (needToUpdateTable(existingTable, spec.getTable())) { try (Timer.Context context = this.metricContext.timer(ALTER_TABLE).time()) { client.alter_table(dbName, tableName, getNewTblByMergingExistingTblProps(table, existingTable)); } log.info(String.format("updated Hive table %s in db %s", tableName, dbName)); } } catch (TException e2) { log.error( String.format("Unable to create or alter Hive table %s in db %s: " + e2.getMessage(), tableName, dbName), e2); throw e2; } // When the logic up to here it means table already existed in db and alteration happen. Return false. return false; } }
[ "private", "boolean", "ensureHiveTableExistenceBeforeAlternation", "(", "String", "tableName", ",", "String", "dbName", ",", "IMetaStoreClient", "client", ",", "Table", "table", ",", "HiveSpec", "spec", ")", "throws", "TException", "{", "try", "(", "AutoCloseableLock", "lock", "=", "this", ".", "locks", ".", "getTableLock", "(", "dbName", ",", "tableName", ")", ")", "{", "try", "{", "try", "(", "Timer", ".", "Context", "context", "=", "this", ".", "metricContext", ".", "timer", "(", "CREATE_HIVE_TABLE", ")", ".", "time", "(", ")", ")", "{", "client", ".", "createTable", "(", "getTableWithCreateTimeNow", "(", "table", ")", ")", ";", "log", ".", "info", "(", "String", ".", "format", "(", "\"Created Hive table %s in db %s\"", ",", "tableName", ",", "dbName", ")", ")", ";", "return", "true", ";", "}", "catch", "(", "AlreadyExistsException", "e", ")", "{", "}", "}", "catch", "(", "TException", "e", ")", "{", "log", ".", "error", "(", "String", ".", "format", "(", "\"Unable to create Hive table %s in db %s: \"", "+", "e", ".", "getMessage", "(", ")", ",", "tableName", ",", "dbName", ")", ",", "e", ")", ";", "throw", "e", ";", "}", "log", ".", "info", "(", "\"Table {} already exists in db {}.\"", ",", "tableName", ",", "dbName", ")", ";", "try", "{", "HiveTable", "existingTable", ";", "try", "(", "Timer", ".", "Context", "context", "=", "this", ".", "metricContext", ".", "timer", "(", "GET_HIVE_TABLE", ")", ".", "time", "(", ")", ")", "{", "existingTable", "=", "HiveMetaStoreUtils", ".", "getHiveTable", "(", "client", ".", "getTable", "(", "dbName", ",", "tableName", ")", ")", ";", "}", "if", "(", "needToUpdateTable", "(", "existingTable", ",", "spec", ".", "getTable", "(", ")", ")", ")", "{", "try", "(", "Timer", ".", "Context", "context", "=", "this", ".", "metricContext", ".", "timer", "(", "ALTER_TABLE", ")", ".", "time", "(", ")", ")", "{", "client", ".", "alter_table", "(", "dbName", ",", "tableName", ",", "getNewTblByMergingExistingTblProps", "(", "table", ",", 
"existingTable", ")", ")", ";", "}", "log", ".", "info", "(", "String", ".", "format", "(", "\"updated Hive table %s in db %s\"", ",", "tableName", ",", "dbName", ")", ")", ";", "}", "}", "catch", "(", "TException", "e2", ")", "{", "log", ".", "error", "(", "String", ".", "format", "(", "\"Unable to create or alter Hive table %s in db %s: \"", "+", "e2", ".", "getMessage", "(", ")", ",", "tableName", ",", "dbName", ")", ",", "e2", ")", ";", "throw", "e2", ";", "}", "// When the logic up to here it means table already existed in db and alteration happen. Return false.", "return", "false", ";", "}", "}" ]
If table existed on Hive side will return false; Or will create the table thru. RPC and return retVal from remote MetaStore.
[ "If", "table", "existed", "on", "Hive", "side", "will", "return", "false", ";", "Or", "will", "create", "the", "table", "thru", ".", "RPC", "and", "return", "retVal", "from", "remote", "MetaStore", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-hive-registration/src/main/java/org/apache/gobblin/hive/metastore/HiveMetaStoreBasedRegister.java#L172-L209
25,923
apache/incubator-gobblin
gobblin-hive-registration/src/main/java/org/apache/gobblin/hive/metastore/HiveMetaStoreBasedRegister.java
HiveMetaStoreBasedRegister.ensureHiveDbExistence
private boolean ensureHiveDbExistence(String hiveDbName, IMetaStoreClient client) throws IOException{ try (AutoCloseableLock lock = this.locks.getDbLock(hiveDbName)) { Database db = new Database(); db.setName(hiveDbName); try { try (Timer.Context context = this.metricContext.timer(GET_HIVE_DATABASE).time()) { client.getDatabase(db.getName()); } return false; } catch (NoSuchObjectException nsoe) { // proceed with create } catch (TException te) { throw new IOException(te); } Preconditions.checkState(this.hiveDbRootDir.isPresent(), "Missing required property " + HiveRegProps.HIVE_DB_ROOT_DIR); db.setLocationUri(new Path(this.hiveDbRootDir.get(), hiveDbName + HIVE_DB_EXTENSION).toString()); try { try (Timer.Context context = this.metricContext.timer(CREATE_HIVE_DATABASE).time()) { client.createDatabase(db); } log.info("Created database " + hiveDbName); HiveMetaStoreEventHelper.submitSuccessfulDBCreation(this.eventSubmitter, hiveDbName); return true; } catch (AlreadyExistsException e) { return false; } catch (TException e) { HiveMetaStoreEventHelper.submitFailedDBCreation(this.eventSubmitter, hiveDbName, e); throw new IOException("Unable to create Hive database " + hiveDbName, e); } } }
java
private boolean ensureHiveDbExistence(String hiveDbName, IMetaStoreClient client) throws IOException{ try (AutoCloseableLock lock = this.locks.getDbLock(hiveDbName)) { Database db = new Database(); db.setName(hiveDbName); try { try (Timer.Context context = this.metricContext.timer(GET_HIVE_DATABASE).time()) { client.getDatabase(db.getName()); } return false; } catch (NoSuchObjectException nsoe) { // proceed with create } catch (TException te) { throw new IOException(te); } Preconditions.checkState(this.hiveDbRootDir.isPresent(), "Missing required property " + HiveRegProps.HIVE_DB_ROOT_DIR); db.setLocationUri(new Path(this.hiveDbRootDir.get(), hiveDbName + HIVE_DB_EXTENSION).toString()); try { try (Timer.Context context = this.metricContext.timer(CREATE_HIVE_DATABASE).time()) { client.createDatabase(db); } log.info("Created database " + hiveDbName); HiveMetaStoreEventHelper.submitSuccessfulDBCreation(this.eventSubmitter, hiveDbName); return true; } catch (AlreadyExistsException e) { return false; } catch (TException e) { HiveMetaStoreEventHelper.submitFailedDBCreation(this.eventSubmitter, hiveDbName, e); throw new IOException("Unable to create Hive database " + hiveDbName, e); } } }
[ "private", "boolean", "ensureHiveDbExistence", "(", "String", "hiveDbName", ",", "IMetaStoreClient", "client", ")", "throws", "IOException", "{", "try", "(", "AutoCloseableLock", "lock", "=", "this", ".", "locks", ".", "getDbLock", "(", "hiveDbName", ")", ")", "{", "Database", "db", "=", "new", "Database", "(", ")", ";", "db", ".", "setName", "(", "hiveDbName", ")", ";", "try", "{", "try", "(", "Timer", ".", "Context", "context", "=", "this", ".", "metricContext", ".", "timer", "(", "GET_HIVE_DATABASE", ")", ".", "time", "(", ")", ")", "{", "client", ".", "getDatabase", "(", "db", ".", "getName", "(", ")", ")", ";", "}", "return", "false", ";", "}", "catch", "(", "NoSuchObjectException", "nsoe", ")", "{", "// proceed with create", "}", "catch", "(", "TException", "te", ")", "{", "throw", "new", "IOException", "(", "te", ")", ";", "}", "Preconditions", ".", "checkState", "(", "this", ".", "hiveDbRootDir", ".", "isPresent", "(", ")", ",", "\"Missing required property \"", "+", "HiveRegProps", ".", "HIVE_DB_ROOT_DIR", ")", ";", "db", ".", "setLocationUri", "(", "new", "Path", "(", "this", ".", "hiveDbRootDir", ".", "get", "(", ")", ",", "hiveDbName", "+", "HIVE_DB_EXTENSION", ")", ".", "toString", "(", ")", ")", ";", "try", "{", "try", "(", "Timer", ".", "Context", "context", "=", "this", ".", "metricContext", ".", "timer", "(", "CREATE_HIVE_DATABASE", ")", ".", "time", "(", ")", ")", "{", "client", ".", "createDatabase", "(", "db", ")", ";", "}", "log", ".", "info", "(", "\"Created database \"", "+", "hiveDbName", ")", ";", "HiveMetaStoreEventHelper", ".", "submitSuccessfulDBCreation", "(", "this", ".", "eventSubmitter", ",", "hiveDbName", ")", ";", "return", "true", ";", "}", "catch", "(", "AlreadyExistsException", "e", ")", "{", "return", "false", ";", "}", "catch", "(", "TException", "e", ")", "{", "HiveMetaStoreEventHelper", ".", "submitFailedDBCreation", "(", "this", ".", "eventSubmitter", ",", "hiveDbName", ",", "e", ")", ";", "throw", "new", "IOException", 
"(", "\"Unable to create Hive database \"", "+", "hiveDbName", ",", "e", ")", ";", "}", "}", "}" ]
If databse existed on Hive side will return false; Or will create the table thru. RPC and return retVal from remote MetaStore. @param hiveDbName is the hive databases to be checked for existence
[ "If", "databse", "existed", "on", "Hive", "side", "will", "return", "false", ";", "Or", "will", "create", "the", "table", "thru", ".", "RPC", "and", "return", "retVal", "from", "remote", "MetaStore", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-hive-registration/src/main/java/org/apache/gobblin/hive/metastore/HiveMetaStoreBasedRegister.java#L217-L251
25,924
apache/incubator-gobblin
gobblin-runtime/src/main/java/org/apache/gobblin/runtime/TaskState.java
TaskState.isCompleted
public boolean isCompleted() { WorkingState state = getWorkingState(); return state == WorkingState.SUCCESSFUL || state == WorkingState.COMMITTED || state == WorkingState.FAILED; }
java
public boolean isCompleted() { WorkingState state = getWorkingState(); return state == WorkingState.SUCCESSFUL || state == WorkingState.COMMITTED || state == WorkingState.FAILED; }
[ "public", "boolean", "isCompleted", "(", ")", "{", "WorkingState", "state", "=", "getWorkingState", "(", ")", ";", "return", "state", "==", "WorkingState", ".", "SUCCESSFUL", "||", "state", "==", "WorkingState", ".", "COMMITTED", "||", "state", "==", "WorkingState", ".", "FAILED", ";", "}" ]
Return whether the task has completed running or not. @return {@code true} if the task has completed or {@code false} otherwise
[ "Return", "whether", "the", "task", "has", "completed", "running", "or", "not", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-runtime/src/main/java/org/apache/gobblin/runtime/TaskState.java#L243-L246
25,925
apache/incubator-gobblin
gobblin-runtime/src/main/java/org/apache/gobblin/runtime/TaskState.java
TaskState.updateByteMetrics
public synchronized void updateByteMetrics(long bytesWritten, int branchIndex) { TaskMetrics metrics = TaskMetrics.get(this); String forkBranchId = TaskMetrics.taskInstanceRemoved(this.taskId); Counter taskByteCounter = metrics.getCounter(MetricGroup.TASK.name(), forkBranchId, BYTES); long inc = bytesWritten - taskByteCounter.getCount(); taskByteCounter.inc(inc); metrics.getMeter(MetricGroup.TASK.name(), forkBranchId, BYTES_PER_SECOND).mark(inc); metrics.getCounter(MetricGroup.JOB.name(), this.jobId, BYTES).inc(inc); metrics.getMeter(MetricGroup.JOB.name(), this.jobId, BYTES_PER_SECOND).mark(inc); }
java
public synchronized void updateByteMetrics(long bytesWritten, int branchIndex) { TaskMetrics metrics = TaskMetrics.get(this); String forkBranchId = TaskMetrics.taskInstanceRemoved(this.taskId); Counter taskByteCounter = metrics.getCounter(MetricGroup.TASK.name(), forkBranchId, BYTES); long inc = bytesWritten - taskByteCounter.getCount(); taskByteCounter.inc(inc); metrics.getMeter(MetricGroup.TASK.name(), forkBranchId, BYTES_PER_SECOND).mark(inc); metrics.getCounter(MetricGroup.JOB.name(), this.jobId, BYTES).inc(inc); metrics.getMeter(MetricGroup.JOB.name(), this.jobId, BYTES_PER_SECOND).mark(inc); }
[ "public", "synchronized", "void", "updateByteMetrics", "(", "long", "bytesWritten", ",", "int", "branchIndex", ")", "{", "TaskMetrics", "metrics", "=", "TaskMetrics", ".", "get", "(", "this", ")", ";", "String", "forkBranchId", "=", "TaskMetrics", ".", "taskInstanceRemoved", "(", "this", ".", "taskId", ")", ";", "Counter", "taskByteCounter", "=", "metrics", ".", "getCounter", "(", "MetricGroup", ".", "TASK", ".", "name", "(", ")", ",", "forkBranchId", ",", "BYTES", ")", ";", "long", "inc", "=", "bytesWritten", "-", "taskByteCounter", ".", "getCount", "(", ")", ";", "taskByteCounter", ".", "inc", "(", "inc", ")", ";", "metrics", ".", "getMeter", "(", "MetricGroup", ".", "TASK", ".", "name", "(", ")", ",", "forkBranchId", ",", "BYTES_PER_SECOND", ")", ".", "mark", "(", "inc", ")", ";", "metrics", ".", "getCounter", "(", "MetricGroup", ".", "JOB", ".", "name", "(", ")", ",", "this", ".", "jobId", ",", "BYTES", ")", ".", "inc", "(", "inc", ")", ";", "metrics", ".", "getMeter", "(", "MetricGroup", ".", "JOB", ".", "name", "(", ")", ",", "this", ".", "jobId", ",", "BYTES_PER_SECOND", ")", ".", "mark", "(", "inc", ")", ";", "}" ]
Collect byte-level metrics. @param bytesWritten number of bytes written by the writer @param branchIndex fork branch index @deprecated see {@link org.apache.gobblin.instrumented.writer.InstrumentedDataWriterBase}.
[ "Collect", "byte", "-", "level", "metrics", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-runtime/src/main/java/org/apache/gobblin/runtime/TaskState.java#L278-L288
25,926
apache/incubator-gobblin
gobblin-runtime/src/main/java/org/apache/gobblin/runtime/TaskState.java
TaskState.adjustJobMetricsOnRetry
public void adjustJobMetricsOnRetry(int branches) { TaskMetrics metrics = TaskMetrics.get(this); for (int i = 0; i < branches; i++) { String forkBranchId = ForkOperatorUtils.getForkId(this.taskId, i); long recordsWritten = metrics.getCounter(MetricGroup.TASK.name(), forkBranchId, RECORDS).getCount(); long bytesWritten = metrics.getCounter(MetricGroup.TASK.name(), forkBranchId, BYTES).getCount(); metrics.getCounter(MetricGroup.JOB.name(), this.jobId, RECORDS).dec(recordsWritten); metrics.getCounter(MetricGroup.JOB.name(), this.jobId, BYTES).dec(bytesWritten); } }
java
public void adjustJobMetricsOnRetry(int branches) { TaskMetrics metrics = TaskMetrics.get(this); for (int i = 0; i < branches; i++) { String forkBranchId = ForkOperatorUtils.getForkId(this.taskId, i); long recordsWritten = metrics.getCounter(MetricGroup.TASK.name(), forkBranchId, RECORDS).getCount(); long bytesWritten = metrics.getCounter(MetricGroup.TASK.name(), forkBranchId, BYTES).getCount(); metrics.getCounter(MetricGroup.JOB.name(), this.jobId, RECORDS).dec(recordsWritten); metrics.getCounter(MetricGroup.JOB.name(), this.jobId, BYTES).dec(bytesWritten); } }
[ "public", "void", "adjustJobMetricsOnRetry", "(", "int", "branches", ")", "{", "TaskMetrics", "metrics", "=", "TaskMetrics", ".", "get", "(", "this", ")", ";", "for", "(", "int", "i", "=", "0", ";", "i", "<", "branches", ";", "i", "++", ")", "{", "String", "forkBranchId", "=", "ForkOperatorUtils", ".", "getForkId", "(", "this", ".", "taskId", ",", "i", ")", ";", "long", "recordsWritten", "=", "metrics", ".", "getCounter", "(", "MetricGroup", ".", "TASK", ".", "name", "(", ")", ",", "forkBranchId", ",", "RECORDS", ")", ".", "getCount", "(", ")", ";", "long", "bytesWritten", "=", "metrics", ".", "getCounter", "(", "MetricGroup", ".", "TASK", ".", "name", "(", ")", ",", "forkBranchId", ",", "BYTES", ")", ".", "getCount", "(", ")", ";", "metrics", ".", "getCounter", "(", "MetricGroup", ".", "JOB", ".", "name", "(", ")", ",", "this", ".", "jobId", ",", "RECORDS", ")", ".", "dec", "(", "recordsWritten", ")", ";", "metrics", ".", "getCounter", "(", "MetricGroup", ".", "JOB", ".", "name", "(", ")", ",", "this", ".", "jobId", ",", "BYTES", ")", ".", "dec", "(", "bytesWritten", ")", ";", "}", "}" ]
Adjust job-level metrics when the task gets retried. @param branches number of forked branches
[ "Adjust", "job", "-", "level", "metrics", "when", "the", "task", "gets", "retried", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-runtime/src/main/java/org/apache/gobblin/runtime/TaskState.java#L295-L305
25,927
apache/incubator-gobblin
gobblin-metrics-libs/gobblin-metrics-base/src/main/java/org/apache/gobblin/metrics/event/lineage/LineageInfo.java
LineageInfo.getFullEventName
public static String getFullEventName(State state) { return Joiner.on('.').join(LineageEventBuilder.LIENAGE_EVENT_NAMESPACE, state.getProp(getKey(NAME_KEY))); }
java
public static String getFullEventName(State state) { return Joiner.on('.').join(LineageEventBuilder.LIENAGE_EVENT_NAMESPACE, state.getProp(getKey(NAME_KEY))); }
[ "public", "static", "String", "getFullEventName", "(", "State", "state", ")", "{", "return", "Joiner", ".", "on", "(", "'", "'", ")", ".", "join", "(", "LineageEventBuilder", ".", "LIENAGE_EVENT_NAMESPACE", ",", "state", ".", "getProp", "(", "getKey", "(", "NAME_KEY", ")", ")", ")", ";", "}" ]
Get the full lineage event name from a state
[ "Get", "the", "full", "lineage", "event", "name", "from", "a", "state" ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-metrics-libs/gobblin-metrics-base/src/main/java/org/apache/gobblin/metrics/event/lineage/LineageInfo.java#L252-L254
25,928
apache/incubator-gobblin
gobblin-utility/src/main/java/org/apache/gobblin/util/request_allocation/VectorAlgebra.java
VectorAlgebra.addVector
static double[] addVector(double[] x, double[] y, double c, double[] reuse) { if (reuse == null) { reuse = new double[x.length]; } for (int i = 0; i < x.length; i++) { reuse[i] = x[i] + c * y[i]; } return reuse; }
java
static double[] addVector(double[] x, double[] y, double c, double[] reuse) { if (reuse == null) { reuse = new double[x.length]; } for (int i = 0; i < x.length; i++) { reuse[i] = x[i] + c * y[i]; } return reuse; }
[ "static", "double", "[", "]", "addVector", "(", "double", "[", "]", "x", ",", "double", "[", "]", "y", ",", "double", "c", ",", "double", "[", "]", "reuse", ")", "{", "if", "(", "reuse", "==", "null", ")", "{", "reuse", "=", "new", "double", "[", "x", ".", "length", "]", ";", "}", "for", "(", "int", "i", "=", "0", ";", "i", "<", "x", ".", "length", ";", "i", "++", ")", "{", "reuse", "[", "i", "]", "=", "x", "[", "i", "]", "+", "c", "*", "y", "[", "i", "]", ";", "}", "return", "reuse", ";", "}" ]
Performs x + cy
[ "Performs", "x", "+", "cy" ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-utility/src/main/java/org/apache/gobblin/util/request_allocation/VectorAlgebra.java#L28-L36
25,929
apache/incubator-gobblin
gobblin-runtime/src/main/java/org/apache/gobblin/runtime/EventMetadataUtils.java
EventMetadataUtils.getProcessedCount
public static long getProcessedCount(List<TaskState> taskStates) { long value = 0; for (TaskState taskState : taskStates) { value += taskState.getPropAsLong(ConfigurationKeys.WRITER_RECORDS_WRITTEN, 0); } return value; }
java
public static long getProcessedCount(List<TaskState> taskStates) { long value = 0; for (TaskState taskState : taskStates) { value += taskState.getPropAsLong(ConfigurationKeys.WRITER_RECORDS_WRITTEN, 0); } return value; }
[ "public", "static", "long", "getProcessedCount", "(", "List", "<", "TaskState", ">", "taskStates", ")", "{", "long", "value", "=", "0", ";", "for", "(", "TaskState", "taskState", ":", "taskStates", ")", "{", "value", "+=", "taskState", ".", "getPropAsLong", "(", "ConfigurationKeys", ".", "WRITER_RECORDS_WRITTEN", ",", "0", ")", ";", "}", "return", "value", ";", "}" ]
Get the number of records written by all the writers @return Sum of the writer records written count across all tasks
[ "Get", "the", "number", "of", "records", "written", "by", "all", "the", "writers" ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-runtime/src/main/java/org/apache/gobblin/runtime/EventMetadataUtils.java#L35-L43
25,930
apache/incubator-gobblin
gobblin-runtime/src/main/java/org/apache/gobblin/runtime/EventMetadataUtils.java
EventMetadataUtils.getTaskFailureExceptions
public static String getTaskFailureExceptions(List<TaskState> taskStates) { StringBuffer sb = new StringBuffer(); // Add task failure messages in a group followed by task failure exceptions appendTaskStateValues(taskStates, sb, TASK_FAILURE_MESSAGE_KEY); appendTaskStateValues(taskStates, sb, ConfigurationKeys.TASK_FAILURE_EXCEPTION_KEY); return sb.toString(); }
java
public static String getTaskFailureExceptions(List<TaskState> taskStates) { StringBuffer sb = new StringBuffer(); // Add task failure messages in a group followed by task failure exceptions appendTaskStateValues(taskStates, sb, TASK_FAILURE_MESSAGE_KEY); appendTaskStateValues(taskStates, sb, ConfigurationKeys.TASK_FAILURE_EXCEPTION_KEY); return sb.toString(); }
[ "public", "static", "String", "getTaskFailureExceptions", "(", "List", "<", "TaskState", ">", "taskStates", ")", "{", "StringBuffer", "sb", "=", "new", "StringBuffer", "(", ")", ";", "// Add task failure messages in a group followed by task failure exceptions", "appendTaskStateValues", "(", "taskStates", ",", "sb", ",", "TASK_FAILURE_MESSAGE_KEY", ")", ";", "appendTaskStateValues", "(", "taskStates", ",", "sb", ",", "ConfigurationKeys", ".", "TASK_FAILURE_EXCEPTION_KEY", ")", ";", "return", "sb", ".", "toString", "(", ")", ";", "}" ]
Get failure messages @return The concatenated failure messages from all the task states
[ "Get", "failure", "messages" ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-runtime/src/main/java/org/apache/gobblin/runtime/EventMetadataUtils.java#L49-L57
25,931
apache/incubator-gobblin
gobblin-data-management/src/main/java/org/apache/gobblin/data/management/copy/writer/FileAwareInputStreamDataWriter.java
FileAwareInputStreamDataWriter.commit
@Override public void commit() throws IOException { if (!this.actualProcessedCopyableFile.isPresent()) { return; } CopyableFile copyableFile = this.actualProcessedCopyableFile.get(); Path stagingFilePath = getStagingFilePath(copyableFile); Path outputFilePath = getSplitOutputFilePath(copyableFile, this.outputDir, copyableFile.getDatasetAndPartition(this.copyableDatasetMetadata), this.state); log.info(String.format("Committing data from %s to %s", stagingFilePath, outputFilePath)); try { setFilePermissions(copyableFile); Iterator<OwnerAndPermission> ancestorOwnerAndPermissionIt = copyableFile.getAncestorsOwnerAndPermission() == null ? Iterators.<OwnerAndPermission>emptyIterator() : copyableFile.getAncestorsOwnerAndPermission().iterator(); ensureDirectoryExists(this.fs, outputFilePath.getParent(), ancestorOwnerAndPermissionIt); this.fileContext.rename(stagingFilePath, outputFilePath, renameOptions); } catch (IOException ioe) { log.error("Could not commit file %s.", outputFilePath); // persist file this.recoveryHelper.persistFile(this.state, copyableFile, stagingFilePath); throw ioe; } finally { try { this.fs.delete(this.stagingDir, true); } catch (IOException ioe) { log.warn("Failed to delete staging path at " + this.stagingDir); } } }
java
@Override public void commit() throws IOException { if (!this.actualProcessedCopyableFile.isPresent()) { return; } CopyableFile copyableFile = this.actualProcessedCopyableFile.get(); Path stagingFilePath = getStagingFilePath(copyableFile); Path outputFilePath = getSplitOutputFilePath(copyableFile, this.outputDir, copyableFile.getDatasetAndPartition(this.copyableDatasetMetadata), this.state); log.info(String.format("Committing data from %s to %s", stagingFilePath, outputFilePath)); try { setFilePermissions(copyableFile); Iterator<OwnerAndPermission> ancestorOwnerAndPermissionIt = copyableFile.getAncestorsOwnerAndPermission() == null ? Iterators.<OwnerAndPermission>emptyIterator() : copyableFile.getAncestorsOwnerAndPermission().iterator(); ensureDirectoryExists(this.fs, outputFilePath.getParent(), ancestorOwnerAndPermissionIt); this.fileContext.rename(stagingFilePath, outputFilePath, renameOptions); } catch (IOException ioe) { log.error("Could not commit file %s.", outputFilePath); // persist file this.recoveryHelper.persistFile(this.state, copyableFile, stagingFilePath); throw ioe; } finally { try { this.fs.delete(this.stagingDir, true); } catch (IOException ioe) { log.warn("Failed to delete staging path at " + this.stagingDir); } } }
[ "@", "Override", "public", "void", "commit", "(", ")", "throws", "IOException", "{", "if", "(", "!", "this", ".", "actualProcessedCopyableFile", ".", "isPresent", "(", ")", ")", "{", "return", ";", "}", "CopyableFile", "copyableFile", "=", "this", ".", "actualProcessedCopyableFile", ".", "get", "(", ")", ";", "Path", "stagingFilePath", "=", "getStagingFilePath", "(", "copyableFile", ")", ";", "Path", "outputFilePath", "=", "getSplitOutputFilePath", "(", "copyableFile", ",", "this", ".", "outputDir", ",", "copyableFile", ".", "getDatasetAndPartition", "(", "this", ".", "copyableDatasetMetadata", ")", ",", "this", ".", "state", ")", ";", "log", ".", "info", "(", "String", ".", "format", "(", "\"Committing data from %s to %s\"", ",", "stagingFilePath", ",", "outputFilePath", ")", ")", ";", "try", "{", "setFilePermissions", "(", "copyableFile", ")", ";", "Iterator", "<", "OwnerAndPermission", ">", "ancestorOwnerAndPermissionIt", "=", "copyableFile", ".", "getAncestorsOwnerAndPermission", "(", ")", "==", "null", "?", "Iterators", ".", "<", "OwnerAndPermission", ">", "emptyIterator", "(", ")", ":", "copyableFile", ".", "getAncestorsOwnerAndPermission", "(", ")", ".", "iterator", "(", ")", ";", "ensureDirectoryExists", "(", "this", ".", "fs", ",", "outputFilePath", ".", "getParent", "(", ")", ",", "ancestorOwnerAndPermissionIt", ")", ";", "this", ".", "fileContext", ".", "rename", "(", "stagingFilePath", ",", "outputFilePath", ",", "renameOptions", ")", ";", "}", "catch", "(", "IOException", "ioe", ")", "{", "log", ".", "error", "(", "\"Could not commit file %s.\"", ",", "outputFilePath", ")", ";", "// persist file", "this", ".", "recoveryHelper", ".", "persistFile", "(", "this", ".", "state", ",", "copyableFile", ",", "stagingFilePath", ")", ";", "throw", "ioe", ";", "}", "finally", "{", "try", "{", "this", ".", "fs", ".", "delete", "(", "this", ".", "stagingDir", ",", "true", ")", ";", "}", "catch", "(", "IOException", "ioe", ")", "{", "log", ".", "warn", "(", 
"\"Failed to delete staging path at \"", "+", "this", ".", "stagingDir", ")", ";", "}", "}", "}" ]
Moves the file from task staging to task output. Each task has its own staging directory but all the tasks share the same task output directory. {@inheritDoc} @see DataWriter#commit()
[ "Moves", "the", "file", "from", "task", "staging", "to", "task", "output", ".", "Each", "task", "has", "its", "own", "staging", "directory", "but", "all", "the", "tasks", "share", "the", "same", "task", "output", "directory", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-data-management/src/main/java/org/apache/gobblin/data/management/copy/writer/FileAwareInputStreamDataWriter.java#L414-L450
25,932
apache/incubator-gobblin
gobblin-modules/gobblin-kafka-common/src/main/java/org/apache/gobblin/source/extractor/extract/kafka/MultiLongWatermark.java
MultiLongWatermark.getGap
public long getGap(MultiLongWatermark highWatermark) { Preconditions.checkNotNull(highWatermark); Preconditions.checkArgument(this.values.size() == highWatermark.values.size()); long diff = 0; for (int i = 0; i < this.values.size(); i++) { Preconditions.checkArgument(this.values.get(i) <= highWatermark.values.get(i)); diff += highWatermark.values.get(i) - this.values.get(i); } return diff; }
java
public long getGap(MultiLongWatermark highWatermark) { Preconditions.checkNotNull(highWatermark); Preconditions.checkArgument(this.values.size() == highWatermark.values.size()); long diff = 0; for (int i = 0; i < this.values.size(); i++) { Preconditions.checkArgument(this.values.get(i) <= highWatermark.values.get(i)); diff += highWatermark.values.get(i) - this.values.get(i); } return diff; }
[ "public", "long", "getGap", "(", "MultiLongWatermark", "highWatermark", ")", "{", "Preconditions", ".", "checkNotNull", "(", "highWatermark", ")", ";", "Preconditions", ".", "checkArgument", "(", "this", ".", "values", ".", "size", "(", ")", "==", "highWatermark", ".", "values", ".", "size", "(", ")", ")", ";", "long", "diff", "=", "0", ";", "for", "(", "int", "i", "=", "0", ";", "i", "<", "this", ".", "values", ".", "size", "(", ")", ";", "i", "++", ")", "{", "Preconditions", ".", "checkArgument", "(", "this", ".", "values", ".", "get", "(", "i", ")", "<=", "highWatermark", ".", "values", ".", "get", "(", "i", ")", ")", ";", "diff", "+=", "highWatermark", ".", "values", ".", "get", "(", "i", ")", "-", "this", ".", "values", ".", "get", "(", "i", ")", ";", "}", "return", "diff", ";", "}" ]
Get the number of records that need to be pulled given the high watermark.
[ "Get", "the", "number", "of", "records", "that", "need", "to", "be", "pulled", "given", "the", "high", "watermark", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-modules/gobblin-kafka-common/src/main/java/org/apache/gobblin/source/extractor/extract/kafka/MultiLongWatermark.java#L94-L103
25,933
apache/incubator-gobblin
gobblin-modules/gobblin-kafka-08/src/main/java/org/apache/gobblin/source/extractor/extract/kafka/KafkaWrapper.java
KafkaWrapper.create
public static KafkaWrapper create(State state) { Preconditions.checkNotNull(state.getProp(ConfigurationKeys.KAFKA_BROKERS), "Need to specify at least one Kafka broker."); KafkaWrapper.Builder builder = new KafkaWrapper.Builder(); if (state.getPropAsBoolean(USE_NEW_KAFKA_API, DEFAULT_USE_NEW_KAFKA_API)) { builder = builder.withNewKafkaAPI(); } Config config = ConfigUtils.propertiesToConfig(state.getProperties()); return builder.withBrokers(state.getPropAsList(ConfigurationKeys.KAFKA_BROKERS)) .withConfig(config) .build(); }
java
public static KafkaWrapper create(State state) { Preconditions.checkNotNull(state.getProp(ConfigurationKeys.KAFKA_BROKERS), "Need to specify at least one Kafka broker."); KafkaWrapper.Builder builder = new KafkaWrapper.Builder(); if (state.getPropAsBoolean(USE_NEW_KAFKA_API, DEFAULT_USE_NEW_KAFKA_API)) { builder = builder.withNewKafkaAPI(); } Config config = ConfigUtils.propertiesToConfig(state.getProperties()); return builder.withBrokers(state.getPropAsList(ConfigurationKeys.KAFKA_BROKERS)) .withConfig(config) .build(); }
[ "public", "static", "KafkaWrapper", "create", "(", "State", "state", ")", "{", "Preconditions", ".", "checkNotNull", "(", "state", ".", "getProp", "(", "ConfigurationKeys", ".", "KAFKA_BROKERS", ")", ",", "\"Need to specify at least one Kafka broker.\"", ")", ";", "KafkaWrapper", ".", "Builder", "builder", "=", "new", "KafkaWrapper", ".", "Builder", "(", ")", ";", "if", "(", "state", ".", "getPropAsBoolean", "(", "USE_NEW_KAFKA_API", ",", "DEFAULT_USE_NEW_KAFKA_API", ")", ")", "{", "builder", "=", "builder", ".", "withNewKafkaAPI", "(", ")", ";", "}", "Config", "config", "=", "ConfigUtils", ".", "propertiesToConfig", "(", "state", ".", "getProperties", "(", ")", ")", ";", "return", "builder", ".", "withBrokers", "(", "state", ".", "getPropAsList", "(", "ConfigurationKeys", ".", "KAFKA_BROKERS", ")", ")", ".", "withConfig", "(", "config", ")", ".", "build", "(", ")", ";", "}" ]
Create a KafkaWrapper based on the given type of Kafka API and list of Kafka brokers. @param state A {@link State} object that should contain a list of comma separated Kafka brokers in property "kafka.brokers". It may optionally specify whether to use the new Kafka API by setting use.new.kafka.api=true.
[ "Create", "a", "KafkaWrapper", "based", "on", "the", "given", "type", "of", "Kafka", "API", "and", "list", "of", "Kafka", "brokers", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-modules/gobblin-kafka-08/src/main/java/org/apache/gobblin/source/extractor/extract/kafka/KafkaWrapper.java#L127-L138
25,934
apache/incubator-gobblin
gobblin-core/src/main/java/org/apache/gobblin/publisher/TimePartitionedDataPublisher.java
TimePartitionedDataPublisher.addWriterOutputToExistingDir
@Override protected void addWriterOutputToExistingDir(Path writerOutput, Path publisherOutput, WorkUnitState workUnitState, int branchId, ParallelRunner parallelRunner) throws IOException { for (FileStatus status : FileListUtils.listFilesRecursively(this.writerFileSystemByBranches.get(branchId), writerOutput)) { String filePathStr = status.getPath().toString(); String pathSuffix = filePathStr.substring(filePathStr.indexOf(writerOutput.toString()) + writerOutput.toString().length() + 1); Path outputPath = new Path(publisherOutput, pathSuffix); WriterUtils.mkdirsWithRecursivePermissionWithRetry(this.publisherFileSystemByBranches.get(branchId), outputPath.getParent(), this.permissions.get(branchId), this.retrierConfig); movePath(parallelRunner, workUnitState, status.getPath(), outputPath, branchId); } }
java
@Override protected void addWriterOutputToExistingDir(Path writerOutput, Path publisherOutput, WorkUnitState workUnitState, int branchId, ParallelRunner parallelRunner) throws IOException { for (FileStatus status : FileListUtils.listFilesRecursively(this.writerFileSystemByBranches.get(branchId), writerOutput)) { String filePathStr = status.getPath().toString(); String pathSuffix = filePathStr.substring(filePathStr.indexOf(writerOutput.toString()) + writerOutput.toString().length() + 1); Path outputPath = new Path(publisherOutput, pathSuffix); WriterUtils.mkdirsWithRecursivePermissionWithRetry(this.publisherFileSystemByBranches.get(branchId), outputPath.getParent(), this.permissions.get(branchId), this.retrierConfig); movePath(parallelRunner, workUnitState, status.getPath(), outputPath, branchId); } }
[ "@", "Override", "protected", "void", "addWriterOutputToExistingDir", "(", "Path", "writerOutput", ",", "Path", "publisherOutput", ",", "WorkUnitState", "workUnitState", ",", "int", "branchId", ",", "ParallelRunner", "parallelRunner", ")", "throws", "IOException", "{", "for", "(", "FileStatus", "status", ":", "FileListUtils", ".", "listFilesRecursively", "(", "this", ".", "writerFileSystemByBranches", ".", "get", "(", "branchId", ")", ",", "writerOutput", ")", ")", "{", "String", "filePathStr", "=", "status", ".", "getPath", "(", ")", ".", "toString", "(", ")", ";", "String", "pathSuffix", "=", "filePathStr", ".", "substring", "(", "filePathStr", ".", "indexOf", "(", "writerOutput", ".", "toString", "(", ")", ")", "+", "writerOutput", ".", "toString", "(", ")", ".", "length", "(", ")", "+", "1", ")", ";", "Path", "outputPath", "=", "new", "Path", "(", "publisherOutput", ",", "pathSuffix", ")", ";", "WriterUtils", ".", "mkdirsWithRecursivePermissionWithRetry", "(", "this", ".", "publisherFileSystemByBranches", ".", "get", "(", "branchId", ")", ",", "outputPath", ".", "getParent", "(", ")", ",", "this", ".", "permissions", ".", "get", "(", "branchId", ")", ",", "this", ".", "retrierConfig", ")", ";", "movePath", "(", "parallelRunner", ",", "workUnitState", ",", "status", ".", "getPath", "(", ")", ",", "outputPath", ",", "branchId", ")", ";", "}", "}" ]
This method needs to be overridden for TimePartitionedDataPublisher, since the output folder structure contains timestamp, we have to move the files recursively. For example, move {writerOutput}/2015/04/08/15/output.avro to {publisherOutput}/2015/04/08/15/output.avro
[ "This", "method", "needs", "to", "be", "overridden", "for", "TimePartitionedDataPublisher", "since", "the", "output", "folder", "structure", "contains", "timestamp", "we", "have", "to", "move", "the", "files", "recursively", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-core/src/main/java/org/apache/gobblin/publisher/TimePartitionedDataPublisher.java#L58-L74
25,935
apache/incubator-gobblin
gobblin-utility/src/main/java/org/apache/gobblin/util/EmailUtils.java
EmailUtils.sendEmail
public static void sendEmail(State state, String subject, String message) throws EmailException { Email email = new SimpleEmail(); email.setHostName(state.getProp(ConfigurationKeys.EMAIL_HOST_KEY, ConfigurationKeys.DEFAULT_EMAIL_HOST)); if (state.contains(ConfigurationKeys.EMAIL_SMTP_PORT_KEY)) { email.setSmtpPort(state.getPropAsInt(ConfigurationKeys.EMAIL_SMTP_PORT_KEY)); } email.setFrom(state.getProp(ConfigurationKeys.EMAIL_FROM_KEY)); if (state.contains(ConfigurationKeys.EMAIL_USER_KEY) && state.contains(ConfigurationKeys.EMAIL_PASSWORD_KEY)) { email.setAuthentication(state.getProp(ConfigurationKeys.EMAIL_USER_KEY), PasswordManager.getInstance(state).readPassword(state.getProp(ConfigurationKeys.EMAIL_PASSWORD_KEY))); } Iterable<String> tos = Splitter.on(',').trimResults().omitEmptyStrings().split(state.getProp(ConfigurationKeys.EMAIL_TOS_KEY)); for (String to : tos) { email.addTo(to); } String hostName; try { hostName = InetAddress.getLocalHost().getHostName(); } catch (UnknownHostException uhe) { LOGGER.error("Failed to get the host name", uhe); hostName = "unknown"; } email.setSubject(subject); String fromHostLine = String.format("This email was sent from host: %s%n%n", hostName); email.setMsg(fromHostLine + message); email.send(); }
java
public static void sendEmail(State state, String subject, String message) throws EmailException { Email email = new SimpleEmail(); email.setHostName(state.getProp(ConfigurationKeys.EMAIL_HOST_KEY, ConfigurationKeys.DEFAULT_EMAIL_HOST)); if (state.contains(ConfigurationKeys.EMAIL_SMTP_PORT_KEY)) { email.setSmtpPort(state.getPropAsInt(ConfigurationKeys.EMAIL_SMTP_PORT_KEY)); } email.setFrom(state.getProp(ConfigurationKeys.EMAIL_FROM_KEY)); if (state.contains(ConfigurationKeys.EMAIL_USER_KEY) && state.contains(ConfigurationKeys.EMAIL_PASSWORD_KEY)) { email.setAuthentication(state.getProp(ConfigurationKeys.EMAIL_USER_KEY), PasswordManager.getInstance(state).readPassword(state.getProp(ConfigurationKeys.EMAIL_PASSWORD_KEY))); } Iterable<String> tos = Splitter.on(',').trimResults().omitEmptyStrings().split(state.getProp(ConfigurationKeys.EMAIL_TOS_KEY)); for (String to : tos) { email.addTo(to); } String hostName; try { hostName = InetAddress.getLocalHost().getHostName(); } catch (UnknownHostException uhe) { LOGGER.error("Failed to get the host name", uhe); hostName = "unknown"; } email.setSubject(subject); String fromHostLine = String.format("This email was sent from host: %s%n%n", hostName); email.setMsg(fromHostLine + message); email.send(); }
[ "public", "static", "void", "sendEmail", "(", "State", "state", ",", "String", "subject", ",", "String", "message", ")", "throws", "EmailException", "{", "Email", "email", "=", "new", "SimpleEmail", "(", ")", ";", "email", ".", "setHostName", "(", "state", ".", "getProp", "(", "ConfigurationKeys", ".", "EMAIL_HOST_KEY", ",", "ConfigurationKeys", ".", "DEFAULT_EMAIL_HOST", ")", ")", ";", "if", "(", "state", ".", "contains", "(", "ConfigurationKeys", ".", "EMAIL_SMTP_PORT_KEY", ")", ")", "{", "email", ".", "setSmtpPort", "(", "state", ".", "getPropAsInt", "(", "ConfigurationKeys", ".", "EMAIL_SMTP_PORT_KEY", ")", ")", ";", "}", "email", ".", "setFrom", "(", "state", ".", "getProp", "(", "ConfigurationKeys", ".", "EMAIL_FROM_KEY", ")", ")", ";", "if", "(", "state", ".", "contains", "(", "ConfigurationKeys", ".", "EMAIL_USER_KEY", ")", "&&", "state", ".", "contains", "(", "ConfigurationKeys", ".", "EMAIL_PASSWORD_KEY", ")", ")", "{", "email", ".", "setAuthentication", "(", "state", ".", "getProp", "(", "ConfigurationKeys", ".", "EMAIL_USER_KEY", ")", ",", "PasswordManager", ".", "getInstance", "(", "state", ")", ".", "readPassword", "(", "state", ".", "getProp", "(", "ConfigurationKeys", ".", "EMAIL_PASSWORD_KEY", ")", ")", ")", ";", "}", "Iterable", "<", "String", ">", "tos", "=", "Splitter", ".", "on", "(", "'", "'", ")", ".", "trimResults", "(", ")", ".", "omitEmptyStrings", "(", ")", ".", "split", "(", "state", ".", "getProp", "(", "ConfigurationKeys", ".", "EMAIL_TOS_KEY", ")", ")", ";", "for", "(", "String", "to", ":", "tos", ")", "{", "email", ".", "addTo", "(", "to", ")", ";", "}", "String", "hostName", ";", "try", "{", "hostName", "=", "InetAddress", ".", "getLocalHost", "(", ")", ".", "getHostName", "(", ")", ";", "}", "catch", "(", "UnknownHostException", "uhe", ")", "{", "LOGGER", ".", "error", "(", "\"Failed to get the host name\"", ",", "uhe", ")", ";", "hostName", "=", "\"unknown\"", ";", "}", "email", ".", "setSubject", "(", "subject", ")", ";", 
"String", "fromHostLine", "=", "String", ".", "format", "(", "\"This email was sent from host: %s%n%n\"", ",", "hostName", ")", ";", "email", ".", "setMsg", "(", "fromHostLine", "+", "message", ")", ";", "email", ".", "send", "(", ")", ";", "}" ]
A general method for sending emails. @param state a {@link State} object containing configuration properties @param subject email subject @param message email message @throws EmailException if there is anything wrong sending the email
[ "A", "general", "method", "for", "sending", "emails", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-utility/src/main/java/org/apache/gobblin/util/EmailUtils.java#L53-L82
25,936
apache/incubator-gobblin
gobblin-utility/src/main/java/org/apache/gobblin/util/EmailUtils.java
EmailUtils.sendJobCompletionEmail
public static void sendJobCompletionEmail(String jobId, String message, String state, State jobState) throws EmailException { sendEmail(jobState, String.format("Gobblin notification: job %s has completed with state %s", jobId, state), message); }
java
public static void sendJobCompletionEmail(String jobId, String message, String state, State jobState) throws EmailException { sendEmail(jobState, String.format("Gobblin notification: job %s has completed with state %s", jobId, state), message); }
[ "public", "static", "void", "sendJobCompletionEmail", "(", "String", "jobId", ",", "String", "message", ",", "String", "state", ",", "State", "jobState", ")", "throws", "EmailException", "{", "sendEmail", "(", "jobState", ",", "String", ".", "format", "(", "\"Gobblin notification: job %s has completed with state %s\"", ",", "jobId", ",", "state", ")", ",", "message", ")", ";", "}" ]
Send a job completion notification email. @param jobId job name @param message email message @param state job state @param jobState a {@link State} object carrying job configuration properties @throws EmailException if there is anything wrong sending the email
[ "Send", "a", "job", "completion", "notification", "email", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-utility/src/main/java/org/apache/gobblin/util/EmailUtils.java#L93-L97
25,937
apache/incubator-gobblin
gobblin-utility/src/main/java/org/apache/gobblin/util/EmailUtils.java
EmailUtils.sendJobCancellationEmail
public static void sendJobCancellationEmail(String jobId, String message, State jobState) throws EmailException { sendEmail(jobState, String.format("Gobblin notification: job %s has been cancelled", jobId), message); }
java
public static void sendJobCancellationEmail(String jobId, String message, State jobState) throws EmailException { sendEmail(jobState, String.format("Gobblin notification: job %s has been cancelled", jobId), message); }
[ "public", "static", "void", "sendJobCancellationEmail", "(", "String", "jobId", ",", "String", "message", ",", "State", "jobState", ")", "throws", "EmailException", "{", "sendEmail", "(", "jobState", ",", "String", ".", "format", "(", "\"Gobblin notification: job %s has been cancelled\"", ",", "jobId", ")", ",", "message", ")", ";", "}" ]
Send a job cancellation notification email. @param jobId job name @param message email message @param jobState a {@link State} object carrying job configuration properties @throws EmailException if there is anything wrong sending the email
[ "Send", "a", "job", "cancellation", "notification", "email", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-utility/src/main/java/org/apache/gobblin/util/EmailUtils.java#L107-L109
25,938
apache/incubator-gobblin
gobblin-utility/src/main/java/org/apache/gobblin/util/EmailUtils.java
EmailUtils.sendJobFailureAlertEmail
public static void sendJobFailureAlertEmail(String jobName, String message, int failures, State jobState) throws EmailException { sendEmail(jobState, String.format("Gobblin alert: job %s has failed %d %s consecutively in the past", jobName, failures, failures > 1 ? "times" : "time"), message); }
java
public static void sendJobFailureAlertEmail(String jobName, String message, int failures, State jobState) throws EmailException { sendEmail(jobState, String.format("Gobblin alert: job %s has failed %d %s consecutively in the past", jobName, failures, failures > 1 ? "times" : "time"), message); }
[ "public", "static", "void", "sendJobFailureAlertEmail", "(", "String", "jobName", ",", "String", "message", ",", "int", "failures", ",", "State", "jobState", ")", "throws", "EmailException", "{", "sendEmail", "(", "jobState", ",", "String", ".", "format", "(", "\"Gobblin alert: job %s has failed %d %s consecutively in the past\"", ",", "jobName", ",", "failures", ",", "failures", ">", "1", "?", "\"times\"", ":", "\"time\"", ")", ",", "message", ")", ";", "}" ]
Send a job failure alert email. @param jobName job name @param message email message @param failures number of consecutive job failures @param jobState a {@link State} object carrying job configuration properties @throws EmailException if there is anything wrong sending the email
[ "Send", "a", "job", "failure", "alert", "email", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-utility/src/main/java/org/apache/gobblin/util/EmailUtils.java#L120-L124
25,939
apache/incubator-gobblin
gobblin-compaction/src/main/java/org/apache/gobblin/compaction/conditions/RecompactionCombineCondition.java
RecompactionCombineCondition.isRecompactionNeeded
public boolean isRecompactionNeeded (DatasetHelper helper) { if (recompactionConditions.isEmpty()) return false; if (operation == CombineOperation.OR) { for (RecompactionCondition c : recompactionConditions) { if (c.isRecompactionNeeded(helper)) { return true; } } return false; } else { for (RecompactionCondition c : recompactionConditions) { if (!c.isRecompactionNeeded(helper)) { return false; } } return true; } }
java
public boolean isRecompactionNeeded (DatasetHelper helper) { if (recompactionConditions.isEmpty()) return false; if (operation == CombineOperation.OR) { for (RecompactionCondition c : recompactionConditions) { if (c.isRecompactionNeeded(helper)) { return true; } } return false; } else { for (RecompactionCondition c : recompactionConditions) { if (!c.isRecompactionNeeded(helper)) { return false; } } return true; } }
[ "public", "boolean", "isRecompactionNeeded", "(", "DatasetHelper", "helper", ")", "{", "if", "(", "recompactionConditions", ".", "isEmpty", "(", ")", ")", "return", "false", ";", "if", "(", "operation", "==", "CombineOperation", ".", "OR", ")", "{", "for", "(", "RecompactionCondition", "c", ":", "recompactionConditions", ")", "{", "if", "(", "c", ".", "isRecompactionNeeded", "(", "helper", ")", ")", "{", "return", "true", ";", "}", "}", "return", "false", ";", "}", "else", "{", "for", "(", "RecompactionCondition", "c", ":", "recompactionConditions", ")", "{", "if", "(", "!", "c", ".", "isRecompactionNeeded", "(", "helper", ")", ")", "{", "return", "false", ";", "}", "}", "return", "true", ";", "}", "}" ]
For OR combination, return true iff one of conditions return true For AND combination, return true iff all of conditions return true Other cases, return false
[ "For", "OR", "combination", "return", "true", "iff", "one", "of", "conditions", "return", "true", "For", "AND", "combination", "return", "true", "iff", "all", "of", "conditions", "return", "true", "Other", "cases", "return", "false" ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-compaction/src/main/java/org/apache/gobblin/compaction/conditions/RecompactionCombineCondition.java#L111-L130
25,940
apache/incubator-gobblin
gobblin-core/src/main/java/org/apache/gobblin/qualitychecker/row/RowLevelErrFileWriter.java
RowLevelErrFileWriter.open
public void open(Path errFilePath) throws IOException { this.fs.mkdirs(errFilePath.getParent()); OutputStream os = this.closer.register(this.fs.exists(errFilePath) ? this.fs.append(errFilePath) : this.fs.create(errFilePath)); this.writer = this.closer .register(new BufferedWriter(new OutputStreamWriter(os, ConfigurationKeys.DEFAULT_CHARSET_ENCODING))); }
java
public void open(Path errFilePath) throws IOException { this.fs.mkdirs(errFilePath.getParent()); OutputStream os = this.closer.register(this.fs.exists(errFilePath) ? this.fs.append(errFilePath) : this.fs.create(errFilePath)); this.writer = this.closer .register(new BufferedWriter(new OutputStreamWriter(os, ConfigurationKeys.DEFAULT_CHARSET_ENCODING))); }
[ "public", "void", "open", "(", "Path", "errFilePath", ")", "throws", "IOException", "{", "this", ".", "fs", ".", "mkdirs", "(", "errFilePath", ".", "getParent", "(", ")", ")", ";", "OutputStream", "os", "=", "this", ".", "closer", ".", "register", "(", "this", ".", "fs", ".", "exists", "(", "errFilePath", ")", "?", "this", ".", "fs", ".", "append", "(", "errFilePath", ")", ":", "this", ".", "fs", ".", "create", "(", "errFilePath", ")", ")", ";", "this", ".", "writer", "=", "this", ".", "closer", ".", "register", "(", "new", "BufferedWriter", "(", "new", "OutputStreamWriter", "(", "os", ",", "ConfigurationKeys", ".", "DEFAULT_CHARSET_ENCODING", ")", ")", ")", ";", "}" ]
Open a BufferedWriter @param errFilePath path to write the file
[ "Open", "a", "BufferedWriter" ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-core/src/main/java/org/apache/gobblin/qualitychecker/row/RowLevelErrFileWriter.java#L53-L59
25,941
apache/incubator-gobblin
gobblin-core/src/main/java/org/apache/gobblin/source/extractor/partition/Partitioner.java
Partitioner.getGlobalPartition
public Partition getGlobalPartition(long previousWatermark) { ExtractType extractType = ExtractType.valueOf(state.getProp(ConfigurationKeys.SOURCE_QUERYBASED_EXTRACT_TYPE).toUpperCase()); WatermarkType watermarkType = WatermarkType.valueOf( state.getProp(ConfigurationKeys.SOURCE_QUERYBASED_WATERMARK_TYPE, ConfigurationKeys.DEFAULT_WATERMARK_TYPE) .toUpperCase()); WatermarkPredicate watermark = new WatermarkPredicate(null, watermarkType); int deltaForNextWatermark = watermark.getDeltaNumForNextWatermark(); long lowWatermark = getLowWatermark(extractType, watermarkType, previousWatermark, deltaForNextWatermark); long highWatermark = getHighWatermark(extractType, watermarkType); return new Partition(lowWatermark, highWatermark, true, hasUserSpecifiedHighWatermark); }
java
public Partition getGlobalPartition(long previousWatermark) { ExtractType extractType = ExtractType.valueOf(state.getProp(ConfigurationKeys.SOURCE_QUERYBASED_EXTRACT_TYPE).toUpperCase()); WatermarkType watermarkType = WatermarkType.valueOf( state.getProp(ConfigurationKeys.SOURCE_QUERYBASED_WATERMARK_TYPE, ConfigurationKeys.DEFAULT_WATERMARK_TYPE) .toUpperCase()); WatermarkPredicate watermark = new WatermarkPredicate(null, watermarkType); int deltaForNextWatermark = watermark.getDeltaNumForNextWatermark(); long lowWatermark = getLowWatermark(extractType, watermarkType, previousWatermark, deltaForNextWatermark); long highWatermark = getHighWatermark(extractType, watermarkType); return new Partition(lowWatermark, highWatermark, true, hasUserSpecifiedHighWatermark); }
[ "public", "Partition", "getGlobalPartition", "(", "long", "previousWatermark", ")", "{", "ExtractType", "extractType", "=", "ExtractType", ".", "valueOf", "(", "state", ".", "getProp", "(", "ConfigurationKeys", ".", "SOURCE_QUERYBASED_EXTRACT_TYPE", ")", ".", "toUpperCase", "(", ")", ")", ";", "WatermarkType", "watermarkType", "=", "WatermarkType", ".", "valueOf", "(", "state", ".", "getProp", "(", "ConfigurationKeys", ".", "SOURCE_QUERYBASED_WATERMARK_TYPE", ",", "ConfigurationKeys", ".", "DEFAULT_WATERMARK_TYPE", ")", ".", "toUpperCase", "(", ")", ")", ";", "WatermarkPredicate", "watermark", "=", "new", "WatermarkPredicate", "(", "null", ",", "watermarkType", ")", ";", "int", "deltaForNextWatermark", "=", "watermark", ".", "getDeltaNumForNextWatermark", "(", ")", ";", "long", "lowWatermark", "=", "getLowWatermark", "(", "extractType", ",", "watermarkType", ",", "previousWatermark", ",", "deltaForNextWatermark", ")", ";", "long", "highWatermark", "=", "getHighWatermark", "(", "extractType", ",", "watermarkType", ")", ";", "return", "new", "Partition", "(", "lowWatermark", ",", "highWatermark", ",", "true", ",", "hasUserSpecifiedHighWatermark", ")", ";", "}" ]
Get the global partition of the whole data set, which has the global low and high watermarks @param previousWatermark previous watermark for computing the low watermark of current run @return a Partition instance
[ "Get", "the", "global", "partition", "of", "the", "whole", "data", "set", "which", "has", "the", "global", "low", "and", "high", "watermarks" ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-core/src/main/java/org/apache/gobblin/source/extractor/partition/Partitioner.java#L95-L108
25,942
apache/incubator-gobblin
gobblin-core/src/main/java/org/apache/gobblin/source/extractor/partition/Partitioner.java
Partitioner.getPartitions
@Deprecated public HashMap<Long, Long> getPartitions(long previousWatermark) { HashMap<Long, Long> defaultPartition = Maps.newHashMap(); if (!isWatermarkExists()) { defaultPartition.put(ConfigurationKeys.DEFAULT_WATERMARK_VALUE, ConfigurationKeys.DEFAULT_WATERMARK_VALUE); LOG.info("Watermark column or type not found - Default partition with low watermark and high watermark as " + ConfigurationKeys.DEFAULT_WATERMARK_VALUE); return defaultPartition; } ExtractType extractType = ExtractType.valueOf(this.state.getProp(ConfigurationKeys.SOURCE_QUERYBASED_EXTRACT_TYPE).toUpperCase()); WatermarkType watermarkType = WatermarkType.valueOf( this.state.getProp(ConfigurationKeys.SOURCE_QUERYBASED_WATERMARK_TYPE, ConfigurationKeys.DEFAULT_WATERMARK_TYPE) .toUpperCase()); int interval = getUpdatedInterval(this.state.getPropAsInt(ConfigurationKeys.SOURCE_QUERYBASED_PARTITION_INTERVAL, 0), extractType, watermarkType); int sourceMaxAllowedPartitions = this.state.getPropAsInt(ConfigurationKeys.SOURCE_MAX_NUMBER_OF_PARTITIONS, 0); int maxPartitions = (sourceMaxAllowedPartitions != 0 ? sourceMaxAllowedPartitions : ConfigurationKeys.DEFAULT_MAX_NUMBER_OF_PARTITIONS); WatermarkPredicate watermark = new WatermarkPredicate(null, watermarkType); int deltaForNextWatermark = watermark.getDeltaNumForNextWatermark(); LOG.info("is watermark override: " + this.isWatermarkOverride()); LOG.info("is full extract: " + this.isFullDump()); long lowWatermark = this.getLowWatermark(extractType, watermarkType, previousWatermark, deltaForNextWatermark); long highWatermark = this.getHighWatermark(extractType, watermarkType); if (lowWatermark == ConfigurationKeys.DEFAULT_WATERMARK_VALUE || highWatermark == ConfigurationKeys.DEFAULT_WATERMARK_VALUE) { LOG.info( "Low watermark or high water mark is not found. 
Hence cannot generate partitions - Default partition with low watermark: " + lowWatermark + " and high watermark: " + highWatermark); defaultPartition.put(lowWatermark, highWatermark); return defaultPartition; } LOG.info("Generate partitions with low watermark: " + lowWatermark + "; high watermark: " + highWatermark + "; partition interval in hours: " + interval + "; Maximum number of allowed partitions: " + maxPartitions); return watermark.getPartitions(lowWatermark, highWatermark, interval, maxPartitions); }
java
@Deprecated public HashMap<Long, Long> getPartitions(long previousWatermark) { HashMap<Long, Long> defaultPartition = Maps.newHashMap(); if (!isWatermarkExists()) { defaultPartition.put(ConfigurationKeys.DEFAULT_WATERMARK_VALUE, ConfigurationKeys.DEFAULT_WATERMARK_VALUE); LOG.info("Watermark column or type not found - Default partition with low watermark and high watermark as " + ConfigurationKeys.DEFAULT_WATERMARK_VALUE); return defaultPartition; } ExtractType extractType = ExtractType.valueOf(this.state.getProp(ConfigurationKeys.SOURCE_QUERYBASED_EXTRACT_TYPE).toUpperCase()); WatermarkType watermarkType = WatermarkType.valueOf( this.state.getProp(ConfigurationKeys.SOURCE_QUERYBASED_WATERMARK_TYPE, ConfigurationKeys.DEFAULT_WATERMARK_TYPE) .toUpperCase()); int interval = getUpdatedInterval(this.state.getPropAsInt(ConfigurationKeys.SOURCE_QUERYBASED_PARTITION_INTERVAL, 0), extractType, watermarkType); int sourceMaxAllowedPartitions = this.state.getPropAsInt(ConfigurationKeys.SOURCE_MAX_NUMBER_OF_PARTITIONS, 0); int maxPartitions = (sourceMaxAllowedPartitions != 0 ? sourceMaxAllowedPartitions : ConfigurationKeys.DEFAULT_MAX_NUMBER_OF_PARTITIONS); WatermarkPredicate watermark = new WatermarkPredicate(null, watermarkType); int deltaForNextWatermark = watermark.getDeltaNumForNextWatermark(); LOG.info("is watermark override: " + this.isWatermarkOverride()); LOG.info("is full extract: " + this.isFullDump()); long lowWatermark = this.getLowWatermark(extractType, watermarkType, previousWatermark, deltaForNextWatermark); long highWatermark = this.getHighWatermark(extractType, watermarkType); if (lowWatermark == ConfigurationKeys.DEFAULT_WATERMARK_VALUE || highWatermark == ConfigurationKeys.DEFAULT_WATERMARK_VALUE) { LOG.info( "Low watermark or high water mark is not found. 
Hence cannot generate partitions - Default partition with low watermark: " + lowWatermark + " and high watermark: " + highWatermark); defaultPartition.put(lowWatermark, highWatermark); return defaultPartition; } LOG.info("Generate partitions with low watermark: " + lowWatermark + "; high watermark: " + highWatermark + "; partition interval in hours: " + interval + "; Maximum number of allowed partitions: " + maxPartitions); return watermark.getPartitions(lowWatermark, highWatermark, interval, maxPartitions); }
[ "@", "Deprecated", "public", "HashMap", "<", "Long", ",", "Long", ">", "getPartitions", "(", "long", "previousWatermark", ")", "{", "HashMap", "<", "Long", ",", "Long", ">", "defaultPartition", "=", "Maps", ".", "newHashMap", "(", ")", ";", "if", "(", "!", "isWatermarkExists", "(", ")", ")", "{", "defaultPartition", ".", "put", "(", "ConfigurationKeys", ".", "DEFAULT_WATERMARK_VALUE", ",", "ConfigurationKeys", ".", "DEFAULT_WATERMARK_VALUE", ")", ";", "LOG", ".", "info", "(", "\"Watermark column or type not found - Default partition with low watermark and high watermark as \"", "+", "ConfigurationKeys", ".", "DEFAULT_WATERMARK_VALUE", ")", ";", "return", "defaultPartition", ";", "}", "ExtractType", "extractType", "=", "ExtractType", ".", "valueOf", "(", "this", ".", "state", ".", "getProp", "(", "ConfigurationKeys", ".", "SOURCE_QUERYBASED_EXTRACT_TYPE", ")", ".", "toUpperCase", "(", ")", ")", ";", "WatermarkType", "watermarkType", "=", "WatermarkType", ".", "valueOf", "(", "this", ".", "state", ".", "getProp", "(", "ConfigurationKeys", ".", "SOURCE_QUERYBASED_WATERMARK_TYPE", ",", "ConfigurationKeys", ".", "DEFAULT_WATERMARK_TYPE", ")", ".", "toUpperCase", "(", ")", ")", ";", "int", "interval", "=", "getUpdatedInterval", "(", "this", ".", "state", ".", "getPropAsInt", "(", "ConfigurationKeys", ".", "SOURCE_QUERYBASED_PARTITION_INTERVAL", ",", "0", ")", ",", "extractType", ",", "watermarkType", ")", ";", "int", "sourceMaxAllowedPartitions", "=", "this", ".", "state", ".", "getPropAsInt", "(", "ConfigurationKeys", ".", "SOURCE_MAX_NUMBER_OF_PARTITIONS", ",", "0", ")", ";", "int", "maxPartitions", "=", "(", "sourceMaxAllowedPartitions", "!=", "0", "?", "sourceMaxAllowedPartitions", ":", "ConfigurationKeys", ".", "DEFAULT_MAX_NUMBER_OF_PARTITIONS", ")", ";", "WatermarkPredicate", "watermark", "=", "new", "WatermarkPredicate", "(", "null", ",", "watermarkType", ")", ";", "int", "deltaForNextWatermark", "=", "watermark", ".", "getDeltaNumForNextWatermark", "(", ")", 
";", "LOG", ".", "info", "(", "\"is watermark override: \"", "+", "this", ".", "isWatermarkOverride", "(", ")", ")", ";", "LOG", ".", "info", "(", "\"is full extract: \"", "+", "this", ".", "isFullDump", "(", ")", ")", ";", "long", "lowWatermark", "=", "this", ".", "getLowWatermark", "(", "extractType", ",", "watermarkType", ",", "previousWatermark", ",", "deltaForNextWatermark", ")", ";", "long", "highWatermark", "=", "this", ".", "getHighWatermark", "(", "extractType", ",", "watermarkType", ")", ";", "if", "(", "lowWatermark", "==", "ConfigurationKeys", ".", "DEFAULT_WATERMARK_VALUE", "||", "highWatermark", "==", "ConfigurationKeys", ".", "DEFAULT_WATERMARK_VALUE", ")", "{", "LOG", ".", "info", "(", "\"Low watermark or high water mark is not found. Hence cannot generate partitions - Default partition with low watermark: \"", "+", "lowWatermark", "+", "\" and high watermark: \"", "+", "highWatermark", ")", ";", "defaultPartition", ".", "put", "(", "lowWatermark", ",", "highWatermark", ")", ";", "return", "defaultPartition", ";", "}", "LOG", ".", "info", "(", "\"Generate partitions with low watermark: \"", "+", "lowWatermark", "+", "\"; high watermark: \"", "+", "highWatermark", "+", "\"; partition interval in hours: \"", "+", "interval", "+", "\"; Maximum number of allowed partitions: \"", "+", "maxPartitions", ")", ";", "return", "watermark", ".", "getPartitions", "(", "lowWatermark", ",", "highWatermark", ",", "interval", ",", "maxPartitions", ")", ";", "}" ]
Get partitions with low and high water marks @param previousWatermark previous water mark from metadata @return map of partition intervals. map's key is interval begin time (in format {@link Partitioner#WATERMARKTIMEFORMAT}) map's value is interval end time (in format {@link Partitioner#WATERMARKTIMEFORMAT})
[ "Get", "partitions", "with", "low", "and", "high", "water", "marks" ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-core/src/main/java/org/apache/gobblin/source/extractor/partition/Partitioner.java#L118-L159
25,943
apache/incubator-gobblin
gobblin-core/src/main/java/org/apache/gobblin/source/extractor/partition/Partitioner.java
Partitioner.getPartitionList
public List<Partition> getPartitionList(long previousWatermark) { if (state.getPropAsBoolean(HAS_USER_SPECIFIED_PARTITIONS)) { return createUserSpecifiedPartitions(); } List<Partition> partitions = new ArrayList<>(); /* * Use the deprecated getPartitions(long) as a helper function, avoid duplicating logic. When it can be removed, its * logic will be put here. */ HashMap<Long, Long> partitionMap = getPartitions(previousWatermark); if (partitionMap.size() == 0) { return partitions; } if (partitionMap.size() == 1) { Map.Entry<Long, Long> entry = partitionMap.entrySet().iterator().next(); Long lwm = entry.getKey(); Long hwm = entry.getValue(); if (lwm == hwm) { if (lwm != -1) { // we always allow [-1, -1] interval due to some test cases relies on this logic. boolean allowEqualBoundary = state.getPropAsBoolean(ALLOW_EQUAL_WATERMARK_BOUNDARY, false); LOG.info("Single partition with LWM = HWM and allowEqualBoundary=" + allowEqualBoundary); if (!allowEqualBoundary) { return partitions; } } } } /* * Can't use highWatermark directly, as the partitionMap may have different precision. For example, highWatermark * may be specified to seconds, but partitionMap could be specified to hour or date. */ Long highestWatermark = Collections.max(partitionMap.values()); for (Map.Entry<Long, Long> entry : partitionMap.entrySet()) { Long partitionHighWatermark = entry.getValue(); // Apply hasUserSpecifiedHighWatermark to the last partition, which has highestWatermark if (partitionHighWatermark.equals(highestWatermark)) { partitions.add(new Partition(entry.getKey(), partitionHighWatermark, true, hasUserSpecifiedHighWatermark)); } else { // The partitionHighWatermark was computed on the fly not what user specifies partitions.add(new Partition(entry.getKey(), partitionHighWatermark, false)); } } return partitions; }
java
public List<Partition> getPartitionList(long previousWatermark) { if (state.getPropAsBoolean(HAS_USER_SPECIFIED_PARTITIONS)) { return createUserSpecifiedPartitions(); } List<Partition> partitions = new ArrayList<>(); /* * Use the deprecated getPartitions(long) as a helper function, avoid duplicating logic. When it can be removed, its * logic will be put here. */ HashMap<Long, Long> partitionMap = getPartitions(previousWatermark); if (partitionMap.size() == 0) { return partitions; } if (partitionMap.size() == 1) { Map.Entry<Long, Long> entry = partitionMap.entrySet().iterator().next(); Long lwm = entry.getKey(); Long hwm = entry.getValue(); if (lwm == hwm) { if (lwm != -1) { // we always allow [-1, -1] interval due to some test cases relies on this logic. boolean allowEqualBoundary = state.getPropAsBoolean(ALLOW_EQUAL_WATERMARK_BOUNDARY, false); LOG.info("Single partition with LWM = HWM and allowEqualBoundary=" + allowEqualBoundary); if (!allowEqualBoundary) { return partitions; } } } } /* * Can't use highWatermark directly, as the partitionMap may have different precision. For example, highWatermark * may be specified to seconds, but partitionMap could be specified to hour or date. */ Long highestWatermark = Collections.max(partitionMap.values()); for (Map.Entry<Long, Long> entry : partitionMap.entrySet()) { Long partitionHighWatermark = entry.getValue(); // Apply hasUserSpecifiedHighWatermark to the last partition, which has highestWatermark if (partitionHighWatermark.equals(highestWatermark)) { partitions.add(new Partition(entry.getKey(), partitionHighWatermark, true, hasUserSpecifiedHighWatermark)); } else { // The partitionHighWatermark was computed on the fly not what user specifies partitions.add(new Partition(entry.getKey(), partitionHighWatermark, false)); } } return partitions; }
[ "public", "List", "<", "Partition", ">", "getPartitionList", "(", "long", "previousWatermark", ")", "{", "if", "(", "state", ".", "getPropAsBoolean", "(", "HAS_USER_SPECIFIED_PARTITIONS", ")", ")", "{", "return", "createUserSpecifiedPartitions", "(", ")", ";", "}", "List", "<", "Partition", ">", "partitions", "=", "new", "ArrayList", "<>", "(", ")", ";", "/*\n * Use the deprecated getPartitions(long) as a helper function, avoid duplicating logic. When it can be removed, its\n * logic will be put here.\n */", "HashMap", "<", "Long", ",", "Long", ">", "partitionMap", "=", "getPartitions", "(", "previousWatermark", ")", ";", "if", "(", "partitionMap", ".", "size", "(", ")", "==", "0", ")", "{", "return", "partitions", ";", "}", "if", "(", "partitionMap", ".", "size", "(", ")", "==", "1", ")", "{", "Map", ".", "Entry", "<", "Long", ",", "Long", ">", "entry", "=", "partitionMap", ".", "entrySet", "(", ")", ".", "iterator", "(", ")", ".", "next", "(", ")", ";", "Long", "lwm", "=", "entry", ".", "getKey", "(", ")", ";", "Long", "hwm", "=", "entry", ".", "getValue", "(", ")", ";", "if", "(", "lwm", "==", "hwm", ")", "{", "if", "(", "lwm", "!=", "-", "1", ")", "{", "// we always allow [-1, -1] interval due to some test cases relies on this logic.", "boolean", "allowEqualBoundary", "=", "state", ".", "getPropAsBoolean", "(", "ALLOW_EQUAL_WATERMARK_BOUNDARY", ",", "false", ")", ";", "LOG", ".", "info", "(", "\"Single partition with LWM = HWM and allowEqualBoundary=\"", "+", "allowEqualBoundary", ")", ";", "if", "(", "!", "allowEqualBoundary", ")", "{", "return", "partitions", ";", "}", "}", "}", "}", "/*\n * Can't use highWatermark directly, as the partitionMap may have different precision. 
For example, highWatermark\n * may be specified to seconds, but partitionMap could be specified to hour or date.\n */", "Long", "highestWatermark", "=", "Collections", ".", "max", "(", "partitionMap", ".", "values", "(", ")", ")", ";", "for", "(", "Map", ".", "Entry", "<", "Long", ",", "Long", ">", "entry", ":", "partitionMap", ".", "entrySet", "(", ")", ")", "{", "Long", "partitionHighWatermark", "=", "entry", ".", "getValue", "(", ")", ";", "// Apply hasUserSpecifiedHighWatermark to the last partition, which has highestWatermark", "if", "(", "partitionHighWatermark", ".", "equals", "(", "highestWatermark", ")", ")", "{", "partitions", ".", "add", "(", "new", "Partition", "(", "entry", ".", "getKey", "(", ")", ",", "partitionHighWatermark", ",", "true", ",", "hasUserSpecifiedHighWatermark", ")", ")", ";", "}", "else", "{", "// The partitionHighWatermark was computed on the fly not what user specifies", "partitions", ".", "add", "(", "new", "Partition", "(", "entry", ".", "getKey", "(", ")", ",", "partitionHighWatermark", ",", "false", ")", ")", ";", "}", "}", "return", "partitions", ";", "}" ]
Get an unordered list of partition with lowWatermark, highWatermark, and hasUserSpecifiedHighWatermark. @param previousWatermark previous water mark from metadata @return an unordered list of partition
[ "Get", "an", "unordered", "list", "of", "partition", "with", "lowWatermark", "highWatermark", "and", "hasUserSpecifiedHighWatermark", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-core/src/main/java/org/apache/gobblin/source/extractor/partition/Partitioner.java#L167-L217
25,944
apache/incubator-gobblin
gobblin-core/src/main/java/org/apache/gobblin/source/extractor/partition/Partitioner.java
Partitioner.createUserSpecifiedPartitions
private List<Partition> createUserSpecifiedPartitions() { List<Partition> partitions = new ArrayList<>(); List<String> watermarkPoints = state.getPropAsList(USER_SPECIFIED_PARTITIONS); boolean isEarlyStopped = state.getPropAsBoolean(IS_EARLY_STOPPED); if (watermarkPoints == null || watermarkPoints.size() == 0 ) { LOG.info("There should be some partition points"); long defaultWatermark = ConfigurationKeys.DEFAULT_WATERMARK_VALUE; partitions.add(new Partition(defaultWatermark, defaultWatermark, true, true)); return partitions; } WatermarkType watermarkType = WatermarkType.valueOf( state.getProp(ConfigurationKeys.SOURCE_QUERYBASED_WATERMARK_TYPE, ConfigurationKeys.DEFAULT_WATERMARK_TYPE) .toUpperCase()); long lowWatermark = adjustWatermark(watermarkPoints.get(0), watermarkType); long highWatermark = ConfigurationKeys.DEFAULT_WATERMARK_VALUE; // Only one partition point specified if (watermarkPoints.size() == 1) { if (watermarkType != WatermarkType.SIMPLE) { String timeZone = this.state.getProp(ConfigurationKeys.SOURCE_TIMEZONE); String currentTime = Utils.dateTimeToString(getCurrentTime(timeZone), WATERMARKTIMEFORMAT, timeZone); highWatermark = adjustWatermark(currentTime, watermarkType); } partitions.add(new Partition(lowWatermark, highWatermark, true, false)); return partitions; } int i; for (i = 1; i < watermarkPoints.size() - 1; i++) { highWatermark = adjustWatermark(watermarkPoints.get(i), watermarkType); partitions.add(new Partition(lowWatermark, highWatermark, true)); lowWatermark = highWatermark; } // Last partition highWatermark = adjustWatermark(watermarkPoints.get(i), watermarkType); ExtractType extractType = ExtractType.valueOf(this.state.getProp(ConfigurationKeys.SOURCE_QUERYBASED_EXTRACT_TYPE).toUpperCase()); // If it is early stop, we should not remove upper bounds if ((isFullDump() || isSnapshot(extractType)) && !isEarlyStopped) { // The upper bounds can be removed for last work unit partitions.add(new Partition(lowWatermark, highWatermark, true, 
false)); } else { // The upper bounds can not be removed for last work unit partitions.add(new Partition(lowWatermark, highWatermark, true, true)); } return partitions; }
java
private List<Partition> createUserSpecifiedPartitions() { List<Partition> partitions = new ArrayList<>(); List<String> watermarkPoints = state.getPropAsList(USER_SPECIFIED_PARTITIONS); boolean isEarlyStopped = state.getPropAsBoolean(IS_EARLY_STOPPED); if (watermarkPoints == null || watermarkPoints.size() == 0 ) { LOG.info("There should be some partition points"); long defaultWatermark = ConfigurationKeys.DEFAULT_WATERMARK_VALUE; partitions.add(new Partition(defaultWatermark, defaultWatermark, true, true)); return partitions; } WatermarkType watermarkType = WatermarkType.valueOf( state.getProp(ConfigurationKeys.SOURCE_QUERYBASED_WATERMARK_TYPE, ConfigurationKeys.DEFAULT_WATERMARK_TYPE) .toUpperCase()); long lowWatermark = adjustWatermark(watermarkPoints.get(0), watermarkType); long highWatermark = ConfigurationKeys.DEFAULT_WATERMARK_VALUE; // Only one partition point specified if (watermarkPoints.size() == 1) { if (watermarkType != WatermarkType.SIMPLE) { String timeZone = this.state.getProp(ConfigurationKeys.SOURCE_TIMEZONE); String currentTime = Utils.dateTimeToString(getCurrentTime(timeZone), WATERMARKTIMEFORMAT, timeZone); highWatermark = adjustWatermark(currentTime, watermarkType); } partitions.add(new Partition(lowWatermark, highWatermark, true, false)); return partitions; } int i; for (i = 1; i < watermarkPoints.size() - 1; i++) { highWatermark = adjustWatermark(watermarkPoints.get(i), watermarkType); partitions.add(new Partition(lowWatermark, highWatermark, true)); lowWatermark = highWatermark; } // Last partition highWatermark = adjustWatermark(watermarkPoints.get(i), watermarkType); ExtractType extractType = ExtractType.valueOf(this.state.getProp(ConfigurationKeys.SOURCE_QUERYBASED_EXTRACT_TYPE).toUpperCase()); // If it is early stop, we should not remove upper bounds if ((isFullDump() || isSnapshot(extractType)) && !isEarlyStopped) { // The upper bounds can be removed for last work unit partitions.add(new Partition(lowWatermark, highWatermark, true, 
false)); } else { // The upper bounds can not be removed for last work unit partitions.add(new Partition(lowWatermark, highWatermark, true, true)); } return partitions; }
[ "private", "List", "<", "Partition", ">", "createUserSpecifiedPartitions", "(", ")", "{", "List", "<", "Partition", ">", "partitions", "=", "new", "ArrayList", "<>", "(", ")", ";", "List", "<", "String", ">", "watermarkPoints", "=", "state", ".", "getPropAsList", "(", "USER_SPECIFIED_PARTITIONS", ")", ";", "boolean", "isEarlyStopped", "=", "state", ".", "getPropAsBoolean", "(", "IS_EARLY_STOPPED", ")", ";", "if", "(", "watermarkPoints", "==", "null", "||", "watermarkPoints", ".", "size", "(", ")", "==", "0", ")", "{", "LOG", ".", "info", "(", "\"There should be some partition points\"", ")", ";", "long", "defaultWatermark", "=", "ConfigurationKeys", ".", "DEFAULT_WATERMARK_VALUE", ";", "partitions", ".", "add", "(", "new", "Partition", "(", "defaultWatermark", ",", "defaultWatermark", ",", "true", ",", "true", ")", ")", ";", "return", "partitions", ";", "}", "WatermarkType", "watermarkType", "=", "WatermarkType", ".", "valueOf", "(", "state", ".", "getProp", "(", "ConfigurationKeys", ".", "SOURCE_QUERYBASED_WATERMARK_TYPE", ",", "ConfigurationKeys", ".", "DEFAULT_WATERMARK_TYPE", ")", ".", "toUpperCase", "(", ")", ")", ";", "long", "lowWatermark", "=", "adjustWatermark", "(", "watermarkPoints", ".", "get", "(", "0", ")", ",", "watermarkType", ")", ";", "long", "highWatermark", "=", "ConfigurationKeys", ".", "DEFAULT_WATERMARK_VALUE", ";", "// Only one partition point specified", "if", "(", "watermarkPoints", ".", "size", "(", ")", "==", "1", ")", "{", "if", "(", "watermarkType", "!=", "WatermarkType", ".", "SIMPLE", ")", "{", "String", "timeZone", "=", "this", ".", "state", ".", "getProp", "(", "ConfigurationKeys", ".", "SOURCE_TIMEZONE", ")", ";", "String", "currentTime", "=", "Utils", ".", "dateTimeToString", "(", "getCurrentTime", "(", "timeZone", ")", ",", "WATERMARKTIMEFORMAT", ",", "timeZone", ")", ";", "highWatermark", "=", "adjustWatermark", "(", "currentTime", ",", "watermarkType", ")", ";", "}", "partitions", ".", "add", "(", "new", "Partition", "(", 
"lowWatermark", ",", "highWatermark", ",", "true", ",", "false", ")", ")", ";", "return", "partitions", ";", "}", "int", "i", ";", "for", "(", "i", "=", "1", ";", "i", "<", "watermarkPoints", ".", "size", "(", ")", "-", "1", ";", "i", "++", ")", "{", "highWatermark", "=", "adjustWatermark", "(", "watermarkPoints", ".", "get", "(", "i", ")", ",", "watermarkType", ")", ";", "partitions", ".", "add", "(", "new", "Partition", "(", "lowWatermark", ",", "highWatermark", ",", "true", ")", ")", ";", "lowWatermark", "=", "highWatermark", ";", "}", "// Last partition", "highWatermark", "=", "adjustWatermark", "(", "watermarkPoints", ".", "get", "(", "i", ")", ",", "watermarkType", ")", ";", "ExtractType", "extractType", "=", "ExtractType", ".", "valueOf", "(", "this", ".", "state", ".", "getProp", "(", "ConfigurationKeys", ".", "SOURCE_QUERYBASED_EXTRACT_TYPE", ")", ".", "toUpperCase", "(", ")", ")", ";", "// If it is early stop, we should not remove upper bounds", "if", "(", "(", "isFullDump", "(", ")", "||", "isSnapshot", "(", "extractType", ")", ")", "&&", "!", "isEarlyStopped", ")", "{", "// The upper bounds can be removed for last work unit", "partitions", ".", "add", "(", "new", "Partition", "(", "lowWatermark", ",", "highWatermark", ",", "true", ",", "false", ")", ")", ";", "}", "else", "{", "// The upper bounds can not be removed for last work unit", "partitions", ".", "add", "(", "new", "Partition", "(", "lowWatermark", ",", "highWatermark", ",", "true", ",", "true", ")", ")", ";", "}", "return", "partitions", ";", "}" ]
Generate the partitions based on the lists specified by the user in job config
[ "Generate", "the", "partitions", "based", "on", "the", "lists", "specified", "by", "the", "user", "in", "job", "config" ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-core/src/main/java/org/apache/gobblin/source/extractor/partition/Partitioner.java#L222-L276
25,945
apache/incubator-gobblin
gobblin-core/src/main/java/org/apache/gobblin/source/extractor/partition/Partitioner.java
Partitioner.adjustWatermark
private static long adjustWatermark(String baseWatermark, WatermarkType watermarkType) { long result = ConfigurationKeys.DEFAULT_WATERMARK_VALUE; switch (watermarkType) { case SIMPLE: result = SimpleWatermark.adjustWatermark(baseWatermark, 0); break; case DATE: result = DateWatermark.adjustWatermark(baseWatermark, 0); break; case HOUR: result = HourWatermark.adjustWatermark(baseWatermark, 0); break; case TIMESTAMP: result = TimestampWatermark.adjustWatermark(baseWatermark, 0); break; } return result; }
java
private static long adjustWatermark(String baseWatermark, WatermarkType watermarkType) { long result = ConfigurationKeys.DEFAULT_WATERMARK_VALUE; switch (watermarkType) { case SIMPLE: result = SimpleWatermark.adjustWatermark(baseWatermark, 0); break; case DATE: result = DateWatermark.adjustWatermark(baseWatermark, 0); break; case HOUR: result = HourWatermark.adjustWatermark(baseWatermark, 0); break; case TIMESTAMP: result = TimestampWatermark.adjustWatermark(baseWatermark, 0); break; } return result; }
[ "private", "static", "long", "adjustWatermark", "(", "String", "baseWatermark", ",", "WatermarkType", "watermarkType", ")", "{", "long", "result", "=", "ConfigurationKeys", ".", "DEFAULT_WATERMARK_VALUE", ";", "switch", "(", "watermarkType", ")", "{", "case", "SIMPLE", ":", "result", "=", "SimpleWatermark", ".", "adjustWatermark", "(", "baseWatermark", ",", "0", ")", ";", "break", ";", "case", "DATE", ":", "result", "=", "DateWatermark", ".", "adjustWatermark", "(", "baseWatermark", ",", "0", ")", ";", "break", ";", "case", "HOUR", ":", "result", "=", "HourWatermark", ".", "adjustWatermark", "(", "baseWatermark", ",", "0", ")", ";", "break", ";", "case", "TIMESTAMP", ":", "result", "=", "TimestampWatermark", ".", "adjustWatermark", "(", "baseWatermark", ",", "0", ")", ";", "break", ";", "}", "return", "result", ";", "}" ]
Adjust a watermark based on watermark type @param baseWatermark the original watermark @param watermarkType Watermark Type @return the adjusted watermark value
[ "Adjust", "a", "watermark", "based", "on", "watermark", "type" ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-core/src/main/java/org/apache/gobblin/source/extractor/partition/Partitioner.java#L285-L302
25,946
apache/incubator-gobblin
gobblin-core/src/main/java/org/apache/gobblin/source/extractor/partition/Partitioner.java
Partitioner.getUpdatedInterval
private static int getUpdatedInterval(int inputInterval, ExtractType extractType, WatermarkType watermarkType) { LOG.debug("Getting updated interval"); if ((extractType == ExtractType.SNAPSHOT && watermarkType == WatermarkType.DATE)) { return inputInterval * 24; } else if (extractType == ExtractType.APPEND_DAILY) { return (inputInterval < 1 ? 1 : inputInterval) * 24; } else { return inputInterval; } }
java
private static int getUpdatedInterval(int inputInterval, ExtractType extractType, WatermarkType watermarkType) { LOG.debug("Getting updated interval"); if ((extractType == ExtractType.SNAPSHOT && watermarkType == WatermarkType.DATE)) { return inputInterval * 24; } else if (extractType == ExtractType.APPEND_DAILY) { return (inputInterval < 1 ? 1 : inputInterval) * 24; } else { return inputInterval; } }
[ "private", "static", "int", "getUpdatedInterval", "(", "int", "inputInterval", ",", "ExtractType", "extractType", ",", "WatermarkType", "watermarkType", ")", "{", "LOG", ".", "debug", "(", "\"Getting updated interval\"", ")", ";", "if", "(", "(", "extractType", "==", "ExtractType", ".", "SNAPSHOT", "&&", "watermarkType", "==", "WatermarkType", ".", "DATE", ")", ")", "{", "return", "inputInterval", "*", "24", ";", "}", "else", "if", "(", "extractType", "==", "ExtractType", ".", "APPEND_DAILY", ")", "{", "return", "(", "inputInterval", "<", "1", "?", "1", ":", "inputInterval", ")", "*", "24", ";", "}", "else", "{", "return", "inputInterval", ";", "}", "}" ]
Calculate interval in hours with the given interval @param inputInterval input interval @param extractType Extract type @param watermarkType Watermark type @return interval in range
[ "Calculate", "interval", "in", "hours", "with", "the", "given", "interval" ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-core/src/main/java/org/apache/gobblin/source/extractor/partition/Partitioner.java#L312-L321
25,947
apache/incubator-gobblin
gobblin-core/src/main/java/org/apache/gobblin/source/extractor/partition/Partitioner.java
Partitioner.getSnapshotLowWatermark
private long getSnapshotLowWatermark(WatermarkType watermarkType, long previousWatermark, int deltaForNextWatermark) { LOG.debug("Getting snapshot low water mark"); String timeZone = this.state.getProp(ConfigurationKeys.SOURCE_TIMEZONE, ConfigurationKeys.DEFAULT_SOURCE_TIMEZONE); if (isPreviousWatermarkExists(previousWatermark)) { if (isSimpleWatermark(watermarkType)) { return previousWatermark + deltaForNextWatermark - this.state .getPropAsInt(ConfigurationKeys.SOURCE_QUERYBASED_LOW_WATERMARK_BACKUP_SECS, 0); } DateTime wm = Utils.toDateTime(previousWatermark, WATERMARKTIMEFORMAT, timeZone).plusSeconds( (deltaForNextWatermark - this.state .getPropAsInt(ConfigurationKeys.SOURCE_QUERYBASED_LOW_WATERMARK_BACKUP_SECS, 0))); return Long.parseLong(Utils.dateTimeToString(wm, WATERMARKTIMEFORMAT, timeZone)); } // If previous watermark is not found, override with the start value // (irrespective of source.is.watermark.override flag) long startValue = Utils.getLongWithCurrentDate(this.state.getProp(ConfigurationKeys.SOURCE_QUERYBASED_START_VALUE), timeZone); LOG.info("Overriding low water mark with the given start value: " + startValue); return startValue; }
java
private long getSnapshotLowWatermark(WatermarkType watermarkType, long previousWatermark, int deltaForNextWatermark) { LOG.debug("Getting snapshot low water mark"); String timeZone = this.state.getProp(ConfigurationKeys.SOURCE_TIMEZONE, ConfigurationKeys.DEFAULT_SOURCE_TIMEZONE); if (isPreviousWatermarkExists(previousWatermark)) { if (isSimpleWatermark(watermarkType)) { return previousWatermark + deltaForNextWatermark - this.state .getPropAsInt(ConfigurationKeys.SOURCE_QUERYBASED_LOW_WATERMARK_BACKUP_SECS, 0); } DateTime wm = Utils.toDateTime(previousWatermark, WATERMARKTIMEFORMAT, timeZone).plusSeconds( (deltaForNextWatermark - this.state .getPropAsInt(ConfigurationKeys.SOURCE_QUERYBASED_LOW_WATERMARK_BACKUP_SECS, 0))); return Long.parseLong(Utils.dateTimeToString(wm, WATERMARKTIMEFORMAT, timeZone)); } // If previous watermark is not found, override with the start value // (irrespective of source.is.watermark.override flag) long startValue = Utils.getLongWithCurrentDate(this.state.getProp(ConfigurationKeys.SOURCE_QUERYBASED_START_VALUE), timeZone); LOG.info("Overriding low water mark with the given start value: " + startValue); return startValue; }
[ "private", "long", "getSnapshotLowWatermark", "(", "WatermarkType", "watermarkType", ",", "long", "previousWatermark", ",", "int", "deltaForNextWatermark", ")", "{", "LOG", ".", "debug", "(", "\"Getting snapshot low water mark\"", ")", ";", "String", "timeZone", "=", "this", ".", "state", ".", "getProp", "(", "ConfigurationKeys", ".", "SOURCE_TIMEZONE", ",", "ConfigurationKeys", ".", "DEFAULT_SOURCE_TIMEZONE", ")", ";", "if", "(", "isPreviousWatermarkExists", "(", "previousWatermark", ")", ")", "{", "if", "(", "isSimpleWatermark", "(", "watermarkType", ")", ")", "{", "return", "previousWatermark", "+", "deltaForNextWatermark", "-", "this", ".", "state", ".", "getPropAsInt", "(", "ConfigurationKeys", ".", "SOURCE_QUERYBASED_LOW_WATERMARK_BACKUP_SECS", ",", "0", ")", ";", "}", "DateTime", "wm", "=", "Utils", ".", "toDateTime", "(", "previousWatermark", ",", "WATERMARKTIMEFORMAT", ",", "timeZone", ")", ".", "plusSeconds", "(", "(", "deltaForNextWatermark", "-", "this", ".", "state", ".", "getPropAsInt", "(", "ConfigurationKeys", ".", "SOURCE_QUERYBASED_LOW_WATERMARK_BACKUP_SECS", ",", "0", ")", ")", ")", ";", "return", "Long", ".", "parseLong", "(", "Utils", ".", "dateTimeToString", "(", "wm", ",", "WATERMARKTIMEFORMAT", ",", "timeZone", ")", ")", ";", "}", "// If previous watermark is not found, override with the start value", "// (irrespective of source.is.watermark.override flag)", "long", "startValue", "=", "Utils", ".", "getLongWithCurrentDate", "(", "this", ".", "state", ".", "getProp", "(", "ConfigurationKeys", ".", "SOURCE_QUERYBASED_START_VALUE", ")", ",", "timeZone", ")", ";", "LOG", ".", "info", "(", "\"Overriding low water mark with the given start value: \"", "+", "startValue", ")", ";", "return", "startValue", ";", "}" ]
Get low water mark @param watermarkType Watermark type @param previousWatermark Previous water mark @param deltaForNextWatermark delta number for next water mark @return Previous watermark (fallback to {@link ConfigurationKeys#SOURCE_QUERYBASED_START_VALUE} iff previous watermark is unavailable)
[ "Get", "low", "water", "mark" ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-core/src/main/java/org/apache/gobblin/source/extractor/partition/Partitioner.java#L369-L390
25,948
apache/incubator-gobblin
gobblin-core/src/main/java/org/apache/gobblin/source/extractor/partition/Partitioner.java
Partitioner.getHighWatermark
@VisibleForTesting protected long getHighWatermark(ExtractType extractType, WatermarkType watermarkType) { LOG.debug("Getting high watermark"); String timeZone = this.state.getProp(ConfigurationKeys.SOURCE_TIMEZONE); long highWatermark = ConfigurationKeys.DEFAULT_WATERMARK_VALUE; if (this.isWatermarkOverride()) { highWatermark = this.state.getPropAsLong(ConfigurationKeys.SOURCE_QUERYBASED_END_VALUE, 0); if (highWatermark == 0) { highWatermark = Long.parseLong(Utils.dateTimeToString(getCurrentTime(timeZone), WATERMARKTIMEFORMAT, timeZone)); } else { // User specifies SOURCE_QUERYBASED_END_VALUE hasUserSpecifiedHighWatermark = true; } LOG.info("Overriding high water mark with the given end value:" + highWatermark); } else { if (isSnapshot(extractType)) { highWatermark = this.getSnapshotHighWatermark(watermarkType); } else { highWatermark = this.getAppendHighWatermark(extractType); } } return (highWatermark == 0 ? ConfigurationKeys.DEFAULT_WATERMARK_VALUE : highWatermark); }
java
@VisibleForTesting protected long getHighWatermark(ExtractType extractType, WatermarkType watermarkType) { LOG.debug("Getting high watermark"); String timeZone = this.state.getProp(ConfigurationKeys.SOURCE_TIMEZONE); long highWatermark = ConfigurationKeys.DEFAULT_WATERMARK_VALUE; if (this.isWatermarkOverride()) { highWatermark = this.state.getPropAsLong(ConfigurationKeys.SOURCE_QUERYBASED_END_VALUE, 0); if (highWatermark == 0) { highWatermark = Long.parseLong(Utils.dateTimeToString(getCurrentTime(timeZone), WATERMARKTIMEFORMAT, timeZone)); } else { // User specifies SOURCE_QUERYBASED_END_VALUE hasUserSpecifiedHighWatermark = true; } LOG.info("Overriding high water mark with the given end value:" + highWatermark); } else { if (isSnapshot(extractType)) { highWatermark = this.getSnapshotHighWatermark(watermarkType); } else { highWatermark = this.getAppendHighWatermark(extractType); } } return (highWatermark == 0 ? ConfigurationKeys.DEFAULT_WATERMARK_VALUE : highWatermark); }
[ "@", "VisibleForTesting", "protected", "long", "getHighWatermark", "(", "ExtractType", "extractType", ",", "WatermarkType", "watermarkType", ")", "{", "LOG", ".", "debug", "(", "\"Getting high watermark\"", ")", ";", "String", "timeZone", "=", "this", ".", "state", ".", "getProp", "(", "ConfigurationKeys", ".", "SOURCE_TIMEZONE", ")", ";", "long", "highWatermark", "=", "ConfigurationKeys", ".", "DEFAULT_WATERMARK_VALUE", ";", "if", "(", "this", ".", "isWatermarkOverride", "(", ")", ")", "{", "highWatermark", "=", "this", ".", "state", ".", "getPropAsLong", "(", "ConfigurationKeys", ".", "SOURCE_QUERYBASED_END_VALUE", ",", "0", ")", ";", "if", "(", "highWatermark", "==", "0", ")", "{", "highWatermark", "=", "Long", ".", "parseLong", "(", "Utils", ".", "dateTimeToString", "(", "getCurrentTime", "(", "timeZone", ")", ",", "WATERMARKTIMEFORMAT", ",", "timeZone", ")", ")", ";", "}", "else", "{", "// User specifies SOURCE_QUERYBASED_END_VALUE", "hasUserSpecifiedHighWatermark", "=", "true", ";", "}", "LOG", ".", "info", "(", "\"Overriding high water mark with the given end value:\"", "+", "highWatermark", ")", ";", "}", "else", "{", "if", "(", "isSnapshot", "(", "extractType", ")", ")", "{", "highWatermark", "=", "this", ".", "getSnapshotHighWatermark", "(", "watermarkType", ")", ";", "}", "else", "{", "highWatermark", "=", "this", ".", "getAppendHighWatermark", "(", "extractType", ")", ";", "}", "}", "return", "(", "highWatermark", "==", "0", "?", "ConfigurationKeys", ".", "DEFAULT_WATERMARK_VALUE", ":", "highWatermark", ")", ";", "}" ]
Get high water mark @param extractType Extract type @param watermarkType Watermark type @return high water mark in {@link Partitioner#WATERMARKTIMEFORMAT}
[ "Get", "high", "water", "mark" ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-core/src/main/java/org/apache/gobblin/source/extractor/partition/Partitioner.java#L422-L444
25,949
apache/incubator-gobblin
gobblin-core/src/main/java/org/apache/gobblin/source/extractor/partition/Partitioner.java
Partitioner.getSnapshotHighWatermark
private long getSnapshotHighWatermark(WatermarkType watermarkType) { LOG.debug("Getting snapshot high water mark"); if (isSimpleWatermark(watermarkType)) { return ConfigurationKeys.DEFAULT_WATERMARK_VALUE; } String timeZone = this.state.getProp(ConfigurationKeys.SOURCE_TIMEZONE); return Long.parseLong(Utils.dateTimeToString(getCurrentTime(timeZone), WATERMARKTIMEFORMAT, timeZone)); }
java
private long getSnapshotHighWatermark(WatermarkType watermarkType) { LOG.debug("Getting snapshot high water mark"); if (isSimpleWatermark(watermarkType)) { return ConfigurationKeys.DEFAULT_WATERMARK_VALUE; } String timeZone = this.state.getProp(ConfigurationKeys.SOURCE_TIMEZONE); return Long.parseLong(Utils.dateTimeToString(getCurrentTime(timeZone), WATERMARKTIMEFORMAT, timeZone)); }
[ "private", "long", "getSnapshotHighWatermark", "(", "WatermarkType", "watermarkType", ")", "{", "LOG", ".", "debug", "(", "\"Getting snapshot high water mark\"", ")", ";", "if", "(", "isSimpleWatermark", "(", "watermarkType", ")", ")", "{", "return", "ConfigurationKeys", ".", "DEFAULT_WATERMARK_VALUE", ";", "}", "String", "timeZone", "=", "this", ".", "state", ".", "getProp", "(", "ConfigurationKeys", ".", "SOURCE_TIMEZONE", ")", ";", "return", "Long", ".", "parseLong", "(", "Utils", ".", "dateTimeToString", "(", "getCurrentTime", "(", "timeZone", ")", ",", "WATERMARKTIMEFORMAT", ",", "timeZone", ")", ")", ";", "}" ]
Get snapshot high water mark @param watermarkType Watermark type @return snapshot high water mark
[ "Get", "snapshot", "high", "water", "mark" ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-core/src/main/java/org/apache/gobblin/source/extractor/partition/Partitioner.java#L452-L459
25,950
apache/incubator-gobblin
gobblin-core/src/main/java/org/apache/gobblin/source/extractor/partition/Partitioner.java
Partitioner.getAppendHighWatermark
private long getAppendHighWatermark(ExtractType extractType) { LOG.debug("Getting append high water mark"); if (this.isFullDump()) { LOG.info("Overriding high water mark with end value:" + ConfigurationKeys.SOURCE_QUERYBASED_END_VALUE); long highWatermark = this.state.getPropAsLong(ConfigurationKeys.SOURCE_QUERYBASED_END_VALUE, 0); if (highWatermark != 0) { // User specifies SOURCE_QUERYBASED_END_VALUE hasUserSpecifiedHighWatermark = true; } return highWatermark; } return this.getAppendWatermarkCutoff(extractType); }
java
private long getAppendHighWatermark(ExtractType extractType) { LOG.debug("Getting append high water mark"); if (this.isFullDump()) { LOG.info("Overriding high water mark with end value:" + ConfigurationKeys.SOURCE_QUERYBASED_END_VALUE); long highWatermark = this.state.getPropAsLong(ConfigurationKeys.SOURCE_QUERYBASED_END_VALUE, 0); if (highWatermark != 0) { // User specifies SOURCE_QUERYBASED_END_VALUE hasUserSpecifiedHighWatermark = true; } return highWatermark; } return this.getAppendWatermarkCutoff(extractType); }
[ "private", "long", "getAppendHighWatermark", "(", "ExtractType", "extractType", ")", "{", "LOG", ".", "debug", "(", "\"Getting append high water mark\"", ")", ";", "if", "(", "this", ".", "isFullDump", "(", ")", ")", "{", "LOG", ".", "info", "(", "\"Overriding high water mark with end value:\"", "+", "ConfigurationKeys", ".", "SOURCE_QUERYBASED_END_VALUE", ")", ";", "long", "highWatermark", "=", "this", ".", "state", ".", "getPropAsLong", "(", "ConfigurationKeys", ".", "SOURCE_QUERYBASED_END_VALUE", ",", "0", ")", ";", "if", "(", "highWatermark", "!=", "0", ")", "{", "// User specifies SOURCE_QUERYBASED_END_VALUE", "hasUserSpecifiedHighWatermark", "=", "true", ";", "}", "return", "highWatermark", ";", "}", "return", "this", ".", "getAppendWatermarkCutoff", "(", "extractType", ")", ";", "}" ]
Get append high water mark @param extractType Extract type @return append high water mark
[ "Get", "append", "high", "water", "mark" ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-core/src/main/java/org/apache/gobblin/source/extractor/partition/Partitioner.java#L467-L479
25,951
apache/incubator-gobblin
gobblin-core/src/main/java/org/apache/gobblin/source/extractor/partition/Partitioner.java
Partitioner.getAppendWatermarkCutoff
private long getAppendWatermarkCutoff(ExtractType extractType) { LOG.debug("Getting append water mark cutoff"); long highWatermark = ConfigurationKeys.DEFAULT_WATERMARK_VALUE; String timeZone = this.state.getProp(ConfigurationKeys.SOURCE_TIMEZONE); AppendMaxLimitType limitType = getAppendLimitType(extractType, this.state.getProp(ConfigurationKeys.SOURCE_QUERYBASED_APPEND_MAX_WATERMARK_LIMIT)); if (limitType == null) { LOG.debug("Limit type is not found"); return highWatermark; } int limitDelta = getAppendLimitDelta(this.state.getProp(ConfigurationKeys.SOURCE_QUERYBASED_APPEND_MAX_WATERMARK_LIMIT)); // if it is CURRENTDATE or CURRENTHOUR then high water mark is current time if (limitDelta == 0) { highWatermark = Long.parseLong(Utils.dateTimeToString(getCurrentTime(timeZone), WATERMARKTIMEFORMAT, timeZone)); } // if CURRENTDATE or CURRENTHOUR has offset then high water mark is end of day of the given offset else { int seconds = 3599; // x:59:59 String format = null; switch (limitType) { case CURRENTDATE: format = "yyyyMMdd"; limitDelta = limitDelta * 24 * 60 * 60; seconds = 86399; // 23:59:59 break; case CURRENTHOUR: format = "yyyyMMddHH"; limitDelta = limitDelta * 60 * 60; seconds = 3599; // x:59:59 break; case CURRENTMINUTE: format = "yyyyMMddHHmm"; limitDelta = limitDelta * 60; seconds = 59; break; case CURRENTSECOND: format = "yyyyMMddHHmmss"; seconds = 0; break; default: break; } DateTime deltaTime = getCurrentTime(timeZone).minusSeconds(limitDelta); DateTime previousTime = Utils.toDateTime(Utils.dateTimeToString(deltaTime, format, timeZone), format, timeZone).plusSeconds(seconds); highWatermark = Long.parseLong(Utils.dateTimeToString(previousTime, WATERMARKTIMEFORMAT, timeZone)); // User specifies SOURCE_QUERYBASED_APPEND_MAX_WATERMARK_LIMIT hasUserSpecifiedHighWatermark = true; } return highWatermark; }
java
private long getAppendWatermarkCutoff(ExtractType extractType) { LOG.debug("Getting append water mark cutoff"); long highWatermark = ConfigurationKeys.DEFAULT_WATERMARK_VALUE; String timeZone = this.state.getProp(ConfigurationKeys.SOURCE_TIMEZONE); AppendMaxLimitType limitType = getAppendLimitType(extractType, this.state.getProp(ConfigurationKeys.SOURCE_QUERYBASED_APPEND_MAX_WATERMARK_LIMIT)); if (limitType == null) { LOG.debug("Limit type is not found"); return highWatermark; } int limitDelta = getAppendLimitDelta(this.state.getProp(ConfigurationKeys.SOURCE_QUERYBASED_APPEND_MAX_WATERMARK_LIMIT)); // if it is CURRENTDATE or CURRENTHOUR then high water mark is current time if (limitDelta == 0) { highWatermark = Long.parseLong(Utils.dateTimeToString(getCurrentTime(timeZone), WATERMARKTIMEFORMAT, timeZone)); } // if CURRENTDATE or CURRENTHOUR has offset then high water mark is end of day of the given offset else { int seconds = 3599; // x:59:59 String format = null; switch (limitType) { case CURRENTDATE: format = "yyyyMMdd"; limitDelta = limitDelta * 24 * 60 * 60; seconds = 86399; // 23:59:59 break; case CURRENTHOUR: format = "yyyyMMddHH"; limitDelta = limitDelta * 60 * 60; seconds = 3599; // x:59:59 break; case CURRENTMINUTE: format = "yyyyMMddHHmm"; limitDelta = limitDelta * 60; seconds = 59; break; case CURRENTSECOND: format = "yyyyMMddHHmmss"; seconds = 0; break; default: break; } DateTime deltaTime = getCurrentTime(timeZone).minusSeconds(limitDelta); DateTime previousTime = Utils.toDateTime(Utils.dateTimeToString(deltaTime, format, timeZone), format, timeZone).plusSeconds(seconds); highWatermark = Long.parseLong(Utils.dateTimeToString(previousTime, WATERMARKTIMEFORMAT, timeZone)); // User specifies SOURCE_QUERYBASED_APPEND_MAX_WATERMARK_LIMIT hasUserSpecifiedHighWatermark = true; } return highWatermark; }
[ "private", "long", "getAppendWatermarkCutoff", "(", "ExtractType", "extractType", ")", "{", "LOG", ".", "debug", "(", "\"Getting append water mark cutoff\"", ")", ";", "long", "highWatermark", "=", "ConfigurationKeys", ".", "DEFAULT_WATERMARK_VALUE", ";", "String", "timeZone", "=", "this", ".", "state", ".", "getProp", "(", "ConfigurationKeys", ".", "SOURCE_TIMEZONE", ")", ";", "AppendMaxLimitType", "limitType", "=", "getAppendLimitType", "(", "extractType", ",", "this", ".", "state", ".", "getProp", "(", "ConfigurationKeys", ".", "SOURCE_QUERYBASED_APPEND_MAX_WATERMARK_LIMIT", ")", ")", ";", "if", "(", "limitType", "==", "null", ")", "{", "LOG", ".", "debug", "(", "\"Limit type is not found\"", ")", ";", "return", "highWatermark", ";", "}", "int", "limitDelta", "=", "getAppendLimitDelta", "(", "this", ".", "state", ".", "getProp", "(", "ConfigurationKeys", ".", "SOURCE_QUERYBASED_APPEND_MAX_WATERMARK_LIMIT", ")", ")", ";", "// if it is CURRENTDATE or CURRENTHOUR then high water mark is current time", "if", "(", "limitDelta", "==", "0", ")", "{", "highWatermark", "=", "Long", ".", "parseLong", "(", "Utils", ".", "dateTimeToString", "(", "getCurrentTime", "(", "timeZone", ")", ",", "WATERMARKTIMEFORMAT", ",", "timeZone", ")", ")", ";", "}", "// if CURRENTDATE or CURRENTHOUR has offset then high water mark is end of day of the given offset", "else", "{", "int", "seconds", "=", "3599", ";", "// x:59:59", "String", "format", "=", "null", ";", "switch", "(", "limitType", ")", "{", "case", "CURRENTDATE", ":", "format", "=", "\"yyyyMMdd\"", ";", "limitDelta", "=", "limitDelta", "*", "24", "*", "60", "*", "60", ";", "seconds", "=", "86399", ";", "// 23:59:59", "break", ";", "case", "CURRENTHOUR", ":", "format", "=", "\"yyyyMMddHH\"", ";", "limitDelta", "=", "limitDelta", "*", "60", "*", "60", ";", "seconds", "=", "3599", ";", "// x:59:59", "break", ";", "case", "CURRENTMINUTE", ":", "format", "=", "\"yyyyMMddHHmm\"", ";", "limitDelta", "=", "limitDelta", "*", "60", ";", 
"seconds", "=", "59", ";", "break", ";", "case", "CURRENTSECOND", ":", "format", "=", "\"yyyyMMddHHmmss\"", ";", "seconds", "=", "0", ";", "break", ";", "default", ":", "break", ";", "}", "DateTime", "deltaTime", "=", "getCurrentTime", "(", "timeZone", ")", ".", "minusSeconds", "(", "limitDelta", ")", ";", "DateTime", "previousTime", "=", "Utils", ".", "toDateTime", "(", "Utils", ".", "dateTimeToString", "(", "deltaTime", ",", "format", ",", "timeZone", ")", ",", "format", ",", "timeZone", ")", ".", "plusSeconds", "(", "seconds", ")", ";", "highWatermark", "=", "Long", ".", "parseLong", "(", "Utils", ".", "dateTimeToString", "(", "previousTime", ",", "WATERMARKTIMEFORMAT", ",", "timeZone", ")", ")", ";", "// User specifies SOURCE_QUERYBASED_APPEND_MAX_WATERMARK_LIMIT", "hasUserSpecifiedHighWatermark", "=", "true", ";", "}", "return", "highWatermark", ";", "}" ]
Get cutoff for high water mark @param extractType Extract type @return cutoff
[ "Get", "cutoff", "for", "high", "water", "mark" ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-core/src/main/java/org/apache/gobblin/source/extractor/partition/Partitioner.java#L487-L541
25,952
apache/incubator-gobblin
gobblin-core/src/main/java/org/apache/gobblin/source/extractor/partition/Partitioner.java
Partitioner.getAppendLimitType
private static AppendMaxLimitType getAppendLimitType(ExtractType extractType, String maxLimit) { LOG.debug("Getting append limit type"); AppendMaxLimitType limitType; switch (extractType) { case APPEND_DAILY: limitType = AppendMaxLimitType.CURRENTDATE; break; case APPEND_HOURLY: limitType = AppendMaxLimitType.CURRENTHOUR; break; default: limitType = null; break; } if (!Strings.isNullOrEmpty(maxLimit)) { LOG.debug("Getting append limit type from the config"); String[] limitParams = maxLimit.split("-"); if (limitParams.length >= 1) { limitType = AppendMaxLimitType.valueOf(limitParams[0]); } } return limitType; }
java
private static AppendMaxLimitType getAppendLimitType(ExtractType extractType, String maxLimit) { LOG.debug("Getting append limit type"); AppendMaxLimitType limitType; switch (extractType) { case APPEND_DAILY: limitType = AppendMaxLimitType.CURRENTDATE; break; case APPEND_HOURLY: limitType = AppendMaxLimitType.CURRENTHOUR; break; default: limitType = null; break; } if (!Strings.isNullOrEmpty(maxLimit)) { LOG.debug("Getting append limit type from the config"); String[] limitParams = maxLimit.split("-"); if (limitParams.length >= 1) { limitType = AppendMaxLimitType.valueOf(limitParams[0]); } } return limitType; }
[ "private", "static", "AppendMaxLimitType", "getAppendLimitType", "(", "ExtractType", "extractType", ",", "String", "maxLimit", ")", "{", "LOG", ".", "debug", "(", "\"Getting append limit type\"", ")", ";", "AppendMaxLimitType", "limitType", ";", "switch", "(", "extractType", ")", "{", "case", "APPEND_DAILY", ":", "limitType", "=", "AppendMaxLimitType", ".", "CURRENTDATE", ";", "break", ";", "case", "APPEND_HOURLY", ":", "limitType", "=", "AppendMaxLimitType", ".", "CURRENTHOUR", ";", "break", ";", "default", ":", "limitType", "=", "null", ";", "break", ";", "}", "if", "(", "!", "Strings", ".", "isNullOrEmpty", "(", "maxLimit", ")", ")", "{", "LOG", ".", "debug", "(", "\"Getting append limit type from the config\"", ")", ";", "String", "[", "]", "limitParams", "=", "maxLimit", ".", "split", "(", "\"-\"", ")", ";", "if", "(", "limitParams", ".", "length", ">=", "1", ")", "{", "limitType", "=", "AppendMaxLimitType", ".", "valueOf", "(", "limitParams", "[", "0", "]", ")", ";", "}", "}", "return", "limitType", ";", "}" ]
Get append max limit type from the input @param extractType Extract type @param maxLimit @return Max limit type
[ "Get", "append", "max", "limit", "type", "from", "the", "input" ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-core/src/main/java/org/apache/gobblin/source/extractor/partition/Partitioner.java#L550-L573
25,953
apache/incubator-gobblin
gobblin-core/src/main/java/org/apache/gobblin/source/extractor/partition/Partitioner.java
Partitioner.getAppendLimitDelta
private static int getAppendLimitDelta(String maxLimit) { LOG.debug("Getting append limit delta"); int limitDelta = 0; if (!Strings.isNullOrEmpty(maxLimit)) { String[] limitParams = maxLimit.split("-"); if (limitParams.length >= 2) { limitDelta = Integer.parseInt(limitParams[1]); } } return limitDelta; }
java
private static int getAppendLimitDelta(String maxLimit) { LOG.debug("Getting append limit delta"); int limitDelta = 0; if (!Strings.isNullOrEmpty(maxLimit)) { String[] limitParams = maxLimit.split("-"); if (limitParams.length >= 2) { limitDelta = Integer.parseInt(limitParams[1]); } } return limitDelta; }
[ "private", "static", "int", "getAppendLimitDelta", "(", "String", "maxLimit", ")", "{", "LOG", ".", "debug", "(", "\"Getting append limit delta\"", ")", ";", "int", "limitDelta", "=", "0", ";", "if", "(", "!", "Strings", ".", "isNullOrEmpty", "(", "maxLimit", ")", ")", "{", "String", "[", "]", "limitParams", "=", "maxLimit", ".", "split", "(", "\"-\"", ")", ";", "if", "(", "limitParams", ".", "length", ">=", "2", ")", "{", "limitDelta", "=", "Integer", ".", "parseInt", "(", "limitParams", "[", "1", "]", ")", ";", "}", "}", "return", "limitDelta", ";", "}" ]
Get append max limit delta num @param maxLimit @return Max limit delta number
[ "Get", "append", "max", "limit", "delta", "num" ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-core/src/main/java/org/apache/gobblin/source/extractor/partition/Partitioner.java#L581-L591
25,954
apache/incubator-gobblin
gobblin-core/src/main/java/org/apache/gobblin/source/extractor/partition/Partitioner.java
Partitioner.isWatermarkExists
private boolean isWatermarkExists() { if (!Strings.isNullOrEmpty(this.state.getProp(ConfigurationKeys.EXTRACT_DELTA_FIELDS_KEY)) && !Strings .isNullOrEmpty(this.state.getProp(ConfigurationKeys.SOURCE_QUERYBASED_WATERMARK_TYPE))) { return true; } return false; }
java
private boolean isWatermarkExists() { if (!Strings.isNullOrEmpty(this.state.getProp(ConfigurationKeys.EXTRACT_DELTA_FIELDS_KEY)) && !Strings .isNullOrEmpty(this.state.getProp(ConfigurationKeys.SOURCE_QUERYBASED_WATERMARK_TYPE))) { return true; } return false; }
[ "private", "boolean", "isWatermarkExists", "(", ")", "{", "if", "(", "!", "Strings", ".", "isNullOrEmpty", "(", "this", ".", "state", ".", "getProp", "(", "ConfigurationKeys", ".", "EXTRACT_DELTA_FIELDS_KEY", ")", ")", "&&", "!", "Strings", ".", "isNullOrEmpty", "(", "this", ".", "state", ".", "getProp", "(", "ConfigurationKeys", ".", "SOURCE_QUERYBASED_WATERMARK_TYPE", ")", ")", ")", "{", "return", "true", ";", "}", "return", "false", ";", "}" ]
true if water mark columns and water mark type provided @return true if water mark exists
[ "true", "if", "water", "mark", "columns", "and", "water", "mark", "type", "provided" ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-core/src/main/java/org/apache/gobblin/source/extractor/partition/Partitioner.java#L611-L617
25,955
apache/incubator-gobblin
gobblin-core-base/src/main/java/org/apache/gobblin/crypto/EncryptionConfigParser.java
EncryptionConfigParser.getConfigForBranch
public static Map<String, Object> getConfigForBranch(EntityType entityType, WorkUnitState workUnitState) { return getConfigForBranch(entityType, null, workUnitState); }
java
public static Map<String, Object> getConfigForBranch(EntityType entityType, WorkUnitState workUnitState) { return getConfigForBranch(entityType, null, workUnitState); }
[ "public", "static", "Map", "<", "String", ",", "Object", ">", "getConfigForBranch", "(", "EntityType", "entityType", ",", "WorkUnitState", "workUnitState", ")", "{", "return", "getConfigForBranch", "(", "entityType", ",", "null", ",", "workUnitState", ")", ";", "}" ]
Retrieve encryption configuration for the branch the WorKUnitState represents @param entityType Type of entity we are retrieving config for @param workUnitState State for the object querying config @return A list of encryption properties or null if encryption isn't configured
[ "Retrieve", "encryption", "configuration", "for", "the", "branch", "the", "WorKUnitState", "represents" ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-core-base/src/main/java/org/apache/gobblin/crypto/EncryptionConfigParser.java#L101-L103
25,956
apache/incubator-gobblin
gobblin-core-base/src/main/java/org/apache/gobblin/crypto/EncryptionConfigParser.java
EncryptionConfigParser.getConfigForBranch
public static Map<String, Object> getConfigForBranch(EntityType entityType, State taskState, int numBranches, int branch) { return getConfigForBranch(taskState, entityType.getConfigPrefix(), ForkOperatorUtils.getPropertyNameForBranch("", numBranches, branch)); }
java
public static Map<String, Object> getConfigForBranch(EntityType entityType, State taskState, int numBranches, int branch) { return getConfigForBranch(taskState, entityType.getConfigPrefix(), ForkOperatorUtils.getPropertyNameForBranch("", numBranches, branch)); }
[ "public", "static", "Map", "<", "String", ",", "Object", ">", "getConfigForBranch", "(", "EntityType", "entityType", ",", "State", "taskState", ",", "int", "numBranches", ",", "int", "branch", ")", "{", "return", "getConfigForBranch", "(", "taskState", ",", "entityType", ".", "getConfigPrefix", "(", ")", ",", "ForkOperatorUtils", ".", "getPropertyNameForBranch", "(", "\"\"", ",", "numBranches", ",", "branch", ")", ")", ";", "}" ]
Retrieve encryption config for a given branch of a task @param entityType Entity type we are retrieving config for @param taskState State of the task @param numBranches Number of branches overall @param branch Branch # of the current object @return A list of encryption properties or null if encryption isn't configured
[ "Retrieve", "encryption", "config", "for", "a", "given", "branch", "of", "a", "task" ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-core-base/src/main/java/org/apache/gobblin/crypto/EncryptionConfigParser.java#L147-L151
25,957
apache/incubator-gobblin
gobblin-core-base/src/main/java/org/apache/gobblin/crypto/EncryptionConfigParser.java
EncryptionConfigParser.getKeystoreType
public static String getKeystoreType(Map<String, Object> parameters) { String type = (String)parameters.get(ENCRYPTION_KEYSTORE_TYPE_KEY); if (type == null) { type = ENCRYPTION_KEYSTORE_TYPE_KEY_DEFAULT; } return type; }
java
public static String getKeystoreType(Map<String, Object> parameters) { String type = (String)parameters.get(ENCRYPTION_KEYSTORE_TYPE_KEY); if (type == null) { type = ENCRYPTION_KEYSTORE_TYPE_KEY_DEFAULT; } return type; }
[ "public", "static", "String", "getKeystoreType", "(", "Map", "<", "String", ",", "Object", ">", "parameters", ")", "{", "String", "type", "=", "(", "String", ")", "parameters", ".", "get", "(", "ENCRYPTION_KEYSTORE_TYPE_KEY", ")", ";", "if", "(", "type", "==", "null", ")", "{", "type", "=", "ENCRYPTION_KEYSTORE_TYPE_KEY_DEFAULT", ";", "}", "return", "type", ";", "}" ]
Get the type of keystore to instantiate
[ "Get", "the", "type", "of", "keystore", "to", "instantiate" ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-core-base/src/main/java/org/apache/gobblin/crypto/EncryptionConfigParser.java#L189-L196
25,958
apache/incubator-gobblin
gobblin-core-base/src/main/java/org/apache/gobblin/crypto/EncryptionConfigParser.java
EncryptionConfigParser.getCipher
public static String getCipher(Map<String, Object> parameters) { return (String)parameters.get(ENCRYPTION_CIPHER_KEY); }
java
public static String getCipher(Map<String, Object> parameters) { return (String)parameters.get(ENCRYPTION_CIPHER_KEY); }
[ "public", "static", "String", "getCipher", "(", "Map", "<", "String", ",", "Object", ">", "parameters", ")", "{", "return", "(", "String", ")", "parameters", ".", "get", "(", "ENCRYPTION_CIPHER_KEY", ")", ";", "}" ]
Get the underlying cipher name @param parameters parameters map @return the cipher name
[ "Get", "the", "underlying", "cipher", "name" ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-core-base/src/main/java/org/apache/gobblin/crypto/EncryptionConfigParser.java#L211-L213
25,959
apache/incubator-gobblin
gobblin-core-base/src/main/java/org/apache/gobblin/crypto/EncryptionConfigParser.java
EncryptionConfigParser.extractPropertiesForBranch
private static Map<String, Object> extractPropertiesForBranch( Properties properties, String prefix, String branchSuffix) { Map<String, Object> ret = new HashMap<>(); for (Map.Entry<Object, Object> prop: properties.entrySet()) { String key = (String)prop.getKey(); if (key.startsWith(prefix) && (branchSuffix.length() == 0 || key.endsWith(branchSuffix))) { int strippedKeyStart = Math.min(key.length(), prefix.length() + 1); // filter out subkeys that don't have a '.' -- eg writer.encrypted.foo shouldn't be returned // if prefix is writer.encrypt if (strippedKeyStart != key.length() && key.charAt(strippedKeyStart - 1) != '.') { continue; } int strippedKeyEnd = Math.max(strippedKeyStart, key.length() - branchSuffix.length()); String strippedKey = key.substring(strippedKeyStart, strippedKeyEnd); ret.put(strippedKey, prop.getValue()); } } return ret; }
java
private static Map<String, Object> extractPropertiesForBranch( Properties properties, String prefix, String branchSuffix) { Map<String, Object> ret = new HashMap<>(); for (Map.Entry<Object, Object> prop: properties.entrySet()) { String key = (String)prop.getKey(); if (key.startsWith(prefix) && (branchSuffix.length() == 0 || key.endsWith(branchSuffix))) { int strippedKeyStart = Math.min(key.length(), prefix.length() + 1); // filter out subkeys that don't have a '.' -- eg writer.encrypted.foo shouldn't be returned // if prefix is writer.encrypt if (strippedKeyStart != key.length() && key.charAt(strippedKeyStart - 1) != '.') { continue; } int strippedKeyEnd = Math.max(strippedKeyStart, key.length() - branchSuffix.length()); String strippedKey = key.substring(strippedKeyStart, strippedKeyEnd); ret.put(strippedKey, prop.getValue()); } } return ret; }
[ "private", "static", "Map", "<", "String", ",", "Object", ">", "extractPropertiesForBranch", "(", "Properties", "properties", ",", "String", "prefix", ",", "String", "branchSuffix", ")", "{", "Map", "<", "String", ",", "Object", ">", "ret", "=", "new", "HashMap", "<>", "(", ")", ";", "for", "(", "Map", ".", "Entry", "<", "Object", ",", "Object", ">", "prop", ":", "properties", ".", "entrySet", "(", ")", ")", "{", "String", "key", "=", "(", "String", ")", "prop", ".", "getKey", "(", ")", ";", "if", "(", "key", ".", "startsWith", "(", "prefix", ")", "&&", "(", "branchSuffix", ".", "length", "(", ")", "==", "0", "||", "key", ".", "endsWith", "(", "branchSuffix", ")", ")", ")", "{", "int", "strippedKeyStart", "=", "Math", ".", "min", "(", "key", ".", "length", "(", ")", ",", "prefix", ".", "length", "(", ")", "+", "1", ")", ";", "// filter out subkeys that don't have a '.' -- eg writer.encrypted.foo shouldn't be returned", "// if prefix is writer.encrypt", "if", "(", "strippedKeyStart", "!=", "key", ".", "length", "(", ")", "&&", "key", ".", "charAt", "(", "strippedKeyStart", "-", "1", ")", "!=", "'", "'", ")", "{", "continue", ";", "}", "int", "strippedKeyEnd", "=", "Math", ".", "max", "(", "strippedKeyStart", ",", "key", ".", "length", "(", ")", "-", "branchSuffix", ".", "length", "(", ")", ")", ";", "String", "strippedKey", "=", "key", ".", "substring", "(", "strippedKeyStart", ",", "strippedKeyEnd", ")", ";", "ret", ".", "put", "(", "strippedKey", ",", "prop", ".", "getValue", "(", ")", ")", ";", "}", "}", "return", "ret", ";", "}" ]
Extract a set of properties for a given branch, stripping out the prefix and branch suffix. Eg - original output: writer.encrypt.1 -> foo writer.encrypt.something.1 -> bar will transform to "" -> foo something - bar this is very similar to ConfigUtils and typesafe config; need to figure out config story @param properties Properties to extract data from @param prefix Prefix to match; all other properties will be ignored @param branchSuffix Suffix for all config properties @return Transformed properties as described above
[ "Extract", "a", "set", "of", "properties", "for", "a", "given", "branch", "stripping", "out", "the", "prefix", "and", "branch", "suffix", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-core-base/src/main/java/org/apache/gobblin/crypto/EncryptionConfigParser.java#L233-L256
25,960
apache/incubator-gobblin
gobblin-modules/gobblin-compliance/src/main/java/org/apache/gobblin/compliance/restore/LKGRestorePolicy.java
LKGRestorePolicy.isRestorable
private boolean isRestorable(HivePartitionDataset dataset, HivePartitionVersion version) throws IOException { if (version.getLocation().toString().equalsIgnoreCase(dataset.getLocation().toString())) { return false; } FileSystem fs = ProxyUtils.getOwnerFs(new State(this.state), version.getOwner()); if (!HadoopUtils.hasContent(fs, version.getLocation())) { return false; } return true; }
java
private boolean isRestorable(HivePartitionDataset dataset, HivePartitionVersion version) throws IOException { if (version.getLocation().toString().equalsIgnoreCase(dataset.getLocation().toString())) { return false; } FileSystem fs = ProxyUtils.getOwnerFs(new State(this.state), version.getOwner()); if (!HadoopUtils.hasContent(fs, version.getLocation())) { return false; } return true; }
[ "private", "boolean", "isRestorable", "(", "HivePartitionDataset", "dataset", ",", "HivePartitionVersion", "version", ")", "throws", "IOException", "{", "if", "(", "version", ".", "getLocation", "(", ")", ".", "toString", "(", ")", ".", "equalsIgnoreCase", "(", "dataset", ".", "getLocation", "(", ")", ".", "toString", "(", ")", ")", ")", "{", "return", "false", ";", "}", "FileSystem", "fs", "=", "ProxyUtils", ".", "getOwnerFs", "(", "new", "State", "(", "this", ".", "state", ")", ",", "version", ".", "getOwner", "(", ")", ")", ";", "if", "(", "!", "HadoopUtils", ".", "hasContent", "(", "fs", ",", "version", ".", "getLocation", "(", ")", ")", ")", "{", "return", "false", ";", "}", "return", "true", ";", "}" ]
A version is called restorable if it can be used to restore dataset. If a version is pointing to same data location as of the dataset, then it can't be used for restoring If a version is pointing to an empty data location, then it can't be used for restoring
[ "A", "version", "is", "called", "restorable", "if", "it", "can", "be", "used", "to", "restore", "dataset", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-modules/gobblin-compliance/src/main/java/org/apache/gobblin/compliance/restore/LKGRestorePolicy.java#L81-L91
25,961
apache/incubator-gobblin
gobblin-cluster/src/main/java/org/apache/gobblin/cluster/HelixUtils.java
HelixUtils.createGobblinHelixCluster
public static void createGobblinHelixCluster( String zkConnectionString, String clusterName, boolean overwrite) { ClusterSetup clusterSetup = new ClusterSetup(zkConnectionString); // Create the cluster and overwrite if it already exists clusterSetup.addCluster(clusterName, overwrite); // Helix 0.6.x requires a configuration property to have the form key=value. String autoJoinConfig = ZKHelixManager.ALLOW_PARTICIPANT_AUTO_JOIN + "=true"; clusterSetup.setConfig(HelixConfigScope.ConfigScopeProperty.CLUSTER, clusterName, autoJoinConfig); }
java
public static void createGobblinHelixCluster( String zkConnectionString, String clusterName, boolean overwrite) { ClusterSetup clusterSetup = new ClusterSetup(zkConnectionString); // Create the cluster and overwrite if it already exists clusterSetup.addCluster(clusterName, overwrite); // Helix 0.6.x requires a configuration property to have the form key=value. String autoJoinConfig = ZKHelixManager.ALLOW_PARTICIPANT_AUTO_JOIN + "=true"; clusterSetup.setConfig(HelixConfigScope.ConfigScopeProperty.CLUSTER, clusterName, autoJoinConfig); }
[ "public", "static", "void", "createGobblinHelixCluster", "(", "String", "zkConnectionString", ",", "String", "clusterName", ",", "boolean", "overwrite", ")", "{", "ClusterSetup", "clusterSetup", "=", "new", "ClusterSetup", "(", "zkConnectionString", ")", ";", "// Create the cluster and overwrite if it already exists", "clusterSetup", ".", "addCluster", "(", "clusterName", ",", "overwrite", ")", ";", "// Helix 0.6.x requires a configuration property to have the form key=value.", "String", "autoJoinConfig", "=", "ZKHelixManager", ".", "ALLOW_PARTICIPANT_AUTO_JOIN", "+", "\"=true\"", ";", "clusterSetup", ".", "setConfig", "(", "HelixConfigScope", ".", "ConfigScopeProperty", ".", "CLUSTER", ",", "clusterName", ",", "autoJoinConfig", ")", ";", "}" ]
Create a Helix cluster for the Gobblin Cluster application. @param zkConnectionString the ZooKeeper connection string @param clusterName the Helix cluster name @param overwrite true to overwrite exiting cluster, false to reuse existing cluster
[ "Create", "a", "Helix", "cluster", "for", "the", "Gobblin", "Cluster", "application", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-cluster/src/main/java/org/apache/gobblin/cluster/HelixUtils.java#L72-L82
25,962
apache/incubator-gobblin
gobblin-cluster/src/main/java/org/apache/gobblin/cluster/HelixUtils.java
HelixUtils.submitJobToQueue
@Deprecated public static void submitJobToQueue( JobConfig.Builder jobConfigBuilder, String queueName, String jobName, TaskDriver helixTaskDriver, HelixManager helixManager, long jobQueueDeleteTimeoutSeconds) throws Exception { submitJobToWorkFlow(jobConfigBuilder, queueName, jobName, helixTaskDriver, helixManager, jobQueueDeleteTimeoutSeconds); }
java
@Deprecated public static void submitJobToQueue( JobConfig.Builder jobConfigBuilder, String queueName, String jobName, TaskDriver helixTaskDriver, HelixManager helixManager, long jobQueueDeleteTimeoutSeconds) throws Exception { submitJobToWorkFlow(jobConfigBuilder, queueName, jobName, helixTaskDriver, helixManager, jobQueueDeleteTimeoutSeconds); }
[ "@", "Deprecated", "public", "static", "void", "submitJobToQueue", "(", "JobConfig", ".", "Builder", "jobConfigBuilder", ",", "String", "queueName", ",", "String", "jobName", ",", "TaskDriver", "helixTaskDriver", ",", "HelixManager", "helixManager", ",", "long", "jobQueueDeleteTimeoutSeconds", ")", "throws", "Exception", "{", "submitJobToWorkFlow", "(", "jobConfigBuilder", ",", "queueName", ",", "jobName", ",", "helixTaskDriver", ",", "helixManager", ",", "jobQueueDeleteTimeoutSeconds", ")", ";", "}" ]
We have switched from Helix JobQueue to WorkFlow based job execution.
[ "We", "have", "switched", "from", "Helix", "JobQueue", "to", "WorkFlow", "based", "job", "execution", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-cluster/src/main/java/org/apache/gobblin/cluster/HelixUtils.java#L98-L107
25,963
apache/incubator-gobblin
gobblin-cluster/src/main/java/org/apache/gobblin/cluster/HelixUtils.java
HelixUtils.deleteStoppedHelixJob
private static void deleteStoppedHelixJob(HelixManager helixManager, String workFlowName, String jobName) throws InterruptedException { WorkflowContext workflowContext = TaskDriver.getWorkflowContext(helixManager, workFlowName); while (workflowContext.getJobState(TaskUtil.getNamespacedJobName(workFlowName, jobName)) != STOPPED) { log.info("Waiting for job {} to stop...", jobName); workflowContext = TaskDriver.getWorkflowContext(helixManager, workFlowName); Thread.sleep(1000); } // deleting the entire workflow, as one workflow contains only one job new TaskDriver(helixManager).deleteAndWaitForCompletion(workFlowName, 10000L); log.info("Workflow deleted."); }
java
private static void deleteStoppedHelixJob(HelixManager helixManager, String workFlowName, String jobName) throws InterruptedException { WorkflowContext workflowContext = TaskDriver.getWorkflowContext(helixManager, workFlowName); while (workflowContext.getJobState(TaskUtil.getNamespacedJobName(workFlowName, jobName)) != STOPPED) { log.info("Waiting for job {} to stop...", jobName); workflowContext = TaskDriver.getWorkflowContext(helixManager, workFlowName); Thread.sleep(1000); } // deleting the entire workflow, as one workflow contains only one job new TaskDriver(helixManager).deleteAndWaitForCompletion(workFlowName, 10000L); log.info("Workflow deleted."); }
[ "private", "static", "void", "deleteStoppedHelixJob", "(", "HelixManager", "helixManager", ",", "String", "workFlowName", ",", "String", "jobName", ")", "throws", "InterruptedException", "{", "WorkflowContext", "workflowContext", "=", "TaskDriver", ".", "getWorkflowContext", "(", "helixManager", ",", "workFlowName", ")", ";", "while", "(", "workflowContext", ".", "getJobState", "(", "TaskUtil", ".", "getNamespacedJobName", "(", "workFlowName", ",", "jobName", ")", ")", "!=", "STOPPED", ")", "{", "log", ".", "info", "(", "\"Waiting for job {} to stop...\"", ",", "jobName", ")", ";", "workflowContext", "=", "TaskDriver", ".", "getWorkflowContext", "(", "helixManager", ",", "workFlowName", ")", ";", "Thread", ".", "sleep", "(", "1000", ")", ";", "}", "// deleting the entire workflow, as one workflow contains only one job", "new", "TaskDriver", "(", "helixManager", ")", ".", "deleteAndWaitForCompletion", "(", "workFlowName", ",", "10000L", ")", ";", "log", ".", "info", "(", "\"Workflow deleted.\"", ")", ";", "}" ]
Deletes the stopped Helix Workflow. Caller should stop the Workflow before calling this method. @param helixManager helix manager @param workFlowName workflow needed to be deleted @param jobName helix job name @throws InterruptedException
[ "Deletes", "the", "stopped", "Helix", "Workflow", ".", "Caller", "should", "stop", "the", "Workflow", "before", "calling", "this", "method", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-cluster/src/main/java/org/apache/gobblin/cluster/HelixUtils.java#L242-L253
25,964
apache/incubator-gobblin
gobblin-data-management/src/main/java/org/apache/gobblin/data/management/conversion/hive/publisher/HiveConvertPublisher.java
HiveConvertPublisher.copyPartitionParams
@VisibleForTesting public boolean copyPartitionParams(String completeSourcePartitionName, String completeDestPartitionName, List<String> whitelist, List<String> blacklist) { Optional<Partition> sourcePartitionOptional = getPartitionObject(completeSourcePartitionName); Optional<Partition> destPartitionOptional = getPartitionObject(completeDestPartitionName); if ((!sourcePartitionOptional.isPresent()) || (!destPartitionOptional.isPresent())) { return false; } Map<String, String> sourceParams = sourcePartitionOptional.get().getParameters(); Map<String, String> destParams = destPartitionOptional.get().getParameters(); for (Map.Entry<String, String> param : sourceParams.entrySet()) { if (!matched(whitelist, blacklist, param.getKey())) { continue; } destParams.put(param.getKey(), param.getValue()); } destPartitionOptional.get().setParameters(destParams); if (!dropPartition(completeDestPartitionName)) { return false; } if (!addPartition(destPartitionOptional.get(), completeDestPartitionName)) { return false; } return true; }
java
@VisibleForTesting public boolean copyPartitionParams(String completeSourcePartitionName, String completeDestPartitionName, List<String> whitelist, List<String> blacklist) { Optional<Partition> sourcePartitionOptional = getPartitionObject(completeSourcePartitionName); Optional<Partition> destPartitionOptional = getPartitionObject(completeDestPartitionName); if ((!sourcePartitionOptional.isPresent()) || (!destPartitionOptional.isPresent())) { return false; } Map<String, String> sourceParams = sourcePartitionOptional.get().getParameters(); Map<String, String> destParams = destPartitionOptional.get().getParameters(); for (Map.Entry<String, String> param : sourceParams.entrySet()) { if (!matched(whitelist, blacklist, param.getKey())) { continue; } destParams.put(param.getKey(), param.getValue()); } destPartitionOptional.get().setParameters(destParams); if (!dropPartition(completeDestPartitionName)) { return false; } if (!addPartition(destPartitionOptional.get(), completeDestPartitionName)) { return false; } return true; }
[ "@", "VisibleForTesting", "public", "boolean", "copyPartitionParams", "(", "String", "completeSourcePartitionName", ",", "String", "completeDestPartitionName", ",", "List", "<", "String", ">", "whitelist", ",", "List", "<", "String", ">", "blacklist", ")", "{", "Optional", "<", "Partition", ">", "sourcePartitionOptional", "=", "getPartitionObject", "(", "completeSourcePartitionName", ")", ";", "Optional", "<", "Partition", ">", "destPartitionOptional", "=", "getPartitionObject", "(", "completeDestPartitionName", ")", ";", "if", "(", "(", "!", "sourcePartitionOptional", ".", "isPresent", "(", ")", ")", "||", "(", "!", "destPartitionOptional", ".", "isPresent", "(", ")", ")", ")", "{", "return", "false", ";", "}", "Map", "<", "String", ",", "String", ">", "sourceParams", "=", "sourcePartitionOptional", ".", "get", "(", ")", ".", "getParameters", "(", ")", ";", "Map", "<", "String", ",", "String", ">", "destParams", "=", "destPartitionOptional", ".", "get", "(", ")", ".", "getParameters", "(", ")", ";", "for", "(", "Map", ".", "Entry", "<", "String", ",", "String", ">", "param", ":", "sourceParams", ".", "entrySet", "(", ")", ")", "{", "if", "(", "!", "matched", "(", "whitelist", ",", "blacklist", ",", "param", ".", "getKey", "(", ")", ")", ")", "{", "continue", ";", "}", "destParams", ".", "put", "(", "param", ".", "getKey", "(", ")", ",", "param", ".", "getValue", "(", ")", ")", ";", "}", "destPartitionOptional", ".", "get", "(", ")", ".", "setParameters", "(", "destParams", ")", ";", "if", "(", "!", "dropPartition", "(", "completeDestPartitionName", ")", ")", "{", "return", "false", ";", "}", "if", "(", "!", "addPartition", "(", "destPartitionOptional", ".", "get", "(", ")", ",", "completeDestPartitionName", ")", ")", "{", "return", "false", ";", "}", "return", "true", ";", "}" ]
Method to copy partition parameters from source partition to destination partition @param completeSourcePartitionName dbName@tableName@partitionName @param completeDestPartitionName dbName@tableName@partitionName
[ "Method", "to", "copy", "partition", "parameters", "from", "source", "partition", "to", "destination", "partition" ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-data-management/src/main/java/org/apache/gobblin/data/management/conversion/hive/publisher/HiveConvertPublisher.java#L319-L344
25,965
apache/incubator-gobblin
gobblin-utility/src/main/java/org/apache/gobblin/util/ConfigUtils.java
ConfigUtils.findFullPrefixKeys
public static Set<String> findFullPrefixKeys(Properties properties, Optional<String> keyPrefix) { TreeSet<String> propNames = new TreeSet<>(); for (Map.Entry<Object, Object> entry : properties.entrySet()) { String entryKey = entry.getKey().toString(); if (StringUtils.startsWith(entryKey, keyPrefix.or(StringUtils.EMPTY))) { propNames.add(entryKey); } } Set<String> result = new HashSet<>(); String lastKey = null; Iterator<String> sortedKeysIter = propNames.iterator(); while(sortedKeysIter.hasNext()) { String propName = sortedKeysIter.next(); if (null != lastKey && propName.startsWith(lastKey + ".")) { result.add(lastKey); } lastKey = propName; } return result; }
java
public static Set<String> findFullPrefixKeys(Properties properties, Optional<String> keyPrefix) { TreeSet<String> propNames = new TreeSet<>(); for (Map.Entry<Object, Object> entry : properties.entrySet()) { String entryKey = entry.getKey().toString(); if (StringUtils.startsWith(entryKey, keyPrefix.or(StringUtils.EMPTY))) { propNames.add(entryKey); } } Set<String> result = new HashSet<>(); String lastKey = null; Iterator<String> sortedKeysIter = propNames.iterator(); while(sortedKeysIter.hasNext()) { String propName = sortedKeysIter.next(); if (null != lastKey && propName.startsWith(lastKey + ".")) { result.add(lastKey); } lastKey = propName; } return result; }
[ "public", "static", "Set", "<", "String", ">", "findFullPrefixKeys", "(", "Properties", "properties", ",", "Optional", "<", "String", ">", "keyPrefix", ")", "{", "TreeSet", "<", "String", ">", "propNames", "=", "new", "TreeSet", "<>", "(", ")", ";", "for", "(", "Map", ".", "Entry", "<", "Object", ",", "Object", ">", "entry", ":", "properties", ".", "entrySet", "(", ")", ")", "{", "String", "entryKey", "=", "entry", ".", "getKey", "(", ")", ".", "toString", "(", ")", ";", "if", "(", "StringUtils", ".", "startsWith", "(", "entryKey", ",", "keyPrefix", ".", "or", "(", "StringUtils", ".", "EMPTY", ")", ")", ")", "{", "propNames", ".", "add", "(", "entryKey", ")", ";", "}", "}", "Set", "<", "String", ">", "result", "=", "new", "HashSet", "<>", "(", ")", ";", "String", "lastKey", "=", "null", ";", "Iterator", "<", "String", ">", "sortedKeysIter", "=", "propNames", ".", "iterator", "(", ")", ";", "while", "(", "sortedKeysIter", ".", "hasNext", "(", ")", ")", "{", "String", "propName", "=", "sortedKeysIter", ".", "next", "(", ")", ";", "if", "(", "null", "!=", "lastKey", "&&", "propName", ".", "startsWith", "(", "lastKey", "+", "\".\"", ")", ")", "{", "result", ".", "add", "(", "lastKey", ")", ";", "}", "lastKey", "=", "propName", ";", "}", "return", "result", ";", "}" ]
Finds a list of properties whose keys are complete prefix of other keys. This function is meant to be used during conversion from Properties to typesafe Config as the latter does not support this scenario. @param properties the Properties collection to inspect @param keyPrefix an optional key prefix which limits which properties are inspected.
[ "Finds", "a", "list", "of", "properties", "whose", "keys", "are", "complete", "prefix", "of", "other", "keys", ".", "This", "function", "is", "meant", "to", "be", "used", "during", "conversion", "from", "Properties", "to", "typesafe", "Config", "as", "the", "latter", "does", "not", "support", "this", "scenario", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-utility/src/main/java/org/apache/gobblin/util/ConfigUtils.java#L174-L196
25,966
apache/incubator-gobblin
gobblin-utility/src/main/java/org/apache/gobblin/util/ConfigUtils.java
ConfigUtils.guessPropertiesTypes
private static Map<String, Object> guessPropertiesTypes(Map<Object, Object> srcProperties) { Map<String, Object> res = new HashMap<>(); for (Map.Entry<Object, Object> prop : srcProperties.entrySet()) { Object value = prop.getValue(); if (null != value && value instanceof String && !Strings.isNullOrEmpty(value.toString())) { try { value = Long.parseLong(value.toString()); } catch (NumberFormatException e) { try { value = Double.parseDouble(value.toString()); } catch (NumberFormatException e2) { if (value.toString().equalsIgnoreCase("true") || value.toString().equalsIgnoreCase("yes")) { value = Boolean.TRUE; } else if (value.toString().equalsIgnoreCase("false") || value.toString().equalsIgnoreCase("no")) { value = Boolean.FALSE; } else { // nothing to do } } } } res.put(prop.getKey().toString(), value); } return res; }
java
private static Map<String, Object> guessPropertiesTypes(Map<Object, Object> srcProperties) { Map<String, Object> res = new HashMap<>(); for (Map.Entry<Object, Object> prop : srcProperties.entrySet()) { Object value = prop.getValue(); if (null != value && value instanceof String && !Strings.isNullOrEmpty(value.toString())) { try { value = Long.parseLong(value.toString()); } catch (NumberFormatException e) { try { value = Double.parseDouble(value.toString()); } catch (NumberFormatException e2) { if (value.toString().equalsIgnoreCase("true") || value.toString().equalsIgnoreCase("yes")) { value = Boolean.TRUE; } else if (value.toString().equalsIgnoreCase("false") || value.toString().equalsIgnoreCase("no")) { value = Boolean.FALSE; } else { // nothing to do } } } } res.put(prop.getKey().toString(), value); } return res; }
[ "private", "static", "Map", "<", "String", ",", "Object", ">", "guessPropertiesTypes", "(", "Map", "<", "Object", ",", "Object", ">", "srcProperties", ")", "{", "Map", "<", "String", ",", "Object", ">", "res", "=", "new", "HashMap", "<>", "(", ")", ";", "for", "(", "Map", ".", "Entry", "<", "Object", ",", "Object", ">", "prop", ":", "srcProperties", ".", "entrySet", "(", ")", ")", "{", "Object", "value", "=", "prop", ".", "getValue", "(", ")", ";", "if", "(", "null", "!=", "value", "&&", "value", "instanceof", "String", "&&", "!", "Strings", ".", "isNullOrEmpty", "(", "value", ".", "toString", "(", ")", ")", ")", "{", "try", "{", "value", "=", "Long", ".", "parseLong", "(", "value", ".", "toString", "(", ")", ")", ";", "}", "catch", "(", "NumberFormatException", "e", ")", "{", "try", "{", "value", "=", "Double", ".", "parseDouble", "(", "value", ".", "toString", "(", ")", ")", ";", "}", "catch", "(", "NumberFormatException", "e2", ")", "{", "if", "(", "value", ".", "toString", "(", ")", ".", "equalsIgnoreCase", "(", "\"true\"", ")", "||", "value", ".", "toString", "(", ")", ".", "equalsIgnoreCase", "(", "\"yes\"", ")", ")", "{", "value", "=", "Boolean", ".", "TRUE", ";", "}", "else", "if", "(", "value", ".", "toString", "(", ")", ".", "equalsIgnoreCase", "(", "\"false\"", ")", "||", "value", ".", "toString", "(", ")", ".", "equalsIgnoreCase", "(", "\"no\"", ")", ")", "{", "value", "=", "Boolean", ".", "FALSE", ";", "}", "else", "{", "// nothing to do", "}", "}", "}", "}", "res", ".", "put", "(", "prop", ".", "getKey", "(", ")", ".", "toString", "(", ")", ",", "value", ")", ";", "}", "return", "res", ";", "}" ]
Attempts to guess type types of a Properties. By default, typesafe will make all property values Strings. This implementation will try to recognize booleans and numbers. All keys are treated as strings.
[ "Attempts", "to", "guess", "type", "types", "of", "a", "Properties", ".", "By", "default", "typesafe", "will", "make", "all", "property", "values", "Strings", ".", "This", "implementation", "will", "try", "to", "recognize", "booleans", "and", "numbers", ".", "All", "keys", "are", "treated", "as", "strings", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-utility/src/main/java/org/apache/gobblin/util/ConfigUtils.java#L278-L302
25,967
apache/incubator-gobblin
gobblin-utility/src/main/java/org/apache/gobblin/util/ConfigUtils.java
ConfigUtils.verifySubset
public static boolean verifySubset(Config superConfig, Config subConfig) { for (Map.Entry<String, ConfigValue> entry : subConfig.entrySet()) { if (!superConfig.hasPath(entry.getKey()) || !superConfig.getValue(entry.getKey()).unwrapped() .equals(entry.getValue().unwrapped())) { return false; } } return true; }
java
public static boolean verifySubset(Config superConfig, Config subConfig) { for (Map.Entry<String, ConfigValue> entry : subConfig.entrySet()) { if (!superConfig.hasPath(entry.getKey()) || !superConfig.getValue(entry.getKey()).unwrapped() .equals(entry.getValue().unwrapped())) { return false; } } return true; }
[ "public", "static", "boolean", "verifySubset", "(", "Config", "superConfig", ",", "Config", "subConfig", ")", "{", "for", "(", "Map", ".", "Entry", "<", "String", ",", "ConfigValue", ">", "entry", ":", "subConfig", ".", "entrySet", "(", ")", ")", "{", "if", "(", "!", "superConfig", ".", "hasPath", "(", "entry", ".", "getKey", "(", ")", ")", "||", "!", "superConfig", ".", "getValue", "(", "entry", ".", "getKey", "(", ")", ")", ".", "unwrapped", "(", ")", ".", "equals", "(", "entry", ".", "getValue", "(", ")", ".", "unwrapped", "(", ")", ")", ")", "{", "return", "false", ";", "}", "}", "return", "true", ";", "}" ]
Check that every key-value in superConfig is in subConfig
[ "Check", "that", "every", "key", "-", "value", "in", "superConfig", "is", "in", "subConfig" ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-utility/src/main/java/org/apache/gobblin/util/ConfigUtils.java#L481-L489
25,968
apache/incubator-gobblin
gobblin-utility/src/main/java/org/apache/gobblin/util/AvroUtils.java
AvroUtils.checkReaderWriterCompatibility
public static boolean checkReaderWriterCompatibility(Schema readerSchema, Schema writerSchema, boolean ignoreNamespace) { if (ignoreNamespace) { List<Schema.Field> fields = deepCopySchemaFields(readerSchema); readerSchema = Schema.createRecord(writerSchema.getName(), writerSchema.getDoc(), writerSchema.getNamespace(), readerSchema.isError()); readerSchema.setFields(fields); } return SchemaCompatibility.checkReaderWriterCompatibility(readerSchema, writerSchema).getType().equals(SchemaCompatibility.SchemaCompatibilityType.COMPATIBLE); }
java
public static boolean checkReaderWriterCompatibility(Schema readerSchema, Schema writerSchema, boolean ignoreNamespace) { if (ignoreNamespace) { List<Schema.Field> fields = deepCopySchemaFields(readerSchema); readerSchema = Schema.createRecord(writerSchema.getName(), writerSchema.getDoc(), writerSchema.getNamespace(), readerSchema.isError()); readerSchema.setFields(fields); } return SchemaCompatibility.checkReaderWriterCompatibility(readerSchema, writerSchema).getType().equals(SchemaCompatibility.SchemaCompatibilityType.COMPATIBLE); }
[ "public", "static", "boolean", "checkReaderWriterCompatibility", "(", "Schema", "readerSchema", ",", "Schema", "writerSchema", ",", "boolean", "ignoreNamespace", ")", "{", "if", "(", "ignoreNamespace", ")", "{", "List", "<", "Schema", ".", "Field", ">", "fields", "=", "deepCopySchemaFields", "(", "readerSchema", ")", ";", "readerSchema", "=", "Schema", ".", "createRecord", "(", "writerSchema", ".", "getName", "(", ")", ",", "writerSchema", ".", "getDoc", "(", ")", ",", "writerSchema", ".", "getNamespace", "(", ")", ",", "readerSchema", ".", "isError", "(", ")", ")", ";", "readerSchema", ".", "setFields", "(", "fields", ")", ";", "}", "return", "SchemaCompatibility", ".", "checkReaderWriterCompatibility", "(", "readerSchema", ",", "writerSchema", ")", ".", "getType", "(", ")", ".", "equals", "(", "SchemaCompatibility", ".", "SchemaCompatibilityType", ".", "COMPATIBLE", ")", ";", "}" ]
Validates that the provided reader schema can be used to decode avro data written with the provided writer schema. @param readerSchema schema to check. @param writerSchema schema to check. @param ignoreNamespace whether name and namespace should be ignored in validation @return true if validation passes
[ "Validates", "that", "the", "provided", "reader", "schema", "can", "be", "used", "to", "decode", "avro", "data", "written", "with", "the", "provided", "writer", "schema", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-utility/src/main/java/org/apache/gobblin/util/AvroUtils.java#L101-L110
25,969
apache/incubator-gobblin
gobblin-utility/src/main/java/org/apache/gobblin/util/AvroUtils.java
AvroUtils.getField
public static Optional<Field> getField(Schema schema, String fieldLocation) { Preconditions.checkNotNull(schema); Preconditions.checkArgument(!Strings.isNullOrEmpty(fieldLocation)); Splitter splitter = Splitter.on(FIELD_LOCATION_DELIMITER).omitEmptyStrings().trimResults(); List<String> pathList = Lists.newArrayList(splitter.split(fieldLocation)); if (pathList.size() == 0) { return Optional.absent(); } return AvroUtils.getFieldHelper(schema, pathList, 0); }
java
public static Optional<Field> getField(Schema schema, String fieldLocation) { Preconditions.checkNotNull(schema); Preconditions.checkArgument(!Strings.isNullOrEmpty(fieldLocation)); Splitter splitter = Splitter.on(FIELD_LOCATION_DELIMITER).omitEmptyStrings().trimResults(); List<String> pathList = Lists.newArrayList(splitter.split(fieldLocation)); if (pathList.size() == 0) { return Optional.absent(); } return AvroUtils.getFieldHelper(schema, pathList, 0); }
[ "public", "static", "Optional", "<", "Field", ">", "getField", "(", "Schema", "schema", ",", "String", "fieldLocation", ")", "{", "Preconditions", ".", "checkNotNull", "(", "schema", ")", ";", "Preconditions", ".", "checkArgument", "(", "!", "Strings", ".", "isNullOrEmpty", "(", "fieldLocation", ")", ")", ";", "Splitter", "splitter", "=", "Splitter", ".", "on", "(", "FIELD_LOCATION_DELIMITER", ")", ".", "omitEmptyStrings", "(", ")", ".", "trimResults", "(", ")", ";", "List", "<", "String", ">", "pathList", "=", "Lists", ".", "newArrayList", "(", "splitter", ".", "split", "(", "fieldLocation", ")", ")", ";", "if", "(", "pathList", ".", "size", "(", ")", "==", "0", ")", "{", "return", "Optional", ".", "absent", "(", ")", ";", "}", "return", "AvroUtils", ".", "getFieldHelper", "(", "schema", ",", "pathList", ",", "0", ")", ";", "}" ]
Given a GenericRecord, this method will return the field specified by the path parameter. The fieldLocation parameter is an ordered string specifying the location of the nested field to retrieve. For example, field1.nestedField1 takes field "field1", and retrieves "nestedField1" from it. @param schema is the record to retrieve the schema from @param fieldLocation is the location of the field @return the field
[ "Given", "a", "GenericRecord", "this", "method", "will", "return", "the", "field", "specified", "by", "the", "path", "parameter", ".", "The", "fieldLocation", "parameter", "is", "an", "ordered", "string", "specifying", "the", "location", "of", "the", "nested", "field", "to", "retrieve", ".", "For", "example", "field1", ".", "nestedField1", "takes", "field", "field1", "and", "retrieves", "nestedField1", "from", "it", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-utility/src/main/java/org/apache/gobblin/util/AvroUtils.java#L192-L204
25,970
apache/incubator-gobblin
gobblin-utility/src/main/java/org/apache/gobblin/util/AvroUtils.java
AvroUtils.getFieldValue
public static Optional<Object> getFieldValue(GenericRecord record, String fieldLocation) { Map<String, Object> ret = getMultiFieldValue(record, fieldLocation); return Optional.fromNullable(ret.get(fieldLocation)); }
java
public static Optional<Object> getFieldValue(GenericRecord record, String fieldLocation) { Map<String, Object> ret = getMultiFieldValue(record, fieldLocation); return Optional.fromNullable(ret.get(fieldLocation)); }
[ "public", "static", "Optional", "<", "Object", ">", "getFieldValue", "(", "GenericRecord", "record", ",", "String", "fieldLocation", ")", "{", "Map", "<", "String", ",", "Object", ">", "ret", "=", "getMultiFieldValue", "(", "record", ",", "fieldLocation", ")", ";", "return", "Optional", ".", "fromNullable", "(", "ret", ".", "get", "(", "fieldLocation", ")", ")", ";", "}" ]
Given a GenericRecord, this method will return the field specified by the path parameter. The fieldLocation parameter is an ordered string specifying the location of the nested field to retrieve. For example, field1.nestedField1 takes the the value of the field "field1", and retrieves the field "nestedField1" from it. @param record is the record to retrieve the field from @param fieldLocation is the location of the field @return the value of the field
[ "Given", "a", "GenericRecord", "this", "method", "will", "return", "the", "field", "specified", "by", "the", "path", "parameter", ".", "The", "fieldLocation", "parameter", "is", "an", "ordered", "string", "specifying", "the", "location", "of", "the", "nested", "field", "to", "retrieve", ".", "For", "example", "field1", ".", "nestedField1", "takes", "the", "the", "value", "of", "the", "field", "field1", "and", "retrieves", "the", "field", "nestedField1", "from", "it", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-utility/src/main/java/org/apache/gobblin/util/AvroUtils.java#L240-L243
25,971
apache/incubator-gobblin
gobblin-utility/src/main/java/org/apache/gobblin/util/AvroUtils.java
AvroUtils.getObjectFromMap
private static Object getObjectFromMap(Map map, String key) { Utf8 utf8Key = new Utf8(key); return map.get(utf8Key); }
java
private static Object getObjectFromMap(Map map, String key) { Utf8 utf8Key = new Utf8(key); return map.get(utf8Key); }
[ "private", "static", "Object", "getObjectFromMap", "(", "Map", "map", ",", "String", "key", ")", "{", "Utf8", "utf8Key", "=", "new", "Utf8", "(", "key", ")", ";", "return", "map", ".", "get", "(", "utf8Key", ")", ";", "}" ]
This method is to get object from map given a key as string. Avro persists string as Utf8 @param map passed from {@link #getFieldHelper(Map, Object, List, int)} @param key passed from {@link #getFieldHelper(Map, Object, List, int)} @return This could again be a GenericRecord
[ "This", "method", "is", "to", "get", "object", "from", "map", "given", "a", "key", "as", "string", ".", "Avro", "persists", "string", "as", "Utf8" ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-utility/src/main/java/org/apache/gobblin/util/AvroUtils.java#L353-L356
25,972
apache/incubator-gobblin
gobblin-utility/src/main/java/org/apache/gobblin/util/AvroUtils.java
AvroUtils.convertRecordSchema
public static GenericRecord convertRecordSchema(GenericRecord record, Schema newSchema) throws IOException { if (record.getSchema().equals(newSchema)) { return record; } try { BinaryDecoder decoder = new DecoderFactory().binaryDecoder(recordToByteArray(record), null); DatumReader<GenericRecord> reader = new GenericDatumReader<>(record.getSchema(), newSchema); return reader.read(null, decoder); } catch (IOException e) { throw new IOException( String.format("Cannot convert avro record to new schema. Origianl schema = %s, new schema = %s", record.getSchema(), newSchema), e); } }
java
public static GenericRecord convertRecordSchema(GenericRecord record, Schema newSchema) throws IOException { if (record.getSchema().equals(newSchema)) { return record; } try { BinaryDecoder decoder = new DecoderFactory().binaryDecoder(recordToByteArray(record), null); DatumReader<GenericRecord> reader = new GenericDatumReader<>(record.getSchema(), newSchema); return reader.read(null, decoder); } catch (IOException e) { throw new IOException( String.format("Cannot convert avro record to new schema. Origianl schema = %s, new schema = %s", record.getSchema(), newSchema), e); } }
[ "public", "static", "GenericRecord", "convertRecordSchema", "(", "GenericRecord", "record", ",", "Schema", "newSchema", ")", "throws", "IOException", "{", "if", "(", "record", ".", "getSchema", "(", ")", ".", "equals", "(", "newSchema", ")", ")", "{", "return", "record", ";", "}", "try", "{", "BinaryDecoder", "decoder", "=", "new", "DecoderFactory", "(", ")", ".", "binaryDecoder", "(", "recordToByteArray", "(", "record", ")", ",", "null", ")", ";", "DatumReader", "<", "GenericRecord", ">", "reader", "=", "new", "GenericDatumReader", "<>", "(", "record", ".", "getSchema", "(", ")", ",", "newSchema", ")", ";", "return", "reader", ".", "read", "(", "null", ",", "decoder", ")", ";", "}", "catch", "(", "IOException", "e", ")", "{", "throw", "new", "IOException", "(", "String", ".", "format", "(", "\"Cannot convert avro record to new schema. Origianl schema = %s, new schema = %s\"", ",", "record", ".", "getSchema", "(", ")", ",", "newSchema", ")", ",", "e", ")", ";", "}", "}" ]
Change the schema of an Avro record. @param record The Avro record whose schema is to be changed. @param newSchema The target schema. It must be compatible as reader schema with record.getSchema() as writer schema. @return a new Avro record with the new schema. @throws IOException if conversion failed.
[ "Change", "the", "schema", "of", "an", "Avro", "record", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-utility/src/main/java/org/apache/gobblin/util/AvroUtils.java#L372-L387
25,973
apache/incubator-gobblin
gobblin-utility/src/main/java/org/apache/gobblin/util/AvroUtils.java
AvroUtils.recordToByteArray
public static byte[] recordToByteArray(GenericRecord record) throws IOException { try (ByteArrayOutputStream out = new ByteArrayOutputStream()) { Encoder encoder = EncoderFactory.get().directBinaryEncoder(out, null); DatumWriter<GenericRecord> writer = new GenericDatumWriter<>(record.getSchema()); writer.write(record, encoder); byte[] byteArray = out.toByteArray(); return byteArray; } }
java
public static byte[] recordToByteArray(GenericRecord record) throws IOException { try (ByteArrayOutputStream out = new ByteArrayOutputStream()) { Encoder encoder = EncoderFactory.get().directBinaryEncoder(out, null); DatumWriter<GenericRecord> writer = new GenericDatumWriter<>(record.getSchema()); writer.write(record, encoder); byte[] byteArray = out.toByteArray(); return byteArray; } }
[ "public", "static", "byte", "[", "]", "recordToByteArray", "(", "GenericRecord", "record", ")", "throws", "IOException", "{", "try", "(", "ByteArrayOutputStream", "out", "=", "new", "ByteArrayOutputStream", "(", ")", ")", "{", "Encoder", "encoder", "=", "EncoderFactory", ".", "get", "(", ")", ".", "directBinaryEncoder", "(", "out", ",", "null", ")", ";", "DatumWriter", "<", "GenericRecord", ">", "writer", "=", "new", "GenericDatumWriter", "<>", "(", "record", ".", "getSchema", "(", ")", ")", ";", "writer", ".", "write", "(", "record", ",", "encoder", ")", ";", "byte", "[", "]", "byteArray", "=", "out", ".", "toByteArray", "(", ")", ";", "return", "byteArray", ";", "}", "}" ]
Convert a GenericRecord to a byte array.
[ "Convert", "a", "GenericRecord", "to", "a", "byte", "array", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-utility/src/main/java/org/apache/gobblin/util/AvroUtils.java#L392-L400
25,974
apache/incubator-gobblin
gobblin-utility/src/main/java/org/apache/gobblin/util/AvroUtils.java
AvroUtils.getSchemaFromDataFile
public static Schema getSchemaFromDataFile(Path dataFile, FileSystem fs) throws IOException { try (SeekableInput sin = new FsInput(dataFile, fs.getConf()); DataFileReader<GenericRecord> reader = new DataFileReader<>(sin, new GenericDatumReader<GenericRecord>())) { return reader.getSchema(); } }
java
public static Schema getSchemaFromDataFile(Path dataFile, FileSystem fs) throws IOException { try (SeekableInput sin = new FsInput(dataFile, fs.getConf()); DataFileReader<GenericRecord> reader = new DataFileReader<>(sin, new GenericDatumReader<GenericRecord>())) { return reader.getSchema(); } }
[ "public", "static", "Schema", "getSchemaFromDataFile", "(", "Path", "dataFile", ",", "FileSystem", "fs", ")", "throws", "IOException", "{", "try", "(", "SeekableInput", "sin", "=", "new", "FsInput", "(", "dataFile", ",", "fs", ".", "getConf", "(", ")", ")", ";", "DataFileReader", "<", "GenericRecord", ">", "reader", "=", "new", "DataFileReader", "<>", "(", "sin", ",", "new", "GenericDatumReader", "<", "GenericRecord", ">", "(", ")", ")", ")", "{", "return", "reader", ".", "getSchema", "(", ")", ";", "}", "}" ]
Get Avro schema from an Avro data file.
[ "Get", "Avro", "schema", "from", "an", "Avro", "data", "file", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-utility/src/main/java/org/apache/gobblin/util/AvroUtils.java#L405-L410
25,975
apache/incubator-gobblin
gobblin-utility/src/main/java/org/apache/gobblin/util/AvroUtils.java
AvroUtils.parseSchemaFromFile
public static Schema parseSchemaFromFile(Path filePath, FileSystem fs) throws IOException { Preconditions.checkArgument(fs.exists(filePath), filePath + " does not exist"); try (FSDataInputStream in = fs.open(filePath)) { return new Schema.Parser().parse(in); } }
java
public static Schema parseSchemaFromFile(Path filePath, FileSystem fs) throws IOException { Preconditions.checkArgument(fs.exists(filePath), filePath + " does not exist"); try (FSDataInputStream in = fs.open(filePath)) { return new Schema.Parser().parse(in); } }
[ "public", "static", "Schema", "parseSchemaFromFile", "(", "Path", "filePath", ",", "FileSystem", "fs", ")", "throws", "IOException", "{", "Preconditions", ".", "checkArgument", "(", "fs", ".", "exists", "(", "filePath", ")", ",", "filePath", "+", "\" does not exist\"", ")", ";", "try", "(", "FSDataInputStream", "in", "=", "fs", ".", "open", "(", "filePath", ")", ")", "{", "return", "new", "Schema", ".", "Parser", "(", ")", ".", "parse", "(", "in", ")", ";", "}", "}" ]
Parse Avro schema from a schema file.
[ "Parse", "Avro", "schema", "from", "a", "schema", "file", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-utility/src/main/java/org/apache/gobblin/util/AvroUtils.java#L415-L421
25,976
apache/incubator-gobblin
gobblin-utility/src/main/java/org/apache/gobblin/util/AvroUtils.java
AvroUtils.writeSchemaToFile
public static void writeSchemaToFile(Schema schema, Path filePath, Path tempFilePath, FileSystem fs, boolean overwrite, FsPermission perm) throws IOException { boolean fileExists = fs.exists(filePath); if (!overwrite) { Preconditions.checkState(!fileExists, filePath + " already exists"); } else { // delete the target file now if not using a staging file if (fileExists && null == tempFilePath) { HadoopUtils.deletePath(fs, filePath, true); // file has been removed fileExists = false; } } // If the file exists then write to a temp file to make the replacement as close to atomic as possible Path writeFilePath = fileExists ? tempFilePath : filePath; try (DataOutputStream dos = fs.create(writeFilePath)) { dos.writeChars(schema.toString()); } fs.setPermission(writeFilePath, perm); // Replace existing file with the staged file if (fileExists) { if (!fs.delete(filePath, true)) { throw new IOException( String.format("Failed to delete %s while renaming %s to %s", filePath, tempFilePath, filePath)); } HadoopUtils.movePath(fs, tempFilePath, fs, filePath, true, fs.getConf()); } }
java
public static void writeSchemaToFile(Schema schema, Path filePath, Path tempFilePath, FileSystem fs, boolean overwrite, FsPermission perm) throws IOException { boolean fileExists = fs.exists(filePath); if (!overwrite) { Preconditions.checkState(!fileExists, filePath + " already exists"); } else { // delete the target file now if not using a staging file if (fileExists && null == tempFilePath) { HadoopUtils.deletePath(fs, filePath, true); // file has been removed fileExists = false; } } // If the file exists then write to a temp file to make the replacement as close to atomic as possible Path writeFilePath = fileExists ? tempFilePath : filePath; try (DataOutputStream dos = fs.create(writeFilePath)) { dos.writeChars(schema.toString()); } fs.setPermission(writeFilePath, perm); // Replace existing file with the staged file if (fileExists) { if (!fs.delete(filePath, true)) { throw new IOException( String.format("Failed to delete %s while renaming %s to %s", filePath, tempFilePath, filePath)); } HadoopUtils.movePath(fs, tempFilePath, fs, filePath, true, fs.getConf()); } }
[ "public", "static", "void", "writeSchemaToFile", "(", "Schema", "schema", ",", "Path", "filePath", ",", "Path", "tempFilePath", ",", "FileSystem", "fs", ",", "boolean", "overwrite", ",", "FsPermission", "perm", ")", "throws", "IOException", "{", "boolean", "fileExists", "=", "fs", ".", "exists", "(", "filePath", ")", ";", "if", "(", "!", "overwrite", ")", "{", "Preconditions", ".", "checkState", "(", "!", "fileExists", ",", "filePath", "+", "\" already exists\"", ")", ";", "}", "else", "{", "// delete the target file now if not using a staging file", "if", "(", "fileExists", "&&", "null", "==", "tempFilePath", ")", "{", "HadoopUtils", ".", "deletePath", "(", "fs", ",", "filePath", ",", "true", ")", ";", "// file has been removed", "fileExists", "=", "false", ";", "}", "}", "// If the file exists then write to a temp file to make the replacement as close to atomic as possible", "Path", "writeFilePath", "=", "fileExists", "?", "tempFilePath", ":", "filePath", ";", "try", "(", "DataOutputStream", "dos", "=", "fs", ".", "create", "(", "writeFilePath", ")", ")", "{", "dos", ".", "writeChars", "(", "schema", ".", "toString", "(", ")", ")", ";", "}", "fs", ".", "setPermission", "(", "writeFilePath", ",", "perm", ")", ";", "// Replace existing file with the staged file", "if", "(", "fileExists", ")", "{", "if", "(", "!", "fs", ".", "delete", "(", "filePath", ",", "true", ")", ")", "{", "throw", "new", "IOException", "(", "String", ".", "format", "(", "\"Failed to delete %s while renaming %s to %s\"", ",", "filePath", ",", "tempFilePath", ",", "filePath", ")", ")", ";", "}", "HadoopUtils", ".", "movePath", "(", "fs", ",", "tempFilePath", ",", "fs", ",", "filePath", ",", "true", ",", "fs", ".", "getConf", "(", ")", ")", ";", "}", "}" ]
Write a schema to a file @param schema the schema @param filePath the target file @param tempFilePath if not null then this path is used for a temporary file used to stage the write @param fs a {@link FileSystem} @param overwrite should any existing target file be overwritten? @param perm permissions @throws IOException
[ "Write", "a", "schema", "to", "a", "file" ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-utility/src/main/java/org/apache/gobblin/util/AvroUtils.java#L449-L482
25,977
apache/incubator-gobblin
gobblin-utility/src/main/java/org/apache/gobblin/util/AvroUtils.java
AvroUtils.nullifyFieldsForSchemaMerge
public static Schema nullifyFieldsForSchemaMerge(Schema oldSchema, Schema newSchema) { if (oldSchema == null) { LOG.warn("No previous schema available, use the new schema instead."); return newSchema; } if (!(oldSchema.getType().equals(Type.RECORD) && newSchema.getType().equals(Type.RECORD))) { LOG.warn("Both previous schema and new schema need to be record type. Quit merging schema."); return newSchema; } List<Field> combinedFields = Lists.newArrayList(); for (Field newFld : newSchema.getFields()) { combinedFields.add(new Field(newFld.name(), newFld.schema(), newFld.doc(), newFld.defaultValue())); } for (Field oldFld : oldSchema.getFields()) { if (newSchema.getField(oldFld.name()) == null) { List<Schema> union = Lists.newArrayList(); Schema oldFldSchema = oldFld.schema(); if (oldFldSchema.getType().equals(Type.UNION)) { union.add(Schema.create(Type.NULL)); for (Schema itemInUion : oldFldSchema.getTypes()) { if (!itemInUion.getType().equals(Type.NULL)) { union.add(itemInUion); } } Schema newFldSchema = Schema.createUnion(union); combinedFields.add(new Field(oldFld.name(), newFldSchema, oldFld.doc(), oldFld.defaultValue())); } else { union.add(Schema.create(Type.NULL)); union.add(oldFldSchema); Schema newFldSchema = Schema.createUnion(union); combinedFields.add(new Field(oldFld.name(), newFldSchema, oldFld.doc(), oldFld.defaultValue())); } } } Schema mergedSchema = Schema.createRecord(newSchema.getName(), newSchema.getDoc(), newSchema.getNamespace(), newSchema.isError()); mergedSchema.setFields(combinedFields); return mergedSchema; }
java
public static Schema nullifyFieldsForSchemaMerge(Schema oldSchema, Schema newSchema) { if (oldSchema == null) { LOG.warn("No previous schema available, use the new schema instead."); return newSchema; } if (!(oldSchema.getType().equals(Type.RECORD) && newSchema.getType().equals(Type.RECORD))) { LOG.warn("Both previous schema and new schema need to be record type. Quit merging schema."); return newSchema; } List<Field> combinedFields = Lists.newArrayList(); for (Field newFld : newSchema.getFields()) { combinedFields.add(new Field(newFld.name(), newFld.schema(), newFld.doc(), newFld.defaultValue())); } for (Field oldFld : oldSchema.getFields()) { if (newSchema.getField(oldFld.name()) == null) { List<Schema> union = Lists.newArrayList(); Schema oldFldSchema = oldFld.schema(); if (oldFldSchema.getType().equals(Type.UNION)) { union.add(Schema.create(Type.NULL)); for (Schema itemInUion : oldFldSchema.getTypes()) { if (!itemInUion.getType().equals(Type.NULL)) { union.add(itemInUion); } } Schema newFldSchema = Schema.createUnion(union); combinedFields.add(new Field(oldFld.name(), newFldSchema, oldFld.doc(), oldFld.defaultValue())); } else { union.add(Schema.create(Type.NULL)); union.add(oldFldSchema); Schema newFldSchema = Schema.createUnion(union); combinedFields.add(new Field(oldFld.name(), newFldSchema, oldFld.doc(), oldFld.defaultValue())); } } } Schema mergedSchema = Schema.createRecord(newSchema.getName(), newSchema.getDoc(), newSchema.getNamespace(), newSchema.isError()); mergedSchema.setFields(combinedFields); return mergedSchema; }
[ "public", "static", "Schema", "nullifyFieldsForSchemaMerge", "(", "Schema", "oldSchema", ",", "Schema", "newSchema", ")", "{", "if", "(", "oldSchema", "==", "null", ")", "{", "LOG", ".", "warn", "(", "\"No previous schema available, use the new schema instead.\"", ")", ";", "return", "newSchema", ";", "}", "if", "(", "!", "(", "oldSchema", ".", "getType", "(", ")", ".", "equals", "(", "Type", ".", "RECORD", ")", "&&", "newSchema", ".", "getType", "(", ")", ".", "equals", "(", "Type", ".", "RECORD", ")", ")", ")", "{", "LOG", ".", "warn", "(", "\"Both previous schema and new schema need to be record type. Quit merging schema.\"", ")", ";", "return", "newSchema", ";", "}", "List", "<", "Field", ">", "combinedFields", "=", "Lists", ".", "newArrayList", "(", ")", ";", "for", "(", "Field", "newFld", ":", "newSchema", ".", "getFields", "(", ")", ")", "{", "combinedFields", ".", "add", "(", "new", "Field", "(", "newFld", ".", "name", "(", ")", ",", "newFld", ".", "schema", "(", ")", ",", "newFld", ".", "doc", "(", ")", ",", "newFld", ".", "defaultValue", "(", ")", ")", ")", ";", "}", "for", "(", "Field", "oldFld", ":", "oldSchema", ".", "getFields", "(", ")", ")", "{", "if", "(", "newSchema", ".", "getField", "(", "oldFld", ".", "name", "(", ")", ")", "==", "null", ")", "{", "List", "<", "Schema", ">", "union", "=", "Lists", ".", "newArrayList", "(", ")", ";", "Schema", "oldFldSchema", "=", "oldFld", ".", "schema", "(", ")", ";", "if", "(", "oldFldSchema", ".", "getType", "(", ")", ".", "equals", "(", "Type", ".", "UNION", ")", ")", "{", "union", ".", "add", "(", "Schema", ".", "create", "(", "Type", ".", "NULL", ")", ")", ";", "for", "(", "Schema", "itemInUion", ":", "oldFldSchema", ".", "getTypes", "(", ")", ")", "{", "if", "(", "!", "itemInUion", ".", "getType", "(", ")", ".", "equals", "(", "Type", ".", "NULL", ")", ")", "{", "union", ".", "add", "(", "itemInUion", ")", ";", "}", "}", "Schema", "newFldSchema", "=", "Schema", ".", "createUnion", "(", "union", ")", 
";", "combinedFields", ".", "add", "(", "new", "Field", "(", "oldFld", ".", "name", "(", ")", ",", "newFldSchema", ",", "oldFld", ".", "doc", "(", ")", ",", "oldFld", ".", "defaultValue", "(", ")", ")", ")", ";", "}", "else", "{", "union", ".", "add", "(", "Schema", ".", "create", "(", "Type", ".", "NULL", ")", ")", ";", "union", ".", "add", "(", "oldFldSchema", ")", ";", "Schema", "newFldSchema", "=", "Schema", ".", "createUnion", "(", "union", ")", ";", "combinedFields", ".", "add", "(", "new", "Field", "(", "oldFld", ".", "name", "(", ")", ",", "newFldSchema", ",", "oldFld", ".", "doc", "(", ")", ",", "oldFld", ".", "defaultValue", "(", ")", ")", ")", ";", "}", "}", "}", "Schema", "mergedSchema", "=", "Schema", ".", "createRecord", "(", "newSchema", ".", "getName", "(", ")", ",", "newSchema", ".", "getDoc", "(", ")", ",", "newSchema", ".", "getNamespace", "(", ")", ",", "newSchema", ".", "isError", "(", ")", ")", ";", "mergedSchema", ".", "setFields", "(", "combinedFields", ")", ";", "return", "mergedSchema", ";", "}" ]
Merge oldSchema and newSchame. Set a field default value to null, if this field exists in the old schema but not in the new schema. @param oldSchema @param newSchema @return schema that contains all the fields in both old and new schema.
[ "Merge", "oldSchema", "and", "newSchame", ".", "Set", "a", "field", "default", "value", "to", "null", "if", "this", "field", "exists", "in", "the", "old", "schema", "but", "not", "in", "the", "new", "schema", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-utility/src/main/java/org/apache/gobblin/util/AvroUtils.java#L553-L595
25,978
apache/incubator-gobblin
gobblin-utility/src/main/java/org/apache/gobblin/util/AvroUtils.java
AvroUtils.removeUncomparableFields
public static Optional<Schema> removeUncomparableFields(Schema schema) { return removeUncomparableFields(schema, Sets.<Schema> newHashSet()); }
java
public static Optional<Schema> removeUncomparableFields(Schema schema) { return removeUncomparableFields(schema, Sets.<Schema> newHashSet()); }
[ "public", "static", "Optional", "<", "Schema", ">", "removeUncomparableFields", "(", "Schema", "schema", ")", "{", "return", "removeUncomparableFields", "(", "schema", ",", "Sets", ".", "<", "Schema", ">", "newHashSet", "(", ")", ")", ";", "}" ]
Remove map, array, enum fields, as well as union fields that contain map, array or enum, from an Avro schema. A schema with these fields cannot be used as Mapper key in a MapReduce job.
[ "Remove", "map", "array", "enum", "fields", "as", "well", "as", "union", "fields", "that", "contain", "map", "array", "or", "enum", "from", "an", "Avro", "schema", ".", "A", "schema", "with", "these", "fields", "cannot", "be", "used", "as", "Mapper", "key", "in", "a", "MapReduce", "job", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-utility/src/main/java/org/apache/gobblin/util/AvroUtils.java#L602-L604
25,979
apache/incubator-gobblin
gobblin-runtime/src/main/java/org/apache/gobblin/runtime/spec_store/FSSpecStore.java
FSSpecStore.getAllVersionsOfSpec
@Override public Collection<Spec> getAllVersionsOfSpec(URI specUri) { Preconditions.checkArgument(null != specUri, "Spec URI should not be null"); Path specPath = getPathForURI(this.fsSpecStoreDirPath, specUri, FlowSpec.Builder.DEFAULT_VERSION); return getAllVersionsOfSpec(specPath); }
java
@Override public Collection<Spec> getAllVersionsOfSpec(URI specUri) { Preconditions.checkArgument(null != specUri, "Spec URI should not be null"); Path specPath = getPathForURI(this.fsSpecStoreDirPath, specUri, FlowSpec.Builder.DEFAULT_VERSION); return getAllVersionsOfSpec(specPath); }
[ "@", "Override", "public", "Collection", "<", "Spec", ">", "getAllVersionsOfSpec", "(", "URI", "specUri", ")", "{", "Preconditions", ".", "checkArgument", "(", "null", "!=", "specUri", ",", "\"Spec URI should not be null\"", ")", ";", "Path", "specPath", "=", "getPathForURI", "(", "this", ".", "fsSpecStoreDirPath", ",", "specUri", ",", "FlowSpec", ".", "Builder", ".", "DEFAULT_VERSION", ")", ";", "return", "getAllVersionsOfSpec", "(", "specPath", ")", ";", "}" ]
Returns all versions of the spec defined by specUri. Currently, multiple versions are not supported, so this should return exactly one spec. @param specUri URI for the {@link Spec} to be retrieved. @return all versions of the spec.
[ "Returns", "all", "versions", "of", "the", "spec", "defined", "by", "specUri", ".", "Currently", "multiple", "versions", "are", "not", "supported", "so", "this", "should", "return", "exactly", "one", "spec", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-runtime/src/main/java/org/apache/gobblin/runtime/spec_store/FSSpecStore.java#L136-L141
25,980
apache/incubator-gobblin
gobblin-runtime/src/main/java/org/apache/gobblin/runtime/spec_store/FSSpecStore.java
FSSpecStore.getPathForURI
protected Path getPathForURI(Path fsSpecStoreDirPath, URI uri, String version) { return PathUtils.addExtension(PathUtils.mergePaths(fsSpecStoreDirPath, new Path(uri)), version); }
java
protected Path getPathForURI(Path fsSpecStoreDirPath, URI uri, String version) { return PathUtils.addExtension(PathUtils.mergePaths(fsSpecStoreDirPath, new Path(uri)), version); }
[ "protected", "Path", "getPathForURI", "(", "Path", "fsSpecStoreDirPath", ",", "URI", "uri", ",", "String", "version", ")", "{", "return", "PathUtils", ".", "addExtension", "(", "PathUtils", ".", "mergePaths", "(", "fsSpecStoreDirPath", ",", "new", "Path", "(", "uri", ")", ")", ",", "version", ")", ";", "}" ]
Construct a file path given URI and version of a spec. @param fsSpecStoreDirPath The directory path for specs. @param uri Uri as the identifier of JobSpec @return
[ "Construct", "a", "file", "path", "given", "URI", "and", "version", "of", "a", "spec", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-runtime/src/main/java/org/apache/gobblin/runtime/spec_store/FSSpecStore.java#L332-L334
25,981
apache/incubator-gobblin
gobblin-modules/gobblin-parquet/src/main/java/org/apache/gobblin/converter/parquet/JsonElementConversionFactory.java
JsonElementConversionFactory.getConverter
public static JsonElementConverter getConverter(JsonSchema schema, boolean repeated) { InputType fieldType = schema.getInputType(); switch (fieldType) { case INT: return new IntConverter(schema, repeated); case LONG: return new LongConverter(schema, repeated); case FLOAT: return new FloatConverter(schema, repeated); case DOUBLE: return new DoubleConverter(schema, repeated); case BOOLEAN: return new BooleanConverter(schema, repeated); case STRING: return new StringConverter(schema, repeated); case ARRAY: return new ArrayConverter(schema); case ENUM: return new EnumConverter(schema); case RECORD: return new RecordConverter(schema); case MAP: return new MapConverter(schema); case DATE: case TIMESTAMP: return new StringConverter(schema, repeated); default: throw new UnsupportedOperationException(fieldType + " is unsupported"); } }
java
public static JsonElementConverter getConverter(JsonSchema schema, boolean repeated) { InputType fieldType = schema.getInputType(); switch (fieldType) { case INT: return new IntConverter(schema, repeated); case LONG: return new LongConverter(schema, repeated); case FLOAT: return new FloatConverter(schema, repeated); case DOUBLE: return new DoubleConverter(schema, repeated); case BOOLEAN: return new BooleanConverter(schema, repeated); case STRING: return new StringConverter(schema, repeated); case ARRAY: return new ArrayConverter(schema); case ENUM: return new EnumConverter(schema); case RECORD: return new RecordConverter(schema); case MAP: return new MapConverter(schema); case DATE: case TIMESTAMP: return new StringConverter(schema, repeated); default: throw new UnsupportedOperationException(fieldType + " is unsupported"); } }
[ "public", "static", "JsonElementConverter", "getConverter", "(", "JsonSchema", "schema", ",", "boolean", "repeated", ")", "{", "InputType", "fieldType", "=", "schema", ".", "getInputType", "(", ")", ";", "switch", "(", "fieldType", ")", "{", "case", "INT", ":", "return", "new", "IntConverter", "(", "schema", ",", "repeated", ")", ";", "case", "LONG", ":", "return", "new", "LongConverter", "(", "schema", ",", "repeated", ")", ";", "case", "FLOAT", ":", "return", "new", "FloatConverter", "(", "schema", ",", "repeated", ")", ";", "case", "DOUBLE", ":", "return", "new", "DoubleConverter", "(", "schema", ",", "repeated", ")", ";", "case", "BOOLEAN", ":", "return", "new", "BooleanConverter", "(", "schema", ",", "repeated", ")", ";", "case", "STRING", ":", "return", "new", "StringConverter", "(", "schema", ",", "repeated", ")", ";", "case", "ARRAY", ":", "return", "new", "ArrayConverter", "(", "schema", ")", ";", "case", "ENUM", ":", "return", "new", "EnumConverter", "(", "schema", ")", ";", "case", "RECORD", ":", "return", "new", "RecordConverter", "(", "schema", ")", ";", "case", "MAP", ":", "return", "new", "MapConverter", "(", "schema", ")", ";", "case", "DATE", ":", "case", "TIMESTAMP", ":", "return", "new", "StringConverter", "(", "schema", ",", "repeated", ")", ";", "default", ":", "throw", "new", "UnsupportedOperationException", "(", "fieldType", "+", "\" is unsupported\"", ")", ";", "}", "}" ]
Use to create a converter for a single field from a parquetSchema. @param schema @param repeated - Is the {@link Type} repeated in the parent {@link Group} @return
[ "Use", "to", "create", "a", "converter", "for", "a", "single", "field", "from", "a", "parquetSchema", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-modules/gobblin-parquet/src/main/java/org/apache/gobblin/converter/parquet/JsonElementConversionFactory.java#L74-L115
25,982
apache/incubator-gobblin
gobblin-service/src/main/java/org/apache/gobblin/service/modules/core/GitConfigMonitor.java
GitConfigMonitor.loadConfigFileWithFlowNameOverrides
private Config loadConfigFileWithFlowNameOverrides(Path configFilePath) throws IOException { Config flowConfig = this.pullFileLoader.loadPullFile(configFilePath, emptyConfig, false); String flowName = FSSpecStore.getSpecName(configFilePath); String flowGroup = FSSpecStore.getSpecGroup(configFilePath); return flowConfig.withValue(ConfigurationKeys.FLOW_NAME_KEY, ConfigValueFactory.fromAnyRef(flowName)) .withValue(ConfigurationKeys.FLOW_GROUP_KEY, ConfigValueFactory.fromAnyRef(flowGroup)); }
java
private Config loadConfigFileWithFlowNameOverrides(Path configFilePath) throws IOException { Config flowConfig = this.pullFileLoader.loadPullFile(configFilePath, emptyConfig, false); String flowName = FSSpecStore.getSpecName(configFilePath); String flowGroup = FSSpecStore.getSpecGroup(configFilePath); return flowConfig.withValue(ConfigurationKeys.FLOW_NAME_KEY, ConfigValueFactory.fromAnyRef(flowName)) .withValue(ConfigurationKeys.FLOW_GROUP_KEY, ConfigValueFactory.fromAnyRef(flowGroup)); }
[ "private", "Config", "loadConfigFileWithFlowNameOverrides", "(", "Path", "configFilePath", ")", "throws", "IOException", "{", "Config", "flowConfig", "=", "this", ".", "pullFileLoader", ".", "loadPullFile", "(", "configFilePath", ",", "emptyConfig", ",", "false", ")", ";", "String", "flowName", "=", "FSSpecStore", ".", "getSpecName", "(", "configFilePath", ")", ";", "String", "flowGroup", "=", "FSSpecStore", ".", "getSpecGroup", "(", "configFilePath", ")", ";", "return", "flowConfig", ".", "withValue", "(", "ConfigurationKeys", ".", "FLOW_NAME_KEY", ",", "ConfigValueFactory", ".", "fromAnyRef", "(", "flowName", ")", ")", ".", "withValue", "(", "ConfigurationKeys", ".", "FLOW_GROUP_KEY", ",", "ConfigValueFactory", ".", "fromAnyRef", "(", "flowGroup", ")", ")", ";", "}" ]
Load the config file and override the flow name and flow path properties with the names from the file path @param configFilePath path of the config file relative to the repository root @return the configuration object @throws IOException
[ "Load", "the", "config", "file", "and", "override", "the", "flow", "name", "and", "flow", "path", "properties", "with", "the", "names", "from", "the", "file", "path" ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-service/src/main/java/org/apache/gobblin/service/modules/core/GitConfigMonitor.java#L170-L177
25,983
apache/incubator-gobblin
gobblin-modules/gobblin-http/src/main/java/org/apache/gobblin/utils/HttpUtils.java
HttpUtils.buildURI
public static URI buildURI(String urlTemplate, Map<String, String> keys, Map<String, String> queryParams) { // Compute base url String url = urlTemplate; if (keys != null && keys.size() != 0) { url = StrSubstitutor.replace(urlTemplate, keys); } try { URIBuilder uriBuilder = new URIBuilder(url); // Append query parameters if (queryParams != null && queryParams.size() != 0) { for (Map.Entry<String, String> entry : queryParams.entrySet()) { uriBuilder.addParameter(entry.getKey(), entry.getValue()); } } return uriBuilder.build(); } catch (URISyntaxException e) { throw new RuntimeException("Fail to build uri", e); } }
java
public static URI buildURI(String urlTemplate, Map<String, String> keys, Map<String, String> queryParams) { // Compute base url String url = urlTemplate; if (keys != null && keys.size() != 0) { url = StrSubstitutor.replace(urlTemplate, keys); } try { URIBuilder uriBuilder = new URIBuilder(url); // Append query parameters if (queryParams != null && queryParams.size() != 0) { for (Map.Entry<String, String> entry : queryParams.entrySet()) { uriBuilder.addParameter(entry.getKey(), entry.getValue()); } } return uriBuilder.build(); } catch (URISyntaxException e) { throw new RuntimeException("Fail to build uri", e); } }
[ "public", "static", "URI", "buildURI", "(", "String", "urlTemplate", ",", "Map", "<", "String", ",", "String", ">", "keys", ",", "Map", "<", "String", ",", "String", ">", "queryParams", ")", "{", "// Compute base url", "String", "url", "=", "urlTemplate", ";", "if", "(", "keys", "!=", "null", "&&", "keys", ".", "size", "(", ")", "!=", "0", ")", "{", "url", "=", "StrSubstitutor", ".", "replace", "(", "urlTemplate", ",", "keys", ")", ";", "}", "try", "{", "URIBuilder", "uriBuilder", "=", "new", "URIBuilder", "(", "url", ")", ";", "// Append query parameters", "if", "(", "queryParams", "!=", "null", "&&", "queryParams", ".", "size", "(", ")", "!=", "0", ")", "{", "for", "(", "Map", ".", "Entry", "<", "String", ",", "String", ">", "entry", ":", "queryParams", ".", "entrySet", "(", ")", ")", "{", "uriBuilder", ".", "addParameter", "(", "entry", ".", "getKey", "(", ")", ",", "entry", ".", "getValue", "(", ")", ")", ";", "}", "}", "return", "uriBuilder", ".", "build", "(", ")", ";", "}", "catch", "(", "URISyntaxException", "e", ")", "{", "throw", "new", "RuntimeException", "(", "\"Fail to build uri\"", ",", "e", ")", ";", "}", "}" ]
Given a url template, interpolate with keys and build the URI after adding query parameters <p> With url template: http://test.com/resource/(urn:${resourceId})/entities/(entity:${entityId}), keys: { "resourceId": 123, "entityId": 456 }, queryParams: { "locale": "en_US" }, the outpuT URI is: http://test.com/resource/(urn:123)/entities/(entity:456)?locale=en_US </p> @param urlTemplate url template @param keys data map to interpolate url template @param queryParams query parameters added to the url @return a uri
[ "Given", "a", "url", "template", "interpolate", "with", "keys", "and", "build", "the", "URI", "after", "adding", "query", "parameters" ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-modules/gobblin-http/src/main/java/org/apache/gobblin/utils/HttpUtils.java#L101-L120
25,984
apache/incubator-gobblin
gobblin-modules/gobblin-http/src/main/java/org/apache/gobblin/utils/HttpUtils.java
HttpUtils.getErrorCodeWhitelist
public static Set<String> getErrorCodeWhitelist(Config config) { String list = config.getString(HttpConstants.ERROR_CODE_WHITELIST).toLowerCase(); return new HashSet<>(getStringList(list)); }
java
public static Set<String> getErrorCodeWhitelist(Config config) { String list = config.getString(HttpConstants.ERROR_CODE_WHITELIST).toLowerCase(); return new HashSet<>(getStringList(list)); }
[ "public", "static", "Set", "<", "String", ">", "getErrorCodeWhitelist", "(", "Config", "config", ")", "{", "String", "list", "=", "config", ".", "getString", "(", "HttpConstants", ".", "ERROR_CODE_WHITELIST", ")", ".", "toLowerCase", "(", ")", ";", "return", "new", "HashSet", "<>", "(", "getStringList", "(", "list", ")", ")", ";", "}" ]
Get the error code whitelist from a config
[ "Get", "the", "error", "code", "whitelist", "from", "a", "config" ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-modules/gobblin-http/src/main/java/org/apache/gobblin/utils/HttpUtils.java#L132-L135
25,985
apache/incubator-gobblin
gobblin-modules/gobblin-http/src/main/java/org/apache/gobblin/utils/HttpUtils.java
HttpUtils.toMap
public static Map<String, Object> toMap(String jsonString) { Map<String, Object> map = new HashMap<>(); return GSON.fromJson(jsonString, map.getClass()); }
java
public static Map<String, Object> toMap(String jsonString) { Map<String, Object> map = new HashMap<>(); return GSON.fromJson(jsonString, map.getClass()); }
[ "public", "static", "Map", "<", "String", ",", "Object", ">", "toMap", "(", "String", "jsonString", ")", "{", "Map", "<", "String", ",", "Object", ">", "map", "=", "new", "HashMap", "<>", "(", ")", ";", "return", "GSON", ".", "fromJson", "(", "jsonString", ",", "map", ".", "getClass", "(", ")", ")", ";", "}" ]
Convert a json encoded string to a Map @param jsonString json string @return the Map encoded in the string
[ "Convert", "a", "json", "encoded", "string", "to", "a", "Map" ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-modules/gobblin-http/src/main/java/org/apache/gobblin/utils/HttpUtils.java#L174-L177
25,986
apache/incubator-gobblin
gobblin-modules/gobblin-http/src/main/java/org/apache/gobblin/utils/HttpUtils.java
HttpUtils.createR2ClientLimiterKey
public static String createR2ClientLimiterKey(Config config) { String urlTemplate = config.getString(HttpConstants.URL_TEMPLATE); try { String escaped = URIUtil.encodeQuery(urlTemplate); URI uri = new URI(escaped); if (uri.getHost() == null) throw new RuntimeException("Cannot get host part from uri" + urlTemplate); String key = uri.getScheme() + "/" + uri.getHost(); if (uri.getPort() > 0) { key = key + "/" + uri.getPort(); } log.info("Get limiter key [" + key + "]"); return key; } catch (Exception e) { throw new RuntimeException("Cannot create R2 limiter key", e); } }
java
public static String createR2ClientLimiterKey(Config config) { String urlTemplate = config.getString(HttpConstants.URL_TEMPLATE); try { String escaped = URIUtil.encodeQuery(urlTemplate); URI uri = new URI(escaped); if (uri.getHost() == null) throw new RuntimeException("Cannot get host part from uri" + urlTemplate); String key = uri.getScheme() + "/" + uri.getHost(); if (uri.getPort() > 0) { key = key + "/" + uri.getPort(); } log.info("Get limiter key [" + key + "]"); return key; } catch (Exception e) { throw new RuntimeException("Cannot create R2 limiter key", e); } }
[ "public", "static", "String", "createR2ClientLimiterKey", "(", "Config", "config", ")", "{", "String", "urlTemplate", "=", "config", ".", "getString", "(", "HttpConstants", ".", "URL_TEMPLATE", ")", ";", "try", "{", "String", "escaped", "=", "URIUtil", ".", "encodeQuery", "(", "urlTemplate", ")", ";", "URI", "uri", "=", "new", "URI", "(", "escaped", ")", ";", "if", "(", "uri", ".", "getHost", "(", ")", "==", "null", ")", "throw", "new", "RuntimeException", "(", "\"Cannot get host part from uri\"", "+", "urlTemplate", ")", ";", "String", "key", "=", "uri", ".", "getScheme", "(", ")", "+", "\"/\"", "+", "uri", ".", "getHost", "(", ")", ";", "if", "(", "uri", ".", "getPort", "(", ")", ">", "0", ")", "{", "key", "=", "key", "+", "\"/\"", "+", "uri", ".", "getPort", "(", ")", ";", "}", "log", ".", "info", "(", "\"Get limiter key [\"", "+", "key", "+", "\"]\"", ")", ";", "return", "key", ";", "}", "catch", "(", "Exception", "e", ")", "{", "throw", "new", "RuntimeException", "(", "\"Cannot create R2 limiter key\"", ",", "e", ")", ";", "}", "}" ]
Convert D2 URL template into a string used for throttling limiter Valid: d2://host/${resource-id} Invalid: d2://host${resource-id}, because we cannot differentiate the host
[ "Convert", "D2", "URL", "template", "into", "a", "string", "used", "for", "throttling", "limiter" ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-modules/gobblin-http/src/main/java/org/apache/gobblin/utils/HttpUtils.java#L204-L222
25,987
apache/incubator-gobblin
gobblin-core-base/src/main/java/org/apache/gobblin/instrumented/writer/InstrumentedDataWriterBase.java
InstrumentedDataWriterBase.onSuccessfulWrite
public void onSuccessfulWrite(long startTimeNanos) { Instrumented.updateTimer(this.dataWriterTimer, System.nanoTime() - startTimeNanos, TimeUnit.NANOSECONDS); Instrumented.markMeter(this.successfulWritesMeter); }
java
public void onSuccessfulWrite(long startTimeNanos) { Instrumented.updateTimer(this.dataWriterTimer, System.nanoTime() - startTimeNanos, TimeUnit.NANOSECONDS); Instrumented.markMeter(this.successfulWritesMeter); }
[ "public", "void", "onSuccessfulWrite", "(", "long", "startTimeNanos", ")", "{", "Instrumented", ".", "updateTimer", "(", "this", ".", "dataWriterTimer", ",", "System", ".", "nanoTime", "(", ")", "-", "startTimeNanos", ",", "TimeUnit", ".", "NANOSECONDS", ")", ";", "Instrumented", ".", "markMeter", "(", "this", ".", "successfulWritesMeter", ")", ";", "}" ]
Called after a successful write of a record. @param startTimeNanos time at which writing started.
[ "Called", "after", "a", "successful", "write", "of", "a", "record", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-core-base/src/main/java/org/apache/gobblin/instrumented/writer/InstrumentedDataWriterBase.java#L185-L188
25,988
apache/incubator-gobblin
gobblin-modules/gobblin-kafka-common/src/main/java/org/apache/gobblin/converter/EnvelopeSchemaConverter.java
EnvelopeSchemaConverter.convertSchema
@Override public String convertSchema(Schema inputSchema, WorkUnitState workUnit) throws SchemaConversionException { return EnvelopeSchemaConverter.class.getName(); }
java
@Override public String convertSchema(Schema inputSchema, WorkUnitState workUnit) throws SchemaConversionException { return EnvelopeSchemaConverter.class.getName(); }
[ "@", "Override", "public", "String", "convertSchema", "(", "Schema", "inputSchema", ",", "WorkUnitState", "workUnit", ")", "throws", "SchemaConversionException", "{", "return", "EnvelopeSchemaConverter", ".", "class", ".", "getName", "(", ")", ";", "}" ]
Do nothing, actual schema must be obtained from records.
[ "Do", "nothing", "actual", "schema", "must", "be", "obtained", "from", "records", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-modules/gobblin-kafka-common/src/main/java/org/apache/gobblin/converter/EnvelopeSchemaConverter.java#L102-L105
25,989
apache/incubator-gobblin
gobblin-modules/gobblin-kafka-common/src/main/java/org/apache/gobblin/converter/EnvelopeSchemaConverter.java
EnvelopeSchemaConverter.convertRecord
@Override public Iterable<GenericRecord> convertRecord(String outputSchema, GenericRecord inputRecord, WorkUnitState workUnit) throws DataConversionException { try { String schemaIdField = workUnit.contains(PAYLOAD_SCHEMA_ID_FIELD) ? workUnit.getProp(PAYLOAD_SCHEMA_ID_FIELD) : DEFAULT_PAYLOAD_SCHEMA_ID_FIELD; String payloadField = workUnit.contains(PAYLOAD_FIELD) ? workUnit.getProp(PAYLOAD_FIELD) : DEFAULT_PAYLOAD_FIELD; String schemaKey = String.valueOf(inputRecord.get(schemaIdField)); Schema payloadSchema = (Schema) this.registry.getSchemaByKey(schemaKey); byte[] payload = getPayload(inputRecord, payloadField); GenericRecord outputRecord = deserializePayload(payload, payloadSchema); if (this.fieldRemover.isPresent()) { payloadSchema = this.fieldRemover.get().removeFields(payloadSchema); } return new SingleRecordIterable<>(AvroUtils.convertRecordSchema(outputRecord, payloadSchema)); } catch (IOException | SchemaRegistryException | ExecutionException e) { throw new DataConversionException(e); } }
java
@Override public Iterable<GenericRecord> convertRecord(String outputSchema, GenericRecord inputRecord, WorkUnitState workUnit) throws DataConversionException { try { String schemaIdField = workUnit.contains(PAYLOAD_SCHEMA_ID_FIELD) ? workUnit.getProp(PAYLOAD_SCHEMA_ID_FIELD) : DEFAULT_PAYLOAD_SCHEMA_ID_FIELD; String payloadField = workUnit.contains(PAYLOAD_FIELD) ? workUnit.getProp(PAYLOAD_FIELD) : DEFAULT_PAYLOAD_FIELD; String schemaKey = String.valueOf(inputRecord.get(schemaIdField)); Schema payloadSchema = (Schema) this.registry.getSchemaByKey(schemaKey); byte[] payload = getPayload(inputRecord, payloadField); GenericRecord outputRecord = deserializePayload(payload, payloadSchema); if (this.fieldRemover.isPresent()) { payloadSchema = this.fieldRemover.get().removeFields(payloadSchema); } return new SingleRecordIterable<>(AvroUtils.convertRecordSchema(outputRecord, payloadSchema)); } catch (IOException | SchemaRegistryException | ExecutionException e) { throw new DataConversionException(e); } }
[ "@", "Override", "public", "Iterable", "<", "GenericRecord", ">", "convertRecord", "(", "String", "outputSchema", ",", "GenericRecord", "inputRecord", ",", "WorkUnitState", "workUnit", ")", "throws", "DataConversionException", "{", "try", "{", "String", "schemaIdField", "=", "workUnit", ".", "contains", "(", "PAYLOAD_SCHEMA_ID_FIELD", ")", "?", "workUnit", ".", "getProp", "(", "PAYLOAD_SCHEMA_ID_FIELD", ")", ":", "DEFAULT_PAYLOAD_SCHEMA_ID_FIELD", ";", "String", "payloadField", "=", "workUnit", ".", "contains", "(", "PAYLOAD_FIELD", ")", "?", "workUnit", ".", "getProp", "(", "PAYLOAD_FIELD", ")", ":", "DEFAULT_PAYLOAD_FIELD", ";", "String", "schemaKey", "=", "String", ".", "valueOf", "(", "inputRecord", ".", "get", "(", "schemaIdField", ")", ")", ";", "Schema", "payloadSchema", "=", "(", "Schema", ")", "this", ".", "registry", ".", "getSchemaByKey", "(", "schemaKey", ")", ";", "byte", "[", "]", "payload", "=", "getPayload", "(", "inputRecord", ",", "payloadField", ")", ";", "GenericRecord", "outputRecord", "=", "deserializePayload", "(", "payload", ",", "payloadSchema", ")", ";", "if", "(", "this", ".", "fieldRemover", ".", "isPresent", "(", ")", ")", "{", "payloadSchema", "=", "this", ".", "fieldRemover", ".", "get", "(", ")", ".", "removeFields", "(", "payloadSchema", ")", ";", "}", "return", "new", "SingleRecordIterable", "<>", "(", "AvroUtils", ".", "convertRecordSchema", "(", "outputRecord", ",", "payloadSchema", ")", ")", ";", "}", "catch", "(", "IOException", "|", "SchemaRegistryException", "|", "ExecutionException", "e", ")", "{", "throw", "new", "DataConversionException", "(", "e", ")", ";", "}", "}" ]
Get actual schema from registry and deserialize payload using it.
[ "Get", "actual", "schema", "from", "registry", "and", "deserialize", "payload", "using", "it", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-modules/gobblin-kafka-common/src/main/java/org/apache/gobblin/converter/EnvelopeSchemaConverter.java#L110-L129
25,990
apache/incubator-gobblin
gobblin-modules/gobblin-kafka-common/src/main/java/org/apache/gobblin/converter/EnvelopeSchemaConverter.java
EnvelopeSchemaConverter.getPayload
public byte[] getPayload(GenericRecord inputRecord, String payloadFieldName) { ByteBuffer bb = (ByteBuffer) inputRecord.get(payloadFieldName); byte[] payloadBytes; if (bb.hasArray()) { payloadBytes = bb.array(); } else { payloadBytes = new byte[bb.remaining()]; bb.get(payloadBytes); } String hexString = new String(payloadBytes, StandardCharsets.UTF_8); return DatatypeConverter.parseHexBinary(hexString); }
java
public byte[] getPayload(GenericRecord inputRecord, String payloadFieldName) { ByteBuffer bb = (ByteBuffer) inputRecord.get(payloadFieldName); byte[] payloadBytes; if (bb.hasArray()) { payloadBytes = bb.array(); } else { payloadBytes = new byte[bb.remaining()]; bb.get(payloadBytes); } String hexString = new String(payloadBytes, StandardCharsets.UTF_8); return DatatypeConverter.parseHexBinary(hexString); }
[ "public", "byte", "[", "]", "getPayload", "(", "GenericRecord", "inputRecord", ",", "String", "payloadFieldName", ")", "{", "ByteBuffer", "bb", "=", "(", "ByteBuffer", ")", "inputRecord", ".", "get", "(", "payloadFieldName", ")", ";", "byte", "[", "]", "payloadBytes", ";", "if", "(", "bb", ".", "hasArray", "(", ")", ")", "{", "payloadBytes", "=", "bb", ".", "array", "(", ")", ";", "}", "else", "{", "payloadBytes", "=", "new", "byte", "[", "bb", ".", "remaining", "(", ")", "]", ";", "bb", ".", "get", "(", "payloadBytes", ")", ";", "}", "String", "hexString", "=", "new", "String", "(", "payloadBytes", ",", "StandardCharsets", ".", "UTF_8", ")", ";", "return", "DatatypeConverter", ".", "parseHexBinary", "(", "hexString", ")", ";", "}" ]
Get payload field from GenericRecord and convert to byte array
[ "Get", "payload", "field", "from", "GenericRecord", "and", "convert", "to", "byte", "array" ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-modules/gobblin-kafka-common/src/main/java/org/apache/gobblin/converter/EnvelopeSchemaConverter.java#L134-L145
25,991
apache/incubator-gobblin
gobblin-modules/gobblin-kafka-common/src/main/java/org/apache/gobblin/converter/EnvelopeSchemaConverter.java
EnvelopeSchemaConverter.deserializePayload
public GenericRecord deserializePayload(byte[] payload, Schema payloadSchema) throws IOException, ExecutionException { Decoder decoder = this.decoderFactory.binaryDecoder(payload, null); GenericDatumReader<GenericRecord> reader = this.readers.get(payloadSchema); return reader.read(null, decoder); }
java
public GenericRecord deserializePayload(byte[] payload, Schema payloadSchema) throws IOException, ExecutionException { Decoder decoder = this.decoderFactory.binaryDecoder(payload, null); GenericDatumReader<GenericRecord> reader = this.readers.get(payloadSchema); return reader.read(null, decoder); }
[ "public", "GenericRecord", "deserializePayload", "(", "byte", "[", "]", "payload", ",", "Schema", "payloadSchema", ")", "throws", "IOException", ",", "ExecutionException", "{", "Decoder", "decoder", "=", "this", ".", "decoderFactory", ".", "binaryDecoder", "(", "payload", ",", "null", ")", ";", "GenericDatumReader", "<", "GenericRecord", ">", "reader", "=", "this", ".", "readers", ".", "get", "(", "payloadSchema", ")", ";", "return", "reader", ".", "read", "(", "null", ",", "decoder", ")", ";", "}" ]
Deserialize payload using payload schema
[ "Deserialize", "payload", "using", "payload", "schema" ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-modules/gobblin-kafka-common/src/main/java/org/apache/gobblin/converter/EnvelopeSchemaConverter.java#L150-L154
25,992
apache/incubator-gobblin
gobblin-runtime/src/main/java/org/apache/gobblin/runtime/job_monitor/KafkaJobMonitor.java
KafkaJobMonitor.deleteStateStore
private void deleteStateStore(URI jobSpecUri) throws IOException { int EXPECTED_NUM_URI_TOKENS = 3; String[] uriTokens = jobSpecUri.getPath().split("/"); if (null == this.datasetStateStore) { log.warn("Job state store deletion failed as datasetstore is not initialized."); return; } if (uriTokens.length != EXPECTED_NUM_URI_TOKENS) { log.error("Invalid URI {}.", jobSpecUri); return; } String jobName = uriTokens[EXPECTED_NUM_URI_TOKENS - 1]; this.datasetStateStore.delete(jobName); log.info("JobSpec {} deleted with statestore.", jobSpecUri); }
java
private void deleteStateStore(URI jobSpecUri) throws IOException { int EXPECTED_NUM_URI_TOKENS = 3; String[] uriTokens = jobSpecUri.getPath().split("/"); if (null == this.datasetStateStore) { log.warn("Job state store deletion failed as datasetstore is not initialized."); return; } if (uriTokens.length != EXPECTED_NUM_URI_TOKENS) { log.error("Invalid URI {}.", jobSpecUri); return; } String jobName = uriTokens[EXPECTED_NUM_URI_TOKENS - 1]; this.datasetStateStore.delete(jobName); log.info("JobSpec {} deleted with statestore.", jobSpecUri); }
[ "private", "void", "deleteStateStore", "(", "URI", "jobSpecUri", ")", "throws", "IOException", "{", "int", "EXPECTED_NUM_URI_TOKENS", "=", "3", ";", "String", "[", "]", "uriTokens", "=", "jobSpecUri", ".", "getPath", "(", ")", ".", "split", "(", "\"/\"", ")", ";", "if", "(", "null", "==", "this", ".", "datasetStateStore", ")", "{", "log", ".", "warn", "(", "\"Job state store deletion failed as datasetstore is not initialized.\"", ")", ";", "return", ";", "}", "if", "(", "uriTokens", ".", "length", "!=", "EXPECTED_NUM_URI_TOKENS", ")", "{", "log", ".", "error", "(", "\"Invalid URI {}.\"", ",", "jobSpecUri", ")", ";", "return", ";", "}", "String", "jobName", "=", "uriTokens", "[", "EXPECTED_NUM_URI_TOKENS", "-", "1", "]", ";", "this", ".", "datasetStateStore", ".", "delete", "(", "jobName", ")", ";", "log", ".", "info", "(", "\"JobSpec {} deleted with statestore.\"", ",", "jobSpecUri", ")", ";", "}" ]
It fetches the job name from the given jobSpecUri and deletes its corresponding state store @param jobSpecUri jobSpecUri as created by {@link FlowConfigResourceLocalHandler.FlowUriUtils.createFlowSpecUri} @throws IOException
[ "It", "fetches", "the", "job", "name", "from", "the", "given", "jobSpecUri", "and", "deletes", "its", "corresponding", "state", "store" ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-runtime/src/main/java/org/apache/gobblin/runtime/job_monitor/KafkaJobMonitor.java#L128-L144
25,993
apache/incubator-gobblin
gobblin-core/src/main/java/org/apache/gobblin/source/extractor/extract/QueryBasedExtractor.java
QueryBasedExtractor.shouldRemoveDataPullUpperBounds
private boolean shouldRemoveDataPullUpperBounds() { if (!this.workUnitState.getPropAsBoolean(ConfigurationKeys.SOURCE_QUERYBASED_ALLOW_REMOVE_UPPER_BOUNDS, true)) { return false; } // Only consider the last work unit if (!partition.isLastPartition()) { return false; } // Don't remove if user specifies one or is recorded in previous run if (partition.getHasUserSpecifiedHighWatermark() || this.workUnitState.getProp(ConfigurationKeys.WORK_UNIT_STATE_ACTUAL_HIGH_WATER_MARK_KEY) != null) { return false; } return true; }
java
private boolean shouldRemoveDataPullUpperBounds() { if (!this.workUnitState.getPropAsBoolean(ConfigurationKeys.SOURCE_QUERYBASED_ALLOW_REMOVE_UPPER_BOUNDS, true)) { return false; } // Only consider the last work unit if (!partition.isLastPartition()) { return false; } // Don't remove if user specifies one or is recorded in previous run if (partition.getHasUserSpecifiedHighWatermark() || this.workUnitState.getProp(ConfigurationKeys.WORK_UNIT_STATE_ACTUAL_HIGH_WATER_MARK_KEY) != null) { return false; } return true; }
[ "private", "boolean", "shouldRemoveDataPullUpperBounds", "(", ")", "{", "if", "(", "!", "this", ".", "workUnitState", ".", "getPropAsBoolean", "(", "ConfigurationKeys", ".", "SOURCE_QUERYBASED_ALLOW_REMOVE_UPPER_BOUNDS", ",", "true", ")", ")", "{", "return", "false", ";", "}", "// Only consider the last work unit", "if", "(", "!", "partition", ".", "isLastPartition", "(", ")", ")", "{", "return", "false", ";", "}", "// Don't remove if user specifies one or is recorded in previous run", "if", "(", "partition", ".", "getHasUserSpecifiedHighWatermark", "(", ")", "||", "this", ".", "workUnitState", ".", "getProp", "(", "ConfigurationKeys", ".", "WORK_UNIT_STATE_ACTUAL_HIGH_WATER_MARK_KEY", ")", "!=", "null", ")", "{", "return", "false", ";", "}", "return", "true", ";", "}" ]
Check if it's appropriate to remove data pull upper bounds in the last work unit, fetching as much data as possible from the source. As between the time when data query was created and that was executed, there might be some new data generated in the source. Removing the upper bounds will help us grab the new data. Note: It's expected that there might be some duplicate data between runs because of removing the upper bounds @return should remove or not
[ "Check", "if", "it", "s", "appropriate", "to", "remove", "data", "pull", "upper", "bounds", "in", "the", "last", "work", "unit", "fetching", "as", "much", "data", "as", "possible", "from", "the", "source", ".", "As", "between", "the", "time", "when", "data", "query", "was", "created", "and", "that", "was", "executed", "there", "might", "be", "some", "new", "data", "generated", "in", "the", "source", ".", "Removing", "the", "upper", "bounds", "will", "help", "us", "grab", "the", "new", "data", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-core/src/main/java/org/apache/gobblin/source/extractor/extract/QueryBasedExtractor.java#L187-L204
25,994
apache/incubator-gobblin
gobblin-core/src/main/java/org/apache/gobblin/source/extractor/extract/QueryBasedExtractor.java
QueryBasedExtractor.removeDataPullUpperBounds
private void removeDataPullUpperBounds() { log.info("Removing data pull upper bound for last work unit"); Iterator<Predicate> it = predicateList.iterator(); while (it.hasNext()) { Predicate predicate = it.next(); if (predicate.getType() == Predicate.PredicateType.HWM) { log.info("Remove predicate: " + predicate.condition); it.remove(); } } }
java
private void removeDataPullUpperBounds() { log.info("Removing data pull upper bound for last work unit"); Iterator<Predicate> it = predicateList.iterator(); while (it.hasNext()) { Predicate predicate = it.next(); if (predicate.getType() == Predicate.PredicateType.HWM) { log.info("Remove predicate: " + predicate.condition); it.remove(); } } }
[ "private", "void", "removeDataPullUpperBounds", "(", ")", "{", "log", ".", "info", "(", "\"Removing data pull upper bound for last work unit\"", ")", ";", "Iterator", "<", "Predicate", ">", "it", "=", "predicateList", ".", "iterator", "(", ")", ";", "while", "(", "it", ".", "hasNext", "(", ")", ")", "{", "Predicate", "predicate", "=", "it", ".", "next", "(", ")", ";", "if", "(", "predicate", ".", "getType", "(", ")", "==", "Predicate", ".", "PredicateType", ".", "HWM", ")", "{", "log", ".", "info", "(", "\"Remove predicate: \"", "+", "predicate", ".", "condition", ")", ";", "it", ".", "remove", "(", ")", ";", "}", "}", "}" ]
Remove all upper bounds in the predicateList used for pulling data
[ "Remove", "all", "upper", "bounds", "in", "the", "predicateList", "used", "for", "pulling", "data" ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-core/src/main/java/org/apache/gobblin/source/extractor/extract/QueryBasedExtractor.java#L209-L219
25,995
apache/incubator-gobblin
gobblin-core/src/main/java/org/apache/gobblin/source/extractor/extract/QueryBasedExtractor.java
QueryBasedExtractor.getIterator
private Iterator<D> getIterator() throws DataRecordException, IOException { if (Boolean.valueOf(this.workUnitState.getProp(ConfigurationKeys.SOURCE_QUERYBASED_IS_SPECIFIC_API_ACTIVE))) { return this.getRecordSetFromSourceApi(this.schema, this.entity, this.workUnit, this.predicateList); } return this.getRecordSet(this.schema, this.entity, this.workUnit, this.predicateList); }
java
private Iterator<D> getIterator() throws DataRecordException, IOException { if (Boolean.valueOf(this.workUnitState.getProp(ConfigurationKeys.SOURCE_QUERYBASED_IS_SPECIFIC_API_ACTIVE))) { return this.getRecordSetFromSourceApi(this.schema, this.entity, this.workUnit, this.predicateList); } return this.getRecordSet(this.schema, this.entity, this.workUnit, this.predicateList); }
[ "private", "Iterator", "<", "D", ">", "getIterator", "(", ")", "throws", "DataRecordException", ",", "IOException", "{", "if", "(", "Boolean", ".", "valueOf", "(", "this", ".", "workUnitState", ".", "getProp", "(", "ConfigurationKeys", ".", "SOURCE_QUERYBASED_IS_SPECIFIC_API_ACTIVE", ")", ")", ")", "{", "return", "this", ".", "getRecordSetFromSourceApi", "(", "this", ".", "schema", ",", "this", ".", "entity", ",", "this", ".", "workUnit", ",", "this", ".", "predicateList", ")", ";", "}", "return", "this", ".", "getRecordSet", "(", "this", ".", "schema", ",", "this", ".", "entity", ",", "this", ".", "workUnit", ",", "this", ".", "predicateList", ")", ";", "}" ]
Get iterator from protocol specific api if is.specific.api.active is false Get iterator from source specific api if is.specific.api.active is true @return iterator
[ "Get", "iterator", "from", "protocol", "specific", "api", "if", "is", ".", "specific", ".", "api", ".", "active", "is", "false", "Get", "iterator", "from", "source", "specific", "api", "if", "is", ".", "specific", ".", "api", ".", "active", "is", "true" ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-core/src/main/java/org/apache/gobblin/source/extractor/extract/QueryBasedExtractor.java#L226-L231
25,996
apache/incubator-gobblin
gobblin-core/src/main/java/org/apache/gobblin/source/extractor/extract/QueryBasedExtractor.java
QueryBasedExtractor.close
@Override public void close() { log.info("Updating the current state high water mark with " + this.highWatermark); this.workUnitState.setActualHighWatermark(new LongWatermark(this.highWatermark)); try { this.closeConnection(); } catch (Exception e) { log.error("Failed to close the extractor", e); } }
java
@Override public void close() { log.info("Updating the current state high water mark with " + this.highWatermark); this.workUnitState.setActualHighWatermark(new LongWatermark(this.highWatermark)); try { this.closeConnection(); } catch (Exception e) { log.error("Failed to close the extractor", e); } }
[ "@", "Override", "public", "void", "close", "(", ")", "{", "log", ".", "info", "(", "\"Updating the current state high water mark with \"", "+", "this", ".", "highWatermark", ")", ";", "this", ".", "workUnitState", ".", "setActualHighWatermark", "(", "new", "LongWatermark", "(", "this", ".", "highWatermark", ")", ")", ";", "try", "{", "this", ".", "closeConnection", "(", ")", ";", "}", "catch", "(", "Exception", "e", ")", "{", "log", ".", "error", "(", "\"Failed to close the extractor\"", ",", "e", ")", ";", "}", "}" ]
close extractor read stream update high watermark
[ "close", "extractor", "read", "stream", "update", "high", "watermark" ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-core/src/main/java/org/apache/gobblin/source/extractor/extract/QueryBasedExtractor.java#L264-L273
25,997
apache/incubator-gobblin
gobblin-core/src/main/java/org/apache/gobblin/source/extractor/extract/QueryBasedExtractor.java
QueryBasedExtractor.build
public Extractor<S, D> build() throws ExtractPrepareException { String watermarkColumn = this.workUnitState.getProp(ConfigurationKeys.EXTRACT_DELTA_FIELDS_KEY); long lwm = partition.getLowWatermark(); long hwm = partition.getHighWatermark(); log.info("Low water mark: " + lwm + "; and High water mark: " + hwm); WatermarkType watermarkType; if (StringUtils.isBlank(this.workUnitState.getProp(ConfigurationKeys.SOURCE_QUERYBASED_WATERMARK_TYPE))) { watermarkType = null; } else { watermarkType = WatermarkType .valueOf(this.workUnitState.getProp(ConfigurationKeys.SOURCE_QUERYBASED_WATERMARK_TYPE).toUpperCase()); } log.info("Source Entity is " + this.entity); try { this.setTimeOut( this.workUnitState.getPropAsInt(ConfigurationKeys.SOURCE_CONN_TIMEOUT, ConfigurationKeys.DEFAULT_CONN_TIMEOUT)); this.extractMetadata(this.schema, this.entity, this.workUnit); if (StringUtils.isNotBlank(watermarkColumn)) { if (partition.isLastPartition()) { // Get a more accurate high watermark from the source long adjustedHighWatermark = this.getLatestWatermark(watermarkColumn, watermarkType, lwm, hwm); log.info("High water mark from source: " + adjustedHighWatermark); // If the source reports a finer high watermark, then consider the same as runtime high watermark. 
// Else, consider the low watermark as high water mark(with no delta).i.e, don't move the pointer if (adjustedHighWatermark == ConfigurationKeys.DEFAULT_WATERMARK_VALUE) { adjustedHighWatermark = getLowWatermarkWithNoDelta(lwm); } this.highWatermark = adjustedHighWatermark; } else { this.highWatermark = hwm; } log.info("High water mark for the current run: " + highWatermark); this.setRangePredicates(watermarkColumn, watermarkType, lwm, highWatermark); } // if it is set to true, skip count calculation and set source count to -1 if (!Boolean.valueOf(this.workUnitState.getProp(ConfigurationKeys.SOURCE_QUERYBASED_SKIP_COUNT_CALC))) { this.sourceRecordCount = this.getSourceCount(this.schema, this.entity, this.workUnit, this.predicateList); } else { log.info("Skip count calculation"); this.sourceRecordCount = -1; } if (this.sourceRecordCount == 0) { log.info("Record count is 0; Setting fetch status to false to skip readRecord()"); this.setFetchStatus(false); } } catch (SchemaException e) { throw new ExtractPrepareException("Failed to get schema for this object; error - " + e.getMessage(), e); } catch (HighWatermarkException e) { throw new ExtractPrepareException("Failed to get high watermark; error - " + e.getMessage(), e); } catch (RecordCountException e) { throw new ExtractPrepareException("Failed to get record count; error - " + e.getMessage(), e); } catch (Exception e) { throw new ExtractPrepareException("Failed to prepare the extract build; error - " + e.getMessage(), e); } return this; }
java
public Extractor<S, D> build() throws ExtractPrepareException { String watermarkColumn = this.workUnitState.getProp(ConfigurationKeys.EXTRACT_DELTA_FIELDS_KEY); long lwm = partition.getLowWatermark(); long hwm = partition.getHighWatermark(); log.info("Low water mark: " + lwm + "; and High water mark: " + hwm); WatermarkType watermarkType; if (StringUtils.isBlank(this.workUnitState.getProp(ConfigurationKeys.SOURCE_QUERYBASED_WATERMARK_TYPE))) { watermarkType = null; } else { watermarkType = WatermarkType .valueOf(this.workUnitState.getProp(ConfigurationKeys.SOURCE_QUERYBASED_WATERMARK_TYPE).toUpperCase()); } log.info("Source Entity is " + this.entity); try { this.setTimeOut( this.workUnitState.getPropAsInt(ConfigurationKeys.SOURCE_CONN_TIMEOUT, ConfigurationKeys.DEFAULT_CONN_TIMEOUT)); this.extractMetadata(this.schema, this.entity, this.workUnit); if (StringUtils.isNotBlank(watermarkColumn)) { if (partition.isLastPartition()) { // Get a more accurate high watermark from the source long adjustedHighWatermark = this.getLatestWatermark(watermarkColumn, watermarkType, lwm, hwm); log.info("High water mark from source: " + adjustedHighWatermark); // If the source reports a finer high watermark, then consider the same as runtime high watermark. 
// Else, consider the low watermark as high water mark(with no delta).i.e, don't move the pointer if (adjustedHighWatermark == ConfigurationKeys.DEFAULT_WATERMARK_VALUE) { adjustedHighWatermark = getLowWatermarkWithNoDelta(lwm); } this.highWatermark = adjustedHighWatermark; } else { this.highWatermark = hwm; } log.info("High water mark for the current run: " + highWatermark); this.setRangePredicates(watermarkColumn, watermarkType, lwm, highWatermark); } // if it is set to true, skip count calculation and set source count to -1 if (!Boolean.valueOf(this.workUnitState.getProp(ConfigurationKeys.SOURCE_QUERYBASED_SKIP_COUNT_CALC))) { this.sourceRecordCount = this.getSourceCount(this.schema, this.entity, this.workUnit, this.predicateList); } else { log.info("Skip count calculation"); this.sourceRecordCount = -1; } if (this.sourceRecordCount == 0) { log.info("Record count is 0; Setting fetch status to false to skip readRecord()"); this.setFetchStatus(false); } } catch (SchemaException e) { throw new ExtractPrepareException("Failed to get schema for this object; error - " + e.getMessage(), e); } catch (HighWatermarkException e) { throw new ExtractPrepareException("Failed to get high watermark; error - " + e.getMessage(), e); } catch (RecordCountException e) { throw new ExtractPrepareException("Failed to get record count; error - " + e.getMessage(), e); } catch (Exception e) { throw new ExtractPrepareException("Failed to prepare the extract build; error - " + e.getMessage(), e); } return this; }
[ "public", "Extractor", "<", "S", ",", "D", ">", "build", "(", ")", "throws", "ExtractPrepareException", "{", "String", "watermarkColumn", "=", "this", ".", "workUnitState", ".", "getProp", "(", "ConfigurationKeys", ".", "EXTRACT_DELTA_FIELDS_KEY", ")", ";", "long", "lwm", "=", "partition", ".", "getLowWatermark", "(", ")", ";", "long", "hwm", "=", "partition", ".", "getHighWatermark", "(", ")", ";", "log", ".", "info", "(", "\"Low water mark: \"", "+", "lwm", "+", "\"; and High water mark: \"", "+", "hwm", ")", ";", "WatermarkType", "watermarkType", ";", "if", "(", "StringUtils", ".", "isBlank", "(", "this", ".", "workUnitState", ".", "getProp", "(", "ConfigurationKeys", ".", "SOURCE_QUERYBASED_WATERMARK_TYPE", ")", ")", ")", "{", "watermarkType", "=", "null", ";", "}", "else", "{", "watermarkType", "=", "WatermarkType", ".", "valueOf", "(", "this", ".", "workUnitState", ".", "getProp", "(", "ConfigurationKeys", ".", "SOURCE_QUERYBASED_WATERMARK_TYPE", ")", ".", "toUpperCase", "(", ")", ")", ";", "}", "log", ".", "info", "(", "\"Source Entity is \"", "+", "this", ".", "entity", ")", ";", "try", "{", "this", ".", "setTimeOut", "(", "this", ".", "workUnitState", ".", "getPropAsInt", "(", "ConfigurationKeys", ".", "SOURCE_CONN_TIMEOUT", ",", "ConfigurationKeys", ".", "DEFAULT_CONN_TIMEOUT", ")", ")", ";", "this", ".", "extractMetadata", "(", "this", ".", "schema", ",", "this", ".", "entity", ",", "this", ".", "workUnit", ")", ";", "if", "(", "StringUtils", ".", "isNotBlank", "(", "watermarkColumn", ")", ")", "{", "if", "(", "partition", ".", "isLastPartition", "(", ")", ")", "{", "// Get a more accurate high watermark from the source", "long", "adjustedHighWatermark", "=", "this", ".", "getLatestWatermark", "(", "watermarkColumn", ",", "watermarkType", ",", "lwm", ",", "hwm", ")", ";", "log", ".", "info", "(", "\"High water mark from source: \"", "+", "adjustedHighWatermark", ")", ";", "// If the source reports a finer high watermark, then consider the same as 
runtime high watermark.", "// Else, consider the low watermark as high water mark(with no delta).i.e, don't move the pointer", "if", "(", "adjustedHighWatermark", "==", "ConfigurationKeys", ".", "DEFAULT_WATERMARK_VALUE", ")", "{", "adjustedHighWatermark", "=", "getLowWatermarkWithNoDelta", "(", "lwm", ")", ";", "}", "this", ".", "highWatermark", "=", "adjustedHighWatermark", ";", "}", "else", "{", "this", ".", "highWatermark", "=", "hwm", ";", "}", "log", ".", "info", "(", "\"High water mark for the current run: \"", "+", "highWatermark", ")", ";", "this", ".", "setRangePredicates", "(", "watermarkColumn", ",", "watermarkType", ",", "lwm", ",", "highWatermark", ")", ";", "}", "// if it is set to true, skip count calculation and set source count to -1", "if", "(", "!", "Boolean", ".", "valueOf", "(", "this", ".", "workUnitState", ".", "getProp", "(", "ConfigurationKeys", ".", "SOURCE_QUERYBASED_SKIP_COUNT_CALC", ")", ")", ")", "{", "this", ".", "sourceRecordCount", "=", "this", ".", "getSourceCount", "(", "this", ".", "schema", ",", "this", ".", "entity", ",", "this", ".", "workUnit", ",", "this", ".", "predicateList", ")", ";", "}", "else", "{", "log", ".", "info", "(", "\"Skip count calculation\"", ")", ";", "this", ".", "sourceRecordCount", "=", "-", "1", ";", "}", "if", "(", "this", ".", "sourceRecordCount", "==", "0", ")", "{", "log", ".", "info", "(", "\"Record count is 0; Setting fetch status to false to skip readRecord()\"", ")", ";", "this", ".", "setFetchStatus", "(", "false", ")", ";", "}", "}", "catch", "(", "SchemaException", "e", ")", "{", "throw", "new", "ExtractPrepareException", "(", "\"Failed to get schema for this object; error - \"", "+", "e", ".", "getMessage", "(", ")", ",", "e", ")", ";", "}", "catch", "(", "HighWatermarkException", "e", ")", "{", "throw", "new", "ExtractPrepareException", "(", "\"Failed to get high watermark; error - \"", "+", "e", ".", "getMessage", "(", ")", ",", "e", ")", ";", "}", "catch", "(", "RecordCountException", 
"e", ")", "{", "throw", "new", "ExtractPrepareException", "(", "\"Failed to get record count; error - \"", "+", "e", ".", "getMessage", "(", ")", ",", "e", ")", ";", "}", "catch", "(", "Exception", "e", ")", "{", "throw", "new", "ExtractPrepareException", "(", "\"Failed to prepare the extract build; error - \"", "+", "e", ".", "getMessage", "(", ")", ",", "e", ")", ";", "}", "return", "this", ";", "}" ]
build schema, record count and high water mark
[ "build", "schema", "record", "count", "and", "high", "water", "mark" ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-core/src/main/java/org/apache/gobblin/source/extractor/extract/QueryBasedExtractor.java#L285-L346
25,998
apache/incubator-gobblin
gobblin-core/src/main/java/org/apache/gobblin/source/extractor/extract/QueryBasedExtractor.java
QueryBasedExtractor.setRangePredicates
private void setRangePredicates(String watermarkColumn, WatermarkType watermarkType, long lwmValue, long hwmValue) { log.debug("Getting range predicates"); String lwmOperator = partition.isLowWatermarkInclusive() ? ">=" : ">"; String hwmOperator = (partition.isLastPartition() || partition.isHighWatermarkInclusive()) ? "<=" : "<"; WatermarkPredicate watermark = new WatermarkPredicate(watermarkColumn, watermarkType); this.addPredicates(watermark.getPredicate(this, lwmValue, lwmOperator, Predicate.PredicateType.LWM)); this.addPredicates(watermark.getPredicate(this, hwmValue, hwmOperator, Predicate.PredicateType.HWM)); if (Boolean.valueOf(this.workUnitState.getProp(ConfigurationKeys.SOURCE_QUERYBASED_IS_HOURLY_EXTRACT))) { String hourColumn = this.workUnitState.getProp(ConfigurationKeys.SOURCE_QUERYBASED_HOUR_COLUMN); if (StringUtils.isNotBlank(hourColumn)) { WatermarkPredicate hourlyWatermark = new WatermarkPredicate(hourColumn, WatermarkType.HOUR); this.addPredicates(hourlyWatermark.getPredicate(this, lwmValue, lwmOperator, Predicate.PredicateType.LWM)); this.addPredicates(hourlyWatermark.getPredicate(this, hwmValue, hwmOperator, Predicate.PredicateType.HWM)); } } }
java
private void setRangePredicates(String watermarkColumn, WatermarkType watermarkType, long lwmValue, long hwmValue) { log.debug("Getting range predicates"); String lwmOperator = partition.isLowWatermarkInclusive() ? ">=" : ">"; String hwmOperator = (partition.isLastPartition() || partition.isHighWatermarkInclusive()) ? "<=" : "<"; WatermarkPredicate watermark = new WatermarkPredicate(watermarkColumn, watermarkType); this.addPredicates(watermark.getPredicate(this, lwmValue, lwmOperator, Predicate.PredicateType.LWM)); this.addPredicates(watermark.getPredicate(this, hwmValue, hwmOperator, Predicate.PredicateType.HWM)); if (Boolean.valueOf(this.workUnitState.getProp(ConfigurationKeys.SOURCE_QUERYBASED_IS_HOURLY_EXTRACT))) { String hourColumn = this.workUnitState.getProp(ConfigurationKeys.SOURCE_QUERYBASED_HOUR_COLUMN); if (StringUtils.isNotBlank(hourColumn)) { WatermarkPredicate hourlyWatermark = new WatermarkPredicate(hourColumn, WatermarkType.HOUR); this.addPredicates(hourlyWatermark.getPredicate(this, lwmValue, lwmOperator, Predicate.PredicateType.LWM)); this.addPredicates(hourlyWatermark.getPredicate(this, hwmValue, hwmOperator, Predicate.PredicateType.HWM)); } } }
[ "private", "void", "setRangePredicates", "(", "String", "watermarkColumn", ",", "WatermarkType", "watermarkType", ",", "long", "lwmValue", ",", "long", "hwmValue", ")", "{", "log", ".", "debug", "(", "\"Getting range predicates\"", ")", ";", "String", "lwmOperator", "=", "partition", ".", "isLowWatermarkInclusive", "(", ")", "?", "\">=\"", ":", "\">\"", ";", "String", "hwmOperator", "=", "(", "partition", ".", "isLastPartition", "(", ")", "||", "partition", ".", "isHighWatermarkInclusive", "(", ")", ")", "?", "\"<=\"", ":", "\"<\"", ";", "WatermarkPredicate", "watermark", "=", "new", "WatermarkPredicate", "(", "watermarkColumn", ",", "watermarkType", ")", ";", "this", ".", "addPredicates", "(", "watermark", ".", "getPredicate", "(", "this", ",", "lwmValue", ",", "lwmOperator", ",", "Predicate", ".", "PredicateType", ".", "LWM", ")", ")", ";", "this", ".", "addPredicates", "(", "watermark", ".", "getPredicate", "(", "this", ",", "hwmValue", ",", "hwmOperator", ",", "Predicate", ".", "PredicateType", ".", "HWM", ")", ")", ";", "if", "(", "Boolean", ".", "valueOf", "(", "this", ".", "workUnitState", ".", "getProp", "(", "ConfigurationKeys", ".", "SOURCE_QUERYBASED_IS_HOURLY_EXTRACT", ")", ")", ")", "{", "String", "hourColumn", "=", "this", ".", "workUnitState", ".", "getProp", "(", "ConfigurationKeys", ".", "SOURCE_QUERYBASED_HOUR_COLUMN", ")", ";", "if", "(", "StringUtils", ".", "isNotBlank", "(", "hourColumn", ")", ")", "{", "WatermarkPredicate", "hourlyWatermark", "=", "new", "WatermarkPredicate", "(", "hourColumn", ",", "WatermarkType", ".", "HOUR", ")", ";", "this", ".", "addPredicates", "(", "hourlyWatermark", ".", "getPredicate", "(", "this", ",", "lwmValue", ",", "lwmOperator", ",", "Predicate", ".", "PredicateType", ".", "LWM", ")", ")", ";", "this", ".", "addPredicates", "(", "hourlyWatermark", ".", "getPredicate", "(", "this", ",", "hwmValue", ",", "hwmOperator", ",", "Predicate", ".", "PredicateType", ".", "HWM", ")", ")", ";", "}", "}", "}" ]
range predicates for watermark column and transaction columns. @param watermarkColumn name of the column used as watermark @param watermarkType watermark type @param lwmValue estimated low watermark value @param hwmValue estimated high watermark value
[ "range", "predicates", "for", "watermark", "column", "and", "transaction", "columns", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-core/src/main/java/org/apache/gobblin/source/extractor/extract/QueryBasedExtractor.java#L411-L428
25,999
apache/incubator-gobblin
gobblin-compaction/src/main/java/org/apache/gobblin/compaction/dataset/Dataset.java
Dataset.modifyDatasetForRecompact
public void modifyDatasetForRecompact(State recompactState) { if (!this.jobProps().getPropAsBoolean(MRCompactor.COMPACTION_RECOMPACT_ALL_DATA, MRCompactor.DEFAULT_COMPACTION_RECOMPACT_ALL_DATA)) { this.overwriteInputPath(this.outputLatePath); this.cleanAdditionalInputPath(); } else { this.overwriteInputPath(this.outputPath); this.overwriteInputLatePath(this.outputLatePath); this.addAdditionalInputPath(this.outputLatePath); } this.setJobProps(recompactState); this.resetNeedToRecompact(); }
java
public void modifyDatasetForRecompact(State recompactState) { if (!this.jobProps().getPropAsBoolean(MRCompactor.COMPACTION_RECOMPACT_ALL_DATA, MRCompactor.DEFAULT_COMPACTION_RECOMPACT_ALL_DATA)) { this.overwriteInputPath(this.outputLatePath); this.cleanAdditionalInputPath(); } else { this.overwriteInputPath(this.outputPath); this.overwriteInputLatePath(this.outputLatePath); this.addAdditionalInputPath(this.outputLatePath); } this.setJobProps(recompactState); this.resetNeedToRecompact(); }
[ "public", "void", "modifyDatasetForRecompact", "(", "State", "recompactState", ")", "{", "if", "(", "!", "this", ".", "jobProps", "(", ")", ".", "getPropAsBoolean", "(", "MRCompactor", ".", "COMPACTION_RECOMPACT_ALL_DATA", ",", "MRCompactor", ".", "DEFAULT_COMPACTION_RECOMPACT_ALL_DATA", ")", ")", "{", "this", ".", "overwriteInputPath", "(", "this", ".", "outputLatePath", ")", ";", "this", ".", "cleanAdditionalInputPath", "(", ")", ";", "}", "else", "{", "this", ".", "overwriteInputPath", "(", "this", ".", "outputPath", ")", ";", "this", ".", "overwriteInputLatePath", "(", "this", ".", "outputLatePath", ")", ";", "this", ".", "addAdditionalInputPath", "(", "this", ".", "outputLatePath", ")", ";", "}", "this", ".", "setJobProps", "(", "recompactState", ")", ";", "this", ".", "resetNeedToRecompact", "(", ")", ";", "}" ]
Modify an existing dataset to recompact from its ouput path.
[ "Modify", "an", "existing", "dataset", "to", "recompact", "from", "its", "ouput", "path", "." ]
f029b4c0fea0fe4aa62f36dda2512344ff708bae
https://github.com/apache/incubator-gobblin/blob/f029b4c0fea0fe4aa62f36dda2512344ff708bae/gobblin-compaction/src/main/java/org/apache/gobblin/compaction/dataset/Dataset.java#L354-L366