code
stringlengths
73
34.1k
label
stringclasses
1 value
// Cancels every animator currently driving this transition (iterating backwards so
// animators removed from mCurrentAnimators during cancel() don't shift the index),
// then notifies all registered TransitionListeners. The listener list is cloned
// first so a callback may add/remove listeners without a ConcurrentModificationException.
protected void cancel() { int numAnimators = mCurrentAnimators.size(); for (int i = numAnimators - 1; i >= 0; i--) { Animator animator = mCurrentAnimators.get(i); animator.cancel(); } if (mListeners != null && mListeners.size() > 0) { ArrayList<TransitionListener> tmpListeners = (ArrayList<TransitionListener>) mListeners.clone(); int numListeners = tmpListeners.size(); for (int i = 0; i < numListeners; ++i) { tmpListeners.get(i).onTransitionCancel(this); } } }
java
/**
 * Adds a point to this batch, stamping the batch-wide tags onto it.
 *
 * @param point the point to add; its tag map is extended with this batch's tags
 * @return this batch, for fluent chaining
 */
public BatchPoints point(final Point point) {
    // Record the point and apply the batch-wide tags; the two operations
    // are independent of each other.
    this.points.add(point);
    point.getTags().putAll(this.tags);
    return this;
}
java
/**
 * Renders every point of this batch in InfluxDB line protocol, one point
 * per line, each line terminated by a newline character.
 *
 * @return the concatenated line-protocol representation of all points
 */
public String lineProtocol() {
    final StringBuilder builder = new StringBuilder();
    for (final Point p : this.points) {
        builder.append(p.lineProtocol(this.precision)).append('\n');
    }
    return builder.toString();
}
java
/**
 * Reports whether another batch can be merged into this one. Two batches
 * are mergeable only when they target the same database, retention policy,
 * tag set and consistency level.
 *
 * @param that the candidate batch (must not be null)
 * @return true when the batches are compatible
 */
public boolean isMergeAbleWith(final BatchPoints that) {
    if (!Objects.equals(database, that.database)) {
        return false;
    }
    if (!Objects.equals(retentionPolicy, that.retentionPolicy)) {
        return false;
    }
    if (!Objects.equals(tags, that.tags)) {
        return false;
    }
    return consistency == that.consistency;
}
java
/**
 * Absorbs the points of another batch into this one, but only when the two
 * batches are compatible (see {@code isMergeAbleWith}).
 *
 * @param that the batch to merge in
 * @return true when the merge happened, false when the batches were incompatible
 */
public boolean mergeIn(final BatchPoints that) {
    final boolean compatible = isMergeAbleWith(that);
    if (compatible) {
        this.points.addAll(that.points);
    }
    return compatible;
}
java
// Lazily parses a MessagePack stream into QueryResults. Returns an Iterable whose
// iterator unpacks one QueryResult per next() call; hasNext() delegates to the
// unpacker and wraps IOExceptions in InfluxDBException.
// NOTE(review): next() does not check hasNext() first, and all iterators returned
// by this Iterable share the single unpacker, so the Iterable is effectively
// single-pass — confirm callers iterate it only once.
public Iterable<QueryResult> traverse(final InputStream is) { MessageUnpacker unpacker = MessagePack.newDefaultUnpacker(is); return () -> { return new Iterator<QueryResult>() { @Override public boolean hasNext() { try { return unpacker.hasNext(); } catch (IOException e) { throw new InfluxDBException(e); } } @Override public QueryResult next() { return parse(unpacker); } }; }; }
java
/**
 * Parses a single QueryResult from a MessagePack-encoded input stream.
 *
 * @param is the stream carrying MessagePack data
 * @return the decoded QueryResult
 */
public QueryResult parse(final InputStream is) {
    // Wrap the stream in an unpacker and delegate to the unpacker-based overload.
    return parse(MessagePack.newDefaultUnpacker(is));
}
java
/**
 * Validates that a number is non-null and strictly positive.
 *
 * @param number the value to check; compared as a double
 * @param name   the parameter name used in the error message
 * @throws IllegalArgumentException when the value is null, zero or negative
 */
public static void checkPositiveNumber(final Number number, final String name) throws IllegalArgumentException {
    final boolean invalid = number == null || number.doubleValue() <= 0;
    if (invalid) {
        throw new IllegalArgumentException("Expecting a positive number for " + name);
    }
}
java
/**
 * Validates that a number is non-null and not negative (zero is allowed).
 *
 * @param number the value to check; compared as a double
 * @param name   the parameter name used in the error message
 * @throws IllegalArgumentException when the value is null or negative
 */
public static void checkNotNegativeNumber(final Number number, final String name) throws IllegalArgumentException {
    final boolean invalid = number == null || number.doubleValue() < 0;
    if (invalid) {
        throw new IllegalArgumentException("Expecting a positive or zero number for " + name);
    }
}
java
/**
 * Validates an InfluxDB duration literal: either "inf" or one-or-more
 * {@code <digits><unit>} groups with a unit in {w, d, m, h, s}
 * (e.g. "1d", "2w3d").
 *
 * @param duration the duration literal to check
 * @param name     the parameter name used in the error message
 * @throws IllegalArgumentException when the literal does not match
 */
public static void checkDuration(final String duration, final String name) throws IllegalArgumentException {
    // String.matches anchors the pattern to the whole input.
    final boolean valid = duration.matches("(\\d+[wdmhs])+|inf");
    if (!valid) {
        throw new IllegalArgumentException("Invalid InfluxDB duration: " + duration + " for " + name);
    }
}
java
/**
 * Returns a copy of these options with the given jitter duration.
 * Copy-on-write: the receiver is left unmodified.
 *
 * @param jitterDuration the new jitter duration
 * @return a new BatchOptions instance carrying the change
 */
public BatchOptions jitterDuration(final int jitterDuration) {
    final BatchOptions copy = getClone();
    copy.jitterDuration = jitterDuration;
    return copy;
}
java
/**
 * Returns a copy of these options with the given buffer limit.
 * Copy-on-write: the receiver is left unmodified.
 *
 * @param bufferLimit the new buffer limit
 * @return a new BatchOptions instance carrying the change
 */
public BatchOptions bufferLimit(final int bufferLimit) {
    final BatchOptions copy = getClone();
    copy.bufferLimit = bufferLimit;
    return copy;
}
java
/**
 * Builds the Retrofit call for a query, choosing POST vs GET and the
 * target database.
 *
 * @param query the query to execute
 * @return the prepared (not yet executed) call
 */
private Call<QueryResult> callQuery(final Query query) {
    // Fall back to the client-wide database when the query carries none.
    String db = query.getDatabase();
    if (db == null) {
        db = this.database;
    }
    if (query instanceof BoundParameterQuery) {
        // Bound-parameter queries always POST, with the parameters as JSON.
        final BoundParameterQuery bpq = (BoundParameterQuery) query;
        return this.influxDBService.postQuery(db, query.getCommandWithUrlEncoded(),
                bpq.getParameterJsonWithUrlEncoded());
    }
    if (query.requiresPost()) {
        return this.influxDBService.postQuery(db, query.getCommandWithUrlEncoded());
    }
    return this.influxDBService.query(db, query.getCommandWithUrlEncoded());
}
java
/**
 * Decodes a MessagePack error body and turns its "error" entry into a
 * typed InfluxDBException. Any decode failure yields a generic
 * InfluxDBException wrapping the cause.
 *
 * @param messagePackErrorBody the MessagePack-encoded error response
 * @return the exception derived from the error message
 */
public static InfluxDBException buildExceptionForErrorState(final InputStream messagePackErrorBody) {
    try {
        // The error body is a MessagePack map; extract its "error" entry.
        final ImmutableMapValue mapVal = (ImmutableMapValue)
                MessagePack.newDefaultUnpacker(messagePackErrorBody).unpackValue();
        final String message =
                mapVal.map().get(new ImmutableStringValueImpl("error")).toString();
        return InfluxDBException.buildExceptionFromErrorMessage(message);
    } catch (Exception e) {
        return new InfluxDBException(e);
    }
}
java
/**
 * Enqueues a batch entry, blocking when the queue is bounded and full, and
 * triggers an asynchronous flush once the configured number of actions has
 * accumulated.
 *
 * @param batchEntry the entry to enqueue
 * @throws RuntimeException wrapping an InterruptedException when the
 *         blocking put is interrupted; the thread's interrupt status is
 *         restored before the exception propagates
 */
void put(final AbstractBatchEntry batchEntry) {
    try {
        this.queue.put(batchEntry);
    } catch (InterruptedException e) {
        // Fix: restore the interrupt status before rethrowing so callers
        // (and thread pools) can still observe the interruption.
        Thread.currentThread().interrupt();
        throw new RuntimeException(e);
    }
    if (this.queue.size() >= this.actions) {
        this.scheduler.submit(new Runnable() {
            @Override
            public void run() {
                write();
            }
        });
    }
}
java
/**
 * Creates a point Builder whose measurement name is derived from an
 * annotated POJO class.
 *
 * @param clazz the POJO class; must carry the Measurement annotation
 * @return a Builder initialised with the class's measurement name
 * @throws NullPointerException when clazz is null
 */
public static Builder measurementByPOJO(final Class<?> clazz) {
    Objects.requireNonNull(clazz, "clazz");
    // Fail fast when the annotation is missing.
    throwExceptionIfMissingAnnotation(clazz, Measurement.class);
    return new Builder(findMeasurementName(clazz));
}
java
/**
 * Replays the recorded view commands onto a (re)attached view, skipping
 * the work entirely when no commands were ever recorded.
 *
 * @param view         the view to restore
 * @param currentState the commands the view has already seen
 */
protected void restoreState(View view, Set<ViewCommand<View>> currentState) {
    if (!mViewCommands.isEmpty()) {
        mViewCommands.reapply(view, currentState);
    }
}
java
// Attaches a view to this view state. Rejects null; ignores views that are already
// attached. While restoring, the view is tracked in mInRestoreState so callbacks can
// query isInRestoreState(); the previously saved command set for this view (empty
// when none) is replayed via restoreState, after which the saved state and the
// restore marker are cleared. The ordering of these steps is significant.
public void attachView(View view) { if (view == null) { throw new IllegalArgumentException("Mvp view must be not null"); } boolean isViewAdded = mViews.add(view); if (!isViewAdded) { return; } mInRestoreState.add(view); Set<ViewCommand<View>> currentState = mViewStates.get(view); currentState = currentState == null ? Collections.<ViewCommand<View>>emptySet() : currentState; restoreState(view, currentState); mViewStates.remove(view); mInRestoreState.remove(view); }
java
/**
 * Registers a presenter under the given tag, replacing any presenter
 * previously stored under the same tag.
 *
 * @param tag      the storage key
 * @param instance the presenter to store
 */
public <T extends MvpPresenter> void add(String tag, T instance) {
    mPresenters.put(tag, instance);
}
java
@SuppressWarnings("unused") public boolean isInRestoreState(View view) { //noinspection SimplifiableIfStatement if (mViewState != null) { return mViewState.isInRestoreState(view); } return false; }
java
/**
 * Installs the view state. The same object is stored twice: once as the
 * view state and once cast to the view type, because the generated view
 * state also implements the view interface (hence the unchecked casts).
 *
 * @param viewState the view state to install
 */
@SuppressWarnings({"unchecked", "unused"})
public void setViewState(MvpViewState<View> viewState) {
    // The two assignments are independent; order does not matter.
    mViewState = (MvpViewState) viewState;
    mViewStateAsView = (View) viewState;
}
java
/**
 * Probes (once) whether the annotation-processor-generated MoxyReflector
 * class is on the classpath, caching the answer in a static field.
 *
 * @return true when MoxyReflector can be instantiated
 */
private static boolean hasMoxyReflector() {
    if (hasMoxyReflector == null) {
        // Instantiating the class is the probe; a NoClassDefFoundError
        // means annotation processing did not run for this build.
        try {
            new MoxyReflector();
            hasMoxyReflector = true;
        } catch (NoClassDefFoundError error) {
            hasMoxyReflector = false;
        }
    }
    return hasMoxyReflector;
}
java
// Saves this delegate's state. A root delegate (no parent) nests its data inside a
// dedicated child bundle stored under MOXY_DELEGATE_TAGS_KEY; note the parameter is
// deliberately reassigned to that child bundle so the writes below land inside it.
// Child delegates write into the bundle they are handed. Finally the call is
// propagated depth-first to all child delegates.
public void onSaveInstanceState(Bundle outState) { if (mParentDelegate == null) { Bundle moxyDelegateBundle = new Bundle(); outState.putBundle(MOXY_DELEGATE_TAGS_KEY, moxyDelegateBundle); outState = moxyDelegateBundle; } outState.putAll(mBundle); outState.putString(mKeyTag, mDelegateTag); for (MvpDelegate childDelegate : mChildDelegates) { childDelegate.onSaveInstanceState(outState); } }
java
// Builds, for each presenters-container type, the inheritance chain restricted to
// other containers in the input list. Pass 1: for every container, walk up its
// superclass chain and record the nearest ancestor that is itself in the list
// (null when none). Pass 2: for every container, follow that parent map to collect
// [self, parent, grandparent, ...] into a TreeMap sorted by TYPE_ELEMENT_COMPARATOR.
private static SortedMap<TypeElement, List<TypeElement>> getPresenterBinders(List<TypeElement> presentersContainers) { Map<TypeElement, TypeElement> extendingMap = new HashMap<>(); for (TypeElement presentersContainer : presentersContainers) { TypeMirror superclass = presentersContainer.getSuperclass(); TypeElement parent = null; while (superclass.getKind() == TypeKind.DECLARED) { TypeElement superclassElement = (TypeElement) ((DeclaredType) superclass).asElement(); if (presentersContainers.contains(superclassElement)) { parent = superclassElement; break; } superclass = superclassElement.getSuperclass(); } extendingMap.put(presentersContainer, parent); } // TreeMap for sorting SortedMap<TypeElement, List<TypeElement>> elementListMap = new TreeMap<>(TYPE_ELEMENT_COMPARATOR); for (TypeElement presentersContainer : presentersContainers) { ArrayList<TypeElement> typeElements = new ArrayList<>(); typeElements.add(presentersContainer); TypeElement key = presentersContainer; while ((key = extendingMap.get(key)) != null) { typeElements.add(key); } elementListMap.put(presentersContainer, typeElements); } return elementListMap; }
java
/**
 * Records a bidirectional link between a presenter and a delegate tag:
 * presenter -> tags it is bound to (mConnections) and
 * tag -> presenters bound under it (mTags). Missing sets are created lazily.
 *
 * @param presenter   the presenter being injected
 * @param delegateTag the delegate tag it is injected under
 */
public void injectPresenter(MvpPresenter<?> presenter, String delegateTag) {
    Set<String> tagsForPresenter = mConnections.get(presenter);
    if (tagsForPresenter == null) {
        tagsForPresenter = new HashSet<>();
        mConnections.put(presenter, tagsForPresenter);
    }
    tagsForPresenter.add(delegateTag);
    Set<MvpPresenter> presentersForTag = mTags.get(delegateTag);
    if (presentersForTag == null) {
        presentersForTag = new HashSet<>();
        mTags.put(delegateTag, presentersForTag);
    }
    presentersForTag.add(presenter);
}
java
// Unlinks a presenter from a delegate tag. Removes the presenter from the tag's set
// (dropping the tag entry when it empties), then removes from the presenter's tag
// set every tag that starts with delegateTag — note the prefix match, which also
// clears child-delegate tags, not just the exact tag. Returns true when the
// presenter has no remaining tags (and has been removed from mConnections),
// i.e. the caller may destroy it.
public boolean rejectPresenter(MvpPresenter<?> presenter, String delegateTag) { Set<MvpPresenter> presenters = mTags.get(delegateTag); if (presenters != null) { presenters.remove(presenter); } if (presenters == null || presenters.isEmpty()) { mTags.remove(delegateTag); } Set<String> delegateTags = mConnections.get(presenter); if (delegateTags == null) { mConnections.remove(presenter); return true; } Iterator<String> tagsIterator = delegateTags.iterator(); while (tagsIterator.hasNext()) { String tag = tagsIterator.next(); if (tag.startsWith(delegateTag)) { tagsIterator.remove(); } } boolean noTags = delegateTags.isEmpty(); if (noTags) { mConnections.remove(presenter); } return noTags; }
java
// Binds the current thread to this CPU lock. Rejects double-binding while the
// previously assigned thread is still alive. When assertions are enabled, records
// the binding call site for diagnostics. Whole-core requests are delegated to the
// lock inventory; otherwise (for a real cpuId >= 0) the lock is marked bound to
// the current thread. In either case, a non-negative cpuId is finally applied as
// the thread's CPU affinity mask.
// NOTE(review): on the wholeCore path, bound/assignedThread are not set here —
// presumably bindWholeCore handles that; confirm.
public void bind(boolean wholeCore) { if (bound && assignedThread != null && assignedThread.isAlive()) throw new IllegalStateException("cpu " + cpuId + " already bound to " + assignedThread); if (areAssertionsEnabled()) boundHere = new Throwable("Bound here"); if (wholeCore) { lockInventory.bindWholeCore(cpuId); } else if (cpuId >= 0) { bound = true; assignedThread = Thread.currentThread(); LOGGER.info("Assigning cpu {} to {}", cpuId, assignedThread); } if (cpuId >= 0) { BitSet affinity = new BitSet(); affinity.set(cpuId, true); Affinity.setAffinity(affinity); } }
java
/**
 * Renders a BitSet as the concatenated hex strings of its 64-bit words,
 * least-significant word first (the order {@link BitSet#toLongArray} uses).
 * Each word is formatted by {@link Long#toHexString} (no padding).
 *
 * @param set the bit set to render
 * @return the concatenated hex representation; empty for an empty set
 */
public static String toHexString(final BitSet set) {
    final StringBuilder hex = new StringBuilder();
    for (final long word : set.toLongArray()) {
        hex.append(Long.toHexString(word));
    }
    return hex.toString();
}
java
// Computes the number of samples in each chunk of a track. The fragmenter supplies
// 1-based chunk start sample numbers; each chunk size is the distance to the next
// start (the final chunk runs to the end of the sample list). The trailing assert
// cross-checks the chunk sizes against the builder's own sample list for the track.
int[] getChunkSizes(Track track) { long[] referenceChunkStarts = fragmenter.sampleNumbers(track); int[] chunkSizes = new int[referenceChunkStarts.length]; for (int i = 0; i < referenceChunkStarts.length; i++) { long start = referenceChunkStarts[i] - 1; long end; if (referenceChunkStarts.length == i + 1) { end = track.getSamples().size(); } else { end = referenceChunkStarts[i + 1] - 1; } chunkSizes[i] = l2i(end - start); } assert DefaultMp4Builder.this.track2Sample.get(track).size() == sum(chunkSizes) : "The number of samples and the sum of all chunk lengths must be equal"; return chunkSizes; }
java
// Recursively dumps the ISO-BMFF box tree between [start, end) of the channel:
// reads each 8-byte header (uint32 size + 4cc type), prints type/offset/size
// indented by nesting level, and recurses into known container box types.
// NOTE(review): when end <= 0 it is reset to start + fc.size(), which overshoots
// for a non-zero start — fc.size() alone looks intended; confirm before relying
// on the end bound. Also assumes size >= 8 (no largesize/size==0 handling here).
private void print(FileChannel fc, int level, long start, long end) throws IOException { fc.position(start); if(end <= 0) { end = start + fc.size(); System.out.println("Setting END to " + end); } while (end - fc.position() > 8) { long begin = fc.position(); ByteBuffer bb = ByteBuffer.allocate(8); fc.read(bb); bb.rewind(); long size = IsoTypeReader.readUInt32(bb); String type = IsoTypeReader.read4cc(bb); long fin = begin + size; // indent by the required number of spaces for (int i = 0; i < level; i++) { System.out.print(" "); } System.out.println(type + "@" + (begin) + " size: " + size); if (containers.contains(type)) { print(fc, level + 1, begin + 8, fin); if(fc.position() != fin) { System.out.println("End of container contents at " + fc.position()); System.out.println(" FIN = " + fin); } } fc.position(fin); } }
java
// Parses one ISO-BMFF box from the channel: reads the 8-byte header (uint32 size +
// 4cc type), rejects implausible sizes 2..7, handles the 64-bit largesize form
// (size == 1 -> uint64 follows, content = size - 16) and rejects size == 0
// ("till end of file" unsupported). For 'uuid' boxes the 16-byte usertype is read
// next and subtracted from the content size. Types in skippedTypes become SkipBox;
// everything else goes through createBox. Finally the box parses its own content.
// NOTE(review): the single-read calls after enlarging the header limit assume the
// channel delivers all requested bytes in one read, unlike the initial loop — confirm.
public ParsableBox parseBox(ReadableByteChannel byteChannel, String parentType) throws IOException { header.get().rewind().limit(8); int bytesRead = 0; int b; while ((b = byteChannel.read(header.get())) + bytesRead < 8) { if (b < 0) { throw new EOFException(); } else { bytesRead += b; } } header.get().rewind(); long size = IsoTypeReader.readUInt32(header.get()); // do plausibility check if (size < 8 && size > 1) { LOG.error("Plausibility check failed: size < 8 (size = {}). Stop parsing!", size); return null; } String type = IsoTypeReader.read4cc(header.get()); //System.err.println(type); byte[] usertype = null; long contentSize; if (size == 1) { header.get().limit(16); byteChannel.read(header.get()); header.get().position(8); size = IsoTypeReader.readUInt64(header.get()); contentSize = size - 16; } else if (size == 0) { throw new RuntimeException("box size of zero means 'till end of file. That is not yet supported"); } else { contentSize = size - 8; } if (UserBox.TYPE.equals(type)) { header.get().limit(header.get().limit() + 16); byteChannel.read(header.get()); usertype = new byte[16]; for (int i = header.get().position() - 16; i < header.get().position(); i++) { usertype[i - (header.get().position() - 16)] = header.get().get(i); } contentSize -= 16; } ParsableBox parsableBox = null; if( skippedTypes != null && skippedTypes.contains(type) ) { LOG.trace("Skipping box {} {} {}", type, usertype, parentType); parsableBox = new SkipBox(type, usertype, parentType); } else { LOG.trace("Creating box {} {} {}", type, usertype, parentType); parsableBox = createBox(type, usertype, parentType); } //LOG.finest("Parsing " + box.getType()); // System.out.println("parsing " + Mp4Arrays.toString(box.getType()) + " " + box.getClass().getName() + " size=" + size); header.get().rewind(); parsableBox.parse(byteChannel, header.get(), contentSize, this); return parsableBox; }
java
/**
 * Collects, for every track in the movie that shares the reference track's
 * handler (content type), that track's sample timestamps — but only for
 * tracks that actually declare sync samples.
 *
 * @param movie the movie whose tracks are scanned
 * @param track the reference track supplying the handler to match
 * @return one timestamp array per matching track, in track order
 */
public static List<long[]> getSyncSamplesTimestamps(Movie movie, Track track) {
    final List<long[]> times = new LinkedList<long[]>();
    for (Track candidate : movie.getTracks()) {
        if (!candidate.getHandler().equals(track.getHandler())) {
            continue;
        }
        long[] syncSamples = candidate.getSyncSamples();
        if (syncSamples != null && syncSamples.length > 0) {
            times.add(getTimes(candidate, movie));
        }
    }
    return times;
}
java
/**
 * Expands run-length encoded composition-time entries (count, offset)
 * into one offset per sample.
 *
 * @param entries the run-length encoded entries
 * @return an array with entry.getOffset() repeated entry.getCount() times,
 *         in entry order
 */
public static int[] blowupCompositionTimes(List<CompositionTimeToSample.Entry> entries) {
    long total = 0;
    for (CompositionTimeToSample.Entry entry : entries) {
        total += entry.getCount();
    }
    // The expanded array is indexed by int, so the total must fit.
    assert total <= Integer.MAX_VALUE;
    final int[] offsets = new int[(int) total];
    int pos = 0;
    for (CompositionTimeToSample.Entry entry : entries) {
        final int offset = entry.getOffset();
        for (long remaining = entry.getCount(); remaining > 0; remaining--) {
            offsets[pos++] = offset;
        }
    }
    return offsets;
}
java
/**
 * Reads a NUL-terminated string from the buffer and decodes it as UTF-8.
 * The buffer position is left just past the terminator; the terminator
 * itself is not part of the result.
 *
 * @param byteBuffer the buffer to read from; must contain a 0 byte
 * @return the decoded string
 */
public static String readString(ByteBuffer byteBuffer) {
    ByteArrayOutputStream collected = new ByteArrayOutputStream();
    // get() sign-extends the byte to int; only the low 8 bits are written back.
    for (int b = byteBuffer.get(); b != 0; b = byteBuffer.get()) {
        collected.write(b);
    }
    return Utf8.convert(collected.toByteArray());
}
java
protected boolean isChunkReady(StreamingTrack streamingTrack, StreamingSample next) { long ts = nextSampleStartTime.get(streamingTrack); long cfst = nextChunkCreateStartTime.get(streamingTrack); return (ts >= cfst + 2 * streamingTrack.getTimescale()); // chunk interleave of 2 seconds }
java
protected boolean isFragmentReady(StreamingTrack streamingTrack, StreamingSample next) { long ts = nextSampleStartTime.get(streamingTrack); long cfst = nextFragmentCreateStartTime.get(streamingTrack); if ((ts > cfst + 3 * streamingTrack.getTimescale())) { // mininum fragment length == 3 seconds SampleFlagsSampleExtension sfExt = next.getSampleExtension(SampleFlagsSampleExtension.class); if (sfExt == null || sfExt.isSyncSample()) { //System.err.println(streamingTrack + " ready at " + ts); // the next sample needs to be a sync sample // when there is no SampleFlagsSampleExtension we assume syncSample == true return true; } } return false; }
java
/**
 * Collects the byte size of every sample in the given range of a track.
 *
 * @param startSample    first sample of the range
 * @param endSample      end of the range
 * @param track          the track the samples belong to
 * @param sequenceNumber unused here; kept for the overridable interface
 * @return one size per sample, in sample order
 */
protected long[] getSampleSizes(long startSample, long endSample, Track track, int sequenceNumber) {
    final List<Sample> range = getSamples(startSample, endSample, track);
    final long[] sizes = new long[range.size()];
    int idx = 0;
    for (Sample sample : range) {
        sizes[idx++] = sample.getSize();
    }
    return sizes;
}
java
// Builds a MovieFragmentBox (moof) for a sample range. The trun's data offset is
// first set to a dummy non-zero value so that moof.getSize() already includes the
// offset field, then overwritten with the real offset (moof size + 8-byte mdat
// header). This two-step dance is required because the size depends on the field
// being present — do not reorder.
protected ParsableBox createMoof(long startSample, long endSample, Track track, int sequenceNumber) { MovieFragmentBox moof = new MovieFragmentBox(); createMfhd(startSample, endSample, track, sequenceNumber, moof); createTraf(startSample, endSample, track, sequenceNumber, moof); TrackRunBox firstTrun = moof.getTrackRunBoxes().get(0); firstTrun.setDataOffset(1); // dummy to make size correct firstTrun.setDataOffset((int) (8 + moof.getSize())); // mdat header + moof size return moof; }
java
protected ParsableBox createMvhd(Movie movie) { MovieHeaderBox mvhd = new MovieHeaderBox(); mvhd.setVersion(1); mvhd.setCreationTime(getDate()); mvhd.setModificationTime(getDate()); mvhd.setDuration(0);//no duration in moov for fragmented movies long movieTimeScale = movie.getTimescale(); mvhd.setTimescale(movieTimeScale); // find the next available trackId long nextTrackId = 0; for (Track track : movie.getTracks()) { nextTrackId = nextTrackId < track.getTrackMetaData().getTrackId() ? track.getTrackMetaData().getTrackId() : nextTrackId; } mvhd.setNextTrackId(++nextTrackId); return mvhd; }
java
// Lazily parses this box's raw content buffer into fields, exactly once
// (synchronized; no-op when content was already consumed). Any bytes left after
// _parseDetails are kept as deadBytes so the box can be re-serialised losslessly.
// The field is nulled to free memory; the local alias keeps the buffer alive for
// the verify() assertion (round-trip check, runs only with -ea).
public synchronized final void parseDetails() { LOG.debug("parsing details of {}", this.getType()); if (content != null) { ByteBuffer content = this.content; isParsed = true; content.rewind(); _parseDetails(content); if (content.remaining() > 0) { deadBytes = content.slice(); } this.content = null; assert verify(content); } }
java
// Computes the box's serialised size: the content size (or the raw buffer limit
// when not yet parsed), plus the 8-byte size|type header, plus 8 more bytes when
// the size needs the 64-bit largesize form, plus 16 bytes of usertype for 'uuid'
// boxes, plus any dead bytes retained from parsing.
// NOTE(review): the largesize test uses the content size before the header is
// added ((1L << 32) - 8 compensates for that) — confirm against the writer.
public long getSize() { long size = isParsed ? getContentSize() : content.limit(); size += (8 + // size|type (size >= ((1L << 32) - 8) ? 8 : 0) + // 32bit - 8 byte size and type (UserBox.TYPE.equals(getType()) ? 16 : 0)); size += (deadBytes == null ? 0 : deadBytes.limit()); return size; }
java
// Round-trip check used from an assert: re-serialises the parsed box (content plus
// dead bytes) into a fresh buffer and compares it byte-for-byte, back to front,
// against the original raw content. Logs the first differing position and hex
// dumps of both buffers on mismatch. Returns true when the buffers are identical.
private boolean verify(ByteBuffer content) { ByteBuffer bb = ByteBuffer.allocate(l2i(getContentSize() + (deadBytes != null ? deadBytes.limit() : 0))); getContent(bb); if (deadBytes != null) { deadBytes.rewind(); while (deadBytes.remaining() > 0) { bb.put(deadBytes); } } content.rewind(); bb.rewind(); if (content.remaining() != bb.remaining()) { LOG.error("{}: remaining differs {} vs. {}", this.getType(), content.remaining(), bb.remaining()); return false; } int p = content.position(); for (int i = content.limit() - 1, j = bb.limit() - 1; i >= p; i--, j--) { byte v1 = content.get(i); byte v2 = bb.get(j); if (v1 != v2) { LOG.error("{}: buffers differ at {}: {}/{}", this.getType(), i, v1, v2); byte[] b1 = new byte[content.remaining()]; byte[] b2 = new byte[bb.remaining()]; content.get(b1); bb.get(b2); LOG.error("original : {}", Hex.encodeHex(b1, 4)); LOG.error("reconstructed : {}", Hex.encodeHex(b2, 4)); return false; } } return true; }
java
/**
 * Enumerates every descriptor tag value in the half-open range [0x6A, 0xFE).
 *
 * @return the tag values in ascending order
 */
static int[] allTags() {
    final int first = 0x6A;
    final int count = 0xFE - first;
    int[] tags = new int[count];
    for (int pos = 0; pos < count; pos++) {
        LOG.trace("pos: {}", pos);
        tags[pos] = first + pos;
    }
    return tags;
}
java
/**
 * Snapshots the name of every tag, in storage order.
 *
 * @return a fresh array of tag names
 */
public String[] getAllTagNames() {
    final int count = tags.size();
    String[] names = new String[count];
    for (int i = 0; i < count; i++) {
        names[i] = tags.elementAt(i).tagName;
    }
    return names;
}
java
/**
 * Returns the first String value stored under the given tag name,
 * or null when the tag is unknown or holds no String value.
 *
 * @param name the tag name
 * @return the first String value, or null
 */
public String getFirstStringValue(String name) {
    for (Object value : getValues(name)) {
        if (value instanceof String) {
            return (String) value;
        }
    }
    return null;
}
java
/**
 * Returns the first Date value stored under the given tag name,
 * or null when the tag is unknown or holds no Date value.
 *
 * @param name the tag name
 * @return the first Date value, or null
 */
public Date getFirstDateValue(String name) {
    for (Object value : getValues(name)) {
        if (value instanceof Date) {
            return (Date) value;
        }
    }
    return null;
}
java
/**
 * Returns the first Long value stored under the given tag name,
 * or null when the tag is unknown or holds no Long value.
 *
 * @param name the tag name
 * @return the first Long value, or null
 */
public Long getFirstLongValue(String name) {
    for (Object value : getValues(name)) {
        if (value instanceof Long) {
            return (Long) value;
        }
    }
    return null;
}
java
/**
 * Returns every value stored under the given tag name, converted to
 * plain objects. Unknown tags yield an empty array rather than null.
 *
 * @param name the tag name
 * @return the tag's values, possibly empty, never null
 */
public Object[] getValues(String name) {
    XtraTag tag = getTagByName(name);
    if (tag == null) {
        return new Object[0];
    }
    final int count = tag.values.size();
    Object[] values = new Object[count];
    for (int i = 0; i < count; i++) {
        values[i] = tag.values.elementAt(i).getValueAsObject();
    }
    return values;
}
java
/**
 * Replaces any existing tag of this name with a fresh tag holding the
 * given string values, in order.
 *
 * @param name   the tag name
 * @param values the string values to store
 */
public void setTagValues(String name, String values[]) {
    removeTag(name);
    XtraTag tag = new XtraTag(name);
    for (String value : values) {
        tag.values.addElement(new XtraValue(value));
    }
    tags.addElement(tag);
}
java
/**
 * Replaces any existing tag of this name with a fresh tag holding a
 * single Date value.
 *
 * @param name the tag name
 * @param date the value to store
 */
public void setTagValue(String name, Date date) {
    removeTag(name);
    XtraTag replacement = new XtraTag(name);
    replacement.values.addElement(new XtraValue(date));
    tags.addElement(replacement);
}
java
/**
 * Replaces any existing tag of this name with a fresh tag holding a
 * single long value.
 *
 * @param name  the tag name
 * @param value the value to store
 */
public void setTagValue(String name, long value) {
    removeTag(name);
    XtraTag replacement = new XtraTag(name);
    replacement.values.addElement(new XtraValue(value));
    tags.addElement(replacement);
}
java
// Expands the run-length encoded sample-to-chunk entries into a per-chunk sample
// count for chunkCount chunks. The entries are iterated in reverse while walking
// the chunks from last to first: each chunk gets the samplesPerChunk of the
// current (reversed) entry, advancing to the next entry when the 1-based chunk
// index reaches that entry's firstChunk; chunk 0 is filled afterwards.
// NOTE(review): assumes entries is non-empty and its firstChunk values are
// consistent with chunkCount — confirm with callers.
public long[] blowup(int chunkCount) { long[] numberOfSamples = new long[chunkCount]; int j = 0; List<SampleToChunkBox.Entry> sampleToChunkEntries = new LinkedList<Entry>(entries); Collections.reverse(sampleToChunkEntries); Iterator<Entry> iterator = sampleToChunkEntries.iterator(); SampleToChunkBox.Entry currentEntry = iterator.next(); for (int i = numberOfSamples.length; i > 1; i--) { numberOfSamples[i - 1] = currentEntry.getSamplesPerChunk(); if (i == currentEntry.getFirstChunk()) { currentEntry = iterator.next(); } } numberOfSamples[0] = currentEntry.getSamplesPerChunk(); return numberOfSamples; }
java
// Expands run-length encoded time-to-sample entries (count, delta) into one delta
// per sample, memoised per entry list through a SoftReference cache so repeated
// calls for the same (equal) list reuse the expanded array. Synchronized to keep
// the cache consistent across threads. The assert guards the long->int cast of
// the total sample count.
public static synchronized long[] blowupTimeToSamples(List<TimeToSampleBox.Entry> entries) { SoftReference<long[]> cacheEntry; if ((cacheEntry = cache.get(entries)) != null) { long[] cacheVal; if ((cacheVal = cacheEntry.get()) != null) { return cacheVal; } } long numOfSamples = 0; for (TimeToSampleBox.Entry entry : entries) { numOfSamples += entry.getCount(); } assert numOfSamples <= Integer.MAX_VALUE; long[] decodingTime = new long[(int) numOfSamples]; int current = 0; for (TimeToSampleBox.Entry entry : entries) { for (int i = 0; i < entry.getCount(); i++) { decodingTime[current++] = entry.getDelta(); } } cache.put(entries, new SoftReference<long[]>(decodingTime)); return decodingTime; }
java
/**
 * Registers all subscriber methods of a listener object: for each event
 * type the listener subscribes to, the subscribers are added to the shared
 * (concurrent) subscriber set, creating that set atomically on first use.
 *
 * @param listener the object whose annotated methods are registered
 */
void register(Object listener) {
    Multimap<Class<?>, Subscriber> listenerMethods = findAllSubscribers(listener);
    for (Map.Entry<Class<?>, Collection<Subscriber>> entry : listenerMethods.asMap().entrySet()) {
        Class<?> eventType = entry.getKey();
        // Fetch, or atomically create, the subscriber set for this event type.
        CopyOnWriteArraySet<Subscriber> eventSubscribers = subscribers.get(eventType);
        if (eventSubscribers == null) {
            CopyOnWriteArraySet<Subscriber> fresh = new CopyOnWriteArraySet<Subscriber>();
            // putIfAbsent returns the set that won a concurrent race, or null
            // when our fresh set was installed.
            CopyOnWriteArraySet<Subscriber> raced = subscribers.putIfAbsent(eventType, fresh);
            eventSubscribers = MoreObjects.firstNonNull(raced, fresh);
        }
        eventSubscribers.addAll(entry.getValue());
    }
}
java
// Deletes the current result row from the table via a parameterised DELETE keyed
// on the primary key values of aktRowNr, compacts resultRowPKs over the deleted
// slot, then classifies the post-delete situation (cases A-F in the embedded
// table) into a UI action code: 1 = show search panel (nothing left), 2 = disable
// "previous", 3 = disable "next", 4 = no button change. On SQL failure returns 0.
// NOTE(review): the source line break after "// 4. " splits a line comment; the
// text is preserved verbatim here.
public int deleteRow() { // build the delete string String deleteString = "DELETE FROM " + tableName + this.generatePKWhere(); PreparedStatement ps = null; // System.out.println("delete string "+deleteString); try { // fill the question marks ps = cConn.prepareStatement(deleteString); ps.clearParameters(); int i; for (int j = 0; j < primaryKeys.length; j++) { ps.setObject(j + 1, resultRowPKs[aktRowNr][j]); } // end of for (int i=0; i<primaryKeys.length; i++) ps.executeUpdate(); } catch (SQLException e) { ZaurusEditor.printStatus("SQL Exception: " + e.getMessage()); return 0; } finally { try { if (ps != null) { ps.close(); } } catch (SQLException e) {} } // delete the corresponding primary key values from resultRowPKs numberOfResult--; for (int i = aktRowNr; i < numberOfResult; i++) { for (int j = 0; j < primaryKeys.length; j++) { resultRowPKs[i][j] = resultRowPKs[i + 1][j]; } } // there are the following outcomes after deleting aktRowNr: /* A B C D E F no rows left J N N N N N one row left - J N J N N deleted row was the last row - J J N N N deleted row was the pre-last - - - - J N first D X + D + * . D X X D D . D X + last X new numberOfResult 0 1 2 1 2 2 old aktRowNr 0 1 2 0 1 0 D - deleted row X - any one row + - one or more rows * - zero or more rows */ // A. return to the search panel and tell 'last row deleted' on the status line // B. show the previous row and disable previous button // C. show the previous row as akt row // D. show akt row and disable next button // E. show akt row and disable next button // F. show akt row // these actions reduce to the following actions for ZaurusEditor: // 1. show search panel // 2. disable previous button // 3. disable next button // 4. 
do nothing // and 1,2,3,4 are the possible return codes int actionCode; if (numberOfResult == 0) { // case A actionCode = 1; ZaurusEditor.printStatus("Last row was deleted."); return actionCode; } else if (numberOfResult == aktRowNr) { // B or C // new aktRow is previous row aktRowNr--; if (aktRowNr == 0) { // B actionCode = 2; } else { // C actionCode = 4; } // end of if (aktRowNr == 0) } else { // D, E, F if (numberOfResult >= 2 && aktRowNr < numberOfResult - 1) { // F actionCode = 4; } else { actionCode = 3; } // end of else } this.showAktRow(); ZaurusEditor.printStatus("Row was deleted."); return actionCode; }
java
public String getPrimaryKeysString() { String result = ""; for (int i = 0; i < primaryKeys.length; i++) { if (result != "") { result += ", "; } result += primaryKeys[i]; } // end of for (int i=0; i<primaryKeys.length; i++) return result; }
java
public void insertNewRow() { // reset all fields for (int i = 0; i < komponente.length; i++) { komponente[i].clearContent(); } // end of for (int i=0; i<komponente.length; i++) // reset the field for the primary keys for (int i = 0; i < primaryKeys.length; i++) { komponente[pkColIndex[i]].setEditable(true); } ZaurusEditor.printStatus("enter a new row for table " + tableName); }
java
public boolean saveChanges() { // the initial settings of the textfields counts with one // so a real change by the user needs as many changes as there are columns // System.out.print("Anderungen in den Feldern: "); // there are changes to the database // memorize all columns which have been changed int[] changedColumns = new int[columns.length]; int countChanged = 0; // build the update string String updateString = ""; for (int i = 0; i < columns.length; i++) { if (komponente[i].hasChanged()) { if (updateString != "") { updateString += ", "; } updateString += columns[i] + "=?"; changedColumns[countChanged++] = i; } } // end of for (int i=0; i<columns.length; i++) if (countChanged > 0) { updateString = "UPDATE " + tableName + " SET " + updateString + this.generatePKWhere(); PreparedStatement ps = null; // System.out.println("update "+updateString); try { // fill the question marks ps = cConn.prepareStatement(updateString); ps.clearParameters(); int i; for (i = 0; i < countChanged; i++) { ps.setObject(i + 1, komponente[changedColumns[i]].getContent()); // System.out.print(" changed feld "+komponente[changedColumns[i]].getContent()); } // end of for (int i=0; i<countChanged; i++) // System.out.println(); for (int j = 0; j < primaryKeys.length; j++) { ps.setObject(i + j + 1, resultRowPKs[aktRowNr][j]); } // end of for (int i=0; i<primaryKeys.length; i++) ps.executeUpdate(); ZaurusEditor.printStatus("changed row was saved to table " + tableName); return true; } catch (SQLException e) { ZaurusEditor.printStatus("SQL Exception: " + e.getMessage()); return false; } finally { try { if (ps != null) { ps.close(); } } catch (SQLException e) {} } } else { // System.out.println("no changes"); return true; } // end of if (changed) }
java
// Inserts the form's values as a new row. First validates that no primary-key
// field is empty (focuses the offending field and reports on the status line),
// then builds a fully parameterised "INSERT INTO table VALUES(?,...,?)" with one
// placeholder per column, binds every component's content in column order and
// executes it. Returns true on success; false on validation or SQL failure. The
// PreparedStatement is closed in the finally block either way.
public boolean saveNewRow() { // check the fields of the primary keys whether one is empty boolean onePKempty = false; int tmp; PreparedStatement ps = null; for (tmp = 0; tmp < primaryKeys.length; tmp++) { if (komponente[pkColIndex[tmp]].getContent().equals("")) { onePKempty = true; break; } } if (onePKempty) { komponente[pkColIndex[tmp]].requestFocus(); ZaurusEditor.printStatus("no value for primary key " + primaryKeys[tmp]); return false; } // end of if (onePKempty) // build the insert string String insertString = "INSERT INTO " + tableName + " VALUES("; for (int j = 0; j < columns.length; j++) { if (j > 0) { insertString += ", "; } insertString += "?"; } // end of for (int i=0; i<columns.length; i++) insertString += ")"; // System.out.println("insert string "+insertString); try { // fill the question marks ps = cConn.prepareStatement(insertString); ps.clearParameters(); int i; for (i = 0; i < columns.length; i++) { ps.setObject(i + 1, komponente[i].getContent()); } ps.executeUpdate(); ZaurusEditor.printStatus("new row was saved to table " + tableName); return true; } catch (SQLException e) { ZaurusEditor.printStatus("SQL Exception: " + e.getMessage()); return false; } finally { try { if (ps != null) { ps.close(); } } catch (SQLException e) {} } }
java
// Searches the table for rows matching the given words (see generateWhere for the
// matching options), collects the primary key values of every hit into
// resultRowPKs, prepares a reusable "SELECT * ... WHERE <pk>" statement for
// fetching individual result rows later, and — when there are hits — locks the
// primary-key editors and shows the first row. Returns the number of hits, or -1
// on SQL failure.
// NOTE(review): the WHERE clause concatenates user-entered words into the SQL
// (see generateWhere) — injection risk; confirm whether input is trusted here.
// Also note rs is only closed on the success path.
public int searchRows(String[] words, boolean allWords, boolean ignoreCase, boolean noMatchWhole) { // System.out.print("search in " + tableName + " for: "); // for (int i=0; i < words.length; i++) { // System.out.print(words[i]+", "); // } // System.out.println("allWords = "+allWords+", ignoreCase = "+ignoreCase+", noMatchWhole= "+noMatchWhole); String where = this.generateWhere(words, allWords, ignoreCase, noMatchWhole); Vector temp = new Vector(20); Statement stmt = null; try { stmt = cConn.createStatement(); ResultSet rs = stmt.executeQuery("SELECT " + this.getPrimaryKeysString() + " FROM " + tableName + where); while (rs.next()) { Object[] pkValues = new Object[primaryKeys.length]; for (int i = 0; i < primaryKeys.length; i++) { pkValues[i] = rs.getObject(pkColIndex[i] + 1); } // end of for (int i=0; i<primaryKeys.length; i++) temp.addElement(pkValues); } rs.close(); } catch (SQLException e) { ZaurusEditor.printStatus("SQL Exception: " + e.getMessage()); return -1; } finally { try { if (stmt != null) { stmt.close(); } } catch (SQLException e) {} } resultRowPKs = new Object[temp.size()][primaryKeys.length]; numberOfResult = temp.size(); for (int i = 0; i < primaryKeys.length; i++) { for (int j = 0; j < temp.size(); j++) { resultRowPKs[j][i] = ((Object[]) temp.elementAt(j))[i]; } // end of for (int j=0; j<temp.size(); j++) } // end of for (int i=0; i<primaryKeys.length; i++) // prepare statement for fetching the result rows for later use String stmtString = "SELECT * FROM " + tableName; try { pStmt = cConn.prepareStatement(stmtString + this.generatePKWhere()); } catch (SQLException e) { System.out.println("SQL Exception: " + e.getMessage()); } // end of try-catch // System.out.println("prepared statement: "+stmtString); if (numberOfResult > 0) { this.disablePKFields(); aktRowNr = 0; this.showAktRow(); } // end of if (numberOfResult > 0) // System.out.println("number of rows: "+numberOfResult); return numberOfResult; }
java
private void disablePKFields() { for (int i = 0; i < primaryKeys.length; i++) { komponente[pkColIndex[i]].setEditable(false); } // end of for (int i=0; i<columns.length; i++) }
java
// Populates a ZaurusChoice widget with every row of the given table, ordered by
// the given column: the visible label is all columns of the row joined with "; ",
// the stored value is the ordering column's value. No-op when there is no
// connection; SQL failures are printed to stdout. The Statement is closed in the
// finally block (note: rs is only closed on the success path).
private void fillZChoice(ZaurusChoice zc, String tab, String col) { Statement stmt = null; try { if (cConn == null) { return; } stmt = cConn.createStatement(); ResultSet rs = stmt.executeQuery("SELECT * FROM " + tab + " ORDER BY " + col); ResultSetMetaData rsmd = rs.getMetaData(); int numberOfColumns = rsmd.getColumnCount(); int colIndex = rs.findColumn(col); while (rs.next()) { String tmp = ""; for (int i = 1; i <= numberOfColumns; i++) { if (i > 1) { tmp += "; "; } tmp += rs.getString(i); } // end of for (int i=1; i<=numberOfColumns; i++) zc.add(tmp, rs.getString(colIndex)); } rs.close(); } catch (SQLException e) { System.out.println("SQL Exception: " + e.getMessage()); } finally { try { if (stmt != null) { stmt.close(); } } catch (SQLException e) {} } }
java
// Loads the column names and JDBC data types of tableName from database metadata
// into the columns / columnTypes fields. No-op when there is no connection; the
// DatabaseMetaData handle is fetched lazily and cached. SQL failures are reported
// on the editor status line, leaving the fields sized to whatever was read so far.
private void fetchColumns() { Vector temp = new Vector(20); Vector tempType = new Vector(20); try { if (cConn == null) { return; } if (dbmeta == null) { dbmeta = cConn.getMetaData(); } ResultSet colList = dbmeta.getColumns(null, null, tableName, "%"); while (colList.next()) { temp.addElement(colList.getString("COLUMN_NAME")); tempType.addElement(new Short(colList.getShort("DATA_TYPE"))); } colList.close(); } catch (SQLException e) { ZaurusEditor.printStatus("SQL Exception: " + e.getMessage()); } columns = new String[temp.size()]; temp.copyInto(columns); columnTypes = new short[temp.size()]; for (int i = 0; i < columnTypes.length; i++) { columnTypes[i] = ((Short) tempType.elementAt(i)).shortValue(); } }
java
private String generateWhere(String[] words, boolean allWords, boolean ignoreCase, boolean noMatchWhole) { String result = ""; // if all words must match use AND between the different conditions String join; if (allWords) { join = " AND "; } else { join = " OR "; } // end of else for (int wordInd = 0; wordInd < words.length; wordInd++) { String oneCondition = ""; for (int col = 0; col < columns.length; col++) { if (oneCondition != "") { oneCondition += " OR "; } if (ignoreCase) { if (noMatchWhole) { oneCondition += "LOWER(" + columns[col] + ") LIKE '%" + words[wordInd].toLowerCase() + "%'"; } else { oneCondition += "LOWER(" + columns[col] + ") LIKE '" + words[wordInd].toLowerCase() + "'"; } } else { if (noMatchWhole) { oneCondition += columns[col] + " LIKE '%" + words[wordInd] + "%'"; } else { oneCondition += columns[col] + " LIKE '" + words[wordInd] + "'"; } } } if (result != "") { result += join; } result += "(" + oneCondition + ")"; } if (result != "") { result = " WHERE " + result; } // end of if (result != "") // System.out.println("result: "+result); return result; }
java
private int getColIndex(String name) { for (int i = 0; i < columns.length; i++) { if (name.equals(columns[i])) { return i; } // end of if (name.equals(columns[i])) } // end of for (int i=0; i<columns.length; i++) return -1; }
java
/**
 * Returns the zero-based ordinal position of {@code colName} in table
 * {@code tabName} according to the database metadata, or -1 when the column
 * cannot be found, no connection is available, or an SQL error occurs.
 */
private int getColIndex(String colName, String tabName) {
    int ordPos = 0;
    ResultSet colList = null;
    try {
        if (cConn == null) {
            return -1;
        }
        if (dbmeta == null) {
            dbmeta = cConn.getMetaData();
        }
        colList = dbmeta.getColumns(null, null, tabName, colName);
        // guard against an empty result set instead of reading past its end
        // (the original called next() without checking its return value)
        if (colList.next()) {
            ordPos = colList.getInt("ORDINAL_POSITION");
        }
    } catch (SQLException e) {
        System.out.println("SQL Exception: " + e.getMessage());
    } finally {
        // close the metadata result set even on the error path
        if (colList != null) {
            try {
                colList.close();
            } catch (SQLException e) {}
        }
    }
    return ordPos - 1;
}
java
private int getConstraintIndex(int colIndex) { for (int i = 0; i < imColIndex.length; i++) { for (int j = 0; j < imColIndex[i].length; j++) { if (colIndex == imColIndex[i][j]) { return i; } // end of if (col == imColIndex[i][j]) } // end of for (int j=0; j<imColIndex[i].length; j++) } // end of for (int i=0; i<imColIndex.length; i++) return -1; }
java
/**
 * Fetches the current result row (index aktRowNr) via the prepared statement
 * pStmt and displays its column values in the edit components. Any pending
 * edits are cleared afterwards so the freshly loaded values count as unchanged.
 */
private void showAktRow() {
    try {
        pStmt.clearParameters();
        // bind the primary key values of the current result row
        for (int i = 0; i < primaryKeys.length; i++) {
            pStmt.setObject(i + 1, resultRowPKs[aktRowNr][i]);
        }
        ResultSet rs = pStmt.executeQuery();
        rs.next();
        for (int i = 0; i < columns.length; i++) {
            komponente[i].setContent(rs.getString(i + 1));
        }
        rs.close();
    } catch (SQLException e) {
        ZaurusEditor.printStatus("SQL Exception: " + e.getMessage());
    }
    // forget any recorded changes in the input components
    for (int i = 0; i < columns.length; i++) {
        komponente[i].clearChanges();
    }
}
java
private void voltConvertBinaryLiteralOperandsToBigint() { // Strange that CONCAT is an arithmetic operator. // You could imagine using it for VARBINARY, so // definitely don't convert its operands to BIGINT! assert(opType != OpTypes.CONCAT); for (int i = 0; i < nodes.length; ++i) { Expression e = nodes[i]; ExpressionValue.voltMutateToBigintType(e, this, i); } }
java
public int findColumn(String tableName, String columnName) { // The namedJoinColumnExpressions are ExpressionColumn objects // for columns named in USING conditions. Each range variable // has a possibly empty list of these. If two range variables are // operands of a join with a USING condition, both get the same list // of USING columns. In our semantics the query // select T2.C from T1 join T2 using(C); // selects T2.C. This is not standard behavior, but it seems to // be common to mysql and postgresql. The query // select C from T1 join T2 using(C); // selects the C from T1 or T2, since the using clause says // they will have the same value. In the query // select C from T1 join T2 using(C), T3; // where T3 has a column named C, there is an ambiguity, since // the first join tree (T1 join T2 using(C)) has a column named C and // T3 has another C column. In this case we need the T1.C notation. // The query // select T1.C from T1 join T2 using(C), T3; // will select the C from the first join tree, and // select T3.C from T1 join T2 using(C), T3; // will select the C from the second join tree, which is just T3. // If we don't have a table name and there are some USING columns, // then look into them. If the name is in the USING columns, it // is not in this range variable. The function getColumnExpression // will fetch this using variable in another search. if (namedJoinColumnExpressions != null && tableName == null && namedJoinColumnExpressions.containsKey(columnName)) { return -1; } if (variables != null) { return variables.getIndex(columnName); } else if (columnAliases != null) { return columnAliases.getIndex(columnName); } else { return rangeTable.findColumn(columnName); } }
java
/**
 * Installs the index chosen for this range variable together with the
 * equality conditions on its leading columns.
 *
 * @param exprList equality conditions, one per covered index column
 * @param index    the index to scan
 * @param colCount number of leading index columns covered by exprList
 * @param isJoin   true when the conditions come from an ON clause rather
 *                 than a WHERE clause
 */
void addIndexCondition(Expression[] exprList, Index index, int colCount, boolean isJoin) {
    // VoltDB extension
    if (rangeIndex == index && isJoinIndex && (!isJoin) && (multiColumnCount > 0) && (colCount == 0)) {
        // This is one particular set of conditions which broke the classification of
        // ON and WHERE clauses.
        return;
    }
    // End of VoltDB extension
    rangeIndex = index;
    isJoinIndex = isJoin;
    // every equality condition also bounds the end of the index scan
    for (int i = 0; i < colCount; i++) {
        Expression e = exprList[i];
        indexEndCondition = ExpressionLogical.andExpressions(indexEndCondition, e);
    }
    if (colCount == 1) {
        indexCondition = exprList[0];
    } else {
        // multi-column case: keep the per-column expressions for find-first
        findFirstExpressions = exprList;
        isMultiFindFirst = true;
        multiColumnCount = colCount;
    }
}
java
/**
 * Returns a new schema object name for a system subquery table, placed in
 * the system schema.
 */
public HsqlName getSubqueryTableName() {
    HsqlName subqueryName =
        new HsqlName(this, SqlInvariants.SYSTEM_SUBQUERY, false, SchemaObject.TABLE);
    subqueryName.schema = SqlInvariants.SYSTEM_SCHEMA_HSQLNAME;
    return subqueryName;
}
java
/**
 * Returns the HsqlName for the i-th auto-generated column, reusing the
 * precomputed names for small indexes and creating a fresh "C_<i>" name
 * otherwise.
 */
static public HsqlName getAutoColumnName(int i) {
    return (i < autoColumnNames.length)
            ? autoColumnNames[i]
            : new HsqlName(staticManager, makeAutoColumnName("C_", i), 0, false);
}
java
public String getString(String key) { String value = wrappedBundle.getString(key); if (value.length() < 1) { value = getStringFromFile(key); // For conciseness and sanity, get rid of all \r's so that \n // will definitively be our line breaks. if (value.indexOf('\r') > -1) value = value.replaceAll("\\Q\r\n", "\n") .replaceAll("\\Q\r", "\n"); if (value.length() > 0 && value.charAt(value.length() - 1) == '\n') value = value.substring(0, value.length() - 1); } return RefCapablePropertyResourceBundle.toNativeLs(value); }
java
/**
 * Returns the cached RefCapablePropertyResourceBundle wrapping the given
 * bundle, creating and caching a new wrapper on first use.
 *
 * @throws MissingResourceException when rb is not a PropertyResourceBundle
 */
static private RefCapablePropertyResourceBundle getRef(String baseName,
        ResourceBundle rb, ClassLoader loader) {
    if (!(rb instanceof PropertyResourceBundle))
        throw new MissingResourceException(
                "Found a Resource Bundle, but it is a " + rb.getClass().getName(),
                PropertyResourceBundle.class.getName(), null);
    RefCapablePropertyResourceBundle cached =
            (RefCapablePropertyResourceBundle) allBundles.get(rb);
    if (cached != null) return cached;
    RefCapablePropertyResourceBundle wrapper = new RefCapablePropertyResourceBundle(
            baseName, (PropertyResourceBundle) rb, loader);
    allBundles.put(rb, wrapper);
    return wrapper;
}
java
private static boolean checkPureColumnIndex(Index index, int aggCol, List<AbstractExpression> filterExprs) { boolean found = false; // all left child of filterExprs must be of type TupleValueExpression in equality comparison for (AbstractExpression expr : filterExprs) { if (expr.getExpressionType() != ExpressionType.COMPARE_EQUAL) { return false; } if (!(expr.getLeft() instanceof TupleValueExpression)) { return false; } if (((TupleValueExpression)expr.getLeft()).getColumnIndex() == aggCol) { found = true; } } if (found) { return true; } if (index.getColumns().size() > filterExprs.size()) { List<ColumnRef> indexedColRefs = CatalogUtil.getSortedCatalogItems(index.getColumns(), "index"); if (indexedColRefs.get(filterExprs.size()).getColumn().getIndex() == aggCol) { return true; } } return false; }
java
/**
 * Writes a hashinator config file for the given nonce/host, replacing any
 * existing file. The data is written immediately; the returned Runnable
 * performs the deferred fsync and close and must be run to make the file
 * durable. For truncation snapshots any failure crashes the node instead of
 * throwing. A partially written file is deleted when this method exits
 * abnormally.
 */
public static Runnable writeHashinatorConfig(
        InstanceId instId, String path, String nonce, int hostId,
        HashinatorSnapshotData hashData, boolean isTruncationSnapshot) throws IOException {
    final File file = new VoltFile(path, constructHashinatorConfigFilenameForNonce(nonce, hostId));
    if (file.exists()) {
        if (!file.delete()) {
            if (isTruncationSnapshot) {
                VoltDB.crashLocalVoltDB("Unexpected exception while attempting to delete old hash file for truncation snapshot");
            }
            throw new IOException("Unable to replace existing hashinator config " + file);
        }
    }
    boolean success = false;
    try {
        final FileOutputStream fos = new FileOutputStream(file);
        ByteBuffer fileBuffer = hashData.saveToBuffer(instId);
        fos.getChannel().write(fileBuffer);
        success = true;
        // fsync and close are deferred to the returned Runnable; the stream
        // is intentionally left open until the Runnable executes
        return new Runnable() {
            @Override
            public void run() {
                try {
                    fos.getChannel().force(true);
                } catch (IOException e) {
                    if (isTruncationSnapshot) {
                        VoltDB.crashLocalVoltDB("Unexpected exception while attempting to create hash file for truncation snapshot", true, e);
                    }
                    throw new RuntimeException(e);
                } finally {
                    try {
                        fos.close();
                    } catch (IOException e) {
                        if (isTruncationSnapshot) {
                            VoltDB.crashLocalVoltDB("Unexpected exception while attempting to create hash file for truncation snapshot", true, e);
                        }
                        throw new RuntimeException(e);
                    }
                }
            }
        };
    } finally {
        if (!success) {
            // clean up the partial file on any failure above
            file.delete();
        }
    }
}
java
/**
 * Extracts the snapshot nonce from a ".digest" filename.
 *
 * @throws IllegalArgumentException when the filename is null or lacks the
 *         ".digest" extension
 */
public static String parseNonceFromDigestFilename(String filename) {
    boolean looksLikeDigest = filename != null && filename.endsWith(".digest");
    if (!looksLikeDigest) {
        throw new IllegalArgumentException("Bad digest filename: " + filename);
    }
    return parseNonceFromSnapshotFilename(filename);
}
java
/**
 * Extracts the snapshot nonce from a hashinator config filename.
 *
 * @throws IllegalArgumentException when the filename is null or lacks the
 *         hashinator config extension
 */
public static String parseNonceFromHashinatorConfigFilename(String filename) {
    boolean looksLikeHashConfig = filename != null && filename.endsWith(HASH_EXTENSION);
    if (!looksLikeHashConfig) {
        throw new IllegalArgumentException("Bad hashinator config filename: " + filename);
    }
    return parseNonceFromSnapshotFilename(filename);
}
java
public static String parseNonceFromSnapshotFilename(String filename) { if (filename == null) { throw new IllegalArgumentException("Bad snapshot filename: " + filename); } // For the snapshot catalog if (filename.endsWith(".jar")) { return filename.substring(0, filename.indexOf(".jar")); } // for everything else valid in new format or volt1.2 or earlier table files else if (filename.indexOf("-") > 0) { return filename.substring(0, filename.indexOf("-")); } // volt 1.2 and earlier digest filename else if (filename.endsWith(".digest")) { return filename.substring(0, filename.indexOf(".digest")); } // Hashinator config filename. else if (filename.endsWith(HASH_EXTENSION)) { return filename.substring(0, filename.indexOf(HASH_EXTENSION)); } throw new IllegalArgumentException("Bad snapshot filename: " + filename); }
java
/**
 * Reads every hashinator config file for the given nonce in the given
 * directory (files named "&lt;nonce&gt;-host_*" with the hashinator extension)
 * and returns their raw contents. Returns an empty list when the directory
 * cannot be listed.
 *
 * NOTE(review): the maxConfigs parameter is not used to limit the results —
 * confirm whether a cap was intended.
 */
public static List<ByteBuffer> retrieveHashinatorConfigs(
        String path, String nonce, int maxConfigs, VoltLogger logger) throws IOException {
    VoltFile directory = new VoltFile(path);
    ArrayList<ByteBuffer> configs = new ArrayList<ByteBuffer>();
    // list the directory once; the original called listFiles() twice, which
    // doubles the I/O and races against concurrent file creation/deletion
    File[] files = directory.listFiles();
    if (files == null) {
        return configs;
    }
    for (File file : files) {
        if (file.getName().startsWith(nonce + "-host_") && file.getName().endsWith(HASH_EXTENSION)) {
            byte[] rawData = new byte[(int) file.length()];
            // try-with-resources replaces the manual close-in-finally dance
            try (DataInputStream dis = new DataInputStream(new FileInputStream(file))) {
                dis.readFully(rawData);
            }
            configs.add(ByteBuffer.wrap(rawData));
        }
    }
    return configs;
}
java
/**
 * Writes the snapshot catalog jar for the given nonce into path. On failure
 * this crashes the node when the snapshot is a log-truncation snapshot;
 * otherwise it rethrows as an IOException naming the target file.
 *
 * @return a Runnable that completes the catalog write
 */
public static Runnable writeSnapshotCatalog(String path, String nonce, boolean isTruncationSnapshot)
        throws IOException {
    String filename = SnapshotUtil.constructCatalogFilenameForNonce(nonce);
    try {
        return VoltDB.instance().getCatalogContext().writeCatalogJarToFile(path, filename, CatalogJarWriteMode.RECOVER);
    } catch (IOException ioe) {
        if (isTruncationSnapshot) {
            VoltDB.crashLocalVoltDB("Unexpected exception while attempting to create Catalog file for truncation snapshot",
                    true, ioe);
        }
        throw new IOException("Unable to write snapshot catalog to file: " + path + File.separator + filename, ioe);
    }
}
java
/**
 * Returns a Runnable that records snapshot completion by writing the nonce
 * into the terminus marker file under the voltdbroot directory.
 */
public static Runnable writeTerminusMarker(final String nonce, final NodeSettings paths,
        final VoltLogger logger) {
    final File markerFile = new File(paths.getVoltDBRoot(), VoltDB.TERMINUS_MARKER);
    return new Runnable() {
        @Override
        public void run() {
            // autoflush PrintWriter; closed automatically by try-with-resources
            try (PrintWriter pw = new PrintWriter(new FileWriter(markerFile), true)) {
                pw.println(nonce);
            } catch (IOException e) {
                throw new RuntimeException("Failed to create .complete file for " + markerFile.getName(), e);
            }
        }
    };
}
java
/**
 * Scans a directory for snapshot files and groups them by nonce into
 * namedSnapshotMap. Thin wrapper that delegates to
 * retrieveSnapshotFilesInternal with a starting recursion depth of 0.
 */
public static void retrieveSnapshotFiles(
        File directory, Map<String, Snapshot> namedSnapshotMap,
        FileFilter filter, boolean validate, SnapshotPathType stype, VoltLogger logger) {
    NamedSnapshots namedSnapshots = new NamedSnapshots(namedSnapshotMap, stype);
    retrieveSnapshotFilesInternal(directory, namedSnapshots, filter, validate, stype, logger, 0);
}
java
public static final String constructFilenameForTable(Table table, String fileNonce, SnapshotFormat format, int hostId) { String extension = ".vpt"; if (format == SnapshotFormat.CSV) { extension = ".csv"; } StringBuilder filename_builder = new StringBuilder(fileNonce); filename_builder.append("-"); filename_builder.append(table.getTypeName()); if (!table.getIsreplicated()) { filename_builder.append("-host_"); filename_builder.append(hostId); } filename_builder.append(extension);//Volt partitioned table return filename_builder.toString(); }
java
/**
 * Requests a snapshot asynchronously on a dedicated worker thread and hands
 * the final ClientResponse to the given handler. The worker retries while a
 * snapshot is already in progress (re-requesting each second) and keeps
 * waiting while the request is queued (when notifyChanges is set). It gives
 * up after two hours, in which case the handler may receive null.
 */
public static void requestSnapshot(final long clientHandle,
        final String path, final String nonce, final boolean blocking,
        final SnapshotFormat format, final SnapshotPathType stype,
        final String data, final SnapshotResponseHandler handler,
        final boolean notifyChanges) {
    final SnapshotInitiationInfo snapInfo =
            new SnapshotInitiationInfo(path, nonce, blocking, format, stype, data);
    final SimpleClientResponseAdapter adapter =
            new SimpleClientResponseAdapter(ClientInterface.SNAPSHOT_UTIL_CID, "SnapshotUtilAdapter", true);
    final LinkedBlockingQueue<ClientResponse> responses = new LinkedBlockingQueue<ClientResponse>();
    // responses from the snapshot daemon are funneled into the queue
    adapter.registerCallback(clientHandle, new SimpleClientResponseAdapter.Callback() {
        @Override
        public void handleResponse(ClientResponse response) {
            responses.offer(response);
        }
    });
    final SnapshotDaemon sd = VoltDB.instance().getClientInterface().getSnapshotDaemon();
    Runnable work = new Runnable() {
        @Override
        public void run() {
            ClientResponse response = null;
            // abort if unable to succeed in 2 hours
            final long startTime = System.currentTimeMillis();
            boolean hasRequested = false;
            while (System.currentTimeMillis() - startTime <= TimeUnit.HOURS.toMillis(2)) {
                try {
                    if (!hasRequested) {
                        sd.createAndWatchRequestNode(clientHandle, adapter, snapInfo, notifyChanges);
                        hasRequested = true;
                    }
                    try {
                        // wait for a response for whatever remains of the 2-hour budget
                        response = responses.poll(
                                TimeUnit.HOURS.toMillis(2) - (System.currentTimeMillis() - startTime),
                                TimeUnit.MILLISECONDS);
                        if (response == null) {
                            break;
                        }
                    } catch (InterruptedException e) {
                        VoltDB.crashLocalVoltDB("Should never happen", true, e);
                    }
                    VoltTable[] results = response.getResults();
                    if (response.getStatus() != ClientResponse.SUCCESS) {
                        break;
                    } else if (isSnapshotInProgress(results)) {
                        // retry after a second
                        Thread.sleep(1000);
                        // Request again
                        hasRequested = false;
                        continue;
                    } else if (isSnapshotQueued(results) && notifyChanges) {
                        //Wait for an update on the queued state via ZK
                        continue;
                    } else {
                        // other errors are not recoverable
                        break;
                    }
                } catch (ForwardClientException e) {
                    //This happens when something goes wrong in the snapshot daemon
                    //I think it will always be that there was an existing snapshot request
                    //It should eventually terminate and then we can submit one.
                    try {
                        Thread.sleep(5000);
                    } catch (InterruptedException e1) {}
                    new VoltLogger("SNAPSHOT").warn("Partition detection is unable to submit a snapshot request " +
                            "because one already exists. Retrying.");
                    continue;
                } catch (InterruptedException ignore) {}
            }
            handler.handleResponse(response);
        }
    };
    // Use an executor service here to avoid explosion of threads???
    ThreadFactory factory = CoreUtils.getThreadFactory("Snapshot Request - " + nonce);
    Thread workThread = factory.newThread(work);
    workThread.start();
}
java
/**
 * Returns a future that completes with the SnapshotCompletionEvent of the
 * first successful snapshot whose nonce matches.
 *
 * NOTE(review): the completion interest is removed only on success — a
 * failed snapshot with this nonce leaves the interest registered and the
 * future never set; confirm that is intended.
 */
public static ListenableFuture<SnapshotCompletionInterest.SnapshotCompletionEvent> watchSnapshot(final String nonce) {
    final SettableFuture<SnapshotCompletionInterest.SnapshotCompletionEvent> result = SettableFuture.create();
    SnapshotCompletionInterest interest = new SnapshotCompletionInterest() {
        @Override
        public CountDownLatch snapshotCompleted(SnapshotCompletionEvent event) {
            if (event.nonce.equals(nonce) && event.didSucceed) {
                VoltDB.instance().getSnapshotCompletionMonitor().removeInterest(this);
                result.set(event);
            }
            return null;    // no latch for the monitor to wait on
        }
    };
    VoltDB.instance().getSnapshotCompletionMonitor().addInterest(interest);
    return result;
}
java
/**
 * Loads this host's hashinator config for the given nonce from the snapshot
 * directory.
 *
 * @throws IOException when no matching config file exists or it cannot be read
 */
public static HashinatorSnapshotData retrieveHashinatorConfig(
        String path, String nonce, int hostId, VoltLogger logger) throws IOException {
    String expectedFileName = constructHashinatorConfigFilenameForNonce(nonce, hostId);
    File[] files = new VoltFile(path).listFiles();
    if (files != null) {
        for (File candidate : files) {
            if (expectedFileName.equals(candidate.getName())) {
                HashinatorSnapshotData hashData = new HashinatorSnapshotData();
                hashData.restoreFromFile(candidate);
                return hashData;
            }
        }
    }
    throw new IOException("Missing hashinator data in snapshot");
}
java
/**
 * Resolves the actual snapshot directory for the given path type:
 * command-log and auto snapshots use their configured locations; any other
 * type uses the caller-supplied path.
 */
public static String getRealPath(SnapshotPathType stype, String path) {
    switch (stype) {
        case SNAP_CL:
            return VoltDB.instance().getCommandLogSnapshotPath();
        case SNAP_AUTO:
            return VoltDB.instance().getSnapshotPath();
        default:
            return path;
    }
}
java
/**
 * Logically closes this pooled connection: rolls back any open transaction,
 * clears warnings, restores the configured defaults, resets the underlying
 * connection, and fires a close event so the connection can be returned to
 * the pool. The physical connection stays open. On failure, listeners are
 * notified of the SQLException before it is rethrown.
 */
public void close() throws SQLException {
    validate();
    try {
        this.connection.rollback();    // discard any uncommitted work
        this.connection.clearWarnings();
        this.connectionDefaults.setDefaults(this.connection);
        this.connection.reset();
        fireCloseEvent();
    } catch (SQLException e) {
        fireSqlExceptionEvent(e);
        throw e;
    }
}
java
/**
 * Physically closes the wrapped JDBC connection and tears down this pooled
 * connection's state. Any SQLException raised while closing is held until
 * the rest of the cleanup has run, then rethrown.
 */
public void closePhysically() throws SQLException {
    SQLException exception = null;
    if (!isClosed && this.connection != null && !this.connection.isClosed()) {
        try {
            this.connection.close();
        } catch (SQLException e) {
            //catch and hold so that the rest of the finalizer is run too. Throw at the end if present.
            exception = e;
        }
    }
    this.isClosed = true;
    // drop all references so this pooled connection cannot be reused
    this.pooledConnection = null;
    this.connection = null;
    this.connectionDefaults = null;
    this.connectionListeners.clear();
    this.connectionListeners = null;
    if (exception != null) {
        throw exception;
    }
}
java
/**
 * Kicks off a started snapshot by queueing the first SnapshotTask into the
 * site's task queue, delayed according to the snapshot priority. Targets
 * that need a final close are remembered in m_snapshotTargets.
 */
public void startSnapshotWithTargets(Collection<SnapshotDataTarget> targets, long now) {
    // TRAIL [SnapSave:9] 5 [all SP] Start snapshot by putting task into the site queue.
    //Basically asserts that there are no tasks with null targets at this point
    //getTarget checks and crashes
    for (SnapshotTableTask t : m_snapshotTableTasks.values()) {
        t.getTarget();
    }
    ArrayList<SnapshotDataTarget> targetsToClose = Lists.newArrayList();
    for (final SnapshotDataTarget target : targets) {
        if (target.needsFinalClose()) {
            targetsToClose.add(target);
        }
    }
    m_snapshotTargets = targetsToClose;
    // Queue the first snapshot task; the delay throttles snapshot work by
    // priority (5 ms per priority level, measured past m_quietUntil)
    VoltDB.instance().schedulePriorityWork(
            new Runnable() {
                @Override
                public void run() {
                    m_siteTaskerQueue.offer(new SnapshotTask());
                }
            },
            (m_quietUntil + (5 * m_snapshotPriority) - now),
            0,
            TimeUnit.MILLISECONDS);
    m_quietUntil += 5 * m_snapshotPriority;
}
java
/**
 * Atomically reserves one output buffer per table task and allocates them.
 * Returns null (without reserving anything) when fewer buffers are currently
 * available than tasks, limiting concurrent buffer use.
 */
private List<BBContainer> getOutputBuffers(Collection<SnapshotTableTask> tableTasks, boolean noSchedule) {
    final int desired = tableTasks.size();
    // CAS loop: claim `desired` buffers from the shared counter, retrying on
    // contention; bail out when not enough are free
    while (true) {
        int available = m_availableSnapshotBuffers.get();
        //Limit the number of buffers used concurrently
        if (desired > available) {
            return null;
        }
        if (m_availableSnapshotBuffers.compareAndSet(available, available - desired)) break;
    }
    List<BBContainer> outputBuffers = new ArrayList<BBContainer>(tableTasks.size());
    for (int ii = 0; ii < tableTasks.size(); ii++) {
        final BBContainer origin = DBBPool.allocateDirectAndPool(m_snapshotBufferLength);
        outputBuffers.add(createNewBuffer(origin, noSchedule));
    }
    return outputBuffers;
}
java
public void write(RowOutputInterface out, ResultMetaData meta) throws IOException { beforeFirst(); out.writeLong(id); out.writeInt(size); out.writeInt(0); // offset out.writeInt(size); while (hasNext()) { Object[] data = getNext(); out.writeData(meta.getColumnCount(), meta.columnTypes, data, null, null); } beforeFirst(); }
java
/**
 * Factory for ClientInterface: constructs the instance so the contained
 * runnables have access to the list of connections.
 */
public static ClientInterface create(
        HostMessenger messenger,
        CatalogContext context,
        ReplicationRole replicationRole,
        Cartographer cartographer,
        InetAddress clientIntf,
        int clientPort,
        InetAddress adminIntf,
        int adminPort,
        // renamed from "SslContext", which shadowed the type name and
        // violated lowerCamelCase parameter naming
        SslContext sslContext) throws Exception {
    /*
     * Construct the runnables so they have access to the list of connections
     */
    final ClientInterface ci = new ClientInterface(
            clientIntf, clientPort, adminIntf, adminPort, context, messenger,
            replicationRole, cartographer, sslContext);
    return ci;
}
java
/**
 * Initializes the snapshot daemon, handing it a callback that binds the
 * daemon's internal adapter to this client interface.
 */
public void initializeSnapshotDaemon(HostMessenger messenger, GlobalServiceElector gse) {
    Runnable bindTask = new Runnable() {
        @Override
        public void run() {
            bindAdapter(m_snapshotDaemonAdapter, null);
        }
    };
    m_snapshotDaemon.init(this, messenger, bindTask, gse);
}
java
/**
 * Convenience overload: binds an adapter connection, delegating to the
 * three-argument bindAdapter with its trailing boolean flag set to false.
 */
public ClientInterfaceHandleManager bindAdapter(final Connection adapter, final ClientInterfaceRepairCallback repairCallback) {
    return bindAdapter(adapter, repairCallback, false);
}
java
/**
 * Activates (or deactivates) the snapshot daemon according to the "default"
 * snapshot schedule in the current catalog, crashing the node if the state
 * change fails. No-op when the catalog defines no default schedule.
 */
public void mayActivateSnapshotDaemon() {
    SnapshotSchedule schedule = m_catalogContext.get().database.getSnapshotschedule().get("default");
    if (schedule != null) {
        final ListenableFuture<Void> future = m_snapshotDaemon.mayGoActiveOrInactive(schedule);
        future.addListener(new Runnable() {
            @Override
            public void run() {
                try {
                    future.get();   // surface any failure from the daemon
                } catch (InterruptedException e) {
                    VoltDB.crashLocalVoltDB("Failed to make SnapshotDaemon active", false, e);
                } catch (ExecutionException e) {
                    VoltDB.crashLocalVoltDB("Failed to make SnapshotDaemon active", false, e);
                }
            }
        }, CoreUtils.SAMETHREADEXECUTOR);
    }
}
java
/**
 * Reacts to a catalog update: swaps in the new catalog context, possibly
 * (re)activates the snapshot daemon, and pushes a @SystemCatalog PROCEDURES
 * notification to subscribed clients.
 */
public void notifyOfCatalogUpdate() {
    m_catalogContext.set(VoltDB.instance().getCatalogContext());
    /*
     * Update snapshot daemon settings.
     *
     * Don't do it if the system is still initializing (CL replay),
     * because snapshot daemon may call @SnapshotScan on activation and
     * it will mess replaying txns up.
     */
    if (VoltDB.instance().getMode() != OperationMode.INITIALIZING) {
        mayActivateSnapshotDaemon();
        //add a notification to client right away
        StoredProcedureInvocation spi = new StoredProcedureInvocation();
        spi.setProcName("@SystemCatalog");
        spi.setParams("PROCEDURES");
        spi.setClientHandle(ASYNC_PROC_HANDLE);
        notifyClients(m_currentProcValues,m_currentProcSupplier, spi, OpsSelector.SYSTEMCATALOG);
    }
}
java
private final void checkForDeadConnections(final long now) { final ArrayList<Pair<Connection, Integer>> connectionsToRemove = new ArrayList<Pair<Connection, Integer>>(); for (final ClientInterfaceHandleManager cihm : m_cihm.values()) { // Internal connections don't implement calculatePendingWriteDelta(), so check for real connection first if (VoltPort.class == cihm.connection.getClass()) { final int delta = cihm.connection.writeStream().calculatePendingWriteDelta(now); if (delta > CLIENT_HANGUP_TIMEOUT) { connectionsToRemove.add(Pair.of(cihm.connection, delta)); } } } for (final Pair<Connection, Integer> p : connectionsToRemove) { Connection c = p.getFirst(); networkLog.warn("Closing connection to " + c + " because it hasn't read a response that was pending for " + p.getSecond() + " milliseconds"); c.unregister(); } }
java