repo_name
stringlengths 6
101
| path
stringlengths 4
300
| text
stringlengths 7
1.31M
|
|---|---|---|
drbild/c2dm4j
|
src/test/java/org/whispercomm/c2dm4j/backoff/ExponentialBackoffTest.java
|
/*
* Copyright 2012 The Regents of the University of Michigan
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.whispercomm.c2dm4j.backoff;
import org.junit.Before;
import org.junit.Test;
import org.whispercomm.c2dm4j.backoff.ExponentialBackoff;
import static org.whispercomm.c2dm4j.test.Matchers.*;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;
/**
* Unit tests for {@link ExponentialBackoff}.
*
* @author <NAME>
*
*/
public class ExponentialBackoffTest {

    /** Class under test: base delay 10 ms, capped at 5 doublings. */
    ExponentialBackoff cut;

    @Before
    public void setup() {
        cut = new ExponentialBackoff(10, 5);
    }

    /** Records {@code count} consecutive failed attempts against the backoff under test. */
    private void recordFailures(int count) {
        for (int i = 0; i < count; ++i) {
            cut.begin().recordFailure();
        }
    }

    @Test
    public void initialDelayIsZero() {
        assertThat(cut.begin().delay(), is(0L));
    }

    @Test
    public void afterOneFailDelayIs10ms() {
        recordFailures(1);
        assertThat(cut.begin().delay(), is(approx(10L, 10)));
    }

    @Test
    public void afterTwoFailsDelayIs30ms() {
        recordFailures(2);
        assertThat(cut.begin().delay(), is(approx(30L, 10)));
    }

    @Test
    public void afterFiveFailsDelayIs310ms() {
        recordFailures(5);
        assertThat(cut.begin().delay(), is(approx(310L, 10)));
    }

    @Test
    public void afterMaxFailsDelayIsStill310ms() {
        // One failure beyond the configured maximum of 5: delay must not grow further.
        recordFailures(6);
        assertThat(cut.begin().delay(), is(approx(310L, 10)));
    }

    @Test
    public void succeedResetsDelayTo0ms() {
        recordFailures(4);
        assertThat(cut.begin().delay(), is(approx(150L, 10)));
        cut.begin().recordSuccess();
        assertThat(cut.begin().delay(), is(0L));
    }

    @Test
    public void failureOnOldAttemptDoesNotIncreaseDelay() {
        recordFailures(3);
        Attempt stale = cut.begin();
        cut.begin().recordFailure();
        assertThat(cut.begin().delay(), is(approx(150L, 10)));
        // A failure reported on a stale attempt must be ignored.
        stale.recordFailure();
        assertThat(cut.begin().delay(), is(approx(150L, 10)));
    }

    @Test
    public void successOnOldAttemptDoesNotResetDelay() {
        recordFailures(2);
        Attempt stale = cut.begin();
        cut.begin().recordFailure();
        assertThat(cut.begin().delay(), is(approx(70L, 10)));
        // A success reported on a stale attempt must be ignored.
        stale.recordSuccess();
        assertThat(cut.begin().delay(), is(approx(70L, 10)));
    }
}
|
spothero/Specs
|
GCDThreadsafe/0.1.1/GCDThreadsafe.podspec
|
<reponame>spothero/Specs<gh_stars>1-10
Pod::Spec.new do |s|
  # --- identity -------------------------------------------------------------
  s.name     = 'GCDThreadsafe'
  s.version  = '0.1.1'
  s.summary  = 'Easy threadsafeing + the performance of Grand Central Dispatch.'
  s.homepage = 'http://github.com/brynbellomy/GCDThreadsafe'
  s.author   = { '<NAME>' => '<EMAIL>' }
  s.license  = { :type => 'WTFPL', :file => 'LICENSE.md' }

  # --- platforms ------------------------------------------------------------
  # Per-platform deployment targets instead of a single s.platform line.
  # s.platform = :ios, '6.1'
  s.ios.deployment_target = '6.1'
  s.osx.deployment_target = '10.8'

  # --- source & build -------------------------------------------------------
  s.source       = { :git => 'https://github.com/brynbellomy/GCDThreadsafe.git', :tag => "v#{s.version.to_s}", :submodules => true }
  s.source_files = 'GCDThreadsafe/*.{m,h}'
  s.requires_arc = true
  # Xcode 5: enable clang modules.
  s.xcconfig     = { 'CLANG_ENABLE_MODULES' => 'YES' }

  # --- dependencies ---------------------------------------------------------
  s.dependency 'libextobjc', '~> 0.2.5'
end
|
wegrzyns/adnoter
|
src/main/java/fr/lium/experimental/spkDiarization/programs/MNamedSpeakerTest.java
|
<filename>src/main/java/fr/lium/experimental/spkDiarization/programs/MNamedSpeakerTest.java<gh_stars>0
/**
*
* <p>
* SNamedSpeaker2
* </p>
*
* @author <a href="mailto:<EMAIL>"><NAME></a>
* @author <a href="mailto:<EMAIL>"><NAME></a>
* @version v2.0
*
* Copyright (c) 2007-2009 Universite du Maine. All Rights Reserved. Use is subject to license terms.
*
* THIS SOFTWARE IS PROVIDED BY THE "UNIVERSITE DU MAINE" AND CONTRIBUTORS ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE REGENTS AND CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
* USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/
package fr.lium.experimental.spkDiarization.programs;
import java.lang.reflect.InvocationTargetException;
import java.util.Iterator;
import java.util.StringTokenizer;
import java.util.TreeMap;
import java.util.logging.Level;
import java.util.logging.Logger;
import www.spatial.maine.edu.assignment.HungarianAlgorithm;
import fr.lium.experimental.spkDiarization.libClusteringData.speakerName.SpeakerName;
import fr.lium.experimental.spkDiarization.libClusteringData.speakerName.SpeakerNameSet;
import fr.lium.experimental.spkDiarization.libClusteringData.transcription.EntitySet;
import fr.lium.experimental.spkDiarization.libClusteringData.transcription.Link;
import fr.lium.experimental.spkDiarization.libClusteringData.transcription.LinkSet;
import fr.lium.experimental.spkDiarization.libClusteringData.turnRepresentation.Turn;
import fr.lium.experimental.spkDiarization.libClusteringData.turnRepresentation.TurnSet;
import fr.lium.experimental.spkDiarization.libNamedSpeaker.SpeakerNameUtils;
import fr.lium.experimental.spkDiarization.libNamedSpeaker.TargetNameMap;
import fr.lium.experimental.spkDiarization.libSCTree.SCT;
import fr.lium.experimental.spkDiarization.libSCTree.SCTProbabilities;
import fr.lium.experimental.spkDiarization.libSCTree.SCTSolution;
import fr.lium.spkDiarization.lib.DiarizationException;
import fr.lium.spkDiarization.lib.MainTools;
import fr.lium.spkDiarization.lib.SpkDiarizationLogger;
import fr.lium.spkDiarization.libClusteringData.Cluster;
import fr.lium.spkDiarization.libClusteringData.ClusterSet;
import fr.lium.spkDiarization.libFeature.AudioFeatureSet;
import fr.lium.spkDiarization.libModel.Distance;
import fr.lium.spkDiarization.libModel.gaussian.GMMArrayList;
import fr.lium.spkDiarization.parameter.Parameter;
import fr.lium.spkDiarization.programs.Identification;
/**
* The Class MNamedSpeakerTest.
*
* @author <NAME>
*/
public class MNamedSpeakerTest {
/** The Constant logger. */
private final static Logger logger = Logger.getLogger(MNamedSpeakerTest.class.getName());
/** The Constant PREVIOUS_THRESHOLD. */
public static final double PREVIOUS_THRESHOLD = 0.05;
/** The Constant CURRENT_THRESHOLD. */
public static final double CURRENT_THRESHOLD = 0.05;
/** The Constant NEXT_THRESHOLD. */
public static final double NEXT_THRESHOLD = 0.05;
// public static final double PREVIOUS_THRESHOLD = 0.09;
// public static final double CURRENT_THRESHOLD = 0.2;
// public static final double NEXT_THRESHOLD = 0.2;
/** The parameter. */
static Parameter parameter;
/** The name and gender map. */
static TargetNameMap nameAndGenderMap;
/** The first name and gender map. */
static TargetNameMap firstNameAndGenderMap;
/** The next false. */
static int nextFalse = 0;
/** The next total. */
static int nextTotal = 0;
/** The previous false. */
static int previousFalse = 0;
/** The previous total. */
static int previousTotal = 0;
/** The current false. */
static int currentFalse = 0;
/** The current total. */
static int currentTotal = 0;
/** The other false. */
static int otherFalse = 0;
/** The other total. */
static int otherTotal = 0;
/**
 * Logs the program name and the full parameter configuration, but only when
 * help output was requested.
 *
 * @param parameter all the program parameters
 * @param program name of this program
 * @throws IllegalArgumentException the illegal argument exception
 * @throws IllegalAccessException the illegal access exception
 * @throws InvocationTargetException the invocation target exception
 */
public static void info(Parameter parameter, String program) throws IllegalArgumentException, IllegalAccessException, InvocationTargetException {
    // Guard clause: nothing to print unless --help was given.
    if (!parameter.help) {
        return;
    }
    logger.config(parameter.getSeparator2());
    logger.config("Program name = " + program);
    logger.config(parameter.getSeparator());
    parameter.logShow();
    parameter.getParameterSegmentationInputFile().logAll(); // sInMask
    parameter.getParameterSegmentationOutputFile().logAll(); // sOutMask
    logger.config(parameter.getSeparator());
    parameter.getParameterNamedSpeaker().logAll();
    logger.config(parameter.getSeparator());
    parameter.getParameterModelSetInputFile().logAll(); // tInMask
    parameter.getParameterTopGaussian().logTopGaussian(); // sTop
    logger.config(parameter.getSeparator());
    parameter.getParameterScore().logAll();
}
/**
 * For each solution of the solution set, attaches the SCT probabilities to the
 * cluster of the previous, current and next {@link Turn} for the target speaker.
 * Also updates the static false/total counters used by {@link #printStat()}.
 *
 * @param solution a SCT solution
 * @param speakerName the name of the target speaker (raw, un-normalized)
 * @param turnSet list of turns
 * @param index index of the current turn in turnSet
 *
 * TODO make permutation of speaker name word (firstname/lastname - lastname/firstname)
 */
public static void putSpeakerName(SCTSolution solution, String speakerName, TurnSet turnSet, int index) {
    SCTProbabilities probabilities = solution.getProbabilities();
    String normalizedSpeakerName = SpeakerNameUtils.normalizeSpeakerName(speakerName);
    String speakerGender;
    if (parameter.getParameterNamedSpeaker().isFirstNameCheck()) {
        // First-name mode: keep only the first "_"-separated token and look the
        // gender up in the first-name map.
        StringTokenizer tokenizer = new StringTokenizer(normalizedSpeakerName, "_");
        if (tokenizer.hasMoreTokens()) {
            normalizedSpeakerName = tokenizer.nextToken();
        }
        speakerGender = firstNameAndGenderMap.get(normalizedSpeakerName);
        logger.finest("normalized speaker name: " + normalizedSpeakerName + ", speakerGender firstname checked: "
                + speakerGender);
    } else {
        // Full-name mode: gender from the full-name map.
        speakerGender = nameAndGenderMap.get(normalizedSpeakerName);
        logger.finest("normalized speaker name: " + normalizedSpeakerName + ", speakerGender name checked: "
                + speakerGender);
    }
    // logger.fine("normalized speaker name:" + normalizedSpeakerName + " gender is:" + speakerGender);
    Turn turn;
    double scorePrev = 0, scoreCurrent = 0, scoreNext = 0.0;
    // maximum == true: only the single most probable position gets the score;
    // otherwise every position whose probability exceeds its threshold does.
    boolean maximum = parameter.getParameterNamedSpeaker().isMaximum();
    String nextName = "";
    String previousName = "";
    String currentName = "";
    String maxKey = probabilities.getMaxKey();
    // previous turn
    if ((index - 1) >= 0) {
        turn = turnSet.get(index - 1);
        previousName = SpeakerNameUtils.normalizeSpeakerName(turn.getCluster().getName());
        scorePrev = probabilities.get(SpeakerNameUtils.PREVIOUS);
        if ((checkGender(turn, speakerGender) == true)
                && ((!maximum && (scorePrev > PREVIOUS_THRESHOLD)) || (maximum && maxKey.equals(SpeakerNameUtils.PREVIOUS)))) {
            logger.finest("SCT cluster: " + turn.getCluster().getName() + " speaker name:" + speakerName
                    + " add score: " + scorePrev + " PUT ON PREVIOUS " + turn.first().getStart());
            addScore(turn, speakerName, scorePrev);
        }
    }
    // Current turn
    turn = turnSet.get(index);
    // float turnStart = turn.first().getStartInSecond();
    // float turnEnd = turn.last().getStartInSecond() + turn.last().getLengthInSecond();
    // String gender = turn.first().getCluster().getGender();
    currentName = SpeakerNameUtils.normalizeSpeakerName(turn.getCluster().getName());
    scoreCurrent = probabilities.get(SpeakerNameUtils.CURRENT);
    if ((checkGender(turn, speakerGender) == true)
            && ((!maximum && (scoreCurrent > CURRENT_THRESHOLD)) || (maximum && maxKey.equals(SpeakerNameUtils.CURRENT)))) {
        logger.finest("SCT cluster: " + turn.getCluster().getName() + " speaker name:" + speakerName
                + " add score: " + scoreCurrent + " PUT ON CURRENT " + turn.first().getStart());
        addScore(turn, speakerName, scoreCurrent);
    }
    // next turn
    if ((index + 1) < turnSet.size()) {
        turn = turnSet.get(index + 1);
        scoreNext = probabilities.get(SpeakerNameUtils.NEXT);
        nextName = SpeakerNameUtils.normalizeSpeakerName(turn.getCluster().getName());
        // if ((checkGender(turn, speakerGender) == true) && (scoreNext > NEXT_THRESHOLD)) {
        if ((checkGender(turn, speakerGender) == true)
                && ((!maximum && (scoreNext > NEXT_THRESHOLD)) || (maximum && maxKey.equals(SpeakerNameUtils.NEXT)))) {
            logger.finest("SCT cluster: " + turn.getCluster().getName() + " speaker name:" + speakerName
                    + " add score: " + scoreNext + " PUT ON NEXT " + turn.first().getStart());
            addScore(turn, speakerName, scoreNext);
        }
    }
    // Statistics: compare the SCT's most probable position against the cluster
    // names, counting disagreements per position. Note speakerName is
    // re-normalized here (the full name, even in first-name mode).
    speakerName = SpeakerNameUtils.normalizeSpeakerName(speakerName);
    if (maxKey.equals(SpeakerNameUtils.NEXT)) {
        if (!speakerName.equals(nextName)) {
            nextFalse++;
        }
        nextTotal++;
    }
    if (maxKey.equals(SpeakerNameUtils.CURRENT)) {
        if (!speakerName.equals(currentName)) {
            currentFalse++;
        }
        currentTotal++;
    }
    if (maxKey.equals(SpeakerNameUtils.PREVIOUS)) {
        if (!speakerName.equals(previousName)) {
            previousFalse++;
        }
        previousTotal++;
    }
    if (maxKey.equals(SpeakerNameUtils.OTHER)) {
        if (speakerName.equals(nextName) || speakerName.equals(previousName) || speakerName.equals(currentName)) {
            otherFalse++;
        }
        otherTotal++;
    }
    // logger.info("DETECTED :" + SpeakerNameUtils.normalizeSpeakerName(speakerName) + ", PREVIOUS previous score: "
    // + scorePrev);
}
/**
 * Checks coherence between the target speaker's gender and the gender of the
 * cluster attached to the turn.
 *
 * @param turn the turn
 * @param speakerGender the speaker gender ({@code null} or "U" = unknown)
 *
 * @return true if the genders are compatible or checking is disabled
 */
public static boolean checkGender(Turn turn, String speakerGender) {
    // Gender checking may be switched off entirely by configuration.
    if (parameter.getParameterNamedSpeaker().isDontCheckGender()) {
        return true;
    }
    // An unknown gender (null or "U") never causes a rejection.
    boolean genderIsKnown = (speakerGender != null) && !"U".equals(speakerGender);
    boolean genderMatches = turn.getCluster().getGender().equals(speakerGender);
    return genderMatches || !genderIsKnown;
}
/**
 * Adds the score to the cluster attached to the turn, for the given target
 * speaker name.
 *
 * @param turn the turn linked to a cluster
 * @param name the name of the target speaker
 * @param value the score
 */
public static void addScore(Turn turn, String name, double value) {
    SpeakerName speakerName = turn.getCluster().getSpeakerName(name);
    // Legacy accounting: plain running sum of the scores.
    speakerName.incrementScoreCluster(value);
    // New accounting: keeps a trace of each individual score.
    speakerName.addScoreCluster(value);
}
/**
 * Tests the SCT over each segment containing a link set and a person entity.
 * The SCT result (speaker name and probabilities) is stored, via
 * {@link #putSpeakerName}, in the clusters of the previous, current or next turn.
 *
 * @param clusterSet the cluster set
 * @param sct the semantic classification tree
 * @param targetSpeakerNameMap the target speaker name map
 * @throws CloneNotSupportedException the clone not supported exception
 * @throws DiarizationException TODO manager open and close speaker list
 */
public static void computeSCTSCore(ClusterSet clusterSet, SCT sct, TargetNameMap targetSpeakerNameMap) throws CloneNotSupportedException, DiarizationException {
    TurnSet turns = clusterSet.getTurns();
    boolean isCloseListCheck = parameter.getParameterNamedSpeaker().isCloseListCheck();
    for (int i = 0; i < turns.size(); i++) {
        Turn currentTurn = turns.get(i);
        LinkSet linkSet = currentTurn.getCollapsedLinkSet();
        boolean startTurn = true;
        boolean endTurn = true;
        SpeakerNameUtils.makeLinkSetForSCT(linkSet, startTurn, endTurn);
        for (int index = 0; index < linkSet.size(); index++) {
            Link link = linkSet.getLink(index);
            if (link.haveEntity(EntitySet.TypePersonne)) {
                // Reduce the link set to a window of 5 links around the person
                // entity before feeding it to the SCT.
                LinkSet linkSetForTest = SpeakerNameUtils.reduceLinkSetForSCT(linkSet, index, 5, startTurn, endTurn, true);
                String speakerName = link.getWord();
                if (SpeakerNameUtils.checkSpeakerName(speakerName, isCloseListCheck, nameAndGenderMap, firstNameAndGenderMap)) {
                    SCTSolution solution = sct.test(linkSetForTest);
                    // Build the probability trace with a StringBuilder instead
                    // of repeated String concatenation in a loop.
                    StringBuilder ch = new StringBuilder();
                    for (Double v : solution.getProbabilities().values()) {
                        ch.append(" ").append(v);
                    }
                    logger.info("@@ trun:" + i + "/" + currentTurn.get(0).getStartInSecond() + " name:"
                            + speakerName + " proba:" + ch);
                    putSpeakerName(solution, speakerName, turns, i);
                }
            }
        }
    }
}
/**
 * Assigns the candidate speaker names to clusters, greedily: the globally
 * best-scoring (cluster, name) pair wins first; that name is then removed from
 * all remaining clusters and the process repeats. Leftover clusters are merged
 * into a single "unk" cluster in the result.
 *
 * @param clusterSet the clusters (consumed: clusters are removed as they are named)
 * @param clusterSetResult the cluster set receiving the renamed clusters
 * @return the cluster set result (same instance as clusterSetResult)
 */
public static ClusterSet decideMaximumFirst(ClusterSet clusterSet, ClusterSet clusterSetResult) {
    for (String name : clusterSet) {
        Cluster cluster = clusterSet.getCluster(name);
        // logger.info("decide: Cluster = " + cluster.getName() + " ");
        if (SpkDiarizationLogger.DEBUG) cluster.getSpeakerNameSet().debug();
    }
    // max is an output parameter of getMaxSpeakerName: it carries the winning
    // name and score out of the search.
    SpeakerName max = new SpeakerName("");
    int size = clusterSet.clusterGetSize();
    for (int i = 0; i < size; i++) {
        Cluster cluster = getMaxSpeakerName(clusterSet, max);
        if (cluster == null) {
            // No cluster has any candidate name left.
            break;
        }
        String newName = SpeakerNameUtils.normalizeSpeakerName(max.getName().replace(' ', '_').toLowerCase());
        // Levenshtein distance between old and new name, for logging only.
        int dist = Distance.levenshteinDistance(cluster.getName(), max.getName());
        logger.info("decide: Cluster = " + cluster.getName() + " --> " + newName + " lenvenshtein=" + dist
                + " score=" + max.getScore() + " || ");
        if (SpkDiarizationLogger.DEBUG) cluster.getSpeakerNameSet().debug();
        clusterSetResult.getCluster(cluster.getName()).setName(newName);
        // Remove the assigned cluster, then retire the winning name everywhere
        // else so it cannot be assigned twice.
        clusterSet.removeCluster(cluster.getName());
        for (String name : clusterSet) {
            clusterSet.getCluster(name).RemoveSpeakerName(max.getName());
        }
    }
    // Anything not assigned a name is collapsed into a single "unk" cluster.
    String unk = "unk";
    clusterSetResult.createANewCluster(unk);
    for (String name : clusterSet) {
        // logger.info("decide: create unk");
        clusterSetResult.mergeCluster(unk, name);
    }
    return clusterSetResult;
}
/**
 * Assigns the candidate speaker names to clusters using the Hungarian
 * algorithm (optimal one-to-one assignment maximizing the total score).
 * Leftover clusters are merged into a single "unk" cluster in the result.
 *
 * Fixes over the previous revision: the matrix-trace log line accumulated all
 * previous rows on each iteration (each row was printed with every earlier row
 * prepended); and a dead int[][] allocation was removed.
 *
 * @param clusterSet the clusters (consumed: clusters are removed as they are named)
 * @param clusterSetResult the cluster set receiving the renamed clusters
 * @return the cluster set result (same instance as clusterSetResult)
 */
public static ClusterSet decideHungarian(ClusterSet clusterSet, ClusterSet clusterSetResult) {
    logger.finest("Enter decideHungarian");
    // Bidirectional maps between clusters / candidate names and the row /
    // column indices of the cost matrix.
    TreeMap<Cluster, Integer> clusterIndexMap = new TreeMap<Cluster, Integer>();
    TreeMap<String, Integer> speakerNameIndexMap = new TreeMap<String, Integer>();
    TreeMap<Integer, Cluster> reverseClusterIndexMap = new TreeMap<Integer, Cluster>();
    TreeMap<Integer, String> reverseSpeakerNameIndexMap = new TreeMap<Integer, String>();
    int clusterIndex = 0;
    int spkIndex = 0;
    for (String name : clusterSet) {
        Cluster cluster = clusterSet.getCluster(name);
        SpeakerNameSet spkNameSet = cluster.getSpeakerNameSet();
        if (spkNameSet.size() == 0) {
            // Clusters with no candidate name take no part in the assignment.
            continue;
        }
        clusterIndexMap.put(cluster, clusterIndex);
        reverseClusterIndexMap.put(clusterIndex, cluster);
        Iterator<String> itr = spkNameSet.iterator();
        while (itr.hasNext()) {
            SpeakerName spkName = spkNameSet.get(itr.next());
            if (!speakerNameIndexMap.containsKey(spkName.getName())) {
                speakerNameIndexMap.put(spkName.getName(), spkIndex);
                reverseSpeakerNameIndexMap.put(spkIndex, spkName.getName());
                spkIndex++;
            }
        }
        clusterIndex++;
    }
    // Cost matrix; Java zero-initializes arrays, and 0 is the worst value for
    // a "max" assignment, so no explicit fill is needed.
    double[][] costMatrix = new double[clusterIndexMap.size()][speakerNameIndexMap.size()];
    for (String name : clusterSet) {
        Cluster cluster = clusterSet.getCluster(name);
        SpeakerNameSet spkNameSet = cluster.getSpeakerNameSet();
        if (spkNameSet.size() == 0) {
            continue;
        }
        int row = clusterIndexMap.get(cluster);
        Iterator<String> itr = spkNameSet.iterator();
        while (itr.hasNext()) {
            SpeakerName spkName = spkNameSet.get(itr.next());
            costMatrix[row][speakerNameIndexMap.get(spkName.getName())] = spkName.getScore();
        }
    }
    boolean transposed = false;
    if ((costMatrix.length > 0) && (costMatrix.length > costMatrix[0].length)) {
        logger.finest("Array transposed (because rows>columns).\n"); // Cols must be >= Rows.
        costMatrix = HungarianAlgorithm.transpose(costMatrix);
        transposed = true;
    }
    logger.finest("(Printing out only 2 decimals)");
    logger.finest("The matrix is:");
    for (double[] row : costMatrix) {
        // One log line per row (previously the buffer was never reset, so each
        // row re-printed all preceding rows).
        StringBuilder line = new StringBuilder();
        for (double cell : row) {
            line.append(String.format("%.2f ", cell));
        }
        logger.finest(line.toString());
    }
    if (costMatrix.length > 0) {
        String sumType = "max";
        int[][] assignment = HungarianAlgorithm.hgAlgorithm(costMatrix, sumType); // Call Hungarian algorithm.
        logger.finest("The winning assignment (" + sumType + " sum) is:\n");
        double sum = 0;
        for (int[] element : assignment) {
            // Map the matrix coordinates back to (cluster, name); swap the
            // coordinates if the matrix was transposed above.
            Cluster cluster;
            String newName;
            if (!transposed) {
                cluster = reverseClusterIndexMap.get(element[0]);
                newName = reverseSpeakerNameIndexMap.get(element[1]);
            } else {
                cluster = reverseClusterIndexMap.get(element[1]);
                newName = reverseSpeakerNameIndexMap.get(element[0]);
            }
            logger.info(String.format("array(%d,%s %s=>%d,%s %s) = %.2f", (element[0]), cluster.getName(), cluster.getGender(), (element[1]), newName, nameAndGenderMap.get(newName), costMatrix[element[0]][element[1]]).toString());
            sum = sum + costMatrix[element[0]][element[1]];
            if (costMatrix[element[0]][element[1]] > 0) {
                // Only positive-score pairings are accepted.
                clusterSetResult.getCluster(cluster.getName()).setName(newName);
                clusterSet.removeCluster(cluster.getName());
            }
            // Retire the assigned name from all remaining clusters.
            for (String name : clusterSet) {
                clusterSet.getCluster(name).RemoveSpeakerName(newName);
            }
        }
        logger.finest(String.format("\nThe %s is: %.2f\n", sumType, sum).toString());
        // HungarianAlgorithm.printTime((endTime - startTime) / 1000000000.0);
    }
    // Anything not assigned a name is collapsed into a single "unk" cluster.
    String unk = "unk";
    clusterSetResult.createANewCluster(unk);
    for (String name : clusterSet) {
        clusterSetResult.mergeCluster(unk, name);
    }
    return clusterSetResult;
}
/**
 * Logs a float matrix, one row per log line, values tab-separated.
 *
 * Fix: the previous revision never reset the line buffer between rows, so each
 * row was logged with all preceding rows prepended.
 *
 * @param matrix the matrix to log
 */
public static void printFloatMatrix(float[][] matrix) {
    for (float[] row : matrix) {
        StringBuilder line = new StringBuilder();
        for (float value : row) {
            line.append(value).append('\t');
        }
        logger.finest(line.toString());
    }
}
/**
 * Finds the cluster whose best candidate speaker name has the highest score
 * over the whole cluster set.
 *
 * @param clusterSet the clusters in which the maximum score is searched
 * @param max output parameter: receives the winning name and score
 *
 * @return the cluster holding the maximum score, or null if no cluster has any
 *         candidate name
 */
public static Cluster getMaxSpeakerName(ClusterSet clusterSet, SpeakerName max) {
    // Seed the running maximum so any real score beats it.
    max.set("", Double.NEGATIVE_INFINITY, 1.0);
    Cluster bestCluster = null;
    for (String name : clusterSet) {
        Cluster cluster = clusterSet.getCluster(name);
        SpeakerName candidate = cluster.getMaxSpeakerName();
        if ((candidate != null) && (candidate.getScore() > max.getScore())) {
            max.set(candidate.getName(), candidate.getScore(), 1.0);
            bestCluster = cluster;
        }
    }
    return bestCluster;
}
/*
 * Goes through each cluster and computes each speaker score according to the
 * belief-function theory.
 */
/**
 * Computes belief functions for every cluster in the set and dumps the
 * resulting speaker names for debugging.
 *
 * @param clusters the clusters
 * @throws Exception the exception
 */
public static void computeBeliefFunctions(ClusterSet clusters) throws Exception {
    for (String clusterName : clusters) {
        Cluster cluster = clusters.getCluster(clusterName);
        cluster.computeBeliefFunctions();
        cluster.debugSpeakerName();
    }
}
/*
 * Is called to normalize each summed score. It computes the score of a speaker
 * for a cluster using normalization.
 */
/**
 * Sets the sum score.
 *
 * NOTE(review): intentionally empty — the summed-score normalization described
 * above is not implemented; callers get a no-op.
 *
 * @param clusters the new sum score
 */
public static void setSumScore(ClusterSet clusters) {
}
/*
 * Is called to normalize each summed score. It computes the score of a speaker
 * for a cluster using normalization.
 */
/**
 * Normalizes the speaker scores of every cluster in the set.
 *
 * @param clusterSet the new score
 */
public static void setScore(ClusterSet clusterSet) {
    for (String clusterName : clusterSet) {
        clusterSet.getCluster(clusterName).computeNormalizedScore();
    }
}
/**
 * The main method: reads the cluster set, runs the SCT scoring, optionally
 * refines with audio identification, decides the name-to-cluster assignment
 * (belief functions / maximum / normalized score, then Hungarian or greedy)
 * and writes the renamed cluster set.
 *
 * @param args the command-line arguments
 * @throws Exception the exception
 */
public static void main(String[] args) throws Exception {
    try {
        SpkDiarizationLogger.setup();
        parameter = MainTools.getParameters(args);
        info(parameter, "MNamedSpeakerTest");
        if (parameter.show.isEmpty() == false) {
            ClusterSet clusterSet = MainTools.readClusterSet(parameter);
            // Keep an uncollapsed copy: it receives the renamed clusters and is
            // what gets written out at the end.
            ClusterSet clusterSetSave = clusterSet.clone();
            clusterSet.collapse();
            // Load the speaker name/gender lists.
            nameAndGenderMap = null;
            nameAndGenderMap = SpeakerNameUtils.loadList(parameter.getParameterNamedSpeaker().getNameAndGenderList());
            firstNameAndGenderMap = null;
            if (parameter.getParameterNamedSpeaker().isFirstNameCheck()) {
                firstNameAndGenderMap = SpeakerNameUtils.loadList(parameter.getParameterNamedSpeaker().getFirstNameList());
            }
            // Load and apply the semantic classification tree.
            SCT sct = new SCT(SpeakerNameUtils.getNbOfLabel());
            sct.read(parameter.show, parameter.getParameterNamedSpeaker().getSCTMask());
            computeSCTSCore(clusterSet, sct, nameAndGenderMap);
            if (parameter.getParameterNamedSpeaker().isUseAudio()) {
                // Acoustic refinement: features + GMM identification.
                AudioFeatureSet featureSet = MainTools.readFeatureSet(parameter, clusterSet);
                // Top Gaussian model
                GMMArrayList gmmTopGaussianList = MainTools.readGMMForTopGaussian(parameter, featureSet);
                GMMArrayList gmmList = MainTools.readGMMContainer(parameter);
                clusterSet = Identification.make(featureSet, clusterSet, gmmList, gmmTopGaussianList, parameter);
            }
            if (parameter.getParameterNamedSpeaker().isBeliefFunctions()) {
                // Score combination via belief functions.
                computeBeliefFunctions(clusterSet);
            } else if (parameter.getParameterNamedSpeaker().isMaximum()) {
                // Nothing: raw scores are used as-is in maximum mode.
            } else { // decide on score accumulation
                for (String name : clusterSet) {
                    Cluster cluster = clusterSet.getCluster(name);
                    if (SpkDiarizationLogger.DEBUG) cluster.getSpeakerNameSet().debug();
                }
                setScore(clusterSet);
            }
            // Final assignment of names to clusters.
            if (parameter.getParameterNamedSpeaker().isHungarian()) {
                clusterSet = decideHungarian(clusterSet, clusterSetSave);
            } else {
                clusterSet = decideMaximumFirst(clusterSet, clusterSetSave);
            }
            // printStat();
            MainTools.writeClusterSet(parameter, clusterSetSave, true);
        }
    } catch (DiarizationException e) {
        logger.log(Level.SEVERE, "exception ", e);
        e.printStackTrace();
    }
}
/**
 * Prints the SCT assignment statistics (false/total counters accumulated by
 * {@link #putSpeakerName}) for each position, plus the decision mode.
 * NOTE(review): currently only reachable if the commented-out call in main()
 * is re-enabled.
 */
protected static void printStat() {
    logger.info("Hungarian " + parameter.getParameterNamedSpeaker().isHungarian());
    logger.info("Maximum " + parameter.getParameterNamedSpeaker().isMaximum());
    logger.info("+++++++++++++++++++++++++++++++++++++++++++++++");
    logger.info("SCT STATS " + parameter.show);
    logger.info("SCT STATS Next false : " + nextFalse + " / " + nextTotal);
    logger.info("SCT STATS Previous false : " + previousFalse + " / " + previousTotal);
    logger.info("SCT STATS Current false : " + currentFalse + " / " + currentTotal);
    logger.info("SCT STATS Other false : " + otherFalse + " / " + otherTotal);
}
}
|
timfel/netbeans
|
java/debugger.jpda.truffle/src/org/netbeans/modules/debugger/jpda/truffle/MIMETypes.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.netbeans.modules.debugger.jpda.truffle;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.beans.PropertyChangeSupport;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.WeakHashMap;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.netbeans.api.extexecution.base.ProcessBuilder;
import org.netbeans.api.java.classpath.ClassPath;
import org.netbeans.api.java.platform.JavaPlatform;
import org.netbeans.api.java.platform.JavaPlatformManager;
import org.netbeans.api.java.platform.Specification;
import org.netbeans.api.java.project.JavaProjectConstants;
import org.netbeans.api.project.Project;
import org.netbeans.api.project.ProjectUtils;
import org.netbeans.api.project.SourceGroup;
import org.openide.filesystems.FileObject;
import org.openide.filesystems.FileUtil;
/**
* Get the MIME types of languages installed in the Truffle/GraalVM platform.
*/
public final class MIMETypes {
private static final Logger LOG = Logger.getLogger(MIMETypes.class.getName());
public static final String PROP_MIME_TYPES = "MIME types"; // NOI18N
private static final String MIME_TYPES_MAIN = "org.netbeans.modules.debugger.jpda.backend.truffle.GetMIMETypes"; // NOI18N
private static final MIMETypes INSTANCE = new MIMETypes();
private static String TEMP_TRUFFLE_JAR;
private final Map<JavaPlatform, Set<String>> platformMIMETypes = new WeakHashMap<>();
private Set<String> allPlatformsMIMETypes;
private PropertyChangeListener allPlatformsListener;
private PropertyChangeSupport pcs = new PropertyChangeSupport(this);
/** Singleton: use {@link #getDefault()} to obtain the shared instance. */
private MIMETypes() {
}
/**
 * Returns the shared {@code MIMETypes} singleton.
 *
 * @return the singleton instance
 */
public static MIMETypes getDefault() {
    return INSTANCE;
}
/**
 * Returns the MIME types available on the given project's Java platform, or
 * an empty set when the platform cannot be determined.
 *
 * @param prj the project whose platform is inspected
 * @return the set of MIME types (possibly empty, never null)
 */
public Set<String> get(Project prj) {
    JavaPlatform platform = getProjectPlatform(prj);
    return (platform != null) ? get(platform) : Collections.<String>emptySet();
}
/**
 * Returns the MIME types supported by the given platform, querying it at most
 * once: a helper main class is run with the platform's java launcher and its
 * stdout lines (one MIME type per line) are collected. Results (including the
 * empty set on failure or for non-GraalVM platforms) are cached per platform.
 *
 * NOTE(review): stdout is fully drained before stderr is opened, and the
 * process exit is never waited for — presumably the helper's stderr output is
 * small enough not to block; confirm.
 *
 * @param jp the platform to query
 * @return the cached or freshly-computed set of MIME types (never null)
 */
private synchronized Set<String> get(JavaPlatform jp) {
    Set<String> mTypes = platformMIMETypes.get(jp);
    if (mTypes == null) {
        // Only platforms that ship the 'polyglot' tool (GraalVM) are queried.
        FileObject graalvm = jp.findTool("polyglot"); // NOI18N
        FileObject java = jp.findTool("java"); // NOI18N
        if (graalvm != null && java != null) {
            File javaFile = FileUtil.toFile(java);
            if (javaFile != null) {
                ProcessBuilder pb = ProcessBuilder.getLocal();
                pb.setExecutable(javaFile.getAbsolutePath());
                try {
                    // Run the helper with the extracted backend jar on the classpath.
                    pb.setArguments(Arrays.asList("-cp", getTruffleJarPath(), MIME_TYPES_MAIN)); // NOI18N
                    Process proc = pb.call();
                    // One MIME type per stdout line.
                    try (BufferedReader r = new BufferedReader(new InputStreamReader(proc.getInputStream()))) {
                        mTypes = new HashSet<>();
                        String line;
                        while ((line = r.readLine()) != null) {
                            mTypes.add(line);
                        }
                    }
                    // Forward any stderr output to the log.
                    try (BufferedReader r = new BufferedReader(new InputStreamReader(proc.getErrorStream()))) {
                        String line;
                        while ((line = r.readLine()) != null) {
                            LOG.info("Error from "+javaFile+" : "+line);
                        }
                    }
                    LOG.log(Level.FINE, "MIME types of {0} are: {1}", new Object[]{jp, mTypes});
                } catch (IOException ioex) {
                    LOG.log(Level.CONFIG, "", ioex);
                }
            }
        }
        if (mTypes == null) {
            // Query failed or not applicable: cache the empty set so we do not retry.
            mTypes = Collections.emptySet();
        }
        platformMIMETypes.put(jp, mTypes);
    }
    return mTypes;
}
/**
 * Returns the path to a temporary copy of the Truffle backend jar, creating
 * the copy on first use. The temp file is deleted on JVM exit.
 *
 * @return absolute path of the backend jar copy
 * @throws IOException when the temp file cannot be created or written
 */
private static synchronized String getTruffleJarPath() throws IOException {
    if (TEMP_TRUFFLE_JAR == null) {
        File truffleJarFile = File.createTempFile("TmpTruffleBcknd", ".jar"); // NOI18N
        truffleJarFile.deleteOnExit();
        // Close both streams explicitly: FileUtil.copy() does not close its
        // arguments, so the original code leaked the FileOutputStream (and
        // the stream returned by openRemoteClasses()).
        try (java.io.InputStream in = RemoteServices.openRemoteClasses();
                java.io.OutputStream out = new FileOutputStream(truffleJarFile)) {
            FileUtil.copy(in, out);
        }
        TEMP_TRUFFLE_JAR = truffleJarFile.getAbsolutePath();
    }
    return TEMP_TRUFFLE_JAR;
}
/**
 * Get MIME types based on registered Java platforms.
 * The call returns either a cached set, or queries the platforms.
 *
 * @return a set of MIME types.
 */
public synchronized Set<String> get() {
    if (allPlatformsMIMETypes != null) {
        return allPlatformsMIMETypes;
    }
    JavaPlatformManager pm = JavaPlatformManager.getDefault();
    if (allPlatformsListener == null) {
        // Register once: any platform change invalidates the aggregate cache
        // and forwards a PROP_MIME_TYPES change to our own listeners.
        allPlatformsListener = new PropertyChangeListener() {
            @Override public void propertyChange(PropertyChangeEvent evt) {
                synchronized (MIMETypes.this) {
                    allPlatformsMIMETypes = null;
                }
                // Fired outside the lock so listeners are not called while holding it.
                pcs.firePropertyChange(PROP_MIME_TYPES, null, null);
            }
        };
        pm.addPropertyChangeListener(allPlatformsListener);
    }
    JavaPlatform[] installedPlatforms = pm.getPlatforms(null, new Specification ("j2se", null)); //NOI18N
    // Union of MIME types over all installed J2SE platforms.
    Set<String> mTypes = new HashSet<>();
    for (int i = 0; i < installedPlatforms.length; i++) {
        mTypes.addAll(get(installedPlatforms[i]));
    }
    allPlatformsMIMETypes = mTypes;
    return mTypes;
}
/**
 * Get cached MIME types based on registered Java platforms.
 * Unlike {@link #get()}, this never queries the platforms.
 *
 * @return a cached set, or <code>null</code> when nothing is cached.
 */
public synchronized Set<String> getCached() {
    return allPlatformsMIMETypes;
}
/**
 * Finds the installed J2SE platform whose bootstrap classpath matches the
 * boot classpath of the project's first Java source group.
 *
 * @return the matching platform, or {@code null} when the project has no Java
 *         sources, no boot classpath, or no installed platform matches
 */
private static JavaPlatform getProjectPlatform(Project prj) {
    SourceGroup[] sourceGroups = ProjectUtils.getSources(prj).getSourceGroups(JavaProjectConstants.SOURCES_TYPE_JAVA);
    if (sourceGroups.length == 0) {
        // No Java source groups: the original code threw ArrayIndexOutOfBoundsException here.
        return null;
    }
    ClassPath bootClassPath = ClassPath.getClassPath(sourceGroups[0].getRootFolder(), ClassPath.BOOT);
    if (bootClassPath == null) {
        // getClassPath() may return null when no provider recognizes the root.
        return null;
    }
    FileObject[] prjBootRoots = bootClassPath.getRoots();
    JavaPlatformManager pm = JavaPlatformManager.getDefault();
    JavaPlatform[] installedPlatforms = pm.getPlatforms(null, new Specification ("j2se", null)); //NOI18N
    for (int i = 0; i < installedPlatforms.length; i++) {
        ClassPath bootstrapLibraries = installedPlatforms[i].getBootstrapLibraries();
        if (Arrays.equals(prjBootRoots, bootstrapLibraries.getRoots())) {
            return installedPlatforms[i];
        }
    }
    return null;
}
/** Registers a listener for {@link #PROP_MIME_TYPES} change events. */
public void addPropertyChangeListener(PropertyChangeListener l) {
    pcs.addPropertyChangeListener(l);
}
/** Unregisters a previously added listener. */
public void removePropertyChangeListener(PropertyChangeListener l) {
    pcs.removePropertyChangeListener(l);
}
}
|
wangyixiaohuihui/spark2-annotation
|
resource-managers/yarn/src/test/scala/org/apache/spark/scheduler/cluster/YarnSchedulerBackendSuite.scala
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.scheduler.cluster
import scala.language.reflectiveCalls
import org.mockito.Mockito.when
import org.scalatest.mock.MockitoSugar
import org.apache.spark.{LocalSparkContext, SparkContext, SparkFunSuite}
import org.apache.spark.scheduler.TaskSchedulerImpl
import org.apache.spark.serializer.JavaSerializer
class YarnSchedulerBackendSuite extends SparkFunSuite with MockitoSugar with LocalSparkContext {

  test("RequestExecutors reflects node blacklist and is serializable") {
    sc = new SparkContext("local", "YarnSchedulerBackendSuite")
    val sched = mock[TaskSchedulerImpl]
    when(sched.sc).thenReturn(sc)
    // Anonymous subclass exposing a setter for the protected hostToLocalTaskCount
    // field; the setter is reached via a structural type (hence reflectiveCalls).
    val yarnSchedulerBackend = new YarnSchedulerBackend(sched, sc) {
      def setHostToLocalTaskCount(hostToLocalTaskCount: Map[String, Int]): Unit = {
        this.hostToLocalTaskCount = hostToLocalTaskCount
      }
    }
    val ser = new JavaSerializer(sc.conf).newInstance()
    // Exercise the cross product of blacklist contents, requested executor
    // totals, and host-to-task-count locality maps.
    for {
      blacklist <- IndexedSeq(Set[String](), Set("a", "b", "c"))
      numRequested <- 0 until 10
      hostToLocalCount <- IndexedSeq(
        Map[String, Int](),
        Map("a" -> 1, "b" -> 2)
      )
    } {
      yarnSchedulerBackend.setHostToLocalTaskCount(hostToLocalCount)
      when(sched.nodeBlacklist()).thenReturn(blacklist)
      val req = yarnSchedulerBackend.prepareRequestExecutors(numRequested)
      assert(req.requestedTotal === numRequested)
      assert(req.nodeBlacklist === blacklist)
      // Blacklisted hosts must be filtered out of the locality hints.
      assert(req.hostToLocalTaskCount.keySet.intersect(blacklist).isEmpty)
      // Serialize to make sure serialization doesn't throw an error
      ser.serialize(req)
    }
    sc.stop()
  }
}
|
InariSoft/inari-firefly-libGDX
|
src/main/java/com/inari/firefly/libgdx/intro/InitInariIntro.java
|
package com.inari.firefly.libgdx.intro;
import com.inari.commons.geom.PositionF;
import com.inari.commons.geom.Rectangle;
import com.inari.commons.graphics.RGBColor;
import com.inari.firefly.asset.Asset;
import com.inari.firefly.control.task.Task;
import com.inari.firefly.entity.EEntity;
import com.inari.firefly.entity.EntityController;
import com.inari.firefly.entity.EntitySystem;
import com.inari.firefly.graphics.ETransform;
import com.inari.firefly.graphics.TextureAsset;
import com.inari.firefly.graphics.sprite.ESprite;
import com.inari.firefly.graphics.sprite.SpriteAsset;
/**
 * Task that builds the Inari intro scene: creates the logo texture and sprite
 * assets and an initially transparent, screen-centered logo entity whose alpha
 * is faded in by {@link IntroAnimationController}.
 */
public final class InitInariIntro extends Task {

    public InitInariIntro( int id ) {
        super( id );
    }

    @Override
    public final void runTask() {
        EntitySystem entitySystem = context.getSystem( EntitySystem.SYSTEM_KEY );

        // Controller that raises the logo's alpha on every update tick.
        int controllerId = context.getComponentBuilder( EntityController.TYPE_KEY, IntroAnimationController.class )
            .activate();

        // Texture asset holding the Inari logo image.
        context.getComponentBuilder( Asset.TYPE_KEY, TextureAsset.class )
            .set( TextureAsset.NAME, BuildInariIntro.INTRO_TEXTURE )
            .set( TextureAsset.RESOURCE_NAME, BuildInariIntro.INARI_LOGO_RESOURCE_PATH )
            .activate();
        TextureAsset textureAsset = context.getSystemComponent( Asset.TYPE_KEY, BuildInariIntro.INTRO_TEXTURE, TextureAsset.class );

        // Sprite asset spanning the entire texture region.
        context.getComponentBuilder( Asset.TYPE_KEY, SpriteAsset.class )
            .set( SpriteAsset.NAME, BuildInariIntro.INTRO_SPRITE )
            .set( SpriteAsset.TEXTURE_ASSET_NAME, BuildInariIntro.INTRO_TEXTURE )
            .set( SpriteAsset.TEXTURE_REGION, new Rectangle( 0, 0, textureAsset.getTextureWidth(), textureAsset.getTextureHeight() ) )
            .activate();

        // Logo entity centered on screen, fully transparent (alpha 0); the
        // attached controller fades it in.
        entitySystem.getEntityBuilder()
            .set( ETransform.VIEW_ID, 0 )
            .set( ETransform.POSITION, new PositionF(
                context.getScreenWidth() / 2 - textureAsset.getTextureWidth() / 2,
                context.getScreenHeight() / 2 - textureAsset.getTextureHeight() / 2
            ) )
            .set( ESprite.SPRITE_ASSET_NAME, BuildInariIntro.INTRO_SPRITE )
            .set( ESprite.TINT_COLOR, new RGBColor( 1f, 1f, 1f, 0f ) )
            .add( EEntity.CONTROLLER_IDS, controllerId )
            .activate();
    }

    /** Entity controller fading the logo in by raising its tint alpha toward 1. */
    public static class IntroAnimationController extends EntityController {

        public IntroAnimationController( int id ) {
            super( id );
        }

        @Override
        protected void update( int entityId ) {
            ESprite sprite = context.getEntityComponent( entityId, ESprite.TYPE_KEY );
            RGBColor tintColor = sprite.getTintColor();
            if ( tintColor.a < 1f ) {
                // Clamp at 1f: repeated 0.05f float additions starting from 0f
                // can overshoot 1f slightly, which the original code allowed.
                tintColor.a = Math.min( 1f, tintColor.a + 0.05f );
            }
        }
    }
}
|
Appkad/botbuilder-java
|
libraries/bot-schema/src/test/java/com/microsoft/bot/schema/ActivityTest.java
|
package com.microsoft.bot.schema;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.microsoft.bot.schema.teams.TeamsChannelData;
import org.junit.Assert;
import org.junit.Test;
import java.io.IOException;
public class ActivityTest {
@Test
public void GetConversationReference() {
Activity activity = createActivity();
ConversationReference conversationReference = activity.getConversationReference();
Assert.assertEquals(activity.getId(), conversationReference.getActivityId());
Assert.assertEquals(activity.getFrom().getId(), conversationReference.getUser().getId());
Assert.assertEquals(activity.getRecipient().getId(), conversationReference.getBot().getId());
Assert.assertEquals(activity.getConversation().getId(), conversationReference.getConversation().getId());
Assert.assertEquals(activity.getChannelId(), conversationReference.getChannelId());
Assert.assertEquals(activity.getServiceUrl(), conversationReference.getServiceUrl());
}
@Test
public void GetReplyConversationReference() {
Activity activity = createActivity();
ResourceResponse reply = new ResourceResponse() {{
setId("1234");
}};
ConversationReference conversationReference = activity.getReplyConversationReference(reply);
Assert.assertEquals(reply.getId(), conversationReference.getActivityId());
Assert.assertEquals(activity.getFrom().getId(), conversationReference.getUser().getId());
Assert.assertEquals(activity.getRecipient().getId(), conversationReference.getBot().getId());
Assert.assertEquals(activity.getConversation().getId(), conversationReference.getConversation().getId());
Assert.assertEquals(activity.getChannelId(), conversationReference.getChannelId());
Assert.assertEquals(activity.getServiceUrl(), conversationReference.getServiceUrl());
}
@Test
public void ApplyConversationReference_isIncoming() {
Activity activity = createActivity();
ConversationReference conversationReference = new ConversationReference() {{
setChannelId("cr_123");
setServiceUrl("cr_serviceUrl");
setConversation(new ConversationAccount(){{
setId("cr_456");
}});
setUser(new ChannelAccount() {{
setId("cr_abc");
}});
setBot(new ChannelAccount() {{
setId("cr_def");
}});
setActivityId("cr_12345");
}};
activity.applyConversationReference(conversationReference, true);
Assert.assertEquals(conversationReference.getChannelId(), activity.getChannelId());
Assert.assertEquals(conversationReference.getServiceUrl(), activity.getServiceUrl());
Assert.assertEquals(conversationReference.getConversation().getId(), activity.getConversation().getId());
Assert.assertEquals(conversationReference.getUser().getId(), activity.getFrom().getId());
Assert.assertEquals(conversationReference.getBot().getId(), activity.getRecipient().getId());
Assert.assertEquals(conversationReference.getActivityId(), activity.getId());
}
@Test
public void ApplyConversationReference() {
Activity activity = createActivity();
ConversationReference conversationReference = new ConversationReference() {{
setChannelId("123");
setServiceUrl("serviceUrl");
setConversation(new ConversationAccount(){{
setId("456");
}});
setUser(new ChannelAccount() {{
setId("abc");
}});
setBot(new ChannelAccount() {{
setId("def");
}});
setActivityId("12345");
}};
activity.applyConversationReference(conversationReference, false);
Assert.assertEquals(conversationReference.getChannelId(), activity.getChannelId());
Assert.assertEquals(conversationReference.getServiceUrl(), activity.getServiceUrl());
Assert.assertEquals(conversationReference.getConversation().getId(), activity.getConversation().getId());
Assert.assertEquals(conversationReference.getBot().getId(), activity.getFrom().getId());
Assert.assertEquals(conversationReference.getUser().getId(), activity.getRecipient().getId());
Assert.assertEquals(conversationReference.getActivityId(), activity.getReplyToId());
}
@Test
public void CreateTraceAllowsNullRecipient() {
Activity activity = createActivity();
activity.setRecipient(null);
Activity trace = activity.createTrace("test");
Assert.assertNull(trace.getFrom().getId());
}
private Activity createActivity() {
ChannelAccount account1 = new ChannelAccount() {{
setId("ChannelAccount_Id_1");
setName("ChannelAccount_Name_1");
setProperties("Name", JsonNodeFactory.instance.objectNode().put("Name", "Value"));
setRole(RoleTypes.USER);
}};
ChannelAccount account2 = new ChannelAccount() {{
setId("ChannelAccount_Id_2");
setName("ChannelAccount_Name_2");
setProperties("Name", JsonNodeFactory.instance.objectNode().put("Name", "Value"));
setRole(RoleTypes.USER);
}};
ConversationAccount conversationAccount = new ConversationAccount() {{
setConversationType("a");
setId("123");
setIsGroup(true);
setName("Name");
setProperties("Name", JsonNodeFactory.instance.objectNode().put("Name", "Value"));
}};
Activity activity = new Activity() {{
setId("123");
setFrom(account1);
setRecipient(account2);
setConversation(conversationAccount);
setChannelId("ChannelId123");
setServiceUrl("ServiceUrl123");
}};
return activity;
}
private static final String serializedActivity = "{\n"+
" \"attachments\": [],\n"+
" \"channelId\": \"directlinespeech\",\n"+
" \"conversation\":\n"+
" {\n"+
" \"id\": \"b18a1c99-7a29-4801-ac0c-579f2c36d52c\",\n"+
" \"isGroup\": false\n"+
" },\n"+
" \"entities\": [],\n"+
" \"from\":\n"+
" {\n"+
" \"id\": \"ConnectedCarAssistant\"\n"+
" },\n"+
" \"id\": \"9f90f0f5-be7d-410c-ad4a-5826751b26b1\",\n"+
" \"locale\": \"en-us\",\n"+
" \"name\": \"WebviewPreFetch\",\n"+
" \"recipient\":\n"+
" {\n"+
" \"id\": \"ef3de4593d4cc9b8\",\n"+
" \"role\": \"user\"\n"+
" },\n"+
" \"replyToId\": \"4d807515-46c1-44a1-b0f8-88457e3c13f2\",\n"+
" \"serviceUrl\": \"urn:botframework:websocket:directlinespeech\",\n"+
" \"text\": \"\",\n"+
" \"timestamp\": \"2019-11-14T17:50:06.8447816Z\",\n"+
" \"type\": \"event\",\n"+
" \"value\":\n"+
" {\n"+
" \"headers\":\n"+
" {\n"+
" \"opal-sessionid\": \"b18a1c99-7a29-4801-ac0c-579f2c36d52c\",\n"+
" \"x-Search-ClientId\": \"ef3de4593d4cc9b8\",\n"+
" \"x-Search-Market\": \"en-us\",\n"+
" \"x-Uqu-RefererType\": \"1\",\n"+
" \"x-Uqu-ResponseFormat\": \"0\"\n"+
" },\n"+
" \"uri\": \"https://www.bing.com/commit/v1?q=pull+down+the+driver+side&visualResponsePreference=0&uqurequestid=4D80751546C144A1B0F888457E3C13F2\",\n"+
" \"userAgent\": \"Mozilla/5.0 (Linux; Android 7.1.1; TB-8704V) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/76.0.3809.11 Safari/537.36 BingMobileApp/36 BMABuild/Production BMAConfig/0\"\n"+
" }\n"+
"}\n";
@Test
public void DeserializeActivity() throws IOException {
ObjectMapper objectMapper = new ObjectMapper();
objectMapper.findAndRegisterModules();
Activity activity = objectMapper.readValue(this.serializedActivity, Activity.class);
Assert.assertNotNull(activity.getTimestamp());
Assert.assertEquals("b18a1c99-7a29-4801-ac0c-579f2c36d52c", activity.getConversation().getId());
Assert.assertNotNull(activity.getValue());
}
private static final String serializedActivityFromTeams = "{" +
" \"channelId\": \"msteams\"," +
" \"channelData\": {" +
" \"teamsChannelId\": \"19:123cb42aa5a0a7e56f83@thread.skype\"," +
" \"teamsTeamId\": \"19:104f2cb42aa5a0a7e56f83@thread.skype\"," +
" \"channel\": {" +
" \"id\": \"19:4104f2cb42aa5a0a7e56f83@thread.skype\"," +
" \"name\": \"General\" " +
" }," +
" \"team\": {" +
" \"id\": \"19:aab4104f2cb42aa5a0a7e56f83@thread.skype\"," +
" \"name\": \"Kahoot\", " +
" \"aadGroupId\": \"0ac65971-e8a0-49a1-8d41-26089125ea30\"" +
" }," +
" \"notification\": {" +
" \"alert\": \"true\"" +
" }," +
" \"eventType\":\"teamMemberAdded\", " +
" \"tenant\": {" +
" \"id\": \"0-b827-4bb0-9df1-e02faba7ac20\"" +
" }" +
" }" +
"}";
private static final String serializedActivityFromTeamsWithoutTeamsChannelIdorTeamId = "{" +
" \"channelId\": \"msteams\"," +
" \"channelData\": {" +
" \"channel\": {" +
" \"id\": \"channel_id\"," +
" \"name\": \"channel_name\" " +
" }," +
" \"team\": {" +
" \"id\": \"team_id\"," +
" \"name\": \"team_name\", " +
" \"aadGroupId\": \"aad_groupid\"" +
" }," +
" \"notification\": {" +
" \"alert\": \"true\"" +
" }," +
" \"eventType\":\"teamMemberAdded\", " +
" \"tenant\": {" +
" \"id\": \"tenant_id\"" +
" }" +
" }" +
"}";
@Test
public void GetInformationForMicrosoftTeams() throws JsonProcessingException, IOException {
ObjectMapper objectMapper = new ObjectMapper();
objectMapper.findAndRegisterModules();
Activity activity = objectMapper.readValue(ActivityTest.serializedActivityFromTeams, Activity.class);
Assert.assertEquals("19:<EMAIL>", activity.teamsGetChannelId());
Assert.assertEquals("19:104f2cb42aa5a0a7e56f83<EMAIL>.skype", activity.teamsGetTeamId());
Assert.assertEquals(true, activity.isTeamsActivity());
activity = objectMapper.readValue(
ActivityTest.serializedActivityFromTeamsWithoutTeamsChannelIdorTeamId, Activity.class);
Assert.assertEquals("channel_id", activity.teamsGetChannelId());
Assert.assertEquals("team_id", activity.teamsGetTeamId());
TeamsChannelData teamsChannelData = activity.getChannelData(TeamsChannelData.class);
Assert.assertEquals("channel_id", teamsChannelData.getChannel().getId());
Assert.assertEquals("channel_name", teamsChannelData.getChannel().getName());
Assert.assertEquals("team_id", teamsChannelData.getTeam().getId());
Assert.assertEquals("team_name", teamsChannelData.getTeam().getName());
Assert.assertEquals("aad_groupid", teamsChannelData.getTeam().getAadGroupId());
Assert.assertEquals(true, teamsChannelData.getNotification().getAlert());
Assert.assertEquals("teamMemberAdded", teamsChannelData.getEventType());
Assert.assertEquals("tenant_id", teamsChannelData.getTenant().getId());
}
}
|
CevaComic/mobile-2020
|
Bogdan Remus Cristian/src/Components/Couriers/Couriers.classes.js
|
<filename>Bogdan Remus Cristian/src/Components/Couriers/Couriers.classes.js
import {fade, makeStyles} from '@material-ui/core/styles'
// Style hook for the Couriers list view: layout, list rows, avatar badges,
// the expanding search field, and the detail modal.
const useClasses = makeStyles(theme => ({
    // Full-size scrollable column container for the whole screen.
    couriers: {
        display: 'flex',
        flexDirection: 'column',
        width: '100%',
        height: '100%',
        overflowX: 'scroll'
    },
    // Placeholder card shown when no couriers are online.
    noOnline: {
        padding: theme.spacing(2),
        paddingLeft: theme.spacing(4),
        paddingRight: theme.spacing(4),
        marginLeft: theme.spacing(4),
        marginRight: theme.spacing(4),
        display: 'flex',
        justifyContent: 'center',
        backgroundColor: 'white',
        border: '1px solid #ccc',
        color: '#a6a6a6',
        borderRadius: '6px'
    },
    stickHeader: {
        backgroundColor: 'rgba(250,250,250,.94)!important'
    },
    // List slides left (via the listBack modifier) to reveal row actions.
    list: {
        transition: 'all 0.5s',
        '&$listBack': {
            marginLeft: '-150px'
        }
    },
    listBack: {},
    courierRow: {
        paddingLeft: theme.spacing(4),
        paddingRight: theme.spacing(4)
    },
    // Alternating row backgrounds.
    listItemBlack: {
        backgroundColor: '#f0f0f0',
        listStyleType: 'none!important',
    },
    listItemWhite: {
        backgroundColor: '#fafafa',
        listStyleType: 'none!important',
    },
    primaryText: {
        color: `${theme.palette.secondary.main}`,
        fontWeight: '500',
        fontSize: '11pt',
        marginLeft: '5px',
        lineHeight: '0px!important'
    },
    // avatarBox: {
    //     width: ''
    // },
    avatar: {
        width: '31px',
        height: '31px',
        objectFit: 'cover',
        borderRadius: '30px',
        border: '2px solid white',
        boxShadow: theme.shadows[1],
        zIndex: 2,
        marginTop: '5px',
        backgroundColor: '#fafafa',
    },
    avatarAround: {
        marginLeft: '26px!important',
    },
    listItem: {
        paddingTop: '0px!important',
        paddingBottom: '0px!important',
        height: '46px',
    },
    icon: {
        maxWidth: '42px',
        marginRight: '-10px'
    },
    inputRoot: {
        color: 'inherit',
        width: '100%'
    },
    // Search input collapses to zero width and expands on focus or when
    // the searchOpen modifier is applied.
    inputInput: {
        padding: theme.spacing(3, 3, 3, 3),
        width: '0px',
        transition: theme.transitions.create('width'),
        backgroundColor: 'transparent',
        border: '1px solid transparent',
        borderRadius: '6px',
        textAlign: 'center',
        marginLeft: 0,
        '&:focus': {
            backgroundColor: 'white',
            border: '1px solid #ccc',
            width: '100%'
        },
        '&$searchOpen': {
            backgroundColor: 'white',
            border: '1px solid #ccc',
            width: '100%'
        }
    },
    searchOpen: {},
    search: {
        paddingLeft: theme.spacing(4),
        paddingRight: theme.spacing(4),
        position: 'relative',
        borderRadius: theme.shape.borderRadius,
        backgroundColor: fade(theme.palette.common.white, 0.15),
        '&:hover': {
            backgroundColor: fade(theme.palette.common.white, 0.25)
        },
        marginLeft: 0,
        width: 'calc(100% - 32px)',
        marginTop: theme.spacing(3)
    },
    searchIcon: {
        width: theme.spacing(7),
        paddingLeft: theme.spacing(0),
        height: '100%',
        position: 'absolute',
        pointerEvents: 'none',
        display: 'flex',
        alignItems: 'center',
        justifyContent: 'center',
        '& .MuiSvgIcon-root': {
            // zIndex: 1,
            fontSize: '22pt',
            color: `${theme.palette.secondary.main}`,
            fill: `${theme.palette.secondary.main}`
        }
    },
    resultsText: {
        display: 'flex',
        // justifyContent: 'center',
        alignItems: 'center'
    },
    // Status badge pinned over the avatar.
    badge: {
        zIndex: '2!important',
        marginTop: '22px!important',
        marginLeft: '11px!important',
        top: 'auto!important',
        left: 'auto!important',
    },
    // Full-screen dimmed overlay hosting the courier detail modal.
    modal: {
        display: 'flex',
        alignItems: 'center',
        justifyContent: 'center',
        backgroundColor: 'rgba(0,0,0, 0.54)',
        zIndex: '1500!important',
        // width: 'calc(100% - 40px)',
    },
    modalInner: {
        backgroundColor: '#fafafa',
        borderRadius: theme.spacing(2),
        boxShadow: theme.shadows[5],
        width: 'calc(100% - 20px)',
        height: 'calc(100% - 125px)',
        overflowY: 'scroll',
        position: 'relative'
    },
}))
export default useClasses
|
windystrife/UnrealEngine_NVIDIAGameWork
|
Engine/Source/Runtime/Slate/Public/Widgets/Docking/SDockTabStack.h
|
<filename>Engine/Source/Runtime/Slate/Public/Widgets/Docking/SDockTabStack.h
// Copyright 1998-2017 Epic Games, Inc. All Rights Reserved.
#pragma once
#include "CoreMinimal.h"
#include "Widgets/DeclarativeSyntaxSupport.h"
class SDockableTab;
/**
* A node in the Docking/Tabbing hierarchy.
* A DockTabStack shows a row of tabs and the content of one selected tab.
* It also supports re-arranging tabs and dragging them out for the stack.
*/
class SLATE_API SDockTabStack
{
public:

	SLATE_BEGIN_ARGS(SDockTabStack)
		// Regular tab stack by default; not a document area.
		: _IsDocumentArea(false)
		{}
		/** Whether this stack acts as a document area. */
		SLATE_ARGUMENT( bool, IsDocumentArea )
	SLATE_END_ARGS()

	/** Constructs this widget from its declarative arguments. */
	void Construct( const FArguments& InArgs );

	/**
	 * Adds a tab to this stack.
	 *
	 * @param InTab       The tab to add.
	 * @param AtLocation  Index to insert at; INDEX_NONE appends.
	 */
	void AddTab( const TSharedRef<SDockableTab>& InTab, int32 AtLocation = INDEX_NONE );
};
|
KairosSystems/drMouse
|
venv/lib/python2.7/site-packages/py2neo/cypher/delete.py
|
<gh_stars>1-10
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# Copyright 2011-2014, <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from py2neo.core import Graph, Node, Path, Relationship
from py2neo.cypher.util import StartOrMatch
from py2neo.util import ustr, xstr
__all__ = ["DeleteStatement"]
def _(*args):
    """Build a Cypher identifier by prefixing each argument with an underscore."""
    parts = ["_" + ustr(arg) for arg in args]
    return "".join(parts)
class DeleteStatement(object):
    """ Builder for a Cypher DELETE statement.
    """

    #: The graph against which this statement is to be executed.
    graph = None

    #: The parameters to inject into this statement.
    parameters = None

    def __init__(self, graph):
        self.graph = graph
        self.supports_node_labels = self.graph.supports_node_labels
        # Entities (nodes/relationships/paths) scheduled for deletion, in order.
        self.entities = []
        self.start_or_match_clause = StartOrMatch(self.graph)
        # Relationships are listed separately from nodes so they are deleted first.
        self.delete_rels_clause = []
        self.delete_nodes_clause = []
        self.parameters = {}

    def __repr__(self):
        return self.string

    def __str__(self):
        return xstr(self.__unicode__())

    def __unicode__(self):
        return self.string

    def __contains__(self, entity):
        # Identity comparison (is), not equality: membership means this exact object.
        return any(e is entity for e in self.entities)

    @property
    def string(self):
        """ The full Cypher statement as a string.
        """
        clauses = []
        if self.start_or_match_clause:
            clauses.append(self.start_or_match_clause.string.rstrip())
        # Emit relationship deletions before node deletions so attached
        # relationships are removed first.
        if self.delete_rels_clause:
            clauses.append("DELETE " + ",".join(self.delete_rels_clause))
        if self.delete_nodes_clause:
            clauses.append("DELETE " + ",".join(self.delete_nodes_clause))
        return "\n".join(clauses)

    def post(self):
        # POST the assembled statement with its parameters; returns the raw response.
        return self.graph.cypher.post(self.string, self.parameters)

    def execute(self):
        """ Execute this statement.
        """
        # Nothing to do when no entity produced any clause.
        if self.string:
            self.post().close()

    def delete(self, entity):
        """ Append an entity to the DELETE clause of this statement.

        :arg entity: The entity to delete.

        """
        entity = Graph.cast(entity)
        index = len(self.entities)
        name = _(index)
        if isinstance(entity, Node):
            self._delete_node(entity, name)
        elif isinstance(entity, Relationship):
            self._delete_relationship(entity, name)
        elif isinstance(entity, Path):
            self._delete_path(entity, name)
        self.entities.append(entity)

    def _delete_node(self, node, name):
        # Only bound (server-backed) nodes are deletable; unbound ones are ignored.
        if node.bound:
            self.start_or_match_clause.node(name, "{%s}" % name)
            self.delete_nodes_clause.append(name)
            self.parameters[name] = node._id

    def _delete_relationship(self, relationship, name):
        # Only bound relationships are deletable; unbound ones are ignored.
        if relationship.bound:
            self.start_or_match_clause.relationship(name, "{%s}" % name)
            self.delete_rels_clause.append(name)
            self.parameters[name] = relationship._id

    def _delete_path(self, path, name):
        # Schedule the path's relationships first, then its nodes.
        for i, rel in enumerate(path.relationships):
            self._delete_relationship(rel, name + "r" + ustr(i))
        for i, node in enumerate(path.nodes):
            self._delete_node(node, name + "n" + ustr(i))
|
makandra/serum-rails
|
spec/test_apps/rails-2-3/lib/tasks/pending_migrations.rake
|
<filename>spec/test_apps/rails-2-3/lib/tasks/pending_migrations.rake
# Rake task to warn over the shell when there are pending migrations.
# origin: RM
namespace :db do
  desc "Warns if there are pending migrations"
  task :warn_if_pending_migrations => :environment do
    # Skip entirely when the app does not use ActiveRecord.
    if defined? ActiveRecord
      # Rails 2.3 API: Migrator#pending_migrations lists migrations not yet applied.
      pending_migrations = ActiveRecord::Migrator.new(:up, 'db/migrate').pending_migrations

      if pending_migrations.any?
        # Warn on stdout only; intentionally does not abort the task chain.
        puts ""
        puts "======================================================="
        puts "You have #{pending_migrations.size} pending migrations:"
        pending_migrations.each do |pending_migration|
          puts '  %4d %s' % [pending_migration.version, pending_migration.name]
        end
        puts "======================================================="
        puts ""
      end
    end
  end
end
|
ScalablyTyped/SlinkyTyped
|
a/aws-sdk/src/main/scala/typingsSlinky/awsSdk/configserviceMod/GetAggregateComplianceDetailsByConfigRuleRequest.scala
|
<reponame>ScalablyTyped/SlinkyTyped<gh_stars>10-100
package typingsSlinky.awsSdk.configserviceMod
import org.scalablytyped.runtime.StObject
import scala.scalajs.js
import scala.scalajs.js.`|`
import scala.scalajs.js.annotation.{JSGlobalScope, JSGlobal, JSImport, JSName, JSBracketAccess}
// NOTE: ScalablyTyped-generated facade of the aws-sdk TypeScript definitions;
// members mirror the AWS Config GetAggregateComplianceDetailsByConfigRule
// request shape. Keep edits in sync with the generator's conventions.
@js.native
trait GetAggregateComplianceDetailsByConfigRuleRequest extends StObject {

  /**
    * The 12-digit account ID of the source account.
    */
  var AccountId: typingsSlinky.awsSdk.configserviceMod.AccountId = js.native

  /**
    * The source region from where the data is aggregated.
    */
  var AwsRegion: typingsSlinky.awsSdk.configserviceMod.AwsRegion = js.native

  /**
    * The resource compliance status. For the GetAggregateComplianceDetailsByConfigRuleRequest data type, AWS Config supports only the COMPLIANT and NON_COMPLIANT. AWS Config does not support the NOT_APPLICABLE and INSUFFICIENT_DATA values.
    */
  var ComplianceType: js.UndefOr[typingsSlinky.awsSdk.configserviceMod.ComplianceType] = js.native

  /**
    * The name of the AWS Config rule for which you want compliance information.
    */
  var ConfigRuleName: typingsSlinky.awsSdk.configserviceMod.ConfigRuleName = js.native

  /**
    * The name of the configuration aggregator.
    */
  var ConfigurationAggregatorName: typingsSlinky.awsSdk.configserviceMod.ConfigurationAggregatorName = js.native

  /**
    * The maximum number of evaluation results returned on each page. The default is 50. You cannot specify a number greater than 100. If you specify 0, AWS Config uses the default.
    */
  var Limit: js.UndefOr[typingsSlinky.awsSdk.configserviceMod.Limit] = js.native

  /**
    * The nextToken string returned on a previous page that you use to get the next page of results in a paginated response.
    */
  var NextToken: js.UndefOr[typingsSlinky.awsSdk.configserviceMod.NextToken] = js.native
}
// Companion with the generated factory and fluent mutators for the facade trait.
object GetAggregateComplianceDetailsByConfigRuleRequest {

  /** Creates an instance with all required members set. */
  @scala.inline
  def apply(
    AccountId: AccountId,
    AwsRegion: AwsRegion,
    ConfigRuleName: ConfigRuleName,
    ConfigurationAggregatorName: ConfigurationAggregatorName
  ): GetAggregateComplianceDetailsByConfigRuleRequest = {
    val __obj = js.Dynamic.literal(AccountId = AccountId.asInstanceOf[js.Any], AwsRegion = AwsRegion.asInstanceOf[js.Any], ConfigRuleName = ConfigRuleName.asInstanceOf[js.Any], ConfigurationAggregatorName = ConfigurationAggregatorName.asInstanceOf[js.Any])
    __obj.asInstanceOf[GetAggregateComplianceDetailsByConfigRuleRequest]
  }

  /** Generated mutable-builder syntax: each setter mutates the underlying JS object. */
  @scala.inline
  implicit class GetAggregateComplianceDetailsByConfigRuleRequestMutableBuilder[Self <: GetAggregateComplianceDetailsByConfigRuleRequest] (val x: Self) extends AnyVal {

    @scala.inline
    def setAccountId(value: AccountId): Self = StObject.set(x, "AccountId", value.asInstanceOf[js.Any])

    @scala.inline
    def setAwsRegion(value: AwsRegion): Self = StObject.set(x, "AwsRegion", value.asInstanceOf[js.Any])

    @scala.inline
    def setComplianceType(value: ComplianceType): Self = StObject.set(x, "ComplianceType", value.asInstanceOf[js.Any])

    @scala.inline
    def setComplianceTypeUndefined: Self = StObject.set(x, "ComplianceType", js.undefined)

    @scala.inline
    def setConfigRuleName(value: ConfigRuleName): Self = StObject.set(x, "ConfigRuleName", value.asInstanceOf[js.Any])

    @scala.inline
    def setConfigurationAggregatorName(value: ConfigurationAggregatorName): Self = StObject.set(x, "ConfigurationAggregatorName", value.asInstanceOf[js.Any])

    @scala.inline
    def setLimit(value: Limit): Self = StObject.set(x, "Limit", value.asInstanceOf[js.Any])

    @scala.inline
    def setLimitUndefined: Self = StObject.set(x, "Limit", js.undefined)

    @scala.inline
    def setNextToken(value: NextToken): Self = StObject.set(x, "NextToken", value.asInstanceOf[js.Any])

    @scala.inline
    def setNextTokenUndefined: Self = StObject.set(x, "NextToken", js.undefined)
  }
}
|
gladsonsimoes/ExemplosDeAlgoritmos
|
logica_de_programacao/programacao_orientada_a_objetos/encapsulamento/Encapsulamento.java
|
package com.company.programacao_orientada_a_objetos.encapsulamento;
/**
 * Demonstrates encapsulation: client code reads and mutates {@code Cliente}
 * state only through its getters and setters.
 */
public class Encapsulamento {

    public static void main(String[] args) {
        // Direct field access (the non-encapsulated alternative), kept for comparison:
        // Cliente cliente = new Cliente();
        // cliente.nome = "Maria Silva";
        // cliente.telefone = "34922334455";
        // System.out.println("Nome cliente: " + cliente.nome);

        Cliente cliente = new Cliente();
        // Anonymization left a "<NAME>" placeholder here; restored with a
        // two-part name so the first/last-name getters below produce output.
        cliente.setNome("Maria Silva");
        cliente.setTelefone("34922334455");

        System.out.println("Nome cliente: " + cliente.getNome());
        System.out.println("Primeiro nome: " + cliente.getPrimeiroNome());
        System.out.println("Último nome: " + cliente.getUltimoNome());
    }
}
|
Jacko1972/PopularMovies2
|
app/src/main/java/com/jacko1972/popularmovies2/service/FullMovieJsonResponse.java
|
<filename>app/src/main/java/com/jacko1972/popularmovies2/service/FullMovieJsonResponse.java<gh_stars>0
package com.jacko1972.popularmovies2.service;
import com.google.gson.annotations.SerializedName;
import com.jacko1972.popularmovies2.model.MovieInfo;
import java.util.List;
/**
 * Gson model for a paged movie-list API response (TMDb-style paging fields —
 * confirm against the service contract). Field names keep the snake_case of
 * the JSON via {@link SerializedName}; accessor names are preserved for
 * callers, but setter parameter names are normalized to camelCase (the
 * original mixed snake_case and camelCase).
 */
public class FullMovieJsonResponse {

    @SerializedName("page")
    private int page;

    @SerializedName("results")
    private List<MovieInfo> results;

    @SerializedName("total_results")
    private int total_results;

    @SerializedName("total_pages")
    private int total_pages;

    /** @return the page number of this response. */
    public int getPage() {
        return page;
    }

    public void setPage(int page) {
        this.page = page;
    }

    /** @return the movies contained in this page. */
    public List<MovieInfo> getResults() {
        return results;
    }

    public void setResults(List<MovieInfo> results) {
        this.results = results;
    }

    /** @return total number of matching movies across all pages. */
    public int getTotal_results() {
        return total_results;
    }

    public void setTotal_results(int totalResults) {
        this.total_results = totalResults;
    }

    /** @return total number of result pages available. */
    public int getTotal_pages() {
        return total_pages;
    }

    public void setTotal_pages(int totalPages) {
        this.total_pages = totalPages;
    }
}
|
hmrc/income-tax-cis-frontend
|
test/support/builders/models/AuthorisationRequestBuilder.scala
|
<reponame>hmrc/income-tax-cis-frontend
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package support.builders.models
import models.{AuthorisationRequest, User}
import play.api.mvc.AnyContentAsEmpty
import play.api.test.FakeRequest
import support.builders.models.mongo.CisUserDataBuilder.aCisUserData
/**
 * Test-data builder: a default [[AuthorisationRequest]] whose user fields
 * (MTD-IT id, NINO, session id) are taken from `aCisUserData`, wrapping an
 * empty [[FakeRequest]].
 */
object AuthorisationRequestBuilder {

  val anAuthorisationRequest: AuthorisationRequest[AnyContentAsEmpty.type] = AuthorisationRequest(
    user = User(aCisUserData.mtdItId, None, aCisUserData.nino, aCisUserData.sessionId, "affinityGroup"),
    request = FakeRequest()
  )
}
|
alistairholmes/Digital-Nomad-Jobs
|
app/src/main/java/io/github/alistairholmes/digitalnomadjobs/ui/jobdetail/DetailActivity.java
|
<reponame>alistairholmes/Digital-Nomad-Jobs<filename>app/src/main/java/io/github/alistairholmes/digitalnomadjobs/ui/jobdetail/DetailActivity.java
package io.github.alistairholmes.digitalnomadjobs.ui.jobdetail;
import android.content.Intent;
import android.net.Uri;
import android.os.Bundle;
import android.view.View;
import android.widget.ImageView;
import android.widget.TextView;
import androidx.appcompat.app.AppCompatActivity;
import androidx.appcompat.widget.Toolbar;
import androidx.browser.customtabs.CustomTabsIntent;
import androidx.core.content.ContextCompat;
import com.amulyakhare.textdrawable.TextDrawable;
import com.amulyakhare.textdrawable.util.ColorGenerator;
import com.bumptech.glide.Glide;
import java.util.Objects;
import butterknife.BindString;
import io.github.alistairholmes.digitalnomadjobs.R;
import io.github.alistairholmes.digitalnomadjobs.data.local.entity.FavoriteJob;
import io.github.alistairholmes.digitalnomadjobs.data.model.Job;
/**
 * Shows the details of a single job (title, company, logo, description) and
 * lets the user apply via a Chrome Custom Tab pointing at the job's RemoteOK
 * page. Job data arrives through the launching Intent's extras.
 */
public class DetailActivity extends AppCompatActivity {

    public static final String ARG_DETAIL_JOB = "detail_job";

    @BindString(R.string.no_description_job)
    String no_description;

    public TextView tv_JobTitle;
    public TextView tv_CompanyName;
    public TextView tv_JobDescription;
    public int jobID;
    public ImageView iv_CompanyLogo;
    public final String REMOTEOK_URL = "https://remoteok.io/l/";
    private Job job;
    private FavoriteJob favoriteJob;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_detail);

        // Toolbar with back navigation.
        Toolbar toolbar = findViewById(R.id.app_bar);
        setSupportActionBar(toolbar);
        if (getSupportActionBar() != null) {
            getSupportActionBar().setDisplayHomeAsUpEnabled(true);
            getSupportActionBar().setDisplayShowHomeEnabled(true);
            toolbar.setNavigationOnClickListener(view -> onBackPressed());
        }

        tv_JobTitle = findViewById(R.id.textView_jobtitle);
        tv_CompanyName = findViewById(R.id.textView_companyname);
        iv_CompanyLogo = findViewById(R.id.imageView_Logo);
        tv_JobDescription = findViewById(R.id.textView_description);

        // Restore the parceled job either from the launching Intent or from a
        // previous instance state (two payload types share the same key).
        if (savedInstanceState == null) {
            Intent launchIntent = getIntent();
            if (launchIntent.hasExtra(ARG_DETAIL_JOB)) {
                if (Objects.requireNonNull(launchIntent.getExtras()).getParcelable(ARG_DETAIL_JOB) instanceof Job) {
                    this.job = launchIntent.getExtras().getParcelable(ARG_DETAIL_JOB);
                } else if (launchIntent.getExtras().getParcelable(ARG_DETAIL_JOB) instanceof FavoriteJob) {
                    this.favoriteJob = launchIntent.getExtras().getParcelable(ARG_DETAIL_JOB);
                }
            }
        } else {
            if (savedInstanceState.getParcelable(ARG_DETAIL_JOB) instanceof Job) {
                this.job = savedInstanceState.getParcelable(ARG_DETAIL_JOB);
            } else {
                this.favoriteJob = savedInstanceState.getParcelable(ARG_DETAIL_JOB);
            }
        }

        // Extract the display fields from the launching Intent's bundle.
        Intent intent = getIntent();
        Bundle extras = intent.getExtras();
        if (extras == null) {
            // Launched without a payload: nothing to display, bail out
            // gracefully instead of crashing with an NPE below.
            finish();
            return;
        }
        String job_title = extras.getString("JOB_TITLE");
        String company_name = extras.getString("COMPANY_NAME");
        String company_logo = extras.getString("COMPANY_LOGO");
        String job_description = extras.getString("JOB_DESCRIPTION");
        jobID = extras.getInt("JOB_ID");

        tv_JobTitle.setText(job_title);
        tv_CompanyName.setText(company_name);
        tv_JobDescription.setText(job_description);

        // Load the company logo; fall back to a colored initial when absent.
        if (company_logo != null) {
            Glide.with(this)
                    .load(company_logo)
                    .into(iv_CompanyLogo);
        } else {
            ColorGenerator generator = ColorGenerator.MATERIAL;
            int color = generator.getRandomColor();
            TextDrawable drawable = TextDrawable
                    .builder()
                    .buildRoundRect(company_name.substring(0, 1).toUpperCase(), color, 50);
            iv_CompanyLogo.setImageDrawable(drawable);
        }

        final String jobURL = REMOTEOK_URL + jobID;
        // Open the job's RemoteOK page in a Chrome Custom Tab on "apply".
        findViewById(R.id.btn_applyforjob).setOnClickListener(view -> {
            CustomTabsIntent.Builder builder = new CustomTabsIntent.Builder();
            // Set toolbar color and custom actions before invoking build().
            builder.setToolbarColor(ContextCompat.getColor(DetailActivity.this, R.color.colorAccent));
            CustomTabsIntent customTabsIntent = builder.build();
            customTabsIntent.launchUrl(DetailActivity.this, Uri.parse(jobURL));
        });
    }
}
|
dalbrx-forcam/java-sdk
|
model-basic/src/main/java/com/forcam/na/ffwebservices/model/workplacegroup/WorkplaceGroupPropertiesWSModel.java
|
////////////////////////////////////////////////////////////////////////////////
//
// Created by MJesser on 16.02.2018.
//
// Copyright (c) 2006 - 2018 FORCAM GmbH. All rights reserved.
////////////////////////////////////////////////////////////////////////////////
package com.forcam.na.ffwebservices.model.workplacegroup;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import javax.xml.bind.annotation.XmlRootElement;
/**
* Contains the properties of a workplace group.
*/
@XmlRootElement(name = "workplaceGroupProperties")
@ApiModel(value = "workplaceGroupProperties")
@JsonPropertyOrder({ "id", "number", "description", "type", "erpContextId" })
public class WorkplaceGroupPropertiesWSModel {

    // ------------------------------------------------------------------------
    // members
    // ------------------------------------------------------------------------

    /** The ID of the workplace group. */
    private String mId;

    /** The name of the workplace group. */
    private String mNumber;

    /** The description of the workplace group. */
    private String mDescription;

    /** The type of the workplace group. */
    private WorkplaceGroupType mType;

    /** The ERP context ID of the workplace group. */
    private String mErpContextId;

    // ------------------------------------------------------------------------
    // getters/setters
    // ------------------------------------------------------------------------

    /** Sets the UUID of the workplace group. */
    @ApiModelProperty(value = "UUID of the workplace group", required = true, position = 0)
    public void setId(String id) {
        mId = id;
    }

    /** @return the UUID of the workplace group */
    public String getId() {
        return mId;
    }

    /** Sets the workplace group number (unique within its ERP context). */
    @ApiModelProperty(value = "The workplace group number which is unique in its ERP context", position = 1)
    public void setNumber(String number) {
        mNumber = number;
    }

    /** @return the workplace group number */
    public String getNumber() {
        return mNumber;
    }

    /** Sets the human-readable description of the workplace group. */
    @ApiModelProperty(value = "A description of the workplace group", position = 2)
    public void setDescription(String description) {
        mDescription = description;
    }

    /** @return the description of the workplace group */
    public String getDescription() {
        return mDescription;
    }

    /** Sets the group type (capacity group or production line). */
    @ApiModelProperty(value = "Type of the workplace group, either “CAPACITY_GROUP” or “PRODUCTION_LINE”", position = 3)
    public void setType(WorkplaceGroupType type) {
        mType = type;
    }

    /** @return the group type */
    public WorkplaceGroupType getType() {
        return mType;
    }

    /** Sets the ERP context UUID scoping the workplace number's uniqueness. */
    @ApiModelProperty(value = "Universally unique identifier of the ERP context in which the workplace number is a unique identifier for a workplace or a workplace group. In SAP ERP, for example, this context is defined by the client, the company code, and the plant", position = 4)
    public void setErpContextId(String erpContextId) {
        mErpContextId = erpContextId;
    }

    /** @return the ERP context UUID */
    public String getErpContextId() {
        return mErpContextId;
    }
}
|
thomasm1/app-tester
|
bsc/src/pages/forms/wizard/Wizard.js
|
import React from 'react';
import withStyles from 'isomorphic-style-loader/lib/withStyles';
import {
Row,
Col,
Button,
FormGroup,
Label,
Nav,
NavLink,
NavItem,
Progress,
} from 'reactstrap';
import Formsy from 'formsy-react';
import Select2 from 'react-select2-wrapper';
import MaskedInput from 'react-maskedinput';
import Datetime from 'react-datetime';
import { select2CountriesData, select2ShipmentData, cardTypesData } from './data';
import InputValidation from '../../../components/InputValidation/InputValidation';
import Widget from '../../../components/Widget';
import s from './Wizard.scss';
const count = 4;
const StepsComponents = {
Step1: function Step1() {
return (<fieldset>
<FormGroup row>
<Label className="text-md-right" md={3} for="username">Username</Label>
<Col md={8}>
<Col md={10}>
<InputValidation
type="text"
id="username"
name="username"
validations={{ isAlphanumeric: true }}
trigger="change"
required
validationError={{ isAlphanumeric: 'Username can contain any letters or numbers, without spaces' }}
/>
<p className="help-block">Username can contain any letters or numbers, without spaces</p>
</Col>
</Col>
</FormGroup>
<FormGroup row>
<Label className="text-md-right" md={3} for="email">Email</Label>
<Col md={8}>
<Col md={10}>
<InputValidation
type="text"
id="email"
name="email"
validations={{ isEmail: true }}
required
validationError={{ isEmail: 'Please provide your E-mail' }}
/>
<p className="help-block">Please provide your E-mail</p>
</Col>
</Col>
</FormGroup>
<FormGroup row>
<Label className="text-md-right" md={3} for="address">Address</Label>
<Col md={8}>
<Col md={10}>
<InputValidation
type="text"
id="address"
name="address"
validations={{ isAlpha: true }}
required
validationError={{ isAlpha: 'Please provide your address' }}
/>
<p className="help-block">Please provide your address</p>
</Col>
</Col>
</FormGroup>
</fieldset>);
},
Step2: function Step2() {
return (
<fieldset>
<FormGroup row>
<Label md={3} className="text-md-right" for="country-select">Destination Country</Label>
<Col md={8}>
<Col md={10}>
<Select2
style={{ width: '100%' }}
id="country-selec"
data={select2CountriesData}
/>
<p className="help-block">Please choose your country destination</p>
</Col>
</Col>
</FormGroup>
<FormGroup row>
<Label md={3} className="text-md-right" for="courier">Choose shipping option</Label>
<Col md={8}>
<Col md={10}>
<Select2
style={{ width: '100%' }}
id="courier"
data={select2ShipmentData}
/>
<p className="help-block">Please choose your shipping option</p>
</Col>
</Col>
</FormGroup>
<FormGroup row>
<Label md={3} className="text-md-right" for="destination">Destination Zip Code</Label>
<Col md={8}>
<Col md={10}>
<MaskedInput
className="form-control" id="destination" mask="111111"
size="6"
/>
<p className="help-block">Please provide your Destination Zip Code</p>
</Col>
</Col>
</FormGroup>
<FormGroup row>
<Label md={3} className="text-md-right" for="dest-address">Destination Address</Label>
<Col md={8}>
<Col md={10}>
<InputValidation type="text" id="dest-address" name="dest-address" />
<p className="help-block">Please provide the destination address</p>
</Col>
</Col>
</FormGroup>
</fieldset>
);
},
Step3: function Step3(props) {
return (
<fieldset>
<FormGroup row>
<Label md={3} className="text-md-right" for="name">Name on the Card</Label>
<Col md={8}>
<Col md={10}>
<InputValidation type="text" id="name" name="name" />
</Col>
</Col>
</FormGroup>
<FormGroup row>
<Label md={3} className="text-md-right" for="credit-card-type">Choose shipping option</Label>
<Col md={8}>
<Col md={10}>
<Select2
style={{ width: '100%' }}
id="credit-card-type"
data={cardTypesData}
/>
</Col>
</Col>
</FormGroup>
<FormGroup row>
<Label md={3} className="text-md-right" for="credit">Credit Card Number</Label>
<Col md={8}>
<Col md={10}>
<InputValidation type="text" id="credit" name="credit" />
</Col>
</Col>
</FormGroup>
<FormGroup row>
<Label md={3} className="text-md-right" for="expiration-data">Expiration Date</Label>
<Col md={8}>
<Col md={10}>
<div className="datepicker">
<Datetime
id="datepicker"
open={props.isDatePickerOpen} //eslint-disable-line
viewMode="days"
/>
</div>
</Col>
</Col>
</FormGroup>
</fieldset>
);
},
Step4: function Step4() {
return (
<fieldset>
<h2>Thank you!</h2>
<p>Your submission has been received.</p>
</fieldset>
);
},
};
class Wizard extends React.Component {
constructor(prop) {
super(prop);
this.state = {
currentStep: 1,
progress: 25,
isDatePickerOpen: false,
};
this.nextStep = this.nextStep.bind(this);
this.previousStep = this.previousStep.bind(this);
}
nextStep() {
let currentStep = this.state.currentStep;
if (currentStep >= count) {
currentStep = count;
} else {
currentStep += 1;
}
this.setState({
currentStep,
progress: (100 / count) * currentStep,
});
}
previousStep() {
let currentStep = this.state.currentStep;
if (currentStep <= 1) {
currentStep = 1;
} else {
currentStep -= 1;
}
this.setState({
currentStep,
progress: (100 / count) * currentStep,
});
}
render() {
const currentStep = this.state.currentStep;
return (
<div className={s.root}>
<h2 className="page-title">Form Wizard
<small>Form validation</small>
</h2>
<Row>
<Col xl={1} className="d-none d-lg-block" />
<Col xl={7} lg={12}>
<Widget
close collapse settings
className={s.formWizard}
title={<div>
<h4>
<i className="fa fa-windows" />
Wizard
<small>Tunable widget</small>
</h4>
<p className="text-muted">An example of complete wizard form in widget.</p></div>}
>
<Nav pills justified className={s.wizardNavigation}>
<NavItem>
<NavLink active={currentStep === 1}>
<small>1.</small>
<strong> Your Details</strong>
</NavLink>
</NavItem>
<NavItem>
<NavLink active={currentStep === 2}>
<small>2.</small>
<strong>Shipping</strong>
</NavLink>
</NavItem>
<NavItem>
<NavLink active={currentStep === 3}>
<small>3.</small>
<strong>Pay</strong>
</NavLink>
</NavItem>
<NavItem>
<NavLink active={currentStep === 4}>
<small>4.</small>
<strong>Thank you!</strong>
</NavLink>
</NavItem>
</Nav>
<Progress value={this.state.progress} color="white" className="progress-sm" />
<div className="tab-content">
<div className="step-body">
<Formsy.Form>
{currentStep === 1 && <StepsComponents.Step1 />}
{currentStep === 2 && <StepsComponents.Step2 />}
{currentStep === 3 && <StepsComponents.Step3 />}
{currentStep === 4 &&
<StepsComponents.Step4 isDatePickerOpen={this.state.isDatePickerOpen} />}
</Formsy.Form>
</div>
<div className="description ml mr">
<ul className="pager wizard">
<li className="previous">
<Button disabled={currentStep === 1} color="primary" onClick={this.previousStep}><i
className="fa fa-caret-left"
/>
Previous</Button>
</li>
{currentStep < count &&
<li className="next">
<Button color="primary" onClick={this.nextStep}>Next <i className="fa fa-caret-right" /></Button>
</li>
}
{currentStep === count &&
<li className="finish">
<Button color="success">Finish <i className="fa fa-check" /></Button>
</li>
}
</ul>
</div>
</div>
</Widget>
</Col>
</Row>
</div>);
}
}
export default withStyles(s)(Wizard);
|
passcod/rollup
|
test/function/samples/cycles-export-star/a.js
|
// Test fixture: re-exports b.js (the intentional import cycle under test)
// and defines a(), used to verify exports survive the cycle.
export * from './b.js';
export function a() {
  return 'a';
}
|
darcectech/ECMS
|
dist/resources/app/node_modules/php-parser/src/ast/global.js
|
/*!
* Copyright (C) 2017 Glayzzle (BSD3 License)
* @authors https://github.com/glayzzle/php-parser/graphs/contributors
* @url http://glayzzle.com
*/
"use strict";
var Statement = require('./statement');
// AST node kind tag for `global` statements.
var KIND = 'global';

/**
 * Imports a variable from the global scope
 * @constructor Global
 * @extends {Statement}
 * @property {Variable[]} items
 */
var Global = Statement.extends(function Global(items, location) {
  // Initialize the base Statement with this node's kind tag and location.
  Statement.apply(this, [KIND, location]);
  this.items = items;
});

module.exports = Global;
|
homayoonfarrahi/cycle-time-study
|
rl/rl/examples/run_ppo_time_test.py
|
<filename>rl/rl/examples/run_ppo_time_test.py
import os
import yaml
import argparse
import numpy as np
import matplotlib as mpl
mpl.use("TKAgg")
try:
import pybullet_envs
except ImportError:
print("pybullet_envs not available")
import gym
import torch
from torch.optim import Adam
from tqdm import tqdm
from rl.agent import Agent
from rl.buffers.buffer import Buffer
from rl.nets.policies import MLPPolicy
from rl.nets.valuefs import MLPVF
from rl.learners.ppo_test_time import PPOTestTime
import rl.envs
def main():
    """Run the PPO timing benchmark: collect one rollout batch, then profile
    learner update times while sweeping one hyperparameter.
    """
    # Setup
    parser = argparse.ArgumentParser()
    parser.add_argument('-c', '--config', required=True)
    # BUGFIX: defaults were strings ("0", '0'); argparse does NOT run `type`
    # on defaults, so np.random.seed / env.seed would receive a str and fail.
    parser.add_argument('-s', '--seed', required=False, type=int, default=0)
    parser.add_argument('-d', '--device', required=False)
    parser.add_argument('-w', '--weights_path', required=False, type=str, default='dt_weights/3/0.0165.pth')
    parser.add_argument('-r', '--returns_path', required=False, type=str, default='dt_returns/3/0.0165.csv')
    parser.add_argument('-v', '--visualize', required=False, type=int, default=0)
    args = parser.parse_args()

    if args.device:
        device = args.device
    else:
        device = 'cuda' if torch.cuda.is_available() else 'cpu'

    # Close the config file deterministically; safe_load avoids constructing
    # arbitrary Python objects from the YAML.
    with open(args.config) as config_file:
        cfg = yaml.safe_load(config_file)
    cfg['seed'] = args.seed

    if not args.visualize:
        dirname = args.weights_path[:-4] + '_checkpoints'
        if not os.path.exists(dirname):
            os.makedirs(dirname)

    # Problem
    seed = cfg['seed']
    env = gym.make(cfg['env_name'])
    env.seed(seed)

    # Solution: seed numpy, then derive a torch seed reproducibly.
    np.random.seed(seed)
    random_state = np.random.get_state()
    torch_seed = np.random.randint(1, 2 ** 31 - 1)
    torch.manual_seed(torch_seed)
    torch.cuda.manual_seed_all(torch_seed)

    test_batch_size = 5000
    o_dim = env.observation_space.shape[0]
    a_dim = env.action_space.shape[0]
    pol = MLPPolicy(o_dim, a_dim, h_dim=cfg['h_dim'], device=device)
    vf = MLPVF(o_dim, h_dim=cfg['h_dim'], device=device)
    np.random.set_state(random_state)

    buf = Buffer(o_dim, a_dim, test_batch_size, device=device)
    learner = PPOTestTime(pol, buf, cfg['lr'], g=cfg['g'], vf=vf, lm=cfg['lm'],
                          OptPol=Adam, OptVF=Adam,
                          u_epi_up=cfg['u_epi_ups'], device=device,
                          n_itrs=cfg['n_itrs'], n_slices=cfg['n_slices'],
                          u_adv_scl=cfg['u_adv_scl'],
                          clip_eps=cfg['clip_eps'],
                          u_joint_opt=cfg['u_joint_opt'], max_grad_norm=cfg['max_grad_norm']
                          )
    agent = Agent(pol, learner, device=device)

    if args.visualize:
        pol.load_state_dict(torch.load(args.weights_path))

    # Collect one batch of experience so the learner has data to update on.
    rets = []
    ret = 0
    epi_steps = 0
    if args.visualize:
        env.render()
    o = env.reset()
    for steps in range(test_batch_size):
        a, logp, dist = agent.get_action(o)
        op, r, done, infos = env.step(a)
        epi_steps += 1
        op_ = op
        agent.learner.log(o, a, r, op_, logp, dist, done)
        o = op
        ret += r
        if done:
            rets.append(ret)
            ret = 0
            epi_steps = 0
            o = env.reset()

    # Profile learner update time, sweeping one hyperparameter at a time.
    num_trials = 30
    ne_default = 10
    bs_default = 2000
    mbs_default = 50
    # test_ne_time(agent, bs_default, mbs_default, num_trials)
    # test_bs_time(agent, ne_default, mbs_default, num_trials)
    test_mbs_time(agent, ne_default, bs_default, num_trials)
def test_ne_time(agent, bs_default, mbs_default, num_trials):
    """Sweep the number of epochs (ne) and profile updates at each value."""
    for ne in [5, 10, 15, 20, 25, 30, 35, 40, 45, 50]:
        print('ne: ', ne)
        test_update_time(agent, ne, bs_default, mbs_default, num_trials, 'ne')
def test_bs_time(agent, ne_default, mbs_default, num_trials):
    """Sweep the batch size (bs) and profile updates at each value."""
    for bs in [500, 1000, 1500, 2000, 2500, 3000, 3500, 4000, 4500, 5000]:
        print('bs: ', bs)
        test_update_time(agent, ne_default, bs, mbs_default, num_trials, 'bs')
def test_mbs_time(agent, ne_default, bs_default, num_trials):
    """Sweep the minibatch size (mbs) and profile updates at each value."""
    for mbs in [12, 25, 37, 50, 62, 75, 87, 100, 112, 125]:
        print('mbs: ', mbs)
        test_update_time(agent, ne_default, bs_default, mbs, num_trials, 'mbs')
def test_update_time(agent, ne, bs, mbs, num_trials, prefix):
adv_calc_times = []
update_times = []
x_var = {'ne': ne, 'bs': bs, 'mbs': mbs}[prefix]
for i in range(num_trials):
adv_calc_time, update_time = agent.learner.learn_test_time(ne, bs, mbs)
adv_calc_times.append(adv_calc_time)
update_times.append(update_time)
with open('ppo_profiling/' + prefix + '_adv_times.csv', 'a', encoding='utf-8') as adv_times_file:
adv_times_file.write(str(x_var))
for i in range(num_trials):
adv_times_file.write(',' + str(adv_calc_times[i]))
adv_times_file.write('\n')
with open('ppo_profiling/' + prefix + '_update_times.csv', 'a', encoding='utf-8') as update_times_file:
update_times_file.write(str(x_var))
for i in range(num_trials):
update_times_file.write(',' + str(update_times[i]))
update_times_file.write('\n')
def save_weights(pol, steps, path):
    """Persist policy weights at `path`, plus a periodic checkpoint copy."""
    state = pol.state_dict()
    torch.save(state, path)
    # Snapshot roughly every 200k steps (the first save after each boundary).
    if steps % 200000 < 150:
        checkpoint_path = path[:-4] + '_checkpoints/' + str(steps) + path[-4:]
        torch.save(state, checkpoint_path)
def save_returns(steps, ret, path):
    """Append one '<steps>,<return>' CSV line to `path`."""
    with open(path, 'a', encoding='utf-8') as returns_file:
        returns_file.write('{},{}\n'.format(steps, ret))
# Script entry point: run the benchmark when executed directly.
if __name__ == "__main__":
    main()
|
magicgh/algorithm-contest-code
|
BashuOJ-Code/2092.cpp
|
#include<iostream>
#include<cstdio>
#include<cstring>
#include<iomanip>
#include<cmath>
#include<limits>
using namespace std;

// Sentinel "infinity" used for missing edges / unreachable pairs.
const double DOUBLE_MAX = (numeric_limits<double>::max)();

// A 2-D point; a[] holds the input coordinates (1-indexed).
struct node
{
    double x, y;
} a[151];

int n;

// dist: pairwise shortest-path matrix; s: farthest reachable distance per node.
double dist[151][151] = {0}, s[151] = {0};

// Euclidean distance between stored points x and y.
inline double calc(int x, int y)
{
    const double dx = a[x].x - a[y].x;
    const double dy = a[x].y - a[y].y;
    return sqrt(dx * dx + dy * dy);
}
// Reads n points and an n*n 0/1 adjacency matrix, runs Floyd-Warshall, then
// prints a diameter-style answer. NOTE(review): this matches the classic
// "join two components with one edge to minimize the diameter" problem —
// confirm against the original problem statement (BashuOJ 2092).
int main(){
    cin>>n;
    memset(a,0,sizeof(a));
    for(int i=1;i<=n;i++)scanf("%lf%lf",&a[i].x,&a[i].y);
    char c;
    // Initialize distances: 0 on the diagonal, edge length where adjacency
    // char is '1', "infinity" otherwise.
    for(int i=1;i<=n;i++)
    for(int j=1;j<=n;j++)
    {
        cin>>c;
        dist[i][j]=DOUBLE_MAX;
        if(i==j)dist[i][j]=0;
        if(c=='1')dist[i][j]=calc(i,j);
    }
    // Floyd-Warshall all-pairs shortest paths; skip "infinite" legs to avoid
    // overflowing DOUBLE_MAX + DOUBLE_MAX.
    for(int k=1;k<=n;k++)
    for(int i=1;i<=n;i++)
    for(int j=1;j<=n;j++)
    if(dist[i][k]!=DOUBLE_MAX&&dist[k][j]!=DOUBLE_MAX)dist[i][j]=min(dist[i][j],dist[i][k]+dist[k][j]);
    double r1=-DOUBLE_MAX,r2=DOUBLE_MAX;
    // s[i]: farthest point reachable from i; r1: largest existing "diameter".
    for(int i=1;i<=n;i++)
    {
        for(int j=1;j<=n;j++)
        if(dist[i][j]!=DOUBLE_MAX)s[i]=max(s[i],dist[i][j]);
        r1=max(r1,s[i]);
    }
    // r2: best achievable by bridging an unconnected pair (i,j) with a new
    // edge: eccentricity of i + eccentricity of j + the new edge's length.
    for(int i=1;i<=n;i++)
    {
        for(int j=1;j<=n;j++)
        if(dist[i][j]==DOUBLE_MAX)r2=min(r2,s[i]+s[j]+calc(i,j));
    }
    printf("%0.6lf",max(r1,r2));
    return 0;
}
|
stevenhankin/ag-grid
|
grid-packages/ag-grid-docs/documentation/doc-pages/charts-navigator/examples/navigator/main.js
|
// Chart configuration: a column chart of mobile OS market share with the
// navigator enabled so the user can zoom and pan.
var options = {
    container: document.getElementById('myChart'),
    title: {
        text: 'Try dragging the Navigator\'s handles to zoom in'
    },
    subtitle: {
        text: 'or the area between them to pan around'
    },
    data: [
        { label: 'Android', value: 56.9 },
        { label: 'iOS', value: 22.5 },
        { label: 'BlackBerry', value: 6.8 },
        { label: 'Symbian', value: 8.5 },
        { label: 'Bada', value: 2.6 },
        { label: 'Windows', value: 1.9 }
    ],
    series: [{
        type: 'column',
        xKey: 'label',
        yKeys: ['value']
    }],
    axes: [{
        type: 'number',
        position: 'left'
    }, {
        type: 'category',
        position: 'bottom'
    }],
    legend: {
        enabled: false
    },
    navigator: {
        enabled: true
    }
};

var chart = agCharts.AgChart.create(options);

// Toggles the navigator on/off (wired to the example's checkbox) and pushes
// the updated options to the existing chart instance.
function toggleEnabled(value) {
    options.navigator.enabled = value;
    agCharts.AgChart.update(chart, options);
}
|
Shishqa/ShishGL
|
src/Core/Primitives/Color.cpp
|
<gh_stars>0
/*============================================================================*/
#include "Color.hpp"
/*============================================================================*/
using namespace Sh;
/*============================================================================*/
// Construct a color from its four 8-bit channel values.
Color::Color(uint8_t red, uint8_t green, uint8_t blue, uint8_t alpha)
        : r(red), g(green), b(blue), a(alpha)
{ }
/*----------------------------------------------------------------------------*/
// Scale one 8-bit channel by another, treating each as a fraction of 255.
// Widened to 16 bits so the intermediate product cannot overflow.
uint8_t multiply(const uint8_t& a, const uint8_t& b) {
    const uint16_t product = static_cast<uint16_t>(a) * static_cast<uint16_t>(b);
    return static_cast<uint8_t>(product / 255);
}
// Saturating 8-bit addition: sums clamp at 255 instead of wrapping.
uint8_t add(const uint8_t& a, const uint8_t& b) {
    const uint16_t total = static_cast<uint16_t>(a) + static_cast<uint16_t>(b);
    return (total > 255) ? static_cast<uint8_t>(255) : static_cast<uint8_t>(total);
}
// Scale each RGB channel by intensity/255 in place; alpha is untouched.
Color& Color::operator*=(const uint8_t& intensity) {
    r = multiply(r, intensity);
    g = multiply(g, intensity);
    b = multiply(b, intensity);
    return *this;
}
// Return a copy of this color scaled by intensity/255 (delegates to *=).
Color Color::operator*(const uint8_t& intensity) {
    Color scaled{*this};
    scaled *= intensity;
    return scaled;
}
// Per-channel color modulation: multiply each RGB channel by the matching
// channel of `right` (alpha untouched, consistent with operator*=(intensity)).
// BUGFIX: the green and blue products were both being assigned to `r`,
// leaving g/b unmodified and clobbering r with the blue product.
Color& Color::operator*=(const Color& right) {
    r = multiply(r, right.r);
    g = multiply(g, right.g);
    b = multiply(b, right.b);
    return *this;
}
// Return the per-channel product of this color and `right` (delegates to *=).
Color Color::operator*(const Color& right) {
    Color modulated{*this};
    modulated *= right;
    return modulated;
}
// Add `right` channel-wise with saturation at 255; alpha is untouched.
Color& Color::operator+=(const Color& right) {
    r = add(r, right.r);
    g = add(g, right.g);
    b = add(b, right.b);
    return *this;
}
// Return the saturating channel-wise sum of this color and `right`.
Color Color::operator+(const Color& right) {
    Color summed{*this};
    summed += right;
    return summed;
}
/*============================================================================*/
// Named color palette (standard CSS/X11 values), declared in Color.hpp.
// NONE is fully transparent; all others use the default alpha.
const Color Color::NONE ( 0, 0, 0, 0);
// Reds & pinks
const Color Color::INDIAN_RED (205, 92, 92);
const Color Color::LIGHT_CORAL (240, 128, 128);
const Color Color::SALMON (250, 128, 114);
const Color Color::DARK_SALMON (233, 150, 122);
const Color Color::LIGHT_SALMON (255, 160, 122);
const Color Color::CRIMSON (220, 20, 60);
const Color Color::RED (255, 0, 0);
const Color Color::FIRE_BRICK (178, 34, 34);
const Color Color::DARK_RED (139, 0, 0);
const Color Color::PINK (255, 192, 203);
const Color Color::LIGHT_PINK (255, 182, 193);
const Color Color::HOT_PINK (255, 105, 180);
const Color Color::DEEP_PINK (255, 20, 147);
const Color Color::MEDIUM_VIOLET_RED (199, 21, 133);
const Color Color::PALE_VIOLET_RED (219, 112, 147);
// Oranges & yellows
const Color Color::CORAL (255, 127, 80);
const Color Color::TOMATO (255, 99, 71);
const Color Color::ORANGE_RED (255, 69, 0);
const Color Color::DARK_ORANGE (255, 140, 0);
const Color Color::ORANGE (255, 165, 0);
const Color Color::GOLD (255, 215, 0);
const Color Color::YELLOW (255, 255, 0);
const Color Color::LIGHT_YELLOW (255, 255, 224);
const Color Color::LEMON_CHIFFON (255, 250, 205);
const Color Color::LIGHT_GOLDENROD_YELLOW (250, 250, 210);
const Color Color::PAPAYA_WHIP (255, 239, 213);
const Color Color::MOCCASIN (255, 228, 181);
const Color Color::PEACH_PUFF (255, 218, 185);
const Color Color::PALE_GOLDENROD (238, 232, 170);
const Color Color::KHAKI (240, 230, 140);
const Color Color::DARK_KHAKI (189, 183, 107);
// Purples & violets
const Color Color::LAVENDER (230, 230, 250);
const Color Color::THISTLE (216, 191, 216);
const Color Color::PLUM (221, 160, 221);
const Color Color::VIOLET (238, 130, 238);
const Color Color::ORCHID (218, 112, 214);
const Color Color::FUCHSIA (255, 0, 255);
const Color Color::MAGENTA (255, 0, 255);
const Color Color::MEDIUM_ORCHID (186, 85, 211);
const Color Color::MEDIUM_PURPLE (147, 112, 219);
const Color Color::BLUE_VIOLET (138, 43, 226);
const Color Color::DARK_VIOLET (148, 0, 211);
const Color Color::DARK_ORCHID (153, 50, 204);
const Color Color::DARK_MAGENTA (139, 0, 139);
const Color Color::PURPLE (128, 0, 128);
const Color Color::INDIGO ( 75, 0, 130);
const Color Color::SLATE_BLUE (106, 90, 205);
const Color Color::DARK_SLATE_BLUE ( 72, 61, 139);
// Browns & tans
const Color Color::CORNSILK (255, 248, 220);
const Color Color::BLANCHED_ALMOND (255, 235, 205);
const Color Color::BISQUE (255, 228, 196);
const Color Color::NAVAJO_WHITE (255, 222, 173);
const Color Color::WHEAT (245, 222, 179);
const Color Color::BURLY_WOOD (222, 184, 135);
const Color Color::TAN (210, 180, 140);
const Color Color::ROSY_BROWN (188, 143, 143);
const Color Color::SANDY_BROWN (244, 164, 96);
const Color Color::GOLDENROD (218, 165, 32);
const Color Color::DARK_GOLDEN_ROD (184, 134, 11);
const Color Color::PERU (205, 133, 63);
const Color Color::CHOCOLATE (210, 105, 30);
const Color Color::SADDLE_BROWN (139, 69, 19);
const Color Color::SIENNA (160, 82, 45);
const Color Color::BROWN (165, 42, 42);
const Color Color::MAROON (128, 0, 0);
// Basic palette
const Color Color::BLACK ( 0, 0, 0);
const Color Color::GRAY (128, 128, 128);
const Color Color::SILVER (192, 192, 192);
const Color Color::WHITE (255, 255, 255);
const Color Color::OLIVE (128, 128, 0);
const Color Color::LIME ( 0, 255, 0);
const Color Color::GREEN ( 0, 128, 0);
const Color Color::AQUA ( 0, 255, 255);
const Color Color::TEAL ( 0, 128, 128);
const Color Color::BLUE ( 0, 0, 255);
const Color Color::NAVY ( 0, 0, 128);
// Greens
const Color Color::GREEN_YELLOW (173, 255, 47);
const Color Color::CHARTREUSE (127, 255, 0);
const Color Color::LAWN_GREEN (124, 252, 0);
const Color Color::LIME_GREEN ( 50, 205, 50);
const Color Color::PALE_GREEN (152, 251, 152);
const Color Color::LIGHT_GREEN (144, 238, 144);
const Color Color::MEDIUM_SPRING_GREEN ( 0, 250, 154);
const Color Color::SPRING_GREEN ( 0, 255, 127);
const Color Color::MEDIUM_SEA_GREEN ( 60, 179, 113);
const Color Color::SEA_GREEN ( 46, 139, 87);
const Color Color::FOREST_GREEN ( 34, 139, 34);
const Color Color::DARK_GREEN ( 0, 100, 0);
const Color Color::YELLOW_GREEN (154, 205, 50);
const Color Color::OLIVE_DRAB (107, 142, 35);
const Color Color::DARK_OLIVE_GREEN ( 85, 107, 47);
const Color Color::MEDIUM_AQUAMARINE (102, 205, 170);
const Color Color::DARK_SEA_GREEN (143, 188, 143);
const Color Color::LIGHT_SEA_GREEN ( 32, 178, 170);
// Cyans & blues
const Color Color::DARK_CYAN ( 0, 139, 139);
const Color Color::CYAN ( 0, 255, 255);
const Color Color::LIGHT_CYAN (224, 255, 255);
const Color Color::PALE_TURQUOISE (175, 238, 238);
const Color Color::AQUAMARINE (127, 255, 212);
const Color Color::TURQUOISE ( 64, 224, 208);
const Color Color::MEDIUM_TURQUOISE ( 72, 209, 204);
const Color Color::DARK_TURQUOISE ( 0, 206, 209);
const Color Color::CADET_BLUE ( 95, 158, 160);
const Color Color::STEEL_BLUE ( 70, 130, 180);
const Color Color::LIGHT_STEEL_BLUE (176, 196, 222);
const Color Color::POWDER_BLUE (176, 224, 230);
const Color Color::LIGHT_BLUE (173, 216, 230);
const Color Color::SKY_BLUE (135, 206, 235);
const Color Color::LIGHT_SKY_BLUE (135, 206, 250);
const Color Color::DEEP_SKY_BLUE ( 0, 191, 255);
const Color Color::DODGER_BLUE ( 30, 144, 255);
const Color Color::CORNFLOWER_BLUE (100, 149, 237);
const Color Color::MEDIUM_SLATE_BLUE (123, 104, 238);
const Color Color::ROYAL_BLUE ( 65, 105, 225);
const Color Color::MEDIUM_BLUE ( 0, 0, 205);
const Color Color::DARK_BLUE ( 0, 0, 139);
const Color Color::MIDNIGHT_BLUE ( 25, 25, 112);
// Whites
const Color Color::SNOW (255, 250, 250);
const Color Color::HONEYDEW (240, 255, 240);
const Color Color::MINT_CREAM (245, 255, 250);
const Color Color::AZURE (240, 255, 255);
const Color Color::ALICE_BLUE (240, 248, 255);
const Color Color::GHOST_WHITE (248, 248, 255);
const Color Color::WHITE_SMOKE (245, 245, 245);
const Color Color::SEASHELL (255, 245, 238);
const Color Color::BEIGE (245, 245, 220);
const Color Color::OLD_LACE (253, 245, 230);
const Color Color::FLORAL_WHITE (255, 250, 240);
const Color Color::IVORY (255, 255, 240);
const Color Color::ANTIQUE_WHITE (250, 235, 215);
const Color Color::LINEN (250, 240, 230);
const Color Color::LAVENDER_BLUSH (255, 240, 245);
const Color Color::MISTY_ROSE (255, 228, 225);
// Grays (both GRAY/GREY spellings are provided as aliases)
const Color Color::GAINSBORO (220, 220, 220);
const Color Color::LIGHT_GREY (211, 211, 211);
const Color Color::LIGHT_GRAY (211, 211, 211);
const Color Color::DARK_GRAY (169, 169, 169);
const Color Color::DARK_GREY (169, 169, 169);
const Color Color::GREY (128, 128, 128);
const Color Color::DIM_GRAY (105, 105, 105);
const Color Color::DIM_GREY (105, 105, 105);
const Color Color::LIGHT_SLATE_GRAY (119, 136, 153);
const Color Color::LIGHT_SLATE_GREY (119, 136, 153);
const Color Color::SLATE_GRAY (112, 128, 144);
const Color Color::SLATE_GREY (112, 128, 144);
const Color Color::DARK_SLATE_GRAY ( 47, 79, 79);
const Color Color::DARK_SLATE_GREY ( 47, 79, 79);
/*============================================================================*/
|
ib-da-ncirl/dia_crime
|
src/ie/ibuttimer/dia_crime/hadoop/AbstractCsvMapper.java
|
<filename>src/ie/ibuttimer/dia_crime/hadoop/AbstractCsvMapper.java
/*
* The MIT License (MIT)
* Copyright (c) 2020 <NAME>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package ie.ibuttimer.dia_crime.hadoop;
import ie.ibuttimer.dia_crime.hadoop.misc.DateWritable;
import ie.ibuttimer.dia_crime.misc.*;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.http.util.TextUtils;
import org.apache.log4j.Logger;
import java.io.IOException;
import java.time.*;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeParseException;
import java.util.*;
import java.util.stream.Collectors;
import static ie.ibuttimer.dia_crime.misc.Constants.*;
/**
* Base mapper for a csv file:
* - input key : csv file line number
* - input value : csv file line text
* @param <K> output key type
* @param <V> output value type
*/
public abstract class AbstractCsvMapper<K, V> extends AbstractMapper<LongWritable, Text, K, V> {

    public static final String DEFAULT_SEPARATOR = ",";
    public static final boolean DEFAULT_HAS_HEADER = false;

    /** Separator for csv file */
    private String separator = DEFAULT_SEPARATOR;
    /** Csv file has a header line flag */
    private boolean hasHeader = DEFAULT_HAS_HEADER;
    /** Number of columns in csv file */
    private int numIndices = 0;

    // date time formatter for reading input
    private DateTimeFormatter dateTimeFormatter;
    // date time formatter for outputting date keys
    private DateTimeFormatter keyOutDateTimeFormatter;

    /** Configuration property names controlling the optional date-range filter */
    public static final List<String> DATE_FILTER_PROPS = Arrays.asList(
        FILTER_START_DATE_PROP, FILTER_END_DATE_PROP
    );
    // filter built in setup() from FILTER_START_DATE_PROP/FILTER_END_DATE_PROP
    private DateFilter dateFilter = null;

    // map of index property name -> csv column index, populated by initIndices()
    private final Map<String, Integer> indices = new HashMap<>();
    // highest configured column index seen by initIndices(); -1 until configured
    private int maxIndex = -1;

    /**
     * Read csv settings (separator, header flag, column count), date formatters,
     * date filter and debug level from the job configuration.
     * @param context job context
     */
    @Override
    protected void setup(Context context) throws IOException, InterruptedException {
        Configuration conf = context.getConfiguration();
        super.setup(context);

        // read basic properties
        separator = conf.get(getPropertyPath(SEPARATOR_PROP), DEFAULT_SEPARATOR);
        hasHeader = conf.getBoolean(getPropertyPath(HAS_HEADER_PROP), DEFAULT_HAS_HEADER);
        numIndices = conf.getInt(getPropertyPath(NUM_INDICES_PROP), 0);

        /* set date time formatter properties */
        ConfigReader cfgReader = new ConfigReader(getMapperCfg());
        dateTimeFormatter = cfgReader.getDateTimeFormatter(conf, DATE_FORMAT_PROP, DateTimeFormatter.ISO_LOCAL_DATE_TIME);
        keyOutDateTimeFormatter = cfgReader.getDateTimeFormatter(conf, OUT_KEY_DATE_FORMAT_PROP, DateTimeFormatter.ISO_LOCAL_DATE);

        // get date filter
        dateFilter = new DateFilter(conf.get(getPropertyPath(FILTER_START_DATE_PROP), ""),
            conf.get(getPropertyPath(FILTER_END_DATE_PROP), ""));

        setDebugLevel(DebugLevel.getSetting(conf, getMapperCfg()));
        if (show(DebugLevel.MEDIUM)) {
            getMapperCfg().dumpConfiguration(getLogger(), conf);
        }
    }

    /**
     * Initialise property indices
     * @param context job context
     * @param propertyIndices names of column-index properties to read (missing ones default to -1)
     */
    protected void initIndices(Context context, List<String> propertyIndices) {
        Configuration conf = context.getConfiguration();

        // read the element indices from the configuration
        for (String prop : propertyIndices) {
            int index = conf.getInt(getPropertyPath(prop), -1);
            if (index > maxIndex) {
                maxIndex = index;
            }
            indices.put(prop, index);
        }
    }

    /**
     * Get the separator for csv file
     * @return separator
     */
    public String getSeparator() {
        return separator;
    }

    /**
     * Get the csv file has a header line flag
     * @return header line flag
     */
    public boolean isHasHeader() {
        return hasHeader;
    }

    /**
     * Get the number of columns in csv file
     * @return number of columns
     */
    public int getNumIndices() {
        return numIndices;
    }

    /**
     * Get the map of index property name to csv column index (see {@link #initIndices})
     * @return index map
     */
    public Map<String, Integer> getIndices() {
        return indices;
    }

    /**
     * Get the highest configured column index, or -1 if none configured
     * @return max index
     */
    public int getMaxIndex() {
        return maxIndex;
    }

    /**
     * Check if the specified key is a header line and if it should be skipped
     * @param key Key; line number
     * @return True if line should be skipped
     */
    public boolean skipHeader(LongWritable key) {
        boolean skip = false;
        // key 0 is the first line of the file, i.e. the header line when one is configured
        if (key.get() == 0) {
            skip = isHasHeader();
        }
        return skip;
    }

    /**
     * Check if the specified key is a header line or comment line and if it should be skipped
     * @param key Key; line number
     * @param value Line
     * @return True if line should be skipped
     */
    public boolean skip(LongWritable key, Text value) {
        return skipHeader(key) || skipComment(value);
    }

    /**
     * Get the DateTimeFormatter pattern for format of dates
     * @return DateTimeFormatter
     */
    public DateTimeFormatter getDateTimeFormatter() {
        return dateTimeFormatter;
    }

    /**
     * Get the DateTimeFormatter pattern for format of date output keys
     * @return DateTimeFormatter
     */
    public DateTimeFormatter getKeyOutDateTimeFormatter() {
        return keyOutDateTimeFormatter;
    }

    /**
     * Get the zoned date and time
     * @param dateTime String of the date and time
     * @param formatter Formatter to use
     * @return Converted date and time; LocalDateTime.MIN at the system zone if parsing fails
     */
    public ZonedDateTime getZonedDateTime(String dateTime, DateTimeFormatter formatter) {
        // sentinel value returned (and error logged) when the string cannot be parsed
        ZonedDateTime zdt = ZonedDateTime.of(LocalDateTime.MIN, ZoneId.systemDefault());
        try {
            zdt = ZonedDateTime.parse(dateTime, formatter);
        } catch (DateTimeParseException dpte) {
            getLogger().error("Cannot parse '" + dateTime + "' using format " + formatter.toString(), dpte);
        }
        return zdt;
    }

    /**
     * Get the zoned date and time
     * @param dateTime String of the date and time
     * @return Converted date and time
     */
    public ZonedDateTime getZonedDateTime(String dateTime) {
        return Utils.getZonedDateTime(dateTime, getDateTimeFormatter(), getLogger());
    }

    /**
     * Get the date and time
     * @param dateTime String of the date and time
     * @return Converted date and time
     */
    public LocalDateTime getDateTime(String dateTime) {
        return Utils.getDateTime(dateTime, getDateTimeFormatter(), getLogger());
    }

    /**
     * Get the date
     * @param date String of the date
     * @return Converted date
     */
    public LocalDate getDate(String date) {
        return Utils.getDate(date, getDateTimeFormatter(), getLogger());
    }

    /**
     * Get the date and time and check if it is filtered.
     * Parses as a zoned date/time and drops the zone before filtering.
     * @param dateTime String of the date and time
     * @return Pair of filter result (TRUE if passes filter) and converted date and time
     */
    public Pair<Boolean, LocalDateTime> getZonedDateTimeAndFilter(String dateTime) {
        LocalDateTime ldt = getZonedDateTime(dateTime).toLocalDateTime();
        return Pair.of(dateFilter.filter(ldt), ldt);
    }

    /**
     * Get the date and time and check if it is filtered
     * @param dateTime String of the date and time
     * @return Pair of filter result (TRUE if passes filter) and converted date and time
     */
    public Pair<Boolean, LocalDateTime> getDateTimeAndFilter(String dateTime) {
        LocalDateTime ldt = getDateTime(dateTime);
        return Pair.of(dateFilter.filter(ldt), ldt);
    }

    /**
     * Get the date and check if it is filtered
     * @param date String of the date and time
     * @param dateFilter Date filter to use
     * @return Pair of filter result (TRUE if passes filter) and converted date and time
     */
    public Pair<Boolean, LocalDate> getDateAndFilter(String date, DateFilter dateFilter) {
        LocalDate ld = getDate(date);
        return Pair.of(dateFilter.filter(ld), ld);
    }

    /**
     * Get the date and check if it is filtered, using the filter configured in setup()
     * @param date String of the date and time
     * @return Pair of filter result (TRUE if passes filter) and converted date and time
     */
    public Pair<Boolean, LocalDate> getDateAndFilter(String date) {
        return getDateAndFilter(date, dateFilter);
    }

    /**
     * Return the text to use as an output key
     * @param date date to convert
     * @return date key writable formatted with the configured output-key formatter
     */
    public DateWritable getDateOutKey(LocalDate date) {
        return DateWritable.ofDate(date, keyOutDateTimeFormatter);
    }

    /**
     * Read a separated string to a list.
     * Note: the separator is passed to String.split(), so it is interpreted as a
     * regular expression; regex metacharacters (e.g. '|', '.') must be escaped by the caller.
     * @param value String to split
     * @param separator Separator to use
     * @return list of trimmed tokens
     */
    public List<String> readSeparatedString(String value, String separator) {
        return Arrays.stream(value.split(separator))
            .map(String::trim)
            .collect(Collectors.toList());
    }

    /**
     * Read a comma-separated string to a list of trimmed tokens
     * @param value String to split
     * @return list of trimmed tokens
     */
    public List<String> readCommaSeparatedString(String value) {
        return readSeparatedString(value, ",");
    }

    /**
     * Base configuration class
     */
    public abstract static class AbstractCsvMapperCfg implements ICsvMapperCfg {

        /** Properties shared by all mapper configurations, keyed by property name */
        public static final Map<String, Property> COMMON_PROPERTIES;
        static {
            COMMON_PROPERTIES = new HashMap<>();
            List.of(
                Property.of(OUTPUTTYPES_PATH_PROP, "path to factor types input file", ""),
                Property.of(FACTOR_PROP, "list of factors to apply to values", ""),
                Property.of(STATS_INPUT_PATH_PROP, "path to statistics file", ""),
                Property.of(VARIABLES_PROP, "list of variables to use", ""),
                Property.of(DEPENDENT_PROP, "dependent variable to use", "")
            ).forEach(p -> COMMON_PROPERTIES.put(p.name, p));
        }

        private final PropertyWrangler propertyWrangler;
        private final String propertyRoot;

        /**
         * Constructor
         * @param propertyRoot root under which this mapper's properties live
         */
        public AbstractCsvMapperCfg(String propertyRoot) {
            this.propertyRoot = propertyRoot;
            this.propertyWrangler = new PropertyWrangler(propertyRoot);
        }

        @Override
        public String getRoot() {
            return propertyRoot;
        }

        @Override
        public String getPropertyName(String propertyPath) {
            return propertyWrangler.getPropertyName(propertyPath);
        }

        @Override
        public String getPropertyPath(String propertyName) {
            return propertyWrangler.getPropertyPath(propertyName);
        }

        @Override
        public HashMap<String, String> getPropertyDefaults() {
            // create map of possible keys and default values
            HashMap<String, String> propDefault = new HashMap<>();
            propDefault.put(DEBUG_PROP, DebugLevel.OFF.name());
            propDefault.put(SEPARATOR_PROP, DEFAULT_SEPARATOR);
            propDefault.put(HAS_HEADER_PROP, Boolean.toString(DEFAULT_HAS_HEADER));
            propDefault.put(DATE_FORMAT_PROP, "");
            propDefault.put(NUM_INDICES_PROP, "0");
            DATE_FILTER_PROPS.forEach((p -> propDefault.put(p, "")));
            propDefault.put(OUT_KEY_DATE_FORMAT_PROP, "");
            // column-index properties default to -1 (unset)
            getPropertyIndices().forEach(p -> propDefault.put(p, "-1"));
            getRequiredProps().forEach(p -> propDefault.put(p.name, p.defaultValue));
            getAdditionalProps().forEach(p -> propDefault.put(p.name, p.defaultValue));
            return propDefault;
        }

        @Override
        public Pair<Integer, List<String>> checkConfiguration(Configuration conf) {
            int resultCode = ECODE_SUCCESS;
            List<String> errors = new ArrayList<>();

            // check required properties in config
            for (Property prop : getRequiredProps()) {
                if (TextUtils.isEmpty(conf.get(getPropertyPath(prop.name)))) {
                    errors.add("Error: No " + prop.description + " specified, set '" + prop.name + "'.");
                    resultCode = ECODE_CONFIG_ERROR;
                }
            }

            // check for date filtering
            Pair<Integer, List<String>> dateRes = checkDatePairConfiguration(
                conf, FILTER_START_DATE_PROP, FILTER_END_DATE_PROP);
            if (dateRes.getLeft() != ECODE_SUCCESS) {
                errors.addAll(dateRes.getRight());
                resultCode = dateRes.getLeft();
            }

            // check property indices
            for (String key : getPropertyIndices()) {
                if (conf.getInt(getPropertyPath(key), -1) < 0) {
                    errors.add("Error: '" + key + "' not specified.");
                    resultCode = ECODE_CONFIG_ERROR;
                }
            }
            return Pair.of(resultCode, errors);
        }

        /**
         * Validate a start/end date property pair: each must be a valid ISO local date
         * (when present) and start must not be after end.
         * @param conf configuration to read
         * @param start name of start date property
         * @param end name of end date property
         * @return Pair of result code and list of error messages
         */
        public Pair<Integer, List<String>> checkDatePairConfiguration(Configuration conf, String start, String end) {
            int resultCode = ECODE_SUCCESS;
            List<String> errors = new ArrayList<>();

            LocalDate startDate = null;
            LocalDate endDate = null;
            for (String key : List.of(start, end)) {
                String dateStr = conf.get(getPropertyPath(key), "");
                if (!TextUtils.isEmpty(dateStr)) {
                    try {
                        LocalDate date = LocalDate.parse(dateStr, DateTimeFormatter.ISO_LOCAL_DATE);
                        if (key.equals(start)) {
                            startDate = date;
                        } else {
                            endDate = date;
                        }
                    } catch (DateTimeParseException dpte) {
                        errors.add("Error: Invalid '" + key + "' specified, '" + dateStr + "'");
                        resultCode = ECODE_CONFIG_ERROR;
                    }
                }
            }
            if (resultCode != Constants.ECODE_CONFIG_ERROR) {
                if ((startDate != null) && (endDate != null)) {
                    Period period = Period.between(startDate, endDate.plusDays(1));    // start inclusive, end exclusive
                    if (period.isZero() || period.isNegative()) {
                        errors.add("Error: '" + start + "' after '" + end + "'");
                        resultCode = ECODE_CONFIG_ERROR;
                    }
                }
            }
            return Pair.of(resultCode, errors);
        }

        @Override
        public Optional<Property> getProperty(String propertyName) {
            // only the common properties are known here; subclasses may extend
            Optional<Property> property;
            if (COMMON_PROPERTIES.containsKey(propertyName)) {
                property = Optional.of(COMMON_PROPERTIES.get(propertyName));
            } else {
                property = Optional.empty();
            }
            return property;
        }

        /**
         * Log every known property and its configured value
         * @param logger logger to write to
         * @param conf configuration to dump
         */
        public void dumpConfiguration(Logger logger, Configuration conf) {
            // use info as its the default level
            getPropertyDefaults().forEach((p, d) -> {
                logger.info(
                    String.format("%s - %s [%s]", getRoot(), p, conf.get(getPropertyPath(p), "")));
            });
        }
    }
}
|
liuzhenyulive/iBlogs
|
blog-common/src/main/java/site/iblogs/common/conventer/StringCodeToEnumConverterFactory.java
|
<gh_stars>1-10
package site.iblogs.common.conventer;
import com.google.common.collect.Maps;
import org.springframework.core.convert.converter.Converter;
import org.springframework.core.convert.converter.ConverterFactory;
import site.iblogs.common.dto.enums.BaseEnum;

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
* @author: <EMAIL>
* @date: 10/12/2020 22:15
*/
public class StringCodeToEnumConverterFactory implements ConverterFactory<String, BaseEnum> {

    /**
     * Cache of converters keyed by target enum class.
     * ConcurrentHashMap (instead of the previous raw, unsynchronized HashMap) makes the
     * lazy population safe when Spring resolves converters on concurrent request threads,
     * and the parameterized type removes the raw-type unchecked warnings.
     */
    private static final Map<Class<?>, Converter<String, ? extends BaseEnum>> CONVERTERS =
            new ConcurrentHashMap<>();

    /**
     * 获取一个从 String 转化为 T 的转换器,T 是一个泛型,有多个实现
     *
     * @param targetType 转换后的类型
     * @return 返回一个转化器
     */
    @Override
    @SuppressWarnings("unchecked")
    public <T extends BaseEnum> Converter<String, T> getConverter(Class<T> targetType) {
        // computeIfAbsent creates at most one converter per type, atomically;
        // the cast is safe because the cached value was constructed for targetType
        return (Converter<String, T>) CONVERTERS.computeIfAbsent(
                targetType, type -> new StringToEnumConverter<>(targetType));
    }
}
|
billwert/azure-sdk-for-java
|
sdk/scvmm/azure-resourcemanager-scvmm/src/main/java/com/azure/resourcemanager/scvmm/models/InventoryItem.java
|
<filename>sdk/scvmm/azure-resourcemanager-scvmm/src/main/java/com/azure/resourcemanager/scvmm/models/InventoryItem.java
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.scvmm.models;
import com.azure.core.management.SystemData;
import com.azure.core.util.Context;
import com.azure.resourcemanager.scvmm.fluent.models.InventoryItemInner;
/**
 * An immutable client-side representation of InventoryItem.
 *
 * <p>AutoRest-generated model interface; wraps the service-layer
 * {@link InventoryItemInner} and exposes a staged fluent definition flow
 * (see {@link Definition}).</p>
 */
public interface InventoryItem {
    /**
     * Gets the id property: Fully qualified resource Id for the resource.
     *
     * @return the id value.
     */
    String id();

    /**
     * Gets the name property: The name of the resource.
     *
     * @return the name value.
     */
    String name();

    /**
     * Gets the type property: The type of the resource.
     *
     * @return the type value.
     */
    String type();

    /**
     * Gets the systemData property: The system data.
     *
     * @return the systemData value.
     */
    SystemData systemData();

    /**
     * Gets the kind property: Metadata used by portal/tooling/etc to render different UX experiences for resources of
     * the same type; e.g. ApiApps are a kind of Microsoft.Web/sites type. If supported, the resource provider must
     * validate and persist this value.
     *
     * @return the kind value.
     */
    String kind();

    /**
     * Gets the managedResourceId property: Gets the tracked resource id corresponding to the inventory resource.
     *
     * @return the managedResourceId value.
     */
    String managedResourceId();

    /**
     * Gets the uuid property: Gets the UUID (which is assigned by VMM) for the inventory item.
     *
     * @return the uuid value.
     */
    String uuid();

    /**
     * Gets the inventoryItemName property: Gets the Managed Object name in VMM for the inventory item.
     *
     * @return the inventoryItemName value.
     */
    String inventoryItemName();

    /**
     * Gets the provisioningState property: Gets the provisioning state.
     *
     * @return the provisioningState value.
     */
    String provisioningState();

    /**
     * Gets the inner com.azure.resourcemanager.scvmm.fluent.models.InventoryItemInner object.
     *
     * @return the inner object.
     */
    InventoryItemInner innerModel();

    /** The entirety of the InventoryItem definition. */
    interface Definition
        extends DefinitionStages.Blank, DefinitionStages.WithParentResource, DefinitionStages.WithCreate {
    }

    /** The InventoryItem definition stages. */
    interface DefinitionStages {
        /** The first stage of the InventoryItem definition. */
        interface Blank extends WithParentResource {
        }

        /** The stage of the InventoryItem definition allowing to specify parent resource. */
        interface WithParentResource {
            /**
             * Specifies resourceGroupName, vmmServerName.
             *
             * @param resourceGroupName The name of the resource group.
             * @param vmmServerName Name of the VMMServer.
             * @return the next definition stage.
             */
            WithCreate withExistingVmmServer(String resourceGroupName, String vmmServerName);
        }

        /**
         * The stage of the InventoryItem definition which contains all the minimum required properties for the resource
         * to be created, but also allows for any other optional properties to be specified.
         */
        interface WithCreate extends DefinitionStages.WithKind {
            /**
             * Executes the create request.
             *
             * @return the created resource.
             */
            InventoryItem create();

            /**
             * Executes the create request.
             *
             * @param context The context to associate with this operation.
             * @return the created resource.
             */
            InventoryItem create(Context context);
        }

        /** The stage of the InventoryItem definition allowing to specify kind. */
        interface WithKind {
            /**
             * Specifies the kind property: Metadata used by portal/tooling/etc to render different UX experiences for
             * resources of the same type; e.g. ApiApps are a kind of Microsoft.Web/sites type. If supported, the
             * resource provider must validate and persist this value..
             *
             * @param kind Metadata used by portal/tooling/etc to render different UX experiences for resources of the
             *     same type; e.g. ApiApps are a kind of Microsoft.Web/sites type. If supported, the resource provider
             *     must validate and persist this value.
             * @return the next definition stage.
             */
            WithCreate withKind(String kind);
        }
    }

    /**
     * Refreshes the resource to sync with Azure.
     *
     * @return the refreshed resource.
     */
    InventoryItem refresh();

    /**
     * Refreshes the resource to sync with Azure.
     *
     * @param context The context to associate with this operation.
     * @return the refreshed resource.
     */
    InventoryItem refresh(Context context);
}
|
react-vancouver/website
|
src/components/constructs/Stat/Stat.js
|
<gh_stars>1-10
import React from 'react';
import PropTypes from 'prop-types';
import { withSpacing } from '@utilities/styles/spacing';
import { rootStyles } from './Stat.styles';
import Text from '@elements/Text';
const Stat = ({ className, color, number, title }) => {
return (
<div css={rootStyles} className={className}>
<Text title element="p" color={color} scale={18}>
{number}
</Text>
<Text subheading element="p" color={color}>
{title}
</Text>
</div>
);
};
// All props are optional strings; `color` is forwarded to both Text children.
Stat.propTypes = {
  className: PropTypes.string,
  color: PropTypes.string,
  number: PropTypes.string,
  title: PropTypes.string,
};

// NOTE(review): withSpacing presumably injects margin/padding utility props —
// confirm against @utilities/styles/spacing.
export default withSpacing(Stat);
|
johnkarasev/canvas-lms
|
docker/config/initializers/active_record.rb
|
<filename>docker/config/initializers/active_record.rb
#
# Copyright (C) 2011 - present Instructure, Inc.
#
# This file is part of Canvas.
#
# Canvas is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, version 3 of the License.
#
# Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
require 'active_support/callbacks/suspension'
class ActiveRecord::Base
# cache keys include microsecond-precision timestamps
self.cache_timestamp_format = :usec

# expose write_attribute publicly (it is protected by default)
public :write_attribute

class << self
  delegate :distinct_on, :find_ids_in_batches, to: :all

  # Iterate over id ranges; defaults :loose => true (delegates to the relation).
  def find_ids_in_ranges(opts={}, &block)
    opts.reverse_merge!(:loose => true)
    all.find_ids_in_ranges(opts, &block)
  end

  attr_accessor :in_migration

  # determines if someone started a transaction in addition to the spec fixture transaction
  # impossible to just count open transactions, cause by default it won't nest a transaction
  # unless specifically requested
  def in_transaction_in_test?
    return false unless Rails.env.test?
    transaction_method = ActiveRecord::ConnectionAdapters::DatabaseStatements.instance_method(:transaction).source_location.first
    transaction_regex = /\A#{Regexp.escape(transaction_method)}:\d+:in `transaction'\z/.freeze
    # transactions due to spec fixtures are _not_in the callstack, so we only need to find 1
    !!caller.find { |s| s =~ transaction_regex && !s.include?('spec_helper.rb') }
  end

  # Deliberately disabled project-wide; default_scope causes subtle query bugs.
  def default_scope(*)
    raise "please don't ever use default_scope. it may seem like a great solution, but I promise, it isn't"
  end
end
# Read an attribute, lazily writing +default_value+ first when it is unset.
def read_or_initialize_attribute(attr_name, default_value)
  # have to read the attribute again because serialized attributes in Rails 4.2 get duped
  read_attribute(attr_name) || (write_attribute(attr_name, default_value) && read_attribute(attr_name))
end

alias :clone :dup

# Serialization override: escapes configured user-content fields and, when
# options[:include_root] is set, nests the hash under the model element name.
# NOTE(review): assumes options is a hash when the result is non-empty
# (options[:user_content] would raise on nil) — confirm callers always pass one.
def serializable_hash(options = nil)
  result = super
  if result.present?
    result = result.with_indifferent_access
    user_content_fields = options[:user_content] || []
    result.keys.each do |name|
      if user_content_fields.include?(name.to_s)
        result[name] = UserContent.escape(result[name])
      end
    end
  end
  if options && options[:include_root]
    result = {self.class.base_class.model_name.element => result}
  end
  result
end

# See ActiveModel#serializable_add_includes
def serializable_add_includes(options = {}, &block)
  super(options) do |association, records, opts|
    yield association, records, opts.reverse_merge(:include_root => options[:include_root])
  end
end

# Stable feed identifier: "<type>_<uuid-or-id>" (uuid preferred when available).
def feed_code
  id = self.uuid rescue self.id
  "#{self.class.reflection_type_name}_#{id}"
end
# All concrete model classes (memoized), excluding Tableless and abstract classes.
def self.all_models
  return @all_models if @all_models.present?
  @all_models = (ActiveRecord::Base.models_from_files +
    [Version]).compact.uniq.reject { |model|
    (model < Tableless) ||
      model.abstract_class?
  }
end

# Force-load every model file (app, vendor plugins, gem plugins) and return all
# AR descendants; memoized after the first call.
def self.models_from_files
  @from_files ||= begin
    Dir[
      "#{Rails.root}/app/models/**/*.rb",
      "#{Rails.root}/vendor/plugins/*/app/models/**/*.rb",
      "#{Rails.root}/gems/plugins/*/app/models/**/*.rb",
    ].sort.each do |file|
      next if const_defined?(file.sub(%r{.*/app/models/(.*)\.rb$}, '\1').camelize)
      ActiveSupport::Dependencies.require_or_load(file)
    end
    ActiveRecord::Base.descendants
  end
end

# column-size limits used when validating text fields
def self.maximum_text_length
  @maximum_text_length ||= 64.kilobytes-1
end

def self.maximum_long_text_length
  @maximum_long_text_length ||= 500.kilobytes-1
end

def self.maximum_string_length
  255
end

# Find one record by asset string (e.g. "course_5"); nil if not found.
def self.find_by_asset_string(string, asset_types=nil)
  find_all_by_asset_string([string], asset_types)[0]
end

# Find records for a list of asset strings, grouped per class to batch queries.
def self.find_all_by_asset_string(strings, asset_types=nil)
  # TODO: start checking asset_types, if provided
  strings.map{ |str| parse_asset_string(str) }.group_by(&:first).inject([]) do |result, (klass, id_pairs)|
    next result if asset_types && !asset_types.include?(klass)
    result.concat((klass.constantize.where(id: id_pairs.map(&:last)).to_a rescue []))
  end
end
# takes an asset string list, like "course_5,user_7" and turns it into an
# array of [class_name, id] like [ ["Course", 5], ["User", 7] ]
def self.parse_asset_string_list(asset_string_list)
  asset_string_list.to_s.split(",").map { |str| parse_asset_string(str) }
end

# Parse one asset string into [class name, integer id (or nil)].
def self.parse_asset_string(str)
  code = asset_string_components(str)
  [convert_class_name(code.first), code.last.try(:to_i)]
end

# Split "foo_bar_5" into ["foo_bar", "5"]; only the last underscore separates the id.
def self.asset_string_components(str)
  components = str.split('_', -1)
  id = components.pop
  [components.join('_'), id.presence]
end

# "my_namespace:thing_name" -> "MyNamespace::ThingName"
def self.convert_class_name(str)
  namespaces = str.split(':')
  class_name = namespaces.pop
  (namespaces.map(&:camelize) + [class_name.try(:classify)]).join('::')
end

def self.asset_string(id)
  "#{self.reflection_type_name}_#{id}"
end

# Asset string for this record, cached per shard.
def asset_string
  @asset_string ||= {}
  @asset_string[Shard.current] ||= self.class.asset_string(id)
end

def global_asset_string
  @global_asset_string ||= "#{self.class.reflection_type_name}_#{global_id}"
end

# little helper to keep checks concise and avoid a db lookup
def has_asset?(asset, field = :context)
  asset.id == send("#{field}_id") && asset.class.base_class.name == send("#{field}_type")
end

def context_string(field = :context)
  send("#{field}_type").underscore + "_" + send("#{field}_id").to_s if send("#{field}_type")
end

# Module prepended to host the generated backcompat readers below (memoized).
def self.asset_string_backcompat_module
  @asset_string_backcompat_module ||= Module.new.tap { |m| prepend(m) }
end

# Defines <association>_id/_type readers that fall back to parsing a legacy
# asset-string column and backfilling the split columns on first read.
def self.define_asset_string_backcompat_method(string_version_name, association_version_name = string_version_name, method = nil)
  # just chain to the two methods
  unless method
    # this is weird, but gets the instance methods defined so they can be chained
    begin
      self.new.send("#{association_version_name}_id")
    rescue
      # the db doesn't exist yet; no need to bother with backcompat methods anyway
      return
    end
    define_asset_string_backcompat_method(string_version_name, association_version_name, 'id')
    define_asset_string_backcompat_method(string_version_name, association_version_name, 'type')
    return
  end

  asset_string_backcompat_module.class_eval <<-CODE, __FILE__, __LINE__ + 1
    def #{association_version_name}_#{method}
      res = super
      if !res && #{string_version_name}.present?
        type, id = ActiveRecord::Base.parse_asset_string(#{string_version_name})
        write_attribute(:#{association_version_name}_type, type)
        write_attribute(:#{association_version_name}_id, id)
        res = super
      end
      res
    end
  CODE
end
# Column names to include when exporting (all content columns by default).
def export_columns(format = nil)
  self.class.content_columns.map(&:name)
end

# This record's export values, in export_columns order.
def to_row(format = nil)
  export_columns(format).map { |c| self.send(c) }
end

def is_a_context?
  false
end

# Cached short name of this record's context; raises when there is no context.
def cached_context_short_name
  if self.respond_to?(:context)
    code = self.respond_to?(:context_code) ? self.context_code : self.context.asset_string
    @cached_context_name ||= Rails.cache.fetch(['short_name_lookup', code].cache_key) do
      self.context.short_name rescue ""
    end
  else
    raise "Can only call cached_context_short_name on items with a context"
  end
end

# Globally (class-wide) suppress touch_context; see touch_context below.
def self.skip_touch_context(skip=true)
  @@skip_touch_context = skip
end

# Save without bumping the context's updated_at.
def save_without_touching_context
  @skip_touch_context = true
  self.save
  @skip_touch_context = false
end

# Bump the associated context's updated_at unless suppressed; errors are
# captured rather than raised (best effort).
def touch_context
  return if (@@skip_touch_context ||= false || @skip_touch_context ||= false)
  if self.respond_to?(:context_type) && self.respond_to?(:context_id) && self.context_type && self.context_id
    self.context_type.constantize.where(id: self.context_id).update_all(updated_at: Time.now.utc)
  end
rescue
  Canvas::Errors.capture_exception(:touch_context, $ERROR_INFO)
end

# Bump the associated user's updated_at after commit; returns false on error
# (captured, not raised).
def touch_user
  if self.respond_to?(:user_id) && self.user_id
    User.connection.after_transaction_commit do
      User.where(:id => self.user_id).update_all(:updated_at => Time.now.utc)
    end
  end
  true
rescue
  Canvas::Errors.capture_exception(:touch_user, $ERROR_INFO)
  false
end

# e.g. "courses/5" for a record whose context is Course 5
def context_url_prefix
  "#{self.context_type.downcase.pluralize}/#{self.context_id}"
end
# Example:
#   obj.to_json(:permissions => {:user => u, :policies => [:read, :write, :update]})
# Builds the JSON hash, merging class/instance serialization excludes and
# methods, then optionally attaches a "permissions" sub-hash for the given user.
def as_json(options = nil)
  options = options.try(:dup) || {}

  self.set_serialization_options if self.respond_to?(:set_serialization_options)

  except = options.delete(:except) || []
  except = Array(except).dup
  except.concat(self.class.serialization_excludes) if self.class.respond_to?(:serialization_excludes)
  except.concat(self.serialization_excludes) if self.respond_to?(:serialization_excludes)
  except.uniq!

  methods = options.delete(:methods) || []
  methods = Array(methods).dup
  methods.concat(self.class.serialization_methods) if self.class.respond_to?(:serialization_methods)
  methods.concat(self.serialization_methods) if self.respond_to?(:serialization_methods)
  methods.uniq!

  options[:except] = except unless except.empty?
  options[:methods] = methods unless methods.empty?

  # We include a root in all the association json objects (if it's a
  # collection), which is different than the rails behavior of just including
  # the root in the base json object. Hence the hackies.
  #
  # We are in the process of migrating away from including the root in all our
  # json serializations at all. Once that's done, we can remove this and the
  # monkey patch to Serialzer, below.
  # ^hahahahahahaha
  unless options.key?(:include_root)
    options[:include_root] = true
  end

  hash = serializable_hash(options)

  if options[:permissions]
    obj_hash = options[:include_root] ? hash[self.class.base_class.model_name.element] : hash
    if self.respond_to?(:filter_attributes_for_user)
      self.filter_attributes_for_user(obj_hash, options[:permissions][:user], options[:permissions][:session])
    end
    unless options[:permissions][:include_permissions] == false
      permissions_hash = self.rights_status(options[:permissions][:user], options[:permissions][:session], *options[:permissions][:policies])
      if self.respond_to?(:serialize_permissions)
        permissions_hash = self.serialize_permissions(permissions_hash, options[:permissions][:user], options[:permissions][:session])
      end
      obj_hash["permissions"] = permissions_hash
    end
  end

  self.revert_from_serialization_options if self.respond_to?(:revert_from_serialization_options)

  hash.with_indifferent_access
end
def class_name
  self.class.to_s
end

# Instance-level access to the protected class-side sanitizer.
def sanitize_sql(*args)
  self.class.send :sanitize_sql_for_conditions, *args
end

# Underscored base-class name, e.g. "course"; used in asset strings.
def self.reflection_type_name
  base_class.name.underscore
end

def wildcard(*args)
  self.class.wildcard(*args)
end

# Build a sanitized SQL LIKE condition over one or more columns.
# Options: :type (:full/:left/:right match), :case_sensitive, :delimiter
# (wraps both columns and value so only delimited tokens match).
def self.wildcard(*args)
  options = args.last.is_a?(Hash) ? args.pop : {}
  options[:type] ||= :full

  value = args.pop
  if options[:delimiter]
    options[:type] = :full
    value = options[:delimiter] + value + options[:delimiter]
    delimiter = connection.quote(options[:delimiter])
    column_str = "#{delimiter} || %s || #{delimiter}"
    args = args.map{ |a| column_str % a.to_s }
  end

  value = wildcard_pattern(value, options)
  cols = args.map{ |col| like_condition(col, '?', !options[:case_sensitive]) }
  sanitize_sql_array ["(" + cols.join(" OR ") + ")", *([value] * cols.size)]
end

# Escape LIKE metacharacters and add %-anchors per options[:type].
def self.wildcard_pattern(value, options = {})
  value = value.to_s
  value = value.downcase unless options[:case_sensitive]
  value = value.gsub('\\', '\\\\\\\\').gsub('%', '\\%').gsub('_', '\\_')
  value = '%' + value unless options[:type] == :right
  value += '%' unless options[:type] == :left
  value
end

# LIKE condition matching against the space-joined COALESCE of several columns.
def self.coalesced_wildcard(*args)
  value = args.pop
  value = wildcard_pattern(value)
  cols = coalesce_chain(args)
  sanitize_sql_array ["(#{like_condition(cols, '?', false)})", value]
end

def self.coalesce_chain(cols)
  "(#{cols.map{|col| coalesce_clause(col)}.join(" || ' ' || ")})"
end

def self.coalesce_clause(column)
  "COALESCE(LOWER(#{column}), '')"
end

def self.like_condition(value, pattern = '?', downcase = true)
  value = "LOWER(#{value})" if downcase
  "#{value} LIKE #{pattern}"
end

# SQL expression giving a unicode-aware sort key for +col+ on PostgreSQL
# (pg_collkey when installed, bytea cast otherwise); passthrough elsewhere.
def self.best_unicode_collation_key(col)
  if ActiveRecord::Base.configurations[Rails.env]['adapter'] == 'postgresql'
    # For PostgreSQL, we can't trust a simple LOWER(column), with any collation, since
    # Postgres just defers to the C library which is different for each platform. The best
    # choice is the collkey function from pg_collkey which uses ICU to get a full unicode sort.
    # If that extension isn't around, casting to a bytea sucks for international characters,
    # but at least it's consistent, and orders commas before letters so you don't end up with
    # Johnson, Bob sorting before Johns, Jimmy
    unless instance_variable_defined?(:@collkey)
      @collkey = connection.extension_installed?(:pg_collkey)
    end
    if @collkey
      # The collation level of 3 is the default, but is explicitly specified here and means that
      # case, accents and base characters are all taken into account when creating a collation key
      # for a string - more at https://pgxn.org/dist/pg_collkey/0.5.1/
      "#{@collkey}.collkey(#{col}, '#{Canvas::ICU.locale_for_collation}', false, 3, true)"
    else
      "CAST(LOWER(replace(#{col}, '\\', '\\\\')) AS bytea)"
    end
  else
    col
  end
end
def self.count_by_date(options = {})
column = options[:column] || "created_at"
max_date = (options[:max_date] || Time.zone.now).midnight
num_days = options[:num_days] || 20
min_date = (options[:min_date] || max_date.advance(:days => -(num_days-1))).midnight
offset = max_date.utc_offset
expression = "((#{column} || '-00')::TIMESTAMPTZ AT TIME ZONE '#{Time.zone.tzinfo.name}')::DATE"
result = where(
"#{column} >= ? AND #{column} < ?",
min_date,
max_date.advance(:days => 1)
).
group(expression).
order(expression).
count
return result if result.keys.first.is_a?(Date)
Hash[result.map { |date, count|
[Time.zone.parse(date).to_date, count]
}]
end
# Builds a "CASE WHEN col IN (...) THEN i ... ELSE n END" expression so rows
# can be ordered by their group's position in +ary+; unmatched rows sort last.
def self.rank_sql(ary, col)
  clauses = ary.each_with_index.map do |values, i|
    quoted = Array(values).map { |value| connection.quote(value) }.join(', ')
    "WHEN #{col} IN (#{quoted}) THEN #{i} "
  end
  "CASE #{clauses.join}ELSE #{ary.size} END"
end
# Maps each value in +ary+ (scalars or arrays of equal-ranked values) to its
# 1-based rank; unknown values get ary.size + 1 via the hash default.
def self.rank_hash(ary)
  ranks = Hash.new(ary.size + 1)
  ary.each_with_index do |values, i|
    Array(values).each { |value| ranks[value] = i + 1 }
  end
  ranks
end
# Returns the distinct values of +column+, sorted ascending. On PostgreSQL a
# recursive CTE "loose index scan" is used, which is far faster than DISTINCT
# when the column has few distinct values relative to row count.
def self.distinct_values(column, include_nil: false)
column = column.to_s
result = if ActiveRecord::Base.configurations[Rails.env]['adapter'] == 'postgresql'
sql = ''
# prepend a NULL row (if any exist) when the caller asked for nils
sql << "SELECT NULL AS #{column} WHERE EXISTS (SELECT * FROM #{quoted_table_name} WHERE #{column} IS NULL) UNION ALL (" if include_nil
sql << <<-SQL
WITH RECURSIVE t AS (
SELECT MIN(#{column}) AS #{column} FROM #{quoted_table_name}
UNION ALL
SELECT (SELECT MIN(#{column}) FROM #{quoted_table_name} WHERE #{column} > t.#{column})
FROM t
WHERE t.#{column} IS NOT NULL
)
SELECT #{column} FROM t WHERE #{column} IS NOT NULL
SQL
sql << ")" if include_nil
find_by_sql(sql)
else
# NOTE(review): this branch uses the Rails 2-era find(:all, :select...)
# API, which no longer exists in Rails 4/5 — presumably dead code since
# only PostgreSQL is supported; verify before relying on it.
conditions = "#{column} IS NOT NULL" unless include_nil
find(:all, :select => "DISTINCT #{column}", :conditions => conditions, :order => column)
end
result.map(&column.to_sym)
end
# direction is nil, :asc, or :desc
# Builds an ORDER BY fragment placing NULLs :first or :last. On PostgreSQL it
# emits NULLS FIRST/LAST only when that differs from the default (NULLs sort
# last under ASC, first under DESC); elsewhere it orders on "column IS NULL".
def self.nulls(first_or_last, column, direction = nil)
if connection.adapter_name == 'PostgreSQL'
clause = if first_or_last == :first && direction != :desc
" NULLS FIRST"
elsif first_or_last == :last && direction == :desc
" NULLS LAST"
end
"#{column} #{direction.to_s.upcase}#{clause}".strip
else
# boolean sort key: false (NOT NULL) precedes true, pushing NULLs where asked
"#{column} IS#{" NOT" unless first_or_last == :last} NULL, #{column} #{direction.to_s.upcase}".strip
end
end
# set up class-specific getters/setters for a polymorphic association, e.g.
# belongs_to :context, polymorphic: [:course, :account]
# Overrides ActiveRecord's belongs_to: when :polymorphic is an Array/Hash of
# allowed types, per-type accessors and (unless exhaustive: false) a type
# validation are generated. Returns the reflection, like super.
def self.belongs_to(name, scope = nil, options={})
options = scope if scope.is_a?(Hash)
if options[:polymorphic] == true
raise "Please pass an array of valid types for polymorphic associations. Use exhaustive: false if you really don't want to validate them"
end
# pull out our custom options before handing the rest to Rails
polymorphic_prefix = options.delete(:polymorphic_prefix)
exhaustive = options.delete(:exhaustive)
reflection = super[name.to_s]
if reflection.options[:polymorphic].is_a?(Array) ||
reflection.options[:polymorphic].is_a?(Hash)
# stash our options on the reflection so add_polymorph_methods can see them
reflection.options[:exhaustive] = exhaustive
reflection.options[:polymorphic_prefix] = polymorphic_prefix
add_polymorph_methods(reflection)
end
reflection
end
# Generates the machinery behind a restricted polymorphic belongs_to:
# - a type-checked writer for the base association (unless exhaustive: false)
# - one scoped belongs_to plus reader/writer per allowed type, optionally
#   prefixed (polymorphic_prefix: true uses "<assoc>_", a symbol uses "<sym>_").
# Generated methods live in @polymorph_module so subclasses can override them
# and still call super.
def self.add_polymorph_methods(reflection)
unless @polymorph_module
@polymorph_module = Module.new
include(@polymorph_module)
end
# normalize :polymorphic into [association_name, class_name] pairs
specifics = []
Array.wrap(reflection.options[:polymorphic]).map do |name|
if name.is_a?(Hash)
specifics.concat(name.to_a)
else
specifics << [name, name.to_s.camelize]
end
end
unless reflection.options[:exhaustive] == false
specific_classes = specifics.map(&:last).sort
validates reflection.foreign_type, inclusion: { in: specific_classes }, allow_nil: true
@polymorph_module.class_eval <<-RUBY, __FILE__, __LINE__ + 1
def #{reflection.name}=(record)
if record && [#{specific_classes.join(', ')}].none? { |klass| record.is_a?(klass) }
message = "one of #{specific_classes.join(', ')} expected, got \#{record.class}"
raise ActiveRecord::AssociationTypeMismatch, message
end
super
end
RUBY
end
if reflection.options[:polymorphic_prefix] == true
prefix = "#{reflection.name}_"
elsif reflection.options[:polymorphic_prefix]
prefix = "#{reflection.options[:polymorphic_prefix]}_"
end
specifics.each do |(name, class_name)|
# ensure we capture this class's table name
table_name = self.table_name
belongs_to :"#{prefix}#{name}", -> { where(table_name => { reflection.foreign_type => class_name }) },
foreign_key: reflection.foreign_key,
class_name: class_name
# SQL-free guard evaluated in the generated readers/writers below
correct_type = "#{reflection.foreign_type} && self.class.send(:compute_type, #{reflection.foreign_type}) <= #{class_name}"
@polymorph_module.class_eval <<-RUBY, __FILE__, __LINE__ + 1
def #{prefix}#{name}
#{reflection.name} if #{correct_type}
end
def #{prefix}#{name}=(record)
# we don't want to unset it if it's currently some other type, i.e.
# foo.bar = Bar.new
# foo.baz = nil
# foo.bar.should_not be_nil
return if record.nil? && !(#{correct_type})
association(:#{prefix}#{name}).send(:raise_on_type_mismatch!, record) if record
self.#{reflection.name} = record
end
RUBY
end
end
# Runs the block up to retries+1 times, yielding the current attempt number.
# The final attempt is NOT rescued, so a persistent violation still raises.
def self.unique_constraint_retry(retries = 1)
# runs the block in a (possibly nested) transaction. if a unique constraint
# violation occurs, it will run it "retries" more times. the nested
# transaction (savepoint) ensures we don't mess up things for the outer
# transaction. useful for possible race conditions where we don't want to
# take a lock (e.g. when we create a submission).
retries.times do |retry_count|
begin
result = transaction(:requires_new => true) { uncached { yield(retry_count) } }
# the savepoint may have rolled back; don't serve stale cached queries
connection.clear_query_cache
return result
rescue ActiveRecord::RecordNotUnique
end
end
# last attempt, outside the rescue: let RecordNotUnique propagate
result = transaction(:requires_new => true) { uncached { yield(retries) } }
connection.clear_query_cache
result
end
# Returns the master's current WAL (xlog) write location as a pg_lsn string.
def self.current_xlog_location
Shard.current(shard_category).database_server.unshackle do
Shackles.activate(:master) do
connection.select_value("SELECT pg_current_xlog_location()")
end
end
end
# Blocks until the slave has replayed WAL up to +start+ (default: the
# master's location right now). No-op when the slave connection isn't
# actually read-only (e.g. single-database setups).
def self.wait_for_replication(start: nil)
return unless Shackles.activate(:slave) { connection.readonly? }
start ||= current_xlog_location
Shackles.activate(:slave) do
# NOTE(review): LSNs are compared here as Ruby strings; lexicographic
# order diverges from numeric LSN order once the segment part changes
# width (e.g. 'F/...' vs '10/...') — confirm this is acceptable or
# compare via pg_lsn in SQL instead.
while connection.select_value("SELECT pg_last_xlog_replay_location()") < start
sleep 0.1
end
end
end
# Inserts an array of attribute hashes in a single transaction via the
# adapter's bulk_insert. No-ops on an empty array; skips AR callbacks
# and validations entirely.
def self.bulk_insert(records)
return if records.empty?
transaction do
connection.bulk_insert(table_name, records)
end
end
include ActiveSupport::Callbacks::Suspension
# saves the record with all its save callbacks suspended.
# (validation/save and create-or-update callbacks, chosen by persistence state)
def save_without_callbacks
suspend_callbacks(kind: [:validation, :save, (new_record? ? :create : :update)]) { save }
end
# Touches every record of this class in primary-key batches, so the UPDATEs
# stay small and don't hold long locks.
def self.touch_all_records
self.find_ids_in_ranges do |min_id, max_id|
self.where(primary_key => min_id..max_id).touch_all
end
end
end
# Prepended into ActiveRecord::Relation: picks the best find_in_batches
# strategy — a server-side cursor when inside a transaction on PostgreSQL,
# a temp table when the relation's SELECT/GROUP/ORDER make offset-batching
# wrong, and stock Rails batching otherwise.
module UsefulFindInBatches
def find_in_batches(options = {}, &block)
# already in a transaction (or transactions don't matter); cursor is fine
if can_use_cursor? && !options[:start]
self.activate { |r| r.find_in_batches_with_cursor(options, &block) }
elsif find_in_batches_needs_temp_table?
# :start is id-based and can't be honored by the temp-table scheme
raise ArgumentError.new("GROUP and ORDER are incompatible with :start, as is an explicit select without the primary key") if options[:start]
self.activate { |r| r.find_in_batches_with_temp_table(options, &block) }
else
super
end
end
end
ActiveRecord::Relation.prepend(UsefulFindInBatches)
# Prepended into ActiveRecord::Relation: adds a :no_key_update lock mode that
# maps to PostgreSQL's weaker "FOR NO KEY UPDATE" row lock on 9.3+, and
# degrades to a plain FOR UPDATE (lock(true)) everywhere else.
module LockForNoKeyUpdate
  def lock(lock_type = true)
    if lock_type == :no_key_update
      supported = connection.adapter_name == 'PostgreSQL' &&
                  connection.send(:postgresql_version) >= 90300
      lock_type = supported ? 'FOR NO KEY UPDATE' : true
    end
    super(lock_type)
  end
end
ActiveRecord::Relation.prepend(LockForNoKeyUpdate)
ActiveRecord::Relation.class_eval do
# Guard-rail overrides: fail fast on relation APIs this codebase has banned.
def includes(*args)
  # bare .includes() (no args) is harmless and must still work
  raise "Use preload or eager_load instead of includes" unless args.empty? || args == [nil]
  super
end
def where!(*args)
  # where!.not arrives here with no args and silently misbehaves on 4.2
  raise "where!.not doesn't work in Rails 4.2" if args.empty?
  super
end
def uniq(*args)
  raise "use #distinct instead of #uniq on relations (Rails 5.1 will delegate uniq to to_a)"
end
# True when the relation has an explicit select list that omits the primary
# key (in any spelling), so id-ordered batching can't be trusted.
def select_values_necessitate_temp_table?
return false unless select_values.present?
# split comma-joined select strings into individual column expressions
selects = select_values.flat_map{|sel| sel.to_s.split(",").map(&:strip) }
id_keys = [primary_key, "*", "#{table_name}.#{primary_key}", "#{table_name}.*"]
id_keys.all?{|k| !selects.include?(k) }
end
private :select_values_necessitate_temp_table?
# True when GROUP/ORDER/DISTINCT or a pk-less select make stock
# find_in_batches (which reorders by id) return wrong results.
def find_in_batches_needs_temp_table?
order_values.any? ||
group_values.any? ||
select_values.to_s =~ /DISTINCT/i ||
distinct_value ||
select_values_necessitate_temp_table?
end
private :find_in_batches_needs_temp_table?
# A server-side cursor is only safe on PostgreSQL and only when the data
# can't change underneath it: read-only/slave connections, or an already
# open transaction (test-suite transactions count via the helpers below).
def can_use_cursor?
(connection.adapter_name == 'PostgreSQL' &&
(Shackles.environment == :slave ||
connection.readonly? ||
(!Rails.env.test? && connection.open_transactions > 0) ||
in_transaction_in_test?))
end
# Batched iteration via a PostgreSQL server-side cursor. Runs inside its own
# transaction (cursors don't outlive one), FETCHes batch_size rows at a time,
# preloads requested associations, and yields each batch.
def find_in_batches_with_cursor(options = {})
batch_size = options[:batch_size] || 1000
klass.transaction do
begin
sql = to_sql
# hash the SQL into the cursor name so nested/concurrent scopes don't collide
cursor = "#{table_name}_in_batches_cursor_#{sql.hash.abs.to_s(36)}"
connection.execute("DECLARE #{cursor} CURSOR FOR #{sql}")
includes = includes_values + preload_values
klass.unscoped do
batch = connection.uncached { klass.find_by_sql("FETCH FORWARD #{batch_size} FROM #{cursor}") }
while !batch.empty?
ActiveRecord::Associations::Preloader.new.preload(batch, includes) if includes
yield batch
# a short batch means the cursor is exhausted; skip the extra FETCH
break if batch.size < batch_size
batch = connection.uncached { klass.find_by_sql("FETCH FORWARD #{batch_size} FROM #{cursor}") }
end
end
ensure
# if the statement itself failed the transaction is aborted and CLOSE
# would just raise again
unless $!.is_a?(ActiveRecord::StatementInvalid)
connection.execute("CLOSE #{cursor}")
end
end
end
end
# Batched iteration for relations whose SELECT/GROUP/ORDER make id-based
# batching wrong: materializes the query into a session-local temp table,
# adds (or reuses) an ordered key, then pages through it. With :pluck,
# yields arrays of the plucked values instead of model instances.
# Only safe inside a migration or an open transaction (see raise below).
def find_in_batches_with_temp_table(options = {})
can_do_it = Rails.env.production? || ActiveRecord::Base.in_migration || ActiveRecord::Base.in_transaction_in_test?
raise "find_in_batches_with_temp_table probably won't work outside a migration
and outside a transaction. Unfortunately, it's impossible to automatically
determine a better way to do it that will work correctly. You can try
switching to slave first (then switching to master if you modify anything
inside your loop), wrapping in a transaction (but be wary of locking records
for the duration of your query if you do any writes in your loop), or not
forcing find_in_batches to use a temp table (avoiding custom selects,
group, or order)." unless can_do_it
if options[:pluck]
pluck = Array(options[:pluck])
# qualify plain column symbols; leave raw SQL expressions untouched
pluck_for_select = pluck.map do |column_name|
if column_name.is_a?(Symbol) && column_names.include?(column_name.to_s)
"#{connection.quote_local_table_name(table_name)}.#{connection.quote_column_name(column_name)}"
else
column_name.to_s
end
end
pluck = pluck.map(&:to_s)
end
batch_size = options[:batch_size] || 1000
if pluck
sql = select(pluck_for_select).to_sql
else
sql = to_sql
end
# name derives from the SQL hash; truncated to PostgreSQL's 63-char limit
table = "#{table_name}_find_in_batches_temp_table_#{sql.hash.abs.to_s(36)}"
table = table[-63..-1] if table.length > 63
connection.execute "CREATE TEMPORARY TABLE #{table} AS #{sql}"
begin
index = "temp_primary_key"
case connection.adapter_name
when 'PostgreSQL'
begin
# silence NOTICEs from ALTER TABLE/CREATE INDEX
old_proc = connection.raw_connection.set_notice_processor {}
if pluck && pluck.any?{|p| p == primary_key.to_s}
# the real pk was plucked; index it and page on it directly
connection.execute("CREATE INDEX #{connection.quote_local_table_name(index)} ON #{connection.quote_local_table_name(table)}(#{connection.quote_column_name(primary_key)})")
index = primary_key.to_s
else
# otherwise add a synthetic serial key to page on
pluck.unshift(index) if pluck
connection.execute "ALTER TABLE #{table}
ADD temp_primary_key SERIAL PRIMARY KEY"
end
ensure
connection.raw_connection.set_notice_processor(&old_proc) if old_proc
end
else
raise "Temp tables not supported!"
end
includes = includes_values + preload_values
klass.unscoped do
quoted_plucks = pluck && pluck.map do |column_name|
# Rails 4.2 is going to try to quote them anyway but unfortunately not to the temp table, so just make it explicit
column_names.include?(column_name) ?
"#{connection.quote_local_table_name(table)}.#{connection.quote_column_name(column_name)}" : column_name
end
if pluck
batch = klass.from(table).order(index).limit(batch_size).pluck(*quoted_plucks)
else
sql = "SELECT * FROM #{table} ORDER BY #{index} LIMIT #{batch_size}"
batch = klass.find_by_sql(sql)
end
while !batch.empty?
ActiveRecord::Associations::Preloader.new.preload(batch, includes) if includes
yield batch
break if batch.size < batch_size
# keyset pagination: resume after the last key we handed out
if pluck
last_value = pluck.length == 1 ? batch.last : batch.last[pluck.index(index)]
batch = klass.from(table).order(index).where("#{index} > ?", last_value).limit(batch_size).pluck(*quoted_plucks)
else
# index here is the integer temp_primary_key, so raw interpolation is safe
last_value = batch.last[index]
sql = "SELECT *
FROM #{table}
WHERE #{index} > #{last_value}
ORDER BY #{index} ASC
LIMIT #{batch_size}"
batch = klass.find_by_sql(sql)
end
end
end
ensure
# after a StatementInvalid inside a transaction the DROP would fail anyway
if !$!.is_a?(ActiveRecord::StatementInvalid) || connection.open_transactions == 0
connection.execute "DROP TABLE #{table}"
end
end
end
# where() for a polymorphic association column: accepts {assoc: record(s)}
# and matches on both the _id and _type columns. A nil among the values adds
# an "IS NULL AND IS NULL" alternative.
def polymorphic_where(args)
raise ArgumentError unless args.length == 1
column = args.first.first
values = Array(args.first.last)
# remember whether a nil was passed before stripping it
original_length = values.length
values = values.compact
raise ArgumentError, "need to call polymorphic_where with at least one object" if values.empty?
sql = (["(#{column}_id=? AND #{column}_type=?)"] * values.length).join(" OR ")
sql << " OR (#{column}_id IS NULL AND #{column}_type IS NULL)" if values.length < original_length
# bind (record, sti-base-class-name) pairs for each placeholder pair
where(sql, *values.map { |value| [value, value.class.base_class.name] }.flatten)
end
# Filters out rows touched within the last `touch_personal_space` seconds,
# to avoid write-amplification from repeated touch storms.
def not_recently_touched
scope = self
if((personal_space = Setting.get('touch_personal_space', 0).to_i) != 0)
personal_space -= 1
# truncate to seconds
bound = Time.at(Time.now.to_i - personal_space).utc
scope = scope.where("#{connection.quote_local_table_name(table_name)}.updated_at<?", bound)
end
scope
end
# Bumps updated_at on all matching rows. Locks the ids (FOR NO KEY UPDATE,
# in id order to avoid deadlocks) before updating, and skips rows already
# touched recently.
def touch_all
self.activate do |relation|
relation.transaction do
ids_to_touch = relation.not_recently_touched.lock(:no_key_update).order(:id).pluck(:id)
unscoped.where(id: ids_to_touch).update_all(updated_at: Time.now.utc) if ids_to_touch.any?
end
end
end
# PostgreSQL DISTINCT ON (...) support: returns a relation selecting the
# first row per distinct combination of the given columns/expressions.
def distinct_on(*args)
args.map! do |column_name|
# qualify plain column symbols; pass raw SQL expressions through
if column_name.is_a?(Symbol) && column_names.include?(column_name.to_s)
"#{connection.quote_local_table_name(table_name)}.#{connection.quote_column_name(column_name)}"
else
column_name.to_s
end
end
relation = clone
old_select = relation.select_values
# DISTINCT ON replaces any plain .distinct flag
relation.select_values = ["DISTINCT ON (#{args.join(', ')}) "]
relation.distinct_value = false
if old_select.empty?
relation.select_values.first << "*"
else
relation.select_values.first << old_select.uniq.join(', ')
end
relation
end
# if this sql is constructed on one shard then executed on another it wont work
# dont use it for cross shard queries
def union(*scopes)
uniq_identifier = "#{table_name}.#{primary_key}"
scopes << self
# UNION ALL of the scopes' pk selects, deduped by the outer IN (...)
sub_query = (scopes).map {|s| s.except(:select, :order).select(uniq_identifier).to_sql}.join(" UNION ALL ")
unscoped.where("#{uniq_identifier} IN (#{sub_query})")
end
# returns batch_size ids at a time, working through the primary key from
# smallest to largest.
#
# note this does a raw connection.select_values, so it doesn't work with scopes
def find_ids_in_batches(options = {})
batch_size = options[:batch_size] || 1000
key = "#{quoted_table_name}.#{primary_key}"
scope = except(:select).select(key).reorder(key).limit(batch_size)
ids = connection.select_values(scope.to_sql)
# select_values returns strings; cast unless the caller opted out
ids = ids.map(&:to_i) unless options[:no_integer_cast]
while ids.present?
yield ids
break if ids.size < batch_size
# keyset pagination on the pk, so no OFFSET scans
last_value = ids.last
ids = connection.select_values(scope.where("#{key}>?", last_value).to_sql)
ids = ids.map(&:to_i) unless options[:no_integer_cast]
end
end
# returns 2 ids at a time (the min and the max of a range), working through
# the primary key from smallest to largest.
# Options: :batch_size, :start_at, :end_at, and :loose (integer pks only).
def find_ids_in_ranges(options = {})
is_integer = columns_hash[primary_key.to_s].type == :integer
loose_mode = options[:loose] && is_integer
# loose_mode: if we don't care about getting exactly batch_size ids in between
# don't get the max - just get the min and add batch_size so we get that many _at most_
values = loose_mode ? "min(id)" : "min(id), max(id)"
batch_size = options[:batch_size].try(:to_i) || 1000
subquery_scope = except(:select).select("#{quoted_table_name}.#{primary_key} as id").reorder(primary_key).limit(loose_mode ? 1 : batch_size)
subquery_scope = subquery_scope.where("#{quoted_table_name}.#{primary_key} <= ?", options[:end_at]) if options[:end_at]
first_subquery_scope = options[:start_at] ? subquery_scope.where("#{quoted_table_name}.#{primary_key} >= ?", options[:start_at]) : subquery_scope
ids = connection.select_rows("select #{values} from (#{first_subquery_scope.to_sql}) as subquery").first
while ids.first.present?
ids.map!(&:to_i) if is_integer
# synthesize the range's upper bound in loose mode
ids << ids.first + batch_size if loose_mode
yield(*ids)
last_value = ids.last
next_subquery_scope = subquery_scope.where(["#{quoted_table_name}.#{primary_key}>?", last_value])
ids = connection.select_rows("select #{values} from (#{next_subquery_scope.to_sql}) as subquery").first
end
end
end
# Prepended into ActiveRecord::Relation: rewrites update_all/delete_all on
# relations with JOINs into PostgreSQL's UPDATE ... FROM / DELETE ... USING
# form, since SQL UPDATE/DELETE statements can't carry JOIN clauses directly.
module UpdateAndDeleteWithJoins
# Splits a string of INNER JOINs into its table references and ON
# conditions. Only INNER JOIN is expressible as FROM/USING + WHERE.
def deconstruct_joins(joins_sql=nil)
unless joins_sql
joins_sql = ''
add_joins!(joins_sql, nil)
end
tables = []
join_conditions = []
joins_sql.strip.split('INNER JOIN')[1..-1].each do |join|
# this could probably be improved
raise "PostgreSQL update_all/delete_all only supports INNER JOIN" unless join.strip =~ /([a-zA-Z0-9'"_\.]+(?:(?:\s+[aA][sS])?\s+[a-zA-Z0-9'"_]+)?)\s+ON\s+(.*)/
tables << $1
join_conditions << $2
end
[tables, join_conditions]
end
def update_all(updates, *args)
return super if joins_values.empty?
stmt = Arel::UpdateManager.new
stmt.set Arel.sql(@klass.send(:sanitize_sql_for_assignment, updates))
from = from_clause.value
stmt.table(from ? Arel::Nodes::SqlLiteral.new(from) : table)
stmt.key = table[primary_key]
sql = stmt.to_sql
# quote all bind values up front; we substitute them into raw SQL below
binds = bound_attributes.map(&:value_for_database)
binds.map! { |value| connection.quote(value) }
collector = Arel::Collectors::Bind.new
arel.join_sources.each do |node|
connection.visitor.accept(node, collector)
end
# count how many binds belong to the JOINs so they aren't re-consumed in WHERE
binds_in_join = collector.value.count { |x| x.is_a?(Arel::Nodes::BindParam) }
join_sql = collector.substitute_binds(binds).join
tables, join_conditions = deconstruct_joins(join_sql)
unless tables.empty?
sql.concat(' FROM ')
sql.concat(tables.join(', '))
sql.concat(' ')
end
# JOIN ON conditions become plain WHERE conditions in the FROM form
scope = self
join_conditions.each { |join| scope = scope.where(join) }
# skip any binds that are used in the join
binds = scope.bound_attributes[binds_in_join..-1]
binds = binds.map(&:value_for_database)
binds.map! { |value| connection.quote(value) }
sql_string = Arel::Collectors::Bind.new
scope.arel.constraints.each do |node|
connection.visitor.accept(node, sql_string)
end
sql.concat('WHERE ' + sql_string.substitute_binds(binds).join)
connection.update(sql, "#{name} Update")
end
def delete_all
return super if joins_values.empty?
sql = "DELETE FROM #{quoted_table_name} "
join_sql = arel.join_sources.map(&:to_sql).join(" ")
tables, join_conditions = deconstruct_joins(join_sql)
sql.concat('USING ')
sql.concat(tables.join(', '))
sql.concat(' ')
scope = self
join_conditions.each { |join| scope = scope.where(join) }
binds = scope.bound_attributes
binds = binds.map(&:value_for_database)
binds.map! { |value| connection.quote(value) }
sql_string = Arel::Collectors::Bind.new
scope.arel.constraints.each do |node|
connection.visitor.accept(node, sql_string)
end
sql.concat('WHERE ' + sql_string.substitute_binds(binds).join)
connection.delete(sql, "SQL", scope.bind_values)
end
end
ActiveRecord::Relation.prepend(UpdateAndDeleteWithJoins)
# Prepended into ActiveRecord::Relation: LIMIT/OFFSET are not valid on
# UPDATE/DELETE statements, so when either is present we select the affected
# primary keys in a subquery and mutate that id set instead.
module UpdateAndDeleteAllWithLimit
  def delete_all(*args)
    return super unless limit_value || offset_value
    id_scope = except(:select).select("#{quoted_table_name}.#{primary_key}")
    unscoped.where(primary_key => id_scope).delete_all
  end

  def update_all(updates, *args)
    return super unless limit_value || offset_value
    id_scope = except(:select).select("#{quoted_table_name}.#{primary_key}")
    unscoped.where(primary_key => id_scope).update_all(updates)
  end
end
ActiveRecord::Relation.prepend(UpdateAndDeleteAllWithLimit)
ActiveRecord::Associations::CollectionProxy.class_eval do
# Proxies delegate missing methods to the target/klass, so respond_to? must
# consider those too; marshal hooks are excluded so Marshal doesn't think
# the proxy is customizing serialization.
def respond_to?(name, include_private = false)
return super if [:marshal_dump, :_dump, 'marshal_dump', '_dump'].include?(name)
super ||
(load_target && target.respond_to?(name, include_private)) ||
proxy_association.klass.respond_to?(name, include_private)
end
def temp_record(*args)
# creates a record with attributes like a child record but is not added to the collection for autosaving
record = klass.unscoped.merge(scope).new(*args)
@association.set_inverse_instance(record)
record
end
# mirror the Relation-level ban on #uniq
def uniq(*args)
raise "use #distinct instead of #uniq on relations (Rails 5.1 will delegate uniq to to_a)"
end
end
ActiveRecord::ConnectionAdapters::AbstractAdapter.class_eval do
# Generic fallback bulk insert: one INSERT per record, with identifiers and
# values quoted by the adapter. Assumes every record has the same keys as
# the first. Adapters with a native multi-row form can override this.
def bulk_insert(table_name, records)
keys = records.first.keys
quoted_keys = keys.map{ |k| quote_column_name(k) }.join(', ')
records.each do |record|
execute <<-SQL
INSERT INTO #{quote_table_name(table_name)}
(#{quoted_keys})
VALUES
(#{keys.map{ |k| quote(record[k]) }.join(', ')})
SQL
end
end
end
class ActiveRecord::ConnectionAdapters::AbstractAdapter
# for functions that differ from one adapter to the next, use the following
# method (overriding as needed in non-standard adapters), e.g.
#
# connection.func(:group_concat, :name, '|') ->
# group_concat(name, '|') (default)
# group_concat(name SEPARATOR '|') (mysql)
# string_agg(name::text, '|') (postgres)
def func(name, *args)
"#{name}(#{args.map{ |arg| func_arg_esc(arg) }.join(', ')})"
end
# Symbols pass through as raw identifiers; everything else is quoted.
def func_arg_esc(arg)
arg.is_a?(Symbol) ? arg : quote(arg)
end
def group_by(*columns)
# the first item should be the primary key(s) that the other columns are
# functionally dependent on. alternatively, it can be a class, and all
# columns will be inferred from it. this is useful for cases where you want
# to select all columns from one table, and an aggregate from another.
Array(infer_group_by_columns(columns).first).join(", ")
end
# Expands any AR classes in +columns+ into their fully-qualified, quoted
# column lists; plain entries are passed through unchanged.
def infer_group_by_columns(columns)
columns.map { |col|
col.respond_to?(:columns) ?
col.columns.map { |c|
"#{col.quoted_table_name}.#{quote_column_name(c.name)}"
} :
col
}
end
end
ActiveRecord::Associations::HasOneAssociation.class_eval do
# Strips the owner's primary key out of the creation attributes — unless the
# association actually keys off the pk (shared-pk has_one), where dropping it
# would break the link.
def create_scope
scope = self.scope.scope_for_create.stringify_keys
scope = scope.except(klass.primary_key) unless klass.primary_key.to_s == reflection.foreign_key.to_s
scope
end
end
class ActiveRecord::Migration
# Canvas tags migrations to control when they run relative to a deploy.
VALID_TAGS = [:predeploy, :postdeploy, :cassandra]
# at least one of these tags is required
DEPLOY_TAGS = [:predeploy, :postdeploy]
class << self
# Declares this migration's tags; rejects anything outside VALID_TAGS.
def tag(*tags)
raise "invalid tags #{tags.inspect}" unless tags - VALID_TAGS == []
(@tags ||= []).concat(tags).uniq!
end
def tags
@tags ||= []
end
def is_postgres?
connection.adapter_name == 'PostgreSQL'
end
# True when a function named +procname+ exists in pg_proc.
# NOTE(review): procname is interpolated into SQL unquoted — callers appear
# to pass literals only; confirm no user input can reach this.
def has_postgres_proc?(procname)
connection.select_value("SELECT COUNT(*) FROM pg_proc WHERE proname='#{procname}'").to_i != 0
end
end
# Prefer the class-level connection (e.g. a sharded one) when present.
def connection
if self.class.respond_to?(:connection)
return self.class.connection
else
@connection || ActiveRecord::Base.connection
end
end
def tags
self.class.tags
end
end
class ActiveRecord::MigrationProxy
delegate :connection, :tags, :cassandra_cluster, to: :migration
# A migration may opt out of running (e.g. wrong shard/cluster) by defining
# runnable?; absent that, it always runs.
def runnable?
!migration.respond_to?(:runnable?) || migration.runnable?
end
# Loads the migration file and enforces that exactly one deploy tag
# (predeploy XOR postdeploy) was declared.
def load_migration
load(filename)
@migration = name.constantize
raise "#{self.name} (#{self.version}) is not tagged as exactly one of predeploy or postdeploy!" unless (@migration.tags & ActiveRecord::Migration::DEPLOY_TAGS).length == 1
@migration
end
end
# Prepended onto ActiveRecord::Migrator's singleton: memoizes the (expensive)
# migration file scan per paths argument, process-wide.
module MigratorCache
def migrations(paths)
@@migrations_hash ||= {}
@@migrations_hash[paths] ||= super
end
def migrations_paths
@@migrations_paths ||= [File.join(Rails.root, "db/migrate")]
end
end
ActiveRecord::Migrator.singleton_class.prepend(MigratorCache)
# Prepended into ActiveRecord::Migrator: filters migrations by runnable?,
# tracks "in migration" state, and confirms interactive down-migrations.
module Migrator
# Pending migrations that declined to run (runnable? == false).
def skipped_migrations
pending_migrations(call_super: true).reject(&:runnable?)
end
def pending_migrations(call_super: false)
return super() if call_super
super().select(&:runnable?)
end
def runnable
super.select(&:runnable?)
end
def execute_migration_in_transaction(migration, direct)
old_in_migration, ActiveRecord::Base.in_migration = ActiveRecord::Base.in_migration, true
if defined?(Marginalia)
# tag queries with the migration name for SQL log attribution
old_migration_name, Marginalia::Comment.migration = Marginalia::Comment.migration, migration.name
end
# destructive down-migrations require interactive confirmation outside test
if down? && !Rails.env.test? && !$confirmed_migrate_down
require 'highline'
if HighLine.new.ask("Revert migration #{migration.name} (#{migration.version}) ? [y/N/a] > ") !~ /^([ya])/i
raise("Revert not confirmed")
end
# 'a' = yes to all: skip the prompt for the rest of this run
$confirmed_migrate_down = true if $1.downcase == 'a'
end
super
ensure
ActiveRecord::Base.in_migration = old_in_migration
Marginalia::Comment.migration = old_migration_name if defined?(Marginalia)
end
end
ActiveRecord::Migrator.prepend(Migrator)
ActiveRecord::Migrator.migrations_paths.concat Dir[Rails.root.join('gems', 'plugins', '*', 'db', 'migrate')]
ActiveRecord::Tasks::DatabaseTasks.migrations_paths = ActiveRecord::Migrator.migrations_paths
ActiveRecord::ConnectionAdapters::SchemaStatements.class_eval do
# in anticipation of having to re-run migrations due to integrity violations or
# killing stuff that is holding locks too long
# Idempotent add_foreign_key: skips when the FK already exists; on PostgreSQL
# an existing-but-unvalidated constraint is dropped and re-added.
def add_foreign_key_if_not_exists(from_table, to_table, options = {})
options[:column] ||= "#{to_table.to_s.singularize}_id"
column = options[:column]
case self.adapter_name
when 'PostgreSQL'
foreign_key_name = foreign_key_name(from_table, options)
# convalidated tells us whether an existing constraint was validated
query = supports_delayed_constraint_validation? ? 'convalidated' : 'conname'
schema = @config[:use_qualified_names] ? quote(shard.name) : 'current_schema()'
value = select_value("SELECT #{query} FROM pg_constraint INNER JOIN pg_namespace ON pg_namespace.oid=connamespace WHERE conname='#{foreign_key_name}' AND nspname=#{schema}")
if supports_delayed_constraint_validation? && value == 'f'
# exists but never validated: drop so the re-add below validates it
execute("ALTER TABLE #{quote_table_name(from_table)} DROP CONSTRAINT #{quote_table_name(foreign_key_name)}")
elsif value
return
end
add_foreign_key(from_table, to_table, options)
else
foreign_key_name = foreign_key_name(from_table, column, options)
return if foreign_keys(from_table).find { |k| k.options[:name] == foreign_key_name }
add_foreign_key(from_table, to_table, options)
end
end
# Idempotent remove_foreign_key: swallows only "does not exist" errors.
def remove_foreign_key_if_exists(table, options = {})
begin
remove_foreign_key(table, options)
rescue ActiveRecord::StatementInvalid => e
raise unless e.message =~ /PG(?:::)?Error: ERROR:.+does not exist/
end
end
end
ActiveRecord::Associations::CollectionAssociation.class_eval do
# CollectionAssociation implements uniq for :uniq option, in its
# own special way. re-implement, but as a relation
def distinct
scope.distinct
end
end
# Runs all AR callbacks with the model's default scope removed, so callbacks
# see the whole table rather than a scoped slice. The overridden method name
# differs between Rails 5.0 and later, hence the conditional.
module UnscopeCallbacks
method = CANVAS_RAILS5_0 ? "__run_callbacks__" : "run_callbacks"
module_eval <<-RUBY, __FILE__, __LINE__ + 1
def #{method}(*args)
scope = self.class.all.klass.unscoped
scope.scoping { super }
end
RUBY
end
ActiveRecord::Base.send(:include, UnscopeCallbacks)
# Prepended onto DynamicMatchers::Method: disables all dynamic finders except
# find_by_* by discarding any other match.
module MatchWithDiscard
def match(model, name)
result = super
return nil if result && !result.is_a?(ActiveRecord::DynamicMatchers::FindBy)
result
end
end
ActiveRecord::DynamicMatchers::Method.singleton_class.prepend(MatchWithDiscard)
# see https://github.com/rails/rails/issues/18659
# see https://github.com/rails/rails/issues/18659
# Serialization shim: marshals only the model class, and eagerly defines that
# class's attribute methods again when the instance is loaded back in.
class AttributesDefiner
  def initialize(klass)
    @klass = klass
  end

  # Marshal only needs the class itself.
  def marshal_dump
    @klass
  end

  # defines attribute methods when loaded through Marshal
  def marshal_load(klass)
    klass.define_attribute_methods
    @klass = klass
  end
end
# Included into ActiveRecord::Base: every instance carries an AttributesDefiner
# so that unmarshalling a record re-defines its class's attribute methods.
module DefineAttributeMethods
def init_internals
@define_attributes_helper = AttributesDefiner.new(self.class)
super
end
end
ActiveRecord::Base.include(DefineAttributeMethods)
module SkipTouchCallbacks
  module Base
    # Runs the block with belongs_to touch callbacks for +name+ suppressed on
    # this class. Re-entrant: a nested call for an already-skipped name leaves
    # the flag management to the outer call. The ensure guarantees the flag is
    # cleared even when the block raises (the previous implementation leaked
    # the skip flag on exceptions, permanently disabling touches for +name+).
    def skip_touch_callbacks(name)
      @skip_touch_callbacks ||= Set.new
      if @skip_touch_callbacks.include?(name)
        yield
      else
        @skip_touch_callbacks << name
        begin
          yield
        ensure
          @skip_touch_callbacks.delete(name)
        end
      end
    end

    # True when +name+ is currently skipped on this class or on any superclass
    # that is still an ActiveRecord model.
    def touch_callbacks_skipped?(name)
      (@skip_touch_callbacks && @skip_touch_callbacks.include?(name)) ||
        (self.superclass < ActiveRecord::Base && self.superclass.touch_callbacks_skipped?(name))
    end
  end

  module BelongsTo
    # touch_record's arity differs between Rails 5.0 and 5.1; either way, bail
    # out when the owning class has the touch suppressed.
    def touch_record(o, *args)
      name = CANVAS_RAILS5_0 ? args[1] : args[2]
      return if o.class.touch_callbacks_skipped?(name)
      super
    end
  end
end
ActiveRecord::Base.singleton_class.include(SkipTouchCallbacks::Base)
ActiveRecord::Associations::Builder::BelongsTo.singleton_class.prepend(SkipTouchCallbacks::BelongsTo)
# Prepended into ActiveRecord::Base: when building a readonly clone, skip the
# expensive default-diffing and just dup the changed-attributes map.
module ReadonlyCloning
def calculate_changes_from_defaults
if @readonly_clone
@changed_attributes = @changed_attributes.dup if @changed_attributes # otherwise changes to the clone will dirty the original
else
super # no reason to do this if we're creating a readonly clone - can take a long time with serialized columns
end
end
end
ActiveRecord::Base.prepend(ReadonlyCloning)
# setting a serialized attribute to an array of hashes shouldn't change all the hashes to indifferent access
# when the array gets stored in the indifferent access hash inside the mutation tracker
# not that it really matters too much but having some consistency is nice
module DupArraysInMutationTracker
  def change_to_attribute(*args)
    change = super
    return change unless change
    # hand the tracker its own copy of the new value when it's an Array
    new_value = change[1]
    change[1] = new_value.dup if new_value.is_a?(Array)
    change
  end
end
ActiveRecord::AttributeMutationTracker.prepend(DupArraysInMutationTracker) unless CANVAS_RAILS5_0
# Hooked into the migration generator: computes the highest existing
# migration number while ignoring malformed timestamps (anything not exactly
# 14 digits when timestamped migrations are on), so a bogus future-dated file
# can't push new migrations out of sequence.
module IgnoreOutOfSequenceMigrationDates
def current_migration_number(dirname)
migration_lookup_at(dirname).map do |file|
digits = File.basename(file).split("_").first
next if ActiveRecord::Base.timestamped_migrations && digits.length != 14
digits.to_i
end.compact.max.to_i
end
end
# Thor doesn't call `super` in its `inherited` method, so hook in so that we can hook in later :)
Thor::Group.singleton_class.prepend(Autoextend::ClassMethods)
Autoextend.hook(:"ActiveRecord::Generators::MigrationGenerator",
IgnoreOutOfSequenceMigrationDates,
singleton: true,
method: :prepend,
optional: true)
|
6486449j/Blocks
|
game/block/FurnaceBlock.java
|
package game.block;
import util.BmpRes;
import game.entity.Agent;
import game.item.*;
import game.world.World;
import game.entity.DroppedItem;
import game.entity.Player;
import game.ui.*;
import static util.MathUtil.*;
/**
 * A placeable furnace: stores fuel and smeltable items in two 4-slot
 * containers, burns fuel over ticks (onCheck), and converts items via their
 * heatingProduct. Implements BlockWithUI for the three-page furnace UI.
 */
public class FurnaceBlock extends StoneBlock implements BlockWithUI{
private static final long serialVersionUID=1844677L;
// sprite 0 = idle; sprites 1-3 = animated burning frames
static BmpRes bmp[]=BmpRes.load("Block/FurnaceBlock_",4);
// remaining burn ticks; > 0 means the furnace is lit
private int burning;
// fuel: burnable items; items: smeltable items (one per slot)
private ShowableItemContainer fuel,items;
public BmpRes getBmp(){return bmp[burning>0?rndi(1,3):0];}
int maxDamage(){return 80;}
public boolean isDeep(){return true;}
/** Builds the furnace UI: fuel page, smelting page, and heat-craft page. */
public UI getUI(BlockAt ba){
return new UI_MultiPage(){{
addPage(new game.item.Coal(),new UI_ItemList(0,3,4,1,fuel,pl.il));
addPage(new game.item.IronOre(),new UI_ItemList(0,0.5f,4,1,items,pl.il));
addPage(new game.item.IronPickax(),new UI_Craft(Craft.getAllEq(CraftInfo._heat)));
}
}.setBlock(ba);
}
/** Heat crafting is only available while the furnace is burning. */
public int getCraftType(){return burning>0?CraftInfo._heat:0;}
public void onPlace(int x,int y){
fuel=ItemList.emptyList(4);
items=ItemList.emptyNonOverlapList(4);
}
/** Ignition: consume one unit of fuel and schedule ticking if it caught. */
public void onFireUp(int x,int y){
if(burning==0){
addFuel();
if(burning>0)World.cur.checkBlock(x,y);
}
}
// Consumes the first non-empty fuel slot with a positive fuel value;
// each fuel unit yields fuelVal()*2 burn ticks.
private void addFuel(){
for(SingleItem si:fuel.toArray())if(!si.isEmpty()){
int v=si.get().fuelVal();
if(v>0){
si.dec();
burning+=v*2;
return;
}
}
}
/** Per-tick update: randomly smelt items, burn down, refuel, re-schedule. */
public boolean onCheck(int x,int y){
if(super.onCheck(x,y))return true;
if(burning>0){
for(SingleItem si:items.toArray())if(!si.isEmpty()){
Item w=si.get();
// each tick an item converts with probability ~1/heatingTime
if(w.heatingTime(true)*rnd()<1)si.set(w.heatingProduct(true));
}
--burning;
if(burning==0)addFuel();
if(burning>0)World.cur.checkBlock(x,y);
}
return false;
}
/** Spills both containers onto the ground before the block is removed. */
public void onDestroy(int x,int y){
DroppedItem.dropItems(fuel,x+0.5,y+0.2);
fuel=null;
DroppedItem.dropItems(items,x+0.5,y+0.7);
items=null;
super.onDestroy(x,y);
}
public game.item.SingleItem[] getItems(){return items.toArray();}
};
|
ljfranklin/bosh-bot
|
src/s3.js
|
<gh_stars>1-10
var fs = require('fs')
var AWS = require('aws-sdk')
function NotFoundError (message) {
Error.captureStackTrace(this, this.constructor)
this.name = this.constructor.name
this.message = message
}
/**
 * Builds a thin S3 client wrapper with callback-style upload/download.
 * opts: { accessKey, secretKey, region?, endpoint? } — at least one of
 * region/endpoint must be set or each call errors out.
 */
function createClient (opts) {
  var client = {}
  var awsOpts = {
    accessKeyId: opts.accessKey,
    secretAccessKey: opts.secretKey
  }
  if (opts.region) {
    awsOpts.region = opts.region
  }
  if (opts.endpoint) {
    awsOpts.endpoint = opts.endpoint
  }
  var s3 = new AWS.S3(awsOpts)
  // upload({bucket, key, localPath}, cb): streams the local file to S3.
  // cb(err) — err is null on success.
  client.upload = function (params, cb) {
    if (!opts.endpoint && !opts.region) {
      cb(new Error('Must specify either S3 region or S3 endpoint.'))
      return
    }
    var awsParams = {
      Bucket: params.bucket,
      Key: params.key,
      Body: fs.createReadStream(params.localPath)
    }
    s3.putObject(awsParams, function (err, _) {
      if (err) {
        // re-wrap so callers get a plain Error, not an AWS.Response object
        cb(new Error(err.message))
        return
      }
      cb(null)
    })
  }
  // download({bucket, key, localPath}, cb): fetches the object and writes it
  // to localPath with mode 0600. cb(NotFoundError) on a 404.
  client.download = function (params, cb) {
    if (!opts.endpoint && !opts.region) {
      cb(new Error('Must specify either S3 region or S3 endpoint.'))
      return
    }
    var awsParams = {
      Bucket: params.bucket,
      Key: params.key
    }
    s3.getObject(awsParams, function (err, data) {
      if (err) {
        if (err.statusCode === 404) {
          cb(new NotFoundError(`The file ${params.key} does not exist in bucket ${params.bucket}.`))
          return
        } else {
          cb(new Error(err.message))
          return
        }
      }
      fs.writeFile(params.localPath, data.Body, { mode: 0o600 }, function (err) {
        if (err) {
          cb(err)
        } else {
          cb(null)
        }
      })
    })
  }
  return client
}
module.exports = {
createClient: createClient,
NotFoundError: NotFoundError
}
|
rykrr/Quinterac
|
frontend/src/test/java/ca/queensu/cisc327/afk/R4.java
|
<reponame>rykrr/Quinterac
package ca.queensu.cisc327.afk;
import static org.junit.Assert.*;
import java.util.Arrays;
import org.junit.Test;
public class R4 extends AppTest {

    /**
     * Runs one R4 acceptance scenario. Each scenario directory under
     * {@code ./tests/r4/} provides the console input, starting accounts,
     * expected console output and expected transaction summary.
     *
     * @param name scenario directory name (e.g. "t1")
     * @throws Exception if the scenario files cannot be read or the run fails
     */
    private void runScenario(String name) throws Exception {
        String base = "./tests/r4/" + name;
        runAndTest(getListFromFile(base + "/console_input.txt"),
                getListFromFile(base + "/accounts.txt"),
                getListFromFile(base + "/console_output.txt"),
                getListFromFile(base + "/expected_transactions.txt"));
    }

    @Test
    public void testAppr4T1() throws Exception {
        runScenario("t1");
    }

    @Test
    public void testAppr4T2() throws Exception {
        runScenario("t2");
    }

    @Test
    public void testAppr4T3() throws Exception {
        runScenario("t3");
    }
}
|
RemiArnaud/blender
|
source/blender/bmesh/intern/bmesh_mesh.h
|
/*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software Foundation,
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
#ifndef __BMESH_MESH_H__
#define __BMESH_MESH_H__
/** \file
* \ingroup bmesh
*/
#include "bmesh_class.h"
struct BMAllocTemplate;
struct BMLoopNorEditDataArray;
struct MLoopNorSpaceArray;
void BM_mesh_elem_toolflags_ensure(BMesh *bm);
void BM_mesh_elem_toolflags_clear(BMesh *bm);
struct BMeshCreateParams {
uint use_toolflags : 1;
};
BMesh *BM_mesh_create(const struct BMAllocTemplate *allocsize,
const struct BMeshCreateParams *params);
void BM_mesh_free(BMesh *bm);
void BM_mesh_data_free(BMesh *bm);
void BM_mesh_clear(BMesh *bm);
void BM_mesh_normals_update(BMesh *bm);
void BM_verts_calc_normal_vcos(BMesh *bm,
const float (*fnos)[3],
const float (*vcos)[3],
float (*vnos)[3]);
void BM_loops_calc_normal_vcos(BMesh *bm,
const float (*vcos)[3],
const float (*vnos)[3],
const float (*pnos)[3],
const bool use_split_normals,
const float split_angle,
float (*r_lnos)[3],
struct MLoopNorSpaceArray *r_lnors_spacearr,
short (*clnors_data)[2],
const int cd_loop_clnors_offset,
const bool do_rebuild);
bool BM_loop_check_cyclic_smooth_fan(BMLoop *l_curr);
void BM_lnorspacearr_store(BMesh *bm, float (*r_lnors)[3]);
void BM_lnorspace_invalidate(BMesh *bm, const bool do_invalidate_all);
void BM_lnorspace_rebuild(BMesh *bm, bool preserve_clnor);
void BM_lnorspace_update(BMesh *bm);
void BM_normals_loops_edges_tag(BMesh *bm, const bool do_edges);
#ifndef NDEBUG
void BM_lnorspace_err(BMesh *bm);
#endif
/* Loop Generics */
struct BMLoopNorEditDataArray *BM_loop_normal_editdata_array_init(BMesh *bm,
const bool do_all_loops_of_vert);
void BM_loop_normal_editdata_array_free(struct BMLoopNorEditDataArray *lnors_ed_arr);
void BM_edges_sharp_from_angle_set(BMesh *bm, const float split_angle);
void bmesh_edit_begin(BMesh *bm, const BMOpTypeFlag type_flag);
void bmesh_edit_end(BMesh *bm, const BMOpTypeFlag type_flag);
void BM_mesh_elem_index_ensure_ex(BMesh *bm, const char htype, int elem_offset[4]);
void BM_mesh_elem_index_ensure(BMesh *bm, const char hflag);
void BM_mesh_elem_index_validate(
BMesh *bm, const char *location, const char *func, const char *msg_a, const char *msg_b);
void BM_mesh_toolflags_set(BMesh *bm, bool use_toolflags);
#ifndef NDEBUG
bool BM_mesh_elem_table_check(BMesh *bm);
#endif
void BM_mesh_elem_table_ensure(BMesh *bm, const char htype);
void BM_mesh_elem_table_init(BMesh *bm, const char htype);
void BM_mesh_elem_table_free(BMesh *bm, const char htype);
/* O(1) vertex lookup by index. Asserts the index is in range and that the
 * vertex element table is valid (not dirty); callers must have run
 * BM_mesh_elem_table_ensure(bm, BM_VERT) first. */
BLI_INLINE BMVert *BM_vert_at_index(BMesh *bm, const int index)
{
  BLI_assert((index >= 0) && (index < bm->totvert));
  BLI_assert((bm->elem_table_dirty & BM_VERT) == 0);
  return bm->vtable[index];
}
/* O(1) edge lookup by index; requires a valid (non-dirty) edge table,
 * see BM_mesh_elem_table_ensure(bm, BM_EDGE). */
BLI_INLINE BMEdge *BM_edge_at_index(BMesh *bm, const int index)
{
  BLI_assert((index >= 0) && (index < bm->totedge));
  BLI_assert((bm->elem_table_dirty & BM_EDGE) == 0);
  return bm->etable[index];
}
/* O(1) face lookup by index; requires a valid (non-dirty) face table,
 * see BM_mesh_elem_table_ensure(bm, BM_FACE). */
BLI_INLINE BMFace *BM_face_at_index(BMesh *bm, const int index)
{
  BLI_assert((index >= 0) && (index < bm->totface));
  BLI_assert((bm->elem_table_dirty & BM_FACE) == 0);
  return bm->ftable[index];
}
BMVert *BM_vert_at_index_find(BMesh *bm, const int index);
BMEdge *BM_edge_at_index_find(BMesh *bm, const int index);
BMFace *BM_face_at_index_find(BMesh *bm, const int index);
BMVert *BM_vert_at_index_find_or_table(BMesh *bm, const int index);
BMEdge *BM_edge_at_index_find_or_table(BMesh *bm, const int index);
BMFace *BM_face_at_index_find_or_table(BMesh *bm, const int index);
// XXX
int BM_mesh_elem_count(BMesh *bm, const char htype);
void BM_mesh_remap(BMesh *bm, const uint *vert_idx, const uint *edge_idx, const uint *face_idx);
void BM_mesh_rebuild(BMesh *bm,
const struct BMeshCreateParams *params,
struct BLI_mempool *vpool,
struct BLI_mempool *epool,
struct BLI_mempool *lpool,
struct BLI_mempool *fpool);
typedef struct BMAllocTemplate {
int totvert, totedge, totloop, totface;
} BMAllocTemplate;
extern const BMAllocTemplate bm_mesh_allocsize_default;
extern const BMAllocTemplate bm_mesh_chunksize_default;
#define BMALLOC_TEMPLATE_FROM_BM(bm) \
{ \
(CHECK_TYPE_INLINE(bm, BMesh *), (bm)->totvert), (bm)->totedge, (bm)->totloop, (bm)->totface \
}
#define _VA_BMALLOC_TEMPLATE_FROM_ME_1(me) \
{ \
(CHECK_TYPE_INLINE(me, Mesh *), (me)->totvert), (me)->totedge, (me)->totloop, (me)->totpoly, \
}
#define _VA_BMALLOC_TEMPLATE_FROM_ME_2(me_a, me_b) \
{ \
(CHECK_TYPE_INLINE(me_a, Mesh *), \
CHECK_TYPE_INLINE(me_b, Mesh *), \
(me_a)->totvert + (me_b)->totvert), \
(me_a)->totedge + (me_b)->totedge, (me_a)->totloop + (me_b)->totloop, \
(me_a)->totpoly + (me_b)->totpoly, \
}
#define BMALLOC_TEMPLATE_FROM_ME(...) \
VA_NARGS_CALL_OVERLOAD(_VA_BMALLOC_TEMPLATE_FROM_ME_, __VA_ARGS__)
/* Vertex coords access. */
void BM_mesh_vert_coords_get(BMesh *bm, float (*orco)[3]);
float (*BM_mesh_vert_coords_alloc(BMesh *bm, int *r_vert_len))[3];
void BM_mesh_vert_coords_apply(BMesh *bm, const float (*orco)[3]);
void BM_mesh_vert_coords_apply_with_mat4(BMesh *bm,
const float (*vert_coords)[3],
const float mat[4][4]);
#endif /* __BMESH_MESH_H__ */
|
luhuadong/evm
|
modules/evm/evm_module_i2c.c
|
/****************************************************************************
**
** Copyright (C) 2020 @scriptiot
**
** EVM是一款通用化设计的虚拟机引擎,拥有语法解析前端接口、编译器、虚拟机和虚拟机扩展接口框架。
** 支持js、python、qml、lua等多种脚本语言,纯C开发,零依赖,内置REPL,支持主流 ROM > 40KB, RAM > 2KB的MCU;
** 自带垃圾回收(GC)先进的内存管理,采用最复杂的压缩算法,无内存碎片(大部分解释器都存在内存碎片)
** Version : 1.0
** Email : <EMAIL>
** Website : https://github.com/scriptiot/evm
** https://gitee.com/scriptiot/evm
** Licence: Apache-2.0
****************************************************************************/
#ifdef CONFIG_EVM_I2C
#include "evm_module.h"
//I2C(bus, ...)
/* Script-facing constructor stub for I2C(bus, ...).
 * Not implemented yet: all arguments are ignored and undefined is returned.
 * The EVM_UNUSED macros silence unused-parameter warnings. */
static evm_val_t evm_module_i2c(evm_t *e, evm_val_t *p, int argc, evm_val_t *v)
{
    EVM_UNUSED(e);
    EVM_UNUSED(p);
    EVM_UNUSED(argc);
    EVM_UNUSED(v);
    return EVM_VAL_UNDEFINED;
}
/* Builds and returns the I2C class object for the VM.
 * The member table is empty (terminator entry only) for now. */
evm_val_t evm_class_i2c(evm_t * e){
    evm_builtin_t class_i2c[] = {
        {NULL, NULL}
    };
    /* Pass the native constructor (evm_module_i2c), not this factory
     * function itself: the original cast evm_class_i2c as the native fn,
     * which would re-enter this factory on instantiation instead of
     * running the I2C constructor. */
    return *evm_class_create(e, (evm_native_fn)evm_module_i2c, class_i2c, NULL);
}
#endif
|
APMonitor/pds
|
All_Source_Code/ScaleData/ScaleData_1.py
|
import numpy as np
import matplotlib.pyplot as plt

# Sample data: normal distribution with mean 4 and standard deviation 0.5.
samples = 0.5 * np.random.randn(1000) + 4

# Standard scaling: shift to zero mean, scale to unit variance.
standardized = (samples - np.mean(samples)) / np.std(samples)

# Min-max scaling: map the sample range onto [0, 1].
normalized = (samples - np.min(samples)) / (np.max(samples) - np.min(samples))

# Overlay the three histograms so the effect of each scaler is visible.
plt.figure(figsize=(8, 4))
plt.hist(samples, bins=30, label='original')
plt.hist(standardized, alpha=0.7, bins=30, label='standard scaler')
plt.hist(normalized, alpha=0.7, bins=30, label='minmax scaler')
plt.legend()
plt.show()
|
devs-immortal/Skewer
|
src/main/java/net/immortaldevs/skewer/commands/AddToSkewerCommand.java
|
<filename>src/main/java/net/immortaldevs/skewer/commands/AddToSkewerCommand.java
package net.immortaldevs.skewer.commands;
import com.mojang.brigadier.CommandDispatcher;
import com.mojang.brigadier.arguments.IntegerArgumentType;
import com.mojang.brigadier.context.CommandContext;
import com.mojang.brigadier.exceptions.CommandSyntaxException;
import com.mojang.brigadier.suggestion.SuggestionProvider;
import com.mojang.brigadier.suggestion.Suggestions;
import com.mojang.brigadier.suggestion.SuggestionsBuilder;
import net.immortaldevs.skewer.condiments.Condiment;
import net.immortaldevs.skewer.items.MultiFoodItem;
import net.immortaldevs.skewer.registry.SkewerRegistries;
import net.immortaldevs.skewer.tag.CondimentTags;
import net.immortaldevs.skewer.tag.SkewerItemTags;
import net.immortaldevs.skewer.Skewer;
import net.minecraft.command.argument.IdentifierArgumentType;
import net.minecraft.entity.player.PlayerEntity;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.server.command.ServerCommandSource;
import net.minecraft.text.TranslatableText;
import net.minecraft.util.Identifier;
import net.minecraft.util.registry.Registry;
import java.util.concurrent.CompletableFuture;
import static net.minecraft.server.command.CommandManager.argument;
import static net.minecraft.server.command.CommandManager.literal;
// TODO rewrite or remove
// Dev-only command: /skewer:add <addition> [amount] appends a food item or
// condiment to the skewer held in the player's main hand.
public class AddToSkewerCommand {
    public static final SkewerableSuggester SKEWERABLE_SUGGESTER = new SkewerableSuggester();

    /** Registers the command with the dispatcher. Development builds only. */
    public static void register(CommandDispatcher<ServerCommandSource> dispatcher) {
        // only for dev.
        dispatcher.register(
                literal(Skewer.MOD_ID+":add")
                        .then(argument("addition", IdentifierArgumentType.identifier()).suggests(SKEWERABLE_SUGGESTER)
                                // Without an amount argument, default to adding 1.
                                .executes(ctx -> addToSkewer(ctx.getSource(), IdentifierArgumentType.getIdentifier(ctx, "addition"), 1))
                                .then(argument("amount", IntegerArgumentType.integer(1))
                                        .executes(ctx -> addToSkewer(ctx.getSource(), IdentifierArgumentType.getIdentifier(ctx, "addition"), IntegerArgumentType.getInteger(ctx, "amount")))
                                )
                        )
        );
    }

    // todo rewrite
    // Resolves `id` as either a food item or a condiment and (once the
    // commented-out logic is restored) adds it to the held MultiFoodItem.
    // NOTE(review): returns 1 on every path, including errors -- confirm intended.
    private static int addToSkewer(ServerCommandSource source, Identifier id, int amount) {
        PlayerEntity player;
        try {
            player = source.getPlayer();
        } catch (CommandSyntaxException e) {
            source.sendError(new TranslatableText("commands.skewer.add.player_failure"));
            return 1;
        }
        ItemStack stack = player.getMainHandStack();
        if (!(stack.getItem() instanceof MultiFoodItem)) {
            source.sendError(new TranslatableText("commands.skewer.add.item_failure", id));
            return 1;
        }
        // NOTE(review): food/condiment are resolved but unused while the
        // addition logic below remains commented out.
        Item food = Registry.ITEM.get(id);
        Condiment condiment = SkewerRegistries.CONDIMENT.get(id);
//        if (!MultiFoodItem.add(stack, food, amount) && !MultiFoodItem.add(stack, condiment, amount)) {
//            source.sendError(new TranslatableText("commands.skewer.add.addition_failure", id));
//        }
        // Success message
        return 1;
    }

    /** Suggests skewerable condiment and item identifiers for tab-completion. */
    public static class SkewerableSuggester implements SuggestionProvider<ServerCommandSource> {
        @Override
        public CompletableFuture<Suggestions> getSuggestions(CommandContext<ServerCommandSource> context, SuggestionsBuilder builder) {
            // I think condiment tags either don't work, or are being funky for some reason.
            // Anyway, who knows if any of this helps:
            CondimentTags.SKEWERABLE.values().forEach(condiment -> {
                Identifier id = SkewerRegistries.CONDIMENT.getId(condiment);
                if (id == null)
                    System.out.println(condiment);
                else
                    builder.suggest(id.toString());
            });
            SkewerItemTags.SKEWERABLE.values().forEach(item -> builder.suggest(item.toString()));
            return builder.buildFuture();
        }
    }
}
|
bco-bc/panalysis
|
plot_lj_coulomb.py
|
<gh_stars>0
import sys
import numpy as np
import matplotlib.pyplot as plt
import interaction.lj_coulomb as lj_coulomb

pi = np.pi
e0 = 0.000572766
eps = 2.5   # Default value of the relative permittivity (dielectric constant)
dr = 0.01   # Distance spacing (nm).
r0 = 0.4    # Start value distance (nm).

# Parse command-line arguments: q1 q2 C12 C6 [eps].
if len(sys.argv) < 5:
    print('Number of arguments: ', len(sys.argv))
    print('Argument List: ', str(sys.argv))
    print('Usage: python3 plot_lj_coulomb <q1> <q2> <C12> <C6> (<eps>)')
    print('Use \'molecular units\'. Relative permittivity \'eps\' is optional, default value is 2.5.')
    raise Exception("Missing interaction parameters.")
if len(sys.argv) == 6:
    eps = float(sys.argv[5])
q1 = float(sys.argv[1])
q2 = float(sys.argv[2])
C12 = float(sys.argv[3])
C6 = float(sys.argv[4])
param = (q1, q2, eps, C12, C6)

# Calculate interaction over [r0, 4.0) nm.
# Fix: use the configured start distance r0 instead of a hard-coded 0.4,
# so changing r0 actually affects the plotted range.
r = np.arange(r0, 4.0, dr)
results = lj_coulomb.potential(r, param)
total = results[0]   # total potential
el = results[1]      # electrostatic (Coulomb) contribution
lj = results[2]      # Lennard-Jones contribution

# Plot graph.
plt.plot(r, total, color='black', label='total')
plt.plot(r, lj, color='red', label='lj')
plt.plot(r, el, color='blue', label='el')
plt.xlabel('r (nm)')
plt.ylabel('U(r)')
plt.legend()
plt.show()
|
wujifengcn/jdchain-framework
|
ledger-model/src/main/java/com/jd/blockchain/transaction/ParticipantOperator.java
|
<gh_stars>1-10
package com.jd.blockchain.transaction;
/**
 * Operations for managing consensus participants within a transaction.
 */
public interface ParticipantOperator {
    /**
     * Returns the builder for participant registration operations.
     *
     * @return participant registration operation builder
     */
    ParticipantRegisterOperationBuilder participants();

    /**
     * Returns the builder for participant state update operations.
     *
     * @return participant state update operation builder
     */
    ParticipantStateUpdateOperationBuilder states();
}
|
Team-SubliMate/SubliMate-Android
|
app/src/main/java/sublimate/com/sublimate/json/ResetAckEvent.java
|
<reponame>Team-SubliMate/SubliMate-Android
package sublimate.com.sublimate.json;
/**
 * WebSocket event acknowledging a reset request (type "RESET_ACK").
 */
public class ResetAckEvent extends WebSocketEvent {
    public static final String EVENT_TYPE = "RESET_ACK";

    // NOTE(review): itemId is accepted but never stored or used -- confirm
    // whether the ack should carry the item id or the parameter can go.
    public ResetAckEvent(int itemId) {
        this.type = EVENT_TYPE;
    }
}
|
TrainingByPackt/Professional-Nodejs-eLearning
|
Lesson01/arrow_function/02_scoping.js
|
<reponame>TrainingByPackt/Professional-Nodejs-eLearning
// Teaching example: how `this` is scoped inside setTimeout callbacks.
function MovieStar(name) {
  this.name = name;
}

const movieStar = new MovieStar("Al Pacino");

// Plain function callback: inside cb, `this` is NOT the MovieStar instance
// (it is the timer/global object), so this.name prints undefined.
MovieStar.prototype.stunt = function() {
  setTimeout(function cb() {
    console.log("Animated Stunt for " + this.name);
  }, 1000);
};

movieStar.stunt();

// Explicitly bound callback: .bind(this) pins `this` to the instance, so
// this.name correctly resolves to "Al Pacino".
MovieStar.prototype.stunt = function() {
  setTimeout((function cb() {
    console.log("Animated Stunt for " + this.name);
  }).bind(this), 1000);
};

movieStar.stunt();

// Convert this example to use the arrows syntax and demonstrate that the scoping of the bound this scope is
// automatically inherited from the parent scope
// Answer:
// MovieStar.prototype.stunt = function() {
//   setTimeout(() => console.log("Animated Stunt for " + this.name), 1000);
// };
//
// movieStar.stunt();
|
BitySA/swissdta
|
tests/test_field_iban.py
|
<reponame>BitySA/swissdta<filename>tests/test_field_iban.py
"""Tests for the Iban field."""
import pytest
from swissdta.fields import Iban
from swissdta.records.record import DTARecord
FIELD_LENGTH = 25
class IbanRecord(DTARecord):
    """Subclass of DTARecord for testing the Iban field."""

    # Single IBAN field, padded/validated to FIELD_LENGTH characters.
    field = Iban(length=FIELD_LENGTH)
@pytest.mark.parametrize(('value', 'expected_errors'), (
    ('CH93 0076 2011 6238 5295 7', tuple()),   # valid Swiss IBAN -> no errors
    ('LI21 0881 0000 2324 013A A', tuple()),   # valid Liechtenstein IBAN
    # Same IBAN with corrupted check digits -> checksum error.
    ('LI22 0881 0000 2324 013A A', ('[field] IBAN INVALID: Invalid checksum digits',)),
    # Hungarian IBAN is 28 chars, exceeding the 25-char field -> length error.
    ('HU42 1177 3016 1111 1018 0000 0000',
     ("[field] TOO LONG: 'HU42117730161111101800000000' can be at most 25 characters",)),
))
def test_invalid_values(value, expected_errors):
    """Verify that invalid ibans are detected."""
    record = IbanRecord()
    record.field = value
    # Invalid IBANs must surface as errors, never as mere warnings.
    assert not record.validation_warnings
    assert record.validation_errors == expected_errors
@pytest.mark.parametrize(('value', 'expected_value'), (
    # Spaces are stripped and the result right-padded to 25 characters.
    ('CH38 0888 8123 4567 8901 2', 'CH3808888123456789012    '),
    ('CH9300762011623852957', 'CH9300762011623852957    '),
))
def test_format_values(value, expected_value):
    """Verify that values are formatted correctly."""
    record = IbanRecord()
    record.field = value
    assert record.field == expected_value
    # Valid input must produce neither warnings nor errors.
    assert not record.validation_warnings
    assert not record.validation_errors
|
lanjue1/CAP
|
src/components/AntdSelectRegion/index.js
|
import React, { Component } from 'react';
import { Select, Cascader } from 'antd';
import { connect } from 'dva';
import { formatMessage } from 'umi-plugin-react/locale';
import { isString } from 'util';
@connect(({ }) => ({}))
export default class AntdSelectRegion extends Component {
constructor(props) {
super(props);
this.state = {
selected: undefined,
options: [],
isFirst: true,
};
}
componentDidMount() {
this.setValue(this.props);
const { data } = this.props;
if (data && data.length > 0) {
this.setState({ options: this.renderTreeNodes(data) });
}
}
componentWillReceiveProps(nextProps) {
const { value, data } = nextProps;
if (JSON.stringify(this.props.value) !== JSON.stringify(value)) {
this.setValue(nextProps);
}
if (data && JSON.stringify(data) !== JSON.stringify(this.props.data)) {
this.setState({ options: this.renderTreeNodes(data) });
}
}
renderTreeNodes = data =>
data.map(item => {
const { id, partsItems, partsName1, partsName2, partsName3 } = item;
if (partsItems) {
return {
value: id,
label: `${partsName1}${partsName2 ? '/' + partsName2 : ''}${
partsName3 ? '/' + partsName3 : ''
}`,
children: partsItems.length > 0 ? this.renderTreeNodes(partsItems) : undefined,
};
} else {
return {
value: id,
label: `${partsName1}${partsName2 ? '/' + partsName2 : ''}${
partsName3 ? '/' + partsName3 : ''
}`,
};
}
});
queryAllData = () => {
const { dispatch, url } = this.props;
dispatch({
type: 'component/queryPartsOfferDict',
payload: {
params: {},
url,
},
callback: data => {
console.log('data??--1---????',data)
if (!data) return;
this.setState({ options: this.renderTreeNodes(data) });
},
});
};
isRateQueryData = () => {
const { data } = this.props;
if (data.length > 0) {
this.setState({ options: this.renderTreeNodes(data) });
}
};
setValue = props => {
const { value, isParent, split, filter, data, isRate, getFirstValue } = props;
const { options } = this.state;
let newData = [];
if (options.length > 0) return;
if (value && isString(value)) {
let data = value.split(split);
if (isParent) {
if (!filter) {
data = ['', ...data];
}
newData = data.filter((_, index) => index !== 0 && index !== data.length - 1);
if (!getFirstValue) {
this.onChange(newData, null);
}
this.setState({
selected: newData,
});
if (!isRate) {
this.queryRegion(data.length - 3, data);
}
} else {
if (!filter) {
data = ['', ...data];
} else {
// 司机管理 后续需要后台加上
data = ['44857702471028736', ...data];
}
newData = data.filter((_, index) => index !== 0);
if (!getFirstValue) {
this.onChange(newData, null);
}
this.setState({ selected: newData });
if (!isRate) {
this.queryRegion(data.length - 2, data);
}
}
} else {
this.setState({ selected: value });
}
};
queryRegion = (index, selectedData) => {
const { label, isParent } = this.props;
this.queryById({
id: selectedData[index],
callback: data => {
const length = isParent ? 3 : 2;
if (index === selectedData.length - length) {
const newOptions = data.map(item => {
return {
value: item.id,
label: item[label],
isLeaf: item.childNumber !== undefined && item.childNumber === 0 ? true : false,
};
});
this.setState({
options: newOptions,
});
} else {
const targetOptions = data.map(item => {
if (item.id === selectedData[index + 1]) {
return {
value: item.id,
label: item[label],
isLeaf: item.childNumber !== undefined && item.childNumber === 0 ? true : false,
children: this.state.options,
};
} else {
return {
value: item.id,
label: item[label],
isLeaf: item.childNumber !== undefined && item.childNumber === 0 ? true : false,
};
}
});
this.setState({
options: targetOptions,
});
}
if (--index >= 0) {
this.queryRegion(index, selectedData);
}
},
});
};
onFocus = () => {
const { dispatch, url, label, filter, isRate } = this.props;
const { options } = this.state;
if (options.length > 0) return;
this.queryById({
id: '',
callback: data => {
if (!data) return;
this.firstId = data[0].id;
if (filter) {
this.queryById({
id: data[0].id,
callback: data2 => {
if (!data2) return;
this.setState({
options: data2.map(item => {
return {
value: item.id,
label: item[label],
isLeaf: item.childNumber !== undefined && item.childNumber === 0 ? true : false,
};
}),
});
},
});
} else {
this.setState({
options: data.map(item => {
return {
value: item.id,
label: item[label],
isLeaf: item.childNumber !== undefined && item.childNumber === 0 ? true : false,
};
}),
});
}
},
});
};
/**
* 查询options 接口
*/
queryById = ({ id, callback } = {}) => {
const { dispatch, url, paramsLabel, isRate } = this.props;
const params = isRate ? {} : { [paramsLabel]: id };
dispatch({
type: 'common/selectReginList',
payload: {
params,
url,
},
callback: data => {
console.log('data??--2---????',data)
if (!data) return;
callback(data);
},
});
};
/**
* 选中加载数据
*/
loadData = selectedOptions => {
const { dispatch, label, isRate } = this.props;
if (isRate) return;
const targetOption = selectedOptions[selectedOptions.length - 1];
targetOption.loading = true;
this.queryById({
id: targetOption.value,
callback: data => {
if (!data) return;
targetOption.loading = false;
if (data.length === 0) {
} else {
targetOption.children = data.map(item => {
return {
value: item.id,
label: item[label],
isLeaf: item.childNumber !== undefined && item.childNumber === 0 ? true : false,
};
});
}
this.setState({
options: [...this.state.options],
});
},
});
};
triggerChange = value => {
const onChange = this.props.onChange;
if (onChange) {
onChange(value);
}
};
onPopupVisibleChange = value => {
const { isRate } = this.props;
const { isFirst } = this.state;
if (!value) this.setState({ isFirst: true });
if (isFirst && isRate && value) {
this.queryAllData();
this.setState({ isFirst: false });
return;
}
};
/**
* 值改变
*/
onChange = keys => {
this.setState({ selected: keys });
this.triggerChange(keys);
};
filter = (inputValue, path) => {
return path.some(option => {
if (option.children) {
return false;
}
return option.label.toLowerCase().indexOf(inputValue.toLowerCase()) > -1;
});
};
renderSearchData = (inputValue, path) => {
return path.map((item, index) => {
if (item.label.toLowerCase().indexOf(inputValue.toLowerCase()) > -1) {
return (
<span key={index} className="ant-cascader-menu-item-keyword">
{item.label}
</span>
);
}
return `${item.label} => `;
});
};
render() {
const { isOpen, selected, options } = this.state;
const { showSplit, isRate, disabled, cusValue } = this.props;
return (
<Cascader
onFocus={this.onFocus}
allowClear={true}
options={options}
loadData={!isRate ? this.loadData : null}
onPopupVisibleChange={value => {
this.onPopupVisibleChange(value);
}}
disabled={disabled}
value={cusValue || selected}
onChange={this.onChange}
showSearch={
isRate
? { filter: this.filter, render: this.renderSearchData, matchInputWidth: false }
: false
}
displayRender={label => {
return isRate
? label.length > 0
? label[label.length - 1]
: label.join(' => ')
: label.join(' / ');
}}
placeholder={formatMessage({ id: 'form.select.placeholder' })}
changeOnSelect
/>
);
}
}
|
hayden5-mwac/cotidia-account
|
cotidia/account/urls/api.py
|
from django.urls import path
from django.conf.urls import url
from cotidia.account.views import api
from cotidia.account.serializers.group import GroupAdminSerializer
from cotidia.admin.views.api import DynamicListAPIView
# UUID4-style matcher shared by several routes below.
ure = r"[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}"

app_name = "cotidia.account"

# Fix: every regex fragment is now a raw string. The token fragments
# previously used "\-" inside plain string literals, which is an invalid
# escape sequence (DeprecationWarning today, SyntaxError in future Pythons).
urlpatterns = [
    url(r"^sign-in$", api.SignIn.as_view(), name="sign-in"),
    url(r"^sign-up$", api.SignUp.as_view(), name="sign-up"),
    url(
        r"^activate/(?P<uuid>" + ure + r")/(?P<token>[a-z0-9\-]+)$",
        api.Activate.as_view(),
        name="activate",
    ),
    url(
        r"^resend-activation-link/(?P<uuid>" + ure + r")$",
        api.ResendActivationLink.as_view(),
        name="resend-activation-link",
    ),
    url(r"^authenticate$", api.Authenticate.as_view(), name="authenticate"),
    url(r"^reset-password$", api.ResetPassword.as_view(), name="reset-password"),
    url(
        r"^reset-password-validate/(?P<uuid>" + ure + r")/(?P<token>[a-z0-9\-]+)$",
        api.ResetPasswordValidate.as_view(),
        name="reset-password-validate",
    ),
    url(
        r"^set-password/(?P<uuid>" + ure + r")/(?P<token>[a-z0-9\-]+)$",
        api.SetPassword.as_view(),
        name="set-password",
    ),
    url(r"^update-details$", api.UpdateDetails.as_view(), name="update-details"),
    url(r"^change-password$", api.ChangePassword.as_view(), name="change-password"),
    path(
        "dynamic-list/auth/group",
        DynamicListAPIView.as_view(permission_required=["auth.change_group"]),
        {
            "app_label": "auth",
            "model": "group",
            "serializer_class": GroupAdminSerializer,
        },
        name="group-list",
    ),
]
|
lucmski/twint-docker
|
searchapp/node_modules/@appbaseio/reactivesearch/lib/appbase-js/node_modules/lodash/isSet.js
|
<reponame>lucmski/twint-docker<filename>searchapp/node_modules/@appbaseio/reactivesearch/lib/appbase-js/node_modules/lodash/isSet.js<gh_stars>0
'use strict';
var baseIsSet = require('./_baseIsSet'),
    baseUnary = require('./_baseUnary'),
    nodeUtil = require('./_nodeUtil');

/* Node.js helper references. */
var nodeIsSet = nodeUtil && nodeUtil.isSet;

/**
 * Checks if `value` is classified as a `Set` object.
 *
 * @static
 * @memberOf _
 * @since 4.3.0
 * @category Lang
 * @param {*} value The value to check.
 * @returns {boolean} Returns `true` if `value` is a set, else `false`.
 * @example
 *
 * _.isSet(new Set);
 * // => true
 *
 * _.isSet(new WeakSet);
 * // => false
 */
// Prefer the faster native Node.js check when available; otherwise fall
// back to the portable implementation.
var isSet = nodeIsSet ? baseUnary(nodeIsSet) : baseIsSet;

module.exports = isSet;
|
PKUfudawei/cmssw
|
CondTools/Ecal/src/ESDBCopy.cc
|
#include "FWCore/ServiceRegistry/interface/Service.h"
#include "CondCore/DBOutputService/interface/PoolDBOutputService.h"
#include "FWCore/ParameterSet/interface/ParameterSet.h"
#include "FWCore/Framework/interface/EventSetup.h"
#include "CondTools/Ecal/interface/ESDBCopy.h"
#include <vector>
// Reads the "toCopy" VPSet from the configuration and remembers, per
// container name, the target DB record plus a cache identifier (initially 0)
// later used by shouldCopy() to detect IOV/payload changes.
ESDBCopy::ESDBCopy(const edm::ParameterSet& iConfig)
    : m_timetype(iConfig.getParameter<std::string>("timetype")),
      m_cacheIDs(),
      m_records(),
      esPedestalsToken_(esConsumes()),
      esADCToGeVConstantToken_(esConsumes()),
      esChannelStatusToken_(esConsumes()),
      esIntercalibConstantsToken_(esConsumes()),
      esWeightStripGroupsToken_(esConsumes()),
      esTBWeightsToken_(esConsumes()) {
  std::string container;
  std::string record;
  typedef std::vector<edm::ParameterSet> Parameters;
  Parameters toCopy = iConfig.getParameter<Parameters>("toCopy");
  for (const auto& iparam : toCopy) {
    container = iparam.getParameter<std::string>("container");
    record = iparam.getParameter<std::string>("record");
    m_cacheIDs.emplace(container, 0);
    m_records.emplace(container, record);
  }
}
ESDBCopy::~ESDBCopy() {}
// Per-event hook: walk every configured container and copy those whose
// conditions payload changed since the last event.
void ESDBCopy::analyze(const edm::Event& evt, const edm::EventSetup& evtSetup) {
  for (const auto& entry : m_records) {
    const std::string& container = entry.first;
    if (shouldCopy(evtSetup, container)) {
      copyToDB(evtSetup, container);
    }
  }
}
// Returns true when the EventSetup cache identifier for `container` differs
// from the last one recorded (i.e. a new IOV/payload is available), updating
// the stored identifier in that case. Throws for unknown container names.
bool ESDBCopy::shouldCopy(const edm::EventSetup& evtSetup, const std::string& container) {
  unsigned long long cacheID = 0;
  if (container == "ESPedestals") {
    cacheID = evtSetup.get<ESPedestalsRcd>().cacheIdentifier();
  } else if (container == "ESADCToGeVConstant") {
    cacheID = evtSetup.get<ESADCToGeVConstantRcd>().cacheIdentifier();
  } else if (container == "ESIntercalibConstants") {
    cacheID = evtSetup.get<ESIntercalibConstantsRcd>().cacheIdentifier();
  } else if (container == "ESWeightStripGroups") {
    cacheID = evtSetup.get<ESWeightStripGroupsRcd>().cacheIdentifier();
  } else if (container == "ESTBWeights") {
    cacheID = evtSetup.get<ESTBWeightsRcd>().cacheIdentifier();
  } else if (container == "ESChannelStatus") {
    cacheID = evtSetup.get<ESChannelStatusRcd>().cacheIdentifier();
  } else {
    throw cms::Exception("Unknown container");
  }
  // Unchanged identifier means the payload was already copied.
  if (m_cacheIDs[container] == cacheID) {
    return false;
  } else {
    m_cacheIDs[container] = cacheID;
    return true;
  }
}
// Fetches the conditions object for `container` from the EventSetup and
// writes it to the output database under the record name configured for
// that container. Throws if the DB service is unavailable or the container
// name is unknown.
void ESDBCopy::copyToDB(const edm::EventSetup& evtSetup, const std::string& container) {
  edm::Service<cond::service::PoolDBOutputService> dbOutput;
  if (!dbOutput.isAvailable()) {
    throw cms::Exception("PoolDBOutputService is not available");
  }
  std::string recordName = m_records[container];
  if (container == "ESPedestals") {
    const auto& obj = evtSetup.getData(esPedestalsToken_);
    edm::LogInfo("ESDBCopy") << "ped pointer is: " << &obj;
    dbOutput->createOneIOV<const ESPedestals>(obj, dbOutput->beginOfTime(), recordName);
  } else if (container == "ESADCToGeVConstant") {
    const auto& obj = evtSetup.getData(esADCToGeVConstantToken_);
    edm::LogInfo("ESDBCopy") << "adc pointer is: " << &obj;
    dbOutput->createOneIOV<const ESADCToGeVConstant>(obj, dbOutput->beginOfTime(), recordName);
  } else if (container == "ESChannelStatus") {
    const auto& obj = evtSetup.getData(esChannelStatusToken_);
    edm::LogInfo("ESDBCopy") << "channel status pointer is: " << &obj;
    dbOutput->createOneIOV<const ESChannelStatus>(obj, dbOutput->beginOfTime(), recordName);
  } else if (container == "ESIntercalibConstants") {
    const auto& obj = evtSetup.getData(esIntercalibConstantsToken_);
    edm::LogInfo("ESDBCopy") << "inter pointer is: " << &obj;
    dbOutput->createOneIOV<const ESIntercalibConstants>(obj, dbOutput->beginOfTime(), recordName);
  } else if (container == "ESWeightStripGroups") {
    const auto& obj = evtSetup.getData(esWeightStripGroupsToken_);
    edm::LogInfo("ESDBCopy") << "weight pointer is: " << &obj;
    dbOutput->createOneIOV<const ESWeightStripGroups>(obj, dbOutput->beginOfTime(), recordName);
  } else if (container == "ESTBWeights") {
    const auto& obj = evtSetup.getData(esTBWeightsToken_);
    edm::LogInfo("ESDBCopy") << "tbweight pointer is: " << &obj;
    dbOutput->createOneIOV<const ESTBWeights>(obj, dbOutput->beginOfTime(), recordName);
  } else {
    throw cms::Exception("Unknown container");
  }
  edm::LogInfo("ESDBCopy") << "ESDBCopy wrote " << recordName;
}
|
jasonsbarr/types
|
packages/core/lib/predicates/isSealed.js
|
<reponame>jasonsbarr/types
/**
 * Reports whether `value` is a sealed object.
 *
 * @param {*} value - Value to inspect.
 * @returns {boolean} True when Object.isSealed(value) holds.
 */
export const isSealed = (value) => Object.isSealed(value);
|
pcanto-hopeit/hopeit.engine
|
engine/test/mock_app/mock_app_api_get.py
|
<reponame>pcanto-hopeit/hopeit.engine
"""
Test app api
"""
from typing import Optional
from hopeit.app.logger import app_extra_logger
from hopeit.app.context import EventContext
from mock_app import MockData
logger, extra = app_extra_logger()
__steps__ = ['entry_point']
__api__ = {
"summary": "Test app api",
"description": "Test app api",
"parameters": [
{
"name": "arg1",
"in": "query",
"required": False,
"description": "Argument 1",
"schema": {
"type": "integer"
}
},
{'description': 'Track '
'information: '
'Request-Id',
'in': 'header',
'name': 'X-Track-Request-Id',
'required': False,
'schema': {'type': 'string'}},
{'description': 'Track '
'information: '
'Request-Ts',
'in': 'header',
'name': 'X-Track-Request-Ts',
'required': False,
'schema': {'format': 'date-time',
'type': 'string'}},
{'description': 'Track '
'information: '
'track.session_id',
'in': 'header',
'name': 'X-Track-Session-Id',
'required': True,
'schema': {'default': 'test.session_id',
'type': 'string'}}
],
"responses": {
"200": {
"description": "MockData result",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/MockData"
}
}
}
}
},
"tags": [
"mock_app_api.test", "my_tags"
],
"security": [
{
"auth.basic": []
}
]
}
def entry_point(payload: None, context: EventContext, arg1: Optional[int] = None) -> MockData:
    """GET handler: logs the invocation and returns MockData echoing ``arg1``.

    ``arg1`` arrives as an optional query parameter (declared in ``__api__``).
    """
    logger.info(context, "mock_app_api_get.entry_point")
    return MockData(f"get-{arg1}")
|
ysoftman/test_code
|
golang/csv_sql/db.go
|
package main
import (
"database/sql"
"log"
)
// SQLDataBase wraps a database/sql handle for the local SQLite file.
type SQLDataBase struct {
	sqlDB *sql.DB
}

// Package-level shared handle (assigned elsewhere).
var sqldb *SQLDataBase

// NewDB opens ./sqlite.db and returns a wrapper around the handle.
// NOTE(review): sql.Open only validates its arguments, it does not connect;
// log.Fatal here kills the whole process on failure, which is harsh for
// library code. Requires the sqlite3 driver to be registered via a blank
// import elsewhere in the package.
func NewDB() *SQLDataBase {
	db, err := sql.Open("sqlite3", "./sqlite.db")
	if err != nil {
		log.Fatal("open sqlite err =>", err)
	}
	// log.Println("db:", db)
	return &SQLDataBase{
		sqlDB: db,
	}
}
// CreateTable executes the given DDL statement and returns any error,
// logging the failing statement on the way out.
func (db *SQLDataBase) CreateTable(stmt string) error {
	if _, err := db.sqlDB.Exec(stmt); err != nil {
		log.Printf("stmt => %v, err => %v", stmt, err)
		return err
	}
	return nil
}
// SelectData runs the query and returns the result set as a map keyed by
// 0-based row index; each value is a []interface{} of that row's columns.
// Errors are logged and returned.
func (db *SQLDataBase) SelectData(query string) (map[int]interface{}, error) {
	rows, err := db.sqlDB.Query(query)
	if err != nil {
		log.Printf("query => %v, err => %v", query, err)
		return nil, err
	}
	defer rows.Close()

	columns, err := rows.Columns()
	if err != nil {
		log.Printf("query => %v, err => %v", query, err)
		return nil, err
	}

	out := map[int]interface{}{}
	cnt := 0
	for rows.Next() {
		// A fresh value slice per row (the map retains each one); Scan
		// writes through pointers into it.
		results := make([]interface{}, len(columns))
		ptrResults := make([]interface{}, len(columns))
		for i := range results {
			ptrResults[i] = &results[i]
		}
		if err := rows.Scan(ptrResults...); err != nil {
			log.Printf("query => %v, err => %v", query, err)
			return nil, err
		}
		out[cnt] = results
		cnt++
	}
	// Bug fix: errors that abort iteration (e.g. a dropped connection) were
	// previously swallowed — rows.Next() just returns false. Surface them.
	if err := rows.Err(); err != nil {
		log.Printf("query => %v, err => %v", query, err)
		return nil, err
	}
	return out, nil
}
// InsertData inserts every row of datas using prepareStmt, all inside one
// transaction: either every row commits or none does.
func (db *SQLDataBase) InsertData(prepareStmt string, datas [][]string) error {
	tx, err := db.sqlDB.Begin()
	if err != nil {
		log.Printf("err => %v", err)
		return err
	}
	// Bug fix: the statement was prepared on the bare connection
	// (db.sqlDB.Prepare), so every Exec below ran OUTSIDE the transaction
	// and Commit was a no-op for them. Prepare on tx instead.
	stmt, err := tx.Prepare(prepareStmt)
	if err != nil {
		log.Printf("stmt => %v, err => %v", stmt, err)
		// Bug fix: roll back on failure instead of leaking the open transaction.
		tx.Rollback()
		return err
	}
	defer stmt.Close()
	for _, d := range datas {
		// stmt.Exec takes variadic interface{}; copy the string row over.
		args := make([]interface{}, len(d))
		for i, v := range d {
			args[i] = v
		}
		if _, err := stmt.Exec(args...); err != nil {
			log.Printf("stmt => %v, err => %v", stmt, err)
			tx.Rollback()
			return err
		}
	}
	// Bug fix: the Commit error was silently dropped.
	return tx.Commit()
}
|
paozer/minishell
|
srcs/libft/srcs/libft.h
|
<filename>srcs/libft/srcs/libft.h
/* ************************************************************************** */
/* */
/* ::: :::::::: */
/* libft.h :+: :+: :+: */
/* +:+ +:+ +:+ */
/* By: pramella <<EMAIL>> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2020/03/03 16:25:35 by pramella #+# #+# */
/* Updated: 2020/05/03 15:00:00 by pramella ### ########lyon.fr */
/* */
/* ************************************************************************** */
#ifndef LIBFT_H
# define LIBFT_H
# include <stddef.h>
# include <stdarg.h>
# include <stdlib.h>
# include <unistd.h>
# include <fcntl.h>
# ifndef BUFFER_SIZE
# define BUFFER_SIZE 4
# endif
# define NULL_STR "(null)"
/*
** LIST FUNCTIONS
*/
/*
** Singly-linked generic list node used by the ft_lst* functions below.
** `content` is a caller-owned payload pointer; `next` is NULL on the
** last node.
*/
typedef struct s_list
{
	void			*content;
	struct s_list	*next;
}					t_list;
void ft_lstadd_back(t_list **alst, t_list *new);
void ft_lstadd_front(t_list **alst, t_list *new);
void ft_lstclear(t_list **lst, void (*del)(void*));
void ft_lstdelone(t_list *lst, void (*del)(void*));
void ft_lstiter(t_list *lst, void (*f)(void *));
t_list *ft_lstlast(t_list *lst);
t_list *ft_lstmap(t_list *lst, void *(*f)(void *),
void (*del)(void *));
t_list *ft_lstnew(void *content);
void ft_lstrm(t_list **begin, t_list *prev, t_list *curr);
int ft_lstsize(t_list *lst);
/*
** STANDARD FUNCTIONS
*/
int ft_atoi(const char *str);
void ft_bzero(void *s, size_t n);
void *ft_calloc(size_t count, size_t size);
int ft_isalnum(int c);
int ft_isalpha(int c);
int ft_isascii(int c);
int ft_isdigit(int c);
int ft_isinset(int c, char *set);
int ft_isprint(int c);
int ft_isspace(int c);
char *ft_itoa(int n);
void *ft_memccpy(void *dst, const void *src, int c,
size_t n);
void *ft_memchr(const void *s, int c, size_t n);
int ft_memcmp(const void *s1, const void *s2, size_t n);
void *ft_memcpy(void *dst, const void *src, size_t n);
void *ft_memmove(void *dst, const void *src, size_t len);
void *ft_memset(void *b, int c, size_t len);
void ft_putchar_fd(char c, int fd);
void ft_putendl_fd(char *s, int fd);
void ft_putnbr_fd(int n, int fd);
void ft_putstr_fd(char *s, int fd);
char **ft_split(char const *s, char *charset);
int ft_split_free(char **split);
char *ft_strcat(char *dest, char *src);
char *ft_strchr(const char *s, int c);
char *ft_strcpy(char *dst, const char *src);
char *ft_strdup(const char *src);
char *ft_strjoin(char const *s1, char const *s2);
size_t ft_strlcat(char *dst, const char *src, size_t size);
size_t ft_strlcpy(char *dst, const char *src, size_t dstsize);
size_t ft_strlen(const char *s);
char *ft_strmapi(char const *s,
char (*f) (unsigned int, char));
int ft_strncmp(const char *s1, const char *s2, size_t n);
char *ft_strnstr(const char *haystack, const char *needle,
size_t len);
char *ft_strrchr(const char *s, int c);
char *ft_strtrim(char const *s1, char const *set);
char *ft_substr(char const *s, unsigned int start,
size_t len);
int ft_tolower(int c);
int ft_toupper(int c);
/*
** GET_NEXT_LINE
*/
/*
** Per-file-descriptor read state for get_next_line, kept in a linked
** list so interleaved reads on several fds do not mix buffers.
** NOTE(review): `ret` presumably caches the last read() return and
** `buf` the carried-over remainder — confirm against get_next_line.c.
*/
typedef struct s_list_fd
{
	int					fd;
	int					ret;
	char				*buf;
	struct s_list_fd	*next;
}						t_list_fd;
int get_next_line(int fd, char **line);
size_t ft_strlen_gnl(const char *s);
char *ft_strjoin_gnl(char const *s1, char const *s2);
char *ft_cleanbuf_gnl(char *str);
void ft_lstclear_gnl(int fd, t_list_fd **head);
/*
** FT_PRINTF
*/
/*
** Parsed state of one ft_printf conversion specifier: flag booleans
** (zero-pad, left-justify, width given, precision given), the parsed
** width/precision values, target fd, the conversion character and the
** number of format characters consumed (`end`).
** NOTE(review): field meanings inferred from names — confirm against
** ft_format_setup().
*/
typedef struct s_fields
{
	int		zero_flag;
	int		left_flag;
	int		wd_flag;
	int		wd;
	int		prc_flag;
	int		prc_err_flag;
	int		prc;
	int		fd;
	char	type;
	size_t	end;
}			t_fields;

/*
** A number mid-formatting: its value, whether it is unsigned, the
** printed digit count and the digit alphabet (base string).
*/
typedef struct s_nbr
{
	long	value;
	int		unsigned_flag;
	int		len;
	char	*base;
}			t_nbr;
int ft_printf(const char *fmt, ...)
__attribute__((format(printf,1,2)));
t_fields *ft_format_setup(char *fmt, va_list *ap);
void ft_char_conv(t_fields *flags, va_list *ap, int *ret);
void ft_str_conv(t_fields *flags, va_list *ap, int *ret);
void ft_ptr_conv(t_fields *flags, va_list *ap, int *ret);
void ft_mod_conv(t_fields *flags, int *ret);
void ft_dec_conv(t_fields *flags, va_list *ap, int *ret);
void ft_dec_helper(t_fields *flags, t_nbr *nbr);
void ft_dec_relayer(t_fields *flags, t_nbr *nbr, int *ret);
void ft_putnbr_base(long nbr, char *base, int base_len);
int ft_nbr_base_size(long nbr, int base_len);
/*
** FT_FRINTF
*/
int ft_fprintf(int fd, const char *fmt, ...)
__attribute__((format(printf,2,3)));
t_fields *ft_format_setup_fd(char *fmt, va_list *ap);
void ft_char_conv_fd(t_fields *flags, va_list *ap, int *ret);
void ft_str_conv_fd(t_fields *flags, va_list *ap, int *ret);
void ft_ptr_conv_fd(t_fields *flags, va_list *ap, int *ret);
void ft_mod_conv_fd(t_fields *flags, int *ret);
void ft_dec_conv_fd(t_fields *flags, va_list *ap, int *ret);
void ft_dec_helper_fd(t_fields *flags, t_nbr *nbr);
void ft_dec_relayer_fd
(t_fields *flags, t_nbr *nbr, int *ret);
void ft_putnbr_base_fd
(long nbr, char *base, int base_len, int fd);
int ft_nbr_base_size_fd(long nbr, int base_len);
#endif
|
opacut/frontend-components
|
packages/pdf-generator/src/components/Chart.js
|
<reponame>opacut/frontend-components<filename>packages/pdf-generator/src/components/Chart.js
import React, { Component } from 'react';
import ReactDOM from 'react-dom';
import { CircleIconConfig } from '@patternfly/react-icons/dist/js/icons/circle-icon';
import PropTypes from 'prop-types';
import { View, Canvas, Text } from '@react-pdf/renderer';
import { ChartPie } from '@patternfly/react-charts/dist/js/components/ChartPie';
import { ChartDonut } from '@patternfly/react-charts/dist/js/components/ChartDonut';
import { ChartDonutUtilization } from '@patternfly/react-charts/dist/js/components/ChartDonutUtilization';
import { getLightThemeColors } from '@patternfly/react-charts/dist/js/components/ChartUtils/chart-theme';
import Table from './Table';
import styles from '../utils/styles';
import rgbHex from 'rgb-hex';
import flatten from 'lodash/flatten';
const appliedStyles = styles();
// Maps the `chartType` prop to the PatternFly component used to render the
// off-screen placeholder SVG, plus per-type chart props and layout hints
// consumed by <Chart> below. Unknown types fall back to `pie` in render().
const chartMapper = {
    pie: {
        component: ChartPie,
        chartProps: {
            allowTooltip: false,
            labelRadius: 45,
            // Percentage labels drawn inside the slices, in white.
            labels: ({ datum }) => `${datum.y}%`,
            style: { labels: { fill: '#FFFFFF' } }
        },
        showLabels: true,
        width: 80
    },
    donut: {
        component: ChartDonut,
        width: 80
    },
    donutUtilization: {
        component: ChartDonutUtilization,
        width: 80,
        // First scale color shows utilization; gray fills the unused arc.
        colorScale: ([ color ]) => [ color, ...getLightThemeColors('gray').voronoi.colorScale ]
    }
};
class Chart extends Component {
getChartData = (currChart) => {
const { data, chartType, colorSchema, ...props } = this.props;
const Chart = currChart.component;
const el = document.createElement('div');
document.body.appendChild(el);
el.style.display = 'none';
ReactDOM.render(
<Chart data={ data } {...currChart.chartProps} { ...props } />,
el,
);
const paths = Array.from(el.querySelectorAll('path')).map((path) => path.getAttribute('d'));
const texts = flatten(Array.from(el.querySelectorAll('text')).map((textEl, key) => (
Array.from(textEl.querySelectorAll('tspan')).map((text) => ({
text: text.innerHTML,
...currChart.showLabels && {
coords: [ textEl.getAttribute('x'), textEl.getAttribute('y') ],
shift: data[key]?.y < 20 ? 0.65 : 0
},
style: text.getAttribute('style').split(';').reduce((acc, curr) => {
const [ key, val ] = curr.split(':');
return {
...acc,
...key && { [key.trim()]: val.trim() }
};
}, {})
}))
)));
// let's clean up the placeholder chart
ReactDOM.unmountComponentAtNode(el);
el.remove();
return [ paths, texts ];
}
render() {
const { data, chartType, colorSchema, ...props } = this.props;
const currChart = chartMapper[chartType] || chartMapper.pie;
const colors = currChart.colorScale ?
currChart.colorScale(getLightThemeColors(colorSchema).voronoi.colorScale) :
getLightThemeColors(colorSchema).voronoi.colorScale;
const [ paths, texts ] = this.getChartData(currChart);
return <View style={[
appliedStyles.flexRow,
{
paddingLeft: 30,
paddinRight: 10,
justifyContent: 'flex-start'
}
]}>
<Canvas
{...props}
style={{
width: currChart.width,
height: 67
}}
paint={({ path, text, fill, scale, translate }) => {
paths.map((onePath, key) => {
scale(key === 0 ? 0.34 : 1);
translate(key === 0 ? 100 : 0, key === 0 ? 100 : 0);
path(onePath)
.fill(colors[key]);
const currText = texts[key];
if (currText) {
const fontSize = parseInt(currText.style['font-size'].replace('px', '')) * 2;
const coords = currText.coords;
const color = rgbHex(
...currText
.style
.fill
.replace(/rgb\(|\)/g, '')
.split(',')
.map(item => parseInt(item, 10))
);
fill(`#${color}`).fontSize(fontSize);
if (coords) {
const [ xshift, yshift ] = [
coords?.[0] > (fontSize + currChart.width) ?
0.5 :
-2 + (currText?.shift || 0),
coords?.[1] > 100 ?
coords?.[0] < (fontSize + currChart.width) ? 0.5 : 1
: -2 - (currText?.shift || 0)
];
text(currText.text, xshift * fontSize, yshift * fontSize);
} else {
text(currText.text, -(currText.text.length * (fontSize / 4)), (24 * key) - fontSize);
}
}
});
}
}
/>
<Table
withHeader
style={
{ width: 'auto', flex: 1 }
}
rowsStyle={{
justifyContent: 'flex-start',
...appliedStyles.compactCellPadding
}}
rows={[
[ 'Legend' ],
...(Array.isArray(data) ? data : [ data ]).map(({ x, y }, key) => [
<Canvas
key={`${key}-bullet`}
style={{
padding: 3,
width: 15,
height: 10
}}
paint={({ path, scale }) => {
scale(0.014);
path(CircleIconConfig.svgPath).fill(colors[key]);
}}
/>,
<Text key={`${key}-text`}>
{x}
</Text>
])
]}
/>
</View>;
}
}
// Only the color scheme is validated here; `data` and `chartType` are
// forwarded to the underlying PatternFly chart untyped.
// NOTE(review): consider declaring propTypes for `data`/`chartType` too.
Chart.propTypes = {
    colorSchema: PropTypes.oneOf([
        'blue',
        'cyan',
        'default',
        'gold',
        'gray',
        'green',
        'multi',
        'multiOrdered',
        'multiUnordered',
        'orange',
        'purple'
    ])
};
Chart.defaultProps = {
    colorSchema: 'multiOrdered'
};
export default Chart;
|
0vert1m3/test
|
ee/app/services/ee/merge_requests/merge_service.rb
|
# EE override of MergeRequests::MergeService: layers repository-size and
# push-rule enforcement on top of the CE merge flow.
module EE
  module MergeRequests
    module MergeService
      extend ::Gitlab::Utils::Override

      # Runs the EE repository-size check before CE's own error checks.
      override :error_check!
      def error_check!
        check_size_limit
        super
      end

      # Returns false (and records a merge error on the model) when the merge
      # would violate the project's push rules; true otherwise. Fast-forward
      # only projects skip the check entirely (no merge commit is created).
      def hooks_validation_pass?(merge_request)
        # handle_merge_error needs this. We should move that to a separate
        # object instead of relying on the order of method calls.
        @merge_request = merge_request # rubocop:disable Gitlab/ModuleWithInstanceVariables

        return true if project.merge_requests_ff_only_enabled
        return true unless project.feature_available?(:push_rules)

        push_rule = merge_request.project.push_rule
        return true unless push_rule

        unless push_rule.commit_message_allowed?(params[:commit_message])
          handle_merge_error(log_message: "Commit message does not follow the pattern '#{push_rule.commit_message_regex}'", save_message_on_model: true)
          return false
        end

        unless push_rule.author_email_allowed?(current_user.email)
          handle_merge_error(log_message: "Commit author's email '#{current_user.email}' does not follow the pattern '#{push_rule.author_email_regex}'", save_message_on_model: true)
          return false
        end

        true
      rescue PushRule::MatchError => e
        # A malformed push-rule regex is reported as a merge error, not raised.
        handle_merge_error(log_message: e.message, save_message_on_model: true)
        false
      end

      private

      # Raises MergeError when the target repository exceeds its size limit.
      def check_size_limit
        if merge_request.target_project.above_size_limit?
          message = ::Gitlab::RepositorySizeError.new(merge_request.target_project).merge_error

          raise ::MergeRequests::MergeService::MergeError, message
        end
      end
    end
  end
end
|
dplbsd/soc2013
|
head/contrib/llvm/tools/clang/lib/Sema/SemaOpenMP.cpp
|
//===--- SemaOpenMP.cpp - Semantic Analysis for OpenMP constructs ----------===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
/// \file
/// \brief This file implements semantic analysis for OpenMP directives and
/// clauses
///
//===----------------------------------------------------------------------===//
#include "clang/Basic/OpenMPKinds.h"
#include "clang/AST/Decl.h"
#include "clang/AST/DeclOpenMP.h"
#include "clang/Lex/Preprocessor.h"
#include "clang/Sema/SemaInternal.h"
#include "clang/Sema/Lookup.h"
using namespace clang;
namespace {
  /// Typo-correction filter used when an identifier in a threadprivate
  /// clause fails name lookup: only variables with global storage that are
  /// visible in the current scope are acceptable correction candidates.
  class VarDeclFilterCCC : public CorrectionCandidateCallback {
    private:
      Sema &Actions;
    public:
      VarDeclFilterCCC(Sema &S) : Actions(S) { }
      virtual bool ValidateCandidate(const TypoCorrection &Candidate) {
        NamedDecl *ND = Candidate.getCorrectionDecl();
        if (VarDecl *VD = dyn_cast_or_null<VarDecl>(ND)) {
          return VD->hasGlobalStorage() &&
                 Actions.isDeclInScope(ND, Actions.getCurLexicalContext(),
                                       Actions.getCurScope());
        }
        return false;
      }
  };
}
/// \brief Semantic handler for '#pragma omp threadprivate(id-list)'.
/// Looks up each identifier (with typo correction restricted to global
/// variables), enforces the OpenMP scope/usage restrictions, and wraps the
/// surviving variables in an OMPThreadPrivateDecl added to the current
/// context. Returns an empty group if no variable passed the checks.
Sema::DeclGroupPtrTy Sema::ActOnOpenMPThreadprivateDirective(
                                 SourceLocation Loc,
                                 Scope *CurScope,
                                 ArrayRef<DeclarationNameInfo> IdList) {
  SmallVector<DeclRefExpr *, 5> Vars;
  for (ArrayRef<DeclarationNameInfo>::iterator I = IdList.begin(),
                                               E = IdList.end();
       I != E; ++I) {
    LookupResult Lookup(*this, *I, LookupOrdinaryName);
    LookupParsedName(Lookup, CurScope, NULL, true);

    // Ambiguity is already diagnosed by lookup; just move on.
    if (Lookup.isAmbiguous())
      continue;

    VarDecl *VD;
    if (!Lookup.isSingleResult()) {
      // Not found (or not unique): try typo correction limited to
      // global-storage variables (see VarDeclFilterCCC above).
      VarDeclFilterCCC Validator(*this);
      TypoCorrection Corrected = CorrectTypo(*I, LookupOrdinaryName, CurScope,
                                             0, Validator);
      std::string CorrectedStr = Corrected.getAsString(getLangOpts());
      std::string CorrectedQuotedStr = Corrected.getQuoted(getLangOpts());
      if (Lookup.empty()) {
        if (Corrected.isResolved()) {
          Diag(I->getLoc(), diag::err_undeclared_var_use_suggest)
            << I->getName() << CorrectedQuotedStr
            << FixItHint::CreateReplacement(I->getLoc(), CorrectedStr);
        } else {
          Diag(I->getLoc(), diag::err_undeclared_var_use)
            << I->getName();
        }
      } else {
        Diag(I->getLoc(), diag::err_omp_expected_var_arg_suggest)
          << I->getName() << Corrected.isResolved() << CorrectedQuotedStr
          << FixItHint::CreateReplacement(I->getLoc(), CorrectedStr);
      }
      if (!Corrected.isResolved()) continue;
      VD = Corrected.getCorrectionDeclAs<VarDecl>();
    } else {
      // Found something, but it must be a variable.
      if (!(VD = Lookup.getAsSingle<VarDecl>())) {
        Diag(I->getLoc(), diag::err_omp_expected_var_arg_suggest)
          << I->getName() << 0;
        Diag(Lookup.getFoundDecl()->getLocation(), diag::note_declared_at);
        continue;
      }
    }

    // OpenMP [2.9.2, Syntax, C/C++]
    //   Variables must be file-scope, namespace-scope, or static block-scope.
    if (!VD->hasGlobalStorage()) {
      Diag(I->getLoc(), diag::err_omp_global_var_arg)
        << getOpenMPDirectiveName(OMPD_threadprivate)
        << !VD->isStaticLocal();
      Diag(VD->getLocation(), diag::note_forward_declaration) << VD;
      continue;
    }

    // OpenMP [2.9.2, Restrictions, C/C++, p.2]
    //   A threadprivate directive for file-scope variables must appear outside
    //   any definition or declaration.
    // OpenMP [2.9.2, Restrictions, C/C++, p.3]
    //   A threadprivate directive for static class member variables must appear
    //   in the class definition, in the same scope in which the member
    //   variables are declared.
    // OpenMP [2.9.2, Restrictions, C/C++, p.4]
    //   A threadprivate directive for namespace-scope variables must appear
    //   outside any definition or declaration other than the namespace
    //   definition itself.
    // OpenMP [2.9.2, Restrictions, C/C++, p.6]
    //   A threadprivate directive for static block-scope variables must appear
    //   in the scope of the variable and not in a nested scope.
    NamedDecl *ND = cast<NamedDecl>(VD);
    if (!isDeclInScope(ND, getCurLexicalContext(), CurScope)) {
      Diag(I->getLoc(), diag::err_omp_var_scope)
        << getOpenMPDirectiveName(OMPD_threadprivate) << VD;
      Diag(VD->getLocation(), diag::note_forward_declaration) << VD;
      continue;
    }

    // OpenMP [2.9.2, Restrictions, C/C++, p.2-6]
    //   A threadprivate directive must lexically precede all references to any
    //   of the variables in its list.
    if (VD->isUsed()) {
      Diag(I->getLoc(), diag::err_omp_var_used)
        << getOpenMPDirectiveName(OMPD_threadprivate) << VD;
      continue;
    }

    // Build a reference to the (non-reference) variable type for the decl.
    QualType ExprType = VD->getType().getNonReferenceType();
    DeclRefExpr *Var = cast<DeclRefExpr>(BuildDeclRefExpr(VD,
                                                          ExprType,
                                                          VK_RValue,
                                                          I->getLoc()).take());
    Vars.push_back(Var);
  }
  if (OMPThreadPrivateDecl *D = CheckOMPThreadPrivateDecl(Loc, Vars)) {
    CurContext->addDecl(D);
    return DeclGroupPtrTy::make(DeclGroupRef(D));
  }
  return DeclGroupPtrTy();
}
/// \brief Applies the per-variable type restrictions for 'threadprivate'
/// (complete type, not a reference, not already TLS) and returns an
/// OMPThreadPrivateDecl covering the variables that pass, or 0 if none do.
OMPThreadPrivateDecl *Sema::CheckOMPThreadPrivateDecl(
                                 SourceLocation Loc,
                                 ArrayRef<DeclRefExpr *> VarList) {
  SmallVector<DeclRefExpr *, 5> Vars;
  for (ArrayRef<DeclRefExpr *>::iterator I = VarList.begin(),
                                         E = VarList.end();
       I != E; ++I) {
    VarDecl *VD = cast<VarDecl>((*I)->getDecl());
    SourceLocation ILoc = (*I)->getLocation();

    // OpenMP [2.9.2, Restrictions, C/C++, p.10]
    //   A threadprivate variable must not have an incomplete type.
    if (RequireCompleteType(ILoc, VD->getType(),
                            diag::err_omp_incomplete_type)) {
      continue;
    }

    // OpenMP [2.9.2, Restrictions, C/C++, p.10]
    //   A threadprivate variable must not have a reference type.
    if (VD->getType()->isReferenceType()) {
      Diag(ILoc, diag::err_omp_ref_type_arg)
        << getOpenMPDirectiveName(OMPD_threadprivate) << VD->getType();
      Diag(VD->getLocation(), diag::note_forward_declaration) << VD;
      continue;
    }

    // Check if this is a TLS variable: __thread / thread_local storage is
    // incompatible with threadprivate.
    if (VD->getTLSKind()) {
      Diag(ILoc, diag::err_omp_var_thread_local) << VD;
      Diag(VD->getLocation(), diag::note_forward_declaration) << VD;
      continue;
    }

    Vars.push_back(*I);
  }
  return Vars.empty() ?
              0 : OMPThreadPrivateDecl::Create(Context,
                                               getCurLexicalContext(),
                                               Loc, Vars);
}
|
influitive/SHAlert
|
SHAlertExample/Vendor/PXEngine.framework/Versions/1.0/Headers/PXMath.h
|
//
// PXMath.h
// PXEngine
//
// Created by <NAME> on 7/28/12.
// Copyright (c) 2012 Pixate, Inc. All rights reserved.
//
#ifndef PXShapeKit_PXMath_h
#define PXShapeKit_PXMath_h

// Degree/radian conversion helpers. The arguments are parenthesized so the
// macros are safe to use with compound expressions.
// NOTE(review): M_PI comes from <math.h>, which this header does not include
// itself — include sites must provide it; confirm.
#define DEGREES_TO_RADIANS(angle) ( (angle) / 180.0 * M_PI)
#define RADIANS_TO_DEGREES(radians) ((radians) * 180.0 / M_PI)

#endif
|
timfel/netbeans
|
java/java.hints/test/unit/src/org/netbeans/modules/java/hints/bugs/UnbalancedTest.java
|
<reponame>timfel/netbeans<gh_stars>1000+
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.netbeans.modules.java.hints.bugs;
import org.netbeans.junit.NbTestCase;
import org.netbeans.junit.RandomlyFails;
import org.netbeans.modules.java.hints.test.api.HintTest;
/**
*
* @author lahvac
*/
public class UnbalancedTest extends NbTestCase {
    // Forwards the test name to the NbTestCase runner.
    public UnbalancedTest(String name) {
        super(name);
    }

    // A private array field that is only ever written must be flagged WRITE-only.
    public void testArrayWriteOnly() throws Exception {
        HintTest
                .create()
                .input("package test;\n" +
                       "public class Test {\n" +
                       "    private byte[] arr;\n" +
                       "    private void t() { arr[0] = 0; }\n" +
                       "}\n")
                .run(Unbalanced.Array.class)
                .assertContainsWarnings("2:19-2:22:verifier:ERR_UnbalancedArrayWRITE arr");
    }

    // A field that is only read (and never initialized) is flagged READ-only.
    public void testArrayReadOnly1() throws Exception {
        HintTest
                .create()
                .input("package test;\n" +
                       "public class Test {\n" +
                       "    private byte[] arr;\n" +
                       "    private void t() { System.err.println(arr[0]); }\n" +
                       "}\n")
                .run(Unbalanced.Array.class)
                .assertContainsWarnings("2:19-2:22:verifier:ERR_UnbalancedArrayREAD arr");
    }

    // Initialization with an empty `new byte[0]` does not count as a write.
    public void testArrayReadOnly2() throws Exception {
        HintTest
                .create()
                .input("package test;\n" +
                       "public class Test {\n" +
                       "    private byte[] arr = new byte[0];\n" +
                       "    private void t() { System.err.println(arr[0]); }\n" +
                       "}\n")
                .run(Unbalanced.Array.class)
                .assertContainsWarnings("2:19-2:22:verifier:ERR_UnbalancedArrayREAD arr");
    }

    // Both a write and a read: balanced, no warning.
    public void testArrayNeg1() throws Exception {
        HintTest
                .create()
                .input("package test;\n" +
                       "public class Test {\n" +
                       "    private byte[] arr;\n" +
                       "    private void t() { arr[0] = 0; System.err.println(arr[0]); }\n" +
                       "}\n")
                .run(Unbalanced.Array.class)
                .assertWarnings();
    }

    // A field with no accesses at all is not flagged.
    public void testArrayNeg2() throws Exception {
        HintTest
                .create()
                .input("package test;\n" +
                       "public class Test {\n" +
                       "    private byte[] arr;\n" +
                       "}\n")
                .run(Unbalanced.Array.class)
                .assertWarnings();
    }

    // Escaping the array reference (returned from a method) suppresses the hint.
    public void testArrayNeg3() throws Exception {
        HintTest
                .create()
                .input("package test;\n" +
                       "public class Test {\n" +
                       "    private byte[] arr;\n" +
                       "    private void t() { System.err.println(arr[0]); }\n" +
                       "    private Object g() { return arr; }\n" +
                       "}\n")
                .run(Unbalanced.Array.class)
                .assertWarnings();
    }

    // An array-initializer (`{1, 2, 3}`) counts as a write, so a read balances it.
    public void testArrayNeg4() throws Exception {
        HintTest
                .create()
                .input("package test;\n" +
                       "public class Test {\n" +
                       "    private byte[] arr = {1, 2, 3};\n" +
                       "    private void t() { System.err.println(arr[0]); }\n" +
                       "}\n")
                .run(Unbalanced.Array.class)
                .assertWarnings();
    }

    // Regression for issue 206855: multi-dimensional array written in the
    // constructor and read in a getter must not be flagged.
    public void testArrayNeg206855() throws Exception {
        HintTest
                .create()
                .input("package test;\n" +
                       "public class Test {\n" +
                       "    private final int aa[][] = new int[3][3];\n" +
                       "    public Test() {\n" +
                       "        aa[0][0] = 1;\n" +
                       "    }\n" +
                       "    public int get() {\n" +
                       "        return aa[0][0];\n" +
                       "    }\n" +
                       "}\n")
                .run(Unbalanced.Array.class)
                .assertWarnings();
    }
    @RandomlyFails
    /* ergonomics # 3604: noone holds javac:
    private static final java.util.Map org.netbeans.modules.java.hints.bugs.Unbalanced.seen->
    java.util.WeakHashMap@4ba0561d-table->
    [Ljava.util.WeakHashMap$Entry;@4dfbbcea-[5]->
    java.util.WeakHashMap$Entry@14a61465-value->
    java.util.HashMap@2a234e7e-table->
    [Ljava.util.HashMap$Entry;@6081ad10-[4]->
    java.util.HashMap$Entry@519cef33-key->
    com.sun.tools.javac.code.Symbol$VarSymbol@7756c69c-type->
    com.sun.tools.javac.code.Type$ArrayType@452719a0-elemtype->
    com.sun.tools.javac.code.Type@4bf0c8d4-tsym->
    com.sun.tools.javac.code.Symbol$ClassSymbol@1a78d426-owner->
    com.sun.tools.javac.code.Symbol$PackageSymbol@29c50e17-completer->
    org.netbeans.lib.nbjavac.services.NBClassReader@3b907285-sourceCompleter->
    com.sun.tools.javac.main.JavaCompiler@5e45ffbf-flow->
    com.sun.tools.javac.comp.Flow@576cceda-attrEnv->
    com.sun.tools.javac.comp.Env@f7e5307-toplevel->
    com.sun.tools.javac.tree.JCTree$JCCompilationUnit@40d0726d
    at org.netbeans.junit.NbTestCase$4.run(NbTestCase.java:1390)
    at org.netbeans.junit.internal.NbModuleLogHandler.whileIgnoringOOME(NbModuleLogHandler.java:170)
    at org.netbeans.junit.NbTestCase.assertGC(NbTestCase.java:1348)
    at org.netbeans.junit.NbTestCase.assertGC(NbTestCase.java:1324)
    at org.netbeans.modules.java.hints.test.api.HintTest.run(HintTest.java:487)
    at org.netbeans.modules.java.hints.bugs.UnbalancedTest.testNegForeach209850(UnbalancedTest.java:155)
    at org.netbeans.junit.NbTestCase.access$200(NbTestCase.java:95)
    at org.netbeans.junit.NbTestCase$2.doSomething(NbTestCase.java:403)
    at org.netbeans.junit.NbTestCase$1Guard.run(NbTestCase.java:329)
    at java.lang.Thread.run(Thread.java:662)
    *
    */
    // Regression for issue 209850: iterating an array field with a for-each
    // loop counts as a read, so the constructor-initialized field is balanced.
    public void testNegForeach209850() throws Exception {
        HintTest
                .create()
                .input("package test;\n" +
                       "public class Test {\n" +
                       "    private final int aa[][] = new int[3][3];\n" +
                       "    public void get() {\n" +
                       "        for (int[] span : aa) {\n" +
                       "            System.err.println(span[0] + span[1]);\n" +
                       "        }\n" +
                       "    }\n" +
                       "}\n")
                .run(Unbalanced.Array.class)
                .assertWarnings();
    }
    // Regression for issue 211248: an array produced by a method call
    // (String.split) and then read must not be flagged.
    public void testNeg211248a() throws Exception {
        HintTest
                .create()
                .input("package test;\n" +
                       "public class Test {\n" +
                       "    private final String aa[] = \"\".split(\";\");\n" +
                       "    public void get() {\n" +
                       "        System.err.println(aa[0]);\n" +
                       "    }\n" +
                       "}\n")
                .run(Unbalanced.Array.class)
                .assertWarnings();
    }

    // Same as above, but the read flows through a plain assignment.
    public void testNeg211248b() throws Exception {
        HintTest
                .create()
                .input("package test;\n" +
                       "public class Test {\n" +
                       "    private final String aa[] = \"\".split(\";\");\n" +
                       "    public void get() {\n" +
                       "        String str;\n" +
                       "        str = aa[0];\n" +
                       "        System.err.println(str);\n" +
                       "    }\n" +
                       "}\n")
                .run(Unbalanced.Array.class)
                .assertWarnings();
    }

    // Same as above, but the read flows through a compound assignment (+=).
    public void testNeg211248c() throws Exception {
        HintTest
                .create()
                .input("package test;\n" +
                       "public class Test {\n" +
                       "    private final String aa[] = \"\".split(\";\");\n" +
                       "    public void get() {\n" +
                       "        String str = \"\";\n" +
                       "        str += aa[0];\n" +
                       "        System.err.println(str);\n" +
                       "    }\n" +
                       "}\n")
                .run(Unbalanced.Array.class)
                .assertWarnings();
    }

    // An array initialized from split() but never read is still WRITE-only.
    public void testInit1() throws Exception {
        HintTest
                .create()
                .input("package test;\n" +
                       "public class Test {\n" +
                       "    private final String aa[] = \"\".split(\";\");\n" +
                       "    public void get() {\n" +
                       "    }\n" +
                       "}\n")
                .run(Unbalanced.Array.class)
                .assertWarnings("2:23-2:25:verifier:ERR_UnbalancedArrayWRITE aa");
    }

    // Likewise when initialized with an explicit array-creation expression.
    public void testInit2() throws Exception {
        HintTest
                .create()
                .input("package test;\n" +
                       "public class Test {\n" +
                       "    private final String aa[] = new String[] {\";\"};\n" +
                       "    public void get() {\n" +
                       "    }\n" +
                       "}\n")
                .run(Unbalanced.Array.class)
                .assertWarnings("2:23-2:25:verifier:ERR_UnbalancedArrayWRITE aa");
    }
    // A collection that is only added to must be flagged WRITE-only.
    public void testCollectionWriteOnly1() throws Exception {
        HintTest
                .create()
                .input("package test;\n" +
                       "public class Test {\n" +
                       "    private java.util.List<String> coll;\n" +
                       "    private void t() { coll.add(\"a\"); }\n" +
                       "}\n")
                .run(Unbalanced.Collection.class)
                .assertContainsWarnings("2:35-2:39:verifier:ERR_UnbalancedCollectionWRITE coll");
    }

    // iterator() alone does not count as a read, so still WRITE-only.
    public void testCollectionWriteOnly2() throws Exception {
        HintTest
                .create()
                .input("package test;\n" +
                       "public class Test {\n" +
                       "    private java.util.List<String> coll;\n" +
                       "    private void t() { coll.add(\"a\"); coll.iterator(); }\n" +
                       "}\n")
                .run(Unbalanced.Collection.class)
                .assertContainsWarnings("2:35-2:39:verifier:ERR_UnbalancedCollectionWRITE coll");
    }

    // A collection that is only read from (get) is flagged READ-only.
    public void testCollectionReadOnly1() throws Exception {
        HintTest
                .create()
                .input("package test;\n" +
                       "public class Test {\n" +
                       "    private java.util.List<String> coll;\n" +
                       "    private void t() { String str = coll.get(0); }\n" +
                       "}\n")
                .run(Unbalanced.Collection.class)
                .assertContainsWarnings("2:35-2:39:verifier:ERR_UnbalancedCollectionREAD coll");
    }

    // remove(int) whose result is consumed counts as a read only.
    public void testCollectionReadOnly2() throws Exception {//XXX ?
        HintTest
                .create()
                .input("package test;\n" +
                       "public class Test {\n" +
                       "    private java.util.List<String> coll;\n" +
                       "    private void t() { String str = coll.remove(0); }\n" +
                       "}\n")
                .run(Unbalanced.Collection.class)
                .assertContainsWarnings("2:35-2:39:verifier:ERR_UnbalancedCollectionREAD coll");
    }

    // Initializing with an empty ArrayList does not count as a write.
    public void testCollectionReadOnly3() throws Exception {//XXX ?
        HintTest
                .create()
                .input("package test;\n" +
                       "public class Test {\n" +
                       "    private java.util.List<String> coll = new java.util.ArrayList<String>(1);\n" +
                       "    private void t() { String str = coll.remove(0); }\n" +
                       "}\n")
                .run(Unbalanced.Collection.class)
                .assertContainsWarnings("2:35-2:39:verifier:ERR_UnbalancedCollectionREAD coll");
    }

    // The hint also covers maps: get-only usage is READ-only.
    public void testMapReadOnly1() throws Exception {
        HintTest
                .create()
                .input("package test;\n" +
                       "public class Test {\n" +
                       "    private java.util.Map<String, String> map;\n" +
                       "    private void t() { String str = map.get(\"a\"); }\n" +
                       "}\n")
                .run(Unbalanced.Collection.class)
                .assertContainsWarnings("2:42-2:45:verifier:ERR_UnbalancedCollectionREAD map");
    }

    // Balanced add + get: no warning.
    public void testCollectionNeg1() throws Exception {
        HintTest
                .create()
                .input("package test;\n" +
                       "public class Test {\n" +
                       "    private java.util.List<String> coll;\n" +
                       "    private void t() { coll.add(\"a\"); System.err.println(coll.get(0)); }\n" +
                       "}\n")
                .run(Unbalanced.Collection.class)
                .assertWarnings();
    }

    // A collection field with no accesses at all is not flagged.
    public void testCollectionNeg2() throws Exception {
        HintTest
                .create()
                .input("package test;\n" +
                       "public class Test {\n" +
                       "    private java.util.List<String> coll;\n" +
                       "}\n")
                .run(Unbalanced.Collection.class)
                .assertWarnings();
    }
public void testCollectionNeg3() throws Exception {
HintTest
.create()
.input("package test;\n" +
"public class Test {\n" +
" private java.util.List<String> coll;\n" +
" private void t() { System.err.println(coll.get(0)); }\n" +
" private Object g() { return coll; }\n" +
"}\n")
.run(Unbalanced.Collection.class)
.assertWarnings();
}
@RandomlyFails
/* local run #2: noone holds javac:
private static final java.util.Map org.netbeans.modules.java.hints.bugs.Unbalanced.seen->
java.util.WeakHashMap@4163e1-table->
[Ljava.util.WeakHashMap$Entry;@713bd2-[8]->
java.util.WeakHashMap$Entry@dfe4-value->
java.util.HashMap@f8ae75-table->
[Ljava.util.HashMap$Entry;@188b5b2-[9]->
java.util.HashMap$Entry@2569ee-key->
com.sun.tools.javac.code.Symbol$VarSymbol@13dff89-owner->
com.sun.tools.javac.code.Symbol$ClassSymbol@5d529e-owner->
com.sun.tools.javac.code.Symbol$PackageSymbol@76b9d0-owner->
com.sun.tools.javac.code.Symbol$PackageSymbol@ec179-completer->
org.netbeans.lib.nbjavac.services.NBClassReader@1ab1d4a-sourceCompleter->
com.sun.tools.javac.main.JavaCompiler@9cb9f3-flow->
com.sun.tools.javac.comp.Flow@1ebb3a4-attrEnv->
com.sun.tools.javac.comp.Env@ade1b6-toplevel->
com.sun.tools.javac.tree.JCTree$JCCompilationUnit@128ea3f
junit.framework.AssertionFailedError: noone holds javac:
private static final java.util.Map org.netbeans.modules.java.hints.bugs.Unbalanced.seen->
java.util.WeakHashMap@4163e1-table->
[Ljava.util.WeakHashMap$Entry;@713bd2-[8]->
java.util.WeakHashMap$Entry@dfe4-value->
java.util.HashMap@f8ae75-table->
[Ljava.util.HashMap$Entry;@188b5b2-[9]->
java.util.HashMap$Entry@2569ee-key->
com.sun.tools.javac.code.Symbol$VarSymbol@13dff89-owner->
com.sun.tools.javac.code.Symbol$ClassSymbol@5d529e-owner->
com.sun.tools.javac.code.Symbol$PackageSymbol@76b9d0-owner->
com.sun.tools.javac.code.Symbol$PackageSymbol@ec179-completer->
org.netbeans.lib.nbjavac.services.NBClassReader@1ab1d4a-sourceCompleter->
com.sun.tools.javac.main.JavaCompiler@9cb9f3-flow->
com.sun.tools.javac.comp.Flow@1ebb3a4-attrEnv->
com.sun.tools.javac.comp.Env@ade1b6-toplevel->
com.sun.tools.javac.tree.JCTree$JCCompilationUnit@128ea3f
at org.netbeans.junit.NbTestCase$4.run(NbTestCase.java:1390)
at org.netbeans.junit.internal.NbModuleLogHandler.whileIgnoringOOME(NbModuleLogHandler.java:170)
at org.netbeans.junit.NbTestCase.assertGC(NbTestCase.java:1348)
at org.netbeans.junit.NbTestCase.assertGC(NbTestCase.java:1324)
at org.netbeans.modules.java.hints.test.api.HintTest.run(HintTest.java:487)
at org.netbeans.modules.java.hints.bugs.UnbalancedTest.testCollectionNeg4(UnbalancedTest.java:383)
at org.netbeans.junit.NbTestCase.access$200(NbTestCase.java:95)
at org.netbeans.junit.NbTestCase$2.doSomething(NbTestCase.java:403)
at org.netbeans.junit.NbTestCase$1Guard.run(NbTestCase.java:329)
at java.lang.Thread.run(Thread.java:662)
*/
public void testCollectionNeg4() throws Exception {
HintTest
.create()
.input("package test;\n" +
"public class Test {\n" +
" private java.util.List<String> coll = new java.util.ArrayList<String>(java.util.Arrays.asList(\"foo\"));\n" +
" private void t() { System.err.println(coll.get(0)); }\n" +
"}\n")
.run(Unbalanced.Collection.class)
.assertWarnings();
}
/**
 * Negative case: the return value of {@code add} is tested, which counts as a
 * read of the collection, so no warning may appear.
 */
public void testCollectionNegAddTested() throws Exception {
    String code = "package test;\n" +
            "public class Test {\n" +
            " private java.util.List<String> coll = new java.util.ArrayList<String>();\n" +
            " public void t1(String str) { if (coll.add(str)) System.err.println(\"\"); }\n" +
            "}\n";
    HintTest.create()
            .input(code)
            .run(Unbalanced.Collection.class)
            .assertWarnings();
}
/**
 * Positive case: a local collection that is only ever read must be flagged as
 * unbalanced (read-only) at the variable's position.
 */
public void testCollectionLocalVariable() throws Exception {
    String code = "package test;\n" +
            "public class Test {\n" +
            " private void t() { java.util.List<String> coll = new java.util.ArrayList<String>(); String str = coll.get(0); }\n" +
            "}\n";
    HintTest.create()
            .input(code)
            .run(Unbalanced.Collection.class)
            .assertContainsWarnings("2:46-2:50:verifier:ERR_UnbalancedCollectionREAD coll");
}
/**
 * Negative case: a non-private field may be accessed from other compilation
 * units, so the hint must stay silent even though only writes are visible.
 */
public void testCollectionNegNonPrivate() throws Exception {
    String code = "package test;\n" +
            "public class Test {\n" +
            " java.util.List<String> coll = new java.util.ArrayList<String>();\n" +
            " public void t1(String str) { if (coll.add(str)) System.err.println(\"\"); }\n" +
            "}\n";
    HintTest.create()
            .input(code)
            .run(Unbalanced.Collection.class)
            .assertWarnings();
}
@RandomlyFails
/** Local run #1: noone holds javac:
private static final java.util.Map org.netbeans.modules.java.hints.bugs.Unbalanced.seen->
java.util.WeakHashMap@1a8422e-table->
[Ljava.util.WeakHashMap$Entry;@1ce7242-[11]->
java.util.WeakHashMap$Entry@67ec7f-value->
java.util.HashMap@99a269-table->
[Ljava.util.HashMap$Entry;@d42d02-[15]->
java.util.HashMap$Entry@56619-key->
com.sun.tools.javac.code.Symbol$VarSymbol@ed15c7-owner->
com.sun.tools.javac.code.Symbol$MethodSymbol@1c94b8f-owner->
com.sun.tools.javac.code.Symbol$ClassSymbol@1c5cdac-owner->
com.sun.tools.javac.code.Symbol$PackageSymbol@15deba3-owner->
com.sun.tools.javac.code.Symbol$PackageSymbol@191a832-completer->
org.netbeans.lib.nbjavac.services.NBClassReader@1e41d53-sourceCompleter->
com.sun.tools.javac.main.JavaCompiler@9d85e0-flow->
com.sun.tools.javac.comp.Flow@1e9150a-attrEnv->
com.sun.tools.javac.comp.Env@7fcd7e-toplevel->
com.sun.tools.javac.tree.JCTree$JCCompilationUnit@1b5a415
junit.framework.AssertionFailedError: noone holds javac:
private static final java.util.Map org.netbeans.modules.java.hints.bugs.Unbalanced.seen->
java.util.WeakHashMap@1a8422e-table->
[Ljava.util.WeakHashMap$Entry;@1ce7242-[11]->
java.util.WeakHashMap$Entry@67ec7f-value->
java.util.HashMap@99a269-table->
[Ljava.util.HashMap$Entry;@d42d02-[15]->
java.util.HashMap$Entry@56619-key->
com.sun.tools.javac.code.Symbol$VarSymbol@ed15c7-owner->
com.sun.tools.javac.code.Symbol$MethodSymbol@1c94b8f-owner->
com.sun.tools.javac.code.Symbol$ClassSymbol@1c5cdac-owner->
com.sun.tools.javac.code.Symbol$PackageSymbol@15deba3-owner->
com.sun.tools.javac.code.Symbol$PackageSymbol@191a832-completer->
org.netbeans.lib.nbjavac.services.NBClassReader@1e41d53-sourceCompleter->
com.sun.tools.javac.main.JavaCompiler@9d85e0-flow->
com.sun.tools.javac.comp.Flow@1e9150a-attrEnv->
com.sun.tools.javac.comp.Env@7fcd7e-toplevel->
com.sun.tools.javac.tree.JCTree$JCCompilationUnit@1b5a415
*/
/**
 * Negative case: iterating a method parameter in an enhanced for loop reads
 * externally provided data, so no warning may appear.
 */
public void testCollectionNegEnhForLoop() throws Exception {
    String code = "package test;\n" +
            "import java.util.List;\n" +
            "public class Test {\n" +
            " public int t1(List<List<String>> ll) { int total = 0; for (List<String> l : ll) total += l.size(); return total; }\n" +
            "}\n";
    HintTest.create()
            .input(code)
            .run(Unbalanced.Collection.class)
            .assertWarnings();
}
}
|
dahyeong-yun/prtc_coding-test-py
|
onlineJudge/baekjoon/DS/Stack/Q1874/Q1874.py
|
<filename>onlineJudge/baekjoon/DS/Stack/Q1874/Q1874.py
import sys


def simulate(n, targets):
    """Reproduce ``targets`` as the pop order of a stack fed 1..n in order.

    The numbers 1..n are pushed in increasing order; after any push the top of
    the stack may be popped.  Returns the list of "+" (push) / "-" (pop)
    operations that yields ``targets`` as the pop sequence, or ``None`` when
    that sequence is impossible.
    """
    ops = []
    stack = []
    next_value = 1  # smallest number not yet pushed
    for target in targets:
        # Push until the required number is on top of the stack.
        while next_value <= target:
            stack.append(next_value)
            next_value += 1
            ops.append("+")
        # The target must now be on top; otherwise the order is impossible.
        # (The emptiness guard also protects against malformed input with
        # repeated values, which previously raised IndexError.)
        if stack and stack[-1] == target:
            stack.pop()
            ops.append("-")
        else:
            return None
    return ops


def main():
    """Read the problem input from stdin and print the answer (Baekjoon 1874)."""
    n = int(input())
    targets = [int(input()) for _ in range(n)]
    ops = simulate(n, targets)
    if ops is None:
        print("NO")
        sys.exit(0)
    print("\n".join(ops))


if __name__ == "__main__":
    main()
|
andriyzagoruyko/landing-core
|
resources/js/components/common/Form/Section/index.js
|
import React from 'react';
import PropTypes from 'prop-types';
import clsx from 'clsx';
import { FormGroup, Typography, Divider } from '@material-ui/core/';
import useStyles from './styles';
const FormSection = ({ children, title, divider, dense = false, ...rest }) => {
const classes = useStyles();
return (
<>
<FormGroup row className={clsx(classes.formGroup, {
[classes.formGroupMargin]: !dense,
})}
{...rest}
>
{title && (
<Typography variant="button" className={classes.title}>
{title}
</Typography>
)}
<div className={classes.sectionBody}>
{children}
</div>
</FormGroup>
{divider && <Divider className={classes.divider} />}
</>
)
}
FormSection.propTypes = {
title: PropTypes.string,
divider: PropTypes.bool,
dense: PropTypes.bool,
onDelete: PropTypes.func
};
export default FormSection;
|
vampire-studios/HuskysGadgetMod-Forge
|
src/main/java/io/github/vampirestudios/hgm/api/os/OperatingSystem.java
|
<reponame>vampire-studios/HuskysGadgetMod-Forge
package io.github.vampirestudios.hgm.api.os;
import io.github.vampirestudios.hgm.core.TaskBar;
/**
 * Contract of an operating system: its identity plus the resources and the
 * task bar it exposes.
 */
public interface OperatingSystem {

    /** @return the name of this operating system */
    String name();

    /** @return the version string of this operating system */
    String version();

    /** @return the task bar associated with this operating system */
    TaskBar taskBar();

    /** @return the RAM figure for this operating system (units not specified here — verify against callers) */
    int ram();

    /** @return the storage figure for this operating system (units not specified here — verify against callers) */
    int storage();
}
|
zhixuan2333/work
|
go/vscode_electron/main.go
|
<filename>go/vscode_electron/main.go
package main
import (
"bufio"
"bytes"
"fmt"
"io/ioutil"
"log"
"net/http"
"os/exec"
"regexp"
"runtime"
)
// vscodeV returns the raw multi-line output of `code -v` (line 1 is the
// vscode version, line 3 the architecture — see main).  On failure the error
// is logged and the possibly-empty output is returned unchanged.
func vscodeV() []byte {
	// Run `code -v` and capture its stdout.
	out, err := exec.Command("code", "-v").Output()
	if err != nil {
		log.Printf("Get vscode version failed: %s\n", err.Error())
	}
	return out
}
// atLine returns the n-th line (1-based) of f, or the empty string when f
// contains fewer than n lines.
func atLine(f []byte, n int) string {
	scanner := bufio.NewScanner(bytes.NewReader(f))
	for i := 1; scanner.Scan(); i++ {
		if i == n {
			return scanner.Text()
		}
	}
	return ""
}
// electron resolves the Electron version pinned by the given vscode release
// by downloading that tag's .yarnrc from GitHub and extracting the second
// quoted string (the electron target version).  Returns "" on any failure.
//
// Fixes over the previous version: a failed http.Get no longer dereferences a
// nil response, the ReadAll error is no longer silently ignored, and an
// unexpected .yarnrc no longer panics with an index out of range.
func electron(version string) string {
	// get .yarnrc file
	r, err := http.Get("https://raw.githubusercontent.com/Microsoft/vscode/" + version + "/.yarnrc")
	if err != nil {
		log.Printf("get electron version failed: %s", err.Error())
		return ""
	}
	defer r.Body.Close()
	// read the .yarnrc contents
	b, err := ioutil.ReadAll(r.Body)
	if err != nil {
		log.Printf("read .yarnrc failed: %s", err.Error())
		return ""
	}
	s := string(b)
	// match every double-quoted value
	rule, err := regexp.Compile(`".*?"`)
	if err != nil {
		log.Printf("re rule is failed: %s\n", err.Error())
		return ""
	}
	results := rule.FindAllString(s, -1)
	if len(results) < 2 {
		log.Printf("unexpected .yarnrc format for vscode %s", version)
		return ""
	}
	// strip the surrounding quotes from the second match
	i := results[1]
	return i[1 : len(i)-1]
}
// systemversion maps runtime.GOOS onto the platform token used in Electron
// release artifact names: "win32", "darwin" or "linux" (the fallback).
func systemversion() string {
	switch runtime.GOOS {
	case "windows":
		return "win32"
	case "darwin":
		return "darwin"
	default:
		return "linux"
	}
}
// Open launches url with the platform's default handler.  OS must be one of
// the tokens produced by systemversion ("win32", "linux", "darwin"); any
// other value returns an error instead of panicking on a nil command.
//
// Fix: the previous linux branch ran `bash -c "xdg-open" url`, which made
// url the shell's $0, so the URL was never actually passed to xdg-open.
func Open(url, OS string) error {
	var cmd *exec.Cmd
	switch OS {
	case "win32":
		cmd = exec.Command("cmd", "/C", "start", url)
	case "linux":
		// Invoke xdg-open directly with the URL as its argument.
		cmd = exec.Command("xdg-open", url)
	case "darwin":
		cmd = exec.Command("open", url)
	default:
		return fmt.Errorf("unsupported OS token %q", OS)
	}
	return cmd.Start()
}
func main() {
	// Query the locally installed vscode: line 1 of `code -v` is the
	// version, line 3 the architecture.
	vscode := vscodeV()
	Vversion := atLine(vscode, 1)
	arch := atLine(vscode, 3)
	// Resolve the Electron version pinned by that vscode release.
	yarnrc := electron(Vversion)
	OS := systemversion()
	fmt.Printf("vscode: %s\n", Vversion)
	fmt.Printf("arch: %s\n", arch)
	fmt.Printf("version: %s\n", yarnrc)
	fmt.Printf("OS: %s\n", OS)
	// Compose the GitHub release URL of the matching Electron build and open
	// it in the default browser/downloader.
	url := "https://github.com/electron/electron/releases/download/v" + yarnrc + "/electron-v" + yarnrc + "-" + OS + "-" + arch + ".zip"
	fmt.Println(url)
	err := Open(url, OS)
	if err != nil {
		log.Fatal("Open url failed: ", err)
	}
}
|
ZDAutomotive/ZD-SWAG-SDK
|
test/services/seatcontrol/testSet.js
|
<gh_stars>1-10
// Manual integration script: connects to a SeatControl endpoint on the bench
// network and sends a single absolute position command.
const swag = require('../../../dist/bundle.cjs');
const SC = new swag.SeatControl({host:'192.168.178.114'});

// Async IIFE so the connection and command can be awaited sequentially.
(async () => {
    try {
        let conn = await SC.connect();
        console.log(conn);
        // console.log(SC.socket);
        // Drive the seat to the four axis values below (raw units — verify
        // the axis order against the SeatControl implementation).
        let res = await SC.setPosition([32440,33208,32261,32600])
        console.log(res);
        // let res2 = await SC.resetPosition()
        // console.log(res2);
    } catch (error) {
        console.log(error)
    }
})();
|
flongo/ariatemplates
|
test/aria/ext/filesgenerator/GeneratorTest.js
|
<filename>test/aria/ext/filesgenerator/GeneratorTest.js
/*
* Copyright 2012 <NAME>.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Test case for aria.ext.filesgenerator.Generator
*/
Aria.classDefinition({
    $classpath : "test.aria.ext.filesgenerator.GeneratorTest",
    $extends : "aria.jsunit.TestCase",
    $dependencies : ["aria.ext.filesgenerator.Generator", "aria.utils.Type", "aria.modules.urlService.IUrlService",
            "aria.templates.ModuleCtrlFactory"],
    $prototype : {

        /**
         * Test that a generated class indeed can be evaluated as such and instantiated.
         */
        testGenerateEvalAndInstantiateClass : function () {
            var generator = aria.ext.filesgenerator.Generator;
            var fileInfo = generator.generateFile(generator.TYPE_CLASS, {
                $classpath : "test.my.great.test.ClassDefinition"
            });
            this.assertEquals(fileInfo.type, "class", "Wrong type of file was generated");
            this.assertEquals(fileInfo.classpath, "test.my.great.test.ClassDefinition", "Incorrect classpath parsed");
            // Evaluating the generated source must define the class without throwing.
            try {
                eval(fileInfo.content);
            } catch (e) {
                this.assertTrue(false, "Eval'ing the generated class failed");
            }
            var o = new test.my.great.test.ClassDefinition();
            this.assertTrue(typeof o == "object", "Instantiating generated class didn't produce an object");
            this.assertTrue(aria.utils.Type.isInstanceOf(o, "test.my.great.test.ClassDefinition"), "Instantiated object is not of the correct type");
            // Clean up: unload the generated classpath and dispose the instance.
            Aria.dispose("test.my.great.test.ClassDefinition");
            o.$dispose();
        },

        /**
         * Test that an interface can be generated and evaluated.
         */
        testGenerateAndEvalInterface : function () {
            var generator = aria.ext.filesgenerator.Generator;
            var fileInfo = generator.generateFile(generator.TYPE_INTERFACE, {
                $classpath : "my.great.test.InterfaceDefinition",
                $extends : "aria.modules.urlService.IUrlService"
            });
            this.assertEquals(fileInfo.type, "interface", "Wrong type of file was generated");
            this.assertEquals(fileInfo.classpath, "my.great.test.InterfaceDefinition", "Incorrect classpath parsed");
            try {
                eval(fileInfo.content);
            } catch (e) {
                this.assertTrue(false, "Eval'ing the generated interface failed");
            }
            Aria.dispose("my.great.test.InterfaceDefinition");
        },

        /**
         * Test and instantiate a module controller class.  Asynchronous: the
         * test ends in moduleControllerLoaded via notifyTestEnd.
         */
        testAsyncModuleControllerClass : function () {
            var generator = aria.ext.filesgenerator.Generator;
            // Expected descriptors of the four generated files, in order.
            var test = [{
                        "type" : "moduleControllerInterface",
                        "classpath" : "aria.my.great.test.IModuleDefinition",
                        "logicalpath" : "aria/my/great/test/IModuleDefinition.js"
                    }, {
                        "type" : "moduleController",
                        "classpath" : "aria.my.great.test.ModuleDefinition",
                        "logicalpath" : "aria/my/great/test/ModuleDefinition.js"
                    }, {
                        "type" : "flowControllerInterface",
                        "classpath" : "aria.my.great.test.IModuleDefinitionFlow",
                        "logicalpath" : "aria/my/great/test/IModuleDefinitionFlow.js"
                    }, {
                        "type" : "flowController",
                        "classpath" : "aria.my.great.test.ModuleDefinitionFlow",
                        "logicalpath" : "aria/my/great/test/ModuleDefinitionFlow.js"
                    }];
            var fileInfo = generator.generateModuleCtrl("aria.my.great.test.ModuleDefinition", true);
            var DM = aria.core.DownloadMgr;
            var MCFactory = aria.templates.ModuleCtrlFactory;
            for (var i = 0; i < fileInfo.length; i++) {
                this.assertEquals(fileInfo[i].type, test[i].type, "Incorrect type of file was generated.");
                this.assertEquals(fileInfo[i].classpath, test[i].classpath, "Incorrect classpath parsed");
                // Register each generated source so the factory can load it by classpath.
                DM.loadFileContent(test[i].logicalpath, fileInfo[i].content);
            }
            MCFactory.createModuleCtrl({
                classpath : "aria.my.great.test.ModuleDefinition"
            }, {
                fn : this.moduleControllerLoaded,
                scope : this
            }, false);
        },

        /**
         * Callback of testAsyncModuleControllerClass: verifies the generated
         * controller/flow classes and interfaces, then disposes everything and
         * ends the asynchronous test.
         */
        moduleControllerLoaded : function (res) {
            this.assertTrue(aria.utils.Type.isFunction(aria.my.great.test.IModuleDefinition));
            this.assertTrue(aria.utils.Type.isFunction(aria.my.great.test.IModuleDefinitionFlow));
            this.assertTrue(aria.utils.Type.isFunction(aria.my.great.test.ModuleDefinitionFlow));
            this.assertTrue(aria.utils.Type.isFunction(aria.my.great.test.ModuleDefinition));
            this.assertTrue(typeof res.moduleCtrl == "object", "Instantiating generated Module didn't produce an object");
            this.assertTrue(aria.utils.Type.isInstanceOf(res.moduleCtrlPrivate, "aria.my.great.test.ModuleDefinition"), "Instantiated object is not of the correct type");
            this.assertTrue(aria.utils.Type.isInstanceOf(res.moduleCtrl, "aria.my.great.test.IModuleDefinition"), "Instantiated object is not of the correct type");
            res.moduleCtrlPrivate.$dispose();
            Aria.dispose("aria.my.great.test.ModuleDefinition");
            Aria.dispose("aria.my.great.test.IModuleDefinition");
            Aria.dispose("aria.my.great.test.IModuleDefinitionFlow");
            Aria.dispose("aria.my.great.test.ModuleDefinitionFlow");
            this.notifyTestEnd("testAsyncModuleControllerClass");
        },

        /**
         * Test that a module controller can be generated correctly, with flow and interfaces, with right names.
         */
        testModuleControllerGeneratedWithFlowWithRightNames : function () {
            var generator = aria.ext.filesgenerator.Generator;
            var fileInfo = generator.generateModuleCtrl("amadeus.booking.hotel.HotelModule", true);
            this.assertEquals(fileInfo.length, 4, "The incorrect number of files were generated. There should be the Ctrl, ICtrl, Flow, IFlow");
            // idx counts the recognized file types; it must reach 4 below.
            var idx = 0;
            for (var i = 0; i < 4; i++) {
                if (fileInfo[i].type == generator.TYPE_FLOWCONTROLLERINTERFACE) {
                    idx++;
                    this.assertEquals(fileInfo[i].classpath, "amadeus.booking.hotel.IHotelModuleFlow");
                }
                if (fileInfo[i].type == generator.TYPE_MODULECONTROLLERINTERFACE) {
                    idx++;
                    this.assertEquals(fileInfo[i].classpath, "amadeus.booking.hotel.IHotelModule");
                }
                if (fileInfo[i].type == generator.TYPE_MODULECONTROLLER) {
                    idx++;
                    this.assertEquals(fileInfo[i].classpath, "amadeus.booking.hotel.HotelModule");
                }
                if (fileInfo[i].type == generator.TYPE_FLOWCONTROLLER) {
                    idx++;
                    this.assertEquals(fileInfo[i].classpath, "amadeus.booking.hotel.HotelModuleFlow");
                }
            }
            this.assertEquals(idx, 4, "The right number of files were generated, but not the right type of files. There should be the Ctrl, ICtrl, Flow, IFlow");
        },

        /**
         * Test that a template can be generated correctly, with script and style, with the right names.
         */
        testHtmlTemplateGeneratedWithScriptWithRightNames : function () {
            var generator = aria.ext.filesgenerator.Generator;
            var fileInfo = generator.generateHtmlTemplate("amadeus.booking.hotel.Search", true, true);
            this.assertEquals(fileInfo.length, 3, "The incorrect number of files were generated. There should be the template, script, css");
            // idx counts the recognized file types; it must reach 3 below.
            var idx = 0;
            for (var i = 0; i < 3; i++) {
                if (fileInfo[i].type == generator.TYPE_HTMLTEMPLATE) {
                    idx++;
                    this.assertEquals(fileInfo[i].classpath, "amadeus.booking.hotel.Search");
                }
                if (fileInfo[i].type == generator.TYPE_CSSTEMPLATE) {
                    idx++;
                    this.assertEquals(fileInfo[i].classpath, "amadeus.booking.hotel.SearchStyle");
                }
                if (fileInfo[i].type == generator.TYPE_TEMPLATESCRIPT) {
                    idx++;
                    this.assertEquals(fileInfo[i].classpath, "amadeus.booking.hotel.SearchScript");
                }
            }
            this.assertEquals(idx, 3, "The right number of files were generated, but not the right type of files. There should be the template, script, css");
        },

        /**
         * Test the content feature of the generateHtmlTemplate method.
         */
        testHtmlTemplateGeneratedWithContent : function () {
            var generator = aria.ext.filesgenerator.Generator;
            var myClasspath = generator.getUniqueClasspathIn("my.package");
            // Split "my.package.SomeName" into its package and class parts.
            var strPackage = myClasspath.substr(0, 10);
            var className = myClasspath.substr(11);
            var content = "{macro main()}my content{/macro}";
            this.assertTrue(strPackage === "my.package", "Generated classpath is in wrong package");
            this.assertTrue(/^[A-Z]\w*$/.test(className) === true, "Generated class name is not a class name.");
            var fileInfo = generator.generateHtmlTemplate(myClasspath, false, false, content);
            this.assertEquals(fileInfo.length, 1, "The incorrect number of files were generated. There should be only the template");
            // content should be available in result template
            this.assertTrue(/\{macro main\(\)\}my content\{\/macro\}/.test(fileInfo[0].content), "Content is not available in generated template");
            // there should be only one main macro
            this.assertTrue(fileInfo[0].content.match(/\{macro main\(/).length == 1, "There can be only one ... macro main");
        },

        /**
         * Test the content feature of generateFile for HTML templates.
         * (Note: method name has a historical typo — "Hml" for "Html".)
         */
        testHmlTemplateWithContent : function () {
            var generator = aria.ext.filesgenerator.Generator;
            var myClasspath = generator.getUniqueClasspathIn("my.package");
            // Split "my.package.SomeName" into its package and class parts.
            var strPackage = myClasspath.substr(0, 10);
            var className = myClasspath.substr(11);
            this.assertTrue(strPackage === "my.package", "Generated classpath is in wrong package");
            this.assertTrue(/^[A-Z]\w*$/.test(className) === true, "Generated class name is not a class name.");
            var finalTemplate = generator.generateFile(generator.TYPE_HTMLTEMPLATE, {
                $classpath : myClasspath,
                content : "{macro main()}my content{/macro}"
            });
            // content should be available in result template
            this.assertTrue(/\{macro main\(\)\}my content\{\/macro\}/.test(finalTemplate.content), "Content is not available in generated template");
            // there should be only one main macro
            this.assertTrue(finalTemplate.content.match(/\{macro main\(/).length == 1, "There can be only one ... macro main");
        }
    }
});
|
scala-steward/csw
|
examples/src/main/scala/example/tutorial/moderate/shared/SampleValidation.scala
|
<reponame>scala-steward/csw
package example.tutorial.moderate.shared
import csw.params.commands.CommandIssue.{MissingKeyIssue, ParameterValueOutOfRangeIssue, UnsupportedCommandIssue}
import csw.params.commands.CommandResponse.{Accepted, Invalid, ValidateCommandResponse}
import csw.params.commands.{ControlCommand, Setup}
import csw.params.core.models.Id
/**
 * Command validation shared by the sample Assembly and HCD.
 * Only Setup commands are supported; sleep and cancel commands additionally
 * get their parameters checked.
 */
object SampleValidation {
  import SampleInfo._

  /** Validates a command sent to the sample Assembly. */
  def doAssemblyValidation(runId: Id, command: ControlCommand): ValidateCommandResponse =
    command match {
      case s: Setup =>
        doAssemblySetupValidation(runId, s)
      case _ => // was `case a =>` with an unused binder
        Invalid(runId, UnsupportedCommandIssue("Sample assembly only supports Setup commands."))
    }

  // Accepts the known Assembly commands; sleep/cancel get parameter checks.
  private def doAssemblySetupValidation(runId: Id, setup: Setup): ValidateCommandResponse =
    setup.commandName match {
      case `sleep` =>
        validateSleep(runId, setup)
      case `cancelLongCommand` =>
        validateCancel(runId, setup)
      case `immediateCommand` | `shortCommand` | `mediumCommand` | `longCommand` | `complexCommand` =>
        Accepted(runId)
      case _ =>
        Invalid(runId, UnsupportedCommandIssue(s"Command: ${setup.commandName.name} is not supported for sample Assembly."))
    }

  /** Validates a command sent to the sample HCD. */
  def doHcdValidation(runId: Id, command: ControlCommand): ValidateCommandResponse =
    command match {
      case s: Setup =>
        doHcdSetupValidation(runId, s)
      case _ =>
        Invalid(runId, UnsupportedCommandIssue("Sample HCD only supports Setup commands."))
    }

  //#validate
  private def doHcdSetupValidation(runId: Id, setup: Setup): ValidateCommandResponse =
    setup.commandName match {
      case `hcdSleep` =>
        validateSleep(runId, setup)
      case `hcdCancelLong` =>
        validateCancel(runId, setup)
      case `hcdShort` | `hcdMedium` | `hcdLong` =>
        Accepted(runId)
      case _ =>
        Invalid(runId, UnsupportedCommandIssue(s"Command: ${setup.commandName.name} is not supported for sample HCD."))
    }

  // A sleep Setup must carry sleepTimeKey with a value below maxSleep.
  private def validateSleep(runId: Id, setup: Setup): ValidateCommandResponse =
    if (setup.exists(sleepTimeKey)) {
      val sleepTime: Long = setup(sleepTimeKey).head
      if (sleepTime < maxSleep)
        Accepted(runId)
      else
        // Fixed: this message previously lacked the `s` interpolator, so the
        // literal text "$maxSleep" was reported instead of the actual limit.
        Invalid(runId, ParameterValueOutOfRangeIssue(s"sleepTime must be < $maxSleep"))
    }
    else {
      Invalid(runId, MissingKeyIssue(s"required sleep command key: $sleepTimeKey is missing."))
    }
  //#validate

  // A cancel Setup must carry cancelKey.
  private def validateCancel(runId: Id, setup: Setup): ValidateCommandResponse =
    if (setup.exists(cancelKey)) {
      Accepted(runId)
    }
    else {
      Invalid(runId, MissingKeyIssue(s"required cancel command key: $cancelKey is missing."))
    }
}
|
BronchoDeveloper/lice-spring
|
spring-lice-test/src/main/java/com/lice/AOP/AOPService.java
|
<filename>spring-lice-test/src/main/java/com/lice/AOP/AOPService.java
package com.lice.AOP;
import org.springframework.stereotype.Service;
/**
* description: AOPService <br>
* date: 2019/10/3 23:06 <br>
* author: lc <br>
* version: 1.0 <br>
*/
@Service
public class AOPService {

    // Target method to be intercepted by the AOP advice.
    public void pointCut(){
        System.out.println("AOPService pointCut is Executed.....");
    }
}
|
benravago/fx.html
|
testsuite/src/domts/level1/core/HC_Node_Get_Owner_Document_Test.java
|
package domts.level1.core;
import org.w3c.dom.*;
import java.util.*;
import org.junit.jupiter.api.*;
import static org.junit.jupiter.api.Assertions.*;
/**
* Evaluate Node.ownerDocument on the second "p" element.
* @author <NAME>
* @see <a href="http://www.w3.org/TR/1998/REC-DOM-Level-1-19981001/level-one-core#node-ownerDoc">http://www.w3.org/TR/1998/REC-DOM-Level-1-19981001/level-one-core#node-ownerDoc</a>
* @see <a href="http://www.w3.org/Bugs/Public/show_bug.cgi?id=251">http://www.w3.org/Bugs/Public/show_bug.cgi?id=251</a>
*/
/**
 * Checks Node.ownerDocument on the second "p" element: the owner's document
 * element must be the test document's root ("svg" for SVG content, "html"
 * otherwise).
 */
class HC_Node_Get_Owner_Document_Test extends domts.DOMTestCase {

    @BeforeEach
    void setup() {
        // check if loaded documents are supported for content type
        String contentType = getContentType();
        preload(contentType, "hc_staff", false);
    }

    @Test
    @DisplayName("http://www.w3.org/2001/DOM_Test_Suite/level1/core/hc_nodegetownerdocument")
    void run() throws Throwable {
        Document doc = (Document) load("hc_staff", false);
        // The second paragraph of the document under test.
        Node secondParagraph = doc.getElementsByTagName("p").item(1);
        Document ownerDocument = secondParagraph.getOwnerDocument();
        String elementName = ownerDocument.getDocumentElement().getNodeName();
        if ("image/svg+xml".equals(getContentType())) {
            assertEquals("svg", elementName, "svgNodeName");
        } else {
            assertTrue(equalsAutoCase("element", "html", elementName), "ownerDocElemTagName");
        }
    }
}
|
tseiiti/ruby_diversos
|
ruby-and-ms-access/database_access.rb
|
<gh_stars>0
# Clear the console window (Windows-only helper script, uses ADO via WIN32OLE).
system("cls")
require 'win32ole'
# Minimal ActiveRecord-like base class backed by an MS Access (.mdb) database
# accessed through ADO via WIN32OLE (Windows only).  Subclasses declare their
# columns with #add_column and then call super; the .mdb file and the table
# are created automatically on first use.
class ActiveRecord
  attr_accessor :mdb, :table, :columns

  def initialize()
    # database file shares the script's base name (foo.rb -> foo.mdb)
    @mdb = File.basename(__FILE__).gsub('.rb', '.mdb')
    # table name (derived from the class name)
    @table = self.class.name.downcase
    # discover the table's columns (fields)
    get_columns()
  end

  # Declares a column, translating the generic type name into the matching
  # Access column type.
  def add_column(column_name, column_type)
    @columns_hash = Hash.new if @columns_hash.nil?
    column_type = 'varchar' if column_type == 'string'
    column_type = 'longtext' if column_type == 'text'
    column_type = 'double' if column_type == 'decimal'
    column_type = 'long' if column_type == 'integer'
    @columns_hash["#{column_name}"] = "#{column_type}"
  end

  # def tables
  #   tables = []
  #   @catalog.tables.each {|t| tables << t.name if t.type == "TABLE" }
  #   tables
  # end

  # Inserts a new row, or updates the row previously selected via #set_update.
  def save
    if @update_id.nil? || @update_id < 1
      gen_insert()
    else
      gen_update()
    end
  end

  # Fetches the first row of the table.
  def first
    select_execute("select top 1 * from #{@table};")
  end

  # Fetches every row of the table.
  def all
    select_execute("select * from #{@table};")
  end

  # Fetches the rows matching the given SQL condition string.
  def where(condicion)
    select_execute("select * from #{@table} where #{condicion};")
    # if @recordset.count == 1
    #   get_id()
    # end
  end

  # Marks the row with the given id for update; the next #save will UPDATE it.
  def set_update(id)
    @update_id = id
    where("#{@table}_id = #{@update_id}")
  end

  # Fetches the most recently inserted row (highest id).
  def last
    select_execute("select top 1 * from #{@table} order by #{@table}_id desc;")
  end

  # Ad-hoc debugging helper: dumps the discovered fields.
  def x
    puts ''
    puts @fields
    puts ''
    # gen_update
    # where("valor = 201.25")
    # puts [*(' '..'z')].sample(60).join
  end

  # private methods
  private

  # def get_id
  #   @recordset.first.each do |first_id|
  #     @update_id = first_id
  #     break
  #   end
  # end

  # Creates the table with an autonumber primary key, the declared columns and
  # created_at/updated_at timestamp columns.
  def create_table
    sql = "create table #{@table} (#{@table}_id COUNTER"
    # append the other declared columns
    @columns_hash.each do |key, typ|
      sql += ",#{key} #{typ}"
    end
    sql += ",created_at datetime default now(),updated_at datetime default now());"
    # clear the declared columns so they are not reused on a later update
    @columns_hash = Hash.new
    # create the table
    execute_query(sql)
  end

  # Discovers the table's column names/types via a probe query, creating the
  # .mdb file and the table on demand, and defines an accessor per column.
  def get_columns
    # @catalog = WIN32OLE.new("ADOX.Catalog")
    # @catalog.ActiveConnection = @connection
    begin
      open_connection()
    rescue
      # database file does not exist yet: create it, then retry
      @catalog = WIN32OLE.new('ADOX.Catalog')
      @catalog.Create "Provider=Microsoft.Jet.OLEDB.4.0;Data Source=#{@mdb}"
      get_columns()
    end
    begin
      open_connection()
      sql ="select top 1 * from #{@table};"
      recordset = WIN32OLE.new('ADODB.Recordset')
      recordset.Open(sql, @connection)
      @fields = Hash.new
      recordset.Fields.each do |field|
        @fields[field.Name] = field.Type
        self.class.send(:attr_accessor, field.Name)
      end
      @columns = @fields.keys
      @connection.Close
    rescue
      # table does not exist yet: create it, then retry
      create_table()
      get_columns()
    end
  end

  # Returns a result set holding the id of the last inserted row.
  def last_inserted
    execute_query("select top 1 #{@table}_id from #{@table} order by #{@table}_id desc;")
  end

  # Builds and runs an UPDATE from the instance variables currently set,
  # refreshing updated_at automatically.
  def gen_update
    cols = []
    @fields.each do |key, typ|
      value = self.instance_variable_get("@#{key}").to_s
      next if key =="#{@table}_id" || key == "created_at" || (value.empty? && key != "updated_at")
      val = ''
      if key == "updated_at"
        val = "'#{Time.now.strftime("%d/%m/%Y %H:%M:%S")}'"
      elsif [5, 6].include?(typ)
        # ADO field types 5/6 are treated as numeric: emit without quotes
        val = "#{value.to_f}"
      else
        val = "'#{value}'"
      end
      cols << key + ' = ' + val
    end
    cols = cols * ", "
    sql = "update #{@table} set #{cols} where #{@table}_id = #{@update_id};"
    puts sql
    execute_query(sql)
    @update_id = nil
  end

  # Builds and runs an INSERT from the instance variables currently set; the
  # id and timestamp columns are filled in by the database defaults.
  def gen_insert
    cols = []
    vals = []
    @fields.each do |key, typ|
      next if key == "#{@table}_id" || key == "created_at" || key == "updated_at"
      cols << key
      value = instance_variable_get("@#{key}").to_s
      unless value.empty?
        # reset the attribute so it is not accidentally reused on the next save
        remove_instance_variable("@#{key}")
      end
      if typ == 7 and value.empty?
        # ADO field type 7 is treated as a date: default to today when unset
        value = "#{Time.now.strftime("%d/%m/%Y")}"
      end
      if [5, 6].include?(typ)
        vals << "#{value.to_f}"
      else
        vals << "'#{value}'"
      end
    end
    cols = cols * ", "
    vals = vals * ", "
    sql = "insert into #{@table} (#{cols}) values (#{vals});"
    puts sql
    execute_query(sql)
  end

  # Logs and runs a SELECT, then dumps the field names and the fetched rows.
  def select_execute(sql)
    puts sql
    execute_query(sql)
    puts @fields.keys.inspect
    puts @recordset.inspect
  end

  # Runs an arbitrary SQL statement.  SELECTs populate @recordset with the
  # fetched rows (array of row arrays); other statements execute directly.
  # A fresh connection is opened and closed per call.
  def execute_query(sql)
    open_connection()
    if sql[0..5] =="select"
      recordset = WIN32OLE.new('ADODB.Recordset')
      recordset.Open(sql, @connection)
      begin
        # GetRows returns column-major data; transpose into rows
        @recordset = recordset.GetRows.transpose
      rescue
        # no rows fetched
        @recordset = []
      end
      recordset.Close
    else
      @connection.Execute(sql)
    end
    @connection.Close
    puts ""
  end

  # Opens an ADO connection to the .mdb file.
  def open_connection
    connection_string = 'Provider=Microsoft.Jet.OLEDB.4.0;Data Source='
    connection_string << @mdb
    @connection = WIN32OLE.new('ADODB.Connection')
    @connection.Open(connection_string)
  end
end
# Example model: a "teste" table with name/description/date/value columns
# (column names are Portuguese: nome, descricao, data, valor).
class Teste < ActiveRecord
  def initialize()
    add_column('nome', 'string')
    add_column('descricao', 'text')
    add_column('data', 'date')
    add_column('valor', 'decimal')
    super
  end
end
f = Teste.new
# Sample usage kept for reference: create, query and update a row.
# f.nome ="teste " + [*('0'..'9')].sample(2).join
# f.descricao = [*('a'..'z'), ' '].sample(80).join.capitalize
# f.data = Time.now.strftime("%d/%m/%Y")
# f.valor = Random.rand(100...99999) / 100.00
# f.save
# f.where("valor = 620.05")
# f.valor = 222.22
# f.save
# f.first
# Dump every row, then print the discovered fields.
f.all
f.x
|
SteveKunG/Indicatia
|
common/src/main/java/com/stevekung/indicatia/utils/EnchantedSkullTileEntityRenderer.java
|
package com.stevekung.indicatia.utils;
import java.util.Arrays;
import com.mojang.authlib.GameProfile;
import com.mojang.blaze3d.vertex.PoseStack;
import com.mojang.blaze3d.vertex.VertexConsumer;
import net.minecraft.client.model.SkullModelBase;
import net.minecraft.client.renderer.MultiBufferSource;
import net.minecraft.client.renderer.RenderType;
import net.minecraft.client.renderer.blockentity.SkullBlockRenderer;
import net.minecraft.client.renderer.entity.ItemRenderer;
import net.minecraft.world.level.block.SkullBlock;
/**
 * Renders a skull model into the given buffer, optionally with an enchantment
 * glint, mirroring vanilla skull rendering (centered, flipped, animated).
 */
public class EnchantedSkullTileEntityRenderer
{
    public static void render(GameProfile gameProfile, float rotationYaw, float mouthAnimation, SkullBlock.Type type, PoseStack poseStack, MultiBufferSource multiBufferSource, int combinedLight, int combinedHurt, SkullModelBase skullModelBase, RenderType renderType, boolean glint)
    {
        poseStack.pushPose();
        // Center on the block and mirror, as vanilla skull rendering does.
        poseStack.translate(0.5D, 0.0D, 0.5D);
        poseStack.scale(-1.0F, -1.0F, 1.0F);
        skullModelBase.setupAnim(mouthAnimation, rotationYaw, 0.0F);

        // Without a profile, fall back to the default texture registered for
        // this skull type; otherwise use the caller-supplied render type.
        VertexConsumer vertexConsumer = gameProfile == null
                ? ItemRenderer.getArmorFoilBuffer(multiBufferSource, RenderType.entityCutoutNoCullZOffset(SkullBlockRenderer.SKIN_BY_TYPE.get(type)), false, glint)
                : ItemRenderer.getFoilBufferDirect(multiBufferSource, renderType, false, glint);

        skullModelBase.renderToBuffer(poseStack, vertexConsumer, combinedLight, combinedHurt, 1.0F, 1.0F, 1.0F, 1.0F);
        poseStack.popPose();
    }

    /** Returns true when the given skull type is one of the vanilla types. */
    public static boolean isVanillaHead(SkullBlock.Type skullType)
    {
        for (SkullBlock.Types vanilla : SkullBlock.Types.values())
        {
            if (skullType == vanilla)
            {
                return true;
            }
        }
        return false;
    }
}
|
kestred/panda3d
|
direct/src/plugin/p3dIntObject.cxx
|
<reponame>kestred/panda3d<filename>direct/src/plugin/p3dIntObject.cxx<gh_stars>1-10
// Filename: p3dIntObject.cxx
// Created by: drose (30Jun09)
//
////////////////////////////////////////////////////////////////////
//
// PANDA 3D SOFTWARE
// Copyright (c) Carnegie Mellon University. All rights reserved.
//
// All use of this software is subject to the terms of the revised BSD
// license. You should have received a copy of this license along
// with this source code in a file named "LICENSE."
//
////////////////////////////////////////////////////////////////////
#include "p3dIntObject.h"
////////////////////////////////////////////////////////////////////
//     Function: P3DIntObject::Constructor
//       Access: Public
//  Description: Creates a P3D object wrapping the indicated
//               integer value.
////////////////////////////////////////////////////////////////////
P3DIntObject::
P3DIntObject(int value) : _value(value) {
}
////////////////////////////////////////////////////////////////////
//     Function: P3DIntObject::Copy Constructor
//       Access: Public
//  Description: Makes a new P3DIntObject holding the same integer
//               value as the original.
////////////////////////////////////////////////////////////////////
P3DIntObject::
P3DIntObject(const P3DIntObject &copy) :
  P3DObject(copy),
  _value(copy._value)
{
}
////////////////////////////////////////////////////////////////////
//     Function: P3DIntObject::get_type
//       Access: Public, Virtual
//  Description: Returns the fundamental type of this kind of
//               object: always P3D_OT_int.
////////////////////////////////////////////////////////////////////
P3D_object_type P3DIntObject::
get_type() {
  return P3D_OT_int;
}
////////////////////////////////////////////////////////////////////
//     Function: P3DIntObject::get_bool
//       Access: Public, Virtual
//  Description: Returns the object value coerced to a boolean:
//               true for any nonzero value, false for zero.
////////////////////////////////////////////////////////////////////
bool P3DIntObject::
get_bool() {
  return (_value != 0);
}
////////////////////////////////////////////////////////////////////
//     Function: P3DIntObject::get_int
//       Access: Public, Virtual
//  Description: Returns the object value coerced to an integer:
//               the wrapped value, unchanged.
////////////////////////////////////////////////////////////////////
int P3DIntObject::
get_int() {
  return _value;
}
////////////////////////////////////////////////////////////////////
//     Function: P3DIntObject::make_string
//       Access: Public, Virtual
//  Description: Fills the indicated C++ string object with the
//               value of this object coerced to a string (decimal
//               formatting via ostringstream).
////////////////////////////////////////////////////////////////////
void P3DIntObject::
make_string(string &value) {
  ostringstream strm;
  strm << _value;
  value = strm.str();
}
|
lechium/tvOS145Headers
|
usr/libexec/dmd/DMDUpdateEnqueuedCommandsOperation.h
|
//
// Generated by classdumpios 1.0.1 (64 bit) (iOS port by DreamDevLost)(Debug version compiled Sep 26 2020 13:48:20).
//
// Copyright (C) 1997-2019 <NAME>.
//
#import "DMDTaskOperation.h"
// NOTE(review): this header was recovered via class-dump; the selector
// semantics below are inferred from names only and are not confirmed
// against the binary.
@interface DMDUpdateEnqueuedCommandsOperation : DMDTaskOperation
{
}

// Presumably validates an incoming request before the operation runs — verify.
+ (_Bool)validateRequest:(id)arg1 error:(id *)arg2; // IMP=0x0000000100068bc0
+ (id)requiredEntitlements; // IMP=0x0000000100068a8c
+ (id)whitelistedClassesForRequest; // IMP=0x0000000100068a50
- (void)runWithRequest:(id)arg1; // IMP=0x0000000100068aa0
- (unsigned long long)queueGroup; // IMP=0x0000000100068a98
@end
|
Antholoj/netbeans
|
ide/jumpto/src/org/netbeans/modules/jumpto/type/FilteredListModel.java
|
<reponame>Antholoj/netbeans<filename>ide/jumpto/src/org/netbeans/modules/jumpto/type/FilteredListModel.java
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.netbeans.modules.jumpto.type;
import java.util.BitSet;
import javax.swing.ListModel;
import javax.swing.SwingUtilities;
import javax.swing.event.ListDataEvent;
import javax.swing.event.ListDataListener;
import org.netbeans.api.annotations.common.NonNull;
import org.netbeans.api.annotations.common.NullAllowed;
import org.netbeans.modules.jumpto.common.Models.Filter;
/**
 * A lazily filtered, read-only view over a delegate {@link ListModel}.
 * Elements of the delegate are tested against a {@link Filter} only when
 * first requested through {@link #getElementAt(int)}; untested indices may
 * temporarily yield {@code defaultValue}.  When elements turn out not to
 * match, the model shrinks and fires interval-removed events, scheduled via
 * {@link SwingUtilities#invokeLater(Runnable)}.
 *
 * <p>Like Swing models in general, this class is meant to be used from the
 * event-dispatch thread only.
 */
final class FilteredListModel implements ListModel, Runnable, ListDataListener {
    /** Sentinel: the value for this external index has not been tested yet. */
    private static final int NOT_TESTED = Short.MIN_VALUE - 1;
    /** Sentinel: the default value has been returned for this external index. */
    private static final int EMPTY_VALUE = Short.MIN_VALUE - 2;

    /** skips extensive asserts - needed for performance tests */
    private static final boolean skipExpensiveAsserts = Boolean.getBoolean ("org.openide.explorer.view.LazyListModel.skipExpensiveAsserts"); // NOI18N


    private boolean log;
    private ListModel listModel;
    private Filter filter;
    /** the value to return when nothing else can be returned */
    private Object defaultValue;
    /** simple event listener list */
    private javax.swing.event.EventListenerList list = new javax.swing.event.EventListenerList ();

    /** the size of the original list we now know it has */
    private int originalSize;
    /** the size we currently pretend to have */
    private int size;
    /** maps an external index to our internal one
     * (NOT_TESTED, means it has not been tested yet, EMPTY_VALUE means for this external index
     * we have returned default value)
     */
    private int[] external;
    /** set with marked values where external is different than EMPTY_VALUE or NOT_TESTED */
    private BitSet checked;
    /** dirty means that we should really update assumptions */
    private boolean markDirty;

    private FilteredListModel (ListModel m, Filter f, Object defaultValue) {
        this.listModel = m;
        this.filter = f;
        this.defaultValue = defaultValue;

        // JST-PENDING: Weak or not?
        m.addListDataListener (this);
    }

    /** Returns the filter this model applies to the delegate's elements. */
    final Filter getFilter () {
        return filter;
    }

    /** Makes itself dirty and schedules an update.
     */
    private void markDirty () {
        this.markDirty = true;
        SwingUtilities.invokeLater(this);
    }

    /** When executed, updateYourAssumeptions.
     */
    @Override
    public void run () {
        if (!markDirty) {
            return;
        }

        markDirty = false;
        if (log) {
            System.err.println("updateYourAssumeptions ();"); // NOI18N
        }
        updateYourAssumeptions ();
    }

    /** Notifies removal of the interval from (inclusive) to (exclusive) and
     * updates the internal structures.
     *
     * <p>!!! as a side effect updates size !!!
     */
    private void notifyRemoval (int from, int to) {
        ListDataEvent ev = new ListDataEvent (
            this, ListDataEvent.INTERVAL_REMOVED, from, to - 1
        );
        removeInterval (external, from, to);
        int cnt = to - from;
        size -= cnt;

        regenerateCheckedBitSet ();
        fireChange (ev);
    }

    /** Rebuilds {@link #checked} so a bit is set exactly where the external
     * index maps to a real (non-sentinel) internal index. */
    private void regenerateCheckedBitSet () {
        checked = new BitSet (size);
        for (int i = 0; i < size; i++) {
            if (external[i] >= 0) {
                checked.set (i);
            }
        }
    }

    /** Like {@code external[index]}, but with virtual boundary values at
     * {@code -1} and {@code size} so callers can iterate one past each end. */
    private int getExternal (int index) {
        if (index == size) {
            return originalSize;
        }
        if (index < 0) {
            return -1;
        }
        return external[index];
    }

    /** Can be called to ask the LazyListModel to update its assumptions,
     * especially assumptions about the size to match its current knowledge.
     */
    final void updateYourAssumeptions () {
        if (external == null) {
            return;
        }

        int i = 0;
        LOOP: while (i < size) {
            // skip over indices already resolved to real elements
            while (getExternal (i) >= 0 && i < size) {
                i++;
            }
            if (i == size) {
                break;
            }

            if (getExternal (i) == NOT_TESTED) {
                int minusOneIndex = i - 1;
                while (i < size && getExternal (i) == NOT_TESTED) {
                    i++;
                }

                int count = i - minusOneIndex - 1;
                int from = getExternal (minusOneIndex) + 1;
                int to = getExternal (i);

                assert from >= 0 : "Value at " + minusOneIndex + "(" + from + ") must be greater than minus one"; // NOI18N
                assert to >= 0 : "Value at " + i + "must be greater than minus one but was: " + to; // NOI18N

                assert to >= from : "Must be true: " + to + " >= " + from; // NOI18N

                int howMuch = count - (to - from);
                if (howMuch > 0) {
                    // we need to notify some kind of removal
                    notifyRemoval (i - howMuch, i);
                    i -= howMuch;
                }
            } else {
                int minusTwoIndex = i;
                while (i < size && getExternal (i) == EMPTY_VALUE) {
                    i++;
                }
                notifyRemoval (minusTwoIndex, i);
                i = minusTwoIndex;
            }
        }

        assert externalContraints () : "Constraints failed"; // NOI18N
    }

    /** Assertion helper verifying the invariants of {@link #external} and
     * {@link #checked}; always returns {@code true} so it can be used in an
     * {@code assert}. */
    private boolean externalContraints () {
        assert external != null : "Not null"; // NOI18N
        assert external.length >= size : "Length " + external.length + " >= " + size; // NOI18N
        if (!skipExpensiveAsserts) {
            for (int i = 1; i < size; i++) {
                assert external[i - 1] != NOT_TESTED || external[i] != EMPTY_VALUE : "There cannot be empty value after not tested value"; // NOI18N
                assert external[i - 1] != EMPTY_VALUE || external[i] != NOT_TESTED : "Not tested cannot immediatelly follow empty value"; // NOI18N
                assert external[i] < 0 || external[i] > external[i - 1] : "If valid index it has to be greater: " + i; // NOI18N
                assert external[i] < 0 == !checked.get (i) : "external and checked must be consistent: " + i; // NOI18N
            }
        }
        return true;
    }

    /** Removes an interval from array */
    private static void removeInterval (int[] array, int index0, int index1) {
        assert index0 < index1 : "Index1 must be bigger than index0: " + index1 + " > " + index0; // NOI18N
        System.arraycopy (array, index1, array, index0, array.length - index1);
    }

    /** Factory method to create new filtering lazy model.
     */
    @NonNull
    static FilteredListModel create (
            @NonNull final ListModel listModel,
            @NonNull final Filter filter,
            @NullAllowed final Object defValue) {
        return new FilteredListModel(listModel, filter, defValue);
    }

    //
    // Model methods.
    //

    @Override
    public void addListDataListener(ListDataListener l) {
        list.add (ListDataListener.class, l);
    }

    @Override
    public void removeListDataListener(ListDataListener l) {
        list.remove (ListDataListener.class, l);
    }

    /** Dispatches the given event to all registered listeners. */
    private void fireChange (ListDataEvent ev) {
        if (list.getListenerCount () == 0) {
            return ;
        }

        Object[] arr = list.getListenerList ();
        for (int i = arr.length - 1; i >= 0; i -= 2) {
            ListDataListener l = (ListDataListener)arr[i];
            switch (ev.getType ()) {
                case ListDataEvent.CONTENTS_CHANGED: l.contentsChanged (ev); break;
                case ListDataEvent.INTERVAL_ADDED: l.intervalAdded (ev); break;
                case ListDataEvent.INTERVAL_REMOVED: l.intervalRemoved (ev); break;
                default:
                    throw new IllegalArgumentException ("Unknown type: " + ev.getType ());
            }
        }
    }

    /** Is this index accepted.
     */
    private boolean accepted (int indx, Object[] result) {
        Object v = listModel.getElementAt (indx);
        if (filter.accept (v)) {
            result[0] = v;
            return true;
        }
        markDirty ();
        return false;
    }

    /** Initialize the bitsets to sizes of the listModel.
     */
    private void initialize () {
        if (checked == null) {
            originalSize = listModel.getSize ();
            size = listModel.getSize ();
            external = new int[size];
            for (int i = 0; i < size; i++) {
                external[i] = NOT_TESTED;
            }
            checked = new BitSet (size);
        }
        assert externalContraints () : "Constraints failed"; // NOI18N
    }

    /** this variable is used from tests to prevent creation of elements in
     * certain cases.
     */
    static Boolean CREATE;

    /** If value is not know for given index and CREATE.get() is Boolean.FALSE it returns defaultValue.
     */
    @Override
    public Object getElementAt(int index) {
        initialize ();

        if (log) {
            System.err.println("model.getElementAt (" + index + ");"); // NOI18N
        }

        if (external[index] >= 0) {
            // we have computed the index, so return it
            return listModel.getElementAt (external[index]);
        }
        if (external[index] == EMPTY_VALUE) {
            // default value needs to be used
            return defaultValue;
        }

        if (CREATE != null && !CREATE.booleanValue()) {
            assert Thread.holdsLock(CREATE) : "Only one thread (from tests) can access this"; // NOI18N
            return defaultValue;
        }

        // JST: Why there is no BitSet.previousSetBit!!!???
        int minIndex = index;
        while (minIndex >= 0 && getExternal (minIndex) < 0) {
            minIndex--;
        }
        int maxIndex;
        if (checked.get (index)) {
            maxIndex = index;
        } else {
            maxIndex = checked.nextSetBit (index);
            if (maxIndex == -1 || maxIndex > size) {
                maxIndex = size;
            }
        }

        int myMinIndex = getExternal (minIndex) + 1; // one after the index of the first non-1 index
        int myMaxIndex = getExternal (maxIndex);

        // FIX: the original assert compared myMaxIndex to itself, which is
        // always true; the intended invariant is myMaxIndex >= myMinIndex.
        assert myMaxIndex >= myMinIndex : "Must be greater"; // NOI18N
        if (myMaxIndex != myMinIndex) {
            int myIndex = myMinIndex + (index - minIndex) - 1;
            if (myIndex >= myMaxIndex) {
                myIndex = myMaxIndex - 1;
            }

            Object[] result = new Object[1];
            if (accepted (myIndex, result)) {
                assert external[index] == NOT_TESTED : "External index " + index + " still needs to be unset: " + external[index];
                external[index] = myIndex;
                checked.set (index);
                return result[0];
            }

            // probe outward (alternating before/after) from the first guess
            boolean checkBefore = true;
            boolean checkAfter = true;
            for (int i = 1; checkAfter || checkBefore; i++) {
                if (checkBefore) {
                    checkBefore = index - i >= minIndex && myIndex - i >= myMinIndex && getExternal (index - i) == NOT_TESTED;
                    if (checkBefore && accepted (myIndex - i, result)) {
                        external[index] = myIndex - i;
                        checked.set (index);
                        return result[0];
                    }
                }
                if (checkAfter) {
                    checkAfter = index + i < maxIndex && myIndex + i < myMaxIndex && getExternal (index + i) == NOT_TESTED;
                    if (checkAfter && accepted (myIndex + i, result)) {
                        external[index] = myIndex + i;
                        checked.set (index);
                        return result[0];
                    }
                }
            }
        }

        markDirty ();

        // set default value for all objects in the interval
        for (int i = minIndex + 1; i < maxIndex; i++) {
            assert external[i] == NOT_TESTED : i + " should not be set: " + external[i]; // NOI18N
            external[i] = EMPTY_VALUE;
        }
        checked.clear (minIndex + 1, maxIndex);
        assert external[index] == EMPTY_VALUE : "Should be asigned in the cycle above"; // NOI18N

        return defaultValue;
    }

    @Override
    public int getSize() {
        initialize ();
        return size;
    }

    //
    // Notifications from the underlaying model
    //

    @Override
    public void contentsChanged (@NonNull final ListDataEvent listDataEvent) {
        if (external == null) {
            return;
        }
        // NOTE(review): this resets to the last known originalSize rather than
        // re-reading listModel.getSize(); assumes the delegate's element count
        // is unchanged on CONTENTS_CHANGED -- verify against callers.
        size = originalSize;
        external = new int[size];
        for (int i = 0; i < size; i++) {
            external[i] = NOT_TESTED;
        }
        checked = new BitSet (size);
        assert externalContraints () : "Constraints failed"; // NOI18N
    }

    @Override
    public void intervalAdded (ListDataEvent listDataEvent) {
        if (external == null) {
            return;
        }
        updateYourAssumeptions ();

        int first = listDataEvent.getIndex0 ();
        int end = listDataEvent.getIndex1 () + 1;
        int len = end - first;

        int newOriginalSize = originalSize + len;
        int newSize = size + len;

        int insert = findExternalIndex (first);
        int[] newExternal = new int[newSize];
        System.arraycopy (external, 0, newExternal, 0, insert);
        for (int i = 0; i < len; i++) {
            newExternal[insert + i] = NOT_TESTED;
        }
        for (int i = insert + len; i < newExternal.length; i++) {
            int v = external[i - len];
            newExternal[i] = v < 0 ? v : v + len;
        }
        external = newExternal;
        size = newSize;
        originalSize = newOriginalSize;

        regenerateCheckedBitSet ();

        fireChange (new ListDataEvent (this, ListDataEvent.INTERVAL_ADDED, insert, insert + len - 1));
        assert externalContraints () : "Constraints failed"; // NOI18N
    }

    /** Finds the appropriate index of given internal index. The state is
     * supposed to be after updateYourAssumeptions => no EMPTY_VALUE
     */
    private int findExternalIndex (int myIndex) {
        int outIndex = 0;
        for (int i = -1; i < size; i++) {
            if (getExternal (i) == NOT_TESTED) {
                outIndex++;
            } else {
                outIndex = getExternal (i);
            }

            if (outIndex >= myIndex) {
                return i;
            }
        }
        return size;
    }

    @Override
    public void intervalRemoved (ListDataEvent listDataEvent) {
        if (external == null) {
            return;
        }
        updateYourAssumeptions ();

        int first = listDataEvent.getIndex0 ();
        int end = listDataEvent.getIndex1 () + 1;
        int len = end - first;

        int newOriginalSize = originalSize - len;

        int f = findExternalIndex (first);
        int e = findExternalIndex (end);

        assert f >= 0 : "First index must be above zero: " + f; // NOI18N
        assert e >= f : "End index must be above first: " + f + " <= " + e; // NOI18N

        int outLen = e - f;

        int[] newExternal = (int[])external.clone ();
        for (int i = e; i < size; i++) {
            int v = external[i];
            newExternal[i - outLen] = v < 0 ? v : v - len;
            checked.set (i - outLen, v >= 0);
        }
        external = newExternal;
        size -= outLen;
        originalSize = newOriginalSize;

        if (outLen != 0) {
            fireChange (new ListDataEvent (this, ListDataEvent.INTERVAL_REMOVED, f, e - 1));
        }
        assert externalContraints () : "Constraints failed"; // NOI18N
    }
}
|
kuiro5/gatsby-starter-default
|
src/components/SaveTheDate/index.js
|
import React from 'react';
import { Flex, Box } from '@rebass/grid';
import './styles.css';
const SaveTheDate = () => {
return (
<div className='save-the-date'>
<div className='save-the-date-overlay' />
<Flex className='save-the-date-container' alignItems={'center'} justifyContent={'center'}>
<Box>
<h3 className='save-the-date-dt'>September 01, 2019</h3>
<h1 className='save-the-date-title'>Dinner. Drinks. Dancing.</h1>
</Box>
</Flex>
</div>
);
};
export default SaveTheDate;
|
sbn-psi/archive-viewer
|
public/feedback/js/feedback.js
|
/*
Copyright (c) 2019, California Institute of Technology ("Caltech").
U.S. Government sponsorship acknowledged.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions must reproduce the above copyright notice, this list of
conditions and the following disclaimer in the documentation and/or other
materials provided with the distribution.
* Neither the name of Caltech nor its operating division, the Jet Propulsion
Laboratory, nor the names of its contributors may be used to endorse or
promote products derived from this software without specific prior written
permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
*/
// Boot the feedback widget (using the global `config`) once the DOM is ready.
var bootFeedback = function () {
    Feedback(config);
};
document.addEventListener("DOMContentLoaded", bootFeedback);
(function( window, document, undefined ) {
if ( window.Feedback !== undefined ) {
return;
}
// function to remove elements, input as arrays (array-like is consumed
// via Array.prototype.pop so plain objects with length also work)
var removeElements = function( remove ) {
    for (var i = 0, len = remove.length; i < len; i++ ) {
        var item = Array.prototype.pop.call( remove );
        if ( item !== undefined ) {
            if (item.parentNode !== null ) { // check that the item was actually added to DOM
                item.parentNode.removeChild( item );
            }
        }
    }
},
// builds the three-dot "loading" indicator shown while sending
loader = function() {
    var div = document.createElement("div"), i = 3;
    div.className = "feedback-loader";

    while (i--) { div.appendChild( document.createElement( "span" )); }
    return div;
},
// removes every child node of el
emptyElements = function( el ) {
    var item;
    while( (( item = el.firstChild ) !== null ? el.removeChild( item ) : false) ) {}
},
// creates an element of the given tag containing only the given text
element = function( name, text ) {
    var el = document.createElement( name );
    el.appendChild( document.createTextNode( text ) );
    return el;
},
sendButton,
// endpoints: reCAPTCHA v3 verification and the feedback mail service
captchaUrl = "https://voight.psi.edu/recaptcha-v3-verify.php",
feedbackUrl = "/email-service/SubmitFeedback",
// modal skeleton elements, reused across open/close cycles
modal = document.createElement("div"),
modalBody = document.createElement("div"),
modalHeader = document.createElement("div"),
modalFooter = document.createElement("div"),
captchaScore = 0;

// placeholder; replaced with the real callback inside Feedback()
window.captchaCallback = function( response ) {};
/**
 * Entry point: wires the "Need Help?" tab and the feedback modal into the
 * page. `options` mirrors the global `config` object (tab appearance,
 * feedback header, additional links, endpoints). Returns the modal
 * control methods (open/close/send/...).
 */
window.Feedback = function( options ) {
    options = options || {};

    // default properties; the nested config objects are guarded first so a
    // partial configuration cannot cause a TypeError below
    options.feedback = options.feedback || {};
    options.tab = options.tab || {};
    options.tab.size = options.tab.size || {};
    options.tab.placement = options.tab.placement || {};

    options.host = options.host || "";
    options.feedback.header = options.feedback.header || "Help Desk";
    // default to "" so the `links !== ""` check below skips cleanly instead
    // of throwing on `links.length` when no links are configured
    options.feedback.additionalLinks = options.feedback.additionalLinks || "";
    options.page = options.page || new window.Feedback.Form();

    var glass = document.createElement("div"),
        returnMethods = {

        // open send feedback modal window
        open: function() {
            options.page.render();
            document.body.appendChild( glass );
            button.disabled = true;

            // modal close button
            var a = element("a", "x");
            a.className = "feedback-close";
            a.onclick = returnMethods.close;
            a.href = "#";

            // build header element
            modalHeader.appendChild( a );
            modalHeader.appendChild( element("h3", options.feedback.header ) );
            modalHeader.className = "feedback-header";

            modalBody.className = "feedback-body";
            emptyElements( modalBody );
            modalBody.appendChild( element("p", "How can we help you? Send us your question or feedback and we will get back to you within 1 business day.") );
            modalBody.appendChild( options.page.dom );

            var links = options.feedback.additionalLinks;
            if ( links !== "" ) {
                var additionalHelp = element("p", "In the meantime, you may find the following links helpful:"),
                    additionalLinks = document.createElement("ul");
                additionalHelp.className = "additionalHelp";
                for (var i = 0; i < links.length; i++) {
                    additionalLinks.insertAdjacentHTML('beforeend', '<li><a href="' + links[i].url + '">' + links[i].title + '</a></li>');
                }
                additionalHelp.insertAdjacentElement("beforeend", additionalLinks);
                modalBody.insertAdjacentElement("beforeend", additionalHelp);
                window.additionalHelp = additionalHelp;
            }

            // Send button (doubles as the reCAPTCHA trigger)
            sendButton = document.createElement("input");
            sendButton.type = "submit";
            sendButton.value = "Send Feedback";
            sendButton.setAttribute("class", "feedback-btn g-recaptcha");
            sendButton.setAttribute("data-callback", "captchaCallback");
            sendButton.setAttribute("id", "recaptcha");

            modalFooter.className = "feedback-footer";
            modalFooter.appendChild( sendButton );

            modal.setAttribute("id", "feedback-form");
            modal.className = "feedback-modal";
            modal.appendChild( modalHeader );
            modal.appendChild( modalBody );
            modal.appendChild( modalFooter );
            document.body.appendChild( modal );

            window.grecaptcha.render("recaptcha", {sitekey: "<KEY>"});
        },

        // swap the Send button into a Close button after a submission
        setupClose: function() {
            emptyElements( modalBody );
            sendButton.disabled = false;
            sendButton.value = "Close";
            sendButton.onclick = function() {
                returnMethods.close();
                return false;
            };
        },

        // close modal window
        close: function() {
            button.disabled = false;
            window.grecaptcha.reset();

            // remove feedback elements
            emptyElements( modalHeader );
            emptyElements( modalFooter );
            removeElements( [ modal, glass ] );

            return false;
        },

        // send data
        send: function( adapter ) {
            // make sure send adapter is of right prototype
            if ( !(adapter instanceof window.Feedback.Send) ) {
                throw new Error( "Adapter is not an instance of Feedback.Send" );
            }

            // `var` added: this previously leaked `data` to the global scope
            var data = options.page.data();

            emptyElements( modalBody );
            modalBody.appendChild( loader() );

            // send data to adapter for processing
            adapter.send( data, function( success ) {
                returnMethods.setupClose();
                modalBody.setAttribute("class", "feedback-body confirmation");
                var message = document.createElement("p");
                if ( success === true ) {
                    message.innerHTML = 'Thank you for making the PDS a better site.<br/>If you provided an email address, a PDS representative will get back to you as soon as possible.';
                } else {
                    message.innerHTML = 'There was an error sending your feedback.<br/>If the problem persists, please email <a href="mailto:<EMAIL>"><EMAIL></a>.';
                }
                modalBody.appendChild( message );
                if ( window.additionalHelp ) {
                    modalBody.appendChild( window.additionalHelp );
                }
            });
        },

        // auto-open the modal when the page is visited with ?feedback=true
        onloadCallback: function() {
            if ( new URLSearchParams(window.location.search).get("feedback") === "true" ) {
                returnMethods.open();
            }
        },

        // invoked by grecaptcha; verifies the token server-side (jQuery
        // $.ajax) and only submits when the returned score clears 0.70
        captchaCallback: function( response ) {
            if ( document.getElementById("feedback-comment").reportValidity() ) {
                $.ajax({
                    type: "POST",
                    url: captchaUrl,
                    data: {response: response},
                    success: function (data) {
                        //console.log(data);
                        captchaScore = parseFloat(data.substring(data.indexOf("float") + 6, data.indexOf("float") + 9));
                        if (captchaScore > 0.70) {
                            options.url = options.url || options.host + feedbackUrl;
                            options.adapter = options.adapter || new window.Feedback.XHR(options.url);
                            emptyElements(modalBody);
                            returnMethods.send(options.adapter);
                        } else {
                            modalBody.setAttribute("class", "feedback-body suspectedBot");
                            document.getElementById("recaptcha").disabled = true;
                            modalBody.insertAdjacentElement("afterbegin", element("p", "Are you a bot? Suspicious behavior detected."));
                        }
                    },
                    error: function (XMLHttpRequest, textStatus, errorThrown) {
                        modalBody.setAttribute("class", "feedback-body captchaError");
                        returnMethods.setupClose();
                        var message = document.createElement("p");
                        message.innerHTML = '<b>Status: </b>' + textStatus + '; <b>Error: </b>' + errorThrown + '<br/>If the problem persists, please email <a href="mailto:<EMAIL>"><EMAIL></a>.';
                        modalBody.insertAdjacentElement("afterbegin", message);
                        if ( window.additionalHelp ) {
                            modalBody.appendChild( window.additionalHelp );
                        }
                    }
                });
            } else {
                return false;
            }
            window.grecaptcha.reset();
        }
    };

    window.onloadCallback = returnMethods.onloadCallback;
    window.captchaCallback = returnMethods.captchaCallback;

    glass.className = "feedback-glass";

    var button = document.createElement("button");
    button.setAttribute("id", "feedback-tab");

    if ( Modernizr.touchevents && window.screen.width < 1025 ) {
        // On touch devices the tab slides away as the user nears the bottom
        // of the page, so it does not cover the footer.
        var $window = $(window),
            docHeight = $(document).height(),
            rafId;

        window.requestAnimationFrame = window.requestAnimationFrame
            || window.mozRequestAnimationFrame
            || window.webkitRequestAnimationFrame
            || window.msRequestAnimationFrame;

        $window.on("scroll", function() {
            if ( $window.scrollTop() + $window.height() > docHeight - 65 ) {
                rafId = window.requestAnimationFrame(function() {
                    var offset = ($window.scrollTop() - 65) * ($window.scrollTop() - 65) * 0.00001;
                    button.style.webkitTransform = "translateY(-" + offset + "px)";
                    button.style.mozTransform = "translateY(-" + offset + "px)";
                    button.style.transform = "translateY(-" + offset + "px)";
                });
            } else {
                window.cancelAnimationFrame(rafId);
                button.style.webkitTransform = "initial";
                button.style.mozTransform = "initial";
                button.style.transform = "initial";
            }
        });
    } else {
        // default properties
        options.tab.label = options.tab.label || "Need Help?";
        options.tab.color = options.tab.color || "#0b3d91";
        options.tab.fontColor = options.tab.fontColor || "#ffffff";
        options.tab.fontSize = options.tab.fontSize || "16";
        options.tab.size.width = options.tab.size.width || "150";
        options.tab.size.height = options.tab.size.height || "60";
        options.tab.placement.side = options.tab.placement.side || "right";
        options.tab.placement.offset = options.tab.placement.offset || "50";

        // helpers that apply the tab configuration to the button element
        var useConfig = {
            setColors: function(el, color, bgColor) {
                el.style.color = color;
                el.style.backgroundColor = bgColor;
            },
            setText: function(el, label, fontSize) {
                var p = document.createElement("p");
                p.append( document.createTextNode(label) );
                if ( fontSize !== "16" ) {
                    if ( !isNaN(fontSize) ) {
                        el.setAttribute("class", "noImage");
                        el.style.fontSize = fontSize + "px";
                    } else {
                        console.log("Invalid value for font size. Please check the configuration file.");
                    }
                }
                el.appendChild(p);
            },
            setDimensions: function(el, width, height) {
                if ( !isNaN(width) && !isNaN(height) ) {
                    el.style.width = width + "px";
                    el.style.height = height + "px";
                } else {
                    if ( isNaN(width) ) {
                        console.log("Invalid value for tab WIDTH. Please check the configuration file.");
                    }
                    if ( isNaN(height) ) {
                        console.log("Invalid value for tab HEIGHT. Please check the configuration file.");
                    }
                }
            },
            calculateAdjustment: function(width, height) {
                return -0.5 * ( Number(width) - Number(height) ) - 5;
            },
            calculateMaxOffset: function(width, height) {
                return [ window.innerHeight - 0.5 * ( Number(width) + Number(height) ), window.innerWidth - Number(width) ];
            },
            setPlacement: function(el, side, offset, maxOffset, adjustment) {
                if ( !isNaN(offset) ) {
                    if ( side === "right" || side === "left" ) {
                        var os = Number(offset) * window.innerHeight / 100,
                            minOffset = -1 * ( Number(adjustment) + 5 ),
                            adjust = ( adjustment !== undefined );
                        if ( os < minOffset ) {
                            el.style.top = minOffset + "px";
                        } else if ( os > Number(maxOffset[0]) ) {
                            el.style.top = maxOffset[0] + "px";
                        } else {
                            el.style.top = offset + "vh";
                        }
                        if ( side === "right" ) {
                            if ( adjust ) {
                                el.style.right = adjustment + "px";
                            }
                        } else {
                            el.setAttribute("class", "left");
                            if ( adjust ) {
                                el.style.left = adjustment + "px";
                            }
                        }
                    } else if (side === "top" || side === "bottom" ) {
                        if ( Number(offset) < 0 ) {
                            el.style.left = "0";
                        } else if ( Number(offset) * window.innerWidth / 100 > Number(maxOffset[1]) ) {
                            el.style.left = maxOffset[1] + "px";
                        } else {
                            el.style.left = offset + "vw";
                        }
                        if ( side === "top" ) {
                            el.setAttribute("class", "top");
                        } else {
                            el.setAttribute("class", "bottom");
                        }
                    } else {
                        console.log("Invalid value for SIDE of screen to place the tab. The valid options " +
                            "are LEFT, RIGHT, TOP, or BOTTOM. Please check the configuration file.");
                    }
                } else {
                    console.log("Invalid value for OFFSET of tab placement. Please check the configuration file.");
                }
            }
        };

        useConfig.setColors(button, options.tab.fontColor, options.tab.color);
        useConfig.setText(button, options.tab.label, options.tab.fontSize);

        var adjustment,
            width = Math.max( Number(options.tab.size.width), Number(options.tab.size.height) ),
            height = Math.min( Number(options.tab.size.width), Number(options.tab.size.height) ),
            defaultWidth = ( width === 150 ),
            defaultHeight = ( height === 60 );
        if ( !defaultWidth || !defaultHeight ) {
            useConfig.setDimensions(button, width, height);
            adjustment = useConfig.calculateAdjustment(width, height);
        }

        var side = options.tab.placement.side.toLowerCase(),
            offset = options.tab.placement.offset,
            maxOffset = useConfig.calculateMaxOffset(width, height);
        if ( offset !== "50" || side !== "right" || adjustment !== undefined ) {
            useConfig.setPlacement(button, side, offset, maxOffset, adjustment);
        }
    }

    button.onclick = returnMethods.open;
    if ( options.appendTo !== null ) {
        ((options.appendTo !== undefined) ? options.appendTo : document.body).appendChild( button );
    }

    return returnMethods;
};
// Base "page" shown inside the feedback modal. Concrete pages (e.g.
// Feedback.Form) override these no-op lifecycle hooks.
window.Feedback.Page = function() {};
window.Feedback.Page.prototype = {
    // remember the DOM fragment this page renders into
    render: function( dom ) {
        this.dom = dom;
    },
    start: function() {},
    close: function() {},
    data: function() {
        // don't collect data from page by default
        return false;
    },
    // validation hook; true means the page may be left
    end: function() { return true; }
};
// Base send adapter; concrete adapters (e.g. Feedback.XHR) override send().
window.Feedback.Send = function() {};
window.Feedback.Send.prototype = {
    send: function() {}
};
// Default feedback form page: name, email, type (a select fed from the
// global config) and a required comment field. A custom `elements`
// descriptor array may be supplied instead.
window.Feedback.Form = function( elements ) {
    this.elements = elements || [
        {
            type: "input",
            id: "feedback-name",
            name: "Name",
            label: "Name",
            required: false
        },
        {
            type: "input",
            id: "feedback-email",
            name: "Email",
            label: "Email",
            required: false
        },
        {
            type: "select",
            id: "feedback-type",
            name: "Type",
            label: "Type",
            // comma separated option list from the page-level config object
            values: config.feedback.feedbackType,
            required: false
        },
        {
            type: "textarea",
            id: "feedback-comment",
            name: "Comment",
            label: "Comment",
            required: true
        }
    ];

    // container the rendered form fields are appended to
    this.dom = document.createElement("div");
    this.dom.className = "feedback-form-container";
};
window.Feedback.Form.prototype = new window.Feedback.Page();

// Build (or rebuild) the form DOM from the element descriptors, storing a
// reference to each created control on its descriptor (item.element).
window.Feedback.Form.prototype.render = function() {
    var i = 0, len = this.elements.length, item;
    emptyElements( this.dom );
    for (; i < len; i++) {
        item = this.elements[ i ];

        var div = document.createElement("div");
        div.classList.add("feedback-input");

        var formEl = document.createElement( item.type );
        formEl.name = item.name;
        formEl.id = item.id;
        if ( item.required ) {
            formEl.required = true;
            div.appendChild( element("label", item.label + ": *"));
        } else {
            div.appendChild( element("label", item.label + ":"));
        }

        if (item.type === "select") {
            // populate the dropdown from the comma separated values list
            var options = item.values.split(",");
            // `var` added: the loop index previously leaked to the global scope
            for (var j = 0; j < options.length; j++) {
                var option = document.createElement("option");
                option.value = option.textContent = options[j];
                formEl.appendChild(option);
            }
        }

        div.appendChild( (item.element = formEl) );
        this.dom.appendChild(div);
    }
    return this;
};
// Form validation: every required field must be non-empty. The first
// offending control is marked with the "feedback-error" class and false is
// returned; otherwise all error markers are cleared and true is returned.
window.Feedback.Form.prototype.end = function() {
    for (var idx = 0, total = this.elements.length; idx < total; idx++) {
        var field = this.elements[ idx ];
        // check that all required fields are entered
        if ( field.required === true && field.element.value.length === 0 ) {
            field.element.className = "feedback-error";
            return false;
        }
        field.element.className = "";
    }
    return true;
};
// Collect the current value of every form field into a plain object keyed
// by field name; the result is cached on the instance and returned.
window.Feedback.Form.prototype.data = function() {
    var collected = {};
    for (var idx = 0, total = this.elements.length; idx < total; idx++) {
        var field = this.elements[ idx ];
        collected[ field.name ] = field.element.value;
    }
    this._data = collected;
    return this._data;
};
// Send adapter that POSTs the collected feedback over XMLHttpRequest.
window.Feedback.XHR = function( url ) {
    this.url = url;
    this.xhr = new XMLHttpRequest();
};
window.Feedback.XHR.prototype = new window.Feedback.Send();
// POST the feedback data to this.url and report success (HTTP 200) or
// failure to the callback.
window.Feedback.XHR.prototype.send = function( data, callback ) {
    var xhr = this.xhr;

    xhr.onreadystatechange = function() {
        if( xhr.readyState == 4 ) {
            callback( (xhr.status === 200) );
        }
    };

    // Assemble the human-readable message body first...
    var content = '';
    for (var key in data) {
        content += key + ': ';
        content += data[key] + '\n';
    }
    content += '\nLocation: ' + window.location.href + '\n';

    // ...then URL-encode it. The request declares
    // application/x-www-form-urlencoded, so unencoded '&', '=', '+' or
    // newlines in user input would previously corrupt the payload.
    var emailData = 'subject=' + encodeURIComponent('Feedback from ' + window.location.hostname) +
        '&content=' + encodeURIComponent(content);

    xhr.open( "POST", this.url, true);
    xhr.setRequestHeader('Content-type','application/x-www-form-urlencoded');
    xhr.send(emailData);
};
})( window, document );
|
waynerv/data-structures-and-algorithms-in-python
|
Reference/progs/stack_app1.py
|
<filename>Reference/progs/stack_app1.py
""" Stack applications, part 1: checking that parentheses are paired,
a suffix (postfix) expression evaluator, and conversion from infix
expressions to suffix expressions.
"""

from stack_list import *
class ESStack(SStack):
    """SStack extended with a depth() query."""

    def depth(self):
        """Return how many elements are currently on the stack."""
        elems = self._elems
        return len(elems)
#############################################
######## Parentheses checker #################
def check_parens(text):
    """Check that all parentheses/brackets/braces in ``text`` pair up.

    Prints a diagnostic message and returns True when everything matches,
    False otherwise.  Fixes over the original version: a closing bracket
    with no opener no longer pops an empty stack (it now reports a
    mismatch), and opening brackets that are never closed are detected
    instead of being silently accepted.
    """
    parens = "()[]{}"
    open_parens = "([{"
    opposite = {')': '(', ']': '[', '}': '{'}

    def parentheses(text):
        # Yield (bracket_char, index) for every bracket character in text.
        i, text_len = 0, len(text)
        while True:
            while i < text_len and text[i] not in parens:
                i += 1
            if i >= text_len:
                return
            yield text[i], i
            i += 1

    st = []  # plain list used as the stack of open brackets
    for pr, i in parentheses(text):
        if pr in open_parens:  # push open parens onto the stack
            st.append(pr)
        elif not st or st.pop() != opposite[pr]:
            # either no opener available, or the wrong kind of opener
            print("Unmatching is found at", i, "for", pr)
            return False
    if st:
        # some opening brackets were never closed
        print("Unmatching is found: unclosed", st[-1])
        return False
    print("All parentheses are correctly matched.")
    return True
################################################
####### Suffix expression evaluator ############
def suffix_exp_evaluator(line):
    """Evaluate a whitespace-separated suffix expression given as a string."""
    items = line.split()
    return suf_exp_evaluator(items)
def suf_exp_evaluator(exp):
    """exp is a list of items representing a suffix expression.

    Evaluates the expression and returns its value.  Raises SyntaxError
    when operands are missing or left over; '/' may raise
    ZeroDivisionError.
    """
    operators = "+-*/"
    # dispatch table replacing the original if/elif chain
    apply_op = {
        '+': lambda left, right: left + right,
        '-': lambda left, right: left - right,
        '*': lambda left, right: left * right,
        '/': lambda left, right: left / right,  # may raise ZeroDivisionError
    }
    st = ESStack()
    for token in exp:
        if token not in operators:
            st.push(float(token))
            continue
        if st.depth() < 2:
            raise SyntaxError("Short of operand(s).")
        right = st.pop()  # second argument
        left = st.pop()   # first argument
        op = apply_op.get(token)
        if op is None:
            # not reachable for single-character tokens; mirrors the
            # original dead `else: break` guard for odd substrings
            break
        st.push(op(left, right))
    if st.depth() == 1:
        return st.pop()
    raise SyntaxError("Extra operand(s).")
# end suf_exp_evaluator
def suffix_exp_calculator():
    """Repeatedly prompt for a suffix expression and print its value,
    stopping when the user enters 'end'.  Evaluation errors are reported
    and the prompt loop continues.
    """
    while True:
        try:
            line = input("Suffix Expression: ")
            if line == "end":
                return
            print(suffix_exp_evaluator(line))
        except Exception as ex:
            print("Error:", type(ex), ex.args)
def demo_suffix():
    """Print the values of a few sample suffix expressions."""
    for expression in ("1", "1 2 +", "1 3 + 2 *", "1 3 + 2 5 - *"):
        print(suffix_exp_evaluator(expression))
#####################################################
##### Transform infix expression to suffix expression
priority = {'(': 1, '+': 3, '-': 3, '*': 5, '/': 5}
infix_operators = "+-*/()"
def tokens(line):
    """Generator yielding the tokens of line one by one; a token is either
    a floating-point literal or a single operator/parenthesis character.

    Only binary operators are handled: unary operators and signed numbers
    are not supported (a '-' always tokenizes as subtraction, except inside
    a negative exponent such as 1e-3, which stays part of the number).
    """
    i, llen = 0, len(line)
    while i < llen:
        # Bug fix: the whitespace skip must be bounds-checked, otherwise a
        # line with trailing whitespace (e.g. "1 + 2 ") raised IndexError.
        while i < llen and line[i].isspace():
            i += 1
        if i >= llen:
            break
        if line[i] in infix_operators:  # operator / parenthesis token
            yield line[i]
            i += 1
            continue
        j = i + 1  # scan the remainder of an operand token
        while (j < llen and not line[j].isspace() and
               line[j] not in infix_operators):
            if ((line[j] == 'e' or line[j] == 'E')  # keep '-' belonging to
                    and j+1 < llen and line[j+1] == '-'):  # a negative exponent
                j += 1
            j += 1
        yield line[i:j]  # emit the operand substring
        i = j
def trans_infix_suffix(line):
    """Translate an infix expression string into a suffix (postfix) token
    list using the shunting-yard algorithm.  Every operator is treated as
    left-associative; raises SyntaxError on unbalanced parentheses.
    """
    stack = []   # pending operators and open parentheses
    result = []  # output tokens in postfix order
    for token in tokens(line):
        if token not in infix_operators:
            result.append(token)          # operands go straight to output
        elif not stack or token == '(':
            stack.append(token)           # '(' (or first operator) is pushed
        elif token == ')':
            while stack and stack[-1] != '(':
                result.append(stack.pop())
            if not stack:                 # no matching open parenthesis
                raise SyntaxError("Missing '('.")
            stack.pop()                   # discard '('; ')' is never pushed
        else:
            # Pop operators of equal or higher priority (left-associativity),
            # then push the current operator.
            while stack and priority[stack[-1]] >= priority[token]:
                result.append(stack.pop())
            stack.append(token)
    while stack:                          # flush remaining operators
        if stack[-1] == '(':              # leftover '(' means imbalance
            raise SyntaxError("Extra '('.")
        result.append(stack.pop())
    return result
# def trans_infix_suffix(line):
# st = SStack()
# exp = []
# for x in tokens(line):
# if x not in infix_operators:
# exp.append(x)
# elif st.is_empty() or x == '(':
# st.push(x)
# elif x == ')':
# while not st.is_empty() and st.top() != '(':
# exp.append(st.pop())
# if st.is_empty():
# raise SyntaxError("Missing '('.")
# st.pop() # discard left parenthesis
# else: # consider all ops left-associative
# while (not st.is_empty() and
# priority[st.top()] >= priority[x]):
# exp.append(st.pop())
# st.push(x)
#
# while not st.is_empty():
# if st.top() == '(':
# raise SyntaxError("Extra '('.")
# exp.append(st.pop())
# return exp
def test_trans_infix_suffix(s):
    """Show an infix expression, its postfix translation and its value."""
    print(s)
    converted = trans_infix_suffix(s)
    print(converted)
    print("Value:", suf_exp_evaluator(converted))
def demo_trans():
    """Run test_trans_infix_suffix over a batch of sample expressions."""
    samples = [
        "1.25",
        "1 + 2",
        "1 + 2 - 3",
        "1 + 2 * 3",
        "7. / 2 * 3",
        "7.e-2/3*2",
        "7.0e1/3*2e3",
        "(1 + 2) * 3",
        "1 + 2 * 3 - 5",
        "13 + 2 * (3 - 5)",
        "(1 + 2) * (3 - 5)",
        "(1 + (2 * 3 - 5)) / 1.25",
    ]
    for s in samples:
        test_trans_infix_suffix(s)
if __name__ == "__main__":
    # Demo entry point: run the infix -> suffix translation examples.
    # Uncomment the calls below to exercise the parenthesis checker or the
    # interactive suffix-expression calculator instead.
    demo_trans()
## check_parens("")
## check_parens("()")
## check_parens("([]{})")
## check_parens("([]{}]")
## check_parens("(abbvbb[hhh]jhg{lkii288}9000)000fhjsh")
## check_parens("jkdsjckd(mfkk[fdjjfk],,,{kckjfc}jskdjkc]kkk")
## suffix_exp_calculator()
    pass
|
naveed-ahmad/eui
|
src/components/date_picker/super_date_picker/quick_select_popover/commonly_used_time_ranges.js
|
import PropTypes from 'prop-types';
import React from 'react';
import { commonlyUsedRangeShape } from '../types';
import { EuiI18n } from '../../../i18n';
import { EuiFlexGrid, EuiFlexItem } from '../../../flex';
import { EuiTitle } from '../../../title';
import { EuiLink } from '../../../link';
import { EuiHorizontalRule } from '../../../horizontal_rule';
import { htmlIdGenerator } from '../../../../services';
const generateId = htmlIdGenerator();
export function EuiCommonlyUsedTimeRanges({ applyTime, commonlyUsedRanges }) {
const legendId = generateId();
const links = commonlyUsedRanges.map(({ start, end, label }) => {
const applyCommonlyUsed = () => {
applyTime({ start, end });
};
return (
<EuiFlexItem
key={label}
component="li"
className="euiCommonlyUsedTimeRanges__item">
<EuiLink
onClick={applyCommonlyUsed}
data-test-subj={`superDatePickerCommonlyUsed_${label.replace(
' ',
'_'
)}`}>
{label}
</EuiLink>
</EuiFlexItem>
);
});
return (
<fieldset>
<EuiTitle size="xxxs">
<legend id={legendId} aria-label="Commonly used time ranges">
<EuiI18n
token="euiCommonlyUsedTimeRanges.legend"
default="Commonly used"
/>
</legend>
</EuiTitle>
<div className="euiQuickSelectPopover__section">
<EuiFlexGrid
aria-labelledby={legendId}
gutterSize="s"
columns={2}
direction="column"
responsive={false}
component="ul">
{links}
</EuiFlexGrid>
</div>
<EuiHorizontalRule margin="s" />
</fieldset>
);
}
// Runtime prop validation: both props are required.
EuiCommonlyUsedTimeRanges.propTypes = {
  applyTime: PropTypes.func.isRequired,
  commonlyUsedRanges: PropTypes.arrayOf(commonlyUsedRangeShape).isRequired,
};
|
clayne/Lootman
|
lootman/f4se/ScaleformSerialization.h
|
<gh_stars>1-10
#pragma once
#include "f4se/Serialization.h"
#include "f4se/PluginAPI.h"
// Forward declaration: avoids pulling the full Scaleform headers in here.
class GFxValue;
namespace Serialization
{
// Explicit specializations of the F4SE serialization templates so that
// GFxValue objects can be written to / read from the save data stream.
// Definitions live in the corresponding .cpp file.
template <>
bool WriteData<GFxValue>(const F4SESerializationInterface* intfc, const GFxValue* val);
template <>
bool ReadData<GFxValue>(const F4SESerializationInterface* intfc, GFxValue* val);
};
|
troyblank/halfsies
|
src/components/balance/reducer.spec.js
|
import { assert } from 'chai';
import Chance from 'chance';
import reducer from './reducer';
import { BALANCE_RECEIVED } from './actions';
// Specs for the balance reducer: default state and BALANCE_RECEIVED handling.
describe('Balance Reducer', () => {
    const chance = new Chance();

    it('should return initial state', () => {
        const initialState = reducer(undefined, {});

        assert.deepEqual(initialState, {});
    });

    it('should set a balance received', () => {
        const amount = chance.natural();
        const nextState = reducer({}, { type: BALANCE_RECEIVED, amount });

        assert.deepEqual(nextState, { amount });
    });
});
|
arkivm/syzkaller
|
tools/syz-crush/crush.go
|
// Copyright 2016 syzkaller project authors. All rights reserved.
// Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file.
// syz-crush replays crash log on multiple VMs. Usage:
// syz-crush -config=config.file execution.log
// Intended for reproduction of particularly elusive crashes.
package main
import (
"flag"
"fmt"
"io/ioutil"
"log"
"os"
"path/filepath"
"strings"
"sync/atomic"
"time"
"github.com/google/syzkaller/pkg/csource"
"github.com/google/syzkaller/pkg/hash"
"github.com/google/syzkaller/pkg/instance"
"github.com/google/syzkaller/pkg/mgrconfig"
"github.com/google/syzkaller/pkg/osutil"
"github.com/google/syzkaller/pkg/report"
"github.com/google/syzkaller/vm"
)
// Command-line flags controlling the replay run.
var (
	flagConfig      = flag.String("config", "", "manager configuration file")
	flagDebug       = flag.Bool("debug", false, "dump all VM output to console")
	flagRestartTime = flag.Duration("restart_time", 0, "how long to run the test")
	flagInfinite    = flag.Bool("infinite", true, "by default test is run for ever, -infinite=false to stop on crash")
)
// FileType distinguishes the two kinds of reproducer inputs.
type FileType int

const (
	// LogFile is a syzkaller execution log replayed via syz-execprog.
	LogFile FileType = iota
	// CProg is a C reproducer source file, compiled and run directly.
	CProg
)
// main replays a crash log or C reproducer on a pool of VMs, storing any
// reproduced crashes under a "crashes" directory next to the input file.
func main() {
	flag.Parse()
	if len(flag.Args()) != 1 || *flagConfig == "" {
		fmt.Fprintf(os.Stderr, "usage: syz-crush [flags] <execution.log|creprog.c>\n")
		flag.PrintDefaults()
		os.Exit(1)
	}
	cfg, err := mgrconfig.LoadFile(*flagConfig)
	if err != nil {
		log.Fatal(err)
	}
	if *flagRestartTime == 0 {
		// Bug fix: the default was computed with "*=", which multiplies the
		// zero flag value and always leaves it zero. Assign the configured
		// VM running time instead.
		*flagRestartTime = cfg.Timeouts.VMRunningTime
	}
	if *flagInfinite {
		log.Printf("running infinitely and restarting VM every %v", *flagRestartTime)
	} else {
		log.Printf("running until crash is found or till %v", *flagRestartTime)
	}
	vmPool, err := vm.Create(cfg, *flagDebug)
	if err != nil {
		log.Fatalf("%v", err)
	}
	reporter, err := report.NewReporter(cfg)
	if err != nil {
		log.Fatalf("%v", err)
	}
	reproduceMe := flag.Args()[0]
	if cfg.Tag == "" {
		// If no tag is given, use reproducer name as the tag.
		cfg.Tag = filepath.Base(reproduceMe)
	}
	runType := LogFile
	if strings.HasSuffix(reproduceMe, ".c") {
		runType = CProg
	}
	if runType == CProg {
		// C reproducer: compile it once up front and run the binary directly.
		execprog, err := ioutil.ReadFile(reproduceMe)
		if err != nil {
			log.Fatalf("error reading source file from '%s'", reproduceMe)
		}
		cfg.ExecprogBin, err = csource.BuildNoWarn(cfg.Target, execprog)
		if err != nil {
			log.Fatalf("failed to build source file: %v", err)
		}
		log.Printf("compiled csource %v to cprog: %v", reproduceMe, cfg.ExecprogBin)
	} else {
		log.Printf("reproducing from log file: %v", reproduceMe)
	}
	log.Printf("booting %v test machines...", vmPool.Count())
	runDone := make(chan *report.Report)
	var shutdown, stoppedWorkers uint32
	for i := 0; i < vmPool.Count(); i++ {
		// One worker goroutine per VM; each repeatedly replays the reproducer
		// until a crash is found (when -infinite=false) or shutdown is requested.
		go func(index int) {
			for {
				runDone <- runInstance(cfg, reporter, vmPool, index, *flagRestartTime, runType)
				if atomic.LoadUint32(&shutdown) != 0 || !*flagInfinite {
					// If this is the last worker then we can close the channel.
					if atomic.AddUint32(&stoppedWorkers, 1) == uint32(vmPool.Count()) {
						log.Printf("vm-%v: closing channel", index)
						close(runDone)
					}
					break
				}
			}
			log.Printf("vm-%v: done", index)
		}(i)
	}
	shutdownC := make(chan struct{})
	osutil.HandleInterrupts(shutdownC)
	go func() {
		// Ctrl-C / SIGINT: flag the workers to stop and shut the VMs down.
		<-shutdownC
		atomic.StoreUint32(&shutdown, 1)
		close(vm.Shutdown)
	}()
	var count, crashes int
	for rep := range runDone {
		count++
		if rep != nil {
			crashes++
			storeCrash(cfg, rep)
		}
		log.Printf("instances executed: %v, crashes: %v", count, crashes)
	}
	log.Printf("all done. reproduced %v crashes. reproduce rate %.2f%%", crashes, float64(crashes)/float64(count)*100.0)
}
// storeCrash persists a reproduced crash (description, log, tag, report and
// a copy of the reproducer) under crashes/<hash-of-title>/ next to the input.
func storeCrash(cfg *mgrconfig.Config, rep *report.Report) {
	id := hash.String([]byte(rep.Title))
	crashDir := filepath.Join(filepath.Dir(flag.Args()[0]), "crashes", id)
	osutil.MkdirAll(crashDir)
	// Find the first unused sequence number for this crash title.
	seq := 0
	for osutil.IsExist(filepath.Join(crashDir, fmt.Sprintf("log%v", seq))) {
		seq++
	}
	log.Printf("saving crash '%v' with index %v in %v", rep.Title, seq, crashDir)
	// Small helper: write one artifact file, logging (not aborting) on failure.
	save := func(name string, data []byte, what string) {
		if err := osutil.WriteFile(filepath.Join(crashDir, name), data); err != nil {
			log.Printf("failed to write crash %v: %v", what, err)
		}
	}
	save("description", []byte(rep.Title+"\n"), "description")
	save(fmt.Sprintf("log%v", seq), rep.Output, "log")
	save(fmt.Sprintf("tag%v", seq), []byte(cfg.Tag), "tag")
	if len(rep.Report) > 0 {
		save(fmt.Sprintf("report%v", seq), rep.Report, "report")
	}
	if err := osutil.CopyFile(flag.Args()[0], filepath.Join(crashDir, fmt.Sprintf("reproducer%v", seq))); err != nil {
		log.Printf("failed to write crash reproducer: %v", err)
	}
}
// runInstance boots one VM, replays the reproducer on it for up to timeout,
// and returns the crash report if one was observed (nil otherwise).
// All failures are logged and reported as nil rather than aborting the tool.
func runInstance(cfg *mgrconfig.Config, reporter *report.Reporter,
	vmPool *vm.Pool, index int, timeout time.Duration, runType FileType) *report.Report {
	log.Printf("vm-%v: starting", index)
	inst, err := vmPool.Create(index)
	if err != nil {
		log.Printf("failed to create instance: %v", err)
		return nil
	}
	defer inst.Close()
	// For CProg runs cfg.ExecprogBin is the compiled reproducer binary;
	// for LogFile runs it is syz-execprog.
	execprogBin, err := inst.Copy(cfg.ExecprogBin)
	if err != nil {
		log.Printf("failed to copy execprog: %v", err)
		return nil
	}
	cmd := ""
	if runType == LogFile {
		// If SyzExecutorCmd is provided, it means that syz-executor is already in
		// the image, so no need to copy it.
		executorBin := cfg.SysTarget.ExecutorBin
		if executorBin == "" {
			executorBin, err = inst.Copy(cfg.ExecutorBin)
			if err != nil {
				log.Printf("failed to copy executor: %v", err)
				return nil
			}
		}
		// The execution log itself also has to be copied into the VM.
		logFile, err := inst.Copy(flag.Args()[0])
		if err != nil {
			log.Printf("failed to copy log: %v", err)
			return nil
		}
		cmd = instance.ExecprogCmd(execprogBin, executorBin, cfg.TargetOS, cfg.TargetArch, cfg.Sandbox,
			true, true, true, cfg.Procs, -1, -1, true, cfg.Timeouts.Slowdown, logFile)
	} else {
		// Compiled C reproducer: run the binary directly.
		cmd = execprogBin
	}
	outc, errc, err := inst.Run(timeout, nil, cmd)
	if err != nil {
		log.Printf("failed to run execprog: %v", err)
		return nil
	}
	log.Printf("vm-%v: crushing...", index)
	// Watch console output until a crash is detected or the timeout expires.
	rep := inst.MonitorExecution(outc, errc, reporter, vm.ExitTimeout)
	if rep != nil {
		log.Printf("vm-%v: crash: %v", index, rep.Title)
		return rep
	}
	log.Printf("vm-%v: running long enough, stopping", index)
	return nil
}
|
puckel/dgr
|
vendor/github.com/aws/aws-sdk-go/service/codecommit/codecommitiface/interface.go
|
// THIS FILE IS AUTOMATICALLY GENERATED. DO NOT EDIT.
// Package codecommitiface provides an interface for the AWS CodeCommit.
package codecommitiface
import (
"github.com/aws/aws-sdk-go/aws/request"
"github.com/aws/aws-sdk-go/service/codecommit"
)
// CodeCommitAPI is the interface type for codecommit.CodeCommit.
type CodeCommitAPI interface {
BatchGetRepositoriesRequest(*codecommit.BatchGetRepositoriesInput) (*request.Request, *codecommit.BatchGetRepositoriesOutput)
BatchGetRepositories(*codecommit.BatchGetRepositoriesInput) (*codecommit.BatchGetRepositoriesOutput, error)
CreateBranchRequest(*codecommit.CreateBranchInput) (*request.Request, *codecommit.CreateBranchOutput)
CreateBranch(*codecommit.CreateBranchInput) (*codecommit.CreateBranchOutput, error)
CreateRepositoryRequest(*codecommit.CreateRepositoryInput) (*request.Request, *codecommit.CreateRepositoryOutput)
CreateRepository(*codecommit.CreateRepositoryInput) (*codecommit.CreateRepositoryOutput, error)
DeleteRepositoryRequest(*codecommit.DeleteRepositoryInput) (*request.Request, *codecommit.DeleteRepositoryOutput)
DeleteRepository(*codecommit.DeleteRepositoryInput) (*codecommit.DeleteRepositoryOutput, error)
GetBranchRequest(*codecommit.GetBranchInput) (*request.Request, *codecommit.GetBranchOutput)
GetBranch(*codecommit.GetBranchInput) (*codecommit.GetBranchOutput, error)
GetRepositoryRequest(*codecommit.GetRepositoryInput) (*request.Request, *codecommit.GetRepositoryOutput)
GetRepository(*codecommit.GetRepositoryInput) (*codecommit.GetRepositoryOutput, error)
ListBranchesRequest(*codecommit.ListBranchesInput) (*request.Request, *codecommit.ListBranchesOutput)
ListBranches(*codecommit.ListBranchesInput) (*codecommit.ListBranchesOutput, error)
ListRepositoriesRequest(*codecommit.ListRepositoriesInput) (*request.Request, *codecommit.ListRepositoriesOutput)
ListRepositories(*codecommit.ListRepositoriesInput) (*codecommit.ListRepositoriesOutput, error)
UpdateDefaultBranchRequest(*codecommit.UpdateDefaultBranchInput) (*request.Request, *codecommit.UpdateDefaultBranchOutput)
UpdateDefaultBranch(*codecommit.UpdateDefaultBranchInput) (*codecommit.UpdateDefaultBranchOutput, error)
UpdateRepositoryDescriptionRequest(*codecommit.UpdateRepositoryDescriptionInput) (*request.Request, *codecommit.UpdateRepositoryDescriptionOutput)
UpdateRepositoryDescription(*codecommit.UpdateRepositoryDescriptionInput) (*codecommit.UpdateRepositoryDescriptionOutput, error)
UpdateRepositoryNameRequest(*codecommit.UpdateRepositoryNameInput) (*request.Request, *codecommit.UpdateRepositoryNameOutput)
UpdateRepositoryName(*codecommit.UpdateRepositoryNameInput) (*codecommit.UpdateRepositoryNameOutput, error)
}
var _ CodeCommitAPI = (*codecommit.CodeCommit)(nil)
|
selinabitting/compas_view2
|
scripts/v120_frames.py
|
<filename>scripts/v120_frames.py
from compas_view2 import app
from compas.geometry import Frame
# Minimal compas_view2 demo: display a single coordinate frame.
viewer = app.App()
# Frame(point, xaxis, yaxis) — the axes given here are not exactly unit
# length; presumably COMPAS normalizes them internally (TODO confirm
# against the compas.geometry.Frame documentation).
frame = Frame([1, 1, 1], [0.68, 0.68, 0.27], [-0.67, 0.73, -0.15])
viewer.add(frame)
viewer.show()  # opens the interactive viewer window (blocking call)
|
freman/genesysapi
|
client/integrations/patch_integrations_action_draft_responses.go
|
<filename>client/integrations/patch_integrations_action_draft_responses.go
// Code generated by go-swagger; DO NOT EDIT.
package integrations
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
import (
"fmt"
"io"
"github.com/go-openapi/runtime"
"github.com/go-openapi/strfmt"
"github.com/freman/genesysapi/models"
)
// PatchIntegrationsActionDraftReader is a Reader for the PatchIntegrationsActionDraft structure.
type PatchIntegrationsActionDraftReader struct {
formats strfmt.Registry
}
// ReadResponse reads a server response into the received o.
func (o *PatchIntegrationsActionDraftReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) {
switch response.Code() {
case 200:
result := NewPatchIntegrationsActionDraftOK()
if err := result.readResponse(response, consumer, o.formats); err != nil {
return nil, err
}
return result, nil
case 400:
result := NewPatchIntegrationsActionDraftBadRequest()
if err := result.readResponse(response, consumer, o.formats); err != nil {
return nil, err
}
return nil, result
case 401:
result := NewPatchIntegrationsActionDraftUnauthorized()
if err := result.readResponse(response, consumer, o.formats); err != nil {
return nil, err
}
return nil, result
case 403:
result := NewPatchIntegrationsActionDraftForbidden()
if err := result.readResponse(response, consumer, o.formats); err != nil {
return nil, err
}
return nil, result
case 404:
result := NewPatchIntegrationsActionDraftNotFound()
if err := result.readResponse(response, consumer, o.formats); err != nil {
return nil, err
}
return nil, result
case 408:
result := NewPatchIntegrationsActionDraftRequestTimeout()
if err := result.readResponse(response, consumer, o.formats); err != nil {
return nil, err
}
return nil, result
case 413:
result := NewPatchIntegrationsActionDraftRequestEntityTooLarge()
if err := result.readResponse(response, consumer, o.formats); err != nil {
return nil, err
}
return nil, result
case 415:
result := NewPatchIntegrationsActionDraftUnsupportedMediaType()
if err := result.readResponse(response, consumer, o.formats); err != nil {
return nil, err
}
return nil, result
case 429:
result := NewPatchIntegrationsActionDraftTooManyRequests()
if err := result.readResponse(response, consumer, o.formats); err != nil {
return nil, err
}
return nil, result
case 500:
result := NewPatchIntegrationsActionDraftInternalServerError()
if err := result.readResponse(response, consumer, o.formats); err != nil {
return nil, err
}
return nil, result
case 503:
result := NewPatchIntegrationsActionDraftServiceUnavailable()
if err := result.readResponse(response, consumer, o.formats); err != nil {
return nil, err
}
return nil, result
case 504:
result := NewPatchIntegrationsActionDraftGatewayTimeout()
if err := result.readResponse(response, consumer, o.formats); err != nil {
return nil, err
}
return nil, result
default:
return nil, runtime.NewAPIError("response status code does not match any response statuses defined for this endpoint in the swagger spec", response, response.Code())
}
}
// NewPatchIntegrationsActionDraftOK creates a PatchIntegrationsActionDraftOK with default headers values
func NewPatchIntegrationsActionDraftOK() *PatchIntegrationsActionDraftOK {
return &PatchIntegrationsActionDraftOK{}
}
/*PatchIntegrationsActionDraftOK handles this case with default header values.
successful operation
*/
type PatchIntegrationsActionDraftOK struct {
Payload *models.Action
}
func (o *PatchIntegrationsActionDraftOK) Error() string {
return fmt.Sprintf("[PATCH /api/v2/integrations/actions/{actionId}/draft][%d] patchIntegrationsActionDraftOK %+v", 200, o.Payload)
}
func (o *PatchIntegrationsActionDraftOK) GetPayload() *models.Action {
return o.Payload
}
func (o *PatchIntegrationsActionDraftOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
o.Payload = new(models.Action)
// response payload
if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
return err
}
return nil
}
// NewPatchIntegrationsActionDraftBadRequest creates a PatchIntegrationsActionDraftBadRequest with default headers values
func NewPatchIntegrationsActionDraftBadRequest() *PatchIntegrationsActionDraftBadRequest {
return &PatchIntegrationsActionDraftBadRequest{}
}
/*PatchIntegrationsActionDraftBadRequest handles this case with default header values.
The request could not be understood by the server due to malformed syntax.
*/
type PatchIntegrationsActionDraftBadRequest struct {
Payload *models.ErrorBody
}
func (o *PatchIntegrationsActionDraftBadRequest) Error() string {
return fmt.Sprintf("[PATCH /api/v2/integrations/actions/{actionId}/draft][%d] patchIntegrationsActionDraftBadRequest %+v", 400, o.Payload)
}
func (o *PatchIntegrationsActionDraftBadRequest) GetPayload() *models.ErrorBody {
return o.Payload
}
func (o *PatchIntegrationsActionDraftBadRequest) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
o.Payload = new(models.ErrorBody)
// response payload
if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
return err
}
return nil
}
// NewPatchIntegrationsActionDraftUnauthorized creates a PatchIntegrationsActionDraftUnauthorized with default headers values
func NewPatchIntegrationsActionDraftUnauthorized() *PatchIntegrationsActionDraftUnauthorized {
return &PatchIntegrationsActionDraftUnauthorized{}
}
/*PatchIntegrationsActionDraftUnauthorized handles this case with default header values.
No authentication bearer token specified in authorization header.
*/
type PatchIntegrationsActionDraftUnauthorized struct {
Payload *models.ErrorBody
}
func (o *PatchIntegrationsActionDraftUnauthorized) Error() string {
return fmt.Sprintf("[PATCH /api/v2/integrations/actions/{actionId}/draft][%d] patchIntegrationsActionDraftUnauthorized %+v", 401, o.Payload)
}
func (o *PatchIntegrationsActionDraftUnauthorized) GetPayload() *models.ErrorBody {
return o.Payload
}
func (o *PatchIntegrationsActionDraftUnauthorized) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
o.Payload = new(models.ErrorBody)
// response payload
if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
return err
}
return nil
}
// NewPatchIntegrationsActionDraftForbidden creates a PatchIntegrationsActionDraftForbidden with default headers values
func NewPatchIntegrationsActionDraftForbidden() *PatchIntegrationsActionDraftForbidden {
return &PatchIntegrationsActionDraftForbidden{}
}
/*PatchIntegrationsActionDraftForbidden handles this case with default header values.
You are not authorized to perform the requested action.
*/
type PatchIntegrationsActionDraftForbidden struct {
Payload *models.ErrorBody
}
func (o *PatchIntegrationsActionDraftForbidden) Error() string {
return fmt.Sprintf("[PATCH /api/v2/integrations/actions/{actionId}/draft][%d] patchIntegrationsActionDraftForbidden %+v", 403, o.Payload)
}
func (o *PatchIntegrationsActionDraftForbidden) GetPayload() *models.ErrorBody {
return o.Payload
}
func (o *PatchIntegrationsActionDraftForbidden) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
o.Payload = new(models.ErrorBody)
// response payload
if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
return err
}
return nil
}
// NewPatchIntegrationsActionDraftNotFound creates a PatchIntegrationsActionDraftNotFound with default headers values
func NewPatchIntegrationsActionDraftNotFound() *PatchIntegrationsActionDraftNotFound {
return &PatchIntegrationsActionDraftNotFound{}
}
/*PatchIntegrationsActionDraftNotFound handles this case with default header values.
The requested resource was not found.
*/
type PatchIntegrationsActionDraftNotFound struct {
Payload *models.ErrorBody
}
func (o *PatchIntegrationsActionDraftNotFound) Error() string {
return fmt.Sprintf("[PATCH /api/v2/integrations/actions/{actionId}/draft][%d] patchIntegrationsActionDraftNotFound %+v", 404, o.Payload)
}
func (o *PatchIntegrationsActionDraftNotFound) GetPayload() *models.ErrorBody {
return o.Payload
}
func (o *PatchIntegrationsActionDraftNotFound) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
o.Payload = new(models.ErrorBody)
// response payload
if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
return err
}
return nil
}
// NewPatchIntegrationsActionDraftRequestTimeout creates a PatchIntegrationsActionDraftRequestTimeout with default headers values
func NewPatchIntegrationsActionDraftRequestTimeout() *PatchIntegrationsActionDraftRequestTimeout {
return &PatchIntegrationsActionDraftRequestTimeout{}
}
/*PatchIntegrationsActionDraftRequestTimeout handles this case with default header values.
The client did not produce a request within the server timeout limit. This can be caused by a slow network connection and/or large payloads.
*/
type PatchIntegrationsActionDraftRequestTimeout struct {
Payload *models.ErrorBody
}
func (o *PatchIntegrationsActionDraftRequestTimeout) Error() string {
return fmt.Sprintf("[PATCH /api/v2/integrations/actions/{actionId}/draft][%d] patchIntegrationsActionDraftRequestTimeout %+v", 408, o.Payload)
}
func (o *PatchIntegrationsActionDraftRequestTimeout) GetPayload() *models.ErrorBody {
return o.Payload
}
func (o *PatchIntegrationsActionDraftRequestTimeout) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
o.Payload = new(models.ErrorBody)
// response payload
if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
return err
}
return nil
}
// NewPatchIntegrationsActionDraftRequestEntityTooLarge creates a PatchIntegrationsActionDraftRequestEntityTooLarge with default headers values
func NewPatchIntegrationsActionDraftRequestEntityTooLarge() *PatchIntegrationsActionDraftRequestEntityTooLarge {
return &PatchIntegrationsActionDraftRequestEntityTooLarge{}
}
/*PatchIntegrationsActionDraftRequestEntityTooLarge handles this case with default header values.
The request is over the size limit. Content-Length: %s
*/
type PatchIntegrationsActionDraftRequestEntityTooLarge struct {
Payload *models.ErrorBody
}
func (o *PatchIntegrationsActionDraftRequestEntityTooLarge) Error() string {
return fmt.Sprintf("[PATCH /api/v2/integrations/actions/{actionId}/draft][%d] patchIntegrationsActionDraftRequestEntityTooLarge %+v", 413, o.Payload)
}
func (o *PatchIntegrationsActionDraftRequestEntityTooLarge) GetPayload() *models.ErrorBody {
return o.Payload
}
func (o *PatchIntegrationsActionDraftRequestEntityTooLarge) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
o.Payload = new(models.ErrorBody)
// response payload
if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
return err
}
return nil
}
// NewPatchIntegrationsActionDraftUnsupportedMediaType creates a PatchIntegrationsActionDraftUnsupportedMediaType with default headers values
func NewPatchIntegrationsActionDraftUnsupportedMediaType() *PatchIntegrationsActionDraftUnsupportedMediaType {
return &PatchIntegrationsActionDraftUnsupportedMediaType{}
}
/*PatchIntegrationsActionDraftUnsupportedMediaType handles this case with default header values.
Unsupported Media Type - Unsupported or incorrect media type, such as an incorrect Content-Type value in the header.
*/
type PatchIntegrationsActionDraftUnsupportedMediaType struct {
Payload *models.ErrorBody
}
func (o *PatchIntegrationsActionDraftUnsupportedMediaType) Error() string {
return fmt.Sprintf("[PATCH /api/v2/integrations/actions/{actionId}/draft][%d] patchIntegrationsActionDraftUnsupportedMediaType %+v", 415, o.Payload)
}
func (o *PatchIntegrationsActionDraftUnsupportedMediaType) GetPayload() *models.ErrorBody {
return o.Payload
}
func (o *PatchIntegrationsActionDraftUnsupportedMediaType) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
o.Payload = new(models.ErrorBody)
// response payload
if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
return err
}
return nil
}
// NewPatchIntegrationsActionDraftTooManyRequests creates a PatchIntegrationsActionDraftTooManyRequests with default headers values
func NewPatchIntegrationsActionDraftTooManyRequests() *PatchIntegrationsActionDraftTooManyRequests {
return &PatchIntegrationsActionDraftTooManyRequests{}
}
/*PatchIntegrationsActionDraftTooManyRequests handles this case with default header values.
Rate limit exceeded the maximum. Retry the request in [%s] seconds
*/
type PatchIntegrationsActionDraftTooManyRequests struct {
Payload *models.ErrorBody
}
func (o *PatchIntegrationsActionDraftTooManyRequests) Error() string {
return fmt.Sprintf("[PATCH /api/v2/integrations/actions/{actionId}/draft][%d] patchIntegrationsActionDraftTooManyRequests %+v", 429, o.Payload)
}
func (o *PatchIntegrationsActionDraftTooManyRequests) GetPayload() *models.ErrorBody {
return o.Payload
}
func (o *PatchIntegrationsActionDraftTooManyRequests) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
o.Payload = new(models.ErrorBody)
// response payload
if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
return err
}
return nil
}
// NewPatchIntegrationsActionDraftInternalServerError creates a PatchIntegrationsActionDraftInternalServerError with default headers values
func NewPatchIntegrationsActionDraftInternalServerError() *PatchIntegrationsActionDraftInternalServerError {
return &PatchIntegrationsActionDraftInternalServerError{}
}
/*PatchIntegrationsActionDraftInternalServerError handles this case with default header values.
The server encountered an unexpected condition which prevented it from fulfilling the request.
*/
type PatchIntegrationsActionDraftInternalServerError struct {
Payload *models.ErrorBody
}
func (o *PatchIntegrationsActionDraftInternalServerError) Error() string {
return fmt.Sprintf("[PATCH /api/v2/integrations/actions/{actionId}/draft][%d] patchIntegrationsActionDraftInternalServerError %+v", 500, o.Payload)
}
func (o *PatchIntegrationsActionDraftInternalServerError) GetPayload() *models.ErrorBody {
return o.Payload
}
func (o *PatchIntegrationsActionDraftInternalServerError) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
o.Payload = new(models.ErrorBody)
// response payload
if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
return err
}
return nil
}
// NewPatchIntegrationsActionDraftServiceUnavailable creates a PatchIntegrationsActionDraftServiceUnavailable with default headers values
func NewPatchIntegrationsActionDraftServiceUnavailable() *PatchIntegrationsActionDraftServiceUnavailable {
return &PatchIntegrationsActionDraftServiceUnavailable{}
}
/*PatchIntegrationsActionDraftServiceUnavailable handles this case with default header values.
Service Unavailable - The server is currently unavailable (because it is overloaded or down for maintenance).
*/
type PatchIntegrationsActionDraftServiceUnavailable struct {
Payload *models.ErrorBody
}
func (o *PatchIntegrationsActionDraftServiceUnavailable) Error() string {
return fmt.Sprintf("[PATCH /api/v2/integrations/actions/{actionId}/draft][%d] patchIntegrationsActionDraftServiceUnavailable %+v", 503, o.Payload)
}
func (o *PatchIntegrationsActionDraftServiceUnavailable) GetPayload() *models.ErrorBody {
return o.Payload
}
func (o *PatchIntegrationsActionDraftServiceUnavailable) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
o.Payload = new(models.ErrorBody)
// response payload
if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
return err
}
return nil
}
// NewPatchIntegrationsActionDraftGatewayTimeout creates a PatchIntegrationsActionDraftGatewayTimeout with default headers values
func NewPatchIntegrationsActionDraftGatewayTimeout() *PatchIntegrationsActionDraftGatewayTimeout {
	return &PatchIntegrationsActionDraftGatewayTimeout{}
}

/*PatchIntegrationsActionDraftGatewayTimeout handles this case with default header values.
The request timed out.
*/
type PatchIntegrationsActionDraftGatewayTimeout struct {
	// Payload holds the structured error body returned alongside the 504.
	Payload *models.ErrorBody
}

// Error implements the error interface with the request route, status code
// and decoded payload.
func (o *PatchIntegrationsActionDraftGatewayTimeout) Error() string {
	return fmt.Sprintf("[PATCH /api/v2/integrations/actions/{actionId}/draft][%d] patchIntegrationsActionDraftGatewayTimeout %+v", 504, o.Payload)
}

// GetPayload returns the decoded error body of the 504 response.
func (o *PatchIntegrationsActionDraftGatewayTimeout) GetPayload() *models.ErrorBody {
	return o.Payload
}

// readResponse decodes the response body into Payload; io.EOF (an empty
// body) is tolerated and treated as success.
func (o *PatchIntegrationsActionDraftGatewayTimeout) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error {
	o.Payload = new(models.ErrorBody)
	// response payload
	if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF {
		return err
	}
	return nil
}
|
1337programming/leviathan
|
nfc/src/DOOM/neo/swf/SWF_Bitstream.h
|
/*
===========================================================================
Doom 3 BFG Edition GPL Source Code
Copyright (C) 1993-2012 id Software LLC, a ZeniMax Media company.
This file is part of the Doom 3 BFG Edition GPL Source Code ("Doom 3 BFG Edition Source Code").
Doom 3 BFG Edition Source Code is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Doom 3 BFG Edition Source Code is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Doom 3 BFG Edition Source Code. If not, see <http://www.gnu.org/licenses/>.
In addition, the Doom 3 BFG Edition Source Code is also subject to certain additional terms. You should have received a copy of these additional terms immediately following the terms and conditions of the GNU General Public License which accompanied the Doom 3 BFG Edition Source Code. If not, please request a copy in writing from id Software at the address below.
If you have questions concerning this license or the applicable additional terms, you may contact in writing id Software LLC, c/o ZeniMax Media Inc., Suite 120, Rockville, Maryland 20850 USA.
===========================================================================
*/
#ifndef __SWF_BITSTREAM_H__
#define __SWF_BITSTREAM_H__
/*
========================
idSWFBitStream
Sequential reader over a block of SWF file data. Supports both bit-packed
fields (ReadS/ReadU/ReadBool) and byte-aligned little-endian primitives,
plus the composite SWF structures (rects, matrices, color transforms,
gradients).
========================
*/
class idSWFBitStream {
public:
	idSWFBitStream();
	// Convenience constructor: 'free' is cleared before Load() decides
	// (via 'copy') whether the stream owns its buffer.
	idSWFBitStream( const byte * data, uint32 len, bool copy ) { free = false; Load( data, len, copy ); }
	~idSWFBitStream() { Free(); }
	idSWFBitStream & operator=( idSWFBitStream & other );
	void Load( const byte * data, uint32 len, bool copy );
	void Free();
	// Raw buffer access / positioning.
	const byte * Ptr() { return startp; }
	uint32 Length() const { return (uint32)( endp - startp ); }
	uint32 Tell() const { return (uint32)( readp - startp ); }
	void Seek( int32 offset ) { readp += offset; }	// relative seek; no bounds check
	void Rewind() { readp = startp; }
	void ResetBits();
	// Bit-packed reads (signed / unsigned / single flag bit).
	int ReadS( unsigned int numBits );
	unsigned int ReadU( unsigned int numBits );
	bool ReadBool();
	const byte * ReadData( int size );
	template< typename T >
	void ReadLittle( T & val );
	// Byte-aligned primitive reads (little-endian, as stored in SWF).
	uint8 ReadU8();
	uint16 ReadU16();
	uint32 ReadU32();
	int16 ReadS16();
	int32 ReadS32();
	uint32 ReadEncodedU32();
	float ReadFixed8();
	float ReadFixed16();
	float ReadFloat();
	double ReadDouble();
	const char * ReadString();
	// Composite SWF structures.
	void ReadRect( swfRect_t & rect );
	void ReadMatrix( swfMatrix_t & matrix );
	void ReadColorXFormRGBA( swfColorXform_t & cxf );
	void ReadColorRGB( swfColorRGB_t & color );
	void ReadColorRGBA( swfColorRGBA_t & color );
	void ReadGradient( swfGradient_t & grad, bool rgba );
	void ReadMorphGradient( swfGradient_t & grad );
private:
	bool free;				// whether Free() must release the buffer (set by Load) — TODO confirm
	const byte * startp;	// first byte of the buffer
	const byte * endp;		// one past the last byte
	const byte * readp;		// current read cursor
	// Bit-extraction state for ReadS/ReadU; cleared by ResetBits().
	uint64 currentBit;
	uint64 currentByte;
	int ReadInternalS( uint64 & regCurrentBit, uint64 & regCurrentByte, unsigned int numBits );
	unsigned int ReadInternalU( uint64 & regCurrentBit, uint64 & regCurrentByte, unsigned int numBits );
};
/*
========================
idSWFBitStream::ResetBits
Discards any partially consumed bit buffer so the next read starts fresh;
called by every byte-aligned reader before touching readp.
========================
*/
ID_INLINE void idSWFBitStream::ResetBits() {
	currentBit = 0;
	currentByte = 0;
}
/*
========================
idSWFBitStream::ReadLittle
Reads sizeof(T) raw bytes and passes the value through idSwap::Little
(presumably a byte swap on big-endian builds and a no-op otherwise —
TODO confirm against idSwap).
========================
*/
template< typename T >
void idSWFBitStream::ReadLittle( T & val ) {
	val = *(T *)ReadData( sizeof( val ) );
	idSwap::Little( val );
}
/*
========================
Wrappers for the most basic types
Multi-byte values are assembled low-byte-first (little-endian, as stored in
SWF). Every wrapper calls ResetBits() first, so any pending bit-packed
state is discarded before the byte-aligned read.
========================
*/
ID_INLINE bool idSWFBitStream::ReadBool() { return ( ReadU( 1 ) != 0 ); }
ID_INLINE uint8 idSWFBitStream::ReadU8() { ResetBits(); return *readp++; }
ID_INLINE uint16 idSWFBitStream::ReadU16() { ResetBits(); readp += 2; return ( readp[-2] | ( readp[-1] << 8 ) ); }
ID_INLINE uint32 idSWFBitStream::ReadU32() { ResetBits(); readp += 4; return ( readp[-4] | ( readp[-3] << 8 ) | ( readp[-2] << 16 ) | ( readp[-1] << 24 ) ); }
ID_INLINE int16 idSWFBitStream::ReadS16() { ResetBits(); readp += 2; return ( readp[-2] | ( readp[-1] << 8 ) ); }
ID_INLINE int32 idSWFBitStream::ReadS32() { ResetBits(); readp += 4; return ( readp[-4] | ( readp[-3] << 8 ) | ( readp[-2] << 16 ) | ( readp[-1] << 24 ) ); }
// SWF fixed-point values, converted to float via the SWFFIXED8/SWFFIXED16 macros.
ID_INLINE float idSWFBitStream::ReadFixed8() { ResetBits(); readp += 2; return SWFFIXED8( ( readp[-2] | ( readp[-1] << 8 ) ) ); }
ID_INLINE float idSWFBitStream::ReadFixed16() { ResetBits(); readp += 4; return SWFFIXED16( ( readp[-4] | ( readp[-3] << 8 ) | ( readp[-2] << 16 ) | ( readp[-1] << 24 ) ) ); }
// NOTE(review): the (float &) reinterpretation below relies on type punning
// that is technically undefined — TODO confirm build flags make it benign.
ID_INLINE float idSWFBitStream::ReadFloat() { ResetBits(); readp += 4; uint32 i = ( readp[-4] | ( readp[-3] << 8 ) | ( readp[-2] << 16 ) | ( readp[-1] << 24 ) ); return (float &)i; }
ID_INLINE double idSWFBitStream::ReadDouble() {
	// SWF stores DOUBLEs with the two 32-bit halves swapped relative to the
	// usual little-endian layout, so the 8 bytes are rearranged (low word
	// and high word exchanged) before being reinterpreted.
	const byte * swfIsRetarded = ReadData( 8 );
	byte buffer[8];
	buffer[0] = swfIsRetarded[4];
	buffer[1] = swfIsRetarded[5];
	buffer[2] = swfIsRetarded[6];
	buffer[3] = swfIsRetarded[7];
	buffer[4] = swfIsRetarded[0];
	buffer[5] = swfIsRetarded[1];
	buffer[6] = swfIsRetarded[2];
	buffer[7] = swfIsRetarded[3];
	// NOTE(review): *(double *) of a byte buffer violates strict aliasing;
	// a memcpy would be safer — TODO confirm compiler flags make this benign.
	double d = *(double *)buffer;
	idSwap::Little( d );
	return d;
}
#endif // !__SWF_BITSTREAM_H__
|
sash2104/library
|
test/aoj/GRL_6_A.test.cpp
|
<reponame>sash2104/library
#define PROBLEM "http://judge.u-aizu.ac.jp/onlinejudge/description.jsp?id=GRL_6_A"
#include "../../graph/max-flow-dinic.hpp"
#include <iostream>
using namespace std;
typedef long long ll;
int main() {
int V, E;
cin >> V >> E;
max_flow mf(V);
for (int i = 0; i < E; ++i) {
int from, to, cap;
cin >> from >> to >> cap;
mf.add_edge(from, to, cap);
}
cout << mf.run(0, V-1) << endl;
}
|
saidinesh5/rune_vm
|
src/wasm_backends/Backends.hpp
|
<filename>src/wasm_backends/Backends.hpp
//
// Created by <NAME> - github.com/delimbetov - on 20.03.2021
// Copyright (c) HAMMER OF THE GODS INC. - hotg.ai
//
#pragma once
// include all backends here
#include <wasm_backends/wasm3/Wasm3Engine.hpp>
|
deatil/lakego-admin
|
pkg/lakego-admin/admin/model/action_log.go
|
<reponame>deatil/lakego-admin<gh_stars>10-100
package model
import (
"time"
"strconv"
"gorm.io/gorm"
"github.com/deatil/lakego-admin/lakego/support/hash"
"github.com/deatil/lakego-admin/lakego/support/random"
"github.com/deatil/lakego-admin/lakego/facade/database"
)
// ActionLog is the gorm model recording one admin request: its URL, HTTP
// method, extra info, user agent, timing value, client IP and result status.
type ActionLog struct {
	// ID is a 32-char md5 hex string; assigned in BeforeCreate.
	ID string `gorm:"column:id;type:char(32);not null;primaryKey;" json:"id"`
	Name string `gorm:"column:name;not null;type:varchar(250);" json:"name"`
	Url string `gorm:"column:url;type:text;" json:"url"`
	Method string `gorm:"column:method;type:varchar(10);" json:"method"`
	Info string `gorm:"column:info;type:text;" json:"info"`
	Useragent string `gorm:"column:useragent;type:text;" json:"useragent"`
	// Time's unit (duration vs timestamp) is not evident here — TODO confirm against the writer.
	Time int `gorm:"column:time;type:int(10);" json:"time"`
	Ip string `gorm:"column:ip;type:varchar(50);" json:"ip"`
	Status string `gorm:"column:status;type:char(3);" json:"status"`
}
// BeforeCreate is a gorm hook that assigns the primary key before insert:
// the md5 of the current unix timestamp concatenated with a 10-character
// random string.
func (record *ActionLog) BeforeCreate(tx *gorm.DB) error {
	seed := strconv.FormatInt(time.Now().Unix(), 10) + random.String(10)
	record.ID = hash.MD5(seed)
	return nil
}
// NewActionLog returns a *gorm.DB query builder already scoped to the
// ActionLog model.
func NewActionLog() *gorm.DB {
	return database.New().Model(&ActionLog{})
}
|
yslai/AliPhysics
|
PWGGA/Hyperon/AliAnalysisTaskSigma0Run2.cxx
|
#include "AliAnalysisTaskSigma0Run2.h"
#include "AliAnalysisManager.h"
#include "AliInputEventHandler.h"
#include "AliMCEvent.h"
#include "AliMultSelection.h"
#include "AliPIDResponse.h"
ClassImp(AliAnalysisTaskSigma0Run2)
//____________________________________________________________________________________________________
// Default constructor (required by ROOT I/O): every pointer member is
// null-initialized and no input/output slots are defined.
AliAnalysisTaskSigma0Run2::AliAnalysisTaskSigma0Run2()
    : AliAnalysisTaskSE("AliAnalysisTaskSigma0Run2"),
      fAliEventCuts(),
      fInputEvent(nullptr),
      fMCEvent(nullptr),
      fV0Reader(nullptr),
      fV0ReaderName("NoInit"),
      fV0Cuts(nullptr),
      fAntiV0Cuts(nullptr),
      fPhotonV0Cuts(nullptr),
      fSigmaCuts(nullptr),
      fAntiSigmaCuts(nullptr),
      fSigmaPhotonCuts(nullptr),
      fAntiSigmaPhotonCuts(nullptr),
      fIsMC(false),
      fIsHeavyIon(false),
      fIsLightweight(false),
      fV0PercentileMax(100.f),
      fTrigger(AliVEvent::kINT7),
      fGammaArray(nullptr),
      fOutputContainer(nullptr),
      fQA(nullptr),
      fHistCutQA(nullptr),
      fHistRunNumber(nullptr),
      fHistCutBooking(nullptr),
      fHistCentralityProfileBefore(nullptr),
      fHistCentralityProfileAfter(nullptr),
      fHistCentralityProfileCoarseAfter(nullptr),
      fHistTriggerBefore(nullptr),
      fHistTriggerAfter(nullptr) {}
//____________________________________________________________________________________________________
// Named constructor used by the add-task macro: identical member
// initialization to the default constructor, plus the analysis-train
// input (a TChain) and output (a TList) slot definitions.
AliAnalysisTaskSigma0Run2::AliAnalysisTaskSigma0Run2(const char *name)
    : AliAnalysisTaskSE(name),
      fAliEventCuts(),
      fInputEvent(nullptr),
      fMCEvent(nullptr),
      fV0Reader(nullptr),
      fV0ReaderName("NoInit"),
      fV0Cuts(nullptr),
      fAntiV0Cuts(nullptr),
      fPhotonV0Cuts(nullptr),
      fSigmaCuts(nullptr),
      fAntiSigmaCuts(nullptr),
      fSigmaPhotonCuts(nullptr),
      fAntiSigmaPhotonCuts(nullptr),
      fIsMC(false),
      fIsHeavyIon(false),
      fIsLightweight(false),
      fV0PercentileMax(100.f),
      fTrigger(AliVEvent::kINT7),
      fGammaArray(nullptr),
      fOutputContainer(nullptr),
      fQA(nullptr),
      fHistCutQA(nullptr),
      fHistRunNumber(nullptr),
      fHistCutBooking(nullptr),
      fHistCentralityProfileBefore(nullptr),
      fHistCentralityProfileAfter(nullptr),
      fHistCentralityProfileCoarseAfter(nullptr),
      fHistTriggerBefore(nullptr),
      fHistTriggerAfter(nullptr){
  DefineInput(0, TChain::Class());
  DefineOutput(1, TList::Class());
}
//____________________________________________________________________________________________________
// Per-event entry point: validates the configuration, applies the event
// selection, runs the (anti-)Lambda and photon V0 selections, combines them
// into Sigma0 candidates and posts the output list.
void AliAnalysisTaskSigma0Run2::UserExec(Option_t * /*option*/) {
  // Cache the current event in the data member. The original code declared
  // a LOCAL "AliVEvent *fInputEvent" here, which shadowed the class member
  // of the same name (it is initialized in the constructors) and left that
  // member permanently nullptr.
  fInputEvent = InputEvent();
  if (fIsMC) fMCEvent = MCEvent();

  // PREAMBLE - CHECK EVERYTHING IS THERE
  if (!fInputEvent) {
    AliError("No Input event");
    return;
  }
  if (fIsMC && !fMCEvent) {
    AliError("No MC event");
    return;
  }
  if (!fV0Cuts || !fAntiV0Cuts || !fPhotonV0Cuts) {
    AliError("V0 Cuts missing");
    return;
  }

  // The conversion-photon reader is a separate task, looked up by name.
  fV0Reader =
      (AliV0ReaderV1 *)AliAnalysisManager::GetAnalysisManager()->GetTask(
          fV0ReaderName.Data());
  if (!fV0Reader) {
    AliError("No V0 reader");
    return;
  }
  if (!fSigmaCuts || !fAntiSigmaCuts || !fSigmaPhotonCuts ||
      !fAntiSigmaPhotonCuts) {
    AliError("Sigma0 Cuts missing");
    return;
  }

  // EVENT SELECTION
  if (!AcceptEvent(fInputEvent)) return;

  // LAMBDA / ANTI-LAMBDA / PHOTON-LIKE V0 SELECTION
  fV0Cuts->SelectV0(fInputEvent, fMCEvent);
  fAntiV0Cuts->SelectV0(fInputEvent, fMCEvent);
  fPhotonV0Cuts->SelectV0(fInputEvent, fMCEvent);

  // PHOTON SELECTION
  fGammaArray = fV0Reader->GetReconstructedGammas();  // Gammas from default Cut
  std::vector<AliSigma0ParticleV0> gammaConvContainer;
  CastToVector(gammaConvContainer, fInputEvent);

  // Sigma0 selection: pair each photon sample with each (anti-)Lambda sample.
  fSigmaCuts->SelectPhotonMother(fInputEvent, fMCEvent, gammaConvContainer,
                                 fV0Cuts->GetV0s());
  fAntiSigmaCuts->SelectPhotonMother(fInputEvent, fMCEvent, gammaConvContainer,
                                     fAntiV0Cuts->GetV0s());
  fSigmaPhotonCuts->SelectPhotonMother(
      fInputEvent, fMCEvent, fPhotonV0Cuts->GetV0s(), fV0Cuts->GetV0s());
  fAntiSigmaPhotonCuts->SelectPhotonMother(
      fInputEvent, fMCEvent, fPhotonV0Cuts->GetV0s(), fAntiV0Cuts->GetV0s());

  // flush the data
  PostData(1, fOutputContainer);
}
//____________________________________________________________________________________________________
// Applies the event selection and fills the cut-flow / QA histograms.
// Returns true when the event passes AliEventCuts, the (optional)
// high-multiplicity percentile ceiling, and the conversion event cuts.
bool AliAnalysisTaskSigma0Run2::AcceptEvent(AliVEvent *event) {
  // QA before any selection.
  if (!fIsLightweight) {
    fHistRunNumber->Fill(0.f, event->GetRunNumber());
    FillTriggerHisto(fHistTriggerBefore);
  }
  fHistCutQA->Fill(0);

  // EVENT SELECTION via AliEventCuts.
  if (!fAliEventCuts.AcceptEvent(event)) return false;
  if (!fIsLightweight) {
    FillTriggerHisto(fHistTriggerAfter);
  }
  fHistCutQA->Fill(1);

  // V0M multiplicity percentile; 300 is an out-of-range sentinel that
  // survives when no AliMultSelection object is attached to the event.
  Float_t lPercentile = 300;
  AliMultSelection *MultSelection = 0x0;
  MultSelection = (AliMultSelection *)event->FindListObject("MultSelection");
  if (!MultSelection) {
    // If you get this warning (and lPercentiles 300) please check that the
    // AliMultSelectionTask actually ran (before your task)
    AliWarning("AliMultSelection object not found!");
  } else {
    lPercentile = MultSelection->GetMultiplicityPercentile("V0M");
  }
  if (!fIsLightweight) fHistCentralityProfileBefore->Fill(lPercentile);

  // MULTIPLICITY SELECTION - only applied for the high-multiplicity V0
  // trigger when a percentile ceiling below 100% was configured.
  if (fTrigger == AliVEvent::kHighMultV0 && fV0PercentileMax < 100.f) {
    if (lPercentile > fV0PercentileMax) return false;
    if (!fIsLightweight) {
      fHistCentralityProfileAfter->Fill(lPercentile);
    }
    fHistCutQA->Fill(2);
  }

  // Photon-conversion event selection taken from the V0 reader's cuts.
  bool isConversionEventSelected =
      ((AliConvEventCuts *)fV0Reader->GetEventCuts())
          ->EventIsSelected(event, static_cast<AliMCEvent *>(fMCEvent));
  if (!isConversionEventSelected) return false;
  if (!fIsLightweight) fHistCentralityProfileCoarseAfter->Fill(lPercentile);
  fHistCutQA->Fill(3);
  return true;
}
//____________________________________________________________________________________________________
// Converts the reader's TClonesArray of conversion photons (fGammaArray)
// into a vector of AliSigma0ParticleV0, optionally attaching MC-truth
// information in simulation.
void AliAnalysisTaskSigma0Run2::CastToVector(
    std::vector<AliSigma0ParticleV0> &container, const AliVEvent *inputEvent) {
  for (int iGamma = 0; iGamma < fGammaArray->GetEntriesFast(); ++iGamma) {
    auto *PhotonCandidate =
        dynamic_cast<AliAODConversionPhoton *>(fGammaArray->At(iGamma));
    if (!PhotonCandidate) continue;
    AliSigma0ParticleV0 phot(PhotonCandidate, inputEvent);
    if (fIsMC) {
      // Match to a generated photon (PDG 22) from an e+e- pair. The returned
      // label was previously stored in an unused local ("label"), triggering
      // an unused-variable warning; only the call's side effect is needed.
      phot.MatchToMC(fMCEvent, 22, {{11, -11}});
    }
    container.push_back(phot);
  }
}
//____________________________________________________________________________________________________
void AliAnalysisTaskSigma0Run2::UserCreateOutputObjects() {
if (fOutputContainer != nullptr) {
delete fOutputContainer;
fOutputContainer = nullptr;
}
if (fOutputContainer == nullptr) {
fOutputContainer = new TList();
fOutputContainer->SetOwner(kTRUE);
}
fQA = new TList();
fQA->SetName("EventCuts");
fQA->SetOwner(true);
if (fTrigger != AliVEvent::kINT7) {
fAliEventCuts.SetManualMode();
if (!fIsHeavyIon) fAliEventCuts.SetupRun2pp();
fAliEventCuts.fTriggerMask = fTrigger;
}
fV0Reader =
(AliV0ReaderV1 *)AliAnalysisManager::GetAnalysisManager()->GetTask(
fV0ReaderName.Data());
if (!fV0Reader) {
AliError("No V0 reader");
return;
}
if (fV0Reader->GetEventCuts() &&
fV0Reader->GetEventCuts()->GetCutHistograms()) {
fOutputContainer->Add(fV0Reader->GetEventCuts()->GetCutHistograms());
}
if (fV0Reader->GetConversionCuts() &&
fV0Reader->GetConversionCuts()->GetCutHistograms()) {
fOutputContainer->Add(fV0Reader->GetConversionCuts()->GetCutHistograms());
}
if (fV0Reader->GetProduceV0FindingEfficiency() &&
fV0Reader->GetV0FindingEfficiencyHistograms()) {
fOutputContainer->Add(fV0Reader->GetV0FindingEfficiencyHistograms());
}
if (fV0Reader->GetProduceImpactParamHistograms()) {
fOutputContainer->Add(fV0Reader->GetImpactParamHistograms());
}
fHistCutQA = new TH1F("fHistCutQA", ";;Entries", 5, 0, 5);
fHistCutQA->GetXaxis()->SetBinLabel(1, "Event");
fHistCutQA->GetXaxis()->SetBinLabel(2, "AliEventCuts");
fHistCutQA->GetXaxis()->SetBinLabel(3, "Multiplicity selection");
fHistCutQA->GetXaxis()->SetBinLabel(4, "AliConversionCuts");
fQA->Add(fHistCutQA);
if (!fIsLightweight) {
fHistRunNumber = new TProfile("fHistRunNumber", ";;Run Number", 1, 0, 1);
fQA->Add(fHistRunNumber);
fHistCutBooking = new TProfile("fHistCutBooking", ";;Cut value", 1, 0, 1);
fHistCutBooking->GetXaxis()->SetBinLabel(1, "V0 percentile");
fQA->Add(fHistCutBooking);
fHistCutBooking->Fill(0.f, fV0PercentileMax);
fAliEventCuts.AddQAplotsToList(fQA);
fHistCentralityProfileBefore =
new TH1F("fHistCentralityProfileBefore", "; V0 percentile (%); Entries",
1000, 0, 5);
fHistCentralityProfileAfter =
new TH1F("fHistCentralityProfileAfter", "; V0 percentile (%); Entries",
1000, 0, 5);
fHistCentralityProfileCoarseAfter =
new TH1F("fHistCentralityProfileCoarseAfter",
"; V0 percentile (%); Entries", 100, 0, 100);
fQA->Add(fHistCentralityProfileBefore);
fQA->Add(fHistCentralityProfileAfter);
fQA->Add(fHistCentralityProfileCoarseAfter);
fHistTriggerBefore = new TH1F("fHistTriggerBefore", ";;Entries", 50, 0, 50);
fHistTriggerBefore->GetXaxis()->LabelsOption("u");
fHistTriggerBefore->GetXaxis()->SetBinLabel(1, "kMB");
fHistTriggerBefore->GetXaxis()->SetBinLabel(2, "kINT1");
fHistTriggerBefore->GetXaxis()->SetBinLabel(3, "kINT7");
fHistTriggerBefore->GetXaxis()->SetBinLabel(4, "kMUON");
fHistTriggerBefore->GetXaxis()->SetBinLabel(5, "kHighMult");
fHistTriggerBefore->GetXaxis()->SetBinLabel(6, "kHighMultSPD");
fHistTriggerBefore->GetXaxis()->SetBinLabel(7, "kEMC1");
fHistTriggerBefore->GetXaxis()->SetBinLabel(8, "kCINT5");
fHistTriggerBefore->GetXaxis()->SetBinLabel(9, "kINT5");
fHistTriggerBefore->GetXaxis()->SetBinLabel(10, "kCMUS5");
fHistTriggerBefore->GetXaxis()->SetBinLabel(11, "kMUSPB");
fHistTriggerBefore->GetXaxis()->SetBinLabel(12, "kINT7inMUON");
fHistTriggerBefore->GetXaxis()->SetBinLabel(13, "kMuonSingleHighPt7");
fHistTriggerBefore->GetXaxis()->SetBinLabel(14, "kMUSH7");
fHistTriggerBefore->GetXaxis()->SetBinLabel(15, "kMUSHPB");
fHistTriggerBefore->GetXaxis()->SetBinLabel(16, "kMuonLikeLowPt7");
fHistTriggerBefore->GetXaxis()->SetBinLabel(17, "kMUL7");
fHistTriggerBefore->GetXaxis()->SetBinLabel(18, "kMuonLikePB");
fHistTriggerBefore->GetXaxis()->SetBinLabel(19, "kMuonUnlikeLowPt7");
fHistTriggerBefore->GetXaxis()->SetBinLabel(20, "kMUU7");
fHistTriggerBefore->GetXaxis()->SetBinLabel(21, "kMuonUnlikePB");
fHistTriggerBefore->GetXaxis()->SetBinLabel(22, "kEMC7");
fHistTriggerBefore->GetXaxis()->SetBinLabel(23, "kEMC8");
fHistTriggerBefore->GetXaxis()->SetBinLabel(24, "kMUS7");
fHistTriggerBefore->GetXaxis()->SetBinLabel(25, "kMuonSingleLowPt7");
fHistTriggerBefore->GetXaxis()->SetBinLabel(26, "kPHI1");
fHistTriggerBefore->GetXaxis()->SetBinLabel(27, "kPHI7");
fHistTriggerBefore->GetXaxis()->SetBinLabel(28, "kPHI8");
fHistTriggerBefore->GetXaxis()->SetBinLabel(29, "kPHOSPb");
fHistTriggerBefore->GetXaxis()->SetBinLabel(30, "kEMCEJE");
fHistTriggerBefore->GetXaxis()->SetBinLabel(31, "kEMCEGA");
fHistTriggerBefore->GetXaxis()->SetBinLabel(32, "kHighMultV0");
fHistTriggerBefore->GetXaxis()->SetBinLabel(33, "kCentral");
fHistTriggerBefore->GetXaxis()->SetBinLabel(34, "kSemiCentral");
fHistTriggerBefore->GetXaxis()->SetBinLabel(35, "kDG");
fHistTriggerBefore->GetXaxis()->SetBinLabel(36, "kDG5");
fHistTriggerBefore->GetXaxis()->SetBinLabel(37, "kZED");
fHistTriggerBefore->GetXaxis()->SetBinLabel(38, "kSPI7");
fHistTriggerBefore->GetXaxis()->SetBinLabel(39, "kSPI");
fHistTriggerBefore->GetXaxis()->SetBinLabel(40, "kINT8");
fHistTriggerBefore->GetXaxis()->SetBinLabel(41, "kMuonSingleLowPt8");
fHistTriggerBefore->GetXaxis()->SetBinLabel(42, "kMuonSingleHighPt8");
fHistTriggerBefore->GetXaxis()->SetBinLabel(43, "kMuonLikeLowPt8");
fHistTriggerBefore->GetXaxis()->SetBinLabel(44, "kMuonUnlikeLowPt8");
fHistTriggerBefore->GetXaxis()->SetBinLabel(45, "kMuonUnlikeLowPt0");
fHistTriggerBefore->GetXaxis()->SetBinLabel(46, "kUserDefined");
fHistTriggerBefore->GetXaxis()->SetBinLabel(47, "kTRD");
fHistTriggerBefore->GetXaxis()->SetBinLabel(48, "kFastOnly");
fHistTriggerBefore->GetXaxis()->SetBinLabel(49, "kAny");
fHistTriggerBefore->GetXaxis()->SetBinLabel(50, "kAnyINT");
fQA->Add(fHistTriggerBefore);
fHistTriggerAfter = new TH1F("fHistTriggerAfter", ";;Entries", 50, 0, 50);
fHistTriggerAfter->GetXaxis()->LabelsOption("u");
fHistTriggerAfter->GetXaxis()->SetBinLabel(1, "kMB");
fHistTriggerAfter->GetXaxis()->SetBinLabel(2, "kINT1");
fHistTriggerAfter->GetXaxis()->SetBinLabel(3, "kINT7");
fHistTriggerAfter->GetXaxis()->SetBinLabel(4, "kMUON");
fHistTriggerAfter->GetXaxis()->SetBinLabel(5, "kHighMult");
fHistTriggerAfter->GetXaxis()->SetBinLabel(6, "kHighMultSPD");
fHistTriggerAfter->GetXaxis()->SetBinLabel(7, "kEMC1");
fHistTriggerAfter->GetXaxis()->SetBinLabel(8, "kCINT5");
fHistTriggerAfter->GetXaxis()->SetBinLabel(9, "kINT5");
fHistTriggerAfter->GetXaxis()->SetBinLabel(10, "kCMUS5");
fHistTriggerAfter->GetXaxis()->SetBinLabel(11, "kMUSPB");
fHistTriggerAfter->GetXaxis()->SetBinLabel(12, "kINT7inMUON");
fHistTriggerAfter->GetXaxis()->SetBinLabel(13, "kMuonSingleHighPt7");
fHistTriggerAfter->GetXaxis()->SetBinLabel(14, "kMUSH7");
fHistTriggerAfter->GetXaxis()->SetBinLabel(15, "kMUSHPB");
fHistTriggerAfter->GetXaxis()->SetBinLabel(16, "kMuonLikeLowPt7");
fHistTriggerAfter->GetXaxis()->SetBinLabel(17, "kMUL7");
fHistTriggerAfter->GetXaxis()->SetBinLabel(18, "kMuonLikePB");
fHistTriggerAfter->GetXaxis()->SetBinLabel(19, "kMuonUnlikeLowPt7");
fHistTriggerAfter->GetXaxis()->SetBinLabel(20, "kMUU7");
fHistTriggerAfter->GetXaxis()->SetBinLabel(21, "kMuonUnlikePB");
fHistTriggerAfter->GetXaxis()->SetBinLabel(22, "kEMC7");
fHistTriggerAfter->GetXaxis()->SetBinLabel(23, "kEMC8");
fHistTriggerAfter->GetXaxis()->SetBinLabel(24, "kMUS7");
fHistTriggerAfter->GetXaxis()->SetBinLabel(25, "kMuonSingleLowPt7");
fHistTriggerAfter->GetXaxis()->SetBinLabel(26, "kPHI1");
fHistTriggerAfter->GetXaxis()->SetBinLabel(27, "kPHI7");
fHistTriggerAfter->GetXaxis()->SetBinLabel(28, "kPHI8");
fHistTriggerAfter->GetXaxis()->SetBinLabel(29, "kPHOSPb");
fHistTriggerAfter->GetXaxis()->SetBinLabel(30, "kEMCEJE");
fHistTriggerAfter->GetXaxis()->SetBinLabel(31, "kEMCEGA");
fHistTriggerAfter->GetXaxis()->SetBinLabel(32, "kHighMultV0");
fHistTriggerAfter->GetXaxis()->SetBinLabel(33, "kCentral");
fHistTriggerAfter->GetXaxis()->SetBinLabel(34, "kSemiCentral");
fHistTriggerAfter->GetXaxis()->SetBinLabel(35, "kDG");
fHistTriggerAfter->GetXaxis()->SetBinLabel(36, "kDG5");
fHistTriggerAfter->GetXaxis()->SetBinLabel(37, "kZED");
fHistTriggerAfter->GetXaxis()->SetBinLabel(38, "kSPI7");
fHistTriggerAfter->GetXaxis()->SetBinLabel(39, "kSPI");
fHistTriggerAfter->GetXaxis()->SetBinLabel(40, "kINT8");
fHistTriggerAfter->GetXaxis()->SetBinLabel(41, "kMuonSingleLowPt8");
fHistTriggerAfter->GetXaxis()->SetBinLabel(42, "kMuonSingleHighPt8");
fHistTriggerAfter->GetXaxis()->SetBinLabel(43, "kMuonLikeLowPt8");
fHistTriggerAfter->GetXaxis()->SetBinLabel(44, "kMuonUnlikeLowPt8");
fHistTriggerAfter->GetXaxis()->SetBinLabel(45, "kMuonUnlikeLowPt0");
fHistTriggerAfter->GetXaxis()->SetBinLabel(46, "kUserDefined");
fHistTriggerAfter->GetXaxis()->SetBinLabel(47, "kTRD");
fHistTriggerAfter->GetXaxis()->SetBinLabel(48, "kFastOnly");
fHistTriggerAfter->GetXaxis()->SetBinLabel(49, "kAny");
fHistTriggerAfter->GetXaxis()->SetBinLabel(50, "kAnyINT");
fQA->Add(fHistTriggerAfter);
}
fOutputContainer->Add(fQA);
if (fV0Cuts) fV0Cuts->InitCutHistograms(TString("Lambda"));
if (fAntiV0Cuts) fAntiV0Cuts->InitCutHistograms(TString("AntiLambda"));
if (fPhotonV0Cuts) fPhotonV0Cuts->InitCutHistograms(TString("Photon"));
if (fSigmaCuts) fSigmaCuts->InitCutHistograms(TString("Sigma0"));
if (fAntiSigmaCuts) fAntiSigmaCuts->InitCutHistograms(TString("AntiSigma0"));
if (fSigmaPhotonCuts)
fSigmaPhotonCuts->InitCutHistograms(TString("Sigma0Photon"));
if (fAntiSigmaPhotonCuts)
fAntiSigmaPhotonCuts->InitCutHistograms(TString("AntiSigma0Photon"));
if (fV0Cuts && fV0Cuts->GetCutHistograms()) {
fOutputContainer->Add(fV0Cuts->GetCutHistograms());
}
if (fAntiV0Cuts && fAntiV0Cuts->GetCutHistograms()) {
fOutputContainer->Add(fAntiV0Cuts->GetCutHistograms());
}
if (fPhotonV0Cuts && fPhotonV0Cuts->GetCutHistograms()) {
fOutputContainer->Add(fPhotonV0Cuts->GetCutHistograms());
}
if (fSigmaCuts && fSigmaCuts->GetCutHistograms()) {
fOutputContainer->Add(fSigmaCuts->GetCutHistograms());
}
if (fAntiSigmaCuts && fAntiSigmaCuts->GetCutHistograms()) {
fOutputContainer->Add(fAntiSigmaCuts->GetCutHistograms());
}
if (fSigmaPhotonCuts && fSigmaPhotonCuts->GetCutHistograms()) {
fOutputContainer->Add(fSigmaPhotonCuts->GetCutHistograms());
}
if (fAntiSigmaPhotonCuts && fAntiSigmaPhotonCuts->GetCutHistograms()) {
fOutputContainer->Add(fAntiSigmaPhotonCuts->GetCutHistograms());
}
PostData(1, fOutputContainer);
}
//____________________________________________________________________________________________________
// Fills one entry per fired offline trigger class into the 50-bin trigger
// QA histogram. Table-driven replacement for 50 copy-pasted if-statements:
// entry i is filled at value i, matching bin label i+1 as set in
// UserCreateOutputObjects().
void AliAnalysisTaskSigma0Run2::FillTriggerHisto(TH1F *histo) {
  static const ULong64_t triggerMasks[50] = {
      AliVEvent::kMB,
      AliVEvent::kINT1,
      AliVEvent::kINT7,
      AliVEvent::kMUON,
      AliVEvent::kHighMult,
      AliVEvent::kHighMultSPD,
      AliVEvent::kEMC1,
      AliVEvent::kCINT5,
      AliVEvent::kINT5,
      AliVEvent::kCMUS5,
      AliVEvent::kMUSPB,
      AliVEvent::kINT7inMUON,
      AliVEvent::kMuonSingleHighPt7,
      AliVEvent::kMUSH7,
      AliVEvent::kMUSHPB,
      AliVEvent::kMuonLikeLowPt7,
      AliVEvent::kMUL7,
      AliVEvent::kMuonLikePB,
      AliVEvent::kMuonUnlikeLowPt7,
      AliVEvent::kMUU7,
      AliVEvent::kMuonUnlikePB,
      AliVEvent::kEMC7,
      AliVEvent::kEMC8,
      AliVEvent::kMUS7,
      AliVEvent::kMuonSingleLowPt7,
      AliVEvent::kPHI1,
      AliVEvent::kPHI7,
      AliVEvent::kPHI8,
      AliVEvent::kPHOSPb,
      AliVEvent::kEMCEJE,
      AliVEvent::kEMCEGA,
      AliVEvent::kHighMultV0,
      AliVEvent::kCentral,
      AliVEvent::kSemiCentral,
      AliVEvent::kDG,
      AliVEvent::kDG5,
      AliVEvent::kZED,
      AliVEvent::kSPI7,
      AliVEvent::kSPI,
      AliVEvent::kINT8,
      AliVEvent::kMuonSingleLowPt8,
      AliVEvent::kMuonSingleHighPt8,
      AliVEvent::kMuonLikeLowPt8,
      AliVEvent::kMuonUnlikeLowPt8,
      AliVEvent::kMuonUnlikeLowPt0,
      AliVEvent::kUserDefined,
      AliVEvent::kTRD,
      AliVEvent::kFastOnly,
      AliVEvent::kAny,
      AliVEvent::kAnyINT};
  // The trigger mask does not change between the checks, so query it once.
  const ULong64_t selected = fInputHandler->IsEventSelected();
  for (int bin = 0; bin < 50; ++bin) {
    if (selected & triggerMasks[bin]) histo->Fill(bin);
  }
}
|
jainsakshi2395/linux
|
drivers/pci/controller/mobiveil/pcie-mobiveil.c
|
<filename>drivers/pci/controller/mobiveil/pcie-mobiveil.c<gh_stars>10-100
// SPDX-License-Identifier: GPL-2.0
/*
* PCIe host controller driver for Mobiveil PCIe Host controller
*
* Copyright (c) 2018 Mobiveil Inc.
* Copyright 2019 NXP
*
* Author: <NAME> <<EMAIL>>
* <NAME> <<EMAIL>>
*/
#include <linux/delay.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/pci.h>
#include <linux/platform_device.h>
#include "pcie-mobiveil.h"
/*
 * mobiveil_pcie_sel_page - routine to access paged register
 *
 * Registers whose address greater than PAGED_ADDR_BNDRY (0xc00) are paged,
 * for this scheme to work extracted higher 6 bits of the offset will be
 * written to pg_sel field of PAB_CTRL register and rest of the lower 10
 * bits enabled with PAGED_ADDR_BNDRY are used as offset of the register.
 */
static void mobiveil_pcie_sel_page(struct mobiveil_pcie *pcie, u8 pg_idx)
{
	u32 ctrl;

	/* Read-modify-write the pg_sel field of PAB_CTRL. */
	ctrl = readl(pcie->csr_axi_slave_base + PAB_CTRL);
	ctrl &= ~(PAGE_SEL_MASK << PAGE_SEL_SHIFT);
	ctrl |= (pg_idx & PAGE_SEL_MASK) << PAGE_SEL_SHIFT;
	writel(ctrl, pcie->csr_axi_slave_base + PAB_CTRL);
}
/* Translate a CSR offset into a mapped address, selecting the proper page
 * first when the offset lies in the paged region.
 */
static void __iomem *mobiveil_pcie_comp_addr(struct mobiveil_pcie *pcie,
					     u32 off)
{
	if (off >= PAGED_ADDR_BNDRY) {
		/* Paged register: program pg_sel, then use the in-page offset. */
		mobiveil_pcie_sel_page(pcie, OFFSET_TO_PAGE_IDX(off));
		return pcie->csr_axi_slave_base + OFFSET_TO_PAGE_ADDR(off);
	}

	/* For directly accessed registers, clear the pg_sel field */
	mobiveil_pcie_sel_page(pcie, 0);
	return pcie->csr_axi_slave_base + off;
}
/* MMIO read of 1/2/4 bytes; rejects unaligned addresses and unsupported
 * sizes with PCIBIOS_BAD_REGISTER_NUMBER (and *val set to 0).
 */
static int mobiveil_pcie_read(void __iomem *addr, int size, u32 *val)
{
	/* The access must be naturally aligned for its size. */
	if ((uintptr_t)addr & (size - 1)) {
		*val = 0;
		return PCIBIOS_BAD_REGISTER_NUMBER;
	}

	if (size == 4) {
		*val = readl(addr);
	} else if (size == 2) {
		*val = readw(addr);
	} else if (size == 1) {
		*val = readb(addr);
	} else {
		*val = 0;
		return PCIBIOS_BAD_REGISTER_NUMBER;
	}

	return PCIBIOS_SUCCESSFUL;
}
/* MMIO write of 1/2/4 bytes; rejects unaligned addresses and unsupported
 * sizes with PCIBIOS_BAD_REGISTER_NUMBER.
 */
static int mobiveil_pcie_write(void __iomem *addr, int size, u32 val)
{
	/* The access must be naturally aligned for its size. */
	if ((uintptr_t)addr & (size - 1))
		return PCIBIOS_BAD_REGISTER_NUMBER;

	if (size == 4)
		writel(val, addr);
	else if (size == 2)
		writew(val, addr);
	else if (size == 1)
		writeb(val, addr);
	else
		return PCIBIOS_BAD_REGISTER_NUMBER;

	return PCIBIOS_SUCCESSFUL;
}
u32 mobiveil_csr_read(struct mobiveil_pcie *pcie, u32 off, size_t size)
{
void __iomem *addr;
u32 val;
int ret;
addr = mobiveil_pcie_comp_addr(pcie, off);
ret = mobiveil_pcie_read(addr, size, &val);
if (ret)
dev_err(&pcie->pdev->dev, "read CSR address failed\n");
return val;
}
/* Write a (possibly paged) CSR; failures are only logged. */
void mobiveil_csr_write(struct mobiveil_pcie *pcie, u32 val, u32 off,
			size_t size)
{
	void __iomem *reg = mobiveil_pcie_comp_addr(pcie, off);

	if (mobiveil_pcie_write(reg, size, val))
		dev_err(&pcie->pdev->dev, "write CSR address failed\n");
}
/* Returns true when the link is up: either via the platform-specific
 * link_up() hook when one is provided, or by checking that the LTSSM has
 * reached the L0 state.
 */
bool mobiveil_pcie_link_up(struct mobiveil_pcie *pcie)
{
	if (pcie->ops->link_up)
		return pcie->ops->link_up(pcie);
	return (mobiveil_csr_readl(pcie, LTSSM_STATUS) &
		LTSSM_STATUS_L0_MASK) == LTSSM_STATUS_L0;
}
/* Program inbound (PEX->AXI) address translation window 'win_num':
 * enable + type + size mask in PAB_PEX_AMAP_CTRL, then the 64-bit size,
 * CPU (AXI) base and PCI (PEX) base split across the low/high registers.
 */
void program_ib_windows(struct mobiveil_pcie *pcie, int win_num,
			u64 cpu_addr, u64 pci_addr, u32 type, u64 size)
{
	u32 value;
	u64 size64 = ~(size - 1);	/* size encoded as an address mask */

	if (win_num >= pcie->ppio_wins) {
		dev_err(&pcie->pdev->dev,
			"ERROR: max inbound windows reached !\n");
		return;
	}

	value = mobiveil_csr_readl(pcie, PAB_PEX_AMAP_CTRL(win_num));
	value &= ~(AMAP_CTRL_TYPE_MASK << AMAP_CTRL_TYPE_SHIFT | WIN_SIZE_MASK);
	value |= type << AMAP_CTRL_TYPE_SHIFT | 1 << AMAP_CTRL_EN_SHIFT |
		 (lower_32_bits(size64) & WIN_SIZE_MASK);
	mobiveil_csr_writel(pcie, value, PAB_PEX_AMAP_CTRL(win_num));

	mobiveil_csr_writel(pcie, upper_32_bits(size64),
			    PAB_EXT_PEX_AMAP_SIZEN(win_num));

	mobiveil_csr_writel(pcie, lower_32_bits(cpu_addr),
			    PAB_PEX_AMAP_AXI_WIN(win_num));
	mobiveil_csr_writel(pcie, upper_32_bits(cpu_addr),
			    PAB_EXT_PEX_AMAP_AXI_WIN(win_num));

	mobiveil_csr_writel(pcie, lower_32_bits(pci_addr),
			    PAB_PEX_AMAP_PEX_WIN_L(win_num));
	mobiveil_csr_writel(pcie, upper_32_bits(pci_addr),
			    PAB_PEX_AMAP_PEX_WIN_H(win_num));

	pcie->ib_wins_configured++;
}
/*
 * routine to program the outbound windows
 *
 * Programs outbound (AXI->PEX) translation window 'win_num' analogously to
 * program_ib_windows(): control/size in PAB_AXI_AMAP_CTRL, then the 64-bit
 * size, AXI base (aligned down) and PEX base in their low/high registers.
 */
void program_ob_windows(struct mobiveil_pcie *pcie, int win_num,
			u64 cpu_addr, u64 pci_addr, u32 type, u64 size)
{
	u32 value;
	u64 size64 = ~(size - 1);	/* size encoded as an address mask */

	if (win_num >= pcie->apio_wins) {
		dev_err(&pcie->pdev->dev,
			"ERROR: max outbound windows reached !\n");
		return;
	}

	/*
	 * program Enable Bit to 1, Type Bit to (00) base 2, AXI Window Size Bit
	 * to 4 KB in PAB_AXI_AMAP_CTRL register
	 */
	value = mobiveil_csr_readl(pcie, PAB_AXI_AMAP_CTRL(win_num));
	value &= ~(WIN_TYPE_MASK << WIN_TYPE_SHIFT | WIN_SIZE_MASK);
	value |= 1 << WIN_ENABLE_SHIFT | type << WIN_TYPE_SHIFT |
		 (lower_32_bits(size64) & WIN_SIZE_MASK);
	mobiveil_csr_writel(pcie, value, PAB_AXI_AMAP_CTRL(win_num));

	mobiveil_csr_writel(pcie, upper_32_bits(size64),
			    PAB_EXT_AXI_AMAP_SIZE(win_num));

	/*
	 * program AXI window base with appropriate value in
	 * PAB_AXI_AMAP_AXI_WIN0 register
	 */
	mobiveil_csr_writel(pcie,
			    lower_32_bits(cpu_addr) & (~AXI_WINDOW_ALIGN_MASK),
			    PAB_AXI_AMAP_AXI_WIN(win_num));
	mobiveil_csr_writel(pcie, upper_32_bits(cpu_addr),
			    PAB_EXT_AXI_AMAP_AXI_WIN(win_num));

	mobiveil_csr_writel(pcie, lower_32_bits(pci_addr),
			    PAB_AXI_AMAP_PEX_WIN_L(win_num));
	mobiveil_csr_writel(pcie, upper_32_bits(pci_addr),
			    PAB_AXI_AMAP_PEX_WIN_H(win_num));

	pcie->ob_wins_configured++;
}
/*
 * Poll until the link trains up or the retry budget is exhausted.
 * Returns 0 on success, -ETIMEDOUT if the link never came up.
 */
int mobiveil_bringup_link(struct mobiveil_pcie *pcie)
{
	int attempt = 0;

	while (attempt++ < LINK_WAIT_MAX_RETRIES) {
		if (mobiveil_pcie_link_up(pcie))
			return 0;
		usleep_range(LINK_WAIT_MIN, LINK_WAIT_MAX);
	}

	dev_err(&pcie->pdev->dev, "link never came up\n");
	return -ETIMEDOUT;
}
|
extcpp/base
|
examples/scope_guard.cpp
|
<reponame>extcpp/base
// Copyright - 2020 - <NAME> <<EMAIL>>
// Please see LICENSE.md for license or visit https://github.com/extcpp/basics
#include <ext/util/scope_guard.hpp>
#include <iostream>
#include <stdexcept>
// Plain free function: demonstrates guarding a function / function pointer.
void fun() {
    std::cout << "fun" << std::endl;
}
// Callable object (functor): demonstrates guarding a class-type callable.
struct functor {
    void operator()() {
        std::cout << "functor " << std::endl;
    };
};
int main() {
    // Each of these creates an unnamed temporary scope_guard, which is
    // destroyed at the end of the full expression — so the callback fires
    // immediately.  NOTE(review): presumably intentional for this demo to
    // show all accepted callable kinds; confirm against the library docs.
    ::ext::util::scope_guard{fun};
    ::ext::util::scope_guard{&fun};
    auto lambda_fun = []() {
        std::cout << "lambda_fun" << std::endl;
    };
    ::ext::util::scope_guard{lambda_fun};
    ::ext::util::scope_guard{std::move(lambda_fun)};
    functor func;
    ::ext::util::scope_guard{func};
    ::ext::util::scope_guard{std::move(func)};
    std::cout << std::endl;
    // Scope 1: an exception escapes, so exit + fail callbacks run,
    // success does not.
    try {
        std::cout << "enter scope 1" << std::endl;
        EXT_SCOPE_EXIT {
            std::cout << "exit callback" << std::endl;
        };
        EXT_SCOPE_SUCCESS {
            std::cout << "success callback" << std::endl;
        };
        EXT_SCOPE_FAIL {
            std::cout << "fail callback" << std::endl;
        };
        throw std::logic_error("error for example");
        std::cout << "exit scope 1" << std::endl;  // unreachable: the throw above always fires
    } catch (const std::exception& e) {
        std::cout << "caught - " << e.what() << std::endl;
    }
    std::cout << std::endl;
    // Scope 2: no exception in the scope body, but the success callback
    // itself throws; that exception is caught below.
    try {
        std::cout << "enter scope 2" << std::endl;
        EXT_SCOPE_EXIT {
            std::cout << "exit callback" << std::endl;
        };
        EXT_SCOPE_SUCCESS {
            std::cout << "success callback" << std::endl;
            throw std::logic_error("throw during success callback");
        };
        EXT_SCOPE_FAIL {
            std::cout << "fail callback" << std::endl;
        };
        std::cout << "exit scope 2" << std::endl;
    } catch (const std::exception& e) {
        std::cout << "caught - " << e.what() << std::endl;
    }
    std::cout << std::endl;
    // Scope 3: the exit callback throws during guard destruction —
    // per the comment below this terminates the program (throwing from a
    // destructor), so control never reaches the catch or the return.
    try {
        std::cout << "enter scope 3" << std::endl;
        // will result in termination
        EXT_SCOPE_EXIT {
            std::cout << "exit callback" << std::endl;
            throw std::logic_error("throw during exit callback");
        };
        EXT_SCOPE_SUCCESS {
            std::cout << "success callback" << std::endl;
        };
        EXT_SCOPE_FAIL {
            std::cout << "fail callback" << std::endl;
        };
        std::cout << "exit scope 3" << std::endl;
    } catch (const std::exception& e) {
        std::cout << "caught - " << e.what() << std::endl;
    }
    std::cout << std::endl;
    return 0;
}
|
chanakawickramasinghe/fitness-life
|
dashboard/src/utils/time.js
|
<reponame>chanakawickramasinghe/fitness-life
/**
 * Computes the next available time slot, 30 minutes after the given time.
 *
 * Fixes: the old implementation did not zero-pad minutes, producing
 * strings like "10:5" instead of "10:05", and round-tripped through
 * fractional hours unnecessarily.
 *
 * @param {string} time - time in "HH:MM" (or "H:M") format
 * @returns {string} the time 30 minutes later, minutes zero-padded
 */
function timeCal(time){
    let parts = time.split(':');
    // Total minutes since midnight, plus the 30-minute slot offset.
    let mins = (+parts[0]) * 60 + (+parts[1]) + 30;
    // Integer hour/minute split — no floating point involved.
    let rhours = Math.floor(mins / 60); // NOTE(review): hours are not wrapped at 24; confirm intended
    let rminutes = mins % 60;
    // Zero-pad minutes so e.g. "10:05" is not rendered as "10:5".
    return rhours + ":" + String(rminutes).padStart(2, '0');
}
export default timeCal;
|
dvdciri/DeepImagePreview-Project
|
app/src/main/java/com/davidecirillo/menupreview/base/FragmentTabAdapter.java
|
package com.davidecirillo.menupreview.base;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentPagerAdapter;
import java.util.ArrayList;
import java.util.List;
/**
 * Pager adapter backed by a simple in-memory list of fragments.
 * Fragments are shown in the order they were added.
 */
public class FragmentTabAdapter extends FragmentPagerAdapter {

    /** Fragments in display order; never null. */
    private final List<Fragment> fragments = new ArrayList<>();

    public FragmentTabAdapter(FragmentManager fm) {
        super(fm);
    }

    /** Appends a fragment as the last page of the pager. */
    public void addFragment(Fragment fragment) {
        fragments.add(fragment);
    }

    /**
     * Return the Fragment associated with a specified position.
     *
     * @param position zero-based page index
     */
    @Override
    public Fragment getItem(int position) {
        return fragments.get(position);
    }

    /**
     * Return the number of views available.
     */
    @Override
    public int getCount() {
        return fragments.size();
    }
}
|
e0543860/tp
|
src/main/java/dash/logic/commands/personcommand/DeletePersonCommand.java
|
package dash.logic.commands.personcommand;
import static java.util.Objects.requireNonNull;
import java.util.List;
import dash.commons.core.Messages;
import dash.commons.core.index.Index;
import dash.logic.commands.Command;
import dash.logic.commands.CommandResult;
import dash.logic.commands.exceptions.CommandException;
import dash.model.Model;
import dash.model.person.Person;
/**
* Deletes a person identified using it's displayed index from the address book.
*/
/**
 * Deletes a person identified using it's displayed index from the address book.
 */
public class DeletePersonCommand extends Command {

    public static final String COMMAND_WORD = "delete";

    public static final String MESSAGE_USAGE = "Format: " + COMMAND_WORD
            + " INDEX\n"
            + "Example: " + COMMAND_WORD + " 2";

    public static final String MESSAGE_DELETE_PERSON_SUCCESS = "Deleted Person: %1$s";

    /** One-based displayed index of the person to delete. */
    private final Index targetIndex;

    public DeletePersonCommand(Index targetIndex) {
        this.targetIndex = targetIndex;
    }

    @Override
    public CommandResult execute(Model model) throws CommandException {
        requireNonNull(model);
        List<Person> visiblePeople = model.getFilteredPersonList();

        // Guard: the index must refer to a person currently displayed.
        if (targetIndex.getZeroBased() >= visiblePeople.size()) {
            throw new CommandException(Messages.MESSAGE_INVALID_PERSON_DISPLAYED_INDEX);
        }

        Person target = visiblePeople.get(targetIndex.getZeroBased());
        // Remove the person from any tasks first, then from the address book.
        model.deletePeopleFromTasks(target);
        model.deletePerson(target);
        return new CommandResult(String.format(MESSAGE_DELETE_PERSON_SUCCESS, target));
    }

    @Override
    public boolean equals(Object other) {
        if (other == this) {
            return true; // short circuit if same object
        }
        if (!(other instanceof DeletePersonCommand)) {
            return false; // instanceof handles nulls
        }
        // state check
        return targetIndex.equals(((DeletePersonCommand) other).targetIndex);
    }
}
|
gustavodsf/js_projects
|
ps_bus/server/app/router.js
|
'use strict';
// router.js
// Attach the routes to the App
const index = require('./routes/index');
const users = require('./routes/users.router');
const workload = require('./routes/workload.router');
const model = require('./routes/model.router');
/**
* This method configure the routes subpaths to the application
* @method function
* @param {express()} app The app generated by calling the express() function
* @return {void}
*/
const route = function(app) {
// configure the routes
app.use('/', index);
app.use('/workloads', workload(app));
app.use('/users', users(app));
app.use('/model', model(app));
};
module.exports = {
configure: route
};
|
Mafioso/happ-backend
|
happ/auth/urls.py
|
from django.conf.urls import url
from rest_framework_jwt.views import obtain_jwt_token, refresh_jwt_token
from .views import (
FacebookLogin,
UserRegister,
FacebookUserRegister,
PasswordChange,
PasswordReset,
PasswordResetConfirm,
EmailConfirmationRequest,
EmailConfirmation,
AdminLogin,
)
# URL routes for the auth app.
#
# Fix: Django's url() matches with re.search, so patterns without a leading
# '^' also match any URL that merely *ends* with the pattern (e.g.
# r'login/$' would match '/xlogin/').  Anchor every pattern with '^'.
urlpatterns = [
    url(r'^admin/login/$', AdminLogin.as_view(), name='admin-login-api'),
    url(r'^login/$', obtain_jwt_token, name='login'),
    url(r'^login/facebook/$', FacebookLogin.as_view(), name='facebook-login'),
    url(r'^refresh/$', refresh_jwt_token, name='refresh'),
    url(r'^register/$', UserRegister.as_view(), name='register'),
    url(r'^register/facebook/$', FacebookUserRegister.as_view(), name='facebook-register'),
    url(r'^password/change/$', PasswordChange.as_view(), name='password-change'),
    url(r'^password/reset/$', PasswordReset.as_view(), name='password-reset'),
    url(r'^password/reset/confirm/$', PasswordResetConfirm.as_view(), name='password-reset-confirm'),
    url(r'^email/confirm/request/$', EmailConfirmationRequest.as_view(), name='email-confirm-request'),
    url(r'^email/confirm/$', EmailConfirmation.as_view(), name='email-confirm'),
]
|
hanpengfei/beilu-android-open-project
|
app/src/main/java/com/scj/beilu/app/ui/mine/MineAccountSettingFrag.java
|
<gh_stars>0
package com.scj.beilu.app.ui.mine;
import android.content.Context;
import android.content.SharedPreferences;
import android.os.Bundle;
import androidx.annotation.Nullable;
import android.view.View;
import com.mx.pro.lib.mvp.network.config.AppConfig;
import com.scj.beilu.app.R;
import com.scj.beilu.app.base.BaseMvpFragment;
import com.scj.beilu.app.mvp.mine.MineAccountPre;
import com.scj.beilu.app.util.ToastUtils;
import com.scj.beilu.app.widget.ItemLayout;
/**
* @author Mingxun
* @time on 2019/4/12 19:51
*/
/**
 * Account-settings screen: shows the app cache size (with a tap-to-clear
 * item) and an entry point to account management, which requires a stored
 * login token.
 *
 * @author Mingxun
 * @time on 2019/4/12 19:51
 */
public class MineAccountSettingFrag extends BaseMvpFragment<MineAccountPre.MineAccountView, MineAccountPre>
        implements MineAccountPre.MineAccountView, View.OnClickListener {
    // Row items: cache size (clears cache on tap) and account management.
    private ItemLayout mItemLayoutCache, mItemLayoutAccount;
    @Override
    public MineAccountPre createPresenter() {
        return new MineAccountPre(mFragmentActivity);
    }
    @Nullable
    @Override
    public int getLayout() {
        return R.layout.frag_account_setting;
    }
    @Override
    public void initView() {
        super.initView();
        mItemLayoutCache = findViewById(R.id.item_val_cache);
        mItemLayoutAccount = findViewById(R.id.item_account);
        mItemLayoutCache.setOnClickListener(this);
        mItemLayoutAccount.setOnClickListener(this);
    }
    @Override
    public void onLazyInitView(@Nullable Bundle savedInstanceState) {
        super.onLazyInitView(savedInstanceState);
        // Defer the cache-size query until the fragment is actually visible.
        getPresenter().getCacheSize();
    }
    @Override
    public void onCacheSize(String cacheSize) {
        // Presenter callback: show the formatted cache size on the row.
        mItemLayoutCache.setRightText(cacheSize);
    }
    // Lazily-opened shared prefs and the cached auth token read from them.
    private SharedPreferences mSharedPreferences;
    private String mToken;
    /** Loads the stored auth token; returns true when NO token is present. */
    private boolean getToken() {
        if (mSharedPreferences == null) {
            mSharedPreferences = mFragmentActivity.getApplicationContext().getSharedPreferences(AppConfig.PREFS_NAME, Context.MODE_PRIVATE);
        }
        mToken = mSharedPreferences.getString(AppConfig.USER_TOKEN, null);
        return mToken == null;
    }
    @Override
    public void onClick(View v) {
        switch (v.getId()) {
            case R.id.item_val_cache:
                getPresenter().clearCache();
                break;
            case R.id.item_account:
                // Account management needs a logged-in user; otherwise warn.
                if (getToken()) {
                    ToastUtils.showToast(mFragmentActivity, "你当前没有登录任何账号");
                } else {
                    start(new MineAccountManagerFrag());
                }
                break;
        }
    }
}
|
SSouik/pyutil
|
tests/string/endswith_test.py
|
<reponame>SSouik/pyutil
import pytest
from pyutil import endswith
# Empty inputs: endswith treats an empty string or empty target as False.
def test_endswith_when_string_is_empty():
    actual = endswith("", "foo")
    expected = False
    assert actual == expected
def test_endswith_when_target_is_empty():
    actual = endswith("foo", "")
    expected = False
    assert actual == expected
# Positive cases: the string genuinely ends with the target suffix.
def test_endswith_when_string_ends_with_target():
    actual = endswith("foo", "foo")
    expected = True
    assert actual == expected
def test_endswith_when_string_ends_with_target_2():
    actual = endswith("foobar", "bar")
    expected = True
    assert actual == expected
def test_endswith_when_string_ends_with_target_3():
    actual = endswith("abc123", "3")
    expected = True
    assert actual == expected
def test_endswith_when_string_ends_with_target_4():
    actual = endswith("abc123", "123")
    expected = True
    assert actual == expected
def test_endswith_when_string_ends_with_target_5():
    # A string ends with itself.
    actual = endswith("foobar", "foobar")
    expected = True
    assert actual == expected
# Negative cases: target absent, or present only as a prefix/infix.
def test_endswith_when_string_does_not_end_with_target():
    actual = endswith("foo", "123")
    expected = False
    assert actual == expected
def test_endswith_when_string_does_not_end_with_target_2():
    actual = endswith("foo", "f")
    expected = False
    assert actual == expected
def test_endswith_when_string_does_not_end_with_target_3():
    actual = endswith("foo", "fo")
    expected = False
    assert actual == expected
def test_endswith_when_string_does_not_end_with_target_4():
    actual = endswith("abc123", "13")
    expected = False
    assert actual == expected
def test_endswith_when_string_does_not_end_with_target_5():
    actual = endswith("abc123", "bar")
    expected = False
    assert actual == expected
# Type validation: non-string arguments must raise TypeError.
def test_endswith_when_string_is_not_string():
    with pytest.raises(TypeError):
        endswith(123, "foo")
def test_endswith_when_target_is_not_string():
    with pytest.raises(TypeError):
        endswith("123", 123)
|
rnorth/testpackage-containers
|
modules/cassandra/src/main/java/org/testcontainers/containers/CassandraContainer.java
|
package org.testcontainers.containers;
import com.datastax.driver.core.Cluster;
import com.github.dockerjava.api.command.InspectContainerResponse;
import org.apache.commons.io.IOUtils;
import org.testcontainers.containers.delegate.CassandraDatabaseDelegate;
import org.testcontainers.delegate.DatabaseDelegate;
import org.testcontainers.ext.ScriptUtils;
import org.testcontainers.ext.ScriptUtils.ScriptLoadException;
import org.testcontainers.utility.DockerImageName;
import org.testcontainers.utility.MountableFile;
import javax.script.ScriptException;
import java.io.IOException;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.Optional;
/**
* Cassandra container
*
* Supports 2.x and 3.x Cassandra versions
*
* @author <NAME>
*/
public class CassandraContainer<SELF extends CassandraContainer<SELF>> extends GenericContainer<SELF> {

    private static final DockerImageName DEFAULT_IMAGE_NAME = DockerImageName.parse("cassandra");

    private static final String DEFAULT_TAG = "3.11.2";

    @Deprecated
    public static final String IMAGE = DEFAULT_IMAGE_NAME.getUnversionedPart();

    // Standard CQL native-protocol port exposed by the container.
    public static final Integer CQL_PORT = 9042;

    private static final String CONTAINER_CONFIG_LOCATION = "/etc/cassandra";

    private static final String USERNAME = "cassandra";
    // NOTE(review): literal looks like a redaction placeholder; upstream
    // Cassandra's default password is "cassandra" — confirm intended value.
    private static final String PASSWORD = "<PASSWORD>";

    // Optional classpath directory that replaces /etc/cassandra (may be null).
    private String configLocation;
    // Optional classpath CQL script run after startup (may be null).
    private String initScriptPath;
    // Whether Cluster objects built by getCluster() report metrics over JMX.
    private boolean enableJmxReporting;

    /**
     * @deprecated use {@link #CassandraContainer(DockerImageName)} instead
     */
    @Deprecated
    public CassandraContainer() {
        this(DEFAULT_IMAGE_NAME.withTag(DEFAULT_TAG));
    }

    public CassandraContainer(String dockerImageName) {
        this(DockerImageName.parse(dockerImageName));
    }

    public CassandraContainer(DockerImageName dockerImageName) {
        super(dockerImageName);
        dockerImageName.assertCompatibleWith(DEFAULT_IMAGE_NAME);

        addExposedPort(CQL_PORT);
        this.enableJmxReporting = false;

        // Environment tuned for fast single-node test startup:
        // skip gossip settling and pin the initial token / heap sizes.
        withEnv("CASSANDRA_SNITCH", "GossipingPropertyFileSnitch");
        withEnv(
            "JVM_OPTS",
            "-Dcassandra.skip_wait_for_gossip_to_settle=0 -Dcassandra.initial_token=0"
        );
        withEnv("HEAP_NEWSIZE", "128M");
        withEnv("MAX_HEAP_SIZE", "1024M");
    }

    /** Applies the configuration override (if any) before container start. */
    @Override
    protected void configure() {
        optionallyMapResourceParameterAsVolume(CONTAINER_CONFIG_LOCATION, configLocation);
    }

    /** Runs the optional init CQL script once the container is up. */
    @Override
    protected void containerIsStarted(InspectContainerResponse containerInfo) {
        runInitScriptIfRequired();
    }

    /**
     * Load init script content and apply it to the database if initScriptPath is set
     */
    private void runInitScriptIfRequired() {
        if (initScriptPath != null) {
            try {
                URL resource = Thread.currentThread().getContextClassLoader().getResource(initScriptPath);
                if (resource == null) {
                    logger().warn("Could not load classpath init script: {}", initScriptPath);
                    throw new ScriptLoadException("Could not load classpath init script: " + initScriptPath + ". Resource not found.");
                }
                String cql = IOUtils.toString(resource, StandardCharsets.UTF_8);
                DatabaseDelegate databaseDelegate = getDatabaseDelegate();
                ScriptUtils.executeDatabaseScript(databaseDelegate, initScriptPath, cql);
            } catch (IOException e) {
                logger().warn("Could not load classpath init script: {}", initScriptPath);
                throw new ScriptLoadException("Could not load classpath init script: " + initScriptPath, e);
            } catch (ScriptException e) {
                logger().error("Error while executing init script: {}", initScriptPath, e);
                throw new ScriptUtils.UncategorizedScriptException("Error while executing init script: " + initScriptPath, e);
            }
        }
    }

    /**
     * Map (effectively replace) directory in Docker with the content of resourceLocation if resource location is not null
     *
     * Protected to allow for changing implementation by extending the class
     *
     * @param pathNameInContainer path in docker
     * @param resourceLocation relative classpath to resource
     */
    protected void optionallyMapResourceParameterAsVolume(String pathNameInContainer, String resourceLocation) {
        Optional.ofNullable(resourceLocation)
            .map(MountableFile::forClasspathResource)
            .ifPresent(mountableFile -> withCopyFileToContainer(mountableFile, pathNameInContainer));
    }

    /**
     * Initialize Cassandra with the custom overridden Cassandra configuration
     * <p>
     * Be aware, that Docker effectively replaces all /etc/cassandra content with the content of config location, so if
     * Cassandra.yaml in configLocation is absent or corrupted, then Cassandra just won't launch
     *
     * @param configLocation relative classpath with the directory that contains cassandra.yaml and other configuration files
     */
    public SELF withConfigurationOverride(String configLocation) {
        this.configLocation = configLocation;
        return self();
    }

    /**
     * Initialize Cassandra with init CQL script
     * <p>
     * CQL script will be applied after container is started (see using WaitStrategy)
     *
     * @param initScriptPath relative classpath resource
     */
    public SELF withInitScript(String initScriptPath) {
        this.initScriptPath = initScriptPath;
        return self();
    }

    /**
     * Initialize Cassandra client with JMX reporting enabled or disabled
     */
    public SELF withJmxReporting(boolean enableJmxReporting) {
        this.enableJmxReporting = enableJmxReporting;
        return self();
    }

    /**
     * Get username
     *
     * By default Cassandra has authenticator: AllowAllAuthenticator in cassandra.yaml
     * If username and password need to be used, then authenticator should be set as PasswordAuthenticator
     * (through custom Cassandra configuration) and through CQL with default cassandra-cassandra credentials
     * user management should be modified
     */
    public String getUsername() {
        return USERNAME;
    }

    /**
     * Get password
     *
     * By default Cassandra has authenticator: AllowAllAuthenticator in cassandra.yaml
     * If username and password need to be used, then authenticator should be set as PasswordAuthenticator
     * (through custom Cassandra configuration) and through CQL with default cassandra-cassandra credentials
     * user management should be modified
     */
    public String getPassword() {
        return PASSWORD;
    }

    /**
     * Get configured Cluster
     *
     * Can be used to obtain connections to Cassandra in the container
     */
    public Cluster getCluster() {
        return getCluster(this, enableJmxReporting);
    }

    /** Builds a driver Cluster pointing at the container's mapped CQL port. */
    public static Cluster getCluster(ContainerState containerState, boolean enableJmxReporting) {
        final Cluster.Builder builder = Cluster.builder()
            .addContactPoint(containerState.getHost())
            .withPort(containerState.getMappedPort(CQL_PORT));
        if (!enableJmxReporting) {
            builder.withoutJMXReporting();
        }
        return builder.build();
    }

    /** Convenience overload: JMX reporting disabled. */
    public static Cluster getCluster(ContainerState containerState) {
        return getCluster(containerState, false);
    }

    /** Delegate used by ScriptUtils to execute CQL against this container. */
    private DatabaseDelegate getDatabaseDelegate() {
        return new CassandraDatabaseDelegate(this);
    }
}
|
akatona84/cruise-control
|
cruise-control/src/main/java/com/linkedin/kafka/cruisecontrol/monitor/sampling/holder/ValueHolder.java
|
/*
* Copyright 2019 LinkedIn Corp. Licensed under the BSD 2-Clause License (the "License"). See License in the project root for license information.
*/
package com.linkedin.kafka.cruisecontrol.monitor.sampling.holder;
/**
* An interface to unify the {@link ValueAndTime}, {@link ValueAndCount}, and {@link ValueMax}. The meaning of recording
* a value differs depending on the custom implementation of this interface.
*/
interface ValueHolder {
    /**
     * Record a new observation into the holder; how it is folded in
     * (overwrite, sum, max, ...) depends on the implementation.
     *
     * @param value The value to record.
     * @param time The time to record (if relevant)
     */
    void recordValue(double value, long time);
    /**
     * Reset the value holder history to the clean state after creation.
     */
    void reset();
    /**
     * @return The value associated with the holder.
     */
    double value();
    /**
     * @param assertNonZeroCount True to assert that at least a single record exist, false otherwise.
     * @return The value associated with the holder, or custom value if no record exists when non zero count is asserted.
     */
    double value(boolean assertNonZeroCount);
}
|
Fliros228/StreamCraft
|
org/apache/http/io/SessionInputBuffer.java
|
package org.apache.http.io;
import java.io.IOException;
import org.apache.http.util.CharArrayBuffer;
/**
 * Session-level input buffer abstraction: byte- and line-oriented reads
 * over an HTTP connection, plus transport metrics.
 */
public interface SessionInputBuffer {
    /** Reads up to paramInt2 bytes into the array at offset paramInt1. */
    int read(byte[] paramArrayOfbyte, int paramInt1, int paramInt2) throws IOException;
    /** Reads up to paramArrayOfbyte.length bytes into the array. */
    int read(byte[] paramArrayOfbyte) throws IOException;
    /** Reads a single byte, or -1 at end of stream. */
    int read() throws IOException;
    /** Reads one line of text into the buffer; returns chars read or -1. */
    int readLine(CharArrayBuffer paramCharArrayBuffer) throws IOException;
    /** Reads one line of text as a String, or null at end of stream. */
    String readLine() throws IOException;
    @Deprecated
    boolean isDataAvailable(int paramInt) throws IOException;
    /** @return transfer metrics for this buffer. */
    HttpTransportMetrics getMetrics();
}
/* Location: C:\Users\Main\AppData\Roaming\StreamCraf\\updates\Launcher.jar!\org\apache\http\io\SessionInputBuffer.class
* Java compiler version: 6 (50.0)
* JD-Core Version: 1.1.3
*/
|
elko-dev/spawn
|
web/prompt_test.go
|
<reponame>elko-dev/spawn
package web
import (
reflect "reflect"
"testing"
"github.com/elko-dev/spawn/constants"
)
// TestWhenWebIsSelectedReturnsClientSelectionsOfReact verifies that the
// "Web" platform offers exactly the React client language.
func TestWhenWebIsSelectedReturnsClientSelectionsOfReact(t *testing.T) {
	want := []string{constants.ReactClientLanguageType}
	got := getClientLangaugeSelections("Web")

	if !reflect.DeepEqual(got, want) {
		t.Log("Incorrect error, expected ", want, " got ", got)
		t.Fail()
	}
}
// TestWhenMobileIsSelectedReturnsClientSelectionsOfReactNative verifies that
// the "Mobile" platform offers exactly the React Native client language.
func TestWhenMobileIsSelectedReturnsClientSelectionsOfReactNative(t *testing.T) {
	want := []string{"React Native"}
	got := getClientLangaugeSelections("Mobile")

	if !reflect.DeepEqual(got, want) {
		t.Log("Incorrect error, expected ", want, " got ", got)
		t.Fail()
	}
}
|
mariusvn/turbo-engine
|
src/SceneManager.cpp
|
<reponame>mariusvn/turbo-engine<filename>src/SceneManager.cpp<gh_stars>1-10
#include <turbo/SceneManager.hpp>
#include <stdexcept>
namespace turbo {
// Default constructor: starts with no registered scenes and no active scene.
SceneManager::SceneManager() {
}
// Registers a scene under a unique name.
//
// Fix: the old code executed `this->scenes["test"]`, and std::map::operator[]
// default-inserts — so every call silently added a null "test" entry, and
// registering a scene actually named "test" on an empty manager incorrectly
// threw "already exists".  The redundant `!scenes.empty() &&` guard is also
// dropped (find() on an empty map simply returns end()).
//
// Throws std::runtime_error if a scene with that name already exists.
void SceneManager::register_scene(Scene* scene, const char* name) {
    if (this->scenes.find(name) != this->scenes.end()) {
        throw std::runtime_error(std::string("The scene \"") + name + "\" already exists");
    }
    this->scenes[name] = scene;
}
// Switches the active scene to the one registered under `name`.
//
// Unloads the previous active scene (if any), loads the new one, and —
// in ImGui-enabled builds — updates the debug view.  Throws
// std::runtime_error if no scene was registered under that name.
void SceneManager::set_active_scene(const char* name) {
    if (this->scenes.find(name) == this->scenes.end()) {
        throw std::runtime_error(std::string("The scene \"") + name + "\" doesn't exists");
    }
    // Unload the outgoing scene before loading the new one.
    if (this->active_scene) {
        this->active_scene->unload();
    }
    Scene* tmp = this->scenes[name];
    tmp->load();
    this->active_scene = tmp;
    // Compiled only in ImGui builds: refresh the debug inspector.
    ONLYIMGUI(
        this->debug.scene_name = name;
        this->debug.set_root_gameobject(this->active_scene->get_root_gameobject());
    );
}
// Returns the currently active scene, or whatever active_scene holds if
// no scene has been activated yet (NOTE(review): presumably nullptr —
// confirm the member is initialized in the header).
Scene* SceneManager::get_active_scene() const {
    return this->active_scene;
}
}
|
expjazz/rails_managing_tasks_app
|
app/controllers/chats_controller.rb
|
# Creates a chatroom between the current user and a recipient, responding
# with a JS partial that renders an alert.
class ChatsController < ApplicationController
  def create
    # NOTE(review): the chatroom is built with no attributes, and the
    # required :id param is only passed through to the partial as the
    # recipient — confirm the association is made elsewhere.
    @chatroom = Chatroom.new
    @recipient_id = chat_room_params
    if @chatroom.save
      respond_to do |format|
        format.js { render partial: 'notice/alerts', locals: { test: @recipient_id } }
      end
    else
      # Save failed: surface a flash message instead of rendering the partial.
      flash[:alert] = 'You chat session was not created'
    end
  end

  private

  # Strong-parameter accessor: raises unless :id is present.
  def chat_room_params
    params.require(:id)
  end
end
|
3658BOSONS/UG3
|
TeamCode/src/main/java/org/firstinspires/ftc/teamcode/opmodes/ShooterTuner.java
|
<filename>TeamCode/src/main/java/org/firstinspires/ftc/teamcode/opmodes/ShooterTuner.java<gh_stars>0
package org.firstinspires.ftc.teamcode.opmodes;
import com.acmerobotics.dashboard.FtcDashboard;
import com.acmerobotics.dashboard.config.Config;
import com.acmerobotics.dashboard.telemetry.TelemetryPacket;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import org.firstinspires.ftc.teamcode.hardware.Shooter;
import org.firstinspires.ftc.teamcode.utils.BulkReadHandler;
@TeleOp(name="Shooter Tuner", group="test")
@Config
/**
 * Dashboard-tunable test opmode for the shooter PID.  The right bumper
 * toggles the flywheel on (applying the current P/I/D values) and off,
 * while velocity telemetry is streamed to FTC Dashboard for tuning.
 */
public class ShooterTuner extends LinearOpMode {
    private Shooter shooter;
    private BulkReadHandler bulk;
    private FtcDashboard dashboard;
    // PID gains, live-editable from the FTC Dashboard (@Config).
    public static double P = 150;
    public static double I = 4;
    public static double D = 1;
    // Edge detection for the bumper, and current flywheel state.
    private boolean lastR;
    private boolean shooting;
    @Override
    public void runOpMode(){
        shooter = new Shooter(this);
        lastR = false;
        shooting = false;
        bulk = new BulkReadHandler(this);
        dashboard = FtcDashboard.getInstance();
        waitForStart();
        while (opModeIsActive()){
            boolean r = gamepad1.right_bumper;
            // Rising edge of the right bumper toggles the shooter.
            if(r && !lastR){
                if(shooting){
                    shooter.cutPower();
                    shooting = false;
                }else{
                    // Re-apply gains each spin-up so dashboard edits take effect.
                    shooter.setPid(P, I, D);
                    shooter.spinUp();
                    shooting = true;
                }
            }
            lastR = r;
            bulk.tick(false, true);
            // Stream measured vs. target velocity for graphing on the dashboard.
            TelemetryPacket packet = new TelemetryPacket();
            packet.put("VELO: ", shooter.getShooterVelo());
            packet.put("TARGET: ", 4000);
            dashboard.sendTelemetryPacket(packet);
        }
    }
}
|
Vodafone/wiremock
|
src/test/java/com/github/tomakehurst/wiremock/recording/ProxiedServeEventFiltersTest.java
|
/*
* Copyright (C) 2011 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.tomakehurst.wiremock.recording;
import com.github.tomakehurst.wiremock.client.ResponseDefinitionBuilder;
import com.github.tomakehurst.wiremock.common.Timing;
import com.github.tomakehurst.wiremock.http.ResponseDefinition;
import com.github.tomakehurst.wiremock.matching.MockRequest;
import com.github.tomakehurst.wiremock.matching.RequestPattern;
import com.github.tomakehurst.wiremock.stubbing.ServeEvent;
import org.junit.jupiter.api.Test;
import java.util.Arrays;
import java.util.List;
import java.util.UUID;
import static com.github.tomakehurst.wiremock.client.WireMock.anyUrl;
import static com.github.tomakehurst.wiremock.client.WireMock.urlEqualTo;
import static com.github.tomakehurst.wiremock.http.RequestMethod.GET;
import static com.github.tomakehurst.wiremock.http.RequestMethod.POST;
import static com.github.tomakehurst.wiremock.matching.MockRequest.mockRequest;
import static com.github.tomakehurst.wiremock.matching.RequestPatternBuilder.newRequestPattern;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
/**
 * Unit tests for {@code ProxiedServeEventFilters}: verifies that serve
 * events are filtered by request pattern, by event id, by both combined,
 * and that non-proxied events are always rejected.
 */
public class ProxiedServeEventFiltersTest {
    @Test
    public void applyWithUniversalRequestPattern() {
        ServeEvent serveEvent = proxiedServeEvent(mockRequest());
        ProxiedServeEventFilters filters = new ProxiedServeEventFilters(RequestPattern.ANYTHING, null, false);
        assertTrue(filters.apply(serveEvent));
        // Should default to RequestPattern.ANYTHING when passing null for filters
        filters = new ProxiedServeEventFilters(null, null, false);
        assertTrue(filters.apply(serveEvent));
    }
    @Test
    public void applyWithUnproxiedServeEvent() {
        // An event whose response is not proxied must never pass the filter.
        ServeEvent serveEvent = toServeEvent(null, null, ResponseDefinition.ok());
        ProxiedServeEventFilters filters = new ProxiedServeEventFilters(null, null, false);
        assertFalse(filters.apply(serveEvent));
    }
    @Test
    public void applyWithMethodPattern() {
        // Method-only pattern: URL is irrelevant, method must match.
        ProxiedServeEventFilters filters = new ProxiedServeEventFilters(newRequestPattern(GET, anyUrl()).build(), null, false);
        MockRequest request = mockRequest().method(GET).url("/foo");
        assertTrue(filters.apply(proxiedServeEvent(request)));
        assertTrue(filters.apply(proxiedServeEvent(request.url("/bar"))));
        assertFalse(filters.apply(proxiedServeEvent(request.method(POST))));
    }
    @Test
    public void applyWithIds() {
        // Id allow-list: only events with one of the listed ids pass.
        List<UUID> ids = Arrays.asList(
            UUID.fromString("00000000-0000-0000-0000-000000000000"),
            UUID.fromString("00000000-0000-0000-0000-000000000001")
        );
        ProxiedServeEventFilters filters = new ProxiedServeEventFilters(null, ids, false);
        assertTrue(filters.apply(proxiedServeEvent(ids.get(0))));
        assertTrue(filters.apply(proxiedServeEvent(ids.get(1))));
        assertFalse(filters.apply(proxiedServeEvent(UUID.fromString("00000000-0000-0000-0000-000000000002"))));
    }
    @Test
    public void applyWithMethodAndUrlPattern() {
        // Method + URL pattern: both components must match.
        ProxiedServeEventFilters filters = new ProxiedServeEventFilters(
            newRequestPattern(GET, urlEqualTo("/foo")).build(),
            null,
            false
        );
        MockRequest request = mockRequest().method(GET).url("/foo");
        assertTrue(filters.apply(proxiedServeEvent(request)));
        assertFalse(filters.apply(proxiedServeEvent(request.url("/bar"))));
        assertFalse(filters.apply(proxiedServeEvent(request.method(POST))));
    }
    @Test
    public void applyWithIdsAndMethodPattern() {
        // Combined filters: both the id list and the pattern must accept.
        MockRequest request = mockRequest().method(GET).url("/foo");
        List<UUID> ids = Arrays.asList(
            UUID.fromString("00000000-0000-0000-0000-000000000000"),
            UUID.fromString("00000000-0000-0000-0000-000000000001")
        );
        ProxiedServeEventFilters filters = new ProxiedServeEventFilters(
            newRequestPattern(GET, anyUrl()).build(),
            ids,
            false
        );
        assertTrue(filters.apply(proxiedServeEvent(ids.get(0), request)));
        assertFalse(filters.apply(proxiedServeEvent(UUID.fromString("00000000-0000-0000-0000-000000000002"), request)));
        assertFalse(filters.apply(proxiedServeEvent(ids.get(0), request.method(POST))));
    }
    /** Builds a (possibly unproxied) serve event with the given response. */
    private ServeEvent toServeEvent(UUID id, MockRequest request, ResponseDefinition responseDefinition) {
        return new ServeEvent(
            id,
            request != null ? request.asLoggedRequest() : null,
            null,
            responseDefinition,
            null,
            true,
            Timing.UNTIMED);
    }
    /** Serve event whose response definition proxies to localhost. */
    private ServeEvent proxiedServeEvent(UUID id, MockRequest request) {
        return toServeEvent(
            id,
            request,
            new ResponseDefinitionBuilder().proxiedFrom("http://localhost").build()
        );
    }
    /** Proxied event with no id. */
    private ServeEvent proxiedServeEvent(MockRequest request) {
        return proxiedServeEvent(null, request);
    }
    /** Proxied event with no request. */
    private ServeEvent proxiedServeEvent(UUID id) {
        return proxiedServeEvent(id, null);
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.