repo_name
stringlengths
6
101
path
stringlengths
4
300
text
stringlengths
7
1.31M
PloadyFree/bacs-learn-current
external-api-sybon/src/main/java/istu/bacs/externalapi/sybon/SybonSubmitResult.java
package istu.bacs.externalapi.sybon;

import lombok.Data;

import java.util.List;

/**
 * DTO describing the result of one submission judged by the external Sybon
 * service. Accessors, {@code equals}/{@code hashCode} and {@code toString}
 * are generated by Lombok's {@link Data}.
 */
@Data
public class SybonSubmitResult {
    // Sybon-side identifier of this submit.
    private Integer id;
    // Compilation outcome of the submitted solution.
    private SybonBuildResult buildResult;
    // Judging results, one entry per test group.
    private List<SybonTestGroupResult> testGroupResults;
}
dataplumber/dmas
inventory/src/main/java/gov/nasa/podaac/inventory/model/DatasetSpatial.java
//Copyright 2008, by the California Institute of Technology.
//ALL RIGHTS RESERVED. United States Government Sponsorship acknowledged.
package gov.nasa.podaac.inventory.model;

import java.io.Serializable;

import com.vividsolutions.jts.geom.Polygon;

/**
 * Spatial coverage of a dataset: a polygon plus back-references to the
 * owning {@link Dataset} and its {@link DatasetElement}.
 *
 * Identity ({@code equals}/{@code hashCode}) is based on the owning
 * dataset's id together with the dataset element.
 *
 * @author clwong
 * $Id: DatasetSpatial.java 5056 2010-06-11 22:21:41Z gangl $
 */
@SuppressWarnings("serial")
public class DatasetSpatial implements Serializable {

	private Polygon spatialGeometry;
	private Dataset dataset;
	private DatasetElement datasetElement = new DatasetElement();

	public DatasetElement getDatasetElement() {
		return datasetElement;
	}

	public void setDatasetElement(DatasetElement de) {
		this.datasetElement = de;
	}

	public Polygon getSpatialGeometry() {
		return spatialGeometry;
	}

	public void setSpatialGeometry(Polygon spatialGeometry) {
		this.spatialGeometry = spatialGeometry;
	}

	public Dataset getDataset() {
		return dataset;
	}

	public void setDataset(Dataset dataset) {
		this.dataset = dataset;
	}

	/** Null-safe access to the owning dataset's id. */
	private Integer datasetIdOrNull() {
		// FIX: the previous version dereferenced `dataset` unconditionally
		// and threw NPE when no dataset had been assigned yet.
		return (dataset == null) ? null : dataset.getDatasetId();
	}

	@Override
	public int hashCode() {
		final int prime = 31;
		Integer id = datasetIdOrNull();
		return prime + ((id == null) ? 0 : id.hashCode());
	}

	@Override
	public boolean equals(Object obj) {
		if (this == obj)
			return true;
		if (obj == null || getClass() != obj.getClass())
			return false;
		final DatasetSpatial other = (DatasetSpatial) obj;
		Integer thisId = datasetIdOrNull();
		Integer otherId = other.datasetIdOrNull();
		// FIX: the original `else if` chain skipped the datasetElement
		// comparison whenever both dataset ids were null, making equality
		// inconsistent. Ids AND elements are now always compared.
		if (thisId == null ? otherId != null : !thisId.equals(otherId))
			return false;
		if (datasetElement == null)
			return other.datasetElement == null;
		return datasetElement.equals(other.datasetElement);
	}
}
yumilceh/libqi
tests/qi/testutils/testutils.hpp
#pragma once #include <memory> #include <string> #include <vector> #include <chrono> #include <boost/optional.hpp> #include <boost/config.hpp> #include <gtest/gtest.h> #include <ka/typetraits.hpp> #include <ka/utility.hpp> #include <qi/clock.hpp> #include <qi/future.hpp> namespace test { /// Runs a process from construction to destruction. /// At destruction, the process is killed with SIGKILL. class ScopedProcess { static const std::chrono::milliseconds defaultWaitReadyDuration; using Strings = std::vector<std::string>; public: explicit ScopedProcess(const std::string& executable, const Strings& arguments = Strings{}, std::chrono::milliseconds waitReadyDuration = defaultWaitReadyDuration); ~ScopedProcess(); // non-copyable ScopedProcess(const ScopedProcess&) = delete; ScopedProcess& operator=(const ScopedProcess&) = delete; int pid() const { return _pid; } private: std::string _executable; int _pid; }; /// Predicate P template <typename P> static testing::AssertionResult verifyBeforeDuration(P pred, qi::NanoSeconds dura, qi::NanoSeconds period = qi::MilliSeconds{10}) { while (!pred()) { if (dura == qi::NanoSeconds::zero()) { return testing::AssertionFailure() << "Predicate was not true before " << boost::chrono::duration_cast<qi::MicroSeconds>(dura).count() << " microseconds"; } if (period >= dura) dura = qi::NanoSeconds::zero(); else dura -= period; qi::sleepFor(period); } return testing::AssertionSuccess(); } static const auto defaultFutureWaitDuration = qi::Seconds{ 3 }; namespace detail { struct DoNothing { template<typename T> void operator()(const qi::Future<T>&) const {} }; template<typename T> struct AssignValue { T* t; void operator()(const qi::Future<T>& fut) const { *t = fut.value(); } }; struct AssignError { std::string* err; template<typename T> void operator()(const qi::Future<T>& fut) const { *err = fut.error(); } }; template <typename T> testing::Message messageOfUnexpectedState(const qi::Future<T>& fut) { testing::Message msg; switch (fut.wait(0)) 
{ case qi::FutureState_None: msg << "the future is invalid as it is not tied to a promise"; break; case qi::FutureState_Running: msg << "the future has timed out"; break; case qi::FutureState_Canceled: msg << "the future has been canceled"; break; case qi::FutureState_FinishedWithError: msg << "the future has an error '" << fut.error() << "'"; break; case qi::FutureState_FinishedWithValue: msg << "the future has an value '" << fut.value() << "'"; break; } return msg; } template <typename T, typename Proc = DoNothing> testing::AssertionResult finishesWithState(qi::Future<T> fut, qi::FutureState expected, Proc onSuccess = {}, qi::MilliSeconds delay = defaultFutureWaitDuration) { const auto state = fut.wait(delay); if (state == expected) { onSuccess(fut); return testing::AssertionSuccess(); } return testing::AssertionFailure() << messageOfUnexpectedState(fut); } } inline detail::DoNothing willDoNothing() { return {}; } template<typename T> detail::AssignValue<T> willAssignValue(T& t) { return {&t}; } inline detail::AssignError willAssignError(std::string& err) { return {&err}; } template <typename T, typename Proc = detail::DoNothing> testing::AssertionResult finishesWithValue(qi::Future<T> fut, Proc onSuccess = {}, qi::MilliSeconds delay = defaultFutureWaitDuration) { return detail::finishesWithState(fut, qi::FutureState_FinishedWithValue, onSuccess, delay); } template <typename T, typename... Args> testing::AssertionResult finishesWithValue(qi::FutureSync<T> fut, Args&&... args) { return finishesWithValue(fut.async(), ka::fwd<Args>(args)...); } template <typename T, typename Proc = detail::DoNothing> testing::AssertionResult finishesWithError(qi::Future<T> fut, Proc onSuccess = {}, qi::MilliSeconds delay = defaultFutureWaitDuration) { return detail::finishesWithState(fut, qi::FutureState_FinishedWithError, onSuccess, delay); } template <typename T, typename... Args> testing::AssertionResult finishesWithError(qi::FutureSync<T> fut, Args&&... 
args) { return finishesWithError(fut.async(), ka::fwd<Args>(args)...); } template <typename T, typename Proc = detail::DoNothing> testing::AssertionResult finishesAsCanceled(qi::Future<T> fut, Proc onSuccess = {}, qi::MilliSeconds delay = defaultFutureWaitDuration) { return detail::finishesWithState(fut, qi::FutureState_Canceled, onSuccess, delay); } template <typename T, typename... Args> testing::AssertionResult finishesAsCanceled(qi::FutureSync<T> fut, Args&&... args) { return finishesAsCanceled(fut.async(), ka::fwd<Args>(args)...); } template <typename T, typename Proc = detail::DoNothing> testing::AssertionResult isStillRunning(qi::Future<T> fut, Proc onSuccess = {}, qi::MilliSeconds delay = defaultFutureWaitDuration) { return detail::finishesWithState(fut, qi::FutureState_Running, onSuccess, delay); } template <typename T, typename... Args> testing::AssertionResult isStillRunning(qi::FutureSync<T> fut, Args&&... args) { return isStillRunning(fut.async(), ka::fwd<Args>(args)...); } static const auto defaultConnectionAttemptTimeout = qi::Seconds{10}; /// Preconditions: attempts >= 0 /// /// With Network N: /// qi::TcpMessageSocket<N> S, qi::Url U template<class S, class U> qi::Future<void> attemptConnect(S& socket, U url, qi::MilliSeconds timeout = defaultConnectionAttemptTimeout) { qi::Future<void> result; qi::FutureState state = qi::FutureState_None; const auto deadline = qi::SteadyClock::now() + timeout; while (qi::SteadyClock::now() < deadline && state != qi::FutureState_FinishedWithValue) { if (state != qi::FutureState_Running) result = socket.connect(url).async(); state = result.wait(defaultConnectionAttemptTimeout); } return result; } } // namespace test
plewto/Pigiron
smf/header.go
<filename>smf/header.go package smf /* ** header.go defines MIDI file header chunk. ** */ import ( "fmt" "os" "github.com/plewto/pigiron/pigerr" ) var headerID chunkID = [4]byte{0x4d, 0x54, 0x68, 0x64} // SMFheadr strut implements Chunk interface for MIDI file headers. // type Header struct { format int trackCount int // NOTE: trackCount may be greater then actual track count. division int } func (h *Header) String() string { msg := "smf.Header format: %d trackCount: %d division: %d" return fmt.Sprintf(msg, h.format, h.trackCount, h.division) } func (h *Header) ID() chunkID { return headerID } // h.Format() return MIDI file format // func (h *Header) Format() int { return h.format } // h.Division() returns MIDI file clock division. // func (h *Header) Division() int { return h.division } func (h *Header) Length() int { return 6 } // h.Dump() displays contents of MIDI header chunk. // func (h *Header) Dump() { fmt.Println("Header:") fmt.Printf("\tformat : %4d\n", h.format) fmt.Printf("\tchuckCount : %4d\n", h.trackCount) fmt.Printf("\tdivision : %4d\n", h.division) } // readHeader function reads MIDI file header chuck from file. // func readHeader(f *os.File) (header *Header, err error) { var id chunkID var length int id, length, err = readChunkPreamble(f) if err != nil { return } if !id.eq(headerID) { msg := "Expected header id '%s', got '%s'" err = fmt.Errorf(msg, headerID, id) return } if length != 6 { msg := "Unusual SMF header length, expected 6, got %d\n" err = fmt.Errorf(msg, length) return } var data = make([]byte, length) var count = 0 count, err = f.Read(data) if count != length { msg := "SMF Header data count inconsistent, expected %d bytes, read %d" err = fmt.Errorf(msg, count, length) return } if err != nil { msg := "smf.readHeader could not read Header chunk\n" msg += fmt.Sprintf("%s", err) err = fmt.Errorf(msg) return } // DO NOT replace above lines with readRawChunk() // It may not detect non-smf files and attmpt to read // huge amounts of data. 
// var format, trackCount, division int format, data, _ = TakeShort(data) trackCount, data, _ = TakeShort(data) division, _, _ = TakeShort(data) header = &Header{format, trackCount, division} if format < 0 || 2 < format { dflt := 0 errmsg := "MIDI file has unsupported format: %d, using default %d" pigerr.Warning(fmt.Sprintf(errmsg, format, dflt)) header.format = dflt } if division < 24 || 960 < division { dflt := 24 msg1 := "MIDI file has out of bounds clock division" msg2 := fmt.Sprintf("Expected division between 24 and 960, got %d", division) msg3 := fmt.Sprintf("Using default %d", dflt) pigerr.Warning(msg1, msg2, msg3) header.division = dflt } return }
bridges-wood/CiphersApp
src/main/java/com/polytonic/cipher/FileIO.java
package com.polytonic.cipher;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;

import com.esotericsoftware.kryo.Kryo;
import com.esotericsoftware.kryo.io.Input;
import com.esotericsoftware.kryo.io.Output;

/**
 * Class that manages all File IO operations for the project.
 * <p>
 * The primary serialisation / deserialistaion framework in use is
 * {@link com.esotericsoftware.kryo.Kryo}
 * </p>
 *
 * @author <NAME>
 *
 */
public class FileIO {

	// Hashed dictionary and bi-gram files.
	public final String DICTIONARY_HASH_PATH = "src/main/resources/dictionary.htb";
	public final String DICTIONARY_TEXT_PATH = "src/main/resources/dictionary.txt";
	public final String BIGRAM_WORD_HASH_PATH = "src/main/resources/2grams.htb";
	public final String BIGRAM_WORD_TEXT_PATH = "src/main/resources/2grams.txt";
	// Words in frequency order but with no occurrences.
	public final String TRIGRAM_WORD_TEXT_PATH = "src/main/resources/3grams.txt";
	public final String QUADGRAM_WORD_TEXT_PATH = "src/main/resources/4grams.txt";
	public final String PENTAGRAM_WORD_TEXT_PATH = "src/main/resources/5grams.txt";
	// Character-index-form maps.
	public final String MONOGRAM_CIF_PATH = "src/main/resources/1gramsMap.cif";
	public final String BIGRAM_CIF_PATH = "src/main/resources/2gramsMap.cif";
	public final String TRIGRAM_CIF_PATH = "src/main/resources/3gramsMap.cif";
	public final String QUADGRAM_CIF_PATH = "src/main/resources/4gramsMap.cif";
	public final String PENTAGRAM_CIF_PATH = "src/main/resources/5gramsMap.cif";
	// Words in frequency order with occurrences.
	public final String MONOGRAM_COUNTS_PATH = "src/main/resources/1gramsFrequencies.txt";
	public final String BIGRAM_COUNTS_PATH = "src/main/resources/2gramsFrequencies.txt";
	public final String TRIGRAM_COUNTS_PATH = "src/main/resources/3gramsFrequencies.txt";
	public final String QUADGRAM_COUNTS_PATH = "src/main/resources/4gramsFrequencies.txt";
	public final String PENTAGRAM_COUNTS_PATH = "src/main/resources/5gramsFrequencies.txt";
	// Maps corresponding to above files.
	public final String MONOGRAM_COUNTS_MAP_PATH = "src/main/resources/1gramsFrequencies.tmp";
	public final String BIGRAM_COUNTS_MAP_PATH = "src/main/resources/2gramsFrequencies.tmp";
	public final String TRIGRAM_COUNTS_MAP_PATH = "src/main/resources/3gramsFrequencies.tmp";
	public final String QUADGRAM_COUNTS_MAP_PATH = "src/main/resources/4gramsFrequencies.tmp";
	public final String PENTAGRAM_COUNTS_MAP_PATH = "src/main/resources/5gramsFrequencies.tmp";
	// Characters in frequency order with occurrences.
	public final String MONOGRAM_TEXT_PATH = "src/main/resources/1l.txt";
	public final String BIGRAM_TEXT_PATH = "src/main/resources/2l.txt";
	public final String TRIGRAM_TEXT_PATH = "src/main/resources/3l.txt";
	public final String QUADGRAM_TEXT_PATH = "src/main/resources/4l.txt";
	public final String PENTAGRAM_TEXT_PATH = "src/main/resources/5l.txt";
	// Maps corresponding to above files with log probabilities.
	public final String MONOGRAM_LOG_MAP_PATH = "src/main/resources/1l.ltmp";
	public final String BIGRAM_LOG_MAP_PATH = "src/main/resources/2l.ltmp";
	public final String TRIGRAM_LOG_MAP_PATH = "src/main/resources/3l.ltmp";
	public final String QUADGRAM_LOG_MAP_PATH = "src/main/resources/4l.ltmp";
	public final String PENTAGRAM_LOG_MAP_PATH = "src/main/resources/5l.ltmp";
	// Maps corresponding to above files with direct probabilities.
	public final String MONOGRAM_MAP_PATH = "src/main/resources/1l.tmp";
	public final String BIGRAM_MAP_PATH = "src/main/resources/2l.tmp";
	public final String TRIGRAM_MAP_PATH = "src/main/resources/3l.tmp";
	public final String QUADGRAM_MAP_PATH = "src/main/resources/4l.tmp";
	public final String PENTAGRAM_MAP_PATH = "src/main/resources/5l.tmp";
	// Other relevant files.
	public final String MOST_PROBABLE_TEXT_PATH = "src/main/resources/mostProbable.txt";
	public final String LETTER_FREQUENCIES_TEXT_PATH = "src/main/resources/letterFrequencies.txt";
	public final String LETTER_FREQUENCIES_MAP_PATH = "src/main/resources/letterFrequencies.tmp";

	// FNV-1a 64-bit constants.
	private final long FNV1_64_INIT = 0xcbf29ce484222325L;
	private final long FNV1_PRIME_64 = 1099511628211L;

	private final Kryo kyro = new Kryo();

	public FileIO() {
		// Registration is required for proper serialisation.
		kyro.register(new HashMap<Long, String>().getClass());
		kyro.register(new TreeMap<Character, Double>().getClass());
		kyro.register(new TreeMap<String, Double>().getClass());
		kyro.register(new LinkedList<String>().getClass());
		kyro.register(new TreeMap<String, LinkedList<String>>().getClass());
	}

	/**
	 * Returns all lines in a text file as separate words in a string array.
	 *
	 * @param filename The name of the file to be retrieved.
	 * @return A string array of each line in the file.
	 */
	public String[] readFile(String filename) {
		List<String> lines = new LinkedList<String>();
		File file = new File(filename);
		// FIX: try-with-resources guarantees the reader is closed even when an
		// IOException occurs mid-read (the old explicit close() leaked then).
		try (BufferedReader br = new BufferedReader(
				new InputStreamReader(new FileInputStream(file), StandardCharsets.UTF_8))) {
			String line;
			while ((line = br.readLine()) != null) {
				// readLine() returns null at end of file, so no extra EOF check is needed.
				lines.add(line);
			}
		} catch (FileNotFoundException f) {
			System.err.println("Text file '" + filename + "' to be read not found.");
		} catch (IOException e) {
			System.err.println("Buffered reader failed to read '" + filename + "' correctly.");
		}
		return lines.parallelStream().toArray(String[]::new); // Fastest possible encoding to array.
	}

	/**
	 * Gives the 64 bit FNV-1a hash of an input string.
	 *
	 * @param text The text from which the hash is to be generated.
	 * @return The hash of the input data.
	 */
	public long hash64(String text) {
		// NOTE(review): getBytes() uses the platform default charset, so hashes
		// of non-ASCII text are platform-dependent. Left unchanged because
		// persisted .htb tables were built with this behaviour — confirm before
		// switching to an explicit charset.
		byte[] data = text.getBytes();
		int length = data.length;
		long hash = FNV1_64_INIT;
		/*
		 * FNV-1a is used instead of FNV-1 as it has better avalanche characteristics
		 * for short strings.
		 */
		for (int i = 0; i < length; i++) {
			hash ^= (data[i] & 0xff); // XOR
			hash *= FNV1_PRIME_64; // Multiply
		}
		return hash;
	}

	/**
	 * Creates a hash-map using FNV1-a and each line of a given file of type <Long,
	 * String>.
	 *
	 * @param filename       The name of the file which lines are to be read from.
	 * @param outputFilename The name of the file to which the hashtable is saved.
	 */
	public void generateHashMap(String filename, String outputFilename) {
		File fromFile = new File(filename);
		File toFile = new File(outputFilename);
		HashMap<Long, String> hashTable = new HashMap<Long, String>();
		// FIX: both the reader and the Kryo Output are now closed on all paths.
		try (BufferedReader br = new BufferedReader(
				new InputStreamReader(new FileInputStream(fromFile), StandardCharsets.US_ASCII))) {
			String line;
			while ((line = br.readLine()) != null) {
				hashTable.put(hash64(line), line); // Puts each line into the hashtable.
			}
			try (Output out = new Output(new FileOutputStream(toFile))) {
				kyro.writeClassAndObject(out, hashTable);
				/*
				 * Writing both the class and the object means that we can avoid unchecked
				 * casting on loading back into memory.
				 */
			}
		} catch (FileNotFoundException f) {
			System.err.println("Text file '" + filename + "' to be hashed not found.");
		} catch (IOException i) {
			System.err.println("Buffered reader failed to read '" + filename + "' correctly.");
		}
	}

	/**
	 * Returns the hash-map that was stored in a given file. 10x Faster than
	 * previous method.
	 *
	 * @param filename The file the hash-map was serialised to.
	 * @return Loaded hash-map, or {@code null} if the file could not be found.
	 */
	public HashMap<Long, String> readHashTable(String filename) {
		File fromFile = new File(filename);
		// FIX: the Input stream was previously never closed (file handle leak).
		try (Input in = new Input(new FileInputStream(fromFile))) {
			@SuppressWarnings("unchecked") // Type is fixed by generateHashMap's write.
			HashMap<Long, String> table = (HashMap<Long, String>) kyro.readClassAndObject(in);
			return table;
		} catch (FileNotFoundException f) {
			System.err.println("Hashtable '" + filename + "' to be read not found.");
		}
		return null;
	}

	/**
	 * Cleans a given piece of text.
	 *
	 * @param text The text to be cleaned.
	 * @return The same text with only alphabetic characters and spaces, in lower case.
	 */
	public String cleanText(String text) {
		return text.replaceAll("[^a-zA-Z ]", "").toLowerCase();
	}

	/**
	 * Removes spaces from text.
	 *
	 * @param text Text to have spaces removed from.
	 * @return Un-spaced text.
	 */
	public String deSpace(String text) {
		return text.replaceAll("\\s+", "");
	}

	/**
	 * Returns the tree-map of letter frequencies that was stored in a specific
	 * file.
	 *
	 * @param filename The file the map was serialised to.
	 * @return Loaded map of letter frequencies, or {@code null} if missing.
	 */
	public TreeMap<Character, Double> readLetterFrequencies(String filename) {
		File fromFile = new File(filename);
		// FIX: close the Input stream (was leaked).
		try (Input in = new Input(new FileInputStream(fromFile))) {
			@SuppressWarnings("unchecked")
			TreeMap<Character, Double> map = (TreeMap<Character, Double>) kyro.readClassAndObject(in);
			return map;
		} catch (FileNotFoundException e) {
			System.err.println("Map '" + filename + "' to be read not found.");
		}
		return null;
	}

	/**
	 * Generates a tree-map of letter frequencies based on a text file of
	 * "char,frequency" lines and serialises it.
	 *
	 * @param filename       The file name of the text file to be read.
	 * @param outputFilename The file the map is to be stored in.
	 */
	public void generateLetterFrequencies(String filename, String outputFilename) {
		File fromFile = new File(filename);
		File toFile = new File(outputFilename);
		Map<Character, Double> treeMap = new TreeMap<Character, Double>();
		try (BufferedReader br = new BufferedReader(
				new InputStreamReader(new FileInputStream(fromFile), StandardCharsets.US_ASCII))) {
			String line;
			while ((line = br.readLine()) != null) {
				String[] parts = line.split(",");
				treeMap.put(parts[0].charAt(0), Double.valueOf(parts[1])); // Puts each line into the map.
			}
			try (Output out = new Output(new FileOutputStream(toFile))) {
				kyro.writeClassAndObject(out, treeMap);
			}
		} catch (FileNotFoundException f) {
			System.err.println("Text file '" + filename + "' to be hashed not found.");
		} catch (IOException i) {
			System.err.println("Buffered reader failed to read '" + filename + "' correctly.");
		}
	}

	/**
	 * Generates and saves a TreeMap from a file containing n-grams in English and
	 * their relative appearances in Google's trillion word corpus.
	 *
	 * @param filename       The file the n-grams are to be fetched from.
	 * @param outputFilename The file the n-gram map is to be stored in.
	 * @param characters     True if the n-grams are for characters rather than
	 *                       words.
	 * @param log            True if the log probabilities for the n-grams are to be
	 *                       stored.
	 */
	public void generateNGramMap(String filename, String outputFilename, boolean characters, boolean log) {
		File toFile = new File(outputFilename);
		TreeMap<String, Double> chances = new TreeMap<String, Double>();
		String[] lines = this.readFile(filename);
		double total = 0d;
		for (String line : lines) {
			String[] splitLine = line.split(",");
			if (characters) {
				chances.put(splitLine[0], Double.valueOf(splitLine[1]));
				total += Double.valueOf(splitLine[1]);
			} else {
				// Word n-grams may themselves contain commas, so rejoin all but
				// the final field (the count).
				StringBuilder nGram = new StringBuilder();
				for (int i = 0; i < splitLine.length - 1; i++) {
					nGram.append(splitLine[i] + ",");
				}
				nGram.deleteCharAt(nGram.length() - 1); // Removes the final comma in the n-gram.
				double toInsert = Double.valueOf(splitLine[splitLine.length - 1]);
				chances.put(nGram.toString(), Double.valueOf(toInsert));
				total += toInsert;
			}
		}
		// Normalise counts into (log-)probabilities. Replacing the value of an
		// existing key during keySet iteration is safe (not a structural change).
		for (String key : chances.keySet()) {
			Double toInsert = 0d;
			if (!log) {
				toInsert = chances.get(key) / total;
			} else {
				toInsert = Math.log10(chances.get(key) / total);
				/*
				 * For every key, the log is taken to avoid numerical underflow when operating
				 * with such small probabilities.
				 */
			}
			chances.put(key, toInsert);
		}
		// FIX: Output is now closed even if the write throws.
		try (Output out = new Output(new FileOutputStream(toFile))) {
			kyro.writeClassAndObject(out, chances);
		} catch (FileNotFoundException f) {
			System.err.println("Save location '" + outputFilename + "' not found.");
		}
	}

	/**
	 * Loads a TreeMap from a file containing NGrams in English and their respective
	 * log probabilities.
	 *
	 * @param filename The file from which the map is to be loaded from.
	 * @return The loaded map, or {@code null} if the file could not be found.
	 */
	public TreeMap<String, Double> loadNgramMap(String filename) {
		File location = new File(filename);
		// FIX: close the Input stream (was leaked).
		try (Input in = new Input(new FileInputStream(location))) {
			@SuppressWarnings("unchecked")
			TreeMap<String, Double> map = (TreeMap<String, Double>) kyro.readClassAndObject(in);
			return map;
		} catch (FileNotFoundException e) {
			System.err.println("Treemap '" + filename + "' to be read not found.");
		}
		return null;
	}

	/**
	 * Loads a TreeMap containing p-equivalent n-grams.
	 *
	 * @param filename The file the map is to be recovered from.
	 * @return The TreeMap of p-equivalent n-grams, or {@code null} if missing.
	 */
	public TreeMap<String, LinkedList<String>> loadCharacterIndexForm(String filename) {
		File inFile = new File(filename);
		// FIX: close the Input stream (was leaked).
		try (Input in = new Input(new FileInputStream(inFile))) {
			@SuppressWarnings("unchecked")
			TreeMap<String, LinkedList<String>> map =
					(TreeMap<String, LinkedList<String>>) kyro.readClassAndObject(in);
			return map;
		} catch (FileNotFoundException e) {
			System.err.println("Character-Index-Form Map '" + filename + "' to be loaded not found.");
		}
		return null;
	}

	/**
	 * Generates a map that stores a character index form encoding and all possible
	 * words that fit the pattern.
	 *
	 * @param filename       The filename that the words to be encoded are stored
	 *                       in.
	 * @param outputFilename The filename for the map to be stored in.
	 */
	public void generateCharacterIndexFormMap(String filename, String outputFilename) {
		TreeMap<String, LinkedList<String>> map = new TreeMap<String, LinkedList<String>>();
		File toFile = new File(outputFilename);
		PredictWords p = new PredictWords();
		String[] lines = this.readFile(filename);
		for (String line : lines) {
			Integer[] key;
			if (line.contains(",")) {
				// FIX: the phrase branch previously split on the letter "s"
				// ( line.split("s") ) even though it is only entered when the
				// line contains a comma — split on "," as clearly intended.
				key = p.toLinear(p.encodePhrase(line.split(",")));
			} else {
				key = p.encodeWord(line);
			}
			String keyS = p.toString(key);
			if (map.containsKey(keyS)) {
				map.get(keyS).add(line);
			} else {
				LinkedList<String> toPut = new LinkedList<String>();
				toPut.add(line);
				map.put(keyS, toPut);
			}
		}
		try (Output out = new Output(new FileOutputStream(toFile))) {
			kyro.writeClassAndObject(out, map);
		} catch (FileNotFoundException f) {
			System.err.println(
					"Location for Character-Index-Form Map to be stored '" + outputFilename + "' could not be reached.");
		}
	}
}
cloudify-cosmo/cloudify-gcp-plugin
cloudify_gcp/pubsub/subscription.py
# #######
# Copyright (c) 2018-2020 Cloudify Platform Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#        http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Standard library imports
from __future__ import unicode_literals

# Third-party imports
from cloudify import ctx
from cloudify.decorators import operation

# Local imports
from .. import constants
from .. import utils
from ..gcp import check_response
from ..pubsub import PubSubBase


class Subscription(PubSubBase):
    """Wrapper around a GCP Pub/Sub subscription REST resource."""

    def __init__(self,
                 config,
                 logger,
                 name,
                 topic,
                 push_config=None,
                 ack_deadline_seconds=0,):
        """
        Create Subscription object

        :param config: gcp auth file
        :param logger: logger object
        :param name: name of the subscription
        :param topic: name of the topic to subscribe for
        :param push_config: optional dict configuring push delivery; keys
         ``push_endpoint`` and ``attributes`` are read in ``to_dict``.
         When falsy, the subscription uses pull delivery.
        :param ack_deadline_seconds: This value is the maximum time after a
         subscriber receives a message before the subscriber should
         acknowledge the message
        """
        super(Subscription, self).__init__(
            config,
            logger,
            utils.get_gcp_resource_name(name),)
        self.name = name
        self.topic = topic
        self.push_config = push_config
        self.ack_deadline_seconds = ack_deadline_seconds

    @check_response
    def create(self):
        """
        Create GCP Subscription.

        :return: REST response contains a newly created instance of
         Subscription
         {
          "name": string,
          "topic": string,
          "pushConfig": {
            object(PushConfig)
          },
          "ackDeadlineSeconds": number,
         }
        """
        self.logger.info("Create Subscription '{0}'".format(self.name))
        return self.discovery_pubsub.subscriptions().create(
            name=self.subscription_path, body=self.to_dict()).execute()

    def to_dict(self):
        """Build the request body for the subscriptions().create call."""
        body = dict()
        # Topic path
        body['topic'] = self.topic_path
        # only set the ``ackDeadlineSeconds`` if it is already set by the user
        if self.ack_deadline_seconds:
            body['ackDeadlineSeconds'] = self.ack_deadline_seconds

        # If this ``pushConfig`` empty then it is a pull delivery
        body['pushConfig'] = {}

        # If it is a push delivery then we need to check the following data
        if self.push_config:
            # Get the ``push_endpoint`` data
            if self.push_config.get('push_endpoint'):
                body['pushConfig']['pushEndpoint'] =\
                    self.push_config['push_endpoint']

            # Get the ``attributes`` data
            if self.push_config.get('attributes'):
                body['pushConfig']['attributes'] =\
                    self.push_config['attributes']
        return body

    @check_response
    def delete(self):
        """
        Delete GCP Subscription.

        :return: REST response body will be empty
        """
        self.logger.info("Delete Subscription '{0}'".format(self.name))
        return self.discovery_pubsub.subscriptions().delete(
            subscription=self.subscription_path).execute()

    @check_response
    def get(self):
        # Fetch the current state of the subscription from GCP.
        return self.discovery_pubsub.subscriptions().get(
            subscription=self.subscription_path).execute()

    @property
    def subscription_path(self):
        # Fully-qualified GCP resource path for this subscription.
        return 'projects/{0}/subscriptions/{1}'.format(self.project, self.name)

    @property
    def topic_path(self):
        # Fully-qualified GCP resource path for the subscribed topic.
        return 'projects/{0}/topics/{1}'.format(self.project, self.topic)


@operation(resumable=True)
@utils.retry_on_failure('Retrying creating subscription')
@utils.throw_cloudify_exceptions
def create(topic, name, push_config=None, ack_deadline_seconds=0, **kwargs):
    # Cloudify operation: create the subscription unless a previous
    # (resumed) run already recorded it in runtime properties.
    if utils.resource_created(ctx, constants.NAME):
        return

    gcp_config = utils.get_gcp_config()
    if not name:
        # Fall back to the node id when no explicit name was given.
        name = ctx.node.id
    name = utils.get_final_resource_name(name)
    subscription = Subscription(gcp_config,
                                ctx.logger,
                                name,
                                topic,
                                push_config,
                                ack_deadline_seconds)

    resource = utils.create(subscription)
    # Persist the fields of the REST response needed by later operations.
    ctx.instance.runtime_properties.update(
        {'name_path': resource.get(constants.NAME),
         'topic_path': resource.get('topic'),
         'push_config': resource.get('pushConfig'),
         'ack_deadline_seconds': resource.get('ackDeadlineSeconds')
         }
    )
    ctx.instance.runtime_properties['topic'] = topic
    ctx.instance.runtime_properties[constants.NAME] = name


@operation(resumable=True)
@utils.retry_on_failure('Retrying deleting subscription')
@utils.throw_cloudify_exceptions
def delete(**kwargs):
    # Cloudify operation: delete the subscription recorded at create time.
    gcp_config = utils.get_gcp_config()
    name = ctx.instance.runtime_properties.get(constants.NAME)
    topic = ctx.instance.runtime_properties.get('topic')
    if name:
        subscription = Subscription(gcp_config,
                                    ctx.logger,
                                    name,
                                    topic)
        utils.delete_if_not_external(subscription)
        # GCP deletion is asynchronous; retry until it is observed gone.
        if not utils.is_object_deleted(subscription):
            ctx.operation.retry('subscription is not yet deleted. Retrying:',
                                constants.RETRY_DEFAULT_DELAY)
ksowmya/cloudstack-1
plugins/alert-handlers/snmp-alerts/src/org/apache/cloudstack/alert/snmp/SnmpHelper.java
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.cloudstack.alert.snmp;

import com.cloud.utils.exception.CloudRuntimeException;
import org.snmp4j.CommunityTarget;
import org.snmp4j.PDU;
import org.snmp4j.Snmp;
import org.snmp4j.mp.SnmpConstants;
import org.snmp4j.smi.OID;
import org.snmp4j.smi.OctetString;
import org.snmp4j.smi.UdpAddress;
import org.snmp4j.smi.UnsignedInteger32;
import org.snmp4j.smi.VariableBinding;
import org.snmp4j.transport.DefaultUdpTransportMapping;

import java.io.IOException;

/**
 * Builds SNMP v2c trap PDUs from {@link SnmpTrapInfo} alerts and sends them
 * to a single community target over UDP.
 */
public class SnmpHelper {
    private Snmp _snmp;
    private CommunityTarget _target;

    /**
     * Creates a helper bound to one trap receiver.
     *
     * @param address   UDP address of the trap receiver, in snmp4j
     *                  {@code UdpAddress} string form (e.g. "host/port")
     * @param community SNMP community string for the v2c target
     * @throws CloudRuntimeException if the UDP transport cannot be created
     */
    public SnmpHelper(String address, String community) {
        _target = new CommunityTarget();
        _target.setCommunity(new OctetString(community));
        _target.setVersion(SnmpConstants.version2c);
        _target.setAddress(new UdpAddress(address));
        try {
            _snmp = new Snmp(new DefaultUdpTransportMapping());
        } catch (IOException e) {
            _snmp = null;
            // fixed typo in the original message ("crearting")
            throw new CloudRuntimeException(" Error in creating snmp object, " + e.getMessage());
        }
    }

    /**
     * Sends the given alert as an SNMP trap. A no-op if the transport failed
     * to initialize (_snmp == null).
     *
     * @param snmpTrapInfo alert to convert and send; must carry a message
     * @throws CloudRuntimeException on send failure or invalid alert content
     */
    public void sendSnmpTrap(SnmpTrapInfo snmpTrapInfo) {
        try {
            if (_snmp != null) {
                _snmp.send(createPDU(snmpTrapInfo), _target, null, null);
            }
        } catch (IOException e) {
            throw new CloudRuntimeException(" Error in sending SNMP Trap, " + e.getMessage());
        }
    }

    /**
     * Converts an alert into a trap PDU. Optional numeric fields (data center,
     * pod, cluster) are only bound when non-zero; the message is mandatory.
     *
     * @throws CloudRuntimeException if the alert type is negative or the
     *                               message is missing
     */
    private PDU createPDU(SnmpTrapInfo snmpTrapInfo) {
        PDU trap = new PDU();
        trap.setType(PDU.TRAP);

        // Alert types appear to be zero-based while trap OIDs are one-based
        // (inferred from the +1) -- confirm against CsSnmpConstants.
        int alertType = snmpTrapInfo.getAlertType() + 1;
        if (alertType > 0) {
            trap.add(new VariableBinding(SnmpConstants.snmpTrapOID,
                getOID(CsSnmpConstants.TRAPS_PREFIX + alertType)));

            if (snmpTrapInfo.getDataCenterId() != 0) {
                trap.add(new VariableBinding(getOID(CsSnmpConstants.DATA_CENTER_ID),
                    new UnsignedInteger32(snmpTrapInfo.getDataCenterId())));
            }

            if (snmpTrapInfo.getPodId() != 0) {
                trap.add(new VariableBinding(getOID(CsSnmpConstants.POD_ID),
                    new UnsignedInteger32(snmpTrapInfo.getPodId())));
            }

            if (snmpTrapInfo.getClusterId() != 0) {
                trap.add(new VariableBinding(getOID(CsSnmpConstants.CLUSTER_ID),
                    new UnsignedInteger32(snmpTrapInfo.getClusterId())));
            }

            if (snmpTrapInfo.getMessage() != null) {
                trap.add(new VariableBinding(getOID(CsSnmpConstants.MESSAGE),
                    new OctetString(snmpTrapInfo.getMessage())));
            } else {
                throw new CloudRuntimeException(" What is the use of alert without message ");
            }

            if (snmpTrapInfo.getGenerationTime() != null) {
                trap.add(new VariableBinding(getOID(CsSnmpConstants.GENERATION_TIME),
                    new OctetString(snmpTrapInfo.getGenerationTime().toString())));
            } else {
                // bind the OID with no value so receivers still see the field
                trap.add(new VariableBinding(getOID(CsSnmpConstants.GENERATION_TIME)));
            }
        } else {
            throw new CloudRuntimeException(" Invalid alert Type ");
        }

        return trap;
    }

    private OID getOID(String oidString) {
        return new OID(oidString);
    }
}
fossabot/lauth
api/post_userinfo.go
package api import ( "github.com/gin-gonic/gin" "github.com/macrat/lauth/errors" "github.com/macrat/lauth/metrics" ) type PostUserInfoRequest struct { GetUserInfoRequest AccessToken string `form:"access_token" header:"-"` } func (req *PostUserInfoRequest) Bind(c *gin.Context) *errors.Error { if e := (&req.GetUserInfoRequest).Bind(c); e != nil { return e } if err := c.ShouldBind(&req); err != nil { return &errors.Error{ Err: err, Reason: errors.InvalidToken, Description: "failed to parse request body", } } return nil } func (req PostUserInfoRequest) GetToken() (string, *errors.Error) { token, err := req.GetUserInfoRequest.GetToken() if err == nil { return token, nil } if req.AccessToken == "" { return "", &errors.Error{ Reason: errors.InvalidToken, Description: "access token is required", } } return req.AccessToken, nil } func (api *LauthAPI) PostUserInfo(c *gin.Context) { report := metrics.StartUserinfo(c) defer report.Close() c.Header("Cache-Control", "no-store") c.Header("Pragma", "no-cache") var req PostUserInfoRequest if e := (&req).Bind(c); e != nil { report.SetError(e) errors.SendJSON(c, e) return } rawToken, e := req.GetToken() if e != nil { report.SetError(e) errors.SendJSON(c, e) return } api.sendUserInfo(c, report, req.Origin, rawToken) }
KaviiChathuranga/Student-Management-javaFX-Desktop
2ndSemProject_Common/src/lk/ijse/student/service/custom/Batch_TeacherService.java
/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package lk.ijse.student.service.custom; import java.util.List; import lk.ijse.student.dto.BatchDto; import lk.ijse.student.dto.Batch_SubjectDTO; import lk.ijse.student.dto.SubjectDto; import lk.ijse.student.service.SuperService; /** * * @author Kavindu */ public interface Batch_TeacherService extends SuperService{ public boolean addBatch_Teacher(BatchDto batchDto,List<SubjectDto>subjectDtos)throws Exception; public boolean updateBatch_Teacher(Batch_SubjectDTO batch_TeacherDTO)throws Exception; public boolean removeBatch_Teacher(String id)throws Exception; public List<Batch_SubjectDTO> searchBatch_subject(String id)throws Exception; public List<Batch_SubjectDTO>getAll()throws Exception; }
c7yrus/alyson-v3
src/utils/vertx/events/incoming/layout.events.incoming.js
// import { // LAYOUT_CHANGE, // CMD_VIEW as VIEW_CHANGE, // CMD_POPUP as POPUP_CHANGE, // SUB_LAYOUT as SUBLAYOUT_CODE, // CMD_NOTIFICATION as NOTIFICATION_MESSAGE, // SUBLAYOUT_CHANGE, // CMD_RELOAD as RELOAD_PAGE, // } from 'constants'; export const CMD_LAYOUT = message => ({ type: 'LAYOUT_CHANGE', payload: message, }); export const CMD_VIEW = message => ({ type: 'VIEW_CHANGE', payload: message, }); export const CMD_POPUP = message => ({ type: 'POPUP_CHANGE', payload: message, }); export const CMD_NOTIFICATION = message => ({ type: 'NOTIFICATION_MESSAGE', payload: message, }); export const SUB_LAYOUT = message => ({ type: 'SUBLAYOUT_CODE', payload: message, }); export const CMD_SUBLAYOUT = message => ({ type: 'SUBLAYOUT_CHANGE', payload: message, }); export const CMD_RELOAD = message => ({ type: 'RELOAD_PAGE', payload: message, });
niejuqian/TestDemo
app/src/main/java/com/testdemo/util/DBHelper.java
<gh_stars>1-10
package com.testdemo.util;

import android.content.Context;
import android.database.sqlite.SQLiteDatabase;
import android.os.Environment;
import android.text.TextUtils;
import android.util.Log;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;

/**
 * Singleton helper that creates/opens the city database on external storage
 * and loads SQL statements from the bundled "sqt_area.sql" asset.
 *
 * Call {@link #init(Context)} before any other method; {@link #createDb()}
 * must run before {@link #executeSql(String)}.
 *
 * @AUTHOR:<EMAIL>
 * @DATETIME:2017 04 05 17:56
 */
public class DBHelper {
    private static final String TAG = "DBHelper";
    private static final String SXTC_CITIES_DB = "sxtc_cities.db";
    private static final String DB_PATH = Environment.getExternalStorageDirectory() + "/sqtc/";

    private SQLiteDatabase db;
    Context context;

    private static DBHelper dbHelper = new DBHelper();

    public void init(Context context){
        this.context = context;
    }

    public static DBHelper getInstance(){
        return dbHelper;
    }

    /**
     * Creates (or opens) the database.
     *
     * Bug fix: the original called {@code dbFile.mkdir()}, which created a
     * <em>directory</em> at the database file path, so the database could
     * never be opened. Only the parent directory is created here;
     * {@code openOrCreateDatabase} creates the file itself.
     */
    public void createDb() {
        File dbPath = new File(DB_PATH);
        if (!dbPath.exists() && !dbPath.mkdirs()) {
            Log.e(TAG, "数据库目录创建失败");
            return;
        }
        db = SQLiteDatabase.openOrCreateDatabase(DB_PATH + SXTC_CITIES_DB, null);
    }

    /**
     * Executes a single SQL statement. No-op for empty input.
     * Precondition: {@link #createDb()} has been called successfully.
     */
    public void executeSql(String sql) {
        if (TextUtils.isEmpty(sql)) return;
        db.execSQL(sql);
    }

    /**
     * Reads "sqt_area.sql" from assets and splits it into individual
     * statements (semicolon + newline delimited, "--" comments stripped).
     *
     * Bug fixes vs. the original: the split regex used "//s" instead of
     * "\\s" and never matched; the stream was never closed; the bytes were
     * decoded with the platform charset instead of UTF-8.
     *
     * @return the statements, or null if the asset cannot be opened
     */
    public List<String> readSql() {
        List<String> sqlList = new ArrayList<String>();
        InputStream inputStream;
        try {
            inputStream = context.getAssets().open("sqt_area.sql");
        } catch (IOException e) {
            e.printStackTrace();
            return null; // asset missing: same null result as before
        }
        try {
            StringBuffer str = new StringBuffer();
            byte[] buff = new byte[1024];
            int count;
            while ((count = inputStream.read(buff)) != -1) {
                str.append(new String(buff, 0, count, "utf-8"));
            }
            // Windows line endings are \r\n, Linux uses \n
            String[] sqlArr = str.toString().split("(;\\s*\\r\\n)|(;\\s*\\n)");
            for (int i = 0; i < sqlArr.length; i++) {
                String sql = sqlArr[i].replaceAll("--.*", "").trim();
                if (!sql.equals("")) {
                    sqlList.add(sql);
                    Log.e(TAG, "执行SQL:" + sql);
                }
            }
        } catch (IOException e) {
            // keep the original behaviour: partial list on read failure
            e.printStackTrace();
        } finally {
            try {
                inputStream.close();
            } catch (IOException ignored) {
                // nothing useful to do on close failure
            }
        }
        return sqlList;
    }

    /**
     * Loads and parses the sql file (duplicate of {@link #readSql()} kept for
     * backward compatibility with existing callers).
     *
     * @return the statements, or null on any failure
     */
    public List<String> loadSql() {
        List<String> sqlList = new ArrayList<String>();
        try {
            InputStream sqlFileIn = context.getAssets().open("sqt_area.sql");
            try {
                StringBuffer sqlSb = new StringBuffer();
                byte[] buff = new byte[1024];
                int byteRead = 0;
                while ((byteRead = sqlFileIn.read(buff)) != -1) {
                    sqlSb.append(new String(buff, 0, byteRead, "utf-8"));
                }
                // Windows line endings are \r\n, Linux uses \n
                String[] sqlArr = sqlSb.toString().split("(;\\s*\\r\\n)|(;\\s*\\n)");
                for (int i = 0; i < sqlArr.length; i++) {
                    String sql = sqlArr[i].replaceAll("--.*", "").trim();
                    if (!sql.equals("")) {
                        sqlList.add(sql);
                    }
                }
                return sqlList;
            } finally {
                sqlFileIn.close();
            }
        } catch (Exception ex) {
            ex.printStackTrace();
        }
        return null;
    }
}
Jamaika1/grok
src/lib/jp2/codestream/markers/PPMMarker.h
/* * Copyright (C) 2016-2021 Grok Image Compression Inc. * * This source code is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License, version 3, * as published by the Free Software Foundation. * * This source code is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. * * * This source code incorporates work covered by the BSD 2-clause license. * Please see the LICENSE file in the root directory for details. * */ #pragma once #include <vector> namespace grk { struct grk_ppx { uint8_t *m_data; /* m_data == nullptr => Zppx not read yet */ uint32_t m_data_size; }; class PPMMarker { public: PPMMarker(); ~PPMMarker(); /** * Read a PPM marker (Packed headers, main header) * * @param p_header_data the data contained in the POC box. * @param header_size the size of the data contained in the POC marker. */ bool read(uint8_t *p_header_data, uint16_t header_size); /** * Merges all PPM markers read (Packed headers, main header) * */ bool merge(void); std::vector<grkBufferU8> m_tile_packet_headers; private: /** number of ppm markers (reserved size) */ uint32_t markers_count; /** ppm markers data (table indexed by Zppm) */ grk_ppx *markers; /** packet header storage original buffer */ uint8_t *buffer; }; } /* namespace grk */
sorskoot/RosieBot
vue.config.js
var FriendlyErrorsWebpackPlugin = require('friendly-errors-webpack-plugin'); module.exports = { pluginOptions: { electronBuilder: { removeElectronJunk: false // True by default } }, configureWebpack: { plugins: [ new FriendlyErrorsWebpackPlugin(), ], }, lintOnSave: false, devServer: { disableHostCheck: true } };
LevelFourAB/silo
silo-search-api/src/main/java/se/l4/silo/index/search/query/UserQuery.java
<reponame>LevelFourAB/silo package se.l4.silo.index.search.query; import org.eclipse.collections.api.list.ImmutableList; import org.eclipse.collections.api.list.primitive.ImmutableFloatList; import se.l4.silo.index.search.QueryClause; import se.l4.silo.index.search.internal.UserQueryImpl; /** * Query from a user that applies to one or more fields. */ public interface UserQuery extends QueryClause { /** * Context to apply. */ enum Context { /** * Standard context, perform a full-text search. */ STANDARD, /** * Type ahead context. */ TYPE_AHEAD } /** * Get the fields to search in. * * @return */ ImmutableList<String> getFields(); /** * Get boosts for fields given by {@link #getFields()}. * * @return */ ImmutableFloatList getBoosts(); /** * Get the context of this query. * * @return */ Context getContext(); /** * Get the query to match against. * * @return */ String getQuery(); static Builder create() { return UserQueryImpl.create(); } static Matcher matcher(String query) { return matcher(query, Context.STANDARD); } static Matcher matcher(String query, Context context) { return UserQueryImpl.matcher(query, context); } /** * Matcher that can be used to easily perform a {@link UserQuery} on a * single field. */ interface Matcher extends se.l4.silo.index.Matcher<String> { /** * Get the context of this query. * * @return */ Context getContext(); /** * The query submitted. * * @return */ String getQuery(); } public interface Builder { /** * Add a field to query. * * @param field * @return */ Builder addField(String field); /** * Add a field to query. * * @param field * @param boost * @return */ Builder addField(String field, float boost); /** * Set the query. * * @param query * @return */ Builder withQuery(String query); /** * Set the context of the query. * */ Builder withContext(Context context); /** * Build the instance. * * @return */ UserQuery build(); } }
nathants/c-utils
util/read.h
#pragma once #include "util.h" #include "lz4.h" typedef struct readbuf_s { // public u8 *buffer; i32 bytes; // private FILE **files; u8 **buffers; i32 bytes_left; i32 bytes_read; i32 *offset; i32 *chunk_size; bool lz4; u8 *lz4_buf; i32 lz4_size; } readbuf_t; readbuf_t rbuf_init(FILE **files, i32 num_files, bool lz4) { readbuf_t *buf; MALLOC(buf, sizeof(readbuf_t)); buf->files = files; MALLOC(buf->buffers, sizeof(u8*) * num_files); MALLOC(buf->offset, sizeof(i32) * num_files); MALLOC(buf->chunk_size, sizeof(i32) * num_files); for (i32 i = 0; i < num_files; i++) { buf->chunk_size[i] = BUFFER_SIZE; buf->offset[i] = BUFFER_SIZE; MALLOC(buf->buffers[i], BUFFER_SIZE); } buf->lz4 = lz4; if (lz4) MALLOC(buf->lz4_buf, BUFFER_SIZE_LZ4); return *buf; } #define DECOMPRESS(buf) \ do { \ i32 decompressed_size = LZ4_decompress_safe(buf->lz4_buf, buf->buffers[file], buf->lz4_size, BUFFER_SIZE); \ ASSERT(buf->chunk_size[file] == decompressed_size, "fatal: decompress size mismatch\n"); \ } while(0) inlined void read_bytes(readbuf_t *buf, i32 size, i32 file) { buf->bytes_left = buf->chunk_size[file] - buf->offset[file]; // ------------------------------------ bytes left in the current chunk buf->bytes = size; ASSERT(buf->bytes_left >= 0, "fatal: negative bytes_left: %d\n", buf->bytes_left); if (buf->bytes_left == 0) { // --------------------------------------------------------------------- time to read the next chunk buf->bytes_read = fread_unlocked(&buf->chunk_size[file], 1, sizeof(i32), buf->files[file]); // - try read chunk size switch (buf->bytes_read) { case sizeof(i32): // ----------------------------------------------------------------------- read chunk size succeeded ASSERT(buf->chunk_size[file] <= BUFFER_SIZE, "fatal: bad chunk size: %d\n", buf->chunk_size[file]); #ifdef READ_GROWING // when defined hold all data in ram for sorting MALLOC(buf->buffers[file], buf->chunk_size[file]); #endif if (buf->lz4) { FREAD(&buf->lz4_size, sizeof(i32), buf->files[file]); // 
--------------------------- read compressed size FREAD(buf->lz4_buf, buf->lz4_size, buf->files[file]); // --------------------------- read compressed chunk DECOMPRESS(buf); } else FREAD(buf->buffers[file], buf->chunk_size[file], buf->files[file]); // ------------- read the chunk body buf->offset[file] = 0; // -------------------------------------------------------------- start at the beggining of the new chunk buf->bytes_left = buf->chunk_size[file]; // -------------------------------------------- bytes_left is the new chunk size ASSERT(size <= buf->bytes_left, "fatal: diskread, not possible, chunk sizes are known\n"); break; case 0: // --------------------------------------------------------------------------------- read chunk size failed ASSERT(!ferror_unlocked(buf->files[file]), "fatal: read error\n"); buf->chunk_size[file] = 0; buf->offset[file] = 0; buf->bytes = 0; break; default: ASSERT(0, "fatal: impossible\n"); } } else ASSERT(size <= buf->bytes_left, "fatal: ramread, not possible, chunk sizes are known\n"); buf->buffer = buf->buffers[file] + buf->offset[file]; // ------------------------------------------- update the buffer position for the current read buf->offset[file] += buf->bytes; // ---------------------------------------------------------------- update the buffer offset } inlined void read_bytes_assert(readbuf_t *buf, i32 size, i32 file) { read_bytes(buf, size, file); ASSERT(buf->bytes == size, "didnt read enough, only got: %d, expected: %d\n", (buf)->bytes, size); }
tristan-chauveau/otoroshi
manual/src/main/paradox/snippets/tcp-proxy.js
const proxy = require("node-tcp-proxy"); const hosts = ["192.168.1.40", "192.168.1.41", "192.168.1.42"]; const portsHttp = [8080, 8080, 8080]; const portsHttps = [8443, 8443, 8443]; const proxyHttp = proxy.createProxy(80, hosts, portsHttp, { tls: false }); const proxyHttps = proxy.createProxy(443, hosts, portsHttps, { tls: false });
blit/stupidedi
lib/stupidedi/guides/004010/segment_reqs.rb
# frozen_string_literal: true module Stupidedi using Refinements module Guides module FortyTen # # @see X222.pdf 2.2 Implementation Usage # module SegmentReqs # Declares that this segment must always be sent. Required segments # within Situational loops only occur when the loop occurs. Required = Schema::SegmentReq.new(true, false, "R") # Use of this segment varies, depending on data content and business # context as described in the defining rule. The defining rule is # documented in a Situational Rule attached to the segment. # # There are two forms of Situational Rules. The first is "Required when # <condition>. If not required by <condition>, the segment may be # provided at the discretion of the sender, but it cannot be required or # requested by the receiver.' # # The other form is "Required when <condition>. If not required by # <condition>, do not send". The segment described by the rule cannot # occur except when the condition holds. Situational = Schema::SegmentReq.new(false, false, "S") end end end end
OSADP/Pikalert-Vehicle-Data-Translator-
libs/vdt4/vdt_point.cc
#include <math.h> #include "vdt_point.hh" using namespace std; vdt_point::vdt_point() { }; const double vdt_point::operator[](const int n) { if (n == 0) { return get_latitude(); } return get_longitude(); } bool vdt_point::operator==(const vdt_point& o) { return (o.get_latitude() == get_latitude() && o.get_longitude() == get_longitude()); } bool vdt_point::operator!=(const vdt_point& o) { return (o.get_latitude() != get_latitude() || o.get_longitude() != get_longitude()); } ostream& operator<<(ostream &ostr, const vdt_point &point) { ostr << "latitude: " << point.get_latitude() << " longitude: " << point.get_longitude() << " elevation: " << point.get_elevation() << endl; return ostr; } const double vdt_point::GRC_EARTH_RADIUS = 6371.229; double vdt_point::radians(double degrees) { return (M_PI / 180.0) * degrees; } double vdt_point::great_circle_distance(double lat1, double lon1, double lat2, double lon2) { // Return the great circle distance between two lat, lons double rlat1 = radians(lat1); double rlat2 = radians(lat2); double rlon1 = radians(lon1); double rlon2 = radians(lon2); double delta_lat = rlat2 - rlat1; double delta_lon = rlon2 - rlon1; double temp = pow(sin(delta_lat / 2.0), 2) + cos(rlat1) * cos(rlat2) * pow(sin(delta_lon / 2.0), 2); double distance = GRC_EARTH_RADIUS * 2 * atan2(sqrt(temp), sqrt(1 - temp)); return distance; }
twigkit/aws-sdk-java
aws-java-sdk-swf-libraries/src/main/java/com/amazonaws/services/simpleworkflow/flow/LambdaFunctionFailedException.java
<filename>aws-java-sdk-swf-libraries/src/main/java/com/amazonaws/services/simpleworkflow/flow/LambdaFunctionFailedException.java /* * Copyright 2012-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not * use this file except in compliance with the License. A copy of the License is * located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.amazonaws.services.simpleworkflow.flow; @SuppressWarnings("serial") public class LambdaFunctionFailedException extends LambdaFunctionException { private String detail; public LambdaFunctionFailedException(String message, Throwable cause) { super(message, cause); } public LambdaFunctionFailedException(String message) { super(message); } public LambdaFunctionFailedException(long eventId, String name, String id, String detail) { super("failed", eventId, name, id); this.detail = detail; } public String getDetail() { return detail; } public void setDetail(String detail) { this.detail = detail; } }
cszczepaniak/fivethirtyeight-riddler
SpellingBee/go/1.12/utils/utils_test.go
package utils import ( "testing" "github.com/cszczepaniak/fivethirtyeight-riddler/SpellingBee/board" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) func TestBoardSubsets(t *testing.T) { type boardConfig struct { middle rune others []rune } tests := []struct { b boardConfig nExp int }{{ b: boardConfig{ middle: 'a', others: []rune{'b', 'c', 'd', 'e', 'f', 'g'}, }, nExp: 64, }} for _, tc := range tests { b, err := board.New(tc.b.middle, tc.b.others) require.NoError(t, err) combs := BoardSubsets(b) assert.Equal(t, tc.nExp, len(combs)) } }
TomatoYoung/beegfs
client_module/source/common/net/message/storage/listing/ListDirFromOffsetMsg.h
<filename>client_module/source/common/net/message/storage/listing/ListDirFromOffsetMsg.h #ifndef LISTDIRFROMOFFSETMSG_H_ #define LISTDIRFROMOFFSETMSG_H_ #include <common/net/message/NetMessage.h> #include <common/storage/Path.h> #include <common/storage/EntryInfo.h> /** * This message supports only serialization. (deserialization not implemented) */ struct ListDirFromOffsetMsg; typedef struct ListDirFromOffsetMsg ListDirFromOffsetMsg; static inline void ListDirFromOffsetMsg_init(ListDirFromOffsetMsg* this); static inline void ListDirFromOffsetMsg_initFromEntryInfo(ListDirFromOffsetMsg* this, const EntryInfo* entryInfo, int64_t serverOffset, unsigned maxOutNames, bool filterDots); // virtual functions extern void ListDirFromOffsetMsg_serializePayload(NetMessage* this, SerializeCtx* ctx); struct ListDirFromOffsetMsg { NetMessage netMessage; int64_t serverOffset; unsigned maxOutNames; bool filterDots; // for serialization const EntryInfo* entryInfoPtr; // not owned by this object! // for deserialization EntryInfo entryInfo; }; extern const struct NetMessageOps ListDirFromOffsetMsg_Ops; void ListDirFromOffsetMsg_init(ListDirFromOffsetMsg* this) { NetMessage_init(&this->netMessage, NETMSGTYPE_ListDirFromOffset, &ListDirFromOffsetMsg_Ops); } /** * @param entryInfo just a reference, so do not free it as long as you use this object! * @param filterDots true if you don't want "." and ".." in the result list. */ void ListDirFromOffsetMsg_initFromEntryInfo(ListDirFromOffsetMsg* this, const EntryInfo* entryInfo, int64_t serverOffset, unsigned maxOutNames, bool filterDots) { ListDirFromOffsetMsg_init(this); this->entryInfoPtr = entryInfo; this->serverOffset = serverOffset; this->maxOutNames = maxOutNames; this->filterDots = filterDots; } #endif /*LISTDIRFROMOFFSETMSG_H_*/
tgsoverly/pybee-open-close
pxbee/drivers/timer/timer.c
<reponame>tgsoverly/pybee-open-close<filename>pxbee/drivers/timer/timer.c /**************************************************************************** * Copyright (C) 2012 - 2015 Digi International Inc, All Rights Reserved. * * This software is provided as instructional material without charge * by Digi International for use by its employees and customers * subject to the following terms. * * PERMISSION * Permission is hereby granted, free of charge, to any person obtaining * a copy of this software, to deal with it without restriction, * including without limitation the rights to use, copy, modify, merge, publish, * distribute, sublicense, and/or sell copies of it, and to permit persons to * whom it is furnished to do so, provided the above copyright notice * and this permission notice are included in all derived works * and the use of this software is restricted to Digi products. * * WARRANTY * THIS SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE, OR NONINFRINGEMENT. * * LIABILITY * IN NO EVENT SHALL DIGI INTERNATIONAL BE LIABLE FOR ANY CLAIM, DAMAGES, * OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT, OR OTHERWISE, * ARISING FROM, OUT OF, OR IN CONNECTION WITH THE SOFTWARE, OR THE USE * OR OTHER DEALINGS WITH THE SOFTWARE. * ****************************************************************************/ /** * @ingroup api_timer * * @defgroup api_timer * * @section api_timer_overview Timer API Overview * * This API contains the functions to configure and control the High * Resolution Timer component on programmable XBee devices. 
* * - Timer interrupt request functions: * - timer_config() * - timer_set_timeout() * - timer_get_timeout() * - timer_set_mode() * - timer_get_mode() * - timer_irq_enable() * - timer_enable() */ #include <timer.h> #ifdef ENABLE_TIMER #define TIMER1_MODE_MASK (1 << 0) #define TIMER2_MODE_MASK (1 << 1) #define TIMER3_MODE_MASK (1 << 2) uint8_t timers_mode = 0; /** * @ingroup api_timer * @brief Function that configures the settings of the timer component. * @param[in] timer timer to configure (use the name of the component * defined by the config.xml) * @param[in] enable TRUE for enabling, FALSE for disabling the timer. * @param[in] mode desired mode for the timer (use defined macros). * @param[in] timeout desired time for the timer to timeout * @retval 0 On success. * @retval -EINVAL If \b timer is not a valid timer. * @retval -EOVERFLOW If \b u_secs is not in supported ranges. */ int timer_config(timer_t timer, bool_t enable, timermode_t mode, uint32_t timeout) { int ret; if ( (timer < TPM1) || (timer > TPM3) ) return -EINVAL; timer_set_mode(timer, mode); timer_enable(timer, enable); ret = timer_set_timeout(timer, timeout); if (ret < 0) return ret; return 0; } /** * @ingroup api_timer * @brief Function that sets the timeout of the timer interrupt if it is * configured as One-Shot timer or the period if it is configured * as a periodic timer. * @param[in] timer timer to set timeout (use the name of the component * defined by config.xml). * @param[in] u_secs Timeout of interrupt in microseconds. * @retval 0 On success. * @retval -EINVAL If \b timer is not a valid timer. * @retval -EOVERFLOW If \b u_secs is not in supported ranges. 
*/ int timer_set_timeout(timer_t timer, uint32_t timeout) { int prescaler; uint8_t tpmsc, clk_source; if ( (timer < TPM1) || (timer > TPM3) ) return -EINVAL; prescaler = tpm_set_prescaler(timer, timeout); if (prescaler < 0) return -EOVERFLOW; tpmsc = REG_TPMSC(timer); clk_source = tpmsc & (TPM_CLKSRC_BUS| TPM_CLKSRC_FIX); REG_TPMSC(timer) &= ~(uint8_t)(TPM_CLKSRC_BUS| TPM_CLKSRC_FIX); REG_TPMMOD(timer) = tpm_compute_modulo(tpmsc, timeout); REG_TPMSC(timer) |= clk_source; return 0; } /** * @ingroup api_timer * @brief Function that returns the timeout of the timer interrupt if it * is configured as One-Shot timer or the period if it is * configured as a periodic timer * @param[in] timer timer to get timeout (use the name of the component * defined by config.xml). * @retval uint32_t configured timeout (in microseconds). */ uint32_t timer_get_timeout(timer_t timer) { return tpm_modulo_to_usec(REG_TPMSC(timer), REG_TPMMOD(timer)); } /** * @ingroup api_timer * @brief Function that enables timer interrupt. * @param[in] timer timer to enable IRQ (use the name of the component * defined by config.xml). * @param[in] enable TRUE to enable, FALSE to disable IRQ. * @retval 0 on success. * @retval -EINVAL If \b timer is not a valid timer. */ int timer_irq_enable(timer_t timer, bool_t enable) { if ( (timer < TPM1) || (timer > TPM3) ) return -EINVAL; if (enable) REG_TPMSC(timer) |= TPM1SC_TOIE_MASK; else REG_TPMSC(timer) &= ~(uint8_t)TPM1SC_TOIE_MASK; return 0; } /** * @ingroup api_timer * @brief Function that sets timer's mode. * @param[in] timer timer to set mode (use the name of the component defined * by the config.xml). * @param[in] mode Timer mode: ONE_SHOT or PERIODIC. This determines how * many times the interrupt will be entered. * @retval -EINVAL If \b timer is not a valid timer. * @retval -ERANGE out of range. 
*/ int timer_set_mode(timer_t timer, timermode_t mode) { if ( (timer < TPM1) || (timer > TPM3) ) return -EINVAL; if (mode == ONE_SHOT) { if (timer == TPM1) timers_mode |= TIMER1_MODE_MASK; else if (timer == TPM2) timers_mode |= TIMER2_MODE_MASK; else timers_mode |= TIMER3_MODE_MASK; } else if (mode == PERIODIC) { if (timer == TPM1) timers_mode &= ~TIMER1_MODE_MASK; else if (timer == TPM2) timers_mode &= ~TIMER2_MODE_MASK; else timers_mode &= ~TIMER3_MODE_MASK; } else { return -ERANGE; } return 0; } /** * @ingroup api_timer * @brief Function that returns timer's mode configuration. * @param[in] timer timer to get mode (use the name of the component * defined by config.xml). * @retval timermode_t Timer mode: ONE_SHOT or PERIODIC. This * determines how many times the interrupt will be * entered. * NO_VALID if bad timer is passed as argument. */ timermode_t timer_get_mode(timer_t timer) { switch(timer) { case TPM1: return (timers_mode & TIMER1_MODE_MASK ? ONE_SHOT : PERIODIC); case TPM2: return (timers_mode & TIMER2_MODE_MASK ? ONE_SHOT : PERIODIC); case TPM3: return (timers_mode & TIMER3_MODE_MASK ? ONE_SHOT : PERIODIC); } return NO_VALID; } #pragma INLINE static void timer_clear_tof(timer_t timer) { REG_TPMSC(timer) &= ~(uint8_t)TPM1SC_TOF_MASK; } /** * @ingroup api_timer * @brief Function that enables/disables the timer module. * @param[in] timer timer to enable IRQ (use the name of the component * defined by config.xml). * @param[in] enable TRUE for enabling, FALSE for disabling module. 
* @retval nothing */ void timer_enable(timer_t timer, uint8_t enable) { tpm_clock_gating(timer, enable); if (enable) { REG_TPMCNT(timer) = 0x00; timer_clear_tof(timer); timer_irq_enable(timer, TRUE); } } #ifdef TPM1_OVERFLOW_INTERRUPT #pragma TRAP_PROC void tpm1ov_isr(void) { if (timers_mode & TIMER1_MODE_MASK) { /* One-Shot */ TPM1SC_TOIE = 0; timer_enable(TPM1, FALSE); } timer_clear_tof(TPM1); tpm1ov_irq(); } #endif /* ENABLE_TPM1_OVERFLOW_INTERRUPT */ #ifdef TPM2_OVERFLOW_INTERRUPT #pragma TRAP_PROC void tpm2ov_isr(void) { if (timers_mode & TIMER2_MODE_MASK) { /* One-Shot */ TPM2SC_TOIE = 0; timer_enable(TPM2, FALSE); } timer_clear_tof(TPM2); tpm2ov_irq(); } #endif /* ENABLE_TPM2_OVERFLOW_INTERRUPT */ #ifdef TPM3_OVERFLOW_INTERRUPT #pragma TRAP_PROC void tpm3ov_isr(void) { if (timers_mode & TIMER3_MODE_MASK) { /* One-Shot */ TPM3SC_TOIE = 0; timer_enable(TPM3, FALSE); } timer_clear_tof(TPM3); tpm3ov_irq(); } #endif /* ENABLE_TPM3_OVERFLOW_INTERRUPT */ #endif /* ENABLE_TIMER */
wkboys/CustomView
app/src/main/java/com/template/customview/RectRefreshTextView.java
<reponame>wkboys/CustomView
package com.template.customview;

import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Rect;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;

/**
 * Demo SurfaceView illustrating partial ("dirty rect") redrawing: the surface
 * is first cleared to black, then ten numbered green cells are drawn one by
 * one, each via a lockCanvas() call restricted to that cell's rectangle.
 *
 * Changes vs. the original: null checks on every lockCanvas() result (it
 * returns null once the surface is destroyed, which previously caused NPEs),
 * the large blocks of commented-out experiment code and the unused
 * mInts/dumpCanvasRect members were removed, and the render loop is split
 * into two documented helpers.
 */
public class RectRefreshTextView extends SurfaceView {

    private static final String TAG = "qijian";

    private Paint mPaint;

    public RectRefreshTextView(Context context) {
        super(context);
        init();
    }

    public RectRefreshTextView(Context context, AttributeSet attrs) {
        super(context, attrs);
        init();
    }

    public RectRefreshTextView(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        init();
    }

    private void init() {
        mPaint = new Paint();
        mPaint.setColor(Color.RED);
        mPaint.setTextSize(30);

        getHolder().addCallback(new SurfaceHolder.Callback() {
            @Override
            public void surfaceCreated(SurfaceHolder holder) {
                drawText(holder);
            }

            @Override
            public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
            }

            @Override
            public void surfaceDestroyed(SurfaceHolder holder) {
            }
        });
    }

    // Runs the demo on a background thread (surface callbacks arrive on the
    // main thread; drawing with sleeps must not block it).
    private void drawText(final SurfaceHolder holder) {
        new Thread(new Runnable() {
            @Override
            public void run() {
                clearSurface(holder);
                drawNumberCells(holder);
            }
        }).start();
    }

    /**
     * Clears the whole surface to black. A tiny 1x1 dirty rect is requested
     * each time; while the system keeps handing back a canvas clipped to the
     * full view (i.e. the surface buffers are still undefined), the canvas is
     * filled black. Once the clip matches the requested dirty rect, all
     * buffers have been initialized and the loop stops.
     */
    private void clearSurface(SurfaceHolder holder) {
        while (true) {
            Rect dirtyRect = new Rect(0, 0, 1, 1);
            Canvas canvas = holder.lockCanvas(dirtyRect);
            if (canvas == null) {
                return; // surface already destroyed
            }
            Rect canvasRect = canvas.getClipBounds();
            if (getWidth() == canvasRect.width() && getHeight() == canvasRect.height()) {
                canvas.drawColor(Color.BLACK);
                holder.unlockCanvasAndPost(canvas);
                Log.d(TAG, "clear canvas");
            } else {
                holder.unlockCanvasAndPost(canvas);
                break;
            }
        }
    }

    // Draws ten numbered green cells, one per 800 ms, each through a canvas
    // locked only for that cell's rectangle.
    private void drawNumberCells(SurfaceHolder holder) {
        final int itemWidth = 50;
        final int itemHeight = 50;
        for (int i = 0; i < 10; i++) {
            Rect rect = new Rect(i * itemWidth, 0, (i + 1) * itemWidth - 10, itemHeight);
            Canvas canvas = holder.lockCanvas(rect);
            if (canvas == null) {
                return; // surface destroyed mid-animation
            }
            canvas.drawColor(Color.GREEN);
            canvas.drawText(i + "", i * itemWidth + 10, itemHeight / 2, mPaint);
            holder.unlockCanvasAndPost(canvas);
            try {
                Thread.sleep(800);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                return;
            }
        }
    }
}
maciejg-git/vue-bootstrap-icons
dist-mdi/mdi/chevron-triple-right.js
import { h } from 'vue'

// SVG path markup for the mdi "chevron-triple-right" glyph.
const PATH_MARKUP =
  "<path d='M14.58,16.59L19.17,12L14.58,7.41L16,6L22,12L16,18L14.58,16.59M8.58,16.59L13.17,12L8.58,7.41L10,6L16,12L10,18L8.58,16.59M2.58,16.59L7.17,12L2.58,7.41L4,6L10,12L4,18L2.58,16.59Z' />"

// Vue icon component for the Mdi "chevron-triple-right" icon; renders a
// 24x24 SVG filled with the current text color.
export default {
  name: "ChevronTripleRight",
  vendor: "Mdi",
  type: "",
  tags: ["chevron", "triple", "right"],
  render() {
    const svgProps = {
      "xmlns": "http://www.w3.org/2000/svg",
      "width": "24",
      "height": "24",
      "viewBox": "0 0 24 24",
      "class": "v-icon",
      "fill": "currentColor",
      "data-name": "mdi-chevron-triple-right",
      "innerHTML": PATH_MARKUP,
    }
    return h("svg", svgProps)
  },
}
ninalofrese/androidcore
ComponentsActivities/revisaorecycler/src/main/java/com/example/revisaorecycler/adapters/AnimalAdapter.java
<filename>ComponentsActivities/revisaorecycler/src/main/java/com/example/revisaorecycler/adapters/AnimalAdapter.java package com.example.revisaorecycler.adapters; import android.graphics.drawable.Drawable; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.ImageView; import android.widget.TextView; import androidx.annotation.NonNull; import androidx.recyclerview.widget.RecyclerView; import com.example.revisaorecycler.R; import com.example.revisaorecycler.interfaces.RecyclerViewOnClick; import com.example.revisaorecycler.models.Animal; import java.util.List; public class AnimalAdapter extends RecyclerView.Adapter<AnimalAdapter.ViewHolder> { private List<Animal> listaAnimais; private RecyclerViewOnClick listener; public AnimalAdapter(List<Animal> listaAnimais, RecyclerViewOnClick listener) { this.listaAnimais = listaAnimais; this.listener = listener; } @NonNull @Override public AnimalAdapter.ViewHolder onCreateViewHolder(@NonNull ViewGroup parent, int viewType) { View view = LayoutInflater.from(parent.getContext()).inflate(R.layout.item_animal, parent, false); return new ViewHolder(view); } @Override public void onBindViewHolder(@NonNull AnimalAdapter.ViewHolder holder, int position) { final Animal animal = listaAnimais.get(position); holder.onBind(animal); holder.itemView.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { listener.onClick(animal); } }); } @Override public int getItemCount() { return listaAnimais.size(); } public class ViewHolder extends RecyclerView.ViewHolder { private ImageView image; private TextView especie; public ViewHolder(@NonNull View itemView) { super(itemView); image = itemView.findViewById(R.id.imageView); especie = itemView.findViewById(R.id.textViewNome); } public void onBind(Animal animal) { Drawable drawable = itemView.getResources().getDrawable(animal.getImagem()); image.setImageDrawable(drawable); 
especie.setText(animal.getRaca()); } } }
FlorianLance/exvr
cpp-projects/exvr-designer/experiment/experiment.hpp
/***********************************************************************************
** exvr-designer                                                                  **
** MIT License                                                                    **
** Copyright (c) [2018] [<NAME>][EPFL-LNCO]                                       **
** Permission is hereby granted, free of charge, to any person obtaining a copy   **
** of this software and associated documentation files (the "Software"), to deal  **
** in the Software without restriction, including without limitation the rights   **
** to use, copy, modify, merge, publish, distribute, sublicense, and/or sell      **
** copies of the Software, and to permit persons to whom the Software is          **
** furnished to do so, subject to the following conditions:                       **
**                                                                                **
** The above copyright notice and this permission notice shall be included in all **
** copies or substantial portions of the Software.                                **
**                                                                                **
** THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR     **
** IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,       **
** FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE    **
** AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER         **
** LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,  **
** OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE  **
** SOFTWARE.                                                                      **
************************************************************************************/

#pragma once

// std
#include <optional>

// local
// # data
#include "data/routine.hpp"
#include "data/isi.hpp"
#include "data/loop.hpp"
#include "data/settings.hpp"
#include "data/gui.hpp"
#include "data/interval.hpp"
#include "data/argument.hpp"
#include "data/components_manager.hpp"
#include "data/states.hpp"
// # resources
#include "resources/resources_manager.hpp"
// # experiment
#include "randomizer.hpp"

namespace tool::ex{

// ############################################################################### CONSTANTS

// Bit flags OR'ed into Experiment::updateFlag (see add_to_update_flag) to tell
// the designer UI which parts of the model changed and must be refreshed.
constexpr static int UpdateFlow       = 0b1;
constexpr static int UpdateSelection  = 0b10;
constexpr static int UpdateComponents = 0b100;
constexpr static int UpdateRoutines   = 0b1000;
constexpr static int UpdateUI         = 0b10000;
constexpr static int UpdateResources  = 0b100000;
constexpr static int UpdateSettings   = 0b1000000;
constexpr static int ResetUI          = 0b10000000;
// NOTE(review): UpdateAll does NOT include ResetUI — confirm this is intended.
constexpr static int UpdateAll = UpdateComponents | UpdateFlow | UpdateSelection | UpdateUI | UpdateRoutines | UpdateResources | UpdateSettings;

//enum class Update{
//    ELEMENT, select_element,unselect_elements, add_element, remove_element, move_element, modify_element_name,
//    ROUTINE, select_condition, move_condition,
//    CONDITION, modify_condition,
//    ACTION, fill_action, remove_actions,
//};

//struct Up{

//    Up(bool all) : m_all(all){
//    }

//    Up(std_v1<Update> updates){
//        for(auto &update : updates){
//            m_updates.emplace(update);
//        }
//    }

//    bool check(Update u) const{
//        return m_all ? true : (m_updates.count(u) != 0);
//    }

//private:
//    bool m_all = false;
//    std::unordered_set<Update> m_updates;
//};

class Experiment;
using ExperimentUP = std::unique_ptr<Experiment>;

// Central in-memory model of an experiment being designed: owns the flow
// elements (routines, ISIs, loops), holds the settings/GUI state and pointers
// to the components/resources managers, and exposes Qt slots that the designer
// UI invokes to mutate the model.  Mutating operations are expected to set
// bits in updateFlag so views know what to refresh.
class Experiment : public QObject{
    Q_OBJECT
public:

    Experiment(QString nVersion);

    // getters
    // # element
    std_v1<Element*> get_elements() const;
    std_v1<Element*> get_elements_from_type(Element::Type type) const;
    Element *get_element(ElementKey elementKey) const;
    // ## routine
    Routine *get_routine(ElementKey routineKey) const;
    // ### condition
    Condition *get_condition(ConditionKey conditionKey) const;
    Condition *get_condition(ElementKey routineKey, ConditionKey conditionKey) const;
    // #### action
    Action *get_action(ElementKey routineKey, ConditionKey conditionKey, ActionKey actionKey) const;
    // ## isi
    Isi *get_isi(ElementKey isiKey) const;
    // ## loop
    Loop *get_loop(ElementKey loopKey) const;
    std_v1<Loop*> get_loops() const;
    // # components
    Component *get_component(ComponentKey componentKey) const;
    // ## config
    Config *get_config(ComponentKey componentKey, ConfigKey configKey) const;

    inline Settings *settings() noexcept{return &m_settings;}
    inline GUI *gui() noexcept{return &m_gui;}

    // check
    void check_elements();
    void check_legacy_conditions();
    void check_integrity();

    // update flags (see the Update* constants above)
    inline void set_update_all_flag() noexcept{updateFlag = UpdateAll;}
    inline void add_to_update_flag(int flag) noexcept{updateFlag |= flag;}
    inline void reset_update_flag() noexcept{updateFlag = 0;}
    inline int update_flag() const noexcept {return updateFlag;}

    // Returns every flow element that dynamic_casts to T.
    template<class T>
    std_v1<T*> get_elements_from_type() const{

        std_v1<T*> children;
        for(const auto &elem : elements){
            if(auto child = dynamic_cast<T*>(elem.get()); child != nullptr){
                children.emplace_back(child);
            }
        }
        return children;
    }

    // Returns the flow element of type T with the given key, or nullptr.
    template<class T>
    T* get_element_from_type_and_id(ElementKey key) const{

        for(const auto &elem : elements){
            if(T* child = dynamic_cast<T*>(elem.get()); child != nullptr){
                if(elem->key() == key.v){
                    return child;
                }
            }
        }
        return nullptr;
    }

public slots:

    // update
    void compute_loops_levels();
    void update_conditions();
    // # randomization
    void update_randomization_seed(unsigned int seed);

    // files
    void new_experiment();
    void clean_experiment();
    void set_instance_name(QString instanceName);

    // settings
    void reset_settings();
    void update_settings(Settings settings);

    // resources
    void add_resources(Resource::Type type, QStringList filesPath);
    void update_resource_path(QString currentPath, QString newPath);
    void update_resource_alias(QString currentAlias, QString newAlias);
    void select_resource(Resource::Type type, size_t index);
    void remove_resource(Resource::Type type, size_t index);
    void clean_resources(Resource::Type type);
    void update_reload_resource_code(int reloadCode);

    // ui
    void toggle_actions_connections_separation();
    void toggle_design_mode();
    void toggle_follow_condition_mode();
    // # infos
    void update_connector_dialog_with_info(ElementKey elementKey, ConditionKey conditionKey, ConnectorKey connectorKey, QStringView id, QStringView value);
    void update_component_dialog_with_info(ComponentKey componentKey, ConfigKey configKey, QStringView id, QStringView value);
    // # states
    void update_exp_launcher_state(ExpLauncherState state, QStringView infos);
    void update_exp_state(ExpState state, QStringView infos);

    // components
    void add_component(Component::Type type, RowId id);
    void duplicate_component(ComponentKey componentKey);
    void remove_component(ComponentKey componentKey);
    void update_component_position(ComponentKey componentKey, RowId id);
    void update_component_name(ComponentKey componentKey, QString name);
    void sort_components_by_category();
    void sort_components_by_type();
    void sort_components_by_name();
    // # components configs
    void select_config_in_component(ComponentKey componentKey, RowId id);
    void insert_config_in_component(ComponentKey componentKey, RowId id, QString configName);
    void copy_config_from_component(ComponentKey componentKey, RowId id, QString configName);
    void remove_config_from_component(ComponentKey componentKey, RowId id);
    void move_config_in_component(ComponentKey componentKey, RowId from, RowId to);
    void rename_config_in_component(ComponentKey componentKey, RowId id, QString configName);
    // ## arguments
    void new_arg(ComponentKey componentKey, ConfigKey configKey, Arg arg, bool initConfig);
    void arg_updated(ComponentKey componentKey, ConfigKey configKey, Arg arg, bool initConfig);
    void arg_removed(ComponentKey componentKey, ConfigKey configKey, QStringView argName, bool initConfig);
    void swap_arg(ComponentKey componentKey, ConfigKey configKey, QStringView argName1, QStringView argName2, bool initConfig);

    // elements
    size_t get_element_position(Element *element) const;
    void unselect_all_elements(bool updateSignal = true) noexcept;
    void select_element(ElementKey elementKey, bool updateSignal = true);
    void add_element(Element::Type type, size_t index);
    void remove_element(Element *elemToDelete);
    void remove_element_of_key(ElementKey elementKey);
    void duplicate_element(ElementKey elementKey);
    void clean_current_routine_condition(ElementKey routineKey);
    void clean_all_routine_conditions(ElementKey routineKey);
    void set_duration_for_all_routine_conditions(ElementKey routineKey);
    void remove_selected_element();
    void move_left(size_t id);
    void move_right(size_t id);
    void update_element_name(ElementKey elementKey, QString elemName);
    // # loop
    void select_loop_set(ElementKey loopKey, QString setName);
    void add_loop_sets(ElementKey loopKey, QString sets, RowId id);
    void modify_loop_set_name(ElementKey loopKey, QString setName, RowId id);
    void modify_loop_set_occurrencies_nb(ElementKey loopKey, int setOccuranciesNb, RowId id);
    void modify_loop_type(ElementKey loopKey, Loop::Mode type);
    void modify_loop_nb_reps(ElementKey loopKey, int nbReps);
    void modify_loop_N(ElementKey loopKey, int N);
    void remove_set(ElementKey loopKey, RowId id);
    void sort_loop_sets_lexico(ElementKey loopKey);
    void sort_loop_sets_num(ElementKey loopKey);
    void move_loop_set_up(ElementKey loopKey, RowId id);
    void move_loop_set_down(ElementKey loopKey, RowId id);
    void load_loop_sets_file(ElementKey loopKey, QString path);
    void reload_loop_sets_file(ElementKey loopKey);
    // # isi
    void add_isi_interval(ElementKey isiKey, qreal value, RowId id);
    void remove_isi_interval(ElementKey isiKey, RowId id);
    void set_isi_randomize(ElementKey isiKey, bool randomize);
    void modify_isi_interval(ElementKey isiKey, qreal value, RowId id);
    void move_isi_interval_up(ElementKey isiKey, RowId id);
    void move_isi_interval_down(ElementKey isiKey, RowId id);
    // # routine
    void select_routine_condition(ElementKey routineKey, RowId conditionTabId);
    void move_routine_condition_down(ElementKey routineKey, RowId id);
    void move_routine_condition_up(ElementKey routineKey, RowId id);
    void set_routine_as_randomizer(ElementKey routineKey, bool isARandomizer);
    // ## condition
    void update_condition_timeline(ElementKey routineKey, ConditionKey conditionKey, double duration, double scale, double uiFactorSize);
    void copy_to_conditions(ElementKey routineKey, ConditionKey conditionKey, std_v1<std::pair<ElementKey,ConditionKey>> conditionsToBeEcrased, bool copyActions, bool copyConnections);
    void delete_actions_from_condition(ElementKey routineKey, ConditionKey conditionKey);
    void fill_actions_from_condition(ElementKey routineKey, ConditionKey conditionKey);
    void clean_actions_from_condition(ElementKey routineKey, ConditionKey conditionKey);
    // ### action
    void add_action(ElementKey routineKey, ConditionKey conditionKey, ComponentKey componentKey, std::optional<ConfigKey> ConfigKey, bool fillUpdateTimeline, bool fillVisibilityTimeline);
    void add_action_to_all_conditions(ElementKey routineKey, ComponentKey componentKey, std::optional<ConfigKey> ConfigKey, bool fillUpdateTimeline, bool fillVisibilityTimeline);
    void add_action_to_all_routines_conditions(ComponentKey componentKey, std::optional<ConfigKey> ConfigKey, bool fillUpdateTimeline, bool fillVisibilityTimeline);
    void modify_action(ElementKey routineKey, ConditionKey conditionKey, ComponentKey componentKey, bool changeConfig, bool changeUpdateTimeline, bool changeVisibilityTimeline, ConfigKey ConfigKey, bool fillUpdateTimeline, bool fillVisibilityTimeline);
    void modify_action_to_all_conditions(ElementKey routineKey, ComponentKey componentKey, bool changeConfig, bool changeUpdateTimeline, bool changeVisibilityTimeline, ConfigKey ConfigKey, bool fillUpdateTimeline, bool fillVisibilityTimeline);
    void modify_action_to_all_routines_conditions(ComponentKey componentKey, bool changeConfig, bool changeUpdateTimeline, bool changeVisibilityTimeline, ConfigKey ConfigKey, bool fillUpdateTimeline, bool fillVisibilityTimeline);
    void remove_action_from_all_selected_routine_conditions(ElementKey routineKey, ComponentKey componentKey);
    void remove_action_from_all_routines_conditions(ComponentKey componentKey);
    void remove_action_from_condition(ElementKey routineKey, ConditionKey conditionKey, ActionKey actionKey, bool update = true);
    void fill_action(ElementKey routineKey, ConditionKey conditionKey, ActionKey actionKey, bool update, bool visibility);
    void clean_action(ElementKey routineKey, ConditionKey conditionKey, ActionKey actionKey, bool update, bool visibility);
    void move_action_up(ElementKey routineKey, ConditionKey conditionKey, ActionKey actionKey);
    void move_action_down(ElementKey routineKey, ConditionKey conditionKey, ActionKey actionKey);
    // #### timeline
    void add_timeline_interval(ElementKey routineKey,ConditionKey conditionKey, ActionKey actionKey, bool updateTimeline, TimelineKey timelineKey, Interval interval);
    void remove_timeline_interval(ElementKey routineKey,ConditionKey conditionKey, ActionKey actionKey, bool updateTimeline, TimelineKey timelineKey, Interval interval);
    // #### config
    void select_action_config(ElementKey routineKey, ConditionKey conditionKey, ActionKey actionKey, RowId configTabId);
    // ### connection
    void create_connection(ElementKey routineKey,ConditionKey conditionKey, Connection *connection);
    void delete_connections(ElementKey routineKey, ConditionKey conditionKey);
    // ### component node
    void create_component_node(ElementKey routineKey,ConditionKey conditionKey, ComponentKey componentKey, QPointF pos);
    void move_component_node(ElementKey routineKey,ConditionKey conditionKey, ComponentKey componentKey, QPointF pos);
    // ### connector node
    void create_connector_node(ElementKey routineKey,ConditionKey conditionKey, Connector *connector);
    void modify_connector_node(ElementKey routineKey,ConditionKey conditionKey, ConnectorKey connectorKey, QString name, Arg arg);
    void move_connector_node(ElementKey routineKey,ConditionKey conditionKey, ConnectorKey connectorKey, QPointF pos);
    void duplicate_connector_node(ElementKey routineKey,ConditionKey conditionKey, ConnectorKey connectorKey);
    void set_connector_input_connection_validity(ElementKey routineKey, ConditionKey conditionKey, ConnectorKey connectorKey, bool state);
    // ### connections / connectors / components
    void delete_selected_nodes(ElementKey routineKey, ConditionKey conditionKey);
    void unselect_nodes_and_connections(ElementKey routineKey, ConditionKey conditionKey, bool doUpdate);
    void delete_nodes_and_connections(ElementKey routineKey, ConditionKey conditionKey, std_v1<ConnectorKey> connectorsKey, std_v1<ComponentKey> componentsKey, std_v1<ConnectionKey> connectionsKey, bool doUpdate);
    void select_nodes_and_connections(ElementKey routineKey, ConditionKey conditionKey, std_v1<ConnectorKey> connectorsKey, std_v1<ComponentKey> componentsKey, std_v1<ConnectionKey> connectionsKey, bool doUpdate);

private :

    // clean
    void remove_elements_not_in_flow();

public :

    Randomizer randomizer;

    // elements
    std_v1<ElementUP> elements;
    std_v1<LoopUP> loops; // not in the flow
    Element *selectedElement = nullptr;
    Routine *lastRoutineSelected = nullptr;
    Isi *lastIsiSelected = nullptr;

    // states
    States states;

    // infos
    using UiKey = QStringView;
    using UiValue = QStringView;
    template<class T1,class T2>
    using umap = std::unordered_map<T1,T2>;
    // NOTE(review): nesting of the int keys below presumably mirrors the
    // (element, condition, connector) / (component, config) key hierarchies
    // used by update_connector_dialog_with_info / update_component_dialog_with_info
    // — confirm against the .cpp before relying on it.
    umap<int, umap<int, umap<int, umap<UiKey, UiValue>>>> connectorsInfo;
    umap<int, umap<int, umap<UiKey, UiValue>>> componentsInfo;

private :

    // update
    int updateFlag = 0; // OR'ed Update* bits, consumed by the UI refresh
    GUI m_gui;
    Settings m_settings;

    // components (non-owning pointer)
    ComponentsManager *m_compM = nullptr;

    // resources (non-owning pointer)
    ResourcesManager *m_resM = nullptr;
};
}
oddevan/cordcutdb
config/initializers/omniauth.rb
# Registers OmniAuth authentication strategies on the Rails middleware stack.
Rails.application.config.middleware.use OmniAuth::Builder do
  # GitHub OAuth app credentials are injected via environment variables;
  # the "user:email" scope grants read access to the user's email addresses.
  provider :github, ENV['CCDB_GITHUB_ID'], ENV['CCDB_GITHUB_SECRET'], scope: "user:email"
end
alexwawl/leetcode-solutions-javascript-python
solutions/0240_Searcha2DMatrixII.js
/**
 * LeetCode 240 — Search a 2D Matrix II.
 *
 * Each row is sorted left-to-right and each column top-to-bottom, so a
 * "staircase" walk from the top-right corner decides one row or one column
 * per step: a value larger than the target rules out its whole column, a
 * smaller one rules out its whole row.  O(m + n) time, O(1) space —
 * replaces the previous O(n^1.58) divide-and-conquer (which also carried an
 * unreachable trailing `else`).
 *
 * @param {number[][]} matrix row/column sorted matrix (may be empty)
 * @param {number} target value to look for
 * @return {boolean} true iff target occurs in matrix
 */
var searchMatrix = function(matrix, target) {
    if (matrix.length === 0 || matrix[0].length === 0) {
        return false;
    }
    let row = 0;
    let col = matrix[0].length - 1; // start at the top-right corner
    while (row < matrix.length && col >= 0) {
        const current = matrix[row][col];
        if (current === target) {
            return true;
        }
        if (current > target) {
            col--; // everything below in this column is even larger
        } else {
            row++; // everything to the left in this row is even smaller
        }
    }
    return false;
};
mikenakis/Public
kit/kit/src/mikenakis/kit/ExceptionDiffersFromExpectedException.java
package mikenakis.kit;

import java.util.Optional;

/**
 * "Exception Differs from Expected" {@link UncheckedException}.
 *
 * Signals that an operation which was expected to throw a particular type of
 * exception threw a different exception instead.  Both the expected class and
 * the exception actually thrown are retained for inspection; the actual
 * exception is also handed to the superclass constructor (presumably to be
 * recorded as the cause — see {@link UncheckedException}).
 *
 * @author michael.gr
 */
public class ExceptionDiffersFromExpectedException extends UncheckedException
{
	/** The exception class that was expected to be thrown. */
	public final Class<? extends Throwable> expectedExceptionClass;

	/** The exception that was actually thrown. */
	public final Throwable actualException;

	/**
	 * Initializes a new instance.
	 *
	 * @param expectedExceptionClass the exception class that was expected
	 * @param actualException        the exception that was actually thrown
	 */
	public ExceptionDiffersFromExpectedException( Class<? extends Throwable> expectedExceptionClass, Throwable actualException )
	{
		super( Optional.of( actualException ) );
		this.expectedExceptionClass = expectedExceptionClass;
		this.actualException = actualException;
	}
}
ut-osa/syncchar
linux-2.6.16-unmod/include/asm-ppc/mpc8260.h
/*
 * Since there are many different boards and no standard configuration,
 * we have a unique include file for each.  Rather than change every
 * file that has to include MPC8260 configuration, they all include
 * this one and the configuration switching is done here.
 */
#ifdef __KERNEL__
#ifndef __ASM_PPC_MPC8260_H__
#define __ASM_PPC_MPC8260_H__

#include <linux/config.h>

#ifdef CONFIG_8260

/* Exactly one of the platform headers below is expected to be selected
 * by the board's Kconfig option.
 */
#ifdef CONFIG_EST8260
#include <platforms/est8260.h>
#endif

#ifdef CONFIG_SBC82xx
#include <platforms/sbc82xx.h>
#endif

#ifdef CONFIG_SBS8260
#include <platforms/sbs8260.h>
#endif

#ifdef CONFIG_RPX8260
#include <platforms/rpx8260.h>
#endif

#ifdef CONFIG_WILLOW
#include <platforms/willow.h>
#endif

#ifdef CONFIG_TQM8260
#include <platforms/tqm8260.h>
#endif

#if defined(CONFIG_PQ2ADS) || defined (CONFIG_PQ2FADS)
#include <platforms/pq2ads.h>
#endif

#ifdef CONFIG_PCI_8260
#include <syslib/m82xx_pci.h>
#endif

/* Make sure the memory translation stuff is there if PCI not used. */
#ifndef _IO_BASE
#define _IO_BASE        0
#endif

#ifndef _ISA_MEM_BASE
#define _ISA_MEM_BASE   0
#endif

#ifndef PCI_DRAM_OFFSET
#define PCI_DRAM_OFFSET 0
#endif

/* Map 256MB I/O region */
#ifndef IO_PHYS_ADDR
#define IO_PHYS_ADDR    0xe0000000
#endif
#ifndef IO_VIRT_ADDR
#define IO_VIRT_ADDR    IO_PHYS_ADDR
#endif

/* Identifiers for the on-chip CPM/security devices.
 * NOTE(review): these appear to index the ppc_sys platform-device tables
 * elsewhere in arch/ppc — confirm the ordering matches before reordering.
 */
enum ppc_sys_devices {
	MPC82xx_CPM_FCC1,
	MPC82xx_CPM_FCC2,
	MPC82xx_CPM_FCC3,
	MPC82xx_CPM_I2C,
	MPC82xx_CPM_SCC1,
	MPC82xx_CPM_SCC2,
	MPC82xx_CPM_SCC3,
	MPC82xx_CPM_SCC4,
	MPC82xx_CPM_SPI,
	MPC82xx_CPM_MCC1,
	MPC82xx_CPM_MCC2,
	MPC82xx_CPM_SMC1,
	MPC82xx_CPM_SMC2,
	MPC82xx_CPM_USB,
	MPC82xx_SEC1,
};

#ifndef __ASSEMBLY__
/* The "residual" data board information structure the boot loader
 * hands to us.
 */
extern unsigned char __res[];
#endif

#ifndef BOARD_CHIP_NAME
#define BOARD_CHIP_NAME ""
#endif

#endif /* CONFIG_8260 */
#endif /* !__ASM_PPC_MPC8260_H__ */
#endif /* __KERNEL__ */
lokesh-coder/lesyjs
benchmark/benchmark.suite.js
const b = require("benny");
const commander = require("commander");
const yargs = require("yargs");
const { build: gluegun } = require("gluegun");
const lesy = require("@lesy/compiler");

const { commanderCode, gluegunCode, yargsCode, lesyCode } = require("./libs");

// Shared benny options for every case (kept empty to use benny's defaults).
const options = {
  // minSamples: 100
};

// Benchmark the same CLI workload implemented with four different frameworks,
// then persist the results both as JSON and as an HTML chart.
b.suite(
  "Performance test",
  b.add("lesy", async () => { await lesyCode(lesy); }, options),
  b.add("commander", () => { commanderCode(commander); }, options),
  b.add("yargs", () => { yargsCode(yargs); }, options),
  b.add("glugun", async () => { await gluegunCode(gluegun); }, options),
  b.cycle(),
  b.complete(),
  b.save({ file: "perf", version: "1.0.0", folder: "./results" }),
  b.save({ file: "perf", format: "chart.html", folder: "./results" }),
);
0lOveLycaT0/IT-PROJECT-EXTRA-PACK
IT-PROJECT EXTRA PACK PE/jni/mcpe/level/biome/Biome.h
#pragma once

// Forward declarations; full definitions live elsewhere in the SDK.
class BiomeDecorator;
class Random;
class BlockPos;
class BlockSource;
class LevelChunk;

// Header describing MCPE's Biome class for native mods.
// NOTE(review): member names and signatures appear to come from symbol dumps
// of the game binary; several "get*" virtuals are declared `void` because
// their true return types were not recovered — confirm against the target
// binary before calling them or changing the vtable order.
class Biome {
public:
	// Discriminator passed to the constructor; enumerator values not recovered.
	enum BiomeType {
	};
public:
	// Static instances for every vanilla biome (presumably populated by
	// initBiomes() — confirm).
	static Biome *beaches;
	static Biome *birchForest;
	static Biome *birchForestHills;
	static Biome *birchForestHillsMutated;
	static Biome *birchForestMutated;
	static Biome *coldBeach;
	static Biome *deepOcean;
	static Biome *desert;
	static Biome *desertHills;
	static Biome *desertMutated;
	static Biome *extremeHills;
	static Biome *extremeHillsMutated;
	static Biome *extremeHillsWithTrees;
	static Biome *extremeHillsWithTreesMutated;
	static Biome *forest;
	static Biome *forestHills;
	static Biome *forestMutated;
	static Biome *frozenOcean;
	static Biome *frozenRiver;
	static Biome *hell;
	static Biome *iceFlats;
	static Biome *iceFlatsMutated;
	static Biome *iceMountains;
	static Biome *jungle;
	static Biome *jungleEdge;
	static Biome *jungleEdgeMutated;
	static Biome *jungleHills;
	static Biome *jungleMutated;
	static Biome *mesa;
	static Biome *mesaClearRock;
	static Biome *mesaClearRockMutated;
	static Biome *mesaMutated;
	static Biome *mesaRock;
	static Biome *mesaRockMutated;
	static Biome *mushroomIsland;
	static Biome *mushroomIslandShore;
	static Biome *ocean;
	static Biome *plains;
	static Biome *plainsMutated;
	static Biome *redwoodTaiga;
	static Biome *redwoodTaigaHills;
	static Biome *redwoodTaigaHillsMutated;
	static Biome *redwoodTaigaMutated;
	static Biome *river;
	static Biome *roofedForest;
	static Biome *roofedForestMutated;
	static Biome *savanna;
	static Biome *savannaMutated;
	static Biome *savannaRock;
	static Biome *savannaRockMutated;
	static Biome *sky;
	static Biome *smallerExtremeHills;
	static Biome *stoneBeach;
	static Biome *swampland;
	static Biome *swamplandMutated;
	static Biome *taiga;
	static Biome *taigaCold;
	static Biome *taigaColdHills;
	static Biome *taigaColdMutated;
	static Biome *taigaHills;
	static Biome *taigaMutated;
public:
	// Flat registry of all biome singletons.
	static Biome *mBiomes[61];
	// Opaque bytes matching the in-game object layout — do not reorder or
	// resize (NOTE(review): size taken from the binary layout; verify per
	// game version).
	char filler[156];
public:
	Biome(int, Biome::BiomeType, BiomeDecorator *);
	Biome(){}
	virtual void setColor(int);
	virtual void setColor(int, bool);
	virtual ~Biome();
	virtual void getTreeFeature(Random *);
	virtual void getGrassFeature(Random *);
	virtual void getTemperature();
	virtual void adjustScale(float);
	virtual void adjustDepth(float);
	virtual void getSkyColor(float);
	virtual void getCreatureProbability();
	virtual void getFoliageColor();
	virtual void getRandomFlowerTypeAndData(Random &, BlockPos const &);
	virtual void decorate(BlockSource *, Random &, BlockPos const &, bool, float);
	virtual void buildSurfaceAt(Random &, LevelChunk &, BlockPos const &, float,short);
	virtual void getGrassColor(BlockPos const &);
	virtual void refreshBiome(unsigned int);
	virtual void getTemperatureCategory() const;
	virtual void isSame(Biome *);
	virtual void isHumid();
	virtual void createMutatedCopy(int);
public:
	// Creates/registers the static biome instances declared above.
	static void initBiomes();
};
deepdialog/deepdialog
deepdialog/nlg/__init__.py
# -*- coding: utf-8 -*- """NLG Module."""
LaurentPlagne/Legolas
Legolas/BlockMatrix/Structures/TriDiagonal/TriDiagonalScalarGaussSeidelSolver.hxx
#ifndef __TRIDIAGONALSCALARGAUSSSEIDELSOLVER_HXX__
#define __TRIDIAGONALSCALARGAUSSSEIDELSOLVER_HXX__

#include "Legolas/Vector/Vector.hxx"
#include "Legolas/BlockMatrix/ScalarMatrixSolver.hxx"

namespace Legolas{

  // Gauss-Seidel iterative solver specialized for tridiagonal scalar
  // matrices: solves A*X = B, sweeping X from the first row to the last and
  // reusing the already-updated entries within the same sweep.
  class TriDiagonalScalarGaussSeidelSolver{
  public:

    template <class SCALAR_MATRIX>
    class Engine : public ScalarMatrixSolver<SCALAR_MATRIX>{

      typedef typename SCALAR_MATRIX::RealType RealType;
      typedef Legolas::MultiVector<RealType,1> V1D;

    public:

      // Human-readable solver identifier.
      virtual inline std::string name( void ) const { return "TriDiagonalGaussSeidelScalarSolver" ;}

      // Polymorphic copy required by the solver interface.
      virtual VirtualSolver * clone( void ) const { return new Engine(*this); }

      // Solves the tridiagonal system A*X = B.  X serves as the initial
      // guess and is overwritten with the solution.  For n == 1 the system
      // is solved directly; otherwise Gauss-Seidel sweeps
      //   X[i] = (B[i] - l_i*X[i-1] - u_i*X[i+1]) / d_i
      // repeat until iterationControler().end(X) reports completion.
      // NOTE(review): the stopping criterion (tolerance, max iterations)
      // lives in the inherited iterationControler() — confirm its contract.
      // NOTE(review): the first row reads upperDiagonalGetElement(1), i.e.
      // the upper diagonal appears to be 1-indexed — confirm the matrix
      // indexing convention before modifying.
      inline void solve(const SCALAR_MATRIX & A, const V1D & B, V1D & X){

        const int n=A.nrows();

        if (n==1){
          // 1x1 system: a single direct division.
          X[0]=B[0]/A.diagonalGetElement(0);
        }
        else{
          if (n>1){
            this->iterationControler().initialize(A,X);
            do {
              // First row: no lower neighbor.
              X[0]=(B[0]-A.upperDiagonalGetElement(1)*X[1])/A.diagonalGetElement(0);
              // Interior rows use the freshly updated X[i-1] (Gauss-Seidel).
              for (int i=1 ; i < n-1 ; i++ ){
                X[i]=(B[i]-A.lowerDiagonalGetElement(i)*X[i-1]-A.upperDiagonalGetElement(i)*X[i+1])/A.diagonalGetElement(i);
              }
              // Last row: no upper neighbor.
              X[n-1]=(B[n-1]-A.lowerDiagonalGetElement(n-1)*X[n-2])/A.diagonalGetElement(n-1);
            }while(!this->iterationControler().end(X));
          }
        }
      }
    };
  };
}

#endif
gschizas/fallenswordhelper
dist/resources/beta/1524/30/injectUpgradeAlert-8dcbea16.js
// Generated (minified) bundle artifact.
// NOTE(review): the single-letter import names below are minifier output;
// the behavior notes in the comments are inferred from call sites — confirm
// against the un-minified source / source map before relying on them.
import{u as a,j as s,H as e,a4 as o,a as t}from"./calfSystem-ebf4b17d.js"
import{n,d as p,p as d}from"./parseGoldUpgrades-99e79a94.js"

// Callback for the "points" reply: schedules task `d` (priority 3?) with the
// received payload via t().
function r(a) {
  t(3, d, [a])
}

// Reads the stored "lastUpgradeCheck" value via e(); when it is missing or
// not greater than `o` (presumably the current timestamp — confirm), sends a
// {cmd:"points"} request through a() and handles the reply with r().
function f() {
  const s = e("lastUpgradeCheck")
  s && o < s || a({ cmd: "points", type: 1 }).then(r)
}

// Entry point: only acts when the s() and n() gates both pass; either runs
// the pending upgrade p() or kicks off a fresh check via f().
function c() {
  s() && n() && (e("needToDoUpgrade") ? p() : f())
}

export default c
//# sourceMappingURL=injectUpgradeAlert-8dcbea16.js.map
anandundavia/ProjectEuler
problem=016/Solution.java
import java.math.BigInteger;

/**
 * Project Euler #16 — sum of the decimal digits of 2^1000.
 *
 * Changes: {@code solve} widened from {@code private} to package-private so
 * it can be unit-tested (backward compatible); dropped the needless
 * {@code throws Exception} on {@code main}; added a private constructor
 * since this is a utility class.
 */
final class Solution {

    private Solution() {
        // Utility class; no instances.
    }

    /**
     * Returns the sum of the decimal digits of 2^exponent.
     *
     * @param exponent the power of two to expand; must be non-negative
     * @return the digit sum of 2^exponent
     * @throws ArithmeticException if {@code exponent} is negative
     *         (propagated from {@link BigInteger#pow})
     */
    static int solve(int exponent) {
        // BigInteger.TWO requires Java 9+, matching the original code.
        final char[] chars = BigInteger.TWO.pow(exponent).toString(10).toCharArray();
        int sum = 0;
        for (final char c : chars) {
            sum += c - '0';
        }
        return sum;
    }

    public static void main(String[] args) {
        System.out.println(solve(15));
        System.out.println(solve(1000));
    }
}

// If this solution helped you, please star the repo!
npocmaka/Windows-Server-2003
enduser/sakit/sak/frameworks/localui/ldm/display.h
//#-------------------------------------------------------------- // // File: display.h // // Synopsis: This file holds the declarations of the // CDisplay class .The class is responsible // for displaying the appropriate bitmap on // the local display // // History: 5/28/99 // // Copyright (C) 1999-2000 Microsoft Corporation // All rights reserved. // //#-------------------------------------------------------------- #ifndef __DISPLAY_H_ #define __DISPLAY_H_ #include "sacom.h" #define SA_DISPLAY_SHUTTING_DOWN 0x00000002 // OS is shutting down #define SA_DISPLAY_CHECK_DISK 0x00000010 // autochk.exe is running class CDisplay { public: // // constructor // CDisplay () :m_bInitialized (false), m_pSaDisplay (NULL) { } // // destructor // ~CDisplay () { m_pSaDisplay = NULL; } // // send the shutting down message to the local display // a member // HRESULT Shutdown (); // // send the busy message to the local display // a member // HRESULT Busy (); // // lock the driver to prevent writes // HRESULT Lock (); // // Unlock the driver to allow writes // HRESULT Unlock (); private: // // method to carry out the initialization // HRESULT InternalInitialize (VOID); // // signifies initialized // bool m_bInitialized; // // handle to helper object // CComPtr<ISaDisplay> m_pSaDisplay; }; // end of CDisplay class declaration #endif __DISPLAY_H_
jatinssaluja/lorawan-stack
pkg/gatewayconfigurationserver/gcsv2/middleware.go
// Copyright © 2019 The Things Network Foundation, The Things Industries B.V.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package gcsv2

import (
	"fmt"
	"strings"

	echo "github.com/labstack/echo/v4"
	"go.thethings.network/lorawan-stack/pkg/auth"
	"google.golang.org/grpc/metadata"
)

// normalizeAuthorization is Echo middleware that rewrites the HTTP
// Authorization header into a gRPC-style "authorization" metadata entry on
// the request context, so downstream code that reads incoming gRPC metadata
// can authenticate the caller. Requests without credentials pass through
// untouched; malformed or unknown auth schemes are rejected.
func (s *Server) normalizeAuthorization(next echo.HandlerFunc) echo.HandlerFunc {
	return func(c echo.Context) error {
		ctx := c.Request().Context()
		authorization := c.Request().Header.Get(echo.HeaderAuthorization)
		// No credentials supplied: forward the request unmodified.
		if authorization == "" {
			return next(c)
		}
		// The header must be exactly "<type> <value>".
		authorizationParts := strings.SplitN(authorization, " ", 2)
		if len(authorizationParts) != 2 {
			return errUnauthenticated.New()
		}
		// Scheme comparison is case-insensitive; the token value is kept as-is.
		authType, authValue := strings.ToLower(authorizationParts[0]), authorizationParts[1]
		switch authType {
		case "bearer", "key":
			// Reclassify by token contents rather than trusting the client's
			// scheme: values that parse as API keys or access tokens are
			// forwarded as "bearer", everything else as "key".
			// NOTE(review): relies on auth.SplitToken's token-type semantics —
			// confirm against the pkg/auth documentation.
			tokenType, _, _, err := auth.SplitToken(authValue)
			if err == nil && (tokenType == auth.APIKey || tokenType == auth.AccessToken) {
				authType = "bearer"
			} else {
				authType = "key"
			}
		default:
			// Any scheme other than Bearer/Key is rejected outright.
			return errUnauthenticated.New()
		}
		md := metadata.New(map[string]string{
			"authorization": fmt.Sprintf("%s %s", authType, authValue),
		})
		// Preserve any metadata already attached to the incoming context.
		if ctxMd, ok := metadata.FromIncomingContext(ctx); ok {
			md = metadata.Join(ctxMd, md)
		}
		ctx = metadata.NewIncomingContext(ctx, md)
		c.SetRequest(c.Request().WithContext(ctx))
		return next(c)
	}
}
t-rasmud/itextpdf
itext/src/main/java/com/itextpdf/text/pdf/CFFFontSubset.java
/* * * This file is part of the iText (R) project. Copyright (c) 1998-2020 iText Group NV * Authors: <NAME>, <NAME>, et al. * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License version 3 * as published by the Free Software Foundation with the addition of the * following permission added to Section 15 as permitted in Section 7(a): * FOR ANY PART OF THE COVERED WORK IN WHICH THE COPYRIGHT IS OWNED BY * ITEXT GROUP. ITEXT GROUP DISCLAIMS THE WARRANTY OF NON INFRINGEMENT * OF THIRD PARTY RIGHTS * * This program is distributed in the hope that it will be useful, but * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY * or FITNESS FOR A PARTICULAR PURPOSE. * See the GNU Affero General Public License for more details. * You should have received a copy of the GNU Affero General Public License * along with this program; if not, see http://www.gnu.org/licenses or write to * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, * Boston, MA, 02110-1301 USA, or download the license from the following URL: * http://itextpdf.com/terms-of-use/ * * The interactive user interfaces in modified source and object code versions * of this program must display Appropriate Legal Notices, as required under * Section 5 of the GNU Affero General Public License. * * In accordance with Section 7(b) of the GNU Affero General Public License, * a covered work must retain the producer line in every PDF that is created * or manipulated using iText. * * You can be released from the requirements of the license by purchasing * a commercial license. Buying such a license is mandatory as soon as you * develop commercial activities involving the iText software without * disclosing the source code of your own applications. 
* These activities include: offering paid services to customers as an ASP, * serving PDFs on the fly in a web application, shipping iText with a closed * source product. * * For more information, please contact iText Software Corp. at this * address: <EMAIL> */ package com.itextpdf.text.pdf; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedList; /** * This Class subsets a CFF Type Font. The subset is preformed for CID fonts and NON CID fonts. * The Charstring is subsetted for both types. For CID fonts only the FDArray which are used are embedded. * The Lsubroutines of the FDArrays used are subsetted as well. The Subroutine subset supports both Type1 and Type2 * formatting although only tested on Type2 Format. * For Non CID the Lsubroutines are subsetted. On both types the Gsubroutines is subsetted. * A font which was not of CID type is transformed into CID as a part of the subset process. 
* The CID synthetic creation was written by <NAME> (<EMAIL>) * @author <NAME> (<EMAIL>) and <NAME> (<EMAIL>) */ public class CFFFontSubset extends CFFFont { /** * The Strings in this array represent Type1/Type2 operator names */ static final String SubrsFunctions[] = { "RESERVED_0","hstem","RESERVED_2","vstem","vmoveto","rlineto","hlineto","vlineto", "rrcurveto","RESERVED_9","callsubr","return","escape","RESERVED_13", "endchar","RESERVED_15","RESERVED_16","RESERVED_17","hstemhm","hintmask", "cntrmask","rmoveto","hmoveto","vstemhm","rcurveline","rlinecurve","vvcurveto", "hhcurveto","shortint","callgsubr","vhcurveto","hvcurveto" }; /** * The Strings in this array represent Type1/Type2 escape operator names */ static final String SubrsEscapeFuncs[] = { "RESERVED_0","RESERVED_1","RESERVED_2","and","or","not","RESERVED_6", "RESERVED_7","RESERVED_8","abs","add","sub","div","RESERVED_13","neg", "eq","RESERVED_16","RESERVED_17","drop","RESERVED_19","put","get","ifelse", "random","mul","RESERVED_25","sqrt","dup","exch","index","roll","RESERVED_31", "RESERVED_32","RESERVED_33","hflex","flex","hflex1","flex1","RESERVED_REST" }; /** * Operator codes for unused CharStrings and unused local and global Subrs */ static final byte ENDCHAR_OP = 14; static final byte RETURN_OP = 11; /** * A HashMap containing the glyphs used in the text after being converted * to glyph number by the CMap */ HashMap<Integer, int[]> GlyphsUsed; /** * The GlyphsUsed keys as an ArrayList */ ArrayList<Integer> glyphsInList; /** * A HashSet for keeping the FDArrays being used by the font */ HashSet<Integer> FDArrayUsed = new HashSet<Integer>(); /** * A HashMaps array for keeping the subroutines used in each FontDict */ HashMap<Integer, int[]>[] hSubrsUsed; /** * The SubroutinesUsed HashMaps as ArrayLists */ ArrayList<Integer>[] lSubrsUsed; /** * A HashMap for keeping the Global subroutines used in the font */ HashMap<Integer, int[]> hGSubrsUsed = new HashMap<Integer, int[]>(); /** * The Global 
SubroutinesUsed HashMaps as ArrayLists */ ArrayList<Integer> lGSubrsUsed = new ArrayList<Integer>(); /** * A HashMap for keeping the subroutines used in a non-cid font */ HashMap<Integer, int[]> hSubrsUsedNonCID = new HashMap<Integer, int[]>(); /** * The SubroutinesUsed HashMap as ArrayList */ ArrayList<Integer> lSubrsUsedNonCID = new ArrayList<Integer>(); /** * An array of the new Indexes for the local Subr. One index for each FontDict */ byte[][] NewLSubrsIndex; /** * The new subroutines index for a non-cid font */ byte[] NewSubrsIndexNonCID; /** * The new global subroutines index of the font */ byte[] NewGSubrsIndex; /** * The new CharString of the font */ byte[] NewCharStringsIndex; /** * The bias for the global subroutines */ int GBias = 0; /** * The linked list for generating the new font stream */ LinkedList<Item> OutputList; /** * Number of arguments to the stem operators in a subroutine calculated recursively */ int NumOfHints=0; /** * C'tor for CFFFontSubset * @param rf - The font file * @param GlyphsUsed - a HashMap that contains the glyph used in the subset */ public CFFFontSubset(RandomAccessFileOrArray rf,HashMap<Integer, int[]> GlyphsUsed){ // Use CFFFont c'tor in order to parse the font file. 
super(rf); this.GlyphsUsed = GlyphsUsed; //Put the glyphs into a list glyphsInList = new ArrayList<Integer>(GlyphsUsed.keySet()); for (int i=0;i<fonts.length;++i) { // Read the number of glyphs in the font seek(fonts[i].charstringsOffset); fonts[i].nglyphs = getCard16(); // Jump to the count field of the String Index seek(stringIndexOffset); fonts[i].nstrings = getCard16()+standardStrings.length; // For each font save the offset array of the charstring fonts[i].charstringsOffsets = getIndex(fonts[i].charstringsOffset); // Process the FDSelect if exist if (fonts[i].fdselectOffset>=0) { // Process the FDSelect readFDSelect(i); // Build the FDArrayUsed hashmap BuildFDArrayUsed(i); } if (fonts[i].isCID) // Build the FD Array used Hash Map ReadFDArray(i); // compute the charset length fonts[i].CharsetLength = CountCharset(fonts[i].charsetOffset,fonts[i].nglyphs); } } /** * Calculates the length of the charset according to its format * @param Offset The Charset Offset * @param NumofGlyphs Number of glyphs in the font * @return the length of the Charset */ int CountCharset(int Offset,int NumofGlyphs){ int format; int Length=0; seek(Offset); // Read the format format = getCard8(); // Calc according to format switch (format){ case 0: Length = 1+2*NumofGlyphs; break; case 1: Length = 1+3*CountRange(NumofGlyphs,1); break; case 2: Length = 1+4*CountRange(NumofGlyphs,2); break; default: break; } return Length; } /** * Function calculates the number of ranges in the Charset * @param NumofGlyphs The number of glyphs in the font * @param Type The format of the Charset * @return The number of ranges in the Charset data structure */ int CountRange(int NumofGlyphs,int Type){ int num=0; char Sid; int i=1,nLeft; while (i<NumofGlyphs){ num++; Sid = getCard16(); if (Type==1) nLeft = getCard8(); else nLeft = getCard16(); i += nLeft+1; } return num; } /** * Read the FDSelect of the font and compute the array and its length * @param Font The index of the font being processed */ protected 
void readFDSelect(int Font) { // Restore the number of glyphs int NumOfGlyphs = fonts[Font].nglyphs; int[] FDSelect = new int[NumOfGlyphs]; // Go to the beginning of the FDSelect seek(fonts[Font].fdselectOffset); // Read the FDSelect's format fonts[Font].FDSelectFormat = getCard8(); switch(fonts[Font].FDSelectFormat){ // Format==0 means each glyph has an entry that indicated // its FD. case 0: for (int i=0;i<NumOfGlyphs;i++) { FDSelect[i] = getCard8(); } // The FDSelect's Length is one for each glyph + the format // for later use fonts[Font].FDSelectLength = fonts[Font].nglyphs+1; break; case 3: // Format==3 means the ranges version // The number of ranges int nRanges = getCard16(); int l=0; // Read the first in the first range int first = getCard16(); for (int i=0;i<nRanges;i++) { // Read the FD index int fd = getCard8(); // Read the first of the next range int last = getCard16(); // Calc the steps and write to the array int steps = last-first; for (int k=0;k<steps;k++) { FDSelect[l] = fd; l++; } // The last from this iteration is the first of the next first = last; } // Store the length for later use fonts[Font].FDSelectLength = 1+2+nRanges*3+2; break; default: break; } // Save the FDSelect of the font fonts[Font].FDSelect = FDSelect; } /** * Function reads the FDSelect and builds the FDArrayUsed HashMap According to the glyphs used * @param Font the Number of font being processed */ protected void BuildFDArrayUsed(int Font) { int[] FDSelect = fonts[Font].FDSelect; // For each glyph used for (int i=0;i<glyphsInList.size();i++) { // Pop the glyphs index int glyph = glyphsInList.get(i).intValue(); // Pop the glyph's FD int FD = FDSelect[glyph]; // Put the FD index into the FDArrayUsed HashMap FDArrayUsed.add(Integer.valueOf(FD)); } } /** * Read the FDArray count, offsize and Offset array * @param Font */ protected void ReadFDArray(int Font) { seek(fonts[Font].fdarrayOffset); fonts[Font].FDArrayCount = getCard16(); fonts[Font].FDArrayOffsize = getCard8(); // Since 
we will change values inside the FDArray objects // We increase its offsize to prevent errors if (fonts[Font].FDArrayOffsize < 4) fonts[Font].FDArrayOffsize++; fonts[Font].FDArrayOffsets = getIndex(fonts[Font].fdarrayOffset); } /** * The Process function extracts one font out of the CFF file and returns a * subset version of the original. * @param fontName - The name of the font to be taken out of the CFF * @return The new font stream * @throws IOException */ public byte[] Process(String fontName)throws IOException{ try { // Verify that the file is open buf.reOpen(); // Find the Font that we will be dealing with int j; for (j=0; j<fonts.length; j++) if (fontName.equals(fonts[j].name)) break; if (j==fonts.length) return null; // Calc the bias for the global subrs if (gsubrIndexOffset >= 0) GBias = CalcBias(gsubrIndexOffset,j); // Prepare the new CharStrings Index BuildNewCharString(j); // Prepare the new Global and Local Subrs Indices BuildNewLGSubrs(j); // Build the new file byte[] Ret = BuildNewFile(j); return Ret; } finally { try { buf.close(); } catch (Exception e) { // empty on purpose } } } /** * Function calcs bias according to the CharString type and the count * of the subrs * @param Offset The offset to the relevant subrs index * @param Font the font * @return The calculated Bias */ protected int CalcBias(int Offset,int Font) { seek(Offset); int nSubrs = getCard16(); // If type==1 -> bias=0 if (fonts[Font].CharstringType == 1) return 0; // else calc according to the count else if (nSubrs < 1240) return 107; else if (nSubrs < 33900) return 1131; else return 32768; } /** *Function uses BuildNewIndex to create the new index of the subset charstrings * @param FontIndex the font * @throws IOException */ protected void BuildNewCharString(int FontIndex) throws IOException { NewCharStringsIndex = BuildNewIndex(fonts[FontIndex].charstringsOffsets,GlyphsUsed,ENDCHAR_OP); } /** * Function builds the new local & global subsrs indices. 
IF CID then All of * the FD Array lsubrs will be subsetted. * @param Font the font * @throws IOException */ @SuppressWarnings("unchecked") protected void BuildNewLGSubrs(int Font)throws IOException { // If the font is CID then the lsubrs are divided into FontDicts. // for each FD array the lsubrs will be subsetted. if(fonts[Font].isCID) { // Init the hashmap-array and the arraylist-array to hold the subrs used // in each private dict. hSubrsUsed = new HashMap[fonts[Font].fdprivateOffsets.length]; lSubrsUsed = new ArrayList[fonts[Font].fdprivateOffsets.length]; // A [][] which will store the byte array for each new FD Array lsubs index NewLSubrsIndex = new byte[fonts[Font].fdprivateOffsets.length][]; // An array to hold the offset for each Lsubr index fonts[Font].PrivateSubrsOffset = new int[fonts[Font].fdprivateOffsets.length]; // A [][] which will store the offset array for each lsubr index fonts[Font].PrivateSubrsOffsetsArray = new int[fonts[Font].fdprivateOffsets.length][]; // Put the FDarrayUsed into a list ArrayList<Integer> FDInList = new ArrayList<Integer>(FDArrayUsed); // For each FD array which is used subset the lsubr for (int j=0;j<FDInList.size();j++) { // The FDArray index, Hash Map, Array List to work on int FD = FDInList.get(j).intValue(); hSubrsUsed[FD] = new HashMap<Integer, int[]>(); lSubrsUsed[FD] = new ArrayList<Integer>(); //Reads the private dicts looking for the subr operator and // store both the offset for the index and its offset array BuildFDSubrsOffsets(Font,FD); // Verify that FDPrivate has a LSubrs index if(fonts[Font].PrivateSubrsOffset[FD]>=0) { //Scans the Charstring data storing the used Local and Global subroutines // by the glyphs. Scans the Subrs recursively. 
BuildSubrUsed(Font,FD,fonts[Font].PrivateSubrsOffset[FD],fonts[Font].PrivateSubrsOffsetsArray[FD],hSubrsUsed[FD],lSubrsUsed[FD]); // Builds the New Local Subrs index NewLSubrsIndex[FD] = BuildNewIndex(fonts[Font].PrivateSubrsOffsetsArray[FD],hSubrsUsed[FD],RETURN_OP); } } } // If the font is not CID && the Private Subr exists then subset: else if (fonts[Font].privateSubrs>=0) { // Build the subrs offsets; fonts[Font].SubrsOffsets = getIndex(fonts[Font].privateSubrs); //Scans the Charstring data storing the used Local and Global subroutines // by the glyphs. Scans the Subrs recursively. BuildSubrUsed(Font,-1,fonts[Font].privateSubrs,fonts[Font].SubrsOffsets,hSubrsUsedNonCID,lSubrsUsedNonCID); } // For all fonts subset the Global Subroutines // Scan the Global Subr Hashmap recursively on the Gsubrs BuildGSubrsUsed(Font); if (fonts[Font].privateSubrs>=0) // Builds the New Local Subrs index NewSubrsIndexNonCID = BuildNewIndex(fonts[Font].SubrsOffsets,hSubrsUsedNonCID,RETURN_OP); //Builds the New Global Subrs index NewGSubrsIndex = BuildNewIndexAndCopyAllGSubrs(gsubrOffsets, RETURN_OP); } /** * The function finds for the FD array processed the local subr offset and its * offset array. 
* @param Font the font * @param FD The FDARRAY processed */ protected void BuildFDSubrsOffsets(int Font,int FD) { // Initiate to -1 to indicate lsubr operator present fonts[Font].PrivateSubrsOffset[FD] = -1; // Goto beginning of objects seek(fonts[Font].fdprivateOffsets[FD]); // While in the same object: while (getPosition() < fonts[Font].fdprivateOffsets[FD]+fonts[Font].fdprivateLengths[FD]) { getDictItem(); // If the dictItem is the "Subrs" then find and store offset, if (key=="Subrs") fonts[Font].PrivateSubrsOffset[FD] = ((Integer)args[0]).intValue()+fonts[Font].fdprivateOffsets[FD]; } //Read the lsubr index if the lsubr was found if (fonts[Font].PrivateSubrsOffset[FD] >= 0) fonts[Font].PrivateSubrsOffsetsArray[FD] = getIndex(fonts[Font].PrivateSubrsOffset[FD]); } /** * Function uses ReadAsubr on the glyph used to build the LSubr & Gsubr HashMap. * The HashMap (of the lsubr only) is then scanned recursively for Lsubr & Gsubrs * calls. * @param Font the font * @param FD FD array processed. 0 indicates function was called by non CID font * @param SubrOffset the offset to the subr index to calc the bias * @param SubrsOffsets the offset array of the subr index * @param hSubr HashMap of the subrs used * @param lSubr ArrayList of the subrs used */ protected void BuildSubrUsed(int Font,int FD,int SubrOffset,int[] SubrsOffsets,HashMap<Integer, int[]> hSubr,ArrayList<Integer> lSubr) { // Calc the Bias for the subr index int LBias = CalcBias(SubrOffset,Font); // For each glyph used find its GID, start & end pos for (int i=0;i<glyphsInList.size();i++) { int glyph = glyphsInList.get(i).intValue(); int Start = fonts[Font].charstringsOffsets[glyph]; int End = fonts[Font].charstringsOffsets[glyph+1]; // IF CID: if (FD >= 0) { EmptyStack(); NumOfHints=0; // Using FDSELECT find the FD Array the glyph belongs to. 
                int GlyphFD = fonts[Font].FDSelect[glyph];
                // If the Glyph is part of the FD being processed
                if (GlyphFD == FD)
                    // Find the Subrs called by the glyph and insert to hash:
                    ReadASubr(Start,End,GBias,LBias,hSubr,lSubr,SubrsOffsets);
            }
            else
                // If the font is not CID
                //Find the Subrs called by the glyph and insert to hash:
                ReadASubr(Start,End,GBias,LBias,hSubr,lSubr,SubrsOffsets);
        }
        // For all Lsubrs used, check recursively for Lsubr & Gsubr used.
        // NOTE: lSubr may grow while this loop runs (ReadASubr appends newly
        // discovered subrs); the index-based loop is a deliberate worklist pattern.
        for (int i=0;i<lSubr.size();i++)
        {
            // Pop the subr value from the hash
            int Subr = lSubr.get(i).intValue();
            // Ensure the Lsubr call is valid
            if (Subr < SubrsOffsets.length-1 && Subr>=0)
            {
                // Read and process the subr
                int Start = SubrsOffsets[Subr];
                int End = SubrsOffsets[Subr+1];
                ReadASubr(Start,End,GBias,LBias,hSubr,lSubr,SubrsOffsets);
            }
        }
    }

    /**
     * Function scans the Glsubr used ArrayList to find recursive calls
     * to Gsubrs and adds to Hashmap & ArrayList
     * @param Font the font
     */
    protected void BuildGSubrsUsed(int Font)
    {
        int LBias = 0;
        int SizeOfNonCIDSubrsUsed = 0;
        if (fonts[Font].privateSubrs>=0)
        {
            LBias = CalcBias(fonts[Font].privateSubrs,Font);
            SizeOfNonCIDSubrsUsed = lSubrsUsedNonCID.size();
        }
        // For each global subr used.
        // NOTE: lGSubrsUsed may grow while iterating (worklist pattern, see above).
        for (int i=0;i<lGSubrsUsed.size();i++)
        {
            //Pop the value + check valid
            int Subr = lGSubrsUsed.get(i).intValue();
            if (Subr < gsubrOffsets.length-1 && Subr>=0)
            {
                // Read the subr and process
                int Start = gsubrOffsets[Subr];
                int End = gsubrOffsets[Subr+1];
                if (fonts[Font].isCID)
                    // CID: local subrs were handled per-FD already; pass null offsets.
                    ReadASubr(Start,End,GBias,0,hGSubrsUsed,lGSubrsUsed,null);
                else
                {
                    ReadASubr(Start,End,GBias,LBias,hSubrsUsedNonCID,lSubrsUsedNonCID,fonts[Font].SubrsOffsets);
                    // A gsubr may itself call local subrs: process any lsubrs that
                    // this pass newly appended to lSubrsUsedNonCID.
                    if (SizeOfNonCIDSubrsUsed < lSubrsUsedNonCID.size())
                    {
                        for (int j=SizeOfNonCIDSubrsUsed;j<lSubrsUsedNonCID.size();j++)
                        {
                            //Pop the value + check valid
                            int LSubr = lSubrsUsedNonCID.get(j).intValue();
                            if (LSubr < fonts[Font].SubrsOffsets.length-1 && LSubr>=0)
                            {
                                // Read the subr and process
                                int LStart = fonts[Font].SubrsOffsets[LSubr];
                                int LEnd =
                                    fonts[Font].SubrsOffsets[LSubr+1];
                                ReadASubr(LStart,LEnd,GBias,LBias,hSubrsUsedNonCID,lSubrsUsedNonCID,fonts[Font].SubrsOffsets);
                            }
                        }
                        SizeOfNonCIDSubrsUsed = lSubrsUsedNonCID.size();
                    }
                }
            }
        }
    }

    /**
     * The function reads a subrs (glyph info) between begin and end.
     * Adds calls to a Lsubr to the hSubr and lSubrs.
     * Adds calls to a Gsubr to the hGSubr and lGSubrs.
     * @param begin the start point of the subr
     * @param end the end point of the subr
     * @param GBias the bias of the Global Subrs
     * @param LBias the bias of the Local Subrs
     * @param hSubr the HashMap for the lSubrs
     * @param lSubr the ArrayList for the lSubrs
     * @param LSubrsOffsets offset array of the local subr index (may be null for CID gsubr scans)
     */
    protected void ReadASubr(int begin,int end,int GBias,int LBias,HashMap<Integer, int[]> hSubr,ArrayList<Integer> lSubr,int[] LSubrsOffsets)
    {
        // Clear the stack for the subrs
        EmptyStack();
        NumOfHints = 0;
        // Goto beginning of the subr
        seek(begin);
        while (getPosition() < end)
        {
            // Read the next command
            ReadCommand();
            int pos = getPosition();
            Object TopElement=null;
            if (arg_count > 0)
                TopElement = args[arg_count-1];
            int NumOfArgs = arg_count;
            // Check the modification needed on the Argument Stack according to key;
            HandelStack();
            // a call to a Lsubr
            if (key=="callsubr")
            {
                // Verify that arguments are passed
                if (NumOfArgs > 0)
                {
                    // Calc the index of the Subrs (operand is biased per Type2 spec)
                    int Subr = ((Integer)TopElement).intValue() + LBias;
                    // If the subr isn't in the HashMap -> Put in
                    if (!hSubr.containsKey(Integer.valueOf (Subr)))
                    {
                        hSubr.put(Integer.valueOf(Subr),null);
                        lSubr.add(Integer.valueOf(Subr));
                    }
                    // Walk into the subr to keep the hint count accurate, then
                    // restore the read position.
                    CalcHints(LSubrsOffsets[Subr],LSubrsOffsets[Subr+1],LBias,GBias,LSubrsOffsets);
                    seek(pos);
                }
            }
            // a call to a Gsubr
            else if (key=="callgsubr")
            {
                // Verify that arguments are passed
                if (NumOfArgs > 0)
                {
                    // Calc the index of the Subrs
                    int Subr = ((Integer)TopElement).intValue() + GBias;
                    // If the subr isn't in the HashMap -> Put in
                    if (!hGSubrsUsed.containsKey(Integer.valueOf (Subr)))
                    {
                        hGSubrsUsed.put(Integer.valueOf(Subr),null);
                        lGSubrsUsed.add(Integer.valueOf(Subr));
                    }
                    CalcHints(gsubrOffsets[Subr],gsubrOffsets[Subr+1],LBias,GBias,LSubrsOffsets);
                    seek(pos);
                }
            }
            // A call to "stem"
            else if (key == "hstem" || key == "vstem" || key == "hstemhm" || key == "vstemhm")
            {
                // Increment the NumOfHints by the number of argument couples
                NumOfHints += NumOfArgs / 2;
            }
            // A call to "mask"
            else if (key == "hintmask" || key == "cntrmask")
            {
                // if stack is not empty the reason is vstem implicit definition
                // See Adobe Technical Note #5177, page 25, hintmask usage example.
                NumOfHints += NumOfArgs/2;
                // Compute the size of the mask (one bit per hint, rounded up,
                // at least one byte)
                int SizeOfMask = NumOfHints/8;
                if (NumOfHints%8 != 0 || SizeOfMask == 0)
                    SizeOfMask++;
                // Continue the pointer in SizeOfMask steps
                for (int i=0;i<SizeOfMask;i++)
                    getCard8();
            }
        }
    }

    /**
     * Function Checks how the current operator effects the run time stack after being run
     * An operator may increase or decrease the stack size
     */
    protected void HandelStack()
    {
        // Find out what the operator does to the stack
        int StackHandel = StackOpp();
        if (StackHandel < 2)
        {
            // The operators that enlarge the stack by one
            if (StackHandel==1)
                PushStack();
            // The operators that pop the stack
            else
            {
                // Abs value for the for loop
                StackHandel *= -1;
                for (int i=0;i<StackHandel;i++)
                    PopStack();
            }
        }
        // All other operators flush the stack
        else
            EmptyStack();
    }

    /**
     * Function checks the key and return the change to the stack after the operator
     * @return The change in the stack. 2-&gt; flush the stack
     */
    protected int StackOpp()
    {
        // NOTE(review): '==' comparisons rely on 'key' being assigned from the
        // interned literals in the operator name tables.
        if (key == "ifelse")
            return -3;
        if (key == "roll" || key == "put")
            return -2;
        if (key == "callsubr" || key == "callgsubr" || key == "add" || key == "sub" ||
            key == "div" || key == "mul" || key == "drop" || key == "and" ||
            key == "or" || key == "eq")
            return -1;
        if (key == "abs" || key == "neg" || key == "sqrt" || key == "exch" ||
            key == "index" || key == "get" || key == "not" || key == "return")
            return 0;
        if (key == "random" || key == "dup")
            return 1;
        return 2;
    }

    /**
     * Empty the Type2 Stack
     */
    protected void EmptyStack()
    {
        // Null the arguments
        for (int i=0; i<arg_count; i++)
            args[i]=null;
        arg_count = 0;
    }

    /**
     * Pop one element from the stack
     */
    protected void PopStack()
    {
        if (arg_count>0)
        {
            args[arg_count-1]=null;
            arg_count--;
        }
    }

    /**
     * Add an item to the stack
     */
    protected void PushStack()
    {
        arg_count++;
    }

    /**
     * The function reads the next command after the file pointer is set
     */
    protected void ReadCommand()
    {
        key = null;
        boolean gotKey = false;
        // Until a key is found
        while (!gotKey)
        {
            // Read the first Char
            char b0 = getCard8();
            // decode according to the type1/type2 format
            if (b0 == 28) // the two next bytes represent a short int;
            {
                int first = getCard8();
                int second = getCard8();
                args[arg_count] = Integer.valueOf(first<<8 | second);
                arg_count++;
                continue;
            }
            if (b0 >= 32 && b0 <= 246) // The byte read is the byte;
            {
                args[arg_count] = Integer.valueOf(b0 - 139);
                arg_count++;
                continue;
            }
            if (b0 >= 247 && b0 <= 250) // The byte read and the next byte constitute a short int
            {
                int w = getCard8();
                args[arg_count] = Integer.valueOf((b0-247)*256 + w + 108);
                arg_count++;
                continue;
            }
            if (b0 >= 251 && b0 <= 254) // Same as above except negative
            {
                int w = getCard8();
                args[arg_count] = Integer.valueOf(-(b0-251)*256 - w - 108);
                arg_count++;
                continue;
            }
            if (b0 == 255) // The next four bytes represent a double.
            {
                int first = getCard8();
                int second = getCard8();
                int third = getCard8();
                int fourth = getCard8();
                args[arg_count] = Integer.valueOf(first<<24 | second<<16 | third<<8 | fourth);
                arg_count++;
                continue;
            }
            if (b0<=31 && b0 != 28) // An operator was found.. Set Key.
            {
                gotKey=true;
                // 12 is an escape command therefore the next byte is a part
                // of this command
                if (b0 == 12)
                {
                    int b1 = getCard8();
                    // Clamp unknown escape codes to the last table entry.
                    if (b1>SubrsEscapeFuncs.length-1)
                        b1 = SubrsEscapeFuncs.length-1;
                    key = SubrsEscapeFuncs[b1];
                }
                else
                    key = SubrsFunctions[b0];
                continue;
            }
        }
    }

    /**
     * The function reads the subroutine and returns the number of the hint in it.
     * If a call to another subroutine is found the function calls recursively.
     * @param begin the start point of the subr
     * @param end the end point of the subr
     * @param LBias the bias of the Local Subrs
     * @param GBias the bias of the Global Subrs
     * @param LSubrsOffsets The Offsets array of the subroutines
     * @return The number of hints in the subroutine read.
     */
    protected int CalcHints(int begin,int end,int LBias,int GBias,int[] LSubrsOffsets)
    {
        // Goto beginning of the subr
        seek(begin);
        while (getPosition() < end)
        {
            // Read the next command
            ReadCommand();
            int pos = getPosition();
            Object TopElement = null;
            if (arg_count>0)
                TopElement = args[arg_count-1];
            int NumOfArgs = arg_count;
            //Check the modification needed on the Argument Stack according to key;
            HandelStack();
            // a call to a Lsubr
            if (key=="callsubr")
            {
                if (NumOfArgs>0)
                {
                    int Subr = ((Integer)TopElement).intValue() + LBias;
                    // Recurse into the called subr, then restore the read position.
                    CalcHints(LSubrsOffsets[Subr],LSubrsOffsets[Subr+1],LBias,GBias,LSubrsOffsets);
                    seek(pos);
                }
            }
            // a call to a Gsubr
            else if (key=="callgsubr")
            {
                if (NumOfArgs>0)
                {
                    int Subr = ((Integer)TopElement).intValue() + GBias;
                    CalcHints(gsubrOffsets[Subr],gsubrOffsets[Subr+1],LBias,GBias,LSubrsOffsets);
                    seek(pos);
                }
            }
            // A call to "stem"
            else if (key == "hstem" || key == "vstem" || key == "hstemhm" || key == "vstemhm")
                // Increment the NumOfHints by the number of argument couples
                NumOfHints
                    += NumOfArgs/2;
            // A call to "mask"
            else if (key == "hintmask" || key == "cntrmask")
            {
                // Compute the size of the mask
                int SizeOfMask = NumOfHints/8;
                if (NumOfHints%8 != 0 || SizeOfMask == 0)
                    SizeOfMask++;
                // Continue the pointer in SizeOfMask steps
                for (int i=0;i<SizeOfMask;i++)
                    getCard8();
            }
        }
        return NumOfHints;
    }

    /**
     * Function builds the new offset array, object array and assembles the index.
     * used for creating the glyph and subrs subsetted index
     * @param Offsets the offset array of the original index
     * @param Used the hashmap of the used objects
     * @param OperatorForUnusedEntries the operator inserted into the data stream for unused entries
     * @return the new index subset version
     * @throws IOException on read failure from the underlying font buffer
     */
    protected byte[] BuildNewIndex(int[] Offsets,HashMap<Integer, int[]> Used,byte OperatorForUnusedEntries) throws IOException
    {
        int unusedCount = 0;
        int Offset=0;
        int[] NewOffsets = new int[Offsets.length];
        // Build the Offsets Array for the Subset
        for (int i=0;i<Offsets.length;++i)
        {
            NewOffsets[i] = Offset;
            // If the object in the offset is also present in the used
            // HashMap then increment the offset var by its size.
            // NOTE(review): accesses Offsets[i+1]; assumes index Offsets.length-1
            // is never a key in Used — TODO confirm against callers.
            if (Used.containsKey(Integer.valueOf(i)))
            {
                Offset += Offsets[i+1] - Offsets[i];
            }
            else
            {
                // Else the same offset is kept in i+1; the entry becomes a
                // single placeholder byte (counted via unusedCount).
                unusedCount++;
            }
        }
        // Offset var determines the size of the object array
        byte[] NewObjects = new byte[Offset+unusedCount];
        // Build the new Object array
        int unusedOffset = 0;
        for (int i=0;i<Offsets.length-1;++i)
        {
            int start = NewOffsets[i];
            int end = NewOffsets[i+1];
            NewOffsets[i] = start+unusedOffset;
            // If start != End then the Object is used
            // So, we will copy the object data from the font file
            if (start != end)
            {
                // All offsets are Global Offsets relative to the beginning of the font file.
                // Jump the file pointer to the start address to read from.
                buf.seek(Offsets[i]);
                // Read from the buffer and write into the array at start.
                buf.readFully(NewObjects, start+unusedOffset, end-start);
            }
            else
            {
                // Unused entry: emit a single placeholder operator so the entry
                // remains a valid (no-op) charstring.
                NewObjects[start+unusedOffset] = OperatorForUnusedEntries;
                unusedOffset++;
            }
        }
        NewOffsets[Offsets.length-1] += unusedOffset;
        // Use AssembleIndex to build the index from the offset & object arrays
        return AssembleIndex(NewOffsets,NewObjects);
    }

    /**
     * Function builds the new offset array, object array and assembles the index.
     * used for creating the glyph and subrs subsetted index
     * <p>
     * Unlike {@link #BuildNewIndex} this variant copies ALL entries (no Used map)
     * and appends one extra placeholder entry at the end.
     *
     * @param Offsets the offset array of the original index
     * @param OperatorForUnusedEntries the operator inserted into the data stream for unused entries
     * @return the new index subset version
     * @throws java.io.IOException on read failure from the underlying font buffer
     */
    protected byte[] BuildNewIndexAndCopyAllGSubrs(int[] Offsets, byte OperatorForUnusedEntries) throws java.io.IOException
    {
        int unusedCount = 0;
        int Offset = 0;
        int[] NewOffsets = new int[Offsets.length];
        // Build the Offsets Array for the Subset
        for (int i = 0; i < Offsets.length - 1; ++i)
        {
            NewOffsets[i] = Offset;
            Offset += Offsets[i + 1] - Offsets[i];
        }
        // Else the same offset is kept in i+1.
        NewOffsets[Offsets.length - 1] = Offset;
        // The trailing extra entry is a single placeholder byte.
        unusedCount++;
        // Offset var determines the size of the object array
        byte[] NewObjects = new byte[Offset + unusedCount];
        // Build the new Object array
        int unusedOffset = 0;
        for (int i = 0; i < Offsets.length - 1; ++i)
        {
            int start = NewOffsets[i];
            int end = NewOffsets[i + 1];
            NewOffsets[i] = start + unusedOffset;
            // If start != End then the Object is used
            // So, we will copy the object data from the font file
            if (start != end)
            {
                // All offsets are Global Offsets relative to the beginning of the font file.
                // Jump the file pointer to the start address to read from.
                buf.seek(Offsets[i]);
                // Read from the buffer and write into the array at start.
                buf.readFully(NewObjects, start + unusedOffset, end - start);
            }
            else
            {
                NewObjects[start + unusedOffset] = OperatorForUnusedEntries;
                unusedOffset++;
            }
        }
        NewOffsets[Offsets.length - 1] += unusedOffset;
        // Use AssembleIndex to build the index from the offset & object arrays
        return AssembleIndex(NewOffsets, NewObjects);
    }

    /**
     * Function creates the new index, inserting the count,offsetsize,offset array
     * and object array.
     * @param NewOffsets the subsetted offset array
     * @param NewObjects the subsetted object array
     * @return the new index created
     */
    protected byte[] AssembleIndex(int[] NewOffsets,byte[] NewObjects)
    {
        // Calc the index' count field
        char Count = (char)(NewOffsets.length-1);
        // Calc the size of the object array
        int Size = NewOffsets[NewOffsets.length-1];
        // Calc the Offsize
        byte Offsize;
        if (Size < 0xff)
            Offsize = 1;
        else if (Size < 0xffff)
            Offsize = 2;
        else if (Size < 0xffffff)
            Offsize = 3;
        else
            Offsize = 4;
        // The byte array for the new index. The size is calc by
        // Count=2, Offsize=1, OffsetArray = Offsize*(Count+1), The object array
        byte[] NewIndex = new byte[2+1+Offsize*(Count+1)+NewObjects.length];
        // The counter for writing
        int Place = 0;
        // Write the count field (big-endian 16-bit)
        NewIndex[Place++] = (byte) (Count >>> 8 & 0xff);
        NewIndex[Place++] = (byte) (Count >>> 0 & 0xff);
        // Write the offsize field
        NewIndex[Place++] = Offsize;
        // Write the offset array according to the offsize
        for (int newOffset : NewOffsets)
        {
            // The value to be written (CFF index offsets are 1-based)
            int Num = newOffset-NewOffsets[0]+1;
            // Write in bytes according to the offsize.
            // NOTE: intentional switch fall-through — each case emits one more
            // big-endian byte, so Offsize=N writes exactly N bytes.
            switch (Offsize) {
                case 4:
                    NewIndex[Place++] = (byte) (Num >>> 24 & 0xff);
                case 3:
                    NewIndex[Place++] = (byte) (Num >>> 16 & 0xff);
                case 2:
                    NewIndex[Place++] = (byte) (Num >>> 8 & 0xff);
                case 1:
                    NewIndex[Place++] = (byte) (Num >>> 0 & 0xff);
            }
        }
        // Write the new object array one by one
        for (byte newObject : NewObjects)
        {
            NewIndex[Place++] = newObject;
        }
        // Return the new index
        return NewIndex;
    }

    /**
     * The function builds the new output stream
     * according to the subset process
     * @param Font the font
     * @return the subsetted font stream
     */
    protected byte[] BuildNewFile(int Font)
    {
        // Prepare linked list for new font components
        OutputList = new LinkedList<Item>();
        // copy the header of the font
        CopyHeader();
        // create a name index
        BuildIndexHeader(1,1,1);
        OutputList.addLast(new UInt8Item((char)( 1+fonts[Font].name.length() )));
        OutputList.addLast(new StringItem(fonts[Font].name));
        // create the topdict Index
        BuildIndexHeader(1,2,1);
        OffsetItem topdictIndex1Ref = new IndexOffsetItem(2);
        OutputList.addLast(topdictIndex1Ref);
        IndexBaseItem topdictBase = new IndexBaseItem();
        OutputList.addLast(topdictBase);
        // Initialize the Dict Items for later use
        OffsetItem charsetRef = new DictOffsetItem();
        OffsetItem charstringsRef = new DictOffsetItem();
        OffsetItem fdarrayRef = new DictOffsetItem();
        OffsetItem fdselectRef = new DictOffsetItem();
        OffsetItem privateRef = new DictOffsetItem();
        // If the font is not CID create the following keys
        // (the subset output is always emitted in CID form)
        if ( !fonts[Font].isCID ) {
            // create a ROS key
            OutputList.addLast(new DictNumberItem(fonts[Font].nstrings));
            OutputList.addLast(new DictNumberItem(fonts[Font].nstrings+1));
            OutputList.addLast(new DictNumberItem(0));
            OutputList.addLast(new UInt8Item((char)12));
            OutputList.addLast(new UInt8Item((char)30));
            // create a CIDCount key
            OutputList.addLast(new DictNumberItem(fonts[Font].nglyphs));
            OutputList.addLast(new UInt8Item((char)12));
            OutputList.addLast(new UInt8Item((char)34));
            // Sivan's comments
            // What about UIDBase (12,35)? Don't know what is it.
            // I don't think we need FontName; the font I looked at didn't have it.
        }
        // Go to the TopDict of the font being processed
        seek(topdictOffsets[Font]);
        // Run until the end of the TopDict
        while (getPosition() < topdictOffsets[Font+1])
        {
            int p1 = getPosition();
            getDictItem();
            int p2 = getPosition();
            // The encoding key is disregarded since CID has no encoding
            if (key=="Encoding"
                // These keys will be added manually by the process.
                || key=="Private"
                || key=="FDSelect"
                || key=="FDArray"
                || key=="charset"
                || key=="CharStrings"
                ) {
                // intentionally dropped from the copied TopDict
            }
            else {
                //OtherWise copy key "as is" to the output list
                OutputList.add(new RangeItem(buf,p1,p2-p1));
            }
        }
        // Create the FDArray, FDSelect, Charset and CharStrings Keys
        CreateKeys(fdarrayRef,fdselectRef,charsetRef,charstringsRef);
        // Mark the end of the top dict area
        OutputList.addLast(new IndexMarkerItem(topdictIndex1Ref,topdictBase));
        // Copy the string index
        if (fonts[Font].isCID)
            OutputList.addLast(getEntireIndexRange(stringIndexOffset));
        // If the font is not CID we need to append new strings.
        // We need 3 more strings: Registry, Ordering, and a FontName for one FD.
        // The total length is at most "Adobe"+"Identity"+63 = 76
        else
            CreateNewStringIndex(Font);
        // copy the new subsetted global subroutine index
        OutputList.addLast(new RangeItem(new RandomAccessFileOrArray(NewGSubrsIndex),0,NewGSubrsIndex.length));
        // deal with fdarray, fdselect, and the font descriptors
        // If the font is CID:
        if (fonts[Font].isCID) {
            // copy the FDArray, FDSelect, charset
            // Copy FDSelect
            // Mark the beginning
            OutputList.addLast(new MarkerItem(fdselectRef));
            // If an FDSelect exists copy it
            if (fonts[Font].fdselectOffset>=0)
                OutputList.addLast(new RangeItem(buf,fonts[Font].fdselectOffset,fonts[Font].FDSelectLength));
            // Else create a new one
            else
                CreateFDSelect(fdselectRef,fonts[Font].nglyphs);
            // Copy the Charset
            // Mark the beginning and copy entirely
            OutputList.addLast(new MarkerItem(charsetRef));
            OutputList.addLast(new RangeItem(buf,fonts[Font].charsetOffset,fonts[Font].CharsetLength));
            // Copy the FDArray
            // If an FDArray exists
            if (fonts[Font].fdarrayOffset>=0) {
                // Mark the beginning
                OutputList.addLast(new MarkerItem(fdarrayRef));
                // Build a new FDArray with its private dicts and their LSubrs
                Reconstruct(Font);
            }
            else
                // Else create a new one
                CreateFDArray(fdarrayRef,privateRef,Font);
        }
        // If the font is not CID
        else {
            // create FDSelect
            CreateFDSelect(fdselectRef,fonts[Font].nglyphs);
            // recreate a new charset
            CreateCharset(charsetRef,fonts[Font].nglyphs);
            // create a font dict index (fdarray)
            CreateFDArray(fdarrayRef,privateRef,Font);
        }
        // if a private dict exists insert its subsetted version
        if (fonts[Font].privateOffset>=0)
        {
            // Mark the beginning of the private dict
            IndexBaseItem PrivateBase = new IndexBaseItem();
            OutputList.addLast(PrivateBase);
            OutputList.addLast(new MarkerItem(privateRef));
            OffsetItem Subr = new DictOffsetItem();
            // Build and copy the new private dict
            CreateNonCIDPrivate(Font,Subr);
            // Copy the new LSubrs index
            CreateNonCIDSubrs(Font,PrivateBase,Subr);
        }
        // copy the charstring index
        OutputList.addLast(new MarkerItem(charstringsRef));
        // Add the subsetted charstring
        OutputList.addLast(new RangeItem(new RandomAccessFileOrArray(NewCharStringsIndex),0,NewCharStringsIndex.length));
        // now create the new CFF font
        int[] currentOffset = new int[1];
        currentOffset[0] = 0;
        // Count and save the offset for each item
        Iterator<Item> listIter = OutputList.iterator();
        while ( listIter.hasNext() ) {
            Item item = listIter.next();
            item.increment(currentOffset);
        }
        // Compute the Xref for each of the offset items
        listIter = OutputList.iterator();
        while ( listIter.hasNext() ) {
            Item item = listIter.next();
            item.xref();
        }
        int size = currentOffset[0];
        byte[] b = new byte[size];
        // Emit all the items into the new byte array
        listIter = OutputList.iterator();
        while ( listIter.hasNext() ) {
            Item item = listIter.next();
            item.emit(b);
        }
        // Return the new stream
        return b;
    }

    /**
     * Function Copies the header from the original file to the output list
     */
    protected void CopyHeader()
    {
        seek(0);
        // The reads below only advance the file pointer past the fixed header
        // fields; the values themselves are unused except hdrSize.
        int major = getCard8();
        int minor = getCard8();
        int hdrSize = getCard8();
        int offSize = getCard8();
        nextIndexOffset = hdrSize;
        OutputList.addLast(new RangeItem(buf,0,hdrSize));
    }

    /**
     * Function Build the header of an index
     * @param Count the count field of the index
     * @param Offsize the offsize field of the index
     * @param
     * First the first offset of the index
     */
    protected void BuildIndexHeader(int Count,int Offsize,int First)
    {
        // Add the count field
        OutputList.addLast(new UInt16Item((char)Count)); // count
        // Add the offsize field
        OutputList.addLast(new UInt8Item((char)Offsize)); // offSize
        // Add the first offset according to the offsize
        switch(Offsize){
            case 1:
                OutputList.addLast(new UInt8Item((char)First)); // first offset
                break;
            case 2:
                OutputList.addLast(new UInt16Item((char)First)); // first offset
                break;
            case 3:
                OutputList.addLast(new UInt24Item((char)First)); // first offset
                break;
            case 4:
                OutputList.addLast(new UInt32Item((char)First)); // first offset
                break;
            default:
                break;
        }
    }

    /**
     * Function adds the keys into the TopDict
     * @param fdarrayRef OffsetItem for the FDArray
     * @param fdselectRef OffsetItem for the FDSelect
     * @param charsetRef OffsetItem for the CharSet
     * @param charstringsRef OffsetItem for the CharString
     */
    protected void CreateKeys(OffsetItem fdarrayRef,OffsetItem fdselectRef,OffsetItem charsetRef,OffsetItem charstringsRef)
    {
        // create an FDArray key (operator 12 36)
        OutputList.addLast(fdarrayRef);
        OutputList.addLast(new UInt8Item((char)12));
        OutputList.addLast(new UInt8Item((char)36));
        // create an FDSelect key (operator 12 37)
        OutputList.addLast(fdselectRef);
        OutputList.addLast(new UInt8Item((char)12));
        OutputList.addLast(new UInt8Item((char)37));
        // create an charset key (operator 15)
        OutputList.addLast(charsetRef);
        OutputList.addLast(new UInt8Item((char)15));
        // create a CharStrings key (operator 17)
        OutputList.addLast(charstringsRef);
        OutputList.addLast(new UInt8Item((char)17));
    }

    /**
     * Function takes the original string item and adds the new strings
     * to accommodate the CID rules
     * @param Font the font
     */
    protected void CreateNewStringIndex(int Font)
    {
        String fdFontName = fonts[Font].name+"-OneRange";
        // SID strings are limited; clamp the synthetic font name length.
        if (fdFontName.length() > 127)
            fdFontName = fdFontName.substring(0,127);
        // Registry ("Adobe"), Ordering ("Identity"), and the FD font name.
        String extraStrings = "Adobe"+"Identity"+fdFontName;
        int origStringsLen = stringOffsets[stringOffsets.length-1] - stringOffsets[0];
        int
            stringsBaseOffset = stringOffsets[0]-1;
        byte stringsIndexOffSize;
        if (origStringsLen+extraStrings.length() <= 0xff) stringsIndexOffSize = 1;
        else if (origStringsLen+extraStrings.length() <= 0xffff) stringsIndexOffSize = 2;
        else if (origStringsLen+extraStrings.length() <= 0xffffff) stringsIndexOffSize = 3;
        else stringsIndexOffSize = 4;
        OutputList.addLast(new UInt16Item((char)(stringOffsets.length-1+3))); // count
        OutputList.addLast(new UInt8Item((char)stringsIndexOffSize)); // offSize
        for (int stringOffset : stringOffsets)
            OutputList.addLast(new IndexOffsetItem(stringsIndexOffSize, stringOffset-stringsBaseOffset));
        int currentStringsOffset = stringOffsets[stringOffsets.length-1] - stringsBaseOffset;
        //l.addLast(new IndexOffsetItem(stringsIndexOffSize,currentStringsOffset));
        currentStringsOffset += "Adobe".length();
        OutputList.addLast(new IndexOffsetItem(stringsIndexOffSize,currentStringsOffset));
        currentStringsOffset += "Identity".length();
        OutputList.addLast(new IndexOffsetItem(stringsIndexOffSize,currentStringsOffset));
        currentStringsOffset += fdFontName.length();
        OutputList.addLast(new IndexOffsetItem(stringsIndexOffSize,currentStringsOffset));
        OutputList.addLast(new RangeItem(buf,stringOffsets[0],origStringsLen));
        OutputList.addLast(new StringItem(extraStrings));
    }

    /**
     * Function creates new FDSelect for non-CID fonts.
     * The FDSelect built uses a single range for all glyphs
     * @param fdselectRef OffsetItem for the FDSelect
     * @param nglyphs the number of glyphs in the font
     */
    protected void CreateFDSelect(OffsetItem fdselectRef,int nglyphs)
    {
        OutputList.addLast(new MarkerItem(fdselectRef));
        OutputList.addLast(new UInt8Item((char)3)); // format identifier
        OutputList.addLast(new UInt16Item((char)1)); // nRanges
        OutputList.addLast(new UInt16Item((char)0)); // Range[0].firstGlyph
        OutputList.addLast(new UInt8Item((char)0)); // Range[0].fd
        OutputList.addLast(new UInt16Item((char)nglyphs)); // sentinel
    }

    /**
     * Function creates new CharSet for non-CID fonts.
     * The CharSet built uses a single range for all glyphs
     * @param charsetRef OffsetItem for the CharSet
     * @param nglyphs the number of glyphs in the font
     */
    protected void CreateCharset(OffsetItem charsetRef,int nglyphs)
    {
        OutputList.addLast(new MarkerItem(charsetRef));
        OutputList.addLast(new UInt8Item((char)2)); // format identifier
        OutputList.addLast(new UInt16Item((char)1)); // first glyph in range (ignore .notdef)
        OutputList.addLast(new UInt16Item((char)(nglyphs-1))); // nLeft
    }

    /**
     * Function creates new FDArray for non-CID fonts.
     * The FDArray built has only the "Private" operator that points to the font's
     * original private dict
     * @param fdarrayRef OffsetItem for the FDArray
     * @param privateRef OffsetItem for the Private Dict
     * @param Font the font
     */
    protected void CreateFDArray(OffsetItem fdarrayRef,OffsetItem privateRef,int Font)
    {
        OutputList.addLast(new MarkerItem(fdarrayRef));
        // Build the header (count=offsize=first=1)
        BuildIndexHeader(1,1,1);
        // Mark
        OffsetItem privateIndex1Ref = new IndexOffsetItem(1);
        OutputList.addLast(privateIndex1Ref);
        IndexBaseItem privateBase = new IndexBaseItem();
        // Insert the private operands and operator
        OutputList.addLast(privateBase);
        // Calc the new size of the private after subsetting
        // Original size
        int NewSize = fonts[Font].privateLength;
        // Calc the original size of the Subr offset in the private
        int OrgSubrsOffsetSize = CalcSubrOffsetSize(fonts[Font].privateOffset,fonts[Font].privateLength);
        // Increase the private's size: the rewritten Subrs offset is always
        // emitted with a 5-byte number encoding.
        if (OrgSubrsOffsetSize != 0)
            NewSize += 5-OrgSubrsOffsetSize;
        OutputList.addLast(new DictNumberItem(NewSize));
        OutputList.addLast(privateRef);
        OutputList.addLast(new UInt8Item((char)18)); // Private
        OutputList.addLast(new IndexMarkerItem(privateIndex1Ref,privateBase));
    }

    /**
     * Function reconstructs the FDArray, PrivateDict and LSubr for CID fonts
     * @param Font the font
     */
    void Reconstruct(int Font)
    {
        // Init for later use
        OffsetItem[] fdPrivate = new
            DictOffsetItem[fonts[Font].FDArrayOffsets.length-1];
        IndexBaseItem[] fdPrivateBase = new IndexBaseItem[fonts[Font].fdprivateOffsets.length];
        OffsetItem[] fdSubrs = new DictOffsetItem[fonts[Font].fdprivateOffsets.length];
        // Reconstruct each type
        ReconstructFDArray(Font,fdPrivate);
        ReconstructPrivateDict(Font,fdPrivate,fdPrivateBase,fdSubrs);
        ReconstructPrivateSubrs(Font,fdPrivateBase,fdSubrs);
    }

    /**
     * Function subsets the FDArray and builds the new one with new offsets
     * @param Font The font
     * @param fdPrivate OffsetItem Array (one for each FDArray)
     */
    void ReconstructFDArray(int Font,OffsetItem[] fdPrivate)
    {
        // Build the header of the index
        BuildIndexHeader(fonts[Font].FDArrayCount,fonts[Font].FDArrayOffsize,1);
        // For each offset create an Offset Item
        OffsetItem[] fdOffsets = new IndexOffsetItem[fonts[Font].FDArrayOffsets.length-1];
        for (int i=0;i<fonts[Font].FDArrayOffsets.length-1;i++)
        {
            fdOffsets[i] = new IndexOffsetItem(fonts[Font].FDArrayOffsize);
            OutputList.addLast(fdOffsets[i]);
        }
        // Declare beginning of the object array
        IndexBaseItem fdArrayBase = new IndexBaseItem();
        OutputList.addLast(fdArrayBase);
        // For each object check if that FD is used.
// if is used build a new one by changing the private object // Else do nothing // At the end of each object mark its ending (Even if wasn't written) for (int k=0; k<fonts[Font].FDArrayOffsets.length-1; k++) { // if (FDArrayUsed.contains(Integer.valueOf(k))) // { // Goto beginning of objects seek(fonts[Font].FDArrayOffsets[k]); while (getPosition() < fonts[Font].FDArrayOffsets[k+1]) { int p1 = getPosition(); getDictItem(); int p2 = getPosition(); // If the dictItem is the "Private" then compute and copy length, // use marker for offset and write operator number if (key=="Private") { // Save the original length of the private dict int NewSize = ((Integer)args[0]).intValue(); // Save the size of the offset to the subrs in that private int OrgSubrsOffsetSize = CalcSubrOffsetSize(fonts[Font].fdprivateOffsets[k],fonts[Font].fdprivateLengths[k]); // Increase the private's length accordingly if (OrgSubrsOffsetSize != 0) NewSize += 5-OrgSubrsOffsetSize; // Insert the new size, OffsetItem and operator key number OutputList.addLast(new DictNumberItem(NewSize)); fdPrivate[k] = new DictOffsetItem(); OutputList.addLast(fdPrivate[k]); OutputList.addLast(new UInt8Item((char)18)); // Private // Go back to place seek(p2); } // Else copy the entire range else // other than private OutputList.addLast(new RangeItem(buf,p1,p2-p1)); } // } // Mark the ending of the object (even if wasn't written) OutputList.addLast(new IndexMarkerItem(fdOffsets[k],fdArrayBase)); } } /** * Function Adds the new private dicts (only for the FDs used) to the list * @param Font the font * @param fdPrivate OffsetItem array one element for each private * @param fdPrivateBase IndexBaseItem array one element for each private * @param fdSubrs OffsetItem array one element for each private */ void ReconstructPrivateDict(int Font,OffsetItem[] fdPrivate,IndexBaseItem[] fdPrivateBase, OffsetItem[] fdSubrs) { // For each fdarray private dict check if that FD is used. 
// if is used build a new one by changing the subrs offset // Else do nothing for (int i=0;i<fonts[Font].fdprivateOffsets.length;i++) { // if (FDArrayUsed.contains(Integer.valueOf(i))) // { // Mark beginning OutputList.addLast(new MarkerItem(fdPrivate[i])); fdPrivateBase[i] = new IndexBaseItem(); OutputList.addLast(fdPrivateBase[i]); // Goto beginning of objects seek(fonts[Font].fdprivateOffsets[i]); while (getPosition() < fonts[Font].fdprivateOffsets[i]+fonts[Font].fdprivateLengths[i]) { int p1 = getPosition(); getDictItem(); int p2 = getPosition(); // If the dictItem is the "Subrs" then, // use marker for offset and write operator number if (key=="Subrs") { fdSubrs[i] = new DictOffsetItem(); OutputList.addLast(fdSubrs[i]); OutputList.addLast(new UInt8Item((char)19)); // Subrs } // Else copy the entire range else OutputList.addLast(new RangeItem(buf,p1,p2-p1)); } // } } } /** * Function Adds the new LSubrs dicts (only for the FDs used) to the list * @param Font The index of the font * @param fdPrivateBase The IndexBaseItem array for the linked list * @param fdSubrs OffsetItem array for the linked list */ void ReconstructPrivateSubrs(int Font,IndexBaseItem[] fdPrivateBase, OffsetItem[] fdSubrs) { // For each private dict for (int i=0;i<fonts[Font].fdprivateLengths.length;i++) { // If that private dict's Subrs are used insert the new LSubrs // computed earlier if (fdSubrs[i]!= null && fonts[Font].PrivateSubrsOffset[i] >= 0) { OutputList.addLast(new SubrMarkerItem(fdSubrs[i],fdPrivateBase[i])); if(NewLSubrsIndex[i]!=null) OutputList.addLast(new RangeItem(new RandomAccessFileOrArray(NewLSubrsIndex[i]),0,NewLSubrsIndex[i].length)); } } } /** * Calculates how many byte it took to write the offset for the subrs in a specific * private dict. 
* @param Offset The Offset for the private dict * @param Size The size of the private dict * @return The size of the offset of the subrs in the private dict */ int CalcSubrOffsetSize(int Offset,int Size) { // Set the size to 0 int OffsetSize = 0; // Go to the beginning of the private dict seek(Offset); // Go until the end of the private dict while (getPosition() < Offset+Size) { int p1 = getPosition(); getDictItem(); int p2 = getPosition(); // When reached to the subrs offset if (key=="Subrs") { // The Offsize (minus the subrs key) OffsetSize = p2-p1-1; } // All other keys are ignored } // return the size return OffsetSize; } /** * Function computes the size of an index * @param indexOffset The offset for the computed index * @return The size of the index */ protected int countEntireIndexRange(int indexOffset) { // Go to the beginning of the index seek(indexOffset); // Read the count field int count = getCard16(); // If count==0 -> size=2 if (count==0) return 2; else { // Read the offsize field int indexOffSize = getCard8(); // Go to the last element of the offset array seek(indexOffset+2+1+count*indexOffSize); // The size of the object array is the value of the last element-1 int size = getOffset(indexOffSize)-1; // Return the size of the entire index return 2+1+(count+1)*indexOffSize+size; } } /** * The function creates a private dict for a font that was not CID * All the keys are copied as is except for the subrs key * @param Font the font * @param Subr The OffsetItem for the subrs of the private */ void CreateNonCIDPrivate(int Font,OffsetItem Subr) { // Go to the beginning of the private dict and read until the end seek(fonts[Font].privateOffset); while (getPosition() < fonts[Font].privateOffset+fonts[Font].privateLength) { int p1 = getPosition(); getDictItem(); int p2 = getPosition(); // If the dictItem is the "Subrs" then, // use marker for offset and write operator number if (key=="Subrs") { OutputList.addLast(Subr); OutputList.addLast(new 
UInt8Item((char)19)); // Subrs } // Else copy the entire range else OutputList.addLast(new RangeItem(buf,p1,p2-p1)); } } /** * the function marks the beginning of the subrs index and adds the subsetted subrs * index to the output list. * @param Font the font * @param PrivateBase IndexBaseItem for the private that's referencing to the subrs * @param Subrs OffsetItem for the subrs */ void CreateNonCIDSubrs(int Font,IndexBaseItem PrivateBase,OffsetItem Subrs) { // Mark the beginning of the Subrs index OutputList.addLast(new SubrMarkerItem(Subrs,PrivateBase)); // Put the subsetted new subrs index if (NewSubrsIndexNonCID != null) { OutputList.addLast(new RangeItem(new RandomAccessFileOrArray(NewSubrsIndexNonCID),0,NewSubrsIndexNonCID.length)); } } }
LukeSkywalker92/heuslertools
heuslertools/squid/load_squid_data.py
""" Functions to load SQUID data from measurement files """ import numpy as np from heuslertools.tools.data_handling import search_data_start import warnings def load_squid_data(file): """ Loads a SQUID measurement and returns the field, temperature and long moment data. Deprecated, use SQUIDMeasurement instead. """ warnings.warn("load_squid_data is deprecated. Use SQUIDMeasurement object instead.", DeprecationWarning, stacklevel=2) data = {} data["field"], data["temperature"], data["long_moment"] = np.loadtxt(fname=file, skiprows=search_data_start(file, "[Data]", None)+1, delimiter=",", usecols = (2,3,4), unpack=True) return data
spiritl7db/uni-app
src/platforms/h5/service/api/network/socket.js
const { invokeCallbackHandler: invoke } = UniServiceJSBridge

const eventNames = ['open', 'close', 'error', 'message']

// App-level (uni.onSocketXxx) callback ids, keyed by event name.
const callbacks = {}

// All live SocketTasks; index 0 is the "current" task used by the legacy
// uni.sendSocketMessage / uni.closeSocket APIs.
const socketTasks = []

/**
 * SocketTask — wraps a browser WebSocket and adapts it to the uni-app
 * SocketTask interface (onOpen/onClose/onError/onMessage + send/close).
 */
class SocketTask {
  /**
   * The underlying browser WebSocket instance.
   */
  _webSocket
  /**
   * @param {string} url
   * @param {Array} protocols
   * @param {Function} callback invoked synchronously with (error, this)
   */
  constructor (url, protocols, callback) {
    let error
    try {
      const webSocket = this._webSocket = new WebSocket(url, protocols)
      webSocket.binaryType = 'arraybuffer'
      this._callbacks = {}
      eventNames.forEach(name => {
        this._callbacks[name] = []
        webSocket.addEventListener(name, event => {
          const res = name === 'message' ? {
            data: event.data
          } : {}
          // Per-task listeners registered via socketTask.onXxx(...)
          this._callbacks[name].forEach(callback => {
            try {
              callback(res)
            } catch (e) {
              console.error(`thirdScriptError\n${e};at socketTask.on${name[0].toUpperCase() + name.substr(1)} callback function\n`, e)
            }
          })
          // Only the first (current) task forwards to app-level uni.onSocketXxx.
          if (this === socketTasks[0] && callbacks[name]) {
            invoke(callbacks[name], res)
          }
          // Drop the task from the pool once the connection is gone.
          if (name === 'error' || name === 'close') {
            const index = socketTasks.indexOf(this)
            if (index >= 0) {
              socketTasks.splice(index, 1)
            }
          }
        })
      })
      // Mirror read-only WebSocket state constants/properties on the task.
      const properties = ['CLOSED', 'CLOSING', 'CONNECTING', 'OPEN', 'readyState']
      properties.forEach((property) => {
        Object.defineProperty(this, property, {
          get () {
            return webSocket[property]
          }
        })
      })
    } catch (e) {
      error = e
    }
    callback(error, this)
  }
  /**
   * Send data over the connection.
   * @param {any} options options.data plus success/fail/complete callbacks
   */
  send (options = {}) {
    const data = options.data
    const ws = this._webSocket
    try {
      if (ws.readyState !== ws.OPEN) {
        throw new Error('SocketTask.readyState is not OPEN')
      }
      ws.send(data)
      this._callback(options, 'sendSocketMessage:ok')
    } catch (error) {
      this._callback(options, `sendSocketMessage:fail ${error}`)
    }
  }
  /**
   * Close the connection.
   * @param {any} options options.code / options.reason plus callbacks
   */
  close (options = {}) {
    const ws = this._webSocket
    const args = []
    args.push(options.code || 1000)
    if (typeof options.reason === 'string') {
      args.push(options.reason)
    }
    try {
      ws.close(...args)
      // BUG FIX: previously reported 'sendSocketMessage:ok' here — a copy/paste
      // from send(); close() must report its own API name in errMsg.
      this._callback(options, 'closeSocket:ok')
    } catch (error) {
      this._callback(options, `closeSocket:fail ${error}`)
    }
  }
  /**
   * Shared success/fail/complete dispatch: errMsg ending in ':ok' routes to
   * success, anything else to fail; complete always fires.
   */
  _callback ({ success, fail, complete }, errMsg) {
    const data = {
      errMsg
    }
    if (/:ok$/.test(errMsg)) {
      if (typeof success === 'function') {
        success(data)
      }
    } else {
      if (typeof fail === 'function') {
        fail(data)
      }
    }
    if (typeof complete === 'function') {
      complete(data)
    }
  }
}

// Generate socketTask.onOpen/onClose/onError/onMessage registration methods.
eventNames.forEach(item => {
  const name = item[0].toUpperCase() + item.substr(1)
  SocketTask.prototype[`on${name}`] = function (callback) {
    this._callbacks[item].push(callback)
  }
})

/**
 * Create a WebSocket connection.
 * @param {any} data
 * @return {SocketTask}
 */
export function connectSocket ({
  url,
  protocols
}, callbackId) {
  return new SocketTask(url, protocols, (error, socketTask) => {
    if (!error) {
      socketTasks.push(socketTask)
    }
    invoke(callbackId, {
      errMsg: 'connectSocket:' + (error ? `fail ${error}` : 'ok')
    })
  })
}

/**
 * Send data over the current (first) WebSocket connection.
 * @param {any} options
 * @param {string} callbackId
 */
export function sendSocketMessage (options, callbackId) {
  const socketTask = socketTasks[0]
  if (socketTask && socketTask.readyState === socketTask.OPEN) {
    socketTask.send(Object.assign({}, options, {
      complete (res) {
        invoke(callbackId, res)
      }
    }))
  } else {
    invoke(callbackId, {
      errMsg: 'sendSocketMessage:fail WebSocket is not connected '
    })
  }
}

/**
 * Close the current (first) WebSocket connection.
 * @param {any} options
 * @param {string} callbackId
 */
export function closeSocket (options, callbackId) {
  const socketTask = socketTasks[0]
  if (socketTask) {
    socketTask.close(Object.assign({}, options, {
      complete (res) {
        invoke(callbackId, res)
      }
    }))
  } else {
    invoke(callbackId, {
      errMsg: 'closeSocket:fail WebSocket is not connected'
    })
  }
}

/**
 * Build an app-level event registrar for the given event name.
 * @param {string} method
 */
function on (method) {
  return function (callbackId) {
    callbacks[method] = callbackId
  }
}

/**
 * Listen for WebSocket open events.
 */
export const onSocketOpen = on('open')

/**
 * Listen for WebSocket errors.
 */
export const onSocketError = on('error')

/**
 * Listen for messages received from the server.
 */
export const onSocketMessage = on('message')

/**
 * Listen for WebSocket close events.
 */
export const onSocketClose = on('close')
zmyer/cyclops-react
cyclops-futurestream/src/test/java/cyclops/futurestream/react/lazy/sequence/FutureStreamCollectableTest.java
<gh_stars>100-1000 package cyclops.futurestream.react.lazy.sequence; import com.oath.cyclops.types.foldable.Folds; import cyclops.futurestream.LazyReact; import cyclops.streams.CollectableTest; public class FutureStreamCollectableTest extends CollectableTest { @Override public <T> Folds<T> of(T... values) { return LazyReact.sequentialBuilder().of(values); } }
HeyRobotShop/beatoftheday
db/schema.rb
<reponame>HeyRobotShop/beatoftheday
# This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# This file is the source Rails uses to define your schema when running `rails
# db:schema:load`. When creating a new database, `rails db:schema:load` tends to
# be faster and is potentially less error prone than running all of your
# migrations from scratch. Old migrations may fail to apply correctly if those
# migrations use external dependencies or application code.
#
# It's strongly recommended that you check this file into your version control system.

ActiveRecord::Schema.define(version: 2020_09_03_032541) do

  # These are extensions that must be enabled in order to support this database
  enable_extension "plpgsql"

  # Polymorphic comments (resource_type/resource_id, author_type/author_id)
  # used by the ActiveAdmin admin interface.
  create_table "active_admin_comments", force: :cascade do |t|
    t.string "namespace"
    t.text "body"
    t.string "resource_type"
    t.bigint "resource_id"
    t.string "author_type"
    t.bigint "author_id"
    t.datetime "created_at", precision: 6, null: false
    t.datetime "updated_at", precision: 6, null: false
    t.index ["author_type", "author_id"], name: "index_active_admin_comments_on_author_type_and_author_id"
    t.index ["namespace"], name: "index_active_admin_comments_on_namespace"
    t.index ["resource_type", "resource_id"], name: "index_active_admin_comments_on_resource_type_and_resource_id"
  end

  # Admin accounts. encrypted_password / reset_password_token columns follow the
  # Devise convention — presumably managed by Devise; confirm in the model.
  create_table "admin_users", force: :cascade do |t|
    t.string "email", default: "", null: false
    t.string "encrypted_password", default: "", null: false
    t.string "reset_password_token"
    t.datetime "reset_password_sent_at"
    t.datetime "remember_created_at"
    t.datetime "created_at", precision: 6, null: false
    t.datetime "updated_at", precision: 6, null: false
    t.index ["email"], name: "index_admin_users_on_email", unique: true
    t.index ["reset_password_token"], name: "index_admin_users_on_reset_password_token", unique: true
  end

  # File metadata (url, name, size_mb) attached to a track; user is optional.
  create_table "attachments", force: :cascade do |t|
    t.bigint "user_id"
    t.bigint "track_id", null: false
    t.string "url", null: false
    t.datetime "created_at", precision: 6, null: false
    t.datetime "updated_at", precision: 6, null: false
    t.string "name", null: false
    t.float "size_mb"
    t.index ["track_id"], name: "index_attachments_on_track_id"
    t.index ["user_id"], name: "index_attachments_on_user_id"
  end

  # User comments on tracks; both user and track are required.
  create_table "comments", force: :cascade do |t|
    t.bigint "user_id", null: false
    t.bigint "track_id", null: false
    t.string "text", null: false
    t.datetime "created_at", precision: 6, null: false
    t.datetime "updated_at", precision: 6, null: false
    t.index ["track_id"], name: "index_comments_on_track_id"
    t.index ["user_id"], name: "index_comments_on_user_id"
  end

  # Likes on tracks; user is optional. The meaning of "baked" is not evident
  # from the schema — presumably a processed/finalized flag; check the model.
  create_table "likes", force: :cascade do |t|
    t.bigint "user_id"
    t.bigint "track_id", null: false
    t.datetime "created_at", precision: 6, null: false
    t.datetime "updated_at", precision: 6, null: false
    t.boolean "baked", default: false, null: false
    t.index ["track_id"], name: "index_likes_on_track_id"
    t.index ["user_id"], name: "index_likes_on_user_id"
  end

  # Tracks uploaded by users. rebound_track_id is a self-reference to another
  # track. NOTE(review): rebound_track_id is indexed but has no foreign key
  # declared below — confirm whether that is intentional.
  create_table "tracks", force: :cascade do |t|
    t.bigint "user_id", null: false
    t.string "link"
    t.string "audio_type"
    t.string "name"
    t.string "photo"
    t.datetime "created_at", precision: 6, null: false
    t.datetime "updated_at", precision: 6, null: false
    t.bigint "rebound_track_id"
    t.boolean "video", default: false
    t.index ["rebound_track_id"], name: "index_tracks_on_rebound_track_id"
    t.index ["user_id"], name: "index_tracks_on_user_id"
  end

  # End-user accounts; same Devise-style password/reset columns as admin_users.
  create_table "users", force: :cascade do |t|
    t.string "email", default: "", null: false
    t.string "artist_name", null: false
    t.string "encrypted_password", default: "", null: false
    t.string "reset_password_token"
    t.datetime "reset_password_sent_at"
    t.datetime "remember_created_at"
    t.datetime "created_at", precision: 6, null: false
    t.datetime "updated_at", precision: 6, null: false
    t.index ["email"], name: "index_users_on_email", unique: true
    t.index ["reset_password_token"], name: "index_users_on_reset_password_token", unique: true
  end

  add_foreign_key "attachments", "tracks"
  add_foreign_key "attachments", "users"
  add_foreign_key "comments", "tracks"
  add_foreign_key "comments", "users"
  add_foreign_key "likes", "tracks"
  add_foreign_key "likes", "users"
  add_foreign_key "tracks", "users"
end
kirbasyunus/ProjectEuler-solution-with-c
euler4/main.c
<filename>euler4/main.c #include <stdio.h> #include <stdlib.h> int palindrome(int n); int main() { int c=101101, a=99, b=99; if(palindrome(c)==1) printf("78"); else{ printf("11"); } } int palindrome(int a){ int n,b=0,c=0,i; int array[10]; n=a; //printf("is your number palindrome? : "); //scanf("%d",&n); for(i=0;n>0;i++){ b=b+1; c=n%10; array[i]=c; n=n/10; } if(array[0]==array[5]&&array[1]==array[4]&&array[2]==array[3]){ printf("%d\n",1); return 1; } else{ printf("%d\n",0); } }
gmaclennan/mapbox-gl-js
bench/benchmarks/map_load.js
'use strict'; const Evented = require('../../src/util/evented'); const formatNumber = require('../lib/format_number'); const createMap = require('../lib/create_map'); module.exports = function() { const evented = new Evented(); const mapsOnPage = 6; evented.fire('log', { message: `Creating ${mapsOnPage} maps` }); let loaded = 0; const maps = []; const start = Date.now(); for (let i = 0; i < mapsOnPage; i++) { const map = maps[i] = createMap({ style: { version: 8, sources: {}, layers: [] } }); map.on('load', onload.bind(null, map)); map.on('error', (err) => { evented.fire('error', err); }); } function onload () { if (++loaded >= mapsOnPage) { const duration = Date.now() - start; for (let i = 0; i < maps.length; i++) { maps[i].remove(); } evented.fire('end', { message: `${formatNumber(duration)} ms`, score: duration }); done(); } } function done () { } return evented; };
111andre111/logstash
tools/benchmark-cli/src/test/java/org/logstash/benchmark/cli/MainTest.java
<filename>tools/benchmark-cli/src/test/java/org/logstash/benchmark/cli/MainTest.java
/*
 * Licensed to Elasticsearch B.V. under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch B.V. licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *	http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.logstash.benchmark.cli;

import java.io.File;
import java.nio.file.Path;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.logstash.benchmark.cli.ui.UserInput;

import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;

/**
 * Tests for {@link Main}.
 * todo: These tests are ignored for now, their runtime is simply unreasonable for any CI scenario.
 * We will have to find a reasonable trade-off here for making sure the benchmark code is functional
 * without increasing test runtime by many minutes.
 *
 * NOTE(review): despite the javadoc above, no {@code @Ignore} annotation is present on the
 * class or any method — under a stock JUnit runner these tests WILL execute (and, judging by
 * the assertions below, populate a working directory with logstash/jruby artifacts, presumably
 * via download). Confirm whether the @Ignore was dropped by mistake.
 */
public final class MainTest {

    /** Fresh temporary directory per test, used as the CLI working directory. */
    @Rule
    public final TemporaryFolder temp = new TemporaryFolder();

    /**
     * Runs Main with only a --workdir and asserts the logstash and jruby
     * directory layout exists afterwards.
     */
    @Test
    public void downloadsDependenciesForGithub() throws Exception {
        final File pwd = temp.newFolder();
        Main.main(String.format("--workdir=%s", pwd.getAbsolutePath()));
        final Path logstash = pwd.toPath().resolve("logstash").resolve("logstash-master");
        assertThat(logstash.toFile().exists(), is(true));
        final File jruby = pwd.toPath().resolve("jruby").toFile();
        assertThat(jruby.exists(), is(true));
        assertThat(jruby.isDirectory(), is(true));
        assertThat(logstash.resolve("Gemfile").toFile().exists(), is(true));
    }

    /**
     * @throws Exception On Failure
     * @todo cleanup path here, works though if you plug in a correct path
     */
    @Test
    public void runsAgainstLocal() throws Exception {
        final File pwd = temp.newFolder();
        // Requires the system property logstash.benchmark.test.local.path to be set.
        Main.main(String.format(
            "--version=local:%s", System.getProperty("logstash.benchmark.test.local.path")
        ), String.format("--workdir=%s", pwd.getAbsolutePath()));
    }

    /**
     * @throws Exception On Failure
     */
    @Test
    public void runsAgainstRelease() throws Exception {
        final File pwd = temp.newFolder();
        Main.main(
            String.format("--%s=5.5.0", UserInput.DISTRIBUTION_VERSION_PARAM),
            String.format("--workdir=%s", pwd.getAbsolutePath())
        );
    }

    /**
     * @throws Exception On Failure
     */
    @Test
    public void runsRepeatedDatasetAgainstRelease() throws Exception {
        final File pwd = temp.newFolder();
        Main.main(
            String.format("--%s=5.5.0", UserInput.DISTRIBUTION_VERSION_PARAM),
            String.format("--workdir=%s", pwd.getAbsolutePath()),
            String.format("--%s=%d", UserInput.REPEAT_PARAM, 2)
        );
    }

    /**
     * @throws Exception On Failure
     */
    @Test
    public void runsApacheAgainstRelease() throws Exception {
        final File pwd = temp.newFolder();
        Main.main(
            String.format("--%s=5.5.0", UserInput.DISTRIBUTION_VERSION_PARAM),
            String.format("--%s=apache", UserInput.TEST_CASE_PARAM),
            String.format("--workdir=%s", pwd.getAbsolutePath())
        );
    }

    /**
     * @throws Exception On Failure
     */
    @Test
    public void runsRepeatApacheAgainstRelease() throws Exception {
        final File pwd = temp.newFolder();
        Main.main(
            String.format("--%s=5.5.0", UserInput.DISTRIBUTION_VERSION_PARAM),
            String.format("--%s=apache", UserInput.TEST_CASE_PARAM),
            String.format("--workdir=%s", pwd.getAbsolutePath()),
            String.format("--%s=%d", UserInput.REPEAT_PARAM, 2)
        );
    }

    /**
     * @throws Exception On Failure
     */
    @Test
    public void runsCustomAgainstLocal() throws Exception {
        // Requires both logstash.benchmark.test.config.path and
        // logstash.benchmark.test.local.path system properties to be set.
        Main.main(
            String.format("--%s=custom", UserInput.TEST_CASE_PARAM),
            String.format("--%s=%s", UserInput.TEST_CASE_CONFIG_PARAM,
                System.getProperty("logstash.benchmark.test.config.path")
            ),
            String.format("--%s=%s", UserInput.LOCAL_VERSION_PARAM,
                System.getProperty("logstash.benchmark.test.local.path"))
        );
    }
}
grassrootza/grassroot-platform
grassroot-core/src/main/java/za/org/grassroot/core/domain/notification/EventReminderNotification.java
<filename>grassroot-core/src/main/java/za/org/grassroot/core/domain/notification/EventReminderNotification.java package za.org.grassroot.core.domain.notification; import za.org.grassroot.core.domain.User; import za.org.grassroot.core.domain.task.EventLog; import za.org.grassroot.core.enums.NotificationDetailedType; import javax.persistence.DiscriminatorValue; import javax.persistence.Entity; @Entity @DiscriminatorValue("EVENT_REMINDER") public class EventReminderNotification extends EventNotification { @Override public NotificationDetailedType getNotificationDetailedType() { return null; } @Override public User getSender() { return getEventLog().getUser(); } private EventReminderNotification() { // for JPA } public EventReminderNotification(User target, String message, EventLog eventLog) { super(target, message, eventLog); } }
haydarai/rheem
rheem-core/src/main/java/org/qcri/rheem/core/api/Configuration.java
package org.qcri.rheem.core.api; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; import org.qcri.rheem.core.api.configuration.CollectionProvider; import org.qcri.rheem.core.api.configuration.ConstantValueProvider; import org.qcri.rheem.core.api.configuration.ExplicitCollectionProvider; import org.qcri.rheem.core.api.configuration.FunctionalCollectionProvider; import org.qcri.rheem.core.api.configuration.FunctionalKeyValueProvider; import org.qcri.rheem.core.api.configuration.FunctionalValueProvider; import org.qcri.rheem.core.api.configuration.KeyValueProvider; import org.qcri.rheem.core.api.configuration.MapBasedKeyValueProvider; import org.qcri.rheem.core.api.configuration.ValueProvider; import org.qcri.rheem.core.api.exception.RheemException; import org.qcri.rheem.core.function.FlatMapDescriptor; import org.qcri.rheem.core.function.FunctionDescriptor; import org.qcri.rheem.core.function.MapPartitionsDescriptor; import org.qcri.rheem.core.function.PredicateDescriptor; import org.qcri.rheem.core.mapping.Mapping; import org.qcri.rheem.core.optimizer.ProbabilisticDoubleInterval; import org.qcri.rheem.core.optimizer.cardinality.CardinalityEstimate; import org.qcri.rheem.core.optimizer.cardinality.CardinalityEstimator; import org.qcri.rheem.core.optimizer.cardinality.FallbackCardinalityEstimator; import org.qcri.rheem.core.optimizer.channels.ChannelConversion; import org.qcri.rheem.core.optimizer.costs.IntervalLoadEstimator; import org.qcri.rheem.core.optimizer.costs.LoadProfileEstimator; import org.qcri.rheem.core.optimizer.costs.LoadProfileToTimeConverter; import org.qcri.rheem.core.optimizer.costs.LoadToTimeConverter; import org.qcri.rheem.core.optimizer.costs.NestableLoadProfileEstimator; import org.qcri.rheem.core.optimizer.costs.TimeToCostConverter; import org.qcri.rheem.core.optimizer.enumeration.PlanEnumerationPruningStrategy; import org.qcri.rheem.core.plan.rheemplan.ElementaryOperator; import 
org.qcri.rheem.core.plan.rheemplan.ExecutionOperator; import org.qcri.rheem.core.plan.rheemplan.OutputSlot; import org.qcri.rheem.core.platform.Platform; import org.qcri.rheem.core.plugin.Plugin; import org.qcri.rheem.core.profiling.InstrumentationStrategy; import org.qcri.rheem.core.profiling.OutboundInstrumentationStrategy; import org.qcri.rheem.core.util.Actions; import org.qcri.rheem.core.util.ReflectionUtils; import org.qcri.rheem.core.util.fs.FileSystem; import org.qcri.rheem.core.util.fs.FileSystems; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.net.URL; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.LinkedList; import java.util.Map; import java.util.Optional; import java.util.OptionalDouble; import java.util.OptionalLong; import java.util.Properties; import java.util.Random; import java.util.function.ToDoubleFunction; import static org.qcri.rheem.core.util.ReflectionUtils.instantiateDefault; /** * Describes both the configuration of a {@link RheemContext} and {@link Job}s. 
*/ public class Configuration { private static final Logger logger = LoggerFactory.getLogger(Configuration.class); private static final String DEFAULT_CONFIGURATION_FILE = "rheem-core-defaults.properties"; private static final Configuration defaultConfiguration = new Configuration((Configuration) null); static { defaultConfiguration.name = "default"; Actions.doSafe(() -> bootstrapCardinalityEstimationProvider(defaultConfiguration)); Actions.doSafe(() -> bootstrapSelectivityProviders(defaultConfiguration)); Actions.doSafe(() -> bootstrapLoadAndTimeEstimatorProviders(defaultConfiguration)); Actions.doSafe(() -> bootstrapPruningProviders(defaultConfiguration)); Actions.doSafe(() -> bootstrapProperties(defaultConfiguration)); Actions.doSafe(() -> bootstrapPlugins(defaultConfiguration)); } private static final String BASIC_PLUGIN = "org.qcri.rheem.basic.RheemBasics.defaultPlugin()"; private String name = "(no name)"; private final Configuration parent; private KeyValueProvider<OutputSlot<?>, CardinalityEstimator> cardinalityEstimatorProvider; private KeyValueProvider<FunctionDescriptor, ProbabilisticDoubleInterval> udfSelectivityProvider; private KeyValueProvider<ExecutionOperator, LoadProfileEstimator> operatorLoadProfileEstimatorProvider; private KeyValueProvider<FunctionDescriptor, LoadProfileEstimator> functionLoadProfileEstimatorProvider; private MapBasedKeyValueProvider<String, LoadProfileEstimator> loadProfileEstimatorCache; private KeyValueProvider<Platform, LoadProfileToTimeConverter> loadProfileToTimeConverterProvider; private KeyValueProvider<Platform, TimeToCostConverter> timeToCostConverterProvider; private ValueProvider<ToDoubleFunction<ProbabilisticDoubleInterval>> costSquasherProvider; private KeyValueProvider<Platform, Long> platformStartUpTimeProvider; private ExplicitCollectionProvider<Platform> platformProvider; private ExplicitCollectionProvider<Mapping> mappingProvider; private ExplicitCollectionProvider<ChannelConversion> 
channelConversionProvider; private CollectionProvider<Class<PlanEnumerationPruningStrategy>> pruningStrategyClassProvider; private ValueProvider<InstrumentationStrategy> instrumentationStrategyProvider; private KeyValueProvider<String, String> properties; /** * Creates a new top-level instance that bases directly from the default instance. Will try to load the * user configuration file. * * @see #getDefaultConfiguration() */ public Configuration() { this(findUserConfigurationFile()); } /** * Creates a new top-level instance that bases directly from the default instance and loads the specified * configuration file. * * @see #getDefaultConfiguration() * @see #load(String) */ public Configuration(String configurationFileUrl) { this(getDefaultConfiguration()); if (configurationFileUrl != null) { this.name = configurationFileUrl; this.load(configurationFileUrl); } } /** * Basic constructor. */ private Configuration(Configuration parent) { this.parent = parent; if (this.parent != null) { // Providers for platforms. this.platformProvider = new ExplicitCollectionProvider<>(this, this.parent.platformProvider); this.mappingProvider = new ExplicitCollectionProvider<>(this, this.parent.mappingProvider); this.channelConversionProvider = new ExplicitCollectionProvider<>(this, this.parent.channelConversionProvider); // Providers for cardinality estimation. this.cardinalityEstimatorProvider = new MapBasedKeyValueProvider<>(this.parent.cardinalityEstimatorProvider, this); this.udfSelectivityProvider = new MapBasedKeyValueProvider<>(this.parent.udfSelectivityProvider, this); // Providers for cost functions. 
this.operatorLoadProfileEstimatorProvider = new MapBasedKeyValueProvider<>(this.parent.operatorLoadProfileEstimatorProvider, this); this.functionLoadProfileEstimatorProvider = new MapBasedKeyValueProvider<>(this.parent.functionLoadProfileEstimatorProvider, this); this.loadProfileEstimatorCache = new MapBasedKeyValueProvider<>(this.parent.loadProfileEstimatorCache, this); this.loadProfileToTimeConverterProvider = new MapBasedKeyValueProvider<>(this.parent.loadProfileToTimeConverterProvider, this); this.timeToCostConverterProvider = new MapBasedKeyValueProvider<>(this.parent.timeToCostConverterProvider, this); this.platformStartUpTimeProvider = new MapBasedKeyValueProvider<>(this.parent.platformStartUpTimeProvider, this); this.costSquasherProvider = new ConstantValueProvider<>(this, this.parent.costSquasherProvider); // Providers for plan enumeration. this.pruningStrategyClassProvider = new ExplicitCollectionProvider<>(this, this.parent.pruningStrategyClassProvider); this.instrumentationStrategyProvider = new ConstantValueProvider<>(this, this.parent.instrumentationStrategyProvider); // Properties. this.properties = new MapBasedKeyValueProvider<>(this.parent.properties, this); } } private static String findUserConfigurationFile() { final String systemProperty = System.getProperty("rheem.configuration"); if (systemProperty != null) { logger.info("Using configuration at {}.", systemProperty); return systemProperty; } final URL classPathResource = ReflectionUtils.getResourceURL("rheem.properties"); if (classPathResource != null) { logger.info("Using configuration at {}.", classPathResource); return classPathResource.toString(); } logger.info("Using blank configuration."); return null; } /** * Adjusts this instance to the properties specified in the given file. 
* * @param configurationUrl URL to the configuration file */ public void load(String configurationUrl) { final Optional<FileSystem> fileSystem = FileSystems.getFileSystem(configurationUrl); if (!fileSystem.isPresent()) { throw new RheemException(String.format("Could not access %s.", configurationUrl)); } try (InputStream configInputStream = fileSystem.get().open(configurationUrl)) { this.load(configInputStream); } catch (Exception e) { throw new RheemException(String.format("Could not load configuration from %s.", configurationUrl), e); } } /** * Adjusts this instance to the properties specified in the given file. * * @param configInputStream of the file */ public void load(InputStream configInputStream) { try { final Properties properties = new Properties(); properties.load(configInputStream); for (Map.Entry<Object, Object> propertyEntry : properties.entrySet()) { final String key = propertyEntry.getKey().toString(); final String value = propertyEntry.getValue().toString(); this.handleConfigurationFileEntry(key, value); } } catch (IOException e) { throw new RheemException("Could not load configuration.", e); } finally { IOUtils.closeQuietly(configInputStream); } } /** * Handle a just loaded property. 
* * @param key the property's key * @param value the property's value */ private void handleConfigurationFileEntry(String key, String value) { switch (key) { case "rheem.core.optimizer.cost.squash": if (!(this.costSquasherProvider instanceof ConstantValueProvider)) { logger.warn("Cannot update cost estimate provider."); } else if ("expectation".equals(value)) { ((ConstantValueProvider<ToDoubleFunction<ProbabilisticDoubleInterval>>) this.costSquasherProvider).setValue( ProbabilisticDoubleInterval::getGeometricMeanEstimate ); } else if ("random".equals(value)) { final int salt = new Random().nextInt(); ((ConstantValueProvider<ToDoubleFunction<ProbabilisticDoubleInterval>>) this.costSquasherProvider).setValue( cost -> cost.hashCode() * salt + cost.hashCode() ); } else { logger.warn("Cannot set unknown cost comparator \"{}\".", value); } break; default: this.setProperty(key, value); break; } } /** * Returns the global default instance. It will be the fallback for all other instances and should only modified * to provide default values. */ public static Configuration getDefaultConfiguration() { return defaultConfiguration; } private static void bootstrapPlugins(Configuration configuration) { configuration.setPlatformProvider(new ExplicitCollectionProvider<>(configuration)); configuration.setMappingProvider(new ExplicitCollectionProvider<>(configuration)); configuration.setChannelConversionProvider(new ExplicitCollectionProvider<>(configuration)); try { Plugin basicPlugin = ReflectionUtils.evaluate(BASIC_PLUGIN); basicPlugin.configure(configuration); } catch (Exception e) { if (logger.isDebugEnabled()) { logger.debug("Could not load basic plugin.", e); } else { logger.warn("Could not load basic plugin."); } } } private static void bootstrapCardinalityEstimationProvider(final Configuration configuration) { // Safety net: provide a fallback estimator. 
KeyValueProvider<OutputSlot<?>, CardinalityEstimator> fallbackProvider = new FunctionalKeyValueProvider<OutputSlot<?>, CardinalityEstimator>( outputSlot -> new FallbackCardinalityEstimator(), configuration ).withSlf4jWarning("Creating fallback cardinality estimator for {}."); // Default option: Implementations define their estimators. KeyValueProvider<OutputSlot<?>, CardinalityEstimator> defaultProvider = new FunctionalKeyValueProvider<>(fallbackProvider, (outputSlot, requestee) -> { assert outputSlot.getOwner().isElementary() : String.format("Cannot provide estimator for composite %s.", outputSlot.getOwner()); final ElementaryOperator operator = (ElementaryOperator) outputSlot.getOwner(); // Instance-level estimator? if (operator.getCardinalityEstimator(outputSlot.getIndex()) != null) { return operator.getCardinalityEstimator(outputSlot.getIndex()); } // Type-level estimator? return operator .createCardinalityEstimator(outputSlot.getIndex(), configuration) .orElse(null); }); // Customizable layer: Users can override manually. KeyValueProvider<OutputSlot<?>, CardinalityEstimator> overrideProvider = new MapBasedKeyValueProvider<>(defaultProvider); configuration.setCardinalityEstimatorProvider(overrideProvider); } private static void bootstrapSelectivityProviders(Configuration configuration) { // Selectivity of UDFs { // Safety net: provide a fallback selectivity. 
KeyValueProvider<FunctionDescriptor, ProbabilisticDoubleInterval> fallbackProvider = new FunctionalKeyValueProvider<FunctionDescriptor, ProbabilisticDoubleInterval>( functionDescriptor -> { if (functionDescriptor instanceof PredicateDescriptor) { return new ProbabilisticDoubleInterval(0.1, 1, 0.9d); } else if (functionDescriptor instanceof FlatMapDescriptor) { return new ProbabilisticDoubleInterval(0.1, 1, 0.9d); } else if (functionDescriptor instanceof MapPartitionsDescriptor) { return new ProbabilisticDoubleInterval(0.1, 1, 0.9d); } else { throw new RheemException("Cannot provide fallback selectivity for " + functionDescriptor); } }, configuration ).withSlf4jWarning("Using fallback selectivity for {}."); // Built-in option: Let the PredicateDescriptor provide its selectivity. KeyValueProvider<FunctionDescriptor, ProbabilisticDoubleInterval> builtInProvider = new FunctionalKeyValueProvider<>( fallbackProvider, functionDescriptor -> FunctionDescriptor.getSelectivity(functionDescriptor).orElse(null) ); // Customizable layer: Users can override manually. KeyValueProvider<FunctionDescriptor, ProbabilisticDoubleInterval> overrideProvider = new MapBasedKeyValueProvider<>(builtInProvider); configuration.setUdfSelectivityProvider(overrideProvider); } } private static void bootstrapLoadAndTimeEstimatorProviders(Configuration configuration) { { // Safety net: provide a fallback selectivity. 
KeyValueProvider<ExecutionOperator, LoadProfileEstimator> fallbackProvider = new FunctionalKeyValueProvider<ExecutionOperator, LoadProfileEstimator>( (operator, requestee) -> { final Configuration conf = requestee.getConfiguration(); return new NestableLoadProfileEstimator( IntervalLoadEstimator.createIOLinearEstimator( null, conf.getLongProperty("rheem.core.fallback.udf.cpu.lower"), conf.getLongProperty("rheem.core.fallback.udf.cpu.upper"), conf.getDoubleProperty("rheem.core.fallback.udf.cpu.confidence"), CardinalityEstimate.EMPTY_ESTIMATE ), IntervalLoadEstimator.createIOLinearEstimator( null, conf.getLongProperty("rheem.core.fallback.udf.ram.lower"), conf.getLongProperty("rheem.core.fallback.udf.ram.upper"), conf.getDoubleProperty("rheem.core.fallback.udf.ram.confidence"), CardinalityEstimate.EMPTY_ESTIMATE ) ); }, configuration ).withSlf4jWarning("Creating fallback load estimator for {}."); // Built-in option: let the ExecutionOperators provide the LoadProfileEstimator. KeyValueProvider<ExecutionOperator, LoadProfileEstimator> builtInProvider = new FunctionalKeyValueProvider<>( fallbackProvider, (operator, requestee) -> operator.createLoadProfileEstimator(requestee.getConfiguration()).orElse(null) ); // Customizable layer: Users can override manually. KeyValueProvider<ExecutionOperator, LoadProfileEstimator> overrideProvider = new MapBasedKeyValueProvider<>(builtInProvider); configuration.setOperatorLoadProfileEstimatorProvider(overrideProvider); } { // Safety net: provide a fallback selectivity. 
KeyValueProvider<FunctionDescriptor, LoadProfileEstimator> fallbackProvider = new FunctionalKeyValueProvider<FunctionDescriptor, LoadProfileEstimator>( (operator, requestee) -> { final Configuration conf = requestee.getConfiguration(); return new NestableLoadProfileEstimator( IntervalLoadEstimator.createIOLinearEstimator( null, conf.getLongProperty("rheem.core.fallback.operator.cpu.lower"), conf.getLongProperty("rheem.core.fallback.operator.cpu.upper"), conf.getDoubleProperty("rheem.core.fallback.operator.cpu.confidence"), CardinalityEstimate.EMPTY_ESTIMATE ), IntervalLoadEstimator.createIOLinearEstimator( null, conf.getLongProperty("rheem.core.fallback.operator.ram.lower"), conf.getLongProperty("rheem.core.fallback.operator.ram.upper"), conf.getDoubleProperty("rheem.core.fallback.operator.ram.confidence"), CardinalityEstimate.EMPTY_ESTIMATE ) ); }, configuration ).withSlf4jWarning("Creating fallback load estimator for {}."); // Built-in layer: let the FunctionDescriptors provide the LoadProfileEstimators themselves. KeyValueProvider<FunctionDescriptor, LoadProfileEstimator> builtInProvider = new FunctionalKeyValueProvider<>( fallbackProvider, functionDescriptor -> functionDescriptor.getLoadProfileEstimator().orElse(null) ); // Customizable layer: Users can override manually. KeyValueProvider<FunctionDescriptor, LoadProfileEstimator> overrideProvider = new MapBasedKeyValueProvider<>(builtInProvider); configuration.setFunctionLoadProfileEstimatorProvider(overrideProvider); } { // Safety net: provide a fallback start up costs. final KeyValueProvider<Platform, Long> builtinProvider = new FunctionalKeyValueProvider<>( (platform, requestee) -> platform.getInitializeMillis(requestee.getConfiguration()), configuration ); // Override layer. KeyValueProvider<Platform, Long> overrideProvider = new MapBasedKeyValueProvider<>(builtinProvider); configuration.setPlatformStartUpTimeProvider(overrideProvider); } { // Safety net: provide a fallback start up costs. 
final KeyValueProvider<Platform, LoadProfileToTimeConverter> fallbackProvider = new FunctionalKeyValueProvider<Platform, LoadProfileToTimeConverter>( platform -> LoadProfileToTimeConverter.createDefault( LoadToTimeConverter.createLinearCoverter(0.0000005), // 1 CPU with 2 GHz LoadToTimeConverter.createLinearCoverter(0.00001), // 10 ms to read/write 1 MB LoadToTimeConverter.createLinearCoverter(0.00001), // 10 ms to receive/send 1 MB (cpuEstimate, diskEstimate, networkEstimate) -> cpuEstimate.plus(diskEstimate).plus(networkEstimate) ), configuration ) .withSlf4jWarning("Using fallback load-to-time converter for {}."); final KeyValueProvider<Platform, LoadProfileToTimeConverter> defaultProvider = new FunctionalKeyValueProvider<>( fallbackProvider, (platform, requestee) -> platform.createLoadProfileToTimeConverter( requestee.getConfiguration() ) ); final KeyValueProvider<Platform, LoadProfileToTimeConverter> overrideProvider = new MapBasedKeyValueProvider<>(defaultProvider, false); configuration.setLoadProfileToTimeConverterProvider(overrideProvider); } { // Safety net: provide a fallback start up costs. 
final KeyValueProvider<Platform, TimeToCostConverter> fallbackProvider = new FunctionalKeyValueProvider<Platform, TimeToCostConverter>( platform -> new TimeToCostConverter(0d, 1d), configuration ).withSlf4jWarning("Using fallback time-to-cost converter for {}."); final KeyValueProvider<Platform, TimeToCostConverter> builtInProvider = new FunctionalKeyValueProvider<>( fallbackProvider, (platform, requestee) -> platform.createTimeToCostConverter( requestee.getConfiguration() ) ); final KeyValueProvider<Platform, TimeToCostConverter> overrideProvider = new MapBasedKeyValueProvider<>(builtInProvider, false); configuration.setTimeToCostConverterProvider(overrideProvider); } { configuration.setLoadProfileEstimatorCache(new MapBasedKeyValueProvider<>(configuration, true)); } } private static void bootstrapPruningProviders(Configuration configuration) { { // By default, load pruning from the rheem.core.optimizer.pruning.strategies property. CollectionProvider<Class<PlanEnumerationPruningStrategy>> propertyBasedProvider = new FunctionalCollectionProvider<>( config -> { final String strategyClassNames = config.getStringProperty("rheem.core.optimizer.pruning.strategies"); if (strategyClassNames == null || strategyClassNames.isEmpty()) { return Collections.emptySet(); } Collection<Class<PlanEnumerationPruningStrategy>> strategyClasses = new LinkedList<>(); for (String strategyClassName : strategyClassNames.split(",")) { try { @SuppressWarnings("unchecked") final Class<PlanEnumerationPruningStrategy> strategyClass = (Class<PlanEnumerationPruningStrategy>) Class.forName(strategyClassName); strategyClasses.add(strategyClass); } catch (ClassNotFoundException e) { logger.warn("Illegal pruning strategy class name: \"{}\".", strategyClassName); } } return strategyClasses; }, configuration ); CollectionProvider<Class<PlanEnumerationPruningStrategy>> overrideProvider = new ExplicitCollectionProvider<>(configuration, propertyBasedProvider); 
configuration.setPruningStrategyClassProvider(overrideProvider); } { ValueProvider<ToDoubleFunction<ProbabilisticDoubleInterval>> defaultProvider = new ConstantValueProvider<>(ProbabilisticDoubleInterval::getGeometricMeanEstimate, configuration); ValueProvider<ToDoubleFunction<ProbabilisticDoubleInterval>> overrideProvider = new ConstantValueProvider<>(defaultProvider); configuration.setCostSquasherProvider(overrideProvider); } { ValueProvider<InstrumentationStrategy> defaultProvider = new ConstantValueProvider<>(new OutboundInstrumentationStrategy(), configuration); ValueProvider<InstrumentationStrategy> configProvider = new FunctionalValueProvider<>( requestee -> { Optional<String> optInstrumentationtStrategyClass = requestee.getConfiguration().getOptionalStringProperty("rheem.core.optimizer.instrumentation"); if (!optInstrumentationtStrategyClass.isPresent()) { return null; } return instantiateDefault(optInstrumentationtStrategyClass.get()); }, defaultProvider ); ValueProvider<InstrumentationStrategy> overrideProvider = new ConstantValueProvider<>(configProvider); configuration.setInstrumentationStrategyProvider(overrideProvider); } } private static void bootstrapProperties(Configuration configuration) { // Here, we could put some default values. final KeyValueProvider<String, String> defaultProperties = new MapBasedKeyValueProvider<>(configuration, false); configuration.setProperties(defaultProperties); configuration.load(ReflectionUtils.loadResource(DEFAULT_CONFIGURATION_FILE)); // Set some dynamic properties. configuration.setProperty("rheem.core.log.cardinalities", StringUtils.join( Arrays.asList(System.getProperty("user.home"), ".rheem", "cardinalities.json"), File.separator )); configuration.setProperty("rheem.core.log.executions", StringUtils.join( Arrays.asList(System.getProperty("user.home"), ".rheem", "executions.json"), File.separator )); // Supplement with a customizable layer. 
final KeyValueProvider<String, String> customizableProperties = new MapBasedKeyValueProvider<>(defaultProperties); configuration.setProperties(customizableProperties); } /** * Creates a child instance. */ public Configuration fork() { return new Configuration(this); } /** * Creates a child instance. * * @param name for the child instance */ public Configuration fork(String name) { final Configuration configuration = new Configuration(this); configuration.name = name; return configuration; } public KeyValueProvider<OutputSlot<?>, CardinalityEstimator> getCardinalityEstimatorProvider() { return this.cardinalityEstimatorProvider; } public void setCardinalityEstimatorProvider( KeyValueProvider<OutputSlot<?>, CardinalityEstimator> cardinalityEstimatorProvider) { this.cardinalityEstimatorProvider = cardinalityEstimatorProvider; } public KeyValueProvider<FunctionDescriptor, ProbabilisticDoubleInterval> getUdfSelectivityProvider() { return this.udfSelectivityProvider; } public void setUdfSelectivityProvider( KeyValueProvider<FunctionDescriptor, ProbabilisticDoubleInterval> udfSelectivityProvider) { this.udfSelectivityProvider = udfSelectivityProvider; } public KeyValueProvider<ExecutionOperator, LoadProfileEstimator> getOperatorLoadProfileEstimatorProvider() { return this.operatorLoadProfileEstimatorProvider; } public void setOperatorLoadProfileEstimatorProvider(KeyValueProvider<ExecutionOperator, LoadProfileEstimator> operatorLoadProfileEstimatorProvider) { this.operatorLoadProfileEstimatorProvider = operatorLoadProfileEstimatorProvider; } public KeyValueProvider<FunctionDescriptor, LoadProfileEstimator> getFunctionLoadProfileEstimatorProvider() { return this.functionLoadProfileEstimatorProvider; } public void setFunctionLoadProfileEstimatorProvider(KeyValueProvider<FunctionDescriptor, LoadProfileEstimator> functionLoadProfileEstimatorProvider) { this.functionLoadProfileEstimatorProvider = functionLoadProfileEstimatorProvider; } public MapBasedKeyValueProvider<String, 
LoadProfileEstimator> getLoadProfileEstimatorCache() { return this.loadProfileEstimatorCache; } public void setLoadProfileEstimatorCache(MapBasedKeyValueProvider<String, LoadProfileEstimator> loadProfileEstimatorCache) { this.loadProfileEstimatorCache = loadProfileEstimatorCache; } public ExplicitCollectionProvider<Platform> getPlatformProvider() { return this.platformProvider; } public void setPlatformProvider(ExplicitCollectionProvider<Platform> platformProvider) { this.platformProvider = platformProvider; } public ExplicitCollectionProvider<Mapping> getMappingProvider() { return mappingProvider; } public void setMappingProvider(ExplicitCollectionProvider<Mapping> mappingProvider) { this.mappingProvider = mappingProvider; } public ExplicitCollectionProvider<ChannelConversion> getChannelConversionProvider() { return channelConversionProvider; } public void setChannelConversionProvider(ExplicitCollectionProvider<ChannelConversion> channelConversionProvider) { this.channelConversionProvider = channelConversionProvider; } public CollectionProvider<Class<PlanEnumerationPruningStrategy>> getPruningStrategyClassProvider() { return this.pruningStrategyClassProvider; } public void setPruningStrategyClassProvider(CollectionProvider<Class<PlanEnumerationPruningStrategy>> pruningStrategyClassProvider) { this.pruningStrategyClassProvider = pruningStrategyClassProvider; } public ValueProvider<InstrumentationStrategy> getInstrumentationStrategyProvider() { return this.instrumentationStrategyProvider; } public void setInstrumentationStrategyProvider(ValueProvider<InstrumentationStrategy> instrumentationStrategyProvider) { this.instrumentationStrategyProvider = instrumentationStrategyProvider; } public KeyValueProvider<Platform, Long> getPlatformStartUpTimeProvider() { return this.platformStartUpTimeProvider; } public void setPlatformStartUpTimeProvider(KeyValueProvider<Platform, Long> platformStartUpTimeProvider) { this.platformStartUpTimeProvider = platformStartUpTimeProvider; 
    }

    /** Replaces the property provider (e.g. to install a customizable override layer). */
    public void setProperties(KeyValueProvider<String, String> properties) {
        this.properties = properties;
    }

    public KeyValueProvider<String, String> getProperties() {
        return this.properties;
    }

    /** Sets a single configuration property on the current provider layer. */
    public void setProperty(String key, String value) {
        this.properties.set(key, value);
    }

    /** Retrieves a property value; behavior for a missing key is defined by the provider. */
    public String getStringProperty(String key) {
        return this.properties.provideFor(key);
    }

    public Optional<String> getOptionalStringProperty(String key) {
        return this.properties.optionallyProvideFor(key);
    }

    /** Retrieves a property value, falling back to {@code fallback} if absent. */
    public String getStringProperty(String key, String fallback) {
        return this.getOptionalStringProperty(key).orElse(fallback);
    }

    public KeyValueProvider<Platform, LoadProfileToTimeConverter> getLoadProfileToTimeConverterProvider() {
        return this.loadProfileToTimeConverterProvider;
    }

    public void setLoadProfileToTimeConverterProvider(KeyValueProvider<Platform, LoadProfileToTimeConverter> loadProfileToTimeConverterProvider) {
        this.loadProfileToTimeConverterProvider = loadProfileToTimeConverterProvider;
    }

    public KeyValueProvider<Platform, TimeToCostConverter> getTimeToCostConverterProvider() {
        return timeToCostConverterProvider;
    }

    public void setTimeToCostConverterProvider(KeyValueProvider<Platform, TimeToCostConverter> timeToCostConverterProvider) {
        this.timeToCostConverterProvider = timeToCostConverterProvider;
    }

    public ValueProvider<ToDoubleFunction<ProbabilisticDoubleInterval>> getCostSquasherProvider() {
        return this.costSquasherProvider;
    }

    public void setCostSquasherProvider(ValueProvider<ToDoubleFunction<ProbabilisticDoubleInterval>> costSquasherProvider) {
        this.costSquasherProvider = costSquasherProvider;
    }

    /** Parses the property as a {@code long}; empty if the key is absent. */
    public OptionalLong getOptionalLongProperty(String key) {
        final Optional<String> longValue = this.properties.optionallyProvideFor(key);
        if (longValue.isPresent()) {
            return OptionalLong.of(Long.valueOf(longValue.get()));
        } else {
            return OptionalLong.empty();
        }
    }

    /**
     * Parses the property as a {@code long}.
     *
     * @throws RheemException if the key is absent
     */
    public long getLongProperty(String key) {
        final OptionalLong optionalLongProperty = this.getOptionalLongProperty(key);
        if (!optionalLongProperty.isPresent()) {
            throw new RheemException(String.format("No value for \"%s\".", key));
        }
        return optionalLongProperty.getAsLong();
    }

    public long getLongProperty(String key, long fallback) {
        return this.getOptionalLongProperty(key).orElse(fallback);
    }

    /** Parses the property as a {@code double}; empty if the key is absent. */
    public OptionalDouble getOptionalDoubleProperty(String key) {
        final Optional<String> optionalDouble = this.properties.optionallyProvideFor(key);
        if (optionalDouble.isPresent()) {
            return OptionalDouble.of(Double.valueOf(optionalDouble.get()));
        } else {
            return OptionalDouble.empty();
        }
    }

    // NOTE(review): unlike getLongProperty(String), a missing key here surfaces as
    // java.util.NoSuchElementException (from OptionalDouble.getAsDouble()) rather than a
    // RheemException — confirm whether callers rely on that before unifying the behavior.
    public double getDoubleProperty(String key) {
        return this.getOptionalDoubleProperty(key).getAsDouble();
    }

    public double getDoubleProperty(String key, double fallback) {
        return this.getOptionalDoubleProperty(key).orElse(fallback);
    }

    public Optional<Boolean> getOptionalBooleanProperty(String key) {
        return this.properties.optionallyProvideFor(key).map(Boolean::valueOf);
    }

    // NOTE(review): Optional.get() without a presence check — a missing key throws
    // NoSuchElementException here, inconsistent with getLongProperty(String)'s RheemException.
    public boolean getBooleanProperty(String key) {
        return this.getOptionalBooleanProperty(key).get();
    }

    public boolean getBooleanProperty(String key, boolean fallback) {
        return this.getOptionalBooleanProperty(key).orElse(fallback);
    }

    /** @return the parent instance serving as fallback, or {@code null} for the default configuration */
    public Configuration getParent() {
        return parent;
    }

    @Override
    public String toString() {
        return String.format("%s[%s]", this.getClass().getSimpleName(), this.name);
    }

    public String getName() {
        return this.name;
    }
}
jdsimcoe/ervell
src/v2/components/ChannelMetadata/components/ChannelMetadataCollaborators/components/CollaboratorsListItem/components/CollaboratorsListItemUser/index.js
<reponame>jdsimcoe/ervell import React, { Component } from 'react' import { propType } from 'graphql-anywhere' import Link from 'v2/components/ChannelMetadata/components/ChannelMetadataCollaborators/components/CollaboratorsListItem/components/Link' import collaboratorLinkFragment from 'v2/components/ChannelMetadata/components/ChannelMetadataCollaborators/fragments/collaboratorLink' export default class CollaboratorsListItemUser extends Component { static propTypes = { user: propType(collaboratorLinkFragment).isRequired, } render() { const { user: { href, name }, } = this.props return ( <strong> <Link href={href} length={name.length}> {name} </Link> </strong> ) } }
scottyw/tetromino
gameboy/memory/mbc1.go
package memory

import (
	"fmt"
)

// mbc1 emulates the MBC1 cartridge mapper. Writes into 0x0000-0x7fff land in
// four "register regions"; their combined contents select the active ROM and
// RAM banks, which are recomputed after every write.
type mbc1 struct {

	// ROM and RAM data and mask read from the cart
	rom [][0x4000]byte
	ram [][0x2000]byte

	// Record of what was written between 0x0000 and 0x8000
	enabledRegion uint8
	romRegion     uint8
	ramRegion     uint8
	modeRegion    uint8

	// Selected ROM and RAM banks computed from written values on an MBC-type basis
	ramEnabled bool
	romBank0   int
	romBankX   int
	ramBank    int
}

// newMBC1 builds an MBC1 mapper over the given ROM/RAM banks. The fixed area
// starts at bank 0 and the switchable area at bank 1 (bank 0 is never
// selectable there — see the romBankX adjustment in Write).
func newMBC1(rom [][0x4000]byte, ram [][0x2000]byte) mbc {
	return &mbc1{
		rom:      rom,
		ram:      ram,
		romBank0: 0,
		romBankX: 1,
	}
}

// Read returns the byte mapped at addr. Panics for addresses outside the
// cart-visible ranges (0x0000-0x7fff ROM, 0xa000-0xbfff RAM).
func (m *mbc1) Read(addr uint16) uint8 {
	switch {
	case addr < 0x4000:
		// "Fixed" ROM area — usually bank 0, remappable in advanced banking mode.
		return m.rom[m.romBank0][addr]
	case addr < 0x8000:
		// Switchable ROM bank.
		offset := addr - 0x4000
		return m.rom[m.romBankX][offset]
	case addr < 0xa000:
		panic(fmt.Sprintf("mbc1 has no read mapping for address 0x%04x", addr))
	case addr < 0xc000:
		// Cart RAM; reads while RAM is disabled return 0xff (open bus).
		if m.ramEnabled {
			offset := addr - 0xa000
			return m.ram[m.ramBank][offset]
		}
		return 0xff
	default:
		panic(fmt.Sprintf("mbc1 has no read mapping for address 0x%04x", addr))
	}
}

// Write records value into the mapper register regions (below 0x8000) or cart
// RAM (0xa000-0xbfff), then recomputes the derived bank selections from the
// latest register contents.
func (m *mbc1) Write(addr uint16, value uint8) {
	switch {
	case addr < 0x2000:
		m.enabledRegion = value
	case addr < 0x4000:
		m.romRegion = value
	case addr < 0x6000:
		m.ramRegion = value
	case addr < 0x8000:
		m.modeRegion = value
	case addr < 0xa000:
		panic(fmt.Sprintf("mbc1 has no write mapping for address 0x%04x", addr))
	case addr < 0xc000:
		// Writes to disabled RAM are silently dropped.
		offset := addr - 0xa000
		if m.ramEnabled {
			m.ram[m.ramBank][offset] = value
		}
	default:
		panic(fmt.Sprintf("mbc1 has no write mapping for address 0x%04x", addr))
	}

	// Check if RAM is enabled: low nibble 0x0a in the enable region.
	m.ramEnabled = m.enabledRegion&0x0f == 0x0a

	// Check ROM bank 0: in advanced mode the "RAM" register bits become the
	// high ROM bank bits for the fixed area; otherwise bank 0 is fixed.
	if m.modeRegion&0x01 == 0 {
		m.romBank0 = 0
	} else {
		m.romBank0 = int((m.ramRegion & 0x03) << 5)
		m.romBank0 = m.romBank0 % len(m.rom)
	}

	// Check ROM bank 1: low 5 bits from the ROM register (0 is remapped to 1),
	// high 2 bits from the RAM register. Note: in Go, & and << share the same
	// precedence and associate left, so `m.ramRegion & 0x03 << 5` is
	// `(m.ramRegion & 0x03) << 5` — same as the romBank0 computation above.
	m.romBankX = int((m.romRegion & 0x1f))
	if m.romBankX == 0 {
		m.romBankX = 1
	}
	m.romBankX |= int((m.ramRegion & 0x03 << 5))
	m.romBankX = m.romBankX % len(m.rom)

	// Check RAM bank: only meaningful while RAM is enabled; simple mode pins
	// bank 0, advanced mode selects via the RAM register.
	if m.ramEnabled {
		if m.modeRegion&0x01 == 0 {
			m.ramBank = 0
		} else {
			m.ramBank = int(m.ramRegion & 0x03)
			m.ramBank = m.ramBank % len(m.ram)
		}
	}
}

// DumpRAM returns all cart RAM concatenated bank by bank (e.g. for save files).
func (m *mbc1) DumpRAM() []byte {
	var dump []byte
	for _, r := range m.ram {
		dump = append(dump, r[:]...)
	}
	return dump
}
ruibaby/ExamOnline
src/main/Java/cc/ryanc/filter/SetCharacterEncodingFilter.java
package cc.ryanc.filter; import javax.servlet.*; import javax.servlet.annotation.WebFilter; import javax.servlet.http.HttpServletRequest; import java.io.IOException; /** * Created with IntelliJ IDEA. * Author: RYAN0UP * Date: 2017/9/13 * 设置编码格式的过滤器 */ @WebFilter(filterName = "SetCharacterEncodingFilter") public class SetCharacterEncodingFilter implements Filter { protected FilterConfig filterConfig; private String encoding = "UTF-8"; public void init(FilterConfig filterConfig) throws ServletException { this.filterConfig = filterConfig; if (filterConfig.getInitParameter("encoding") != null) encoding = filterConfig.getInitParameter("encoding"); } public void doFilter(ServletRequest req, ServletResponse resp, FilterChain filterChain) throws IOException, ServletException { HttpServletRequest request = (HttpServletRequest) req; request.setCharacterEncoding(encoding); filterChain.doFilter(req, resp); } public void destroy() { this.encoding = null; } }
ProjectBlackFalcon/DatBot
DatBot.ProtocolBuilder/Utils/messages/game/context/roleplay/havenbag/HavenBagPackListMessage.java
<gh_stars>1-10 package protocol.network.messages.game.context.roleplay.havenbag; import java.io.IOException; import java.util.ArrayList; import java.util.List; import protocol.utils.ProtocolTypeManager; import protocol.network.util.types.BooleanByteWrapper; import protocol.network.NetworkMessage; import protocol.network.util.DofusDataReader; import protocol.network.util.DofusDataWriter; import protocol.network.Network; import protocol.network.NetworkMessage; @SuppressWarnings("unused") public class HavenBagPackListMessage extends NetworkMessage { public static final int ProtocolId = 6620; private List<Integer> packIds; public List<Integer> getPackIds() { return this.packIds; } public void setPackIds(List<Integer> packIds) { this.packIds = packIds; }; public HavenBagPackListMessage(){ } public HavenBagPackListMessage(List<Integer> packIds){ this.packIds = packIds; } @Override public void Serialize(DofusDataWriter writer) { try { writer.writeShort(this.packIds.size()); int _loc2_ = 0; while( _loc2_ < this.packIds.size()){ writer.writeByte(this.packIds.get(_loc2_)); _loc2_++; } } catch (Exception e){ e.printStackTrace(); } } @Override public void Deserialize(DofusDataReader reader) { try { int _loc2_ = reader.readShort(); int _loc3_ = 0; this.packIds = new ArrayList<Integer>(); while( _loc3_ < _loc2_){ int _loc15_ = reader.readByte(); this.packIds.add(_loc15_); _loc3_++; } } catch (Exception e){ e.printStackTrace(); } } }
gmulders/abacus
abacus-core/src/main/java/org/gertje/abacus/nodes/AbstractTermNode.java
package org.gertje.abacus.nodes; import org.gertje.abacus.token.Token; /** * Abstract super class for all term nodes. */ public abstract class AbstractTermNode extends AbstractExpressionNode implements BinaryOperationNode { protected ExpressionNode lhs; protected ExpressionNode rhs; public AbstractTermNode(ExpressionNode lhs, ExpressionNode rhs, Token token, int precedence) { super(precedence, token); this.lhs = lhs; this.rhs = rhs; } @Override public boolean getIsConstant() { return false; } public ExpressionNode getLhs() { return lhs; } public void setLhs(ExpressionNode lhs) { this.lhs = lhs; } public ExpressionNode getRhs() { return rhs; } public void setRhs(ExpressionNode rhs) { this.rhs = rhs; } }
nikoncode/hibernate-types
hibernate-types-55/src/main/java/com/vladmihalcea/hibernate/type/array/internal/BooleanArrayTypeDescriptor.java
<filename>hibernate-types-55/src/main/java/com/vladmihalcea/hibernate/type/array/internal/BooleanArrayTypeDescriptor.java package com.vladmihalcea.hibernate.type.array.internal; /** * @author <EMAIL> * @version 2.9.13 */ public class BooleanArrayTypeDescriptor extends AbstractArrayTypeDescriptor<boolean[]> { public BooleanArrayTypeDescriptor() { super(boolean[].class); } @Override protected String getSqlArrayType() { String sqlArrayType = super.getSqlArrayType(); return sqlArrayType != null ? sqlArrayType : "boolean"; } }
kevinwallimann/enceladus
menas/ui/service/ValidationResult.js
/*
 * Copyright 2018 ABSA Group Limited
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * Outcome of a validation run: the validated value (if any), a success flag,
 * and an error message (if any). Read via the `result`, `isValid` and
 * `errorMessage` getters.
 */
class ValidationResult {

  constructor(result, isValid, errorMessage) {
    Object.assign(this, {
      _result: result,
      _isValid: isValid,
      _errorMessage: errorMessage,
    });
  }

  get result() {
    return this._result;
  }

  get isValid() {
    return this._isValid;
  }

  get errorMessage() {
    return this._errorMessage;
  }

}

/** Successful validation carrying the validated value. */
class ValidResult extends ValidationResult {

  constructor(result) {
    super(result, true);
  }

}

/** Failed validation carrying only an error message. */
class InvalidResult extends ValidationResult {

  constructor(errorMessage) {
    super(null, false, errorMessage);
  }

}
gangserver/py_test
python/bitcoin/ch04/04_19.py
import pandas as pd

# Load the OHLC workbook, re-index it by the date column, write the indexed
# copy to a new file, and echo the frame to stdout.
df = pd.read_excel("data/ohlc.xlsx").set_index("date")
df.to_excel("data/ohlc-2.xlsx")
print(df)
ronnyp07/pdv
modules/sales/client/controllers/sales.client.controller.js
(function () {
  'use strict';

  // Point-of-sale "ventas" controller: drives the cart, payment/credit flow,
  // NCF (fiscal receipt) entry, held orders, and ticket printing.
  // NOTE(review): most behavior lives in the injected *RestServices and
  // CartService singletons; comments below describe only what this file shows.
  angular
    .module('sales')
    .controller('SalesController', SalesController);

  SalesController.$inject = [
    '$scope', '$state', 'Authentication', 'sales', 'ParameterRestServices',
    'ProductRestServices', 'InventoryRestServices', 'CartService', '$q',
    'SalesRestServices', '$timeout', '$document', 'SucursalsService',
    'CustomerRestServices', '$modal', 'CreditpaysRestServices',
    'MovimientoRestServices', '$rootScope', 'NcfsRestServices'];

  function SalesController (
    $scope, $state, Authentication, sale, ParameterRestServices,
    ProductRestServices, InventoryRestServices, CartService, $q,
    SalesRestServices, $timeout, $document, SucursalsService,
    CustomerRestServices, $modal, CreditpaysRestServices,
    MovimientoRestServices, $rootScope, NcfsRestServices) {
    var vm = this;

    // --- Controller/session wiring: expose injected services on the vm and
    // scope product lookups to the cashier's branch (sucursal). ---
    vm.sale = sale;
    $rootScope.nav = true;
    vm.parameterServices = ParameterRestServices;
    vm.selectedCategory = 'Categoria';
    vm.authentication = Authentication;
    vm.authentication.hideNavBar = true;
    vm.userimageURL = vm.authentication.user.profileImageURL;
    vm.productServices = ProductRestServices;
    // Active cash-register shift; supplies sucursalId/caja for every sale.
    vm.cajaturnoInfo = vm.authentication.cajaturno.get('cajaturno');
    vm.focusinControl = {};
    //vm.sucursalInfo = SucursalsService.get({sucursalId: vm.sale.cajaturnoInfo.sucursalId});
    vm.productServices.sucursalSearch = vm.cajaturnoInfo.sucursalId;
    vm.customerServices = CustomerRestServices;
    vm.salesServices = SalesRestServices;
    vm.inventoryServices = InventoryRestServices;
    vm.inventoryServices.saveMode = 'create';
    vm.ncfServices = NcfsRestServices;
    vm.creditServices = CreditpaysRestServices;
    vm.movimientoServices = MovimientoRestServices;

    vm.setCategoryValue = function(value){
      vm.selectedCategory = value;
    };

    // Warm up the category tree and the invoice/payment type lookup lists.
    vm.parameterServices.categoryTree('Categoria');

    vm.parameterServices.getParamsFilterByParent('', vm.parameterServices.paramEnum.headers.tipo_factura).then(function(data){
      vm.tipoFacturaList = data;
    });

    vm.CartService = CartService;
    vm.CartService.resetCart();
    vm.CartService.state = 'ventas';

    vm.parameterServices.getParamsFilterByParent('', vm.parameterServices.paramEnum.headers.tipo_sales_pago).then(function(data){
      vm.tipoPagoList = data;
    });

    // Resets the in-progress sale model (quantities, payment amounts, totals)
    // to a pristine state; called on load and after each completed order.
    function init(){
      //resetProductFilter();
      vm.customerServices.scrollMore();
      vm.product = {};
      vm.product.descuento = 0;
      vm.product.qt = 1;
      vm.product.efectivo = 0;
      vm.product.tarjeta = 0;
      vm.product.cheque = 0;
      vm.product.transferencia = 0;
      vm.product.vales = 0;
      vm.product.total = 0;
      vm.product.itbs = 0;
      vm.product.pagado = 0;
      vm.product.change = 0;
      vm.product.subtotal = 0;
      vm.product.discount = 0;
      vm.salesPedingList = [];
      vm.productServices.isPOS = null;
    }

    init();

    // Filters products by category and rebuilds the breadcrumb list
    // (category ancestors + the current category, minus the root).
    vm.getCategories = function(category){
      category = !category ? 'Categoria' : category;
      vm.selectedCategory = category;
      vm.categorieList = [];
      resetProductCounter();
      var param = [];
      vm.parameterServices.categoryTree(category).then(function(){
        vm.categorieList = vm.parameterServices.category.ancestors;
        vm.categorieList.push(vm.parameterServices.category._id);
        _.remove(vm.categorieList, function(n) {
          return n === 'Categoria';
        });
        if(vm.parameterServices.children.length > 0){
          _.forEach(vm.parameterServices.children, function(i){
            param.push(i._id);
          });
        }
        vm.productServices.category = category === 'Categoria' ? null: category; // vm.parameterServices.category._id;
        vm.productServices.loadScrollproducts();
      }, function(error){
        alertify.alert('Ha ocurrido un error en el sistema!');
      });
    };

    // Category tile click handler: accepts either a category object or an id.
    vm.getCategoryProducts = function(value){
      //resetProductCounter();
      vm.selectedCategory = _.isObject(value) ? value._id : value;
      vm.getCategories(vm.selectedCategory);
      //vm.parameterServices.categoryTree(value._id ? value._id : value);
      vm.productServices.category = vm.selectedCategory;
      //vm.productServices.loadScrollproducts();
    };

    // NOTE(review): empty stub — it is redefined further below with a real body,
    // and that later assignment wins.
    vm.showCreateCustomer = function(){
    };

    // Looks a product up by barcode within the current branch (POS mode).
    vm.getProductFilter = function(param){
      var defer = $q.defer();
      var parm = {
        bardCode: param,
        sucursalId: vm.cajaturnoInfo.sucursalId,
        isPOS: true
      };
      vm.productServices.getProductFilter(parm)
      .then(function(data){
        defer.resolve(data);
      });
      return defer.promise;
    };

    // Records a payment amount for the given payment type. For non-credit
    // sales, a payment below the total resets "paid"/"change" to zero; for
    // credit sales the outstanding credit is recomputed instead.
    vm.payOrder = function(value, type){
      resetPayValue(type);
      vm.pay = value;
      vm.product.tipoPago = type;
      if(vm.product.formaPago !== 'credito'){
        if(value > 0){
          if(value >= vm.product.total){
            vm.product.change = vm.CartService.getChange(value);
            vm.product.pagado = value;
          }else{
            vm.product.pagado = 0;
            vm.product.change = 0;
          }
        }else{
          vm.product.pagado = 0;
          vm.product.change = 0;
          vm.pay = 0;
        }
      }else{
        vm.product.credito = getCredito();
      }
    };

    // Zeroes every payment-method amount except the one currently selected.
    function resetPayValue(type){
      vm.product.efectivo = type === vm.parameterServices.paramEnum.details.tipo_sales_pago_efectivo ? vm.product.efectivo : 0;
      vm.product.cheque = type === vm.parameterServices.paramEnum.details.tipo_sales_pago_cheque ? vm.product.cheque : 0;
      vm.product.tarjeta = type === vm.parameterServices.paramEnum.details.tipo_sales_pago_tarjeta ? vm.product.tarjeta : 0;
      vm.product.vales = type === vm.parameterServices.paramEnum.details.tipo_sales_pago_vales ? vm.product.vales : 0;
      vm.product.tran = type === vm.parameterServices.paramEnum.details.tipo_sales_pago_tran ? vm.product.tranferencia : 0;
    }

    // vm.productList = [{
    //   precios: {uno: {pVenta : 200}},
    //   name: 'test'
    // }];

    // Product tile click: re-fetch the product (fresh stock/price) and add it
    // to the cart with the currently entered quantity, then refresh totals.
    vm.selectedItem = function(product){
      // _.forEach(vm.productList, function (i) {
      //   i.name = product.name;
      //   vm.p = i;
      //   console.log(i);
      // });
      var search = {};
      search.bardCode = product.bardCode;
      search.sucursalId = product.sucursalId;
      vm.productServices.getProductFilter(search).then(function(data){
        var newProduct = data[0];
        vm.CartService.addToCart(newProduct, vm.product.qt).then(function(result){
          vm.salesCart = [];
          vm.salesServices.selectedProduct = product;
          vm.cartList = vm.CartService.getCartItems();
          resetCartPrice(vm.cartList);
          vm.product.qt = 1;
        });
      });
    };

    // Switches the payment-form view; only allowed when the cart is non-empty.
    vm.setPageMode = function(status){
      var cart = vm.CartService.getCartItems();
      if(cart.length > 0){
        vm.product.formaPago = status;
      }
    };

    // Enters credit-sale mode with defaults (1 installment, weekly, 10%
    // interest) and builds the installment schedule.
    vm.setCredit = function(){
      var cart = vm.CartService.getCartItems();
      if(cart.length > 0){
        vm.product.tipoPago = vm.tipoPagoList[0]._id;
        vm.pay = 0;
        // vm.product.tipoPago = ;
        vm.product.formaPago = 'credito';
        vm.product.cantPagos = 1;
        vm.product.rango = 'week';
        vm.product.interes = 10;
        vm.product.interesAmount = getInteresAmount();
        vm.product.credito = getCredito();
        vm.totalPagado = vm.product.total + getInteresAmount();
        vm.setDatePay();
      }
    };

    // Interest amount = total * interest% / 100 (0 when no rate set).
    function getInteresAmount(){
      return (Number(vm.product.total) * Number(vm.product.interes ? vm.product.interes : 0)) / 100;
    }

    // Outstanding credit: zero when fully paid, otherwise the unpaid balance
    // plus the interest amount.
    function getCredito(){
      var credit = 0;
      if(Number(vm.pay) >= Number(vm.product.total) ){
        return credit;
      }else{
        credit = (Number(vm.product.total) - vm.pay) + getInteresAmount();
      }
      return credit;
    }

    // Builds the installment due-date list ("Q" = biweekly, otherwise a
    // moment.js unit such as 'week'/'month'), localized to Spanish day names.
    vm.setDatePay = function(){
      moment.locale('es');
      vm.product.rangoList = [];
      if(vm.product.rango === 'Q'){
        var pagosDetails =(Number(vm.product.credito) + Number(vm.product.interesAmount)) / Number(vm.product.cantPagos);
        var quincena = 15;
        for(var i = 1 ; i <= vm.product.cantPagos ; i++){
          vm.product.rangoList.push({numero: i, date: moment().add(quincena, 'days').format('DD/MM/YYYY'), day: moment().add(quincena, 'days').format('dddd', 'es'), cantidad: pagosDetails});
          // NOTE(review): doubles each step (15, 30, 60, ...) rather than
          // adding 15 — confirm this is the intended biweekly schedule.
          quincena += quincena;
        }
      }else{
        var pagosDetails = (Number(vm.product.credito) + Number(vm.product.interesAmount)) / Number(vm.product.cantPagos);
        for(var i = 1 ; i <= vm.product.cantPagos ; i++){
          vm.product.rangoList.push({numero: i, date: moment().add(i, vm.product.rango).format('DD/MM/YYYY'), day: moment().add(i, vm.product.rango).format('dddd', 'es'), cantidad: pagosDetails});
        }
      };
    };

    // Re-derives interest, schedule, credit and grand total after the
    // interest-rate input changes.
    vm.interesChange = function(){
      vm.product.interesAmount = getInteresAmount();
      vm.setDatePay();
      vm.product.credito = getCredito();
      vm.totalPagado = vm.product.total + getInteresAmount();
    };

    //var width = window.innerWidth;

    // Persists the sale ('hold' stores it as a pending order). Stamps branch,
    // shift, customer and credit fields onto the sale model, prints the
    // ticket, then creates a new sale or updates a resumed held one.
    vm.saveOrder = function(order){
      vm.product.sucursalId = vm.cajaturnoInfo.sucursalId;
      vm.product.cart = vm.CartService.getCartItems();
      vm.product.fecha_venta = moment().format();
      vm.product.documentType = 'Ticket';
      vm.product.customer = vm.salesServices.selectedCustomer ? vm.salesServices.selectedCustomer : null;
      vm.product.caja = vm.cajaturnoInfo.caja;
      vm.product.cajaturno = vm.cajaturnoInfo._id;
      vm.product.credito = vm.product.formaPago === 'credito' ? vm.product.credito : null;
      vm.product.rango = vm.product.formaPago === 'credito' ? vm.product.rango : null;
      vm.product.interes = vm.product.formaPago === 'credito' ? vm.product.interes : null;
      vm.product.status = order === 'hold' ? vm.parameterServices.paramEnum.details.sales_status_espera : null;
      vm.product.isPending = vm.product.formaPago === 'credito' ? true : false;
      vm.product.interesAmount = vm.product.formaPago === 'credito' ? vm.product.interesAmount : null;
      vm.salesServices.printMode = true;
      vm.ticketDate = moment().format('DD/MM/YYYY');
      vm.ticketTime = moment().format("hh: mm A");
      vm.salesServices.isSaving = true;
      vm.printReport();
      if(!vm.salesServices.selectedSale){
        vm.salesServices.create(vm.product).then(function(data){
          if(order !== 'hold'){
            saveProces(data);
            vm.salesServices.isSaving = false;
          }
        });
      }else{
        vm.product.status = order === 'hold' ? vm.parameterServices.paramEnum.details.sales_status_espera : null;
        vm.product._id = vm.salesServices.selectedSale._id;
        vm.salesServices.update(vm.product).then(function(data){
          saveProces(data);
          vm.salesServices.selectedSale = null;
          vm.salesServices.isSaving = false;
        });
        // console.log(order);
        // console.log(vm.product.rangoList);
      }
    };

    // Post-save pipeline: create installment records and an initial payment
    // movement for credit sales, then decrement inventory and product stock.
    function saveProces(data){
      vm.product.ticketNumber = data.salesId;
      if(vm.product.formaPago === 'credito'){
        angular.forEach(vm.product.rangoList, function(item){
          item.order = data._id;
          vm.creditServices.create(item);
        });
        if(vm.pay > 0){
          // Down payment recorded as an account movement against the sale.
          var movimiento = {
            customer : vm.product.customer,
            sales : data._id,
            caja: vm.product.caja,
            cajaturno : vm.product.cajaturno,
            tipoMovimiento: vm.parameterServices.paramEnum.details.tipo_movimiento_ac,
            tipoPago: vm.product.tipoPago,
            montoTotal: vm.pay,
          };
          vm.movimientoServices.create(movimiento).then(function(){
          });
        }
      }
      // vm.printReport();
      vm.inventoryServices.getMaxInventory(vm.cajaturnoInfo.sucursalId)
      .then(function(inventoryData){
        if(inventoryData.length > 0){
          vm.inventoryServices.invOutPutField(vm.product.cart, inventoryData[0]).then(function(inventory){
            vm.productServices.decremetProductStuck(vm.product.cart).then(function(){
            });
          });
        }
      });
    }

    vm.setSelectedProduct = function(product){
      vm.salesServices.selectedProduct = product;
    };

    vm.resetCustomer = function(){
      vm.salesServices.selectedCustomer = null;
    };

    vm.printRecive = function(){
      vm.printReport();
    };

    // Puts the current (non-empty) cart on hold and starts a fresh order.
    vm.setHold = function(){
      var cart = vm.CartService.getCartItems();
      if(cart.length > 0){
      //if(vm.productServices.product.listproductPromotion.length > 0 && vm.salesPedingList.length <= 5 && !vm.salesServices.selectedSale){
        vm.saveOrder('hold');
        vm.nexOrder();
      }else{
        alertify.error('No se puede poner orden en espera');
      }
    };

    // Numeric keypad handler: appends the pressed digit to the quantity
    // field (string concatenation, e.g. 1 then 2 -> "12").
    vm.setAccionValue = function(value){
      vm.product.qt = vm.product.qt + '' + value;
    };

    // Resets product-list paging/infinite-scroll state.
    function resetProductCounter(){
      vm.percent = 0;
      vm.priceChangeAmount = 0;
      vm.productServices.hasMore = true;
      vm.productServices.isLoading = false;
      vm.productServices.page = 1;
      vm.productServices.productList = [];
    }

    // Starts the next order after a sale completes: clear cart, leave print
    // mode, drop the customer, and reset the sale model.
    vm.nexOrder = function(){
      vm.clearCart();
      vm.salesServices.printMode = false;
      vm.resetCustomer();
      init();
    };

    // Cancel-order button: wipe all payment fields and amounts.
    vm.cancelOrder = function(){
      vm.product.formaPago = null;
      vm.product.change = 0;
      vm.product.pagado = 0;
      vm.product.tarjeta = 0;
      vm.product.vales = 0;
      vm.product.tranferencia = 0;
      vm.product.cheque = 0;
      vm.product.efectivo = 0;
      vm.pay = 0;
    };

    // Subtracts the entered quantity (default 1) from the selected cart line.
    vm.discountQuantity = function(){
      if(vm.salesServices.selectedProduct){
        vm.CartService.discountQuantity(vm.salesServices.selectedProduct, vm.product.qt ? vm.product.qt : 1).then(function(){
          var cart = vm.CartService.getCartItems();
          vm.product.qt = 1;
          resetCartPrice(cart);
        });
      }
    };

    vm.removeItem = function(item){
      vm.CartService.removeFromCart(item).then(function(){
        var cart = vm.CartService.getCart();
        vm.cart = cart.items;
        //vm.productServices.product.listproductPromotion = cart.items;
      });
    };

    // Inline-edit commit for a cart line's quantity.
    // NOTE(review): `product` parameter is unused.
    vm.saveField = function(index, product) {
      if(vm.editMode){
        vm.CartService.updateItemQuantityByIndex(index, vm.product.Editvalue).then(function(data){
          resetCartPrice();
        });
      }
    };

    // Applies a new unit price (and any percent discount) to the selected
    // cart line, then refreshes totals.
    vm.changePrice = function(price){
      var cart = vm.CartService.getCartItems();
      //if(vm.productServices.product.listproductPromotion.length > 0){
      if(cart.length > 0){
        vm.CartService.getItemIndex(vm.salesServices.selectedProduct).then(function(index){
          vm.CartService.updateItemDiscount(index, vm.percent !== 0 ? vm.percent : '');
          vm.CartService.updateItemPrice(index, price).then(function(){
            // var cart = vm.CartService.getCart();
            //vm.productServices.product.listproductPromotion = cart.items;
            vm.product.qt = 1;
            resetCartPrice(cart);
          });
        });
      }
    };

    // Derives the discount percentage from an entered price relative to the
    // selected product's list price.
    vm.priceChange = function(priceAmount){
      vm.percent = vm.CartService.percentDiscount(vm.CartService.getDiscount(priceAmount, vm.salesServices.selectedProduct.precios.uno.pVenta), vm.salesServices.selectedProduct.precios.uno.pVenta);
    };

    // Price-list row selected: recompute percent from the net price and apply
    // the gross price.
    vm.changeSelected = function(newPrice){
      vm.priceChange(newPrice.p_ventaNeto);
      vm.saveChangePrice(newPrice.pVenta);
    };

    // Derives the price that corresponds to the entered discount percent.
    vm.percentChange = function(){
      vm.priceChangeAmount = vm.productServices.amountRemoveTax(vm.salesServices.selectedProduct.precios.uno.pVenta, vm.productServices.getTaxAmount(vm.salesServices.selectedProduct.precios.uno.pVenta, vm.percent));
    };

    vm.setNewPrice = function(price){
      console.log(price);
    };

    // Commits the price change (if a price was entered) and closes the modal.
    vm.saveChangePrice = function(newPrice){
      if(newPrice) {
        vm.changePrice(newPrice);
      }
      vm.cancelPriceChange();
    };

    // Lists all sales of the current shift in the sales modal.
    vm.openSalesList = function(){
      vm.salesServices.getSales({cajaturno: vm.cajaturnoInfo._id}).then(function(pedingOrder){
        vm.salesPedingList = pedingOrder;
      });
      salesPopUp();
    };

    // Recomputes discount/subtotal/tax/total from the cart and clears the
    // inline-edit buffer.
    function resetCartPrice(data){
      vm.product.discount = vm.CartService.getTotalDiscount(data);
      vm.product.subtotal = vm.CartService.getSubTotal(data);
      vm.product.itbs = vm.CartService.getTotalTax();
      vm.product.total = vm.CartService.getTotalCart();
      vm.product.Editvalue = null;
    }

    // Inline-edit commit for a cart line's cost/price field.
    vm.saveFieldPrices = function(index, product){
      if(vm.editModePrices){
        vm.CartService.updateItemCost(index, vm.product.EditvaluePrices).then(function(data){
          resetCartPrice(data);
          // vm.product.subtotal = vm.CartService.getSubTotal(data.items);
          // vm.product.total = vm.CartService.getTotalCart();
          // vm.product.itbs = vm.CartService.getTotalTax();
          // vm.product.EditvaluePrices = null;
        });
      }
    };

    vm.clearCart = function(){
      vm.CartService.resetCart().then(function(data){
        //vm.productServices.product.listproductPromotion = [];
        vm.cartList = null;
        resetCartPrice(data.items);
        // vm.product.sudata total = vm.CartService.getSubTotal(data.items);
        // vm.product.total = vm.CartService.getTotalCart();
        // vm.product.itbs = vm.CartService.getTotalTax();
      });
    };

    // Opens the "create customer" modal (overrides the earlier stub).
    vm.showCreateCustomer = function(){
      vm.createCustomerModal = $modal({
        scope: $scope,
        'templateUrl': 'modules/sales/partials/customer-add.html',
        show: true
        // placement: 'center'
      });
    };

    // Opens the customer-picker modal.
    vm.selectCustomer = function(){
      vm.createModal = $modal({
        scope: $scope,
        'templateUrl': 'modules/sales/partials/customers.tpl.html',
        show: true
        // placement: 'center'
      });
      //resetProductFilter();
    };

    // Resets customer-list paging state. NOTE(review): currently unreferenced.
    function resetFilter(){
      vm.customerServices.hasMore = true;
      vm.customerServices.isLoading = false;
      vm.customerServices.page = 1;
      vm.customerServices.customersList = [];
    }

    // Resumes a held sale: restores its cart, totals and customer, and marks
    // it as the sale being edited so saveOrder updates instead of creating.
    vm.pedingSale = function(sales){
      vm.CartService.setCartItems(sales.cart);
      vm.cartList = vm.CartService.getCartItems();
      vm.salesServices.selectedSale = sales;
      vm.product.discount = vm.CartService.getTotalDiscount(sales.cart);
      vm.product.subtotal = vm.CartService.getSubTotal(sales.cart);
      vm.product.total = vm.CartService.getTotalCart();
      vm.product.itbs = vm.CartService.getTotalTax();
      vm.salesServices.selectedCustomer = sales.customer;
      //vm.productServices.product.listproductPromotion = sales.cart;
      vm.createModalSales.hide();
    };

    // Lists only the on-hold sales of the current shift.
    vm.getHold = function(){
      var val = {
        status : vm.parameterServices.paramEnum.details.sales_status_espera,
        cajaturno : vm.cajaturnoInfo._id
      };
      vm.salesServices.getSales(val).then(function(pedingOrder){
        vm.salesPedingList = pedingOrder;
      });
      salesPopUp();
    };

    function salesPopUp(){
      vm.createModalSales = $modal({
        scope: $scope,
        'templateUrl': 'modules/sales/partials/sales-details.html',
        show: true
      });
    }

    // Opens the fiscal-receipt (NCF/comprobante) window after verifying the
    // selected NCF series still has sequence numbers available.
    vm.setComprovante = function(){
      var param = {
        sucursalId: vm.productServices.sucursalSearch,
        noNcf: vm.comprovante
      };
      vm.ncfServices.getNcfFilter(param).then(function(data){
        if(data.length > 0){
          vm.salesServices.selectedNcf = data[0];
          if(Number(vm.salesServices.selectedNcf.secInicial) < Number(vm.salesServices.selectedNcf.secFinal)){
            if(vm.comprovante !== '02'){
              vm.createModalComprovante = $modal({
                scope: $scope,
                'templateUrl': 'modules/sales/partials/comprovante.html',
                show: true,
                backdrop: 'static'
              });
            }
          }else{
            vm.comprovante = '';
            alertify.alert('El comprovante no tiene secuencia disponible').setHeader('<i class="fa fa-warning"></i> ');
          }
        }else{
          vm.comprovante = '';
          alertify.alert('El comprovante no tiene secuencia disponible').setHeader('<i class="fa fa-warning"></i> ');
        }
      }, function(err){
        alertify.alert('Se ha producido un error en el sistema').setHeader('<i class="fa fa-warning"></i> ');;
      });
    };

    // Cancels NCF entry and closes its modal.
    vm.cancelNcf = function(){
      vm.comprovante = '';
      vm.createModalComprovante.hide();
    };

    // Validates the customer's RNC/cedula and, if valid, assembles the full
    // NCF serial from the selected series parts.
    vm.saveNcf = function($isValid, ncf){
      if(!$isValid){
        if(vm.ncfServices.validateNCF(vm.rnc)){
          vm.ncfSerie = vm.salesServices.selectedNcf.serie + vm.salesServices.selectedNcf.dn + vm.salesServices.selectedNcf.pe + vm.salesServices.selectedNcf.ai + vm.salesServices.selectedNcf.code;
          vm.cancelNcf();
        }else{
          alertify.alert('RNC/Cédula invalida').setHeader('<i class="fa fa-warning"></i> ');
        }
      }
    };

    // Clears the assembled NCF and its selected series.
    vm.resetNCF = function(){
      vm.ncfSerie = null;
      vm.salesServices.selectedNcf = null;
    };

    // Opens the per-product price-list modal for the selected cart line.
    vm.openPriceModal = function(){
      if(vm.salesServices.selectedProduct !== null){
        vm.percent = 0;
        vm.priceChangeAmount = 0;
        vm.priceModal = $modal({
          scope: $scope,
          'templateUrl': 'modules/sales/partials/product-price.html',
          show: true,
          backdrop: 'static'
        });
      }
    };

    vm.cancelPriceChange = function(){
      vm.priceModal.hide();
      vm.salesServices.selectedProduct = null;
    };

    // Deletes a held sale and resets to a fresh order.
    vm.removeHoldSale = function(sale){
      vm.salesServices.delete(sale);
      vm.salesServices.selectedSale = null;
      vm.nexOrder();
    };

    // Infinite-scroll handlers for the customer and product lists.
    vm.loadMore = function(){
      vm.customerServices.scrollMore();
    };

    vm.loadMoreProduct = function() {
      vm.productServices.scrollMore();
    };

    // Assigns the picked customer to the sale and the cart, closes the modal.
    vm.setClient = function(client){
      vm.salesServices.selectedCustomer = client;
      vm.CartService.setClient(client);
      vm.createModal.hide();
    };

    // Triggers the browser print dialog after a 2s delay so the ticket markup
    // can render. Much of the element-targeting logic is disabled; today it
    // effectively just calls window.print().
    vm.printReport = function(){
      vm.isPrinting = true;
      // var defer = $q.defer();
      $timeout(function(){
        var printSection = document.getElementById('printSection');
        //var ticketContainer = document.getElementById('ticketContainer');
        function printElement(elem) {
          printSection.innerHTML = '';
          //console.log(elem);
          printSection.appendChild(elem);
          //window.print();
        }
        if (!printSection) {
          printSection = document.createElement('div');
          printSection.id = 'printSection';
          document.body.appendChild(printSection);
        }
        //var target = angular.element(document.querySelector('#printThisElement'));
        var elemToPrint = document.getElementById("printThisElement");
        window.print();
        // console.log(elemToPrint);
        // if (elemToPrint) {
        //   printElement(elemToPrint);
        // }
      }, 2000);
      //return defer.promise;
    };
  }
})();
LXD312569496/ByteX
HookProguard/src/main/java/com/ss/android/ugc/bytex/hookproguard/MemberSpecificationHolder.java
package com.ss.android.ugc.bytex.hookproguard;

import proguard.MemberSpecification;
import proguard.util.ClassNameParser;
import proguard.util.StringMatcher;
import proguard.util.StringParser;

/**
 * Wraps a ProGuard {@link MemberSpecification} together with pre-built
 * matchers for the member name and descriptor, so candidate members can be
 * tested cheaply against the spec.
 */
class MemberSpecificationHolder {

    private final MemberSpecification instance;
    // Null when the spec has no name pattern — any name matches then.
    private StringMatcher methodNameMatcher;
    // Null when the spec has no descriptor pattern — any descriptor matches then.
    private StringMatcher descMatcher;

    MemberSpecificationHolder(MemberSpecification instance, StringParser parser) {
        this.instance = instance;
        String namePattern = instance.name;
        if (namePattern != null) {
            this.methodNameMatcher = parser.parse(namePattern);
        }
        String descriptorPattern = instance.descriptor;
        if (descriptorPattern != null) {
            // Descriptors use class-name wildcard syntax, hence the dedicated parser.
            this.descMatcher = new ClassNameParser().parse(descriptorPattern);
        }
    }

    MemberSpecification getInstance() {
        return instance;
    }

    /** Returns true when both name and descriptor satisfy their (optional) patterns. */
    boolean match(String name, String desc) {
        if (methodNameMatcher != null && !methodNameMatcher.matches(name)) {
            return false;
        }
        return descMatcher == null || descMatcher.matches(desc);
    }
}
lebui89x/maychufoodnow
src/main/java/com/food/ordering/zinger/model/notification/NotificationModel.java
<filename>src/main/java/com/food/ordering/zinger/model/notification/NotificationModel.java package com.food.ordering.zinger.model.notification; import com.food.ordering.zinger.constant.Enums; public class NotificationModel { Enums.NotificationType type; String title; String message; String payload; public Enums.NotificationType getType() { return type; } public void setType(Enums.NotificationType type) { this.type = type; } public String getTitle() { return title; } public void setTitle(String title) { this.title = title; } public String getMessage() { return message; } public void setMessage(String message) { this.message = message; } public String getPayload() { return payload; } public void setPayload(String payload) { this.payload = payload; } @Override public String toString() { return "NotificationModel{" + "type=" + type + ", title='" + title + '\'' + ", message='" + message + '\'' + ", payload='" + payload + '\'' + '}'; } }
eddylu1s/PoGoTelegramBot
locales.js
// Locale configuration exposed to the app through environment variables.
// LOCALES is a JSON-encoded list of [code, display name] pairs; the string
// is kept verbatim so downstream JSON.parse consumers see identical input.
process.env.DEFAULT_LOCALE = 'nl';
process.env.LOCALES = '[["nl","Nederlands"], ["en","English"], ["es", "Español"]]';
ajchdev/outside-event
node_modules/@wordpress/hooks/build-module/createHooks.js
// NOTE(review): Babel-compiled output of @wordpress/hooks' createHooks module
// (see the source map reference at the bottom). Prefer editing the package
// source and rebuilding over hand-editing this file.
import _classCallCheck from "@babel/runtime/helpers/esm/classCallCheck";

/**
 * Internal dependencies
 */
import createAddHook from './createAddHook';
import createRemoveHook from './createRemoveHook';
import createHasHook from './createHasHook';
import createRunHook from './createRunHook';
import createCurrentHook from './createCurrentHook';
import createDoingHook from './createDoingHook';
import createDidHook from './createDidHook';

/**
 * Internal class for constructing hooks. Use `createHooks()` function
 *
 * Note, it is necessary to expose this class to make its type public.
 *
 * @private
 */
export var _Hooks = function _Hooks() {
  _classCallCheck(this, _Hooks);

  // Prototype-less stores so hook names can never collide with Object keys;
  // `__current` tracks the stack of hooks currently being run.
  /** @type {import('.').Store} actions */
  this.actions = Object.create(null);
  this.actions.__current = [];

  /** @type {import('.').Store} filters */
  this.filters = Object.create(null);
  this.filters.__current = [];

  // Each public method is a closure bound to this instance and one store.
  this.addAction = createAddHook(this, 'actions');
  this.addFilter = createAddHook(this, 'filters');
  this.removeAction = createRemoveHook(this, 'actions');
  this.removeFilter = createRemoveHook(this, 'filters');
  this.hasAction = createHasHook(this, 'actions');
  this.hasFilter = createHasHook(this, 'filters');
  this.removeAllActions = createRemoveHook(this, 'actions', true);
  this.removeAllFilters = createRemoveHook(this, 'filters', true);
  this.doAction = createRunHook(this, 'actions');
  this.applyFilters = createRunHook(this, 'filters', true);
  this.currentAction = createCurrentHook(this, 'actions');
  this.currentFilter = createCurrentHook(this, 'filters');
  this.doingAction = createDoingHook(this, 'actions');
  this.doingFilter = createDoingHook(this, 'filters');
  this.didAction = createDidHook(this, 'actions');
  this.didFilter = createDidHook(this, 'filters');
};

/** @typedef {_Hooks} Hooks */

/**
 * Returns an instance of the hooks object.
 *
 * @return {Hooks} A Hooks instance.
 */
function createHooks() {
  return new _Hooks();
}

export default createHooks;
//# sourceMappingURL=createHooks.js.map
vorushin/moodbox_aka_risovaska
client/moc_qtsingleapplication.cpp
/****************************************************************************
** Meta object code from reading C++ file 'qtsingleapplication.h'
**
** Created: Tue May 5 10:34:51 2009
**      by: The Qt Meta Object Compiler version 61 (Qt 4.5.1)
**
** WARNING! All changes made in this file will be lost!
** (Generated by moc — regenerate from qtsingleapplication.h instead of
** hand-editing.)
*****************************************************************************/

#include "qtsingleapplication.h"
#if !defined(Q_MOC_OUTPUT_REVISION)
#error "The header file 'qtsingleapplication.h' doesn't include <QObject>."
#elif Q_MOC_OUTPUT_REVISION != 61
#error "This file was generated using the moc from 4.5.1. It"
#error "cannot be used with the include files from this version of Qt."
#error "(The moc has changed too much.)"
#endif

QT_BEGIN_MOC_NAMESPACE
// Introspection table: counts/offsets into the string table below, then one
// 5-tuple per signal/slot (signature, parameters, type, tag, flags).
static const uint qt_meta_data_QtSingleApplication[] = {

 // content:
       2,       // revision
       0,       // classname
       0,    0, // classinfo
       4,   12, // methods
       0,    0, // properties
       0,    0, // enums/sets
       0,    0, // constructors

 // signals: signature, parameters, type, tag, flags
      29,   21,   20,   20, 0x05,

 // slots: signature, parameters, type, tag, flags
      75,   59,   54,   20, 0x0a,
     100,   21,   54,   20, 0x2a,
     121,   20,   20,   20, 0x0a,

       0        // eod
};

// NUL-separated string table; the numeric offsets above index into it.
static const char qt_meta_stringdata_QtSingleApplication[] = {
    "QtSingleApplication\0\0message\0"
    "messageReceived(QString)\0bool\0"
    "message,timeout\0sendMessage(QString,int)\0"
    "sendMessage(QString)\0activateWindow()\0"
};

const QMetaObject QtSingleApplication::staticMetaObject = {
    { &QApplication::staticMetaObject, qt_meta_stringdata_QtSingleApplication,
      qt_meta_data_QtSingleApplication, 0 }
};

const QMetaObject *QtSingleApplication::metaObject() const
{
    return &staticMetaObject;
}

void *QtSingleApplication::qt_metacast(const char *_clname)
{
    if (!_clname) return 0;
    if (!strcmp(_clname, qt_meta_stringdata_QtSingleApplication))
        return static_cast<void*>(const_cast< QtSingleApplication*>(this));
    return QApplication::qt_metacast(_clname);
}

// Dispatches meta-calls by local method index (0 = signal, 1-3 = slots),
// after letting the base class consume its own ids.
int QtSingleApplication::qt_metacall(QMetaObject::Call _c, int _id, void **_a)
{
    _id = QApplication::qt_metacall(_c, _id, _a);
    if (_id < 0)
        return _id;
    if (_c == QMetaObject::InvokeMetaMethod) {
        switch (_id) {
        case 0: messageReceived((*reinterpret_cast< const QString(*)>(_a[1]))); break;
        case 1: { bool _r = sendMessage((*reinterpret_cast< const QString(*)>(_a[1])),(*reinterpret_cast< int(*)>(_a[2])));
            if (_a[0]) *reinterpret_cast< bool*>(_a[0]) = _r; } break;
        case 2: { bool _r = sendMessage((*reinterpret_cast< const QString(*)>(_a[1])));
            if (_a[0]) *reinterpret_cast< bool*>(_a[0]) = _r; } break;
        case 3: activateWindow(); break;
        default: ;
        }
        _id -= 4;
    }
    return _id;
}

// SIGNAL 0
void QtSingleApplication::messageReceived(const QString & _t1)
{
    void *_a[] = { 0, const_cast<void*>(reinterpret_cast<const void*>(&_t1)) };
    QMetaObject::activate(this, &staticMetaObject, 0, _a);
}
QT_END_MOC_NAMESPACE
schloepke/gamemachine
server/java/server/src/user/java/plugins/core/FactionService.java
package plugins.core;

import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.base.Strings;

import io.gamemachine.messages.FactionStanding;
import io.gamemachine.messages.FactionStandings;
import io.gamemachine.messages.Factions;
import plugins.core.combat.ClientDbLoader;
import io.gamemachine.core.CharacterService;
import io.gamemachine.messages.Character;

/**
 * Singleton service that resolves and caches faction standings.
 *
 * <p>Standings come in three flavors ({@code FactionStanding.Type}):
 * character-to-character, character-to-faction, and faction-to-faction.
 * All standings are cached in memory under a composite key produced by
 * {@link #getKey}; faction-to-faction standings are seeded from the client
 * catalog at construction and must exist for every faction pair.
 */
public class FactionService {

    private final Logger logger = LoggerFactory.getLogger(FactionService.class);
    private CharacterService cs;

    // Cache keyed by "<typeNumber>_<me>_<them>" — see the getKey overloads.
    private ConcurrentHashMap<String, FactionStanding> factionStandings =
            new ConcurrentHashMap<String, FactionStanding>();

    private FactionService() {
        cs = CharacterService.instance();
        saveClientCatalog();
        loadStandings();
    }

    /** Initialization-on-demand holder idiom: thread-safe lazy singleton. */
    private static class LazyHolder {
        private static final FactionService INSTANCE = new FactionService();
    }

    public static FactionService instance() {
        return LazyHolder.INSTANCE;
    }

    /** Persists a standing and refreshes its in-memory cache entry. */
    public void saveStanding(FactionStanding standing) {
        FactionStanding.db().save(standing);
        loadStanding(standing);
    }

    /** Returns a snapshot container of every cached standing. */
    public FactionStandings getFactionStandings() {
        FactionStandings standings = new FactionStandings();
        // FIX: was a raw `new ArrayList(...)` — use the typed diamond form.
        standings.standings = new ArrayList<>(factionStandings.values());
        return standings;
    }

    /**
     * Returns the standings relevant to one character: the standings where it
     * is the "me" side, plus every global faction-to-faction standing.
     *
     * @param characterId id of the character whose standings are requested
     */
    public FactionStandings getFactionStandings(String characterId) {
        FactionStandings standingsContainer = new FactionStandings();
        List<FactionStanding> standings = new ArrayList<FactionStanding>();
        for (FactionStanding standing : factionStandings.values()) {
            if (!Strings.isNullOrEmpty(standing.meCharacterId) && standing.meCharacterId.equals(characterId)) {
                standings.add(standing);
            } else if (standing.type == FactionStanding.Type.FactionToFaction) {
                standings.add(standing);
            }
        }
        standingsContainer.standings = standings;
        return standingsContainer;
    }

    /**
     * Resolves a character-to-character standing, checking the cache, then the
     * database, and finally synthesizing one from the two characters' faction
     * standing. Synthesized standings are NOT cached or persisted here.
     *
     * @param meId   id of the observing character
     * @param themId id of the observed character
     */
    public FactionStanding getStanding(String meId, String themId) {
        String key = getKey(meId, themId);
        FactionStanding standing;
        if (factionStandings.containsKey(key)) {
            standing = factionStandings.get(key);
        } else {
            Character me = cs.find(meId);
            Character them = cs.find(themId);
            String qs = "faction_standing_type = ? ANd faction_standing_me_character_id = ? ANd faction_standing_them_character_id = ?";
            standing = FactionStanding.db().findFirst(qs, FactionStanding.Type.CharacterToCharacter.getNumber(), me.id, them.id);
            if (standing == null) {
                standing = new FactionStanding();
                standing.id = UUID.randomUUID().toString();
                standing.type = FactionStanding.Type.CharacterToCharacter;
                standing.meCharacterId = me.id;
                // FIX: record the counterpart too; without it, saving this
                // standing later would produce a cache key with a null them-id.
                standing.themCharacterId = them.id;
                standing.standing = getStanding(me.faction, them.faction).standing;
            }
        }
        return standing;
    }

    /**
     * Resolves a character-to-faction standing, checking the cache, then the
     * database, and finally synthesizing one from the character's own faction.
     *
     * @param meId id of the observing character
     * @param them faction being observed
     */
    public FactionStanding getStanding(String meId, Factions.Faction them) {
        String key = getKey(meId, them);
        FactionStanding standing;
        if (factionStandings.containsKey(key)) {
            standing = factionStandings.get(key);
        } else {
            Character me = cs.find(meId);
            String qs = "faction_standing_type = ? ANd faction_standing_me_character_id = ? ANd faction_standing_them = ?";
            standing = FactionStanding.db().findFirst(qs, FactionStanding.Type.CharacterToFaction.getNumber(), me.id, them.getNumber());
            if (standing == null) {
                standing = new FactionStanding();
                standing.id = UUID.randomUUID().toString();
                // FIX: this is a character-to-faction standing; the original
                // copy-pasted CharacterToCharacter here and never set `them`,
                // so loadStanding() would have keyed it incorrectly.
                standing.type = FactionStanding.Type.CharacterToFaction;
                standing.meCharacterId = me.id;
                standing.them = them;
                standing.standing = getStanding(me.faction, them).standing;
            }
        }
        return standing;
    }

    /**
     * Returns the faction-to-faction standing. These are seeded from the
     * client catalog, so a miss indicates corrupt/missing catalog data.
     *
     * @throws RuntimeException if no standing exists for the pair
     */
    public FactionStanding getStanding(Factions.Faction me, Factions.Faction them) {
        String key = getKey(me, them);
        if (factionStandings.containsKey(key)) {
            return factionStandings.get(key);
        } else {
            throw new RuntimeException("No such factionStanding " + me.toString() + " -> " + them.toString());
        }
    }

    /** Inserts a standing into the cache under its type-specific key. */
    private void loadStanding(FactionStanding standing) {
        String key = null;
        if (standing.type == FactionStanding.Type.CharacterToCharacter) {
            key = getKey(standing.meCharacterId, standing.themCharacterId);
        } else if (standing.type == FactionStanding.Type.CharacterToFaction) {
            key = getKey(standing.meCharacterId, standing.them);
        } else if (standing.type == FactionStanding.Type.FactionToFaction) {
            key = getKey(standing.me, standing.them);
        } else {
            throw new RuntimeException("Invalid faction type " + standing.type.toString());
        }
        factionStandings.put(key, standing);
    }

    /** Cache key for a character-to-character standing. */
    private String getKey(String meId, String themId) {
        return FactionStanding.Type.CharacterToCharacter.getNumber() + "_" + meId + "_" + themId;
    }

    /** Cache key for a character-to-faction standing. */
    private String getKey(String meId, Factions.Faction them) {
        return FactionStanding.Type.CharacterToFaction.getNumber() + "_" + meId + "_" + them.getNumber();
    }

    /** Cache key for a faction-to-faction standing. */
    private String getKey(Factions.Faction me, Factions.Faction them) {
        return FactionStanding.Type.FactionToFaction.getNumber() + "_" + me.getNumber() + "_" + them.getNumber();
    }

    /** Persists the client-shipped faction-to-faction catalog into the db. */
    private void saveClientCatalog() {
        FactionStandings standings = ClientDbLoader.getFactionStandings();
        for (FactionStanding standing : standings.standings) {
            FactionStanding.db().save(standing);
        }
    }

    /** Warms the in-memory cache with every persisted standing. */
    private void loadStandings() {
        List<FactionStanding> standings = FactionStanding.db().findAll();
        for (FactionStanding standing : standings) {
            loadStanding(standing);
        }
    }
}
mirotik666/-web-project-portfol
node_modules/ramda-fantasy/src/lift3.js
<filename>node_modules/ramda-fantasy/src/lift3.js var curryN = require('ramda/src/curryN'); module.exports = curryN(4, function lift3(f, a1, a2, a3) { return a1.map(f).ap(a2).ap(a3); });
artyomtserashkovich/HostOcean
HostOcean/host-ocean/src/state/signalr/index.js
import * as queueActions from "./../../modules/mainpage/actions/queuesActions"; import * as requestsActions from "./../../modules/requests/actions"; export default (emit, connection) => { connection.on("onUserLeftQueue", data => { emit(queueActions.removeUserFromQueue(data)); }); connection.on("onUserTakeQueue", data => { emit(queueActions.addUserToQueue(data)); }); connection.on("onUserQueuesSwap", data => { emit(queueActions.swapUsers(data)) }); connection.on("onRequestCreated", data => { emit(requestsActions.requestCreated(data)); }); connection.on("onRequestUpdated", data => { emit(requestsActions.requestUpdated(data)); }); };
ampatspell/ember-cli-documents
addon/document/internal/-query-loader-state.js
<filename>addon/document/internal/-query-loader-state.js import createState from './-create-state'; const defaults = { isLoading: false, isLoaded: false, isError: false, error: null }; const computed = [ 'isLoadable' ]; const stateMixin = Class => class QueryLoaderStateMixin extends Class { constructor(loader) { super(); this._loader = loader; } get isLoadable() { return this._loader._isLoadable; } onLoading(changed) { this.set({ isLoading: true, isError: false, error: null }, changed); } onLoaded(changed) { this.set({ isLoading: false, isLoaded: true, isError: false, error: null }, changed); } onError(error, changed) { this.set({ isLoading: false, isError: true, error }, changed); } onLoadScheduled(changed) { this.set({ isLoading: true, isError: false, error: null }, changed); } onReloadScheduled(changed) { this.onLoadScheduled(changed); this.set({ isLoaded: false }, changed); } } const extend = State => class QueryLoaderState extends stateMixin(State) {} const { keys, State } = createState({ defaults, computed, extend }); export { defaults, computed, stateMixin, keys }; export default State;
MinerArcana/Naming
src/main/java/com/minerarcana/naming/worlddata/WeakEntityReference.java
<reponame>MinerArcana/Naming package com.minerarcana.naming.worlddata; import net.minecraft.entity.Entity; import net.minecraft.world.IWorld; import net.minecraft.world.server.ServerWorld; import javax.annotation.Nullable; import java.lang.ref.WeakReference; import java.util.UUID; public class WeakEntityReference { private final UUID entityUUID; private WeakReference<Entity> entityReference; public WeakEntityReference(UUID entityUUID) { this.entityUUID = entityUUID; } public WeakEntityReference(Entity entity) { this.entityReference = new WeakReference<>(entity); this.entityUUID = entity.getUUID(); } @Nullable public Entity get(IWorld world) { if (entityReference.get() == null && world instanceof ServerWorld) { entityReference = new WeakReference<>(((ServerWorld) world).getEntity(entityUUID)); } return entityReference.get(); } }
FCC-Alumni/alumni-network
client/src/assets/dropdowns/countries.js
// Country dropdown options and code→name lookup, following the Semantic-UI
// flag component's country list (flag/key/value are Semantic-UI flag codes).
// FIX: several names had been replaced by "<NAME>" placeholders (a redaction
// artifact); they are restored here from the Semantic-UI flag name list:
// lc = Saint Lucia, lk = Sri Lanka, pm = Saint Pierre, pr = Puerto Rico,
// sh = Saint Helena, sj = Svalbard, Jan Mayen Islands, vc = Saint Vincent.
export const countries = [
  { key: "", text: "---", value: "" },
  { flag: "ad", key: "ad", text: "Andorra", value: "ad" },
  { flag: "ae", key: "ae", text: "United Arab Emirates", value: "ae" },
  { flag: "af", key: "af", text: "Afghanistan", value: "af" },
  { flag: "ax", key: "ax", text: "Åland Islands", value: "ax" },
  { flag: "ag", key: "ag", text: "Antigua", value: "ag" },
  { flag: "ai", key: "ai", text: "Anguilla", value: "ai" },
  { flag: "al", key: "al", text: "Albania", value: "al" },
  { flag: "am", key: "am", text: "Armenia", value: "am" },
  { flag: "an", key: "an", text: "Netherlands Antilles", value: "an" },
  { flag: "ao", key: "ao", text: "Angola", value: "ao" },
  { flag: "ar", key: "ar", text: "Argentina", value: "ar" },
  { flag: "as", key: "as", text: "American Samoa", value: "as" },
  { flag: "at", key: "at", text: "Austria", value: "at" },
  { flag: "au", key: "au", text: "Australia", value: "au" },
  { flag: "aw", key: "aw", text: "Aruba", value: "aw" },
  { flag: "az", key: "az", text: "Azerbaijan", value: "az" },
  { flag: "ba", key: "ba", text: "Bosnia", value: "ba" },
  { flag: "bb", key: "bb", text: "Barbados", value: "bb" },
  { flag: "bd", key: "bd", text: "Bangladesh", value: "bd" },
  { flag: "be", key: "be", text: "Belgium", value: "be" },
  { flag: "bf", key: "bf", text: "Burkina Faso", value: "bf" },
  { flag: "bg", key: "bg", text: "Bulgaria", value: "bg" },
  { flag: "bh", key: "bh", text: "Bahrain", value: "bh" },
  { flag: "bi", key: "bi", text: "Burundi", value: "bi" },
  { flag: "bj", key: "bj", text: "Benin", value: "bj" },
  { flag: "bm", key: "bm", text: "Bermuda", value: "bm" },
  { flag: "bn", key: "bn", text: "Brunei", value: "bn" },
  { flag: "bo", key: "bo", text: "Bolivia", value: "bo" },
  { flag: "br", key: "br", text: "Brazil", value: "br" },
  { flag: "bs", key: "bs", text: "Bahamas", value: "bs" },
  { flag: "bt", key: "bt", text: "Bhutan", value: "bt" },
  { flag: "bv", key: "bv", text: "Bouvet Island", value: "bv" },
  { flag: "bw", key: "bw", text: "Botswana", value: "bw" },
  { flag: "by", key: "by", text: "Belarus", value: "by" },
  { flag: "bz", key: "bz", text: "Belize", value: "bz" },
  { flag: "ca", key: "ca", text: "Canada", value: "ca" },
  { flag: "cc", key: "cc", text: "Cocos Islands", value: "cc" },
  { flag: "cd", key: "cd", text: "Congo", value: "cd" },
  { flag: "cf", key: "cf", text: "Central African Republic", value: "cf" },
  { flag: "cg", key: "cg", text: "Congo Brazzaville", value: "cg" },
  { flag: "ch", key: "ch", text: "Switzerland", value: "ch" },
  { flag: "ci", key: "ci", text: "Cote Divoire", value: "ci" },
  { flag: "ck", key: "ck", text: "Cook Islands", value: "ck" },
  { flag: "cl", key: "cl", text: "Chile", value: "cl" },
  { flag: "cm", key: "cm", text: "Cameroon", value: "cm" },
  { flag: "cn", key: "cn", text: "China", value: "cn" },
  { flag: "co", key: "co", text: "Colombia", value: "co" },
  { flag: "cr", key: "cr", text: "Costa Rica", value: "cr" },
  { flag: "cs", key: "cs", text: "Serbia", value: "cs" },
  { flag: "cu", key: "cu", text: "Cuba", value: "cu" },
  { flag: "cv", key: "cv", text: "Cape Verde", value: "cv" },
  { flag: "cx", key: "cx", text: "Christmas Island", value: "cx" },
  { flag: "cy", key: "cy", text: "Cyprus", value: "cy" },
  { flag: "cz", key: "cz", text: "Czech Republic", value: "cz" },
  { flag: "de", key: "de", text: "Germany", value: "de" },
  { flag: "dj", key: "dj", text: "Djibouti", value: "dj" },
  { flag: "dk", key: "dk", text: "Denmark", value: "dk" },
  { flag: "dm", key: "dm", text: "Dominica", value: "dm" },
  { flag: "do", key: "do", text: "Dominican Republic", value: "do" },
  { flag: "dz", key: "dz", text: "Algeria", value: "dz" },
  { flag: "ec", key: "ec", text: "Ecuador", value: "ec" },
  { flag: "ee", key: "ee", text: "Estonia", value: "ee" },
  { flag: "eg", key: "eg", text: "Egypt", value: "eg" },
  { flag: "eh", key: "eh", text: "Western Sahara", value: "eh" },
  { flag: "er", key: "er", text: "Eritrea", value: "er" },
  { flag: "es", key: "es", text: "Spain", value: "es" },
  { flag: "et", key: "et", text: "Ethiopia", value: "et" },
  { flag: "eu", key: "eu", text: "European Union", value: "eu" },
  { flag: "fi", key: "fi", text: "Finland", value: "fi" },
  { flag: "fj", key: "fj", text: "Fiji", value: "fj" },
  { flag: "fk", key: "fk", text: "Falkland Islands", value: "fk" },
  { flag: "fm", key: "fm", text: "Micronesia", value: "fm" },
  { flag: "fo", key: "fo", text: "Faroe Islands", value: "fo" },
  { flag: "fr", key: "fr", text: "France", value: "fr" },
  { flag: "ga", key: "ga", text: "Gabon", value: "ga" },
  { flag: "gb", key: "gb", text: "United Kingdom", value: "gb" },
  { flag: "gb sct", key: "gb sct", text: "Scotland", value: "gb sct" },
  { flag: "gb wls", key: "gb wls", text: "Wales", value: "gb wls" },
  { flag: "gd", key: "gd", text: "Grenada", value: "gd" },
  { flag: "ge", key: "ge", text: "Georgia", value: "ge" },
  { flag: "gf", key: "gf", text: "French Guiana", value: "gf" },
  { flag: "gh", key: "gh", text: "Ghana", value: "gh" },
  { flag: "gi", key: "gi", text: "Gibraltar", value: "gi" },
  { flag: "gl", key: "gl", text: "Greenland", value: "gl" },
  { flag: "gm", key: "gm", text: "Gambia", value: "gm" },
  { flag: "gn", key: "gn", text: "Guinea", value: "gn" },
  { flag: "gp", key: "gp", text: "Guadeloupe", value: "gp" },
  { flag: "gq", key: "gq", text: "Equatorial Guinea", value: "gq" },
  { flag: "gr", key: "gr", text: "Greece", value: "gr" },
  { flag: "gs", key: "gs", text: "Sandwich Islands", value: "gs" },
  { flag: "gt", key: "gt", text: "Guatemala", value: "gt" },
  { flag: "gu", key: "gu", text: "Guam", value: "gu" },
  { flag: "gw", key: "gw", text: "Guinea-bissau", value: "gw" },
  { flag: "gy", key: "gy", text: "Guyana", value: "gy" },
  { flag: "hk", key: "hk", text: "Hong Kong", value: "hk" },
  { flag: "hm", key: "hm", text: "Heard Island", value: "hm" },
  { flag: "hn", key: "hn", text: "Honduras", value: "hn" },
  { flag: "hr", key: "hr", text: "Croatia", value: "hr" },
  { flag: "ht", key: "ht", text: "Haiti", value: "ht" },
  { flag: "hu", key: "hu", text: "Hungary", value: "hu" },
  { flag: "id", key: "id", text: "Indonesia", value: "id" },
  { flag: "ie", key: "ie", text: "Ireland", value: "ie" },
  { flag: "il", key: "il", text: "Israel", value: "il" },
  { flag: "in", key: "in", text: "India", value: "in" },
  { flag: "io", key: "io", text: "Indian Ocean Territory", value: "io" },
  { flag: "iq", key: "iq", text: "Iraq", value: "iq" },
  { flag: "ir", key: "ir", text: "Iran", value: "ir" },
  { flag: "is", key: "is", text: "Iceland", value: "is" },
  { flag: "it", key: "it", text: "Italy", value: "it" },
  { flag: "jm", key: "jm", text: "Jamaica", value: "jm" },
  { flag: "jo", key: "jo", text: "Jordan", value: "jo" },
  { flag: "jp", key: "jp", text: "Japan", value: "jp" },
  { flag: "ke", key: "ke", text: "Kenya", value: "ke" },
  { flag: "kg", key: "kg", text: "Kyrgyzstan", value: "kg" },
  { flag: "kh", key: "kh", text: "Cambodia", value: "kh" },
  { flag: "ki", key: "ki", text: "Kiribati", value: "ki" },
  { flag: "km", key: "km", text: "Comoros", value: "km" },
  { flag: "kn", key: "kn", text: "Saint Kitts And Nevis", value: "kn" },
  { flag: "kp", key: "kp", text: "North Korea", value: "kp" },
  { flag: "kr", key: "kr", text: "South Korea", value: "kr" },
  { flag: "kw", key: "kw", text: "Kuwait", value: "kw" },
  { flag: "ky", key: "ky", text: "Cayman Islands", value: "ky" },
  { flag: "kz", key: "kz", text: "Kazakhstan", value: "kz" },
  { flag: "la", key: "la", text: "Laos", value: "la" },
  { flag: "lb", key: "lb", text: "Lebanon", value: "lb" },
  { flag: "lc", key: "lc", text: "Saint Lucia", value: "lc" },
  { flag: "li", key: "li", text: "Liechtenstein", value: "li" },
  { flag: "lk", key: "lk", text: "Sri Lanka", value: "lk" },
  { flag: "lr", key: "lr", text: "Liberia", value: "lr" },
  { flag: "ls", key: "ls", text: "Lesotho", value: "ls" },
  { flag: "lt", key: "lt", text: "Lithuania", value: "lt" },
  { flag: "lu", key: "lu", text: "Luxembourg", value: "lu" },
  { flag: "lv", key: "lv", text: "Latvia", value: "lv" },
  { flag: "ly", key: "ly", text: "Libya", value: "ly" },
  { flag: "ma", key: "ma", text: "Morocco", value: "ma" },
  { flag: "mc", key: "mc", text: "Monaco", value: "mc" },
  { flag: "md", key: "md", text: "Moldova", value: "md" },
  { flag: "me", key: "me", text: "Montenegro", value: "me" },
  { flag: "mg", key: "mg", text: "Madagascar", value: "mg" },
  { flag: "mh", key: "mh", text: "Marshall Islands", value: "mh" },
  { flag: "mk", key: "mk", text: "Macedonia", value: "mk" },
  { flag: "ml", key: "ml", text: "Mali", value: "ml" },
  { flag: "mm", key: "mm", text: "Burma", value: "mm" },
  { flag: "mn", key: "mn", text: "Mongolia", value: "mn" },
  { flag: "mo", key: "mo", text: "Macau", value: "mo" },
  { flag: "mp", key: "mp", text: "Northern Mariana Islands", value: "mp" },
  { flag: "mq", key: "mq", text: "Martinique", value: "mq" },
  { flag: "mr", key: "mr", text: "Mauritania", value: "mr" },
  { flag: "ms", key: "ms", text: "Montserrat", value: "ms" },
  { flag: "mt", key: "mt", text: "Malta", value: "mt" },
  { flag: "mu", key: "mu", text: "Mauritius", value: "mu" },
  { flag: "mv", key: "mv", text: "Maldives", value: "mv" },
  { flag: "mw", key: "mw", text: "Malawi", value: "mw" },
  { flag: "mx", key: "mx", text: "Mexico", value: "mx" },
  { flag: "my", key: "my", text: "Malaysia", value: "my" },
  { flag: "mz", key: "mz", text: "Mozambique", value: "mz" },
  { flag: "na", key: "na", text: "Namibia", value: "na" },
  { flag: "nc", key: "nc", text: "New Caledonia", value: "nc" },
  { flag: "ne", key: "ne", text: "Niger", value: "ne" },
  { flag: "nf", key: "nf", text: "Norfolk Island", value: "nf" },
  { flag: "ng", key: "ng", text: "Nigeria", value: "ng" },
  { flag: "ni", key: "ni", text: "Nicaragua", value: "ni" },
  { flag: "nl", key: "nl", text: "Netherlands", value: "nl" },
  { flag: "no", key: "no", text: "Norway", value: "no" },
  { flag: "np", key: "np", text: "Nepal", value: "np" },
  { flag: "nr", key: "nr", text: "Nauru", value: "nr" },
  { flag: "nu", key: "nu", text: "Niue", value: "nu" },
  { flag: "nz", key: "nz", text: "New Zealand", value: "nz" },
  { flag: "om", key: "om", text: "Oman", value: "om" },
  { flag: "pa", key: "pa", text: "Panama", value: "pa" },
  { flag: "pe", key: "pe", text: "Peru", value: "pe" },
  { flag: "pf", key: "pf", text: "French Polynesia", value: "pf" },
  { flag: "pg", key: "pg", text: "New Guinea", value: "pg" },
  { flag: "ph", key: "ph", text: "Philippines", value: "ph" },
  { flag: "pk", key: "pk", text: "Pakistan", value: "pk" },
  { flag: "pl", key: "pl", text: "Poland", value: "pl" },
  { flag: "pm", key: "pm", text: "Saint Pierre", value: "pm" },
  { flag: "pn", key: "pn", text: "Pitcairn Islands", value: "pn" },
  { flag: "pr", key: "pr", text: "Puerto Rico", value: "pr" },
  { flag: "ps", key: "ps", text: "Palestine", value: "ps" },
  { flag: "pt", key: "pt", text: "Portugal", value: "pt" },
  { flag: "pw", key: "pw", text: "Palau", value: "pw" },
  { flag: "py", key: "py", text: "Paraguay", value: "py" },
  { flag: "qa", key: "qa", text: "Qatar", value: "qa" },
  { flag: "re", key: "re", text: "Reunion", value: "re" },
  { flag: "ro", key: "ro", text: "Romania", value: "ro" },
  { flag: "rs", key: "rs", text: "Serbia", value: "rs" },
  { flag: "ru", key: "ru", text: "Russia", value: "ru" },
  { flag: "rw", key: "rw", text: "Rwanda", value: "rw" },
  { flag: "sa", key: "sa", text: "Saudi Arabia", value: "sa" },
  { flag: "sb", key: "sb", text: "Solomon Islands", value: "sb" },
  { flag: "sc", key: "sc", text: "Seychelles", value: "sc" },
  { flag: "sd", key: "sd", text: "Sudan", value: "sd" },
  { flag: "se", key: "se", text: "Sweden", value: "se" },
  { flag: "sg", key: "sg", text: "Singapore", value: "sg" },
  { flag: "sh", key: "sh", text: "Saint Helena", value: "sh" },
  { flag: "si", key: "si", text: "Slovenia", value: "si" },
  { flag: "sj", key: "sj", text: "Svalbard, Jan Mayen Islands", value: "sj" },
  { flag: "sk", key: "sk", text: "Slovakia", value: "sk" },
  { flag: "sl", key: "sl", text: "Sierra Leone", value: "sl" },
  { flag: "sm", key: "sm", text: "San Marino", value: "sm" },
  { flag: "sn", key: "sn", text: "Senegal", value: "sn" },
  { flag: "so", key: "so", text: "Somalia", value: "so" },
  { flag: "sr", key: "sr", text: "Suriname", value: "sr" },
  { flag: "st", key: "st", text: "Sao Tome", value: "st" },
  { flag: "sv", key: "sv", text: "El Salvador", value: "sv" },
  { flag: "sy", key: "sy", text: "Syria", value: "sy" },
  { flag: "sz", key: "sz", text: "Swaziland", value: "sz" },
  { flag: "tc", key: "tc", text: "Turks and Caicos Islands", value: "tc" },
  { flag: "td", key: "td", text: "Chad", value: "td" },
  { flag: "tf", key: "tf", text: "French Territories", value: "tf" },
  { flag: "tg", key: "tg", text: "Togo", value: "tg" },
  { flag: "th", key: "th", text: "Thailand", value: "th" },
  { flag: "tj", key: "tj", text: "Tajikistan", value: "tj" },
  { flag: "tk", key: "tk", text: "Tokelau", value: "tk" },
  { flag: "tl", key: "tl", text: "Timorleste", value: "tl" },
  { flag: "tm", key: "tm", text: "Turkmenistan", value: "tm" },
  { flag: "tn", key: "tn", text: "Tunisia", value: "tn" },
  { flag: "to", key: "to", text: "Tonga", value: "to" },
  { flag: "tr", key: "tr", text: "Turkey", value: "tr" },
  { flag: "tt", key: "tt", text: "Trinidad", value: "tt" },
  { flag: "tv", key: "tv", text: "Tuvalu", value: "tv" },
  { flag: "tw", key: "tw", text: "Taiwan", value: "tw" },
  { flag: "tz", key: "tz", text: "Tanzania", value: "tz" },
  { flag: "ua", key: "ua", text: "Ukraine", value: "ua" },
  { flag: "ug", key: "ug", text: "Uganda", value: "ug" },
  { flag: "um", key: "um", text: "Us Minor Islands", value: "um" },
  { flag: "us", key: "us", text: "United States", value: "us" },
  { flag: "uy", key: "uy", text: "Uruguay", value: "uy" },
  { flag: "uz", key: "uz", text: "Uzbekistan", value: "uz" },
  { flag: "va", key: "va", text: "Vatican City", value: "va" },
  { flag: "vc", key: "vc", text: "Saint Vincent", value: "vc" },
  { flag: "ve", key: "ve", text: "Venezuela", value: "ve" },
  { flag: "vg", key: "vg", text: "British Virgin Islands", value: "vg" },
  { flag: "vi", key: "vi", text: "Us Virgin Islands", value: "vi" },
  { flag: "vn", key: "vn", text: "Vietnam", value: "vn" },
  { flag: "vu", key: "vu", text: "Vanuatu", value: "vu" },
  { flag: "wf", key: "wf", text: "Wallis And Futuna", value: "wf" },
  { flag: "ws", key: "ws", text: "Samoa", value: "ws" },
  { flag: "ye", key: "ye", text: "Yemen", value: "ye" },
  { flag: "yt", key: "yt", text: "Mayotte", value: "yt" },
  { flag: "za", key: "za", text: "South Africa", value: "za" },
  { flag: "zm", key: "zm", text: "Zambia", value: "zm" },
  { flag: "zw", key: "zw", text: "Zimbabwe", value: "zw" },
];

// Flat code → display-name lookup. "gb sct"/"gb wls" appear here with
// underscores because a space is not a valid shorthand identifier key.
export const countryCodes = {
  ad: "Andorra", ae: "United Arab Emirates", af: "Afghanistan", ag: "Antigua",
  ai: "Anguilla", al: "Albania", am: "Armenia", an: "Netherlands Antilles",
  ao: "Angola", ar: "Argentina", as: "American Samoa", at: "Austria",
  au: "Australia", aw: "Aruba", ax: "Åland Islands", az: "Azerbaijan",
  ba: "Bosnia", bb: "Barbados", bd: "Bangladesh", be: "Belgium",
  bf: "Burkina Faso", bg: "Bulgaria", bh: "Bahrain", bi: "Burundi",
  bj: "Benin", bm: "Bermuda", bn: "Brunei", bo: "Bolivia", br: "Brazil",
  bs: "Bahamas", bt: "Bhutan", bv: "Bouvet Island", bw: "Botswana",
  by: "Belarus", bz: "Belize", ca: "Canada", cc: "Cocos Islands",
  cd: "Congo", cf: "Central African Republic", cg: "Congo Brazzaville",
  ch: "Switzerland", ci: "Cote Divoire", ck: "Cook Islands", cl: "Chile",
  cm: "Cameroon", cn: "China", co: "Colombia", cr: "Costa Rica",
  cs: "Serbia", cu: "Cuba", cv: "Cape Verde", cx: "Christmas Island",
  cy: "Cyprus", cz: "Czech Republic", de: "Germany", dj: "Djibouti",
  dk: "Denmark", dm: "Dominica", do: "Dominican Republic", dz: "Algeria",
  ec: "Ecuador", ee: "Estonia", eg: "Egypt", eh: "Western Sahara",
  er: "Eritrea", es: "Spain", et: "Ethiopia", eu: "European Union",
  fi: "Finland", fj: "Fiji", fk: "Falkland Islands", fm: "Micronesia",
  fo: "Faroe Islands", fr: "France", ga: "Gabon", gb: "United Kingdom",
  gb_sct: "Scotland", gb_wls: "Wales", gd: "Grenada", ge: "Georgia",
  gf: "French Guiana", gh: "Ghana", gi: "Gibraltar", gl: "Greenland",
  gm: "Gambia", gn: "Guinea", gp: "Guadeloupe", gq: "Equatorial Guinea",
  gr: "Greece", gs: "Sandwich Islands", gt: "Guatemala", gu: "Guam",
  gw: "Guinea-bissau", gy: "Guyana", hk: "Hong Kong", hm: "Heard Island",
  hn: "Honduras", hr: "Croatia", ht: "Haiti", hu: "Hungary",
  id: "Indonesia", ie: "Ireland", il: "Israel", in: "India",
  io: "Indian Ocean Territory", iq: "Iraq", ir: "Iran", is: "Iceland",
  it: "Italy", jm: "Jamaica", jo: "Jordan", jp: "Japan", ke: "Kenya",
  kg: "Kyrgyzstan", kh: "Cambodia", ki: "Kiribati", km: "Comoros",
  kn: "Saint Kitts And Nevis", kp: "North Korea", kr: "South Korea",
  kw: "Kuwait", ky: "Cayman Islands", kz: "Kazakhstan", la: "Laos",
  lb: "Lebanon", lc: "Saint Lucia", li: "Liechtenstein", lk: "Sri Lanka",
  lr: "Liberia", ls: "Lesotho", lt: "Lithuania", lu: "Luxembourg",
  lv: "Latvia", ly: "Libya", ma: "Morocco", mc: "Monaco", md: "Moldova",
  me: "Montenegro", mg: "Madagascar", mh: "Marshall Islands",
  mk: "Macedonia", ml: "Mali", mm: "Burma", mn: "Mongolia", mo: "Macau",
  mp: "Northern Mariana Islands", mq: "Martinique", mr: "Mauritania",
  ms: "Montserrat", mt: "Malta", mu: "Mauritius", mv: "Maldives",
  mw: "Malawi", mx: "Mexico", my: "Malaysia", mz: "Mozambique",
  na: "Namibia", nc: "New Caledonia", ne: "Niger", nf: "Norfolk Island",
  ng: "Nigeria", ni: "Nicaragua", nl: "Netherlands", no: "Norway",
  np: "Nepal", nr: "Nauru", nu: "Niue", nz: "New Zealand", om: "Oman",
  pa: "Panama", pe: "Peru", pf: "French Polynesia", pg: "New Guinea",
  ph: "Philippines", pk: "Pakistan", pl: "Poland", pm: "Saint Pierre",
  pn: "Pitcairn Islands", pr: "Puerto Rico", ps: "Palestine",
  pt: "Portugal", pw: "Palau", py: "Paraguay", qa: "Qatar", re: "Reunion",
  ro: "Romania", rs: "Serbia", ru: "Russia", rw: "Rwanda",
  sa: "Saudi Arabia", sb: "Solomon Islands", sc: "Seychelles",
  sd: "Sudan", se: "Sweden", sg: "Singapore", sh: "Saint Helena",
  si: "Slovenia", sj: "Svalbard, Jan Mayen Islands", sk: "Slovakia",
  sl: "Sierra Leone", sm: "San Marino", sn: "Senegal", so: "Somalia",
  sr: "Suriname", st: "Sao Tome", sv: "El Salvador", sy: "Syria",
  sz: "Swaziland", tc: "Turks and Caicos Islands", td: "Chad",
  tf: "French Territories", tg: "Togo", th: "Thailand", tj: "Tajikistan",
  tk: "Tokelau", tl: "Timorleste", tm: "Turkmenistan", tn: "Tunisia",
  to: "Tonga", tr: "Turkey", tt: "Trinidad", tv: "Tuvalu", tw: "Taiwan",
  tz: "Tanzania", ua: "Ukraine", ug: "Uganda", um: "Us Minor Islands",
  us: "United States", uy: "Uruguay", uz: "Uzbekistan",
  va: "Vatican City", vc: "Saint Vincent", ve: "Venezuela",
  vg: "British Virgin Islands", vi: "Us Virgin Islands", vn: "Vietnam",
  vu: "Vanuatu", wf: "Wallis And Futuna", ws: "Samoa", ye: "Yemen",
  yt: "Mayotte", za: "South Africa", zm: "Zambia", zw: "Zimbabwe"
};
Wujingli/OpenWebGlobeDataProcessing
source/apps/tilerenderer/main.cpp
/******************************************************************************* # ____ __ __ _ _____ _ _ # # / __ \ \ \ / / | | / ____| | | | # # | | | |_ __ ___ _ __ \ /\ / /__| |__ | | __| | ___ | |__ ___ # # | | | | '_ \ / _ \ '_ \ \/ \/ / _ \ '_ \| | |_ | |/ _ \| '_ \ / _ \ # # | |__| | |_) | __/ | | \ /\ / __/ |_) | |__| | | (_) | |_) | __/ # # \____/| .__/ \___|_| |_|\/ \/ \___|_.__/ \_____|_|\___/|_.__/ \___| # # | | # # |_| # # # # (c) 2011 by # # University of Applied Sciences Northwestern Switzerland # # Institute of Geomatics Engineering # # <EMAIL> # ******************************************************************************** * Licensed under MIT License. Read the file LICENSE for more information * *******************************************************************************/ // This is the version without mpi intended for regular // workstations. Multi cores are supported (OpenMP) and highly recommended. //------------------------------------------------------------------------------ // Some code adapted from: generate_tiles.py // Found at: http://trac.openstreetmap.org/browser/applications/rendering/mapnik //------------------------------------------------------------------------------ #include <mapnik/map.hpp> #include <mapnik/datasource_cache.hpp> #include <mapnik/font_engine_freetype.hpp> #include <mapnik/agg_renderer.hpp> #include <mapnik/filter_factory.hpp> #include <mapnik/color_factory.hpp> #include <mapnik/image_util.hpp> #include <mapnik/config_error.hpp> #include <mapnik/load_map.hpp> #include <mapnik/envelope.hpp> #include <mapnik/proj_transform.hpp> #include <iostream> #include <string> #include <boost/filesystem.hpp> #include "render_tile.h" #include <string/FilenameUtils.h> #include <string/StringUtils.h> #include <io/FileSystem.h> #include <math/mathutils.h> #include "ogprocess.h" #include "app/ProcessingSettings.h" #include "errors.h" #include <boost/program_options.hpp> #include <omp.h> #include "functions.h" namespace po = 
boost::program_options; //------------------------------------------------------------------------------------ int main ( int argc , char** argv) { po::options_description desc("Program-Options"); desc.add_options() ("mapnik_dir", po::value<std::string>(), "mapnik path") ("map_file", po::value<std::string>(), "map configurations file") ("output_path", po::value<std::string>(), "output path") ("numthreads", po::value<int>(), "force number of threads") ("min_zoom", po::value<int>(), "[optional] min zoom level") ("max_zoom", po::value<int>(), "[optional] max zoom level") ("expired_list", po::value<std::string>(), "[optional] list of expired tiles for update rendering (global rendering will be disabled)") ("bounds", po::value<std::vector<double>>(), "[optional] boundaries (default: -180.0 -90.0 180.0 90.0)") ("verbose", "[optional] Verbose mode") ("no_override", "[opional] overriding existing tiles disabled") ("enable_locking", "[opional] lock files to prevent concurrency on parallel processes") ; po::positional_options_description p; p.add("bounds", -1); po::variables_map vm; po::store(po::command_line_parser(argc, argv).options(desc).positional(p).run(), vm); po::notify(vm); //--------------------------------------------------------------------------- // init options: boost::shared_ptr<ProcessingSettings> qSettings = ProcessingUtils::LoadAppSettings(); if (!qSettings) { std::cout << "Error in configuration! Check setup.xml\n"; return ERROR_CONFIG; } //--------------------------------------------------------------------------- // create logger boost::shared_ptr<Logger> qLogger = ProcessingUtils::CreateLogger("tile_renderer", qSettings); if (!qLogger) { std::cout << "Error in configuration! 
Check setup.xml\n"; return ERROR_CONFIG; } bool bError = false; bool bVerbose = false; bool bOverrideTiles = true; bool bLockEnabled = false; //---------------------------------------- // Read commandline try { po::store(po::parse_command_line(argc, argv, desc), vm); po::notify(vm); } catch (std::exception&) { bError = true; } std::string mapnik_dir; if(vm.count("mapnik_dir")) { mapnik_dir = vm["mapnik_dir"].as<std::string>(); if(!(mapnik_dir.at(mapnik_dir.length()-1) == '\\' || mapnik_dir.at(mapnik_dir.length()-1) == '/')) mapnik_dir = mapnik_dir + "/"; } else bError = true; std::string map_file; if(vm.count("map_file")) map_file = vm["map_file"].as<std::string>(); else bError = true; std::string output_path; if(vm.count("output_path")) { output_path = vm["output_path"].as<std::string>(); if(!(output_path.at(output_path.length()-1) == '\\' || output_path.at(output_path.length()-1) == '/')) output_path = output_path + "/"; } else bError = true; int minZoom = 1; if(vm.count("min_zoom")) minZoom = vm["min_zoom"].as<int>(); int maxZoom = 18; if(vm.count("max_zoom")) maxZoom = vm["max_zoom"].as<int>(); if(vm.count("verbose")) bVerbose = true; if(vm.count("no_override")) bOverrideTiles = false; if(vm.count("enable_locking")) bLockEnabled = true; bool bUpdateMode = false; std::string expire_list; if(vm.count("expire_list")) { expire_list = vm["expire_list"].as<std::string>(); bUpdateMode = true; } // CH Bounds double bounds[4] = {5.955870,46.818020,10.492030,47.808380}; double bounds[4] = {-180.0,-90.0,180.0,90.0}; if(vm.count("bounds")) { std::vector<double> dv = vm["bounds"].as<std::vector<double>>(); if(dv.size() != 4) bError = true; else { bounds[0] = dv[0]; bounds[1] = dv[1]; bounds[2] =dv [2]; bounds[3] = dv[3]; } } std::ostringstream oss; oss << "Render boundaries: " << bounds[0] << ", " << bounds[1] << ", " << bounds[2] << ", " << bounds[3] << "\n"; oss << "Render Map File: " << map_file << "\n"; oss << "Min-Zoom: " << minZoom << "\n"; oss << "Max-Zoom: " << 
maxZoom << "\n"; qLogger->Info(oss.str()); if (vm.count("numthreads")) { int n = vm["numthreads"].as<int>(); if (n>0 && n<65) { std::ostringstream oss; oss << "Forcing number of threads to " << n; qLogger->Info(oss.str()); omp_set_num_threads(n); } } //--------------------------------------------------------------------------- if (bError) { std::ostringstream oss; qLogger->Error("Wrong parameters!"); std::ostringstream sstr; sstr << desc; qLogger->Info("\n" + sstr.str()); return ERROR_PARAMS; } bool tsmScheme = false; using namespace mapnik; try { { std::ostringstream oss; oss << "Generating map ...... \n"; qLogger->Info(oss.str()); } GoogleProjection gProj = GoogleProjection(maxZoom); // maxlevel 12 //projection merc = projection("+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0 +k=1.0 +units=m +nadgrids=@null +no_defs +over"); //projection longlat = projection("+proj=latlong +datum=WGS84"); double dummy = 0.0; //proj_transform transform = proj_transform(longlat,merc); std::ostringstream oss; #ifdef _DEBUG std::string plugin_path = mapnik_dir + "input/debug/"; oss << "..load plugins from "<<plugin_path<<"\n"; #else std::string plugin_path = mapnik_dir + "input/release/"; oss << "..load plugins from "<<plugin_path<<"\n"; #endif qLogger->Info(oss.str()); datasource_cache::instance()->register_datasources(plugin_path.c_str()); std::string font_dir = mapnik_dir + "fonts/dejavu-fonts-ttf-2.30/ttf/"; { std::stringstream oss; oss << "..looking for DejaVuSans fonts in... 
" << font_dir << "\n"; qLogger->Info(oss.str()); } if (boost::filesystem3::exists( font_dir ) ) { boost::filesystem3::directory_iterator end_itr; // default construction yields past-the-end for ( boost::filesystem3::directory_iterator itr( font_dir ); itr != end_itr; ++itr ) { if (!boost::filesystem3::is_directory(itr->status()) ) { freetype_engine::register_font(itr->path().string()); } } } // Generate map container Map m(256,256); m.set_background(color_factory::from_string("white")); load_map(m,map_file); projection mapnikProj = projection(m.srs()); if(!FileSystem::DirExists(output_path)) FileSystem::makedir(output_path); if(!bUpdateMode) { std::stringstream oss; oss << "[Rendermode: Normal] Start rendering tiles..\n"; qLogger->Info(oss.str()); double avtps = 0.0; int avtps_it = 0; int total_tiles = 0; clock_t t_0, t_1; t_0 = clock(); for(int z = minZoom; z < maxZoom + 1; z++) { ituple px0 = gProj.geoCoord2Pixel(dtuple(bounds[0], bounds[3]),z); ituple px1 = gProj.geoCoord2Pixel(dtuple(bounds[2], bounds[1]),z); // check if we have directories in place std::string szoom = StringUtils::IntegerToString(z, 10); if(!FileSystem::DirExists(output_path + szoom)) FileSystem::makedir(output_path + szoom); for(int x = int(px0.a/256.0); x <= int(px1.a/256.0) +1; x++) { // Validate x co-ordinate if((x < 0) || (x >= math::Pow2(z))) continue; // check if we have directories in place std::string str_x = StringUtils::IntegerToString(x,10); if(!FileSystem::DirExists(output_path + szoom + "/" + str_x)) FileSystem::makedir(output_path + szoom + "/" + str_x); int tileCount = 0; int low = int(px0.b/256.0); int high = int(px1.b/256.0)+1; #ifndef _DEBUG #pragma omp parallel shared(low,high,x,z,m,gProj,mapnikProj,tsmScheme, output_path, szoom,str_x,tileCount) { #pragma omp for #endif for(int y = low; y <= high; y++) { // Validate x co-ordinate if((y < 0) || (y >= math::Pow2(z))) continue; // flip y to match OSGEO TMS spec std::string str_y; std::stringstream ss; if(tsmScheme) { ss << 
math::Pow2(z-1); str_y = ss.str(); } else { ss << y; str_y = ss.str(); } std::string tile_uri = output_path + szoom + '/' + str_x + '/' + str_y + ".png"; // Submit tile to be rendered _renderTile(tile_uri,m,x,y,z,gProj,mapnikProj,bVerbose, bOverrideTiles, bLockEnabled); tileCount++; } #ifndef _DEBUG } #endif total_tiles += tileCount; if(tileCount % 1000 == 0) { std::stringstream oss; oss << ".. " << total_tiles << " tiles processed!\n"; qLogger->Info(oss.str()); } } } { t_1 = clock(); double time=(double(t_1-t_0)/double(CLOCKS_PER_SEC)); double tps = total_tiles/time; std::stringstream oss; oss << ">>> Finished rendering " << total_tiles << " tiles at " << tps << " tiles per second! TOTAL TIME: " << time << "<<<\n"; qLogger->Info(oss.str()); } } else { std::stringstream oss; oss << "[Rendermode: Update] Start rendering tiles..\n reading expire list...\n"; qLogger->Info(oss.str()); std::vector<Tile> vExpireList = _readExpireList(expire_list); clock_t t_0,t_1; t_0 = clock(); int tileCount = 0; #pragma omp parallel shared(qLogger,vExpireList,m,gProj,mapnikProj,tsmScheme, output_path,tileCount) { #pragma omp for for(int i = 0; i < vExpireList.size(); i++) { std::stringstream ss; Tile t = vExpireList[i]; std::string szoom = StringUtils::IntegerToString(t.zoom, 10); if(!FileSystem::DirExists(output_path + szoom)) FileSystem::makedir(output_path + szoom); std::string str_x = StringUtils::IntegerToString(t.x,10); if(!FileSystem::DirExists(output_path + szoom + "/" + str_x)) FileSystem::makedir(output_path + szoom + "/" + str_x); ss << output_path << t.zoom << "/" << t.x << "/" << t.y << ".png"; std::string tile_uri = ss.str(); _renderTile(tile_uri,m,t.x,t.y,t.zoom,gProj,mapnikProj); if(tileCount % 1000) { std::stringstream oss; oss << ".." 
<< tileCount << " tiles processed!\n"; qLogger->Info(oss.str()); } } } { t_1 = clock(); double time=(double(t_1-t_0)/double(CLOCKS_PER_SEC)); double tps = tileCount/time; std::stringstream oss; oss << ">>> Finished rendering " << tileCount << " tiles at " << tps << " tiles per second! TOTAL TIME: " << time << "<<<\n"; qLogger->Info(oss.str()); } } } catch ( const mapnik::config_error & ex ) { std::cerr << "### Configuration error: " << ex.what() << std::endl; return ERROR_CONFIG; } catch ( const std::exception & ex ) { std::cerr << "### std::exception: " << ex.what() << std::endl; return ERROR_MAPNIK; } catch ( ... ) { std::cerr << "### Unknown exception." << std::endl; return EXIT_FAILURE; } return EXIT_SUCCESS; }
npocmaka/Windows-Server-2003
drivers/wdm/audio/drivers/usbaudio.2/typei.c
//+-------------------------------------------------------------------------
//
//  Microsoft Windows
//
//  Copyright (C) Microsoft Corporation, 1999 - 2000
//
//  File:       typei.c
//
//  USB Audio Type I (PCM) stream handling: isochronous URB construction,
//  completion callbacks, position reporting and sample-rate feedback.
//
//--------------------------------------------------------------------------

#include "common.h"
#include "perf.h"

#define LOW_WATERMARK 5

extern ULONG TraceEnable;
extern TRACEHANDLE LoggerHandle;

/*
 * RtAudioTypeIGetPlayPosition
 *
 * Locates the URB that the hardware is currently playing and returns a
 * pointer into its transfer buffer plus a frame offset.
 *
 *   PinFileObject   - file object of the render pin.
 *   ppPlayPosition  - receives the transfer buffer of the matching URB
 *                     (NULL if no usable URB was found).
 *   plOffset        - receives the frame offset within that buffer;
 *                     negative when the closest URB has not started yet.
 *
 * Returns the status of GetCurrentUSBFrame.
 */
NTSTATUS RtAudioTypeIGetPlayPosition(
    IN PFILE_OBJECT PinFileObject,
    OUT PUCHAR *ppPlayPosition,
    OUT PLONG plOffset)
{
    PKSPIN pKsPin;
    PPIN_CONTEXT pPinContext;
    PTYPE1_PIN_CONTEXT pT1PinContext;
    ULONG ulCurrentFrame;
    PLIST_ENTRY ple;
    PISO_URB_INFO pIsoUrbInfoTemp;
    PUCHAR pPlayPosInUrb = NULL;
    LONG lPlayPosOffset = 0;
    PURB pUrb;
    ULONG ulStartFrame;
    KIRQL Irql;
    NTSTATUS ntStatus;
    ULONG MinFramesAhead=MAX_ULONG;

    //
    // Get the KSPIN from the file object
    //
    pKsPin = (PKSPIN)KsGetObjectFromFileObject( PinFileObject );
    if (!pKsPin) {
        return STATUS_UNSUCCESSFUL;
    }

    pPinContext = pKsPin->Context;
    pT1PinContext = pPinContext->pType1PinContext;

    //
    // search the pending transfers to see which one is going out now
    //
    KeAcquireSpinLock( &pPinContext->PinSpinLock, &Irql );

    //
    // Get the current frame counter so we know where the hardware is
    //
    ntStatus = GetCurrentUSBFrame( pPinContext, &ulCurrentFrame );
    if (NT_SUCCESS(ntStatus)) {
        for(ple = pT1PinContext->UrbInUseList.Flink;
            ple != &pT1PinContext->UrbInUseList;
            ple = ple->Flink) {
            pIsoUrbInfoTemp = (PISO_URB_INFO)ple;
            pUrb = pIsoUrbInfoTemp->pUrb;
            // DbgLog("CHECK", &pT1PinContext->UrbInUseList, pIsoUrbInfoTemp, pUrb, 0);

            //
            // see if this urb is the one that is currently being played
            //
            ulStartFrame = pUrb->UrbIsochronousTransfer.StartFrame;
            if (ulStartFrame != 0) {
                DbgLog("RT1BPos", ulCurrentFrame, ulStartFrame, 0, 0);
                // Unsigned subtraction: in range [0, NumberOfPackets) only
                // when ulCurrentFrame falls inside this URB's frame window.
                if ( (ulCurrentFrame - ulStartFrame) < pUrb->UrbIsochronousTransfer.NumberOfPackets ) {
                    pPlayPosInUrb=(PUCHAR)pUrb->UrbIsochronousTransfer.TransferBuffer;
                    lPlayPosOffset=(ulCurrentFrame - ulStartFrame);
                    // This measurement is valid.  Make sure we don't lose it
                    // because of any earlier FramesAhead measurements.
                    MinFramesAhead=MAX_ULONG;
                    break;
                } else {
                    // URB is in the future; remember the closest one so far
                    // and report a negative offset for it.
                    ULONG FramesAhead;
                    FramesAhead=(ulStartFrame-ulCurrentFrame);
                    if (FramesAhead<MinFramesAhead) {
                        MinFramesAhead=FramesAhead;
                        pPlayPosInUrb=(PUCHAR)pUrb->UrbIsochronousTransfer.TransferBuffer;
                        lPlayPosOffset=-(LONG)FramesAhead;
                    }
                }
            } else {
                // Start Frame is not set yet
                _DbgPrintF( DEBUGLVL_TERSE, ("'[RtAudioTypeIGetPlayPosition] Start Frame is not set for pUrb: %x\n", pUrb));
            }
        }
    }

    KeReleaseSpinLock(&pPinContext->PinSpinLock, Irql);

    // Clear out the closest URB information if it is too far from the
    // current position.  If the closest URB in our list is more than 150ms
    // away from the current position, then we drop the data on the floor.
    // Note that we ALWAYS set the MinFramesAhead to 0xffffffff in the
    // case when we find a position inside an URB - so that this code never
    // clears that position information.
    if (MinFramesAhead!=MAX_ULONG && MinFramesAhead>150) {
        pPlayPosInUrb = NULL;
        lPlayPosOffset = 0;
        _DbgPrintF( DEBUGLVL_TERSE, ("'[RtAudioTypeIGetPlayPosition] Couldn't find matching urb!\n"));
    }

    *ppPlayPosition = pPlayPosInUrb;
    *plOffset = lPlayPosOffset;

    DbgLog("RtPos", pPlayPosInUrb, lPlayPosOffset, 0, 0);

    return ntStatus;
}

/*
 * TypeIAsyncEPPollCallback
 *
 * IRP completion routine for the sync-endpoint poll issued by
 * TypeIAsyncEndpointPoll.  Decodes the 3-byte 10.14 fixed-point feedback
 * value (samples per frame) into a sample rate in Hz and, if it changed,
 * flags the pin so the next processing pass repacketizes.
 * Always returns STATUS_MORE_PROCESSING_REQUIRED because the IRP is owned
 * and reused by the driver.
 */
NTSTATUS TypeIAsyncEPPollCallback(
    IN PDEVICE_OBJECT DeviceObject,
    IN PIRP pIrp,
    PSYNC_ENDPOINT_INFO pSyncEPInfo )
{
    PPIN_CONTEXT pPinContext = pSyncEPInfo->pContext;
    PTYPE1_PIN_CONTEXT pT1PinContext = pPinContext->pType1PinContext;
    ULONG SRWhole;
    ULONG SRFraction;
    ULONG SampleRate;
    KIRQL Irql;

    // Whole samples/frame live in the top 10 bits, fraction in the next 10;
    // both are scaled to Hz (x1000 frames per second).
    SRWhole = (((ULONG)pSyncEPInfo->Buffer[2]<<2) | ((ULONG)pSyncEPInfo->Buffer[1]>>6)) * 1000;
    SRFraction = (((ULONG)pSyncEPInfo->Buffer[1]<<4) | ((ULONG)pSyncEPInfo->Buffer[0]>>4)) & 0x3FF;
    SRFraction = (SRFraction*1000) / 1024;

    SampleRate = SRWhole + SRFraction;

    DbgLog("T1AsECB", SampleRate, (ULONG)pSyncEPInfo->Buffer[2], (ULONG)pSyncEPInfo->Buffer[1], (ULONG)pSyncEPInfo->Buffer[0]);

    // Only publish a non-zero rate that differs from the current one.
    if ( SampleRate && ( SampleRate != pT1PinContext->ulCurrentSampleRate )) {
        KeAcquireSpinLock( &pPinContext->PinSpinLock, &Irql );
        pT1PinContext->ulCurrentSampleRate = SampleRate;
        pT1PinContext->fSampleRateChanged = TRUE;
        KeReleaseSpinLock( &pPinContext->PinSpinLock, Irql );
    }

    // Schedule the next poll one refresh interval after this transfer started.
    pSyncEPInfo->ulNextPollFrame = pSyncEPInfo->pUrb->UrbIsochronousTransfer.StartFrame +
                                   pSyncEPInfo->ulRefreshRate;

    KeAcquireSpinLock(&pPinContext->PinSpinLock, &Irql);
    pSyncEPInfo->fSyncRequestInProgress = FALSE;
    KeSetEvent( &pSyncEPInfo->SyncPollDoneEvent, 0, FALSE );
    KeReleaseSpinLock(&pPinContext->PinSpinLock, Irql);

    return ( STATUS_MORE_PROCESSING_REQUIRED );
}

/*
 * TypeIAsyncEndpointPoll
 *
 * Builds and submits a single-packet isochronous IN transfer on the sync
 * (feedback) endpoint.  The pipe is reset first; completion is handled by
 * TypeIAsyncEPPollCallback.  The URB and IRP are preallocated and reused.
 */
VOID TypeIAsyncEndpointPoll(
    PDEVICE_OBJECT pNextDeviceObject,
    PSYNC_ENDPOINT_INFO pSyncEPInfo )
{
    PURB pUrb = pSyncEPInfo->pUrb;
    PIRP pIrp = pSyncEPInfo->pIrp;
    PIO_STACK_LOCATION nextStack;

    // First Reset the pipe.
    ResetUSBPipe( pNextDeviceObject, pSyncEPInfo->hSyncPipeHandle );

    RtlZeroMemory(pUrb, GET_ISO_URB_SIZE(1));

    pUrb->UrbIsochronousTransfer.Hdr.Length = (USHORT)GET_ISO_URB_SIZE(1);
    pUrb->UrbIsochronousTransfer.Hdr.Function = URB_FUNCTION_ISOCH_TRANSFER;
    pUrb->UrbIsochronousTransfer.PipeHandle = pSyncEPInfo->hSyncPipeHandle;
    pUrb->UrbIsochronousTransfer.TransferFlags = USBD_START_ISO_TRANSFER_ASAP | USBD_TRANSFER_DIRECTION_IN;
    pUrb->UrbIsochronousTransfer.NumberOfPackets = 1;
    pUrb->UrbIsochronousTransfer.IsoPacket[0].Offset = 0;
    pUrb->UrbIsochronousTransfer.TransferBuffer = pSyncEPInfo->Buffer;
    pUrb->UrbIsochronousTransfer.TransferBufferLength = SYNC_ENDPOINT_DATA_SIZE;

    // Re-initialize the reused IRP before resubmitting it.
    IoInitializeIrp( pIrp,
                     IoSizeOfIrp(pNextDeviceObject->StackSize),
                     pNextDeviceObject->StackSize );

    nextStack = IoGetNextIrpStackLocation(pIrp);
    ASSERT(nextStack != NULL);
    nextStack->MajorFunction = IRP_MJ_INTERNAL_DEVICE_CONTROL;
    nextStack->Parameters.Others.Argument1 = pUrb;
    nextStack->Parameters.DeviceIoControl.IoControlCode = IOCTL_INTERNAL_USB_SUBMIT_URB;

    IoSetCompletionRoutine( pIrp, TypeIAsyncEPPollCallback, pSyncEPInfo, TRUE, TRUE, TRUE );

    IoCallDriver(pNextDeviceObject, pIrp);
}

/*
 * TypeIRenderBytePosition
 *
 * Computes the current render byte position (pPosition->PlayOffset) by
 * walking the in-use URB list and adding up completed-URB bytes, fully
 * played packets of the current URB, and an interpolated fraction when the
 * position is polled repeatedly within the same USB frame.
 * (definition continues below)
 */
NTSTATUS
TypeIRenderBytePosition(
    PPIN_CONTEXT pPinContext,
    PKSAUDIO_POSITION pPosition )
{
    PTYPE1_PIN_CONTEXT pT1PinContext = pPinContext->pType1PinContext;
    PISO_URB_INFO pIsoUrbInfo;
    ULONG ulStartFrame, ulCurrentFrame;
    PLIST_ENTRY ple;
    PURB pUrb;
    KIRQL irql;
    NTSTATUS ntStatus = STATUS_SUCCESS;

    pPosition->PlayOffset = 0;

    KeAcquireSpinLock( &pPinContext->PinSpinLock, &irql );
    if ( pPinContext->fStreamStartedFlag ) {
        // Drop the lock across the frame query, then re-acquire to walk the list.
        KeReleaseSpinLock( &pPinContext->PinSpinLock, irql );
        ntStatus = GetCurrentUSBFrame( pPinContext, &ulCurrentFrame );
        if (NT_SUCCESS(ntStatus)) {
            KeAcquireSpinLock( &pPinContext->PinSpinLock, &irql );
            DbgLog("T1BPos1", pPinContext, pT1PinContext, ulCurrentFrame, 0);
            for( ple = pT1PinContext->UrbInUseList.Flink;
                 ple != &pT1PinContext->UrbInUseList;
                 ple = ple->Flink) {
                ULONG ulNumPackets;
                pIsoUrbInfo = (PISO_URB_INFO)ple;
                pUrb = pIsoUrbInfo->pUrb;
                ulNumPackets = pUrb->UrbIsochronousTransfer.NumberOfPackets;
                ulStartFrame = pUrb->UrbIsochronousTransfer.StartFrame;
                if (ulStartFrame != 0) {
                    DbgLog("T1BPos2", ulStartFrame, ulCurrentFrame, ulNumPackets, 0);
                    // Determine if this is the current Frame being rendered.
                    if (( ulCurrentFrame - ulStartFrame ) < ulNumPackets ){
                        PUSBD_ISO_PACKET_DESCRIPTOR pIsoPacket =
                            &pUrb->UrbIsochronousTransfer.IsoPacket[ulCurrentFrame - ulStartFrame];
                        // Bytes in the current packet: last packet uses the total
                        // transfer length, otherwise the next packet's offset.
                        ULONG ulFrameBytes =
                            (( ulCurrentFrame - ulStartFrame ) == (ulNumPackets-1)) ?
                                pIsoUrbInfo->ulTransferBufferLength-pIsoPacket[0].Offset :
                                pIsoPacket[1].Offset-pIsoPacket[0].Offset;
                        DbgLog("StrtFr1", ulStartFrame, ulCurrentFrame, ulNumPackets, ulFrameBytes);
                        DbgLog("StrtFr2", pUrb, pIsoPacket, pIsoUrbInfo->ulTransferBufferLength, 0);
                        ASSERT((LONG)ulFrameBytes > 0);
                        pPosition->PlayOffset += pIsoPacket[0].Offset;
                        // If this is the current frame determine if there have been
                        // multiple position requests during this frame.  If so, "interpolate".
                        if ( ulCurrentFrame == pPinContext->ulCurrentFrame ){
                            if ( pPinContext->ulFrameRepeatCount++ < 8 ) {
                                // Advance by 1/8th of the packet per repeated query.
                                pPosition->PlayOffset +=
                                    pPinContext->ulFrameRepeatCount* (ulFrameBytes>>3);
                            }
                            else {
                                pPosition->PlayOffset += ulFrameBytes; // Possible repeat here
                            }
                        }
                        else {
                            pPinContext->ulFrameRepeatCount = 0;
                            pPinContext->ulCurrentFrame = ulCurrentFrame;
                        }
                        break;
                    }
                    else if (( ulCurrentFrame - ulStartFrame ) < 0x7fffffff){
                        // Current position is past this urb.
                        // Add this URB's byte count to total
                        pPosition->PlayOffset += pIsoUrbInfo->ulTransferBufferLength;
                    }
                }
            }
            pPosition->PlayOffset += pPinContext->ullTotalBytesReturned;
            KeReleaseSpinLock( &pPinContext->PinSpinLock, irql );
        }
    }
    else {
        // Stream not running: position is just the completed byte count.
        pPosition->PlayOffset += pPinContext->ullTotalBytesReturned;
        KeReleaseSpinLock( &pPinContext->PinSpinLock, irql );
    }

#if DBG
    {
        // Positions must be monotonically non-decreasing.
        if ( pPinContext->ullOldPlayOffset > pPosition->PlayOffset ) TRAP;
        pPinContext->ullOldPlayOffset = pPosition->PlayOffset;
    }
#endif

    return ntStatus;
}

/*
 * TypeI1MsCompleteCallback
 *
 * Completion routine for single-millisecond (leftover) isochronous
 * transfers built by TypeIBuild1MsecIsocRequest.  Records errors, updates
 * the returned byte count, recycles the 1ms buffer and releases its
 * semaphore slot.  Returns STATUS_MORE_PROCESSING_REQUIRED because the
 * IRP is owned and reused by the driver.
 */
NTSTATUS TypeI1MsCompleteCallback(
    IN PDEVICE_OBJECT pDeviceObject,
    IN PIRP pIrp,
    PMSEC_BUF_INFO p1MsBufInfo )
{
    PPIN_CONTEXT pPinContext = (PPIN_CONTEXT)p1MsBufInfo->pContext;
    PTYPE1_PIN_CONTEXT pT1PinContext = pPinContext->pType1PinContext;
    KIRQL Irql;

    // Check for errors and Decrement outstanding URB count
    KeAcquireSpinLock( &pPinContext->PinSpinLock, &Irql );
    if ( p1MsBufInfo->pUrb->UrbIsochronousTransfer.Hdr.Status ) {
        pPinContext->fUrbError = TRUE;
    }

    // Last outstanding URB completed -> the stream has starved.
    if ( 0 == InterlockedDecrement(&pPinContext->ulOutstandingUrbCount) ) {
        pPinContext->fUrbError = TRUE ;
        pPinContext->fStreamStartedFlag = FALSE;
        KeSetEvent( &pPinContext->PinStarvationEvent, 0, FALSE );
    }

    pPinContext->ullTotalBytesReturned += p1MsBufInfo->ulTransferBufferLength;

    DbgLog("RetUrb1", p1MsBufInfo->ulTransferBufferLength, pPinContext->ullTotalBytesReturned, p1MsBufInfo->pUrb, 0 );

    // Remove from the pending list
    RemoveEntryList(&p1MsBufInfo->List);

    // Put 1ms info structure back on queue.
    InsertTailList( &pT1PinContext->MSecBufList, &p1MsBufInfo->List );
    KeReleaseSpinLock( &pPinContext->PinSpinLock, Irql );

    // release 1ms resource semaphore
    KeReleaseSemaphore( &pT1PinContext->MsecBufferSemaphore, 0, 1, FALSE );

    return ( STATUS_MORE_PROCESSING_REQUIRED );
}

/*
 * TypeIBuild1MsecIsocRequest
 *
 * Builds and submits a single-packet isochronous OUT transfer for one
 * preallocated 1ms buffer (leftover data that did not fill a whole packet
 * in the previous stream pointer).  Completion is handled by
 * TypeI1MsCompleteCallback; the buffer is also linked onto the in-use URB
 * list for position reporting.
 */
VOID TypeIBuild1MsecIsocRequest(
    PMSEC_BUF_INFO p1MsBufInfo )
{
    PPIN_CONTEXT pPinContext = (PPIN_CONTEXT)p1MsBufInfo->pContext;
    PTYPE1_PIN_CONTEXT pT1PinContext = pPinContext->pType1PinContext;
    PURB pUrb = p1MsBufInfo->pUrb;
    PIRP pIrp = p1MsBufInfo->pIrp;
    PIO_STACK_LOCATION nextStack;
    KIRQL Irql;

    RtlZeroMemory(pUrb, GET_ISO_URB_SIZE(1));

    pUrb->UrbIsochronousTransfer.Hdr.Length = (USHORT)GET_ISO_URB_SIZE(1);
    pUrb->UrbIsochronousTransfer.Hdr.Function = URB_FUNCTION_ISOCH_TRANSFER;
    pUrb->UrbIsochronousTransfer.PipeHandle = pPinContext->hPipeHandle;
    pUrb->UrbIsochronousTransfer.TransferFlags = USBD_START_ISO_TRANSFER_ASAP;
    pUrb->UrbIsochronousTransfer.NumberOfPackets = 1;
    pUrb->UrbIsochronousTransfer.TransferBuffer = p1MsBufInfo->pBuffer;
    pUrb->UrbIsochronousTransfer.TransferBufferLength = p1MsBufInfo->ulTransferBufferLength;

    // Re-initialize the reused IRP before resubmitting it.
    IoInitializeIrp( pIrp,
                     IoSizeOfIrp(pPinContext->pNextDeviceObject->StackSize),
                     pPinContext->pNextDeviceObject->StackSize );

    nextStack = IoGetNextIrpStackLocation(pIrp);
    ASSERT(nextStack != NULL);
    nextStack->MajorFunction = IRP_MJ_INTERNAL_DEVICE_CONTROL;
    nextStack->Parameters.Others.Argument1 = pUrb;
    nextStack->Parameters.DeviceIoControl.IoControlCode = IOCTL_INTERNAL_USB_SUBMIT_URB;

    IoSetCompletionRoutine ( pIrp, TypeI1MsCompleteCallback, p1MsBufInfo, TRUE, TRUE, TRUE );

    InterlockedIncrement(&pPinContext->ulOutstandingUrbCount);

    KeAcquireSpinLock(&pPinContext->PinSpinLock, &Irql);
    InsertTailList( &pT1PinContext->UrbInUseList, &p1MsBufInfo->List );
    KeReleaseSpinLock(&pPinContext->PinSpinLock, Irql);

    IoCallDriver(pPinContext->pNextDeviceObject, pIrp);
}

/*
 * TypeICompleteCallback
 *
 * Completion routine for the main render isochronous transfers built by
 * TypeIBuildIsochRequest.  Records URB/IRP errors, detects starvation
 * (outstanding URB count reaching zero), updates position accounting,
 * emits ETW glitch events when tracing is enabled, and frees the URB
 * storage, the IRP and the stream-pointer clone.
 * Returns STATUS_MORE_PROCESSING_REQUIRED because the IRP is freed here.
 */
NTSTATUS TypeICompleteCallback (
    IN PDEVICE_OBJECT pDeviceObject,
    IN PIRP pIrp,
    PKSSTREAM_POINTER pKsStreamPtr )
{
    PPIN_CONTEXT pPinContext = pKsStreamPtr->Pin->Context;
    PTYPE1_PIN_CONTEXT pT1PinContext = pPinContext->pType1PinContext;
    PISO_URB_INFO pIsoUrbInfo = pKsStreamPtr->Context;
    PURB pUrb = pIsoUrbInfo->pUrb;
    NTSTATUS ntStatus;
    KIRQL Irql;
    LOGICAL Glitch = FALSE;
    LARGE_INTEGER currentPC;

    ntStatus = pIrp->IoStatus.Status;
    if ( pUrb->UrbIsochronousTransfer.Hdr.Status ) {
        DbgLog("UrbErr1", pKsStreamPtr->Pin, pPinContext, pKsStreamPtr, pUrb->UrbIsochronousTransfer.Hdr.Status );
        ntStatus = STATUS_DEVICE_DATA_ERROR;
    }

    KeAcquireSpinLock(&pPinContext->PinSpinLock, &Irql);
    if ( !NT_SUCCESS(ntStatus) ) {
        pPinContext->fUrbError = TRUE ;
        pPinContext->fStreamStartedFlag = FALSE;
        DbgLog("UrbErr2", pKsStreamPtr->Pin, pPinContext, pKsStreamPtr, ntStatus );
    }

    // Last outstanding URB completed -> the stream has starved.
    if ( 0 == InterlockedDecrement(&pPinContext->ulOutstandingUrbCount) ) {
        Glitch = TRUE;
        pPinContext->fUrbError = TRUE ;
        pPinContext->fStreamStartedFlag = FALSE;
        KeSetEvent( &pPinContext->PinStarvationEvent, 0, FALSE );
    }
    // else if ( !pPinContext->fStreamStartedFlag && !pPinContext->fUrbError ) {
    //     pPinContext->fStreamStartedFlag = TRUE;
    // }

    pPinContext->ullTotalBytesReturned += pIsoUrbInfo->ulTransferBufferLength;

    DbgLog("RetUrb", pIsoUrbInfo->ulTransferBufferLength, pPinContext->ullTotalBytesReturned, pUrb, pKsStreamPtr );

    RemoveEntryList(&pIsoUrbInfo->List);
    KeReleaseSpinLock(&pPinContext->PinSpinLock, Irql);

    FreeMem ( pIsoUrbInfo );

    // ETW glitch tracing: log starvation transitions (ignoring the first
    // completion right after a state change into RUN).
    if (LoggerHandle && TraceEnable) {
        currentPC = KeQueryPerformanceCounter (NULL);
        if (Glitch) {
            if (!pPinContext->GraphJustStarted) {
                if (pPinContext->StarvationDetected==FALSE) {
                    pPinContext->StarvationDetected = TRUE;
                    PerfLogGlitch((ULONG_PTR)pPinContext, TRUE,currentPC.QuadPart,pPinContext->LastStateChangeTimeSample);
                } //if
            }
        }
        else if (pPinContext->StarvationDetected) {
            pPinContext->StarvationDetected = FALSE;
            PerfLogGlitch((ULONG_PTR)pPinContext, FALSE,currentPC.QuadPart,pPinContext->LastStateChangeTimeSample);
        } //if
        pPinContext->LastStateChangeTimeSample = currentPC.QuadPart;
    } //if
    pPinContext->GraphJustStarted = FALSE;

    // If error, set status code
    if (!NT_SUCCESS (ntStatus)) {
        KsStreamPointerSetStatusCode (pKsStreamPtr, ntStatus);
    }

    // Free Irp
    IoFreeIrp( pIrp );

    // Delete the stream pointer to release the buffer.
    KsStreamPointerDelete( pKsStreamPtr );

    return ( STATUS_MORE_PROCESSING_REQUIRED );
}

/*
 * TypeILockDelayCompleteCallback
 *
 * Completion routine for the silence transfer issued by TypeILockDelay.
 * Unlike TypeICompleteCallback it does not touch position accounting
 * (the lock-delay data is synthetic) and frees the pseudo stream pointer
 * allocated by TypeILockDelay instead of a queue clone.
 */
NTSTATUS TypeILockDelayCompleteCallback (
    IN PDEVICE_OBJECT pDeviceObject,
    IN PIRP pIrp,
    PKSSTREAM_POINTER pKsStreamPtr )
{
    PPIN_CONTEXT pPinContext = pKsStreamPtr->Pin->Context;
    PTYPE1_PIN_CONTEXT pT1PinContext = pPinContext->pType1PinContext;
    PISO_URB_INFO pIsoUrbInfo = pKsStreamPtr->Context;
    PURB pUrb = pIsoUrbInfo->pUrb;
    NTSTATUS ntStatus;
    KIRQL Irql;

    ntStatus = pIrp->IoStatus.Status;
    if ( pUrb->UrbIsochronousTransfer.Hdr.Status ) {
        DbgLog("UrbErr1", pKsStreamPtr->Pin, pPinContext, pKsStreamPtr, pUrb->UrbIsochronousTransfer.Hdr.Status );
        ntStatus = STATUS_DEVICE_DATA_ERROR;
    }

    KeAcquireSpinLock(&pPinContext->PinSpinLock, &Irql);
    if ( !NT_SUCCESS(ntStatus) ) {
        pPinContext->fUrbError = TRUE ;
    }

    if ( 0 == InterlockedDecrement(&pPinContext->ulOutstandingUrbCount) ) {
        pPinContext->fUrbError = TRUE ;
        KeSetEvent( &pPinContext->PinStarvationEvent, 0, FALSE );
    }
    KeReleaseSpinLock(&pPinContext->PinSpinLock, Irql);

    // Free our URB storage
    FreeMem( pIsoUrbInfo );

    // Free Irp
    IoFreeIrp( pIrp );

    // Free the stream pointer and data buffer.
    FreeMem( pKsStreamPtr );

    return ( STATUS_MORE_PROCESSING_REQUIRED );
}

/*
 * TypeIBuildIsochRequest
 *
 * Packetizes the data behind a (cloned or synthetic) stream pointer into a
 * multi-packet isochronous OUT URB and submits it.
 *
 *   pKsStreamPtr       - stream pointer whose OffsetIn describes the data.
 *   pCompletionRoutine - TypeICompleteCallback for normal render data, or
 *                        TypeILockDelayCompleteCallback for lock-delay data.
 *
 * Packet sizes follow ulSamplesPerPacket with an extra sample whenever the
 * accumulated fractional samples-per-ms (ulLeftoverFraction) carry over.
 * Any remainder smaller than one packet is copied into a 1ms staging buffer
 * to be completed and sent on the next pass.
 */
NTSTATUS TypeIBuildIsochRequest(
    PKSSTREAM_POINTER pKsStreamPtr,
    PVOID pCompletionRoutine )
{
    PPIN_CONTEXT pPinContext = pKsStreamPtr->Pin->Context;
    PKSSTREAM_POINTER_OFFSET pKsStreamPtrOffsetIn = &pKsStreamPtr->OffsetIn;
    PTYPE1_PIN_CONTEXT pT1PinContext = pPinContext->pType1PinContext;
    ULONG ulSampleCount = pKsStreamPtrOffsetIn->Remaining / pT1PinContext->ulBytesPerSample;
    ULONG ulNumberOfPackets = ulSampleCount / pT1PinContext->ulSamplesPerPacket;
    ULONG ulCurrentPacketSize, i = 0;
    ULONG ulUrbSize = GET_ISO_URB_SIZE( ulNumberOfPackets );
    ULONG ulDataOffset = 0;
    PIO_STACK_LOCATION nextStack;
    PISO_URB_INFO pIsoUrbInfo;
    PURB pUrb;
    PIRP pIrp;
    KIRQL Irql;
    NTSTATUS ntStatus = STATUS_SUCCESS;

    // Data must be a whole number of samples.
    ASSERT( (pKsStreamPtrOffsetIn->Remaining % pT1PinContext->ulBytesPerSample) == 0 );

    pIrp = IoAllocateIrp( pPinContext->pNextDeviceObject->StackSize, FALSE );
    if ( !pIrp ) {
        // Lock-delay stream pointers are our own allocation; release them on failure.
        if (pCompletionRoutine == TypeILockDelayCompleteCallback) {
            FreeMem( pKsStreamPtr );
        }
        return STATUS_INSUFFICIENT_RESOURCES;
    }

    // URB header info and the URB itself live in one allocation.
    pIsoUrbInfo = AllocMem( NonPagedPool, sizeof( ISO_URB_INFO ) + ulUrbSize );
    if (!pIsoUrbInfo) {
        IoFreeIrp(pIrp);
        if (pCompletionRoutine == TypeILockDelayCompleteCallback) {
            FreeMem( pKsStreamPtr );
        }
        return STATUS_INSUFFICIENT_RESOURCES;
    }

    pIsoUrbInfo->pUrb = pUrb = (PURB)(pIsoUrbInfo + 1);
    pKsStreamPtr->Context = pIsoUrbInfo;

    RtlZeroMemory(pUrb, ulUrbSize);

    pUrb->UrbIsochronousTransfer.Hdr.Length = (USHORT)ulUrbSize;
    pUrb->UrbIsochronousTransfer.Hdr.Function = URB_FUNCTION_ISOCH_TRANSFER;
    pUrb->UrbIsochronousTransfer.PipeHandle = pPinContext->hPipeHandle;
    pUrb->UrbIsochronousTransfer.TransferFlags = USBD_START_ISO_TRANSFER_ASAP;
    pUrb->UrbIsochronousTransfer.TransferBuffer = pKsStreamPtrOffsetIn->Data;

    // First packet gets an extra sample if the fractional carry overflows 1ms.
    ulCurrentPacketSize =
        ( ((pT1PinContext->ulLeftoverFraction+pT1PinContext->ulFractionSize) >= MS_PER_SEC) +
          pT1PinContext->ulSamplesPerPacket );

    DbgLog( "BldPreL", ulCurrentPacketSize, ulSampleCount, pKsStreamPtrOffsetIn->Data, 0 );

    // Lay out one iso packet per millisecond until less than a full packet remains.
    while ( ulSampleCount >= ulCurrentPacketSize ) {
        pUrb->UrbIsochronousTransfer.IsoPacket[i++].Offset = ulDataOffset;
        pUrb->UrbIsochronousTransfer.NumberOfPackets++;
        ASSERT( pUrb->UrbIsochronousTransfer.NumberOfPackets <= ulNumberOfPackets );

        pT1PinContext->ulLeftoverFraction += pT1PinContext->ulFractionSize;
        pT1PinContext->ulLeftoverFraction %= MS_PER_SEC;

        DbgLog( "BldLp", ulCurrentPacketSize, ulSampleCount, pKsStreamPtrOffsetIn->Data, ulDataOffset );

        ulDataOffset += ulCurrentPacketSize * pT1PinContext->ulBytesPerSample;
        pKsStreamPtrOffsetIn->Remaining -= ulCurrentPacketSize * pT1PinContext->ulBytesPerSample;
        ulSampleCount -= ulCurrentPacketSize;

        ulCurrentPacketSize =
            ( ((pT1PinContext->ulLeftoverFraction+pT1PinContext->ulFractionSize) >= MS_PER_SEC) +
              pT1PinContext->ulSamplesPerPacket );
    }

    pUrb->UrbIsochronousTransfer.TransferBufferLength = ulDataOffset;
    pIsoUrbInfo->ulTransferBufferLength = ulDataOffset;
    pKsStreamPtrOffsetIn->Data += ulDataOffset;

    // Gotta save off the leftovers before submitting this Urb.
    if ( pKsStreamPtrOffsetIn->Remaining ) {
        PMSEC_BUF_INFO pCurrent1MsBuf;

        DbgLog( "BldRemn", pKsStreamPtrOffsetIn->Remaining, pKsStreamPtrOffsetIn->Count, pKsStreamPtrOffsetIn->Data, ulDataOffset);

        // Wait for a free 1ms staging buffer, then stash the partial packet in it.
        KeWaitForSingleObject( &pT1PinContext->MsecBufferSemaphore,
                               Executive,
                               KernelMode,
                               FALSE,
                               NULL );
        KeAcquireSpinLock( &pPinContext->PinSpinLock, &Irql );
        if ( !IsListEmpty( &pT1PinContext->MSecBufList )) {
            pCurrent1MsBuf = (PMSEC_BUF_INFO)pT1PinContext->MSecBufList.Flink;
            KeReleaseSpinLock( &pPinContext->PinSpinLock, Irql );
            pCurrent1MsBuf->ulTransferBufferLength = pKsStreamPtrOffsetIn->Remaining;
            // Copy next partial to next 1ms buffer
            RtlCopyMemory( pCurrent1MsBuf->pBuffer,
                           pKsStreamPtrOffsetIn->Data,
                           pKsStreamPtrOffsetIn->Remaining );
            // Bytes still needed to top off the staged packet.
            pT1PinContext->ulPartialBufferSize =
                (ulCurrentPacketSize*pT1PinContext->ulBytesPerSample) - pKsStreamPtrOffsetIn->Remaining;
            DbgLog( "PartBuf", ulCurrentPacketSize, ulSampleCount, pT1PinContext->ulPartialBufferSize, 0 );
            pT1PinContext->ulLeftoverFraction += pT1PinContext->ulFractionSize;
            pT1PinContext->ulLeftoverFraction %= MS_PER_SEC;
        }
        else {
            KeReleaseSpinLock( &pPinContext->PinSpinLock, Irql );
        }
    }

    pIrp->IoStatus.Status = STATUS_SUCCESS;

    nextStack = IoGetNextIrpStackLocation(pIrp);
    ASSERT(nextStack != NULL);
    nextStack->MajorFunction = IRP_MJ_INTERNAL_DEVICE_CONTROL;
    nextStack->Parameters.Others.Argument1 = pUrb;
    nextStack->Parameters.DeviceIoControl.IoControlCode = IOCTL_INTERNAL_USB_SUBMIT_URB;

    IoSetCompletionRoutine ( pIrp, pCompletionRoutine, pKsStreamPtr, TRUE, TRUE, TRUE );

    InterlockedIncrement( &pPinContext->ulOutstandingUrbCount );

    // Add Urb to InUse list (only real render URBs participate in position reporting)
    if (pCompletionRoutine == TypeICompleteCallback) {
        KeAcquireSpinLock(&pPinContext->PinSpinLock, &Irql);
        // DbgLog("ADD", &pT1PinContext->UrbInUseList, pIsoUrbInfo, pUrb, 0);
        InsertTailList( &pT1PinContext->UrbInUseList, &pIsoUrbInfo->List );
        KeReleaseSpinLock(&pPinContext->PinSpinLock, Irql);
    }

    ntStatus = IoCallDriver( pPinContext->pNextDeviceObject, pIrp );
    if (
NT_SUCCESS(ntStatus) ) {
        // A successfully submitted render URB marks the stream as running.
        if (pCompletionRoutine == TypeICompleteCallback) {
            KeAcquireSpinLock(&pPinContext->PinSpinLock, &Irql);
            pPinContext->fStreamStartedFlag = TRUE;
            KeReleaseSpinLock(&pPinContext->PinSpinLock, Irql);
        }
    }

    return ntStatus;
}

/*
 * TypeILockDelay
 *
 * Sends a stretch of zeroed (silent) audio sized to the endpoint's lock
 * delay (bLockDelayUnits/wLockDelay from the class-specific endpoint
 * descriptor) so the device's clock recovery can lock before real data
 * arrives.  Builds a pseudo stream pointer over a zero-filled buffer and
 * hands it to TypeIBuildIsochRequest with TypeILockDelayCompleteCallback.
 */
NTSTATUS TypeILockDelay(
    PKSPIN pKsPin )
{
    PPIN_CONTEXT pPinContext = pKsPin->Context;
    PUSBAUDIO_DATARANGE pUsbAudioDataRange = pPinContext->pUsbAudioDataRange;
    PTYPE1_PIN_CONTEXT pT1PinContext = pPinContext->pType1PinContext;
    ULONG ulLockFrames = 0;
    ULONG ulLockSamples;
    ULONG ulDelayBytes;
    NTSTATUS ntStatus = STATUS_SUCCESS;

    // Only values 1 and 2 are defined
    ASSERT(pUsbAudioDataRange->pAudioEndpointDescriptor->bLockDelayUnits < 3);

    // Calculate the size of the delay for the current sample rate.
    switch ( pUsbAudioDataRange->pAudioEndpointDescriptor->bLockDelayUnits ) {
        case EP_LOCK_DELAY_UNITS_MS:
            // Delay is in milliseconds.
            ulLockFrames = (ULONG)pUsbAudioDataRange->pAudioEndpointDescriptor->wLockDelay;
            break;
        case EP_LOCK_DELAY_UNITS_SAMPLES:
            // Delay is in samples.  Adjust to nearest ms boundry.
            ulLockFrames = (ULONG)pUsbAudioDataRange->pAudioEndpointDescriptor->wLockDelay /
                           pT1PinContext->ulSamplesPerPacket;
            break;
        default:
            ntStatus = STATUS_INVALID_PARAMETER;
            break;
    }

    // Ensure that at least something is sent down to the device
    if ( ulLockFrames == 0 ) {
        ulLockFrames++;
    }

    if ( NT_SUCCESS(ntStatus) ) {
        PKSSTREAM_POINTER pKsStreamPtr;
        ULONG ulAllocSize;

        // Calculate the number of the samples to fill the frames and
        // create the pseudo queue pointer for the zeroed data buffer.
        ulLockSamples = ulLockFrames * pT1PinContext->ulSamplesPerPacket +
                        (( ulLockFrames * pT1PinContext->ulFractionSize ) / MS_PER_SEC);
        ulDelayBytes = ulLockSamples * pT1PinContext->ulBytesPerSample;

        DbgLog( "LockD", ulLockFrames, ulLockSamples, pT1PinContext->ulCurrentSampleRate, pT1PinContext->ulBytesPerSample );

        _DbgPrintF( DEBUGLVL_TERSE, ("[TypeILockDelay] ulLockFrames: %x ulLockSamples: %x DelayBytes %x\n",
                                     ulLockFrames, ulLockSamples, ulDelayBytes));

        // Stream pointer header and the silence buffer share one allocation.
        ulAllocSize = sizeof(KSSTREAM_POINTER) + ulDelayBytes;
        pKsStreamPtr = AllocMem( NonPagedPool, ulAllocSize );
        if ( pKsStreamPtr ) {
            KIRQL Irql;

            KeAcquireSpinLock( &pPinContext->PinSpinLock, &Irql );
            //
            // NOTE: Resetting the sample rate will cause kmixer and usbaudio to be out of sync
            // w.r.t. their leftover fractions.
            //
            // This might have the side effect of breaking synchronous devices, of which none
            // exist as of today, Feb. 21, 2000.
            //
            //pT1PinContext->fSampleRateChanged = FALSE;
            KeReleaseSpinLock( &pPinContext->PinSpinLock, Irql );

            RtlZeroMemory( pKsStreamPtr, ulAllocSize );
            pKsStreamPtr->Pin = pKsPin;
            pKsStreamPtr->OffsetIn.Data = (PUCHAR)(pKsStreamPtr+1);
            pKsStreamPtr->OffsetIn.Count = ulDelayBytes;
            pKsStreamPtr->OffsetIn.Remaining = ulDelayBytes;

            ntStatus = TypeIBuildIsochRequest( pKsStreamPtr, TypeILockDelayCompleteCallback );
            if ( !NT_SUCCESS(ntStatus) ) {
                _DbgPrintF( DEBUGLVL_TERSE,("[TypeILockDelay] Status Error: %x\n", ntStatus ));
            }
        }
        else
            ntStatus = STATUS_INSUFFICIENT_RESOURCES;
    }

    return ntStatus;
}

/*
 * TypeIProcessStreamPtr
 *
 * Per-pass processing for a Type I render pin: recovers from URB errors,
 * applies pending sample-rate changes, issues the lock delay once if
 * required, flushes any staged partial-millisecond data, packetizes the
 * leading-edge stream pointer into a render URB, and polls the sync
 * (feedback) endpoint when one exists and its poll interval elapsed.
 */
NTSTATUS TypeIProcessStreamPtr(
    PKSPIN pKsPin )
{
    PPIN_CONTEXT pPinContext = pKsPin->Context;
    PTYPE1_PIN_CONTEXT pT1PinContext = pPinContext->pType1PinContext;
    PKSSTREAM_POINTER pKsStreamPtr, pKsCloneStreamPtr;
    PKSSTREAM_POINTER_OFFSET pKsStreamPtrOffsetIn;
    PMSEC_BUF_INFO pCurrent1MsBuf;
    KIRQL irql;
    NTSTATUS ntStatus = STATUS_SUCCESS;

    // Check for a data error.  If error flag set abort the pipe and start again.
    if ( pPinContext->fUrbError ) {
        AbortUSBPipe( pPinContext );
    }

    // Apply a pending sample-rate change: derive samples-per-packet and the
    // fractional remainder (samples per second mod 1000) used for carry.
    KeAcquireSpinLock( &pPinContext->PinSpinLock, &irql );
    if ( pT1PinContext->fSampleRateChanged ) {
        pT1PinContext->ulSamplesPerPacket = pT1PinContext->ulCurrentSampleRate / MS_PER_SEC;
        pT1PinContext->ulFractionSize = pT1PinContext->ulCurrentSampleRate % MS_PER_SEC;
        pT1PinContext->fSampleRateChanged = FALSE;
        DbgLog( "T1CSRCh", pT1PinContext->ulCurrentSampleRate,
                           pT1PinContext->ulSamplesPerPacket,
                           pT1PinContext->ulFractionSize,
                           pT1PinContext->ulLeftoverFraction );
    }
    KeReleaseSpinLock( &pPinContext->PinSpinLock, irql );

    // One-shot lock delay (silence) before the first real data.
    if ( pT1PinContext->fLockDelayRequired ) {
        pT1PinContext->fLockDelayRequired = FALSE;
        ntStatus = TypeILockDelay( pKsPin );
    }

    // Get the next Stream pointer from queue
    pKsStreamPtr = KsPinGetLeadingEdgeStreamPointer( pKsPin, KSSTREAM_POINTER_STATE_LOCKED );
    if ( !pKsStreamPtr ) {
        _DbgPrintF(DEBUGLVL_VERBOSE,("[TypeIProcessStreamPtr] Leading edge is NULL\n"));
        return STATUS_SUCCESS;
    }

    DbgLog("T1Proc", pKsPin, pPinContext, pKsStreamPtr, pPinContext->fUrbError);

    // Clone Stream pointer to keep queue moving.
    if ( NT_SUCCESS( KsStreamPointerClone( pKsStreamPtr, NULL, 0, &pKsCloneStreamPtr ) ) ) {

        // Get a pointer to the data information from the stream pointer
        pKsStreamPtrOffsetIn = &pKsCloneStreamPtr->OffsetIn;

        // Set the write offset for position info
        pPinContext->ullWriteOffset += pKsStreamPtrOffsetIn->Count;

        DbgLog("ByteCnt", pKsStreamPtrOffsetIn->Data, pKsStreamPtrOffsetIn->Count, 0, 0);

        // Copy partial ms data to current 1ms buffer and send if full
        if ( pT1PinContext->ulPartialBufferSize ) {
            KeAcquireSpinLock(&pPinContext->PinSpinLock, &irql);
            pCurrent1MsBuf = (PMSEC_BUF_INFO)RemoveHeadList(&pT1PinContext->MSecBufList);
            KeReleaseSpinLock(&pPinContext->PinSpinLock, irql);
            // Top off the staged packet with the head of the new data.
            RtlCopyMemory( pCurrent1MsBuf->pBuffer + pCurrent1MsBuf->ulTransferBufferLength,
                           pKsStreamPtrOffsetIn->Data,
                           pT1PinContext->ulPartialBufferSize );
            pCurrent1MsBuf->ulTransferBufferLength += pT1PinContext->ulPartialBufferSize;

            TypeIBuild1MsecIsocRequest( pCurrent1MsBuf );

            pKsStreamPtrOffsetIn->Remaining -= pT1PinContext->ulPartialBufferSize;
            pKsStreamPtrOffsetIn->Data += pT1PinContext->ulPartialBufferSize;
            pT1PinContext->ulPartialBufferSize = 0;
        }

        // Create the URB for the majority of the data
        ntStatus = TypeIBuildIsochRequest( pKsCloneStreamPtr, TypeICompleteCallback );
        if ( NT_SUCCESS(ntStatus))
            ntStatus = STATUS_SUCCESS;

        // If there is a sync endpoint, poll it for feedback
        if ( pPinContext->pUsbAudioDataRange->pSyncEndpointDescriptor ) {
            ULONG ulCurrentFrame;
            // Signed comparison handles frame-counter wraparound.
            if (NT_SUCCESS( GetCurrentUSBFrame(pPinContext, &ulCurrentFrame)) &&
                (LONG)(ulCurrentFrame-pT1PinContext->SyncEndpointInfo.ulNextPollFrame) >= 0) {
                KeAcquireSpinLock(&pPinContext->PinSpinLock, &irql);
                if ( !pT1PinContext->SyncEndpointInfo.fSyncRequestInProgress ) {
                    pT1PinContext->SyncEndpointInfo.fSyncRequestInProgress = TRUE;
                    KeReleaseSpinLock(&pPinContext->PinSpinLock, irql);
                    TypeIAsyncEndpointPoll( pPinContext->pNextDeviceObject,
                                            &pT1PinContext->SyncEndpointInfo );
                }
                else
                    KeReleaseSpinLock(&pPinContext->PinSpinLock, irql);
            }
        }
    }

    // Unlock the stream pointer.  This will really only unlock after last clone is deleted.
    KsStreamPointerUnlock( pKsStreamPtr, TRUE );

    return ntStatus;
}

/*
 * TypeIStateChange
 *
 * KS pin state transition handler.  STOP resets position counters and the
 * packetization state to the original sample rate; PAUSE resets the rate
 * only for async (sync-endpoint) pins to avoid a costly copy on adaptive
 * endpoints; RUN marks the graph as freshly started for glitch tracing.
 */
NTSTATUS TypeIStateChange(
    PKSPIN pKsPin,
    KSSTATE OldKsState,
    KSSTATE NewKsState )
{
    PPIN_CONTEXT pPinContext = pKsPin->Context;
    PTYPE1_PIN_CONTEXT pT1PinContext = pPinContext->pType1PinContext;
    NTSTATUS ntStatus = STATUS_SUCCESS;
    KIRQL irql;

    switch(NewKsState) {
        case KSSTATE_STOP:
            KeAcquireSpinLock( &pPinContext->PinSpinLock, &irql );
            // Need to reset position counters and stream running flag
            pPinContext->fStreamStartedFlag = FALSE;
            pPinContext->ullWriteOffset = 0;
            pPinContext->ullTotalBytesReturned = 0;
            pPinContext->ulCurrentFrame = 0;
            pPinContext->ulFrameRepeatCount = 0;
            // Reset to original Sample rate
            pT1PinContext->ulCurrentSampleRate = pT1PinContext->ulOriginalSampleRate;
            pT1PinContext->fSampleRateChanged = TRUE;
            pT1PinContext->ulLeftoverFraction = 0;
            KeReleaseSpinLock( &pPinContext->PinSpinLock, irql );
            pPinContext->StarvationDetected = FALSE;
            break;
        case KSSTATE_ACQUIRE:
            break;
        case KSSTATE_PAUSE:
            KeAcquireSpinLock( &pPinContext->PinSpinLock, &irql );
            // Reset to original Sample rate on Async endpoints
            // Don't do for adaptive endpoints, or else we will have to do a copy
            // which is bad for real-time mixing
            if ( pPinContext->pUsbAudioDataRange->pSyncEndpointDescriptor ) {
                pT1PinContext->ulCurrentSampleRate = pT1PinContext->ulOriginalSampleRate;
                pT1PinContext->fSampleRateChanged = TRUE;
                pT1PinContext->ulLeftoverFraction = 0;
            }
            KeReleaseSpinLock( &pPinContext->PinSpinLock, irql );
            break;
        case KSSTATE_RUN:
            pPinContext->GraphJustStarted = TRUE;
            break;
    }

    return ntStatus;
}

/*
 * TypeIRenderStreamInit
 *
 * Allocates and initializes the Type I render pin context, including the
 * page-aligned pool of 1ms staging buffers (none may straddle a page
 * boundary — see comments in the body).
 */
NTSTATUS TypeIRenderStreamInit(
    PKSPIN pKsPin )
{
    PPIN_CONTEXT pPinContext = pKsPin->Context;
    PUSBAUDIO_DATARANGE pUsbAudioDataRange = pPinContext->pUsbAudioDataRange;
    PKSALLOCATOR_FRAMING_EX pKsAllocatorFramingEx;
    PTYPE1_PIN_CONTEXT pT1PinContext;
    PWAVEFORMATEX pWavFormat;
    PMSEC_BUF_INFO pMsInfo;
    ULONG_PTR pMSBuffers;
    ULONG_PTR pUrbs;
    NTSTATUS
ntStatus; ULONG BufferSize; ULONG NumPages, i; // In order to ensure that none of the 1ms buffers cross a page boundary, we // are careful to allocate enough space so that we never have to straddle one // of the audio buffers across a page boundary. We also make sure to adjust // any that would cross a page boundary, up to the start of the next page. // This is to prevent a copy by lower levels of the usb stack, since the UHCD // usb hardware cannot deal with a 1ms block that crosses a page boundary. // Furthermore, all of the 1ms buffers must be quadword aligned on 64 bit machines. // First we calculate how many aligned 1 ms buffers fit in a page. i=PAGE_SIZE/(pPinContext->ulMaxPacketSize + sizeof(PVOID)-1); if (!i) { // If we get here it will be because we finally have USB audio devices // that support such high sampling rates and sample sizes that they require a datarate // higher than 1 PAGE per ms. On x86 that would be 4,096,000 bytes per second. // That is more than the bandwidth of the USB bus, although it can be supported on USB2. // Calculate how many pages per ms we need. i=(pPinContext->ulMaxPacketSize + sizeof(PVOID)-1)/PAGE_SIZE; if ((pPinContext->ulMaxPacketSize + sizeof(PVOID)-1)%PAGE_SIZE) { i++; } // Now calculate the total number of pages that we need. NumPages=NUM_1MSEC_BUFFERS*i; } else { // Now calculate how many pages we need for the 1ms buffers. NumPages=NUM_1MSEC_BUFFERS/i; if (NUM_1MSEC_BUFFERS%i) { NumPages++; } } pPinContext->pType1PinContext=NULL; // Allocate space for Type I stream specific information. // In order to make sure that the system doesn't shift our allocation and thus // invalidate our space calculations and our code for shifting buffers that cross // page boundaries, we round this allocation up to an even number of pages. 
pT1PinContext = AllocMem( NonPagedPool, (( NumPages*PAGE_SIZE + sizeof(TYPE1_PIN_CONTEXT) + NUM_1MSEC_BUFFERS * (GET_ISO_URB_SIZE( 1 ) + sizeof(PVOID)-1) + PAGE_SIZE-1)/PAGE_SIZE)*PAGE_SIZE ); if ( !pT1PinContext ) { return STATUS_INSUFFICIENT_RESOURCES; } pMSBuffers = (ULONG_PTR)pT1PinContext; // Bag the Type1 context for easy cleanup. KsAddItemToObjectBag(pKsPin->Bag, pT1PinContext, FreeMem); // Set pointers for 1 MS buffers and URBs (even though they may not be used) pUrbs = pMSBuffers + NumPages*PAGE_SIZE; pT1PinContext = pPinContext->pType1PinContext = (PTYPE1_PIN_CONTEXT)((pUrbs + (NUM_1MSEC_BUFFERS * (GET_ISO_URB_SIZE(1) + sizeof(PVOID)-1)))&~(sizeof(PVOID)-1)); // Fill in 1ms buffer information structures and init the semaphore pMsInfo = pT1PinContext->MSBufInfos; InitializeListHead(&pT1PinContext->MSecBufList); for (i=0; i<NUM_1MSEC_BUFFERS; i++, pMsInfo++) { pMsInfo->pContext = pPinContext; pMsInfo->pBuffer = (PUCHAR)pMSBuffers; pMsInfo->pUrb = (PURB)pUrbs; // Calculate the location of the next ms buffer. If the next buffer crosses // a page boundary then start it at the beginning of the next page. pMSBuffers+=pPinContext->ulMaxPacketSize+sizeof(PVOID)-1; pMSBuffers&=~(sizeof(PVOID)-1); if ((pMSBuffers^(pMSBuffers+pPinContext->ulMaxPacketSize))&~(PAGE_SIZE-1)) { pMSBuffers&=~(PAGE_SIZE-1); pMSBuffers+=PAGE_SIZE; } // Calculate the next urb location. pUrbs+=GET_ISO_URB_SIZE(1)+sizeof(PVOID)-1; pUrbs&=~(sizeof(PVOID)-1); pMsInfo->pIrp = IoAllocateIrp( pPinContext->pNextDeviceObject->StackSize, FALSE ); if ( !pMsInfo->pIrp ) { return STATUS_INSUFFICIENT_RESOURCES; } // Bag the irps for easy cleanup. 
KsAddItemToObjectBag(pKsPin->Bag, pMsInfo->pIrp, IoFreeIrp); InsertTailList( &pT1PinContext->MSecBufList, &pMsInfo->List ); } // Initialize the semaphore for the 1ms buffer structures KeInitializeSemaphore( &pT1PinContext->MsecBufferSemaphore, NUM_1MSEC_BUFFERS, NUM_1MSEC_BUFFERS ); // Initialize the list head for in use list InitializeListHead(&pT1PinContext->UrbInUseList); // Initialize Packet size and Leftover counters. pWavFormat = &((PKSDATAFORMAT_WAVEFORMATEX)pKsPin->ConnectionFormat)->WaveFormatEx; pT1PinContext->ulOriginalSampleRate = pWavFormat->nSamplesPerSec; pT1PinContext->ulCurrentSampleRate = pWavFormat->nSamplesPerSec; pT1PinContext->ulBytesPerSample = ((ULONG)pWavFormat->wBitsPerSample >> 3) * (ULONG)pWavFormat->nChannels; pT1PinContext->ulPartialBufferSize = 0; pT1PinContext->fSampleRateChanged = TRUE; pT1PinContext->fLockDelayRequired = FALSE; pT1PinContext->ulLeftoverFraction = 0; // Set the current Sample rate ntStatus = SetSampleRate(pKsPin, &pT1PinContext->ulCurrentSampleRate); if (!NT_SUCCESS(ntStatus)) { return ntStatus; } if ( pUsbAudioDataRange->pSyncEndpointDescriptor ) { PSYNC_ENDPOINT_INFO pSyncEndpointInfo = &pT1PinContext->SyncEndpointInfo; PUSB_INTERRUPT_ENDPOINT_DESCRIPTOR pInterruptEndpointDescriptor = (PUSB_INTERRUPT_ENDPOINT_DESCRIPTOR)pUsbAudioDataRange->pSyncEndpointDescriptor; pSyncEndpointInfo->pUrb = AllocMem( NonPagedPool, GET_ISO_URB_SIZE( 1 ) ); if ( !pSyncEndpointInfo->pUrb ) { return STATUS_INSUFFICIENT_RESOURCES; } KsAddItemToObjectBag(pKsPin->Bag, pSyncEndpointInfo->pUrb, FreeMem); pSyncEndpointInfo->pIrp = IoAllocateIrp( pPinContext->pNextDeviceObject->StackSize, FALSE ); if ( !pSyncEndpointInfo->pIrp ) { return STATUS_INSUFFICIENT_RESOURCES; } KsAddItemToObjectBag(pKsPin->Bag, pSyncEndpointInfo->pIrp, IoFreeIrp); pSyncEndpointInfo->fSyncRequestInProgress = FALSE; pSyncEndpointInfo->ulNextPollFrame = 0; pSyncEndpointInfo->hSyncPipeHandle = NULL; pSyncEndpointInfo->pContext = pPinContext; 
pSyncEndpointInfo->ulRefreshRate = 1<<(ULONG)pInterruptEndpointDescriptor->bRefresh; _DbgPrintF( DEBUGLVL_VERBOSE, ("ulRefreshRate: %d\n",pSyncEndpointInfo->ulRefreshRate)); KeInitializeEvent( &pSyncEndpointInfo->SyncPollDoneEvent, SynchronizationEvent, FALSE ); ASSERT(pSyncEndpointInfo->ulRefreshRate >= 32); // Make sure refresh is reasonable for ( i=0; i<pPinContext->ulNumberOfPipes; i++ ) { if ( (ULONG)pPinContext->Pipes[i].EndpointAddress == (ULONG)pUsbAudioDataRange->pSyncEndpointDescriptor->bEndpointAddress ) { pSyncEndpointInfo->hSyncPipeHandle = pPinContext->Pipes[i].PipeHandle; break; } } if ( !pSyncEndpointInfo->hSyncPipeHandle ) { return STATUS_DEVICE_DATA_ERROR; } } // Need to check for lock delay (Note: If async this is illegal) else if (( pUsbAudioDataRange->pAudioEndpointDescriptor->bLockDelayUnits ) && ( pUsbAudioDataRange->pAudioEndpointDescriptor->wLockDelay )) { pT1PinContext->fLockDelayRequired = TRUE; } // Set up allocator framing based on gBufferDuration which is read from // the registry. gBufferDuration is the desired duration in usec. BufferSize = (ULONG)(((pT1PinContext->ulCurrentSampleRate * (ULONGLONG)gBufferDuration) + 0 )/1000000) * pT1PinContext->ulBytesPerSample; // Make sure we always have space for at least one sample. 
if (!BufferSize) { BufferSize = pT1PinContext->ulBytesPerSample; } pKsAllocatorFramingEx = (PKSALLOCATOR_FRAMING_EX)pKsPin->Descriptor->AllocatorFraming; pKsAllocatorFramingEx->FramingItem[0].FramingRange.Range.MinFrameSize = pKsAllocatorFramingEx->FramingItem[0].FramingRange.Range.MaxFrameSize = BufferSize; pKsAllocatorFramingEx->FramingItem[0].FramingRange.Range.Stepping = pT1PinContext->ulBytesPerSample; // Return success return STATUS_SUCCESS; } NTSTATUS TypeIRenderStreamClose( PKSPIN pKsPin ) { PPIN_CONTEXT pPinContext = pKsPin->Context; KIRQL irql; // Should not be necessary since close should never happen while // there are outstanding requests as they have stream pointers attached // Still, it couldn't hurt... USBAudioPinWaitForStarvation( pKsPin ); // If this is an Async endpoint device make sure no Async Poll // requests are still outstanding. if ( pPinContext->pUsbAudioDataRange->pSyncEndpointDescriptor ) { PTYPE1_PIN_CONTEXT pT1PinContext = pPinContext->pType1PinContext; KeAcquireSpinLock(&pPinContext->PinSpinLock, &irql); if ( pT1PinContext->SyncEndpointInfo.fSyncRequestInProgress ) { KeResetEvent( &pT1PinContext->SyncEndpointInfo.SyncPollDoneEvent ); KeReleaseSpinLock(&pPinContext->PinSpinLock, irql); KeWaitForSingleObject( &pT1PinContext->SyncEndpointInfo.SyncPollDoneEvent, Executive, KernelMode, FALSE, NULL ); } else KeReleaseSpinLock(&pPinContext->PinSpinLock, irql); } return STATUS_SUCCESS; }
josehu07/SplitFS
kernel/linux-5.4/drivers/mtd/devices/docg3.h
/* SPDX-License-Identifier: GPL-2.0-or-later */ /* * Handles the M-Systems DiskOnChip G3 chip * * Copyright (C) 2011 <NAME> */ #ifndef _MTD_DOCG3_H #define _MTD_DOCG3_H #include <linux/mtd/mtd.h> /* * Flash memory areas : * - 0x0000 .. 0x07ff : IPL * - 0x0800 .. 0x0fff : Data area * - 0x1000 .. 0x17ff : Registers * - 0x1800 .. 0x1fff : Unknown */ #define DOC_IOSPACE_IPL 0x0000 #define DOC_IOSPACE_DATA 0x0800 #define DOC_IOSPACE_SIZE 0x2000 /* * DOC G3 layout and adressing scheme * A page address for the block "b", plane "P" and page "p": * address = [bbbb bPpp pppp] */ #define DOC_ADDR_PAGE_MASK 0x3f #define DOC_ADDR_BLOCK_SHIFT 6 #define DOC_LAYOUT_NBPLANES 2 #define DOC_LAYOUT_PAGES_PER_BLOCK 64 #define DOC_LAYOUT_PAGE_SIZE 512 #define DOC_LAYOUT_OOB_SIZE 16 #define DOC_LAYOUT_WEAR_SIZE 8 #define DOC_LAYOUT_PAGE_OOB_SIZE \ (DOC_LAYOUT_PAGE_SIZE + DOC_LAYOUT_OOB_SIZE) #define DOC_LAYOUT_WEAR_OFFSET (DOC_LAYOUT_PAGE_OOB_SIZE * 2) #define DOC_LAYOUT_BLOCK_SIZE \ (DOC_LAYOUT_PAGES_PER_BLOCK * DOC_LAYOUT_PAGE_SIZE) /* * ECC related constants */ #define DOC_ECC_BCH_M 14 #define DOC_ECC_BCH_T 4 #define DOC_ECC_BCH_PRIMPOLY 0x4443 #define DOC_ECC_BCH_SIZE 7 #define DOC_ECC_BCH_COVERED_BYTES \ (DOC_LAYOUT_PAGE_SIZE + DOC_LAYOUT_OOB_PAGEINFO_SZ + \ DOC_LAYOUT_OOB_HAMMING_SZ) #define DOC_ECC_BCH_TOTAL_BYTES \ (DOC_ECC_BCH_COVERED_BYTES + DOC_LAYOUT_OOB_BCH_SZ) /* * Blocks distribution */ #define DOC_LAYOUT_BLOCK_BBT 0 #define DOC_LAYOUT_BLOCK_OTP 0 #define DOC_LAYOUT_BLOCK_FIRST_DATA 6 #define DOC_LAYOUT_PAGE_BBT 4 /* * Extra page OOB (16 bytes wide) layout */ #define DOC_LAYOUT_OOB_PAGEINFO_OFS 0 #define DOC_LAYOUT_OOB_HAMMING_OFS 7 #define DOC_LAYOUT_OOB_BCH_OFS 8 #define DOC_LAYOUT_OOB_UNUSED_OFS 15 #define DOC_LAYOUT_OOB_PAGEINFO_SZ 7 #define DOC_LAYOUT_OOB_HAMMING_SZ 1 #define DOC_LAYOUT_OOB_BCH_SZ 7 #define DOC_LAYOUT_OOB_UNUSED_SZ 1 #define DOC_CHIPID_G3 0x200 #define DOC_ERASE_MARK 0xaa #define DOC_MAX_NBFLOORS 4 /* * Flash registers */ #define DOC_CHIPID 0x1000 
#define DOC_TEST 0x1004 #define DOC_BUSLOCK 0x1006 #define DOC_ENDIANCONTROL 0x1008 #define DOC_DEVICESELECT 0x100a #define DOC_ASICMODE 0x100c #define DOC_CONFIGURATION 0x100e #define DOC_INTERRUPTCONTROL 0x1010 #define DOC_READADDRESS 0x101a #define DOC_DATAEND 0x101e #define DOC_INTERRUPTSTATUS 0x1020 #define DOC_FLASHSEQUENCE 0x1032 #define DOC_FLASHCOMMAND 0x1034 #define DOC_FLASHADDRESS 0x1036 #define DOC_FLASHCONTROL 0x1038 #define DOC_NOP 0x103e #define DOC_ECCCONF0 0x1040 #define DOC_ECCCONF1 0x1042 #define DOC_ECCPRESET 0x1044 #define DOC_HAMMINGPARITY 0x1046 #define DOC_BCH_HW_ECC(idx) (0x1048 + idx) #define DOC_PROTECTION 0x1056 #define DOC_DPS0_KEY 0x105c #define DOC_DPS1_KEY 0x105e #define DOC_DPS0_ADDRLOW 0x1060 #define DOC_DPS0_ADDRHIGH 0x1062 #define DOC_DPS1_ADDRLOW 0x1064 #define DOC_DPS1_ADDRHIGH 0x1066 #define DOC_DPS0_STATUS 0x106c #define DOC_DPS1_STATUS 0x106e #define DOC_ASICMODECONFIRM 0x1072 #define DOC_CHIPID_INV 0x1074 #define DOC_POWERMODE 0x107c /* * Flash sequences * A sequence is preset before one or more commands are input to the chip. 
*/ #define DOC_SEQ_RESET 0x00 #define DOC_SEQ_PAGE_SIZE_532 0x03 #define DOC_SEQ_SET_FASTMODE 0x05 #define DOC_SEQ_SET_RELIABLEMODE 0x09 #define DOC_SEQ_READ 0x12 #define DOC_SEQ_SET_PLANE1 0x0e #define DOC_SEQ_SET_PLANE2 0x10 #define DOC_SEQ_PAGE_SETUP 0x1d #define DOC_SEQ_ERASE 0x27 #define DOC_SEQ_PLANES_STATUS 0x31 /* * Flash commands */ #define DOC_CMD_READ_PLANE1 0x00 #define DOC_CMD_SET_ADDR_READ 0x05 #define DOC_CMD_READ_ALL_PLANES 0x30 #define DOC_CMD_READ_PLANE2 0x50 #define DOC_CMD_READ_FLASH 0xe0 #define DOC_CMD_PAGE_SIZE_532 0x3c #define DOC_CMD_PROG_BLOCK_ADDR 0x60 #define DOC_CMD_PROG_CYCLE1 0x80 #define DOC_CMD_PROG_CYCLE2 0x10 #define DOC_CMD_PROG_CYCLE3 0x11 #define DOC_CMD_ERASECYCLE2 0xd0 #define DOC_CMD_READ_STATUS 0x70 #define DOC_CMD_PLANES_STATUS 0x71 #define DOC_CMD_RELIABLE_MODE 0x22 #define DOC_CMD_FAST_MODE 0xa2 #define DOC_CMD_RESET 0xff /* * Flash register : DOC_FLASHCONTROL */ #define DOC_CTRL_VIOLATION 0x20 #define DOC_CTRL_CE 0x10 #define DOC_CTRL_UNKNOWN_BITS 0x08 #define DOC_CTRL_PROTECTION_ERROR 0x04 #define DOC_CTRL_SEQUENCE_ERROR 0x02 #define DOC_CTRL_FLASHREADY 0x01 /* * Flash register : DOC_ASICMODE */ #define DOC_ASICMODE_RESET 0x00 #define DOC_ASICMODE_NORMAL 0x01 #define DOC_ASICMODE_POWERDOWN 0x02 #define DOC_ASICMODE_MDWREN 0x04 #define DOC_ASICMODE_BDETCT_RESET 0x08 #define DOC_ASICMODE_RSTIN_RESET 0x10 #define DOC_ASICMODE_RAM_WE 0x20 /* * Flash register : DOC_ECCCONF0 */ #define DOC_ECCCONF0_WRITE_MODE 0x0000 #define DOC_ECCCONF0_READ_MODE 0x8000 #define DOC_ECCCONF0_AUTO_ECC_ENABLE 0x4000 #define DOC_ECCCONF0_HAMMING_ENABLE 0x1000 #define DOC_ECCCONF0_BCH_ENABLE 0x0800 #define DOC_ECCCONF0_DATA_BYTES_MASK 0x07ff /* * Flash register : DOC_ECCCONF1 */ #define DOC_ECCCONF1_BCH_SYNDROM_ERR 0x80 #define DOC_ECCCONF1_UNKOWN1 0x40 #define DOC_ECCCONF1_PAGE_IS_WRITTEN 0x20 #define DOC_ECCCONF1_UNKOWN3 0x10 #define DOC_ECCCONF1_HAMMING_BITS_MASK 0x0f /* * Flash register : DOC_PROTECTION */ #define DOC_PROTECT_FOUNDRY_OTP_LOCK 
0x01 #define DOC_PROTECT_CUSTOMER_OTP_LOCK 0x02 #define DOC_PROTECT_LOCK_INPUT 0x04 #define DOC_PROTECT_STICKY_LOCK 0x08 #define DOC_PROTECT_PROTECTION_ENABLED 0x10 #define DOC_PROTECT_IPL_DOWNLOAD_LOCK 0x20 #define DOC_PROTECT_PROTECTION_ERROR 0x80 /* * Flash register : DOC_DPS0_STATUS and DOC_DPS1_STATUS */ #define DOC_DPS_OTP_PROTECTED 0x01 #define DOC_DPS_READ_PROTECTED 0x02 #define DOC_DPS_WRITE_PROTECTED 0x04 #define DOC_DPS_HW_LOCK_ENABLED 0x08 #define DOC_DPS_KEY_OK 0x80 /* * Flash register : DOC_CONFIGURATION */ #define DOC_CONF_IF_CFG 0x80 #define DOC_CONF_MAX_ID_MASK 0x30 #define DOC_CONF_VCCQ_3V 0x01 /* * Flash register : DOC_READADDRESS */ #define DOC_READADDR_INC 0x8000 #define DOC_READADDR_ONE_BYTE 0x4000 #define DOC_READADDR_ADDR_MASK 0x1fff /* * Flash register : DOC_POWERMODE */ #define DOC_POWERDOWN_READY 0x80 /* * Status of erase and write operation */ #define DOC_PLANES_STATUS_FAIL 0x01 #define DOC_PLANES_STATUS_PLANE0_KO 0x02 #define DOC_PLANES_STATUS_PLANE1_KO 0x04 /* * DPS key management * * Each floor of docg3 has 2 protection areas: DPS0 and DPS1. These areas span * across block boundaries, and define whether these blocks can be read or * written. * The definition is dynamically stored in page 0 of blocks (2,3) for DPS0, and * page 0 of blocks (4,5) for DPS1. */ #define DOC_LAYOUT_DPS_KEY_LENGTH 8 /** * struct docg3_cascade - Cascade of 1 to 4 docg3 chips * @floors: floors (ie. 
one physical docg3 chip is one floor)
 * @base: IO space to access all chips in the cascade
 * @bch: the BCH correcting control structure
 * @lock: lock to protect docg3 IO space from concurrent accesses
 */
struct docg3_cascade {
	struct mtd_info *floors[DOC_MAX_NBFLOORS];
	void __iomem *base;
	struct bch_control *bch;
	struct mutex lock;
};

/**
 * struct docg3 - DiskOnChip driver private data
 * @dev: the device currently under control
 * @cascade: the cascade this device belongs to
 * @device_id: number of the cascaded DoCG3 device (0, 1, 2 or 3)
 * @if_cfg: if true, reads are on 16bits, else reads are on 8bits
 * @reliable: if 0, docg3 in normal mode, if 1 docg3 in fast mode, if 2 in
 *            reliable mode
 *            Fast mode implies more errors than normal mode.
 *            Reliable mode implies that page 2*n and 2*n+1 are clones.
 * @max_block: highest usable block number (NOTE(review): undocumented in the
 *             original kernel-doc; presumably set from the chip size at probe
 *             time -- confirm against the probe code)
 * @bbt: bad block table cache
 * @oob_write_ofs: offset of the MTD where this OOB should belong (ie. in next
 *                 page_write)
 * @oob_autoecc: if 1, use only bytes 0-7, 15, and fill the others with HW ECC
 *               if 0, use all the 16 bytes.
 * @oob_write_buf: prepared OOB for next page_write
 */
struct docg3 {
	struct device *dev;
	struct docg3_cascade *cascade;
	unsigned int device_id:4;
	unsigned int if_cfg:1;
	unsigned int reliable:2;
	int max_block;
	u8 *bbt;
	loff_t oob_write_ofs;
	int oob_autoecc;
	u8 oob_write_buf[DOC_LAYOUT_OOB_SIZE];
};

/* Logging helpers; all expect a local variable named "docg3" in scope. */
#define doc_err(fmt, arg...) dev_err(docg3->dev, (fmt), ## arg)
#define doc_info(fmt, arg...) dev_info(docg3->dev, (fmt), ## arg)
#define doc_dbg(fmt, arg...) dev_dbg(docg3->dev, (fmt), ## arg)
#define doc_vdbg(fmt, arg...) dev_vdbg(docg3->dev, (fmt), ## arg)

#endif /* _MTD_DOCG3_H */

/*
 * Trace events part
 *
 * This section deliberately sits outside the _MTD_DOCG3_H include guard:
 * define_trace.h re-includes this header with TRACE_HEADER_MULTI_READ set,
 * so the trace definitions must remain reachable on the second pass.
 */
#undef TRACE_SYSTEM
#define TRACE_SYSTEM docg3

#if !defined(_MTD_DOCG3_TRACE) || defined(TRACE_HEADER_MULTI_READ)
#define _MTD_DOCG3_TRACE

#include <linux/tracepoint.h>

TRACE_EVENT(docg3_io,
	    TP_PROTO(int op, int width, u16 reg, int val),
	    TP_ARGS(op, width, reg, val),
	    TP_STRUCT__entry(
		    __field(int, op)
		    __field(unsigned char, width)
		    __field(u16, reg)
		    __field(int, val)),
	    TP_fast_assign(
		    __entry->op = op;
		    __entry->width = width;
		    __entry->reg = reg;
		    __entry->val = val;),
	    TP_printk("docg3: %s%02d reg=%04x, val=%04x",
		      __entry->op ? "write" : "read", __entry->width, __entry->reg,
		      __entry->val)
	);
#endif

/* This part must be outside protection */
#undef TRACE_INCLUDE_PATH
#undef TRACE_INCLUDE_FILE
#define TRACE_INCLUDE_PATH .
#define TRACE_INCLUDE_FILE docg3
#include <trace/define_trace.h>
stereoboy/isaac_sdk_20191213
engine/gems/bayes_filters/examples/ekf_body_1d.cpp
<gh_stars>1-10 /* Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved. NVIDIA CORPORATION and its licensors retain all intellectual property and proprietary rights in and to this software, related documentation and any modifications thereto. Any use, reproduction, disclosure or distribution of this software and related documentation without an express license agreement from NVIDIA CORPORATION is strictly prohibited. */ #include <cmath> #include <random> #include <thread> #include "engine/gems/bayes_filters/extended_kalman_filter.hpp" #include "engine/gems/sight/sight.hpp" #include "engine/gems/state/state.hpp" namespace isaac { struct State : state::State<float, 3> { ISAAC_STATE_VAR(0, position); ISAAC_STATE_VAR(1, speed); ISAAC_STATE_VAR(2, acceleration); }; struct Control : state::State<float, 1> { ISAAC_STATE_VAR(0, jerk); }; struct Observation : state::State<float, 1> { ISAAC_STATE_VAR(0, position); }; auto CreatePredictionModel() { return EkfPredictionModel<State, Control>{ [](State& x, float dt, const Control& u) { x.position() += dt * x.speed(); x.speed() += dt * x.acceleration(); x.acceleration() += dt * u.jerk(); }, [](const State& x, float dt, const Control& u) { EkfPredictJacobian<State> J = EkfPredictJacobian<State>::Zero(); J(State::kI_position, State::kI_position) = 1.0f; J(State::kI_position, State::kI_speed) = dt; J(State::kI_speed, State::kI_speed) = 1.0f; J(State::kI_speed, State::kI_acceleration) = dt; J(State::kI_acceleration, State::kI_acceleration) = 1.0f; return J; }, [](const State& x) { EkfCovariance<State> noise = EkfCovariance<State>::Zero(); noise(State::kI_position, State::kI_position) = 0.1f; noise(State::kI_speed, State::kI_speed) = 0.1f; noise(State::kI_acceleration, State::kI_acceleration) = 1.0f; return noise; }}; } auto CreateObservationModel() { return EkfObservationModel<State, Observation>{ [](const State& x) { Observation z; z.position() = x.position(); return z; }, [](const Observation& lhs, const Observation& rhs) { 
Observation z; z.elements = lhs.elements - rhs.elements; return z; }, [](const State& x) { EkfObserveJacobian<State, Observation> J; J(Observation::kI_position, State::kI_position) = 1.0f; J(Observation::kI_position, State::kI_speed) = 0.0f; J(Observation::kI_position, State::kI_acceleration) = 0.0f; return J; }, [](const State& x, const Observation& z) { EkfCovariance<Observation> noise; noise(Observation::kI_position, Observation::kI_position) = 1.0f; return noise; }}; } void Main() { constexpr int kNumIterations = 5000; constexpr float kDT = 0.05f; auto prediction_model = CreatePredictionModel(); auto observation_model = CreateObservationModel(); float time = 0.0f; State actual; actual.position() = 0.0f; actual.speed() = 0.0f; actual.acceleration() = 0.0f; State state = actual; Control control; EkfCovariance<State> covariance = EkfCovariance<State>::Identity(); EkfCovariance<State> actual_covariance = EkfCovariance<State>::Identity(); std::default_random_engine rng; std::normal_distribution<float> noise_predict_position(0.0f, 0.1f); std::normal_distribution<float> noise_predict_speed(0.0f, 0.1f); std::normal_distribution<float> noise_predict_acceleration(0.0f, 1.0f); std::normal_distribution<float> noise_observe(0.0f, 1.0f); for (int i = 0; i < kNumIterations; i++) { time += kDT; control.jerk() = -std::cos(time); // third derivative of std::sin(time) prediction_model.predict(actual, actual_covariance, kDT, control); actual.position() += noise_predict_position(rng); actual.speed() += noise_predict_speed(rng); actual.acceleration() += noise_predict_acceleration(rng); sight::Plot("EkfExampleRigidBody1D.actual_position", actual.position()); prediction_model.predict(state, covariance, kDT, control); Observation z = observation_model.observe_state(actual); z.position() += noise_observe(rng); observation_model.observe(state, covariance, z); sight::Plot("EkfExampleRigidBody1D.observed_position", z.position()); sight::Plot("EkfExampleRigidBody1D.estimated_position", 
state.position()); std::this_thread::sleep_for(std::chrono::duration<double, std::milli>(1.0f / kDT)); } } } // namespace isaac int main(int argc, char** argv) { isaac::Main(); }
UnProgrammatore/CCQ
serial/src/factor_base.c
#include "factor_base.h"

/*
 * Builds the factor base for the quadratic sieve.
 *
 * Given N, the number to factor, the candidate primes are those produced by
 * the sieve of Eratosthenes below exp((1/2)sqrt(log(N)loglog(N))); among
 * them we keep only the primes p for which the Legendre symbol (N|p) == 1.
 * The first element produced by the sieve is guaranteed to be 2, which is
 * never discarded.
 *
 * Parameters:
 *   N         - the number to factor
 *   erat      - primes from the sieve of Eratosthenes (input, caller-allocated)
 *   dim_erat  - number of entries in erat
 *   fb        - output factor base; caller-allocated with room for up to
 *               dim_erat entries
 *   fb_dim    - out: number of primes actually written to fb
 *   solutions - out: for each prime fb[k], the two solutions of
 *               x^2 = N (mod fb[k]); caller-allocated, same length as fb
 *
 * Bug fixes vs. the original (which did not compile):
 *   - missing ',' between the fb_dim and solutions parameters;
 *   - 'eq' is a pair*, so its members need '->' not '.';
 *   - 'solutions' is an array of pair values, so its elements need '.'
 *     not '->'.
 */
void factor_base_erat(mpz_t N, unsigned int* erat, unsigned int dim_erat,
                      unsigned int* fb, unsigned int* fb_dim, pair* solutions) {
	unsigned int res = 0;  /* number of primes accepted so far */
	for (unsigned int i = 0; i < dim_erat; ++i) {
		pair *eq;
		/* legendre_sol() evaluates (N|erat[i]) and, when it is 1, provides
		 * the two square roots of N modulo erat[i] through eq. */
		int ls = legendre_sol(N, erat[i], &eq);
		if (ls == 1) {
			fb[res] = erat[i];
			solutions[res].sol1 = eq->sol1;
			solutions[res].sol2 = eq->sol2;
			res++;
			/* NOTE(review): if legendre_sol() heap-allocates *eq it is never
			 * freed here -- confirm ownership and free if required. */
		}
	}
	*fb_dim = res;
}

/*
 * Draft kept from the original author (translated): precompute the
 * factor-base size with Pomerance's approximation.
 *
 * mpfr_t pomerance_approx;
 * mpfr_init(pomerance_approx);
 * double d_exp;
 * d_exp = (double)(1/2+num_call)*(log(N)*log(log(N)));
 * d_exp = ceil(d_exp);
 * unsigned int ui_exp = (unsigned int)d_exp
 * mpfr_t e;
 * mpfr_init(e);
 * mpfr_set_d(e, 2,71828, GMP_RNDN);
 * mpfr_pow_ui(pomerance_approx, e, ui_exp, GMP_RNDN );
 */
doom38/jython_v2.2.1
bugtests/test308.py
""" Test import of java class from sys.path zipfile. """ import support import zipfile, time support.compileJava("test308d/test308j.java") def addZipEntry(zip, name, data): entry = zipfile.ZipInfo() entry.filename = name entry.date_time = time.gmtime(time.time()) zip.writestr(entry, data) zip = zipfile.ZipFile("test308.zip", "w", zipfile.ZIP_DEFLATED) addZipEntry(zip, "test308m.py", """ import test308j assert test308j().foo() == "bar" """) zip.write("test308d/test308j.class", "test308j.class") zip.close() import sys sys.path.append("test308.zip") import test308m
pengwu/scapy_env
venv/share/pyshared/pyx/deco.py
# -*- encoding: utf-8 -*-
#
#
# Copyright (C) 2002-2011 <NAME> <<EMAIL>>
# Copyright (C) 2003-2011 <NAME> <<EMAIL>>
# Copyright (C) 2002-2011 <NAME> <<EMAIL>>
#
# This file is part of PyX (http://pyx.sourceforge.net/).
#
# PyX is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# PyX is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PyX; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA

# TODO:
# - should we improve on the arc length -> arg parametrization routine or
#   should we at least factor it out?

import sys, math
import attr, canvas, canvasitem, color, path, normpath, style, trafo, unit, deformer

# sentinel for "argument not given" (distinct from None)
_marker = object()

#
# Decorated path
#

class decoratedpath(canvasitem.canvasitem):
    """Decorated path

    The main purpose of this class is during the drawing (stroking/filling) of
    a path. It collects attributes for the stroke and/or fill operations.
    """

    def __init__(self, path, strokepath=None, fillpath=None,
                 styles=None, strokestyles=None, fillstyles=None,
                 ornaments=None, fillrule=style.fillrule.nonzero_winding):
        # path: the path to be drawn; strokestyles/fillstyles of None mean
        # "do not stroke"/"do not fill" respectively.
        self.path = path

        # global style for stroking and filling and subdps
        self.styles = styles

        # styles which apply only for stroking and filling
        self.strokestyles = strokestyles
        self.fillstyles = fillstyles

        # the decoratedpath can contain additional elements of the
        # path (ornaments), e.g., arrowheads.
        if ornaments is None:
            self.ornaments = canvas.canvas()
        else:
            self.ornaments = ornaments

        # the fillrule is either fillrule.nonzero_winding or fillrule.even_odd
        self.fillrule = fillrule

        # sorted, disjoint list of (begin, end) arc-length ranges excluded
        # from stroking; None until excluderange() is first called
        self.nostrokeranges = None

    def ensurenormpath(self):
        """convert self.path into a normpath"""
        assert self.nostrokeranges is None or isinstance(self.path, path.normpath), "you don't understand what you are doing"
        self.path = self.path.normpath()

    def excluderange(self, begin, end):
        """exclude the arc-length range [begin, end] from stroking

        The range is merged into self.nostrokeranges, keeping that list
        sorted and free of overlaps; strokepath() later cuts these ranges
        out of the stroked path.
        """
        assert isinstance(self.path, path.normpath), "you don't understand what this is about"
        if self.nostrokeranges is None:
            self.nostrokeranges = [(begin, end)]
        else:
            # find the insertion window [ibegin, iend] of ranges that touch
            # or overlap the new range
            ibegin = 0
            while ibegin < len(self.nostrokeranges) and self.nostrokeranges[ibegin][1] < begin:
                ibegin += 1

            if ibegin == len(self.nostrokeranges):
                self.nostrokeranges.append((begin, end))
                return

            iend = len(self.nostrokeranges) - 1
            while 0 <= iend and end < self.nostrokeranges[iend][0]:
                iend -= 1

            if iend == -1:
                self.nostrokeranges.insert(0, (begin, end))
                return

            # merge all overlapped ranges into a single one
            if self.nostrokeranges[ibegin][0] < begin:
                begin = self.nostrokeranges[ibegin][0]
            if end < self.nostrokeranges[iend][1]:
                end = self.nostrokeranges[iend][1]

            self.nostrokeranges[ibegin:iend+1] = [(begin, end)]

    def bbox(self):
        """return the bounding box of the path joined with the ornaments"""
        pathbbox = self.path.bbox()
        ornamentsbbox = self.ornaments.bbox()
        if ornamentsbbox is not None:
            return ornamentsbbox + pathbbox
        else:
            return pathbbox

    def strokepath(self):
        """return the path to be stroked, with nostrokeranges cut out"""
        if self.nostrokeranges:
            splitlist = []
            for begin, end in self.nostrokeranges:
                splitlist.append(begin)
                splitlist.append(end)
            split = self.path.split(splitlist)
            # XXX properly handle closed paths?
            # keep the even-indexed pieces: those outside the excluded ranges
            result = split[0]
            for i in range(2, len(split), 2):
                result += split[i]
            return result
        else:
            return self.path

    def processPS(self, file, writer, context, registry, bbox):
        """write the PostScript code for stroking/filling this path"""
        # draw (stroke and/or fill) the decoratedpath on the canvas
        # while trying to produce an efficient output, e.g., by
        # not writing one path two times

        # small helper
        def _writestyles(styles, context, registry, bbox):
            for style in styles:
                style.processPS(file, writer, context, registry, bbox)

        if self.strokestyles is None and self.fillstyles is None:
            if not len(self.ornaments):
                raise RuntimeError("Path neither to be stroked nor filled nor decorated in another way")
            # just draw additional elements of decoratedpath
            self.ornaments.processPS(file, writer, context, registry, bbox)
            return

        strokepath = self.strokepath()
        fillpath = self.path

        # apply global styles
        if self.styles:
            file.write("gsave\n")
            context = context()
            _writestyles(self.styles, context, registry, bbox)

        if self.fillstyles is not None:
            file.write("newpath\n")
            fillpath.outputPS(file, writer)

            if self.strokestyles is not None and strokepath is fillpath:
                # do efficient stroking + filling if respective paths are identical
                file.write("gsave\n")

                if self.fillstyles:
                    _writestyles(self.fillstyles, context(), registry, bbox)
                if self.fillrule.even_odd:
                    file.write("eofill\n")
                else:
                    file.write("fill\n")
                file.write("grestore\n")

                acontext = context()
                if self.strokestyles:
                    file.write("gsave\n")
                    _writestyles(self.strokestyles, acontext, registry, bbox)

                file.write("stroke\n")
                # take linewidth into account for bbox when stroking a path
                bbox += strokepath.bbox().enlarged_pt(0.5*acontext.linewidth_pt)

                if self.strokestyles:
                    file.write("grestore\n")
            else:
                # only fill fillpath - for the moment
                if self.fillstyles:
                    file.write("gsave\n")
                    _writestyles(self.fillstyles, context(), registry, bbox)

                if self.fillrule.even_odd:
                    file.write("eofill\n")
                else:
                    file.write("fill\n")
                bbox += fillpath.bbox()

                if self.fillstyles:
                    file.write("grestore\n")

        if self.strokestyles is not None and (strokepath is not fillpath or self.fillstyles is None):
            # this is the only relevant case still left
            # Note that a possible stroking has already been done.
            acontext = context()
            if self.strokestyles:
                file.write("gsave\n")
                _writestyles(self.strokestyles, acontext, registry, bbox)

            file.write("newpath\n")
            strokepath.outputPS(file, writer)
            file.write("stroke\n")
            # take linewidth into account for bbox when stroking a path
            bbox += strokepath.bbox().enlarged_pt(0.5*acontext.linewidth_pt)

            if self.strokestyles:
                file.write("grestore\n")

        # now, draw additional elements of decoratedpath
        self.ornaments.processPS(file, writer, context, registry, bbox)

        # restore global styles
        if self.styles:
            file.write("grestore\n")

    def processPDF(self, file, writer, context, registry, bbox):
        """write the PDF code for stroking/filling this path"""
        # draw (stroke and/or fill) the decoratedpath on the canvas

        def _writestyles(styles, context, registry, bbox):
            for style in styles:
                style.processPDF(file, writer, context, registry, bbox)

        # PDF styles need to know whether they are used for stroking or
        # filling; the two helpers below toggle the corresponding flags.
        def _writestrokestyles(strokestyles, context, registry, bbox):
            context.fillattr = 0
            for style in strokestyles:
                style.processPDF(file, writer, context, registry, bbox)
            context.fillattr = 1

        def _writefillstyles(fillstyles, context, registry, bbox):
            context.strokeattr = 0
            for style in fillstyles:
                style.processPDF(file, writer, context, registry, bbox)
            context.strokeattr = 1

        if self.strokestyles is None and self.fillstyles is None:
            if not len(self.ornaments):
                raise RuntimeError("Path neither to be stroked nor filled nor decorated in another way")
            # just draw additional elements of decoratedpath
            self.ornaments.processPDF(file, writer, context, registry, bbox)
            return

        strokepath = self.strokepath()
        fillpath = self.path

        # apply global styles
        if self.styles:
            file.write("q\n") # gsave
            context = context()
            _writestyles(self.styles, context, registry, bbox)

        if self.fillstyles is not None:
            fillpath.outputPDF(file, writer)

            if self.strokestyles is not None and strokepath is fillpath:
                # do efficient stroking + filling
                file.write("q\n") # gsave
                acontext = context()

                if self.fillstyles:
                    _writefillstyles(self.fillstyles, acontext, registry, bbox)
                if self.strokestyles:
                    _writestrokestyles(self.strokestyles, acontext, registry, bbox)

                if self.fillrule.even_odd:
                    file.write("B*\n")
                else:
                    file.write("B\n") # both stroke and fill
                # take linewidth into account for bbox when stroking a path
                bbox += strokepath.bbox().enlarged_pt(0.5*acontext.linewidth_pt)

                file.write("Q\n") # grestore
            else:
                # only fill fillpath - for the moment
                if self.fillstyles:
                    file.write("q\n") # gsave
                    _writefillstyles(self.fillstyles, context(), registry, bbox)

                if self.fillrule.even_odd:
                    file.write("f*\n")
                else:
                    file.write("f\n") # fill
                bbox += fillpath.bbox()

                if self.fillstyles:
                    file.write("Q\n") # grestore

        if self.strokestyles is not None and (strokepath is not fillpath or self.fillstyles is None):
            # this is the only relevant case still left
            # Note that a possible stroking has already been done.
            acontext = context()
            if self.strokestyles:
                file.write("q\n") # gsave
                _writestrokestyles(self.strokestyles, acontext, registry, bbox)

            strokepath.outputPDF(file, writer)
            file.write("S\n") # stroke
            # take linewidth into account for bbox when stroking a path
            bbox += strokepath.bbox().enlarged_pt(0.5*acontext.linewidth_pt)

            if self.strokestyles:
                file.write("Q\n") # grestore

        # now, draw additional elements of decoratedpath
        self.ornaments.processPDF(file, writer, context, registry, bbox)

        # restore global styles
        if self.styles:
            file.write("Q\n") # grestore

#
# Path decorators
#

class deco:
    """decorators

    In contrast to path styles, path decorators depend on the concrete path to
    which they are applied. In particular, they don't make sense without any
    path and can thus not be used in canvas.set!
    """

    def decorate(self, dp, texrunner):
        """apply a style to a given decoratedpath object dp

        decorate accepts a decoratedpath object dp, applies PathStyle by
        modifying dp in place.
        """
        pass

#
# stroked and filled: basic decos which stroked and fill,
# respectively the path
#

class _stroked(deco, attr.exclusiveattr):
    """stroked is a decorator, which draws the outline of the path"""

    def __init__(self, styles=[]):
        attr.exclusiveattr.__init__(self, _stroked)
        self.styles = attr.mergeattrs(styles)
        attr.checkattrs(self.styles, [style.strokestyle])

    def __call__(self, styles=[]):
        # XXX or should we also merge self.styles
        return _stroked(styles)

    def decorate(self, dp, texrunner):
        # mark dp as to-be-stroked with our styles; stroking twice is an error
        if dp.strokestyles is not None:
            raise RuntimeError("Cannot stroke an already stroked path")
        dp.strokestyles = self.styles

# ready-to-use instance; stroked.clear removes any stroked attribute
stroked = _stroked()
stroked.clear = attr.clearclass(_stroked)


class _filled(deco, attr.exclusiveattr):
    """filled is a decorator, which fills the interior of the path"""

    def __init__(self, styles=[]):
        attr.exclusiveattr.__init__(self, _filled)
        self.styles = attr.mergeattrs(styles)
        attr.checkattrs(self.styles, [style.fillstyle])

    def __call__(self, styles=[]):
        # XXX or should we also merge self.styles
        return _filled(styles)

    def decorate(self, dp, texrunner):
        # mark dp as to-be-filled with our styles; filling twice is an error
        if dp.fillstyles is not None:
            raise RuntimeError("Cannot fill an already filled path")
        dp.fillstyles = self.styles

# ready-to-use instance; filled.clear removes any filled attribute
filled = _filled()
filled.clear = attr.clearclass(_filled)

#
# Arrows
#

# helper function which constructs the arrowhead
def _arrowhead(anormpath, arclenfrombegin, direction, size, angle, constriction, constrictionlen):

    """helper routine, which returns an arrowhead from a given anormpath

    - arclenfrombegin: position of arrow in arc length from the start of the path
    - direction: +1 for an arrow pointing along the direction of anormpath or
                 -1 for an arrow pointing opposite to the direction of normpath
    - size: size of the arrow as arc length
    - angle. opening angle
    - constriction: boolean to indicate whether the constriction point is to be
                    taken into account or not
    - constrictionlen: arc length of constriction.
(not used when constriction is false) """ # arc length and coordinates of tip tx, ty = anormpath.at(arclenfrombegin) # construct the template for the arrow by cutting the path at the # corresponding length arrowtemplate = anormpath.split([arclenfrombegin, arclenfrombegin - direction * size])[1] # from this template, we construct the two outer curves of the arrow arrowl = arrowtemplate.transformed(trafo.rotate(-angle/2.0, tx, ty)) arrowr = arrowtemplate.transformed(trafo.rotate( angle/2.0, tx, ty)) # now come the joining backward parts if constriction: # constriction point (cx, cy) lies on path cx, cy = anormpath.at(arclenfrombegin - direction * constrictionlen) arrowcr= path.line(*(arrowr.atend() + (cx,cy))) arrow = arrowl.reversed() << arrowr << arrowcr else: arrow = arrowl.reversed() << arrowr arrow[-1].close() return arrow _base = 6 * unit.v_pt class arrow(deco, attr.attr): """arrow is a decorator which adds an arrow to either side of the path""" def __init__(self, attrs=[], pos=1, reversed=0, size=_base, angle=45, constriction=0.8): self.attrs = attr.mergeattrs([style.linestyle.solid, filled] + attrs) attr.checkattrs(self.attrs, [deco, style.fillstyle, style.strokestyle]) self.pos = pos self.reversed = reversed self.size = size self.angle = angle self.constriction = constriction # calculate absolute arc length of constricition # Note that we have to correct this length because the arrowtemplates are rotated # by self.angle/2 to the left and right. Hence, if we want no constriction, i.e., for # self.constriction = 1, we actually have a length which is approximately shorter # by the given geometrical factor. if self.constriction is not None: self.constrictionlen = self.size * self.constriction * math.cos(math.radians(self.angle/2.0)) else: # if we do not want a constriction, i.e. 
constriction is None, we still # need constrictionlen for cutting the path self.constrictionlen = self.size * 1 * math.cos(math.radians(self.angle/2.0)) def __call__(self, attrs=None, pos=None, reversed=None, size=None, angle=None, constriction=_marker): if attrs is None: attrs = self.attrs if pos is None: pos = self.pos if reversed is None: reversed = self.reversed if size is None: size = self.size if angle is None: angle = self.angle if constriction is _marker: constriction = self.constriction return arrow(attrs=attrs, pos=pos, reversed=reversed, size=size, angle=angle, constriction=constriction) def decorate(self, dp, texrunner): dp.ensurenormpath() anormpath = dp.path arclenfrombegin = (1-self.reversed)*self.constrictionlen + self.pos * (anormpath.arclen() - self.constrictionlen) direction = self.reversed and -1 or 1 arrowhead = _arrowhead(anormpath, arclenfrombegin, direction, self.size, self.angle, self.constriction is not None, self.constrictionlen) # add arrowhead to decoratedpath dp.ornaments.draw(arrowhead, self.attrs) # exlude part of the path from stroking when the arrow is strictly at the begin or the end if self.pos == 0 and self.reversed: dp.excluderange(0, min(self.size, self.constrictionlen)) elif self.pos == 1 and not self.reversed: dp.excluderange(anormpath.end() - min(self.size, self.constrictionlen), anormpath.end()) arrow.clear = attr.clearclass(arrow) # arrows at begin of path barrow = arrow(pos=0, reversed=1) barrow.SMALL = barrow(size=_base/math.sqrt(64)) barrow.SMALl = barrow(size=_base/math.sqrt(32)) barrow.SMAll = barrow(size=_base/math.sqrt(16)) barrow.SMall = barrow(size=_base/math.sqrt(8)) barrow.Small = barrow(size=_base/math.sqrt(4)) barrow.small = barrow(size=_base/math.sqrt(2)) barrow.normal = barrow(size=_base) barrow.large = barrow(size=_base*math.sqrt(2)) barrow.Large = barrow(size=_base*math.sqrt(4)) barrow.LArge = barrow(size=_base*math.sqrt(8)) barrow.LARge = barrow(size=_base*math.sqrt(16)) barrow.LARGe = 
barrow(size=_base*math.sqrt(32))
barrow.LARGE = barrow(size=_base*math.sqrt(64))

# arrows at end of path
earrow = arrow()
earrow.SMALL = earrow(size=_base/math.sqrt(64))
earrow.SMALl = earrow(size=_base/math.sqrt(32))
earrow.SMAll = earrow(size=_base/math.sqrt(16))
earrow.SMall = earrow(size=_base/math.sqrt(8))
earrow.Small = earrow(size=_base/math.sqrt(4))
earrow.small = earrow(size=_base/math.sqrt(2))
earrow.normal = earrow(size=_base)
earrow.large = earrow(size=_base*math.sqrt(2))
earrow.Large = earrow(size=_base*math.sqrt(4))
earrow.LArge = earrow(size=_base*math.sqrt(8))
earrow.LARge = earrow(size=_base*math.sqrt(16))
earrow.LARGe = earrow(size=_base*math.sqrt(32))
earrow.LARGE = earrow(size=_base*math.sqrt(64))


class text(deco, attr.attr):

    """a simple text decorator

    Places a (straight) text label next to a point on the path; the point
    is selected either by relative arc length position or by an absolute
    arc length measured from the begin or the end of the path.
    """

    def __init__(self, text, textattrs=[], angle=0, relangle=None, textdist=0.2,
                 relarclenpos=0.5, arclenfrombegin=None, arclenfromend=None, texrunner=None):
        if arclenfrombegin is not None and arclenfromend is not None:
            raise ValueError("either set arclenfrombegin or arclenfromend")
        self.text = text
        self.textattrs = textattrs
        self.angle = angle
        self.relangle = relangle
        self.textdist = textdist
        self.relarclenpos = relarclenpos
        self.arclenfrombegin = arclenfrombegin
        self.arclenfromend = arclenfromend
        self.texrunner = texrunner

    def decorate(self, dp, texrunner):
        if self.texrunner:
            texrunner = self.texrunner
        # local import to avoid a cyclic import at module load time
        import text as textmodule
        textattrs = attr.mergeattrs([textmodule.halign.center, textmodule.vshift.mathaxis] + self.textattrs)

        dp.ensurenormpath()
        if self.arclenfrombegin is not None:
            param = dp.path.begin() + self.arclenfrombegin
        elif self.arclenfromend is not None:
            param = dp.path.end() - self.arclenfromend
        else:
            # relarcpos is used, when neither arcfrombegin nor arcfromend is given
            param = self.relarclenpos * dp.path.arclen()
        x, y = dp.path.at(param)

        if self.relangle is not None:
            # angle relative to the local path direction: transform a unit
            # vector through the local trafo and take its direction
            a = dp.path.trafo(param).apply_pt(math.cos(self.relangle*math.pi/180), math.sin(self.relangle*math.pi/180))
            b = dp.path.trafo(param).apply_pt(0, 0)
            angle = math.atan2(a[1] - b[1], a[0] - b[0])
        else:
            # absolute angle in the canvas coordinate system
            angle = self.angle*math.pi/180
        t = texrunner.text(x, y, self.text, textattrs)
        t.linealign(self.textdist, math.cos(angle), math.sin(angle))
        dp.ornaments.insert(t)


class curvedtext(deco, attr.attr):

    """a text decorator for curved text

    - text: is typeset along the path to which this decorator is applied
    - relarclenpos: position for the base point of the text (default: 0)
    - arlenfrombegin, arclenfromend: alternative ways of specifying the position of the base point;
      use of relarclenpos, arclenfrombegin and arclenfromend is mutually exclusive
    - textattrs, texrunner: standard text arguments (defaults: [] resp None)
    """

    # defaulttextattrs = [textmodule.halign.center] # TODO: not possible due to cyclic import issue

    def __init__(self, text, textattrs=[], relarclenpos=0.5, arclenfrombegin=None, arclenfromend=None, texrunner=None, exclude=None):
        if arclenfrombegin is not None and arclenfromend is not None:
            raise ValueError("either set arclenfrombegin or arclenfromend")
        self.text = text
        self.textattrs = textattrs
        self.relarclenpos = relarclenpos
        self.arclenfrombegin = arclenfrombegin
        self.arclenfromend = arclenfromend
        self.texrunner = texrunner
        self.exclude = exclude

    def decorate(self, dp, texrunner):
        # typeset the text in single-character mode and place each character
        # individually along the path via the local path transformation
        if self.texrunner:
            texrunner = self.texrunner
        # local import to avoid a cyclic import at module load time
        import text as textmodule
        self.defaulttextattrs = [textmodule.halign.center]

        dp.ensurenormpath()
        if self.arclenfrombegin is not None:
            textpos = dp.path.begin() + self.arclenfrombegin
        elif self.arclenfromend is not None:
            textpos = dp.path.end() - self.arclenfromend
        else:
            # relarcpos is used if neither arcfrombegin nor arcfromend is given
            textpos = self.relarclenpos * dp.path.arclen()

        textattrs = self.defaulttextattrs + self.textattrs
        t = texrunner.text(0, 0, self.text, textattrs, singlecharmode=1)

        t.ensuredvicanvas()
        c = canvas.canvas()
        for item in t.dvicanvas.items:
            bbox = item.bbox()
            if bbox:
                # shift the character so its center sits at x=0, then map it
                # onto the path at arc length textpos+x
                x = item.bbox().center()[0]
                atrafo = dp.path.trafo(textpos+x)
                c.insert(item, [trafo.translate(-x, 0), atrafo])
                if self.exclude is not None:
                    # keep the stroked path away from the character
                    dp.excluderange(textpos+bbox.left()-self.exclude, textpos+bbox.right()+self.exclude)
            else:
                # items without a bbox (e.g. color specials) are kept as-is
                c.insert(item)
        dp.ornaments.insert(c)


class shownormpath(deco, attr.attr):

    # debugging decorator: visualizes the normpath structure by drawing
    # curves in green, lines in blue, control polygons dashed in red,
    # control points as red dots and subpath item endpoints as dots

    def decorate(self, dp, texrunner):
        r_pt = 2
        dp.ensurenormpath()
        # pass 1: the subpath items themselves (curves green, lines blue)
        for normsubpath in dp.path.normsubpaths:
            for i, normsubpathitem in enumerate(normsubpath.normsubpathitems):
                if isinstance(normsubpathitem, normpath.normcurve_pt):
                    dp.ornaments.stroke(normpath.normpath([normpath.normsubpath([normsubpathitem])]), [color.rgb.green])
                else:
                    dp.ornaments.stroke(normpath.normpath([normpath.normsubpath([normsubpathitem])]), [color.rgb.blue])
        # pass 2: control polygons and control points of the curves
        for normsubpath in dp.path.normsubpaths:
            for i, normsubpathitem in enumerate(normsubpath.normsubpathitems):
                if isinstance(normsubpathitem, normpath.normcurve_pt):
                    dp.ornaments.stroke(path.line_pt(normsubpathitem.x0_pt, normsubpathitem.y0_pt,
                                                     normsubpathitem.x1_pt, normsubpathitem.y1_pt),
                                        [style.linestyle.dashed, color.rgb.red])
                    dp.ornaments.stroke(path.line_pt(normsubpathitem.x2_pt, normsubpathitem.y2_pt,
                                                     normsubpathitem.x3_pt, normsubpathitem.y3_pt),
                                        [style.linestyle.dashed, color.rgb.red])
                    dp.ornaments.draw(path.circle_pt(normsubpathitem.x1_pt, normsubpathitem.y1_pt, r_pt), [filled([color.rgb.red])])
                    dp.ornaments.draw(path.circle_pt(normsubpathitem.x2_pt, normsubpathitem.y2_pt, r_pt), [filled([color.rgb.red])])
        # pass 3: endpoints of all subpath items
        for normsubpath in dp.path.normsubpaths:
            for i, normsubpathitem in enumerate(normsubpath.normsubpathitems):
                if not i:
                    x_pt, y_pt = normsubpathitem.atbegin_pt()
                    dp.ornaments.draw(path.circle_pt(x_pt, y_pt, r_pt), [filled])
                x_pt, y_pt = normsubpathitem.atend_pt()
                dp.ornaments.draw(path.circle_pt(x_pt, y_pt, r_pt), [filled])


class linehatched(deco, attr.exclusiveattr, attr.clearclass):

    """draws a pattern with explicit lines

    This class acts as a drop-in replacement for postscript patterns
    from the pattern module which are not understood by some printers"""

    def
__init__(self, dist, angle, strokestyles=[], cross=0): attr.clearclass.__init__(self, _filled) attr.exclusiveattr.__init__(self, linehatched) self.dist = dist self.angle = angle self.strokestyles = attr.mergeattrs([style.linewidth.THIN] + strokestyles) attr.checkattrs(self.strokestyles, [style.strokestyle]) self.cross = cross def __call__(self, dist=None, angle=None, strokestyles=None, cross=None): if dist is None: dist = self.dist if angle is None: angle = self.angle if strokestyles is None: strokestyles = self.strokestyles if cross is None: cross = self.cross return linehatched(dist, angle, strokestyles, cross) def _decocanvas(self, angle, dp, texrunner): dp.ensurenormpath() dist_pt = unit.topt(self.dist) c = canvas.canvas([canvas.clip(dp.path)]) llx_pt, lly_pt, urx_pt, ury_pt = dp.path.bbox().highrestuple_pt() center_pt = 0.5*(llx_pt+urx_pt), 0.5*(lly_pt+ury_pt) radius_pt = 0.5*math.hypot(urx_pt-llx_pt, ury_pt-lly_pt) + dist_pt n = int(2*radius_pt / dist_pt) + 1 for i in range(n): x_pt = center_pt[0] - radius_pt + i*dist_pt c.stroke(path.line_pt(x_pt, center_pt[1]-radius_pt, x_pt, center_pt[1]+radius_pt), [trafo.rotate_pt(angle, center_pt[0], center_pt[1])] + self.strokestyles) return c def decorate(self, dp, texrunner): dp.ornaments.insert(self._decocanvas(self.angle, dp, texrunner)) if self.cross: dp.ornaments.insert(self._decocanvas(self.angle+90, dp, texrunner)) def merge(self, attrs): # act as attr.clearclass and as attr.exclusiveattr at the same time newattrs = attr.exclusiveattr.merge(self, attrs) return attr.clearclass.merge(self, newattrs) linehatched.clear = attr.clearclass(linehatched) _hatch_base = 0.1 * unit.v_cm linehatched0 = linehatched(_hatch_base, 0) linehatched0.SMALL = linehatched0(_hatch_base/math.sqrt(64)) linehatched0.SMALL = linehatched0(_hatch_base/math.sqrt(64)) linehatched0.SMALl = linehatched0(_hatch_base/math.sqrt(32)) linehatched0.SMAll = linehatched0(_hatch_base/math.sqrt(16)) linehatched0.SMall = 
linehatched0(_hatch_base/math.sqrt(8)) linehatched0.Small = linehatched0(_hatch_base/math.sqrt(4)) linehatched0.small = linehatched0(_hatch_base/math.sqrt(2)) linehatched0.normal = linehatched0(_hatch_base) linehatched0.large = linehatched0(_hatch_base*math.sqrt(2)) linehatched0.Large = linehatched0(_hatch_base*math.sqrt(4)) linehatched0.LArge = linehatched0(_hatch_base*math.sqrt(8)) linehatched0.LARge = linehatched0(_hatch_base*math.sqrt(16)) linehatched0.LARGe = linehatched0(_hatch_base*math.sqrt(32)) linehatched0.LARGE = linehatched0(_hatch_base*math.sqrt(64)) linehatched45 = linehatched(_hatch_base, 45) linehatched45.SMALL = linehatched45(_hatch_base/math.sqrt(64)) linehatched45.SMALl = linehatched45(_hatch_base/math.sqrt(32)) linehatched45.SMAll = linehatched45(_hatch_base/math.sqrt(16)) linehatched45.SMall = linehatched45(_hatch_base/math.sqrt(8)) linehatched45.Small = linehatched45(_hatch_base/math.sqrt(4)) linehatched45.small = linehatched45(_hatch_base/math.sqrt(2)) linehatched45.normal = linehatched45(_hatch_base) linehatched45.large = linehatched45(_hatch_base*math.sqrt(2)) linehatched45.Large = linehatched45(_hatch_base*math.sqrt(4)) linehatched45.LArge = linehatched45(_hatch_base*math.sqrt(8)) linehatched45.LARge = linehatched45(_hatch_base*math.sqrt(16)) linehatched45.LARGe = linehatched45(_hatch_base*math.sqrt(32)) linehatched45.LARGE = linehatched45(_hatch_base*math.sqrt(64)) linehatched90 = linehatched(_hatch_base, 90) linehatched90.SMALL = linehatched90(_hatch_base/math.sqrt(64)) linehatched90.SMALl = linehatched90(_hatch_base/math.sqrt(32)) linehatched90.SMAll = linehatched90(_hatch_base/math.sqrt(16)) linehatched90.SMall = linehatched90(_hatch_base/math.sqrt(8)) linehatched90.Small = linehatched90(_hatch_base/math.sqrt(4)) linehatched90.small = linehatched90(_hatch_base/math.sqrt(2)) linehatched90.normal = linehatched90(_hatch_base) linehatched90.large = linehatched90(_hatch_base*math.sqrt(2)) linehatched90.Large = 
linehatched90(_hatch_base*math.sqrt(4)) linehatched90.LArge = linehatched90(_hatch_base*math.sqrt(8)) linehatched90.LARge = linehatched90(_hatch_base*math.sqrt(16)) linehatched90.LARGe = linehatched90(_hatch_base*math.sqrt(32)) linehatched90.LARGE = linehatched90(_hatch_base*math.sqrt(64)) linehatched135 = linehatched(_hatch_base, 135) linehatched135.SMALL = linehatched135(_hatch_base/math.sqrt(64)) linehatched135.SMALl = linehatched135(_hatch_base/math.sqrt(32)) linehatched135.SMAll = linehatched135(_hatch_base/math.sqrt(16)) linehatched135.SMall = linehatched135(_hatch_base/math.sqrt(8)) linehatched135.Small = linehatched135(_hatch_base/math.sqrt(4)) linehatched135.small = linehatched135(_hatch_base/math.sqrt(2)) linehatched135.normal = linehatched135(_hatch_base) linehatched135.large = linehatched135(_hatch_base*math.sqrt(2)) linehatched135.Large = linehatched135(_hatch_base*math.sqrt(4)) linehatched135.LArge = linehatched135(_hatch_base*math.sqrt(8)) linehatched135.LARge = linehatched135(_hatch_base*math.sqrt(16)) linehatched135.LARGe = linehatched135(_hatch_base*math.sqrt(32)) linehatched135.LARGE = linehatched135(_hatch_base*math.sqrt(64)) crosslinehatched0 = linehatched(_hatch_base, 0, cross=1) crosslinehatched0.SMALL = crosslinehatched0(_hatch_base/math.sqrt(64)) crosslinehatched0.SMALl = crosslinehatched0(_hatch_base/math.sqrt(32)) crosslinehatched0.SMAll = crosslinehatched0(_hatch_base/math.sqrt(16)) crosslinehatched0.SMall = crosslinehatched0(_hatch_base/math.sqrt(8)) crosslinehatched0.Small = crosslinehatched0(_hatch_base/math.sqrt(4)) crosslinehatched0.small = crosslinehatched0(_hatch_base/math.sqrt(2)) crosslinehatched0.normal = crosslinehatched0 crosslinehatched0.large = crosslinehatched0(_hatch_base*math.sqrt(2)) crosslinehatched0.Large = crosslinehatched0(_hatch_base*math.sqrt(4)) crosslinehatched0.LArge = crosslinehatched0(_hatch_base*math.sqrt(8)) crosslinehatched0.LARge = crosslinehatched0(_hatch_base*math.sqrt(16)) crosslinehatched0.LARGe = 
crosslinehatched0(_hatch_base*math.sqrt(32)) crosslinehatched0.LARGE = crosslinehatched0(_hatch_base*math.sqrt(64)) crosslinehatched45 = linehatched(_hatch_base, 45, cross=1) crosslinehatched45.SMALL = crosslinehatched45(_hatch_base/math.sqrt(64)) crosslinehatched45.SMALl = crosslinehatched45(_hatch_base/math.sqrt(32)) crosslinehatched45.SMAll = crosslinehatched45(_hatch_base/math.sqrt(16)) crosslinehatched45.SMall = crosslinehatched45(_hatch_base/math.sqrt(8)) crosslinehatched45.Small = crosslinehatched45(_hatch_base/math.sqrt(4)) crosslinehatched45.small = crosslinehatched45(_hatch_base/math.sqrt(2)) crosslinehatched45.normal = crosslinehatched45 crosslinehatched45.large = crosslinehatched45(_hatch_base*math.sqrt(2)) crosslinehatched45.Large = crosslinehatched45(_hatch_base*math.sqrt(4)) crosslinehatched45.LArge = crosslinehatched45(_hatch_base*math.sqrt(8)) crosslinehatched45.LARge = crosslinehatched45(_hatch_base*math.sqrt(16)) crosslinehatched45.LARGe = crosslinehatched45(_hatch_base*math.sqrt(32)) crosslinehatched45.LARGE = crosslinehatched45(_hatch_base*math.sqrt(64)) class colorgradient(deco, attr.attr): """inserts pieces of the path in different colors""" def __init__(self, grad, attrs=[], steps=20): self.attrs = attrs self.grad = grad self.steps = steps def decorate(self, dp, texrunner): dp.ensurenormpath() l = dp.path.arclen() colors = [self.grad.select(n, self.steps) for n in range(self.steps)] colors.reverse() params = dp.path.arclentoparam([l*i/float(self.steps) for i in range(self.steps)]) params.reverse() c = canvas.canvas() # treat the end pieces separately c.stroke(dp.path.split(params[1])[1], attr.mergeattrs([colors[0]] + self.attrs)) for n in range(1,self.steps-1): c.stroke(dp.path.split([params[n-1],params[n+1]])[1], attr.mergeattrs([colors[n]] + self.attrs)) c.stroke(dp.path.split(params[-2])[0], attr.mergeattrs([colors[-1]] + self.attrs)) dp.ornaments.insert(c) class brace(deco, attr.attr): r"""draws a nicely curled brace In most cases, the 
original line is not wanted
    use canvas.canvas.draw(..) for it

    Geometrical parameters:

                    inner /\ strokes
       ____________/      \__________
      /     bar               bar    \
     / outer                          \
       strokes

    totalheight       distance from the jaws to the middle cap
    barthickness      thickness of the main bars
    innerstrokesthickness
                      thickness of the two ending strokes
    outerstrokesthickness
                      thickness of the inner strokes at the middle cap
    innerstrokesrelheight
                      height of the inner/outer strokes, relative to the total height
    outerstrokesrelheight
                      this determines the angle of the main bars!
                      should be around 0.5
    Note: if innerstrokesrelheight + outerstrokesrelheight == 1 then the main
          bars will be aligned parallel to the connecting line between the
          endpoints
    outerstrokesangle angle of the two ending strokes
    innerstrokesangle angle between the inner strokes at the middle cap
    slantstrokesangle extra slanting of the inner/outer strokes
    innerstrokessmoothness
                      smoothing parameter for the inner + outer strokes
    outerstrokessmoothness
                      should be around 1 (allowed: [0,infty))
    middlerelpos      position of the middle cap (0 == left, 1 == right)
    """
    # This code is experimental because it is unclear
    # how the brace fits into the concepts of PyX
    #
    # Some thoughts:
    # - a brace needs to be decoratable with text
    #   it needs stroking and filling attributes
    # - the brace is not really a box:
    #   it has two "anchor" points that are important for aligning it to other things
    #   and one "anchor" point (plus direction) for aligning other things
    # - a brace is not a deformer:
    #   it does not look at anything else than begin/endpoint of a path
    # - a brace might be a connector (which is to be dissolved into the box concept later?)

    def __init__(self, reverse=1, stretch=None, dist=None, fillattrs=[],
                 totalheight=12*unit.x_pt,
                 barthickness=0.5*unit.x_pt, innerstrokesthickness=0.25*unit.x_pt, outerstrokesthickness=0.25*unit.x_pt,
                 innerstrokesrelheight=0.6, outerstrokesrelheight=0.7,
                 innerstrokesangle=30, outerstrokesangle=25, slantstrokesangle=5,
                 innerstrokessmoothness=2.0, outerstrokessmoothness=2.5,
                 middlerelpos=0.5):
        self.fillattrs = fillattrs
        self.reverse = reverse
        self.stretch = stretch
        self.dist = dist
        self.totalheight = totalheight
        self.barthickness = barthickness
        self.innerstrokesthickness = innerstrokesthickness
        self.outerstrokesthickness = outerstrokesthickness
        self.innerstrokesrelheight = innerstrokesrelheight
        self.outerstrokesrelheight = outerstrokesrelheight
        self.innerstrokesangle = innerstrokesangle
        self.outerstrokesangle = outerstrokesangle
        self.slantstrokesangle = slantstrokesangle
        self.innerstrokessmoothness = innerstrokessmoothness
        self.outerstrokessmoothness = outerstrokessmoothness
        self.middlerelpos = middlerelpos

    def __call__(self, **kwargs):
        # return a modified copy; any parameter not passed keeps its current
        # value.  NOTE(review): dict.has_key is Python-2-only syntax — the
        # whole module appears to target Python 2 (cf. the old-style classes).
        for name in ["reverse", "stretch", "dist", "fillattrs",
                     "totalheight", "barthickness", "innerstrokesthickness", "outerstrokesthickness",
                     "innerstrokesrelheight", "outerstrokesrelheight", "innerstrokesangle", "outerstrokesangle",
                     "slantstrokesangle", "innerstrokessmoothness", "outerstrokessmoothness",
                     "middlerelpos"]:
            if not kwargs.has_key(name):
                kwargs[name] = self.__dict__[name]
        return brace(**kwargs)

    def _halfbracepath_pt(self, length_pt, height_pt, ilength_pt, olength_pt, # <<<
                          ithick_pt, othick_pt, bthick_pt, cos_iangle, sin_iangle,
                          cos_oangle, sin_oangle, cos_slangle, sin_slangle):
        # construct one half of the brace outline in pt coordinates;
        # all angles are passed pre-evaluated as sin/cos pairs

        ismooth = self.innerstrokessmoothness
        osmooth = self.outerstrokessmoothness

        # these two parameters are not important enough to be seen outside
        inner_cap_param = 1.5
        outer_cap_param = 2.5
        outerextracurved = 0.6 # in (0, 1]
        # 1.0 will lead to F=G, the outer strokes will not be curved at their ends.
        # The smaller, the more curvature

        # build an orientation path (three straight lines)
        #
        #      \q1
        #     / \
        #    /   \
        #  _/     \______________________________________q5
        #  q2      q3                  q4                 \
        #                                                  \
        #                                                   \
        #                                                    \q6
        #
        # get the points for that:
        q1 = (0, height_pt - inner_cap_param * ithick_pt + 0.5*ithick_pt/sin_iangle)
        q2 = (q1[0] + ilength_pt * sin_iangle,
              q1[1] - ilength_pt * cos_iangle)
        q6 = (length_pt, 0)
        q5 = (q6[0] - olength_pt * sin_oangle,
              q6[1] + olength_pt * cos_oangle)
        bardir = (q5[0] - q2[0], q5[1] - q2[1])
        bardirnorm = math.hypot(*bardir)
        bardir = (bardir[0]/bardirnorm, bardir[1]/bardirnorm)
        ismoothlength_pt = ilength_pt * ismooth
        osmoothlength_pt = olength_pt * osmooth
        if bardirnorm < ismoothlength_pt + osmoothlength_pt:
            # NOTE(review): the second division uses the already-rescaled
            # ismoothlength_pt in its denominator, so the two lengths are not
            # scaled by the same factor — looks unintended; verify against
            # upstream before changing.
            ismoothlength_pt = bardirnorm * ismoothlength_pt / (ismoothlength_pt + osmoothlength_pt)
            osmoothlength_pt = bardirnorm * osmoothlength_pt / (ismoothlength_pt + osmoothlength_pt)
        q3 = (q2[0] + ismoothlength_pt * bardir[0],
              q2[1] + ismoothlength_pt * bardir[1])
        q4 = (q5[0] - osmoothlength_pt * bardir[0],
              q5[1] - osmoothlength_pt * bardir[1])

        #
        #    P _O
        #     /|\ \A2
        #    / A1\  \
        #   /     \  \B2C2________D2___________E2_______F2___G2
        #           \ \______________________________________   \
        #         B1,C1          D1            E1      F1  G1\   \
        #                                                      \   \H2
        #                                                       H1\_/I2
        #                                                          I1
        #
        # the halfbraces meet in P and A1:
        P = (0, height_pt)
        A1 = (0, height_pt - inner_cap_param * ithick_pt)
        # A2 is A1, shifted by the inner thickness
        A2 = (A1[0] + ithick_pt * cos_iangle,
              A1[1] + ithick_pt * sin_iangle)
        s, t = deformer.intersection(P, A2, (cos_slangle, sin_slangle), (sin_iangle, -cos_iangle))
        O = (P[0] + s * cos_slangle,
             P[1] + s * sin_slangle)

        # from D1 to E1 is the straight part of the brace
        # also back from E2 to D1
        D1 = (q3[0] + bthick_pt * bardir[1],
              q3[1] - bthick_pt * bardir[0])
        D2 = (q3[0] - bthick_pt * bardir[1],
              q3[1] + bthick_pt * bardir[0])
        E1 = (q4[0] + bthick_pt * bardir[1],
              q4[1] - bthick_pt * bardir[0])
        E2 = (q4[0] - bthick_pt * bardir[1],
              q4[1] + bthick_pt * bardir[0])
        # I1, I2 are the control points at the outer stroke
        I1 = (q6[0] -
0.5 * othick_pt * cos_oangle, q6[1] - 0.5 * othick_pt * sin_oangle) I2 = (q6[0] + 0.5 * othick_pt * cos_oangle, q6[1] + 0.5 * othick_pt * sin_oangle) # get the control points for the curved parts of the brace s, t = deformer.intersection(A1, D1, (sin_iangle, -cos_iangle), bardir) B1 = (D1[0] + t * bardir[0], D1[1] + t * bardir[1]) s, t = deformer.intersection(A2, D2, (sin_iangle, -cos_iangle), bardir) B2 = (D2[0] + t * bardir[0], D2[1] + t * bardir[1]) s, t = deformer.intersection(E1, I1, bardir, (-sin_oangle, cos_oangle)) G1 = (E1[0] + s * bardir[0], E1[1] + s * bardir[1]) s, t = deformer.intersection(E2, I2, bardir, (-sin_oangle, cos_oangle)) G2 = (E2[0] + s * bardir[0], E2[1] + s * bardir[1]) # at the inner strokes: use curvature zero at both ends C1 = B1 C2 = B2 # at the outer strokes: use curvature zero only at the connection to # the straight part F1 = (outerextracurved * G1[0] + (1 - outerextracurved) * E1[0], outerextracurved * G1[1] + (1 - outerextracurved) * E1[1]) F2 = (outerextracurved * G2[0] + (1 - outerextracurved) * E2[0], outerextracurved * G2[1] + (1 - outerextracurved) * E2[1]) # the tip of the outer stroke, endpoints of the bezier curve H1 = (I1[0] - outer_cap_param * othick_pt * sin_oangle, I1[1] + outer_cap_param * othick_pt * cos_oangle) H2 = (I2[0] - outer_cap_param * othick_pt * sin_oangle, I2[1] + outer_cap_param * othick_pt * cos_oangle) #for qq in [A1,B1,C1,D1,E1,F1,G1,H1,I1, # A2,B2,C2,D2,E2,F2,G2,H2,I2, # O,P # ]: # cc.fill(path.circle(qq[0], qq[1], 0.5), [color.rgb.green]) # now build the right halfbrace bracepath = path.path(path.moveto_pt(*A1)) bracepath.append(path.curveto_pt(B1[0], B1[1], C1[0], C1[1], D1[0], D1[1])) bracepath.append(path.lineto_pt(E1[0], E1[1])) bracepath.append(path.curveto_pt(F1[0], F1[1], G1[0], G1[1], H1[0], H1[1])) # the tip of the right halfbrace bracepath.append(path.curveto_pt(I1[0], I1[1], I2[0], I2[1], H2[0], H2[1])) # the rest of the right halfbrace bracepath.append(path.curveto_pt(G2[0], G2[1], F2[0], 
F2[1], E2[0], E2[1])) bracepath.append(path.lineto_pt(D2[0], D2[1])) bracepath.append(path.curveto_pt(C2[0], C2[1], B2[0], B2[1], A2[0], A2[1])) # the tip in the middle of the brace bracepath.append(path.curveto_pt(O[0], O[1], O[0], O[1], P[0], P[1])) return bracepath # >>> def _bracepath(self, x0_pt, y0_pt, x1_pt, y1_pt): # <<< height_pt = unit.topt(self.totalheight) totallength_pt = math.hypot(x1_pt - x0_pt, y1_pt - y0_pt) leftlength_pt = self.middlerelpos * totallength_pt rightlength_pt = totallength_pt - leftlength_pt ithick_pt = unit.topt(self.innerstrokesthickness) othick_pt = unit.topt(self.outerstrokesthickness) bthick_pt = unit.topt(self.barthickness) # create the left halfbrace with positive slanting # because we will mirror this part cos_iangle = math.cos(math.radians(0.5*self.innerstrokesangle - self.slantstrokesangle)) sin_iangle = math.sin(math.radians(0.5*self.innerstrokesangle - self.slantstrokesangle)) cos_oangle = math.cos(math.radians(self.outerstrokesangle - self.slantstrokesangle)) sin_oangle = math.sin(math.radians(self.outerstrokesangle - self.slantstrokesangle)) cos_slangle = math.cos(math.radians(-self.slantstrokesangle)) sin_slangle = math.sin(math.radians(-self.slantstrokesangle)) ilength_pt = self.innerstrokesrelheight * height_pt / cos_iangle olength_pt = self.outerstrokesrelheight * height_pt / cos_oangle bracepath = self._halfbracepath_pt(leftlength_pt, height_pt, ilength_pt, olength_pt, ithick_pt, othick_pt, bthick_pt, cos_iangle, sin_iangle, cos_oangle, sin_oangle, cos_slangle, sin_slangle).reversed().transformed(trafo.mirror(90)) # create the right halfbrace with negative slanting cos_iangle = math.cos(math.radians(0.5*self.innerstrokesangle + self.slantstrokesangle)) sin_iangle = math.sin(math.radians(0.5*self.innerstrokesangle + self.slantstrokesangle)) cos_oangle = math.cos(math.radians(self.outerstrokesangle + self.slantstrokesangle)) sin_oangle = math.sin(math.radians(self.outerstrokesangle + self.slantstrokesangle)) 
cos_slangle = math.cos(math.radians(-self.slantstrokesangle)) sin_slangle = math.sin(math.radians(-self.slantstrokesangle)) ilength_pt = self.innerstrokesrelheight * height_pt / cos_iangle olength_pt = self.outerstrokesrelheight * height_pt / cos_oangle bracepath = bracepath << self._halfbracepath_pt(rightlength_pt, height_pt, ilength_pt, olength_pt, ithick_pt, othick_pt, bthick_pt, cos_iangle, sin_iangle, cos_oangle, sin_oangle, cos_slangle, sin_slangle) return bracepath.transformed( # two trafos for matching the given endpoints trafo.translate_pt(x0_pt, y0_pt) * trafo.rotate_pt(math.degrees(math.atan2(y1_pt-y0_pt, x1_pt-x0_pt))) * # one trafo to move the brace's left outer stroke to zero trafo.translate_pt(leftlength_pt, 0)) # >>> def decorate(self, dp, texrunner): dp.ensurenormpath() x0_pt, y0_pt = dp.path.atbegin_pt() x1_pt, y1_pt = dp.path.atend_pt() if self.reverse: x0_pt, y0_pt, x1_pt, y1_pt = x1_pt, y1_pt, x0_pt, y0_pt if self.stretch is not None: xm, ym = 0.5*(x0_pt+x1_pt), 0.5*(y0_pt+y1_pt) x0_pt, y0_pt = xm + self.stretch*(x0_pt-xm), ym + self.stretch*(y0_pt-ym) x1_pt, y1_pt = xm + self.stretch*(x1_pt-xm), ym + self.stretch*(y1_pt-ym) if self.dist is not None: d = unit.topt(self.dist) dx, dy = dp.path.rotation_pt(dp.path.begin()).apply_pt(0, 1) x0_pt += d*dx; y0_pt += d*dy dx, dy = dp.path.rotation_pt(dp.path.end()).apply_pt(0, 1) x1_pt += d*dx; y1_pt += d*dy dp.ornaments.fill(self._bracepath(x0_pt, y0_pt, x1_pt, y1_pt), self.fillattrs) brace.clear = attr.clearclass(brace) leftbrace = brace(reverse=0, middlerelpos=0.55, innerstrokesrelheight=0.6, outerstrokesrelheight=0.7, slantstrokesangle=-10) rightbrace = brace(reverse=1, middlerelpos=0.45, innerstrokesrelheight=0.6, outerstrokesrelheight=0.7, slantstrokesangle=10) belowbrace = brace(reverse=1, middlerelpos=0.55, innerstrokesrelheight=0.7, outerstrokesrelheight=0.9, slantstrokesangle=-10) abovebrace = brace(reverse=0, middlerelpos=0.45, innerstrokesrelheight=0.7, outerstrokesrelheight=0.9, 
slantstrokesangle=-10) straightbrace = brace(innerstrokesrelheight=0.5, outerstrokesrelheight=0.5, innerstrokesangle=30, outerstrokesangle=30, slantstrokesangle=0, innerstrokessmoothness=1.0, outerstrokessmoothness=1.0)
negarineh/PIOSP
PIOSP/app/controllers/surveyResults.js
/*eslint no-undef: "error"*/
/*eslint-env node*/

var SurveyResults = require('../models/surveyResults');

/**
 * Creates a survey-result record from a participant's submission.
 *
 * @param {string} options received answers
 * @param {string} descriptions received descriptions
 * @param {string} photo received name of photo (path with a 6-char prefix and a 4-char extension)
 * @param {number} id received participant id
 * @returns {undefined} the record is persisted as a side effect
 */
module.exports.saveResults = function(options, descriptions, photo, id){
    var surveyResults = new SurveyResults();
    surveyResults.answer = options;
    // Fix: the parameter was misspelled "descripitons" in the original.
    surveyResults.description = descriptions;
    // Strip the leading path prefix (6 chars) and the file extension (4 chars).
    surveyResults.photo = photo.slice(6, photo.length-4);
    surveyResults.Id = id;
    surveyResults.category = findCategory(photo, options);
    surveyResults.save();

    /**
     * Derives the ground-truth category from the photo file name, falling
     * back to the participant's own "Don't know" answer.
     * NOTE(review): returns undefined when neither matches — confirm whether
     * an explicit fallback category is wanted before changing behavior.
     */
    function findCategory(photoNames, options) {
        if (photoNames.indexOf('Bee_') > -1) return 'Bee';
        if (photoNames.indexOf('Wasp') > -1) return 'Wasp';
        if (photoNames.indexOf('Beetle') > -1) return 'Beetle';
        if (photoNames.indexOf('Fly') > -1) return 'Fly';
        if ((photoNames.indexOf('Butter-Moth') > -1)||(photoNames.indexOf('butterfly') > -1)) return 'Moth/Butterfly';
        if (options.indexOf("Don't know") > -1) return "Don't know";
    }
};

/**
 * Returns all answers in the surveyResults collection.
 *
 * @param {function} callback node-style callback(err, docs)
 */
module.exports.reportAnswers = function (callback) {
    SurveyResults.find({}, callback);
};

/**
 * Returns a page of results for the given answer (newest first).
 *
 * @param {string} answers selected answer to filter by
 * @param {number} limit page size limit
 * @param {number} size number of documents to skip (page offset)
 * @param {function} callback node-style callback(err, docs)
 */
module.exports.reportAnswersCategory = function (answers, limit, size, callback) {
    SurveyResults.find({answer: answers})
        .sort({ _id: -1 })
        .limit(limit)
        .skip(size)
        .exec(callback);
};

/**
 * Returns all results for the given answer (no pagination).
 *
 * @param {string} answers selected answer to filter by
 * @param {function} callback node-style callback(err, docs)
 */
module.exports.reportOnAnswersCategory = function (answers, callback) {
    SurveyResults.find({answer: answers}, callback);
};

/**
 * Counts how many results carry the given answer.
 *
 * @param {string} answers answer to count
 * @param {function} callback node-style callback(err, count)
 */
module.exports.reportAnswersCategoryCount = function (answers, callback) {
    SurveyResults.count({answer: answers}, callback);
};

/**
 * Returns a page of results for the given photo name (newest first).
 *
 * @param {string} photos selected photo name
 * @param {number} limit page size limit
 * @param {number} size number of documents to skip (page offset)
 * @param {function} callback node-style callback(err, docs)
 */
module.exports.reportAnswersPhotoId = function (photos, limit, size, callback) {
    SurveyResults.find({photo: photos})
        .sort({ _id: -1 })
        .limit(limit)
        .skip(size)
        .exec(callback);
};

/**
 * Counts how many results reference the given photo name.
 *
 * @param {string} photos photo name to count
 * @param {function} callback node-style callback(err, count)
 */
module.exports.reportAnswersPhotoIdCount = function (photos, callback) {
    SurveyResults.count({photo: photos}, callback);
};

/**
 * Calculates the number of correct answers per category
 * (an answer is "correct" when it equals the record's category).
 *
 * @param {function} callback node-style callback(err, docs)
 */
module.exports.equal = function (callback){
    SurveyResults.aggregate([{
        $group: {
            _id: "$category",
            "count": { $sum: 1 }, // simply count all questions per category
            "correct": { $sum: { // and sum up the correct ones in a field called "correct"
                $cond: [ // ...where "correct ones" means
                    { $eq: [ "$category", "$answer" ] }, // that "category" needs to match "answer"
                    1,
                    0
                ]
            } }
        }
    }, {
        $project: { // this is just to effectively rename the "_id" field into "category" - may or may not be needed
            _id: 0,
            "category": "$_id",
            "count": "$count",
            "correct": "$correct"
        }
    }]).exec(callback);
};

/**
 * Calculates the number of correct AND incorrect answers per category.
 *
 * @param {function} callback node-style callback(err, docs)
 */
module.exports.notEqual = function (callback){
    SurveyResults.aggregate([{
        $group: {
            _id: "$category",
            "count": { $sum: 1 }, // simply count all questions per category
            "inCorrect": { $sum: { // and sum up the incorrect ones in a field called "inCorrect"
                $cond: [
                    { $ne: [ "$category", "$answer" ] }, // "category" does NOT match "answer"
                    1,
                    0
                ]
            } },
            "correct": { $sum: { // and sum up the correct ones in a field called "correct"
                $cond: [
                    { $eq: [ "$category", "$answer" ] }, // "category" matches "answer"
                    1,
                    0
                ]
            } }
        }
    }, {
        $project: { // this is just to effectively rename the "_id" field into "category" - may or may not be needed
            _id: 0,
            "category": "$_id",
            "count": "$count",
            "correct": "$correct",
            "inCorrect": "$inCorrect",
        }
    }]).exec(callback);
};

/**
 * Calculates the overall totals of correct and incorrect answers
 * (single group over the whole collection).
 *
 * @param {function} callback node-style callback(err, docs)
 */
module.exports.totalCorrectAnswers = function (callback){
    SurveyResults.aggregate([{
        $group: {
            _id: "answers",
            "count": { $sum: 1 }, // simply count all questions
            "correct": { $sum: {
                $cond: [
                    { $eq: [ "$category", "$answer" ] },
                    1,
                    0
                ]
            } },
            "inCorrect": { $sum: {
                $cond: [
                    { $ne: [ "$category", "$answer" ] },
                    1,
                    0
                ]
            } },
        }
    }, {
        $project: {
            _id: 0,
            "category": "$_id",
            "correct": "$correct",
            "inCorrect": "$inCorrect"
        }
    }]).exec(callback);
};

/**
 * Returns all survey results (same query as reportAnswers).
 *
 * @param {function} callback node-style callback(err, docs)
 */
module.exports.surveyResultsUpdate = function(callback){
    SurveyResults.find({}, callback);
};
clegoues/cil
test/small2/regparm0.c
<reponame>clegoues/cil // regparm0.c // test of the regparm(0) problem in linux/arch/i386/kernel/signal.c // first, problematic prototype; basically, the regparm(0) is // parsed as associated with the return type (int), and hence a // no-op; the regparm(3) should be what's attached to do_signal __attribute__((regparm(0))) int do_signal(int *regs, int *oldset) __attribute__((regparm(2))) __attribute__((regparm(3))); // call this function int main() { int r=6, o=5; return do_signal(&o, &r) - 11; } // now an implementation which will die if its args are mis-passed int do_signal(int *regs, int *oldset) { return *regs + *oldset; }
code-krypt/maggi
maggi-container/src/test/java/com/drykode/maggi/container/drools/sample/DroolsSampleCodeGenerator.java
package com.drykode.maggi.container.drools.sample; import com.drykode.maggi.container.engines.drools.models.SessionInput; import com.drykode.maggi.container.engines.drools.models.SessionOutputCollector; public class DroolsSampleCodeGenerator { public static void main(String[] args) { SessionInput sessionInput = SessionInput.builder().input("abc").build(); SessionOutputCollector soc = new SessionOutputCollector(); droolsCode(sessionInput, soc); } private static void droolsCode(SessionInput si, SessionOutputCollector soc) { String result = si.getInput(); soc.add(result); } }
StJivko/SoftUni
PBJava/src/NestedLoopLab/Travelling.java
package NestedLoopLab; import java.util.Scanner; public class Travelling { public static void main(String[] args) { Scanner scanner = new Scanner(System.in); double savings = 0; double saved = 0; while (true) { String country = scanner.nextLine(); if (country.equals("End")){ break; } double moneyNeeded = Double.parseDouble(scanner.nextLine()); while (moneyNeeded > savings ) { savings = Double.parseDouble(scanner.nextLine()); saved += savings; if (saved >= moneyNeeded){ saved =0; savings = 0; break; } } System.out.printf("Going to %s!%n", country); } } }
JamesLoveCurry/bione_input
biapp-frs-web/target/biapp-frs-web/js/datashow/jQuery.exLabel.js
/**
 * create by kanglg on 2015.07.21
 * ver 0.2.2
 *
 * jQuery plugin providing a "label pad": a container of closable/checkable
 * label items with a small event system and one-way/two-way text binding.
 */
(function($){

    // Base class: stores options, registers "onXxx" options as event handlers.
    function Base(options) {
        this.options = options;
        this.events = {};
        this.build();
    }

    // Data binding: syncs elements carrying data-bind-<id> attributes with a
    // pub/sub channel "<id>:change"; returns the channel object.
    function DataBinder(object_id) {
        var pubSub = jQuery({});
        var data_attr = "bind-" + object_id, message = object_id + ":change";
        // DOM -> model: publish changes of bound form fields
        $(document).on("change", "[data-" + data_attr + "]", function(evt) {
            var $input = $(this);
            pubSub.trigger(message, [ $input.data(data_attr), $input.val() ]);
        });
        // model -> DOM: update all elements bound to the changed property
        pubSub.on(message, function(evt, prop_name, new_val) {
            $("[data-" + data_attr + "=" + prop_name + "]").each(function() {
                var $bound = jQuery(this);
                if ($bound.is("input, textarea, select")) {
                    $bound.val(new_val);
                } else {
                    $bound.html(new_val);
                }
            });
        });
        return pubSub;
    }

    // Generates sequential ids like "Label1000", "Label1001", ...
    function getId(prev) {
        prev = prev || getId.managerIdPrev;
        var id = prev + (1000 + getId.managerCount);
        getId.managerCount++;
        return id;
    }
    getId.managerIdPrev = 'id';
    getId.managerCount = 0;

    $.extend(Base.prototype, {
        // Initialization: every option named "onXxx" becomes a handler for
        // event "xxx" (first letter lower-cased).
        build: function() {
            var options = this.options, target = this;
            if (options) {
                var pn = "", name = "";
                $.each(options, function(k, v) {
                    if (k.indexOf('on') == 0) {
                        name = k.substr(2);
                        pn = name.substr(0, 1).toLowerCase() + name.substr(1);
                        target.bind(pn, v);
                    }
                });
            }
        },
        // Bind a handler to a named event (lazily creates the Callbacks list).
        bind: function(arg, handler) {
            if (typeof arg == "string") {
                var cal = this.events[arg];
                if (!cal) {
                    cal = $.Callbacks();
                    this.events[arg] = cal;
                }
                cal.add(handler);
            }
        },
        // Fire a named event.
        // NOTE(review): $.Callbacks().fire() returns the Callbacks object, so
        // callers comparing the result to false (see addLabel/closeLabel)
        // never see a veto — confirm before relying on "before*" cancellation.
        trigger: function(arg, data) {
            var cal = this.events[arg];
            if (cal) {
                cal.fire(data);
            }
        },
        // Unbind one handler, or all handlers of the event when none given.
        unbind: function(arg, handler) {
            var cal = this.events[arg];
            if (cal) {
                if (handler) {
                    cal.remove(handler);
                } else {
                    cal.empty();
                }
            }
        }
    });

    // Label item component: one label inside the pad.
    function LabelItem(options, pad) {
        Base.call(this, options);
        this.pad = pad;
        $.extend(this, options);
        this._init();
        this._render();
    };
    LabelItem.prototype = new Base();

    $.extend(LabelItem.prototype, {
        // Wire this item's text to its DataBinder channel.
        _init: function() {
            var binder = this._binder = new DataBinder(this.lId);
            binder.on(this.lId + ":change", this, function(evt, attr_name, new_val, initiator) {
                if (!initiator) {
                    evt.data.setText(new_val);
                }
            });
        },
        // Build the item's DOM: text node, optional close icon, optional
        // checkbox behavior, css and click handler.
        _render: function() {
            var t = this;
            var $dom = $('<div class="label-item"/>').attr({
                'lId': this.lId
            });
            var $text = $('<div class="text"/>');
            $text.text(this.text);
            $text.attr('data-bind-' + this.lId, 'text');
            $dom.append($text);
            this.content = $dom;
            $dom.bind('click', this, function() {
                t.trigger('click', t);
            });
            if (this.css) {
                this.setCss(this.css);
            }
            if (this.showClose && this.showClose == true) {
                $dom.addClass('close');
                var close = $('<div class="icon"></div>').appendTo($dom);
                close.bind('click', this, t.close);
            }
            if (this.isCheck != null) {
                var icon = $('<div class="icon"></div>').appendTo($dom);
                $dom.addClass('check');
                if (this.isCheck == false) {
                    $dom.addClass('uncheck');
                }
                $dom.bind('click.check', this, t.check);
                $dom.css('cursor', 'pointer')
            }
        },
        // Close-icon click handler: fires 'close' and removes via the pad.
        close: function(e) {
            var l = e.data;
            l.trigger('close');
            l.pad.closeLabel(l.lId);
        },
        // Click handler toggling the checked state; fires 'check'.
        check: function(e) {
            var l = e.data;
            l.isCheck = !l.isCheck;
            var $dom = l.content;
            if (l.isCheck == true) {
                $dom.removeClass('uncheck');
            } else {
                $dom.addClass('uncheck');
            }
            l.trigger('check', [l.isCheck, l]);
        },
        // Update the label text and publish the change to bound elements.
        setText: function(val) {
            this.text = val;
            this._binder.trigger(this.lId + ":change", [ 'text', val, this ] );
        },
        // Apply css to the item's root element.
        setCss: function(css) {
            this.css = css;
            this.content.css(css);
        }
    });

    // The label pad bound to a jQuery target element.
    function ExLabel(target, options) {
        Base.call(this, options);
        this.target = target;
        this.data = {};
        this._render();
    }

    $.extend(ExLabel.prototype, Base.prototype, {
        _render: function() {
            var tar = this.target;
            tar.addClass('labelPad');
        },
        // Next unique label id.
        getId: function() {
            return getId('Label');
        },
        // Remove all items from the DOM and forget them.
        clear: function() {
            this.target.empty();
            this.data = {};
        },
        // Insert a new label before/after an existing one.
        addBefore: function(label, param) {
            this.addLabel(param, label, true);
        },
        addAfter: function(label, param) {
            this.addLabel(param, label, false);
        },
        // Add one label (or an array of labels). Returns the augmented data
        // object of the created label, or undefined when nothing was added.
        addLabel: function(param, target, isBefore) {
            if (!param) {
                return;
            }
            var data = $.extend({
                lId: this.getId()
            }, param);
            if (this.trigger('beforeAdd', data) == false) {
                return;
            }
            if (param instanceof Array == true) {
                for (var i = 0; i < param.length; i++) {
                    this.addLabel(param[i]);
                }
                return;
            }
            // Fix: the original first assigned `var label = {content:..., text:...}`
            // and immediately shadowed it with the LabelItem below; the object
            // (and its detached DOM nodes) was dead code and has been removed.
            var label = new LabelItem(data, this);
            label.events = $.extend({}, this.events, label.events);
            // data.label = label;
            this.data[data.lId] = label;
            if (target) {
                if (true == isBefore) {
                    target.label.content.before(label.content);
                } else {
                    target.label.content.after(label.content);
                }
            } else {
                label.content.appendTo(this.target);
            }
            this.trigger('afterAdd', data);
            return data;
        },
        // getLabel()            -> all labels
        // getLabel(lId)         -> labels with that id
        // getLabel(prop, value) -> labels whose property equals value
        getLabel: function() {
            var data = this.data;
            var result = [];
            var args = arguments;
            $.each(data, function(i, n) {
                if (args.length == 0
                        || (args.length == 1 && n.lId == args[0])
                        || (args.length == 2 && n[args[0]] != null && n[args[0]] != undefined && n[args[0]] == args[1])) {
                    result.push(n);
                }
            });
            return result;
        },
        // Close a label: fires before/after hooks and the optional
        // beforeCloseLabel option, then removes the item.
        closeLabel: function(lId) {
            var label = this.data[lId];
            if (this.trigger('beforeClose', label) == false) {
                return;
            }
            if(this.options && this.options.beforeCloseLabel && this.options.beforeCloseLabel(label) == false)
                return false;
            if (label) {
                label.content.remove();
            }
            this.trigger('afterClose', label);
            delete this.data[lId];
        },
        // Remove a label without firing any events.
        removeLabel: function(lId) {
            var label = this.data[lId];
            if (label) {
                label.content.remove();
            }
            delete this.data[lId];
        },
        // Remove all labels (sic: original public name kept for compatibility).
        removeAllLable : function(){
            var base = this;
            $.each(base.data, function(i, n) {
                base.removeLabel(i);
            });
        }
    });

    // jQuery plugin entry point.
    $.fn.extend({
        exLabel: function(options) {
            return new ExLabel(this, options);
        }
    });
})(jQuery);
karanchawla/ai_for_robotics
5_deep_learning/solution/01_DL_framework/Support.py
# Copyright 2017 <NAME>, ASL, ETH Zurich, Switzerland
# Copyright 2017 <NAME>, ASL, ETH Zurich, Switzerland
# Copyright 2017 <NAME>, ASL, ETH Zurich, Switzerland

import numpy as np


class Variables():
    """Variable structure for a network's variables/parameters and their gradients.

    Holds two parallel lists of numpy arrays (``weights`` and ``biases``) and
    supports elementwise arithmetic (scalar multiply, add, subtract) plus
    equality comparison, which is used e.g. for gradient updates.
    """
    # Fix: the original also declared ``weights = []`` / ``biases = []`` as
    # class attributes — shared mutable state across all instances. They were
    # always shadowed by __init__, so removing them is behavior-preserving for
    # normal use and eliminates the pitfall.

    def __init__(self):
        self.weights = []  # list of np.ndarray, one entry per layer
        self.biases = []   # list of np.ndarray, parallel to self.weights

    def __len__(self):
        """Number of layers; raises ValueError on a weights/biases mismatch."""
        if len(self.weights) == len(self.biases):
            return len(self.weights)
        else:
            # Fix: the original printed a message and executed a bare `raise`
            # with no active exception, which itself fails with
            # "RuntimeError: No active exception to re-raise".
            raise ValueError(
                'Dimension mismatch: %d weight entries vs %d bias entries.'
                % (len(self.weights), len(self.biases)))

    def __mul__(self, factor):
        """Return a new Variables with every entry scaled by ``factor``."""
        new_p = Variables()
        for w, b in zip(self.weights, self.biases):
            new_p.weights.append(w * factor)
            new_p.biases.append(b * factor)
        return new_p

    def __add__(self, other_variables):
        """Return a new Variables with entrywise sums of both operands."""
        assert len(self.weights) == len(
            other_variables.weights), 'Number of weight entries have to match.'
        assert len(self.biases) == len(
            other_variables.biases), 'Number of bias entries have to match.'
        new_p = Variables()
        for w, b, o_w, o_b in zip(self.weights, self.biases,
                                  other_variables.weights,
                                  other_variables.biases):
            new_p.weights.append(w + o_w)
            new_p.biases.append(b + o_b)
        return new_p

    def __sub__(self, other_variables):
        """Entrywise difference, implemented as self + (-1) * other."""
        return self.__add__(other_variables * (-1))

    def __eq__(self, other_variables):
        """True iff all weight and bias arrays are elementwise equal.

        NOTE(review): comparing against a non-Variables object raises
        (AttributeError/AssertionError) instead of returning NotImplemented;
        kept as-is to preserve behavior.
        """
        assert len(self.weights) == len(
            other_variables.weights), 'Number of weight entries have to match.'
        assert len(self.biases) == len(
            other_variables.biases), 'Number of bias entries have to match.'
        variables_equal = True
        for i in range(len(self.weights)):
            if not np.all(self.weights[i] == other_variables.weights[i]):
                variables_equal = False
                break
            if not np.all(self.biases[i] == other_variables.biases[i]):
                variables_equal = False
                break
        return variables_equal

    def __ne__(self, other_variables):
        return not self.__eq__(other_variables)


# Methods
def computeScore(network, x, labels):
    """Classification accuracy of ``network`` on rows of ``x``.

    A sample counts as correct when the argmax of ``network.output(row)``
    matches the argmax of the corresponding one-hot ``labels`` row.
    Returns a float in [0, 1].
    """
    n_samples = x.shape[0]
    correct_classifications = 0.0
    for i in range(n_samples):
        if np.argmax(network.output(x[i, :])) == np.argmax(labels[i, :]):
            correct_classifications += 1.0
    return correct_classifications / float(n_samples)
cthoyt/pybel-tools
src/pybel_tools/document_utils.py
# -*- coding: utf-8 -*-

"""Utilities to merge multiple BEL documents on the same topic."""

import logging
from typing import Iterable, Mapping, Optional, Set, TextIO, Union
from xml.etree import ElementTree

import pandas as pd
import requests
from bel_resources import make_knowledge_header

from pybel.utils import ensure_quotes

__all__ = [
    'write_boilerplate',
]

logger = logging.getLogger(__name__)

# TogoWS endpoints for PubMed lookups, formatted with a PMID
abstract_url_fmt = "http://togows.dbcls.jp/entry/ncbi-pubmed/{}/abstract"
title_url_fmt = "http://togows.dbcls.jp/entry/ncbi-pubmed/{}/title"
#: SO gives short citation information
so_url_fmt = "http://togows.dbcls.jp/entry/ncbi-pubmed/{}/so"


def make_pubmed_abstract_group(pmids: Iterable[Union[str, int]]) -> Iterable[str]:
    """Build a skeleton for the citations' statements.

    Fetches each PubMed entry's title and abstract over the network (TogoWS).

    :param pmids: A list of PubMed identifiers
    :return: An iterator over the lines of the citation section
    """
    for pmid in set(pmids):
        yield ''

        res = requests.get(title_url_fmt.format(pmid))
        title = res.content.decode('utf-8').strip()

        yield f'SET Citation = {{"{title}", "{pmid}"}}'

        res = requests.get(abstract_url_fmt.format(pmid))
        abstract = res.content.decode('utf-8').strip()

        yield f'SET Evidence = "{abstract}"'
        yield '\nUNSET Evidence\nUNSET Citation'


def _sanitize(s):
    """Strip and un-wrap a string; passes None through unchanged."""
    if s is not None:
        return s.strip().replace('\n', '')


#: Allows for querying the Entrez Gene Summary utility by formatting with an entrez id or list of comma seperated ids
PUBMED_GENE_QUERY_URL = 'https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esummary.fcgi?db=gene&id={}'


def get_entrez_gene_data(entrez_ids: Iterable[Union[str, int]]):
    """Get gene info from Entrez.

    :return: A dict from Entrez uid to ``{'summary': ..., 'description': ...}``
    """
    url = PUBMED_GENE_QUERY_URL.format(','.join(str(x).strip() for x in entrez_ids))
    response = requests.get(url)
    tree = ElementTree.fromstring(response.content)

    return {
        element.attrib['uid']: {
            'summary': _sanitize(element.find('Summary').text),
            'description': element.find('Description').text
        }
        for element in tree.findall('./DocumentSummarySet/DocumentSummary')
    }


def make_pubmed_gene_group(entrez_ids: Iterable[Union[str, int]]) -> Iterable[str]:
    """Build a skeleton for gene summaries.

    NOTE(review): this duplicates the fetch/parse logic of
    :func:`get_entrez_gene_data`; consider consolidating.

    :param entrez_ids: A list of Entrez Gene identifiers to query the PubMed service
    :return: An iterator over statement lines for NCBI Entrez Gene summaries
    """
    url = PUBMED_GENE_QUERY_URL.format(','.join(str(x).strip() for x in entrez_ids))
    response = requests.get(url)
    tree = ElementTree.fromstring(response.content)

    for x in tree.findall('./DocumentSummarySet/DocumentSummary'):
        yield '\n# {}'.format(x.find('Description').text)
        yield 'SET Citation = {{"Other", "PubMed Gene", "{}"}}'.format(x.attrib['uid'])
        yield 'SET Evidence = "{}"'.format(x.find('Summary').text.strip().replace('\n', ''))
        yield '\nUNSET Evidence\nUNSET Citation'


def write_boilerplate(
    name: str,
    version: Optional[str] = None,
    description: Optional[str] = None,
    authors: Optional[str] = None,
    contact: Optional[str] = None,
    copyright: Optional[str] = None,
    licenses: Optional[str] = None,
    disclaimer: Optional[str] = None,
    namespace_url: Optional[Mapping[str, str]] = None,
    namespace_patterns: Optional[Mapping[str, str]] = None,
    annotation_url: Optional[Mapping[str, str]] = None,
    annotation_patterns: Optional[Mapping[str, str]] = None,
    annotation_list: Optional[Mapping[str, Set[str]]] = None,
    pmids: Optional[Iterable[Union[str, int]]] = None,
    entrez_ids: Optional[Iterable[Union[str, int]]] = None,
    file: Optional[TextIO] = None,
) -> None:
    """Write a boilerplate BEL document, with standard document metadata, definitions.

    :param name: The unique name for this BEL document
    :param contact: The email address of the maintainer
    :param description: A description of the contents of this document
    :param authors: The authors of this document
    :param version: The version. Defaults to ``1.0.0``.
    :param copyright: Copyright information about this document
    :param licenses: The license applied to this document
    :param disclaimer: The disclaimer for this document
    :param namespace_url: an optional dictionary of {str name: str URL} of namespaces
    :param namespace_patterns: An optional dictionary of {str name: str regex} namespaces
    :param annotation_url: An optional dictionary of {str name: str URL} of annotations
    :param annotation_patterns: An optional dictionary of {str name: str regex} of regex annotations
    :param annotation_list: An optional dictionary of {str name: set of names} of list annotations
    :param pmids: A list of PubMed identifiers to auto-populate with citation and abstract
    :param entrez_ids: A list of Entrez identifiers to autopopulate the gene summary as evidence
    :param file: A writable file or file-like. If None, defaults to :data:`sys.stdout`
    """
    lines = make_knowledge_header(
        name=name,
        version=version or '1.0.0',
        description=description,
        authors=authors,
        contact=contact,
        copyright=copyright,
        licenses=licenses,
        disclaimer=disclaimer,
        namespace_url=namespace_url,
        namespace_patterns=namespace_patterns,
        annotation_url=annotation_url,
        annotation_patterns=annotation_patterns,
        annotation_list=annotation_list,
    )

    for line in lines:
        print(line, file=file)

    if pmids is not None:
        for line in make_pubmed_abstract_group(pmids):
            print(line, file=file)

    if entrez_ids is not None:
        for line in make_pubmed_gene_group(entrez_ids):
            print(line, file=file)


def replace_selventa_namespaces(path: str) -> None:
    """Update SFAM/SCOM namespaces to FamPlex, rewriting the file in place."""
    df = pd.read_csv(
        'https://raw.githubusercontent.com/johnbachman/famplex/master/equivalences.csv',
        names=['namespace', 'label', 'famplex']
    )

    # Filter to namespace BEL
    df = df[df.namespace == 'BEL']

    # Map both SCOMP: and SFAM: forms of each label to its FamPlex name.
    mapping_dict = {}
    for _, label, famplex in df.values:
        for p in 'SCOMP', 'SFAM':
            mapping_dict[f'{p}:{ensure_quotes(label)}'] = f'FPLX:{ensure_quotes(famplex)}'

    lines = []
    with open(path) as file:
        for line in file:
            for k, v in mapping_dict.items():
                if k in line:
                    # Fix: message typo "Upgrating" -> "Upgrading"
                    print(f'Upgrading line {k} to {v}')
                    line = line.replace(k, v)
            lines.append(line.strip('\n'))

    with open(path, 'w') as file:
        for line in lines:
            print(line, file=file)
soloplxya/tp
src/test/java/woofareyou/model/ModelManagerTest.java
package woofareyou.model; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertTrue; import static woofareyou.model.Model.PREDICATE_SHOW_ALL_PETS; import static woofareyou.testutil.Assert.assertThrows; import static woofareyou.testutil.TypicalPets.BOBA; import static woofareyou.testutil.TypicalPets.PIZZA; import java.nio.file.Path; import java.nio.file.Paths; import java.util.Arrays; import org.junit.jupiter.api.Test; import woofareyou.commons.core.GuiSettings; import woofareyou.model.pet.NameContainsKeywordsPredicate; import woofareyou.testutil.PetBookBuilder; public class ModelManagerTest { private ModelManager modelManager = new ModelManager(); @Test public void constructor() { assertEquals(new UserPrefs(), modelManager.getUserPrefs()); assertEquals(new GuiSettings(), modelManager.getGuiSettings()); assertEquals(new PetBook(), new PetBook(modelManager.getPetBook())); } @Test public void setUserPrefs_nullUserPrefs_throwsNullPointerException() { assertThrows(NullPointerException.class, () -> modelManager.setUserPrefs(null)); } @Test public void setUserPrefs_validUserPrefs_copiesUserPrefs() { UserPrefs userPrefs = new UserPrefs(); userPrefs.setPetBookFilePath(Paths.get("pet/book/file/path")); userPrefs.setGuiSettings(new GuiSettings(1, 2, 3, 4)); modelManager.setUserPrefs(userPrefs); assertEquals(userPrefs, modelManager.getUserPrefs()); // Modifying userPrefs should not modify modelManager's userPrefs UserPrefs oldUserPrefs = new UserPrefs(userPrefs); userPrefs.setPetBookFilePath(Paths.get("new/pet/book/file/path")); assertEquals(oldUserPrefs, modelManager.getUserPrefs()); } @Test public void setGuiSettings_nullGuiSettings_throwsNullPointerException() { assertThrows(NullPointerException.class, () -> modelManager.setGuiSettings(null)); } @Test public void setGuiSettings_validGuiSettings_setsGuiSettings() { GuiSettings guiSettings = new 
GuiSettings(1, 2, 3, 4); modelManager.setGuiSettings(guiSettings); assertEquals(guiSettings, modelManager.getGuiSettings()); } @Test public void setPetBookFilePath_nullPath_throwsNullPointerException() { assertThrows(NullPointerException.class, () -> modelManager.setPetBookFilePath(null)); } @Test public void setPetBookFilePath_validPath_setsPetBookFilePath() { Path path = Paths.get("pet/book/file/path"); modelManager.setPetBookFilePath(path); assertEquals(path, modelManager.getPetBookFilePath()); } @Test public void hasPet_nullPet_throwsNullPointerException() { assertThrows(NullPointerException.class, () -> modelManager.hasPet(null)); } @Test public void hasPet_petNotInWoofAreYou_returnsFalse() { assertFalse(modelManager.hasPet(BOBA)); } @Test public void hasPet_petInWoofAreYou_returnsTrue() { modelManager.addPet(BOBA); assertTrue(modelManager.hasPet(BOBA)); } @Test public void getFilteredPetList_modifyList_throwsUnsupportedOperationException() { assertThrows(UnsupportedOperationException.class, () -> modelManager.getFilteredPetList().remove(0)); } @Test public void equals() { PetBook petBook = new PetBookBuilder().withPet(BOBA).withPet(PIZZA).build(); PetBook differentPetBook = new PetBook(); UserPrefs userPrefs = new UserPrefs(); // same values -> returns true modelManager = new ModelManager(petBook, userPrefs); ModelManager modelManagerCopy = new ModelManager(petBook, userPrefs); assertTrue(modelManager.equals(modelManagerCopy)); // same object -> returns true assertTrue(modelManager.equals(modelManager)); // null -> returns false assertFalse(modelManager.equals(null)); // different types -> returns false assertFalse(modelManager.equals(5)); // different petBook -> returns false assertFalse(modelManager.equals(new ModelManager(differentPetBook, userPrefs))); // different filteredList -> returns false String[] keywords = BOBA.getName().fullName.split("\\s+"); modelManager.updateFilteredPetList(new NameContainsKeywordsPredicate(Arrays.asList(keywords))); 
assertFalse(modelManager.equals(new ModelManager(petBook, userPrefs))); // resets modelManager to initial state for upcoming tests modelManager.updateFilteredPetList(PREDICATE_SHOW_ALL_PETS); // different userPrefs -> returns false UserPrefs differentUserPrefs = new UserPrefs(); differentUserPrefs.setPetBookFilePath(Paths.get("differentFilePath")); assertFalse(modelManager.equals(new ModelManager(petBook, differentUserPrefs))); } }
labibramadhan/hybrid-apps-development
node_modules/rs-todo/src/server/boot.js
<gh_stars>0 import localTodo from '../models/localTodo'; import encryptor from 'simple-encryptor'; import {encryptPersist, decryptLoaded} from 'rs-helper/lib/server'; module.exports = (params) => { const app = params.app; const conf = params.conf; const io = params.io; const ec = encryptor(conf.key); const defaultHook = () => { io.emit('localTodo:changed'); } app.models.localTodo.observe('after save', (ctx, next) => { defaultHook(); next(); }); app.models.localTodo.observe('after delete', (ctx, next) => { defaultHook(); next(); }); app.models.localTodo.observe('persist', (ctx, next) => { encryptPersist(ec, localTodo.properties, ctx); next(); }); app.models.localTodo.observe('loaded', (ctx, next) => { decryptLoaded(ec, localTodo.properties, ctx); next(); }); }
Nanosim-LIG/opencl-ruby
lib/opencl_ruby_ffi/Image.rb
using OpenCLRefinements if RUBY_VERSION.scan(/\d+/).collect(&:to_i).first >= 2 module OpenCL # Creates an Image # # ==== Attributes # # * +context+ - Context the created Image will be associated to # * +format+ - an ImageFormat # * +desc+ - an ImageDesc # # ==== Options # # * +:flags+ - a single or an Array of :cl_mem_flags specifying the flags to be used when creating the Image # * +:host_ptr+ - if provided, the Pointer (or convertible to Pointer using to_ptr) to the memory area to use # * +:properties+ - if provided, an array of :cl_mem_properties (OpenCL 3.0) def self.create_image( context, format, desc, options = {} ) flags = get_flags( options ) host_ptr = options[:host_ptr] error = MemoryPointer::new( :cl_int ) if context.platform.version_number < 3.0 then img_ptr = clCreateImage( context, flags, format, desc, host_ptr, error ) else properties = get_mem_properties( options ) img_ptr = clCreateImageWithProperties( context, properties, flags, format, desc, host_ptr, error ) end error_check(error.read_cl_int) return Image::new(img_ptr, false) end # Creates a 1D Image # # ==== Attributes # # * +context+ - Context the created Image will be associated to # * +format+ - an ImageFormat # * +width+ - width of the image # # ==== Options # # * +:flags+ - a single or an Array of :cl_mem_flags specifying the flags to be used when creating the Image # * +:host_ptr+ - if provided, the Pointer (or convertible to Pointer using to_ptr) to the memory area to use def self.create_image_1d( context, format, width, options = {} ) if context.platform.version_number > 1.1 then desc = ImageDesc::new(Mem::IMAGE1D, width, 0, 0, 0, 0, 0, 0, 0, nil) return create_image( context, format, desc, options ) else error_check(INVALID_OPERATION) end end class << self alias :create_image_1D :create_image_1d end # Creates a 2D Image # # ==== Attributes # # * +context+ - Context the created Image will be associated to # * +format+ - an ImageFormat # * +width+ - width of the image # # ==== Options # 
# * +:flags+ - a single or an Array of :cl_mem_flags specifying the flags to be used when creating the Image # * +:host_ptr+ - if provided, the Pointer (or convertible to Pointer using to_ptr) to the memory area to use # * +:row_pitch+ - if provided the row_pitch of data in host_ptr def self.create_image_2d( context, format, width, height, options = {} ) row_pitch = 0 row_pitch = options[:row_pitch] if options[:row_pitch] if context.platform.version_number > 1.1 then desc = ImageDesc::new(Mem::IMAGE2D, width, height, 0, 0, row_pitch, 0, 0, 0, nil) return create_image( context, format, desc, options ) end flags = get_flags( options ) host_ptr = options[:host_ptr] error = MemoryPointer::new( :cl_int ) img_ptr = clCreateImage2D( context, flags, format, width, height, row_pitch, host_ptr, error ) error_check(error.read_cl_int) return Image::new(img_ptr, false) end class << self alias :create_image_2D :create_image_2d end # Creates a 3D Image # # ==== Attributes # # * +context+ - Context the created Image will be associated to # * +format+ - an ImageFormat # * +width+ - width of the image # # ==== Options # # * +:flags+ - a single or an Array of :cl_mem_flags specifying the flags to be used when creating the Image # * +:host_ptr+ - if provided, the Pointer (or convertible to Pointer using to_ptr) to the memory area to use # * +:row_pitch+ - if provided the row_pitch of data in host_ptr # * +:slice_pitch+ - if provided the slice_pitch of data in host_ptr def self.create_image_3d( context, format, width, height, depth, options = {} ) row_pitch = 0 row_pitch = options[:row_pitch] if options[:row_pitch] slice_pitch = 0 slice_pitch = options[:slice_pitch] if options[:slice_pitch] if context.platform.version_number > 1.1 then desc = ImageDesc::new(Mem::IMAGE3D, width, height, depth, 0, row_pitch, slice_pitch, 0, 0, nil) return create_image( context, format, desc, options ) end flags = get_flags( options ) host_ptr = options[:host_ptr] error = MemoryPointer::new( :cl_int ) 
img_ptr = clCreateImage3D( context, flags, format, width, height, depth, row_pitch, slice_pitch, host_ptr, error ) error_check(error.read_cl_int) return Image::new(img_ptr, false) end class << self alias :create_image_3D :create_image_3d end # Creates an Image from an OpenGL render buffer # # ==== Attributes # # * +context+ - Context the created Image will be associated to # * +renderbuf+ - opengl render buffer # * +options+ - a hash containing named options # # ==== Options # # * +:flags+ - a single or an Array of :cl_mem_flags specifying the flags to be used when creating the Image def self.create_from_gl_renderbuffer( context, renderbuffer, options = {} ) flags = get_flags( options ) error = MemoryPointer::new( :cl_int ) img = clCreateFromGLRenderbuffer( context, flags, renderbuffer, error ) error_check(error.read_cl_int) return Image::new( img, false ) end class << self alias :create_from_GL_renderbuffer :create_from_gl_renderbuffer end # Creates an Image from an OpenGL texture # # ==== Attributes # # * +context+ - Context the created Image will be associated to # * +texture_target+ - a :GLenum defining the image type of texture # * +texture+ - a :GLuint specifying the name of the texture # * +options+ - a hash containing named options # # ==== Options # # * +:miplevel+ - a :GLint specifying the mipmap level to be used (default 0) # * +:flags+ - a single or an Array of :cl_mem_flags specifying the flags to be used when creating the Image def self.create_from_gl_texture( context, texture_target, texture, options = {} ) if context.platform.version_number < 1.2 then error_check(INVALID_OPERATION) end flags = get_flags( options ) miplevel = 0 miplevel = options[:miplevel] if options[:miplevel] error = MemoryPointer::new( :cl_int ) img = clCreateFromGLTexture( context, flags, texture_target, miplevel, texture, error ) error_check(error.read_cl_int) return Image::new( img, false ) end class << self alias :create_from_GL_texture :create_from_gl_texture end # Creates 
an Image from an OpenGL 2D texture # # ==== Attributes # # * +texture_target+ - a :GLenum defining the image type of texture # * +texture+ - a :GLuint specifying the name of the texture # * +options+ - a hash containing named options # # ==== Options # # * +:miplevel+ - a :GLint specifying the mipmap level to be used (default 0) # * +:flags+ - a single or an Array of :cl_mem_flags specifying the flags to be used when creating the Image def self.create_from_gl_texture_2d( context, texture_target, texture, options = {} ) if context.platform.version_number > 1.1 then return create_from_gl_texture( context, texture_target, texture, options ) end flags = get_flags( options ) miplevel = 0 miplevel = options[:miplevel] if options[:miplevel] error = MemoryPointer::new( :cl_int ) img = clCreateFromGLTexture2D( context, flags, texture_target, miplevel, texture, error ) error_check(error.read_cl_int) return Image::new( img, false ) end class << self alias :create_from_GL_texture_2D :create_from_gl_texture_2d end # Creates an Image from an OpenGL 3D texture # # ==== Attributes # # * +texture_target+ - a :GLenum defining the image type of texture # * +texture+ - a :GLuint specifying the name of the texture # * +options+ - a hash containing named options # # ==== Options # # * +:miplevel+ - a :GLint specifying the mipmap level to be used (default 0) # * +:flags+ - a single or an Array of :cl_mem_flags specifying the flags to be used when creating the Image def self.create_from_gl_texture_3d( context, texture_target, texture, options = {} ) if context.platform.version_number > 1.1 then return create_from_gl_texture( context, texture_target, texture, options ) end flags = get_flags( options ) miplevel = 0 miplevel = options[:miplevel] if options[:miplevel] error = MemoryPointer::new( :cl_int ) img = clCreateFromGLTexture3D( context, flags, texture_target, miplevel, texture, error ) error_check(error.read_cl_int) return Image::new( img, false ) end class << self alias 
:create_from_GL_texture_3D :create_from_gl_texture_3d end # Maps the cl_mem OpenCL objects of type CL_MEM_OBJECT_IMAGE* class Image #< Mem def inspect h = height d = depth f = flags return "#<#{self.class.name}: #{format.channel_order}, #{format.channel_data_type}, #{width}#{h != 0 ? "x#{h}" : ""}#{d != 0 ? "x#{d}" : ""} (#{size})#{f.to_i != 0 ? " (#{f})" : "" }>" end # Returns the ImageFormat corresponding to the image def format image_format = MemoryPointer::new( ImageFormat ) error = OpenCL.clGetImageInfo( self, FORMAT, image_format.size, image_format, nil) error_check(error) return ImageFormat::new( image_format ) end get_info("Image", :size_t, "element_size") get_info("Image", :size_t, "row_pitch") get_info("Image", :size_t, "slice_pitch") get_info("Image", :size_t, "width") get_info("Image", :size_t, "height") get_info("Image", :size_t, "depth") def pixel_size s = size / width s /= height if height != 0 s /= depth if depth != 0 return s end module OpenCL12 extend InnerGenerator get_info("Image", :size_t, "array_size") # Returns the associated Buffer if any, nil otherwise def buffer ptr = MemoryPointer::new( Buffer ) error = OpenCL.clGetImageInfo(self, BUFFER, Buffer.size, ptr, nil) error_check(error) return nil if ptr.null? return Buffer::new(ptr.read_pointer) end get_info("Image", :cl_uint, "num_mip_levels") get_info("Image", :cl_uint, "num_samples") # Returns the ImageDesc corresponding to the Image def desc return ImageDesc::new( self.type, self.width, self.height, self.depth, self.array_size, self.row_pitch, self.slice_pitch, self.num_mip_levels, self.num_samples, self.buffer ) end end register_extension( :v12, OpenCL12, "platform.version_number >= 1.2" ) end end
andreho/haxxor
src/main/java/net/andreho/haxxor/api/impl/HxMethodReferenceImpl.java
package net.andreho.haxxor.api.impl; import net.andreho.haxxor.Hx; import net.andreho.haxxor.api.HxAnnotation; import net.andreho.haxxor.api.HxMember; import net.andreho.haxxor.api.HxMethod; import net.andreho.haxxor.api.HxMethodBody; import net.andreho.haxxor.api.HxParameter; import net.andreho.haxxor.api.HxType; import java.util.Collection; import java.util.List; import java.util.Optional; /** * Created by a.hofmann on 30.05.2015. */ public class HxMethodReferenceImpl implements HxMethod { protected HxType declaringType; protected String name; protected String returnType; protected String[] parameterTypes; protected HxMethod method; public HxMethodReferenceImpl(final Hx haxxor, final String declaringType, final String methodName, final String returnType, final String...parameterTypes) { this.declaringType = haxxor.reference(declaringType); this.name = methodName; this.returnType = returnType; this.parameterTypes = parameterTypes; } private boolean isAvailable() { return this.method != null; } public HxMethod toMethod() { HxMethod method = this.method; if (method == null) { this.method = method = declaringType.findMethod(returnType, name, parameterTypes) .orElseThrow(this::complainAboutMissingMethod); } return method; } private IllegalStateException complainAboutMissingMethod() { final StringBuilder builder = new StringBuilder("Method not found: "); builder.append(declaringType).append(".").append(name).append('('); if(parameterTypes.length > 0) { builder.append(parameterTypes[0]); for (int i = 1; i < parameterTypes.length; i++) { builder.append(',').append(parameterTypes[i]); } } return new IllegalStateException(builder.append(')').append(returnType).toString()); } @Override public HxMethod clone() { return clone(getName()); } @Override public HxMethod clone(String name) { return toMethod().clone(name); } @Override public HxMethod clone(final String name, final int parts) { return toMethod().clone(name, parts); } @Override public Hx getHaxxor() { return 
declaringType.getHaxxor(); } @Override public int getIndex() { return toMethod().getIndex(); } @Override public String getName() { return name; } @Override public HxMethod setBody(final HxMethodBody methodBody) { return toMethod().setBody(methodBody); } @Override public boolean hasBody() { return toMethod().hasBody(); } @Override public HxMethodBody getBody() { return toMethod().getBody(); } @Override public HxMethod setModifiers(int modifiers) { toMethod().setModifiers(modifiers); return this; } @Override public int getModifiers() { return toMethod().getModifiers(); } @Override public HxType getReturnType() { //Because the return type doesn't identify a method itself return toMethod().getReturnType(); } @Override public HxMethod setReturnType(HxType returnType) { toMethod().setReturnType(returnType); return this; } @Override public Object getDefaultValue() { return toMethod().getDefaultValue(); } @Override public HxMethod setDefaultValue(Object value) { toMethod().setDefaultValue(value); return this; } @Override public List<HxParameter> getParameters() { return toMethod().getParameters(); } @Override public HxMethod setParameters(final List<HxParameter> parameters) { toMethod().setParameters(parameters); return this; } @Override public List<HxType> getExceptionTypes() { return toMethod().getExceptionTypes(); } @Override public HxMethod setExceptionTypes(final List<HxType> exceptionTypes) { toMethod().setExceptionTypes(exceptionTypes); return this; } @Override public HxMethod addParameter(final HxParameter parameter) { toMethod().addParameter(parameter); return this; } @Override public HxMethod addParameterAt(final int index, final HxParameter parameter) { toMethod().addParameterAt(index, parameter); return this; } @Override public HxMethod setParameterAt(final int index, final HxParameter parameter) { toMethod().setParameterAt(index, parameter); return this; } @Override public HxMethod setExceptionTypes(HxType... 
exceptionTypes) { toMethod().setExceptionTypes(exceptionTypes); return this; } @Override public HxParameter getParameterAt(final int index) { return toMethod().getParameterAt(index); } @Override public HxMethod addAnnotation(HxAnnotation annotation) { toMethod().addAnnotation(annotation); return this; } @Override public HxMethod addRepeatableAnnotationIfNeeded(final HxAnnotation annotation, final String repeatableAnnotationClassname) { toMethod().addRepeatableAnnotationIfNeeded(annotation, repeatableAnnotationClassname); return this; } @Override public HxMethod setAnnotations(Collection<HxAnnotation> annotations) { toMethod().setAnnotations(annotations); return this; } @Override public List<HxAnnotation> getAnnotations() { return toMethod().getAnnotations(); } @Override public boolean isAnnotationPresent(String type) { return toMethod().isAnnotationPresent(type); } @Override public Optional<HxAnnotation> getAnnotation(String type) { return toMethod().getAnnotation(type); } @Override public List<HxAnnotation> getAnnotationsByType(String type) { return toMethod().getAnnotationsByType(type); } @Override public Optional<String> getGenericSignature() { return toMethod().getGenericSignature(); } @Override public HxMethod setGenericSignature(String genericSignature) { toMethod().setGenericSignature(genericSignature); return this; } @Override public HxType getDeclaringMember() { if(!isAvailable()) { return declaringType; } return toMethod().getDeclaringMember(); } @Override public HxMethod setDeclaringMember(HxMember declaringMember) { toMethod().setDeclaringMember(declaringMember); return this; } @Override public boolean hasDescriptor(final String descriptor) { return toMethod().hasDescriptor(descriptor); } }
OptionMetrics/petl
examples/transform/maps.py
from __future__ import absolute_import, print_function, division # fieldmap() ############ import petl as etl from collections import OrderedDict table1 = [['id', 'sex', 'age', 'height', 'weight'], [1, 'male', 16, 1.45, 62.0], [2, 'female', 19, 1.34, 55.4], [3, 'female', 17, 1.78, 74.4], [4, 'male', 21, 1.33, 45.2], [5, '-', 25, 1.65, 51.9]] mappings = OrderedDict() # rename a field mappings['subject_id'] = 'id' # translate a field mappings['gender'] = 'sex', {'male': 'M', 'female': 'F'} # apply a calculation to a field mappings['age_months'] = 'age', lambda v: v * 12 # apply a calculation to a combination of fields mappings['bmi'] = lambda rec: rec['weight'] / rec['height']**2 # transform and inspect the output table2 = etl.fieldmap(table1, mappings) table2 # rowmap() ########## import petl as etl table1 = [['id', 'sex', 'age', 'height', 'weight'], [1, 'male', 16, 1.45, 62.0], [2, 'female', 19, 1.34, 55.4], [3, 'female', 17, 1.78, 74.4], [4, 'male', 21, 1.33, 45.2], [5, '-', 25, 1.65, 51.9]] def rowmapper(row): transmf = {'male': 'M', 'female': 'F'} return [row[0], transmf[row['sex']] if row['sex'] in transmf else None, row.age * 12, row.height / row.weight ** 2] table2 = etl.rowmap(table1, rowmapper, fields=['subject_id', 'gender', 'age_months', 'bmi']) table2 # rowmapmany() ############## import petl as etl table1 = [['id', 'sex', 'age', 'height', 'weight'], [1, 'male', 16, 1.45, 62.0], [2, 'female', 19, 1.34, 55.4], [3, '-', 17, 1.78, 74.4], [4, 'male', 21, 1.33]] def rowgenerator(row): transmf = {'male': 'M', 'female': 'F'} yield [row[0], 'gender', transmf[row['sex']] if row['sex'] in transmf else None] yield [row[0], 'age_months', row.age * 12] yield [row[0], 'bmi', row.height / row.weight ** 2] table2 = etl.rowmapmany(table1, rowgenerator, fields=['subject_id', 'variable', 'value']) table2.lookall()
JamesRyanATX/drone
spec/lib/drone/status_spec.rb
<filename>spec/lib/drone/status_spec.rb<gh_stars>0 require 'spec_helper' describe Drone::Status do subject { Drone::Status.new } before { subject.reset } describe "#log_error" do before do 5.times { |i| subject.log_capture(i) } 2.times { |i| subject.log_error } end it "increments the error count" do expect(subject.error_count).to eq(2) end it "enables error rate calculation" do expect(subject.error_rate).to eq(0.4) end end describe "#log_capture" do before do subject.log_capture(1.0) subject.log_capture(2.0) subject.log_capture(5.0) end it "increments the capture count" do expect(subject.capture_count).to eq(3) end it "enables capture average calculation" do expect(subject.capture_average).to eq(2.67) end it "enables captures/second rate calculation" do expect(subject.capture_rate).to eq(0.37) end end describe "#all" do context "with no capture data" do it "contains empty metrics" do expect(subject.all).to eq({ capture_average: 0.0, capture_count: 0, capture_rate: 0.0, error_count: 0, error_rate: 0.0, target_count: 0 }) end end context "with capture data" do before do end it "contains non-empty metrics" do expect(subject.all).to eq({ capture_average: 0.0, capture_count: 0, capture_rate: 0.0, error_count: 0, error_rate: 0.0, target_count: 0 }) end end end end
abrams27/mimuw
sem4/so/zadania/zad5/minix_source/usr/src/minix/servers/vfs/const.h
#ifndef __VFS_CONST_H__ #define __VFS_CONST_H__ /* Tables sizes */ #define NR_FILPS 1024 /* # slots in filp table */ #define NR_LOCKS 8 /* # slots in the file locking table */ #define NR_MNTS 16 /* # slots in mount table */ #define NR_VNODES 1024 /* # slots in vnode table */ #define NR_WTHREADS 9 /* # slots in worker thread table */ #define NR_NONEDEVS NR_MNTS /* # slots in nonedev bitmap */ /* Miscellaneous constants */ #define SU_UID ((uid_t) 0) /* super_user's uid_t */ #define SYS_UID ((uid_t) 0) /* uid_t for system processes and INIT */ #define SYS_GID ((gid_t) 0) /* gid_t for system processes and INIT */ #define FP_BLOCKED_ON_NONE 0 /* not blocked */ #define FP_BLOCKED_ON_PIPE 1 /* susp'd on pipe */ #define FP_BLOCKED_ON_LOCK 2 /* susp'd on lock */ #define FP_BLOCKED_ON_POPEN 3 /* susp'd on pipe open */ #define FP_BLOCKED_ON_SELECT 4 /* susp'd on select */ #define FP_BLOCKED_ON_OTHER 5 /* blocked on other process, check fp_task to find out */ /* test if the process is blocked on something */ #define fp_is_blocked(fp) ((fp)->fp_blocked_on != FP_BLOCKED_ON_NONE) #define INVALID_THREAD ((thread_t) -1) /* known-invalid thread ID */ #define SYMLOOP 16 #define LABEL_MAX 16 /* maximum label size (including '\0'). Should * not be smaller than 16 or bigger than * M_PATH_STRING_MAX. */ #define FSTYPE_MAX VFS_NAMELEN /* maximum file system type size */ /* possible select() operation types; read, write, errors */ #define SEL_RD CDEV_OP_RD #define SEL_WR CDEV_OP_WR #define SEL_ERR CDEV_OP_ERR #define SEL_NOTIFY CDEV_NOTIFY /* not a real select operation */ /* special driver endpoint for CTTY_MAJOR; must be able to pass isokendpt() */ #define CTTY_ENDPT VFS_PROC_NR #endif
joevandyk/monkeycharger
vendor/plugins/active_merchant/test/remote_tests/remote_pay_junction_test.rb
require File.dirname(__FILE__) + '/../test_helper' class PayJunctionTest < Test::Unit::TestCase include ActiveMerchant::Billing cattr_accessor :current_invoice AMOUNT = 250 def setup @gateway = PayJunctionGateway.new( :login => 'pj-ql-01', :password => '<PASSWORD>' ) @creditcard = credit_card('44332<PASSWORD>') @valid_verification_value = '123' @invalid_verification_value = '1234' @valid_address = { :address1 => '123 Test St.', :address2 => nil, :city => 'Somewhere', :state => 'CA', :zip => '90001' } @invalid_address = { :address1 => '187 Apple Tree Lane.', :address2 => nil, :city => 'Woodside', :state => 'CA', :zip => '94062' } end def test_successful_purchase assert response = @gateway.purchase(AMOUNT, @creditcard, :order_id => generate_order_id) #p response assert_equal Response, response.class assert_match /APPROVAL/, response.message assert_equal 'capture', response.params["posture"], 'Should be captured funds' assert_equal 'charge', response.params["transaction_action"] assert_equal true, response.success? end def test_successful_purchase_with_cvv @creditcard.verification_value = @valid_verification_value assert response = @gateway.purchase(AMOUNT, @creditcard, :order_id => generate_order_id) assert_equal Response, response.class assert_match /APPROVAL/, response.message assert_equal 'capture', response.params["posture"], 'Should be captured funds' assert_equal 'charge', response.params["transaction_action"] assert_equal true, response.success? end def test_successful_authorize assert response = @gateway.authorize( AMOUNT, @creditcard, :order_id => generate_order_id) assert_equal Response, response.class assert_match /APPROVAL/, response.message assert_equal 'hold', response.params["posture"], 'Should be a held charge' assert_equal 'charge', response.params["transaction_action"] assert_equal true, response.success? 
end def test_successful_capture order_id = generate_order_id auth = @gateway.authorize(AMOUNT, @creditcard, :order_id => order_id) assert auth.success? response = @gateway.capture(AMOUNT, auth.authorization, :order_id => order_id) assert response.success? assert_equal 'capture', response.params["posture"], 'Should be a capture' assert_equal auth.authorization, response.authorization, "Should maintain transaction ID across request" end def test_successful_credit purchase = @gateway.purchase(AMOUNT, @creditcard, :order_id => generate_order_id) assert purchase.success? assert response = @gateway.credit(success_price, purchase.authorization) assert_equal Response, response.class assert_equal 'refund', response.params["transaction_action"] assert_equal true, response.success? end def test_successful_void order_id = generate_order_id purchase = @gateway.purchase(AMOUNT, @creditcard, :order_id => order_id) assert purchase.success? assert response = @gateway.void(AMOUNT, purchase.authorization, :order_id => order_id) assert_equal Response, response.class assert_equal true, response.success? assert_equal 'void', response.params["posture"], 'Should be a capture' assert_equal purchase.authorization, response.authorization, "Should maintain transaction ID across request" end def test_successful_instant_purchase # this takes advatange of the PayJunction feature where another # transaction can be executed if you have the transaction ID of a # previous successful transaction. purchase = @gateway.purchase( AMOUNT, @creditcard, :order_id => generate_order_id) assert purchase.success? 
assert response = @gateway.purchase(AMOUNT, purchase.authorization, :order_id => generate_order_id) assert_equal Response, response.class assert_match /APPROVAL/, response.message assert_equal 'capture', response.params["posture"], 'Should be captured funds' assert_equal 'charge', response.params["transaction_action"] assert_not_equal purchase.authorization, response.authorization, 'Should have recieved new transaction ID' assert_equal true, response.success? end def test_successful_recurring assert response = @gateway.recurring(AMOUNT, @creditcard, :periodicity => :monthly, :payments => 12, :order_id => generate_order_id) assert_equal Response, response.class assert_match /APPROVAL/, response.message assert_equal 'charge', response.params["transaction_action"] assert_equal true, response.success? end def test_should_send_invoice order_id = generate_order_id response = @gateway.purchase(AMOUNT, @creditcard, :order_id => order_id) assert response.success? assert_equal order_id, response.params["invoice_number"], 'Should have set invoice' end private def success_price 200 + rand(200) end end
evis-market/web-interface-backend
src/app/conf/environ.py
<reponame>evis-market/web-interface-backend """Read .env file""" import environ import os.path env = environ.Env( DEBUG=(bool, False), DEBUG_SQL=(bool, False), ACCESS_TOKEN_LIFETIME_MINUTES=(int, 10), REFRESH_TOKEN_LIFETIME_DAYS=(int, 60), HTTP_PORT=(int, 8000), ) if os.path.exists('.env'): environ.Env.read_env('.env') elif os.path.exists('../.env'): environ.Env.read_env('../.env') __all__ = [ env, ]
Catfish30/codigotecsup
tec-proyecto/src/components/ModalNotas.js
import React from 'react' import { useState } from 'react'; import {Button,Modal} from 'react-bootstrap' export default function ModalNotas({value,actualizarInput,manejarSubmit}) { const [show, setShow] = useState(false); const handleClose = () => setShow(false); const handleShow = () => setShow(true); return ( <div> <Button variant="warning ms-5 " onClick={handleShow} > Actualizar Notas </Button> <Modal show={show} onHide={handleClose}> <Modal.Header closeButton > <Modal.Title >Actualizar Notas</Modal.Title> </Modal.Header> <Modal.Body> <form onSubmit={(e) => {manejarSubmit(e)}}> <div className="row justify-content-around my-1"> <div className="col-2"> <label className="form-label">Nota 1</label> </div> <div className="col-2"> <input type="number" className="form-control" name="nota_1" value={value.nota_1} onChange={(e) => {actualizarInput(e)}}></input> </div> </div> <div className="row justify-content-around my-1"> <div className="col-2"> <label className="form-label">Nota 2</label> </div> <div className="col-2"> <input type="number" className="form-control" name="nota_2" value={value.nota_2} onChange={(e) => {actualizarInput(e)}}></input> </div> </div> <div className="row justify-content-around my-1"> <div className="col-2"> <label className="form-label">Nota 3</label> </div> <div className="col-2"> <input type="number" className="form-control" name="nota_3" value={value.nota_3} onChange={(e) => {actualizarInput(e)}}></input> </div> </div> <div className="row justify-content-around my-1"> <div className="col-2"> <label className="form-label">Nota 4</label> </div> <div className="col-2"> <input type="number" className="form-control" name="nota_4" value={value.nota_4} onChange={(e) => {actualizarInput(e)}}></input> </div> </div> <Modal.Footer> <Button variant="secondary" onClick={handleClose}> Cancelar </Button> <button className="btn btn-primary" onClick={handleClose} type="submit"> Guardar Cambios </button> </Modal.Footer> </form> </Modal.Body> </Modal> </div> ) 
}
osoco/better-ways-of-thinking-about-software
Part-03-Understanding-Software-Crafting-Your-Own-Tools/models/edx-platform/common/djangoapps/terrain/stubs/ecommerce.py
""" Stub implementation of ecommerce service for acceptance tests """ import re import six.moves.urllib.parse from .http import StubHttpRequestHandler, StubHttpService class StubEcommerceServiceHandler(StubHttpRequestHandler): # pylint: disable=missing-class-docstring # pylint: disable=missing-function-docstring def do_GET(self): pattern_handlers = { '/api/v2/orders/$': self.get_orders_list, } if self.match_pattern(pattern_handlers): return self.send_response(404, content='404 Not Found') def match_pattern(self, pattern_handlers): """ Find the correct handler method given the path info from the HTTP request. """ path = six.moves.urllib.parse.urlparse(self.path).path for pattern in pattern_handlers: match = re.match(pattern, path) if match: pattern_handlers[pattern](**match.groupdict()) return True return None def get_orders_list(self): """ Stubs the orders list endpoint. """ orders = { 'results': [ { 'status': 'Complete', 'number': 'Edx-123', 'total_excl_tax': '100.00', 'date_placed': '2016-04-21T23:14:23Z', 'lines': [ { 'title': 'Test Course', 'line_price_excl_tax': '100.00', 'product': { 'product_class': 'Seat' } } ], } ] } orders = self.server.config.get('orders', orders) self.send_json_response(orders) class StubEcommerceService(StubHttpService): HANDLER_CLASS = StubEcommerceServiceHandler
ta-forever/downlords-taf-client
src/main/java/com/faforever/client/util/ZipUtil.java
package com.faforever.client.util;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

/**
 * Utility for packing a set of files into a single zip archive.
 */
public class ZipUtil {

  /**
   * Zip a list of files into one zip file. Directories and non-existing files
   * are silently skipped; entries are added flat, named by file name.
   *
   * @param files files to zip
   * @param targetZipFile target zip file (overwritten if it already exists)
   * @throws IOException if a source file cannot be read or the archive cannot
   *     be written
   */
  public static void zipFile(final File[] files, final File targetZipFile) throws IOException {
    // try-with-resources guarantees the streams are closed even when an
    // IOException is thrown mid-copy; the original leaked both streams on
    // any error path. Closing the ZipOutputStream also closes the wrapped
    // FileOutputStream.
    try (ZipOutputStream zos = new ZipOutputStream(new FileOutputStream(targetZipFile))) {
      byte[] buffer = new byte[8192]; // was 128 bytes; larger buffer, fewer syscalls
      for (File currentFile : files) {
        if (currentFile.isDirectory() || !currentFile.exists()) {
          continue; // same skip rule as the original
        }
        zos.putNextEntry(new ZipEntry(currentFile.getName()));
        try (FileInputStream fis = new FileInputStream(currentFile)) {
          int read;
          while ((read = fis.read(buffer)) != -1) {
            zos.write(buffer, 0, read);
          }
        }
        zos.closeEntry();
      }
    }
  }
}
JackChan1999/boohee_v5.6
src/main/java/android/support/v7/app/AppCompatDelegateImplV11.java
package android.support.v7.app;

import android.content.Context;
import android.util.AttributeSet;
import android.view.View;
import android.view.Window;

/**
 * API-11+ specialization of the AppCompat delegate chain; delegates all
 * construction to {@link AppCompatDelegateImplV7}.
 */
class AppCompatDelegateImplV11 extends AppCompatDelegateImplV7 {

    AppCompatDelegateImplV11(Context context, Window window, AppCompatCallback callback) {
        super(context, window, callback);
    }

    // Always returns null here — presumably so layout-inflation falls through
    // to the framework's default handling on API 11+. NOTE(review): confirm
    // against the V7 base class's inflation path.
    View callActivityOnCreateView(View parent, String name, Context context, AttributeSet attrs) {
        return null;
    }
}
coderoom-cn/javaStudy
src/main/java/cn/coderoom/thread/advance/package-info.java
/**
 * Advanced threading study examples.
 *
 * <p>NOTE(review): the original header carried only generated IDE template
 * metadata (author/email/date), condensed here.
 *
 * @author lim
 */
package cn.coderoom.thread.advance;
bacndcmc/IoT-data-simulator
ui/public-src/src/app/screens/Main/components/definition/DefinitionWizard/DefinitionWizard.js
import React, { Component } from "react";
import { observable, action, computed } from "mobx";
import { inject, observer } from "mobx-react";
import glamorous from "glamorous";
import { css} from 'glamor'
import Button from "material-ui/Button";
import Radio, { RadioGroup } from "material-ui/Radio";
import TextField from "material-ui/TextField";
import { FormLabel, FormControl, FormControlLabel } from "material-ui/Form";
import KeyboardArrowLeft from "material-ui-icons/KeyboardArrowLeft";
import KeyboardArrowRight from "material-ui-icons/KeyboardArrowRight";
import Dialog, {
  DialogActions,
  DialogContent,
  DialogTitle
} from "material-ui/Dialog";

import Stepper from "../../Stepper";
import DatasetStep from "./DatasetStep";
import SchemaStep from "./SchemaStep";
import FinalizeStep from "./FinalizeStep";
import JsonSchema from "models/schema/JsonSchema";
import { throttle } from "util/function";
import SelectSchemaModal from './SchemaStep/SelectSchemaModal';

//////////////////////////////////////////////////////////////////////
// Styled building blocks for the wizard layout.

// Root flex column that fills the available wizard area.
const DefinitionWizardLayout = glamorous.div({
  display: "flex",
  flexDirection: "column",
  minHeight: "0",
  height: "100%",
  width: "100%",
  position: "relative"
});

// Scrollable container holding the active step's content.
const StepOuterContainer = glamorous.div({
  display: "flex",
  minHeight: "0",
  height: "100%",
  overflow: "auto",
  transform: "translate3d(0, 0, 0) scale(1)"
});

// NOTE(review): NextButton and StepActionsContainer are defined but not
// referenced in render() below — possibly leftovers.
const NextButton = glamorous(Button)({});
NextButton.defaultProps = {
  raised: true,
  style: {
    marginRight: "15px"
  }
};

const StepContainer = glamorous.div({
  padding: "30px",
  width: "100%"
});

const StepActionsContainer = glamorous.div({
  margin: "15px 0"
});

// Heading label shown above each step's content.
const StepTitle = glamorous(FormLabel, {
  withProps: {
    style: {
      padding: "10px 0"
    }
  }
})({});

const CopyFromButtonLayout = glamorous.span({});

// "Copy from definition" action shown in the schema step heading.
const CopyFromButton = glamorous(Button, {
  withProps: {
    style: {
      fontSize: "13px",
      background: "rgb(230, 230, 230)"
    }
  }
})();

// Right-aligned area of the stepper holding Skip / Next controls.
const SchemaSkipSection = glamorous.div({
  display: 'flex',
  flexDirection: 'row',
  alignItems: 'center',
  justifyContent: 'flex-end',
  flex: '0 0 auto'
});

const OrSpan = glamorous.span({
  fontSize: '13px',
  marginRight: '10px'
});

const BackButton = glamorous(Button, {
  withProps: {
    style: {
      flex: '0 0 85px'
    }
  }
})()

/**
 * Three-step definition wizard (dataset -> schema -> finalize).
 * All step state and navigation logic lives on the injected `store` prop;
 * this component only renders the current step and the confirmation dialog
 * for the "copy from definition" action.
 */
@observer
export default class DefinitionWizard extends React.Component {
  // Whether the "copy from definition" confirmation dialog is open.
  @observable isConfirmShown = false;

  @action.bound
  openConfirm = () => {
    this.isConfirmShown = true;
  };

  @action.bound
  closeConfirm = () => {
    this.isConfirmShown = false;
  };

  // Confirmed: close the dialog and let the store open the schema-select modal.
  @action.bound
  onConfirm = () => {
    this.closeConfirm();
    this.props.store.openSchemaSelectModal();
  };

  render() {
    let { store, onComplete } = this.props;
    let {
      setSelectionMethod,
      selectionMethod,
      fileToUpload,
      datasets,
      wizardCurrentStep: step,
      definition,
      isSchemaSkipAllowed
    } = store;
    return (
      <DefinitionWizardLayout>
        <Stepper
          steps={3}
          activeStep={step}
          nextButton={
            <SchemaSkipSection>
              {isSchemaSkipAllowed && (
                <span>
                  <Button onClick={store.onSchemaSkip} dense>Skip</Button>
                  <OrSpan> or </OrSpan>
                </span>
              )}
              <Button
                dense
                onClick={store.nextHandler}
                disabled={!store.isNextActive}
              >
                {store.nextLabel} {<KeyboardArrowRight />}
              </Button>
            </SchemaSkipSection>
          }
          backButton={
            <BackButton
              dense
              onClick={store.backHandler}
              disabled={!store.isBackActive}
            >
              {<KeyboardArrowLeft />} Back
            </BackButton>
          }
        />
        <StepOuterContainer>
          {step === 0 && (
            <StepContainer>
              <StepTitle component="legend">
                <b>1. Select/Create dataset</b>
              </StepTitle>
              <DatasetStep store={store} />
            </StepContainer>
          )}
          {step === 1 && (
            <StepContainer>
              <StepTitle component="legend">
                <b>
                  2. Create schema
                  {!store.definition.id && (
                    <span>
                      <span> or </span>
                      <CopyFromButtonLayout>
                        <CopyFromButton
                          onClick={this.openConfirm}
                        >
                          Copy from definition
                        </CopyFromButton>
                      </CopyFromButtonLayout>
                    </span>
                  )}
                </b>
              </StepTitle>
              <SchemaStep store={store} />
              {!definition.id && (
                <Dialog
                  ignoreBackdropClick
                  ignoreEscapeKeyUp
                  open={this.isConfirmShown}
                >
                  <DialogTitle>Warning</DialogTitle>
                  <DialogContent>
                    When definition selected, all your current schema changes
                    will be lost. Do you want to proceed?
                  </DialogContent>
                  <DialogActions>
                    <Button
                      onClick={this.closeConfirm}
                      color="primary"
                    >
                      No
                    </Button>
                    <Button
                      onClick={this.onConfirm}
                      color="accent"
                    >
                      Yes
                    </Button>
                  </DialogActions>
                </Dialog>
              )}
              <SelectSchemaModal key="select-schema-modal" store={store} />
            </StepContainer>
          )}
          {step === 2 && (
            <StepContainer>
              <StepTitle component="legend">
                <b>3. Finalize form</b>
              </StepTitle>
              <FinalizeStep store={store} />
            </StepContainer>
          )}
        </StepOuterContainer>
      </DefinitionWizardLayout>
    );
  }
}
achimtimis/L-Project
quiz-service/quiz-service-app/src/main/java/app/repository/IQuestionEntityDao.java
package app.repository;

import app.domain.questions.QuestionEntity;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.stereotype.Repository;

/**
 * Spring Data JPA repository for {@link QuestionEntity} rows
 * (primary key type: {@code Long}).
 *
 * <p>All CRUD and paging operations are inherited from
 * {@link JpaRepository}; no custom query methods are declared yet.
 */
@Repository
public interface IQuestionEntityDao extends JpaRepository<QuestionEntity, Long>{
}
Dbevan/SunderingShadows
d/verhedin/virtual/wadi/landing.c
// Wadi al-Emir River Landing
// for Verhedin
// Thorn@ShadowGate
// 13 June 2000
// Shamelessly ripped off from Tristan's
// Shadow Dock and altered to make a dock
// with only the basic amenities
// landing.c

#include <std.h>
#include <daemons.h>

// Template boat object copied per player, and the directory that stores
// each player's personalized boat file.
#define BOAT "/d/verhedin/virtual/wadi/obj/boat.c"
#define BOAT_DIR "/d/save/boats/wadi_al-emir/"

inherit ROOM;

// Flags queried by other code: this room is a virtual water-dock room
// that should never be cleaned up by the driver.
int is_virtual() { return 1; }
int is_water() { return 1; }
int is_dock() { return 1; }
int clean_up(){return 1;}

void create(){
    object ob; // NOTE(review): unused local, kept as-is
    ::create();
}

// Register the player commands available in this room.
void init(){
    ::init();
    add_action("retrieve","retrieve");
    add_action("__Read_me","read");
}

// Append a pointer to the sign onto the inherited room description.
string query_long(string str){
    string hold;
    hold = room::query_long();
    hold += "\nThere is a sign here listing what can be done, please read it.\n";
    return hold;
}

// Handler for "read sign".
// NOTE(review): line breaks inside the @THORN text block were lost in
// transit and have been reconstructed — verify against the original.
int __Read_me(string str){
    if(!str || str != "sign") return 0;
    write( @THORN
This is a boat landing. From here you can do any number of things
concerning water transportation.

retrieve boat --------- Will call boat to dock if you own one
THORN
);
    return 1;
}

// Move a boat object into this room with arrival messages.
// NOTE(review): appears unused here (retrieve uses get_boat2) — possibly
// kept for callers elsewhere.
void get_boat(object ob, object player){
    if(!objectp(ob)) return;
    tell_room(TO,"%^BOLD%^A small boat sails into the area for use.",player);
    tell_object(player,"%^BOLD%^Your boat sails into the area!\n");
    ob->move(TO);
    ob->make_me();
}

// Same as get_boat, but also stamps the player as the boat's owner.
void get_boat2(object ob, object player){
    if(!objectp(ob)) return;
    tell_room(TO,"%^BOLD%^A small boat sails into the area for use.",player);
    tell_object(player,"%^BOLD%^Your boat sails into the area!\n");
    ob->move(TO);
    ob->make_me();
    ob->set_owner(player->query_name());
}

// Handler for "retrieve boat": verifies ownership, rebuilds the player's
// boat file from the template, loads it, and schedules its arrival.
int retrieve(string str){
    string file, file2;
    object ob;
    if(!str || str[0..3] != "boat") return notify_fail("Retrieve what?\n");
    // Ownership is recorded in the SAVE_D membership list.
    if((int)SAVE_D->is_member("tsarven_boat_owners",TPQN) == -1) return notify_fail("We have no record of you owning a boat!\n");
    if(!file_exists(BOAT_DIR+TPQN+"boat.c")){
        write("Error in loading boat object: notify an immortal!");
        write_file(BOAT_DIR+"bad_boats","No "+TPQN+"boat.c on "+ctime(time())+"\n");
        return 1;
    }
    // Re-copy the template over the player's boat file before loading it.
    file = BOAT_DIR+TPQN+"boat.c";
    file2 = read_file(BOAT);
    rm(file);
    write_file(file,file2);
    if(!ob = find_object_or_load(BOAT_DIR+TPQN+"boat")){
        write("Error in loading boat object: notify an immortal!");
        write_file(BOAT_DIR+"bad_boats","No "+TPQN+"boat.c on "+ctime(time())+"\n");
        return 1;
    }
    if(objectp(environment(ob)) && environment(ob) == TO) return notify_fail("Umm, you're standing next to your boat.\n");
    // Boat arrives 5 heartbeats later via get_boat2 (sets ownership too).
    call_out("get_boat2",5,ob,TP);
    write("%^BOLD%^%^CYAN%^You summon your boat from where ever it may be!");
    tell_room(TO,"%^BOLD%^%^CYAN%^"+TPQCN+" summons "+TP->query_possessive()+" boat!",TP);
    return 1;
}
kowalt/neuroCloud
BE/nnCloudRESTService/src/main/java/com/mycompany/nncloudrestservice/logic/Register.java
package com.mycompany.nncloudrestservice.logic;

import com.mycompany.nncloudrestservice.daos.UserDAO;
import com.mycompany.nncloudrestservice.exceptions.UserExistsException;
import com.mycompany.nncloudrestservice.pojo.User;
import com.mycompany.nncloudrestservice.utils.SafeHashUtil;
import java.util.Calendar;
import org.json.JSONObject;

/**
 * Registration use case: builds a new, not-yet-activated {@link User} from
 * the JSON payload of a registration request and persists it via
 * {@link UserDAO}.
 *
 * @author Tomasz
 */
public class Register {

    /**
     * Creates and stores a new user account from the registration payload.
     *
     * <p>The plain-text password from the request is hashed with
     * {@link SafeHashUtil#getHash(String)} before being stored; the clear
     * text is never persisted. (The original source contained an invalid
     * {@code <PASSWORD>} placeholder here; the hashed value computed on the
     * preceding line is the intended argument.)
     *
     * @param regData JSON object with keys {@code email}, {@code login},
     *                {@code password} and {@code info_to_admin}
     * @throws UserExistsException presumably raised when the login/email is
     *                             already taken — TODO confirm it originates
     *                             in {@code UserDAO.addItem}
     */
    public void registerNewUser(JSONObject regData) throws UserExistsException {
        User user = new User();
        user.setActivated(false); // accounts start deactivated until approved
        user.setEmail(regData.getString("email"));
        user.setLogin(regData.getString("login"));
        user.setInfo_to_admin(regData.getString("info_to_admin"));

        // Hash the plain-text password; only the hash is stored.
        String givenPassword = regData.getString("password");
        String givenEncryptedPassword = SafeHashUtil.getHash(givenPassword);
        user.setPassword(givenEncryptedPassword);

        user.setRegistered(Calendar.getInstance().getTime());
        user.setNetworks(null);

        UserDAO udao = new UserDAO();
        udao.addItem(user);
    }
}