repo_name
stringlengths
6
101
path
stringlengths
4
300
text
stringlengths
7
1.31M
emcleod/OG-Platform
projects/OG-Analytics/src/main/java/com/opengamma/analytics/math/interpolation/NotAKnotCubicSplineInterpolator1D.java
/** * Copyright (C) 2013 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.analytics.math.interpolation; import com.opengamma.analytics.math.interpolation.data.Interpolator1DDataBundle; /** * */ public class NotAKnotCubicSplineInterpolator1D extends PiecewisePolynomialInterpolator1D { /** Serialization version */ private static final long serialVersionUID = 1L; /** * Default constructor where the interpolation method is fixed */ public NotAKnotCubicSplineInterpolator1D() { super(new CubicSplineInterpolator()); } @Override public Interpolator1DDataBundle getDataBundle(final double[] x, final double[] y, final double leftCond, final double rightCond) { throw new IllegalArgumentException("No degrees of freedom at endpoints for this interpolation method"); } @Override public Interpolator1DDataBundle getDataBundleFromSortedArrays(final double[] x, final double[] y, final double leftCond, final double rightCond) { throw new IllegalArgumentException("No degrees of freedom at endpoints for this interpolation method"); } }
Tony031218/OI
luogu/codes/P1201.cpp
/************************************************************* * > File Name : P1201.cpp * > Author : Tony * > Created Time : 2019/06/19 15:59:41 * > Algorithm : [STL]map **************************************************************/ #include <bits/stdc++.h> using namespace std; inline int read() { int x = 0; int f = 1; char ch = getchar(); while (!isdigit(ch)) {if (ch == '-') f = -1; ch = getchar();} while (isdigit(ch)) {x = x * 10 + ch - 48; ch = getchar();} return x * f; } string name[15]; map<string, int> _map; int main() { int np = read(); for (int i = 1; i <= np; ++i) { cin >> name[i]; } for (int i = 1; i <= np; ++i) { string str; cin >> str; int m = read(), p = read(); int num = (p != 0) ? m / p : 0; _map[str] -= num * p; for (int j = 1; j <= p; ++j) { string str2; cin >> str2; _map[str2] += num; } } for (int i = 1; i <= np; ++i) { cout << name[i] << " " << _map[name[i]] << endl; } return 0; }
chrisxu333/uDB
src/include/storage/buffer_pool/LRU_cache.h
#ifndef UDB_LRU_CACHE_H #define UDB_LRU_CACHE_H #include "include/common/type.h" #include <map> namespace udb { class LruCache{ public: LruCache(); ~LruCache(); /** * * */ void insert(frame_id_t frame_id); frame_id_t replace(); bool update(frame_id_t frame_id); private: // LRU node struct struct Frame{ frame_id_t frame_id_; Frame* nxt_frame_; Frame* prev_frame_; Frame():frame_id_(-1), nxt_frame_(nullptr), prev_frame_(nullptr){} Frame(frame_id_t frame_id):frame_id_(frame_id), nxt_frame_(nullptr), prev_frame_(nullptr){} }; /** * @param frame_id * Move a frame to the front of LRU list. * */ void moveToFront(Frame* frame); /** * @param frame_id * Add a new frame to the front of LRU list. * */ void addFront(Frame* frame); /** * @return frame_id_t * Remove least recent used from tail of LRU list. * */ void removeNode(Frame* frame); Frame* head; Frame* tail; std::map<frame_id_t, Frame*> lru_map_; }; } // namespace udb #endif
Tonytan123/DesignPatternJavaImplementation
com.tucker.java.design.pattern/abstractfactory/ColorFactory.java
<filename>com.tucker.java.design.pattern/abstractfactory/ColorFactory.java package abstractfactory; import java.util.HashMap; import java.util.Map; /** * @author tonyc */ public class ColorFactory extends AbstractFactory{ public static Map<String, Color> colorMap = new HashMap<>(); public static void init() { colorMap.put("blue", new Blue()); colorMap.put("red", new Red()); colorMap.put("yellow", new Yellow()); } @Override public Shape createShape(String name) { return null; } @Override public Color createColor(String name) { init(); return colorMap.get(name); } }
Hans-Halverson/flow
tests/autofix_empty_object/c.js
<filename>tests/autofix_empty_object/c.js // @flow const obj = { f: {} }; obj.f = { x: 1 }; obj.f = { x: "a" }; module.exports = obj;
spring-projects/spring-integration-dsl-scala
spring-integration-dsl-scala-core/src/main/scala/org/springframework/integration/dsl/MessagingBridgeEndpointDsl.scala
<gh_stars>10-100 package org.springframework.integration.dsl import org.springframework.beans.factory.support.BeanDefinitionBuilder import org.springframework.integration.handler.BridgeHandler import java.util.UUID import org.w3c.dom.Element import org.w3c.dom.Document import org.springframework.util.StringUtils private[dsl] class MessagingBridge(name: String = "$br_" + UUID.randomUUID().toString.substring(0, 8)) extends SimpleEndpoint(name, null) { override def build(document: Document, targetDefinitionFunction: Function1[Any, Tuple2[String, String]], compositionInitFunction: Function2[BaseIntegrationComposition, AbstractChannel, Unit], inputChannel:AbstractChannel, outputChannel:AbstractChannel): Element = { require(inputChannel != null, "'inputChannel' must be provided") require(outputChannel != null, "'outputChannel' must be provided") val element = document.createElement("int:bridge") element.setAttribute("id", this.name) element.setAttribute("input-channel", inputChannel.name); element.setAttribute("output-channel", outputChannel.name); element } }
MrYang-Jia/edb
src/main/java/com/edbplus/db/proxy/EDbRelProxy.java
<reponame>MrYang-Jia/edb<gh_stars>1-10 /** * Copyright (c) 2021 , YangZhiJia 杨志佳 (<EMAIL>). * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.edbplus.db.proxy; import com.edbplus.db.EDbPro; import com.edbplus.db.jpa.util.JpaRelUtil; import lombok.Setter; import net.sf.cglib.proxy.Enhancer; import net.sf.cglib.proxy.MethodInterceptor; import net.sf.cglib.proxy.MethodProxy; import java.lang.reflect.Method; import java.lang.reflect.Type; /** * @ClassName EDbRelProxy * @Description: Jpa关系对象 * @Author 杨志佳 * @Date 2020/11/18 * @Version V1.0 **/ public class EDbRelProxy implements MethodInterceptor { public static String jpaEdbRelKey = "edb.EDbRel"; //要代理的原始对象 private Object oriJpa ; // 自定义字段 @Setter private String fields; @Setter private Integer pageNo=1; @Setter private Integer pageSize=10; // 数据对象 private EDbPro eDbPro; // 1- 创建代理对象 public <T> T createProcy(T target,EDbPro eDbPro){ // 需要判断是否已经是代理对象,如果是的话,无须二次代理 // System.out.println("需要被代理的类:"+this.getClass().getSimpleName()); // 赋予操作对象 this.oriJpa = target; // 赋予数据库对象 this.eDbPro = eDbPro; //1-Enhancer类是CGLib中的一个字节码增强器,它可以方便的对你想要处理的类进行扩展 Enhancer enhancer=new Enhancer(); //2-将被代理类HuGe设置成父类 enhancer.setSuperclass(this.oriJpa.getClass()); //3-设置拦截器 enhancer.setCallback(this); //4-动态生成一个代理类 Object objProxy = enhancer.create(); return (T) objProxy; } //2-实现MethodInterceptor的intercept方法 public Object intercept(Object obj, Method method, Object[] args, MethodProxy proxy) throws Throwable { // System.out.println("before: " + 
method); //调用proxy.invoke()方法,会报java.lang.StackOverflowError错误,原因是invoke()内部会一直被反复调用 //Object object = proxy.invoke(obj, args); // Object object = proxy.invokeSuper(obj, args); Object object = null; // 获取调用方法的返回对象 Type returnType = method.getAnnotatedReturnType().getType(); // 如果返回类型有值,才进行关系扩展 if(returnType!= null ){ // 同步的方式,获取指定对象 object = JpaRelUtil.getRelObject(null,fields,pageNo,pageSize,eDbPro,oriJpa,returnType.getTypeName(),method,true,false); } // returnType.getTypeName() // 如果不是rel对象,则返回原属性方法值 if(object == null){ // object = proxy.invokeSuper(obj, args); // 触发原对象方法的返回结果 object = method.invoke(oriJpa,args); } return object; } }
pwinckles/ocfl-java
ocfl-java-core/src/main/java/edu/wisc/library/ocfl/core/validation/SimpleInventoryValidator.java
<reponame>pwinckles/ocfl-java /* * The MIT License (MIT) * * Copyright (c) 2019 University of Wisconsin Board of Regents * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/ package edu.wisc.library.ocfl.core.validation; import edu.wisc.library.ocfl.api.model.DigestAlgorithm; import edu.wisc.library.ocfl.api.model.InventoryType; import edu.wisc.library.ocfl.api.model.ValidationCode; import edu.wisc.library.ocfl.api.model.ValidationIssue; import edu.wisc.library.ocfl.api.model.ValidationResults; import edu.wisc.library.ocfl.api.model.VersionNum; import edu.wisc.library.ocfl.api.util.Enforce; import edu.wisc.library.ocfl.core.validation.model.SimpleInventory; import java.net.URI; import java.net.URISyntaxException; import java.time.format.DateTimeFormatter; import java.time.format.DateTimeFormatterBuilder; import java.time.format.DateTimeParseException; import java.util.BitSet; import java.util.Collections; import java.util.Comparator; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.TreeSet; import java.util.function.Consumer; import java.util.function.Supplier; import java.util.regex.Pattern; import static java.time.temporal.ChronoField.DAY_OF_MONTH; import static java.time.temporal.ChronoField.HOUR_OF_DAY; import static java.time.temporal.ChronoField.MINUTE_OF_HOUR; import static java.time.temporal.ChronoField.MONTH_OF_YEAR; import static java.time.temporal.ChronoField.NANO_OF_SECOND; import static java.time.temporal.ChronoField.SECOND_OF_MINUTE; import static java.time.temporal.ChronoField.YEAR; /** * Validates the contents of a SimpleInventory object against the OCFL v1.0 spec */ public class SimpleInventoryValidator { private static final Pattern VALID_VERSION = Pattern.compile("^v\\d+$"); private static final VersionNum VERSION_ZERO = VersionNum.fromInt(0); private static final List<String> ALLOWED_CONTENT_DIGESTS = List.of( DigestAlgorithm.sha512.getOcflName(), DigestAlgorithm.sha256.getOcflName() ); private static final Map<String, Integer> DIGEST_LENGTHS = Map.of( DigestAlgorithm.md5.getOcflName(), 32, DigestAlgorithm.sha1.getOcflName(), 
40, DigestAlgorithm.sha256.getOcflName(), 64, DigestAlgorithm.sha512.getOcflName(), 128, DigestAlgorithm.blake2b512.getOcflName(), 128, DigestAlgorithm.blake2b160.getOcflName(), 40, DigestAlgorithm.blake2b256.getOcflName(), 64, DigestAlgorithm.blake2b384.getOcflName(), 96, DigestAlgorithm.sha512_256.getOcflName(), 64 ); private static final DateTimeFormatter RFC3339_FORMAT = new DateTimeFormatterBuilder() .parseCaseInsensitive() .appendValue(YEAR, 4) .appendLiteral('-') .appendValue(MONTH_OF_YEAR, 2) .appendLiteral('-') .appendValue(DAY_OF_MONTH, 2) .appendLiteral('T') .appendValue(HOUR_OF_DAY, 2) .appendLiteral(':') .appendValue(MINUTE_OF_HOUR, 2) .appendLiteral(':') .appendValue(SECOND_OF_MINUTE, 2) .optionalStart() .appendFraction(NANO_OF_SECOND, 0, 9, true) .optionalEnd() .appendOffset("+HH:MM", "Z") .toFormatter(); private final BitSet lowerHexChars; public SimpleInventoryValidator() { lowerHexChars = new BitSet(); for (int i = '0'; i <= '9'; i++) { lowerHexChars.set(i); } for (int i = 'a'; i <= 'f'; i++) { lowerHexChars.set(i); } } /** * Validates the specified inventory and returns an object contain any issues that were found. * * @param inventory the inventory to validate * @param inventoryPath the path to the inventory * @return the validation results */ public ValidationResults validateInventory(SimpleInventory inventory, String inventoryPath) { Enforce.notNull(inventory, "inventory cannot be null"); Enforce.notNull(inventoryPath, "inventoryPath cannot be null"); var results = new ValidationResultsBuilder(); results.addIssue(notBlank(inventory.getId(), ValidationCode.E036, "Inventory id must be set in %s", inventoryPath)) .addIssue(ifNotNull(inventory.getId(), () -> isTrue(isUri(inventory.getId()), ValidationCode.W005, "Inventory id should be a URI in %s. 
Found: %s", inventoryPath, inventory.getId()))) .addIssue(notNull(inventory.getType(), ValidationCode.E036, "Inventory type must be set in %s", inventoryPath)) .addIssue(ifNotNull(inventory.getType(), () -> isTrue(inventory.getType().equals(InventoryType.OCFL_1_0.getId()), ValidationCode.E038, "Inventory type must equal '%s' in %s", InventoryType.OCFL_1_0.getId(), inventoryPath))) .addIssue(notNull(inventory.getDigestAlgorithm(), ValidationCode.E036, "Inventory digest algorithm must be set in %s", inventoryPath)) .addIssue(notNull(inventory.getHead(), ValidationCode.E036, "Inventory head must be set in %s", inventoryPath)); if (inventory.getDigestAlgorithm() != null) { if (!ALLOWED_CONTENT_DIGESTS.contains(inventory.getDigestAlgorithm())) { results.addIssue(ValidationCode.E025, "Inventory digest algorithm must be one of %s in %s. Found: %s", ALLOWED_CONTENT_DIGESTS, inventoryPath, inventory.getDigestAlgorithm()); } else { results.addIssue(isTrue(DigestAlgorithm.sha512.getOcflName().equals(inventory.getDigestAlgorithm()), ValidationCode.W004, "Inventory digest algorithm should be %s in %s. Found: %s", DigestAlgorithm.sha512.getOcflName(), inventoryPath, inventory.getDigestAlgorithm())); } } if (inventory.getHead() != null) { parseAndValidateVersionNum(inventory.getHead(), inventoryPath, results); } if (inventory.getContentDirectory() != null) { var content = inventory.getContentDirectory(); results.addIssue(isFalse(content.contains("/"), ValidationCode.E017, "Inventory content directory cannot contain '/' in %s", inventoryPath)) .addIssue(isFalse(content.equals(".") || content.equals(".."), ValidationCode.E018, "Inventory content directory cannot equal '.' or '..' 
in %s", inventoryPath)); } validateInventoryVersionNumbers(inventory, inventoryPath, results); validateInventoryManifest(inventory, inventoryPath, results); validateInventoryVersions(inventory, inventoryPath, results); validateInventoryFixity(inventory, inventoryPath, results); return results.build(); } private void validateInventoryManifest(SimpleInventory inventory, String inventoryPath, ValidationResultsBuilder results) { if (inventory.getManifest() != null) { var digests = new HashSet<String>(inventory.getManifest().size()); for (var digest : inventory.getManifest().keySet()) { var digestLower = digest.toLowerCase(); if (!isDigestValidHex(digestLower, inventory.getDigestAlgorithm())) { results.addIssue(ValidationCode.E096, "Inventory manifest digests must be valid in %s. Found: %s", inventoryPath, digest); } if (digests.contains(digestLower)) { results.addIssue(ValidationCode.E096, "Inventory manifest cannot contain duplicates of digest %s in %s", digestLower, inventoryPath); } else { digests.add(digestLower); } } validateDigestPathsMap(inventory.getManifest(), path -> results.addIssue(ValidationCode.E100, "Inventory manifest cannot contain content paths that begin or end with '/' in %s. Found: %s", inventoryPath, path), path -> results.addIssue(ValidationCode.E101, "Inventory manifest content paths must be unique in %s. Found: %s", inventoryPath, path), path -> results.addIssue(ValidationCode.E099, "Inventory manifest cannot contain blank content path parts in %s. Found: %s", inventoryPath, path), path -> results.addIssue(ValidationCode.E099, "Inventory manifest cannot contain content path parts equal to '.' or '..' in %s. Found: %s", inventoryPath, path), path -> results.addIssue(ValidationCode.E101, "Inventory manifest content paths must be non-conflicting in %s. 
Found conflicting path: %s", inventoryPath, path) ); } else { results.addIssue(ValidationCode.E041, "Inventory manifest must be set in %s", inventoryPath); } } private void validateInventoryVersions(SimpleInventory inventory, String inventoryPath, ValidationResultsBuilder results) { if (inventory.getVersions() != null) { for (var entry : inventory.getVersions().entrySet()) { var versionNum = entry.getKey(); var version = entry.getValue(); if (version.getCreated() != null) { try { RFC3339_FORMAT.parse(version.getCreated()); } catch (DateTimeParseException e) { results.addIssue(ValidationCode.E049, "Inventory version %s created timestamp must be formatted in accordance to RFC3339 in %s. Found: %s", versionNum, inventoryPath, version.getCreated()); } } else { results.addIssue(ValidationCode.E048, "Inventory version %s must contain a created timestamp in %s", versionNum, inventoryPath); } if (version.getUser() != null) { var user = version.getUser(); results.addIssue(notBlank(user.getName(), ValidationCode.E054, "Inventory version %s user name must be set in %s", versionNum, inventoryPath)) .addIssue(notNull(user.getAddress(), ValidationCode.W008, "Inventory version %s user address should be set in %s", versionNum, inventoryPath)); if (user.getAddress() != null) { results.addIssue(isTrue(isUri(user.getAddress()), ValidationCode.W009, "Inventory version %s user address should be a URI in %s. Found: %s", versionNum, inventoryPath, user.getAddress())); } } else { results.addIssue(ValidationCode.W007, "Inventory version %s should contain a user in %s", versionNum, inventoryPath); } if (version.getMessage() == null) { results.addIssue(ValidationCode.W007, "Inventory version %s should contain a message in %s", versionNum, inventoryPath); } if (version.getState() != null) { var manifest = inventory.getManifest() == null ? 
Collections.emptyMap() : inventory.getManifest(); for (var digest : version.getState().keySet()) { results.addIssue(isTrue(manifest.containsKey(digest), ValidationCode.E050, "Inventory version %s contains digest %s that does not exist in the manifest in %s", versionNum, digest, inventoryPath)); } validateDigestPathsMap(version.getState(), path -> results.addIssue(ValidationCode.E053, "Inventory version %s cannot contain paths that begin or end with '/' in %s. Found: %s", versionNum, inventoryPath, path), path -> results.addIssue(ValidationCode.E095, "Inventory version %s paths must be unique in %s. Found: %s", versionNum, inventoryPath, path), path -> results.addIssue(ValidationCode.E052, "Inventory version %s cannot contain blank path parts in %s. Found: %s", versionNum, inventoryPath, path), path -> results.addIssue(ValidationCode.E052, "Inventory version %s cannot contain path parts equal to '.' or '..' in %s. Found: %s", versionNum, inventoryPath, path), path -> results.addIssue(ValidationCode.E095, "Inventory version %s paths must be non-conflicting in %s. 
Found conflicting path: %s", versionNum, inventoryPath, path) ); } else { results.addIssue(ValidationCode.E048, "Inventory version %s must contain a state in %s", versionNum, inventoryPath); } } } else { results.addIssue(ValidationCode.E043, "Inventory versions must be set in %s", inventoryPath); } } private void validateInventoryVersionNumbers(SimpleInventory inventory, String inventoryPath, ValidationResultsBuilder results) { if (inventory.getVersions() != null) { if (inventory.getHead() != null && !inventory.getVersions().containsKey(inventory.getHead())) { results.addIssue(ValidationCode.E010, "Inventory versions is missing an entry for version %s in %s", inventory.getHead(), inventoryPath); } if (inventory.getVersions().size() > 0) { var versions = new TreeSet<VersionNum>(Comparator.naturalOrder()); inventory.getVersions().keySet().forEach(version -> { parseAndValidateVersionNum(version, inventoryPath, results).ifPresent(versions::add); }); long previousNum = 0; Integer paddingWidth = null; boolean inconsistentPadding = false; for (var currentNum : versions) { var nextNum = previousNum + 1; if (currentNum.getVersionNum() == nextNum) { if (paddingWidth == null) { paddingWidth = currentNum.getZeroPaddingWidth(); } else if (!inconsistentPadding) { inconsistentPadding = paddingWidth != currentNum.getZeroPaddingWidth(); } } else { var missing = new VersionNum(nextNum, currentNum.getZeroPaddingWidth()); while (!missing.equals(currentNum)) { results.addIssue(ValidationCode.E010, "Inventory versions is missing an entry for version %s in %s", missing, inventoryPath); missing = missing.nextVersionNum(); } } previousNum = currentNum.getVersionNum(); } results.addIssue(isFalse(inconsistentPadding, ValidationCode.E013, "Inventory versions contain inconsistently padded version numbers in %s", inventoryPath)); var highestVersion = versions.isEmpty() ? 
null : versions.last(); if (highestVersion != null && inventory.getHead() != null) { results.addIssue(isTrue(highestVersion.toString().equals(inventory.getHead()), ValidationCode.E040, "Inventory head must be the highest version number in %s. Expected: %s; Found: %s", inventoryPath, highestVersion, inventory.getHead())); } } else { results.addIssue(ValidationCode.E008, "Inventory must contain at least one version %s", inventoryPath); } } } private void validateInventoryFixity(SimpleInventory inventory, String inventoryPath, ValidationResultsBuilder results) { if (inventory.getFixity() != null) { var fixity = inventory.getFixity(); for (var entry : fixity.entrySet()) { var algorithm = entry.getKey(); var digestMap = entry.getValue(); if (digestMap != null) { var digests = new HashSet<String>(digestMap.size()); for (var digest : digestMap.keySet()) { var digestLower = digest.toLowerCase(); if (!isDigestValidHex(digestLower, algorithm)) { results.addIssue(ValidationCode.E057, "Inventory fixity block digests must be valid in %s. Found: %s", inventoryPath, digest); } if (digests.contains(digestLower)) { results.addIssue(ValidationCode.E097, "Inventory fixity block cannot contain duplicates of digest %s in %s", digestLower, inventoryPath); } else { digests.add(digestLower); } } validateDigestPathsMap(digestMap, path -> results.addIssue(ValidationCode.E100, "Inventory fixity block cannot contain content paths that begin or end with '/' in %s. Found: %s", inventoryPath, path), path -> results.addIssue(ValidationCode.E101, "Inventory fixity block content paths must be unique in %s. Found: %s", inventoryPath, path), path -> results.addIssue(ValidationCode.E099, "Inventory fixity block cannot contain blank content path parts in %s. Found: %s", inventoryPath, path), path -> results.addIssue(ValidationCode.E099, "Inventory fixity block cannot contain content path parts equal to '.' or '..' in %s. 
Found: %s", inventoryPath, path), path -> results.addIssue(ValidationCode.E101, "Inventory fixity block content paths must be non-conflicting in %s. Found conflicting path: %s", inventoryPath, path) ); } } } } private void validateDigestPathsMap(Map<String, List<String>> map, Consumer<String> leadingTrailingSlashes, Consumer<String> nonUnique, Consumer<String> blankPart, Consumer<String> dotPart, Consumer<String> conflicting) { var files = new HashSet<String>(); var dirs = new HashSet<String>(); for (var paths : map.values()) { for (var path : paths) { var trimmedPath = path; var startsWith = path.startsWith("/"); var endsWith = path.endsWith("/"); if (startsWith || endsWith) { leadingTrailingSlashes.accept(path); // Trim here so that we don't get empty part failures too if (startsWith) { trimmedPath = trimmedPath.substring(1); } if (endsWith) { trimmedPath = trimmedPath.substring(0, trimmedPath.length() - 1); } } if (files.contains(path)) { nonUnique.accept(path); } else { files.add(path); } var parts = trimmedPath.split("/"); var pathBuilder = new StringBuilder(); var erroredBlank = false; var erroredDot = false; for (int i = 0; i < parts.length; i++) { var part = parts[i]; if (!erroredBlank && part.isEmpty()) { blankPart.accept(path); erroredBlank = true; } else if (!erroredDot && (part.equals(".") || part.equals(".."))) { dotPart.accept(path); erroredDot = true; } if (i < parts.length - 1) { if (i > 0) { pathBuilder.append("/"); } pathBuilder.append(part); dirs.add(pathBuilder.toString()); } } } } Set<String> iter; Set<String> check; if (files.size() > dirs.size()) { iter = dirs; check = files; } else { iter = files; check = dirs; } iter.forEach(path -> { if (check.contains(path)) { conflicting.accept(path); } }); } private Optional<VersionNum> parseAndValidateVersionNum(String num, String inventoryPath, ValidationResultsBuilder results) { Optional<VersionNum> versionNum = Optional.empty(); if (isInvalidVersionNum(num)) { // TODO this is not the right code 
https://github.com/OCFL/spec/issues/532 results.addIssue(ValidationCode.E011, "Inventory contains invalid version number in %s. Found: %s", inventoryPath, num); } else { var parsed = VersionNum.fromString(num); if (parsed.equals(VERSION_ZERO)) { results.addIssue(ValidationCode.E009, "Inventory version numbers must be greater than 0 in %s. Found: %s", inventoryPath, num); } else { versionNum = Optional.of(parsed); } } return versionNum; } private Optional<ValidationIssue> notBlank(String value, ValidationCode code, String messageTemplate, Object... args) { if (value == null || value.isBlank()) { return Optional.of(createIssue(code, messageTemplate, args)); } return Optional.empty(); } private Optional<ValidationIssue> notNull(Object value, ValidationCode code, String messageTemplate, Object... args) { if (value == null) { return Optional.of(createIssue(code, messageTemplate, args)); } return Optional.empty(); } private Optional<ValidationIssue> isTrue(boolean condition, ValidationCode code, String messageTemplate, Object... args) { if (!condition) { return Optional.of(createIssue(code, messageTemplate, args)); } return Optional.empty(); } private Optional<ValidationIssue> isFalse(boolean condition, ValidationCode code, String messageTemplate, Object... 
args) { if (condition) { return Optional.of(createIssue(code, messageTemplate, args)); } return Optional.empty(); } private boolean isUri(String value) { try { var uri = new URI(value); return !(uri.getScheme() == null || uri.getScheme().isBlank()); } catch (URISyntaxException e) { return false; } } private boolean isInvalidVersionNum(String num) { return num == null || !VALID_VERSION.matcher(num).matches(); } private Optional<ValidationIssue> ifNotNull(Object value, Supplier<Optional<ValidationIssue>> condition) { if (value != null) { return condition.get(); } return Optional.empty(); } private boolean isDigestValidHex(String lowerDigest, String algorithm) { // can't validate something we don't have info on if (!(algorithm == null || !DIGEST_LENGTHS.containsKey(algorithm))) { var length = DIGEST_LENGTHS.get(algorithm); if (lowerDigest.length() != length) { return false; } for (int i = 0; i < lowerDigest.length(); i++) { if (!lowerHexChars.get(lowerDigest.charAt(i))) { return false; } } } return true; } private ValidationIssue createIssue(ValidationCode code, String messageTemplate, Object... args) { var message = messageTemplate; if (args != null && args.length > 0) { message = String.format(messageTemplate, args); } return new ValidationIssue(code, message); } }
zhangdan660/jgdraw
symbol/src/main/java/fr/itris/glips/svgeditor/AbstractHyperlinkAction.java
<gh_stars>0 /* * $Id: AbstractHyperlinkAction.java 3484 2009-09-03 15:55:34Z kleopatra $ * * Copyright 2006 Sun Microsystems, Inc., 4150 Network Circle, * Santa Clara, California 95054, U.S.A. All rights reserved. * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA */ package fr.itris.glips.svgeditor; import java.awt.event.ItemEvent; import org.jdesktop.swingx.action.AbstractActionExt; /** * Convenience implementation to simplify {@link org.jdesktop.swingx.JXHyperlink} configuration and * provide minimal api. <p> * * @author <NAME> */ public abstract class AbstractHyperlinkAction<T> extends AbstractActionExt { /** * Key for the visited property value. */ public static final String VISITED_KEY = "visited"; /** * the object the actionPerformed can act on. */ protected T target; /** * Instantiates a LinkAction with null target. * */ public AbstractHyperlinkAction () { this(null); } /** * Instantiates a LinkAction with a target of type targetClass. * The visited property is initialized as defined by * {@link AbstractHyperlinkAction#installTarget()} * * @param target the target this action should act on. */ public AbstractHyperlinkAction(T target) { setTarget(target); } /** * Set the visited property. 
* * @param visited */ public void setVisited(boolean visited) { putValue(VISITED_KEY, visited); } /** * * @return visited state */ public boolean isVisited() { Boolean visited = (Boolean) getValue(VISITED_KEY); return Boolean.TRUE.equals(visited); } public T getTarget() { return target; } /** * PRE: isTargetable(target) * @param target */ public void setTarget(T target) { T oldTarget = getTarget(); uninstallTarget(); this.target = target; installTarget(); firePropertyChange("target", oldTarget, getTarget()); } /** * hook for subclasses to update internal state after * a new target has been set. <p> * * Subclasses are free to decide the details. * Here: * <ul> * <li> the text property is set to target.toString or empty String if * the target is null * <li> visited is set to false. * </ul> */ protected void installTarget() { setName(target != null ? target.toString() : "" ); setVisited(false); } /** * hook for subclasses to cleanup before the old target * is overwritten. <p> * * Subclasses are free to decide the details. * Here: does nothing. */ protected void uninstallTarget() { } @Override public void itemStateChanged(ItemEvent e) { // do nothing } /** * Set the state property. * Overridden to to nothing. * PENDING: really? * @param state if true then this action will fire ItemEvents */ @Override public void setStateAction(boolean state) { } }
maxis42/Programming-in-python-Coursera-MIPT
1 Diving in python/Homework/6 Async/client_server/client.py
<gh_stars>1-10 import socket import time import re class Client: def __init__(self, host, port, timeout=None): self.host = host self.port = port self.timeout = timeout # seconds self.sock = self._sock @property def _sock(self): socket.setdefaulttimeout(self.timeout) sock = socket.create_connection((self.host, self.port), self.timeout) return sock def get(self, metric_type): query = f"get {metric_type}\n" self.sock.sendall(query.encode("utf-8")) status, data = self._get_response() metrics_data = self._parse_data(data) return metrics_data def put(self, metric_type, metric_value, timestamp=None): if timestamp is None: timestamp = int(time.time()) query = f"put {metric_type} {metric_value} {timestamp}\n" self.sock.sendall(query.encode("utf-8")) status, data = self._get_response() def __del__(self): self.sock.close() def _get_response(self): data = self.recvall(self.sock) response = data.decode("utf-8") self._check_response(response) response_data = response.split() status = response_data[0] data = response_data[1:] return status, data @staticmethod def _check_response(response): if (response[-1] != "\n") or (response[-2] != "\n"): raise ClientError() data = response.split("\n") status = data[0] content = data[1:-2] if status != "ok": raise ClientError() for metric in content: metric_data = metric.split(" ") if len(metric_data) != 3: raise ClientError() metric_type, metric_value, timestamp = metric_data # if re.fullmatch("\w+\.\w+", metric_type) is None: # raise ClientError() # # if re.fullmatch("[-+]?\d*\.\d+|\d+", metric_value) is None: # raise ClientError() # # if re.fullmatch("\d+", timestamp) is None: # raise ClientError() @staticmethod def recvall(sock): BUFF_SIZE = 4096 # 4 KiB data = b'' while True: part = sock.recv(BUFF_SIZE) data += part if len(part) < BUFF_SIZE: # either 0 or end of data break return data @staticmethod def _parse_data(data): data = iter(data) data = zip(data, data, data) metrics_data = {} for metric_type, metric_value, timestamp in data: 
metric_value = float(metric_value) timestamp = int(timestamp) if metric_type not in metrics_data: metrics_data[metric_type] = [] metrics_data[metric_type].append((timestamp, metric_value)) for metric_type in metrics_data: metrics_data[metric_type] = sorted(metrics_data[metric_type], key=lambda x: x[0]) return metrics_data class ClientError(Exception): """ Client error exception class. """ pass if __name__ == "__main__": client = Client("127.0.0.1", 8888, timeout=5) client.put("palm.cpu", 0.5, timestamp=1150864247) client.put("palm.cpu", 2.0, timestamp=1150864248) client.put("palm.cpu", 0.5, timestamp=1150864248) client.put("eardrum.cpu", 3, timestamp=1150864250) client.put("eardrum.cpu", 4, timestamp=1150864251) client.put("eardrum.memory", 4200000) print(client.get("*"))
daniol/angular-line-awesome
projects/angular-line-awesome/icons/build/-lab-artstation.icon.js
/**
 * Line Awesome "ArtStation" brand icon, registered under the name
 * `lab-artstation`. `data` carries the raw inline SVG markup that the
 * icon component injects verbatim into the DOM.
 */
export const labArtstation = {
  name: 'lab-artstation',
  data: `<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 32 32"><path d="M13.73 5L26 27l1.994-4.512a3.004 3.004 0 00-.123-2.908L20.096 6.436A3.003 3.003 0 0017.536 5H13.73zm-2.218 2.178L5.63 18h11.916L11.512 7.178zM4 21l2.172 4.342A2.998 2.998 0 008.854 27h13.712l-3.345-6H4z"/></svg>`
};
JohnsonLu3/Gerrymandering-Analysis
src/main/java/gerrymandering/repository/VotesRepository.java
package gerrymandering.repository;

import gerrymandering.model.Votes;
import org.springframework.data.repository.CrudRepository;

import java.util.List;

/**
 * Spring Data repository for {@link Votes} rows, keyed by their integer id.
 *
 * <p>Narrows {@link CrudRepository#findAll()} to return a {@link List}
 * instead of a plain {@code Iterable}, so callers get indexed access
 * without casting. Spring generates the implementation at runtime.
 */
public interface VotesRepository extends CrudRepository<Votes, Integer> {
    List<Votes> findAll();
}
Futureblur/MoonEngine
Engine/Source/Runtime/Event/EventSystem.h
<filename>Engine/Source/Runtime/Event/EventSystem.h // Copyright (c) 2021 Futureblur. All rights reserved. Visit https://ice.futureblur.com for more information. #pragma once #include "Core/Core.h" #include "Core/IO/Log.h" #include "EventTypes.h" #include "Core/Error/ErrorList.h" //Converts the IEvent base class to any derived type. //NewVarName is the name of the new variable that will be created by this macro. //It should not already exist in the current scope otherwise it will cause conflicts. #define ICE_EVENT_CAST(NewVarName, BaseVarName, EventType) EventType NewVarName; \ try { NewVarName = dynamic_cast<const EventType&>(BaseVarName); } catch (const std::bad_cast& e) \ { \ ICE_FATAL(Error::EFailed::A_CAST_EVENT, { #NewVarName, #EventType }); \ } namespace Iceblur { class ICE_API EventSystem { public: using Callback = std::function<void(const IEvent& e)>; using EventList = std::map<IEvent::EventName, std::vector<Callback>>; EventSystem() = default; ~EventSystem() = default; static void Initialize(); static void Subscribe(IEvent::EventName type, const Callback& trigger); static void Dispatch(const IEvent& event); private: static EventSystem* GetInstance() { return m_Instance; } private: static inline EventSystem* m_Instance = nullptr; EventList m_EventList; }; }
open-star/GD-OpenDolphin
client/src/open/dolphin/infomodel/OrcaInputCd.java
<gh_stars>1-10 /* * OrcaInputCd.java * Copyright (C) 2007 Digital Globe, Inc. All rights reserved. * * This program is free software; you can redistribute it and/or * modify it under the terms of the GNU General Public License * as published by the Free Software Foundation; either version 2 * of the License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */ package open.dolphin.infomodel; import java.io.IOException; import java.io.Writer; import java.util.ArrayList; import java.util.List; import open.dolphin.queries.DolphinQuery; import org.apache.commons.lang.builder.ToStringBuilder; /** * MEMO:ORCA の tbl_inputcd エンティティクラス。 * * @author Minagawa, Kazushi */ public class OrcaInputCd extends InfoModel {//id private String hospId; private String cdsyu; private String inputCd; private String sryKbn; private String sryCd; private int dspSeq; private String dspName; private String termId; private String opId; private String creYmd; private String upYmd; private String upHms; private List<OrcaInputSet> inputSet; /** * Creates a new instance of OrcaInputCd */ public OrcaInputCd() { } /** * 検索 MEMO:何もしない * @param query * @return false */ @Override public boolean search(DolphinQuery query) { return false; } /** * hospIdのGetter * @return */ public String getHospId() { return hospId; } /** * hospIdのSetter * @param hospId */ public void setHospId(String hospId) { this.hospId = hospId; } /** * cdsyuのGetter * @return */ public String getCdsyu() { return cdsyu; } /** * cdsyuのSetter * @param cdsyu */ public void setCdsyu(String cdsyu) { 
this.cdsyu = cdsyu; } /** * inputCdのGetter * @return */ public String getInputCd() { return inputCd; } /** * inputCdのSetter * @param inputCd */ public void setInputCd(String inputCd) { this.inputCd = inputCd; } /** * sryKbnのGetter * @return */ public String getSryKbn() { return sryKbn; } /** * sryKbnのSetter * @param sryKbn */ public void setSryKbn(String sryKbn) { this.sryKbn = sryKbn; } /** * sryCdのGetter * @return */ public String getSryCd() { return sryCd; } /** * sryCdのSetter * @param sryCd */ public void setSryCd(String sryCd) { this.sryCd = sryCd; } /** * dspSeqのGetter * @return */ public int getDspSeq() { return dspSeq; } /** * dspSeqのSetter * @param dspSeq */ public void setDspSeq(int dspSeq) { this.dspSeq = dspSeq; } /** * dspNameのGetter * @return */ public String getDspName() { return dspName; } /** * dspNameのSetter * @param dspName */ public void setDspName(String dspName) { this.dspName = dspName; } /** * termIdのGetter * @return */ public String getTermId() { return termId; } /** * termIdのSetter * @param termId */ public void setTermId(String termId) { this.termId = termId; } /** * opIdのGetter * @return */ public String getOpId() { return opId; } /** * opIdのSetter * @param opId */ public void setOpId(String opId) { this.opId = opId; } /** * creYmdのGetter * @return */ public String getCreYmd() { return creYmd; } /** * creYmdのSetter * @param creYmd */ public void setCreYmd(String creYmd) { this.creYmd = creYmd; } /** * upYmdのGetter * @return */ public String getUpYmd() { return upYmd; } /** * upYmdのSetter * @param upYmd */ public void setUpYmd(String upYmd) { this.upYmd = upYmd; } /** * upHmsのGetter * @return */ public String getUpHms() { return upHms; } /** * upHmsのSetter * @param upHms */ public void setUpHms(String upHms) { this.upHms = upHms; } /** * inputSetのGetter * @return */ public List<OrcaInputSet> getInputSet() { return inputSet; } /** * inputSetのSetter * @param list */ public void setInputSet(List<OrcaInputSet> list) { inputSet = list; } /** * 
inputSetのAdder * @param set */ public void addInputSet(OrcaInputSet set) { if (inputSet == null) { inputSet = new ArrayList<OrcaInputSet>(); } inputSet.add(set); } /** * stampInfoのGetter * @return */ public ModuleInfoBean getStampInfo() { ModuleInfoBean ret = new ModuleInfoBean(); ret.initialize(getInputCd(), getDspName(), null, ENTITY_MED_ORDER, ROLE_ORCA_SET); // ret.setStampName(getDspName()); // ret.setStampRole(ROLE_ORCA_SET); // ret.setEntity(ENTITY_MED_ORDER); // ret.setStampId(getInputCd()); return ret; } /** * bundleMedのGetter * @return */ public BundleMed getBundleMed() { BundleMed ret = new BundleMed(); for (OrcaInputSet set : inputSet) { ret.addClaimItem(set.getClaimItem()); } return ret; } /** * * @return */ @Override public String toString() { return ToStringBuilder.reflectionToString(this); } /** * MEMO:何もしない * @param result * @throws IOException */ public void serialize(Writer result) throws IOException { //TODO serialize } /** * MEMO:何もしない * @param result * @throws IOException */ public void deserialize(Writer result) throws IOException { //TODO deserialize } }
f-m-c/TAMshapes
Pencil/src/includes/functions/calculateHorVertLineAndConstraints.js
function calculateHorVertLineAndConstraints ( iStart, iEnd, iControlPoints, iCPnum = 0, startsHorizontal, isStraight = false, alignType = "center", endsDiagonal = false, cornerRadius = 10) { // calculate x/y contraints for segments for a horizontal / vertical only routing with open number of control points // iStart, iEnd: Start point and end point of the line // iControlPoints: Array of all control points {x: , y: } between start and end // iCPnum: number of control points to be used from array // startsHorizontal: First line at start point horizontal (true) or vertical (false) // aligntype: start, center, end, control - only for straight lines (no elbows: iPoints has only two elements or three if aligntype = control) // endsDiagonal: last line (to end) can be diagonal - iPoints has at least 3 elements // cornerRadius: determines minimal distance to calculate an elbow, else corner will be ignored // // output: Array for all control points // [{xmin: , xmax:, ymin:, ymax:}] // // Example: startsHorizontal = false iPoints: 3 elements // iStart x (= p1) // | p2 // pa x----O----x pb // | // x iEnd (= p3) // output: [{xmin: (iEnd.x+iStart.x)/2 , xmax:(iEnd.x+iStart.x)/2, ymin:-9999, ymax:9999}] // // Example2: startsHorizontal = true // p1 x----x 1 // 0 | // p2 O p6 p6 // p3 | x 6 x // 3 x-O-x 2 | / // p4 O | / // 4 x----O----x 5 x----O // p5 p5 (endsDiagonal = true) // output: [{xmin:-9999, xmax:9999, ymin:(iStart.y+p[2].y)/2, ymax:(iStart.y+p[2].y)/2}, // {xmin: (p[2].x+p[3].x)/2 , xmax:(p[2].x+p[3].x)/2, ymin:-9999, ymax:9999}, // {xmin:-9999, xmax:9999, ymin: (p[3].y+p[4].y)/2, ymax:(p[3].y+p[4].y)/2}, // {xmin: (p[4].x+p[5].x)/2 , xmax:(p[4].x+p[5].x)/2, ymin:-9999, ymax:9999}] // range determines whether an ellbow will be inserted at all const range = cornerRadius * 0.7; // target point array const aPoints = []; const aConstraints = []; for (let i = 0; i < iControlPoints.length; i++) { aConstraints.push( {xmin: 0 - Number.MAX_VALUE, xmax: Number.MAX_VALUE, ymin: 
0 - Number.MAX_VALUE, ymax: Number.MAX_VALUE} ); } if (isStraight) { // special case: straight line. This is the only case where the starting point can differ from the // fist given point (except alignType = start) const p1 = iStart; // first point const p2 = iEnd; // last point let pa = p1; let pb = p2; switch (alignType) { case "control": // we pick the first as control point const c = iControlPoints[0]; if (startsHorizontal) { pa.x = p1.x; pa.y = c.y; pb.x = p2.x; pb.y = c.y; aConstraints[0].xmin = (p1.x+p2.x)/2; aConstraints[0].xmax = (p1.x+p2.x)/2; } else { pa.x = c.x; pa.y = p1.y; pb.x = c.x; pb.y = p2.y; aConstraints[0].ymin = (p1.y+p2.y)/2; aConstraints[0].ymax = (p1.y+p2.y)/2; } break; case "start": if (startsHorizontal) { pa.y = p1.y; pb.y = p1.y; } else { pa.x = p1.x; pb.x = p1.x; } break; case "center": if (startsHorizontal) { pa.y = p1.y + (p2.y-p1.y)/2; pb.y = pa.y; } else { pa.x = p1.x + (p2.x-p1.x)/2; pb.x = pa.x; } break; case "end": if (startsHorizontal) { pa.y = p2.y; pb.y = p2.y; } else { pa.x = p2.x; pb.x = p2.x; } break; } // switch aligntype aPoints.push(pa, pb); } else { // it's not a straight line: Add segment with elbow corner with every element of iPoints. // first line is starting point var cpnum = parseInt(iCPnum); let iPoints = []; for (let i = 0; i < cpnum; i++) { iPoints.push(iControlPoints[i]); } iPoints.push(iEnd); var p1 = iStart; var p2 = iPoints[0]; var nextIsHor = startsHorizontal; aPoints.push(iStart); // now loop over rest of array for (let i = 0; i < iPoints.length; i++){ let pax = 0; let pay = 0; p2 = iPoints[i]; //next point // last loop iteration? 
Then check whether endsDiagonal is set if (endsDiagonal && i == iPoints.length - 1 ) { // just push the control point instead of calculated point aPoints.push(iPoints[i-1]); } else { let skip = false; if (nextIsHor) { // next segment is horizontal if (i > 0) { aConstraints[i-1].xmin = (p1.x + p2.x)/2; aConstraints[i-1].xmax = (p1.x + p2.x)/2 } if (Math.abs(p2.x-p1.x) < range) { // too small: don't add elbow skip = true; } else { if (i == iPoints.length - 1 && Math.abs(p2.y-p1.y) < range) { // In case it's the last control point: adjust last aPoint.y to endPoint.y aPoints[aPoints.length - 1].y = p2.y; skip = true; } else { pax = p2.x; pay = p1.y; } } } else { // vertical if (i > 0) { aConstraints[i-1].ymin = (p1.y + p2.y)/2; aConstraints[i-1].ymax = (p1.y + p2.y)/2; } if (Math.abs(p2.y-p1.y) < range) { // too small: don't add elbow skip = true; } else { if (i == iPoints.length - 1 && Math.abs(p2.x-p1.x) < range) { // In case it's the last control point: adjust last aPoint.x to endPoint.x aPoints[aPoints.length - 1].x = p2.x; skip = true; } else { pax = p1.x; pay = p2.y; } } } if (!skip) { aPoints.push({x: pax, y: pay}); p1 = {x: pax, y: pay}; } else { // push the last point again so that the control points still match the segments aPoints.push(aPoints[aPoints.length - 1]); p1 = aPoints[aPoints.length - 1]; } nextIsHor = !nextIsHor; } } // last point to push aPoints.push(p2); } return [aPoints, aConstraints]; } module.exports = calculateHorVertLineAndConstraints; //test = calculateHorVertControlConstraints2({x:0,y:0}, {x:40,y:40},[{x:20,y:5}, {x:5,y:35}], 2, true, false, "center", false);
pwr-pbrwio/PBR20M2
projects/opengrok/suggester/src/main/java/org/opengrok/suggest/popular/impl/chronicle/ChronicleMapAdapter.java
/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * See LICENSE.txt included in this distribution for the specific
 * language governing permissions and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at LICENSE.txt.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */

/*
 * Copyright (c) 2018 Oracle and/or its affiliates. All rights reserved.
 */
package org.opengrok.suggest.popular.impl.chronicle;

import net.openhft.chronicle.map.ChronicleMap;
import org.apache.lucene.util.BytesRef;
import org.opengrok.suggest.popular.PopularityMap;

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map.Entry;
import java.util.function.Predicate;

/**
 * Adapter for {@link ChronicleMap} to expose only the necessary operations needed for most popular completion.
 */
public class ChronicleMapAdapter implements PopularityMap {

    /** Persisted term -> hit-count map; replaced wholesale by {@link #resize}. */
    private ChronicleMap<BytesRef, Integer> map;

    /** Backing file the map is persisted to. */
    private final File chronicleMapFile;

    public ChronicleMapAdapter(final String name, final double averageKeySize, final int entries, final File file)
            throws IOException {
        map = ChronicleMap.of(BytesRef.class, Integer.class)
                .name(name)
                .averageKeySize(averageKeySize)
                .keyReaderAndDataAccess(BytesRefSizedReader.INSTANCE, new BytesRefDataAccess())
                .entries(entries)
                .createOrRecoverPersistedTo(file);
        this.chronicleMapFile = file;
    }

    /** {@inheritDoc} */
    @Override
    public int get(final BytesRef key) {
        // Unknown terms count as zero hits.
        return map.getOrDefault(key, 0);
    }

    /** {@inheritDoc} */
    @Override
    public void increment(final BytesRef key, final int value) {
        if (value < 0) {
            throw new IllegalArgumentException("Cannot increment by negative value " + value);
        }
        map.merge(key, value, Integer::sum);
    }

    /** {@inheritDoc} */
    @Override
    public List<Entry<BytesRef, Integer>> getPopularityData(final int page, final int pageSize) {
        if (page < 0) {
            throw new IllegalArgumentException("Cannot retrieve popularity data for negative page: " + page);
        }
        if (pageSize < 0) {
            throw new IllegalArgumentException("Cannot retrieve negative number of results: " + pageSize);
        }

        // Snapshot and sort by descending hit count before paging.
        List<Entry<BytesRef, Integer>> entries = new ArrayList<>(map.entrySet());
        entries.sort(Entry.<BytesRef, Integer>comparingByValue().reversed());

        int startIndex = page * pageSize;
        if (startIndex >= entries.size()) {
            return Collections.emptyList();
        }
        int endIndex = Math.min(startIndex + pageSize, entries.size());
        return entries.subList(startIndex, endIndex);
    }

    /**
     * Removes the entries with key that meets the predicate.
     * @param predicate predicate which tests which entries should be removed
     */
    public void removeIf(final Predicate<BytesRef> predicate) {
        map.entrySet().removeIf(e -> predicate.test(e.getKey()));
    }

    /**
     * Resizes the underlying {@link ChronicleMap}.
     *
     * <p>Implemented as dump -&gt; recreate -&gt; reload: the data is written to
     * a temporary file, the old persisted file is deleted, a new map with the
     * requested geometry is created in its place, and the dump is loaded back.
     *
     * @param newMapSize new entries count
     * @param newMapAvgKey new average key size
     * @throws IOException if some error occurred
     */
    public void resize(final int newMapSize, final double newMapAvgKey) throws IOException {
        if (newMapSize < 0) {
            throw new IllegalArgumentException("Cannot resize chronicle map to negative size");
        }
        if (newMapAvgKey < 0) {
            throw new IllegalArgumentException("Cannot resize chronicle map to map with negative key size");
        }

        Path dumpFile = Files.createTempFile("opengrok", "chronicle");
        try {
            map.getAll(dumpFile.toFile());

            String mapName = map.name();

            map.close();

            Files.delete(chronicleMapFile.toPath());

            ChronicleMap<BytesRef, Integer> resized = ChronicleMap.of(BytesRef.class, Integer.class)
                    .name(mapName)
                    .averageKeySize(newMapAvgKey)
                    .entries(newMapSize)
                    .keyReaderAndDataAccess(BytesRefSizedReader.INSTANCE, new BytesRefDataAccess())
                    .createOrRecoverPersistedTo(chronicleMapFile);

            resized.putAll(dumpFile.toFile());

            map = resized;
        } finally {
            // Always discard the temporary dump, even on failure.
            Files.delete(dumpFile);
        }
    }

    /**
     * Closes the opened {@link ChronicleMap}.
     */
    @Override
    public void close() {
        map.close();
    }
}
mharris717/ascension
lib/ascension/game/events.rb
# Free-form list of the event kinds this module models; informational only.
events = <<EOF
hero played
construct played
monster defeated
EOF

module Event
  # Ordered log of everything that happened during play.
  # Pushing an event onto the log forwards it to the side's constructs
  # and played cards so they can react.
  class Events
    include FromHash
    setup_mongo_persist :events
    attr_accessor :side

    # Serialized form is intentionally empty.
    def as_json
      []
    end

    # Backing array, lazily initialized.
    fattr(:events) { [] }

    include Enumerable

    def each(&blk)
      events.each(&blk)
    end

    # Record an event, attach the log back-reference, and propagate it.
    def <<(event)
      event.events = self
      self.events << event
      propagate(event)
    end

    # Forward the event to every construct and every played card on our
    # side; a log without a side attached propagates nowhere.
    def propagate(event)
      return unless side
      side.constructs.each { |c| c.handle_event(event, side) }
      side.played.each { |c| c.handle_event(event, side) }
    end

    # True when +event+ is the earliest logged event sharing both its
    # key and its class.
    def first?(event)
      matches = events.select { |x| x.key == event.key && x.class == event.class }
      raise "Bad something" if matches.empty?
      matches.first == event
    end

    def [](i)
      events[i]
    end

    # True if any logged event satisfies the block.
    def cond?(&blk)
      events.any?(&blk)
    end
  end

  # Base class for concrete events; keeps a back-reference to the log.
  class Base
    include FromHash
    attr_accessor :events

    # Whether this is the first event of its kind in the log.
    fattr(:first) do
      events.first?(self)
    end
  end

  class CardPlayed < Base
    setup_mongo_persist :card
    attr_accessor :card

    def realm
      card.realm
    end

    def card_type
      card.class
    end

    # Identity used for "first of its kind" checks.
    def key
      [realm, card_type]
    end
  end

  class MonsterKilled < Base
    setup_mongo_persist :card
    attr_accessor :card

    # Whether the defeated monster was the center-row one.
    fattr(:center) { false }

    def key
      [center]
    end
  end

  class CardPurchased < Base
    setup_mongo_persist :card
    attr_accessor :card

    def realm
      card.realm
    end

    def card_type
      card.class
    end

    def key
      [realm, card_type]
    end
  end

  class EndTurn < Base
    # Placeholder persisted value; end-of-turn events carry no payload.
    fattr(:standin) do
      14
    end
    setup_mongo_persist :standin

    def key
      [:end_turn]
    end
  end
end

# Placeholder for future trigger support.
class Trigger
end
ke4qqq/cloudstack
awsapi/src/com/cloud/bridge/service/core/ec2/EC2Volume.java
<gh_stars>1-10 // Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package com.cloud.bridge.service.core.ec2; import java.util.ArrayList; import java.util.List; public class EC2Volume { private String id; private Long size; // <- in gigs private String zoneName; private String instanceId; private String snapshotId; private String device; private String deviceId; private String state; private String type; private String VMState; private String hypervisor; private String created; private String attached; private String attachmentState; private List<EC2TagKeyValue> tagsSet; public EC2Volume() { id = null; zoneName = null; instanceId = null; snapshotId = null; device = null; deviceId = null; state = null; type = null; VMState = null; hypervisor = null; created = null; attached = null; attachmentState = null; tagsSet = new ArrayList<EC2TagKeyValue>(); } public void setSize(Long size) { if (size != null) { this.size = (size / 1073741824); } else this.size = (long)0; } /** * @return the id */ public String getId() { return id; } /** * @return the size */ public Long getSize() { return size; } /** * @return the zoneName */ public String getZoneName() { return zoneName; } /** * @return the instanceId */ public String getInstanceId() { return 
instanceId; } /** * @return the snapshotId */ public String getSnapshotId() { return snapshotId; } /** * @return the device */ public String getDevice() { return device; } /** * @return the deviceId */ public String getDeviceId() { return deviceId; } /** * @return the state */ public String getState() { return state; } /** * @return the type */ public String getType() { return type; } /** * @return the VMState */ public String getVMState() { return VMState; } /** * @return the hypervisor */ public String getHypervisor() { return hypervisor; } /** * @param id the id to set */ public void setId(String id) { this.id = id; } /** * @param zoneName the zoneName to set */ public void setZoneName(String zoneName) { this.zoneName = zoneName; } /** * @param instanceId the instanceId to set */ public void setInstanceId(String instanceId) { this.instanceId = instanceId; } /** * @param snapshotId the snapshotId to set */ public void setSnapshotId(String snapshotId) { this.snapshotId = snapshotId; } /** * @param device the device to set */ public void setDevice(String device) { this.device = device; } /** * @param deviceId the deviceId to set */ public void setDeviceId(String deviceId) { this.deviceId = deviceId; } /** * @param state the state to set */ public void setState(String state) { this.state = state; } /** * @param type the type to set */ public void setType(String type) { this.type = type; } /** * @param VMState the VMState to set */ public void setVMState(String VMState) { this.VMState = VMState; } /** * @param hypervisor the hypervisor to set */ public void setHypervisor(String hypervisor) { this.hypervisor = hypervisor; } /** * @return the created */ public String getCreated() { return created; } /** * @return the attached */ public String getAttached() { return attached; } /** * @param created the created to set */ public void setCreated(String created) { this.created = created; } /** * @param attached the attached to set */ public void setAttached(String attached) 
{ this.attached = attached; } /** * @param state of the attached VM to set */ public void setAttachmentState(String attachedState) { this.attachmentState = attachedState; } /** * @return state of the vm */ public String getAttachmentState() { return attachmentState; } public void addResourceTag(EC2TagKeyValue param) { tagsSet.add(param); } public EC2TagKeyValue[] getResourceTags() { return tagsSet.toArray(new EC2TagKeyValue[0]); } }
clearstorydata/ridley
lib/ridley/version.rb
module Ridley
  # Gem release version, following Semantic Versioning.
  VERSION = "1.6.0"
end
daohuei/ucsc-nlp-unicorn
nlp_201/hw3/algo.py
import numpy as np

# Emission probabilities {(word, tag): P(word | tag)} — filled in by the caller.
word_tag_prob = {}
# Transition probabilities {(tag, prev_tag): P(tag | prev_tag)} — filled in by the caller.
tag_bigram_prob = {}
# Module-level memo table kept for backward compatibility with callers that
# pass it in; each call site may also supply its own fresh dict.
max_score_dict = {}
# Tag vocabulary iterated over when searching predecessors.
tag_set = set()


def score(words, i, cur_tag, prev_tag):
    """Log score of tagging position `i` with `cur_tag` after `prev_tag`.

    `words` is 1-indexed here: words[i] is the i-th token, so callers pad
    index 0. Raises KeyError if an (observation, tag) pair is missing from
    the probability tables.
    """
    return np.log(word_tag_prob[(words[i], cur_tag)]) + np.log(
        tag_bigram_prob[(cur_tag, prev_tag)]
    )


def max_score_tag(words, n, cur_tag, max_score_dict):
    """Best-scoring tag sequence for positions 1..n-1 ending with `cur_tag` at n.

    Returns (prev_tag_sequence, log_score) where prev_tag_sequence lists the
    tags chosen for positions 1..n-1 (empty for n == 1). With memoization the
    complexity is roughly O(n * |T|^2) — each of the n * |T| subproblems
    scans all |T| predecessors once.

    Fixes over the previous revision:
      * the returned sequence now extends the *argmax* predecessor's path
        (it used to extend whichever prev_tag happened to be iterated last,
        which depends on set ordering);
      * the memoized predecessor list is copied before being extended, so
        sibling tags at the same level no longer see a polluted path.
    """
    if n == 1:
        # First position: the only predecessor is the synthetic <START> tag.
        return [], score(words, n, cur_tag, "<START>")

    max_score = float("-inf")
    candidate_tag = None
    best_prev_sequence = None
    # Scan every possible predecessor tag: O(|T|) per subproblem.
    for prev_tag in tag_set:
        if (prev_tag, n - 1) in max_score_dict:
            # Max score of the predecessor subproblem was already computed.
            candidate_tag_sequences, prev_score = max_score_dict[(prev_tag, n - 1)]
        else:
            candidate_tag_sequences, prev_score = max_score_tag(
                words, n - 1, prev_tag, max_score_dict
            )
        cur_score = prev_score + score(words, n, cur_tag, prev_tag)
        if cur_score > max_score:
            max_score = cur_score
            candidate_tag = prev_tag
            best_prev_sequence = candidate_tag_sequences
    # Copy before extending: the memo entry for (prev_tag, n-1) must stay
    # untouched for other tags that will reuse it.
    result_sequence = list(best_prev_sequence) + [candidate_tag]
    max_score_dict[(cur_tag, n)] = (result_sequence, max_score)
    return result_sequence, max_score


def add_semiring(a, b):
    """Max-semiring 'addition': keep the (tag, score) pair with the higher score.

    Ties keep `a`, i.e. the earlier-seen candidate.
    """
    return b if a[1] < b[1] else a


def mul_semiring(a, b):
    """Max-semiring 'multiplication': plain addition in log space."""
    return a + b


def max_score_tag_semiring(words, n, cur_tag, max_score_dict):
    """Same computation as max_score_tag, phrased with explicit semiring ops.

    Fix over the previous revision: the recursive call now targets this
    function itself — it used to recurse into max_score_tag, silently mixing
    the two implementations (same scores, but inconsistent memo bookkeeping).
    The argmax-path and memo-copy fixes from max_score_tag apply here too.
    """
    if n == 1:
        return [], score(words, n, cur_tag, "<START>")

    # (best predecessor tag, best log score) accumulated over the loop.
    max_score = (None, float("-inf"))
    best_prev_sequence = None
    for prev_tag in tag_set:
        if (prev_tag, n - 1) in max_score_dict:
            candidate_tag_sequences, prev_score = max_score_dict[(prev_tag, n - 1)]
        else:
            candidate_tag_sequences, prev_score = max_score_tag_semiring(
                words, n - 1, prev_tag, max_score_dict
            )
        combined = (
            prev_tag,
            mul_semiring(prev_score, score(words, n, cur_tag, prev_tag)),
        )
        chosen = add_semiring(max_score, combined)
        if chosen is combined:
            # This predecessor is the new argmax — remember its path.
            best_prev_sequence = candidate_tag_sequences
        max_score = chosen
    result_sequence = list(best_prev_sequence) + [max_score[0]]
    max_score_dict[(cur_tag, n)] = (result_sequence, max_score[1])
    return result_sequence, max_score[1]
weucode/COMFORT
artifact_evaluation/data/codeCoverage/fuzzilli_generate/766.js
<gh_stars>10-100 function main() { const v3 = [1337,1337,1337,1337,1337]; // v3 = .object(ofGroup: Array, withProperties: ["length", "__proto__", "constructor"], withMethods: ["concat", "fill", "indexOf", "entries", "forEach", "find", "reverse", "slice", "flat", "reduce", "join", "findIndex", "reduceRight", "some", "copyWithin", "toString", "pop", "filter", "map", "splice", "keys", "unshift", "sort", "includes", "flatMap", "shift", "values", "every", "toLocaleString", "push", "lastIndexOf"]) function v5(v6,v7) { v3[8] = v7; const v11 = [13.37,13.37,13.37,13.37]; // v11 = .object(ofGroup: Array, withProperties: ["length", "__proto__", "constructor"], withMethods: ["concat", "fill", "indexOf", "entries", "forEach", "find", "reverse", "slice", "flat", "reduce", "join", "findIndex", "reduceRight", "some", "copyWithin", "toString", "pop", "filter", "map", "splice", "keys", "unshift", "sort", "includes", "flatMap", "shift", "values", "every", "toLocaleString", "push", "lastIndexOf"]) const v13 = [Int16Array,1392904795,v11,13.37,1337]; // v13 = .object(ofGroup: Array, withProperties: ["length", "__proto__", "constructor"], withMethods: ["concat", "fill", "indexOf", "entries", "forEach", "find", "reverse", "slice", "flat", "reduce", "join", "findIndex", "reduceRight", "some", "copyWithin", "toString", "pop", "filter", "map", "splice", "keys", "unshift", "sort", "includes", "flatMap", "shift", "values", "every", "toLocaleString", "push", "lastIndexOf"]) function v14(v15,v16) { const v22 = "sO0KQ7OSCy".indexOf(0,-65535); // v22 = .integer let v23 = v22; const v24 = "k**baeaDif"; // v24 = .string + .object(ofGroup: String, withProperties: ["constructor", "__proto__", "length"], withMethods: ["padEnd", "split", "charAt", "match", "lastIndexOf", "charCodeAt", "trim", "startsWith", "includes", "repeat", "search", "slice", "endsWith", "matchAll", "indexOf", "concat", "replace", "padStart", "substring", "codePointAt"]) let v25 = 0; do { const v26 = v25 + 1; // v26 = .primitive v25 
= v26; let v31 = noFTL; const v35 = "dotAll" << v31; // v35 = .integer | .bigint const v36 = [v35,-4168823199,-4168823199,-4168823199]; // v36 = .object(ofGroup: Array, withProperties: ["length", "__proto__", "constructor"], withMethods: ["concat", "fill", "indexOf", "entries", "forEach", "find", "reverse", "slice", "flat", "reduce", "join", "findIndex", "reduceRight", "some", "copyWithin", "toString", "pop", "filter", "map", "splice", "keys", "unshift", "sort", "includes", "flatMap", "shift", "values", "every", "toLocaleString", "push", "lastIndexOf"]) const v37 = ["__proto__",1337,-2642800796,v36,1337]; // v37 = .object(ofGroup: Array, withProperties: ["length", "__proto__", "constructor"], withMethods: ["concat", "fill", "indexOf", "entries", "forEach", "find", "reverse", "slice", "flat", "reduce", "join", "findIndex", "reduceRight", "some", "copyWithin", "toString", "pop", "filter", "map", "splice", "keys", "unshift", "sort", "includes", "flatMap", "shift", "values", "every", "toLocaleString", "push", "lastIndexOf"]) function v39(v40,v41) { const v44 = "string".padStart(1337,v37); // v44 = .string + .object(ofGroup: String, withProperties: ["constructor", "__proto__", "length"], withMethods: ["padEnd", "split", "charAt", "match", "lastIndexOf", "charCodeAt", "trim", "startsWith", "includes", "repeat", "search", "slice", "endsWith", "matchAll", "indexOf", "concat", "replace", "padStart", "substring", "codePointAt"]) const v45 = Function(v44); // v45 = .unknown } const v47 = new Promise(v39); // v47 = .object(ofGroup: Promise, withProperties: ["__proto__"], withMethods: ["finally", "then", "catch"]) for (const v48 in v13) { } const v50 = v23.__proto__; // v50 = .unknown v23 = -3511245301; } while (v25 < 5); } const v51 = v14(v14,v14); // v51 = .unknown "1000"[-65536] = v6; const v53 = RegExp; // v53 = .object(ofGroup: Function, withProperties: ["constructor", "caller", "arguments", "length", "prototype", "name", "__proto__"], withMethods: ["apply", "call", 
"bind"]) + .function([.string] => .object(ofGroup: RegExp, withProperties: ["ignoreCase", "global", "dotAll", "__proto__", "unicode", "source", "multiline", "flags", "sticky"], withMethods: ["test", "compile", "exec"]) + .regexp) + .constructor([.string] => .object(ofGroup: RegExp, withProperties: ["ignoreCase", "global", "dotAll", "__proto__", "unicode", "source", "multiline", "flags", "sticky"], withMethods: ["test", "compile", "exec"]) + .regexp) const v55 = "unicode".__proto__; // v55 = .object() const v56 = "unicode".padStart(1337,v5); // v56 = .string + .object(ofGroup: String, withProperties: ["constructor", "__proto__", "length"], withMethods: ["padEnd", "split", "charAt", "match", "lastIndexOf", "charCodeAt", "trim", "startsWith", "includes", "repeat", "search", "slice", "endsWith", "matchAll", "indexOf", "concat", "replace", "padStart", "substring", "codePointAt"]) const v57 = Function(v56); // v57 = .unknown let v60 = 0; do { const v61 = v60 + 1; // v61 = .primitive v60 = v61; } while (v60 < 5); return -3793068018; } const v63 = new Promise(v5); // v63 = .object(ofGroup: Promise, withProperties: ["__proto__"], withMethods: ["finally", "then", "catch"]) } main();
wenwei8268/Alink
core/src/main/java/com/alibaba/alink/operator/batch/dataproc/StringIndexerPredictBatchOp.java
package com.alibaba.alink.operator.batch.dataproc;

import org.apache.flink.ml.api.misc.param.Params;

import com.alibaba.alink.common.annotation.InputPorts;
import com.alibaba.alink.common.annotation.NameCn;
import com.alibaba.alink.common.annotation.ParamSelectColumnSpec;
import com.alibaba.alink.common.annotation.PortDesc;
import com.alibaba.alink.common.annotation.PortSpec;
import com.alibaba.alink.common.annotation.PortSpec.OpType;
import com.alibaba.alink.common.annotation.PortType;
import com.alibaba.alink.common.annotation.TypeCollections;
import com.alibaba.alink.operator.batch.utils.ModelMapBatchOp;
import com.alibaba.alink.operator.common.dataproc.StringIndexerModelMapper;
import com.alibaba.alink.params.dataproc.StringIndexerPredictParams;

/**
 * Map string to index.
 *
 * <p>Batch prediction operator: consumes a model produced by
 * {@link StringIndexerTrainBatchOp} (first input port) and a data set (second input port),
 * and maps the values of the selected int/long/string column to their indices.
 * The per-row mapping is delegated to {@link StringIndexerModelMapper}.
 */
@InputPorts(values = {
	@PortSpec(value = PortType.MODEL, opType = OpType.BATCH, desc = PortDesc.PREDICT_INPUT_MODEL, suggestions = StringIndexerTrainBatchOp.class),
	@PortSpec(value = PortType.DATA, desc = PortDesc.PREDICT_INPUT_DATA)
})
@ParamSelectColumnSpec(name = "selectedCol", allowedTypeCollections = TypeCollections.INT_LONG_STRING_TYPES)
@NameCn("StringIndexer预测")
public final class StringIndexerPredictBatchOp extends ModelMapBatchOp <StringIndexerPredictBatchOp>
	implements StringIndexerPredictParams <StringIndexerPredictBatchOp> {

	private static final long serialVersionUID = 3074096923032622056L;

	/** Creates the operator with an empty parameter set. */
	public StringIndexerPredictBatchOp() {
		this(new Params());
	}

	/**
	 * Creates the operator with the given parameters.
	 *
	 * @param params operator parameters (see {@link StringIndexerPredictParams})
	 */
	public StringIndexerPredictBatchOp(Params params) {
		super(StringIndexerModelMapper::new, params);
	}
}
dautapankumardora/rdkservices
StateObserver/StateObserver.h
<reponame>dautapankumardora/rdkservices /** * If not stated otherwise in this file or this component's LICENSE * file the following copyright and licenses apply: * * Copyright 2019 RDK Management * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. **/ /** * @file StateObserver.h * @brief Thunder Plugin based Implementation for StateObserver service API's (RDK-25848). */ /** @mainpage StateObserver */ #pragma once #ifndef STATEOBSERVER_H #define STATEOBSERVER_H #include <cjson/cJSON.h> #include "Module.h" #include "libIBus.h" #include "utils.h" #include "utils.h" #include "AbstractPlugin.h" namespace WPEFramework { namespace Plugin { // This is a server for a JSONRPC communication channel. // For a plugin to be capable to handle JSONRPC, inherit from PluginHost::JSONRPC. // By inheriting from this class, the plugin realizes the interface PluginHost::IDispatcher. // This realization of this interface implements, by default, the following methods on this plugin // - exists // - register // - unregister // Any other methood to be handled by this plugin can be added can be added by using the // templated methods Register on the PluginHost::JSONRPC class. // As the registration/unregistration of notifications is realized by the class PluginHost::JSONRPC, // this class exposes a public method called, Notify(), using this methods, all subscribed clients // will receive a JSONRPC message as a notification, in case this method is called. 
/**
 * @brief This class provides the interface to get the value of various device properties
 * and set up event listeners to be notified when the state of the device changes.
 *
 */
class StateObserver : public AbstractPlugin {
public:
    // Plugin callsign and the name of the JSON-RPC event this plugin emits.
    static const string STATE_OBSERVER_PLUGIN_NAME;
    static const string EVT_STATE_OBSERVER_PROPERTY_CHANGED;

private:
    // Short aliases for the WPEFramework JSON types used by the handlers below.
    typedef Core::JSON::String JString;
    typedef Core::JSON::ArrayType<JString> JStringArray;
    typedef Core::JSON::Boolean JBool;

    // We do not allow this plugin to be copied !!
    StateObserver(const StateObserver&) = delete;
    StateObserver& operator=(const StateObserver&) = delete;

    //Begin methods
    // JSON-RPC handlers: each receives the request parameters and fills `response`.
    // Return value is the framework status code (implementations live in the .cpp).
    uint32_t getValues(const JsonObject& parameters, JsonObject& response);
    uint32_t registerListeners(const JsonObject& parameters, JsonObject& response);
    uint32_t unregisterListeners(const JsonObject& parameters, JsonObject& response);
    uint32_t setApiVersionNumberWrapper(const JsonObject& parameters, JsonObject& response);
    uint32_t getApiVersionNumberWrapper(const JsonObject& parameters, JsonObject& response);
    uint32_t getRegisteredPropertyNames(const JsonObject &parameters, JsonObject &response);
    uint32_t getNameWrapper(const JsonObject& parameters, JsonObject& response);
    // Shared helper: fills `response` for the given list of property names.
    void getVal(std::vector<string> pname,JsonObject& response);
    // IARM bus registration/unregistration for receiving device state events.
    void InitializeIARM();
    void DeinitializeIARM();
    //End methods

    virtual string getName();
    virtual unsigned int getApiVersionNumber();
    void setApiVersionNumber(unsigned int apiVersionNumber);

    //Begin events
    //End events

public:
    StateObserver();
    virtual ~StateObserver();

    //IPlugin methods
    virtual const string Initialize(PluginHost::IShell* service) override;
    virtual void Deinitialize(PluginHost::IShell* service) override;

    // Static IARM event callback (C-style signature required by the IARM bus);
    // presumably forwards events to _instance — confirm in the .cpp.
    static void onReportStateObserverEvents(const char *owner, IARM_EventId_t eventId, void *data, size_t len);
    // Sends `eventname` with `param` to subscribed JSON-RPC clients.
    void notify(std::string eventname, JsonObject& param);
    // Populates `params` with a propertyName/state/error triple.
    void setProp(JsonObject& params,std::string propName,int state,int error);

public:
    // Singleton-style handle used by the static IARM callback.
    static StateObserver* _instance;

private:
    uint32_t m_apiVersionNumber;
};
} // namespace Plugin
} // namespace WPEFramework

#endif //STATEOBSERVER_H
rsdoherty/azure-sdk-for-python
sdk/metricsadvisor/azure-ai-metricsadvisor/samples/sample_feedback.py
# coding: utf-8

# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------

"""
FILE: sample_feedback.py

DESCRIPTION:
    This sample demonstrates feedback operations.

USAGE:
    python sample_feedback.py

    Set the environment variables with your own values before running the sample:
    1) METRICS_ADVISOR_ENDPOINT - the endpoint of your Azure Metrics Advisor service
    2) METRICS_ADVISOR_SUBSCRIPTION_KEY - Metrics Advisor service subscription key
    3) METRICS_ADVISOR_API_KEY - Metrics Advisor service API key
    4) METRICS_ADVISOR_METRIC_ID - the ID of an metric from an existing data feed
    5) METRICS_ADVISOR_FEEDBACK_ID - the ID of an existing feedback
"""

import os
import datetime


def sample_add_feedback():
    """Create one feedback item of each kind (anomaly, change point, comment, period)."""
    # [START add_feedback]
    # Imports are inside the function so the [START]/[END] snippet extracted
    # for the docs is self-contained.
    from azure.ai.metricsadvisor import MetricsAdvisorKeyCredential, MetricsAdvisorClient
    from azure.ai.metricsadvisor.models import AnomalyFeedback, ChangePointFeedback, CommentFeedback, PeriodFeedback

    service_endpoint = os.getenv("METRICS_ADVISOR_ENDPOINT")
    subscription_key = os.getenv("METRICS_ADVISOR_SUBSCRIPTION_KEY")
    api_key = os.getenv("METRICS_ADVISOR_API_KEY")
    metric_id = os.getenv("METRICS_ADVISOR_METRIC_ID")

    client = MetricsAdvisorClient(service_endpoint,
        MetricsAdvisorKeyCredential(subscription_key, api_key))

    anomaly_feedback = AnomalyFeedback(metric_id=metric_id,
                                       dimension_key={"Dim1": "Common Lime"},
                                       start_time=datetime.datetime(2020, 8, 5),
                                       end_time=datetime.datetime(2020, 8, 7),
                                       value="NotAnomaly")
    client.add_feedback(anomaly_feedback)

    change_point_feedback = ChangePointFeedback(metric_id=metric_id,
                                                dimension_key={"Dim1": "Common Lime"},
                                                start_time=datetime.datetime(2020, 8, 5),
                                                end_time=datetime.datetime(2020, 8, 7),
                                                value="NotChangePoint")
    client.add_feedback(change_point_feedback)

    comment_feedback = CommentFeedback(metric_id=metric_id,
                                       dimension_key={"Dim1": "Common Lime"},
                                       start_time=datetime.datetime(2020, 8, 5),
                                       end_time=datetime.datetime(2020, 8, 7),
                                       value="comment")
    client.add_feedback(comment_feedback)

    period_feedback = PeriodFeedback(metric_id=metric_id,
                                     dimension_key={"Dim1": "Common Lime"},
                                     start_time=datetime.datetime(2020, 8, 5),
                                     end_time=datetime.datetime(2020, 8, 7),
                                     period_type="AssignValue",
                                     value=2)
    client.add_feedback(period_feedback)
    # [END add_feedback]


def sample_get_feedback():
    """Fetch a single feedback item by its id and print its type."""
    # [START get_feedback]
    from azure.ai.metricsadvisor import MetricsAdvisorKeyCredential, MetricsAdvisorClient

    service_endpoint = os.getenv("METRICS_ADVISOR_ENDPOINT")
    subscription_key = os.getenv("METRICS_ADVISOR_SUBSCRIPTION_KEY")
    api_key = os.getenv("METRICS_ADVISOR_API_KEY")
    feedback_id = os.getenv("METRICS_ADVISOR_FEEDBACK_ID")

    client = MetricsAdvisorClient(service_endpoint,
        MetricsAdvisorKeyCredential(subscription_key, api_key))

    result = client.get_feedback(feedback_id=feedback_id)
    print("Type: {}; Id: {}".format(result.feedback_type, result.id))
    # [END get_feedback]


def sample_list_feedback():
    """List all feedback items for a metric and print their types."""
    # [START list_feedback]
    from azure.ai.metricsadvisor import MetricsAdvisorKeyCredential, MetricsAdvisorClient

    service_endpoint = os.getenv("METRICS_ADVISOR_ENDPOINT")
    subscription_key = os.getenv("METRICS_ADVISOR_SUBSCRIPTION_KEY")
    api_key = os.getenv("METRICS_ADVISOR_API_KEY")
    metric_id = os.getenv("METRICS_ADVISOR_METRIC_ID")

    client = MetricsAdvisorClient(service_endpoint,
        MetricsAdvisorKeyCredential(subscription_key, api_key))

    results = client.list_feedback(metric_id=metric_id)
    for result in results:
        print("Type: {}; Id: {}".format(result.feedback_type, result.id))
    # [END list_feedback]


if __name__ == '__main__':
    print("---Creating feedback...")
    sample_add_feedback()
    print("Feedback successfully created...")
    print("\n---Get a feedback...")
    sample_get_feedback()
    print("\n---List feedbacks...")
    sample_list_feedback()
lukaszlaszuk/insightconnect-plugins
plugins/google_drive/unit_test/test_upload_file.py
import sys
import os
from unittest import TestCase
from komand_google_drive.actions.upload_file import UploadFile
from komand_google_drive.actions.upload_file.schema import Input, Output
from insightconnect_plugin_runtime.exceptions import PluginException
from unit_test.util import Util
from parameterized import parameterized

sys.path.append(os.path.abspath("../"))


class TestUploadFile(TestCase):
    """Unit tests for the Google Drive UploadFile action (mocked connection via Util)."""

    @classmethod
    def setUpClass(cls) -> None:
        # One mocked action instance shared by all test cases.
        cls.action = Util.default_connector(UploadFile())

    # Each case: (test name, file payload, Google file type, folder id,
    #             expected file id, expected share link).
    @parameterized.expand(
        [
            [
                "upload_file",
                {"filename": "upload.txt", "content": "UmFwaWQ3IEluc2lnaHRDb25uZWN0Cg=="},
                "Docs",
                "0BwwA4oUTeiV1TGRPeTVjaWRDY1E",
                "upload_file",
                "https://docs.google.com/document/d/upload_file",
            ],
            [
                "upload_file2",
                {"filename": "upload.ppt", "content": "UmFwaWQ3IEluc2lnaHRDb25uZWN0Cg=="},
                "Slides",
                "0BwwA4oUTeiV1TGRPeTVjaWRDY1E",
                "upload_file2",
                "https://docs.google.com/presentation/d/upload_file2",
            ],
            [
                "upload_file3",
                {"filename": "upload.csv", "content": "UmFwaWQ3IEluc2lnaHRDb25uZWN0Cg=="},
                "Sheets",
                "0BwwA4oUTeiV1TGRPeTVjaWRDY1E",
                "upload_file3",
                "https://docs.google.com/spreadsheets/d/upload_file3",
            ],
            [
                "without_file_id",
                {"filename": "upload.txt", "content": "UmFwaWQ3IEluc2lnaHRDb25uZWN0Cg=="},
                "Docs",
                None,
                "upload_file",
                "https://docs.google.com/document/d/upload_file",
            ],
        ]
    )
    def test_upload_file(self, name, file, file_type, folder_id, expected_id, expected_link):
        """Happy path: uploading each supported file type returns the expected id and link."""
        actual = self.action.run(
            {
                Input.FILE: file,
                Input.GOOGLE_FILE_TYPE: file_type,
                Input.FOLDER_ID: folder_id,
            }
        )
        expected = {Output.FILE_ID: expected_id, Output.FILE_LINK: expected_link}
        self.assertEqual(actual, expected)

    # Error case: (test name, file payload, file type, bad folder id,
    #              expected exception cause/assistance/data).
    @parameterized.expand(
        [
            [
                "invalid_folder_id",
                {"filename": "Folder Not Found", "content": "UmFwaWQ3IEluc2lnaHRDb25uZWN0Cg=="},
                "Docs",
                "11111",
                "Something unexpected occurred.",
                "Check the logs and if the issue persists please contact support.",
                '<HttpError 404 when requesting http://example.com returned "Folder not found".'
                ' Details: "Folder not found">',
            ]
        ]
    )
    def test_upload_file_bad(self, name, file, file_type, folder_id, cause, assistance, data):
        """Unhappy path: an unknown folder id surfaces as a PluginException with the mocked 404."""
        with self.assertRaises(PluginException) as e:
            self.action.run({Input.FILE: file, Input.GOOGLE_FILE_TYPE: file_type, Input.FOLDER_ID: folder_id})
        self.assertEqual(e.exception.cause, cause)
        self.assertEqual(e.exception.assistance, assistance)
        self.assertEqual(e.exception.data, data)
digirati-co-uk/narrative-editor
packages/narrative-editor/src/components/NarrativeEditor/NarrativeEditor.js
import React, { Component } from 'react';
import {
  createHistory,
  Router,
  Location,
  LocationProvider,
  Link,
  navigate,
} from '@reach/router';
import { connect } from 'react-redux';
import EditAnnotationPage from '../EditAnnotationPage/EditAnnotationPage';
import OverviewPage from '../OverviewPage/OverviewPage';
import ExportPage from '../ExportPage/ExportPage';
// NOTE(review): `navigate` (above) and `pushHashPath` are imported but not used in this file.
import createHashSource, { pushHashPath } from 'hash-source';
import PreviewPage from '../PreviewPage/PreviewPage';
import ImportPage from '../ImportPage/ImportPage';
import {
  tileSource,
  canvas,
  metadata,
  annotations,
  reset,
} from '@narrative-editor/presley';
import uuid from 'uuid/v1';
import './NarrativeEditor.scss';
import BEM from '@fesk/bem-js/lib/index';
import posed, { PoseGroup } from 'react-pose';
import ManifestPage from '../ManifestPage/ManifestPage';

// Hash-based history so the editor works without server-side routing.
const hashSource = createHashSource();
const history = createHistory(hashSource);

// Fade routes in/out on navigation (react-pose).
const RouteContainer = posed.div({
  enter: { opacity: 1, delay: 100 },
  exit: { opacity: 0, transition: { duration: 100 } },
});

// Router wrapper that animates route transitions via PoseGroup.
const PosedRouter = ({ children }) => (
  <Location>
    {({ location }) => (
      <PoseGroup>
        <RouteContainer key={location.pathname}>
          <Router location={location}>{children}</Router>
        </RouteContainer>
      </PoseGroup>
    )}
  </Location>
);

// Pick a display string out of a IIIF language map: prefer English,
// fall back to the first available language, then to `defaultValue`.
const extractLanguage = (field, defaultValue = '') => {
  if (!field) {
    return defaultValue;
  }
  if (field.en) {
    return field.en[0];
  }
  const first = Object.values(field)[0];
  return first ? first[0] : defaultValue;
};

const $b = BEM.block('narrative-editor');

class NarrativeEditor extends Component {
  // Seeds a fresh manifest + canvas around a newly chosen image source.
  onImageSelected = source => {
    this.props.changeTileSource(source);
    const canvasId = uuid();
    this.props.createCanvas(canvasId);
    this.props.updateLabel('Untitled manifest');
    this.props.canvasUpdateLabel(canvasId, 'Untitled canvas');
  };

  // Confirms, purges persisted state, then resets the store.
  // NOTE(review): `this.props.purge` is not in the mapped action creators below —
  // presumably injected by a parent/persistence layer; confirm before relying on it.
  startAgain = () => {
    if (window.confirm('Are you sure you want to discard your work?')) {
      this.props.purge().then(() => {
        this.props.startAgain();
      });
    }
  };

  // Imports id, label, summary and metadata pairs from a chosen manifest,
  // then imports its first canvas.
  handleChooseManifest = selectedManifest => {
    const { dispatch } = this.props;
    if (selectedManifest.id) {
      dispatch(metadata.updateId(selectedManifest.id));
    }
    if (selectedManifest.label) {
      dispatch(
        metadata.updateLabel(
          extractLanguage(selectedManifest.label, 'Untitled manifest')
        )
      );
    }
    if (selectedManifest.summary) {
      dispatch(
        metadata.updateSummary(extractLanguage(selectedManifest.summary, ''))
      );
    }
    if (selectedManifest.metadata) {
      selectedManifest.metadata.forEach(metadataPair => {
        dispatch(
          metadata.addMetadataPair(
            extractLanguage(metadataPair.label, 'Untitled'),
            extractLanguage(metadataPair.value)
          )
        );
      });
    }
    this.importCanvas(selectedManifest.items[0]);
  };

  // Imports a canvas (label/summary/metadata/annotations) into the store and
  // points the viewer at the body of the canvas's first painting annotation.
  importCanvas = selectedCanvas => {
    const { dispatch } = this.props;
    const id = selectedCanvas.id || uuid();
    dispatch(canvas.createCanvas(id));
    dispatch(
      canvas.canvasUpdateLabel(
        id,
        extractLanguage(selectedCanvas.label, 'Untitled canvas')
      )
    );
    if (selectedCanvas.summary) {
      dispatch(
        canvas.canvasUpdateSummary(id, extractLanguage(selectedCanvas.summary))
      );
    }
    if (selectedCanvas.metadata) {
      selectedCanvas.metadata.forEach(metadataPair => {
        dispatch(
          canvas.canvasAddMetadataPair(
            id,
            extractLanguage(metadataPair.label, 'Untitled'),
            extractLanguage(metadataPair.value)
          )
        );
      });
    }
    const annotationList = selectedCanvas.items[0];
    const annotation = annotationList.items[0];
    if (
      selectedCanvas.annotations &&
      selectedCanvas.annotations[0] &&
      selectedCanvas.annotations[0].items
    ) {
      selectedCanvas.annotations[0].items.forEach(singleAnnotation => {
        dispatch(
          annotations.addAnnotation(singleAnnotation.id, singleAnnotation)
        );
      });
    }
    this.props.changeTileSource(annotation.body);
  };

  handleChooseCanvas = selectedCanvas => {
    this.importCanvas(selectedCanvas);
  };

  render() {
    const { currentResource, changeTileSource, startAgain, purge } = this.props;
    // No tile source selected yet: show the import screen only.
    if (!currentResource) {
      return (
        <ImportPage
          default
          route="import"
          onChooseCanvas={this.handleChooseCanvas}
          onChooseManifest={this.handleChooseManifest}
        />
      );
    }
    return (
      <LocationProvider history={history}>
        <div className={$b.modifier('dark')}>
          <header className={$b.element('header')}>
            <h1 className={$b.element('title')}>Narrative editor</h1>
            <ul className={$b.element('navigation')}>
              <li className={$b.element('navigation-item')}>
                <Link to="/">Overview</Link>
              </li>
              <li className={$b.element('navigation-item')}>
                <Link to="/manifest">Manifest</Link>
              </li>
              <li className={$b.element('navigation-item')}>
                <Link to="/preview">Preview</Link>
              </li>
              <li className={$b.element('navigation-item')}>
                <Link to="/export">Export</Link>
              </li>
              <li
                className={$b
                  .element('navigation-item')
                  .modifier('start-again')}
                onClick={this.startAgain}
              >
                Start again?
              </li>
            </ul>
          </header>
          <main style={{ overflow: 'hidden', height: '100vh' }}>
            <PosedRouter>
              <OverviewPage navigate={history.navigate} path="/" />
              <ManifestPage path="/manifest" />
              <EditAnnotationPage
                path="edit-annotation/:annotationId"
                onUpdateAnnotation={() => {
                  history.navigate('/');
                }}
              />
              <ExportPage path="export" />
              <PreviewPage path="preview" />
            </PosedRouter>
          </main>
        </div>
      </LocationProvider>
    );
  }
}

const mapStateToProps = state => ({
  currentResource: state.tileSource.current,
});

const bindActionCreators = {
  dispatch: ev => ev,
  changeTileSource: tileSource.changeTileSource,
  createCanvas: canvas.createCanvas,
  updateLabel: metadata.updateLabel,
  canvasUpdateLabel: canvas.canvasUpdateLabel,
  startAgain: reset,
};

export default connect(
  mapStateToProps,
  bindActionCreators
)(NarrativeEditor);
sriharshachilakapati/SilenceEngine
silenceengine/src/main/java/com/shc/silenceengine/audio/openal/ALFormat.java
/* * The MIT License (MIT) * * Copyright (c) 2014-2017 <NAME> * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.shc.silenceengine.audio.openal; import com.shc.silenceengine.core.SilenceException; import static com.shc.silenceengine.audio.AudioDevice.Constants.*; /** * Represents the internal format of an OpenAL Buffer. This format is not the file format or the encoding, but it is * whether the audio is mono or stereo and whether the audio is 8-bit or 16-bit audio. * * @author <NAME> */ public enum ALFormat { /** * 8-Bit Mono Audio Format */ MONO_8(AL_FORMAT_MONO8), /** * 16-Bit Mono Audio Format */ MONO_16(AL_FORMAT_MONO16), /** * 8-Bit Stereo Audio Format */ STEREO_8(AL_FORMAT_STEREO8), /** * 16-Bit Stereo Audio Format */ STEREO_16(AL_FORMAT_STEREO16); // The integer constant of the OpenAL format private int alFormat; ALFormat(int alFormat) { this.alFormat = alFormat; } /** * Turns the OpenAL Format integer constant into a ALFormat enum. 
* * @param value The integer constant * * @return The equivalent ALFormat enum */ public static ALFormat getEnum(int value) { switch (value) { case AL_FORMAT_MONO8: return MONO_8; case AL_FORMAT_MONO16: return MONO_16; case AL_FORMAT_STEREO8: return STEREO_8; case AL_FORMAT_STEREO16: return STEREO_16; } throw new SilenceException("Unknown format value: " + value); } /** * Returns the OpenAL format constant integer * * @return The integer constant value of this OpenAL Format */ public int getAlFormat() { return alFormat; } }
alizbazar/medizure
src/components/ActionButton.js
'use strict' import React, { Component } from 'react' import { StyleSheet, Text, View, TouchableOpacity, Image, Animated } from 'react-native' const heartActive = require('src/assets/heart.png') const heartInactive = require('src/assets/heart-inactive.png') import { constants } from 'src/styles' const styles = StyleSheet.create({ button: { width: 300, flexDirection: 'row', justifyContent: 'center', alignItems: 'center', borderWidth: 5, borderColor: constants.colors.inverse, borderRadius: 24, padding: 10, }, buttonSelected: { backgroundColor: constants.colors.inverse, }, label: Object.assign({}, constants.text.buttonCopy, { textAlign: 'center', fontSize: 22, }), labelSelected: { color: constants.colors.highlight, fontFamily: constants.fonts.bold, }, heartActive: { width: 30, height: 26, marginRight: 13, }, heartInactive: { width: 34, height: 30, marginRight: 10 } }) export default class ActionButton extends Component { renderHeart() { if (this.props.selected) { return ( <Animated.Image source={heartActive} style={[styles.heartActive, {transform: [{scale: this.props.bounce || 1.0}]} ]} />) } else { return <Image source={heartInactive} style={styles.heartInactive} /> } } render() { return ( <TouchableOpacity onPress={this.props.onPress} activeOpacity={constants.helpers.touchableOpacity}> <View style={[styles.button, this.props.selected ? styles.buttonSelected : null]}> {this.props.heart ? this.renderHeart() : null} <Text style={[styles.label, this.props.selected ? styles.labelSelected : null]}> {this.props.children} </Text> </View> </TouchableOpacity> ) } }
komba/vk
lib/vk/api/groups/methods/join.rb
# frozen_string_literal: true require 'vk/api/methods' module Vk module API class Groups < Vk::Schema::Namespace module Methods # With this method you can join the group or public page, and also confirm your participation in an event. class Join < Schema::Method # @!group Properties self.open = false self.method = 'groups.join' # @method initialize(arguments) # @param [Hash] arguments # @option arguments [Integer] :group_id ID or screen name of the community. # @option arguments [String] :not_sure Optional parameter which is taken into account when 'gid' belongs to the event:; '1' — Perhaps I will attend; '0' — I will be there for sure (default); ; # @return [Groups::Methods::Join] # @!group Arguments # @return [Integer] ID or screen name of the community. attribute :group_id, API::Types::Coercible::Int.optional.default(nil) # @return [String] Optional parameter which is taken into account when 'gid' belongs to the event:; '1' — Perhaps I will attend; '0' — I will be there for sure (default); ; attribute :not_sure, API::Types::Coercible::String.optional.default(nil) end end end end end
neuroquery/nqdc
tests/test_articles.py
from pathlib import Path
import json
from unittest.mock import patch

import pytest

from nqdc import _download, _articles


@pytest.mark.parametrize("n_jobs", [1, 3])
def test_extract_articles(n_jobs, tmp_path, entrez_mock):
    """Exercise _articles.extract_articles: happy path, idempotency, and the
    failure code returned when the download is incomplete.

    `entrez_mock` (fixture) stands in for the Entrez API so the download step
    runs offline.
    """
    download_dir, code = _download.download_articles_for_query(
        "fMRI[abstract]", tmp_path
    )
    assert code == 0
    articles_dir = Path(f"{download_dir}-articles")
    created_dir, code = _articles.extract_articles(
        download_dir, articles_dir, n_jobs=n_jobs
    )
    assert created_dir == articles_dir
    assert code == 0
    # The mocked download yields 7 articles in total.
    assert len(list(articles_dir.glob("**/*.xml"))) == 7

    # check does not repeat completed extraction
    with patch("nqdc._articles._extract_from_articleset") as mock:
        created_dir, code = _articles.extract_articles(
            download_dir, articles_dir
        )
        assert len(mock.mock_calls) == 0
        assert code == 0

    # check returns 1 if download incomplete
    info_file = download_dir.joinpath("info.json")
    info = json.loads(info_file.read_text("utf-8"))
    info["is_complete"] = False
    info_file.write_text(json.dumps(info), "utf-8")
    created_dir, code = _articles.extract_articles(download_dir)
    # Default output dir is derived from the query hash when none is given.
    assert created_dir == tmp_path.joinpath(
        "query-7838640309244685021f9954f8aa25fc", "articles"
    )
    assert code == 1
    # A missing info.json must also be treated as an incomplete download.
    info_file.unlink()
    _, code = _articles.extract_articles(download_dir)
    assert code == 1
mindnervestech/angular-formio
auth/auth.config.js
/**
 * @fileoverview added by tsickle
 * Generated from: auth/auth.config.ts
 * @suppress {checkTypes,constantProperty,extraRequire,missingOverride,missingReturn,unusedPrivateMembers,uselessCode} checked by tsc
 */
// NOTE(review): this file is machine-generated by tsickle from auth.config.ts.
// Edit the TypeScript source instead of this file. The `if (false) { ... }`
// blocks below are never executed — they only carry per-property type
// annotations for the Closure compiler.
import { Injectable } from '@angular/core';
/**
 * @record
 */
export function FormioAuthFormConfig() { }
if (false) {
    /** @type {?|undefined} */
    FormioAuthFormConfig.prototype.path;
    /** @type {?|undefined} */
    FormioAuthFormConfig.prototype.form;
    /** @type {?|undefined} */
    FormioAuthFormConfig.prototype.component;
}
/**
 * @record
 */
export function FormioAuthRouteConfig() { }
if (false) {
    /** @type {?|undefined} */
    FormioAuthRouteConfig.prototype.auth;
    /** @type {?|undefined} */
    FormioAuthRouteConfig.prototype.login;
    /** @type {?|undefined} */
    FormioAuthRouteConfig.prototype.register;
}
var FormioAuthConfig = /** @class */ (function () {
    function FormioAuthConfig() {
    }
    FormioAuthConfig.decorators = [
        { type: Injectable },
    ];
    return FormioAuthConfig;
}());
export { FormioAuthConfig };
if (false) {
    /** @type {?} */
    FormioAuthConfig.prototype.component;
    /** @type {?} */
    FormioAuthConfig.prototype.delayAuth;
    /** @type {?} */
    FormioAuthConfig.prototype.login;
    /** @type {?} */
    FormioAuthConfig.prototype.register;
    /** @type {?} */
    FormioAuthConfig.prototype.oauth;
}
/**
 * @record
 */
export function FormioOAuthConfig() { }
if (false) {
    /** @type {?} */
    FormioOAuthConfig.prototype.type;
    /** @type {?} */
    FormioOAuthConfig.prototype.options;
}
/** @enum {string} */
var FormioOauthType = {
    okta: "okta",
    saml: "saml",
};
export { FormioOauthType };
/**
 * @record
 */
export function FormioOktaConfig() { }
if (false) {
    /** @type {?|undefined} */
    FormioOktaConfig.prototype.formio;
}
/**
 * @record
 */
export function FormioSamlConfig() { }
if (false) {
    /** @type {?} */
    FormioSamlConfig.prototype.relay;
}
/**
 * @record
 */
export function OktaConfig() { }
if (false) {
    /** @type {?|undefined} */
    OktaConfig.prototype.url;
    /** @type {?|undefined} */
    OktaConfig.prototype.tokenManager;
    /** @type {?|undefined} */
    OktaConfig.prototype.issuer;
    /** @type {?|undefined} */
    OktaConfig.prototype.clientId;
    /** @type {?|undefined} */
    OktaConfig.prototype.redirectUri;
    /** @type {?|undefined} */
    OktaConfig.prototype.postLogoutRedirectUri;
    /** @type {?|undefined} */
    OktaConfig.prototype.pkce;
    /** @type {?|undefined} */
    OktaConfig.prototype.authorizeUrl;
    /** @type {?|undefined} */
    OktaConfig.prototype.userinfoUrl;
    /** @type {?|undefined} */
    OktaConfig.prototype.tokenUrl;
    /** @type {?|undefined} */
    OktaConfig.prototype.ignoreSignature;
    /** @type {?|undefined} */
    OktaConfig.prototype.maxClockSkew;
    /** @type {?|undefined} */
    OktaConfig.prototype.scopes;
    /** @type {?|undefined} */
    OktaConfig.prototype.httpRequestClient;
}
/**
 * @record
 */
export function OktaTokenManagerConfig() { }
if (false) {
    /** @type {?|undefined} */
    OktaTokenManagerConfig.prototype.storage;
    /** @type {?|undefined} */
    OktaTokenManagerConfig.prototype.secure;
    /** @type {?|undefined} */
    OktaTokenManagerConfig.prototype.autoRenew;
    /** @type {?|undefined} */
    OktaTokenManagerConfig.prototype.expireEarlySeconds;
    /** @type {?|undefined} */
    OktaTokenManagerConfig.prototype.storageKey;
}
wokalski/Distraction-Free-Xcode-plugin
Archived/v1/WCDistractionFreeXcodePlugin/Headers/PlugIns/IDELanguageSupportUI/IDEViewToyViewController.h
//
//     Generated by class-dump 3.5 (64 bit).
//
//     class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2013 by <NAME>.
//
// NOTE(review): this header was reverse-engineered from an Xcode private
// framework with class-dump. Regenerate it rather than editing by hand;
// method semantics below are inferred from signatures only.

#import "IDEViewController.h"

#import "IDEToyViewController.h"

@class CALayer, CAPluginLayer, DVTObservingToken, DVTStackBacktrace, IDEToy, IDEViewToy, NSDate, NSString, NSView;

@interface IDEViewToyViewController : IDEViewController <IDEToyViewController>
{
    // KVO tokens for the remote view's state/description (DVT observation API).
    DVTObservingToken *_remoteViewStateObservingToken;
    DVTObservingToken *_remoteViewDescriptionObservingToken;
    NSView *_layerContainerView;
    CAPluginLayer *_pluginLayer;
    CALayer *_snapshotLayer;
    // Cached "does the delegate respond to selector X" flags (bitfield struct).
    CDStruct_b590ebd7 _delegateRespondsTo;
    NSDate *_resultDisplayDate;
    id <IDEToyViewControllerDelegate> _delegate;
    IDEViewToy *_viewToy;
}

+ (id)keyPathsForValuesAffectingToy;
@property(readonly) IDEViewToy *viewToy; // @synthesize viewToy=_viewToy;
@property(retain, nonatomic) id <IDEToyViewControllerDelegate> delegate; // @synthesize delegate=_delegate;
@property(copy) NSDate *resultDisplayDate; // @synthesize resultDisplayDate=_resultDisplayDate;
- (void).cxx_destruct;
- (void)primitiveInvalidate;
- (void)viewWillUninstall;
- (void)viewDidInstall;
- (void)updateViewForRemoteViewSize:(struct CGSize)arg1;
- (void)loadView;
- (void)switchToLayer:(id)arg1;
@property(readonly, copy) NSString *titleForDisplay;
- (void)preflightDelegateRespondsToSelectorChecks;
@property(readonly) IDEToy *toy;
- (id)initWithViewToy:(id)arg1;

// Remaining properties
@property(retain) DVTStackBacktrace *creationBacktrace;
@property(readonly, copy) NSString *debugDescription;
@property(readonly, copy) NSString *description;
@property(readonly) unsigned long long hash;
@property(readonly) DVTStackBacktrace *invalidationBacktrace;
@property(readonly) Class superclass;
@property(readonly, nonatomic, getter=isValid) BOOL valid;

@end
Cazoo-uk/javascript-client
src/utils/binarySearch/index.js
/**
Copyright 2016 Split Software

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
**/

/**
 * Binary search over the sorted array `items`.
 *
 * Returns the index of `value` when it is present. When it is absent, returns
 * the index of the greatest element strictly smaller than `value`, clamped to
 * the array bounds (so values outside the range map to the first or last index).
 */
function bs(items, value) {
  const first = 0;
  const last = items.length - 1;

  let lo = first;
  let hi = last;
  let mid = Math.floor((lo + hi) / 2);

  while (items[mid] !== value && lo < hi) {
    // Narrow the search window toward `value`.
    if (value < items[mid]) {
      hi = mid - 1;
    } else if (value > items[mid]) {
      lo = mid + 1;
    }

    mid = Math.floor((lo + hi) / 2);
  }

  // Clamp in case the window collapsed past either end of the array.
  if (mid < first) {
    mid = first;
  } else if (mid > last) {
    mid = last;
  }

  // Always answer based on a strict "less than" comparison.
  return value < items[mid] && mid > first ? mid - 1 : mid;
}

export default bs;
JiaqiLiZju/NvTK
NvTK/Explainer/Featuremap.py
"""Feature map based model interpretation methods in NvTK. """ import torch import logging import numpy as np from .Motif import normalize_pwm __all__ = ["get_activate_W", "get_fmap", "get_activate_W_from_fmap", "get_activate_sequence_from_fmap", "save_activate_seqlets"] def _get_W_from_conv(model, motif_width=5, normalize=True, device=torch.device("cuda")): ''' Experimental function! get motif directly from convolution parameters, PWM were extracted from `model.Embedding.conv` ''' x_tensor = torch.zeros((4, 4, motif_width)).to(device) x_tensor[0,0,:] = 1 x_tensor[1,1,:] = 1 x_tensor[2,2,:] = 1 x_tensor[3,3,:] = 1 try: fmap = model.Embedding.conv(x_tensor).data.cpu().numpy() except AttributeError: logging.error("Check if you model have model.Embedding.conv attr?") raise AttributeError W = fmap.swapaxes(0, 1).clip(0) if normalize: W = np.array([normalize_pwm(pwm) for pwm in W]) return W # hook class ActivateFeaturesHook(): def __init__(self, module): self.hook = module.register_forward_hook(self.hook_fn) def hook_fn(self, module, input, output): self.features = output.cpu().data.numpy()#.mean(-1) def get_features(self): return self.features def close(self): self.hook.remove() def get_fmap(model, hook_module, data_loader, device=torch.device("cuda")): """Get feature map of input data at model.hook_module Parameters ---------- model : model hook_module : int hook_module data_loader : torch.Data.Dataloader input data device : torch.device, optional torch.device, Default is `torch.device("cuda")`. 
Returns ---------- fmap : np.ndarr feature map of input data at model.hook_module X : np.ndarr input data """ fmap, X = [], [] model.eval() with torch.no_grad(): activations = ActivateFeaturesHook(hook_module) for x_tensor, _ in data_loader: x_tensor = x_tensor.to(device) _ = model(x_tensor) X.append(x_tensor.cpu().numpy()) fmap.append(activations.get_features()) fmap = np.vstack(fmap) X = np.vstack(X) activations.close() return fmap, X def get_activate_W_from_fmap(fmap, X, pool=1, threshold=0.99, motif_width=10): """Get activated motif pwm from feature map Parameters ---------- fmap : np.ndarr feature map of input data at model.hook_module X : np.ndarr input data pool : int input data threshold : floor threshold determine the activated sites in feature map motif_width : int width of motif, the width region sequence of activated sites will be normalized as counts Returns ---------- W : np.ndarr array of activated motif pwm, shape of W (n_filters, 4, motif_width) """ motif_nb = fmap.shape[1] X_dim, seq_len = X.shape[1], X.shape[-1] W=[] for filter_index in range(motif_nb): # find regions above threshold data_index, pos_index = np.where(fmap[:,filter_index,:] > np.max(fmap[:,filter_index,:], axis=1, keepdims=True)*threshold) seq_align = []; count_matrix = [] for i in range(len(pos_index)): # pad 1-nt start = pos_index[i] - 1 # - motif_width // 2 end = start + motif_width + 2 # handle boundary conditions if end > seq_len: end = seq_len start = end - motif_width - 2 if start < 0: start = 0 end = start + motif_width + 2 seq = X[data_index[i], :, start*pool:end*pool] seq_align.append(seq) count_matrix.append(np.sum(seq, axis=0, keepdims=True)) seq_align = np.array(seq_align) count_matrix = np.array(count_matrix) # normalize counts seq_align = (np.sum(seq_align, axis=0)/np.sum(count_matrix, axis=0))*np.ones((X_dim, (motif_width+2)*pool)) seq_align[np.isnan(seq_align)] = 0 W.append(seq_align) W = np.array(W) return W def get_activate_W(model, hook_module, data, pool=1, 
threshold=0.99, motif_width=20): """Get activated motif pwm of input data at model.hook_module Parameters ---------- model : model hook_module : int hook_module data_loader : torch.Data.Dataloader input data device : torch.device, optional torch.device, Default is `torch.device("cuda")`. pool : int input data threshold : floor threshold determine the activated sites in feature map motif_width : int width of motif, the width region sequence of activated sites will be normalized as counts Returns ---------- W : np.ndarr array of activated motif pwm, shape of W (n_filters, 4, motif_width) """ fmap, X = get_fmap(model, hook_module, data) W = get_activate_W_from_fmap(fmap, X, pool, threshold, motif_width) return W def onehot2seq(gene_seq, gene_name, out_fname): d = {0:'A', 1:'C', 2:'G', 3:'T'} s = '' for i, fas in zip(gene_name, map(lambda y: ''.join(map(lambda x:d[x], np.where(y.T==1)[-1])), gene_seq)): s += '>'+str(i)+'\n' s += fas+'\n' with open(out_fname, 'w') as fh: fh.write(s) def get_activate_sequence_from_fmap(fmap, X, pool=1, threshold=0.99, motif_width=40): """Get activated sequence from feature map. Seqlets could be further analyzed by bioinformatic softwares, such as Homer2. 
Parameters ---------- fmap : np.ndarr feature map of input data at model.hook_module X : np.ndarr input data pool : int input data threshold : floor threshold determine the activated sites in feature map motif_width : int width of motif, the width region sequence of activated sites will be normalized as counts Returns ---------- W : list list of activated motif seqlets, shape of W (n_filters, 4, motif_width) M : list Seqlet Names, defined as "Motif_Act" """ motif_nb = fmap.shape[1] seq_len = X.shape[-1] W, M = [], [] for filter_index in range(motif_nb): # find regions above threshold data_index, pos_index = np.where(fmap[:,filter_index,:] > np.max(fmap[:,filter_index,:], axis=1, keepdims=True)*threshold) for i in range(len(pos_index)): # handle boundary conditions start = pos_index[i] - 1 end = pos_index[i] + motif_width + 2 if end > seq_len: end = seq_len start= end - motif_width - 2 if start < 0: start = 0 end = start + motif_width + 2 seq = X[data_index[i], :, start*pool:end*pool] W.append(seq) M.append('_'.join(("Motif", str(filter_index), "Act", str(i)))) return W, M def save_activate_seqlets(model, hook_module, data, out_fname, pool=1, threshold=0.99, motif_width=40): """Save activated Seqlets pwm from feature map Seqlets could be further analyzed by bioinformatic softwares, such as Homer2. Parameters ---------- model : model hook_module : int hook_module data_loader : torch.Data.Dataloader input data out_fname : str output file name device : torch.device, optional torch.device, Default is `torch.device("cuda")`. pool : int input data threshold : floor threshold determine the activated sites in feature map motif_width : int width of motif, the width region sequence of activated sites will be normalized as counts """ fmap, X = get_fmap(model, hook_module, data) gene_seq, gene_name = get_activate_sequence_from_fmap(fmap, X, pool=pool, threshold=threshold, motif_width=motif_width) onehot2seq(gene_seq, gene_name, out_fname)
ScalablyTyped/SlinkyTyped
v/vega-typings/src/main/scala/typingsSlinky/vegaTypings/transformMod/NestTransform.scala
package typingsSlinky.vegaTypings.transformMod

import typingsSlinky.vegaTypings.signalMod.SignalRef
import typingsSlinky.vegaTypings.vegaTypingsStrings.nest
import org.scalablytyped.runtime.StObject
import scala.scalajs.js
import scala.scalajs.js.`|`
import scala.scalajs.js.annotation.{JSGlobalScope, JSGlobal, JSImport, JSName, JSBracketAccess}

/** Scala.js facade for Vega's `"nest"` transform.
  *
  * NOTE(review): ScalablyTyped-generated code — regenerate from the
  * vega-typings package instead of hand-editing.
  *
  * `generate` and `keys` mirror the optional transform parameters from the
  * TypeScript definition; `type` is fixed to the literal `"nest"`.
  */
@js.native
trait NestTransform extends _Transforms {

  var generate: js.UndefOr[Boolean | SignalRef] = js.native

  var keys: js.UndefOr[js.Array[FieldRef] | SignalRef] = js.native

  var `type`: nest = js.native
}
object NestTransform {

  /** Creates a `NestTransform` with only the mandatory `type` discriminator set. */
  @scala.inline
  def apply(`type`: nest): NestTransform = {
    val __obj = js.Dynamic.literal()
    __obj.updateDynamic("type")(`type`.asInstanceOf[js.Any])
    __obj.asInstanceOf[NestTransform]
  }

  /** Fluent mutators (generated pattern) for the optional fields. */
  @scala.inline
  implicit class NestTransformMutableBuilder[Self <: NestTransform] (val x: Self) extends AnyVal {

    @scala.inline
    def setGenerate(value: Boolean | SignalRef): Self = StObject.set(x, "generate", value.asInstanceOf[js.Any])

    @scala.inline
    def setGenerateUndefined: Self = StObject.set(x, "generate", js.undefined)

    @scala.inline
    def setKeys(value: js.Array[FieldRef] | SignalRef): Self = StObject.set(x, "keys", value.asInstanceOf[js.Any])

    @scala.inline
    def setKeysUndefined: Self = StObject.set(x, "keys", js.undefined)

    @scala.inline
    def setKeysVarargs(value: FieldRef*): Self = StObject.set(x, "keys", js.Array(value :_*))

    @scala.inline
    def setType(value: nest): Self = StObject.set(x, "type", value.asInstanceOf[js.Any])
  }
}
lisongyan123/urule
urule-core/src/main/java/com/bstek/urule/model/rete/jsondeserializer/ParameterValueDeserializer.java
// // Source code recreated from a .class file by IntelliJ IDEA // (powered by Fernflower decompiler) // package com.bstek.urule.model.rete.jsondeserializer; import com.bstek.urule.model.rete.JsonUtils; import com.bstek.urule.model.rule.ParameterValue; import com.bstek.urule.model.rule.Value; import com.bstek.urule.model.rule.ValueType; import org.codehaus.jackson.JsonNode; public class ParameterValueDeserializer implements ValueDeserializer { public ParameterValueDeserializer() { } public Value deserialize(JsonNode var1) { ParameterValue var2 = new ParameterValue(); var2.setArithmetic(JsonUtils.parseComplexArithmetic(var1)); var2.setVariableLabel(JsonUtils.getJsonValue(var1, "variableLabel")); var2.setVariableName(JsonUtils.getJsonValue(var1, "variableName")); var2.setKeyLabel(JsonUtils.getJsonValue(var1, "keyLabel")); var2.setKeyName(JsonUtils.getJsonValue(var1, "keyName")); return var2; } public boolean support(ValueType var1) { return var1.equals(ValueType.Parameter); } }
nasa/gunns
ms-utils/properties/CombustCH4.cpp
/**
@file
@brief    Methane Combustion Model implementation

@copyright Copyright 2019 United States Government as represented by the Administrator of the
           National Aeronautics and Space Administration.  All Rights Reserved.

LIBRARY DEPENDENCY:
((properties/Combust.o)
 (properties/ChemicalCompound.o)
 (simulation/hs/TsHsMsg.o)
 (software/exceptions/TsInitializationException.o))
**************************************************************************************************/

#include "CombustCH4.hh"
#include "ChemicalCompound.hh"
#include "GenericMacros.hh"
#include "software/exceptions/TsInitializationException.hh"
#include <cmath>

/// @brief Minimum error for solveElemPotEqns method.
const double CombustCH4::mMinErrorEquilCH4 = 5.0E-2;

/// @brief Initial temperature step for solveCombustion iteration.
const double CombustCH4::mTestTempStepCH4 = 100.0;

////////////////////////////////////////////////////////////////////////////////////////////////////
/// @details  Default constructs this Methane Combustion Model. Initializes the model. Creates global
///           members for this simulation. Memory is allocated for the arrays mRatio, mMoles, and
///           mCompounds. The ten compounds that take place in this reaction are also created.
////////////////////////////////////////////////////////////////////////////////////////////////////
CombustCH4::CombustCH4()
    :
    Combust(CombustCH4::NCompounds,
            mTestTempStepCH4,
            mMaxItEquilCH4,
            mMaxItCombustCH4,
            mMaxCombustLoopsCH4,
            mMinErrorEquilCH4),
    mDampCoeff(0.25),
    mInitialOxidizer(0.0),
    mInitialFuel(0.0),
    mEnt1(0.0),
    mEnt2(0.0)
{
    /// - Allocate memory (released in the destructor).
    mRatio = new double[CombustCH4::NCompounds];
    mMoles = new double[CombustCH4::NCompounds];
    mCompounds = const_cast<const ChemicalCompound**>(new ChemicalCompound* [CombustCH4::NCompounds]);

    /// - Set each compound in the array mCompounds to its correct compound type
    mCompounds[CombustCH4::O2]  = mCompoundsDefined.getCompound(ChemicalCompound::O2);
    mCompounds[CombustCH4::CH4] = mCompoundsDefined.getCompound(ChemicalCompound::CH4);
    mCompounds[CombustCH4::H2O] = mCompoundsDefined.getCompound(ChemicalCompound::H2O);
    mCompounds[CombustCH4::CO2] = mCompoundsDefined.getCompound(ChemicalCompound::CO2);
    mCompounds[CombustCH4::OH]  = mCompoundsDefined.getCompound(ChemicalCompound::OH);
    mCompounds[CombustCH4::CO]  = mCompoundsDefined.getCompound(ChemicalCompound::CO);
    mCompounds[CombustCH4::O]   = mCompoundsDefined.getCompound(ChemicalCompound::O);
    mCompounds[CombustCH4::H2]  = mCompoundsDefined.getCompound(ChemicalCompound::H2);
    mCompounds[CombustCH4::H]   = mCompoundsDefined.getCompound(ChemicalCompound::H);
    mCompounds[CombustCH4::He]  = mCompoundsDefined.getCompound(ChemicalCompound::He);
}

// Shorthand aliases for the CombustCH4::Compound enumerators used below.
#define O2  CombustCH4::O2
#define CH4 CombustCH4::CH4
#define H2O CombustCH4::H2O
#define CO2 CombustCH4::CO2
#define OH  CombustCH4::OH
#define CO  CombustCH4::CO
#define O   CombustCH4::O
#define H2  CombustCH4::H2
#define H   CombustCH4::H
#define HE  CombustCH4::He

////////////////////////////////////////////////////////////////////////////////////////////////////
/// @details  Default destructs this Methane combustion model. Frees memory allocated for arrays.
////////////////////////////////////////////////////////////////////////////////////////////////////
CombustCH4::~CombustCH4(){
    delete[] mCompounds;
    delete[] mMoles;
    delete[] mRatio;
}

////////////////////////////////////////////////////////////////////////////////////////////////////
/// @param[in]  value  (--)  value to take exp() of
///
/// @return     double (--)  exp(value), or 0.0 when value <= -8.
///
/// @details  Safely calculates exp(value). If value is low enough to cause an underflow error, this
///           method returns 0.0 instead, preventing run time errors.
////////////////////////////////////////////////////////////////////////////////////////////////////
double CombustCH4::calcExpSafe(double value){
    if(value <= -8){
        return 0.0;
    }
    else{
        return exp(value);
    }
}

////////////////////////////////////////////////////////////////////////////////////////////////////
/// @param[in]  moleRatios  (--)  Array of mole fractions for inlet fluids.
/// @param[in]  fluidTypes  (--)  Array of fluid types that describes inlet mixture.
/// @param[in]  nFluidTypes (--)  Number of entries in array fluidTypes.
///
/// @throws   TsInitializationException
///
/// @details  Sets the initial mixture mole quantities to the inlet condition. This method also
///           checks if the inlet quantities are outside of this reaction's explosive limit, and
///           halts combustion if this criteria is not reached. If the inlet conditions are within
///           unstable limits, this method will call the unstable method, and halt normal
///           combustion/recombination.
////////////////////////////////////////////////////////////////////////////////////////////////////
void CombustCH4::updateCompound(double* moleRatios, FluidProperties::FluidType* fluidTypes,
                                int nFluidTypes){
    /// - Validate every inlet mole fraction before touching member state.
    for(int i = 0; i < nFluidTypes; i++){
        if( moleRatios[i] < -DBL_EPSILON or moleRatios[i] > 1.0 + DBL_EPSILON){
            throwError( "Initialization Error",
                    "invalid input data. Mole fractions must be non-negative and less than 1.");
        }
    }

    /// - Reset arrays to zero.
    for(int i = 0; i < mNCompounds; i++){
        mMoles[i] = 0.0;
    }

    /// - Set initial oxidizer and fuel, based on inlet conditions
    int OxInt   = findFluidType(fluidTypes, FluidProperties::GUNNS_O2, nFluidTypes);
    int MethInt = findFluidType(fluidTypes, FluidProperties::GUNNS_CH4, nFluidTypes);
    if(-1 == OxInt or -1 == MethInt){
        throwError( "Initialization Error",
                "inlet initialization error. Fluid index must contain O2 and CH4.");
    }
    mInitialOxidizer = moleRatios[OxInt];
    mInitialFuel     = moleRatios[MethInt];

    /// - If there is Helium present at the inlet, it is added to the model.
    if(findFluidType(fluidTypes, FluidProperties::GUNNS_HE, nFluidTypes) != -1){
        mMoles[He] = moleRatios[findFluidType(fluidTypes, FluidProperties::GUNNS_HE, nFluidTypes)];
    }

    /// - If there is no fuel and/or oxidizer, stop combustion.
    if(mInitialOxidizer == 0.0 or mInitialFuel == 0.0){
        mCombustionOccurs = false;
    }

    /// - Check that the inlet fuel to oxidizer ratio is within the explosive limit.
    ///   If not, no combustion occurs and the mixture exits the combustor at the same state as the
    ///   inlet. See header description for explanation of explosive limits.
    if(mInitialFuel < 0.05 or mInitialFuel > 2.5 * mInitialOxidizer - 0.3928){
        mCombustionOccurs = false;
    }

    /// - Check if the mixture is within the unstable region.
    ///   NOTE(review): if mInitialFuel is 0.0 this ratio divides by zero; the
    ///   explosive-limit check above already disables combustion in that case,
    ///   but the division still executes — confirm acceptable.
    if(mInitialOxidizer / mInitialFuel > 1.65957 and mInitialOxidizer / mInitialFuel < 2.0){
        mUnstable = true;
    }

    mMoles[O2]  = mInitialOxidizer;
    mMoles[CH4] = mInitialFuel;
}

////////////////////////////////////////////////////////////////////////////////////////////////////
/// @details  Gives an initial guess for the solveEquilibrium() method. From that first guess,
///           solveEquilibrium() iterates until the correct result is met. For each subsequent call
///           to solveEquilibrium(), the previous equilibrium concentrations are used as a first guess.
//////////////////////////////////////////////////////////////////////////////////////////////////// void CombustCH4::makeFirstGuessAtEquil(){ double Sum = sumArray(mMoles, mNCompounds); for(int i = 0; i < mNCompounds - 1; i++){ mMoles[i] = Sum / (mNCompounds - 1); } } //////////////////////////////////////////////////////////////////////////////////////////////////// /// @param[in] ratios (--) Pointer to array of mole fractions. /// @param[in] moles (--) Pointer to array of moles. /// @param[in] compound (--) Compound type /// /// @details If the molar amount for a compound is negative after applying the conservation of atomic /// species constraint, this method will set the the amount to what was calculated using /// the elemental potential equations. This prevents numerical exceptions. /// /// @note If this method is called on the last pass of the solveElemPotEqns method, an incorrect /// solution will result. //////////////////////////////////////////////////////////////////////////////////////////////////// void CombustCH4::checkNegativeMoles(double* ratios, double* moles, CombustCH4::Compound compound){ if(moles[compound] <= DBL_EPSILON){ moles[compound] = (1e-8 + ratios[compound]) * sumArray(ratios, mNCompounds); } } //////////////////////////////////////////////////////////////////////////////////////////////////// /// @param[in,out] productRatios (--) Array of Mole Fractions from current solveEquilibrium() iteration /// @param[in,out] productMoles (--) Array of Moles from current solveEquilibrium() iteration /// @param[in] temp (K) Temperature to calculate equilibrium at /// @param[in] it (--) Current solveEquilibrium() iteration /// /// @details Uses the Methane-Oxygen combustion elemental potential equations to calculate the /// equilibrium concentrations. This method calculates each elemental Lagrange constraint /// based on the concentrations of O2, CO2, and H2O. 
///           These constraints are then used to
///           calculate the mole fractions of all other compounds. The conservation of elemental
///           composition equations are then used to recalculate the concentrations of O2, CO2, and
///           H2O. This method can easily diverge and generate non-physical results, therefore a
///           dampened iterative method is used to reach the converged solution.
////////////////////////////////////////////////////////////////////////////////////////////////////
void CombustCH4::solveElemPotEqns(double* ratios, double* moles, double temp){
    double lagrangeO;
    double lagrangeC;
    double lagrangeH;
    /// NOTE(review): variable-length array — a compiler extension, not standard
    /// C++. Size excludes the inert Helium slot.
    double ratioHold[mNCompounds - 1];

    /// - Calculate Lagrange multipliers
    /// - Fuel rich case
    if(2.0 * mInitialFuel > mInitialOxidizer){
        lagrangeH = 0.5 * (log(ratios[H2]) + calcGibbs(mCompounds[H2], temp));
        lagrangeO = log(ratios[H2O]) - 2.0 * lagrangeH + calcGibbs(mCompounds[H2O], temp);
        lagrangeC = log(ratios[CO]) - 1.0 * lagrangeO + calcGibbs(mCompounds[CO], temp);
    }
    /// - Fuel lean case
    else{
        lagrangeO = 0.5 * (log(ratios[O2]) + calcGibbs(mCompounds[O2], temp));
        lagrangeC = log(ratios[CO2]) - 2.0 * lagrangeO + calcGibbs(mCompounds[CO2], temp);
        lagrangeH = 0.5*(log(ratios[H2O]) - lagrangeO + calcGibbs(mCompounds[H2O], temp));
    }

    /// - Calculate mole fractions of dissociation products using elemental potential equations
    ratioHold[O2]  = calcExpSafe(2.0 * lagrangeO - calcGibbs(mCompounds[O2], temp));
    ratioHold[CH4] = calcExpSafe(lagrangeC + 4.0 * lagrangeH - calcGibbs(mCompounds[CH4], temp));
    ratioHold[H2O] = calcExpSafe(lagrangeO + 2.0 * lagrangeH - calcGibbs(mCompounds[H2O], temp));
    ratioHold[CO2] = calcExpSafe(lagrangeC + 2.0 * lagrangeO - calcGibbs(mCompounds[CO2], temp));
    ratioHold[OH]  = calcExpSafe(lagrangeO + lagrangeH - calcGibbs(mCompounds[OH], temp));
    ratioHold[CO]  = calcExpSafe(lagrangeC + lagrangeO - calcGibbs(mCompounds[CO], temp));
    ratioHold[O]   = calcExpSafe(lagrangeO - calcGibbs(mCompounds[O], temp));
    ratioHold[H2]  = calcExpSafe(2.0 * lagrangeH - calcGibbs(mCompounds[H2], temp));
    ratioHold[H]   = calcExpSafe(lagrangeH - calcGibbs(mCompounds[H], temp));

    /// - Use low pass filter to ensure stability (He excluded — it is inert).
    for(int i = 0; i < mNCompounds - 1; i++){
        ratios[i] = ratios[i] + mDampCoeff * (ratioHold[i] - ratios[i]);
    }

    /// - Recalculate total number of moles, based on new mole ratios.
    ///   NOTE(review): the renormalization runs only in the fuel-rich branch;
    ///   the fuel-lean case relies on the conservation equations below — confirm intended.
    if(2.0 * mInitialFuel > mInitialOxidizer){
        double Sum = sumArray(ratios, mNCompounds);
        for(int i = 0; i < mNCompounds; i++){
            ratios[i] = ratios[i] / Sum;
        }
    }
    double Sum = sumArray(moles,mNCompounds);
    for(int i = 0; i < mNCompounds; i++){
        moles[i] = ratios[i] * Sum;
    }

    /// - Apply conservation of elemental composition equations.
    /// - The product moles must all be positive. If a negative value is calculated, switch to the
    ///   result obtained using the elemental potential equations
    /// - Fuel rich case
    if(2.0 * mInitialFuel > mInitialOxidizer){
        moles[CO] = mInitialFuel - moles[CH4] - moles[CO2];
        checkNegativeMoles(ratios, moles, CO);
        moles[H2O] = 2.0 * (mInitialOxidizer - moles[CO2] - moles[O2]) - moles[CO] - moles[OH]
                   - moles[O];
        checkNegativeMoles(ratios, moles, H2O);
        moles[H2] = 2.0 * (mInitialFuel - moles[CH4]) - moles[H2O] - 0.5 * (moles[H] + moles[OH]);
        checkNegativeMoles(ratios, moles, H2);
    }
    /// - Fuel lean case
    else{
        moles[CO2] = mInitialFuel - moles[CH4] - moles[CO];
        checkNegativeMoles(ratios, moles, CO2);
        moles[H2O] = 2.0 * (mInitialFuel - moles[CH4]) - moles[H2] - 0.5 * (moles[H] + moles[OH]);
        checkNegativeMoles(ratios, moles, H2O);
        moles[O2] = mInitialOxidizer - moles[CO2] - 0.5 * (moles[H2O] + moles[CO] + moles[OH]
                  + moles[O]);
        checkNegativeMoles(ratios, moles, O2);
    }
}

////////////////////////////////////////////////////////////////////////////////////////////////////
/// @details  This method is called when the oxidizer to fuel ratio is within the unstable range.
///           This method calculates the solution at each end of this range, then uses linear
///           interpolation to estimate the solution at the desired oxidizer ratio.
////////////////////////////////////////////////////////////////////////////////////////////////////
void CombustCH4::solveUnstableCombustion(){
    /// - Store the inlet conditions in temporary variables (restored before returning).
    double fuel   = mInitialFuel;
    double ox     = mInitialOxidizer;
    double temp   = mTemp;
    double helium = mMoles[He];

    /// - Calculate fuel lean solution (lean edge of the unstable band).
    mInitialFuel = 0.333 * (fuel + ox);
    /// - Store to use in interpolation function
    double fuel1 = mInitialFuel;
    mInitialOxidizer = 0.667 * (fuel + ox);
    for(int i = 0; i < mNCompounds - 1; i++){
        mMoles[i] = 0.0;
    }
    mMoles[O2]  = mInitialOxidizer;
    mMoles[CH4] = mInitialFuel;
    recalculateRatios();
    /// - Rebuild the mixture enthalpy and molecular weight for this edge case.
    mEnth = 0.0;
    mMW   = 0.0;
    for(int i = 0; i < mNCompounds; i++){
        mEnth += mRatio[i] * calcEnth(mCompounds[i], mTemp);
        mMW   += mRatio[i] * mCompounds[i]->mMWeight;
    }
    mEnth = mEnth / mMW;
    makeFirstGuessAtEquil();
    calculateProperties();
    /// - Store to use in interpolation function
    double temp1  = mTemp;
    double gamma1 = mGamma;
    double MW1    = mMW;
    double enth1  = mEnth;
    mEnt1 = mEnt;

    /// - Reset Test quantities
    mTestTempStep = mTestTempStepCH4;
    mMoles[He] = helium;
    mTemp = temp;
    for(int i = 0; i < mNCompounds - 1; i++){
        mMoles[i] = 0.0;
    }

    /// - Calculate fuel rich solution (rich edge of the unstable band).
    mInitialFuel = 0.376 * (fuel + ox);
    /// - Store to use in interpolation function
    double fuel2 = mInitialFuel;
    mInitialOxidizer = 0.624 * (fuel + ox);
    mMoles[O2]  = mInitialOxidizer;
    mMoles[CH4] = mInitialFuel;
    recalculateRatios();
    mEnth = 0.0;
    mMW   = 0.0;
    for(int i = 0; i < mNCompounds; i++){
        mEnth += mRatio[i] * calcEnth(mCompounds[i], mTemp);
        mMW   += mRatio[i] * mCompounds[i]->mMWeight;
    }
    mEnth = mEnth / mMW;
    makeFirstGuessAtEquil();
    calculateProperties();
    double temp2  = mTemp;
    double gamma2 = mGamma;
    double MW2    = mMW;
    double enth2  = mEnth;
    mEnt2 = mEnt;

    /// - Interpolate between the two edge solutions at the actual fuel fraction.
    mTemp  = interpolate(fuel, fuel1, fuel2, temp1, temp2);
    mMW    = interpolate(fuel, fuel1, fuel2, MW1, MW2);
    mGamma = interpolate(fuel, fuel1, fuel2, gamma1, gamma2);
    mEnth  = interpolate(fuel, fuel1, fuel2, enth1, enth2);
    mEnt   = interpolate(fuel, fuel1, fuel2, mEnt1, mEnt2);
    /// - Restore the stored inlet quantities.
    mInitialFuel     = fuel;
    mInitialOxidizer = ox;
}

////////////////////////////////////////////////////////////////////////////////////////////////////
/// @details  This method is called when the oxidizer to fuel ratio is within the unstable range.
///           This method calculates the recombination solution at each end of this range, then uses
///           linear interpolation to estimate the solution at the desired oxidizer ratio.
////////////////////////////////////////////////////////////////////////////////////////////////////
void CombustCH4::solveUnstableRecombination(){
    /// - Store the inlet conditions in temporary variables
    double fuel = mInitialFuel;
    double ox   = mInitialOxidizer;
    double temp = mTemp;

    /// - Calculate fuel lean solution, reusing the entropy from the lean
    ///   combustion edge (mEnt1).
    mInitialFuel = 0.333 * (fuel + ox);
    /// - Store to use in interpolation function
    double fuel1 = mInitialFuel;
    mInitialOxidizer = 0.667 * (fuel + ox);
    mEnt = mEnt1;
    calculateProperties();
    /// - Store to use in interpolation function
    double temp1  = mTemp;
    double gamma1 = mGamma;
    double MW1    = mMW;
    double enth1  = mEnth;
    double ent1   = mEnt;

    /// - Reset Test quantities
    mTestTempStep = mTestTempStepCH4;
    mTemp = temp;

    /// - Calculate fuel rich solution, reusing the entropy from the rich
    ///   combustion edge (mEnt2).
    mInitialFuel = 0.376 * (fuel + ox);
    /// - Store to use in interpolation function
    double fuel2 = mInitialFuel;
    mInitialOxidizer = 0.624 * (fuel + ox);
    mEnt = mEnt2;
    calculateProperties();
    double temp2  = mTemp;
    double gamma2 = mGamma;
    double MW2    = mMW;
    double enth2  = mEnth;
    double ent2   = mEnt;

    /// - Interpolate
    mTemp  = interpolate(fuel, fuel1, fuel2, temp1, temp2);
    mMW    = interpolate(fuel, fuel1, fuel2, MW1, MW2);
    mGamma = interpolate(fuel, fuel1, fuel2, gamma1, gamma2);
    mEnth  = interpolate(fuel, fuel1, fuel2, enth1, enth2);
    mEnt   = interpolate(fuel, fuel1, fuel2, ent1, ent2);
    /// NOTE(review): unlike solveUnstableCombustion(), mInitialFuel and
    /// mInitialOxidizer are not restored here — confirm whether intentional.
}

////////////////////////////////////////////////////////////////////////////////////////////////////
/// @param[in]  x  (--)  Independent variable of interest.
/// @param[in] x1 (--) Independent variable at lower edge of range. /// @param[in] x2 (--) Independent variable at higher edge of range. /// @param[in] y1 (--) Dependent variable at lower edge of range. /// @param[in] y2 (--) Dependent variable at higher edge of range. /// /// @return double (--) Dependent variable of interest. /// /// @details Uses linear interpolation to estimate f(X). //////////////////////////////////////////////////////////////////////////////////////////////////// double CombustCH4::interpolate(double x, double x1, double x2, double y1, double y2){ return y1 + (y2 - y1) * ((x - x1) / (x2 - x1)) ; }
shawkins/infinispan-1
server/integration/endpoint/src/main/java/org/infinispan/server/endpoint/subsystem/RouterSubsystemAdd.java
/*
 * JBoss, Home of Professional Open Source
 * Copyright 2011-2013 Red Hat Inc. and/or its affiliates and other contributors
 * as indicated by the @author tags. All rights reserved.
 * See the copyright.txt in the distribution for a
 * full listing of individual contributors.
 *
 * This copyrighted material is made available to anyone wishing to use,
 * modify, copy, or redistribute it subject to the terms and conditions
 * of the GNU Lesser General Public License, v. 2.1.
 * This program is distributed in the hope that it will be useful, but WITHOUT A
 * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
 * PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
 * You should have received a copy of the GNU Lesser General Public License,
 * v.2.1 along with this distribution; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
 * MA  02110-1301, USA.
 */
package org.infinispan.server.endpoint.subsystem;

import java.util.Optional;

import org.infinispan.server.router.configuration.builder.RouterConfigurationBuilder;
import org.jboss.as.controller.AbstractAddStepHandler;
import org.jboss.as.controller.AttributeDefinition;
import org.jboss.as.controller.OperationContext;
import org.jboss.as.controller.OperationFailedException;
import org.jboss.as.controller.PathAddress;
import org.jboss.as.controller.registry.Resource;
import org.jboss.dmr.ModelNode;
import org.jboss.msc.service.ServiceBuilder;
import org.jboss.msc.service.ServiceName;

/**
 * Add-step handler for the endpoint router subsystem: validates the router
 * connector attributes into the model and installs a {@link RouterService}
 * wired to the configured multi-tenant and single-port REST/HotRod endpoints.
 */
class RouterSubsystemAdd extends AbstractAddStepHandler {

   static final RouterSubsystemAdd INSTANCE = new RouterSubsystemAdd();

   /** Validates each declared connector attribute from the operation into the model. */
   @Override
   protected void populateModel(ModelNode source, ModelNode target) throws OperationFailedException {
      for(AttributeDefinition attr : RouterConnectorResource.ROUTER_CONNECTOR_ATTRIBUTES) {
         attr.validateAndSet(source, target);
      }
   }

   /**
    * Builds the {@link RouterService}, registers its socket-binding and
    * endpoint dependencies (multi-tenant REST/HotRod plus single-port), and
    * installs the service.
    */
   @Override
   protected void performRuntime(OperationContext context, ModelNode operation, ModelNode model) throws OperationFailedException {
      // Read the full model
      ModelNode config = Resource.Tools.readModel(context.readResource(PathAddress.EMPTY_ADDRESS));
      RouterConfigurationBuilder configuration = new RouterConfigurationBuilder();
      RouterService routerService = new RouterService(configuration, getServiceName(config));
      final ServiceName routerServiceName = EndpointUtils.getServiceName(operation, "router");
      ServiceBuilder<?> builder = context.getServiceTarget().addService(routerServiceName, routerService);

      ModelNode multiTenancyInnerConfiguration = config.get(ModelKeys.MULTI_TENANCY, ModelKeys.MULTI_TENANCY_NAME);
      ModelNode SinglePortInnerConfiguration = config.get(ModelKeys.SINGLE_PORT, ModelKeys.SINGLE_PORT_NAME);

      addMultiTenantRest(context, operation, multiTenancyInnerConfiguration, routerService, builder);
      addMultiTenantHotRod(context, operation, multiTenancyInnerConfiguration, routerService, builder);
      addSinglePortHotRod(context, operation, SinglePortInnerConfiguration, routerService, builder);

      builder.install();
   }

   /** Wires the single-port (HotRod + REST behind one socket) routing, if configured. */
   private void addSinglePortHotRod(OperationContext context, ModelNode operation, ModelNode config, RouterService routerService, ServiceBuilder<?> builder) throws OperationFailedException {
      if (config.isDefined()) {
         EndpointUtils.addSocketBindingDependency(context, builder, operation.get(ModelKeys.SINGLE_PORT_SOCKET_BINDING).asString(), routerService.getSinglePortSocketBinding());
         // We are parsing this model: {
         //    "security-realm" => "ClientCertRealm",
         //    "hotrod" => {"single-port-hotrod" => {"name" => "single-port-hotrod"}},
         //    "rest" => {"single-port-rest" => {"name" => "single-port-rest"}}
         //}
         RouterService.SinglePortRouting singlePortRouting = routerService.getSinglePortRouting();
         // We use endpoint names as keys in the model. That's very handy while parsing and writing to an XML
         // but it causes a bit of a headache when reading.
         // NOTE(review): asList().get(0) is evaluated before the isDefined()
         // checks below; an absent hotrod/rest child would throw here —
         // confirm the parser guarantees both children exist.
         ModelNode hotRodModelNode = config.get(ModelKeys.HOTROD).asList().get(0).get(0);
         ModelNode restModelNode = config.get(ModelKeys.REST).asList().get(0).get(0);
         ModelNode securityRealmModelNode = SinglePortResource.SECURITY_REALM.resolveModelAttribute(context, config);
         String securityRealm = securityRealmModelNode.asString();
         String hotRodServerName = SinglePortHotRodResource.NAME.resolveModelAttribute(context, hotRodModelNode).asString();
         String restServerName = SinglePortRestResource.NAME.resolveModelAttribute(context, restModelNode).asString();
         if (hotRodModelNode.isDefined()) {
            EndpointUtils.addHotRodDependency(builder, hotRodServerName, singlePortRouting.getHotrodServer());
         }
         if (restModelNode.isDefined()) {
            EndpointUtils.addRestDependency(builder, restServerName, singlePortRouting.getRestServer());
         }
         if (securityRealmModelNode.isDefined()) {
            EndpointUtils.addSecurityRealmDependency(builder, securityRealm, singlePortRouting.getSecurityRealm());
         }
      }
   }

   /** Wires multi-tenant REST routing: one route per configured path prefix. */
   private void addMultiTenantRest(OperationContext context, ModelNode operation, ModelNode config, RouterService routerService, ServiceBuilder<?> builder) throws OperationFailedException {
      if (config.get(ModelKeys.REST).isDefined()) {
         EndpointUtils.addSocketBindingDependency(context, builder, operation.get(ModelKeys.REST_SOCKET_BINDING).asString(), routerService.getRestSocketBinding());
         for (ModelNode r : config.get(ModelKeys.REST).asList()) {
            ModelNode restNode = r.get(0);
            String restName = MultiTenantRestResource.NAME.resolveModelAttribute(context, restNode).asString();
            if (restNode.get(ModelKeys.PREFIX).isDefined()) {
               for (ModelNode prefixNode : restNode.get(ModelKeys.PREFIX).asList()) {
                  String pathPrefix = PrefixResource.PATH.resolveModelAttribute(context, prefixNode.get(0)).asString();
                  RouterService.RestRouting restRouting = routerService.getRestRouting(pathPrefix, restName);
                  EndpointUtils.addRestDependency(builder, restName, restRouting.getRest());
               }
            }
         }
      }
   }

   /** Wires multi-tenant HotRod routing: TCP tuning per connector plus one route per SNI host. */
   private void addMultiTenantHotRod(OperationContext context, ModelNode operation, ModelNode config, RouterService routerService, ServiceBuilder<?> builder) throws OperationFailedException {
      if(config.get(ModelKeys.HOTROD).isDefined()) {
         EndpointUtils.addSocketBindingDependency(context, builder, operation.get(ModelKeys.HOTROD_SOCKET_BINDING).asString(), routerService.getHotrodSocketBinding());
         for(ModelNode hr : config.get(ModelKeys.HOTROD).asList()) {
            ModelNode hotRod = hr.get(0);
            String hotRodName = MultiTenantHotRodResource.NAME.resolveModelAttribute(context, hotRod).asString();
            // NOTE(review): these TCP settings are re-applied for every hotrod
            // connector in the loop; only the last one wins — confirm intended.
            routerService.tcpNoDelay(RouterConnectorResource.TCP_NODELAY.resolveModelAttribute(context, hotRod).asBoolean());
            routerService.tcpKeepAlive(RouterConnectorResource.TCP_KEEPALIVE.resolveModelAttribute(context, hotRod).asBoolean());
            routerService.sendBufferSize(RouterConnectorResource.SEND_BUFFER_SIZE.resolveModelAttribute(context, hotRod).asInt());
            routerService.receiveBufferSize(RouterConnectorResource.RECEIVE_BUFFER_SIZE.resolveModelAttribute(context, hotRod).asInt());
            if(hotRod.get(ModelKeys.SNI).isDefined()) {
               for(ModelNode sni : hotRod.get(ModelKeys.SNI).asList()) {
                  ModelNode sniNode = sni.get(0);
                  String sniHostName = SniResource.HOST_NAME.resolveModelAttribute(context, sniNode).asString();
                  String securityRealm = SniResource.SECURITY_REALM.resolveModelAttribute(context, sniNode).asString();
                  RouterService.HotRodRouting hotRodRouting = routerService.getHotRodRouting(sniHostName);
                  EndpointUtils.addHotRodDependency(builder, hotRodName, hotRodRouting.getHotRod());
                  EndpointUtils.addSecurityRealmDependency(builder, securityRealm, hotRodRouting.getSecurityRealm());
               }
            }
         }
      }
   }

   /**
    * Optional router name from the model.
    * NOTE(review): DMR {@code ModelNode.get} normally returns an undefined
    * node rather than {@code null}, so this Optional is likely always
    * present (possibly wrapping an undefined node) — verify.
    */
   private Optional<String> getServiceName(ModelNode config) {
      return Optional.ofNullable(config.get(ModelKeys.NAME)).map(ModelNode::asString);
   }
}
iabhimanyu/Algorithms
Dynamic Programming/common sub sequence/cpp/LCS.cpp
#include <iostream>
#include <string>
#include <vector>
#include <algorithm>
using namespace std;

// Longest common subsequence of a and b.
//
// Fixes over the original implementation:
//  * fixed char[15] buffers overflowed on inputs longer than ~13 characters;
//    std::string / std::vector remove that limit entirely
//  * every new[]-ed DP row leaked (no delete[]); vectors free themselves
//  * the LCS table was sized inconsistently (alen+1 row pointers allocated,
//    but only alen rows ever new-ed)
// The original's tie-breaking is preserved: on equal sub-results the backtrack
// moves up (towards smaller i, the original's Diag == 2 case), so the printed
// subsequence is identical.
static string longestCommonSubsequence(const string &a, const string &b) {
    const size_t n = a.size(), m = b.size();
    // len[i][j] = LCS length of the prefixes a[0..i) and b[0..j).
    vector<vector<int>> len(n + 1, vector<int>(m + 1, 0));
    for (size_t i = 1; i <= n; ++i) {
        for (size_t j = 1; j <= m; ++j) {
            if (a[i - 1] == b[j - 1])
                len[i][j] = len[i - 1][j - 1] + 1;
            else
                len[i][j] = max(len[i - 1][j], len[i][j - 1]);
        }
    }
    // Walk back from the bottom-right corner collecting matched characters.
    string out;
    size_t i = n, j = m;
    while (i > 0 && j > 0) {
        if (a[i - 1] == b[j - 1]) {
            out.push_back(a[i - 1]);
            --i;
            --j;
        } else if (len[i - 1][j] >= len[i][j - 1]) {
            --i;  // ties move up, matching the original's Diag table
        } else {
            --j;
        }
    }
    reverse(out.begin(), out.end());
    return out;
}

// Reads two whitespace-delimited strings from stdin and prints a newline
// followed by their LCS -- the same output shape as the original program.
int main() {
    string a, b;
    cin >> a >> b;
    cout << endl << longestCommonSubsequence(a, b);
    return 0;
}
chs6558/chs6558.github.io
node_modules/styled-icons/fa-solid/MortarPestle/MortarPestle.esm.js
// Compatibility shim: re-exports the MortarPestle icon from the scoped
// @styled-icons package so older deep-import paths keep working.
export * from '@styled-icons/fa-solid/MortarPestle';
andyglick/openclover-git
clover-ant/src/test/resources/CloverOptimizeJUnitTest/testOptimizedCIBuildCyclesWithFrequentUnoptimizedRuns/test/com/cenqua/clover/testcases/testoptimization/AppClass23456Test.java
package com.cenqua.clover.testcases.testoptimization;

import junit.framework.TestCase;

/**
 * Fixture used by the Clover test-optimization build-cycle tests: a JUnit 3
 * test that invokes the entry points of AppClass2..AppClass6 so that coverage
 * data links this test to those classes.
 */
public class AppClass23456Test extends TestCase {
    public AppClass23456Test(String name) {
        super(name);
    }

    public void testMain() {
        // The calls themselves are the point (coverage recording);
        // there is nothing to assert.
        AppClass2.main(null);
        AppClass3.main(null);
        AppClass4.main(null);
        AppClass5.main(null);
        AppClass6.main(null);
    }
}
Colbys/smartactors-core
CoreFeatures/MessageProcessing/ChainCallReceiver/src/main/java/info/smart_tools/smartactors/message_processing/chain_call_receiver/package-info.java
/**
 * Contains an implementation of a {@link info.smart_tools.smartactors.message_processing_interfaces.message_processing.IMessageReceiver} that
 * chooses a {@link info.smart_tools.smartactors.message_processing_interfaces.message_processing.IReceiverChain} for a message and applies it.
 *
 * @see info.smart_tools.smartactors.message_processing_interfaces.message_processing.IMessageProcessingSequence#callChain(
 * info.smart_tools.smartactors.message_processing_interfaces.message_processing.IReceiverChain)
 */
package info.smart_tools.smartactors.message_processing.chain_call_receiver;
datacite/levriero
app/models/orcid_claim.rb
# Imports DataCite DOI -> ORCID "claim" links and pushes them to the Event
# Data service. Relies on ActiveSupport (Date helpers, Array.wrap, present?),
# background jobs, and several ENV-configured endpoints.
class OrcidClaim < Base
  # Claims are released under CC0.
  LICENSE = "https://creativecommons.org/publicdomain/zero/1.0/".freeze

  # Queues one import job per calendar month in [from_date, until_date].
  # Returns a human-readable status string.
  def self.import_by_month(options = {})
    from_date = (options[:from_date].present? ? Date.parse(options[:from_date]) : Date.current).beginning_of_month
    until_date = (options[:until_date].present? ? Date.parse(options[:until_date]) : Date.current).end_of_month

    # get first day of every month between from_date and until_date
    (from_date..until_date).select { |d| d.day == 1 }.each do |m|
      OrcidClaimImportByMonthJob.perform_later(from_date: m.strftime("%F"), until_date: m.end_of_month.strftime("%F"))
    end

    "Queued import for claims created from #{from_date.strftime('%F')} until #{until_date.strftime('%F')}."
  end

  # Queues import jobs for a date window (defaults to yesterday..today).
  def self.import(options = {})
    from_date = options[:from_date].present? ? Date.parse(options[:from_date]) : Date.current - 1.day
    until_date = options[:until_date].present? ? Date.parse(options[:until_date]) : Date.current

    orcid_claim = OrcidClaim.new
    orcid_claim.queue_jobs(orcid_claim.unfreeze(
      from_date: from_date.strftime("%F"), until_date: until_date.strftime("%F"),
    ))
  end

  # Source identifier used when queueing jobs for this importer.
  def source_id
    "datacite_orcid_search_link"
  end

  # Queues one NameIdentifierImportJob per doc in the Solr-style response.
  # Returns the error payload when the response carries errors, otherwise the
  # number of docs seen.
  def push_data(result, _options = {})
    return result.body.fetch("errors") if result.body.fetch("errors", nil).present?

    items = result.body.dig("data", "response", "docs")
    Array.wrap(items).map do |item|
      NameIdentifierImportJob.perform_later(item)
    # Queueing failures are logged and skipped so one bad doc does not abort
    # the whole batch.
    rescue Aws::SQS::Errors::InvalidParameterValue, Aws::SQS::Errors::RequestEntityTooLarge, Seahorse::Client::NetworkingError => e
      Rails.logger.error e.message
    end

    # NOTE(review): dig may return nil (Array.wrap above tolerates that), but
    # items.length would then raise NoMethodError -- confirm upstream always
    # supplies "docs".
    items.length
  end

  # Builds one Event Data "is_authored_by" event per ORCID name identifier on
  # the item and POSTs each to the Event Data service.
  def self.push_item(item)
    doi = item.fetch("doi")
    pid = normalize_doi(doi)
    related_identifiers = item.fetch("relatedIdentifier", [])
    # Versions/parts duplicate the canonical DOI's authorship, so skip them.
    skip_doi = related_identifiers.any? do |related_identifier|
      ["IsIdenticalTo", "IsPartOf", "IsPreviousVersionOf", "IsVersionOf"].include?(related_identifier.split(":", 3).first)
    end
    name_identifiers = item.fetch("nameIdentifier", [])
    return nil if name_identifiers.blank? || skip_doi

    source_id = item.fetch("sourceId", "datacite_orcid_auto_update")
    relation_type_id = "is_authored_by"
    source_token = ENV["DATACITE_ORCID_AUTO_UPDATE_SOURCE_TOKEN"]

    push_items = Array.wrap(name_identifiers).reduce([]) do |ssum, iitem|
      # Identifiers look like "<scheme>:<value>", e.g. "ORCID:0000-...".
      name_identifier_scheme, name_identifier = iitem.split(":", 2)
      name_identifier = name_identifier.strip
      obj_id = normalize_orcid(name_identifier)

      if obj_id.present?
        subj = cached_datacite_response(pid)
        obj = cached_orcid_response(obj_id)

        ssum << { "message_action" => "create",
                  "subj_id" => pid,
                  "obj_id" => obj_id,
                  "relation_type_id" => relation_type_id,
                  "source_id" => source_id,
                  "source_token" => source_token,
                  "occurred_at" => item.fetch("updated"),
                  "timestamp" => Time.zone.now.iso8601,
                  "license" => LICENSE,
                  "subj" => subj,
                  "obj" => obj }
      end

      ssum
    end

    # there can be one or more name_identifier per DOI
    Array.wrap(push_items).each do |iiitem|
      # send to DataCite Event Datas API
      if ENV["STAFF_ADMIN_TOKEN"].present?
        push_url = "#{ENV['LAGOTTINO_URL']}/events"

        data = { "data" => { "type" => "events",
                             "attributes" => {
                               "messageAction" => iiitem["message_action"],
                               "subjId" => iiitem["subj_id"],
                               "objId" => iiitem["obj_id"],
                               "relationTypeId" => iiitem["relation_type_id"].to_s.dasherize,
                               "sourceId" => iiitem["source_id"].to_s.dasherize,
                               "sourceToken" => iiitem["source_token"],
                               "occurredAt" => iiitem["occurred_at"],
                               "timestamp" => iiitem["timestamp"],
                               "license" => iiitem["license"],
                               "subj" => iiitem["subj"],
                               "obj" => iiitem["obj"], }, }, }

        response = Maremma.post(push_url, data: data.to_json,
                                          bearer: ENV["STAFF_ADMIN_TOKEN"],
                                          content_type: "application/vnd.api+json",
                                          accept: "application/vnd.api+json; version=2")

        # 409 means the event already exists upstream -- treated as success.
        if [200, 201].include?(response.status)
          Rails.logger.info "[Event Data] #{iiitem['subj_id']} #{iiitem['relation_type_id']} #{iiitem['obj_id']} pushed to Event Data service."
        elsif response.status == 409
          Rails.logger.info "[Event Data] #{iiitem['subj_id']} #{iiitem['relation_type_id']} #{iiitem['obj_id']} already pushed to Event Data service."
        elsif response.body["errors"].present?
          Rails.logger.error "[Event Data] #{iiitem['subj_id']} #{iiitem['relation_type_id']} #{iiitem['obj_id']} had an error: #{response.body['errors']}"
          Rails.logger.error data.inspect
        end
      end
    end
  end
end
rakshithShetty/captionGAN
scripts/evalSimilarity.py
import numpy as np import pandas as pd import matplotlib.pyplot as plt import json import os import random import scipy.io import codecs from collections import defaultdict import sys #%matplotlib inline if len(sys.argv) == 1: target = '135235570_5698072cd4.jpg' elif len(sys.argv) == 2: target = sys.argv[1] else: targetId = sys.argv[1] def topN(a,N): return np.argsort(a)[::-1][:N] dataset_root = '../data/flickr8k/' datafilePath = os.path.join(dataset_root, 'dataset.json') jsondataset = json.load(open(datafilePath, 'r')) image_root = os.path.join(dataset_root, 'imgs') features_path = os.path.join(dataset_root, 'vgg_feats.mat') print 'BasicDataProvider: reading %s' % (features_path, ) features_struct = scipy.io.loadmat(features_path) features = features_struct['feats'] cnt = 0 if len(sys.argv) == 2: for img in jsondataset['images']: if img['filename'] == target: targetId = cnt break cnt += 1 print targetId targFeat = features[:,targetId] scr = targFeat.T.dot(features) N = 8 topIdx = topN(scr,N) figIdx = 1 print topIdx for n in topIdx: fname = os.path.join(image_root,jsondataset['images'][n]['filename']) im = plt.imread(fname) plt.subplot(4,2,figIdx) plt.imshow(np.flipud(im)) figIdx += 1 plt.show() #labels_df.sort('synset_id') #predictions_df = pd.DataFrame(np.vstack(df.prediction.values), columns=labels_df['name']) ##print(predictions_df.iloc[0]) # # ##plt.gray() ##plt.matshow(predictions_df.values) ##plt.xlabel('Classes') ##plt.ylabel('Windows') ##plt.show() # #N = 4 #max_s = predictions_df.max(0) #print type(max_s) #topIdx = topN(max_s.tolist(),N) #print max_s[topIdx] # # ##print predictions_df[topIdx].argmax() ### Find, print, and display the top detections: person and bicycle. ## #### Show top predictions for top detection. ##f = pd.Series(df['prediction'].iloc[i], index=labels_df['name']) ##print('Top detection:') ##print(f.order(ascending=False)[:5]) ##print('') ## ### Show top predictions for second-best detection. 
##f = pd.Series(df['prediction'].iloc[j], index=labels_df['name']) ##print('Second-best detection:') ##print(f.order(ascending=False)[:5]) # ## Show top detection in red, second-best top detection in blue. ##i = predictions_df[max_s[topIdx[0]]].argmax() #fin = open('_temp2/det_input.txt') #inputs = [_.strip() for _ in fin.readlines()] #im = plt.imread(inputs[0]) #width = N; ##colorArray = #for n in topIdx: # i = predictions_df.ix[::][max_s[n:n+1].index].idxmax().tolist() # plt.imshow(np.flipud(im)) # currentAxis = plt.gca() # det = df.iloc[i] # coords = (det['xmin'], det['ymin']), det['xmax'] - det['xmin'], det['ymax'] - det['ymin'] # currentAxis.add_patch(plt.Rectangle(*coords, fill=False, edgecolor='r', linewidth=width)) # currentAxis.text(det['xmin'],det['ymin'],max_s[n:n+1].index[0]) # width -= 1 # # ##det = df.iloc[j] ## ## #plt.show()
ishaileshmishra/stock-market-android-app
app/src/main/java/com/pravrajya/diamond/views/users/registration/SignUpActivity.java
package com.pravrajya.diamond.views.users.registration;

import android.content.Intent;
import android.os.Bundle;
import android.text.TextUtils;

import com.fxn.stash.Stash;
import com.google.firebase.auth.FirebaseAuth;
import com.google.firebase.auth.FirebaseUser;
import com.pravrajya.diamond.R;
import com.pravrajya.diamond.databinding.ActivityProfileBinding;
import com.pravrajya.diamond.views.BaseActivity;
import com.pravrajya.diamond.views.users.login.UserProfile;
import com.pravrajya.diamond.views.users.main.views.MainActivity;

import java.util.Objects;

import androidx.databinding.DataBindingUtil;

import static com.pravrajya.diamond.utils.Constants.USER_PROFILE;

/**
 * Registration screen: validates the form, creates a Firebase e-mail/password
 * account, stashes the resulting {@link UserProfile} and moves to the main
 * screen.
 */
public class SignUpActivity extends BaseActivity {

    private ActivityProfileBinding binding;
    private FirebaseAuth mAuth;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        Objects.requireNonNull(getSupportActionBar()).setElevation(0);
        binding = DataBindingUtil.setContentView(this, R.layout.activity_profile);
        mAuth = FirebaseAuth.getInstance();
        binding.etFullName.clearFocus();
        buttonClickHandler();
    }

    /** Wires the login shortcut and the registration button (with validation). */
    private void buttonClickHandler() {
        // "Already have an account" -> back to the login screen.
        binding.btnLogin.setOnClickListener(v -> {
            finish();
        });

        binding.btnRegistration.setOnClickListener(v -> {
            String name = Objects.requireNonNull(binding.etFullName.getText()).toString();
            String emailId = Objects.requireNonNull(binding.etEmail.getText()).toString();
            String password = Objects.requireNonNull(binding.etPassword.getText()).toString();
            String confirm_password = Objects.requireNonNull(binding.etConformPassword.getText()).toString();

            // Clear any stale validation errors before re-validating.
            binding.inputName.setError(null);
            binding.inputEmail.setError(null);
            binding.inputPassword.setError(null);
            binding.inputConformPassword.setError(null);

            if (TextUtils.isEmpty(name)) {
                errorToast("Please enter full valid name");
                binding.inputName.setError("Please enter full valid name");
            } else if (!isValidEmail(emailId)) {
                errorToast("Please enter a valid e-mail address");
                binding.inputEmail.setError("Please enter a valid e-mail address");
            } else if (!isValidPassword(password)) {
                errorToast("Password is not Valid");
                binding.inputPassword.setError("Password is not Valid");
            } else if (!password.equalsIgnoreCase(confirm_password)) {
                errorToast("Password and Confirm Password does not Match");
                binding.inputConformPassword.setError("Password and Confirm Password does not Match");
            } else {
                firebaseCreateAccount(name, emailId, password);
            }
        });
    }

    /** Creates the Firebase account and stores the profile on success. */
    private void firebaseCreateAccount(String name, String email, String password) {
        showProgressDialog("Registration in progress");
        mAuth.createUserWithEmailAndPassword(email, password)
                .addOnCompleteListener(this, task -> {
                    hideProgressDialog();
                    if (task.isSuccessful()) {
                        FirebaseUser user = mAuth.getCurrentUser();
                        assert user != null;
                        setUserProfile(user);
                    } else {
                        errorToast(Objects.requireNonNull(task.getException()).getLocalizedMessage());
                    }
                });
    }

    /** Persists the signed-up user locally and opens the main screen. */
    private void setUserProfile(FirebaseUser user) {
        // BUG FIX: getPhotoUrl() (and potentially getProviders()) is null for a
        // freshly created e-mail account; the unconditional toString() calls
        // crashed the app right after a successful registration.
        String photoUrl = user.getPhotoUrl() != null ? user.getPhotoUrl().toString() : null;
        String providers = user.getProviders() != null ? user.getProviders().toString() : null;

        Stash.put(USER_PROFILE, new UserProfile(user.getUid(),
                user.getEmail(),
                user.getDisplayName(),
                user.getProviderData().toString(),
                user.getPhoneNumber(),
                providers,
                photoUrl));

        startActivity(new Intent(getApplicationContext(), MainActivity.class));
        finish();
    }
}
Wdifulvio523/personal-portfolio
node_modules/webpack-hot-middleware/client.js
/*eslint-env browser*/
/*global __resourceQuery __webpack_public_path__*/

// Browser-side client for webpack-hot-middleware: subscribes to the server's
// EventSource stream, reports build problems, and triggers HMR updates.
var options = {
  path: "/__webpack_hmr",
  timeout: 20 * 1000,
  overlay: true,
  reload: false,
  log: true,
  warn: true,
  name: '',
  autoConnect: true,
  overlayStyles: {},
  ansiColors: {}
};
// Options can be overridden via the bundler resource query string.
if (__resourceQuery) {
  var querystring = require('querystring');
  var overrides = querystring.parse(__resourceQuery.slice(1));
  setOverrides(overrides);
}

if (typeof window === 'undefined') {
  // do nothing
} else if (typeof window.EventSource === 'undefined') {
  console.warn(
    "webpack-hot-middleware's client requires EventSource to work. " +
    "You should include a polyfill if you want to support this browser: " +
    "https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events#Tools"
  );
} else {
  if (options.autoConnect) {
    connect();
  }
}

/* istanbul ignore next */
function setOptionsAndConnect(overrides) {
  setOverrides(overrides);
  connect();
}

// Applies query-string/user overrides onto the defaults; all values arrive as
// strings, hence the explicit 'true'/'false' comparisons.
function setOverrides(overrides) {
  if (overrides.autoConnect) options.autoConnect = overrides.autoConnect == 'true';
  if (overrides.path) options.path = overrides.path;
  if (overrides.timeout) options.timeout = overrides.timeout;
  if (overrides.overlay) options.overlay = overrides.overlay !== 'false';
  if (overrides.reload) options.reload = overrides.reload !== 'false';
  if (overrides.noInfo && overrides.noInfo !== 'false') {
    options.log = false;
  }
  if (overrides.name) {
    options.name = overrides.name;
  }
  if (overrides.quiet && overrides.quiet !== 'false') {
    options.log = false;
    options.warn = false;
  }
  if (overrides.dynamicPublicPath) {
    options.path = __webpack_public_path__ + options.path;
  }
  if (overrides.ansiColors) options.ansiColors = JSON.parse(overrides.ansiColors);
  if (overrides.overlayStyles) options.overlayStyles = JSON.parse(overrides.overlayStyles);
}

// Wraps an EventSource with activity-based reconnect: if no message arrives
// within options.timeout the connection is torn down and re-established.
function EventSourceWrapper() {
  var source;
  var lastActivity = new Date();
  var listeners = [];

  init();
  var timer = setInterval(function() {
    if ((new Date() - lastActivity) > options.timeout) {
      handleDisconnect();
    }
  }, options.timeout / 2);

  function init() {
    source = new window.EventSource(options.path);
    source.onopen = handleOnline;
    source.onerror = handleDisconnect;
    source.onmessage = handleMessage;
  }

  function handleOnline() {
    if (options.log) console.log("[HMR] connected");
    lastActivity = new Date();
  }

  function handleMessage(event) {
    lastActivity = new Date();
    for (var i = 0; i < listeners.length; i++) {
      listeners[i](event);
    }
  }

  function handleDisconnect() {
    clearInterval(timer);
    source.close();
    setTimeout(init, options.timeout);
  }

  return {
    addMessageListener: function(fn) {
      listeners.push(fn);
    }
  };
}

function getEventSourceWrapper() {
  if (!window.__whmEventSourceWrapper) {
    window.__whmEventSourceWrapper = {};
  }
  if (!window.__whmEventSourceWrapper[options.path]) {
    // cache the wrapper for other entries loaded on
    // the same page with the same options.path
    window.__whmEventSourceWrapper[options.path] = EventSourceWrapper();
  }
  return window.__whmEventSourceWrapper[options.path];
}

function connect() {
  getEventSourceWrapper().addMessageListener(handleMessage);

  function handleMessage(event) {
    // "\uD83D\uDC93" (heartbeat emoji) frames are keep-alives; ignore them.
    if (event.data == "\uD83D\uDC93") {
      return;
    }
    try {
      processMessage(JSON.parse(event.data));
    } catch (ex) {
      if (options.warn) {
        console.warn("Invalid HMR message: " + event.data + "\n" + ex);
      }
    }
  }
}

// the reporter needs to be a singleton on the page
// in case the client is being used by multiple bundles
// we only want to report once.
// all the errors will go to all clients
var singletonKey = '__webpack_hot_middleware_reporter__';
var reporter;
if (typeof window !== 'undefined') {
  if (!window[singletonKey]) {
    window[singletonKey] = createReporter();
  }
  reporter = window[singletonKey];
}

// Builds the console/overlay reporter used to surface build errors/warnings.
function createReporter() {
  var strip = require('strip-ansi');

  var overlay;
  if (typeof document !== 'undefined' && options.overlay) {
    overlay = require('./client-overlay')({
      ansiColors: options.ansiColors,
      overlayStyles: options.overlayStyles
    });
  }

  var styles = {
    errors: "color: #ff0000;",
    warnings: "color: #999933;"
  };
  var previousProblems = null;
  function log(type, obj) {
    var newProblems = obj[type].map(function(msg) { return strip(msg); }).join('\n');
    // De-duplicate: identical problem sets are only reported once.
    if (previousProblems == newProblems) {
      return;
    } else {
      previousProblems = newProblems;
    }

    var style = styles[type];
    var name = obj.name ? "'" + obj.name + "' " : "";
    var title = "[HMR] bundle " + name + "has " + obj[type].length + " " + type;
    // NOTE: console.warn or console.error will print the stack trace
    // which isn't helpful here, so using console.log to escape it.
    if (console.group && console.groupEnd) {
      console.group("%c" + title, style);
      console.log("%c" + newProblems, style);
      console.groupEnd();
    } else {
      console.log(
        "%c" + title + "\n\t%c" + newProblems.replace(/\n/g, "\n\t"),
        style + "font-weight: bold;",
        style + "font-weight: normal;"
      );
    }
  }

  return {
    cleanProblemsCache: function () {
      previousProblems = null;
    },
    problems: function(type, obj) {
      if (options.warn) {
        log(type, obj);
      }
      if (overlay && type !== 'warnings') overlay.showProblems(type, obj[type]);
    },
    success: function() {
      if (overlay) overlay.clear();
    },
    useCustomOverlay: function(customOverlay) {
      overlay = customOverlay;
    }
  };
}

var processUpdate = require('./process-update');

var customHandler;
var subscribeAllHandler;
// Dispatches a parsed server message by its action type.
function processMessage(obj) {
  switch(obj.action) {
    case "building":
      if (options.log) {
        console.log(
          "[HMR] bundle " + (obj.name ? "'" + obj.name + "' " : "") +
          "rebuilding"
        );
      }
      break;
    case "built":
      if (options.log) {
        console.log(
          "[HMR] bundle " + (obj.name ? "'" + obj.name + "' " : "") +
          "rebuilt in " + obj.time + "ms"
        );
      }
      // fall through
    case "sync":
      // When a bundle name is configured, only react to this bundle's events.
      if (obj.name && options.name && obj.name !== options.name) {
        return;
      }
      if (obj.errors.length > 0) {
        if (reporter) reporter.problems('errors', obj);
      } else {
        if (reporter) {
          if (obj.warnings.length > 0) {
            reporter.problems('warnings', obj);
          } else {
            reporter.cleanProblemsCache();
          }
          reporter.success();
        }
        processUpdate(obj.hash, obj.modules, options);
      }
      break;
    default:
      if (customHandler) {
        customHandler(obj);
      }
  }

  if (subscribeAllHandler) {
    subscribeAllHandler(obj);
  }
}

if (module) {
  module.exports = {
    subscribeAll: function subscribeAll(handler) {
      subscribeAllHandler = handler;
    },
    subscribe: function subscribe(handler) {
      customHandler = handler;
    },
    useCustomOverlay: function useCustomOverlay(customOverlay) {
      if (reporter) reporter.useCustomOverlay(customOverlay);
    },
    setOptionsAndConnect: setOptionsAndConnect
  };
}
zhangkn/iOS14Header
System/Library/PrivateFrameworks/HomeKitDaemon.framework/HMDCloudManagerDelegate.h
/*
* This header is generated by classdump-dyld 1.0
* on Sunday, September 27, 2020 at 11:51:59 AM Mountain Standard Time
* Operating System: Version 14.0 (Build 18A373)
* Image Source: /System/Library/PrivateFrameworks/HomeKitDaemon.framework/HomeKitDaemon
* classdump-dyld is licensed under GPLv3, Copyright © 2013-2016 by <NAME>.
*/

// Class-dumped delegate protocol from the private HomeKitDaemon framework.
// All members are @required. Semantics below are inferred from selector names
// only -- there is no official documentation for this private API; verify
// against the framework binary before depending on behavior.
@protocol HMDCloudManagerDelegate
@required
-(void)fetchHomeFromCloudZone:(id)arg1 cloudConflict:(BOOL)arg2 withDelay:(double)arg3;
-(void)fetchHomeManagerCloudConflict:(BOOL)arg1 withDelay:(double)arg2;
-(void)uploadHomeConfigToCloud:(BOOL)arg1 withDelay:(double)arg2;
-(void)fetchHomeDataFromCloudWithCloudConflict:(BOOL)arg1 withDelay:(double)arg2;
-(void)archiveServerToken:(id)arg1;
-(void)eraseLocalHomeData;
-(void)reloadHomeDataFromLocalStore:(BOOL)arg1;
-(void)fetchAllZones;
-(void)verifyHomeDataFromCloud:(/*^block*/id)arg1;
-(void)schedulePostFetch;
-(void)notifyZonesCloudZoneReady:(id)arg1;
-(void)kickAccountAvailabilityCheck;
@end
deepakddixit/monarch
ADS/geode-core/src/main/java/org/apache/geode/internal/admin/JoinLeaveListener.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.geode.internal.admin;

/**
 * Interface implemented by those who want to be alerted when a node joins or leaves a distributed
 * GemFire system.
 */
public interface JoinLeaveListener extends java.util.EventListener {

  /**
   * Invoked when a member has joined the distributed system observed by {@code source}.
   *
   * @param source the agent through which the membership change was observed
   * @param joined the member that joined
   */
  void nodeJoined(GfManagerAgent source, GemFireVM joined);

  /**
   * Invoked when a member has left the distributed system.
   *
   * @param source the agent through which the membership change was observed
   * @param left the member that left
   */
  void nodeLeft(GfManagerAgent source, GemFireVM left);

  /**
   * Invoked when a member has crashed out of the distributed system.
   *
   * @param source the agent through which the membership change was observed
   * @param crashed the member that crashed
   */
  void nodeCrashed(GfManagerAgent source, GemFireVM crashed);
}
EsmaeeilMoradi/AOSPGallery4AS
app/src/main/java/org/apache/http/impl/io/AbstractSessionOutputBuffer.java
/*
 * $HeadURL: http://svn.apache.org/repos/asf/httpcomponents/httpcore/trunk/module-main/src/main/java/org/apache/http/impl/io/AbstractSessionOutputBuffer.java $
 * $Revision: 652091 $
 * $Date: 2008-04-29 13:41:07 -0700 (Tue, 29 Apr 2008) $
 *
 * ====================================================================
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 * ====================================================================
 *
 * This software consists of voluntary contributions made by many
 * individuals on behalf of the Apache Software Foundation.  For more
 * information on the Apache Software Foundation, please see
 * <http://www.apache.org/>.
 *
 */

package org.apache.http.impl.io;

import java.io.IOException;
import java.io.OutputStream;

import org.apache.http.io.SessionOutputBuffer;
import org.apache.http.io.HttpTransportMetrics;
import org.apache.http.params.HttpParams;
import org.apache.http.params.HttpProtocolParams;
import org.apache.http.protocol.HTTP;
import org.apache.http.util.ByteArrayBuffer;
import org.apache.http.util.CharArrayBuffer;

/**
 * Abstract base class for session output buffers that stream data
 * to an {@link OutputStream}.
 *
 * @author <a href="mailto:oleg at ural.ru"><NAME></a>
 *
 *
 * @deprecated Please use {@link java.net.URL#openConnection} instead.
 *     Please visit <a href="http://android-developers.blogspot.com/2011/09/androids-http-clients.html">this webpage</a>
 *     for further details.
 */
@Deprecated
public abstract class AbstractSessionOutputBuffer implements SessionOutputBuffer {

    private static final byte[] CRLF = new byte[] {HTTP.CR, HTTP.LF};

    // Byte arrays larger than this bypass the buffer and go straight to the
    // underlying stream.
    private static final int MAX_CHUNK = 256;

    private OutputStream outstream;
    private ByteArrayBuffer buffer;

    private String charset = HTTP.US_ASCII;
    // Fast path flag: ASCII charsets can copy chars without an encoding pass.
    private boolean ascii = true;

    private HttpTransportMetricsImpl metrics;

    /**
     * Initializes this buffer over the given stream.
     *
     * @param outstream the destination stream
     * @param buffersize internal buffer size in bytes; must be positive
     * @param params HTTP parameters supplying the element charset
     */
    protected void init(final OutputStream outstream, int buffersize, final HttpParams params) {
        if (outstream == null) {
            // BUG FIX: the message previously said "Input stream may not be
            // null", copied from the input-buffer counterpart.
            throw new IllegalArgumentException("Output stream may not be null");
        }
        if (buffersize <= 0) {
            throw new IllegalArgumentException("Buffer size may not be negative or zero");
        }
        if (params == null) {
            throw new IllegalArgumentException("HTTP parameters may not be null");
        }
        this.outstream = outstream;
        this.buffer = new ByteArrayBuffer(buffersize);
        this.charset = HttpProtocolParams.getHttpElementCharset(params);
        this.ascii = this.charset.equalsIgnoreCase(HTTP.US_ASCII)
                     || this.charset.equalsIgnoreCase(HTTP.ASCII);
        this.metrics = new HttpTransportMetricsImpl();
    }

    /** Writes any buffered bytes to the stream and clears the buffer. */
    protected void flushBuffer() throws IOException {
        int len = this.buffer.length();
        if (len > 0) {
            this.outstream.write(this.buffer.buffer(), 0, len);
            this.buffer.clear();
            this.metrics.incrementBytesTransferred(len);
        }
    }

    /** Flushes buffered bytes and the underlying stream. */
    public void flush() throws IOException {
        flushBuffer();
        this.outstream.flush();
    }

    public void write(final byte[] b, int off, int len) throws IOException {
        if (b == null) {
            return;
        }
        // Do not want to buffer largish chunks
        // if the byte array is larger then MAX_CHUNK
        // write it directly to the output stream
        if (len > MAX_CHUNK || len > this.buffer.capacity()) {
            // flush the buffer
            flushBuffer();
            // write directly to the out stream
            this.outstream.write(b, off, len);
            this.metrics.incrementBytesTransferred(len);
        } else {
            // Do not let the buffer grow unnecessarily
            int freecapacity = this.buffer.capacity() - this.buffer.length();
            if (len > freecapacity) {
                // flush the buffer
                flushBuffer();
            }
            // buffer
            this.buffer.append(b, off, len);
        }
    }

    public void write(final byte[] b) throws IOException {
        if (b == null) {
            return;
        }
        write(b, 0, b.length);
    }

    public void write(int b) throws IOException {
        if (this.buffer.isFull()) {
            flushBuffer();
        }
        this.buffer.append(b);
    }

    /** Writes the string (in the session charset) followed by CRLF. */
    public void writeLine(final String s) throws IOException {
        if (s == null) {
            return;
        }
        if (s.length() > 0) {
            write(s.getBytes(this.charset));
        }
        write(CRLF);
    }

    /** Writes the char buffer followed by CRLF, using a fast path for ASCII. */
    public void writeLine(final CharArrayBuffer s) throws IOException {
        if (s == null) {
            return;
        }
        if (this.ascii) {
            // Copy chars in buffer-sized chunks, flushing whenever full.
            int off = 0;
            int remaining = s.length();
            while (remaining > 0) {
                int chunk = this.buffer.capacity() - this.buffer.length();
                chunk = Math.min(chunk, remaining);
                if (chunk > 0) {
                    this.buffer.append(s, off, chunk);
                }
                if (this.buffer.isFull()) {
                    flushBuffer();
                }
                off += chunk;
                remaining -= chunk;
            }
        } else {
            // This is VERY memory inefficient, BUT since non-ASCII charsets are
            // NOT meant to be used anyway, there's no point optimizing it
            byte[] tmp = s.toString().getBytes(this.charset);
            write(tmp);
        }
        write(CRLF);
    }

    public HttpTransportMetrics getMetrics() {
        return this.metrics;
    }

}
prasannachinnapareddy/FullStactDevelopement
Backend/src/com/cts/training/model/Sector.java
package com.cts.training.model;

import java.io.Serializable;

//import javax.persistence.Column;
//import javax.persistence.Entity;
//import javax.persistence.Id;
//import javax.persistence.Table;

/**
 * Plain serializable model describing a market sector.
 * The commented-out annotations document the intended JPA mapping.
 */
//@Entity
//@Table(name = "SectorDetails")
public class Sector implements Serializable {

    // BUG FIX: Serializable classes should declare an explicit version id so
    // serialized instances stay compatible when the class evolves.
    private static final long serialVersionUID = 1L;

    //@Id
    //@Column(name = "sector_id")
    private int id;
    private String sectorname;
    // NOTE(review): "breif" is a typo for "brief"; kept because the accessor
    // names are part of the public API.
    private String breif;

    /** No-arg constructor (required by serialization/ORM frameworks). */
    public Sector() {
    }

    public Sector(int id, String sectorname, String breif) {
        super();
        this.id = id;
        this.sectorname = sectorname;
        this.breif = breif;
    }

    @Override
    public String toString() {
        return "Sector [id=" + id + ", sectorname=" + sectorname + ", breif=" + breif + "]";
    }

    public int getId() {
        return id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public String getSectorname() {
        return sectorname;
    }

    public void setSectorname(String sectorname) {
        this.sectorname = sectorname;
    }

    public String getBreif() {
        return breif;
    }

    public void setBreif(String breif) {
        this.breif = breif;
    }

    /**
     * Intentionally a no-op: kept only so existing callers keep compiling.
     * TODO(review): either implement or remove once callers stop using it.
     */
    public void setEnabled(boolean b) {
    }
}
OpenOPx/kobotoolbox
kobo-docker/.vols/static/kpi/js/components/passwordStrength.es6
import React from 'react'; import autoBind from 'react-autobind'; import zxcvbn from 'zxcvbn'; import {bem} from '../bem'; import {t} from '../utils'; /* Properties: - password <string>: required */ class PasswordStrength extends React.Component { constructor(props){ super(props); autoBind(this); } render() { const report = zxcvbn(this.props.password); const barModifier = `score-${report.score}`; return ( <bem.PasswordStrength> <bem.PasswordStrength__title> {t('Password strength')} </bem.PasswordStrength__title> <bem.PasswordStrength__bar m={barModifier}> <bem.PasswordStrength__indicator/> </bem.PasswordStrength__bar> {(report.feedback.warning || report.feedback.suggestions.length > 0) && <bem.PasswordStrength__messages> {report.feedback.warning && <bem.PasswordStrength__message m='warning'> {t(report.feedback.warning)} </bem.PasswordStrength__message> } {report.feedback.suggestions.length > 0 && report.feedback.suggestions.map((suggestion, index) => { return ( <bem.PasswordStrength__message key={index}> {t(suggestion)} </bem.PasswordStrength__message> ) }) } </bem.PasswordStrength__messages> } </bem.PasswordStrength> ) } } export default PasswordStrength;
wavestate/wavestate-iirrational
src/wavestate/iirrational/v2/hintsets.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: Apache-2.0
# SPDX-FileCopyrightText: © 2021 Massachusetts Institute of Technology.
# SPDX-FileCopyrightText: © 2021 <NAME> <<EMAIL>>
# NOTICE: authors should document their contributions in concisely in NOTICE
# with details inline in source files, comments, and docstrings.
"""
"""
# import warnings

# Hint set for exact (noise-free) data: the relative residual-average order
# thresholds are disabled and only a tight absolute threshold is kept.
exact_data = {
    "resavg_RthreshOrdDn": None,
    "resavg_RthreshOrdUp": None,
    "resavg_RthreshOrdC": None,
    "resavg_EthreshOrdDn": 1e-2,
}

# Suppress fit-log printing entirely.
quiet = {
    "log_print": False,
}

# Print the fit log, including optimizer progress.
verbose = {
    "log_print": True,
    "optimize_log": True,
}
LamWaiBen/LamWaiBen.github.io
leetcode/63.unique-paths-ii.js
<reponame>LamWaiBen/LamWaiBen.github.io /* * @lc app=leetcode id=63 lang=javascript * * [63] Unique Paths II */ // @lc code=start /** * @param {number[][]} obstacleGrid * @return {number} */ var uniquePathsWithObstacles = function(obstacleGrid) { let n = obstacleGrid.length let m = obstacleGrid[0].length let dp = Array(n + 1).fill(0).map(v => Array(m + 1).fill(-1)) // 递归 let recur = function (r, c) { if (r > n || c > m) return 0; if (obstacleGrid[r - 1][c - 1]) return 0 if (r == n && c == m) return 1; if (dp[r][c] > -1) return dp[r][c] dp[r][c] = recur(r + 1, c) + recur(r, c + 1) return Math.max(dp[r][c], 0) } return recur(1, 1) }; // @lc code=end
MichaelHeydon/vue-tables-2-private
compiled/computed/datepicker-columns.js
<reponame>MichaelHeydon/vue-tables-2-private<filename>compiled/computed/datepicker-columns.js<gh_stars>1-10 "use strict"; var intersect = require('array-intersect')["default"]; module.exports = function () { if (this.opts.filterable === true) { return this.opts.dateColumns; } if (this.opts.filterable === false) { return []; } return intersect(this.opts.filterable, this.opts.dateColumns); };
goodmind/FlowDefinitelyTyped
flow-types/types/terminal-menu_vx.x.x/flow_v0.25.x-/terminal-menu.js
declare module "terminal-menu" {
  import typeof * as stream from "stream";

  // NOTE(review): the doubled "MenuContainerFactory$MenuContainerFactory$"
  // prefix on the factory's return/parameter types looks like generator
  // output — confirm it matches the published libdef before renaming.

  /**
   * Creates a TerminalMenu with default settings.
   */
  declare function MenuContainerFactory(): MenuContainerFactory$MenuContainerFactory$TerminalMenu;

  /**
   * Creates a TerminalMenu using options to override default settings.
   * @param options Override values for available settings.
   */
  declare function MenuContainerFactory(
    options: MenuContainerFactory$MenuContainerFactory$TerminalMenuOptions
  ): MenuContainerFactory$MenuContainerFactory$TerminalMenu;

  /**
   * A Thickness structure specifying the amount of padding to apply.
   */
  declare export interface MenuContainerFactory$Thickness {
    /**
     * Represents width of the left side of the bounding rectangle.
     */
    left: number;

    /**
     * Represents width of the right side of the bounding rectangle.
     */
    right: number;

    /**
     * Represents width of the upper side of the bounding rectangle.
     */
    top: number;

    /**
     * Represents width of the lower side of the bounding rectangle.
     */
    bottom: number;
  }

  /**
   * Options to configure the menu.
   */
  declare export interface MenuContainerFactory$TerminalMenuOptions {
    /**
     * Menu width in columns.
     * Default = 50.
     */
    width?: number;

    /**
     * Horizontal offset for top-left corner.
     * Default = 1
     */
    x?: number;

    /**
     * Vertical offset for top-left corner.
     * Default = 1
     */
    y?: number;

    /**
     * Foreground color for the menu.
     * Default = 'white'
     */
    fg?: string;

    /**
     * Background color for the menu.
     * Default = 'blue'
     */
    bg?: string;

    /**
     * Padding for the bounding rectangle.
     * If a number is passed, all elements of the Thickness structure will be
     * set to that value.
     * Default = {
     *   left: 2,
     *   right: 2,
     *   top: 1,
     *   bottom: 1
     * }
     */
    padding?: number | MenuContainerFactory$Thickness;

    /**
     * Index of the menu item to be selected.
     * Default = 0
     */
    selected?: number;
  }

  /**
   * Retro ansi terminal menus.
   */
  declare export type MenuContainerFactory$TerminalMenu = {
    /**
     * Create a new selectable menu item with label as the anchor.
     * @param label Label to use as the menu item anchor.
     */
    add(label: string): void,

    /**
     * Create a new selectable menu item with label as the anchor.
     * @param label Label to use as the menu item anchor.
     * @param callback Callback to invoke when the menu item is selected.
     */
    add(label: string, callback: (label: string, index: number) => void): void,

    /**
     * Writes a message to the terminal.
     * @param msg Message to be written.
     */
    write(msg: string): void,

    /**
     * Return a duplex stream to wire up input and output.
     */
    createStream(): stream.Duplex,

    /**
     * Reset the terminal, clearing all content.
     */
    reset(): void,

    /**
     * Unregister all listeners and puts the terminal back to its original state.
     */
    close(): void,

    /**
     * When a menu item is selected, this event is fired.
     * @param eventName Name of the event. Only value available for eventName is "select"
     * @param callback Handler for the event specified by eventName
     */
    on(
      eventName: string | Symbol,
      callback: (label: string, index: number) => void
    ): this,

    /**
     * When a menu item is selected, this event is fired.
     * This overload ensures backward compatibility with older versions of NodeJS (< 6.0)
     * @param eventName Name of the event. Only value available for eventName is "select"
     * @param callback Handler for the event specified by eventName
     */
    on(
      eventName: string,
      callback: (label: string, index: number) => void
    ): this
  } & NodeJS.EventEmitter;

  declare module.exports: typeof MenuContainerFactory;
}
Focinfi/ckb-sdk-go
utils/epoch.go
package utils import ( "github.com/Focinfi/ckb-sdk-go/types" ) // Epoch represents a CKB consensus protocol difficulty adjust period type Epoch struct { Length uint64 Number uint64 Index uint64 } // Since returns the since block of this epoch func (e Epoch) Since() uint64 { return (0x20 << 56) + (e.Length << 40) + (e.Index << 24) + e.Number } // ParseEpochByHexStr parses the 0x prefixed hex epoch string into a Epoch func ParseEpochByHexStr(hexStr string) (*Epoch, error) { hexNum, err := types.ParseHexUint64(hexStr) if err != nil { return nil, err } num := hexNum.Uint64() return &Epoch{ Length: (num >> 40) & 0xFFFF, Index: (num >> 24) & 0xFFFF, Number: num & 0xFFFFFF, }, nil }
alessandrodalbello/google-hash-code
practice-2020/src/main/java/org/hashcode/practice2020/solvers/RepetitiveGreedySolver.java
<gh_stars>1-10 package org.hashcode.practice2020.solvers; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import org.hashcode.practice2020.model.Pizza; import org.hashcode.practice2020.model.PizzaInput; import org.hashcode.practice2020.model.PizzaOutput; import org.hashcode.Solver; public class RepetitiveGreedySolver implements Solver<PizzaInput, PizzaOutput> { private final GreedySolver greedySolver; public RepetitiveGreedySolver() { greedySolver = new GreedySolver(); } @Override public PizzaOutput solve(PizzaInput inputData) { final int totalMaxSlices = inputData.getMaxSlices(); Map<Integer, Pizza> remainingPizzas = new HashMap<>(inputData.getPizzas()); PizzaOutput bestSolutionFound = new PizzaOutput(List.of()); boolean isBetterSolutionPossible; do { List<Pizza> pizzas = List.copyOf(remainingPizzas.values()); PizzaOutput greedySolution = greedySolver.solve(new PizzaInput(totalMaxSlices, pizzas)); if (bestSolutionFound.getSolutionScore() < greedySolution.getSolutionScore()) { bestSolutionFound = greedySolution; } int pizzaTypeToRemove = remainingPizzas.size() - 1; if (remainingPizzas.containsKey(pizzaTypeToRemove)) { remainingPizzas.remove(pizzaTypeToRemove); } else { remainingPizzas.remove(pizzaTypeToRemove - 1); } isBetterSolutionPossible = isBetterSolutionPossible(totalMaxSlices, bestSolutionFound, remainingPizzas.values()); } while (isBetterSolutionPossible); return bestSolutionFound; } private boolean isBetterSolutionPossible(int maxSlices, PizzaOutput bestSolutionFound, Collection<Pizza> pizzas) { return bestSolutionFound.getSolutionScore() < maxSlices && totalSlices(pizzas) > bestSolutionFound.getSolutionScore(); } private int totalSlices(Collection<Pizza> pizzas) { return pizzas.stream() .reduce(0, (totalSlices, pizza) -> totalSlices += pizza.getSlices(), Integer::sum); } }
devibizsys/IBizSysRuntime
saibz5base/src/main/java/net/ibizsys/paas/core/valuerule/IDEFValueRuleEngine.java
package net.ibizsys.paas.core.valuerule;

import net.ibizsys.paas.core.IActionContext;
import net.ibizsys.paas.util.IGlobalContext;

/**
 * Engine that evaluates entity-field value rules (DEF value rules).
 */
public interface IDEFValueRuleEngine {

	/**
	 * Initializes the engine.
	 *
	 * @param iGlobalContext the global runtime context
	 * @param iDEFValueRule the value rule this engine evaluates
	 * @throws Exception if initialization fails
	 */
	void init(IGlobalContext iGlobalContext,IDEFValueRule iDEFValueRule) throws Exception;

	/**
	 * Tests whether the rule's condition holds in the given action context.
	 *
	 * @param iActionContext the action context to evaluate against
	 * @return true when the condition holds
	 * @throws Exception if evaluation fails
	 */
	boolean testCondition(IActionContext iActionContext) throws Exception;

	/**
	 * Shuts the engine down and releases its resources.
	 */
	void close();
}
aajisaka/maven-artifact-transfer
src/main/java/org/apache/maven/shared/transfer/dependencies/resolve/internal/Maven30DependencyResolver.java
package org.apache.maven.shared.transfer.dependencies.resolve.internal;

/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;

import org.apache.maven.RepositoryUtils;
import org.apache.maven.artifact.handler.ArtifactHandler;
import org.apache.maven.artifact.handler.manager.ArtifactHandlerManager;
import org.apache.maven.model.DependencyManagement;
import org.apache.maven.model.Model;
import org.apache.maven.shared.artifact.filter.resolve.TransformableFilter;
import org.apache.maven.shared.artifact.filter.resolve.transform.SonatypeAetherFilterTransformer;
import org.apache.maven.shared.transfer.dependencies.DependableCoordinate;
import org.apache.maven.shared.transfer.dependencies.resolve.DependencyResolverException;
import org.sonatype.aether.RepositorySystem;
import org.sonatype.aether.RepositorySystemSession;
import org.sonatype.aether.artifact.Artifact;
import org.sonatype.aether.artifact.ArtifactType;
import org.sonatype.aether.artifact.ArtifactTypeRegistry;
import org.sonatype.aether.collection.CollectRequest;
import org.sonatype.aether.collection.DependencyCollectionException;
import org.sonatype.aether.graph.Dependency;
import org.sonatype.aether.graph.DependencyFilter;
import org.sonatype.aether.repository.RemoteRepository;
import org.sonatype.aether.resolution.ArtifactResolutionException;
import org.sonatype.aether.resolution.ArtifactResult;
import org.sonatype.aether.util.artifact.DefaultArtifact;
import org.sonatype.aether.util.artifact.DefaultArtifactType;

/**
 * {@link MavenDependencyResolver} implementation for Maven 3.0.x, backed by
 * Sonatype Aether. Calls into {@link RepositoryUtils} reflectively (via
 * {@code Invoker}) because those method signatures changed between Maven
 * versions, so a direct compile-time reference would bind to the wrong one.
 */
class Maven30DependencyResolver
    implements MavenDependencyResolver
{
    /** Argument types for the reflective {@code RepositoryUtils.toDependency} call. */
    private static final Class<?>[] ARG_CLASSES =
        new Class<?>[] {org.apache.maven.model.Dependency.class, ArtifactTypeRegistry.class};

    private final RepositorySystem repositorySystem;

    private final ArtifactHandlerManager artifactHandlerManager;

    private final RepositorySystemSession session;

    private final List<RemoteRepository> aetherRepositories;

    Maven30DependencyResolver( RepositorySystem repositorySystem, ArtifactHandlerManager artifactHandlerManager,
                               RepositorySystemSession session, List<RemoteRepository> aetherRepositories )
    {
        super();
        this.repositorySystem = repositorySystem;
        this.artifactHandlerManager = artifactHandlerManager;
        this.session = session;
        this.aetherRepositories = aetherRepositories;
    }

    /**
     * Based on RepositoryUtils#toDependency(org.apache.maven.model.Dependency, ArtifactTypeRegistry)
     *
     * @param coordinate {@link DependableCoordinate}
     * @param stereotypes {@link ArtifactTypeRegistry}
     * @return as Aether Dependency
     */
    private static Dependency toDependency( DependableCoordinate coordinate, ArtifactTypeRegistry stereotypes )
    {
        // Fall back to a synthetic stereotype when the registry has no entry for this type.
        ArtifactType stereotype = stereotypes.get( coordinate.getType() );
        if ( stereotype == null )
        {
            stereotype = new DefaultArtifactType( coordinate.getType() );
        }

        Artifact artifact = new DefaultArtifact( coordinate.getGroupId(), coordinate.getArtifactId(),
                                                 coordinate.getClassifier(), null, coordinate.getVersion(), null,
                                                 stereotype );

        return new Dependency( artifact, null );
    }

    /**
     * Converts a Maven model dependency to an Aether dependency via the
     * reflective {@code RepositoryUtils.toDependency} call.
     */
    private static Dependency toDependency( org.apache.maven.model.Dependency mavenDependency,
                                            ArtifactTypeRegistry typeRegistry )
        throws DependencyResolverException
    {
        Object[] args = new Object[] {mavenDependency, typeRegistry};

        return Invoker.invoke( RepositoryUtils.class, "toDependency", ARG_CLASSES, args );
    }

    @Override
    // CHECKSTYLE_OFF: LineLength
    public Iterable<org.apache.maven.shared.transfer.artifact.resolve.ArtifactResult> resolveDependencies( DependableCoordinate coordinate,
                                                                                                           TransformableFilter dependencyFilter )
    // CHECKSTYLE_ON: LineLength
        throws DependencyResolverException
    {
        ArtifactTypeRegistry typeRegistry = createTypeRegistry();

        Dependency aetherRoot = toDependency( coordinate, typeRegistry );

        CollectRequest request = new CollectRequest( aetherRoot, aetherRepositories );

        return resolveDependencies( dependencyFilter, request );
    }

    /** Builds an {@link ArtifactTypeRegistry} reflectively (signature differs across Maven versions). */
    private ArtifactTypeRegistry createTypeRegistry()
        throws DependencyResolverException
    {
        return Invoker.invoke( RepositoryUtils.class, "newArtifactTypeRegistry",
                               ArtifactHandlerManager.class, artifactHandlerManager );
    }

    @Override
    // CHECKSTYLE_OFF: LineLength
    public Iterable<org.apache.maven.shared.transfer.artifact.resolve.ArtifactResult> resolveDependencies( Model model,
                                                                                                           TransformableFilter dependencyFilter )
    // CHECKSTYLE_ON: LineLength
        throws DependencyResolverException
    {
        // Are there examples where packaging and type are NOT in sync
        ArtifactHandler artifactHandler = artifactHandlerManager.getArtifactHandler( model.getPackaging() );

        String extension = artifactHandler != null ? artifactHandler.getExtension() : null;

        Artifact aetherArtifact =
            new DefaultArtifact( model.getGroupId(), model.getArtifactId(), extension, model.getVersion() );

        Dependency aetherRoot = new Dependency( aetherArtifact, null );

        CollectRequest request = new CollectRequest( aetherRoot, aetherRepositories );

        request.setDependencies( resolveDependencies( model.getDependencies() ) );

        DependencyManagement mavenDependencyManagement = model.getDependencyManagement();
        if ( mavenDependencyManagement != null )
        {
            request.setManagedDependencies( resolveDependencies( mavenDependencyManagement.getDependencies() ) );
        }

        return resolveDependencies( dependencyFilter, request );
    }

    /**
     * @param mavenDependencies {@link org.apache.maven.model.Dependency} can be {@code null}.
     * @return List of resolved dependencies.
     * @throws DependencyResolverException in case of a failure of the typeRegistry error.
     */
    // CHECKSTYLE_OFF: LineLength
    private List<Dependency> resolveDependencies( Collection<org.apache.maven.model.Dependency> mavenDependencies )
        throws DependencyResolverException
    // CHECKSTYLE_ON: LineLength
    {
        if ( mavenDependencies == null )
        {
            return Collections.emptyList();
        }

        ArtifactTypeRegistry typeRegistry = createTypeRegistry();

        List<Dependency> aetherDependencies = new ArrayList<>( mavenDependencies.size() );

        for ( org.apache.maven.model.Dependency mavenDependency : mavenDependencies )
        {
            aetherDependencies.add( toDependency( mavenDependency, typeRegistry ) );
        }

        return aetherDependencies;
    }

    @Override
    // CHECKSTYLE_OFF: LineLength
    public Iterable<org.apache.maven.shared.transfer.artifact.resolve.ArtifactResult> resolveDependencies( Collection<org.apache.maven.model.Dependency> mavenDependencies,
                                                                                                           Collection<org.apache.maven.model.Dependency> managedMavenDependencies,
                                                                                                           TransformableFilter filter )
    // CHECKSTYLE_ON: LineLength
        throws DependencyResolverException
    {
        List<Dependency> aetherDependencies = resolveDependencies( mavenDependencies );

        List<Dependency> aetherManagedDependencies = resolveDependencies( managedMavenDependencies );

        CollectRequest request =
            new CollectRequest( aetherDependencies, aetherManagedDependencies, aetherRepositories );

        return resolveDependencies( filter, request );
    }

    // CHECKSTYLE_OFF: LineLength
    private Iterable<org.apache.maven.shared.transfer.artifact.resolve.ArtifactResult> resolveDependencies( TransformableFilter dependencyFilter,
                                                                                                            CollectRequest request )
        throws DependencyResolverException
    // CHECKSTYLE_ON :LineLength
    {
        try
        {
            DependencyFilter depFilter = null;
            if ( dependencyFilter != null )
            {
                depFilter = dependencyFilter.transform( new SonatypeAetherFilterTransformer() );
            }

            final List<ArtifactResult> dependencyResults =
                repositorySystem.resolveDependencies( session, request, depFilter );

            // Keep it lazy! Often artifactsResults aren't used, so transforming up front is too expensive
            return new Iterable<org.apache.maven.shared.transfer.artifact.resolve.ArtifactResult>()
            {
                @Override
                public Iterator<org.apache.maven.shared.transfer.artifact.resolve.ArtifactResult> iterator()
                {
                    // CHECKSTYLE_OFF: LineLength
                    Collection<org.apache.maven.shared.transfer.artifact.resolve.ArtifactResult> artResults =
                        new ArrayList<>( dependencyResults.size() );
                    // CHECKSTYLE_ON: LineLength

                    for ( ArtifactResult artifactResult : dependencyResults )
                    {
                        artResults.add( new Maven30ArtifactResult( artifactResult ) );
                    }

                    return artResults.iterator();
                }
            };
        }
        catch ( ArtifactResolutionException e )
        {
            throw new Maven30DependencyResolverException( e );
        }
        catch ( DependencyCollectionException e )
        {
            throw new Maven30DependencyResolverException( e );
        }
    }
}
giiita/scaladia
refuel-container/src/main/scala-2/refuel/internal/AtomicUpdater.scala
package refuel.internal

import java.util.concurrent.atomic.AtomicReferenceFieldUpdater
import java.util.function.UnaryOperator

import refuel.inject.exception.UnExceptedOperateException

import scala.annotation.tailrec

/**
 * Mixin giving a self-type `U` lock-free, CAS-based access to one of its
 * fields of type `W` through an [[AtomicReferenceFieldUpdater]].
 */
trait AtomicUpdater[U, W] { self: U =>

  /** Updater bound to the concrete field of `U` being managed atomically. */
  val updater: AtomicReferenceFieldUpdater[U, W]

  /**
   * Recursive, atomic updates.
   * If there is an unguaranteed update of atomicity, it may loop indefinitely.
   *
   * @param f Update function.
   * @return The value that was successfully installed.
   */
  @tailrec
  protected final def atomicUpdate(f: W => W): W = {
    val ref = getRef
    val nw  = f(snapshot(ref))
    if (compareAndSet(ref, nw)) nw else atomicUpdate(f)
  }

  /**
   * Provides a way to create a snapshot if necessary.
   * Usually not overridden, in which case it throws.
   *
   * The default accessors hand out the stored reference directly, so mutating
   * it may affect the value other threads observe. To prevent this, override
   * [[snapshot]] and combine it with [[compareAndSet]]:
   *
   * {{{
   * override def snapshot(w: W): W = w.snapshot()
   *
   * val old = getRef
   * val newRef = get
   * newRef.update(x -> y)
   * compareAndSet(old, newRef)
   * }}}
   *
   * @param w value type
   * @return
   */
  protected def snapshot(w: W): W = throw new UnExceptedOperateException("Snapshot method is not defined.")

  /**
   * Update only if the existing data is in the expected state.
   * If it is not in the expected state, it will not be updated.
   *
   * @param o Expected symbol.
   * @param n Update symbol.
   * @return
   */
  protected def compareAndSet(o: W, n: W): Boolean = updater.compareAndSet(this, o, n)

  /**
   * Returns a new reference that is the result of the snapshot.
   *
   * @return
   */
  protected def get: W = snapshot(getRef)

  /**
   * Gets the current value held in the field of the given object managed
   * by this updater.
   *
   * @return the current value
   */
  protected def getRef: W = updater.get(this)

  /**
   * Sets the field of the given object managed by this updater to the
   * given updated value. This operation is guaranteed to act as a volatile
   * store with respect to subsequent invocations of [[compareAndSet]].
   *
   * @param n the new value
   */
  protected def set(n: W): Unit = updater.set(this, n)

  /**
   * Atomically sets the field of the given object managed by this updater
   * to the given value and returns the old value.
   *
   * @param n the new value
   * @return the previous value
   */
  protected def getAndSet(n: W): W = updater.getAndSet(this, n)

  /**
   * Atomically updates the field of the given object managed by this updater
   * with the results of applying the given function, returning the previous
   * value. The function should be side-effect-free, since it may be
   * re-applied when attempted updates fail due to contention among threads.
   *
   * @param nf a side-effect-free function
   * @return the previous value
   * @since 1.8
   */
  protected def getAndUpdate(nf: UnaryOperator[W]): W = updater.getAndUpdate(this, nf)
}
mayth/parasol
app/views/admin/adjustments/show.json.jbuilder
# Renders a single point adjustment (id, point, reason, challenge and timestamps) as JSON.
json.extract! @adjustment, :id, :point, :reason, :challenge_id, :created_at, :updated_at
raviagarwal7/r8
src/main/java/com/android/tools/r8/utils/OrderedMergingIterator.java
<gh_stars>1-10 // Copyright (c) 2016, the R8 project authors. Please see the AUTHORS file // for details. All rights reserved. Use of this source code is governed by a // BSD-style license that can be found in the LICENSE file. package com.android.tools.r8.utils; import com.android.tools.r8.errors.InternalCompilerError; import com.android.tools.r8.graph.KeyedDexItem; import com.android.tools.r8.graph.PresortedComparable; import java.util.Iterator; import java.util.List; import java.util.NoSuchElementException; public class OrderedMergingIterator<T extends KeyedDexItem<S>, S extends PresortedComparable<S>> implements Iterator<T> { private final List<T> one; private final List<T> other; private int oneIndex = 0; private int otherIndex = 0; public OrderedMergingIterator(List<T> one, List<T> other) { this.one = one; this.other = other; } private static <T> T getNextChecked(List<T> list, int position) { if (position >= list.size()) { throw new NoSuchElementException(); } return list.get(position); } @Override public boolean hasNext() { return oneIndex < one.size() || otherIndex < other.size(); } @Override public T next() { if (oneIndex >= one.size()) { return getNextChecked(other, otherIndex++); } if (otherIndex >= other.size()) { return getNextChecked(one, oneIndex++); } int comparison = one.get(oneIndex).getKey().compareTo(other.get(otherIndex).getKey()); if (comparison < 0) { return one.get(oneIndex++); } if (comparison == 0) { throw new InternalCompilerError("Source arrays are not disjoint."); } return other.get(otherIndex++); } }
tbepler/JProbe
src/chiptools/jprobe/function/mutationprofiler/KmerLibraryArg.java
package chiptools.jprobe.function.mutationprofiler;

import java.io.File;

import jprobe.services.function.Function;
import chiptools.jprobe.function.ChiptoolsDirArg;

/**
 * Argument selecting the k-mer library location for the mutation profiler:
 * accepts either a directory or a single readable file.
 */
public class KmerLibraryArg extends ChiptoolsDirArg<MutationProfilerParams>{

	public KmerLibraryArg(Function<?> parent, boolean optional) {
		super(parent.getClass(), KmerLibraryArg.class, optional);
	}

	@Override
	public boolean isValid(File f){
		if (f == null) {
			return false;
		}
		return f.isDirectory() || f.canRead();
	}

	@Override
	protected void process(MutationProfilerParams params, File f) {
		// Record the chosen k-mer library location on the params object.
		params.kmerLibrary = f;
	}

}
ArnoVanDerVegt/MVM
js/frontend/ide/plugins/simulator/graph/io/ColorBarChartDrawer.js
<reponame>ArnoVanDerVegt/MVM<gh_stars>10-100 /** * Wheel, copyright (c) 2020 - present by <NAME> * Distributed under an MIT license: https://arnovandervegt.github.io/wheel/license.txt **/ const ChartDrawer = require('./ChartDrawer').ChartDrawer; const colors = [ '0,0,0', // 0 Black '0,0,0', // 1 Black '0,50,255', // 2 Blue '76,209,55', // 3 Green '241,196,15', // 4 Yellow '232,65,24', // 5 Red '255,255,255', // 6 White '150,75,0', // 7 Brown '238,130,238', // 8 Violet '50,127,200' // 9 Azure ]; exports.ColorBarChartDrawer = class extends ChartDrawer { draw(buffer, maxValue) { let context = this._context; let index = buffer.getCurrentOffset(); for (let i = 0; i < 21; i++) { if (i >= buffer.getCurrentSize()) { break; } let value = Math.min(buffer.getValue(2 + index++), maxValue); if (value < 1) { continue; } if (value in colors) { let color = colors[value]; let x = i * 13 + 2; let y = 48 - 12; context.fillStyle = 'rgb(' + color + ')'; context.strokeStyle = 'rgba(' + color + ', 0.6)'; context.lineWidth = 0.5; context.fillRect(x, y, 9, 24); context.strokeRect(x + 0.5, y, 9, 24); } } } };
TheRayKid101/Prehistoric-Dinosaur-Biomes
src/main/java/net/eymbra/prehistoric/EymbraClientPrehistoric.java
<filename>src/main/java/net/eymbra/prehistoric/EymbraClientPrehistoric.java package net.eymbra.prehistoric; import net.eymbra.blocks.EymbraBlocks; import net.eymbra.entities.EymbraEntities; import net.eymbra.entities.renderer.AnkylosaurusEntityRenderer; import net.eymbra.entities.renderer.DodoEntityRenderer; import net.eymbra.entities.renderer.DragonflyEntityRenderer; import net.eymbra.entities.renderer.HadrosaurEntityRenderer; import net.eymbra.entities.renderer.IchthyosaurusEntityRenderer; import net.eymbra.entities.renderer.PachycepalosaurusEntityRenderer; import net.eymbra.entities.renderer.TarSlimeEntityRenderer; import net.eymbra.gui.screen.EymbraScreenHanderType; import net.eymbra.particles.EymbraCustomSuspendParticle; import net.eymbra.particles.EymbraParticles; import net.eymbra.prehistoric.mixin.SkyPropertiesAccessor; import net.eymbra.sounds.EymbraSoundEvents; import net.fabricmc.api.ClientModInitializer; import net.fabricmc.api.EnvType; import net.fabricmc.api.Environment; import net.fabricmc.fabric.api.blockrenderlayer.v1.BlockRenderLayerMap; import net.fabricmc.fabric.api.client.particle.v1.ParticleFactoryRegistry; import net.fabricmc.fabric.api.client.rendereregistry.v1.EntityRendererRegistry; import net.fabricmc.fabric.api.client.rendering.v1.ColorProviderRegistry; import net.minecraft.block.BlockState; import net.minecraft.client.color.world.BiomeColors; import net.minecraft.client.color.world.GrassColors; import net.minecraft.client.render.RenderLayer; import net.minecraft.client.render.SkyProperties; import net.minecraft.client.render.SkyProperties.SkyType; import net.minecraft.client.sound.PositionedSoundInstance; import net.minecraft.item.BlockItem; import net.minecraft.util.Identifier; import net.minecraft.util.math.BlockPos; import net.minecraft.util.math.Vec3d; import net.minecraft.world.BlockRenderView; @Environment(EnvType.CLIENT) public class EymbraClientPrehistoric implements ClientModInitializer { @Environment(EnvType.CLIENT) public 
static final PositionedSoundInstance TM_OPEN = PositionedSoundInstance.ambient(EymbraSoundEvents.TIME_MACHINE_OPEN); @Environment(EnvType.CLIENT) public static final PositionedSoundInstance TM_CLOSE = PositionedSoundInstance.ambient(EymbraSoundEvents.TIME_MACHINE_CLOSE); @Override public void onInitializeClient() { EymbraScreenHanderType.init(); BlockRenderLayerMap.INSTANCE.putBlock(EymbraBlocks.PREHISTORIC_SHORT_BUSH, RenderLayer.getCutout()); BlockRenderLayerMap.INSTANCE.putBlock(EymbraBlocks.PREHISTORIC_DEAD_BUSH, RenderLayer.getCutout()); BlockRenderLayerMap.INSTANCE.putBlock(EymbraBlocks.PREHISTORIC_PORTAL_BLOCK, RenderLayer.getTranslucent()); BlockRenderLayerMap.INSTANCE.putBlock(EymbraBlocks.PREHISTORIC_CALAMITES_SAPLING, RenderLayer.getCutout()); BlockRenderLayerMap.INSTANCE.putBlock(EymbraBlocks.PREHISTORIC_DARKWOOD_SAPLING, RenderLayer.getCutout()); BlockRenderLayerMap.INSTANCE.putBlock(EymbraBlocks.PREHISTORIC_LEPIDODENDRALES_SAPLING, RenderLayer.getCutout()); BlockRenderLayerMap.INSTANCE.putBlock(EymbraBlocks.PREHISTORIC_MANGROVE_SAPLING, RenderLayer.getCutout()); BlockRenderLayerMap.INSTANCE.putBlock(EymbraBlocks.PREHISTORIC_CALAMITES_LEAVES, RenderLayer.getCutoutMipped()); BlockRenderLayerMap.INSTANCE.putBlock(EymbraBlocks.PREHISTORIC_LEPIDODENDRALES_LEAVES, RenderLayer.getCutoutMipped()); BlockRenderLayerMap.INSTANCE.putBlock(EymbraBlocks.PREHISTORIC_DARKWOOD_LEAVES, RenderLayer.getCutoutMipped()); BlockRenderLayerMap.INSTANCE.putBlock(EymbraBlocks.PREHISTORIC_MANGROVE_LEAVES, RenderLayer.getCutoutMipped()); BlockRenderLayerMap.INSTANCE.putBlock(EymbraBlocks.PREHISTORIC_GRASS_BLOCK, RenderLayer.getCutoutMipped()); EntityRendererRegistry.INSTANCE.register(EymbraEntities.HADROSAUR, (entityRenderDispatcher, context) -> new HadrosaurEntityRenderer(entityRenderDispatcher)); EntityRendererRegistry.INSTANCE.register(EymbraEntities.DRAGONFLY, (entityRenderDispatcher, context) -> new DragonflyEntityRenderer(entityRenderDispatcher)); 
EntityRendererRegistry.INSTANCE.register(EymbraEntities.TAR_SLIME, (entityRenderDispatcher, context) -> new TarSlimeEntityRenderer(entityRenderDispatcher)); EntityRendererRegistry.INSTANCE.register(EymbraEntities.ICHTHYOSAURUS, (entityRenderDispatcher, context) -> new IchthyosaurusEntityRenderer(entityRenderDispatcher)); EntityRendererRegistry.INSTANCE.register(EymbraEntities.PACHYCEPALOSAURUS, (entityRenderDispatcher, context) -> new PachycepalosaurusEntityRenderer(entityRenderDispatcher)); EntityRendererRegistry.INSTANCE.register(EymbraEntities.DODO, (entityRenderDispatcher, context) -> new DodoEntityRenderer(entityRenderDispatcher)); EntityRendererRegistry.INSTANCE.register(EymbraEntities.ANKYLOSAURUS, (entityRenderDispatcher, context) -> new AnkylosaurusEntityRenderer(entityRenderDispatcher)); ParticleFactoryRegistry.getInstance().register(EymbraParticles.RED_SAND, EymbraCustomSuspendParticle.RedSandFactory::new); ParticleFactoryRegistry.getInstance().register(EymbraParticles.RAINFOREST_DUST, EymbraCustomSuspendParticle.RainforestDustFactory::new); ColorProviderRegistry.BLOCK.register((state, world, pos, tintIndex) -> { return world != null && pos != null ? 
BiomeColors.getGrassColor(world, pos) : GrassColors.getColor(0.5D, 1.0D); }, EymbraBlocks.PREHISTORIC_GRASS_BLOCK, EymbraBlocks.PREHISTORIC_SHORT_BUSH); ColorProviderRegistry.ITEM.register((stack, tintIndex) -> { BlockState blockState = ((BlockItem) stack.getItem()).getBlock().getDefaultState(); return ColorProviderRegistry.BLOCK.get(EymbraBlocks.PREHISTORIC_GRASS_BLOCK).getColor(blockState, (BlockRenderView) null, (BlockPos) null, tintIndex); }, EymbraBlocks.PREHISTORIC_GRASS_BLOCK, EymbraBlocks.PREHISTORIC_SHORT_BUSH); SkyPropertiesAccessor.getIdentifier().put(new Identifier(EymbraPrehistoric.MODID, "sky_property"), new SkyProperties(128.0F, true, SkyType.NORMAL, true, false) { @Override public Vec3d adjustFogColor(Vec3d color, float sunHeight) { return color.multiply((sunHeight * 0.94F + 0.06F) * 0.8, (sunHeight * 0.94F + 0.06F) * 0.8, (sunHeight * 0.91F + 0.09F) * 0.8); } @Override public boolean useThickFog(int camX, int camY) { return true; } }); } }
Gjoll/OpSys
Src/IO/Command_Command.cpp
<reponame>Gjoll/OpSys /* Copyright 1992 <NAME>. This is an unpublished work by <NAME>. All rights are reserved, and no part of this work may be distributed or released with out the express permission of Kurt <NAME>, 3540 43'rd Ave S., Mpls, Mn 55406. */ /* $Revision: 1.1 $ $Modtime: 01 May 1993 09:32:50 $ $Workfile: command.cpp $ */ #include <OsConfig.h> #include <OsAssert.h> #include <Base.h> #include <ptrBlock.h> #include <Task.h> #include <Block.h> #include <Semaphore.h> #include <OpSys.h> #include <CmdHdr.h> #include <IoBuf.h> #include <CPipe.h> #include <CmdPipe.h> #include <OsTypes.h> #include <CmdHandler.h> #include <Command.h> /* Command::Command Description: Class: Command. Command constructor. This links the Command object to a CmdHandler, stored the command name, and computed the additive checksum of the command name (used to speed up command comparisons). Parameters: main CmdHandler to link this command to. str Command name of this command. */ Command::Command(CmdHandler* main, const char* str) : cmdStr(str) { register const char* cPtr = cmdStr; cmdSum = 0; while (*cPtr != '\0') cmdSum += *cPtr++; main->link(this); } /* Command::~Command Description: Class: Command. Command destructor. */ Command::~Command() { }
aquelemesmo/lipebot
src/commands/utils/ajuda.js
const { MessageEmbed, MessageActionRow, MessageButton } = require("discord.js")
const botoes = require("../../json/botoesCollectors.json")

// "ajuda" (help) command: replies with a category panel; each button swaps
// the embed for that category's command list.
module.exports.run = async (bot, message, args) => {

    const row = new MessageActionRow()
        .addComponents(new MessageButton().setStyle('SUCCESS').setCustomId(botoes.ajudaCommand.home).setEmoji("🏡"))
        .addComponents(new MessageButton().setStyle('SUCCESS').setCustomId(botoes.ajudaCommand.id).setEmoji("🍃"))
        .addComponents(new MessageButton().setStyle('SUCCESS').setCustomId(botoes.ajudaCommand.id2).setEmoji("👮"))
        .addComponents(new MessageButton().setStyle('SUCCESS').setCustomId(botoes.ajudaCommand.id3).setEmoji("💰"))
        .addComponents(new MessageButton().setStyle('SUCCESS').setCustomId(botoes.ajudaCommand.id4).setEmoji("⚙️"))

    const row2 = new MessageActionRow()
        .addComponents(new MessageButton().setStyle('SUCCESS').setCustomId(botoes.ajudaCommand.id5).setEmoji("🎮"))
        .addComponents(new MessageButton().setStyle('SUCCESS').setCustomId(botoes.ajudaCommand.id6).setEmoji("<a:minecraft:780777822435672084>"))

    const embed = new MessageEmbed()
        .setColor("PURPLE")
        .setTitle("Meu painel de controle")
        .setThumbnail(bot.user.displayAvatarURL())
        .setDescription("Esse aqui e meu painel de controle onde você pode ver as categorias, e dentro dele tem meus comandos e minhas funcionalidades!\nCaso você encontrou um bug, reporte usando ``lp!report-bug``\n\nE eu possuo " + bot.commands.size + " comandos!")
        .addFields(
            {name: "> 🍃・Utilidades", value: 'Acessar as categorias utilidades', inline: false},
            {name: "> 👮・Moderação", value: 'Apenas staffs podem usar esse comando', inline: false},
            {name: "> 💰・Economia", value: 'Vamos brincar de economia!', inline: false},
            {name: "> ⚙️・Configuração", value: 'Posso configurar seu servidor para ficar bonito', inline: false},
            {name: "> 🎮・Jogos", value: 'Você pode jogar alguns joguinhos divertidos', inline: false},
            {name: "> <a:minecraft:780777822435672084>・Minecraft", value: 'Mostrar informações sobre o Minecraft!', inline: false}
        )

    await message.reply({embeds: [embed], components: [row, row2]})

    // BUGFIX: the collector option key is `filter`. The original passed the
    // predicate as shorthand ({filtroHome} => {filtroHome: fn}), so discord.js
    // ignored it and ANY user could drive this panel. Each predicate restricts
    // the collector to the matching button pressed by the command author.
    const filtroHome = m => m.customId === botoes.ajudaCommand.home && m.user.id === message.author.id;
    const collectorHome = message.channel.createMessageComponentCollector({ filter: filtroHome })

    const filtro = m => m.customId === botoes.ajudaCommand.id && m.user.id === message.author.id;
    const collector = message.channel.createMessageComponentCollector({ filter: filtro })

    const filtro2 = m => m.customId === botoes.ajudaCommand.id2 && m.user.id === message.author.id;
    const collector2 = message.channel.createMessageComponentCollector({ filter: filtro2 })

    const filtro3 = m => m.customId === botoes.ajudaCommand.id3 && m.user.id === message.author.id;
    const collector3 = message.channel.createMessageComponentCollector({ filter: filtro3 })

    const filtro4 = m => m.customId === botoes.ajudaCommand.id4 && m.user.id === message.author.id;
    const collector4 = message.channel.createMessageComponentCollector({ filter: filtro4 })

    const filtro5 = m => m.customId === botoes.ajudaCommand.id5 && m.user.id === message.author.id;
    const collector5 = message.channel.createMessageComponentCollector({ filter: filtro5 })

    const filtro6 = m => m.customId === botoes.ajudaCommand.id6 && m.user.id === message.author.id;
    const collector6 = message.channel.createMessageComponentCollector({ filter: filtro6 })

    // NOTE(review): the literal customId strings below ('HomeButton',
    // 'UtilsButton', ...) must match the ids loaded from
    // botoesCollectors.json — confirm they stay in sync with that file.
    collectorHome.on(`collect`, async m => {
        if (m.customId === 'HomeButton') {
            await m.deferUpdate()
            await m.editReply({embeds: [embed]})
        }
    })

    collector.on(`collect`, async m => {
        if (m.customId === 'UtilsButton') {
            const utilsEmbed = new MessageEmbed()
                .setColor("PURPLE")
                .setTitle("Categoria Úteis")
                .setDescription("``` botinfo | help | invite | perfil | mcserver | ping | rep | reportar-bug | sugerir-bot | sugerir | userinfo```")
            await m.deferUpdate()
            await m.editReply({embeds: [utilsEmbed]})
        }
    })

    collector2.on(`collect`, async m => {
        if (m.customId === 'ModButton') {
            const modEmbed = new MessageEmbed()
                .setColor("PURPLE")
                .setTitle("Categoria Moderação")
                .setDescription("``` ban | kick | mute | unban | unmute | warn ```")
            await m.deferUpdate()
            await m.editReply({embeds: [modEmbed]})
        }
    })

    collector3.on(`collect`, async m => {
        if (m.customId === 'EcoButton') {
            const ecoEmbed = new MessageEmbed()
                .setColor("PURPLE")
                .setTitle("Categoria Economia")
                .setDescription("``` addmoney | daily | depall | hack | removemoney | roubar | work ```")
            await m.deferUpdate()
            await m.editReply({embeds: [ecoEmbed]})
        }
    })

    collector4.on(`collect`, async m => {
        if (m.customId === 'ConfigButton') {
            const configEmbed = new MessageEmbed()
                .setColor("PURPLE")
                .setTitle("Categoria Configuração")
                .setDescription("``` setbooster | setleave | setsugerir | setwelcome ```")
            await m.deferUpdate()
            await m.editReply({embeds: [configEmbed]})
        }
    })

    collector5.on(`collect`, async m => {
        if (m.customId === 'GameButton') {
            const gameEmbed = new MessageEmbed()
                .setColor("PURPLE")
                .setTitle("Categoria Jogos")
                .setDescription("``` chess | poker | snake ```")
            await m.deferUpdate()
            await m.editReply({embeds: [gameEmbed]})
        }
    })

    collector6.on(`collect`, async m => {
        if (m.customId === 'MinecraftButton') {
            const mcEmbed = new MessageEmbed()
                .setColor("PURPLE")
                // BUGFIX: title previously said "Categoria Jogos" (copy-paste
                // from the games embed); this is the Minecraft category.
                .setTitle("Categoria Minecraft")
                .setDescription("``` mchead | mcserver | mcskin ```")
            await m.deferUpdate()
            await m.editReply({embeds: [mcEmbed]})
        }
    })
}

module.exports.help = {
    name: "ajuda",
    aliases: ["comandos", "help"],
    description: "Mostra todos os comandos do bot"
}
imkaywu/open3DCV
src/camera/pinhole_camera.h
#ifndef open3dcv_pinhole_camera_h_ #define open3dcv_pinhole_camera_h_ #include <iostream> #include <vector> #include "math/numeric.h" #include "camera/camera.h" namespace open3DCV { class PinholeCamera : public Camera { public: PinholeCamera(); PinholeCamera(const std::vector<float>& r_intrinsics, const std::vector<float>& r_extrinsics); PinholeCamera(const Mat34f& r_projection); PinholeCamera(const Mat3f& R, const Vec3f& t); PinholeCamera(const Mat3f& K, const Mat3f& R, const Vec3f& t); virtual ~PinholeCamera(); void update_projection(); void update_parameters(); void update_matrices(const int is_proj = 1); void update_center(); void update_axes(); int decompose(Mat3f & K, Mat3f& R) const; const Mat34f& projection() const; Mat34f& projection(); const Vec3f& direction() const; Vec3f& direction(); const Vec3f& center() const; Vec3f& center(); Vec3f pixel2ray(const Vec2f& pixel) const; Vec3f project(const Vec4f& coord) const; Vec4f unproject(const Vec3f& icoord) const; float compute_depth(const Vec4f& coord) const; // float getScale(const Vec4f& coord, const int level) const; // get patch axes // void getPAxes(const Vecf& coord, const Vec4f& normal, Vec4f& pxaxis, Vec4f& pyaxis, const int level = 0) const; // void setAxisScale(const float axisScale); private: Vec3f center_; // optical center Vec3f oaxis_; // optical axis Vec3f xaxis_; // x-axis of the camera-centered coordinate system Vec3f yaxis_; // y-axis of the camera-centered coordinate system Vec3f zaxis_; // z-axis of the camera-centered coordinate system Mat34f projection_; // projection matrix Mat3f K_; // intrinsic matrix Mat3f R_; // rotation Vec3f om_; // axis-angle Vec3f t_; // translation std::vector<float> intrinsics_; // intrinsic params: f_x, f_y, c_x, c_y std::vector<float> extrinsics_; // extrinsic params: om(0), om(1), om(2), t(0), t(1), t(2); // currently not used, from PMVS float ip_scale_; // image plane scale (fx + fy), used to compute the projection/scene-image scale float axis_scale_; }; } 
#endif
petr-panteleyev/java-api-for-mysql
src/test/java/org/panteleyev/mysqlapi/TableCreationTest.java
package org.panteleyev.mysqlapi;

/*
 * Copyright (c) <NAME>. All rights reserved.
 * Licensed under the BSD license. See LICENSE file in the project root for full license information.
 */

import org.panteleyev.mysqlapi.model.ImmutableRecord;
import org.panteleyev.mysqlapi.model.ImmutableRecordWithPrimitives;
import org.panteleyev.mysqlapi.model.RecordWithAllTypes;
import org.panteleyev.mysqlapi.model.RecordWithOptionals;
import org.panteleyev.mysqlapi.model.RecordWithPrimitives;
import org.testng.Assert;
import org.testng.annotations.Test;
import java.util.Arrays;

/**
 * Verifies that {@code createTables} creates a table for every registered
 * record class and that each freshly created table starts out empty.
 */
@Test
public class TableCreationTest extends Base {

    @Test
    public void testCreateTables() {
        // Create tables for all model record classes in one call.
        getDao().createTables(Arrays.asList(
                RecordWithAllTypes.class,
                RecordWithOptionals.class,
                RecordWithPrimitives.class,
                ImmutableRecord.class,
                ImmutableRecordWithPrimitives.class
        ));

        // Newly created tables must contain no rows.
        Assert.assertTrue(getDao().getAll(RecordWithAllTypes.class).isEmpty());
        Assert.assertTrue(getDao().getAll(RecordWithOptionals.class).isEmpty());
        Assert.assertTrue(getDao().getAll(RecordWithPrimitives.class).isEmpty());
        Assert.assertTrue(getDao().getAll(ImmutableRecord.class).isEmpty());
        Assert.assertTrue(getDao().getAll(ImmutableRecordWithPrimitives.class).isEmpty());
    }
}
gkahn13/GtS
src/gcg/algos/gcg_train_tfrecord.py
from gcg.algos.gcg_train import GCGtrain
from gcg.data.timer import timeit
from gcg.policies.gcg_policy_tfrecord import GCGPolicyTfrecord


class GCGtrainTfrecord(GCGtrain):
    """GCG training loop variant that reads training data from tfrecord files
    instead of sampling into replay pools.

    The policy is rewired to consume the tfrecord folders listed under
    ``alg_params['offpolicy']``; labelling and environment-based evaluation
    are disabled because both were already baked into the tfrecords.
    """

    def __init__(self, exp_name, env_params, env_eval_params, rp_params, rp_eval_params, labeller_params, policy_params, alg_params,
                 log_level='info', log_fname='log.txt', seed=None, is_continue=False, params_txt=None):
        # Point the policy at the tfrecord data and mirror the training batch size.
        policy_params['kwargs']['tfrecord_folders'] = alg_params['offpolicy']
        policy_params['kwargs']['batch_size'] = alg_params['batch_size']

        # labelling was done to make the tfrecords
        labeller_params = {
            'class': None,
            'kwargs': {
            }
        }

        # since evaluation is in tfrecords
        env_eval_params = None

        super(GCGtrainTfrecord, self).__init__(
            exp_name=exp_name,
            env_params=env_params,
            env_eval_params=env_eval_params,
            rp_params=rp_params,
            rp_eval_params=rp_eval_params,
            labeller_params=labeller_params,
            policy_params=policy_params,
            alg_params=alg_params,
            log_level=log_level,
            log_fname=log_fname,
            seed=seed,
            is_continue=is_continue,
            params_txt=params_txt
        )

        # This subclass only makes sense with a tfrecord-backed policy.
        assert isinstance(self._policy, GCGPolicyTfrecord)

    ###############
    ### Restore ###
    ###############

    def _add_offpolicy(self, folders, max_to_add):
        """No-op override: data lives in tfrecords, not replay pools."""
        pass  # don't add to replay pools since these are tfrecords

    ########################
    ### Training methods ###
    ########################

    def _run_env_eval(self, step, do_sampler_step=True, calculate_holdout=True):
        """Evaluate on the tfrecord holdout set instead of stepping an env."""
        timeit.start('eval')
        ### calculate holdout costs
        self._policy.eval_holdout()
        timeit.stop('eval')

    def _run_train_step(self, step):
        """Run policy gradient step(s) for this step, honoring the configured
        train frequency, and periodically sync the target network.
        """
        def do_train_step():
            timeit.start('train')
            self._policy.train_step(step)
            timeit.stop('train')

        # _train_every_n_steps >= 1 means "train once every N steps";
        # a fractional value means "train 1/N times per step".
        if self._train_every_n_steps >= 1:
            if step % int(self._train_every_n_steps) == 0:
                do_train_step()
        else:
            for _ in range(int(1. / self._train_every_n_steps)):
                do_train_step()

        ### update target network
        if step > self._update_target_after_n_steps and step % self._update_target_every_n_steps == 0:
            self._policy.update_target()
kaydoh/scale
scale/messaging/__init__.py
# Point Django at this app's AppConfig subclass (pre-Django 3.2 convention
# for selecting a custom application configuration).
default_app_config = 'messaging.apps.MessagingConfig'
LeandroTk/Algorithms
computer_science/algorithms/dynamic_programming/factorial.cpp
#include <iostream> #include <vector> using namespace std; unsigned long long factorial_dp(int n) { vector<unsigned long long> v(1000000); v[0] = 1; for (int i = 1; i <= n; i++) v[i] = v[i - 1] * i; return v[n]; } int main() { cout << factorial_dp(0) << endl; cout << factorial_dp(1) << endl; cout << factorial_dp(5) << endl; cout << factorial_dp(50) << endl; return 0; }
wangchuanli001/Project-experience
java-经典java小程序源代码合集/java扫雷程序/JMine/AboutFrame.java
import java.awt.event.*;
import java.awt.*;
import javax.swing.*;

/**
 * "About" window for the JMine game. Shows author/version labels and an
 * Exit button that hides the window.
 */
class AboutFrame extends JFrame implements MouseListener {
    private JPanel aboutPane;
    private JLabel msg;
    private JLabel msg1;
    private JLabel msg2;
    private JButton exit;

    /**
     * @param strName window title
     */
    public AboutFrame(String strName) {
        super(strName);
        setSize(180, 170);
        // BUGFIX: EXIT_ON_CLOSE would call System.exit() and terminate the
        // whole game when the About window is closed. Dispose only this frame.
        setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE);
        aboutPane = new JPanel();
        msg = new JLabel("JMine written by <NAME>.");
        msg1 = new JLabel(" Enjoy! ");
        // BUGFIX: user-facing typo — "Vision" -> "Version".
        msg2 = new JLabel("Version 1.0 ");
        exit = new JButton("Exit");
        exit.addMouseListener(this);
        aboutPane.add(msg);
        aboutPane.add(msg1);
        aboutPane.add(msg2);
        aboutPane.add(exit);
        setContentPane(aboutPane);
        setLocation(250, 220);
    }

    /** Hides the window when the Exit button is clicked. */
    public void mouseClicked(MouseEvent e) {
        this.setVisible(false);
    }

    // Remaining MouseListener callbacks are intentionally no-ops; only the
    // click matters for the Exit button.
    public void mousePressed(MouseEvent e) {
    }

    public void mouseReleased(MouseEvent e) {
    }

    public void mouseExited(MouseEvent e) {
    }

    public void mouseEntered(MouseEvent e) {
    }

    /** Standalone demo entry point. */
    public static void main(String[] args) {
        AboutFrame about = new AboutFrame("Win");
        // Replaces the deprecated Window.show().
        about.setVisible(true);
    }
}
smith750/kc
coeus-impl/src/main/java/org/kuali/kra/irb/actions/risklevel/ProtocolRiskLevelBean.java
/*
 * Copyright 2005-2014 The Kuali Foundation
 *
 * Licensed under the Educational Community License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.opensource.org/licenses/ecl1.php
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.kuali.kra.irb.actions.risklevel;

import java.io.Serializable;

/**
 * Encapsulates the actions that can be performed on a ProtocolRiskLevel.
 *
 * Holds the in-progress {@link ProtocolRiskLevel} being added via the UI,
 * together with the error-message property key under which validation
 * failures for it are reported.
 */
public class ProtocolRiskLevelBean implements Serializable {

    private static final long serialVersionUID = -3726620115307425457L;

    private String errorPropertyKey;
    private ProtocolRiskLevel newProtocolRiskLevel;

    /**
     * @param errorPropertyKey base error path; ".protocolRiskLevelBean" is
     *        appended so validation errors land on this bean's fields
     */
    public ProtocolRiskLevelBean(String errorPropertyKey) {
        this.errorPropertyKey = errorPropertyKey + ".protocolRiskLevelBean";
        newProtocolRiskLevel = new ProtocolRiskLevel();
    }

    /**
     * @return the full error property key for this bean
     */
    public String getErrorPropertyKey() {
        return errorPropertyKey;
    }

    /**
     * @return the risk level currently being composed (never null after construction)
     */
    public ProtocolRiskLevel getNewProtocolRiskLevel() {
        return newProtocolRiskLevel;
    }

    /**
     * @param newProtocolRiskLevel the risk level to stage for addition
     */
    public void setNewProtocolRiskLevel(ProtocolRiskLevel newProtocolRiskLevel) {
        this.newProtocolRiskLevel = newProtocolRiskLevel;
    }
}
BadSugar/DearPyGui
DearPyGui/src/ui/AppItems/basic/mvCheckbox.cpp
#include <utility>
#include "mvCheckbox.h"
#include "mvContext.h"
#include "mvItemRegistry.h"
#include "AppItems/fonts/mvFont.h"
#include "AppItems/themes/mvTheme.h"
#include "AppItems/containers/mvDragPayload.h"
#include "mvPythonExceptions.h"
#include "AppItems/widget_handlers/mvItemHandlerRegistry.h"

namespace Marvel {

    // Dear PyGui checkbox widget wrapping ImGui::Checkbox. Holds its boolean
    // state in a shared_ptr so it can be linked to another item via a value
    // source (see setDataSource).
    mvCheckbox::mvCheckbox(mvUUID uuid)
        : mvAppItem(uuid)
    {
    }

    // Copies per-widget state from a template item. The live value is shared
    // only when this item has a value source configured.
    void mvCheckbox::applySpecificTemplate(mvAppItem* item)
    {
        auto titem = static_cast<mvCheckbox*>(item);
        if (config.source != 0) _value = titem->_value;
        _disabled_value = titem->_disabled_value;
    }

    // Renders the checkbox for one frame: applies position/width/indent,
    // font and theme, draws the ImGui widget, queues the user callback on a
    // change, then restores state and runs handlers / drag-drop.
    void mvCheckbox::draw(ImDrawList* drawlist, float x, float y)
    {

        //-----------------------------------------------------------------------------
        // pre draw
        //-----------------------------------------------------------------------------

        // show/hide
        if (!config.show)
            return;

        // focusing
        if (info.focusNextFrame)
        {
            ImGui::SetKeyboardFocusHere();
            info.focusNextFrame = false;
        }

        // cache old cursor position
        ImVec2 previousCursorPos = ImGui::GetCursorPos();

        // set cursor position if user set
        if (info.dirtyPos)
            ImGui::SetCursorPos(state.pos);

        // update widget's position state
        state.pos = { ImGui::GetCursorPosX(), ImGui::GetCursorPosY() };

        // set item width
        if (config.width != 0)
            ImGui::SetNextItemWidth((float)config.width);

        // set indent
        if (config.indent > 0.0f)
            ImGui::Indent(config.indent);

        // push font if a font object is attached
        if (font)
        {
            ImFont* fontptr = static_cast<mvFont*>(font.get())->getFontPtr();
            ImGui::PushFont(fontptr);
        }

        apply_local_theming(this);

        //-----------------------------------------------------------------------------
        // draw
        //-----------------------------------------------------------------------------
        {
            // push imgui id to prevent name collisions
            ScopedID id(uuid);

            // while disabled, mirror the live value so the greyed-out widget
            // still displays the current state without mutating it
            if (!config.enabled) _disabled_value = *_value;

            if (ImGui::Checkbox(info.internalLabel.c_str(), config.enabled ? _value.get() : &_disabled_value))
            {
                // snapshot the value now; the callback runs later on the
                // callback queue and must not read mutable widget state
                bool value = *_value;

                if (config.alias.empty())
                    mvSubmitCallback([=]() {
                        mvAddCallback(getCallback(false), uuid, ToPyBool(value), config.user_data);
                        });
                else
                    mvSubmitCallback([=]() {
                        mvAddCallback(getCallback(false), config.alias, ToPyBool(value), config.user_data);
                        });
            }
        }

        //-----------------------------------------------------------------------------
        // update state
        //-----------------------------------------------------------------------------
        UpdateAppItemState(state);

        //-----------------------------------------------------------------------------
        // post draw
        //-----------------------------------------------------------------------------

        // set cursor position to cached position
        if (info.dirtyPos)
            ImGui::SetCursorPos(previousCursorPos);

        if (config.indent > 0.0f)
            ImGui::Unindent(config.indent);

        // pop font off stack
        if (font)
            ImGui::PopFont();

        cleanup_local_theming(this);

        if (handlerRegistry)
            handlerRegistry->checkEvents(&state);

        // handle drag & drop if used
        apply_drag_drop(this);
    }

    // Returns the current value as a Python bool.
    PyObject* mvCheckbox::getPyValue()
    {
        return ToPyBool(*_value);
    }

    // Sets the current value from a Python object.
    void mvCheckbox::setPyValue(PyObject* value)
    {
        *_value = ToBool(value);
    }

    // Links this checkbox's value storage to another item's value so both
    // widgets share one bool; validates the source exists and its value type
    // matches before swapping in the shared pointer.
    void mvCheckbox::setDataSource(mvUUID dataSource)
    {
        if (dataSource == config.source) return;
        config.source = dataSource;

        mvAppItem* item = GetItem((*GContext->itemRegistry), dataSource);
        if (!item)
        {
            mvThrowPythonError(mvErrorCode::mvSourceNotFound, "set_value",
                "Source item not found: " + std::to_string(dataSource), this);
            return;
        }
        if (GetEntityValueType(item->type) != GetEntityValueType(type))
        {
            mvThrowPythonError(mvErrorCode::mvSourceNotCompatible, "set_value",
                "Values types do not match: " + std::to_string(dataSource), this);
            return;
        }
        _value = *static_cast<std::shared_ptr<bool>*>(item->getValue());
    }

}
avilaplana/scala-99
src/test/scala/alvaro/P09Spec.scala
package alvaro

import org.scalatest.{Matchers, WordSpecLike}

import scala.annotation.tailrec
import scala.collection.:+

object P09 {

  // Packs consecutive duplicate elements into sublists, e.g.
  // List('a','a','b') => List(List('a','a'), List('b')).
  // Note: an empty input yields List(Nil) (the accumulator's seed), which is
  // the behaviour the spec below pins down.
  def pack(list: List[Char]): List[List[Char]] = {
    @tailrec
    def p(lt: List[List[Char]], l: List[Char]): List[List[Char]] = (lt, l) match {
      // seed accumulator: start the first group with the first element
      case (List(Nil), head :: tail) => p(List(List(head)), tail)
      // input exhausted: accumulator is the answer
      case (lt, Nil) => lt
      // element continues the current (last) group
      case (init :+ last, head :: tail) if (last.contains(head)) => p(init :+ (last :+ head), tail)
      // element starts a new group
      case _ => p(lt :+ List(l.head), l.tail)
    }
    p(List(Nil), list)
  }
}

class P09Spec extends WordSpecLike with Matchers {

  "pack" should {
    import P09._

    "return empty List(List()) when the list is empty" in {
      pack(Nil) shouldBe List(Nil)
    }

    "return List(List(a)) when the list is List(a)" in {
      pack(List('a')) shouldBe List(List('a'))
    }

    "return List(List(a,a)) when the list is List(a,a)" in {
      pack(List('a', 'a')) shouldBe List(List('a', 'a'))
    }

    "return List(List(a), List(b)) when the list is List(a,b)" in {
      pack(List('a', 'b')) shouldBe List(List('a'), List('b'))
    }

    "return List(List('a, 'a, 'a, 'a), List('b), List('c, 'c), List('a, 'a), List('d), List('e, 'e, 'e, 'e)) when " +
      "the list is List('a, 'a, 'a, 'a, 'b, 'c, 'c, 'a, 'a, 'd, 'e, 'e, 'e, 'e)" in {
      val charsToPack = List('a', 'a', 'a', 'a', 'b', 'c', 'c', 'a', 'a', 'd', 'e', 'e', 'e', 'e')
      pack(charsToPack) shouldBe List(List('a', 'a', 'a', 'a'), List('b'), List('c', 'c'), List('a', 'a'), List('d'), List('e', 'e', 'e', 'e'))
    }
  }

  // Same problem solved with foldLeft instead of explicit recursion.
  "pack with foldLeft" should {

    // Folding step: append e to the last group if it matches, else open a new group.
    def pack(l: List[List[Char]], e: Char) = l match {
      case List(Nil) => List(List(e))
      case init :+ last if last.contains(e) => init :+ (last :+ e)
      case _ => l :+ List(e)
    }

    "return empty List(List()) when the list is empty" in {
      Nil.foldLeft(List[List[Char]](Nil))(pack _) shouldBe List(Nil)
    }

    "return List(List(a)) when the list is List(a)" in {
      List('a').foldLeft(List[List[Char]](Nil))(pack _) shouldBe List(List('a'))
    }

    "return List(List(a,a)) when the list is List(a,a)" in {
      List('a', 'a').foldLeft(List[List[Char]](Nil))(pack _) shouldBe List(List('a', 'a'))
    }

    "return List(List(a), List(b)) when the list is List(a,b)" in {
      List('a', 'b').foldLeft(List[List[Char]](Nil))(pack _) shouldBe List(List('a'), List('b'))
    }

    "return List(List('a, 'a, 'a, 'a), List('b), List('c, 'c), List('a, 'a), List('d), List('e, 'e, 'e, 'e)) when " +
      "the list is List('a, 'a, 'a, 'a, 'b, 'c, 'c, 'a, 'a, 'd, 'e, 'e, 'e, 'e)" in {
      val charsToPack = List('a', 'a', 'a', 'a', 'b', 'c', 'c', 'a', 'a', 'd', 'e', 'e', 'e', 'e')
      charsToPack.foldLeft(List[List[Char]](Nil))(pack _) shouldBe List(
        List('a', 'a', 'a', 'a'), List('b'), List('c', 'c'), List('a', 'a'), List('d'), List('e', 'e', 'e', 'e'))
    }
  }
}
hackpanda/NewXmPluginSDK-master
plugProject/LockCommLib/src/main/java/cn/zelkova/lockprotocol/LockCommSetProfileForWifi.java
package cn.zelkova.lockprotocol;

/**
 * Command that configures the gateway's WiFi connection parameters.
 * (Translated from the original Chinese comment: "set gateway WiFi parameters".)
 *
 * The fields are encoded as KLV entries: tag 0x01 = SSID, tag 0x02 = password,
 * tag 0x03 = authentication mode.
 *
 * @author lwq
 * Created by liwenqi on 2016/11/16.
 */
public class LockCommSetProfileForWifi extends LockCommBase {

    /**
     * @param ssid     WiFi network name (KLV tag 0x01)
     * @param pwd      WiFi password (KLV tag 0x02)
     * @param authMode authentication mode byte (KLV tag 0x03) — the meaning of
     *                 individual values is not visible here; TODO confirm
     *                 against the protocol spec
     */
    public LockCommSetProfileForWifi(String ssid, String pwd, byte authMode) {
        super.mKLVList.add((byte) 0x01, ssid);
        super.mKLVList.add((byte) 0x02, pwd);
        super.mKLVList.add((byte) 0x03, authMode);
    }

    /** @return protocol command id (0x31) */
    @Override
    public short getCmdId() {
        return 0x31;
    }

    /** @return human-readable command name */
    @Override
    public String getCmdName() {
        return "SetProfileForWifi";
    }

    /** This command is sent without a session token. */
    @Override
    public boolean needSessionToken() {
        return false;
    }
}
DoSomething/slothie
test/unit/lib/middleware/messages/member/topics/ask-yes-no.test.js
'use strict';

// Unit tests for the askYesNo catch-all middleware: verifies topic-type
// gating, yes/no macro handling (topic change + reply), and error paths.

require('dotenv').config();
const test = require('ava');
const chai = require('chai');
const sinon = require('sinon');
const sinonChai = require('sinon-chai');
const httpMocks = require('node-mocks-http');
const underscore = require('underscore');

const helpers = require('../../../../../../../lib/helpers');
const logger = require('../../../../../../../lib/logger');
const topicFactory = require('../../../../../../helpers/factories/topic');
const userFactory = require('../../../../../../helpers/factories/user');

chai.should();
chai.use(sinonChai);

// module to be tested
const askYesNoCatchAll = require('../../../../../../../lib/middleware/messages/member/topics/ask-yes-no');

// stubs
const askYesNoBroadcast = topicFactory.getValidAskYesNoBroadcastTopic();
const error = { message: 'Epic fail' };

const sandbox = sinon.sandbox.create();

// Stub every reply/error helper before each test so assertions can verify
// exactly which path the middleware took.
test.beforeEach((t) => {
  sandbox.stub(logger, 'debug')
    .returns(underscore.noop);
  sandbox.stub(helpers.replies, 'invalidAskYesNoResponse')
    .returns(underscore.noop);
  sandbox.stub(helpers.replies, 'saidNo')
    .returns(underscore.noop);
  sandbox.stub(helpers.replies, 'saidYes')
    .returns(underscore.noop);
  sandbox.stub(helpers, 'sendErrorResponse')
    .returns(underscore.noop);
  t.context.req = httpMocks.createRequest();
  t.context.res = httpMocks.createResponse();
  t.context.req.user = userFactory.getValidUser();
});

test.afterEach((t) => {
  sandbox.restore();
  t.context = {};
});

test('askYesNoCatchAll should call next if req.topic is not an askYesNo', async (t) => {
  const next = sinon.stub();
  const middleware = askYesNoCatchAll();
  sandbox.stub(helpers.topic, 'isAskYesNo')
    .returns(false);
  t.context.req.topic = topicFactory.getValidAutoReply();

  // test
  await middleware(t.context.req, t.context.res, next);

  helpers.topic.isAskYesNo.should.have.been.calledWith(t.context.req.topic);
  next.should.have.been.called;
  helpers.replies.saidYes.should.not.have.been.called;
  helpers.replies.saidNo.should.not.have.been.called;
  helpers.replies.invalidAskYesNoResponse.should.not.have.been.called;
});

test('askYesNoCatchAll should call sendErrorResponse if askYesNo and request.parseAskYesNoResponse fails', async (t) => {
  const next = sinon.stub();
  const middleware = askYesNoCatchAll();
  t.context.req.topic = askYesNoBroadcast;
  sandbox.stub(helpers.request, 'parseAskYesNoResponse')
    .returns(Promise.reject(error));

  // test
  await middleware(t.context.req, t.context.res, next);

  helpers.request.parseAskYesNoResponse.should.have.been.calledWith(t.context.req);
  next.should.not.have.been.called;
  helpers.replies.saidYes.should.not.have.been.called;
  helpers.replies.saidNo.should.not.have.been.called;
  helpers.replies.invalidAskYesNoResponse.should.not.have.been.called;
  helpers.sendErrorResponse.should.have.been.calledWith(t.context.res, error);
});

test('askYesNoCatchAll should executeInboundTopicChange and send saidYes reply if askYesNo and request isSaidYesMacro', async (t) => {
  const next = sinon.stub();
  const middleware = askYesNoCatchAll();
  t.context.req.topic = askYesNoBroadcast;
  sandbox.stub(helpers.request, 'parseAskYesNoResponse')
    .returns(Promise.resolve());
  sandbox.stub(helpers.request, 'isSaidYesMacro')
    .returns(true);
  sandbox.stub(helpers.request, 'executeInboundTopicChange')
    .returns(Promise.resolve());

  // test
  await middleware(t.context.req, t.context.res, next);

  helpers.request.parseAskYesNoResponse.should.have.been.calledWith(t.context.req);
  next.should.not.have.been.called;
  const details = `broadcast/${askYesNoBroadcast.id}`;
  helpers.request.executeInboundTopicChange
    .should.have.been.calledWith(t.context.req, t.context.req.topic.saidYesTopic, details);
  helpers.replies.saidYes
    .should.have.been.calledWith(t.context.req, t.context.res, t.context.req.topic.saidYes);
  helpers.sendErrorResponse.should.not.been.called;
});

test('askYesNoCatchAll should call sendErrorResponse if request isSaidYesMacro but executeInboundTopicChange fails', async (t) => {
  const next = sinon.stub();
  const middleware = askYesNoCatchAll();
  t.context.req.topic = askYesNoBroadcast;
  sandbox.stub(helpers.request, 'parseAskYesNoResponse')
    .returns(Promise.resolve());
  sandbox.stub(helpers.request, 'isSaidYesMacro')
    .returns(true);
  sandbox.stub(helpers.request, 'executeInboundTopicChange')
    .throws();

  // test
  await middleware(t.context.req, t.context.res, next);

  helpers.request.parseAskYesNoResponse.should.have.been.calledWith(t.context.req);
  next.should.not.have.been.called;
  helpers.replies.saidYes.should.not.have.been.called;
  helpers.sendErrorResponse.should.have.been.called;
});

test('askYesNoCatchAll should call sendErrorResponse if request isSaidYesMacro but saidYesTopic id undefined', async (t) => {
  const next = sinon.stub();
  const middleware = askYesNoCatchAll();
  // NOTE: mutates the shared askYesNoBroadcast fixture (saidYesTopic = null);
  // later tests rely on properties this test does not touch.
  t.context.req.topic = askYesNoBroadcast;
  t.context.req.topic.saidYesTopic = null;
  sandbox.stub(helpers.request, 'parseAskYesNoResponse')
    .returns(Promise.resolve());
  sandbox.stub(helpers.request, 'isSaidYesMacro')
    .returns(true);
  sandbox.stub(helpers.request, 'executeInboundTopicChange')
    .throws();

  // test
  await middleware(t.context.req, t.context.res, next);

  helpers.request.parseAskYesNoResponse.should.have.been.calledWith(t.context.req);
  next.should.not.have.been.called;
  helpers.replies.saidYes.should.not.have.been.called;
  helpers.sendErrorResponse.should.have.been.called;
});

test('askYesNoCatchAll should execute executeInboundTopicChange and send saidNo reply if askYesNo and request isSaidNoMacro', async (t) => {
  const next = sinon.stub();
  const middleware = askYesNoCatchAll();
  t.context.req.topic = askYesNoBroadcast;
  sandbox.stub(helpers.request, 'parseAskYesNoResponse')
    .returns(Promise.resolve());
  sandbox.stub(helpers.request, 'isSaidYesMacro')
    .returns(false);
  sandbox.stub(helpers.request, 'isSaidNoMacro')
    .returns(true);
  sandbox.stub(helpers.request, 'executeInboundTopicChange')
    .returns(Promise.resolve());

  // test
  await middleware(t.context.req, t.context.res, next);

  helpers.request.parseAskYesNoResponse.should.have.been.calledWith(t.context.req);
  const details = `broadcast/${askYesNoBroadcast.id}`;
  helpers.request.executeInboundTopicChange
    .should.have.been.calledWith(t.context.req, askYesNoBroadcast.saidNoTopic, details);
  helpers.sendErrorResponse.should.not.been.called;
  helpers.replies.saidNo
    .should.have.been.calledWith(t.context.req, t.context.res, t.context.req.topic.saidNo);
});

test('askYesNoCatchAll should call sendErrorResponse if request isSaidNoMacro but executeSaidNoMacro fails', async (t) => {
  const next = sinon.stub();
  const middleware = askYesNoCatchAll();
  t.context.req.topic = askYesNoBroadcast;
  sandbox.stub(helpers.request, 'parseAskYesNoResponse')
    .returns(Promise.resolve());
  sandbox.stub(helpers.request, 'isSaidYesMacro')
    .returns(false);
  sandbox.stub(helpers.request, 'isSaidNoMacro')
    .returns(true);
  sandbox.stub(helpers.request, 'executeInboundTopicChange')
    .throws();

  // test
  await middleware(t.context.req, t.context.res, next);

  helpers.request.parseAskYesNoResponse.should.have.been.calledWith(t.context.req);
  next.should.not.have.been.called;
  helpers.replies.saidNo.should.not.have.been.called;
  helpers.sendErrorResponse.should.have.been.called;
});

test('askYesNoCatchAll should call sendErrorResponse if request isSaidNoMacro but saidNoTopic id undefined', async (t) => {
  const next = sinon.stub();
  const middleware = askYesNoCatchAll();
  t.context.req.topic = askYesNoBroadcast;
  t.context.req.topic.saidNoTopic = null;
  sandbox.stub(helpers.request, 'parseAskYesNoResponse')
    .returns(Promise.resolve());
  sandbox.stub(helpers.request, 'isSaidYesMacro')
    .returns(false);
  sandbox.stub(helpers.request, 'isSaidNoMacro')
    .returns(true);
  sandbox.stub(helpers.request, 'executeInboundTopicChange')
    .throws();

  // test
  await middleware(t.context.req, t.context.res, next);

  helpers.request.parseAskYesNoResponse.should.have.been.calledWith(t.context.req);
  next.should.not.have.been.called;
  helpers.replies.saidNo.should.not.have.been.called;
  helpers.sendErrorResponse.should.have.been.called;
});

test('askYesNoCatchAll should not changeTopic and send invalidAskYesNoResponse template if askYesNo and request is neither saidYes or saidNo macro', async (t) => {
  const next = sinon.stub();
  const middleware = askYesNoCatchAll();
  t.context.req.topic = askYesNoBroadcast;
  sandbox.stub(helpers.request, 'parseAskYesNoResponse')
    .returns(Promise.resolve());
  sandbox.stub(helpers.request, 'isSaidYesMacro')
    .returns(false);
  sandbox.stub(helpers.request, 'isSaidNoMacro')
    .returns(false);
  sandbox.stub(helpers.request, 'executeInboundTopicChange')
    .returns(Promise.resolve());

  // test
  await middleware(t.context.req, t.context.res, next);

  helpers.request.parseAskYesNoResponse.should.have.been.calledWith(t.context.req);
  next.should.not.have.been.called;
  helpers.request.executeInboundTopicChange.should.not.have.been.called;
  helpers.replies.invalidAskYesNoResponse.should.have.been.called;
  helpers.sendErrorResponse.should.not.been.called;
});
kitboy/docker-shop
html/ecshop3/ecshop/h5/app/network/API/api.recommend.service.js
(function () {
    'use strict';

    angular
        .module('app')
        .factory('APIRecommendService', APIRecommendService);

    APIRecommendService.$inject = ['$http', '$q', '$timeout', 'CacheFactory', 'ENUM'];

    // Thin wrappers around the shop API's recommendation endpoints. Each
    // method resolves with the relevant payload field on success, or null on
    // an API-level error.
    function APIRecommendService($http, $q, $timeout, CacheFactory, ENUM) {
        var service = new APIEndpoint(
            $http,
            $q,
            $timeout,
            CacheFactory,
            'APIRecommendService'
        );

        service.categoryList = endpoint('/v2/ecapi.recommend.category.list', function (data) { return data.categories; });
        service.productList = endpoint('/v2/ecapi.recommend.product.list', function (data) { return data.products; });
        service.brandList = endpoint('/v2/ecapi.recommend.brand.list', function (data) { return data.brands; });
        service.shopList = endpoint('/v2/ecapi.recommend.shop.list', function (data) { return data.shops; });
        service.bonusInfo = endpoint('/v2/ecapi.recommend.bonus.info', function (data) { return data.bonus_info; });

        return service;

        // Builds a fetcher bound to one API path. `extract` picks the payload
        // field out of a successful response; non-OK error codes yield null.
        // Plain `function` (not arrow) so `this` stays the service instance
        // when the method is invoked.
        function endpoint(path, extract) {
            return function (params) {
                return this.fetch(path, params, false, function (res) {
                    return ENUM.ERROR_CODE.OK == res.data.error_code ? extract(res.data) : null;
                });
            };
        }
    }
})();
photowey/flowable-ext
flowable-ext-core-api/src/main/java/com/photowey/ext/core/api/engine/ProcessEngineExt.java
<reponame>photowey/flowable-ext<filename>flowable-ext-core-api/src/main/java/com/photowey/ext/core/api/engine/ProcessEngineExt.java /* * Copyright © 2020 photowey (<EMAIL>) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.photowey.ext.core.api.engine; import com.photowey.ext.core.api.dynamic.DynamicBpmnServiceExt; import com.photowey.ext.core.api.form.FormServiceExt; import com.photowey.ext.core.api.history.HistoryServiceExt; import com.photowey.ext.core.api.identity.IdentityServiceExt; import com.photowey.ext.core.api.management.ManagementServiceExt; import com.photowey.ext.core.api.process.ProcessMigrationServiceExt; import com.photowey.ext.core.api.repository.RepositoryServiceExt; import com.photowey.ext.core.api.runtime.RuntimeServiceExt; import com.photowey.ext.core.api.task.TaskServiceExt; import com.photowey.flowable.ext.mybatis.api.service.NativeQueryService; import org.flowable.engine.ProcessEngine; import org.flowable.engine.ProcessEngineConfiguration; /** * {@code ProcessEngineExt} is an interface that extends(ext) {@link ProcessEngine} * provides access to all the ext services that expose the BPM and workflow operations. 
* * @author photowey * @date 2020/12/28 * @since 1.0.0 */ public interface ProcessEngineExt { /** * {@link ProcessEngine} * * @return {@link ProcessEngine} */ ProcessEngine getProcessEngine(); /** * The flowable-ext version */ String FLOWABLE_EXT_VERSION = "1.0.0"; /** * start Executors */ void startExecutors(); /** * {@link RepositoryServiceExt} * * @return {@link RepositoryServiceExt} */ RepositoryServiceExt getRepositoryService(); /** * {@link RuntimeServiceExt} * * @return {@link RuntimeServiceExt} */ RuntimeServiceExt getRuntimeService(); /** * {@link FormServiceExt} * * @return {@link FormServiceExt} */ FormServiceExt getFormService(); /** * {@link TaskServiceExt} * * @return {@link TaskServiceExt} */ TaskServiceExt getTaskService(); /** * {@link HistoryServiceExt} * * @return {@link HistoryServiceExt} */ HistoryServiceExt getHistoryService(); /** * {@link IdentityServiceExt} * * @return {@link IdentityServiceExt} */ IdentityServiceExt getIdentityService(); /** * {@link ManagementServiceExt} * * @return {@link ManagementServiceExt} */ ManagementServiceExt getManagementService(); /** * {@link DynamicBpmnServiceExt} * * @return {@link DynamicBpmnServiceExt} */ DynamicBpmnServiceExt getDynamicBpmnService(); /** * {@link ProcessMigrationServiceExt} * * @return {@link ProcessMigrationServiceExt} */ ProcessMigrationServiceExt getProcessMigrationService(); /** * {@link ProcessEngineConfiguration} * * @return {@link ProcessEngineConfiguration} */ ProcessEngineConfiguration getProcessEngineConfiguration(); /** * {@link NativeQueryService} * provides all native query operations. * * @return {@link NativeQueryService} */ NativeQueryService getNativeQueryService(); }
jerebenitez/IFE-simpact-openfoam
src/remesh/fnod_fix.h
SUBROUTINE fnod_fix(nnodbd,nodcre)
!--------------------------------------------------------------------
! get ifx conditions for new boundary nodes in a refined set
! NOTE: interface-only stub - arguments are declared but no body is
! present in this file.
!--------------------------------------------------------------------
IMPLICIT NONE
!--- Dummy variables
INTEGER(kind=4),POINTER:: nodcre(:,:), & !pairs of parent nodes between which each new node is generated
                          nnodbd(:)      !nnodbd = 0 if the created node is not a boundary node
                                         !       = 1 if the created node is a boundary node
END SUBROUTINE fnod_fix
samreid/ludum-dare-35-shapeshift
js/view/arcade/ArcadeGameNode.js
// Copyright 2015
/**
 * Scenery view for the arcade game mode: goal shapes fall toward the ground,
 * the player applies shape operations via buttons, a pair of eyes tracks the
 * pointer, and panels are shown on level-cleared / game-over.
 *
 * @author <NAME>
 */
define( function( require ) {
  'use strict';

  // modules
  var inherit = require( 'PHET_CORE/inherit' );
  var shapeshift = require( 'SHAPESHIFT/shapeshift' );
  var Node = require( 'SCENERY/nodes/Node' );
  var HBox = require( 'SCENERY/nodes/HBox' );
  var VBox = require( 'SCENERY/nodes/VBox' );
  var BodyNode = require( 'SHAPESHIFT/view/BodyNode' );
  var OperationButton = require( 'SHAPESHIFT/view/OperationButton' );
  var Eyebrow = require( 'SHAPESHIFT/view/Eyebrow' );
  var Eyeball = require( 'SHAPESHIFT/view/Eyeball' );
  var FontAwesomeNode = require( 'SUN/FontAwesomeNode' );
  var Plane = require( 'SCENERY/nodes/Plane' );
  // NOTE(review): TitledPanel, Image, ShapeshiftModel, VStrut and bannerImage
  // below are required but never referenced in this file — candidates for removal.
  var TitledPanel = require( 'SHAPESHIFT/view/TitledPanel' );
  var Text = require( 'SCENERY/nodes/Text' );
  var Rectangle = require( 'SCENERY/nodes/Rectangle' );
  var Vector2 = require( 'DOT/Vector2' );
  var Panel = require( 'SUN/Panel' );
  var PhetFont = require( 'SCENERY_PHET/PhetFont' );
  var MultiLineText = require( 'SCENERY_PHET/MultiLineText' );
  var Image = require( 'SCENERY/nodes/Image' );
  var ShapeshiftModel = require( 'SHAPESHIFT/model/ShapeshiftModel' );
  var TextPushButton = require( 'SUN/buttons/TextPushButton' );
  var RoundPushButton = require( 'SUN/buttons/RoundPushButton' );
  var VStrut = require( 'SCENERY/nodes/VStrut' );

  // operations
  var Reflect = require( 'SHAPESHIFT/model/operations/Reflect' );
  var Rotate = require( 'SHAPESHIFT/model/operations/Rotate' );
  var ConvexHull = require( 'SHAPESHIFT/model/operations/ConvexHull' );
  var RadialDoubling = require( 'SHAPESHIFT/model/operations/RadialDoubling' );
  var SelfFractal = require( 'SHAPESHIFT/model/operations/SelfFractal' );
  var DeleteVertices = require( 'SHAPESHIFT/model/operations/DeleteVertices' );
  var Snowflake = require( 'SHAPESHIFT/model/operations/Snowflake' );
  var Subdivide = require( 'SHAPESHIFT/model/operations/Subdivide' );
  var Shear = require( 'SHAPESHIFT/model/operations/Shear' );
  var ArcadeLevelDesign = require( 'SHAPESHIFT/view/arcade/ArcadeLevelDesign' );
  var ArcadeGameModel = require( 'SHAPESHIFT/view/arcade/ArcadeGameModel' );

  // images
  var bannerImage = require( 'image!SHAPESHIFT/banner.png' );

  /**
   * @param blah - unused here (received but never read)
   * @param layoutBounds - fixed screen layout bounds
   * @param visibleBoundsProperty - Property of the currently visible bounds
   * @param showHomeScreen - callback invoked by the home button
   */
  function ArcadeGameNode( blah, layoutBounds, visibleBoundsProperty, showHomeScreen ) {
    Node.call( this );
    this.layoutBounds = layoutBounds;
    var ground = new Rectangle( -1000, 480, 3000, 1000, { fill: 'green' } );
    this.ground = ground;
    this.time = 0;
    var levels = new ArcadeLevelDesign().getLevels();
    var model = new ArcadeGameModel( levels );
    this.visibleBoundsProperty = visibleBoundsProperty;

    // So the eyes can watch the mouse wherever it goes
    this.addChild( new Plane( { fill: '#5555ff' } ) );
    this.addChild( ground );
    this.model = model;
    this.layoutBounds = layoutBounds;
    this.shapeLayer = new Node( { translation: layoutBounds.center } );
    this.addChild( this.shapeLayer );
    this.buttonLayer = new HBox( { spacing: 25 } );
    this.addChild( this.buttonLayer );
    this.bodyNodeMap = {}; // body.id => BodyNode
    this.operationButtons = [];

    // listen
    this.model.bodies.addItemAddedListener( this.addBody.bind( this ) );
    this.model.bodies.addItemRemovedListener( this.removeBody.bind( this ) );

    // add
    this.model.bodies.forEach( this.addBody.bind( this ) );

    // Default operation palette (individual levels replace it via levelProperty below).
    // this.addOperation( new Reflect() );
    this.addOperation( new Rotate( Math.PI / 2 ) );
    this.addOperation( new ConvexHull() );
    this.addOperation( new RadialDoubling() );
    this.addOperation( new Snowflake() );
    // this.addOperation( new DeleteVertices( 2 ) );
    this.addOperation( new DeleteVertices( 3 ) );
    // this.addOperation( new SelfFractal() ); // makes things slow in preview for many others
    this.addOperation( new Subdivide() );
    this.addOperation( new Shear() );

    // Pointer-tracking eyes + eyebrows.
    var leftEye = new Eyeball();
    var rightEye = new Eyeball();
    leftEye.centerX = this.layoutBounds.centerX - leftEye.width;
    this.addChild( leftEye.mutate( { y: 300 } ) );
    this.addChild( rightEye.mutate( { left: leftEye.right + leftEye.width, y: leftEye.y } ) );
    this.leftEyebrow = new Eyebrow();
    this.rightEyebrow = new Eyebrow();
    this.addChild( this.leftEyebrow.mutate( { x: leftEye.x, y: leftEye.y - 30 } ) );
    this.addChild( this.rightEyebrow.mutate( { x: rightEye.x, y: leftEye.y - 30, scale: new Vector2( -1, 1 ) } ) );
    this.addInputListener( {
      move: function( event ) {
        leftEye.lookAt( event.pointer.point );
        rightEye.lookAt( event.pointer.point );
      }
    } );

    // Column of goal shapes; rebuilt whenever the model's goal bodies change.
    var goalNode = new VBox( { spacing: 10 } );
    this.goalNode = goalNode;
    var update = function() {
      goalNode.children = model.goalBodies.map( function( b ) {
        return new BodyNode( b ).mutate( { scale: 0.5 } );
      } ).getArray();
      if ( goalNode.children.length > 0 ) {
        goalNode.right = visibleBoundsProperty.value.right - 10;
      }
    };
    this.model.goalBodies.addItemAddedListener( update );
    this.model.goalBodies.addItemRemovedListener( update );
    update();
    this.addChild( goalNode );
    // NOTE(review): `self` is declared further down; this works because the
    // listener only fires after construction (var hoisting), but moving the
    // `var self = this;` declaration up would be clearer.
    model.levelStartedEmitter.addListener( function() {
      self.goalNode.bottom = layoutBounds.top;
    } );

    var homeButton = new RoundPushButton( { scale: 1.5, content: new FontAwesomeNode( 'home', { fill: 'black' } ) } );
    homeButton.addListener( showHomeScreen );
    this.addChild( homeButton );
    visibleBoundsProperty.link( function( visibleBounds ) {
      homeButton.top = visibleBounds.top + 10;
      homeButton.left = visibleBounds.left + 10;
    } );
    var self = this;
    visibleBoundsProperty.link( function( visibleBounds ) {
      self.buttonLayer.bottom = visibleBounds.bottom - 20;
    } );
    // Level banner at the top of the visible bounds.
    var levelDescriptionNode = new MultiLineText( 'default text', { align: 'left', font: new PhetFont( { size: 23 } ) } );
    var quote = new Panel( levelDescriptionNode );
    var updateTextLocation = function() {
      var visibleBounds = self.visibleBoundsProperty.value;
      quote.top = visibleBounds.top + 10;
      quote.centerX = visibleBounds.centerX;
    };
    visibleBoundsProperty.link( updateTextLocation );
    this.addChild( quote );
    model.levelProperty.link( function( level ) {
      levelDescriptionNode.setText( 'Level: ' + (model.levels.indexOf( level ) + 1) );
      updateTextLocation();
    } );
    // Level-cleared panel; `callback` advances the game when Continue is pressed.
    model.successEmitter.addListener( function( callback ) {
      var textPushButton = new TextPushButton( 'Continue', { scale: 4 } );
      var levelText = (model.levels.indexOf( model.level ) + 1) + '';
      var createSuccessPanelChildren = [
        new Text( 'Level ' + levelText + ' cleared!', { fontSize: 48 } ),
        textPushButton
      ];
      var panel = new Panel( new VBox( { children: createSuccessPanelChildren } ), { centerX: layoutBounds.centerX, bottom: layoutBounds.bottom - 10 } );
      textPushButton.addListener( function() {
        self.removeChild( panel );
        callback();
      } );
      self.addChild( panel );
    } );
    // Swap the operation palette for each new level and reset the goal column.
    model.levelProperty.link( function( level ) {
      self.clearOperations();
      for ( var i = 0; i < level.availableOperations.length; i++ ) {
        var obj = level.availableOperations[ i ];
        self.addOperation( obj );
      }
      self.goalNode.bottom = layoutBounds.top;
    } );
    this.speed = 1; // pixels per step that goalNode falls (see step())
    var tryAgainButton = new TextPushButton( 'Try Again', { scale: 3 } );
    tryAgainButton.addListener( function() {
      // blah
      var currentIndex = self.model.levels.indexOf( self.model.level );
      self.model.startLevel( self.model.levels[ currentIndex ] );
      self.goalNode.bottom = layoutBounds.top;
      self.gameOver = false;
      self.gameOverPanel.visible = false;
    } );
    var gameOverPanel = new Panel( new VBox( { spacing: 20, children: [ new MultiLineText( 'Game Over', { font: new PhetFont( 48 ) } ), tryAgainButton ], center: this.layoutBounds.center } ) );
    this.addChild( gameOverPanel );
    this.gameOverPanel = gameOverPanel;
    gameOverPanel.visible = false;
  }

  shapeshift.register( 'ArcadeGameNode', ArcadeGameNode );

  return inherit( Node, ArcadeGameNode, {
    // Remove all operation buttons (used when switching levels).
    clearOperations: function() {
      this.buttonLayer.children = [];
      this.operationButtons.length = 0;
    },
    // Add one operation button to the palette and re-center the row.
    addOperation: function( operation ) {
      var operationButton = new OperationButton( this.model, operation );
      this.buttonLayer.addChild( operationButton );
      this.operationButtons.push( operationButton );
      this.layoutOperationButtons();
    },
    layoutOperationButtons: function() {
      this.buttonLayer.centerX = this.layoutBounds.centerX;
    },
    // Model body added: create and track its view node.
    addBody: function( body ) {
      var bodyNode = new BodyNode( body );
      this.shapeLayer.addChild( bodyNode );
      this.bodyNodeMap[ body.id ] = bodyNode;
    },
    // Model body removed: drop its view node.
    removeBody: function( body ) {
      this.shapeLayer.removeChild( this.bodyNodeMap[ body.id ] );
      delete this.bodyNodeMap[ body.id ];
    },
    // Animation step: advance model/eyebrows, drop the goal column, and end
    // the game when it touches the ground.
    step: function( dt ) {
      if ( !this.gameOver ) {
        this.time += dt;
        this.model.step( dt );
        this.operationButtons.forEach( function( operationButton ) {
          operationButton.update();
        } );
        this.leftEyebrow.step( dt );
        this.rightEyebrow.step( dt );
        this.goalNode.translate( 0, this.speed );
        if ( this.goalNode.globalBounds.intersectsBounds( this.ground.globalBounds ) ) {
          this.gameOver = true;
          this.gameOverPanel.visible = true;
          this.gameOverPanel.center = this.layoutBounds.center;
        }
      }
    }
  } );
} );
touxiong88/92_mediatek
kernel/drivers/aee/aed/aed-debug.c
<filename>kernel/drivers/aee/aed/aed-debug.c #include <linux/proc_fs.h> #include <linux/delay.h> #include <linux/kthread.h> #include <linux/kallsyms.h> #include <linux/notifier.h> #include <linux/kprobes.h> #include <asm/uaccess.h> #include "aed.h" #ifndef PARTIAL_BUILD static spinlock_t fiq_debugger_test_lock0; static spinlock_t fiq_debugger_test_lock1; static int test_case = 0; static int test_cpu = 0; static struct task_struct *wk_tsk[NR_CPUS]; extern int nr_cpu_ids; extern struct atomic_notifier_head panic_notifier_list; static int force_spinlock(struct notifier_block *this, unsigned long event, void *ptr) { unsigned long flags; xlog_printk(ANDROID_LOG_WARN, AEK_LOG_TAG, "\n ==> panic flow spinlock deadlock test \n"); spin_lock_irqsave(&fiq_debugger_test_lock0, flags); while(1); xlog_printk(ANDROID_LOG_WARN, AEK_LOG_TAG, "\n You should not see this \n"); return 0; } static struct notifier_block panic_test = { .notifier_call = force_spinlock, .priority = INT_MAX, }; static int kwdt_thread_test(void *arg) { struct sched_param param = { .sched_priority = RTPM_PRIO_WDT}; int cpu; unsigned long flags; sched_setscheduler(current, SCHED_FIFO, &param); set_current_state(TASK_INTERRUPTIBLE); cpu = smp_processor_id(); xlog_printk(ANDROID_LOG_WARN, AEK_LOG_TAG, "\n ==> kwdt_thread_test on CPU %d, test_case = %d \n", cpu, test_case); msleep(1000); if (test_case == 1) { if (cpu == test_cpu) { xlog_printk(ANDROID_LOG_WARN, AEK_LOG_TAG, "\n CPU %d : disable preemption and local IRQ forever", cpu); spin_lock_irqsave(&fiq_debugger_test_lock0, flags); while (1); xlog_printk(ANDROID_LOG_WARN, AEK_LOG_TAG, "\n Error : You should not see this ! 
\n"); } else { xlog_printk(ANDROID_LOG_WARN, AEK_LOG_TAG, "\n CPU %d : Do nothing and exit \n ", cpu); } } else if (test_case == 2) { if (cpu == test_cpu) { msleep(1000); xlog_printk(ANDROID_LOG_WARN, AEK_LOG_TAG, "\n CPU %d : disable preemption and local IRQ forever", cpu); spin_lock_irqsave(&fiq_debugger_test_lock0, flags); while (1); xlog_printk(ANDROID_LOG_ERROR, AEK_LOG_TAG, "\n Error : You should not see this ! \n"); } else { xlog_printk(ANDROID_LOG_WARN, AEK_LOG_TAG, "\n CPU %d : disable irq \n ", cpu); local_irq_disable(); while (1); xlog_printk(ANDROID_LOG_ERROR, AEK_LOG_TAG, "\n Error : You should not see this ! \n"); } } else if (test_case == 3) { if (cpu == test_cpu) { xlog_printk(ANDROID_LOG_WARN, AEK_LOG_TAG, "\n CPU %d : register panic notifier and force spinlock deadlock \n", cpu); atomic_notifier_chain_register(&panic_notifier_list, &panic_test); spin_lock_irqsave(&fiq_debugger_test_lock0, flags); while(1); xlog_printk(ANDROID_LOG_ERROR, AEK_LOG_TAG, "\n Error : You should not see this ! \n"); } else { xlog_printk(ANDROID_LOG_WARN, AEK_LOG_TAG, "\n CPU %d : Do nothing and exit \n ", cpu); } } else if (test_case == 4) { xlog_printk(ANDROID_LOG_WARN, AEK_LOG_TAG, "\n CPU %d : disable preemption and local IRQ forever \n ", cpu); spin_lock_irqsave(&fiq_debugger_test_lock1, flags); while (1); xlog_printk(ANDROID_LOG_WARN, AEK_LOG_TAG, "\n Error : You should not see this ! 
\n"); } return 0; } static int proc_write_generate_wdt(struct file* file, const char __user *buf, unsigned long count, void *data) { unsigned int i = 0; char msg[4]; unsigned char name[10] = {0}; if ((count < 2) || (count > sizeof(msg))) { xlog_printk(ANDROID_LOG_WARN, AEK_LOG_TAG, "\n count = %d \n", count); return -EINVAL; } if (copy_from_user(msg, buf, count)) { xlog_printk(ANDROID_LOG_WARN, AEK_LOG_TAG, "copy_from_user error"); return -EFAULT; } msg[count] = 0; test_case = (unsigned int) msg[0] - '0'; test_cpu = (unsigned int) msg[2] - '0'; xlog_printk(ANDROID_LOG_WARN, AEK_LOG_TAG, "test_case = %d, test_cpu = %d", test_case, test_cpu); if ((msg[1] != ':') || (test_case < 1) || (test_case > 4) || (test_cpu < 0) || (test_cpu > nr_cpu_ids)) { xlog_printk(ANDROID_LOG_WARN, AEK_LOG_TAG, \ "WDT test - Usage: [test case number(1~4):test cpu(0~%d)] \n", \ nr_cpu_ids); return -EINVAL; } if (test_case == 1) { xlog_printk(ANDROID_LOG_WARN, AEK_LOG_TAG, "Test 1 : One CPU WDT timeout (smp_send_stop succeed) \n"); } else if (test_case == 2) { xlog_printk(ANDROID_LOG_WARN, AEK_LOG_TAG, "Test 2 : One CPU WDT timeout, other CPU disable irq (smp_send_stop fail in old design) \n"); } else if (test_case == 3) { xlog_printk(ANDROID_LOG_WARN, AEK_LOG_TAG, "Test 3 : WDT timeout but deadlock in panic flow \n"); } else if (test_case == 4) { xlog_printk(ANDROID_LOG_WARN, AEK_LOG_TAG, "Test 4 : All CPU WDT timeout (other CPU stop in the loop) \n"); } else { xlog_printk(ANDROID_LOG_ERROR, AEK_LOG_TAG, "\n Unknown test_case %d \n", test_case); return -EINVAL; } // create kernel threads and bind on every cpu for(i = 0; i < nr_cpu_ids; i++) { sprintf(name, "wd-test-%d", i); xlog_printk(ANDROID_LOG_WARN, AEK_LOG_TAG, "[WDK]thread name: %s\n", name); wk_tsk[i] = kthread_create(kwdt_thread_test, &data, name); if (IS_ERR(wk_tsk[i])) { int ret = PTR_ERR(wk_tsk[i]); wk_tsk[i] = NULL; return ret; } kthread_bind(wk_tsk[i], i); } for(i = 0; i < nr_cpu_ids; i++) { xlog_printk(ANDROID_LOG_WARN, 
AEK_LOG_TAG, " wake_up_process(wk_tsk[%d]) \n", i); wake_up_process(wk_tsk[i]); } return count; } static int proc_read_generate_wdt(char *page, char **start, off_t off, int count, int *eof, void *data) { return sprintf(page, "WDT test - Usage: [test case number:test cpu] \n"); } /*****************************BEGIN OOPS***************************/ /**********BEGIN ISR trigger HWT**********/ /* kprobe pre_handler: called just before the probed instruction is executed */ static int handler_pre(struct kprobe *p, struct pt_regs *regs) { xlog_printk(ANDROID_LOG_INFO, AEK_LOG_TAG, "process_name:[%s], pid = %d.\n", current->comm, current->pid); return 0; } /* kprobe post_handler: called after the probed instruction is executed */ int flag = 1; void handler_post(struct kprobe *p, struct pt_regs *regs, unsigned long flags) { if (flag) { flag = 0; mdelay(30*1000); } } static int handler_fault(struct kprobe *p, struct pt_regs *regs, int trapnr); static struct kprobe kp_kpd_irq_handler = { .symbol_name = "kpd_irq_handler", .pre_handler = handler_pre, .post_handler = handler_post, .fault_handler = handler_fault, }; /* * fault_handler: this is called if an exception is generated for any * instruction within the pre- or post-handler, or when Kprobes * single-steps the probed instruction. */ static int handler_fault(struct kprobe *p, struct pt_regs *regs, int trapnr) { xlog_printk(ANDROID_LOG_ERROR, AEK_LOG_TAG, "fault_handler: p->addr = 0x%p, trap #%dn", p->addr, trapnr); unregister_kprobe(&kp_kpd_irq_handler); xlog_printk(ANDROID_LOG_INFO, AEK_LOG_TAG, "kprobe at %p unregistered\n", kp_kpd_irq_handler.addr); /* Return 0 because we don't handle the fault. 
*/ return 0; } static int register_kprobe_kpd_irq_handler(void) { int ret = 0; /* All set to register with Kprobes */ ret = register_kprobe(&kp_kpd_irq_handler); if (ret < 0) { xlog_printk(ANDROID_LOG_INFO, AEK_LOG_TAG, "register_kprobe failed, returned %d\n", ret); } else { xlog_printk(ANDROID_LOG_INFO, AEK_LOG_TAG, "Planted kprobe at %p, press Vol+/- to trigger.\n", kp_kpd_irq_handler.addr); } return ret; } /**********END ISR trigger HWT**********/ /**********BEGIN panic case**********/ static int noinline stack_overflow_routine(int x, int y, int z) { char a[4]; int i; for(i = 0; i < 60; i++) { a[i] = 0; } /* stack overflow */ return a[0] + a[1]+ a[11] + a[50]; } static void noinline buffer_over_flow(void) { xlog_printk(ANDROID_LOG_INFO, AEK_LOG_TAG, "test case : buffer overflow\n"); stack_overflow_routine(10, 1, 22); } static void noinline access_null_pointer(void) { xlog_printk(ANDROID_LOG_INFO, AEK_LOG_TAG, "test case : derefence Null pointer\n"); *((unsigned *)0) = 0xDEAD; } static void noinline double_free(void) { char *p = kmalloc(32,GFP_KERNEL); int i; xlog_printk(ANDROID_LOG_INFO, AEK_LOG_TAG, "test case : double free\n"); for (i = 0; i < 32; i++) { p[i] = (char)i; } xlog_printk(ANDROID_LOG_INFO, AEK_LOG_TAG, "aee_ut_ke: call free\n"); kfree(p); xlog_printk(ANDROID_LOG_INFO, AEK_LOG_TAG, "aee_ut_ke: call free again\n"); kfree(p); } static void noinline devide_by_0(void) { int ZERO = 0; int number; xlog_printk(ANDROID_LOG_INFO, AEK_LOG_TAG, "test case: division by %d\n", ZERO); number = 100/ZERO; } /**********END panic case**********/ static int proc_read_generate_oops(char *page, char **start, off_t off, int count, int *eof, void *data) { int len; BUG(); len = sprintf(page, "Oops Generated\n"); return len; } static int proc_write_generate_oops(struct file* file, const char __user *buf, unsigned long count, void *data) { char msg[6]; int test_case, test_subcase, test_cpu; if ((count < 2) || (count > sizeof(msg))) { xlog_printk(ANDROID_LOG_WARN, 
		            /* NOTE(review): count is unsigned long, so %d here is the
		             * wrong specifier - should be %lu. */
		            AEK_LOG_TAG, "%s: count = %d \n", __func__, count);
		return -EINVAL;
	}
	if (copy_from_user(msg, buf, count)) {
		xlog_printk(ANDROID_LOG_WARN, AEK_LOG_TAG, "%s: error\n", __func__);
		return -EFAULT;
	}
	msg[count] = 0;
	/* Parse "<case>:<subcase>:<cpu>" digit-by-digit. */
	test_case = (unsigned int) msg[0] - '0';
	test_subcase = (unsigned int) msg[2] - '0';
	test_cpu = (unsigned int) msg[4] - '0';
	xlog_printk(ANDROID_LOG_WARN, AEK_LOG_TAG, "test_case = %d-%d, test_cpu = %d\n", test_case, test_subcase, test_cpu);
	switch(test_case) {
	case 1:
		/* Panic-style crash generators. */
		switch(test_subcase) {
		case 1:
			buffer_over_flow();
			break;
		case 2:
			access_null_pointer();
			break;
		case 3:
			double_free();
			break;
		case 4:
			devide_by_0();
			break;
		default:
			break;
		}
		break;
	case 2:
		/* ISR-triggered HWT via the keypad kprobe. */
		register_kprobe_kpd_irq_handler();
		break;
	case 3:
		break;
	default:
		break;
	}
	return count;
}

/* Panic-notifier callback that BUG()s again, to force a nested panic.
 * NOTE(review): no return statement after BUG(); relies on BUG() not
 * returning - compilers may warn. */
static int nested_panic(struct notifier_block *this, unsigned long event, void *ptr)
{
	xlog_printk(ANDROID_LOG_ERROR, AEK_LOG_TAG, "\n => force nested panic \n");
	BUG();
}

static struct notifier_block panic_blk = {
	.notifier_call = nested_panic,
	.priority = INT_MAX - 100,
};

/* /proc read handler: register the nested-panic notifier, then BUG(). */
static int proc_read_generate_nested_ke(char *page, char **start, off_t off, int count, int *eof, void *data)
{
	int len;
	atomic_notifier_chain_register(&panic_notifier_list, &panic_blk);
	xlog_printk(ANDROID_LOG_ERROR, AEK_LOG_TAG, "\n => panic_notifier_list registered \n");
	BUG();
	/* Unreachable after BUG(); kept for the legacy read_proc signature. */
	len = sprintf(page, "Nested panic generated\n");
	return len;
}

extern void aed_md_exception(const int *log, int log_size, const int *phy, int phy_size, const char* detail);

/* /proc read handler: feed a synthetic 64 KiB buffer of 'A'..'Z' into the
 * modem-exception path. */
static int proc_read_generate_ee(char *page, char **start, off_t off, int count, int *eof, void *data)
{
#define TEST_PHY_SIZE 0x10000
	int log[16], i;
	char *ptr;
	memset(log, 0, sizeof(log));
	ptr = kmalloc(TEST_PHY_SIZE, GFP_KERNEL);
	if (ptr == NULL) {
		xlog_printk(ANDROID_LOG_ERROR, AEK_LOG_TAG, "proc_read_generate_ee kmalloc fail\n");
		return sprintf(page, "kmalloc fail \n");
	}
	for (i = 0; i < TEST_PHY_SIZE; i++) {
		ptr[i] = (i % 26) + 'A';
	}
	aed_md_exception(log, 0, (int *)ptr, TEST_PHY_SIZE,
	                 __FILE__);
	kfree(ptr);
	return sprintf(page, "Modem EE Generated\n");
}

/* /proc read handler: same synthetic buffer, routed to the combo-chip
 * exception path with a DAL message. */
static int proc_read_generate_combo(char *page, char **start, off_t off, int count, int *eof, void *data)
{
/* NOTE(review): benign redefinition of TEST_PHY_SIZE (same value as above). */
#define TEST_PHY_SIZE 0x10000
	int i;
	char *ptr;
	ptr = kmalloc(TEST_PHY_SIZE, GFP_KERNEL);
	if (ptr == NULL) {
		xlog_printk(ANDROID_LOG_ERROR, AEK_LOG_TAG, "proc_read_generate_combo kmalloc fail\n");
		return sprintf(page, "kmalloc fail \n");
	}
	for (i = 0; i < TEST_PHY_SIZE; i++) {
		ptr[i] = (i % 26) + 'A';
	}
	aee_kernel_dal_show("Oops, MT662X is generating core dump, please wait up to 5 min \n");
	aed_combo_exception(NULL, 0, (int *)ptr, TEST_PHY_SIZE, __FILE__);
	kfree(ptr);
	return sprintf(page, "Combo EE Generated\n");
}

/* /proc read handler: print usage for the kernel-notify trigger. */
static int proc_read_generate_kernel_notify(char *page, char **start, off_t off, int count, int *eof, void *data)
{
	return sprintf(page, "Usage: write message with format \"R|W|E:Tag:You Message\" into this file to generate kernel warning\n");
}

/*
 * /proc write handler: parse "R|W|E:<module>:<message>" and raise the matching
 * AEE reminder / warning / exception. Unlike the other write handlers, the
 * bound check here (count >= sizeof(msg)) is already off-by-one safe.
 */
static int proc_write_generate_kernel_notify(struct file* file, const char __user *buf, unsigned long count, void *data)
{
	char msg[164], *colon_ptr;
	if (count == 0) {
		return -EINVAL;
	}
	if ((count < 5) || (count >= sizeof(msg))) {
		/* NOTE(review): %d for a size_t (sizeof) - should be %zu. */
		xlog_printk(ANDROID_LOG_WARN, AEK_LOG_TAG, "aed: %s count sould be >= 5 and <= %d bytes.\n", __func__, sizeof(msg));
		return -EINVAL;
	}
	if (copy_from_user(msg, buf, count)) {
		xlog_printk(ANDROID_LOG_WARN, AEK_LOG_TAG, "aed: %s unable to read message\n", __func__);
		return -EFAULT;
	}
	/* Be safe */
	msg[count] = 0;
	if (msg[1] != ':') {
		return -EINVAL;
	}
	/* Module name is between the two colons, limited to ~30 chars. */
	colon_ptr = strchr(&msg[2], ':');
	if ((colon_ptr == NULL) || ((colon_ptr - msg) > 32)) {
		xlog_printk(ANDROID_LOG_WARN, AEK_LOG_TAG, "aed: %s cannot find valid module name\n", __func__);
		return -EINVAL;
	}
	*colon_ptr = 0;
	switch (msg[0]) {
	case 'R':
		aee_kernel_reminding(&msg[2], colon_ptr + 1);
		break;
	case 'W':
		aee_kernel_warning(&msg[2], colon_ptr + 1);
		break;
	case 'E':
		aee_kernel_exception(&msg[2], colon_ptr + 1);
		break;
	default:
		return -EINVAL;
	}
	return count;
}
/* /proc read handler: show a test message through the DAL layer. */
static int proc_read_generate_dal(char *page, char **start, off_t off, int count, int *eof, void *data)
{
	int len;
	aee_kernel_dal_show("Test for DAL \n");
	len = sprintf(page, "DAL Generated\n");
	return len;
}

/*
 * Create all debug trigger entries under the given /proc directory.
 * Returns 0 on success or -ENOMEM if any entry cannot be created
 * (earlier entries are not rolled back in that case).
 */
int aed_proc_debug_init(struct proc_dir_entry *aed_proc_dir)
{
	struct proc_dir_entry *aed_proc_generate_oops_file;
	struct proc_dir_entry *aed_proc_generate_nested_ke_file;
	struct proc_dir_entry *aed_proc_generate_ee_file;
	struct proc_dir_entry *aed_proc_generate_combo_file;
	struct proc_dir_entry *aed_proc_generate_ke_file;
	struct proc_dir_entry *aed_proc_generate_wdt_file;
	struct proc_dir_entry *aed_proc_generate_dal_file;
	/* Initialize the test locks before any trigger can take them. */
	spin_lock_init(&fiq_debugger_test_lock0);
	spin_lock_init(&fiq_debugger_test_lock1);
	aed_proc_generate_oops_file = create_proc_entry("generate-oops", S_IFREG | 0600, aed_proc_dir);
	if (aed_proc_generate_oops_file == NULL) {
		xlog_printk(ANDROID_LOG_ERROR, AEK_LOG_TAG, "aed create_proc_read_entry failed at generate-oops\n");
		return -ENOMEM;
	}
	aed_proc_generate_oops_file->write_proc = proc_write_generate_oops;
	aed_proc_generate_oops_file->read_proc = proc_read_generate_oops;
	aed_proc_generate_nested_ke_file = create_proc_read_entry("generate-nested-ke", 0400, aed_proc_dir, proc_read_generate_nested_ke, NULL);
	if (aed_proc_generate_nested_ke_file == NULL) {
		xlog_printk(ANDROID_LOG_ERROR, AEK_LOG_TAG, "aed create_proc_read_entry failed at generate-nested-ke\n");
		return -ENOMEM;
	}
	aed_proc_generate_ke_file = create_proc_entry("generate-kernel-notify", S_IFREG | 0600, aed_proc_dir);
	if (aed_proc_generate_ke_file == NULL) {
		xlog_printk(ANDROID_LOG_ERROR, AEK_LOG_TAG, "aed create_proc_read_entry failed at generate-kernel-notify\n");
		return -ENOMEM;
	}
	aed_proc_generate_ke_file->write_proc = proc_write_generate_kernel_notify;
	aed_proc_generate_ke_file->read_proc = proc_read_generate_kernel_notify;
	aed_proc_generate_ee_file = create_proc_read_entry("generate-ee", 0400, aed_proc_dir, proc_read_generate_ee, NULL);
	if(aed_proc_generate_ee_file == NULL) {
		xlog_printk(ANDROID_LOG_ERROR, AEK_LOG_TAG, "aed create_proc_read_entry failed at generate-ee\n");
		return -ENOMEM;
	}
	aed_proc_generate_combo_file = create_proc_read_entry("generate-combo", 0400, aed_proc_dir, proc_read_generate_combo, NULL);
	if(aed_proc_generate_combo_file == NULL) {
		xlog_printk(ANDROID_LOG_ERROR, AEK_LOG_TAG, "aed create_proc_read_entry failed at generate-combo\n");
		return -ENOMEM;
	}
	aed_proc_generate_wdt_file = create_proc_entry("generate-wdt", S_IFREG | 0600, aed_proc_dir);
	if (aed_proc_generate_wdt_file == NULL) {
		xlog_printk(ANDROID_LOG_ERROR, AEK_LOG_TAG, "aed create_proc_read_entry failed at generate-wdt\n");
		return -ENOMEM;
	}
	aed_proc_generate_wdt_file->write_proc = proc_write_generate_wdt;
	aed_proc_generate_wdt_file->read_proc = proc_read_generate_wdt;
	aed_proc_generate_dal_file = create_proc_read_entry("generate-dal", 0400, aed_proc_dir, proc_read_generate_dal, NULL);
	if(aed_proc_generate_dal_file == NULL) {
		xlog_printk(ANDROID_LOG_ERROR, AEK_LOG_TAG, "aed create_proc_read_entry failed at generate-dal\n");
		return -ENOMEM;
	}
	return 0;
}

/* Remove every debug trigger entry created above. */
int aed_proc_debug_done(struct proc_dir_entry *aed_proc_dir)
{
	remove_proc_entry("generate-oops", aed_proc_dir);
	remove_proc_entry("generate-nested-ke", aed_proc_dir);
	remove_proc_entry("generate-kernel-notify", aed_proc_dir);
	remove_proc_entry("generate-ee", aed_proc_dir);
	remove_proc_entry("generate-combo", aed_proc_dir);
	remove_proc_entry("generate-wdt", aed_proc_dir);
	remove_proc_entry("generate-dal", aed_proc_dir);
	/* NOTE(review): "generate-mt-ramdump" is removed here but never created
	 * in aed_proc_debug_init() above - presumably created elsewhere; verify. */
	remove_proc_entry("generate-mt-ramdump", aed_proc_dir);
	return 0;
}
#else
/* PARTIAL_BUILD: debug triggers compiled out; keep no-op stubs so callers link. */
int aed_proc_debug_init(struct proc_dir_entry *aed_proc_dir)
{
	return 0;
}

int aed_proc_debug_done(struct proc_dir_entry *aed_proc_dir)
{
	return 0;
}
#endif
ScalablyTyped/SlinkyTyped
w/winrt-uwp/src/main/scala/typingsSlinky/winrtUwp/Windows/Networking/NetworkOperators/UiccAppRecordKind.scala
package typingsSlinky.winrtUwp.Windows.Networking.NetworkOperators

import org.scalablytyped.runtime.StObject
import scala.scalajs.js
import scala.scalajs.js.`|`
import scala.scalajs.js.annotation.{JSGlobalScope, JSGlobal, JSImport, JSName, JSBracketAccess}

// Scala.js native facade (ScalablyTyped-generated style) for the WinRT
// UiccAppRecordKind enumeration; each enum member is modeled as a sealed trait
// nested in the companion-style object bound to the global WinRT name.
@js.native
sealed trait UiccAppRecordKind extends StObject

/** Describes the kinds of UICC application record. */
@JSGlobal("Windows.Networking.NetworkOperators.UiccAppRecordKind")
@js.native
object UiccAppRecordKind extends StObject {

  /** A file that consists of a sequence of records. */
  @js.native
  sealed trait recordOriented extends UiccAppRecordKind

  /** A transparent file - a sequence of bytes without further structure . */
  @js.native
  sealed trait transparent extends UiccAppRecordKind

  /** An unknown app record kind. For example, a UICC app that is enumerable by the modem, but is not an app used for cellular registration, would be reported as Unknown. */
  @js.native
  sealed trait unknown extends UiccAppRecordKind
}
pointable/hitcamp
schema/Lesson.js
'use strict';

// NOTE(review): `shortId` is required but never referenced in this module —
// presumably callers populate the `shortID` field themselves. Confirm before
// removing the dependency.
var shortId = require('shortid');

/**
 * Registers the `Lesson` model on the application's mongoose connection.
 * (Dead commented-out method/static scaffolding from the original file has
 * been removed.)
 *
 * @param {Object} app      App object exposing `db` (mongoose connection) and
 *                          `get('env')`.
 * @param {Object} mongoose Mongoose module used to build the schema.
 */
exports = module.exports = function(app, mongoose) {
  var lessonSchema = new mongoose.Schema({
    shortID: { type: String, unique: true, index: true },
    data: { type: String, default: 'Lesson Data' },
    lessonName: { type: String, default: '' },
    tiles: { type: Object, default: {} },
    // Stored as a string flag rather than a Boolean (default 'no').
    isComplete: { type: String, default: 'no' },
    isPrivate: { type: Boolean, default: true },
    activities: [ { type: mongoose.Schema.Types.ObjectId, ref: 'Activity' } ],
    wordLists: [ { type: mongoose.Schema.Types.ObjectId, ref: 'WordList' } ],
    owner: { type: mongoose.Schema.Types.ObjectId, ref: 'User' },
    timeCreated: { type: Date, default: Date.now },
    isLocked: { type: Boolean, default: false },
    backgroundImageURL: { type: String, default: 'about:blank' },
    backgroundThumbnailURL: { type: String, default: 'about:blank' },
    backgroundImageJSON: { type: Object, default: {} }
  });

  // Shared paged-query helper used across the app's models.
  lessonSchema.plugin(require('./plugins/pagedFind'));

  // Build indexes automatically only in development; production index
  // creation should be managed explicitly.
  lessonSchema.set('autoIndex', (app.get('env') === 'development'));

  app.db.model('Lesson', lessonSchema);
};
cas-nctu/multispec
Linux_build/SharedMultiSpec/SClassifyEchoPhase.cpp
// MultiSpec // // Copyright 1988-2020 Purdue Research Foundation // // Licensed under the Apache License, Version 2.0 (the "License"); you may not use // this file except in compliance with the License. You may obtain a copy of the // License at: https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software distributed // under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR // CONDITIONS OF ANY KIND, either express or implied. See the License for the specific // language governing permissions and limitations under the License. // // MultiSpec is curated by the Laboratory for Applications of Remote Sensing at // Purdue University in West Lafayette, IN and licensed by <NAME>. // // File: SClassifyEchoPhase.cpp // // Authors: <NAME> // <NAME> // // Revision date: 12/21/2017 // // Language: C // // System: Linux, Macintosh and Windows Operating Systems // // Brief description: This file contains functions that control the // echo classification processes. // //------------------------------------------------------------------------------------ #include "SMultiSpec.h" #if defined multispec_wx #endif #include "SEcho.h" SInt16 ClassifyNonHomogeneousCells ( SInt32 numberLines, statistics* class_stat, HDoublePtr si_ptr1, HSInt32Ptr epix_ptr, HDoublePtr wk1, double* thresholdTablePtr, HUCharPtr probabilityBufferPtr, AreaDescriptionPtr areaDescriptionPtr); void classify_pixel_using_ML ( statistics* class_stat, SInt16 nband, UInt32 ncls, HDoublePtr si_ptr1, double* wk3, HSInt32Ptr epix_ptr, HDoublePtr wk1); //------------------------------------------------------------------------------------ // Copyright 1988-2020 Purdue Research Foundation // // Function name: SInt16 phase1 // // Software purpose: Compute Cell Likelihood Values and Check Homogeneity // This routine reads "cell_width" lines of data at one time. 
// Homogeneous Cell: // FLDCLS - FLD's Class // IBUF - FLD Number // Since field is not assigned yet, it is 0. // // NON.Homog Cell: // FLDCLS - FLD's Class // IBUF - (Minus) Pixel's Class // // IBUF is stored in the epix_ptr vector. Note that the high order bit // (the sign bit) signifies whether the pixel is part of a homogeneous cell // (greater than 0) or not a part of one. The next high order bit signifies // whether the pixel is thresholded (1 signifies yes, 0 signifies no). Because // of the use of these 2 bits, the maximum number of fields that are possible // is 1,073,741,824. // ### // Data read mechanism must be refined. // change the function loglik_echo (), classify_pixel_using_ML () into macro // ### // // Parameters in: // // Parameters out: None // // Value Returned: error_code // // Called By: EchoClassifier in SClassifyEcho.cpp // // Coded By: <NAME> Date: 01/01/1989 // Revised By: <NAME> Date: 09/28/1991 // Revised By: <NAME> Date: 02/14/2014 //------------------------------------------------------------------------------------ SInt16 phase1 ( FileIOInstructionsPtr fileIOInstructionsPtr, statistics* class_stat, EchoClassifierVar* echo_info, double* pixelThresholdTablePtr, HUCharPtr probabilityBufferPtr, AreaDescriptionPtr areaDescriptionPtr) { double annexationThreshold_derived, lik_ratio, smax, threshold, xlik; //HEchoFieldInfoPtr fieldInfoPtr; FILE* fp; HFldLikPtr auxlikn[4], baseFieldLikePtr, cellLikPtr, *fieldLikeIndicesPtr, tAuxLikPtr, tFldLikPtr; HPtr fieldLikeFlagsPtr; double *cellThresholdTablePtr, *ex; HDoublePtr ex2; HSInt16Ptr cellClassPtr, fieldClassNumberPtr; HDoublePtr si_ptr1; HSInt32Ptr epixCurrentLineCell0Ptr, // epix_ptr0, epixCurrentLineCellPtr, // epix_ptr; epixPreviousLineCell0Ptr, epixPreviousLineCellPtr, savedLineCell0Ptr; HUInt32Ptr fieldLikelihoodTableIndexPtr, field_number_table; SInt32 cellBuf, // May be negative cellOrder[4] = {3,1,0,2}, inx, ibuf, // May be negative ibufn[4], iy, // from echo Variables lastcol_echo, 
line_x, ncl_icel, num_col, ncl_rem_icel, nrw_rem_icel, nhd2, numberMoveBytes; UInt32 count, epixLineCellBufferCount, icel_linlen, ix, iyy, lengthFieldLikeList, lineNumber, ncls, newfield, nextEmptySlot, nhd0, nhd1, //nhd2, nhd3, nrw_icel, // from echo Variables, unit_x_icel; SInt16 auxcls, cell_size, classNumber, error_code, nband; UInt16 auxcls_temp, cellLocation, cell_width, fldcls, fldclsn; Boolean combineLikeFieldsFlag, maxRatioFoundFlag, mixCellsFlag, useTempDiskFileFlag; char likeFlag1, likeFlag2, savedLikeFlag; error_code = 0; mixCellsFlag = echo_info->mixCellsFlag; combineLikeFieldsFlag = echo_info->combineLikeFieldsFlag; useTempDiskFileFlag = echo_info->useTempDiskFileFlag; // Set up Local variables related to image Specification nband = echo_info->nband; num_col = echo_info->num_col; fieldClassNumberPtr = echo_info->fieldClassNumberPtr; fieldLikelihoodTableIndexPtr = echo_info->fieldLikelihoodTableIndexPtr; field_number_table = echo_info->field_number_table; // Set up Local variables related to Statistics Specification ncls = echo_info->ncls; numberMoveBytes = sizeof (FldLik) * ncls; // Set up Local variables related to Echo Specification annexationThreshold_derived = echo_info->annexationThreshold_derived; cell_width = echo_info->cell_width; cell_size = echo_info->cell_size; nrw_icel = echo_info->nrw_icel; ncl_icel = echo_info->ncl_icel; nrw_rem_icel = echo_info->nrw_rem_icel; ncl_rem_icel = echo_info->ncl_rem_icel; icel_linlen = echo_info->icel_linlen; lastcol_echo = echo_info->lastcol_echo; cellThresholdTablePtr = &pixelThresholdTablePtr[gNumberProbabilityClasses]; // Derivations of the above line_x = num_col * nband; unit_x_icel = cell_width * nband; nhd0 = cell_width; // icel_linlen + cell_width; nhd1 = 0; // icel_linlen; nhd2 = -cell_width; // icel_linlen - cell_width; nhd3 = cell_width; // Initialize output Echo Parameters newfield = 0; echo_info->field_size = 0; echo_info->number_of_fields = 0; echo_info->number_of_homogeneous_cells = 0; // Memory 
Allocation ex2 = echo_info->work2; ex = echo_info->work1; cellClassPtr = echo_info->cellClassPtr; fieldLikeIndicesPtr = echo_info->fieldLikeIndicesPtr; fieldLikeFlagsPtr = echo_info->fieldLikeFlagsPtr; lengthFieldLikeList = echo_info->ncl_fldlik * 2; // Compute a Cell Likelihood and check Homogeneity //epix_ptr0 = echo_info->epix_ibufPtr; epixCurrentLineCell0Ptr = echo_info->epix_ibufPtr; epixPreviousLineCell0Ptr = echo_info->epix_ibuf2Ptr; epixLineCellBufferCount = num_col * cell_width * sizeof (SInt32); lineNumber = 0; auxlikn[0] = (HFldLikPtr)ex2; auxlikn[1] = auxlikn[0] + ncls; auxlikn[2] = auxlikn[1] + ncls; auxlikn[3] = auxlikn[2] + ncls; nextEmptySlot = 0; // Initialize field likelihood indices and flag vectors. tFldLikPtr = &echo_info->fldlikPtr[echo_info->line_fldlik]; likeFlag1 = 0x01; likeFlag2 = 0x02; for (iyy=0; iyy<lengthFieldLikeList; iyy++) { fieldLikeIndicesPtr[iyy] = tFldLikPtr; fieldLikeFlagsPtr[iyy] = likeFlag1 + likeFlag2; tFldLikPtr += ncls; } // end "for (iyy=0; iyy<bytes ;iyy++)" fp = (FILE*)fileIOInstructionsPtr; for (ix=1; ix<=nrw_icel; ix+=cell_width) { cellLikPtr = echo_info->fldlikPtr; lineNumber += cell_width; if (TickCount () >= gNextStatusTime) { LoadDItemValue (gStatusDialogPtr, IDC_Status18, (SInt32)lineNumber); LoadDItemValueWithCommas (gStatusDialogPtr, IDC_Status22, echo_info->number_of_fields); gNextStatusTime = TickCount () + gNextStatusTimeOffset; } // end "if (TickCount () >= gNextStatusTime)" // Read "Cell_width" Lines of Data if (error_code == 0) error_code = read_lines_of_data1 (fp, cell_width); if (error_code != 0) break; // Out of "for (ix=1;ix.." Loop si_ptr1 = (HDoublePtr)gOutputBufferPtr; //epix_ptr = epix_ptr0; epixCurrentLineCellPtr = epixCurrentLineCell0Ptr; epixPreviousLineCellPtr = epixPreviousLineCell0Ptr; // Scan line from left to right assigning homogeneous cells to // nearest field if possible. Do not create any new fields // during this pass. 
for (iy=1; iy<=(SInt32)ncl_icel; iy+=cell_width) { // Compute Cell Likelihood Value loglik_echo (class_stat, nband, ncls, si_ptr1, cellLikPtr, line_x, ex2, ex, &xlik, &classNumber, cell_width); threshold = *(echo_info->thresholds_phase1 + classNumber - 1); *cellClassPtr = classNumber; // Check Homogeneity if (xlik > threshold) { // Homogeneous Cell echo_info->number_of_homogeneous_cells++; // Check Annexation with four surrounding cells around the target // cell. The cells are: // | S(0) | S(1) | S(2) | // | S(3) | T | ibuf = ibufn[0] = ibufn[1] = ibufn[2] = ibufn[3] = 0; if (ix > 1) { //ibufn[1] = *(epix_ptr - nhd1) & 0xbfffffff; ibufn[1] = *(epixPreviousLineCellPtr - nhd1) & 0xbfffffff; if (iy > 1) //ibufn[0] = *(epix_ptr - nhd0) & 0xbfffffff; ibufn[0] = *(epixPreviousLineCellPtr - nhd0) & 0xbfffffff; if (iy < (SInt32)ncl_icel-cell_width) //ibufn[2] = *(epix_ptr - nhd2) & 0xbfffffff; ibufn[2] = *(epixPreviousLineCellPtr - nhd2) & 0xbfffffff; } // "if (ix > 1)" if (iy > 1) //ibufn[3] = *(epix_ptr - nhd3) & 0xbfffffff; ibufn[3] = *(epixCurrentLineCellPtr - nhd3) & 0xbfffffff; smax = -DBL_MAX; auxcls = -1; // Do some analysis of the 4 cells around the target cell. // If the class of the target cell is the same as the field // class of one of the 4 surrounding cells then that // surrounding cell/field will have the max likelihood ratio. // The others do not need to be checked. maxRatioFoundFlag = FALSE; for (iyy=0; iyy<4; iyy++) { cellLocation = (SInt16)cellOrder[iyy]; cellBuf = ibufn[cellLocation]; if (maxRatioFoundFlag) ibufn[cellLocation] = 0; else // !maxRatioFoundFlag { if (cellBuf > 0) { inx = field_number_table[cellBuf]; fldcls = fieldClassNumberPtr[inx]; if (fldcls == (UInt16)*cellClassPtr) { maxRatioFoundFlag = TRUE; // Now "turn off" any cell locations before // this one. for (cellBuf=0; cellBuf<(SInt32)iyy; cellBuf++) { cellLocation = (SInt16)cellOrder[cellBuf]; ibufn[cellLocation] = 0; } // end "for (cellBuf=0; cellBuf<iyy; ..." 
} // end "if (fldcls == (UInt16)*cellClassPtr)" } // end "if (cellBuf > 0)" } // end "else !maxRatioFoundFlag" } // end "for (iyy=0; iyy<4; iyy++)" for (iyy=0; iyy<4; iyy++) { cellLocation = (SInt16)cellOrder[iyy]; cellBuf = ibufn[cellLocation]; if ((cellBuf > 0) && (cellBuf != ibuf)) { inx = field_number_table[cellBuf]; fldcls = fieldClassNumberPtr[inx]; if (mixCellsFlag || fldcls == (UInt16)*cellClassPtr) { inx = fieldLikelihoodTableIndexPtr[inx]; lik_ratio = log_lik_ratio ( fieldLikeIndicesPtr[inx], cellLikPtr, auxlikn[cellLocation], fldcls, *cellClassPtr, &auxcls_temp, ncls); if ((lik_ratio > smax) && (lik_ratio > annexationThreshold_derived)) { ibuf = cellBuf; auxcls = auxcls_temp; tAuxLikPtr = auxlikn[cellLocation]; smax = lik_ratio; } // "if ((lik_ratio > smax) ..." } // end "if ((mixCellsFlag || ..." } // "if ((cellBuf > 0) && (cellBuf != ibuf))" } // end "for (iyy=0; iyy<4; iyy++)" if (ibuf > 0) { // Assigning to field inx = field_number_table[ibuf]; fieldClassNumberPtr[inx] = auxcls; inx = fieldLikelihoodTableIndexPtr[inx]; fieldLikeFlagsPtr[inx] &= likeFlag1; tFldLikPtr = fieldLikeIndicesPtr[inx]; echo_info->field_size += cell_size; BlockMoveData (tAuxLikPtr, tFldLikPtr, numberMoveBytes); stuffing (epixCurrentLineCellPtr, ibuf, cell_width, cell_width, num_col); } // end "if (ibuf > 0)" else // ibuf == 0 // Indicate that cell has not been assigned to a field // yet. It will be done during right to left pass. 
stuffing (epixCurrentLineCellPtr, 0, cell_width, cell_width, num_col); } else // xlik <= threshold { // Non-Homogeneous Cells stuffing (epixCurrentLineCellPtr, -1, cell_width, cell_width, num_col); } // end else xlik <= threshold si_ptr1 += unit_x_icel; // cell_width * nband epixCurrentLineCellPtr += cell_width; epixPreviousLineCellPtr += cell_width; cellLikPtr += ncls; cellClassPtr++; // Exit routine if user has "command period" down if (TickCount () >= gNextTime) { if (!CheckSomeEvents (osMask+keyDownMask+updateMask+mDownMask+mUpMask)) { error_code = 1; break; } // end "if (!CheckSomeEvents..." } // end "if (TickCount () >= gNextTime)" } // for iy if (error_code == 1) break; // Initialize the right remaining boundaries as non-homogeneous // pixels. stuffing (epixCurrentLineCellPtr, -1, cell_width, (SInt16)ncl_rem_icel, num_col); // Now scan line from right to left assigning homogeneous cells // to nearest field if possible. If there are no nearest // then create a new field. // The nearest fields to check will be itself if assigned // during left to right pass and the field to the right. for (iy=lastcol_echo; iy>=0; iy-=cell_width) { cellClassPtr--; cellLikPtr -= ncls; epixCurrentLineCellPtr -= cell_width; // Determine if cell is homogeneous. if (*epixCurrentLineCellPtr >= 0) { cellBuf = 0; ibuf = *epixCurrentLineCellPtr; if (iy < lastcol_echo) cellBuf = *(epixCurrentLineCellPtr + cell_width); // Get Cell Likelihood Value // Check right most cell. 
if ((cellBuf > 0) && (*epixCurrentLineCellPtr != (SInt16)cellBuf)) { inx = field_number_table[cellBuf]; fldcls = fieldClassNumberPtr[inx]; if (mixCellsFlag || fldcls == (UInt16)*cellClassPtr) { inx = fieldLikelihoodTableIndexPtr[inx]; lik_ratio = log_lik_ratio ( fieldLikeIndicesPtr[inx], cellLikPtr, auxlikn[0], fldcls, *cellClassPtr, &auxcls_temp, ncls); if (lik_ratio > annexationThreshold_derived) { auxcls = auxcls_temp; tAuxLikPtr = auxlikn[0]; ibuf = cellBuf; } // end "if (lik_ratio > annexationThreshold_derived)" } // end "if (mixCellsFlag || ..." if (ibuf > 0 && *epixCurrentLineCellPtr > 0) { // Check if right cell has a higher ratio than than // for current assignment. inx = field_number_table[*epixCurrentLineCellPtr]; fldcls = fieldClassNumberPtr[inx]-1; inx = fieldLikelihoodTableIndexPtr[inx]; tFldLikPtr = fieldLikeIndicesPtr[inx]; smax = cellLikPtr[fldcls] - cellLikPtr[*cellClassPtr-1]; if (lik_ratio > smax) { // Reassign the cell remove from current field. inx = field_number_table[*epixCurrentLineCellPtr]; fieldClassNumberPtr[inx] = subtract_log_lik (tFldLikPtr,cellLikPtr,ncls); echo_info->field_size -= cell_size; *epixCurrentLineCellPtr = 0; } // end "if (lik_ratio > smax)" } // end "if (ibuf > 0 && *epixCurrentLineCellPtr > 0)" } // "if ((cellBuf > 0) && (cellBuf != ibuf))" if (ibuf == 0) { // New field if (newfield >= (UInt32)echo_info->current_max_field_number) return (3); ibuf = ++newfield; echo_info->number_of_fields++; field_number_table[ibuf] = ibuf; tAuxLikPtr = cellLikPtr; auxcls = *cellClassPtr; // Get next empty location for field likelihood values. for (iyy=0; iyy<lengthFieldLikeList; iyy++) { if (nextEmptySlot >= lengthFieldLikeList) nextEmptySlot = 0; if (fieldLikeFlagsPtr[nextEmptySlot] & likeFlag1) break; nextEmptySlot++; } // end "for (iyy=0, iyy..." 
if (iyy >= lengthFieldLikeList) return (3); fieldLikelihoodTableIndexPtr[ibuf] = (UInt32)nextEmptySlot; fieldLikeFlagsPtr[nextEmptySlot] &= likeFlag2; nextEmptySlot++; } // if (ibuf == 0) if (*epixCurrentLineCellPtr == 0) { inx = field_number_table[ibuf]; fieldClassNumberPtr[inx] = auxcls; inx = fieldLikelihoodTableIndexPtr[inx]; fieldLikeFlagsPtr[inx] &= likeFlag1; echo_info->field_size += cell_size; BlockMoveData (tAuxLikPtr, fieldLikeIndicesPtr[inx], numberMoveBytes); stuffing (epixCurrentLineCellPtr, ibuf, cell_width, cell_width, num_col); } // end "if (*epixCurrentLineCellPtr == 0)" } // end "if (ibuf >= 0)" } // end "for (iy=lastcol_echo; iy>=0; iy-=cell_width)" // Exit routine if user has "command period" down if (TickCount () >= gNextTime) { if (!CheckSomeEvents (osMask+keyDownMask+updateMask+mDownMask+mUpMask)) { error_code = 1; break; } // end "if (!CheckSomeEvents..." } // end "if (TickCount () >= gNextTime)" if (combineLikeFieldsFlag && ix > 1) { // Scan line from left to right combining like fields. // NOTE??!! // The annexation threshold may need to be revised to take // into account the different number of cells in each field. 
epixPreviousLineCellPtr = epixPreviousLineCell0Ptr; for (iy=1; iy<=ncl_icel; iy+=cell_width) { ibuf = *epixCurrentLineCellPtr; if (ibuf > 0) { ibuf = field_number_table[ibuf]; fldcls = fieldClassNumberPtr[ibuf]; inx = fieldLikelihoodTableIndexPtr[ibuf]; baseFieldLikePtr = fieldLikeIndicesPtr[inx]; ibufn[0] = ibufn[1] = ibufn[2] = ibufn[3] = 0; ibufn[1] = *(epixPreviousLineCellPtr - nhd1) & 0xbfffffff; if (iy > 1) { ibufn[0] = *(epixPreviousLineCellPtr - nhd0) & 0xbfffffff; ibufn[3] = *(epixCurrentLineCellPtr - nhd3) & 0xbfffffff; } // end "if (iy > 1)" if (iy < ncl_icel-cell_width) ibufn[2] = *(epixPreviousLineCellPtr - nhd2) & 0xbfffffff; for (iyy=0; iyy<4; iyy++) { cellLocation = (SInt16)cellOrder[iyy]; cellBuf = ibufn[cellLocation]; if (cellBuf > 0) cellBuf = field_number_table[ibufn[cellLocation]]; if ((cellBuf > 0) && (cellBuf != ibuf)) { fldclsn = fieldClassNumberPtr[cellBuf]; if (fldcls == fldclsn) { inx = fieldLikelihoodTableIndexPtr[cellBuf]; lik_ratio = log_lik_ratio ( fieldLikeIndicesPtr[inx], baseFieldLikePtr, auxlikn[0], fldcls, fldclsn, &auxcls_temp, ncls); if (lik_ratio > annexationThreshold_derived) { fieldLikeFlagsPtr[inx] |= likeFlag2; for (inx=1; inx<=(SInt32)newfield; inx++) { if (field_number_table[inx] == (UInt32)cellBuf) field_number_table[inx] = ibuf; } // end "for (inx=1; inx<=..." echo_info->number_of_fields--; BlockMoveData ( auxlikn[0], baseFieldLikePtr, numberMoveBytes); } // "if (lik_ratio > ..." } // end "if (fldcls == fldclsn)" } // "if ((cellBuf > 0) && (cellBuf != ibuf))" } // end "for (iyy=0; iyy<4; iyy++)" } // end "if (ibuf > 0)" epixCurrentLineCellPtr += cell_width; epixPreviousLineCellPtr += cell_width; } // end "for (iy=1;iy <= ncl_icel; iy += cell_width)" } // end "if (combineLikeFieldsFlag && ix > 1)" // Classify the non-homogeneous cells/pixels and write the // probability information if requested. 
areaDescriptionPtr->line = fileIOInstructionsPtr->lineStart; error_code = ClassifyNonHomogeneousCells ( cell_width, class_stat, (HDoublePtr)gOutputBufferPtr, epixCurrentLineCell0Ptr, // epix_ptr0, ex2, pixelThresholdTablePtr, probabilityBufferPtr, areaDescriptionPtr); // Write of probability information for the line(s) if // requested. if (probabilityBufferPtr != NULL && error_code == 0) { error_code = SaveProbabilityInformation (cell_width, cellThresholdTablePtr, class_stat, probabilityBufferPtr, cellLikPtr, epixCurrentLineCell0Ptr, // epix_ptr0, fieldClassNumberPtr, field_number_table, areaDescriptionPtr); } // end "if (probabilityBufferPtr != NULL && error_code == 0)" if (useTempDiskFileFlag && error_code == 0) { // Write the current line-cell data to the temp file. count = epixLineCellBufferCount; error_code = MWriteData (echo_info->tempFileStreamPtr, &count, epixCurrentLineCell0Ptr, kErrorMessages); savedLineCell0Ptr = epixCurrentLineCell0Ptr; epixCurrentLineCell0Ptr = epixPreviousLineCell0Ptr; epixPreviousLineCell0Ptr = savedLineCell0Ptr; } // end "if (useTempDiskFileFlag && error_code == 0)" else // !useTempDiskFileFlag { //epix_ptr0 += icel_linlen; // num_col * cell_width epixPreviousLineCell0Ptr = epixCurrentLineCell0Ptr; epixCurrentLineCell0Ptr += icel_linlen; // num_col * cell_width } // else !useTempDiskFileFlag // Exchange likelihood list flags so that alternating lists will be // used. And intialize the list for the next line set. 
savedLikeFlag = likeFlag1; likeFlag1 = likeFlag2; likeFlag2 = savedLikeFlag; for (iyy=0; iyy<lengthFieldLikeList; iyy++) { fieldLikeFlagsPtr[iyy] |= likeFlag2; } // end "for (iyy=0; iyy<lengthFieldLikeList ;iyy++)" if (error_code != noErr) break; } // end "for (ix=1; ix<=nrw_icel; ix+=cell_width)" // Perform maximum likelihood classification of remaining rows, // if any if ((error_code == 0) && (nrw_rem_icel > 0)) { epixCurrentLineCellPtr = epixCurrentLineCell0Ptr; iyy = nrw_rem_icel * num_col; for (ix=0; ix<iyy; ix++) *epixCurrentLineCellPtr++ = -1; error_code = read_lines_of_data1 (fp, nrw_rem_icel); // Classify the non-homogeneous cells/pixels and write the // probability information if requested. areaDescriptionPtr->line = fileIOInstructionsPtr->lineStart; if (error_code == 0) error_code = ClassifyNonHomogeneousCells ( nrw_rem_icel, class_stat, (HDoublePtr)gOutputBufferPtr, epixCurrentLineCell0Ptr, // epix_ptr0, ex2, pixelThresholdTablePtr, probabilityBufferPtr, areaDescriptionPtr); // Write of probability information for the line(s) if // requested. if (probabilityBufferPtr != NULL && error_code == 0) error_code = SaveProbabilityInformation (nrw_rem_icel, NULL, class_stat, probabilityBufferPtr, cellLikPtr, epixCurrentLineCell0Ptr, // epix_ptr0, fieldClassNumberPtr, field_number_table, areaDescriptionPtr); if (useTempDiskFileFlag && error_code == 0) { // Write the current line-cell data to the temp file. count = epixLineCellBufferCount; error_code = MWriteData (echo_info->tempFileStreamPtr, &count, epixCurrentLineCell0Ptr, kErrorMessages); } // end "if (useTempDiskFileFlag && error_code == 0)" } // "if ((error_code == 0) && (nrw_rem_icel > 0))" LoadDItemValue (gStatusDialogPtr, IDC_Status18, (SInt32)lineNumber); LoadDItemValueWithCommas (gStatusDialogPtr, IDC_Status22, (SInt64)echo_info->number_of_fields); // Save maximum field number. 
echo_info->current_max_field_number = newfield; return (error_code); } // end "phase1 //------------------------------------------------------------------------------------ // Copyright 1988-2020 Purdue Research Foundation // // Function name: void ClassifyNonHomogeneousCells // // Software purpose: The purpose of this routine is to classify the // the non-homogeneous cells and save the probability // indeces for the non-homogenous cells if requested // for the pixels in 'cell_width' lines. // // Parameters in: // // Parameters out: None // // Value Returned: None // // Called By: // // Coded By: <NAME> Date: 05/25/1993 // Revised By: <NAME> Date: 12/29/2005 SInt16 ClassifyNonHomogeneousCells ( SInt32 numberLines, statistics* class_stat, HDoublePtr si_ptr1, HSInt32Ptr epix_ptr, HDoublePtr wk1, // nband by nband work space double* thresholdTablePtr, HUCharPtr probabilityBufferPtr, AreaDescriptionPtr areaDescriptionPtr) { double rrDivide2; Point point; RgnHandle rgnHandle; HUCharPtr savedProbabilityBufferPtr; SInt16* thresholdProbabilityPtr; SInt32 column, line, numberColumns; UInt32 numberClasses; SInt16 numberChannels, thresholdCode; Boolean polygonFieldFlag; // Set up local variables. 
numberClasses = gEchoClassifierVariablePtr->ncls; numberColumns = gEchoClassifierVariablePtr->num_col; numberChannels = gEchoClassifierVariablePtr->nband; polygonFieldFlag = areaDescriptionPtr->polygonFieldFlag; rgnHandle = areaDescriptionPtr->rgnHandle; point.v = (SInt16)areaDescriptionPtr->line; if (polygonFieldFlag && rgnHandle == NULL) return (1); thresholdCode = 0; if (gClassifySpecsPtr->thresholdFlag) thresholdCode = gClassifySpecsPtr->probabilityThresholdCode; thresholdProbabilityPtr = gClassifySpecsPtr->thresholdProbabilityPtr; savedProbabilityBufferPtr = probabilityBufferPtr; for (line=0; line<numberLines; line++) { point.h = (SInt16)areaDescriptionPtr->columnStart; for (column=0; column<numberColumns; column++) { // Classify any non-homogeneous pixels using the maximum // likelihood classifier. if (*epix_ptr < 0) { if (!polygonFieldFlag || PtInRgn (point, rgnHandle)) { classify_pixel_using_ML (class_stat, numberChannels, numberClasses, si_ptr1, &rrDivide2, epix_ptr, wk1); // Fill probability buffer if needed. if (savedProbabilityBufferPtr) { //maxClass = (SInt16)(labs (*epix_ptr) - 1); // Cell likehood value is classConstant - r**2/2 // But threshold is for the value r**2/2 //dValue = class_stat[maxClass].classConstantML - xlik[maxClass]; // Get the threshold table index. *probabilityBufferPtr = (UInt8)GetThresholdClass (rrDivide2, thresholdTablePtr); gTempDoubleVariable1 += thresholdProbabilityPtr[*probabilityBufferPtr]; if (*probabilityBufferPtr > thresholdCode) *epix_ptr &= 0xbfffffff; } // end "if (savedProbabilityBufferPtr)" else // !savedProbabilityBufferPtr *epix_ptr &= 0xbfffffff; } // end "if (!polygonFieldFlag || PtInRgn (..." else // polygonFieldFlag && !PtInRgn (point, rgnHandle) { if (savedProbabilityBufferPtr) *probabilityBufferPtr = 0; } // end "else polygonFieldFlag && !PtInRgn (..." 
} // end "if (*epix_ptr < 0)" epix_ptr++; si_ptr1 += numberChannels; probabilityBufferPtr++; point.h++; // Exit routine if user has "command period" down if (TickCount () >= gNextTime) { if (!CheckSomeEvents (osMask+keyDownMask+updateMask+mDownMask+mUpMask)) return (1); } // end "if (TickCount () >= gNextTime)" } // end "for (column=0; column<numberColumns; column++)" point.v++; } // end "for (line=0; line<numberLines; line++)" return (0); } // end "ClassifyNonHomogeneousCells" //------------------------------------------------------------------------------------ // FUNCTION : classify_pixel_using_ML // Purpose : Classify given pixel using pixelwise maximum // likelihhod classifier. // Pixel data is in "unsigned char" format. // Coded By: <NAME> Date: 01/01/1989 // Revised By: <NAME> Date: 09/28/1991 // Revised By: <NAME> Date: 12/29/2005 //------------------------------------------------------------------------------------ void classify_pixel_using_ML ( statistics* stat_ptr, SInt16 nband, UInt32 ncls, HDoublePtr si_ptr1, double* rrDivide2Ptr, HSInt32Ptr epix_ptr, HDoublePtr wk1) { double discriminantMax, dValue, rrDivide2, rrDivide2Max; HDoublePtr d_ptr, mean_ptr, tsi_ptr; UInt32 class_assigned, classNumber; SInt16 channel; discriminantMax = -DBL_MAX; class_assigned = 0; for (classNumber=1; classNumber<=ncls; classNumber++) { // Compute Quadratic Term d_ptr = wk1; mean_ptr = stat_ptr->vmean; tsi_ptr = si_ptr1; for (channel=0; channel<nband; channel++) *d_ptr++ = *tsi_ptr++ - *mean_ptr++; symtrix (wk1, stat_ptr->covinv, &rrDivide2, nband); rrDivide2 /= 2; dValue = -rrDivide2 + stat_ptr->classConstantML; if (dValue > discriminantMax) { discriminantMax = dValue; rrDivide2Max = rrDivide2; class_assigned = classNumber; } // "if (dValue > discriminantMax) " stat_ptr++; } // for class *epix_ptr = -(SInt32)class_assigned; *rrDivide2Ptr = rrDivide2Max; } // end "classify_pixel_using_ML" //------------------------------------------------------------------------------------ 
// FUNCTION : loglik_echo // Purpose : Compute Cell Loglik and find cell class. // Coded By: <NAME> Date: 01/01/1989 // Revised by: <NAME> Date: 09/28/1991 // Revised By: <NAME> Date: 12/29/2005 //------------------------------------------------------------------------------------ void loglik_echo ( statistics* class_stat, SInt16 nband, UInt32 ncls, HDoublePtr si_ptr0, HFldLikPtr fldlik_ptr, SInt32 line_x, HDoublePtr ex2, double* ex, double* xmax, SInt16* cell_class, SInt16 cell_width) { double discriminantMax, dValue1, dValue2; double* lexptr; statistics* stat_ptr; HDoublePtr d_ptr, d_ptr1, si_ptr1, si_ptr2; SInt32 ix, iy, kx, ky; UInt32 classIndex; // Initialize ex (nband x 1) vector and ex2 (nband x nband) upper right // triangular matrix. d_ptr = ex; d_ptr1 = ex2; for (kx=1; kx<=nband; kx++) { *d_ptr = 0; d_ptr++; for (ky=kx; ky<=nband; ky++) { *d_ptr1 = 0; d_ptr1++; } // end "for (ky=kx; ky<=nband; ky++)" } // for kx // Compute ex2 (upper right triangular matrix) and ex // ex = sum of xi's for all pixels in cell where i represents band // ex2 = sum of xi*xj for all pixels in cell where i and j represents // band. for (ix=0; ix<cell_width; ix++) { si_ptr1 = si_ptr0; for (iy=0; iy<cell_width; iy++) { lexptr = ex; d_ptr1 = ex2; for (kx=1; kx<=nband; kx++) { si_ptr2 = si_ptr1; *lexptr += dValue1 = *si_ptr1; // ex lexptr++; si_ptr1++; for (ky=kx; ky<=nband; ky++) { *d_ptr1 += dValue1 * *si_ptr2; // ex2 d_ptr1++; si_ptr2++; } // end "for (ky=kx; ky<=nband; ky++)" } // end "for (kx=1; kx<=nband; kx++)" } // end "for (iy=0; iy<cell_width; iy++)" si_ptr0 += line_x; } // end " for (ix=0; ix<cell_width; ix++)" // Compute Cell Likelihood Value *cell_class = 0; discriminantMax = -DBL_MAX; stat_ptr = class_stat; for (classIndex=0; classIndex<ncls; classIndex++) { dValue1 = 0; // sprd represents 4 * Mt_Invcov_M for given class. 
dValue2 = stat_ptr->sprd; // Now calculate Xt_Invcov_X for given class d_ptr = ex2; d_ptr1 = stat_ptr->covinv; for (kx=1; kx<=nband; kx++) { dValue2 += *d_ptr1 * *d_ptr; // Diagonal d_ptr1++; d_ptr++; for (ky=kx+1; ky<=nband; ky++) { dValue1 += *d_ptr1 * *d_ptr; // Off Diagonal d_ptr1++; d_ptr++; } // end "for (ky=kx+1; ky<=nband; ky++)" d_ptr1 += kx; } // for kx // Now calculate -.5 * (Xt_Invcov_X + Mt_Invcov_M) dValue2 /= 2; dValue2 += dValue1; dValue2 = -dValue2; // Now add in the constant for the class. = cellsize times // -.5*N * log (2pi) - .5 * log det (cov) + log (prior probability) dValue2 += stat_ptr->classConstant; // Now add in Xt * Invcov_M d_ptr = stat_ptr->vprd; lexptr = ex; for (kx=0; kx<nband; kx++) { dValue2 += *d_ptr * *lexptr; d_ptr++; lexptr++; } // end "for (kx=0; kx<nband; kx++)" fldlik_ptr [classIndex] = dValue2; if (dValue2 > discriminantMax) { discriminantMax = dValue2; *cell_class = (SInt16)(classIndex + 1); } // "if (dValue2 > discriminantMax)" stat_ptr++; } // for class *xmax = discriminantMax; } // end "loglik_echo"
vasireddy99/opentelemetry-cpp
api/include/opentelemetry/nostd/variant.h
<gh_stars>100-1000 // Copyright The OpenTelemetry Authors // SPDX-License-Identifier: Apache-2.0 #pragma once #include "opentelemetry/version.h" #ifdef HAVE_CPP_STDLIB # include "opentelemetry/std/variant.h" #else # ifndef HAVE_ABSEIL // We use a LOCAL snapshot of Abseil that is known to compile with Visual Studio 2015. // Header-only. Without compiling the actual Abseil binary. As Abseil moves on to new // toolchains, it may drop support for Visual Studio 2015 in future versions. # if defined(__EXCEPTIONS) # include <exception> OPENTELEMETRY_BEGIN_NAMESPACE namespace nostd { class bad_variant_access : public std::exception { public: virtual const char *what() const noexcept override { return "bad_variant_access"; } }; [[noreturn]] inline void throw_bad_variant_access() { throw bad_variant_access{}; } } // namespace nostd OPENTELEMETRY_END_NAMESPACE # define THROW_BAD_VARIANT_ACCESS opentelemetry::nostd::throw_bad_variant_access() # else # define THROW_BAD_VARIANT_ACCESS std::terminate() # endif # endif # ifdef _MSC_VER // Abseil variant implementation contains some benign non-impacting warnings // that should be suppressed if compiling with Visual Studio 2017 and above. # pragma warning(push) # pragma warning(disable : 4245) // conversion from int to const unsigned _int64 # pragma warning(disable : 4127) // conditional expression is constant # endif # ifdef HAVE_ABSEIL # include <absl/types/variant.h> # else # include "./absl/types/variant.h" # endif # ifdef _MSC_VER # pragma warning(pop) # endif OPENTELEMETRY_BEGIN_NAMESPACE namespace nostd { # ifdef HAVE_ABSEIL using absl::bad_variant_access; # endif using absl::get; using absl::get_if; using absl::holds_alternative; using absl::monostate; using absl::variant; using absl::variant_alternative_t; using absl::variant_size; using absl::visit; } // namespace nostd OPENTELEMETRY_END_NAMESPACE #endif
THE-STUDNET/front
src/app/components/app_layout/controllers/layout.js
// AngularJS controller for the application-wide layout: header templates,
// session/user bootstrap, notifications, friend requests, conversations and
// misc global UI actions (scroll-to-top, support chat, confirm modal).
//
// Fixes vs. previous revision:
//  - removed duplicate definition of this.openPageModal;
//  - '$destroy' now unregisters BOTH events_service listeners and the window
//    scroll listener (they previously leaked across controller lifetimes).
angular.module('app_layout').controller('layout_controller', [
    '$scope', 'session', 'user_model', 'page_model', 'user_courses', 'connections',
    'account', 'notifier_service', '$translate', 'welcome_service', 'modal_service',
    'page_modal_service', 'social_service', 'events_service', 'global_search',
    'notifications_service', 'conversations', 'events', 'filters_functions', 'state_service',
    function( $scope, session, user_model, page_model, user_courses, connections, account,
              notifier_service, $translate, welcome_service, modal_service, page_modal_service,
              social_service, events_service, global_search, notifications_service,
              conversations, events, filters_functions, state_service){

        var ctrl = this;

        // True when running inside the mobile app wrapper (custom user agent token).
        ctrl.isApp = (navigator.userAgent.indexOf('twicapp') !== -1);

        // Partial templates used by the layout views.
        this.tpl = {
            header: 'app/components/app_layout/tpl/header.html',
            mobile_header: 'app/components/app_layout/tpl/mobile_header.html',
            desktop_header: 'app/components/app_layout/tpl/desktop_header.html',
            confirm: 'app/shared/elements/confirm/modal.html'
        };

        this.notifications = notifications_service;
        // NOTE(review): name keeps the historical typo ("Studnet") because templates
        // bind to it; presumably roles[1] marks a student-admin role — confirm.
        this.isStudnetAdmin = session.roles[1];
        ctrl.state_service = state_service;

        // Show the welcome flow when the user has no welcome date yet, or when
        // the stored date is already in the past.
        user_model.get([session.id]).then(function(){
            var me = user_model.list[session.id].datum;
            if(!me.welcome_date || new Date(me.welcome_date) < new Date()){
                welcome_service.init();
            }
        });

        // Opens the "start form" settings modal anchored to the focused element.
        function openStartForm(){
            modal_service.open({
                label: 'Settings',
                template: 'app/shared/custom_elements/startform/modal.html',
                reference: document.activeElement,
                scope : { email : session.email }
            });
        }

        this.openSF = function(){
            openStartForm();
            $scope.$evalAsync();
        };

        if(session.organization_id){
            page_model.queue([session.organization_id]);
        }
        connections.load();
        user_courses.load([session.id], true).then(function(){
            this.courses = user_courses;
        }.bind(this));

        // Expose shared models/services to the layout templates.
        this.session = session;
        this.users = user_model.list;
        this.pages = page_model.list;
        this.connecteds = connections.connecteds;
        this.awaitings = connections.awaitings;
        this.global_search = global_search;

        this.openPageModal = function($event, type, page){
            page_modal_service.open( $event, type, page);
        };

        this.logout = function(){ account.logout(); };

        // Closes the currently opened full-screen conversation, if any.
        ctrl.closeFullCVN = function(){
            if( social_service.current ){
                social_service.closeConversation(social_service.current);
            }
        };

        // Marks a notification as read and opens the related post in a modal,
        // anchored to the desktop notification dropdown when triggered from it.
        ctrl.notifAction = function( ntf, $event ){
            $event.stopPropagation();
            ntf.read = true;
            var ref = document.activeElement;
            if( document.querySelector('#dktp-header').contains( $event.target ) ){
                ref = document.querySelector('#desktopntf');
            }
            modal_service.open({
                label: '',
                template: 'app/shared/custom_elements/post/view_modal.html',
                scope:{
                    id: ntf.object.origin_id || ntf.object.id,
                    ntf: ntf,
                    notifications: notifications_service
                },
                reference: ref
            });
        };

        // Total unread count across channels, group conversations and 1:1 chats.
        ctrl.messagesUnread = function(){
            return conversations.channel_unreads.length
                + conversations.conversation_unreads.length
                + Object.keys(conversations.connection_unreads).length;
        };

        ctrl.openMobileConversations = function(){ social_service.openMobile(); };

        ctrl.friendRequestModal = function(){
            modal_service.open({
                label: 'Friend request(s)',
                template: 'app/components/app_layout/tpl/friendrequestmodal.html',
                scope:{
                    declineRequest: ctrl.declineRequest,
                    acceptRequest: ctrl.acceptRequest,
                    awaitings: ctrl.awaitings
                },
                reference: document.activeElement
            });
        };

        ctrl.notificationsModal = function(){
            modal_service.open({
                label: 'Your notifications',
                template: 'app/components/app_layout/tpl/notificationmodal.html',
                scope:{
                    notifications: ctrl.notifications,
                    notifAction: ctrl.notifAction
                },
                reference: document.activeElement
            });
        };

        // Declines a pending connection request and notifies with a translated message.
        this.declineRequest = function( id ){
            connections.decline(id).then(function(){
                var model = user_model.list[id].datum;
                $translate('ntf.co_req_refused',{username: model.firstname+' '+model.lastname}).then(function( translation ){
                    notifier_service.add({type:'message',message: translation});
                });
            });
        };

        // Accepts a pending connection request and notifies with a translated message.
        this.acceptRequest = function( id ){
            connections.accept(id).then(function(){
                var model = user_model.list[id].datum;
                $translate('ntf.is_now_connection',{username: model.firstname+' '+model.lastname}).then(function( translation ){
                    notifier_service.add({type:'message',message: translation});
                });
            });
        };

        // Opens the third-party "linkedchat" support widget pre-filled with the
        // current user's identity. `linkedchat` is a global provided elsewhere.
        this.support = function(){
            user_model.get([session.id]).then(function(){
                var user = user_model.list[session.id].datum;
                linkedchat.name = user.firstname + ' ' + user.lastname + ' (' + filters_functions.username(user) +')';
                linkedchat.email = user.email;
                if(user.avatar){
                    linkedchat.avatar = filters_functions.dmsLink(user.avatar);
                }
                linkedchat.titleOpened = "Ask us everything";
                linkedchat.updateInfo();
                linkedchat.openChat();
            });
        };

        // Generic confirm dialog; runs `onsubmit` with `params` when confirmed.
        this.confirm = function($event, question, onsubmit, params, context){
            modal_service.open({
                template : this.tpl.confirm,
                reference : $event.target,
                scope : {
                    question : question,
                    confirm : function(){
                        return onsubmit.apply(context || this, params);
                    }
                },
                is_alert : true
            });
        };

        this.backToTop = function(){
            document.body.scrollTop = document.documentElement.scrollTop = 0;
            this.scrolled = false;
        };

        // Tracks whether the page is scrolled past 200px (shows "back to top").
        var onScroll = function(){
            var scrolled = this.scrolled;
            this.scrolled = window.scrollY > 200;
            if( this.scrolled !== scrolled ){
                $scope.$evalAsync();
            }
        }.bind(this);
        window.addEventListener('scroll', onScroll);

        // SOCIAL COLUMN STUFF: Expose social service & eval scope on state change.
        this.social = social_service;
        events_service.on('social.column_state_change', evalAsync);
        events_service.on(events.notification_received, evalAsync);
        $scope.$on('$destroy', function(){
            // Unregister everything registered above to avoid leaking listeners.
            events_service.off('social.column_state_change', evalAsync);
            events_service.off(events.notification_received, evalAsync);
            window.removeEventListener('scroll', onScroll);
        });
        function evalAsync(){ $scope.$evalAsync(); }
    }
]);
produck/duck
packages/duck-log/src/normalize.js
'use strict'; const { Normalizer, Validator } = require('@produck/duck'); const schema = require('./OptionsSchema.json'); const DEFAULT = require('./CategoryLogger/default'); schema.definitions.defaultLevels.enum = DEFAULT.LEVELS; module.exports = Normalizer({ defaults: () => ({}), handler: function normalize(_options) { const options = {}; for (const categoryName in _options) { options[categoryName] = _options[categoryName]; } return options; }, validate: Validator(schema) });
babadee001/ovie-recipes
client/src/actions/index.js
import axios from 'axios';
import { actions as toastrActions } from 'react-redux-toastr';
import { bindActionCreators } from 'redux';

export const BASE_URL = '/api/v1';

// Builds the axios config: adds the JWT header when the request needs auth.
const authHeaders = isAuthReq => (
  isAuthReq
    ? { headers: { 'x-access-token': window.localStorage.getItem('token') } }
    : {}
);

// Shows a success toastr (when a toastr id is supplied) and auto-removes it.
const showSuccessToastr = (dispatch, toastrConstant, message) => {
  if (toastrConstant.length > 2) {
    const toastr = bindActionCreators(toastrActions, dispatch);
    toastr.add({
      id: toastrConstant,
      type: 'success',
      title: 'Success',
      message,
      timeout: 5000,
    });
    setTimeout(() => {
      toastr.remove(toastrConstant);
    }, 3500);
  }
};

/**
 * Dispatches an error action with the server (or local) error payload,
 * then clears the message/error fields after 5 seconds.
 *
 * @export errorHandler
 * @param {action} dispatch
 * @param {string} error
 * @param {constant} type
 * @returns {*} void
 */
export const errorHandler = (dispatch, error, type) => {
  const errorMessage = error.response ? error.response.data : error;
  dispatch({
    type,
    payload: errorMessage,
  });
  let errorData = {};
  errorData = Object.assign({}, errorMessage);
  errorData.message = '';
  errorData.error = '';
  setTimeout(() => {
    dispatch({ type, payload: errorData });
  }, 5000);
};

// Post Request
/**
 * Sends a POST request, dispatches the result, optionally redirects and
 * shows a success toastr.
 *
 * @export postData
 * @param {constant} action
 * @param {constant} errorType
 * @param {any} isAuthReq
 * @param {string} url
 * @param {action} dispatch
 * @param {object} data
 * @param {string} message
 * @param {constant} toastrConstant
 * @param {string} directTo hash route to navigate to on success ('' = stay)
 * @returns {*} void
 */
export const postData = (
  action, errorType, isAuthReq, url, dispatch, data,
  message, toastrConstant, directTo
) => {
  const requestUrl = BASE_URL + url;
  return axios.post(requestUrl, data, authHeaders(isAuthReq))
    .then((response) => {
      dispatch({
        type: action,
        payload: response.data,
      });
      if (directTo.length > 0) {
        window.location.hash = directTo;
      }
      showSuccessToastr(dispatch, toastrConstant, message);
    })
    .catch((error) => {
      errorHandler(dispatch, error, errorType);
    });
};

// Get Request
/**
 * Sends a GET request and dispatches the response payload.
 *
 * @export getData
 * @param {any} action
 * @param {any} errorType
 * @param {any} isAuthReq
 * @param {any} url
 * @param {any} dispatch
 * @returns {*} void
 */
export const getData = (action, errorType, isAuthReq, url, dispatch) => {
  const requestUrl = BASE_URL + url;
  return axios.get(requestUrl, authHeaders(isAuthReq))
    .then((response) => {
      dispatch({
        type: action,
        payload: response.data,
      });
    })
    .catch((error) => {
      errorHandler(dispatch, error, errorType);
    });
};

// Put Request
/**
 * Sends a PUT request, dispatches the result, optionally redirects and
 * shows a success toastr.
 *
 * Fix: redirect threshold aligned with postData (`> 0` instead of `> 3`),
 * so short hash routes are not silently ignored.
 *
 * @export putData
 * @param {constant} action
 * @param {constant} errorType
 * @param {any} isAuthReq
 * @param {string} url
 * @param {action} dispatch
 * @param {object} data
 * @param {string} message
 * @param {constant} toastrConstant
 * @param {string} directTo hash route to navigate to on success ('' = stay)
 * @returns {*} void
 */
export const putData = (
  action, errorType, isAuthReq, url, dispatch, data,
  message, toastrConstant, directTo
) => {
  const requestUrl = BASE_URL + url;
  return axios.put(requestUrl, data, authHeaders(isAuthReq))
    .then((response) => {
      dispatch({
        type: action,
        payload: response.data,
      });
      if (directTo.length > 0) {
        window.location.hash = directTo;
      }
      showSuccessToastr(dispatch, toastrConstant, message);
    })
    .catch((error) => {
      errorHandler(dispatch, error, errorType);
    });
};

// Delete Request
/**
 * Sends a DELETE request, dispatches the result and shows a success toastr.
 *
 * @export deleteData
 * @param {constant} action
 * @param {constant} errorType
 * @param {any} isAuthReq
 * @param {string} url
 * @param {action} dispatch
 * @param {string} message
 * @param {constant} toastrConstant
 * @returns {*} void
 */
export const deleteData = (
  action, errorType, isAuthReq, url, dispatch, message, toastrConstant
) => {
  const requestUrl = BASE_URL + url;
  return axios.delete(requestUrl, authHeaders(isAuthReq))
    .then((response) => {
      dispatch({
        type: action,
        payload: response.data,
      });
      showSuccessToastr(dispatch, toastrConstant, message);
    })
    .catch((error) => {
      errorHandler(dispatch, error, errorType);
    });
};
alebabai/tg2vk
src/main/java/com/github/alebabai/tg2vk/repository/UserBaseRepository.java
<filename>src/main/java/com/github/alebabai/tg2vk/repository/UserBaseRepository.java package com.github.alebabai.tg2vk.repository; import com.github.alebabai.tg2vk.domain.User; import org.springframework.data.jpa.repository.Query; import org.springframework.data.repository.PagingAndSortingRepository; import org.springframework.data.repository.query.Param; import java.util.List; import java.util.Optional; public interface UserBaseRepository extends PagingAndSortingRepository<User, Integer> { @Query("SELECT u FROM User u WHERE u.settings.id IN (SELECT s.id FROM UserSettings s WHERE s.started=true)") List<User> findAllStarted(); Optional<User> findOneByVkId(@Param("vkId") Integer id); Optional<User> findOneByTgId(@Param("tgId") Integer id); }
LatteIsAHorse/LatteIsAHorse_Backend
src/main/java/team/latte/LatteIsAHorse/repository/QuizTagRepository.java
<reponame>LatteIsAHorse/LatteIsAHorse_Backend<filename>src/main/java/team/latte/LatteIsAHorse/repository/QuizTagRepository.java package team.latte.LatteIsAHorse.repository; import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.data.jpa.repository.Modifying; import org.springframework.data.jpa.repository.Query; import org.springframework.data.repository.query.Param; import org.springframework.transaction.annotation.Transactional; import team.latte.LatteIsAHorse.model.quiz.Quiz; import team.latte.LatteIsAHorse.model.quiz.QuizTag; import team.latte.LatteIsAHorse.model.tag.Tag; import java.util.Optional; public interface QuizTagRepository extends JpaRepository<QuizTag, Long> { @Modifying @Transactional @Query("DELETE FROM QuizTag q WHERE q.quiz = :quiz") void deleteAllByQuiz(@Param("quiz") Quiz quiz); Optional<QuizTag> findByTag(Tag tag); }
jburguete/mpcotool
tests/process4.c
#include <stdio.h> #include <stdlib.h> #define NTESTS 6 #define NMETHODS 41 const char *test[NTESTS] = { "Sphere", "Ackley", "Booth", "Rosenbrock", "Easom", "Beale" }; int main () { char label[NMETHODS][512]; char buffer[512]; float x[NMETHODS][3], y[NMETHODS][3]; FILE *file; unsigned int i, j; for (j = 0; j < NMETHODS; ++j) y[j][0] = y[j][1] = y[j][2] = 0.; for (i = 0; i < NTESTS; ++i) { snprintf (buffer, 512, "sed 's/,/ /g' %s.dat > %s2.dat", test[i], test[i]); system (buffer); snprintf (buffer, 512, "%s2.dat", test[i]); file = fopen (buffer, "r"); for (j = 0; j < NMETHODS; ++j) { fscanf (file, "%s%*u%f%f%f", &label[j][0], &x[j][0], &x[j][1], &x[j][2]); y[j][0] += x[j][0]; y[j][1] += x[j][1]; y[j][2] += x[j][2]; } fclose (file); } file = fopen ("out", "w"); for (j = 0; j < NMETHODS; ++j) fprintf (file, "%s %u %g %g %g\n", &label[j][0], j, y[j][0] / NTESTS, y[j][1] / NTESTS, y[j][2] / NTESTS); fclose (file); return 0; }
adam-26/react-dom-html
packages/example/server/render.js
import React from "react";
import { ServerStyleSheet } from "styled-components";
import AppScripts from "../src/components/AppScripts";
import AppStyles from "../src/components/AppStyles";
import App from "../src/components/App";
import { HtmlMetadata } from "react-dom-html-tags";
import {
    renderHtmlToString,
    renderHtmlToStaticMarkup,
    renderHtmlToNodeStream,
    renderHtmlToStaticNodeStream,
    HTML5_DOCTYPE
} from "react-dom-html-tags/server";
import { renderToString } from "react-dom/server";

// Build-asset manifest used to inject script/style tags into the page.
const assets = require("../build/manifest.json");

// Example-only in-memory cache for the serialized HTML metadata state.
// A string render populates it; the stream render requires it (see below).
const simpleCache = {};
const CACHE_KEY = "example_only";

// `?client` query flag: skip server rendering and let the client hydrate.
function renderOnClient(req) {
    return typeof req.query.client !== "undefined";
}

// Placeholder markup shown while the client-only bundle boots.
const ClientTemplate = () => <AppStyles>Loading...</AppStyles>;

/**
 * Renders the app to a full HTML string response.
 * When `?client` is set, returns static placeholder markup instead.
 */
export function stringResponse(req, res) {
    if (renderOnClient(req)) {
        const html = renderHtmlToStaticMarkup(<ClientTemplate />, {
            afterAppContainer: (
                <AppScripts assets={assets} hydrateClient={false} />
            )
        });
        return res.send(HTML5_DOCTYPE + html);
    }

    const htmlMetadata = HtmlMetadata.createForServerStringRender();
    if (simpleCache[CACHE_KEY]) {
        // === if the HTML metadata is cached - use cached HTML metadata state for render ===
        htmlMetadata.useSerializedState(simpleCache[CACHE_KEY]);
    }

    // === styled-components ===
    // Collect styles emitted during render so they can be inlined in <head>.
    const sheet = new ServerStyleSheet();
    const html = renderHtmlToString(sheet.collectStyles(<App />), {
        head: () => (
            <head>
                {React.Children.map(sheet.getStyleElement(), child => {
                    // It would be neater if "getStyledElement()" accepted an additional props argument
                    // - AND those props were COPIED when being HYDRATED on the client
                    return React.cloneElement(child, {
                        // todo - if styled-components supported additional props, this could be used to
                        // prevent any configuration for hydration
                        // "data-react-dom-html-tags": true,
                        "data-styled-streamed": true // required to force it to hydrate immediately
                    });
                })}
            </head>
        ),
        afterAppContainer: <AppScripts assets={assets} hydrateClient={true} />,
        htmlMetadata: htmlMetadata
    });

    // === without styled-components ===
    // const html = renderHtmlToString(<App />, () => ({
    //     afterAppContainer: <AppScripts assets={assets} hydrateClient={true} />,
    //     htmlMetadata: htmlMetadata
    // }));

    // === Cache the HTML Metadata State ===
    if (!simpleCache[CACHE_KEY]) {
        // passing `renderToString` avoids adding `react-dom/server` as a client bundle dependency
        simpleCache[CACHE_KEY] = htmlMetadata.serializeState(renderToString);
    }

    res.send(HTML5_DOCTYPE + html);
}

/**
 * Renders the app as a node stream response.
 * Requires the metadata cache to be warm (run a string render first);
 * when `?client` is set, streams static placeholder markup instead.
 */
export function streamResponse(req, res) {
    if (renderOnClient(req)) {
        const clientStream = renderHtmlToStaticNodeStream(<ClientTemplate />, {
            afterAppContainer: (
                <AppScripts assets={assets} hydrateClient={false} />
            )
        });

        res.write(HTML5_DOCTYPE);
        clientStream.pipe(res);
        return;
    }

    // === Stream using cached HTML Metadata state from the string render ===
    const htmlMetadata = HtmlMetadata.createForServerStreamRender();
    if (simpleCache[CACHE_KEY]) {
        // === if the HTML metadata is cached - use cached HTML metadata state for render ===
        htmlMetadata.useSerializedState(simpleCache[CACHE_KEY]);
    } else {
        throw new Error(
            "Stream render requires the HTML metadata be loaded before render, for this example render the page as a string to cache the HTML metadata and then render the page using node streams."
        );
    }

    const stream = renderHtmlToNodeStream(<App />, {
        afterAppContainer: <AppScripts assets={assets} hydrateClient={true} />,
        htmlMetadata: htmlMetadata
    });

    res.write(HTML5_DOCTYPE);
    stream.pipe(res);
}
weichihl/aws-greengrass-labs-kvs-stream-uploader
src/main/java/com/aws/iot/edgeconnectorforkvs/videorecorder/base/VideoRecorderBase.java
/*
 * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.aws.iot.edgeconnectorforkvs.videorecorder.base;

import com.aws.iot.edgeconnectorforkvs.videorecorder.callback.StatusCallback;
import com.aws.iot.edgeconnectorforkvs.videorecorder.model.RecorderCapability;
import com.aws.iot.edgeconnectorforkvs.videorecorder.model.RecorderStatus;
import com.aws.iot.edgeconnectorforkvs.videorecorder.util.GstDao;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import lombok.Getter;
import lombok.NonNull;
import lombok.extern.slf4j.Slf4j;
import org.freedesktop.gstreamer.Bus;
import org.freedesktop.gstreamer.Element;
import org.freedesktop.gstreamer.Pad;
import org.freedesktop.gstreamer.PadLinkException;
import org.freedesktop.gstreamer.Pipeline;

/**
 * Base class for video recorders built on a GStreamer pipeline: it owns the
 * pipeline, wires a camera source to per-track tee elements, and fans the
 * streams out to registered branches (e.g. file sink, KVS uploader).
 */
@Slf4j
public class VideoRecorderBase {
    // Synchronization
    private ReadWriteLock rwLock;
    private Lock condLock;
    private Condition stopRunningLoop;
    // Dao
    @Getter
    private GstDao gstCore;
    // Recorder basics
    private RecorderStatus currStatus;
    private StatusCallback statusCallback;
    // GStreamer basics
    private Bus bus;
    @Getter
    private Pipeline pipeline;
    // Camera
    private RecorderCameraBase cameraSource;
    private RecorderCameraBase.CapabilityListener cameraCapListener;
    private RecorderCameraBase.NewPadListener cameraPadListener;
    private RecorderCameraBase.ErrorListener cameraErrListener;
    // Tee
    private ArrayList<Element> teeVideos;
    private ArrayList<Element> teeAudios;
    private AtomicInteger teeVideoIdx;
    private AtomicInteger teeAudioIdx;
    // Branches
    private HashMap<String, RecorderBranchBase> branches;

    /**
     * VideoRecorderBase constructor.
     *
     * @param dao Gst API data access object
     * @param statusCallback a callback is used to receive notifications of status.
     */
    public VideoRecorderBase(GstDao dao, StatusCallback statusCallback) {
        this.rwLock = new ReentrantReadWriteLock();
        this.condLock = new ReentrantLock();
        this.stopRunningLoop = this.condLock.newCondition();
        this.gstCore = dao;
        this.currStatus = RecorderStatus.STOPPED;
        this.statusCallback = statusCallback;
        this.teeVideos = new ArrayList<>();
        this.teeAudios = new ArrayList<>();
        this.teeVideoIdx = new AtomicInteger(0);
        this.teeAudioIdx = new AtomicInteger(0);
        this.branches = new HashMap<>();
        this.gstCore.initContext();

        // Pipeline
        this.pipeline = this.gstCore.newPipeline();
        this.bus = this.gstCore.getPipelineBus(this.pipeline);

        // Signal bus: warnings are logged, errors/EOS stop the recording loop.
        this.gstCore.connectBus(this.bus, (Bus.WARNING) (gstObject, i, s) -> log
                .warn("WARN " + i + " " + s + " " + gstObject));

        this.gstCore.connectBus(this.bus, (Bus.ERROR) (source, code, message) -> {
            log.error("ERROR " + code + " " + message + " " + source);
            this.willStopRecording(false, message);
        });

        this.gstCore.connectBus(this.bus, (Bus.EOS) source -> {
            log.info("EOS " + source);
            this.willStopRecording(true, "End of stream (EOS)");
        });
    }

    /**
     * This method is to start recording, and the current thread is blocking until recording is
     * stopped or failed.
     */
    public void startRecording() {
        if (this.getStatus() == RecorderStatus.STOPPED
                || this.getStatus() == RecorderStatus.FAILED) {
            this.teeVideoIdx.set(0);
            this.teeAudioIdx.set(0);
            this.gstCore.playElement(this.pipeline);
            this.setStatus(RecorderStatus.STARTED, "Recording starts");

            // Block until willStopRecording() signals the condition.
            this.condLock.lock();
            try {
                while (this.getStatus() == RecorderStatus.STARTED) {
                    this.stopRunningLoop.await();
                }
            } catch (Exception e) {
                if (e instanceof InterruptedException) {
                    // Preserve the caller's interrupt status (it would
                    // otherwise be silently cleared by await()).
                    Thread.currentThread().interrupt();
                }
                log.error(String.format("startRecording fails: %s", e.getMessage()));
                this.setStatus(RecorderStatus.STOPPING_ABNORMAL,
                        String.format("startRecording fails: %s", e.getMessage()));
            } finally {
                this.condLock.unlock();
            }
            log.info("Leave recording loop");

            this.gstCore.stopElement(this.pipeline);
            log.info("notify stop");

            if (this.getStatus() == RecorderStatus.STOPPING_ABNORMAL) {
                this.setStatus(RecorderStatus.FAILED, "Recording Stopped by failure");
            } else {
                this.setStatus(RecorderStatus.STOPPED, "Recording Stopped normally");
            }
        } else {
            log.warn("Cannot start recording because current state is " + this.getStatus());
        }
    }

    /**
     * This method is to stop recording by sending EOS through the pipeline.
     */
    public void stopRecording() {
        this.gstCore.sendElementEvent(this.pipeline, this.gstCore.newEosEvent());
        this.gstCore.postBusMessage(this.bus, this.gstCore.newEosMessage(this.pipeline));
    }

    /**
     * Get registered branch by name.
     *
     * @param branchName branch name
     * @return branch instance, or null when the name is unknown
     */
    public RecorderBranchBase getBranch(String branchName) {
        RecorderBranchBase branch = null;

        if (!this.branches.containsKey(branchName)) {
            log.warn(String.format("Branch %s doesn't exist.", branchName));
        } else {
            branch = this.branches.get(branchName);
        }

        return branch;
    }

    /**
     * Get current status of this recorder.
     *
     * @return recording status
     */
    public RecorderStatus getStatus() {
        this.rwLock.readLock().lock();
        try {
            return this.currStatus;
        } finally {
            this.rwLock.readLock().unlock();
        }
    }

    /**
     * Register a camera module to the recorder. Only one camera is supported.
     *
     * @param cameraSrc camera module
     * @return true if success
     */
    public boolean registerCamera(@NonNull RecorderCameraBase cameraSrc) {
        if (this.cameraSource != null) {
            log.error("Camera is already registered.");
            return false;
        }

        // Camera capability listener
        this.cameraCapListener = (audioCnt, videoCnt) -> {
            this.bindBranches(audioCnt, videoCnt);
        };

        // Camera pad added listener
        this.cameraPadListener = (cap, newPad) -> {
            this.bindCameraToTee(cap, newPad);
        };

        // Camera error listener
        this.cameraErrListener = (description) -> {
            this.willStopRecording(false, "Camera error: " + description);
        };

        this.cameraSource = cameraSrc;
        this.cameraSource.registerListener(this.cameraCapListener, this.cameraPadListener,
                this.cameraErrListener);

        return true;
    }

    /**
     * Set properties to a registered camera module.
     *
     * @param property camera property
     * @param data value
     * @return true if success
     */
    public boolean setCameraProperty(String property, Object data) {
        boolean result = false;

        if (this.cameraSource != null) {
            try {
                this.cameraSource.setProperty(property, data);
                result = true;
            } catch (IllegalArgumentException e) {
                log.error("SetCamera fails because property is invalid.");
            }
        } else {
            log.error("SetProperty fails because camera is not registered.");
        }

        return result;
    }

    /**
     * Register a branch module to the recorder.
     *
     * @param branch branch module
     * @param name unique branch name
     * @return true if success
     */
    public boolean registerBranch(RecorderBranchBase branch, String name) {
        if (this.branches.containsKey(name)) {
            log.warn("Branch is already registered: " + name);
            return false;
        }

        this.branches.put(name, branch);
        return true;
    }

    // Binds every registered branch to the video/audio tees that match its
    // declared capability; flags select which tracks to (re)bind.
    private void bindBranchToTees(boolean bindVideo, boolean bindAudio) {
        for (Map.Entry<String, RecorderBranchBase> branch : this.branches.entrySet()) {
            RecorderCapability branchCap = branch.getValue().getCapability();
            ArrayList<Element> videoArr = null;
            ArrayList<Element> audioArr = null;

            // bind teeVideos
            if (bindVideo && (branchCap == RecorderCapability.VIDEO_AUDIO
                    || RecorderCapability.VIDEO_ONLY == branchCap)) {
                videoArr = this.teeVideos;
            }

            // bind teeAudios
            if (bindAudio && (branchCap == RecorderCapability.VIDEO_AUDIO
                    || RecorderCapability.AUDIO_ONLY == branchCap)) {
                audioArr = this.teeAudios;
            }

            branch.getValue().bindPaths(videoArr, audioArr);
        }
    }

    // Creates `numTrack` tee elements, adds them to the pipeline and collects
    // them in `teeArray`; "allow-not-linked" keeps unbound pads from erroring.
    private void createTees(int numTrack, ArrayList<Element> teeArray) {
        for (int i = 0; i < numTrack; ++i) {
            Element tee = gstCore.newElement("tee");
            this.gstCore.setElement(tee, "allow-not-linked", true);
            this.gstCore.addPipelineElements(this.pipeline, tee);
            this.gstCore.syncElementParentState(tee);
            teeArray.add(tee);
        }
    }

    // Reacts to the camera reporting its track counts: creates the tees
    // (once) and binds branches to them.
    private void bindBranches(int audioCnt, int videoCnt) {
        boolean bindVideo = true;
        boolean bindAudio = true;

        // Prepare video capability
        if (this.teeVideos.size() > 0) {
            log.info("Ignore binding video pads because branches are already linked.");
            bindVideo = false;
        } else {
            // We support only 1 video track.
            videoCnt = Math.min(1, videoCnt);
            createTees(videoCnt, this.teeVideos);
        }

        // Prepare audio capability
        if (this.teeAudios.size() > 0) {
            log.info("Ignore binding audio pads because branches are already linked.");
            bindAudio = false;
        } else {
            createTees(audioCnt, this.teeAudios);
        }

        this.bindBranchToTees(bindVideo, bindAudio);
    }

    // Links a newly-added camera pad to the next free tee of the matching
    // capability, relinking if the tee sink was already connected.
    private void bindCameraToTee(@NonNull RecorderCapability cap, Pad newPad) {
        Element teeElm = null;

        switch (cap) {
            case AUDIO_ONLY:
                if (this.teeAudioIdx.get() < this.teeAudios.size()) {
                    teeElm = this.teeAudios.get(this.teeAudioIdx.getAndIncrement());
                } else {
                    log.error("Get audio tee out of range");
                    this.willStopRecording(false, "Get audio tee out of range");
                    return;
                }
                break;

            case VIDEO_ONLY:
                if (this.teeVideoIdx.get() < this.teeVideos.size()) {
                    teeElm = this.teeVideos.get(this.teeVideoIdx.getAndIncrement());
                } else {
                    log.error("Get video tee out of range");
                    this.willStopRecording(false, "Get video tee out of range");
                    return;
                }
                break;

            default:
                throw new RejectedExecutionException("Invalid capability from RecorderCamera");
        }

        Pad teeSink = this.gstCore.getElementStaticPad(teeElm, "sink");

        if (this.gstCore.isPadLinked(teeSink)) {
            log.info("Unbind camera and tee before relinking.");
            Pad camPad = this.gstCore.getPadPeer(teeSink);
            this.gstCore.unlinkPad(camPad, teeSink);
        }

        try {
            this.gstCore.linkPad(newPad, teeSink);
            log.debug("Camera and tee linked.");
        } catch (PadLinkException ex) {
            log.error("Camera and tee link failed: " + ex.getLinkResult());
            this.willStopRecording(false, "Camera and tee link failed: " + ex.getLinkResult());
        }
    }

    // Updates the status under the write lock and notifies the callback only
    // on an actual transition (notification happens outside the lock).
    private void setStatus(RecorderStatus st, String description) {
        boolean needNotify = false;

        this.rwLock.writeLock().lock();
        if (this.currStatus != st) {
            this.currStatus = st;
            needNotify = true;
        }
        this.rwLock.writeLock().unlock();

        if (needNotify) {
            this.statusCallback.notifyStatus(this, st, description);
        }
    }

    // Moves the recorder towards a stopped state and wakes the blocking
    // startRecording() loop. NOTE(review): FAILED is mapped straight to
    // STOPPED without signaling — presumably the loop already exited in that
    // case; confirm before changing.
    private void willStopRecording(boolean isNormalCase, String description) {
        if (this.getStatus() == RecorderStatus.FAILED) {
            this.setStatus(RecorderStatus.STOPPED, description);
        } else if (this.getStatus() != RecorderStatus.STOPPED) {
            if (isNormalCase) {
                this.setStatus(RecorderStatus.STOPPING_NORMAL, description);
            } else {
                this.setStatus(RecorderStatus.STOPPING_ABNORMAL, description);
            }

            this.condLock.lock();
            try {
                this.stopRunningLoop.signal();
            } finally {
                this.condLock.unlock();
            }
        }
    }
}
pcen/pulumi
tests/integration/partial_state/partial_state_test.go
<reponame>pcen/pulumi<filename>tests/integration/partial_state/partial_state_test.go // Copyright 2016-2018, Pulumi Corporation. All rights reserved. // +build nodejs all package ints import ( "testing" "github.com/pulumi/pulumi/pkg/v3/resource/deploy/providers" "github.com/pulumi/pulumi/pkg/v3/testing/integration" "github.com/pulumi/pulumi/sdk/v3/go/common/resource" "github.com/stretchr/testify/assert" ) // TestPartialState tests that the engine persists partial state of a resource if a provider // provides partial state alongside a resource creation or update error. // // The setup of this test uses a dynamic provider that will partially fail if a resource's state // value is the number 4. func TestPartialState(t *testing.T) { integration.ProgramTest(t, &integration.ProgramTestOptions{ Dir: "step1", Dependencies: []string{"@pulumi/pulumi"}, Quick: true, ExpectFailure: true, ExtraRuntimeValidation: func(t *testing.T, stackInfo integration.RuntimeValidationStackInfo) { // The first update tries to create a resource with state 4. This fails partially. assert.NotNil(t, stackInfo.Deployment) assert.Equal(t, 3, len(stackInfo.Deployment.Resources)) stackRes := stackInfo.Deployment.Resources[0] assert.Equal(t, resource.RootStackType, stackRes.URN.Type()) providerRes := stackInfo.Deployment.Resources[1] assert.True(t, providers.IsProviderType(providerRes.URN.Type())) a := stackInfo.Deployment.Resources[2] // We should still have persisted the resource and its outputs to the snapshot assert.Equal(t, "doomed", string(a.URN.Name())) assert.Equal(t, 4.0, a.Outputs["state"].(float64)) assert.Equal(t, []string{"state can't be 4"}, a.InitErrors) }, EditDirs: []integration.EditDir{ { Dir: "step2", Additive: true, ExtraRuntimeValidation: func(t *testing.T, stackInfo integration.RuntimeValidationStackInfo) { // The next update deletes the resource. We should successfully delete it. 
assert.NotNil(t, stackInfo.Deployment) assert.Equal(t, 1, len(stackInfo.Deployment.Resources)) stackRes := stackInfo.Deployment.Resources[0] assert.Equal(t, resource.RootStackType, stackRes.URN.Type()) }, }, { Dir: "step3", Additive: true, ExtraRuntimeValidation: func(t *testing.T, stackInfo integration.RuntimeValidationStackInfo) { // Step 3 creates a resource with state 5, which succeeds. assert.NotNil(t, stackInfo.Deployment) assert.Equal(t, 3, len(stackInfo.Deployment.Resources)) stackRes := stackInfo.Deployment.Resources[0] assert.Equal(t, resource.RootStackType, stackRes.URN.Type()) providerRes := stackInfo.Deployment.Resources[1] assert.True(t, providers.IsProviderType(providerRes.URN.Type())) a := stackInfo.Deployment.Resources[2] assert.Equal(t, "not-doomed", string(a.URN.Name())) assert.Equal(t, 5.0, a.Outputs["state"].(float64)) assert.Nil(t, nil) }, }, { Dir: "step4", Additive: true, ExpectFailure: true, ExtraRuntimeValidation: func(t *testing.T, stackInfo integration.RuntimeValidationStackInfo) { // Step 4 updates the resource to have state 4, which fails partially. assert.NotNil(t, stackInfo.Deployment) assert.Equal(t, 3, len(stackInfo.Deployment.Resources)) stackRes := stackInfo.Deployment.Resources[0] assert.Equal(t, resource.RootStackType, stackRes.URN.Type()) providerRes := stackInfo.Deployment.Resources[1] assert.True(t, providers.IsProviderType(providerRes.URN.Type())) a := stackInfo.Deployment.Resources[2] // We should have persisted the updated resource's new outputs // to the snapshot. assert.Equal(t, "not-doomed", string(a.URN.Name())) assert.Equal(t, 4.0, a.Outputs["state"].(float64)) assert.Equal(t, []string{"state can't be 4"}, a.InitErrors) }, }, }, }) }
dreamsxin/ultimatepp
bazaar/MediaPlayer/main.cpp
<filename>bazaar/MediaPlayer/main.cpp<gh_stars>1-10 #include <CtrlLib/CtrlLib.h> using namespace Upp; #define LAYOUTFILE <Media_demo/demo.lay> #include <CtrlCore/lay.h> void Run_MediaPlayer(); void Run_VolumeCtrl(); struct Media_Demo : public WithMain<TopWindow> { typedef Media_Demo CLASSNAME; void OnRun() { String demo = grid.Get(0); if (demo == "MediaPlayer") Run_MediaPlayer(); else if (demo == "VolumeCtrl") Run_VolumeCtrl(); } Media_Demo() { CtrlLayout(*this, "Media Examples"); butRun.WhenAction = THISBACK(OnRun); grid.AddColumn("Demo", 20); grid.AddColumn("Description", 60); grid.Add("MediaPlayer", "MediaPlayer control demo"); grid.Add("VolumeCtrl", "Simple VolumeCtrl control demo"); grid.SetLineCy(int(1.4*StdFont().GetCy())); grid.WhenLeftDouble = THISBACK(OnRun); } }; GUI_APP_MAIN { Media_Demo().Run(); }
ylunalin/amrex
Src/AmrCore/AMReX_ErrorList.cpp
#include <iostream> #include <AMReX_BLassert.H> #include <AMReX_ErrorList.H> #include <AMReX_SPACE.H> namespace amrex { ErrorRec::ErrorFunc::ErrorFunc () : m_func(0), m_func3D(0) {} ErrorRec::ErrorFunc::ErrorFunc (ErrorFuncDefault inFunc) : m_func(inFunc), m_func3D(0) {} ErrorRec::ErrorFunc::ErrorFunc (ErrorFunc3DDefault inFunc) : m_func(0), m_func3D(inFunc) {} ErrorRec::ErrorFunc* ErrorRec::ErrorFunc::clone () const { return new ErrorFunc(*this); } ErrorRec::ErrorFunc::~ErrorFunc () {} void ErrorRec::ErrorFunc::operator () (int* tag, AMREX_D_DECL(const int&tlo0,const int&tlo1,const int&tlo2), AMREX_D_DECL(const int&thi0,const int&thi1,const int&thi2), const int* tagval, const int* clearval, Real* data, AMREX_D_DECL(const int&dlo0,const int&dlo1,const int&dlo2), AMREX_D_DECL(const int&dhi0,const int&dhi1,const int&dhi2), const int* lo, const int * hi, const int* nvar, const int* domain_lo, const int* domain_hi, const Real* dx, const Real* xlo, const Real* prob_lo, const Real* time, const int* level) const { BL_ASSERT(m_func != 0); m_func(tag,AMREX_D_DECL(tlo0,tlo1,tlo2),AMREX_D_DECL(thi0,thi1,thi2), tagval,clearval,data,AMREX_D_DECL(dlo0,dlo1,dlo2),AMREX_D_DECL(dhi0,dhi1,dhi2),lo,hi,nvar, domain_lo,domain_hi,dx,xlo,prob_lo,time,level); } void ErrorRec::ErrorFunc::operator () (int* tag, const int* tlo, const int* thi, const int* tagval, const int* clearval, Real* data, const int* dlo, const int* dhi, const int* lo, const int * hi, const int* nvar, const int* domain_lo, const int* domain_hi, const Real* dx, const Real* xlo, const Real* prob_lo, const Real* time, const int* level) const { BL_ASSERT(m_func3D != 0); m_func3D(tag,AMREX_ARLIM_3D(tlo),AMREX_ARLIM_3D(thi), tagval,clearval,data,AMREX_ARLIM_3D(dlo),AMREX_ARLIM_3D(dhi), AMREX_ARLIM_3D(lo),AMREX_ARLIM_3D(hi),nvar, AMREX_ARLIM_3D(domain_lo),AMREX_ARLIM_3D(domain_hi), AMREX_ZFILL(dx),AMREX_ZFILL(xlo),AMREX_ZFILL(prob_lo),time,level); } ErrorRec::ErrorFunc2::ErrorFunc2 () : m_func(0) {} 
// Wrap a tagging callback of the "level + average" signature.
ErrorRec::ErrorFunc2::ErrorFunc2 (ErrorFunc2Default inFunc)
    :
    m_func(inFunc)
{}

// Polymorphic copy; the ErrorRec that stores it takes ownership.
ErrorRec::ErrorFunc2*
ErrorRec::ErrorFunc2::clone () const
{
    return new ErrorFunc2(*this);
}

ErrorRec::ErrorFunc2::~ErrorFunc2 () {}

// Invoke the wrapped callback, forwarding tag bounds, tag/clear values, the
// data FAB with its bounds, the valid region, component count, domain extent,
// cell size, level and the precomputed average unchanged.
void
ErrorRec::ErrorFunc2::operator () (int* tag, AMREX_D_DECL(const int&tlo0,const int&tlo1,const int&tlo2),
                                   AMREX_D_DECL(const int&thi0,const int&thi1,const int&thi2),
                                   const int* tagval, const int* clearval,
                                   Real* data, AMREX_D_DECL(const int&dlo0,const int&dlo1,const int&dlo2),
                                   AMREX_D_DECL(const int&dhi0,const int&dhi1,const int&dhi2),
                                   const int* lo, const int * hi, const int* nvar,
                                   const int* domain_lo, const int* domain_hi,
                                   const Real* dx, const int* level, const Real* avg) const
{
    BL_ASSERT(m_func != 0);

    m_func(tag,AMREX_D_DECL(tlo0,tlo1,tlo2),AMREX_D_DECL(thi0,thi1,thi2),
           tagval,clearval,data,AMREX_D_DECL(dlo0,dlo1,dlo2),AMREX_D_DECL(dhi0,dhi1,dhi2),lo,hi,nvar,
           domain_lo,domain_hi,dx,level,avg);
}

// An ErrorRec stores exactly one of the two functor kinds; the unused slot
// stays null. Both constructors deep-copy via clone(), so the ErrorRec owns
// its functor and the caller's argument may go out of scope.
ErrorRec::ErrorRec (const std::string& nm,
                    int                ng,
                    ErrorRec::ErrorType etyp,
                    const ErrorRec::ErrorFunc2& f2)
    :
    derive_name(nm),
    ngrow(ng),
    err_type(etyp),
    err_func(0),
    err_func2(f2.clone())
{}

ErrorRec::ErrorRec (const std::string& nm,
                    int                ng,
                    ErrorRec::ErrorType etyp,
                    const ErrorRec::ErrorFunc& f)
    :
    derive_name(nm),
    ngrow(ng),
    err_type(etyp),
    err_func(f.clone()),
    err_func2(0)
{}

// Name of the derived quantity this record tags on.
const std::string&
ErrorRec::name () const
{
    return derive_name;
}

// Number of extra grow cells requested around each tagged region.
int
ErrorRec::nGrow () const
{
    return ngrow;
}

ErrorRec::ErrorType
ErrorRec::errType () const
{
    return err_type;
}

// NOTE(review): dereferences without a null check — valid only for a record
// built from an ErrorFunc (the other constructor leaves err_func null).
const ErrorRec::ErrorFunc&
ErrorRec::errFunc () const
{
    return *err_func;
}

// NOTE(review): same caveat as errFunc(), for the ErrorFunc2 slot.
const ErrorRec::ErrorFunc2&
ErrorRec::errFunc2() const
{
    return *err_func2;
}

// Owns the clones made in the constructors; deleting the null slot is a no-op.
ErrorRec::~ErrorRec()
{
    delete err_func;
    delete err_func2;
}

int
ErrorList::size () const
{
    return vec.size();
}

// Append a record built from an ErrorFunc.
void
ErrorList::add (const std::string&         name,
                int                        nextra,
                ErrorRec::ErrorType        typ,
                const ErrorRec::ErrorFunc& func)
{
    //
    // Keep list in order of definition, append().
    //
    int n = vec.size();
    vec.resize(n+1);
    vec[n].reset(new ErrorRec(name, nextra, typ, func));
}

// Append a record built from an ErrorFunc2.
void
ErrorList::add (const std::string&          name,
                int                         nextra,
                ErrorRec::ErrorType         typ,
                const ErrorRec::ErrorFunc2& func2)
{
    //
    // Keep list in order of definition, append().
    //
    int n = vec.size();
    vec.resize(n+1);
    vec[n].reset(new ErrorRec(name, nextra, typ, func2));
}

const ErrorRec&
ErrorList::operator[] (int k) const
{
    BL_ASSERT(k < size());

    return *vec[k];
}

// Printable names indexed by ErrorRec::ErrorType; order assumed to match the
// enumerator order Special, Standard, UseAverage — TODO confirm against the
// header.
static const char* err_name[] = { "Special", "Standard", "UseAverage" };

// One line per record: "<name> <ngrow> <type-name>".
std::ostream&
operator << (std::ostream&    os,
             const ErrorList& elst)
{
    for (int i = 0; i < elst.size(); i++)
    {
        os << elst[i].name()
           << ' '
           << elst[i].nGrow()
           << ' '
           << err_name[elst[i].errType()]
           << '\n';
    }
    return os;
}

}
caricah/mq.tracah
bootstrap/src/main/java/com/caricah/iotracah/bootstrap/data/models/users/IotAccountKey.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.caricah.iotracah.bootstrap.data.models.users;

import java.io.*;
import java.util.Objects;

/**
 * IotAccountKey definition: composite key of a username and the partition
 * it belongs to.
 *
 * Code generated by Apache Ignite Schema Import utility: 02/23/2016.
 */
public class IotAccountKey implements Serializable {
    /** */
    private static final long serialVersionUID = 0L;

    /** Value for username. */
    private String username;

    /** Value for partitionId. */
    private String partitionId;

    /**
     * Gets username.
     *
     * @return Value for username.
     */
    public String getUsername() {
        return username;
    }

    /**
     * Sets username.
     *
     * @param username New value for username.
     */
    public void setUsername(String username) {
        this.username = username;
    }

    /**
     * Gets partitionId.
     *
     * @return Value for partitionId.
     */
    public String getPartitionId() {
        return partitionId;
    }

    /**
     * Sets partitionId.
     *
     * @param partitionId New value for partitionId.
     */
    public void setPartitionId(String partitionId) {
        this.partitionId = partitionId;
    }

    /** {@inheritDoc} */
    @Override
    public boolean equals(Object o) {
        if (this == o)
            return true;

        if (!(o instanceof IotAccountKey))
            return false;

        IotAccountKey that = (IotAccountKey)o;

        // Null-safe field comparison; equivalent to the generated ternaries.
        return Objects.equals(username, that.username)
            && Objects.equals(partitionId, that.partitionId);
    }

    /** {@inheritDoc} */
    @Override
    public int hashCode() {
        // Keep the original 31 * h(username) + h(partitionId) formula
        // byte-for-byte so persisted/partitioned hash values are unchanged.
        int res = username != null ? username.hashCode() : 0;

        res = 31 * res + (partitionId != null ? partitionId.hashCode() : 0);

        return res;
    }

    /** {@inheritDoc} */
    @Override
    public String toString() {
        // Fixed: previously reported the wrong class name ("IotUserKey").
        return "IotAccountKey [username=" + username +
            ", partitionId=" + partitionId + "]";
    }
}
grische/OpenSPH
core/physics/Constants.h
<reponame>grische/OpenSPH #pragma once /// \file Constants.h /// \brief Definitions of physical constaints (in SI units). /// \author <NAME> (sevecek at s<EMAIL>) /// \date 2016-2021 #include "common/Globals.h" NAMESPACE_SPH_BEGIN namespace Constants { constexpr Float gasConstant = 8.3144598_f; // J mol^−1 K^−1 /// Atomic mass constant constexpr Float atomicMass = 1.660539040e-27_f; // kg /// Boltzmann constant constexpr Float boltzmann = 1.380648e-23_f; // J K^-1 /// Stefan-Boltzmann constant constexpr Float stefanBoltzmann = 5.670367e-8_f; // W m^2 K^-4 /// Planck constant constexpr Float planckConstant = 6.62607015e-34_f; // Js /// Gravitational constant (CODATA 2014) constexpr Float gravity = 6.67408e-11_f; // m^3 kg^-1 s^-2 /// Speed of light in vacuum (exactly) constexpr Float speedOfLight = 299792458._f; // ms^-1 /// Radiation density constant 'a'. constexpr Float radiationDensity = 4._f * stefanBoltzmann / speedOfLight; // J m^-3 K^-4 /// Astronomical unit (exactly) constexpr Float au = 149597870700._f; // m /// Parsec constexpr Float pc = 3.0856776e16_f; // m /// Number of seconds in a day constexpr Float day = 86400._f; // s /// Number of seconds in a year constexpr Float year = 3.154e7_f; // s /// Solar mass /// http://asa.usno.navy.mil/static/files/2014/Astronomical_Constants_2014.pdf constexpr Float M_sun = 1.9884e30_f; // kg /// Earth mass constexpr Float M_earth = 5.9722e24_f; // kg /// Solar radius constexpr Float R_sun = 6.957e8_f; // m /// Earth radius constexpr Float R_earth = 6.3781e6_f; // m } // namespace Constants NAMESPACE_SPH_END