repo_name
stringlengths
6
101
path
stringlengths
4
300
text
stringlengths
7
1.31M
ArcticReal/eCommerce
plugins/eCommerce/src/main/java/com/skytala/eCommerce/domain/order/relations/returnStatus/mapper/ReturnStatusMapper.java
<gh_stars>1-10
package com.skytala.eCommerce.domain.order.relations.returnStatus.mapper;

import java.math.BigDecimal;
import java.sql.Timestamp;
import java.util.HashMap;
import java.util.Map;

import javax.servlet.http.HttpServletRequest;

import org.apache.ofbiz.entity.GenericValue;

import com.skytala.eCommerce.domain.order.relations.returnStatus.model.ReturnStatus;

/**
 * Converts {@link ReturnStatus} entities to and from generic representations:
 * field maps, OFBiz {@link GenericValue} rows and HTTP request parameters.
 */
public class ReturnStatusMapper {

    /**
     * Copies every non-null field of the entity into a field map.
     *
     * @param returnstatus entity to read from
     * @return map keyed by entity field name, containing only the non-null values
     */
    public static Map<String, Object> map(ReturnStatus returnstatus) {
        Map<String, Object> returnVal = new HashMap<>();
        putIfNotNull(returnVal, "returnStatusId", returnstatus.getReturnStatusId());
        putIfNotNull(returnVal, "statusId", returnstatus.getStatusId());
        putIfNotNull(returnVal, "returnId", returnstatus.getReturnId());
        putIfNotNull(returnVal, "returnItemSeqId", returnstatus.getReturnItemSeqId());
        putIfNotNull(returnVal, "changeByUserLoginId", returnstatus.getChangeByUserLoginId());
        putIfNotNull(returnVal, "statusDatetime", returnstatus.getStatusDatetime());
        return returnVal;
    }

    // Adds the entry only when the value is present; mirrors the original per-field null checks.
    private static void putIfNotNull(Map<String, Object> target, String key, Object value) {
        if (value != null) {
            target.put(key, value);
        }
    }

    /**
     * Builds an entity from an Object-valued field map; absent or null keys
     * leave the corresponding entity field unset.
     */
    public static ReturnStatus map(Map<String, Object> fields) {
        ReturnStatus returnVal = new ReturnStatus();
        if (fields.get("returnStatusId") != null) {
            returnVal.setReturnStatusId((String) fields.get("returnStatusId"));
        }
        if (fields.get("statusId") != null) {
            returnVal.setStatusId((String) fields.get("statusId"));
        }
        if (fields.get("returnId") != null) {
            returnVal.setReturnId((String) fields.get("returnId"));
        }
        if (fields.get("returnItemSeqId") != null) {
            returnVal.setReturnItemSeqId((String) fields.get("returnItemSeqId"));
        }
        if (fields.get("changeByUserLoginId") != null) {
            returnVal.setChangeByUserLoginId((String) fields.get("changeByUserLoginId"));
        }
        if (fields.get("statusDatetime") != null) {
            returnVal.setStatusDatetime((Timestamp) fields.get("statusDatetime"));
        }
        return returnVal;
    }

    /**
     * Builds an entity from a String-valued field map (e.g. parsed form data).
     *
     * @throws Exception if "statusDatetime" is present but not in the JDBC
     *         timestamp format accepted by {@link Timestamp#valueOf(String)}
     */
    public static ReturnStatus mapstrstr(Map<String, String> fields) throws Exception {
        ReturnStatus returnVal = new ReturnStatus();
        // The map is already String-valued, so no casts are needed here
        // (the original cast each value to String redundantly).
        if (fields.get("returnStatusId") != null) {
            returnVal.setReturnStatusId(fields.get("returnStatusId"));
        }
        if (fields.get("statusId") != null) {
            returnVal.setStatusId(fields.get("statusId"));
        }
        if (fields.get("returnId") != null) {
            returnVal.setReturnId(fields.get("returnId"));
        }
        if (fields.get("returnItemSeqId") != null) {
            returnVal.setReturnItemSeqId(fields.get("returnItemSeqId"));
        }
        if (fields.get("changeByUserLoginId") != null) {
            returnVal.setChangeByUserLoginId(fields.get("changeByUserLoginId"));
        }
        if (fields.get("statusDatetime") != null) {
            returnVal.setStatusDatetime(Timestamp.valueOf(fields.get("statusDatetime")));
        }
        return returnVal;
    }

    /** Builds an entity from an OFBiz row; missing columns simply yield null fields. */
    public static ReturnStatus map(GenericValue val) {
        ReturnStatus returnVal = new ReturnStatus();
        returnVal.setReturnStatusId(val.getString("returnStatusId"));
        returnVal.setStatusId(val.getString("statusId"));
        returnVal.setReturnId(val.getString("returnId"));
        returnVal.setReturnItemSeqId(val.getString("returnItemSeqId"));
        returnVal.setChangeByUserLoginId(val.getString("changeByUserLoginId"));
        returnVal.setStatusDatetime(val.getTimestamp("statusDatetime"));
        return returnVal;
    }

    /**
     * Builds an entity from HTTP request parameters; only parameters actually
     * present in the request are applied.
     *
     * @throws Exception if "statusDatetime" is supplied but malformed
     */
    public static ReturnStatus map(HttpServletRequest request) throws Exception {
        ReturnStatus returnVal = new ReturnStatus();
        Map<String, String[]> paramMap = request.getParameterMap();
        if (paramMap.containsKey("returnStatusId")) {
            returnVal.setReturnStatusId(request.getParameter("returnStatusId"));
        }
        if (paramMap.containsKey("statusId")) {
            returnVal.setStatusId(request.getParameter("statusId"));
        }
        if (paramMap.containsKey("returnId")) {
            returnVal.setReturnId(request.getParameter("returnId"));
        }
        if (paramMap.containsKey("returnItemSeqId")) {
            returnVal.setReturnItemSeqId(request.getParameter("returnItemSeqId"));
        }
        if (paramMap.containsKey("changeByUserLoginId")) {
            returnVal.setChangeByUserLoginId(request.getParameter("changeByUserLoginId"));
        }
        if (paramMap.containsKey("statusDatetime")) {
            returnVal.setStatusDatetime(Timestamp.valueOf(request.getParameter("statusDatetime")));
        }
        return returnVal;
    }
}
bennn/PyonR
lib/sys.py
<gh_stars>100-1000
#lang racket

;; Shim for Python's `sys` module: exposes the process's standard streams
;; under their Python attribute names (`:stdin`, `:stdout`, `:stderr`).
(require "../runtime.rkt")

;; Wrap each current Racket port so it presents the runtime's port interface.
(define stdin (wrap-port (current-input-port)))
(define stdout (wrap-port (current-output-port)))
(define stderr (wrap-port (current-error-port)))

;; NOTE(review): PATH is not defined in this file — presumably it comes from
;; runtime.rkt; verify before touching that export.
(provide (rename-out [PATH :path]
                     [stdin :stdin]
                     [stdout :stdout]
                     [stderr :stderr]))
casey/fuchsia
src/connectivity/wlan/drivers/third_party/broadcom/brcmfmac/pno.cc
<reponame>casey/fuchsia
/*
 * Copyright (c) 2016 Broadcom
 *
 * Permission to use, copy, modify, and/or distribute this software for any
 * purpose with or without fee is hereby granted, provided that the above
 * copyright notice and this permission notice appear in all copies.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
 * SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION
 * OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
 * CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 */

#include "pno.h"

#include <threads.h>
#include <zircon/status.h>

#include "cfg80211.h"
#include "core.h"
#include "debug.h"
#include "fwil.h"
#include "fwil_types.h"
#include "linuxisms.h"

// Firmware PNO (preferred network offload) parameters and flag bit positions.
#define BRCMF_PNO_VERSION 2
#define BRCMF_PNO_REPEAT 4
#define BRCMF_PNO_FREQ_EXPO_MAX 3
#define BRCMF_PNO_IMMEDIATE_SCAN_BIT 3
#define BRCMF_PNO_ENABLE_BD_SCAN_BIT 5
#define BRCMF_PNO_ENABLE_ADAPTSCAN_BIT 6
#define BRCMF_PNO_REPORT_SEPARATELY_BIT 11
#define BRCMF_PNO_SCAN_INCOMPLETE 0
#define BRCMF_PNO_WPA_AUTH_ANY 0xFFFFFFFF
#define BRCMF_PNO_HIDDEN_BIT 2
#define BRCMF_PNO_SCHED_SCAN_PERIOD 30
#define BRCMF_PNO_MAX_BUCKETS 16
#define GSCAN_BATCH_NO_THR_SET 101
#define GSCAN_RETRY_THRESHOLD 3

// Bookkeeping for the scheduled-scan requests currently installed.
// `reqs[0..n_reqs)` are the active requests; access is serialized by req_lock.
struct brcmf_pno_info {
  int n_reqs;
  struct cfg80211_sched_scan_request* reqs[BRCMF_PNO_MAX_BUCKETS];
  mtx_t req_lock;
};

// Allocates the PNO bookkeeping structure, initializes its lock and attaches
// it to `cfg`. Returns ZX_ERR_NO_MEMORY on allocation failure.
zx_status_t brcmf_pno_attach(struct brcmf_cfg80211_info* cfg) {
  struct brcmf_pno_info* pi;

  BRCMF_DBG(TRACE, "enter\n");
  // Zero-initialized so n_reqs starts at 0 and all req slots are NULL.
  pi = static_cast<decltype(pi)>(calloc(1, sizeof(*pi)));
  if (!pi) {
    return ZX_ERR_NO_MEMORY;
  }
  cfg->pno = pi;
  mtx_init(&pi->req_lock, mtx_plain);
  return ZX_OK;
}

// Tears down the state created by brcmf_pno_attach(). Warns if any scheduled
// scan requests are still registered at detach time.
void brcmf_pno_detach(struct brcmf_cfg80211_info* cfg) {
  struct brcmf_pno_info* pi;

  BRCMF_DBG(TRACE, "enter\n");
  pi = cfg->pno;
  cfg->pno = NULL;  // detach before freeing so cfg never points at freed memory

  WARN_ON(pi->n_reqs);
  mtx_destroy(&pi->req_lock);
  free(pi);
}

// Returns the request id of the scheduled-scan request stored in `bucket`,
// or 0 if the bucket index is out of range.
uint64_t brcmf_pno_find_reqid_by_bucket(struct brcmf_pno_info* pi, uint32_t bucket) {
  uint64_t reqid = 0;

  mtx_lock(&pi->req_lock);

  if ((int)bucket < pi->n_reqs) {
    reqid = pi->reqs[bucket]->reqid;
  }

  mtx_unlock(&pi->req_lock);
  return reqid;
}

// Builds a bitmap of the request buckets whose match sets match the scan
// result `ni`, either by exact SSID or by BSSID. Bit i corresponds to
// pi->reqs[i]; requests with no match sets are skipped.
uint32_t brcmf_pno_get_bucket_map(struct brcmf_pno_info* pi, struct brcmf_pno_net_info_le* ni) {
  struct cfg80211_sched_scan_request* req;
  struct cfg80211_match_set* ms;
  uint32_t bucket_map = 0;
  int i, j;

  mtx_lock(&pi->req_lock);
  for (i = 0; i < pi->n_reqs; i++) {
    req = pi->reqs[i];

    if (!req->n_match_sets) {
      continue;
    }
    for (j = 0; j < req->n_match_sets; j++) {
      ms = &req->match_sets[j];
      // One match (SSID or BSSID) is enough to claim this bucket.
      if (ms->ssid.ssid_len == ni->SSID_len && !memcmp(ms->ssid.ssid, ni->SSID, ni->SSID_len)) {
        bucket_map |= BIT(i);
        break;
      }
      if (is_valid_ether_addr(ms->bssid) && !memcmp(ms->bssid, ni->bssid, ETH_ALEN)) {
        bucket_map |= BIT(i);
        break;
      }
    }
  }
  mtx_unlock(&pi->req_lock);
  return bucket_map;
}
PissedCapslock/methanol
methanol-testutils/src/main/java/com/github/mizosoft/methanol/testutils/Logging.java
/*
 * Copyright (c) 2021 <NAME>
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */

package com.github.mizosoft.methanol.testutils;

import java.util.Set;
import java.util.concurrent.CopyOnWriteArraySet;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.stream.Stream;

/** Utility methods related to logging during tests. */
public class Logging {

  // Hold strong references to disabled loggers so their configuration won't get GCed
  @SuppressWarnings("MismatchedQueryAndUpdateOfCollection")
  private static final Set<Logger> disabledLoggers = new CopyOnWriteArraySet<>();

  private Logging() {} // static utility class; not instantiable

  /** Disables logging for the logger of each given class. */
  public static void disable(Class<?>... clazz) {
    disable(Stream.of(clazz).map(Class::getName).toArray(String[]::new));
  }

  /**
   * Disables logging for each named logger and pins the logger so its
   * {@code Level.OFF} configuration survives garbage collection.
   */
  public static void disable(String... names) {
    for (var name : names) {
      // Use the imported Logger consistently (was fully-qualified
      // java.util.logging.Logger.getLogger, unlike the rest of the file).
      var logger = Logger.getLogger(name);
      logger.setLevel(Level.OFF);
      disabledLoggers.add(logger);
    }
  }
}
Sgitario/jcloud-unit
jester-containers/src/main/java/io/jester/api/Container.java
<filename>jester-containers/src/main/java/io/jester/api/Container.java
package io.jester.api;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

/**
 * Declares a container to be provisioned for a test. May annotate either a
 * field or a type; retained at runtime so the test framework can read it
 * reflectively.
 */
@Target({ ElementType.FIELD, ElementType.TYPE })
@Retention(RetentionPolicy.RUNTIME)
public @interface Container {

    /** Container image reference to run (required). */
    String image();

    /** Ports exposed by the container (required). */
    int[] ports();

    /**
     * Log fragment associated with the container; presumably the line waited
     * for before the container counts as started — TODO confirm against the
     * framework's container-management code. Empty means none.
     */
    String expectedLog() default "";

    /** Command to run in the container; empty means the image default. */
    String[] command() default {};
}
ACPK/atc
db/migrations/migrations.go
package migrations

import "github.com/BurntSushi/migration"

// Migrations is the ordered list of database schema migrators.
// Order is significant: each migrator assumes the schema produced by all of
// its predecessors, so new migrations are appended at the end and existing
// entries must never be reordered or removed.
var Migrations = []migration.Migrator{
	InitialSchema,
	MoveSourceAndMetadataToVersionedResources,
	AddTypeToVersionedResources,
	RemoveTransitionalCurrentVersions,
	NonNullableVersionInfo,
	AddOneOffNameSequence,
	AddHijackURLToBuilds,
	AddTimestampsToBuilds,
	CreateLocks,
	AddBuildEvents,
	ReplaceBuildsAbortHijackURLsWithGuidAndEndpoint,
	ReplaceBuildEventsIDWithEventID,
	AddLocks,
	DropOldLocks,
	AddConfig,
	AddNameToBuildInputs,
	AddEngineAndEngineMetadataToBuilds,
	AddVersionToBuildEvents,
	AddCompletedToBuilds,
	AddWorkers,
	AddEnabledToBuilds,
	CreateEventIDSequencesForInFlightBuilds,
	AddResourceTypesToWorkers,
	AddPlatformAndTagsToWorkers,
	AddIdToConfig,
	ConvertJobBuildConfigToJobPlans,
	AddCheckErrorToResources,
	AddPausedToResources,
	AddPausedToJobs,
	CreateJobsSerialGroups,
	CreatePipes,
	RenameConfigToPipelines,
	RenamePipelineIDToVersionAddPrimaryKey,
	AddNameToPipelines,
	AddPipelineIDToResources,
	AddPipelineIDToJobs,
	AddPausedToPipelines,
	AddOrderingToPipelines,
	AddInputsDeterminedToBuilds,
	AddExplicitToBuildOutputs,
	AddLastCheckedToResources,
	AddLastTrackedToBuilds,
	AddLastScheduledToPipelines,
	AddCheckingToResources,
	AddUniqueConstraintToResources,
	RemoveSourceFromVersionedResources,
	AddIndexesToABunchOfStuff,
	DropLocks,
	AddBaggageclaimURLToWorkers,
	AddContainers,
	AddNameToWorkers,
	AddLastScheduledToBuilds,
	AddCheckTypeAndCheckSourceToContainers,
	AddStepLocationToContainers,
	AddVolumesAndCacheInvalidator,
	AddCompositeUniqueConstraintToVolumes,
	AddWorkingDirectoryToContainers,
	MakeContainerWorkingDirectoryNotNull,
	AddEnvVariablesToContainers,
}
GabrielSturtevant/mage
Mage.Sets/src/mage/cards/w/Willbender.java
package mage.cards.w;

import java.util.UUID;
import mage.MageInt;
import mage.abilities.Ability;
import mage.abilities.common.TurnedFaceUpSourceTriggeredAbility;
import mage.abilities.costs.mana.ManaCostsImpl;
import mage.abilities.effects.common.ChooseNewTargetsTargetEffect;
import mage.abilities.keyword.MorphAbility;
import mage.cards.CardImpl;
import mage.cards.CardSetInfo;
import mage.constants.CardType;
import mage.constants.SubType;
import mage.filter.FilterStackObject;
import mage.filter.predicate.mageobject.NumberOfTargetsPredicate;
import mage.target.TargetStackObject;

/**
 * Card implementation for Willbender: a {1}{U} 1/2 Human Wizard with Morph
 * {1}{U} whose turn-face-up trigger redirects a single-target spell or ability.
 *
 * @author LevelX2
 */
public final class Willbender extends CardImpl {

    // Restricts the trigger's target to spells/abilities with exactly one target,
    // since only those can be redirected wholesale.
    private static final FilterStackObject FILTER = new FilterStackObject("spell or ability with a single target");

    static {
        FILTER.add(new NumberOfTargetsPredicate(1));
    }

    public Willbender(UUID ownerId, CardSetInfo setInfo) {
        super(ownerId, setInfo, new CardType[]{CardType.CREATURE}, "{1}{U}");
        this.subtype.add(SubType.HUMAN);
        this.subtype.add(SubType.WIZARD);

        this.power = new MageInt(1);
        this.toughness = new MageInt(2);

        // Morph {1}{U}
        this.addAbility(new MorphAbility(this, new ManaCostsImpl("{1}{U}")));

        // When Willbender is turned face up, change the target of target spell or ability with a single target.
        Ability ability = new TurnedFaceUpSourceTriggeredAbility(new ChooseNewTargetsTargetEffect(true, true));
        ability.addTarget(new TargetStackObject(FILTER));
        this.addAbility(ability);
    }

    // Copy constructor used by the game engine's copy() protocol.
    private Willbender(final Willbender card) {
        super(card);
    }

    @Override
    public Willbender copy() {
        return new Willbender(this);
    }
}
joeivans/data-structures-and-algorithms
javascript/array-insert-shift/ArrayExtensions.js
const Utils = require('../lib/Utils');

/**
 * Installs a non-native `nonNativeInsertShift` method on Array.prototype.
 * The method returns a NEW array with `value` inserted around the midpoint:
 * before the middle element when the length is even, after it when odd.
 * NOTE: on an empty array the value is never inserted and an empty array is
 * returned — this mirrors the original implementation exactly.
 */
class ArrayExtensions {
  constructor() {
    Array.prototype.nonNativeInsertShift = function (value) {
      const length = this.length;
      const midpoint = Utils.truncateToInteger(length / 2);

      // Empty source: the original's loop body never runs, so no insertion.
      if (length === 0) {
        return [];
      }

      // Even length inserts before the midpoint element; odd inserts after it.
      const insertionIndex = Utils.isEven(length) ? midpoint : midpoint + 1;

      const shifted = Array.from(this);
      shifted.splice(insertionIndex, 0, value);
      return shifted;
    };
  }
}

module.exports = ArrayExtensions;
LeoMurphyWM24/PlasmaPy
plasmapy/utils/decorators/checks.py
<filename>plasmapy/utils/decorators/checks.py """ Decorator for checking input/output arguments of functions. """ __all__ = [ "check_values", "check_units", "check_relativistic", "CheckBase", "CheckUnits", "CheckValues", ] import collections import functools import inspect import numpy as np import warnings from astropy import units as u from astropy.constants import c from functools import reduce from operator import add from typing import Any, Dict, List, Tuple, Union from plasmapy.utils.decorators.helpers import preserve_signature from plasmapy.utils.exceptions import ( PlasmaPyWarning, RelativityError, RelativityWarning, ) try: from astropy.units.equivalencies import Equivalency except ImportError: # TODO: remove once we have dependency Astropy >= 3.2.1 # astropy defined the Equivalency class in v3.2.1 class Equivalency: pass class CheckBase: """ Base class for 'Check' decorator classes. Parameters ---------- checks_on_return specified checks on the return of the wrapped function **checks specified checks on the input arguments of the wrapped function """ def __init__(self, checks_on_return=None, **checks): self._checks = checks if checks_on_return is not None: self._checks["checks_on_return"] = checks_on_return @property def checks(self): """ Requested checks on the decorated function's input arguments and/or return. """ return self._checks class CheckValues(CheckBase): """ A decorator class to 'check' -- limit/control -- the values of input and return arguments to a function or method. Parameters ---------- checks_on_return: Dict[str, bool] Specifications for value checks on the return of the function being wrapped. (see `check values`_ for valid specifications) **checks: Dict[str, Dict[str, bool]] Specifications for value checks on the input arguments of the function being wrapped. Each keyword argument in `checks` is the name of a function argument to be checked and the keyword value contains the value check specifications. .. 
_`check values`: The value check specifications are defined within a dictionary containing the keys defined below. If the dictionary is empty or omitting keys, then the default value will be assumed for the missing keys. ================ ======= ================================================ Key Type Description ================ ======= ================================================ can_be_negative `bool` [DEFAULT `True`] values can be negative can_be_complex `bool` [DEFAULT `False`] values can be complex numbers can_be_inf `bool` [DEFAULT `True`] values can be :data:`~numpy.inf` can_be_nan `bool` [DEFAULT `True`] values can be :data:`~numpy.nan` none_shall_pass `bool` [DEFAULT `False`] values can be a python `None` can_be_zero `bool` [DEFAULT `True`] values can be zero ================ ======= ================================================ Notes ----- * Checking of function arguments `*args` and `**kwargs` is not supported. Examples -------- .. code-block:: python from plasmapy.utils.decorators.checks import CheckValues @CheckValues(arg1={'can_be_negative': False, 'can_be_nan': False}, arg2={'can_be_inf': False}, checks_on_return={'none_shall_pass': True) def foo(arg1, arg2): return None # on a method class Foo: @CheckValues(arg1={'can_be_negative': False, 'can_be_nan': False}, arg2={'can_be_inf': False}, checks_on_return={'none_shall_pass': True) def bar(self, arg1, arg2): return None """ #: Default values for the possible 'check' keys. # To add a new check to the class, the following needs to be done: # 1. Add a key & default value to the `__check_defaults` dictionary # 2. 
Add a corresponding if-statement to method `_check_value` # __check_defaults = { "can_be_negative": True, "can_be_complex": False, "can_be_inf": True, "can_be_nan": True, "none_shall_pass": False, "can_be_zero": True, } def __init__( self, checks_on_return: Dict[str, bool] = None, **checks: Dict[str, bool] ): super().__init__(checks_on_return=checks_on_return, **checks) def __call__(self, f): """ Decorate a function. Parameters ---------- f Function to be wrapped Returns ------- function wrapped function of `f` """ self.f = f wrapped_sign = inspect.signature(f) @preserve_signature @functools.wraps(f) def wrapper(*args, **kwargs): # map args and kwargs to function parameters bound_args = wrapped_sign.bind(*args, **kwargs) bound_args.apply_defaults() # get checks checks = self._get_value_checks(bound_args) # check input arguments for arg_name in checks: # skip check of output/return if arg_name == "checks_on_return": continue # check argument self._check_value( bound_args.arguments[arg_name], arg_name, checks[arg_name] ) # call function _return = f(**bound_args.arguments) # check function return if "checks_on_return" in checks: self._check_value( _return, "checks_on_return", checks["checks_on_return"] ) return _return return wrapper def _get_value_checks( self, bound_args: inspect.BoundArguments ) -> Dict[str, Dict[str, bool]]: """ Review :attr:`checks` and function bound arguments to build a complete 'checks' dictionary. If a check key is omitted from the argument checks, then a default value is assumed (see `check values`_). Parameters ---------- bound_args: :class:`inspect.BoundArguments` arguments passed into the function being wrapped .. code-block:: python bound_args = inspect.signature(f).bind(*args, **kwargs) Returns ------- Dict[str, Dict[str, bool]] A complete 'checks' dictionary for checking function input arguments and return. 
""" # initialize validation dictionary out_checks = {} # Iterate through function bound arguments + return and build `out_checks: # # artificially add "return" to parameters things_to_check = bound_args.signature.parameters.copy() things_to_check["checks_on_return"] = inspect.Parameter( "checks_on_return", inspect.Parameter.POSITIONAL_ONLY, annotation=bound_args.signature.return_annotation, ) for param in things_to_check.values(): # variable arguments are NOT checked # e.g. in foo(x, y, *args, d=None, **kwargs) variable arguments # *args and **kwargs will NOT be checked # if param.kind in ( inspect.Parameter.VAR_KEYWORD, inspect.Parameter.VAR_POSITIONAL, ): continue # grab the checks dictionary for the desired parameter try: param_in_checks = self.checks[param.name] except KeyError: # checks for parameter not specified continue # build `out_checks` # read checks and/or apply defaults values out_checks[param.name] = {} for v_name, v_default in self.__check_defaults.items(): try: out_checks[param.name][v_name] = param_in_checks.get( v_name, v_default ) except AttributeError: # for the case that checks are defined for an argument, # but is NOT a dictionary # (e.g. CheckValues(x=u.cm) ... this scenario could happen # during subclassing) out_checks[param.name][v_name] = v_default # Does `self.checks` indicate arguments not used by f? if missing_params := list(set(self.checks) - set(out_checks)): params_str = ", ".join(missing_params) warnings.warn( PlasmaPyWarning( f"Expected to value check parameters {params_str} but they " f"are missing from the call to {self.f.__name__}" ) ) return out_checks def _check_value(self, arg, arg_name: str, arg_checks: Dict[str, bool]): """ Perform checks `arg_checks` on function argument `arg`. 
Parameters ---------- arg The argument to be checked arg_name: str The name of the argument to be checked arg_checks: Dict[str, bool] The requested checks for the argument Raises ------ ValueError raised if a check fails """ if arg_name == "checks_on_return": valueerror_msg = "The return value " else: valueerror_msg = f"The argument '{arg_name}' " valueerror_msg += f"to function {self.f.__name__}() can not contain" # check values # * 'none_shall_pass' always needs to be checked first ckeys = list(self.__check_defaults.keys()) ckeys.remove("none_shall_pass") ckeys = ("none_shall_pass",) + tuple(ckeys) for ckey in ckeys: if ckey == "can_be_complex": if not arg_checks[ckey] and np.any(np.iscomplexobj(arg)): raise ValueError(f"{valueerror_msg} complex numbers.") elif ckey == "can_be_inf": if not arg_checks[ckey] and np.any(np.isinf(arg)): raise ValueError(f"{valueerror_msg} infs.") elif ckey == "can_be_nan": if not arg_checks["can_be_nan"] and np.any(np.isnan(arg)): raise ValueError(f"{valueerror_msg} NaNs.") elif ckey == "can_be_negative": if not arg_checks[ckey] and np.any(arg < 0): raise ValueError(f"{valueerror_msg} negative numbers.") elif ckey == "can_be_zero": if not arg_checks[ckey] and np.any(arg == 0): raise ValueError(f"{valueerror_msg} zeros.") elif ckey == "none_shall_pass": if arg is None and arg_checks[ckey]: break elif arg is None: raise ValueError(f"{valueerror_msg} Nones.") class CheckUnits(CheckBase): """ A decorator class to 'check' -- limit/control -- the units of input and return arguments to a function or method. Parameters ---------- checks_on_return: list of astropy :mod:`~astropy.units` or dict of unit specifications Specifications for unit checks on the return of the function being wrapped. (see `check units`_ for valid specifications) **checks: list of astropy :mod:`~astropy.units` or dict of unit specifications Specifications for unit checks on the input arguments of the function being wrapped. 
Each keyword argument in `checks` is the name of a function argument to be checked and the keyword value contains the unit check specifications. .. _`check units`: Unit checks can be defined by passing one of the astropy :mod:`~astropy.units`, a list of astropy units, or a dictionary containing the keys defined below. Units can also be defined with function annotations, but must be consistent with decorator `**checks` arguments if used concurrently. If a key is omitted, then the default value will be assumed. ====================== ======= ================================================ Key Type Description ====================== ======= ================================================ units list of desired astropy :mod:`~astropy.units` equivalencies | [DEFAULT `None`] A list of equivalent pairs to try if | the units are not directly convertible. | (see :mod:`~astropy.units.equivalencies`, and/or `astropy equivalencies`_) pass_equivalent_units `bool` | [DEFAULT `False`] allow equivalent units | to pass ====================== ======= ================================================ Notes ----- * Checking of function arguments `*args` and `**kwargs` is not supported. * Decorator does NOT perform any unit conversions. * If it is desired that `None` values do not raise errors or warnings, then include `None` in the list of units or as a default value for the function argument. * If units are not specified in `checks`, then the decorator will attempt to identify desired units by examining the function annotations. 
Examples -------- Define units with decorator parameters:: import astropy.units as u from plasmapy.utils.decorators import CheckUnits @CheckUnits(arg1={'units': u.cm}, arg2=u.cm, checks_on_return=[u.cm, u.km]) def foo(arg1, arg2): return arg1 + arg2 # or on a method class Foo: @CheckUnits(arg1={'units': u.cm}, arg2=u.cm, checks_on_return=[u.cm, u.km]) def bar(self, arg1, arg2): return arg1 + arg2 Define units with function annotations:: import astropy.units as u from plasmapy.utils.decorators import CheckUnits @CheckUnits() def foo(arg1: u.cm, arg2: u.cm) -> u.cm: return arg1 + arg2 # or on a method class Foo: @CheckUnits() def bar(self, arg1: u.cm, arg2: u.cm) -> u.cm: return arg1 + arg2 Allow `None` values to pass, on input and output:: import astropy.units as u from plasmapy.utils.decorators import CheckUnits @CheckUnits(checks_on_return=[u.cm, None]) def foo(arg1: u.cm = None): return arg1 Allow return values to have equivalent units:: import astropy.units as u from plasmapy.utils.decorators import CheckUnits @CheckUnits(arg1={'units': u.cm}, checks_on_return={'units': u.km, 'pass_equivalent_units': True}) def foo(arg1): return arg1 Allow equivalent units to pass with specified equivalencies:: import astropy.units as u from plasmapy.utils.decorators import CheckUnits @CheckUnits(arg1={'units': u.K, 'equivalencies': u.temperature_energy(), 'pass_equivalent_units': True}) def foo(arg1): return arg1 .. _astropy equivalencies: https://docs.astropy.org/en/stable/units/equivalencies.html """ #: Default values for the possible 'check' keys. # To add a new check the the class, the following needs to be done: # 1. Add a key & default value to the `__check_defaults` dictionary # 2. Add a corresponding conditioning statement to `_get_unit_checks` # 3. 
Add a corresponding behavior to `_check_unit` # __check_defaults = { "units": None, "equivalencies": None, "pass_equivalent_units": False, "none_shall_pass": False, } def __init__( self, checks_on_return: Union[u.Unit, List[u.Unit], Dict[str, Any]] = None, **checks: Union[u.Unit, List[u.Unit], Dict[str, Any]], ): super().__init__(checks_on_return=checks_on_return, **checks) def __call__(self, f): """ Decorate a function. Parameters ---------- f Function to be wrapped Returns ------- function wrapped function of `f` """ self.f = f wrapped_sign = inspect.signature(f) @preserve_signature @functools.wraps(f) def wrapper(*args, **kwargs): # combine args and kwargs into dictionary bound_args = wrapped_sign.bind(*args, **kwargs) bound_args.apply_defaults() # get checks checks = self._get_unit_checks(bound_args) # check (input) argument units for arg_name in checks: # skip check of output/return if arg_name == "checks_on_return": continue # check argument self._check_unit( bound_args.arguments[arg_name], arg_name, checks[arg_name] ) # call function _return = f(**bound_args.arguments) # check output if "checks_on_return" in checks: self._check_unit( _return, "checks_on_return", checks["checks_on_return"] ) return _return return wrapper def _get_unit_checks( self, bound_args: inspect.BoundArguments ) -> Dict[str, Dict[str, Any]]: """ Review :attr:`checks` and function bound arguments to build a complete 'checks' dictionary. If a check key is omitted from the argument checks, then a default value is assumed (see `check units`_) Parameters ---------- bound_args: :class:`inspect.BoundArguments` arguments passed into the function being wrapped .. code-block:: python bound_args = inspect.signature(f).bind(*args, **kwargs) Returns ------- Dict[str, Dict[str, Any]] A complete 'checks' dictionary for checking function input arguments and return. 
""" # initialize validation dictionary out_checks = {} # Iterate through function bound arguments + return and build `out_checks`: # # artificially add "return" to parameters things_to_check = bound_args.signature.parameters.copy() things_to_check["checks_on_return"] = inspect.Parameter( "checks_on_return", inspect.Parameter.POSITIONAL_ONLY, annotation=bound_args.signature.return_annotation, ) for param in things_to_check.values(): # variable arguments are NOT checked # e.g. in foo(x, y, *args, d=None, **kwargs) variable arguments # *args and **kwargs will NOT be checked # if param.kind in ( inspect.Parameter.VAR_KEYWORD, inspect.Parameter.VAR_POSITIONAL, ): continue # grab the checks dictionary for the desired parameter try: param_checks = self.checks[param.name] except KeyError: param_checks = None # -- Determine target units `_units` -- # target units can be defined in one of three ways (in # preferential order): # 1. direct keyword pass-through # i.e. CheckUnits(x=u.cm) # CheckUnits(x=[u.cm, u.s]) # 2. keyword pass-through via dictionary definition # i.e. CheckUnits(x={'units': u.cm}) # CheckUnits(x={'units': [u.cm, u.s]}) # 3. function annotations # # * if option (3) is used simultaneously with option (1) or (2), then # checks defined by (3) must be consistent with checks from (1) or (2) # to avoid raising an error. # * if None is included in the units list, then None values are allowed # _none_shall_pass = False _units = None _units_are_from_anno = False if param_checks is not None: # checks for argument were defined with decorator try: _units = param_checks["units"] except TypeError: # if checks is NOT None and is NOT a dictionary, then assume # only units were specified # e.g. 
CheckUnits(x=u.cm) # _units = param_checks except KeyError: # if checks does NOT have 'units' but is still a dictionary, # then other check conditions may have been specified and the # user is relying on function annotations to define desired # units _units = None # If no units have been specified by decorator checks, then look for # function annotations. # # Reconcile units specified by decorator checks and function annotations _units_anno = None if param.annotation is not inspect.Parameter.empty: # unit annotations defined _units_anno = param.annotation if _units is None and _units_anno is None and param_checks is None: # no checks specified and no unit annotations defined continue elif _units is None and _units_anno is None: # checks specified, but NO unit checks msg = "No astropy.units specified for " if param.name == "checks_on_return": msg += "return value " else: msg += f"argument {param.name} " msg += f"of function {self.f.__name__}()." raise ValueError(msg) elif _units is None: _units = _units_anno _units_are_from_anno = True _units_anno = None # Ensure `_units` is an iterable if not isinstance(_units, collections.abc.Iterable): _units = [_units] if not isinstance(_units_anno, collections.abc.Iterable): _units_anno = [_units_anno] # Is None allowed? 
if None in _units or param.default is None: _none_shall_pass = True # Remove Nones if None in _units: _units = [t for t in _units if t is not None] if None in _units_anno: _units_anno = [t for t in _units_anno if t is not None] # ensure all _units are astropy.units.Unit or physical types & # define 'units' for unit checks & # define 'none_shall_pass' check _units = self._condition_target_units( _units, from_annotations=_units_are_from_anno ) _units_anno = self._condition_target_units( _units_anno, from_annotations=True ) if not all(_u in _units for _u in _units_anno): raise ValueError( f"For argument '{param.name}', " f"annotation units ({_units_anno}) are not included in the units " f"specified by decorator arguments ({_units}). Use either " f"decorator arguments or function annotations to defined unit " f"types, or make sure annotation specifications match decorator " f"argument specifications." ) if len(_units) == 0 and len(_units_anno) == 0 and param_checks is None: # annotations did not specify units continue elif len(_units) == 0 and len(_units_anno) == 0: # checks specified, but NO unit checks msg = "No astropy.units specified for " if param.name == "checks_on_return": msg += "return value " else: msg += f"argument {param.name} " msg += f"of function {self.f.__name__}()." raise ValueError(msg) out_checks[param.name] = { "units": _units, "none_shall_pass": _none_shall_pass, } # -- Determine target equivalencies -- # Unit equivalences can be defined by: # 1. keyword pass-through via dictionary definition # e.g. 
CheckUnits(x={'units': u.C, # 'equivalencies': u.temperature}) # # initialize equivalencies try: _equivs = param_checks["equivalencies"] except (KeyError, TypeError): _equivs = self.__check_defaults["equivalencies"] # ensure equivalences are properly formatted if _equivs is None or _equivs == [None]: _equivs = None elif isinstance(_equivs, Equivalency): pass elif isinstance(_equivs, (list, tuple)): # flatten list to non-list elements if isinstance(_equivs, tuple): _equivs = [_equivs] else: _equivs = self._flatten_equivalencies_list(_equivs) # ensure passed equivalencies list is structured properly # [(), ...] # or [Equivalency(), ...] # # * All equivalencies must be a list of 2, 3, or 4 element tuples # structured like... # (from_unit, to_unit, forward_func, backward_func) # if all(isinstance(el, Equivalency) for el in _equivs): _equivs = reduce(add, _equivs) else: _equivs = self._normalize_equivalencies(_equivs) out_checks[param.name]["equivalencies"] = _equivs # -- Determine if equivalent units pass -- try: peu = param_checks.get( "pass_equivalent_units", self.__check_defaults["pass_equivalent_units"], ) except (AttributeError, TypeError): peu = self.__check_defaults["pass_equivalent_units"] out_checks[param.name]["pass_equivalent_units"] = peu # Does `self.checks` indicate arguments not used by f? missing_params = [ param for param in set(self.checks.keys()) - set(out_checks.keys()) ] if len(missing_params) > 0: params_str = ", ".join(missing_params) warnings.warn( PlasmaPyWarning( f"Expected to unit check parameters {params_str} but they " f"are missing from the call to {self.f.__name__}" ) ) return out_checks def _check_unit(self, arg, arg_name: str, arg_checks: Dict[str, Any]): """ Perform unit checks `arg_checks` on function argument `arg`. 
Parameters ---------- arg The argument to be checked arg_name: str The name of the argument to be checked arg_checks: Dict[str, Any] The requested checks for the argument Raises ------ ValueError If `arg` is `None` when `arg_checks['none_shall_pass']=False` TypeError If `arg` does not have `units` :class:`astropy.units.UnitTypeError` If the units of `arg` do not satisfy conditions of `arg_checks` """ arg, unit, equiv, err = self._check_unit_core(arg, arg_name, arg_checks) if err is not None: raise err def _check_unit_core( self, arg, arg_name: str, arg_checks: Dict[str, Any] ) -> Tuple[ Union[None, u.Quantity], Union[None, u.Unit], Union[None, List[Any]], Union[None, Exception], ]: """ Determines if `arg` passes unit checks `arg_checks` and if the units of `arg` is equivalent to any units specified in `arg_checks`. Parameters ---------- arg The argument to be checked arg_name: str The name of the argument to be checked arg_checks: Dict[str, Any] The requested checks for the argument Returns ------- (`arg`, `unit`, `equivalencies`, `error`) * `arg` is the original input argument `arg` or `None` if unit checks fail * `unit` is the identified astropy :mod:`~astropy.units` that `arg` can be converted to or `None` if none exist * `equivalencies` is the astropy :mod:`~astropy.units.equivalencies` used for the unit conversion or `None` * `error` is the `Exception` associated with the failed unit checks or `None` for successful unit checks """ # initialize str for error messages if arg_name == "checks_on_return": err_msg = "The return value " else: err_msg = f"The argument '{arg_name}' " err_msg += f"to function {self.f.__name__}()" # initialize ValueError message valueerror_msg = f"{err_msg} can not contain" # initialize TypeError message typeerror_msg = f"{err_msg} should be an astropy Quantity with " if len(arg_checks["units"]) == 1: typeerror_msg += f"the following unit: {arg_checks['units'][0]}" else: typeerror_msg += "one of the following units: " for unit in 
arg_checks["units"]: typeerror_msg += str(unit) if unit != arg_checks["units"][-1]: typeerror_msg += ", " if arg_checks["none_shall_pass"]: typeerror_msg += "or None " # pass Nones if allowed if arg is None: if arg_checks["none_shall_pass"]: return arg, None, None, None else: return None, None, None, ValueError(f"{valueerror_msg} Nones") # check units in_acceptable_units = [] equiv = arg_checks["equivalencies"] for unit in arg_checks["units"]: try: in_acceptable_units.append( arg.unit.is_equivalent(unit, equivalencies=equiv) ) except AttributeError: if hasattr(arg, "unit"): err_specifier = ( "a 'unit' attribute without an 'is_equivalent' method" ) else: err_specifier = "no 'unit' attribute" msg = ( f"{err_msg} has {err_specifier}. " f"Use an astropy Quantity instead." ) return None, None, None, TypeError(msg) # How many acceptable units? nacceptable = np.count_nonzero(in_acceptable_units) unit = None equiv = None err = None if nacceptable == 0: # NO equivalent units arg = None err = u.UnitTypeError(typeerror_msg) else: # is there an exact match? 
units_arr = np.array(arg_checks["units"]) units_equal_mask = np.equal(units_arr, arg.unit) units_mask = np.logical_and(units_equal_mask, in_acceptable_units) if np.count_nonzero(units_mask) == 1: # matched exactly to a desired unit unit = units_arr[units_mask][0] equiv = arg_checks["equivalencies"] elif nacceptable == 1: # there is a match to 1 equivalent unit unit = units_arr[in_acceptable_units][0] equiv = arg_checks["equivalencies"] if not arg_checks["pass_equivalent_units"]: err = u.UnitTypeError(typeerror_msg) elif arg_checks["pass_equivalent_units"]: # there is a match to more than one equivalent units pass else: # there is a match to more than 1 equivalent units arg = None err = u.UnitTypeError(typeerror_msg) return arg, unit, equiv, err @staticmethod def _condition_target_units(targets: List, from_annotations: bool = False): """ From a list of target units (either as a string or astropy :class:`~astropy.units.Unit` objects), return a list of conditioned :class:`~astropy.units.Unit` objects. Parameters ---------- targets: list of target units list of units (either as a string or :class:`~astropy.units.Unit`) to be conditioned into astropy :class:`~astropy.units.Unit` objects from_annotations: bool (Default `False`) Indicates if `targets` originated from function/method annotations versus decorator input arguments. Returns ------- list: list of `targets` converted into astropy :class:`~astropy.units.Unit` objects Raises ------ TypeError If `target` is not a valid type for :class:`~astropy.units.Unit` when `from_annotations == True`, ValueError If a `target` is a valid unit type but not a valid value for :class:`~astropy.units.Unit`. """ # Note: this method does not allow for astropy physical types. This is # done because we expect all use cases of CheckUnits to define the # exact units desired. 
# allowed_units = [] for target in targets: try: target_unit = u.Unit(target) allowed_units.append(target_unit) except TypeError as err: # not a unit type if not from_annotations: raise err continue return allowed_units @staticmethod def _normalize_equivalencies(equivalencies): """ Normalizes equivalencies to ensure each is in a 4-tuple form:: (from_unit, to_unit, forward_func, backward_func) `forward_func` maps `from_unit` into `to_unit` and `backward_func` does the reverse. Parameters ---------- equivalencies: list of equivalent pairs list of astropy :mod:`~astropy.units.equivalencies` to be normalized Raises ------ ValueError if an equivalency can not be interpreted Notes ----- * the code here was copied and modified from :func:`astropy.units.core._normalize_equivalencies` from AstroPy version 3.2.3 * this will work on both the old style list equivalencies (pre AstroPy v3.2.1) and the modern equivalencies defined with the :class:`~astropy.units.equivalencies.Equivalency` class """ if equivalencies is None: return [] normalized = [] def return_argument(x): return x for i, equiv in enumerate(equivalencies): if len(equiv) == 2: from_unit, to_unit = equiv a = b = return_argument elif len(equiv) == 3: from_unit, to_unit, a = equiv b = a elif len(equiv) == 4: from_unit, to_unit, a, b = equiv else: raise ValueError(f"Invalid equivalence entry {i}: {equiv!r}") if not ( from_unit is u.Unit(from_unit) and (to_unit is None or to_unit is u.Unit(to_unit)) and callable(a) and callable(b) ): raise ValueError(f"Invalid equivalence entry {i}: {equiv!r}") normalized.append((from_unit, to_unit, a, b)) return normalized def _flatten_equivalencies_list(self, elist): """ Given a list of equivalencies, flatten out any sub-element lists Parameters ---------- elist: list list of astropy :mod:`~astropy.units.equivalencies` to be flattened Returns ------- list a flattened list of astropy :mod:`~astropy.units.equivalencies` """ new_list = [] for el in elist: if not isinstance(el, list): 
new_list.append(el) else: new_list.extend(self._flatten_equivalencies_list(el)) return new_list def check_units( func=None, checks_on_return: Dict[str, Any] = None, **checks: Dict[str, Any] ): """ A decorator to 'check' -- limit/control -- the units of input and return arguments to a function or method. Parameters ---------- func: The function to be decorated checks_on_return: list of astropy :mod:`~astropy.units` or dict of unit specifications Specifications for unit checks on the return of the function being wrapped. (see `check units`_ for valid specifications) **checks: list of astropy :mod:`~astropy.units` or dict of unit specifications Specifications for unit checks on the input arguments of the function being wrapped. Each keyword argument in `checks` is the name of a function argument to be checked and the keyword value contains the unit check specifications. .. _`check units`: Unit checks can be defined by passing one of the astropy :mod:`~astropy.units`, a list of astropy units, or a dictionary containing the keys defined below. Units can also be defined with function annotations, but must be consistent with decorator `**checks` arguments if used concurrently. If a key is omitted, then the default value will be assumed. ====================== ======= ================================================ Key Type Description ====================== ======= ================================================ units list of desired astropy :mod:`~astropy.units` equivalencies | [DEFAULT `None`] A list of equivalent pairs to try if | the units are not directly convertible. | (see :mod:`~astropy.units.equivalencies`, and/or `astropy equivalencies`_) pass_equivalent_units `bool` | [DEFAULT `False`] allow equivalent units | to pass ====================== ======= ================================================ Notes ----- * Checking of function arguments `*args` and `**kwargs` is not supported. 
* Decorator does NOT perform any unit conversions, look to :func:`~plasmapy.utils.decorators.validate_quantities` if that functionality is desired. * If it is desired that `None` values do not raise errors or warnings, then include `None` in the list of units or as a default value for the function argument. * If units are not specified in `checks`, then the decorator will attempt to identify desired units by examining the function annotations. * Full functionality is defined by the class :class:`CheckUnits`. Examples -------- Define units with decorator parameters:: import astropy.units as u from plasmapy.utils.decorators import check_units @check_units(arg1={'units': u.cm}, arg2=u.cm, checks_on_return=[u.cm, u.km]) def foo(arg1, arg2): return arg1 + arg2 # or on a method class Foo: @check_units(arg1={'units': u.cm}, arg2=u.cm, checks_on_return=[u.cm, u.km]) def bar(self, arg1, arg2): return arg1 + arg2 Define units with function annotations:: import astropy.units as u from plasmapy.utils.decorators import check_units @check_units def foo(arg1: u.cm, arg2: u.cm) -> u.cm: return arg1 + arg2 # or on a method class Foo: @check_units def bar(self, arg1: u.cm, arg2: u.cm) -> u.cm: return arg1 + arg2 Allow `None` values to pass:: import astropy.units as u from plasmapy.utils.decorators import check_units @check_units(checks_on_return=[u.cm, None]) def foo(arg1: u.cm = None): return arg1 Allow return values to have equivalent units:: import astropy.units as u from plasmapy.utils.decorators import check_units @check_units(arg1={'units': u.cm}, checks_on_return={'units': u.km, 'pass_equivalent_units': True}) def foo(arg1): return arg1 Allow equivalent units to pass with specified equivalencies:: import astropy.units as u from plasmapy.utils.decorators import check_units @check_units(arg1={'units': u.K, 'equivalencies': u.temperature(), 'pass_equivalent_units': True}) def foo(arg1): return arg1 .. 
_astropy equivalencies: https://docs.astropy.org/en/stable/units/equivalencies.html """ if checks_on_return is not None: checks["checks_on_return"] = checks_on_return if func is not None: # `check_units` called as a function return CheckUnits(**checks)(func) else: # `check_units` called as a decorator "sugar-syntax" return CheckUnits(**checks) def check_values( func=None, checks_on_return: Dict[str, bool] = None, **checks: Dict[str, bool] ): """ A decorator to 'check' -- limit/control -- the values of input and return arguments to a function or method. Parameters ---------- func: The function to be decorated checks_on_return: Dict[str, bool] Specifications for value checks on the return of the function being wrapped. (see `check values`_ for valid specifications) **checks: Dict[str, Dict[str, bool]] Specifications for value checks on the input arguments of the function being wrapped. Each keyword argument in `checks` is the name of a function argument to be checked and the keyword value contains the value check specifications. .. _`check values`: The value check specifications are defined within a dictionary containing the keys defined below. If the dictionary is empty or omitting keys, then the default value will be assumed for the missing keys. ================ ======= ================================================ Key Type Description ================ ======= ================================================ can_be_negative `bool` [DEFAULT `True`] values can be negative can_be_complex `bool` [DEFAULT `False`] values can be complex numbers can_be_inf `bool` [DEFAULT `True`] values can be :data:`~numpy.inf` can_be_nan `bool` [DEFAULT `True`] values can be :data:`~numpy.nan` none_shall_pass `bool` [DEFAULT `False`] values can be a python `None` can_be_zero `bool` [DEFAULT `True`] values can be zero ================ ======= ================================================ Notes ----- * Checking of function arguments `*args` and `**kwargs` is not supported. 
* Full functionality is defined by the class :class:`CheckValues`. Examples -------- .. code-block:: python from plasmapy.utils.decorators import check_values @check_values(arg1={'can_be_negative': False, 'can_be_nan': False}, arg2={'can_be_inf': False}, checks_on_return={'none_shall_pass': True) def foo(arg1, arg2): return None # on a method class Foo: @check_values(arg1={'can_be_negative': False, 'can_be_nan': False}, arg2={'can_be_inf': False}, checks_on_return={'none_shall_pass': True) def bar(self, arg1, arg2): return None """ if checks_on_return is not None: checks["checks_on_return"] = checks_on_return if func is not None: # `check_values` called as a function return CheckValues(**checks)(func) else: # `check_values` called as a decorator "sugar-syntax" return CheckValues(**checks) def check_relativistic(func=None, betafrac=0.05): r""" Warns or raises an exception when the output of the decorated function is greater than `betafrac` times the speed of light. Parameters ---------- func : `function`, optional The function to decorate. betafrac : float, optional The minimum fraction of the speed of light that will raise a `~plasmapy.utils.RelativityWarning`. Defaults to 5%. Returns ------- function Decorated function. Raises ------ TypeError If `V` is not a `~astropy.units.Quantity`. ~astropy.units.UnitConversionError If `V` is not in units of velocity. ValueError If `V` contains any `~numpy.nan` values. ~plasmapy.utils.exceptions.RelativityError If `V` is greater than or equal to the speed of light. Warns ----- : `~plasmapy.utils.exceptions.RelativityWarning` If `V` is greater than or equal to `betafrac` times the speed of light, but less than the speed of light. Examples -------- >>> from astropy import units as u >>> @check_relativistic ... def speed(): ... return 1 * u.m / u.s Passing in a custom `betafrac`: >>> @check_relativistic(betafrac=0.01) ... def speed(): ... 
return 1 * u.m / u.s """ def decorator(f): @preserve_signature @functools.wraps(f) def wrapper(*args, **kwargs): return_ = f(*args, **kwargs) _check_relativistic(return_, f.__name__, betafrac=betafrac) return return_ return wrapper if func: return decorator(func) return decorator def _check_relativistic(V, funcname, betafrac=0.05): r""" Warn or raise error for relativistic or superrelativistic velocities. Parameters ---------- V : ~astropy.units.Quantity A velocity. funcname : str The name of the original function to be printed in the error messages. betafrac : float, optional The minimum fraction of the speed of light that will generate a warning. Defaults to 5%. Raises ------ TypeError If `V` is not a `~astropy.units.Quantity`. ~astropy.units.UnitConversionError If `V` is not in units of velocity. ValueError If `V` contains any `~numpy.nan` values. RelativityError If `V` is greater than or equal to the speed of light. Warns ----- ~plasmapy.utils.RelativityWarning If `V` is greater than or equal to the specified fraction of the speed of light. Examples -------- >>> from astropy import units as u >>> _check_relativistic(1*u.m/u.s, 'function_calling_this') """ # TODO: Replace `funcname` with func.__name__? errmsg = "V must be a Quantity with units of velocity in _check_relativistic" if not isinstance(V, u.Quantity): raise TypeError(errmsg) try: V_over_c = (V / c).to_value(u.dimensionless_unscaled) except Exception: raise u.UnitConversionError(errmsg) beta = np.max(np.abs(V_over_c)) if beta == np.inf: raise RelativityError(f"{funcname} is yielding an infinite velocity.") elif beta >= 1: raise RelativityError( f"{funcname} is yielding a velocity that is {str(round(beta, 3))} " f"times the speed of light." ) elif beta >= betafrac: warnings.warn( f"{funcname} is yielding a velocity that is " f"{str(round(beta * 100, 3))}% of the speed of " f"light. Relativistic effects may be important.", RelativityWarning, )
kagemeka/atcoder-submissions
jp.atcoder/abc113/abc113_c/11952584.cpp
#include <bits/stdc++.h> using namespace std; string fill(string s) { while (s.size() < 6) s = "0" + s; return s; } string create_id(int p, int o) { return fill(to_string(p)) + fill(to_string(o)); } int main() { ios::sync_with_stdio(false); cin.tie(0); int n, m; cin >> n >> m; vector<int> p(m); vector<int> y(m); for (int i = 0; i < m; i++) cin >> p[i] >> y[i]; vector<vector<vector<int>>> db(n + 1); for (int i = 0; i < m; i++) { db[p[i]].push_back({y[i], i}); } vector<string> res(m); for (int i = 1; i < n + 1; i++) { sort(db[i].begin(), db[i].end()); for (int j = 0; j < db[i].size(); j++) { res[db[i][j][1]] = create_id(i, j + 1); } } for (string &id : res) { cout << id << '\n'; } return 0; }
EliahKagan/old-practice-snapshot
main/palindrome-linked-list/palindrome-linked-list.java
/** * Definition for singly-linked list. * public class ListNode { * int val; * ListNode next; * ListNode(int x) { val = x; } * } */ class Solution { public boolean isPalindrome(ListNode head) { if (head == null) return true; final ListNode beforeSecond = findMiddle(head); ListNode second = beforeSecond.next; beforeSecond.next = null; second = reverse(second); final boolean ret = match(head, second); beforeSecond.next = reverse(second); return ret; } private static ListNode findMiddle(ListNode head) { for (ListNode fast = head.next; fast != null && fast.next != null; fast = fast.next.next) head = head.next; return head; } private static ListNode reverse(ListNode head) { // returns new head ListNode acc = null; while (head != null) { final ListNode next = head.next; head.next = acc; acc = head; head = next; } return acc; } private static boolean match(ListNode first, ListNode second) { while (first != null && second != null) { if (first.val != second.val) return false; first = first.next; second = second.next; } return true; } }
MRezaNasirloo/Slick
slick-compiler/src/main/java/com/mrezanasirloo/slick/middleware/components/MiddlewareGeneratorBaseImpl.java
<filename>slick-compiler/src/main/java/com/mrezanasirloo/slick/middleware/components/MiddlewareGeneratorBaseImpl.java /* * Copyright 2018. <NAME> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.mrezanasirloo.slick.middleware.components; import com.mrezanasirloo.slick.middleware.AnnotatedMethod; import com.mrezanasirloo.slick.middleware.ContainerClass; import com.mrezanasirloo.slick.middleware.MiddlewareProcessor; import com.squareup.javapoet.ClassName; import com.squareup.javapoet.FieldSpec; import com.squareup.javapoet.MethodSpec; import com.squareup.javapoet.ParameterizedTypeName; import com.squareup.javapoet.TypeName; import com.squareup.javapoet.TypeSpec; import com.squareup.javapoet.TypeVariableName; import java.util.ArrayList; import java.util.Collections; import java.util.LinkedHashSet; import java.util.List; import java.util.Set; import javax.lang.model.element.Modifier; import javax.lang.model.element.TypeParameterElement; import static com.mrezanasirloo.slick.middleware.MiddlewareProcessor.CLASS_NAME_REQUEST_SIMPLE; /** * @author : <EMAIL> * Created on: 2017-03-22 */ public class MiddlewareGeneratorBaseImpl implements MiddlewareGenerator { private final ConstructorGenerator constructorGenerator; private final FieldGenerator fieldGenerator; public MiddlewareGeneratorBaseImpl(ConstructorGenerator constructorGenerator, FieldGenerator fieldGenerator) { this.constructorGenerator = constructorGenerator; this.fieldGenerator = fieldGenerator; } @Override public 
TypeSpec generate(ContainerClass container, List<AnnotatedMethod> annotatedMethods) { List<MethodSpec> methodSpecList = new ArrayList<>(annotatedMethods.size()); Set<ClassName> middleware = new LinkedHashSet<>(); for (AnnotatedMethod annotatedMethod : annotatedMethods) { Collections.addAll(middleware, annotatedMethod.getMiddlewareClassNames()); } final List<FieldSpec> fieldSpecs = fieldGenerator.generate(middleware); final MethodSpec constructor = constructorGenerator.generate(container, middleware); methodSpecList.add(constructor); List<TypeVariableName> typeVariableNames = new ArrayList<>(container.getTypeParameters().size()); for (TypeParameterElement element : container.getTypeParameters()) { final TypeName typeName = TypeVariableName.get(element.asType()); typeVariableNames.add((TypeVariableName) typeName); } //add methods for (AnnotatedMethod am : annotatedMethods) { final ParameterizedTypeName parametrizedType = am.getMethodType().requestTypeGenerator.generate(am); final MethodSpec.Builder methodBuilder = MethodSpec.overriding(am.getSuperMethod()) .beginControlFlow("final $T request = new $T()", parametrizedType, parametrizedType) .addCode("@Override ") .beginControlFlow("public $T target($T data)", am.getReturnType().box(), am.getParamType()) .addStatement("$L$T.super.$L($L)", handleReturn(am), container.getSubclass(), am.getMethodName(), getParams(am)); returnNullIfNeeded(methodBuilder, am) .endControlFlow() .endControlFlow(""); am.getMethodType().rxSourceGenerator.generate(am, methodBuilder) .addStatement("request.with($L).through($L).destination($L)", am.getParamName(), am.getMiddlewareVarNamesAsString(), CLASS_NAME_REQUEST_SIMPLE.equals(am.getMethodType().requestType) ? 
am.getCallbackName() : "source") .addStatement("$T.getInstance().push(request).processLastRequest()", MiddlewareProcessor.CLASS_NAME_REQUEST_STACK); returnNullIfNeededAtTheEnd(methodBuilder, am); methodSpecList.add(methodBuilder.build()); } return TypeSpec.classBuilder(container.getSubclass()) .addModifiers(Modifier.PUBLIC) .superclass(container.getSuperClass()) .addTypeVariables(typeVariableNames) .addMethods(methodSpecList) .addFields(fieldSpecs) .build(); } private String getParams(AnnotatedMethod am) { String paramString = ""; final int size = am.getArgs().size(); if (size == 1) { paramString += am.isCallback(0) ? "null" : "data"; } else if (size == 2) { paramString += am.isCallback(0) ? "null, " : "data" + ", "; paramString += am.isCallback(1) ? "null" : "data"; } return paramString; } private MethodSpec.Builder returnNullIfNeeded(MethodSpec.Builder builder, AnnotatedMethod am) { if (TypeName.get(void.class).equals(am.getReturnType())) { return builder.addStatement("return null"); } return builder; } private MethodSpec.Builder returnNullIfNeededAtTheEnd(MethodSpec.Builder builder, AnnotatedMethod am) { if (!TypeName.get(void.class).equals(am.getReturnType())) { if (CLASS_NAME_REQUEST_SIMPLE.equals(am.getMethodType().requestType)) { builder.addStatement("return null"); } else { builder.addStatement("return source"); } } return builder; } private String handleReturn(AnnotatedMethod am) { if (TypeName.get(void.class).equals(am.getReturnType())) { return ""; } return "return "; } }
premium-runner-benchmark/isle-editor
app/components/spectacle/utils/context.js
<reponame>premium-runner-benchmark/isle-editor /* eslint-disable react/require-default-props */ /* * The MIT License (MIT) * * Copyright (c) 2013-2018 Formidable Labs, Inc. * * Copyright (c) 2016-2018 <NAME>, <NAME>, and potentially other * DefinitelyTyped contributors * * Permission is hereby granted, free of charge, to any person obtaining a copy of * this software and associated documentation files (the "Software"), to deal in * the Software without restriction, including without limitation the rights to * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of * the Software, and to permit persons to whom the Software is furnished to do so, * subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER * IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ // MODULES // import React, { Component } from 'react'; import PropTypes from 'prop-types'; // VARIABLES // export const SpectacleContext = React.createContext( 'spectacle-context' ); // MAIN // class Context extends Component { render() { const { history, onStateChange, styles, route, fragments, notes, updateNotes, contentHeight, contentWidth, addFragment, updateFragment } = this.props; const value = { history, onStateChange, route, styles, fragments, notes, updateNotes, contentHeight, contentWidth, addFragment, updateFragment }; return ( <SpectacleContext.Provider value={value} > {this.props.children} </SpectacleContext.Provider> ); } } // PROPERTIES // Context.propTypes = { children: PropTypes.node, fragments: PropTypes.object, history: PropTypes.object, onStateChange: PropTypes.func, contentHeight: PropTypes.number, contentWidth: PropTypes.number, notes: PropTypes.object, route: PropTypes.object, styles: PropTypes.object }; // EXPORTS // export default Context;
srcarter3/awips2
cave/com.raytheon.uf.viz.kml.export/src/com/raytheon/uf/viz/kml/export/graphics/ext/KmlRasterImage.java
/** * This software was developed and / or modified by Raytheon Company, * pursuant to Contract DG133W-05-CQ-1067 with the US Government. * * U.S. EXPORT CONTROLLED TECHNICAL DATA * This software product contains export-restricted data whose * export/transfer/disclosure is restricted by U.S. law. Dissemination * to non-U.S. persons whether in the United States or abroad requires * an export license or other authorization. * * Contractor Name: Raytheon Company * Contractor Address: 6825 Pine Street, Suite 340 * Mail Stop B8 * Omaha, NE 68106 * 402.291.0100 * * See the AWIPS II Master Rights File ("Master Rights File.pdf") for * further licensing information. **/ package com.raytheon.uf.viz.kml.export.graphics.ext; import java.awt.Image; import java.awt.image.BufferedImage; import java.awt.image.RenderedImage; import org.eclipse.swt.graphics.RGB; import com.raytheon.uf.viz.core.data.IRenderedImageCallback; import com.raytheon.uf.viz.core.drawables.ext.IImagingExtension; import com.raytheon.uf.viz.core.exception.VizException; /** * Basically a wrapper around a RenderedImageCallback but also has logic for * filling in transparent parts of plots. * * <pre> * * SOFTWARE HISTORY * * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- * Jun 11, 2012 bsteffen Initial creation * * </pre> * * @author bsteffen * @version 1.0 */ public class KmlRasterImage extends KmlImage { protected final IRenderedImageCallback callback; /** * @param imageCallback */ public KmlRasterImage(IRenderedImageCallback imageCallback) { this.callback = imageCallback; } @Override public Class<? 
extends IImagingExtension> getExtensionClass() { return KmlRasterImageExtension.class; } /** * get the image for a ground overlay * * @return * @throws VizException */ public RenderedImage getImage() throws VizException { return callback.getImage(); } /** * get the image for a plot * * @param target * @return * @throws VizException */ public RenderedImage getImage(RGB backcolor) throws VizException { RenderedImage ri = getImage(); BufferedImage bi = cloneRenderedImage(ri); applyFill(bi, backcolor); return bi; } protected BufferedImage cloneRenderedImage(RenderedImage ri) throws VizException { BufferedImage bi = new BufferedImage(ri.getWidth(), ri.getHeight(), BufferedImage.TYPE_INT_ARGB); if (ri instanceof Image) { bi.getGraphics().drawImage((Image) ri, 0, 0, null); } else { throw new VizException("Cannot handle image of type " + ri.getClass().getSimpleName()); } return bi; } protected void applyFill(BufferedImage bi, RGB backcolor) { int backint = (0xFF << 24) | (backcolor.red << 16) | (backcolor.green << 8) | backcolor.blue; for (int i = 0; i < bi.getWidth(); i++) { for (int j = 0; j < bi.getHeight(); j++) { int argb = bi.getRGB(i, j); int alphaint = ((argb >> 24) & 0xFF); if (alphaint == 0) { argb = backint; } else if (alphaint != 255) { // blend the colors. double alpha = alphaint / 255.0; int red = (argb >> 16) & 0xFF; int green = (argb >> 8) & 0xFF; int blue = (argb >> 0) & 0xFF; red = (int) (red * alpha + backcolor.red * (1.0 - alpha)); green = (int) (green * alpha + backcolor.green * (1.0 - alpha)); blue = (int) (blue * alpha + backcolor.blue * (1.0 - alpha)); argb = (0xFF << 24) | (red << 16) | (green << 8) | blue; } bi.setRGB(i, j, argb); } } } }
nonomal/ultimateshell
ultimate-shell/target/com/g3g4x5x6/sftp/actions/UploadAction.java
<filename>ultimate-shell/target/com/g3g4x5x6/sftp/actions/UploadAction.java package com.g3g4x5x6.sftp.actions; import com.g3g4x5x6.sftp.SftpBrowser; public class UploadAction extends DefaultAction { public UploadAction(SftpBrowser sftpBrowser) { super(sftpBrowser); } }
juanfelipe82193/opensap
sapui5-sdk-1.74.0/resources/sap/ushell/renderers/fiori2/search/SearchResultListFormatter-dbg.js
<gh_stars>0 // Copyright (c) 2009-2017 SAP SE, All Rights Reserved /* global sap */ // iteration 0 ok /* eslint no-fallthrough: 0 */ sap.ui.define([ 'sap/ushell/renderers/fiori2/search/SearchConfiguration', 'sap/ushell/renderers/fiori2/search/SearchHelper', 'sap/ushell/renderers/fiori2/search/SearchNavigationObject', 'sap/ushell/renderers/fiori2/search/SearchNavigationObjectForSinaNavTarget' ], function (SearchConfiguration, SearchHelper, SearchNavigationObject, SearchNavigationObjectForSinaNavTarget) { "use strict"; var module = sap.ushell.renderers.fiori2.search.SearchResultListFormatter = function () { this.init.apply(this, arguments); }; module.prototype = { init: function () {}, format: function (searchResultSet, terms, options) { options = options || {}; options.suppressHighlightedValues = options.suppressHighlightedValues || false; var sina = searchResultSet.sina; var layoutCache = {}; var formattedResultItems = []; var resultItems = searchResultSet.items; var i, z; for (i = 0; i < resultItems.length; i++) { var resultItem = resultItems[i]; var formattedResultItem = {}; var aItemAttributes = []; for (z = 0; z < resultItem.detailAttributes.length; z++) { var detailAttribute = resultItem.detailAttributes[z]; var attributeValue = detailAttribute.value; switch (detailAttribute.metadata.type) { case sina.AttributeType.ImageBlob: if (attributeValue && attributeValue.trim().length > 0) { attributeValue = "data:;base64," + attributeValue; } case sina.AttributeType.ImageUrl: formattedResultItem.imageUrl = attributeValue; formattedResultItem.imageFormat = detailAttribute.metadata.format ? 
detailAttribute.metadata.format.toLowerCase() : undefined; if (detailAttribute.defaultNavigationTarget) { formattedResultItem.imageNavigation = new SearchNavigationObjectForSinaNavTarget(detailAttribute.defaultNavigationTarget); } break; case sina.AttributeType.GeoJson: formattedResultItem.geoJson = { value: attributeValue, label: (resultItem.title || detailAttribute.label) }; break; case sina.AttributeType.Group: var attributeGroupAsAttribute = this._formatAttributeGroup(detailAttribute, options, /*index*/ z); aItemAttributes.push(attributeGroupAsAttribute); break; case sina.AttributeType.Double: case sina.AttributeType.Integer: case sina.AttributeType.String: case sina.AttributeType.Date: case sina.AttributeType.Time: case sina.AttributeType.Timestamp: var oItemAttribute = this._formatSingleAttribute(detailAttribute, options, /*index*/ z); aItemAttributes.push(oItemAttribute); break; } } formattedResultItem.key = resultItem.key; formattedResultItem.keystatus = resultItem.keystatus; formattedResultItem.dataSource = resultItem.dataSource; formattedResultItem.dataSourceName = resultItem.dataSource.label; if (resultItem.titleAttributes) { var titleAttribute, formattedTitleAttribute, formattedTitle; var title = []; for (z = 0; z < resultItem.titleAttributes.length; z++) { titleAttribute = resultItem.titleAttributes[z]; if (titleAttribute.metadata.type === sina.AttributeType.Group) { formattedTitleAttribute = this._formatAttributeGroup(titleAttribute, options, /*index*/ z); } else { formattedTitleAttribute = this._formatSingleAttribute(titleAttribute, options, /*index*/ z); } formattedTitle = formattedTitleAttribute.value; title.push(formattedTitle); } formattedResultItem.title = title.join(' '); } else { formattedResultItem.title = options.suppressHighlightedValues ? 
resultItem.title : resultItem.titleHighlighted; } if (resultItem.titleDescriptionAttributes) { var titleDescriptionAttribute, formattedTitleDescriptionAttribute, formattedTitleDescription; var titleDescription = []; var titleDescriptionLabel = []; for (z = 0; z < resultItem.titleDescriptionAttributes.length; z++) { titleDescriptionAttribute = resultItem.titleDescriptionAttributes[z]; if (titleDescriptionAttribute.metadata.type === sina.AttributeType.Group) { formattedTitleDescriptionAttribute = this._formatAttributeGroup(titleDescriptionAttribute, options, /*index*/ z); } else { formattedTitleDescriptionAttribute = this._formatSingleAttribute(titleDescriptionAttribute, options, /*index*/ z); } formattedTitleDescription = formattedTitleDescriptionAttribute.value; titleDescription.push(formattedTitleDescription); titleDescriptionLabel.push(formattedTitleDescriptionAttribute.name); } formattedResultItem.titleDescription = titleDescription.join(' '); formattedResultItem.titleDescriptionLabel = titleDescriptionLabel.join(' '); } formattedResultItem.itemattributes = aItemAttributes; if (resultItem.defaultNavigationTarget) { formattedResultItem.titleNavigation = new SearchNavigationObjectForSinaNavTarget(resultItem.defaultNavigationTarget); if (!formattedResultItem.title || formattedResultItem.title.length === 0) { formattedResultItem.title = resultItem.defaultNavigationTarget.label; } } if (resultItem.navigationTargets && resultItem.navigationTargets.length > 0) { formattedResultItem.navigationObjects = []; for (var j = 0; j < resultItem.navigationTargets.length; j++) { var navigationTarget = new SearchNavigationObjectForSinaNavTarget(resultItem.navigationTargets[j]); navigationTarget.setLoggingType('RESULT_LIST_ITEM_NAVIGATE_CONTEXT'); formattedResultItem.navigationObjects.push(navigationTarget); } } var layoutCacheForItemType = layoutCache[resultItem.dataSource.id] || {}; layoutCache[resultItem.dataSource.id] = layoutCacheForItemType; formattedResultItem.layoutCache = 
layoutCacheForItemType; formattedResultItem.selected = formattedResultItem.selected || false; formattedResultItem.expanded = formattedResultItem.expanded || false; var additionalParameters = {}; this._formatResultForDocuments(resultItem, additionalParameters); this._formatResultForNotes(resultItem, additionalParameters); formattedResultItem.additionalParameters = additionalParameters; formattedResultItem.positionInList = i; formattedResultItem.resultSetId = searchResultSet.id; formattedResultItems.push(formattedResultItem); } return formattedResultItems; }, _formatAttributeGroup: function (attributeGroup, options, index) { var attributeGroupAsAttribute = {}; var attributes = {}; attributeGroupAsAttribute.name = attributeGroup.label; var isWhyFound = false; var isLongtext = false; var privateGroupMetadata = attributeGroup.metadata._private; var parentAttribute, childAttribute; // for (var attributeName in attributeGroup.attributes) { for (var i = 0; i < attributeGroup.attributes.length; i++) { // var _attribute = attributeGroup.attributes[attributeName].attribute; var attributeGroupMembership = attributeGroup.attributes[i]; var _attribute = attributeGroupMembership.attribute; var attributeNameInGroup = attributeGroupMembership.metadata.nameInGroup; var _formattedAttribute; if (_attribute.metadata.type === _attribute.sina.AttributeType.Group) { _formattedAttribute = this._formatAttributeGroup(_attribute, options, index); } else { _formattedAttribute = this._formatSingleAttribute(_attribute, options, index); } if (privateGroupMetadata) { if (privateGroupMetadata.parentAttribute === _attribute.metadata) { parentAttribute = _formattedAttribute; } else if (privateGroupMetadata.childAttribute === _attribute.metadata) { childAttribute = _formattedAttribute; } } // attributes[attributeGroup.attributes[attributeName].attribute.nameInGroup] = _formattedAttribute; if (_formattedAttribute.value !== undefined && _formattedAttribute.value.length > 0) { 
attributes[attributeNameInGroup] = _formattedAttribute; isWhyFound = isWhyFound || _formattedAttribute.whyfound; isLongtext = isLongtext || _formattedAttribute.longtext !== undefined; } } attributeGroupAsAttribute.value = ""; attributeGroupAsAttribute.valueRaw = undefined; attributeGroupAsAttribute.valueWithoutWhyfound = ""; attributeGroupAsAttribute.whyfound = false; if (Object.keys(attributes).length > 0) { var regularFormatting = true; if (privateGroupMetadata && parentAttribute && childAttribute && (privateGroupMetadata.isUnitOfMeasure || privateGroupMetadata.isCurrency || privateGroupMetadata.isDescription)) { var parentAttributeValue = parentAttribute.value; var childAttributeValue = childAttribute.value; parentAttributeValue = parentAttributeValue !== undefined && parentAttributeValue.trim().length > 0 ? parentAttributeValue : undefined; childAttributeValue = childAttributeValue !== undefined && childAttributeValue.trim().length > 0 ? childAttributeValue : undefined; if (!(parentAttributeValue && childAttributeValue)) { if (privateGroupMetadata.isUnitOfMeasure || privateGroupMetadata.isCurrency) { if (parentAttributeValue && !childAttributeValue) { attributeGroupAsAttribute.value = parentAttribute.value; attributeGroupAsAttribute.valueRaw = parentAttribute.valueRaw; attributeGroupAsAttribute.valueWithoutWhyfound = parentAttribute.valueWithoutWhyfound; regularFormatting = false; } } else if (privateGroupMetadata.isDescription) { var textArrangement = privateGroupMetadata.textArrangement; var sina = attributeGroup.sina; if (textArrangement === sina.AttributeGroupTextArrangement.TextFirst) { if (!parentAttributeValue && childAttributeValue) { attributeGroupAsAttribute.value = childAttribute.value; attributeGroupAsAttribute.valueRaw = childAttribute.valueRaw; attributeGroupAsAttribute.valueWithoutWhyfound = childAttribute.valueWithoutWhyfound; regularFormatting = false; } } else if (textArrangement === sina.AttributeGroupTextArrangement.TextLast) { if 
(parentAttributeValue && !childAttributeValue) { attributeGroupAsAttribute.value = parentAttribute.value; attributeGroupAsAttribute.valueRaw = parentAttribute.valueRaw; attributeGroupAsAttribute.valueWithoutWhyfound = parentAttribute.valueWithoutWhyfound; regularFormatting = false; } } else if (textArrangement === sina.AttributeGroupTextArrangement.TextOnly) { if (!childAttributeValue) { regularFormatting = false; } } } } } if (regularFormatting) { attributeGroupAsAttribute.value = this._formatBasedOnGroupTemplate(attributeGroup.template, attributes, "value"); attributeGroupAsAttribute.valueRaw = this._formatBasedOnGroupTemplate(attributeGroup.template, attributes, "valueRaw"); attributeGroupAsAttribute.valueWithoutWhyfound = this._formatBasedOnGroupTemplate(attributeGroup.template, attributes, "valueWithoutWhyfound"); } attributeGroupAsAttribute.whyfound = isWhyFound; } attributeGroupAsAttribute.key = attributeGroup.id; attributeGroupAsAttribute.isTitle = false; // used in table view attributeGroupAsAttribute.isSortable = attributeGroup.metadata.isSortable; // used in table view attributeGroupAsAttribute.attributeIndex = index; // used in table view attributeGroupAsAttribute.displayOrder = attributeGroup.metadata.usage.Detail && attributeGroup.metadata.usage.Detail.displayOrder; if (isLongtext) { attributeGroupAsAttribute.longtext = attributeGroupAsAttribute.value; } return attributeGroupAsAttribute; }, _formatSingleAttribute: function (detailAttribute, options, index) { var oItemAttribute = {}; var sina = detailAttribute.sina; oItemAttribute.name = detailAttribute.label; oItemAttribute.valueRaw = detailAttribute.value; oItemAttribute.value = options.suppressHighlightedValues ? 
detailAttribute.valueFormatted : detailAttribute.valueHighlighted; oItemAttribute.valueWithoutWhyfound = detailAttribute.valueFormatted; //result[propDisplay].valueWithoutWhyfound; // if (detailAttribute.isHighlighted && detailAttribute.metadata.type.toLowerCase() === "longtext") { // // mix snippet into longtext values // var valueHighlighted = detailAttribute.valueHighlighted; // valueHighlighted = valueHighlighted.replace(/(^[.][.][.])|([.][.][.]$)/, "").trim(); // var valueUnHighlighted = valueHighlighted.replace(/[<]([/])?b[>]/g, ""); // oItemAttribute.value = detailAttribute.valueFormatted.replace(valueUnHighlighted, valueHighlighted); // } oItemAttribute.key = detailAttribute.id; oItemAttribute.isTitle = false; // used in table view oItemAttribute.isSortable = detailAttribute.metadata.isSortable; // used in table view oItemAttribute.attributeIndex = index; // used in table view oItemAttribute.displayOrder = detailAttribute.metadata.usage.Detail && detailAttribute.metadata.usage.Detail.displayOrder; oItemAttribute.whyfound = detailAttribute.isHighlighted; if (detailAttribute.defaultNavigationTarget) { oItemAttribute.defaultNavigationTarget = new SearchNavigationObjectForSinaNavTarget(detailAttribute.defaultNavigationTarget); } // oItemAttribute.hidden = detailAttribute.metadata.hidden; if (detailAttribute.metadata.format && (detailAttribute.metadata.format === sina.AttributeFormatType.MultilineText || detailAttribute.metadata.format === sina.AttributeFormatType.Longtext)) { oItemAttribute.longtext = detailAttribute.value; } return oItemAttribute; }, _formatBasedOnGroupTemplate: function (template, attributes, valuePropertyName) { if (!(template && attributes && valuePropertyName)) { return ""; } var value = "", pos = 0; var match, regex = /{\w+}/gi; while ((match = regex.exec(template)) !== null) { value += template.substring(pos, match.index); var attributeName = match[0].slice(1, -1); value += attributes[attributeName] && 
attributes[attributeName][valuePropertyName] || ""; pos = regex.lastIndex; } value += template.substring(pos); return value; }, _formatResultForDocuments: function (resultItem, additionalParameters) { var keyFields = ''; additionalParameters.isDocumentConnector = false; var j, detailAttribute; for (j = 0; j < resultItem.detailAttributes.length; j++) { detailAttribute = resultItem.detailAttributes[j]; if (detailAttribute.metadata.id === 'FILE_PROPERTY') { additionalParameters.isDocumentConnector = true; } if (detailAttribute.metadata.isKey === true) { if (keyFields.length > 0) { keyFields += ';'; } keyFields = keyFields + detailAttribute.metadata.id + '=' + detailAttribute.value; //encodeURIComponent(result[prop].valueRaw); } } //fileloader if (additionalParameters.isDocumentConnector === true) { var sidClient = ';o=sid(' + resultItem.dataSource.system + '.' + resultItem.dataSource.client + ')'; var connectorName = resultItem.dataSource.id; additionalParameters.imageUrl = "/sap/opu/odata/SAP/ESH_SEARCH_SRV" + sidClient + "/FileLoaderFiles(ConnectorId='" + connectorName + "',FileType='ThumbNail',SelectionParameters='" + keyFields + "')/$value"; additionalParameters.titleUrl = "/sap/opu/odata/SAP/ESH_SEARCH_SRV" + sidClient + "/FileLoaderFiles(ConnectorId='" + connectorName + "',FileType='BinaryContent',SelectionParameters='" + keyFields + "')/$value"; // var suvlink = "/sap/opu/odata/SAP/ESH_SEARCH_SRV/FileLoaderFiles(ConnectorId='" + connectorName + "',FileType='SUVFile',SelectionParameters='PHIO_ID=" + resultItem.PHIO_ID.valueRaw + "')/$value?sap-client=" + client; // var suvlink = '/sap-pdfjs/web/viewer.html?file=' + encodeURIComponent(suvlink); var suvlink = "/sap/opu/odata/SAP/ESH_SEARCH_SRV" + sidClient + "/FileLoaderFiles(ConnectorId='" + connectorName + "',FileType='SUVFile',SelectionParameters='" + keyFields + "')/$value"; additionalParameters.suvlink = '/sap/bc/ui5_ui5/ui2/ushell/resources/sap/fileviewer/viewer/web/viewer.html?file=' + 
encodeURIComponent(suvlink); if (!resultItem.navigationObjects) { resultItem.navigationObjects = []; } var navigationTarget = new SearchNavigationObject({ text: "Show Document", href: additionalParameters.suvlink, target: "_blank" }); resultItem.navigationObjects.push(navigationTarget); for (j = 0; j < resultItem.detailAttributes.length; j++) { detailAttribute = resultItem.detailAttributes[j]; if (detailAttribute.id === "PHIO_ID_THUMBNAIL" && detailAttribute.value) { additionalParameters.containsThumbnail = true; } if (detailAttribute.id === "PHIO_ID_SUV" && detailAttribute.value) { additionalParameters.containsSuvFile = true; } } } }, _formatResultForNotes: function (resultItem, additionalParameters) { } }; return module; });
Surfndez/operaprestodriver
src/com/opera/core/systems/scope/internal/OperaIntervals.java
<reponame>Surfndez/operaprestodriver /* Copyright 2008-2012 Opera Software ASA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.opera.core.systems.scope.internal; import org.openqa.selenium.support.ui.Duration; import static java.util.concurrent.TimeUnit.MILLISECONDS; import static java.util.concurrent.TimeUnit.MINUTES; import static java.util.concurrent.TimeUnit.SECONDS; /** * Enumerator for delay values in milliseconds. */ public enum OperaIntervals { /** * The response timeout specifies for how long OperaDriver should wait before timing out when it * has not received a callback confirmation message from Scope after executing a command. This * typically happens when it looses connection to Opera or when a triggered command takes too long * to finish. */ RESPONSE_TIMEOUT(new Duration(1, MINUTES)), /** * An implicit wait is to tell WebDriver to poll the DOM for a certain amount of time when trying * to find an element or elements if they are not immediately available. The default setting is * 0. Once set, the implicit wait is set for the life of the WebDriver object instance. */ IMPLICIT_WAIT(new Duration(0, MILLISECONDS)), /** * The page load timeout specifies how long the driver waits for a page to finish loading before * returning the control to the user. */ PAGE_LOAD_TIMEOUT(new Duration(30, SECONDS)), /** * The default poll interval for queries that are made across a network. Sleep intervals are not * needed for local polling, but we don't want to spam the network. 
*/ POLL_INTERVAL(new Duration(10, MILLISECONDS)), WINDOW_EVENT_TIMEOUT(new Duration(5, SECONDS)), OPERA_IDLE_TIMEOUT(new Duration(5, SECONDS)), /** * The frequency rate at which to poll internal data structures. Seeing as the data structures * are local and we in most cases wait for some condition to be true when polling, it's highly * advised to leave this at 0 to maximize the throughput. */ INTERNAL_FREQUENCY(new Duration(0, MILLISECONDS)), /** * The handshake timeout defines how long the Scope server (OperaDriver) should wait for a * connection from a client (Opera) before shutting down. If set to 0, it will wait * indefinitely. */ HANDSHAKE_TIMEOUT(new Duration(1, MINUTES)), KILL_GRACE_TIMEOUT(new Duration(1, SECONDS)), /** * Based on experiences with slow BCM7351 it is advaisable to set this to 50s for linuxsdk (devices). */ DEFAULT_RESPONSE_TIMEOUT(new Duration(10, SECONDS)), /** * The amount of time to wait for Opera to quit before throwing. */ QUIT_RESPONSE_TIMEOUT(new Duration(10, SECONDS)), /** * Sets the amount of time to wait for an asynchronous script to finish execution before throwing * an error. If the timeout is negative, then the script will be allowed to run indefinitely. * Based on experiences with slow BCM7351 it is advaisable to set this to 60s for linuxsdk (devices). */ SCRIPT_TIMEOUT(new Duration(10, SECONDS)), /** * The interval at which an ECMAScript should be attempted reevaluated in the case of it for some * reason failing. A script reevaluation will time out on {@link #SCRIPT_TIMEOUT}. */ SCRIPT_RETRY_INTERVAL(new Duration(50, MILLISECONDS)), /** * After starting the launcher we need to wait for the launcher to connect to our listener. If * the launcher does not connect within this timeout, we assume something has gone wrong. */ LAUNCHER_CONNECT_TIMEOUT(new Duration(5, SECONDS)), /** * If anything goes wrong while connected to the launcher, don't block forever. 
*/ LAUNCHER_RESPONSE_TIMEOUT(new Duration(3, MINUTES)), /** * If programs such as the launcher exits immediately with an improper exit value (!= 0) we can * assume something went wrong during initialization. We need to wait for a short period before * checking the exit value as it may take some time to start the program. */ PROCESS_START_SLEEP(new Duration(100, MILLISECONDS)), /** * {@link com.opera.core.systems.runner.inprocess.OperaInProcessRunner} doesn't know when the * executable is running. If an exit code is returned within this timeout, we assume the browser * has exited immediately (possibly with invalid command-line arguments, &c.). */ PROCESS_START_TIMEOUT(new Duration(300, MILLISECONDS)), MENU_EVENT_TIMEOUT(new Duration(1, SECONDS)), /** * When clicking several times in a row (e.g. for a double, triple or quadruple click) 640 ms is * the hardcoded interval Opera must wait before loosing the previous click state, not joining * clicks together. */ MULTIPLE_CLICK_SLEEP(new Duration(640, MILLISECONDS)), WINDOW_CLOSE_TIMEOUT(new Duration(500, MILLISECONDS)), /** * Different products have different animations when closing windows, and sometimes it might take * a little while if using an Opera action to close a window. */ WINDOW_CLOSE_USING_ACTION_SLEEP(new Duration(10, MILLISECONDS)), /** * The timeout for selftests. */ SELFTEST_TIMEOUT(new Duration(200, SECONDS)), /** * While communicating with slow devices it may happean data is not received yet so retry read * operation after this timeout Based on experiences with slow BCM7351 it is advaisable to set * this to 150ms for linuxsdk (devices). */ SOCKET_READ_RETRY_TIMEOUT(new Duration(0, MILLISECONDS)), /** * The default timeout of taking an external screen capture using a {@link * com.opera.core.systems.runner.OperaRunner} implementation. 
*/ RUNNER_SCREEN_CAPTURE_TIMEOUT(new Duration(20, MILLISECONDS)); private Duration duration; private OperaIntervals(Duration duration) { this.duration = duration; } public void setValue(Duration duration) { this.duration = duration; } public Duration getValue() { return duration; } /** * Converts the duration of the enum value as milliseconds. * * @return duration value in milliseconds */ public long getMs() { return duration.in(MILLISECONDS); } @Override public String toString() { return duration.toString(); } @SuppressWarnings("unused") public static String toList() { StringBuilder intervals = new StringBuilder(); for (OperaIntervals item : values()) { if (intervals.length() != 0) { intervals.append(", "); } intervals.append(item.toString()); } return String.format("Intervals [%s]", intervals.toString()); } }
theatlasroom/sshego
cmd/gosshtun/main.go
package main import ( "context" "flag" "fmt" "io" "log" "os" tun "github.com/glycerine/sshego" ssh "github.com/glycerine/sshego/xendor/github.com/glycerine/xcryptossh" ) const ProgramName = "gosshtun" func main() { myflags := flag.NewFlagSet(ProgramName, flag.ExitOnError) cfg := tun.NewSshegoConfig() cfg.DefineFlags(myflags) err := myflags.Parse(os.Args[1:]) if cfg.ShowVersion { fmt.Printf("\n%v\n", tun.SourceVersion()) os.Exit(0) } err = cfg.ValidateConfig() if err != nil { log.Fatalf("%s command line flag error: '%s'", ProgramName, err) } //p("cfg = %#v", cfg) h, err := tun.NewKnownHosts(cfg.ClientKnownHostsPath, tun.KHJson) panicOn(err) cfg.KnownHosts = h if cfg.WriteConfigOut != "" { var o io.WriteCloser if cfg.WriteConfigOut == "-" { o = os.Stdout } else { o, err = os.Create(cfg.WriteConfigOut) if err != nil { panic(err) } } err = cfg.SaveConfig(o) if err != nil { panic(err) } } if cfg.AddUser != "" { tun.AddUserAndExit(cfg) } if cfg.DelUser != "" { tun.DelUserAndExit(cfg) } passphrase := "" totpUrl := "" ctx := context.Background() halt := ssh.NewHalter() _, _, err = cfg.SSHConnect(ctx, h, cfg.Username, cfg.PrivateKeyPath, cfg.SSHdServer.Host, cfg.SSHdServer.Port, passphrase, totpUrl, halt) if err != nil { fmt.Println(err.Error()) os.Exit(0) } if err != nil { panic(err) } if !cfg.WriteConfigOnly { select {} } } func panicOn(err error) { if err != nil { panic(err) } }
Djelibeybi/photons
modules/photons_app/mimic/operators/matrix.py
from photons_app.mimic.operator import Operator, operator from photons_app.mimic.operators.light import color_spec from photons_app import helpers as hp from photons_messages import TileMessages, TileEffectType from photons_protocol.types import enum_spec from delfick_project.norms import dictobj, sb class TileChild(dictobj.Spec): accel_meas_x = dictobj.Field(sb.integer_spec, default=0) accel_meas_y = dictobj.Field(sb.integer_spec, default=0) accel_meas_z = dictobj.Field(sb.integer_spec, default=0) user_x = dictobj.Field(sb.float_spec, default=0) user_y = dictobj.Field(sb.float_spec, default=0) width = dictobj.Field(sb.integer_spec, default=8) height = dictobj.Field(sb.integer_spec, default=8) device_version_vendor = dictobj.Field(sb.integer_spec, default=1) device_version_product = dictobj.Field(sb.integer_spec, default=55) device_version_version = dictobj.Field(sb.integer_spec, default=0) firmware_version_minor = dictobj.Field(sb.integer_spec, default=50) firmware_version_major = dictobj.Field(sb.integer_spec, default=3) firmware_build = dictobj.Field(sb.integer_spec, default=0) colors = dictobj.Field(sb.listof(color_spec())) class ChainAttr: async def __call__(self, event, options): chain_length = options.chain_length if not options.chain and not options.chain_length: chain_length = 5 elif not options.chain_length: chain_length = len(options.chain) spec = TileChild.FieldSpec() if event.zerod: chain = [spec.empty_normalise() for _ in range(chain_length)] else: chain = [ch.clone() for ch in options.chain] while len(chain) < chain_length: chain.append(spec.empty_normalise()) chain = chain[:chain_length] for ch in chain: ch.firmware_build = event.device.firmware.build ch.firmware_version_minor = event.device.firmware.minor ch.firmware_version_major = event.device.firmware.major if "candle" in event.device.cap.product.name.lower(): ch.width = 5 ch.height = 6 else: ch.width = 8 ch.height = 8 ch.device_version_vendor = event.device.cap.product.vendor.vid 
ch.device_version_product = event.device.cap.product.pid while len(ch.colors) < ch.height * ch.width: ch.colors.append(hp.Color(0, 1, 1, 3500)) yield event.device.attrs.attrs_path("chain").changer_to(chain) class PaletteAttr: async def __call__(self, event, options): palette_count = options.palette_count if not options.palette and not options.palette_count: palette_count = 0 elif not options.palette_count: palette_count = len(options.palette) if event.zerod: palette = [hp.Color(0, 0, 0, 0) for _ in range(palette_count)] else: palette = [p.clone() for p in options.palette] while len(palette) < palette_count: palette.append(hp.Color(0, 0, 0, 0)) palette = palette[:palette_count] yield event.device.attrs.attrs_path("palette").changer_to(palette) @operator class Matrix(Operator): class Options(dictobj.Spec): chain = dictobj.Field(sb.listof(TileChild.FieldSpec())) chain_length = dictobj.NullableField(sb.integer_spec) palette = dictobj.Field(sb.listof(color_spec())) palette_count = dictobj.NullableField(sb.integer_spec) matrix_effect = dictobj.Field( enum_spec(None, TileEffectType, unpacking=True), default=TileEffectType.OFF ) @classmethod def select(kls, device): if not kls.only_io_and_viewer_operators(device.value_store) and device.cap.has_matrix: return kls(device, device.value_store) attrs = [ ChainAttr(), PaletteAttr(), Operator.Attr.Lambda( "matrix_effect", from_zero=lambda event, options: TileEffectType.OFF, from_options=lambda event, options: options.matrix_effect, ), ] async def respond(self, event): if event | TileMessages.GetTileEffect: event.add_replies(self.state_for(TileMessages.StateTileEffect)) elif event | TileMessages.SetTileEffect: state = self.state_for(TileMessages.StateTileEffect) state.instanceid = event.pkt.instanceid event.add_replies(state) palette_count = max([len(self.device.attrs.palette), len(event.pkt.palette)]) changes = [] for i, palette in enumerate(self.device.attrs.palette): if i >= palette_count: changes.append( 
self.device.attrs.attrs_path("palette", i).changer_to(hp.Color(0, 0, 0, 0)) ) else: if self.device.attrs.palette[i] != event.pkt.palette[i]: changes.append( self.device.attrs.attrs_path("palette", i).changer_to( event.pkt.palette[i] ) ) for i in range(palette_count): if i >= len(self.device.attrs.palette): changes.append( self.device.attrs.attrs_path("palette", i).changer_to(event.pkt.palette[i]) ) if event.pkt.palette_count > len(self.device.attrs.palette): changes.append( self.device.attrs.attrs_path("palette").reduce_length_to( event.pkt.palette_count ) ) changes.append(self.device.attrs.attrs_path("matrix_effect").changer_to(event.pkt.type)) await self.device.attrs.attrs_apply(*changes, event=event) elif event | TileMessages.GetDeviceChain: event.add_replies(self.state_for(TileMessages.StateDeviceChain)) elif event | TileMessages.Get64: state = [] res = { ch.tile_index: ch for ch in self.state_for(TileMessages.State64, expect_one=False) } for i in range(event.pkt.tile_index, event.pkt.tile_index + event.pkt.length): if i in res: state.append(res[i]) event.add_replies(*state) if event | TileMessages.SetUserPosition: if event.pkt.tile_index < len(self.device.attrs.chain): await self.device.change( (("chain", event.pkt.tile_index, "user_x"), event.pkt.user_x), (("chain", event.pkt.tile_index, "user_y"), event.pkt.user_y), event=event, ) event.set_replies() elif event | TileMessages.Set64: state = [] res = { ch.tile_index: ch for ch in self.state_for(TileMessages.State64, expect_one=False) } for i in range(event.pkt.tile_index, event.pkt.tile_index + event.pkt.length): if i in res: state.append(res[i]) event.add_replies(*state) for i in range(event.pkt.tile_index, event.pkt.tile_index + event.pkt.length): if i < len(self.device.attrs.chain): # For efficiency, not gonna make events for this chain = self.device.attrs.chain[i] chain.colors.clear() chain.colors.extend( [ hp.Color(c.hue, c.saturation, c.brightness, c.kelvin) for c in event.pkt.colors ] ) def 
make_state_for(self, kls, result): if kls | TileMessages.StateTileEffect: palette = [] if self.device.attrs.matrix_effect is not TileEffectType.OFF: palette = self.device.attrs.palette result.append( kls( type=self.device.attrs.matrix_effect, palette_count=len(palette), palette=palette, parameters={}, ) ) elif kls | TileMessages.StateDeviceChain: result.append( kls( start_index=0, tile_devices_count=len(self.device.attrs.chain), tile_devices=[c.as_dict() for c in self.device.attrs.chain], ) ) elif kls | TileMessages.State64: for i, ch in enumerate(self.device.attrs.chain): result.append(kls(tile_index=i, x=0, y=0, width=ch.width, colors=list(ch.colors)))
prichhazed/yb
internal/biome/fakebiome.go
// Copyright 2020 YourBase Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // // SPDX-License-Identifier: Apache-2.0 package biome import ( "context" "fmt" slashpath "path" "strings" ) // Fake is a biome that operates in-memory. It uses POSIX-style paths, but // permits any character to be used as the separator. type Fake struct { // Separator is the path separator character. If NUL, then slash '/' is used. Separator rune // Descriptor is the descriptor that will be returned by Describe. Descriptor Descriptor // DirsResult is what will be returned by Dirs. DirsResult Dirs // RunFunc is called to handle the Run method. RunFunc func(context.Context, *Invocation) error } func (f *Fake) sep() rune { if f.Separator == 0 { return '/' } return f.Separator } // Describe returns f.Descriptor. func (f *Fake) Describe() *Descriptor { return &f.Descriptor } // Dirs returns f.DirsResult. func (f *Fake) Dirs() *Dirs { return &f.DirsResult } // Run calls f.RunFunc. It returns an error if f.RunFunc is nil. func (f *Fake) Run(ctx context.Context, invoke *Invocation) error { if f.RunFunc == nil { return fmt.Errorf("fake run: RunFunc not set") } return f.RunFunc(ctx, invoke) } // JoinPath joins any number of path elements into a single path. 
func (f *Fake) JoinPath(elem ...string) string { sb := new(strings.Builder) for _, e := range elem { if e == "" { continue } if sb.Len() > 0 { sb.WriteRune(f.sep()) } sb.WriteString(e) } if sb.Len() == 0 { return "" } return f.cleanPath(sb.String()) } // cleanPath returns the shortest path name equivalent to path by purely // lexical processing. func (f *Fake) cleanPath(path string) string { if f.sep() == '/' { return slashpath.Clean(path) } s := strings.NewReplacer("/", "\x00", string(f.sep()), "/").Replace(path) s = slashpath.Clean(s) return strings.NewReplacer("/", string(f.sep()), "\x00", "/").Replace(s) } // IsAbsPath reports whether the path is absolute. func (f *Fake) IsAbsPath(path string) bool { return strings.HasPrefix(path, string(f.sep())) } // Close does nothing and returns nil. func (f *Fake) Close() error { return nil }
fga-gpp-mds/dulce-app
src/Components/Absences.js
<reponame>fga-gpp-mds/dulce-app
import {absences as styles} from './styles';
import React from 'react';
import { View, TouchableOpacity, Image, ScrollView, Alert } from 'react-native';
import DateRangePicker from './DateRangePicker';
import AGRButton from './AGRButton';
import ImagePicker from 'react-native-image-picker';
import {Card, CardItem, Text, Body, Form, Textarea, Picker, Right, Left} from 'native-base';

const XDate = require('xdate');

// Today's date (1-based month), used as the initial value for both ends
// of the requested absence range.
const currentDate = {
  year: new XDate().getFullYear(),
  month: new XDate().getMonth() + 1,
  day: new XDate().getDate()
};

/**
 * Absence-request screen: the user picks a date range, optionally a leave
 * type and a supporting photo (when props.type === 'complete'), writes a
 * justification and submits.
 *
 * Props used: title (screen header), type ('complete' enables the leave-type
 * picker and photo picker).
 */
export default class Absences extends React.Component {
  constructor(props) {
    super(props);
    this.state = {
      documentSource: null,     // image source ({uri}) chosen via ImagePicker, or null
      absenceOption: '',        // selected leave type value from the Picker
      startDay: currentDate,    // start of the requested range
      endDay: currentDate       // end of the requested range
    };
  }

  // Callback for ImagePicker.showImagePicker: stores the chosen image URI in
  // state, logging (not surfacing) cancel/error cases.
  getDocument(response) {
    /* istanbul ignore console */
    console.log('Response = ', response);

    if (response.didCancel) {
      /* istanbul ignore console */
      console.log('User cancelled photo picker');
    } else if (response.error) {
      /* istanbul ignore console */
      console.log('ImagePicker Error: ', response.error);
    } else if (response.customButton) {
      /* istanbul ignore console */
      console.log('User tapped custom button: ', response.customButton);
    } else {
      let source = {uri: response.uri};
      // You can also display the image using data:
      // let source = { uri: 'data:image/jpeg;base64,' + response.data };
      this.setState({
        documentSource: source
      });
    }
  }

  // Opens the native image picker dialog; result is handled by getDocument.
  selectPhotoTapped() {
    const options = {
      quality: 1.0,
      maxWidth: 500,
      maxHeight: 500,
      storageOptions: {
        skipBackup: true
      }
    };
    ImagePicker.showImagePicker(options, (response) => this.getDocument(response));
  }

  // Called by DateRangePicker when the user confirms a range.
  onSelectDates(startDay, endDay) {
    this.setState({
      startDay: startDay,
      endDay: endDay
    });
  }

  // Called by the leave-type Picker.
  onSelectAbsence(value) {
    this.setState({
      absenceOption: value
    });
  }

  // Leave-type dropdown (only rendered for the 'complete' variant).
  renderPicker() {
    return (
      <CardItem>
        <Body style={{flexDirection: 'row'}}>
          <Left>
            <Text style={styles.text} > Tipo de abono: </Text>
          </Left>
          <Right>
            <Picker
              selectedValue={this.state.absenceOption}
              style={styles.picker}
              onValueChange={(itemValue) => this.onSelectAbsence(itemValue)}>
              <Picker.Item style={styles.text} label='Atestado' value='medical leave' />
              <Picker.Item style={styles.text} label='Maternidade' value='maternity leave' />
              <Picker.Item style={styles.text} label='Paternidade' value='paternity leave' />
            </Picker>
          </Right>
        </Body>
      </CardItem>
    );
  }

  // Photo selection area: shows a placeholder until a photo is chosen.
  renderPhotoPicker() {
    return (
      <CardItem>
        <Body style={styles.container2}>
          <TouchableOpacity onPress={this.selectPhotoTapped.bind(this)}>
            <View style={[styles.document, styles.documentContainer, {marginBottom: 20}]}>
              { this.state.documentSource === null
                ? <Text style={styles.text}>Selecione uma Foto</Text>
                : <Image style={styles.document} source={this.state.documentSource} />
              }
            </View>
          </TouchableOpacity>
        </Body>
      </CardItem>
    );
  }

  // Header showing the currently selected date range as dd/mm/yyyy.
  renderSelectedDates() {
    return (
      <CardItem header bordered>
        <View style={styles.view1}>
          <Text style={styles.date}>
            {this.state.startDay.day}/{this.state.startDay.month}/{this.state.startDay.year} até {this.state.endDay.day}/{this.state.endDay.month}/{this.state.endDay.year}
          </Text>
        </View>
      </CardItem>
    );
  }

  // Submit handler: currently only shows a confirmation alert with the range.
  _onPress() {
    Alert.alert(
      'Solicitação enviada',
      'Periodo solicitado: ' + this.state.startDay.day + '/' +
      this.state.startDay.month + '/' + this.state.startDay.year +
      ' até ' + this.state.endDay.day + '/' + this.state.endDay.month +
      '/' + this.state.endDay.year
    );
  }

  // Main form card; the 'complete' variant adds the type picker and photo picker.
  renderCard() {
    return (
      <Card>
        {this.renderSelectedDates()}
        {this.props.type === 'complete' ? this.renderPicker() : <View />}
        <CardItem>
          <Body style={{alignItems: 'flex-start'}} >
            <Form style={styles.form}>
              <Textarea rowSpan={5} bordered placeholder='Justificativa' />
            </Form>
          </Body>
        </CardItem>
        {this.props.type === 'complete' ? this.renderPhotoPicker() : <View/>}
        <CardItem footer bordered>
          <AGRButton
            style={styles.button}
            onPress={() => {this._onPress();}}
            text='Solicitar'
          />
        </CardItem>
      </Card>
    );
  }

  render() {
    return (
      <View style={{flex: 1}}>
        <ScrollView>
          <Text style={styles.title}> {this.props.title} </Text>
          <DateRangePicker
            onSuccess={(startDay, endDay) => this.onSelectDates(startDay, endDay)}
            theme={{markColor: '#5f4b8b', markTextColor: 'white'}}
          />
          {this.renderCard()}
        </ScrollView>
      </View>
    );
  }
}
cybernetics/gridgain
modules/core/src/test/java/org/gridgain/grid/kernal/processors/cache/GridCacheAbstractProjectionSelfTest.java
/*
 Copyright (C) GridGain Systems. All Rights Reserved.

 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
 You may obtain a copy of the License at

     http://www.apache.org/licenses/LICENSE-2.0

 Unless required by applicable law or agreed to in writing, software
 distributed under the License is distributed on an "AS IS" BASIS,
 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
 */

/*  _________        _____ __________________        _____
 *  __  ____/___________(_)______  /__  ____/______ ____(_)_______
 *  _  / __  __  ___/__  / _  __  /  _  / __  _  __ `/__  /__  __ \
 *  / /_/ /  _  /    _  /  / /_/ /  / /_/ /  / /_/ /  _  /  _  / / /
 *  \____/   /_/     /_/   \_,__/   \____/   \__,_/   /_/   /_/ /_/
 */

package org.gridgain.grid.kernal.processors.cache;

import org.gridgain.grid.*;
import org.gridgain.grid.cache.*;
import org.gridgain.grid.lang.*;
import org.gridgain.grid.util.typedef.*;

import java.util.*;
import java.util.concurrent.*;

import static java.util.concurrent.TimeUnit.*;
import static org.gridgain.grid.cache.GridCacheAtomicityMode.*;
import static org.gridgain.grid.cache.GridCacheFlag.*;
import static org.gridgain.grid.cache.GridCacheWriteSynchronizationMode.*;

/**
 * Tests for custom cache projection (with filters and flags).
 * <p>
 * Concrete subclasses fix the cache mode via {@link #cacheMode()}.
 */
public abstract class GridCacheAbstractProjectionSelfTest extends GridCacheAbstractSelfTest {
    /** Test timeout (2 minutes). */
    private static final long TEST_TIMEOUT = 120 * 1000;

    /** Number of grids to start. */
    private static final int GRID_CNT = 1;

    /** {@inheritDoc} */
    @Override protected long getTestTimeout() {
        return TEST_TIMEOUT;
    }

    /** {@inheritDoc} */
    @Override protected int gridCount() {
        return GRID_CNT;
    }

    /** {@inheritDoc} */
    @Override protected GridCacheConfiguration cacheConfiguration(String gridName) throws Exception {
        GridCacheConfiguration cfg = super.cacheConfiguration(gridName);

        cfg.setCacheMode(cacheMode());
        cfg.setWriteSynchronizationMode(FULL_SYNC);
        cfg.setPreloadMode(GridCachePreloadMode.SYNC);

        return cfg;
    }

    /**
     * @return Cache mode.
     */
    @Override protected abstract GridCacheMode cacheMode();

    /**
     * @return Cache instance (typed for {@link TestCloneable} values).
     */
    @SuppressWarnings({"TypeMayBeWeakened"})
    private GridCache<String, TestCloneable> cacheCloneable() {
        return grid(0).cache(null);
    }

    /**
     * Test cloneable: minimal {@link Cloneable} value used by the CLONE-flag tests.
     */
    private static class TestCloneable implements Cloneable {
        /** String payload. */
        private String str;

        /**
         * Default constructor.
         */
        private TestCloneable() {
            // No-op.
        }

        /**
         * @param str String value.
         */
        private TestCloneable(String str) {
            this.str = str;
        }

        /**
         * @return str value.
         */
        private String str() {
            return str;
        }

        /** {@inheritDoc} */
        @Override public Object clone() throws CloneNotSupportedException {
            return super.clone();
        }
    }

    /** Key/value filter: accepts keys containing "key" with non-negative values. */
    private GridBiPredicate<String, Integer> kvFilter = new P2<String, Integer>() {
        @Override public boolean apply(String key, Integer val) {
            return key.contains("key") && val >= 0;
        }
    };

    /** Entry filter equivalent to {@code kvFilter}, but tolerant of absent values. */
    private GridPredicate<GridCacheEntry<String, Integer>> entryFilter = new P1<GridCacheEntry<String, Integer>>() {
        @Override public boolean apply(GridCacheEntry<String, Integer> e) {
            Integer val = e.peek();

            // Let's assume that null values will be passed through, otherwise we won't be able
            // to put any new values to cache using projection with this entry filter.
            return e.getKey().contains("key") && (val == null || val >= 0);
        }
    };

    /**
     * Asserts that given runnable throws specified exception.
     *
     * @param exCls Expected exception.
     * @param r Runnable to check.
     * @throws Exception If check failed.
     */
    private void assertException(Class<? extends Exception> exCls, Runnable r) throws Exception {
        assert exCls != null;
        assert r != null;

        try {
            r.run();

            fail(exCls.getSimpleName() + " must have been thrown.");
        }
        catch (Exception e) {
            // Exact class match only; anything else is rethrown to fail the test.
            if (e.getClass() != exCls) {
                throw e;
            }

            info("Caught expected exception: " + e);
        }
    }

    /**
     * Asserts that the runnable fails with {@link GridCacheFlagException}.
     *
     * @param r Runnable.
     * @throws Exception If check failed.
     */
    private void assertFlagException(Runnable r) throws Exception {
        assertException(GridCacheFlagException.class, r);
    }

    /**
     * @throws Exception In case of error.
     */
    public void testTypeProjection() throws Exception {
        GridCache<String, Integer> cache = cache();

        cache.putAll(F.asMap("k1", 1, "k2", 2, "k3", 3));

        GridCache<Double, Boolean> anotherCache = grid(0).cache(null);

        assert anotherCache != null;

        // Entry of a different type pair; must be invisible to the typed projection.
        anotherCache.put(3.14, true);

        GridCacheProjection<String, Integer> prj = cache.projection(String.class, Integer.class);

        List<String> keys = F.asList("k1", "k2", "k3");

        for (String key : keys)
            assert prj.containsKey(key);
    }

    /**
     * @throws Exception In case of error.
     */
    public void testSize() throws Exception {
        GridCacheProjection<String, Integer> prj = cache().projection(kvFilter);

        assert prj.cache() != null;

        int size = 10;

        if (atomicityMode() == TRANSACTIONAL) {
            GridCacheTx tx = prj.txStart();

            for (int i = 0; i < size; i++) {
                prj.put("key" + i, i);
            }

            // Both puts are rejected by kvFilter ("k" has no "key"; "key" value is negative).
            prj.put("k", 11);
            prj.put("key", -1);

            tx.commit();
        }
        else {
            for (int i = 0; i < size; i++) {
                prj.put("key" + i, i);
            }

            prj.put("k", 11);
            prj.put("key", -1);
        }

        assertEquals(size, cache().size());
        assertEquals(size, prj.size());
    }

    /**
     * @throws Exception In case of error.
     */
    public void testContainsKey() throws Exception {
        cache().put("key", 1);
        cache().put("k", 2);

        assert cache().containsKey("key");
        assert cache().containsKey("k");
        assert !cache().containsKey("wrongKey");

        GridCacheProjection<String, Integer> prj = cache().projection(kvFilter);

        assert prj.containsKey("key");
        assert !prj.containsKey("k");
        assert !prj.containsKey("wrongKey");

        // Nested projections: filters compose (AND semantics).
        assert prj.projection(F.<GridCacheEntry<String, Integer>>alwaysTrue()).containsKey("key");
        assert !prj.projection(F.<GridCacheEntry<String, Integer>>alwaysFalse()).containsKey("key");
        assert !prj.projection(F.<GridCacheEntry<String, Integer>>alwaysFalse()).containsKey("k");
    }

    /**
     * @throws Exception In case of error.
     */
    public void testPut() throws Exception {
        final GridCacheProjection<String, Integer> prj = cache().projection(kvFilter);

        prj.put("key", 1);
        prj.put("k", 2);

        assert prj.containsKey("key");
        assert !prj.containsKey("k");

        assertFlagException(new CAX() {
            @Override public void applyx() throws GridException {
                prj.flagsOn(LOCAL).put("key", 1);
            }
        });

        assertFlagException(new CAX() {
            @Override public void applyx() throws GridException {
                prj.flagsOn(READ).put("key", 1);
            }
        });
    }

    /**
     * LOCAL flag must reject all distributed operations on the projection,
     * while purely local ones (peek/evict/promote/clear) still work.
     *
     * @throws Exception In case of error.
     */
    public void testLocalFlag() throws Exception {
        GridCacheProjection<String, Integer> prj = cache().projection(entryFilter);

        final GridCacheProjection<String, Integer> locPrj = prj.flagsOn(LOCAL);

        prj.put("key", 1);

        Integer one = 1;

        assertEquals(one, prj.get("key"));

        assertFlagException(new CAX() {
            @Override public void applyx() throws GridException {
                locPrj.put("key", 1);
            }
        });

        prj.get("key");

        assertFlagException(new CAX() {
            @Override public void applyx() throws GridException {
                locPrj.get("key");
            }
        });

        prj.getAll(F.asList("key", "key1"));

        assertFlagException(new CAX() {
            @Override public void applyx() throws GridException {
                locPrj.getAll(F.asList("key", "key1"));
            }
        });

        prj.remove("key");

        assertFlagException(new CAX() {
            @Override public void applyx() throws GridException {
                locPrj.remove("key");
            }
        });

        prj.put("key", 1);

        assertEquals(one, prj.replace("key", 2));

        assertFlagException(new CAX() {
            @Override public void applyx() throws GridException {
                locPrj.replace("key", 3);
            }
        });

        prj.removeAll(F.asList("key"));

        assert !prj.containsKey("key");

        prj.put("key", 1);

        assertFlagException(new CAX() {
            @Override public void applyx() throws GridException {
                locPrj.removeAll(F.asList("key"));
            }
        });

        assert prj.containsKey("key");
        assert locPrj.containsKey("key");

        assertFlagException(new CAX() {
            @Override public void applyx() throws GridException {
                locPrj.reload("key");
            }
        });

        // Local-only operations are allowed under the LOCAL flag.
        assertEquals(one, locPrj.peek("key"));

        locPrj.evict("key");

        assert !locPrj.containsKey("key");

        locPrj.promote("key");

        assert locPrj.containsKey("key");

        locPrj.clear("key");

        assert !locPrj.containsKey("key");
    }

    /**
     * Same as {@link #testLocalFlag()}, but exercised through cache entries.
     *
     * @throws Exception In case of error.
     */
    public void testEntryLocalFlag() throws Exception {
        GridCacheProjection<String, Integer> prj = cache().projection(entryFilter);

        GridCacheProjection<String, Integer> loc = prj.flagsOn(LOCAL);

        prj.put("key", 1);

        GridCacheEntry<String, Integer> prjEntry = prj.entry("key");
        final GridCacheEntry<String, Integer> locEntry = loc.entry("key");

        assert prjEntry != null;
        assert locEntry != null;

        Integer one = 1;

        assertEquals(one, prjEntry.getValue());

        assertFlagException(new CA() {
            @Override public void apply() {
                locEntry.setValue(1);
            }
        });

        assertEquals(one, prjEntry.getValue());

        assertFlagException(new CA() {
            @Override public void apply() {
                locEntry.getValue();
            }
        });

        prjEntry.remove();

        assertFlagException(new CAX() {
            @Override public void applyx() throws GridException {
                locEntry.remove();
            }
        });

        prjEntry.set(1);

        assertEquals(one, prjEntry.replace(2));

        assertFlagException(new CAX() {
            @Override public void applyx() throws GridException {
                locEntry.replace(3);
            }
        });

        assertFlagException(new CAX() {
            @Override public void applyx() throws GridException {
                locEntry.reload();
            }
        });

        prj.put("key", 1);

        assertEquals(one, locEntry.peek());

        locEntry.evict();

        assert locEntry.peek() == null;

        loc.promote("key");

        assert loc.containsKey("key");

        locEntry.clear();

        assert locEntry.peek() == null;
    }

    /**
     * READ flag must reject every mutating operation but allow reads.
     *
     * @throws Exception In case of error.
     */
    public void testReadFlag() throws Exception {
        GridCacheProjection<String, Integer> prj = cache().projection(entryFilter);

        final GridCacheProjection<String, Integer> readPrj = prj.flagsOn(READ);

        prj.put("key", 1);

        Integer one = 1;

        assertEquals(one, prj.get("key"));

        assertFlagException(new CAX() {
            @Override public void applyx() throws GridException {
                readPrj.put("key", 1);
            }
        });

        prj.remove("key");

        assertFlagException(new CAX() {
            @Override public void applyx() throws GridException {
                readPrj.remove("key");
            }
        });

        prj.put("key", 1);

        assertEquals(one, prj.replace("key", 2));

        assertFlagException(new CAX() {
            @Override public void applyx() throws GridException {
                readPrj.replace("key", 3);
            }
        });

        prj.removeAll(F.asList("key"));

        assert !prj.containsKey("key");

        prj.put("key", 1);

        assertFlagException(new CAX() {
            @Override public void applyx() throws GridException {
                readPrj.removeAll(F.asList("key"));
            }
        });

        assertFlagException(new CA() {
            @Override public void apply() {
                readPrj.evict("key");
            }
        });

        assert prj.containsKey("key");

        assertFlagException(new CA() {
            @Override public void apply() {
                readPrj.clear("key");
            }
        });

        assert prj.containsKey("key");

        assertFlagException(new CAX() {
            @Override public void applyx() throws GridException {
                readPrj.reload("key");
            }
        });

        assert prj.containsKey("key");

        assertFlagException(new CAX() {
            @Override public void applyx() throws GridException {
                readPrj.promote("key");
            }
        });

        assert prj.containsKey("key");

        // Read operations are permitted.
        readPrj.get("key");
        readPrj.getAll(F.asList("key", "key1"));

        assertEquals(one, readPrj.peek("key"));
    }

    /**
     * Asserts that {@code clone} is a distinct object equal in payload to {@code original}.
     *
     * @param clone Cloned value.
     * @param original Original value.
     */
    private void checkClone(TestCloneable clone, TestCloneable original) {
        assert original != null;
        assert clone != null;
        assert clone != original;

        assertEquals(clone.str(), original.str());
    }

    /**
     * CLONE flag: every read path must return a copy, never the cached instance.
     *
     * @throws Exception In case of error.
     */
    @SuppressWarnings({"UnnecessaryFinalOnLocalVariable"})
    public void testCloneFlag() throws Exception {
        GridCacheProjection<String, TestCloneable> prj = cacheCloneable().flagsOn(CLONE);

        final TestCloneable val = new TestCloneable("val");

        prj.put("key", val);

        checkClone(prj.get("key"), val);
        checkClone(prj.getAsync("key").get(), val);

        Map<String, TestCloneable> map = prj.getAll(F.asList("key"));

        assertEquals(1, map.size());

        checkClone(map.get("key"), val);

        map = prj.getAllAsync(F.asList("key")).get();

        assertEquals(1, map.size());

        checkClone(map.get("key"), val);

        checkClone(prj.peek("key"), val);

        Collection<TestCloneable> vals = prj.values();

        assert vals != null;

        assertEquals(1, vals.size());

        checkClone(vals.iterator().next(), val);

        Set<GridCacheEntry<String, TestCloneable>> entries = prj.entrySet();

        assertEquals(1, entries.size());

        checkClone(entries.iterator().next().getValue(), val);

        GridCacheEntry<String, TestCloneable> entry = prj.entry("key");

        assert entry != null;

        checkClone(entry.peek(), val);
    }

    /**
     * An entry obtained from a projection must report that same projection
     * (i.e. share the underlying delegate).
     *
     * @throws Exception In case of error.
     */
    public void testEntryParent() throws Exception {
        cache().put("key", 1);

        GridCacheProxyImpl<String, Integer> prj = (GridCacheProxyImpl<String, Integer>)cache().
            flagsOn(CLONE, INVALIDATE);

        GridCacheEntry<String, Integer> entry = prj.entry("key");

        assert entry != null;

        GridCacheProxyImpl<String, Integer> entryPrj = (GridCacheProxyImpl<String, Integer>)entry.projection();

        assert entryPrj.delegate() == prj.delegate();
    }

    /**
     * @throws Exception if failed.
     */
    public void testSkipStoreFlag() throws Exception {
        // NOTE(review): `map` appears to be the test store's backing map inherited from
        // GridCacheAbstractSelfTest — verify in the superclass.
        assertNull(cache().put("kk1", 100500));
        assertEquals(100500, map.get("kk1"));

        GridCacheProjection<String, Integer> c = cache().flagsOn(GridCacheFlag.SKIP_STORE);

        // Writes through SKIP_STORE must not reach the store.
        assertNull(c.put("noStore", 123));
        assertEquals(123, (Object) c.get("noStore"));
        assertNull(map.get("noStore"));

        // Removal through SKIP_STORE removes from cache but leaves the store intact.
        assertTrue(c.remove("kk1", 100500));
        assertEquals(100500, map.get("kk1"));
        assertNull(c.get("kk1"));
        assertEquals(100500, (Object) cache().get("kk1"));
    }

    /**
     * @throws Exception if failed.
     */
    // TODO: enable when GG-7579 is fixed.
    public void _testSkipStoreFlagMultinode() throws Exception {
        final int nGrids = 3;

        // Start additional grids.
        for (int i = 1; i < nGrids; i++)
            startGrid(i);

        try {
            testSkipStoreFlag();
        }
        finally {
            for (int i = 1; i < nGrids; i++)
                stopGrid(i);
        }
    }

    /**
     * @throws Exception In case of error.
     */
    public void testSkipSwapFlag() throws Exception {
        cache().put("key", 1);

        cache().evict("key");

        assert cache().peek("key") == null;

        Integer one = 1;

        assertEquals(one, cache().get("key"));

        cache().evict("key");

        assertEquals(one, cache().reload("key"));

        cache().remove("key");

        assertFalse(cache().containsKey("key"));
        assertNull(cache().get("key"));

        GridCacheProjection<String, Integer> prj = cache().flagsOn(SKIP_SWAP, SKIP_STORE);

        prj.put("key", 1);

        assertEquals(one, prj.get("key"));
        assertEquals(one, prj.peek("key"));

        assert prj.evict("key");

        // With SKIP_SWAP, an evicted entry must not come back from swap.
        assert prj.peek("key") == null;
        assert prj.get("key") == null;
    }

    /**
     * Checks that previous entry in update operations is taken
     * from swap after eviction, even if SKIP_SWAP is enabled.
     *
     * @throws Exception If error happens.
     */
    public void testSkipSwapFlag2() throws Exception {
        cache().put("key", 1);

        cache().evict("key");

        GridCacheProjection<String, Integer> prj = cache().flagsOn(SKIP_SWAP, SKIP_STORE);

        assertNull(prj.get("key"));

        Integer old = prj.put("key", 2);

        assertEquals(Integer.valueOf(1), old);

        // Update operations on cache should not take into account SKIP_SWAP flag.
        prj.remove("key");
    }

    /**
     * Tests {@link GridCacheFlag#SKIP_SWAP} flag on multiple nodes.
     *
     * @throws Exception If error occurs.
     */
    public void testSkipSwapFlagMultinode() throws Exception {
        final int nGrids = 3;

        // Start additional grids.
        for (int i = 1; i < nGrids; i++)
            startGrid(i);

        try {
            final int nEntries = 100;

            // Put the values in cache.
            for (int i = 1; i <= nEntries; i++)
                grid(0).cache(null).put(i, i);

            // Evict values from cache. Values should go to swap.
            for (int i = 0; i < nGrids; i++) {
                grid(i).cache(null).evictAll();

                assertTrue("Grid #" + i + " has empty swap.", grid(i).cache(null).swapIterator().hasNext());
            }

            // Set SKIP_SWAP flag.
            GridCacheProjection<Object, Object> cachePrj = grid(0).cache(null).flagsOn(SKIP_SWAP, SKIP_STORE);

            // Put new values. We should get previous values from swap, disregarding SKIP_SWAP.
            for (int i = 1; i <= nEntries; i++)
                assertEquals(i, cachePrj.put(i, i + 1));

            // Swap should be empty now.
            for (int i = 0; i < nGrids; i++)
                assertFalse("Grid #" + i + " has non-empty swap.", grid(i).cache(null).swapIterator().hasNext());
        }
        finally {
            // Stop started grids.
            for (int i = 1; i < nGrids; i++)
                stopGrid(i);
        }
    }

    /**
     * @throws Exception In case of error.
     */
    public void testTx() throws Exception {
        if (atomicityMode() == ATOMIC)
            return;

        GridCacheTx tx = cache().txStart();

        GridCacheProjection<String, Integer> typePrj = cache().projection(String.class, Integer.class);

        typePrj.put("key", 1);
        typePrj.put("k", 2);

        GridCacheProjection<String, Integer> kvFilterPrj = cache().projection(kvFilter);

        Integer one = 1;

        assertEquals(one, kvFilterPrj.get("key"));
        assert kvFilterPrj.get("k") == null;

        GridCacheProjection<String, Integer> entryFilterPrj = cache().projection(entryFilter);

        assertEquals(one, entryFilterPrj.get("key"));
        assert entryFilterPrj.get("k") == null;

        // Now will check projection on projection.
        kvFilterPrj = typePrj.projection(kvFilter);

        assertEquals(one, kvFilterPrj.get("key"));
        assert kvFilterPrj.get("k") == null;

        entryFilterPrj = typePrj.projection(entryFilter);

        assertEquals(one, entryFilterPrj.get("key"));
        assert entryFilterPrj.get("k") == null;

        typePrj = cache().projection(entryFilter).projection(String.class, Integer.class);

        assertEquals(one, typePrj.get("key"));
        assertNull(typePrj.get("k"));

        tx.commit();

        GridCacheConfiguration cfg = grid(0).configuration().getCacheConfiguration()[0];

        tx = cache().txStart(
            cfg.getDefaultTxConcurrency(),
            cfg.getDefaultTxIsolation(),
            cfg.getDefaultTxTimeout(),
            0
        );

        // Try to change tx property: flags may not change inside an active transaction.
        assertFlagException(new CA() {
            @Override public void apply() {
                cache().flagsOn(INVALIDATE);
            }
        });

        assertFlagException(new CA() {
            @Override public void apply() {
                cache().projection(entryFilter).flagsOn(INVALIDATE);
            }
        });

        tx.commit();
    }

    /**
     * @throws Exception In case of error.
     */
    public void testTypedProjection() throws Exception {
        GridCache<Object, Object> cache = grid(0).cache(null);

        cache.putx("1", "test string");
        cache.putx("2", 0);

        final GridCacheProjection<String, String> prj = cache.projection(String.class, String.class);

        final CountDownLatch latch = new CountDownLatch(1);

        // The removeAll predicate must only ever see String values — the typed
        // projection must filter out the ("2", 0) entry.
        prj.removeAll(new P1<GridCacheEntry<String, String>>() {
            @Override public boolean apply(GridCacheEntry<String, String> e) {
                info(" --> " + e.peek().getClass());

                latch.countDown();

                return true;
            }
        });

        assertTrue(latch.await(1, SECONDS));
    }
}
davidlares/davidC-
poo/virtuals.cpp
<gh_stars>0 #include <iostream> #include <string> using namespace std; // base class class Shape { public: void setValues(int a, int b){ width = a; height = b; } // virtual member function -> member function that can be redefined in his derived classes only virtual int area() { return 0; } // pure virtual function (no needs declaration) virtual int perimeter()=0; protected: int height; int width; }; class Rectangle : public Shape { public: int area(){ // redifined (according to the C++ compiler) return (height * width); } int perimeter(){ // redifined (according to the C++ compiler) return 10; } }; class Triangle : public Shape { public: int area(){ // redifined (according to the C++ compiler) return (height * width/2); } int perimeter(){ // redifined (according to the C++ compiler) return 10; } }; int main(){ Rectangle rec; Triangle t; Shape *sh = &rec; Shape *sh2 = &t; // another way sh->setValues(10,20); sh2->setValues(10,20); cout << rec.area() << endl; cout << t.area() << endl; return 0; } // IF ALL THE FUNCTIONS ARE PURE VIRTUAL MEMBERS FUNCTION = CLASS becomes ABSTRACT (prototypes)
DeadZoneLuna/csso-src
src/engine/audio/private/snd_dev_sdl.cpp
//========= Copyright Valve Corporation, All rights reserved. ============//
//
// Purpose: SDL2-backed audio output device. Mixed samples are written into a
//          fixed-size ring buffer which the SDL audio callback drains.
//
//===========================================================================//

#include "audio_pch.h"

#if !DEDICATED

#include "tier0/dynfunction.h"
#include "video//ivideoservices.h"
#include "../../sys_dll.h"

// prevent some conflicts in SDL headers...
#undef M_PI
#include <stdint.h>
#ifndef _STDINT_H_
#define _STDINT_H_ 1
#endif

#include "SDL.h"

// memdbgon must be the last include file in a .cpp file!!!
#include "tier0/memdbgon.h"

extern bool snd_firsttime;

extern bool MIX_ScaleChannelVolume( paintbuffer_t *ppaint, channel_t *pChannel, int volume[CCHANVOLUMES], int mixchans );
extern void S_SpatializeChannel( /*int nSlot,*/ int volume[6], int master_vol, const Vector *psourceDir, float gain, float mono );

// 64K is about 1/3 second at 16-bit, stereo, 44100 Hz
// 44k: UNDONE - need to double buffers now that we're playing back at 44100?
#define WAV_BUFFERS             64
#define WAV_MASK                (WAV_BUFFERS - 1)
#define WAV_BUFFER_SIZE         0x0400

// Flip to 1 to route debugsdl() to printf for verbose tracing.
#if 0
#define debugsdl printf
#else
static inline void debugsdl(const char *fmt, ...) {}
#endif

//-----------------------------------------------------------------------------
//
// NOTE: This only allows 16-bit, stereo wave out (!!! FIXME: but SDL supports 7.1, etc, too!)
//
//-----------------------------------------------------------------------------
class CAudioDeviceSDLAudio : public CAudioDeviceBase
{
public:
	CAudioDeviceSDLAudio();
	virtual ~CAudioDeviceSDLAudio();

	bool IsActive( void );
	bool Init( void );
	void Shutdown( void );
	void PaintEnd( void );
	int GetOutputPosition( void );
	void ChannelReset( int entnum, int channelIndex, float distanceMod );
	void Pause( void );
	void UnPause( void );
	float MixDryVolume( void );
	bool Should3DMix( void );
	void StopAllSounds( void );

	int PaintBegin( float mixAheadTime, int soundtime, int paintedtime );
	void ClearBuffer( void );
	void MixBegin( int sampleCount );
	void MixUpsample( int sampleCount, int filtertype );
	void Mix8Mono( channel_t *pChannel, char *pData, int outputOffset, int inputOffset, fixedint rateScaleFix, int outCount, int timecompress );
	void Mix8Stereo( channel_t *pChannel, char *pData, int outputOffset, int inputOffset, fixedint rateScaleFix, int outCount, int timecompress );
	void Mix16Mono( channel_t *pChannel, short *pData, int outputOffset, int inputOffset, fixedint rateScaleFix, int outCount, int timecompress );
	void Mix16Stereo( channel_t *pChannel, short *pData, int outputOffset, int inputOffset, fixedint rateScaleFix, int outCount, int timecompress );
	void TransferSamples( int end );

	void SpatializeChannel( int nSlot, int volume[CCHANVOLUMES/2], int master_vol, const Vector& sourceDir, float gain, float mono);
	void ApplyDSPEffects( int idsp, portable_samplepair_t *pbuffront, portable_samplepair_t *pbufrear, portable_samplepair_t *pbufcenter, int samplecount );

	const char *DeviceName( void )      { return "SDL"; }
	int DeviceChannels( void )          { return 2; }
	int DeviceSampleBits( void )        { return 16; }
	int DeviceSampleBytes( void )       { return 2; }
	int DeviceDmaSpeed( void )          { return SOUND_DMA_SPEED; }
	int DeviceSampleCount( void )       { return m_deviceSampleCount; }

private:
	// SDL device handle; 0 means "no device open".
	SDL_AudioDeviceID m_devId;

	// Static trampoline registered with SDL; forwards to AudioCallback().
	static void SDLCALL AudioCallbackEntry(void *userdata, Uint8 * stream, int len);
	void AudioCallback(Uint8 *stream, int len);

	void OpenWaveOut( void );
	void CloseWaveOut( void );

	void AllocateOutputBuffers();
	void FreeOutputBuffers();

	bool ValidWaveOut( void ) const;

	int m_deviceSampleCount;    // total 16-bit samples the ring buffer holds
	int m_buffersSent;          // whole WAV_BUFFER_SIZE chunks consumed by SDL
	int m_pauseCount;
	int m_readPos;              // current read offset (bytes) into m_pBuffer
	int m_partialWrite;         // bytes consumed that don't yet make a full chunk

	// Memory for the wave data
	uint8_t *m_pBuffer;
};

static CAudioDeviceSDLAudio *g_wave = NULL;

//-----------------------------------------------------------------------------
// Constructor (just lookup SDL entry points, real work happens in this->Init())
//-----------------------------------------------------------------------------
CAudioDeviceSDLAudio::CAudioDeviceSDLAudio()
{
	m_devId = 0;
}

//-----------------------------------------------------------------------------
// Destructor. Make sure our global pointer gets set to NULL.
//-----------------------------------------------------------------------------
CAudioDeviceSDLAudio::~CAudioDeviceSDLAudio()
{
	g_wave = NULL;
}

//-----------------------------------------------------------------------------
// Class factory
//-----------------------------------------------------------------------------
IAudioDevice *Audio_CreateSDLAudioDevice( void )
{
	if ( !g_wave )
	{
		g_wave = new CAudioDeviceSDLAudio;
		Assert( g_wave );
	}

	// If the device can't initialize, tear it down and report failure (NULL).
	if ( g_wave && !g_wave->Init() )
	{
		delete g_wave;
		g_wave = NULL;
	}

	return g_wave;
}

//-----------------------------------------------------------------------------
// Init, shutdown
//-----------------------------------------------------------------------------
bool CAudioDeviceSDLAudio::Init( void )
{
	// If we've already got a device open, then return. This allows folks to call
	// Audio_CreateSDLAudioDevice() multiple times. CloseWaveOut() will free the
	// device, and set m_devId to 0.
	if( m_devId )
		return true;

	m_bSurround = false;
	m_bSurroundCenter = false;
	m_bHeadphone = false;
	m_buffersSent = 0;
	m_pauseCount = 0;
	m_pBuffer = NULL;
	m_readPos = 0;
	m_partialWrite = 0;
	m_devId = 0;

	OpenWaveOut();

	if ( snd_firsttime )
	{
		DevMsg( "Wave sound initialized\n" );
	}

	return ValidWaveOut();
}

void CAudioDeviceSDLAudio::Shutdown( void )
{
	CloseWaveOut();
}

//-----------------------------------------------------------------------------
// WAV out device
//-----------------------------------------------------------------------------
inline bool CAudioDeviceSDLAudio::ValidWaveOut( void ) const
{
	return m_devId != 0;
}

//-----------------------------------------------------------------------------
// Opens the windows wave out device
//-----------------------------------------------------------------------------
void CAudioDeviceSDLAudio::OpenWaveOut( void )
{
	debugsdl("SDLAUDIO: OpenWaveOut...\n");

#ifndef WIN32
	char appname[ 256 ];
	KeyValues *modinfo = new KeyValues( "ModInfo" );
	if ( modinfo->LoadFromFile( g_pFileSystem, "gameinfo.txt" ) )
		Q_strncpy( appname, modinfo->GetString( "game" ), sizeof( appname ) );
	else
		Q_strncpy( appname, "Source1 Game", sizeof( appname ) );
	modinfo->deleteThis();
	modinfo = NULL;

	// Set these environment variables, in case we're using PulseAudio.
	setenv("PULSE_PROP_application.name", appname, 1);
	setenv("PULSE_PROP_media.role", "game", 1);
#endif

	// !!! FIXME: specify channel map, etc
	// !!! FIXME: set properties (role, icon, etc).
	//#define SDLAUDIO_FAIL(fnstr) do { DevWarning(fnstr " failed"); CloseWaveOut(); return; } while (false)
	//#define SDLAUDIO_FAIL(fnstr) do { printf("SDLAUDIO: " fnstr " failed: %s\n", SDL_GetError ? SDL_GetError() : "???"); CloseWaveOut(); return; } while (false)
	// NOTE: this macro returns out of OpenWaveOut() on failure.
	#define SDLAUDIO_FAIL(fnstr) do { const char *err = SDL_GetError(); printf("SDLAUDIO: " fnstr " failed: %s\n", err ? err : "???"); CloseWaveOut(); return; } while (false)

	if (!SDL_WasInit(SDL_INIT_AUDIO))
	{
		if (SDL_InitSubSystem(SDL_INIT_AUDIO))
			SDLAUDIO_FAIL("SDL_InitSubSystem(SDL_INIT_AUDIO)");
	}

	debugsdl("SDLAUDIO: Using SDL audio target '%s'\n", SDL_GetCurrentAudioDriver());

	// Open an audio device...
	// !!! FIXME: let user specify a device?
	// !!! FIXME: we can handle quad, 5.1, 7.1, etc here.
	SDL_AudioSpec desired, obtained;
	memset(&desired, '\0', sizeof (desired));
	desired.freq = SOUND_DMA_SPEED;
	desired.format = AUDIO_S16SYS;
	desired.channels = 2;
	desired.samples = 2048;
	desired.callback = &CAudioDeviceSDLAudio::AudioCallbackEntry;
	desired.userdata = this;

	m_devId = SDL_OpenAudioDevice(NULL, 0, &desired, &obtained, SDL_AUDIO_ALLOW_ANY_CHANGE);
	if (!m_devId)
		SDLAUDIO_FAIL("SDL_OpenAudioDevice()");

	#undef SDLAUDIO_FAIL

	// We're now ready to feed audio data to SDL!
	AllocateOutputBuffers();
	SDL_PauseAudioDevice(m_devId, 0);

#if defined( BINK_VIDEO ) && defined( LINUX )
	// Tells Bink to use SDL for its audio decoding
	if ( g_pVideo != NULL)
	{
		g_pVideo->SoundDeviceCommand( VideoSoundDeviceOperation::SET_SDL_PARAMS, NULL, (void *)&obtained );
	}
#endif
}

//-----------------------------------------------------------------------------
// Closes the windows wave out device
//-----------------------------------------------------------------------------
void CAudioDeviceSDLAudio::CloseWaveOut( void )
{
	// none of these SDL_* functions are available to call if this is false.
	if (m_devId)
	{
		SDL_CloseAudioDevice(m_devId);
		m_devId = 0;
	}

	SDL_QuitSubSystem(SDL_INIT_AUDIO);

	FreeOutputBuffers();
}

//-----------------------------------------------------------------------------
// Allocate output buffers
//-----------------------------------------------------------------------------
void CAudioDeviceSDLAudio::AllocateOutputBuffers()
{
	// Allocate and lock memory for the waveform data.
	const int nBufferSize = WAV_BUFFER_SIZE * WAV_BUFFERS;
	m_pBuffer = new uint8_t[nBufferSize];
	memset(m_pBuffer, '\0', nBufferSize);
	m_readPos = 0;
	m_partialWrite = 0;
	m_deviceSampleCount = nBufferSize / DeviceSampleBytes();
}

//-----------------------------------------------------------------------------
// Free output buffers
//-----------------------------------------------------------------------------
void CAudioDeviceSDLAudio::FreeOutputBuffers()
{
	delete[] m_pBuffer;
	m_pBuffer = NULL;
}

//-----------------------------------------------------------------------------
// Mixing setup
//-----------------------------------------------------------------------------
int CAudioDeviceSDLAudio::PaintBegin( float mixAheadTime, int soundtime, int paintedtime )
{
	//  soundtime - total samples that have been played out to hardware at dmaspeed
	//  paintedtime - total samples that have been mixed at speed
	//  endtime - target for samples in mixahead buffer at speed
	unsigned int endtime = soundtime + mixAheadTime * DeviceDmaSpeed();

	// Per-channel sample capacity of the ring buffer.
	int samps = DeviceSampleCount() >> (DeviceChannels()-1);

	// Never mix further ahead than the buffer can hold.
	if ((int)(endtime - soundtime) > samps)
		endtime = soundtime + samps;

	if ((endtime - paintedtime) & 0x3)
	{
		// The difference between endtime and painted time should align on
		// boundaries of 4 samples. This is important when upsampling from 11khz -> 44khz.
		endtime -= (endtime - paintedtime) & 0x3;
	}

	return endtime;
}

void CAudioDeviceSDLAudio::AudioCallbackEntry(void *userdata, Uint8 *stream, int len)
{
	((CAudioDeviceSDLAudio *) userdata)->AudioCallback(stream, len);
}

// Runs on SDL's audio thread: copies from the ring buffer into SDL's stream,
// wrapping at the buffer end, then updates the consumed-chunk bookkeeping.
void CAudioDeviceSDLAudio::AudioCallback(Uint8 *stream, int len)
{
	if (!m_devId)
	{
		debugsdl("SDLAUDIO: uhoh, no audio device!\n");
		return;  // can this even happen?
	}

	const int totalWriteable = len;

#if defined( BINK_VIDEO ) && defined( LINUX )
	Uint8 *stream_orig = stream;
#endif

	debugsdl("SDLAUDIO: writable size is %d.\n", totalWriteable);

	Assert(len <= (WAV_BUFFERS * WAV_BUFFER_SIZE));

	while (len > 0)
	{
		// spaceAvailable == bytes before we overrun the end of the ring buffer.
		const int spaceAvailable = ((WAV_BUFFERS * WAV_BUFFER_SIZE) - m_readPos);
		const int writeLen = (len < spaceAvailable) ? len : spaceAvailable;
		if (writeLen > 0)
		{
			const uint8_t *buf = m_pBuffer + m_readPos;
			debugsdl("SDLAUDIO: Writing %d bytes...\n", writeLen);
#if 0
			// Debug aid: dump everything we hand to SDL into a raw file.
			static FILE *io = NULL;
			if (io == NULL)
				io = fopen("dumpplayback.raw", "wb");
			if (io != NULL)
			{
				fwrite(buf, writeLen, 1, io);
				fflush(io);
			}
#endif
			memcpy(stream, buf, writeLen);
			stream += writeLen;
			len -= writeLen;
			Assert(len >= 0);
		}

		m_readPos = len ? 0 : (m_readPos + writeLen);  // if still bytes to write to stream, we're rolling around the ring buffer.
	}

#if defined( BINK_VIDEO ) && defined( LINUX )
	// Mix in Bink movie audio if that stuff is playing.
	if ( g_pVideo != NULL)
	{
		g_pVideo->SoundDeviceCommand( VideoSoundDeviceOperation::SDLMIXER_CALLBACK, (void *)stream_orig, (void *)&totalWriteable );
	}
#endif

	// Translate between bytes written and buffers written.
	m_partialWrite += totalWriteable;
	m_buffersSent += m_partialWrite / WAV_BUFFER_SIZE;
	m_partialWrite %= WAV_BUFFER_SIZE;
}

//-----------------------------------------------------------------------------
// Actually performs the mixing
//-----------------------------------------------------------------------------
void CAudioDeviceSDLAudio::PaintEnd( void )
{
	debugsdl("SDLAUDIO: PaintEnd...\n");

#if 0  // !!! FIXME: this is the 1.3 headers, but not implemented yet in SDL.
if (SDL_AudioDeviceConnected(m_devId) != 1) { debugsdl("SDLAUDIO: Audio device was disconnected!\n"); Shutdown(); } #endif } int CAudioDeviceSDLAudio::GetOutputPosition( void ) { return (m_readPos >> SAMPLE_16BIT_SHIFT)/DeviceChannels(); } //----------------------------------------------------------------------------- // Pausing //----------------------------------------------------------------------------- void CAudioDeviceSDLAudio::Pause( void ) { m_pauseCount++; if (m_pauseCount == 1) { debugsdl("SDLAUDIO: PAUSE\n"); SDL_PauseAudioDevice(m_devId, 1); } } void CAudioDeviceSDLAudio::UnPause( void ) { if ( m_pauseCount > 0 ) { m_pauseCount--; if (m_pauseCount == 0) { debugsdl("SDLAUDIO: UNPAUSE\n"); SDL_PauseAudioDevice(m_devId, 0); } } } bool CAudioDeviceSDLAudio::IsActive( void ) { return ( m_pauseCount == 0 ); } float CAudioDeviceSDLAudio::MixDryVolume( void ) { return 0; } bool CAudioDeviceSDLAudio::Should3DMix( void ) { return false; } void CAudioDeviceSDLAudio::ClearBuffer( void ) { int clear; if ( !m_pBuffer ) return; clear = 0; Q_memset(m_pBuffer, clear, DeviceSampleCount() * DeviceSampleBytes() ); } void CAudioDeviceSDLAudio::MixBegin( int sampleCount ) { MIX_ClearAllPaintBuffers( sampleCount, false ); } void CAudioDeviceSDLAudio::MixUpsample( int sampleCount, int filtertype ) { paintbuffer_t *ppaint = MIX_GetCurrentPaintbufferPtr(); int ifilter = ppaint->ifilter; Assert (ifilter < CPAINTFILTERS); S_MixBufferUpsample2x( sampleCount, ppaint->pbuf, &(ppaint->fltmem[ifilter][0]), CPAINTFILTERMEM, filtertype ); ppaint->ifilter++; } void CAudioDeviceSDLAudio::Mix8Mono( channel_t *pChannel, char *pData, int outputOffset, int inputOffset, fixedint rateScaleFix, int outCount, int timecompress ) { int volume[CCHANVOLUMES]; paintbuffer_t *ppaint = MIX_GetCurrentPaintbufferPtr(); if (!MIX_ScaleChannelVolume( ppaint, pChannel, volume, 1)) return; Mix8MonoWavtype( pChannel, ppaint->pbuf + outputOffset, volume, (byte *)pData, inputOffset, rateScaleFix, outCount ); } 
//-----------------------------------------------------------------------------
// Mixes 8-bit stereo source samples into the current paint buffer.
// Note: 'timecompress' is accepted but unused in this device implementation.
//-----------------------------------------------------------------------------
void CAudioDeviceSDLAudio::Mix8Stereo( channel_t *pChannel, char *pData, int outputOffset, int inputOffset, fixedint rateScaleFix, int outCount, int timecompress )
{
	int volume[CCHANVOLUMES];
	paintbuffer_t *ppaint = MIX_GetCurrentPaintbufferPtr();

	// Bail out when the scaled channel volume is effectively silent.
	if (!MIX_ScaleChannelVolume( ppaint, pChannel, volume, 2 ))
		return;

	Mix8StereoWavtype( pChannel, ppaint->pbuf + outputOffset, volume, (byte *)pData, inputOffset, rateScaleFix, outCount );
}

//-----------------------------------------------------------------------------
// Mixes 16-bit mono source samples into the current paint buffer.
//-----------------------------------------------------------------------------
void CAudioDeviceSDLAudio::Mix16Mono( channel_t *pChannel, short *pData, int outputOffset, int inputOffset, fixedint rateScaleFix, int outCount, int timecompress )
{
	int volume[CCHANVOLUMES];
	paintbuffer_t *ppaint = MIX_GetCurrentPaintbufferPtr();

	if (!MIX_ScaleChannelVolume( ppaint, pChannel, volume, 1 ))
		return;

	Mix16MonoWavtype( pChannel, ppaint->pbuf + outputOffset, volume, pData, inputOffset, rateScaleFix, outCount );
}

//-----------------------------------------------------------------------------
// Mixes 16-bit stereo source samples into the current paint buffer.
//-----------------------------------------------------------------------------
void CAudioDeviceSDLAudio::Mix16Stereo( channel_t *pChannel, short *pData, int outputOffset, int inputOffset, fixedint rateScaleFix, int outCount, int timecompress )
{
	int volume[CCHANVOLUMES];
	paintbuffer_t *ppaint = MIX_GetCurrentPaintbufferPtr();

	if (!MIX_ScaleChannelVolume( ppaint, pChannel, volume, 2 ))
		return;

	Mix16StereoWavtype( pChannel, ppaint->pbuf + outputOffset, volume, pData, inputOffset, rateScaleFix, outCount );
}

//-----------------------------------------------------------------------------
// Per-entity channel reset: intentionally a no-op for this device.
//-----------------------------------------------------------------------------
void CAudioDeviceSDLAudio::ChannelReset( int entnum, int channelIndex, float distanceMod )
{
}

//-----------------------------------------------------------------------------
// Copies mixed samples from the paint buffer into the device ring buffer,
// up to the given end time (in samples).
//-----------------------------------------------------------------------------
void CAudioDeviceSDLAudio::TransferSamples( int end )
{
	int lpaintedtime = g_paintedtime;
	int endtime = end;

	// resumes playback...

	if ( m_pBuffer )
	{
		S_TransferStereo16( m_pBuffer, PAINTBUFFER, lpaintedtime, endtime );
	}
}

//-----------------------------------------------------------------------------
// Computes per-speaker volumes for a channel given direction and gain.
// Delegates to the engine's generic spatializer (nSlot is ignored here).
//-----------------------------------------------------------------------------
void CAudioDeviceSDLAudio::SpatializeChannel( int nSlot, int volume[CCHANVOLUMES/2], int master_vol, const Vector& sourceDir, float gain, float mono )
{
	VPROF("CAudioDeviceSDLAudio::SpatializeChannel");
	S_SpatializeChannel( /*nSlot,*/ volume, master_vol, &sourceDir, gain, mono );
}

//-----------------------------------------------------------------------------
// Stop-all hook: intentionally a no-op; mixing simply ceases upstream.
//-----------------------------------------------------------------------------
void CAudioDeviceSDLAudio::StopAllSounds( void )
{
}

//-----------------------------------------------------------------------------
// Applies room DSP effects to the front/rear/center paint buffers.
//-----------------------------------------------------------------------------
void CAudioDeviceSDLAudio::ApplyDSPEffects( int idsp, portable_samplepair_t *pbuffront, portable_samplepair_t *pbufrear, portable_samplepair_t *pbufcenter, int samplecount )
{
	//SX_RoomFX( endtime, filter, timefx );
	DSP_Process( idsp, pbuffront, pbufrear, pbufcenter, samplecount );
}

#endif // !DEDICATED
timoneluo/SmsServer
potato-service/src/main/java/io/potato/ts/domain/UserGroupLink.java
package io.potato.ts.domain;

import java.io.Serializable;

import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;

import io.potato.core.BaseEntity;
import lombok.Data;
import lombok.EqualsAndHashCode;

/**
 * User / user-group link table (join entity mapping users to user groups,
 * backed by {@code t_user_group_link}).
 * NOTICE: this file is generated by code, DO NOT modify it!
 * @author timl
 *
 * <p>2019-03-19 14:14:57</p>
 */
@Entity
@Table(name="t_user_group_link")
@Data
@EqualsAndHashCode(callSuper=false)
public class UserGroupLink extends BaseEntity implements Serializable{

    private static final long serialVersionUID = 1552976097546L;

    // Surrogate primary key, auto-incremented by the database.
    @Id
    @GeneratedValue(strategy=GenerationType.IDENTITY)
    private Integer id;

    // Foreign key to the user group (column group_id).
    @Column(name="group_id")
    private Integer groupId;

    // Foreign key to the user (column user_id).
    @Column(name="user_id")
    private Integer userId;
}
nanovc/nanovc-java
api/src/main/java/io/nanovc/content/StringContentBase.java
/*
MIT License
https://opensource.org/licenses/MIT
Copyright 2020 <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package io.nanovc.content;

import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.Objects;

/**
 * Base class for content backed by a {@link String} value.
 * The concrete sub-class decides the character encoding via {@link #getCharset()}.
 * If you want to specify your own encoding then use {@link EncodedStringContent} instead.
 */
public abstract class StringContentBase extends ContentWithByteArrayBase implements ContentWithByteArrayAPI, ImmutableContent
{
    /**
     * The string content that is being represented.
     */
    public final String value;

    /**
     * Wraps the given string as content.
     * @param value The string value to wrap as content.
     */
    protected StringContentBase(String value)
    {
        this.value = value;
    }

    /**
     * Decodes the given bytes into a string using the supplied character set.
     * @param bytes The encoded bytes for the string content. Must not be null.
     * @param charset The character set the bytes are encoded with. See {@link StandardCharsets} for common character sets. Must not be null.
     */
    protected StringContentBase(byte[] bytes, Charset charset)
    {
        Objects.requireNonNull(bytes);
        Objects.requireNonNull(charset);
        this.value = new String(bytes, charset);
    }

    /**
     * Gets the character set used to encode the string content into bytes.
     * @return The character set used for byte conversions.
     */
    public abstract Charset getCharset();

    /**
     * Gets the string content that is being represented.
     * @return The wrapped string value.
     */
    public String getValue()
    {
        return this.value;
    }

    /**
     * Encodes the wrapped string using {@link #getCharset()}.
     * It is assumed that getting this data structure is efficient for this content.
     *
     * @return The content as a byte array. If the content is null then you get an empty array.
     */
    @Override
    public byte[] getEfficientByteArray()
    {
        // Null content is represented as an empty byte array rather than null.
        return this.value == null ? new byte[0] : this.value.getBytes(getCharset());
    }

    @Override
    public String toString()
    {
        return '\'' + value + '\'';
    }
}
woorimlee/cpp_CTCI_6E_APSS
BOJ_백준/10816 숫자 카드2 (이진 탐색).cpp
#include <iostream>
#include <algorithm>
#include <vector>
using namespace std;

// BOJ 10816 "숫자 카드 2": for each of M query values, print how many of the
// N cards carry that value. Sort once, then answer each query with a binary
// search over the sorted cards (equal_range == upper_bound - lower_bound).
int main() {
    ios_base::sync_with_stdio(false);
    cin.tie(NULL);

    int cardCount;
    cin >> cardCount;
    vector<int> cards(cardCount, 0);
    for (int i = 0; i < cardCount; ++i) {
        cin >> cards[i];
    }
    sort(cards.begin(), cards.end());

    int queryCount;
    cin >> queryCount;
    for (int i = 0; i < queryCount; ++i) {
        int value;
        cin >> value;
        // Count of 'value' in the sorted vector: width of its equal range.
        auto range = equal_range(cards.begin(), cards.end(), value);
        cout << (range.second - range.first) << ' ';
    }
    return 0;
}
shaqk/dfuse-eosio
eosws/completion/completion.go
// Copyright 2020 dfuse Platform Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package completion

import (
	"context"
	"fmt"
	"regexp"
	"runtime"
	"sort"
	"strings"
	"sync"

	"github.com/dfuse-io/dfuse-eosio/eosws"
	"github.com/dfuse-io/dfuse-eosio/eosws/mdl"

	"github.com/arpitbbhayani/tripod"
	"go.uber.org/zap"
)

// Maximum length of an EOSIO account name, used to size the account trie.
const maxAccountNameLength = 13

func init() {
	initSQEIndexedFieldTypeByName()
}

// Completion provides search-box auto-completion over account names and
// SQE (search query expression) fields.
type Completion interface {
	Complete(prefix string, limit int) ([]*mdl.SuggestionSection, error)
	AddAccount(account string)
	searchAccountNamesByPrefix(prefix string, limit int) []string
}

// New builds a Completion seeded with all account names listed from the database.
func New(ctx context.Context, db eosws.DB) (Completion, error) {
	zlog.Debug("fetching initial account names")
	accountNames, err := db.ListAccountNames(ctx, uint32(runtime.NumCPU()*2))
	if err != nil {
		return nil, fmt.Errorf("new completion: %s", err)
	}
	return newFromData(accountNames), nil
}

// newFromData builds a Completion from an in-memory list of account names.
func newFromData(accountNames []string) Completion {
	completion := &defaultCompletion{
		writeMutex: &sync.Mutex{},
	}
	completion.initAccountNames(accountNames)
	completion.initQueryLanguageFields()
	zlog.Debug("finished initializing completion")
	return completion
}

type defaultCompletion struct {
	accountNamesTrie     *tripod.PrefixStoreByteTrie
	sqeIndexedFieldsTrie *tripod.PrefixStoreByteTrie
	// writeMutex guards Put calls into accountNamesTrie from AddAccount;
	// NOTE(review): reads (PrefixSearch) are not guarded — presumably safe
	// for this trie implementation, confirm against tripod docs.
	writeMutex *sync.Mutex
}

// FIXME change "metrics" logging statements to proper trace's span instead so we can collect metrics
// initAccountNames fills the account-name trie. Input is sorted first so that
// trie insertion order (which is also its result order) is lexicographic.
func (completion *defaultCompletion) initAccountNames(accountNames []string) {
	accountNameCount := len(accountNames)
	zlog.Info("initializing completion account names", zap.Int("count", accountNameCount))

	// Needed as the underlying trie returns results in insertion order. By sorting the
	// input array, insertion order will the sorted order of the input array
	zlog.Debug("sorting initial account names")
	sort.Strings(accountNames)

	zlog.Debug("adding all account names to completion trie")
	trie := tripod.CreatePrefixStoreByteTrie(maxAccountNameLength)
	for _, accountName := range accountNames {
		trie.Put([]byte(accountName))
	}
	completion.accountNamesTrie = trie
}

// initQueryLanguageFields fills the SQE field-name trie, sized to the longest field name.
func (completion *defaultCompletion) initQueryLanguageFields() {
	zlog.Info("initializing sqe language fields", zap.Int("count", len(sqeIndexedFields)))
	maxFieldLength := 0
	for _, indexedField := range sqeIndexedFields {
		if len(indexedField.name) > maxFieldLength {
			maxFieldLength = len(indexedField.name)
		}
	}

	zlog.Debug("adding all sqe indexed fields names to completion trie")
	trie := tripod.CreatePrefixStoreByteTrie(maxFieldLength)
	for _, indexedField := range sqeIndexedFields {
		trie.Put([]byte(indexedField.name))
	}
	completion.sqeIndexedFieldsTrie = trie
}

// AddAccount registers a newly seen account name so it becomes completable.
func (completion *defaultCompletion) AddAccount(account string) {
	completion.writeMutex.Lock()
	defer completion.writeMutex.Unlock()

	zlog.Debug("adding new account to completion", zap.String("name", account))
	completion.accountNamesTrie.Put([]byte(account))
}

// Complete returns up to two suggestion sections for the given prefix:
// matching account names ("accounts") and SQE query suggestions ("query").
func (completion *defaultCompletion) Complete(prefix string, limit int) ([]*mdl.SuggestionSection, error) {
	var suggestionSections []*mdl.SuggestionSection

	accountSuggestions := completion.completeAccountNames(prefix, limit)
	if len(accountSuggestions) > 0 {
		suggestionSections = append(suggestionSections, &mdl.SuggestionSection{
			ID:          "accounts",
			Suggestions: accountSuggestions,
		})
	}

	sqeSuggestions := completion.completeSQE(prefix, limit)
	if len(sqeSuggestions) > 0 {
		suggestionSections = append(suggestionSections, &mdl.SuggestionSection{
			ID:          "query",
			Suggestions: sqeSuggestions,
		})
	}

	return suggestionSections, nil
}

// completeAccountNames suggests account names when the prefix itself looks like one.
func (completion *defaultCompletion) completeAccountNames(prefix string, limit int) []*mdl.Suggestion {
	if !isAccountLike(prefix) {
		return nil
	}

	accountNames := completion.searchAccountNamesByPrefix(prefix, limit)
	accountNameCount := len(accountNames)
	if accountNameCount <= 0 {
		return nil
	}

	accountNameSuggestions := make([]*mdl.Suggestion, accountNameCount)
	for i, accountName := range accountNames {
		accountNameSuggestions[i] = &mdl.Suggestion{Key: string(accountName), Label: string(accountName)}
	}

	return accountNameSuggestions
}

// completeSQE suggests SQE query completions based on where the cursor sits in
// the partially typed expression (after a paren, mid field name, mid field value, ...).
func (completion *defaultCompletion) completeSQE(prefix string, limit int) []*mdl.Suggestion {
	if isAccountLike(prefix) {
		return suggestSQEDefaultSuggestions(prefix)
	}

	usedFieldNames := extractSQEFieldNames(prefix)
	if strings.HasSuffix(prefix, "(") {
		return suggestNextSQEField(prefix, limit, usedFieldNames)
	}

	if strings.HasSuffix(prefix, ")") || strings.HasSuffix(strings.ToLower(prefix), " or") {
		return suggestNextSQEField(prefix+" ", limit, usedFieldNames)
	}

	// Not account like and not previous fields present, try to complete field if possible
	if len(usedFieldNames) <= 0 {
		return completion.completeSQEFieldName(prefix, prefix, limit)
	}

	// We had at least one field (complete or not) present
	if prefix == "" || strings.HasSuffix(prefix, " ") {
		return suggestNextSQEField(prefix, limit, usedFieldNames)
	}

	currentField := findClosestUncompletedSQEField(prefix)
	if currentField == "" {
		return suggestSQEPrefixOnly(prefix)
	}

	if shouldCompleteSQEFieldValue(currentField) {
		return completion.completeSQEFieldValue(currentField, prefix, limit)
	}

	// If we are not completing the field value, we are then completing the name
	suggestions := completion.completeSQEFieldName(currentField, prefix, limit)
	if len(suggestions) > 0 {
		return suggestions
	}

	// If we were not able to complete the field name and no previous
	// completed field were present, user is probably screwing it up, propose
	// a bunch of standard proposals in this case
	if len(usedFieldNames) == 0 {
		return suggestNextSQEField("", limit, emptyFields)
	}

	return suggestSQEPrefixOnly(prefix)
}

// completeSQEFieldName completes a partially typed field name; a unique match
// falls through directly to completing that field's value.
func (completion *defaultCompletion) completeSQEFieldName(field string, prefix string, limit int) []*mdl.Suggestion {
	fields := completion.searchSQEFieldsByPrefix(field, limit)
	fieldCount := len(fields)
	if fieldCount <= 0 {
		return nil
	}

	prefixWithoutField := strings.TrimSuffix(prefix, field)

	// A single match on the field name should automatically auto-complete the value
	if fieldCount == 1 {
		return completion.completeSQEFieldValue(fields[0]+":", prefixWithoutField+fields[0]+":", limit)
	}

	fieldSuggestions := make([]*mdl.Suggestion, fieldCount)
	for i, field := range fields {
		label := prefixWithoutField + field + ":"
		fieldSuggestions[i] = &mdl.Suggestion{Key: label, Label: label}
	}

	return fieldSuggestions
}

// completeSQEFieldValue completes the value part of "field:value" according to
// the field's declared type (account names and booleans are the completable ones).
func (completion *defaultCompletion) completeSQEFieldValue(field string, prefix string, limit int) []*mdl.Suggestion {
	colonIndex := strings.Index(field, ":")
	fieldName := field[0:colonIndex]
	value := field[colonIndex+1:]

	valueType, ok := sqeIndexedFieldTypeByName[fieldName]
	if !ok {
		return suggestSQEPrefixOnly(prefix)
	}

	switch valueType {
	case accountType:
		accountNames := completion.searchAccountNamesByPrefix(value, limit)
		accountNameCount := len(accountNames)
		if accountNameCount <= 0 {
			return suggestSQEPrefixOnly(prefix)
		}

		prefixWithoutField := strings.TrimSuffix(prefix, field)
		suggestions := make([]*mdl.Suggestion, accountNameCount)
		for i, accountName := range accountNames {
			label := prefixWithoutField + fieldName + ":" + string(accountName)
			suggestions[i] = &mdl.Suggestion{Key: label, Label: label}
		}
		return suggestions
	case booleanType:
		var booleanValues []string
		if value == "" {
			booleanValues = []string{"true", "false"}
		} else if strings.HasPrefix("true", value) {
			booleanValues = []string{"true"}
		} else if strings.HasPrefix("false", value) {
			booleanValues = []string{"false"}
		} else {
			booleanValues = []string{"true", "false"}
		}

		prefixWithoutField := strings.TrimSuffix(prefix, field)
		suggestions := make([]*mdl.Suggestion, len(booleanValues))
		for i, booleanValue := range booleanValues {
			label := prefixWithoutField + fieldName + ":" + booleanValue
			suggestions[i] = &mdl.Suggestion{Key: label, Label: label}
		}
		return suggestions
	default:
		return suggestSQEPrefixOnly(prefix)
	}
}

// suggestNextSQEField proposes up to 'limit' not-yet-used field names, appended to prefix.
func suggestNextSQEField(prefix string, limit int, alreadyUsedFields []string) []*mdl.Suggestion {
	var suggestions []*mdl.Suggestion
	for _, sqeIndexedField := range sqeIndexedFields {
		if len(suggestions) >= limit {
			break
		}

		if contains(alreadyUsedFields, sqeIndexedField.name) {
			continue
		}

		label := prefix + sqeIndexedField.name + ":"
		suggestions = append(suggestions, &mdl.Suggestion{Key: label, Label: label})
	}
	return suggestions
}

// suggestSQEDefaultSuggestions offers canned query templates built around an account-like prefix.
func suggestSQEDefaultSuggestions(prefix string) []*mdl.Suggestion {
	accountHistory := fmt.Sprintf("(auth:%s OR receiver:%s)", prefix, prefix)
	signedBy := fmt.Sprintf("auth:%s", prefix)
	eosTokenTransfer := fmt.Sprintf("receiver:eosio.token account:eosio.token action:transfer (data.from:%s OR data.to:%s)", prefix, prefix)
	fuzzyTokenSearch := fmt.Sprintf("data.to:%s", prefix)

	return []*mdl.Suggestion{
		&mdl.Suggestion{Key: accountHistory, Label: accountHistory, Summary: "account_history"},
		&mdl.Suggestion{Key: signedBy, Label: signedBy, Summary: "signed_by"},
		&mdl.Suggestion{Key: eosTokenTransfer, Label: eosTokenTransfer, Summary: "eos_token_transfer"},
		&mdl.Suggestion{Key: fuzzyTokenSearch, Label: fuzzyTokenSearch, Summary: "fuzzy_token_search"},
	}
}

// suggestSQEPrefixOnly echoes the prefix back as the only suggestion.
func suggestSQEPrefixOnly(prefix string) []*mdl.Suggestion {
	return []*mdl.Suggestion{&mdl.Suggestion{Key: prefix, Label: prefix}}
}

// searchAccountNamesByPrefix returns at most 'limit' account names starting with prefix.
func (completion *defaultCompletion) searchAccountNamesByPrefix(prefix string, limit int) []string {
	results := completion.accountNamesTrie.PrefixSearch([]byte(prefix))

	length := limit
	if results.Len() < limit {
		length = results.Len()
	}

	index := 0
	matchingAccountNames := make([]string, length)
	for e := results.Front(); e != nil; e = e.Next() {
		matchingAccountNames[index] = string(e.Value.([]byte))
		index++
		if index >= limit {
			break
		}
	}
	return matchingAccountNames
}

// searchSQEFieldsByPrefix returns at most 'limit' SQE field names starting with prefix.
func (completion *defaultCompletion) searchSQEFieldsByPrefix(prefix string, limit int) []string {
	results := completion.sqeIndexedFieldsTrie.PrefixSearch([]byte(prefix))

	length := limit
	if results.Len() < limit {
		length = results.Len()
	}

	index := 0
	matchingFields := make([]string, length)
	for e := results.Front(); e != nil; e = e.Next() {
		matchingFields[index] = string(e.Value.([]byte))
		index++
		if index >= limit {
			break
		}
	}
	return matchingFields
}

var sqeFieldRegexp = regexp.MustCompile("([a-z\\._]+):(.+)?")

// extractSQEFieldNames lists the field names of every "name:value" token in the expression.
func extractSQEFieldNames(prefix string) []string {
	parts := strings.Split(prefix, " ")
	var fieldNames []string
	for _, part := range parts {
		match := sqeFieldRegexp.FindAllStringSubmatch(part, -1)
		if len(match) <= 0 {
			continue
		}
		fieldNames = append(fieldNames, match[0][1])
	}
	return fieldNames
}

// findClosestUncompletedSQEField returns the last non-blank token of the expression.
func findClosestUncompletedSQEField(prefix string) string {
	parts := strings.Split(prefix, " ")
	for i := len(parts) - 1; i >= 0; i-- {
		if strings.Trim(parts[i], " ") == "" {
			continue
		}
		return parts[i]
	}
	return ""
}

// shouldCompleteSQEFieldValue reports whether the token already carries a "name:" part.
func shouldCompleteSQEFieldValue(field string) bool {
	return strings.Contains(field, ":")
}

type valueType int32

const (
	accountType valueType = iota
	actionType
	actionIndexType
	assetType
	booleanType
	blockNumType
	hexType
	freeFormType
	nameType
	permissionType
	transactionIDType
)

type indexedField struct {
	name      string
	valueType valueType
}

// Keep them in order of importance for now until some scoring is implemented
var sqeIndexedFields = []indexedField{
	{"account", accountType},
	{"receiver", accountType},
	{"action", actionType},
	{"data.to", accountType},
	{"data.from", accountType},
	{"auth", permissionType},
	{"block_num", blockNumType},
	{"trx_idx", transactionIDType},
	{"data.account", accountType},
	{"data.active", freeFormType},
	{"data.active_key", freeFormType},
	{"data.actor", freeFormType},
	{"data.amount", assetType},
	{"data.authority", freeFormType},
	{"data.bid", freeFormType},
	{"data.bidder", accountType},
	{"data.canceler", accountType},
	{"data.creator", accountType},
	{"data.executer", accountType},
	{"data.is_active", booleanType},
	{"data.is_priv", booleanType},
	{"data.isproxy", booleanType},
	{"data.issuer", accountType},
	{"data.level", freeFormType},
	{"data.location", freeFormType},
	{"data.maximum_supply", assetType},
	{"data.name", nameType},
	{"data.newname", nameType},
	{"data.owner", accountType},
	{"data.parent", accountType},
	{"data.payer", accountType},
	{"data.permission", permissionType},
	{"data.producer", accountType},
	{"data.producer_key", freeFormType},
	{"data.proposal_name", nameType},
	{"data.proposal_hash", freeFormType},
	{"data.proposer", accountType},
	{"data.proxy", freeFormType},
	{"data.public_key", freeFormType},
	{"data.producers", freeFormType},
	{"data.quant", freeFormType},
	{"data.quantity", freeFormType},
	{"data.ram_payer", accountType},
	{"data.receiver", accountType},
	{"data.requirement", freeFormType},
	{"data.symbol", freeFormType},
	{"data.threshold", freeFormType},
	{"data.transfer", freeFormType},
	{"data.voter", accountType},
	{"data.voter_name", nameType},
	{"data.weight", freeFormType},
	{"act_digest", hexType},
	{"act_idx", actionIndexType},
	{"scheduled", booleanType},
}

var sqeIndexedFieldTypeByName map[string]valueType

// initSQEIndexedFieldTypeByName builds the name -> valueType lookup (called from init).
func initSQEIndexedFieldTypeByName() {
	sqeIndexedFieldTypeByName = map[string]valueType{}
	for _, sqeIndexedField := range sqeIndexedFields {
		sqeIndexedFieldTypeByName[sqeIndexedField.name] = sqeIndexedField.valueType
	}
}

var accountNameRegex = regexp.MustCompile("^[a-z0-5\\.]{1,13}$")
var blockNumLikeRegex = regexp.MustCompile("^[0-9]+$")
var hexLikeRegex = regexp.MustCompile("^[0-9a-f]{6,}$")

// isAccountLike reports whether input matches the EOSIO account-name alphabet/length.
func isAccountLike(input string) bool {
	return accountNameRegex.Match([]byte(input))
}

// isBlockNumLike reports whether input is purely numeric (and not account-like).
func isBlockNumLike(input string) bool {
	return !isAccountLike(input) && blockNumLikeRegex.Match([]byte(input))
}

// isHexdecimalLike reports whether input looks like a hex string of 6+ chars (and not account-like).
func isHexdecimalLike(input string) bool {
	return !isAccountLike(input) && hexLikeRegex.Match([]byte(input))
}

var emptyFields []string

// contains reports whether candidate occurs in elements.
func contains(elements []string, candidate string) bool {
	for _, element := range elements {
		if element == candidate {
			return true
		}
	}
	return false
}
Sp2000/colplus-backend
webservice/src/main/java/life/catalogue/importer/ContinuousImporter.java
<reponame>Sp2000/colplus-backend package life.catalogue.importer; import life.catalogue.api.vocab.ImportState; import life.catalogue.api.vocab.Users; import life.catalogue.common.util.LoggingUtils; import life.catalogue.concurrent.ExecutorUtils; import life.catalogue.config.ContinuousImportConfig; import life.catalogue.config.ImporterConfig; import life.catalogue.db.mapper.DatasetMapper; import life.catalogue.dw.ManagedExtended; import java.util.List; import java.util.concurrent.TimeUnit; import org.apache.ibatis.session.SqlSession; import org.apache.ibatis.session.SqlSessionFactory; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.slf4j.MDC; /** * A scheduler for new import jobs that runs continuously in the background * and submits new import jobs to the ImportManager if it is idle. * <p> * New jobs are selected by priority according to the following criteria: * <p> * - never imported datasets first * - the datasets configured indexing frequency * * If imports have failed previously, there will be an embargo for 1 week. 
*/ public class ContinuousImporter implements ManagedExtended { private static final Logger LOG = LoggerFactory.getLogger(ContinuousImporter.class); private static final String THREAD_NAME = "continuous-importer"; private static final int WAIT_TIME_IN_HOURS = 1; private Thread thread; private ImportManager manager; private ImporterConfig cfg; private SqlSessionFactory factory; private ContinousImporterJob job; public ContinuousImporter(ImporterConfig cfg, ImportManager manager, SqlSessionFactory factory) { this.cfg = cfg; this.manager = manager; this.factory = factory; } static class ContinousImporterJob implements Runnable { private final SqlSessionFactory factory; private final ImportManager manager; private final ContinuousImportConfig cfg; private volatile boolean running = true; public ContinousImporterJob(ImporterConfig cfg, ImportManager manager, SqlSessionFactory factory) { this.manager = manager; this.factory = factory; this.cfg = cfg.continuous; if (cfg.maxQueue < cfg.batchSize) { LOG.warn("Importer queue is shorter ({}) than the batch size ({}) to submit. Reduce batches to half the queue size!", cfg.maxQueue, cfg.batchSize); cfg.batchSize = (cfg.maxQueue / 2); } } public void terminate() { running = false; } @Override public void run() { MDC.put(LoggingUtils.MDC_KEY_TASK, getClass().getSimpleName()); while (running) { try { while (!manager.hasStarted()) { LOG.debug("Importer not started, sleep for {} minutes", cfg.polling); TimeUnit.MINUTES.sleep(cfg.polling); } while (manager.queueSize() > cfg.threshold) { LOG.debug("Importer busy, sleep for {} minutes", cfg.polling); TimeUnit.MINUTES.sleep(cfg.polling); } List<DatasetMapper.DatasetDI> datasets = fetch(); if (datasets.isEmpty()) { LOG.debug("No datasets eligable to be imported. 
Sleep for {} hour", WAIT_TIME_IN_HOURS); TimeUnit.HOURS.sleep(WAIT_TIME_IN_HOURS); } else { LOG.info("Trying to schedule {} dataset imports", datasets.size()); datasets.forEach(this::scheduleImport); } } catch (InterruptedException e) { LOG.info("Interrupted continuous importing. Stop"); running = false; } catch (Exception e) { LOG.error("Error scheduling continuous imports. Shutdown continous importer!", e); running = false; } } MDC.remove(LoggingUtils.MDC_KEY_TASK); } private void scheduleImport(DatasetMapper.DatasetDI d) { boolean force = d.getState() == ImportState.FAILED; try { if (force) { LOG.info("Schedule a forced import of dataset {} which failed the last time on {}: {}", d.getFinished() ,d.getKey(), d.getTitle()); } manager.submit(new ImportRequest(d.getKey(), Users.IMPORTER, force, false, false)); } catch (IllegalArgumentException e) { LOG.warn("Failed to schedule a {}dataset import {}: {}", force? "forced ":"", d.getKey(), d.getTitle(), e); } } /** * Find the next batch of datasets eligable for importing */ private List<DatasetMapper.DatasetDI> fetch() { // check never crawled datasets first try (SqlSession session = factory.openSession(true)) { List<DatasetMapper.DatasetDI> datasets = session.getMapper(DatasetMapper.class).listNeverImported(cfg.batchSize); if (datasets.isEmpty()) { // now check for eligable datasets based on import frequency datasets = session.getMapper(DatasetMapper.class).listToBeImported(cfg.defaultFrequency, cfg.batchSize); } return datasets; } } } @Override public boolean hasStarted() { return job != null && job.running; } @Override public void start() throws Exception { if (cfg.continuous.polling > 0) { LOG.info("Enable continuous importing"); job = new ContinousImporterJob(cfg, manager, factory); thread = new Thread(job, THREAD_NAME); LOG.info("Start continuous importing with maxQueue={}, polling every {} minutes", job.cfg.threshold, job.cfg.polling ); thread.start(); } else { LOG.warn("Continuous importing disabled"); } } 
  /**
   * Stops the background scheduler: signals the polling loop to terminate and
   * waits (bounded) for the worker thread to finish.
   */
  @Override
  public void stop() throws Exception {
    // flip the volatile running flag so the loop exits at its next check
    if (job != null) {
      job.terminate();
    }
    // bounded join so application shutdown cannot hang on a sleeping loop
    if (thread != null) {
      thread.join(ExecutorUtils.MILLIS_TO_DIE);
    }
  }
}
BBN-E/serif
src/ProfileGenerator/NameHypothesis.h
// Copyright (c) 2010 by BBNT Solutions LLC
// All Rights Reserved.

#ifndef NAME_HYPOTHESIS_H
#define NAME_HYPOTHESIS_H

#include "ProfileGenerator/GenericHypothesis.h"
#include "ProfileGenerator/PGFact.h"
#include "ProfileGenerator/Profile.h"
#include "ProfileGenerator/ProfileSlot.h"

#include "boost/shared_ptr.hpp"
#include "boost/make_shared.hpp"
#include "boost/enable_shared_from_this.hpp"

#include <set>
#include <vector>

#include "Generic/common/bsp_declare.h"
BSP_DECLARE(NameHypothesis);
BSP_DECLARE(PGDatabaseManager);

/*! \brief An implementation of GenericHypothesis for slot types that expect
    an entity name for a value. It makes use of XDoc lite information when
    checking facts for equivalency. */
class NameHypothesis : public GenericHypothesis, public boost::enable_shared_from_this<NameHypothesis>
{
public:
	// Instances are created only through boost::make_shared, which needs
	// friend access to the private constructors declared below.
	friend NameHypothesis_ptr boost::make_shared<NameHypothesis>(PGFact_ptr const&, ProfileSlot_ptr const&, PGDatabaseManager_ptr const&);
	friend NameHypothesis_ptr boost::make_shared<NameHypothesis>(int const &, std::wstring const&, bool const&, PGDatabaseManager_ptr const&);

	~NameHypothesis(void) { }

	// Maps a raw actor name string to a confidence score.
	typedef std::map<std::wstring, double> ActorStringConfidenceMap;

	void addSupportingHypothesis(GenericHypothesis_ptr hypo);

	// TODO: Not sure that "equiv and better" is possible in new actor-id world
	// Returns true only when the other hypothesis compares strictly BETTER.
	bool isEquivAndBetter(GenericHypothesis_ptr hypoth) {
		NameHypothesis_ptr nameHypoth = boost::dynamic_pointer_cast<NameHypothesis>(hypoth);
		if (nameHypoth == NameHypothesis_ptr()) // cast failed
			return false;
		ComparisonType comp = compareValues(nameHypoth);
		return (comp == BETTER);
	}

	// True when the other hypothesis compares EQUAL or BETTER to this one.
	bool isEquiv(GenericHypothesis_ptr hypoth) {
		NameHypothesis_ptr nameHypoth = boost::dynamic_pointer_cast<NameHypothesis>(hypoth);
		if (nameHypoth == NameHypothesis_ptr()) // cast failed
			return false;
		ComparisonType comp = compareValues(nameHypoth);
		return (comp == EQUAL || comp == BETTER);
	}

	bool isSimilar(GenericHypothesis_ptr hypoth) {
		// NOTE: This function is NOT reciprocal, and should be called each way
		// if you want that behavior
		NameHypothesis_ptr nameHypoth = boost::dynamic_pointer_cast<NameHypothesis>(hypoth);
		if (nameHypoth == NameHypothesis_ptr()) // cast failed
			return false;
		ComparisonType comp = compareValues(nameHypoth);
		return (comp == EQUAL || comp == BETTER || comp == SIMILAR || comp == VERY_SIMILAR);
	}

	std::vector<kb_arg_t> getKBArguments(int actor_id, ProfileSlot_ptr slot);

	// Syntactic role a pronoun replacement should fill.
	typedef enum { SUBJECT, OBJECT, POSSESSIVE_MOD, POSSESSIVE, PRONOUN_ROLES_SIZE } PronounRole;
	// Gender/animacy category used when choosing pronouns.
	typedef enum { UNKNOWN, INANIMATE, SECOND, MALE, FEMALE, PLURAL_ORG, GENDER_TYPES_SIZE } GenderType;

	// Simple accessors over cached state.
	GenderType getGender() { return _gender; }
	int getActorId() { return _actor_id; }
	std::wstring getDisplayValue();
	std::wstring getNormalizedValue() { return _normalizedValue; }
	std::wstring getShortFormalName() { return _shortFormalName; }
	std::map<std::wstring, int>& getPossibleDisplayValues() { return _possibleDisplayValues; }
	ActorStringConfidenceMap& getActorStrings() { return _actorStrings; }

	std::wstring getRepeatReference(int& counter, PronounRole role);
	bool isIllegalHypothesis(ProfileSlot_ptr slot, std::string& rationale);
	bool isRiskyHypothesis(ProfileSlot_ptr slot, std::string& rationale);
	int rankAgainst(GenericHypothesis_ptr hypo);
	bool matchesProfileName(Profile_ptr profile);
	bool matchesName(std::wstring test_name);
	bool isFamily() { return _slot != ProfileSlot_ptr() && _slot->isFamily(); }
	bool isPersonSlot() { return _is_person_slot; }

	// Loads the static male/female first-name lists shared by all instances.
	static void loadGenderNameLists();

private:
	// Private: construction goes through boost::make_shared (see friends above).
	NameHypothesis(PGFact_ptr fact, ProfileSlot_ptr slot, PGDatabaseManager_ptr pgdm);
	NameHypothesis(int actor_id, std::wstring canonical_name, bool is_person_slot, PGDatabaseManager_ptr pgdm);

	PGDatabaseManager_ptr _pgdm;
	int _actor_id;
	std::wstring _canonical_name;
	bool _is_person_slot;
	ActorStringConfidenceMap _actorStrings;
	// Set false when new name strings arrive; updateDisplayValue() refreshes.
	bool _display_value_up_to_date;
	std::map<std::wstring, int> _possibleDisplayValues;
	std::wstring _displayValue;
	std::wstring _normalizedValue;
	std::wstring _shortFormalName;
	GenderType _gender;
	ProfileSlot_ptr _slot;

	// Initialization functions
	void initialize();
	void initalizeStaticVectors();
	void addNameString(std::wstring name, double confidence);
	void addNameVariants(std::wstring name, double confidence);
	void setPersonShortFormalName(std::wstring candidate);

	// Name comparison driver
	typedef enum { BETTER, EQUAL, VERY_SIMILAR, SIMILAR, NONE } ComparisonType;
	ComparisonType compareValues(NameHypothesis_ptr otherNameHypothesis);

	// Name manipulation functions & resources
	static std::set<std::wstring> _maleFirstNames;
	static std::set<std::wstring> _femaleFirstNames;
	static std::vector<std::wstring> _honorifics;
	static std::vector<std::wstring> _prefixHonorifics;
	static std::set<std::wstring> _lowercaseNameWords;
	std::wstring getLastTwoNames(std::vector<std::wstring>& tokens) const;
	std::wstring stripHonorifics(std::wstring name);
	std::wstring stripAlPrefix(std::wstring name);

	// Gender
	static const std::wstring pro_unknown[], pro_inanimate[], pro_second[], pro_male[], pro_female[], pro_plural[];
	std::wstring getPronounString(PronounRole role, GenderType gender_type);
	GenderType guessGenderFromFirstName(std::wstring personName);

	std::wstring getSimpleCapitalization(std::wstring phrase);
	std::wstring fixLowercaseNameWords(std::wstring phrase, bool is_short_name);
	void fixDisplayCapitalization();
	void updateDisplayValue();
	// last name except when preceded by "marker" that goes with last; "Johnson" or "<NAME>"
	// for ORGs can be "Apple" for "Apple Corporation" and "SRI" for "SRI International"
};

#endif
ckamtsikis/cmssw
HLTriggerOffline/Tau/test/runHLTTauValidation_WithHLT.py
# <filename>HLTriggerOffline/Tau/test/runHLTTauValidation_WithHLT.py
#
# CMSSW configuration: re-runs the L1 emulator and the 2E30 HLT menu on a
# Z->tautau RelVal-style input file, then runs the HLT tau validation and
# its DQM post-processor, saving the histograms via dqmSaver.
import FWCore.ParameterSet.Config as cms

process = cms.Process("HLT")

process.load("FWCore.MessageService.MessageLogger_cfi")
process.options = cms.untracked.PSet(
    wantSummary = cms.untracked.bool(True)
)
# Process only 10 events (test-sized job).
process.maxEvents = cms.untracked.PSet(
    input = cms.untracked.int32(10)
)
process.source = cms.Source("PoolSource",
    fileNames = cms.untracked.vstring('file:/scratch/bachtis/RECO-ZTT-GEN-SIM-DIGI-0013.root')
)

process.load("Configuration.StandardSequences.GeometryPilot2_cff")
process.load("Configuration.StandardSequences.MagneticField_cff")

# Conditions: fake or frontier
# process.load("Configuration.StandardSequences.FakeConditions_cff")
process.load("Configuration.StandardSequences.FrontierConditions_GlobalTag_cff")
process.GlobalTag.globaltag = 'STARTUP_V4::All'

process.load("Configuration.StandardSequences.L1Emulator_cff")
# Choose a menu/prescale/mask from one of the choices
# in L1TriggerConfig.L1GtConfigProducers.Luminosity
process.load("Configuration.StandardSequences.L1TriggerDefaultMenu_cff")

# run HLT
process.load("SimGeneral.HepPDTESSource.pythiapdt_cfi")
process.load("HLTrigger.Configuration.HLT_2E30_cff")
process.schedule = process.HLTSchedule

# Trigger-report analyzers appended after the HLT schedule.
process.hltL1gtTrigReport = cms.EDAnalyzer( "L1GtTrigReport",
    UseL1GlobalTriggerRecord = cms.bool( False ),
    L1GtRecordInputTag = cms.InputTag( "hltGtDigis" )
)
process.hltTrigReport = cms.EDAnalyzer( "HLTrigReport",
    HLTriggerResults = cms.InputTag( 'TriggerResults','','HLT' )
)
process.HLTAnalyzerEndpath = cms.EndPath( process.hltL1gtTrigReport + process.hltTrigReport )
process.schedule.append(process.HLTAnalyzerEndpath)

process.load("FWCore.MessageService.MessageLogger_cfi")
process.DQMStore = cms.Service("DQMStore")

# Load DQM Services
process.load("DQMServices.Core.DQM_cfg")
process.load("DQMServices.Components.DQMEnvironment_cfi")

# Reconfigure Environment and saver
#process.dqmEnv.subSystemFolder = cms.untracked.string('HLT/HLTTAU')
#process.DQM.collectorPort = 9091
#process.DQM.collectorHost = cms.untracked.string('pcwiscms10')
process.dqmSaver.saveByRun = cms.untracked.int32(-1)
process.dqmSaver.saveAtJobEnd = cms.untracked.bool(True)
process.dqmSaver.workflow = cms.untracked.string('/A/N/C')
process.dqmSaver.forceRunNumber = cms.untracked.int32(123)

# Load the Validation
process.load("HLTriggerOffline.Tau.Validation.HLTTauValidation_cff")

# Load The Post processor
process.load("HLTriggerOffline.Tau.Validation.HLTTauPostValidation_cfi")

# Define the Paths (post-processing runs in an EndPath so it sees all events).
process.validation = cms.Path(process.HLTTauVal)
process.postProcess = cms.EndPath(process.HLTTauPostVal+process.dqmSaver)
#process.postProcess = cms.EndPath(process.dqmSaver)
process.schedule.append(process.validation)
process.schedule.append(process.postProcess)
lanl/DnMFkCPP
install_dependencies/xianyi-OpenBLAS-6d2da63/lapack/CMakeFiles/slaswp_minus.c
#define MINUS #define ASMNAME slaswp_minus #define ASMFNAME slaswp_minus_ #define NAME slaswp_minus_ #define CNAME slaswp_minus #define CHAR_NAME "slaswp_minus_" #define CHAR_CNAME "slaswp_minus" #include "/lustre/scratch3/turquoise/rvangara/RD100/distnnmfkcpp_Src/install_dependencies/xianyi-OpenBLAS-6d2da63/lapack/laswp/generic/laswp_k.c"
lpi/codeforces-go
main/1200-1299/1255D.go
package main

import (
	"bufio"
	"bytes"
	. "fmt"
	"io"
)

// github.com/EndlessCheng/codeforces-go
//
// Codeforces 1255D "Feeding Chickens": partition all 'R' (rice) cells of an
// r x c grid among k chickens as evenly as possible, labelling every cell of
// the grid with its owner. The grid is walked in boustrophedon (snake) order
// so each chicken's cells form one contiguous, connected segment.
func Sol1255D(reader io.Reader, writer io.Writer) {
	in := bufio.NewReader(reader)
	out := bufio.NewWriter(writer)
	defer out.Flush()
	// One distinct label per chicken; 62 symbols available
	// (assumes k <= 62 per the problem constraints — TODO confirm).
	const alphabet = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"

	var t, r, c, k, lastRi, lastRj, avg, nAvg int
	for Fscan(in, &t); t > 0; t-- {
		Fscan(in, &r, &c, &k)
		cells := make([][]byte, r)
		cnt := 0 // total number of rice cells
		for i := range cells {
			Fscan(in, &cells[i])
			if cntRow := bytes.Count(cells[i], []byte{'R'}); cntRow > 0 {
				cnt += cntRow
				lastRi = i
				// Track the rice cell that comes LAST in snake order:
				// even rows are scanned left-to-right, odd rows right-to-left.
				if i&1 == 0 {
					lastRj = bytes.LastIndex(cells[i], []byte{'R'})
				} else {
					lastRj = bytes.Index(cells[i], []byte{'R'})
				}
			}
		}
		// Relocate that last rice cell to the final cell of the snake walk,
		// presumably so the trailing chicken's segment reaches the grid end
		// — confirm against the snake-order assignment below.
		cells[lastRi][lastRj] = '.'
		if r&1 == 1 {
			cells[r-1][c-1] = 'R'
		} else {
			cells[r-1][0] = 'R'
		}
		// nAvg chickens receive avg rice cells; the remaining k-nAvg get avg+1.
		avg = cnt / k
		if cnt%k == 0 {
			nAvg = k
		} else {
			nAvg = (avg+1)*k - cnt
		}
		nowI := 0   // index of the chicken currently being assigned
		nowCnt := 0 // rice cells given to the current chicken so far
		left := nAvg
		modify := func(i, j int, isRice bool) {
			cells[i][j] = alphabet[nowI]
			if isRice {
				nowCnt++
				if nowCnt == avg {
					// Current chicken has its quota; switch to the next one.
					nowI++
					nowCnt = 0
					left--
					if left == 0 {
						// The remaining chickens take avg+1 cells each.
						avg++
						left = k - nAvg
					}
				}
			}
		}
		// Snake walk: even rows left-to-right, odd rows right-to-left.
		for i, row := range cells {
			if i&1 == 0 {
				for j, ch := range row {
					modify(i, j, ch == 'R')
				}
			} else {
				for j := c - 1; j >= 0; j-- {
					modify(i, j, cells[i][j] == 'R')
				}
			}
		}
		for _, row := range cells {
			Fprintf(out, "%s\n", row)
		}
	}
}

//func main() {
//	Sol1255D(os.Stdin, os.Stdout)
//}
robort-yuan/AI-EXPRESS
source/solution_zoo/video_box/src/smartplugin_box/vencmodule.cpp
/**
 * Copyright (c) 2020, Horizon Robotics, Inc.
 * All rights reserved.
 * @Author:
 * @Mail: @horizon.ai
 */
#include "vencmodule.h"

#include <fcntl.h>
#include <fstream>
#include <iostream>
#include <stdio.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <unistd.h>

#include <cstring>

#include "hb_venc.h"
#include "hb_vio_interface.h"
#include "hb_vp_api.h"
#include "hobotlog/hobotlog.hpp"
#include "visualplugin/horizonserver_api.h"

namespace horizon {
namespace vision {

std::once_flag VencModule::flag_;

// Constructor: initializes the global VENC hardware module exactly once
// across all VencModule instances (guarded by flag_).
VencModule::VencModule() : chn_id_(-1), timeout_(50), pipe_fd_(-1) {
  std::call_once(flag_, []() {
    int ret = HB_VENC_Module_Init();
    if (ret != 0) {
      LOGF << "HB_VENC_Module_Init Failed. ret = " << ret;
    }
  });
}

// NOTE(review): every destructed instance calls the global uninit even though
// init ran only once — confirm HB_VENC_Module_Uninit is safe to call per-instance.
VencModule::~VencModule() { HB_VENC_Module_Uninit(); }

// Fills *pVencChnAttr with default channel attributes for the given codec
// type, frame size and pixel format. The bitstream buffer size is scaled
// with the frame resolution. Always returns 0.
int VencModule::VencChnAttrInit(VENC_CHN_ATTR_S *pVencChnAttr,
                                PAYLOAD_TYPE_E p_enType, int p_Width,
                                int p_Height, PIXEL_FORMAT_E pixFmt) {
  int streambuf = 2 * 1024 * 1024;
  memset(pVencChnAttr, 0, sizeof(VENC_CHN_ATTR_S));
  pVencChnAttr->stVencAttr.enType = p_enType;
  pVencChnAttr->stVencAttr.u32PicWidth = p_Width;
  pVencChnAttr->stVencAttr.u32PicHeight = p_Height;
  pVencChnAttr->stVencAttr.enMirrorFlip = DIRECTION_NONE;
  pVencChnAttr->stVencAttr.enRotation = CODEC_ROTATION_0;
  pVencChnAttr->stVencAttr.stCropCfg.bEnable = HB_FALSE;
  // Choose the stream buffer size by resolution tier.
  if (p_Width * p_Height > 2688 * 1522) {
    streambuf = 2 * 1024 * 1024;
  } else if (p_Width * p_Height > 1920 * 1080) {
    streambuf = 1024 * 1024;
  } else if (p_Width * p_Height > 1280 * 720) {
    streambuf = 512 * 1024;
  } else {
    streambuf = 256 * 1024;
  }
  if (p_enType == PT_JPEG || p_enType == PT_MJPEG) {
    pVencChnAttr->stVencAttr.enPixelFormat = pixFmt;
    pVencChnAttr->stVencAttr.u32BitStreamBufferCount = 1;
    pVencChnAttr->stVencAttr.u32FrameBufferCount = 2;
    pVencChnAttr->stVencAttr.bExternalFreamBuffer = HB_TRUE;
    pVencChnAttr->stVencAttr.stAttrJpeg.dcf_enable = HB_FALSE;
    pVencChnAttr->stVencAttr.stAttrJpeg.quality_factor = 0;
    pVencChnAttr->stVencAttr.stAttrJpeg.restart_interval = 0;
    pVencChnAttr->stVencAttr.u32BitStreamBufSize = streambuf;
  } else {
    pVencChnAttr->stVencAttr.enPixelFormat = pixFmt;
    pVencChnAttr->stVencAttr.u32BitStreamBufferCount = 3;
    pVencChnAttr->stVencAttr.u32FrameBufferCount = 3;
    pVencChnAttr->stVencAttr.bExternalFreamBuffer = HB_TRUE;
    pVencChnAttr->stVencAttr.u32BitStreamBufSize = streambuf;
  }
  // Default rate control: VBR at 25 fps with a 50-frame intra period.
  if (p_enType == PT_H265) {
    pVencChnAttr->stRcAttr.enRcMode = VENC_RC_MODE_H265VBR;
    pVencChnAttr->stRcAttr.stH265Vbr.bQpMapEnable = HB_TRUE;
    pVencChnAttr->stRcAttr.stH265Vbr.u32IntraQp = 20;
    pVencChnAttr->stRcAttr.stH265Vbr.u32IntraPeriod = 50;
    pVencChnAttr->stRcAttr.stH265Vbr.u32FrameRate = 25;
  }
  if (p_enType == PT_H264) {
    pVencChnAttr->stRcAttr.enRcMode = VENC_RC_MODE_H264VBR;
    pVencChnAttr->stRcAttr.stH264Vbr.bQpMapEnable = HB_TRUE;
    pVencChnAttr->stRcAttr.stH264Vbr.u32IntraQp = 20;
    pVencChnAttr->stRcAttr.stH264Vbr.u32IntraPeriod = 50;
    pVencChnAttr->stRcAttr.stH264Vbr.u32FrameRate = 25;
    pVencChnAttr->stVencAttr.stAttrH264.h264_profile = HB_H264_PROFILE_MP;
    pVencChnAttr->stVencAttr.stAttrH264.h264_level = HB_H264_LEVEL1;
  }
  pVencChnAttr->stGopAttr.u32GopPresetIdx = 3;
  pVencChnAttr->stGopAttr.s32DecodingRefreshType = 2;
  return 0;
}

// Creates and configures an H.264 CBR encoder channel, initializes the VP
// memory pool and allocates one contiguous NV12 frame buffer used to stitch
// the input channels. Returns 0 on success, -1 on channel setup failure.
int VencModule::Init(uint32_t chn_id, const VencModuleInfo *module_info,
                     const VencConfig &smart_venc_cfg) {
  VENC_CHN_ATTR_S vencChnAttr;
  VENC_RC_ATTR_S *pstRcParam;

  int width = module_info->width;
  int height = module_info->height;
  // Codec is fixed to H.264 here; the H265/MJPEG branches below are dormant.
  PAYLOAD_TYPE_E ptype = PT_H264;

  VencChnAttrInit(&vencChnAttr, ptype, width, height, HB_PIXEL_FORMAT_NV12);

  int s32Ret = HB_VENC_CreateChn(chn_id, &vencChnAttr);
  if (s32Ret != 0) {
    printf("HB_VENC_CreateChn %d failed, %d.\n", chn_id, s32Ret);
    return -1;
  }

  // Switch rate control to CBR with the configured bitrate.
  if (ptype == PT_H264) {
    pstRcParam = &(vencChnAttr.stRcAttr);
    vencChnAttr.stRcAttr.enRcMode = VENC_RC_MODE_H264CBR;
    s32Ret = HB_VENC_GetRcParam(chn_id, pstRcParam);
    if (s32Ret != 0) {
      printf("HB_VENC_GetRcParam failed.\n");
      return -1;
    }
    printf(" vencChnAttr.stRcAttr.enRcMode = %d mmmmmmmmmmmmmmmmmm \n",
           vencChnAttr.stRcAttr.enRcMode);
    printf(" u32VbvBufferSize = %d mmmmmmmmmmmmmmmmmm \n",
           vencChnAttr.stRcAttr.stH264Cbr.u32VbvBufferSize);
    pstRcParam->stH264Cbr.u32BitRate = module_info->bits;
    pstRcParam->stH264Cbr.u32FrameRate = 25;
    pstRcParam->stH264Cbr.u32IntraPeriod = 50;
    pstRcParam->stH264Cbr.u32VbvBufferSize = 3000;
  } else if (ptype == PT_H265) {
    pstRcParam = &(vencChnAttr.stRcAttr);
    vencChnAttr.stRcAttr.enRcMode = VENC_RC_MODE_H265CBR;
    s32Ret = HB_VENC_GetRcParam(chn_id, pstRcParam);
    if (s32Ret != 0) {
      printf("HB_VENC_GetRcParam failed.\n");
      return -1;
    }
    printf(" m_VencChnAttr.stRcAttr.enRcMode = %d mmmmmmmmmmmmmmmmmm \n",
           vencChnAttr.stRcAttr.enRcMode);
    printf(" u32VbvBufferSize = %d mmmmmmmmmmmmmmmmmm \n",
           vencChnAttr.stRcAttr.stH265Cbr.u32VbvBufferSize);
    pstRcParam->stH265Cbr.u32BitRate = module_info->bits;
    pstRcParam->stH265Cbr.u32FrameRate = 25;
    pstRcParam->stH265Cbr.u32IntraPeriod = 50;
    pstRcParam->stH265Cbr.u32VbvBufferSize = 3000;
  } else if (ptype == PT_MJPEG) {
    pstRcParam = &(vencChnAttr.stRcAttr);
    vencChnAttr.stRcAttr.enRcMode = VENC_RC_MODE_MJPEGFIXQP;
    s32Ret = HB_VENC_GetRcParam(chn_id, pstRcParam);
    if (s32Ret != 0) {
      printf("HB_VENC_GetRcParam failed.\n");
      return -1;
    }
  }

  s32Ret = HB_VENC_SetChnAttr(chn_id, &vencChnAttr);  // config
  if (s32Ret != 0) {
    printf("HB_VENC_SetChnAttr failed\n");
    return -1;
  }

  chn_id_ = chn_id;
  venc_info_.width = module_info->width;
  venc_info_.height = module_info->height;
  venc_info_.type = module_info->type;
  venc_info_.bits = module_info->bits;

  s32Ret = HB_VP_Init();
  if (s32Ret != 0) {
    printf("vp_init fail s32Ret = %d !\n", s32Ret);
  }

  // One NV12 frame: width * height luma + width * height / 2 chroma.
  buffers_.mmz_size = venc_info_.width * venc_info_.height * 3 / 2;
  s32Ret = HB_SYS_Alloc(&buffers_.mmz_paddr,
                        reinterpret_cast<void **>(&buffers_.mmz_vaddr),
                        buffers_.mmz_size);
  if (s32Ret == 0) {
    printf("mmzAlloc paddr = 0x%lx, vaddr = 0x%p \n", buffers_.mmz_paddr,
           buffers_.mmz_vaddr);
  }
  // char stream_name[100] = {0};
  // sprintf(stream_name, "%s%d%s", "./video_box/output_stream_", chn_id_,
  // ".h264");
  // outfile_ = fopen(stream_name, "wb");
  server_cfg_ = smart_venc_cfg;
  return s32Ret;
}

// Starts frame reception on the channel and spawns the thread that drains
// encoded packets into the output FIFO.
int VencModule::Start() {
  int ret = 0;
  VENC_RECV_PIC_PARAM_S pstRecvParam;
  pstRecvParam.s32RecvPicNum = 0;  // unchangable
  ret = HB_VENC_StartRecvFrame(chn_id_, &pstRecvParam);
  if (ret != 0) {
    LOGE << "HB_VENC_StartRecvStream Failed. ret = " << ret;
    return ret;
  }
  process_running_ = true;
  process_thread_ = std::make_shared<std::thread>(&VencModule::Process, this);
  return ret;
}

// Worker loop: pulls encoded packets from the channel and writes them to a
// named pipe (/tmp/h264_fifo for channel 0, /tmp/h264_fifo1 otherwise).
int VencModule::Process() {
  const char *fifo_name = nullptr;
  if (0 == chn_id_) {
    fifo_name = "/tmp/h264_fifo";
  } else {
    fifo_name = "/tmp/h264_fifo1";
  }
  if (access(fifo_name, F_OK) == -1) {
    int res = mkfifo(fifo_name, 0777);
    if (res != 0) {
      LOGE << "mkdir fifo failed!!!!";
      return -1;
    }
  }
  // Blocks here until a reader process opens the FIFO.
  pipe_fd_ = open(fifo_name, O_WRONLY);
  if (pipe_fd_ == -1) {
    LOGE << "open fifo fail";
    return -1;
  }
  VIDEO_STREAM_S pstStream;
  while (process_running_) {
    memset(&pstStream, 0, sizeof(VIDEO_STREAM_S));
    int ret = HB_VENC_GetStream(chn_id_, &pstStream, timeout_);
    if (ret < 0) {
      printf("HB_VENC_GetStream error!!!\n");
    } else {
      // fwrite(pstStream.pstPack.vir_ptr, 1, pstStream.pstPack.size, outfile_);
      // NOTE(review): the write() return value is ignored — a short write to
      // the FIFO would silently drop bitstream bytes.
      write(pipe_fd_, (unsigned char *)pstStream.pstPack.vir_ptr,
            pstStream.pstPack.size);
      LOGD << "Venc chn " << chn_id_ << " get stream pack size "
           << pstStream.pstPack.size;
      HB_VENC_ReleaseStream(chn_id_, &pstStream);
    }
  }
  return 0;
}

// Copies one channel's NV12 frame into its tile of the shared stitching
// buffer; once all input channels have contributed (tracked via mmz_flag
// bitmask), submits the stitched frame to the encoder.
// Tile layout inferred from the arithmetic: channel % 2 selects the column,
// channel / 2 the row of a 2-column mosaic — confirm against callers.
int VencModule::Input(void *data, const xstream::OutputDataPtr &xstream_out) {
  int ret = 0;
  VencData *venc_data = static_cast<VencData *>(data);

  // Copy the luma (Y) plane into its tile position in the buffer.
  auto dst_start_vaddr =
      buffers_.mmz_vaddr + venc_data->width * (venc_data->channel % 2) +
      venc_info_.width * venc_data->height * (venc_data->channel / 2);
  auto src_start_vaddr = venc_data->y_virtual_addr;
  for (uint32_t j = 0; j < venc_data->height; j++) {
    memcpy(dst_start_vaddr, src_start_vaddr, venc_data->width);
    src_start_vaddr += venc_data->width;
    dst_start_vaddr += venc_info_.width;
  }

  // Copy the interleaved chroma (UV) plane, which follows the full-size
  // luma plane and is half the height.
  dst_start_vaddr =
      buffers_.mmz_vaddr + venc_info_.width * venc_info_.height +
      venc_data->width * (venc_data->channel % 2) +
      venc_info_.width * venc_data->height * (venc_data->channel / 2) / 2;
  src_start_vaddr = venc_data->uv_virtual_addr;
  for (uint32_t j = 0; j < venc_data->height / 2; j++) {
    memcpy(dst_start_vaddr, src_start_vaddr, venc_data->width);
    src_start_vaddr += venc_data->width;
    dst_start_vaddr += venc_info_.width;
  }

  // Mark this channel's tile as filled; encode once every channel has arrived.
  buffers_.mmz_flag |= 1 << venc_data->channel;
  if (!(buffers_.mmz_flag ^ ((1 << server_cfg_.input_num) - 1))) {
    // Send the stitched frame to the encoder.
    VIDEO_FRAME_S pstFrame;
    memset(&pstFrame, 0, sizeof(VIDEO_FRAME_S));
    pstFrame.stVFrame.width = venc_info_.width;
    pstFrame.stVFrame.height = venc_info_.height;
    pstFrame.stVFrame.size = buffers_.mmz_size;
    pstFrame.stVFrame.pix_format = HB_PIXEL_FORMAT_NV12;
    pstFrame.stVFrame.phy_ptr[0] = buffers_.mmz_paddr;
    pstFrame.stVFrame.phy_ptr[1] =
        buffers_.mmz_paddr + venc_info_.width * venc_info_.height;
    pstFrame.stVFrame.vir_ptr[0] = buffers_.mmz_vaddr;
    pstFrame.stVFrame.vir_ptr[1] =
        buffers_.mmz_vaddr + venc_info_.width * venc_info_.height;
    pstFrame.stVFrame.pts = 0;
    ret = HB_VENC_SendFrame(chn_id_, &pstFrame, timeout_);
    if (ret != 0) {
      LOGE << "HB_VENC_SendStream Failed. ret = " << ret;
    }
    // fwrite(buffers_.mmz_vaddr, 1, buffers_.mmz_size, outfile_);
    buffers_.mmz_flag = 0;
  }
  return ret;
}

// Stops the drain thread and frame reception. The thread is only joined
// when the FIFO was opened (pipe_fd_ > 0), since Process() blocks in open()
// until a reader connects.
int VencModule::Stop() {
  int ret = 0;
  process_running_ = false;
  if (pipe_fd_ > 0) process_thread_->join();
  LOGE << "VENC Stop id: " << chn_id_;
  ret = HB_VENC_StopRecvFrame(chn_id_);
  if (ret != 0) {
    LOGE << "HB_VENC_StopRecvStream Failed. ret = " << ret;
    return ret;
  }
  return ret;
}

// Destroys the encoder channel created in Init().
int VencModule::DeInit() {
  int ret = 0;
  ret = HB_VENC_DestroyChn(chn_id_);
  if (ret != 0) {
    LOGE << "HB_VENC_DestroyChn Failed. ret = " << ret;
    return ret;
  }
  return ret;
}

}  // namespace vision
}  // namespace horizon
qwasdw/Code-for-interview-Java-version
ch03/src/kth_last_node_in_linked_list.java
<gh_stars>1-10 public class kth_last_node_in_linked_list { public static void main(String[] args) { ListNode a = new ListNode(111); ListNode b = new ListNode(222); ListNode c = new ListNode(333); ListNode d = new ListNode(444); a.setNext(b); b.setNext(c); c.setNext(d); int k = 5; ListNode node = kth_last_node(a, k); try { System.out.println(node.getValue()); } catch (Exception e){ System.out.println("不存在倒数第" + k + "个节点"); } } public static ListNode kth_last_node(ListNode head, int k){ if (head == null || k <= 0) return null; ListNode preNode = head; ListNode node = head; int count = 0; while (node.getNext() != null && count < (k - 1)){ count++; node = node.getNext(); } if (count < (k - 1)) return null; while (node.getNext() != null){ preNode = preNode.getNext(); node = node.getNext(); } return preNode; } }
splitio/qos-runner
src/main/java/io/split/qos/server/modules/QOSServerModule.java
package io.split.qos.server.modules;

import com.google.common.base.Preconditions;
import com.google.inject.AbstractModule;
import com.google.inject.Singleton;
import com.google.inject.assistedinject.FactoryModuleBuilder;
import com.google.inject.name.Names;
import io.split.qos.server.QOSServerConfiguration;
import io.split.qos.server.failcondition.FailCondition;
import io.split.qos.server.failcondition.SimpleFailCondition;
import io.split.qos.server.integrations.datadog.DatadogBroadcaster;
import io.split.qos.server.integrations.datadog.DatadogBroadcasterImpl;
import io.split.qos.server.integrations.pagerduty.PagerDutyBroadcaster;
import io.split.qos.server.integrations.pagerduty.PagerDutyBroadcasterImpl;
import io.split.qos.server.integrations.slack.broadcaster.SlackTestResultBroacasterImpl;
import io.split.qos.server.integrations.slack.broadcaster.SlackTestResultBroadcaster;
import io.split.qos.server.integrations.slack.commander.SlackCommandProvider;
import io.split.qos.server.integrations.slack.commander.SlackCommanderProviderImpl;
import io.split.qos.server.integrations.slack.commandintegration.SlackCommandIntegration;
import io.split.qos.server.integrations.slack.commandintegration.SlackCommandIntegrationImpl;
import io.split.qos.server.integrations.slack.listener.SlackCommandListener;
import io.split.qos.server.integrations.slack.listener.SlackCommandListenerImpl;
import io.split.qos.server.util.BroadcasterTestWatcher;
import io.split.testrunner.junit.JUnitRunner;
import io.split.testrunner.junit.JUnitRunnerFactory;

/**
 * Module for installing Server related Guice injections.
 */
public class QOSServerModule extends AbstractModule {

    // Values taken from the configuration and exposed via @Named bindings.
    private final String serverName;
    private final String teamName;

    // Server Name
    // Default QOS Server
    public static final String QOS_SERVER_NAME = "QOS_SERVER_NAME";
    public static final String TEAM_NAME = "TEAM_NAME";

    private final QOSServerConfiguration configuration;

    /**
     * @param configuration non-null server configuration; its server name and
     *                      team name must also be non-null.
     */
    public QOSServerModule(QOSServerConfiguration configuration) {
        this.configuration = Preconditions.checkNotNull(configuration);
        this.serverName = Preconditions.checkNotNull(configuration.getServerName());
        this.teamName = Preconditions.checkNotNull(configuration.getTeamName());
    }

    @Override
    protected void configure() {
        // Assisted-injection factory for building JUnitRunner instances.
        install(new FactoryModuleBuilder()
                .implement(JUnitRunner.class, JUnitRunner.class)
                .build(JUnitRunnerFactory.class));

        bind(FailCondition.class).to(SimpleFailCondition.class).in(Singleton.class);

        // Interface -> implementation bindings for the Slack, PagerDuty and
        // Datadog integrations.
        bind(SlackCommandProvider.class).to(SlackCommanderProviderImpl.class);
        bind(SlackCommandIntegration.class).to(SlackCommandIntegrationImpl.class);
        bind(SlackTestResultBroadcaster.class).to(SlackTestResultBroacasterImpl.class);
        bind(SlackCommandListener.class).to(SlackCommandListenerImpl.class);
        bind(PagerDutyBroadcaster.class).to(PagerDutyBroadcasterImpl.class);
        bind(DatadogBroadcaster.class).to(DatadogBroadcasterImpl.class);

        // Expose the configured names as @Named string constants.
        bindConstant()
                .annotatedWith(Names.named(QOS_SERVER_NAME))
                .to(serverName);
        bindConstant()
                .annotatedWith(Names.named(TEAM_NAME))
                .to(teamName);
        bind(QOSServerConfiguration.class).toInstance(configuration);

        // HACK. Since we need the server for the tests (for broadcasting) and the tests use a complete different
        // Injector, we use static variables to communicate.
        BroadcasterTestWatcher.serverName = serverName;
    }
}
jhwsx/Java_01_AdvancedFeatures
src/com/java/advanced/features/io/bixiangdong/p14_file/FileMehtodDemo.java
// <filename>src/com/java/advanced/features/io/bixiangdong/p14_file/FileMehtodDemo.java
package com.java.advanced.features.io.bixiangdong.p14_file;

// NOTE(review): unused import left in place — presumably a leftover from
// IDE auto-completion; safe to remove in a follow-up.
import javafx.scene.input.DataFormat;

import java.io.File;
import java.io.IOException;
import java.text.DateFormat;

/**
 * Demonstrations of the {@link java.io.File} API: getters, create/delete,
 * existence checks, rename/move, and listing filesystem roots.
 *
 * @author wangzhichao
 * @since 2021/7/27
 */
public class FileMehtodDemo {
    public static void main(String[] args) {
        /* Getter-API demos — uncomment one at a time to run it. */
        // getDemo();
        // createAndDeleteDemo();
        // isDemo();
        // renameToDemo();
        listRootsDemo();
    }

    /** Lists every filesystem root with its total / usable / free space. */
    private static void listRootsDemo() {
        File[] files = File.listRoots();
        for (File file : files) {
            System.out.println(file.getAbsolutePath());
            System.out.println("file.getTotalSpace() = " + file.getTotalSpace());
            System.out.println("file.getUsableSpace() = " + file.getUsableSpace());
            System.out.println("file.getFreeSpace() = " + file.getFreeSpace());
        }
    }

    /** Shows renameTo both as a rename and as a move to another volume. */
    private static void renameToDemo() {
        // Rename in place
        // File f1 = new File("I:\\xinjing.mp4");
        File f2 = new File("I:\\xinjing2.mp4");
        // boolean b = f1.renameTo(f2);
        // System.out.println("b = " + b);
        // Move to a different drive
        File f3 = new File("H:\\xinjing.mp4");
        boolean b1 = f2.renameTo(f3);
        System.out.println("b1 = " + b1);
    }

    /** Existence checks for a file, a directory, and a missing path. */
    private static void isDemo() {
        File file = new File("file.txt");
        System.out.println("file.exists() = " + file.exists());
        File dir = new File("a");
        System.out.println("dir.exists() = " + dir.exists());
        File what = new File("what");
        System.out.println("what.exists() = " + what.exists());
    }

    /** Creating/deleting files, temp files, and (nested) directories. */
    private static void createAndDeleteDemo() {
        // For files
        File file = new File("create.txt");
        try {
            // Returns false if the file already exists, true otherwise.
            boolean created = file.createNewFile();
            System.out.println("created = " + created);
        } catch (IOException e) {
            e.printStackTrace();
        }
        if (file.exists()) {
            boolean delete = file.delete();
            System.out.println("delete = " + delete);
        }
        try {
            // temp.getAbsolutePath() = C:\Users\willw\AppData\Local\Temp\temp2464096493512010557.dat
            File temp = File.createTempFile("temp", ".dat");
            System.out.println("temp.getAbsolutePath() = " + temp.getAbsolutePath());
        } catch (IOException e) {
            e.printStackTrace();
        }
        // For directories
        File dir = new File("a");
        // Creates the single directory named by this abstract pathname.
        boolean mkdir = dir.mkdir();
        System.out.println("mkdir = " + mkdir);
        // If this pathname denotes a directory, it must be empty to be deleted.
        boolean delete = dir.delete();
        System.out.println("delete = " + delete);
        File dir2 = new File("a//c//d//e");
        // Creates the directory including any missing parent directories.
        boolean mkdirs = dir2.mkdirs();
        System.out.println("mkdirs = " + mkdirs);
    }

    /** Path/name/size/timestamp getters on a relative File. */
    private static void getDemo() {
        File file = new File("file.txt");
        String name = file.getName();
        System.out.println("name = " + name);
        String absPath = file.getAbsolutePath();
        System.out.println("absPath = " + absPath);
        String path = file.getPath();
        System.out.println("path = " + path);
        try {
            String canonicalPath = file.getCanonicalPath();
            System.out.println("canonicalPath = " + canonicalPath);
        } catch (IOException e) {
            e.printStackTrace();
        }
        // For a relative path with no parent component, getParent() is null.
        String parent = file.getParent();
        System.out.println("parent = " + parent);
        long length = file.length();
        System.out.println("length = " + length);
        long time = file.lastModified();
        DateFormat dateFormat = DateFormat.getDateTimeInstance(DateFormat.LONG, DateFormat.LONG);
        String timeStr = dateFormat.format(time);
        System.out.println("timeStr = " + timeStr);
    }
}
ERS-HCL/atomic-react-comp
src/lib/components/atoms/Caption/index.js
import PropTypes from "prop-types";
import styled from "styled-components";
import { font, palette } from "styled-theme";

/**
 * Table-caption atom styled through the app theme: the theme's "quote"
 * font family and grayscale palette entry 1, rendered uppercase at
 * 0.875rem with a 3rem line height.
 */
const Caption = styled.caption`
  font-family: ${font("quote")};
  color: ${palette("grayscale", 1)};
  font-weight: 500;
  line-height: 3rem;
  font-size: 0.875rem;
  text-transform: uppercase;
`;

// `reverse` is not referenced in the template above — presumably consumed by
// the styled-theme `palette` helper via props; confirm before removing.
Caption.propTypes = {
  reverse: PropTypes.bool
};

export default Caption;
xiaohuliqibao/LangBot
src/main/java/top/kagerou/lang/service/serviceImp/VoiceStorageServiceImp.java
// <reponame>xiaohuliqibao/LangBot<gh_stars>0
package top.kagerou.lang.service.serviceImp;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.stream.Stream;

import org.springframework.core.io.Resource;
import org.springframework.core.io.UrlResource;
import org.springframework.stereotype.Service;
import org.springframework.util.FileSystemUtils;
import org.springframework.web.multipart.MultipartFile;

import lombok.extern.slf4j.Slf4j;
import top.kagerou.lang.service.VoiceStorageService;
import top.kagerou.lang.util.VoicdFromateUtil;

/**
 * Filesystem-backed storage for customized voice clips: uploads land as MP3
 * under a fixed Linux directory and are transcoded to AMR on save.
 *
 * Fix applied throughout: every rethrown {@code RuntimeException} now chains
 * the original exception as its cause. The originals dropped the cause
 * (or kept only {@code e.getMessage()}), destroying the stack trace needed
 * to diagnose I/O failures. Exception messages are unchanged, so callers
 * matching on message text are unaffected.
 */
@Slf4j
@Service
public class VoiceStorageServiceImp implements VoiceStorageService {

    // Fixed storage root for customized MP3 voices (Linux deployment path).
    private static String VOICE_CUSTOMIZE_MP3_PATH_LINUX =
            "/home/qibao/file/miraibot/voices/customize/mp3/";

    private final Path root = Paths.get(VOICE_CUSTOMIZE_MP3_PATH_LINUX);

    /** Recursively deletes the storage root and everything beneath it. */
    @Override
    public void delectAll() {
        FileSystemUtils.deleteRecursively(root.toFile());
    }

    /** Creates the storage directory tree if it does not exist yet. */
    @Override
    public void init() {
        try {
            Files.createDirectories(root);
        } catch (IOException e) {
            // Chain the cause so the underlying I/O error is diagnosable.
            throw new RuntimeException("Could not initialize folder for upload!", e);
        }
    }

    /**
     * Resolves {@code filename} under the storage root and returns it as a
     * readable {@link Resource}.
     *
     * @throws RuntimeException if the file is missing, unreadable, or the
     *                          path cannot be converted to a URL resource
     */
    @Override
    public Resource load(String filename) {
        try {
            Path file = root.resolve(filename);
            Resource resource = new UrlResource(file.toUri());
            if (resource.exists() || resource.isReadable()) {
                return resource;
            } else {
                throw new RuntimeException("Could not read the file!");
            }
        } catch (Exception e) {
            throw new RuntimeException("Error: " + e.getMessage(), e);
        }
    }

    /**
     * Streams the immediate children of the storage root as paths relative
     * to it (depth 1; the root itself is excluded).
     */
    @Override
    public Stream<Path> loadAll() {
        try {
            return Files.walk(this.root, 1)
                    .filter(path -> !path.equals(this.root))
                    .map(this.root::relativize);
        } catch (Exception e) {
            throw new RuntimeException("Could not load the files!", e);
        }
    }

    /**
     * Stores an uploaded MP3 under the storage root (keeping its original
     * filename) and triggers MP3-to-AMR transcoding.
     */
    @Override
    public void save(MultipartFile file) {
        try {
            Files.copy(file.getInputStream(), this.root.resolve(file.getOriginalFilename()));
            log.info(file.getOriginalFilename());
            // Strips the last 4 characters — assumes a ".mp3" suffix;
            // TODO: validate the extension before uploading.
            String fileName = file.getOriginalFilename()
                    .substring(0, file.getOriginalFilename().length() - 4);
            if (VoicdFromateUtil.mp3ToAmr(fileName)) {
                log.info("转码成功");
            } else {
                log.error("转码失败");
            }
        } catch (Exception e) {
            throw new RuntimeException("Could not store the file. Error: " + e.getMessage(), e);
        }
    }
}
harderthan/gazebo
gazebo/gui/building/BaseInspectorDialog.cc
/*
 * Copyright (C) 2015 Open Source Robotics Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

#include "gazebo/common/Assert.hh"
#include "gazebo/gui/building/BaseInspectorDialog.hh"

using namespace gazebo;
using namespace gui;

/////////////////////////////////////////////////
BaseInspectorDialog::BaseInspectorDialog(QWidget *_parent):QDialog(_parent)
{
}

/////////////////////////////////////////////////
BaseInspectorDialog::~BaseInspectorDialog()
{
}

/////////////////////////////////////////////////
// Builds the color picker combo box, pre-populated with six preset colors
// rendered as 15x15 icon swatches. Indices here map 1:1 onto colorList.
void BaseInspectorDialog::InitColorComboBox()
{
  this->colorComboBox = new QComboBox;
  this->colorComboBox->setIconSize(QSize(15, 15));
  this->colorComboBox->setMinimumWidth(50);
  this->colorComboBox->setSizePolicy(QSizePolicy::Fixed, QSizePolicy::Fixed);
  QPixmap colorIcon(15, 15);
  this->colorList.push_back(QColor(255, 255, 255, 255));
  this->colorList.push_back(QColor(194, 169, 160, 255));
  this->colorList.push_back(QColor(235, 206, 157, 255));
  this->colorList.push_back(QColor(254, 121, 5, 255));
  this->colorList.push_back(QColor(255, 195, 78, 255));
  this->colorList.push_back(QColor(111, 203, 172, 255));
  for (unsigned int i = 0; i < this->colorList.size(); ++i)
  {
    colorIcon.fill(this->colorList.at(i));
    this->colorComboBox->addItem(colorIcon, QString(""));
  }
}

/////////////////////////////////////////////////
// Builds the texture picker combo box with thumbnail icons for the bundled
// textures plus a final "X" entry meaning "no texture", which is selected
// by default.
void BaseInspectorDialog::InitTextureComboBox()
{
  this->textureComboBox = new QComboBox;
  this->textureComboBox->setIconSize(QSize(30, 30));
  this->textureComboBox->setMinimumWidth(50);
  this->textureComboBox->setMinimumHeight(50);
  this->textureComboBox->setSizePolicy(QSizePolicy::Fixed, QSizePolicy::Fixed);
  this->textureList.push_back(":wood.jpg");
  this->textureList.push_back(":tiles.jpg");
  this->textureList.push_back(":bricks.png");
  for (unsigned int i = 0; i < this->textureList.size(); ++i)
  {
    this->textureComboBox->addItem(QPixmap(this->textureList[i]).scaled(
        QSize(90, 90), Qt::IgnoreAspectRatio), QString(""));
  }
  this->textureComboBox->addItem("X");
  this->textureComboBox->setCurrentIndex(this->textureComboBox->count()-1);
}

/////////////////////////////////////////////////
// Returns the currently selected color.
QColor BaseInspectorDialog::GetColor() const
{
  return this->colorList[this->colorComboBox->currentIndex()];
}

/////////////////////////////////////////////////
// Returns the selected texture resource path, or an empty string when the
// trailing "X" (no texture) entry is selected.
QString BaseInspectorDialog::GetTexture() const
{
  QString texture = QString("");
  if (this->textureComboBox->currentIndex() != -1 &&
      this->textureComboBox->currentIndex() <
      this->textureComboBox->count() - 1)
  {
    texture = this->textureList[this->textureComboBox->currentIndex()];
  }
  return texture;
}

/////////////////////////////////////////////////
// Selects _color in the combo box, appending it as a new swatch if it is
// not one of the known colors.
void BaseInspectorDialog::SetColor(const QColor _color)
{
  int index = -1;
  for (unsigned int i = 0; i < this->colorList.size(); ++i)
  {
    if (this->colorList[i] == _color)
    {
      index = i;
      break;
    }
  }

  if (index == -1)
  {
    // Add a new color
    this->colorList.push_back(_color);
    QPixmap colorIcon(15, 15);
    colorIcon.fill(this->colorList.back());
    this->colorComboBox->addItem(colorIcon, QString(""));
    index = this->colorComboBox->count()-1;
  }
  GZ_ASSERT(index >= 0, "Color index is broken < 0");
  this->colorComboBox->setCurrentIndex(index);
}

/////////////////////////////////////////////////
// Selects _texture in the combo box; unknown textures fall back to the
// trailing "X" (no texture) entry.
void BaseInspectorDialog::SetTexture(QString _texture)
{
  // Find index corresponding to texture (only a few textures allowed so far)
  int index = this->textureComboBox->count()-1;
  for (unsigned int i = 0; i < this->textureList.size(); ++i)
  {
    if (this->textureList[i] == _texture)
    {
      index = i;
      break;
    }
  }
  this->textureComboBox->setCurrentIndex(index);
}
saichikine/GMAT
plugins/ProductionPropagatorPlugin/src/base/propagator/PrinceDormand853.hpp
// //------------------------------------------------------------------------------ // PrinceDormand853 //------------------------------------------------------------------------------ // GMAT: General Mission Analysis Tool. // // Copyright (c) 2002 - 2018 United States Government as represented by the // Administrator of The National Aeronautics and Space Administration. // All Other Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // You may not use this file except in compliance with the License. // You may obtain a copy of the License at: // http://www.apache.org/licenses/LICENSE-2.0. // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either // express or implied. See the License for the specific language // governing permissions and limitations under the License. // // *** File Name : PrinceDormand853.hpp // *** Created : July 1, 2013 // ************************************************************************** // *** Developed By : <NAME>, CBNU *** // ************************************************************************** #ifndef PrinceDormand853_H #define PrinceDormand853_H #include "ProductionPropagatorDefs.hpp" #include "gmatdefs.hpp" #include "RungeKutta.hpp" /** * This class implements a Runge-Kutta integrator using the coefficients derived * by Prince and Dormand. This particular set of coefficients implements the * eighth order integrator with seventh order error control. */ class PRODUCTIONPROPAGATOR_API PrinceDormand853 : public RungeKutta { public: // PrinceDormand853(); PrinceDormand853(const std::string &nomme = ""); virtual ~PrinceDormand853(); PrinceDormand853(const PrinceDormand853&); PrinceDormand853 & operator=(const PrinceDormand853&); virtual Propagator* Clone() const; protected: void SetCoefficients(); }; #endif // PrinceDormand853_hpp
wood-ghost/openvino
src/tests/functional/shared_test_classes/include/shared_test_classes/single_layer/einsum.hpp
// Copyright (C) 2022 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

#pragma once

#include <string>
#include <tuple>
#include <vector>

#include "shared_test_classes/base/layer_test_utils.hpp"

namespace LayerTestsDefinitions {

// Pairs an einsum equation string with the shapes of its input operands.
typedef std::tuple<
        std::string,                         // Equation
        std::vector<std::vector<size_t>>     // Input shapes
> EinsumEquationWithInput;

// Full parameter set for one Einsum single-layer test instance.
typedef std::tuple<
        InferenceEngine::Precision,          // Input precision
        EinsumEquationWithInput,             // Equation with corresponding input shapes
        std::string                          // Device name
> EinsumLayerTestParamsSet;

// Parameterized single-layer test for the Einsum operation.
class EinsumLayerTest : public testing::WithParamInterface<EinsumLayerTestParamsSet>,
                        virtual public LayerTestsUtils::LayerTestsCommon {
public:
    // Builds a readable test name from the parameter tuple.
    static std::string getTestCaseName(const testing::TestParamInfo<EinsumLayerTestParamsSet>& obj);

protected:
    void SetUp() override;
};

}  // namespace LayerTestsDefinitions
zzgchina888/msdn-code-gallery-microsoft
Official Windows Platform Sample/Windows 8.1 Store app samples/[C++]-Windows 8.1 Store app samples/Projection sample/C++/Constants.cpp
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
//
//*********************************************************
#include "pch.h"
#include "MainPage.xaml.h"
#include "Constants.h"

using namespace SDKSample;
using namespace SDKSample::Projection;

// Scenario list for the sample's main page: each entry pairs a display title
// with the fully-qualified name of the XAML page implementing the scenario.
Platform::Array<Scenario>^ MainPage::scenariosInner = ref new Platform::Array<Scenario>
{
    { "Creating and projecting a view", "SDKSample.Projection.Scenario1" },
    { "Second screen availability", "SDKSample.Projection.Scenario2" }
};
hradec/duke
src/duke/io/ImageLoadUtils.cpp
#include "duke/io/ImageLoadUtils.hpp"

#include "duke/attributes/Attributes.hpp"
#include "duke/attributes/AttributeKeys.hpp"
#include "duke/filesystem/FsUtils.hpp"
#include "duke/filesystem/MemoryMappedFile.hpp"
#include "duke/gl/GlUtils.hpp"
#include "duke/gl/Textures.hpp"
#include "duke/image/ImageDescription.hpp"
#include "duke/image/ImageUtils.hpp"
#include "duke/io/IO.hpp"
#include "duke/memory/Allocator.hpp"

#include <sstream>

using std::move;

namespace duke {

namespace {

// Shared allocator handed to readers for frame storage.
AlignedMalloc alignedMalloc;

// Records the message in `result` and moves it out as the return value.
// Note: deliberately moves from the caller's lvalue — callers always return
// the moved value immediately.
ReadFrameResult error(const std::string& error, ReadFrameResult& result) {
  result.error = error;
  return move(result);
}

}  // namespace

// Reads one frame through result.reader into result.frame.
// On any failure the reader's error string is copied into result.error and
// the function returns early; on success container metadata is merged into
// the frame's attributes and the OpenGL format is deduced.
void loadImage(ReadFrameResult& result, const ReadOptionsFunc& getReadOptions) {
  IImageReader* pReader = result.reader.get();
  CHECK(pReader);
  if (pReader->hasError()) {
    result.error = pReader->getError();
    return;
  }
  const auto& description = pReader->getContainerDescription();
  // Only single-subimage containers are supported here.
  CHECK(description.subimages.size() == 1);
  const auto& options = getReadOptions(description);
  if (!pReader->read(options, alignedMalloc, result.frame)) {
    result.error = pReader->getError();
    return;
  }
  // Appending container properties to image.
  attribute::merge(description.metadata, result.frame.getMutableDescription().extra_attributes);
  // Deducing opengl format from channel format.
  result.frame.updateOpenGlFormat();
}

// Loads `pFilename` by trying every IIODescriptor registered for the file's
// extension, in order, until one succeeds.  On total failure the returned
// result's error aggregates the per-reader error messages.
ReadFrameResult load(const char* pFilename, const ReadOptionsFunc& getReadOptions) {
  ReadFrameResult result;
  if (!pFilename) return error("no filename", result);
  const char* pExtension = fileExtension(pFilename);
  if (!pExtension) return error("no extension", result);
  const auto& descriptors = IODescriptors::instance().findDescriptor(pExtension);
  if (descriptors.empty()) return error("no reader available", result);
  std::vector<std::string> errors;
  for (const IIODescriptor* pDescriptor : descriptors) {
    result.reader.reset(pDescriptor->createFileReader(pFilename));
    result.error.clear();
    loadImage(result, getReadOptions);
    if (result) return move(result);
    // Remember which reader failed and why, for the aggregate message below.
    errors.emplace_back(pDescriptor->getName());
    errors.back() += " : ";
    errors.back() += result.error;
    errors.back() += "\n";
  }
  std::string msg("No reader succeeded for '");
  msg += pFilename;
  msg += "'\n";
  for (const auto& error : errors) msg += error;
  return error(msg, result);
}

} /* namespace duke */
matiaslopezd/wix-style-react
src/PopoverMenu/index.js
// Re-export the PopoverMenu component as this folder's default export.
export {default} from './PopoverMenu';
input-output-hk/Scorex
scorex-basics/src/test/scala/scorex/crypto/ads/merkle/MerkleSpecification.scala
package scorex.crypto.ads.merkle

import java.io.{File, FileOutputStream}

import org.scalacheck.Gen
import org.scalatest.prop.{GeneratorDrivenPropertyChecks, PropertyChecks}
import org.scalatest.{Matchers, PropSpec}
import scorex.crypto.hash.FastCryptographicHash

import scala.util.Random

// Property-based tests for the file-backed Merkle tree implementation.
class MerkleSpecification extends PropSpec
  with PropertyChecks with GeneratorDrivenPropertyChecks with Matchers {

  property("value returned from byIndex() is valid for random dataset") {
    //fix block numbers for faster tests
    for (blocks <- List(7, 8, 9, 128)) {
      val smallInteger = Gen.choose(0, blocks - 1)
      val (treeDirName: String, _, tempFile: String) = generateFile(blocks)
      val tree = MerkleTree.fromFile(tempFile, treeDirName, 1024)

      // Every leaf fetched by index must carry a valid proof against the root.
      forAll(smallInteger) { (index: Int) =>
        val leafOption = tree.byIndex(index)
        leafOption should not be None
        val leaf = leafOption.get
        val resp = leaf.check(index, tree.rootHash)(FastCryptographicHash)
        resp shouldBe true
      }
      tree.storage.close()
    }
  }

  property("hash root is the same") {
    //fix block numbers for faster tests
    for (blocks <- List(7, 8, 9, 128)) {
      val (treeDirName: String, _, tempFile: String) = generateFile(blocks, "2")
      val fileTree = MerkleTree.fromFile(tempFile, treeDirName, 1024)
      val rootHash = fileTree.rootHash
      fileTree.storage.close()

      // Reopening the persisted tree must reproduce the same root hash.
      val tree = new MerkleTree(treeDirName, fileTree.nonEmptyBlocks, 1024)
      val newRootHash = tree.rootHash
      tree.storage.close()

      rootHash shouldBe newRootHash
    }
  }

  // Writes `blocks` KiB of random data under /tmp/scorex-test/test/<subdir>/
  // (clearing any previous contents) and returns (dir name, dir, data file path).
  def generateFile(blocks: Int, subdir: String = "1"): (String, File, String) = {
    val treeDirName = "/tmp/scorex-test/test/" + subdir + "/"
    val treeDir = new File(treeDirName)
    val tempFile = treeDirName + "/data.file"

    val data = new Array[Byte](1024 * blocks)
    Random.nextBytes(data)
    treeDir.mkdirs()
    for (file <- treeDir.listFiles) file.delete

    val fos = new FileOutputStream(tempFile)
    fos.write(data)
    fos.close()
    (treeDirName, treeDir, tempFile)
  }
}
janishar/jPost
jpost/src/main/java/com/mindorks/jpost/core/CustomChannel.java
/*
 * Copyright (C) 2016 <NAME>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License
 */

package com.mindorks.jpost.core;

import com.mindorks.jpost.exceptions.IllegalChannelStateException;
import com.mindorks.jpost.exceptions.NullObjectException;

import java.lang.ref.WeakReference;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.PriorityBlockingQueue;

/**
 * A {@link Channel} whose lifecycle is controlled by the client: it can be
 * started, stopped, terminated, and can broadcast to a chosen subset of its
 * subscribers.
 * <p>
 * Created by janisharali on 22/09/16.
 */
public interface CustomChannel<Q extends PriorityBlockingQueue<? extends WeakReference<? extends ChannelPost>>,
        M extends ConcurrentHashMap<? extends Integer,? extends WeakReference<?>>> extends Channel<Q, M>{

    /** Permanently shuts the channel down. */
    void terminateChannel();

    /** Starts (or resumes) message delivery on the channel. */
    void startChannel();

    /** Temporarily halts message delivery on the channel. */
    void stopChannel();

    /**
     * Broadcasts {@code msg} to the subscribers with the given ids.
     *
     * @param msg           the message to deliver
     * @param subscriberIds ids of the subscribers that should receive the message
     * @param <T>           the message type
     * @throws NullObjectException          if a required argument is {@code null}
     * @throws IllegalChannelStateException if the channel is not in a state that
     *                                      permits broadcasting (e.g. stopped or
     *                                      terminated — confirm against implementation)
     */
    <T>void broadcast(T msg, Integer... subscriberIds) throws NullObjectException, IllegalChannelStateException;
}
bitkylin/ClusterDeviceManager
Project-v1-Obsolete/clustermanage-server/src/main/java/cc/bitky/clustermanage/server/message/base/IMessage.java
package cc.bitky.clustermanage.server.message.base;

/**
 * Base contract for messages exchanged with cluster devices: every message
 * carries a message id plus the addressed device and group.
 */
public interface IMessage {

    /** @return the message type id */
    int getMsgId();

    /** @return id of the device this message addresses */
    int getDeviceId();

    /** @param deviceId id of the device this message addresses */
    void setDeviceId(int deviceId);

    /** @return id of the device group this message addresses */
    int getGroupId();

    /** @param groupId id of the device group this message addresses */
    void setGroupId(int groupId);
}
theothertomelliott/gort
dataaccess/memory/token-access.go
/*
 * Copyright 2021 The Gort Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package memory

import (
	"context"
	"time"

	"github.com/getgort/gort/data"
	"github.com/getgort/gort/data/rest"
	"github.com/getgort/gort/dataaccess/errs"
)

// NOTE(review): token state lives in package-level maps rather than on
// InMemoryDataAccess, so it is shared by all instances and is not guarded by
// any mutex — confirm that single-goroutine / test-only use is intended.
var (
	tokensByUser  map[string]rest.Token // key=username
	tokensByValue map[string]rest.Token // key=token
)

func init() {
	tokensByUser = make(map[string]rest.Token)
	tokensByValue = make(map[string]rest.Token)
}

// TokenEvaluate will test a token for validity. It returns true if the token
// exists and is still within its valid period; false otherwise.
func (da *InMemoryDataAccess) TokenEvaluate(ctx context.Context, tokenString string) bool {
	token, err := da.TokenRetrieveByToken(ctx, tokenString)
	if err != nil {
		return false
	}

	return !token.IsExpired()
}

// TokenGenerate generates a new token for the given user with a specified
// expiration duration. Any existing token for this user will be automatically
// invalidated. If the user doesn't exist an error is returned.
func (da *InMemoryDataAccess) TokenGenerate(ctx context.Context, username string, duration time.Duration) (rest.Token, error) {
	exists, err := da.UserExists(ctx, username)
	if err != nil {
		return rest.Token{}, err
	}
	if !exists {
		return rest.Token{}, errs.ErrNoSuchUser
	}

	// If a token already exists for this user, automatically invalidate it.
	// The invalidation error is intentionally ignored: the token was retrieved
	// a moment ago, so the only failure mode is it already being gone.
	token, err := da.TokenRetrieveByUser(ctx, username)
	if err == nil {
		da.TokenInvalidate(ctx, token.Token)
	}

	tokenString, err := data.GenerateRandomToken(64)
	if err != nil {
		return rest.Token{}, err
	}

	validFrom := time.Now().UTC()
	validUntil := validFrom.Add(duration)

	token = rest.Token{
		Duration:   duration,
		Token:      tokenString,
		User:       username,
		ValidFrom:  validFrom,
		ValidUntil: validUntil,
	}

	// Index the new token both by owner and by value.
	tokensByUser[username] = token
	tokensByValue[tokenString] = token

	return token, nil
}

// TokenInvalidate immediately invalidates the specified token. An error is
// returned if the token doesn't exist.
func (da *InMemoryDataAccess) TokenInvalidate(ctx context.Context, tokenString string) error {
	token, err := da.TokenRetrieveByToken(ctx, tokenString)
	if err != nil {
		return err
	}

	delete(tokensByUser, token.User)
	delete(tokensByValue, token.Token)

	return nil
}

// TokenRetrieveByUser retrieves the token associated with a username. An
// error is returned if no such token (or user) exists.
func (da *InMemoryDataAccess) TokenRetrieveByUser(ctx context.Context, username string) (rest.Token, error) {
	if token, ok := tokensByUser[username]; ok {
		return token, nil
	}

	return rest.Token{}, errs.ErrNoSuchToken
}

// TokenRetrieveByToken retrieves the token by its value. An error is returned
// if no such token exists.
func (da *InMemoryDataAccess) TokenRetrieveByToken(ctx context.Context, tokenString string) (rest.Token, error) {
	if token, ok := tokensByValue[tokenString]; ok {
		return token, nil
	}

	return rest.Token{}, errs.ErrNoSuchToken
}
metux/chromium-deb
third_party/WebKit/Source/core/html/track/AudioTrack.cpp
// Copyright 2014 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "core/html/track/AudioTrack.h" #include "core/html/HTMLMediaElement.h" namespace blink { AudioTrack::AudioTrack(const String& id, const AtomicString& kind, const AtomicString& label, const AtomicString& language, bool enabled) : TrackBase(WebMediaPlayer::kAudioTrack, kind, label, language, id), enabled_(enabled) {} AudioTrack::~AudioTrack() {} DEFINE_TRACE(AudioTrack) { TrackBase::Trace(visitor); } void AudioTrack::setEnabled(bool enabled) { if (enabled == enabled_) return; enabled_ = enabled; if (MediaElement()) MediaElement()->AudioTrackChanged(this); } const AtomicString& AudioTrack::AlternativeKeyword() { DEFINE_STATIC_LOCAL(const AtomicString, keyword, ("alternative")); return keyword; } const AtomicString& AudioTrack::DescriptionsKeyword() { DEFINE_STATIC_LOCAL(const AtomicString, keyword, ("descriptions")); return keyword; } const AtomicString& AudioTrack::MainKeyword() { DEFINE_STATIC_LOCAL(const AtomicString, keyword, ("main")); return keyword; } const AtomicString& AudioTrack::MainDescriptionsKeyword() { DEFINE_STATIC_LOCAL(const AtomicString, keyword, ("main-desc")); return keyword; } const AtomicString& AudioTrack::TranslationKeyword() { DEFINE_STATIC_LOCAL(const AtomicString, keyword, ("translation")); return keyword; } const AtomicString& AudioTrack::CommentaryKeyword() { DEFINE_STATIC_LOCAL(const AtomicString, keyword, ("commentary")); return keyword; } bool AudioTrack::IsValidKindKeyword(const String& kind) { return kind == AlternativeKeyword() || kind == DescriptionsKeyword() || kind == MainKeyword() || kind == MainDescriptionsKeyword() || kind == TranslationKeyword() || kind == CommentaryKeyword() || kind == g_empty_atom; } } // namespace blink
HPCToolkit/hpctest
internal/notes/builtin-SAVE/packages/mpibash/package.py
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by <NAME>, <EMAIL>, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *


class Mpibash(Package):
    """Parallel scripting right from the Bourne-Again Shell (Bash)"""

    homepage = "http://www.ccs3.lanl.gov/~pakin/software/mpibash-4.3.html"

    # mpibash is distributed as a patch on top of the upstream bash tarball.
    version('4.3', '81348932d5da294953e15d4814c74dd1',
            url="http://ftp.gnu.org/gnu/bash/bash-4.3.tar.gz")

    # patch -p1 < ../mpibash-4.3.patch
    patch('mpibash-4.3.patch', level=1, when='@4.3')

    # above patch modifies configure.ac
    depends_on('autoconf', type='build')

    # uses MPI_Exscan which is in MPI-1.2 and later
    depends_on('mpi@1.2:')

    depends_on('libcircle')

    def install(self, spec, prefix):
        """Regenerate configure (the patch edits configure.ac), then build
        with the MPI compiler wrapper and install."""
        # run autoconf to rebuild configure
        autoconf = which('autoconf')
        autoconf()

        configure("--prefix=" + prefix,
                  "CC=mpicc")

        # serial build: the patched bash tree is not parallel-make safe
        make(parallel=False)
        make("install")
njhoffman/better-musician
src/store/reducers.js
import { combineReducers } from 'redux';
import { reducer as formReducer } from 'redux-form';
import { connectRouter } from 'connected-react-router';

import { init as initLog } from 'shared/logger';

import uiReducer from './ui';
import apiReducer from './api';
import userReducer from './user';
import configReducer from './config';
import ormReducer from './orm'; // selectors need access to ORM
// TODO: put orm in own module

const { info } = initLog('reducers');

/**
 * Builds the application's root reducer.
 *
 * @param {Object} history        history instance for connected-react-router.
 * @param {Object} asyncReducers  reducers injected after store creation, keyed by slice name.
 * @param {Array}  injectedModels ORM models to register (currently logged only).
 * @returns {Function} the combined root reducer.
 */
const makeRootReducer = (history, asyncReducers, injectedModels = []) => {
  if (injectedModels.length > 0) {
    // Fixed log message: separator was ' ,' (reversed) and a stray '$'
    // trailed the interpolation, leaking into the output.
    info(`Injecting models into reducer: ${injectedModels.join(', ')}`);
    // orm.register(...injectedModels);
  }
  const reducers = {
    orm: ormReducer,
    form: formReducer,
    ui: uiReducer,
    api: apiReducer,
    user: userReducer,
    router: connectRouter(history),
    config: configReducer,
    ...asyncReducers
  };
  info(`Combining reducers: ${Object.keys(reducers).join(' - ')}`);
  return combineReducers(reducers);
};

/**
 * Replaces the store's reducer with one that includes `reducer` under `key`.
 *
 * @param {Object} reducerProps { key, history, reducer, store, clearOld } —
 *   when `clearOld` is true, previously injected reducers are dropped.
 * @param {Array} models ORM models forwarded to makeRootReducer.
 */
export const injectReducer = (reducerProps, models = []) => {
  const { key, history, reducer, store, clearOld = false } = reducerProps;
  info(`Injecting reducer: ${key}`);
  const asyncRed = clearOld ? {} : store.asyncReducers;
  asyncRed[key] = reducer;
  // NOTE(review): makeRootReducer already wires `router: connectRouter(history)`;
  // wrapping the whole root reducer in connectRouter(history)(...) again looks
  // like a leftover from an older connected-react-router API — confirm.
  const newReducer = connectRouter(history)(makeRootReducer(history, asyncRed, models));
  store.replaceReducer(newReducer);
};

export default makeRootReducer;
simia-tech/netx
test/helper.go
package test

import (
	"context"
	"fmt"
	"log"
	"net"
	"testing"

	"github.com/simia-tech/errx"
	"github.com/simia-tech/netx"
	"github.com/stretchr/testify/require"
)

// action is a per-connection behavior run by test servers and clients.
type action func(net.Conn) error

// echoServer reads one length-prefixed block and writes it back.
func echoServer(conn net.Conn) error {
	data, err := ReadBlock(conn)
	if err != nil {
		return err
	}
	if err := WriteBlock(conn, data); err != nil {
		return err
	}
	return nil
}

// echoClient sends "test" and verifies the echoed response.
func echoClient(conn net.Conn) error {
	if err := WriteBlock(conn, []byte("test")); err != nil {
		return errx.Annotatef(err, "write block")
	}
	bytes, err := ReadBlock(conn)
	if err != nil {
		return errx.Annotatef(err, "read block")
	}
	if string(bytes) != "test" {
		return fmt.Errorf("expected \"test\", got \"%s\"", bytes)
	}
	return nil
}

// makeListeners starts n listeners on one shared random address and returns
// the address, a function collecting per-listener accept counts, and a
// close-all function.  The per-listener error channels are discarded here.
func makeListeners(tb testing.TB, n int, a action, options *Options) (string, func() []int, func()) {
	address := netx.RandomAddress("echo-")
	counters := []func() int{}
	listeners := []net.Listener{}
	for index := 0; index < n; index++ {
		listener, counter, _ := makeListener(tb, address, a, options)
		listeners = append(listeners, listener)
		counters = append(counters, counter)
	}
	return address, func() []int {
			result := []int{}
			for _, counter := range counters {
				result = append(result, counter())
			}
			return result
		}, func() {
			for _, listener := range listeners {
				listener.Close()
			}
		}
}

// makeListener starts a listener (random address if none given) that serves
// each accepted connection with `a`, counting successful connections.
// NOTE(review): `counter` is incremented in the accept goroutine and read via
// the returned closure without synchronization — a data race under `-race` if
// a test reads it while connections are still being served; confirm intended.
func makeListener(tb testing.TB, address string, a action, options *Options) (net.Listener, func() int, chan error) {
	if address == "" {
		if options.ListenAddress == "" {
			address = netx.RandomAddress("echo-")
		} else {
			address = options.ListenAddress
		}
	}

	listener, err := netx.Listen(options.ListenNetwork, address, options.ListenOptions...)
	require.NoError(tb, err)

	counter := 0
	errChan := make(chan error, 1)
	go func() {
		for {
			conn, err := listener.Accept()
			if err != nil {
				log.Printf("test echo listener accept error: %v", err)
				errChan <- err
				return
			}
			if err = a(conn); err != nil {
				errChan <- err
				return
			}
			if err := conn.Close(); err != nil {
				log.Printf("test echo listener close error: %v", err)
				errChan <- err
				return
			}
			counter++
		}
	}()

	return listener, func() int { return counter }, errChan
}

// makeCalls dials `address` n times, runs `a` on each connection, and closes it.
func makeCalls(n int, address string, a action, options *Options) error {
	for index := 0; index < n; index++ {
		conn, err := makeConn(address, options)
		if err != nil {
			return err
		}
		if err := a(conn); err != nil {
			return err
		}
		if err := conn.Close(); err != nil {
			return err
		}
	}
	return nil
}

// makeConn dials one connection using the test options.
func makeConn(address string, options *Options) (net.Conn, error) {
	conn, err := netx.Dial(context.Background(), options.DialNetwork, address, options.DialOptions...)
	if err != nil {
		return nil, err
	}
	return conn, nil
}

// sum returns the total of all items.
func sum(items []int) int {
	result := 0
	for _, item := range items {
		result += item
	}
	return result
}
GTreeSoftware/GTreeCodeExample
Function/ineuronprocessobject.h
#ifndef INEURONPROCESSOBJECT_H
#define INEURONPROCESSOBJECT_H
#include "../ngtypes/ineurondataobject.h"

// Outcome of a processing step: a success flag plus, on failure, the name of
// the failing class and a human-readable error string.
class ProcessStatus
{
public:
    ProcessStatus(){ isSuccess = false; }
    ProcessStatus(bool arg, const std::string& str, const std::string &infostr):isSuccess(arg), className(str), errorStr(infostr){}
    ~ProcessStatus(){}
    bool success() const{ return isSuccess; }
    std::string ErrorClassName() const { return className; }
    std::string ErrorInfo() const { return errorStr; }
    void SetSuccess(bool arg) {isSuccess = arg; }
    void SetClassName(const std::string &name) { className = name; }
    void SetErrorStr(const std::string &str) { errorStr = str; }

private:
    bool isSuccess;
    std::string className;
    std::string errorStr;
};
typedef std::shared_ptr<ProcessStatus> ProcStatPointer;

// Abstract base for a neuron-processing pipeline stage: takes one input data
// object, produces one output data object, and reports status via Update().
class INeuronProcessObject
{
public:
    //INeuronProcessObject();
    virtual ProcStatPointer Update() = 0;
    virtual void SetInput(ConstIDataPointer input){m_Input = input;}
    virtual ConstIDataPointer GetOutput()=0;//{return m_Source;}
    virtual IDataPointer ReleaseData()=0;
    virtual ~INeuronProcessObject(){}
    std::string ClassName()const { return className_; }

protected:
    std::string className_;
    ConstIDataPointer m_Input;
    IDataPointer m_Source;//output data
};
typedef std::shared_ptr<INeuronProcessObject> INeuronProcessPointer;

// Builds a shared ProcessStatus in a local variable named `statusName`.
#define MAKEPROCESSSTATUS(statusName, flag, className, infoStr) std::shared_ptr<ProcessStatus> statusName = \
    std::shared_ptr<ProcessStatus>(new ProcessStatus(flag, className, infoStr));

// Declares the three pure-virtual overrides inside a subclass definition.
#define INEURONPROCESSOBJECT_DEFINE virtual ProcStatPointer Update(); \
    virtual ConstIDataPointer GetOutput(); \
    virtual IDataPointer ReleaseData();

// Stock ReleaseData(): detach m_Source from this stage and hand it to the caller.
#define INEURONPROCESSOBJECT_RELEASEDATA_IMPLE(className) IDataPointer className::ReleaseData() \
{ \
    m_Source->ReleaseProcessObject(); \
    IDataPointer tData(m_Source); \
    m_Source.reset(); \
    return tData; }

// Stock GetOutput(): lazily create the output object of type `typeName`.
#define INEURONPROCESSOBJECT_GETOUTPUT_IMPLE(className, typeName) ConstIDataPointer className::GetOutput() \
{ if (!m_Source) \
    m_Source = std::shared_ptr<typeName>(new typeName(this)); \
    return m_Source; }
//NG_SMART_POINTER_NEW(typeName, m_Source, this); \

#endif // INEURONPROCESSOBJECT_H
IbexOmega/CrazyCanvas
Dependencies/NoesisGUI/Providers/Src/LocalFontProvider.cpp
//////////////////////////////////////////////////////////////////////////////////////////////////// // NoesisGUI - http://www.noesisengine.com // Copyright (c) 2013 Noesis Technologies S.L. All Rights Reserved. //////////////////////////////////////////////////////////////////////////////////////////////////// #include <NsApp/LocalFontProvider.h> #include <NsCore/Find.h> #include <NsGui/Stream.h> using namespace Noesis; using namespace NoesisApp; //////////////////////////////////////////////////////////////////////////////////////////////////// LocalFontProvider::LocalFontProvider(const char* rootPath) { StrCopy(mRootPath, sizeof(mRootPath), rootPath); } //////////////////////////////////////////////////////////////////////////////////////////////////// void LocalFontProvider::ScanFolder(const char* folder) { char uri[512] = ""; if (!StrIsNullOrEmpty(mRootPath)) { StrCopy(uri, sizeof(uri), mRootPath); StrAppend(uri, sizeof(uri), "/"); } StrAppend(uri, sizeof(uri), folder); ScanFolder(uri, folder, ".ttf"); ScanFolder(uri, folder, ".otf"); ScanFolder(uri, folder, ".ttc"); } //////////////////////////////////////////////////////////////////////////////////////////////////// Ptr<Stream> LocalFontProvider::OpenFont(const char* folder, const char* filename) const { char uri[512] = ""; if (!StrIsNullOrEmpty(mRootPath)) { StrCopy(uri, sizeof(uri), mRootPath); StrAppend(uri, sizeof(uri), "/"); } StrAppend(uri, sizeof(uri), folder); StrAppend(uri, sizeof(uri), "/"); StrAppend(uri, sizeof(uri), filename); return OpenFileStream(uri); } //////////////////////////////////////////////////////////////////////////////////////////////////// void LocalFontProvider::ScanFolder(const char* path, const char* folder, const char* ext) { FindData findData; if (FindFirst(path, ext, findData)) { do { RegisterFont(folder, findData.filename); } while (FindNext(findData)); FindClose(findData); } }
jattenberg/parallax
src/main/java/com/dsi/parallax/ml/classifier/lazy/KDType.java
/*******************************************************************************
 * Copyright 2012 <NAME>. Not for re-use or redistribution.
 ******************************************************************************/
package com.dsi.parallax.ml.classifier.lazy;

/**
 * The distance function used for kd trees.
 */
public enum KDType {

    /** Manhattan (L1) distance. */
    MANHATTAN,

    /** Weighted variant of the Manhattan distance. */
    WEIGHTEDMANHATTAN,

    /** Euclidean (L2) distance. */
    EUCLIDIAN,

    /** Weighted variant of the Euclidean distance. */
    WEIGHTEDEUCLIDIAN
}
Dorllen/JDemo
bao_console/src/main/java/bao/action/base/Worker.java
package bao.action.base;

import bao.menu.BaseMenu;

/**
 * A unit of work invoked from a menu action.
 */
public interface Worker {

    /**
     * Performs this worker's task.
     *
     * @param list the menu this worker operates on
     */
    void work(BaseMenu list);
}
Matthew-Griffith/ringteki-client
test/server/cards/05-UotE/FromTheShadows.spec.js
// Integration tests for the "From the Shadows" event: it may only be played
// during a conflict, only when the opponent is more honorable, and puts a
// shinobi character from hand or provinces into the conflict dishonored.
describe('From the Shadows', function() {
    integration(function() {
        describe('When playing From the Shadows\'s', function() {
            beforeEach(function() {
                this.setupTest({
                    phase: 'conflict',
                    player1: {
                        inPlay: ['adept-of-shadows'],
                        dynastyDeck: ['young-rumormonger']
                    },
                    player2: {
                        inPlay: ['bayushi-aramoro'],
                        dynastyDeck: ['miya-mystic', 'disguised-protector'],
                        hand: ['from-the-shadows', 'shosuro-sadako', 'bayushi-kachiko']
                    }
                });
                this.adeptOfShadows = this.player1.findCardByName('adept-of-shadows');
                this.youngRumormonger = this.player1.findCardByName('young-rumormonger');
                this.bayushiAramoro = this.player2.findCardByName('bayushi-aramoro');
                this.disguisedProtector = this.player2.placeCardInProvince('disguised-protector', 'province 1');
                this.miyaMystic = this.player2.placeCardInProvince('miya-mystic', 'province 2');
                this.shosuroSadako = this.player2.findCardByName('shosuro-sadako');
                this.bayushiKachiko = this.player2.findCardByName('bayushi-kachiko');
            });

            it('should not be playable in a pre-conflict window', function() {
                this.player1.pass();
                this.player2.clickCard('from-the-shadows', 'hand');
                // Prompt unchanged: the click had no effect outside a conflict.
                expect(this.player2).toHavePrompt('Initiate an action');
            });

            describe('during a conflict', function() {
                beforeEach(function() {
                    this.noMoreActions();
                    this.initiateConflict({
                        type: 'military',
                        attackers: [this.adeptOfShadows],
                        defenders: []
                    });
                });

                // Honor comparisons are from player2's (the card owner's) side:
                // the card requires the OPPONENT (player1) to be more honorable.
                describe('when the opponent is less honorable', function() {
                    beforeEach(function() {
                        this.player1.honor = 8;
                        this.player2.honor = 10;
                        this.player2.clickCard('from-the-shadows', 'hand');
                    });

                    it('should not be playable', function() {
                        this.player2.clickCard('from-the-shadows', 'hand');
                        expect(this.player2).toHavePrompt('Conflict Action Window');
                    });
                });

                describe('when the opponent is equally honorable', function() {
                    beforeEach(function() {
                        this.player1.honor = 9;
                        this.player2.honor = 9;
                        this.player2.clickCard('from-the-shadows', 'hand');
                    });

                    it('should not be playable', function() {
                        this.player2.clickCard('from-the-shadows', 'hand');
                        expect(this.player2).toHavePrompt('Conflict Action Window');
                    });
                });

                describe('when the opponent is more honorable', function() {
                    beforeEach(function() {
                        this.player1.honor = 10;
                        this.player2.honor = 8;
                        this.player2.clickCard('from-the-shadows', 'hand');
                    });

                    it('should allow selecting a character', function() {
                        expect(this.player2).toHavePrompt('Choose a character');
                    });

                    // Only out-of-play shinobi belonging to the card's owner are legal targets.
                    it('should not allow selecting a non-shinobi dynasty character', function() {
                        this.player2.clickCard(this.miyaMystic);
                        expect(this.player2).toHavePrompt('Choose a character');
                    });

                    it('should not allow selecting a non-shinobi conflict character', function() {
                        this.player2.clickCard(this.bayushiKachiko);
                        expect(this.player2).toHavePrompt('Choose a character');
                    });

                    it('should not allow selecting a shinobi character that is in play', function() {
                        this.player2.clickCard(this.bayushiAramoro);
                        expect(this.player2).toHavePrompt('Choose a character');
                    });

                    it('should not allow selecting an opponents shinobi character', function() {
                        this.player2.clickCard(this.adeptOfShadows);
                        expect(this.player2).toHavePrompt('Choose a character');
                    });

                    it('should allow selecting a shinobi dynasty character', function() {
                        this.player2.clickCard(this.disguisedProtector);
                        expect(this.player1).toHavePrompt('Conflict Action Window');
                    });

                    it('should allow selecting a shinobi conflict character', function() {
                        this.player2.clickCard(this.shosuroSadako);
                        expect(this.player1).toHavePrompt('Conflict Action Window');
                    });

                    describe('when a legal character is selected', function() {
                        beforeEach(function() {
                            this.player2.clickCard(this.disguisedProtector);
                        });

                        it('should put the character into play in the conflict', function() {
                            expect(this.disguisedProtector.inConflict).toBe(true);
                            expect(this.game.currentConflict.defenders).toContain(this.disguisedProtector);
                        });

                        it('the character should enter play dishonored', function() {
                            expect(this.disguisedProtector.isDishonored).toBe(true);
                        });

                        describe('if Young Rumormonger is in play', function() {
                            beforeEach(function() {
                                this.youngRumormonger = this.player1.placeCardInProvince('young-rumormonger', 'province 1');
                                this.player1.putIntoPlay(this.youngRumormonger);
                            });

                            // The forced dishonor is not an honor "token move" that
                            // Young Rumormonger can interrupt.
                            it('should not allow Young Rumormonger to use his ability', function() {
                                expect(this.player1.formatPrompt()).not.toContain('interrupt');
                                expect(this.player1).not.toBeAbleToSelect(this.youngRumormonger);
                                expect(this.player1).toHavePrompt('Conflict Action Window');
                                expect(this.player2).toHavePrompt('Waiting for opponent to take an action or pass');
                            });
                        });
                    });

                    describe('when Shosuro Sadako is chosen', function() {
                        beforeEach(function() {
                            this.player2.clickCard(this.shosuroSadako);
                        });

                        it('should put Shosuro Sadako into play dishonored', function() {
                            expect(this.shosuroSadako.inConflict).toBe(true);
                            expect(this.game.currentConflict.defenders).toContain(this.shosuroSadako);
                            expect(this.shosuroSadako.isDishonored).toBe(true);
                            expect(this.shosuroSadako.getMilitarySkill()).toBe(4);
                        });
                    });
                });
            });
        });
    });
});
SocialGouv/ecollecte
control/tests/test_send_questionnaire_file.py
from pytest import mark
from django.shortcuts import reverse

from tests import factories, utils

pytestmark = mark.django_db


class SendQuestionnaireRunner():
    """Creates a published questionnaire, logs in as a user audited by its
    control, and requests the questionnaire file endpoint."""

    def __init__(self, client):
        questionnaire = factories.QuestionnaireFactory(is_draft=False)
        # Expected filename, used to verify the Content-Disposition header.
        self.filename = questionnaire.basename
        user = utils.make_audited_user(questionnaire.control)
        utils.login(client, user=user)
        url = reverse('send-questionnaire-file', args=[questionnaire.id])
        self.response = client.get(url)


def test_download_questionnaire_file_works_if_the_control_is_associated_with_the_user(client):
    """An audited user of the right control can download the file."""
    runner = SendQuestionnaireRunner(client)
    assert runner.response.status_code == 200


def test_download_questionnaire_file_has_right_filename(client):
    """The response advertises the questionnaire's filename for download."""
    runner = SendQuestionnaireRunner(client)
    assert runner.response.has_header('Content-Disposition')
    assert runner.response['Content-Disposition'].find(runner.filename) > -1


def test_download_questionnaire_file_fails_if_the_control_is_not_associated_with_the_user(client):
    """A user audited by an unrelated control is denied the download."""
    questionnaire = factories.QuestionnaireFactory(is_draft=False)
    unauthorized_control = factories.ControlFactory()
    assert unauthorized_control != questionnaire.control
    user = utils.make_audited_user(unauthorized_control)
    utils.login(client, user=user)
    url = reverse('send-questionnaire-file', args=[questionnaire.id])
    response = client.get(url)
    assert response.status_code != 200


def test_inspector_can_download_questionnaire_file_if_draft(client):
    """Inspectors may access the file even while the questionnaire is a draft."""
    questionnaire = factories.QuestionnaireFactory(is_draft=True)
    user = utils.make_inspector_user(questionnaire.control)
    utils.login(client, user=user)
    url = reverse('send-questionnaire-file', args=[questionnaire.id])
    response = client.get(url)
    assert response.status_code == 200


def test_audited_cannot_download_questionnaire_file_if_draft(client):
    """Audited users cannot see draft questionnaires (endpoint returns 404)."""
    questionnaire = factories.QuestionnaireFactory(is_draft=True)
    user = utils.make_audited_user(questionnaire.control)
    utils.login(client, user=user)
    url = reverse('send-questionnaire-file', args=[questionnaire.id])
    response = client.get(url)
    assert response.status_code == 404
Hess-Gregory/cms_server
app/components/about/aboutRouter.js
<gh_stars>0 import about from './aboutController'; const router = require('express').Router() router.get('/about', about.getAbouts) router.post('/about', about.storeAbout) router.get('/about/:aboutId', about.findAbout) router.put('/about/:aboutId', about.updateAbout) router.delete('/about/:aboutId', about.deleteAbout) module.exports = router
WJW53/WebNotes
WebLevelOne/14.包管理器/2. [重点]npm/2-7. 运行环境配置/test.js
<gh_stars>1-10 //读取package.json文件中的版本号 const packageConfig = require("./package.json"); console.log(packageConfig.version); console.log(packageConfig.a);
danterusdev/Sol-Client
game/src/main/java/me/mcblueparrot/client/util/data/Colour.java
package me.mcblueparrot.client.util.data;

import java.awt.Color;

import com.google.gson.annotations.Expose;

import lombok.Getter;
import me.mcblueparrot.client.util.Utils;
import net.minecraft.util.MathHelper;

/**
 * Immutable ARGB colour packed into a single {@code int} as 0xAARRGGBB.
 * All channel accessors mask out their 8-bit component, so returned values
 * are always in [0, 255].
 */
public class Colour {

	/** Packed 0xAARRGGBB value (exposed for Gson serialisation). */
	@Getter
	@Expose
	private int value;

	public static final Colour WHITE = new Colour(255, 255, 255);
	public static final Colour BLACK = new Colour(0, 0, 0);
	public static final Colour RED = new Colour(255, 0, 0);
	public static final Colour BLUE = new Colour(0, 150, 255);
	public static final Colour WHITE_128 = WHITE.withAlpha(128);
	public static final Colour BLACK_128 = BLACK.withAlpha(128);

	/**
	 * @param value packed 0xAARRGGBB colour
	 */
	public Colour(int value) {
		this.value = value;
		checkRange();
	}

	/**
	 * Builds a colour from individual channels. Note that each channel is
	 * masked to 8 bits before packing, so out-of-range inputs wrap silently
	 * rather than throwing.
	 */
	public Colour(int red, int green, int blue, int alpha) {
		this(((alpha & 0xFF) << 24) | ((red & 0xFF) << 16) | ((green & 0xFF) << 8) | (blue & 0xFF));
	}

	/** Builds a fully opaque colour (alpha = 255). */
	public Colour(int red, int green, int blue) {
		this(red, green, blue, 255);
	}

	/** @return a copy of this colour with the given alpha channel. */
	public Colour withAlpha(int alpha) {
		return new Colour(getRed(), getGreen(), getBlue(), alpha);
	}

	private void checkRange() {
		checkRange(getRed(), "red");
		checkRange(getGreen(), "green");
		// Fixed: previously this line re-validated green and never checked blue.
		checkRange(getBlue(), "blue");
		checkRange(getAlpha(), "alpha");
	}

	/** @throws IllegalStateException if the channel value is outside [0, 255] */
	private void checkRange(int value, String name) {
		if(value > 255 || value < 0) {
			throw new IllegalStateException("Invalid range for " + name + " (" + value + ")");
		}
	}

	public int getRed() {
		return (value >> 16) & 0xFF;
	}

	public int getGreen() {
		return (value >> 8) & 0xFF;
	}

	public int getBlue() {
		return value & 0xFF;
	}

	public int getAlpha() {
		return (value >> 24) & 0xFF;
	}

	public float getRedFloat() {
		return getRed() / 255F;
	}

	public float getGreenFloat() {
		return getGreen() / 255F;
	}

	public float getBlueFloat() {
		return getBlue() / 255F;
	}

	public float getAlphaFloat() {
		return getAlpha() / 255F;
	}

	/** @return the equivalent AWT colour, alpha included. */
	public Color toAWT() {
		return new Color(value, true);
	}

	/** @return {red, green, blue, alpha} as an array. */
	public int[] getComponents() {
		return new int[] {getRed(), getGreen(), getBlue(), getAlpha()};
	}

	/**
	 * Scales the RGB channels by {@code factor} (alpha unchanged), clamping
	 * each channel to [0, 255].
	 */
	public Colour multiply(float factor) {
		return new Colour(clamp((int) (getRed() * factor)), clamp((int) (getGreen() * factor)),
				clamp((int) (getBlue() * factor)), getAlpha());
	}

	private int clamp(int channel) {
		return MathHelper.clamp_int(channel, 0, 255);
	}

	/** Adds {@code amount} to each RGB channel (alpha unchanged), clamped. */
	public Colour add(int amount) {
		return new Colour(clamp(getRed() + amount), clamp(getGreen() + amount),
				clamp(getBlue() + amount), getAlpha());
	}

	/** @return the packed value of this colour's shadow variant (see Utils). */
	public int getShadowValue() {
		return Utils.getShadowColour(getValue());
	}

	/** @return this colour's shadow variant. */
	public Colour getShadow() {
		return new Colour(getShadowValue());
	}

}
tmtsoftware/csw-prototype
javacsw/src/main/scala/javacsw/services/cs/akka/JConfigServiceClient.scala
package javacsw.services.cs.akka

import java.io.File
import java.util.Optional
import java.util.concurrent.CompletableFuture

import javacsw.services.cs.core.{JBlockingConfigManager, JConfigManager}
import akka.actor.{ActorRef, ActorRefFactory, ActorSystem}
import akka.util.Timeout
import com.typesafe.config.{Config, ConfigFactory, ConfigResolveOptions}
import csw.services.cs.akka.{ConfigServiceActor, ConfigServiceClient}
import csw.services.cs.core.{ConfigData, ConfigId}

import scala.compat.java8.FutureConverters._
import scala.compat.java8.OptionConverters._
import scala.concurrent.Future
import scala.concurrent.duration._

/**
 * A non-blocking Java client for the config service.
 * Delegates everything to the Scala [[ConfigServiceClient]] via [[JConfigManager]].
 *
 * @param client the scala implementation of the cs client
 * @param context the akka actor context (system or context)
 */
class JConfigServiceClient(client: ConfigServiceClient)(implicit context: ActorRefFactory) extends JConfigManager(client)

/**
 * Static utility methods for use by Java applications.
 * Each method wraps the corresponding Scala helper, converting
 * `Option`/`Future` to `Optional`/`CompletableFuture`.
 */
object JConfigServiceClient {
  /**
   * Convenience method that gets the contents of the given file from the config service
   * by first looking up the config service with the location service and
   * then fetching the contents of the file using a config service client.
   * (Use only for small files: the whole contents is returned in memory.)
   *
   * @param path the path of the file in the config service
   * @param id optional id of a specific version of the file
   * @param system actor system needed to access config service
   * @param timeout time to wait for a reply
   * @return the future contents of the file as a ConfigData object, if found
   */
  def getFromConfigService(path: File, id: Optional[ConfigId], system: ActorSystem, timeout: Timeout): CompletableFuture[Optional[ConfigData]] = {
    import system.dispatcher
    ConfigServiceClient.getFromConfigService(path, id.asScala)(system, timeout).map(_.asJava).toJava.toCompletableFuture
  }

  /**
   * Convenience method that gets the contents of the given file from the config service
   * by first looking up the config service with the location service and
   * then fetching the contents of the file using a config service client.
   * (Use only for small files.)
   *
   * @param path the path of the file in the config service
   * @param id optional id of a specific version of the file
   * @param system actor system needed to access config service
   * @param timeout time to wait for a reply
   * @return the future contents of the file as a string, if the file was found
   */
  def getStringFromConfigService(path: File, id: Optional[ConfigId], system: ActorSystem, timeout: Timeout): CompletableFuture[Optional[String]] = {
    import system.dispatcher
    ConfigServiceClient.getStringFromConfigService(path, id.asScala)(system, timeout).map(_.asJava).toJava.toCompletableFuture
  }

  /**
   * Convenience method that gets a Typesafe Config from the config service
   * by first looking up the config service with the location service and
   * then fetching the contents of the given file using a config service client.
   * Finally, the file contents is parsed as a Typesafe config file and the
   * Config object returned.
   *
   * @param path the path of the file in the config service
   * @param id optional id of a specific version of the file
   * @param resource optional resource file to use in case the file can't be retrieved from the config service for some reason
   * @param system actor system needed to access config service
   * @param timeout time to wait for a reply
   * @return the future config, parsed from the file
   */
  def getConfigFromConfigService(path: File, id: Optional[ConfigId], resource: Optional[File], system: ActorSystem, timeout: Timeout): CompletableFuture[Optional[Config]] = {
    import system.dispatcher
    ConfigServiceClient.getConfigFromConfigService(path, id.asScala, resource.asScala)(system, timeout).map(_.asJava).toJava.toCompletableFuture
  }

  /**
   * Convenience method that stores the contents of a given Config object in the config service.
   *
   * @param path the path the file should have in the config service
   * @param config the config to store
   * @param system actor system needed to access config service
   * @param timeout time to wait for a reply
   * @return the future id of the stored config
   */
  def saveConfigToConfigService(path: File, config: Config, system: ActorSystem, timeout: Timeout): CompletableFuture[ConfigId] =
    ConfigServiceClient.saveConfigToConfigService(path, config)(system, timeout).toJava.toCompletableFuture
}

/**
 * A blocking Java client for the config service.
 *
 * @param client the scala implementation of the cs client
 * @param context the akka actor context (system or context)
 */
class JBlockingConfigServiceClient(client: ConfigServiceClient)(implicit context: ActorRefFactory) extends JBlockingConfigManager(client)

/**
 * Contains Java API helper methods related to the Scala ConfigServiceActor class.
 */
object JConfigService {
  /**
   * Java API: Locate the config service with the given name using the location service.
   *
   * @param name the name the config service was registered with
   * @param system the actor system to use
   * @param timeout amount of time to allow for looking up config service with location service
   * @return the future ActorRef for the config service (may fail the future if not found)
   */
  def locateConfigService(name: String, system: ActorSystem, timeout: Timeout): CompletableFuture[ActorRef] =
    ConfigServiceActor.locateConfigService(name)(system, timeout).toJava.toCompletableFuture

  /**
   * Java API: Locate the default config service using the location service.
   *
   * @param system the actor system to use
   * @param timeout amount of time to allow for the lookup
   * @return the future ActorRef for the config service (may fail the future if not found)
   */
  def locateConfigService(system: ActorSystem, timeout: Timeout): CompletableFuture[ActorRef] =
    ConfigServiceActor.locateConfigService()(system, timeout).toJava.toCompletableFuture
}
joelliusp/SpaceHabit
SpaceHabitRPG/Models/Zone.py
from StoryModels import StoryModels
from AllDBFields import ZoneDBFields
from ZoneDefinitions import ZoneDefinition
from ZoneDefinitions import AllZones
from bson.objectid import ObjectId
from OrphanedModelException import OrphanedModelException
import DatabaseLayer
import random

class Zone(StoryModels):
  """
  This is a wrapper for the zone data from the database.
  This is different from the other models in that Zone is used as a part
  of the hero model (it is embedded in the hero document).
  """

  def __init__(self, definitionKey):
    # note: previously wrote "return super().__init__(...)"; __init__ must
    # return None, so the plain call is equivalent and clearer.
    super().__init__(definitionKey)

  @classmethod
  def construct_model_from_pk(cls, pk):
    """
    args:
      pk: primary key used to load this model from the database.
    return:
      an instance of the model on which this is called
    """
    collection = DatabaseLayer.get_table(cls.get_dbFields().COLLECTION_NAME)
    obj = cls(None)
    obj.dict = collection.find_one({cls.get_dbFields().PK_KEY:pk})
    return obj

  def save_changes(self,heroId):
    """
    Persists pending changes. If this zone already has a primary key, only
    the accumulated changes are written to the owning hero document;
    otherwise the zone is inserted and nested into the hero document.

    args:
      heroId: this needs to be a pymongo ObjectId. It is used as an
        owner relationship to a hero model
    """
    from AllDBFields import HeroDbFields
    ownerCollection = DatabaseLayer.get_table(self.get_dbFields().OWNER_COLLECTION)
    if self.get_pk():
      if self._changes:
        ownerCollection.update_one({self.get_dbFields().PK_KEY:heroId},{'$set':self._changes})
    else:
      collection = DatabaseLayer.get_table(self.get_dbFields().COLLECTION_NAME)
      pk = collection.insert_one(self.dict).inserted_id
      self.dict[self.get_dbFields().PK_KEY] = pk
      nestedZone = {HeroDbFields.ZONE:self.dict}
      ownerCollection.update_one({self.get_dbFields().PK_KEY:heroId},{'$set':nestedZone})
    self._changes = {}

  @classmethod
  def get_dbFields(cls):
    return ZoneDBFields

  def get_zoneName(self):
    # Lazily resolve the zone definition from its key.
    if not self._definition:
      self._definition = ZoneDefinition(self.definitionKey)
    return self._definition.get_name()

  def get_fullName(self):
    # e.g. "Dark Forest Alpha"; suffix may be empty, hence the rstrip.
    return "{0} {1}".format(self.get_zoneName(),self.suffix).rstrip()

  @property
  def suffix(self):
    if self.get_dbFields().SUFFIX in self.dict:
      return self.dict[self.get_dbFields().SUFFIX]
    else:
      return ""

  @suffix.setter
  def suffix(self,value):
    self.set_common_story_property(self.get_dbFields().SUFFIX,value)

  @property
  def monstersKilled(self):
    # NOTE(review): unlike `suffix`, this assumes the key is always present
    # (a missing key would raise KeyError, not return 0) - confirm intent.
    if self.dict[self.get_dbFields().MONSTERS_KILLED]:
      return self.dict[self.get_dbFields().MONSTERS_KILLED]
    else:
      return 0

  @monstersKilled.setter
  def monstersKilled(self,value):
    self.set_common_story_property(self.get_dbFields().MONSTERS_KILLED,value)

  @property
  def maxMonsters(self):
    return self.dict[self.get_dbFields().MAX_MONSTERS]

  @maxMonsters.setter
  def maxMonsters(self,value):
    self.set_common_story_property(self.get_dbFields().MAX_MONSTERS,value)

  @property
  def lvl(self):
    return self.dict[self.get_dbFields().LVL]

  @lvl.setter
  def lvl(self,value):
    self.set_common_story_property(self.get_dbFields().LVL,value)

  def get_description(self):
    if not self._definition:
      self._definition = ZoneDefinition(self.definitionKey)
    return self._definition.get_description()

  @property
  def previousZoneReferencePK(self):
    if self.get_dbFields().PREVIOUS_ZONE_REFERENCE_PK in self.dict:
      return self.dict[self.get_dbFields().PREVIOUS_ZONE_REFERENCE_PK]
    return None

  @previousZoneReferencePK.setter
  def previousZoneReferencePK(self,value):
    self.set_common_story_property(self.get_dbFields().PREVIOUS_ZONE_REFERENCE_PK,value)

  @property
  def nextZoneReferenceList(self):
    return self.dict[self.get_dbFields().NEXT_ZONE_REFERENCE_LIST]

  @nextZoneReferenceList.setter
  def nextZoneReferenceList(self,value):
    self.set_common_story_property(self.get_dbFields().NEXT_ZONE_REFERENCE_LIST,value)

  @property
  def alias(self):
    # Not implemented yet. The storage code that used to follow the raise
    # was unreachable and has been removed.
    raise NotImplementedError()

  @alias.setter
  def alias(self,value):
    # Not implemented yet (unreachable assignment code removed).
    raise NotImplementedError()

  @property
  def definitionKey(self):
    return self.dict[self.get_dbFields().DEFINITION_KEY]

  @definitionKey.setter
  def definitionKey(self,value):
    self.set_common_story_property(self.get_dbFields().DEFINITION_KEY,value)

  @classmethod
  def get_home_zone(cls):
    """
    This probably only needs to be called when a new hero is being created
    for a user.

    returns:
      a model of type Zone with starting details
    """
    from AllDBFields import ZoneDefinitionFields
    zone = Zone(ZoneDefinitionFields.HOME)
    zone.maxMonsters = 0
    # NOTE(review): skillLvl is not a property defined on Zone in this file;
    # presumably inherited or a plain attribute - confirm against StoryModels.
    zone.skillLvl = 0
    return zone

  @classmethod
  def construct_next_zone_choice(cls,heroLvl,vistiedZones,matchHeroLvl = False):
    """
    Generates a zone with a unique name and a random lvl.

    args:
      heroLvl: this should be a positive integer greater than 1
      vistiedZones: this should be a dict (note: parameter name keeps its
        historical misspelling for keyword-call compatibility). The dict is
        used to keep track of which name/suffix combinations have popped up
        already; the entry for the chosen zone key is incremented.
      matchHeroLvl: set this to True for the first level. If True, the zone
        difficulty level will perfectly match the hero's level rather than
        approximate it.

    returns:
      a dict of zone fields (not a Zone instance)
    """
    import GeneralUtilities as gu
    selectedZoneKey = Zone.get_random_zone_definitionKey(heroLvl)
    definition = ZoneDefinition(selectedZoneKey)
    zone = {ZoneDBFields.DEFINITION_KEY:selectedZoneKey,ZoneDBFields.LVL: heroLvl,
        ZoneDBFields.MAX_MONSTERS: random.randint(5,15),ZoneDBFields.NAME: definition.get_name(),
        ZoneDBFields.DESCRIPTION: definition.get_description()}
    if selectedZoneKey in vistiedZones: #if we've visited it before
      zone[ZoneDBFields.SUFFIX] = Zone.generate_full_zone_name_suffix(vistiedZones[selectedZoneKey])
      zone[ZoneDBFields.FULL_NAME] = \
        "{0} {1}".format(zone[ZoneDBFields.NAME],zone[ZoneDBFields.SUFFIX]).rstrip()
      vistiedZones[selectedZoneKey] += 1
    else:
      zone[ZoneDBFields.FULL_NAME] = zone[ZoneDBFields.NAME]
      vistiedZones[selectedZoneKey] = 1
    if not matchHeroLvl:
      zone[ZoneDBFields.LVL] = gu.calculate_lvl(heroLvl,10)
    return zone

  @classmethod
  def get_random_zone_definitionKey(cls,heroLvl):
    """
    Selects a random dictionary key to be used with ZoneDefinitions.

    args:
      heroLvl: this should be a positive integer greater than 1
    returns:
      a string which is a dict key
    """
    zoneGroupKeys = Zone.get_unlocked_zone_groupKeys(heroLvl)
    selectedZoneGroupKey = random.choice(zoneGroupKeys)
    zoneList = list(AllZones[selectedZoneGroupKey].keys())
    return random.choice(zoneList)

  @classmethod
  def generate_full_zone_name_suffix(cls,visitCount):
    """
    Each time we visit a particular zone type, we don't want it to have the
    same exact name as last time. To do this, we add a suffix to the name.
    This generates a suffix based on the number of times that zone has been
    visited.

    args:
      visitCount: the number of times the hero character has visited a zone
    returns:
      a suffix string to be appended to the zone name ("" for first visit)
    """
    if visitCount < 1:
      return ""
    symbols = Zone.get_symbols()
    hugeVisitCountResult = Zone.special_action_for_extremely_huge_visitCounts(visitCount,symbols)
    numericSuffix = hugeVisitCountResult['numericSuffix']
    visitCount = hugeVisitCountResult['visitCount']
    adjustedVisitCount = Zone.skip_powers_of_base_in_number(visitCount,len(symbols))
    suffix = Zone.get_symbol_suffix(adjustedVisitCount,symbols)
    if numericSuffix > 0:
      suffix += str(numericSuffix)
    return suffix.strip()

  @classmethod
  def special_action_for_extremely_huge_visitCounts(cls,visitCount,symbols):
    """
    Produces a special suffix component for extremely huge visit counts
    (beyond what two symbol positions can express) and shrinks the number
    to play nicely with the normal suffix generating process.

    args:
      visitCount: the number of times the hero character has visited a zone
      symbols: the list of symbols. The first element is replaced with a
        magic marker in the huge-count case.
    return:
      a dict with the numericSuffix value and the updated visitCount
    """
    numericSuffix = 0
    if visitCount > (len(symbols)-1) * len(symbols):
      symbols[0] = "?4815162342"
      numericSuffix = Zone.get_numeric_suffix(visitCount,len(symbols))
      visitCount = Zone.adjust_visitCount_for_extremely_huge_counts(visitCount,len(symbols))
    return {'numericSuffix':numericSuffix,'visitCount':visitCount}

  @classmethod
  def get_symbol_suffix(cls,visitCount,symbols):
    """
    Converts a number to a suffix. Think of it as converting a number to a
    base-len(symbols) positional system.

    args:
      visitCount: the number of times the hero character has visited a zone
      symbols: the list of symbols.
    return:
      a string to be the zone suffix
    """
    suffix = ""
    while visitCount > 0:
      r = visitCount % len(symbols)
      visitCount //= len(symbols)
      suffix = (symbols[r] + " " + suffix)
    return suffix

  @classmethod
  def adjust_visitCount_for_extremely_huge_counts(cls,visitCount,symbolsLen):
    """
    args:
      visitCount: the number of times the hero character has visited a zone
      symbolsLen: the count of all the available symbols to be made into a suffix
    """
    return visitCount % ((symbolsLen-1) * symbolsLen)

  @classmethod
  def get_numeric_suffix(cls,visitCount,symbolsLen):
    """
    args:
      visitCount: the number of times the hero character has visited a zone
      symbolsLen: the count of all the available symbols to be made into a suffix
    """
    #the -1 on the first array length is to account for the single symbol range of items
    return visitCount // ((symbolsLen-1) * symbolsLen) + 1 #+1 because the 1 suffix would be redundant

  @classmethod
  def get_symbols(cls):
    """
    If you add any items to symbols, please adjust the unit test to account
    for that.
    """
    symbols =["","Alpha", "Beta","Cain","Delta", #4
      "Epsilon","Foxtrot","September","October", #8
      "November","Kilo","Juliett","Romeo","Silver","Deckard", #14
      "Sierra","Tango","Zeta","Theta","July","Ludwig","Tyrell", #21
      "Lambda","Mu","London","Victor","Quintin","Gold", #27
      "Whiskey","Xray","Zulu","Pi","Rho","Antilles","Blanca", #34
      "Sigma","Tau","India","Hector","Quebec","Waltz","Sapphire", #41
      "Tokyo","Ramesses","Washington","Darius","Emerald","Midgard", #47
      "Futura","Charlotte","Flanders","Berlin","Onion","Ruby", #53
      "David","Pizza","Lazlo","Kong","Jerico","Diamond", #59
      "Black","White","Olaf","Biggs","Wedge","Tyrannus", #65
      "Richter","Medusa","Swan","Gemini","Noir","Xerxes",#71
      "TNT","Plutonia","Cerberus","Tiberius", #75
      "Arcturus","Prime","Tarsonis","Babylon","Sparta",#80
      "Atlanta","Yutani","Python","Ridley","Midway", #85
      "Bismark","Dextera","Dominus","Jejunum", #89
      "Superior","Distal","Eurebus","Indigo", #93
      "Xs","Rex","Titan","Zen","Apex","Omega","Zed"] #100
    return symbols

  @classmethod
  def skip_powers_of_base_in_number(cls,num,base):
    """
    Numbers naturally want to follow this pattern:
      0,A,B,C,...,Y,Z,A0,AA,AB,AC,...,AY,AZ,B0,BA,BB,BC
    But I want the zone suffix naming system to follow this pattern:
      0,A,B,C,...,Y,Z,AA,AB,AC,...,AY,AZ,BA,BB,BC,...
    This function adjusts numbers to fit the wanted pattern, i.e. without
    the proverbial multiples of 10. The accuracy of this function becomes
    unreliable after base^2.

    args:
      num: this is the number that we're offsetting.
      base: an integer. multiples of this number will be skipped
    returns:
      a number that's been offset for the base occurrences skipped over
    """
    if base < 1 or not float.is_integer(float(base)):
      raise ValueError("Base needs to be a positive non-zero integer")
    if not float.is_integer(float(num)):
      raise ValueError("num needs to be an integer and not a floating number")
    isNegative = False
    if num < 0:
      num *= -1
      isNegative = True
    adjusterNum = num + (num // base)
    result = num + (adjusterNum // base)
    # Bug fix: isNegative was previously computed but never applied, so
    # negative inputs silently came back positive. Restore the sign.
    return -result if isNegative else result

  @classmethod
  def get_unlocked_zone_groupKeys(cls,heroLvl):
    """
    Gets the list of available zone groups that can be selected depending
    on the hero's level.

    args:
      heroLvl: this should be an integer
    returns:
      a list of dict keys to the AllZones dict.
    """
    if heroLvl < 1:
      return []
    availableZonesGroups = []
    availableZonesGroups.append("lvl1Zones")
    if heroLvl >= 5:
      availableZonesGroups.append("lvl5Zones")
    if heroLvl >= 10:
      availableZonesGroups.append("lvl10Zones")
    if heroLvl >= 15:
      availableZonesGroups.append("lvl15Zones")
    if heroLvl >= 20:
      availableZonesGroups.append("lvl20Zones")
    if heroLvl >= 25:
      availableZonesGroups.append("lvl25Zones")
    if heroLvl >= 30:
      availableZonesGroups.append("lvl30Zones")
    return availableZonesGroups
vlehtola/questmud
lib/guilds/spells/abjuration/_kyo_brr_tdr.c
// Changed spell name from remove protections to purge by C. 20051028 // Idea: removes ALL magic effects on the target (both good and bad) resolve(int bonus, string target) { object ob,sphere; string target2; if(target) ob = present(target, environment(this_player())); if (!ob) { write("No such person here.\n"); return 1; } if(!living(ob)) { write(target+" is not a valid target.\n"); return 1; } target2 = target; if(ob == this_player()) { target = "yourself"; target2 = this_player()->query_objective()+"self"; } write("You cast purge at "+target+".\n"); say(this_player()->query_name()+" purges all magic effects from "+target2+".\n",ob); tell_object(ob, "All the magic effects affecting you are purged by "+this_player()->query_name()+".\n"); ob->end_sphere_of_protection(); ob->end_protection(); ob->end_stone_skin(); ob->end_stun_res(); ob->end_vulnerability(); ob->end_vulnerability(); ob->end_shield(); ob->end_haste(1); /* no ep loss //Celtron */ // Added by C. removes magic bane effect & 2 stat boosts ob->end_stat_boost(); ob->end_stat_boost(); sphere = present("magic_bane", ob); if(sphere) sphere->end_magic_bane(); // the following spell effects should be modified so that they work WITHOUT shadow()! C ob->end_anticounter(); ob->end_antireflect(); ob->end_mana_shield(); return 1; }
MaartenS11/Team-Fortress-Invasion
mp/src/old/cl_dll/idebugoverlaypanel.h
<filename>mp/src/old/cl_dll/idebugoverlaypanel.h //======== (C) Copyright 1999, 2000 Valve, L.L.C. All rights reserved. ======== // // The copyright to the contents herein is the property of Valve, L.L.C. // The contents may be used and/or copied only with the written permission of // Valve, L.L.C., or in accordance with the terms and conditions stipulated in // the agreement/contract under which the contents have been supplied. // // Purpose: // // $Workfile: $ // $Date: $ // //----------------------------------------------------------------------------- // $Log: $ // // $NoKeywords: $ //============================================================================= #if !defined( IDEBUGOVERLAYPANEL_H ) #define IDEBUGOVERLAYPANEL_H #ifdef _WIN32 #pragma once #endif #include <vgui/VGUI.h> namespace vgui { class Panel; } class IDebugOverlayPanel { public: virtual void Create( vgui::VPANEL parent ) = 0; virtual void Destroy( void ) = 0; }; extern IDebugOverlayPanel *debugoverlaypanel; #endif // IDEBUGOVERLAYPANEL_H
RWTH-OS/MP-MPICH
rexec/RexecShell/Main.cpp
<reponame>RWTH-OS/MP-MPICH<gh_stars>0 //--------------------------------------------------------------------------- #define UNICODE #include <winsock2.h> #include <vcl\vcl.h> #pragma hdrstop #include "Main.h" #include "Client.h" #include "NetState.h" #include "ConfigDlg.h" #include "Parform.h" #include "Exclude.h" #include "About.h" #include "LoginData.h" #include "Environment.h" #include "Include.h" #include "RexHelp.h" #include <stdio.h> //--------------------------------------------------------------------------- #pragma resource "*.dfm" TMainWindow *MainWindow; CRITICAL_SECTION CS; //--------------------------------------------------------------------------- // Message handlers //--------------------------------------------------------------------------- // Msg REFRESH_FINISH void __fastcall TMainWindow::ThreadExit(TMessage &Message) { //PluginCombo->Enabled = true; Setglobalaccount1->Enabled = true; SpeedButton1->Enabled = true; SpeedButton4->Enabled = true; Auswaehlen->Enabled = true; LoadConfig1->Enabled = true; ChangePlugin->Enabled = true; Screen->Cursor=crDefault; Hint = "Ready"; } // Msg REFRESH_START void __fastcall TMainWindow::ThreadStart(TMessage &Message) { //PluginCombo->Enabled = false; Setglobalaccount1->Enabled = false; SpeedButton1->Enabled = false; SpeedButton4->Enabled = false; Auswaehlen->Enabled = false; LoadConfig1->Enabled = false; ChangePlugin->Enabled = false; //Show Background-Activity Cursor during query Screen->Cursor=crAppStart; Hint = "Querying hosts"; } //--------------------------------------------------------------------------- void __fastcall TMainWindow::FormCreate(TObject *Sender) { InitializeCriticalSection(&CS); WSADATA wsaData,*lpwsaData; char *domain,user[256]; lpwsaData = &wsaData; WORD wVersionRequested = MAKEWORD(1, 1); int nResult = WSAStartup(wVersionRequested, lpwsaData); if (nResult != 0) { Application->MessageBox("WSAStartup failed","Error",MB_OK|MB_ICONERROR); return; } if (LOBYTE(lpwsaData->wVersion) != 1 || 
HIBYTE(lpwsaData->wVersion) != 1) { WSACleanup(); Application->MessageBox("Wrong wsock version","Error",MB_OK|MB_ICONERROR); return; } DWORD size=256; domain=getenv("USERDOMAIN"); GetUserNameA(user,&size); sprintf(name,"%s/%s",domain,user); Application->ShowHint = true; Application->OnHint = ShowHint; ShownBefore=false; Servers = new CServers(FALSE); PluginManager.LoadPlugins(); CreatePluginMenu(); PluginManager.RegisterClientWindow(Handle); Servers->RegisterClientWindow(Handle); //read checked Menue items from ini-file BOOL retval = true; DWORD errcode = NO_ERROR; char IniFileName[1024]; AnsiString inivalue; if (!GetFullExeName(IniFileName)) strcpy(IniFileName,"RexecShell.ini"); else { ChangeFileExt(IniFileName,".ini") ; } IniFile = new TIniFile(IniFileName); inivalue = IniFile->ReadString("RexecShell" , "SetRPCEncryption","Default"); if (inivalue == FALSESTR) RPCEncryption->Checked = false; SetRPCEncryption(RPCEncryption->Checked); inivalue = IniFile->ReadString("RexecShell" , "checkprofilevalidity","Default"); if (inivalue == FALSESTR) checkprofilevalidity->Checked = false; CheckUserAccountOnChange = checkprofilevalidity->Checked; } //--------------------------------------------------------------------------- __fastcall TMainWindow::TMainWindow(TComponent* Owner) : TForm(Owner) { /*InitializeCriticalSection(&CS); WSADATA wsaData,*lpwsaData; char *domain,user[256]; lpwsaData = &wsaData; WORD wVersionRequested = MAKEWORD(1, 1); int nResult = WSAStartup(wVersionRequested, lpwsaData); if (nResult != 0) { Application->MessageBox("WSAStartup failed","Error",MB_OK|MB_ICONERROR); return; } if (LOBYTE(lpwsaData->wVersion) != 1 || HIBYTE(lpwsaData->wVersion) != 1) { WSACleanup(); Application->MessageBox("Wrong wsock version","Error",MB_OK|MB_ICONERROR); return; } DWORD size=256; domain=getenv("USERDOMAIN"); GetUserNameA(user,&size); sprintf(name,"%s/%s",domain,user); Application->ShowHint = true; Application->OnHint = ShowHint; ShownBefore=false;*/ } __fastcall 
TMainWindow::~TMainWindow() { WSACleanup(); DeleteCriticalSection(&CS); } void __fastcall TMainWindow::ShowHint(TObject * Sender) { StatusBar1->SimpleText = Application->Hint; } //--------------------------------------------------------------------------- void __fastcall TMainWindow::BeendenClick(TObject *Sender) { Servers->StopProcessing(); Close(); } //--------------------------------------------------------------------------- void __fastcall TMainWindow::SpeedButton1Click(TObject *Sender) { NodesBox->Invalidate(); if(ConfDlg->ShowModal()!=mrCancel) { NodesBox->Items->Assign(ConfDlg->GetHostList()); Run1->Enabled=(NodesBox->Items->Count>0); StartButton->Enabled=Run1->Enabled; } NodesBox->Invalidate(); } //--------------------------------------------------------------------------- void __fastcall TMainWindow::StartButtonClick(TObject *Sender) { TClientForm *d; CHostRef *Ref; bool started = false; TStrings *Procs; Procs = ConfDlg->GetHostList(); for (int i=0;i<Procs->Count;++i) { Ref=(CHostRef*)(Procs->Objects[i]); if(!Ref || !Ref->IsValidConfig()) continue; d=Ref->GetWindow(); if(!d) { d=new TClientForm(this); Ref->SetWindow(d); } d->Execute(Ref); Sleep(50); if(d->state != error) started = true; } if(started) { SpeedButton2->Enabled=true; NodesBox->Repaint(); //StartButton->Enabled=false; KillAll1->Enabled=true; Clearall1->Enabled=true; if(ParWindow->Visible) ParWindow->FormShow(0); } } //--------------------------------------------------------------------------- void __fastcall TMainWindow::Tile1Click(TObject *Sender) { Tile(); } //--------------------------------------------------------------------------- void __fastcall TMainWindow::Cascade1Click(TObject *Sender) { Cascade(); } //--------------------------------------------------------------------------- void __fastcall TMainWindow::SpeedButton2Click(TObject *Sender) { if(MDIChildCount) { ((TClientForm*)ActiveMDIChild)->Kill1Click(this); } if(!MDIChildCount) SpeedButton2->Enabled=False; } 
//---------------------------------------------------------------------------
// Double-click on a host entry: bring that host's client window forward.
void __fastcall TMainWindow::NodesBoxDblClick(TObject *Sender)
{
    CHostRef *ref;
    for (int i=0;i<NodesBox->Items->Count;i++)
    {
        ref = (CHostRef*)NodesBox->Items->Objects[i];
        if(NodesBox->Selected[i] && ref)
        {
            ref->ShowWindow();
            break;
        }
    }
}
//---------------------------------------------------------------------------
// Closes all MDI children (iterating backwards because closing shrinks the
// MDIChildren collection).
void __fastcall TMainWindow::CloseAll1Click(TObject *Sender)
{
    int i;
    for(i=MDIChildCount-1;i>=0;i--)
        MDIChildren[i]->Close();
    /*
    SpeedButton1->Enabled=true;
    StartButton->Enabled=true;
    Run1->Enabled=true;
    SpeedButton4->Enabled=true;
    Auswaehlen->Enabled=true;
    LoadConfig1->Enabled=MainWindow->SaveConfig1->Enabled=true;
    KillAll1->Enabled=false;
    CloseAll1->Enabled=false;
    Tile1->Enabled=false;
    Cascade1->Enabled=false;
    Clearall1->Enabled=false;
    */
}
//---------------------------------------------------------------------------
// Applies the font chosen in the font dialog to every client window.
void __fastcall TMainWindow::FontDialog1Apply(TObject *Sender, HWND Wnd)
{
    int i;
    TClientForm::MemoFont=FontDialog1->Font;
    for(i=0;i<MDIChildCount;i++)
        ((TClientForm*)(MDIChildren[i]))->applyFont();
}
//---------------------------------------------------------------------------
void __fastcall TMainWindow::SetFont1Click(TObject *Sender)
{
    if(FontDialog1->Execute())
        FontDialog1Apply(this, 0);
}
//---------------------------------------------------------------------------
// Owner-draw handler for the host list: strikes out invalidly configured
// hosts and colour-codes entries by the state of their client window.
void __fastcall TMainWindow::NodesBoxDrawItem(TWinControl *Control, int Index,
    TRect &Rect, TOwnerDrawState State)
{
    TClientForm *Window;
    CHostRef *ref = NULL;
    HostData *refdata = NULL; //si
    TColor FC,BC;
    TFontStyles FS;
    long int i = 0; //si
    bool DoStrikeOut = false; //si
    if (Index >= NodesBox->Items->Count) return;
    memcpy(&ref,&i,sizeof(ref));//ref = NULL; is ignored by compiler //si
    memcpy(&refdata,&i,sizeof(refdata));//refdata = NULL; is ignored by compiler //si
    TListBox *ListBox = dynamic_cast<TListBox*>(Control);
    TCanvas *Canvas = ListBox->Canvas;
    BC=Canvas->Brush->Color;
    Canvas->Brush->Color=clWindow;
    // display the text
    FC=Canvas->Font->Color;
    FS = Canvas->Font->Style;
    ref = (CHostRef*)NodesBox->Items->Objects[Index];
    // NOTE(review): *ref is dereferenced here BEFORE the !ref check below;
    // if ref can really be NULL this line still crashes — TODO confirm.
    refdata = (*ref);
    /*Si: memory-access error occured sometimes in original code:
    if(!ref || !(*ref)->ProcData || !(*ref)->ProcData->Executable || !(*ref)->Account)
    {
        Canvas->Font->Style = FS<<fsStrikeOut;
        Canvas->Font->Color=clWindowText;
    }
    */
    //replace (*ref) with refdata and divide if-expression
    // original fails when *ref==NULL
    if (!ref)
    {
        DoStrikeOut = true;
    }
    else
    {
        if (! refdata)
        {
            DoStrikeOut = true;
        }
        else if(!(refdata->ProcData)) //ERROR refdata invalid!!!
        {
            DoStrikeOut = true;
        }
        else
        {
            if ((!(refdata->ProcData->Executable)) || (!(refdata->Account)))
            {
                DoStrikeOut = true;
            }
        }
    }
    if(DoStrikeOut)
    {
        Canvas->Font->Style = FS<<fsStrikeOut; //strike out invalid host
        Canvas->Font->Color=clWindowText;
    }
    else
    {
        Window = ref->GetWindow();
        if(Window)
        {
            // Colour reflects the client-window state.
            switch(Window->state)
            {
                case killing: Canvas->Font->Color=clFuchsia; break;
                case running: Canvas->Font->Color=clGreen; break;
                //case init: Canvas->Font->Color=clOlive; break; //si
                case init: Canvas->Font->Color=clBlue; break;
                default: Canvas->Font->Color=clWindowText; break;
            }
        }
        else
            Canvas->Font->Color=clGrayText;
    }
    if(State.Contains(odSelected))
    {
        Canvas->Brush->Color=Canvas->Font->Color;
        Canvas->Font->Color=clHighlightText;
    }
    Canvas->FillRect(Rect); // clear the rectangle
    Canvas->TextOut(Rect.Left + 2, Rect.Top, ListBox->Items->Strings[Index]);
    // Restore the canvas attributes we changed.
    Canvas->Font->Color=FC;
    Canvas->Brush->Color=BC;
    Canvas->Font->Style = FS;
}
//---------------------------------------------------------------------------
// Kills the processes of all client windows.
void __fastcall TMainWindow::KillAll1Click(TObject *Sender)
{
    for(int i=0;i<MDIChildCount;i++)
        ((TClientForm*)(MDIChildren[i]))->Kill1Click(this);
    KillAll1->Enabled=false;
}
//---------------------------------------------------------------------------
// Loads a saved configuration (.rsc) and refreshes the host list via the
// configuration-dialog handler.
void __fastcall TMainWindow::LoadConfig1Click(TObject *Sender)
{
    if(OpenDialog1->Execute())
    {
        if(ConfDlg->LoadConfig(OpenDialog1->FileName))
        {
            ConfName = OpenDialog1->FileName;
            SpeedButton1Click(this);
        }
    }
}
//---------------------------------------------------------------------------
// Saves the current configuration; derives a default file name from the
// configured executable (basename without extension) + ".rsc".
void __fastcall TMainWindow::SaveConfig1Click(TObject *Sender)
{
    int endpos,startpos=0;
    if((ConfName.IsEmpty() && ConfDlg->GlobalConfig.ProcData && ConfDlg->GlobalConfig.ProcData->Executable) || (ConfName == "Noname.rsc"))
        ConfName=ConfDlg->GlobalConfig.ProcData->Executable;
    if(ConfName.IsEmpty())
        ConfName="Noname.rsc";
    // Scan backwards for the last '.' (extension) and last '\' (directory).
    endpos=ConfName.Length()+1;
    for (int i=ConfName.Length();i>0;i--)
    {
        if(ConfName[i]=='.')
            endpos=i;
        else if(ConfName[i]=='\\')
        {
            startpos=i;
            break;
        }
    }
    SaveDialog1->FileName=ConfName.SubString(startpos+1,endpos-startpos-1)+".rsc";
    if(SaveDialog1->Execute())
    {
        if(SaveDialog1->FileName.Pos(".")<=0)
            SaveDialog1->FileName=SaveDialog1->FileName+".rsc";
        ConfName = SaveDialog1->FileName;
        ConfDlg->SaveConfig(ConfName);
    }
}
//---------------------------------------------------------------------------
void __fastcall TMainWindow::ShowParTool1Click(TObject *Sender)
{
    ParWindow->Show();
}
//---------------------------------------------------------------------------
void __fastcall TMainWindow::About1Click(TObject *Sender)
{
    //siAboutBox= new TAboutBox(this);
    AboutBox->ShowModal();
    //sidelete AboutBox;
}
//---------------------------------------------------------------------------
// Clears the output memo of every client window.
void __fastcall TMainWindow::Clearall1Click(TObject *Sender)
{
    for(int i=0;i<MDIChildCount;i++)
        ((TClientForm*)(MDIChildren[i]))->ClearOutput1Click(this);
}
//---------------------------------------------------------------------------
// Prompts once for credentials and applies them to every known server.
void __fastcall TMainWindow::Setglobalaccount1Click(TObject *Sender)
{
    TLoginDlg *Login;
    Login = new TLoginDlg(this);
    //do not show OK to force user to use set for all
    Login->OKButton->Visible = false;
    Login->CancelButton->Caption = "Exit";
    if(Login->ShowModal() != mrCancel)
    {
        Servers->Lock();
        for(int i=0;i<Servers->Count();i++)
        {
            Login->GetLoginData((*Servers)[i]);
        }
        Servers->Unlock();
        Servers->Refresh();
    }
    delete Login;
}
//---------------------------------------------------------------------------
void __fastcall TMainWindow::FormClose(TObject *Sender, TCloseAction &Action)
{
    // store actual plugin
    CloseAll1Click(this);
}
//---------------------------------------------------------------------------
// Message handler: moves the check mark in the plug-in menu to the newly
// selected plug-in.
void __fastcall TMainWindow::OnPluginChange(TMessage &Message)
{
    PlgItem->Items[oldPlugin]->Checked = false;
    oldPlugin=PluginManager.GetActualIndex();
    PlgItem->Items[oldPlugin]->Checked = true;
}

// Click handler shared by all dynamically created plug-in menu items.
void __fastcall TMainWindow::PluginClick(TObject *Sender)
{
    PluginManager.SetActualPlugin(PlgItem->IndexOf((TMenuItem*)Sender));
}

// Locates the "Change Plug-in" submenu and fills it with one item per
// loaded plug-in, checking the currently active one.
void __fastcall TMainWindow::CreatePluginMenu()
{
    int i;
    TStrings *PlgNames;
    PlgItem = 0;
    TMenuItem *Item = MainMenu1->Items[0].Items[0];
    for(i=0;i<Item->Count;++i)
    {
        if(Item->Items[i]->Caption=="C&hange Plug-in")
        {
            PlgItem=Item->Items[i];
            break;
        }
    }
    if(!PlgItem) return;
    PlgNames=PluginManager.GetPluginList();
    for(i=0;i<PlgNames->Count;++i)
    {
        Item=new TMenuItem(PlgItem);
        Item->Caption = PlgNames->Strings[i];
        //Item->RadioItem=true;
        Item->OnClick = PluginClick;
        PlgItem->Add(Item);
    }
    oldPlugin = PluginManager.GetActualIndex();
    PlgItem->Items[oldPlugin]->Checked=true;
}

void __fastcall TMainWindow::FormDestroy(TObject *Sender)
{
    delete Servers;
    Servers = 0;
    delete IniFile;
    IniFile = NULL;
}
//---------------------------------------------------------------------------
void __fastcall TMainWindow::EditIncludelist1Click(TObject *Sender)
{
    //IncludeForm = new TIncludeForm(this);
    IncludeForm->ShowModal();
    //delete IncludeForm;
}
//---------------------------------------------------------------------------
// Keeps the Tile/Cascade items enabled only while MDI children exist.
void __fastcall TMainWindow::Window1Click(TObject *Sender)
{
    Tile1->Enabled = (MDIChildCount >0);
    Cascade1->Enabled = Tile1->Enabled;
}
//---------------------------------------------------------------------------
void __fastcall TMainWindow::Editexcludelist1Click(TObject *Sender)
{
    //ExcludeForm = new TExcludeForm(this);
    ExcludeForm->ShowModal();
    //delete ExcludeForm;
}
//---------------------------------------------------------------------------
// First-show hook: refreshes the server list once; when servers come from
// machines.txt the include/exclude list editors are disabled.
void __fastcall TMainWindow::FormShow(TObject *Sender)
{
    if (!ShownBefore)
    {
        ShownBefore = true;
        Servers->Refresh();
        if (Servers->useMachinestxt)
        {
            EditIncludelist1->Enabled = false;
            Editexcludelist1->Enabled = false;
        }
    }
}
//---------------------------------------------------------------------------
// Shows the executable path and the C++Builder version it was built with.
void __fastcall TMainWindow::Info1Click(TObject *Sender)
{
    AnsiString EName;
    char Msg[500];
    EName = Application->ExeName;
    EName += "\n";
#if (!(BCBVER > 1))
    //CBuilder1
    EName += "CBuilder1 Version";
#else
    //CBuilder5
    EName += "CBuilder5 Version";
#endif
    StrPCopy(Msg,EName);
    Application->MessageBox(Msg,"Info",MB_OK);
}
//---------------------------------------------------------------------------
// Toggles RPC encryption, persists the choice to the ini file and refreshes.
void __fastcall TMainWindow::RPCEncryptionClick(TObject *Sender)
{
    RPCEncryption->Checked = (! RPCEncryption->Checked);
    SetRPCEncryption(RPCEncryption->Checked);
    if (RPCEncryption->Checked)
        IniFile->WriteString("RexecShell","SetRPCEncryption",TRUESTR);
    else
        IniFile->WriteString("RexecShell","SetRPCEncryption",FALSESTR);
    Servers->Refresh();
}
//---------------------------------------------------------------------------
// Toggles profile-validity checking and persists the choice to the ini file.
void __fastcall TMainWindow::checkprofilevalidityClick(TObject *Sender)
{
    checkprofilevalidity->Checked = (! checkprofilevalidity->Checked);
    CheckUserAccountOnChange = checkprofilevalidity->Checked;
    if (checkprofilevalidity->Checked)
        IniFile->WriteString("RexecShell","checkprofilevalidity",TRUESTR);
    else
        IniFile->WriteString("RexecShell","checkprofilevalidity",FALSESTR);
}
//---------------------------------------------------------------------------
hhq163/kk_core
examples/base/global.go
// Package base holds shared wire-protocol constants and global counters.
package base

const ReadBufferSize = 10240 // read buffer size, in bytes
const PacketMaxSize = 1024 * 64
const NUM_MSG_TYPES = 1000 // total number of protocol message types

const (
	STATUS_NEVER     = 0 // Opcode not accepted from client (deprecated or server side only)
	STATUS_AUTHED    = 1 // Player authenticated
	STATUS_UNHANDLED = 2 // We don't handle this opcode yet
)

// Message type opcodes.
const (
	MSG_NULL_ACT                  = 0
	MSG_REGISTER_EMAIL            = 1  // register via email
	MSG_REGISTER_PHONE            = 2  // register via phone
	MSG_REGISTER_RSP              = 3  // registration response
	MSG_LOGINANOTHER              = 4  // force the user's other session offline
	MSG_LOGIN                     = 5  // login
	MSG_LOGIN_RSP                 = 6  // login response
	MSG_HEARTBEAT                 = 7  // heartbeat
	MSG_HEARTBEAT_RSP             = 8  // heartbeat response
	MSG_REBIND                    = 9  // reconnect after disconnect
	MSG_REBIND_RSP                = 10 // reconnect response
	MSG_GET_USERINFO              = 11 // query player info
	MSG_GET_USERINFO_RSP          = 12 // query player info reply
	MSG_RESET_PASSWORD            = 13 // reset password
	MSG_RESET_PASSWORD_RSP        = 14 // reset password response
	MSG_CREATER_ROLE              = 15 // create character
	MSG_CREATER_ROLE_RSP          = 16 // create character response
	MSG_POSITION_CHANGE           = 17 // periodic report of position change
	MSG_POSITION_CHANGE_RSP       = 18 // position change response
	MSG_GET_POSITION              = 19 // get current position
	MSG_GET_POSITION_RSP          = 20 // get current position response
	MSG_GET_VERIFICATION_CODE     = 21 // get verification code
	MSG_GET_VERIFICATION_CODE_RSP = 22 // get verification code response
	MSG_CHECK_ACCOUNT             = 23 // check account
	MSG_CHECK_ACCOUNT_RSP         = 24 // check account response
	MSG_CHECK_NICK_NAME           = 25 // check nickname
	MSG_CHECK_NICK_NAME_RSP       = 26 // check nickname response
	MSG_GET_AMOUNT                = 27 // get in-game sub-wallet balance and points
	MSG_GET_AMOUNT_RSP            = 28 // get sub-wallet balance and points response
	MSG_GET_KNAPSACK              = 29 // get user inventory (knapsack) info
	MSG_GET_KNAPSACK_RSP          = 30 // get user inventory info response
	MSG_ENTER_CITY                = 31 // enter city
	MSG_ENTER_CITY_RSP            = 32 // enter city response
	MSG_BROAD_RAND_EVENT          = 33 // broadcast a random event to the client
	MSG_BROAD_FINISH_EVENT        = 34 // broadcast a completed event to the client
	MSG_BROAD_POSITION            = 35 // broadcast the player's current position to the client
	MSG_BROAD_USER_OFFLINE        = 36 // broadcast that a player went offline
	MSG_FINISH_EVENT              = 37 // complete-event request
	MSG_FINISH_EVENT_RSP          = 38 // complete-event response
	MSG_UPDATE_POINT              = 39 // points change
	MSG_UPDATE_POINT_RSP          = 40 // points change response
	MSG_GET_INVITE_USERS          = 41 // get comrade (invited friends) list
	MSG_GET_INVITE_USERS_RSP      = 42 // get comrade list response
	MSG_GET_GRAB_COMRADES         = 43 // get list of comrades taken by other players
	MSG_GET_GRAB_COMRADES_RES     = 44 // get taken-comrades list response
	MSG_GET_BUILDING_DESC         = 45 // get building description
	MSG_GET_BUILDING_DESC_RSP     = 46 // get building description response
	MSG_GET_MEMBER_SYS            = 47 // get membership level system data
	MSG_GET_MEMBER_SYS_RSP        = 48 // get membership level system response
	MSG_GET_USER_LEVEL            = 49 // get the user's current level status
	MSG_GET_USER_LEVEL_RSP        = 50 // get the user's current level status response
	MSG_GRAB_COMRADE              = 51 // grab a comrade
	MSG_GRAB_COMRADE_RES          = 52 // grab a comrade response
	MSG_GET_ITEMS_LIST            = 53 // get item shop list
	MSG_GET_ITEMS_LIST_RSP        = 54 // get item shop list response
	MSG_BUY_ITEM                  = 55 // buy an item
	MSG_BUY_ITEM_RSP              = 56 // buy an item response
	MSG_MODIFY_NICKNAME           = 57 // modify nickname
	MSG_MODIFY_NICKNAME_RSP       = 58 // modify nickname response
)

var ActReqCount uint64        // number of operation requests
var ActRspSucessCount uint64  // number of successful operations
var ActRspFailCount uint64    // number of failed operations
var ActReqTimePoint int64     // start time
var ActLastTimePoint int64    // time of the last response

var ActionRspSucessCount uint64    // number of successful operation responses
var PreActionRspSucessCount uint64 // successful responses at the previous sample
var PreActionRspSucessTimePoint int64 // time point of the previous success sample

var ActionRepCount uint64       // number of operation requests
var PreActionRepCount uint64    // requests at the previous sample
var PreActionRepTimePoint int64 // time point of the previous request sample

var ActionRspFailCount uint64       // number of failed operations
var PreActionRspFailCount uint64    // failures at the previous sample
var PreActionRspFailTimePoint int64 // time point of the previous failure sample
kilbouri/advent-of-code
2020/day12/part2.py
from os import getcwd
import re
import math


def rotate(degrees: int, wp: list):
    """Rotate the waypoint vector ``wp = [north, east]`` around the ship.

    Positive ``degrees`` rotates counter-clockwise ("L" instruction),
    negative rotates clockwise ("R").

    Bug fix: the original always used floating-point trig, so cos(90 deg)
    was not exactly 0 and the waypoint (and final Manhattan distance)
    drifted away from integer values. All rotations in the puzzle are
    multiples of 90 degrees; those are now done with exact integer
    arithmetic. Arbitrary angles still fall back to the trig formula.
    """
    north, east = wp
    if degrees % 90 == 0:
        # One counter-clockwise quarter turn maps (north, east) -> (east, -north).
        for _ in range((degrees // 90) % 4):
            north, east = east, -north
        return [north, east]
    # Fallback for non-right angles (same formula as the original code).
    rads = math.radians(degrees)
    return [
        east * math.sin(rads) + north * math.cos(rads),
        east * math.cos(rads) - north * math.sin(rads),
    ]


def main():
    """Solve AoC 2020 day 12 part 2: move the ship toward a rotating waypoint."""
    with open(f"{getcwd()}/2020/day12/input.txt", "r") as input:
        file = input.read()

    instructions = re.findall(r"(.)(\d+)", file)

    # Waypoint is relative to the ship: (north, east).
    waypoint = [1, 10]
    dN = 0  # total ship displacement north
    dE = 0  # total ship displacement east

    for movement in instructions:
        action = movement[0]
        amount = int(movement[1])

        if action == "N":
            waypoint[0] += amount
        elif action == "S":
            waypoint[0] -= amount
        elif action == "E":
            waypoint[1] += amount
        elif action == "W":
            waypoint[1] -= amount
        elif action == "R":
            # "R" is clockwise, i.e. a negative CCW rotation.
            waypoint = rotate(-amount, waypoint)
        elif action == "L":
            waypoint = rotate(amount, waypoint)
        elif action == "F":
            dN += waypoint[0] * amount
            dE += waypoint[1] * amount

    # Manhattan distance from the starting position.
    print(abs(dN) + abs(dE))


if __name__ == "__main__":
    main()
dhis2/dhis2
dhis-2/dhis-services/dhis-service-tracker/src/test/java/org/hisp/dhis/tracker/TrackerIdentifierCollectorTest.java
/* * Copyright (c) 2004-2022, University of Oslo * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * Neither the name of the HISP project nor the names of its contributors may * be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
 */
package org.hisp.dhis.tracker;

import static java.util.Collections.singletonList;
import static org.hisp.dhis.tracker.domain.MetadataIdentifier.ofAttribute;
import static org.hisp.dhis.tracker.domain.MetadataIdentifier.ofCode;
import static org.hisp.dhis.tracker.domain.MetadataIdentifier.ofName;
import static org.hisp.dhis.tracker.domain.MetadataIdentifier.ofUid;
import static org.hisp.dhis.utils.Assertions.assertContainsOnly;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.mockito.Mockito.mock;

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.hisp.dhis.category.CategoryOption;
import org.hisp.dhis.category.CategoryOptionCombo;
import org.hisp.dhis.common.CodeGenerator;
import org.hisp.dhis.dataelement.DataElement;
import org.hisp.dhis.organisationunit.OrganisationUnit;
import org.hisp.dhis.program.Program;
import org.hisp.dhis.program.ProgramStage;
import org.hisp.dhis.programrule.ProgramRuleService;
import org.hisp.dhis.relationship.RelationshipType;
import org.hisp.dhis.trackedentity.TrackedEntityAttribute;
import org.hisp.dhis.trackedentity.TrackedEntityType;
import org.hisp.dhis.trackedentitycomment.TrackedEntityComment;
import org.hisp.dhis.tracker.domain.Attribute;
import org.hisp.dhis.tracker.domain.DataValue;
import org.hisp.dhis.tracker.domain.Enrollment;
import org.hisp.dhis.tracker.domain.Event;
import org.hisp.dhis.tracker.domain.Note;
import org.hisp.dhis.tracker.domain.Relationship;
import org.hisp.dhis.tracker.domain.RelationshipItem;
import org.hisp.dhis.tracker.domain.TrackedEntity;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

/**
 * Unit tests for {@code TrackerIdentifierCollector}: verifies that the
 * collector gathers, per metadata class, the identifiers referenced by the
 * tracked entities, enrollments, events and relationships of an import
 * payload, honouring the configured id schemes.
 */
class TrackerIdentifierCollectorTest
{

    private TrackerIdentifierCollector collector;

    @BeforeEach
    void setUp()
    {
        // The collector only needs the rule service as a collaborator; a mock suffices.
        ProgramRuleService programRuleService = mock( ProgramRuleService.class );
        collector = new TrackerIdentifierCollector( programRuleService );
    }

    // Tracked entities contribute their own uid plus type, org unit and attribute ids.
    @Test
    void collectTrackedEntities()
    {
        TrackerIdSchemeParams idSchemes = TrackerIdSchemeParams.builder()
            .idScheme( TrackerIdSchemeParam.ofAttribute( "NTVsGflP5Ix" ) )
            .orgUnitIdScheme( TrackerIdSchemeParam.NAME )
            .build();
        TrackedEntity trackedEntity = TrackedEntity.builder()
            .trackedEntity( uid() )
            .trackedEntityType( ofAttribute( "NTVsGflP5Ix", "sunshine" ) )
            .orgUnit( ofName( "ward" ) )
            .attributes( teAttributes( "VohJnvWfvyo", "qv9xOw8fBzy" ) )
            .build();
        TrackerImportParams params = params( idSchemes )
            .trackedEntities( singletonList( trackedEntity ) )
            .build();

        Map<Class<?>, Set<String>> ids = collector.collect( params );

        assertNotNull( ids );
        assertContainsOnly( ids.get( TrackedEntity.class ), trackedEntity.getTrackedEntity() );
        assertContainsOnly( ids.get( TrackedEntityType.class ), "sunshine" );
        assertContainsOnly( ids.get( OrganisationUnit.class ), "ward" );
        assertContainsOnly( ids.get( TrackedEntityAttribute.class ), "VohJnvWfvyo", "qv9xOw8fBzy" );
    }

    // Enrollments contribute their uid, tracked entity, program, org unit and attributes.
    @Test
    void collectEnrollments()
    {
        TrackerIdSchemeParams idSchemes = TrackerIdSchemeParams.builder()
            .orgUnitIdScheme( TrackerIdSchemeParam.NAME )
            .programIdScheme( TrackerIdSchemeParam.ofAttribute( "NTVsGflP5Ix" ) )
            .build();
        Enrollment enrollment = Enrollment.builder()
            .enrollment( uid() )
            .trackedEntity( uid() )
            .program( ofAttribute( "NTVsGflP5Ix", "sunshine" ) )
            .orgUnit( ofName( "ward" ) )
            .attributes( teAttributes( "VohJnvWfvyo", "qv9xOw8fBzy" ) )
            .build();
        TrackerImportParams params = params( idSchemes )
            .enrollments( singletonList( enrollment ) )
            .build();

        Map<Class<?>, Set<String>> ids = collector.collect( params );

        assertNotNull( ids );
        assertContainsOnly( ids.get( Enrollment.class ), enrollment.getUid() );
        assertContainsOnly( ids.get( TrackedEntity.class ), enrollment.getTrackedEntity() );
        assertContainsOnly( ids.get( Program.class ), "sunshine" );
        assertContainsOnly( ids.get( OrganisationUnit.class ), "ward" );
        assertContainsOnly( ids.get( TrackedEntityAttribute.class ), "VohJnvWfvyo", "qv9xOw8fBzy" );
    }

    // Events contribute program (stage), org unit, data elements, category
    // option (combos) and note uids.
    @Test
    void collectEvents()
    {
        TrackerIdSchemeParams idSchemes = TrackerIdSchemeParams.builder()
            .orgUnitIdScheme( TrackerIdSchemeParam.NAME )
            .programIdScheme( TrackerIdSchemeParam.ofAttribute( "NTVsGflP5Ix" ) )
            .dataElementIdScheme( TrackerIdSchemeParam.UID )
            .categoryOptionComboIdScheme( TrackerIdSchemeParam.CODE )
            .build();
        Event event = Event.builder()
            .event( uid() )
            .enrollment( uid() )
            .program( ofAttribute( "NTVsGflP5Ix", "sunshine" ) )
            .programStage( ofAttribute( "NTVsGflP5Ix", "flowers" ) )
            .orgUnit( ofName( "ward" ) )
            .dataValues( dataValues( "VohJnvWfvyo", "qv9xOw8fBzy" ) )
            .attributeOptionCombo( ofCode( "rgb" ) )
            .attributeCategoryOptions( Set.of( ofCode( "red" ), ofCode( "green" ), ofCode( "blue" ) ) )
            .notes( List.of( Note.builder().note( "i1vviSlidJE" ).value( "nice day!" ).build() ) )
            .build();
        TrackerImportParams params = params( idSchemes )
            .events( singletonList( event ) )
            .build();

        Map<Class<?>, Set<String>> ids = collector.collect( params );

        assertNotNull( ids );
        assertContainsOnly( ids.get( Event.class ), event.getUid() );
        assertContainsOnly( ids.get( Enrollment.class ), event.getEnrollment() );
        assertContainsOnly( ids.get( Program.class ), "sunshine" );
        assertContainsOnly( ids.get( ProgramStage.class ), "flowers" );
        assertContainsOnly( ids.get( OrganisationUnit.class ), "ward" );
        assertContainsOnly( ids.get( DataElement.class ), "VohJnvWfvyo", "qv9xOw8fBzy" );
        assertContainsOnly( ids.get( CategoryOptionCombo.class ), "rgb" );
        assertContainsOnly( ids.get( CategoryOption.class ), "red", "green", "blue" );
        assertContainsOnly( ids.get( TrackedEntityComment.class ), "i1vviSlidJE" );
    }

    // A note without a uid must not be collected.
    @Test
    void collectEventsSkipsNotesWithoutAnId()
    {
        Event event = Event.builder()
            .notes( List.of( Note.builder().value( "nice day!" ).build() ) )
            .build();
        TrackerImportParams params = params( TrackerIdSchemeParams.builder().build() )
            .events( singletonList( event ) )
            .build();

        Map<Class<?>, Set<String>> ids = collector.collect( params );

        assertNotNull( ids );
        assertNull( ids.get( TrackedEntityComment.class ) );
    }

    // A note without a value must not be collected either.
    @Test
    void collectEventsSkipsNotesWithoutAValue()
    {
        Event event = Event.builder()
            .notes( List.of( Note.builder().note( "i1vviSlidJE" ).build() ) )
            .build();
        TrackerImportParams params = params( TrackerIdSchemeParams.builder().build() )
            .events( singletonList( event ) )
            .build();

        Map<Class<?>, Set<String>> ids = collector.collect( params );

        assertNotNull( ids );
        assertNull( ids.get( TrackedEntityComment.class ) );
    }

    // Relationships contribute their uid, type and the uids of both items.
    @Test
    void collectRelationships()
    {
        TrackerIdSchemeParams idSchemes = TrackerIdSchemeParams.builder()
            .idScheme( TrackerIdSchemeParam.ofAttribute( "NTVsGflP5Ix" ) )
            .orgUnitIdScheme( TrackerIdSchemeParam.NAME )
            .build();
        Relationship relationship = Relationship.builder()
            .relationship( uid() )
            .relationshipType( ofAttribute( "NTVsGflP5Ix", "sunshine" ) )
            .from( RelationshipItem.builder()
                .enrollment( uid() )
                .build() )
            .to( RelationshipItem.builder()
                .event( uid() )
                .build() )
            .build();
        TrackerImportParams params = params( idSchemes )
            .relationships( singletonList( relationship ) )
            .build();

        Map<Class<?>, Set<String>> ids = collector.collect( params );

        assertNotNull( ids );
        assertContainsOnly( ids.get( Relationship.class ), relationship.getRelationship() );
        assertContainsOnly( ids.get( RelationshipType.class ), "sunshine" );
        assertContainsOnly( ids.get( Enrollment.class ), relationship.getFrom().getEnrollment() );
        assertContainsOnly( ids.get( Event.class ), relationship.getTo().getEvent() );
    }

    /** Generates a fresh, valid DHIS2 uid. */
    private String uid()
    {
        return CodeGenerator.generateUid();
    }

    /** Starts an import-params builder pre-configured with the given id schemes. */
    private TrackerImportParams.TrackerImportParamsBuilder params( TrackerIdSchemeParams idSchemes )
    {
        return TrackerImportParams.builder().idSchemes( idSchemes );
    }

    /** Builds one attribute per given uid. */
    private List<Attribute> teAttributes( String... uids )
    {
        List<Attribute> result = new ArrayList<>();
        for ( String uid : uids )
        {
            result.add( teAttribute( uid ) );
        }
        return result;
    }

    private Attribute teAttribute( String uid )
    {
        return Attribute.builder()
            .attribute( ofUid( uid ) )
            .build();
    }

    /** Builds one data value per given data element uid. */
    private Set<DataValue> dataValues( String... dataElementUids )
    {
        Set<DataValue> result = new HashSet<>();
        for ( String uid : dataElementUids )
        {
            result.add( dataValue( uid ) );
        }
        return result;
    }

    private DataValue dataValue( String dataElementUid )
    {
        return DataValue.builder()
            .dataElement( ofUid( dataElementUid ) )
            .build();
    }
}
aaronmjacobs/Forge
Source/Math/Bounds.cpp
#include "Math/Bounds.h"

#include "Core/Assert.h"

// Axis-aligned bounding volume: center/extent box plus a bounding-sphere radius
// derived from the extent.
Bounds::Bounds(const glm::vec3& centerPosition, const glm::vec3& extentVector)
   : center(centerPosition)
   , extent(extentVector)
   , radius(glm::length(extentVector))
{
}

// Builds the tightest AABB around a non-empty set of points.
Bounds::Bounds(std::span<const glm::vec3> points)
{
   ASSERT(points.size() > 0);

   // Seed the component-wise min/max with the first point, then fold in the rest.
   glm::vec3 lo = points[0];
   glm::vec3 hi = points[0];
   for (std::size_t i = 1; i < points.size(); ++i)
   {
      lo = glm::min(lo, points[i]);
      hi = glm::max(hi, points[i]);
   }

   center = (lo + hi) * 0.5f;
   extent = glm::abs(hi - lo) * 0.5f;
   radius = glm::length(extent);
}

// Updates the extent and keeps the cached sphere radius in sync with it.
void Bounds::setExtent(const glm::vec3& extentVector)
{
   extent = extentVector;
   radius = glm::length(extent);
}
sonnat/sonnat-ui
packages/sonnat-ui/src/utils/useSyncEffect.js
<filename>packages/sonnat-ui/src/utils/useSyncEffect.js
import { useRef, useMemo, useEffect } from "react";

/**
 * Like `useEffect`, but runs `effectFn` synchronously DURING render,
 * whenever `dependencies` change (detected via a `useMemo` identity key).
 * The optional clean-up function returned by `effectFn` is invoked from a
 * real effect when the dependencies change again or the component unmounts.
 *
 * NOTE(review): `cleanUpFn` is a per-render local captured by the effect
 * closure; this relies on the effect re-running exactly when `currentKey`
 * changes — confirm behavior under React strict/concurrent rendering.
 *
 * @param {() => (void | (() => void))} effectFn effect to run synchronously;
 *   may return a clean-up callback.
 * @param {unknown[]} dependencies dependency list, compared like useMemo deps.
 */
export default function useSyncEffect(effectFn, dependencies) {
  const key = useRef([]);
  let cleanUpFn;

  // Store "generation" key. Just returns a new object every time
  // eslint-disable-next-line react-hooks/exhaustive-deps
  const currentKey = useMemo(() => ({}), dependencies);

  // "the first render", or "memo dropped the value"
  if (key.current !== currentKey) {
    key.current = currentKey;
    // Run the effect synchronously, before the browser paints.
    cleanUpFn = effectFn();
  }

  useEffect(
    () => {
      // Only the clean-up matters here; the effect body ran during render.
      return () => {
        if (cleanUpFn) cleanUpFn();
      };
    },
    [currentKey] // eslint-disable-line react-hooks/exhaustive-deps
  );
}
xiyifen/myshop
src/main/java/com/xiyifen/myshop/system/mapper/Report2Mapper.java
package com.xiyifen.myshop.system.mapper;

import com.xiyifen.myshop.system.entity.Report2;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;

/**
 * MyBatis-Plus mapper for the {@link Report2} entity.
 *
 * <p>All generic CRUD operations are inherited from {@link BaseMapper};
 * no custom SQL methods are declared here.
 *
 * @author xiyifen
 */
public interface Report2Mapper extends BaseMapper<Report2> {

}
Shefin-CSE16/Competitive-Programming
CodeForces/Solutions/1088A.cpp
#include <bits/stdc++.h>
using namespace std;

using ll = long long;

// Codeforces 1088A: find a pair (a, b) with b | a, a * b > x and a < x * b,
// scanning pairs in the same lexicographic order as the original solution so
// the printed answer is identical; prints -1 when no pair exists (x == 1).
int main() {
    ll x;
    cin >> x;
    for (ll a = 1; a <= x; ++a) {
        for (ll b = 1; b <= x; ++b) {
            if (a % b != 0) {
                continue; // b must divide a
            }
            if (a * b > x && a < x * b) {
                cout << a << " " << b << endl;
                return 0;
            }
        }
    }
    cout << -1 << endl;
    return 0;
}
gcao/gene
lib/gene/handlers/complex_string_handler.rb
module Gene
  module Handlers
    # Handles the `#""` / `#''` group types by turning their children into a
    # ComplexString value; any other input is left for the next handler.
    class ComplexStringHandler
      COMPLEX_STRING1 = Gene::Types::Symbol.new('#""')
      COMPLEX_STRING2 = Gene::Types::Symbol.new("#''")

      def initialize
        @logger = Logem::Logger.new(self)
      end

      def call context, data
        applicable =
          data.is_a?(Gene::Types::Base) &&
          (data.type == COMPLEX_STRING1 || data.type == COMPLEX_STRING2)
        return Gene::NOT_HANDLED unless applicable

        @logger.debug('call', data)

        Gene::Types::ComplexString.new(*data.data)
      end
    end
  end
end
we11cheng/WCTelegram
Telegraph/TGAccessChecker.h
<gh_stars>1-10
#import <Foundation/Foundation.h>

// Why photo-library access is being requested; presumably used to tailor the
// denial alert text — confirm in the implementation.
typedef enum {
    TGPhotoAccessIntentRead,
    TGPhotoAccessIntentSave,
    TGPhotoAccessIntentCustomWallpaper
} TGPhotoAccessIntent;

// Why microphone access is being requested.
typedef enum {
    TGMicrophoneAccessIntentVoice,
    TGMicrophoneAccessIntentVideo,
    TGMicrophoneAccessIntentCall,
} TGMicrophoneAccessIntent;

// Why location access is being requested.
typedef enum {
    TGLocationAccessIntentSend,
    TGLocationAccessIntentTracking,
} TGLocationAccessIntent;

// Central authorization checks for the system permissions the app needs.
// Each method appears to return whether access is available and otherwise
// surfaces an alert, invoking the completion block after dismissal — verify
// against the .m file.
// NOTE(review): "Comlpetion" in three selector names is a typo kept as-is;
// renaming a selector would break existing call sites.
@interface TGAccessChecker : NSObject

+ (bool)checkAddressBookAuthorizationStatusWithAlertDismissComlpetion:(void (^)(void))alertDismissCompletion;

+ (bool)checkPhotoAuthorizationStatusForIntent:(TGPhotoAccessIntent)intent alertDismissCompletion:(void (^)(void))alertDismissCompletion;

+ (bool)checkMicrophoneAuthorizationStatusForIntent:(TGMicrophoneAccessIntent)intent alertDismissCompletion:(void (^)(void))alertDismissCompletion;

+ (bool)checkCameraAuthorizationStatusWithAlertDismissComlpetion:(void (^)(void))alertDismissCompletion;

+ (bool)checkLocationAuthorizationStatusForIntent:(TGLocationAccessIntent)intent alertDismissComlpetion:(void (^)(void))alertDismissCompletion;

@end
best08618/asylo
gcc-gcc-7_3_0-release/gcc/testsuite/gcc.dg/special/weak-1.c
/* { dg-do run { xfail { hppa*-*-hpux* && { ! lp64 } } } } */ /* { dg-require-weak "" } */ /* { dg-additional-sources weak-1a.c } */ /* See PR target/23387 for hppa xfail details. */ #include <stdlib.h> int foo(void) __attribute__((weak)); int foo(void) { return 0; } int main(void) { if (foo()) exit(0); else abort(); }
bcncybersecurity/frab
test/support/capybara_helper.rb
# Capybara UI-driving helpers shared by feature/system specs.
module CapybaraHelper
  # Logs in through the login form with the given credentials.
  def sign_in(email, password)
    visit root_path
    click_on 'Log-in'
    fill_in 'Email', with: email
    fill_in 'Password', with: password
    click_on 'Log in'
  end

  # Convenience wrapper: signs in a user record with the suite-wide default
  # password ('frab123' — presumably what the test fixtures seed; confirm
  # against the user factory/fixtures).
  def sign_in_user(user)
    sign_in(user.email, 'frab123')
  end

  # Navigates from the conference list into a conference's settings tab.
  # `matcher` is forwarded to Capybara's `match:` option to disambiguate
  # multiple "Show" links (defaults to the first one).
  def visit_conference_settings(matcher = :first)
    click_on 'Conferences'
    click_on 'Show', match: matcher
    find('ul.nav:eq(2)').click_link('Settings')
  end
end
Knight13/Exploring-Deep-Neural-Decision-Trees
Covertype/NN.py
<filename>Covertype/NN.py<gh_stars>1-10
# Two-hidden-layer fully-connected network on the Covertype dataset,
# written against the pre-1.0 TensorFlow (graph/session) API in Python 2
# (note the `print "..."` statement syntax below).
import numpy as np
import tensorflow as tf
import covtype_data
import time
import random  # NOTE(review): unused import

# Features and one-hot labels provided by the dataset module.
x = covtype_data.feature
y = covtype_data.label

epochs = 100
batch_size = 100

input_num_units = x.shape[1]
hidden_num_units_1 = 10**5
hidden_num_units_2 = 10**5
num_class = y.shape[1]

# NOTE(review): a second InteractiveSession is created again further down;
# this first one (and its resources) is silently abandoned.
sess = tf.InteractiveSession()
tf.set_random_seed(1990)

# Placeholders for a mini-batch of features/labels.
x_ph = tf.placeholder(tf.float32, [None, input_num_units])
y_ph = tf.placeholder(tf.float32, [None, num_class])

seed = 1990

weights = {
    'hidden_1': tf.Variable(tf.random_normal([input_num_units, hidden_num_units_1], seed=seed)),
    'hidden_2': tf.Variable(tf.random_normal([hidden_num_units_1, hidden_num_units_2], seed=seed)),
    'output': tf.Variable(tf.random_normal([hidden_num_units_2, num_class], seed=seed))
}
biases = {
    'hidden_1': tf.Variable(tf.random_normal([hidden_num_units_1], seed=seed)),
    'hidden_2': tf.Variable(tf.random_normal([hidden_num_units_2], seed=seed)),
    'output': tf.Variable(tf.random_normal([num_class], seed=seed))
}

x = np.array(x, dtype = np.float32)

#1st hidden layer
# NOTE(review): softmax (not relu/sigmoid) as a hidden activation is unusual —
# presumably intentional for this experiment, but worth confirming.
hidden_layer_1 = tf.add(tf.matmul(x_ph, weights['hidden_1']), biases['hidden_1'])
hidden_layer_1 = tf.nn.softmax(hidden_layer_1)

#2nd hidden layer
hidden_layer_2 = tf.add(tf.matmul(hidden_layer_1, weights['hidden_2']), biases['hidden_2'])
hidden_layer_2 = tf.nn.softmax(hidden_layer_2)

#output layer
y_pred = tf.matmul(hidden_layer_2, weights['output']) + biases['output']

# Cross-entropy over logits, minimized with Adam (lr=0.1).
loss = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=y_pred, labels=y_ph))
opt = tf.train.AdamOptimizer(0.1)
train_step = opt.minimize(loss)

# NOTE(review): duplicate session/seed setup — see the earlier block.
sess = tf.InteractiveSession()
tf.set_random_seed(1990)
start_time = time.time()
# NOTE(review): tf.initialize_all_variables() is the deprecated pre-1.0 name
# for tf.global_variables_initializer().
sess.run(tf.initialize_all_variables())

for epoch in range(epochs):
    avg_cost = 0
    total_batch = int(x.shape[0]/batch_size)
    for i in range(total_batch):
        # Random sampling with replacement each step (not a true epoch sweep).
        batch_mask = np.random.choice(x.shape[0], batch_size)
        batch_x = x[batch_mask].reshape(-1, x.shape[1])
        batch_y = y[batch_mask].reshape(-1, y.shape[1])
        _, loss_e = sess.run([train_step, loss], feed_dict={x_ph: batch_x, y_ph: batch_y})
        avg_cost += loss_e / total_batch
    print "Epoch:", (epoch+1), "cost =", "{:.5f}".format(avg_cost)

# Training-set error rate (argmax prediction vs. one-hot label).
print('error rate %.5f' % (1 - np.mean(np.argmax(y_pred.eval(feed_dict={x_ph: x}), axis=1) == np.argmax(y, axis=1))))
print("--- %s seconds ---" % (time.time() - start_time))
fossabot/hrdc
hrdc/stream/parser/__init__.py
"""Stream parser implementations.

Re-exports the parser base class and the concrete parsers (binary, hex,
code) so callers can import them directly from this package.
"""

from .base import Parser
from .binary import Binary
from .hex import Hex
from .code import Code
tunepack/tunepack-desktop
src/renderer/handlers/search.js
<reponame>tunepack/tunepack-desktop
import { sendAndWait } from '../utils/handlers'
import * as Channel from 'shared/constants/Channel'

/**
 * Sends a search request over the SEARCH IPC channel and resolves with the
 * reply from the main process.
 *
 * @param {*} query search input forwarded as the request payload
 * @returns {*} whatever `sendAndWait` yields for the SEARCH channel
 *   (presumably a Promise of the search results — confirm in utils/handlers)
 */
export default (query) => {
  return sendAndWait(Channel.SEARCH, {
    query
  })
}
Voldemort-Team/Voldemort
voldemort-core/src/main/java/com/fs/voldemort/core/functional/func/Func7.java
package com.fs.voldemort.core.functional.func; import java.io.Serializable; @FunctionalInterface public interface Func7<T1, T2, T3, T4, T5, T6, T7, R> extends Serializable { R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7); }
mcarlen/libbiarc
lib/CurveBundle.cpp
/*! \class CurveBundle include/CurveBundle.h include/CurveBundle.h \ingroup BiarcLibGroup \brief The CurveBundle class for storing and manipulating biarc curves in \f$\mathcal{R}^3\f$. This class is used to store and manipulate a set of curves. This data can be interpolated to a biarc curve. The class is for open and closed curves, but this must be specified (how to do that is explained later in this text). \code #include "../include/CurveBundle.h" int main() { int N = 100; CurveBundle<Vector3> borromean; Curve<Vector3> c[3]; ... construct curves (see objects/borromean.cpp) ... for (int i=0;i<3;i++) borromean.newCurve(c[i]); return 0; } \endcode \sa Curve,Biarc */ // // documentation of inlined methods // /*! \fn ostream & CurveBundle::operator<<(ostream &out, CurveBundle &c) Overloaded left shift operator. Writes the current CurveBundle object \a c to the ostream object \a out. If there is an interpolated curve, this function prints point/tangent,matching point/tangent of all the biarcs of each curve. For non valid biarcs only the point/tangent data is written to the stream. */ #include "../include/CurveBundle.h" #ifndef __CURVE_BUNDLE_SRC__ #define __CURVE_BUNDLE_SRC__ /*! Internal function to initialize a CurveBundle object. Sets The header strings to the default values. The number of curves is set to zero. */ // init to Null template<class Vector> void CurveBundle<Vector>::init() { bundle.clear(); } /*! Constructs an empty curve and sets the header to "No name","","","" */ template<class Vector> CurveBundle<Vector>::CurveBundle() { init(); } /*! Constructs a CurveBundle object from a portable knot format data file (PKF file). \sa readPKF(). */ template<class Vector> CurveBundle<Vector>::CurveBundle(const char* filename) { init(); if(!readPKF(filename)) { cerr << "Portable Knot File Error.\n"; exit(1); } } /*! Copy constructor. 
\sa operator= */ template<class Vector> CurveBundle<Vector>::CurveBundle(const CurveBundle<Vector> &curve) { init(); *this = curve; } /*! Assign operator. Copies the header structure from \a c. Copies all the curves in this bundle (i.e. all the point/tangent data and closes the curve if necessary). */ template<class Vector> CurveBundle<Vector>& CurveBundle<Vector>::operator= (const CurveBundle<Vector> &c) { if (bundle.size()!=0) bundle.clear(); for (int i=0;i<c.curves();i++) bundle.push_back(c.bundle[i]); return *this; } /*! Destructor. */ template<class Vector> CurveBundle<Vector>::~CurveBundle() { bundle.clear(); } /*! Returns the number of curves currently stored in the bundle. */ template<class Vector> int CurveBundle<Vector>::curves() const {return (int)bundle.size();} /*! Returns the total number of nodes of the Bundle (i.e. the sum of the number of nodes of each curve) */ template<class Vector> int CurveBundle<Vector>::nodes() const { int N = bundle[0].nodes(); for (int i=1;i<curves();i++) N += bundle[i].nodes(); return N; } /*! Returns the thickness (biggest possible tube radius without self-intersection) of the curve bundle */ template<class Vector> FLOAT_TYPE CurveBundle<Vector>::thickness() { return compute_thickness(this); } /*! Returns the "fast" thickness (pt radii only) of the curve bundle. */ template<class Vector> FLOAT_TYPE CurveBundle<Vector>::thickness_fast() { FLOAT_TYPE thick = bundle[0].thickness_fast(); for (int i=1;i<curves();++i) { FLOAT_TYPE th = bundle[i].thickness_fast(); if (th<thick) thick = th; } return thick; } /*! Returns the total length of the bundle. This is the sum of the arc-length of all the stored curves. */ template<class Vector> FLOAT_TYPE CurveBundle<Vector>::length() { FLOAT_TYPE L = 0.0; for (int i=0;i<curves();i++) L += bundle[i].length(); return L; } /*! This is the way to close all the curves at once. 
\sa unlink(), Curve::link(), Curve::unlink() */ template<class Vector> void CurveBundle<Vector>::link() { for (int i=0;i<curves();i++) bundle[i].link(); } /*! Opens all curves in bundle. \sa link(), Curve::link(), Curve::unlink() */ template<class Vector> void CurveBundle<Vector>::unlink() { for (int i=0;i<curves();i++) bundle[i].unlink(); } /*! Add a new curve \a c to the bundle. */ template<class Vector> void CurveBundle<Vector>::newCurve(Curve<Vector>& c) { bundle.push_back(c); } /*! Read a new curve from the stream \a in and add it to the bundle. */ template<class Vector> void CurveBundle<Vector>::newCurve(istream &in) { Curve<Vector> c(in); newCurve(c); } /*! Takes an already valid curve object pointer and adds this as a component to the current Bundle */ template<class Vector> void CurveBundle<Vector>::newCurve(Curve<Vector>* c) { newCurve(*c); } /*! Return a reference to the curve number \a c. */ template<class Vector> Curve<Vector>& CurveBundle<Vector>::operator[](int c) { return bundle[c]; } /*! Interpolate the curves in the bundle with biarcs. This function uses the same \f$\Gamma\f$ value for all curves! \sa Curve::make(), makeMidpointRule() */ template<class Vector> void CurveBundle<Vector>::make(FLOAT_TYPE f) { for (int i=0;i<curves();i++) bundle[i].make(f); } /*! Interpolate all the curves in the bundle with the midpoint matching rule. \sa Curve::make(), make() */ template<class Vector> void CurveBundle<Vector>::makeMidpointRule() { for (int i=0;i<curves();i++) bundle[i].makeMidpointRule(); } /*! This function resamples all curves in the bundle with \a NewNoNodes nodes. resample() can only be done if we have an interpolated set of curves! \sa refine(),make(). */ template<class Vector> void CurveBundle<Vector>::resample(int NewNoNodes) { for (int i=0;i<curves();i++) bundle[i].resample(NewNoNodes); } /*! Change the orientation of all the curves in the bundle. This means flipping the tangents and reordering the points. 
*/ template<class Vector> void CurveBundle<Vector>::changeDirection() { for (int i=0;i<curves();i++) bundle[i].changeDirection(); } /*! Normalize the length of the sum of curve lengths! This means that for 10 curves with length 1 in the bundle, the length will be 1/10 after normalisation! An interpolated curve is necessary to compute the length of it. \sa scale(), Curve::normalize(), Curve::scale() */ template<class Vector> void CurveBundle<Vector>::normalize() { FLOAT_TYPE invL = 1.0/length(); for (int i=0;i<curves();i++) bundle[i].scale(invL); } /*! Scales the length of the curves by \a s. The curves need not to be interpolated, since only the data points are changed. Returns a reference to itself. \sa normalize() */ template<class Vector> CurveBundle<Vector>& CurveBundle<Vector>::scale(FLOAT_TYPE s) { for (int i=0;i<curves();i++) bundle[i].scale(s); return *this; } /*! Translates all the curves by \a v. Redo the interpolation after this operation if the initial curve was biarc interpolated, since the matching points and bezier points are no longer correct. \sa center(), operator-=() */ template<class Vector> CurveBundle<Vector>& CurveBundle<Vector>::operator+=(const Vector &v) { for (int i=0;i<curves();i++) bundle[i]+=v; return *this; } /*! Translate all the curves by \a -v. Redo the interpolation after this operation if the initial curve was biarc interpolated, since the matching points and bezier points are no longer correct. \sa operator+=() */ template<class Vector> CurveBundle<Vector>& CurveBundle<Vector>::operator-=(const Vector &v) { for (int i=0;i<curves();i++) bundle[i]-=v; return *this; } /*! Applies the rotation specified by a rotation matrix \a m to each curve in the bundle. No check is done for \a m, the user must know what matrix he wants to apply. This is not the standart 4x4 transformation matrix approach known from homogeneous coordinates stuff. 
*/ template<class Vector> CurveBundle<Vector>& CurveBundle<Vector>::rotate(Matrix3 &m) { for (int i=0;i<curves();i++) bundle[i].rotate(m); return *this; } /*! This function shifts the center of mass of the bundle to <0,0,0>. This is different from : centering each particular the curve in the bundle to <0,0,0>!!! \sa getCenter() */ template<class Vector> void CurveBundle<Vector>::center() { Vector delta_center = getCenter(); (*this) -= delta_center; } /*! Returns the bundles's center of mass. \sa center() */ template<class Vector> Vector CurveBundle<Vector>::getCenter() { Vector sum(0,0,0); for (int i=0;i<curves();i++) sum += bundle[i].getCenter(); sum /= curves(); return sum; } /*! Read data from a PKF file \a infile. More details are in the class Curve documentation. This class can store more than 1 curve! Returns 1 if all went well, zero otherwise. \sa writePKF(),Curve::readPKF(),Curve::writePKF() */ template<class Vector> int CurveBundle<Vector>::readPKF(const char *infile) { ifstream in(infile, ios::in); if (!in.good()) { cerr<<"CurveBundle::readPKF() : could not read " << infile <<endl; return 0; } int B = readPKF(in); in.close(); return B; } /*! Read the curves from a stream \a in. More details about the PKF data format are given in the Curve class documentation. \sa writePKF(),Curve::readPKF() */ template<class Vector> int CurveBundle<Vector>::readPKF(istream &in) { readHeader(in); char tmp[1024]; // Read NoComp and NoNodes in.getline(tmp,sizeof tmp); if(strncmp(tmp,"NCMP ",5)) { cerr << "Expected NCMP: " << tmp << '\n'; return 0; } int NoCurves = atoi(tmp+5); if (NoCurves<1||NoCurves>1000) { cerr << "at NCMP tag : You have either 0 or more" << " than 1000 curves in your file!\n"; return 0; } for (int i=0;i<NoCurves;i++) newCurve(in); return 1; } /*! Writes the current curves to a PKF file \a outfile. This goes through all components and writes them to the file. Returns 1 if all went well, zero otherwise. 
\sa readPKF() */ template<class Vector> int CurveBundle<Vector>::writePKF(const char *outfile) { ofstream out(outfile,ios::trunc|ios::out); if(!out.good()) { cerr<<"CurveBundle::writePKF() : File " << outfile <<" problem.\n"; return 0; } return writePKF(out); } /*! Writes the current CurveBundle instance to the stream \a out. (I.e. header, number of curves and the curve data). \sa readPKF() */ template<class Vector> int CurveBundle<Vector>::writePKF(ostream &out) { writeHeader(out); out << "NCMP " << curves() << endl; for (int i=0;i<curves();i++) { if (!bundle[i].writePKF(out,0)) { cerr << "CurveBundle::writeCurvePKF() : Problem with curve " << i << endl; return 0; } } return 1; } /*! Read data from a XYZ file \a infile. More details are in the class Curve documentation. This class can store more than 1 curve! Returns 1 if all went well, zero otherwise. \sa writePKF(),Curve::readPKF(),Curve::writePKF() */ template<class Vector> int CurveBundle<Vector>::readXYZ(const char *infile) { ifstream in(infile, ios::in); if (!in.good()) { cerr<<"CurveBundle::readXYZ() : could not read " << infile <<endl; return 0; } Curve<Vector> c; int B = c.readXYZ(in); in.close(); newCurve(c); return B; } /*! This function reads the curve data from a file \a infile. The file structure is a list of x,y,z coordinates. The default for the delimiter is " ", but can be changed with the second argument of the function. The first line gives the number of nodes in the following format {#nodes}. Then the coordinates are read in. The \a delimiter argument is any string that separates the coordinate values from each other, the default value is a space ' ' delimiter. Returns 1 if all went well, zero otherwise. \sa computeTangents(),polygonalToArcs(),arcsToPolygonal() TODO : not ready, change DOC!!! 
*/ template<class Vector> int CurveBundle<Vector>::readData(const char* infile, const char* delimiter) { ifstream in(infile, ios::in); if(!in.good()) { cerr<<"CurveBundle::readData() : File " << infile <<" problem.\n"; return 0; } char tmp[1024]; in.getline(tmp,sizeof tmp); // Read in number of curves int NoCurves = atoi(tmp); Curve<Vector> ctmp; assert(NoCurves>0); for (int i=0;i<NoCurves;i++) { if (!ctmp.readData(in,delimiter)) { cerr << "CurveBundle::readData() : Could not read curve " << i << " !\n"; return 0; } newCurve(ctmp); } in.close(); return 1; } /*! Writes the curve to a data file. First line is the number of points and then follows a list of x,y,z coordinates. If the \a tangents_flag is set to 1, the tangents of the points are also dropped (Default is 0). \sa readData() TODO : not ready, change doc!!! TODO : not ready!!! */ template<class Vector> int CurveBundle<Vector>::writeData(const char* outfile, const char* delimiter, int tangents_flag) { ofstream ofs(outfile,ios::trunc|ios::out); if(!ofs.good()) { cerr<<"CurveBundle::writeData() : File " << outfile <<" problem.\n"; return 0; } // Number of points on curve ofs << curves() << endl; for (int i=0;i<curves();i++) { if (!bundle[i].writeData(ofs,delimiter,tangents_flag)) { cerr << "CurveBundle::writeCurveData() : Problem writing curve " << i << endl; return 0; } } return 1; } /*! Read a file in VECT format. Infos about the format at http://www.geomview.org/docs/html/geomview_42.html Returns 1 if all went well, zero otherwise. Does not support the whole VECT spec. 
\sa computeTangents(),polygonalToArcs(),arcsToPolygonal(),writeVECT() */ template<class Vector> int CurveBundle<Vector>::readVECT(const char* infile) { ifstream in(infile, ios::in); if(!in.good()) { cerr<<"CurveBundle::readData() : File " << infile <<" problem.\n"; return 0; } char tmp[1024]; in.getline(tmp,sizeof tmp); if (strncmp(tmp,"VECT",4)) { cerr << "VECT expected, got " << tmp << endl; return 0; } // Read in number of curves in.getline(tmp,sizeof tmp); char tmp_coord[1024]; int NoCurves = atoi(strtok(tmp, " ")); // ignore total vertices and number of colors Vector v; Curve<Vector> ctmp; assert(NoCurves>0); int NoNodes[NoCurves]; in.getline(tmp, sizeof tmp); // ignore color NoNodes[0] = atoi(strtok(tmp," ")); for (int i=1;i<NoCurves;++i) NoNodes[i] = atoi(strtok(NULL," ")); in.getline(tmp, sizeof tmp); // ignore color // number of nodes for each for (int i=0;i<NoCurves;i++) { ctmp.flush_all(); for (int j=0;j<abs(NoNodes[i]);j++) { in.getline(tmp_coord,sizeof tmp_coord); if (tmp_coord[0]=='#') { j=j-1; continue; } v[0] = atof(strtok(tmp_coord," ")); for (int k=1;k<(int)v.type;k++) { v[k] = atof(strtok(NULL," ")); } ctmp.append(v,Vector3(0,0,0)); } if (NoNodes[i]<0) ctmp.link(); ctmp.computeTangents(); newCurve(ctmp); } // ignore the color part in.close(); return 1; } /*! Not implemented */ template<class Vector> int CurveBundle<Vector>::writeVECT(const char* outfile) { ofstream ofs(outfile,ios::trunc|ios::out); if(!ofs.good()) { cerr<<"CurveBundle::writeVECT() : File " << outfile <<" problem.\n"; return 0; } // Number of points on curve ofs << "VECT\n"; int verts = 0; for (int i=0;i<curves();++i) verts+=bundle[i].nodes(); ofs << curves() << " " << verts << " 0\n"; ofs.precision(20); for (int i=0;i<curves();i++) { ofs << (bundle[i].isClosed()?"-":"") << bundle[i].nodes() << endl; ofs << "0\n"; for (int k=0;k<bundle[i].nodes();++k) ofs << bundle[i][k].getPoint() << endl; } return 1; } /*! Comptes the tangents on each curve in the bundle. 
\sa Curve::computeTangents() */ template<class Vector> void CurveBundle<Vector>::computeTangents() { for (int i=0;i<curves();i++) bundle[i].computeTangents(); } /*! This function converts all polygonal curves into curves made of arcs of circles. \sa arcsTolPolygonal(), Curve::polygonalToArcs() */ template<class Vector> void CurveBundle<Vector>::polygonalToArcs() { for (int i=0;i<curves();i++) bundle[i].polygonalToArcs(); } /*! This function converts all the biarc curves into polygonal curves. \sa polygonalToArcs(),Curve::arcsToPolygonal */ template<class Vector> void CurveBundle<Vector>::arcsToPolygonal() { for (int i=0;i<curves();i++) bundle[i].arcsToPolygonal(); } #endif //
liaopeiyuan/ml-arsenal-public
settings.py
from datetime import datetime PATH= "kail" """ Local """ if PATH=='kail': print("Using paths on kail-main") CHECKPOINTS='/data/kaggle/salt/checkpoints' DATA='/data/kaggle/salt/' RESULT='/data/ml-arsenal/projects/TGS_salt' CODE='/data/ml-arsenal' CUDA_DEVICES='0,1' MODE='gpu' GRAPHICS=True IDENTIFIER = datetime.now().strftime('%Y-%m-%d_%H-%M-%S') if PATH=='kail0': print("Using paths on kail-main w. GTX 1080 Ti") CHECKPOINTS='/data/kaggle/salt/checkpoints' DATA='/data/kaggle/salt/' RESULT='/data/ml-arsenal/projects/TGS_salt' CODE='/data/ml-arsenal' CUDA_DEVICES='0' MODE='gpu' GRAPHICS=True IDENTIFIER = datetime.now().strftime('%Y-%m-%d_%H-%M-%S') if PATH=='kail1': print("Using paths on kail-main w. GTX 1070") CHECKPOINTS='/data/kaggle/salt/checkpoints' DATA='/data/kaggle/salt/' RESULT='/data/ml-arsenal/projects/TGS_salt' CODE='/data/ml-arsenal' CUDA_DEVICES='1' MODE='gpu' GRAPHICS=True IDENTIFIER = datetime.now().strftime('%Y-%m-%d_%H-%M-%S') if PATH=='local': print("Using local paths on alexanderliao@alexanderliao-Thinkpad-P50.") CHECKPOINTS='/home/alexanderliao/data/Kaggle/competitions/tgs-salt-identification-challenge/checkpoints' DATA='/home/alexanderliao/data/Kaggle/competitions/tgs-salt-identification-challenge' RESULT='/home/alexanderliao/data/GitHub/ml-arsenal/projects/TGS_salt' CODE='/home/alexanderliao/data/GitHub/ml-arsenal' CUDA_DEVICES='0' MODE='gpu' GRAPHICS=True IDENTIFIER = datetime.now().strftime('%Y-%m-%d_%H-%M-%S') if PATH=='gcp0': print("Using GCP paths on liaop20@kaggle.") CHECKPOINTS='/home/liaop20/data/salt/checkpoints' DATA='/home/liaop20/data/salt' RESULT='/home/liaop20/ml-arsenal/projects/TGS_salt' CODE='/home/liaop20/ml-arsenal' CUDA_DEVICES='0' MODE='gpu' GRAPHICS=False IDENTIFIER = datetime.now().strftime('%Y-%m-%d_%H-%M-%S') if PATH=='gcp1': print("Using GCP paths on liaop20@kaggle.") CHECKPOINTS='/home/liaop20/data/salt/checkpoints' DATA='/home/liaop20/data/salt' RESULT='/home/liaop20/ml-arsenal/projects/TGS_salt' 
CODE='/home/liaop20/ml-arsenal' CUDA_DEVICES='1' MODE='gpu' GRAPHICS=False IDENTIFIER = datetime.now().strftime('%Y-%m-%d_%H-%M-%S') if PATH=='gcp2': print("Using GCP paths on liaop20@kaggle.") CHECKPOINTS='/home/liaop20/data/salt/checkpoints' DATA='/home/liaop20/data/salt' RESULT='/home/liaop20/ml-arsenal/projects/TGS_salt' CODE='/home/liaop20/ml-arsenal' CUDA_DEVICES='2' MODE='gpu' GRAPHICS=False IDENTIFIER = datetime.now().strftime('%Y-%m-%d_%H-%M-%S') if PATH=='gcp3': print("Using GCP paths on liaop20@kaggle.") CHECKPOINTS='/home/liaop20/data/salt/checkpoints' DATA='/home/liaop20/data/salt' RESULT='/home/liaop20/ml-arsenal/projects/TGS_salt' CODE='/home/liaop20/ml-arsenal' CUDA_DEVICES='3' MODE='gpu' GRAPHICS=False IDENTIFIER = datetime.now().strftime('%Y-%m-%d_%H-%M-%S') if PATH=='gcp': print("Using GCP paths on liaop20@kaggle.") CHECKPOINTS='/home/liaop20/data/salt/checkpoints' DATA='/home/liaop20/data/salt' RESULT='/home/liaop20/ml-arsenal/projects/TGS_salt' CODE='/home/liaop20/ml-arsenal' CUDA_DEVICES='0,1,2,3' MODE='gpu' GRAPHICS=False IDENTIFIER = datetime.now().strftime('%Y-%m-%d_%H-%M-%S') if PATH=='aaron': print("Using paths on Aaron's PC.") CHECKPOINTS='/mydisk/Programming/Git/salt/checkpoints' DATA='/mydisk/Programming/Git/salt' RESULT='/mydisk/Programming/Git/ml-arsenal/projects/TGS_salt' CODE='/mydisk/Programming/Git/ml-arsenal' CUDA_DEVICES='0' MODE='gpu' GRAPHICS=False IDENTIFIER = datetime.now().strftime('%Y-%m-%d_%H-%M-%S') print('')
ysyluminous/Java_Learning
code/back/bxd/file/ProducerConsumerDemo.java
<filename>code/back/bxd/file/ProducerConsumerDemo.java<gh_stars>1-10 import com.sun.xml.internal.ws.policy.privateutil.LocalizationMessages; import com.sun.xml.internal.ws.runtime.config.TubelineFeatureReader; import java.util.concurrent.locks.Condition; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; /** * Created by <NAME> on 2016/7/13/013. */ class Resource { private String name; private int count = 1; private boolean flag = false; //造锁替换synchronized private Lock lock = new ReentrantLock(); private Condition condition = lock.newCondition(); //public synchronized void set(String name) public void set(String name)throws InterruptedException { lock.lock(); try{ //没有判断标记就去执行,改if为while始终判断标记 while (flag) //try {wait();}catch (Exception e){} condition.await(); this.name = name+"--"+count++; System.out.println(Thread.currentThread().getName()+"..生产者"+this.name); flag = true; //判断所有标记造成死锁,notifyAll唤醒所有线程 //this.notifyAll(); condition.signal(); } finally { lock.unlock(); } } public synchronized void out() { while (!flag) try {wait();}catch (Exception e){} System.out.println(Thread.currentThread().getName()+"..消费者...."+this.name); flag = false; this.notifyAll(); } } class Producer implements Runnable { private Resource res; Producer(Resource res) { this.res = res; } public void run() { while (true) { res.set("+商品+"); } } } class Consumer implements Runnable { private Resource res; Consumer(Resource res) { this.res = res; } public void run() { while (true) { res.out(); } } } public class ProducerConsumerDemo { public static void main(String args []) { Resource r = new Resource(); Producer pro = new Producer(r); Consumer con = new Consumer(r); Thread t1 = new Thread(pro); Thread t2 = new Thread(pro); Thread t3 = new Thread(con); Thread t4 = new Thread(con); t1.start(); t2.start(); t3.start(); t4.start(); } }
Th3Shadowbroker/nexus
src/main/java/org/th3shadowbroker/nexus/util/NumericRange.java
<gh_stars>0 package org.th3shadowbroker.nexus.util; import lombok.Data; import org.th3shadowbroker.nexus.exceptions.RangeFormatException; import java.util.Random; /** * Class for handling ranges like '5-10'. */ @Data public class NumericRange { /** * The minimum. */ private final double min; /** * The maximum. */ private final double max; /** * A random for generating randoms within the range. */ private final Random random; /** * Create a new numeric range. * The order of the passed integers doesn't matter. * @param a The first value. * @param b The second value. */ public NumericRange(double a, double b) { this.min = Math.min(a, b); this.max = Math.max(a, b); this.random = new Random(); } /** * Get an integer within the range. * @return A random integer. */ public int getIntWithin() { if (!isActualRange()) return getMinInt(); return getMinInt() + random.nextInt(getMaxInt() + 1 - getMinInt()); } /** * Get a double within the range. * @return A random double. */ public double getDoubleWithing() { if (!isActualRange()) return max; return min + (max - min) * random.nextDouble(); } /** * True if min isn't max. * @return True if min isn't max. */ public boolean isActualRange() { return min != max; } /** * Get the minimum as an integer. * @return The minimum. */ public int getMinInt() { return (int) min; } /** * Get the maximum as an integer. * @return The maximum. */ public int getMaxInt() { return (int) max; } @Override public String toString() { return toString('-'); } public String toString(char separator) { return String.format("%s%s%s", min, separator, max); } /** * Parse the given string. * @param value The string. * @return The parsed range. * @throws RangeFormatException If the value couldn't be parsed. */ public static NumericRange parse(String value) throws RangeFormatException { return parse(value, '-'); } /** * Parse the given string. * @param value The string. * @param separator The separator. * @return The parsed range. 
* @throws RangeFormatException If the value couldn't be parsed. */ public static NumericRange parse(String value, char separator) throws RangeFormatException { String[] splitted = value.split(String.valueOf(separator)); if (splitted.length == 2) { try { // Parse values double a = Double.parseDouble(splitted[0]); double b = Double.parseDouble(splitted[1]); return new NumericRange(a, b); // a or b is not a valid double } catch (NumberFormatException ex) { throw new RangeFormatException(value, ex); } } // Invalid input throw new RangeFormatException(value, null); } }
windsome/windpress
src/routes/Coin/components/MyArtList.js
<filename>src/routes/Coin/components/MyArtList.js var xdebug = window.myDebug('COIN:ArtworkList') import React, { Component, PropTypes } from 'react' import { IndexLink, Link } from 'react-router' import ReactModal from 'react-modal'; import _ from 'lodash'; import moment from 'moment'; //import classNames from 'classnames'; import styles2 from './styles.scss'; import styles from 'components/widgets/Widgets.scss'; import classNames from 'classnames/bind'; const cx = classNames.bind(styles); import {utilPostStatusToString} from '../modules/utils'; import { userHasCap } from '../common'; ReactModal.setAppElement('#root'); const Progress = ({total, current}) => { current = parseInt(current); var percent = total && current && Math.floor(current*100/total)+'%' || '0%'; //xdebug ("progress:", total, current); return ( <div style={{overflow:'hidden'}}> <div className="progress" style={{marginBottom:0, backgroundColor:'#ccc', height:5}}> <div className="progress-bar" role="progressbar" aria-valuenow="60" aria-valuemin="0" aria-valuemax="100" style={{width: percent, color:'#f0f', backgroundColor:'#ff0'}}><span className="sr-only"> {percent} </span></div> </div> <div style={{height:'1.1em'}}> <div className="pull-left text-left">总需<b>{total}</b>注</div> <div className="pull-right text-right">剩余<b style={{color:'#f00'}}>{total-current}</b>注</div> </div> </div> ) } const Card = ({art}) => { var ops = art.ops; //xdebug ("art:", art); var id = art && art.id || 0; var userId = art && art.userId || 0; var name = art && art.desc && art.desc.name || '未命名艺术作品'; var createdAt = art && moment(new Date(art.createdAt)).format('YYYY-MM-DD HH:mm') || ' '; var excerpt = art && art.desc && art.desc.excerpt || '无摘要艺术作品'; var images = art && art.images || ['/images/none.jpg']; var statusString = utilPostStatusToString (art.status); var viewUrl = '/coin/shop/'+userId+'/'+id; return ( <div className="clearfix" style={{marginTop:5, marginBottom:5, paddingBottom:5, backgroundColor:'#f6f6f6'}}> <div 
className={ cx('imageContainer') }> <Link className={ cx('media-photo', 'media-cover') } to={ viewUrl }> <div className={ cx('listing-img-container','media-cover','text-center') }> <img src={ images[0] } className={ cx('responsive-imgwh') } alt={excerpt}/> </div> </Link> </div> <div className={cx('infoContainer')}> <Link className={cx('linkContainer')} to={ viewUrl }> <Progress total={art.count} current={art.paid}/> <div> <div className={cx('text_size_small_weight_light_inline')}> <div className={cx('detailContainer')} style={{textOverflow: 'ellipsis',overflow: 'hidden'}}>{"编号"+id+" "+name }</div> </div> </div> </Link> <div> <div className={cx('text_size_small_weight_light_inline')} style={{float:'left'}}> <div className={cx('detailContainer')} style={{textOverflow: 'ellipsis',overflow: 'hidden'}}><sub>{ statusString+" "+createdAt }</sub></div> </div> { ops && ops.opEdit && <Link onClick={(e)=>{ops.opEdit(e, id)}} to={ '/coin/edit/'+id } style={{marginLeft:5, marginRight:5}}> <span>编辑</span> </Link> } { ops && ops.opDelete && <Link onClick={(e)=>{ops.opDelete(e, id)}} style={{marginLeft:5, marginRight:5}}> <span>删除</span> </Link> } { ops && ops.opCheckRequest && <Link onClick={(e)=>{ops.opCheckRequest(e, id)}} style={{marginLeft:5, marginRight:5}}> <span>提交审核</span> </Link> } { ops && ops.opCheckFail && <Link onClick={(e)=>{ops.opCheckFail(e, id)}} style={{marginLeft:5, marginRight:5}}> <span>退回修改</span> </Link> } { ops && ops.opPublish && <Link onClick={(e)=>{ops.opPublish(e, id)}} style={{marginLeft:5, marginRight:5}}> <span>发布</span> </Link> } { ops && ops.opSetFavor && <Link onClick={(e)=>{ops.opSetFavor(e, id)}} style={{marginLeft:5, marginRight:5}}> <span>推荐</span> </Link> } { ops && ops.opUnsetFavor && <Link onClick={(e)=>{ops.opUnsetFavor(e, id)}} style={{marginLeft:5, marginRight:5}}> <span>取消推荐</span> </Link> } { ops && ops.opRefund && <Link onClick={(e)=>{ops.opRefund(e, id)}} style={{marginLeft:5, marginRight:5}}> <span>退款</span> </Link> } </div> </div> 
</div> ) } const ArtworkList = ({items}) => { var artworks = items && items.map ((item, index)=>{ return ( <li key={index} className="col-xs-12 col-sm-4 appmsg_item_v js_appmsgitem"> <Card art={item}/></li> ) }); return ( <div className="container"> { !_.isEmpty(items) && <ul className="row inner_list_v" style={{listStyle:'none', paddingLeft: 0}}> {artworks} </ul> } { _.isEmpty(items) && <div style={{textAlign:'center'}}> 没有上传宝贝,返回发布一个 </div> } </div> ) } export default class Page extends Component { static propTypes = { goBack: PropTypes.func.isRequired, push: PropTypes.func.isRequired, replace: PropTypes.func.isRequired, retrievePosts: PropTypes.func.isRequired, removePost: PropTypes.func.isRequired, updatePost: PropTypes.func.isRequired, refundPost: PropTypes.func.isRequired, retrieveSomePosts: PropTypes.func.isRequired, pageName: PropTypes.string.isRequired, postStatus: PropTypes.object.isRequired, postDb: PropTypes.object.isRequired, refund: PropTypes.object.isRequired, cuser: PropTypes.object.isRequired, } constructor (props) { super (props); this.state = { modalIsOpen: false, modalMessage: '', modalFunc: null, } } componentDidMount() { this.props.retrievePosts (); } deletePost (id) { var { removePost, retrievePosts } = this.props; this.setState({modalIsOpen:false, modalMessage:null, modalFunc:null}); if (id) { removePost && removePost (id).then ((retobj)=> { if (retobj && retobj.count > 0) { xdebug ("delete ok! refresh post list!"); retrievePosts && retrievePosts (); } else { xdebug ("delete fail!"); } }) } } updatePostStatus (id, status) { var { updatePost, retrievePosts, retrieveSomePosts } = this.props; this.setState({modalIsOpen:false, modalMessage:null, modalFunc:null}); if (id) { updatePost && updatePost ({id, status}).then ((retobj)=> { if (retobj && retobj.count > 0) { xdebug ("delete ok! 
refresh post list!"); retrieveSomePosts && retrieveSomePosts ([id]); } else { xdebug ("delete fail!"); } }) } } updatePostFavor (id, favor) { var { updatePost, retrievePosts, retrieveSomePosts } = this.props; this.setState({modalIsOpen:false, modalMessage:null, modalFunc:null}); if (id) { updatePost && updatePost ({id, favor}).then ((retobj)=> { if (retobj && retobj.count > 0) { xdebug ("delete ok! refresh post list!"); retrieveSomePosts && retrieveSomePosts ([id]); } else { xdebug ("delete fail!"); } }) } } refundOnePost (id) { var { refundPost, retrieveSomePosts } = this.props; this.setState({modalIsOpen:false, modalMessage:null, modalFunc:null}); if (id) { refundPost && refundPost (id).then ((retobj)=> { if (retobj && retobj.count > 0) { xdebug ("refund ok! refresh post list!"); retrieveSomePosts && retrieveSomePosts ([id]); } else { xdebug ("refund fail!", retobj); } }) } } render () { var { goBack, push, replace, retrievePosts, postDb, postStatus, pageName, cuser } = this.props; var userId = cuser && cuser.user && cuser.user.id; var postList = postStatus && postStatus.result || []; xdebug ("render:", this.props); var isRoot = userHasCap (cuser.user, 'ROOT'); var canPostFavor = userHasCap (cuser.user, 'POST_FAVOR'); var canPublish = userHasCap (cuser.user, 'SHOP_AGENT'); var canPostPublish = userHasCap (cuser.user, 'POST_PUBLISH'); var canPostRefund = userHasCap (cuser.user, 'POST_REFUND'); var total = postStatus && postStatus.total || 0; var count = postStatus && postStatus.result && postStatus.result.length || 0; var page = postStatus && postStatus.page || 0; const retriveRefresh = () => { retrievePosts (0); } const retriveMore = () => { retrievePosts (page+1); } var isFetching = postStatus && postStatus.fetching; var message = postStatus && postStatus.error && postStatus.error.message; if (isFetching) message = "正在加载中。。。"; var opEdit = (evt, id) => { evt.preventDefault(); push ('/coin/edit/'+id); }; var opDelete = (evt, id) => { evt.preventDefault(); 
this.setState({modalIsOpen:true, modalMessage: '确定要删除吗?', modalFunc: ()=>this.deletePost(id)}); }; var opCheckRequest = (evt, id) => { evt.preventDefault(); this.setState({modalIsOpen:true, modalMessage: '确定要提交审核吗?', modalFunc: ()=>this.updatePostStatus(id,1)}); }; var opCheckFail = (evt, id) => { evt.preventDefault(); this.setState({modalIsOpen:true, modalMessage: '确定要打回修改吗?', modalFunc: ()=>this.updatePostStatus(id,3)}); }; var opPublish = (evt, id) => { evt.preventDefault(); this.setState({modalIsOpen:true, modalMessage: '确定要发布吗?', modalFunc: ()=>this.updatePostStatus(id,2)}); }; var opSetFavor = (evt, id) => { evt.preventDefault(); this.setState({modalIsOpen:true, modalMessage: '确定要推荐吗?', modalFunc: ()=>this.updatePostFavor(id,1)}); }; var opUnsetFavor = (evt, id) => { evt.preventDefault(); this.setState({modalIsOpen:true, modalMessage: '确定要取消推荐吗?', modalFunc: ()=>this.updatePostFavor(id,0)}); }; var opRefund = (evt, id) => { evt.preventDefault(); this.setState({modalIsOpen:true, modalMessage: '确定要退款吗?', modalFunc: ()=>this.refundOnePost(id)}); }; var handleCloseModal = () => { this.setState({modalIsOpen:false, modalMessage:null, modalFunc:null}); } var artworks = postList && postList.map ((id, index)=>{ var artwork = postDb && postDb[id]; var status = artwork.status; var favor = artwork.favor; var isOwner = artwork.owner == userId; var ops = {}; switch (status) { case 0: case 3: if (isRoot) { if (isOwner) ops = {opEdit, opDelete, opPublish}; else ops = {opEdit, opDelete, opCheckFail, opPublish}; } else if (isOwner) { if (canPublish) { ops = {opEdit, opDelete, opPublish}; } else { ops = {opEdit, opDelete, opCheckRequest}; } } break; case 1: if (isRoot) { ops = { opCheckFail, opPublish }; } else if (isOwner) { if (canPublish) { ops = { opCheckFail, opPublish }; } } break; case 2: if (canPostFavor) { if (favor == 1) ops = { ...ops, opUnsetFavor }; else ops = { ...ops, opSetFavor }; } break; case 4: case 5: case 6: ops = { opRefund }; break; case 7: break; } 
artwork.ops = ops; return artwork; }); return ( <div style={{paddingTop:50}}> <nav className="navbar navbar-default navbar-fixed-top" style={{lineHeight:'50px', fontSize:20, backgroundColor:'#db3652', color:'#fff', whiteSpace:'nowrap'}}> <div style={{float:'left', marginLeft:5, boxSizing:'inline-block'}}><Link to="/coin" onClick={(e)=>{e.preventDefault(); goBack && goBack();} }> <i className="glyphicon glyphicon-menu-left img-circle" style={{top:5, color:'#ddd', opacity:1, backgroundColor:'#333', padding:5}}/> </Link></div> <div style={{float:'left', paddingLeft: 50}}> <span>{pageName}</span> </div> </nav> <div> {message} </div> <div> { (count != 0) && <div style={{marginTop:10, textAlign:'center'}} onClick={retriveRefresh}>点此可刷新</div> } <ArtworkList items={artworks}/> { count < total && <div style={{marginTop:10, textAlign:'center'}} onClick={retriveMore}>点此加载更多...</div> } </div> <ReactModal isOpen={this.state.modalIsOpen} contentLabel="对话框" onRequestClose={handleCloseModal}> <div className="modal-content" style={{zIndex:1000}}> <div className="modal-header"> <button type="button" className="close" onClick={handleCloseModal}> <span aria-hidden="true">&times;</span> <span className="sr-only">Close</span> </button> <h5 className="modal-title">确认您的操作!</h5> </div> <div className="modal-body container" style={{textAlign: 'center'}}> <h4>{this.state.modalMessage || '错误操作!'}</h4> </div> <div className="modal-footer"> <div style={{backgroundColor:'#db3652', color:'#fff', textAlign:'center', fontSize:20, padding:5}} onClick={this.state.modalFunc}> <span>确定</span> </div> </div> </div> </ReactModal> </div> ); } }
e-amzallag/mondialrelay-api
src/main/java/org/dajlab/mondialrelayapi/soap/ArrayOfString.java
package org.dajlab.mondialrelayapi.soap;

import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;

/**
 * JAXB binding for the {@code ArrayOfString} complex type (a SOAP wrapper
 * around a simple list of strings).
 *
 * <p>
 * The following schema fragment specifies the expected content of this class.
 *
 * <pre>
 * &lt;complexType name="ArrayOfString">
 *   &lt;complexContent>
 *     &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
 *       &lt;sequence>
 *         &lt;element name="string" type="{http://www.w3.org/2001/XMLSchema}string" maxOccurs="unbounded" minOccurs="0"/>
 *       &lt;/sequence>
 *     &lt;/restriction>
 *   &lt;/complexContent>
 * &lt;/complexType>
 * </pre>
 *
 * NOTE(review): this class appears to be generated (wsimport/xjc style);
 * regenerating from the WSDL would overwrite manual changes.
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "ArrayOfString", propOrder = { "string" })
public class ArrayOfString {

    // Backing list; lazily created by getString(). Nillable per the schema.
    @XmlElement(nillable = true)
    protected List<String> string;

    /**
     * Gets the value of the string property.
     *
     * <p>
     * This accessor method returns a reference to the live list, not a snapshot.
     * Therefore any modification you make to the returned list will be present
     * inside the JAXB object. This is why there is not a <CODE>set</CODE> method
     * for the string property.
     *
     * <p>
     * For example, to add a new item, do as follows:
     *
     * <pre>
     * getString().add(newItem);
     * </pre>
     *
     * <p>
     * Objects of the following type(s) are allowed in the list {@link String }
     *
     * @return the live, never-null backing list
     */
    public List<String> getString() {
        if (string == null) {
            string = new ArrayList<String>();
        }
        return this.string;
    }

}
fakecoinbase/ebaklundslashethtax
src/runtime-types/address-string.js
'use strict';

const t = require('flow-runtime');

// Runtime refinement type for lower-case, 0x-prefixed address strings.
// flow-runtime refinement predicates return an error message string for
// invalid input and `undefined` (no message) for valid input.
const AddressString = t.refinement(t.any(), input => {
  // FIX: the original pattern /0x[0-9a-f]{20}/ was unanchored and required
  // only 20 hex digits, so any string merely *containing* such a run passed.
  // A canonical address is 20 bytes = exactly 40 hex digits; anchor the
  // pattern so nothing before or after the address is tolerated.
  if (!t.string().accepts(input) || !/^0x[0-9a-f]{40}$/.test(input))
    return 'must be an address string (lower case)';
});

t.AddressString = () => AddressString;
alemosan1/prueba2
src/main/java/es/tid/topologyModuleBase/COPServiceTopology/client/auth/Authentication.java
package es.tid.topologyModuleBase.COPServiceTopology.client.auth;

import java.util.Map;

import es.tid.topologyModuleBase.COPServiceTopology.client.Pair;

import java.util.List;

/**
 * Strategy interface for request authentication schemes used by the generated
 * API client. Implementations mutate the outgoing request's query parameters
 * and/or headers in place to attach credentials.
 */
@javax.annotation.Generated(value = "class io.swagger.codegen.languages.JavaClientCodegen", date = "2016-05-27T13:24:30.808+02:00")
public interface Authentication {
    /**
     * Apply authentication settings to header and query params.
     *
     * @param queryParams  mutable list of query parameters; implementations may
     *                     append credential pairs (e.g. API-key query params)
     * @param headerParams mutable map of header name to value; implementations
     *                     may add credential headers (e.g. Authorization)
     */
    void applyToParams(List<Pair> queryParams, Map<String, String> headerParams);
}
Peakstep233/Yttr
src/main/java/com/unascribed/yttr/mixin/effector/MixinVoxelShapes.java
package com.unascribed.yttr.mixin.effector;

import java.util.stream.Stream;

import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.injection.At;
import org.spongepowered.asm.mixin.injection.Inject;
import org.spongepowered.asm.mixin.injection.ModifyVariable;
import org.spongepowered.asm.mixin.injection.callback.CallbackInfoReturnable;
import org.spongepowered.asm.mixin.injection.callback.LocalCapture;

import com.unascribed.yttr.mixinsupport.YttrWorld;

import net.minecraft.block.BlockState;
import net.minecraft.block.Blocks;
import net.minecraft.block.ShapeContext;
import net.minecraft.util.math.AxisCycleDirection;
import net.minecraft.util.math.BlockPos;
import net.minecraft.util.math.Box;
import net.minecraft.util.math.Direction;
import net.minecraft.util.shape.VoxelShape;
import net.minecraft.util.shape.VoxelShapes;
import net.minecraft.world.WorldView;

/**
 * Makes {@code VoxelShapes.calculatePushVelocity} treat "phased" block
 * positions (per {@link YttrWorld#yttr$isPhased}) as void air, so entities are
 * not pushed by blocks that are currently phased out (presumably by the
 * effector feature, judging from the package name — TODO confirm).
 *
 * NOTE(review): {@code yttr$currentlyCheckingPos} is a static mutable field
 * shared by all callers; this is only safe if calculatePushVelocity is never
 * entered concurrently from multiple threads — not verifiable from this file.
 */
@Mixin(VoxelShapes.class)
public class MixinVoxelShapes {

	// Cursor of the block position currently being tested by the vanilla loop
	// inside calculatePushVelocity; null outside that method. Captured in
	// storeMutable(), consulted in replaceBlockState(), cleared in forgetMutable().
	private static BlockPos yttr$currentlyCheckingPos = null;

	// Swaps the BlockState local (assigned from WorldView.getBlockState) for
	// VOID_AIR's default state when the position being checked is phased,
	// which makes the vanilla collision code skip it.
	@ModifyVariable(at=@At(value="INVOKE_ASSIGN", target="net/minecraft/world/WorldView.getBlockState(Lnet/minecraft/util/math/BlockPos;)Lnet/minecraft/block/BlockState;"), ordinal=0,
			method="calculatePushVelocity(Lnet/minecraft/util/math/Box;Lnet/minecraft/world/WorldView;DLnet/minecraft/block/ShapeContext;Lnet/minecraft/util/math/AxisCycleDirection;Ljava/util/stream/Stream;)D")
	private static BlockState replaceBlockState(BlockState in, Box box, WorldView world) {
		if (world instanceof YttrWorld && yttr$currentlyCheckingPos != null && ((YttrWorld)world).yttr$isPhased(yttr$currentlyCheckingPos)) {
			return Blocks.VOID_AIR.getDefaultState();
		}
		return in;
	}

	// Captures the BlockPos.Mutable local from the target method. Note: `mut`
	// is the *live* mutable cursor, so the stored reference tracks the loop's
	// current position without further injections — this aliasing is the point.
	@Inject(at=@At(value="INVOKE", target="net/minecraft/util/math/Box.getMin(Lnet/minecraft/util/math/Direction$Axis;)D"),
			method="calculatePushVelocity(Lnet/minecraft/util/math/Box;Lnet/minecraft/world/WorldView;DLnet/minecraft/block/ShapeContext;Lnet/minecraft/util/math/AxisCycleDirection;Ljava/util/stream/Stream;)D",
			locals=LocalCapture.CAPTURE_FAILHARD)
	private static void storeMutable(Box arg1, WorldView arg2, double arg3, ShapeContext arg4, AxisCycleDirection arg5, Stream<VoxelShape> arg6, CallbackInfoReturnable<Double> ci,
			AxisCycleDirection var1, Direction.Axis var2, Direction.Axis var3, Direction.Axis var4, BlockPos.Mutable mut) {
		yttr$currentlyCheckingPos = mut;
	}

	// Clears the cursor on every return path so a stale reference cannot leak
	// into unrelated later calls.
	@Inject(at=@At("RETURN"), method="calculatePushVelocity(Lnet/minecraft/util/math/Box;Lnet/minecraft/world/WorldView;DLnet/minecraft/block/ShapeContext;Lnet/minecraft/util/math/AxisCycleDirection;Ljava/util/stream/Stream;)D")
	private static void forgetMutable(Box arg1, WorldView arg2, double arg3, ShapeContext arg4, AxisCycleDirection arg5, Stream<VoxelShape> arg6, CallbackInfoReturnable<Double> ci) {
		yttr$currentlyCheckingPos = null;
	}

}
ajitda/flexerp
node_modules/redeyed/test/redeyed-smoke.js
'use strict';
/*jshint asi: true*/

// Smoke test: applies redeyed to every .js file of bundled libraries and
// checks that each `var` keyword got wrapped as `+var-`.

var test = require('tap').test
  , path = require('path')
  , fs = require('fs')
  , readdirp = require('readdirp')
  , redeyed = require('..')
  , esprima = require('esprima')
  , node_modules = path.join(__dirname, '..', 'node_modules')
  , tapdir = path.join(node_modules, 'tap')
  , esprimadir = path.join(node_modules, 'esprima')

test('tap', function (t) {
  // Files esprima cannot parse; skipped below.
  // FIX: the original wrote `[ , '...' ]` — the leading comma created a
  // sparse array with an accidental hole at index 0.
  var invalidTapFiles = [
    'slide/lib/async-map-ordered.js'
  ]

  // True unless the path contains one of the blacklist entries.
  // (Parameter renamed from `path`, which shadowed the path module, and the
  // assignment-inside-return trick replaced by a plain predicate.)
  function shouldProcess (fullPath) {
    return invalidTapFiles.every(function (entry) {
      return fullPath.indexOf(entry) < 0
    })
  }

  // True if the tokenized code contains a `var` keyword. A shebang line is
  // commented out first because esprima rejects it.
  function containsVarKeyword (code) {
    code = code.replace(/^#!([^\r\n]+)/, function (match, captured) {
      return '//' + captured
    })
    return esprima.tokenize(code).some(function (tok) {
      return tok.type === 'Keyword' && tok.value === 'var'
    })
  }

  readdirp({ root: tapdir, fileFilter: '*.js' })
    .on('data', function (entry) {
      var code = fs.readFileSync(entry.fullPath, 'utf-8')
      if (!shouldProcess(entry.fullPath) || !containsVarKeyword(code)) return

      // Exercise both code paths: AST-based and tokenize-based rewriting.
      var resultAst = redeyed(code, { Keyword: { 'var': '+:-' } }, { buildAst: true }).code
        , resultTokenize = redeyed(code, { Keyword: { 'var': '+:-' } }, { buildAst: false }).code

      // Either the wrapper shows up, or the file had no standalone `var `.
      t.assert(~resultAst.indexOf('+var-') || !(~resultAst.indexOf('var ')), 'redeyed ' + entry.path)
      t.assert(~resultTokenize.indexOf('+var-') || !(~resultTokenize.indexOf('var ')), 'redeyed ' + entry.path)
    })
    .on('end', t.end.bind(t))
})

test('esprima', function (t) {
  readdirp({ root: esprimadir, fileFilter: '*.js' })
    .on('data', function (entry) {
      var code = fs.readFileSync(entry.fullPath, 'utf-8')
        , resultAst = redeyed(code, { Keyword: { 'var': '+:-' } }, { buildAst: true }).code
        , resultTokenize = redeyed(code, { Keyword: { 'var': '+:-' } }, { buildAst: false }).code

      t.assert(~resultAst.indexOf('+var-') || !(~resultAst.indexOf('var ')), 'redeyed ' + entry.path)
      t.assert(~resultTokenize.indexOf('+var-') || !(~resultTokenize.indexOf('var ')), 'redeyed ' + entry.path)
    })
    .on('end', t.end.bind(t))
})

test('redeyed', function (t) {
  readdirp({ root: path.join(__dirname, '..'), fileFilter: '*.js', directoryFilter: ['!.git', '!node_modules' ] })
    .on('data', function (entry) {
      var code = fs.readFileSync(entry.fullPath, 'utf-8')
        , result = redeyed(code, { Keyword: { 'var': '+:-' } }).code

      t.assert(~result.indexOf('+var-') || !(~result.indexOf('var ')), 'redeyed ' + entry.path)
    })
    .on('end', t.end.bind(t))
})
jeremyagray/relue
tests/relue/math/hailstone.js
'use strict';

const chai = require('chai');
const assert = chai.assert;

const relue = require('../../../index.js');

suite('relue.math.hailstone()', function() {
  // Drains `generator`, asserting each yielded value is a number equal to the
  // corresponding element of `expected`.
  // FIX: the original loops called generator.next() twice per iteration —
  // once in the `while` condition and again in the body — silently skipping
  // every other value, and never incremented `i`, so only expected[0] was
  // ever compared. This helper advances the generator exactly once per value.
  function assertSequence(generator, expected) {
    let i = 0;
    let result = generator.next();
    while (!result.done) {
      assert.isNumber(result.value, 'Numbers should be returned.');
      assert.equal(result.value, expected[i], 'Hailstone sequences should be equal.');
      i++;
      result = generator.next();
    }
  }

  test('relue.math.hailstone() is a function', function(done) {
    assert.equal(typeof relue.math.hailstone, 'function',
      'relue.math.hailstone should be a function.');
    done();
  });

  test('relue.math.hailstone(), input type checks', function(done) {
    // Invalid arguments should make the generator yield null.
    let generator = relue.math.hailstone(null);
    assert.isNull(generator.next().value, 'Null is not a valid argument.');
    generator = relue.math.hailstone(undefined);
    assert.isNull(generator.next().value, 'Undefined is not a valid argument.');
    generator = relue.math.hailstone(true);
    assert.isNull(generator.next().value, 'True is not a valid argument.');
    generator = relue.math.hailstone(false);
    assert.isNull(generator.next().value, 'False is not a valid argument.');
    generator = relue.math.hailstone('');
    assert.isNull(generator.next().value, 'The empty string is not a valid argument.');
    generator = relue.math.hailstone('a');
    assert.isNull(generator.next().value, '"a" is not a valid argument.');
    generator = relue.math.hailstone('prime');
    assert.isNull(generator.next().value, '"prime" is not a valid argument.');
    done();
  });

  test('relue.math.hailstone(-1)', function(done) {
    assertSequence(relue.math.hailstone(-1), [0]);
    done();
  });

  test('relue.math.hailstone(0)', function(done) {
    assertSequence(relue.math.hailstone(0), [0]);
    done();
  });

  test('relue.math.hailstone(1)', function(done) {
    assertSequence(relue.math.hailstone(1), [1]);
    done();
  });

  test('relue.math.hailstone(2)', function(done) {
    const hailstone = [2, 1];
    let i = 0;
    for (let actual of relue.math.hailstone(2)) {
      const expected = hailstone[i];
      assert.isNumber(actual, 'Numbers should be returned.');
      assert.equal(actual, expected, 'Hailstone sequences should be equal.');
      i++;
    }
    done();
  });

  test('relue.math.hailstone(5)', function(done) {
    const hailstone = [5, 16, 8, 4, 2, 1];
    let i = 0;
    for (let actual of relue.math.hailstone(5)) {
      const expected = hailstone[i];
      assert.isNumber(actual, 'Numbers should be returned.');
      assert.equal(actual, expected, 'Hailstone sequences should be equal.');
      i++;
    }
    done();
  });

  test('relue.math.hailstone(7)', function(done) {
    const hailstone = [
      7, 22, 11, 34, 17, 52, 26, 13, 40, 20, 10, 5, 16, 8, 4, 2, 1
    ];
    let i = 0;
    for (let actual of relue.math.hailstone(7)) {
      const expected = hailstone[i];
      assert.isNumber(actual, 'Numbers should be returned.');
      assert.equal(actual, expected, 'Hailstone sequences should be equal.');
      i++;
    }
    done();
  });
});
logV/superfluous
superfluous/components/about_sidebar/events.js
<reponame>logV/superfluous "use strict"; // http://stackoverflow.com/questions/2905867/how-to-scroll-to-specific-item-using-jquery function focusElement(scrollTo) { var container = $("body,html"); container.stop(true, true).animate({ scrollTop: scrollTo.offset().top - container.offset().top }); } module.exports = { events: { "click .content-link" : "handle_focus_heading", }, handle_focus_heading: function(el) { var hash = el.target.hash; var el = $(hash); focusElement(el); } };
jacogreyling/pexeso
config/manifest.js
'use strict';

const Confidence = require('confidence');
const Config = require('./config');

// Confidence filter criteria: values tagged $filter:'env' below are resolved
// against NODE_ENV at lookup time.
const criteria = {
    env: process.env.NODE_ENV
};

// Glue/composition manifest for the hapi server: server settings, one web
// connection, and the full plugin registration list.
const manifest = {
    server: {
        debug: {
            request: ['error']
        },
        // Shared Redis-backed server cache.
        cache: {
            name: 'redisCache',
            engine: require('catbox-redis'),
            shared: true,
            partition: 'cache',
            url: Config.get('/hapiRedis/url')
        },
        connections: {
            routes: {
                security: true
            }
        }
    },
    connections: [{
        port: Config.get('/port/web'),
        labels: ['web'],
        state: {
            isHttpOnly: false,
            // Secure cookies only in production (resolved via `criteria` above).
            isSecure: {
                $filter: 'env',
                production: true,
                $default: false
            }
        }
    }],
    registrations: [
        { plugin: 'bell' },
        { plugin: 'blipp' },
        { plugin: 'inert' },
        { plugin: 'vision' },
        { plugin: 'hapi-auth-cookie' },
        // CSRF protection in RESTful mode (token via header, not form field).
        { plugin: { register: 'crumb', options: { restful: true } } },
        // Server-side React view rendering for .jsx templates.
        { plugin: { register: 'visionary', options: {
            engines: { jsx: 'hapi-react-views' },
            compileOptions: { removeCacheRegExp: '.jsx' },
            relativeTo: __dirname + '/../',
            path: './server/web'
        } } },
        // MongoDB models exposed via hapi-mongo-models.
        { plugin: { register: 'hapi-mongo-models', options: {
            mongodb: Config.get('/hapiMongoModels/mongodb'),
            models: {
                Account: './server/models/account',
                AdminGroup: './server/models/admin-group',
                Admin: './server/models/admin',
                AuthAttempt: './server/models/auth-attempt',
                Session: './server/models/session',
                User: './server/models/user',
                Statistic: './server/models/statistic',
                Score: './server/models/score',
                Event: './server/models/event'
            },
            autoIndex: Config.get('/hapiMongoModels/autoIndex')
        } } },
        // Process monitoring / logging to stdout every 5s.
        { plugin: { register: 'good', options: {
            ops: { interval: 5000 },
            reporters: {
                consoleReporter: [
                    { module: 'good-squeeze', name: 'Squeeze', args: [{ log: '*', response: '*' }] },
                    { module: 'good-console' },
                    'stdout'
                ]
            }
        } } },
        { plugin: 'hapi-io' },
        { plugin: { register: './server/hapi-io-redis', options: {
            connection: { url: Config.get('/hapiRedis/url') }
        } } },
        { plugin: './server/telemetry' },
        { plugin: './server/auth' },
        { plugin: './server/mailer' },
        // HTTPS redirect is currently disabled; re-enable when SSL is configured.
        /*{ plugin: { register: './server/https-redirect', options: { redirect: Config.get('/ssl') } } },*/
        // JSON API routes, all mounted under /api.
        { plugin: './server/api/accounts', options: { routes: { prefix: '/api' } } },
        { plugin: './server/api/admin-groups', options: { routes: { prefix: '/api' } } },
        { plugin: './server/api/admins', options: { routes: { prefix: '/api' } } },
        { plugin: './server/api/auth-attempts', options: { routes: { prefix: '/api' } } },
        { plugin: './server/api/contact', options: { routes: { prefix: '/api' } } },
        { plugin: './server/api/index', options: { routes: { prefix: '/api' } } },
        { plugin: './server/api/login', options: { routes: { prefix: '/api' } } },
        { plugin: './server/api/logout', options: { routes: { prefix: '/api' } } },
        { plugin: './server/api/sessions', options: { routes: { prefix: '/api' } } },
        { plugin: './server/api/signup', options: { routes: { prefix: '/api' } } },
        { plugin: './server/api/verify-account', options: { routes: { prefix: '/api' } } },
        { plugin: './server/api/users', options: { routes: { prefix: '/api' } } },
        { plugin: './server/api/statistics', options: { routes: { prefix: '/api' } } },
        { plugin: './server/api/scores', options: { routes: { prefix: '/api' } } },
        { plugin: './server/api/events', options: { routes: { prefix: '/api' } } },
        { plugin: './server/api/monitor', options: { routes: { prefix: '/api' } } },
        // Server-rendered web pages.
        { plugin: './server/web/account' },
        { plugin: './server/web/admin' },
        { plugin: './server/web/main' },
        { plugin: './server/web/public' },
        { plugin: './server/web/status' }
    ]
};

const store = new Confidence.Store(manifest);

// Resolve a manifest value (Confidence filters applied against NODE_ENV).
exports.get = function (key) {
    return store.get(key, criteria);
};

// Resolve metadata for a manifest key.
exports.meta = function (key) {
    return store.meta(key, criteria);
};
idmontie/gptp
BurningGround/src/hooks/bunker_hooks.cpp
#include "bunker_hooks.h"
#include "../SCBW/enumerations.h"

/// Checks whether the unit can attack from inside a bunker.
/// Default StarCraft behavior: only the infantry unit types below (and their
/// hero variants) may fire while loaded in a bunker.
bool unitCanAttackInsideBunkerHook(const CUnit *unit) {
  const u16 unitId = unit->id;
  return unitId == UnitId::TerranMarine
      || unitId == UnitId::Hero_JimRaynorMarine
      || unitId == UnitId::TerranGhost
      || unitId == UnitId::Hero_SarahKerrigan
      || unitId == UnitId::Hero_AlexeiStukov
      || unitId == UnitId::Hero_SamirDuran
      || unitId == UnitId::Hero_InfestedDuran
      || unitId == UnitId::TerranFirebat
      || unitId == UnitId::Hero_GuiMontag;
}
mfkiwl/zephyr
include/zephyr/arch/x86/ia32/gdbstub.h
/*
 * Copyright (c) 2020 Intel Corporation.
 *
 * SPDX-License-Identifier: Apache-2.0
 */

/**
 * @file
 * @brief IA-32 specific gdbstub interface header
 */

#ifndef ZEPHYR_INCLUDE_ARCH_X86_GDBSTUB_SYS_H_
#define ZEPHYR_INCLUDE_ARCH_X86_GDBSTUB_SYS_H_

#ifndef _ASMLANGUAGE

#include <stdint.h>
#include <zephyr/toolchain.h>

/**
 * @brief Number of register used by gdbstub in IA-32
 */
#define GDB_STUB_NUM_REGISTERS 16

/**
 * @brief GDB interruption context
 *
 * The exception stack frame contents used by gdbstub. The contents
 * of this struct are used to display information about the current
 * cpu state.
 *
 * NOTE: the field order must match the frame laid out by the debug
 * exception entry code — presumably segment registers first, then the
 * pushad-style GP registers, then vector/error/iret words; confirm
 * against the assembly stub before reordering.
 */
struct gdb_interrupt_ctx {
	uint32_t ss;
	uint32_t gs;
	uint32_t fs;
	uint32_t es;
	uint32_t ds;
	uint32_t edi;
	uint32_t esi;
	uint32_t ebp;
	uint32_t esp;
	uint32_t ebx;
	uint32_t edx;
	uint32_t ecx;
	uint32_t eax;
	uint32_t vector;     /* exception vector number */
	uint32_t error_code; /* CPU-pushed error code (0 if none) */
	uint32_t eip;
	uint32_t cs;
	uint32_t eflags;
} __packed;

/**
 * @brief IA-32 register used in gdbstub
 *
 * Indices into gdb_ctx.registers; the ordering is what the GDB remote
 * protocol expects for i386 targets.
 */
enum GDB_REGISTER {
	GDB_EAX,
	GDB_ECX,
	GDB_EDX,
	GDB_EBX,
	GDB_ESP,
	GDB_EBP,
	GDB_ESI,
	GDB_EDI,
	GDB_PC,
	GDB_EFLAGS,
	GDB_CS,
	GDB_SS,
	GDB_DS,
	GDB_ES,
	GDB_FS,
	GDB_GS,
	/* out-of-band slot; 41 matches GDB's i386 orig_eax numbering —
	 * TODO confirm against the gdbstub core's register map */
	GDB_ORIG_EAX = 41,
};

/* Saved debugger context: last exception number plus the 16 registers above. */
struct gdb_ctx {
	unsigned int exception;
	unsigned int registers[GDB_STUB_NUM_REGISTERS];
};

#endif /* _ASMLANGUAGE */

#endif /* ZEPHYR_INCLUDE_ARCH_X86_GDBSTUB_SYS_H_ */
isabelgk/airfx
source/projects/ar.bite_tilde/ar.bite_tilde.cpp
<filename>source/projects/ar.bite_tilde/ar.bite_tilde.cpp #include "c74_min.h" using namespace c74::min; class bite : public object<bite>, public vector_operator<> { public: MIN_DESCRIPTION {"an unusual edge-maker"}; MIN_TAGS {"destruction, lofi"}; MIN_AUTHOR {"<NAME>"}; inlet<> in1 {this, "(signal) Input1"}; inlet<> in2 {this, "(signal) Input2"}; outlet<> out1 {this, "(signal) Output1", "signal"}; outlet<> out2 {this, "(signal) Output2", "signal"}; attribute<number, threadsafe::no, limit::clamp> A {this, "bite", 0.5, range {0.0, 1.0} }; attribute<number, threadsafe::no, limit::clamp> B {this, "output", 1.0, range {0.0, 1.0} }; message<> dspsetup {this, "dspsetup", MIN_FUNCTION { A = 0.5; B = 1.0; sampleAL = 0.0; sampleBL = 0.0; sampleCL = 0.0; sampleDL = 0.0; sampleEL = 0.0; sampleFL = 0.0; sampleGL = 0.0; sampleHL = 0.0; sampleIL = 0.0; sampleAR = 0.0; sampleBR = 0.0; sampleCR = 0.0; sampleDR = 0.0; sampleER = 0.0; sampleFR = 0.0; sampleGR = 0.0; sampleHR = 0.0; sampleIR = 0.0; fpNShapeL = 0.0; fpNShapeR = 0.0; //this is reset: values being initialized only once. Startup values, whatever they are. 
return {}; } }; void operator()(audio_bundle _input, audio_bundle _output) { double* in1 = _input.samples(0); double* in2 = _input.samples(1); double* out1 = _output.samples(0); double* out2 = _output.samples(1); long sampleFrames = _input.frame_count(); double overallscale = 1.3; overallscale /= 44100.0; overallscale *= samplerate(); double gain = ((A*2.0)-1.0)*overallscale; double outputgain = B; double midA; double midB; double midC; double midD; double trigger; double inputSampleL; double inputSampleR; while (--sampleFrames >= 0) { sampleIL = sampleHL; sampleHL = sampleGL; sampleGL = sampleFL; sampleFL = sampleEL; sampleEL = sampleDL; sampleDL = sampleCL; sampleCL = sampleBL; sampleBL = sampleAL; sampleAL = *in1; sampleIR = sampleHR; sampleHR = sampleGR; sampleGR = sampleFR; sampleFR = sampleER; sampleER = sampleDR; sampleDR = sampleCR; sampleCR = sampleBR; sampleBR = sampleAR; sampleAR = *in2; //rotate the buffer in primitive fashion if (sampleAL<1.2e-38 && -sampleAL<1.2e-38) { static int noisesource = 0; //this declares a variable before anything else is compiled. It won't keep assigning //it to 0 for every sample, it's as if the declaration doesn't exist in this context, //but it lets me add this denormalization fix in a single place rather than updating //it in three different locations. The variable isn't thread-safe but this is only //a random seed and we can share it with whatever. 
noisesource = noisesource % 1700021; noisesource++; int residue = noisesource * noisesource; residue = residue % 170003; residue *= residue; residue = residue % 17011; residue *= residue; residue = residue % 1709; residue *= residue; residue = residue % 173; residue *= residue; residue = residue % 17; double applyresidue = residue; applyresidue *= 0.00000001; applyresidue *= 0.00000001; sampleAL = applyresidue; } if (sampleAR<1.2e-38 && -sampleAR<1.2e-38) { static int noisesource = 0; noisesource = noisesource % 1700021; noisesource++; int residue = noisesource * noisesource; residue = residue % 170003; residue *= residue; residue = residue % 17011; residue *= residue; residue = residue % 1709; residue *= residue; residue = residue % 173; residue *= residue; residue = residue % 17; double applyresidue = residue; applyresidue *= 0.00000001; applyresidue *= 0.00000001; sampleAR = applyresidue; //this denormalization routine produces a white noise at -300 dB which the noise //shaping will interact with to produce a bipolar output, but the noise is actually //all positive. That should stop any variables from going denormal, and the routine //only kicks in if digital black is input. As a final touch, if you save to 24-bit //the silence will return to being digital black again. 
} midA = sampleAL - sampleEL; midB = sampleIL - sampleEL; midC = sampleCL - sampleEL; midD = sampleGL - sampleEL; midA *= ((((sampleBL + sampleCL + sampleDL)/3) - ((sampleAL + sampleEL)/2.0))*gain); midB *= ((((sampleFL + sampleGL + sampleHL)/3) - ((sampleEL + sampleIL)/2.0))*gain); midC *= ((sampleDL - ((sampleCL + sampleEL)/2.0))*gain); midD *= ((sampleFL - ((sampleEL + sampleGL)/2.0))*gain); trigger = sin(midA + midB + midC + midD); inputSampleL = sampleEL + (trigger*8.0); midA = sampleAR - sampleER; midB = sampleIR - sampleER; midC = sampleCR - sampleER; midD = sampleGR - sampleER; midA *= ((((sampleBR + sampleCR + sampleDR)/3) - ((sampleAR + sampleER)/2.0))*gain); midB *= ((((sampleFR + sampleGR + sampleHR)/3) - ((sampleER + sampleIR)/2.0))*gain); midC *= ((sampleDR - ((sampleCR + sampleER)/2.0))*gain); midD *= ((sampleFR - ((sampleER + sampleGR)/2.0))*gain); trigger = sin(midA + midB + midC + midD); inputSampleR = sampleER + (trigger*8.0); if (outputgain != 1.0) { inputSampleL *= outputgain; inputSampleR *= outputgain; } //stereo 64 bit dither, made small and tidy. 
int expon; frexp((double)inputSampleL, &expon); long double dither = (rand()/(RAND_MAX*7.737125245533627e+25))*pow(2,expon+62); dither /= 536870912.0; //needs this to scale to 64 bit zone inputSampleL += (dither-fpNShapeL); fpNShapeL = dither; frexp((double)inputSampleR, &expon); dither = (rand()/(RAND_MAX*7.737125245533627e+25))*pow(2,expon+62); dither /= 536870912.0; //needs this to scale to 64 bit zone inputSampleR += (dither-fpNShapeR); fpNShapeR = dither; //end 64 bit dither *out1 = inputSampleL; *out2 = inputSampleR; *in1++; *in2++; *out1++; *out2++; } } private: long double fpNShapeL; long double fpNShapeR; //default stuff double sampleAL; double sampleBL; double sampleCL; double sampleDL; double sampleEL; double sampleFL; double sampleGL; double sampleHL; double sampleIL; double sampleAR; double sampleBR; double sampleCR; double sampleDR; double sampleER; double sampleFR; double sampleGR; double sampleHR; double sampleIR; }; MIN_EXTERNAL(bite);
Mammad88/job4j_elementary
chapter_003/src/main/java/ru/job4j/collection/sort/JobDescByPriority.java
package ru.job4j.collection.sort;

import java.util.Comparator;

/**
 * JobDescByPriority - comparator that orders jobs by priority, descending.
 *
 * @author <NAME> (<EMAIL>)
 * @version $1.0$
 * @since 24.03.2020
 */
public class JobDescByPriority implements Comparator<Job> {

    @Override
    public int compare(Job first, Job second) {
        // Arguments are swapped relative to natural order to sort descending.
        return Integer.compare(second.getPriority(), first.getPriority());
    }
}
cybik/QSettingsDialog
QSettingsDialog/dialogui/settingsengine.h
#ifndef SETTINGSENGINE_H
#define SETTINGSENGINE_H

#include <QObject>
#include <QSignalMapper>
#include "qsettingsentry.h"
#include "qsettingsloader.h"
#include "qsettingswidget.h"
#include "checkinghelper.h"

// Drives load/save/reset of all registered settings entries for the dialog,
// bridging each entry's loader (simple or async) with its edit widget and
// reporting aggregate progress/errors via signals.
class SettingsEngine : public QObject
{
	Q_OBJECT
public:
	explicit SettingsEngine(QObject *parent = nullptr);
	~SettingsEngine();

	// Registers an entry together with the widget that edits it and the
	// helper used to track its "changed" checkbox state.
	void addEntry(QSharedPointer<QSettingsEntry> entry,
				  QSettingsWidgetBase *currentWidget,
				  CheckingHelper *checkingHelper);

public slots:
	// Kick off a bulk operation over all registered entries; completion and
	// progress are reported through the signals below.
	void startLoading();
	void startSaving();
	void startResetting();
	// Cancels the bulk operation currently in flight.
	void abortOperation();

signals:
	void progressMaxChanged(int max);
	void progressValueChanged(int value);
	// Emitted when a bulk operation finishes; errorCount is the number of
	// entries whose loader reported failure.
	void operationCompleted(int errorCount);
	void operationAborted();

private slots:
	// Callbacks for async loaders reporting per-entry results.
	void entryLoaded(bool successful, const QVariant &data, bool isUserEdited);
	void entrySaved(bool successful);
	void entryResetted(bool successful);

private:
	// Everything the engine needs per registered entry.
	struct EntryInfoBase {
		QSharedPointer<QSettingsEntry> entry;
		QSettingsWidgetBase *currentWidget;
		CheckingHelper *checkingHelper;
	};

	// EntryInfoBase plus the concrete loader type (simple vs. async).
	template<class TLoader>
	struct EntryInfo : public EntryInfoBase {
		TLoader *currentLoader;

		inline EntryInfo(QSharedPointer<QSettingsEntry> entry,
						 QSettingsWidgetBase *currentWidget,
						 CheckingHelper *checkingHelper,
						 TLoader *currentLoader) :
			EntryInfoBase({entry, currentWidget, checkingHelper}),
			currentLoader(currentLoader)
		{}
	};

	QList<EntryInfo<QSimpleSettingsLoader>> simpleEntries;
	QList<EntryInfo<QAsyncSettingsLoader>> asyncEntries;
	// Maps an async loader object to its index in asyncEntries while an
	// operation is pending, so its result callbacks can be routed back.
	QHash<QObject*, int> activeAsyncs;

	// Running totals for the current bulk operation.
	int currentCount;
	int errorCount;

	void updateEntry(EntryInfoBase &entry, const QVariant &data, bool isUserEdited);
	void disableEntry(EntryInfoBase &entry);
	void updateProgress(bool increment);
	void tryLoadComplete();
};

#endif // SETTINGSENGINE_H