repo_name
stringlengths
6
101
path
stringlengths
4
300
text
stringlengths
7
1.31M
RopeMC/RopeMC
src/main/java/de/ropemc/api/exceptions/WrongTypeException.java
<filename>src/main/java/de/ropemc/api/exceptions/WrongTypeException.java package de.ropemc.api.exceptions; public class WrongTypeException extends Throwable { public WrongTypeException(String message) { super(message); } }
yeastrc/msdapl
MS_LIBRARY/src/org/yeastrc/ms/domain/search/mascot/MascotResultData.java
/**
 * MascotResultData.java
 * @author <NAME>
 * Oct 12, 2009
 * @version 1.0
 */
package org.yeastrc.ms.domain.search.mascot;

import java.math.BigDecimal;

/**
 * Read/write view of a single Mascot search result: rank, the various Mascot
 * scores (ion, identity, homology), star flag, expect value, calculated mass
 * and the matching/predicted ion counts.
 *
 * <p>Note: interface members are implicitly {@code public abstract}, so the
 * redundant modifiers have been dropped; the contract is unchanged.
 */
public interface MascotResultData {

    int getRank();
    void setRank(int rank);

    BigDecimal getIonScore();
    void setIonScore(BigDecimal ionScore);

    BigDecimal getIdentityScore();
    void setIdentityScore(BigDecimal identityScore);

    int getStar();
    void setStar(int star);

    BigDecimal getHomologyScore();
    void setHomologyScore(BigDecimal homologyScore);

    BigDecimal getExpect();
    void setExpect(BigDecimal expect);

    BigDecimal getCalculatedMass();
    void setCalculatedMass(BigDecimal mass);

    int getMatchingIons();
    void setMatchingIons(int matchingIons);

    int getPredictedIons();
    void setPredictedIons(int predictedIons);
}
bauman/clips-rules-rebuild
clipspy-1.0.0/clips/facts.py
<gh_stars>100-1000 # Copyright (c) 2016-2021, <NAME> # All rights reserved. # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # 3. Neither the name of the copyright holder nor the names of its contributors # may be used to endorse or promote products derived from this software without # specific prior written permission. # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, # THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR # PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS # BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, # OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT # OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, # WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE # OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, # EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
"""This module contains the definition of:

  * ImpliedFact class
  * TemplateFact class
  * Template class
  * TemplateSlot class
  * DefinedFacts class
  * Facts namespace class

All classes here are thin wrappers over CLIPS C pointers accessed via cffi;
objects hold no state of their own beyond the environment/name/pointer.

"""

import os

from itertools import chain

import clips

from clips.modules import Module
from clips.common import PutSlotError, PUT_SLOT_ERROR
from clips.common import environment_builder, environment_modifier
from clips.common import CLIPSError, SaveMode, TemplateSlotDefaultType

from clips._clips import lib, ffi


class Fact:
    """CLIPS Fact base class.

    Wraps a CLIPS fact pointer and pins it (RetainFact) for the lifetime
    of the Python object.

    """

    __slots__ = '_env', '_fact'

    def __init__(self, env: ffi.CData, fact: ffi.CData):
        self._env = env
        self._fact = fact
        # Pin the underlying CLIPS fact so the engine cannot reclaim it
        # while this wrapper is alive; released in __del__.
        lib.RetainFact(self._fact)

    def __del__(self):
        try:
            lib.ReleaseFact(self._env, self._fact)
        except (AttributeError, TypeError):
            pass  # mostly happening during interpreter shutdown

    def __hash__(self):
        return hash(self._fact)

    def __eq__(self, fact):
        # Identity of the underlying C pointer, not structural equality.
        return self._fact == fact._fact

    def __str__(self):
        return ' '.join(fact_pp_string(self._env, self._fact).split())

    def __repr__(self):
        string = ' '.join(fact_pp_string(self._env, self._fact).split())

        return "%s: %s" % (self.__class__.__name__, string)

    @property
    def index(self) -> int:
        """The fact index."""
        return lib.FactIndex(self._fact)

    @property
    def exists(self) -> bool:
        """True if the fact has been asserted within CLIPS.

        Equivalent to the CLIPS (fact-existp) function.

        """
        return lib.FactExistp(self._fact)

    @property
    def template(self) -> 'Template':
        """The associated Template."""
        template = lib.FactDeftemplate(self._fact)
        name = ffi.string(lib.DeftemplateName(template)).decode()

        return Template(self._env, name)

    def retract(self):
        """Retract the fact from the CLIPS environment.

        Raises CLIPSError if the engine reports a retract error.

        """
        ret = lib.Retract(self._fact)
        if ret != lib.RE_NO_ERROR:
            raise CLIPSError(self._env, code=ret)


class ImpliedFact(Fact):
    """An Implied Fact or Ordered Fact represents its data as a list
    of elements similarly as for a Multifield.

    Implied Fact cannot be build or modified.

    They can be asserted via the Environment.assert_string() method.

    """

    def __iter__(self):
        # NOTE(review): chain() over a single iterable is a no-op wrapper;
        # presumably kept for uniformity with TemplateFact.__iter__.
        return chain(slot_value(self._env, self._fact))

    def __len__(self):
        return len(slot_value(self._env, self._fact))

    def __getitem__(self, index):
        return slot_value(self._env, self._fact)[index]


class TemplateFact(Fact):
    """A Template or Unordered Fact represents its data as a dictionary
    where each slot name is a key.

    TemplateFact slot values can be modified.
    The Fact will be re-evaluated against the rule network once modified.

    """

    # NOTE(review): duplicates the parent's __slots__ ('_env', '_fact');
    # harmless but creates shadowing descriptors — candidate for removal.
    __slots__ = '_env', '_fact'

    def __init__(self, env: ffi.CData, fact: ffi.CData):
        super().__init__(env, fact)

    def __iter__(self):
        return chain(slot_values(self._env, self._fact))

    def __len__(self):
        slots = slot_values(self._env, self._fact)

        return len(tuple(slots))

    def __getitem__(self, key):
        try:
            return slot_value(self._env, self._fact, slot=str(key))
        except CLIPSError as error:
            # Translate "no such slot" into the mapping protocol's KeyError.
            if error.code == lib.GSE_SLOT_NOT_FOUND_ERROR:
                raise KeyError("'%s'" % key)
            else:
                raise error

    def modify_slots(self, **slots):
        """Modify one or more slot values of the Fact.

        Fact must be asserted within the CLIPS engine.

        Equivalent to the CLIPS (modify) function.

        """
        modifier = environment_modifier(self._env, 'fact')
        ret = lib.FMSetFact(modifier, self._fact)
        if ret != lib.FME_NO_ERROR:
            raise CLIPSError(self._env, code=ret)

        for slot, slot_val in slots.items():
            value = clips.values.clips_value(self._env, value=slot_val)

            ret = lib.FMPutSlot(modifier, str(slot).encode(), value)
            if ret != PutSlotError.PSE_NO_ERROR:
                raise PUT_SLOT_ERROR[ret](slot)

        if lib.FMModify(modifier) is ffi.NULL:
            raise CLIPSError(self._env, code=lib.FBError(self._env))


class Template:
    """A Fact Template is a formal representation of the fact data structure.

    In CLIPS, Templates are defined via the (deftemplate) function.

    Templates allow to assert new facts within the CLIPS environment.

    Implied facts are associated to implied templates. Implied templates
    have a limited set of features.

    Only the template name is stored; the C pointer is re-resolved on
    every access via _ptr(), so the object survives engine resets.

    """

    __slots__ = '_env', '_name'

    def __init__(self, env: ffi.CData, name: str):
        self._env = env
        self._name = name.encode()

    def __hash__(self):
        return hash(self._ptr())

    def __eq__(self, tpl):
        return self._ptr() == tpl._ptr()

    def __str__(self):
        string = lib.DeftemplatePPForm(self._ptr())
        string = ffi.string(string).decode() if string != ffi.NULL else ''

        return ' '.join(string.split())

    def __repr__(self):
        string = lib.DeftemplatePPForm(self._ptr())
        string = ffi.string(string).decode() if string != ffi.NULL else ''

        return "%s: %s" % (self.__class__.__name__, ' '.join(string.split()))

    def _ptr(self) -> ffi.CData:
        # Re-resolve the deftemplate pointer by name on every access.
        tpl = lib.FindDeftemplate(self._env, self._name)
        if tpl == ffi.NULL:
            raise CLIPSError(self._env, 'Template <%s> not defined' % self.name)

        return tpl

    @property
    def implied(self) -> bool:
        """True if the Template is implied."""
        return lib.ImpliedDeftemplate(self._ptr())

    @property
    def name(self) -> str:
        """Template name."""
        return self._name.decode()

    @property
    def module(self) -> Module:
        """The module in which the Template is defined.

        Python equivalent of the CLIPS deftemplate-module command.

        """
        name = ffi.string(lib.DeftemplateModule(self._ptr())).decode()

        return Module(self._env, name)

    @property
    def deletable(self) -> bool:
        """True if the Template can be undefined."""
        return lib.DeftemplateIsDeletable(self._ptr())

    @property
    def slots(self) -> tuple:
        """The slots of the template."""
        if self.implied:
            return ()

        value = clips.values.clips_value(self._env)

        lib.DeftemplateSlotNames(self._ptr(), value)

        return tuple(TemplateSlot(self._env, self.name, n)
                     for n in clips.values.python_value(self._env, value))

    @property
    def watch(self) -> bool:
        """Whether or not the Template is being watched."""
        return lib.GetDeftemplateWatch(self._ptr())

    @watch.setter
    def watch(self, flag: bool):
        """Whether or not the Template is being watched."""
        # NOTE(review): setter calls EnvSetDeftemplateWatch while the getter
        # uses GetDeftemplateWatch — confirm the Env-prefixed symbol exists
        # in this binding (looks like a leftover from the older CLIPS API).
        lib.EnvSetDeftemplateWatch(self._ptr(), flag)

    def facts(self) -> iter:
        """Iterate over the asserted Facts belonging to this Template."""
        fact = lib.GetNextFactInTemplate(self._ptr(), ffi.NULL)
        while fact != ffi.NULL:
            # NOTE(review): passes the template pointer as the first argument
            # of new_fact(), whereas every other call site passes the
            # environment (self._env) — verify against new_fact's signature.
            yield new_fact(self._ptr(), fact)

            fact = lib.GetNextFactInTemplate(self._ptr(), fact)

    def assert_fact(self, **slots) -> TemplateFact:
        """Assert a new fact with the given slot values.

        Only deftemplates that have been explicitly defined
        can be asserted with this function.

        Equivalent to the CLIPS (assert) function.

        """
        builder = environment_builder(self._env, 'fact')
        ret = lib.FBSetDeftemplate(builder, self._name)
        if ret != lib.FBE_NO_ERROR:
            raise CLIPSError(self._env, code=ret)

        for slot, slot_val in slots.items():
            value = clips.values.clips_value(self._env, value=slot_val)

            ret = lib.FBPutSlot(builder, str(slot).encode(), value)
            if ret != PutSlotError.PSE_NO_ERROR:
                raise PUT_SLOT_ERROR[ret](slot)

        fact = lib.FBAssert(builder)
        if fact != ffi.NULL:
            return TemplateFact(self._env, fact)
        else:
            raise CLIPSError(self._env, code=lib.FBError(self._env))

    def undefine(self):
        """Undefine the Template.

        Equivalent to the CLIPS (undeftemplate) function.

        The object becomes unusable after this method has been called.

        """
        if not lib.Undeftemplate(self._ptr(), self._env):
            raise CLIPSError(self._env)


class TemplateSlot:
    """Template Facts organize the information within Slots.

    Slots might restrict the type or amount of data they store.

    Identified by (template name, slot name); the template pointer is
    re-resolved on every access via _ptr().

    """

    __slots__ = '_env', '_tpl', '_name'

    def __init__(self, env: ffi.CData, tpl: str, name: str):
        self._env = env
        self._tpl = tpl.encode()
        self._name = name.encode()

    def __hash__(self):
        return hash(self._ptr()) + hash(self._name)

    def __eq__(self, slot):
        return self._ptr() == slot._ptr() and self._name == slot._name

    def __str__(self):
        return self.name

    def __repr__(self):
        return "%s: %s" % (self.__class__.__name__, self.name)

    def _ptr(self) -> ffi.CData:
        tpl = lib.FindDeftemplate(self._env, self._tpl)
        if tpl == ffi.NULL:
            raise CLIPSError(
                self._env, 'Template <%s> not defined' % self._tpl.decode())

        return tpl

    @property
    def name(self) -> str:
        """The slot name."""
        return self._name.decode()

    @property
    def multifield(self) -> bool:
        """True if the slot is a multifield slot."""
        return bool(lib.DeftemplateSlotMultiP(self._ptr(), self._name))

    @property
    def types(self) -> tuple:
        """A tuple containing the value types for this Slot.

        Equivalent to the CLIPS (deftemplate-slot-types) function.

        """
        value = clips.values.clips_value(self._env)

        if lib.DeftemplateSlotTypes(self._ptr(), self._name, value):
            return clips.values.python_value(self._env, value)

        raise CLIPSError(self._env)

    @property
    def range(self) -> tuple:
        """A tuple containing the numeric range for this Slot.

        Equivalent to the CLIPS (deftemplate-slot-range) function.

        """
        value = clips.values.clips_value(self._env)

        if lib.DeftemplateSlotRange(self._ptr(), self._name, value):
            return clips.values.python_value(self._env, value)

        raise CLIPSError(self._env)

    @property
    def cardinality(self) -> tuple:
        """A tuple containing the cardinality for this Slot.

        Equivalent to the CLIPS (deftemplate-slot-cardinality) function.

        """
        value = clips.values.clips_value(self._env)

        if lib.DeftemplateSlotCardinality(self._ptr(), self._name, value):
            return clips.values.python_value(self._env, value)

        raise CLIPSError(self._env)

    @property
    def default_type(self) -> TemplateSlotDefaultType:
        """The default value type for this Slot.

        Equivalent to the CLIPS (deftemplate-slot-defaultp) function.

        """
        return TemplateSlotDefaultType(
            lib.DeftemplateSlotDefaultP(self._ptr(), self._name))

    @property
    def default_value(self) -> type:
        """The default value for this Slot.

        Equivalent to the CLIPS (deftemplate-slot-default-value) function.

        """
        value = clips.values.clips_value(self._env)

        if lib.DeftemplateSlotDefaultValue(self._ptr(), self._name, value):
            return clips.values.python_value(self._env, value)

        raise CLIPSError(self._env)

    @property
    def allowed_values(self) -> tuple:
        """A tuple containing the allowed values for this Slot.

        Equivalent to the CLIPS (slot-allowed-values) function.

        """
        value = clips.values.clips_value(self._env)

        if lib.DeftemplateSlotAllowedValues(self._ptr(), self._name, value):
            return clips.values.python_value(self._env, value)

        raise CLIPSError(self._env)


class DefinedFacts:
    """The DefinedFacts constitute a set of a priori
    or initial knowledge specified as a collection of facts
    of user defined classes.

    When the CLIPS environment is reset, every fact specified
    within a deffacts construct in the CLIPS knowledge base
    is added to the DefinedFacts list.

    """

    __slots__ = '_env', '_name'

    def __init__(self, env: ffi.CData, name: str):
        self._env = env
        self._name = name.encode()

    def __hash__(self):
        return hash(self._ptr())

    def __eq__(self, dfc):
        return self._ptr() == dfc._ptr()

    def __str__(self):
        string = lib.DeffactsPPForm(self._ptr())
        string = ffi.string(string).decode() if string != ffi.NULL else ''

        return ' '.join(string.split())

    def __repr__(self):
        string = lib.DeffactsPPForm(self._ptr())
        string = ffi.string(string).decode() if string != ffi.NULL else ''

        return "%s: %s" % (self.__class__.__name__, ' '.join(string.split()))

    def _ptr(self) -> ffi.CData:
        dfc = lib.FindDeffacts(self._env, self._name)
        if dfc == ffi.NULL:
            raise CLIPSError(
                self._env, 'DefinedFacts <%s> not defined' % self.name)

        return dfc

    @property
    def name(self) -> str:
        """DefinedFacts name."""
        return self._name.decode()

    @property
    def module(self) -> Module:
        """The module in which the DefinedFacts is defined.

        Python equivalent of the CLIPS (deffacts-module) command.

        """
        name = ffi.string(lib.DeffactsModule(self._ptr())).decode()

        return Module(self._env, name)

    @property
    def deletable(self) -> bool:
        """True if the DefinedFacts can be undefined."""
        return lib.DeffactsIsDeletable(self._ptr())

    def undefine(self):
        """Undefine the DefinedFacts.

        Equivalent to the CLIPS (undeffacts) function.

        The object becomes unusable after this method has been called.

        """
        if not lib.Undeffacts(self._ptr(), self._env):
            raise CLIPSError(self._env)


class Facts:
    """Facts and Templates namespace class.

    .. note::

       All the Facts methods are accessible through the Environment class.

    """

    __slots__ = ['_env']

    def __init__(self, env):
        self._env = env

    @property
    def fact_duplication(self) -> bool:
        """Whether or not duplicate facts are allowed."""
        return lib.GetFactDuplication(self._env)

    @fact_duplication.setter
    def fact_duplication(self, duplication: bool) -> bool:
        return lib.SetFactDuplication(self._env, duplication)

    def facts(self) -> iter:
        """Iterate over the asserted Facts."""
        fact = lib.GetNextFact(self._env, ffi.NULL)
        while fact != ffi.NULL:
            yield new_fact(self._env, fact)

            fact = lib.GetNextFact(self._env, fact)

    def templates(self) -> iter:
        """Iterate over the defined Templates."""
        template = lib.GetNextDeftemplate(self._env, ffi.NULL)
        while template != ffi.NULL:
            name = ffi.string(lib.DeftemplateName(template)).decode()

            yield Template(self._env, name)

            template = lib.GetNextDeftemplate(self._env, template)

    def find_template(self, name: str) -> Template:
        """Find the Template by its name."""
        tpl = lib.FindDeftemplate(self._env, name.encode())
        if tpl == ffi.NULL:
            raise LookupError("Template '%s' not found" % name)

        return Template(self._env, name)

    def defined_facts(self) -> iter:
        """Iterate over the DefinedFacts."""
        deffacts = lib.GetNextDeffacts(self._env, ffi.NULL)
        while deffacts != ffi.NULL:
            name = ffi.string(lib.DeffactsName(deffacts)).decode()

            yield DefinedFacts(self._env, name)

            deffacts = lib.GetNextDeffacts(self._env, deffacts)

    def find_defined_facts(self, name: str) -> DefinedFacts:
        """Find the DefinedFacts by its name."""
        dfs = lib.FindDeffacts(self._env, name.encode())
        if dfs == ffi.NULL:
            raise LookupError("DefinedFacts '%s' not found" % name)

        return DefinedFacts(self._env, name)

    def assert_string(self, string: str) -> (ImpliedFact, TemplateFact):
        """Assert a fact as string."""
        fact = lib.AssertString(self._env, string.encode())
        if fact == ffi.NULL:
            raise CLIPSError(
                self._env, code=lib.GetAssertStringError(self._env))

        return new_fact(self._env, fact)

    def load_facts(self, facts: str):
        """Load a set of facts into the CLIPS data base.

        Equivalent to the CLIPS (load-facts) function.

        Facts can be loaded from a string or from a text file.

        """
        facts = facts.encode()

        # If the argument names an existing file, load from it;
        # otherwise treat the string itself as the facts definition.
        if os.path.exists(facts):
            if not lib.LoadFacts(self._env, facts):
                raise CLIPSError(self._env)
        else:
            if not lib.LoadFactsFromString(self._env, facts, len(facts)):
                raise CLIPSError(self._env)

    def save_facts(self, path, mode=SaveMode.LOCAL_SAVE):
        """Save the facts in the system to the specified file.

        Equivalent to the CLIPS (save-facts) function.

        """
        if not lib.SaveFacts(self._env, path.encode(), mode):
            raise CLIPSError(self._env)


def new_fact(env: ffi.CData, fact: ffi.CData) -> (ImpliedFact, TemplateFact):
    # Dispatch on the fact's template kind: ordered facts get the
    # list-like wrapper, template facts the dict-like one.
    if lib.ImpliedDeftemplate(lib.FactDeftemplate(fact)):
        return ImpliedFact(env, fact)
    else:
        return TemplateFact(env, fact)


def slot_value(env: ffi.CData, fact: ffi.CData, slot: str = None) -> type:
    value = clips.values.clips_value(env)
    slot = slot.encode() if slot is not None else ffi.NULL
    implied = lib.ImpliedDeftemplate(lib.FactDeftemplate(fact))

    # A template (unordered) fact requires an explicit slot name.
    if not implied and slot == ffi.NULL:
        raise ValueError()

    ret = lib.GetFactSlot(fact, slot, value)
    if ret != lib.GSE_NO_ERROR:
        raise CLIPSError(env, code=ret)

    return clips.values.python_value(env, value)


def slot_values(env: ffi.CData, fact: ffi.CData) -> iter:
    value = clips.values.clips_value(env)
    lib.FactSlotNames(fact, value)

    # Lazy (name, value) pairs for every slot of the fact.
    return ((s, slot_value(env, fact, slot=s))
            for s in clips.values.python_value(env, value))


def fact_pp_string(env: ffi.CData, fact: ffi.CData) -> str:
    # Pretty-print the fact into a shared string builder and decode it.
    builder = environment_builder(env, 'string')
    lib.SBReset(builder)
    lib.FactPPForm(fact, builder, False)

    return ffi.string(builder.contents).decode()
AssemblyPayments/acmepos-ios
motelpos/Pods/SPIClient-iOS/Library/Network/SPIConnection.h
//
//  SPIConnection.h
//  SPIClient-iOS
//
//  Created by <NAME> on 2017-11-28.
//  Copyright © 2017 mx51. All rights reserved.
//

#import <Foundation/Foundation.h>

/// Connection lifecycle states reported to the delegate.
typedef NS_ENUM(NSInteger, SPIConnectionState) {
    SPIConnectionStateDisconnected,
    SPIConnectionStateConnecting,
    SPIConnectionStateConnected,
};

/// Callbacks emitted by an SPIConnection implementation.
@protocol SPIConnectionDelegate <NSObject>

/// Called whenever the connection transitions between states.
- (void)onSpiConnectionStatusChanged:(SPIConnectionState)newConnectionState;

/// Called for every raw message received from the remote end.
- (void)onSpiMessageReceived:(NSString *)message;

/// Called when the underlying transport reports an error.
- (void)didReceiveError:(NSError *)error;

@end

/// Abstraction over a message-based connection: set a URL, connect,
/// send string messages, and observe state through the delegate.
@protocol SPIConnection <NSObject>

- (void)setUrl:(NSString *)url;
- (void)connect;
- (void)disconnect;
- (void)send:(NSString *)msg;

- (BOOL)isConnected;
- (SPIConnectionState)state;

- (id<SPIConnectionDelegate>)delegate;
- (void)setDelegate:(id<SPIConnectionDelegate>)delegate;

@end
shanjiantao/jnr-constants
src/main/java/jnr/constants/platform/WaitFlags.java
// WARNING: This file is autogenerated. DO NOT EDIT!
// Generated 2018-09-20 16:28:42 +0000
package jnr.constants.platform;

// Platform wait(2)/waitpid(2) flag constants, resolved per platform through
// a bitmask ConstantResolver. NOTE(review): generated file — fix issues in
// the generator, not here.
public enum WaitFlags implements jnr.constants.Constant {
    WNOHANG,
    WUNTRACED,
    WSTOPPED,
    WEXITED,
    WCONTINUED,
    WNOWAIT,
    __UNKNOWN_CONSTANT__;

    private static final ConstantResolver<WaitFlags> resolver =
            ConstantResolver.getBitmaskResolver(WaitFlags.class);

    public final int value() { return (int) resolver.longValue(this); }
    public final int intValue() { return (int) resolver.longValue(this); }
    public final long longValue() { return resolver.longValue(this); }
    public final String description() { return resolver.description(this); }
    public final boolean defined() { return resolver.defined(this); }
    public final String toString() { return description(); }
    public static WaitFlags valueOf(long value) { return resolver.valueOf(value); }
}
lukaszgotszaldintel/compute-runtime
opencl/test/unit_test/fixtures/device_instrumentation_fixture.h
<reponame>lukaszgotszaldintel/compute-runtime /* * Copyright (C) 2018-2020 Intel Corporation * * SPDX-License-Identifier: MIT * */ #include <memory> namespace NEO { class ClDevice; class Device; struct HardwareInfo; struct DeviceInstrumentationFixture { void SetUp(bool instrumentation); std::unique_ptr<ClDevice> device = nullptr; HardwareInfo *hwInfo = nullptr; }; } // namespace NEO
GrapixLeGrand/AppArt
app/src/main/java/ch/epfl/sdp/appart/map/helper/MapFrontendHelper.java
package ch.epfl.sdp.appart.map.helper;

import android.util.Log;

import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import ch.epfl.sdp.appart.ad.Ad;
import ch.epfl.sdp.appart.database.DatabaseService;
import ch.epfl.sdp.appart.location.Location;
import ch.epfl.sdp.appart.location.geocoding.GeocodingService;
import ch.epfl.sdp.appart.location.place.Place;
import ch.epfl.sdp.appart.location.place.address.AddressFactory;
import ch.epfl.sdp.appart.location.place.locality.LocalityFactory;
import ch.epfl.sdp.appart.map.MapService;
import ch.epfl.sdp.appart.scrolling.card.Card;

/**
 * Static helpers shared by the map front-end: fetching cards and ads from
 * the database, centering the map on the user, and adding apartment markers.
 */
public class MapFrontendHelper {

    /** Matches the first run of digits in a city string (the postal code). */
    private static final Pattern extractPostalCodePattern =
            Pattern.compile("\\d+");

    /**
     * This corresponds to the maximum distance from the current user
     * position at which apartments get drawn on the map.
     * (Units presumably meters — TODO confirm against
     * GeocodingService.getDistanceSync.)
     */
    private static final float MAX_DISTANCE = 50_000.0f;

    /**
     * Retrieves all cards from the database.
     *
     * @param databaseService source of the cards
     * @return the future list of cards; completes with null on failure
     *         (the failure's stack trace is printed)
     */
    public static CompletableFuture<List<Card>> retrieveCards(
            DatabaseService databaseService) {
        // Consistency fix: reuse the shared exception handler instead of
        // duplicating the exceptionally(...) boilerplate inline.
        return basicExceptionally(databaseService.getCards());
    }

    /**
     * Zooms the map on the user's current location, if known.
     *
     * @param mapService      map to move
     * @param currentLocation may be null, in which case nothing happens
     */
    public static void centerOnCurrentLocation(MapService mapService,
                                               Location currentLocation) {
        if (currentLocation != null) {
            mapService.zoomOnPosition(currentLocation, 12.0f);
        }
    }

    /**
     * Retrieves a single ad by its id.
     *
     * @param databaseService source of the ad
     * @param adId            id of the ad to fetch
     * @return the future ad; completes with null on failure
     */
    public static CompletableFuture<Ad> retrieveAd(
            DatabaseService databaseService, String adId) {
        return basicExceptionally(databaseService.getAd(adId));
    }

    /**
     * Attaches a default exception handler that prints the stack trace and
     * completes the derived stage with null instead of propagating.
     */
    private static <T> CompletableFuture<T> basicExceptionally(
            CompletableFuture<T> future) {
        future.exceptionally(e -> {
            e.printStackTrace();
            return null;
        });
        return future;
    }

    /**
     * Builds a {@link Place} from an ad's location fields.
     * <p>
     * If the city string contains digits, they are interpreted as the postal
     * code and a full address is built from street + postal code + locality;
     * otherwise the whole city string is treated as a plain locality.
     *
     * @param ad the ad whose street and city fields are parsed
     * @return the derived place
     */
    public static Place getPlaceFromAdLocation(Ad ad) {
        Matcher extractPostalCodeMatcher =
                extractPostalCodePattern.matcher(ad.getCity());
        if (extractPostalCodeMatcher.find()) {
            String postalCode = extractPostalCodeMatcher.group();
            // Strip the postal-code digits, keeping only the locality name.
            String locality = ad.getCity().replaceAll("\\d+", "");
            return AddressFactory.makeAddress(ad.getStreet(), postalCode,
                    locality);
        }
        return LocalityFactory.makeLocality(ad.getCity());
    }

    /**
     * Once the ad resolves, geocodes its location and adds a map marker,
     * provided the apartment is within {@link #MAX_DISTANCE} of the current
     * user position (or the position is unknown).
     *
     * @param futureAd         the ad being resolved
     * @param geocodingService used to geocode the place and compute distance
     * @param currentLocation  the user position, may be null
     * @param mapService       map receiving the marker
     * @param card             card attached to the marker
     */
    public static void addMarker(CompletableFuture<Ad> futureAd,
                                 GeocodingService geocodingService,
                                 Location currentLocation,
                                 MapService mapService, Card card) {
        futureAd.thenAccept(ad -> {
            Place place;
            try {
                place = getPlaceFromAdLocation(ad);
            } catch (Exception e) {
                // Unparsable location: skip this ad's marker silently.
                return;
            }
            geocodingService.getLocation(place).thenAccept(adLoc -> {
                boolean addMarker = currentLocation == null
                        || geocodingService.getDistanceSync(adLoc,
                        currentLocation) < MAX_DISTANCE;
                if (addMarker) {
                    mapService.addMarker(adLoc, card, false, card.getCity());
                }
            }).exceptionally(e -> {
                Log.d("MAP_HELPER", "Failed to get location");
                return null;
            });
        });
    }
}
qjclinux/DocxFactory
include/DocxFactory/DocxCompiler/DocxCompilerDateTimeField.h
#ifndef __DOCXFACTORY_DOCX_COMPILER_DATE_TIME_FIELD_H__ #define __DOCXFACTORY_DOCX_COMPILER_DATE_TIME_FIELD_H__ #include "DocxFactory/DocxCompiler/DocxCompilerField.h" namespace DocxFactory { using namespace std; struct DateTimeFormat; class ZipFile; class DocxCompilerDateTimeField : public DocxCompilerField { public: DocxCompilerDateTimeField( DocxCompilerItem* p_item, const string& p_name, const string& p_format, const map<FieldParam, string>* p_valueByParam, xercesc::DOMElement* p_placeHolderNode ); virtual ~DocxCompilerDateTimeField(); virtual void serialize( ZipFile* p_zipFile ); protected: private: DocxCompilerDateTimeField( const DocxCompilerDateTimeField& p_other ); DocxCompilerDateTimeField& operator = ( const DocxCompilerDateTimeField ); list<DateTimeFormat*> m_dateTimeFormat; }; }; #endif
Mignet/supermarket-android
xsl781/src/main/java/org/xsl781/data/DateTimeTypeAdapter.java
package org.xsl781.data;

import com.google.gson.JsonDeserializationContext;
import com.google.gson.JsonDeserializer;
import com.google.gson.JsonElement;
import com.google.gson.JsonParseException;
import com.google.gson.JsonPrimitive;
import com.google.gson.JsonSerializationContext;
import com.google.gson.JsonSerializer;

import java.lang.reflect.Type;
import java.util.Date;

/**
 * Gson adapter that serializes {@link Date} as its Unix timestamp in
 * milliseconds ({@link Date#getTime()}) and deserializes such a numeric
 * primitive back into a {@link Date}.
 */
public class DateTimeTypeAdapter
        implements JsonSerializer<Date>, JsonDeserializer<Date> {

    /** Serializes the date as a numeric millisecond timestamp. */
    @Override
    public JsonElement serialize(Date src, Type arg1,
                                 JsonSerializationContext arg2) {
        return new JsonPrimitive(src.getTime());
    }

    /**
     * Deserializes a millisecond timestamp into a {@link Date}.
     *
     * @throws JsonParseException if the element is not a JSON primitive
     */
    @Override
    public Date deserialize(JsonElement json, Type typeOfT,
                            JsonDeserializationContext context)
            throws JsonParseException {
        if (!(json instanceof JsonPrimitive)) {
            // Fix: the previous message claimed "should be a string value",
            // but serialize() writes a numeric timestamp — the message now
            // matches the actual wire format.
            throw new JsonParseException(
                    "The date should be a numeric timestamp value");
        }
        return new Date(json.getAsLong());
    }
}
RandyLiu6410/4D-BIM-platform
server/utils/httpResponse.js
// Named HTTP status codes used by the server's route handlers.
// Semantics per RFC 9110 (HTTP Semantics).
module.exports = {
    OK: 200,                  // request succeeded
    Created: 201,             // resource created
    BadRequest: 400,          // malformed client request
    Unauthorized: 401,        // missing/invalid authentication
    Forbidden: 403,           // authenticated but not allowed
    NotFound: 404,            // resource does not exist
    InternalServerError: 500, // unexpected server failure
    BadGateway: 502,          // upstream returned an invalid response
    ServiceUnavailable: 503   // server temporarily unable to serve
}
zmbush/relay
packages/react-relay/classic/store/__tests__/RelayQueryWriter-test.js
/**
 * Copyright (c) 2013-present, Facebook, Inc.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 *
 * @emails oncall+relay
 * @format
 */

'use strict';

require('configureForRelayOSS');

const RelayClassic = require('RelayClassic');
const RelayChangeTracker = require('../RelayChangeTracker');
const RelayQueryPath = require('../../query/RelayQueryPath');
const RelayQueryTracker = require('../RelayQueryTracker');
const RelayQueryWriter = require('../RelayQueryWriter');
const RelayRecordStore = require('../RelayRecordStore');
const RelayRecordWriter = require('../RelayRecordWriter');
const RelayTestUtils = require('RelayTestUtils');

const {getNode} = RelayTestUtils;

// Verifies RelayQueryWriter's query-tracking behavior: created records are
// registered with the tracker, and record creation still works when no
// tracker is configured.
describe('RelayQueryWriter', () => {
  let changeTracker;
  let node;
  let path;
  let payload;
  let recordID;
  let recordWriter;
  let records;
  let store;

  beforeEach(() => {
    // Fresh, empty store/writer/tracker per test; `records` is the backing
    // object the store mutates, inspected directly in the assertions below.
    records = {};
    store = new RelayRecordStore({records});
    recordWriter = new RelayRecordWriter(records, {}, false);
    changeTracker = new RelayChangeTracker();
    node = getNode(RelayClassic.QL`query { me }`);
    path = RelayQueryPath.create(node);
    payload = {
      __typename: 'User',
      id: '660361306',
    };
    recordID = '660361306';
  });

  describe('query tracking', () => {
    it('tracks a node upon creation', () => {
      const queryTracker = new RelayQueryTracker();
      // Stub trackNodeForID so the call can be asserted on.
      const trackNodeForID = (queryTracker.trackNodeForID = jest.fn());
      const queryWriter = new RelayQueryWriter(
        store,
        recordWriter,
        queryTracker,
        changeTracker,
      );
      queryWriter.createRecordIfMissing(node, recordID, path, payload);

      // The interesting bit:
      expect(trackNodeForID).toBeCalledWith(node, recordID);

      // Sanity check.
      expect(records).toEqual({
        [recordID]: {
          __dataID__: recordID,
          __typename: 'User',
        },
      });
    });

    it('degrades gracefully in the absence of a configured tracker', () => {
      // Tracker argument is null: creation must not throw.
      const queryWriter = new RelayQueryWriter(
        store,
        recordWriter,
        null,
        changeTracker,
      );

      // The interesting bit:
      expect(() => {
        queryWriter.createRecordIfMissing(node, recordID, path, payload);
      }).not.toThrow();

      // Sanity check.
      expect(records).toEqual({
        [recordID]: {
          __dataID__: recordID,
          __typename: 'User',
        },
      });
    });
  });
});
ajbm6/framework
build/php7/ice/flash.zep.c
<reponame>ajbm6/framework<gh_stars>0 #ifdef HAVE_CONFIG_H #include "../ext_config.h" #endif #include <php.h> #include "../php_ext.h" #include "../ext.h" #include <Zend/zend_operators.h> #include <Zend/zend_exceptions.h> #include <Zend/zend_interfaces.h> #include "kernel/main.h" #include "kernel/object.h" #include "kernel/memory.h" #include "kernel/fcall.h" #include "kernel/operators.h" #include "kernel/array.h" #include "ext/spl/spl_exceptions.h" #include "kernel/exception.h" #include "kernel/concat.h" /** * Shows HTML notifications related to different circumstances. * * @package Ice/Flash * @category Helper * @author Ice Team * @copyright (c) 2014-2016 Ice Team * @license http://iceframework.org/license */ ZEPHIR_INIT_CLASS(Ice_Flash) { ZEPHIR_REGISTER_CLASS(Ice, Flash, ice, flash, ice_flash_method_entry, 0); zend_declare_property_null(ice_flash_ce, SL("session"), ZEND_ACC_PROTECTED TSRMLS_CC); zend_declare_property_null(ice_flash_ce, SL("tag"), ZEND_ACC_PROTECTED TSRMLS_CC); zend_declare_property_null(ice_flash_ce, SL("options"), ZEND_ACC_PROTECTED TSRMLS_CC); ice_flash_ce->create_object = zephir_init_properties_Ice_Flash; return SUCCESS; } PHP_METHOD(Ice_Flash, setOptions) { zval *options, options_sub; zval *this_ptr = getThis(); ZVAL_UNDEF(&options_sub); zephir_fetch_params(0, 1, 0, &options); zephir_update_property_zval(this_ptr, SL("options"), options); RETURN_THISW(); } /** * Flash constructor. Fetch session and tag service from the di. 
* * @param array options */ PHP_METHOD(Ice_Flash, __construct) { zend_long ZEPHIR_LAST_CALL_STATUS; zephir_fcall_cache_entry *_0 = NULL; zval *options_param = NULL, di, _1, _2, _3; zval options; zval *this_ptr = getThis(); ZVAL_UNDEF(&options); ZVAL_UNDEF(&di); ZVAL_UNDEF(&_1); ZVAL_UNDEF(&_2); ZVAL_UNDEF(&_3); ZEPHIR_MM_GROW(); zephir_fetch_params(1, 0, 1, &options_param); if (!options_param) { ZEPHIR_INIT_VAR(&options); array_init(&options); } else { zephir_get_arrval(&options, options_param); } ZEPHIR_CALL_CE_STATIC(&di, ice_di_ce, "fetch", &_0, 0); zephir_check_call_status(); ZEPHIR_INIT_VAR(&_2); ZVAL_STRING(&_2, "session"); ZEPHIR_CALL_METHOD(&_1, &di, "get", NULL, 0, &_2); zephir_check_call_status(); zephir_update_property_zval(this_ptr, SL("session"), &_1); ZEPHIR_INIT_NVAR(&_2); ZVAL_STRING(&_2, "tag"); ZEPHIR_CALL_METHOD(&_3, &di, "get", NULL, 0, &_2); zephir_check_call_status(); zephir_update_property_zval(this_ptr, SL("tag"), &_3); if (zephir_fast_count_int(&options TSRMLS_CC)) { zephir_update_property_zval(this_ptr, SL("options"), &options); } ZEPHIR_MM_RESTORE(); } /** * Get option value with key. 
* * @param string key The option key * @param mixed defaultValue The value to return if option key does not exist * @return mixed */ PHP_METHOD(Ice_Flash, getOption) { zval *key_param = NULL, *defaultValue = NULL, defaultValue_sub, __$null, value, _0; zval key; zval *this_ptr = getThis(); ZVAL_UNDEF(&key); ZVAL_UNDEF(&defaultValue_sub); ZVAL_NULL(&__$null); ZVAL_UNDEF(&value); ZVAL_UNDEF(&_0); ZEPHIR_MM_GROW(); zephir_fetch_params(1, 1, 1, &key_param, &defaultValue); if (UNEXPECTED(Z_TYPE_P(key_param) != IS_STRING && Z_TYPE_P(key_param) != IS_NULL)) { zephir_throw_exception_string(spl_ce_InvalidArgumentException, SL("Parameter 'key' must be a string") TSRMLS_CC); RETURN_MM_NULL(); } if (EXPECTED(Z_TYPE_P(key_param) == IS_STRING)) { zephir_get_strval(&key, key_param); } else { ZEPHIR_INIT_VAR(&key); ZVAL_EMPTY_STRING(&key); } if (!defaultValue) { defaultValue = &defaultValue_sub; defaultValue = &__$null; } zephir_read_property(&_0, this_ptr, SL("options"), PH_NOISY_CC | PH_READONLY); if (zephir_array_isset_fetch(&value, &_0, &key, 1 TSRMLS_CC)) { RETURN_CTOR(value); } RETVAL_ZVAL(defaultValue, 1, 0); RETURN_MM(); } /** * Display the messages. 
* * @param boolean remove * @return string */ PHP_METHOD(Ice_Flash, getMessages) { zend_string *_4$$3; zend_ulong _3$$3; zephir_fcall_cache_entry *_6 = NULL; zend_long ZEPHIR_LAST_CALL_STATUS; zval *remove_param = NULL, key, type, message, messages, body, _0, _1, *_2$$3, _5$$4, _7$$5; zend_bool remove; zval *this_ptr = getThis(); ZVAL_UNDEF(&key); ZVAL_UNDEF(&type); ZVAL_UNDEF(&message); ZVAL_UNDEF(&messages); ZVAL_UNDEF(&body); ZVAL_UNDEF(&_0); ZVAL_UNDEF(&_1); ZVAL_UNDEF(&_5$$4); ZVAL_UNDEF(&_7$$5); ZEPHIR_MM_GROW(); zephir_fetch_params(1, 0, 1, &remove_param); if (!remove_param) { remove = 1; } else { remove = zephir_get_boolval(remove_param); } ZEPHIR_INIT_VAR(&_0); ZVAL_STRING(&_0, "session_key"); ZEPHIR_CALL_METHOD(&key, this_ptr, "getoption", NULL, 0, &_0); zephir_check_call_status(); zephir_read_property(&_1, this_ptr, SL("session"), PH_NOISY_CC | PH_READONLY); ZEPHIR_CALL_METHOD(&messages, &_1, "get", NULL, 0, &key); zephir_check_call_status(); ZEPHIR_INIT_VAR(&body); ZVAL_STRING(&body, ""); if (Z_TYPE_P(&messages) == IS_ARRAY) { zephir_is_iterable(&messages, 0, "ice/flash.zep", 80); ZEND_HASH_FOREACH_KEY_VAL(Z_ARRVAL_P(&messages), _3$$3, _4$$3, _2$$3) { ZEPHIR_INIT_NVAR(&type); if (_4$$3 != NULL) { ZVAL_STR_COPY(&type, _4$$3); } else { ZVAL_LONG(&type, _3$$3); } ZEPHIR_INIT_NVAR(&message); ZVAL_COPY(&message, _2$$3); ZEPHIR_CALL_METHOD(&_5$$4, this_ptr, "getmessage", &_6, 0, &type, &message); zephir_check_call_status(); zephir_concat_self(&body, &_5$$4 TSRMLS_CC); } ZEND_HASH_FOREACH_END(); ZEPHIR_INIT_NVAR(&message); ZEPHIR_INIT_NVAR(&type); } if (remove) { zephir_read_property(&_7$$5, this_ptr, SL("session"), PH_NOISY_CC | PH_READONLY); ZEPHIR_CALL_METHOD(NULL, &_7$$5, "remove", NULL, 0, &key); zephir_check_call_status(); } RETURN_CCTOR(body); } /** * Get a message formatting it with HTML. 
* * @param string type * @param mixed message * @return string */ PHP_METHOD(Ice_Flash, getMessage) { zval _3, _10$$5, _12$$5; zephir_fcall_cache_entry *_1 = NULL; zend_long ZEPHIR_LAST_CALL_STATUS; zval *type_param = NULL, *messages = NULL, messages_sub, params, body, close, message, _0, _2, _4, *_5, _6$$4, _7$$4, _8$$5, _9$$5, _11$$5, _13$$5, _14$$5, _15$$5, _16$$5, _17$$6, _18$$6; zval type; zval *this_ptr = getThis(); ZVAL_UNDEF(&type); ZVAL_UNDEF(&messages_sub); ZVAL_UNDEF(&params); ZVAL_UNDEF(&body); ZVAL_UNDEF(&close); ZVAL_UNDEF(&message); ZVAL_UNDEF(&_0); ZVAL_UNDEF(&_2); ZVAL_UNDEF(&_4); ZVAL_UNDEF(&_6$$4); ZVAL_UNDEF(&_7$$4); ZVAL_UNDEF(&_8$$5); ZVAL_UNDEF(&_9$$5); ZVAL_UNDEF(&_11$$5); ZVAL_UNDEF(&_13$$5); ZVAL_UNDEF(&_14$$5); ZVAL_UNDEF(&_15$$5); ZVAL_UNDEF(&_16$$5); ZVAL_UNDEF(&_17$$6); ZVAL_UNDEF(&_18$$6); ZVAL_UNDEF(&_3); ZVAL_UNDEF(&_10$$5); ZVAL_UNDEF(&_12$$5); ZEPHIR_MM_GROW(); zephir_fetch_params(1, 2, 0, &type_param, &messages); zephir_get_strval(&type, type_param); ZEPHIR_SEPARATE_PARAM(messages); ZEPHIR_INIT_VAR(&_0); array_init(&_0); ZEPHIR_CALL_METHOD(&params, this_ptr, "getoption", &_1, 0, &type, &_0); zephir_check_call_status(); zephir_read_property(&_2, this_ptr, SL("tag"), PH_NOISY_CC | PH_READONLY); ZEPHIR_INIT_VAR(&_3); zephir_create_array(&_3, 3, 0 TSRMLS_CC); ZEPHIR_INIT_VAR(&_4); ZVAL_STRING(&_4, "#"); zephir_array_fast_append(&_3, &_4); ZEPHIR_INIT_NVAR(&_4); ZVAL_STRING(&_4, "×"); zephir_array_fast_append(&_3, &_4); add_assoc_stringl_ex(&_3, SL("class"), SL("close")); ZEPHIR_CALL_METHOD(&close, &_2, "a", NULL, 0, &_3); zephir_check_call_status(); ZEPHIR_INIT_VAR(&body); ZVAL_STRING(&body, ""); if (Z_TYPE_P(messages) != IS_ARRAY) { ZEPHIR_INIT_NVAR(messages); zephir_create_array(messages, 1, 0 TSRMLS_CC); zephir_array_fast_append(messages, messages); } zephir_is_iterable(messages, 0, "ice/flash.zep", 116); ZEND_HASH_FOREACH_VAL(Z_ARRVAL_P(messages), _5) { ZEPHIR_INIT_NVAR(&message); ZVAL_COPY(&message, _5); 
ZEPHIR_INIT_NVAR(&_7$$4); ZVAL_STRING(&_7$$4, "html"); ZEPHIR_CALL_METHOD(&_6$$4, this_ptr, "getoption", &_1, 0, &_7$$4); zephir_check_call_status(); if (zephir_is_true(&_6$$4)) { zephir_read_property(&_8$$5, this_ptr, SL("tag"), PH_NOISY_CC | PH_READONLY); ZEPHIR_INIT_NVAR(&_10$$5); zephir_create_array(&_10$$5, 1, 0 TSRMLS_CC); ZEPHIR_INIT_LNVAR(_11$$5); ZEPHIR_CONCAT_VV(&_11$$5, &close, &message); zephir_array_update_string(&_10$$5, SL("content"), &_11$$5, PH_COPY | PH_SEPARATE); ZEPHIR_INIT_NVAR(&_12$$5); zephir_create_array(&_12$$5, 1, 0 TSRMLS_CC); ZEPHIR_INIT_NVAR(&_13$$5); ZVAL_STRING(&_13$$5, "content"); zephir_array_fast_append(&_12$$5, &_13$$5); ZEPHIR_INIT_NVAR(&_13$$5); ZVAL_STRING(&_13$$5, "div"); ZEPHIR_INIT_NVAR(&_14$$5); ZVAL_STRING(&_14$$5, "content"); ZVAL_BOOL(&_15$$5, 1); ZVAL_BOOL(&_16$$5, 1); ZEPHIR_CALL_METHOD(&_9$$5, &_8$$5, "taghtml", NULL, 0, &_13$$5, &params, &_10$$5, &_12$$5, &_14$$5, &_15$$5, &_16$$5); zephir_check_call_status(); zephir_concat_self(&body, &_9$$5 TSRMLS_CC); } else { ZEPHIR_INIT_NVAR(&_17$$6); ZEPHIR_GET_CONSTANT(&_17$$6, "PHP_EOL"); ZEPHIR_INIT_LNVAR(_18$$6); ZEPHIR_CONCAT_VV(&_18$$6, &message, &_17$$6); zephir_concat_self(&body, &_18$$6 TSRMLS_CC); } } ZEND_HASH_FOREACH_END(); ZEPHIR_INIT_NVAR(&message); RETURN_CCTOR(body); } /** * Adds a message to the flash. 
* * @param string type * @param string message * @return void */ PHP_METHOD(Ice_Flash, message) { zend_long ZEPHIR_LAST_CALL_STATUS; zval *type_param = NULL, *message_param = NULL, key, messages, _0, _1, _3, _2$$3; zval type, message; zval *this_ptr = getThis(); ZVAL_UNDEF(&type); ZVAL_UNDEF(&message); ZVAL_UNDEF(&key); ZVAL_UNDEF(&messages); ZVAL_UNDEF(&_0); ZVAL_UNDEF(&_1); ZVAL_UNDEF(&_3); ZVAL_UNDEF(&_2$$3); ZEPHIR_MM_GROW(); zephir_fetch_params(1, 2, 0, &type_param, &message_param); zephir_get_strval(&type, type_param); zephir_get_strval(&message, message_param); ZEPHIR_INIT_VAR(&_0); ZVAL_STRING(&_0, "session_key"); ZEPHIR_CALL_METHOD(&key, this_ptr, "getoption", NULL, 0, &_0); zephir_check_call_status(); zephir_read_property(&_1, this_ptr, SL("session"), PH_NOISY_CC | PH_READONLY); ZEPHIR_INIT_NVAR(&_0); array_init(&_0); ZEPHIR_CALL_METHOD(&messages, &_1, "get", NULL, 0, &key, &_0); zephir_check_call_status(); if (!(zephir_array_isset(&messages, &type))) { ZEPHIR_INIT_VAR(&_2$$3); array_init(&_2$$3); zephir_array_update_zval(&messages, &type, &_2$$3, PH_COPY | PH_SEPARATE); } zephir_array_update_multi(&messages, &message TSRMLS_CC, SL("za"), 2, &type); zephir_read_property(&_3, this_ptr, SL("session"), PH_NOISY_CC | PH_READONLY); ZEPHIR_CALL_METHOD(NULL, &_3, "set", NULL, 0, &key, &messages); zephir_check_call_status(); ZEPHIR_MM_RESTORE(); } /** * Add success message. * * @param string message * @return void */ PHP_METHOD(Ice_Flash, success) { zend_long ZEPHIR_LAST_CALL_STATUS; zval *message_param = NULL, _0; zval message; zval *this_ptr = getThis(); ZVAL_UNDEF(&message); ZVAL_UNDEF(&_0); ZEPHIR_MM_GROW(); zephir_fetch_params(1, 1, 0, &message_param); zephir_get_strval(&message, message_param); ZEPHIR_INIT_VAR(&_0); ZVAL_STRING(&_0, "success"); ZEPHIR_CALL_METHOD(NULL, this_ptr, "message", NULL, 0, &_0, &message); zephir_check_call_status(); ZEPHIR_MM_RESTORE(); } /** * Alias of success message. 
*/ PHP_METHOD(Ice_Flash, ok) { zend_long ZEPHIR_LAST_CALL_STATUS; zval *message_param = NULL, _0; zval message; zval *this_ptr = getThis(); ZVAL_UNDEF(&message); ZVAL_UNDEF(&_0); ZEPHIR_MM_GROW(); zephir_fetch_params(1, 1, 0, &message_param); zephir_get_strval(&message, message_param); ZEPHIR_INIT_VAR(&_0); ZVAL_STRING(&_0, "success"); ZEPHIR_CALL_METHOD(NULL, this_ptr, "message", NULL, 0, &_0, &message); zephir_check_call_status(); ZEPHIR_MM_RESTORE(); } /** * Add info message. * * @param string message * @return void */ PHP_METHOD(Ice_Flash, info) { zend_long ZEPHIR_LAST_CALL_STATUS; zval *message_param = NULL, _0; zval message; zval *this_ptr = getThis(); ZVAL_UNDEF(&message); ZVAL_UNDEF(&_0); ZEPHIR_MM_GROW(); zephir_fetch_params(1, 1, 0, &message_param); zephir_get_strval(&message, message_param); ZEPHIR_INIT_VAR(&_0); ZVAL_STRING(&_0, "info"); ZEPHIR_CALL_METHOD(NULL, this_ptr, "message", NULL, 0, &_0, &message); zephir_check_call_status(); ZEPHIR_MM_RESTORE(); } /** * Alias of info message. */ PHP_METHOD(Ice_Flash, notice) { zend_long ZEPHIR_LAST_CALL_STATUS; zval *message_param = NULL, _0; zval message; zval *this_ptr = getThis(); ZVAL_UNDEF(&message); ZVAL_UNDEF(&_0); ZEPHIR_MM_GROW(); zephir_fetch_params(1, 1, 0, &message_param); zephir_get_strval(&message, message_param); ZEPHIR_INIT_VAR(&_0); ZVAL_STRING(&_0, "info"); ZEPHIR_CALL_METHOD(NULL, this_ptr, "message", NULL, 0, &_0, &message); zephir_check_call_status(); ZEPHIR_MM_RESTORE(); } /** * Add warning message. 
* * @param string message * @return void */ PHP_METHOD(Ice_Flash, warning) { zend_long ZEPHIR_LAST_CALL_STATUS; zval *message_param = NULL, _0; zval message; zval *this_ptr = getThis(); ZVAL_UNDEF(&message); ZVAL_UNDEF(&_0); ZEPHIR_MM_GROW(); zephir_fetch_params(1, 1, 0, &message_param); zephir_get_strval(&message, message_param); ZEPHIR_INIT_VAR(&_0); ZVAL_STRING(&_0, "warning"); ZEPHIR_CALL_METHOD(NULL, this_ptr, "message", NULL, 0, &_0, &message); zephir_check_call_status(); ZEPHIR_MM_RESTORE(); } /** * Alias of warning message. */ PHP_METHOD(Ice_Flash, alert) { zend_long ZEPHIR_LAST_CALL_STATUS; zval *message_param = NULL, _0; zval message; zval *this_ptr = getThis(); ZVAL_UNDEF(&message); ZVAL_UNDEF(&_0); ZEPHIR_MM_GROW(); zephir_fetch_params(1, 1, 0, &message_param); zephir_get_strval(&message, message_param); ZEPHIR_INIT_VAR(&_0); ZVAL_STRING(&_0, "warning"); ZEPHIR_CALL_METHOD(NULL, this_ptr, "message", NULL, 0, &_0, &message); zephir_check_call_status(); ZEPHIR_MM_RESTORE(); } /** * Add danger message. * * @param string message * @return void */ PHP_METHOD(Ice_Flash, danger) { zend_long ZEPHIR_LAST_CALL_STATUS; zval *message_param = NULL, _0; zval message; zval *this_ptr = getThis(); ZVAL_UNDEF(&message); ZVAL_UNDEF(&_0); ZEPHIR_MM_GROW(); zephir_fetch_params(1, 1, 0, &message_param); zephir_get_strval(&message, message_param); ZEPHIR_INIT_VAR(&_0); ZVAL_STRING(&_0, "danger"); ZEPHIR_CALL_METHOD(NULL, this_ptr, "message", NULL, 0, &_0, &message); zephir_check_call_status(); ZEPHIR_MM_RESTORE(); } /** * Alias of danger message. 
*/ PHP_METHOD(Ice_Flash, error) { zend_long ZEPHIR_LAST_CALL_STATUS; zval *message_param = NULL, _0; zval message; zval *this_ptr = getThis(); ZVAL_UNDEF(&message); ZVAL_UNDEF(&_0); ZEPHIR_MM_GROW(); zephir_fetch_params(1, 1, 0, &message_param); zephir_get_strval(&message, message_param); ZEPHIR_INIT_VAR(&_0); ZVAL_STRING(&_0, "danger"); ZEPHIR_CALL_METHOD(NULL, this_ptr, "message", NULL, 0, &_0, &message); zephir_check_call_status(); ZEPHIR_MM_RESTORE(); } zend_object *zephir_init_properties_Ice_Flash(zend_class_entry *class_type TSRMLS_DC) { zval _1$$3, _2$$3; zval __$true, _0; ZVAL_BOOL(&__$true, 1); ZVAL_UNDEF(&_0); ZVAL_UNDEF(&_1$$3); ZVAL_UNDEF(&_2$$3); ZEPHIR_MM_GROW(); { zval local_this_ptr, *this_ptr = &local_this_ptr; ZEPHIR_CREATE_OBJECT(this_ptr, class_type); zephir_read_property(&_0, this_ptr, SL("options"), PH_NOISY_CC | PH_READONLY); if (Z_TYPE_P(&_0) == IS_NULL) { ZEPHIR_INIT_VAR(&_1$$3); zephir_create_array(&_1$$3, 6, 0 TSRMLS_CC); add_assoc_stringl_ex(&_1$$3, SL("session_key"), SL("_flash")); ZEPHIR_INIT_VAR(&_2$$3); zephir_create_array(&_2$$3, 1, 0 TSRMLS_CC); add_assoc_stringl_ex(&_2$$3, SL("class"), SL("alert alert-success")); zephir_array_update_string(&_1$$3, SL("success"), &_2$$3, PH_COPY | PH_SEPARATE); ZEPHIR_INIT_NVAR(&_2$$3); zephir_create_array(&_2$$3, 1, 0 TSRMLS_CC); add_assoc_stringl_ex(&_2$$3, SL("class"), SL("alert alert-info")); zephir_array_update_string(&_1$$3, SL("info"), &_2$$3, PH_COPY | PH_SEPARATE); ZEPHIR_INIT_NVAR(&_2$$3); zephir_create_array(&_2$$3, 1, 0 TSRMLS_CC); add_assoc_stringl_ex(&_2$$3, SL("class"), SL("alert alert-warning")); zephir_array_update_string(&_1$$3, SL("warning"), &_2$$3, PH_COPY | PH_SEPARATE); ZEPHIR_INIT_NVAR(&_2$$3); zephir_create_array(&_2$$3, 1, 0 TSRMLS_CC); add_assoc_stringl_ex(&_2$$3, SL("class"), SL("alert alert-danger")); zephir_array_update_string(&_1$$3, SL("danger"), &_2$$3, PH_COPY | PH_SEPARATE); zephir_array_update_string(&_1$$3, SL("html"), &__$true, PH_COPY | PH_SEPARATE); 
zephir_update_property_zval(this_ptr, SL("options"), &_1$$3); } ZEPHIR_MM_RESTORE(); return Z_OBJ_P(this_ptr); } }
RISCfuture/METAR-Decoder
METAR Decoder/Source/Support/ImproperFraction.h
#import <Foundation/Foundation.h>

@class Rational;

/**
 An improper fraction: a whole-number part combined with a fractional part
 (e.g. "1 1/2"). The fractional part is stored as a separate Rational value.

 Note: this header previously relied on the includer to provide Foundation and
 the Rational type; the import and forward declaration above make it
 self-contained.
 */
@interface ImproperFraction : NSObject

/** The whole-number part of the value. */
@property (assign) NSInteger whole;

/** The fractional part of the value. */
@property (strong) Rational *fraction;

/** Unavailable; use -initWithWhole:fraction: instead. */
- (instancetype) init NS_UNAVAILABLE;

/**
 Designated initializer.

 @param wholePart The whole-number part.
 @param fractionalPart The fractional part.
 @return An initialized improper fraction.
 */
- (instancetype) initWithWhole:(NSInteger)wholePart fraction:(Rational *)fractionalPart NS_DESIGNATED_INITIALIZER;

/** A human-readable rendering of the value (e.g. @"1 1/2"). */
@property (NS_NONATOMIC_IOSONLY, readonly, copy) NSString *stringValue;

@end
acepace/BPjs
src/main/java/il/ac/bgu/cs/bp/bpjs/execution/listeners/BProgramRunnerListener.java
package il.ac.bgu.cs.bp.bpjs.execution.listeners;

import il.ac.bgu.cs.bp.bpjs.model.BProgram;
import il.ac.bgu.cs.bp.bpjs.execution.BProgramRunner;
import il.ac.bgu.cs.bp.bpjs.model.BThreadSyncSnapshot;
import il.ac.bgu.cs.bp.bpjs.model.BEvent;
import il.ac.bgu.cs.bp.bpjs.model.FailedAssertion;

/**
 * An object interested in the life-cycle of a {@link BProgram} being run by a
 * {@link BProgramRunner}. Implementations receive callbacks for program start
 * and end, b-thread addition/removal/completion, event selection, superstep
 * completion, and failed assertions.
 *
 * @author michael
 */
public interface BProgramRunnerListener {

    /**
     * Called before the BProgram is started (pre-setup), i.e. before
     * {@link #started(BProgram)}.
     *
     * @param bprog The BProgram about to start
     */
    void starting(BProgram bprog);

    /**
     * Called when the {@link BProgram} {@code bp} was started (setup done,
     * execution under way).
     *
     * @param bp The BProgram started.
     */
    void started( BProgram bp );

    /**
     * Called when a BProgram cannot advance, and is waiting for external events
     * to continue. For this to happen, the BProgram has to be in daemon mode.
     *
     * @param bp The BProgram informing the change.
     *
     * @see BProgram#setDaemonMode(boolean)
     */
    void superstepDone( BProgram bp );

    /**
     * Called when the {@link BProgram} {@code bp} ends. No further callbacks
     * are made for this program after this point.
     *
     * @param bp The BProgram ended.
     */
    void ended( BProgram bp );

    /**
     * Called when a b-thread in the {@link BProgram} has made a failed assertion.
     * This means that the program is in violation of some of its requirements.
     *
     * @param bp The program where the failed assertion happened.
     * @param theFailedAssertion Details about the assertion that failed.
     */
    void assertionFailed( BProgram bp, FailedAssertion theFailedAssertion );

    /**
     * Called when a BThread is added to a BProgram.
     *
     * @param bp the program the thread was added to.
     * @param theBThread the new BThread
     */
    void bthreadAdded( BProgram bp, BThreadSyncSnapshot theBThread );

    /**
     * Called when a BThread is removed from a BProgram.
     *
     * @param bp the program the thread was removed from.
     * @param theBThread the removed BThread
     */
    void bthreadRemoved( BProgram bp, BThreadSyncSnapshot theBThread );

    /**
     * Called when a BThread has ran to completion (as opposed to being
     * removed; see {@link #bthreadRemoved(BProgram, BThreadSyncSnapshot)}).
     *
     * @param bp the program the thread ran in.
     * @param theBThread the done BThread
     */
    void bthreadDone( BProgram bp, BThreadSyncSnapshot theBThread);

    /**
     * Called when a BProgram selects an event.
     *
     * @param bp The BProgram the event was selected in.
     * @param theEvent the new event selected.
     */
    void eventSelected( BProgram bp, BEvent theEvent );
}
foundry2D/Kinc
Backends/Graphics5/Direct3D12/Sources/kinc/backend/graphics5/pipeline.h
#pragma once

#include <stdint.h>

#ifdef __cplusplus
extern "C" {
#endif

/* Forward declarations: D3D12 interfaces are only referenced by pointer here,
   so the full Windows/D3D12 headers are not required by this header. */
struct kinc_g5_shader;
struct ID3D12PipelineState;
struct ID3D12GraphicsCommandList;
struct ID3D12RootSignature;

/* Direct3D 12 backend state for a graphics pipeline. */
typedef struct {
	struct ID3D12PipelineState *pso; /* compiled pipeline state object */
#ifdef KORE_DXC
	/* DXC path keeps its own root signature and per-stage constant sizes. */
	struct ID3D12RootSignature *rootSignature;
	int vertexConstantsSize;
	int fragmentConstantsSize;
#endif
	int textures; /* NOTE(review): presumably the number of bound texture slots — confirm against the backend source */
	// ID3D11InputLayout* inputLayout;
	// ID3D11Buffer* fragmentConstantBuffer;
	// ID3D11Buffer* vertexConstantBuffer;
	// ID3D11Buffer* geometryConstantBuffer;
	// ID3D11Buffer* tessEvalConstantBuffer;
	// ID3D11Buffer* tessControlConstantBuffer;

	// static void setConstants(ID3D12GraphicsCommandList *commandList, Graphics5::PipelineState *pipeline);
} PipelineState5Impl;

/* Direct3D 12 backend state for a compute pipeline (mirrors the graphics
   variant above). */
typedef struct {
	struct ID3D12PipelineState *pso; /* compiled pipeline state object */
#ifdef KORE_DXC
	struct ID3D12RootSignature *rootSignature;
	int vertexConstantsSize;
	int fragmentConstantsSize;
#endif
	int textures; /* NOTE(review): presumably the number of bound texture slots — confirm against the backend source */
	// ID3D11InputLayout* inputLayout;
	// ID3D11Buffer* fragmentConstantBuffer;
	// ID3D11Buffer* vertexConstantBuffer;
	// ID3D11Buffer* geometryConstantBuffer;
	// ID3D11Buffer* tessEvalConstantBuffer;
	// ID3D11Buffer* tessControlConstantBuffer;

	// static void setConstants(ID3D12GraphicsCommandList *commandList, Graphics5::PipelineState *pipeline);
} ComputePipelineState5Impl;

/* Offset/size pair per shader stage for a named shader constant (uniform). */
typedef struct {
	int vertexOffset;
	uint32_t vertexSize;
	int fragmentOffset;
	uint32_t fragmentSize;
	int computeOffset;
	uint32_t computeSize;
	int geometryOffset;
	uint32_t geometrySize;
	int tessEvalOffset;
	uint32_t tessEvalSize;
	int tessControlOffset;
	uint32_t tessControlSize;
} ConstantLocation5Impl;

/* Placeholder: vertex attribute locations need no backend state on D3D12. */
typedef struct {
	int nothing;
} AttributeLocation5Impl;

struct kinc_g5_pipeline;

/* Binds the pipeline's constants onto the given command list. */
void kinc_g5_internal_setConstants(struct ID3D12GraphicsCommandList *commandList, struct kinc_g5_pipeline *pipeline);

#ifdef __cplusplus
}
#endif
folio-org/mod-search
src/test/java/org/folio/search/support/extension/impl/PostgresContainerExtension.java
package org.folio.search.support.extension.impl; import org.junit.jupiter.api.extension.AfterAllCallback; import org.junit.jupiter.api.extension.BeforeAllCallback; import org.junit.jupiter.api.extension.ExtensionContext; import org.testcontainers.containers.PostgreSQLContainer; public class PostgresContainerExtension implements BeforeAllCallback, AfterAllCallback { private static final String SPRING_PROPERTY_NAME = "spring.datasource.url"; private static final String POSTGRES_IMAGE = "postgres:10.6"; private static final PostgreSQLContainer<?> CONTAINER = new PostgreSQLContainer<>(POSTGRES_IMAGE) .withDatabaseName("folio_test").withUsername("folio_admin").withPassword("password"); @Override public void beforeAll(ExtensionContext context) { if (!CONTAINER.isRunning()) { CONTAINER.start(); } System.setProperty(SPRING_PROPERTY_NAME, CONTAINER.getJdbcUrl()); } @Override public void afterAll(ExtensionContext context) { System.clearProperty(SPRING_PROPERTY_NAME); } }
Tubelz/macaw
scene.go
package macaw import ( "github.com/tubelz/macaw/system" "github.com/veandco/go-sdl2/sdl" ) // SceneManager manges the scenes in the game type SceneManager struct { Scenes []*Scene currentPos int // SceneMap has the position of the scene in the array SceneMap map[string]int } // AddScene adds a new scene func (s *SceneManager) AddScene(scene *Scene) { s.Scenes = append(s.Scenes, scene) if len(s.Scenes) == 1 { scene.Init() } if s.SceneMap == nil { s.SceneMap = make(map[string]int) } if scene.Name != "" { s.SceneMap[scene.Name] = len(s.Scenes) - 1 } } // Current returns the current Scene func (s *SceneManager) Current() *Scene { if len(s.Scenes) > 0 { return s.Scenes[s.currentPos] } return nil } // RemoveScene removes a scene func (s *SceneManager) RemoveScene() { } // NextScene goes to the next scene if it exists func (s *SceneManager) NextScene() *Scene { (s.Current()).Exit() if s.currentPos < (len(s.Scenes) - 1) { s.currentPos++ } else { s.currentPos = 0 } s.Current().Init() return s.Current() } // ChangeScene changes to a specific scene by its name func (s *SceneManager) ChangeScene(sceneName string) *Scene { if pos, ok := s.SceneMap[sceneName]; ok { (s.Current()).Exit() s.currentPos = pos } s.Current().Init() return s.Current() } // Scene is responsible to hold the systems in a scene type Scene struct { Name string UpdateSystems []system.Systemer // responsible to update the game RenderSystem *system.RenderSystem // responsible to render the game InitFunc func() ExitFunc func() SceneOptions } // Init initializes the scene according to the options func (s *Scene) Init() { if s.InitFunc != nil { s.InitFunc() } // HideCursor option s.showCursor() // Music option s.playMusic() // change background color if s.BgColor != (sdl.Color{}) { s.RenderSystem.BgColor = s.BgColor } else { s.RenderSystem.BgColor = sdl.Color{0xFF, 0xFF, 0xFF, 0xFF} } // Run Init for each system in the scene for _, system := range s.UpdateSystems { system.Init() } } // Exit executes a function, if 
setted, when scene is excited func (s *Scene) Exit() { if s.ExitFunc != nil { s.ExitFunc() } } // AddGameUpdateSystem adds the systems which will run in the game loop func (s *Scene) AddGameUpdateSystem(system system.Systemer) { s.UpdateSystems = append(s.UpdateSystems, system) } // AddRenderSystem adds the render system to our game loop func (s *Scene) AddRenderSystem(system *system.RenderSystem) { s.RenderSystem = system } // SceneOptions contains the options for the scene type SceneOptions struct { HideCursor bool // true - hides, false - shows Music string BgColor sdl.Color } func (s *SceneOptions) showCursor() { show := 1 if s.HideCursor { show = 0 } sdl.ShowCursor(show) } func (s *SceneOptions) playMusic() { if s.Music != "" { PlayMusic(s.Music) } else { StopMusic() } }
neroroxxx/BMC
src/BMC.hardware.relays.cpp
<gh_stars>10-100 /* See https://www.RoxXxtar.com/bmc for more details Copyright (c) 2020 RoxXxtar.com Licensed under the MIT license. See LICENSE file in the project root for full license information. */ #include <BMC.h> // NON-LATCHING RELAYS #if BMC_MAX_NL_RELAYS > 0 void BMC::setupRelaysNL(){ for(uint8_t i = 0; i < BMC_MAX_NL_RELAYS; i++){ uint8_t m = BMC_GET_BYTE(3,globalData.relaysNL[i].event>>1); bool momentary = bitRead(m,5); bool reversed = bitRead(m,6); relaysNL[i].begin(BMCBuildData::getRelayNLPin(i), momentary, reversed); // bit 0 of event is initial state relaysNL[i].command(globalData.relaysNL[i].event & 0x01); } assignRelaysNL(); } void BMC::assignRelaysNL(){ for(uint8_t i = 0; i < BMC_MAX_NL_RELAYS; i++){ uint8_t m = BMC_GET_BYTE(3, globalData.relaysNL[i].event>>1); relaysNL[i].setMomentary(bitRead(m,5)); relaysNL[i].setReverse(bitRead(m,6)); relaysNLTmp[i].type = 255; relaysNLTmp[i].data1 = 255; uint32_t e = globalData.relaysNL[i].event>>1; switch(e & 0xF0){ case BMC_MIDI_PROGRAM_CHANGE: case BMC_MIDI_CONTROL_CHANGE: relaysNLTmp[i].type = BMC_GET_BYTE(0, e); relaysNLTmp[i].data1 = BMC_GET_BYTE(1, e); break; } relaysNL[i].reassign(); } } void BMC::readRelaysNL(){ uint32_t _relaysNLStates = 0; for(uint8_t i = 0; i < BMC_MAX_NL_RELAYS; i++){ bitWrite(_relaysNLStates,i,relaysNL[i].update()); handleRelaysNL(i); } if(_relaysNLStates!=relayNLStates){ relayNLStates = _relaysNLStates; editor.utilitySendNLRelayActivity(relayNLStates); } } // triggered on incoming midi messages void BMC::checkRelaysNLMidiInput(uint8_t type, uint8_t channel, uint8_t data1, uint8_t data2){ for(uint8_t i = 0; i < BMC_MAX_NL_RELAYS; i++){ uint8_t match = relaysNLTmp[i].match2(type, channel, data1); // if match = 0 the message doesn't match // if match = 1 the message match including data1 // if match = 2 the message match, it's a program change BUT data1 doesn't match // if data1 doesn't match but it's a program then we use that to RESET the relay if(match==0){ continue; } uint8_t 
cmd = BMC_GET_BYTE(3, globalData.relaysNL[i].event>>1) & 0x03; if((type & 0xF0)==BMC_MIDI_PROGRAM_CHANGE){ if(cmd==BMC_RELAY_RESET || cmd==BMC_RELAY_SET){ // if match = 2 the type and channel matched but NOT data1 (program #) // in this case we set the relay to the opposite state relaysNL[i].command( match==2 ? !cmd : cmd); } else if(cmd==BMC_RELAY_TOGGLE){ // if it's a TOGGLE we only toggle when the actual program change was received if(match==1){ relaysNL[i].toggle(); } } } else { if(cmd==BMC_RELAY_RESET || cmd==BMC_RELAY_SET){ relaysNL[i].command((data2>63) ? cmd : !cmd); } else if(cmd==BMC_RELAY_TOGGLE){ if(data2==127){ relaysNL[i].toggle(); } } } } } void BMC::handleRelaysNL(uint8_t index){ #if !defined(BMC_FAST_MODE) if(index >= BMC_MAX_NL_RELAYS){ return; } #endif handleRelay(index, false, globalData.relaysNL[index].event); } #endif // LATCHING RELAYS #if BMC_MAX_L_RELAYS > 0 void BMC::setupRelaysL(){ for(uint8_t i = 0; i < BMC_MAX_L_RELAYS; i++){ uint8_t m = BMC_GET_BYTE(3,globalData.relaysL[i].event>>1); bool momentary = bitRead(m, 5); bool reversed = bitRead(m, 6); relaysL[i].begin(BMCBuildData::getRelayLPinA(i), BMCBuildData::getRelayLPinB(i), momentary, reversed); // bit 0 of event is initial state relaysL[i].command(globalData.relaysL[i].event & 0x01); } assignRelaysL(); } void BMC::assignRelaysL(){ for(uint8_t i = 0; i < BMC_MAX_L_RELAYS; i++){ uint8_t m = BMC_GET_BYTE(3,globalData.relaysL[i].event>>1); relaysL[i].setMomentary(bitRead(m,5)); relaysL[i].setReverse(bitRead(m,6)); relaysLTmp[i].type = 255; relaysLTmp[i].data1 = 255; uint32_t e = globalData.relaysL[i].event>>1; switch(e & 0xF0){ case BMC_MIDI_PROGRAM_CHANGE: case BMC_MIDI_CONTROL_CHANGE: relaysLTmp[i].type = BMC_GET_BYTE(0, e); relaysLTmp[i].data1 = BMC_GET_BYTE(1, e); break; } relaysL[i].reassign(); } } void BMC::readRelaysL(){ uint32_t _relaysLStates = 0; for(uint8_t i = 0; i < BMC_MAX_L_RELAYS; i++){ bitWrite(_relaysLStates,i,relaysL[i].update()); handleRelaysL(i); } 
if(_relaysLStates!=relayLStates){ relayLStates = _relaysLStates; editor.utilitySendLRelayActivity(relayLStates); } } // triggered on incoming midi messages void BMC::checkRelaysLMidiInput(uint8_t type, uint8_t channel, uint8_t data1, uint8_t data2){ for(uint8_t i = 0; i < BMC_MAX_L_RELAYS; i++){ uint8_t match = relaysLTmp[i].match2(type, channel, data1); // if match = 0 the message doesn't match // if match = 1 the message match including data1 // if match = 2 the message match, it's a program change BUT data1 doesn't match // if data1 doesn't match but it's a program then we use that to RESET the relay if(match==0){ continue; } uint8_t cmd = BMC_GET_BYTE(3, globalData.relaysL[i].event>>1) & 0x03; if((type & 0xF0)==BMC_MIDI_PROGRAM_CHANGE){ if(cmd==BMC_RELAY_RESET || cmd==BMC_RELAY_SET){ // if match = 2 the type and channel matched but NOT data1 (program #) // in this case we set the relay to the opposite state relaysL[i].command( match==2 ? !cmd : cmd); } else if(cmd==BMC_RELAY_TOGGLE){ // if it's a TOGGLE we only toggle when the actual program change was received if(match==1){ relaysL[i].toggle(); } } } else { if(cmd==BMC_RELAY_RESET || cmd==BMC_RELAY_SET){ relaysL[i].command( (data2>63) ? 
cmd : !cmd ); } else if(cmd==BMC_RELAY_TOGGLE){ if(data2==127){ relaysL[i].toggle(); } } } } } void BMC::handleRelaysL(uint8_t index){ #if !defined(BMC_FAST_MODE) if(index >= BMC_MAX_L_RELAYS){ return; } #endif handleRelay(index, true, globalData.relaysL[index].event); } #endif #if BMC_MAX_NL_RELAYS > 0 || BMC_MAX_L_RELAYS > 0 void BMC::handleRelay(uint8_t index, bool latching, uint32_t event){ event = event >> 1; uint8_t eventType = BMC_GET_BYTE(0, event); if(BMC_GET_BYTE(0,event)!=BMC_NONE){ switch(parseMidiEventType(eventType)){ #if BMC_MAX_PRESETS > 0 case BMC_RELAY_EVENT_TYPE_PRESET: if(presets.peekChanged()){ uint8_t newState = BMC_GET_BYTE(3, event) & 0x03; if(BMC_EVENT_TO_PRESET_NUM(event>>8) == presets.get()){ setRelay(index, latching, newState); } else if(newState!=BMC_RELAY_TOGGLE){ setRelay(index, latching, !newState); } } break; #endif case BMC_RELAY_EVENT_TYPE_PAGE: if(pageChangedPeek()){ uint8_t newState = BMC_GET_BYTE(3, event) & 0x03; if(BMC_GET_BYTE(1, event) == page){ setRelay(index, latching, newState); } else if(newState!=BMC_RELAY_TOGGLE){ setRelay(index, latching, !newState); } } break; case BMC_MIDI_PROGRAM_CHANGE: case BMC_MIDI_CONTROL_CHANGE: break; case BMC_RELAY_EVENT_TYPE_USER_1: case BMC_RELAY_EVENT_TYPE_USER_2: case BMC_RELAY_EVENT_TYPE_USER_3: case BMC_RELAY_EVENT_TYPE_USER_4: case BMC_RELAY_EVENT_TYPE_USER_5: case BMC_RELAY_EVENT_TYPE_USER_6: case BMC_RELAY_EVENT_TYPE_USER_7: case BMC_RELAY_EVENT_TYPE_USER_8: if(callback.userEventRelays){ uint8_t cmd = callback.userEventRelays(parseUserEventType(eventType), event); setRelay(index, latching, cmd); } break; } } } void BMC::checkRelaysMidiInput(uint8_t type, uint8_t channel, uint8_t data1, uint8_t data2){ if(type>=0xF0 || (type!=BMC_MIDI_PROGRAM_CHANGE && type!=BMC_MIDI_CONTROL_CHANGE)){ return; } #if BMC_MAX_NL_RELAYS > 0 checkRelaysNLMidiInput(type, channel, data1, data2); #endif #if BMC_MAX_L_RELAYS > 0 checkRelaysLMidiInput(type, channel, data1, data2); #endif } void 
BMC::checkRelaysMidiInput(BMCMidiMessage m){ checkRelaysMidiInput(m.getStatus(),m.getChannel()-1,m.getData1(),m.getData2()); } void BMC::setRelay(uint8_t index, bool latching, uint8_t cmd){ if(!latching){ #if BMC_MAX_NL_RELAYS > 0 if(index>=BMC_MAX_NL_RELAYS){ return; } relaysNL[index].command(cmd); #endif } else { #if BMC_MAX_L_RELAYS > 0 if(index>=BMC_MAX_L_RELAYS){ return; } relaysL[index].command(cmd); #endif } } bool BMC::getRelayState(uint8_t index, bool latching){ if(!latching){ #if BMC_MAX_NL_RELAYS > 0 if(index < BMC_MAX_NL_RELAYS){ return relaysNL[index].getState(); } #endif } else { #if BMC_MAX_L_RELAYS > 0 if(index < BMC_MAX_L_RELAYS){ return relaysL[index].getState(); } #endif } return false; } #endif
Kadachha-Bharat/Admin_panel
src/app/views/dashboard/DashboardRoutes.js
import { MatxLoadable } from "../../../components/matx/index";
import { authRoles } from "../../auth/authRoles";

// Lazily loaded dashboard view (code-split via MatxLoadable).
const Dashboard = MatxLoadable({
  loader: () => import("./dashboard"),
});

// Route table for the dashboard section; access restricted to admin users.
const dashboardRoutes = [
  {
    path: "/dashboard",
    component: Dashboard,
    auth: authRoles.admin,
  },
];

export default dashboardRoutes;
CaQtiml/Problem-Solved
codeforce1154B.cpp
#include "bits/stdc++.h" using namespace std; int dp[105]; int ck[105]; //int cnt; int main() { int n; cin >> n ; for(int i=1;i<=n;i++) cin >> dp[i]; sort(dp+1,dp+1+n); int cnt=0; int kk=1; for(int i=1;i<=n;i++) { if(dp[i]!=dp[i-1]) { cnt++; ck[kk++]=dp[i]; } } //for(int i=1;i<=3;i++) cout << ck[i] << "\n"; //cout << cnt << "\n"; if(cnt==1) {cout << "0";exit(0);} if(cnt>3) {cout << "-1";exit(0);} kk--; int d=0; for(int i=2;i<=kk;i++) { if(i==2) { d=ck[i]-ck[i-1]; } else { if(d!=ck[i]-ck[i-1]) {cout << "-1";exit(0);} } } //if(kk==1) {cout << "0";exit(0);} if(kk==2) { if((ck[2]-ck[1])%2==0) {cout << (ck[2]-ck[1])/2; exit(0);} int diff=ck[2]-ck[1]; if(ck[1]+diff==ck[2] || ck[2]+diff==ck[1]) {cout << ck[2]-ck[1];exit(0);} if((ck[2]-ck[1])%2!=0) {cout << "-1";exit(0);} } if(kk==3) { cout << ck[2]-ck[1]; } }
lnceballosz/inventaire
server/lib/emails/build_transaction_email.js
<reponame>lnceballosz/inventaire
// SPDX-FileCopyrightText: 2014 <NAME>, <NAME>
// SPDX-License-Identifier: AGPL-3.0-only
const _ = require('builders/utils')
const user_ = require('controllers/user/lib/user')
const transactions_ = require('controllers/transactions/lib/transactions')
const items_ = require('controllers/items/lib/items')
const snapshot_ = require('controllers/items/lib/snapshot/snapshot')
const comments_ = require('controllers/comments/lib/comments')
const { states } = require('models/attributes/transaction')
const email_ = require('./email')

// Builds the email for a transaction, tailored to whichever party (owner or
// requester) has unread updates. Returns undefined when neither party needs
// to be notified, in which case no email should be sent.
module.exports = async transactionId => {
  const transaction = await transactions_.byId(transactionId)
  const role = findUserToNotify(transaction)
  // If no role needs to be notified, no email needs to be sent
  if (!role) return
  // Progressively building the email ViewModel
  transaction.role = role
  await addAssociatedData(transaction)
  return buildTailoredEmail(transaction)
}

// Fetches the owner, requester, item and messages associated with the
// transaction and attaches them to the transaction object (mutated in place),
// then derives the timeline and user aliases used by the email templates.
const addAssociatedData = transaction => {
  return Promise.all([
    user_.byId(transaction.owner),
    user_.byId(transaction.requester),
    items_.byId(transaction.item).then(snapshot_.addToItem).catch(catchDeleteItems),
    comments_.byTransactionId(transaction._id)
  ])
  .then(([ owner, requester, item, messages ]) => {
    owner = user_.serializeData(owner)
    requester = user_.serializeData(requester)
    let image
    if (item.snapshot) {
      item.title = item.snapshot['entity:title']
      image = item.snapshot['entity:image']
    } else {
      // NOTE(review): items recovered via catchDeleteItems get `snapshot: {}`,
      // which is truthy and thus takes the branch above with undefined values;
      // this fallback only triggers when `snapshot` is missing entirely — confirm intent.
      item.title = transaction.snapshot.entity.title
      image = transaction.snapshot.entity && transaction.snapshot.entity.image
    }
    // Overriding transaction document ids by the ids' docs (owner, requester, etc.)
    // for the email ViewModel
    Object.assign(transaction, { owner, requester, item, messages, image })
    return transaction
  })
  .then(buildTimeline)
  .then(aliasUsers)
}

// Picks the template matching the transaction state and renders it.
const buildTailoredEmail = transaction => {
  const emailType = findEmailType(transaction)
  return email_.transactions[emailType](transaction)
}

// Returns 'owner' or 'requester' depending on who has unread updates,
// or null when both have read everything.
const findUserToNotify = transaction => {
  const { read } = transaction
  // assumes that both can't have unread updates
  if (!read.owner) {
    return 'owner'
  } else if (!read.requester) {
    return 'requester'
  } else {
    return null
  }
}

// A transaction counts as "new" (from the owner's point of view) as long as
// the owner has neither acted on it nor sent any message.
const newTransaction = transaction => {
  const ownerActed = _.some(transaction.actions, ownerIsActor)
  if (ownerActed) return false
  const ownerSentMessage = _.some(transaction.messages, OwnerIsSender(transaction))
  if (ownerSentMessage) return false
  else return true
}

// Maps the transaction state to one of the email template names
// exposed by ./email under `transactions`.
const findEmailType = transaction => {
  if (transaction.role === 'owner') {
    if (newTransaction(transaction)) {
      return 'yourItemWasRequested'
    } else {
      return 'updateOnYourItem'
    }
  } else {
    return 'updateOnItemYouRequested'
  }
}

// Merges actions and messages into a single chronological timeline,
// then keeps only the trailing run of events by the same user.
const buildTimeline = transaction => {
  let { actions, messages } = transaction
  actions = formatActions(transaction, actions)
  messages = formatMessages(transaction, messages)
  let timeline = _.union(actions, messages)
  timeline = _.sortBy(timeline, ev => ev.created || ev.timestamp)
  return extractTimelineLastSequence(transaction, timeline)
}

// format actions and messages for ViewModels
const formatActions = (transaction, actions) => {
  const { owner, requester } = transaction
  return actions.map(action => {
    action.user = ownerIsActor(action) ? owner : requester
    return action
  })
}

const formatMessages = (transaction, messages) => {
  const { owner, requester } = transaction
  return messages.map(message => {
    message.user = ownerIsMessager(owner, message) ? owner : requester
    return message
  })
}

// Pops events off the end of the timeline while they belong to the same user
// as the very last event; that trailing sequence becomes transaction.timeline.
const extractTimelineLastSequence = (transaction, timeline) => {
  const lastSequence = []
  const lastEvent = timeline.pop()
  lastSequence.push(lastEvent)
  let sameSequence = true
  while ((timeline.length > 0) && sameSequence) {
    const prevEvent = timeline.pop()
    if (prevEvent.user._id === lastEvent.user._id) {
      lastSequence.unshift(prevEvent)
    } else {
      sameSequence = false
    }
  }
  transaction.timeline = lastSequence
  return transaction
}

const aliasUsers = transaction => {
  const lastEvent = transaction.timeline.slice(-1)[0]
  // deducing main and other user from the last sequence
  // as the user notified (mainUser) is necessarily the one that hasn't acted last
  transaction.other = lastEvent.user
  transaction.mainUser = findMainUser(transaction)
  return transaction
}

const findMainUser = transaction => {
  const { owner, requester, other } = transaction
  if (owner._id === other._id) {
    return requester
  } else {
    return owner
  }
}

// Predicates distinguishing owner-originated events.
const ownerIsActor = action => states[action.action].actor === 'owner'
const OwnerIsSender = transaction => message => message.user === transaction.owner
const ownerIsMessager = (owner, message) => message.user === owner._id

// A deleted item (404) is replaced by a stub so the email can still be built
// from the transaction's own snapshot data; any other error is rethrown.
const catchDeleteItems = err => {
  if (err.statusCode === 404) return { snapshot: {} }
  else throw err
}
sirinath/Harmony
drlvm/vm/gc_gen/src/mark_sweep/gc_ms.h
/*
 *  Licensed to the Apache Software Foundation (ASF) under one or more
 *  contributor license agreements.  See the NOTICE file distributed with
 *  this work for additional information regarding copyright ownership.
 *  The ASF licenses this file to You under the Apache License, Version 2.0
 *  (the "License"); you may not use this file except in compliance with
 *  the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 */

/* Public interface of the mark-sweep GC (GC_MS): the collector descriptor
 * struct plus allocation fast paths and lifecycle entry points. */

#ifndef _GC_MS_H_
#define _GC_MS_H_

#include "wspace.h"

/* heap size limit is not interesting. only for manual tuning purpose */
extern POINTER_SIZE_INT min_heap_size_bytes;
extern POINTER_SIZE_INT max_heap_size_bytes;

/* Mark-sweep GC descriptor. The leading fields mirror the generic GC struct
 * layout (see the "overloaded as GC" / "END of GC" markers below), so a
 * GC_MS* can be used where a GC* is expected. */
typedef struct GC_MS {
  /* <-- First couple of fields overloaded as GC */
  void* physical_start;
  void *heap_start;
  void *heap_end;
  POINTER_SIZE_INT reserved_heap_size;
  POINTER_SIZE_INT committed_heap_size;
  unsigned int num_collections;
  Boolean in_collection;
  int64 time_collections;
  float survive_ratio;

  /* mutation related info */
  Mutator *mutator_list;
  SpinLock mutator_list_lock;
  unsigned int num_mutators;

  /* collection related info */
  Collector **collectors;
  unsigned int num_collectors;
  unsigned int num_active_collectors; /* not all collectors are working */

  /*concurrent markers and collectors*/
  Conclctor** conclctors;
  unsigned int num_conclctors;
  unsigned int num_active_markers;
  unsigned int num_active_sweepers;

  /* metadata is the pool for rootset, markstack, etc. */
  GC_Metadata *metadata;
  Finref_Metadata *finref_metadata;

  unsigned int collect_kind; /* MAJOR or MINOR */
  unsigned int last_collect_kind;
  unsigned int cause; /*GC_CAUSE_LOS_IS_FULL, GC_CAUSE_NOS_IS_FULL, or GC_CAUSE_RUNTIME_FORCE_GC*/
  Boolean collect_result; /* succeed or fail */

  Boolean generate_barrier;

  /* FIXME:: this is wrong! root_set belongs to mutator */
  Vector_Block *root_set;
  Vector_Block *weakroot_set;
  Vector_Block *uncompressed_root_set;

  //For_LOS_extend
  Space_Tuner *tuner;

  volatile unsigned int gc_concurrent_status;
  Collection_Scheduler* collection_scheduler;

  /* locks serializing the concurrent phases and scheduling decisions */
  SpinLock lock_con_mark;
  SpinLock lock_enum;
  SpinLock lock_con_sweep;
  SpinLock lock_collect_sched;

  /* system info */
  unsigned int _system_alloc_unit;
  unsigned int _machine_page_size_bytes;
  unsigned int _num_processors;
  /* END of GC --> */

  /* the single wspace this mark-sweep GC allocates from and collects */
  Wspace *wspace;
} GC_MS;

//////////////////////////////////////////////////////////////////////////////////////////

/* Thread-local allocation fast path: tries the allocator's local block first
 * and updates per-mutator allocation statistics on success. Returns NULL when
 * the thread-local block cannot satisfy the request. */
inline void *gc_ms_fast_alloc(unsigned size, Allocator *allocator)
{
  void *p_obj = wspace_thread_local_alloc(size, allocator);
  if(p_obj) {
    ((Mutator*)allocator)->new_obj_size += size;
    ((Mutator*)allocator)->new_obj_num++;
  }
  return p_obj;
}

/* Slow-path allocation through the wspace; only bumps the object count
 * (note: unlike the fast path, new_obj_size is not updated here). */
inline void *gc_ms_alloc(unsigned size, Allocator *allocator)
{
  void * p_obj = wspace_alloc(size, allocator);
  if(p_obj)
    ((Mutator*)allocator)->new_obj_num++;
  return p_obj;
}

/* Trivial accessors for the GC's wspace. */
inline Wspace *gc_ms_get_wspace(GC_MS *gc)
{ return gc->wspace; }

inline void gc_ms_set_wspace(GC_MS *gc, Wspace *wspace)
{ gc->wspace = wspace; }

/* Free/committed sizes, delegated to the wspace. */
inline POINTER_SIZE_INT gc_ms_free_memory_size(GC_MS *gc)
{ return wspace_free_memory_size(gc_ms_get_wspace(gc)); }

inline POINTER_SIZE_INT gc_ms_total_memory_size(GC_MS *gc)
{ return space_committed_size((Space*)gc_ms_get_wspace(gc)); }

void gc_ms_print_detail_stat(GC_MS *gc);

/////////////////////////////////////////////////////////////////////////////////////////

/* Lifecycle and collection entry points, implemented in gc_ms.cpp. */
void gc_ms_initialize(GC_MS *gc, POINTER_SIZE_INT initial_heap_size, POINTER_SIZE_INT final_heap_size);
void gc_ms_destruct(GC_MS *gc);
void gc_ms_reclaim_heap(GC_MS *gc);
void gc_ms_iterate_heap(GC_MS *gc);
/* two overloads: the parameterless form presumably picks a default marker
 * count — TODO confirm against the implementation */
void gc_ms_start_con_mark(GC_MS* gc);
void gc_ms_start_con_mark(GC_MS* gc, unsigned int num_markers);
void gc_ms_update_space_stat(GC_MS* gc);
void gc_ms_start_con_sweep(GC_MS* gc, unsigned int num_collectors);
void gc_ms_start_mostly_con_mark(GC_MS* gc, unsigned int num_markers);
void gc_ms_start_mostly_con_final_mark(GC_MS* gc, unsigned int num_markers);
void gc_ms_reset_space_stat(GC_MS* gc);
unsigned int gc_ms_get_live_object_size(GC_MS* gc);

FORCE_INLINE Con_Collection_Statistics *gc_ms_get_con_collection_stat(GC_MS* gc)
{
  return gc->wspace->con_collection_statistics;
}

#endif // _GC_MS_H_
ikramulkayes/Python_season2
practice25.py
"""Group cricket players into {country: {role: players}} from (name, code) pairs.

The 4-digit code encodes the team in its first digit and the role in its
second digit. Single-member groups are collapsed to the bare name; larger
groups become tuples (preserving the original script's output format).
"""

my_tuple = (("Mominul", 1101), ("Mustafiz", 1202), ("Bell", 2101), ("Cook", 2103),
            ("Smith", 3101), ("Finch", 3102), ("Starc", 3203), ("Imrul", 1103),
            ("Taijul", 1204))

# First digit of the code selects the country, second digit the role.
_COUNTRIES = {"1": "Bangladesh", "2": "England", "3": "Australia"}
_ROLES = {"1": "Batters", "2": "Bowlers"}


def group_players(players):
    """Group (name, code) pairs into {country: {role: [names]}}.

    Args:
        players: iterable of (name, numeric_code) pairs.

    Returns:
        Nested dict in first-seen insertion order; pairs whose country or
        role digit is not in the known scheme are silently skipped (this
        matches the original script, which only handled digits 1-3 / 1-2).
    """
    grouped = {}
    for name, code in players:
        digits = str(code)
        country = _COUNTRIES.get(digits[0])
        role = _ROLES.get(digits[1])
        if country is None or role is None:
            continue  # unknown code: ignored, as in the original
        grouped.setdefault(country, {}).setdefault(role, []).append(name)
    return grouped


def format_groups(grouped):
    """Collapse single-name lists to the bare string and longer lists to tuples.

    Args:
        grouped: output of group_players().

    Returns:
        New dict with the same structure but formatted leaf values.
    """
    return {
        country: {
            role: names[0] if len(names) == 1 else tuple(names)
            for role, names in roles.items()
        }
        for country, roles in grouped.items()
    }


fdic = format_groups(group_players(my_tuple))
print(fdic)
uktrade/contractor-approval
main/tests/views/resourcing_request/test_resourcing_request.py
<reponame>uktrade/contractor-approval
# View tests for the resourcing-request workflow: create/list/detail access
# control, sending for approval, commenting/reviewing, and the full
# approval-to-completion scenario. Fixtures such as `client`,
# `hiring_manager`, `busops`, `head_of_profession`, `resourcing_request` and
# `full_resourcing_request` come from the project's conftest — each role
# fixture presumably logs that user in (see main.tests.conftest).
import pytest
from django.contrib import messages
from django.core.exceptions import ValidationError
from django.urls import reverse

from main import tasks
from main.models import Approval, ResourcingRequest
from main.services.review import ReviewAction
from main.tests.conftest import login
from main.tests.constants import USERNAME_APPROVAL_ORDER


class TestResourcingRequestCreateView:
    """Access control for the create view."""

    def test_hiring_manager_can_view(self, client, hiring_manager):
        r = client.get(reverse("resourcing-request-create"))
        assert r.status_code == 200


class TestResourcingRequestListView:
    """Access control for the list view: busops yes, hiring manager no."""

    def test_busops_can_view(self, client, busops):
        r = client.get(reverse("resourcing-request-list"))
        assert r.status_code == 200

    def test_hiring_manager_cannot_view(self, client, hiring_manager):
        r = client.get(reverse("resourcing-request-list"))
        assert r.status_code == 403


class TestResourcingRequestDetailView:
    """Access control for the detail view."""

    def test_hiring_manager_can_view(self, client, hiring_manager, resourcing_request):
        r = client.get(
            reverse(
                "resourcing-request-detail",
                kwargs={"resourcing_request_pk": resourcing_request.pk},
            )
        )
        assert r.status_code == 200


class TestResourcingRequestSendForApprovalView:
    """Behavior of the send-for-approval transition."""

    # Helpers
    def _send_for_approval(self, client, resourcing_request):
        # POST the send-for-approval action for the given request.
        return client.post(
            reverse(
                "resourcing-request-send-for-approval",
                kwargs={"resourcing_request_pk": resourcing_request.pk},
            )
        )

    # Tests
    def test_hiring_manager_can_send_for_approval(
        self, client, hiring_manager, full_resourcing_request
    ):
        r = self._send_for_approval(client, full_resourcing_request)
        assert r.status_code == 302

        # The request moves into the awaiting-approvals state.
        full_resourcing_request.refresh_from_db()
        assert (
            full_resourcing_request.state == ResourcingRequest.State.AWAITING_APPROVALS
        )

    def test_busops_cannot_send_for_approval(
        self, client, busops, full_resourcing_request
    ):
        r = self._send_for_approval(client, full_resourcing_request)
        assert r.status_code == 403

    def test_cannot_send_for_approval_twice(
        self, client, hiring_manager, full_resourcing_request
    ):
        r = self._send_for_approval(client, full_resourcing_request)
        assert r.status_code == 302

        # A second send raises rather than silently succeeding.
        with pytest.raises(ValidationError):
            r = self._send_for_approval(client, full_resourcing_request)


# The add comment view was replaced with the review view. I'm going to leave this as is
# for the time being as it groups the comment tests together.
class TestResourcingRequestAddCommentView:
    """Commenting via the review view (COMMENT action)."""

    # Helpers
    def _add_comment(self, client, resourcing_request, text: str):
        # POST a COMMENT review action with the given text.
        return client.post(
            reverse(
                "resourcing-request-review",
                kwargs={"resourcing_request_pk": resourcing_request.pk},
            ),
            data={"action": ReviewAction.COMMENT.value, "text": text},
        )

    # Tests
    def test_can_add_comment(self, client, hiring_manager, full_resourcing_request):
        text = "This is a test comment."

        r = self._add_comment(client, full_resourcing_request, text=text)
        assert r.status_code == 200

        assert full_resourcing_request.comments.last().text == text

    def test_notification_is_sent(
        self, client, hiring_manager, full_resourcing_request
    ):
        text = "This is a test comment."

        self._add_comment(client, full_resourcing_request, text=text)

        # tasks.TEST_NOTIFICATION_BOX captures outgoing notifications in tests.
        assert len(tasks.TEST_NOTIFICATION_BOX) == 1
        assert tasks.TEST_NOTIFICATION_BOX[0]["personalisation"]["commenter"]


class TestResourcingRequestReviewView:
    """Approving via the review view (APPROVE action)."""

    @pytest.fixture(autouse=True)
    def _setup(self, client, hiring_manager, full_resourcing_request):
        # Every test in this class starts with the request sent for approval
        # by the hiring manager.
        self.client = client
        self.client.post(
            reverse(
                "resourcing-request-send-for-approval",
                kwargs={"resourcing_request_pk": full_resourcing_request.pk},
            )
        )

    def _review(
        self,
        resourcing_request,
        action: ReviewAction,
        approval_type: Approval.Type,
        text: str = None,
        follow: bool = False,
    ):
        # POST an arbitrary review action (approve/reject/comment).
        return self.client.post(
            reverse(
                "resourcing-request-review",
                kwargs={"resourcing_request_pk": resourcing_request.pk},
            ),
            data={
                "action": action.value,
                "approval_type": approval_type.value,
                "text": text,
            },
            follow=follow,
        )

    def _approve(
        self,
        resourcing_request,
        approval_type: str,
        text: str = None,
        follow: bool = False,
    ):
        # Convenience wrapper around _review for the APPROVE action.
        return self._review(
            resourcing_request=resourcing_request,
            action=ReviewAction.APPROVE,
            approval_type=Approval.Type(approval_type),
            text=text,
            follow=follow,
        )

    def test_can_add_approval(self, head_of_profession, full_resourcing_request):
        r = self._approve(
            full_resourcing_request,
            approval_type="head_of_profession",
            text="LGTM!",
        )
        assert r.status_code == 200

        full_resourcing_request.refresh_from_db()
        assert full_resourcing_request.head_of_profession_approval

    def test_requestor_is_notified_of_approval(
        self, head_of_profession, full_resourcing_request, settings
    ):
        self._approve(
            full_resourcing_request,
            approval_type="head_of_profession",
            text="LGTM!",
        )

        approval_notifications = [
            x
            for x in tasks.TEST_NOTIFICATION_BOX
            if x["template_id"] == settings.GOVUK_NOTIFY_APPROVAL_TEMPLATE_ID
        ]

        # 1 comment left notification was sent to the requestor
        assert len(approval_notifications) == 1
        assert (
            approval_notifications[0]["personalisation"]["approved_or_rejected"]
            == "approved"
        )

    def test_confirmation_message(self, head_of_profession, full_resourcing_request):
        r = self._approve(
            full_resourcing_request,
            approval_type="head_of_profession",
            text="LGTM!",
            follow=True,
        )
        assert r.status_code == 200
        # A confirmation message was queued via the Django messages framework.
        assert messages.get_messages(r.wsgi_request)


def test_scenario_mark_as_complete(client, full_resourcing_request):
    """End-to-end scenario: send for approval, approve as every role in
    USERNAME_APPROVAL_ORDER, then mark the request as complete."""
    # send for approval
    login(client, "hiring-manager")
    client.post(
        reverse(
            "resourcing-request-send-for-approval",
            kwargs={"resourcing_request_pk": full_resourcing_request.pk},
        )
    )

    # give all approvals
    for username, approval_type in USERNAME_APPROVAL_ORDER:
        login(client, username)
        client.post(
            reverse(
                "resourcing-request-review",
                kwargs={"resourcing_request_pk": full_resourcing_request.pk},
            ),
            data={
                "action": ReviewAction.APPROVE.value,
                "approval_type": approval_type.value,
            },
        )

    full_resourcing_request.refresh_from_db()
    assert full_resourcing_request.state == ResourcingRequest.State.APPROVED

    login(client, "hiring-manager")

    # mark as complete
    client.post(
        reverse(
            "resourcing-request-mark-as-complete",
            kwargs={"resourcing_request_pk": full_resourcing_request.pk},
        )
    )

    full_resourcing_request.refresh_from_db()
    assert full_resourcing_request.state == ResourcingRequest.State.COMPLETED
kreativgebiet/kickup-react
client/modules/tasks/__test__/selectors.test.js
// @flow
import { getTasks } from '../selectors'

// Unit tests for the tasks selector: returns the tasks subtree when present,
// undefined when the state has no tasks key, and throws on a missing state.
describe('tasks selectors', () => {
  it('returns tasks tree', () => {
    expect(getTasks({ tasks: {} })).toBeInstanceOf(Object)
  })

  it('returns nothing with empty state', () => {
    expect(getTasks({})).toBeUndefined()
  })

  it('returns nothing with undefined attribute', () => {
    expect(() => getTasks()).toThrow(TypeError)
  })
})
glenn-jocher/torchflare
torchflare/metrics/meters.py
"""Implementation of utilities for metrics."""
from typing import Tuple

import torch
from einops import reduce


class _BaseMetric:
    """A Class which decides type of classification i.e. binary,multilabel or multiclass."""

    def __init__(self, multilabel: bool = False):
        """Constructor method for the _BaseMetric class.

        Args:
            multilabel: Set to True if problem type is multilabel.
        """
        self.multilabel = multilabel
        # Inferred lazily by _check_type on the first batch; one of
        # "binary", "multiclass" or "multilabel".
        self.case_type = None

    @staticmethod
    def _check_shape(outputs: torch.Tensor, targets: torch.Tensor):
        """Function to check if there is a mismatch between outputs and targets.

        Accepts outputs with the same rank as targets, or one extra
        (class) dimension.

        Args:
            outputs: The outputs of the net.
            targets: The targets.

        Raises:
            ValueError: If shapes does not match.
        """
        if not (outputs.ndim == targets.ndim or outputs.ndim == targets.ndim + 1):
            raise ValueError("Preds and Targets must have same number of dimensions")

    @staticmethod
    def _convert_to_onehot(num_classes: int, indices: torch.Tensor) -> torch.Tensor:
        """Converts tensor to one_hot representation.

        Args:
            num_classes: The number of classes.
            indices: class-index tensor of shape (N, ...).

        Returns:
            one_hot converted tensor of shape (N, num_classes, ...).
        """
        onehot = torch.zeros(indices.shape[0], num_classes, *indices.shape[1:], dtype=indices.dtype)
        index = indices.long().unsqueeze(1).expand_as(onehot)
        # scatter_ writes 1.0 at each class index along dim 1 (in-place).
        return onehot.scatter_(1, index, 1.0)

    @staticmethod
    def detach_tensor(x: torch.Tensor) -> torch.Tensor:
        """Detaches the tensor from the graph and moves it to the CPU."""
        return x.detach().cpu()

    # noinspection PyUnboundLocalVariable
    def _check_type(self, outputs: torch.Tensor, targets: torch.Tensor):
        """Method to infer type of the problem (binary/multiclass/multilabel)."""
        self._check_shape(outputs, targets)
        # Extra class dimension on outputs => class scores per sample.
        if targets.ndim + 1 == outputs.ndim:
            if outputs.shape[1] == 1:
                case_type = "binary"
            else:
                case_type = "multiclass"
        elif outputs.ndim == targets.ndim:
            if self.multilabel:
                case_type = "multilabel"
            else:
                case_type = "binary"

        # Only set once: the first batch fixes the case type for the metric.
        if self.case_type is None:
            self.case_type = case_type


class _BaseInputHandler(_BaseMetric):
    """Class to handle shapes for various classification tasks."""

    def __init__(
        self,
        num_classes: int,
        threshold: float = 0.5,
        multilabel: bool = False,
        average: str = "macro",
    ):
        """Constructor method.

        Args:
            num_classes: The number of classes.
            threshold: The threshold for binarization.
            multilabel: Whether the problem is multilabel or not.
            average: One of macro or micro.
        """
        super(_BaseInputHandler, self).__init__(multilabel=multilabel)

        self.num_classes = num_classes
        self.threshold = threshold
        self.multilabel = multilabel
        # Small epsilon guarding against division by zero in reduce().
        self.eps = 1e-20
        self.average = average
        assert self.average in ["micro", "macro"], "Average should be one of ['micro , 'macro'] "  # noqa: S101

    @staticmethod
    def _calculate_stats(
        true_preds: torch.Tensor,
        false_preds: torch.Tensor,
        pos_preds: torch.Tensor,
        neg_preds: torch.Tensor,
    ):
        # Elementwise intersections of correctness and predicted sign masks.
        tp = true_preds * pos_preds
        fp = false_preds * pos_preds
        tn = true_preds * neg_preds
        fn = false_preds * neg_preds
        return tp, fp, tn, fn

    def compute_stats(
        self,
        outputs: torch.Tensor,
        targets: torch.Tensor,
    ):
        """Computes true_positives, false_positives, true_negatives, false_negatives.

        Args:
            outputs: The outputs of the net.
            targets: The targets.

        Returns:
            True positives , false positives, true negatives , false negatives.
            Per-class tensors for "macro" averaging, scalars for "micro".
        """
        outputs, targets = self._compute(outputs=outputs, targets=targets)

        true_preds = torch.eq(targets, outputs)
        false_preds = ~true_preds
        pos_preds = torch.eq(outputs, 1.0)
        neg_preds = torch.eq(outputs, 0.0)
        # Some einops operations: macro keeps the class axis, micro sums it away.
        pattern = "r c -> c" if self.average == "macro" else "r c -> "

        tp, fp, tn, fn = self._calculate_stats(true_preds, false_preds, pos_preds, neg_preds)
        # einops reductions
        tp = reduce(tp, pattern, reduction="sum")
        fp = reduce(fp, pattern, reduction="sum")
        tn = reduce(tn, pattern, reduction="sum")
        fn = reduce(fn, pattern, reduction="sum")

        return tp, fp, tn, fn

    def reduce(self, numerator: torch.Tensor, denominator: torch.Tensor) -> torch.Tensor:
        """Method to perform macro or micro reduction of a ratio metric."""
        frac = numerator / (denominator + self.eps)
        return torch.mean(frac) if self.average == "macro" else frac

    def _compute(self, outputs: torch.Tensor, targets: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]:
        # Normalizes outputs/targets into binarized 2-D (samples, classes) form.
        self._check_type(outputs=outputs, targets=targets)

        if self.case_type == "multiclass":
            targets = self._convert_to_onehot(num_classes=self.num_classes, indices=targets.view(-1))
            # We receive logits need argmax on preds
            outputs = torch.argmax(outputs, dim=1)
            outputs = self._convert_to_onehot(num_classes=self.num_classes, indices=outputs.view(-1))
        else:
            # Handling multilabel and binary cases
            outputs = torch.sigmoid(outputs).float()
            outputs = (outputs >= self.threshold).long()

        outputs = outputs.reshape(outputs.shape[0], -1)
        targets = targets.reshape(targets.shape[0], -1)
        return outputs, targets


def calculate_segmentation_statistics(outputs: torch.Tensor, targets: torch.Tensor, class_dim: int = 1, threshold=None):
    """Compute confusion statistics for a segmentation task.

    Args:
        outputs: predictions of shape (bs, C, ...) — probabilities unless
            already binarized; pass a threshold to binarize here.
        targets: ground truth of the same shape as outputs.
        threshold: threshold for binarization of predictions.
        class_dim: indicates class dimension (K).

    Returns:
        True positives , false positives , false negatives for segmentation task,
        each reduced over all dimensions except class_dim.
    """
    num_dims = len(outputs.shape)
    assert num_dims > 2, "Found only two dimensions, shape should be [bs , C , ...]"  # noqa: S101
    assert outputs.shape == targets.shape, "shape mismatch"  # noqa: S101

    if threshold is not None:
        outputs = (outputs > threshold).float()

    # Sum over every axis except the class axis.
    dims = [dim for dim in range(num_dims) if dim != class_dim]

    true_positives = torch.sum(outputs * targets, dim=dims)
    false_positives = torch.sum(outputs * (1 - targets), dim=dims)
    false_negatives = torch.sum(targets * (1 - outputs), dim=dims)
    return true_positives, false_positives, false_negatives


class MetricMeter:
    """Base Class to structuring your metrics."""

    def accumulate(self, outputs, targets):
        """Method to accumulate outputs and targets per the batch."""
        raise NotImplementedError

    def reset(self):
        """Method to reset the accumulation lists."""
        raise NotImplementedError


__all__ = [
    "_BaseMetric",
    "_BaseInputHandler",
    "MetricMeter",
    "calculate_segmentation_statistics",
]
Pixelated-Project/aosp-android-jar
android-31/src/android/telephony/BinderCacheManager.java
/*
 * Copyright (C) 2020 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.telephony;

import android.annotation.NonNull;
import android.os.IBinder;
import android.os.IInterface;
import android.os.RemoteException;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.NoSuchElementException;
import java.util.concurrent.atomic.AtomicReference;

/**
 * Keeps track of the connection to a Binder node, refreshes the cache if the node dies, and lets
 * interested parties register listeners on the node to be notified when the node has died via the
 * registered {@link Runnable}.
 * @param <T> The IInterface representing the Binder type that this manager will be managing the
 *           cache of.
 * @hide
 */
public class BinderCacheManager<T extends IInterface> {

    /**
     * Factory class for creating new IInterfaces in the case that {@link #getBinder()} is
     * called and there is no active binder available.
     * @param <T> The IInterface that should be cached and returned to the caller when
     * {@link #getBinder()} is called until the Binder node dies.
     */
    public interface BinderInterfaceFactory<T> {
        /**
         * @return A new instance of the Binder node, which will be cached until it dies.
         */
        T create();
    }

    /**
     * Tracks the cached Binder node as well as the listeners that were associated with that
     * Binder node during its lifetime. If the Binder node dies, the listeners will be called and
     * then this tracker will be unlinked and cleaned up.
     */
    private class BinderDeathTracker implements IBinder.DeathRecipient {

        private final T mConnection;
        // Guarded by synchronized (mListeners); also acts as the lock for
        // coordinating addListener/removeListener with binderDied.
        private final HashMap<Object, Runnable> mListeners = new HashMap<>();

        /**
         * Create a tracker to cache the Binder node and add the ability to listen for the cached
         * interface's death.
         */
        BinderDeathTracker(@NonNull T connection) {
            mConnection = connection;
            try {
                mConnection.asBinder().linkToDeath(this, 0 /*flags*/);
            } catch (RemoteException e) {
                // isAlive will return false.
            }
        }

        public boolean addListener(Object key, Runnable r) {
            synchronized (mListeners) {
                // Refuse registration on a dead binder so the caller can retry
                // against a fresh cache entry.
                if (!isAlive()) return false;
                mListeners.put(key, r);
                return true;
            }
        }

        public void removeListener(Object runnableKey) {
            synchronized (mListeners) {
                mListeners.remove(runnableKey);
            }
        }

        @Override
        public void binderDied() {
            ArrayList<Runnable> listeners;
            synchronized (mListeners) {
                // Snapshot and clear under the lock; run the callbacks outside
                // it to avoid invoking alien code while holding the lock.
                listeners = new ArrayList<>(mListeners.values());
                mListeners.clear();
                try {
                    mConnection.asBinder().unlinkToDeath(this, 0 /*flags*/);
                } catch (NoSuchElementException e) {
                    // No need to worry about this, this means the death recipient was never linked.
                }
            }
            listeners.forEach(Runnable::run);
        }

        /**
         * @return The cached Binder.
         */
        public T getConnection() {
            return mConnection;
        }

        /**
         * @return true if the cached Binder is alive at the time of calling, false otherwise.
         */
        public boolean isAlive() {
            return mConnection.asBinder().isBinderAlive();
        }
    }

    private final BinderInterfaceFactory<T> mBinderInterfaceFactory;
    // Holds the current tracker; swapped atomically in getTracker() when the
    // cached binder has died.
    private final AtomicReference<BinderDeathTracker> mCachedConnection;

    /**
     * Create a new instance, which manages a cached IInterface and creates new ones using the
     * provided factory when the cached IInterface dies.
     * @param factory The factory used to create new Instances of the cached IInterface when it
     *               dies.
     */
    public BinderCacheManager(BinderInterfaceFactory<T> factory) {
        mBinderInterfaceFactory = factory;
        mCachedConnection = new AtomicReference<>();
    }

    /**
     * Get the binder node connection and add a Runnable to be run if this Binder dies. Once this
     * Runnable is run, the Runnable itself is discarded and must be added again.
     * <p>
     * Note: There should be no assumptions here as to which Thread this Runnable is called on. If
     * the Runnable should be called on a specific thread, it should be up to the caller to handle
     * that in the runnable implementation.
     * @param runnableKey The Key associated with this runnable so that it can be removed later
     *                    using {@link #removeRunnable(Object)} if needed.
     * @param deadRunnable The runnable that will be run if the cached Binder node dies.
     * @return T if the runnable was added or {@code null} if the connection is not alive right now
     * and the associated runnable was never added.
     */
    public T listenOnBinder(Object runnableKey, Runnable deadRunnable) {
        if (runnableKey == null || deadRunnable == null) return null;
        BinderDeathTracker tracker = getTracker();
        if (tracker == null) return null;

        boolean addSucceeded = tracker.addListener(runnableKey, deadRunnable);
        return addSucceeded ? tracker.getConnection() : null;
    }

    /**
     * @return The cached Binder node. May return null if the requested Binder node is not currently
     * available.
     */
    public T getBinder() {
        BinderDeathTracker tracker = getTracker();
        return (tracker != null) ? tracker.getConnection() : null;
    }

    /**
     * Removes a previously registered runnable associated with the returned  cached Binder node
     * using the key it was registered with in {@link #listenOnBinder} if the runnable still exists.
     * @param runnableKey The key that was used to register the Runnable earlier.
     * @return The cached Binder node that the runnable used to registered to or null if the cached
     * Binder node is not alive anymore.
     */
    public T removeRunnable(Object runnableKey) {
        if (runnableKey == null) return null;
        BinderDeathTracker tracker = getTracker();
        if (tracker == null) return null;
        tracker.removeListener(runnableKey);
        return tracker.getConnection();
    }

    /**
     * @return The BinderDeathTracker container, which contains the cached IInterface instance or
     * null if it is not available right now.
     */
    private BinderDeathTracker getTracker() {
        return mCachedConnection.updateAndGet((oldVal) -> {
            BinderDeathTracker tracker = oldVal;
            // Update cache if no longer alive. BinderDied will eventually be called on the tracker,
            // which will call listeners & clean up.
            if (tracker == null || !tracker.isAlive()) {
                T binder = mBinderInterfaceFactory.create();
                tracker = (binder != null) ? new BinderDeathTracker(binder) : null;
            }
            return (tracker != null && tracker.isAlive()) ? tracker : null;
        });
    }
}
gg-wzj/MyChatClient
app/src/main/java/com/example/mychatclient/db/bean/RecentMessageBean.java
package com.example.mychatclient.db.bean; /** * Created by wzj on 2017/9/22. */ public class RecentMessageBean { String friendPhone; String remark; String message; int wdCount; public int getWdCount() { return wdCount; } public void setWdCount(int wdCount) { this.wdCount = wdCount; } public RecentMessageBean(String friendPhone, String remark, String message, int wdCount, long time) { this.friendPhone = friendPhone; this.remark = remark; this.message = message; this.wdCount = wdCount; this.time = time; } long time; public String getRemark() { return remark; } public void setRemark(String remark) { this.remark = remark; } public RecentMessageBean() { } public String getFriendPhone() { return friendPhone; } public void setFriendPhone(String friendPhone) { this.friendPhone = friendPhone; } public String getMessage() { return message; } public void setMessage(String message) { this.message = message; } public long getTime() { return time; } public void setTime(long tiem) { this.time = tiem; } @Override public String toString() { return "RecentMessageBean{" + "friendPhone='" + friendPhone + '\'' + ", remark='" + remark + '\'' + ", message='" + message + '\'' + ", time=" + time + '}'; } }
LegendaryMauricius/Fusin
Fusin/src/Devices/FusinXInputDevice.cpp
#include "Devices/FusinXInputDevice.h"
#include "IOCodes/FusinXInput.h"
#include <iomanip>
#include <algorithm>
#include <math.h>
#include <stdexcept>

namespace Fusin
{

	// Wires up all XInput buttons/axes as external components of the generic
	// Device base class so they are enumerated and updated uniformly.
	XInputDevice::XInputDevice(String name, bool hasBattery):
		Device(name, DT_XINPUT, hasBattery),
		buttons(DT_XINPUT, 0),
		axes(DT_XINPUT, 0),
		dPad(DT_XINPUT),
		vibration(DT_XINPUT),
		buttonA(XINPUT_A), buttonB(XINPUT_B), buttonX(XINPUT_X), buttonY(XINPUT_Y),
		buttonLB(XINPUT_LB), buttonRB(XINPUT_RB),
		buttonView(XINPUT_VIEW), buttonMenu(XINPUT_MENU),
		buttonLeftStick(XINPUT_LEFT_STICK_PRESS), buttonRightStick(XINPUT_RIGHT_STICK_PRESS),
		buttonGuide(XINPUT_GUIDE),
		// Each composite stick axis also drives a pair of one-directional axes.
		axisLeftStickX(XINPUT_LEFT_X_AXIS, &axisLeftStickRight, &axisLeftStickLeft),
		axisLeftStickY(XINPUT_LEFT_Y_AXIS, &axisLeftStickDown, &axisLeftStickUp),
		axisRightStickX(XINPUT_RIGHT_X_AXIS, &axisRightStickRight, &axisRightStickLeft),
		axisRightStickY(XINPUT_RIGHT_Y_AXIS, &axisRightStickDown, &axisRightStickUp),
		axisLeftStickRight(XINPUT_LEFT_STICK_RIGHT), axisLeftStickLeft(XINPUT_LEFT_STICK_LEFT),
		axisLeftStickDown(XINPUT_LEFT_STICK_DOWN), axisLeftStickUp(XINPUT_LEFT_STICK_UP),
		axisRightStickRight(XINPUT_RIGHT_STICK_RIGHT), axisRightStickLeft(XINPUT_RIGHT_STICK_LEFT),
		axisRightStickDown(XINPUT_RIGHT_STICK_DOWN), axisRightStickUp(XINPUT_RIGHT_STICK_UP),
		axisLT(XINPUT_LT), axisRT(XINPUT_RT)
	{
		registerComponents({ &buttons, &axes, &dPad, &vibration });
		buttons._registerExternalButtons({
			&buttonA, &buttonB, &buttonX, &buttonY,
			&buttonLB, &buttonRB, &buttonView, &buttonMenu,
			&buttonLeftStick, &buttonRightStick, &buttonGuide });
		axes._registerExternalAxes({
			&axisLeftStickX, &axisLeftStickY,
			&axisRightStickX, &axisRightStickY,
			&axisLT, &axisRT });
	}

	XInputDevice::~XInputDevice()
	{
	}

	// Returns a human-readable one-line summary of the current controller
	// state (axes, d-pad, vibration and pressed buttons), used for debugging.
	String XInputDevice::getStateString()
	{
		StringStream ss;
		ss << std::fixed << std::setprecision(2);

		// axes
		// FIX: the right-stick Y component previously printed
		// axisLeftStickY.value() (copy-paste error), so the debug string
		// showed the left stick's Y value for both sticks.
		ss << "L(" << axisLeftStickX.value() << "," << axisLeftStickY.value() <<
			") R(" << axisRightStickX.value() << "," << axisRightStickY.value() <<
			") LT(" << axisLT.value() << ") RT(" << axisRT.value() << ") ";

		// default component strings
		ss << dPad.getStateString() << " ";

		// vibration
		if (vibration.leftForce.value() || vibration.rightForce.value())
			ss << "Vibration[" << vibration.getStateString() << "] ";

		// buttons
		ss << " Buttons: ";
		if (buttonA.value()) ss << "A ";
		if (buttonB.value()) ss << "B ";
		if (buttonX.value()) ss << "X ";
		if (buttonY.value()) ss << "Y ";
		if (buttonLB.value()) ss << "LB ";
		if (buttonRB.value()) ss << "RB ";
		if (buttonLeftStick.value()) ss << "LStick ";
		if (buttonRightStick.value()) ss << "RStick ";
		if (buttonView.value()) ss << "View ";
		if (buttonMenu.value()) ss << "Menu ";
		if (buttonGuide.value()) ss << "Guide ";

		return ss.str();
	}

}
Cavallium/WarpPI
core/src/main/java/it/cavallium/warppi/TestDrivers.java
<reponame>Cavallium/WarpPI<gh_stars>10-100 package it.cavallium.warppi; public class TestDrivers { public static void main(final String[] args) { System.out.println("Test started."); String className; className = "jogamp.newt.driver.bcm.vc.iv.DisplayDriver"; if (TestDrivers.exists(className)) { System.out.println("[FOUND] " + className); } else { System.out.println("[NOT FOUND] " + className); } className = ".bcm.vc.iv.DisplayDriver"; if (TestDrivers.exists(className)) { System.out.println("[FOUND] " + className); } else { System.out.println("[NOT FOUND] " + className); } System.out.println("Test finished."); } public static boolean exists(final String className) { try { Class.forName(className); return true; } catch (final ClassNotFoundException e) { return false; } } }
kingking888/crawler-pyspider
projects/crawl_taobao_goods_migrate/page/goods_rate.py
import random
import re

import requests
from crawl_taobao_goods_migrate.model.es.es_goods_rate import EsTmallGoodsRate
from crawl_taobao_goods_migrate.model.task import Task
from pyspider.helper.ips_pool import IpsPool
from pyspider.helper.date import Date
from crawl_taobao_goods_migrate.config import *
from pyspider.libs.base_crawl import BaseCrawl
from pyspider.libs.crawl_builder import CrawlBuilder


class GoodsRate(BaseCrawl):
    """Crawl task for a Tmall item's rating (DSR) summary.

    Fetches the list_dsr_info JSONP endpoint for one item and writes the
    parsed average grade / rating total into Elasticsearch.
    """

    URL = 'https://dsr-rate.tmall.com/list_dsr_info.htm?itemId={0}&spuId=806781310&sellerId=3165080793&groupId&_ksTS=1562552852233_202&callback=jsonp203'

    def __init__(self, url_or_id, use_proxy=True, priority=0):
        """
        :param url_or_id: either a full item-detail URL containing ``id=<sku>``
            or the bare numeric goods id.
        :param use_proxy: route the request through the shared IP pool.
        :param priority: scheduler priority for this crawl task.
        """
        super(GoodsRate, self).__init__()
        self.__headers = {
            "Referer": "https://detail.tmall.com/item.htm",
            "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.100 Safari/537.36",
        }
        self.__priority = priority
        # Accept both a detail-page URL and a raw id.
        self.__goods_id = url_or_id.split('id=', 1)[1].split('&', 1)[0] if 'id=' in url_or_id else url_or_id
        self.__use_proxy = use_proxy
        self.__goods_url = url_or_id

    def crawl_builder(self):
        """Build the scheduler request for the DSR endpoint."""
        builder = CrawlBuilder() \
            .set_url(self.URL.format(self.__goods_id)) \
            .set_headers(self.__headers) \
            .schedule_priority(self.__priority) \
            .schedule_age() \
            .set_timeout(GOODS_RATE_TIMEOUT) \
            .set_connect_timeout(GOODS_RATE_CONNECT_TIMEOUT) \
            .set_task_id(Task.get_task_id_goods_rate(self.__goods_id))
        if self.__use_proxy:
            # Dead code that fetched an ad-hoc list from proxy.httpdaili.com
            # was removed here; the shared IP pool is the single proxy source.
            builder.set_proxy(IpsPool.get_ip_from_pool())
        return builder

    def parse_response(self, response, task):
        """Parse the JSONP payload and persist the rating summary to ES.

        A missing ``gradeAvg`` still raises (TypeError from indexing a
        non-match), matching the original behaviour so the framework can
        retry the task.
        """
        content = response.text
        result = {}
        result['goods_sku'] = self.__goods_id
        result['grade_avg'] = float(re.search('"gradeAvg":(.*?),', content)[1])
        try:
            result['rate_total'] = int(re.search('"rateTotal":(.*?)}', content)[1])
        except (TypeError, ValueError, IndexError):
            # Was a bare ``except:`` (which also swallowed KeyboardInterrupt /
            # SystemExit); narrowed to the failures a missing or malformed
            # rateTotal can actually produce. Default stays 0 as before.
            result['rate_total'] = 0
        result['sync_time'] = Date.now().format_es_utc_with_tz()
        result['insert_date'] = Date.now().format(full=False)
        result['goods_url'] = self.__goods_url
        EsTmallGoodsRate().update([result], async_key=True)
        return {
            'unique_name': 'tmall_goods_rate',
            'url': self.URL.format(self.__goods_id),
            'content': response.text
        }
jhonatasrm/chromium-android
app/src/main/java/org/chromium/chrome/browser/dependency_injection/ChromeActivityCommonsModule_ProvideTabModelSelectorFactory.java
package org.chromium.chrome.browser.dependency_injection;

import dagger.internal.Factory;
import dagger.internal.Preconditions;
import javax.annotation.Generated;
import org.chromium.chrome.browser.tabmodel.TabModelSelector;

// NOTE(review): Dagger-generated factory (see @Generated below). Do not edit
// by hand — the annotation processor regenerates this file on every build.
@Generated(
  value = "dagger.internal.codegen.ComponentProcessor",
  comments = "https://google.github.io/dagger"
)
public final class ChromeActivityCommonsModule_ProvideTabModelSelectorFactory
    implements Factory<TabModelSelector> {
  private final ChromeActivityCommonsModule module;

  public ChromeActivityCommonsModule_ProvideTabModelSelectorFactory(
      ChromeActivityCommonsModule module) {
    this.module = module;
  }

  @Override
  public TabModelSelector get() {
    return provideInstance(module);
  }

  public static TabModelSelector provideInstance(ChromeActivityCommonsModule module) {
    return proxyProvideTabModelSelector(module);
  }

  public static ChromeActivityCommonsModule_ProvideTabModelSelectorFactory create(
      ChromeActivityCommonsModule module) {
    return new ChromeActivityCommonsModule_ProvideTabModelSelectorFactory(module);
  }

  // Delegates to the module's @Provides method and fails fast on null.
  public static TabModelSelector proxyProvideTabModelSelector(
      ChromeActivityCommonsModule instance) {
    return Preconditions.checkNotNull(
        instance.provideTabModelSelector(),
        "Cannot return null from a non-@Nullable @Provides method");
  }
}
emeric75/uvsq_licence
IN404/TD/863/src/ui/commands/Create.java
package ui.commands;

import stock.Circle;
import stock.Rectangle;
import stock.Shape;
import stock.Stock;
import java.util.Arrays;

/**
 * Command that parses its arguments into a new {@link Shape} and stores it
 * in the {@link Stock} under the next available numeric key.
 */
public class Create implements Command{

    private String[] args;

    public Create(String[] args) {
        this.args = args;
    }

    /**
     * Creates the requested shape ("circle" or "rectangle") from the
     * remaining arguments and registers it in {@code stock}.
     *
     * @throws IllegalArgumentException if no arguments were given or the
     *         shape type is unknown (messages are user-facing, in French)
     */
    @Override
    public String execute(Stock stock) {
        if (args.length == 0) {
            throw new IllegalArgumentException("Pas assez d'arguments pour create");
        }

        String shapeType = args[0];
        String[] shapeArgs = Arrays.copyOfRange(args, 1, args.length);

        Shape created;
        if (shapeType.equals("circle")) {
            created = Circle.parseInput(shapeArgs);
        } else if (shapeType.equals("rectangle")) {
            created = Rectangle.parseInput(shapeArgs);
        } else {
            throw new IllegalArgumentException("Le type de figure que vous voulez créer n'existe pas.");
        }

        stock.create(stock.getNbFigs() + "", created);
        return shapeType + " " + (stock.getNbFigs() - 1) + " créé.";
    }
}
TomStevens7533/Minigin
BurgerTime/PepperDisplayComponent.h
#pragma once
#include "BaseComponent.h"
#include <string>
#include "Observer.h"

namespace Burger
{
	// Observer component that tracks the remaining pepper shots (m_PepperShots)
	// for the BurgerTime HUD. It reacts to game events via onNotify; it has no
	// per-frame work, so Update/FixedUpdate/Render are empty.
	class PepperDisplayComponent final : public dae::BaseComponent, public dae::Observer
	{
	public:
		// newString: base text the counter is appended to when displayed.
		PepperDisplayComponent(std::string newString);
		~PepperDisplayComponent() = default;

		virtual void Start() override;
		virtual void Update() override {};
		virtual void FixedUpdate() override {};
		virtual void Render() const override {};

		// Called by observed subjects; updates the pepper count based on the
		// event id (event semantics defined by the engine's event enum —
		// not visible here).
		virtual void onNotify(const BaseComponent* entity, int event, dae::EventArgs* args) override;

		PepperDisplayComponent(const PepperDisplayComponent& other) = delete;
		PepperDisplayComponent(PepperDisplayComponent&& other) = delete;
		PepperDisplayComponent& operator=(const PepperDisplayComponent& other) = delete;
		PepperDisplayComponent& operator=(PepperDisplayComponent&& other) = delete;

	private:
		int m_PepperShots{};       // remaining pepper uses shown on screen
		std::string m_BaseString{}; // prefix text supplied at construction
	};
}
EricRemmerswaal/tensorflow
tensorflow/c/experimental/ops/gen/model/arg_spec.h
/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ==============================================================================*/ #ifndef TENSORFLOW_C_EXPERIMENTAL_OPS_GEN_MODEL_ARG_SPEC_H_ #define TENSORFLOW_C_EXPERIMENTAL_OPS_GEN_MODEL_ARG_SPEC_H_ #include "tensorflow/c/experimental/ops/gen/model/arg_type.h" #include "tensorflow/core/framework/op_def.pb.h" #include "tensorflow/core/platform/types.h" namespace tensorflow { namespace generator { // An input or output argument to an Op. // // Essentially, this represents an OpDef::ArgDef and its context within the Op. class ArgSpec { public: ArgSpec() = default; ArgSpec(const ArgSpec& other) = default; static ArgSpec CreateInput(const OpDef::ArgDef& arg_def, int position); static ArgSpec CreateOutput(const OpDef::ArgDef& arg_def, int position); const string& name() const { return name_; } const string& description() const { return description_; } const ArgType arg_type() const { return arg_type_; } const int position() const { return position_; } private: explicit ArgSpec(const OpDef::ArgDef& arg_def, ArgType arg_type, int position); string name_; string description_; ArgType arg_type_; int position_; }; } // namespace generator } // namespace tensorflow #endif // TENSORFLOW_C_EXPERIMENTAL_OPS_GEN_MODEL_ARG_SPEC_H_
renber/YamlResourceBundle
src/main/java/de/renber/yamlbundleeditor/exporters/excel/ExcelExporter.java
package de.renber.yamlbundleeditor.exporters.excel;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.StringReader;
import java.io.StringWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.TreeMap;

import de.renber.quiterables.QuIterables;
import org.apache.poi.hssf.usermodel.HSSFRow;
import org.apache.poi.ss.usermodel.Cell;
import org.apache.poi.ss.usermodel.CellStyle;
import org.apache.poi.ss.usermodel.ColorScaleFormatting;
import org.apache.poi.ss.usermodel.IndexedColors;
import org.apache.poi.ss.usermodel.Row;
import org.apache.poi.xssf.usermodel.XSSFCellStyle;
import org.apache.poi.xssf.usermodel.XSSFColor;
import org.apache.poi.xssf.usermodel.XSSFFont;
import org.apache.poi.xssf.usermodel.XSSFRow;
import org.apache.poi.xssf.usermodel.XSSFSheet;
import org.apache.poi.xssf.usermodel.XSSFWorkbook;
import org.eclipse.swt.SWT;
import org.eclipse.swt.graphics.Image;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Label;
import org.eclipse.wb.swt.SWTResourceManager;
import org.yaml.snakeyaml.DumperOptions;
import org.yaml.snakeyaml.Yaml;
import org.yaml.snakeyaml.DumperOptions.FlowStyle;
import org.yaml.snakeyaml.DumperOptions.ScalarStyle;

import de.renber.databinding.context.beans.BeansDataContext;
import de.renber.yamlbundleeditor.exporters.ExportException;
import de.renber.yamlbundleeditor.exporters.IExportConfiguration;
import de.renber.yamlbundleeditor.exporters.IExporter;
import de.renber.yamlbundleeditor.models.BundleCollection;
import de.renber.yamlbundleeditor.models.BundleMetaInfo;
import de.renber.yamlbundleeditor.models.LocalizedValue;
import de.renber.yamlbundleeditor.models.ResourceKey;
import de.renber.yamlbundleeditor.services.IDialogService;
import de.renber.yamlbundleeditor.services.ILocalizationService;
import de.renber.yamlbundleeditor.services.IconProvider;
import de.renber.yamlbundleeditor.services.impl.FileExtFilter;

/**
 * Exports a resource-bundle collection to a Microsoft Excel (.xlsx) workbook:
 * one row per resource key, one column per exported language. Missing values
 * can be highlighted and keys can be filtered by substring.
 */
public class ExcelExporter implements IExporter {

	ILocalizationService loc;
	IDialogService dialogService;

	// Lower-cased filter lines of the current export run. Rebuilt on every
	// doExport call — this instance is therefore not thread-safe.
	private List<String> filterLines;

	public ExcelExporter(ILocalizationService localizationService, IDialogService dialogService) {
		this.loc = localizationService;
		this.dialogService = dialogService;
	}

	public String getName() {
		return "Microsoft Excel (*.xlsx)";
	}

	public Image getImage() {
		return IconProvider.getImage("export/excel");
	}

	/**
	 * Exports the collection to a user-chosen .xlsx file. Does nothing if the
	 * user cancels the save dialog.
	 *
	 * @throws ExportException if writing the workbook fails
	 */
	@Override
	public void doExport(BundleCollection collection, IExportConfiguration configuration) throws ExportException {
		if (collection == null)
			throw new IllegalArgumentException("The parameter collection must not be null.");
		if (configuration == null || !(configuration instanceof ExcelExportConfiguration))
			throw new IllegalArgumentException("The parameter configuration must not be null and of type ExcelExportConfiguration.");

		ExcelExportConfiguration config = (ExcelExportConfiguration)configuration;

		// Build the (lower-cased) substring filter for this run.
		filterLines = new ArrayList<>();
		if (config.exportFilter != null && !config.exportFilter.isEmpty()) {
			for (String s : config.exportFilter.split("\\r?\\n")) {
				filterLines.add(s.toLowerCase());
			}
		}

		List<BundleMetaInfo> exportBundles = QuIterables.query(collection.getBundles()).where(x -> config.getLanguagesToExport().contains(x.languageCode)).toList();

		File f = dialogService.showSaveFileDialog("Export as excel", new FileExtFilter("Excel-File", "*.xlsx"));
		if (f != null) {
			try (XSSFWorkbook workBook = new XSSFWorkbook()) {
				XSSFSheet sheet = workBook.createSheet();

				// Header row style: grey background, bold font.
				XSSFCellStyle headerCellStyle = workBook.createCellStyle();
				// foreground and background both belong to the cell background
				// (see
				// http://stackoverflow.com/questions/2803841/setting-foreground-color-for-hssfcellstyle-is-always-coming-out-black)
				headerCellStyle.setFillForegroundColor(IndexedColors.GREY_50_PERCENT.getIndex());
				headerCellStyle.setFillPattern(CellStyle.SOLID_FOREGROUND);
				XSSFFont headerFont = workBook.createFont();
				headerFont.setBold(true);
				headerCellStyle.setFont(headerFont);

				// Write the header row: "Key" plus one column per language.
				{
					Row row = sheet.createRow(0);
					Cell cell = row.createCell(0);
					cell.setCellValue("Key");
					cell.setCellStyle(headerCellStyle);

					int cellNo = 1;
					for (BundleMetaInfo metaInfo : exportBundles) {
						cell = row.createCell(cellNo);
						cell.setCellValue(metaInfo.languageCode + " - " + metaInfo.localizedName);
						cell.setCellStyle(headerCellStyle);
						cellNo++;
					}
				}

				// Write the resource keys.
				int currRow = 1;
				Map<String, ResourceKey> keys = getFlatValues(collection.getValues(), "", config.levelSeparator);

				XSSFCellStyle multilineCellStyle = workBook.createCellStyle();
				multilineCellStyle.setWrapText(true);

				XSSFCellStyle missingCellStyle = workBook.createCellStyle();
				missingCellStyle.setFillForegroundColor(IndexedColors.RED.getIndex());
				missingCellStyle.setFillPattern(CellStyle.THICK_FORWARD_DIAG);

				for (Entry<String, ResourceKey> entry : keys.entrySet()) {
					if (matchesFilter(entry.getKey(), filterLines)) {

						if (config.onlyExportKeysWithMissingValues) {
							// Skip keys that have a value in every exported language.
							boolean allSet = true;
							for (BundleMetaInfo metaInfo : exportBundles) {
								Object val = entry.getValue().getLocalizedValue(metaInfo.languageCode);
								if (val == null) {
									allSet = false;
									break;
								}
							}
							if (allSet)
								continue;
						}

						Row row = sheet.createRow(currRow);
						Cell cell = row.createCell(0);
						cell.setCellValue(entry.getKey());

						int maxCellLines = 1;
						int cellNo = 1;
						for (BundleMetaInfo metaInfo : exportBundles) {
							cell = row.createCell(cellNo);
							Object val = entry.getValue().getLocalizedValue(metaInfo.languageCode);
							if (val != null) {
								String v = val.toString();
								cell.setCellValue(v);
								if (v.contains("\n")) {
									cell.setCellStyle(multilineCellStyle);
									// FIX: was Math.max(1, countLines(v)), which
									// overwrote the running maximum with the
									// line count of the *last* multi-line cell
									// in the row instead of keeping the largest.
									maxCellLines = Math.max(maxCellLines, countLines(v));
								}
							} else {
								if (config.highlightMissingValues)
									cell.setCellStyle(missingCellStyle);
							}

							cellNo++;
						}

						// Size the row so every line of the tallest cell is visible.
						row.setHeightInPoints(maxCellLines * sheet.getDefaultRowHeightInPoints());

						currRow++;
					}
				}

				// Autosize columns.
				XSSFRow row = sheet.getRow(0);
				for (int colNum = 0; colNum < row.getLastCellNum(); colNum++)
					sheet.autoSizeColumn(colNum);

				// Write the excel file.
				try (FileOutputStream fStream = new FileOutputStream(f)) {
					workBook.write(fStream);
				}
			} catch (IOException e) {
				throw new ExportException("Export failed.", e);
			}
		}
	}

	/** Counts the number of lines in {@code s} (1 + number of '\n'). */
	private int countLines(String s) {
		int cnt = 0;
		int idx = 0;
		while (idx >= 0) {
			cnt++;
			idx = s.indexOf("\n", idx + 1);
		}
		return cnt;
	}

	/**
	 * Returns true if {@code text} contains any of the given (already
	 * lower-cased) filter substrings, or if no filters are set.
	 */
	private boolean matchesFilter(String text, List<String> filters) {
		// FIX: the emptiness check previously read the filterLines field
		// instead of the filters parameter, silently coupling this method to
		// instance state. Behaviour is unchanged for the existing caller,
		// which passes filterLines.
		if (filters.isEmpty())
			return true;

		for (String filter : filters) {
			if (text.toLowerCase().contains(filter.toLowerCase()))
				return true;
		}

		return false;
	}

	/**
	 * Return all ResourceKeys which have at least one localized value as a flat
	 * list with their path
	 */
	private Map<String, ResourceKey> getFlatValues(List<ResourceKey> keys, String path, String separator) {
		TreeMap<String, ResourceKey> map = new TreeMap<>();

		for (ResourceKey key : keys) {
			if (key.hasChildren()) {
				Map<String, ResourceKey> subMap = getFlatValues(key.getChildren(), path + key.name + separator, separator);
				map.putAll(subMap);
			}

			if (!key.getLocalizedValues().isEmpty()) {
				map.put(path + key.name, key);
			}
		}

		return map;
	}

	@Override
	public IExportConfiguration getDefaultConfiguration() {
		return new ExcelExportConfiguration();
	}

	/** Serializes the configuration to a YAML string (block style). */
	@Override
	public String serializeConfiguration(IExportConfiguration configuration) {
		DumperOptions options = new DumperOptions();
		options.setDefaultFlowStyle(FlowStyle.BLOCK);
		options.setDefaultScalarStyle(ScalarStyle.PLAIN);
		Yaml yaml = new Yaml(options);
		try (Writer writer = new StringWriter()) {
			yaml.dump(configuration, writer);
			return writer.toString();
		} catch (IOException e) {
			throw new RuntimeException(e);
		}
	}

	/** Deserializes a configuration previously produced by serializeConfiguration. */
	@Override
	public IExportConfiguration deserializeConfiguration(String serializedString) {
		try (StringReader reader = new StringReader(serializedString)) {
			Yaml yaml = new Yaml();
			Object conf = yaml.load(reader);
			return (IExportConfiguration) conf;
		}
	}

	/** Builds the SWT configuration widget bound to the given configuration. */
	@Override
	public Control getConfigurationControl(Composite parent, BundleCollection collection, IExportConfiguration configuration) {
		if (parent == null)
			throw new IllegalArgumentException("Parameter parent must not be null.");
		if (collection == null)
			throw new IllegalArgumentException("Parameter collection must not be null.");
		if (configuration == null)
			throw new IllegalArgumentException("Parameter configuration must not be null.");

		ExcelExportConfigurationViewModel vm = new ExcelExportConfigurationViewModel((ExcelExportConfiguration)configuration, collection);
		return new ExcelExportConfigurationComposite(parent, SWT.None, new BeansDataContext(vm));
	}
}
MaRuifeng/docker-cucumber-sla
features/step_definitions/ui_baseline_steps.rb
## Positive: log in, build and user information scenarios
# Cucumber step definitions for the SLA UI baseline smoke checks.
# Convention used throughout: perform the page action inside begin/rescue,
# log the failure and re-raise so the scenario still fails with the
# original error.

# Reads the build/application info popup into @build_info for later steps.
When(/^I click on the application information icon$/) do
  begin
    @build_info = get_build_info
  rescue Exception => error
    $log.error("#{error.class}: #{error.message}")
    raise
  end
  $log.info("Build info: #{@build_info}")
end

# Compares the displayed build string against the APP_BUILD env var.
Then(/^I should see the build information matches the actual release$/) do
  begin
    # build_info_reg = /[0-9]{2}\.[0-9]\.[0-9]{8}-[0-9]{4}\.[0-9]+\z/
    # expected_build_info = ENV['APP_BUILD'].match(build_info_reg).to_s
    expected_build_info = "UI: #{ENV['APP_BUILD']}"
    @build_info.should eq(expected_build_info)
  rescue Exception => error
    $log.error("#{error.class}: #{error.message}")
    raise
  end
  $log.info('Build info verified.')
end

# Unquoted-role variant; delegates to the quoted-role login step.
When(/^I am logged into the SLA UI as a ([^"]+)$/) do | user_role|
  step %{I am logged into the SLA UI as a "#{user_role}"}
end

# Reads the user information popup into @user_info for later steps.
When(/^I click on the user information icon$/) do
  begin
    on_page(MainPage) do
      @user_info = get_user_info
    end
  rescue Exception => error
    $log.error("#{error.class}: #{error.message}")
    raise
  end
  $log.info("User info: #{@user_info.to_s}")
end

# Verifies the displayed user info against the FigNewton fixture for the
# role (e.g. admin_login). Groups/roles are compared order-insensitively.
Then(/^I should see the user information matches current "([^"]*)"$/) do |user_role|
  $log.info("Current user role: #{user_role}")
  begin
    expected_user_info = FigNewton.send("#{user_role.downcase}_login").to_hash
    @user_info[:name].should eq(expected_user_info[:name])
    @user_info[:email].should eq(expected_user_info[:email])
    @user_info[:id].should eq(expected_user_info[:id])
    @user_info[:groups].split(',').should =~ expected_user_info[:groups].split(',')
    @user_info[:roles].split(',').should =~ expected_user_info[:roles].split(',')
  rescue Exception => error
    $log.error("#{error.class}: #{error.message}")
    raise
  end
  $log.info('User information verified.')
end

# Unquoted-role variant; delegates to the quoted-role verification step.
Then(/^I should see the user information matches current ([^"]+)$/) do |user_role|
  step %{I should see the user information matches current "#{user_role}"}
end

## Negative: log in, build and user information scenarios

# Opens the login page without authenticating.
Given(/^I navigated to the login page of SLA UI$/) do
  begin
    # on_page(SplashPage).get_started
    on_page(LoginPage)
  rescue Exception => error
    $log.error("#{error.class}: #{error.message}")
    raise
  end
  $log.info('Successfully landed on the login page.')
end

# Table-driven negative login check: each row supplies USER_ROLE and the
# ERROR_MESSAGE expected after the failed attempt.
Given(/^I should see error messages pop up with invalid login attempts$/) do |table|
  begin
    table.hashes.each do |entry|
      on_page(LoginPage) do |page|
        $log.info("Attempting to log in with: #{entry[:USER_ROLE]}")
        page.login_as(entry[:USER_ROLE])
        wait_for_spinner
        error_msg = page.get_error_msg
        $log.info("Displayed error: #{error_msg}")
        error_msg.should eq(entry[:ERROR_MESSAGE])
      end
    end
  rescue Exception => error
    $log.error("#{error.class}: #{error.message}")
    raise
  end
end
Gaohng/echo-app
components/dbPool.go
<gh_stars>1-10 package components import ( echoapp "github.com/gw123/echo-app" "github.com/jinzhu/gorm" _ "github.com/jinzhu/gorm/dialects/mysql" "github.com/pkg/errors" "sync" ) type DbPoolService struct { dbMap map[string]*gorm.DB dbOptionMap map[string]echoapp.DBOption mu sync.Mutex } func NewDbPool(options map[string]echoapp.DBOption) *DbPoolService { return &DbPoolService{ dbOptionMap: options, dbMap: map[string]*gorm.DB{}, } } func (dSvr DbPoolService) Db(dbName string) (*gorm.DB, error) { client, ok := dSvr.dbMap[dbName] if !ok || client == nil { dbOption, ok := dSvr.dbOptionMap[dbName] if !ok { return nil, errors.New("notfound DbName:" + dbName) } var err error client, err = gorm.Open(dbOption.Driver, dbOption.DSN) if err != nil { return nil, errors.Wrap(err, "gorm.open") } //防止多线程并发操作 dSvr.mu.Lock() defer dSvr.mu.Unlock() dSvr.dbMap[dbName] = client } return client, nil }
AlaricChan/smi-lib-wiseman
src/main/gen-src/org/xmlsoap/schemas/ws/_2004/_09/enumeration/PullResponse.java
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, vhudson-jaxb-ri-2.1-382
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2007.05.29 at 01:59:34 PM EDT
//
// NOTE(review): JAXB-generated binding class for the WS-Enumeration
// PullResponse element — do not edit by hand; regenerate from the schema.

package org.xmlsoap.schemas.ws._2004._09.enumeration;

import java.util.HashMap;
import java.util.Map;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAnyAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
import javax.xml.namespace.QName;

/**
 * <p>
 * Java class for anonymous complex type.
 *
 * <p>
 * The following schema fragment specifies the expected content contained within this class.
 *
 * <pre>
 * &lt;complexType>
 *   &lt;complexContent>
 *     &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
 *       &lt;sequence>
 *         &lt;element name="EnumerationContext" type="{http://schemas.xmlsoap.org/ws/2004/09/enumeration}EnumerationContextType" minOccurs="0"/>
 *         &lt;element name="Items" type="{http://schemas.xmlsoap.org/ws/2004/09/enumeration}ItemListType" minOccurs="0"/>
 *         &lt;element name="EndOfSequence" type="{http://www.w3.org/2001/XMLSchema}anyType" minOccurs="0"/>
 *       &lt;/sequence>
 *     &lt;/restriction>
 *   &lt;/complexContent>
 * &lt;/complexType>
 * </pre>
 *
 *
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = { "enumerationContext", "items", "endOfSequence" })
@XmlRootElement(name = "PullResponse")
public class PullResponse {

    @XmlElement(name = "EnumerationContext")
    protected EnumerationContextType enumerationContext;
    @XmlElement(name = "Items")
    protected ItemListType items;
    @XmlElement(name = "EndOfSequence")
    protected Object endOfSequence;
    @XmlAnyAttribute
    private Map<QName, String> otherAttributes = new HashMap<QName, String>();

    /**
     * Gets the value of the enumerationContext property.
     *
     * @return possible object is {@link EnumerationContextType }
     *
     */
    public EnumerationContextType getEnumerationContext() {
        return enumerationContext;
    }

    /**
     * Sets the value of the enumerationContext property.
     *
     * @param value allowed object is {@link EnumerationContextType }
     *
     */
    public void setEnumerationContext(EnumerationContextType value) {
        this.enumerationContext = value;
    }

    /**
     * Gets the value of the items property.
     *
     * @return possible object is {@link ItemListType }
     *
     */
    public ItemListType getItems() {
        return items;
    }

    /**
     * Sets the value of the items property.
     *
     * @param value allowed object is {@link ItemListType }
     *
     */
    public void setItems(ItemListType value) {
        this.items = value;
    }

    /**
     * Gets the value of the endOfSequence property.
     *
     * @return possible object is {@link Object }
     *
     */
    public Object getEndOfSequence() {
        return endOfSequence;
    }

    /**
     * Sets the value of the endOfSequence property.
     *
     * @param value allowed object is {@link Object }
     *
     */
    public void setEndOfSequence(Object value) {
        this.endOfSequence = value;
    }

    /**
     * Gets a map that contains attributes that aren't bound to any typed property on this class.
     *
     * <p>
     * the map is keyed by the name of the attribute and the value is the string value of the attribute.
     *
     * the map returned by this method is live, and you can add new attribute by updating the map directly. Because of this design, there's no setter.
     *
     *
     * @return always non-null
     */
    public Map<QName, String> getOtherAttributes() {
        return otherAttributes;
    }

}
MediaByte/portfolio
node_modules/@material-ui/icons/LocalMall.js
'use strict';

// Auto-generated CommonJS build of the Material-UI "LocalMall" SVG icon
// (produced by the @material-ui/icons build pipeline). Do not edit by hand.

Object.defineProperty(exports, "__esModule", {
  value: true
});

var _react = require('react');

var _react2 = _interopRequireDefault(_react);

var _createSvgIcon = require('./utils/createSvgIcon');

var _createSvgIcon2 = _interopRequireDefault(_createSvgIcon);

// Babel interop helper: wraps CJS modules so `.default` always resolves.
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

exports.default = (0, _createSvgIcon2.default)(_react2.default.createElement(
  'g',
  null,
  _react2.default.createElement('path', { d: 'M19 6h-2c0-2.76-2.24-5-5-5S7 3.24 7 6H5c-1.1 0-1.99.9-1.99 2L3 20c0 1.1.9 2 2 2h14c1.1 0 2-.9 2-2V8c0-1.1-.9-2-2-2zm-7-3c1.66 0 3 1.34 3 3H9c0-1.66 1.34-3 3-3zm0 10c-2.76 0-5-2.24-5-5h2c0 1.66 1.34 3 3 3s3-1.34 3-3h2c0 2.76-2.24 5-5 5z' })
), 'LocalMall');
TheRakeshPurohit/CodingSpectator
plug-ins/helios/org.eclipse.jdt.ui.tests.refactoring/resources/ExtractMethodWorkSpace/ExtractMethodTests/wiki_in/A_test2005.java
// Fixture for the JDT "extract method" refactoring test suite. The
// /*[*/ ... /*]*/ markers delimit the selection the test extracts — do not
// move or reformat them. If the suite compares against a paired *_out file
// textually, mirror any change there as well.
package wiki_in;

import java.io.IOException;

public class A_test2005 {
	static void fun() throws IOException {
	}

	public static void main(String args[]) {
		try {
			/*[*/
			fun();
			/*]*/
		} catch (Exception e) {
		}
	}
}
hongzhaoo/huaweicloud-sdk-java
core/src/main/java/com/huawei/openstack4j/api/workflow/WorkbookDefinitionService.java
/*******************************************************************************
 * Copyright 2016 ContainX and OpenStack4j
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 *******************************************************************************/
package com.huawei.openstack4j.api.workflow;

import java.io.InputStream;
import java.util.List;

import com.huawei.openstack4j.common.RestService;
import com.huawei.openstack4j.model.common.ActionResponse;
import com.huawei.openstack4j.model.workflow.Scope;
import com.huawei.openstack4j.model.workflow.WorkbookDefinition;

/**
 * Service that provides CRUD operations for workbook definitions.
 *
 * @author <NAME>
 */
public interface WorkbookDefinitionService extends RestService {

    /**
     * List all workbook definitions with details.
     *
     * @return List of workbook definitions.
     */
    List<? extends WorkbookDefinition> list();

    /**
     * Create a new workbook definition.
     *
     * @param wbText Text in YAML format (Mistral language) with a workbook definition.
     *               The caller remains responsible for closing the stream.
     * @param scope Scope of newly created workbook.
     * @return Created workbook definition.
     */
    WorkbookDefinition create(InputStream wbText, Scope scope);

    /**
     * Get workbook definition by its identifier.
     *
     * @param identifier Workbook definition identifier (either ID or name).
     * @return Workbook definition.
     */
    WorkbookDefinition get(String identifier);

    /**
     * Delete workbook definition by its identifier.
     *
     * @param identifier Workbook definition identifier (either ID or name).
     * @return Action response from the server.
     */
    ActionResponse delete(String identifier);

}
JackietLiu/taorg
src/main/java/com/thinkgem/jeesite/modules/taorg/dao/office/SysOfficeDao.java
/**
 * Copyright &copy; 2012-2016 <a href="https://github.com/thinkgem/jeesite">JeeSite</a> All rights reserved.
 */
package com.thinkgem.jeesite.modules.taorg.dao.office;

import java.util.List;

import com.thinkgem.jeesite.common.persistence.TreeDao;
import com.thinkgem.jeesite.common.persistence.annotation.MyBatisDao;
import com.thinkgem.jeesite.modules.taorg.entity.office.SysOffice;

/**
 * 企业/机构DAO接口 (company/organization DAO interface)
 * @author Jackiet
 * @version 2017-01-26
 */
@MyBatisDao
public interface SysOfficeDao extends TreeDao<SysOffice> {

	/**
	 * Finds offices filtered by the office id carried in the given entity;
	 * the SQL lives in the matching MyBatis mapper XML (method-name keyed).
	 *
	 * @param sysOffice query-by-example holder supplying the office id
	 * @return matching offices (empty list if none)
	 */
	List<SysOffice> findListByOfficeId(SysOffice sysOffice);
}
roytam1/wine-win31look
dlls/ddraw/d3dvertexbuffer.c
<gh_stars>1-10 /* Direct3D Viewport * Copyright (c) 2002 <NAME> * * This file contains the implementation of Direct3DVertexBuffer COM object * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ #include "config.h" #include <stdarg.h> #include "windef.h" #include "winbase.h" #include "winerror.h" #include "objbase.h" #include "wingdi.h" #include "ddraw.h" #include "d3d.h" #include "wine/debug.h" #include "d3d_private.h" #include "mesa_private.h" WINE_DEFAULT_DEBUG_CHANNEL(ddraw); WINE_DECLARE_DEBUG_CHANNEL(ddraw_geom); HRESULT WINAPI Main_IDirect3DVertexBufferImpl_7_1T_QueryInterface(LPDIRECT3DVERTEXBUFFER7 iface, REFIID riid, LPVOID* obp) { ICOM_THIS_FROM(IDirect3DVertexBufferImpl, IDirect3DVertexBuffer7, iface); TRACE("(%p/%p)->(%s,%p)\n", This, iface, debugstr_guid(riid), obp); /* By default, set the object pointer to NULL */ *obp = NULL; if ( IsEqualGUID( &IID_IUnknown, riid ) ) { IDirect3DVertexBuffer7_AddRef(ICOM_INTERFACE(This,IDirect3DVertexBuffer7)); *obp = iface; TRACE(" Creating IUnknown interface at %p.\n", *obp); return S_OK; } if ( IsEqualGUID( &IID_IDirect3DVertexBuffer, riid ) ) { IDirect3DVertexBuffer7_AddRef(ICOM_INTERFACE(This,IDirect3DVertexBuffer7)); *obp = ICOM_INTERFACE(This, IDirect3DVertexBuffer); TRACE(" Creating IDirect3DVertexBuffer interface %p\n", *obp); return S_OK; } if ( 
IsEqualGUID( &IID_IDirect3DVertexBuffer7, riid ) ) { IDirect3DVertexBuffer7_AddRef(ICOM_INTERFACE(This,IDirect3DVertexBuffer7)); *obp = ICOM_INTERFACE(This, IDirect3DVertexBuffer7); TRACE(" Creating IDirect3DVertexBuffer7 interface %p\n", *obp); return S_OK; } FIXME("(%p): interface for IID %s NOT found!\n", This, debugstr_guid(riid)); return OLE_E_ENUM_NOMORE; } ULONG WINAPI Main_IDirect3DVertexBufferImpl_7_1T_AddRef(LPDIRECT3DVERTEXBUFFER7 iface) { ICOM_THIS_FROM(IDirect3DVertexBufferImpl, IDirect3DVertexBuffer7, iface); TRACE("(%p/%p)->() incrementing from %lu.\n", This, iface, This->ref); return ++(This->ref); } ULONG WINAPI Main_IDirect3DVertexBufferImpl_7_1T_Release(LPDIRECT3DVERTEXBUFFER7 iface) { ICOM_THIS_FROM(IDirect3DVertexBufferImpl, IDirect3DVertexBuffer7, iface); TRACE("(%p/%p)->() decrementing from %lu.\n", This, iface, This->ref); if (--(This->ref) == 0) { HeapFree(GetProcessHeap(), 0, This->vertices); HeapFree(GetProcessHeap(), 0, This); return 0; } return This->ref; } HRESULT WINAPI Main_IDirect3DVertexBufferImpl_7_1T_Lock(LPDIRECT3DVERTEXBUFFER7 iface, DWORD dwFlags, LPVOID* lplpData, LPDWORD lpdwSize) { ICOM_THIS_FROM(IDirect3DVertexBufferImpl, IDirect3DVertexBuffer7, iface); TRACE("(%p/%p)->(%08lx,%p,%p)\n", This, iface, dwFlags, lplpData, lpdwSize); if (TRACE_ON(ddraw)) { TRACE(" lock flags : "); DDRAW_dump_lockflag(dwFlags); } if (This->processed == TRUE) { WARN(" application does a Lock on a vertex buffer resulting from a ProcessVertices call. Expect problems !\n"); } if (This->desc.dwCaps & D3DVBCAPS_OPTIMIZED) return D3DERR_VERTEXBUFFEROPTIMIZED; if (lpdwSize != NULL) *lpdwSize = This->vertex_buffer_size; *lplpData = This->vertices; return DD_OK; } HRESULT WINAPI Main_IDirect3DVertexBufferImpl_7_1T_Unlock(LPDIRECT3DVERTEXBUFFER7 iface) { ICOM_THIS_FROM(IDirect3DVertexBufferImpl, IDirect3DVertexBuffer7, iface); TRACE("(%p/%p)->()\n", This, iface); /* Nothing to do here for now. 
Maybe some optimizations if ever we want to do some :-) */ return DD_OK; } HRESULT WINAPI Main_IDirect3DVertexBufferImpl_7_1T_ProcessVertices(LPDIRECT3DVERTEXBUFFER7 iface, DWORD dwVertexOp, DWORD dwDestIndex, DWORD dwCount, LPDIRECT3DVERTEXBUFFER7 lpSrcBuffer, DWORD dwSrcIndex, LPDIRECT3DDEVICE7 lpD3DDevice, DWORD dwFlags) { ICOM_THIS_FROM(IDirect3DVertexBufferImpl, IDirect3DVertexBuffer7, iface); FIXME("(%p/%p)->(%08lx,%08lx,%08lx,%p,%08lx,%p,%08lx): stub!\n", This, iface, dwVertexOp, dwDestIndex, dwCount, lpSrcBuffer, dwSrcIndex, lpD3DDevice, dwFlags); return DD_OK; } HRESULT WINAPI Main_IDirect3DVertexBufferImpl_7_1T_GetVertexBufferDesc(LPDIRECT3DVERTEXBUFFER7 iface, LPD3DVERTEXBUFFERDESC lpD3DVertexBufferDesc) { DWORD size; ICOM_THIS_FROM(IDirect3DVertexBufferImpl, IDirect3DVertexBuffer7, iface); TRACE("(%p/%p)->(%p)\n", This, iface, lpD3DVertexBufferDesc); size = lpD3DVertexBufferDesc->dwSize; memset(lpD3DVertexBufferDesc, 0, size); memcpy(lpD3DVertexBufferDesc, &This->desc, (size < This->desc.dwSize) ? 
size : This->desc.dwSize); return DD_OK; } HRESULT WINAPI Main_IDirect3DVertexBufferImpl_7_1T_Optimize(LPDIRECT3DVERTEXBUFFER7 iface, LPDIRECT3DDEVICE7 lpD3DDevice, DWORD dwFlags) { ICOM_THIS_FROM(IDirect3DVertexBufferImpl, IDirect3DVertexBuffer7, iface); FIXME("(%p/%p)->(%p,%08lx): stub!\n", This, iface, lpD3DDevice, dwFlags); This->desc.dwCaps |= D3DVBCAPS_OPTIMIZED; return DD_OK; } HRESULT WINAPI Main_IDirect3DVertexBufferImpl_7_ProcessVerticesStrided(LPDIRECT3DVERTEXBUFFER7 iface, DWORD dwVertexOp, DWORD dwDestIndex, DWORD dwCount, LPD3DDRAWPRIMITIVESTRIDEDDATA lpStrideData, DWORD dwVertexTypeDesc, LPDIRECT3DDEVICE7 lpD3DDevice, DWORD dwFlags) { ICOM_THIS_FROM(IDirect3DVertexBufferImpl, IDirect3DVertexBuffer7, iface); FIXME("(%p/%p)->(%08lx,%08lx,%08lx,%p,%08lx,%p,%08lx): stub!\n", This, iface, dwVertexOp, dwDestIndex, dwCount, lpStrideData, dwVertexTypeDesc, lpD3DDevice, dwFlags); return DD_OK; } HRESULT WINAPI Thunk_IDirect3DVertexBufferImpl_1_ProcessVertices(LPDIRECT3DVERTEXBUFFER iface, DWORD dwVertexOp, DWORD dwDestIndex, DWORD dwCount, LPDIRECT3DVERTEXBUFFER lpSrcBuffer, DWORD dwSrcIndex, LPDIRECT3DDEVICE3 lpD3DDevice, DWORD dwFlags) { TRACE("(%p)->(%08lx,%08lx,%08lx,%p,%08lx,%p,%08lx) thunking to IDirect3DVertexBuffer7 interface.\n", iface, dwVertexOp, dwDestIndex, dwCount, lpSrcBuffer, dwSrcIndex, lpD3DDevice, dwFlags); return IDirect3DVertexBuffer7_ProcessVertices(COM_INTERFACE_CAST(IDirect3DVertexBufferImpl, IDirect3DVertexBuffer, IDirect3DVertexBuffer7, iface), dwVertexOp, dwDestIndex, dwCount, COM_INTERFACE_CAST(IDirect3DVertexBufferImpl, IDirect3DVertexBuffer, IDirect3DVertexBuffer7, lpSrcBuffer), dwSrcIndex, COM_INTERFACE_CAST(IDirect3DDeviceImpl, IDirect3DDevice3, IDirect3DDevice7, lpD3DDevice), dwFlags); } HRESULT WINAPI Thunk_IDirect3DVertexBufferImpl_1_Optimize(LPDIRECT3DVERTEXBUFFER iface, LPDIRECT3DDEVICE3 lpD3DDevice, DWORD dwFlags) { TRACE("(%p)->(%p,%08lx) thunking to IDirect3DVertexBuffer7 interface.\n", iface, lpD3DDevice, dwFlags); 
return IDirect3DVertexBuffer7_Optimize(COM_INTERFACE_CAST(IDirect3DVertexBufferImpl, IDirect3DVertexBuffer, IDirect3DVertexBuffer7, iface), COM_INTERFACE_CAST(IDirect3DDeviceImpl, IDirect3DDevice3, IDirect3DDevice7, lpD3DDevice), dwFlags); } HRESULT WINAPI Thunk_IDirect3DVertexBufferImpl_1_QueryInterface(LPDIRECT3DVERTEXBUFFER iface, REFIID riid, LPVOID* obp) { TRACE("(%p)->(%s,%p) thunking to IDirect3DVertexBuffer7 interface.\n", iface, debugstr_guid(riid), obp); return IDirect3DVertexBuffer7_QueryInterface(COM_INTERFACE_CAST(IDirect3DVertexBufferImpl, IDirect3DVertexBuffer, IDirect3DVertexBuffer7, iface), riid, obp); } ULONG WINAPI Thunk_IDirect3DVertexBufferImpl_1_AddRef(LPDIRECT3DVERTEXBUFFER iface) { TRACE("(%p)->() thunking to IDirect3DVertexBuffer7 interface.\n", iface); return IDirect3DVertexBuffer7_AddRef(COM_INTERFACE_CAST(IDirect3DVertexBufferImpl, IDirect3DVertexBuffer, IDirect3DVertexBuffer7, iface)); } ULONG WINAPI Thunk_IDirect3DVertexBufferImpl_1_Release(LPDIRECT3DVERTEXBUFFER iface) { TRACE("(%p)->() thunking to IDirect3DVertexBuffer7 interface.\n", iface); return IDirect3DVertexBuffer7_Release(COM_INTERFACE_CAST(IDirect3DVertexBufferImpl, IDirect3DVertexBuffer, IDirect3DVertexBuffer7, iface)); } HRESULT WINAPI Thunk_IDirect3DVertexBufferImpl_1_Lock(LPDIRECT3DVERTEXBUFFER iface, DWORD dwFlags, LPVOID* lplpData, LPDWORD lpdwSize) { TRACE("(%p)->(%08lx,%p,%p) thunking to IDirect3DVertexBuffer7 interface.\n", iface, dwFlags, lplpData, lpdwSize); return IDirect3DVertexBuffer7_Lock(COM_INTERFACE_CAST(IDirect3DVertexBufferImpl, IDirect3DVertexBuffer, IDirect3DVertexBuffer7, iface), dwFlags, lplpData, lpdwSize); } HRESULT WINAPI Thunk_IDirect3DVertexBufferImpl_1_Unlock(LPDIRECT3DVERTEXBUFFER iface) { TRACE("(%p)->() thunking to IDirect3DVertexBuffer7 interface.\n", iface); return IDirect3DVertexBuffer7_Unlock(COM_INTERFACE_CAST(IDirect3DVertexBufferImpl, IDirect3DVertexBuffer, IDirect3DVertexBuffer7, iface)); } HRESULT WINAPI 
Thunk_IDirect3DVertexBufferImpl_1_GetVertexBufferDesc(LPDIRECT3DVERTEXBUFFER iface, LPD3DVERTEXBUFFERDESC lpD3DVertexBufferDesc) { TRACE("(%p)->(%p) thunking to IDirect3DVertexBuffer7 interface.\n", iface, lpD3DVertexBufferDesc); return IDirect3DVertexBuffer7_GetVertexBufferDesc(COM_INTERFACE_CAST(IDirect3DVertexBufferImpl, IDirect3DVertexBuffer, IDirect3DVertexBuffer7, iface), lpD3DVertexBufferDesc); } #define copy_and_next(dest, src, size) memcpy(dest, src, size); dest += (size) static HRESULT process_vertices_strided(IDirect3DVertexBufferImpl *This, DWORD dwVertexOp, DWORD dwDestIndex, DWORD dwCount, LPD3DDRAWPRIMITIVESTRIDEDDATA lpStrideData, DWORD dwVertexTypeDesc, IDirect3DDeviceImpl *device_impl, DWORD dwFlags) { IDirect3DVertexBufferGLImpl *glThis = (IDirect3DVertexBufferGLImpl *) This; DWORD size = get_flexible_vertex_size(dwVertexTypeDesc); char *dest_ptr; int i; This->processed = TRUE; /* For the moment, the trick is to save the transform and lighting state at process time to restore them at drawing time. The BIG problem with this method is nothing prevents D3D to do dirty tricks like processing two different sets of vertices with two different rendering parameters and then to display them using the same DrawPrimitive call. It would be nice to check for such code here (but well, even this is not trivial to do). This is exactly what the TWIST.EXE demo does but using the same kind of ugly stuff in the D3DExecuteBuffer code. I really wonder why Microsoft went back in time when implementing this mostly useless (IMHO) API. */ glThis->dwVertexTypeDesc = dwVertexTypeDesc; if (dwVertexTypeDesc & D3DFVF_NORMAL) { WARN(" lighting state not saved yet... 
Some strange stuff may happen !\n"); } if (glThis->vertices == NULL) { glThis->vertices = HeapAlloc(GetProcessHeap(), HEAP_ZERO_MEMORY, size * This->desc.dwNumVertices); } dest_ptr = ((char *) glThis->vertices) + dwDestIndex * size; memcpy(&(glThis->world_mat), device_impl->world_mat, sizeof(D3DMATRIX)); memcpy(&(glThis->view_mat), device_impl->view_mat, sizeof(D3DMATRIX)); memcpy(&(glThis->proj_mat), device_impl->proj_mat, sizeof(D3DMATRIX)); for (i = 0; i < dwCount; i++) { int tex_index; if ((dwVertexTypeDesc & D3DFVF_POSITION_MASK) == D3DFVF_XYZ) { D3DVALUE *position = (D3DVALUE *) (((char *) lpStrideData->position.lpvData) + i * lpStrideData->position.dwStride); copy_and_next(dest_ptr, position, 3 * sizeof(D3DVALUE)); } else if ((dwVertexTypeDesc & D3DFVF_POSITION_MASK) == D3DFVF_XYZRHW) { D3DVALUE *position = (D3DVALUE *) (((char *) lpStrideData->position.lpvData) + i * lpStrideData->position.dwStride); copy_and_next(dest_ptr, position, 4 * sizeof(D3DVALUE)); } if (dwVertexTypeDesc & D3DFVF_RESERVED1) { dest_ptr += sizeof(DWORD); } if (dwVertexTypeDesc & D3DFVF_NORMAL) { D3DVALUE *normal = (D3DVALUE *) (((char *) lpStrideData->normal.lpvData) + i * lpStrideData->normal.dwStride); copy_and_next(dest_ptr, normal, 3 * sizeof(D3DVALUE)); } if (dwVertexTypeDesc & D3DFVF_DIFFUSE) { DWORD *color_d = (DWORD *) (((char *) lpStrideData->diffuse.lpvData) + i * lpStrideData->diffuse.dwStride); copy_and_next(dest_ptr, color_d, sizeof(DWORD)); } if (dwVertexTypeDesc & D3DFVF_SPECULAR) { DWORD *color_s = (DWORD *) (((char *) lpStrideData->specular.lpvData) + i * lpStrideData->specular.dwStride); copy_and_next(dest_ptr, color_s, sizeof(DWORD)); } for (tex_index = 0; tex_index < ((dwVertexTypeDesc & D3DFVF_TEXCOUNT_MASK) >> D3DFVF_TEXCOUNT_SHIFT); tex_index++) { D3DVALUE *tex_coord = (D3DVALUE *) (((char *) lpStrideData->textureCoords[tex_index].lpvData) + i * lpStrideData->textureCoords[tex_index].dwStride); copy_and_next(dest_ptr, tex_coord, 2 * sizeof(D3DVALUE)); } if 
(TRACE_ON(ddraw_geom)) { if ((dwVertexTypeDesc & D3DFVF_POSITION_MASK) == D3DFVF_XYZ) { D3DVALUE *position = (D3DVALUE *) (((char *) lpStrideData->position.lpvData) + i * lpStrideData->position.dwStride); TRACE_(ddraw_geom)(" %f %f %f", position[0], position[1], position[2]); } else if ((dwVertexTypeDesc & D3DFVF_POSITION_MASK) == D3DFVF_XYZRHW) { D3DVALUE *position = (D3DVALUE *) (((char *) lpStrideData->position.lpvData) + i * lpStrideData->position.dwStride); TRACE_(ddraw_geom)(" %f %f %f %f", position[0], position[1], position[2], position[3]); } if (dwVertexTypeDesc & D3DFVF_NORMAL) { D3DVALUE *normal = (D3DVALUE *) (((char *) lpStrideData->normal.lpvData) + i * lpStrideData->normal.dwStride); TRACE_(ddraw_geom)(" / %f %f %f", normal[0], normal[1], normal[2]); } if (dwVertexTypeDesc & D3DFVF_DIFFUSE) { DWORD *color_d = (DWORD *) (((char *) lpStrideData->diffuse.lpvData) + i * lpStrideData->diffuse.dwStride); TRACE_(ddraw_geom)(" / %02lx %02lx %02lx %02lx", (*color_d >> 16) & 0xFF, (*color_d >> 8) & 0xFF, (*color_d >> 0) & 0xFF, (*color_d >> 24) & 0xFF); } if (dwVertexTypeDesc & D3DFVF_SPECULAR) { DWORD *color_s = (DWORD *) (((char *) lpStrideData->specular.lpvData) + i * lpStrideData->specular.dwStride); TRACE_(ddraw_geom)(" / %02lx %02lx %02lx %02lx", (*color_s >> 16) & 0xFF, (*color_s >> 8) & 0xFF, (*color_s >> 0) & 0xFF, (*color_s >> 24) & 0xFF); } for (tex_index = 0; tex_index < ((dwVertexTypeDesc & D3DFVF_TEXCOUNT_MASK) >> D3DFVF_TEXCOUNT_SHIFT); tex_index++) { D3DVALUE *tex_coord = (D3DVALUE *) (((char *) lpStrideData->textureCoords[tex_index].lpvData) + i * lpStrideData->textureCoords[tex_index].dwStride); TRACE_(ddraw_geom)(" / %f %f", tex_coord[0], tex_coord[1]); } TRACE_(ddraw_geom)("\n"); } } return DD_OK; } #undef copy_and_next HRESULT WINAPI GL_IDirect3DVertexBufferImpl_7_1T_ProcessVertices(LPDIRECT3DVERTEXBUFFER7 iface, DWORD dwVertexOp, DWORD dwDestIndex, DWORD dwCount, LPDIRECT3DVERTEXBUFFER7 lpSrcBuffer, DWORD dwSrcIndex, LPDIRECT3DDEVICE7 
lpD3DDevice, DWORD dwFlags) { ICOM_THIS_FROM(IDirect3DVertexBufferImpl, IDirect3DVertexBuffer7, iface); IDirect3DVertexBufferImpl *src_impl = ICOM_OBJECT(IDirect3DVertexBufferImpl, IDirect3DVertexBuffer7, lpSrcBuffer); IDirect3DDeviceImpl *device_impl = ICOM_OBJECT(IDirect3DDeviceImpl, IDirect3DDevice7, lpD3DDevice); D3DDRAWPRIMITIVESTRIDEDDATA strided; DWORD size; TRACE("(%p/%p)->(%08lx,%08lx,%08lx,%p,%08lx,%p,%08lx)\n", This, iface, dwVertexOp, dwDestIndex, dwCount, lpSrcBuffer, dwSrcIndex, lpD3DDevice, dwFlags); if (TRACE_ON(ddraw)) { TRACE(" - vertex operations : "); dump_D3DVOP(dwVertexOp); TRACE(" - flags : "); dump_D3DPV(dwFlags); } if ((dwVertexOp & D3DVOP_TRANSFORM) == 0) return DDERR_INVALIDPARAMS; size = get_flexible_vertex_size(src_impl->desc.dwFVF); convert_FVF_to_strided_data(src_impl->desc.dwFVF, ((char *) src_impl->vertices) + dwSrcIndex * size, &strided, 0); return process_vertices_strided(This, dwVertexOp, dwDestIndex, dwCount, &strided, src_impl->desc.dwFVF, device_impl, dwFlags); } HRESULT WINAPI GL_IDirect3DVertexBufferImpl_7_ProcessVerticesStrided(LPDIRECT3DVERTEXBUFFER7 iface, DWORD dwVertexOp, DWORD dwDestIndex, DWORD dwCount, LPD3DDRAWPRIMITIVESTRIDEDDATA lpStrideData, DWORD dwVertexTypeDesc, LPDIRECT3DDEVICE7 lpD3DDevice, DWORD dwFlags) { ICOM_THIS_FROM(IDirect3DVertexBufferImpl, IDirect3DVertexBuffer7, iface); IDirect3DDeviceImpl *device_impl = ICOM_OBJECT(IDirect3DDeviceImpl, IDirect3DDevice7, lpD3DDevice); TRACE("(%p/%p)->(%08lx,%08lx,%08lx,%p,%08lx,%p,%08lx)\n", This, iface, dwVertexOp, dwDestIndex, dwCount, lpStrideData, dwVertexTypeDesc, lpD3DDevice, dwFlags); if (TRACE_ON(ddraw)) { TRACE(" - vertex operations : "); dump_D3DVOP(dwVertexOp); TRACE(" - flags : "); dump_D3DPV(dwFlags); TRACE(" - vertex format : "); dump_flexible_vertex(dwVertexTypeDesc); } if ((dwVertexOp & D3DVOP_TRANSFORM) == 0) return DDERR_INVALIDPARAMS; return process_vertices_strided(This, dwVertexOp, dwDestIndex, dwCount, lpStrideData, dwVertexTypeDesc, 
device_impl, dwFlags); } #if !defined(__STRICT_ANSI__) && defined(__GNUC__) # define XCAST(fun) (typeof(VTABLE_IDirect3DVertexBuffer7.fun)) #else # define XCAST(fun) (void*) #endif ICOM_VTABLE(IDirect3DVertexBuffer7) VTABLE_IDirect3DVertexBuffer7 = { ICOM_MSVTABLE_COMPAT_DummyRTTIVALUE XCAST(QueryInterface) Main_IDirect3DVertexBufferImpl_7_1T_QueryInterface, XCAST(AddRef) Main_IDirect3DVertexBufferImpl_7_1T_AddRef, XCAST(Release) Main_IDirect3DVertexBufferImpl_7_1T_Release, XCAST(Lock) Main_IDirect3DVertexBufferImpl_7_1T_Lock, XCAST(Unlock) Main_IDirect3DVertexBufferImpl_7_1T_Unlock, XCAST(ProcessVertices) GL_IDirect3DVertexBufferImpl_7_1T_ProcessVertices, XCAST(GetVertexBufferDesc) Main_IDirect3DVertexBufferImpl_7_1T_GetVertexBufferDesc, XCAST(Optimize) Main_IDirect3DVertexBufferImpl_7_1T_Optimize, XCAST(ProcessVerticesStrided) GL_IDirect3DVertexBufferImpl_7_ProcessVerticesStrided }; #if !defined(__STRICT_ANSI__) && defined(__GNUC__) #undef XCAST #endif #if !defined(__STRICT_ANSI__) && defined(__GNUC__) # define XCAST(fun) (typeof(VTABLE_IDirect3DVertexBuffer.fun)) #else # define XCAST(fun) (void*) #endif ICOM_VTABLE(IDirect3DVertexBuffer) VTABLE_IDirect3DVertexBuffer = { ICOM_MSVTABLE_COMPAT_DummyRTTIVALUE XCAST(QueryInterface) Thunk_IDirect3DVertexBufferImpl_1_QueryInterface, XCAST(AddRef) Thunk_IDirect3DVertexBufferImpl_1_AddRef, XCAST(Release) Thunk_IDirect3DVertexBufferImpl_1_Release, XCAST(Lock) Thunk_IDirect3DVertexBufferImpl_1_Lock, XCAST(Unlock) Thunk_IDirect3DVertexBufferImpl_1_Unlock, XCAST(ProcessVertices) Thunk_IDirect3DVertexBufferImpl_1_ProcessVertices, XCAST(GetVertexBufferDesc) Thunk_IDirect3DVertexBufferImpl_1_GetVertexBufferDesc, XCAST(Optimize) Thunk_IDirect3DVertexBufferImpl_1_Optimize }; #if !defined(__STRICT_ANSI__) && defined(__GNUC__) #undef XCAST #endif HRESULT d3dvertexbuffer_create(IDirect3DVertexBufferImpl **obj, IDirectDrawImpl *d3d, LPD3DVERTEXBUFFERDESC lpD3DVertBufDesc, DWORD dwFlags) { IDirect3DVertexBufferImpl *object; static 
const flag_info flags[] = { FE(D3DVBCAPS_DONOTCLIP), FE(D3DVBCAPS_OPTIMIZED), FE(D3DVBCAPS_SYSTEMMEMORY), FE(D3DVBCAPS_WRITEONLY) }; object = HeapAlloc(GetProcessHeap(), HEAP_ZERO_MEMORY, sizeof(IDirect3DVertexBufferGLImpl)); if (object == NULL) return DDERR_OUTOFMEMORY; object->ref = 1; object->d3d = d3d; object->desc = *lpD3DVertBufDesc; object->vertex_buffer_size = get_flexible_vertex_size(lpD3DVertBufDesc->dwFVF) * lpD3DVertBufDesc->dwNumVertices; object->vertices = HeapAlloc(GetProcessHeap(), HEAP_ZERO_MEMORY, object->vertex_buffer_size); ICOM_INIT_INTERFACE(object, IDirect3DVertexBuffer, VTABLE_IDirect3DVertexBuffer); ICOM_INIT_INTERFACE(object, IDirect3DVertexBuffer7, VTABLE_IDirect3DVertexBuffer7); *obj = object; if (TRACE_ON(ddraw)) { TRACE(" creating implementation at %p with description : \n", *obj); TRACE(" flags : "); DDRAW_dump_flags_(lpD3DVertBufDesc->dwCaps, flags, sizeof(flags)/sizeof(flags[0]), TRUE); TRACE(" vertex type : "); dump_flexible_vertex(lpD3DVertBufDesc->dwFVF); TRACE(" num vertices : %ld\n", lpD3DVertBufDesc->dwNumVertices); } return D3D_OK; }
GuillaumeFalourd/poc-bank-api-java
src/main/java/com/example/bankpoc/repository/AccountRepository.java
package com.example.bankpoc.repository;

import java.util.Optional;

import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.stereotype.Repository;

import com.example.bankpoc.models.entity.Account;

/**
 * Spring Data JPA repository for {@link Account} entities.
 *
 * <p>All standard CRUD operations are inherited from {@link JpaRepository};
 * {@code findById} is redeclared here only to make the lookup used by the
 * service layer explicit.
 */
@Repository
public interface AccountRepository extends JpaRepository<Account, Long> {

    /**
     * Looks up an account by its primary key.
     *
     * @param id the account id; must not be {@code null}
     * @return the matching account, or {@link Optional#empty()} if none exists
     */
    Optional<Account> findById(Long id);
}
mohofo7/orbit
packages/orbit-components/src/BadgeList/index.js
<filename>packages/orbit-components/src/BadgeList/index.js // @flow import * as React from "react"; import styled from "styled-components"; import type { Props } from "."; const StyledBadgeList = styled.ul` margin: 0; padding: 0; display: flex; flex-direction: column; `; const BadgeList = ({ children, dataTest }: Props): React.Node => { return <StyledBadgeList data-test={dataTest}>{children}</StyledBadgeList>; }; export { default as BadgeListItem } from "./BadgeListItem"; export default BadgeList;
SC0d3r/matrix
spec/div.spec.js
// Jest spec for the element-wise matrix division helper in ../div.
const div = require('../div');

describe('div function', function () {
    let m1, m2;

    // Validation: a row mixing scalars and nested arrays is not a valid matrix.
    it('should throw if any of the two matrices are not valid', function () {
        m1 = [1, 2];
        m2 = [2, [2]];
        expect(function () { div(m1, m2) }).toThrow();
    });

    // Validation: element-wise division requires identically-shaped operands.
    it('should throw if matrices dont have the same dimension for division', function () {
        m1 = [1, 2, 3];
        m2 = [2, 2];
        expect(function () { div(m1, m2) }).toThrow();
    })

    // Happy path: 1-D vectors and 2-D matrices are divided element by element.
    it('should divide two matrices element wise', function () {
        m1 = [1, 2];
        m2 = [2, 2];
        expect(div(m1, m2)).toEqual([0.5, 1]);

        m1 = [[1, 2], [4, 4]];
        m2 = [[2, 2], [1, 2]];
        expect(div(m1, m2)).toEqual([[0.5, 1], [4, 2]]);
    })
})
SonTrungTo/Full_Stack
EloquentJS/ch14/codesExample/attribute.js
// Remove every paragraph inside <body> that is flagged as classified.
// The attribute selector matches exactly the paragraphs whose
// data-classified attribute equals "secret", and querySelectorAll
// returns a static NodeList, so removing nodes while iterating is safe.
const secretParas = document.body.querySelectorAll('p[data-classified="secret"]');
for (const para of secretParas) {
  para.remove();
}
lyh2048/SpringBoot
SpringBoot-Shiro/src/main/java/com/example/controller/LoginController.java
package com.example.controller;

import com.example.exception.ForbiddenUserException;
import com.example.exception.IncorrectCaptchaException;
import com.example.shiro.ShiroUtils;
import org.apache.shiro.authc.IncorrectCredentialsException;
import org.apache.shiro.authc.UnknownAccountException;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;

import javax.servlet.http.HttpServletRequest;
import java.util.Map;

/**
 * Serves the login page and handles the outcome of Shiro's form
 * authentication filter.
 */
@Controller
public class LoginController {

    /**
     * GET /login — already-authenticated users are redirected home,
     * everyone else gets the login form.
     */
    @RequestMapping(value = "/login", method = RequestMethod.GET)
    public String login() {
        return ShiroUtils.isAuthenticated() ? "redirect:/" : "login";
    }

    /**
     * POST /login — Shiro performs the actual authentication before this
     * handler runs; a failure is exposed via the "shiroLoginFailure"
     * request attribute. On failure the submitted credentials and a
     * human-readable message are pushed back into the model.
     */
    @RequestMapping(value = "/login", method = RequestMethod.POST)
    public String login(HttpServletRequest request, Map<String, Object> map) {
        Object failure = request.getAttribute("shiroLoginFailure");
        if (failure == null) {
            // Authentication succeeded.
            return "index";
        }
        map.put("username", request.getParameter("username"));
        map.put("password", request.getParameter("password"));
        map.put("msg", describeFailure(failure));
        return "login";
    }

    /** Maps a Shiro authentication exception to a user-facing message. */
    private String describeFailure(Object exception) {
        if (exception instanceof UnknownAccountException) {
            return "用户名错误";
        }
        if (exception instanceof IncorrectCredentialsException) {
            return "密码错误";
        }
        if (exception instanceof IncorrectCaptchaException) {
            return "验证码错误";
        }
        if (exception instanceof ForbiddenUserException) {
            return "该用户已被禁用,请联系管理员";
        }
        return "未知错误";
    }
}
i-Taozi/Chorus
interpreter/chorus-executionlistener/src/main/java/org/chorusbdd/chorus/executionlistener/util/PromptOnScenarioEndExecutionListener.java
/**
 * MIT License
 *
 * Copyright (c) 2019 Chorus BDD Organisation.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
package org.chorusbdd.chorus.executionlistener.util;

import org.chorusbdd.chorus.annotations.ExecutionPriority;
import org.chorusbdd.chorus.executionlistener.ExecutionListenerAdapter;
import org.chorusbdd.chorus.logging.ChorusLog;
import org.chorusbdd.chorus.logging.ChorusLogFactory;
import org.chorusbdd.chorus.logging.ChorusOut;
import org.chorusbdd.chorus.results.ExecutionToken;
import org.chorusbdd.chorus.results.ScenarioToken;

import java.io.Console;

/**
 * Created by nickebbutt on 20/03/2018.
 *
 * An experimental execution listener which can be added to pause execution after each scenario to give the
 * user a chance to inspect state.
 *
 * After each scenario the user is asked whether to proceed; any answer other
 * than "y" (case-insensitive) aborts the test run with exit code 1.
 */
@ExecutionPriority(200)
public class PromptOnScenarioEndExecutionListener extends ExecutionListenerAdapter {

    private final ChorusLog log = ChorusLogFactory.getLog(PromptOnScenarioEndExecutionListener.class);

    @Override
    public void scenarioCompleted(ExecutionToken testExecutionToken, ScenarioToken scenario) {
        if (shouldPrompt(scenario)) {
            ChorusOut.out.println("Scenario " + scenario.getName() + " " + scenario.getEndState());
            ChorusOut.out.println("Do you want to proceed? (y/n)");
            Console console = System.console();
            // FIX: System.console() returns null when the JVM is not attached to an
            // interactive terminal (e.g. CI, IDE, piped output); the previous code
            // dereferenced it unconditionally and threw a NullPointerException.
            if (console == null) {
                log.warn("No interactive console is available, continuing without prompting");
                return;
            }
            String l = console.readLine();
            if (!"y".equalsIgnoreCase(l)) {
                log.error("Exiting early on user request");
                System.exit(1);
            }
        }
    }

    /** Hook for subclasses to prompt only for selected scenarios; defaults to always. */
    protected boolean shouldPrompt(ScenarioToken scenarioToken) {
        return true;
    }
}
Tiamat-Tech/sourcegraph
internal/database/user_credentials.go
package database

import (
	"context"
	"database/sql"
	"fmt"
	"time"

	"github.com/cockroachdb/errors"
	"github.com/keegancsmith/sqlf"

	"github.com/sourcegraph/sourcegraph/internal/database/basestore"
	"github.com/sourcegraph/sourcegraph/internal/database/dbutil"
	"github.com/sourcegraph/sourcegraph/internal/encryption"
	"github.com/sourcegraph/sourcegraph/internal/extsvc/auth"
	"github.com/sourcegraph/sourcegraph/internal/timeutil"
)

// UserCredential represents a row in the `user_credentials` table.
type UserCredential struct {
	ID                  int64
	Domain              string
	UserID              int32
	ExternalServiceType string
	ExternalServiceID   string
	// EncryptedCredential holds either ciphertext (when EncryptionKeyID names a
	// real key) or the plain marshalled authenticator (when EncryptionKeyID is
	// empty or the "unmigrated" sentinel) — see Authenticator below.
	EncryptedCredential []byte
	EncryptionKeyID     string
	CreatedAt           time.Time
	UpdatedAt           time.Time

	// TODO(batch-change-credential-encryption): On or after Sourcegraph 3.30,
	// we should remove the credential and SSHMigrationApplied fields.
	SSHMigrationApplied bool

	// key decrypts EncryptedCredential; populated by the store that loaded
	// this record. May be nil, in which case only unencrypted credentials
	// can be read.
	key encryption.Key
}

// Authenticator decrypts and creates the authenticator associated with the user
// credential.
func (uc *UserCredential) Authenticator(ctx context.Context) (auth.Authenticator, error) {
	// The record includes a field indicating the encryption key ID. We don't
	// really have a way to look up a key by ID right now, so this is used as a
	// marker of whether we should expect a key or not.
	if uc.EncryptionKeyID == "" || uc.EncryptionKeyID == UserCredentialUnmigratedEncryptionKeyID {
		// Stored in the clear: unmarshal the raw bytes directly.
		return UnmarshalAuthenticator(string(uc.EncryptedCredential))
	}
	if uc.key == nil {
		return nil, errors.New("user credential is encrypted, but no key is available to decrypt it")
	}

	secret, err := uc.key.Decrypt(ctx, uc.EncryptedCredential)
	if err != nil {
		return nil, errors.Wrap(err, "decrypting credential")
	}

	a, err := UnmarshalAuthenticator(secret.Secret())
	if err != nil {
		return nil, errors.Wrap(err, "unmarshalling authenticator")
	}

	return a, nil
}

// SetAuthenticator encrypts and sets the authenticator within the user
// credential.
func (uc *UserCredential) SetAuthenticator(ctx context.Context, a auth.Authenticator) error {
	// Set the key ID. This is cargo culted from external_accounts.go, and the
	// key ID doesn't appear to be actually useful as anything other than a
	// marker of whether the data is expected to be encrypted or not.
	id, err := keyID(ctx, uc.key)
	if err != nil {
		return errors.Wrap(err, "getting key version")
	}

	secret, err := EncryptAuthenticator(ctx, uc.key, a)
	if err != nil {
		return errors.Wrap(err, "encrypting authenticator")
	}

	uc.EncryptedCredential = secret
	uc.EncryptionKeyID = id

	return nil
}

const (
	// Valid domain values for user credentials.
	UserCredentialDomainBatches = "batches"

	// Placeholder encryption key IDs.
	UserCredentialPlaceholderEncryptionKeyID = "previously-migrated"
	UserCredentialUnmigratedEncryptionKeyID  = "unmigrated"
)

// UserCredentialNotFoundErr is returned when a credential cannot be found from
// its ID or scope.
type UserCredentialNotFoundErr struct{ args []interface{} }

func (err UserCredentialNotFoundErr) Error() string {
	return fmt.Sprintf("user credential not found: %v", err.args)
}

// NotFound marks this error for the errcode.IsNotFound-style interface check.
func (UserCredentialNotFoundErr) NotFound() bool {
	return true
}

// UserCredentialsStore is the interface implemented by userCredentialsStore;
// it exists so callers can accept either a concrete store or a transaction.
type UserCredentialsStore interface {
	basestore.ShareableStore
	With(basestore.ShareableStore) UserCredentialsStore
	Transact(context.Context) (UserCredentialsStore, error)
	Create(ctx context.Context, scope UserCredentialScope, credential auth.Authenticator) (*UserCredential, error)
	Update(context.Context, *UserCredential) error
	Delete(ctx context.Context, id int64) error
	GetByID(ctx context.Context, id int64) (*UserCredential, error)
	GetByScope(context.Context, UserCredentialScope) (*UserCredential, error)
	List(context.Context, UserCredentialsListOpts) ([]*UserCredential, int, error)
}

// userCredentialsStore provides access to the `user_credentials` table.
type userCredentialsStore struct { *basestore.Store key encryption.Key } // UserCredentials instantiates and returns a new UserCredentialsStore with prepared statements. func UserCredentials(db dbutil.DB, key encryption.Key) UserCredentialsStore { return &userCredentialsStore{ Store: basestore.NewWithDB(db, sql.TxOptions{}), key: key, } } // UserCredentialsWith instantiates and returns a new UserCredentialsStore using the other store handle. func UserCredentialsWith(other basestore.ShareableStore, key encryption.Key) UserCredentialsStore { return &userCredentialsStore{ Store: basestore.NewWithHandle(other.Handle()), key: key, } } func (s *userCredentialsStore) With(other basestore.ShareableStore) UserCredentialsStore { return &userCredentialsStore{Store: s.Store.With(other)} } func (s *userCredentialsStore) Transact(ctx context.Context) (UserCredentialsStore, error) { txBase, err := s.Store.Transact(ctx) return &userCredentialsStore{Store: txBase}, err } // UserCredentialScope represents the unique scope for a credential. Only one // credential may exist within a scope. type UserCredentialScope struct { Domain string UserID int32 ExternalServiceType string ExternalServiceID string } // Create creates a new user credential based on the given scope and // authenticator. If the scope already has a credential, an error will be // returned. 
// Create encrypts the authenticator, inserts a new user_credentials row for
// the given scope, and returns the freshly-scanned record. Uniqueness of the
// scope is presumably enforced by a database constraint — confirm against the
// schema; a duplicate scope surfaces here as an insert error.
func (s *userCredentialsStore) Create(ctx context.Context, scope UserCredentialScope, credential auth.Authenticator) (*UserCredential, error) {
	// Resolve the key ID first so the row records which key encrypted it.
	id, err := keyID(ctx, s.key)
	if err != nil {
		return nil, err
	}

	enc, err := EncryptAuthenticator(ctx, s.key, credential)
	if err != nil {
		return nil, err
	}

	q := sqlf.Sprintf(
		userCredentialsCreateQueryFmtstr,
		scope.Domain,
		scope.UserID,
		scope.ExternalServiceType,
		scope.ExternalServiceID,
		enc,
		id,
		sqlf.Join(userCredentialsColumns, ", "),
	)

	// The INSERT uses RETURNING, so the inserted row (including DB-generated
	// id and timestamps) is scanned straight back into cred.
	cred := UserCredential{key: s.key}
	row := s.QueryRow(ctx, q)
	if err := scanUserCredential(&cred, row); err != nil {
		return nil, err
	}

	return &cred, nil
}

// Update updates a user credential in the database. If the credential cannot be found,
// an error is returned.
func (s *userCredentialsStore) Update(ctx context.Context, credential *UserCredential) error {
	// Bump the modification timestamp before writing.
	credential.UpdatedAt = timeutil.Now()

	q := sqlf.Sprintf(
		userCredentialsUpdateQueryFmtstr,
		credential.Domain,
		credential.UserID,
		credential.ExternalServiceType,
		credential.ExternalServiceID,
		credential.EncryptedCredential,
		credential.EncryptionKeyID,
		credential.UpdatedAt,
		credential.SSHMigrationApplied,
		credential.ID,
		sqlf.Join(userCredentialsColumns, ", "),
	)

	// RETURNING re-scans the row into the caller's struct so it reflects the
	// persisted state. sql.ErrNoRows from the scan indicates a missing id.
	row := s.QueryRow(ctx, q)
	if err := scanUserCredential(credential, row); err != nil {
		return err
	}

	return nil
}

// Delete deletes the given user credential. Note that there is no concept of a
// soft delete with user credentials: once deleted, the relevant records are
// _gone_, so that we don't hold any sensitive data unexpectedly.
func (s *userCredentialsStore) Delete(ctx context.Context, id int64) error {
	q := sqlf.Sprintf("DELETE FROM user_credentials WHERE id = %s", id)
	res, err := s.ExecResult(ctx, q)
	if err != nil {
		return err
	}

	// A zero-row delete means the id never existed; surface that as the
	// typed not-found error rather than silently succeeding.
	if rows, err := res.RowsAffected(); err != nil {
		return err
	} else if rows == 0 {
		return UserCredentialNotFoundErr{args: []interface{}{id}}
	}

	return nil
}

// GetByID returns the user credential matching the given ID, or
// UserCredentialNotFoundErr if no such credential exists.
func (s *userCredentialsStore) GetByID(ctx context.Context, id int64) (*UserCredential, error) {
	q := sqlf.Sprintf(
		"SELECT %s FROM user_credentials WHERE id = %s",
		sqlf.Join(userCredentialsColumns, ", "),
		id,
	)

	// The store's key is attached so the caller can later decrypt the
	// credential payload.
	cred := UserCredential{key: s.key}
	row := s.QueryRow(ctx, q)
	if err := scanUserCredential(&cred, row); err == sql.ErrNoRows {
		return nil, UserCredentialNotFoundErr{args: []interface{}{id}}
	} else if err != nil {
		return nil, err
	}

	return &cred, nil
}

// GetByScope returns the user credential matching the given scope, or
// UserCredentialNotFoundErr if no such credential exists.
func (s *userCredentialsStore) GetByScope(ctx context.Context, scope UserCredentialScope) (*UserCredential, error) {
	q := sqlf.Sprintf(
		userCredentialsGetByScopeQueryFmtstr,
		sqlf.Join(userCredentialsColumns, ", "),
		scope.Domain,
		scope.UserID,
		scope.ExternalServiceType,
		scope.ExternalServiceID,
	)

	cred := UserCredential{key: s.key}
	row := s.QueryRow(ctx, q)
	if err := scanUserCredential(&cred, row); err == sql.ErrNoRows {
		return nil, UserCredentialNotFoundErr{args: []interface{}{scope}}
	} else if err != nil {
		return nil, err
	}

	return &cred, nil
}

// UserCredentialsListOpts provide the options when listing credentials. At
// least one field in Scope must be set.
type UserCredentialsListOpts struct {
	*LimitOffset
	Scope     UserCredentialScope
	ForUpdate bool

	// TODO(batch-change-credential-encryption): this should be removed once the
	// OOB SSH migration is removed.
	SSHMigrationApplied *bool

	// TODO(batch-change-credential-encryption): this should be removed once the
	// OOB user credential migration is removed.
	RequiresMigration bool

	// TODO(batch-change-credential-encryption): this should be removed once the
	// OOB user credential migration is removed.
	OnlyEncrypted bool
}

// sql overrides LimitOffset.SQL() to give a LIMIT clause with one extra value
// so we can populate the next cursor.
func (opts *UserCredentialsListOpts) sql() *sqlf.Query {
	if opts.LimitOffset == nil || opts.Limit == 0 {
		return &sqlf.Query{}
	}

	return (&LimitOffset{Limit: opts.Limit + 1, Offset: opts.Offset}).SQL()
}

// List returns all user credentials matching the given options.
func (s *userCredentialsStore) List(ctx context.Context, opts UserCredentialsListOpts) ([]*UserCredential, int, error) {
	// Build WHERE predicates from whichever scope fields are populated; an
	// unset (zero-value) field places no constraint.
	preds := []*sqlf.Query{}
	if opts.Scope.Domain != "" {
		preds = append(preds, sqlf.Sprintf("domain = %s", opts.Scope.Domain))
	}
	if opts.Scope.UserID != 0 {
		preds = append(preds, sqlf.Sprintf("user_id = %s", opts.Scope.UserID))
	}
	if opts.Scope.ExternalServiceType != "" {
		preds = append(preds, sqlf.Sprintf("external_service_type = %s", opts.Scope.ExternalServiceType))
	}
	if opts.Scope.ExternalServiceID != "" {
		preds = append(preds, sqlf.Sprintf("external_service_id = %s", opts.Scope.ExternalServiceID))
	}
	// TODO(batch-change-credential-encryption): remove the remaining predicates
	// once the OOB SSH migration is removed.
	if opts.SSHMigrationApplied != nil {
		preds = append(preds, sqlf.Sprintf("ssh_migration_applied = %s", *opts.SSHMigrationApplied))
	}
	if opts.RequiresMigration {
		preds = append(preds, sqlf.Sprintf(
			"encryption_key_id IN (%s, %s)",
			UserCredentialPlaceholderEncryptionKeyID,
			UserCredentialUnmigratedEncryptionKeyID,
		))
	}
	if opts.OnlyEncrypted {
		preds = append(preds, sqlf.Sprintf(
			"encryption_key_id NOT IN ('', %s)",
			UserCredentialUnmigratedEncryptionKeyID,
		))
	}

	// With no predicates, fall back to a tautology so the WHERE clause stays
	// syntactically valid.
	if len(preds) == 0 {
		preds = append(preds, sqlf.Sprintf("TRUE"))
	}

	forUpdate := &sqlf.Query{}
	if opts.ForUpdate {
		forUpdate = sqlf.Sprintf("FOR UPDATE")
	}

	q := sqlf.Sprintf(
		userCredentialsListQueryFmtstr,
		sqlf.Join(userCredentialsColumns, ", "),
		sqlf.Join(preds, "\n AND "),
		opts.sql(),
		forUpdate,
	)

	rows, err := s.Query(ctx, q)
	if err != nil {
		return nil, 0, err
	}
	defer rows.Close()

	var creds []*UserCredential
	for rows.Next() {
		cred := UserCredential{key: s.key}
		if err := scanUserCredential(&cred, rows); err != nil {
			return nil, 0, err
		}
		creds = append(creds, &cred)
	}

	// Check if there were more results than the limit: if so, then we need to
	// set the return cursor and lop off the extra credential that we retrieved.
	next := 0
	if opts.LimitOffset != nil && opts.Limit != 0 && len(creds) == opts.Limit+1 {
		next = opts.Offset + opts.Limit
		creds = creds[:len(creds)-1]
	}

	return creds, next, nil
}

// 🐉 This marks the end of the public API. Beyond here are dragons.

// userCredentialsColumns are the columns that must be selected by
// user_credentials queries in order to use scanUserCredential().
// NOTE: the order of this slice must match the argument order in
// scanUserCredential() exactly; add new columns to both places together.
var userCredentialsColumns = []*sqlf.Query{
	sqlf.Sprintf("id"),
	sqlf.Sprintf("domain"),
	sqlf.Sprintf("user_id"),
	sqlf.Sprintf("external_service_type"),
	sqlf.Sprintf("external_service_id"),
	sqlf.Sprintf("credential"),
	sqlf.Sprintf("encryption_key_id"),
	sqlf.Sprintf("created_at"),
	sqlf.Sprintf("updated_at"),
	sqlf.Sprintf("ssh_migration_applied"),
}

// The more unwieldy queries are below rather than inline in the above methods
// in a vain attempt to improve their readability.

const userCredentialsGetByScopeQueryFmtstr = `
-- source: internal/database/user_credentials.go:GetByScope
SELECT %s
FROM user_credentials
WHERE
	domain = %s AND
	user_id = %s AND
	external_service_type = %s AND
	external_service_id = %s
`

const userCredentialsListQueryFmtstr = `
-- source: internal/database/user_credentials.go:List
SELECT %s
FROM user_credentials
WHERE %s
ORDER BY created_at ASC, domain ASC, user_id ASC, external_service_id ASC
%s  -- LIMIT clause
%s  -- optional FOR UPDATE
`

const userCredentialsCreateQueryFmtstr = `
-- source: internal/database/user_credentials.go:Create
INSERT INTO
	user_credentials (
		domain,
		user_id,
		external_service_type,
		external_service_id,
		credential,
		encryption_key_id,
		created_at,
		updated_at,
		ssh_migration_applied
	)
	VALUES (
		%s,
		%s,
		%s,
		%s,
		%s,
		%s,
		NOW(),
		NOW(),
		TRUE
	)
	RETURNING %s
`

const userCredentialsUpdateQueryFmtstr = `
-- source: internal/database/user_credentials.go:Update
UPDATE user_credentials
SET
	domain = %s,
	user_id = %s,
	external_service_type = %s,
	external_service_id = %s,
	credential = %s,
	encryption_key_id = %s,
	updated_at = %s,
	ssh_migration_applied = %s
WHERE
	id = %s
RETURNING %s
`

// scanUserCredential scans a credential from the given scanner into the given
// credential.
//
// s is inspired by the BatchChange scanner type, but also matches sql.Row, which
// is generally used directly in this module.
func scanUserCredential(cred *UserCredential, s interface {
	Scan(...interface{}) error
}) error {
	// The destination order here must stay in lockstep with
	// userCredentialsColumns above; a mismatch silently scans fields into the
	// wrong struct members.
	return s.Scan(
		&cred.ID,
		&cred.Domain,
		&cred.UserID,
		&cred.ExternalServiceType,
		&cred.ExternalServiceID,
		&cred.EncryptedCredential,
		&cred.EncryptionKeyID,
		&cred.CreatedAt,
		&cred.UpdatedAt,
		&cred.SSHMigrationApplied,
	)
}

// keyID returns the JSON-encoded version descriptor of the given encryption
// key, or "" when no key is configured (i.e. data is stored unencrypted).
func keyID(ctx context.Context, key encryption.Key) (string, error) {
	if key != nil {
		version, err := key.Version(ctx)
		if err != nil {
			return "", errors.Wrap(err, "getting key version")
		}
		return version.JSON(), nil
	}

	return "", nil
}
yangfancoming/spring-boot-build
spring-boot-project/spring-boot-devtools/src/main/java/org/springframework/boot/devtools/remote/server/Handler.java
package org.springframework.boot.devtools.remote.server;

import java.io.IOException;

import org.springframework.http.server.ServerHttpRequest;
import org.springframework.http.server.ServerHttpResponse;

/**
 * A single handler that is able to process an incoming remote server request.
 * Implementations are expected to write any result directly to the given
 * response.
 *
 * @since 1.3.0
 */
@FunctionalInterface
public interface Handler {

	/**
	 * Handle the request.
	 * @param request the request
	 * @param response the response
	 * @throws IOException in case of I/O errors
	 */
	void handle(ServerHttpRequest request, ServerHttpResponse response) throws IOException;

}
testerwang11/agent
src/main/java/com/daxiang/mbg/mapper/TestTaskMapper.java
package com.daxiang.mbg.mapper;

import com.daxiang.mbg.po.TestTask;
import com.daxiang.mbg.po.TestTaskExample;
import java.util.List;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;

/**
 * MyBatis mapper for the test_task table.
 *
 * <p>This interface follows the MyBatis Generator (mbg) naming conventions
 * (countByExample, selectByPrimaryKey, ...); the SQL presumably lives in a
 * companion XML mapper — confirm before editing by hand, as regeneration may
 * overwrite changes.
 */
@Mapper
public interface TestTaskMapper {
    /** Counts rows matching the given example criteria. */
    long countByExample(TestTaskExample example);

    /** Deletes rows matching the given example criteria; returns affected row count. */
    int deleteByExample(TestTaskExample example);

    /** Deletes the row with the given primary key; returns affected row count. */
    int deleteByPrimaryKey(Integer id);

    /** Inserts the full record (all columns). */
    int insert(TestTask record);

    /** Inserts only the non-null fields of the record. */
    int insertSelective(TestTask record);

    /** Selects rows matching the given example criteria. */
    List<TestTask> selectByExample(TestTaskExample example);

    /** Selects the row with the given primary key, or null if absent. */
    TestTask selectByPrimaryKey(Integer id);

    /** Updates only non-null fields of the record on rows matching the example. */
    int updateByExampleSelective(@Param("record") TestTask record, @Param("example") TestTaskExample example);

    /** Updates all fields of the record on rows matching the example. */
    int updateByExample(@Param("record") TestTask record, @Param("example") TestTaskExample example);

    /** Updates only non-null fields of the record, keyed by primary key. */
    int updateByPrimaryKeySelective(TestTask record);

    /** Updates all fields of the record, keyed by primary key. */
    int updateByPrimaryKey(TestTask record);
}
zhang-sai/config-errors
configuration-detector/subjects/randoop/randoop-src/randoop/TimeOutException.java
package randoop; public final class TimeOutException extends RuntimeException { private static final long serialVersionUID = 7932531804127083491L; public TimeOutException(String string) { super(string); } }
chrisly42/mc68000-asm-plugin
src/main/gen/de/platon42/intellij/plugins/m68k/psi/M68kVisitor.java
// This is a generated file. Not intended for manual editing.
package de.platon42.intellij.plugins.m68k.psi;

import com.intellij.psi.PsiElementVisitor;
import org.jetbrains.annotations.NotNull;

/**
 * Generated PSI visitor for the M68k assembly language plugin.
 *
 * <p>Each visit method delegates to the visit method of the element's parent
 * interface (e.g. concrete addressing modes fall through to
 * {@link #visitAddressingMode}, expressions to {@link #visitExpr}), so
 * subclasses can override at whatever level of granularity they need. The
 * commented-out delegation lines reflect mixin interfaces in the grammar that
 * have no visitor hook. Do not edit by hand; regenerate from the grammar.
 */
public class M68kVisitor extends PsiElementVisitor {

  // --- Addressing modes and registers ---

  public void visitAbsoluteAddressAddressingMode(@NotNull M68kAbsoluteAddressAddressingMode o) {
    visitAddressingMode(o);
  }

  public void visitAddressRegister(@NotNull M68kAddressRegister o) {
    visitRegister(o);
  }

  public void visitAddressRegisterDirectAddressingMode(@NotNull M68kAddressRegisterDirectAddressingMode o) {
    visitAddressingMode(o);
  }

  public void visitAddressRegisterIndirectAddressingMode(@NotNull M68kAddressRegisterIndirectAddressingMode o) {
    visitAddressingMode(o);
    // visitWithAddressRegisterIndirect(o);
  }

  public void visitAddressRegisterIndirectPostIncAddressingMode(@NotNull M68kAddressRegisterIndirectPostIncAddressingMode o) {
    visitAddressingMode(o);
    // visitWithAddressRegisterIndirect(o);
  }

  public void visitAddressRegisterIndirectPreDecAddressingMode(@NotNull M68kAddressRegisterIndirectPreDecAddressingMode o) {
    visitAddressingMode(o);
    // visitWithAddressRegisterIndirect(o);
  }

  public void visitAddressRegisterIndirectWithDisplacementNewAddressingMode(@NotNull M68kAddressRegisterIndirectWithDisplacementNewAddressingMode o) {
    visitAddressingMode(o);
    // visitWithAddressRegisterIndirect(o);
    // visitWithDisplacement(o);
  }

  public void visitAddressRegisterIndirectWithDisplacementOldAddressingMode(@NotNull M68kAddressRegisterIndirectWithDisplacementOldAddressingMode o) {
    visitAddressingMode(o);
    // visitWithAddressRegisterIndirect(o);
    // visitWithDisplacement(o);
  }

  public void visitAddressRegisterIndirectWithIndexBaseDisplacementAddressingMode(@NotNull M68kAddressRegisterIndirectWithIndexBaseDisplacementAddressingMode o) {
    visitAddressingMode(o);
    // visitWithOptionalAddressRegisterIndirect(o);
    // visitWithBaseDisplacement(o);
    // visitWithOptionalIndexRegister(o);
  }

  public void visitAddressRegisterIndirectWithIndexNewAddressingMode(@NotNull M68kAddressRegisterIndirectWithIndexNewAddressingMode o) {
    visitAddressingMode(o);
    // visitWithAddressRegisterIndirect(o);
    // visitWithDisplacement(o);
    // visitWithIndexRegister(o);
  }

  public void visitAddressRegisterIndirectWithIndexOldAddressingMode(@NotNull M68kAddressRegisterIndirectWithIndexOldAddressingMode o) {
    visitAddressingMode(o);
    // visitWithAddressRegisterIndirect(o);
    // visitWithDisplacement(o);
    // visitWithIndexRegister(o);
  }

  public void visitAddressSize(@NotNull M68kAddressSize o) {
    visitPsiElement(o);
  }

  public void visitAddressingMode(@NotNull M68kAddressingMode o) {
    visitPsiElement(o);
  }

  public void visitAsmInstruction(@NotNull M68kAsmInstruction o) {
    visitPsiElement(o);
  }

  public void visitAsmOp(@NotNull M68kAsmOp o) {
    visitPsiElement(o);
  }

  public void visitAssignment(@NotNull M68kAssignment o) {
    visitPsiElement(o);
  }

  public void visitBaseDisplacement(@NotNull M68kBaseDisplacement o) {
    visitPsiElement(o);
  }

  public void visitDataRegister(@NotNull M68kDataRegister o) {
    visitRegister(o);
  }

  public void visitDataRegisterDirectAddressingMode(@NotNull M68kDataRegisterDirectAddressingMode o) {
    visitAddressingMode(o);
  }

  public void visitDataWidth(@NotNull M68kDataWidth o) {
    visitPsiElement(o);
  }

  public void visitGlobalLabel(@NotNull M68kGlobalLabel o) {
    visitNamedElement(o);
  }

  public void visitImmediateData(@NotNull M68kImmediateData o) {
    visitAddressingMode(o);
  }

  public void visitIndexRegister(@NotNull M68kIndexRegister o) {
    visitPsiElement(o);
  }

  public void visitIndexScale(@NotNull M68kIndexScale o) {
    visitPsiElement(o);
  }

  public void visitLocalLabel(@NotNull M68kLocalLabel o) {
    visitNamedElement(o);
  }

  // --- Macros and preprocessor ---

  public void visitMacroCall(@NotNull M68kMacroCall o) {
    visitPsiElement(o);
  }

  public void visitMacroDefinition(@NotNull M68kMacroDefinition o) {
    visitNamedElement(o);
  }

  public void visitMacroNameDefinition(@NotNull M68kMacroNameDefinition o) {
    visitPsiElement(o);
  }

  public void visitMacroPlainLine(@NotNull M68kMacroPlainLine o) {
    visitPsiElement(o);
  }

  public void visitMemoryIndirectAddressingMode(@NotNull M68kMemoryIndirectAddressingMode o) {
    visitAddressingMode(o);
    // visitWithAddressRegisterIndirect(o);
    // visitWithBaseDisplacement(o);
    // visitWithOuterDisplacement(o);
  }

  public void visitMemoryIndirectPostIndexedAddressingMode(@NotNull M68kMemoryIndirectPostIndexedAddressingMode o) {
    visitAddressingMode(o);
    // visitWithOptionalAddressRegisterIndirect(o);
    // visitWithBaseDisplacement(o);
    // visitWithIndexRegister(o);
    // visitWithOuterDisplacement(o);
  }

  public void visitMemoryIndirectPreIndexedAddressingMode(@NotNull M68kMemoryIndirectPreIndexedAddressingMode o) {
    visitAddressingMode(o);
    // visitWithOptionalAddressRegisterIndirect(o);
    // visitWithBaseDisplacement(o);
    // visitWithIndexRegister(o);
    // visitWithOuterDisplacement(o);
  }

  public void visitOperandSize(@NotNull M68kOperandSize o) {
    visitPsiElement(o);
  }

  public void visitOuterDisplacement(@NotNull M68kOuterDisplacement o) {
    visitPsiElement(o);
  }

  public void visitPreprocessorDirective(@NotNull M68kPreprocessorDirective o) {
    visitPsiElement(o);
  }

  public void visitPreprocessorKeyword(@NotNull M68kPreprocessorKeyword o) {
    visitPsiElement(o);
  }

  public void visitProgramCounterIndirectWithDisplacementNewAddressingMode(@NotNull M68kProgramCounterIndirectWithDisplacementNewAddressingMode o) {
    visitAddressingMode(o);
    // visitWithDisplacement(o);
  }

  public void visitProgramCounterIndirectWithDisplacementOldAddressingMode(@NotNull M68kProgramCounterIndirectWithDisplacementOldAddressingMode o) {
    visitAddressingMode(o);
    // visitWithDisplacement(o);
  }

  public void visitProgramCounterIndirectWithIndexBaseDisplacementAddressingMode(@NotNull M68kProgramCounterIndirectWithIndexBaseDisplacementAddressingMode o) {
    visitAddressingMode(o);
    // visitWithBaseDisplacement(o);
    // visitWithOptionalIndexRegister(o);
  }

  public void visitProgramCounterIndirectWithIndexNewAddressingMode(@NotNull M68kProgramCounterIndirectWithIndexNewAddressingMode o) {
    visitAddressingMode(o);
    // visitWithDisplacement(o);
    // visitWithIndexRegister(o);
  }

  public void visitProgramCounterIndirectWithIndexOldAddressingMode(@NotNull M68kProgramCounterIndirectWithIndexOldAddressingMode o) {
    visitAddressingMode(o);
    // visitWithDisplacement(o);
    // visitWithIndexRegister(o);
  }

  public void visitProgramCounterMemoryIndirectAddressingMode(@NotNull M68kProgramCounterMemoryIndirectAddressingMode o) {
    visitAddressingMode(o);
    // visitWithBaseDisplacement(o);
    // visitWithOuterDisplacement(o);
  }

  public void visitProgramCounterMemoryIndirectPostIndexedAddressingMode(@NotNull M68kProgramCounterMemoryIndirectPostIndexedAddressingMode o) {
    visitAddressingMode(o);
    // visitWithBaseDisplacement(o);
    // visitWithIndexRegister(o);
    // visitWithOuterDisplacement(o);
  }

  public void visitProgramCounterMemoryIndirectPreIndexedAddressingMode(@NotNull M68kProgramCounterMemoryIndirectPreIndexedAddressingMode o) {
    visitAddressingMode(o);
    // visitWithBaseDisplacement(o);
    // visitWithIndexRegister(o);
    // visitWithOuterDisplacement(o);
  }

  public void visitProgramCounterReference(@NotNull M68kProgramCounterReference o) {
    visitPsiElement(o);
  }

  public void visitRegister(@NotNull M68kRegister o) {
    visitPsiElement(o);
  }

  public void visitRegisterListAddressingMode(@NotNull M68kRegisterListAddressingMode o) {
    visitAddressingMode(o);
  }

  public void visitRegisterRange(@NotNull M68kRegisterRange o) {
    visitPsiElement(o);
  }

  public void visitSpecialRegister(@NotNull M68kSpecialRegister o) {
    visitRegister(o);
  }

  public void visitSpecialRegisterDirectAddressingMode(@NotNull M68kSpecialRegisterDirectAddressingMode o) {
    visitAddressingMode(o);
  }

  public void visitSymbolDefinition(@NotNull M68kSymbolDefinition o) {
    visitNamedElement(o);
  }

  public void visitSymbolReference(@NotNull M68kSymbolReference o) {
    visitPsiElement(o);
  }

  // --- Expressions ---

  public void visitBinaryAddExpr(@NotNull M68kBinaryAddExpr o) {
    visitExpr(o);
  }

  public void visitBinaryBitwiseAndExpr(@NotNull M68kBinaryBitwiseAndExpr o) {
    visitExpr(o);
  }

  public void visitBinaryBitwiseOrExpr(@NotNull M68kBinaryBitwiseOrExpr o) {
    visitExpr(o);
  }

  public void visitBinaryBitwiseXorExpr(@NotNull M68kBinaryBitwiseXorExpr o) {
    visitExpr(o);
  }

  public void visitBinaryCmpEqExpr(@NotNull M68kBinaryCmpEqExpr o) {
    visitExpr(o);
  }

  public void visitBinaryCmpGeExpr(@NotNull M68kBinaryCmpGeExpr o) {
    visitExpr(o);
  }

  public void visitBinaryCmpGtExpr(@NotNull M68kBinaryCmpGtExpr o) {
    visitExpr(o);
  }

  public void visitBinaryCmpLeExpr(@NotNull M68kBinaryCmpLeExpr o) {
    visitExpr(o);
  }

  public void visitBinaryCmpLtExpr(@NotNull M68kBinaryCmpLtExpr o) {
    visitExpr(o);
  }

  public void visitBinaryCmpNeExpr(@NotNull M68kBinaryCmpNeExpr o) {
    visitExpr(o);
  }

  public void visitBinaryDivExpr(@NotNull M68kBinaryDivExpr o) {
    visitExpr(o);
  }

  public void visitBinaryLogicalAndExpr(@NotNull M68kBinaryLogicalAndExpr o) {
    visitExpr(o);
  }

  public void visitBinaryLogicalOrExpr(@NotNull M68kBinaryLogicalOrExpr o) {
    visitExpr(o);
  }

  public void visitBinaryModExpr(@NotNull M68kBinaryModExpr o) {
    visitExpr(o);
  }

  public void visitBinaryMulExpr(@NotNull M68kBinaryMulExpr o) {
    visitExpr(o);
  }

  public void visitBinaryShiftLExpr(@NotNull M68kBinaryShiftLExpr o) {
    visitExpr(o);
  }

  public void visitBinaryShiftRExpr(@NotNull M68kBinaryShiftRExpr o) {
    visitExpr(o);
  }

  public void visitBinarySubExpr(@NotNull M68kBinarySubExpr o) {
    visitExpr(o);
  }

  public void visitExpr(@NotNull M68kExpr o) {
    visitPsiElement(o);
  }

  public void visitLiteralExpr(@NotNull M68kLiteralExpr o) {
    visitExpr(o);
    // visitPsiLiteralValue(o);
  }

  public void visitParenExpr(@NotNull M68kParenExpr o) {
    visitExpr(o);
  }

  public void visitRefExpr(@NotNull M68kRefExpr o) {
    visitExpr(o);
  }

  public void visitStatement(@NotNull M68kStatement o) {
    visitPsiElement(o);
  }

  public void visitUnaryComplExpr(@NotNull M68kUnaryComplExpr o) {
    visitExpr(o);
  }

  public void visitUnaryMinusExpr(@NotNull M68kUnaryMinusExpr o) {
    visitExpr(o);
  }

  public void visitUnaryNotExpr(@NotNull M68kUnaryNotExpr o) {
    visitExpr(o);
  }

  public void visitUnaryPlusExpr(@NotNull M68kUnaryPlusExpr o) {
    visitExpr(o);
  }

  // --- Base fall-throughs ---

  public void visitNamedElement(@NotNull M68kNamedElement o) {
    visitPsiElement(o);
  }

  public void visitPsiElement(@NotNull M68kPsiElement o) {
    visitElement(o);
  }

}
copslock/broadcom_cpri
sdk-6.5.20/src/examples/sand/fe/cint_loopback_sr_cell.c
<filename>sdk-6.5.20/src/examples/sand/fe/cint_loopback_sr_cell.c /* * * * This license is set out in https://raw.githubusercontent.com/Broadcom-Network-Switching-Software/OpenBCM/master/Legal/LICENSE file. * * Copyright 2007-2020 Broadcom Inc. All rights reserved. * * DCMN send \ receive loopback SR cell example: * * The example simulate: * 1. define single route and send loopback sr cell to this route * 2. receive sr cells */ uint32 data_set[16]; uint32 data_get[16]; int send_route_loopback (int unit, bcm_port_t port, bcm_port_loopback_t lb) { int rv; bcm_fabric_route_t route; int lb_orig; bcm_port_t lane; bcm_info_t info; bcm_fabric_route_t_init(&route); rv = bcm_info_get(unit, &info); if (rv != BCM_E_NONE) { printf("error, in bcm_info_get, rv=%d\n", rv); return rv; } /* if device is arad, find link corresponding to fabric port*/ if((info.device & 0xff00) == 0x8600) { lane = _SOC_DPP_FABRIC_PORT_TO_LINK(unit, port); } else { lane = port; } rv = bcm_port_loopback_get(unit, port, &lb_orig); if (rv != BCM_E_NONE) { printf("Error, in bcm_port_loopback_get, rv=%d\n",rv); return rv; } rv = bcm_port_loopback_set(unit, port, lb); if (rv != BCM_E_NONE) { printf("Error, in bcm_port_loopback_set 1, rv=%d\n",rv); return rv; } bshell(unit, "Sleep"); /*define a single route from FE2 to FE2*/ route.number_of_hops = 1; route.hop_ids = &lane; route.pipe_id = -1; /*build data*/ data_set[0] = 0x02002300; data_set[1] = 0x14543656; data_set[2] = 0x22222222; data_set[3] = 0x34243663; data_set[4] = 0x47478444; data_set[5] = 0x55555555; data_set[6] = 0x60890635; data_set[7] = 0x77777777; data_set[8] = 0x23432434; data_set[9] = 0x43545889; data_set[10] = 0x423443aa; data_set[11] = 0xb2533bbb; data_set[12] = 0xcc123cc6; data_set[13] = 0xdddddddd; data_set[14] = 0xeeee5435; data_set[15] = 0xf131ffff; /*send source-routed cell*/ rv = bcm_fabric_route_tx(unit, 0, &route, 16 /*in words*/, data_set); if (rv != BCM_E_NONE) { printf("Error, in soc_send_sr_cell, rv=%d\n",rv); return rv; } rv = 
bcm_port_loopback_set(unit, port, lb_orig); if (rv != BCM_E_NONE) { printf("Error, in bcm_port_loopback_set 2, rv=%d\n",rv); return rv; } return BCM_E_NONE; } /*receive sr cells*/ int receive_sr_cell(int unit, int max_messages) { int rv, count, i; uint32 data_actual_size; /* * in case several sr cells received the cells are accumulated in SW * For that reason it's important to read in loop (even if the relevant interrupt is down) * until soc_receive_sr_cell return EMPTY error. */ count = 0; while(count < max_messages) { /*receive sr cell data*/ rv = bcm_fabric_route_rx(unit, 0, 16, data_get, &data_actual_size); /*all messages was read*/ if(BCM_E_EMPTY == rv) { printf("No more messages to read \n"); break; } else if (rv != BCM_E_NONE) { printf("Error, in bcm_fabric_route_rx, rv=%d\n",rv); return rv; } /*print received data*/ printf("Message received: "); for(i=0 ; i<data_actual_size ; i++) { printf("0x%x ",data_get[i]); } printf("\n"); count++; } printf("%d messages received \n",count); return BCM_E_NONE; } int run_sr_cell_loopback(int unit, bcm_port_t port, bcm_port_loopback_t lb) { int i; int pass = 1; int rv; /*Clear buffer*/ receive_sr_cell(unit, 1000); rv = send_route_loopback(unit, port, lb); if (rv != BCM_E_NONE) { printf("run_sr_cell_loopback: FAILED\n"); return rv; } bshell(unit, "Sleep 5"); rv = receive_sr_cell(unit, 1); if (rv != BCM_E_NONE) { printf("run_sr_cell_loopback: FAILED\n"); return rv; } for (i = 0; i < 16; i++) { if (data_set[i] != data_get[i]) { pass = 0; } } if (pass) { printf("run_sr_cell_loopback: PASS\n"); } return BCM_E_NONE; }
heaths/azure-sdk-for-go
sdk/messaging/azservicebus/doc.go
//go:build go1.16
// +build go1.16

// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

// Package azservicebus provides clients for sending and receiving messages with Azure Service Bus.
//
// NOTE: for creating and managing entities, use the `Client` in the
// `github.com/Azure/azure-sdk-for-go/sdk/messaging/azservicebus/admin` package.
package azservicebus
Z80coder/flintcms
index.js
// Treat NODE_ENV=test as a special mode: it selects the dev env file and
// suppresses process.exit behaviour in startServer.
const testing = process.env.NODE_ENV === 'test'

/* istanbul ignore next */
require('dotenv').config({ path: testing ? '.env.dev' : '.env' })

const path = require('path')
const { generateEnvFile } = require('./server/utils/generate-env-file')
const nunjuckEnv = require('./server/utils/nunjucks')
const validateEnvVariables = require('./server/utils/validate-env-variables')
const scaffold = require('./server/utils/scaffold')
const { verifyNodemailer } = require('./server/utils/emails')
const compileSass = require('./server/utils/compile-sass')
const FlintPlugin = require('./server/utils/FlintPlugin')
const connectToDatabase = require('./server/utils/database')
const createServer = require('./server')
const logger = require('./server/utils/logger')
const boxen = require('boxen')
const chalk = require('chalk')

/**
 * Flint class
 * @class
 */
module.exports = class Flint {
  // Expose the plugin base class so consumers can extend it without a deep require.
  static get FlintPlugin () { return FlintPlugin }

  /**
   * Create a Flint server
   *
   * @typedef {Object} Flint
   * @property {String} [templatePath] - Path to your templates directory
   * @property {String} [scssPath] - Path to your scss directory
   * @property {String} [publicPath] - Path to your public directory
   * @property {String} [publicUrl] - Url for your public folder
   * @property {String} [logsPath] - Path to your logs directory
   * @property {String} [scssEntryPoint] - The entry point to your SCSS styles (within the scssPath)
   * @property {String[]} [scssIncludePaths] - Array of paths to include in SCSS compiling
   * @property {String} [siteName] - The title of your site
   * @property {String} [siteUrl] - The URL to your site
   * @property {Boolean} [listen] - Should the server listen; used for testing
   * @property {Boolean} [enableCacheBusting] - Add a hash to the compiled CSS bundle
   * @property {Function[]} [plugins] - Array of required Class modules
   *
   * @param {Flint} settings
   * @param {boolean} debugMode
   */
  constructor (settings = {}, debugMode) {
    const { templatePath, scssPath, publicPath, plugins, scssEntryPoint, scssIncludePaths, logsPath, publicUrl } = settings

    // Resolve all paths to absolute and fill in defaults; the result is also
    // published on global.FLINT for the rest of the server code to read.
    const FLINT = Object.assign({}, settings, {
      logsPath: path.resolve(logsPath || 'logs'),
      templatePath: path.resolve(templatePath || 'templates'),
      scssPath: path.resolve(scssPath || 'scss'),
      publicPath: path.resolve(publicPath || 'public'),
      plugins: plugins || [],
      scssEntryPoint: scssEntryPoint !== undefined ? scssEntryPoint : 'main.scss',
      scssIncludePaths: scssIncludePaths || [],
      publicUrl: publicUrl || '/public',
      debugMode
    })

    global.FLINT = FLINT

    // NOTE(review): this Promise.all is neither awaited nor returned, so
    // directory scaffolding races against startServer(); presumably the
    // directories exist in time in practice — confirm whether this should be
    // awaited in startServer instead.
    Promise.all([
      scaffold(FLINT.templatePath),
      scaffold(FLINT.publicPath),
      scaffold(FLINT.logsPath)
    ])

    // NOTE(review): the scss directory is only scaffolded when scssEntryPoint
    // is falsy (i.e. explicitly set to '' or null) — verify this condition is
    // intended and not inverted.
    if (!FLINT.scssEntryPoint) scaffold(FLINT.scssPath)

    global.FLINT.nun = nunjuckEnv(FLINT.templatePath)

    this.port = process.env.PORT || 4000
    // Bind so startServer can be passed around as a callback.
    this.startServer = this.startServer.bind(this)
  }

  /**
   * Checks configuration for important credentials
   * then starts the Flint server
   * @param {Number} [port] - Defaults to either the process.env port or 4000
   */
  async startServer (port = this.port) {
    const missingEnvVariables = validateEnvVariables({ log: logger })

    // First run: if a .env file had to be generated, exit so the user can
    // fill it in (skipped under test so the suite keeps running).
    const didGenerateEnv = await generateEnvFile()
    if (didGenerateEnv && !testing) return process.exit()

    const shouldContinue = missingEnvVariables.length === 0

    /* istanbul ignore if */
    if (!shouldContinue) {
      logger.fatal('Could not start the server.')
      return process.exit(1)
    }

    // Database and mailer checks are best-effort: failures are logged but do
    // not abort startup.
    try {
      const connectedToDatabase = await connectToDatabase(logger)
      logger.info(connectedToDatabase)
    } catch (e) {
      logger.error(e)
    }

    try {
      const canSendEmails = await verifyNodemailer()
      logger.info(canSendEmails)
    } catch (e) {
      logger.error(e)
    }

    await compileSass(logger)

    this.server = createServer(logger)

    // listen === false is used by tests to get a server instance without
    // binding a port.
    if (global.FLINT.listen !== false) {
      this.server.listen(port, () => {
        // eslint-disable-next-line no-console
        console.log(boxen(`${chalk.green.bold('Welcome to your FlintCMS server!')}
You can access it here: ${chalk.cyan(`http://localhost:${port}`)}
Setting up your server for the first time?
Go here: ${chalk.cyan(`http://localhost:${port}/admin/install`)}`, { padding: 1, margin: 1, borderStyle: 'round', borderColor: 'green', align: 'center' }))
      })
    }

    return this.server
  }
}
stfnbssl/wizzi-browser
build/wizzi_modules/wizzi/lib/acl/index.js
<gh_stars>0 /* artifact generator: C:\my\wizzi\stfnbssl\wizzi\node_modules\wizzi-js\lib\artifacts\js\module\gen\main.js primary source IttfDocument: C:\my\wizzi\stfnbssl\wizzi\packages\wizzi\.wizzi\ittf\lib\acl\index.js.ittf */ 'use strict'; var md = module.exports = {}; md.AclStat = require('./aclstat');
ChauVV/CreateMM
src/App/Frontend/initsScreen/RestoreScreen/index.js
// Redux container for the RestoreScreen component: maps the entire store state
// to props and binds every exported action creator from ./actions.
import RestoreScreen from './view'
import { connect } from 'react-redux'
import * as mapActionsToProps from './actions'

// NOTE(review): spreading the whole store into props re-renders this screen on
// any state change — confirm whether a narrower selector was intended.
const mapStateToProps = (state) => ({
  ...state
})

export default connect(mapStateToProps, mapActionsToProps)(RestoreScreen)
ran-li2002/minty-server
node_modules/symbol-openapi-typescript-fetch-client/dist/models/NodeHealthInfoDTO.js
"use strict";
/* tslint:disable */
/* eslint-disable */
/*
 * Copyright 2019 NEM
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/**
 * Catapult REST Endpoints
 * OpenAPI Specification of catapult-rest 2.3.0
 *
 * The version of the OpenAPI document: 0.11.2
 *
 *
 * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
 * https://openapi-generator.tech
 * Do not edit the class manually.
 */
Object.defineProperty(exports, "__esModule", { value: true });
exports.NodeHealthInfoDTOToJSON = exports.NodeHealthInfoDTOFromJSONTyped = exports.NodeHealthInfoDTOFromJSON = void 0;
// Deserializes a plain JSON object into a NodeHealthInfoDTO.
function NodeHealthInfoDTOFromJSON(json) {
    return NodeHealthInfoDTOFromJSONTyped(json, false);
}
exports.NodeHealthInfoDTOFromJSON = NodeHealthInfoDTOFromJSON;
// ignoreDiscriminator is part of the generated signature; it is unused for
// this non-polymorphic DTO.
function NodeHealthInfoDTOFromJSONTyped(json, ignoreDiscriminator) {
    // Pass null/undefined through unchanged.
    if ((json === undefined) || (json === null)) {
        return json;
    }
    return {
        'status': json['status'],
    };
}
exports.NodeHealthInfoDTOFromJSONTyped = NodeHealthInfoDTOFromJSONTyped;
// Serializes a NodeHealthInfoDTO back into a plain JSON object.
function NodeHealthInfoDTOToJSON(value) {
    if (value === undefined) {
        return undefined;
    }
    if (value === null) {
        return null;
    }
    return {
        'status': value.status,
    };
}
exports.NodeHealthInfoDTOToJSON = NodeHealthInfoDTOToJSON;
BlockchainDARG/ColumbiaImageSearch
cufacesearch/cufacesearch/ingester/deprecated/kafka_face_processor.py
#DEPRECATED # import os # import sys # import time # import json # import multiprocessing # from .generic_kafka_processor import GenericKafkaProcessor # from ..imgio.imgio import get_buffer_from_B64 # from ..featurizer.featsio import normfeatB64encode # from ..indexer.hbase_indexer_minimal import str_processed # # default_prefix = "KFP_" # key_str = 'sha1' # face_feat_str = 'face_feat' # face_bbox_str = 'face_bbox' # # class KafkaFaceProcessor(GenericKafkaProcessor): # # def __init__(self, conf, prefix=default_prefix, pid=None): # # when running as deamon # self.pid = pid # # call GenericKafkaProcessor init (and others potentially) # super(KafkaFaceProcessor, self).__init__(conf, prefix) # # any additional initialization needed, like producer specific output logic # self.face_out_topic = self.get_required_param('face_out_topic') # self.detector = None # self.featurizer = None # self.detector_type = "" # self.featurizer_type = "" # self.init_detector() # self.init_featurizer() # self.init_indexer() # self.face_extr_prefix = "ext:"+"_".join([self.featurizer_type, "feat", self.detector_type, "face"]) # # def set_pp(self): # self.pp = "KafkaFaceProcessor" # if self.pid: # self.pp += ":"+str(self.pid) # # def init_detector(self): # """ Initialize Face Detector from `global_conf` value. # """ # # Get detector type from conf file # self.detector_type = self.get_required_param('detector') # # Get corresponding detector object # from ..detector.generic_detector import get_detector # self.detector = get_detector(self.detector_type) # # def init_featurizer(self): # """ Initialize Feature Extractor from `global_conf` value. 
# """ # # Get featurizer type from conf file # self.featurizer_type = self.get_required_param('featurizer') # # Get corresponding featurizer object # from ..featurizer.generic_featurizer import get_featurizer # self.featurizer = get_featurizer(self.featurizer_type, self.global_conf) # # def init_indexer(self): # """ Initialize Indexer from `global_conf` value. # """ # from ..indexer.hbase_indexer_minimal import HBaseIndexerMinimal # self.indexer = HBaseIndexerMinimal(self.global_conf) # # def init_out_dict(self, sha1): # tmp_dict_out = dict() # tmp_dict_out[key_str] = sha1 # tmp_dict_out[str_processed] = False # # This should be used to mark this image as processed by this combination detector/featurizer # # e.g. pushing at least a column 'dlib_feat_dlib_face_facefound' with value False # # features should be pushed as B64 encoded in a column 'dlib_feat_dlib_face_BBOX_SCORE', # # BBOX order should be: left, top, right, bottom. See workflows/push_facedata_to_hbase.py # #tmp_dict_out['face_extr_prefix'] = self.face_extr_prefix # tmp_dict_out['detector_type'] = self.detector_type # tmp_dict_out['featurizer_type'] = self.featurizer_type # return tmp_dict_out # # # # This could also be done in a separate process with a single consumer from self.face_out_topic # # Similarly to the full_image_updater... # # I have had some issues with many connections opened with happybase to the same HBase instance... 
# def build_hbase_dict(self, list_faces_msg): # dict_rows = dict() # for msg_str in list_faces_msg: # msg = json.loads(msg_str) # dict_rows[msg[key_str]] = dict() # dict_rows[msg[key_str]][self.face_extr_prefix+"_"+str_processed] = str(int(msg[str_processed])) # if face_bbox_str in msg and face_feat_str in msg: # dict_rows[msg[key_str]][self.face_extr_prefix + "_" + self.get_bbox_str(msg[face_bbox_str])] = msg[face_feat_str] # return dict_rows # # # def process_one(self, full_msg): # start_process = time.time() # # msg is coming as json with fields: sha1, s3_url, img_infos, img_buffer # # see 'build_image_msg' of KafkaImageProcessor # # buffer is B64 encoded and should be decoded with get_buffer_from_B64 # try: # self.print_stats(full_msg) # msg = json.loads(full_msg.value) # #print msg # # # Check if sha1 is in DB with column 'ext:'+feature_type,detector_type+'_processed' set for row msg['sha1'] # check_column = self.indexer.get_check_column(self.face_extr_prefix) # rows = self.indexer.get_columns_from_sha1_rows([msg[key_str]], [check_column], families={'info': dict(), 'ext': dict()}) # if rows: # # we should skip # self.toc_process_skip(start_process) # return # # # Detect faces # list_faces_msg = [] # img, dets = self.detector.detect_from_buffer_noinfos(get_buffer_from_B64(msg['img_buffer']), up_sample=1) # if dets: # # For each detected face... 
# for one_face in dets: # # Compute face feature # one_feat = self.featurizer.featurize(img, one_face) # # Build out dictionary # tmp_dict_out = self.init_out_dict(msg[key_str]) # tmp_dict_out[str_processed] = True # tmp_dict_out[face_bbox_str] = one_face # # base64 encode the feature to be dumped # tmp_dict_out[face_feat_str] = normfeatB64encode(one_feat) # # Dump as JSON # list_faces_msg.append(json.dumps(tmp_dict_out).encode('utf-8')) # else: # # Push one default dict with 'facefound' set to False # tmp_dict_out = self.init_out_dict(msg[key_str]) # list_faces_msg.append(json.dumps(tmp_dict_out).encode('utf-8')) # # # Push to face_out_topic # for face_msg in list_faces_msg: # self.producer.send(self.face_out_topic, face_msg) # # # Should we push to DB here too? Using push_dict_rows # self.indexer.push_dict_rows(self.build_hbase_dict(list_faces_msg), table_name=self.indexer.table_sha1infos_name) # # self.toc_process_ok(start_process) # except Exception as inst: # self.toc_process_failed(start_process) # exc_type, exc_obj, exc_tb = sys.exc_info() # fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1] # raise type(inst)("{} {}:{}, {}".format(self.pp, fname, exc_tb.tb_lineno, inst)) # # class DaemonKafkaFaceProcessor(multiprocessing.Process): # # daemon = True # # def __init__(self, conf, prefix=default_prefix): # super(DaemonKafkaFaceProcessor, self).__init__() # self.conf = conf # self.prefix = prefix # # def run(self): # # try: # print "Starting worker KafkaFaceProcessor.{}".format(self.pid) # kp = KafkaFaceProcessor(self.conf, prefix=self.prefix, pid=self.pid) # for msg in kp.consumer: # kp.process_one(msg) # except Exception as inst: # exc_type, exc_obj, exc_tb = sys.exc_info() # fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1] # print "KafkaFaceProcessor.{} died (In {}:{}, {}:{})".format(self.pid, fname, exc_tb.tb_lineno, type(inst), inst)
winderica/KanColleSource
functions/1222.js
// Webpack-style generated module wrapper (minified bundle chunk — kept verbatim).
// Defines and exports `DutyDataHolder` on the module's exports object `e`.
// Dependencies: i(15) = ObjUtil helpers, i(417) = DutyModel_ class.
// DutyDataHolder wraps a raw API payload (`this._o`) and exposes:
//   - selected_type / selected_page_no ("api_disp_page") / page_max ("api_page_count") getters,
//   - update(type, payload): rebuilds `_models` from "api_list", skipping sentinel entries
//     that are the number -1 (placeholder rows), pairing each with "api_c_list",
//   - getExecCount(): reads "api_exec_count",
//   - hasComplete(): true when "api_completed_kind" == 1 (NOTE(review): the doubled
//     comparison `1 == (1 == getNumber(...))` relies on JS `1 == true` coercion — looks
//     accidental but is behavior-preserving; confirm before simplifying) or when any
//     "api_c_list" entry has status == 3.
const function1222 = function (t, e, i) { "use strict"; Object.defineProperty(e, "__esModule", { value: !0 }); var n = i(15), o = i(417), r = function () { function t() { this._models = [] } return Object.defineProperty(t.prototype, "selected_type", { get: function () { return this._selected_type }, enumerable: !0, configurable: !0 }), Object.defineProperty(t.prototype, "selected_page_no", { get: function () { return n.ObjUtil.getNumber(this._o, "api_disp_page", 1) }, enumerable: !0, configurable: !0 }), Object.defineProperty(t.prototype, "page_max", { get: function () { return n.ObjUtil.getNumber(this._o, "api_page_count", 0) }, enumerable: !0, configurable: !0 }), Object.defineProperty(t.prototype, "models", { get: function () { return this._models }, enumerable: !0, configurable: !0 }), t.prototype.update = function (t, e) { this._selected_type = t, this._o = e, this._models = []; var i = n.ObjUtil.getObjectArray(this._o, "api_list"), r = n.ObjUtil.getObjectArray(this._o, "api_c_list"); if (null != i) for (var s = 0, a = i; s < a.length; s++) { var _ = a[s]; "number" == typeof _ && -1 == _ || this._models.push(new o.DutyModel_(_, r)) } }, t.prototype.getExecCount = function () { return n.ObjUtil.getNumber(this._o, "api_exec_count") }, t.prototype.hasComplete = function () { if (1 == (1 == n.ObjUtil.getNumber(this._o, "api_completed_kind"))) return !0; var t = n.ObjUtil.getObjectArray(this._o, "api_c_list"); if (null != t) for (var e = 0, i = t; e < i.length; e++) { var r = i[e], s = new o.DutyModel_(r, t); if (3 == s.status) return !0 } return !1 }, t }(); e.DutyDataHolder = r }
mespper/go-babylonjs
cmd/docs2go/writer.go
package main import ( "bytes" "fmt" "go/format" "io/ioutil" "path/filepath" "regexp" "strings" "text/template" ) const ( prefix = "zzz_" ) var ( goTmpl = template.Must(template.New("source").Funcs(funcMap).Parse(source)) funcMap = map[string]interface{}{ "constructorParameterGoList": constructorParameterGoList, "constructorParameterJSList": constructorParameterJSList, "formatParents": formatParents, "fromJSObject": fromJSObject, "hasSuffix": strings.HasSuffix, "trimSuffix": strings.TrimSuffix, "methodParameterGoList": methodParameterGoList, "methodParameterJSList": methodParameterJSList, "propertyParameterGoList": propertyParameterGoList, "propertyParameterJSList": propertyParameterJSList, "receiver": receiver, "toLower": toLower, } ) func (c *classes) writeGo(destDir string) error { for k, v := range c.m { if strings.Contains(k, "&") { logf("Skipping %v", k) continue } var buf bytes.Buffer if err := goTmpl.Execute(&buf, v); err != nil { return fmt.Errorf("%v: template failed: %v", k, err) } b := buf.Bytes() for _, v := range regexpOverrides[k] { b = v.re.ReplaceAll(b, []byte(v.repl)) } filename := filepath.Join(destDir, prefix+k+".go") clean, err := format.Source(b) if err != nil { return fmt.Errorf("%v: unable to format: %v\n%v", k, err, string(b)) } logf("Writing %v...", filename) if err := ioutil.WriteFile(filename, clean, 0644); err != nil { return err } } return nil } func formatParents(parents []string) string { var lines []string for _, v := range parents { lines = append(lines, "*"+v) } if len(lines) == 0 { lines = append(lines, "p js.Value") } lines = append(lines, "ctx js.Value") return strings.Join(lines, "\n") } func fromJSObject(parents []string) string { if len(parents) == 0 { return "p: p, ctx: ctx" } var wraps []string for _, p := range parents { wraps = append(wraps, fmt.Sprintf("%v: %vFromJSObject(p, ctx)", p, p)) } wraps = append(wraps, "ctx: ctx") return strings.Join(wraps, ", ") } func receiver(name string) string { return 
strings.ToLower(name[0:1]) } func toLower(s string) string { return strings.ToLower(s) } func constructorParameterGoList(key string, s *Signature) string { var params []string for i, name := range s.GoParamsName { params = append(params, fmt.Sprintf("%v %v", name, s.GoParamsType[i])) } if s.HasOpts { params = append(params, fmt.Sprintf("opts *%vOpts", key)) } return strings.Join(params, ", ") } func methodParameterGoList(name, key string, s *Signature) string { var params []string for i, name := range s.GoParamsName { params = append(params, fmt.Sprintf("%v %v", name, s.GoParamsType[i])) } if s.HasOpts { params = append(params, fmt.Sprintf("opts *%v%vOpts", name, key)) } return strings.Join(params, ", ") } func propertyParameterGoList(key string, s *Signature) string { var params []string for i, name := range s.GoParamsName { params = append(params, fmt.Sprintf("%v %v", name, s.GoParamsType[i])) } if s.HasOpts { params = append(params, fmt.Sprintf("opts *%vOpts", key)) } return strings.Join(params, ", ") } func constructorParameterJSList(key string, s *Signature) []string { return s.JSParams } func methodParameterJSList(key string, s *Signature) string { logf("constructorParameterJSList: method: key=%v", key) return strings.Join(s.JSParams, ", ") } func propertyParameterJSList(key string, s *Signature) string { logf("constructorParameterJSList: property: key=%v", key) return strings.Join(s.JSParams, ", ") } type reOverride struct { re *regexp.Regexp repl string } var regexpOverrides = map[string][]reOverride{} const source = `// Code generated by docs2go. DO NOT EDIT. package babylon import ( "syscall/js" ) {{$root := .}} {{$name := .Name}} {{$apiDocsBaseURL := .DocsBaseURL}} // {{$name}} represents a babylon.js {{$name}}. // {{.Summary}} {{- with .Description}} // // {{.}} {{- end -}}{{- with .SeeURL}} // // See: {{.}} {{- end}} type {{$name}} struct{ {{.Parents | formatParents}} } // JSObject returns the underlying js.Value. 
func ({{$name | receiver}} *{{$name}}) JSObject() js.Value { return {{$name | receiver}}.p } // {{$name}} returns a {{$name}} JavaScript class. func ({{.ConstructorNamespaceReceiverName}} {{.ConstructorNamespaceReceiverType}}) {{$name}}() *{{$name}} { p := {{.ConstructorNamespaceReceiverName}}.ctx.Get("{{$name}}") return {{$name}}FromJSObject(p, {{.ConstructorNamespaceReceiverName}}.ctx) } // {{$name}}FromJSObject returns a wrapped {{$name}} JavaScript class. func {{$name}}FromJSObject(p js.Value, ctx js.Value) *{{$name}} { return &{{$name}}{ {{.Parents | fromJSObject}} } } // {{$name}}ArrayToJSArray returns a JavaScript Array for the wrapped array. func {{$name}}ArrayToJSArray(array []*{{$name}}) []interface{} { var result []interface{} for _, v := range array { result = append(result, v.JSObject()) } return result } {{range $key, $value := .ConstructorNames}}{{if $value.HasOpts}} // {{$key}}Opts contains optional parameters for {{$key}}. type {{$key}}Opts struct { {{range $index, $optType := $value.GoOptsType -}} {{index $value.GoOptsName $index}} {{$optType}} {{end}} } {{end}} // {{$key}} returns a new {{$name}} object. 
// // {{$apiDocsBaseURL}}.{{$name | toLower}}#constructor func ({{$value.ConstructorNamespaceReceiverName}} {{$value.ConstructorNamespaceReceiverType}}) {{$key}}({{constructorParameterGoList $key $value}}) *{{$name}} { {{if $value.HasOpts}} if opts == nil { opts = &{{$key}}Opts{} } {{end}} args := make([]interface{}, 0, {{$value.JSParams | len}} + {{$value.JSOpts | len}}) {{range $index, $element := $value.JSParams -}} args = append(args, {{$element}}) {{end}} {{range $index, $element := $value.JSOpts -}}{{if index $value.GoOptsType $index | eq "js.Value"}}args = append(args, opts.{{index $value.GoOptsName $index}}) {{else -}} if opts.{{index $value.GoOptsName $index}} == nil { args = append(args, js.Undefined()) } else { args = append(args, {{if index $value.OptsNeedsArrayHelper $index}}{{index $value.OptsNeedsArrayHelper $index}}{{else}}{{$element}}{{end}}) } {{end}}{{end}} p := {{$value.ConstructorNamespaceReceiverName}}.ctx.Get("{{$name}}").New(args...) return {{$name}}FromJSObject(p, {{$value.ConstructorNamespaceReceiverName}}.ctx) } {{end}} {{range $key, $value := .MethodNames}}{{if $value.HasOpts}} // {{$name}}{{$key}}Opts contains optional parameters for {{$name}}.{{$key}}. type {{$name}}{{$key}}Opts struct { {{range $index, $optType := $value.GoOptsType -}} {{index $value.GoOptsName $index}} {{$optType}} {{end}} } {{end}} // {{$key}} calls the {{$key}} method on the {{$name}} object. 
// // {{$apiDocsBaseURL}}.{{$name | toLower}}#{{$key | toLower}} func ({{$name | receiver}} *{{$name}}) {{$key}}({{methodParameterGoList $name $key $value}}) {{$value.GoReturnType}} { {{if $value.HasOpts}} if opts == nil { opts = &{{$name}}{{$key}}Opts{} } {{end}} {{if or $value.JSParams $value.JSOpts}}args := make([]interface{}, 0, {{$value.JSParams | len}} + {{$value.JSOpts | len}}){{end}} {{range $index, $element := $value.JSParams -}} {{if index $value.NeedsArrayHelper $index}} args = append(args, {{index $value.NeedsArrayHelper $index}}) {{else if hasSuffix $element ".JSObject()"}} if {{trimSuffix $element ".JSObject()"}} == nil { args = append(args, js.Null()) } else { args = append(args, {{$element}}) } {{else}} args = append(args, {{$element}}) {{end}} {{end}} {{range $index, $element := $value.JSOpts -}}{{if index $value.GoOptsType $index | eq "js.Value"}}args = append(args, opts.{{index $value.GoOptsName $index}}) {{else -}} if opts.{{index $value.GoOptsName $index}} == nil { args = append(args, js.Undefined()) } else { args = append(args, {{if index $value.OptsNeedsArrayHelper $index}}{{index $value.OptsNeedsArrayHelper $index}}{{else}}{{$element}}{{end}}) } {{end}}{{end}} {{if $value.GoReturnType}}retVal := {{end}}{{$name | receiver}}.p.Call("{{$value.JSName}}"{{if or $value.JSParams $value.JSOpts}}, args...{{end}}){{if $value.GoReturnType}} {{$value.GoReturnStatement}}{{end}} } {{end}} {{range $key, $value := .PropertyNames}} // {{$key}} returns the {{$key}} property of class {{$name}}. // // {{$apiDocsBaseURL}}.{{$name | toLower}}#{{$key | toLower}} func ({{$name | receiver}} *{{$name}}) {{$key}}() {{$value.GoReturnType}} { retVal := {{$name | receiver}}.p.Get("{{$value.JSName}}") {{$value.GoReturnStatement}} } {{if $value.WriteSetter}}// Set{{$key}} sets the {{$key}} property of class {{$name}}. 
// // {{$apiDocsBaseURL}}.{{$name | toLower}}#{{$key | toLower}} func ({{$name | receiver}} *{{$name}}) Set{{$key}}({{propertyParameterGoList $key $value}}) *{{$name}} { {{$name | receiver}}.p.Set("{{$value.JSName}}", {{index $value.JSParams 0}}) return {{$name | receiver}} } {{end}}{{end}} `
YRIDZE/Bicycle-delivery-service
pkg/handlers/order_handler.go
package handlers import ( "encoding/json" "net/http" "github.com/YRIDZE/Bicycle-delivery-service/conf" "github.com/YRIDZE/Bicycle-delivery-service/pkg/models" "github.com/YRIDZE/Bicycle-delivery-service/pkg/models/db_repository" "github.com/YRIDZE/Bicycle-delivery-service/pkg/models/requests" "github.com/YRIDZE/Bicycle-delivery-service/pkg/services" yolo_log "github.com/YRIDZE/yolo-log" ) type OrderHandler struct { cfg *conf.ConfigToken logger *yolo_log.Logger services *services.OrderService } func NewOrderHandler(cfg *conf.ConfigToken, logger *yolo_log.Logger, repo db_repository.OrderRepositoryI) *OrderHandler { return &OrderHandler{ cfg: cfg, logger: logger, services: services.NewOrderService(repo), } } func (h *OrderHandler) RegisterRoutes(r *http.ServeMux, appH *AppHandlers) { auth := appH.UserHandler.AuthMiddleware meth := appH.MethodDispatcher r.Handle("/createOrder", auth(meth(Methods{post: http.HandlerFunc(h.Create)}))) r.Handle("/getOrders", auth(meth(Methods{get: http.HandlerFunc(h.GetAll)}))) } func (h *OrderHandler) Create(w http.ResponseWriter, req *http.Request) { order := new(requests.OrderRequest) if err := json.NewDecoder(req.Body).Decode(&order); err != nil { h.logger.Error(err.Error()) http.Error(w, "something went wrong", http.StatusBadRequest) return } order.UserID = req.Context().Value("user").(*models.User).ID if err := order.Validate(); err != nil { h.logger.Error(err.Error()) requests.ValidationErrorResponse(w, err) return } o, err := h.services.Create(order) if err != nil { h.logger.Error(err.Error()) http.Error(w, "invalid data", http.StatusUnauthorized) return } w.WriteHeader(http.StatusCreated) json.NewEncoder(w).Encode(&models.OrderResponse{ID: o.ID, UserID: o.UserID, Address: o.Address, Status: o.Address, Products: o.Products}) h.logger.Infof("order %d successfully created by User %d", o.ID, o.UserID) } func (h *OrderHandler) GetAll(w http.ResponseWriter, req *http.Request) { userID := req.Context().Value("user").(*models.User).ID o, 
err := h.services.GetAll(userID) if err != nil { h.logger.Error(err.Error()) http.Error(w, "something went wrong", http.StatusInternalServerError) return } var resp []models.OrderResponse for _, x := range *o { resp = append( resp, models.OrderResponse{ ID: x.ID, UserID: x.UserID, Address: x.Address, PhoneNumber: x.PhoneNumber, CustomerName: x.CustomerName, CustomerLastname: x.CustomerLastname, Status: x.Status, Products: x.Products, CreatedAt: x.CreatedAt, }, ) } w.WriteHeader(http.StatusOK) json.NewEncoder(w).Encode(resp) h.logger.Infof("user %d fetched orders", userID) }
Tarekbouamer/Image-Retrieval-for-Image-Based-Localization
cirtorch/modules/utils.py
<filename>cirtorch/modules/utils.py # for some models, we have imported features (convolutions) from caffe # because the image retrieval performance is higher for them # pre-computed localFeatures pca whitening that can be applied before the pooling layer L_WHITENING = { 'resnet101' : 'http://cmp.felk.cvut.cz/cnnimageretrieval/data/whiten/retrieval-SfM-120k/retrieval-SfM-120k-resnet101-lwhiten-9f830ef.pth', # no pre l2 norm # 'resnet101' : 'http://cmp.felk.cvut.cz/cnnimageretrieval/data/whiten/retrieval-SfM-120k/retrieval-SfM-120k-resnet101-lwhiten-da5c935.pth', # with pre l2 norm } # possible globalFeatures pooling layers, each on of these can be made regional # pre-computed regional whitening, for most commonly used architectures and pooling methods R_WHITENING = { 'alexnet-gem-r' : 'http://cmp.felk.cvut.cz/cnnimageretrieval/data/whiten/retrieval-SfM-120k/retrieval-SfM-120k-alexnet-gem-r-rwhiten-c8cf7e2.pth', 'vgg16-gem-r' : 'http://cmp.felk.cvut.cz/cnnimageretrieval/data/whiten/retrieval-SfM-120k/retrieval-SfM-120k-vgg16-gem-r-rwhiten-19b204e.pth', 'resnet101-mac-r' : 'http://cmp.felk.cvut.cz/cnnimageretrieval/data/whiten/retrieval-SfM-120k/retrieval-SfM-120k-resnet101-mac-r-rwhiten-7f1ed8c.pth', 'resnet101-gem-r' : 'http://cmp.felk.cvut.cz/cnnimageretrieval/data/whiten/retrieval-SfM-120k/retrieval-SfM-120k-resnet101-gem-r-rwhiten-adace84.pth', } # pre-computed final (globalFeatures) whitening, for most commonly used architectures and pooling methods WHITENING = { 'alexnet-gem' : 'http://cmp.felk.cvut.cz/cnnimageretrieval/data/whiten/retrieval-SfM-120k/retrieval-SfM-120k-alexnet-gem-whiten-454ad53.pth', 'alexnet-gem-r' : 'http://cmp.felk.cvut.cz/cnnimageretrieval/data/whiten/retrieval-SfM-120k/retrieval-SfM-120k-alexnet-gem-r-whiten-4c9126b.pth', 'vgg16-gem' : 'http://cmp.felk.cvut.cz/cnnimageretrieval/data/whiten/retrieval-SfM-120k/retrieval-SfM-120k-vgg16-gem-whiten-eaa6695.pth', 'vgg16-gem-r' : 
'http://cmp.felk.cvut.cz/cnnimageretrieval/data/whiten/retrieval-SfM-120k/retrieval-SfM-120k-vgg16-gem-r-whiten-83582df.pth', 'resnet50-gem' : 'http://cmp.felk.cvut.cz/cnnimageretrieval/data/whiten/retrieval-SfM-120k/retrieval-SfM-120k-resnet50-gem-whiten-f15da7b.pth', 'resnet101-mac-r' : 'http://cmp.felk.cvut.cz/cnnimageretrieval/data/whiten/retrieval-SfM-120k/retrieval-SfM-120k-resnet101-mac-r-whiten-9df41d3.pth', 'resnet101-gem' : 'http://cmp.felk.cvut.cz/cnnimageretrieval/data/whiten/retrieval-SfM-120k/retrieval-SfM-120k-resnet101-gem-whiten-22ab0c1.pth', 'resnet101-gem-r' : 'http://cmp.felk.cvut.cz/cnnimageretrieval/data/whiten/retrieval-SfM-120k/retrieval-SfM-120k-resnet101-gem-r-whiten-b379c0a.pth', 'resnet101-gemmp' : 'http://cmp.felk.cvut.cz/cnnimageretrieval/data/whiten/retrieval-SfM-120k/retrieval-SfM-120k-resnet101-gemmp-whiten-770f53c.pth', 'resnet152-gem' : 'http://cmp.felk.cvut.cz/cnnimageretrieval/data/whiten/retrieval-SfM-120k/retrieval-SfM-120k-resnet152-gem-whiten-abe7b93.pth', 'densenet121-gem' : 'http://cmp.felk.cvut.cz/cnnimageretrieval/data/whiten/retrieval-SfM-120k/retrieval-SfM-120k-densenet121-gem-whiten-79e3eea.pth', 'densenet169-gem' : 'http://cmp.felk.cvut.cz/cnnimageretrieval/data/whiten/retrieval-SfM-120k/retrieval-SfM-120k-densenet169-gem-whiten-6b2a76a.pth', 'densenet201-gem' : 'http://cmp.felk.cvut.cz/cnnimageretrieval/data/whiten/retrieval-SfM-120k/retrieval-SfM-120k-densenet201-gem-whiten-22ea45c.pth', } # output dimensionality for supported architectures OUTPUT_DIM = { 'alexnet' : 256, 'vgg11' : 512, 'vgg13' : 512, 'vgg16' : 512, 'vgg19' : 512, 'resnet18' : 512, 'resnet34' : 512, 'resnet50' : 2048, 'resnet101' : 2048, 'resnet152' : 2048, 'densenet121' : 1024, 'densenet169' : 1664, 'densenet201' : 1920, 'densenet161' : 2208, # largest densenet 'squeezenet1_0' : 512, 'squeezenet1_1' : 512, } OUTPUT_DIM = { 'alexnet' : 256, 'vgg11' : 512, 'vgg13' : 512, 'vgg16' : 512, 'vgg19' : 512, 'resnet18' : 512, 'resnet34' : 512, 'resnet50' : 
2048, 'resnet101' : 2048, 'resnet152' : 2048, 'densenet121' : 1024, 'densenet161' : 2208, 'densenet169' : 1664, 'densenet201' : 1920, 'densenet264' : 2688, # largest densenet 'squeezenet1_0' : 512, 'squeezenet1_1' : 512, }
meshake/metals
metals/src/main/scala/scala/meta/internal/metals/BuildTargetInfo.scala
<reponame>meshake/metals<filename>metals/src/main/scala/scala/meta/internal/metals/BuildTargetInfo.scala
package scala.meta.internal.metals

import java.nio.file.Path

import scala.collection.mutable.ListBuffer

import scala.meta.internal.metals.MetalsEnrichments._
import scala.meta.internal.mtags.URIEncoderDecoder

import ch.epfl.scala.bsp4j.BuildTarget
import ch.epfl.scala.bsp4j.BuildTargetIdentifier

/**
 * Renders a plain-text, human-readable report about a build target:
 * tags, languages, BSP capabilities, dependencies, compiler options,
 * source directories, class directories and classpaths.
 */
class BuildTargetInfo(buildTargets: BuildTargets) {

  /** Looks up a target by display name and renders its detail report. */
  def buildTargetDetails(targetName: String): String = {
    buildTargets.all
      .filter(_.getDisplayName == targetName)
      .map(_.getId())
      .headOption
      .map(buildTargetDetail)
      .getOrElse(s"Build target $targetName not found")
  }

  // Builds the full report line-by-line into `output`, then joins with the
  // platform line separator.
  private def buildTargetDetail(
      targetId: BuildTargetIdentifier
  ): String = {
    val commonInfo = buildTargets.info(targetId)
    val javaInfo = buildTargets.javaTarget(targetId)
    val scalaInfo = buildTargets.scalaTarget(targetId)
    val output = ListBuffer[String]()
    // Generic BSP metadata shared by all target kinds.
    commonInfo.foreach(info => {
      output += "Target"
      output += s" ${info.getDisplayName}"
      if (!info.getTags.isEmpty)
        output ++= getSection("Tags", info.getTags.asScala.toList)
      if (!info.getLanguageIds.isEmpty)
        output ++= getSection("Languages", info.getLanguageIds.asScala.toList)
      val capabilities =
        translateCapability("Debug", info.getCapabilities().getCanDebug) ::
          translateCapability("Run", info.getCapabilities().getCanRun) ::
          translateCapability("Test", info.getCapabilities().getCanTest) ::
          translateCapability(
            "Compile",
            info.getCapabilities().getCanCompile
          ) :: Nil
      output ++= getSection("Capabilities", capabilities)
      val dependencies = getDependencies(info)
      if (dependencies.nonEmpty)
        output ++= getSection("Dependencies", dependencies)
      val dependentTargets = getDependentTargets(info)
      if (dependentTargets.nonEmpty)
        output ++= getSection("Dependent Targets", dependentTargets)
    })
    // Java-specific section: javac options with doc links.
    javaInfo.foreach(info => {
      output += ""
      output += "Javac Options"
      output += " compile - https://docs.oracle.com/en/java/javase/17/docs/specs/man/javac.html#options"
      output += " runtime - https://docs.oracle.com/en/java/javase/17/docs/specs/man/java.html#standard-options-for-java"
      output += " "
      if (info.options.nonEmpty)
        info.options.foreach(f =>
          output += s" ${if (f.isEmpty) "[BLANK]" else f}"
        )
      else
        output += " [BLANK]"
    })
    // Scala-specific section: scalac options (doc link depends on major
    // version) plus version/platform/JVM info.
    scalaInfo.foreach(info => {
      output += ""
      output += "Scalac Options"
      if (info.scalaBinaryVersion.startsWith("3"))
        output += " compile - https://docs.scala-lang.org/scala3/guides/migration/options-new.html"
      else
        output += " compile - https://docs.scala-lang.org/overviews/compiler-options/index.html#Standard_Settings"
      output += " "
      if (info.options.nonEmpty)
        info.options.foreach(scalacOption =>
          output += s" ${if (scalacOption.isEmpty) "[BLANK]" else scalacOption}"
        )
      else
        output += " [BLANK]"
      output ++= getSection("Scala Version", List(info.scalaVersion))
      output ++= getSection(
        "Scala Binary Version",
        List(info.scalaBinaryVersion)
      )
      output ++= getSection("Scala Platform", List(info.scalaPlatform))
      info.jvmVersion.foreach(jvmVersion =>
        output ++= getSection("JVM Version", List(jvmVersion))
      )
      info.jvmHome.foreach(jvmHome =>
        output ++= getSection("JVM Home", List(jvmHome))
      )
    })
    commonInfo.foreach(info => {
      output ++= getSection(
        "Base Directory",
        List(URIEncoderDecoder.decode(info.baseDirectory))
      )
      output ++= getSection("Source Directories", getSources(info))
    })
    // Collapse the Java/Scala class-directory and classpath sections into a
    // single section when they are identical.
    val scalaClassesDir = scalaInfo.map(_.classDirectory)
    val javaClassesDir = javaInfo.map(_.classDirectory)
    if (scalaClassesDir == javaClassesDir)
      scalaClassesDir.foreach(classesDir =>
        output ++= getSection("Classes Directory", List(classesDir))
      )
    else {
      javaClassesDir.foreach(classesDir =>
        output ++= getSection("Java Classes Directory", List(classesDir))
      )
      scalaClassesDir.foreach(classesDir =>
        output ++= getSection("Scala Classes Directory", List(classesDir))
      )
    }
    val scalaClassPath = scalaInfo.map(_.fullClasspath).getOrElse(Nil)
    val javaClassPath = javaInfo.map(_.fullClasspath).getOrElse(Nil)
    if (scalaClassPath == javaClassPath)
      output ++= getSection("Classpath", getClassPath(scalaClassPath))
    else {
      output ++= getSection("Java Classpath", getClassPath(javaClassPath))
      output ++= getSection("Scala Classpath", getClassPath(scalaClassPath))
    }
    output += ""
    output.mkString(System.lineSeparator())
  }

  // Renders a titled section: blank line, title, then indented entries
  // (or a NONE placeholder).
  private def getSection(
      sectionName: String,
      sectionText: List[_]
  ): List[String] =
    "" :: sectionName :: {
      if (sectionText.isEmpty) List(" NONE")
      else sectionText.map(text => s" $text")
    }

  // One report line per BSP capability, flagging unsupported ones.
  private def translateCapability(
      capability: String,
      hasCapability: Boolean
  ): String =
    if (hasCapability) s" $capability" else s" $capability <- NOT SUPPORTED"

  // True when a "-sources.jar" companion exists on disk for the given jar.
  private def jarHasSource(jarName: String): Boolean = {
    val sourceJarName = jarName.replace(".jar", "-sources.jar")
    buildTargets
      .sourceJarFile(sourceJarName)
      .exists(_.toFile.exists())
  }

  // Formats one classpath entry: padded name, then a MISSING/NO SOURCE
  // status marker, then the full path for plain files.
  private def getSingleClassPathInfo(
      path: Path,
      shortPath: Path,
      maxFilenameSize: Int
  ): String = {
    val filename = shortPath.toString()
    val padding = " " * (maxFilenameSize - filename.size)
    val status = if (path.toFile.exists) {
      val blankWarning = " " * 9
      if (path.toFile().isDirectory() || jarHasSource(filename))
        blankWarning
      else
        "NO SOURCE"
    } else " MISSING "
    val fullName = if (path.toFile.isFile) s" $path" else ""
    s" $filename$padding $status$fullName"
  }

  // Renders a whole classpath, aligning the status column on the longest
  // (shortened) entry name.
  private def getClassPath(
      classPath: List[Path]
  ): List[String] = {
    // Files are shown by file name only; directories keep their full path.
    def shortenPath(path: Path): Path = {
      if (path.toFile.isFile)
        path.getFileName()
      else
        path
    }
    if (classPath.nonEmpty) {
      val maxFilenameSize =
        classPath.map(shortenPath(_).toString.length()).max + 5
      classPath.map(path =>
        getSingleClassPathInfo(
          path,
          shortenPath(path),
          maxFilenameSize
        )
      )
    } else
      List(" NONE")
  }

  // Display names of the targets this target depends on.
  private def getDependencies(target: BuildTarget): List[String] = {
    target.getDependencies.asScala
      .map(f =>
        buildTargets
          .info(f)
          .map(_.getDisplayName())
          .getOrElse("Unknown target")
      )
      .toList
  }

  // Display names of the targets that depend on this target.
  private def getDependentTargets(target: BuildTarget): List[String] = {
    buildTargets.all
      .filter(dependentTarget =>
        dependentTarget.getDependencies.contains(target.getId())
      )
      .map(_.getDisplayName())
      .toList
  }

  // Sorted source directories of the target, marking generated ones.
  private def getSources(target: BuildTarget): List[String] = {
    buildTargets.sourceItemsToBuildTargets
      .filter(_._2.iterator.asScala.contains(target.getId()))
      .toList
      .map { case (path, _) =>
        val generated = buildTargets.checkIfGeneratedDir(path)
        s"$path${if (generated) " (generated)" else ""}"
      }
      .sorted
  }
}
HighSchoolHacking/GLS-Draft
test/integration/StandaloneFunctionDeclareStart/private one parameter.java
// private static String abc(Integer def) { //
mattkantor/basic-flask-app
venv/lib/python3.6/site-packages/celery/tests/app/test_loaders.py
<gh_stars>1-10
from __future__ import absolute_import

import os
import sys
import warnings

from celery import loaders
from celery.exceptions import (
    NotConfigured,
)
from celery.loaders import base
from celery.loaders import default
from celery.loaders.app import AppLoader
from celery.utils.imports import NotAPackage
from celery.utils.mail import SendmailWarning

from celery.tests.case import (
    AppCase, Case, Mock, depends_on_current_app, patch, with_environ,
)


class DummyLoader(base.BaseLoader):
    """Minimal loader returning a fixed configuration, used as the fixture
    for the BaseLoader tests below."""

    def read_configuration(self):
        return {'foo': 'bar',
                'CELERY_IMPORTS': ('os', 'sys')}


class test_loaders(AppCase):
    """Tests for the module-level loader lookup helpers."""

    def test_get_loader_cls(self):
        self.assertEqual(loaders.get_loader_cls('default'),
                         default.Loader)

    @depends_on_current_app
    def test_current_loader(self):
        # current_loader() is deprecated; it must still resolve to the
        # current app's loader while emitting the pending-deprecation warning.
        with self.assertPendingDeprecation():
            self.assertIs(loaders.current_loader(), self.app.loader)

    @depends_on_current_app
    def test_load_settings(self):
        # load_settings() is deprecated; same contract as above for app.conf.
        with self.assertPendingDeprecation():
            self.assertIs(loaders.load_settings(), self.app.conf)


class test_LoaderBase(AppCase):
    """Behavioral tests for BaseLoader, exercised through DummyLoader."""

    # Fixture payloads for the mail_admins tests.
    message_options = {'subject': 'Subject',
                       'body': 'Body',
                       'sender': '<EMAIL>',
                       'to': '<EMAIL>'}
    server_options = {'host': 'smtp.x.com',
                      'port': 1234,
                      'user': 'x',
                      'password': '<PASSWORD>',
                      'timeout': 3}

    def setup(self):
        self.loader = DummyLoader(app=self.app)

    def test_handlers_pass(self):
        # Default handlers are no-ops; they just must not raise.
        self.loader.on_task_init('foo.task', 'feedface-cafebabe')
        self.loader.on_worker_init()

    def test_now(self):
        self.assertTrue(self.loader.now(utc=True))
        self.assertTrue(self.loader.now(utc=False))

    def test_read_configuration_no_env(self):
        # An unset/unknown env var name yields an empty configuration.
        self.assertDictEqual(
            base.BaseLoader(app=self.app).read_configuration(
                'FOO_X_S_WE_WQ_Q_WE'),
            {},
        )

    def test_autodiscovery(self):
        with patch('celery.loaders.base.autodiscover_tasks') as auto:
            auto.return_value = [Mock()]
            auto.return_value[0].__name__ = 'moo'
            self.loader.autodiscover_tasks(['A', 'B'])
            self.assertIn('moo', self.loader.task_modules)
            # Clean up the shared set so other tests are unaffected.
            self.loader.task_modules.discard('moo')

    def test_import_task_module(self):
        self.assertEqual(sys, self.loader.import_task_module('sys'))

    def test_init_worker_process(self):
        self.loader.on_worker_process_init()
        m = self.loader.on_worker_process_init = Mock()
        self.loader.init_worker_process()
        m.assert_called_with()

    def test_config_from_object_module(self):
        # A string argument must be resolved via import_from_cwd.
        self.loader.import_from_cwd = Mock()
        self.loader.config_from_object('module_name')
        self.loader.import_from_cwd.assert_called_with('module_name')

    def test_conf_property(self):
        # First access populates the cache (_conf); repeated access reuses it.
        self.assertEqual(self.loader.conf['foo'], 'bar')
        self.assertEqual(self.loader._conf['foo'], 'bar')
        self.assertEqual(self.loader.conf['foo'], 'bar')

    def test_import_default_modules(self):
        modnames = lambda l: [m.__name__ for m in l]
        self.app.conf.CELERY_IMPORTS = ('os', 'sys')
        self.assertEqual(
            sorted(modnames(self.loader.import_default_modules())),
            sorted(modnames([os, sys])),
        )

    def test_import_from_cwd_custom_imp(self):

        def imp(module, package=None):
            imp.called = True
        imp.called = False

        self.loader.import_from_cwd('foo', imp=imp)
        self.assertTrue(imp.called)

    @patch('celery.utils.mail.Mailer._send')
    def test_mail_admins_errors(self, send):
        # Sending failures: warn when fail_silently, propagate otherwise.
        send.side_effect = KeyError()
        opts = dict(self.message_options, **self.server_options)

        with self.assertWarnsRegex(SendmailWarning, r'KeyError'):
            self.loader.mail_admins(fail_silently=True, **opts)

        with self.assertRaises(KeyError):
            self.loader.mail_admins(fail_silently=False, **opts)

    @patch('celery.utils.mail.Mailer._send')
    def test_mail_admins(self, send):
        opts = dict(self.message_options, **self.server_options)
        self.loader.mail_admins(**opts)
        self.assertTrue(send.call_args)
        message = send.call_args[0][0]
        self.assertEqual(message.to, [self.message_options['to']])
        self.assertEqual(message.subject, self.message_options['subject'])
        self.assertEqual(message.sender, self.message_options['sender'])
        self.assertEqual(message.body, self.message_options['body'])

    def test_mail_attribute(self):
        from celery.utils import mail
        loader = base.BaseLoader(app=self.app)
        self.assertIs(loader.mail, mail)

    def test_cmdline_config_ValueError(self):
        # Non-numeric value for a numeric setting must be rejected.
        with self.assertRaises(ValueError):
            self.loader.cmdline_config_parser(['broker.port=foobar'])


class test_DefaultLoader(AppCase):
    """Tests for the default (celeryconfig-module based) loader."""

    @patch('celery.loaders.base.find_module')
    def test_read_configuration_not_a_package(self, find_module):
        find_module.side_effect = NotAPackage()
        l = default.Loader(app=self.app)
        with self.assertRaises(NotAPackage):
            l.read_configuration(fail_silently=False)

    @patch('celery.loaders.base.find_module')
    @with_environ('CELERY_CONFIG_MODULE', 'celeryconfig.py')
    def test_read_configuration_py_in_name(self, find_module):
        # A module name mistakenly carrying a ".py" suffix still surfaces
        # the NotAPackage error.
        find_module.side_effect = NotAPackage()
        l = default.Loader(app=self.app)
        with self.assertRaises(NotAPackage):
            l.read_configuration(fail_silently=False)

    @patch('celery.loaders.base.find_module')
    def test_read_configuration_importerror(self, find_module):
        # With C_WNOCONF set, a missing config module warns NotConfigured;
        # without it, the failure is silent.
        default.C_WNOCONF = True
        find_module.side_effect = ImportError()
        l = default.Loader(app=self.app)
        with self.assertWarnsRegex(NotConfigured, r'make sure it exists'):
            l.read_configuration(fail_silently=True)
        default.C_WNOCONF = False
        l.read_configuration(fail_silently=True)

    def test_read_configuration(self):
        from types import ModuleType

        class ConfigModule(ModuleType):
            pass

        configname = os.environ.get('CELERY_CONFIG_MODULE') or 'celeryconfig'
        celeryconfig = ConfigModule(configname)
        celeryconfig.CELERY_IMPORTS = ('os', 'sys')

        # Install the fake config module, restoring any pre-existing one.
        prevconfig = sys.modules.get(configname)
        sys.modules[configname] = celeryconfig
        try:
            l = default.Loader(app=self.app)
            l.find_module = Mock(name='find_module')
            settings = l.read_configuration(fail_silently=False)
            self.assertTupleEqual(settings.CELERY_IMPORTS, ('os', 'sys'))
            # A second read must be stable (cached or re-read identically).
            settings = l.read_configuration(fail_silently=False)
            self.assertTupleEqual(settings.CELERY_IMPORTS, ('os', 'sys'))
            l.on_worker_init()
        finally:
            if prevconfig:
                sys.modules[configname] = prevconfig

    def test_import_from_cwd(self):
        l = default.Loader(app=self.app)
        old_path = list(sys.path)
        try:
            sys.path.remove(os.getcwd())
        except ValueError:
            pass
        # Force a re-import of celery both with and without cwd on sys.path.
        celery = sys.modules.pop('celery', None)
        sys.modules.pop('celery.five', None)
        try:
            self.assertTrue(l.import_from_cwd('celery'))
            sys.modules.pop('celery', None)
            sys.modules.pop('celery.five', None)
            sys.path.insert(0, os.getcwd())
            self.assertTrue(l.import_from_cwd('celery'))
        finally:
            sys.path = old_path
            sys.modules['celery'] = celery

    def test_unconfigured_settings(self):
        context_executed = [False]

        class _Loader(default.Loader):

            def find_module(self, name):
                raise ImportError(name)

        with warnings.catch_warnings(record=True):
            l = _Loader(app=self.app)
            self.assertFalse(l.configured)
            context_executed[0] = True
        self.assertTrue(context_executed[0])


class test_AppLoader(AppCase):
    """Tests for the app-embedded loader."""

    def setup(self):
        self.loader = AppLoader(app=self.app)

    def test_on_worker_init(self):
        # init_worker must import everything in CELERY_IMPORTS.
        self.app.conf.CELERY_IMPORTS = ('subprocess', )
        sys.modules.pop('subprocess', None)
        self.loader.init_worker()
        self.assertIn('subprocess', sys.modules)


class test_autodiscovery(Case):
    """Tests for the task auto-discovery helpers in celery.loaders.base."""

    def test_autodiscover_tasks(self):
        # With the race-protection flag set, discovery is a no-op.
        base._RACE_PROTECTION = True
        try:
            base.autodiscover_tasks(['foo'])
        finally:
            base._RACE_PROTECTION = False
        with patch('celery.loaders.base.find_related_module') as frm:
            base.autodiscover_tasks(['foo'])
            self.assertTrue(frm.called)

    def test_find_related_module(self):
        # find_related_module must tolerate AttributeError and ImportError
        # from the underlying import machinery.
        with patch('importlib.import_module') as imp:
            with patch('imp.find_module') as find:
                imp.return_value = Mock()
                imp.return_value.__path__ = 'foo'
                base.find_related_module(base, 'tasks')

                def se1(val):
                    imp.side_effect = AttributeError()

                imp.side_effect = se1
                base.find_related_module(base, 'tasks')
                imp.side_effect = None

                find.side_effect = ImportError()
                base.find_related_module(base, 'tasks')
viatkinviatkin/ember-flexberry-gis
app/utils/polygon-intersect-check.js
<gh_stars>1-10
// Re-export the `polygon-intersect-check` utility from the ember-flexberry-gis
// addon so the consuming app can import it by its local `app/utils/...` path.
export { default } from 'ember-flexberry-gis/utils/polygon-intersect-check';
Tradecoin-Token/kss_explorer
src/js/pages/SingleAddressPage/NonFungibleTokenList.container.js
import React from 'react'; import {withRouter} from 'react-router'; import ServiceFactory from '../../services/ServiceFactory'; import Loader from '../../components/Loader'; import {NonFungibleTokenListView} from './NonFungibleTokenList.view'; const TX_PAGE_SIZE = 100; class NonFungibleTokenListContainer extends React.Component { state = { tokens: [], loading: false, hasMore: true }; fetchData = () => { const {address, networkId} = this.props.match.params; const addressService = ServiceFactory.forNetwork(networkId).addressService(); return addressService .loadNftTokens(address) .then(tokens => this.setState({ tokens, hasMore: tokens.length === TX_PAGE_SIZE })); }; loadMore = (after) => { const {address, networkId} = this.props.match.params; const addressService = ServiceFactory.forNetwork(networkId).addressService(); return addressService.loadNftTokens(address, TX_PAGE_SIZE, after); }; handleMore = () => { if (this.state.tokens.length < 1) return; if (this.state.loading) return; this.setState({loading: true}); const next = this.state.tokens[this.state.tokens.length - 1].id; this.props.loadMore(next).then(tokens => { this.setState(prevState => ({ tokens: prevState.tokens.concat(tokens), loading: false, hasMore: tokens.length === TX_PAGE_SIZE })) }); }; render() { return ( <Loader fetchData={this.fetchData} errorTitle="Failed to load non-fungible tokens"> <NonFungibleTokenListView tokens={this.state.tokens} hasMore={this.state.hasMore} loadMore={this.handleMore} /> </Loader> ); } } const RoutedNonFungibleTokensListContainer = withRouter(NonFungibleTokenListContainer); export default RoutedNonFungibleTokensListContainer;
kemingy/daily-coding-problem
src/product_except.py
# Given an array of integers, return a new array such that each element at index
# i of the new array is the product of all the numbers in the original array
# except the one at i.

# For example, if our input was [1, 2, 3, 4, 5], the expected output would be
# [120, 60, 40, 30, 24]. If our input was [3, 2, 1], the expected output would
# be [2, 3, 6].


def product_except_self(nums):
    """Return a list whose i-th entry is the product of every number in
    ``nums`` except ``nums[i]``, computed without division in O(n)."""
    size = len(nums)

    # prefix[i] holds the product of nums[0:i]; prefix[0] is the empty product.
    prefix = [1] * (size + 1)
    for i, value in enumerate(nums):
        prefix[i + 1] = prefix[i] * value

    # Walk backwards, pairing each prefix with a running suffix product.
    result = [1] * size
    suffix = 1
    for i in range(size - 1, -1, -1):
        result[i] = prefix[i] * suffix
        suffix *= nums[i]
    return result


if __name__ == '__main__':
    for nums in [[1,2,3,4,5], [3,2,1]]:
        print('Product except self of {} is {}.'.format(nums, product_except_self(nums)))
huangruichang/treex
app/src/components/Repo/CommitDetail/CommitFileItem.js
import React, { Component, PropTypes } from 'react' const styles = require('./commitDetail.scss') export default class CommitFileItem extends Component { static propTypes = { path: PropTypes.string.isRequired, onClick: PropTypes.func.isRequired, patches: PropTypes.object.isRequired, mode: PropTypes.string, style: PropTypes.object, onStageClick: PropTypes.func, onUnStageClick: PropTypes.func, } constructor(props) { super(props) } getIcon(convenientPatch) { if (convenientPatch.isAdded()) { return '+' } if (convenientPatch.isDeleted()) { return '-' } return '...' } render() { let checkbox = <input type="checkbox" key={`commit-file-item-checkbox-${this.props.path}`} defaultChecked="checked" onChange={(event) => { event.preventDefault() event.stopPropagation() this.props.onUnStageClick(this.props.patches) }}/> if (this.props.mode === 'unstaged') { checkbox = <input type="checkbox" key={`commit-file-item-checkbox-${this.props.path}`} onChange={() => { event.preventDefault() event.stopPropagation() this.props.onStageClick(this.props.patches) }}/> } this.getIcon(this.props.patches) return ( <div tabIndex={-1} className={ styles.commitFileItem } onClick={() => { this.props.onClick(this.props.patches) }} style={this.props.style}> <div> {this.props.mode ? checkbox : ''} <div className={ styles.icon }>{this.getIcon(this.props.patches)}</div> {this.props.path} </div> </div> ) } }
Gaurav0/Old-School-RPG-Map
js/engine/Battle.js
<reponame>Gaurav0/Old-School-RPG-Map /* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 1.1/GPL 2.0/LGPL 2.1 * * The contents of this file are subject to the Mozilla Public License Version * 1.1 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * http://www.mozilla.org/MPL/ * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is Old School RPG Map. * * The Initial Developer of the Original Code is Jono Xia. * Portions created by the Initial Developer are Copyright (C) 2007 * the Initial Developer. All Rights Reserved. * * Contributor(s): * <NAME> <<EMAIL>> * <NAME> <<EMAIL>> * <NAME> <<EMAIL>> * <NAME> <<EMAIL>> * * Alternatively, the contents of this file may be used under the terms of * either the GNU General Public License Version 2 or later (the "GPL"), or * the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), * in which case the provisions of the GPL or the LGPL are applicable instead * of those above. If you wish to allow use of your version of this file only * under the terms of either the GPL or the LGPL, and not to allow others to * use your version of this file under the terms of the MPL, indicate your * decision by deleting the provisions above and replace them with the notice * and other provisions required by the GPL or the LGPL. If you do not delete * the provisions above, a recipient may use your version of this file under * the terms of any one of the MPL, the GPL or the LGPL. 
* * ***** END LICENSE BLOCK ***** */ /* Class representing a battle */ var Battle = Class.extend({ _init: function() { // Initialize properties this._background = null; this._encounter = null; this._monsterList = null; this._currentAction = BATTLE_MENU_ATTACK; this._mainMenu = new BattleMenu(this); this._currentMenu = this._mainMenu; this._over = false; this._win = false; this._line = 0; this._txt = ""; this._totalExp = 0; this._totalGold = 0; this._monsterWillAttack = true; this._defending = false; this._ignoringKeys = false; this._writing = false; this._delay = 0; var screenHeight = mapCanvas.height; this._textHeight = [ screenHeight - 132, screenHeight - 110, screenHeight - 86, screenHeight - 62, screenHeight - 38 ]; }, /* Setup random encounter */ setupRandomEncounter: function(zone, backgroundRef, battleMusic) { this._background = g_imageData.images[backgroundRef].img; this._music = battleMusic; if (!!battleMusic) { g_worldmap.getSubMap(g_worldmap.getCurrentSubMapId()).pauseMusic(); document.getElementById(battleMusic).play(); } // Get encounter data associated with zone var zoneXml = null; for (var i = 0; i < g_encounterData.zones.length; ++i) if (g_encounterData.zones[i].zone == zone) zoneXml = g_encounterData.zones[i]; if (zoneXml != null) { // Choose an encounter randomly var len = zoneXml.encounters.length; var r = Math.floor(Math.random() * len); this._encounter = zoneXml.encounters[r]; // Create monster list this._monsterList = []; for (var j = 0; j < this._encounter.monsters.length; ++j) { var monsterId = this._encounter.monsters[j]; for (var k = 0; k < g_monsterData.monsters.length; ++k) if (g_monsterData.monsters[k].id == monsterId) { var monster = new Monster(g_monsterData.monsters[k]); this._monsterList.push(monster); } } } if (keyDown) this._ignoringKeys = true; }, /* Setup scripted encounter (for boss monsters, etc.) 
*/ setupEncounter: function(name, aryMonsters, backgroundRef) { this._background = g_imageData.images[backgroundRef].img; // Create encounter object this._encounter = { "name": name, "monsters": aryMonsters }; // Create monster list this._monsterList = []; for (var i = 0; i < aryMonsters.length; ++i) { var monsterId = aryMonsters[i]; for (var j = 0; j < g_monsterData.monsters.length; ++j) if (g_monsterData.monsters[j].id == monsterId) { var monster = new Monster(g_monsterData.monsters[j]); this._monsterList.push(monster); } } if (keyDown) this._ignoringKeys = true; }, /* Draws battle screen */ draw: function() { var screenWidth = mapCanvas.width; var screenHeight = mapCanvas.height; // Draw battle background mapCtx.drawImage(this._background, 0, 0, screenWidth, screenHeight); spriteCtx.clearRect(0, 0, screenWidth, screenHeight); this.drawPlayer(); this.drawHealthBar(); this.drawManaBar(); this.drawMonsters(); this._mainMenu.display(); drawBox(menuCtx, 133, screenHeight - 150, screenWidth - 133, 150, 15, 3); textCtx.font = "bold 16px sans-serif"; var txt = this._encounter.name + " appeared!"; textCtx.fillText(txt, 154, this._textHeight[0]); this._line = 1; this._txt = txt; }, /* Draws player on battle screen */ drawPlayer: function() { spriteCtx.drawImage(g_player._img, SPRITE_WIDTH, // source x FACING_LEFT * SPRITE_HEIGHT, // source y SPRITE_WIDTH, // source width SPRITE_HEIGHT, // source height spriteCanvas.width - 3 * TILE_WIDTH, // dest x 2 * TILE_HEIGHT, // dest y SPRITE_WIDTH, // dest width SPRITE_HEIGHT); // dest height }, /* Erases player on battle screen */ clearPlayer: function() { spriteCtx.clearRect( spriteCanvas.width - 3 * TILE_WIDTH, // x 2 * TILE_HEIGHT, // y SPRITE_WIDTH, // width SPRITE_HEIGHT); // height }, /* Draws enemies on battle screen */ drawMonsters: function() { var destLeft = 2 * TILE_WIDTH; for (var i = 0; i < this._monsterList.length; ++i) { var monster = this._monsterList[i]; var img = g_imageData.images[monster.getImageRef()].img; 
spriteCtx.drawImage(img, monster.getLeft(), monster.getTop(), monster.getWidth(), monster.getHeight(), destLeft, 3 * TILE_HEIGHT, monster.getWidth(), monster.getHeight()); monster.setLoc(destLeft); destLeft += monster.getWidth() + 15; } }, /* Erases enemy on battle screen */ clearMonster: function(id) { var monster = this._monsterList[id]; spriteCtx.clearRect( monster.getLoc(), 3 * TILE_HEIGHT, monster.getWidth(), monster.getHeight()); }, /* Draws health bar on battle screen */ drawHealthBar: function() { var x = spriteCanvas.width - 2 * TILE_WIDTH + 0.5; var y = 2 * TILE_HEIGHT + 0.5; var w = 10; var h = SPRITE_HEIGHT; var pct = g_player.getHP() / g_player.getMaxHP(); if (pct < 0) pct = 0; var yh = y + Math.round((1 - pct) * h); var hh = h - (yh - y); // alert("y:" + y + " yh:" + yh + " h:" + h + " hh:" + hh); spriteCtx.fillStyle = "red"; spriteCtx.fillRect(x, yh, w, hh); spriteCtx.strokeStyle = "black"; spriteCtx.strokeRect(x, y, w, h); }, /* Draws MP bar on battle screen */ drawManaBar: function() { var x = spriteCanvas.width - 2 * TILE_WIDTH + 10.5; var y = 2 * TILE_HEIGHT + 0.5; var w = 10; var h = SPRITE_HEIGHT; var pct = g_player.getMP() / g_player.getMaxMP(); if (pct < 0) pct = 0; var yh = y + Math.round((1 - pct) * h); var hh = h - (yh - y); // alert("y:" + y + " yh:" + yh + " h:" + h + " hh:" + hh); spriteCtx.fillStyle = "#ccccff"; spriteCtx.fillRect(x, yh, w, hh); spriteCtx.strokeStyle = "black"; spriteCtx.strokeRect(x, y, w, h); }, /* Erases Health Bar on battle screen */ clearHealthBar: function() { var x = spriteCanvas.width - 2 * TILE_WIDTH + 0.5; var y = 2 * TILE_HEIGHT + 0.5; var w = 10; var h = SPRITE_HEIGHT; spriteCtx.clearRect(x, y, w, h); }, /* Erases MP Bar on battle screen */ clearManaBar: function() { var x = spriteCanvas.width - 2 * TILE_WIDTH + 10.5; var y = 2 * TILE_HEIGHT + 0.5; var w = 10; var h = SPRITE_HEIGHT; spriteCtx.clearRect(x, y, w, h); }, /* Draws health bar on battle screen */ updateHealthBar: function(health) { 
this.clearHealthBar(); var x = spriteCanvas.width - 2 * TILE_WIDTH + 0.5; var y = 2 * TILE_HEIGHT + 0.5; var w = 10; var h = SPRITE_HEIGHT; var pct = health / g_player.getMaxHP(); if (pct < 0) pct = 0; var yh = y + Math.round((1 - pct) * h); var hh = h - (yh - y); // alert("y:" + y + " yh:" + yh + " h:" + h + " hh:" + hh); spriteCtx.fillStyle = "red"; spriteCtx.fillRect(x, yh, w, hh); spriteCtx.strokeStyle = "black"; spriteCtx.strokeRect(x, y, w, h); }, /* Writes a message line on bottom right box of battle screen */ writeMsg: function(msg) { this._writing = true; this._mainMenu.clearPointer(); window.setTimeout(function() { g_battle.drawText(); var line = g_battle._line <= 4 ? g_battle._line : 4; textCtx.fillText(msg, 154, g_battle._textHeight[line]); g_battle._txt += "\n" + msg; g_battle._line++; g_battle._delay -= MESSAGE_DELAY; if (g_battle._delay == 0) { g_battle._writing = false; if (!g_battle._over) g_battle._mainMenu.drawPointer(); } }, this._delay); this._delay += MESSAGE_DELAY; }, /* Draws the previously written text */ drawText: function() { textCtx.font = "bold 16px sans-serif"; textCtx.fillStyle = "white"; textCtx.textBaseline = "top"; this.clearText(); var prevText = this._txt.split("\n"); if (this._line <= 4) { for (var i = 0; i < this._line; ++i) textCtx.fillText(prevText[i], 154, this._textHeight[i]); } else { for (var i = 0; i < 4; ++i) { var lineText = prevText[prevText.length - 4 + i]; textCtx.fillText(lineText, 154, this._textHeight[i]); } } }, /* Clears text in bottom right box of battle screen */ clearText: function() { textCtx.clearRect(154, this._textHeight[0], textCanvas.width - 154, textCanvas.height - this._textHeight[0]); }, /* End of the battle */ end: function() { menuCtx.clearRect(0, 0, menuCanvas.width, menuCanvas.height); spriteCtx.clearRect(0, 0, spriteCanvas.width, spriteCanvas.height); textCtx.clearRect(0, 0, textCanvas.width, textCanvas.height); if (!!this._music) document.getElementById(this._music).pause(); if 
(!g_player.isDead()) { g_worldmap.redraw(); g_worldmap.drawSprites(); g_player.plot(); g_worldmap.getSubMap(g_worldmap.getCurrentSubMapId()).playMusic(); // Callback functions for after the battle is over. if (this._win) this.onWin(); this.onExit(); } else { g_titlescreen = true; g_game.showTitleScreen(); } g_battle = null; }, /* Handles input while battling for up, down, left, and right arrow keys */ handleKey: function(key) { if (!this._writing && !this._ignoringKeys && !this.over && !g_player.isDead()) { this._currentMenu.handleKey(key); } }, /* Handles input of enter key or spacebar while battling */ handleEnter: function() { if (!this._writing && !g_player.isDead()) { if (this._over) this.end(); else { this._defending = false; this._monsterWillAttack = true; this._currentMenu.handleEnter(); this.finishTurn(); } } }, /* handles input of ESC key while battling. */ handleESC: function() { if (this._over) { this.end(); } else { this._currentMenu.handleESC(); } }, /* handles key up event */ handleKeyUp: function() { this._ignoringKeys = false; }, setMonsterWillAttack: function(willAttack) { this._monsterWillAttack = willAttack; }, /* called from battle menu to begin the attack of the monster */ beginAttack: function() { this._currentAction = BATTLE_MENU_ATTACK; if (this._monsterList.length == 1) { this.attack(0); this.finishTurn(); } else { // There is more than one monster, enter selecting mode. 
this._currentMenu = new MonsterMenu(this._currentMenu, this, this._monsterList); this._currentMenu.selectFirstLiveMonster(); this._currentMenu.display(); this._monsterWillAttack = false; } }, /* called from battle menu to begin defending */ defend: function() { this._defending = true; this.writeMsg("You defended."); this.finishTurn(); }, /* Finish turn after selecting monster and performing action */ finishTurn: function() { // Monster's turn if (!this._over && this._monsterWillAttack) this.monsterTurn(false); // Update Health Bar if (this._monsterWillAttack) { this.runAfterWriting(function() { g_battle.clearHealthBar(); g_battle.clearManaBar(); g_battle.drawHealthBar(); g_battle.drawManaBar(); if (!g_battle._over) { g_battle._currentMenu = g_battle._mainMenu; g_battle._mainMenu.returnTo(false); } }); } this._defending = false; this._monsterWillAttack = false; }, /* Utility function to run callback function when writing is finished */ runAfterWriting: function(callback) { if (this._writing) { window.setTimeout(function() { g_battle.runAfterWriting(callback); }); } else callback(); }, /* Utility function to call a function for each monster currently alive * callback function takes a monster and id. */ forEachMonster: function(callback) { for (var i = 0; i < this._monsterList.length; ++i) if (!this._monsterList[i].isDead()) callback(this._monsterList[i], i); }, // Earn gold & exp associated with killing a monster earnReward: function(monster, id) { var battle = this; window.setTimeout(function() { battle.clearMonster(id); }, this._delay); this.writeMsg("The " + monster.getName() + " was killed."); this._totalExp += monster.getExp(); this._totalGold += monster.getGold(); // If all monsters are dead... for (var i = 0; i < this._monsterList.length; ++i) if (!this._monsterList[i].isDead()) return; // End battle and award exp & gold to player. 
g_player.earnGold(this._totalGold); var gainedLevel = g_player.earnExp(this._totalExp); this.writeMsg("You have earned " + this._totalExp + " exp"); this.writeMsg("and " + this._totalGold + " GP."); if (gainedLevel) this.writeMsg("You gained a level!"); this._over = true; this._win = true; this._mainMenu.clearPointer(); }, doActionToMonster: function(id) { switch (this._currentAction) { case BATTLE_MENU_ATTACK: this.attack(id); this.finishTurn(); break; case BATTLE_MENU_DEFEND: this._defending = true; break; case BATTLE_MENU_ITEM: // not implemented break; case BATTLE_MENU_SPELL: // not implemented break; case BATTLE_MENU_RUN: // not possible break; } }, /* Player attacks monster with id provided */ attack: function(id) { // Basic battle system; determine damage from attack and defense var monster = this._monsterList[id]; var rand = Math.random(); if (rand > 0.95) { this.writeMsg("You missed!"); } else { var damage = g_player.getAttack() - monster.getDefense(); if (rand > 0.9) { this.writeMsg("Critical Hit!"); damage *= 2; } if (damage < 1) damage = 1; damage -= Math.floor(Math.random() * damage / 2); this.writeMsg("You attacked for " + damage + " damage."); monster.damage(damage); // If monster is dead, earn exp & gold associated. 
if (monster.isDead()) { this.earnReward(monster, id); } } }, /* Monsters attack the player */ monsterTurn: function() { for (var i = 0; i < this._monsterList.length; ++i) { var monster = this._monsterList[i]; if (!monster.isDead()) { if (monster.hasSpecialAttack() && Math.random() < 0.5) monster.useSpecialAttack(); else { // Basic battle system; determine damage from attack and defense var rand = Math.random(); if (rand > 0.9) { this.writeMsg("The " + monster.getName() + " missed!"); } else { var damage = monster.getAttack() - g_player.getDefense(); if (rand > 0.86) { this.writeMsg("Terrible Hit!"); damage = 2 * monster.getAttack() - g_player.getDefense(); } if (this._defending) damage = Math.floor(damage / 2.5); if (damage < 1) damage = 1; damage -= Math.floor(Math.random() * damage / 2); g_player.damage(damage); this.writeMsg("The " + monster.getName() + " attacked for"); this.writeMsg(damage + " damage."); // Update health bar as you go. var battle = this; var health = g_player.getHP(); window.setTimeout(function(health) { return function() { battle.updateHealthBar(health); }; }(health), this._delay); } } // If player is dead, end game! if (g_player.isDead()) { this.writeMsg("You died."); this._over = true; this._mainMenu.clearPointer(); var battle = this; this.runAfterWriting(function() { battle.clearPlayer(); }); return; } } } }, /* Player will attempt to run */ run: function() { if (Math.random() >= 0.33) { this.writeMsg("You start to run."); this.monsterTurn(false); if (g_player.isDead() || this._over) return false; if (Math.random() < 0.33) { this.writeMsg("You couldn't run away.") return false; } } this.writeMsg("You ran away.") this._over = true; this._mainMenu.clearPointer(); var battle = this; this.runAfterWriting(function() { battle.clearPlayer(); }); return true; }, /* Use the selected item. Returns true if an item was used. 
*/ useItem: function() { if (this._itemSelection < this._numItems) { var itemId = this._itemId[this._itemSelection]; var item = g_itemData.items[itemId]; switch(item.type) { case ITEMTYPE_HEAL_ONE: item.use(g_player); break; case ITEMTYPE_ATTACK_ALL: item.use(); break; } g_player.removeFromInventory(itemId); return true; } return false; }, /* Use the selected spell. Returns true if a spell was used. */ useSpell: function() { if (this._spellSelection < this._numSpells) { var spellId = this._spellId[this._spellSelection]; var spell = g_spellData.spells[spellId]; if (g_player.getMP() >= spell.mpCost) { switch(spell.type) { case SPELLTYPE_HEAL_ONE: spell.use(g_player); break; case SPELLTYPE_ATTACK_ALL: spell.use(); break; } g_player.useMP(spell.mpCost); return true; } else { this.writeMsg("You do not have enough MP"); this.writeMsg("to cast " + spell.name + "."); } } return false; }, onExit: function() { // What happens after the battle is over and you exit? }, onWin: function() { // What happens after the battle is over and you have won? } });
gudron/rockdolljs
src/views/table/src/tableHeadRowView.js
// TableHeadRowView // --------- import TableRowView from './tableRowView'; import TableCellView from './cell/tableCellView'; import TableHeadCellView from './cell/tableHeadCellView'; const TableHeadRowView = TableRowView.extend({ childView: TableHeadCellView, _cellTypes: { text: TableCellView, head: TableHeadCellView }, }); export default TableHeadRowView;
amphied/speke-reference-server
spekev2_verification_testsuite/test_negative_cases.py
<filename>spekev2_verification_testsuite/test_negative_cases.py import pytest import xml.etree.ElementTree as ET from io import StringIO from .helpers import utils, speke_element_assertions @pytest.fixture def generic_request(spekev2_url): return utils.read_xml_file_contents("general", utils.GENERIC_WIDEVINE_TEST_FILE) @pytest.fixture def fairplay_request(spekev2_url): return utils.read_xml_file_contents(utils.TEST_CASE_1_P_V_1_A_1, utils.PRESETS_FAIRPLAY) @pytest.fixture def preset_negative_preset_shared_video(spekev2_url): return utils.read_xml_file_contents("general", utils.NEGATIVE_PRESET_SHARED_VIDEO) @pytest.fixture def preset_negative_preset_shared_audio(spekev2_url): return utils.read_xml_file_contents("general", utils.NEGATIVE_PRESET_SHARED_AUDIO) @pytest.fixture def empty_xml_response(spekev2_url): response = utils.speke_v2_request(spekev2_url, "") return response @pytest.fixture def wrong_version_response(spekev2_url): test_request_data = utils.read_xml_file_contents("general", utils.WRONG_VERSION_TEST_FILE) response = utils.speke_v2_request(spekev2_url, test_request_data) return response def test_empty_request(empty_xml_response): assert empty_xml_response.status_code != 200 and (400 <= empty_xml_response.status_code < 600), \ "Empty request is expected to return an error" def test_wrong_version_status_code(wrong_version_response): assert wrong_version_response.status_code != 200 and (400 <= wrong_version_response.status_code < 600), \ "Wrong version in the request is expected to return an error" @pytest.mark.parametrize("mandatory_element", utils.SPEKE_V2_MANDATORY_ELEMENTS_LIST) def test_mandatory_elements_missing_in_request(spekev2_url, generic_request, mandatory_element): response = utils.send_modified_speke_request_with_element_removed(spekev2_url, generic_request, mandatory_element) assert response.status_code != 200 and (400 <= response.status_code < 600), \ f"Mandatory element: {mandatory_element} not present in request but response was a 200 
OK" def test_both_mandatory_filter_elements_missing_in_request(spekev2_url, generic_request): request_cpix = ET.fromstring(generic_request) for node in request_cpix.iter(): for elem in utils.SPEKE_V2_MANDATORY_FILTER_ELEMENTS_LIST: for child in node.findall(elem): node.remove(child) request_xml_data = ET.tostring(request_cpix, method="xml") response = utils.speke_v2_request(spekev2_url, request_xml_data) assert response.status_code != 200 and (400 <= response.status_code < 600), \ f"Mandatory filter elements: {utils.SPEKE_V2_MANDATORY_FILTER_ELEMENTS_LIST} not present in request but " \ f"response was a 200 OK " @pytest.mark.parametrize("mandatory_attribute", utils.SPEKE_V2_MANDATORY_ATTRIBUTES_LIST) def test_missing_mandatory_attributes_in_request(spekev2_url, generic_request, mandatory_attribute): request_cpix = ET.fromstring(generic_request) for node in request_cpix.iter(): for child in node.findall(mandatory_attribute[0]): for attribute in [x for x in mandatory_attribute[1] if x in child.attrib]: child.attrib.pop(attribute) request_xml_data = ET.tostring(request_cpix, method="xml") response = utils.speke_v2_request(spekev2_url, request_xml_data) assert response.status_code != 200 and (400 <= response.status_code < 600), \ f"Mandatory attribute(s): {mandatory_attribute[1]} for element: {mandatory_attribute[0]} not present in " \ f"request but response was a 200 OK " def test_common_encryption_scheme_for_fairplay_should_not_be_cenc(spekev2_url, fairplay_request): xml_request = fairplay_request.decode('UTF-8').replace("cbcs", "cenc") response = utils.speke_v2_request(spekev2_url, xml_request.encode('UTF-8')) assert response.status_code != 200 and (400 <= response.status_code < 600), \ f"Requests for Fairplay DRM system ID should not include cenc as common encryption. 
Status code returned was {response.status_code}" def test_video_preset_2_and_shared_audio_preset_request_expect_4xx(spekev2_url, preset_negative_preset_shared_audio): """ Intended track type(s) used in this test are SD, ALL, STEREO_AUDIO, MULTICHANNEL_AUDIO Expected to return HTTP 4xx error """ xml_request = preset_negative_preset_shared_audio.decode('UTF-8') response = utils.speke_v2_request(spekev2_url, xml_request.encode('UTF-8')) assert response.status_code != 200 and (400 <= response.status_code < 600), \ f"If intendedTrackType with ALL is requested, there cannot be other ContentKeyUsageRule elements with " \ f"different intendedTrackType values " def test_shared_video_preset_and_audio_preset_2_request_expect_4xx(spekev2_url, preset_negative_preset_shared_video): """ Intended track type(s) used in this test are SD, HD, ALL, MULTICHANNEL_AUDIO :returns: HTTP 4xx error """ xml_request = preset_negative_preset_shared_video.decode('UTF-8') response = utils.speke_v2_request(spekev2_url, xml_request.encode('UTF-8')) assert response.status_code != 200 and (400 <= response.status_code < 600), \ f"If intendedTrackType with ALL is requested, there cannot be other ContentKeyUsageRule elements with " \ f"different intendedTrackType values "
qingqibing/go-genproto
googleapis/ads/googleads/v3/resources/google_ads_field.pb.go
// Code generated by protoc-gen-go. DO NOT EDIT. // source: google/ads/googleads/v3/resources/google_ads_field.proto package resources import ( fmt "fmt" math "math" proto "github.com/golang/protobuf/proto" wrappers "github.com/golang/protobuf/ptypes/wrappers" enums "google.golang.org/genproto/googleapis/ads/googleads/v3/enums" _ "google.golang.org/genproto/googleapis/api/annotations" ) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal var _ = fmt.Errorf var _ = math.Inf // This is a compile-time assertion to ensure that this generated file // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package // A field or resource (artifact) used by GoogleAdsService. type GoogleAdsField struct { // Output only. The resource name of the artifact. // Artifact resource names have the form: // // `googleAdsFields/{name}` ResourceName string `protobuf:"bytes,1,opt,name=resource_name,json=resourceName,proto3" json:"resource_name,omitempty"` // Output only. The name of the artifact. Name *wrappers.StringValue `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` // Output only. The category of the artifact. Category enums.GoogleAdsFieldCategoryEnum_GoogleAdsFieldCategory `protobuf:"varint,3,opt,name=category,proto3,enum=google.ads.googleads.v3.enums.GoogleAdsFieldCategoryEnum_GoogleAdsFieldCategory" json:"category,omitempty"` // Output only. Whether the artifact can be used in a SELECT clause in search // queries. Selectable *wrappers.BoolValue `protobuf:"bytes,4,opt,name=selectable,proto3" json:"selectable,omitempty"` // Output only. Whether the artifact can be used in a WHERE clause in search // queries. Filterable *wrappers.BoolValue `protobuf:"bytes,5,opt,name=filterable,proto3" json:"filterable,omitempty"` // Output only. 
Whether the artifact can be used in a ORDER BY clause in search // queries. Sortable *wrappers.BoolValue `protobuf:"bytes,6,opt,name=sortable,proto3" json:"sortable,omitempty"` // Output only. The names of all resources, segments, and metrics that are selectable with // the described artifact. SelectableWith []*wrappers.StringValue `protobuf:"bytes,7,rep,name=selectable_with,json=selectableWith,proto3" json:"selectable_with,omitempty"` // Output only. The names of all resources that are selectable with the described // artifact. Fields from these resources do not segment metrics when included // in search queries. // // This field is only set for artifacts whose category is RESOURCE. AttributeResources []*wrappers.StringValue `protobuf:"bytes,8,rep,name=attribute_resources,json=attributeResources,proto3" json:"attribute_resources,omitempty"` // Output only. At and beyond version V1 this field lists the names of all metrics that are // selectable with the described artifact when it is used in the FROM clause. // It is only set for artifacts whose category is RESOURCE. // // Before version V1 this field lists the names of all metrics that are // selectable with the described artifact. It is only set for artifacts whose // category is either RESOURCE or SEGMENT Metrics []*wrappers.StringValue `protobuf:"bytes,9,rep,name=metrics,proto3" json:"metrics,omitempty"` // Output only. At and beyond version V1 this field lists the names of all artifacts, // whether a segment or another resource, that segment metrics when included // in search queries and when the described artifact is used in the FROM // clause. It is only set for artifacts whose category is RESOURCE. // // Before version V1 this field lists the names of all artifacts, whether a // segment or another resource, that segment metrics when included in search // queries. It is only set for artifacts of category RESOURCE, SEGMENT or // METRIC. 
Segments []*wrappers.StringValue `protobuf:"bytes,10,rep,name=segments,proto3" json:"segments,omitempty"` // Output only. Values the artifact can assume if it is a field of type ENUM. // // This field is only set for artifacts of category SEGMENT or ATTRIBUTE. EnumValues []*wrappers.StringValue `protobuf:"bytes,11,rep,name=enum_values,json=enumValues,proto3" json:"enum_values,omitempty"` // Output only. This field determines the operators that can be used with the artifact // in WHERE clauses. DataType enums.GoogleAdsFieldDataTypeEnum_GoogleAdsFieldDataType `protobuf:"varint,12,opt,name=data_type,json=dataType,proto3,enum=google.ads.googleads.v3.enums.GoogleAdsFieldDataTypeEnum_GoogleAdsFieldDataType" json:"data_type,omitempty"` // Output only. The URL of proto describing the artifact's data type. TypeUrl *wrappers.StringValue `protobuf:"bytes,13,opt,name=type_url,json=typeUrl,proto3" json:"type_url,omitempty"` // Output only. Whether the field artifact is repeated. IsRepeated *wrappers.BoolValue `protobuf:"bytes,14,opt,name=is_repeated,json=isRepeated,proto3" json:"is_repeated,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` } func (m *GoogleAdsField) Reset() { *m = GoogleAdsField{} } func (m *GoogleAdsField) String() string { return proto.CompactTextString(m) } func (*GoogleAdsField) ProtoMessage() {} func (*GoogleAdsField) Descriptor() ([]byte, []int) { return fileDescriptor_7843d52294d73b63, []int{0} } func (m *GoogleAdsField) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_GoogleAdsField.Unmarshal(m, b) } func (m *GoogleAdsField) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { return xxx_messageInfo_GoogleAdsField.Marshal(b, m, deterministic) } func (m *GoogleAdsField) XXX_Merge(src proto.Message) { xxx_messageInfo_GoogleAdsField.Merge(m, src) } func (m *GoogleAdsField) XXX_Size() int { return xxx_messageInfo_GoogleAdsField.Size(m) } func (m *GoogleAdsField) 
XXX_DiscardUnknown() { xxx_messageInfo_GoogleAdsField.DiscardUnknown(m) } var xxx_messageInfo_GoogleAdsField proto.InternalMessageInfo func (m *GoogleAdsField) GetResourceName() string { if m != nil { return m.ResourceName } return "" } func (m *GoogleAdsField) GetName() *wrappers.StringValue { if m != nil { return m.Name } return nil } func (m *GoogleAdsField) GetCategory() enums.GoogleAdsFieldCategoryEnum_GoogleAdsFieldCategory { if m != nil { return m.Category } return enums.GoogleAdsFieldCategoryEnum_UNSPECIFIED } func (m *GoogleAdsField) GetSelectable() *wrappers.BoolValue { if m != nil { return m.Selectable } return nil } func (m *GoogleAdsField) GetFilterable() *wrappers.BoolValue { if m != nil { return m.Filterable } return nil } func (m *GoogleAdsField) GetSortable() *wrappers.BoolValue { if m != nil { return m.Sortable } return nil } func (m *GoogleAdsField) GetSelectableWith() []*wrappers.StringValue { if m != nil { return m.SelectableWith } return nil } func (m *GoogleAdsField) GetAttributeResources() []*wrappers.StringValue { if m != nil { return m.AttributeResources } return nil } func (m *GoogleAdsField) GetMetrics() []*wrappers.StringValue { if m != nil { return m.Metrics } return nil } func (m *GoogleAdsField) GetSegments() []*wrappers.StringValue { if m != nil { return m.Segments } return nil } func (m *GoogleAdsField) GetEnumValues() []*wrappers.StringValue { if m != nil { return m.EnumValues } return nil } func (m *GoogleAdsField) GetDataType() enums.GoogleAdsFieldDataTypeEnum_GoogleAdsFieldDataType { if m != nil { return m.DataType } return enums.GoogleAdsFieldDataTypeEnum_UNSPECIFIED } func (m *GoogleAdsField) GetTypeUrl() *wrappers.StringValue { if m != nil { return m.TypeUrl } return nil } func (m *GoogleAdsField) GetIsRepeated() *wrappers.BoolValue { if m != nil { return m.IsRepeated } return nil } func init() { proto.RegisterType((*GoogleAdsField)(nil), "google.ads.googleads.v3.resources.GoogleAdsField") } func init() { 
proto.RegisterFile("google/ads/googleads/v3/resources/google_ads_field.proto", fileDescriptor_7843d52294d73b63) } var fileDescriptor_7843d52294d73b63 = []byte{ // 652 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x95, 0x5f, 0x6f, 0xd3, 0x3c, 0x14, 0xc6, 0xd5, 0x76, 0xef, 0xd6, 0xb9, 0x5b, 0x5f, 0xc9, 0xbb, 0x09, 0xd3, 0x04, 0xdd, 0xa4, 0x89, 0x71, 0xe3, 0x48, 0xab, 0x90, 0x50, 0xc6, 0x80, 0x94, 0x3f, 0x93, 0x26, 0x81, 0xaa, 0x32, 0x8a, 0x84, 0x2a, 0x45, 0x6e, 0x73, 0x96, 0x59, 0x4a, 0xe2, 0x60, 0x3b, 0x9d, 0x26, 0x84, 0xc4, 0x67, 0xe1, 0x92, 0x8f, 0xc2, 0xa7, 0xd8, 0xf5, 0xee, 0xb9, 0xe1, 0x0a, 0x25, 0x71, 0xdc, 0x96, 0x31, 0x08, 0xdc, 0xd9, 0x39, 0xcf, 0xef, 0x39, 0x4f, 0x9d, 0xe3, 0x14, 0x3d, 0x08, 0x38, 0x0f, 0x42, 0xb0, 0xa9, 0x2f, 0xed, 0x62, 0x99, 0xad, 0xa6, 0x5d, 0x5b, 0x80, 0xe4, 0xa9, 0x98, 0x40, 0xf9, 0xd8, 0xa3, 0xbe, 0xf4, 0x4e, 0x19, 0x84, 0x3e, 0x49, 0x04, 0x57, 0x1c, 0x6f, 0x17, 0xcf, 0x09, 0xf5, 0x25, 0x31, 0x24, 0x99, 0x76, 0x89, 0x21, 0x37, 0x0f, 0x6f, 0x32, 0x87, 0x38, 0x8d, 0xae, 0x1b, 0x7b, 0x13, 0xaa, 0x20, 0xe0, 0xe2, 0xa2, 0xe8, 0xb0, 0xf9, 0xe8, 0x2f, 0x71, 0x9f, 0x2a, 0xea, 0xa9, 0x8b, 0x04, 0x34, 0x7f, 0xa7, 0xe4, 0x13, 0x66, 0x17, 0x8a, 0x31, 0x9c, 0xd1, 0x29, 0xe3, 0x42, 0x0b, 0x6e, 0xcd, 0x09, 0xca, 0xd4, 0xba, 0x74, 0x5b, 0x97, 0xf2, 0xdd, 0x38, 0x3d, 0xb5, 0xcf, 0x05, 0x4d, 0x12, 0x10, 0x52, 0xd7, 0xb7, 0xe6, 0x50, 0x1a, 0xc7, 0x5c, 0x51, 0xc5, 0x78, 0xac, 0xab, 0x3b, 0xdf, 0x9a, 0xa8, 0x7d, 0x94, 0x0b, 0x5c, 0x5f, 0xbe, 0xc8, 0x5a, 0xe3, 0x13, 0xb4, 0x5e, 0xb6, 0xf0, 0x62, 0x1a, 0x81, 0x55, 0xeb, 0xd4, 0xf6, 0x56, 0x7b, 0xf6, 0xa5, 0xdb, 0xf8, 0xee, 0xde, 0x43, 0x77, 0x67, 0x47, 0xa8, 0x57, 0x09, 0x93, 0x64, 0xc2, 0x23, 0x7b, 0xd1, 0x67, 0xb0, 0x56, 0xba, 0xbc, 0xa2, 0x11, 0xe0, 0xfb, 0x68, 0x29, 0x37, 0xab, 0x77, 0x6a, 0x7b, 0xad, 0xfd, 0x2d, 0xcd, 0x92, 0x32, 0x35, 0x79, 0xad, 0x04, 0x8b, 0x83, 0x21, 0x0d, 0x53, 0xe8, 0x35, 0x2e, 0xdd, 0xc6, 0x20, 0x97, 0xe3, 0xf7, 0xa8, 
0x59, 0x9e, 0xb5, 0xd5, 0xe8, 0xd4, 0xf6, 0xda, 0xfb, 0x7d, 0x72, 0xd3, 0xeb, 0xcc, 0x0f, 0x9b, 0x2c, 0xa6, 0x78, 0xaa, 0xe1, 0xe7, 0x71, 0x1a, 0xdd, 0x50, 0x2a, 0xda, 0x99, 0x36, 0xf8, 0x31, 0x42, 0x12, 0x42, 0x98, 0x28, 0x3a, 0x0e, 0xc1, 0x5a, 0xca, 0xf3, 0x6e, 0x5e, 0xcb, 0xdb, 0xe3, 0x3c, 0x9c, 0x4b, 0x3b, 0x87, 0x64, 0x06, 0xa7, 0x2c, 0x54, 0x20, 0x72, 0x83, 0xff, 0x2a, 0x1a, 0xcc, 0x10, 0x7c, 0x80, 0x9a, 0x92, 0x8b, 0xa2, 0xff, 0x72, 0x35, 0xdc, 0x00, 0xf8, 0x18, 0xfd, 0x3f, 0xcb, 0xe2, 0x9d, 0x33, 0x75, 0x66, 0xad, 0x74, 0x1a, 0xd5, 0xce, 0xbc, 0x3d, 0x23, 0xdf, 0x32, 0x75, 0x86, 0x07, 0x68, 0x83, 0x2a, 0x25, 0xd8, 0x38, 0x55, 0xe0, 0x99, 0xdb, 0x62, 0x35, 0xab, 0xfa, 0x61, 0x43, 0x0f, 0x4a, 0x18, 0x1f, 0xa0, 0x95, 0x08, 0x94, 0x60, 0x13, 0x69, 0xad, 0x56, 0xf5, 0x29, 0x09, 0x7c, 0x88, 0x9a, 0x12, 0x82, 0x08, 0x62, 0x25, 0x2d, 0x54, 0x95, 0x36, 0x08, 0xee, 0xa1, 0x56, 0x36, 0x24, 0xde, 0x34, 0x2b, 0x4a, 0xab, 0x55, 0xd5, 0x01, 0x65, 0x54, 0xbe, 0x97, 0x58, 0xa0, 0x55, 0x73, 0x7d, 0xad, 0xb5, 0x7f, 0x18, 0xc9, 0x67, 0x54, 0xd1, 0x93, 0x8b, 0x04, 0x7e, 0x31, 0x92, 0x65, 0x49, 0xe7, 0xf6, 0xf5, 0x16, 0x3f, 0x44, 0xcd, 0xac, 0x9d, 0x97, 0x8a, 0xd0, 0x5a, 0xaf, 0x7a, 0x81, 0x56, 0x32, 0xe4, 0x8d, 0x08, 0xf1, 0x13, 0xd4, 0x62, 0xd2, 0x13, 0x90, 0x00, 0x55, 0xe0, 0x5b, 0xed, 0x8a, 0x03, 0xc9, 0xe4, 0x40, 0x23, 0x4e, 0xff, 0xca, 0x7d, 0x59, 0xf9, 0xe2, 0xe3, 0x9d, 0x60, 0x61, 0x2f, 0xed, 0x0f, 0x3f, 0x7f, 0x00, 0x3f, 0xf6, 0x3e, 0xd5, 0xd1, 0xee, 0x84, 0x47, 0xe4, 0x8f, 0x9f, 0xe6, 0xde, 0xc6, 0xa2, 0x7b, 0x3f, 0x8b, 0xdb, 0xaf, 0xbd, 0x3b, 0xd6, 0x64, 0xc0, 0x43, 0x1a, 0x07, 0x84, 0x8b, 0xc0, 0x0e, 0x20, 0xce, 0x7f, 0x8c, 0x3d, 0x8b, 0xf7, 0x9b, 0x7f, 0x8b, 0x03, 0xb3, 0xfa, 0x5c, 0x6f, 0x1c, 0xb9, 0xee, 0x97, 0xfa, 0x76, 0xd1, 0x89, 0xb8, 0xfe, 0xdc, 0x2b, 0x23, 0xc3, 0x2e, 0x31, 0xc3, 0xfb, 0xb5, 0xd4, 0x8c, 0x5c, 0x5f, 0x8e, 0x8c, 0x66, 0x34, 0xec, 0x8e, 0x8c, 0xe6, 0xaa, 0xbe, 0x5b, 0x14, 0x1c, 0xc7, 0xf5, 0xa5, 0xe3, 0x18, 0x95, 0xe3, 0x0c, 0xbb, 0x8e, 0x63, 
0x74, 0xe3, 0xe5, 0x3c, 0x6c, 0xf7, 0x47, 0x00, 0x00, 0x00, 0xff, 0xff, 0x30, 0x05, 0x60, 0x79, 0xd9, 0x06, 0x00, 0x00, }
gonadn/SPFxAngularCLIWithRouting
node_modules/office-ui-fabric-react/lib-es2015/components/TeachingBubble/TeachingBubble.js
<filename>node_modules/office-ui-fabric-react/lib-es2015/components/TeachingBubble/TeachingBubble.js import * as tslib_1 from "tslib"; /* tslint:disable:no-unused-variable */ import * as React from 'react'; /* tslint:enable:no-unused-variable */ import { BaseComponent, css } from '../../Utilities'; import { TeachingBubbleContent } from './TeachingBubbleContent'; import { Callout } from '../../Callout'; import * as stylesImport from './TeachingBubble.scss'; var styles = stylesImport; var TeachingBubble = /** @class */ (function (_super) { tslib_1.__extends(TeachingBubble, _super); // Constructor function TeachingBubble(props) { var _this = _super.call(this, props) || this; _this.state = {}; return _this; } TeachingBubble.prototype.render = function () { var _a = this.props, calloutProps = _a.calloutProps, targetElement = _a.targetElement; return (React.createElement(Callout, tslib_1.__assign({ className: css('ms-TeachingBubble', styles.root), ref: this._resolveRef('_callout'), target: targetElement }, calloutProps), React.createElement(TeachingBubbleContent, tslib_1.__assign({}, this.props)))); }; // Specify default props values TeachingBubble.defaultProps = { calloutProps: { beakWidth: 16, gapSpace: 0, setInitialFocus: true, doNotLayer: false, directionalHint: 12 /* rightCenter */ } }; return TeachingBubble; }(BaseComponent)); export { TeachingBubble }; //# sourceMappingURL=TeachingBubble.js.map
t-gergely/infinispan
server/core/src/main/java/org/infinispan/server/core/transport/NonRecursiveEventLoopGroup.java
package org.infinispan.server.core.transport;

import java.lang.invoke.MethodHandles;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

import org.infinispan.commons.CacheException;
import org.infinispan.commons.logging.LogFactory;
import org.infinispan.factories.annotations.Stop;
import org.infinispan.factories.scopes.Scope;
import org.infinispan.factories.scopes.Scopes;
import org.infinispan.server.core.logging.Log;
import org.infinispan.server.core.utils.DelegatingEventLoopGroup;

import io.netty.channel.EventLoopGroup;
import io.netty.channel.MultithreadEventLoopGroup;
import io.netty.util.concurrent.EventExecutor;
import io.netty.util.concurrent.Future;

/**
 * This event loop group prevents any tasks submitted via the {@link java.util.concurrent.ExecutorService#execute(Runnable)}},
 * {@link java.util.concurrent.ExecutorService#submit(Runnable)}, {@link java.util.concurrent.ExecutorService#submit(Callable)},
 * {@link java.util.concurrent.ExecutorService#submit(Runnable, Object)} API methods from
 * being ran on the same event loop that submitted it. This is to prevent issues where some operations block on
 * the result of another task (e.g. Cache creation).
 */
@Scope(Scopes.GLOBAL)
public class NonRecursiveEventLoopGroup extends DelegatingEventLoopGroup {
   private static final Log log = LogFactory.getLog(MethodHandles.lookup().lookupClass(), Log.class);
   // The wrapped group; guaranteed by the constructor to contain at least 2 executors so
   // getExecutorNotInEventLoop() can always find an executor other than the calling one.
   private final MultithreadEventLoopGroup eventLoopGroup;

   /**
    * Wraps the given group.
    *
    * @param eventLoopGroup the group to delegate to; must contain at least 2 executors,
    *                       otherwise there would be no "other" loop to hand tasks to
    * @throws IllegalArgumentException if the group has fewer than 2 executors
    */
   public NonRecursiveEventLoopGroup(MultithreadEventLoopGroup eventLoopGroup) {
      int executors = eventLoopGroup.executorCount();
      if (executors < 2) {
         throw new IllegalArgumentException("Provided multi threaded event loop group must have at least 2 executors, only has " + executors);
      }
      this.eventLoopGroup = eventLoopGroup;
   }

   @Override
   protected EventLoopGroup delegate() {
      return eventLoopGroup;
   }

   // The four ExecutorService-style entry points below all reroute the task to an event
   // loop other than the caller's, so a task can block on another submitted task without
   // deadlocking its own loop (see class javadoc).
   @Override
   public void execute(Runnable command) {
      getExecutorNotInEventLoop().execute(command);
   }

   @Override
   public Future<?> submit(Runnable task) {
      return getExecutorNotInEventLoop().submit(task);
   }

   @Override
   public <T> Future<T> submit(Callable<T> task) {
      return getExecutorNotInEventLoop().submit(task);
   }

   @Override
   public <T> Future<T> submit(Runnable task, T result) {
      return getExecutorNotInEventLoop().submit(task, result);
   }

   /**
    * Returns an executor from the wrapped group that is not the one the calling thread is
    * currently running on. Terminates because the group has >= 2 executors (checked in the
    * constructor), so repeated calls to {@code next()} eventually yield a different loop.
    */
   private EventExecutor getExecutorNotInEventLoop() {
      while (true) {
         EventExecutor eventExecutor = eventLoopGroup.next();
         // We don't want to submit a task to our current thread as some callers may block waiting for it to complete.
         if (eventExecutor.inEventLoop()) {
            log.tracef("Skipped submitting task to %s as it is the current event loop - trying another", eventExecutor);
            continue;
         }
         return eventExecutor;
      }
   }

   /**
    * Component lifecycle stop hook: shuts the group down gracefully and waits up to 10
    * seconds. Re-interrupts on InterruptedException; wraps other failures in CacheException
    * or a domain TimeoutException.
    */
   @Stop
   public void shutdownGracefullyAndWait() {
      try {
         shutdownGracefully().get(10, TimeUnit.SECONDS);
      } catch (InterruptedException e) {
         log.debug("Interrupted while waiting for event loop group to shut down");
         Thread.currentThread().interrupt();
      } catch (ExecutionException e) {
         throw new CacheException(e.getCause());
      } catch (TimeoutException e) {
         throw new org.infinispan.util.concurrent.TimeoutException("Timed out waiting for event loop group to shutdown", e);
      }
   }
}
vladimiratwork/znai
znai-website-gen/src/main/java/org/testingisdocumenting/znai/website/modifiedtime/ConstantPageModifiedTime.java
<reponame>vladimiratwork/znai<gh_stars>10-100 package org.testingisdocumenting.znai.website.modifiedtime; import org.testingisdocumenting.znai.structure.TocItem; import java.nio.file.Path; import java.time.Instant; public class ConstantPageModifiedTime implements PageModifiedTimeStrategy { private final Instant constantTime; public ConstantPageModifiedTime(Instant constantTime) { this.constantTime = constantTime; } @Override public Instant lastModifiedTime(TocItem tocItem, Path markupPath) { return constantTime; } }
d3scomp/JDEECo
jdeeco-ensembles-intelligent-z3/test/cz/cuni/mff/d3s/jdeeco/ensembles/intelligent/z3/StandaloneEnsemblesTest.java
<reponame>d3scomp/JDEECo
package cz.cuni.mff.d3s.jdeeco.ensembles.intelligent.z3;

import static org.hamcrest.CoreMatchers.containsString;
import static org.junit.Assert.assertThat;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.util.Collection;

import org.junit.Ignore;
import org.junit.Test;

import cz.cuni.mff.d3s.deeco.annotations.processor.AnnotationProcessorException;
import cz.cuni.mff.d3s.deeco.ensembles.EnsembleFactory;
import cz.cuni.mff.d3s.deeco.ensembles.EnsembleInstance;
import cz.cuni.mff.d3s.deeco.runtime.DEECoException;
import cz.cuni.mff.d3s.jdeeco.edl.validation.EdlValidationException;

/**
 * Exercises the Z3-backed EDL ensemble factory end to end: populates a knowledge container
 * with rescuers, fire fighters and train positions, forms ensembles, performs knowledge
 * exchange and (in silent mode) asserts the resulting train assignments.
 */
public class StandaloneEnsemblesTest {

	/** Command-line entry point: runs the scenario in verbose mode (output to stdout, no assertions). */
	public static void main(String[] args) throws InstantiationException, IllegalAccessException, DEECoException,
			AnnotationProcessorException, IOException, EdlValidationException {
		new StandaloneEnsemblesTest().testStandaloneEnsembles(false);
	}

	/** JUnit entry point: runs the scenario in silent mode and verifies the assignments. */
	@Test
	@Ignore
	public void testEnsembles() throws InstantiationException, IllegalAccessException, DEECoException,
			AnnotationProcessorException, IOException, EdlValidationException {
		testStandaloneEnsembles(true);
	}

	/**
	 * Runs the full scenario.
	 *
	 * @param silent when true, output is captured in a buffer and assertions are checked;
	 *               when false, output goes to stdout and nothing is asserted
	 */
	public void testStandaloneEnsembles(boolean silent) throws InstantiationException, IllegalAccessException,
			DEECoException, AnnotationProcessorException, IOException, EdlValidationException {
		ByteArrayOutputStream baos = new ByteArrayOutputStream();
		PrintStream outputStream;
		if (silent) {
			outputStream = new PrintStream(baos);
		} else {
			outputStream = System.out;
		}

		EnsembleFactory factory = null;
		try {
			factory = new pendolinoEdlFactory();
		} catch (Exception e) {
			if (!silent) {
				System.out.println("Validation errors encountered when parsing the document. ");
				System.out.println(e);
				return;
			} else
				throw e;
		}

		DataclassKnowledgeContainer container = new DataclassKnowledgeContainer();

		container.storeDataClass(new DataclassRescuer("0", 100));
		container.storeDataClass(new DataclassRescuer("1", 800));
		container.storeDataClass(new DataclassRescuer("2", 60));
		container.storeDataClass(new DataclassRescuer("3", 400));
		container.storeDataClass(new DataclassRescuer("4", 20));
		container.storeDataClass(new DataclassRescuer("5", 0));
		container.storeDataClass(new DataclassRescuer("6", 10));
		container.storeDataClass(new DataclassRescuer("7", 30));
		container.storeDataClass(new DataclassRescuer("8", 50));/*
		container.storeDataClass(new DataclassRescuer("9", 70));
		container.storeDataClass(new DataclassRescuer("10", 90));
		container.storeDataClass(new DataclassRescuer("11", 80));
		container.storeDataClass(new DataclassRescuer("12", 80));
		container.storeDataClass(new DataclassRescuer("13", 81));
		container.storeDataClass(new DataclassRescuer("14", 82));*/

		container.storeDataClass(new DataclassFireFighter("100", 10));
		container.storeDataClass(new DataclassFireFighter("101", 20));
		container.storeDataClass(new DataclassFireFighter("102", 40));/*
		container.storeDataClass(new DataclassFireFighter("103", 50));/* */
		/* container.storeDataClass(new DataclassFireFighter("104", 30));*/

		container.storeDataClass(new DataclassPosition(10, 20));
		container.storeDataClass(new DataclassPosition(50, 30));

		Collection<EnsembleInstance> formed = factory.createInstances(container);
		for (EnsembleInstance instance : formed) {
			instance.performKnowledgeExchange();
		}

		// Report the solver's train assignment for every rescuer.
		for (Rescuer rescuer : container.getTrackedKnowledgeForRole(Rescuer.class)) {
			if (rescuer.trainId > 0) {
				outputStream.printf("Rescuer %s: train %d", rescuer.id, rescuer.trainId);
				if (rescuer.isLeader) {
					outputStream.print(" (leader)");
				}
				outputStream.println();
			} else {
				outputStream.printf("Rescuer %s: unassigned\n", rescuer.id);
			}
		}

		// Report the solver's train assignment for every fire fighter.
		for (FireFighter fighter : container.getTrackedKnowledgeForRole(FireFighter.class)) {
			if (fighter.trainId > 0) {
				outputStream.printf("FireFighter %s: train %d pos %d\n", fighter.id, fighter.trainId, fighter.pos);
			} else {
				outputStream.printf("FireFighter %s: unassigned pos %d\n", fighter.id, fighter.pos);
			}
		}

		if (silent) {
			// BUG FIX: PrintStream buffers character-encoded output internally; without an
			// explicit flush, baos.toString() below could observe an empty or partial buffer.
			outputStream.flush();
			assertThat(baos.toString(), containsString("Rescuer 0: train 2"));
			assertThat(baos.toString(), containsString("Rescuer 1: train 2"));
			assertThat(baos.toString(), containsString("Rescuer 2: train 1"));
			assertThat(baos.toString(), containsString("Rescuer 3: train 2"));
			assertThat(baos.toString(), containsString("Rescuer 4: train 1"));
			assertThat(baos.toString(), containsString("Rescuer 5: train 1"));
		}
	}
}
zhouhaifeng/vpe
src/frr/zebra/zebra_gr.c
<reponame>zhouhaifeng/vpe /* * Zebra GR related helper functions. * * Portions: * Copyright (C) 2019 VMware, Inc. * et al. * * This program is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License as published by the Free * Software Foundation; either version 2 of the License, or (at your option) * any later version. * * This program is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for * more details. * * You should have received a copy of the GNU General Public License along * with this program; see the file COPYING; if not, write to the Free Software * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA */ #include <zebra.h> #include <libgen.h> #include "lib/prefix.h" #include "lib/command.h" #include "lib/if.h" #include "lib/thread.h" #include "lib/stream.h" #include "lib/memory.h" #include "lib/table.h" #include "lib/network.h" #include "lib/sockunion.h" #include "lib/log.h" #include "lib/zclient.h" #include "lib/privs.h" #include "lib/network.h" #include "lib/buffer.h" #include "lib/nexthop.h" #include "lib/vrf.h" #include "lib/libfrr.h" #include "lib/sockopt.h" #include "zebra/zebra_router.h" #include "zebra/debug.h" #include "zebra/zapi_msg.h" /* * Forward declaration. */ static struct zserv *zebra_gr_find_stale_client(struct zserv *client); static int32_t zebra_gr_route_stale_delete_timer_expiry(struct thread *thread); static int32_t zebra_gr_delete_stale_routes(struct client_gr_info *info); static void zebra_gr_process_client_stale_routes(struct zserv *client, vrf_id_t vrf_id); /* * Debug macros. */ #define LOG_GR(msg, ...) 
\ do { \ if (IS_ZEBRA_DEBUG_EVENT) \ zlog_debug(msg, ##__VA_ARGS__); \ } while (0) /* * Client connection functions */ /* * Function to clean all the stale clients, * function will also clean up all per instance * capabilities that are exchanged. */ void zebra_gr_stale_client_cleanup(struct list *client_list) { struct listnode *node, *nnode; struct zserv *s_client = NULL; struct client_gr_info *info, *ninfo; /* Find the stale client */ for (ALL_LIST_ELEMENTS(client_list, node, nnode, s_client)) { LOG_GR("%s: Stale client %s is being deleted", __func__, zebra_route_string(s_client->proto)); TAILQ_FOREACH_SAFE (info, &s_client->gr_info_queue, gr_info, ninfo) { /* Cancel the stale timer */ if (info->t_stale_removal != NULL) { THREAD_OFF(info->t_stale_removal); info->t_stale_removal = NULL; /* Process the stale routes */ thread_execute( zrouter.master, zebra_gr_route_stale_delete_timer_expiry, info, 1); } } } } /* * A helper function to create client info. */ static struct client_gr_info *zebra_gr_client_info_create(struct zserv *client) { struct client_gr_info *info; info = XCALLOC(MTYPE_TMP, sizeof(struct client_gr_info)); TAILQ_INSERT_TAIL(&(client->gr_info_queue), info, gr_info); return info; } /* * A helper function to delete and destroy client info. */ static void zebra_gr_client_info_delte(struct zserv *client, struct client_gr_info *info) { TAILQ_REMOVE(&(client->gr_info_queue), info, gr_info); THREAD_OFF(info->t_stale_removal); XFREE(MTYPE_TMP, info->current_prefix); LOG_GR("%s: Instance info is being deleted for client %s", __func__, zebra_route_string(client->proto)); /* Delete all the stale routes. */ info->do_delete = true; zebra_gr_delete_stale_routes(info); XFREE(MTYPE_TMP, info); } /* * Function to handle client when it disconnect. 
 */
/* Called when a GR-capable client's session drops: records the restart time,
 * arms a stale-route removal timer per GR-enabled instance and parks the
 * client on zrouter.stale_client_list until it reconnects or times out. */
int32_t zebra_gr_client_disconnect(struct zserv *client)
{
	struct zserv *stale_client;
	struct timeval tv;
	struct client_gr_info *info = NULL;

	/* Find the stale client */
	stale_client = zebra_gr_find_stale_client(client);

	/*
	 * We should never be here.
	 */
	if (stale_client) {
		LOG_GR("%s: Stale client %s exist, we should not be here!",
		       __func__, zebra_route_string(client->proto));
		assert(0);
	}

	client->restart_time = monotime(&tv);

	/* For all the GR instance start the stale removal timer. */
	TAILQ_FOREACH (info, &client->gr_info_queue, gr_info) {
		if (ZEBRA_CLIENT_GR_ENABLED(info->capabilities)
		    && (info->t_stale_removal == NULL)) {
			thread_add_timer(
				zrouter.master,
				zebra_gr_route_stale_delete_timer_expiry, info,
				info->stale_removal_time,
				&info->t_stale_removal);
			/* Route walk resumes from AFI_IP; current_afi tracks
			 * progress across timer invocations. */
			info->current_afi = AFI_IP;
			info->stale_client_ptr = client;
			info->stale_client = true;
			LOG_GR("%s: Client %s Stale timer update to %d",
			       __func__, zebra_route_string(client->proto),
			       info->stale_removal_time);
		}
	}

	listnode_add(zrouter.stale_client_list, client);

	return 0;
}

/*
 * Function to delete stale client
 */
/* Releases one GR instance's info; the stale zserv itself is freed only once
 * no other instance of the same client still has a removal timer pending. */
static void zebra_gr_delete_stale_client(struct client_gr_info *info)
{
	struct client_gr_info *bgp_info;
	struct zserv *s_client = NULL;

	s_client = info->stale_client_ptr;

	if (!s_client || !info->stale_client)
		return;

	/*
	 * If there are bgp instances with the stale delete timer pending
	 * then stale client is not deleted
	 */
	if ((s_client->gr_instance_count > 0) && info->gr_enable)
		s_client->gr_instance_count--;

	TAILQ_REMOVE(&(s_client->gr_info_queue), info, gr_info);

	LOG_GR("%s: Client %s gr count %d", __func__,
	       zebra_route_string(s_client->proto),
	       s_client->gr_instance_count);

	TAILQ_FOREACH (bgp_info, &s_client->gr_info_queue, gr_info) {
		/* Another instance still waiting on its timer: keep client. */
		if (bgp_info->t_stale_removal != NULL)
			return;
	}

	LOG_GR("%s: Client %s is being deleted", __func__,
	       zebra_route_string(s_client->proto));

	TAILQ_INIT(&(s_client->gr_info_queue));
	listnode_delete(zrouter.stale_client_list, s_client);
	if (info->stale_client)
		XFREE(MTYPE_TMP, s_client);
	XFREE(MTYPE_TMP, info);
}

/*
 * Function to find stale client.
 */
/* Linear scan of zrouter.stale_client_list keyed on (proto, instance). */
static struct zserv *zebra_gr_find_stale_client(struct zserv *client)
{
	struct listnode *node, *nnode;
	struct zserv *stale_client;

	/* Find the stale client */
	for (ALL_LIST_ELEMENTS(zrouter.stale_client_list, node, nnode,
			       stale_client)) {
		if (client->proto == stale_client->proto
		    && client->instance == stale_client->instance) {
			return stale_client;
		}
	}

	return NULL;
}

/*
 * Function to handle reconnect of client post restart.
 */
/* Moves GR state (instance count, restart time, per-instance info queue) from
 * the parked stale zserv onto the freshly connected one, then frees the old. */
void zebra_gr_client_reconnect(struct zserv *client)
{
	struct listnode *node, *nnode;
	struct zserv *old_client = NULL;
	struct client_gr_info *info = NULL;

	/* Find the stale client */
	for (ALL_LIST_ELEMENTS(zrouter.stale_client_list, node, nnode,
			       old_client)) {
		if (client->proto == old_client->proto
		    && client->instance == old_client->instance)
			break;
	}

	/* Copy the timers */
	if (!old_client)
		return;

	client->gr_instance_count = old_client->gr_instance_count;
	client->restart_time = old_client->restart_time;

	LOG_GR("%s : old client %s, gr_instance_count %d", __func__,
	       zebra_route_string(old_client->proto),
	       old_client->gr_instance_count);

	if (TAILQ_FIRST(&old_client->gr_info_queue)) {
		TAILQ_CONCAT(&client->gr_info_queue,
			     &old_client->gr_info_queue, gr_info);
		TAILQ_INIT(&old_client->gr_info_queue);
	}

	/* Re-point each instance at the live client so pending stale-removal
	 * timers operate on the new session, not the freed one. */
	TAILQ_FOREACH (info, &client->gr_info_queue, gr_info) {
		info->stale_client_ptr = client;
		info->stale_client = false;
	}

	/* Delete the stale client */
	listnode_delete(zrouter.stale_client_list, old_client);

	/* Delete old client */
	XFREE(MTYPE_TMP, old_client);
}

/*
 * Functions to deal with capabilities
 */

/*
 * Update the graceful restart information
 * for the client instance.
 * This function handles all the capabilities that are received.
*/ static void zebra_client_update_info(struct zserv *client, struct zapi_cap *api) { struct client_gr_info *info = NULL; /* Find the bgp information for the specified vrf id */ TAILQ_FOREACH (info, &client->gr_info_queue, gr_info) { if (info->vrf_id == api->vrf_id) break; } /* * If the command is delete, then cancel the stale timer and * delete the bgp info */ switch (api->cap) { case ZEBRA_CLIENT_GR_DISABLE: if (!info) return; LOG_GR("%s: Client %s instance GR disabled count %d", __func__, zebra_route_string(client->proto), client->gr_instance_count); if ((info->gr_enable) && (client->gr_instance_count > 0)) client->gr_instance_count--; zebra_gr_client_info_delte(client, info); break; case ZEBRA_CLIENT_GR_CAPABILITIES: /* Allocate bgp info */ if (!info) info = zebra_gr_client_info_create(client); /* Update other parameters */ if (!info->gr_enable) { client->gr_instance_count++; LOG_GR("%s: Cient %s GR enabled count %d", __func__, zebra_route_string(client->proto), client->gr_instance_count); info->capabilities = api->cap; info->stale_removal_time = api->stale_removal_time; info->vrf_id = api->vrf_id; info->gr_enable = true; } break; case ZEBRA_CLIENT_RIB_STALE_TIME: LOG_GR("%s: Client %s stale time update event", __func__, zebra_route_string(client->proto)); /* Update the stale removal timer */ if (info && info->t_stale_removal == NULL) { LOG_GR("%s: Stale time: %d is now update to: %d", __func__, info->stale_removal_time, api->stale_removal_time); info->stale_removal_time = api->stale_removal_time; } break; case ZEBRA_CLIENT_ROUTE_UPDATE_COMPLETE: LOG_GR( "%s: Client %s route update complete for AFI %d, SAFI %d", __func__, zebra_route_string(client->proto), api->afi, api->safi); if (info) info->route_sync[api->afi][api->safi] = true; break; case ZEBRA_CLIENT_ROUTE_UPDATE_PENDING: LOG_GR("%s: Client %s route update pending for AFI %d, SAFI %d", __func__, zebra_route_string(client->proto), api->afi, api->safi); if (info) info->af_enabled[api->afi][api->safi] = 
true; break; } } /* * Handler for capabilities that are received from client. */ static void zebra_client_capabilities_handler(struct zserv *client, struct zapi_cap *api) { switch (api->cap) { case ZEBRA_CLIENT_GR_CAPABILITIES: case ZEBRA_CLIENT_ROUTE_UPDATE_PENDING: case ZEBRA_CLIENT_GR_DISABLE: case ZEBRA_CLIENT_RIB_STALE_TIME: /* * For all the cases we need to update the client info. */ zebra_client_update_info(client, api); break; case ZEBRA_CLIENT_ROUTE_UPDATE_COMPLETE: /* * After client info has been updated delete all * stale routes */ zebra_client_update_info(client, api); zebra_gr_process_client_stale_routes(client, api->vrf_id); break; } } /* * Function to decode and call appropriate functions * to handle client capabilities. */ void zread_client_capabilities(ZAPI_HANDLER_ARGS) { struct zapi_cap api; struct stream *s; s = msg; if (zapi_capabilities_decode(s, &api)) { LOG_GR("%s: Error in reading capabilities for client %s", __func__, zebra_route_string(client->proto)); return; } /* GR only for dynamic clients */ if (client->proto <= ZEBRA_ROUTE_CONNECT) { LOG_GR("%s: GR capabilities for client %s not supported", __func__, zebra_route_string(client->proto)); return; } /* Call the capabilities handler */ zebra_client_capabilities_handler(client, &api); } /* * Stale route handling */ /* * Delete all the stale routes that have not been refreshed * post restart. */ static int32_t zebra_gr_route_stale_delete_timer_expiry(struct thread *thread) { struct client_gr_info *info; int32_t cnt = 0; struct zserv *client; info = THREAD_ARG(thread); info->t_stale_removal = NULL; client = (struct zserv *)info->stale_client_ptr; /* Set the flag to indicate all stale route deletion */ if (thread->u.val == 1) info->do_delete = true; cnt = zebra_gr_delete_stale_routes(info); /* Restart the timer */ if (cnt > 0) { LOG_GR("%s: Client %s processed %d routes. 
Start timer again", __func__, zebra_route_string(client->proto), cnt); thread_add_timer(zrouter.master, zebra_gr_route_stale_delete_timer_expiry, info, ZEBRA_DEFAULT_STALE_UPDATE_DELAY, &info->t_stale_removal); } else { /* No routes to delete for the VRF */ LOG_GR("%s: Client %s all stale routes processed", __func__, zebra_route_string(client->proto)); XFREE(MTYPE_TMP, info->current_prefix); info->current_afi = 0; zebra_gr_delete_stale_client(info); } return 0; } /* * Function to process to check if route entry is stale * or has been updated. */ static void zebra_gr_process_route_entry(struct zserv *client, struct route_node *rn, struct route_entry *re) { if ((client == NULL) || (rn == NULL) || (re == NULL)) return; /* If the route is not refreshed after restart, delete the entry */ if (re->uptime < client->restart_time) { if (IS_ZEBRA_DEBUG_RIB) zlog_debug("%s: Client %s stale route %pFX is deleted", __func__, zebra_route_string(client->proto), &rn->p); rib_delnode(rn, re); } } /* * This function walks through the route table for all vrf and deletes * the stale routes for the restarted client specified by the protocol * type */ static int32_t zebra_gr_delete_stale_route(struct client_gr_info *info, struct zebra_vrf *zvrf) { struct route_node *rn, *curr; struct route_entry *re; struct route_entry *next; struct route_table *table; int32_t n = 0; afi_t afi, curr_afi; uint8_t proto; uint16_t instance; struct zserv *s_client; if ((info == NULL) || (zvrf == NULL)) return -1; s_client = info->stale_client_ptr; if (s_client == NULL) { LOG_GR("%s: Stale client not present", __func__); return -1; } proto = s_client->proto; instance = s_client->instance; curr_afi = info->current_afi; LOG_GR("%s: Client %s stale routes are being deleted", __func__, zebra_route_string(proto)); /* Process routes for all AFI */ for (afi = curr_afi; afi < AFI_MAX; afi++) { table = zvrf->table[afi][SAFI_UNICAST]; if (table) { /* * If the current prefix is NULL then get the first * route entry in 
the table */ if (info->current_prefix == NULL) { rn = route_top(table); if (rn == NULL) continue; curr = rn; } else /* Get the next route entry */ curr = route_table_get_next( table, info->current_prefix); for (rn = curr; rn; rn = srcdest_route_next(rn)) { RNODE_FOREACH_RE_SAFE (rn, re, next) { if (CHECK_FLAG(re->status, ROUTE_ENTRY_REMOVED)) continue; /* If the route refresh is received * after restart then do not delete * the route */ if (re->type == proto && re->instance == instance) { zebra_gr_process_route_entry( s_client, rn, re); n++; } /* If the max route count is reached * then timer thread will be restarted * Store the current prefix and afi */ if ((n >= ZEBRA_MAX_STALE_ROUTE_COUNT) && (info->do_delete == false)) { info->current_afi = afi; info->current_prefix = XCALLOC( MTYPE_TMP, sizeof(struct prefix)); prefix_copy( info->current_prefix, &rn->p); return n; } } } } /* * Reset the current prefix to indicate processing completion * of the current AFI */ XFREE(MTYPE_TMP, info->current_prefix); } return 0; } /* * Delete the stale routes when client is restarted and routes are not * refreshed within the stale timeout */ static int32_t zebra_gr_delete_stale_routes(struct client_gr_info *info) { struct vrf *vrf; struct zebra_vrf *zvrf; uint64_t cnt = 0; if (info == NULL) return -1; /* Get the current VRF */ vrf = vrf_lookup_by_id(info->vrf_id); if (vrf == NULL) { LOG_GR("%s: Invalid VRF %d", __func__, info->vrf_id); return -1; } zvrf = vrf->info; if (zvrf == NULL) { LOG_GR("%s: Invalid VRF entry %d", __func__, info->vrf_id); return -1; } cnt = zebra_gr_delete_stale_route(info, zvrf); return cnt; } /* * This function checks if route update for all AFI, SAFI is completed * and cancels the stale timer */ static void zebra_gr_process_client_stale_routes(struct zserv *client, vrf_id_t vrf_id) { struct client_gr_info *info = NULL; afi_t afi; safi_t safi; TAILQ_FOREACH (info, &client->gr_info_queue, gr_info) { if (info->vrf_id == vrf_id) break; } if (info == NULL) 
return; /* Check if route update completed for all AFI, SAFI */ for (afi = AFI_IP; afi < AFI_MAX; afi++) for (safi = SAFI_UNICAST; safi <= SAFI_MPLS_VPN; safi++) { if (info->af_enabled[afi][safi]) { if (!info->route_sync[afi][safi]) { LOG_GR( "%s: Client %s route update not completed for AFI %d, SAFI %d", __func__, zebra_route_string( client->proto), afi, safi); return; } } } /* * Route update completed for all AFI, SAFI * Cancel the stale timer and process the routes */ if (info->t_stale_removal) { LOG_GR("%s: Client %s canceled stale delete timer vrf %d", __func__, zebra_route_string(client->proto), info->vrf_id); THREAD_OFF(info->t_stale_removal); thread_execute(zrouter.master, zebra_gr_route_stale_delete_timer_expiry, info, 0); } }
communitysoft/gather
db/migrate/20180818183634_add_menu_posted_at_to_meals.rb
<filename>db/migrate/20180818183634_add_menu_posted_at_to_meals.rb # frozen_string_literal: true class AddMenuPostedAtToMeals < ActiveRecord::Migration[5.1] def change add_column :meals, :menu_posted_at, :datetime end end
cpawley/ostap
ostap/math/models.py
<gh_stars>0 #!/usr/bin/env python # -*- coding: utf-8 -*- # ============================================================================= ## @file ostap/math/models.py # Module with some useful utilities for simple functions and fit models. # @author <NAME> <EMAIL> # @date 2011-12-01 # ============================================================================= """Module with some useful fit-models""" # ============================================================================= __version__ = "$Revision$" __author__ = "<NAME> <EMAIL>" __date__ = "2011-12-01" __all__ = () # ============================================================================= import ROOT from ostap.core.core import cpp, Ostap, funID # ============================================================================= # logging # ============================================================================= from ostap.logger.logger import getLogger if '__main__' == __name__ : logger = getLogger ( 'ostap.math.models' ) else : logger = getLogger ( __name__ ) # ============================================================================= # helper adapter for 1D-functions class _WO1_ (object) : "Helper adapter for 1D-functions" def __init__ ( self , o ) : self._o = o def __call__ ( self , x , pars = [] ) : return self._o ( x [0] ) # ============================================================================= # helper adapter for 2D-functions class _WO2_ (object) : "Helper adapter for 2D-functions" def __init__ ( self , o ) : self._o = o def __call__ ( self , x , pars = [] ) : return self._o ( x [0] , x[1] ) # ============================================================================= # helper adapter for 3D-functions class _WO3_ (object) : "Helper adapter for 2D-functions" def __init__ ( self , o ) : self._o = o def __call__ ( self , x , pars = [] ) : return self._o ( x [0] , x[1] , x[2] ) # ============================================================================= pos_infinity = float('+inf') 
neg_infinity = float('-inf') # ============================================================================= ## convert the model into TF1 def _tf1_ ( self , xmin = neg_infinity , xmax = pos_infinity , npars = 0 , *args ) : """Convert the function to TF1 >>> obj = ... >>> fun = obj.tf1 ( 3.0 , 3.2 ) >>> fun.Draw() """ # if not hasattr ( self , '_wo1' ) : self._wo1 = _WO1_ ( self ) if not self._wo1 : self._wo1 = _WO1_ ( self ) # if hasattr ( self , 'xmin' ) : xmin = max ( xmin , self.xmin () ) if hasattr ( self , 'xmax' ) : xmax = min ( xmax , self.xmax () ) if hasattr ( self , 'npars' ) : npars = max ( npars , self.npars() ) # assert xmin > neg_infinity, \ "``xmin''-parameter needs to be specified %s" % xmin assert xmax < pos_infinity, \ "``xmax''-parameter needs to be specified %s" % xmax _wo = self._wo1 fun = ROOT.TF1 ( funID() , _wo , xmin , xmax , npars, *args ) fun.SetNpx ( 500 ) # return fun # ============================================================================= ## convert the model into TF2 def _tf2_ ( self , xmin = neg_infinity , xmax = pos_infinity , ymin = neg_infinity , ymax = pos_infinity , npars = 0 , *args ) : """Convert the function to TF2 >>> obj = ... 
>>> fun = obj.tf2 ( 3.0 , 3.2 , 3.0 , 3.2 ) >>> fun.Draw() """ ## if not hasattr ( self , '_wo2' ) : self._wo2 = _WO2_ ( self ) if not self._wo2 : self._wo2 = _WO2_ ( self ) ## if hasattr ( self , 'xmin' ) : xmin = max ( xmin , self.xmin () ) if hasattr ( self , 'xmax' ) : xmax = min ( xmax , self.xmax () ) if hasattr ( self , 'ymin' ) : ymin = max ( ymin , self.ymin () ) if hasattr ( self , 'ymax' ) : ymax = min ( ymax , self.ymax () ) if hasattr ( self , 'npars' ) : npars = max ( npars , self.npars() ) ## assert xmin > neg_infinity, \ "``xmin''-parameter needs to be specified %s" % xmin assert xmax < pos_infinity, \ "``xmax''-parameter needs to be specified %s" % xmax assert ymin > neg_infinity, \ "``ymin''-parameter needs to be specified %s" % ymin assert ymax < pos_infinity, \ "``ymax''-parameter needs to be specified %s" % ymax ## _wo = self._wo2 fun = ROOT.TF2 ( funID () , _wo , xmin , xmax , ymin , ymax , npars , *args ) fun.SetNpx ( 100 ) fun.SetNpy ( 100 ) # return fun # ============================================================================= ## convert the model into TF3 def _tf3_ ( self , xmin = neg_infinity , xmax = pos_infinity , ymin = neg_infinity , ymax = pos_infinity , zmin = neg_infinity , zmax = pos_infinity , npars = 0 , *args ) : """Convert the function to TF3 >>> obj = ... 
>>> fun = obj.tf3 ( 3.0 , 3.2 , 3.0 , 3.2 , 1 , 2 ) >>> fun.Draw() """ ## if not hasattr ( self , '_wo3' ) : self._wo3 = _WO3_ ( self ) if not self._wo3 : self._wo3 = _WO3_ ( self ) ## if hasattr ( self , 'xmin' ) : xmin = max ( xmin , self.xmin () ) if hasattr ( self , 'xmax' ) : xmax = min ( xmax , self.xmax () ) if hasattr ( self , 'ymin' ) : ymin = max ( ymin , self.ymin () ) if hasattr ( self , 'ymax' ) : ymax = min ( ymax , self.ymax () ) if hasattr ( self , 'zmin' ) : zmin = max ( zmin , self.zmin () ) if hasattr ( self , 'zmax' ) : zmax = min ( zmax , self.zmax () ) if hasattr ( self , 'npars' ) : npars = max ( npars , self.npars() ) # assert xmin > neg_infinity, \ "``xmin''-parameter needs to be specified %s" % xmin assert xmax < pos_infinity, \ "``xmax''-parameter needs to be specified %s" % xmax assert ymin > neg_infinity, \ "``ymin''-parameter needs to be specified %s" % ymin assert ymax < pos_infinity, \ "``ymax''-parameter needs to be specified %s" % ymax assert zmin > neg_infinity, \ "``zmin''-parameter needs to be specified %s" % zmin assert zmax < pos_infinity, \ "``zmax''-parameter needs to be specified %s" % zmax # _wo = self._wo3 fun = ROOT.TF3 ( funID () , _wo , xmin , xmax , ymin , ymax , zmin , zmax , npars , *args ) fun.SetNpx ( 40 ) fun.SetNpy ( 40 ) fun.SetNpy ( 40 ) # return fun # ============================================================================= ## draw the function def _f1_draw_ ( self , opts ='' , *args , **kwargs ) : """Drawing the function object through conversion to ROOT.TF1 >>> fun = ... 
>>> fun.draw() """ if not hasattr ( self , '_tf1' ) : self._tf1 = _tf1_ ( self , *args , **kwargs ) self._tf1_args = tuple ( args ) self._tf1_kwargs = dict ( kwargs ) if type(self) in ( Ostap.Math.Positive , Ostap.Math.PositiveEven , Ostap.Math.Monotonic , Ostap.Math.Convex , Ostap.Math.ConvexOnly , Ostap.Math.PositiveSpline , Ostap.Math.MonotonicSpline , Ostap.Math.ConvexSpline , Ostap.Math.ConvexOnlySpline , Ostap.Math.ExpoPositive , Ostap.Math.TwoExpoPositive ) : self._tf1.SetMinimum(0) return self._tf1.Draw ( opts ) # ============================================================================= ## get the regular complex value for amplitude def _amp_ ( self , x ) : """ Get the complex value for amplitude >>> fun >>> a = fun.amp ( x ) """ v = self.amplitude ( x ) return complex( v.real () , v.imag () ) Ostap.Math.LASS . amp = _amp_ Ostap.Math.LASS23L . amp = _amp_ Ostap.Math.Bugg23L . amp = _amp_ Ostap.Math.Flatte . amp = _amp_ Ostap.Math.Flatte2 . amp = _amp_ Ostap.Math.Flatte23L . amp = _amp_ Ostap.Math.BreitWignerBase . amp = _amp_ Ostap.Math.Swanson . amp = _amp_ # ============================================================================= ## get min/max values for bernstein polynomials # @code # p = ... # mn,mx = p.minmax() # @endcode # The values are guaranteed that # mn <= p(x) <= mx for all xmin <= x <= xmax def _b_minmax_ ( bp ) : """Get min/max values for bernstein polynomials >>> p = ... >>> mn,mx = p.minmax() The values are such that: mn <= p(x) <= mx for all x_min<=x<x_max """ b = bp.bernstein() pars = b .pars() mn = min ( pars ) mx = max ( pars ) return mn , mx # ============================================================================== ## get the maximal value for bernstein polynomial: # @code # p = ... # mx = p.max() # @endcode # The values are guaranteed that # p(x) <= mx for all xmin <= x <= xmax def _b_max_ ( bp ) : """Get max values for bernstein polynomials >>> p = ... 
>>> mx = p.max() The value is such that: p(x) <= mx for all x_min<=x<x_max """ b = bp.bernstein() pars = b .pars() return max ( pars ) # ============================================================================== ## get the minimal value for bernstein polynomial: # @code # p = ... # mn = p.min() # @endcode # The values are guaranteed that # mn <= p(x) for all xmin <= x <= xmax def _b_min_ ( bp ) : """Get min values for bernstein polynomials >>> p = ... >>> mn = p.min() The value is such that: mn <= p(x) for all x_min<=x<x_max """ b = bp.bernstein() pars = b .pars() return min ( pars ) for t in ( Ostap.Math.Bernstein , Ostap.Math.BernsteinEven ) : if not hasattr ( t , 'min' ) : t.min = _b_min_ if not hasattr ( t , 'max' ) : t.max = _b_max_ if not hasattr ( t , 'minmax' ) : t.minmax = _b_minmax_ # ============================================================================= # ============================================================================= ## get min/max values for derived bernstein polynomials # @code # p = ... # mn,mx = p.minmax() # @endcode # The values are guaranteed that # mn <= p(x) <= mx for all xmin <= x <= xmax def _p_minmax_ ( p ) : """Get min/max values for derived bernstein polynomials >>> p = ... >>> mn,mx = p.minmax() The values are such that: mn <= p(x) <= mx for all x_min<=x<x_max """ b = p .bernstein() pars = b .pars() mn = min ( pars ) mx = max ( pars ) return max ( mn , 0 ) , mx # ============================================================================== ## get the minimal value for derived bernstein polynomial: # @code # p = ... # mn = p.min() # @endcode # The values are guaranteed that # mn <= p(x) for all xmin <= x <= xmax def _p_min_ ( bp ) : """Get min values for derived bernstein polynomials >>> p = ... 
>>> mn = p.min() The value is such that: mn <= p(x) for all x_min<=x<x_max """ b = bp.bernstein() pars = b .pars() return max ( min ( pars ) , 0 ) for t in ( Ostap.Math.Positive , Ostap.Math.Monotonic , Ostap.Math.Convex , Ostap.Math.ConvexOnly ) : if not hasattr ( t , 'min' ) : t.min = _p_min_ if not hasattr ( t , 'max' ) : t.max = _b_max_ ## ATTENTION: "b" is here if not hasattr ( t , 'minmax' ) : t.minmax = _p_minmax_ # ============================================================================= ## try to get max-value using existing mode function # @code # f = ... # mx = f.max() # @endcode def _f_max_mode_ ( f ) : """Get the max-value (using exising mode function) >>> f = ... >>> mx = f.max() """ return f ( f.mode() ) # ============================================================================= ## make 1D- numerical integration # @author <NAME> <EMAIL> # @date 2014-12-01 def sp_integrate_1D ( func , xmin , xmax , *args , **kwargs ) : """Make 1D numerical integration >>> func = ... >>> print func.sp_integrate ( -10 , 10 ) """ from ostap.math.integral import integral as _integral return _integral ( func , xmin , xmax , *args , **kwargs ) # ============================================================================= ## make 2D numerical integration # @author <NAME> <EMAIL> # @date 2014-12-01 def sp_integrate_2D ( func , xmin , xmax , ymin , ymax , *args , **kwargs ) : """Make 2D numerical integration >>> func = ... ## func ( x , y ) ## xmin , xmax , ymin , ymax >>> print func.sp_integrate ( -10 , 10 , -20 , 20 ) """ from ostap.math.integral import integral2 as _integral2 return _integral2 ( func , xmin , xmax , ymin , ymax , *args , **kwargs ) # ============================================================================= ## make 1D numerical integration # @author <NAME> <EMAIL> # @date 2014-12-01 def sp_integrate_2Dx ( func , y , xmin , xmax , *args , **kwargs ) : """Make 1D numerical integration over x-axis >>> func = ... 
## func ( x , y ) ## y , xmin , xmax >>> print func.sp_integrate_x ( 0.5 , -20 , 20 ) """ def _func_ ( p , *args ) : return func ( p , y , *args ) from ostap.math.integral import integral as _integral return _integral ( _func_ , xmin , xmax , *args , **kwargs ) # ============================================================================= ## make 1D numerical integration # @author <NAME> <EMAIL> # @date 2014-12-01 def sp_integrate_2Dy ( func , x , ymin , ymax , *args , **kwargs ) : """Make 1D numerical integration over y-axis >>> func = ... ## func ( x , y ) ## x , ymin , ymax >>> print func.sp_integrate_y ( 0.5 , -20 , 20 ) """ def _func_ ( p , *args ) : return func ( x , p , *args ) from ostap.math.integral import integral as _integral return _integral ( _func_ , ymin , ymax , *args , **kwargs ) # ============================================================================= ## make 3D numerical integration # @author <NAME> <EMAIL> # @date 2014-12-01 def sp_integrate_3D ( func , xmin , xmax , ymin , ymax , zmin , zmax , *args , **kwargs ) : """Make 3D numerical integration >>> func = ... ## func ( x , y , z ) ## xmin , xmax , ymin , ymax zmin zmax >>> print func.sp_integrate ( -10 , 10 , -20 , 20 , -1 , 1 ) """ from ostap.math.integral import integral2 as _integral3 return _integral3 ( func , xmin , xmax , ymin , ymax , zmin , zmax , *args , **kwargs ) # ============================================================================= ## make 1D numerical integration # @author <NAME> <EMAIL> # @date 2014-12-01 def sp_integrate_3Dx ( func , y , z , xmin , xmax , *args , **kwargs ) : """Make 1D numerical integration over x-axis >>> func = ... 
## func ( x , y , z ) ## y , z , xmin , xmax >>> print func.sp_integrate_x ( 0.5 , 0.1 , -20 , 20 ) """ def _func_ ( p , *args ) : return func ( p , y , z , *args ) from ostap.math.integral import integral as _integral return _integral ( _func_ , xmin , xmax , *args , **kwargs ) # ============================================================================= ## make 1D numerical integration # @author <NAME> <EMAIL> # @date 2014-12-01 def sp_integrate_3Dy ( func , x , z , ymin , ymax , *args , **kwargs ) : """Make 1D numerical integration over y-axis >>> func = ... ## func ( x , y , z ) ## x , z , ymin , ymax >>> print func.sp_integrate_y ( 0.5 , 0.1 , -20 , 20 ) """ def _func_ ( p , *args ) : return func ( x , p , z , *args ) from ostap.math.integral import integral as _integral return _integral ( _func_ , ymin , ymax , *args , **kwargs ) # ============================================================================= ## make 1D numerical integration # @author <NAME> <EMAIL> # @date 2014-12-01 def sp_integrate_3Dz ( func , x , y , zmin , zmax , *args , **kwargs ) : """Make 1D numerical integration over z-axis >>> func = ... ## func ( x , y , z ) ## x , y , zmin , zmax >>> print func.sp_integrate_y ( 0.5 , 0.1 , -20 , 20 ) """ def _func_ ( p , *args ) : return func ( x , y , p , *args ) from ostap.math.integral import integral as _integral return _integral ( _func_ , zmin , zmax , *args , **kwargs ) # ============================================================================= ## make 2D numerical integration # @author <NAME> <EMAIL> # @date 2014-12-01 def sp_integrate_3Dxy ( func , z , xmin , xmax , ymin , ymax , *args , **kwargs ) : """Make 2D numerical integration >>> func = ... 
## func ( x , y , z ) ## z , xmin , xmax , ymin , ymax >>> print func.sp_integrate_xy ( 0.5 , -10 , 10 , -20 , 20 ) """ def _func_ ( p1 , p2 , *args ) : return func ( p1 , p2 , z , *args ) from ostap.math.integral import integral2 as _integral2 return _integral2 ( func , xmin , xmax , ymin , ymax , *args , **kwargs ) # ============================================================================= ## make 2D numerical integration # @author <NAME> <EMAIL> # @date 2014-12-01 def sp_integrate_3Dxz ( func , y , xmin , xmax , zmin , zmax , *args , **kwargs ) : """Make 2D numerical integration >>> func = ... ## func ( x , y , z ) ## y , xmin , xmax , zmin , zmax >>> print func.sp_integrate_xz ( 0.5 , -10 , 10 , -20 , 20 ) """ def _func_ ( p1 , p2 , *args ) : return func ( p1 , y , p2 , *args ) from ostap.math.integral import integral2 as _integral2 return _integral2 ( func , xmin , xmax , ymin , ymax , *args , **kwargs ) # ============================================================================= ## make 2D numerical integration # @author <NAME> <EMAIL> # @date 2014-12-01 def sp_integrate_3Dyz ( func , x , ymin , ymax , zmin , zmax , *args , **kwargs ) : """Make 2D numerical integration >>> func = ... 
## func ( x , y , z ) ## x , ymin , ymax , zmin , zmax >>> print func.sp_integrate_yz ( 0.5 , -10 , 10 , -20 , 20 ) """ def _func_ ( p1 , p2 , *args ) : return func ( x , p1 , p2 , *args ) from ostap.math.integral import integral2 as _integral2 return _integral2 ( func , xmin , xmax , ymin , ymax , *args , **kwargs ) # ============================================================================= ## make 1D numerical integration # @author <NAME> <EMAIL> # @date 2014-12-01 def sp_integrate_1D_ ( pdf , xmin , xmax , *args , **kwargs ) : """Make 1D numerical integration over the PDF using SciPy """ if hasattr ( pdf , 'setPars' ) : pdf.setPars() func = pdf.function() return func.sp_integrate_1D ( xmin , xmax , *args , **kwargs ) # ============================================================================= ## make 2D numerical integration # @author <NAME> <EMAIL> # @date 2014-12-01 def sp_integrate_2D_ ( pdf , xmin , xmax , ymin , ymax , *args , **kwargs ) : """ Make 3D numerical integration over the PDF """ if hasattr ( pdf , 'setPars' ) : pdf.setPars() func = pdf.function() return func.sp_integrate_2D ( xmin , xmax , ymin , ymax , *args , **kwargs ) # ============================================================================= ## make 3D numerical integration # @author <NAME> <EMAIL> # @date 2014-12-01 def sp_integrate_3D_ ( pdf , xmin , xmax , ymin , ymax , zmin , zmax , *args , **kwargs ) : """ Make 3D numerical integration over the PDF """ if hasattr ( pdf , 'setPars' ) : pdf.setPars() func = pdf.function() return func.sp_integrate_3D ( xmin , xmax , ymin , ymax , zmin , zmax , *args , **kwargs ) from ostap.stats.moments import moment as sp_moment from ostap.stats.moments import central_moment as sp_central_moment from ostap.stats.moments import mean as sp_mean from ostap.stats.moments import variance as sp_variance from ostap.stats.moments import rms as sp_rms from ostap.stats.moments import median as sp_median from ostap.stats.moments import quantile as 
sp_quantile from ostap.stats.moments import mode as sp_mode from ostap.stats.moments import width as sp_width from ostap.stats.moments import cl_symm as sp_cl_symm from ostap.stats.moments import cl_asymm as sp_cl_asymm # ============================================================================= ## helper function to delegate some methods/attributes to TF1 # @code # f = ... # f.SetLineColor(4) ## delegate to TF1 # f.SetLineWidth(2) ## delegate to TF1 # @endcode def _tf1_getattr_ ( self , attr ) : """Delegate some methods/attributes to TF1 >>> f = ... >>> f.SetLineColor(4) ## delegate to TF1 >>> f.SetLineWidth(2) ## delegate to TF1 """ if hasattr ( ROOT.TF1 , attr ) and hasattr ( self , '_tf1' ) : return getattr ( self._tf1 , attr ) raise AttributeError # ============================================================================= ## helper function to delegate some methods/attributes to TF2 # @code # f = ... # f.SetLineColor(4) ## delegate to TF2 # f.SetLineWidth(2) ## delegate to TF2 # @endcode def _tf2_getattr_ ( self , attr ) : """Delegate some methods/attributes to TF2 >>> f = ... >>> f.SetLineColor(4) ## delegate to TF2 >>> f.SetLineWidth(2) ## delegate to TF2 """ if hasattr ( ROOT.TF2 , attr ) and hasattr ( self , '_tf2' ) : return getattr ( self._tf2 , attr ) raise AttributeError # ============================================================================= ## helper function to delegate some methods/attributes to TF3 # @code # f = ... # f.SetLineColor(4) ## delegate to TF3 # f.SetLineWidth(2) ## delegate to TF3 # @endcode def _tf3_getattr_ ( self , attr ) : """Delegate some methods/attributes to TF2 >>> f = ... 
>>> f.SetLineColor(4) ## delegate to TF2 >>> f.SetLineWidth(2) ## delegate to TF2 """ if hasattr ( ROOT.TF3 , attr ) and hasattr ( self , '_tf3' ) : return getattr ( self._tf3 , attr ) raise AttributeError from ostap.math.minimize import sp_minimum_1D, sp_maximum_1D from ostap.math.rootfinder import sp_solve # ============================================================================= ## decorate 1D-models/functions # ============================================================================= for model in ( Ostap.Math.Chebyshev , Ostap.Math.ChebyshevU , Ostap.Math.Legendre , Ostap.Math.Hermite , Ostap.Math.Bernstein , Ostap.Math.BernsteinEven , Ostap.Math.ChebyshevSum , Ostap.Math.LegendreSum , Ostap.Math.HermiteSum , Ostap.Math.FourierSum , Ostap.Math.CosineSum , Ostap.Math.Polynomial , Ostap.Math.Positive , Ostap.Math.PositiveEven , Ostap.Math.Monotonic , Ostap.Math.Convex , Ostap.Math.ConvexOnly , Ostap.Math.BifurcatedGauss , Ostap.Math.DoubleGauss , Ostap.Math.Bukin , Ostap.Math.Novosibirsk , Ostap.Math.CrystalBall , Ostap.Math.Needham , Ostap.Math.CrystalBallDoubleSided , Ostap.Math.GramCharlierA , Ostap.Math.PhaseSpace2 , Ostap.Math.PhaseSpaceLeft , Ostap.Math.PhaseSpaceRight , Ostap.Math.PhaseSpaceNL , Ostap.Math.PhaseSpace23L , Ostap.Math.BreitWigner , Ostap.Math.BreitWignerBase , Ostap.Math.BreitWignerMC, Ostap.Math.Rho0 , Ostap.Math.Kstar0 , Ostap.Math.Phi0 , Ostap.Math.Rho0FromEtaPrime , Ostap.Math.Flatte , Ostap.Math.Flatte2 , Ostap.Math.LASS , Ostap.Math.LASS23L , Ostap.Math.Bugg23L , Ostap.Math.BW23L , Ostap.Math.Flatte23L , Ostap.Math.Gounaris23L , Ostap.Math.StudentT , Ostap.Math.BifurcatedStudentT , Ostap.Math.Voigt , Ostap.Math.PseudoVoigt , Ostap.Math.Logistic , # Ostap.Math.GenGaussV1 , Ostap.Math.GenGaussV2 , Ostap.Math.SkewGauss , ## (temporarily removed) Ostap.Math.GammaDist , Ostap.Math.GenGammaDist , Ostap.Math.Amoroso , Ostap.Math.LogGammaDist , Ostap.Math.Log10GammaDist , Ostap.Math.LogGamma , Ostap.Math.BetaPrime , Ostap.Math.Landau 
, Ostap.Math.JohnsonSU , Ostap.Math.Atlas , Ostap.Math.Sech , Ostap.Math.Swanson , Ostap.Math.Argus , Ostap.Math.Slash , Ostap.Math.AsymmetricLaplace , Ostap.Math.Tsallis , Ostap.Math.QGSM , Ostap.Math.TwoExpos , Ostap.Math.DoubleGauss , Ostap.Math.Gumbel , Ostap.Math.Weibull , Ostap.Math.QGaussian , Ostap.Math.RaisingCosine , Ostap.Math.Sigmoid , # Ostap.Math.BSpline , Ostap.Math.PositiveSpline , Ostap.Math.MonotonicSpline , Ostap.Math.ConvexSpline , Ostap.Math.ConvexOnlySpline , # Ostap.Math.BernsteinDualBasis , ## interpolation polynomials Ostap.Math.Neville , Ostap.Math.Lagrange , Ostap.Math.Barycentric , ) : model.tf1 = _tf1_ model.sp_integrate = sp_integrate_1D model.__getattr__ = _tf1_getattr_ model.draw = _f1_draw_ if not hasattr ( model , 'max' ) : if hasattr ( model , 'mode' ) : model.max = _f_max_mode_ if not hasattr ( model , 'mean' ) : model.mean = sp_mean if not hasattr ( model , 'variance' ) : model.variance = sp_variance if not hasattr ( model , 'rms' ) : model.rms = sp_rms if not hasattr ( model , 'median' ) : model.median = sp_median if not hasattr ( model , 'mode' ) : model.mode = sp_mode if not hasattr ( model , 'width' ) : model.width = sp_width if not hasattr ( model , 'moment' ) : model.moment = sp_moment if not hasattr ( model , 'central_moment' ) : model.central_moment = sp_central_moment if not hasattr ( model , 'quantile' ) : model.quantile = sp_quantile if not hasattr ( model , 'cl_symm' ) : model.cl_symm = sp_cl_symm if not hasattr ( model , 'cl_asymm' ) : model.cl_asymm = sp_cl_asymm if sp_minimum_1D and not hasattr ( model , 'minimum' ) : model.minimum = sp_minimum_1D if sp_maximum_1D and not hasattr ( model , 'maximum' ) : model.maximum = sp_maximum_1D if sp_solve and not hasattr ( model , 'solve' ) : model.solve = sp_solve # ======================================================================================= ## Special ``getattr'' for Bernstein dual basis functions: delegate the stuff to # the underlying bernstein polynomial def 
_bdb_getattr_ ( self , attr ) : """Special ``getattr'' for Bernstein dual basis functions: - delegate the stuff to the underlying Bernstein polynomial """ b = self.bernstein() return getattr ( b , attr ) Ostap.Math.BernsteinDualBasis.__getattr__ = _bdb_getattr_ ## add some drawing method for some shapes for model in ( Ostap.Math.Bernstein , Ostap.Math.BernsteinEven , Ostap.Math.Positive , Ostap.Math.PositiveEven , Ostap.Math.Monotonic , Ostap.Math.Convex , Ostap.Math.ConvexOnly , Ostap.Math.ChebyshevSum , Ostap.Math.LegendreSum , Ostap.Math.HermiteSum , Ostap.Math.FourierSum , Ostap.Math.CosineSum , Ostap.Math.Polynomial , Ostap.Math.ExpoPositive , Ostap.Math.TwoExpoPositive , # Ostap.Math.BSpline , Ostap.Math.MonotonicSpline , Ostap.Math.PositiveSpline , Ostap.Math.ConvexSpline , Ostap.Math.ConvexOnlySpline ) : model.draw = _f1_draw_ model.Draw = _f1_draw_ # ============================================================================= def _f_print_ ( self , typ = '' ) : if not typ : typ = str(type(self)) return '%s(%s,%s,%s)' % ( typ , self.pars() , self.xmin() , self.xmax() ) Ostap.Math.LegendreSum .__str__ = lambda s : _f_print_ ( s , 'LegendreSum' ) Ostap.Math.ChebyshevSum .__str__ = lambda s : _f_print_ ( s , 'ChebyshevSum' ) Ostap.Math.Polynomial .__str__ = lambda s : _f_print_ ( s , 'Polynomial' ) Ostap.Math.Bernstein .__str__ = lambda s : _f_print_ ( s , 'Bernstein' ) Ostap.Math.BernsteinEven .__str__ = lambda s : _f_print_ ( s , 'BernsteinEven' ) Ostap.Math.Positive .__str__ = lambda s : _f_print_ ( s , 'Positive' ) Ostap.Math.PositiveEven .__str__ = lambda s : _f_print_ ( s , 'PositiveEven' ) Ostap.Math.Convex .__str__ = lambda s : _f_print_ ( s , 'Convex' ) Ostap.Math.ConvexOnly .__str__ = lambda s : _f_print_ ( s , 'ConvexOnly' ) Ostap.Math.Monotonic .__str__ = lambda s : _f_print_ ( s , 'Monotonic' ) Ostap.Math.FourierSum .__str__ = lambda s : _f_print_ ( s , 'FourierSum' ) Ostap.Math.CosineSum .__str__ = lambda s : _f_print_ ( s , 'CosineSum' ) 
# =============================================================================
## ``__repr__`` for the polynomial sums/shapes: delegate to ``_f_print_``
# =============================================================================
Ostap.Math.LegendreSum   .__repr__ = lambda s : _f_print_ ( s , 'LegendreSum'   )
Ostap.Math.ChebyshevSum  .__repr__ = lambda s : _f_print_ ( s , 'ChebyshevSum'  )
Ostap.Math.HermiteSum    .__repr__ = lambda s : _f_print_ ( s , 'HermiteSum'    )
Ostap.Math.Polynomial    .__repr__ = lambda s : _f_print_ ( s , 'Polynomial'    )
Ostap.Math.Bernstein     .__repr__ = lambda s : _f_print_ ( s , 'Bernstein'     )
Ostap.Math.BernsteinEven .__repr__ = lambda s : _f_print_ ( s , 'BernsteinEven' )
Ostap.Math.Positive      .__repr__ = lambda s : _f_print_ ( s , 'Positive'      )
Ostap.Math.PositiveEven  .__repr__ = lambda s : _f_print_ ( s , 'PositiveEven'  )
Ostap.Math.Convex        .__repr__ = lambda s : _f_print_ ( s , 'Convex'        )
Ostap.Math.ConvexOnly    .__repr__ = lambda s : _f_print_ ( s , 'ConvexOnly'    )
Ostap.Math.Monotonic     .__repr__ = lambda s : _f_print_ ( s , 'Monotonic'     )
Ostap.Math.FourierSum    .__repr__ = lambda s : _f_print_ ( s , 'FourierSum'    )
Ostap.Math.CosineSum     .__repr__ = lambda s : _f_print_ ( s , 'CosineSum'     )
## FIX(consistency): HermiteSum was given a __repr__ above but no matching
##                   __str__ (all its siblings get both) -- add it here
Ostap.Math.HermiteSum    .__str__  = lambda s : _f_print_ ( s , 'HermiteSum'    )

# =============================================================================
## print function for splines (show knots and parameters)
def _sp_print_ ( self , typ = 'BSpline' ) :
    """Generic printout for spline-like objects: knots and parameters."""
    return '%s(%s,%s)' % ( typ , self.knots () , self.pars () )

Ostap.Math.BSpline          .__str__ = lambda s : 'BSpline(%s,%s)'             % ( s.knots () , s.pars () )
Ostap.Math.PositiveSpline   .__str__ = lambda s : 'PositiveSpline(%s,%s)'      % ( s.knots () , s.pars () )
Ostap.Math.ConvexOnlySpline .__str__ = lambda s : 'ConvexOnlySpline(%s,%s,%s)' % ( s.knots () , s.pars () , s.convex     () )
Ostap.Math.MonotonicSpline  .__str__ = lambda s : 'MonotonicSpline(%s,%s,%s)'  % ( s.knots () , s.pars () , s.increasing () )
Ostap.Math.ConvexSpline     .__str__ = lambda s : 'ConvexSpline(%s,%s,%s,%s)'  % ( s.knots () , s.pars () , s.increasing () , s.convex () )

## reuse __str__ as __repr__ for all spline flavours
for t in ( Ostap.Math.BSpline          ,
           Ostap.Math.PositiveSpline   ,
           Ostap.Math.ConvexOnlySpline ,
           Ostap.Math.MonotonicSpline  ,
           Ostap.Math.ConvexSpline     ) :
    t.__repr__ = t.__str__

# =============================================================================
## decorate 2D-models/functions
# =============================================================================
Ostap.Math.Spline2D    = Ostap.Math.PositiveSpline2D
Ostap.Math.Spline2DSym = Ostap.Math.PositiveSpline2DSym

from ostap.math.minimize import sp_minimum_2D , sp_maximum_2D

for model in ( Ostap.Math.BSpline2D           ,
               Ostap.Math.BSpline2DSym        ,
               Ostap.Math.PositiveSpline2D    ,
               Ostap.Math.PositiveSpline2DSym ,
               Ostap.Math.Bernstein2D         ,
               Ostap.Math.Positive2D          ,
               Ostap.Math.Bernstein2DSym      ,
               Ostap.Math.Positive2DSym       ,
               Ostap.Math.PS2DPol             ,
               Ostap.Math.PS2DPolSym          ,
               Ostap.Math.PS2DPol2            ,
               Ostap.Math.PS2DPol2Sym         ,
               Ostap.Math.PS2DPol3            ,
               Ostap.Math.PS2DPol3Sym         ,
               Ostap.Math.ExpoPS2DPol         ,
               Ostap.Math.Expo2DPol           ,
               Ostap.Math.Expo2DPolSym        ) :

    model.tf2             = _tf2_
    model.tf              = _tf2_
    model.__getattr__     = _tf2_getattr_
    model.sp_integrate    = sp_integrate_2D
    model.sp_integrate_2D = sp_integrate_2D
    model.sp_integrate_x  = sp_integrate_2Dx
    model.sp_integrate_y  = sp_integrate_2Dy
    ## attach numerical extremum search only when scipy-based helpers exist
    if sp_minimum_2D and not hasattr ( model , 'minimum' ) : model.minimum = sp_minimum_2D
    if sp_maximum_2D and not hasattr ( model , 'maximum' ) : model.maximum = sp_maximum_2D

from ostap.math.minimize import sp_minimum_3D , sp_maximum_3D

# =============================================================================
## Decorate 3D models
# =============================================================================
for model in ( Ostap.Math.Bernstein3D    ,
               Ostap.Math.Bernstein3DSym ,
               Ostap.Math.Bernstein3DMix ,
               Ostap.Math.Positive3D     ,
               Ostap.Math.Positive3DSym  ,
               Ostap.Math.Positive3DMix  ) :

    model.tf3             = _tf3_
    model.tf              = _tf3_
    model.sp_integrate    = sp_integrate_3D
    model.sp_integrate_x  = sp_integrate_3Dx
    model.sp_integrate_y  = sp_integrate_3Dy
    model.sp_integrate_xy = sp_integrate_3Dxy
    model.sp_integrate_xz = sp_integrate_3Dxz
    model.sp_integrate_yz = sp_integrate_3Dyz
    model.__getattr__     = _tf3_getattr_
    ## FIX: the 3D models were given the *2D* extremum finders
    ##      (sp_minimum_2D/sp_maximum_2D, an obvious copy&paste slip) --
    ##      use the 3D versions imported just above
    if sp_minimum_3D and not hasattr ( model , 'minimum' ) : model.minimum = sp_minimum_3D
    if sp_maximum_3D and not hasattr ( model , 'maximum' ) : model.maximum = sp_maximum_3D

# ===============================================================================
def sp_minimum_1D_ ( pdf , xmin , xmax , x0 , *args ) :
    """Locate the minimum of a 1D PDF on [xmin,xmax], starting from x0."""
    if hasattr ( pdf , 'setPars' ) : pdf.setPars()   ## sync RooFit parameters first
    fun = pdf.function()
    return sp_minimum_1D ( fun , xmin , xmax , x0 , *args )

def sp_maximum_1D_ ( pdf , xmin , xmax , x0 , *args ) :
    """Locate the maximum of a 1D PDF on [xmin,xmax], starting from x0."""
    if hasattr ( pdf , 'setPars' ) : pdf.setPars()   ## sync RooFit parameters first
    fun = pdf.function()
    return sp_maximum_1D ( fun , xmin , xmax , x0 , *args )

# =============================================================================
## decorate 1D-PDFs
# =============================================================================
for pdf in ( Ostap.Models.BreitWigner           ,
             Ostap.Models.BreitWignerMC         ,
             Ostap.Models.Flatte                ,
             Ostap.Models.Bukin                 ,
             Ostap.Models.PhaseSpace2           ,
             Ostap.Models.PhaseSpaceNL          ,
             Ostap.Models.PhaseSpace23L         ,
             Ostap.Models.PhaseSpaceLeft        ,
             Ostap.Models.PhaseSpaceRight       ,
             Ostap.Models.PhaseSpacePol         ,
             Ostap.Models.PhaseSpaceLeftExpoPol ,
             Ostap.Models.Needham               ,
             Ostap.Models.CrystalBall           ,
             Ostap.Models.CrystalBallRS         ,
             Ostap.Models.CrystalBallDS         ,
             Ostap.Models.Apolonios             ,
             Ostap.Models.Apolonios2            ,
             Ostap.Models.GramCharlierA         ,
             Ostap.Models.Voigt                 ,
             Ostap.Models.PseudoVoigt           ,
             Ostap.Models.Logistic              ,
             Ostap.Models.LASS                  ,
             Ostap.Models.Bugg                  ,
             Ostap.Models.LASS23L               ,
             Ostap.Models.Bugg23L               ,
             Ostap.Models.BW23L                 ,
             Ostap.Models.PolyPositive          ,
             Ostap.Models.ExpoPositive          ,
             Ostap.Models.TwoExpoPositive       ,
             Ostap.Models.PositiveSpline        ,
             Ostap.Models.MonotonicSpline       ,
             Ostap.Models.StudentT              ,
             Ostap.Models.BifurcatedStudentT    ,
             Ostap.Models.GammaDist             ,
             Ostap.Models.GenGammaDist          ,
             Ostap.Models.Amoroso               ,
             Ostap.Models.LogGammaDist          ,
             Ostap.Models.Log10GammaDist        ,
             Ostap.Models.LogGamma              ,
             Ostap.Models.BetaPrime             ,
             Ostap.Models.Landau                ,
             Ostap.Models.SinhAsinh             ,
             Ostap.Models.JohnsonSU             ,
             Ostap.Models.Atlas                 ,
             Ostap.Models.Sech                  ,
             Ostap.Models.Swanson               ,
             Ostap.Models.Argus                 ,
             Ostap.Models.Slash                 ,
             Ostap.Models.AsymmetricLaplace     ,
             Ostap.Models.DoubleGauss           ,
             Ostap.Models.Gumbel                ,
             Ostap.Models.Weibull               ,
             Ostap.Models.RaisingCosine         ,
             Ostap.Models.QGaussian             ,
             Ostap.Models.Tsallis               ,
             Ostap.Models.QGSM                  ,
             Ostap.Models.BifurcatedGauss       ,
             Ostap.Models.DoubleGauss           , ## NOTE(review): duplicated entry kept as-is (harmless: re-decoration is idempotent)
             Ostap.Models.GenGaussV1            ,
             Ostap.Models.GenGaussV2            ,
             Ostap.Models.SkewGauss             ) :

    pdf.sp_integrate = sp_integrate_1D_
    if sp_minimum_1D and not hasattr ( pdf , 'minimum' ) : pdf.minimum = sp_minimum_1D_
    if sp_maximum_1D and not hasattr ( pdf , 'maximum' ) : pdf.maximum = sp_maximum_1D_

# ===============================================================================
def sp_minimum_2D_ ( pdf , xmin , xmax , ymin , ymax , x0 = () , *args ) :
    """Locate the minimum of a 2D PDF over [xmin,xmax]x[ymin,ymax]."""
    if hasattr ( pdf , 'setPars' ) : pdf.setPars()   ## sync RooFit parameters first
    fun = pdf.function()
    return sp_minimum_2D ( fun , xmin , xmax , ymin , ymax , x0 , *args )

# ===============================================================================
def sp_maximum_2D_ ( pdf , xmin , xmax , ymin , ymax , x0 = () , *args ) :
    """Locate the maximum of a 2D PDF over [xmin,xmax]x[ymin,ymax]."""
    if hasattr ( pdf , 'setPars' ) : pdf.setPars()   ## sync RooFit parameters first
    fun = pdf.function()
    return sp_maximum_2D ( fun , xmin , xmax , ymin , ymax , x0 , *args )

# =============================================================================
## decorate 2D-PDFs
# =============================================================================
for pdf in ( Ostap.Models.Poly2DPositive    ,
             Ostap.Models.Poly2DSymPositive ,
             Ostap.Models.PS2DPol           ,
             Ostap.Models.PS2DPolSym        ,
             Ostap.Models.PS2DPol2          ,
             Ostap.Models.PS2DPol2Sym       ,
             Ostap.Models.PS2DPol3          ,
             Ostap.Models.PS2DPol3Sym       ,
             Ostap.Models.ExpoPS2DPol       ,
             Ostap.Models.Expo2DPol         ,
             Ostap.Models.Expo2DPolSym      ,
             Ostap.Models.Spline2D          ,
             Ostap.Models.Spline2DSym       ) :

    pdf.sp_integrate = sp_integrate_2D_
    if sp_minimum_2D and not hasattr ( pdf , 'minimum' ) : pdf.minimum = sp_minimum_2D_
    if sp_maximum_2D and not hasattr ( pdf , 'maximum' ) : pdf.maximum = sp_maximum_2D_

# ===============================================================================
def sp_minimum_3D_ ( pdf , xmin , xmax , ymin , ymax , zmin , zmax , x0 = () , *args ) :
    """Locate the minimum of a 3D PDF over the given (x,y,z) box."""
    if hasattr ( pdf , 'setPars' ) : pdf.setPars()   ## sync RooFit parameters first
    fun = pdf.function()
    return sp_minimum_3D ( fun , xmin , xmax , ymin , ymax , zmin , zmax , x0 , *args )

# ===============================================================================
def sp_maximum_3D_ ( pdf , xmin , xmax , ymin , ymax , zmin , zmax , x0 = () , *args ) :
    """Locate the maximum of a 3D PDF over the given (x,y,z) box."""
    if hasattr ( pdf , 'setPars' ) : pdf.setPars()   ## sync RooFit parameters first
    fun = pdf.function()
    return sp_maximum_3D ( fun , xmin , xmax , ymin , ymax , zmin , zmax , x0 , *args )

# =============================================================================
## decorate 3D-PDFs
# =============================================================================
for pdf in ( Ostap.Models.Poly3DPositive    ,
             Ostap.Models.Poly3DSymPositive ,
             Ostap.Models.Poly3DMixPositive ) :

    pdf.sp_integrate = sp_integrate_3D_
    if sp_minimum_3D and not hasattr ( pdf , 'minimum' ) : pdf.minimum = sp_minimum_3D_
    if sp_maximum_3D and not hasattr ( pdf , 'maximum' ) : pdf.maximum = sp_maximum_3D_

# =============================================================================
## set, get & iterator protocol for parameter access
from ostap.math.bernstein import _p_set_par_ , _p_get_par_ , _p_iter_
for f in ( Ostap.Math.Bernstein2D         ,
           Ostap.Math.Positive2D          ,
           Ostap.Math.Bernstein2DSym      ,
           Ostap.Math.Positive2DSym       ,
           ##
           Ostap.Math.BSpline2D           ,
           Ostap.Math.BSpline2DSym        ,
           Ostap.Math.PositiveSpline2D    ,
           Ostap.Math.PositiveSpline2DSym ,
           ##
           Ostap.Math.Bernstein3D         ,
           Ostap.Math.Bernstein3DSym      ,
           Ostap.Math.Bernstein3DMix      ,
           Ostap.Math.Positive3D          ,
           Ostap.Math.Positive3DSym       ,
           Ostap.Math.Positive3DMix       ,
           Ostap.Math.PolySum             ,
           ##
           Ostap.Math.NSphere             ) :

    f.__setitem__  = _p_set_par_
    f.__getitem__  = _p_get_par_
    f.__len__      = lambda s : s.npars ()
    f.__iter__     = _p_iter_
    f.__contains__ = lambda s , i : 0 <= i < len ( s )

# =============================================================================
## random generators
# =============================================================================
from random import uniform as _uniform_
# =============================================================================
## generate random numbers from 2D bernstein-like distributions
#  @code
#  >>> func = ...
#  >>> for x,y in func.generate( 1000 ) : print x,y
#  @endcode
def _random_generate_bernstein2D_ ( fun , num = 1 ) :
    """Generate random numbers from 2D bernstein-like distributions
    >>> func = ...
    >>> for x,y in func.generate( 1000 ) : print x,y
    """
    xmn = fun.xmin ()
    xmx = fun.xmax ()
    ymn = fun.ymin ()
    ymx = fun.ymax ()
    ## rejection sampling: max Bernstein coefficient majorates the function
    vmx = max ( fun.bernstein().pars() )
    i   = 0
    while i < num :
        x = _uniform_ ( xmn , xmx )
        y = _uniform_ ( ymn , ymx )
        if fun ( x , y ) >= _uniform_ ( 0 , vmx ) :
            i += 1
            yield x , y

# =============================================================================
## generate random numbers from 3D bernstein-like distributions
#  @code
#  >>> func = ...
#  >>> for x,y,z in func.generate( 1000 ) : print x,y,z
#  @endcode
def _random_generate_bernstein3D_ ( fun , num = 1 ) :
    """Generate random numbers from 3D bernstein-like distributions
    >>> func = ...
    >>> for x,y,z in func.generate( 1000 ) : print x,y,z
    """
    xmn = fun.xmin ()
    xmx = fun.xmax ()
    ymn = fun.ymin ()
    ymx = fun.ymax ()
    zmn = fun.zmin ()
    zmx = fun.zmax ()
    ## rejection sampling: max Bernstein coefficient majorates the function
    vmx = max ( fun.bernstein().pars() )
    i   = 0
    while i < num :
        x = _uniform_ ( xmn , xmx )
        y = _uniform_ ( ymn , ymx )
        z = _uniform_ ( zmn , zmx )
        ## FIX: original tested ``v >= ...'' with *undefined* ``v'' (NameError);
        ##      evaluate the function at the sampled point, as in the 2D version
        if fun ( x , y , z ) >= _uniform_ ( 0 , vmx ) :
            i += 1
            yield x , y , z

# =============================================================================
## Get random number from 2D bernstein-like distributions
#  @code
#  >>> func = ...
#  >>> print func.shoot()
#  @endcode
def _random_shoot_bernstein2D_ ( fun ) :
    """Get a single random point from a 2D bernstein-like distribution
    >>> func = ...
    >>> print func.shoot()
    """
    xmn = fun.xmin ()
    xmx = fun.xmax ()
    ymn = fun.ymin ()
    ymx = fun.ymax ()
    ## rejection sampling: max Bernstein coefficient majorates the function
    vmx = max ( fun.bernstein().pars() )
    while True :
        x = _uniform_ ( xmn , xmx )
        y = _uniform_ ( ymn , ymx )
        if fun ( x , y ) >= _uniform_ ( 0 , vmx ) : return x , y

# =============================================================================
## Get random number from 3D bernstein-like distributions
#  @code
#  >>> func = ...
#  >>> print func.shoot()
#  @endcode
def _random_shoot_bernstein3D_ ( fun ) :
    """Get a single random point from a 3D bernstein-like distribution
    >>> func = ...
    >>> print func.shoot()
    """
    xmn = fun.xmin ()
    xmx = fun.xmax ()
    ymn = fun.ymin ()
    ymx = fun.ymax ()
    zmn = fun.zmin ()
    zmx = fun.zmax ()
    ## rejection sampling: max Bernstein coefficient majorates the function
    vmx = max ( fun.bernstein().pars() )
    while True :
        x = _uniform_ ( xmn , xmx )
        y = _uniform_ ( ymn , ymx )
        z = _uniform_ ( zmn , zmx )
        if fun ( x , y , z ) >= _uniform_ ( 0 , vmx ) : return x , y , z

## attach the generators/shooters to the positive 2D/3D polynomials
Ostap.Math.Positive2D    .generate = _random_generate_bernstein2D_
Ostap.Math.Positive2D    .shoot    = _random_shoot_bernstein2D_
Ostap.Math.Positive2DSym .generate = _random_generate_bernstein2D_
Ostap.Math.Positive2DSym .shoot    = _random_shoot_bernstein2D_

for p in ( Ostap.Math.Positive3D    ,
           Ostap.Math.Positive3DSym ,
           Ostap.Math.Positive3DMix ) :
    p.generate = _random_generate_bernstein3D_
    p.shoot    = _random_shoot_bernstein3D_

# =============================================================================
## add complex amplitudes
# =============================================================================
Ostap.Math.LASS            . amp = _amp_
Ostap.Math.LASS23L         . amp = _amp_
Ostap.Math.Bugg23L         . amp = _amp_
Ostap.Math.Flatte          . amp = _amp_
Ostap.Math.Flatte2         . amp = _amp_
Ostap.Math.Flatte23L       . amp = _amp_
Ostap.Math.BreitWignerBase . amp = _amp_

# =============================================================================
## equip derivatives&integrals with a TF1-view (if not done yet)
import ostap.math.derivative as _D1
import ostap.math.integral   as _D2
for i in ( _D1.Derivative , _D2.Integral , _D2.IntegralCache ) :
    if not hasattr ( i , 'tf1' ) : i.tf1 = _tf1_

# =============================================================================
def _ff_str_ ( ff ) :
    """Self-printout for FormFactor (also reused for Channel): delegate to ``describe``."""
    return ff.describe()

Ostap.Math.FormFactor.__str__  = _ff_str_
Ostap.Math.FormFactor.__repr__ = _ff_str_
Ostap.Math.Channel   .__str__  = _ff_str_
Ostap.Math.Channel   .__repr__ = _ff_str_

def _bw_str_ ( bw ) :
    """Self-printout for Breit-Wigner function"""
    return "BreitWigner (%s,%s)" % ( bw.m0() , bw.channel () )

def _bwmc_str_ ( bw ) :
    """Self-printout for multi-channel Breit-Wigner function"""
    return "BreitWignerMC(%s,%s)" % ( bw.m0() , bw.channels () )

Ostap.Math.BreitWigner  .__str__  = _bw_str_
Ostap.Math.BreitWigner  .__repr__ = _bw_str_
Ostap.Math.BreitWignerMC.__str__  = _bwmc_str_
Ostap.Math.BreitWignerMC.__repr__ = _bwmc_str_

# =============================================================================
## bookkeeping: all classes decorated by this module (used e.g. by ``docme``);
#  it is a *set*, so the historically duplicated entries below are harmless
_decorated_classes_ = set ( [
    ##
    Ostap.Math.Positive           , Ostap.Math.PositiveEven       ,
    Ostap.Math.Monotonic          , Ostap.Math.Convex             ,
    Ostap.Math.ConvexOnly         ,
    Ostap.Math.PositiveSpline     , Ostap.Math.MonotonicSpline    ,
    Ostap.Math.ConvexSpline       , Ostap.Math.ConvexOnlySpline   ,
    Ostap.Math.ExpoPositive       , Ostap.Math.TwoExpoPositive    ,
    Ostap.Math.LASS               , Ostap.Math.LASS23L            ,
    Ostap.Math.Bugg23L            ,
    Ostap.Math.Flatte             , Ostap.Math.Flatte2            , Ostap.Math.Flatte23L     ,
    Ostap.Math.BreitWigner        , Ostap.Math.BreitWignerBase    , Ostap.Math.BreitWignerMC ,
    Ostap.Math.Swanson            ,
    ##
    Ostap.Math.Chebyshev          , Ostap.Math.ChebyshevU         ,
    Ostap.Math.Legendre           , Ostap.Math.Hermite            ,
    Ostap.Math.Bernstein          , Ostap.Math.BernsteinEven      ,
    Ostap.Math.ChebyshevSum       , Ostap.Math.LegendreSum        ,
    Ostap.Math.HermiteSum         , Ostap.Math.FourierSum         ,
    Ostap.Math.CosineSum          , Ostap.Math.Polynomial         ,
    Ostap.Math.Positive           , Ostap.Math.PositiveEven       ,
    Ostap.Math.Monotonic          , Ostap.Math.Convex             ,
    Ostap.Math.ConvexOnly         ,
    Ostap.Math.BifurcatedGauss    , Ostap.Math.Bukin              ,
    Ostap.Math.Novosibirsk        , Ostap.Math.CrystalBall        ,
    Ostap.Math.Needham            , Ostap.Math.CrystalBallDoubleSided ,
    Ostap.Math.GramCharlierA      ,
    Ostap.Math.PhaseSpace2        , Ostap.Math.PhaseSpaceLeft     ,
    Ostap.Math.PhaseSpaceRight    , Ostap.Math.PhaseSpaceNL       ,
    Ostap.Math.PhaseSpace23L      ,
    Ostap.Math.BreitWigner        , Ostap.Math.BreitWignerBase    , Ostap.Math.BreitWignerMC ,
    Ostap.Math.Rho0               , Ostap.Math.Kstar0             ,
    Ostap.Math.Phi0               , Ostap.Math.Rho0FromEtaPrime   ,
    Ostap.Math.Flatte             , Ostap.Math.Flatte2            ,
    Ostap.Math.LASS               , Ostap.Math.LASS23L            ,
    Ostap.Math.Bugg23L            , Ostap.Math.BW23L              ,
    Ostap.Math.Flatte23L          , Ostap.Math.Gounaris23L        ,
    Ostap.Math.StudentT           , Ostap.Math.BifurcatedStudentT ,
    Ostap.Math.Voigt              , Ostap.Math.PseudoVoigt        ,
    Ostap.Math.Logistic           ,
    #
    Ostap.Math.GenGaussV1         , Ostap.Math.GenGaussV2         ,
    Ostap.Math.SkewGauss          , ## (temporarily removed)
    Ostap.Math.GammaDist          , Ostap.Math.GenGammaDist       ,
    Ostap.Math.Amoroso            , Ostap.Math.LogGammaDist       ,
    Ostap.Math.Log10GammaDist     , Ostap.Math.LogGamma           ,
    Ostap.Math.BetaPrime          , Ostap.Math.Landau             ,
    Ostap.Math.JohnsonSU          , Ostap.Math.Atlas              ,
    Ostap.Math.Sech               , Ostap.Math.Swanson            ,
    Ostap.Math.Argus              , Ostap.Math.Slash              ,
    Ostap.Math.AsymmetricLaplace  , Ostap.Math.DoubleGauss        ,
    Ostap.Math.Gumbel             , Ostap.Math.Weibull            ,
    Ostap.Math.QGaussian          , Ostap.Math.RaisingCosine      ,
    Ostap.Math.Tsallis            , Ostap.Math.QGSM               ,
    Ostap.Math.TwoExpos           , Ostap.Math.Sigmoid            ,
    #
    Ostap.Math.BSpline            , Ostap.Math.PositiveSpline     ,
    Ostap.Math.MonotonicSpline    , Ostap.Math.ConvexSpline       ,
    Ostap.Math.ConvexOnlySpline   ,
    #
    Ostap.Math.BernsteinDualBasis ,
    ##
    Ostap.Math.Bernstein          , Ostap.Math.BernsteinEven      ,
    Ostap.Math.Positive           , Ostap.Math.PositiveEven       ,
    Ostap.Math.Monotonic          , Ostap.Math.Convex             ,
    Ostap.Math.ConvexOnly         ,
    Ostap.Math.ChebyshevSum       , Ostap.Math.LegendreSum        ,
    Ostap.Math.HermiteSum         , Ostap.Math.FourierSum         ,
    Ostap.Math.CosineSum          , Ostap.Math.Polynomial         ,
    Ostap.Math.ExpoPositive       , Ostap.Math.TwoExpoPositive    ,
    #
    Ostap.Math.BSpline            , Ostap.Math.MonotonicSpline    ,
    Ostap.Math.PositiveSpline     , Ostap.Math.ConvexSpline       ,
    Ostap.Math.ConvexOnlySpline   ,
    #
    Ostap.Math.LegendreSum        , Ostap.Math.ChebyshevSum       ,
    Ostap.Math.Polynomial         , Ostap.Math.Bernstein          ,
    Ostap.Math.BernsteinEven      , Ostap.Math.Positive           ,
    Ostap.Math.PositiveEven       , Ostap.Math.FourierSum         ,
    Ostap.Math.CosineSum          ,
    Ostap.Math.LegendreSum        , Ostap.Math.ChebyshevSum       ,
    Ostap.Math.HermiteSum         , Ostap.Math.Polynomial         ,
    Ostap.Math.Bernstein          , Ostap.Math.BernsteinEven      ,
    Ostap.Math.Positive           , Ostap.Math.PositiveEven       ,
    Ostap.Math.Convex             , Ostap.Math.ConvexOnly         ,
    Ostap.Math.Monotonic          ,
    Ostap.Math.FourierSum         , Ostap.Math.CosineSum          ,
    ##
    Ostap.Math.BSpline2D          , Ostap.Math.BSpline2DSym       ,
    Ostap.Math.PositiveSpline2D   , Ostap.Math.PositiveSpline2DSym ,
    Ostap.Math.Bernstein2D        , Ostap.Math.Positive2D         ,
    Ostap.Math.Bernstein2DSym     , Ostap.Math.Positive2DSym      ,
    Ostap.Math.PS2DPol            , Ostap.Math.PS2DPolSym         ,
    Ostap.Math.PS2DPol2           , Ostap.Math.PS2DPol2Sym        ,
    Ostap.Math.PS2DPol3           , Ostap.Math.PS2DPol3Sym        ,
    Ostap.Math.ExpoPS2DPol        , Ostap.Math.Expo2DPol          ,
    Ostap.Math.Expo2DPolSym       ,
    ##
    Ostap.Models.BreitWigner      , Ostap.Models.BreitWignerMC    ,
    Ostap.Models.Flatte           , Ostap.Models.Bukin            ,
    Ostap.Models.PhaseSpace2      , Ostap.Models.PhaseSpaceNL     ,
    Ostap.Models.PhaseSpace23L    , Ostap.Models.PhaseSpaceLeft   ,
    Ostap.Models.PhaseSpaceRight  , Ostap.Models.PhaseSpacePol    ,
    Ostap.Models.PhaseSpaceLeftExpoPol ,
    Ostap.Models.Needham          , Ostap.Models.CrystalBall      ,
    Ostap.Models.CrystalBallRS    , Ostap.Models.CrystalBallDS    ,
    Ostap.Models.Apolonios        , Ostap.Models.Apolonios2       ,
    Ostap.Models.GramCharlierA    ,
    Ostap.Models.Voigt            , Ostap.Models.PseudoVoigt      ,
    Ostap.Models.Logistic         ,
    Ostap.Models.LASS             , Ostap.Models.Bugg             ,
    Ostap.Models.LASS23L          , Ostap.Models.Bugg23L          ,
    Ostap.Models.BW23L            ,
    Ostap.Models.PolyPositive     , Ostap.Models.ExpoPositive     ,
    Ostap.Models.TwoExpoPositive  ,
    Ostap.Models.PositiveSpline   , Ostap.Models.MonotonicSpline  ,
    ##
    Ostap.Models.StudentT         , Ostap.Models.BifurcatedStudentT ,
    Ostap.Models.GammaDist        , Ostap.Models.GenGammaDist     ,
    Ostap.Models.Amoroso          , Ostap.Models.LogGammaDist     ,
    Ostap.Models.Log10GammaDist   , Ostap.Models.LogGamma         ,
    Ostap.Models.BetaPrime        , Ostap.Models.Landau           ,
    Ostap.Models.SinhAsinh        , Ostap.Models.JohnsonSU        ,
    Ostap.Models.Atlas            , Ostap.Models.Sech             ,
    Ostap.Models.Swanson          , Ostap.Models.Argus            ,
    Ostap.Models.Slash            , Ostap.Models.AsymmetricLaplace ,
    Ostap.Models.Tsallis          , Ostap.Models.QGSM             ,
    Ostap.Models.BifurcatedGauss  ,
    Ostap.Models.GenGaussV1       , Ostap.Models.GenGaussV2       ,
    ##
    Ostap.Models.Poly2DPositive   , Ostap.Models.Poly2DSymPositive ,
    Ostap.Models.PS2DPol          , Ostap.Models.PS2DPolSym       ,
    Ostap.Models.PS2DPol2         , Ostap.Models.PS2DPol2Sym      ,
    Ostap.Models.PS2DPol3         , Ostap.Models.PS2DPol3Sym      ,
    Ostap.Models.ExpoPS2DPol      , Ostap.Models.Expo2DPol        ,
    Ostap.Models.Expo2DPolSym     ,
    Ostap.Models.Spline2D         , Ostap.Models.Spline2DSym      ,
    ##
    Ostap.Math.Positive           , Ostap.Math.PositiveEven       ,
    Ostap.Math.Bernstein          , Ostap.Math.BernsteinEven      ,
    Ostap.Math.Bernstein2D        , Ostap.Math.Positive2D         ,
    Ostap.Math.Bernstein2DSym     , Ostap.Math.Positive2DSym      ,
    ##
    Ostap.Math.BSpline            , Ostap.Math.PositiveSpline     ,
    Ostap.Math.Spline2D           , Ostap.Math.Spline2DSym        ,
    ##
    Ostap.Math.PolySum            ,
    ##
    Ostap.Math.NSphere            ,
    ##
    Ostap.Math.LASS               , Ostap.Math.LASS23L            ,
    Ostap.Math.Bugg23L            ,
    Ostap.Math.Flatte             , Ostap.Math.Flatte2            , Ostap.Math.Flatte23L     ,
    Ostap.Math.BreitWigner        , Ostap.Math.BreitWignerBase    , Ostap.Math.BreitWignerMC ,
    ##
    Ostap.Math.Bernstein3D        , Ostap.Math.Bernstein3DSym     , Ostap.Math.Bernstein3DMix ,
    Ostap.Math.Positive3D         , Ostap.Math.Positive3DSym      , Ostap.Math.Positive3DMix  ,
    ##
    ] )

# ============================================================================
## these modules perform their own (spline/bernstein) decorations on import
import ostap.math.bernstein
import ostap.math.bspline
# =============================================================================
if '__main__' == __name__ :
    from ostap.utils.docme import docme
    docme ( __name__ , logger = logger )
# =============================================================================
# The END
# =============================================================================
roy2220/srs.old
trunk/src/app/srs_app_config.cpp
<filename>trunk/src/app/srs_app_config.cpp /* The MIT License (MIT) Copyright (c) 2013-2016 SRS(ossrs) Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ #include <srs_app_config.hpp> #include <unistd.h> #include <stdio.h> #include <stdlib.h> #include <errno.h> #include <string.h> // file operations. 
#include <unistd.h> #include <sys/types.h> #include <sys/stat.h> #include <fcntl.h> #include <vector> #include <algorithm> using namespace std; #include <srs_kernel_error.hpp> #include <srs_kernel_log.hpp> #include <srs_protocol_utility.hpp> #include <srs_core_autofree.hpp> #include <srs_app_source.hpp> #include <srs_kernel_file.hpp> #include <srs_app_utility.hpp> #include <srs_core_performance.hpp> #include <srs_protocol_amf0.hpp> #include <srs_app_statistic.hpp> #include <srs_protocol_json.hpp> #include <srs_app_http_hooks.hpp> #include <srs_kernel_utility.hpp> #include <srs_rtmp_stack.hpp> #ifdef SRS_AUTO_DYNAMIC_CONFIG #include <srs_app_http_conn.hpp> #include <srs_app_http_client.hpp> #define SRS_HTTP_RESPONSE_OK SRS_XSTR(ERROR_SUCCESS) #endif using namespace _srs_internal; // the version to identify the core. const char* _srs_version = "XCORE-"RTMP_SIG_SRS_SERVER; #define SRS_WIKI_URL_LOG "https://github.com/ossrs/srs/wiki/v1_CN_SrsLog" // when user config an invalid value, macros to perfer true or false. #define SRS_CONF_PERFER_FALSE(conf_arg) conf_arg == "on" #define SRS_CONF_PERFER_TRUE(conf_arg) conf_arg != "off" // default config file. #define SRS_CONF_DEFAULT_COFNIG_FILE "conf/srs.conf" // '\n' #define SRS_LF (char)SRS_CONSTS_LF // '\r' #define SRS_CR (char)SRS_CONSTS_CR /** * dumps the ingest/transcode-engine in @param dir to amf0 object @param engine. * @param dir the transcode or ingest config directive. * @param engine the amf0 object to dumps to. 
 */
int srs_config_dumps_engine(SrsConfDirective* dir, SrsJsonObject* engine);

/**
 * whether the two vector actual equals, for instance,
 *      srs_vector_actual_equals([0, 1, 2], [0, 1, 2])      ==== true
 *      srs_vector_actual_equals([0, 1, 2], [2, 1, 0])      ==== true
 *      srs_vector_actual_equals([0, 1, 2], [0, 2, 1])      ==== true
 *      srs_vector_actual_equals([0, 1, 2], [0, 1, 2, 3])   ==== false
 *      srs_vector_actual_equals([1, 2, 3], [0, 1, 2])      ==== false
 * NOTE: this is mutual-containment (set equality): order is ignored and
 *       duplicated elements are not distinguished.
 */
template<typename T>
bool srs_vector_actual_equals(const vector<T>& a, const vector<T>& b)
{
    // all elements of a in b.
    // ::find resolves to std::find via the file-level "using namespace std".
    for (int i = 0; i < (int)a.size(); i++) {
        const T& e = a.at(i);
        if (::find(b.begin(), b.end(), e) == b.end()) {
            return false;
        }
    }
    
    // all elements of b in a.
    for (int i = 0; i < (int)b.size(); i++) {
        const T& e = b.at(i);
        if (::find(a.begin(), a.end(), e) == a.end()) {
            return false;
        }
    }
    
    return true;
}

/**
 * whether the ch is common space.
 * (space, tab, CR or LF -- the token separators of the config grammar)
 */
bool is_common_space(char ch)
{
    return (ch == ' ' || ch == '\t' || ch == SRS_CR || ch == SRS_LF);
}

namespace _srs_internal
{
    // Construct an empty buffer: no storage allocated yet, line counter at 1.
    SrsConfigBuffer::SrsConfigBuffer()
    {
        line = 1;
        
        pos = last = start = NULL;
        end = start;
    }
    
    SrsConfigBuffer::~SrsConfigBuffer()
    {
        // srs_freepa releases the array allocated by fullfill (new char[]).
        srs_freepa(start);
    }
    
    // Load the whole config file into the internal buffer in one read.
    // Any previously held buffer is released and replaced.
    int SrsConfigBuffer::fullfill(const char* filename)
    {
        int ret = ERROR_SUCCESS;
        
        SrsFileReader reader;
        
        // open file reader.
        if ((ret = reader.open(filename)) != ERROR_SUCCESS) {
            srs_error("open conf file error. ret=%d", ret);
            return ret;
        }
        
        // read all.
        int filesize = (int)reader.filesize();
        
        // create buffer
        srs_freepa(start);
        pos = last = start = new char[filesize];
        end = start + filesize;
        
        // read total content from file.
        ssize_t nread = 0;
        if ((ret = reader.read(start, filesize, &nread)) != ERROR_SUCCESS) {
            srs_error("read file read error. expect %d, actual %d bytes, ret=%d",
                filesize, nread, ret);
            return ret;
        }
        
        return ret;
    }
    
#ifdef SRS_AUTO_DYNAMIC_CONFIG
    // Fetch the config content from a remote HTTP service instead of a file:
    // POST a JSON request describing the client/stream, store the returned
    // "data" payload into the internal buffer (done inside do_post).
    int SrsConfigBuffer::fullfill(const char* action, std::string url, SrsRequest* req)
    {
        int ret = ERROR_SUCCESS;
        
        int client_id = _srs_context->get_id();
        
        SrsJsonObject* obj = SrsJsonAny::object();
        SrsAutoFree(SrsJsonObject, obj);
        
        // build the JSON request body from the connection context.
        obj->set("action", SrsJsonAny::str(action));
        obj->set("client_id", SrsJsonAny::integer(client_id));
        obj->set("ip", SrsJsonAny::str(req->ip.c_str()));
        obj->set("vhost", SrsJsonAny::str(req->vhost.c_str()));
        obj->set("app", SrsJsonAny::str(req->app.c_str()));
        obj->set("tcUrl", SrsJsonAny::str(req->tcUrl.c_str()));
        obj->set("stream", SrsJsonAny::str(req->stream.c_str()));
        
        std::string data = obj->dumps();
        std::string res;
        int status_code;
        
        SrsHttpClient http;
        if ((ret = do_post(&http, url, data, status_code, res)) != ERROR_SUCCESS) {
            srs_error("%s fullfill failed. "
                "client_id=%d, url=%s, request=%s, response=%s, code=%d, ret=%d",
                action, client_id, url.c_str(), data.c_str(), res.c_str(), status_code, ret);
            return ret;
        }
        srs_trace("%s fullfill success. "
            "client_id=%d, url=%s, request=%s, response=%s, ret=%d",
            action, client_id, url.c_str(), data.c_str(), res.c_str(), ret);
        
        return ret;
    }
    
    // POST @param req to @param url, return the HTTP status in @param code and
    // the raw body in @param res; on a standard {"code":0,"data":"..."} reply
    // the "data" string replaces the internal buffer.
    int SrsConfigBuffer::do_post(SrsHttpClient* hc, std::string url, std::string req, int& code, std::string& res)
    {
        int ret = ERROR_SUCCESS;
        
        SrsHttpUri uri;
        if ((ret = uri.initialize(url)) != ERROR_SUCCESS) {
            srs_error("http: post failed. url=%s, ret=%d", url.c_str(), ret);
            return ret;
        }
        
        if ((ret = hc->initialize(uri.get_host(), uri.get_port())) != ERROR_SUCCESS) {
            return ret;
        }
        
        ISrsHttpMessage* msg = NULL;
        if ((ret = hc->post(uri.get_path(), req, &msg)) != ERROR_SUCCESS) {
            return ret;
        }
        SrsAutoFree(ISrsHttpMessage, msg);
        
        code = msg->status_code();
        if ((ret = msg->body_read_all(res)) != ERROR_SUCCESS) {
            return ret;
        }
        
        // ensure the http status is ok.
// https://github.com/ossrs/srs/issues/158 if (code != SRS_CONSTS_HTTP_OK && code != SRS_CONSTS_HTTP_Created) { ret = ERROR_HTTP_STATUS_INVALID; srs_error("invalid response status=%d. ret=%d", code, ret); return ret; } // should never be empty. if (res.empty()) { ret = ERROR_HTTP_DATA_INVALID; srs_error("invalid empty response. ret=%d", ret); return ret; } // parse string res to json. SrsJsonAny* info = SrsJsonAny::loads((char*)res.c_str()); if (!info) { ret = ERROR_HTTP_DATA_INVALID; srs_error("invalid response %s. ret=%d", res.c_str(), ret); return ret; } SrsAutoFree(SrsJsonAny, info); // response error code in string. if (!info->is_object()) { if (res != SRS_HTTP_RESPONSE_OK) { ret = ERROR_HTTP_DATA_INVALID; srs_error("invalid response number %s. ret=%d", res.c_str(), ret); return ret; } return ret; } // response standard object, format in json: {"code": 0, "data": ""} SrsJsonObject* res_info = info->to_object(); SrsJsonAny* res_code = NULL; if ((res_code = res_info->ensure_property_integer("code")) == NULL) { ret = ERROR_RESPONSE_CODE; srs_error("invalid response without code, ret=%d", ret); return ret; } if ((res_code->to_integer()) != ERROR_SUCCESS) { ret = ERROR_RESPONSE_CODE; srs_error("error response code=%d. ret=%d", res_code->to_integer(), ret); return ret; } SrsJsonAny* res_data = NULL; if ((res_data = res_info->ensure_property_string("data")) != NULL) { std::string data = res_data->to_str(); srs_freepa(start); pos = last = start = new char[data.size()]; end = start + data.size(); memcpy(start, data.data(), data.size()); } return ret; } #endif bool SrsConfigBuffer::empty() { return pos >= end; } }; bool srs_directive_equals_self(SrsConfDirective* a, SrsConfDirective* b) { // both NULL, equal. 
    if (!a && !b) {
        return true;
    }
    // only one NULL, not equal.
    if (!a || !b) {
        return false;
    }
    
    if (a->name != b->name) {
        return false;
    }
    
    if (a->args.size() != b->args.size()) {
        return false;
    }
    
    // args must match pairwise, in order.
    for (int i = 0; i < (int)a->args.size(); i++) {
        if (a->args.at(i) != b->args.at(i)) {
            return false;
        }
    }
    
    // only the child COUNT is checked here; child content is compared by
    // the recursive srs_directive_equals variants below.
    if (a->directives.size() != b->directives.size()) {
        return false;
    }
    
    return true;
}

// Deep-compare two directives, recursing into all child directives.
bool srs_directive_equals(SrsConfDirective* a, SrsConfDirective* b)
{
    // both NULL, equal.
    if (!a && !b) {
        return true;
    }
    
    if (!srs_directive_equals_self(a, b)) {
        return false;
    }
    
    // children are compared positionally; counts already match per
    // srs_directive_equals_self.
    for (int i = 0; i < (int)a->directives.size(); i++) {
        SrsConfDirective* a0 = a->at(i);
        SrsConfDirective* b0 = b->at(i);
        
        if (!srs_directive_equals(a0, b0)) {
            return false;
        }
    }
    
    return true;
}

// Deep-compare two directives, ignoring any child directive named `except`.
bool srs_directive_equals(SrsConfDirective* a, SrsConfDirective* b, string except)
{
    // both NULL, equal.
    if (!a && !b) {
        return true;
    }
    
    if (!srs_directive_equals_self(a, b)) {
        return false;
    }
    
    for (int i = 0; i < (int)a->directives.size(); i++) {
        SrsConfDirective* a0 = a->at(i);
        SrsConfDirective* b0 = b->at(i);
        
        // donot compare the except child directive.
        if (a0->name == except) {
            continue;
        }
        
        if (!srs_directive_equals(a0, b0, except)) {
            return false;
        }
    }
    
    return true;
}

bool srs_config_hls_is_on_error_ignore(string strategy)
{
    return strategy == "ignore";
}

bool srs_config_hls_is_on_error_continue(string strategy)
{
    return strategy == "continue";
}

bool srs_config_ingest_is_file(string type)
{
    return type == "file";
}

bool srs_config_ingest_is_stream(string type)
{
    return type == "stream";
}

bool srs_config_dvr_is_plan_segment(string plan)
{
    return plan == "segment";
}

bool srs_config_dvr_is_plan_session(string plan)
{
    return plan == "session";
}

bool srs_config_dvr_is_plan_append(string plan)
{
    return plan == "append";
}

bool srs_stream_caster_is_udp(string caster)
{
    return caster == "mpegts_over_udp";
}

bool srs_stream_caster_is_rtsp(string caster)
{
    return caster == "rtsp";
}

bool srs_stream_caster_is_flv(string caster)
{
    return caster == "flv";
}

// Whether the dvr_apply filter matches this request. Matches when the
// filter is absent/empty, is the single keyword "all", or explicitly
// contains the "app/stream" id of the request.
bool srs_config_apply_filter(SrsConfDirective* dvr_apply, SrsRequest* req)
{
    static bool DEFAULT = true;
    
    if (!dvr_apply || dvr_apply->args.empty()) {
        return DEFAULT;
    }
    
    vector<string>& args = dvr_apply->args;
    if (args.size() == 1 && dvr_apply->arg0() == "all") {
        return true;
    }
    
    string id = req->app + "/" + req->stream;
    if (::find(args.begin(), args.end(), id) != args.end()) {
        return true;
    }
    
    return false;
}

// Map a config bool string ("true"/"false") to a switch string ("on"/"off").
string srs_config_bool2switch(const string& sbool)
{
    return sbool == "true"? "on":"off";
}

// Rewrite old-style (SRS1/SRS2) config directives into the current layout,
// so the rest of the code only has to understand one schema.
int srs_config_transform_vhost(SrsConfDirective* root)
{
    int ret = ERROR_SUCCESS;
    
    for (int i = 0; i < (int)root->directives.size(); i++) {
        SrsConfDirective* dir = root->directives.at(i);
        
        // SRS2.0, rename global http_stream to http_server.
        //  SRS1:
        //      http_stream {}
        //  SRS2+:
        //      http_server {}
        if (dir->name == "http_stream") {
            dir->name = "http_server";
            continue;
        }
        
        if (!dir->is_vhost()) {
            continue;
        }
        
        // for each directive of vhost.
std::vector<SrsConfDirective*>::iterator it; for (it = dir->directives.begin(); it != dir->directives.end();) { SrsConfDirective* conf = *it; string n = conf->name; // SRS2.0, rename vhost http to http_static // SRS1: // vhost { http {} } // SRS2+: // vhost { http_static {} } if (n == "http") { conf->name = "http_static"; ++it; continue; } // SRS3.0, change the refer style // SRS1/2: // vhost { refer; refer_play; refer_publish; } // SRS3+: // vhost { refer { enabled; all; play; publish; } } if ((n == "refer" && conf->directives.empty()) || n == "refer_play" || n == "refer_publish") { // remove the old one first, for name duplicated. it = dir->directives.erase(it); SrsConfDirective* refer = dir->get_or_create("refer"); refer->get_or_create("enabled", "on"); if (n == "refer") { SrsConfDirective* all = refer->get_or_create("all"); all->args = conf->args; } else if (n == "play") { SrsConfDirective* play = refer->get_or_create("play"); play->args = conf->args; } else if (n == "publish") { SrsConfDirective* publish = refer->get_or_create("publish"); publish->args = conf->args; } // remove the old directive. 
                srs_freep(conf);
                continue;
            }
            
            // SRS3.0, change the mr style
            //  SRS2:
            //      vhost { mr { enabled; latency; } }
            //  SRS3+:
            //      vhost { publish { mr; mr_latency; } }
            if (n == "mr") {
                it = dir->directives.erase(it);
                
                SrsConfDirective* publish = dir->get_or_create("publish");
                
                SrsConfDirective* enabled = conf->get("enabled");
                if (enabled) {
                    SrsConfDirective* mr = publish->get_or_create("mr");
                    mr->args = enabled->args;
                }
                
                SrsConfDirective* latency = conf->get("latency");
                if (latency) {
                    SrsConfDirective* mr_latency = publish->get_or_create("mr_latency");
                    mr_latency->args = latency->args;
                }
                
                srs_freep(conf);
                continue;
            }
            
            // SRS3.0, change the publish_1stpkt_timeout
            //  SRS2:
            //      vhost { publish_1stpkt_timeout; }
            //  SRS3+:
            //      vhost { publish { firstpkt_timeout; } }
            if (n == "publish_1stpkt_timeout") {
                it = dir->directives.erase(it);
                
                SrsConfDirective* publish = dir->get_or_create("publish");
                
                SrsConfDirective* firstpkt_timeout = publish->get_or_create("firstpkt_timeout");
                firstpkt_timeout->args = conf->args;
                
                srs_freep(conf);
                continue;
            }
            
            // SRS3.0, change the publish_normal_timeout
            //  SRS2:
            //      vhost { publish_normal_timeout; }
            //  SRS3+:
            //      vhost { publish { normal_timeout; } }
            if (n == "publish_normal_timeout") {
                it = dir->directives.erase(it);
                
                SrsConfDirective* publish = dir->get_or_create("publish");
                
                SrsConfDirective* normal_timeout = publish->get_or_create("normal_timeout");
                normal_timeout->args = conf->args;
                
                srs_freep(conf);
                continue;
            }
            
            // SRS3.0, move the following play-related directives like a shadow:
            //      time_jitter, mix_correct, atc, atc_auto, mw_latency, gop_cache, queue_length
            //  SRS1/2:
            //      vhost { shadow; }
            //  SRS3+:
            //      vhost { play { shadow; } }
            if (n == "time_jitter" || n == "mix_correct" || n == "atc" || n == "atc_auto"
                || n == "mw_latency" || n == "gop_cache" || n == "queue_length" || n == "send_min_interval"
                || n == "reduce_sequence_header"
            ) {
                it = dir->directives.erase(it);
                
                SrsConfDirective* play = dir->get_or_create("play");
                SrsConfDirective* shadow = play->get_or_create(conf->name);
shadow->args = conf->args; srs_freep(conf); continue; } // SRS3.0, change the forward. // SRS1/2: // vhost { forward; } // SRS3+: // vhost { forward { enabled; destination; } } if (n == "forward" && conf->directives.empty()) { conf->get_or_create("enabled")->set_arg0("on"); SrsConfDirective* destination = conf->get_or_create("destination"); destination->args = conf->args; conf->args.clear(); ++it; continue; } // SRS3.0, change the folowing like a shadow: // mode, origin, token_traverse, vhost, debug_srs_upnode // SRS1/2: // vhost { shadow; } // SRS3+: // vhost { cluster { shadow; } } if (n == "mode" || n == "origin" || n == "token_traverse" || n == "vhost" || n == "debug_srs_upnode") { it = dir->directives.erase(it); SrsConfDirective* cluster = dir->get_or_create("cluster"); SrsConfDirective* shadow = cluster->get_or_create(conf->name); shadow->args = conf->args; srs_freep(conf); continue; } ++it; } } return ret; } int srs_config_dumps_engine(SrsConfDirective* dir, SrsJsonObject* engine) { int ret = ERROR_SUCCESS; SrsConfDirective* conf = NULL; engine->set("id", dir->dumps_arg0_to_str()); engine->set("enabled", SrsJsonAny::boolean(_srs_config->get_engine_enabled(dir))); if ((conf = dir->get("iformat")) != NULL) { engine->set("iformat", conf->dumps_arg0_to_str()); } if ((conf = dir->get("vfilter")) != NULL) { SrsJsonObject* vfilter = SrsJsonAny::object(); engine->set("vfilter", vfilter); for (int i = 0; i < (int)conf->directives.size(); i++) { SrsConfDirective* sdir = conf->directives.at(i); vfilter->set(sdir->name, sdir->dumps_arg0_to_str()); } } if ((conf = dir->get("vcodec")) != NULL) { engine->set("vcodec", conf->dumps_arg0_to_str()); } if ((conf = dir->get("vbitrate")) != NULL) { engine->set("vbitrate", conf->dumps_arg0_to_integer()); } if ((conf = dir->get("vfps")) != NULL) { engine->set("vfps", conf->dumps_arg0_to_number()); } if ((conf = dir->get("vwidth")) != NULL) { engine->set("vwidth", conf->dumps_arg0_to_integer()); } if ((conf = dir->get("vheight")) != 
NULL) { engine->set("vheight", conf->dumps_arg0_to_integer()); } if ((conf = dir->get("vthreads")) != NULL) { engine->set("vthreads", conf->dumps_arg0_to_integer()); } if ((conf = dir->get("vprofile")) != NULL) { engine->set("vprofile", conf->dumps_arg0_to_str()); } if ((conf = dir->get("vpreset")) != NULL) { engine->set("vpreset", conf->dumps_arg0_to_str()); } if ((conf = dir->get("vparams")) != NULL) { SrsJsonObject* vparams = SrsJsonAny::object(); engine->set("vparams", vparams); for (int i = 0; i < (int)conf->directives.size(); i++) { SrsConfDirective* sdir = conf->directives.at(i); vparams->set(sdir->name, sdir->dumps_arg0_to_str()); } } if ((conf = dir->get("acodec")) != NULL) { engine->set("acodec", conf->dumps_arg0_to_str()); } if ((conf = dir->get("abitrate")) != NULL) { engine->set("abitrate", conf->dumps_arg0_to_integer()); } if ((conf = dir->get("asample_rate")) != NULL) { engine->set("asample_rate", conf->dumps_arg0_to_integer()); } if ((conf = dir->get("achannels")) != NULL) { engine->set("achannels", conf->dumps_arg0_to_integer()); } if ((conf = dir->get("aparams")) != NULL) { SrsJsonObject* aparams = SrsJsonAny::object(); engine->set("aparams", aparams); for (int i = 0; i < (int)conf->directives.size(); i++) { SrsConfDirective* sdir = conf->directives.at(i); aparams->set(sdir->name, sdir->dumps_arg0_to_str()); } } if ((conf = dir->get("oformat")) != NULL) { engine->set("oformat", conf->dumps_arg0_to_str()); } if ((conf = dir->get("output")) != NULL) { engine->set("output", conf->dumps_arg0_to_str()); } return ret; } SrsConfDirective::SrsConfDirective() { } SrsConfDirective::~SrsConfDirective() { std::vector<SrsConfDirective*>::iterator it; for (it = directives.begin(); it != directives.end(); ++it) { SrsConfDirective* directive = *it; srs_freep(directive); } directives.clear(); } SrsConfDirective* SrsConfDirective::copy() { return copy(""); } SrsConfDirective* SrsConfDirective::copy(string except) { SrsConfDirective* cp = new SrsConfDirective(); 
    // copy the directive itself.
    cp->conf_line = conf_line;
    cp->name = name;
    cp->args = args;
    
    // deep-copy children, skipping any child named `except`.
    for (int i = 0; i < (int)directives.size(); i++) {
        SrsConfDirective* directive = directives.at(i);
        if (!except.empty() && directive->name == except) {
            continue;
        }
        cp->directives.push_back(directive->copy(except));
    }
    
    return cp;
}

// The first arg, or empty string when absent.
string SrsConfDirective::arg0()
{
    if (args.size() > 0) {
        return args.at(0);
    }
    
    return "";
}

// The second arg, or empty string when absent.
string SrsConfDirective::arg1()
{
    if (args.size() > 1) {
        return args.at(1);
    }
    
    return "";
}

// The third arg, or empty string when absent.
string SrsConfDirective::arg2()
{
    if (args.size() > 2) {
        return args.at(2);
    }
    
    return "";
}

// The fourth arg, or empty string when absent.
string SrsConfDirective::arg3()
{
    if (args.size() > 3) {
        return args.at(3);
    }
    
    return "";
}

// The child directive at index; asserts that index is in range.
SrsConfDirective* SrsConfDirective::at(int index)
{
    srs_assert(index < (int)directives.size());
    return directives.at(index);
}

// First child directive with the given name, or NULL when not found.
SrsConfDirective* SrsConfDirective::get(string _name)
{
    std::vector<SrsConfDirective*>::iterator it;
    for (it = directives.begin(); it != directives.end(); ++it) {
        SrsConfDirective* directive = *it;
        if (directive->name == _name) {
            return directive;
        }
    }
    
    return NULL;
}

// First child directive matching both name and first arg, or NULL.
SrsConfDirective* SrsConfDirective::get(string _name, string _arg0)
{
    std::vector<SrsConfDirective*>::iterator it;
    for (it = directives.begin(); it != directives.end(); ++it) {
        SrsConfDirective* directive = *it;
        if (directive->name == _name && directive->arg0() == _arg0) {
            return directive;
        }
    }
    
    return NULL;
}

// Get child by name, creating (and appending) an empty one when missing.
SrsConfDirective* SrsConfDirective::get_or_create(string n)
{
    SrsConfDirective* conf = get(n);
    
    if (!conf) {
        conf = new SrsConfDirective();
        conf->name = n;
        directives.push_back(conf);
    }
    
    return conf;
}

// Get child by name and arg0, creating one with that arg0 when missing.
SrsConfDirective* SrsConfDirective::get_or_create(string n, string a0)
{
    SrsConfDirective* conf = get(n, a0);
    
    if (!conf) {
        conf = new SrsConfDirective();
        conf->name = n;
        conf->set_arg0(a0);
        directives.push_back(conf);
    }
    
    return conf;
}

// Replace the first arg with a0; returns this for chaining.
SrsConfDirective* SrsConfDirective::set_arg0(string a0)
{
    if (arg0() == a0) {
        return this;
    }
    
    // update a0.
if (!args.empty()) { args.erase(args.begin()); } args.insert(args.begin(), a0); return this; } void SrsConfDirective::remove(SrsConfDirective* v) { std::vector<SrsConfDirective*>::iterator it; if ((it = ::find(directives.begin(), directives.end(), v)) != directives.end()) { directives.erase(it); } } bool SrsConfDirective::is_vhost() { return name == "vhost"; } bool SrsConfDirective::is_stream_caster() { return name == "stream_caster"; } int SrsConfDirective::parse(SrsConfigBuffer* buffer) { return parse_conf(buffer, parse_file); } int SrsConfDirective::persistence(SrsFileWriter* writer, int level) { int ret = ERROR_SUCCESS; static char SPACE = SRS_CONSTS_SP; static char SEMICOLON = SRS_CONSTS_SE; static char LF = SRS_CONSTS_LF; static char LB = SRS_CONSTS_LB; static char RB = SRS_CONSTS_RB; static const char* INDENT = " "; // for level0 directive, only contains sub directives. if (level > 0) { // indent by (level - 1) * 4 space. for (int i = 0; i < level - 1; i++) { if ((ret = writer->write((char*)INDENT, 4, NULL)) != ERROR_SUCCESS) { return ret; } } // directive name. if ((ret = writer->write((char*)name.c_str(), (int)name.length(), NULL)) != ERROR_SUCCESS) { return ret; } if (!args.empty() && (ret = writer->write((char*)&SPACE, 1, NULL)) != ERROR_SUCCESS) { return ret; } // directive args. for (int i = 0; i < (int)args.size(); i++) { std::string& arg = args.at(i); if ((ret = writer->write((char*)arg.c_str(), (int)arg.length(), NULL)) != ERROR_SUCCESS) { return ret; } if (i < (int)args.size() - 1 && (ret = writer->write((char*)&SPACE, 1, NULL)) != ERROR_SUCCESS) { return ret; } } // native directive, without sub directives. if (directives.empty()) { if ((ret = writer->write((char*)&SEMICOLON, 1, NULL)) != ERROR_SUCCESS) { return ret; } } } // persistence all sub directives. 
if (level > 0) { if (!directives.empty()) { if ((ret = writer->write((char*)&SPACE, 1, NULL)) != ERROR_SUCCESS) { return ret; } if ((ret = writer->write((char*)&LB, 1, NULL)) != ERROR_SUCCESS) { return ret; } } if ((ret = writer->write((char*)&LF, 1, NULL)) != ERROR_SUCCESS) { return ret; } } for (int i = 0; i < (int)directives.size(); i++) { SrsConfDirective* dir = directives.at(i); if ((ret = dir->persistence(writer, level + 1)) != ERROR_SUCCESS) { return ret; } } if (level > 0 && !directives.empty()) { // indent by (level - 1) * 4 space. for (int i = 0; i < level - 1; i++) { if ((ret = writer->write((char*)INDENT, 4, NULL)) != ERROR_SUCCESS) { return ret; } } if ((ret = writer->write((char*)&RB, 1, NULL)) != ERROR_SUCCESS) { return ret; } if ((ret = writer->write((char*)&LF, 1, NULL)) != ERROR_SUCCESS) { return ret; } } return ret; } SrsJsonArray* SrsConfDirective::dumps_args() { SrsJsonArray* arr = SrsJsonAny::array(); for (int i = 0; i < (int)args.size(); i++) { string arg = args.at(i); arr->append(SrsJsonAny::str(arg.c_str())); } return arr; } SrsJsonAny* SrsConfDirective::dumps_arg0_to_str() { return SrsJsonAny::str(arg0().c_str()); } SrsJsonAny* SrsConfDirective::dumps_arg0_to_integer() { return SrsJsonAny::integer(::atol(arg0().c_str())); } SrsJsonAny* SrsConfDirective::dumps_arg0_to_number() { return SrsJsonAny::number(::atof(arg0().c_str())); } SrsJsonAny* SrsConfDirective::dumps_arg0_to_boolean() { return SrsJsonAny::boolean(arg0() == "on"); } // see: ngx_conf_parse int SrsConfDirective::parse_conf(SrsConfigBuffer* buffer, SrsDirectiveType type) { int ret = ERROR_SUCCESS; while (true) { std::vector<string> args; int line_start = 0; ret = read_token(buffer, args, line_start); /** * ret maybe: * ERROR_SYSTEM_CONFIG_INVALID error. 
* ERROR_SYSTEM_CONFIG_DIRECTIVE directive terminated by ';' found * ERROR_SYSTEM_CONFIG_BLOCK_START token terminated by '{' found * ERROR_SYSTEM_CONFIG_BLOCK_END the '}' found * ERROR_SYSTEM_CONFIG_EOF the config file is done */ if (ret == ERROR_SYSTEM_CONFIG_INVALID) { return ret; } if (ret == ERROR_SYSTEM_CONFIG_BLOCK_END) { if (type != parse_block) { srs_error("line %d: unexpected \"}\", ret=%d", buffer->line, ret); return ret; } return ERROR_SUCCESS; } if (ret == ERROR_SYSTEM_CONFIG_EOF) { if (type == parse_block) { srs_error("line %d: unexpected end of file, expecting \"}\", ret=%d", conf_line, ret); return ret; } return ERROR_SUCCESS; } if (args.empty()) { ret = ERROR_SYSTEM_CONFIG_INVALID; srs_error("line %d: empty directive. ret=%d", conf_line, ret); return ret; } // build directive tree. SrsConfDirective* directive = new SrsConfDirective(); directive->conf_line = line_start; directive->name = args[0]; args.erase(args.begin()); directive->args.swap(args); directives.push_back(directive); if (ret == ERROR_SYSTEM_CONFIG_BLOCK_START) { if ((ret = directive->parse_conf(buffer, parse_block)) != ERROR_SUCCESS) { return ret; } } } return ret; } // see: ngx_conf_read_token int SrsConfDirective::read_token(SrsConfigBuffer* buffer, vector<string>& args, int& line_start) { int ret = ERROR_SUCCESS; char* pstart = buffer->pos; bool sharp_comment = false; bool d_quoted = false; bool s_quoted = false; bool need_space = false; bool last_space = true; while (true) { if (buffer->empty()) { ret = ERROR_SYSTEM_CONFIG_EOF; if (!args.empty() || !last_space) { srs_error("line %d: unexpected end of file, expecting ; or \"}\"", buffer->line); return ERROR_SYSTEM_CONFIG_INVALID; } srs_trace("config parse complete"); return ret; } char ch = *buffer->pos++; if (ch == SRS_LF) { buffer->line++; sharp_comment = false; } if (sharp_comment) { continue; } if (need_space) { if (is_common_space(ch)) { last_space = true; need_space = false; continue; } if (ch == ';') { return 
ERROR_SYSTEM_CONFIG_DIRECTIVE; } if (ch == '{') { return ERROR_SYSTEM_CONFIG_BLOCK_START; } srs_error("line %d: unexpected '%c'", buffer->line, ch); return ERROR_SYSTEM_CONFIG_INVALID; } // last charecter is space. if (last_space) { if (is_common_space(ch)) { continue; } pstart = buffer->pos - 1; switch (ch) { case ';': if (args.size() == 0) { srs_error("line %d: unexpected ';'", buffer->line); return ERROR_SYSTEM_CONFIG_INVALID; } return ERROR_SYSTEM_CONFIG_DIRECTIVE; case '{': if (args.size() == 0) { srs_error("line %d: unexpected '{'", buffer->line); return ERROR_SYSTEM_CONFIG_INVALID; } return ERROR_SYSTEM_CONFIG_BLOCK_START; case '}': if (args.size() != 0) { srs_error("line %d: unexpected '}'", buffer->line); return ERROR_SYSTEM_CONFIG_INVALID; } return ERROR_SYSTEM_CONFIG_BLOCK_END; case '#': sharp_comment = 1; continue; case '"': pstart++; d_quoted = true; last_space = 0; continue; case '\'': pstart++; s_quoted = true; last_space = 0; continue; default: last_space = 0; continue; } } else { // last charecter is not space if (line_start == 0) { line_start = buffer->line; } bool found = false; if (d_quoted) { if (ch == '"') { d_quoted = false; need_space = true; found = true; } } else if (s_quoted) { if (ch == '\'') { s_quoted = false; need_space = true; found = true; } } else if (is_common_space(ch) || ch == ';' || ch == '{') { last_space = true; found = 1; } if (found) { int len = (int)(buffer->pos - pstart); char* aword = new char[len]; memcpy(aword, pstart, len); aword[len - 1] = 0; string word_str = aword; if (!word_str.empty()) { args.push_back(word_str); } srs_freepa(aword); if (ch == ';') { return ERROR_SYSTEM_CONFIG_DIRECTIVE; } if (ch == '{') { return ERROR_SYSTEM_CONFIG_BLOCK_START; } } } } return ret; } SrsConfig::SrsConfig() { dolphin = false; show_help = false; show_version = false; test_conf = false; root = new SrsConfDirective(); root->conf_line = 0; root->name = "root"; } SrsConfig::~SrsConfig() { srs_freep(root); } bool SrsConfig::is_dolphin() 
{
    return dolphin;
}

// Create or reset directive `dir` under `parent`, giving it `value` as its
// only arg (no arg at all when value is empty).
void SrsConfig::set_config_directive(SrsConfDirective* parent, string dir, string value)
{
    SrsConfDirective* d = parent->get(dir);
    if (!d) {
        d = new SrsConfDirective();
        if (!dir.empty()) {
            d->name = dir;
        }
        parent->directives.push_back(d);
    }
    
    // always reset the args to the single new value.
    d->args.clear();
    if (!value.empty()) {
        d->args.push_back(value);
    }
}

// Register a reload handler; duplicate subscriptions are ignored.
void SrsConfig::subscribe(ISrsReloadHandler* handler)
{
    std::vector<ISrsReloadHandler*>::iterator it;
    
    it = std::find(subscribes.begin(), subscribes.end(), handler);
    if (it != subscribes.end()) {
        return;
    }
    
    subscribes.push_back(handler);
}

// Unregister a reload handler; unknown handlers are ignored.
void SrsConfig::unsubscribe(ISrsReloadHandler* handler)
{
    std::vector<ISrsReloadHandler*>::iterator it;
    
    it = std::find(subscribes.begin(), subscribes.end(), handler);
    if (it == subscribes.end()) {
        return;
    }
    
    subscribes.erase(it);
}

// Re-parse the config file and apply the differences to the running server.
// Parse/check failures are logged and deliberately IGNORED (ERROR_SUCCESS is
// returned) so that a broken new config never kills a running server.
int SrsConfig::reload()
{
    int ret = ERROR_SUCCESS;
    
    SrsConfig conf;
    
    if ((ret = conf.parse_file(config_file.c_str())) != ERROR_SUCCESS) {
        srs_error("ignore config reloader parse file failed. ret=%d", ret);
        ret = ERROR_SUCCESS;
        return ret;
    }
    srs_info("config reloader parse file success.");
    
    // transform config to compatible with previous style of config.
    if ((ret = srs_config_transform_vhost(conf.root)) != ERROR_SUCCESS) {
        srs_error("transform config failed. ret=%d", ret);
        return ret;
    }
    
    if ((ret = conf.check_config()) != ERROR_SUCCESS) {
        srs_error("ignore config reloader check config failed. ret=%d", ret);
        ret = ERROR_SUCCESS;
        return ret;
    }
    
    return reload_conf(&conf);
}

// Diff the vhosts of the current root against old_root and notify the
// subscribed handlers about added, removed and modified vhosts.
int SrsConfig::reload_vhost(SrsConfDirective* old_root)
{
    int ret = ERROR_SUCCESS;
    
    // merge config.
    std::vector<ISrsReloadHandler*>::iterator it;
    
    // following directly support reload.
// origin, token_traverse, vhost, debug_srs_upnode // state graph // old_vhost new_vhost // DISABLED => ENABLED // ENABLED => DISABLED // ENABLED => ENABLED (modified) // collect all vhost names std::vector<std::string> vhosts; for (int i = 0; i < (int)root->directives.size(); i++) { SrsConfDirective* vhost = root->at(i); if (vhost->name != "vhost") { continue; } vhosts.push_back(vhost->arg0()); } for (int i = 0; i < (int)old_root->directives.size(); i++) { SrsConfDirective* vhost = old_root->at(i); if (vhost->name != "vhost") { continue; } if (root->get("vhost", vhost->arg0())) { continue; } vhosts.push_back(vhost->arg0()); } // process each vhost for (int i = 0; i < (int)vhosts.size(); i++) { std::string vhost = vhosts.at(i); SrsConfDirective* old_vhost = old_root->get("vhost", vhost); SrsConfDirective* new_vhost = root->get("vhost", vhost); // DISABLED => ENABLED if (!get_vhost_enabled(old_vhost) && get_vhost_enabled(new_vhost)) { if ((ret = do_reload_vhost_added(vhost)) != ERROR_SUCCESS) { return ret; } continue; } // ENABLED => DISABLED if (get_vhost_enabled(old_vhost) && !get_vhost_enabled(new_vhost)) { if ((ret = do_reload_vhost_removed(vhost)) != ERROR_SUCCESS) { return ret; } continue; } // cluster.mode, never supports reload. // first, for the origin and edge role change is too complex. // second, the vhosts in origin device group normally are all origin, // they never change to edge sometimes. // third, the origin or upnode device can always be restart, // edge will retry and the users connected to edge are ok. // it's ok to add or remove edge/origin vhost. if (get_cluster_is_edge(get_cluster(old_vhost)) != get_cluster_is_edge(get_cluster(old_vhost))) { ret = ERROR_RTMP_EDGE_RELOAD; srs_error("reload never supports mode changed. 
ret=%d", ret); return ret; } // the auto reload configs: // publish.parse_sps // ENABLED => ENABLED (modified) if (get_vhost_enabled(new_vhost) && get_vhost_enabled(old_vhost)) { srs_trace("vhost %s maybe modified, reload its detail.", vhost.c_str()); // chunk_size, only one per vhost. if (!srs_directive_equals(new_vhost->get("chunk_size"), old_vhost->get("chunk_size"))) { for (it = subscribes.begin(); it != subscribes.end(); ++it) { ISrsReloadHandler* subscribe = *it; if ((ret = subscribe->on_reload_vhost_chunk_size(vhost)) != ERROR_SUCCESS) { srs_error("vhost %s notify subscribes chunk_size failed. ret=%d", vhost.c_str(), ret); return ret; } } srs_trace("vhost %s reload chunk_size success.", vhost.c_str()); } // tcp_nodelay, only one per vhost if (!srs_directive_equals(new_vhost->get("tcp_nodelay"), old_vhost->get("tcp_nodelay"))) { for (it = subscribes.begin(); it != subscribes.end(); ++it) { ISrsReloadHandler* subscribe = *it; if ((ret = subscribe->on_reload_vhost_tcp_nodelay(vhost)) != ERROR_SUCCESS) { srs_error("vhost %s notify subscribes tcp_nodelay failed. ret=%d", vhost.c_str(), ret); return ret; } } srs_trace("vhost %s reload tcp_nodelay success.", vhost.c_str()); } // min_latency, only one per vhost if (!srs_directive_equals(new_vhost->get("min_latency"), old_vhost->get("min_latency"))) { for (it = subscribes.begin(); it != subscribes.end(); ++it) { ISrsReloadHandler* subscribe = *it; if ((ret = subscribe->on_reload_vhost_realtime(vhost)) != ERROR_SUCCESS) { srs_error("vhost %s notify subscribes min_latency failed. ret=%d", vhost.c_str(), ret); return ret; } } srs_trace("vhost %s reload min_latency success.", vhost.c_str()); } // play, only one per vhost if (!srs_directive_equals(new_vhost->get("play"), old_vhost->get("play"))) { for (it = subscribes.begin(); it != subscribes.end(); ++it) { ISrsReloadHandler* subscribe = *it; if ((ret = subscribe->on_reload_vhost_play(vhost)) != ERROR_SUCCESS) { srs_error("vhost %s notify subscribes play failed. 
ret=%d", vhost.c_str(), ret); return ret; } } srs_trace("vhost %s reload play success.", vhost.c_str()); } // forward, only one per vhost if (!srs_directive_equals(new_vhost->get("forward"), old_vhost->get("forward"))) { for (it = subscribes.begin(); it != subscribes.end(); ++it) { ISrsReloadHandler* subscribe = *it; if ((ret = subscribe->on_reload_vhost_forward(vhost)) != ERROR_SUCCESS) { srs_error("vhost %s notify subscribes forward failed. ret=%d", vhost.c_str(), ret); return ret; } } srs_trace("vhost %s reload forward success.", vhost.c_str()); } // hls, only one per vhost // @remark, the hls_on_error directly support reload. if (!srs_directive_equals(new_vhost->get("hls"), old_vhost->get("hls"))) { for (it = subscribes.begin(); it != subscribes.end(); ++it) { ISrsReloadHandler* subscribe = *it; if ((ret = subscribe->on_reload_vhost_hls(vhost)) != ERROR_SUCCESS) { srs_error("vhost %s notify subscribes hls failed. ret=%d", vhost.c_str(), ret); return ret; } } srs_trace("vhost %s reload hls success.", vhost.c_str()); } // hds reload if (!srs_directive_equals(new_vhost->get("hds"), old_vhost->get("hds"))) { for (it = subscribes.begin(); it != subscribes.end(); ++it) { ISrsReloadHandler* subscribe = *it; if ((ret = subscribe->on_reload_vhost_hds(vhost)) != ERROR_SUCCESS) { srs_error("vhost %s notify subscribes hds failed. ret=%d", vhost.c_str(), ret); return ret; } } srs_trace("vhost %s reload hds success.", vhost.c_str()); } // dvr, only one per vhost, except the dvr_apply if (!srs_directive_equals(new_vhost->get("dvr"), old_vhost->get("dvr"), "dvr_apply")) { for (it = subscribes.begin(); it != subscribes.end(); ++it) { ISrsReloadHandler* subscribe = *it; if ((ret = subscribe->on_reload_vhost_dvr(vhost)) != ERROR_SUCCESS) { srs_error("vhost %s notify subscribes dvr failed. ret=%d", vhost.c_str(), ret); return ret; } } srs_trace("vhost %s reload dvr success.", vhost.c_str()); } // dvr_apply, the dynamic dvr filter. 
if (true) { // we must reload the dvr_apply, for it's apply to specified stream, // and we donot want one stream reload take effect on another one. // @see https://github.com/ossrs/srs/issues/459#issuecomment-140296597 SrsConfDirective* nda = new_vhost->get("dvr")? new_vhost->get("dvr")->get("dvr_apply") : NULL; SrsConfDirective* oda = old_vhost->get("dvr")? old_vhost->get("dvr")->get("dvr_apply") : NULL; if (!srs_directive_equals(nda, oda) && (ret = do_reload_vhost_dvr_apply(vhost)) != ERROR_SUCCESS) { return ret; } } // exec, only one per vhost if (!srs_directive_equals(new_vhost->get("exec"), old_vhost->get("exec"))) { for (it = subscribes.begin(); it != subscribes.end(); ++it) { ISrsReloadHandler* subscribe = *it; if ((ret = subscribe->on_reload_vhost_exec(vhost)) != ERROR_SUCCESS) { srs_error("vhost %s notify subscribes exec failed. ret=%d", vhost.c_str(), ret); return ret; } } srs_trace("vhost %s reload exec success.", vhost.c_str()); } // publish, only one per vhost if (!srs_directive_equals(new_vhost->get("publish"), old_vhost->get("publish"))) { for (it = subscribes.begin(); it != subscribes.end(); ++it) { ISrsReloadHandler* subscribe = *it; if ((ret = subscribe->on_reload_vhost_publish(vhost)) != ERROR_SUCCESS) { srs_error("vhost %s notify subscribes publish failed. ret=%d", vhost.c_str(), ret); return ret; } } srs_trace("vhost %s reload publish success.", vhost.c_str()); } // http_static, only one per vhost. if (!srs_directive_equals(new_vhost->get("http_static"), old_vhost->get("http_static"))) { for (it = subscribes.begin(); it != subscribes.end(); ++it) { ISrsReloadHandler* subscribe = *it; if ((ret = subscribe->on_reload_vhost_http_updated()) != ERROR_SUCCESS) { srs_error("vhost %s notify subscribes http_static failed. ret=%d", vhost.c_str(), ret); return ret; } } srs_trace("vhost %s reload http_static success.", vhost.c_str()); } // http_remux, only one per vhost. 
if (!srs_directive_equals(new_vhost->get("http_remux"), old_vhost->get("http_remux"))) { for (it = subscribes.begin(); it != subscribes.end(); ++it) { ISrsReloadHandler* subscribe = *it; if ((ret = subscribe->on_reload_vhost_http_remux_updated(vhost)) != ERROR_SUCCESS) { srs_error("vhost %s notify subscribes http_remux failed. ret=%d", vhost.c_str(), ret); return ret; } } srs_trace("vhost %s reload http_remux success.", vhost.c_str()); } // transcode, many per vhost. if ((ret = reload_transcode(new_vhost, old_vhost)) != ERROR_SUCCESS) { return ret; } // ingest, many per vhost. if ((ret = reload_ingest(new_vhost, old_vhost)) != ERROR_SUCCESS) { return ret; } continue; } srs_trace("ignore reload vhost, enabled old: %d, new: %d", get_vhost_enabled(old_vhost), get_vhost_enabled(new_vhost)); } return ret; } int SrsConfig::reload_conf(SrsConfig* conf) { int ret = ERROR_SUCCESS; SrsConfDirective* old_root = root; SrsAutoFree(SrsConfDirective, old_root); root = conf->root; conf->root = NULL; // merge config. 
std::vector<ISrsReloadHandler*>::iterator it; // never support reload: // daemon // // always support reload without additional code: // chunk_size, ff_log_dir, // bandcheck, http_hooks, heartbeat, // security // merge config: listen if (!srs_directive_equals(root->get("listen"), old_root->get("listen"))) { if ((ret = do_reload_listen()) != ERROR_SUCCESS) { return ret; } } // merge config: pid if (!srs_directive_equals(root->get("pid"), old_root->get("pid"))) { if ((ret = do_reload_pid()) != ERROR_SUCCESS) { return ret; } } // merge config: srs_log_tank if (!srs_directive_equals(root->get("srs_log_tank"), old_root->get("srs_log_tank"))) { if ((ret = do_reload_srs_log_tank()) != ERROR_SUCCESS) { return ret; } } // merge config: srs_log_level if (!srs_directive_equals(root->get("srs_log_level"), old_root->get("srs_log_level"))) { if ((ret = do_reload_srs_log_level()) != ERROR_SUCCESS) { return ret; } } // merge config: srs_log_file if (!srs_directive_equals(root->get("srs_log_file"), old_root->get("srs_log_file"))) { if ((ret = do_reload_srs_log_file()) != ERROR_SUCCESS) { return ret; } } // merge config: max_connections if (!srs_directive_equals(root->get("max_connections"), old_root->get("max_connections"))) { if ((ret = do_reload_max_connections()) != ERROR_SUCCESS) { return ret; } } // merge config: utc_time if (!srs_directive_equals(root->get("utc_time"), old_root->get("utc_time"))) { if ((ret = do_reload_utc_time()) != ERROR_SUCCESS) { return ret; } } // merge config: pithy_print_ms if (!srs_directive_equals(root->get("pithy_print_ms"), old_root->get("pithy_print_ms"))) { if ((ret = do_reload_pithy_print_ms()) != ERROR_SUCCESS) { return ret; } } // merge config: http_api if ((ret = reload_http_api(old_root)) != ERROR_SUCCESS) { return ret; } // merge config: http_stream if ((ret = reload_http_stream(old_root)) != ERROR_SUCCESS) { return ret; } // TODO: FIXME: support reload stream_caster. // TODO: FIXME: support reload kafka. 
// merge config: vhost if ((ret = reload_vhost(old_root)) != ERROR_SUCCESS) { return ret; } return ret; } int SrsConfig::reload_http_api(SrsConfDirective* old_root) { int ret = ERROR_SUCCESS; // merge config. std::vector<ISrsReloadHandler*>::iterator it; // state graph // old_http_api new_http_api // DISABLED => ENABLED // ENABLED => DISABLED // ENABLED => ENABLED (modified) SrsConfDirective* new_http_api = root->get("http_api"); SrsConfDirective* old_http_api = old_root->get("http_api"); // DISABLED => ENABLED if (!get_http_api_enabled(old_http_api) && get_http_api_enabled(new_http_api)) { for (it = subscribes.begin(); it != subscribes.end(); ++it) { ISrsReloadHandler* subscribe = *it; if ((ret = subscribe->on_reload_http_api_enabled()) != ERROR_SUCCESS) { srs_error("notify subscribes http_api disabled=>enabled failed. ret=%d", ret); return ret; } } srs_trace("reload disabled=>enabled http_api success."); return ret; } // ENABLED => DISABLED if (get_http_api_enabled(old_http_api) && !get_http_api_enabled(new_http_api)) { for (it = subscribes.begin(); it != subscribes.end(); ++it) { ISrsReloadHandler* subscribe = *it; if ((ret = subscribe->on_reload_http_api_disabled()) != ERROR_SUCCESS) { srs_error("notify subscribes http_api enabled=>disabled failed. ret=%d", ret); return ret; } } srs_trace("reload enabled=>disabled http_api success."); return ret; } // ENABLED => ENABLED (modified) if (get_http_api_enabled(old_http_api) && get_http_api_enabled(new_http_api) && !srs_directive_equals(old_http_api, new_http_api) ) { for (it = subscribes.begin(); it != subscribes.end(); ++it) { ISrsReloadHandler* subscribe = *it; if ((ret = subscribe->on_reload_http_api_enabled()) != ERROR_SUCCESS) { srs_error("notify subscribes http_api enabled modified failed. 
ret=%d", ret); return ret; } } srs_trace("reload enabled modified http_api success."); if (!srs_directive_equals(old_http_api->get("crossdomain"), new_http_api->get("crossdomain"))) { for (it = subscribes.begin(); it != subscribes.end(); ++it) { ISrsReloadHandler* subscribe = *it; if ((ret = subscribe->on_reload_http_api_crossdomain()) != ERROR_SUCCESS) { srs_error("notify subscribes http_api crossdomain modified failed. ret=%d", ret); return ret; } } } srs_trace("reload crossdomain modified http_api success."); if (!srs_directive_equals(old_http_api->get("raw_api"), new_http_api->get("raw_api"))) { for (it = subscribes.begin(); it != subscribes.end(); ++it) { ISrsReloadHandler* subscribe = *it; if ((ret = subscribe->on_reload_http_api_raw_api()) != ERROR_SUCCESS) { srs_error("notify subscribes http_api raw_api modified failed. ret=%d", ret); return ret; } } } srs_trace("reload raw_api modified http_api success."); return ret; } srs_trace("reload http_api not changed success."); return ret; } int SrsConfig::reload_http_stream(SrsConfDirective* old_root) { int ret = ERROR_SUCCESS; // merge config. std::vector<ISrsReloadHandler*>::iterator it; // state graph // old_http_stream new_http_stream // DISABLED => ENABLED // ENABLED => DISABLED // ENABLED => ENABLED (modified) SrsConfDirective* new_http_stream = root->get("http_server"); SrsConfDirective* old_http_stream = old_root->get("http_server"); // DISABLED => ENABLED if (!get_http_stream_enabled(old_http_stream) && get_http_stream_enabled(new_http_stream)) { for (it = subscribes.begin(); it != subscribes.end(); ++it) { ISrsReloadHandler* subscribe = *it; if ((ret = subscribe->on_reload_http_stream_enabled()) != ERROR_SUCCESS) { srs_error("notify subscribes http_stream disabled=>enabled failed. 
ret=%d", ret); return ret; } } srs_trace("reload disabled=>enabled http_stream success."); return ret; } // ENABLED => DISABLED if (get_http_stream_enabled(old_http_stream) && !get_http_stream_enabled(new_http_stream)) { for (it = subscribes.begin(); it != subscribes.end(); ++it) { ISrsReloadHandler* subscribe = *it; if ((ret = subscribe->on_reload_http_stream_disabled()) != ERROR_SUCCESS) { srs_error("notify subscribes http_stream enabled=>disabled failed. ret=%d", ret); return ret; } } srs_trace("reload enabled=>disabled http_stream success."); return ret; } // ENABLED => ENABLED (modified) if (get_http_stream_enabled(old_http_stream) && get_http_stream_enabled(new_http_stream) && !srs_directive_equals(old_http_stream, new_http_stream) ) { for (it = subscribes.begin(); it != subscribes.end(); ++it) { ISrsReloadHandler* subscribe = *it; if ((ret = subscribe->on_reload_http_stream_updated()) != ERROR_SUCCESS) { srs_error("notify subscribes http_stream enabled modified failed. ret=%d", ret); return ret; } } srs_trace("reload enabled modified http_stream success."); return ret; } srs_trace("reload http_stream not changed success."); return ret; } int SrsConfig::reload_transcode(SrsConfDirective* new_vhost, SrsConfDirective* old_vhost) { int ret = ERROR_SUCCESS; std::vector<SrsConfDirective*> old_transcoders; for (int i = 0; i < (int)old_vhost->directives.size(); i++) { SrsConfDirective* conf = old_vhost->at(i); if (conf->name == "transcode") { old_transcoders.push_back(conf); } } std::vector<SrsConfDirective*> new_transcoders; for (int i = 0; i < (int)new_vhost->directives.size(); i++) { SrsConfDirective* conf = new_vhost->at(i); if (conf->name == "transcode") { new_transcoders.push_back(conf); } } std::vector<ISrsReloadHandler*>::iterator it; std::string vhost = new_vhost->arg0(); // to be simple: // whatever, once tiny changed of transcode, // restart all ffmpeg of vhost. bool changed = false; // discovery the removed ffmpeg. 
    // each loop below short-circuits via !changed: once any difference is
    // found, the remaining scans are skipped and ALL transcoders of the
    // vhost are restarted (coarse-grained by design, see comment above).
    for (int i = 0; !changed && i < (int)old_transcoders.size(); i++) {
        SrsConfDirective* old_transcoder = old_transcoders.at(i);
        std::string transcoder_id = old_transcoder->arg0();
        
        // if transcoder exists in new vhost, not removed, ignore.
        if (new_vhost->get("transcode", transcoder_id)) {
            continue;
        }
        
        changed = true;
    }
    
    // discovery the added ffmpeg.
    for (int i = 0; !changed && i < (int)new_transcoders.size(); i++) {
        SrsConfDirective* new_transcoder = new_transcoders.at(i);
        std::string transcoder_id = new_transcoder->arg0();
        
        // if transcoder exists in old vhost, not added, ignore.
        if (old_vhost->get("transcode", transcoder_id)) {
            continue;
        }
        
        changed = true;
    }
    
    // for updated transcoders, restart them.
    for (int i = 0; !changed && i < (int)new_transcoders.size(); i++) {
        SrsConfDirective* new_transcoder = new_transcoders.at(i);
        std::string transcoder_id = new_transcoder->arg0();
        // the removed/added scans above found nothing, so every new id must
        // also exist in the old vhost.
        SrsConfDirective* old_transcoder = old_vhost->get("transcode", transcoder_id);
        srs_assert(old_transcoder);
        
        if (srs_directive_equals(new_transcoder, old_transcoder)) {
            continue;
        }
        
        changed = true;
    }
    
    // transcode, many per vhost
    if (changed) {
        for (it = subscribes.begin(); it != subscribes.end(); ++it) {
            ISrsReloadHandler* subscribe = *it;
            if ((ret = subscribe->on_reload_vhost_transcode(vhost)) != ERROR_SUCCESS) {
                srs_error("vhost %s notify subscribes transcode failed. ret=%d", vhost.c_str(), ret);
                return ret;
            }
        }
        srs_trace("vhost %s reload transcode success.", vhost.c_str());
    }
    
    return ret;
}

// Diff the ingest directives of old vs new vhost config and notify
// subscribers per-ingester: removed (enabled=>disabled), added
// (disabled=>enabled), then updated (enabled=>enabled with changes).
int SrsConfig::reload_ingest(SrsConfDirective* new_vhost, SrsConfDirective* old_vhost)
{
    int ret = ERROR_SUCCESS;
    
    // collect the ingest directives of the old vhost.
    std::vector<SrsConfDirective*> old_ingesters;
    for (int i = 0; i < (int)old_vhost->directives.size(); i++) {
        SrsConfDirective* conf = old_vhost->at(i);
        if (conf->name == "ingest") {
            old_ingesters.push_back(conf);
        }
    }
    
    // collect the ingest directives of the new vhost.
    std::vector<SrsConfDirective*> new_ingesters;
    for (int i = 0; i < (int)new_vhost->directives.size(); i++) {
        SrsConfDirective* conf = new_vhost->at(i);
        if (conf->name == "ingest") {
            new_ingesters.push_back(conf);
        }
    }
    
    std::vector<ISrsReloadHandler*>::iterator it;
    
    std::string vhost = new_vhost->arg0();
    
    // for removed ingesters, stop them.
    for (int i = 0; i < (int)old_ingesters.size(); i++) {
        SrsConfDirective* old_ingester = old_ingesters.at(i);
        std::string ingest_id = old_ingester->arg0();
        // NULL when the ingester was removed from the new config; assumes
        // get_ingest_enabled(NULL) is false — TODO(review): confirm.
        SrsConfDirective* new_ingester = new_vhost->get("ingest", ingest_id);
        
        // ENABLED => DISABLED
        if (get_ingest_enabled(old_ingester) && !get_ingest_enabled(new_ingester)) {
            // notice handler ingester removed.
            for (it = subscribes.begin(); it != subscribes.end(); ++it) {
                ISrsReloadHandler* subscribe = *it;
                if ((ret = subscribe->on_reload_ingest_removed(vhost, ingest_id)) != ERROR_SUCCESS) {
                    srs_error("vhost %s notify subscribes ingest=%s removed failed. ret=%d", vhost.c_str(), ingest_id.c_str(), ret);
                    return ret;
                }
            }
            srs_trace("vhost %s reload ingest=%s removed success.", vhost.c_str(), ingest_id.c_str());
        }
    }
    
    // for added ingesters, start them.
    for (int i = 0; i < (int)new_ingesters.size(); i++) {
        SrsConfDirective* new_ingester = new_ingesters.at(i);
        std::string ingest_id = new_ingester->arg0();
        // NULL when the ingester is newly added; assumes
        // get_ingest_enabled(NULL) is false — TODO(review): confirm.
        SrsConfDirective* old_ingester = old_vhost->get("ingest", ingest_id);
        
        // DISABLED => ENABLED
        if (!get_ingest_enabled(old_ingester) && get_ingest_enabled(new_ingester)) {
            for (it = subscribes.begin(); it != subscribes.end(); ++it) {
                ISrsReloadHandler* subscribe = *it;
                if ((ret = subscribe->on_reload_ingest_added(vhost, ingest_id)) != ERROR_SUCCESS) {
                    srs_error("vhost %s notify subscribes ingest=%s added failed. ret=%d", vhost.c_str(), ingest_id.c_str(), ret);
                    return ret;
                }
            }
            srs_trace("vhost %s reload ingest=%s added success.", vhost.c_str(), ingest_id.c_str());
        }
    }
    
    // for updated ingesters, restart them.
    for (int i = 0; i < (int)new_ingesters.size(); i++) {
        SrsConfDirective* new_ingester = new_ingesters.at(i);
        std::string ingest_id = new_ingester->arg0();
        SrsConfDirective* old_ingester = old_vhost->get("ingest", ingest_id);
        
        // ENABLED => ENABLED
        if (get_ingest_enabled(old_ingester) && get_ingest_enabled(new_ingester)) {
            // unchanged directive: nothing to restart.
            if (srs_directive_equals(new_ingester, old_ingester)) {
                continue;
            }
            
            // notify handlers the ingester is updated (restart it).
            for (it = subscribes.begin(); it != subscribes.end(); ++it) {
                ISrsReloadHandler* subscribe = *it;
                if ((ret = subscribe->on_reload_ingest_updated(vhost, ingest_id)) != ERROR_SUCCESS) {
                    srs_error("vhost %s notify subscribes ingest=%s updated failed. 
ret=%d", vhost.c_str(), ingest_id.c_str(), ret); return ret; } } srs_trace("vhost %s reload ingest=%s updated success.", vhost.c_str(), ingest_id.c_str()); } } srs_trace("ingest not changed for vhost=%s", vhost.c_str()); return ret; } // see: ngx_get_options int SrsConfig::parse_options(int argc, char** argv) { int ret = ERROR_SUCCESS; // argv for (int i = 0; i < argc; i++) { _argv.append(argv[i]); if (i < argc - 1) { _argv.append(" "); } } // config show_help = true; for (int i = 1; i < argc; i++) { if ((ret = parse_argv(i, argv)) != ERROR_SUCCESS) { return ret; } } if (show_help) { print_help(argv); exit(0); } if (show_version) { fprintf(stderr, "%s\n", RTMP_SIG_SRS_VERSION); exit(0); } if (show_signature) { fprintf(stderr, "%s\n", RTMP_SIG_SRS_SERVER); exit(0); } // first hello message. srs_trace(_srs_version); if (config_file.empty()) { ret = ERROR_SYSTEM_CONFIG_INVALID; srs_error("config file not specified, see help: %s -h, ret=%d", argv[0], ret); return ret; } ret = parse_file(config_file.c_str()); // transform config to compatible with previous style of config. if ((ret = srs_config_transform_vhost(root)) != ERROR_SUCCESS) { srs_error("transform config failed. ret=%d", ret); return ret; } if (test_conf) { // the parse_file never check the config, // we check it when user requires check config file. if (ret == ERROR_SUCCESS) { ret = check_config(); } if (ret == ERROR_SUCCESS) { srs_trace("config file is ok"); exit(0); } else { srs_error("config file is invalid"); exit(ret); } } //////////////////////////////////////////////////////////////////////// // check log name and level //////////////////////////////////////////////////////////////////////// if (true) { std::string log_filename = this->get_log_file(); if (get_log_tank_file() && log_filename.empty()) { ret = ERROR_SYSTEM_CONFIG_INVALID; srs_error("must specifies the file to write log to. 
ret=%d", ret); return ret; } if (get_log_tank_file()) { srs_trace("write log to file %s", log_filename.c_str()); srs_trace("you can: tailf %s", log_filename.c_str()); srs_trace("@see: %s", SRS_WIKI_URL_LOG); } else { srs_trace("write log to console"); } } return ret; } int SrsConfig::initialize_cwd() { int ret = ERROR_SUCCESS; // cwd char cwd[256]; getcwd(cwd, sizeof(cwd)); _cwd = cwd; return ret; } int SrsConfig::persistence() { int ret = ERROR_SUCCESS; // write to a tmp file, then mv to the config. std::string path = config_file + ".tmp"; // open the tmp file for persistence SrsFileWriter fw; if ((ret = fw.open(path)) != ERROR_SUCCESS) { return ret; } // do persistence to writer. if ((ret = do_persistence(&fw)) != ERROR_SUCCESS) { ::unlink(path.c_str()); return ret; } // rename the config file. if (::rename(path.c_str(), config_file.c_str()) < 0) { ::unlink(path.c_str()); ret = ERROR_SYSTEM_CONFIG_PERSISTENCE; srs_error("rename config from %s to %s failed. ret=%d", path.c_str(), config_file.c_str(), ret); return ret; } return ret; } int SrsConfig::do_persistence(SrsFileWriter* fw) { int ret = ERROR_SUCCESS; // persistence root directive to writer. 
if ((ret = root->persistence(fw, 0)) != ERROR_SUCCESS) { return ret; } return ret; } int SrsConfig::minimal_to_json(SrsJsonObject* obj) { int ret = ERROR_SUCCESS; for (int i = 0; i < (int)root->directives.size(); i++) { SrsConfDirective* dir = root->directives.at(i); if (dir->is_vhost()) { continue; } if (dir->name == "listen") { obj->set(dir->name, dir->dumps_args()); } } return ret; } int SrsConfig::global_to_json(SrsJsonObject* obj) { int ret = ERROR_SUCCESS; for (int i = 0; i < (int)root->directives.size(); i++) { SrsConfDirective* dir = root->directives.at(i); if (dir->is_vhost()) { continue; } if (dir->name == "listen") { obj->set(dir->name, dir->dumps_args()); } else if (dir->name == "pid") { obj->set(dir->name, dir->dumps_arg0_to_str()); } else if (dir->name == "chunk_size") { obj->set(dir->name, dir->dumps_arg0_to_integer()); } else if (dir->name == "ff_log_dir") { obj->set(dir->name, dir->dumps_arg0_to_str()); } else if (dir->name == "srs_log_tank") { obj->set(dir->name, dir->dumps_arg0_to_str()); } else if (dir->name == "srs_log_level") { obj->set(dir->name, dir->dumps_arg0_to_str()); } else if (dir->name == "srs_log_file") { obj->set(dir->name, dir->dumps_arg0_to_str()); } else if (dir->name == "max_connections") { obj->set(dir->name, dir->dumps_arg0_to_integer()); } else if (dir->name == "daemon") { obj->set(dir->name, dir->dumps_arg0_to_boolean()); } else if (dir->name == "utc_time") { obj->set(dir->name, dir->dumps_arg0_to_boolean()); } else if (dir->name == "pithy_print_ms") { obj->set(dir->name, dir->dumps_arg0_to_integer()); } else if (dir->name == "heartbeat") { SrsJsonObject* sobj = SrsJsonAny::object(); for (int j = 0; j < (int)dir->directives.size(); j++) { SrsConfDirective* sdir = dir->directives.at(j); if (sdir->name == "enabled") { sobj->set(sdir->name, sdir->dumps_arg0_to_boolean()); } else if (sdir->name == "interval") { sobj->set(sdir->name, sdir->dumps_arg0_to_integer()); } else if (sdir->name == "url") { sobj->set(sdir->name, 
sdir->dumps_arg0_to_str()); } else if (sdir->name == "device_id") { sobj->set(sdir->name, sdir->dumps_arg0_to_str()); } else if (sdir->name == "summaries") { sobj->set(sdir->name, sdir->dumps_arg0_to_boolean()); } } obj->set(dir->name, sobj); } else if (dir->name == "stats") { SrsJsonObject* sobj = SrsJsonAny::object(); for (int j = 0; j < (int)dir->directives.size(); j++) { SrsConfDirective* sdir = dir->directives.at(j); if (sdir->name == "network") { sobj->set(sdir->name, sdir->dumps_arg0_to_integer()); } else if (sdir->name == "disk") { sobj->set(sdir->name, sdir->dumps_args()); } } obj->set(dir->name, sobj); } else if (dir->name == "http_api") { SrsJsonObject* sobj = SrsJsonAny::object(); for (int j = 0; j < (int)dir->directives.size(); j++) { SrsConfDirective* sdir = dir->directives.at(j); if (sdir->name == "enabled") { sobj->set(sdir->name, sdir->dumps_arg0_to_boolean()); } else if (sdir->name == "listen") { sobj->set(sdir->name, sdir->dumps_arg0_to_str()); } else if (sdir->name == "crossdomain") { sobj->set(sdir->name, sdir->dumps_arg0_to_boolean()); } else if (sdir->name == "raw_api") { SrsJsonObject* ssobj = SrsJsonAny::object(); sobj->set(sdir->name, ssobj); for (int j = 0; j < (int)sdir->directives.size(); j++) { SrsConfDirective* ssdir = sdir->directives.at(j); if (ssdir->name == "enabled") { ssobj->set(ssdir->name, ssdir->dumps_arg0_to_boolean()); } else if (ssdir->name == "allow_reload") { ssobj->set(ssdir->name, ssdir->dumps_arg0_to_boolean()); } else if (ssdir->name == "allow_query") { ssobj->set(ssdir->name, ssdir->dumps_arg0_to_boolean()); } else if (ssdir->name == "allow_update") { ssobj->set(ssdir->name, ssdir->dumps_arg0_to_boolean()); } } } } obj->set(dir->name, sobj); } else if (dir->name == "http_server") { SrsJsonObject* sobj = SrsJsonAny::object(); for (int j = 0; j < (int)dir->directives.size(); j++) { SrsConfDirective* sdir = dir->directives.at(j); if (sdir->name == "enabled") { sobj->set(sdir->name, sdir->dumps_arg0_to_boolean()); } 
else if (sdir->name == "listen") { sobj->set(sdir->name, sdir->dumps_arg0_to_str()); } else if (sdir->name == "dir") { sobj->set(sdir->name, sdir->dumps_arg0_to_str()); } } obj->set(dir->name, sobj); } else if (dir->name == "kafka") { SrsJsonObject* sobj = SrsJsonAny::object(); for (int j = 0; j < (int)dir->directives.size(); j++) { SrsConfDirective* sdir = dir->directives.at(j); if (sdir->name == "enabled") { sobj->set(sdir->name, sdir->dumps_arg0_to_boolean()); } else if (sdir->name == "brokers") { sobj->set(sdir->name, sdir->dumps_args()); } else if (sdir->name == "topic") { sobj->set(sdir->name, sdir->dumps_arg0_to_str()); } } obj->set(dir->name, sobj); } else if (dir->name == "stream_caster") { SrsJsonObject* sobj = SrsJsonAny::object(); for (int j = 0; j < (int)dir->directives.size(); j++) { SrsConfDirective* sdir = dir->directives.at(j); if (sdir->name == "enabled") { sobj->set(sdir->name, sdir->dumps_arg0_to_boolean()); } else if (sdir->name == "caster") { sobj->set(sdir->name, sdir->dumps_arg0_to_str()); } else if (sdir->name == "output") { sobj->set(sdir->name, sdir->dumps_arg0_to_str()); } else if (sdir->name == "listen") { sobj->set(sdir->name, sdir->dumps_arg0_to_str()); } else if (sdir->name == "rtp_port_min") { sobj->set(sdir->name, sdir->dumps_arg0_to_integer()); } else if (sdir->name == "rtp_port_max") { sobj->set(sdir->name, sdir->dumps_arg0_to_integer()); } } obj->set(dir->name, sobj); } else { continue; } } SrsJsonObject* sobjs = SrsJsonAny::object(); int nb_vhosts = 0; SrsStatistic* stat = SrsStatistic::instance(); for (int i = 0; i < (int)root->directives.size(); i++) { SrsConfDirective* dir = root->directives.at(i); if (!dir->is_vhost()) { continue; } nb_vhosts++; SrsJsonObject* sobj = SrsJsonAny::object(); sobjs->set(dir->arg0(), sobj); SrsStatisticVhost* svhost = stat->find_vhost(dir->arg0()); sobj->set("id", SrsJsonAny::integer(svhost? 
(double)svhost->id : 0)); sobj->set("name", dir->dumps_arg0_to_str()); sobj->set("enabled", SrsJsonAny::boolean(get_vhost_enabled(dir->arg0()))); if (get_dvr_enabled(dir->arg0())) { sobj->set("dvr", SrsJsonAny::boolean(true)); } if (get_vhost_http_enabled(dir->arg0())) { sobj->set("http_static", SrsJsonAny::boolean(true)); } if (get_vhost_http_remux_enabled(dir->arg0())) { sobj->set("http_remux", SrsJsonAny::boolean(true)); } if (get_hls_enabled(dir->arg0())) { sobj->set("hls", SrsJsonAny::boolean(true)); } if (get_hds_enabled(dir->arg0())) { sobj->set("hds", SrsJsonAny::boolean(true)); } if (get_vhost_http_hooks(dir->arg0())) { sobj->set("http_hooks", SrsJsonAny::boolean(true)); } if (get_exec_enabled(dir->arg0())) { sobj->set("exec", SrsJsonAny::boolean(true)); } if (get_bw_check_enabled(dir->arg0())) { sobj->set("bandcheck", SrsJsonAny::boolean(true)); } if (!get_cluster_is_edge(get_cluster(dir->arg0()))) { sobj->set("origin", SrsJsonAny::boolean(true)); } if (get_forward_enabled(get_forward(dir->arg0()))) { sobj->set("forward", SrsJsonAny::boolean(true)); } if (get_security_enabled(dir->arg0())) { sobj->set("security", SrsJsonAny::boolean(true)); } if (get_refer_enabled(dir->arg0())) { sobj->set("refer", SrsJsonAny::boolean(true)); } if (get_mr_enabled(dir->arg0())) { sobj->set("mr", SrsJsonAny::boolean(true)); } if (get_realtime_enabled(dir->arg0())) { sobj->set("min_latency", SrsJsonAny::boolean(true)); } if (get_gop_cache(dir->arg0())) { sobj->set("gop_cache", SrsJsonAny::boolean(true)); } if (get_tcp_nodelay(dir->arg0())) { sobj->set("tcp_nodelay", SrsJsonAny::boolean(true)); } if (get_mix_correct(dir->arg0())) { sobj->set("mix_correct", SrsJsonAny::boolean(true)); } if (get_time_jitter(dir->arg0()) != SrsRtmpJitterAlgorithmOFF) { sobj->set("time_jitter", SrsJsonAny::boolean(true)); } if (get_atc(dir->arg0())) { sobj->set("atc", SrsJsonAny::boolean(true)); } bool has_transcode = false; for (int j = 0; !has_transcode && j < (int)dir->directives.size(); j++) 
{ SrsConfDirective* sdir = dir->directives.at(j); if (sdir->name != "transcode") { continue; } if (!get_transcode_enabled(sdir)) { continue; } for (int k = 0; !has_transcode && k < (int)sdir->directives.size(); k++) { SrsConfDirective* ssdir = sdir->directives.at(k); if (ssdir->name != "engine") { continue; } if (get_engine_enabled(ssdir)) { has_transcode = true; break; } } } if (has_transcode) { sobj->set("transcode", SrsJsonAny::boolean(has_transcode)); } bool has_ingest = false; for (int j = 0; !has_ingest && j < (int)dir->directives.size(); j++) { SrsConfDirective* sdir = dir->directives.at(j); if (sdir->name != "ingest") { continue; } if (get_ingest_enabled(sdir)) { has_ingest = true; break; } } if (has_ingest) { sobj->set("ingest", SrsJsonAny::boolean(has_ingest)); } } obj->set("nb_vhosts", SrsJsonAny::integer(nb_vhosts)); obj->set("vhosts", sobjs); return ret; } int SrsConfig::vhost_to_json(SrsConfDirective* vhost, SrsJsonObject* obj) { int ret = ERROR_SUCCESS; SrsConfDirective* dir = NULL; // always present in vhost. SrsStatistic* stat = SrsStatistic::instance(); SrsStatisticVhost* svhost = stat->find_vhost(vhost->arg0()); obj->set("id", SrsJsonAny::integer(svhost? (double)svhost->id : 0)); obj->set("name", vhost->dumps_arg0_to_str()); obj->set("enabled", SrsJsonAny::boolean(get_vhost_enabled(vhost))); // vhost scope configs. if ((dir = vhost->get("chunk_size")) != NULL) { obj->set("chunk_size", dir->dumps_arg0_to_integer()); } if ((dir = vhost->get("min_latency")) != NULL) { obj->set("min_latency", dir->dumps_arg0_to_boolean()); } if ((dir = vhost->get("tcp_nodelay")) != NULL) { obj->set("tcp_nodelay", dir->dumps_arg0_to_boolean()); } // cluster. 
if ((dir = vhost->get("cluster")) != NULL) { SrsJsonObject* cluster = SrsJsonAny::object(); obj->set("cluster", cluster); for (int i = 0; i < (int)dir->directives.size(); i++) { SrsConfDirective* sdir = dir->directives.at(i); if (sdir->name == "mode") { cluster->set("mode", sdir->dumps_arg0_to_str()); } else if (sdir->name == "origin") { cluster->set("origin", sdir->dumps_arg0_to_str()); } else if (sdir->name == "token_traverse") { cluster->set("token_traverse", sdir->dumps_arg0_to_boolean()); } else if (sdir->name == "vhost") { cluster->set("vhost", sdir->dumps_arg0_to_str()); } else if (sdir->name == "debug_srs_upnode") { cluster->set("debug_srs_upnode", sdir->dumps_arg0_to_boolean()); } } } // forward if ((dir = vhost->get("forward")) != NULL) { SrsJsonObject* forward = SrsJsonAny::object(); obj->set("forward", forward); forward->set("enabled", SrsJsonAny::boolean(get_forward_enabled(dir))); for (int i = 0; i < (int)dir->directives.size(); i++) { SrsConfDirective* sdir = dir->directives.at(i); if (sdir->name == "destination") { forward->set("destination", sdir->dumps_args()); } } } // play if ((dir = vhost->get("play")) != NULL) { SrsJsonObject* play = SrsJsonAny::object(); obj->set("play", play); for (int i = 0; i < (int)dir->directives.size(); i++) { SrsConfDirective* sdir = dir->directives.at(i); if (sdir->name == "time_jitter") { play->set("time_jitter", sdir->dumps_arg0_to_str()); } else if (sdir->name == "mix_correct") { play->set("mix_correct", sdir->dumps_arg0_to_boolean()); } else if (sdir->name == "atc") { play->set("atc", sdir->dumps_arg0_to_boolean()); } else if (sdir->name == "atc_auto") { play->set("atc_auto", sdir->dumps_arg0_to_boolean()); } else if (sdir->name == "mw_latency") { play->set("mw_latency", sdir->dumps_arg0_to_integer()); } else if (sdir->name == "gop_cache") { play->set("gop_cache", sdir->dumps_arg0_to_boolean()); } else if (sdir->name == "queue_length") { play->set("queue_length", sdir->dumps_arg0_to_integer()); } else if 
(sdir->name == "reduce_sequence_header") { play->set("reduce_sequence_header", sdir->dumps_arg0_to_boolean()); } else if (sdir->name == "send_min_interval") { play->set("send_min_interval", sdir->dumps_arg0_to_integer()); } } } // publish if ((dir = vhost->get("publish")) != NULL) { SrsJsonObject* publish = SrsJsonAny::object(); obj->set("publish", publish); for (int i = 0; i < (int)dir->directives.size(); i++) { SrsConfDirective* sdir = dir->directives.at(i); if (sdir->name == "mr") { publish->set("mr", sdir->dumps_arg0_to_boolean()); } else if (sdir->name == "mr_latency") { publish->set("mr_latency", sdir->dumps_arg0_to_integer()); } else if (sdir->name == "firstpkt_timeout") { publish->set("firstpkt_timeout", sdir->dumps_arg0_to_integer()); } else if (sdir->name == "normal_timeout") { publish->set("normal_timeout", sdir->dumps_arg0_to_integer()); } } } // refer if ((dir = vhost->get("refer")) != NULL) { SrsJsonObject* refer = SrsJsonAny::object(); obj->set("refer", refer); refer->set("enabled", SrsJsonAny::boolean(get_refer_enabled(vhost->name))); for (int i = 0; i < (int)dir->directives.size(); i++) { SrsConfDirective* sdir = dir->directives.at(i); if (sdir->name == "all") { refer->set("all", sdir->dumps_args()); } else if (sdir->name == "publish") { refer->set("publish", sdir->dumps_args()); } else if (sdir->name == "play") { refer->set("play", sdir->dumps_args()); } } } // bandcheck if ((dir = vhost->get("bandcheck")) != NULL) { SrsJsonObject* bandcheck = SrsJsonAny::object(); obj->set("bandcheck", bandcheck); bandcheck->set("enabled", SrsJsonAny::boolean(get_bw_check_enabled(vhost->name))); for (int i = 0; i < (int)dir->directives.size(); i++) { SrsConfDirective* sdir = dir->directives.at(i); if (sdir->name == "key") { bandcheck->set("key", sdir->dumps_arg0_to_str()); } else if (sdir->name == "interval") { bandcheck->set("interval", sdir->dumps_arg0_to_integer()); } else if (sdir->name == "limit_kbps") { bandcheck->set("limit_kbps", 
sdir->dumps_arg0_to_integer()); } } } // security if ((dir = vhost->get("security")) != NULL) { SrsJsonObject* security = SrsJsonAny::object(); obj->set("security", security); security->set("enabled", SrsJsonAny::boolean(get_security_enabled(vhost->name))); SrsJsonArray* allows = SrsJsonAny::array(); security->set("allows", allows); SrsJsonArray* denies = SrsJsonAny::array(); security->set("denies", denies); for (int i = 0; i < (int)dir->directives.size(); i++) { SrsConfDirective* sdir = dir->directives.at(i); if (sdir->name == "allow") { SrsJsonObject* allow = SrsJsonAny::object(); allow->set("action", SrsJsonAny::str(sdir->name.c_str())); allow->set("method", SrsJsonAny::str(sdir->arg0().c_str())); allow->set("entry", SrsJsonAny::str(sdir->arg1().c_str())); allows->append(allow); } else if (sdir->name == "deny") { SrsJsonObject* deny = SrsJsonAny::object(); deny->set("action", SrsJsonAny::str(sdir->name.c_str())); deny->set("method", SrsJsonAny::str(sdir->arg0().c_str())); deny->set("entry", SrsJsonAny::str(sdir->arg1().c_str())); denies->append(deny); } } } // http_static if ((dir = vhost->get("http_static")) != NULL) { SrsJsonObject* http_static = SrsJsonAny::object(); obj->set("http_static", http_static); http_static->set("enabled", SrsJsonAny::boolean(get_vhost_http_enabled(vhost->name))); for (int i = 0; i < (int)dir->directives.size(); i++) { SrsConfDirective* sdir = dir->directives.at(i); if (sdir->name == "mount") { http_static->set("mount", sdir->dumps_arg0_to_str()); } else if (sdir->name == "dir") { http_static->set("dir", sdir->dumps_arg0_to_str()); } } } // http_remux if ((dir = vhost->get("http_remux")) != NULL) { SrsJsonObject* http_remux = SrsJsonAny::object(); obj->set("http_remux", http_remux); http_remux->set("enabled", SrsJsonAny::boolean(get_vhost_http_remux_enabled(vhost->name))); for (int i = 0; i < (int)dir->directives.size(); i++) { SrsConfDirective* sdir = dir->directives.at(i); if (sdir->name == "fast_cache") { 
http_remux->set("fast_cache", sdir->dumps_arg0_to_integer()); } else if (sdir->name == "mount") { http_remux->set("mount", sdir->dumps_arg0_to_str()); } else if (sdir->name == "hstrs") { http_remux->set("hstrs", sdir->dumps_arg0_to_boolean()); } } } // http_hooks if ((dir = vhost->get("http_hooks")) != NULL) { SrsJsonObject* http_hooks = SrsJsonAny::object(); obj->set("http_hooks", http_hooks); http_hooks->set("enabled", SrsJsonAny::boolean(get_vhost_http_hooks_enabled(vhost->name))); for (int i = 0; i < (int)dir->directives.size(); i++) { SrsConfDirective* sdir = dir->directives.at(i); if (sdir->name == "on_connect") { http_hooks->set("on_connect", sdir->dumps_args()); } else if (sdir->name == "on_close") { http_hooks->set("on_close", sdir->dumps_args()); } else if (sdir->name == "on_publish") { http_hooks->set("on_publish", sdir->dumps_args()); } else if (sdir->name == "on_unpublish") { http_hooks->set("on_unpublish", sdir->dumps_args()); } else if (sdir->name == "on_play") { http_hooks->set("on_play", sdir->dumps_args()); } else if (sdir->name == "on_stop") { http_hooks->set("on_stop", sdir->dumps_args()); } else if (sdir->name == "on_dvr") { http_hooks->set("on_dvr", sdir->dumps_args()); } else if (sdir->name == "on_hls") { http_hooks->set("on_hls", sdir->dumps_args()); } else if (sdir->name == "on_hls_notify") { http_hooks->set("on_hls_notify", sdir->dumps_arg0_to_str()); } } } // hls if ((dir = vhost->get("hls")) != NULL) { SrsJsonObject* hls = SrsJsonAny::object(); obj->set("hls", hls); hls->set("enabled", SrsJsonAny::boolean(get_hls_enabled(vhost->name))); for (int i = 0; i < (int)dir->directives.size(); i++) { SrsConfDirective* sdir = dir->directives.at(i); if (sdir->name == "hls_fragment") { hls->set("hls_fragment", sdir->dumps_arg0_to_number()); } else if (sdir->name == "hls_td_ratio") { hls->set("hls_td_ratio", sdir->dumps_arg0_to_number()); } else if (sdir->name == "hls_aof_ratio") { hls->set("hls_aof_ratio", sdir->dumps_arg0_to_number()); } else if 
(sdir->name == "hls_window") { hls->set("hls_window", sdir->dumps_arg0_to_number()); } else if (sdir->name == "hls_on_error") { hls->set("hls_on_error", sdir->dumps_arg0_to_str()); } else if (sdir->name == "hls_storage") { hls->set("hls_storage", sdir->dumps_arg0_to_str()); } else if (sdir->name == "hls_path") { hls->set("hls_path", sdir->dumps_arg0_to_str()); } else if (sdir->name == "hls_m3u8_file") { hls->set("hls_m3u8_file", sdir->dumps_arg0_to_str()); } else if (sdir->name == "hls_ts_file") { hls->set("hls_ts_file", sdir->dumps_arg0_to_str()); } else if (sdir->name == "hls_ts_floor") { hls->set("hls_ts_floor", sdir->dumps_arg0_to_boolean()); } else if (sdir->name == "hls_entry_prefix") { hls->set("hls_entry_prefix", sdir->dumps_arg0_to_str()); } else if (sdir->name == "hls_mount") { hls->set("hls_mount", sdir->dumps_arg0_to_str()); } else if (sdir->name == "hls_acodec") { hls->set("hls_acodec", sdir->dumps_arg0_to_str()); } else if (sdir->name == "hls_vcodec") { hls->set("hls_vcodec", sdir->dumps_arg0_to_str()); } else if (sdir->name == "hls_cleanup") { hls->set("hls_cleanup", sdir->dumps_arg0_to_boolean()); } else if (sdir->name == "hls_dispose") { hls->set("hls_dispose", sdir->dumps_arg0_to_number()); } else if (sdir->name == "hls_nb_notify") { hls->set("hls_nb_notify", sdir->dumps_arg0_to_integer()); } else if (sdir->name == "hls_wait_keyframe") { hls->set("hls_wait_keyframe", sdir->dumps_arg0_to_boolean()); } } } // hds if ((dir = vhost->get("hds")) != NULL) { SrsJsonObject* hds = SrsJsonAny::object(); obj->set("hds", hds); hds->set("enabled", SrsJsonAny::boolean(get_hds_enabled(vhost->name))); for (int i = 0; i < (int)dir->directives.size(); i++) { SrsConfDirective* sdir = dir->directives.at(i); if (sdir->name == "hds_fragment") { hds->set("hds_fragment", sdir->dumps_arg0_to_number()); } else if (sdir->name == "hds_window") { hds->set("hds_window", sdir->dumps_arg0_to_number()); } else if (sdir->name == "hds_path") { hds->set("hds_path", 
sdir->dumps_arg0_to_str()); } } } // dvr if ((dir = vhost->get("dvr")) != NULL) { SrsJsonObject* dvr = SrsJsonAny::object(); obj->set("dvr", dvr); dvr->set("enabled", SrsJsonAny::boolean(get_dvr_enabled(vhost->name))); for (int i = 0; i < (int)dir->directives.size(); i++) { SrsConfDirective* sdir = dir->directives.at(i); if (sdir->name == "dvr_plan") { dvr->set("dvr_plan", sdir->dumps_arg0_to_str()); } else if (sdir->name == "dvr_apply") { dvr->set("dvr_apply", sdir->dumps_args()); } else if (sdir->name == "dvr_path") { dvr->set("dvr_path", sdir->dumps_arg0_to_str()); } else if (sdir->name == "dvr_duration") { dvr->set("dvr_duration", sdir->dumps_arg0_to_number()); } else if (sdir->name == "dvr_wait_keyframe") { dvr->set("dvr_wait_keyframe", sdir->dumps_arg0_to_boolean()); } else if (sdir->name == "time_jitter") { dvr->set("time_jitter", sdir->dumps_arg0_to_str()); } } } // exec if ((dir = vhost->get("exec")) != NULL) { SrsJsonObject* ng_exec = SrsJsonAny::object(); obj->set("exec", ng_exec); ng_exec->set("enabled", SrsJsonAny::boolean(get_exec_enabled(vhost->name))); for (int i = 0; i < (int)dir->directives.size(); i++) { SrsConfDirective* sdir = dir->directives.at(i); if (sdir->name == "publish") { ng_exec->set("publish", sdir->dumps_args()); } } } // ingest SrsJsonArray* ingests = NULL; for (int i = 0; i < (int)vhost->directives.size(); i++) { dir = vhost->directives.at(i); if (dir->name != "ingest") { continue; } if (!ingests) { ingests = SrsJsonAny::array(); obj->set("ingests", ingests); } SrsJsonObject* ingest = SrsJsonAny::object(); ingest->set("id", dir->dumps_arg0_to_str()); ingest->set("enabled", SrsJsonAny::boolean(get_ingest_enabled(dir))); ingests->append(ingest); for (int j = 0; j < (int)dir->directives.size(); j++) { SrsConfDirective* sdir = dir->directives.at(j); if (sdir->name == "input") { SrsJsonObject* input = SrsJsonAny::object(); ingest->set("input", input); SrsConfDirective* type = sdir->get("type"); if (type) { input->set("type", 
type->dumps_arg0_to_str()); } SrsConfDirective* url = sdir->get("url"); if (url) { input->set("url", url->dumps_arg0_to_str()); } } else if (sdir->name == "ffmpeg") { ingest->set("ffmpeg", sdir->dumps_arg0_to_str()); } else if (sdir->name == "engine") { SrsJsonObject* engine = SrsJsonAny::object(); ingest->set("engine", engine); if ((ret = srs_config_dumps_engine(sdir, engine)) != ERROR_SUCCESS) { return ret; } } } } // transcode SrsJsonArray* transcodes = NULL; for (int i = 0; i < (int)vhost->directives.size(); i++) { dir = vhost->directives.at(i); if (dir->name != "transcode") { continue; } if (!transcodes) { transcodes = SrsJsonAny::array(); obj->set("transcodes", transcodes); } SrsJsonObject* transcode = SrsJsonAny::object(); transcodes->append(transcode); transcode->set("apply", dir->dumps_arg0_to_str()); transcode->set("enabled", SrsJsonAny::boolean(get_transcode_enabled(dir))); SrsJsonArray* engines = SrsJsonAny::array(); transcode->set("engines", engines); for (int i = 0; i < (int)dir->directives.size(); i++) { SrsConfDirective* sdir = dir->directives.at(i); if (sdir->name == "ffmpeg") { transcode->set("ffmpeg", sdir->dumps_arg0_to_str()); } else if (sdir->name == "engine") { SrsJsonObject* engine = SrsJsonAny::object(); engines->append(engine); if ((ret = srs_config_dumps_engine(sdir, engine)) != ERROR_SUCCESS) { return ret; } } } } return ret; } int SrsConfig::raw_to_json(SrsJsonObject* obj) { int ret = ERROR_SUCCESS; SrsJsonObject* sobj = SrsJsonAny::object(); obj->set("http_api", sobj); sobj->set("enabled", SrsJsonAny::boolean(get_http_api_enabled())); sobj->set("listen", SrsJsonAny::str(get_http_api_listen().c_str())); sobj->set("crossdomain", SrsJsonAny::boolean(get_http_api_crossdomain())); SrsJsonObject* ssobj = SrsJsonAny::object(); sobj->set("raw_api", ssobj); ssobj->set("enabled", SrsJsonAny::boolean(get_raw_api())); ssobj->set("allow_reload", SrsJsonAny::boolean(get_raw_api_allow_reload())); ssobj->set("allow_query", 
SrsJsonAny::boolean(get_raw_api_allow_query())); ssobj->set("allow_update", SrsJsonAny::boolean(get_raw_api_allow_update())); return ret; } int SrsConfig::raw_set_listen(const vector<string>& eps, bool& applied) { int ret = ERROR_SUCCESS; applied = false; SrsConfDirective* conf = root->get("listen"); // not changed, ignore. if (srs_vector_actual_equals(conf->args, eps)) { return ret; } // changed, apply and reload. conf->args = eps; if ((ret = do_reload_listen()) != ERROR_SUCCESS) { return ret; } applied = true; return ret; } int SrsConfig::raw_set_pid(string pid, bool& applied) { int ret = ERROR_SUCCESS; applied = false; SrsConfDirective* conf = root->get_or_create("pid"); if (conf->arg0() == pid) { return ret; } conf->args.clear(); conf->args.push_back(pid); if ((ret = do_reload_pid()) != ERROR_SUCCESS) { return ret; } applied = true; return ret; } int SrsConfig::raw_set_chunk_size(string chunk_size, bool& applied) { int ret = ERROR_SUCCESS; applied = false; SrsConfDirective* conf = root->get_or_create("chunk_size"); if (conf->arg0() == chunk_size) { return ret; } conf->args.clear(); conf->args.push_back(chunk_size); // directly supported reload for chunk_size change. applied = true; return ret; } int SrsConfig::raw_set_ff_log_dir(string ff_log_dir, bool& applied) { int ret = ERROR_SUCCESS; applied = false; SrsConfDirective* conf = root->get_or_create("ff_log_dir"); if (conf->arg0() == ff_log_dir) { return ret; } conf->args.clear(); conf->args.push_back(ff_log_dir); // directly supported reload for ff_log_dir change. 
applied = true; return ret; } int SrsConfig::raw_set_srs_log_tank(string srs_log_tank, bool& applied) { int ret = ERROR_SUCCESS; applied = false; SrsConfDirective* conf = root->get_or_create("srs_log_tank"); if (conf->arg0() == srs_log_tank) { return ret; } conf->args.clear(); conf->args.push_back(srs_log_tank); if ((ret = do_reload_srs_log_tank()) != ERROR_SUCCESS) { return ret; } applied = true; return ret; } int SrsConfig::raw_set_srs_log_level(string srs_log_level, bool& applied) { int ret = ERROR_SUCCESS; applied = false; SrsConfDirective* conf = root->get_or_create("srs_log_level"); if (conf->arg0() == srs_log_level) { return ret; } conf->args.clear(); conf->args.push_back(srs_log_level); if ((ret = do_reload_srs_log_level()) != ERROR_SUCCESS) { return ret; } applied = true; return ret; } int SrsConfig::raw_set_srs_log_file(string srs_log_file, bool& applied) { int ret = ERROR_SUCCESS; applied = false; SrsConfDirective* conf = root->get_or_create("srs_log_file"); if (conf->arg0() == srs_log_file) { return ret; } conf->args.clear(); conf->args.push_back(srs_log_file); if ((ret = do_reload_srs_log_file()) != ERROR_SUCCESS) { return ret; } applied = true; return ret; } int SrsConfig::raw_set_max_connections(string max_connections, bool& applied) { int ret = ERROR_SUCCESS; applied = false; SrsConfDirective* conf = root->get_or_create("max_connections"); if (conf->arg0() == max_connections) { return ret; } conf->args.clear(); conf->args.push_back(max_connections); if ((ret = do_reload_max_connections()) != ERROR_SUCCESS) { return ret; } applied = true; return ret; } int SrsConfig::raw_set_utc_time(string utc_time, bool& applied) { int ret = ERROR_SUCCESS; applied = false; SrsConfDirective* conf = root->get_or_create("utc_time"); if (conf->arg0() == utc_time) { return ret; } conf->args.clear(); conf->args.push_back(utc_time); if ((ret = do_reload_utc_time()) != ERROR_SUCCESS) { return ret; } applied = true; return ret; } int SrsConfig::raw_set_pithy_print_ms(string 
pithy_print_ms, bool& applied)
{
    int ret = ERROR_SUCCESS;
    
    applied = false;
    
    SrsConfDirective* conf = root->get_or_create("pithy_print_ms");
    
    // not changed, ignore.
    if (conf->arg0() == pithy_print_ms) {
        return ret;
    }
    
    conf->args.clear();
    conf->args.push_back(pithy_print_ms);
    
    if ((ret = do_reload_pithy_print_ms()) != ERROR_SUCCESS) {
        return ret;
    }
    
    applied = true;
    
    return ret;
}

// RAW API: create a vhost, enable it and notify reload handlers.
int SrsConfig::raw_create_vhost(string vhost, bool& applied)
{
    int ret = ERROR_SUCCESS;
    
    applied = false;
    
    SrsConfDirective* conf = root->get_or_create("vhost", vhost);
    conf->get_or_create("enabled")->set_arg0("on");
    
    if ((ret = do_reload_vhost_added(vhost)) != ERROR_SUCCESS) {
        return ret;
    }
    
    applied = true;
    
    return ret;
}

// RAW API: rename a vhost.
// the vhost must be disabled, so we donot need to reload.
int SrsConfig::raw_update_vhost(string vhost, string name, bool& applied)
{
    int ret = ERROR_SUCCESS;
    
    applied = false;
    
    SrsConfDirective* conf = root->get_or_create("vhost", vhost);
    conf->set_arg0(name);
    
    applied = true;
    
    return ret;
}

// RAW API: delete a vhost from the config tree.
// the vhost must be disabled, so we donot need to reload.
int SrsConfig::raw_delete_vhost(string vhost, bool& applied)
{
    int ret = ERROR_SUCCESS;
    
    applied = false;
    
    SrsConfDirective* conf = root->get("vhost", vhost);
    srs_assert(conf);
    
    // remove the directive.
    root->remove(conf);
    srs_freep(conf);
    
    applied = true;
    
    return ret;
}

// RAW API: disable a vhost and notify reload handlers.
int SrsConfig::raw_disable_vhost(string vhost, bool& applied)
{
    int ret = ERROR_SUCCESS;
    
    applied = false;
    
    SrsConfDirective* conf = root->get("vhost", vhost);
    // callers validate the vhost exists; assert to fail fast instead of a
    // null-deref, consistent with raw_delete_vhost.
    srs_assert(conf);
    
    conf->get_or_create("enabled")->set_arg0("off");
    
    if ((ret = do_reload_vhost_removed(vhost)) != ERROR_SUCCESS) {
        return ret;
    }
    
    applied = true;
    
    return ret;
}

// RAW API: enable a previously disabled vhost and notify reload handlers.
int SrsConfig::raw_enable_vhost(string vhost, bool& applied)
{
    int ret = ERROR_SUCCESS;
    
    applied = false;
    
    SrsConfDirective* conf = root->get("vhost", vhost);
    // callers validate the vhost exists; assert to fail fast instead of a
    // null-deref, consistent with raw_delete_vhost.
    srs_assert(conf);
    
    conf->get_or_create("enabled")->set_arg0("on");
    
    if ((ret = do_reload_vhost_added(vhost)) != ERROR_SUCCESS) {
        return ret;
    }
    
    applied = true;
    
    return ret;
}

// RAW API: add a stream to the vhost dvr_apply list and notify reload handlers.
int SrsConfig::raw_enable_dvr(string vhost, string stream, bool& applied)
{
    int ret = ERROR_SUCCESS;
    
    applied = false;
    
    SrsConfDirective* conf = root->get("vhost", vhost);
    // callers validate the vhost exists; assert to fail fast instead of a
    // null-deref, consistent with raw_delete_vhost.
    srs_assert(conf);
    
    conf = conf->get_or_create("dvr")->get_or_create("dvr_apply");
    
    // the sentinel values "all"/"none" are replaced by an explicit stream list.
    if (conf->args.size() == 1 && (conf->arg0() == "all" || conf->arg0() == "none")) {
        conf->args.clear();
    }
    
    if (::find(conf->args.begin(), conf->args.end(), stream) == conf->args.end()) {
        conf->args.push_back(stream);
    }
    
    if ((ret = do_reload_vhost_dvr_apply(vhost)) != ERROR_SUCCESS) {
        return ret;
    }
    
    applied = true;
    
    return ret;
}

// RAW API: remove a stream from the vhost dvr_apply list and notify reload handlers.
int SrsConfig::raw_disable_dvr(string vhost, string stream, bool& applied)
{
    int ret = ERROR_SUCCESS;
    
    applied = false;
    
    SrsConfDirective* conf = root->get("vhost", vhost);
    // callers validate the vhost exists; assert to fail fast instead of a
    // null-deref, consistent with raw_delete_vhost.
    srs_assert(conf);
    
    conf = conf->get_or_create("dvr")->get_or_create("dvr_apply");
    
    std::vector<string>::iterator it;
    if ((it = ::find(conf->args.begin(), conf->args.end(), stream)) != conf->args.end()) {
        conf->args.erase(it);
    }
    
    // an empty list falls back to the "none" sentinel.
    if (conf->args.empty()) {
        conf->args.push_back("none");
    }
    
    if ((ret = do_reload_vhost_dvr_apply(vhost)) != ERROR_SUCCESS) {
        return ret;
    }
    
    applied = true;
    
    return ret;
}

// Notify all reload handlers that the listen endpoints changed.
int SrsConfig::do_reload_listen()
{
    int ret = ERROR_SUCCESS;
    
    vector<ISrsReloadHandler*>::iterator it;
    for (it = subscribes.begin(); it != subscribes.end(); ++it) {
        ISrsReloadHandler* subscribe = *it;
        if
((ret = subscribe->on_reload_listen()) != ERROR_SUCCESS) { srs_error("notify subscribes reload listen failed. ret=%d", ret); return ret; } } srs_trace("reload listen success."); return ret; } int SrsConfig::do_reload_pid() { int ret = ERROR_SUCCESS; vector<ISrsReloadHandler*>::iterator it; for (it = subscribes.begin(); it != subscribes.end(); ++it) { ISrsReloadHandler* subscribe = *it; if ((ret = subscribe->on_reload_pid()) != ERROR_SUCCESS) { srs_error("notify subscribes reload pid failed. ret=%d", ret); return ret; } } srs_trace("reload pid success."); return ret; } int SrsConfig::do_reload_srs_log_tank() { int ret = ERROR_SUCCESS; vector<ISrsReloadHandler*>::iterator it; for (it = subscribes.begin(); it != subscribes.end(); ++it) { ISrsReloadHandler* subscribe = *it; if ((ret = subscribe->on_reload_log_tank()) != ERROR_SUCCESS) { srs_error("notify subscribes reload srs_log_tank failed. ret=%d", ret); return ret; } } srs_trace("reload srs_log_tank success."); return ret; } int SrsConfig::do_reload_srs_log_level() { int ret = ERROR_SUCCESS; vector<ISrsReloadHandler*>::iterator it; for (it = subscribes.begin(); it != subscribes.end(); ++it) { ISrsReloadHandler* subscribe = *it; if ((ret = subscribe->on_reload_log_level()) != ERROR_SUCCESS) { srs_error("notify subscribes reload srs_log_level failed. ret=%d", ret); return ret; } } srs_trace("reload srs_log_level success."); return ret; } int SrsConfig::do_reload_srs_log_file() { int ret = ERROR_SUCCESS; vector<ISrsReloadHandler*>::iterator it; for (it = subscribes.begin(); it != subscribes.end(); ++it) { ISrsReloadHandler* subscribe = *it; if ((ret = subscribe->on_reload_log_file()) != ERROR_SUCCESS) { srs_error("notify subscribes reload srs_log_file failed. 
ret=%d", ret); return ret; } } srs_trace("reload srs_log_file success."); return ret; } int SrsConfig::do_reload_max_connections() { int ret = ERROR_SUCCESS; vector<ISrsReloadHandler*>::iterator it; for (it = subscribes.begin(); it != subscribes.end(); ++it) { ISrsReloadHandler* subscribe = *it; if ((ret = subscribe->on_reload_max_conns()) != ERROR_SUCCESS) { srs_error("notify subscribes reload max_connections failed. ret=%d", ret); return ret; } } srs_trace("reload max_connections success."); return ret; } int SrsConfig::do_reload_utc_time() { int ret = ERROR_SUCCESS; vector<ISrsReloadHandler*>::iterator it; for (it = subscribes.begin(); it != subscribes.end(); ++it) { ISrsReloadHandler* subscribe = *it; if ((ret = subscribe->on_reload_utc_time()) != ERROR_SUCCESS) { srs_error("notify subscribes utc_time failed. ret=%d", ret); return ret; } } srs_trace("reload utc_time success."); return ret; } int SrsConfig::do_reload_pithy_print_ms() { int ret = ERROR_SUCCESS; vector<ISrsReloadHandler*>::iterator it; for (it = subscribes.begin(); it != subscribes.end(); ++it) { ISrsReloadHandler* subscribe = *it; if ((ret = subscribe->on_reload_pithy_print()) != ERROR_SUCCESS) { srs_error("notify subscribes pithy_print_ms failed. ret=%d", ret); return ret; } } srs_trace("reload pithy_print_ms success."); return ret; } int SrsConfig::do_reload_vhost_added(string vhost) { int ret = ERROR_SUCCESS; srs_trace("vhost %s added, reload it.", vhost.c_str()); vector<ISrsReloadHandler*>::iterator it; for (it = subscribes.begin(); it != subscribes.end(); ++it) { ISrsReloadHandler* subscribe = *it; if ((ret = subscribe->on_reload_vhost_added(vhost)) != ERROR_SUCCESS) { srs_error("notify subscribes added vhost %s failed. 
ret=%d", vhost.c_str(), ret); return ret; } } srs_trace("reload new vhost %s success.", vhost.c_str()); return ret; } int SrsConfig::do_reload_vhost_removed(string vhost) { int ret = ERROR_SUCCESS; srs_trace("vhost %s removed, reload it.", vhost.c_str()); vector<ISrsReloadHandler*>::iterator it; for (it = subscribes.begin(); it != subscribes.end(); ++it) { ISrsReloadHandler* subscribe = *it; if ((ret = subscribe->on_reload_vhost_removed(vhost)) != ERROR_SUCCESS) { srs_error("notify subscribes removed " "vhost %s failed. ret=%d", vhost.c_str(), ret); return ret; } } srs_trace("reload removed vhost %s success.", vhost.c_str()); return ret; } int SrsConfig::do_reload_vhost_dvr_apply(string vhost) { int ret = ERROR_SUCCESS; vector<ISrsReloadHandler*>::iterator it; for (it = subscribes.begin(); it != subscribes.end(); ++it) { ISrsReloadHandler* subscribe = *it; if ((ret = subscribe->on_reload_vhost_dvr_apply(vhost)) != ERROR_SUCCESS) { srs_error("vhost %s notify subscribes dvr_apply failed. ret=%d", vhost.c_str(), ret); return ret; } } srs_trace("vhost %s reload dvr_apply success.", vhost.c_str()); return ret; } string SrsConfig::config() { return config_file; } int SrsConfig::parse_argv(int& i, char** argv) { int ret = ERROR_SUCCESS; char* p = argv[i]; if (*p++ != '-') { show_help = true; return ret; } while (*p) { switch (*p++) { case '?': case 'h': show_help = true; break; case 't': show_help = false; test_conf = true; break; case 'p': dolphin = true; if (*p) { dolphin_rtmp_port = p; continue; } if (argv[++i]) { dolphin_rtmp_port = argv[i]; continue; } ret = ERROR_SYSTEM_CONFIG_INVALID; srs_error("option \"-p\" requires params, ret=%d", ret); return ret; case 'x': dolphin = true; if (*p) { dolphin_http_port = p; continue; } if (argv[++i]) { dolphin_http_port = argv[i]; continue; } ret = ERROR_SYSTEM_CONFIG_INVALID; srs_error("option \"-x\" requires params, ret=%d", ret); return ret; case 'v': case 'V': show_help = false; show_version = true; break; case 'g': case 
'G': show_help = false; show_signature = true; break; case 'c': show_help = false; if (*p) { config_file = p; continue; } if (argv[++i]) { config_file = argv[i]; continue; } ret = ERROR_SYSTEM_CONFIG_INVALID; srs_error("option \"-c\" requires parameter, ret=%d", ret); return ret; default: ret = ERROR_SYSTEM_CONFIG_INVALID; srs_error("invalid option: \"%c\", see help: %s -h, ret=%d", *(p - 1), argv[0], ret); return ret; } } return ret; } void SrsConfig::print_help(char** argv) { printf( RTMP_SIG_SRS_SERVER" "RTMP_SIG_SRS_COPYRIGHT"\n" "License: "RTMP_SIG_SRS_LICENSE"\n" "Primary: "RTMP_SIG_SRS_PRIMARY"\n" "Authors: "RTMP_SIG_SRS_AUTHROS"\n" "Build: "SRS_AUTO_BUILD_DATE" Configuration:"SRS_AUTO_USER_CONFIGURE"\n" "Features:"SRS_AUTO_CONFIGURE"\n""\n" "Usage: %s [-h?vVsS] [[-t] -c <filename>]\n" "\n" "Options:\n" " -?, -h : show this help and exit(0)\n" " -v, -V : show version and exit(0)\n" " -g, -G : show server signature and exit(0)\n" " -t : test configuration file, exit(error_code).\n" " -c filename : use configuration file for SRS\n" "For srs-dolphin:\n" " -p rtmp-port : the rtmp port to listen.\n" " -x http-port : the http port to listen.\n" "\n" RTMP_SIG_SRS_WEB"\n" RTMP_SIG_SRS_URL"\n" "Email: "RTMP_SIG_SRS_EMAIL"\n" "\n" "For example:\n" " %s -v\n" " %s -t -c "SRS_CONF_DEFAULT_COFNIG_FILE"\n" " %s -c "SRS_CONF_DEFAULT_COFNIG_FILE"\n", argv[0], argv[0], argv[0], argv[0]); } int SrsConfig::parse_file(const char* filename) { int ret = ERROR_SUCCESS; config_file = filename; if (config_file.empty()) { return ERROR_SYSTEM_CONFIG_INVALID; } SrsConfigBuffer buffer; if ((ret = buffer.fullfill(config_file.c_str())) != ERROR_SUCCESS) { return ret; } return parse_buffer(&buffer); } int SrsConfig::check_config() { int ret = ERROR_SUCCESS; srs_trace("srs checking config..."); //////////////////////////////////////////////////////////////////////// // check empty //////////////////////////////////////////////////////////////////////// if (root->directives.size() == 0) { 
ret = ERROR_SYSTEM_CONFIG_INVALID; srs_error("conf is empty, ret=%d", ret); return ret; } //////////////////////////////////////////////////////////////////////// // check root directives. //////////////////////////////////////////////////////////////////////// for (int i = 0; i < (int)root->directives.size(); i++) { SrsConfDirective* conf = root->at(i); std::string n = conf->name; if (n != "listen" && n != "pid" && n != "chunk_size" && n != "ff_log_dir" && n != "srs_log_tank" && n != "srs_log_level" && n != "srs_log_file" && n != "max_connections" && n != "daemon" && n != "heartbeat" && n != "http_api" && n != "stats" && n != "vhost" && n != "pithy_print_ms" && n != "http_server" && n != "stream_caster" && n != "kafka" && n != "utc_time" && n != "work_dir" ) { ret = ERROR_SYSTEM_CONFIG_INVALID; srs_error("unsupported directive %s, ret=%d", n.c_str(), ret); return ret; } } if (true) { SrsConfDirective* conf = root->get("http_api"); for (int i = 0; conf && i < (int)conf->directives.size(); i++) { SrsConfDirective* obj = conf->at(i); string n = obj->name; if (n != "enabled" && n != "listen" && n != "crossdomain" && n != "raw_api") { ret = ERROR_SYSTEM_CONFIG_INVALID; srs_error("unsupported http_api directive %s, ret=%d", n.c_str(), ret); return ret; } if (n == "raw_api") { for (int j = 0; j < (int)obj->directives.size(); j++) { string m = obj->at(j)->name; if (m != "enabled" && m != "allow_reload" && m != "allow_query" && m != "allow_update") { ret = ERROR_SYSTEM_CONFIG_INVALID; srs_error("unsupported http_api.raw_api directive %s, ret=%d", m.c_str(), ret); return ret; } } } } } if (true) { SrsConfDirective* conf = root->get("http_server"); for (int i = 0; conf && i < (int)conf->directives.size(); i++) { string n = conf->at(i)->name; if (n != "enabled" && n != "listen" && n != "dir") { ret = ERROR_SYSTEM_CONFIG_INVALID; srs_error("unsupported http_stream directive %s, ret=%d", n.c_str(), ret); return ret; } } } if (true) { SrsConfDirective* conf = root->get("kafka"); 
for (int i = 0; conf && i < (int)conf->directives.size(); i++) { string n = conf->at(i)->name; if (n != "enabled" && n != "brokers" && n != "topic") { ret = ERROR_SYSTEM_CONFIG_INVALID; srs_error("unsupported kafka directive %s, ret=%d", n.c_str(), ret); return ret; } } } if (true) { SrsConfDirective* conf = get_heartbeart(); for (int i = 0; conf && i < (int)conf->directives.size(); i++) { string n = conf->at(i)->name; if (n != "enabled" && n != "interval" && n != "url" && n != "device_id" && n != "summaries" ) { ret = ERROR_SYSTEM_CONFIG_INVALID; srs_error("unsupported heartbeat directive %s, ret=%d", n.c_str(), ret); return ret; } } } if (true) { SrsConfDirective* conf = get_stats(); for (int i = 0; conf && i < (int)conf->directives.size(); i++) { string n = conf->at(i)->name; if (n != "network" && n != "disk") { ret = ERROR_SYSTEM_CONFIG_INVALID; srs_error("unsupported stats directive %s, ret=%d", n.c_str(), ret); return ret; } } } //////////////////////////////////////////////////////////////////////// // check listen for rtmp. 
//////////////////////////////////////////////////////////////////////// if (true) { vector<string> listens = get_listens(); if (listens.size() <= 0) { ret = ERROR_SYSTEM_CONFIG_INVALID; srs_error("directive \"listen\" is empty, ret=%d", ret); return ret; } for (int i = 0; i < (int)listens.size(); i++) { string port = listens[i]; if (port.empty() || ::atoi(port.c_str()) <= 0) { ret = ERROR_SYSTEM_CONFIG_INVALID; srs_error("directive listen invalid, port=%s, ret=%d", port.c_str(), ret); return ret; } } } //////////////////////////////////////////////////////////////////////// // check max connections //////////////////////////////////////////////////////////////////////// if (get_max_connections() <= 0) { ret = ERROR_SYSTEM_CONFIG_INVALID; srs_error("directive max_connections invalid, max_connections=%d, ret=%d", get_max_connections(), ret); return ret; } // check max connections of system limits if (true) { int nb_consumed_fds = (int)get_listens().size(); if (!get_http_api_listen().empty()) { nb_consumed_fds++; } if (!get_http_stream_listen().empty()) { nb_consumed_fds++; } if (get_log_tank_file()) { nb_consumed_fds++; } // 0, 1, 2 for stdin, stdout and stderr. nb_consumed_fds += 3; int nb_connections = get_max_connections(); int nb_total = nb_connections + nb_consumed_fds; int max_open_files = (int)sysconf(_SC_OPEN_MAX); int nb_canbe = max_open_files - nb_consumed_fds - 1; // for each play connections, we open a pipe(2fds) to convert SrsConsumver to io, // refine performance, @see: https://github.com/ossrs/srs/issues/194 if (nb_total >= max_open_files) { ret = ERROR_SYSTEM_CONFIG_INVALID; srs_error("invalid max_connections=%d, required=%d, system limit to %d, " "total=%d(max_connections=%d, nb_consumed_fds=%d), ret=%d. 
" "you can change max_connections from %d to %d, or " "you can login as root and set the limit: ulimit -HSn %d", nb_connections, nb_total + 1, max_open_files, nb_total, nb_connections, nb_consumed_fds, ret, nb_connections, nb_canbe, nb_total + 1); return ret; } } //////////////////////////////////////////////////////////////////////// // check heartbeat //////////////////////////////////////////////////////////////////////// if (get_heartbeat_interval() <= 0) { ret = ERROR_SYSTEM_CONFIG_INVALID; srs_error("directive heartbeat interval invalid, interval=%"PRId64", ret=%d", get_heartbeat_interval(), ret); return ret; } //////////////////////////////////////////////////////////////////////// // check stats //////////////////////////////////////////////////////////////////////// if (get_stats_network() < 0) { ret = ERROR_SYSTEM_CONFIG_INVALID; srs_error("directive stats network invalid, network=%d, ret=%d", get_stats_network(), ret); return ret; } if (true) { vector<std::string> ips = srs_get_local_ipv4_ips(); int index = get_stats_network(); if (index >= (int)ips.size()) { ret = ERROR_SYSTEM_CONFIG_INVALID; srs_error("stats network invalid, total local ip count=%d, index=%d, ret=%d", (int)ips.size(), index, ret); return ret; } srs_warn("stats network use index=%d, ip=%s", index, ips.at(index).c_str()); } if (true) { SrsConfDirective* conf = get_stats_disk_device(); if (conf == NULL || (int)conf->args.size() <= 0) { srs_warn("stats disk not configed, disk iops disabled."); } else { string disks; for (int i = 0; i < (int)conf->args.size(); i++) { disks += conf->args.at(i); disks += " "; } srs_warn("stats disk list: %s", disks.c_str()); } } //////////////////////////////////////////////////////////////////////// // check http api //////////////////////////////////////////////////////////////////////// if (get_http_api_listen().empty()) { ret = ERROR_SYSTEM_CONFIG_INVALID; srs_error("directive http_api listen invalid, listen=%s, ret=%d", get_http_api_listen().c_str(), 
ret); return ret; } //////////////////////////////////////////////////////////////////////// // check http stream //////////////////////////////////////////////////////////////////////// if (get_http_stream_listen().empty()) { ret = ERROR_SYSTEM_CONFIG_INVALID; srs_error("directive http_stream listen invalid, listen=%s, ret=%d", get_http_stream_listen().c_str(), ret); return ret; } //////////////////////////////////////////////////////////////////////// // check log name and level //////////////////////////////////////////////////////////////////////// if (true) { std::string log_filename = this->get_log_file(); if (get_log_tank_file() && log_filename.empty()) { ret = ERROR_SYSTEM_CONFIG_INVALID; srs_error("must specifies the file to write log to. ret=%d", ret); return ret; } if (get_log_tank_file()) { srs_trace("write log to file %s", log_filename.c_str()); srs_trace("you can: tailf %s", log_filename.c_str()); srs_trace("@see: %s", SRS_WIKI_URL_LOG); } else { srs_trace("write log to console"); } } //////////////////////////////////////////////////////////////////////// // check features //////////////////////////////////////////////////////////////////////// #ifndef SRS_AUTO_HTTP_SERVER if (get_http_stream_enabled()) { srs_warn("http_stream is disabled by configure"); } #endif #ifndef SRS_AUTO_HTTP_API if (get_http_api_enabled()) { srs_warn("http_api is disabled by configure"); } #endif vector<SrsConfDirective*> stream_casters = get_stream_casters(); for (int n = 0; n < (int)stream_casters.size(); n++) { SrsConfDirective* stream_caster = stream_casters[n]; for (int i = 0; stream_caster && i < (int)stream_caster->directives.size(); i++) { SrsConfDirective* conf = stream_caster->at(i); string n = conf->name; if (n != "enabled" && n != "caster" && n != "output" && n != "listen" && n != "rtp_port_min" && n != "rtp_port_max" ) { ret = ERROR_SYSTEM_CONFIG_INVALID; srs_error("unsupported stream_caster directive %s, ret=%d", n.c_str(), ret); return ret; } } } 
//////////////////////////////////////////////////////////////////////// // check vhosts. //////////////////////////////////////////////////////////////////////// vector<SrsConfDirective*> vhosts; get_vhosts(vhosts); for (int n = 0; n < (int)vhosts.size(); n++) { SrsConfDirective* vhost = vhosts[n]; for (int i = 0; vhost && i < (int)vhost->directives.size(); i++) { SrsConfDirective* conf = vhost->at(i); string n = conf->name; if (n != "enabled" && n != "chunk_size" && n != "min_latency" && n != "tcp_nodelay" && n != "dvr" && n != "ingest" && n != "hls" && n != "http_hooks" && n != "refer" && n != "forward" && n != "transcode" && n != "bandcheck" && n != "play" && n != "publish" && n != "cluster" && n != "security" && n != "http_remux" && n != "http_static" && n != "hds" && n != "exec" #ifdef SRS_AUTO_DYNAMIC_CONFIG && n != "dynamic_transcode" && n != "dynamic_forward" && n != "dynamic_cluster" #endif ) { ret = ERROR_SYSTEM_CONFIG_INVALID; srs_error("unsupported vhost directive %s, ret=%d", n.c_str(), ret); return ret; } // for each sub directives of vhost. 
if (n == "dvr") { for (int j = 0; j < (int)conf->directives.size(); j++) { string m = conf->at(j)->name.c_str(); if (m != "enabled" && m != "dvr_apply" && m != "dvr_path" && m != "dvr_plan" && m != "dvr_duration" && m != "dvr_wait_keyframe" && m != "time_jitter" ) { ret = ERROR_SYSTEM_CONFIG_INVALID; srs_error("unsupported vhost dvr directive %s, ret=%d", m.c_str(), ret); return ret; } } } else if (n == "refer") { for (int j = 0; j < (int)conf->directives.size(); j++) { string m = conf->at(j)->name.c_str(); if (m != "enabled" && m != "all" && m != "publish" && m != "play") { ret = ERROR_SYSTEM_CONFIG_INVALID; srs_error("unsupported vhost refer directive %s, ret=%d", m.c_str(), ret); return ret; } } } else if (n == "exec") { for (int j = 0; j < (int)conf->directives.size(); j++) { string m = conf->at(j)->name.c_str(); if (m != "enabled" && m != "publish") { ret = ERROR_SYSTEM_CONFIG_INVALID; srs_error("unsupported vhost exec directive %s, ret=%d", m.c_str(), ret); return ret; } } } else if (n == "play") { for (int j = 0; j < (int)conf->directives.size(); j++) { string m = conf->at(j)->name.c_str(); if (m != "time_jitter" && m != "mix_correct" && m != "atc" && m != "atc_auto" && m != "mw_latency" && m != "gop_cache" && m != "queue_length" && m != "send_min_interval" && m != "reduce_sequence_header" ) { ret = ERROR_SYSTEM_CONFIG_INVALID; srs_error("unsupported vhost play directive %s, ret=%d", m.c_str(), ret); return ret; } } } else if (n == "cluster") { for (int j = 0; j < (int)conf->directives.size(); j++) { string m = conf->at(j)->name.c_str(); if (m != "mode" && m != "origin" && m != "token_traverse" && m != "vhost" && m != "debug_srs_upnode" && m != "publish_local") { ret = ERROR_SYSTEM_CONFIG_INVALID; srs_error("unsupported vhost cluster directive %s, ret=%d", m.c_str(), ret); return ret; } } } else if (n == "publish") { for (int j = 0; j < (int)conf->directives.size(); j++) { string m = conf->at(j)->name.c_str(); if (m != "mr" && m != "mr_latency" && m != 
"firstpkt_timeout" && m != "normal_timeout" && m != "parse_sps") { ret = ERROR_SYSTEM_CONFIG_INVALID; srs_error("unsupported vhost publish directive %s, ret=%d", m.c_str(), ret); return ret; } } } else if (n == "ingest") { for (int j = 0; j < (int)conf->directives.size(); j++) { string m = conf->at(j)->name.c_str(); if (m != "enabled" && m != "input" && m != "ffmpeg" && m != "engine") { ret = ERROR_SYSTEM_CONFIG_INVALID; srs_error("unsupported vhost ingest directive %s, ret=%d", m.c_str(), ret); return ret; } } } else if (n == "http_static") { for (int j = 0; j < (int)conf->directives.size(); j++) { string m = conf->at(j)->name.c_str(); if (m != "enabled" && m != "mount" && m != "dir") { ret = ERROR_SYSTEM_CONFIG_INVALID; srs_error("unsupported vhost http directive %s, ret=%d", m.c_str(), ret); return ret; } } } else if (n == "http_remux") { for (int j = 0; j < (int)conf->directives.size(); j++) { string m = conf->at(j)->name.c_str(); if (m != "enabled" && m != "mount" && m != "fast_cache" && m != "hstrs") { ret = ERROR_SYSTEM_CONFIG_INVALID; srs_error("unsupported vhost http_remux directive %s, ret=%d", m.c_str(), ret); return ret; } } } else if (n == "hls") { for (int j = 0; j < (int)conf->directives.size(); j++) { string m = conf->at(j)->name.c_str(); if (m != "enabled" && m != "hls_entry_prefix" && m != "hls_path" && m != "hls_fragment" && m != "hls_window" && m != "hls_on_error" && m != "hls_storage" && m != "hls_mount" && m != "hls_td_ratio" && m != "hls_aof_ratio" && m != "hls_acodec" && m != "hls_vcodec" && m != "hls_m3u8_file" && m != "hls_ts_file" && m != "hls_ts_floor" && m != "hls_cleanup" && m != "hls_nb_notify" && m != "hls_wait_keyframe" && m != "hls_dispose" ) { ret = ERROR_SYSTEM_CONFIG_INVALID; srs_error("unsupported vhost hls directive %s, ret=%d", m.c_str(), ret); return ret; } } } else if (n == "http_hooks") { for (int j = 0; j < (int)conf->directives.size(); j++) { string m = conf->at(j)->name.c_str(); if (m != "enabled" && m != "on_connect" 
&& m != "on_close" && m != "on_publish" && m != "on_unpublish" && m != "on_play" && m != "on_stop" && m != "on_dvr" && m != "on_hls" && m != "on_hls_notify" ) { ret = ERROR_SYSTEM_CONFIG_INVALID; srs_error("unsupported vhost http_hooks directive %s, ret=%d", m.c_str(), ret); return ret; } } } else if (n == "forward") { for (int j = 0; j < (int)conf->directives.size(); j++) { string m = conf->at(j)->name.c_str(); if (m != "enabled" && m != "destination") { ret = ERROR_SYSTEM_CONFIG_INVALID; srs_error("unsupported vhost forward directive %s, ret=%d", m.c_str(), ret); return ret; } } } else if (n == "security") { for (int j = 0; j < (int)conf->directives.size(); j++) { SrsConfDirective* security = conf->at(j); string m = security->name.c_str(); if (m != "enabled" && m != "deny" && m != "allow") { ret = ERROR_SYSTEM_CONFIG_INVALID; srs_error("unsupported vhost security directive %s, ret=%d", m.c_str(), ret); return ret; } } } else if (n == "transcode") { for (int j = 0; j < (int)conf->directives.size(); j++) { SrsConfDirective* trans = conf->at(j); string m = trans->name.c_str(); if (m != "enabled" && m != "ffmpeg" && m != "engine") { ret = ERROR_SYSTEM_CONFIG_INVALID; srs_error("unsupported vhost transcode directive %s, ret=%d", m.c_str(), ret); return ret; } if (m == "engine") { for (int k = 0; k < (int)trans->directives.size(); k++) { string e = trans->at(k)->name; if (e != "enabled" && e != "vfilter" && e != "vcodec" && e != "vbitrate" && e != "vfps" && e != "vwidth" && e != "vheight" && e != "vthreads" && e != "vprofile" && e != "vpreset" && e != "vparams" && e != "acodec" && e != "abitrate" && e != "asample_rate" && e != "achannels" && e != "aparams" && e != "output" && e != "iformat" && e != "oformat" ) { ret = ERROR_SYSTEM_CONFIG_INVALID; srs_error("unsupported vhost transcode engine directive %s, ret=%d", e.c_str(), ret); return ret; } } } } } else if (n == "bandcheck") { for (int j = 0; j < (int)conf->directives.size(); j++) { string m = 
conf->at(j)->name.c_str(); if (m != "enabled" && m != "key" && m != "interval" && m != "limit_kbps") { ret = ERROR_SYSTEM_CONFIG_INVALID; srs_error("unsupported vhost bandcheck directive %s, ret=%d", m.c_str(), ret); return ret; } } } } } // check ingest id unique. for (int i = 0; i < (int)vhosts.size(); i++) { SrsConfDirective* vhost = vhosts[i]; std::vector<std::string> ids; for (int j = 0; j < (int)vhost->directives.size(); j++) { SrsConfDirective* conf = vhost->at(j); if (conf->name != "ingest") { continue; } std::string id = conf->arg0(); for (int k = 0; k < (int)ids.size(); k++) { if (id == ids.at(k)) { ret = ERROR_SYSTEM_CONFIG_INVALID; srs_error("directive \"ingest\" id duplicated, vhost=%s, id=%s, ret=%d", vhost->name.c_str(), id.c_str(), ret); return ret; } } ids.push_back(id); } } //////////////////////////////////////////////////////////////////////// // check chunk size //////////////////////////////////////////////////////////////////////// if (get_global_chunk_size() < SRS_CONSTS_RTMP_MIN_CHUNK_SIZE || get_global_chunk_size() > SRS_CONSTS_RTMP_MAX_CHUNK_SIZE ) { ret = ERROR_SYSTEM_CONFIG_INVALID; srs_error("directive chunk_size invalid, chunk_size=%d, must in [%d, %d], ret=%d", get_global_chunk_size(), SRS_CONSTS_RTMP_MIN_CHUNK_SIZE, SRS_CONSTS_RTMP_MAX_CHUNK_SIZE, ret); return ret; } for (int i = 0; i < (int)vhosts.size(); i++) { SrsConfDirective* vhost = vhosts[i]; if (get_chunk_size(vhost->arg0()) < SRS_CONSTS_RTMP_MIN_CHUNK_SIZE || get_chunk_size(vhost->arg0()) > SRS_CONSTS_RTMP_MAX_CHUNK_SIZE ) { ret = ERROR_SYSTEM_CONFIG_INVALID; srs_error("directive vhost %s chunk_size invalid, chunk_size=%d, must in [%d, %d], ret=%d", vhost->arg0().c_str(), get_chunk_size(vhost->arg0()), SRS_CONSTS_RTMP_MIN_CHUNK_SIZE, SRS_CONSTS_RTMP_MAX_CHUNK_SIZE, ret); return ret; } } for (int i = 0; i < (int)vhosts.size(); i++) { SrsConfDirective* vhost = vhosts[i]; srs_assert(vhost != NULL); #ifndef SRS_AUTO_DVR if (get_dvr_enabled(vhost->arg0())) { srs_warn("dvr of 
vhost %s is disabled by configure", vhost->arg0().c_str());
        }
#endif
#ifndef SRS_AUTO_HLS
        // Warn when hls is configured but the HLS module is compiled out.
        if (get_hls_enabled(vhost->arg0())) {
            srs_warn("hls of vhost %s is disabled by configure", vhost->arg0().c_str());
        }
#endif
#ifndef SRS_AUTO_HTTP_CALLBACK
        // Warn when http_hooks is configured but callbacks are compiled out.
        if (get_vhost_http_hooks_enabled(vhost->arg0())) {
            srs_warn("http_hooks of vhost %s is disabled by configure", vhost->arg0().c_str());
        }
#endif
#ifndef SRS_AUTO_TRANSCODE
        // Warn when transcode is configured but transcoding is compiled out.
        if (get_transcode_enabled(get_transcode(vhost->arg0(), ""))) {
            srs_warn("transcode of vhost %s is disabled by configure", vhost->arg0().c_str());
        }
#endif
#ifndef SRS_AUTO_INGEST
        // Warn for each configured ingester when ingest is compiled out.
        vector<SrsConfDirective*> ingesters = get_ingesters(vhost->arg0());
        for (int j = 0; j < (int)ingesters.size(); j++) {
            SrsConfDirective* ingest = ingesters[j];
            if (get_ingest_enabled(ingest)) {
                srs_warn("ingest %s of vhost %s is disabled by configure",
                    ingest->arg0().c_str(), vhost->arg0().c_str()
                );
            }
        }
#endif
        // TODO: FIXME: required http server when hls storage is ram or both.
    }
    
    return ret;
}

// Parse the config from buffer into the root directive tree, then, when srs
// was launched by dolphin, override listen/http_server/daemon/log directives
// with the dolphin-supplied ports.
// Returns ERROR_SUCCESS, or the parse error from the directive tree.
int SrsConfig::parse_buffer(SrsConfigBuffer* buffer)
{
    int ret = ERROR_SUCCESS;
    
    if ((ret = root->parse(buffer)) != ERROR_SUCCESS) {
        return ret;
    }
    
    // mock by dolphin mode.
    // for the dolphin will start srs with specified params.
    if (dolphin) {
        // for RTMP.
        set_config_directive(root, "listen", dolphin_rtmp_port);
        
        // for HTTP
        set_config_directive(root, "http_server", "");
        SrsConfDirective* http_server = root->get("http_server");
        set_config_directive(http_server, "enabled", "on");
        set_config_directive(http_server, "listen", dolphin_http_port);
        
        // others.
set_config_directive(root, "daemon", "off"); set_config_directive(root, "srs_log_tank", "console"); } return ret; } string SrsConfig::cwd() { return _cwd; } string SrsConfig::argv() { return _argv; } bool SrsConfig::get_deamon() { SrsConfDirective* conf = root->get("daemon"); if (!conf || conf->arg0().empty()) { return true; } return SRS_CONF_PERFER_TRUE(conf->arg0()); } SrsConfDirective* SrsConfig::get_root() { return root; } int SrsConfig::get_max_connections() { static int DEFAULT = 1000; SrsConfDirective* conf = root->get("max_connections"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return ::atoi(conf->arg0().c_str()); } vector<string> SrsConfig::get_listens() { std::vector<string> ports; SrsConfDirective* conf = root->get("listen"); if (!conf) { return ports; } for (int i = 0; i < (int)conf->args.size(); i++) { ports.push_back(conf->args.at(i)); } return ports; } string SrsConfig::get_pid_file() { static string DEFAULT = "./objs/srs.pid"; SrsConfDirective* conf = root->get("pid"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return conf->arg0(); } int SrsConfig::get_pithy_print_ms() { static int DEFAULT = 10000; SrsConfDirective* conf = root->get("pithy_print_ms"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return ::atoi(conf->arg0().c_str()); } bool SrsConfig::get_utc_time() { static bool DEFAULT = false; SrsConfDirective* conf = root->get("utc_time"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return SRS_CONF_PERFER_FALSE(conf->arg0()); } string SrsConfig::get_work_dir() { static string DEFAULT = ""; SrsConfDirective* conf = root->get("work_dir"); if( !conf || conf->arg0().empty()) { return DEFAULT; } return conf->arg0(); } vector<SrsConfDirective*> SrsConfig::get_stream_casters() { srs_assert(root); std::vector<SrsConfDirective*> stream_casters; for (int i = 0; i < (int)root->directives.size(); i++) { SrsConfDirective* conf = root->at(i); if (!conf->is_stream_caster()) { continue; } 
stream_casters.push_back(conf); } return stream_casters; } bool SrsConfig::get_stream_caster_enabled(SrsConfDirective* conf) { static bool DEFAULT = false; if (!conf) { return DEFAULT; } conf = conf->get("enabled"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return SRS_CONF_PERFER_FALSE(conf->arg0()); } string SrsConfig::get_stream_caster_engine(SrsConfDirective* conf) { static string DEFAULT = ""; if (!conf) { return DEFAULT; } conf = conf->get("caster"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return conf->arg0(); } string SrsConfig::get_stream_caster_output(SrsConfDirective* conf) { static string DEFAULT = ""; if (!conf) { return DEFAULT; } conf = conf->get("output"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return conf->arg0(); } int SrsConfig::get_stream_caster_listen(SrsConfDirective* conf) { static int DEFAULT = 0; if (!conf) { return DEFAULT; } conf = conf->get("listen"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return ::atoi(conf->arg0().c_str()); } int SrsConfig::get_stream_caster_rtp_port_min(SrsConfDirective* conf) { static int DEFAULT = 0; if (!conf) { return DEFAULT; } conf = conf->get("rtp_port_min"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return ::atoi(conf->arg0().c_str()); } int SrsConfig::get_stream_caster_rtp_port_max(SrsConfDirective* conf) { static int DEFAULT = 0; if (!conf) { return DEFAULT; } conf = conf->get("rtp_port_max"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return ::atoi(conf->arg0().c_str()); } bool SrsConfig::get_kafka_enabled() { static bool DEFAULT = false; SrsConfDirective* conf = root->get("kafka"); if (!conf) { return DEFAULT; } conf = conf->get("enabled"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return SRS_CONF_PERFER_FALSE(conf->arg0()); } SrsConfDirective* SrsConfig::get_kafka_brokers() { SrsConfDirective* conf = root->get("kafka"); if (!conf) { return NULL; } conf = conf->get("brokers"); if (!conf || conf->args.empty()) { 
return NULL; } return conf; } string SrsConfig::get_kafka_topic() { static string DEFAULT = "srs"; SrsConfDirective* conf = root->get("kafka"); if (!conf) { return DEFAULT; } conf = conf->get("topic"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return conf->arg0(); } SrsConfDirective* SrsConfig::get_vhost(string vhost, bool try_default_vhost) { srs_assert(root); for (int i = 0; i < (int)root->directives.size(); i++) { SrsConfDirective* conf = root->at(i); if (!conf->is_vhost()) { continue; } if (conf->arg0() == vhost) { return conf; } } if (try_default_vhost && vhost != SRS_CONSTS_RTMP_DEFAULT_VHOST) { return get_vhost(SRS_CONSTS_RTMP_DEFAULT_VHOST); } return NULL; } void SrsConfig::get_vhosts(vector<SrsConfDirective*>& vhosts) { srs_assert(root); for (int i = 0; i < (int)root->directives.size(); i++) { SrsConfDirective* conf = root->at(i); if (!conf->is_vhost()) { continue; } vhosts.push_back(conf); } } bool SrsConfig::get_vhost_enabled(string vhost) { SrsConfDirective* conf = get_vhost(vhost); return get_vhost_enabled(conf); } bool SrsConfig::get_vhost_enabled(SrsConfDirective* conf) { static bool DEFAULT = true; // false for NULL vhost. if (!conf) { return false; } // perfer true for exists one. 
conf = conf->get("enabled"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return SRS_CONF_PERFER_TRUE(conf->arg0()); } bool SrsConfig::get_gop_cache(string vhost) { SrsConfDirective* conf = get_vhost(vhost); if (!conf) { return SRS_PERF_GOP_CACHE; } conf = conf->get("play"); if (!conf || conf->arg0().empty()) { return SRS_PERF_GOP_CACHE; } conf = conf->get("gop_cache"); if (!conf || conf->arg0().empty()) { return SRS_PERF_GOP_CACHE; } return SRS_CONF_PERFER_TRUE(conf->arg0()); } bool SrsConfig::get_debug_srs_upnode(string vhost) { static bool DEFAULT = true; SrsConfDirective* conf = get_vhost(vhost); if (!conf) { return DEFAULT; } conf = conf->get("cluster"); if (!conf || conf->arg0().empty()) { return DEFAULT; } conf = conf->get("debug_srs_upnode"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return SRS_CONF_PERFER_TRUE(conf->arg0()); } bool SrsConfig::get_atc(string vhost) { static bool DEFAULT = false; SrsConfDirective* conf = get_vhost(vhost); if (!conf) { return DEFAULT; } conf = conf->get("play"); if (!conf || conf->arg0().empty()) { return DEFAULT; } conf = conf->get("atc"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return SRS_CONF_PERFER_FALSE(conf->arg0()); } bool SrsConfig::get_atc_auto(string vhost) { static bool DEFAULT = false; SrsConfDirective* conf = get_vhost(vhost); if (!conf) { return DEFAULT; } conf = conf->get("play"); if (!conf || conf->arg0().empty()) { return DEFAULT; } conf = conf->get("atc_auto"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return SRS_CONF_PERFER_FALSE(conf->arg0()); } int SrsConfig::get_time_jitter(string vhost) { static string DEFAULT = "full"; SrsConfDirective* conf = get_vhost(vhost); if (!conf) { return _srs_time_jitter_string2int(DEFAULT); } conf = conf->get("play"); if (!conf || conf->arg0().empty()) { return _srs_time_jitter_string2int(DEFAULT); } conf = conf->get("time_jitter"); if (!conf || conf->arg0().empty()) { return _srs_time_jitter_string2int(DEFAULT); } return 
_srs_time_jitter_string2int(conf->arg0()); } bool SrsConfig::get_mix_correct(string vhost) { static bool DEFAULT = false; SrsConfDirective* conf = get_vhost(vhost); if (!conf) { return DEFAULT; } conf = conf->get("play"); if (!conf || conf->arg0().empty()) { return DEFAULT; } conf = conf->get("mix_correct"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return SRS_CONF_PERFER_FALSE(conf->arg0()); } double SrsConfig::get_queue_length(string vhost) { static double DEFAULT = SRS_PERF_PLAY_QUEUE; SrsConfDirective* conf = get_vhost(vhost); if (!conf) { return DEFAULT; } conf = conf->get("play"); if (!conf || conf->arg0().empty()) { return DEFAULT; } conf = conf->get("queue_length"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return ::atoi(conf->arg0().c_str()); } bool SrsConfig::get_refer_enabled(string vhost) { static bool DEFAULT = false; SrsConfDirective* conf = get_vhost(vhost); if (!conf) { return DEFAULT; } conf = conf->get("refer"); if (!conf) { return DEFAULT; } conf = conf->get("enabled"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return SRS_CONF_PERFER_FALSE(conf->arg0()); } SrsConfDirective* SrsConfig::get_refer_all(string vhost) { static SrsConfDirective* DEFAULT = NULL; SrsConfDirective* conf = get_vhost(vhost); if (!conf) { return DEFAULT; } conf = conf->get("refer"); if (!conf) { return DEFAULT; } return conf->get("all"); } SrsConfDirective* SrsConfig::get_refer_play(string vhost) { static SrsConfDirective* DEFAULT = NULL; SrsConfDirective* conf = get_vhost(vhost); if (!conf) { return DEFAULT; } conf = conf->get("refer"); if (!conf) { return DEFAULT; } return conf->get("play"); } SrsConfDirective* SrsConfig::get_refer_publish(string vhost) { static SrsConfDirective* DEFAULT = NULL; SrsConfDirective* conf = get_vhost(vhost); if (!conf) { return DEFAULT; } conf = conf->get("refer"); if (!conf) { return DEFAULT; } return conf->get("publish"); } int SrsConfig::get_chunk_size(string vhost) { if (vhost.empty()) { return 
get_global_chunk_size(); } SrsConfDirective* conf = get_vhost(vhost); if (!conf) { // vhost does not specify the chunk size, // use the global instead. return get_global_chunk_size(); } conf = conf->get("chunk_size"); if (!conf || conf->arg0().empty()) { // vhost does not specify the chunk size, // use the global instead. return get_global_chunk_size(); } return ::atoi(conf->arg0().c_str()); } bool SrsConfig::get_parse_sps(string vhost) { static bool DEFAULT = true; SrsConfDirective* conf = get_vhost(vhost); if (!conf) { return DEFAULT; } conf = conf->get("publish"); if (!conf) { return DEFAULT; } conf = conf->get("parse_sps"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return SRS_CONF_PERFER_TRUE(conf->arg0()); } bool SrsConfig::get_mr_enabled(string vhost) { SrsConfDirective* conf = get_vhost(vhost); if (!conf) { return SRS_PERF_MR_ENABLED; } conf = conf->get("publish"); if (!conf) { return SRS_PERF_MR_ENABLED; } conf = conf->get("mr"); if (!conf || conf->arg0().empty()) { return SRS_PERF_MR_ENABLED; } return SRS_CONF_PERFER_FALSE(conf->arg0()); } int SrsConfig::get_mr_sleep_ms(string vhost) { SrsConfDirective* conf = get_vhost(vhost); if (!conf) { return SRS_PERF_MR_SLEEP; } conf = conf->get("publish"); if (!conf) { return SRS_PERF_MR_SLEEP; } conf = conf->get("mr_latency"); if (!conf || conf->arg0().empty()) { return SRS_PERF_MR_SLEEP; } return ::atoi(conf->arg0().c_str()); } int SrsConfig::get_mw_sleep_ms(string vhost) { SrsConfDirective* conf = get_vhost(vhost); if (!conf) { return SRS_PERF_MW_SLEEP; } conf = conf->get("play"); if (!conf || conf->arg0().empty()) { return SRS_PERF_MW_SLEEP; } conf = conf->get("mw_latency"); if (!conf || conf->arg0().empty()) { return SRS_PERF_MW_SLEEP; } return ::atoi(conf->arg0().c_str()); } bool SrsConfig::get_realtime_enabled(string vhost) { SrsConfDirective* conf = get_vhost(vhost); if (!conf) { return SRS_PERF_MIN_LATENCY_ENABLED; } conf = conf->get("min_latency"); if (!conf || conf->arg0().empty()) { return 
SRS_PERF_MIN_LATENCY_ENABLED; } return SRS_CONF_PERFER_FALSE(conf->arg0()); } bool SrsConfig::get_tcp_nodelay(string vhost) { static bool DEFAULT = false; SrsConfDirective* conf = get_vhost(vhost); if (!conf) { return DEFAULT; } conf = conf->get("tcp_nodelay"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return SRS_CONF_PERFER_FALSE(conf->arg0()); } double SrsConfig::get_send_min_interval(string vhost) { static double DEFAULT = 0.0; SrsConfDirective* conf = get_vhost(vhost); if (!conf) { return DEFAULT; } conf = conf->get("play"); if (!conf) { return DEFAULT; } conf = conf->get("send_min_interval"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return ::atof(conf->arg0().c_str()); } bool SrsConfig::get_reduce_sequence_header(string vhost) { static bool DEFAULT = false; SrsConfDirective* conf = get_vhost(vhost); if (!conf) { return DEFAULT; } conf = conf->get("play"); if (!conf) { return DEFAULT; } conf = conf->get("reduce_sequence_header"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return SRS_CONF_PERFER_FALSE(conf->arg0()); } int SrsConfig::get_publish_1stpkt_timeout(string vhost) { // when no msg recevied for publisher, use larger timeout. static int DEFAULT = 20000; SrsConfDirective* conf = get_vhost(vhost); if (!conf) { return DEFAULT; } conf = conf->get("publish"); if (!conf) { return DEFAULT; } conf = conf->get("firstpkt_timeout"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return ::atoi(conf->arg0().c_str()); } int SrsConfig::get_publish_normal_timeout(string vhost) { // the timeout for publish recv. // we must use more smaller timeout, for the recv never know the status // of underlayer socket. 
static int DEFAULT = 5000; SrsConfDirective* conf = get_vhost(vhost); if (!conf) { return DEFAULT; } conf = conf->get("publish"); if (!conf) { return DEFAULT; } conf = conf->get("normal_timeout"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return ::atoi(conf->arg0().c_str()); } int SrsConfig::get_global_chunk_size() { SrsConfDirective* conf = root->get("chunk_size"); if (!conf || conf->arg0().empty()) { return SRS_CONSTS_RTMP_SRS_CHUNK_SIZE; } return ::atoi(conf->arg0().c_str()); } SrsConfDirective* SrsConfig::get_forward(string vhost) { SrsConfDirective* conf = get_vhost(vhost); if (!conf) { return NULL; } return conf->get("forward"); } #ifdef SRS_AUTO_DYNAMIC_CONFIG SrsConfDirective* SrsConfig::get_dynamic_forward(SrsRequest *req) { return get_dynamic_config("dynamic_forward", req); } #endif bool SrsConfig::get_forward_enabled(SrsConfDirective* conf) { static bool DEFAULT = false; if (!conf) { return DEFAULT; } conf = conf->get("enabled"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return SRS_CONF_PERFER_FALSE(conf->arg0()); } SrsConfDirective* SrsConfig::get_forward_destinations(SrsConfDirective* conf) { if (!conf) { return NULL; } return conf->get("destination"); } SrsConfDirective* SrsConfig::get_vhost_http_hooks(string vhost) { SrsConfDirective* conf = get_vhost(vhost); if (!conf) { return NULL; } return conf->get("http_hooks"); } bool SrsConfig::get_vhost_http_hooks_enabled(string vhost) { static bool DEFAULT = false; SrsConfDirective* conf = get_vhost_http_hooks(vhost); if (!conf) { return DEFAULT; } conf = conf->get("enabled"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return SRS_CONF_PERFER_FALSE(conf->arg0()); } SrsConfDirective* SrsConfig::get_vhost_on_connect(string vhost) { SrsConfDirective* conf = get_vhost_http_hooks(vhost); if (!conf) { return NULL; } return conf->get("on_connect"); } SrsConfDirective* SrsConfig::get_vhost_on_close(string vhost) { SrsConfDirective* conf = get_vhost_http_hooks(vhost); if (!conf) 
{ return NULL; } return conf->get("on_close"); } SrsConfDirective* SrsConfig::get_vhost_on_publish(string vhost) { SrsConfDirective* conf = get_vhost_http_hooks(vhost); if (!conf) { return NULL; } return conf->get("on_publish"); } SrsConfDirective* SrsConfig::get_vhost_on_unpublish(string vhost) { SrsConfDirective* conf = get_vhost_http_hooks(vhost); if (!conf) { return NULL; } return conf->get("on_unpublish"); } SrsConfDirective* SrsConfig::get_vhost_on_play(string vhost) { SrsConfDirective* conf = get_vhost_http_hooks(vhost); if (!conf) { return NULL; } return conf->get("on_play"); } SrsConfDirective* SrsConfig::get_vhost_on_stop(string vhost) { SrsConfDirective* conf = get_vhost_http_hooks(vhost); if (!conf) { return NULL; } return conf->get("on_stop"); } SrsConfDirective* SrsConfig::get_vhost_on_dvr(string vhost) { SrsConfDirective* conf = get_vhost_http_hooks(vhost); if (!conf) { return NULL; } return conf->get("on_dvr"); } SrsConfDirective* SrsConfig::get_vhost_on_hls(string vhost) { SrsConfDirective* conf = get_vhost_http_hooks(vhost); if (!conf) { return NULL; } return conf->get("on_hls"); } SrsConfDirective* SrsConfig::get_vhost_on_hls_notify(string vhost) { SrsConfDirective* conf = get_vhost_http_hooks(vhost); if (!conf) { return NULL; } return conf->get("on_hls_notify"); } bool SrsConfig::get_bw_check_enabled(string vhost) { static bool DEFAULT = false; SrsConfDirective* conf = get_vhost(vhost); if (!conf) { return DEFAULT; } conf = conf->get("bandcheck"); if (!conf) { return DEFAULT; } conf = conf->get("enabled"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return SRS_CONF_PERFER_FALSE(conf->arg0()); } string SrsConfig::get_bw_check_key(string vhost) { static string DEFAULT = ""; SrsConfDirective* conf = get_vhost(vhost); if (!conf) { return DEFAULT; } conf = conf->get("bandcheck"); if (!conf) { return DEFAULT; } conf = conf->get("key"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return conf->arg0(); } int 
SrsConfig::get_bw_check_interval_ms(string vhost) { static int DEFAULT = 30 * 1000; SrsConfDirective* conf = get_vhost(vhost); if (!conf) { return DEFAULT; } conf = conf->get("bandcheck"); if (!conf) { return DEFAULT; } conf = conf->get("interval"); if (!conf) { return DEFAULT; } return (int)(::atof(conf->arg0().c_str()) * 1000); } int SrsConfig::get_bw_check_limit_kbps(string vhost) { static int DEFAULT = 1000; SrsConfDirective* conf = get_vhost(vhost); if (!conf) { return DEFAULT; } conf = conf->get("bandcheck"); if (!conf) { return DEFAULT; } conf = conf->get("limit_kbps"); if (!conf) { return DEFAULT; } return ::atoi(conf->arg0().c_str()); } SrsConfDirective* SrsConfig::get_cluster(std::string vhost) { SrsConfDirective* conf = get_vhost(vhost); if (!conf) { return NULL; } return get_cluster(conf); } SrsConfDirective* SrsConfig::get_cluster(SrsConfDirective* conf) { return conf->get("cluster"); } #ifdef SRS_AUTO_DYNAMIC_CONFIG SrsConfDirective* SrsConfig::get_dynamic_cluster(SrsRequest *req) { return get_dynamic_config("dynamic_cluster", req); } #endif bool SrsConfig::get_cluster_is_edge(SrsConfDirective* conf) { static bool DEFAULT = false; if (!conf) { return DEFAULT; } conf = conf->get("mode"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return "remote" == conf->arg0(); } SrsConfDirective* SrsConfig::get_cluster_edge_origin(SrsConfDirective* conf) { if (!conf) { return NULL; } return conf->get("origin"); } bool SrsConfig::get_cluster_edge_token_traverse(SrsConfDirective* conf) { static bool DEFAULT = false; if (!conf) { return DEFAULT; } conf = conf->get("token_traverse"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return SRS_CONF_PERFER_FALSE(conf->arg0()); } string SrsConfig::get_cluster_edge_transform_vhost(SrsConfDirective* conf) { static string DEFAULT = "[vhost]"; if (!conf) { return DEFAULT; } conf = conf->get("vhost"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return conf->arg0(); } bool 
SrsConfig::get_cluster_edge_publish_local(SrsConfDirective* conf) { static bool DEFAULT = false; if (!conf) { return DEFAULT; } conf = conf->get("publish_local"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return SRS_CONF_PERFER_FALSE(conf->arg0()); } bool SrsConfig::get_security_enabled(string vhost) { static bool DEFAULT = false; SrsConfDirective* conf = get_vhost(vhost); if (!conf) { return DEFAULT; } SrsConfDirective* security = conf->get("security"); if (!security) { return DEFAULT; } conf = security->get("enabled"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return SRS_CONF_PERFER_FALSE(conf->arg0()); } SrsConfDirective* SrsConfig::get_security_rules(string vhost) { SrsConfDirective* conf = get_vhost(vhost); if (!conf) { return NULL; } return conf->get("security"); } SrsConfDirective* SrsConfig::get_transcode(string vhost, string scope) { SrsConfDirective* conf = get_vhost(vhost); if (!conf) { return NULL; } conf = conf->get("transcode"); if (!conf || conf->arg0() != scope) { return NULL; } return conf; } #ifdef SRS_AUTO_DYNAMIC_CONFIG SrsConfDirective* SrsConfig::get_dynamic_transcode(SrsRequest *req) { return get_dynamic_config("dynamic_transcode", req); } #endif bool SrsConfig::get_transcode_enabled(SrsConfDirective* conf) { static bool DEFAULT = false; if (!conf) { return DEFAULT; } conf = conf->get("enabled"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return SRS_CONF_PERFER_FALSE(conf->arg0()); } string SrsConfig::get_transcode_ffmpeg(SrsConfDirective* conf) { static string DEFAULT = ""; if (!conf) { return DEFAULT; } conf = conf->get("ffmpeg"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return conf->arg0(); } vector<SrsConfDirective*> SrsConfig::get_transcode_engines(SrsConfDirective* conf) { vector<SrsConfDirective*> engines; if (!conf) { return engines; } for (int i = 0; i < (int)conf->directives.size(); i++) { SrsConfDirective* engine = conf->directives[i]; if (engine->name == "engine") { 
engines.push_back(engine); } } return engines; } bool SrsConfig::get_engine_enabled(SrsConfDirective* conf) { static bool DEFAULT = false; if (!conf) { return DEFAULT; } conf = conf->get("enabled"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return SRS_CONF_PERFER_FALSE(conf->arg0()); } string SrsConfig::get_engine_iformat(SrsConfDirective* conf) { static string DEFAULT = "flv"; if (!conf) { return DEFAULT; } conf = conf->get("iformat"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return conf->arg0(); } vector<string> SrsConfig::get_engine_vfilter(SrsConfDirective* conf) { vector<string> vfilter; if (!conf) { return vfilter; } conf = conf->get("vfilter"); if (!conf) { return vfilter; } for (int i = 0; i < (int)conf->directives.size(); i++) { SrsConfDirective* filter = conf->directives[i]; if (!filter) { continue; } vfilter.push_back("-" + filter->name); vfilter.push_back(filter->arg0()); } return vfilter; } string SrsConfig::get_engine_vcodec(SrsConfDirective* conf) { static string DEFAULT = ""; if (!conf) { return DEFAULT; } conf = conf->get("vcodec"); if (!conf) { return DEFAULT; } return conf->arg0(); } int SrsConfig::get_engine_vbitrate(SrsConfDirective* conf) { static int DEFAULT = 0; if (!conf) { return DEFAULT; } conf = conf->get("vbitrate"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return ::atoi(conf->arg0().c_str()); } double SrsConfig::get_engine_vfps(SrsConfDirective* conf) { static double DEFAULT = 0; if (!conf) { return DEFAULT; } conf = conf->get("vfps"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return ::atof(conf->arg0().c_str()); } int SrsConfig::get_engine_vwidth(SrsConfDirective* conf) { static int DEFAULT = 0; if (!conf) { return DEFAULT; } conf = conf->get("vwidth"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return ::atoi(conf->arg0().c_str()); } int SrsConfig::get_engine_vheight(SrsConfDirective* conf) { static int DEFAULT = 0; if (!conf) { return DEFAULT; } conf = 
conf->get("vheight"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return ::atoi(conf->arg0().c_str()); } int SrsConfig::get_engine_vthreads(SrsConfDirective* conf) { static int DEFAULT = 1; if (!conf) { return DEFAULT; } conf = conf->get("vthreads"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return ::atoi(conf->arg0().c_str()); } string SrsConfig::get_engine_vprofile(SrsConfDirective* conf) { static string DEFAULT = ""; if (!conf) { return DEFAULT; } conf = conf->get("vprofile"); if (!conf) { return DEFAULT; } return conf->arg0(); } string SrsConfig::get_engine_vpreset(SrsConfDirective* conf) { static string DEFAULT = ""; if (!conf) { return DEFAULT; } conf = conf->get("vpreset"); if (!conf) { return DEFAULT; } return conf->arg0(); } vector<string> SrsConfig::get_engine_vparams(SrsConfDirective* conf) { vector<string> vparams; if (!conf) { return vparams; } conf = conf->get("vparams"); if (!conf) { return vparams; } for (int i = 0; i < (int)conf->directives.size(); i++) { SrsConfDirective* filter = conf->directives[i]; if (!filter) { continue; } vparams.push_back("-" + filter->name); vparams.push_back(filter->arg0()); } return vparams; } string SrsConfig::get_engine_acodec(SrsConfDirective* conf) { static string DEFAULT = ""; if (!conf) { return DEFAULT; } conf = conf->get("acodec"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return conf->arg0(); } int SrsConfig::get_engine_abitrate(SrsConfDirective* conf) { static int DEFAULT = 0; if (!conf) { return DEFAULT; } conf = conf->get("abitrate"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return ::atoi(conf->arg0().c_str()); } int SrsConfig::get_engine_asample_rate(SrsConfDirective* conf) { static int DEFAULT = 0; if (!conf) { return DEFAULT; } conf = conf->get("asample_rate"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return ::atoi(conf->arg0().c_str()); } int SrsConfig::get_engine_achannels(SrsConfDirective* conf) { static int DEFAULT = 0; if (!conf) { 
return DEFAULT; } conf = conf->get("achannels"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return ::atoi(conf->arg0().c_str()); } vector<string> SrsConfig::get_engine_aparams(SrsConfDirective* conf) { vector<string> aparams; if (!conf) { return aparams; } conf = conf->get("aparams"); if (!conf) { return aparams; } for (int i = 0; i < (int)conf->directives.size(); i++) { SrsConfDirective* filter = conf->directives[i]; if (!filter) { continue; } aparams.push_back("-" + filter->name); aparams.push_back(filter->arg0()); } return aparams; } string SrsConfig::get_engine_oformat(SrsConfDirective* conf) { static string DEFAULT = "flv"; if (!conf) { return DEFAULT; } conf = conf->get("oformat"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return conf->arg0(); } string SrsConfig::get_engine_output(SrsConfDirective* conf) { static string DEFAULT = ""; if (!conf) { return DEFAULT; } conf = conf->get("output"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return conf->arg0(); } SrsConfDirective* SrsConfig::get_exec(string vhost) { SrsConfDirective* conf = get_vhost(vhost); if (!conf) { return NULL; } return conf->get("exec"); } bool SrsConfig::get_exec_enabled(string vhost) { static bool DEFAULT = false; SrsConfDirective* conf = get_exec(vhost); if (!conf) { return DEFAULT; } conf = conf->get("enabled"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return SRS_CONF_PERFER_FALSE(conf->arg0()); } vector<SrsConfDirective*> SrsConfig::get_exec_publishs(string vhost) { vector<SrsConfDirective*> eps; SrsConfDirective* conf = get_exec(vhost); if (!conf) { return eps; } for (int i = 0; i < (int)conf->directives.size(); i++) { SrsConfDirective* ep = conf->at(i); if (ep->name == "publish") { eps.push_back(ep); } } return eps; } vector<SrsConfDirective*> SrsConfig::get_ingesters(string vhost) { vector<SrsConfDirective*> integers; SrsConfDirective* conf = get_vhost(vhost); if (!conf) { return integers; } for (int i = 0; i < 
(int)conf->directives.size(); i++) { SrsConfDirective* ingester = conf->directives[i]; if (ingester->name == "ingest") { integers.push_back(ingester); } } return integers; } SrsConfDirective* SrsConfig::get_ingest_by_id(string vhost, string ingest_id) { SrsConfDirective* conf = get_vhost(vhost); if (!conf) { return NULL; } return conf->get("ingest", ingest_id); } bool SrsConfig::get_ingest_enabled(SrsConfDirective* conf) { static bool DEFAULT = false; if (!conf) { return DEFAULT; } conf = conf->get("enabled"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return SRS_CONF_PERFER_FALSE(conf->arg0()); } string SrsConfig::get_ingest_ffmpeg(SrsConfDirective* conf) { static string DEFAULT = ""; if (!conf) { return DEFAULT; } conf = conf->get("ffmpeg"); if (!conf) { return DEFAULT; } return conf->arg0(); } string SrsConfig::get_ingest_input_type(SrsConfDirective* conf) { static string DEFAULT = "file"; if (!conf) { return DEFAULT; } conf = conf->get("input"); if (!conf) { return DEFAULT; } conf = conf->get("type"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return conf->arg0(); } string SrsConfig::get_ingest_input_url(SrsConfDirective* conf) { static string DEFAULT = ""; if (!conf) { return DEFAULT; } conf = conf->get("input"); if (!conf) { return DEFAULT; } conf = conf->get("url"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return conf->arg0(); } bool SrsConfig::get_log_tank_file() { static bool DEFAULT = true; SrsConfDirective* conf = root->get("srs_log_tank"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return conf->arg0() != "console"; } string SrsConfig::get_log_level() { static string DEFAULT = "trace"; SrsConfDirective* conf = root->get("srs_log_level"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return conf->arg0(); } string SrsConfig::get_log_file() { static string DEFAULT = "./objs/srs.log"; SrsConfDirective* conf = root->get("srs_log_file"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return 
conf->arg0(); } bool SrsConfig::get_ffmpeg_log_enabled() { string log = get_ffmpeg_log_dir(); return log != SRS_CONSTS_NULL_FILE; } string SrsConfig::get_ffmpeg_log_dir() { static string DEFAULT = "./objs"; SrsConfDirective* conf = root->get("ff_log_dir"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return conf->arg0(); } SrsConfDirective* SrsConfig::get_hls(string vhost) { SrsConfDirective* conf = get_vhost(vhost); if (!conf) { return NULL; } return conf->get("hls"); } bool SrsConfig::get_hls_enabled(string vhost) { static bool DEFAULT = false; SrsConfDirective* conf = get_hls(vhost); if (!conf) { return DEFAULT; } conf = conf->get("enabled"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return SRS_CONF_PERFER_FALSE(conf->arg0()); } string SrsConfig::get_hls_entry_prefix(string vhost) { static string DEFAULT = ""; SrsConfDirective* conf = get_hls(vhost); if (!conf) { return DEFAULT; } conf = conf->get("hls_entry_prefix"); if (!conf) { return DEFAULT; } return conf->arg0(); } string SrsConfig::get_hls_path(string vhost) { static string DEFAULT = "./objs/nginx/html"; SrsConfDirective* conf = get_hls(vhost); if (!conf) { return DEFAULT; } conf = conf->get("hls_path"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return conf->arg0(); } string SrsConfig::get_hls_m3u8_file(string vhost) { static string DEFAULT = "[app]/[stream].m3u8"; SrsConfDirective* conf = get_hls(vhost); if (!conf) { return DEFAULT; } conf = conf->get("hls_m3u8_file"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return conf->arg0(); } string SrsConfig::get_hls_ts_file(string vhost) { static string DEFAULT = "[app]/[stream]-[seq].ts"; SrsConfDirective* conf = get_hls(vhost); if (!conf) { return DEFAULT; } conf = conf->get("hls_ts_file"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return conf->arg0(); } bool SrsConfig::get_hls_ts_floor(string vhost) { static bool DEFAULT = false; SrsConfDirective* conf = get_hls(vhost); if (!conf) { return DEFAULT; 
} conf = conf->get("hls_ts_floor"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return SRS_CONF_PERFER_FALSE(conf->arg0()); } double SrsConfig::get_hls_fragment(string vhost) { static double DEFAULT = 10; SrsConfDirective* conf = get_hls(vhost); if (!conf) { return DEFAULT; } conf = conf->get("hls_fragment"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return ::atof(conf->arg0().c_str()); } double SrsConfig::get_hls_td_ratio(string vhost) { static double DEFAULT = 1.5; SrsConfDirective* conf = get_hls(vhost); if (!conf) { return DEFAULT; } conf = conf->get("hls_td_ratio"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return ::atof(conf->arg0().c_str()); } double SrsConfig::get_hls_aof_ratio(string vhost) { static double DEFAULT = 2.0; SrsConfDirective* conf = get_hls(vhost); if (!conf) { return DEFAULT; } conf = conf->get("hls_aof_ratio"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return ::atof(conf->arg0().c_str()); } double SrsConfig::get_hls_window(string vhost) { static double DEFAULT = 60; SrsConfDirective* conf = get_hls(vhost); if (!conf) { return DEFAULT; } conf = conf->get("hls_window"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return ::atof(conf->arg0().c_str()); } string SrsConfig::get_hls_on_error(string vhost) { // try to ignore the error. 
static string DEFAULT = "continue"; SrsConfDirective* conf = get_hls(vhost); if (!conf) { return DEFAULT; } conf = conf->get("hls_on_error"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return conf->arg0(); } string SrsConfig::get_hls_storage(string vhost) { static string DEFAULT = "disk"; SrsConfDirective* conf = get_hls(vhost); if (!conf) { return DEFAULT; } conf = conf->get("hls_storage"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return conf->arg0(); } string SrsConfig::get_hls_mount(string vhost) { static string DEFAULT = "[vhost]/[app]/[stream].m3u8"; SrsConfDirective* conf = get_hls(vhost); if (!conf) { return DEFAULT; } conf = conf->get("hls_mount"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return conf->arg0(); } string SrsConfig::get_hls_acodec(string vhost) { static string DEFAULT = "aac"; SrsConfDirective* conf = get_hls(vhost); if (!conf) { return DEFAULT; } conf = conf->get("hls_acodec"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return conf->arg0(); } string SrsConfig::get_hls_vcodec(string vhost) { static string DEFAULT = "h264"; SrsConfDirective* conf = get_hls(vhost); if (!conf) { return DEFAULT; } conf = conf->get("hls_vcodec"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return conf->arg0(); } int SrsConfig::get_vhost_hls_nb_notify(string vhost) { static int DEFAULT = 64; SrsConfDirective* conf = get_hls(vhost); if (!conf) { return DEFAULT; } conf = conf->get("hls_nb_notify"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return ::atoi(conf->arg0().c_str()); } bool SrsConfig::get_hls_cleanup(string vhost) { static bool DEFAULT = true; SrsConfDirective* conf = get_hls(vhost); if (!conf) { return DEFAULT; } conf = conf->get("hls_cleanup"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return SRS_CONF_PERFER_TRUE(conf->arg0()); } int SrsConfig::get_hls_dispose(string vhost) { static int DEFAULT = 0; SrsConfDirective* conf = get_hls(vhost); if (!conf) { return DEFAULT; } 
conf = conf->get("hls_dispose"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return ::atoi(conf->arg0().c_str()); } bool SrsConfig::get_hls_wait_keyframe(string vhost) { static bool DEFAULT = true; SrsConfDirective* conf = get_hls(vhost); if (!conf) { return DEFAULT; } conf = conf->get("hls_wait_keyframe"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return SRS_CONF_PERFER_TRUE(conf->arg0()); } SrsConfDirective *SrsConfig::get_hds(const string &vhost) { SrsConfDirective* conf = get_vhost(vhost); if (!conf) { return NULL; } return conf->get("hds"); } bool SrsConfig::get_hds_enabled(const string &vhost) { static bool DEFAULT = false; SrsConfDirective* conf = get_hds(vhost); if (!conf) { return DEFAULT; } conf = conf->get("enabled"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return SRS_CONF_PERFER_FALSE(conf->arg0()); } string SrsConfig::get_hds_path(const string &vhost) { static string DEFAULT = "./objs/nginx/html"; SrsConfDirective* conf = get_hds(vhost); if (!conf) { return DEFAULT; } conf = conf->get("hds_path"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return conf->arg0(); } double SrsConfig::get_hds_fragment(const string &vhost) { static double DEFAULT = 10; SrsConfDirective* conf = get_hds(vhost); if (!conf) { return DEFAULT; } conf = conf->get("hds_fragment"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return ::atof(conf->arg0().c_str()); } double SrsConfig::get_hds_window(const string &vhost) { static double DEFAULT = 60; SrsConfDirective* conf = get_hds(vhost); if (!conf) { return DEFAULT; } conf = conf->get("hds_window"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return ::atof(conf->arg0().c_str()); } SrsConfDirective* SrsConfig::get_dvr(string vhost) { SrsConfDirective* conf = get_vhost(vhost); if (!conf) { return NULL; } return conf->get("dvr"); } bool SrsConfig::get_dvr_enabled(string vhost) { static bool DEFAULT = false; SrsConfDirective* conf = get_dvr(vhost); if (!conf) { return 
DEFAULT; } conf = conf->get("enabled"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return SRS_CONF_PERFER_FALSE(conf->arg0()); } SrsConfDirective* SrsConfig::get_dvr_apply(string vhost) { SrsConfDirective* conf = get_dvr(vhost); if (!conf) { return NULL; } conf = conf->get("dvr_apply"); if (!conf || conf->arg0().empty()) { return NULL; } return conf; } string SrsConfig::get_dvr_path(string vhost) { static string DEFAULT = "./objs/nginx/html/[app]/[stream].[timestamp].flv"; SrsConfDirective* conf = get_dvr(vhost); if (!conf) { return DEFAULT; } conf = conf->get("dvr_path"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return conf->arg0(); } string SrsConfig::get_dvr_plan(string vhost) { static string DEFAULT = "session"; SrsConfDirective* conf = get_dvr(vhost); if (!conf) { return DEFAULT; } conf = conf->get("dvr_plan"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return conf->arg0(); } int SrsConfig::get_dvr_duration(string vhost) { static int DEFAULT = 30; SrsConfDirective* conf = get_dvr(vhost); if (!conf) { return DEFAULT; } conf = conf->get("dvr_duration"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return ::atoi(conf->arg0().c_str()); } bool SrsConfig::get_dvr_wait_keyframe(string vhost) { static bool DEFAULT = true; SrsConfDirective* conf = get_dvr(vhost); if (!conf) { return DEFAULT; } conf = conf->get("dvr_wait_keyframe"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return SRS_CONF_PERFER_TRUE(conf->arg0()); } int SrsConfig::get_dvr_time_jitter(string vhost) { static string DEFAULT = "full"; SrsConfDirective* conf = get_dvr(vhost); if (!conf) { return _srs_time_jitter_string2int(DEFAULT); } conf = conf->get("time_jitter"); if (!conf || conf->arg0().empty()) { return _srs_time_jitter_string2int(DEFAULT); } return _srs_time_jitter_string2int(conf->arg0()); } bool SrsConfig::get_http_api_enabled() { SrsConfDirective* conf = root->get("http_api"); return get_http_api_enabled(conf); } bool 
SrsConfig::get_http_api_enabled(SrsConfDirective* conf) { static bool DEFAULT = false; if (!conf) { return DEFAULT; } conf = conf->get("enabled"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return SRS_CONF_PERFER_FALSE(conf->arg0()); } string SrsConfig::get_http_api_listen() { static string DEFAULT = "1985"; SrsConfDirective* conf = root->get("http_api"); if (!conf) { return DEFAULT; } conf = conf->get("listen"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return conf->arg0(); } bool SrsConfig::get_http_api_crossdomain() { static bool DEFAULT = true; SrsConfDirective* conf = root->get("http_api"); if (!conf) { return DEFAULT; } conf = conf->get("crossdomain"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return SRS_CONF_PERFER_TRUE(conf->arg0()); } bool SrsConfig::get_raw_api() { static bool DEFAULT = false; SrsConfDirective* conf = root->get("http_api"); if (!conf) { return DEFAULT; } conf = conf->get("raw_api"); if (!conf) { return DEFAULT; } conf = conf->get("enabled"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return SRS_CONF_PERFER_FALSE(conf->arg0()); } bool SrsConfig::get_raw_api_allow_reload() { static bool DEFAULT = false; SrsConfDirective* conf = root->get("http_api"); if (!conf) { return DEFAULT; } conf = conf->get("raw_api"); if (!conf) { return DEFAULT; } conf = conf->get("allow_reload"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return SRS_CONF_PERFER_FALSE(conf->arg0()); } bool SrsConfig::get_raw_api_allow_query() { static bool DEFAULT = false; SrsConfDirective* conf = root->get("http_api"); if (!conf) { return DEFAULT; } conf = conf->get("raw_api"); if (!conf) { return DEFAULT; } conf = conf->get("allow_query"); if (!conf || conf->arg0().empty()) { return DEFAULT; } return SRS_CONF_PERFER_FALSE(conf->arg0()); } bool SrsConfig::get_raw_api_allow_update() { static bool DEFAULT = false; SrsConfDirective* conf = root->get("http_api"); if (!conf) { return DEFAULT; } conf = conf->get("raw_api"); 
// NOTE(review): this opening fragment is the tail of a vhost getter whose
// signature and its "static ... DEFAULT" declaration sit above this chunk
// (not visible here); it reads the "allow_update" sub-directive.
    if (!conf) {
        return DEFAULT;
    }

    conf = conf->get("allow_update");
    if (!conf || conf->arg0().empty()) {
        return DEFAULT;
    }

    return SRS_CONF_PERFER_FALSE(conf->arg0());
}

// Whether the global http_server (HTTP stream) section is enabled.
bool SrsConfig::get_http_stream_enabled()
{
    SrsConfDirective* conf = root->get("http_server");
    return get_http_stream_enabled(conf);
}

// Overload: read "enabled" from an already-resolved http_server directive.
bool SrsConfig::get_http_stream_enabled(SrsConfDirective* conf)
{
    static bool DEFAULT = false;

    if (!conf) {
        return DEFAULT;
    }

    conf = conf->get("enabled");
    if (!conf || conf->arg0().empty()) {
        return DEFAULT;
    }

    return SRS_CONF_PERFER_FALSE(conf->arg0());
}

// Listen port (as a string) for the HTTP stream server; defaults to 8080.
string SrsConfig::get_http_stream_listen()
{
    static string DEFAULT = "8080";

    SrsConfDirective* conf = root->get("http_server");
    if (!conf) {
        return DEFAULT;
    }

    conf = conf->get("listen");
    if (!conf || conf->arg0().empty()) {
        return DEFAULT;
    }

    return conf->arg0();
}

// Root directory served by the HTTP stream server.
string SrsConfig::get_http_stream_dir()
{
    static string DEFAULT = "./objs/nginx/html";

    SrsConfDirective* conf = root->get("http_server");
    if (!conf) {
        return DEFAULT;
    }

    conf = conf->get("dir");
    if (!conf || conf->arg0().empty()) {
        return DEFAULT;
    }

    return conf->arg0();
}

// Whether the per-vhost http_static section is enabled.
bool SrsConfig::get_vhost_http_enabled(string vhost)
{
    static bool DEFAULT = false;

    SrsConfDirective* conf = get_vhost(vhost);
    if (!conf) {
        return DEFAULT;
    }

    conf = conf->get("http_static");
    if (!conf) {
        return DEFAULT;
    }

    conf = conf->get("enabled");
    if (!conf || conf->arg0().empty()) {
        return DEFAULT;
    }

    return SRS_CONF_PERFER_FALSE(conf->arg0());
}

// Mount point template for the per-vhost http_static section.
string SrsConfig::get_vhost_http_mount(string vhost)
{
    static string DEFAULT = "[vhost]/";

    SrsConfDirective* conf = get_vhost(vhost);
    if (!conf) {
        return DEFAULT;
    }

    conf = conf->get("http_static");
    if (!conf) {
        return DEFAULT;
    }

    conf = conf->get("mount");
    if (!conf || conf->arg0().empty()) {
        return DEFAULT;
    }

    return conf->arg0();
}

// Directory served by the per-vhost http_static section.
string SrsConfig::get_vhost_http_dir(string vhost)
{
    static string DEFAULT = "./objs/nginx/html";

    SrsConfDirective* conf = get_vhost(vhost);
    if (!conf) {
        return DEFAULT;
    }

    conf = conf->get("http_static");
    if (!conf) {
        return DEFAULT;
    }

    conf = conf->get("dir");
    if (!conf || conf->arg0().empty()) {
        return DEFAULT;
    }

    return conf->arg0();
}

// Whether the per-vhost http_remux (live FLV remux) section is enabled.
bool SrsConfig::get_vhost_http_remux_enabled(string vhost)
{
    static bool DEFAULT = false;

    SrsConfDirective* conf = get_vhost(vhost);
    if (!conf) {
        return DEFAULT;
    }

    conf = conf->get("http_remux");
    if (!conf) {
        return DEFAULT;
    }

    conf = conf->get("enabled");
    if (!conf || conf->arg0().empty()) {
        return DEFAULT;
    }

    return SRS_CONF_PERFER_FALSE(conf->arg0());
}

// fast_cache value (seconds, fractional allowed) for http_remux; 0 disables it.
double SrsConfig::get_vhost_http_remux_fast_cache(string vhost)
{
    static double DEFAULT = 0;

    SrsConfDirective* conf = get_vhost(vhost);
    if (!conf) {
        return DEFAULT;
    }

    conf = conf->get("http_remux");
    if (!conf) {
        return DEFAULT;
    }

    conf = conf->get("fast_cache");
    if (!conf || conf->arg0().empty()) {
        return DEFAULT;
    }

    return ::atof(conf->arg0().c_str());
}

// Mount point template for http_remux streams.
string SrsConfig::get_vhost_http_remux_mount(string vhost)
{
    static string DEFAULT = "[vhost]/[app]/[stream].flv";

    SrsConfDirective* conf = get_vhost(vhost);
    if (!conf) {
        return DEFAULT;
    }

    conf = conf->get("http_remux");
    if (!conf) {
        return DEFAULT;
    }

    conf = conf->get("mount");
    if (!conf || conf->arg0().empty()) {
        return DEFAULT;
    }

    return conf->arg0();
}

// Whether HSTRS (HTTP stream trigger RTMP source) is enabled for http_remux.
// Note: unlike the other booleans here this one uses SRS_CONF_PERFER_TRUE, so
// any value other than an explicit "off" is treated as enabled once present.
bool SrsConfig::get_vhost_http_remux_hstrs(string vhost)
{
    // the HSTRS must default to false for origin.
    static bool DEFAULT = false;

    SrsConfDirective* conf = get_vhost(vhost);
    if (!conf) {
        return DEFAULT;
    }

    conf = conf->get("http_remux");
    if (!conf) {
        return DEFAULT;
    }

    conf = conf->get("hstrs");
    if (!conf || conf->arg0().empty()) {
        return DEFAULT;
    }

    return SRS_CONF_PERFER_TRUE(conf->arg0());
}

// Raw "heartbeat" directive, or NULL when absent. (Name kept as-is: the typo
// "heartbeart" is part of the existing interface.)
SrsConfDirective* SrsConfig::get_heartbeart()
{
    return root->get("heartbeat");
}

// Whether the heartbeat reporter is enabled.
bool SrsConfig::get_heartbeat_enabled()
{
    static bool DEFAULT = false;

    SrsConfDirective* conf = get_heartbeart();
    if (!conf) {
        return DEFAULT;
    }

    conf = conf->get("enabled");
    if (!conf || conf->arg0().empty()) {
        return DEFAULT;
    }

    return SRS_CONF_PERFER_FALSE(conf->arg0());
}

// Heartbeat interval in milliseconds; config value is in seconds.
int64_t SrsConfig::get_heartbeat_interval()
{
    static int64_t DEFAULT = (int64_t)(9.9 * 1000);

    SrsConfDirective* conf = get_heartbeart();
    if (!conf) {
        return DEFAULT;
    }

    conf = conf->get("interval");
    if (!conf || conf->arg0().empty()) {
        return DEFAULT;
    }

    return (int64_t)(::atof(conf->arg0().c_str()) * 1000);
}

// URL the heartbeat is POSTed to.
string SrsConfig::get_heartbeat_url()
{
    static string DEFAULT = "http://"SRS_CONSTS_LOCALHOST":8085/api/v1/servers";

    SrsConfDirective* conf = get_heartbeart();
    if (!conf) {
        return DEFAULT;
    }

    conf = conf->get("url");
    if (!conf || conf->arg0().empty()) {
        return DEFAULT;
    }

    return conf->arg0();
}

// Device id reported in the heartbeat; empty when not configured.
string SrsConfig::get_heartbeat_device_id()
{
    static string DEFAULT = "";

    SrsConfDirective* conf = get_heartbeart();
    if (!conf) {
        return DEFAULT;
    }

    conf = conf->get("device_id");
    if (!conf || conf->arg0().empty()) {
        return DEFAULT;
    }

    return conf->arg0();
}

// Whether system summaries are included in the heartbeat payload.
bool SrsConfig::get_heartbeat_summaries()
{
    static bool DEFAULT = false;

    SrsConfDirective* conf = get_heartbeart();
    if (!conf) {
        return DEFAULT;
    }

    conf = conf->get("summaries");
    if (!conf || conf->arg0().empty()) {
        return DEFAULT;
    }

    return SRS_CONF_PERFER_FALSE(conf->arg0());
}

// Raw "stats" directive, or NULL when absent.
SrsConfDirective* SrsConfig::get_stats()
{
    return root->get("stats");
}

// Index of the network interface used for statistics.
int SrsConfig::get_stats_network()
{
    static int DEFAULT = 0;

    SrsConfDirective* conf = get_stats();
    if (!conf) {
        return DEFAULT;
    }

    conf = conf->get("network");
    if (!conf || conf->arg0().empty()) {
        return DEFAULT;
    }

    return ::atoi(conf->arg0().c_str());
}

// "disk" directive listing devices to monitor, or NULL when absent/empty.
SrsConfDirective* SrsConfig::get_stats_disk_device()
{
    SrsConfDirective* conf = get_stats();
    if (!conf) {
        return NULL;
    }

    conf = conf->get("disk");
    if (!conf || conf->args.size() == 0) {
        return NULL;
    }

    return conf;
}

#ifdef SRS_AUTO_DYNAMIC_CONFIG
// Fetch and parse a dynamic per-request config directive named `name` for the
// vhost of `req`. Returns a freshly parsed directive tree the CALLER OWNS and
// must free (with srs_freep), or NULL on any failure.
SrsConfDirective* SrsConfig::get_dynamic_config(const char* name, SrsRequest *req)
{
    SrsConfDirective* conf = get_vhost(req->vhost);
    if (!conf) {
        return NULL;
    }

    conf = conf->get(name);
    if (!conf || conf->arg0().empty()) {
        return NULL;
    }

    // Resolve the directive's argument into a config buffer
    // (e.g. fetched/expanded per request), then parse it.
    SrsConfigBuffer buf;
    if (buf.fullfill(name, conf->arg0(), req) != ERROR_SUCCESS) {
        return NULL;
    }

    SrsConfDirective *dynm_conf = new SrsConfDirective();
    if (dynm_conf->parse(&buf) != ERROR_SUCCESS) {
        srs_freep(dynm_conf);
        return NULL;
    }

    return dynm_conf;
}
#endif
cn-demo/immudb4j
src/test/java/io/codenotary/immudb4j/ListDatabasesTest.java
/*
Copyright 2021 CodeNotary, Inc. All rights reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package io.codenotary.immudb4j;

import org.testng.annotations.Test;

import java.util.List;

/**
 * Integration test that logs in, lists the server's databases,
 * prints them to stdout, and logs out again.
 */
public class ListDatabasesTest extends ImmuClientIntegrationTest {

    @Test(testName = "databases")
    public void t1() {
        immuClient.login("immudb", "immudb");

        final List<String> dbs = immuClient.databases();
        if (dbs.isEmpty()) {
            System.out.print(">>> There are no databases.");
        } else {
            System.out.print(">>> The databases are");
            dbs.forEach(db -> System.out.printf(" '%s'", db));
        }

        immuClient.logout();
    }

}
ZooPhy/zodo-services
zodo/ner/train_utils.py
'''
Utility methods for loading and using the Named Entity Recognizer (NER)
'''
from os import makedirs
from os.path import join, exists

import numpy as np

from zodo.ner.ner_utils import PADDING


def get_input(args, word_emb_model, input_file):
    '''Load a dataset and prepare padded RNN inputs.

    Reads ``input_file`` (CoNLL-style: "token label" lines, documents
    separated by blank lines), embeds tokens with ``word_emb_model``, and
    pads/packs to ``args.max_len`` (or the longest document when unset).
    Returns (token_sets, instance_array, label_array, sequence_lengths).
    '''
    tokens, instances, labels, max_len = get_sent_input(word_emb_model, input_file)
    # Use the explicitly configured max length when given, else the observed one.
    max_len = args.max_len if args.max_len else max_len
    print("Using max sentence length:", max_len)
    return get_rnn_input(max_len, tokens, instances, labels)


def get_sent_input(word_emb_model, input_file):
    '''Parse a CoNLL-style file into per-document tokens/embeddings/labels.

    Labels are one-hot encoded: O -> [1,0,0], B -> [0,1,0], I -> [0,0,1].
    Lines with any other tag are reported and skipped (the original appended
    the token but not the label, desynchronizing the parallel lists).
    Returns (tokens, instances, labels, max_document_length).
    '''
    print("processing file: {}".format(input_file))
    tokens, instances, labels = [], [], []
    doc_tokens, doc_instances, doc_labels = [], [], []
    # "with" ensures the file handle is closed (the original leaked it).
    with open(input_file) as data_file:
        for line in data_file:
            parts = line.split()
            if len(parts) == 2:
                token, label = parts
                if label.startswith('O'):
                    one_hot = np.array([1, 0, 0])
                elif label.startswith('B'):
                    one_hot = np.array([0, 1, 0])
                elif label.startswith('I'):
                    one_hot = np.array([0, 0, 1])
                else:
                    print("Invalid tag {} found for word {}".format(label, token))
                    continue  # keep tokens/instances/labels aligned
                doc_tokens.append(token)
                doc_instances.append(word_emb_model[token])
                doc_labels.append(one_hot)
            else:
                # Blank (or malformed) line terminates the current document.
                assert len(doc_tokens) == len(doc_instances) == len(doc_labels)
                tokens.append(doc_tokens)
                instances.append(np.array(doc_instances))
                labels.append(np.array(doc_labels))
                doc_tokens, doc_instances, doc_labels = [], [], []
    # Flush the final document when the file does not end with a blank line
    # (the original silently dropped it).
    if doc_tokens:
        assert len(doc_tokens) == len(doc_instances) == len(doc_labels)
        tokens.append(doc_tokens)
        instances.append(np.array(doc_instances))
        labels.append(np.array(doc_labels))
    assert len(tokens) == len(instances) == len(labels)
    max_len = max([len(x) for x in tokens])
    print("Max length", max_len)
    return tokens, instances, labels, max_len


def get_rnn_input(max_len, sentences, instances, labels):
    '''Pack documents into fixed-length (max_len) padded sequences.

    Consecutive short documents are concatenated (separated by a PADDING
    token) while they fit in ``max_len``; over-long documents are split at
    ``max_len`` and the remainder reprocessed. Returns
    (token_sets, instance_tensor, label_tensor, sequence_lengths).
    '''
    input_len = len(instances[0][0])
    num_classes = 3
    token_sets = []
    instns_sets = []
    label_sets = []
    seqlen_sets = []
    s_index = 0
    padding_tok = PADDING
    padding_rep = np.expand_dims(np.zeros(input_len), axis=0)
    padding_lab = np.expand_dims(np.zeros(num_classes), axis=0)
    while s_index < len(sentences):
        # Copy so "+=" below does not mutate the caller's token lists
        # (the original aliased sentences[s_index] and mutated it in place).
        s_tokens = list(sentences[s_index])
        s_instns = instances[s_index]
        s_labels = labels[s_index]
        if len(s_tokens) <= max_len:
            # Append following documents (plus one separator) while they fit.
            while (s_index + 1 < len(sentences) and
                   len(s_tokens) + len(sentences[s_index + 1]) + 1 <= max_len):
                s_index += 1
                # NOTE(review): the separator is appended AFTER the merged
                # document in all three parallel structures, so alignment is
                # preserved even though the placement looks unusual.
                s_tokens += sentences[s_index] + [padding_tok]
                s_instns = np.append(s_instns, instances[s_index], axis=0)
                s_instns = np.append(s_instns, padding_rep, axis=0)
                s_labels = np.append(s_labels, labels[s_index], axis=0)
                s_labels = np.append(s_labels, padding_lab, axis=0)
            s_index += 1
        elif len(s_tokens) > max_len:
            # Too long: emit the first max_len items now, requeue the rest.
            sentences[s_index] = s_tokens[max_len:]
            instances[s_index] = s_instns[max_len:]
            labels[s_index] = s_labels[max_len:]
            s_tokens = s_tokens[:max_len]
            s_instns = s_instns[:max_len]
            s_labels = s_labels[:max_len]
        assert len(s_tokens) == len(s_instns) == len(s_labels)
        # Right-pad vectors/labels up to max_len (token list keeps true length).
        for _ in range(max_len - len(s_tokens)):
            s_labels = np.append(s_labels, padding_lab, axis=0)
            s_instns = np.append(s_instns, padding_rep, axis=0)
        assert len(s_instns) == len(s_labels)
        token_sets.append(s_tokens)
        instns_sets.append(s_instns)
        label_sets.append(s_labels)
        seqlen_sets.append(len(s_tokens))
    sumtokens = sum([len(x) for x in token_sets])
    print("Total_tokens:{} Total_sents:{}".format(sumtokens, len(sentences)),
          "Token_T:[{}][?]".format(len(token_sets)),
          "Vector_T:[{}][{}][{}]".format(len(instns_sets), len(instns_sets[0]),
                                         len(instns_sets[0][0])),
          "Label_T:[{}][{}][{}]".format(len(label_sets), len(label_sets[0]),
                                        len(label_sets[0][0])))
    assert len(token_sets) == len(instns_sets) == len(label_sets) == len(seqlen_sets)
    return token_sets, np.asarray(instns_sets), np.asarray(label_sets), seqlen_sets


def strict_f1(tokens, prediction, target, write_err=False):
    '''Phrasal (exact-span) precision/recall/F1 for the predicted entities.

    ``prediction``/``target`` are per-token tag sequences (0=O, 1=B, 2=I).
    When ``write_err`` is set, a summary file is written under runs/.
    Returns (precision, recall, f1).
    '''
    gold_entities = get_ne_indexes(target)
    pred_entities = get_ne_indexes(prediction)
    # inefficient but easy to understand
    true_pos = [x for x in pred_entities if x in gold_entities]
    false_pos = [x for x in pred_entities if x not in gold_entities]
    false_neg = [x for x in gold_entities if x not in pred_entities]
    # Small epsilon avoids division by zero when a set is empty.
    precision = 1.0 * len(true_pos)/(len(true_pos) + len(false_pos) + 0.000001)
    recall = 1.0 * len(true_pos)/(len(true_pos) + len(false_neg) + 0.000001)
    f1sc = 2.0 * precision * recall / (precision + recall + 0.000001)
    if write_err:
        if not exists("runs"):
            makedirs("runs")
        filename = "runs/ne_{:.5f}".format(f1sc)+".txt"
        print("Writing summary to", filename)
        write_errors(tokens, true_pos, false_pos, false_neg, filename)
    return precision, recall, f1sc


def overlapping_f1(tokens, prediction, target):
    '''Lenient precision/recall/F1: a predicted entity counts as correct when
    it shares at least one token index with any gold entity (and vice versa).
    Returns (precision, recall, f1).
    '''
    gold_entities = get_ne_indexes(target)
    gold_ind = [y for x in gold_entities for y in x.split("_")]
    pred_entities = get_ne_indexes(prediction)
    pred_ind = [y for x in pred_entities for y in x.split("_")]
    # find TP and FP
    true_pos, false_pos, false_neg = 0, 0, 0
    for pred in pred_entities:
        found = False
        for pred_p in pred.split("_"):
            if pred_p in gold_ind:
                found = True
                break
        if found:
            true_pos += 1
        else:
            false_pos += 1
    # find FN
    for gold in gold_entities:
        found = False
        for gold_p in gold.split("_"):
            if gold_p in pred_ind:
                found = True
                break
        if not found:
            false_neg += 1
    precision = 1.0 * true_pos/(true_pos + false_pos + 0.000001)
    recall = 1.0 * true_pos/(true_pos + false_neg + 0.000001)
    f1sc = 2.0 * precision * recall / (precision + recall + 0.000001)
    print("TP {} FP {} FN {}".format(true_pos, false_pos, false_neg))
    return precision, recall, f1sc


def get_ne_indexes(tags):
    '''Convert a tag sequence (0=O, 1=B, 2=I) into entity span strings.

    Each entity is encoded as its token indices joined by "_", e.g. the
    sequence [1, 2, 0, 1] yields ["0_1", "3"]. An I without a preceding
    B starts a new entity (lenient handling of malformed sequences).
    '''
    entities = []
    entity = ''
    for i, label in enumerate(tags):
        if label == 1:  # B: close any open entity, start a new one
            if entity != '':
                entities.append(entity)
            entity = "{}".format(i)
        elif label == 2:  # I: extend the open entity (or start one)
            if entity != '':
                entity += "_{}".format(i)
            else:
                entity = "{}".format(i)
        else:  # O: close any open entity
            if entity != '':
                entities.append(entity)
                entity = ''
    # Flush an entity that runs to the end of the sequence.
    if entity != '':
        entities.append(entity)
    return entities


def write_errors(tokens, true_pos, false_pos, false_neg, fname='results.log'):
    '''Write TP/FP/FN entity spans (with their token text) to ``fname``.'''
    print("TP {} FP {} FN {}".format(len(true_pos), len(false_pos), len(false_neg)))
    with open(fname, 'w') as rfile:
        print("TP {} FP {} FN {}".format(len(true_pos), len(false_pos),
                                         len(false_neg)), file=rfile)
        print("\n--True Positives--", file=rfile)
        for i, item in enumerate(true_pos):
            en_text = " ".join([tokens[int(index)] for index in item.split('_')])
            print("{}\t{}\t{}".format(i, item, en_text), file=rfile)
        print("\n--False Positives--", file=rfile)
        for i, item in enumerate(false_pos):
            en_text = " ".join([tokens[int(index)] for index in item.split('_')])
            print("{}\t{}\t{}".format(i, item, en_text), file=rfile)
        print("\n--FN--", file=rfile)
        for i, item in enumerate(false_neg):
            en_text = " ".join([tokens[int(index)] for index in item.split('_')])
            print("{}\t{}\t{}".format(i, item, en_text), file=rfile)
rbins-swap-team/NoosDrift
noos_services/tasks/upload.py
#!/usr/bin/python
"""Upload a result file to the central NOOS-Drift FTP server.

Usage: upload.py -i <inputfile> [-d <host>] [-u <user>] [-p <password>]
Host/user/password fall back to the NOOS_SFTP_HOST / NOOS_SFTP_USER /
NOOS_SFTP_PWD environment variables when not given on the command line.

Note: earlier revisions carried large commented-out paramiko/pysftp (SCP/SFTP)
implementations; they were removed as dead code — see version control history.
"""
import sys

import ftplib
import getopt
from os.path import isfile, abspath, split
from os import environ


def main(argv):
    """Parse CLI arguments and upload the given file over FTP.

    Exits with status 1 when no options are given, 2 on usage or file errors.
    Progress and errors are also appended to a log file in /tmp.
    """
    log_file_name = "/tmp/test2.txt"
    log_file = open(log_file_name, "w")
    # try/finally guarantees the log is closed on every exit path, including
    # the early sys.exit() calls below (the original only closed it around
    # the FTP transfer).
    try:
        file_to_upload = ''
        host = None
        user = None
        password = None
        log_file.write("Starting upload of file to central\n")
        try:
            # -h is a bare flag; -i/-d/-u/-p all take a value. The original
            # spec "h:i:d:u:p" wrongly required an argument for -h and gave
            # none to -p, so a password passed via -p was never captured.
            opts, args = getopt.getopt(argv, "hi:d:u:p:", ["ifile="])
        except getopt.GetoptError as exce:
            print(exce.msg + '\nusage: upload.py -i <inputfile>')
            log_file.write(exce.msg + '\nusage: upload.py -i <inputfile>\n')
            sys.exit(2)
        if not opts:
            print('No input file provided\nusage: upload.py -i <inputfile>')
            log_file.write('No input file provided\nusage: upload.py -i <inputfile>\n')
            sys.exit(1)
        for opt, arg in opts:
            print(opt)
            print(arg)
            if opt == '-h':
                print('usage: upload.py -i <inputfile>')
                log_file.write('usage: upload.py -i <inputfile>\n')
                sys.exit()
            if opt in ("-i", "--ifile"):
                file_to_upload = arg
            if opt == '-d':
                host = arg
            if opt == '-u':
                user = arg
            if opt == '-p':
                password = arg

        # Fall back to the environment for any connection detail not given
        # on the command line.
        if user is None:
            user = environ.get("NOOS_SFTP_USER")
            if user is None:
                print("No NOOS_SFTP_USER defined and exported in bash environment")
                log_file.write("No NOOS_SFTP_USER defined and exported in bash environment\n")
            assert user is not None
        if host is None:
            host = environ.get("NOOS_SFTP_HOST")
            if host is None:
                print("No NOOS_SFTP_HOST defined and exported in bash environment")
                log_file.write("No NOOS_SFTP_HOST defined and exported in bash environment\n")
            assert host is not None
        # TODO : Replace password with keyfile location (NOOS_SFTP_KEYFILE)
        if password is None:
            password = environ.get("NOOS_SFTP_PWD")
            if password is None:
                print("No NOOS_SFTP_PWD defined and exported in bash environment\n")
                log_file.write("No NOOS_SFTP_PWD defined and exported in bash environment\n")
            assert password is not None

        dest = "noosdrift"

        print("Python v{}.{}.{} - NOOS-Drift Upload utility".format(
            sys.version_info.major, sys.version_info.minor, sys.version_info.micro))
        log_file.write("Python v{}.{}.{} - NOOS-Drift Upload utility\n".format(
            sys.version_info.major, sys.version_info.minor, sys.version_info.micro))
        print("-----------------------------------------")
        log_file.write("-----------------------------------------\n")

        if not isfile(file_to_upload):
            print('The file "' + file_to_upload + '" does not exist.')
            log_file.write('The file "{}" does not exist.\n'.format(file_to_upload))
            sys.exit(2)

        print('File to upload is: "{}"'.format(file_to_upload))
        log_file.write('File to upload is: "{}"\n'.format(file_to_upload))

        src = abspath(file_to_upload)
        path, filename = split(src)

        try:
            log_file.write("Trying to upload file \'{}\'\n".format(file_to_upload))
            with ftplib.FTP(host, user, password) as session:
                with open(src, 'rb') as result_archive:
                    if dest != '':
                        session.cwd(dest)
                    session.storbinary("STOR {}".format(filename), result_archive)
            log_file.write("Upload of file \'{}\' OK\n".format(file_to_upload))
        except ftplib.all_errors as ftperrs:
            log_file.write("Error {} uploading file \'{}\'\n".format(ftperrs, file_to_upload))
            raise  # bare raise keeps the original traceback
        except Exception as otherexc:
            log_file.write("Error {} uploading file \'{}\'\n".format(otherexc, file_to_upload))
            raise
    finally:
        log_file.close()


if __name__ == "__main__":
    file_name = "/tmp/test.txt"
    # "with" closes the marker/error file on every exit path.
    with open(file_name, "w") as file:
        file.write("test")
        try:
            main(sys.argv[1:])
        except Exception as exc:
            file.write("{}\n".format(exc))
            sys.exit(2)
aeten/net.aeten.core
src/net.aeten.core/net/aeten/core/util/AtomicArrayByComparator.java
package net.aeten.core.util;

import java.util.Comparator;

import net.aeten.core.Factory;

/**
 * {@link AtomicArray} implementation backed by one
 * {@link AtomicValueByComparator} per slot, so that every compare-and-set
 * operation uses the supplied {@link Comparator} instead of reference
 * equality. All element operations are delegated to the per-slot atomics.
 */
public class AtomicArrayByComparator<E> implements AtomicArray<E> {

	/** One comparator-based atomic cell per array slot. */
	private final AtomicValue<E>[] slots;

	/** Builds the array from an initial snapshot of {@code array}. */
	@SuppressWarnings("unchecked")
	public AtomicArrayByComparator(Comparator<E> comparator, E[] array) {
		slots = new AtomicValue[array.length];
		int i = 0;
		for (E initial : array) {
			slots[i++] = new AtomicValueByComparator<E>(comparator, initial);
		}
	}

	/** Builds an array of {@code length} slots, each initialized to null. */
	@SuppressWarnings("unchecked")
	public AtomicArrayByComparator(Comparator<E> comparator, int length) {
		slots = new AtomicValue[length];
		for (int i = 0; i < length; ++i) {
			slots[i] = new AtomicValueByComparator<E>(comparator, null);
		}
	}

	@Override
	public int length() {
		return slots.length;
	}

	@Override
	public E get(int i) {
		return slots[i].get();
	}

	@Override
	public void set(int i, E newValue) {
		slots[i].set(newValue);
	}

	@Override
	public void lazySet(int i, E newValue) {
		slots[i].lazySet(newValue);
	}

	@Override
	public E getAndSet(int i, E newValue) {
		return slots[i].getAndSet(newValue);
	}

	@Override
	public boolean compareAndSet(int i, E expect, E update) {
		return slots[i].compareAndSet(expect, update);
	}

	@Override
	public boolean compareAndSet(int i, E expect, Factory<E, Void> update) {
		return slots[i].compareAndSet(expect, update);
	}

	@Override
	public boolean weakCompareAndSet(int i, E expect, E update) {
		return slots[i].weakCompareAndSet(expect, update);
	}

	@Override
	public boolean weakCompareAndSet(int i, E expect, Factory<E, Void> update) {
		return slots[i].weakCompareAndSet(expect, update);
	}
}
krzysztof-magosa/huckleberry
include/huckleberry/spi/Bus.hpp
/**
 * Copyright 2014 <NAME>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef __Huckleberry__Spi__Bus__
#define __Huckleberry__Spi__Bus__

#include <cstdint>
#include <map>

#include "huckleberry/spi/Device.hpp"

namespace huckleberry
{
    namespace spi
    {
        /**
         * A numbered SPI bus that owns its attached devices.
         *
         * Devices are kept in a map keyed by chip-select/device number.
         * NOTE(review): `devices` stores raw Device pointers; ownership and
         * cleanup semantics are defined in the implementation file (not
         * visible here) — confirm before relying on them.
         */
        class Bus
        {
            public:
                /** Construct the bus identified by `number` (e.g. /dev/spidev<number>). */
                Bus(const int number);

                /** Accessor for the device with the given device number on this bus. */
                Device& getDevice(const int device);

                /** Bus number this instance was constructed with. */
                int getNumber(void);

            protected:
                int number;                      // bus identifier
                std::map<int, Device*> devices;  // device number -> device
        };
    }
}

#endif
xiaochengcike/RegSeg
Code/Modules/Filtering/include/MultilabelBinarizeMeshFilter.hxx
// This file is part of RegSeg // // Copyright 2014-2017, <NAME> <<EMAIL>> // // Permission is hereby granted, free of charge, to any person // obtaining a copy of this software and associated documentation // files (the "Software"), to deal in the Software without // restriction, including without limitation the rights to use, // copy, modify, merge, publish, distribute, sublicense, and/or // sell copies of the Software, and to permit persons to whom the // Software is furnished to do so, subject to the following // conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES // OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT // HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, // WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR // OTHER DEALINGS IN THE SOFTWARE. 
#ifndef SOURCE_DIRECTORY__MODULES_FILTERING_INCLUDE_MULTILABELBINARIZEMESHFILTER_HXX_ #define SOURCE_DIRECTORY__MODULES_FILTERING_INCLUDE_MULTILABELBINARIZEMESHFILTER_HXX_ #include "MultilabelBinarizeMeshFilter.h" #include <itkProcessObject.h> #include <itkImageRegionIterator.h> namespace rstk { /** Constructor */ template< typename TInputMesh, typename TOutputPixelType, unsigned int VDimension > MultilabelBinarizeMeshFilter< TInputMesh, TOutputPixelType, VDimension > ::MultilabelBinarizeMeshFilter(): m_NumberOfMeshes(0) { this->SetNumberOfRequiredInputs(1); m_Size.Fill(0); m_Index.Fill(0); m_Spacing.Fill(0.0); m_Origin.Fill(0.0); m_Direction.GetVnlMatrix().set_identity(); this->m_PreThreader = itk::MultiThreader::New(); this->m_NumberOfThreads = this->m_PreThreader->GetNumberOfThreads(); } template< typename TInputMesh, typename TOutputPixelType, unsigned int VDimension > void MultilabelBinarizeMeshFilter< TInputMesh, TOutputPixelType, VDimension > ::SetOutputReference(const itk::ImageBase<Dimension>* reference) { this->SetSize(reference->GetLargestPossibleRegion().GetSize()); this->SetOrigin(reference->GetOrigin()); this->SetDirection(reference->GetDirection()); this->SetSpacing(reference->GetSpacing()); } template< typename TInputMesh, typename TOutputPixelType, unsigned int VDimension > void MultilabelBinarizeMeshFilter< TInputMesh, TOutputPixelType, VDimension > ::GenerateOutputInformation() { // Get the output pointer OutputImagePointer output = this->GetOutput(); m_NumberOfRegions = m_NumberOfMeshes + 1; typename OutputImageType::RegionType region; region.SetSize(m_Size); region.SetIndex(m_Index); output->SetLargestPossibleRegion(region); // output->SetBufferedRegion(region); // set the region output->SetRequestedRegion(region); // output->SetSpacing(m_Spacing); // set spacing output->SetOrigin(m_Origin); // and origin output->SetDirection(m_Direction); // direction cosines output->SetNumberOfComponentsPerPixel(m_NumberOfRegions); output->Allocate(); 
OutputPixelType zero; zero.SetSize(m_NumberOfRegions); zero.Fill(0); zero[m_NumberOfMeshes] = 1; output->FillBuffer(zero); m_OutputSegmentation = OutputComponentType::New(); m_OutputSegmentation->SetLargestPossibleRegion(region); // m_OutputSegmentation->SetBufferedRegion(region); // set the region m_OutputSegmentation->SetRequestedRegion(region); // m_OutputSegmentation->SetSpacing(m_Spacing); // set spacing m_OutputSegmentation->SetOrigin(m_Origin); // and origin m_OutputSegmentation->SetDirection(m_Direction); // direction cosines m_OutputSegmentation->Allocate(); m_OutputSegmentation->FillBuffer(m_NumberOfMeshes); } template< typename TInputMesh, typename TOutputPixelType, unsigned int VDimension > void MultilabelBinarizeMeshFilter< TInputMesh, TOutputPixelType, VDimension > ::BinarizeThreaded(size_t num) { this->m_Components[num]->Update(); } template< typename TInputMesh, typename TOutputPixelType, unsigned int VDimension > void MultilabelBinarizeMeshFilter< TInputMesh, TOutputPixelType, VDimension > ::SplitRequestedFilters(itk::ThreadIdType id, itk::ThreadIdType num, std::vector<size_t>& res) { size_t total = this->m_Components.size(); if (num > total) itkExceptionMacro(<< "total number of threads overs number of filters") res.push_back(id); for(int rem = total; rem > num; ) { size_t nextId = id + num; if( nextId < total ) res.push_back(nextId); else return; rem-=num; } } template< typename TInputMesh, typename TOutputPixelType, unsigned int VDimension > ITK_THREAD_RETURN_TYPE MultilabelBinarizeMeshFilter< TInputMesh, TOutputPixelType, VDimension > ::BinarizeThreaderCallback(void *arg) { ThreadStruct *str; itk::ThreadIdType total, threadId, threadCount; threadId = ( (itk::MultiThreader::ThreadInfoStruct *)( arg ) )->ThreadID; threadCount = ( (itk::MultiThreader::ThreadInfoStruct *)( arg ) )->NumberOfThreads; str = (ThreadStruct *)(((itk::MultiThreader::ThreadInfoStruct *)(arg))->UserData); std::vector<size_t> filters; 
str->Filter->SplitRequestedFilters(threadId, threadCount, filters); for (size_t id = 0; id < filters.size(); id++) str->Filter->BinarizeThreaded(filters[id]); return ITK_THREAD_RETURN_VALUE; } template< typename TInputMesh, typename TOutputPixelType, unsigned int VDimension > void MultilabelBinarizeMeshFilter< TInputMesh, TOutputPixelType, VDimension > ::BeforeThreadedGenerateData() { // Get the input and output pointers OutputImagePointer output = this->GetOutput(); m_Components.clear(); // find the actual number of threads m_NumberOfThreads = this->GetNumberOfThreads(); if ( itk::MultiThreader::GetGlobalMaximumNumberOfThreads() != 0 ) { m_NumberOfThreads = vnl_math_min( this->GetNumberOfThreads(), itk::MultiThreader::GetGlobalMaximumNumberOfThreads() ); } long nbOfThreads = (m_NumberOfMeshes > m_NumberOfThreads)?m_NumberOfThreads:m_NumberOfMeshes; struct ThreadStruct str; str.Filter = this; for (size_t idx = 0; idx < m_NumberOfMeshes; idx++ ) { BinarizeMeshFilterPointer meshFilter = BinarizeMeshFilterType::New(); meshFilter->SetSpacing( m_Spacing ); meshFilter->SetDirection( m_Direction ); meshFilter->SetOrigin( m_Origin ); meshFilter->SetSize( m_Size ); meshFilter->SetInput( GetInput(idx) ); m_Components.push_back(meshFilter); } this->GetPreMultiThreader()->SetNumberOfThreads( nbOfThreads ); this->GetPreMultiThreader()->SetSingleMethod( this->BinarizeThreaderCallback, &str ); this->GetPreMultiThreader()->SingleMethodExecute(); // number of threads can be constrained by the region size, so call the // SplitRequestedRegion // to get the real number of threads which will be used RegionType splitRegion; // dummy region - just to call the following method nbOfThreads = this->SplitRequestedRegion(0, m_NumberOfThreads, splitRegion); } template< typename TInputMesh, typename TOutputPixelType, unsigned int VDimension > void MultilabelBinarizeMeshFilter< TInputMesh, TOutputPixelType, VDimension > ::ThreadedGenerateData(const RegionType & inputRegionForThread, 
itk::ThreadIdType threadId) { size_t nPix = inputRegionForThread.GetNumberOfPixels(); itk::ProgressReporter progress( this, threadId, nPix ); const OutputPixelValueType* compBuffer[m_NumberOfMeshes]; for( size_t comp = 0; comp < m_NumberOfMeshes; comp++ ) { compBuffer[comp] = m_Components[comp]->GetOutput()->GetBufferPointer(); } OutputComponentPointer ref = m_OutputSegmentation; OutputPixelValueType* segBuffer = ref->GetBufferPointer(); itk::ImageRegionIterator< OutputImageType > outIt( this->GetOutput(), inputRegionForThread ); outIt.GoToBegin(); size_t pix; OutputPixelValueType* outBuffer = this->GetOutput()->GetBufferPointer(); OutputPixelValueType v; while ( !outIt.IsAtEnd() ) { pix = ref->ComputeOffset(outIt.GetIndex()); for( size_t comp = 0; comp < m_NumberOfMeshes; comp++ ) { v = *( compBuffer[comp] + pix ); if( v > 0 ) { *( outBuffer + pix * m_NumberOfRegions + comp ) = 1; *( outBuffer + pix * m_NumberOfRegions + m_NumberOfMeshes ) = 0; *( segBuffer + pix ) = comp; break; } } progress.CompletedPixel(); ++outIt; } } template< typename TInputMesh, typename TOutputPixelType, unsigned int VDimension > typename MultilabelBinarizeMeshFilter< TInputMesh, TOutputPixelType, VDimension >::InputMeshType * MultilabelBinarizeMeshFilter< TInputMesh, TOutputPixelType, VDimension > ::GetInput(size_t idx) { return itkDynamicCastInDebugMode< TInputMesh * > ( this->ProcessObject::GetInput(idx) ); } } // namespace rstk #endif /* SOURCE_DIRECTORY__MODULES_FILTERING_INCLUDE_MULTILABELBINARIZEMESHFILTER_HXX_ */
xudonglei94/MyPhoneAssistant
app/src/main/java/org/crazyit/myphoneassistant/bean/requestbean/AppsUpdateBean.java
package org.crazyit.myphoneassistant.bean.requestbean;

/**
 * Request payload describing one installed app for the update check:
 * its package name and the currently installed version code.
 *
 * Field names are part of the serialized wire format — do not rename.
 *
 * Created by Administrator on 2018/6/27.
 */
public class AppsUpdateBean {

    private String packageName;
    private String versionCode;

    /** @return the app's package name */
    public String getPackageName() {
        return packageName;
    }

    /** @param packageName the app's package name */
    public void setPackageName(final String packageName) {
        this.packageName = packageName;
    }

    /** @return the installed version code */
    public String getVersionCode() {
        return versionCode;
    }

    /** @param versionCode the installed version code */
    public void setVersionCode(final String versionCode) {
        this.versionCode = versionCode;
    }
}
violetjs/violet
server/seeds.js
// Seed the development database on first boot: an admin, 30 test users,
// default settings, and a tree of categories/topics/posts. Runs only when
// both Settings and Categories are empty and we are not a test mirror.
// NOTE(review): '<EMAIL>' / '<PASSWORD>' below are runtime string literals
// (apparently redaction placeholders from an export) — confirm the intended
// real values against version control before relying on these accounts.
Meteor.startup(function () {
  // TODO: detect the first run
  if (Settings.find().count() === 0 && Categories.find().count() === 0) {
    // If not in test
    if (!process.env.IS_MIRROR) {
      console.log('Generating seeds...');

      // Populate test users
      var jonId = Accounts.createUser({
        username: 'jon',
        email: '<EMAIL>',
        password: '<PASSWORD>',
        profile: {
          username: 'jon',
        }
      });
      // Promote the first account to admin.
      Meteor.users.update(jonId, {$set: {isAdmin: true}});

      for (var i = 1; i <= 30; i++) {
        Accounts.createUser({
          username: 'user_' + i,
          email: 'test' + i + '@test.com',
          password: '<PASSWORD>',
          profile: {
            username: 'user_' + i
          }
        });
      }

      // Populate settings
      Settings.insert({
        public: {
          appName: 'Violet Development'
        }
      });

      // Stubs: temporarily replace Meteor.user/Meteor.userId so that the
      // Meteor.call()s below run as a chosen account. Originals are saved
      // first and restored at the end — the order of these assignments
      // matters.
      var _Meteor = {};
      _Meteor._user = Meteor.user;
      _Meteor._userId = Meteor.userId;

      // Impersonate a random one of the 30 test users.
      var stubUser = function() {
        var userNumber = _.random(1, 30);
        var currentUser = Meteor.users.findOne({username: 'user_' + userNumber});
        Meteor.user = function () {
          return currentUser;
        };
        Meteor.userId = function () {
          return currentUser._id;
        };
      };

      // Impersonate the admin account.
      var stubAdmin = function () {
        var jon = Meteor.users.findOne(jonId);
        Meteor.user = function () {
          return jon;
        };
        Meteor.userId = function () {
          return jon._id;
        };
      };

      // Populate categories, topics, and posts
      // (5 categories x 21 topics x 2-20 posts; categories created as admin,
      // topics/posts as random users).
      for (var i = 0; i < 5; i++) {
        stubAdmin();
        var category = {
          name: Fake.word(),
          description: Fake.paragraph()
        };
        var categoryId = Meteor.call('createCategory', category);

        for (var j = 0; j < 21; j++) {
          stubUser();
          var topic = {
            title: Fake.word(),
            body: Fake.paragraph(),
            categoryId: categoryId,
          };
          var topicSlug = Meteor.call('submitTopic', topic);
          var savedTopic = Topics.findOne({slug: topicSlug});

          var numPosts = _.random(2, 20);
          for (var k = 0; k < numPosts; k++) {
            stubUser();
            var post = {
              body: '<PASSWORD>',
              topicId: savedTopic._id
            };
            Meteor.call('submitPost', post);
          }
        }
      }

      // Restore stub
      Meteor.user = _Meteor._user;
      Meteor.userId = _Meteor._userId;
    }
  }
});
Andreas237/AndroidPolicyAutomation
ExtractedJars/Apk_Extractor_com.ext.ui.apk/javafiles/com/startapp/android/publish/ads/video/tracking/VideoClickedTrackingParams.java
<reponame>Andreas237/AndroidPolicyAutomation<filename>ExtractedJars/Apk_Extractor_com.ext.ui.apk/javafiles/com/startapp/android/publish/ads/video/tracking/VideoClickedTrackingParams.java // Decompiled by Jad v1.5.8g. Copyright 2001 <NAME>. // Jad home page: http://www.kpdus.com/jad.html // Decompiler options: packimports(3) annotate safe package com.startapp.android.publish.ads.video.tracking; // Referenced classes of package com.startapp.android.publish.ads.video.tracking: // VideoTrackingParams public class VideoClickedTrackingParams extends VideoTrackingParams { public static final class ClickOrigin extends Enum { public static ClickOrigin valueOf(String s) { return (ClickOrigin)Enum.valueOf(com/startapp/android/publish/ads/video/tracking/VideoClickedTrackingParams$ClickOrigin, s); // 0 0:ldc1 #2 <Class VideoClickedTrackingParams$ClickOrigin> // 1 2:aload_0 // 2 3:invokestatic #35 <Method Enum Enum.valueOf(Class, String)> // 3 6:checkcast #2 <Class VideoClickedTrackingParams$ClickOrigin> // 4 9:areturn } public static ClickOrigin[] values() { return (ClickOrigin[])((ClickOrigin []) ($VALUES)).clone(); // 0 0:getstatic #27 <Field VideoClickedTrackingParams$ClickOrigin[] $VALUES> // 1 3:invokevirtual #42 <Method Object _5B_Lcom.startapp.android.publish.ads.video.tracking.VideoClickedTrackingParams$ClickOrigin_3B_.clone()> // 2 6:checkcast #38 <Class VideoClickedTrackingParams$ClickOrigin[]> // 3 9:areturn } private static final ClickOrigin $VALUES[]; public static final ClickOrigin POSTROLL; public static final ClickOrigin VIDEO; static { POSTROLL = new ClickOrigin("POSTROLL", 0); // 0 0:new #2 <Class VideoClickedTrackingParams$ClickOrigin> // 1 3:dup // 2 4:ldc1 #16 <String "POSTROLL"> // 3 6:iconst_0 // 4 7:invokespecial #20 <Method void VideoClickedTrackingParams$ClickOrigin(String, int)> // 5 10:putstatic #22 <Field VideoClickedTrackingParams$ClickOrigin POSTROLL> VIDEO = new ClickOrigin("VIDEO", 1); // 6 13:new #2 <Class 
VideoClickedTrackingParams$ClickOrigin> // 7 16:dup // 8 17:ldc1 #23 <String "VIDEO"> // 9 19:iconst_1 // 10 20:invokespecial #20 <Method void VideoClickedTrackingParams$ClickOrigin(String, int)> // 11 23:putstatic #25 <Field VideoClickedTrackingParams$ClickOrigin VIDEO> $VALUES = (new ClickOrigin[] { POSTROLL, VIDEO }); // 12 26:iconst_2 // 13 27:anewarray ClickOrigin[] // 14 30:dup // 15 31:iconst_0 // 16 32:getstatic #22 <Field VideoClickedTrackingParams$ClickOrigin POSTROLL> // 17 35:aastore // 18 36:dup // 19 37:iconst_1 // 20 38:getstatic #25 <Field VideoClickedTrackingParams$ClickOrigin VIDEO> // 21 41:aastore // 22 42:putstatic #27 <Field VideoClickedTrackingParams$ClickOrigin[] $VALUES> //* 23 45:return } private ClickOrigin(String s, int i) { super(s, i); // 0 0:aload_0 // 1 1:aload_1 // 2 2:iload_2 // 3 3:invokespecial #29 <Method void Enum(String, int)> // 4 6:return } } public VideoClickedTrackingParams(String s, int i, int j, ClickOrigin clickorigin, String s1) { super(s, i, j, s1); // 0 0:aload_0 // 1 1:aload_1 // 2 2:iload_2 // 3 3:iload_3 // 4 4:aload 5 // 5 6:invokespecial #18 <Method void VideoTrackingParams(String, int, int, String)> clickOrigin = clickorigin; // 6 9:aload_0 // 7 10:aload 4 // 8 12:putfield #20 <Field VideoClickedTrackingParams$ClickOrigin clickOrigin> // 9 15:return } private String getClickOriginQuery() { return (new StringBuilder()).append("&co=").append(clickOrigin.toString()).toString(); // 0 0:new #25 <Class StringBuilder> // 1 3:dup // 2 4:invokespecial #28 <Method void StringBuilder()> // 3 7:ldc1 #30 <String "&co="> // 4 9:invokevirtual #34 <Method StringBuilder StringBuilder.append(String)> // 5 12:aload_0 // 6 13:getfield #20 <Field VideoClickedTrackingParams$ClickOrigin clickOrigin> // 7 16:invokevirtual #37 <Method String VideoClickedTrackingParams$ClickOrigin.toString()> // 8 19:invokevirtual #34 <Method StringBuilder StringBuilder.append(String)> // 9 22:invokevirtual #38 <Method String StringBuilder.toString()> 
// 10 25:areturn } public String getQueryString() { return getQueryString((new StringBuilder()).append(getCompletedQuery()).append(getClickOriginQuery()).append(getVideoPlayingModeQuery()).toString()); // 0 0:aload_0 // 1 1:new #25 <Class StringBuilder> // 2 4:dup // 3 5:invokespecial #28 <Method void StringBuilder()> // 4 8:aload_0 // 5 9:invokevirtual #42 <Method String getCompletedQuery()> // 6 12:invokevirtual #34 <Method StringBuilder StringBuilder.append(String)> // 7 15:aload_0 // 8 16:invokespecial #44 <Method String getClickOriginQuery()> // 9 19:invokevirtual #34 <Method StringBuilder StringBuilder.append(String)> // 10 22:aload_0 // 11 23:invokevirtual #47 <Method String getVideoPlayingModeQuery()> // 12 26:invokevirtual #34 <Method StringBuilder StringBuilder.append(String)> // 13 29:invokevirtual #38 <Method String StringBuilder.toString()> // 14 32:invokevirtual #50 <Method String getQueryString(String)> // 15 35:areturn } private static final long serialVersionUID = 1L; private ClickOrigin clickOrigin; }
daehyeonhong/Oracle
javach10/src/ch26/DateTimeCreateExample.java
package ch26;

import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.ZoneId;
import java.time.ZonedDateTime;

/**
 * Demonstrates creating java.time values: LocalDate, LocalTime,
 * LocalDateTime, ZonedDateTime, and Instant, plus Instant comparison.
 * (Original comments were Korean; translated to English. Korean strings
 * printed at runtime are kept unchanged.)
 */
public class DateTimeCreateExample {

    public static void main(String[] args) throws InterruptedException {
        // Get a date -> the current date
        LocalDate currDate = LocalDate.now();
        System.out.println(currDate);
        System.out.println(LocalDate.now());

        // LocalDate.of(year, month, day) builds a specific date
        LocalDate targetDate = LocalDate.of(2024, 5, 10);
        System.out.println(targetDate);

        // Get the current time
        LocalTime currTime = LocalTime.now();
        System.out.println(currTime);

        // LocalTime.of(hour, minute, second, nanoOfSecond)
        // (the original comment said "milliseconds"; the API takes nanoseconds)
        LocalTime targetTime = LocalTime.of(6, 30, 0, 0);
        System.out.println(targetTime);

        // Get date and time together
        LocalDateTime currDateTime = LocalDateTime.now();
        System.out.println(currDateTime);

        // LocalDateTime.of(year, month, day, hour, minute, second, nanoOfSecond)
        LocalDateTime targetDateTime = LocalDateTime.of(2024, 5, 10, 6, 30, 0, 0);
        System.out.println(targetDateTime);

        // Coordinated Universal Time (the printed label means "UTC")
        ZonedDateTime utcDateTime = ZonedDateTime.now(ZoneId.of("UTC"));
        System.out.println("협정 세계시: " + utcDateTime);

        // Time in a specific zone (label means "New York time")
        ZonedDateTime newyorkTime = ZonedDateTime.now(ZoneId.of("America/New_York"));
        System.out.println("뉴욕 시간: " + newyorkTime);

        // Timestamp of a specific moment
        Instant instant1 = Instant.now();
        System.out.println(instant1);
        Thread.sleep(10); // pause execution: sleep(milliseconds)
        Instant instant2 = Instant.now();

        // isBefore / isAfter compare timestamps
        // (messages mean "instant1/instant2 is earlier!" / "is later!")
        System.out.println(instant1.isBefore(instant2) ? "instant1이 빠르다!" : "instant2가 빠르다!");
        System.out.println(instant1.isAfter(instant2) ? "instant1이 늦다!" : "instant2이 늦다!");
    }
}
mikeboharsik/street-view-manager
src/components/GlobalState/selectors/selectFetcher.js
// Selector: returns the fetcher slice entry for the given type, or
// undefined when the state tree (or its fetcher branch) is absent.
export default function selectFetcher(state, type) {
  if (state == null) {
    return undefined;
  }
  const fetcherSlice = state.fetcher;
  if (fetcherSlice == null) {
    return undefined;
  }
  return fetcherSlice[type];
}
suryaumapathy2812/PDF-service
node_modules/@adobe/pdfservices-node-sdk/src/operation/rotate-pages-operation.js
/*
 * Copyright 2019 Adobe
 * All Rights Reserved.
 *
 * NOTICE: Adobe permits you to use, modify, and distribute this file in
 * accordance with the terms of the Adobe license agreement accompanying
 * it. If you have received this file from a source other than Adobe,
 * then your use, modification, or distribution of it requires the prior
 * written permission of Adobe.
 */
const RotatePagesService = require('../internal/api/rotate-pages-service'),
    OperationMessage = require('../internal/cpf/operation-message'),
    ExtensionMediaTypeMapping = require('../internal/extension-mediatype-mapping'),
    { getRandomFileNameWithExtension } = require('../internal/util/path-util'),
    logger = require('./../internal/logger'),
    PageActions = require('../internal/cpf/request/platform/page-actions'),
    PageRanges = require('./option/page-ranges'),
    DefaultConfig = require('../internal/config/dc-services-default-config.js'),
    {
        validateClientContext,
        validateFileRef,
        validateAllowedMediaType,
        validatePageRangesForOperation
    } = require('./../internal/util/validation-util');

/**
 *
 * Supported source file formats for {@link RotatePagesOperation}.
 * @enum
 * @readonly
 * @memberOf RotatePagesOperation
 *
 */
const SupportedSourceFormat = {
        /**
         * Represents "application/pdf" media type
         * @type {string}
         */
        pdf: ExtensionMediaTypeMapping.pdf.mediaType
    },
    /**
     *
     * Supported rotation angle values for {@link RotatePagesOperation}.
     * @enum
     * @readonly
     * @memberOf RotatePagesOperation
     *
     */
    Angle = {
        /**
         * Represents 90 degrees clockwise rotation
         * @type {number}
         */
        _90: 90,
        /**
         * Represents 180 degrees clockwise rotation
         * @type {number}
         */
        _180: 180,
        /**
         * Represents 270 degrees clockwise rotation
         * @type {number}
         */
        _270: 270,
    },
    // Internal configuration used by validate()/execute(); frozen below so it
    // cannot be mutated after module load.
    allowedConfiguration = {
        targetFileExtension: ExtensionMediaTypeMapping.pdf.extension,
        getSupportedMediaTypes() {
            return Object.values(SupportedSourceFormat);
        },
        Angle(){
            return Object.values(Angle)
        }
    };
Object.freeze(allowedConfiguration);
Object.freeze(SupportedSourceFormat);
Object.freeze(Angle);

/**
 * An operation that allows rotation of specific pages in a PDF file.
 *
 * Sample Usage:
 * <pre class="prettyprint">
 * <code>
 *  const credentials = PDFServicesSdk.Credentials.serviceAccountCredentialsBuilder()
 *            .fromFile("pdfservices-api-credentials.json")
 *            .build(),
 *        executionContext = PDFServicesSdk.ExecutionContext.create(credentials),
 *        RotatePages = PDFServicesSdk.RotatePages,
 *        rotatePagesOperation = RotatePages.Operation.createNew();
 *
 *  rotatePagesOperation.setInput(PDFServicesSdk.FileRef.createFromLocalFile('~/Documents/rotatePagesOperationInput.pdf',
 *  	RotatePages.SupportedSourceFormat.pdf));
 *  rotatePagesOperation.setAngleToRotatePagesBy(PDFServicesSdk.RotatePages.Angle._90);
 *
 *  rotatePagesOperation.execute(executionContext)
 *      .then(result => result.saveAsFile('output/rotatePagesOperationOutput.pdf'))
 *      .catch(err => console.log(err));
 * </code>
 * </pre>
 *
 */
class RotatePagesOperation {

    /**
     * @hideconstructor
     */
    constructor() {
        // Accumulates one rotate action per setAngleToRotatePagesBy() call.
        this.sourceFileRef = null;
        this.pageActions = new PageActions();
        Object.preventExtensions(this);
    }

    /**
     * Constructs a {@link RotatePagesOperation} instance.
     * @returns {RotatePagesOperation} A new RotatePagesOperation instance.
     *
     */
    static createNew() {
        return new RotatePagesOperation();
    }

    static get SupportedSourceFormat() {
        return SupportedSourceFormat;
    }

    static get Angle(){
        return Angle;
    }

    /**
     * Sets an input file.
     * @param {!FileRef} sourceFileRef - An input file.
     */
    setInput(sourceFileRef) {
        this.sourceFileRef = sourceFileRef;
    }

    /**
     * Sets angle (in clockwise direction) for rotating pages of the input PDF file; can be invoked
     * multiple times to set rotation angle for different set of pages.
     *
     * <p>
     * Multiple invocation of this method on the same set of pages can result in rotating pages multiple times.
     * <pre>
     * For e.g.:
     *  PageRanges pageRanges = new PDFServicesSdk.PageRanges();
     *  pageRanges.addSinglePage(1);
     *  rotatePagesOperation.setAngleToRotatePagesBy(PDFServicesSdk.RotatePages.Angle._90, pageRanges);
     *  rotatePagesOperation.setAngleToRotatePagesBy(PDFServicesSdk.RotatePages.Angle._180, pageRanges);
     * </pre>
     * Above invocations will effectively rotate pages (as specified by the page ranges) by 270 degrees.
     *
     * If <code>pageRanges</code> is not provided, all pages of the input PDF are rotated by the specified angle.
     *
     * @param {!RotatePagesOperation.Angle} angle - Angle for rotation; see {@link RotatePagesOperation.Angle} for valid angle values.
     * @param {PageRanges=} pageRanges - Page ranges of the PDF file for rotation.
     */
    setAngleToRotatePagesBy(angle, pageRanges){
        // Default: rotate every page.
        if(!pageRanges){
            pageRanges = new PageRanges();
            pageRanges.addAll();
        }
        validatePageRangesForOperation(pageRanges);
        this.validateAngleValue(angle);
        this.pageActions.withRotateAction(pageRanges.getRanges(),angle);
    }

    /**
     * Executes this operation using the supplied context and returns a Promise which resolves to the operation result.
     *
     * The resulting file may be stored in the system temporary directory (per the os.tempdir(), symlinks are resolved
     * to the actual path).
     * See {@link FileRef} for how temporary resources are cleaned up.
     *
     * @param {!ExecutionContext} context - The context in which the operation will be executed.
     * @returns {Promise<FileRef>} A promise which resolves to the operation result.
     * @throws {ServiceApiError} if an API call results in an error response.
     * @throws {ServiceUsageError} if service usage limits have been reached or credentials quota has been exhausted.
     */
    execute(context) {
        // Validation errors are surfaced as a rejected promise, never thrown
        // synchronously, so callers can rely on .catch().
        try {
            this.validate(context);
        } catch (err) {
            return Promise.reject(err);
        }
        logger.info('All validations successfully done. Beginning Rotate Pages operation execution');
        const targetFileName = getRandomFileNameWithExtension(allowedConfiguration.targetFileExtension),
            operationMessage = new OperationMessage(this.sourceFileRef, targetFileName, DefaultConfig.operationName.rotatePages),
            rotatePagesService = new RotatePagesService();
        operationMessage.setOptions(this.pageActions);
        return rotatePagesService.perform(context, operationMessage)
            .then(res => Promise.resolve(res))
            .catch(err => Promise.reject(err));
    }

    // Validates context, input file, media type, and that between 1 and 200
    // rotate actions have been configured.
    validate(context) {
        validateClientContext(context);
        validateFileRef(this.sourceFileRef);
        validateAllowedMediaType(allowedConfiguration.getSupportedMediaTypes(), this.sourceFileRef);
        if(!this.pageActions.pageActions || this.pageActions.pageActions.length === 0)
            throw new Error('No rotation specified for the operation');
        if(this.pageActions.pageActions.length > 200)
            throw new Error('Too many rotations not allowed.');
    }

    // Rejects missing angles and values outside {90, 180, 270}.
    validateAngleValue(angle){
        if(!angle) throw new Error('Rotate angle can not be null');
        if(!Object.values(Angle).includes(angle)) throw new Error('Invalid angle value');
    }

}

Object.freeze(RotatePagesOperation);
module.exports = RotatePagesOperation;
botandrose/adva_cms
adva_cms/test/plugins/adva_post_ping/post_ping_test.rb
require File.expand_path(File.dirname(__FILE__) + '/../../test_helper')

if Rails.plugin?(:adva_post_ping)
  require 'xmlrpc/client'

  # in a controller test for the Admin::ArticlesController
  # when you make sure that there's an article in the contents table
  # and you send a POST request to the controller so it sets the article state to published
  # then it should send the pings to the services as configured in ArticlePingObserver::SERVICES
  #
  # Tests use rr-style stubs/mocks (stub, mock, dont_allow) to isolate the
  # observer from real HTTP/XML-RPC traffic.
  class ArticlePingObserverTest < ActiveSupport::TestCase
    def setup
      super
      # Start each test with an empty service list; tests add entries as needed.
      ArticlePingObserver::SERVICES.clear
      @controller = ActionController::Base.new
      @observer = ArticlePingObserver.instance

      @blog = Blog.first
      @article = @blog.articles.first
      @site = @article.site

      @blog_url = "http://#{@site.host}/blog"
      @blog_feed_url = @blog_url + '.atom'
      # Expected ping-o-matic style GET url built from blog metadata.
      @pom_get_url = "http://my.pom.get.ping.site?title=#{@blog.title}&blogurl=#{@blog_url}&rssurl=#{@blog_feed_url}"

      # The observer asks its controller for blog urls; stub those lookups.
      stub(@observer).controller.returns @controller
      stub(@controller).blog_url.returns @blog_url
      stub(@controller).blog_feed_url.with(@blog, :format => :atom).returns @blog_feed_url
    end

    test "does not ping when the article is not published" do
      @article.published_at = nil
      dont_allow(@observer).rest_ping
      dont_allow(@observer).pom_get_ping
      dont_allow(@observer).xmlrpc_ping
      @observer.after_save(@article)
    end

    test "does a :rest_ping when the service type is :rest" do
      ArticlePingObserver::SERVICES << { :url => "http://my.rest.ping.site", :type => :rest }
      mock(@observer).rest_ping("http://my.rest.ping.site", @article)
      dont_allow(@observer).pom_get_ping
      dont_allow(@observer).xmlrpc_ping
      @observer.after_save(@article)
    end

    test "does a :pom_get_ping when the service type is :pom_get" do
      ArticlePingObserver::SERVICES << { :url => "http://my.pom.get.ping.site", :type => :pom_get }
      mock(@observer).pom_get_ping("http://my.pom.get.ping.site", @article, nil)
      dont_allow(@observer).rest_ping
      dont_allow(@observer).xmlrpc_ping
      @observer.after_save(@article)
    end

    test "defaults to a :xmlrpc_ping when the service type is anything else than :rest or :pom_get" do
      ArticlePingObserver::SERVICES << { :url => "http://my.xmlrpc.ping.site", :type => :anything_else }
      mock(@observer).xmlrpc_ping("http://my.xmlrpc.ping.site", @article)
      dont_allow(@observer).rest_ping
      dont_allow(@observer).pom_get_ping
      @observer.after_save(@article)
    end

    test "does a :pom_get ping" do
      # NOTE(review): URI.escape is deprecated (removed in Ruby 3.0) — the
      # observer under test would need updating before a Ruby 3 upgrade.
      url = URI.escape @pom_get_url
      uri = URI.parse url
      mock(Net::HTTP).get(uri)
      @observer.send :pom_get_ping, "http://my.pom.get.ping.site", @article
    end

    test "does a :rest_ping ping" do
      uri = URI.parse "http://my.rest.ping.site"
      post_info = { "name" => @blog.title, "url" => @blog_url }
      # Fake a successful HTTP response object for post_form to return.
      success = Net::HTTPSuccess.new(:httpv, :code, :msg)
      stub(success).kind_of?(anything).returns true
      stub(success).body.returns ''
      mock(Net::HTTP).post_form(uri, post_info).returns(success)
      @observer.send :rest_ping, "http://my.rest.ping.site", @article
    end

    test "does a :xmlrpc_ping ping" do
      client = XMLRPC::Client.new
      stub(XMLRPC::Client).new2.returns client
      mock(client).call2('weblogUpdates.extendedPing', @blog.title, @blog_url, @blog_feed_url, "foo|bar")
      @observer.send :xmlrpc_ping, "http://my.xmlrpc.ping.site", @article
    end

    test "#pom_get_url returns a pingomatic url" do
      url = @observer.send :pom_get_url, "http://my.pom.get.ping.site", @article
      url.should == @pom_get_url
    end
  end
end
RuhrpottGangster/SwordGamesNET-Bungee
src/de/bySwordGames/Bungee/Commands/PingCommand.java
/**
 * Class: PingCommand.java
 * Created: 12.05.2017 | 21:45:56
 * Author: bySwordGames
 *
 * BungeeCord /ping command: shows the sender's own ping, or — given one
 * argument — the ping of another online player. Player-facing messages are
 * German by design and are left untranslated.
 */
package de.bySwordGames.Bungee.Commands;

import de.bySwordGames.Bungee.Bungee;
import net.md_5.bungee.BungeeCord;
import net.md_5.bungee.api.CommandSender;
import net.md_5.bungee.api.connection.ProxiedPlayer;
import net.md_5.bungee.api.plugin.Command;

public class PingCommand extends Command {

    public PingCommand(String name) {
        super(name);
    }

    // Suppression covers deprecated BungeeCord API usage (e.g. getPing()).
    @SuppressWarnings("deprecation")
    @Override
    public void execute(CommandSender sender, String[] args) {
        // Only players have a ping; reject console/other senders.
        if(!(sender instanceof ProxiedPlayer)) {
            sender.sendMessage(Bungee.prefix + "§cDu musst ein Spieler sein, um diesen Befehl nutzen zu können.");
            return;
        }
        ProxiedPlayer player = (ProxiedPlayer) sender;

        if(args.length == 0) {
            // "/ping" -> the sender's own ping
            player.sendMessage(Bungee.prefix + "§7Dein Ping beträgt §e" + player.getPing() + "ms§7.");
        } else if(args.length == 1) {
            // "/ping <name>" -> another online player's ping
            ProxiedPlayer target = BungeeCord.getInstance().getPlayer(args[0]);
            if(target == null) {
                player.sendMessage(Bungee.prefix + "§cDer Spieler §e" + args[0] + "§c ist nicht online.");
                return;
            }
            player.sendMessage(Bungee.prefix + "§7Der Ping von " + target.getDisplayName() + "§7 beträgt §e" + target.getPing() + "ms§7.");
        } else {
            // Too many arguments -> generic usage/unknown-command message.
            player.sendMessage(Bungee.prefix + Bungee.unknownCommand);
        }
    }
}
felidsche/BigDataBench_V5.0_BigData_ComponentBenchmark
Hadoop/apache-mahout-0.10.2-compile/integration/src/main/java/org/apache/mahout/utils/email/MailOptions.java
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.mahout.utils.email;

import java.io.File;
import java.nio.charset.Charset;
import java.util.Map;
import java.util.regex.Pattern;

/**
 * Configuration options to be used by {@link MailProcessor}. Includes options controlling the exact output format
 * and which mail fields are included (body, to, from, subject, etc.)
 */
public class MailOptions {

  // Keys used in patternOrder to map metadata fields to their position in
  // patternsToMatch (see MailToRecMapper).
  public static final String FROM = "FROM";
  public static final String TO = "TO";
  public static final String REFS = "REFS";
  public static final String SUBJECT = "SUBJECT";
  // Default quoted-text marker: a line starting with '|' or '>'.
  public static final Pattern DEFAULT_QUOTED_TEXT = Pattern.compile("^(\\||>)");

  private boolean stripQuotedText;
  private File input;
  private String outputDir;
  private String prefix;
  private int chunkSize;
  private Charset charset;
  private String separator;
  private String bodySeparator = "\n";
  private boolean includeBody;
  private Pattern[] patternsToMatch;
  //maps FROM, TO, REFS, SUBJECT, etc. to the order they appear in patternsToMatch.  See MailToRecMapper
  private Map<String, Integer> patternOrder;

  //the regular expression to use for identifying quoted text.
  private Pattern quotedTextPattern = DEFAULT_QUOTED_TEXT;

  /** @return the mail archive file or directory to process */
  public File getInput() {
    return input;
  }

  public void setInput(File input) {
    this.input = input;
  }

  /** @return the directory sequence files are written to */
  public String getOutputDir() {
    return outputDir;
  }

  /**
   * Sets the output directory where sequence files will be written.
   */
  public void setOutputDir(String outputDir) {
    this.outputDir = outputDir;
  }

  /** @return the key prefix combined with archive name and message ids */
  public String getPrefix() {
    return prefix;
  }

  /**
   * Sets the prefix that is combined with the archive name and with message ids to create {@code SequenceFile} keys.
   * @param prefix The name of the directory containing the mail archive is commonly used.
   */
  public void setPrefix(String prefix) {
    this.prefix = prefix;
  }

  /** @return the size of each generated sequence file, in Megabytes */
  public int getChunkSize() {
    return chunkSize;
  }

  /**
   * Sets the size of each generated sequence file, in Megabytes.
   */
  public void setChunkSize(int chunkSize) {
    this.chunkSize = chunkSize;
  }

  /** @return the character encoding of the input */
  public Charset getCharset() {
    return charset;
  }

  /**
   * Sets the encoding of the input
   */
  public void setCharset(Charset charset) {
    this.charset = charset;
  }

  /** @return the separator written between metadata items */
  public String getSeparator() {
    return separator;
  }

  /**
   * Sets the separator to use in the output between metadata items (to, from, etc.).
   */
  public void setSeparator(String separator) {
    this.separator = separator;
  }

  /** @return the separator written between body lines (default "\n") */
  public String getBodySeparator() {
    return bodySeparator;
  }

  /**
   * Sets the separator to use in the output between lines in the body, the default is "\n".
   */
  public void setBodySeparator(String bodySeparator) {
    this.bodySeparator = bodySeparator;
  }

  /** @return true if mail bodies are included in the output */
  public boolean isIncludeBody() {
    return includeBody;
  }

  /**
   * Sets whether mail bodies are included in the output
   */
  public void setIncludeBody(boolean includeBody) {
    this.includeBody = includeBody;
  }

  /** @return the ordered patterns used to extract metadata fields */
  public Pattern[] getPatternsToMatch() {
    return patternsToMatch;
  }

  /**
   * Sets the list of patterns to be applied in the given order to extract metadata fields (to, from, subject, etc.)
   * from the input
   */
  public void setPatternsToMatch(Pattern[] patternsToMatch) {
    this.patternsToMatch = patternsToMatch;
  }

  /** @return map from field name (FROM, TO, ...) to its index in patternsToMatch */
  public Map<String, Integer> getPatternOrder() {
    return patternOrder;
  }

  public void setPatternOrder(Map<String, Integer> patternOrder) {
    this.patternOrder = patternOrder;
  }

  /**
   *
   * @return true if we should strip out quoted email text
   */
  public boolean isStripQuotedText() {
    return stripQuotedText;
  }

  /**
   *
   * Sets whether quoted text such as lines starting with | or > is striped off.
   */
  public void setStripQuotedText(boolean stripQuotedText) {
    this.stripQuotedText = stripQuotedText;
  }

  /** @return the pattern identifying quoted-text lines */
  public Pattern getQuotedTextPattern() {
    return quotedTextPattern;
  }

  /**
   * Sets the {@link java.util.regex.Pattern} to use to identify lines that are quoted text. Default is | and >
   * @see #setStripQuotedText(boolean)
   */
  public void setQuotedTextPattern(Pattern quotedTextPattern) {
    this.quotedTextPattern = quotedTextPattern;
  }
}