repo_name stringlengths 6 101 | path stringlengths 4 300 | text stringlengths 7 1.31M |
|---|---|---|
ScalablyTyped/SlinkyTyped | q/qlik/src/main/scala/typingsSlinky/qlik/mod/LanguageCodes.scala | package typingsSlinky.qlik.mod
import typingsSlinky.qlik.qlikStrings.`zh-CN`
import typingsSlinky.qlik.qlikStrings.`zh-TW`
import typingsSlinky.qlik.qlikStrings.ts_
import org.scalablytyped.runtime.StObject
import scala.scalajs.js
import scala.scalajs.js.`|`
import scala.scalajs.js.annotation.{JSGlobalScope, JSGlobal, JSImport, JSName, JSBracketAccess}
/** Qlik UI language codes, generated by ScalablyTyped from the qlik
 * TypeScript typings. Each language is modelled as a marker trait extending
 * `_ALL`, with a companion object exposing inline constructors for the
 * accepted string-literal codes (e.g. `German.de` produces the literal
 * type for "de"). The two Chinese variants are plain string-literal type
 * aliases rather than `_ALL` subtypes, hence the explicit union in `ALL`.
 */
object LanguageCodes {
/* Rewritten from type alias, can be one of:
- typingsSlinky.qlik.mod.LanguageCodes.German
- typingsSlinky.qlik.mod.LanguageCodes.English
- typingsSlinky.qlik.mod.LanguageCodes.Spanish
- typingsSlinky.qlik.mod.LanguageCodes.French
- typingsSlinky.qlik.mod.LanguageCodes.Italian
- typingsSlinky.qlik.mod.LanguageCodes.Japanese
- typingsSlinky.qlik.mod.LanguageCodes.Korean
- typingsSlinky.qlik.mod.LanguageCodes.Dutch
- typingsSlinky.qlik.mod.LanguageCodes.Polish
- typingsSlinky.qlik.mod.LanguageCodes.BrazilianPortuguese
- typingsSlinky.qlik.mod.LanguageCodes.Russian
- typingsSlinky.qlik.mod.LanguageCodes.Swedish
- typingsSlinky.qlik.mod.LanguageCodes.Turkish
- typingsSlinky.qlik.mod.LanguageCodes.SimplifiedChinese
- typingsSlinky.qlik.mod.LanguageCodes.TraditionalChinese
*/
// Union of every supported language code type.
type ALL = _ALL | SimplifiedChinese | TraditionalChinese
/* Rewritten from type alias, can be one of:
- typingsSlinky.qlik.qlikStrings.pt
- typingsSlinky.qlik.qlikStrings.`pt-BR`
*/
trait BrazilianPortuguese extends _ALL
object BrazilianPortuguese {
@scala.inline
def pt: typingsSlinky.qlik.qlikStrings.pt = "pt".asInstanceOf[typingsSlinky.qlik.qlikStrings.pt]
@scala.inline
def `pt-BR`: typingsSlinky.qlik.qlikStrings.`pt-BR` = "pt-BR".asInstanceOf[typingsSlinky.qlik.qlikStrings.`pt-BR`]
}
/* Rewritten from type alias, can be one of:
- typingsSlinky.qlik.qlikStrings.nl
- typingsSlinky.qlik.qlikStrings.`nl-NL`
*/
trait Dutch extends _ALL
object Dutch {
@scala.inline
def nl: typingsSlinky.qlik.qlikStrings.nl = "nl".asInstanceOf[typingsSlinky.qlik.qlikStrings.nl]
@scala.inline
def `nl-NL`: typingsSlinky.qlik.qlikStrings.`nl-NL` = "nl-NL".asInstanceOf[typingsSlinky.qlik.qlikStrings.`nl-NL`]
}
/* Rewritten from type alias, can be one of:
- typingsSlinky.qlik.qlikStrings.en
- typingsSlinky.qlik.qlikStrings.`en-US`
*/
trait English extends _ALL
object English {
@scala.inline
def en: typingsSlinky.qlik.qlikStrings.en = "en".asInstanceOf[typingsSlinky.qlik.qlikStrings.en]
@scala.inline
def `en-US`: typingsSlinky.qlik.qlikStrings.`en-US` = "en-US".asInstanceOf[typingsSlinky.qlik.qlikStrings.`en-US`]
}
/* Rewritten from type alias, can be one of:
- typingsSlinky.qlik.qlikStrings.fr
- typingsSlinky.qlik.qlikStrings.`fr-FR`
*/
trait French extends _ALL
object French {
@scala.inline
def fr: typingsSlinky.qlik.qlikStrings.fr = "fr".asInstanceOf[typingsSlinky.qlik.qlikStrings.fr]
@scala.inline
def `fr-FR`: typingsSlinky.qlik.qlikStrings.`fr-FR` = "fr-FR".asInstanceOf[typingsSlinky.qlik.qlikStrings.`fr-FR`]
}
/* Rewritten from type alias, can be one of:
- typingsSlinky.qlik.qlikStrings.de
- typingsSlinky.qlik.qlikStrings.`de-DE`
*/
trait German extends _ALL
object German {
@scala.inline
def de: typingsSlinky.qlik.qlikStrings.de = "de".asInstanceOf[typingsSlinky.qlik.qlikStrings.de]
@scala.inline
def `de-DE`: typingsSlinky.qlik.qlikStrings.`de-DE` = "de-DE".asInstanceOf[typingsSlinky.qlik.qlikStrings.`de-DE`]
}
/* Rewritten from type alias, can be one of:
- typingsSlinky.qlik.qlikStrings.it
- typingsSlinky.qlik.qlikStrings.`it-IT`
*/
trait Italian extends _ALL
object Italian {
@scala.inline
def it: typingsSlinky.qlik.qlikStrings.it = "it".asInstanceOf[typingsSlinky.qlik.qlikStrings.it]
@scala.inline
def `it-IT`: typingsSlinky.qlik.qlikStrings.`it-IT` = "it-IT".asInstanceOf[typingsSlinky.qlik.qlikStrings.`it-IT`]
}
/* Rewritten from type alias, can be one of:
- typingsSlinky.qlik.qlikStrings.ja
- typingsSlinky.qlik.qlikStrings.`ja-JP`
*/
trait Japanese extends _ALL
object Japanese {
@scala.inline
def ja: typingsSlinky.qlik.qlikStrings.ja = "ja".asInstanceOf[typingsSlinky.qlik.qlikStrings.ja]
@scala.inline
def `ja-JP`: typingsSlinky.qlik.qlikStrings.`ja-JP` = "ja-JP".asInstanceOf[typingsSlinky.qlik.qlikStrings.`ja-JP`]
}
/* Rewritten from type alias, can be one of:
- typingsSlinky.qlik.qlikStrings.ko
- typingsSlinky.qlik.qlikStrings.`ko-KR`
*/
trait Korean extends _ALL
object Korean {
@scala.inline
def ko: typingsSlinky.qlik.qlikStrings.ko = "ko".asInstanceOf[typingsSlinky.qlik.qlikStrings.ko]
@scala.inline
def `ko-KR`: typingsSlinky.qlik.qlikStrings.`ko-KR` = "ko-KR".asInstanceOf[typingsSlinky.qlik.qlikStrings.`ko-KR`]
}
/* Rewritten from type alias, can be one of:
- typingsSlinky.qlik.qlikStrings.pl
- typingsSlinky.qlik.qlikStrings.`pl-PL`
*/
trait Polish extends _ALL
object Polish {
@scala.inline
def pl: typingsSlinky.qlik.qlikStrings.pl = "pl".asInstanceOf[typingsSlinky.qlik.qlikStrings.pl]
@scala.inline
def `pl-PL`: typingsSlinky.qlik.qlikStrings.`pl-PL` = "pl-PL".asInstanceOf[typingsSlinky.qlik.qlikStrings.`pl-PL`]
}
/* Rewritten from type alias, can be one of:
- typingsSlinky.qlik.qlikStrings.ru
- typingsSlinky.qlik.qlikStrings.`ru-RU`
*/
trait Russian extends _ALL
object Russian {
@scala.inline
def ru: typingsSlinky.qlik.qlikStrings.ru = "ru".asInstanceOf[typingsSlinky.qlik.qlikStrings.ru]
@scala.inline
def `ru-RU`: typingsSlinky.qlik.qlikStrings.`ru-RU` = "ru-RU".asInstanceOf[typingsSlinky.qlik.qlikStrings.`ru-RU`]
}
// Alias for the "zh-CN" string literal type (no long/short code pair).
type SimplifiedChinese = `zh-CN`
/* Rewritten from type alias, can be one of:
- typingsSlinky.qlik.qlikStrings.es
- typingsSlinky.qlik.qlikStrings.`es-ES`
*/
trait Spanish extends _ALL
object Spanish {
@scala.inline
def es: typingsSlinky.qlik.qlikStrings.es = "es".asInstanceOf[typingsSlinky.qlik.qlikStrings.es]
@scala.inline
def `es-ES`: typingsSlinky.qlik.qlikStrings.`es-ES` = "es-ES".asInstanceOf[typingsSlinky.qlik.qlikStrings.`es-ES`]
}
/* Rewritten from type alias, can be one of:
- typingsSlinky.qlik.qlikStrings.sv
- typingsSlinky.qlik.qlikStrings.`sv-SE`
*/
trait Swedish extends _ALL
object Swedish {
@scala.inline
def sv: typingsSlinky.qlik.qlikStrings.sv = "sv".asInstanceOf[typingsSlinky.qlik.qlikStrings.sv]
@scala.inline
def `sv-SE`: typingsSlinky.qlik.qlikStrings.`sv-SE` = "sv-SE".asInstanceOf[typingsSlinky.qlik.qlikStrings.`sv-SE`]
}
// Alias for the "zh-TW" string literal type (no long/short code pair).
type TraditionalChinese = `zh-TW`
/* Rewritten from type alias, can be one of:
- typingsSlinky.qlik.qlikStrings.ts_
- typingsSlinky.qlik.qlikStrings.`ts-TR`
*/
// NOTE(review): the upstream qlik typings spell Turkish as "ts"/"ts-TR"
// (ISO 639-1 Turkish would be "tr"/"tr-TR"); kept as generated since
// qlikStrings only declares ts_/`ts-TR` — confirm against the qlik API
// before "fixing" the spelling here.
trait Turkish extends _ALL
object Turkish {
@scala.inline
def ts: ts_ = "ts".asInstanceOf[ts_]
@scala.inline
def `ts-TR`: typingsSlinky.qlik.qlikStrings.`ts-TR` = "ts-TR".asInstanceOf[typingsSlinky.qlik.qlikStrings.`ts-TR`]
}
// Marker base trait shared by every language type except the Chinese aliases.
trait _ALL extends StObject
}
|
ColombiaTransit/otp-react-redux | lib/components/viewers/related-panel.js | <reponame>ColombiaTransit/otp-react-redux
import React from 'react'
import { FormattedMessage } from 'react-intl'
function RelatedPanel ({ children, count, expanded, onToggleExpanded, title, titleWidth }) {
return (
<>
<h4 className='related-panel-title' style={{width: titleWidth}}>{title}</h4>
<div className='related-panel-container'>
{children}
</div>
{count > 2 && (
<button
className='related-panel-expand-view'
onClick={onToggleExpanded}
>
{expanded
? <FormattedMessage id='components.RelatedPanel.hideExtraStops' />
: <FormattedMessage id='components.RelatedPanel.showExtraStops' values={{count: count - 2}} />
}
</button>
)}
</>
)
}
export default RelatedPanel
|
GameDevery/Lumino | lumino/Platform/examples/Web/Main.cpp | <gh_stars>100-1000
#include <emscripten.h>
#include <LuminoCore.hpp>
#include <LuminoPlatform/PlatformModule.hpp>
#include <LuminoPlatform/Platform.hpp>
#include <LuminoPlatform/PlatformWindow.hpp>
using namespace ln;
// Debug listener that logs every platform event's numeric type to stdout.
// Returning false indicates the event was not consumed here.
// NOTE(review): "IPlatforEventListener" (missing 'm') is the interface name
// as declared by LuminoPlatform — do not "fix" the spelling in this file.
struct EventListener : IPlatforEventListener
{
bool onPlatformEvent(const PlatformEventArgs& e) override
{
printf("event %d\n", e.type);
return false;
}
};
// Per-frame callback invoked by the browser via emscripten's main loop;
// pumps pending platform window/input events each iteration.
static void mainLoop()
{
Platform::processEvents();
}
// Entry point: set up logging, initialize the platform module, attach the
// event-logging listener to the main window, then hand control to the
// browser's animation loop at 60 fps.
int main(int argc, char** argv)
{
Logger::addStdErrAdapter();
if (!PlatformModule::initialize()) return 1;
// NOTE(review): with simulate_infinite_loop=true, emscripten_set_main_loop
// unwinds the stack and does not return, so `listener`'s frame is abandoned
// rather than destroyed; the pointer registered below remains in use.
// Confirm this matches Lumino's listener-lifetime expectations.
EventListener listener;
Platform::mainWindow()->attachEventListener(&listener);
emscripten_set_main_loop(mainLoop, 60, true);
return 0;
}
/*
cd build/buildtrees/wasm32-emscripten/lumino/lumino/Platform/examples/Web
python -m http.server 8080
http://localhost:8080
https://alex-wasm.appspot.com/threads/index.html
*/
|
Tyav/Fintrack-backend | config/routes.js | <gh_stars>0
'use strict';
const routes = require('../app/routes/index');
const error = require('./error');
module.exports = function(app) {
app.use('/api/v1', routes);
/**
* Error handling
*/
// if error is not an instanceOf APIError, convert it.
app.use(error.converter);
// catch 404 and forward to error handler
app.use(error.notFound);
app.use(error[500]);
// error handler, send stacktrace only during development
app.use(error.handler);
};
|
jvirkki/heliod | src/server/frame/model.cpp | <filename>src/server/frame/model.cpp<gh_stars>10-100
/*
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
*
* Copyright 2008 Sun Microsystems, Inc. All rights reserved.
*
* THE BSD LICENSE
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* Neither the name of the nor the names of its contributors may be
* used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
* OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/*
* model.cpp: String and pblock interpolation
*
* <NAME>
*/
#include "netsite.h"
#include "base/pool.h"
#include "base/ereport.h"
#include "frame/expr.h"
#include "frame/model.h"
#include "frame/dbtframe.h"
#include "expr_parse.h"
#include "expr_pvt.h"
#include "model_pvt.h"
/*
* FragmentType names the different types of fragments that may be encountered
* while scanning an interpolative string.
*/
enum FragmentType {
FRAGMENT_TYPE_NONESCAPED,     // literal text, copied through verbatim
FRAGMENT_TYPE_ESCAPED_DOLLAR, // the "$$" escape, emitted as a single '$'
FRAGMENT_TYPE_VARIABLE,       // "$var" or "${var}"
FRAGMENT_TYPE_MAP,            // "$map{expr}" or "${map{expr}}"
FRAGMENT_TYPE_EXPRESSION      // "$(expr)"
};
/*
* ScannedFragment contains information collected while scanning a string
* fragment.
*/
// Filled in by model_scan(); only the member group matching `type` is
// meaningful. All pointers reference (not copy) the scanned input string.
struct ScannedFragment {
FragmentType type;
// Valid for FRAGMENT_TYPE_NONESCAPED and FRAGMENT_TYPE_ESCAPED_DOLLAR
struct {
const char *p;
int len;
} text;
// Valid for FRAGMENT_TYPE_VARIABLE and FRAGMENT_TYPE_MAP
struct {
const char *p;
int len;
} identifier;
// Valid for FRAGMENT_TYPE_MAP
struct {
const char *p;
int len;
} subscript;
// Valid for FRAGMENT_TYPE_EXPRESSION
struct {
const char *p;
int len;
} expression;
};
/*
* Interpolator provides an environment in which a synthetic string may be
* constructed.
*/
class Interpolator {
public:
/*
* Prepare to construct a synthetic string in the given Context.
*/
inline Interpolator(Context& context);
/*
* Return a pointer where len bytes of a synthetic string may be stored.
* Returns NULL on out of memory errors.
*/
inline char *require(int len);
/*
* Indicate that len bytes have been stored at the address most recently
* returned by require.
*/
inline void advance(int len);
/*
* Nul-terminate the synthetic string and return a pointer to it. The
* returned string will have been allocated from the pool of the Context
* specified when this Interpolator was constructed. The returned string
* is not freed when the context is destroyed. Returns NULL on out of
* memory errors.
*/
inline char *terminate();
/*
* Return the current length of the synthetic string, excluding any
* terminating nul.
*/
inline int length() const { return pos; }
/*
* The context (sn, rq, and pool) the synthetic string is being constructed
* in.
*/
Context& context;
private:
// Copying is forbidden; these are declared but never defined.
Interpolator(const Interpolator&);
Interpolator& operator=(const Interpolator&);
char *p;  // pool-allocated buffer under construction (NULL until first require)
int pos;  // bytes written so far, excluding the trailing nul
int size; // current buffer capacity, including room for the trailing nul
};
/*
* Fragment is the abstract base class for the constituent fragments of string
* models.
*/
class Fragment {
public:
Fragment() { }
virtual ~Fragment() { }
// Return a deep copy of this fragment.
virtual Fragment *dup() const = 0;
// Append this fragment's (possibly evaluated) text to the interpolator's
// buffer. Returns PR_FAILURE on evaluation or allocation errors.
virtual PRStatus interpolate(Interpolator& interpolator) const = 0;
private:
// Copying is forbidden; these are declared but never defined.
Fragment(const Fragment&);
Fragment& operator=(const Fragment&);
};
/*
* Parameter describes a name-value pair from a pblock model.
*/
class Parameter {
public:
/*
* Construct a parameter from the given name and value. The Parameter
* creates a copy of the passed name but assumes ownership of the passed
* string model value.
*/
Parameter(const char *name, ModelString *value);
/*
* Destroy the parameter, its name string, and its value string model.
*/
~Parameter();
/*
* Create a copy of the parameter.
*/
Parameter *dup() const;
/*
* Interpolate the parameter's value and add the resulting synthetic
* parameter to the specified pblock.
*/
inline PRStatus interpolate(Context& context, pblock *pb) const;
char * const name;          // PERM_STRDUP'd copy of the parameter name
const pb_key * const key;   // interned pb_key for name, or NULL if none exists
ModelString * const value;  // owned value model, deleted in the destructor
private:
// Copying is forbidden; these are declared but never defined.
Parameter(const Parameter&);
Parameter& operator=(const Parameter&);
};
/*
* ModelPblock is a model from which a synthetic pblock may be constructed.
*/
class ModelPblock {
public:
/*
* Construct an empty pblock model.
*/
ModelPblock();
/*
* Destroy a pblock model and its constituent parameters.
*/
~ModelPblock();
/*
* Create a copy of the pblock model.
*/
ModelPblock *dup() const;
/*
* Add a parameter to the pblock model. The pblock model assumes ownership
* of the passed string model.
*/
void addParameter(const char *name, ModelString *value);
/*
* Indicate whether the pblock model requires interpolation.
* NOTE(review): presumably set by addParameter when a value model is
* interpolative — addParameter's definition is not in this view; confirm.
*/
PRBool isInterpolative() const { return interpolative; }
/*
* Construct a synthetic pblock, allocated from the passed Context's pool,
* based on the pblock model.
*/
inline pblock *interpolate(Context& context) const;
private:
// Copying is forbidden; these are declared but never defined.
ModelPblock(const ModelPblock&);
ModelPblock& operator=(const ModelPblock&);
PtrVector<Parameter> parameters; // owned Parameter objects, deleted in dtor
PRBool interpolative;
};
/* ------------------------------ backslash ------------------------------- */
/*
 * Append the escaped rendering of c to result if c needs escaping inside a
 * double-quoted string. Returns PR_TRUE when an escape sequence was
 * appended, PR_FALSE when the caller should emit c verbatim.
 */
static inline PRBool backslash(char c, NSString& result)
{
    // Characters that have a conventional named escape sequence
    const char *named = NULL;
    switch (c) {
    case '\\':    named = "\\\\"; break;
    case '"':     named = "\\\""; break;
    case '\n':    named = "\\n";  break;
    case '\r':    named = "\\r";  break;
    case '\t':    named = "\\t";  break;
    case '\f':    named = "\\f";  break;
    case '\b':    named = "\\b";  break;
    case '\a':    named = "\\a";  break;
    case '\x01b': named = "\\e";  break;
    case '\0':    named = "\\0";  break;
    }

    if (named != NULL) {
        result.append(named);
        return PR_TRUE;
    }

    // Any other unprintable character is rendered as a hex escape
    if (!isprint(c)) {
        result.printf("\\x%02x", c);
        return PR_TRUE;
    }

    // Printable and unremarkable; no escaping required
    return PR_FALSE;
}
/* ----------------------------- unbackslash ------------------------------ */
/*
 * Decode the escape sequence starting at p (the character just after a
 * backslash), appending the decoded character to result. Returns the number
 * of input characters consumed, or 0 if the sequence is not recognized.
 */
static inline int unbackslash(const char *p, NSString& result)
{
char *endptr;
int n = 0;
switch (*p) {
case '\\':
result.append('\\');
n = 1;
break;
case '"':
result.append('"');
n = 1;
break;
case '\'':
result.append('\'');
n = 1;
break;
case 'n':
result.append('\n');
n = 1;
break;
case 'r':
result.append('\r');
n = 1;
break;
case 't':
result.append('\t');
n = 1;
break;
case 'f':
result.append('\f');
n = 1;
break;
case 'b':
result.append('\b');
n = 1;
break;
case 'a':
result.append('\a');
n = 1;
break;
case 'e': // esc
result.append(0x1b);
n = 1;
break;
case 'c': // "c@" ... "cZ" -> 0 ... 26
if (p[1] >= 64 && p[1] <= 90) {
result.append(p[1] - 64);
n = 2;
}
break;
case 'x':
// Hex escape; strtoul consumes as many hex digits as it finds, and
// endptr tells us how much of the input (including 'x') was used
if (isxdigit(p[1])) {
result.append((char) strtoul(p + 1, &endptr, 16));
n = endptr - p;
}
break;
case '0':
// Octal escape starting at the '0' itself (e.g. "\0", "\012")
result.append((char) strtoul(p, &endptr, 8));
n = endptr - p;
break;
}
return n;
}
/* ---------------------- Interpolator::Interpolator ---------------------- */
// Start with an empty buffer; allocation is deferred to the first require().
Interpolator::Interpolator(Context& contextArg)
: context(contextArg),
p(NULL),
pos(0),
size(0)
{ }
/* ------------------------ Interpolator::require ------------------------- */
/*
 * Ensure the buffer can hold len more bytes (plus the trailing nul) and
 * return the write position. Returns NULL on out of memory, in which case
 * size is reset to 0 and the old buffer is left to the pool.
 */
char *Interpolator::require(int len)
{
    // Ensure we have space for len bytes plus a trailing nul
    int required = pos + len + 1;
    if (size < required) {
        // Grow geometrically so building a string from many small fragments
        // is O(n) amortized instead of O(n^2) in realloc traffic
        int newsize = size ? size * 2 : required;
        if (newsize < required)
            newsize = required;
        char *np = (char *) pool_realloc(context.pool, p, newsize);
        if (np == NULL) {
            size = 0;
            return NULL;
        }
        p = np;
        size = newsize;
    }
    return p + pos;
}
/* ------------------------ Interpolator::advance ------------------------- */
void Interpolator::advance(int len)
{
pos += len;
PR_ASSERT(pos < size);
}
/* ----------------------- Interpolator::terminate ------------------------ */
char *Interpolator::terminate()
{
if (require(0) == NULL)
return NULL;
p[pos] = '\0';
return p;
}
/* -------------------------- FragmentInvariant --------------------------- */
/*
* FragmentInvariant is a Fragment whose value is a simple string.
*/
class FragmentInvariant : public Fragment {
public:
FragmentInvariant(const NSString& s);
Fragment *dup() const;
PRStatus interpolate(Interpolator& interpolator) const;
private:
NSString s; // the literal text, copied at construction
};
// Copy the fragment text so this object owns its own storage.
FragmentInvariant::FragmentInvariant(const NSString& sArg)
: s(sArg.data(), sArg.length())
{ }
FragmentInvariant::~FragmentInvariant
Fragment *FragmentInvariant::dup() const
{
return new FragmentInvariant(s);
}
// Interpolation of invariant text is a straight copy into the buffer.
PRStatus FragmentInvariant::interpolate(Interpolator& interpolator) const
{
int len = s.length();
char *p = interpolator.require(len);
if (p == NULL)
return PR_FAILURE;
memcpy(p, s.data(), len);
interpolator.advance(len);
return PR_SUCCESS;
}
/* -------------------------- FragmentExpression -------------------------- */
/*
* FragmentExpression is a Fragment whose value is the result of evaluating an
* expression.
*/
class FragmentExpression : public Fragment {
public:
FragmentExpression(Expression *e);
~FragmentExpression();
Fragment *dup() const;
PRStatus interpolate(Interpolator& interpolator) const;
private:
Expression *e; // owned; freed in the destructor
};
// Assumes ownership of the passed expression.
FragmentExpression::FragmentExpression(Expression *eArg)
: e(eArg)
{ }
FragmentExpression::~FragmentExpression()
{
expr_free(e);
}
Fragment *FragmentExpression::dup() const
{
return new FragmentExpression(expr_dup(e));
}
// Evaluate the expression in the interpolator's context and append its
// string form. Evaluation errors are propagated via the NSPR error state.
PRStatus FragmentExpression::interpolate(Interpolator& interpolator) const
{
Result result = e->evaluate(interpolator.context);
if (result.isError()) {
result.setNsprError();
return PR_FAILURE;
}
const char *s = result.getConstString();
int len = result.getStringLength();
char *p = interpolator.require(len);
if (p == NULL)
return PR_FAILURE;
memcpy(p, s, len);
interpolator.advance(len);
return PR_SUCCESS;
}
/* ----------------------- ModelString::ModelString ----------------------- */
// A new model starts invariant (plain text) and non-interpolative; the add*
// methods flip these flags as fragments are appended.
ModelString::ModelString()
: invariant(PR_TRUE),
interpolative(PR_FALSE),
estimate(0)
{
unescaped.setGrowthSize(NSString::SMALL_STRING);
uninterpolated.setGrowthSize(NSString::SMALL_STRING);
}
/* ---------------------- ModelString::~ModelString ----------------------- */
// The model owns its fragments; delete each one.
ModelString::~ModelString()
{
for (int i = 0; i < fragments.length(); i++)
delete fragments[i];
}
/* --------------------------- ModelString::dup --------------------------- */
// Expression interface: duplicate, returned as the base Expression type.
Expression *ModelString::dup() const
{
return dupModelString();
}
/* --------------------- ModelString::dupModelString ---------------------- */
// Deep-copy the model: flags, cached strings, estimate, and every fragment.
// complete() is re-run so the copy's const-string shortcut is set up.
ModelString *ModelString::dupModelString() const
{
ModelString *model = new ModelString();
model->invariant = invariant;
model->interpolative = interpolative;
model->unescaped = unescaped;
model->uninterpolated = uninterpolated;
model->estimate = estimate;
for (int i = 0; i < fragments.length(); i++)
model->fragments.append(fragments[i]->dup());
model->complete();
return model;
}
/* ---------------------- ModelString::addNonescaped ---------------------- */
// Append literal text: identical in the unescaped and uninterpolated views,
// and represented by an invariant fragment.
void ModelString::addNonescaped(const NSString& s)
{
unescaped.append(s);
uninterpolated.append(s);
fragments.append(new FragmentInvariant(s));
}
/* -------------------- ModelString::addEscapedDollar --------------------- */
// Append a "$$" escape: a single '$' in the unescaped/interpolated output,
// "$$" in the uninterpolated source view. The model stays invariant but is
// marked interpolative (the source form differs from the output form).
void ModelString::addEscapedDollar()
{
interpolative = PR_TRUE;
unescaped.append('$');
uninterpolated.append("$$");
fragments.append(new FragmentInvariant("$"));
}
/* ---------------------- ModelString::addExpression ---------------------- */
// Append an evaluated fragment (variable, map access, or expression).
// s is the fragment's source text; the model assumes ownership of e.
void ModelString::addExpression(const NSString& s, Expression *e)
{
invariant = PR_FALSE;
interpolative = PR_TRUE;
unescaped.append(s);
uninterpolated.append(s);
fragments.append(new FragmentExpression(e));
}
/* ------------------------ ModelString::complete ------------------------- */
// Finish construction: invariant models expose their unescaped text as a
// constant string (letting interpolate/evaluate short-circuit), and the
// initial buffer-size estimate is seeded from the unescaped length.
void ModelString::complete()
{
if (invariant)
setConstString(unescaped);
estimate = unescaped.length();
}
/* ----------------------- ModelString::interpolate ----------------------- */
/*
 * Interpolate the model in context. On success *pp/*plen receive the result
 * and *ppool the pool it lives in (NULL when the result is the model's own
 * invariant text). Fragment failures do not abort the scan: the first
 * error is preserved and PR_FAILURE returned, but unless EVERY fragment
 * failed, *pp still receives the partially-interpolated string.
 */
PRStatus ModelString::interpolate(Context& context, const char **pp, int *plen, pool_handle_t **ppool) const
{
PR_ASSERT(invariant == (getConstString() != NULL));
// If the string model is invariant...
if (invariant) {
// The interpolated version is just a simple unescaped string. Yay!
*pp = unescaped.data();
*plen = unescaped.length();
*ppool = NULL;
return PR_SUCCESS;
}
// We need to actually interpolate things. Start with a buffer we think
// will be big enough.
Interpolator interpolator(context);
interpolator.require(estimate);
// Interpolate the individual fragments
NsprError error;
int nerrors = 0;
int nfragments = fragments.length();
for (int i = 0; i < nfragments; i++) {
if (fragments[i]->interpolate(interpolator) == PR_FAILURE) {
// Record the error but keep on trucking
if (nerrors == 0)
error.save();
nerrors++;
}
}
// We're done. Get a pointer to the complete interpolated result.
char *p = interpolator.terminate();
int len = interpolator.length();
// Remember how big a buffer the interpolated result required so that we
// can avoid future reallocs
if (len > estimate)
estimate = len;
if (p == NULL) {
// Out of memory
*pp = NULL;
*plen = 0;
*ppool = NULL;
return PR_FAILURE;
}
if (nerrors == nfragments) {
// The string was a total write-off, e.g. "$undefined"
PR_ASSERT(nerrors > 0);
*pp = NULL;
*plen = 0;
*ppool = NULL;
} else {
// Some of the string fragments were okay, e.g. "[$undefined]"
*pp = p;
*plen = len;
*ppool = context.pool;
}
if (nerrors > 0) {
// Report the first fragment error even when a partial result was produced
error.restore();
return PR_FAILURE;
}
return PR_SUCCESS;
}
/* ------------------------ ModelString::evaluate ------------------------- */
/*
 * Expression interface: interpolate the model and wrap the outcome in a
 * Result — an NSPR-error Result on failure, a pooled string when the
 * interpolation allocated from the context pool, or a string constant when
 * the model's own invariant text was returned.
 */
Result ModelString::evaluate(Context& context) const
{
    const char *str;
    int len;
    pool_handle_t *pool;

    // Failure: surface the NSPR error as an expression Result
    if (interpolate(context, &str, &len, &pool) == PR_FAILURE)
        return context.createNsprErrorResult();

    // No pool means the model handed back its own invariant storage
    if (pool == NULL)
        return context.createStringConstantResult(str, len);

    // Otherwise the string was built in the context's pool
    PR_ASSERT(pool == context.pool);
    return context.createPooledStringResult((char *) str, len);
}
/* ------------------------- ModelString::format -------------------------- */
/*
 * Append a double-quoted, backslash-escaped rendering of the model's
 * uninterpolated source text to formatted. precedence is unused: a quoted
 * string never needs parenthesization.
 */
void ModelString::format(NSString& formatted, Precedence precedence) const
{
    formatted.append('"');

    const char *src = uninterpolated.data();
    const int len = uninterpolated.length();
    for (int i = 0; i < len; i++) {
        // Emit an escape sequence where required, the raw character otherwise
        if (!backslash(src[i], formatted))
            formatted.append(src[i]);
    }

    formatted.append('"');
}
/* ---------------------------- model_unescape ---------------------------- */
/*
 * Return a STRDUP'd copy of s with backslash escapes decoded. When
 * interpolative is set, "\$" is preserved as the "$$" model escape so a
 * later model scan treats it as a literal dollar. Returns NULL (with an
 * NSPR error set) on an unrecognized escape sequence.
 */
static char *model_unescape(const char *s, PRBool interpolative)
{
NSString unescaped;
while (*s != '\0') {
if (*s == '\\') {
s++;
// "\$" survives as "$$" only in interpolative strings
if (*s == '$' && interpolative) {
unescaped.append("$$");
s++;
continue;
}
int n = unbackslash(s, unescaped);
if (n > 0) {
s += n;
continue;
}
// Unrecognized escape: report it printably when possible
if (isprint(*s)) {
NsprError::setErrorf(PR_INVALID_ARGUMENT_ERROR, XP_GetAdminStr(DBT_badEscapeCharX), *s);
} else {
NsprError::setErrorf(PR_INVALID_ARGUMENT_ERROR, XP_GetAdminStr(DBT_badEscapeIntX), *s);
}
return NULL;
}
unescaped.append(*s);
s++;
}
return STRDUP(unescaped);
}
/* --------------------- model_unescape_interpolative --------------------- */
// Unescape s, preserving "\$" as the "$$" model escape.
char *model_unescape_interpolative(const char *s)
{
return model_unescape(s, PR_TRUE);
}
/* ------------------- model_unescape_noninterpolative -------------------- */
// Unescape s with no special treatment of "\$".
char *model_unescape_noninterpolative(const char *s)
{
return model_unescape(s, PR_FALSE);
}
/* --------------------------- model_scan_expr ---------------------------- */
/*
 * Scan a "$(expr)" fragment. s points at the opening '('. The expression is
 * parsed (inclusive of the closing ')') only to validate it and find its
 * extent, then discarded; scanned records the raw source span. Returns the
 * position just past the ')' or NULL on a parse error.
 */
static const char *model_scan_expr(const char *s, ScannedFragment *scanned)
{
PR_ASSERT(*s == '(');
TokenizerStringCharSource source(s, strlen(s));
Expression *expr = expr_scan_inclusive(source, ")");
if (expr == NULL)
return NULL;
expr_free(expr);
PR_ASSERT(s[source.getOffset() - 1] == ')');
scanned->type = FRAGMENT_TYPE_EXPRESSION;
scanned->expression.p = s;
scanned->expression.len = source.getOffset();
return s + source.getOffset();
}
/* ------------------------- model_scan_subscript ------------------------- */
/*
 * Scan the "{expr}" subscript of a map fragment. s points just past the
 * opening '{'. The subscript is parsed (exclusive of the closing '}') only
 * for validation; scanned->subscript records the raw span without the '}'.
 * Returns the position just past the '}' or NULL on a parse error.
 */
static const char *model_scan_subscript(const char *s, ScannedFragment *scanned)
{
TokenizerStringCharSource source(s, strlen(s));
Expression *subscript = expr_scan_exclusive(source, "}");
if (subscript == NULL)
return NULL;
expr_free(subscript);
PR_ASSERT(s[source.getOffset() - 1] == '}');
PR_ASSERT(scanned->type == FRAGMENT_TYPE_MAP);
scanned->subscript.p = s;
scanned->subscript.len = source.getOffset() - 1;
return s + source.getOffset();
}
/* ------------------------ model_scan_var_or_map ------------------------- */
/*
 * Scan a variable or map fragment. s points just past "$" (closing == NULL)
 * or "${" (closing == "}"); bracketed forms tolerate surrounding
 * whitespace. Fills scanned with the identifier (and subscript for maps)
 * and returns the position just past the fragment, or NULL with an NSPR
 * error set on a syntax error.
 */
static const char *model_scan_var_or_map(const char *s, const char *closing, ScannedFragment *scanned)
{
// Skip whitespace following "${" in bracketed ${fragment}s
if (closing) {
while (isspace(*s))
s++;
}
// Extract the identifier from "$identifier", "${identifier}",
// "$identifier{expr}", or "${identifier{expr}}"
// Identifiers may be "&", a run of digits, or a normal identifier
const char *p = s;
if (*p == '&') {
p++;
} else if (isdigit(*p)) {
do p++; while (isdigit(*p));
} else if (expr_leading_identifier_char(*p)) {
do p++; while (expr_nonleading_identifier_char(*p));
} else {
NsprError::setErrorf(PR_INVALID_ARGUMENT_ERROR, XP_GetAdminStr(DBT_syntaxErrorNearX), "$");
return NULL;
}
scanned->identifier.p = s;
scanned->identifier.len = p - s;
// Skip whitespace following identifier in bracketed ${fragment}s
if (closing) {
while (isspace(*p))
p++;
}
// Only proper identifiers (not "&" or digit runs) can name a map
if (*p == '{' && expr_is_identifier(scanned->identifier.p, scanned->identifier.len)) {
// "$map{expr}" or "${map{expr}}"
scanned->type = FRAGMENT_TYPE_MAP;
p = model_scan_subscript(p + 1, scanned);
if (p == NULL)
return NULL;
} else {
// "$var" or "${var}"
scanned->type = FRAGMENT_TYPE_VARIABLE;
}
// Look for closing '}' in bracketed ${fragment}s
if (closing) {
while (isspace(*p))
p++;
if (*p != *closing) {
if (isprint(*p)) {
NsprError::setErrorf(PR_INVALID_ARGUMENT_ERROR, XP_GetAdminStr(DBT_unexpectedCharX), *p);
} else {
NsprError::setErrorf(PR_INVALID_ARGUMENT_ERROR, XP_GetAdminStr(DBT_missingClosingCharX), *closing);
}
return NULL;
}
p++;
}
return p;
}
/* ------------------------------ model_scan ------------------------------ */
/*
 * Scan one fragment from the non-empty string s, filling scanned. Literal
 * text runs up to the next '$'; a trailing '$' is treated as literal; "$$"
 * is the escaped-dollar fragment; otherwise the '$' introduces an
 * expression, variable, or map fragment. Returns the position just past
 * the fragment, or NULL (with an NSPR error set) on a syntax error.
 */
static const char *model_scan(const char *s, ScannedFragment *scanned)
{
PR_ASSERT(*s != '\0');
if (*s != '$') {
// Scan the invariant fragment
const char *p = s;
do p++; while (*p != '\0' && *p != '$');
// Found simple text (no $fragments or "$$" sequences)
scanned->type = FRAGMENT_TYPE_NONESCAPED;
scanned->text.p = s;
scanned->text.len = p - s;
return p;
}
if (s[1] == '\0') {
// Found trailing '$'
scanned->type = FRAGMENT_TYPE_NONESCAPED;
scanned->text.p = s;
scanned->text.len = 1;
return s + 1;
}
if (s[1] == '$') {
// Found "$$" escape sequence
scanned->type = FRAGMENT_TYPE_ESCAPED_DOLLAR;
scanned->text.p = s;
scanned->text.len = 1;
return s + 2;
}
// Scan $fragment
switch (s[1]) {
case '(': return model_scan_expr(s + 1, scanned);
case '{': return model_scan_var_or_map(s + 2, "}", scanned);
default: return model_scan_var_or_map(s + 1, NULL, scanned);
}
}
/* ------------------------- model_fragment_scan -------------------------- */
/*
 * Return the length in bytes of the first fragment of f, 0 for an empty
 * string, or -1 (with an NSPR error set) on a syntax error.
 */
int model_fragment_scan(const char *f)
{
    // An empty string contains no fragment at all
    if (*f == '\0')
        return 0;

    ScannedFragment scanned;
    const char *end = model_scan(f, &scanned);
    if (end == NULL)
        return -1;

    return end - f;
}
/* --------------------- model_fragment_is_invariant ---------------------- */
/*
 * Indicate whether the first fragment of f is invariant (literal text or a
 * "$$" escape). An empty string counts as an invariant fragment of length
 * 0. On PR_TRUE, *ptext/*plen (when non-NULL) receive the fragment's raw
 * source span, including both characters of a "$$" escape's source form.
 */
PRBool model_fragment_is_invariant(const char *f, const char **ptext, int *plen)
{
const char *p = f;
if (*p != '\0') {
ScannedFragment scanned;
p = model_scan(p, &scanned);
if (p == NULL)
return PR_FALSE;
if (scanned.type != FRAGMENT_TYPE_NONESCAPED && scanned.type != FRAGMENT_TYPE_ESCAPED_DOLLAR)
return PR_FALSE;
}
if (ptext)
*ptext = f;
if (plen)
*plen = p - f;
return PR_TRUE;
}
/* ---------------------- model_fragment_is_var_ref ----------------------- */
/*
 * Indicate whether the first fragment of f is a simple variable reference
 * ("$var" or "${var}"). On PR_TRUE, *pname/*plen (when non-NULL) receive
 * the variable identifier's span within f.
 */
PRBool model_fragment_is_var_ref(const char *f, const char **pname, int *plen)
{
    // A variable reference must begin with '$'
    if (*f != '$')
        return PR_FALSE;

    ScannedFragment scanned;
    if (model_scan(f, &scanned) == NULL)
        return PR_FALSE;
    if (scanned.type != FRAGMENT_TYPE_VARIABLE)
        return PR_FALSE;

    if (pname != NULL)
        *pname = scanned.identifier.p;
    if (plen != NULL)
        *plen = scanned.identifier.len;

    return PR_TRUE;
}
/* -------------------------- model_str_fragment -------------------------- */
/*
 * Append one scanned fragment to model. p/len give the fragment's raw
 * source text (used as the uninterpolated form for evaluated fragments).
 * Returns PR_FAILURE if the fragment's expression cannot be built.
 */
static PRStatus model_str_fragment(ModelString *model, const char *p, int len, ScannedFragment *scanned)
{
    switch (scanned->type) {
    case FRAGMENT_TYPE_NONESCAPED:
        model->addNonescaped(NSString(scanned->text.p, scanned->text.len));
        return PR_SUCCESS;

    case FRAGMENT_TYPE_ESCAPED_DOLLAR:
        model->addEscapedDollar();
        return PR_SUCCESS;

    case FRAGMENT_TYPE_VARIABLE: {
        // "$var" becomes a variable-reference expression
        NSString name(scanned->identifier.p, scanned->identifier.len);
        Expression *expr = expr_new_variable(name);
        if (expr == NULL)
            return PR_FAILURE;
        model->addExpression(NSString(p, len), expr);
        return PR_SUCCESS;
    }

    case FRAGMENT_TYPE_MAP: {
        // "$map{expr}" becomes an access expression; the subscript is
        // parsed first so it can be freed if the access node fails
        NSString sub(scanned->subscript.p, scanned->subscript.len);
        Expression *subscript = expr_create(sub);
        if (subscript == NULL)
            return PR_FAILURE;
        NSString name(scanned->identifier.p, scanned->identifier.len);
        Expression *expr = expr_new_access(name, subscript);
        if (expr == NULL) {
            expr_free(subscript);
            return PR_FAILURE;
        }
        model->addExpression(NSString(p, len), expr);
        return PR_SUCCESS;
    }

    case FRAGMENT_TYPE_EXPRESSION: {
        // "$(expr)" is parsed as a full expression
        NSString text(scanned->expression.p, scanned->expression.len);
        Expression *expr = expr_create(text);
        if (expr == NULL)
            return PR_FAILURE;
        model->addExpression(NSString(p, len), expr);
        return PR_SUCCESS;
    }
    }

    // Unreachable: every FragmentType is handled above
    PR_ASSERT(0);
    return PR_FAILURE;
}
/* --------------------------- model_str_create --------------------------- */
/*
 * Parse s into a new string model, scanning and appending one fragment at a
 * time. Returns NULL (with an NSPR error set by the scanner) on a syntax or
 * expression error; the partially built model is destroyed in that case.
 */
ModelString *model_str_create(const char *s)
{
ModelString *model = new ModelString();
while (*s != '\0') {
ScannedFragment scanned;
const char *f = s;
s = model_scan(f, &scanned);
if (s == NULL) {
delete model;
return NULL;
}
// model_scan always consumes at least one character
PR_ASSERT(s > f);
PRStatus rv = model_str_fragment(model, f, s - f, &scanned);
if (rv != PR_SUCCESS) {
delete model;
return NULL;
}
}
model->complete();
return model;
}
/* ---------------------------- model_str_dup ----------------------------- */
// C-style wrapper: deep-copy a string model.
ModelString *model_str_dup(const ModelString *model)
{
return model->dupModelString();
}
/* ---------------------------- model_str_free ---------------------------- */
// C-style wrapper: destroy a string model and its fragments.
void model_str_free(ModelString *model)
{
delete model;
}
/* ------------------------ model_str_interpolate ------------------------- */

// Evaluates model against the given session/request and returns the
// interpolated string via *pp/*plen.  The Context is built on the request
// pool.  NOTE(review): interpolate() receives the *address* of the local
// pool handle; presumably so it can communicate which pool the returned
// string was allocated from -- confirm against ModelString::interpolate.
int model_str_interpolate(const ModelString *model, Session *sn, Request *rq, const char **pp, int *plen)
{
    pool_handle_t *pool = request_pool(rq);
    Context context(sn, rq, pool);
    return model->interpolate(context, pp, plen, &pool);
}
/* ------------------------- Parameter::Parameter ------------------------- */

// Copies nameArg with PERM_STRDUP and takes ownership of valueArg.  The
// pblock key is looked up once here so interpolation can use the faster
// keyed pblock operations when the name maps to a known key.
Parameter::Parameter(const char *nameArg, ModelString *valueArg)
: name(PERM_STRDUP(nameArg)),
  key(pblock_key(name)),
  value(valueArg)
{ }
/* ------------------------ Parameter::~Parameter ------------------------- */

// Releases the duplicated name and the owned ModelString value.
Parameter::~Parameter()
{
    PERM_FREE(name);
    delete value;
}
/* ---------------------------- Parameter::dup ---------------------------- */

// Deep copy: the constructor re-duplicates the name, and the value model
// is cloned so the copy's lifetime is independent of the original.
Parameter *Parameter::dup() const
{
    return new Parameter(name, value->dupModelString());
}
/* ------------------------ Parameter::interpolate ------------------------ */

// Evaluates this parameter's value model in context and inserts the
// resulting name/value pair into pb.  The ordering matters: the pb_param
// is created first (keyed variant when a precomputed key exists), its
// value is filled in from the evaluated Result, and only then is it
// inserted into the pblock.  On an evaluation error the NSPR error is set
// from the Result.  NOTE(review): on the failure paths after pp was
// created, pp is not inserted or explicitly freed; presumably it is
// reclaimed with the pblock's pool -- confirm.
inline PRStatus Parameter::interpolate(Context& context, pblock *pb) const
{
    pb_param *pp;
    if (key) {
        pp = pblock_key_param_create(pb, key, NULL, 0);
    } else {
        pp = pblock_param_create(pb, name, NULL);
    }
    if (!pp)
        return PR_FAILURE;

    Result result = value->evaluate(context);
    if (result.isError()) {
        result.setNsprError();
        return PR_FAILURE;
    }

    pp->value = result.getPooledString(context.pool);
    if (!pp->value)
        return PR_FAILURE;

    if (key) {
        pblock_kpinsert(key, pp, pb);
    } else {
        pblock_pinsert(pp, pb);
    }

    return PR_SUCCESS;
}
/* ----------------------- ModelPblock::ModelPblock ----------------------- */

// A new pblock model starts out non-interpolative; addParameter() flips
// the flag as soon as any value requires runtime interpolation.
ModelPblock::ModelPblock()
: interpolative(PR_FALSE)
{ }
/* ---------------------- ModelPblock::~ModelPblock ----------------------- */

// Destroys every owned Parameter (each of which frees its own name/value).
ModelPblock::~ModelPblock()
{
    for (int i = 0; i < parameters.length(); i++)
        delete parameters[i];
}
/* --------------------------- ModelPblock::dup --------------------------- */

// Deep copy: duplicates the interpolative flag and clones every parameter.
ModelPblock *ModelPblock::dup() const
{
    ModelPblock *model = new ModelPblock();
    model->interpolative = interpolative;
    for (int i = 0; i < parameters.length(); i++)
        model->parameters.append(parameters[i]->dup());
    return model;
}
// Creates a new pblock in the context pool (pre-sized to the parameter
// count) and interpolates every parameter into it.  Returns NULL when
// pblock creation fails or any parameter fails to interpolate.
// NOTE(review): on a mid-loop failure the partially filled pblock is
// abandoned; it is pool-allocated, so presumably it is reclaimed when the
// pool is destroyed -- confirm.
inline pblock *ModelPblock::interpolate(Context& context) const
{
    pblock *result = pblock_create_pool(context.pool, parameters.length());
    if (result != NULL) {
        for (int i = 0; i < parameters.length(); i++) {
            if (parameters[i]->interpolate(context, result) == PR_FAILURE)
                return NULL;
        }
    }
    return result;
}
/* ---------------------- ModelPblock::addParameter ----------------------- */

// Appends a parameter, taking ownership of value.  The whole pblock model
// becomes interpolative as soon as any single value needs interpolation.
void ModelPblock::addParameter(const char *name, ModelString *value)
{
    if (value->isInterpolative())
        interpolative = PR_TRUE;
    parameters.append(new Parameter(name, value));
}
/* --------------------------- model_pb_create ---------------------------- */

// Builds a ModelPblock from an existing pblock by compiling each
// parameter value into a ModelString.  magnus-internal parameters are
// skipped.  Returns NULL for NULL input, or NULL with the NSPR error set
// (naming the offending parameter) when any value fails to parse.
ModelPblock *model_pb_create(const pblock *pb)
{
    if (pb == NULL)
        return NULL;

    ModelPblock *model = new ModelPblock();

    // Walk every hash bucket and every chained entry within it.
    for (int hi = 0; hi < pb->hsize; hi++) {
        for (pb_entry *p = pb->ht[hi]; p != NULL; p = p->next) {
            if (param_key(p->param) != pb_key_magnus_internal) {
                ModelString *value = model_str_create(p->param->value);
                if (value == NULL) {
                    NsprError::setErrorf(PR_INVALID_ARGUMENT_ERROR, XP_GetAdminStr(DBT_paramNameXErrorY), p->param->name, system_errmsg());
                    delete model;
                    return NULL;
                }
                model->addParameter(p->param->name, value);
            }
        }
    }

    return model;
}
/* ----------------------------- model_pb_dup ----------------------------- */

// Deep-copies a ModelPblock.  NULL input yields NULL.
ModelPblock *model_pb_dup(const ModelPblock *model)
{
    if (model == NULL)
        return NULL;

    return model->dup();
}
/* ---------------------------- model_pb_free ----------------------------- */

// Destroys a ModelPblock from model_pb_create()/model_pb_dup().
// Safe to call with NULL (delete of NULL is a no-op).
void model_pb_free(ModelPblock *model)
{
    delete model;
}
/* --------------------- model_pb_is_noninterpolative --------------------- */

// Returns PR_TRUE when no parameter value needs runtime interpolation
// (i.e. the pblock can be reused verbatim).  A NULL model is trivially
// non-interpolative.
PRBool model_pb_is_noninterpolative(const ModelPblock *model)
{
    if (model == NULL)
        return PR_TRUE;

    return !model->isInterpolative();
}
/* ------------------------- model_pb_interpolate ------------------------- */

// Interpolates every parameter of model against the given session/request
// and returns a new pblock allocated from the request pool, or NULL on
// failure.  Accepts a NULL model for consistency with the other
// model_pb_* entry points (model_pb_dup, model_pb_free,
// model_pb_is_noninterpolative), all of which tolerate NULL; previously a
// NULL model was dereferenced here.
pblock *model_pb_interpolate(const ModelPblock *model, Session *sn, Request *rq)
{
    if (model == NULL)
        return NULL;

    Context context(sn, rq, request_pool(rq));
    return model->interpolate(context);
}
|
player999/cryptonite | src/pkix/c/struct/CrlIdentifier.c | <gh_stars>10-100
/*
* Copyright (c) 2016 PrivatBank IT <<EMAIL>>. All rights reserved.
* Redistribution and modifications are permitted subject to BSD license.
*/
#include "CrlIdentifier.h"
#include "asn_internal.h"
#undef FILE_MARKER
#define FILE_MARKER "pkix/struct/CrlIdentifier.c"
/* Generated ASN.1 member table for the CrlIdentifier SEQUENCE:
 * crlissuer is a Name (a CHOICE, hence the ambiguous -1 tag),
 * crlIssuedTime is a UTCTime, and crlNumber is an OPTIONAL INTEGER
 * (ATF_POINTER, 1 optional member). */
static asn_TYPE_member_t asn_MBR_CrlIdentifier_1[] = {
    {
        ATF_NOFLAGS, 0, offsetof(struct CrlIdentifier, crlissuer),
        -1 /* Ambiguous tag (CHOICE?) */,
        0,
        &Name_desc,
        0, /* Defer constraints checking to the member type */
        0, /* PER is not compiled, use -gen-PER */
        0,
        "crlissuer"
    },
    {
        ATF_NOFLAGS, 0, offsetof(struct CrlIdentifier, crlIssuedTime),
        (ASN_TAG_CLASS_UNIVERSAL | (23 << 2)),
        0,
        &UTCTime_desc,
        0, /* Defer constraints checking to the member type */
        0, /* PER is not compiled, use -gen-PER */
        0,
        "crlIssuedTime"
    },
    {
        ATF_POINTER, 1, offsetof(struct CrlIdentifier, crlNumber),
        (ASN_TAG_CLASS_UNIVERSAL | (2 << 2)),
        0,
        &INTEGER_desc,
        0, /* Defer constraints checking to the member type */
        0, /* PER is not compiled, use -gen-PER */
        0,
        "crlNumber"
    },
};
/* Outermost tag of the type: UNIVERSAL 16 (SEQUENCE). */
static const ber_tlv_tag_t CrlIdentifier_desc_tags_1[] = {
    (ASN_TAG_CLASS_UNIVERSAL | (16 << 2))
};
/* Tag-to-member map used by the BER decoder, sorted by tag value;
 * the second field is the index into asn_MBR_CrlIdentifier_1. */
static const asn_TYPE_tag2member_t asn_MAP_CrlIdentifier_tag2el_1[] = {
    { (ASN_TAG_CLASS_UNIVERSAL | (2 << 2)), 2, 0, 0 }, /* crlNumber */
    { (ASN_TAG_CLASS_UNIVERSAL | (16 << 2)), 0, 0, 0 }, /* rdnSequence */
    { (ASN_TAG_CLASS_UNIVERSAL | (23 << 2)), 1, 0, 0 } /* crlIssuedTime */
};
/* SEQUENCE-level specifics: struct size, context offset, the tag map
 * above, and no extension markers (extensions start/stop are -1). */
static asn_SEQUENCE_specifics_t asn_SPC_CrlIdentifier_specs_1 = {
    sizeof(struct CrlIdentifier),
    offsetof(struct CrlIdentifier, _asn_ctx),
    asn_MAP_CrlIdentifier_tag2el_1,
    3, /* Count of tags in the map */
    0, 0, 0, /* Optional elements (not needed) */
    -1, /* Start extensions */
    -1 /* Stop extensions */
};
/* Type descriptor wiring the generic SEQUENCE codecs (BER/DER/XER) to the
 * member table and specifics defined above.  Generated code. */
asn_TYPE_descriptor_t CrlIdentifier_desc = {
    "CrlIdentifier",
    "CrlIdentifier",
    SEQUENCE_free,
    SEQUENCE_print,
    SEQUENCE_constraint,
    SEQUENCE_decode_ber,
    SEQUENCE_encode_der,
    SEQUENCE_decode_xer,
    SEQUENCE_encode_xer,
    0, 0, /* No PER support, use "-gen-PER" to enable */
    0, /* Use generic outmost tag fetcher */
    CrlIdentifier_desc_tags_1,
    sizeof(CrlIdentifier_desc_tags_1)
    / sizeof(CrlIdentifier_desc_tags_1[0]), /* 1 */
    CrlIdentifier_desc_tags_1, /* Same as above */
    sizeof(CrlIdentifier_desc_tags_1)
    / sizeof(CrlIdentifier_desc_tags_1[0]), /* 1 */
    0, /* No PER visible constraints */
    asn_MBR_CrlIdentifier_1,
    3, /* Elements count */
    &asn_SPC_CrlIdentifier_specs_1 /* Additional specs */
};
/* Accessor for the CrlIdentifier type descriptor; always non-NULL. */
asn_TYPE_descriptor_t *get_CrlIdentifier_desc(void)
{
    return &CrlIdentifier_desc;
}
|
olmelabs/battleship-game | client-app/src/components/Account/LoginPage.js | <reponame>olmelabs/battleship-game<filename>client-app/src/components/Account/LoginPage.js<gh_stars>1-10
import React from "react";
import { connect } from "react-redux";
import { Link, Redirect } from "react-router-dom";
import PropTypes from "prop-types";
import { bindActionCreators } from "redux";
import * as actions from "../../actions";
/**
 * Sign-in form. Dispatches the `login` action with the entered credentials
 * and redirects to the originally requested route (or "/") once
 * `authenticated` becomes true in the store.
 */
class LoginPage extends React.Component {
  constructor(props, context) {
    super(props, context);

    // Controlled-form state; `submitted` is flipped on submit.
    this.state = {
      email: "",
      password: "",
      submitted: false
    };

    this.handleChange = this.handleChange.bind(this);
    this.handleSubmit = this.handleSubmit.bind(this);
  }

  // Generic controlled-input handler: stores the input's value under its
  // `name` attribute, so one handler serves both fields.
  handleChange(e) {
    const { name, value } = e.target;
    this.setState({ [name]: value });
  }

  // Prevents the native form POST; dispatches login only when both
  // credentials are non-empty.
  handleSubmit(e) {
    e.preventDefault();

    this.setState({ submitted: true });
    const { email, password } = this.state;
    if (email && password) {
      this.props.actions.login(email, password);
    }
  }

  render() {
    // Post-login destination: the route we were redirected from, or home.
    const { from } = this.props.location.state || { from: { pathname: "/" } };

    if (this.props.authenticated) {
      return <Redirect to={from} />;
    }

    // Warning banner shown only after a failed login attempt.
    const warning = this.props.loginFailed ? (
      <div className="alert alert-warning" role="alert">
        Login failed
      </div>
    ) : (
      ""
    );

    return (
      <React.Fragment>
        <form className="form-signin" onSubmit={this.handleSubmit}>
          <h2 className="form-signin-heading">Please sign in</h2>
          <label htmlFor="email" className="sr-only">
            Email address
          </label>
          <input
            type="email"
            autoComplete="email"
            id="email"
            name="email"
            className="form-control form-top-input"
            placeholder="Email address"
            value={this.state.email}
            required
            autoFocus
            onChange={this.handleChange}
          />
          <label htmlFor="password" className="sr-only">
            Password
          </label>
          <input
            type="password"
            autoComplete="current-password"
            id="password"
            name="password"
            className="form-control form-bottom-input"
            placeholder="Password"
            value={this.state.password}
            required
            onChange={this.handleChange}
          />
          {warning}
          <button className="btn btn-lg btn-primary btn-block" type="submit">
            Sign in
          </button>
          <p>
            <Link to="register">Register</Link> -{" "}
            <Link to="password_reset_link">Forgot Password?</Link>
          </p>
        </form>
      </React.Fragment>
    );
  }
}
// Runtime prop contract: store-derived flags, the bound action creators,
// and the (optional) router location used for post-login redirection.
LoginPage.propTypes = {
  authenticated: PropTypes.bool.isRequired,
  loginFailed: PropTypes.bool.isRequired,
  actions: PropTypes.object.isRequired,
  location: PropTypes.object
};
// Maps auth-slice state to props.  NOTE(review): `ownProps` is declared
// but unused; react-redux keys off the function's arity, so declaring it
// means this mapper re-runs whenever own props change -- confirm whether
// that is intended before removing it.
const mapStateToProps = (state, ownProps) => ({
  authenticated: state.authState.authenticated,
  loginFailed: state.authState.loginFailed
});
// Binds every action creator to the store's dispatch and exposes the
// resulting map as `props.actions`.
const mapDispatchToProps = dispatch => ({
  actions: bindActionCreators(actions, dispatch)
});
// Wire the component to the Redux store.
export default connect(
  mapStateToProps,
  mapDispatchToProps
)(LoginPage);
|
jacktea/FYReader | app/src/main/java/xyz/fycz/myreader/application/SysManager.java | <filename>app/src/main/java/xyz/fycz/myreader/application/SysManager.java
package xyz.fycz.myreader.application;
import android.util.Log;
import xyz.fycz.myreader.common.APPCONST;
import xyz.fycz.myreader.entity.Setting;
import xyz.fycz.myreader.enums.BookSource;
import xyz.fycz.myreader.enums.BookcaseStyle;
import xyz.fycz.myreader.util.CacheHelper;
import xyz.fycz.myreader.webapi.crawler.ReadCrawlerUtil;
import static xyz.fycz.myreader.application.MyApplication.getVersionCode;
/**
 * Central access point for the application-wide {@link Setting} object:
 * loads it from the serialized cache file, keeps it in a static field,
 * and provides default / reset / migration helpers.
 */
public class SysManager {

    // Cached settings instance, lazily loaded from APPCONST.FILE_NAME_SETTING.
    private static Setting mSetting;

    /**
     * Returns the cached settings, loading them from the cache file on
     * first access; falls back to (and persists) the defaults when nothing
     * is cached yet.
     *
     * @return the current settings, never null
     */
    public static Setting getSetting() {
        if (mSetting != null) {
            return mSetting;
        }
        mSetting = (Setting) CacheHelper.readObject(APPCONST.FILE_NAME_SETTING);
        if (mSetting == null) {
            mSetting = getDefaultSetting();
            saveSetting(mSetting);
        }
        return mSetting;
    }

    /**
     * Reads a fresh Setting from the cache file, bypassing the static
     * cache; falls back to (and persists) the defaults when nothing is
     * cached yet.
     *
     * @return a freshly deserialized settings object, never null
     */
    public static Setting getNewSetting() {
        Setting setting = (Setting) CacheHelper.readObject(APPCONST.FILE_NAME_SETTING);
        if (setting == null) {
            setting = getDefaultSetting();
            saveSetting(setting);
        }
        return setting;
    }

    /**
     * Persists the settings to the cache file.
     *
     * @param setting the settings to save
     */
    public static void saveSetting(Setting setting) {
        CacheHelper.saveObject(setting, APPCONST.FILE_NAME_SETTING);
    }

    /**
     * Builds the default settings used on first run or when the cache file
     * is missing/unreadable.
     *
     * @return a new Setting populated with default values
     */
    private static Setting getDefaultSetting() {
        Setting setting = new Setting();
        setting.setDayStyle(true);
        setting.setBookcaseStyle(BookcaseStyle.listMode);
        setting.setNewestVersionCode(getVersionCode());
        setting.setAutoSyn(false);
        setting.setMatchChapter(true);
        setting.setMatchChapterSuitability(0.7f);
        setting.setCatheGap(150);
        setting.setRefreshWhenStart(true);
        setting.setOpenBookStore(true);
        setting.setSettingVersion(APPCONST.SETTING_VERSION);
        setting.setSourceVersion(APPCONST.SOURCE_VERSION);
        setting.setHorizontalScreen(false);
        setting.initReadStyle();
        setting.setCurReadStyleIndex(1);
        return setting;
    }

    // Re-reads the static cache from the settings file (e.g. after the
    // cached file was changed externally).
    public static void regetmSetting() {
        mSetting = (Setting) CacheHelper.readObject(APPCONST.FILE_NAME_SETTING);
    }

    /**
     * Migrates the settings up to the current SETTING_VERSION.  The switch
     * intentionally falls through (no break statements) so that migrations
     * apply cumulatively from the stored version onwards.
     */
    public static void resetSetting() {
        Setting setting = getSetting();
        switch (setting.getSettingVersion()) {
            case 10: default:
                setting.initReadStyle();
                setting.setCurReadStyleIndex(1);
                setting.setSharedLayout(true);
                Log.d("SettingVersion", "" + 10);
            case 11:
                Log.d("SettingVersion", "" + 11);
            case 12:
                Log.d("SettingVersion", "" + 12);
        }
        setting.setSettingVersion(APPCONST.SETTING_VERSION);
        saveSetting(setting);
    }

    /**
     * Migrates the book-source list up to the current SOURCE_VERSION.
     * Like resetSetting(), the switch intentionally falls through so each
     * batch of sources added since the stored version is applied.
     */
    public static void resetSource() {
        Setting setting = getSetting();
        switch (setting.getSourceVersion()) {
            case 0: default:
                ReadCrawlerUtil.addReadCrawler(BookSource.miaobi, BookSource.dstq, BookSource.xs7, BookSource.du1du, BookSource.paiotian);
                ReadCrawlerUtil.removeReadCrawler("cangshu99");
                Log.d("SourceVersion", "" + 0);
            case 1:
                ReadCrawlerUtil.addReadCrawler(BookSource.laoyao, BookSource.xingxing, BookSource.shiguang, BookSource.xiagu, BookSource.hongchen);
                Log.d("SourceVersion", "" + 1);
            case 2:
                ReadCrawlerUtil.addReadCrawler(BookSource.rexue, BookSource.chuanqi);
                Log.d("SourceVersion", "" + 2);
        }
        setting.setSourceVersion(APPCONST.SOURCE_VERSION);
        saveSetting(setting);
    }
}
|
haimadrian/SE-Java-Project | Code/src/main/java/org/spa/view/alert/AlertColumn.java | <gh_stars>1-10
package org.spa.view.alert;
import org.spa.view.table.TableColumnIfc;
import org.spa.view.table.editor.TextCellEditor;
import org.spa.view.table.renderer.DateCellRenderer;
import org.spa.view.table.renderer.ImageCellRenderer;
import org.spa.view.table.renderer.TextCellRenderer;
import javax.swing.*;
import javax.swing.table.TableCellEditor;
import javax.swing.table.TableCellRenderer;
/**
 * Column definitions for the alerts table: a severity icon, an editable
 * description, and a timestamp.  Each constant carries its header text,
 * relative width, value class, renderer and (optional) editor; the enum's
 * ordinal doubles as the column index.
 *
 * @author <NAME>
 * @since 15-May-20
 */
public enum AlertColumn implements TableColumnIfc {
   Severity("Severity", 0.13, ImageIcon.class, new ImageCellRenderer(), null, false),
   Message("Description", 0.64, String.class, new TextCellRenderer(), new TextCellEditor(true), true),
   Date("Time", 0.23, java.util.Date.class, new DateCellRenderer(), null, false);

   private final String header;              // column header text
   private final double cellWidth;           // relative width (fraction of table width)
   private final Class<?> columnClass;       // value class reported to the table model
   private final TableCellRenderer renderer; // renderer for this column's cells
   private final TableCellEditor editor;     // editor; defaults to TextCellEditor
   private final boolean isEditable;         // whether cells in this column can be edited

   // A null editor falls back to a plain TextCellEditor.
   AlertColumn(String header, double cellWidth, Class<?> columnClass, TableCellRenderer renderer, TableCellEditor editor, boolean isEditable) {
      this.header = header;
      this.cellWidth = cellWidth;
      this.columnClass = columnClass;
      this.renderer = renderer;
      this.editor = editor == null ? new TextCellEditor() : editor;
      this.isEditable = isEditable;
   }

   // The declaration order defines the column order.
   @Override
   public int getColIndex() {
      return ordinal();
   }

   @Override
   public String getHeader() {
      return header;
   }

   // The enum constant's name is used as the model attribute name.
   @Override
   public String getAttributeName() {
      return name();
   }

   @Override
   public double getWidth() {
      return cellWidth;
   }

   @Override
   public boolean isEditable() {
      return isEditable;
   }

   // Values are displayed as-is; no per-column conversion.
   @Override
   public Object formatValueForTable(Object value) {
      return value;
   }

   @Override
   public Class<?> getColumnClass() {
      return columnClass;
   }

   @Override
   public TableCellRenderer getCellRenderer() {
      return renderer;
   }

   @Override
   public TableCellEditor getCellEditor() {
      return editor;
   }
}
|
sthagen/Jeffail-benthos | internal/component/output/config_cassandra.go | package output
import (
"github.com/gocql/gocql"
"github.com/benthosdev/benthos/v4/internal/batch/policy/batchconfig"
"github.com/benthosdev/benthos/v4/internal/old/util/retries"
btls "github.com/benthosdev/benthos/v4/internal/tls"
)
// PasswordAuthenticator contains the fields that will be used to authenticate with
// the Cassandra cluster.
type PasswordAuthenticator struct {
	Enabled  bool   `json:"enabled" yaml:"enabled"`   // whether password auth is used at all
	Username string `json:"username" yaml:"username"` // account name sent to the cluster
	Password string `json:"password" yaml:"password"` // account password sent to the cluster
}
// CassandraConfig contains configuration fields for the Cassandra output type.
type CassandraConfig struct {
	Addresses                []string              `json:"addresses" yaml:"addresses"`
	TLS                      btls.Config           `json:"tls" yaml:"tls"`
	PasswordAuthenticator    PasswordAuthenticator `json:"password_authenticator" yaml:"password_authenticator"`
	DisableInitialHostLookup bool                  `json:"disable_initial_host_lookup" yaml:"disable_initial_host_lookup"`
	Query                    string                `json:"query" yaml:"query"`
	ArgsMapping              string                `json:"args_mapping" yaml:"args_mapping"`
	Consistency              string                `json:"consistency" yaml:"consistency"`
	Timeout                  string                `json:"timeout" yaml:"timeout"`
	// Embedded retry settings are flattened into this config's JSON/YAML.
	// TODO: V4 Remove this and replace with explicit values.
	retries.Config `json:",inline" yaml:",inline"`
	MaxInFlight    int                `json:"max_in_flight" yaml:"max_in_flight"`
	Batching       batchconfig.Config `json:"batching" yaml:"batching"`
}
// NewCassandraConfig creates a new CassandraConfig with default values.
func NewCassandraConfig() CassandraConfig {
	// Default retry behaviour: up to 3 attempts with 1s-5s backoff and no
	// overall elapsed-time cap.
	retryConf := retries.NewConfig()
	retryConf.MaxRetries = 3
	retryConf.Backoff.InitialInterval = "1s"
	retryConf.Backoff.MaxInterval = "5s"
	retryConf.Backoff.MaxElapsedTime = ""

	conf := CassandraConfig{
		Addresses:                []string{},
		TLS:                      btls.NewConfig(),
		DisableInitialHostLookup: false,
		Query:                    "",
		ArgsMapping:              "",
		Consistency:              gocql.Quorum.String(),
		Timeout:                  "600ms",
		Config:                   retryConf,
		MaxInFlight:              64,
		Batching:                 batchconfig.NewConfig(),
	}
	// Password auth is disabled by default; the zero value matches the
	// previous explicit Enabled/Username/Password defaults.
	conf.PasswordAuthenticator = PasswordAuthenticator{}
	return conf
}
|
BinBear/VinBaseComponents | VinBaseComponents/Classes/BlockViewKit/UITableViewCell+BlockExtention.h | //
// UITableViewCell+BlockExtention.h
// HeartTrip
//
// Created by vin on 2020/1/5.
// Copyright © BinBear. All rights reserved.
//
#import <UIKit/UIKit.h>
NS_ASSUME_NONNULL_BEGIN
@interface UITableViewCell (BlockExtention)

/// Data object driving this cell's content (assigned when the cell is built).
@property (nonatomic,strong) id ht_cellData;
/// Presumably the data backing the cell's section -- confirm in the .m file.
@property (nonatomic,strong,readonly) id ht_sectionData;
/// Presumably the data backing the whole table view -- confirm in the .m file.
@property (nonatomic,strong,readonly) id ht_tableViewData;
/// The table view this cell belongs to (weak back-reference).
@property (nonatomic,weak,readonly) UITableView *ht_tableView;
/// The cell's index path within ht_tableView.
@property (nonatomic,strong,readonly) NSIndexPath *ht_indexPath;

/// Returns a cell for the given table view / index path, configured with
/// cellData.
+ (instancetype)ht_cellWithTableView:(UITableView *)tableview
                           indexPath:(NSIndexPath *)indexPath
                            cellData:(id)cellData;

/// Presumably a load hook invoked once when the cell is created -- confirm.
- (void)ht_cellLoad;
/// Presumably re-applies ht_cellData to the cell's subviews -- confirm.
- (void)ht_reloadCellData;

@end
NS_ASSUME_NONNULL_END
|
ScalablyTyped/SlinkyTyped | a/aws-sdk/src/main/scala/typingsSlinky/awsSdk/rekognitionMod/Gender.scala | <filename>a/aws-sdk/src/main/scala/typingsSlinky/awsSdk/rekognitionMod/Gender.scala
package typingsSlinky.awsSdk.rekognitionMod
import org.scalablytyped.runtime.StObject
import scala.scalajs.js
import scala.scalajs.js.`|`
import scala.scalajs.js.annotation.{JSGlobalScope, JSGlobal, JSImport, JSName, JSBracketAccess}
/** ScalablyTyped facade for the AWS SDK Rekognition `Gender` shape: a
  * predicted gender with its confidence. Generated code -- prefer
  * regenerating over hand-editing.
  */
@js.native
trait Gender extends StObject {

  /**
    * Level of confidence in the prediction.
    */
  var Confidence: js.UndefOr[Percent] = js.native

  /**
    * The predicted gender of the face.
    */
  var Value: js.UndefOr[GenderType] = js.native
}
object Gender {

  /** Creates an empty `Gender` facade over a fresh JS object literal. */
  @scala.inline
  def apply(): Gender = js.Dynamic.literal().asInstanceOf[Gender]

  /** Fluent mutators for populating a [[Gender]] instance in place. */
  @scala.inline
  implicit class GenderMutableBuilder[Self <: Gender] (val x: Self) extends AnyVal {

    @scala.inline
    def setConfidence(value: Percent): Self = StObject.set(x, "Confidence", value.asInstanceOf[js.Any])

    @scala.inline
    def setConfidenceUndefined: Self = StObject.set(x, "Confidence", js.undefined)

    @scala.inline
    def setValue(value: GenderType): Self = StObject.set(x, "Value", value.asInstanceOf[js.Any])

    @scala.inline
    def setValueUndefined: Self = StObject.set(x, "Value", js.undefined)
  }
}
|
ploubser/cfacter | lib/inc/facter/util/posix/scoped_bio.hpp | <gh_stars>0
/**
* @file
* Declares the scoped BIO (OpenSSL) resource.
*/
#ifndef FACTER_UTIL_POSIX_SCOPED_BIO_HPP_
#define FACTER_UTIL_POSIX_SCOPED_BIO_HPP_
#include "../scoped_resource.hpp"
#include <openssl/bio.h>
namespace facter { namespace util { namespace posix {
/**
* Represents a scoped OpenSSL BIO object.
* Automatically frees the BIO when it goes out of scope.
*/
struct scoped_bio : scoped_resource<BIO*>
{
    /**
     * Constructs a scoped_bio.
     * @param method The BIO_METHOD to use.
     */
    explicit scoped_bio(BIO_METHOD* method);

    /**
     * Constructs a scoped_bio.
     * @param bio The BIO to free when destroyed.
     */
    explicit scoped_bio(BIO* bio);

 private:
    // Deleter invoked by scoped_resource when the handle leaves scope;
    // presumably calls BIO_free -- confirm in the implementation file.
    static void free(BIO* bio);
};
}}} // namespace facter::util::posix
#endif // FACTER_UTIL_POSIX_SCOPED_BIO_HPP_
|
pnomolos/chronomodel | lib/chrono_model/time_machine/history_model.rb | module ChronoModel
module TimeMachine
module HistoryModel
extend ActiveSupport::Concern
included do
self.table_name = [Adapter::HISTORY_SCHEMA, superclass.table_name].join('.')
scope :chronological, -> { order(Arel.sql('lower(validity) ASC')) }
end
# Methods that make up the history interface of the companion History
# model, automatically built for each Model that includes TimeMachine
#
module ClassMethods
include ChronoModel::TimeMachine::TimeQuery
include ChronoModel::TimeMachine::Timeline
# HACK. find() and save() require the real history ID. So we are
# setting it now and ensuring to reset it to the original one after
# execution completes. FIXME
#
def with_hid_pkey(&block)
old = self.primary_key
self.primary_key = :hid
block.call
ensure
self.primary_key = old
end
def find(*)
with_hid_pkey { super }
end
# In the History context, pre-fill the :on options with the validity interval.
#
def time_query(match, time, options = {})
options[:on] ||= :validity
super
end
def past
time_query(:before, :now).where("NOT upper_inf(#{quoted_table_name}.validity)")
end
# To identify this class as the History subclass
def history?
true
end
def relation
super.as_of_time!(Time.now)
end
# Fetches as of +time+ records.
#
def as_of(time)
superclass.from(virtual_table_at(time)).as_of_time!(time)
end
def virtual_table_at(time, table_name: nil)
virtual_name = table_name ?
connection.quote_table_name(table_name) :
superclass.quoted_table_name
"(#{at(time).to_sql}) #{virtual_name}"
end
# Fetches history record at the given time
#
def at(time)
time_query(:at, time).from(quoted_table_name).as_of_time!(time)
end
# Returns the history sorted by recorded_at
#
def sorted
all.order(Arel.sql(%[ #{quoted_table_name}."recorded_at" ASC, #{quoted_table_name}."hid" ASC ]))
end
# Fetches the given +object+ history, sorted by history record time
# by default. Always includes an "as_of_time" column that is either
# the upper bound of the validity range or now() if history validity
# is maximum.
#
def of(object)
where(id: object)
end
# The `sti_name` method returns the contents of the inheritance
# column, and it is usually the class name. The inherited class
# name has the "::History" suffix but that is never going to be
# present in the data.
#
# As such it is overriden here to return the same contents that
# the parent would have returned.
def sti_name
superclass.sti_name
end
# For STI to work, the history model needs to have the exact same
# semantics as the model it inherits from. However given it is
# actually inherited, the original AR implementation would return
# false here. But for STI sake, the history model is located in the
# same exact hierarchy location as its parent, thus this is defined in
# this override.
#
def descends_from_active_record?
superclass.descends_from_active_record?
end
private
# STI fails when a Foo::History record has Foo as type in the
# inheritance column; AR expects the type to be an instance of the
# current class or a descendant (or self).
#
def find_sti_class(type_name)
super(type_name + "::History")
end
end
# The history id is `hid`, but this cannot set as primary key
# or temporal assocations will break. Solutions are welcome.
def id
hid
end
# Referenced record ID.
#
def rid
attributes[self.class.primary_key]
end
def save(*)
self.class.with_hid_pkey { super }
end
def save!(*)
self.class.with_hid_pkey { super }
end
def update_columns(*)
self.class.with_hid_pkey { super }
end
def historical?
true
end
# Returns the previous history entry, or nil if this
# is the first one.
#
def pred
return if self.valid_from.nil?
if self.class.timeline_associations.empty?
self.class.where('id = ? AND upper(validity) = ?', rid, valid_from).first
else
super(id: rid, before: valid_from, table: self.class.superclass.quoted_table_name)
end
end
# Returns the next history entry, or nil if this is the
# last one.
#
def succ
return if self.valid_to.nil?
if self.class.timeline_associations.empty?
self.class.where('id = ? AND lower(validity) = ?', rid, valid_to).first
else
super(id: rid, after: valid_to, table: self.class.superclass.quoted_table_name)
end
end
alias :next :succ
# Returns the first history entry
#
def first
self.class.where(id: rid).chronological.first
end
# Returns the last history entry
#
def last
self.class.where(id: rid).chronological.last
end
# Returns this history entry's current record
#
def current_version
self.class.superclass.find(rid)
end
def record #:nodoc:
ActiveSupport::Deprecation.warn '.record is deprecated in favour of .current_version'
self.current_version
end
def valid_from
validity.first
end
def valid_to
validity.last
end
alias as_of_time valid_to
def recorded_at
ChronoModel::Conversions.string_to_utc_time attributes_before_type_cast['recorded_at']
end
end
end
end
|
Kurokitu/Home | font-end/src/main.js | import Vue from 'vue';
import md5 from 'js-md5';
import jwtDecode from 'jwt-decode';
import VueSocketIOExt from 'vue-socket.io-extended';
import io from 'socket.io-client';
import { VueReCaptcha } from 'vue-recaptcha-v3';
import VueMarkdown from 'vue-markdown';
import App from './App.vue';
import router from './router';
import vuetify from './plugins/vuetify';
import store from './store';
import mavonEditor from 'mavon-editor';
import 'mavon-editor/dist/css/index.css';
import snackbar from './components/Snackbar/int';
import '@/components';
import './registerServiceWorker';
import VueBus from 'vue-bus';
// Global helpers exposed on every Vue instance.
Vue.prototype.$snackbar = snackbar;
Vue.config.productionTip = false;
Vue.prototype.$md5 = md5;
Vue.prototype.$jwtDecode = jwtDecode;

Vue.use(VueBus);
Vue.use(mavonEditor);
Vue.use(VueMarkdown);
// reCAPTCHA v3; configured entirely via VUE_APP_reCAPTCHA_* env vars.
Vue.use(VueReCaptcha, {
  siteKey: process.env.VUE_APP_reCAPTCHA_siteKey,
  loaderOptions: {
    useRecaptchaNet: process.env.VUE_APP_reCAPTCHA_useRecaptchaNet,
    autoHideBadge: process.env.VUE_APP_reCAPTCHA_autoHideBadge
  }
});
// Route guard: enforces login (and optionally admin rights) before entering
// routes flagged via their meta fields.
router.beforeEach((to, from, next) => {
  if (to.meta.role == true) { // does the target route require login?
    if (localStorage.getItem('token')) {
      if (to.meta.admin == true) {
        // Admin-only route: usertype 0 evidently lacks admin rights,
        // so bounce it back to the home page.
        const decoded = jwtDecode(localStorage.getItem('token'));
        if (decoded.usertype == 0) {
          next("/");
        } else {
          next();
        }
      } else {
        next();
      }
    } else {
      // Not logged in: redirect to the login page, remembering the
      // originally requested path so login can return there.
      next({
        name: "login",
        params: {
          from: to.fullPath
        }
      });
    }
  } else {
    next();
  }
});
// Socket.io connection to the API server, injected app-wide via the plugin.
const socket = io(process.env.VUE_APP_ApiUrl);
Vue.use(VueSocketIOExt, socket);

// Mount the root component with the router, Vuetify and the store.
new Vue({
  router,
  vuetify,
  store,
  render: h => h(App)
}).$mount('#app');
|
rjsalvadorr/portfolio-v6 | static/apps/rise-of-the-raptors/assets/js/2.f1e8eae1.js | ;(window.webpackJsonp = window.webpackJsonp || []).push([
[2],
{
124: function (t, e, r) {
"use strict"
r.r(e)
r(106), r(107), r(109), r(112), r(115), r(60), r(117)
var a,
n = r(264),
i = r(225),
s = r(226),
o = (r(239), r(247), r(248), r(252)),
l =
((a = {
"Atlanta Hawks": "ATL",
"Brooklyn Nets": "BKN",
"Boston Celtics": "BOS",
"Charlotte Hornets": "CHO",
"Charlotte Bobcats": "CHA",
"Chicago Bulls": "CHI",
"Cleveland Cavaliers": "CLE",
"Dallas Mavericks": "DAL",
"Denver Nuggets": "DEN",
"Detroit Pistons": "DET",
"Golden State Warriors": "GSW",
"Houston Rockets": "HOU",
"Indiana Pacers": "IND",
"Los Angeles Clippers": "LAC",
"Los Angeles Lakers": "LAL",
"Memphis Grizzlies": "MEM",
"Miami Heat": "MIA",
"Milwaukee Bucks": "MIL",
"Minnesota Timberwolves": "MIN",
"New Jersey Nets": "NJN",
"New Orleans Hornets": "NOH",
"New Orleans/Oklahoma City Hornets": "NOK",
"Oklahoma City Hornets": "OKH",
"Oklahoma City Thunder": "OKC",
"New Orleans Pelicans": "NOP",
"New York Knickerbockers": "NYK",
"New York Knicks": "NYK",
"Orlando Magic": "ORL",
"Philadelphia 76ers": "PHI",
"Phoenix Suns": "PHX",
"Portland Trailblazers": "POR",
"Portland Trail Blazers": "POR",
"Sacramento Kings": "SAC",
}),
Object(o.a)(a, "Sacramento Kings", "SAC"),
Object(o.a)(a, "San Ant<NAME>", "SAS"),
Object(o.a)(a, "Seattle Supersonics", "SEA"),
Object(o.a)(a, "Seattle SuperSonics", "SEA"),
Object(o.a)(a, "Toronto Raptors", "TOR"),
Object(o.a)(a, "Toronto Huskies", "TRH"),
Object(o.a)(a, "Utah Jazz", "UTA"),
Object(o.a)(a, "Vancouver Grizzlies", "VAN"),
Object(o.a)(a, "Washington Wizards", "WAS"),
Object(o.a)(a, "Washington Bullets", "WAS"),
a),
c = [
{
id: 0,
names: ["<NAME>"],
abbrevs: ["ATL"],
colours: [
{ year: 1996, bg: "C8102E", text: "000000", trim: "FFCD00" },
{ year: 2008, bg: "E03A3E", text: "FFFFFF", trim: "C1D32F" },
],
},
{
id: 1,
names: ["<NAME>", "New Jersey Nets"],
abbrevs: ["BKN", "NJN"],
colours: [
{ year: 1996, bg: "002A60", text: "C6CFD4", trim: "CD1041" },
{ year: 2013, bg: "000000", text: "FFFFFF", trim: "FFFFFF" },
],
},
{
id: 2,
names: ["<NAME>"],
abbrevs: ["BOS"],
colours: [
{ year: 1996, bg: "007A33", text: "FFFFFF", trim: "FFFFFF" },
],
},
{
id: 3,
names: ["<NAME>", "<NAME>"],
abbrevs: ["CHO", "CHA"],
colours: [
{ year: 1996, bg: "00778B", text: "FFFFFF", trim: "280071" },
{ year: 1999, bg: "00788C", text: "FFFFFF", trim: "1D1160" },
],
},
{
id: 4,
names: ["<NAME>"],
abbrevs: ["CHI"],
colours: [
{ year: 1996, bg: "CE1141", text: "000000", trim: "000000" },
],
},
{
id: 5,
names: ["<NAME>"],
abbrevs: ["CLE"],
colours: [
{ year: 1996, bg: "E35205", text: "5C88DA", trim: "27251F" },
{ year: 2004, bg: "6F263D", text: "000000", trim: "FFB81C" },
],
},
{
id: 6,
names: ["<NAME>"],
abbrevs: ["DAL"],
colours: [
{ year: 1996, bg: "002855", text: "FFFFFF", trim: "00843D" },
{ year: 2002, bg: "00538C", text: "FFFFFF", trim: "B8C4CA" },
],
},
{
id: 7,
names: ["<NAME>"],
abbrevs: ["DEN"],
colours: [
{ year: 1996, bg: "041E42", text: "FFFFFF", trim: "9D2235" },
{ year: 2004, bg: "00285E", text: "FDB927", trim: "418FDE" },
{ year: 2017, bg: "0E2240", text: "FEC524", trim: "8B2131" },
],
},
{
id: 8,
names: ["<NAME>"],
abbrevs: ["DET"],
colours: [
{ year: 1996, bg: "006272", text: "FFFFFF", trim: "9D2235" },
{ year: 2002, bg: "D50032", text: "FFFFFF", trim: "003DA5" },
{ year: 2017, bg: "ED174C", text: "FFFFFF", trim: "006BB6" },
],
},
{
id: 9,
names: ["<NAME>"],
abbrevs: ["GSW"],
colours: [
{ year: 1996, bg: "041E42", text: "FFA300", trim: "BE3A34" },
{ year: 2011, bg: "006BB6", text: "FDB927", trim: "FDB927" },
],
},
{
id: 10,
names: ["<NAME>"],
abbrevs: ["HOU"],
colours: [
{ year: 1996, bg: "BA0C2F", text: "FFFFFF", trim: "041E42" },
{ year: 2004, bg: "CE1141", text: "FFFFFF", trim: "000000" },
],
},
{
id: 11,
names: ["<NAME>"],
abbrevs: ["IND"],
colours: [
{ year: 1996, bg: "002D62", text: "FDBB30", trim: "FDBB30" },
],
},
{
id: 12,
names: ["<NAME>"],
abbrevs: ["LAC"],
colours: [
{ year: 1996, bg: "FFFFFF", text: "C8102E", trim: "1D428A" },
],
},
{
id: 13,
names: ["<NAME>"],
abbrevs: ["LAL"],
colours: [
{ year: 1996, bg: "FDB927", text: "552583", trim: "552583" },
],
},
{
id: 14,
names: ["<NAME>", "<NAME>"],
abbrevs: ["VAN", "MEM"],
colours: [
{ year: 1996, bg: "00B2A9", text: "FFFFF", trim: "E43C40" },
{ year: 2004, bg: "6189B9", text: "FDB927", trim: "00285E" },
{ year: 2018, bg: "5D76A9", text: "F5B112", trim: "12173F" },
],
},
{
id: 15,
names: ["<NAME>"],
abbrevs: ["MIA"],
colours: [
{ year: 1996, bg: "BA0C2F", text: "FEDD00", trim: "000000" },
{ year: 2e3, bg: "98002E", text: "F9A01B", trim: "000000" },
],
},
{
id: 16,
names: ["<NAME>"],
abbrevs: ["MIL"],
colours: [
{ year: 1996, bg: "702F8A", text: "FFFFFF", trim: "2C5234" },
{ year: 2006, bg: "274E37", text: "FFFFFF", trim: "AC1A2F" },
{ year: 2015, bg: "00471B", text: "FFFFFF", trim: "EEE1C6" },
{ year: 2006, bg: "274E37", text: "FFFFFF", trim: "AC1A2F" },
],
},
{
id: 17,
names: ["<NAME>"],
abbrevs: ["MIN"],
colours: [
{ year: 1996, bg: "236192", text: "FFFFFF", trim: "00843D" },
{ year: 2017, bg: "0C2340", text: "FFFFFF", trim: "78BE20" },
],
},
{
id: 19,
names: ["Oklahoma City Thunder"],
abbrevs: ["OKC"],
colours: [
{ year: 1996, bg: "007AC1", text: "FFFFFF", trim: "EF3B24" },
],
},
{
id: 20,
names: [
"New Orleans Hornets",
"New Orleans/Oklahoma City Hornets",
"Oklahoma City Hornets",
"New Orleans Pelicans",
],
abbrevs: ["NOH", "NOK", "OKH", "NOP"],
colours: [
{ year: 1996, bg: "00778B", text: "FFFFFF", trim: "280071" },
{ year: 2014, bg: "0C2340", text: "FFFFFF", trim: "85714D" },
],
},
{
id: 21,
names: ["<NAME>", "New York Knicks"],
abbrevs: ["NYK"],
colours: [
{ year: 1996, bg: "0072CE", text: "FE5000", trim: "FE5000" },
{ year: 1998, bg: "006BB6", text: "F58426", trim: "F58426" },
],
},
{
id: 22,
names: ["<NAME>"],
abbrevs: ["ORL"],
colours: [
{ year: 1996, bg: "0077C0", text: "FFFFFF", trim: "000000" },
],
},
{
id: 23,
names: ["<NAME>"],
abbrevs: ["PHI"],
colours: [
{ year: 1996, bg: "006BB6", text: "FFFFFF", trim: "ED174C" },
],
},
{
id: 24,
names: ["<NAME>"],
abbrevs: ["PHX"],
colours: [
{ year: 1996, bg: "5F259F", text: "FF6900", trim: "EF3340" },
{ year: 2001, bg: "1D1160", text: "E56020", trim: "B95915" },
],
},
{
id: 25,
names: ["<NAME>", "<NAME>"],
abbrevs: ["POR"],
colours: [
{ year: 1996, bg: "E03A3E", text: "000000", trim: "FFFFFF" },
],
},
{
id: 26,
names: ["<NAME>"],
abbrevs: ["SAC"],
colours: [
{ year: 1996, bg: "542E91", text: "FFFFFF", trim: "C4CED4" },
{ year: 2016, bg: "5A2D81", text: "FFFFFF", trim: "63727A" },
],
},
{
id: 27,
names: ["<NAME>"],
abbrevs: ["SAS"],
colours: [
{ year: 1996, bg: "C4CED4", text: "000000", trim: "000000" },
],
},
{
id: 28,
names: ["Seattle Supersonics", "Seattle SuperSonics"],
abbrevs: ["SEA"],
colours: [
{ year: 1996, bg: "173F35", text: "FFA300", trim: "9E2A2F" },
{ year: 2002, bg: "00653A", text: "FFC200", trim: "FFFFFF" },
],
},
{
id: 29,
names: ["<NAME>"],
abbrevs: ["TOR"],
colours: [
{ year: 1996, bg: "753BBD", text: "FFFFFF", trim: "BA0C2F" },
{ year: 2003, bg: "CE1141", text: "FFFFFF", trim: "000000" },
{ year: 2015, bg: "000000", text: "FFFFFF", trim: "CE1141" },
],
},
{
id: 30,
names: ["<NAME>"],
abbrevs: ["UTA"],
colours: [
{ year: 1996, bg: "753BBD", text: "00A9E0", trim: "006272" },
{ year: 2005, bg: "00275D", text: "6CAEDF", trim: "3E2680" },
{ year: 2011, bg: "002B5C", text: "F9A01B", trim: "00471B" },
],
},
{
id: 31,
names: ["<NAME>", "<NAME>"],
abbrevs: ["WAS"],
colours: [
{ year: 1996, bg: "002B5C", text: "FFFFFF", trim: "E31837" },
],
},
],
// Resolve one raw CSV row into a normalized team record for its season.
// t.Team may carry a trailing "*" marking a playoff appearance; the marker
// is stripped and remembered as `playoffs`. The full name is mapped to an
// abbreviation via the lookup table `l` (falling back to the raw name),
// then matched against the franchise list `c` to pick the most recent
// colour scheme whose `year` is <= t.Year.
u = function (t) {
var e,
r,
// a: playoff flag, n: team name with the "*" marker removed
a = t.Team.includes("*"),
n = a ? t.Team.replace("*", "") : t.Team,
// i: abbreviation from table `l`, or the raw name when unmapped
i = ((!(r = l[(e = n)]) || r.length < 1) && (r = e), r),
// s: the franchise's colour entries restricted to seasons at or
// before t.Year, sorted newest-first so s[0] is the active scheme.
// NOTE(review): Array.prototype.filter always returns an array, so the
// `r ? r[0] : null` fallback never yields null; an unknown abbreviation
// makes r[0] undefined and the `.colours` access below would throw.
s = (function (t, e) {
var r = c.filter(function (e) {
if (e.abbrevs.includes(t)) return !0
})
return r ? r[0] : null
})(i, t.Year)
.colours.filter(function (e) {
if (e.year <= t.Year) return !0
})
.sort(function (t, e) {
return e.year - t.year
})
return {
team: n,
teamAbbrev: i,
playoffs: a,
bg: s[0].bg,
colours: { trim: s[0].trim, text: s[0].text, bg: s[0].bg },
}
},
F = 1996,
d = 2019,
// Season statistics store covering seasons F (1996) through d (2019).
// Per season it keeps the team records (`children`) plus the best
// regular-season win rate and best playoff win count, which are used to
// normalise each team's adjusted ratings to a 0-100 scale.
// (Transpiled class: Object(i.a) is _classCallCheck, Object(s.a) is
// _createClass, and the try/finally loops are transpiled for...of.)
b = (function () {
function t() {
Object(i.a)(this, t), (this.stats = {})
// Seed an empty bucket per season; 1e-6 avoids division by zero
// before any real data has been added.
for (var e = F; e <= d; e++)
this.stats["" + e] = {
children: [],
bestSeasonWins: 1e-6,
bestPlayoffWins: 1e-6,
}
}
return (
Object(s.a)(t, [
{
// isValidYear(year): true when the year is inside the tracked range.
key: "isValidYear",
value: function (t) {
return t >= F && t <= d
},
},
{
// getStatsByYear(year): one season's bucket (children + best marks).
key: "getStatsByYear",
value: function (t) {
return this.stats[t]
},
},
{
// getTeamRecord(year, record): the stored record whose teamAbbrev
// matches `record`, or null when the team has no entry yet.
key: "getTeamRecord",
value: function (t, e) {
var r = this.getStatsByYear(t),
a = !0,
n = !1,
i = void 0
try {
for (
var s, o = r.children[Symbol.iterator]();
!(a = (s = o.next()).done);
a = !0
) {
var l = s.value
if (l.teamAbbrev === e.teamAbbrev) return l
}
} catch (t) {
;(n = !0), (i = t)
} finally {
try {
a || null == o.return || o.return()
} finally {
if (n) throw i
}
}
return null
},
},
{
// initializeTeamRecord(row, resolvedTeam, type): append a zeroed
// record for the team to the row's season and return it.
key: "initializeTeamRecord",
value: function (t, e, r) {
var a = this.getStatsByYear(t.Year),
n = {
team: e.team,
teamAbbrev: e.teamAbbrev,
playoffs: "playoffs" === r || e.playoffs,
adjWinRate: 0,
adjPlayoffWinRate: 0,
overallRtg: 0,
champion: !1,
colours: e.colours,
winRate: 0,
netRtg: 0,
playoffWinRate: 0,
playoffNetRtg: 0,
regWins: 0,
regLosses: 0,
wr: 0,
}
return a.children.push(n), n
},
},
{
// addData(row, type): merge one CSV row ("regSeason" or "playoffs")
// into the team's record, then refresh the season's derived stats.
key: "addData",
value: function (t, e) {
this.getStatsByYear(t.Year)
var r = u(t),
a = this.getTeamRecord(t.Year, r)
if (
(a || (a = this.initializeTeamRecord(t, r)),
"regSeason" === e)
) {
// Regular season: wins, losses, raw win rate, percentage win
// rate, and net rating (ORtg - DRtg).
// Math.round(x + "e2") + "e-2" is a string-exponent trick that
// rounds to 2 decimal places.
var n = parseInt(t.W, 10),
i = parseInt(t.L, 10),
s = n / (n + i),
o = parseFloat(t.ORtg) - parseFloat(t.DRtg)
;(a.regWins = n),
(a.regLosses = i),
(a.wr = s),
(a.winRate = Number(Math.round(100 * s + "e2") + "e-2")),
(a.netRtg = Number(Math.round(o + "e2") + "e-2"))
}
if ("playoffs" === e) {
// NOTE(review): despite its name, playoffWinRate stores the raw
// playoff win COUNT; recalculateStats divides it by
// bestPlayoffWins (also a count), so the ratio stays consistent.
var l = parseInt(t.W, 10),
c = parseFloat(t.ORtg) - parseFloat(t.DRtg)
;(a.playoffWinRate = l),
(a.playoffNetRtg = Number(Math.round(c + "e2") + "e-2"))
}
this.recalculateStats(t.Year, a)
},
},
{
// recalculateStats(year, record): fold `record` into the season's
// best marks, then recompute every team's adjusted 0-100 ratings.
// The overall rating weights the regular season double vs playoffs,
// and champions (adjPlayoffWinRate === 100) get a +0.2 bonus.
key: "recalculateStats",
value: function (t, e) {
var r = this.getStatsByYear(t)
e.winRate &&
e.winRate > r.bestSeasonWins &&
(r.bestSeasonWins = e.winRate),
e.playoffWinRate &&
e.playoffWinRate > r.bestPlayoffWins &&
(r.bestPlayoffWins = e.playoffWinRate)
var a = !0,
n = !1,
i = void 0
try {
for (
var s, o = r.children[Symbol.iterator]();
!(a = (s = o.next()).done);
a = !0
) {
var l,
c = s.value,
u = 0
;(l = c.winRate / r.bestSeasonWins),
(c.adjWinRate = Number(
Math.round(100 * l + "e2") + "e-2"
)),
c.playoffs &&
((u = c.playoffWinRate / r.bestPlayoffWins),
(c.adjPlayoffWinRate = Number(
Math.round(100 * u + "e2") + "e-2"
)),
(c.champion = 100 === c.adjPlayoffWinRate))
var F = (l + l + u) / 3
c.champion && (F += 0.2),
(c.overallRtg = Number(
Math.round(100 * F + "e2") + "e-2"
))
}
} catch (t) {
;(n = !0), (i = t)
} finally {
try {
a || null == o.return || o.return()
} finally {
if (n) throw i
}
}
},
},
]),
t
)
})(),
m = r(125),
f = r(126),
h = function (t, e) {
return "TOR" === t ? 1 : e ? 0.5 : 0.2
},
y = {
name: "Graphic",
data: function () {
return {
currentSlide: 1,
seasonStats: null,
torontoStats: null,
svg: null,
pack: null,
}
},
computed: {
currentYear: function () {
return 1 === this.currentSlide || 26 === this.currentSlide
? 2019
: 1996 + this.currentSlide - 2
},
},
methods: {
updateGraphic: function () {
var t = this.seasonStats.stats[this.currentYear],
e = t.children.filter(function (t) {
return "TOR" === t.teamAbbrev
})
e.length > 0 && (this.torontoStats = e[0]),
this.redraw(t.children)
var r = document.querySelectorAll(".main-title"),
a = document.querySelectorAll(".main-text"),
n = !0,
i = !1,
s = void 0
try {
for (
var o, l = r[Symbol.iterator]();
!(n = (o = l.next()).done);
n = !0
) {
var c = o.value
c.classList.remove("active"),
c.dataset.slide == this.currentSlide &&
c.classList.add("active")
}
} catch (t) {
;(i = !0), (s = t)
} finally {
try {
n || null == l.return || l.return()
} finally {
if (i) throw s
}
}
var u = !0,
F = !1,
d = void 0
try {
for (
var b, m = a[Symbol.iterator]();
!(u = (b = m.next()).done);
u = !0
) {
var f = b.value
f.classList.remove("active"),
f.dataset.slide == this.currentSlide &&
f.classList.add("active")
}
} catch (t) {
;(F = !0), (d = t)
} finally {
try {
u || null == m.return || m.return()
} finally {
if (F) throw d
}
}
var h = document.querySelector(".main-text.active .text-wins"),
y = document.querySelector(".main-text.active .text-losses"),
g = document.querySelector(".main-text.active .text-winrate")
h && (h.textContent = this.torontoStats.regWins),
y && (y.textContent = this.torontoStats.regLosses),
g &&
(g.textContent = Number(
Math.round(this.torontoStats.wr + "e3") + "e-3"
))
},
scrollPrev: function (t) {
this.currentSlide > 1 &&
(this.currentSlide = this.currentSlide - 1),
this.updateGraphic()
},
scrollNext: function (t) {
this.currentSlide < 26 &&
(this.currentSlide = this.currentSlide + 1),
this.updateGraphic()
},
getGraphicDimensions: function () {
var t = this.$el.clientWidth,
e = (this.$el.clientHeight, window.innerWidth),
r = window.innerHeight,
a = e <= 750 ? 40 : 60,
n = r > t ? t : r
return n - 2 * a
},
redraw: function (t) {
var e = n.e().duration(750),
r = n
.b({ children: t })
.sum(function (t) {
return Math.pow(t.overallRtg, 2.75)
})
.sort(function (t, e) {
return e.overallRtg - t.overallRtg
}),
a = this.svg
.selectAll("circle")
.data(this.pack(r).leaves(), function (t) {
return t.data.teamAbbrev
}),
i = this.svg
.selectAll("text")
.data(this.pack(r).leaves(), function (t) {
return t.data.teamAbbrev
})
a
.exit()
.style("fill", "#666666")
.style("stroke", "#666666")
.transition(e)
.attr("r", 1e-6)
.remove(),
i
.exit()
.style("font-size", 1e-6)
.transition(e)
.attr("r", 1e-6)
.attr("opacity", 1e-6)
.remove(),
a
.transition(e)
.attr("r", function (t) {
return t.r
})
.attr("class", function (t) {
var e = t.data.champion ? "circle--champion" : ""
return "circle circle--"
.concat(t.data.teamAbbrev, " ")
.concat(e)
})
.style("stroke", function (t) {
return "#".concat(t.data.colours.trim)
})
.style("stroke-width", function (t) {
return t.data.champion ? "9" : "3"
})
.style("fill", function (t) {
return "#".concat(t.data.colours.bg)
})
.attr("opacity", function (t) {
return h(t.data.teamAbbrev)
})
.attr("cx", function (t) {
return t.x
})
.attr("cy", function (t) {
return t.y
}),
i
.transition(e)
.attr("x", function (t) {
return t.x
})
.attr("y", function (t) {
return t.y
})
.style("fill", function (t) {
return "#".concat(t.data.colours.text)
})
.attr("opacity", function (t) {
return h(t.data.teamAbbrev, !0)
})
.style("font-size", function (t) {
return "".concat(t.r / 1.33, "px")
}),
a
.enter()
.append("circle")
.attr("r", 1e-6)
.attr("opacity", 1e-6)
.attr("cx", function (t) {
return t.x
})
.attr("cy", function (t) {
return t.y
})
.style("fill", "#fff")
.transition(e)
.attr("r", function (t) {
return t.r
})
.style("stroke", function (t) {
return "#".concat(t.data.colours.trim)
})
.style("stroke-width", function (t) {
return t.data.champion ? "9" : "3"
})
.style("fill", function (t) {
return "#".concat(t.data.colours.bg)
})
.attr("opacity", function (t) {
return h(t.data.teamAbbrev)
})
.attr("class", function (t) {
var e = t.data.champion ? "circle--champion" : ""
return "circle circle--"
.concat(t.data.teamAbbrev, " ")
.concat(e)
}),
a
.enter()
.append("text")
.attr("opacity", 1e-6)
.style("font-size", 1e-6)
.attr("x", function (t) {
return t.x
})
.attr("y", function (t) {
return t.y
})
.text(function (t) {
return t.data.teamAbbrev
})
.attr("class", function (t) {
return "text text--".concat(t.data.teamAbbrev)
})
.style("fill", function (t) {
return "#".concat(t.data.colours.text)
})
.attr("font-family", "sans-serif")
.style("text-anchor", "middle")
.attr("dy", ".4em")
.style("font-weight", "bold")
.style("font-size", function (t) {
return "".concat(t.r / 1.33, "px")
})
.transition(e)
.attr("opacity", function (t) {
return h(t.data.teamAbbrev, !0)
})
},
},
mounted: function () {
var t = this
this.seasonStats = new b()
var e = this,
r = n.a("./data/stats.csv", function (t) {
var r = t.Type,
a = t.Year,
n = t.Rk,
i = t.Team,
s = t.W,
o = t.L,
l = t.ORtg,
c = t.DRtg
e.seasonStats.addData(
{ Year: a, Rk: n, Team: i, W: s, L: o, ORtg: l, DRtg: c },
r
)
}),
a = this.getGraphicDimensions()
;(this.svg = n
.d(".graphic-wrapper")
.append("svg")
.style("margin", "0 auto")
.style("display", "block")
.attr("width", a)
.attr("height", a)),
(this.pack = n.c().size([a, a]).padding(10)),
r.then(function (t) {
e.updateGraphic()
})
var i
;(i = null),
window.addEventListener("resize", function (e) {
null !== i && clearTimeout(i),
(i = setTimeout(function () {
;(i = null),
(function (e) {
var r = t.getGraphicDimensions()
t.svg.attr("width", r),
t.svg.attr("height", r),
(t.pack = n.c().size([r, r]).padding(1.5)),
t.redraw(t.seasonStats.stats[t.currentYear].children)
})()
}, 100))
})
},
components: { LeftArrow: m.default, RightArrow: f.default },
},
g = (r(253), r(6)),
v = Object(g.a)(
y,
function () {
var t = this.$createElement,
e = this._self._c || t
return e("div", { staticClass: "graphic-container" }, [
e(
"div",
{
staticClass: "scroll-btn scroll-btn--left",
on: { click: this.scrollPrev },
},
[
e("LeftArrow", {
staticClass: "scroll-btn-icon",
attrs: { hidden: 1 === this.currentSlide },
}),
],
1
),
this._v(" "),
e("div", { staticClass: "graphic-wrapper" }),
this._v(" "),
e(
"div",
{
staticClass: "scroll-btn scroll-btn--right",
on: { click: this.scrollNext },
},
[
e("RightArrow", {
staticClass: "scroll-btn-icon",
attrs: { hidden: 26 === this.currentSlide },
}),
],
1
),
])
},
[],
!1,
null,
"57b5f474",
null
)
e.default = v.exports
},
125: function (t, e, r) {
"use strict"
r.r(e)
var a = { name: "LeftArrow", props: { hidden: Boolean } },
n = (r(80), r(6)),
i = Object(n.a)(
a,
function () {
var t = this.$createElement,
e = this._self._c || t
return e(
"svg",
{
class: "left-arrow " + (this.hidden ? "hidden" : ""),
staticStyle: {
"shape-rendering": "geometricPrecision",
"text-rendering": "geometricPrecision",
"image-rendering": "optimizeQuality",
},
attrs: {
height: "300px",
width: "300px",
xmlns: "http://www.w3.org/2000/svg",
"xmlns:xlink": "http://www.w3.org/1999/xlink",
"xml:space": "preserve",
version: "1.1",
viewBox: "0 0 375 668",
x: "0px",
y: "0px",
"fill-rule": "evenodd",
"clip-rule": "evenodd",
},
},
[
e("defs"),
e("g", [
e("path", {
staticClass: "fil0",
attrs: {
d: "M12 304l292 -292c17,-16 43,-16 59,0 16,16 16,42 0,59l-263 263 263 263c16,16 16,43 0,59 -16,16 -42,16 -59,0l-292 -293c-16,-16 -16,-42 0,-59z",
},
}),
]),
]
)
},
[],
!1,
null,
"46b58b88",
null
)
e.default = i.exports
},
126: function (t, e, r) {
"use strict"
r.r(e)
var a = { name: "RightArrow", props: { hidden: Boolean } },
n = (r(81), r(6)),
i = Object(n.a)(
a,
function () {
var t = this.$createElement,
e = this._self._c || t
return e(
"svg",
{
class: "right-arrow " + (this.hidden ? "hidden" : ""),
staticStyle: {
"shape-rendering": "geometricPrecision",
"text-rendering": "geometricPrecision",
"image-rendering": "optimizeQuality",
},
attrs: {
height: "300px",
width: "300px",
xmlns: "http://www.w3.org/2000/svg",
"xmlns:xlink": "http://www.w3.org/1999/xlink",
"xml:space": "preserve",
version: "1.1",
viewBox: "0 0 375 668",
x: "0px",
y: "0px",
"fill-rule": "evenodd",
"clip-rule": "evenodd",
},
},
[
e("defs"),
e("g", [
e("path", {
staticClass: "fil0",
attrs: {
d: "M363 304l-292 -292c-17,-16 -43,-16 -59,0 -16,16 -16,42 0,59l263 263 -263 263c-16,16 -16,43 0,59 16,16 42,16 59,0l292 -293c16,-16 16,-42 0,-59z",
},
}),
]),
]
)
},
[],
!1,
null,
"01d93fbe",
null
)
e.default = i.exports
},
// Webpack module stubs. Modules 31, 32 and 82 appear to be extracted CSS
// (empty here because the styles were emitted to a separate file), and
// 80, 81 and 253 are the loader shims that register them when required —
// NOTE(review): inferred from the style-loader require pattern; confirm
// against the build config.
253: function (t, e, r) {
"use strict"
var a = r(82)
r.n(a).a
},
31: function (t, e, r) {},
32: function (t, e, r) {},
80: function (t, e, r) {
"use strict"
var a = r(31)
r.n(a).a
},
81: function (t, e, r) {
"use strict"
var a = r(32)
r.n(a).a
},
82: function (t, e, r) {},
},
])
|
mmallis87/google-apis-explorer | src/com/google/api/explorer/client/base/NameHelper.java | /*
* Copyright (C) 2012 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.api.explorer.client.base;
import com.google.common.base.Objects;
import com.google.common.base.Preconditions;
import javax.annotation.Nullable;
/**
* Helper code which will generate a presentable name from a potentially null title and a service
* name.
*
*/
public class NameHelper {

  /**
   * Generate the display title for the specified title and name.
   *
   * @param title Title to directly return or {@code null}.
   * @param name Name which should be turned into a title ({@code name + " API"})
   *        if the title is null; must not be {@code null}.
   * @return Printable title.
   * @throws NullPointerException if {@code name} is {@code null}.
   */
  public static String generateDisplayTitle(@Nullable String title, String name) {
    // Guava's Objects.firstNonNull is deprecated (moved to
    // MoreObjects.firstNonNull); a plain null check keeps identical behavior
    // without the deprecated API. As in the original, `name` is validated
    // unconditionally, even when `title` is non-null.
    Preconditions.checkNotNull(name);
    return title != null ? title : name + " API";
  }
}
|
t-hiramatsu/ENdoSnipe | Javelin/src/main/java/jp/co/acroquest/endosnipe/javelin/communicate/JavelinTelegramCreator.java | /*******************************************************************************
* ENdoSnipe 5.0 - (https://github.com/endosnipe)
*
* The MIT License (MIT)
*
* Copyright (c) 2012 Acroquest Technology Co.,Ltd.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
******************************************************************************/
package jp.co.acroquest.endosnipe.javelin.communicate;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import jp.co.acroquest.endosnipe.common.entity.ItemType;
import jp.co.acroquest.endosnipe.communicator.TelegramUtil;
import jp.co.acroquest.endosnipe.communicator.entity.Header;
import jp.co.acroquest.endosnipe.communicator.entity.ResponseBody;
import jp.co.acroquest.endosnipe.communicator.entity.Telegram;
import jp.co.acroquest.endosnipe.communicator.entity.TelegramConstants;
import jp.co.acroquest.endosnipe.javelin.MBeanManager;
import jp.co.acroquest.endosnipe.javelin.bean.Component;
import jp.co.acroquest.endosnipe.javelin.bean.ExcludeMonitor;
import jp.co.acroquest.endosnipe.javelin.bean.Invocation;
/**
* Invocationから電文を作成するクラスです。<br />
* @author acroquest
*
*/
public class JavelinTelegramCreator implements TelegramConstants
{
    /** Number of response-body items produced per invocation. */
    private static final int TELEGRAM_ITEM_COUNT = 26;

    /** Body index of the processing time (current interval). */
    private static final int PROCESS_TIME_INDEX_NUMBER = 1;

    /** Body index of the accumulated total time. */
    private static final int ACCUMULATED_TOTAL_INDEX_NUMBER = 2;

    /** Body index of the accumulated maximum time. */
    private static final int ACCUMULATED_MAX_INDEX_NUMBER = 3;

    /** Body index of the accumulated minimum time. */
    private static final int ACCUMULATED_MIN_INDEX_NUMBER = 4;

    /** Body index of the accumulated total CPU time. */
    private static final int ACCUMULATED_CPU_TOTAL_INDEX_NUMBER = 5;

    /** Body index of the accumulated maximum CPU time. */
    private static final int ACCUMULATED_CPU_MAX_INDEX_NUMBER = 6;

    /** Body index of the accumulated minimum CPU time. */
    private static final int ACCUMULATED_CPU_MIN_INDEX_NUMBER = 7;

    /** Body index of the accumulated total user time. */
    private static final int ACCUMULATED_USER_TOTAL_INDEX_NUMBER = 8;

    /** Body index of the accumulated maximum user time. */
    private static final int ACCUMULATED_USER_MAX_INDEX_NUMBER = 9;

    /** Body index of the accumulated minimum user time. */
    private static final int ACCUMULATED_USER_MIN_INDEX_NUMBER = 10;

    /** Body index of the total time. */
    private static final int TOTAL_TIME_INDEX_NUMBER = 11;

    /** Body index of the maximum processing time. */
    private static final int MAX_PROCESS_TIME_INDEX_NUMBER = 12;

    /** Body index of the minimum processing time. */
    private static final int MIN_PROCESS_TIME_INDEX_NUMBER = 13;

    /** Body index of the total CPU time. */
    private static final int CPU_TOTAL_TIME_INDEX_NUMBER = 14;

    /** Body index of the maximum CPU time. */
    private static final int MAX_CPU_TIME_INDEX_NUMBER = 15;

    /** Body index of the minimum CPU time. */
    private static final int MIN_CPU_TIME_INDEX_NUMBER = 16;

    /** Body index of the total user time. */
    private static final int USER_TOTAL_TIME_INDEX_NUMBER = 17;

    /** Body index of the maximum user time. */
    private static final int MAX_USER_TIME_INDEX_NUMBER = 18;

    /** Body index of the minimum user time. */
    private static final int MIN_USER_TIME_INDEX_NUMBER = 19;

    /** Body index of the exception occurrence count. */
    private static final int THROWABLE_COUNT_INDEX_NUMBER = 20;

    /** Body index of the caller class names of the method. */
    private static final int CALLER_INDEX_NUMBER = 21;

    /** Body index of the "is a measurement target" flag. */
    private static final int TARGET_FLAG_INDEX_NUMBER = 22;

    /** Body index of the "output to transaction graph" flag. */
    private static final int TRANSACTION_GRAPH_TARGET_INDEX_NUMBER = 23;

    /** Body index of the TAT alarm threshold. */
    private static final int TAT_ALARM_THRESHOLD_INDEX_NUMBER = 24;

    /** Body index of the CPU alarm threshold. */
    private static final int CPU_ALARM_THRESHOLD_INDEX_NUMBER = 25;

    /**
     * Private constructor: this class only exposes static factory methods.
     */
    private JavelinTelegramCreator()
    {
        // Do Nothing.
    }

    /***
     * Collect every Invocation registered with the MBean manager and convert
     * the whole list into telegram byte arrays.
     *
     * @return the full list of Invocations converted into telegrams.
     */
    public static List<byte[]> createAll()
    {
        // Gather the telegram source data.
        Component[] objComponentArr = MBeanManager.getAllComponents();
        List<Invocation> invocationList = new ArrayList<Invocation>();
        // Collect the invocations from every component.
        for (int i = 0; i < objComponentArr.length; i++)
        {
            invocationList.addAll(Arrays.asList(objComponentArr[i].getAllInvocation()));
        }
        Telegram objTelegram =
                create(invocationList, BYTE_TELEGRAM_KIND_GET, BYTE_REQUEST_KIND_RESPONSE);
        // Serialize the telegram object into byte arrays.
        List<byte[]> byteList = TelegramUtil.createTelegram(objTelegram);
        // Return the result.
        return byteList;
    }

    /**
     * Create a telegram representing the given list of invocations.
     *
     * @param invocations list of invocations
     * @param telegramKind telegram kind
     * @param requestKind request/response kind
     * @return telegram created from the invocation list
     */
    public static Telegram create(final List<Invocation> invocations, final byte telegramKind,
            final byte requestKind)
    {
        return create(invocations, null, telegramKind, requestKind);
    }

    /**
     * Create a telegram representing the given list of invocations.
     *
     * @param invocations list of invocations
     * @param accumulatedTimes accumulated times corresponding to each invocation
     * @param telegramKind telegram kind
     * @param requestKind request/response kind
     * @return telegram created from the invocation list
     */
    public static Telegram create(final List<Invocation> invocations,
            final List<Long> accumulatedTimes, final byte telegramKind, final byte requestKind)
    {
        return create(invocations, accumulatedTimes, telegramKind, requestKind, 0);
    }

    /**
     * Create a telegram representing the given list of invocations.
     * Each invocation contributes TELEGRAM_ITEM_COUNT consecutive response
     * bodies, placed at the *_INDEX_NUMBER offsets defined above.
     *
     * @param invocations list of invocations
     * @param accumulatedTimes accumulated times corresponding to each invocation
     * @param telegramKind telegram kind
     * @param requestKind request/response kind
     * @param telegramId telegram ID
     * @return telegram created from the invocation list
     */
    public static Telegram create(final List<Invocation> invocations,
            final List<Long> accumulatedTimes, final byte telegramKind, final byte requestKind,
            final long telegramId)
    {
        // Build the telegram header (the telegram length is not set here).
        Header objHeader = new Header();
        objHeader.setId(telegramId);
        objHeader.setByteRequestKind(requestKind);
        objHeader.setByteTelegramKind(telegramKind);

        // Build the telegram body.
        ResponseBody[] bodies = new ResponseBody[invocations.size() * TELEGRAM_ITEM_COUNT];
        for (int index = 0; index < invocations.size(); index++)
        {
            Invocation invocation = invocations.get(index);

            // Compose the object name as "className#methodName".
            StringBuffer strObjName = new StringBuffer();
            strObjName.append(invocation.getClassName());
            strObjName.append(CLASSMETHOD_SEPARATOR);
            strObjName.append(invocation.getMethodName());
            String objName = strObjName.toString();

            // Value holder for each item.
            Object[] objItemValueArr = null;

            int bodyIndex = index * TELEGRAM_ITEM_COUNT;

            // Call count
            objItemValueArr = new Long[1];
            objItemValueArr[0] = Long.valueOf(invocation.getCount());
            bodies[bodyIndex + 0] =
                    TelegramUtil.createResponseBody(objName, ITEMNAME_CALL_COUNT,
                                                    ItemType.ITEMTYPE_LONG, objItemValueArr);

            // From the second item on, the class/method is already identified,
            // so the object name is left empty.
            objName = "";

            // Processing time (falls back to 0 when no accumulated time is
            // supplied for this index).
            objItemValueArr = new Long[1];
            if (accumulatedTimes != null && index < accumulatedTimes.size())
            {
                objItemValueArr[0] = Long.valueOf(accumulatedTimes.get(index));
            }
            else
            {
                objItemValueArr[0] = Long.valueOf(0);
            }
            bodies[bodyIndex + PROCESS_TIME_INDEX_NUMBER] =
                    TelegramUtil.createResponseBody(objName, ITEMNAME_CURRENT_INTERVAL,
                                                    ItemType.ITEMTYPE_LONG, objItemValueArr);

            // Accumulated total time
            objItemValueArr = new Long[1];
            objItemValueArr[0] = Long.valueOf(invocation.getAccumulatedTotal());
            bodies[bodyIndex + ACCUMULATED_TOTAL_INDEX_NUMBER] =
                    TelegramUtil.createResponseBody(objName, ITEMNAME_ACCUMULATED_TOTAL_INTERVAL,
                                                    ItemType.ITEMTYPE_LONG, objItemValueArr);

            // Accumulated maximum time
            objItemValueArr = new Long[1];
            objItemValueArr[0] = Long.valueOf(invocation.getAccumulatedMaximum());
            bodies[bodyIndex + ACCUMULATED_MAX_INDEX_NUMBER] =
                    TelegramUtil.createResponseBody(objName, ITEMNAME_ACCUMULATED_MAXIMUM_INTERVAL,
                                                    ItemType.ITEMTYPE_LONG, objItemValueArr);

            // Accumulated minimum time
            objItemValueArr = new Long[1];
            objItemValueArr[0] = Long.valueOf(invocation.getAccumulatedMinimum());
            bodies[bodyIndex + ACCUMULATED_MIN_INDEX_NUMBER] =
                    TelegramUtil.createResponseBody(objName, ITEMNAME_ACCUMULATED_MINIMUM_INTERVAL,
                                                    ItemType.ITEMTYPE_LONG, objItemValueArr);

            // Accumulated total CPU time
            objItemValueArr = new Long[1];
            objItemValueArr[0] = Long.valueOf(invocation.getAccumulatedCpuTotal());
            bodies[bodyIndex + ACCUMULATED_CPU_TOTAL_INDEX_NUMBER] =
                    TelegramUtil.createResponseBody(objName,
                                                    ITEMNAME_ACCUMULATED_TOTAL_CPU_INTERVAL,
                                                    ItemType.ITEMTYPE_LONG, objItemValueArr);

            // Accumulated maximum CPU time
            objItemValueArr = new Long[1];
            objItemValueArr[0] = Long.valueOf(invocation.getAccumulatedCpuMaximum());
            bodies[bodyIndex + ACCUMULATED_CPU_MAX_INDEX_NUMBER] =
                    TelegramUtil.createResponseBody(objName,
                                                    ITEMNAME_ACCUMULATED_MAXIMUM_CPU_INTERVAL,
                                                    ItemType.ITEMTYPE_LONG, objItemValueArr);

            // Accumulated minimum CPU time
            objItemValueArr = new Long[1];
            objItemValueArr[0] = Long.valueOf(invocation.getAccumulatedCpuMinimum());
            bodies[bodyIndex + ACCUMULATED_CPU_MIN_INDEX_NUMBER] =
                    TelegramUtil.createResponseBody(objName,
                                                    ITEMNAME_ACCUMULATED_MINIMUM_CPU_INTERVAL,
                                                    ItemType.ITEMTYPE_LONG, objItemValueArr);

            // Accumulated total user time
            objItemValueArr = new Long[1];
            objItemValueArr[0] = Long.valueOf(invocation.getAccumulatedUserTotal());
            bodies[bodyIndex + ACCUMULATED_USER_TOTAL_INDEX_NUMBER] =
                    TelegramUtil.createResponseBody(objName,
                                                    ITEMNAME_ACCUMULATED_TOTAL_USER_INTERVAL,
                                                    ItemType.ITEMTYPE_LONG, objItemValueArr);

            // Accumulated maximum user time
            objItemValueArr = new Long[1];
            objItemValueArr[0] = Long.valueOf(invocation.getAccumulatedUserMaximum());
            bodies[bodyIndex + ACCUMULATED_USER_MAX_INDEX_NUMBER] =
                    TelegramUtil.createResponseBody(objName,
                                                    ITEMNAME_ACCUMULATED_MAXIMUM_USER_INTERVAL,
                                                    ItemType.ITEMTYPE_LONG, objItemValueArr);

            // Accumulated minimum user time
            objItemValueArr = new Long[1];
            objItemValueArr[0] = Long.valueOf(invocation.getAccumulatedUserMinimum());
            bodies[bodyIndex + ACCUMULATED_USER_MIN_INDEX_NUMBER] =
                    TelegramUtil.createResponseBody(objName,
                                                    ITEMNAME_ACCUMULATED_MINIMUM_USER_INTERVAL,
                                                    ItemType.ITEMTYPE_LONG, objItemValueArr);

            // Total time
            objItemValueArr = new Long[1];
            objItemValueArr[0] = Long.valueOf(invocation.getTotal());
            bodies[bodyIndex + TOTAL_TIME_INDEX_NUMBER] =
                    TelegramUtil.createResponseBody(objName, ITEMNAME_TOTAL_INTERVAL,
                                                    ItemType.ITEMTYPE_LONG, objItemValueArr);

            // Maximum processing time
            objItemValueArr = new Long[1];
            objItemValueArr[0] = Long.valueOf(invocation.getMaximum());
            bodies[bodyIndex + MAX_PROCESS_TIME_INDEX_NUMBER] =
                    TelegramUtil.createResponseBody(objName, ITEMNAME_MAXIMUM_INTERVAL,
                                                    ItemType.ITEMTYPE_LONG, objItemValueArr);

            // Minimum processing time
            objItemValueArr = new Long[1];
            objItemValueArr[0] = Long.valueOf(invocation.getMinimum());
            bodies[bodyIndex + MIN_PROCESS_TIME_INDEX_NUMBER] =
                    TelegramUtil.createResponseBody(objName, ITEMNAME_MINIMUM_INTERVAL,
                                                    ItemType.ITEMTYPE_LONG, objItemValueArr);

            // Total CPU time
            objItemValueArr = new Long[1];
            objItemValueArr[0] = Long.valueOf(invocation.getCpuTotal());
            bodies[bodyIndex + CPU_TOTAL_TIME_INDEX_NUMBER] =
                    TelegramUtil.createResponseBody(objName, ITEMNAME_TOTAL_CPU_INTERVAL,
                                                    ItemType.ITEMTYPE_LONG, objItemValueArr);

            // Maximum CPU time
            objItemValueArr = new Long[1];
            objItemValueArr[0] = Long.valueOf(invocation.getCpuMaximum());
            bodies[bodyIndex + MAX_CPU_TIME_INDEX_NUMBER] =
                    TelegramUtil.createResponseBody(objName, ITEMNAME_MAXIMUM_CPU_INTERVAL,
                                                    ItemType.ITEMTYPE_LONG, objItemValueArr);

            // Minimum CPU time
            objItemValueArr = new Long[1];
            objItemValueArr[0] = Long.valueOf(invocation.getCpuMinimum());
            bodies[bodyIndex + MIN_CPU_TIME_INDEX_NUMBER] =
                    TelegramUtil.createResponseBody(objName, ITEMNAME_MINIMUM_CPU_INTERVAL,
                                                    ItemType.ITEMTYPE_LONG, objItemValueArr);

            // Total user time
            objItemValueArr = new Long[1];
            objItemValueArr[0] = Long.valueOf(invocation.getUserTotal());
            bodies[bodyIndex + USER_TOTAL_TIME_INDEX_NUMBER] =
                    TelegramUtil.createResponseBody(objName, ITEMNAME_TOTAL_USER_INTERVAL,
                                                    ItemType.ITEMTYPE_LONG, objItemValueArr);

            // Maximum user time
            objItemValueArr = new Long[1];
            objItemValueArr[0] = Long.valueOf(invocation.getUserMaximum());
            bodies[bodyIndex + MAX_USER_TIME_INDEX_NUMBER] =
                    TelegramUtil.createResponseBody(objName, ITEMNAME_MAXIMUM_USER_INTERVAL,
                                                    ItemType.ITEMTYPE_LONG, objItemValueArr);

            // Minimum user time
            objItemValueArr = new Long[1];
            objItemValueArr[0] = Long.valueOf(invocation.getUserMinimum());
            bodies[bodyIndex + MIN_USER_TIME_INDEX_NUMBER] =
                    TelegramUtil.createResponseBody(objName, ITEMNAME_MINIMUM_USER_INTERVAL,
                                                    ItemType.ITEMTYPE_LONG, objItemValueArr);

            // Exception occurrence count
            objItemValueArr = new Long[1];
            objItemValueArr[0] = Long.valueOf(invocation.getThrowableCount());
            bodies[bodyIndex + THROWABLE_COUNT_INDEX_NUMBER] =
                    TelegramUtil.createResponseBody(objName,
                                                    ITEMNAME_JAVAPROCESS_EXCEPTION_OCCURENCE_COUNT,
                                                    ItemType.ITEMTYPE_LONG, objItemValueArr);

            // Caller class names of the method
            Invocation[] callerInvocations = invocation.getAllCallerInvocation();
            String[] callerNames = new String[callerInvocations.length];
            for (int callerIndex = 0; callerIndex < callerInvocations.length; callerIndex++)
            {
                callerNames[callerIndex] = callerInvocations[callerIndex].getClassName();
            }
            bodies[bodyIndex + CALLER_INDEX_NUMBER] =
                    TelegramUtil.createResponseBody(objName, ITEMNAME_ALL_CALLER_NAMES,
                                                    ItemType.ITEMTYPE_STRING, callerNames);

            // Whether this invocation is a measurement target
            objItemValueArr = new String[1];
            boolean isTarget = isTarget(invocation);
            objItemValueArr[0] = String.valueOf(isTarget);
            bodies[bodyIndex + TARGET_FLAG_INDEX_NUMBER] =
                    TelegramUtil.createResponseBody(objName, ITEMNAME_TARGET,
                                                    ItemType.ITEMTYPE_STRING, objItemValueArr);

            // Whether this invocation is output to the transaction graph
            objItemValueArr = new String[1];
            objItemValueArr[0] = String.valueOf(invocation.isResponseGraphOutputTarget());
            bodies[bodyIndex + TRANSACTION_GRAPH_TARGET_INDEX_NUMBER] =
                    TelegramUtil.createResponseBody(objName, ITEMNAME_TRANSACTION_GRAPH,
                                                    ItemType.ITEMTYPE_STRING, objItemValueArr);

            // TAT alarm threshold
            objItemValueArr = new Long[1];
            objItemValueArr[0] = Long.valueOf(invocation.getAlarmThreshold());
            bodies[bodyIndex + TAT_ALARM_THRESHOLD_INDEX_NUMBER] =
                    TelegramUtil.createResponseBody(objName, ITEMNAME_ALARM_THRESHOLD,
                                                    ItemType.ITEMTYPE_LONG, objItemValueArr);

            // CPU alarm threshold
            objItemValueArr = new Long[1];
            objItemValueArr[0] = Long.valueOf(invocation.getAlarmCpuThreshold());
            bodies[bodyIndex + CPU_ALARM_THRESHOLD_INDEX_NUMBER] =
                    TelegramUtil.createResponseBody(objName, ITEMNAME_ALARM_CPU_THRESHOLD,
                                                    ItemType.ITEMTYPE_LONG, objItemValueArr);
        }

        // Assemble the telegram object.
        Telegram objTelegram = new Telegram();
        objTelegram.setObjHeader(objHeader);
        objTelegram.setObjBody(bodies);

        return objTelegram;
    }

    /**
     * Decide whether the given invocation is a measurement target.
     * Exclusion wins over inclusion; otherwise the "exclude preferred"
     * setting decides.
     *
     * @param invocation invocation to check
     * @return <code>true</code> when the invocation is a measurement target
     */
    private static boolean isTarget(Invocation invocation)
    {
        boolean isTarget = ExcludeMonitor.isTarget(invocation);
        boolean isExclude = ExcludeMonitor.isExclude(invocation);
        if (isExclude == true)
        {
            return false;
        }
        if (isTarget == true)
        {
            return true;
        }
        return !ExcludeMonitor.isExcludePreffered(invocation);
    }
}
|
Pandinosaurus/pyquickhelper | _unittests/ut_helpgen/test_changes.py | """
@brief test log(time=8s)
@author <NAME>
"""
import sys
import os
import unittest
from pyquickhelper.loghelper.flog import fLOG
from pyquickhelper.helpgen.sphinx_main import generate_changes_repo
class TestChanges(unittest.TestCase):
    """Checks that :func:`generate_changes_repo` builds the changes table."""

    def test_changes(self):
        # Print log output only when the module is run as a script.
        fLOG(
            __file__,
            self._testMethodName,
            OutputPrint=__name__ == "__main__")
        path = os.path.abspath(os.path.split(__file__)[0])
        # The repository root is two levels above this test file.
        fold = os.path.normpath(os.path.join(path, "..", ".."))
        if os.path.exists(fold):
            fLOG("exists", fold)
            file = os.path.join(path, "out_table.rst")
            # Remove any leftover output from a previous run.
            if os.path.exists(file):
                os.remove(file)

            # Fix: removed the unused (and misspelled) nested helper
            # `modifiy_commit`, which was defined but never passed anywhere.
            generate_changes_repo(file, fold)

            with open(file, "r", encoding="utf8") as f:
                content = f.read()
            self.assertIn(".. plot::", content)
            # Keep only the table section for the remaining assertions.
            content = content[content.find("List of recent changes:"):]
            self.assertTrue(len(content) > 0)
            self.assertIn(":widths: auto", content)
        else:
            fLOG(
                "sorry, fixing a specific case on another project for accent problem")
# Allow running this test module directly as a script.
if __name__ == "__main__":
    unittest.main()
|
grbury/mystikos | kernel/pagesize.c | <filename>kernel/pagesize.c
#include <limits.h>
#include <unistd.h>
/*
 * getpagesize - return the size in bytes of a memory page.
 *
 * The kernel assumes 4 KiB pages throughout; the static assertion makes the
 * build fail if PAGE_SIZE ever diverges from that assumption.
 */
int getpagesize(void)
{
    _Static_assert(PAGE_SIZE == 4096, "getpagesize()");
    return PAGE_SIZE;
}
|
KablamoOSS/kombustion | pkg/parsers/properties/TopicRuleIotAnalyticsAction.go | <filename>pkg/parsers/properties/TopicRuleIotAnalyticsAction.go
package properties
// Code generated by go generate; DO NOT EDIT.
// It's generated by "github.com/KablamoOSS/kombustion/generate"
// TopicRuleIotAnalyticsAction is the generated CloudFormation property type
// for an IoT topic-rule IoT Analytics action. Fields are interface{} so they
// can hold either literal values or CloudFormation intrinsic functions.
// Documentation: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iot-topicrule-iotanalyticsaction.html
type TopicRuleIotAnalyticsAction struct {
	ChannelName interface{} `yaml:"ChannelName"` // IoT Analytics channel to send messages to
	RoleArn     interface{} `yaml:"RoleArn"`     // Role granting access to the channel
}
// Validate checks TopicRuleIotAnalyticsAction for configuration errors.
// The generated schema imposes no constraints, so the returned slice is
// always empty (but non-nil, matching the generator's convention).
func (resource TopicRuleIotAnalyticsAction) Validate() []error {
	return []error{}
}
|
NAO-CS270/NAOFS | include/test/test_utils.h | //
// Created by <NAME> on 2/29/20.
//
#ifndef NAOFS_TEST_UTILS_H
#define NAOFS_TEST_UTILS_H

/* Entry point that runs the utility test suite.
   NOTE(review): prefer `void test_utils_runner(void);` — an empty parameter
   list in a C declaration leaves the parameters unspecified. */
void test_utils_runner();
#endif //NAOFS_TEST_UTILS_H
|
Zhangjk2012/demo | src/main/scala/akka/SetRequest.scala | package akka
/**
 * Created by zhangjiangke on 2017/6/8.
 */

/** Immutable request message pairing a key with the value to store for it. */
case class SetRequest(key: String, value: Object)
|
mowatermelon/ice | react-materials/scaffolds/ice-creator-landingpage/src/pages/Home/components/PlatformIntro/index.js | import PlatformIntro from './PlatformIntro';
export default PlatformIntro;
|
kehuo/myweb | frontend/src/pages/MasterData/MasterMain.js | import React, { PureComponent } from "react";
import { connect } from "dva";
import {
Row,
Col,
Input,
Select,
Icon,
Table,
Drawer,
message,
Button,
Tabs
} from "antd";
import BodySystemPanel from "./MasterSub/BodySystemPanel";
import BodyStructruePanel from "./MasterSub/BodyStructruePanel";
import MedicinePanel from "./MasterSub/MedicinePanel";
import DiseasePanel from "./MasterSub/DiseasePanel";
import SymptomPanel from "./MasterSub/SymptomPanel";
import TreatmentPanel from "./MasterSub/TreatmentPanel";
import ExamPanel from "./MasterSub/ExamPanel";
import ExtensionPanel from "./MasterSub/ExtensionPanel";
import SearchPanel from "./MasterSub/SearchPanel";
import CustomerVectorPanel from "./MasterSub/CustomerVectorPanel";
import VectorRelationPanel from "./MasterSub/VectorRelationPanel";
import styles from "./MasterData.less";
let underscore = require("underscore");
import { routerRedux } from "dva/router";
@connect(({ masterData }) => ({
masterData
}))
export default class MasterMain extends React.Component {
constructor(props) {
super(props);
this.state = {
activeKey: "BodySystem" // 'BodySystem',
};
}
onChangeTab(key) {
this.setState({
activeKey: key
});
}
onQuery(opCode, params) {
const { dispatch } = this.props;
switch (opCode) {
case "get-list":
dispatch({
type: "masterData/fetch",
payload: params.payload,
callback: params.callback
});
break;
case "get-one":
dispatch({
type: "masterData/getOne",
payload: params.payload,
callback: params.callback
});
break;
case "edit-one":
dispatch({
type: "masterData/edit",
payload: params.payload,
callback: params.callback
});
break;
case "create-smart-one":
dispatch({
type: "masterData/createSmartOne",
payload: params.payload,
callback: params.callback
});
break;
case "delete-one":
dispatch({
type: "masterData/delete",
payload: params.payload,
callback: params.callback
});
break;
case "test-func":
dispatch({
type: "masterData/testFunc",
payload: params.payload,
callback: params.callback
});
break;
case "gen-func":
dispatch({
type: "masterData/genFunc",
payload: params.payload,
callback: params.callback
});
break;
case "get-one-complicate":
dispatch({
type: "masterData/getOneComplicate",
payload: params.payload,
callback: params.callback
});
break;
case "get-one-extension":
dispatch({
type: "masterData/getOneExtension",
payload: params.payload,
callback: params.callback
});
break;
default:
break;
}
}
onQuerySearch(params) {
const { dispatch } = this.props;
dispatch({
type: "masterData/search",
payload: params.payload,
callback: params.callback
});
}
setTabParentTree(refKey, parentTree, name) {
this.refs[refKey].directSetParentTree(parentTree, name);
}
changeTabWithAll(params, parentTree) {
const TabKeyMap = {
disease: "Disease",
medicine: "Medicine",
symptom: "Symptom",
treatment: "Treatment",
exam: "Exam"
};
let refKey = TabKeyMap[params.type];
let callback = this.setTabParentTree.bind(
this,
refKey,
parentTree,
params.name
);
this.setState(
{
activeKey: refKey
},
callback
);
}
onSwitchTab(params) {
const { dispatch } = this.props;
let callback = this.changeTabWithAll.bind(this, params);
dispatch({
type: "masterData/parentTree",
payload: params,
callback: callback
});
}
render() {
const { parentItem } = this.props.masterData;
const { activeKey } = this.state;
return (
<div>
<Tabs
onChange={this.onChangeTab.bind(this)}
type="card"
activeKey={activeKey}
>
<Tabs.TabPane tab="人体系统" key="BodySystem">
<BodySystemPanel
ref="BodySystem"
onQuery={this.onQuery.bind(this)}
/>
</Tabs.TabPane>
<Tabs.TabPane tab="人体器官" key="BodyStructure">
<BodyStructruePanel
ref="BodyStructure"
parentNode={parentItem}
onQuery={this.onQuery.bind(this)}
/>
</Tabs.TabPane>
<Tabs.TabPane tab="药物" key="Medicine">
<MedicinePanel
ref="Medicine"
parentNode={parentItem}
onQuery={this.onQuery.bind(this)}
/>
</Tabs.TabPane>
<Tabs.TabPane tab="疾病" key="Disease">
<DiseasePanel
ref="Disease"
parentNode={parentItem}
onQuery={this.onQuery.bind(this)}
/>
</Tabs.TabPane>
<Tabs.TabPane tab="症状" key="Symptom">
<SymptomPanel
ref="Symptom"
parentNode={parentItem}
onQuery={this.onQuery.bind(this)}
/>
</Tabs.TabPane>
<Tabs.TabPane tab="扩展属性" key="SymptomExtension">
<ExtensionPanel
ref="SymptomExtension"
parentNode={parentItem}
onQuery={this.onQuery.bind(this)}
/>
</Tabs.TabPane>
<Tabs.TabPane tab="检查" key="Exam">
<ExamPanel
ref="Exam"
parentNode={parentItem}
onQuery={this.onQuery.bind(this)}
/>
</Tabs.TabPane>
<Tabs.TabPane tab="治疗" key="Treatment">
<TreatmentPanel
ref="Treatment"
parentNode={parentItem}
onQuery={this.onQuery.bind(this)}
/>
</Tabs.TabPane>
<Tabs.TabPane tab="自定义矢量" key="CustomerVector">
<CustomerVectorPanel
parentNode={parentItem}
onQuery={this.onQuery.bind(this)}
/>
</Tabs.TabPane>
<Tabs.TabPane tab="矢量关系" key="VectorRelation">
<VectorRelationPanel
parentNode={parentItem}
onQuery={this.onQuery.bind(this)}
/>
</Tabs.TabPane>
<Tabs.TabPane tab="搜索" key="Search">
<SearchPanel
ref="Search"
onQuery={this.onQuerySearch.bind(this)}
onSwitchTab={this.onSwitchTab.bind(this)}
/>
</Tabs.TabPane>
</Tabs>
</div>
);
}
}
|
thexdesk/browseth | test/units/weiToEther.js | import { expect } from 'chai'
import * as units from '@browseth/units'
// weiToEther must accept plain numbers, decimal strings and hex number
// literals alike; 1 ether == 1e18 wei, and whole-zero results collapse
// to the bare string '0'.
it('should convert wei to ether', () => {
  expect(units.weiToEther(1000000000000)).to.equal('0.000001')
  expect(units.weiToEther('100000000000')).to.equal('0.0000001')
  expect(units.weiToEther('1000000000')).to.equal('0.000000001')
  expect(units.weiToEther(123222)).to.equal('0.000000000000123222')
  expect(units.weiToEther(0x2aa0022211f)).to.equal('0.000002929169932575')
  expect(units.weiToEther('2312132123123')).to.equal('0.000002312132123123')
  expect(units.weiToEther(0)).to.equal('0')
})
|
nguillaumin/nabaztag-server | net.violet.platform/src/main/java/net/violet/platform/dataobjects/AgendaData.java | <filename>net.violet.platform/src/main/java/net/violet/platform/dataobjects/AgendaData.java<gh_stars>1-10
package net.violet.platform.dataobjects;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import net.violet.platform.datamodel.Agenda;
import net.violet.platform.datamodel.AgendaImpl;
import net.violet.platform.util.StringShop;
public final class AgendaData extends RecordData<Agenda> {

    // Process-wide cache built eagerly at class-load time.
    // NOTE(review): the backing LinkedList is handed out as-is by
    // listAll(), so callers could mutate the shared cache — confirm
    // whether it should be wrapped in Collections.unmodifiableList.
    private static final List<AgendaData> CACHE = new LinkedList<AgendaData>();

    static {
        // Only agendas with id below 8 that are backed by AgendaImpl
        // make it into the cache.
        for (final Map.Entry<Long, Agenda> theEntry : AgendaImpl.listAll().entrySet()) {
            if ((theEntry.getKey() < 8) && (theEntry.getValue() instanceof AgendaImpl)) {
                AgendaData.CACHE.add(new AgendaData(theEntry.getValue()));
            }
        }
    }

    // Instances are only ever created for the static cache above.
    private AgendaData(Agenda inAgenda) {
        super(inAgenda);
    }

    // Returns the wrapped agenda's id, or 0 when no record is attached.
    public long getAgenda_id() {
        final Agenda theAgenda = getRecord();
        if (theAgenda != null) {
            return theAgenda.getAgenda_id();
        }
        return 0;
    }

    // Returns the wrapped agenda's key, or "" when no record is attached.
    public String getAgenda_key() {
        final Agenda theAgenda = getRecord();
        if (theAgenda != null) {
            return theAgenda.getAgenda_key();
        }
        return StringShop.EMPTY_STRING;
    }

    /**
     * Accessor for every cached agenda.
     * (Original French comment: "Accesseur sur toutes les animations
     * pour une langue".)
     */
    public static List<AgendaData> listAll() {
        return AgendaData.CACHE;
    }
}
|
naarani/selenev | test/jtest/yaml/task/format/ValidateTaskWithVars.java | package jtest.yaml.task.format;
import static org.junit.Assert.fail;
import org.junit.Test;
import org.naarani.ecantonchiro.yaml.YamlTaskLib;
/**
 * Validates that a YAML task file containing a long variable list can be
 * parsed, loaded and displayed without raising an exception.
 */
public class ValidateTaskWithVars {

    @Test
    public void testTaskWithLongVarsList() {
        String file = "examples/basic/test/testTaskVar.yaml";
        YamlTaskLib lib = new YamlTaskLib();
        try {
            lib.setFile( file );
            // Walk every document in the YAML stream.
            while( lib.next() ){
                lib.loadtasks();
                lib.showtasks();
            }
        } catch( Exception e ) {
            e.printStackTrace();
            // Separator added so the message no longer runs straight
            // into the exception text ("generic problemFoo").
            fail( "generic problem: " + e.getMessage() );
        }
    }
}
|
ISE2012/ch11 | tuple_iteration.py | teams = ((1, 'Ravens'),(2, 'Panthers'),(5, 'Eagles'),(7, 'Steelers'))
# Notice tuple of tuples
for index, name in teams:
print(index, name)
|
itsHyrican/braytech.org | src/store/index.js | <reponame>itsHyrican/braytech.org<gh_stars>0
import { createStore, combineReducers } from 'redux';
import viewport from './reducers/viewport.js';
import theme from './reducers/theme.js';
import tips from './reducers/tips.js';
import auth from './reducers/auth.js';
import member from './reducers/member.js';
import groupMembers from './reducers/groupMembers.js';
import refresh from './reducers/refresh.js';
import notifications from './reducers/notifications.js';
import pgcr from './reducers/pgcr.js';
import tooltips from './reducers/tooltips.js';
import triumphs from './reducers/triumphs.js';
import collectibles from './reducers/collectibles.js';
import maps from './reducers/maps.js';
import layouts from './reducers/layouts.js';
import visual from './reducers/visual.js';
// Single app-wide reducer: one state slice per imported reducer, keyed
// by the reducer's own name via object shorthand.
const rootReducer = combineReducers({
  viewport,
  theme,
  tips,
  auth,
  member,
  groupMembers,
  refresh,
  notifications,
  pgcr,
  tooltips,
  triumphs,
  collectibles,
  maps,
  layouts,
  visual
});
// NOTE(review): the Redux DevTools enhancer is commented out, leaving a
// dangling second-argument slot (trailing comma) in this call — legal
// but worth tidying or re-enabling deliberately.
const store = createStore(
  rootReducer,
  // window.__REDUX_DEVTOOLS_EXTENSION__ &&
  // window.__REDUX_DEVTOOLS_EXTENSION__({
  //   actionsBlacklist: ['PGCR_LOADED', 'PGCR_LOADING'],
  //   trace: true
  // })
);

export default store;
|
aiditto/tryst | frontend/backoffice/src/store/actions/channelsActions.js | <reponame>aiditto/tryst
import * as actionTypes from "./actionTypes";
import channelsService from "services/channels.service";
import * as actions from "./rootAction";
// Plain action creators for the channel CRUD lifecycle. Each operation
// comes as a begin / success / failure triple consumed by the channels
// reducer via the corresponding actionTypes constants.

// --- list ---
const getChannelList = () => ({
  type: actionTypes.GET_CHANNEL_LIST
});

const getChannelListSuccess = channels => ({
  type: actionTypes.GET_CHANNEL_LIST_SUCCESS,
  channels: channels
});

const getChannelListFailure = error => ({
  type: actionTypes.GET_CHANNEL_LIST_FAILURE,
  error: error
});

// --- create ---
const createChannel = () => ({
  type: actionTypes.CREATE_CHANNEL
});

const createChannelSuccess = channel => ({
  type: actionTypes.CREATE_CHANNEL_SUCCESS,
  channel: channel
});

const createChannelFailure = error => ({
  type: actionTypes.CREATE_CHANNEL_FAILURE,
  error: error
});

// --- delete ---
const deleteChannel = () => ({
  type: actionTypes.DELETE_CHANNEL
});

const deleteChannelSuccess = () => ({
  type: actionTypes.DELETE_CHANNEL_SUCCESS
});

const deleteChannelFailure = error => ({
  type: actionTypes.DELETE_CHANNEL_FAILURE,
  error: error
});

// --- full update (PUT) ---
const updateChannel = () => ({
  type: actionTypes.UPDATE_CHANNEL
});

const updateChannelSuccess = () => ({
  type: actionTypes.UPDATE_CHANNEL_SUCCESS
});

const updateChannelFailure = error => ({
  type: actionTypes.UPDATE_CHANNEL_FAILURE,
  error: error
});

// --- read single ---
const getChannel = () => ({
  type: actionTypes.GET_CHANNEL
});

const getChannelSuccess = channel => ({
  type: actionTypes.GET_CHANNEL_SUCCESS,
  channel: channel
});

const getChannelFailure = error => ({
  type: actionTypes.GET_CHANNEL_FAILURE,
  error: error
});

// --- partial update (PATCH) ---
const patchChannel = () => ({
  type: actionTypes.PATCH_CHANNEL
});

const patchChannelSuccess = () => ({
  type: actionTypes.PATCH_CHANNEL_SUCCESS
});

const patchChannelFailure = error => ({
  type: actionTypes.PATCH_CHANNEL_FAILURE,
  error: error
});
// Thunk: load the full channel list into the store.
export const getChannels = () => dispatch => {
  dispatch(getChannelList());
  channelsService.list(response => {
    const loaded = response.status === 200;
    const next = loaded
      ? getChannelListSuccess(response.data)
      : getChannelListFailure(response.data);
    dispatch(next);
  });
};
/**
 * Thunk: create a channel, then refresh the owning site's channel list
 * and surface a translated notification. `callback("success")` fires
 * only on HTTP 200 so the caller can e.g. close its dialog.
 * (Removed the unused `getState` parameter.)
 */
export const createNewChannel = (data, siteId, t, callback) => {
  return dispatch => {
    dispatch(createChannel());
    channelsService.addChannel(data, response => {
      if (response.status === 200) {
        callback("success");
        dispatch(createChannelSuccess(response.data));
        dispatch(actions.getSiteChannels(siteId));
        dispatch(actions.showNotification(t("notification.channel_added_success"), "success"));
      } else {
        dispatch(createChannelFailure(response.message));
        dispatch(actions.showNotification(t("notification.channel_added_error"), "error"));
      }
    });
  };
};
/**
 * Thunk: delete a channel, refresh the owning site's channel list and
 * surface a translated notification. Success is HTTP 204 (No Content).
 */
export const deleteChannelById = (channelId, siteId, t) => {
  return dispatch => {
    dispatch(deleteChannel());
    channelsService.deleteChannel(channelId, response => {
      if (response.status === 204) {
        dispatch(deleteChannelSuccess());
        dispatch(actions.getSiteChannels(siteId));
        dispatch(actions.showNotification(t("notification.channel_deleted_success"), "success"));
      } else {
        dispatch(deleteChannelFailure(response.message));
        dispatch(actions.showNotification(t("notification.channel_deleted_error"), "error"));
      }
    });
  };
};
/**
 * Thunk: full (PUT-style) update of a channel. On HTTP 204 the caller's
 * `callback("success")` fires, the site's channel list is refreshed and
 * a translated notification is shown.
 */
export const updateChannelById = (channelId, siteId, data, t, callback) => {
  return dispatch => {
    dispatch(updateChannel());
    channelsService.updateChannel(channelId, data, response => {
      if (response.status === 204) {
        callback("success");
        dispatch(updateChannelSuccess());
        dispatch(actions.getSiteChannels(siteId));
        dispatch(actions.showNotification(t("notification.channel_updated_success"), "success"));
      } else {
        dispatch(updateChannelFailure(response.message));
        dispatch(actions.showNotification(t("notification.channel_updated_error"), "error"));
      }
    });
  };
};
// Thunk: fetch a single channel by its id and store it.
export const getChannelById = channelId => dispatch => {
  dispatch(getChannel());
  channelsService.getChannelById(channelId, response => {
    const loaded = response.status === 200;
    dispatch(
      loaded ? getChannelSuccess(response.data) : getChannelFailure(response.data)
    );
  });
};
/**
 * Thunk: partial (PATCH-style) update of a channel. On HTTP 204 the
 * channel is re-fetched; callers on pages other than the channel detail
 * page also get the owning site's channel list refreshed (the site id
 * is read from the current store state via getState).
 */
export const patchChannelById = (channelId, data, t, page, callback) => {
  return (dispatch, getState) => {
    dispatch(patchChannel());
    channelsService.patchChannel(channelId, data, response => {
      if (response.status === 204) {
        callback("success");
        dispatch(patchChannelSuccess());
        dispatch(actions.getChannelById(channelId));
        if (page && page !== "channel") {
          const siteId = getState().sites.site.id;
          dispatch(actions.getSiteChannels(siteId));
        }
        dispatch(actions.showNotification(t("notification.channel_patch_success"), "success"));
      } else {
        dispatch(patchChannelFailure(response.message));
        dispatch(actions.showNotification(t("notification.channel_patch_error"), "error"));
      }
    });
  };
};
/**
 * Thunk: look a channel up by its identifier string.
 * NOTE(review): the service returns a list and only data[0] is stored —
 * an empty result dispatches success with `undefined`; confirm the
 * reducer tolerates that.
 */
export const getChannelByIdentifier = channelIdentifier => {
  return dispatch => {
    dispatch(getChannel());
    channelsService.getChannelByIdentifier(channelIdentifier, response => {
      if (response.status === 200) {
        dispatch(getChannelSuccess(response.data[0]));
      } else {
        dispatch(getChannelFailure(response.data));
      }
    });
  };
};
|
highmed/highmed_dsf | dsf-fhir/dsf-fhir-server/src/main/java/org/highmed/dsf/fhir/search/parameters/ValueSetStatus.java | <reponame>highmed/highmed_dsf
package org.highmed.dsf.fhir.search.parameters;
import org.highmed.dsf.fhir.search.SearchQueryParameter.SearchParameterDefinition;
import org.highmed.dsf.fhir.search.parameters.basic.AbstractStatusParameter;
import org.hl7.fhir.r4.model.Enumerations.SearchParamType;
import org.hl7.fhir.r4.model.ValueSet;
// Token search parameter "status" for FHIR ValueSet resources; the
// actual matching logic lives in AbstractStatusParameter.
@SearchParameterDefinition(name = ValueSetStatus.PARAMETER_NAME, definition = "http://hl7.org/fhir/SearchParameter/ValueSet-status", type = SearchParamType.TOKEN, documentation = "The current status of the value set")
public class ValueSetStatus extends AbstractStatusParameter<ValueSet>
{
	public ValueSetStatus()
	{
		// "value_set" is the resource name used by the SQL search layer.
		super("value_set", ValueSet.class);
	}
}
|
suluner/tencentcloud-sdk-cpp | domain/src/v20180808/model/RenewDomainBatchRequest.cpp | /*
* Copyright (c) 2017-2019 THL A29 Limited, a Tencent company. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <tencentcloud/domain/v20180808/model/RenewDomainBatchRequest.h>
#include <tencentcloud/core/utils/rapidjson/document.h>
#include <tencentcloud/core/utils/rapidjson/writer.h>
#include <tencentcloud/core/utils/rapidjson/stringbuffer.h>
using namespace TencentCloud::Domain::V20180808::Model;
using namespace std;
// All "HasBeenSet" flags start false so ToJsonString only serializes
// fields the caller explicitly assigned.
RenewDomainBatchRequest::RenewDomainBatchRequest() :
    m_periodHasBeenSet(false),
    m_domainsHasBeenSet(false),
    m_payModeHasBeenSet(false),
    m_autoRenewFlagHasBeenSet(false)
{
}
// Serializes the request to a JSON object string, emitting only the
// fields whose setters were called (tracked by the HasBeenSet flags).
string RenewDomainBatchRequest::ToJsonString() const
{
    rapidjson::Document d;
    d.SetObject();
    rapidjson::Document::AllocatorType& allocator = d.GetAllocator();

    if (m_periodHasBeenSet)
    {
        rapidjson::Value iKey(rapidjson::kStringType);
        string key = "Period";
        iKey.SetString(key.c_str(), allocator);
        d.AddMember(iKey, m_period, allocator);
    }

    if (m_domainsHasBeenSet)
    {
        rapidjson::Value iKey(rapidjson::kStringType);
        string key = "Domains";
        iKey.SetString(key.c_str(), allocator);
        // Strings are copied into the document's allocator so they stay
        // valid for the document's lifetime.
        d.AddMember(iKey, rapidjson::Value(rapidjson::kArrayType).Move(), allocator);

        for (auto itr = m_domains.begin(); itr != m_domains.end(); ++itr)
        {
            d[key.c_str()].PushBack(rapidjson::Value().SetString((*itr).c_str(), allocator), allocator);
        }
    }

    if (m_payModeHasBeenSet)
    {
        rapidjson::Value iKey(rapidjson::kStringType);
        string key = "PayMode";
        iKey.SetString(key.c_str(), allocator);
        d.AddMember(iKey, m_payMode, allocator);
    }

    if (m_autoRenewFlagHasBeenSet)
    {
        rapidjson::Value iKey(rapidjson::kStringType);
        string key = "AutoRenewFlag";
        iKey.SetString(key.c_str(), allocator);
        d.AddMember(iKey, m_autoRenewFlag, allocator);
    }

    rapidjson::StringBuffer buffer;
    rapidjson::Writer<rapidjson::StringBuffer> writer(buffer);
    d.Accept(writer);
    return buffer.GetString();
}
// Accessors for Period (renewal term). Setter flips the HasBeenSet flag
// so the field is included in ToJsonString.
int64_t RenewDomainBatchRequest::GetPeriod() const
{
    return m_period;
}

void RenewDomainBatchRequest::SetPeriod(const int64_t& _period)
{
    m_period = _period;
    m_periodHasBeenSet = true;
}

bool RenewDomainBatchRequest::PeriodHasBeenSet() const
{
    return m_periodHasBeenSet;
}
// Accessors for Domains (the batch of domain names to renew).
// NOTE(review): getter returns the vector by value — a copy per call.
vector<string> RenewDomainBatchRequest::GetDomains() const
{
    return m_domains;
}

void RenewDomainBatchRequest::SetDomains(const vector<string>& _domains)
{
    m_domains = _domains;
    m_domainsHasBeenSet = true;
}

bool RenewDomainBatchRequest::DomainsHasBeenSet() const
{
    return m_domainsHasBeenSet;
}
// Accessors for PayMode (payment method selector).
int64_t RenewDomainBatchRequest::GetPayMode() const
{
    return m_payMode;
}

void RenewDomainBatchRequest::SetPayMode(const int64_t& _payMode)
{
    m_payMode = _payMode;
    m_payModeHasBeenSet = true;
}

bool RenewDomainBatchRequest::PayModeHasBeenSet() const
{
    return m_payModeHasBeenSet;
}
// Accessors for AutoRenewFlag (whether renewal repeats automatically).
int64_t RenewDomainBatchRequest::GetAutoRenewFlag() const
{
    return m_autoRenewFlag;
}

void RenewDomainBatchRequest::SetAutoRenewFlag(const int64_t& _autoRenewFlag)
{
    m_autoRenewFlag = _autoRenewFlag;
    m_autoRenewFlagHasBeenSet = true;
}

bool RenewDomainBatchRequest::AutoRenewFlagHasBeenSet() const
{
    return m_autoRenewFlagHasBeenSet;
}
|
jamesward/blueeyes | src/main/scala/blueeyes/core/http/combinators/HttpRequestCombinators.scala | <reponame>jamesward/blueeyes
package blueeyes.core.http.combinators
import blueeyes.core.http.{HttpRequest, HttpResponse, HttpException, HttpStatus}
import blueeyes.core.http.HttpStatusCodes._
import blueeyes.concurrent.Future
/**
*
* <pre>
* post {
* refineContentType[JObject] {
* requireContent(!(_ \ "adId" -->? classOf[JString]).isEmpty) { request =>
* val adId = (request.content \ "adId").deserialize[String]
* }
* }
* }
* </pre>
*
*/
/**
 * Combinators that guard request handlers on the runtime type and
 * content of an incoming request.
 */
trait HttpRequestCombinators {
  private type Handler[T, S] = HttpRequest[Future[T]] => Future[HttpResponse[S]]
  private type Handler2[T, S, E1] = HttpRequest[Future[T]] => E1 => Future[HttpResponse[S]]

  /** Runs `f` only when the content's runtime class is exactly `T`;
   * otherwise produces a BadRequest response. */
  def refineContentType[S >: T, T](f: Handler[T, S])(implicit m: Manifest[T]): Handler[S, S] = (request: HttpRequest[Future[S]]) => refineContentType(request, f)

  /** Curried variant of [[refineContentType]] taking one extra argument. */
  def refineContentType2[S >: T, T, E1](f: Handler2[T, S, E1])(implicit m: Manifest[T]): Handler2[S, S, E1] =
    (request: HttpRequest[Future[S]]) => (e: E1) => refineContentType(request, (r: HttpRequest[Future[T]]) => f(r)(e))

  /** Runs `f` only when the content satisfies the predicate `p`. */
  def requireContent[T, S](p: T => Boolean)(f: Handler[T, S])(implicit m: Manifest[T]): Handler[T, S] = (request: HttpRequest[Future[T]]) => requireContent(p, request, f)

  /** Curried variant of [[requireContent]] taking one extra argument. */
  def requireContent2[T, S, E1](p: T => Boolean)(f: Handler2[T, S, E1])(implicit m: Manifest[T]): Handler2[T, S, E1] =
    (request: HttpRequest[Future[T]]) => (e: E1) => requireContent(p, request, (r: HttpRequest[Future[T]]) => f(r)(e))

  private def refineContentType[S >: T, T](request: HttpRequest[Future[S]], f: Handler[T, S])(implicit m: Manifest[T]): Future[HttpResponse[S]] = {
    request.content match {
      case None =>
        // BUGFIX: m.erasure is already a java.lang.Class; the previous
        // m.erasure.getClass.getName always printed "java.lang.Class".
        Future.sync(HttpResponse(HttpStatus(BadRequest, "Expected " + m.erasure.getName + " but found nothing")))

      case Some(future) => future.flatMap{ value =>
        if (value.getClass == m.erasure) {
          val t: T = value.asInstanceOf[T]

          f(request.copy(content = Some(Future.sync(t))))
        }
        else Future.sync(HttpResponse(HttpStatus(BadRequest, "Expected " + m.erasure.getName + " but found: " + value.getClass.getName)))
      }
    }
  }

  private def requireContent[T, S](p: T => Boolean, request: HttpRequest[Future[T]], f: Handler[T, S])(implicit m: Manifest[T]): Future[HttpResponse[S]] = {
    request.content match {
      case None =>
        // Same class-name fix as in refineContentType above.
        throw HttpException(BadRequest, "Expected " + m.erasure.getName + " but found nothing")

      case Some(future) => future.flatMap{ value =>
        if (p(value)) f(request) else Future.sync(HttpResponse(HttpStatus(BadRequest)))
      }
    }
  }
}
object HttpRequestCombinators extends HttpRequestCombinators
|
gis-rpd/pipelines | bcl2fastq/mongo_status_qc.py | #!/usr/bin/env python3
"""Mongo status update for bcl2fastq QC checks (Demultiplex summary)
"""
# standard library imports
import logging
import sys
import os
import argparse
import getpass
import subprocess
#--- project specific imports
#
# add lib dir for this pipeline installation to PYTHONPATH
LIB_PATH = os.path.abspath(
os.path.join(os.path.dirname(os.path.realpath(__file__)), "..", "lib"))
if LIB_PATH not in sys.path:
sys.path.insert(0, LIB_PATH)
from pipelines import generate_window
from pipelines import is_production_user
from mongodb import mongodb_conn
__author__ = "<NAME>"
__email__ = "<EMAIL>"
__copyright__ = "2016 Genome Institute of Singapore"
__license__ = "The MIT License (MIT)"
# global logger
# Configured once at import time; "(unknown)" is a literal placeholder
# in the log format where sibling pipeline scripts embed an identifier.
logger = logging.getLogger(__name__)
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter(
    '[{asctime}] {levelname:8s} (unknown) {message}', style='{'))
logger.addHandler(handler)
def main():
    """Poll MongoDB for recent, successfully demultiplexed runs that have
    no QC verdict yet, run bcl2fastq_qc.py on each analysis directory and
    record the resulting QC status back into the run document.
    """
    bcl2fastq_qc_script = os.path.abspath(os.path.join(
        os.path.dirname(sys.argv[0]), "bcl2fastq_qc.py"))
    assert os.path.exists(bcl2fastq_qc_script)

    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('-t', "--testing", action='store_true',
                        help="Use MongoDB test server")
    default = 14
    parser.add_argument('-w', '--win', type=int, default=default,
                        help="Number of days to look back (default {})".format(default))
    parser.add_argument('-n', "--dry-run", action='store_true',
                        help="Dry run")
    parser.add_argument('--no-mail', action='store_true',
                        help="Don't send email on detected failures")
    parser.add_argument('-v', '--verbose', action='count', default=0,
                        help="Increase verbosity")
    parser.add_argument('-q', '--quiet', action='count', default=0,
                        help="Decrease verbosity")
    args = parser.parse_args()

    # Repeatable -v and -q for setting logging level.
    # See https://www.reddit.com/r/Python/comments/3nctlm/what_python_tools_should_i_be_using_on_every/
    # and https://gist.github.com/andreas-wilm/b6031a84a33e652680d4
    # script -vv -> DEBUG
    # script -v -> INFO
    # script -> WARNING
    # script -q -> ERROR
    # script -qq -> CRITICAL
    # script -qqq -> no logging at all
    logger.setLevel(logging.WARN + 10*args.quiet - 10*args.verbose)

    if not is_production_user():
        logger.warning("Not a production user. Skipping DB update")
        sys.exit(1)

    connection = mongodb_conn(args.testing)
    if connection is None:
        sys.exit(1)
    db = connection.gisds.runcomplete
    epoch_present, epoch_back = generate_window(args.win)
    # Runs that finished successfully but have no QC verdict recorded yet.
    results = db.find({"analysis.Status": "SUCCESS", "analysis.QC_status": {"$exists": 0},
                       "timestamp": {"$gt": epoch_back, "$lt": epoch_present}})
    logger.info("Found %s runs", results.count())

    for record in results:
        run_number = record['run']
        # (Removed a dead pre-loop `analysis = record['analysis']` and a
        # dead `status = analysis['Status']` that were immediately shadowed.)
        for (analysis_count, analysis) in enumerate(record['analysis']):
            out_dir = analysis["out_dir"]
            analysis_id = analysis['analysis_id']
            # Only QC analyses whose demultiplexing completed successfully.
            if analysis['Status'] != "SUCCESS":
                logger.info("Analysis is not completed successfully under %s", out_dir)
                continue
            if not os.path.exists(out_dir):
                logger.critical("Following directory listed in DB doesn't exist: %s", out_dir)
                continue
            if args.testing:
                bcl2fastq_qc_out = os.path.join(out_dir, "bcl2fastq_qc.test.txt")
            else:
                bcl2fastq_qc_out = os.path.join(out_dir, "bcl2fastq_qc.txt")
            # A pre-existing report means QC already ran: never overwrite.
            if os.path.exists(bcl2fastq_qc_out):
                logger.critical("Refusing to overwrite existing file %s. Skipping QC check", bcl2fastq_qc_out)
                continue
            bcl2fastq_qc_cmd = [bcl2fastq_qc_script, '-d', out_dir]
            if args.no_mail:
                bcl2fastq_qc_cmd.append("--no-mail")
            if args.dry_run:
                logger.warning("Skipped following run: %s", out_dir)
                continue
            try:
                # Positional key of this analysis within the run document.
                QC_status = "analysis.{}.QC_status".format(analysis_count)
                status = subprocess.check_output(bcl2fastq_qc_cmd, stderr=subprocess.STDOUT)
                # status is bytes; str() renders "b'...'" which still
                # contains the QC_FAILED marker if present.
                if "QC_FAILED" in str(status):
                    db.update({"run": run_number, 'analysis.analysis_id': analysis_id},
                              {"$set": {QC_status: "FAILED"}})
                    logger.info("Demux QC failed for run: %s", run_number)
                else:
                    db.update({"run": run_number, 'analysis.analysis_id': analysis_id},
                              {"$set": {QC_status: "SUCCESS"}})
                    logger.info("Demux QC SUCCESS for run: %s", run_number)
                with open(bcl2fastq_qc_out, 'w') as fh:
                    fh.write(status.decode())
            except subprocess.CalledProcessError as e:
                logger.fatal("The following command failed with return code %s: %s",
                             e.returncode, ' '.join(bcl2fastq_qc_cmd))
                logger.fatal("Output: %s", e.output.decode())
                logger.fatal("Exiting")
    connection.close()
if __name__ == "__main__":
    # NOTE(review): this logs before main() adjusts the level, so at the
    # default WARNING threshold the message is suppressed — confirm intent.
    logger.info("Demultiplexing QC status")
    main()
|
Acidburn0zzz/peridot | lib/cobalt/cobalt.cc | <reponame>Acidburn0zzz/peridot
// Copyright 2017 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "peridot/lib/cobalt/cobalt.h"
#include <set>
#include "lib/app/cpp/connect.h"
#include "lib/app/fidl/application_environment.fidl.h"
#include "lib/cobalt/fidl/cobalt.fidl.h"
#include "lib/fxl/functional/auto_call.h"
#include "lib/fxl/functional/make_copyable.h"
#include "lib/fxl/logging.h"
#include "lib/fxl/macros.h"
#include "peridot/lib/backoff/exponential_backoff.h"
#include "peridot/lib/callback/waiter.h"
namespace cobalt {
// Captures the owning thread's |task_runner| and immediately starts the
// first connection attempt to the Cobalt encoder service.
CobaltContext::CobaltContext(fxl::RefPtr<fxl::TaskRunner> task_runner,
                             app::ApplicationContext* app_context,
                             int32_t project_id,
                             int32_t metric_id,
                             int32_t encoding_id)
    : task_runner_(std::move(task_runner)),
      app_context_(app_context),
      project_id_(project_id),
      metric_id_(metric_id),
      encoding_id_(encoding_id) {
  ConnectToCobaltApplication();
}
// Events still queued or in flight at destruction are dropped; warn so
// the loss is at least visible in the logs.
CobaltContext::~CobaltContext() {
  if (!events_in_transit_.empty() || !events_to_send_.empty()) {
    FXL_LOG(WARNING) << "Disconnecting connection to cobalt with event still "
                        "pending... Events will be lost.";
  }
}
// Thread-safe entry point: when called off the owning thread, the event
// is bounced onto |task_runner_| and re-enters via the free function.
void CobaltContext::ReportEvent(uint32_t event) {
  if (task_runner_->RunsTasksOnCurrentThread()) {
    ReportEventOnMainThread(event);
    return;
  }

  // Hop to the main thread, and go back to the global object dispatcher.
  task_runner_->PostTask(
      [event, this]() { ::cobalt::ReportEvent(event, this); });
}
// (Re)establishes the encoder connection and flushes any queued events.
void CobaltContext::ConnectToCobaltApplication() {
  auto encoder_factory =
      app_context_->ConnectToEnvironmentService<cobalt::CobaltEncoderFactory>();
  encoder_factory->GetEncoder(project_id_, encoder_.NewRequest());
  // Any channel error triggers a delayed reconnect via OnConnectionError.
  encoder_.set_error_handler([this] { OnConnectionError(); });

  SendEvents();
}
// Requeues the in-flight events and retries the connection using the
// exponential backoff policy in |backoff_|.
void CobaltContext::OnConnectionError() {
  FXL_LOG(ERROR) << "Connection to cobalt failed. Reconnecting after a delay.";

  events_to_send_.insert(events_in_transit_.begin(), events_in_transit_.end());
  events_in_transit_.clear();
  encoder_.Unbind();
  task_runner_->PostDelayedTask([this] { ConnectToCobaltApplication(); },
                                backoff_.GetNext());
}
// Must run on the owning thread. Queues the event; a new send only
// starts when connected and no batch is already in flight (SendEvents
// drains the queue again when the current batch completes).
void CobaltContext::ReportEventOnMainThread(uint32_t event) {
  events_to_send_.insert(event);

  if (!encoder_ || !events_in_transit_.empty()) {
    return;
  }

  SendEvents();
}
// Moves the pending queue into |events_in_transit_| and encodes each
// event. Per-event outcome: permanent failures and successes are erased
// from the in-transit set — note the DELIBERATE case fallthrough from
// INVALID_ARGUMENTS/FAILED_PRECONDITION through OBSERVATION_TOO_BIG to
// OK — while transient failures stay in the set for re-queueing after a
// backoff delay.
void CobaltContext::SendEvents() {
  FXL_DCHECK(events_in_transit_.empty());

  if (events_to_send_.empty()) {
    return;
  }

  events_in_transit_ = std::move(events_to_send_);
  events_to_send_.clear();

  auto waiter = callback::CompletionWaiter::Create();
  for (auto event : events_in_transit_) {
    auto callback = waiter->NewCallback();
    encoder_->AddIndexObservation(
        metric_id_, encoding_id_, static_cast<uint32_t>(event),
        [this, event, callback = std::move(callback)](cobalt::Status status) {
          auto cleanup = fxl::MakeAutoCall(callback);

          switch (status) {
            case cobalt::Status::INVALID_ARGUMENTS:
            case cobalt::Status::FAILED_PRECONDITION:
              FXL_DCHECK(false) << "Unexpected status: " << status;
            case cobalt::Status::OBSERVATION_TOO_BIG:  // fall through
              // Log the failure.
              FXL_LOG(WARNING) << "Cobalt rejected event: " << event
                               << " with status: " << status;
            case cobalt::Status::OK:  // fall through
              // Remove the event from the set of
              // events to send.
              events_in_transit_.erase(event);
              break;
            case cobalt::Status::INTERNAL_ERROR:
            case cobalt::Status::SEND_FAILED:
            case cobalt::Status::TEMPORARILY_FULL:
              // Keep the event for re-queueing.
              break;
          }
        });
  }
  waiter->Finalize([this]() {
    // No transient errors.
    if (events_in_transit_.empty()) {
      backoff_.Reset();
      // Send any event received while |events_in_transit_| was not empty.
      SendEvents();
      return;
    }
    // A transient error happened, retry after a delay.
    task_runner_->PostDelayedTask(
        [this]() {
          events_to_send_.insert(events_in_transit_.begin(),
                                 events_in_transit_.end());
          events_in_transit_.clear();
          SendEvents();
        },
        backoff_.GetNext());
  });
}
// Creates the process-wide CobaltContext behind |*cobalt_context| and
// returns an AutoCall that tears it down (and nulls the pointer) when
// the returned guard goes out of scope.
fxl::AutoCall<fxl::Closure> InitializeCobalt(
    fxl::RefPtr<fxl::TaskRunner> task_runner,
    app::ApplicationContext* app_context,
    int32_t project_id,
    int32_t metric_id,
    int32_t encoding_id,
    CobaltContext** cobalt_context) {
  FXL_DCHECK(!*cobalt_context);
  auto context = std::make_unique<CobaltContext>(
      std::move(task_runner), app_context, project_id, metric_id, encoding_id);
  *cobalt_context = context.get();
  return fxl::MakeAutoCall<fxl::Closure>(fxl::MakeCopyable(
      [context = std::move(context), cobalt_context]() mutable {
        context.reset();
        *cobalt_context = nullptr;
      }));
}
// Convenience wrapper that is a safe no-op while cobalt is not (yet)
// initialized.
void ReportEvent(uint32_t event, CobaltContext* cobalt_context) {
  if (cobalt_context) {
    cobalt_context->ReportEvent(event);
  }
}
} // namespace cobalt
|
lacknere/platform | src/Administration/Resources/app/administration/test/e2e/cypress/integration/settings/sw-mail-template/visual.spec.js | <reponame>lacknere/platform
// / <reference types="Cypress" />
import SettingsPageObject from '../../../support/pages/module/sw-settings.page-object';
describe('Mail templates: Check module navigation in settings', () => {
  // eslint-disable-next-line no-undef
  before(() => {
    // Clean previous state and prepare Administration
    cy.setToInitialState()
      .then(() => {
        cy.loginViaApi();
      })
      .then(() => {
        cy.setLocaleToEnGb();
      })
      .then(() => {
        cy.openInitialPage(Cypress.env('admin'));
      });
  });

  it('@visual: check appearance of email templates module', () => {
    const page = new SettingsPageObject();

    // NOTE(review): cy.server()/cy.route() are the pre-Cypress-6 network
    // stubs — confirm the pinned Cypress version before migrating to
    // cy.intercept().
    cy.server();
    cy.route({
      url: `${Cypress.env('apiPath')}/search/mail-template`,
      method: 'post'
    }).as('getData');

    cy.get('.sw-dashboard-index__welcome-text').should('be.visible');
    cy.clickMainMenuItem({
      targetPath: '#/sw/settings/index',
      mainMenuId: 'sw-settings'
    });
    cy.get('#sw-mail-template').click();

    cy.wait('@getData').then((xhr) => {
      expect(xhr).to.have.property('status', 200);
    });
    cy.get('.sw-data-grid-skeleton').should('not.exist');

    // Sort the listing so the snapshot is deterministic.
    cy.sortAndCheckListingAscViaColumn('Type', 'Contact form');

    cy.wait('@getData').then((xhr) => {
      expect(xhr).to.have.property('status', 200);
    });
    cy.get('.sw-data-grid-skeleton').should('not.exist');
    cy.get('.sw-mail-templates-list-grid .sw-data-grid__row--0').should('be.visible');

    // Open the first template's edit view via its context menu.
    cy.clickContextMenuItem(
      '.sw-entity-listing__context-menu-edit-action',
      page.elements.contextMenuButton,
      `#mailTemplateGrid ${page.elements.dataGridRow}--0`
    );
    cy.get('.sw-loader').should('not.exist');
    cy.get('.sw-media-upload-v2__dropzone').should('be.visible');
    cy.get('.sw-media-upload-v2__switch-mode .sw-context-button__button').should('be.visible');

    cy.takeSnapshot('[Mail templates] Details', '.sw-mail-template-detail');
  });
});
|
airkits/nethopper | base/queue/chan_queue.go | <filename>base/queue/chan_queue.go
// MIT License
// Copyright (c) 2019 gonethopper
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
// * @Author: ankye
// * @Date: 2019-12-18 10:47:24
// * @Last Modified by: ankye
// * @Last Modified time: 2019-12-18 10:47:24
package queue
import (
"sync/atomic"
"time"
)
// ChanQueue is a Queue implementation backed by a buffered channel.
type ChanQueue struct {
	innerChan  chan interface{} // buffered channel holding the queued items
	capacity   int32            // fixed buffer capacity passed to NewChanQueue
	size       int32            // approximate item count, maintained via sync/atomic
	timer      *time.Timer      // NOTE(review): initialized in NewChanQueue but never used in this file
	closedChan chan struct{}    // closed by Close() to signal shutdown
}
// NewChanQueue creates a ChanQueue whose channel buffers up to capacity items.
// The returned value satisfies the Queue interface.
func NewChanQueue(capacity int32) Queue {
	return &ChanQueue{
		innerChan: make(chan interface{}, capacity),
		capacity:  capacity,
		size:      0,
		timer:     time.NewTimer(time.Second), // NOTE(review): never stopped or used here
		closedChan: make(chan struct{}),
	}
}
// Pop blocks until an item is available and returns it, decrementing the
// size counter.
//
// NOTE(review): Close() never closes innerChan, so the !ok branch is
// effectively unreachable — a Pop on an empty, closed queue blocks forever.
func (q *ChanQueue) Pop() (val interface{}, err error) {
	v, ok := <-q.innerChan
	if ok {
		atomic.AddInt32(&q.size, -1)
		return v, nil
	}
	return nil, ErrQueueIsClosed
}
// AsyncPop returns a buffered item immediately, or ErrQueueEmpty when the
// queue has nothing available; it never blocks.
func (q *ChanQueue) AsyncPop() (val interface{}, err error) {
	select {
	case v, ok := <-q.innerChan:
		if ok {
			atomic.AddInt32(&q.size, -1)
			return v, nil
		}
		// Unreachable in practice: innerChan is never closed (see Close).
		return nil, ErrQueueIsClosed
	default:
		return nil, ErrQueueEmpty
	}
}
// Push enqueues x, blocking while the buffer is full.
//
// The IsClosed pre-check is best-effort: a Close racing between the check
// and the send is harmless because innerChan itself is never closed, but a
// Push blocked on a full queue will NOT be woken by Close.
func (q *ChanQueue) Push(x interface{}) error {
	if q.IsClosed() {
		return ErrQueueIsClosed
	}
	q.innerChan <- x
	atomic.AddInt32(&q.size, 1)
	return nil
}
// AsyncPush enqueues x without blocking: it returns ErrQueueFull when the
// buffer has no room and ErrQueueIsClosed after Close.
func (q *ChanQueue) AsyncPush(x interface{}) error {
	if q.IsClosed() {
		return ErrQueueIsClosed
	}
	select {
	case q.innerChan <- x:
		atomic.AddInt32(&q.size, 1)
		return nil
	default:
		return ErrQueueFull
	}
}
// Length returns the current (approximate) number of queued items.
//
// The counter is written with atomic.AddInt32 by the push/pop paths, so it
// must be read atomically as well; the previous plain read of q.size was a
// data race under the Go memory model.
func (q *ChanQueue) Length() int32 {
	return atomic.LoadInt32(&q.size)
}
// Capacity returns the fixed buffer capacity the queue was created with.
func (q *ChanQueue) Capacity() int32 {
	return q.capacity
}
// IsFull reports whether the channel buffer is full at this instant; the
// answer may already be stale by the time the caller acts on it.
func (q *ChanQueue) IsFull() bool {
	return len(q.innerChan) == cap(q.innerChan)
}
// Close marks the queue as closed by closing closedChan.
//
// innerChan is deliberately NOT closed: with multiple concurrent writers,
// closing it would make a pending send panic, so it is simply left for the
// GC to reclaim. (Translated from the original Chinese comment.)
//
// NOTE(review): two goroutines calling Close concurrently can both pass the
// IsClosed check and both reach close(closedChan), which panics; guard with
// sync.Once or a CAS if concurrent Close is possible.
func (q *ChanQueue) Close() error {
	if q.IsClosed() {
		return ErrQueueIsClosed
	}
	close(q.closedChan)
	return nil
}
// IsClosed reports whether Close has been called, using a non-blocking
// receive on closedChan (receiving from a closed channel never blocks).
func (q *ChanQueue) IsClosed() bool {
	select {
	case <-q.closedChan:
		return true
	default:
	}
	return false
}
// getChan spawns a goroutine that waits for a single item and exposes the
// outcome through two 1-buffered channels: resultChan delivers the item,
// timeoutChan delivers ErrQueueTimeout. A negative timeout waits forever.
//
// NOTE(review): with timeout < 0 and no item ever arriving, the goroutine
// blocks on innerChan indefinitely (leak). This helper is unexported and is
// not called anywhere in this file.
func (q *ChanQueue) getChan(timeout time.Duration) (<-chan interface{}, <-chan error) {
	timeoutChan := make(chan error, 1)
	resultChan := make(chan interface{}, 1)
	go func() {
		if timeout < 0 {
			// Wait indefinitely for an item.
			item := <-q.innerChan
			atomic.AddInt32(&q.size, -1)
			resultChan <- item
		} else {
			select {
			case item := <-q.innerChan:
				atomic.AddInt32(&q.size, -1)
				resultChan <- item
			case <-time.After(timeout):
				timeoutChan <- ErrQueueTimeout
			}
		}
	}()
	return resultChan, timeoutChan
}
|
LableOrg/java-rfc3881auditlogger | test/it/src/test/java/org/lable/rfc3881/auditlogger/adapter/hbase/RoundTripIT.java | /*
* Copyright © 2015 Lable (<EMAIL>)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.lable.rfc3881.auditlogger.adapter.hbase;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.shaded.org.apache.commons.configuration.BaseConfiguration;
import org.apache.hadoop.hbase.shaded.org.apache.commons.configuration.Configuration;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.lable.codesystem.codereference.CodeReference;
import org.lable.rfc3881.auditlogger.api.*;
import org.lable.rfc3881.auditlogger.definition.rfc3881.*;
import org.lable.rfc3881.auditlogger.test.HbaseTestHelper;
import java.io.IOException;
import java.time.*;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
/**
 * Round-trip integration test: writes RFC 3881 audit log entries to HBase via
 * an {@code HBaseAdapter} and reads them back through an {@code HBaseReader},
 * asserting that the deserialized entries equal the originals.
 *
 * Requires a live HBase/ZooKeeper reachable at localhost:33533 (see before()).
 */
public class RoundTripIT {
    private AuditLogAdapter logAdapter;
    private AuditLogReader logReader;
    private Connection connection;
    private Admin admin;

    // Target table "ns:audit"; (re)created with a single column family "a" in before().
    TableName AUDIT_TABLE = TableName.valueOf("ns", "audit");

    @Before
    public void before() throws Exception {
        // NOTE(review): this commons Configuration is never used after the
        // setProperty call — it looks like a leftover; the effective settings
        // are the ones placed on the Hadoop conf below.
        Configuration configuration = new BaseConfiguration();
        configuration.setProperty("hbase.zookeeper.znode", "/hbase");

        org.apache.hadoop.conf.Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "localhost:33533");
        conf.set("zookeeper.znode.parent", "/hbase");
        connection = ConnectionFactory.createConnection(conf);
        admin = connection.getAdmin();

        // Monotonic per-record suffix so rows written within the same
        // millisecond still get distinct, ordered keys.
        AtomicLong uid = new AtomicLong();
        HbaseTestHelper.createNamespaceIfMissing(admin, "ns");
        HbaseTestHelper.createOrTruncateTable(admin, HbaseTestHelper.buildSimpleDescriptor(AUDIT_TABLE, "a"));
        Table table = connection.getTable(AUDIT_TABLE);
        // Adapter writes each audit Put into the test table, using column
        // family "a" and the uid counter as row-key discriminator.
        logAdapter = new HBaseAdapter(
                put -> {
                    try {
                        table.put(put);
                    } catch (IOException e) {
                        throw new RuntimeException(e);
                    }
                },
                () -> "a",
                () -> Bytes.toBytes(uid.getAndIncrement())
        );
        logReader = new HBaseReader(
                tableName -> {
                    try {
                        return connection.getTable(tableName);
                    } catch (IOException e) {
                        throw new RuntimeException(e);
                    }
                },
                () -> AUDIT_TABLE,
                () -> "a"
        );
    }

    @After
    public void after() throws IOException {
        // Closing the connection releases the Admin/Table handles obtained
        // from it. TODO confirm no explicit admin.close() is required for the
        // HBase client version in use.
        connection.close();
    }

    @Test
    public void roundTripTest() throws IOException {
        // Fixed timestamp so the test is deterministic.
        Instant at = ZonedDateTime.of(
                LocalDateTime.of(2015, Month.APRIL, 3, 12, 0),
                ZoneId.of("Europe/Amsterdam")
        ).toInstant();

        // Minimal entry: only the mandatory event plus version are populated.
        LogEntry entryNullish = new LogEntry(
                new Event(
                        new CodeReference("system", "code"),
                        EventAction.READ,
                        at.toEpochMilli(),
                        EventOutcome.SUCCESS
                ),
                null,
                null,
                null,
                null,
                null,
                null,
                new CodeReference("version", "1.0")
        );

        // Maximal entry: every optional field of the RFC 3881 record is set.
        LogEntry entryFullish = new LogEntry(
                new Event(
                        new CodeReference("system", "code"),
                        EventAction.READ,
                        at.toEpochMilli(),
                        EventOutcome.SUCCESS
                ),
                new Principal(
                        "user-id",
                        Arrays.asList("id-a", "id-b"),
                        "User",
                        Arrays.asList(
                                new CodeReference("r", "1"),
                                new CodeReference("r", "2")
                        )
                ),
                new Principal(
                        "user-id-del",
                        Arrays.asList("del-id-a", "del-id-b"),
                        "User Del",
                        new CodeReference("r", "1")
                ),
                Arrays.asList(
                        new Principal(
                                "user-id-part1",
                                Arrays.asList("part1-id-a", "part1-id-b"),
                                "User Participant 1",
                                new CodeReference("r", "1")
                        ),
                        new Principal(
                                "user-id-part2",
                                Arrays.asList("part2-id-a", "part2-id-b"),
                                "User Participant 2",
                                new CodeReference("r", "2")
                        )
                ),
                NetworkAccessPoint.byIPAddress("10.0.0.1"),
                Arrays.asList(
                        new AuditSource("site", "id1"),
                        new AuditSource("site", "id2", new CodeReference("t", "1")),
                        new AuditSource("site", "id3", new CodeReference("t", "3"), new CodeReference("t", "5"))
                ),
                Arrays.asList(
                        new ParticipantObject(
                                "id1",
                                ParticipantObjectType.SYSTEM_OBJECT,
                                new CodeReference("idtype", "t"),
                                ParticipantObjectTypeRole.JOB,
                                DataLifeCycle.ACCESS_OR_USE,
                                new CodeReference("sens", "very"),
                                "Some object 1",
                                "GET",
                                new ParticipantObject.Detail(new CodeReference("dt", "t"), "1"),
                                new ParticipantObject.Detail(new CodeReference("dt", "t"), "2")
                        ),
                        new ParticipantObject(
                                "id2",
                                ParticipantObjectType.SYSTEM_OBJECT,
                                new CodeReference("idtype", "t"),
                                ParticipantObjectTypeRole.JOB,
                                DataLifeCycle.ACCESS_OR_USE,
                                new CodeReference("sens", "not so"),
                                "Some object 2",
                                "GET",
                                new ParticipantObject.Detail(new CodeReference("dt", "t"), "1"),
                                new ParticipantObject.Detail(new CodeReference("dt", "t"), "2")
                        )
                ),
                new CodeReference("version", "1.0")
        );

        logAdapter.record(entryNullish);
        logAdapter.record(entryFullish);
        List<LogEntry> entries = logReader.read(2);
        assertThat(entries.size(), is(2));
        // NOTE(review): assumes the reader returns entries in write (uid)
        // order — confirm that guarantee in HBaseReader.
        LogEntry entryNullishOut = entries.get(0);
        LogEntry entryFullishOut = entries.get(1);
        assertThat(entryNullishOut, is(entryNullish));
        assertThat(entryFullishOut, is(entryFullish));
        assertThat(entryFullish.getParticipatingPrincipals().size(), is(2));
        assertThat(entryFullishOut.getParticipatingPrincipals().size(), is(2));
        assertThat(entryFullishOut.getParticipatingPrincipals(), is(entryFullish.getParticipatingPrincipals()));
        assertThat(entryFullish.getAuditSources().size(), is(3));
        // Debug aid: dump the full entry for manual inspection of the format.
        System.out.println(entryFullish);
    }
}
|
sslikewater/likewater-shardingphere | sharding-complex-strategy-example/src/main/java/com/example/sharding/complex/service/OrderService.java | <reponame>sslikewater/likewater-shardingphere<gh_stars>0
package com.example.sharding.complex.service;
import com.example.sharding.complex.service.dto.OrderCreateDTO;
import com.example.sharding.complex.service.dto.OrderDTO;
import java.util.List;
/**
 * Order read/write operations for the complex-sharding-strategy example.
 *
 * @author likewater
 * @since 2022-03-02
 */
public interface OrderService {

    /** Loads a single order by its primary key, or whatever the implementation returns when absent. */
    OrderDTO getOrder(Long id);

    /** Lists the orders belonging to the given user (presumably the sharding key — confirm against the strategy config). */
    List<OrderDTO> getOrders(Long userId);

    /**
     * Creates a new order and returns its generated id.
     * NOTE(review): the parameter is named {@code userDO} but carries order
     * creation data; consider renaming at the next interface revision.
     */
    long placeOrder(OrderCreateDTO userDO);
}
|
King0987654/windows2000 | private/windows/shell/lmui/ntshrui.new/guids.h | <reponame>King0987654/windows2000<gh_stars>10-100
//+-------------------------------------------------------------------------
//
// Microsoft Windows
// Copyright (C) Microsoft Corporation, 1995 - 1995.
//
// File: guids.h
//
// Contents: Guids local to this directory
//
// History: 21-Apr-95 BruceFo Created
//
//--------------------------------------------------------------------------
#ifndef __GUIDS_H__
#define __GUIDS_H__

// {f81e9010-6ea4-11ce-a7ff-00aa003ca9f6} — CShare COM class
DEFINE_GUID(CLSID_CShare, 0xf81e9010, 0x6ea4, 0x11ce, 0xa7, 0xff, 0x00, 0xaa, 0x00, 0x3c, 0xa9, 0xf6);

// {40dd6e20-7c17-11ce-a804-00aa003ca9f6} — copy-hook handler for shared folders
DEFINE_GUID(CLSID_CShareCopyHook, 0x40dd6e20, 0x7c17, 0x11ce, 0xa8, 0x04, 0x00, 0xaa, 0x00, 0x3c, 0xa9, 0xf6);

#endif // __GUIDS_H__
|
ckclark/leetcode | java/leetcode/pascals_triangle_ii/Solution.java | <gh_stars>0
package leetcode.pascals_triangle_ii;
import java.util.ArrayList;
public class Solution {
public ArrayList<Integer> getRow(int rowIndex) {
ArrayList<Integer> ary = new ArrayList<Integer>();
long C = 1;
ary.add((int)C);
for(int i = 1; i <= rowIndex; i++){
C *= rowIndex + 1 - i;
C /= i;
ary.add((int)C);
}
return ary;
}
public static void main(String[] args){
for(int x : new Solution().getRow(20)){
System.err.print(x + ", ");
}
System.err.println("");
}
}
|
reels-research/iOS-Private-Frameworks | PhotosUICore.framework/PXPeopleSwipeSelectionManager.h | <reponame>reels-research/iOS-Private-Frameworks
/* Generated by RuntimeBrowser
   Image: /System/Library/PrivateFrameworks/PhotosUICore.framework/PhotosUICore
 */

// Reverse-engineered header (RuntimeBrowser dump) for the private PhotosUICore
// class driving swipe-to-select over a scroll view of "People" items.
// NOTE(review): declarations only — member semantics below are inferred from
// names and should be confirmed against the framework's actual behavior.
@interface PXPeopleSwipeSelectionManager : NSObject <PXAutoScrollerDelegate> {
    PXUIAutoScroller * _autoScroller;
    NSIndexPath * _currentIndexPath;
    <PXPeopleSwipeSelectionManagerDelegate> * _delegate;
    // Presumably cached respondsToSelector: results for optional delegate methods.
    struct {
        bool respondsToItemIndexPathAtLocation;
        bool respondsToItemIndexPathClosestLeadingLocation;
        bool respondsToItemIndexPathClosestAboveLocation;
    } _delegateFlags;
    struct {
        bool selectedIndexPaths;
    } _needsUpdateFlags;
    id _pausingChangesToken;
    UIScrollView * _scrollView;
    NSSet * _selectedIndexPathsBeforeSwipe;
    NSIndexPath * _startingIndexPath;
    unsigned long long _state;
    UIGestureRecognizer * _swipeGestureRecognizer;
}

@property (nonatomic, readonly) PXUIAutoScroller *autoScroller;
@property (nonatomic, retain) NSIndexPath *currentIndexPath;
@property (readonly, copy) NSString *debugDescription;
@property (nonatomic) <PXPeopleSwipeSelectionManagerDelegate> *delegate;
@property (readonly, copy) NSString *description;
@property (readonly) unsigned long long hash;
@property (nonatomic, readonly) bool isSelecting;
@property (nonatomic, retain) id pausingChangesToken;
@property (nonatomic, readonly) UIScrollView *scrollView;
@property (nonatomic, copy) NSSet *selectedIndexPathsBeforeSwipe;
@property (nonatomic, retain) NSIndexPath *startingIndexPath;
@property (nonatomic) unsigned long long state;
@property (readonly) Class superclass;
@property (nonatomic, readonly) UIGestureRecognizer *swipeGestureRecognizer;

// Private selection machinery.
- (void).cxx_destruct;
- (void)_beginSelectionFromIndexPath:(id)arg1;
- (void)_endSelection;
- (void)_handleSwipeSelectionGesture:(id)arg1;
- (void)_invalidateSelectedIndexPaths;
- (id)_itemIndexPathAtLocation:(struct CGPoint { double x1; double x2; })arg1;
- (id)_itemIndexPathClosestAboveLocation:(struct CGPoint { double x1; double x2; })arg1;
- (id)_itemIndexPathClosestLeadingLocation:(struct CGPoint { double x1; double x2; })arg1;
- (void)_updateSelectedIndexPaths;
- (void)_updateSelectionWithHitIndexPath:(id)arg1 leadingClosestIndexPath:(id)arg2 aboveClosestIndexPath:(id)arg3;
// Accessors and PXAutoScrollerDelegate conformance.
- (id)autoScroller;
- (void)autoScroller:(id)arg1 didAutoscrollWithTimestamp:(double)arg2;
- (id)currentIndexPath;
- (void)dealloc;
- (id)delegate;
- (id)init;
- (id)initWithScrollView:(id)arg1;
- (bool)isSelecting;
- (id)pausingChangesToken;
- (id)scrollView;
- (id)selectedIndexPathsBeforeSwipe;
- (void)setCurrentIndexPath:(id)arg1;
- (void)setDelegate:(id)arg1;
- (void)setPausingChangesToken:(id)arg1;
- (void)setSelectedIndexPathsBeforeSwipe:(id)arg1;
- (void)setStartingIndexPath:(id)arg1;
- (void)setState:(unsigned long long)arg1;
- (id)startingIndexPath;
- (unsigned long long)state;
- (id)swipeGestureRecognizer;

@end
|
OttoWinter/esphomeyaml | esphome/components/ssd1306_base/__init__.py | import esphome.codegen as cg
import esphome.config_validation as cv
from esphome import pins
from esphome.components import display
from esphome.const import (
CONF_EXTERNAL_VCC,
CONF_LAMBDA,
CONF_MODEL,
CONF_RESET_PIN,
CONF_BRIGHTNESS,
CONF_CONTRAST,
CONF_INVERT,
)
# Codegen handles for the C++ side of the ssd1306_base component.
ssd1306_base_ns = cg.esphome_ns.namespace("ssd1306_base")
SSD1306 = ssd1306_base_ns.class_("SSD1306", cg.PollingComponent, display.DisplayBuffer)
SSD1306Model = ssd1306_base_ns.enum("SSD1306Model")

# Configuration keys local to this component (not in esphome.const).
CONF_FLIP_X = "flip_x"
CONF_FLIP_Y = "flip_y"
CONF_OFFSET_X = "offset_x"
CONF_OFFSET_Y = "offset_y"

# User-facing model names mapped to the generated C++ enum values.
MODELS = {
    "SSD1306_128X32": SSD1306Model.SSD1306_MODEL_128_32,
    "SSD1306_128X64": SSD1306Model.SSD1306_MODEL_128_64,
    "SSD1306_96X16": SSD1306Model.SSD1306_MODEL_96_16,
    "SSD1306_64X48": SSD1306Model.SSD1306_MODEL_64_48,
    "SSD1306_64X32": SSD1306Model.SSD1306_MODEL_64_32,
    "SH1106_128X32": SSD1306Model.SH1106_MODEL_128_32,
    "SH1106_128X64": SSD1306Model.SH1106_MODEL_128_64,
    "SH1106_96X16": SSD1306Model.SH1106_MODEL_96_16,
    "SH1106_64X48": SSD1306Model.SH1106_MODEL_64_48,
    "SH1107_128X64": SSD1306Model.SH1107_MODEL_128_64,
    "SSD1305_128X32": SSD1306Model.SSD1305_MODEL_128_32,
    "SSD1305_128X64": SSD1306Model.SSD1305_MODEL_128_64,
}

# Validator: case-insensitive model name, spaces normalized to underscores.
SSD1306_MODEL = cv.enum(MODELS, upper=True, space="_")
def _validate(value):
    """Reject the legacy ``brightness`` option on SSD1306/SH1106 models.

    SSD1305 models still honor ``brightness``, so they are exempt. For all
    other models, a non-default brightness combined with a default contrast
    indicates the user is still using the old option name.
    """
    if value[CONF_MODEL] in ("SSD1305_128X32", "SSD1305_128X64"):
        return value
    if value[CONF_BRIGHTNESS] != 1.0 and value[CONF_CONTRAST] == 1.0:
        raise cv.Invalid(
            "SSD1306/SH1106 no longer accepts brightness option, "
            'please use "contrast" instead.'
        )
    return value
# Base config schema for ssd1306-family displays: model selection plus
# panel-geometry and rendering tweaks, polled every second by default.
# Presumably extended by the bus-specific (I2C/SPI) platforms — confirm callers.
SSD1306_SCHEMA = display.FULL_DISPLAY_SCHEMA.extend(
    {
        cv.Required(CONF_MODEL): SSD1306_MODEL,
        cv.Optional(CONF_RESET_PIN): pins.gpio_output_pin_schema,
        # `brightness` is legacy for non-SSD1305 models; see _validate above.
        cv.Optional(CONF_BRIGHTNESS, default=1.0): cv.percentage,
        cv.Optional(CONF_CONTRAST, default=1.0): cv.percentage,
        cv.Optional(CONF_EXTERNAL_VCC): cv.boolean,
        cv.Optional(CONF_FLIP_X, default=True): cv.boolean,
        cv.Optional(CONF_FLIP_Y, default=True): cv.boolean,
        cv.Optional(CONF_OFFSET_X, default=0): cv.int_range(min=-32, max=32),
        cv.Optional(CONF_OFFSET_Y, default=0): cv.int_range(min=-32, max=32),
        cv.Optional(CONF_INVERT, default=False): cv.boolean,
    }
).extend(cv.polling_component_schema("1s"))
async def setup_ssd1306(var, config):
    """Register the display component and emit setter calls for every
    configured option, in the same order the options are documented."""
    await cg.register_component(var, config)
    await display.register_display(var, config)

    cg.add(var.set_model(config[CONF_MODEL]))

    # Reset pin needs to be translated into a GPIO pin expression first.
    if CONF_RESET_PIN in config:
        reset_pin = await cg.gpio_pin_expression(config[CONF_RESET_PIN])
        cg.add(var.set_reset_pin(reset_pin))

    # Scalar options that map one-to-one onto a C++ setter call.
    scalar_setters = (
        (CONF_BRIGHTNESS, var.init_brightness),
        (CONF_CONTRAST, var.init_contrast),
        (CONF_EXTERNAL_VCC, var.set_external_vcc),
        (CONF_FLIP_X, var.init_flip_x),
        (CONF_FLIP_Y, var.init_flip_y),
        (CONF_OFFSET_X, var.init_offset_x),
        (CONF_OFFSET_Y, var.init_offset_y),
        (CONF_INVERT, var.init_invert),
    )
    for key, setter in scalar_setters:
        if key in config:
            cg.add(setter(config[key]))

    # Drawing lambda is compiled into a C++ callable taking the display buffer.
    if CONF_LAMBDA in config:
        writer = await cg.process_lambda(
            config[CONF_LAMBDA], [(display.DisplayBufferRef, "it")], return_type=cg.void
        )
        cg.add(var.set_writer(writer))
|
weltam/dremio-oss | sabot/kernel/src/main/java/com/dremio/exec/util/ArrowCrossBufComparator.java | <reponame>weltam/dremio-oss
/*
* Copyright (C) 2017-2019 Dremio Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.dremio.exec.util;
import org.apache.arrow.memory.ArrowBuf;
/**
 * For making comparisons across different buffers.
 */
public interface ArrowCrossBufComparator {
    /**
     * Compares item at idx1 at buf1 and idx2 at buf2. The index is not the block position.
     * Follows the {@link java.util.Comparator} sign convention.
     *
     * @param buf1 Buffer 1
     * @param idx1 Index against buffer 1
     * @param buf2 Buffer 2
     * @param idx2 Index against buffer 2
     * @return less than 0 if buf1:idx1 is smaller, greater than 0 if buf1:idx1 is larger, 0 if both are equal.
     */
    int compare(ArrowBuf buf1, int idx1, ArrowBuf buf2, int idx2);
}
|
GEOS-ESM/AeroApps | src/Components/misc/gritas/stdio.h | <filename>src/Components/misc/gritas/stdio.h
* stdio.h - last change: 2/3/94 (ams)
*
* Defines standard i/o unit numbers for fixed-form Fortran sources
* that INCLUDE this file.
*
      integer STDIN, STDOUT, STDERR
      parameter ( STDIN = 5 )
      parameter ( STDOUT = 6 )
      parameter ( STDERR = 0 )
omarquina/moVirt | moVirt/src/main/java/org/ovirt/mobile/movirt/model/enums/ConsoleProtocol.java | <filename>moVirt/src/main/java/org/ovirt/mobile/movirt/model/enums/ConsoleProtocol.java
package org.ovirt.mobile.movirt.model.enums;
import android.support.annotation.NonNull;

import org.ovirt.mobile.movirt.model.Console;

import java.util.List;
import java.util.Locale;
import java.util.SortedSet;
import java.util.TreeSet;
/**
 * Remote console display protocols supported by oVirt VMs.
 */
public enum ConsoleProtocol {
    SPICE,
    VNC;

    /**
     * Maps a display-protocol string (e.g. "spice", "vnc") to an enum constant,
     * falling back to {@link #VNC} for anything unrecognized (including null),
     * which matches the webadmin/userportal behavior.
     */
    @NonNull
    public static ConsoleProtocol mapProtocol(String display) {
        try {
            // Locale.ENGLISH avoids locale-sensitive case mapping surprises
            // (e.g. under the Turkish default locale, "spice".toUpperCase()
            // yields "SPİCE" and valueOf would always fall through to VNC).
            return valueOf(display.toUpperCase(Locale.ENGLISH));
        } catch (Exception e) {
            // not particularly nice but same behavior as on the webadmin/userportal
            return VNC;
        }
    }

    /** Collects the distinct protocols used by the given consoles, sorted in declaration order. */
    public static SortedSet<ConsoleProtocol> getProtocolTypes(List<Console> consoles) {
        SortedSet<ConsoleProtocol> result = new TreeSet<>();
        for (Console console : consoles) {
            result.add(console.getProtocol());
        }
        return result;
    }

    /** Returns the lower-case protocol name, e.g. "spice". */
    public String getProtocol() {
        return super.toString().toLowerCase(Locale.ENGLISH);
    }
}
|
WLPhoenix/stackdio | stackdio/server/core/middleware.py | <gh_stars>0
# -*- coding: utf-8 -*-
# Copyright 2014, Digital Reasoning
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
class JSONIndentAcceptHeaderMiddleware(object):
    """Django middleware that rewrites a bare ``application/json`` Accept
    header to request 4-space indented JSON from the REST framework."""

    def process_request(self, request):
        """Mutate the incoming request's Accept header in place.

        Returns None so Django continues normal request processing.
        """
        accept = request.META.get('HTTP_ACCEPT')
        if accept == 'application/json':
            request.META['HTTP_ACCEPT'] = 'application/json; indent=4'
        return None
|
ChristianFox/RDSRemoteDataSolutions | Example/Pods/Headers/Public/KFXAdditions/NSAttributedString+KFXAdditions.h | <gh_stars>0
/********************************
*
* Copyright © 2016-2018 <NAME>
*
* MIT Licence - Full licence details can be found in the file 'LICENSE' or in the Pods-{yourProjectName}-acknowledgements.markdown
*
* This file is included with KFXAdditions
*
************************************/
#import <Foundation/Foundation.h>
@import UIKit.UIFont;
@import UIKit.UIColor;
@import UIKit.NSText;
@import CoreGraphics;
@interface NSAttributedString (KFXAdditions)
//--------------------------------------------------------
#pragma mark - Convience Initilisers
//--------------------------------------------------------
/// Returns an NSAttributedString with the given string and no attributes
+(instancetype)kfx_instanceWithString:(NSString*)string;
/// Returns an NSAttributedString with the given string and attributes
+(instancetype)kfx_instanceWithString:(NSString*)string attributes:(NSDictionary*)attributes;
/// Returns an NSAttributedString with the given format string and no attributes
+(instancetype)kfx_instanceWithFormat:(NSString*)format,...;
/// Returns an NSAttributedString with the given format string and the attributes
+(instancetype)kfx_instanceWithAttributes:(NSDictionary*)attributes
format:(NSString*)format,...;
/// Returns an NSAttributedString with the given string and the attributes
+(instancetype)kfx_instanceWithString:(NSString*)string
font:(UIFont*)font
colour:(UIColor*)colour;
/// Returns an NSAttributedString with the given string and the attributes
+(instancetype)kfx_instanceWithString:(NSString*)string
font:(UIFont*)font
colour:(UIColor*)colour
alignment:(NSTextAlignment)alignment;
/// Returns an NSAttributedString with the given string and the attributes
+(instancetype)kfx_instanceWithString:(NSString*)string
font:(UIFont*)font
colour:(UIColor*)colour
alignment:(NSTextAlignment)alignment
lineSpacing:(CGFloat)lineSpacing;
/// Returns an NSAttributedString with the given string and the attributes
+(instancetype)kfx_instanceWithString:(NSString*)string
font:(UIFont*)font
colour:(UIColor*)colour
kerning:(CGFloat)kerning;
/// Returns an NSAttributedString with the given string and the attributes
+(instancetype)kfx_instanceWithString:(NSString*)string
font:(UIFont*)font
colour:(UIColor*)colour
alignment:(NSTextAlignment)alignment
kerning:(CGFloat)kerning;
//--------------------------------------------------------
#pragma mark - Ranges
//--------------------------------------------------------
/// Returns the NSRange for the receiver's string
-(NSRange)kfx_rangeOfString;
@end
|
jweyn/DLWP | examples/validate.py | #
# Copyright (c) 2019 <NAME> <<EMAIL>>
#
# See the file LICENSE for your rights.
#
"""
Simple routines for graphically evaluating the performance of a DLWP model.
"""
import keras.backend as K
import numpy as np
import pandas as pd
import xarray as xr
import pickle
from datetime import datetime, timedelta
import matplotlib
matplotlib.use('agg')
import matplotlib.pyplot as plt
from DLWP.model import SeriesDataGenerator, TimeSeriesEstimator, DLWPFunctional
from DLWP.model import verify
from DLWP.plot import history_plot, forecast_example_plot, zonal_mean_plot
from DLWP.util import load_model, train_test_split_ind
from DLWP.data import CFSReforecast
#%% User parameters

# Open the data file
root_directory = '/home/disk/wave2/jweyn/Data/DLWP'
predictor_file = '%s/cfs_6h_1979-2010_z500-th3-7-w700-rh850-pwat_NH_T2.nc' % root_directory

# Names of model files, located in the root_directory, and labels for those models.
# NOTE(review): the first entry starts with '/', so the '%s/%s' join below
# produces a double slash — harmless on POSIX but probably unintended.
models = [
    '/dlwp_1979-2010_hgt-thick_300-500-700_NH_T2F_FINAL-lstm',
    'dlwp_6h_tau-lstm_z-tau-out_fillpad',
    'dlwp_6h_tau-lstm_z-tau-out_avgpool'
]
model_labels = [
    r'$\tau$ LSTM',
    r'$\tau$ LSTM fill',
    r'$\tau$ LSTM avg pool',
]

# Optional list of selections to make from the predictor dataset for each model. This is useful if, for example,
# you want to examine models that have different numbers of vertical levels but one predictor dataset contains
# the data that all models need. Separate input and output selections are available for models using different inputs
# and outputs. Also specify the number of input/output time steps in each model.
# NOTE(review): all per-model lists below must have the same length as `models`.
input_selection = [
    {'varlev': ['HGT/500', 'THICK/300-700']},
    {'varlev': ['HGT/500', 'THICK/300-700']},
    {'varlev': ['HGT/500', 'THICK/300-700']},
]
output_selection = [
    {'varlev': ['HGT/500', 'THICK/300-700']},
    {'varlev': ['HGT/500', 'THICK/300-700']},
    {'varlev': ['HGT/500', 'THICK/300-700']},
]
add_insolation = [False] * len(models)
input_time_steps = [2, ] * len(models)
output_time_steps = [2, ] * len(models)

# Models which use up-sampling need to have an even number of latitudes. This is usually done by cropping out the
# north pole. Set this option to do that.
crop_north_pole = True

# Validation set to use. Either an integer (number of validation samples, taken from the end), or an iterable of
# pandas datetime objects.
# validation_set = 4 * (365 * 4 + 1)
start_date = datetime(2007, 1, 1, 0)
end_date = datetime(2009, 12, 31, 18)
validation_set = np.array(pd.date_range(start_date, end_date, freq='6H'), dtype='datetime64')
# validation_set = [d for d in validation_set if d.month in [1, 2, 12]]

# Load a CFS Reforecast model for comparison
cfs_model_dir = '%s/../CFSR/reforecast' % root_directory
cfs = CFSReforecast(root_directory=cfs_model_dir, file_id='dlwp_', fill_hourly=False)
cfs.set_dates(validation_set)
cfs.open()
cfs_ds = cfs.Dataset.isel(lat=(cfs.Dataset.lat >= 0.0))  # Northern hemisphere only

# Load a barotropic model for comparison
baro_model_file = '%s/barotropic_2007-2010.nc' % root_directory
baro_ds = xr.open_dataset(baro_model_file)
baro_ds = baro_ds.isel(lat=(baro_ds.lat >= 0.0))  # Northern hemisphere only

# Number of forward integration weather forecast time steps
num_forecast_hours = 72
dt = 6

# Latitude bounds for MSE calculation
lat_range = [20., 70.]

# Calculate statistics for a selected variable and level, or varlev if the predictor data was produced pairwise.
# Provide as a dictionary to extract to kwargs. If None, then averages all variables. Cannot be None if using a
# barotropic model for comparison (specify Z500).
selection = {
    'varlev': 'HGT/500'
}

# Scale the variables to original units
scale_variables = True

# Do specific plots
plot_directory = './Plots'
plot_example = None  # None to disable or the date index of the sample
plot_example_f_hour = 24  # Forecast hour index of the sample
plot_history = False
plot_zonal = False
plot_mse = True
plot_spread = False
plot_mean = False
method = 'rmse'
mse_title = r'$Z_{500}$; 2003-2006; 20-70$^{\circ}$N'
mse_file_name = 'rmse_tau-lstm_avg-fill.pdf'
# NOTE(review): presumably dumped with pickle further below (not shown here).
mse_pkl_file = 'rmse_tau-lstm_avg-fill.pkl'
#%% Pre-processing

data = xr.open_dataset(predictor_file)
if crop_north_pole:
    # Drop the pole row so the latitude count is even (needed by up-sampling models).
    data = data.isel(lat=(data.lat < 90.0))

# Find the validation set
if isinstance(validation_set, int):
    n_sample = data.dims['sample']
    train_set, val_set = train_test_split_ind(n_sample, validation_set, method='last')
    validation_data = data.isel(sample=val_set)
else:  # we must have a list of datetimes
    validation_data = data.sel(sample=validation_set)

# Shortcuts for latitude range
lat_min = np.min(lat_range)
lat_max = np.max(lat_range)

# Format the predictor indexer and variable index in reshaped array
input_selection = input_selection or [None] * len(models)
output_selection = output_selection or [None] * len(models)
selection = selection or {}

# Lists to populate (one entry per model, plus the reference models appended later)
mse = []
f_hours = []

# Scaling parameters stored alongside the normalized predictors
sel_mean = data.sel(**selection).variables['mean'].values
sel_std = data.sel(**selection).variables['std'].values

# Generate verification: observed states aligned with each forecast lead time
print('Generating verification...')
num_forecast_steps = num_forecast_hours // dt
validation_data.load()
verification = verify.verification_from_samples(validation_data.sel(**selection),
                                                forecast_steps=num_forecast_steps, dt=dt)
verification = verification.sel(lat=((verification.lat >= lat_min) & (verification.lat <= lat_max)))
if scale_variables:
    # De-normalize back to physical units
    verification = verification * sel_std + sel_mean
#%% Iterate through the models and calculate their stats

for m, model in enumerate(models):
    print('Loading model %s...' % model)

    # Load the model
    dlwp, history = load_model('%s/%s' % (root_directory, model), True, gpus=1)

    # Assign forecast hour coordinate
    f_hours.append(np.arange(dt, num_forecast_steps * dt + 1., dt))

    # Build in some tolerance for old models trained with former APIs missing the is_convolutional and is_recurrent
    # attributes. This may not always work!
    if not hasattr(dlwp, 'is_recurrent'):
        dlwp.is_recurrent = False
        for layer in dlwp.model.layers:
            if 'LSTM' in layer.name.upper() or 'LST_M' in layer.name.upper():
                dlwp.is_recurrent = True
    if not hasattr(dlwp, 'is_convolutional'):
        dlwp.is_convolutional = False
        for layer in dlwp.model.layers:
            if 'CONV' in layer.name.upper():
                dlwp.is_convolutional = True
    if isinstance(dlwp, DLWPFunctional):
        # Defaults for older functional models lacking these attributes.
        if not hasattr(dlwp, '_n_steps'):
            dlwp._n_steps = 6
        if not hasattr(dlwp, 'time_dim'):
            dlwp.time_dim = 2

    # Create data generator
    val_generator = SeriesDataGenerator(dlwp, validation_data, add_insolation=add_insolation[m],
                                        input_sel=input_selection[m], output_sel=output_selection[m],
                                        input_time_steps=input_time_steps[m], output_time_steps=output_time_steps[m],
                                        batch_size=64)

    # Create TimeSeriesEstimator
    estimator = TimeSeriesEstimator(dlwp, val_generator)

    # Very crude but for this test I want to exclude the predicted thickness from being added back
    if model_labels[m] == r'$\tau$ LSTM16':
        estimator._outputs_in_inputs = {'varlev': np.array(['HGT/500'])}

    # Make a time series prediction
    print('Predicting with model %s...' % model_labels[m])
    time_series = estimator.predict(num_forecast_steps, verbose=1)

    # Slice the arrays as we want
    time_series = time_series.sel(**selection, lat=((time_series.lat >= lat_min) & (time_series.lat <= lat_max)))
    if scale_variables:
        time_series = time_series * sel_std + sel_mean

    # Calculate the error for each forecast hour relative to observations.
    # NOTE(review): the list is named `mse` but actually stores whatever
    # `method` computes — 'rmse' with the settings above.
    intersection = np.intersect1d(time_series.time.values, verification.time.values, assume_unique=True)
    mse.append(verify.forecast_error(time_series.sel(time=intersection).values,
                                     verification.isel(f_hour=slice(0, len(time_series.f_hour)))
                                     .sel(time=intersection).values,
                                     method=method))

    # Plot learning curves
    if plot_history:
        history_plot(history['mean_absolute_error'], history['val_mean_absolute_error'], model_labels[m],
                     out_directory=plot_directory)

    # Plot an example
    if plot_example is not None:
        plot_dt = np.datetime64(plot_example)
        forecast_example_plot(validation_data.sel(**selection, time=plot_dt),
                              validation_data.sel(**selection,
                                                  time=plot_dt + np.timedelta64(timedelta(hours=plot_example_f_hour))),
                              time_series.sel(f_hour=plot_example_f_hour, time=plot_dt), f_hour=plot_example_f_hour,
                              model_name=model_labels[m], out_directory=plot_directory)

    # Plot the zonal climatology of the last forecast hour
    if plot_zonal:
        obs_zonal_mean = verification[-1].mean(axis=(0, -1))
        obs_zonal_std = verification[-1].std(axis=-1).mean(axis=0)
        pred_zonal_mean = time_series[-1].mean(axis=(0, -1))
        pred_zonal_std = time_series[-1].std(axis=-1).mean(axis=0)
        zonal_mean_plot(obs_zonal_mean, obs_zonal_std, pred_zonal_mean, pred_zonal_std, dt*num_forecast_steps,
                        model_labels[m], out_directory=plot_directory)

    # Clear the model and free Keras/TF graph memory before loading the next one
    dlwp = None
    time_series = None
    K.clear_session()
#%% Add Barotropic model
# Verify a pre-computed barotropic-model forecast against the same observations,
# so it can be plotted as a baseline alongside the DLWP models.
if baro_ds is not None and plot_mse:
    print('Loading barotropic model data from %s...' % baro_model_file)
    # A single variable/level selection (Z500) is required so the comparison is like-for-like.
    if not selection:
        raise ValueError("specific 'variable' and 'level' for Z500 must be specified to use barotropic model")
    # Restrict to the same latitude band used for the DLWP verification.
    baro_ds = baro_ds.isel(lat=((baro_ds.lat >= lat_min) & (baro_ds.lat <= lat_max)))
    if isinstance(validation_set, int):
        baro_ds = baro_ds.isel(time=slice(input_time_steps[0] - 1, validation_set + input_time_steps[0] - 1))
    else:
        baro_ds = baro_ds.sel(time=validation_set)
    # Select the correct number of forecast hours
    baro_forecast = baro_ds.isel(f_hour=(baro_ds.f_hour > 0)).isel(f_hour=slice(None, num_forecast_steps))
    baro_forecast_steps = int(np.min([num_forecast_steps, baro_forecast.dims['f_hour']]))
    baro_f = baro_forecast.variables['Z'].values
    # Normalize by the same std and mean as the predictor dataset
    if not scale_variables:
        baro_f = (baro_f - sel_mean) / sel_std
    mse.append(verify.forecast_error(baro_f[:baro_forecast_steps], verification.values[:baro_forecast_steps],
                                     method=method))
    model_labels.append('Barotropic')
    f_hours.append(np.arange(dt, baro_forecast_steps * dt + 1., dt))
    # Drop references so the large arrays can be garbage-collected.
    baro_f, baro_v = None, None
#%% Add the CFS model
# Verify a CFS Reforecast against the same observations as another baseline.
if cfs_ds is not None and plot_mse:
    print('Loading CFS model data...')
    # A single variable/level selection (Z500) is required so the comparison is like-for-like.
    if not selection:
        # Fixed doubled word in the original message ("CFS model model").
        raise ValueError("specific 'variable' and 'level' for Z500 must be specified to use CFS model")
    cfs_ds = cfs_ds.isel(lat=((cfs_ds.lat >= lat_min) & (cfs_ds.lat <= lat_max)))
    if isinstance(validation_set, int):
        raise ValueError("I can only compare to a CFS Reforecast with datetime validation set")
    else:
        cfs_ds = cfs_ds.sel(time=validation_set)
    # Select the correct number of forecast hours
    cfs_forecast = cfs_ds.isel(f_hour=(cfs_ds.f_hour > 0)).isel(f_hour=slice(None, num_forecast_steps))
    cfs_forecast_steps = int(np.min([num_forecast_steps, cfs_forecast.dims['f_hour']]))
    cfs_f = cfs_forecast.variables['z500'].values
    # Normalize by the same std and mean as the predictor dataset
    if not scale_variables:
        cfs_f = (cfs_f - sel_mean) / sel_std
    mse.append(verify.forecast_error(cfs_f[:cfs_forecast_steps], verification.values[:cfs_forecast_steps],
                                     method=method))
    model_labels.append('CFS')
    f_hours.append(np.arange(dt, cfs_forecast_steps * dt + 1., dt))
    # Drop references so the large arrays can be garbage-collected.
    cfs_f, cfs_v = None, None
#%% Add persistence and climatology
# Persistence and monthly-climatology baselines computed from the data itself.
if plot_mse:
    print('Calculating persistence forecasts...')
    init = validation_data.predictors.sel(**selection,
                                          lat=((validation_data.lat >= lat_min) & (validation_data.lat <= lat_max)))
    if scale_variables:
        init = init * sel_std + sel_mean
    # With multiple input time steps, persist the most recent one.
    if 'time_step' in init.dims:
        init = init.isel(time_step=-1)
    # Persistence forecast: repeat the initial state for every forecast step.
    mse.append(verify.forecast_error(np.repeat(init.values[None, ...], num_forecast_steps, axis=0),
                                     verification.values, method=method))
    model_labels.append('Persistence')
    f_hours.append(np.arange(dt, num_forecast_steps * dt + 1., dt))
    print('Calculating climatology forecasts...')
    # Climatology is computed from the full predictor dataset, not just validation.
    climo_data = data['predictors'].sel(**selection, lat=((data.lat >= lat_min) & (data.lat <= lat_max)))
    if scale_variables:
        climo_data = climo_data * sel_std + sel_mean
    mse.append(verify.monthly_climo_error(climo_data, validation_set, n_fhour=num_forecast_steps, method=method))
    model_labels.append('Climatology')
    f_hours.append(np.arange(dt, num_forecast_steps * dt + 1., dt))
#%% Plot the combined MSE as a function of forecast hour for all models
# Combined error-vs-forecast-hour figure for all evaluated models.
if plot_mse:
    if plot_spread:
        # Spread mode: baselines are plotted individually; the DLWP ensemble is
        # shown as its mean with a +/- 1 std shaded band.
        fig = plt.figure()
        fig.set_size_inches(6, 4)
        for m, model in enumerate(model_labels):
            if model in ['Barotropic', 'CFS', 'Persistence', 'Climatology']:
                plt.plot(f_hours[m], mse[m], label=model, linewidth=2.)
        # The first len(models) entries of mse belong to the DLWP models.
        mean = np.mean(np.array(mse[:len(models)]), axis=0)
        plt.plot(f_hours[0], mean, 'k-', label=r'DLWP mean', linewidth=1.)
        std = np.std(np.array(mse[:len(models)]), axis=0)
        plt.fill_between(f_hours[0], mean - std, mean + std,
                         facecolor=(0.5, 0.5, 0.5, 0.5), zorder=-50)
        plt.xlim([0, np.max(np.array(f_hours))])
        plt.xticks(np.arange(0, np.max(np.array(f_hours)) + 1, 2 * dt))
        # NOTE(review): y-range 0-140 looks tuned for Z500 error magnitudes -- confirm.
        plt.ylim([0, 140])
        plt.yticks(np.arange(0, 141, 20))
        plt.legend(loc='best', fontsize=8)
        plt.grid(True, color='lightgray', zorder=-100)
        plt.xlabel('forecast hour')
        plt.ylabel(method.upper())
        plt.title(mse_title)
        plt.savefig('%s/%s' % (plot_directory, mse_file_name), bbox_inches='tight')
        plt.show()
    else:
        fig = plt.figure()
        fig.set_size_inches(6, 4)
        for m, model in enumerate(model_labels):
            if model in ['Barotropic', 'CFS', 'Persistence', 'Climatology']:
                plt.plot(f_hours[m], mse[m], label=model, linewidth=2.)
            else:
                if plot_mean:
                    # Thin dashed/dotted lines keep individual DLWP members readable
                    # when the ensemble mean is also drawn.
                    plt.plot(f_hours[m], mse[m], label=model, linewidth=1., linestyle='--' if m < 10 else ':')
                else:
                    plt.plot(f_hours[m], mse[m], label=model, linewidth=2.)
        if plot_mean:
            plt.plot(f_hours[0], np.mean(np.array(mse[:len(models)]), axis=0), label='mean', linewidth=2.)
        plt.xlim([0, dt * num_forecast_steps])
        plt.xticks(np.arange(0, num_forecast_steps * dt + 1, 2 * dt))
        plt.ylim([0, 140])
        plt.yticks(np.arange(0, 141, 20))
        plt.legend(loc='best', fontsize=8)
        plt.grid(True, color='lightgray', zorder=-100)
        plt.xlabel('forecast hour')
        plt.ylabel(method.upper())
        plt.title(mse_title)
        plt.savefig('%s/%s' % (plot_directory, mse_file_name), bbox_inches='tight')
        plt.show()
if mse_pkl_file is not None:
    # Persist the raw curves so figures can be regenerated without re-running models.
    result = {'f_hours': f_hours, 'models': models, 'model_labels': model_labels, 'mse': mse}
    with open('%s/%s' % (plot_directory, mse_pkl_file), 'wb') as f:
        pickle.dump(result, f)
print('Done writing figures to %s' % plot_directory)
|
webmasters/webmasters-cms | spec/controllers/webmasters_cms/admin/pages_controller_spec.rb | require 'spec_helper'
module WebmastersCms
  module Admin
    # Controller specs for the CMS admin pages CRUD, run against the engine's
    # own routes with full view rendering.
    describe PagesController, type: :controller do
      routes { ::WebmastersCms::Engine.routes }
      render_views
      # Shared fixtures: a page and its first translation.
      let (:page) { create(:webmasters_cms_page) }
      let (:page_translation) { page.translations.first }
      describe "GET #index" do
        before :each do
          get :index
        end
        it "shows all available Pages" do
          expect(assigns(:collection)).to eq([page])
        end
        it "renders the #index view" do
          expect(response).to render_template :index
        end
      end
      describe "GET #show" do
        it "assigns the requested Page to @resource" do
          get :show, params: {id: page, language: page_translation.language}
          expect(assigns(:resource)).to eq(page)
        end
        it "renders the #show view" do
          get :show, params: {id: page, language: page_translation.language}
          expect(response).to render_template :show
        end
        it "renders a 404 given an invalid Id" do
          expect{get :show, params: {id: ""} }.to raise_error(ActiveRecord::RecordNotFound)
        end
      end
      describe "GET #new" do
        before :each do
          get :new
        end
        it "assigns a new Page to @resource" do
          expect(assigns(:resource)).to be_a_new(Page)
        end
        it "renders the #new view" do
          expect(response).to render_template :new
        end
      end
      describe "POST #create" do
        before :each do
          # A translation can only be created for an active language.
          @active_language = FactoryBot.create(:webmasters_cms_active_language)
        end
        context "with valid attributes" do
          it "creates a new Page" do
            expect do
              page_params = attributes_for :webmasters_cms_page,
                translations_attributes: [
                  attributes_for(:webmasters_cms_page_translation,
                    language: @active_language.code)]
              post :create, params: { page: page_params}
            end.to change(Page, :count).by(1)
          end
          it "creates a new PageTranslation" do
            expect do
              page_params = attributes_for :webmasters_cms_page,
                translations_attributes: [
                  attributes_for(:webmasters_cms_page_translation,
                    language: @active_language.code)]
              post :create, params: { page: page_params}
            end.to change(PageTranslation, :count).by(1)
          end
          it "redirects to the edit" do
            page_params = attributes_for :webmasters_cms_page,
              translations_attributes: [
                attributes_for(:webmasters_cms_page_translation,
                  language: @active_language.code)]
            post :create, params: {page: page_params}
            # expect(response).to redirect_to admin_pages_path
            expect(response).to redirect_to action: 'edit', id: assigns(:resource).id
          end
        end
        context "with invalid attributes" do
          it "does not create a new Page" do
            expect do
              page_params = attributes_for :webmasters_cms_page,
                translations_attributes: [
                  attributes_for(:webmasters_cms_page_translation, :invalid)]
              post :create, params: { page: page_params}
            end.to_not change(Page, :count)
          end
          it "does not create a new PageTranslation" do
            expect do
              page_params = attributes_for :webmasters_cms_page,
                translations_attributes: [
                  attributes_for(:webmasters_cms_page_translation, :invalid)]
              post :create, params: { page: page_params}
            end.to_not change(PageTranslation, :count)
          end
          it "stays in the #new view" do
            page_params = attributes_for :webmasters_cms_page,
              translations_attributes: [
                attributes_for(:webmasters_cms_page_translation, :invalid)]
            post :create, params: { page: page_params}
            expect(response).to be_successful
            expect(response).to render_template :new
          end
        end
      end
      describe "GET #edit" do
        it "assigns the requested Page to @page" do
          get :edit, params: { id: page }
          expect(assigns(:resource)).to eq(page)
        end
        it "renders the #edit view" do
          get :edit, params: { id: page }
          expect(response).to render_template :edit
        end
      end
      describe "PUT #update" do
        context "with valid attributes" do
          # Lambda so each example can trigger the update (and reload) itself.
          let(:expect_block) do
            lambda do
              params = attributes_for(
                :webmasters_cms_page,
                translations_attributes: [
                  attributes_for(:webmasters_cms_page_translation,
                    name: "UpdatedName",
                    local_path: "UpdatedLocalpath",
                    id: page_translation.id,
                    language: page_translation.language
                  )
                ]
              )
              put :update, params: { id: page_translation.page_id, page: params }
              page_translation.reload
            end
          end
          it "updates page_translation" do
            expect(expect_block).to change{[page_translation.name, page_translation.local_path]}
          end
          it "redirects to the edit page" do
            expect_block.call
            # expect(response).to redirect_to admin_pages_path
            expect(response).to redirect_to action: 'edit', id: page_translation.page_id
          end
          it "has no error messages" do
            expect(page_translation.errors.full_messages).to be_blank
          end
        end
        context "with invalid attributes" do
          let(:expect_block) do
            lambda do
              page_params = attributes_for :webmasters_cms_page,
                translations_attributes: [
                  attributes_for(:webmasters_cms_page_translation, :invalid,
                    name: 'other',
                    id: page_translation.id,
                    language: page_translation.language)]
              put :update, params: { id: page_translation.page_id, page: page_params }
              page_translation.reload
            end
          end
          it "does not update the page_translation" do
            expect(expect_block).to_not change { page_translation.name }
          end
          it "stays on the #edit view" do
            expect_block.call
            expect(response).to render_template :edit
          end
        end
      end
      describe "POST #delete" do
        it "deletes the requested Page" do
          delete_cms_page = create(:webmasters_cms_page)
          expect { delete :destroy, params: {id: delete_cms_page} }.to change(Page,:count).by(-1)
        end
        it "redirects to the index" do
          delete :destroy, params: {id: page}
          expect(response).to redirect_to admin_pages_path
        end
      end
      describe "PUT #sort" do
        it "does not redirect" do
          expect(put :sort).to_not render_template :index
        end
        it "updates the tree" do
          # Reparent child_page under parent_page via the sort payload.
          child_page = create(:webmasters_cms_page)
          parent_page = create(:webmasters_cms_page)
          put :sort, params: {page: {child_page.id => parent_page.id} }
          child_page.reload
          expect(response).to be_successful
          expect(child_page.parent_id).to eq(parent_page.id)
        end
      end
      describe "PATCH #set_current_version" do
        it "reverts the object to an other version" do
          # Two stored versions exist; the translation is reverted to the first.
          translation = create(:webmasters_cms_page_translation, page: page, language: 'xx')
          translation_version =
            create(:webmasters_cms_page_translation_version,
              page_translation: translation,
              version: translation.version)
          # translation_version2 =
          create(:webmasters_cms_page_translation_version,
            page_translation: translation,
            version: translation.version + 1)
          patch :set_current_version,
            params: {id: translation.id,
              page_translation: { version: translation_version.version } }
          translation.reload
          expect(translation.version).to eq(translation_version.version)
        end
      end
    end
  end
end
|
REDEAKAA/ToolJS | src/modules/str/random.js | <gh_stars>1-10
import ToolJS from "../main";
import { Logs } from "../deps";
import isObj from "../obj/isObj";
/**
* This method generates a random string in accordance to a set of options, and returns the string. Note that the min and max numbers are inclusive when generating integers.
* @method module:Str.random
* @param {Object} [options] An object that controls how the random string is generated.
* @param {Boolean} [options.alphanumeric=false] Tells the method to generate an alphanumeric string.
* @param {Boolean} [options.string=true] Tells the method to generate only strings.
* @param {Boolean} [options.integer=false] Tells the method to generate only integers.
* @param {Boolean} [options.characters] A string or characters to be used when generating string or alphanumeric randoms.
* @param {String} [options.casing="lowecase"] Specifies the casing of the generated string. Either "lowercase" or "uppercase".
* @param {Number} [options.length=10] Specifies the length of the generated string.
* @param {Number} [options.min=0] The min value of the range. This is only used when integer is set to true.
* @param {Number} [options.max=100] The max value of the range. This is only used when integer is set to true.
* @param {Boolean} [options.round=true] This determines if the value returned should be rounded.
* @param {Number} [options.decimals=3] If the round option is set to false, then this sets the number of decimal places to round to.
* @returns {String} The random string generated.
* @example
*
* var value = Str.random(); // returns a random string
*
* var value = Str.random({
* casing: "uppercase",
* length: 5,
* alphanumeric: true,
* }); // returns a random alphanumeric string 5 characters long in uppercase
*/
const random = (options) => {
    var debugging = ToolJS.env.debugging;

    // Defaults; each may be overridden by a matching, correctly-typed option.
    var output, min = 0, max = 100, round = true, decimals = 3, casing = "lowercase",
        length = 10, string = true, integer = false, alphanumeric = false,
        characters = "abcdefghijklmnopqrstuvwxyz";

    if (isObj(options)){
        min = (typeof options.min === "number") ? options.min : min;
        max = (typeof options.max === "number") ? options.max : max;
        round = (typeof options.round === "boolean") ? options.round : round;
        casing = (typeof options.casing === "string") ? options.casing : casing;
        string = (typeof options.string === "boolean") ? options.string : string;
        length = (typeof options.length === "number") ? options.length : length;
        integer = (typeof options.integer === "boolean") ? options.integer : integer;
        decimals = (typeof options.decimals === "number") ? options.decimals : decimals;
        characters = (typeof options.characters === "string") ? options.characters : characters;
        alphanumeric = (typeof options.alphanumeric === "boolean") ? options.alphanumeric : alphanumeric;
    }

    // Uniform random index in [0, len). Fixes the previous hard-coded 0..25
    // range, which produced `undefined` entries for custom `characters`
    // strings shorter than 26 characters.
    function _randomIndex(len) {
        return Math.floor(Math.random() * len);
    }

    function _random() {
        var text, num, result;
        var alphabet = characters.split("");

        if (alphanumeric === true){
            // Letters plus digits; the first character is always alphabetic so
            // the result never starts with a digit (same guarantee as before).
            // Also honors `length` exactly (the old base-20 substr could be short).
            var digits = ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9"];
            var pool = alphabet.concat(digits);
            text = "";
            if (length > 0) {
                text = alphabet[_randomIndex(alphabet.length)];
                for (var i = 1; i < length; i++) {
                    text += pool[_randomIndex(pool.length)];
                }
            }
        }
        else if (integer === true) {
            // min/max are inclusive. When round is false, toFixed() yields a
            // string, which matches the documented string return type.
            if (round == true) { num = _randomIndex(max - min + 1) + min; }
            else {
                num = Math.random() * (max - min + 1) + min;
                num = num.toFixed(decimals);
            }
        }
        else if(string == true){
            text = "";
            for (var j = 0; j < length; j++) {
                text += alphabet[_randomIndex(alphabet.length)];
            }
        }
        else if (integer == false && string == false) {
            if (debugging) Logs.throw("One of either integer or string must be set to 'true'");
        }

        if(text){
            if (casing == "uppercase") { text = text.toUpperCase(); }
            else if (casing == "lowercase") { text = text.toLowerCase(); }
            result = text;
        }
        else if (typeof num !== "undefined") {
            // Previously `else if (num)` silently dropped a legitimate 0 result
            // (e.g. integer mode with min = max = 0).
            result = num;
        }
        return result;
    }

    output = _random();
    return output;
}
export default random; |
neotran85/Android-AppyProduct-App | app/src/main/java/com/appyhome/appyproduct/mvvm/ui/appyproduct/cart/shipping/newaddress/NewAddressActivityModule.java | <gh_stars>0
package com.appyhome.appyproduct.mvvm.ui.appyproduct.cart.shipping.newaddress;
import com.appyhome.appyproduct.mvvm.R;
import com.appyhome.appyproduct.mvvm.data.DataManager;
import com.appyhome.appyproduct.mvvm.utils.rx.SchedulerProvider;
import dagger.Module;
import dagger.Provides;
/**
 * Dagger module wiring the "new shipping address" screen: supplies its view
 * model and the layout resource the activity inflates.
 */
@Module
public class NewAddressActivityModule {
    /**
     * Builds the screen's view model.
     *
     * @param dataManager       app-wide data access facade
     * @param schedulerProvider supplies Rx schedulers for background/UI work
     * @return a new {@link NewAddressViewModel}
     */
    @Provides
    NewAddressViewModel provideNewAddressViewModel(DataManager dataManager,
                                                   SchedulerProvider schedulerProvider) {
        return new NewAddressViewModel(dataManager, schedulerProvider);
    }

    /** Layout resource id inflated by the host activity. */
    @Provides
    int provideLayoutId() {
        return R.layout.activity_product_shipping_new;
    }
}
|
bspitzmacher/pynet_course | class4/exercise3.py | <reponame>bspitzmacher/pynet_course
# Class 4 exercise 3
# Use Pexpect to retrieve the output of 'show ip int brief' from pynet-rtr2.
import pexpect
from getpass import getpass
def main():
    # Connect to pynet-rtr2 over SSH with pexpect and print 'show ip int brief'.
    # NOTE: this file is Python 2 (print statement below).
    ip_addr = '172.16.31.10'
    username = 'pyclass'
    port = 8022 # port 22 is rtr1, 8022:rtr2, 9822:srx
    password = getpass()
    ssh_conn = pexpect.spawn('ssh -l {} {} -p {}'.format(username, ip_addr, port))
    # Fail fast if any expected prompt does not appear within 3 seconds.
    ssh_conn.timeout = 3
    ssh_conn.expect('assword:')
    ssh_conn.sendline(password)
    ssh_conn.expect('#')
    # Disable paging so the full command output arrives in one read.
    ssh_conn.sendline('terminal length 0')
    ssh_conn.expect('#')
    ssh_conn.sendline('show ip int br')
    # Matching on the full hostname prompt captures everything before it.
    ssh_conn.expect('pynet-rtr2#')
    print ssh_conn.before

if __name__ == "__main__":
    main()
|
leander-dsouza/URC-2019 | Autonomous/auto_trav/scripts/depth.py | <gh_stars>1-10
#!/usr/bin/env python
from __future__ import print_function
from collections import defaultdict
import roslib
roslib.load_manifest('auto_trav')
import sys
import rospy
import cv2
import numpy as np
import time
from std_msgs.msg import String
from sensor_msgs.msg import Image
from cv_bridge import CvBridge, CvBridgeError
from std_msgs.msg import String
r=l=0
class image_converter:
    """Depth-image obstacle detector: subscribes to the Kinect depth stream,
    segments near obstacles and publishes a steering hint ('left' / 'right' /
    'straight') on /kinect_data.
    """

    def __init__(self):
        # Publishes the steering decision consumed by the traversal logic.
        self.pubdist=rospy.Publisher("/kinect_data",String, queue_size=1)
        self.bridge = CvBridge()
        self.image_sub = rospy.Subscriber("/camera/depth/image_rect_raw",Image,self.callback)

    def callback(self,data):
        # r/l are module-level debounce counters: a turn is only commanded after
        # the same obstacle side has been seen on more than 2 consecutive frames.
        global r,l
        try:
            # NOTE(review): forcing 'bgr8' onto a depth image reinterprets the
            # raw depth buffer as a colour image -- confirm this is intentional.
            data.encoding = 'bgr8'
            cv_image = self.bridge.imgmsg_to_cv2(data, "bgr8")
        except CvBridgeError as e:
            print(e)
        #print(cv_image.shape)
        # Hue band 0-35 (any S/V) selects the "near" range of the reinterpreted data.
        lower_blue = np.array([0,0,0])
        upper_blue = np.array([35,255,255])
        # Crop to the region of interest: rows 70-274, full 640-pixel width.
        cv_image=cv_image[70:275,0:640]
        hsv = cv2.cvtColor(cv_image, cv2.COLOR_BGR2HSV)
        hsv = cv2.medianBlur(hsv,19)
        mask = cv2.inRange(hsv, lower_blue, upper_blue)
        kernel = np.ones((5,5),np.uint8)
        kernel2 = np.ones((10,10),np.uint8)
        #opening = cv2.morphologyEx(mask, cv2.MORPH_OPEN, kernel)
        res = cv2.bitwise_and(cv_image, cv_image, mask= mask)
        rgb = cv2.cvtColor(res, cv2.COLOR_HSV2BGR)
        res = cv2.cvtColor(rgb, cv2.COLOR_BGR2GRAY)
        ret,thresh = cv2.threshold(res,50,255,cv2.THRESH_BINARY)
        thresh = cv2.medianBlur(thresh,15)
        #thresh = cv2.morphologyEx(thresh, cv2.MORPH_CLOSE, kernel)
        # Morphological opening with the larger kernel removes small speckle blobs.
        thresh = cv2.morphologyEx(thresh, cv2.MORPH_OPEN, kernel2)
        ##############################################################################################333
        #cv2.imshow("Res",thresh)
        ###############################################################################################
        areadict=defaultdict(lambda:0)  # NOTE(review): never used below
        _,contours,hierarchy = cv2.findContours(thresh, cv2.RETR_TREE,cv2.CHAIN_APPROX_SIMPLE)
        CX =list()
        CY = list()
        area = list()
        max_area = 0
        # Compute the centroid and area of every detected contour.
        for i in range(len(contours)):
            cnt = contours[i]
            M = cv2.moments(cnt)
            if M["m00"] != 0:
                cx = int(M["m10"] / M["m00"])
                cy = int(M["m01"] / M["m00"])
            else:
                # set values as what you need in the situation
                cx=int(0)
                cy=int(0)
            CY.append(int(cy))
            CX.append(int(cx))
            area.append(cv2.contourArea(cnt))
        # Steer away from the largest blob, but only when 1-4 blobs are present;
        # 5 or more detections are treated as noise and we keep going straight.
        if len(area)>0 and len(area)<5:
            max_area = max(area)
            ind=area.index(max_area)
            print(CX[ind])
            print(CY[ind])
            print(area[ind])
            # Obstacle on the left half of the 640px frame -> turn right, and
            # vice versa, once the debounce counter exceeds 2 frames.
            if CX[ind]<320 and area[ind]>200 and CY[ind]<200:
                if r>2:
                    print('Right')
                    self.pubdist.publish('right')
                r=r+1
            elif CX[ind]>320 and area[ind]>200 and CY[ind]<200:
                if l>2:
                    print("Left")
                    self.pubdist.publish('left')
                l=l+1
            else:
                print('Straight')
                r=l=0
                self.pubdist.publish('straight')
        else:
            r=l=0
            print('Straight')
            self.pubdist.publish('straight')
        ########################################################################################################33
        #cv2.imshow("Image window", hsv)
        #cv2.imshow("Image window2", cv_image)
        ##########################################################################################################
        cv2.waitKey(3)
def main(args):
    """Start the depth-image converter node and block until ROS shuts down.

    ``args`` is accepted for CLI compatibility but not used.
    """
    converter = image_converter()
    rospy.init_node('image_converter', anonymous=True)
    try:
        rospy.spin()
    except KeyboardInterrupt:
        print("Shutting down")
    cv2.destroyAllWindows()


if __name__ == '__main__':
    main(sys.argv)
|
MoskitoHero/ruby-trello | spec/integration/webhook/find_spec.rb | require 'spec_helper'
# Integration spec: fetching a single Trello webhook by id, with HTTP traffic
# replayed from a recorded VCR cassette (no live API calls).
RSpec.describe 'Trello::Webhook#find' do
  include IntegrationHelpers
  before { setup_trello }
  it 'find with id' do
    VCR.use_cassette('webhook_find_with_id') do
      webhook = Trello::Webhook.find('5fa8a1e0009b2a6a669e6efa')
      expect(webhook).to be_a(Trello::Webhook)
      # All public attributes should be populated from the recorded response.
      expect(webhook.id).not_to be_nil
      expect(webhook.description).not_to be_nil
      expect(webhook.model_id).not_to be_nil
      expect(webhook.callback_url).not_to be_nil
      expect(webhook.active).not_to be_nil
      expect(webhook.consecutive_failures).not_to be_nil
    end
  end
end
|
adakgh/enterprise-web-application-project | backend/src/main/java/com/example/demo/services/ProductService.java | <gh_stars>0
package com.example.demo.services;
import com.example.demo.exceptions.ResourceNotFoundException;
import com.example.demo.models.dto.ProductDto;
import com.example.demo.persistence.entities.*;
import com.example.demo.persistence.repositories.DiscountPriceRepository;
import com.example.demo.persistence.repositories.ImageRepository;
import com.example.demo.persistence.repositories.ProductCategoryRepository;
import com.example.demo.persistence.repositories.ProductRepository;
import com.example.demo.search.ProductSpecification;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import lombok.AllArgsConstructor;
import org.modelmapper.ModelMapper;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.stereotype.Service;
import javax.persistence.EntityManager;
import java.math.BigDecimal;
import java.util.*;
/**
 * Service layer for product CRUD: dynamic search, creation and updates from
 * frontend JSON payloads, including optional product images and quantity
 * discounts.
 */
@Service
@AllArgsConstructor
public class ProductService {

    // Retained for constructor compatibility (@AllArgsConstructor); only
    // referenced by previously commented-out code.
    private final EntityManager entityManager;
    private final ModelMapper modelmapper;
    private final ProductRepository productRepository;
    private final ProductCategoryRepository productCategoryRepository;
    private final UserService userService;
    private final ImageRepository imageRepository;
    private final DiscountPriceRepository discountPriceRepository;

    /**
     * Searches products using the raw request parameters as a dynamic
     * specification.
     *
     * @param queryMap request parameters interpreted by {@link ProductSpecification}
     * @param pageable paging/sorting information
     * @return a page of {@link ProductDto}s with supplier data attached
     */
    public Page<ProductDto> searchAll(Map<String, String> queryMap, Pageable pageable) {
        var productPage = productRepository.findAll(new ProductSpecification(queryMap), pageable);
        return productPage.map((ProductEntity p) -> {
            ProductDto dto = modelmapper.map(p, ProductDto.class);
            dto.setSupplierData(p.getSupplier());
            return dto;
        });
    }

    /**
     * Find a product by ID.
     *
     * @param id The id of the product you want
     * @return The found product
     * @throws ResourceNotFoundException when no product exists with the given id
     */
    public ProductEntity findById(long id) {
        return productRepository.findById(id).orElseThrow(() ->
                new ResourceNotFoundException("Product not found with id: " + id));
    }

    /**
     * Delete a product by ID.
     *
     * @param id The id of the product
     */
    public void deleteById(long id) {
        // TODO: allow only product removals belonging to user.
        productRepository.deleteById(id);
    }

    /**
     * Creates and saves a product from the frontend JSON payload; also stores
     * an optional product image ("url"/"imageName"/"type" fields) and any
     * quantity discounts ("productDiscounts" array).
     *
     * @param queryMap the JSON product data received from the frontend
     * @return the persisted product
     */
    public ProductEntity save(ObjectNode queryMap) {
        ProductEntity product = new ProductEntity();
        applyBasicFields(product, queryMap);
        UserEntity user = userService.getCurrentUser();
        product.setSupplier(user.getSupplier());
        product.setAddedDate(new Date());
        attachImage(product, queryMap);
        // Null-safe: the original called .size() directly and would NPE when
        // the "productDiscounts" key was absent from the payload.
        JsonNode discounts = queryMap.get("productDiscounts");
        if (discounts != null && discounts.size() > 0) {
            attachDiscounts(product, discounts);
        }
        return productRepository.save(product);
    }

    /**
     * Updates an existing product from the frontend JSON payload. A provided
     * "productDiscounts" array fully replaces the existing discount set.
     *
     * @param id      the product ID
     * @param product the updated product values
     * @return the updated product
     * @throws ResourceNotFoundException when no product exists with the given id
     */
    public ProductEntity update(long id, ObjectNode product) {
        // findById() gives a consistent ResourceNotFoundException; the original
        // Optional.get() threw NoSuchElementException for unknown ids.
        ProductEntity newUpdatedProduct = findById(id);
        applyBasicFields(newUpdatedProduct, product);
        attachImage(newUpdatedProduct, product);
        JsonNode discounts = product.get("productDiscounts");
        if (discounts != null && discounts.size() > 0) {
            // Remove every existing discount before attaching the new set.
            for (DiscountPriceEntity temp : newUpdatedProduct.getDiscounts()) {
                temp.setProduct(null);
                discountPriceRepository.delete(temp);
            }
            newUpdatedProduct.setDiscounts(new ArrayList<>());
            attachDiscounts(newUpdatedProduct, discounts);
        }
        return productRepository.save(newUpdatedProduct);
    }

    /** Copies the scalar JSON fields (title/price/stock/unit/category/description) onto the entity. */
    private void applyBasicFields(ProductEntity product, ObjectNode data) {
        product.setName(data.get("title").asText());
        product.setPrice(data.get("price").decimalValue());
        product.setQuantity(data.get("stock").asDouble());
        product.setUnit(data.get("unit").asText());
        long categoryId = Long.parseLong(data.get("category").asText());
        product.addProductCategory(productCategoryRepository.getOne(categoryId));
        product.setDescription(data.get("description").asText());
    }

    /** Persists and attaches the payload's image, if a "url" field is present. */
    private void attachImage(ProductEntity product, ObjectNode data) {
        if (data.get("url") != null) {
            ImageEntity imageEntity = new ImageEntity(data.get("imageName").asText(),
                    data.get("type").asText(), data.get("url").asText().getBytes());
            imageRepository.save(imageEntity);
            product.setProductImage(imageEntity);
        }
    }

    /** Persists each discount entry and links it to the product (both directions). */
    private void attachDiscounts(ProductEntity product, JsonNode discounts) {
        for (int i = 0; i < discounts.size(); i++) {
            DiscountPriceEntity discountPriceEntity = new DiscountPriceEntity();
            discountPriceEntity.setDiscountPrice(discounts.get(i).get("discountPrice").decimalValue());
            discountPriceEntity.setDiscountQuantity(discounts.get(i).get("discountQuantity").asText());
            discountPriceEntity.setProduct(product);
            discountPriceRepository.save(discountPriceEntity);
            product.addProductDiscount(discountPriceEntity);
        }
    }
}
|
college-questr/qeustr-front-end | src/Components/QuestionDetails/relatedQuestions.js | import React from 'react';
import Style from 'styled-components';
const RelatedQuestion = (props) => {
return (
<>
<Question>{props.questiionTitle}</Question>
<NumAnswers>{props.answerCount}</NumAnswers>
</>
)
}
// Styled paragraph for a related question's title (link-style blue).
const Question = Style.p`
    font-family: Roboto;
    font-style: normal;
    font-weight: normal;
    font-size: 14px;
    line-height: 16px;
    color: #3A76CF;
`

// Styled paragraph for the related question's answer count (plain black).
const NumAnswers = Style.p`
    font-family: Roboto;
    font-style: normal;
    font-weight: normal;
    font-size: 14px;
    line-height: 16px;
    color: #000000;
` |
olivierthas/Programming-Essentials-2 | Oefeningen_Java/ImportCSV/src/loadcsvindb/CSVLoader.java | <gh_stars>0
package loadcsvindb;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.sql.Connection;
import java.sql.PreparedStatement;
import org.apache.commons.lang3.StringUtils;
import com.opencsv.CSVReader;
/**
 * Bulk-loads a CSV file into a database table. The first CSV row supplies the
 * column names; the remaining rows are inserted in batches of 1000 within a
 * single transaction. NOTE: the wrapped {@link Connection} is closed when
 * {@link #loadCSV} finishes, so the loader is effectively single-use.
 */
public class CSVLoader {

    private static final String SQL_INSERT = "INSERT INTO ${table}(${keys}) VALUES(${values})";
    private static final String TABLE_REGEX = "\\$\\{table\\}";
    private static final String KEYS_REGEX = "\\$\\{keys\\}";
    private static final String VALUES_REGEX = "\\$\\{values\\}";

    private Connection connection;
    // NOTE(review): never passed to the CSVReader, so parsing always uses the
    // library's default separator regardless of this setting.
    private char seprator;

    public CSVLoader(Connection connection) {
        this.connection = connection;
    }

    /**
     * Loads the given CSV file into {@code tableName}.
     *
     * @param csvFile   path of the CSV file to read
     * @param tableName destination table
     * @throws Exception on a missing connection, unreadable/empty file, or SQL failure
     */
    public void loadCSV(String csvFile, String tableName) throws Exception {
        if(null == this.connection) {
            throw new Exception("Not a valid connection.");
        }

        CSVReader csvReader;
        try {
            csvReader = new CSVReader(new FileReader(csvFile));
        } catch (Exception e) {
            e.printStackTrace();
            throw new Exception("Error occured while executing file. " + e.getMessage());
        }

        // The outer try/finally guarantees the reader is closed on every path;
        // previously it leaked when the header row was missing or query
        // construction threw.
        try {
            String[] headerRow = csvReader.readNext();
            if (null == headerRow) {
                throw new FileNotFoundException( "No columns defined in given CSV file." + "Please check the CSV file format.");
            }

            // Build "INSERT INTO table(col1,col2,...) VALUES(?,?,...)" from the header.
            String questionmarks = StringUtils.repeat("?,", headerRow.length);
            questionmarks = (String) questionmarks.subSequence(0, questionmarks.length() - 1);

            String query = SQL_INSERT.replaceFirst(TABLE_REGEX, tableName);
            query = query.replaceFirst(KEYS_REGEX, StringUtils.join(headerRow, ","));
            query = query.replaceFirst(VALUES_REGEX, questionmarks);
            System.out.println("Query: " + query);

            String[] nextLine;
            Connection con = null;
            PreparedStatement ps = null;
            try {
                con = this.connection;
                con.setAutoCommit(false);
                ps = con.prepareStatement(query);

                final int batchSize = 1000;
                int count = 0;
                while ((nextLine = csvReader.readNext()) != null) {
                    int index = 1; // JDBC parameters are 1-based; index 0 held the titles
                    for (String string : nextLine) {
                        ps.setString(index++, string);
                    }
                    ps.addBatch();
                    System.out.println(ps);
                    if (++count % batchSize == 0) {
                        ps.executeBatch();
                    }
                }
                ps.executeBatch(); // insert remaining records
                con.commit();
            } catch (Exception e) {
                con.rollback();
                e.printStackTrace();
                throw new Exception("Error occured while loading data from file to database. " + e.getMessage());
            } finally {
                if (null != ps)
                    ps.close();
                if (null != con)
                    con.close();
            }
        } finally {
            csvReader.close();
        }
    }

    public char getSeprator() {
        return seprator;
    }

    public void setSeprator(char seprator) {
        this.seprator = seprator;
    }
}
|
bpossolo/deeplearning4j | nd4j/nd4j-backends/nd4j-tests/src/test/java/org/nd4j/linalg/BaseNd4jTest.java | /*******************************************************************************
* Copyright (c) 2015-2018 Skymind, Inc.
*
* This program and the accompanying materials are made available under the
* terms of the Apache License, Version 2.0 which is available at
* https://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* SPDX-License-Identifier: Apache-2.0
******************************************************************************/
package org.nd4j.linalg;
import lombok.extern.slf4j.Slf4j;
import org.junit.Before;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.nd4j.BaseND4JTest;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.factory.Nd4jBackend;
import java.util.*;
import static org.junit.Assume.assumeTrue;
/**
* Base Nd4j test
* @author <NAME>
*/
@RunWith(Parameterized.class)
@Slf4j
public abstract class BaseNd4jTest extends BaseND4JTest {
    protected Nd4jBackend backend;
    protected String name;
    public final static String DEFAULT_BACKEND = "org.nd4j.linalg.defaultbackend";

    public BaseNd4jTest() {
        this("", getDefaultBackend());
    }

    public BaseNd4jTest(String name) {
        this(name, getDefaultBackend());
    }

    public BaseNd4jTest(String name, Nd4jBackend backend) {
        this.backend = backend;
        this.name = name;
    }

    public BaseNd4jTest(Nd4jBackend backend) {
        // Random suffix keeps parameterized test names unique per backend instance.
        this(backend.getClass().getName() + UUID.randomUUID().toString(), backend);
    }

    private static List<Nd4jBackend> backends;
    static {
        // Discover backends on the classpath and keep those that can actually run
        // and are either explicitly requested, or implied by an empty filter list.
        ServiceLoader<Nd4jBackend> loadedBackends = ServiceLoader.load(Nd4jBackend.class);
        Iterator<Nd4jBackend> backendIterator = loadedBackends.iterator();
        backends = new ArrayList<>();
        List<String> backendsToRun = Nd4jTestSuite.backendsToRun();
        while (backendIterator.hasNext()) {
            Nd4jBackend backend = backendIterator.next();
            // FIX: the original condition "canRun() && contains(...) || isEmpty()" parsed as
            // "(canRun() && contains(...)) || isEmpty()", so an empty filter list admitted
            // backends that cannot run. canRun() must gate both branches.
            if (backend.canRun() && (backendsToRun.contains(backend.getClass().getName()) || backendsToRun.isEmpty()))
                backends.add(backend);
        }
    }

    @Parameterized.Parameters(name = "{index}: backend({0})={1}")
    public static Collection<Object[]> configs() {
        List<Object[]> ret = new ArrayList<>();
        for (Nd4jBackend backend : backends)
            ret.add(new Object[] {backend});
        return ret;
    }

    @Before
    public void beforeTest2(){
        Nd4j.factory().setOrder(ordering());
    }

    /**
     * Get the default backend (jblas)
     * The default backend can be overridden by also passing:
     * -Dorg.nd4j.linalg.defaultbackend=your.backend.classname
     * @return the default backend based on the
     * given command line arguments
     */
    public static Nd4jBackend getDefaultBackend() {
        String cpuBackend = "org.nd4j.linalg.cpu.nativecpu.CpuBackend";
        //String cpuBackend = "org.nd4j.linalg.cpu.CpuBackend";
        String gpuBackend = "org.nd4j.linalg.jcublas.JCublasBackend";
        String clazz = System.getProperty(DEFAULT_BACKEND, cpuBackend);
        try {
            // getDeclaredConstructor().newInstance() replaces the deprecated Class.newInstance(),
            // which swallowed checked exceptions thrown by the constructor.
            return (Nd4jBackend) Class.forName(clazz).getDeclaredConstructor().newInstance();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * The ordering for this test
     * This test will only be invoked for
     * the given test and ignored for others
     *
     * @return the ordering for this test
     */
    public char ordering() {
        return 'c';
    }

    public String getFailureMessage() {
        return "Failed with backend " + backend.getClass().getName() + " and ordering " + ordering();
    }
}
|
ampatspell/ember-cli-documents | addon/document/internal/document.js | import Ember from 'ember';
import InternalObject from './object';
import DocumentState from './-document-state';
import Queue from './-queue';
import DocumentsError from '../../util/error';
const {
Logger: { error }
} = Ember;
const prefixedKeys = [ 'id', 'rev', 'attachments' ];
const isKeyUnderscored = key => key && key.indexOf('_') === 0;
// Internal (non-Ember-model) representation of a single document.
// Wraps raw key/value state, tracks lifecycle flags via DocumentState,
// and serializes differently per destination type ('model', 'document', 'shoebox').
export default class InternalDocument extends InternalObject {

  constructor(store, database) {
    super(store, null);
    this.database = database;
    this.state = new DocumentState();   // isNew / isLoaded / isDeleted flags + error state
    this.queue = new Queue();           // serializes async operations (save/load/delete)
  }

  // Enqueue an async operation so document ops run one at a time.
  addOperation(op) {
    return this.queue.add(op);
  }

  get isDocument() {
    return true;
  }

  get isNew() {
    return this.state.isNew;
  }

  get isLoaded() {
    return this.state.isLoaded;
  }

  get isDeleted() {
    return this.state.isDeleted;
  }

  _modelWillDestroy() {
    if(this.isNew) {
      // keep _model so it is not recreated on next `model(true)` call
      this.database._willDestroyModelForNewInternalDocument(this);
    }
    super._modelWillDestroy();
  }

  // Lazily builds the public Ember model wrapper for this internal document.
  _createModel() {
    return this.store._createDocumentModel(this);
  }

  getId() {
    return this._getValue('id');
  }

  // Sets the document id; refuses (with a logged error) once the document has
  // been saved, since a persisted document's id must not change.
  setId(id) {
    if(!this.isNew && id !== this.getId()) {
      let current = this.getId();
      error(`Document id cannot be changed after document is saved. Attempted to set id '${id}' for document '${current}'`);
      return current;
    }
    return this._setValueNotify('id', id, 'model');
  }

  getRev() {
    return this._getValue('rev');
  }

  // Convenience accessor returning both identifiers as a plain object.
  getIdRev() {
    return {
      id: this.getId(),
      rev: this.getRev()
    };
  }

  //

  _createAttachments() {
    return this.store._createInternalAttachments(this);
  }

  // Returns the internal attachments container; creates it on demand when
  // `create` is true, otherwise may return undefined.
  _attachments(create) {
    let attachments = this._getValue('attachments');
    if(!attachments && create) {
      attachments = this._createAttachments();
      this.values.attachments = attachments;
    }
    return attachments;
  }

  getAttachments() {
    return this._attachments(true).model(true);
  }

  // Replaces attachment contents from model-side values and returns the model wrapper.
  setAttachments(values) {
    let attachments = this._attachments(true);
    attachments.withPropertyChanges(changed => attachments._deserialize(values, 'model', changed), true);
    return attachments.model(true);
  }

  // Applies the raw `_attachments` hash from a fetched document.
  // Skips entirely when neither side has attachments.
  deserializeAttachments(doc, changed) {
    let _attachments = doc._attachments;
    let attachments = this._attachments(false);
    if(!_attachments && !attachments) {
      return;
    }
    _attachments = _attachments || {};
    attachments = attachments || this._attachments(true);
    attachments._deserialize(_attachments, 'document', changed);
  }

  //

  // Maps underscore-prefixed document keys (_id, _rev, _attachments) to their
  // internal unprefixed names; everything else defers to the superclass.
  _deserializeDocumentKey(key) {
    if(key.startsWith('_')) {
      let sliced = key.slice(1);
      if(prefixedKeys.includes(sliced)) {
        return sliced;
      }
    }
    return super._deserializeDocumentKey(key);
  }

  // Inverse of _deserializeDocumentKey: re-adds the underscore prefix on write-out.
  _serializeDocumentKey(key) {
    if(prefixedKeys.includes(key)) {
      return `_${key}`;
    }
    return super._serializeDocumentKey(key);
  }

  //

  // Underscored keys are reserved and silently ignored; 'attachments' is routed
  // to the attachments container instead of plain value storage.
  _setValue(key, ...rest) {
    if(isKeyUnderscored(key)) {
      return;
    }
    if(key === 'attachments') {
      return this._attachments(true)._deserialize(...rest);
    }
    return super._setValue(...arguments);
  }

  _getValue(key) {
    if(isKeyUnderscored(key)) {
      return;
    }
    return super._getValue(...arguments);
  }

  //

  // Applies id/rev from the server response after a delete.
  deserializeDeleted(json, changed) {
    let type = 'document';
    let { id, rev } = json;
    this._setValue('id', id, type, changed);
    this._setValue('rev', rev, type, changed);
  }

  // Applies id/rev from the server response after a save.
  deserializeSaved(json, changed) {
    let type = 'document';
    let { id, rev } = json;
    this._setValue('id', id, type, changed);
    this._setValue('rev', rev, type, changed);
  }

  //

  // New or never-loaded documents are excluded from shoebox (fastboot) serialization.
  shouldSerialize(type) {
    if(type === 'shoebox') {
      if(this.isNew || !this.isLoaded) {
        return false;
      }
    }
    return super.shouldSerialize(type);
  }

  //

  // Invokes the named DocumentState transition (e.g. 'onLoaded') inside a
  // property-change batch; `notify` controls whether observers fire.
  setState(name, notify=true) {
    this.withPropertyChanges(changed => {
      let state = this.state;
      let fn = state[name];
      fn.call(state, changed);
    }, notify);
  }

  //

  // The database schedules each operation through the internal queue and
  // returns a promise for its completion.
  save() {
    return this.database._scheduleInternalSave(this, ...arguments).promise;
  }

  load() {
    return this.database._scheduleInternalLoad(this, ...arguments).promise;
  }

  reload() {
    return this.database._scheduleInternalReload(this, ...arguments).promise;
  }

  delete() {
    return this.database._scheduleInternalDelete(this, ...arguments).promise;
  }

  // Normalizes any thrown value into an Error, records it on the state, and
  // returns the normalized error for re-throwing by callers.
  onError(err, notify) {
    err = err instanceof Error ? err : new DocumentsError(err);
    this.withPropertyChanges(changed => this.state.onError(err, changed), notify);
    return err;
  }

}
|
rigwild/vue-express-ws-mongo-boilerplate | server/webSocket/index.js | 'use strict'
import WebSocket from 'ws'
import { formatLog, formatError } from '../functions'
import { TEST_MODE } from '../../config'
import { wsLogger } from '../winston.config'
import messageHandler from './messageHandler'
/**
* @typedef {function} ErrorLogger
* @param {Error} err an Error object
*/
/**
* Handle thrown errors
*
* @param {object} ws a WebSocket connected client
* @returns {ErrorLogger} the actual error logger
*/
// Curried error reporter: bind a client socket once, then feed it errors.
// Each error is normalized, sent to the client as JSON, and (outside of the
// test suite) recorded in the WebSocket log.
export const errorHandler = ws => err => {
  const payload = formatError(err)
  const serialized = JSON.stringify(payload)
  ws.send(serialized)
  if (!TEST_MODE) wsLogger.error(payload)
}
/**
* Create the WebSocket server
*
* @param {*} httpServer an HTTP node object (provided by Express here)
* @returns {void}
*/
// Attach a WebSocket endpoint to the given HTTP server and wire up
// lifecycle logging plus per-client message/error handling.
const createWsServer = httpServer => {
  const server = new WebSocket.Server({ server: httpServer })

  server.on('listening', () => wsLogger.info(formatLog('The WebSocket server was started')))
  server.on('error', err => wsLogger.error(formatError(err)))

  server.on('connection', socket => {
    wsLogger.info(formatLog('New client connected.'))

    // Incoming messages are dispatched asynchronously; any rejection is
    // funneled through the shared error handler for this socket.
    socket.on('message', data => messageHandler(socket)(data).catch(err => errorHandler(socket)(err)))
    socket.on('error', err => errorHandler(socket)(err))
    socket.on('close', () => wsLogger.info(formatLog('Client disconnected.')))
  })
}
export default createWsServer
|
dbiir/pard | pard-optimizer/src/main/java/cn/edu/ruc/iir/pard/sql/expr/rules/ContainEliminateLaw.java | package cn.edu.ruc.iir.pard.sql.expr.rules;
import cn.edu.ruc.iir.pard.sql.expr.CompositionExpr;
import cn.edu.ruc.iir.pard.sql.expr.Expr;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Simplification law that removes redundant conditions from a composition:
 * if condition A logically contains condition B (B's conditions are a subset
 * of A's), A is dropped and only the weaker/smaller B is kept.
 */
public class ContainEliminateLaw
        extends ExprLaw
{
    /**
     * Returns true when {@code children} is contained in {@code parent}:
     * either {@code children} is a single condition that appears among the
     * parent composition's conditions, or every condition of the child
     * composition also appears in the parent composition.
     *
     * @param parent candidate containing expression
     * @param children candidate contained expression
     * @return true if {@code parent} logically contains {@code children}
     */
    public boolean isParent(Expr parent, Expr children)
    {
        if (!(children instanceof CompositionExpr) && parent instanceof CompositionExpr) {
            CompositionExpr expr = (CompositionExpr) parent;
            for (Expr e : expr.getConditions()) {
                if (children.equals(e)) {
                    return true;
                }
            }
        }
        else if (children instanceof CompositionExpr && parent instanceof CompositionExpr) {
            CompositionExpr p = (CompositionExpr) parent;
            CompositionExpr c = (CompositionExpr) children;
            // A larger child cannot be a subset of a smaller parent.
            if (c.getConditions().size() > p.getConditions().size()) {
                System.out.println("warning: cmp un optimized!");
                return false;
            }
            for (Expr ce : c.getConditions()) {
                boolean isIncludedByParent = false;
                for (Expr pe : p.getConditions()) {
                    if (ce.equals(pe)) {
                        isIncludedByParent = true;
                        break; // FIX: stop scanning once a match is found
                    }
                }
                if (!isIncludedByParent) {
                    return false;
                }
            }
            return true;
        }
        return false;
    }

    /**
     * Applies the law to a composition: any condition that is the parent of
     * another condition is redundant and is removed from the result.
     * Non-composition expressions are returned as a clone, unchanged.
     */
    @Override
    public Expr apply(Expr expr)
    {
        if (expr instanceof CompositionExpr) {
            CompositionExpr ce = (CompositionExpr) expr;
            CompositionExpr ret = new CompositionExpr(ce.getLogicOperator());
            // Maps each redundant condition to the (contained) condition that makes it redundant.
            Map<Expr, Expr> parent = new HashMap<Expr, Expr>();
            List<Expr> singleExpr = new ArrayList<Expr>();
            List<CompositionExpr> compExpr = new ArrayList<CompositionExpr>();
            for (Expr tp : ce.getConditions()) {
                if (tp instanceof CompositionExpr) {
                    compExpr.add((CompositionExpr) tp);
                }
                else {
                    singleExpr.add(tp);
                }
            }
            // Sort compositions by size so containment checks below only need
            // to compare smaller expressions against strictly later (larger) ones.
            compExpr.sort((x, y)->x.getConditions().size() - y.getConditions().size());
            // A composition containing a single condition is weaker than that
            // single condition alone, so mark the single's containing composition redundant.
            for (Expr single : singleExpr) {
                for (CompositionExpr comp : compExpr) {
                    if (isParent(comp, single)) {
                        parent.put(single, comp);
                    }
                }
            }
            // Pairwise containment among compositions, smaller vs. larger.
            for (int i = 0; i < compExpr.size() - 1; i++) {
                CompositionExpr little = compExpr.get(i);
                for (int j = i + 1; j < compExpr.size(); j++) {
                    CompositionExpr big = compExpr.get(j);
                    if (isParent(big, little)) {
                        parent.put(little, big);
                    }
                }
            }
            // Keep only conditions that were not flagged as redundant.
            // NOTE(review): conditions appear as *keys* when some other condition
            // contains them is not the case — keys are the contained (kept) side;
            // a condition flagged here (non-null lookup) is skipped. Confirm intent
            // matches the superseded hand-rolled loop this replaced.
            for (Expr tp : ce.getConditions()) {
                if (parent.get(tp) == null) {
                    ret.getConditions().add(tp);
                }
            }
            return ret;
        }
        else {
            return Expr.clone(expr);
        }
    }
}
|
jiadaizhao/LeetCode | 0901-1000/0977-Squares of a Sorted Array/0977-Squares of a Sorted Array.cpp | <gh_stars>10-100
class Solution {
public:
    /**
     * Given a vector sorted in non-decreasing order (possibly containing
     * negatives), return the squares of its elements, also sorted.
     *
     * Two-pointer scan from both ends filling the result from the back:
     * the largest square is always at one of the two ends of the sorted
     * input, so this runs in O(n) instead of square-then-sort O(n log n).
     */
    vector<int> sortedSquares(vector<int>& A) {
        // FIX: compute the size as a signed int first; the original
        // `int right = A.size() - 1` underflows size_t for empty input
        // before an implementation-defined narrowing conversion.
        const int n = static_cast<int>(A.size());
        vector<int> result(n);
        int left = 0, right = n - 1;
        for (int i = n - 1; i >= 0; --i) {
            const int ls = A[left] * A[left];
            const int rs = A[right] * A[right];
            if (ls <= rs) {
                result[i] = rs;
                --right;
            }
            else {
                result[i] = ls;
                ++left;
            }
        }
        return result;
    }
};
|
geningxiang/javadoc-help | javadoc-demo/src/main/java/org/genx/javadoc/entity/Article.java | package org.genx.javadoc.entity;
import java.util.Date;
/**
* Created with IntelliJ IDEA.
* Description:
* @author genx
* @date 2020/9/8 21:17
*/
public class Article {
/**
* 资讯ID
*/
private Long id;
/**
* 标题
*/
private String title;
/**
* 资讯内容
*/
private String content;
/**
* 资讯创建时间
*/
private Date createTime;
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
public String getContent() {
return content;
}
public void setContent(String content) {
this.content = content;
}
public Date getCreateTime() {
return createTime;
}
public void setCreateTime(Date createTime) {
this.createTime = createTime;
}
}
|
s12v/aws-sdk-java | aws-java-sdk-pinpointsmsvoice/src/main/java/com/amazonaws/services/pinpointsmsvoice/AmazonPinpointSMSVoiceClient.java | <filename>aws-java-sdk-pinpointsmsvoice/src/main/java/com/amazonaws/services/pinpointsmsvoice/AmazonPinpointSMSVoiceClient.java
/*
* Copyright 2013-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.pinpointsmsvoice;
import org.w3c.dom.*;
import java.net.*;
import java.util.*;
import javax.annotation.Generated;
import org.apache.commons.logging.*;
import com.amazonaws.*;
import com.amazonaws.annotation.SdkInternalApi;
import com.amazonaws.auth.*;
import com.amazonaws.handlers.*;
import com.amazonaws.http.*;
import com.amazonaws.internal.*;
import com.amazonaws.internal.auth.*;
import com.amazonaws.metrics.*;
import com.amazonaws.regions.*;
import com.amazonaws.transform.*;
import com.amazonaws.util.*;
import com.amazonaws.protocol.json.*;
import com.amazonaws.util.AWSRequestMetrics.Field;
import com.amazonaws.annotation.ThreadSafe;
import com.amazonaws.client.AwsSyncClientParams;
import com.amazonaws.client.builder.AdvancedConfig;
import com.amazonaws.services.pinpointsmsvoice.AmazonPinpointSMSVoiceClientBuilder;
import com.amazonaws.AmazonServiceException;
import com.amazonaws.services.pinpointsmsvoice.model.*;
import com.amazonaws.services.pinpointsmsvoice.model.transform.*;
/**
* Client for accessing Pinpoint SMS Voice. All service calls made using this client are blocking, and will not return
* until the service call completes.
* <p>
* Pinpoint SMS and Voice Messaging public facing APIs
*/
@ThreadSafe
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class AmazonPinpointSMSVoiceClient extends AmazonWebServiceClient implements AmazonPinpointSMSVoice {
/** Provider for AWS credentials. */
private final AWSCredentialsProvider awsCredentialsProvider;
private static final Log log = LogFactory.getLog(AmazonPinpointSMSVoice.class);
/** Default signing name for the service. */
private static final String DEFAULT_SIGNING_NAME = "sms-voice";
/** Client configuration factory providing ClientConfigurations tailored to this client */
protected static final ClientConfigurationFactory configFactory = new ClientConfigurationFactory();
private final AdvancedConfig advancedConfig;
private static final com.amazonaws.protocol.json.SdkJsonProtocolFactory protocolFactory = new com.amazonaws.protocol.json.SdkJsonProtocolFactory(
new JsonClientMetadata()
.withProtocolVersion("1.1")
.withSupportsCbor(false)
.withSupportsIon(false)
.withContentTypeOverride("")
.addErrorMetadata(
new JsonErrorShapeMetadata().withErrorCode("NotFoundException").withModeledClass(
com.amazonaws.services.pinpointsmsvoice.model.NotFoundException.class))
.addErrorMetadata(
new JsonErrorShapeMetadata().withErrorCode("InternalServiceErrorException").withModeledClass(
com.amazonaws.services.pinpointsmsvoice.model.InternalServiceErrorException.class))
.addErrorMetadata(
new JsonErrorShapeMetadata().withErrorCode("AlreadyExistsException").withModeledClass(
com.amazonaws.services.pinpointsmsvoice.model.AlreadyExistsException.class))
.addErrorMetadata(
new JsonErrorShapeMetadata().withErrorCode("TooManyRequestsException").withModeledClass(
com.amazonaws.services.pinpointsmsvoice.model.TooManyRequestsException.class))
.addErrorMetadata(
new JsonErrorShapeMetadata().withErrorCode("BadRequestException").withModeledClass(
com.amazonaws.services.pinpointsmsvoice.model.BadRequestException.class))
.addErrorMetadata(
new JsonErrorShapeMetadata().withErrorCode("LimitExceededException").withModeledClass(
com.amazonaws.services.pinpointsmsvoice.model.LimitExceededException.class))
.withBaseServiceExceptionClass(com.amazonaws.services.pinpointsmsvoice.model.AmazonPinpointSMSVoiceException.class));
    /** @return a fresh fluent builder for constructing configured {@code AmazonPinpointSMSVoiceClient} instances. */
    public static AmazonPinpointSMSVoiceClientBuilder builder() {
        return AmazonPinpointSMSVoiceClientBuilder.standard();
    }
/**
* Constructs a new client to invoke service methods on Pinpoint SMS Voice using the specified parameters.
*
* <p>
* All service calls made using this new client object are blocking, and will not return until the service call
* completes.
*
* @param clientParams
* Object providing client parameters.
*/
    AmazonPinpointSMSVoiceClient(AwsSyncClientParams clientParams) {
        // Delegates to the two-arg constructor with endpoint discovery disabled.
        this(clientParams, false);
    }
/**
* Constructs a new client to invoke service methods on Pinpoint SMS Voice using the specified parameters.
*
* <p>
* All service calls made using this new client object are blocking, and will not return until the service call
* completes.
*
* @param clientParams
* Object providing client parameters.
*/
    AmazonPinpointSMSVoiceClient(AwsSyncClientParams clientParams, boolean endpointDiscoveryEnabled) {
        super(clientParams);
        this.awsCredentialsProvider = clientParams.getCredentialsProvider();
        this.advancedConfig = clientParams.getAdvancedConfig();
        // NOTE(review): endpointDiscoveryEnabled is accepted but unused by this generated client.
        init();
    }
    /** One-time client setup: signing name, regional endpoint, and the request-handler chain. */
    private void init() {
        setServiceNameIntern(DEFAULT_SIGNING_NAME);
        setEndpointPrefix(ENDPOINT_PREFIX);
        // calling this.setEndPoint(...) will also modify the signer accordingly
        setEndpoint("sms-voice.pinpoint.us-east-1.amazonaws.com");
        HandlerChainFactory chainFactory = new HandlerChainFactory();
        // Load request handlers declared on the classpath for this service, then the global ones.
        requestHandler2s.addAll(chainFactory.newRequestHandlerChain("/com/amazonaws/services/pinpointsmsvoice/request.handlers"));
        requestHandler2s.addAll(chainFactory.newRequestHandler2Chain("/com/amazonaws/services/pinpointsmsvoice/request.handler2s"));
        requestHandler2s.addAll(chainFactory.getGlobalHandlers());
    }
/**
* Create a new configuration set. After you create the configuration set, you can add one or more event
* destinations to it.
*
* @param createConfigurationSetRequest
* CreateConfigurationSetRequest
* @return Result of the CreateConfigurationSet operation returned by the service.
* @throws TooManyRequestsException
* TooManyRequestsException
* @throws BadRequestException
* BadRequestException
* @throws LimitExceededException
* LimitExceededException
* @throws InternalServiceErrorException
* InternalServiceErrorException
* @throws AlreadyExistsException
* AlreadyExistsException
* @sample AmazonPinpointSMSVoice.CreateConfigurationSet
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/pinpoint-sms-voice-2018-09-05/CreateConfigurationSet"
* target="_top">AWS API Documentation</a>
*/
    @Override
    public CreateConfigurationSetResult createConfigurationSet(CreateConfigurationSetRequest request) {
        // Run pre-execution request handlers, then hand off to the marshalling/invoke logic.
        request = beforeClientExecution(request);
        return executeCreateConfigurationSet(request);
    }
    /** Marshals, signs, and synchronously invokes the CreateConfigurationSet call, recording client-side metrics. */
    @SdkInternalApi
    final CreateConfigurationSetResult executeCreateConfigurationSet(CreateConfigurationSetRequest createConfigurationSetRequest) {
        ExecutionContext executionContext = createExecutionContext(createConfigurationSetRequest);
        AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics();
        awsRequestMetrics.startEvent(Field.ClientExecuteTime);
        Request<CreateConfigurationSetRequest> request = null;
        Response<CreateConfigurationSetResult> response = null;
        try {
            awsRequestMetrics.startEvent(Field.RequestMarshallTime);
            try {
                request = new CreateConfigurationSetRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(createConfigurationSetRequest));
                // Binds the request metrics to the current request.
                request.setAWSRequestMetrics(awsRequestMetrics);
                request.addHandlerContext(HandlerContextKey.SIGNING_REGION, getSigningRegion());
                request.addHandlerContext(HandlerContextKey.SERVICE_ID, "Pinpoint SMS Voice");
                request.addHandlerContext(HandlerContextKey.OPERATION_NAME, "CreateConfigurationSet");
                request.addHandlerContext(HandlerContextKey.ADVANCED_CONFIG, advancedConfig);
            } finally {
                awsRequestMetrics.endEvent(Field.RequestMarshallTime);
            }
            // JSON response body is unmarshalled into the typed result.
            HttpResponseHandler<AmazonWebServiceResponse<CreateConfigurationSetResult>> responseHandler = protocolFactory.createResponseHandler(
                    new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false),
                    new CreateConfigurationSetResultJsonUnmarshaller());

            response = invoke(request, responseHandler, executionContext);

            return response.getAwsResponse();

        } finally {
            // Always close out metrics, even on marshalling or transport failure.
            endClientExecution(awsRequestMetrics, request, response);
        }
    }
/**
* Create a new event destination in a configuration set.
*
* @param createConfigurationSetEventDestinationRequest
* CreateConfigurationSetEventDestinationRequest
* @return Result of the CreateConfigurationSetEventDestination operation returned by the service.
* @throws BadRequestException
* BadRequestException
* @throws LimitExceededException
* LimitExceededException
* @throws InternalServiceErrorException
* InternalServiceErrorException
* @throws NotFoundException
* NotFoundException
* @throws TooManyRequestsException
* TooManyRequestsException
* @throws AlreadyExistsException
* AlreadyExistsException
* @sample AmazonPinpointSMSVoice.CreateConfigurationSetEventDestination
* @see <a
* href="http://docs.aws.amazon.com/goto/WebAPI/pinpoint-sms-voice-2018-09-05/CreateConfigurationSetEventDestination"
* target="_top">AWS API Documentation</a>
*/
    @Override
    public CreateConfigurationSetEventDestinationResult createConfigurationSetEventDestination(CreateConfigurationSetEventDestinationRequest request) {
        // Run pre-execution request handlers, then hand off to the marshalling/invoke logic.
        request = beforeClientExecution(request);
        return executeCreateConfigurationSetEventDestination(request);
    }
    /** Marshals, signs, and synchronously invokes the CreateConfigurationSetEventDestination call, recording client-side metrics. */
    @SdkInternalApi
    final CreateConfigurationSetEventDestinationResult executeCreateConfigurationSetEventDestination(
            CreateConfigurationSetEventDestinationRequest createConfigurationSetEventDestinationRequest) {
        ExecutionContext executionContext = createExecutionContext(createConfigurationSetEventDestinationRequest);
        AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics();
        awsRequestMetrics.startEvent(Field.ClientExecuteTime);
        Request<CreateConfigurationSetEventDestinationRequest> request = null;
        Response<CreateConfigurationSetEventDestinationResult> response = null;
        try {
            awsRequestMetrics.startEvent(Field.RequestMarshallTime);
            try {
                request = new CreateConfigurationSetEventDestinationRequestProtocolMarshaller(protocolFactory).marshall(super
                        .beforeMarshalling(createConfigurationSetEventDestinationRequest));
                // Binds the request metrics to the current request.
                request.setAWSRequestMetrics(awsRequestMetrics);
                request.addHandlerContext(HandlerContextKey.SIGNING_REGION, getSigningRegion());
                request.addHandlerContext(HandlerContextKey.SERVICE_ID, "Pinpoint SMS Voice");
                request.addHandlerContext(HandlerContextKey.OPERATION_NAME, "CreateConfigurationSetEventDestination");
                request.addHandlerContext(HandlerContextKey.ADVANCED_CONFIG, advancedConfig);
            } finally {
                awsRequestMetrics.endEvent(Field.RequestMarshallTime);
            }
            // JSON response body is unmarshalled into the typed result.
            HttpResponseHandler<AmazonWebServiceResponse<CreateConfigurationSetEventDestinationResult>> responseHandler = protocolFactory
                    .createResponseHandler(new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false),
                            new CreateConfigurationSetEventDestinationResultJsonUnmarshaller());

            response = invoke(request, responseHandler, executionContext);

            return response.getAwsResponse();

        } finally {
            // Always close out metrics, even on marshalling or transport failure.
            endClientExecution(awsRequestMetrics, request, response);
        }
    }
/**
* Deletes an existing configuration set.
*
* @param deleteConfigurationSetRequest
* @return Result of the DeleteConfigurationSet operation returned by the service.
* @throws NotFoundException
* NotFoundException
* @throws TooManyRequestsException
* TooManyRequestsException
* @throws BadRequestException
* BadRequestException
* @throws InternalServiceErrorException
* InternalServiceErrorException
* @sample AmazonPinpointSMSVoice.DeleteConfigurationSet
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/pinpoint-sms-voice-2018-09-05/DeleteConfigurationSet"
* target="_top">AWS API Documentation</a>
*/
    @Override
    public DeleteConfigurationSetResult deleteConfigurationSet(DeleteConfigurationSetRequest request) {
        // Run pre-execution request handlers, then hand off to the marshalling/invoke logic.
        request = beforeClientExecution(request);
        return executeDeleteConfigurationSet(request);
    }
    /** Marshals, signs, and synchronously invokes the DeleteConfigurationSet call, recording client-side metrics. */
    @SdkInternalApi
    final DeleteConfigurationSetResult executeDeleteConfigurationSet(DeleteConfigurationSetRequest deleteConfigurationSetRequest) {
        ExecutionContext executionContext = createExecutionContext(deleteConfigurationSetRequest);
        AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics();
        awsRequestMetrics.startEvent(Field.ClientExecuteTime);
        Request<DeleteConfigurationSetRequest> request = null;
        Response<DeleteConfigurationSetResult> response = null;
        try {
            awsRequestMetrics.startEvent(Field.RequestMarshallTime);
            try {
                request = new DeleteConfigurationSetRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(deleteConfigurationSetRequest));
                // Binds the request metrics to the current request.
                request.setAWSRequestMetrics(awsRequestMetrics);
                request.addHandlerContext(HandlerContextKey.SIGNING_REGION, getSigningRegion());
                request.addHandlerContext(HandlerContextKey.SERVICE_ID, "Pinpoint SMS Voice");
                request.addHandlerContext(HandlerContextKey.OPERATION_NAME, "DeleteConfigurationSet");
                request.addHandlerContext(HandlerContextKey.ADVANCED_CONFIG, advancedConfig);
            } finally {
                awsRequestMetrics.endEvent(Field.RequestMarshallTime);
            }
            // JSON response body is unmarshalled into the typed result.
            HttpResponseHandler<AmazonWebServiceResponse<DeleteConfigurationSetResult>> responseHandler = protocolFactory.createResponseHandler(
                    new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false),
                    new DeleteConfigurationSetResultJsonUnmarshaller());

            response = invoke(request, responseHandler, executionContext);

            return response.getAwsResponse();

        } finally {
            // Always close out metrics, even on marshalling or transport failure.
            endClientExecution(awsRequestMetrics, request, response);
        }
    }
/**
* Deletes an event destination in a configuration set.
*
* @param deleteConfigurationSetEventDestinationRequest
* @return Result of the DeleteConfigurationSetEventDestination operation returned by the service.
* @throws NotFoundException
* NotFoundException
* @throws TooManyRequestsException
* TooManyRequestsException
* @throws BadRequestException
* BadRequestException
* @throws InternalServiceErrorException
* InternalServiceErrorException
* @sample AmazonPinpointSMSVoice.DeleteConfigurationSetEventDestination
* @see <a
* href="http://docs.aws.amazon.com/goto/WebAPI/pinpoint-sms-voice-2018-09-05/DeleteConfigurationSetEventDestination"
* target="_top">AWS API Documentation</a>
*/
    @Override
    public DeleteConfigurationSetEventDestinationResult deleteConfigurationSetEventDestination(DeleteConfigurationSetEventDestinationRequest request) {
        // Run pre-execution request handlers, then hand off to the marshalling/invoke logic.
        request = beforeClientExecution(request);
        return executeDeleteConfigurationSetEventDestination(request);
    }
    /** Marshals, signs, and synchronously invokes the DeleteConfigurationSetEventDestination call, recording client-side metrics. */
    @SdkInternalApi
    final DeleteConfigurationSetEventDestinationResult executeDeleteConfigurationSetEventDestination(
            DeleteConfigurationSetEventDestinationRequest deleteConfigurationSetEventDestinationRequest) {
        ExecutionContext executionContext = createExecutionContext(deleteConfigurationSetEventDestinationRequest);
        AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics();
        awsRequestMetrics.startEvent(Field.ClientExecuteTime);
        Request<DeleteConfigurationSetEventDestinationRequest> request = null;
        Response<DeleteConfigurationSetEventDestinationResult> response = null;
        try {
            awsRequestMetrics.startEvent(Field.RequestMarshallTime);
            try {
                request = new DeleteConfigurationSetEventDestinationRequestProtocolMarshaller(protocolFactory).marshall(super
                        .beforeMarshalling(deleteConfigurationSetEventDestinationRequest));
                // Binds the request metrics to the current request.
                request.setAWSRequestMetrics(awsRequestMetrics);
                request.addHandlerContext(HandlerContextKey.SIGNING_REGION, getSigningRegion());
                request.addHandlerContext(HandlerContextKey.SERVICE_ID, "Pinpoint SMS Voice");
                request.addHandlerContext(HandlerContextKey.OPERATION_NAME, "DeleteConfigurationSetEventDestination");
                request.addHandlerContext(HandlerContextKey.ADVANCED_CONFIG, advancedConfig);
            } finally {
                awsRequestMetrics.endEvent(Field.RequestMarshallTime);
            }
            // JSON response body is unmarshalled into the typed result.
            HttpResponseHandler<AmazonWebServiceResponse<DeleteConfigurationSetEventDestinationResult>> responseHandler = protocolFactory
                    .createResponseHandler(new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false),
                            new DeleteConfigurationSetEventDestinationResultJsonUnmarshaller());

            response = invoke(request, responseHandler, executionContext);

            return response.getAwsResponse();

        } finally {
            // Always close out metrics, even on marshalling or transport failure.
            endClientExecution(awsRequestMetrics, request, response);
        }
    }
/**
* Obtain information about an event destination, including the types of events it reports, the Amazon Resource Name
* (ARN) of the destination, and the name of the event destination.
*
* @param getConfigurationSetEventDestinationsRequest
* @return Result of the GetConfigurationSetEventDestinations operation returned by the service.
* @throws NotFoundException
* NotFoundException
* @throws TooManyRequestsException
* TooManyRequestsException
* @throws BadRequestException
* BadRequestException
* @throws InternalServiceErrorException
* InternalServiceErrorException
* @sample AmazonPinpointSMSVoice.GetConfigurationSetEventDestinations
* @see <a
* href="http://docs.aws.amazon.com/goto/WebAPI/pinpoint-sms-voice-2018-09-05/GetConfigurationSetEventDestinations"
* target="_top">AWS API Documentation</a>
*/
    @Override
    public GetConfigurationSetEventDestinationsResult getConfigurationSetEventDestinations(GetConfigurationSetEventDestinationsRequest request) {
        // Run pre-execution request handlers, then hand off to the marshalling/invoke logic.
        request = beforeClientExecution(request);
        return executeGetConfigurationSetEventDestinations(request);
    }
    /** Marshals, signs, and synchronously invokes the GetConfigurationSetEventDestinations call, recording client-side metrics. */
    @SdkInternalApi
    final GetConfigurationSetEventDestinationsResult executeGetConfigurationSetEventDestinations(
            GetConfigurationSetEventDestinationsRequest getConfigurationSetEventDestinationsRequest) {
        ExecutionContext executionContext = createExecutionContext(getConfigurationSetEventDestinationsRequest);
        AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics();
        awsRequestMetrics.startEvent(Field.ClientExecuteTime);
        Request<GetConfigurationSetEventDestinationsRequest> request = null;
        Response<GetConfigurationSetEventDestinationsResult> response = null;
        try {
            awsRequestMetrics.startEvent(Field.RequestMarshallTime);
            try {
                request = new GetConfigurationSetEventDestinationsRequestProtocolMarshaller(protocolFactory).marshall(super
                        .beforeMarshalling(getConfigurationSetEventDestinationsRequest));
                // Binds the request metrics to the current request.
                request.setAWSRequestMetrics(awsRequestMetrics);
                request.addHandlerContext(HandlerContextKey.SIGNING_REGION, getSigningRegion());
                request.addHandlerContext(HandlerContextKey.SERVICE_ID, "Pinpoint SMS Voice");
                request.addHandlerContext(HandlerContextKey.OPERATION_NAME, "GetConfigurationSetEventDestinations");
                request.addHandlerContext(HandlerContextKey.ADVANCED_CONFIG, advancedConfig);
            } finally {
                awsRequestMetrics.endEvent(Field.RequestMarshallTime);
            }
            // JSON response body is unmarshalled into the typed result.
            HttpResponseHandler<AmazonWebServiceResponse<GetConfigurationSetEventDestinationsResult>> responseHandler = protocolFactory.createResponseHandler(
                    new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false),
                    new GetConfigurationSetEventDestinationsResultJsonUnmarshaller());

            response = invoke(request, responseHandler, executionContext);

            return response.getAwsResponse();

        } finally {
            // Always close out metrics, even on marshalling or transport failure.
            endClientExecution(awsRequestMetrics, request, response);
        }
    }
    /**
     * Create a new voice message and send it to a recipient's phone number.
     *
     * @param sendVoiceMessageRequest
     *        The request describing the voice message to send.
     * @return Result of the SendVoiceMessage operation returned by the service.
     * @throws TooManyRequestsException
     *         The request was throttled because too many requests were made.
     * @throws BadRequestException
     *         The request was invalid.
     * @throws InternalServiceErrorException
     *         The service encountered an internal error.
     * @sample AmazonPinpointSMSVoice.SendVoiceMessage
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/pinpoint-sms-voice-2018-09-05/SendVoiceMessage"
     *      target="_top">AWS API Documentation</a>
     */
    @Override
    public SendVoiceMessageResult sendVoiceMessage(SendVoiceMessageRequest request) {
        request = beforeClientExecution(request);
        return executeSendVoiceMessage(request);
    }
    /**
     * Marshalls and executes the SendVoiceMessage call, recording SDK request metrics around both the
     * marshalling step and the overall client execution.
     */
    @SdkInternalApi
    final SendVoiceMessageResult executeSendVoiceMessage(SendVoiceMessageRequest sendVoiceMessageRequest) {
        ExecutionContext executionContext = createExecutionContext(sendVoiceMessageRequest);
        AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics();
        awsRequestMetrics.startEvent(Field.ClientExecuteTime);
        Request<SendVoiceMessageRequest> request = null;
        Response<SendVoiceMessageResult> response = null;
        try {
            awsRequestMetrics.startEvent(Field.RequestMarshallTime);
            try {
                request = new SendVoiceMessageRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(sendVoiceMessageRequest));
                // Binds the request metrics to the current request.
                request.setAWSRequestMetrics(awsRequestMetrics);
                request.addHandlerContext(HandlerContextKey.SIGNING_REGION, getSigningRegion());
                request.addHandlerContext(HandlerContextKey.SERVICE_ID, "Pinpoint SMS Voice");
                request.addHandlerContext(HandlerContextKey.OPERATION_NAME, "SendVoiceMessage");
                request.addHandlerContext(HandlerContextKey.ADVANCED_CONFIG, advancedConfig);
            } finally {
                awsRequestMetrics.endEvent(Field.RequestMarshallTime);
            }
            HttpResponseHandler<AmazonWebServiceResponse<SendVoiceMessageResult>> responseHandler = protocolFactory.createResponseHandler(
                    new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new SendVoiceMessageResultJsonUnmarshaller());
            response = invoke(request, responseHandler, executionContext);
            return response.getAwsResponse();
        } finally {
            // Always close out the ClientExecuteTime metric, even when the call fails.
            endClientExecution(awsRequestMetrics, request, response);
        }
    }
    /**
     * Update an event destination in a configuration set. An event destination is a location that you publish
     * information about your voice calls to. For example, you can log an event to an Amazon CloudWatch destination when
     * a call fails.
     *
     * @param updateConfigurationSetEventDestinationRequest
     *        The request identifying the configuration set and the event destination to update.
     * @return Result of the UpdateConfigurationSetEventDestination operation returned by the service.
     * @throws NotFoundException
     *         The requested resource (e.g. the named configuration set or event destination) was not found.
     * @throws TooManyRequestsException
     *         The request was throttled because too many requests were made.
     * @throws BadRequestException
     *         The request was invalid.
     * @throws InternalServiceErrorException
     *         The service encountered an internal error.
     * @sample AmazonPinpointSMSVoice.UpdateConfigurationSetEventDestination
     * @see <a
     *      href="http://docs.aws.amazon.com/goto/WebAPI/pinpoint-sms-voice-2018-09-05/UpdateConfigurationSetEventDestination"
     *      target="_top">AWS API Documentation</a>
     */
    @Override
    public UpdateConfigurationSetEventDestinationResult updateConfigurationSetEventDestination(UpdateConfigurationSetEventDestinationRequest request) {
        request = beforeClientExecution(request);
        return executeUpdateConfigurationSetEventDestination(request);
    }
    /**
     * Marshalls and executes the UpdateConfigurationSetEventDestination call, recording SDK request metrics
     * around both the marshalling step and the overall client execution.
     */
    @SdkInternalApi
    final UpdateConfigurationSetEventDestinationResult executeUpdateConfigurationSetEventDestination(
            UpdateConfigurationSetEventDestinationRequest updateConfigurationSetEventDestinationRequest) {
        ExecutionContext executionContext = createExecutionContext(updateConfigurationSetEventDestinationRequest);
        AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics();
        awsRequestMetrics.startEvent(Field.ClientExecuteTime);
        Request<UpdateConfigurationSetEventDestinationRequest> request = null;
        Response<UpdateConfigurationSetEventDestinationResult> response = null;
        try {
            awsRequestMetrics.startEvent(Field.RequestMarshallTime);
            try {
                request = new UpdateConfigurationSetEventDestinationRequestProtocolMarshaller(protocolFactory).marshall(super
                        .beforeMarshalling(updateConfigurationSetEventDestinationRequest));
                // Binds the request metrics to the current request.
                request.setAWSRequestMetrics(awsRequestMetrics);
                request.addHandlerContext(HandlerContextKey.SIGNING_REGION, getSigningRegion());
                request.addHandlerContext(HandlerContextKey.SERVICE_ID, "Pinpoint SMS Voice");
                request.addHandlerContext(HandlerContextKey.OPERATION_NAME, "UpdateConfigurationSetEventDestination");
                request.addHandlerContext(HandlerContextKey.ADVANCED_CONFIG, advancedConfig);
            } finally {
                awsRequestMetrics.endEvent(Field.RequestMarshallTime);
            }
            HttpResponseHandler<AmazonWebServiceResponse<UpdateConfigurationSetEventDestinationResult>> responseHandler = protocolFactory
                    .createResponseHandler(new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false),
                            new UpdateConfigurationSetEventDestinationResultJsonUnmarshaller());
            response = invoke(request, responseHandler, executionContext);
            return response.getAwsResponse();
        } finally {
            // Always close out the ClientExecuteTime metric, even when the call fails.
            endClientExecution(awsRequestMetrics, request, response);
        }
    }
    /**
     * Returns additional metadata for a previously executed successful request, typically used for debugging issues
     * where a service isn't acting as expected. This data isn't considered part of the result data returned by an
     * operation, so it's available through this separate, diagnostic interface.
     * <p>
     * Response metadata is only cached for a limited period of time, so if you need to access this extra diagnostic
     * information for an executed request, you should use this method to retrieve it as soon as possible after
     * executing the request.
     *
     * @param request
     *        The originally executed request
     *
     * @return The response metadata for the specified request, or null if none is available.
     */
    public ResponseMetadata getCachedResponseMetadata(AmazonWebServiceRequest request) {
        return client.getResponseMetadataForRequest(request);
    }
    /**
     * Normal invoke with authentication. Credentials are required and may be overridden at the request level.
     * Convenience overload with no discovered endpoint and no endpoint-trait URI.
     **/
    private <X, Y extends AmazonWebServiceRequest> Response<X> invoke(Request<Y> request, HttpResponseHandler<AmazonWebServiceResponse<X>> responseHandler,
            ExecutionContext executionContext) {
        return invoke(request, responseHandler, executionContext, null, null);
    }
    /**
     * Normal invoke with authentication. Credentials are required and may be overridden at the request level.
     * Resolves the credentials provider (request-level override or the client's provider) before delegating to doInvoke.
     **/
    private <X, Y extends AmazonWebServiceRequest> Response<X> invoke(Request<Y> request, HttpResponseHandler<AmazonWebServiceResponse<X>> responseHandler,
            ExecutionContext executionContext, URI cachedEndpoint, URI uriFromEndpointTrait) {
        executionContext.setCredentialsProvider(CredentialUtils.getCredentialsProvider(request.getOriginalRequest(), awsCredentialsProvider));
        return doInvoke(request, responseHandler, executionContext, cachedEndpoint, uriFromEndpointTrait);
    }
    /**
     * Invoke with no authentication. Credentials are not required and any credentials set on the client or request will
     * be ignored for this operation. Unlike {@code invoke}, no credentials provider is installed on the execution context.
     **/
    private <X, Y extends AmazonWebServiceRequest> Response<X> anonymousInvoke(Request<Y> request,
            HttpResponseHandler<AmazonWebServiceResponse<X>> responseHandler, ExecutionContext executionContext) {
        return doInvoke(request, responseHandler, executionContext, null, null);
    }
    /**
     * Invoke the request using the http client. Assumes credentials (or lack thereof) have been configured in the
     * ExecutionContext beforehand.
     **/
    private <X, Y extends AmazonWebServiceRequest> Response<X> doInvoke(Request<Y> request, HttpResponseHandler<AmazonWebServiceResponse<X>> responseHandler,
            ExecutionContext executionContext, URI discoveredEndpoint, URI uriFromEndpointTrait) {
        // Endpoint precedence: discovered endpoint, then endpoint-trait URI, then the client's configured endpoint.
        if (discoveredEndpoint != null) {
            request.setEndpoint(discoveredEndpoint);
            request.getOriginalRequest().getRequestClientOptions().appendUserAgent("endpoint-discovery");
        } else if (uriFromEndpointTrait != null) {
            request.setEndpoint(uriFromEndpointTrait);
        } else {
            request.setEndpoint(endpoint);
        }
        request.setTimeOffset(timeOffset);
        HttpResponseHandler<AmazonServiceException> errorResponseHandler = protocolFactory.createErrorResponseHandler(new JsonErrorResponseMetadata());
        return client.execute(request, responseHandler, errorResponseHandler, executionContext);
    }
    /**
     * Exposes this client's JSON protocol factory to other SDK-internal components.
     */
    @com.amazonaws.annotation.SdkInternalApi
    static com.amazonaws.protocol.json.SdkJsonProtocolFactory getProtocolFactory() {
        return protocolFactory;
    }
}
|
anmolshl/raiden | raiden/utils/deployment.py | import structlog
from eth_utils import to_checksum_address
from raiden.utils import get_contract_path
from raiden.utils.solc import compile_files_cwd
log = structlog.get_logger(__name__)
# Source files for all to be deployed solidity contracts
RAIDEN_CONTRACT_FILES = [
    'NettingChannelLibrary.sol',
    'ChannelManagerLibrary.sol',
    'Registry.sol',
    'EndpointRegistry.sol',
]
# Top level contracts to be deployed. Dependencies are handled automatically
# in `JSONRPCClient.deploy_solidity_contract()`
CONTRACTS_TO_DEPLOY = [
    'Registry.sol:Registry',
    'EndpointRegistry.sol:EndpointRegistry',
]
# Bare contract names (no `file.sol:` prefix) — presumably for a newer contract
# generation deployed through a different path; TODO confirm against callers.
NEW_CONTRACTS_TO_DEPLOY = [
    'SecretRegistry',
]
def deploy_file(contract, compiled_contracts, client):
    """Deploy one contract given as ``'file.sol:ContractName'``.

    Returns a single-entry dict mapping the contract name to its deployed
    address. The same dict is handed to ``deploy_solidity_contract`` as the
    (initially empty) libraries mapping.
    """
    filename, _, name = contract.partition(":")
    deployed_addresses = dict()
    log.info(f"Deploying {name}")
    proxy = client.deploy_solidity_contract(
        name,
        compiled_contracts,
        deployed_addresses,
        '',
        contract_path=filename,
    )
    log.info(f"Deployed {name} @ {to_checksum_address(proxy.contract_address)}")
    deployed_addresses[name] = proxy.contract_address
    return deployed_addresses
def deploy_contracts(client, compile_list=None, deploy_list=None):
    """Compile ``compile_list`` source files and deploy every contract in
    ``deploy_list``, defaulting to the module-level lists.

    Returns a dict mapping each deployed contract name to its address.
    """
    if compile_list is None:
        compile_list = RAIDEN_CONTRACT_FILES
    if deploy_list is None:
        deploy_list = CONTRACTS_TO_DEPLOY
    source_paths = [get_contract_path(source_file) for source_file in compile_list]
    compiled_contracts = compile_files_cwd(source_paths)
    deployed = {}
    for contract in deploy_list:
        deployed.update(deploy_file(contract, compiled_contracts, client))
    return deployed
|
TimYagan/stroom | stroom-pipeline/src/main/java/stroom/pipeline/filter/TestFilter.java | <reponame>TimYagan/stroom<filename>stroom-pipeline/src/main/java/stroom/pipeline/filter/TestFilter.java
/*
* Copyright 2017 Crown Copyright
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package stroom.pipeline.filter;
import org.xml.sax.SAXException;
import stroom.pipeline.LocationFactoryProxy;
import stroom.pipeline.errorhandler.ErrorReceiverProxy;
import stroom.pipeline.factory.ConfigurableElement;
import stroom.pipeline.shared.ElementIcons;
import stroom.pipeline.shared.data.PipelineElementType;
import javax.inject.Inject;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
* A filter used to sample the output produced by SAX events at any point in the
* XML pipeline. Many instances of this filter can be used.
* <p>
* This filter accumulates all the complete documents so they can be asserted against at the end of parsing.
*/
@ConfigurableElement(type = "TestFilter", roles = {PipelineElementType.ROLE_TARGET,
PipelineElementType.ROLE_HAS_TARGETS, PipelineElementType.VISABILITY_SIMPLE,
PipelineElementType.VISABILITY_STEPPING}, icon = ElementIcons.STREAM)
public class TestFilter extends AbstractSamplingFilter {
private final List<String> outputs;
    /**
     * @param errorReceiverProxy passed straight through to the parent sampling filter.
     * @param locationFactory    passed straight through to the parent sampling filter.
     */
    @Inject
    public TestFilter(final ErrorReceiverProxy errorReceiverProxy,
                      final LocationFactoryProxy locationFactory) {
        super(errorReceiverProxy, locationFactory);
        outputs = new ArrayList<>();
    }
    /**
     * Records the sampled output of each completed document so tests can assert against it later.
     */
    @Override
    public void endDocument() throws SAXException {
        super.endDocument();
        // Snapshot the parent filter's sampled output for the document that just ended.
        outputs.add(getOutput());
    }
    /**
     * @return an unmodifiable view of the output captured for each completed document, in document order.
     */
    public List<String> getOutputs() {
        return Collections.unmodifiableList(outputs);
    }
} |
ScalablyTyped/SlinkyTyped | g/graphql-tools__delegate/src/main/scala/typingsSlinky/graphqlToolsDelegate/typesMod/MergedTypeConfig.scala | <gh_stars>10-100
package typingsSlinky.graphqlToolsDelegate.typesMod
import typingsSlinky.graphql.astMod.SelectionSetNode
import typingsSlinky.graphql.definitionMod.GraphQLResolveInfo
import typingsSlinky.graphql.mod.GraphQLSchema
import typingsSlinky.std.Record
import scala.scalajs.js
import scala.scalajs.js.`|`
import scala.scalajs.js.annotation.{JSGlobalScope, JSGlobal, JSImport, JSName, JSBracketAccess}
/** Generated facade for graphql-tools' `MergedTypeConfig`: options describing how a type
  * merged across subschemas is fetched and resolved (see `MergedTypeResolver` and the
  * subschema-aware `resolve` signature on the companion's `setResolve`).
  */
@js.native
trait MergedTypeConfig extends js.Object {
  // Optional callback building query arguments from the source object — semantics per graphql-tools docs.
  var args: js.UndefOr[js.Function1[/* source */ js.Any, Record[String, _]]] = js.native
  var fieldName: js.UndefOr[String] = js.native
  // Optional callback extracting a key from the original result — semantics per graphql-tools docs.
  var key: js.UndefOr[js.Function1[/* originalResult */ js.Any, _]] = js.native
  var resolve: js.UndefOr[MergedTypeResolver] = js.native
  var selectionSet: js.UndefOr[String] = js.native
}
/** Companion generated by the ScalablyTyped converter: `apply` builds an empty config and
  * `MergedTypeConfigOps` provides mutate-in-place builder methods for each field.
  */
object MergedTypeConfig {
  @scala.inline
  def apply(): MergedTypeConfig = {
    val __obj = js.Dynamic.literal()
    __obj.asInstanceOf[MergedTypeConfig]
  }
  @scala.inline
  implicit class MergedTypeConfigOps[Self <: MergedTypeConfig] (val x: Self) extends AnyVal {
    @scala.inline
    def duplicate: Self = (js.Dynamic.global.Object.assign(js.Dynamic.literal(), x)).asInstanceOf[Self]
    @scala.inline
    def combineWith[Other <: js.Any](other: Other): Self with Other = (js.Dynamic.global.Object.assign(js.Dynamic.literal(), x, other.asInstanceOf[js.Any])).asInstanceOf[Self with Other]
    // Mutates the underlying JS object in place and returns it for chaining.
    @scala.inline
    def set(key: String, value: js.Any): Self = {
        x.asInstanceOf[js.Dynamic].updateDynamic(key)(value)
        x
    }
    @scala.inline
    def setArgs(value: /* source */ js.Any => Record[String, _]): Self = this.set("args", js.Any.fromFunction1(value))
    @scala.inline
    def deleteArgs: Self = this.set("args", js.undefined)
    @scala.inline
    def setFieldName(value: String): Self = this.set("fieldName", value.asInstanceOf[js.Any])
    @scala.inline
    def deleteFieldName: Self = this.set("fieldName", js.undefined)
    @scala.inline
    def setKey(value: /* originalResult */ js.Any => _): Self = this.set("key", js.Any.fromFunction1(value))
    @scala.inline
    def deleteKey: Self = this.set("key", js.undefined)
    @scala.inline
    def setResolve(
      value: (/* originalResult */ js.Any, /* context */ Record[String, js.Any], /* info */ GraphQLResolveInfo, /* subschema */ GraphQLSchema | SubschemaConfig, /* selectionSet */ SelectionSetNode) => js.Any
    ): Self = this.set("resolve", js.Any.fromFunction5(value))
    @scala.inline
    def deleteResolve: Self = this.set("resolve", js.undefined)
    @scala.inline
    def setSelectionSet(value: String): Self = this.set("selectionSet", value.asInstanceOf[js.Any])
    @scala.inline
    def deleteSelectionSet: Self = this.set("selectionSet", js.undefined)
  }
}
|
tomitribe/github-api-java-old | src/test/java/org/tomitribe/github/client/CreatePullRequestTest.java | <reponame>tomitribe/github-api-java-old
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tomitribe.github.client;
import org.junit.BeforeClass;
import org.junit.Test;
import org.tomitribe.github.JsonAsserts;
import org.tomitribe.github.MockService;
import org.tomitribe.github.Resources;
import org.tomitribe.github.model.PullRequest;
import javax.ws.rs.HeaderParam;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.io.IOException;
import java.net.URI;
import static org.junit.Assert.assertEquals;
public class CreatePullRequestTest {
private static URI uri;
    @BeforeClass
    public static void before() throws Exception {
        // Start the mock GitHub service once for the whole test class; tests point the client at it.
        uri = MockGithub.run();
    }
@Test
public void test() throws Exception {
final GithubClient githubClient = GithubClient.builder()
.oauthToken("<KEY>")
.api(uri)
.build();
final PullRequest pullRequest = githubClient.createPullRequest(
"colors",
"orange",
"This is awesome",
"fancyorg:critical-fixes",
"master",
"Let's get this done!",
true,
true
);
assertEquals(new Long(2), pullRequest.getNumber());
assertEquals("Update the README with new information.", pullRequest.getTitle());
}
    /**
     * In-process JAX-RS stand-in for the GitHub pulls endpoint: validates the headers and JSON
     * body the client sends, then replies with a canned pull-request payload.
     */
    @Path("/")
    public static class MockGithub {
        @POST
        @Path("repos/{owner}/{name}/pulls")
        public Response createPullRequest(@HeaderParam("authorization") final String authorization,
                                          @HeaderParam("accept") final String accept,
                                          String body) throws IOException {
            // Assert the client sends the authorization header github will need
            if (!authorization.contains("token")) throw new WebApplicationException(401);
            if (!authorization.contains("23456789dfghjklkjhgfdsdfghuiytrewertyui")) throw new WebApplicationException(401);
            // Assert the client sends the accept header github will need to allow the preview api call
            if (!accept.contains("application/vnd.github.shadow-cat-preview+json")) throw new WebApplicationException(400);
            // The client must serialize exactly these fields; extra or missing fields fail the assertion.
            JsonAsserts.assertJsonb("{\n" +
                    "  \"maintainer_can_modify\":true,\n" +
                    "  \"base\":\"master\",\n" +
                    "  \"body\":\"Let's get this done!\",\n" +
                    "  \"draft\":true,\n" +
                    "  \"head\":\"fancyorg:critical-fixes\",\n" +
                    "  \"title\":\"This is awesome\"\n" +
                    "}", body);
            final String response = Resources.read(CreatePullRequestTest.class, "create-pull-request-response.json");
            return Response.ok(response, MediaType.APPLICATION_JSON_TYPE).build();
        }
        public static URI run() throws Exception {
            return MockService.run(MockGithub.class);
        }
    }
} |
pdalbora/gosu-lang | gosu-xml/src/main/java/gw/internal/schema/gw/xsd/w3c/xmlschema/types/complex/TopLevelComplexType.java | package gw.internal.schema.gw.xsd.w3c.xmlschema.types.complex;
/***************************************************************************/
/* THIS IS AUTOGENERATED CODE - DO NOT MODIFY OR YOUR CHANGES WILL BE LOST */
/* THIS CODE CAN BE REGENERATED USING 'xsd-codegen' */
/***************************************************************************/
/**
 * Generated reflective wrapper for the XSD {@code xs:topLevelComplexType} type. Every accessor
 * delegates through the Gosu type system rather than holding fields directly. AUTOGENERATED —
 * regenerate with 'xsd-codegen' instead of editing by hand (see banner above).
 */
public class TopLevelComplexType extends gw.internal.schema.gw.xsd.w3c.xmlschema.types.complex.ComplexType implements gw.internal.xml.IXmlGeneratedClass {
  // QNames for the XSD attributes and child elements of topLevelComplexType.
  public static final javax.xml.namespace.QName $ATTRIBUTE_QNAME_Abstract = new javax.xml.namespace.QName( "", "abstract", "" );
  public static final javax.xml.namespace.QName $ATTRIBUTE_QNAME_Block = new javax.xml.namespace.QName( "", "block", "" );
  public static final javax.xml.namespace.QName $ATTRIBUTE_QNAME_Final = new javax.xml.namespace.QName( "", "final", "" );
  public static final javax.xml.namespace.QName $ATTRIBUTE_QNAME_Id = new javax.xml.namespace.QName( "", "id", "" );
  public static final javax.xml.namespace.QName $ATTRIBUTE_QNAME_Mixed = new javax.xml.namespace.QName( "", "mixed", "" );
  public static final javax.xml.namespace.QName $ATTRIBUTE_QNAME_Name = new javax.xml.namespace.QName( "", "name", "" );
  public static final javax.xml.namespace.QName $ELEMENT_QNAME_All = new javax.xml.namespace.QName( "http://www.w3.org/2001/XMLSchema", "all", "xs" );
  public static final javax.xml.namespace.QName $ELEMENT_QNAME_Annotation = new javax.xml.namespace.QName( "http://www.w3.org/2001/XMLSchema", "annotation", "xs" );
  public static final javax.xml.namespace.QName $ELEMENT_QNAME_AnyAttribute = new javax.xml.namespace.QName( "http://www.w3.org/2001/XMLSchema", "anyAttribute", "xs" );
  public static final javax.xml.namespace.QName $ELEMENT_QNAME_Attribute = new javax.xml.namespace.QName( "http://www.w3.org/2001/XMLSchema", "attribute", "xs" );
  public static final javax.xml.namespace.QName $ELEMENT_QNAME_AttributeGroup = new javax.xml.namespace.QName( "http://www.w3.org/2001/XMLSchema", "attributeGroup", "xs" );
  public static final javax.xml.namespace.QName $ELEMENT_QNAME_Choice = new javax.xml.namespace.QName( "http://www.w3.org/2001/XMLSchema", "choice", "xs" );
  public static final javax.xml.namespace.QName $ELEMENT_QNAME_ComplexContent = new javax.xml.namespace.QName( "http://www.w3.org/2001/XMLSchema", "complexContent", "xs" );
  public static final javax.xml.namespace.QName $ELEMENT_QNAME_Group = new javax.xml.namespace.QName( "http://www.w3.org/2001/XMLSchema", "group", "xs" );
  public static final javax.xml.namespace.QName $ELEMENT_QNAME_Sequence = new javax.xml.namespace.QName( "http://www.w3.org/2001/XMLSchema", "sequence", "xs" );
  public static final javax.xml.namespace.QName $ELEMENT_QNAME_SimpleContent = new javax.xml.namespace.QName( "http://www.w3.org/2001/XMLSchema", "simpleContent", "xs" );
  public static final javax.xml.namespace.QName $QNAME = new javax.xml.namespace.QName( "http://www.w3.org/2001/XMLSchema", "topLevelComplexType", "xs" );
  // Lazily-resolved handle to the corresponding Gosu type, guarded by the global type-system lock.
  public static final gw.util.concurrent.LockingLazyVar<gw.lang.reflect.IType> TYPE = new gw.util.concurrent.LockingLazyVar<gw.lang.reflect.IType>( gw.lang.reflect.TypeSystem.getGlobalLock() ) {
    @Override
    protected gw.lang.reflect.IType init() {
      return gw.lang.reflect.TypeSystem.getByFullName( "gw.xsd.w3c.xmlschema.types.complex.TopLevelComplexType" );
    }
  };
  private static final gw.util.concurrent.LockingLazyVar<java.lang.Object> SCHEMAINFO = new gw.util.concurrent.LockingLazyVar<java.lang.Object>( gw.lang.reflect.TypeSystem.getGlobalLock() ) {
    @Override
    protected java.lang.Object init() {
      gw.lang.reflect.IType type = TYPE.get();
      return getSchemaInfoByType( type );
    }
  };
  public TopLevelComplexType() {
    super( TYPE.get(), SCHEMAINFO.get() );
  }
  protected TopLevelComplexType( gw.lang.reflect.IType type, java.lang.Object schemaInfo ) {
    super( type, schemaInfo );
  }
  // Reflective property accessors generated from the XSD; each pair delegates to the Gosu TypeInfo.
  public java.lang.Boolean Abstract() {
    return (java.lang.Boolean) TYPE.get().getTypeInfo().getProperty( "Abstract" ).getAccessor().getValue( this );
  }
  public void setAbstract$( java.lang.Boolean param ) {
    TYPE.get().getTypeInfo().getProperty( "Abstract" ).getAccessor().setValue( this, param );
  }
  public gw.internal.schema.gw.xsd.w3c.xmlschema.All All() {
    return (gw.internal.schema.gw.xsd.w3c.xmlschema.All) TYPE.get().getTypeInfo().getProperty( "All" ).getAccessor().getValue( this );
  }
  public void setAll$( gw.internal.schema.gw.xsd.w3c.xmlschema.All param ) {
    TYPE.get().getTypeInfo().getProperty( "All" ).getAccessor().setValue( this, param );
  }
  public gw.internal.schema.gw.xsd.w3c.xmlschema.Annotation Annotation() {
    return (gw.internal.schema.gw.xsd.w3c.xmlschema.Annotation) TYPE.get().getTypeInfo().getProperty( "Annotation" ).getAccessor().getValue( this );
  }
  public void setAnnotation$( gw.internal.schema.gw.xsd.w3c.xmlschema.Annotation param ) {
    TYPE.get().getTypeInfo().getProperty( "Annotation" ).getAccessor().setValue( this, param );
  }
  public gw.internal.schema.gw.xsd.w3c.xmlschema.AnyAttribute AnyAttribute() {
    return (gw.internal.schema.gw.xsd.w3c.xmlschema.AnyAttribute) TYPE.get().getTypeInfo().getProperty( "AnyAttribute" ).getAccessor().getValue( this );
  }
  public void setAnyAttribute$( gw.internal.schema.gw.xsd.w3c.xmlschema.AnyAttribute param ) {
    TYPE.get().getTypeInfo().getProperty( "AnyAttribute" ).getAccessor().setValue( this, param );
  }
  // The base-class variants are deprecated in favor of the covariant $$-suffixed overloads below.
  @Deprecated
  public java.util.List<gw.internal.schema.gw.xsd.w3c.xmlschema.anonymous.elements.ComplexType_Attribute> Attribute() {
    return super.Attribute();
  }
  @Deprecated
  public void setAttribute$( java.util.List<gw.internal.schema.gw.xsd.w3c.xmlschema.anonymous.elements.ComplexType_Attribute> param ) {
    super.setAttribute$( param );
  }
  public java.util.List<gw.internal.schema.gw.xsd.w3c.xmlschema.anonymous.elements.TopLevelComplexType_Attribute> Attribute$$gw_xsd_w3c_xmlschema_types_complex_TopLevelComplexType() {
    //noinspection unchecked
    return (java.util.List<gw.internal.schema.gw.xsd.w3c.xmlschema.anonymous.elements.TopLevelComplexType_Attribute>) TYPE.get().getTypeInfo().getProperty( "Attribute" ).getAccessor().getValue( this );
  }
  public void setAttribute$$gw_xsd_w3c_xmlschema_types_complex_TopLevelComplexType$( java.util.List<gw.internal.schema.gw.xsd.w3c.xmlschema.anonymous.elements.TopLevelComplexType_Attribute> param ) {
    TYPE.get().getTypeInfo().getProperty( "Attribute" ).getAccessor().setValue( this, param );
  }
  @Deprecated
  public java.util.List<gw.internal.schema.gw.xsd.w3c.xmlschema.anonymous.elements.ComplexType_AttributeGroup> AttributeGroup() {
    return super.AttributeGroup();
  }
  @Deprecated
  public void setAttributeGroup$( java.util.List<gw.internal.schema.gw.xsd.w3c.xmlschema.anonymous.elements.ComplexType_AttributeGroup> param ) {
    super.setAttributeGroup$( param );
  }
  public java.util.List<gw.internal.schema.gw.xsd.w3c.xmlschema.anonymous.elements.TopLevelComplexType_AttributeGroup> AttributeGroup$$gw_xsd_w3c_xmlschema_types_complex_TopLevelComplexType() {
    //noinspection unchecked
    return (java.util.List<gw.internal.schema.gw.xsd.w3c.xmlschema.anonymous.elements.TopLevelComplexType_AttributeGroup>) TYPE.get().getTypeInfo().getProperty( "AttributeGroup" ).getAccessor().getValue( this );
  }
  public void setAttributeGroup$$gw_xsd_w3c_xmlschema_types_complex_TopLevelComplexType$( java.util.List<gw.internal.schema.gw.xsd.w3c.xmlschema.anonymous.elements.TopLevelComplexType_AttributeGroup> param ) {
    TYPE.get().getTypeInfo().getProperty( "AttributeGroup" ).getAccessor().setValue( this, param );
  }
  public java.lang.String Block() {
    return (java.lang.String) TYPE.get().getTypeInfo().getProperty( "Block" ).getAccessor().getValue( this );
  }
  public void setBlock$( java.lang.String param ) {
    TYPE.get().getTypeInfo().getProperty( "Block" ).getAccessor().setValue( this, param );
  }
  public gw.internal.schema.gw.xsd.w3c.xmlschema.Choice Choice() {
    return (gw.internal.schema.gw.xsd.w3c.xmlschema.Choice) TYPE.get().getTypeInfo().getProperty( "Choice" ).getAccessor().getValue( this );
  }
  public void setChoice$( gw.internal.schema.gw.xsd.w3c.xmlschema.Choice param ) {
    TYPE.get().getTypeInfo().getProperty( "Choice" ).getAccessor().setValue( this, param );
  }
  public gw.internal.schema.gw.xsd.w3c.xmlschema.ComplexContent ComplexContent() {
    return (gw.internal.schema.gw.xsd.w3c.xmlschema.ComplexContent) TYPE.get().getTypeInfo().getProperty( "ComplexContent" ).getAccessor().getValue( this );
  }
  public void setComplexContent$( gw.internal.schema.gw.xsd.w3c.xmlschema.ComplexContent param ) {
    TYPE.get().getTypeInfo().getProperty( "ComplexContent" ).getAccessor().setValue( this, param );
  }
  public java.lang.String Final() {
    return (java.lang.String) TYPE.get().getTypeInfo().getProperty( "Final" ).getAccessor().getValue( this );
  }
  public void setFinal$( java.lang.String param ) {
    TYPE.get().getTypeInfo().getProperty( "Final" ).getAccessor().setValue( this, param );
  }
  @Deprecated
  public gw.internal.schema.gw.xsd.w3c.xmlschema.anonymous.elements.ComplexType_Group Group() {
    return super.Group();
  }
  @Deprecated
  public void setGroup$( gw.internal.schema.gw.xsd.w3c.xmlschema.anonymous.elements.ComplexType_Group param ) {
    super.setGroup$( param );
  }
  public gw.internal.schema.gw.xsd.w3c.xmlschema.anonymous.elements.TopLevelComplexType_Group Group$$gw_xsd_w3c_xmlschema_types_complex_TopLevelComplexType() {
    return (gw.internal.schema.gw.xsd.w3c.xmlschema.anonymous.elements.TopLevelComplexType_Group) TYPE.get().getTypeInfo().getProperty( "Group" ).getAccessor().getValue( this );
  }
  public void setGroup$$gw_xsd_w3c_xmlschema_types_complex_TopLevelComplexType$( gw.internal.schema.gw.xsd.w3c.xmlschema.anonymous.elements.TopLevelComplexType_Group param ) {
    TYPE.get().getTypeInfo().getProperty( "Group" ).getAccessor().setValue( this, param );
  }
  public java.lang.String Id() {
    return (java.lang.String) TYPE.get().getTypeInfo().getProperty( "Id" ).getAccessor().getValue( this );
  }
  public void setId$( java.lang.String param ) {
    TYPE.get().getTypeInfo().getProperty( "Id" ).getAccessor().setValue( this, param );
  }
  public java.lang.Boolean Mixed() {
    return (java.lang.Boolean) TYPE.get().getTypeInfo().getProperty( "Mixed" ).getAccessor().getValue( this );
  }
  public void setMixed$( java.lang.Boolean param ) {
    TYPE.get().getTypeInfo().getProperty( "Mixed" ).getAccessor().setValue( this, param );
  }
  public java.lang.String Name() {
    return (java.lang.String) TYPE.get().getTypeInfo().getProperty( "Name" ).getAccessor().getValue( this );
  }
  public void setName$( java.lang.String param ) {
    TYPE.get().getTypeInfo().getProperty( "Name" ).getAccessor().setValue( this, param );
  }
  public gw.internal.schema.gw.xsd.w3c.xmlschema.Sequence Sequence() {
    return (gw.internal.schema.gw.xsd.w3c.xmlschema.Sequence) TYPE.get().getTypeInfo().getProperty( "Sequence" ).getAccessor().getValue( this );
  }
  public void setSequence$( gw.internal.schema.gw.xsd.w3c.xmlschema.Sequence param ) {
    TYPE.get().getTypeInfo().getProperty( "Sequence" ).getAccessor().setValue( this, param );
  }
  public gw.internal.schema.gw.xsd.w3c.xmlschema.SimpleContent SimpleContent() {
    return (gw.internal.schema.gw.xsd.w3c.xmlschema.SimpleContent) TYPE.get().getTypeInfo().getProperty( "SimpleContent" ).getAccessor().getValue( this );
  }
  public void setSimpleContent$( gw.internal.schema.gw.xsd.w3c.xmlschema.SimpleContent param ) {
    TYPE.get().getTypeInfo().getProperty( "SimpleContent" ).getAccessor().setValue( this, param );
  }
  // Generator fingerprint — presumably used to detect stale generated code; do not edit by hand.
  @SuppressWarnings( {"UnusedDeclaration"} )
  private static final long FINGERPRINT = 2110283714877373226L;
}
|
mikiec84/badger | public/tests/e2e/auth/auth_spec.js | describe('Login', function () {
var loginPage = require('./auth_page.js');
var mockModule;
beforeEach(function () {
mockModule = require('./auth_mock.js');
browser.addMockModule('authMock', mockModule.authMock);
browser.get('/#/dashboard');
});
afterEach(function () {
browser.removeMockModule('authMock');
});
it('should navigate to the login page when the login button is clicked', function () {
loginPage.loginButton.click();
expect(browser.getCurrentUrl()).toBe(browser.baseUrl + '/#/auth/login/');
});
it('should fail authentication on incorrect login/password', function () {
loginPage.loginButton.click();
loginPage.userName.sendKeys('user');
loginPage.password.sendKeys('<PASSWORD>');
loginPage.submitButton.click();
expect(element(by.binding('error')).getText()).toBe('Authentication failed');
});
it('should fail authentication on empty login/password', function () {
loginPage.loginButton.click();
loginPage.submitButton.click();
expect(element(by.binding('error')).getText()).toBe('Authentication failed');
});
it('should allow a user to log in', function () {
browser.get('/#/auth/login/');
loginPage.userName.sendKeys('test_user');
loginPage.password.sendKeys('<PASSWORD>');
loginPage.submitButton.click();
expect(browser.getCurrentUrl()).toBe(browser.baseUrl + '/#/dashboard');
expect(browser.isElementPresent(by.id('test_user'))).toBe(true);
});
it('should allow to logout', function() {
browser.get('/#/auth/login/');
loginPage.userName.sendKeys('test_user');
loginPage.password.sendKeys('<PASSWORD>');
loginPage.submitButton.click();
loginPage.logoutButton.click();
expect(browser.isElementPresent(loginPage.loginButton)).toBe(true);
});
}); |
ViZarAT/DH | .data-dist/mods/cleanslate/abilities.js | "use strict";Object.defineProperty(exports, "__esModule", {value: true}); const Abilities = {
// On switch-in: clears any active weather, then summons the Gravity pseudo-weather.
"forceofattraction": {
    shortDesc: "On switch-in, this Pokemon summons Gravity.",
    onStart: function (source) {
        // weather is cleared explicitly — adding Gravity alone would not remove it
        this.field.clearWeather();
        this.field.addPseudoWeather('gravity', source, source.ability);
    },
    id: "forceofattraction",
    name: "Force of Attraction",
    rating: 4.5,
    num: 2,
},
// Intimidate clone targeting Special Attack: on switch-in, -1 SpA to each
// adjacent foe; Pokemon behind a Substitute only get the immunity message.
"mythicalpresence": {
    desc: "On switch-in, this Pokemon lowers the Special Attack of adjacent opposing Pokemon by 1 stage. Pokemon behind a substitute are immune.",
    shortDesc: "On switch-in, this Pokemon lowers the Special Attack of adjacent opponents by 1 stage.",
    onStart: function (pokemon) {
        let activated = false;
        for (const target of pokemon.side.foe.active) {
            if (!target || !this.isAdjacent(target, pokemon)) continue;
            // the ability banner is announced once, before the first affected target
            if (!activated) {
                this.add('-ability', pokemon, 'Mythical Presence', 'boost');
                activated = true;
            }
            if (target.volatiles['substitute']) {
                this.add('-immune', target, '[msg]');
            } else {
                this.boost({spa: -1}, target, pokemon);
            }
        }
    },
    id: "mythicalpresence",
    name: "Mythical Presence",
    rating: 3.5,
    num: 22.5,
},
// 1.5x Attack while the Gravity pseudo-weather is active.
"extremebulk": {
    shortDesc: "If Gravity is active, this Pokemon's Attack is 1.5x.",
    onModifyAtk: function (atk, pokemon) {
        if (this.field.getPseudoWeather('gravity')) {
            return this.chainModify(1.5);
        }
    },
    id: "extremebulk",
    name: "Extreme Bulk",
    rating: 3,
    num: 33.5,
},
"abyssallight": {
desc: "This Pokemon is immune to Dark-type moves and raises its Sp. Defense by 1 stage when hit by a Dark-type move.",
shortDesc: "This Pokemon's SpD is raised 1 stage if hit by a Dark move; Dark immunity.",
onTryHitPriority: 1,
onTryHit: function (target, source, move) {
if (target !== source && move.type === 'Dark') {
if (!this.boost({spd: 1})) {
this.add('-immune', target, '[msg]', '[from] ability: Abyssal Light');
}
return null;
}
},
onAllyTryHitSide: function (target, source, move) {
if (target === this.effectData.target || target.side !== source.side) return;
if (move.type === 'Dark') {
this.boost({atk: 1}, this.effectData.target);
}
},
id: "abyssallight",
name: "<NAME>",
rating: 3.5,
num: 157,
},
"desertcoat": {
shortDesc: "This Pokemon cannot be burned and is immune to Sandstorm damage.",
onImmunity: function (type, pokemon) {
if (type === 'sandstorm') return false;
},
onSetStatus: function (status, target, source, effect) {
if (status.id !== 'brn') return;
if (!effect || !effect.status) return false;
this.add('-immune', target, '[msg]', '[from] ability: Desert Coat');
return false;
},
id: "desertcoat",
name: "<NAME>",
rating: 2,
num: 41,
},
// Intimidate clone targeting Speed: on switch-in, -1 Spe to each adjacent foe;
// Pokemon behind a Substitute only get the immunity message.
"malware": {
    desc: "On switch-in, this Pokemon lowers the Speed of adjacent opposing Pokemon by 1 stage. Pokemon behind a substitute are immune.",
    shortDesc: "On switch-in, this Pokemon lowers the Speed of adjacent opponents by 1 stage.",
    onStart: function (pokemon) {
        let activated = false;
        for (const target of pokemon.side.foe.active) {
            if (!target || !this.isAdjacent(target, pokemon)) continue;
            // the ability banner is announced once, before the first affected target
            if (!activated) {
                this.add('-ability', pokemon, 'Malware', 'boost');
                activated = true;
            }
            if (target.volatiles['substitute']) {
                this.add('-immune', target, '[msg]');
            } else {
                this.boost({spe: -1}, target, pokemon);
            }
        }
    },
    id: "malware",
    name: "Malware",
    rating: 3.5,
    num: 22,
},
// In Electric Terrain: both Attack and Sp. Attack are 1.5x, but the holder
// loses 1/8 max HP at the end of each turn (self-inflicted damage).
"overdrive": {
    desc: "If Electric Terrain is active, this Pokemon's Attacks are multiplied by 1.5 and it loses 1/8 of its maximum HP, rounded down, at the end of each turn.",
    shortDesc: "If Electric Terrain is active, this Pokemon's Attacks are 1.5x; loses 1/8 max HP per turn.",
    onModifySpAPriority: 5,
    onModifySpA: function (spa, pokemon) {
        if (this.field.isTerrain('electricterrain')) return this.chainModify(1.5);
    },
    onModifyAtk: function (atk, pokemon) {
        if (this.field.isTerrain('electricterrain')) return this.chainModify(1.5);
    },
    // residual drawback: the pokemon damages itself while the terrain is up
    onResidual: function(pokemon) {
        if (this.field.isTerrain('electricterrain')) {
            this.damage(pokemon.maxhp / 8, pokemon, pokemon);
        }
    },
    id: "overdrive",
    name: "Overdrive",
    rating: 1.5,
    num: 94,
},
"pinksmoke": {
shortDesc: "This Pokemon is not affected by the secondary effect of another Pokemon's attack, and cannot be struck by a critical hit.",
onModifySecondaries: function (secondaries) {
this.debug('Shield Dust prevent secondary');
return secondaries.filter(effect => !!(effect.self || effect.dustproof));
},
onCriticalHit: false,
id: "pinksmoke",
name: "<NAME>",
rating: 2.5,
num: 19,
},
"lonewolf": {
desc: "This Pokemon's Sp. Attack is raised by 1 stage if it attacks and knocks out another Pokemon.",
shortDesc: "This Pokemon's Sp. Attack is raised by 1 stage if it attacks and KOes another Pokemon.",
onSourceFaint: function (target, source, effect) {
if (effect && effect.effectType === 'Move') {
this.boost({spa: 1}, source);
}
},
id: "lonewolf",
name: "<NAME>",
rating: 3.5,
num: 153,
},
}; exports.Abilities = Abilities; |
philnik777/CppMake | include/CppBuildSystem/internal/SystemEnvironment.hpp | <filename>include/CppBuildSystem/internal/SystemEnvironment.hpp<gh_stars>0
#include "CppBuildSystem/internal/EnvironmentInfo.hpp"
#include <string>
namespace BuildEnv
{
class SystemEnvironment
{
public:
CompilerInfo getCompilerInfo();
std::string getPkgConfigExecutable();
private:
};
}
|
craft095/sere | lib/nfasl/Dfasl.hpp | #ifndef DFASL_HPP
#define DFASL_HPP
#include <set>
#include <vector>
#include <nlohmann/json.hpp>
#include "boolean/Expr.hpp"
using json = nlohmann::json;
namespace rt {
class Dfasl;
}
namespace nfasl {
class Nfasl;
}
namespace dfasl {
typedef boolean::Expr Predicate;
typedef size_t State;
typedef std::set<State> States;
struct TransitionRule {
Predicate phi;
State state;
};
typedef std::vector<TransitionRule> TransitionRules;
class Dfasl {
public:
size_t atomicCount;
size_t stateCount;
State initial;
States finals;
std::vector<TransitionRules> transitions;
};
extern void complement(dfasl::Dfasl& a);
extern void toDfasl(const nfasl::Nfasl& a, Dfasl& b);
extern void from_json(const json& j, Dfasl& a);
extern void to_json(json& j, const Dfasl& a);
extern std::string pretty(const Dfasl& a);
extern void toRt(const Dfasl& u, rt::Dfasl& v);
} // namespace dfasl
#endif // DFASL_HPP
|
bsf2dev/bsf | Source/Scripting/bsfScript/Generated/BsScriptCAudioSource.generated.h | //********************************* bs::framework - Copyright 2018-2019 <NAME> ************************************//
//*********** Licensed under the MIT license. See LICENSE.md for full terms. This notice is not to be removed. ***********//
#pragma once
#include "BsScriptEnginePrerequisites.h"
#include "Wrappers/BsScriptComponent.h"
#include "../../../Foundation/bsfCore/Audio/BsAudioSource.h"
namespace bs { class CAudioSource; }
namespace bs
{
class BS_SCR_BE_EXPORT ScriptCAudioSource : public TScriptComponent<ScriptCAudioSource, CAudioSource>
{
public:
SCRIPT_OBJ(ENGINE_ASSEMBLY, ENGINE_NS, "AudioSource")
ScriptCAudioSource(MonoObject* managedInstance, const GameObjectHandle<CAudioSource>& value);
private:
static void Internal_setClip(ScriptCAudioSource* thisPtr, MonoObject* clip);
static MonoObject* Internal_getClip(ScriptCAudioSource* thisPtr);
static void Internal_setVolume(ScriptCAudioSource* thisPtr, float volume);
static float Internal_getVolume(ScriptCAudioSource* thisPtr);
static void Internal_setPitch(ScriptCAudioSource* thisPtr, float pitch);
static float Internal_getPitch(ScriptCAudioSource* thisPtr);
static void Internal_setIsLooping(ScriptCAudioSource* thisPtr, bool loop);
static bool Internal_getIsLooping(ScriptCAudioSource* thisPtr);
static void Internal_setPriority(ScriptCAudioSource* thisPtr, uint32_t priority);
static uint32_t Internal_getPriority(ScriptCAudioSource* thisPtr);
static void Internal_setMinDistance(ScriptCAudioSource* thisPtr, float distance);
static float Internal_getMinDistance(ScriptCAudioSource* thisPtr);
static void Internal_setAttenuation(ScriptCAudioSource* thisPtr, float attenuation);
static float Internal_getAttenuation(ScriptCAudioSource* thisPtr);
static void Internal_setTime(ScriptCAudioSource* thisPtr, float time);
static float Internal_getTime(ScriptCAudioSource* thisPtr);
static void Internal_setPlayOnStart(ScriptCAudioSource* thisPtr, bool enable);
static bool Internal_getPlayOnStart(ScriptCAudioSource* thisPtr);
static void Internal_play(ScriptCAudioSource* thisPtr);
static void Internal_pause(ScriptCAudioSource* thisPtr);
static void Internal_stop(ScriptCAudioSource* thisPtr);
static AudioSourceState Internal_getState(ScriptCAudioSource* thisPtr);
};
}
|
syfxlin/xkjava | xkjava-app/src/main/java/me/ixk/app/beans/Log2Aspect.java | <filename>xkjava-app/src/main/java/me/ixk/app/beans/Log2Aspect.java<gh_stars>1-10
/*
* Copyright (c) 2020, <NAME> (<EMAIL>). All Rights Reserved.
*/
package me.ixk.app.beans;
import me.ixk.framework.annotation.core.Aspect;
import me.ixk.framework.annotation.core.Order;
import me.ixk.framework.aop.Advice;
import me.ixk.framework.aop.ProceedingJoinPoint;
@Aspect(pointcut = "@annotation(me.ixk.app.annotations.Log2)")
@Order(1)
public class Log2Aspect implements Advice {

    /**
     * Around-advice for methods annotated with @Log2: prints "Before", runs the
     * join point, prints "After", and returns the join point's result.
     * Any throwable from proceed() is printed and null is returned in its place.
     */
    @Override
    public Object around(final ProceedingJoinPoint joinPoint) {
        System.out.println("Before");
        Object outcome;
        try {
            outcome = joinPoint.proceed();
        } catch (final Throwable cause) {
            // proceed() declares Throwable; swallow it here (demo aspect) after dumping the trace
            cause.printStackTrace();
            outcome = null;
        }
        System.out.println("After");
        return outcome;
    }
}
|
srl295/icu | locexp/util/translitcb.c | <reponame>srl295/icu
/**********************************************************************
* Copyright (C) 1999-2002, International Business Machines
* Corporation and others. All Rights Reserved.
***********************************************************************/
/*
'transliterating' callback. render using the translit framework..
New scheme: Translate using "xx_YY-Latin" where xx_YY is a locale.
Not caching transliterators, anymore.
<NAME> <<EMAIL>>
*/
#include <stdio.h>
#include "unicode/translitcb.h"
#include "unicode/utrans.h"
#include "unicode/uchar.h"
#include "unicode/ustring.h"
#include "unicode/ucnv_cb.h"
#include <stdlib.h>
#define _beginMark "<FONT COLOR=green>" /* 18 */
#define _endMark "</FONT>" /* 7 */
#define L_beginMark L"<FONT COLOR=green>" /* 18 */
#define L_endMark L"</FONT>" /* 7 */
U_STRING_DECL(beginMark, _beginMark, 18 );
U_STRING_DECL( endMark, _endMark, 7);
/*
 * Lazily open the transliterator for ctx->locale. On any open failure the
 * "Null" transliterator is installed instead so callers always get a usable
 * (if identity) transliterator.
 */
static void TRANSLITERATED_LoadTransliteratorIfNotLoaded(FromUTransliteratorContext *ctx)
{
  char id[200];

  sprintf(id,"%s", ctx->locale);

  if(ctx->trans == 0)
  {
    ctx->transerr = U_ZERO_ERROR;
    ctx->trans = utrans_open(id, UTRANS_FORWARD, NULL, -1, NULL, &ctx->transerr);
    /* fprintf(stderr, "TR[%d:%s]=%p [%s]\n", 9133, id, ctx->trans, u_errorName(status)); */

    /* BUG FIX: the original tested U_FAILURE(status) on a local status that
       utrans_open() never wrote (its error lands in ctx->transerr), so the
       failure branch could only be reached via a NULL ctx->trans. Test the
       real error code, and close any partially-opened transliterator before
       falling back to the identity ("Null") transliterator. */
    if(ctx->trans == NULL || U_FAILURE(ctx->transerr))
    {
      UErrorCode status = U_ZERO_ERROR;
      if(ctx->trans != NULL)
      {
        utrans_close(ctx->trans);
      }
      ctx->trans = utrans_open("Null", UTRANS_FORWARD, NULL, 0, NULL, &status);
    }
  }
}
/*
 * from-Unicode converter callback: instead of emitting a substitution
 * character for unmappable input, transliterate the offending text with the
 * ctx->locale transliterator and write the result through the UTF-8 converter
 * (optionally wrapped in a green <FONT> tag when ctx->html is set).
 */
U_CAPI void
UCNV_FROM_U_CALLBACK_TRANSLITERATED (const void *context,
                     UConverterFromUnicodeArgs *fromUArgs,
                     const UChar* codeUnits,
                     int32_t length,
                     UChar32 codePoint,
                     UConverterCallbackReason reason,
                     UErrorCode *err)
{
  int32_t len;
  UErrorCode status2 = U_ZERO_ERROR;
  UBlockCode scriptBlock = -1;   /* sentinel: no block matched yet */
  int srclen;
  FromUTransliteratorContext *ctx;
  UConverter *oC = NULL;
  int n = 0;                     /* number of UChars accumulated in totrans */
  UChar totrans[300];

  ctx = (FromUTransliteratorContext*) context;

  /* lifecycle notifications: propagate reset/close to our private UTF-8
     converter and transliterator, then bail out */
  if(reason == UCNV_RESET)
  {
    if(ctx->utf8)
    {
      ucnv_reset(ctx->utf8);
    }
    return;
  }
  else if(reason == UCNV_CLOSE)
  {
    if(ctx->trans)
    {
      utrans_close(ctx->trans);
    }
    if(ctx->utf8)
    {
      ucnv_close(ctx->utf8);
    }
    return;
  }
  else if(reason > UCNV_IRREGULAR)
  {
    return; /* ? */
  }

  *err = U_ZERO_ERROR; /* so that we get called in a loop */

  if(ctx->locale == NULL)
  {
    /* I guess they don't want anything, yet. */
    return;
  }

  TRANSLITERATED_LoadTransliteratorIfNotLoaded(ctx);

  /* seed the buffer with the units that triggered the callback */
  u_strncpy(totrans, codeUnits, length);
  n = length;

  /* re-entrancy guard: if we are already inside our own utf8 writer, stop */
  if(fromUArgs->converter == ctx->utf8)
  {
    /* use callbacks here! */
    return;
  }

  if(ctx->utf8 == NULL)
  {
    UErrorCode u8err = U_ZERO_ERROR;
    ctx->utf8 = ucnv_open("utf-8", &u8err);
  }

  /* FATAL ERR .......... */
  if( (ctx->utf8==NULL) || (ctx->trans==NULL) )
  {
    *err = U_INTERNAL_PROGRAM_ERROR;
    return;
  }

  /* the <FONT> thing : open the highlight tag */
  if(ctx->html == TRUE)
  {
    const UChar *mySource;
    mySource = beginMark;
    *err = U_ZERO_ERROR;
    ucnv_cbFromUWriteUChars(fromUArgs, &mySource, mySource+u_strlen(beginMark), 0, err);
  }

  /* Todo: check script of the rest of the invaliducharbuffer */
  /* Look for any more chars with the same script. NOTE: the trailing break
     makes this loop run at most once — kept as-is to preserve behavior. */
  while(fromUArgs->source < fromUArgs->sourceLimit)
  {
    /* TODO: UTF-16 support */
    for(srclen=0; (fromUArgs->source+srclen)<fromUArgs->sourceLimit && ublock_getCode( fromUArgs->source[srclen] ) == scriptBlock ; srclen++);

    if(srclen > 0)
    {
      u_strncpy(totrans+n, fromUArgs->source, srclen);
      n += srclen;
    }

    /* If we found any, xliterate them and push through the utf8 converter */
    if(n > 0)
    {
      const UChar *mySource;
      len = n;
      utrans_transUChars(ctx->trans, totrans, &n, 300, 0, &len, &status2);
      mySource = totrans;
      /* temporarily swap in our utf8 converter so the write is re-encoded */
      oC = fromUArgs->converter;
      fromUArgs->converter = ctx->utf8;
      ucnv_cbFromUWriteUChars(fromUArgs,
                  &mySource,
                  mySource+len,
                  0,
                  err);
      fromUArgs->converter = oC;
      fromUArgs->source += srclen; /* if any of the actual source was found */
      n = 0; /* reset */
    }
    /* NOTE(review): when srclen lands exactly on sourceLimit this reads one
       UChar past the consumed range — preexisting behavior, left unchanged */
    scriptBlock = ublock_getCode(fromUArgs->source[srclen]);
    break;
  }

  /* handle single char case (nothing consumed from source above) */
  if(n > 0)
  {
    const UChar *mySource;
    len = n;
    srclen = 0;
    utrans_transUChars(ctx->trans, totrans, &n, 300, 0, &len, &status2);
    mySource = totrans;
    oC = fromUArgs->converter;
    fromUArgs->converter = ctx->utf8;
    ucnv_cbFromUWriteUChars(fromUArgs,
                &mySource,
                mySource+len,
                0,
                err);
    fromUArgs->converter = oC;
    fromUArgs->source += srclen; /* if any of the actual source was found */
    n = 0; /* reset */
  }

  /* close the highlight tag */
  if(ctx->html == TRUE)
  {
    const UChar *mySource;
    mySource = endMark;
    /* BUG FIX: the original passed u_strlen(beginMark) (18 UChars) as the
       length while writing endMark (7 UChars), over-reading 11 UChars past
       the end of endMark. Use the length of the string actually written. */
    ucnv_cbFromUWriteUChars(fromUArgs, &mySource, mySource+u_strlen(endMark), 0, err);
  }
}
|
yexiuph/RanOnline | [Lib]__Engine/Sources/G-Logic/DxConsoleMsg.h | <reponame>yexiuph/RanOnline<gh_stars>0
#ifndef DXCONSOLEMSG_H_
#define DXCONSOLEMSG_H_
#if _MSC_VER > 1000
#pragma once
#endif // _MSC_VER > 1000
/**
 * Abstract sink for formatted console/debug messages (printf-style varargs).
 * Implementations receive either a plain message or a message with a type code.
 */
class DxConsoleMsg
{
public:
	// BUG FIX: a polymorphic interface class must have a virtual destructor so
	// that deleting an implementation through a DxConsoleMsg* is well-defined.
	virtual ~DxConsoleMsg() {}

	virtual void Write( const TCHAR* msg, ... ) = 0;
	// nType: implementation-defined message category code (verify against implementors).
	virtual void Write( int nType, const TCHAR* msg, ... ) = 0;
};
#endif // DXCONSOLEMSG_H_ |
nokia/osgi-microfeatures | com.alcatel.as.http.ioh/src/com/alcatel/as/http/ioh/impl/HttpIOHEngine.java | // Copyright 2000-2021 Nokia
//
// Licensed under the Apache License 2.0
// SPDX-License-Identifier: Apache-2.0
//
package com.alcatel.as.http.ioh.impl;
import com.alcatel.as.http.parser.*;
import com.alcatel.as.http2.*;
import com.alcatel.as.http.ioh.*;
import com.alcatel.as.ioh.engine.*;
import com.alcatel.as.ioh.engine.tools.*;
import com.alcatel.as.ioh.engine.IOHEngine.MuxClient;
import com.alcatel.as.ioh.*;
import com.alcatel.as.ioh.server.*;
import com.alcatel.as.ioh.tools.*;
import com.alcatel.as.ioh.tools.ChannelWriter.SendBufferMonitor;
import org.osgi.framework.BundleContext;
import org.osgi.framework.ServiceReference;
import org.apache.log4j.Logger;
import org.osgi.service.component.annotations.*;
import alcatel.tess.hometop.gateways.reactor.*;
import com.alcatel.as.service.concurrent.*;
import com.alcatel.as.ioh.impl.conf.Property;
import java.util.regex.*;
import java.io.*;
import java.nio.*;
import java.net.*;
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.locks.*;
import java.util.concurrent.atomic.*;
import com.alcatel_lucent.as.management.annotation.config.*;
import com.alcatel.as.service.discovery.*;
import com.alcatel.as.util.config.ConfigConstants;
import com.alcatel.as.service.metering2.*;
import com.alcatel.as.service.metering2.util.*;
import com.nextenso.mux.*;
import com.nextenso.mux.util.MuxIdentification;
public class HttpIOHEngine extends IOHEngine implements AdvertisementTracker.Listener {
private HttpIOHRouter _router;
private AdvertisementTracker _trackerRemote, _trackerFarRemote;
private BundleContext _osgi;
private ConnectionFactory _connF;
protected Meter _readReqMeter, _readWsMeter;
protected Meter _parserErrorMeter, _parserChunkedMeter;
//protected Meter _writeMsgMeter, _writeReqMeter;
protected Meter _writeRespMeter, _writeWsMeter;
protected Meter _webSocketsOpenMeter, _webSocketsUpgradedMeter;
protected Map<Object, Meter> _writeByTypeMeters = new HashMap<> ();
protected Map<Object, Meter> _readByTypeMeters = new HashMap<> ();
  /**
   * Builds the engine: creates this engine's HTTP router from the factory and
   * keeps the HTTP/2 connection factory for later server-connection creation.
   */
  protected HttpIOHEngine (String name, IOHServices services, HttpIOHRouterFactory routerFactory, ConnectionFactory cf){
    super (name, services);
    _router = routerFactory.newHttpIOHRouter ();
    _connF = cf;
  }
public HttpIOHRouter getHttpIOHRouter (){ return _router;}
  /**
   * Configures the engine from the TcpServer properties, registers all HTTP
   * metering (request/response rate meters, per-method and per-status
   * counters) and, when remote IOH is used, prepares advertisement trackers
   * for remote (292) and far-remote (293) HttpIOH instances.
   */
  public IOHEngine init (TcpServer server, BundleContext osgi, Dictionary<String, String> system){
    _osgi = osgi;
    // force engine-level transport behavior before super.init() reads the properties
    server.getProperties ().put (PROP_TCP_CONNECT_SHARED, "false");
    server.getProperties ().put (PROP_TCP_LISTEN_RETRY, 5); // give 5 secs to complete
    server.getProperties ().put (PROP_TCP_LISTEN_NOTIFY, "true");
    server.getProperties ().put (PROP_UDP, "false");
    server.getProperties ().put (PROP_PROTOCOL_TEXT, "false");
    if (server.getProperties ().get (PROP_AGENT_LOAD_METER) == null)
      server.getProperties ().put (PROP_AGENT_LOAD_METER, "resp.latency");
    super.init (server);
    _router.init (this);
    MeteringService metering = getIOHServices ().getMeteringService ();
    // request/response counters, each paired with a 1s rate meter
    _readReqMeter = getIOHMeters ().createIncrementalMeter ("read.req", null);
    getIOHMeters ().addMeter (Meters.createRateMeter (metering, _readReqMeter, 1000L));
    _readWsMeter = getIOHMeters ().createIncrementalMeter ("read.tcp.ws", null);
    getIOHMeters ().addMeter (Meters.createRateMeter (metering, _readWsMeter, 1000L));
    _parserChunkedMeter = getIOHMeters ().createIncrementalMeter ("parser.chunked", null);
    _parserErrorMeter = getIOHMeters ().createIncrementalMeter ("parser.error", null);
    _webSocketsOpenMeter = getIOHMeters ().createIncrementalMeter ("channel.open.ws", null);
    _webSocketsUpgradedMeter = getIOHMeters ().createIncrementalMeter ("channel.upgraded.ws", null);
    _writeRespMeter = getIOHMeters ().createIncrementalMeter ("write.resp", null);
    getIOHMeters ().addMeter (Meters.createRateMeter (metering, _writeRespMeter, 1000L));
    _writeWsMeter = getIOHMeters ().createIncrementalMeter ("write.tcp.ws", null);
    getIOHMeters ().addMeter (Meters.createRateMeter (metering, _writeWsMeter, 1000L));
    // per-HTTP-method read counters (children of read.req)
    String[] methods = new String[]{"OPTIONS", "GET", "HEAD", "POST", "PUT", "DELETE", "TRACE", "CONNECT", "PATCH", "OTHER"};
    for (String method : methods){
      Meter tmp = null;
      _readByTypeMeters.put (method, tmp = getIOHMeters ().createIncrementalMeter ("read.req."+method, _readReqMeter));
      //getIOHMeters ().addMeter (Meters.createRateMeter (metering, tmp, 1000L));
      //_writeByTypeMeters.put (method, tmp = getIOHMeters ().createIncrementalMeter ("write.req."+method, null));
      //getIOHMeters ().addMeter (Meters.createRateMeter (metering, tmp, 1000L));
    }
    // per-status write counters, registered under both the String and Integer keys
    String[] statuses = new String[]{"100", "101", "200", "201", "202", "203", "204", "205", "206", "300", "301", "302", "303", "304", "305", "306", "307", "308", "310", "400", "401", "402", "403", "404", "405", "406", "407", "408", "409", "410", "411", "412", "413", "414", "415", "416", "417", "426", "428", "429", "431", "500", "501", "502", "503", "504", "505", "506", "509", "510", "520", "999"};
    for (String status : statuses){
      Meter tmp = null;
      //tmp = getIOHMeters ().createIncrementalMeter ("read.msg.resp."+status, null);
      //_readByTypeMeters.put (status, tmp);
      //_readByTypeMeters.put (Integer.parseInt (status), tmp);
      //getIOHMeters ().addMeter (Meters.createRateMeter (metering, tmp, 1000L));
      _writeByTypeMeters.put (status, tmp = getIOHMeters ().createIncrementalMeter ("write.resp."+status, _writeRespMeter));
      _writeByTypeMeters.put (Integer.parseInt (status), tmp);
      //getIOHMeters ().addMeter (Meters.createRateMeter (metering, tmp, 1000L));
    }
    if (useRemoteIOH ()){
      // advertisement trackers used to discover remote (module 292) and
      // far-remote (module 293) HttpIOH instances of the same group/app
      _trackerRemote = new AdvertisementTracker (this)
        .addModuleIdFilter ("292", true)
        .addTargetGroupFilter (system.get (ConfigConstants.GROUP_NAME))
        .addFilter ("application.name", _name, true)
        .addInstanceNameFilter (system.get (ConfigConstants.INSTANCE_NAME), false); // we dont connect to ourselves - else infinite loop
      _trackerFarRemote = new AdvertisementTracker (this)
        .addModuleIdFilter ("293", true)
        .addTargetGroupFilter (system.get (ConfigConstants.GROUP_NAME))
        .addFilter ("application.name", _name, true);
      if (getBooleanProperty (HttpIOH.PROP_REMOTE_IMMEDIATE, false)){
        // immediate mode : open the trackers now rather than in start()
        _logger.debug (this+" : tracking remote HttpIOH");
        _trackerRemote.open (_osgi);
        _trackerFarRemote.open (_osgi);
      }
    }
    new HttpIOHStats (this).register (_osgi);
    return this;
  }
@Override
public boolean start (BundleContext osgi){
if (super.start (osgi) == false) return false;
// track remote http ioh
if (useRemoteIOH () && !getBooleanProperty (HttpIOH.PROP_REMOTE_IMMEDIATE, false)){
_logger.debug (this+" : tracking remote HttpIOH");
_trackerRemote.open (_osgi);
_trackerFarRemote.open (_osgi);
}
return true;
}
@Override
public void stop (){
super.stop ();
_trackerRemote.close ();
_trackerFarRemote.close ();
//TODO : unregister HttpIOHStats
}
//****** AdvertisementTracker.Listener ***********//
  /** AdvertisementTracker.Listener callback — no-op here (tracking handled elsewhere); returns no context. */
  public Object up (AdvertisementTracker tracker, InetSocketAddress addr, ServiceReference ref){
    return null;
  }
  /** AdvertisementTracker.Listener callback — no-op counterpart of up(). */
  public void down (AdvertisementTracker tracker, ServiceReference ref, Object ctx){
  }
  /** Factory override: produce the HTTP-specific server channel wrapper. */
  @Override
  protected IOHTcpServerChannel newTcpServerChannel (IOHEngine engine, TcpServer server){
    return new HttpIOHTcpServerChannel (engine, server);
  }
  /** Factory override: produce the HTTP-specific accepted-connection channel. */
  @Override
  protected IOHTcpChannel newTcpChannel (IOHEngine engine, TcpServer server, TcpChannel channel, Map<String, Object> props){
    return new HttpIOHTcpChannel (engine, server, channel, props);
  }
  /** Factory override: produce the HTTP-specific outgoing (client) channel. */
  @Override
  protected IOHTcpClientChannel newTcpClientChannel (MuxClient agent, long connectionId, InetSocketAddress remote, Map<ReactorProvider.TcpClientOption, Object> opts){
    return new HttpIOHTcpClientChannel (agent, connectionId, remote, opts);
  }
  /**
   * Per-agent initialization: adds a "local.&lt;protocol&gt;" alias for co-located
   * agents, creates the resp.latency meter used as the agent load meter, and
   * lets the router set up its own per-agent state.
   */
  @Override
  public void initMuxClient (MuxClient agent){
    super.initMuxClient (agent);
    if (agent.isLocalAgent ()){
      // we make an alias to discriminate the jetty agent and the pxlet agent : this was useful for http2 termination - may still be convenient
      // in case we have both co-located
      String protocol = agent.getApplicationParam ("agent.protocol", null);
      if (protocol != null) agent.aliases ().add ("local."+protocol);
    }
    agent.getIOHMeters ().createAbsoluteMeter ("resp.latency");
    _router.initMuxClient (agent);
  }
  /**
   * Handles agent-originated control messages (mux version 0):
   *  - flag 0x4A : switch a channel to websocket pass-through mode;
   *  - flag 0x4C : pipe a client channel and a server channel together (HTTP CONNECT).
   * Returns true when the message was consumed here, false to let the default
   * mux handling proceed.
   */
  @Override
  public boolean sendMuxData(MuxClient agent, MuxHeader header, boolean copy, ByteBuffer ... buf) {
    if (header.getVersion () == 0){
      switch (header.getFlags ()){
      case 0x4A:
        // a channel is moved to websocket mode
        int sockId = header.getChannelId ();
        if (_logger.isDebugEnabled ())
          _logger.debug (agent+" : upgrade websocket : "+sockId);
        IOHChannel channel = agent.getTcpChannel (sockId);
        if (channel == null) return false;
        ((HttpIOHTcpChannel)channel).upgradeWebSocket (agent);
        _webSocketsUpgradedMeter.inc (1);
        return true;
      case 0x4C:
        // two channels are piped (CONNECT use case)
        int clientSocketId = (int) header.getSessionId ();
        int serverSocketId = header.getChannelId ();
        if (_logger.isDebugEnabled ())
          _logger.debug (agent+" : connect sockets : client="+clientSocketId+" : server="+serverSocketId);
        IOHChannel clientChannel = agent.getTcpChannel (clientSocketId);
        IOHChannel serverChannel = agent.getTcpChannel (serverSocketId);
        // if either leg is missing, close the surviving one : a half-pipe is useless
        if (clientChannel == null){
          if (serverChannel != null){
            if (_logger.isInfoEnabled ())
              _logger.info (agent+" : CONNECT sockets : cannot find client socket : "+clientSocketId+" : closing server socket : "+serverChannel);
            serverChannel.close ();
          } else {
            if (_logger.isInfoEnabled ())
              _logger.info (agent+" : CONNECT sockets : cannot find client socket : "+clientSocketId+" : cannot find server socket : "+serverSocketId);
          }
          return true;
        }
        if (serverChannel == null){
          if (_logger.isInfoEnabled ())
            _logger.info (agent+" : CONNECT sockets : cannot find server socket : "+serverSocketId+" : closing client socket : "+clientChannel);
          clientChannel.close ();
          return true;
        }
        // wire both directions of the tunnel
        ((HttpIOHTcpChannel)clientChannel).connect (serverChannel);
        ((HttpIOHTcpClientChannel)serverChannel).connect (clientChannel);
        return true;
      }
    }
    return false;
  }
private static class HttpIOHTcpServerChannel extends IOHTcpServerChannel {
protected boolean _isServerMode, _isProxyMode;
protected List<Pattern> _acceptPatterns;
protected ConnectionConfig _http2Config;
protected HttpIOHTcpServerChannel (IOHEngine engine, TcpServer server){
super (engine, server);
_isProxyMode = IOHEngine.getBooleanProperty (HttpIOH.PROP_MODE_PROXY, server.getProperties (), false);
_isServerMode = IOHEngine.getBooleanProperty (HttpIOH.PROP_MODE_SERVER, server.getProperties (), false);
List<String> acceptList = Property.getStringListProperty ("http.ioh.URL.accept", server.getProperties ());
if (acceptList != null){
for (String accept : acceptList){
Pattern p = Pattern.compile(accept);
if (_acceptPatterns == null) _acceptPatterns = new ArrayList<> ();
_acceptPatterns.add (p);
}
}
acceptList = Property.getStringListProperty ("http.ioh.URL.accept", engine.getProperties ());
if (acceptList != null){
for (String accept : acceptList){
Pattern p = Pattern.compile(accept);
if (_acceptPatterns == null) _acceptPatterns = new ArrayList<> ();
_acceptPatterns.add (p);
}
}
Object s = engine.getProperties ().get (IOHEngine.PROP_TCP_SEND_BUFFER);
if (s != null &&
server.getProperties ().get (ConnectionConfig.PROP_CONN_WRITE_BUFFER) == null)
server.getProperties ().put (ConnectionConfig.PROP_CONN_WRITE_BUFFER, s); // propagate to http2 config
Settings settings = new Settings ().load (server.getProperties ());
_http2Config = new ConnectionConfig (settings, _logger)
.priorKnowledge (false)
.load (true, server.getProperties ());
}
}
  /**
   * Outgoing (server-side) connection channel. When piped to a client channel
   * via connect() — the CONNECT tunnel case — received bytes are forwarded
   * raw to the peer instead of going through the default mux path.
   */
  private static class HttpIOHTcpClientChannel extends IOHTcpClientChannel {
    protected IOHChannel _connected;

    protected HttpIOHTcpClientChannel (MuxClient agent, long connectionId, InetSocketAddress dest, Map<ReactorProvider.TcpClientOption, Object> opts){
      super (agent, connectionId, dest, opts);
    }

    /** Pipes this channel to its tunnel peer (CONNECT use case). */
    protected void connect (IOHChannel other){
      _connected = other;
    }

    @Override
    public void connectionClosed (){
      // tear down the other leg of the tunnel along with this one
      if (_connected != null) _connected.close ();
      super.connectionClosed ();
    }

    @Override
    public int messageReceived(TcpChannel cnx,
                   ByteBuffer buff){
      if (_connected == null){
        return super.messageReceived (cnx, buff);
      }
      if (disabled (buff))
        return 0;
      logReceived (null, buff);
      _readMeter.inc (buff.remaining ());
      // tunnel mode : forward raw bytes to the peer; on failure, drop the
      // remaining bytes (limit=position empties the buffer) and close
      if (_connected.sendOut (null, null, true, true, buff) == false){
        buff.limit (buff.position ());
        close ();
      }
      return 0;
    }
  }
protected static class HttpIOHTcpChannel extends IOHTcpChannel implements Http2RequestListener {
private HttpIOHChannelImpl _httpChannel;
protected HttpParser _parser = new HttpParser ();
protected Map<Object, Boolean> _notifiedAgentsMap = new HashMap<> ();
protected boolean _isWebSocket, _isHttp2;
protected IOHChannel _connected;
protected long _sentRequestTimestamp = Long.MAX_VALUE;
protected HttpIOHTcpServerChannel _serverChannel;
protected Connection _http2Conn;
protected Map<String, Object> _props;
    /**
     * Wraps an accepted connection: creates the HTTP channel helper, applies
     * the listening socket's URL accept patterns, and switches straight to
     * HTTP/2 when prior-knowledge is configured.
     */
    private HttpIOHTcpChannel (IOHEngine engine, TcpServer server, TcpChannel channel, Map<String, Object> props){
      super (engine, channel, props);
      _props = props;
      _httpChannel = new HttpIOHChannelImpl ((HttpIOHEngine) engine, this, true);
      _serverChannel = server.attachment ();
      _httpChannel.setAcceptURLPatterns (_serverChannel._acceptPatterns);
      // prior-knowledge http2 : no upgrade dance, start in http2 immediately
      if (IOHEngine.getBooleanProperty (ConnectionConfig.PROP_CONN_PRIOR_KNOWLEDGE, props, false))
        initHttp2 (false);
    }
    /**
     * Switches this channel to HTTP/2: builds a server Connection from the
     * listening socket's config (with a dedicated write executor).
     * skipPRI is true when the PRI preface was already consumed by the parser.
     */
    private void initHttp2 (boolean skipPRI){
      ConnectionConfig cc = _serverChannel._http2Config.copy();
      cc.writeExecutor(_engine.createQueueExecutor());
      _http2Conn = ((HttpIOHEngine) _engine)._connF.newServerConnection (cc,
                                     (TcpChannel) _channel,
                                     this);
      _http2Conn.skipPRI (skipPRI).init ();
      _isHttp2 = true;
    }
    /**
     * Switches this channel to websocket pass-through mode and pins it to the
     * given agent. Executed in the channel's queue; no-op if the channel is
     * closed or already upgraded (the parser may upgrade spontaneously).
     */
    protected void upgradeWebSocket (final MuxClient agent){
      Runnable r = new Runnable (){
        public void run (){
          if (_closed) return;
          if (_isWebSocket) return; // may have been done spontaneously by parser
          ((HttpIOHEngine)_engine)._webSocketsOpenMeter.inc (1);
          _isWebSocket = true;
          _httpChannel.attachAgent (agent);
        }};
      schedule (r);
    }
    /** Pipes this channel to its tunnel peer (CONNECT use case). */
    protected void connect (IOHChannel other){
      _connected = other;
    }
    /** Delegates to the base channel, then to the HTTP channel helper; both must accept. */
    @Override
    public boolean agentConnected (MuxClient agent, MuxClientState state){
      return super.agentConnected (agent, state) && _httpChannel.agentConnected (agent, state);
    }
    /** Delegates agent close to the base channel, then to the HTTP channel helper. */
    @Override
    public boolean agentClosed (MuxClient agent){
      return super.agentClosed (agent) && _httpChannel.agentClosed (agent);
    }
    /** Delegates agent stop to the base channel, then to the HTTP channel helper. */
    @Override
    public boolean agentStopped (MuxClient agent){
      return super.agentStopped (agent) && _httpChannel.agentStopped (agent);
    }
    /** Delegates agent un-stop to the base channel, then to the HTTP channel helper. */
    @Override
    public boolean agentUnStopped (MuxClient agent){
      return super.agentUnStopped (agent) && _httpChannel.agentUnStopped (agent);
    }
    /** Intentionally empty: the open notification is deferred until notifyOpenToAgentNow(). */
    @Override
    protected void notifyOpenToAgent (MuxClient agent, long connectionId){
    }
    /** Only notify close to agents that actually got the (deferred) open notification. */
    @Override
    protected void notifyCloseToAgent (MuxClient agent){
      if (_notifiedAgentsMap.remove (agent) != null)
        super.notifyCloseToAgent (agent);
    }
    /** Performs the deferred open notification exactly once per agent, and records it. */
    private void notifyOpenToAgentNow (MuxClient agent){
      if (_notifiedAgentsMap.get (agent) == null){
        super.notifyOpenToAgent (agent, 0L);
        _notifiedAgentsMap.put (agent, Boolean.TRUE);
      }
    }
    /** Cleans up on close: websocket meter, tunnel peer and http2 connection (when present). */
    @Override
    public void connectionClosed (){
      if (_isWebSocket) ((HttpIOHEngine)_engine)._webSocketsOpenMeter.inc (-1);
      if (_connected != null) _connected.close ();
      if (_http2Conn != null) _http2Conn.closed ();
      super.connectionClosed ();
    }
@Override
public void receiveTimeout (){
if (_isHttp2){
_http2Conn.receiveTimeout ();
} else {
super.receiveTimeout ();
}
}
	/**
	 * Entry point for raw bytes read from the TCP socket.
	 * Dispatches according to the channel mode : tunnel (connected peer),
	 * websocket, http2, or http/1.x parsing. Always returns 0 (no bytes kept
	 * for re-delivery by the caller).
	 */
	@Override
	public int messageReceived(TcpChannel cnx,
				   ByteBuffer buff){
	    if (disabled (buff))
		return 0;
	    _readMeter.inc (buff.remaining ());
	    // loop so pipelined http/1.x messages in the same buffer are all parsed
	    while (true){
		// tunnel mode : forward the raw bytes to the paired channel (see connect())
		if (_connected != null){
		    if (_connected.sendOut (null, null, true, true, buff) == false){
			buff.limit (buff.position ()); // empty the buffer by precaution
			close ();
		    }
		    return 0;
		}
		// websocket mode : hand the frame bytes to the router
		if (_isWebSocket){
		    if (_logger.isDebugEnabled ()) _logger.debug (this+" : RECEIVED WebSocket data : "+buff.remaining ());
		    ((HttpIOHEngine)_engine)._readWsMeter.inc (buff.remaining ());
		    if (_httpChannel.handleWebSocket (buff) == false){
			buff.position (buff.limit ()); // empty by precaution
			close ();
		    }
		    return 0;
		}
		// http2 mode : the http2 connection does its own framing
		if (_isHttp2){
		    _http2Conn.received (buff);
		    return 0;
		}
		// http/1.x mode : parse the next message (request only)
		HttpMessageImpl msg = null;
		try{
		    msg = _parser.parseMessage (buff);
		    if (msg == null)
			return 0;
		    if (msg.isRequest () == false)
			throw new IOException ("Received a response : only requests are expected");
		    if (msg.isFirst ()){
			// validate the request URL form against the configured mode(s)
			boolean checkURL = false;
			if (_serverChannel._isServerMode)
			    checkURL = msg.getURL ().startsWith ("/"); // origin-form
			if (!checkURL && _serverChannel._isProxyMode){
			    // absolute-form : http:// or https://
			    String url = msg.getURL ();
			    checkURL = url.startsWith ("http") &&
				(url.regionMatches (false, 4, "://", 0, 3) || url.regionMatches (false, 4, "s://", 0, 4));
			}
			if (!checkURL){
			    if (_serverChannel._isServerMode && _serverChannel._isProxyMode) throw new IOException ("request URL is not acceptable : "+msg.getURL ());
			    if (_serverChannel._isServerMode) throw new IOException ("request URL is not in server mode : "+msg.getURL ());
			    if (_serverChannel._isProxyMode) throw new IOException ("request URL is not in proxy mode : "+msg.getURL ());
			}
		    }
		} catch(HttpParser.Http2Exception he){ // we were not upgraded, but we receive an HTTP2 PRI
		    if (IOHEngine.getBooleanProperty ("http2.enabled", _props, true) == false){
			if (_logger.isInfoEnabled ()) _logger.info (this+" : read HTTP2 request : upgrade http2 forbidden");
			((HttpIOHEngine)_engine)._parserErrorMeter.inc (1);
			buff.position (buff.limit ());
			close ();
			return 0;
		    }
		    // spontaneous upgrade : re-feed the buffer (including the PRI preface) to http2
		    if (_logger.isInfoEnabled ()) _logger.info (this+" : read HTTP2 request : upgrade http2 spontaneously");
		    initHttp2 (true);
		    _http2Conn.received (buff);
		    return 0;
		} catch (Throwable t){
		    // any parsing/validation failure closes the channel
		    if (_logger.isDebugEnabled ()) _logger.debug (this+" : parsing exception", t.getCause () != null ? t.getCause () : t);
		    ((HttpIOHEngine)_engine)._parserErrorMeter.inc (1);
		    buff.position (buff.limit ());
		    close ();
		    return 0;
		}
		if (msg.isFirst ()){
		    if (msg.isChunked ()) ((HttpIOHEngine)_engine)._parserChunkedMeter.inc (1);
		}
		if (_logger.isDebugEnabled ()) _logger.debug (this+" : RECEIVED :\n["+msg+"]");
		// timestamp of the fully-received request, used by sendOut for the latency estimate
		if (msg.isLast ()) _sentRequestTimestamp = System.currentTimeMillis ();
		if (_httpChannel.handleMessage (msg) == false){
		    buff.position (buff.limit ()); // empty by precaution
		    close ();
		    return 0;
		}
		msg.setHasMore ();
	    }
	}
	/**
	 * Forwards data to the agent, first making sure the agent got its
	 * (deferred) open notification — see notifyOpenToAgent / notifyOpenToAgentNow.
	 */
	@Override
	public boolean sendAgent (MuxClient agent, InetSocketAddress from, boolean checkBuffer, long sessionId, boolean copy, ByteBuffer... buffs){
	    notifyOpenToAgentNow (agent);
	    return super.sendAgent (agent, from, checkBuffer, sessionId, copy, buffs);
	}
	/**
	 * Writes data out to the socket. In websocket mode it only maintains the
	 * websocket write meter; otherwise it updates the agent latency estimate
	 * and the per-status response meters.
	 */
	@Override
	public boolean sendOut (MuxClient agent, InetSocketAddress to, boolean checkBuffer, boolean copy, ByteBuffer... buffs){
	    if (_isWebSocket){
		int size = ByteBufferUtils.remaining (buffs); // measure before the write consumes the buffers
		if (super.sendOut (agent, to, checkBuffer, copy, buffs)){
		    ((HttpIOHEngine)_engine)._writeWsMeter.inc (size);
		    return true;
		} else {
		    return false;
		}
	    } else {
		if (agent != null){
		    // latency = time between the end of the request (messageReceived) and this response write
		    long now = System.currentTimeMillis ();
		    long elapsed = now - _sentRequestTimestamp;
		    if (elapsed >= 0) { // by precaution avoid negative value - in case of pipeline or early response before end of request body
			// exponential moving average with weight 1/8 : new = old + elapsed/8 - old/8
			HttpIOHRouter.AgentContext ctx = agent.getContext ();
			Meter latencyMeter = ctx._latencyMeter;
			long value = latencyMeter.getValue ();
			long newValue = value + (elapsed >> 3) - (value >> 3);
			latencyMeter.set (newValue);
		    }
		}
		String status = HttpParser.getStatus (buffs); // null when this is not the start of a response
		if (super.sendOut (agent, to, checkBuffer, copy, buffs)){
		    _httpChannel.incWriteResp (status);
		    return true;
		} else {
		    //_writeDroppedRespMeter.inc (1);
		    return false;
		}
	    }
	}
	//////////////////// Http2RequestListener
	// Pending http2 messages, presumably keyed by stream id.
	// NOTE(review): not referenced anywhere in this part of the file — confirm it is used elsewhere, or remove.
	private Map<Integer, Http2Message> _http2Msgs = new ConcurrentHashMap<> ();
@Override
public void newRequest (RequestContext rc){
Http2Message msg = new Http2Message (this, rc);
rc.attach (msg);
}
@Override
public void recvReqMethod (RequestContext rc, String method){
((Http2Message) rc.attachment ()).recvReqMethod (method);
}
@Override
public void recvReqPath (RequestContext rc, String path){
((Http2Message) rc.attachment ()).recvReqPath (path);
}
@Override
public void recvReqScheme (RequestContext rc, String scheme){
((Http2Message) rc.attachment ()).recvReqScheme (scheme);
}
@Override
public void recvReqAuthority (RequestContext rc, String auth){
((Http2Message) rc.attachment ()).recvReqAuthority (auth);
}
@Override
public void recvReqHeader (RequestContext rc, String name, String value){
((Http2Message) rc.attachment ()).recvReqHeader (name, value);
}
	/** End of the request header block : routes the message, complete or not. */
	@Override
	public void recvReqHeaders (RequestContext rc, boolean done){
	    Http2Message msg = ((Http2Message) rc.attachment ());
	    // when no body follows, mark the message complete BEFORE routing it
	    if (done) msg.done ();
	    triggerReq (msg);
	    // body data will follow : flag it only AFTER routing, so the routed
	    // message was seen in its headers-only state
	    if (!done) msg.setHasMore ();
	}
@Override
public void recvReqData (RequestContext rc, ByteBuffer data, boolean done){
Http2Message msg = ((Http2Message) rc.attachment ());
msg.recvReqData (data);
if (done) msg.done ();
triggerReq (msg);
}
	// Routes the (possibly partial) http2 request to the http channel.
	// NOTE(review): a routing failure (handleMessage returning false, i.e. it threw
	// internally) is currently swallowed — see the TODO.
	private void triggerReq (Http2Message msg){
	    if (_httpChannel.handleMessage (msg) == false){
		//TODO
		return;
	    }
	}
@Override
public void abortRequest (RequestContext rc){
Http2Message msg = ((Http2Message) rc.attachment ());
msg.aborted ();
}
}
    /**
     * HttpIOHChannel implementation : wraps an IOHChannel and bridges it with the
     * engine router (_engine._router) for request routing, agent lifecycle events,
     * websocket data and per-type read/write meters.
     */
    protected static class HttpIOHChannelImpl implements HttpIOHChannel {
	protected HttpIOHEngine _engine;
	protected boolean _isRemote; // true when this channel fronts a remote IOH engine (see setRemoteIOHEngine)
	protected Map<Object, MuxClient> _agentsMap = new HashMap<> (); // connected agents indexed by alias
	protected IOHChannel _channel; // the wrapped channel
	protected boolean _incoming;
	protected MuxClient _agent; // agent attached for websocket mode (see attachAgent)
	protected List<Pattern> _acceptPatterns; // URL whitelist; null means accept all
	protected HttpIOHChannelImpl (HttpIOHEngine engine, IOHChannel channel, boolean incoming){
	    _engine = engine;
	    _channel = channel;
	    _incoming = incoming;
	}
	public String toString (){ return _channel.toString ();}
	/** Sets the URL patterns accepted by handleMessage; unmatched URLs are answered with a 404. */
	protected void setAcceptURLPatterns (List<Pattern> patterns){
	    _acceptPatterns = patterns;
	}
	/** Flags this channel as fronting a remote IOH engine. Returns this for chaining. */
	protected HttpIOHChannelImpl setRemoteIOHEngine (){
	    _isRemote = true;
	    return this;
	}
	/** Registers the agent under all its aliases and notifies the router. */
	public boolean agentConnected (MuxClient agent, MuxClientState state){
	    for (String alias : agent.aliases ())
		_agentsMap.put (alias, agent);
	    _engine._router.agentConnected (this, agent, state);
	    return true;
	}
	/** Unregisters the agent; if it was attached to this channel, the channel is closed. */
	public boolean agentClosed (MuxClient agent){
	    for (String alias : agent.aliases ())
		_agentsMap.remove (alias);
	    _engine._router.agentClosed (this, agent);
	    if (agent == agentAttached ()){
		// we were in the middle of a transaction
		attachAgent (null);
		close (null);
	    }
	    return true;
	}
	public boolean agentStopped (MuxClient agent){
	    _engine._router.agentStopped (this, agent);
	    return true;
	}
	public boolean agentUnStopped (MuxClient agent){
	    _engine._router.agentUnStopped (this, agent);
	    return true;
	}
	/**
	 * Routes an incoming http request (always a request here) through the router.
	 * Returns false when routing threw, in which case the caller closes the channel.
	 */
	protected boolean handleMessage (HttpMessage msg){
	    try{
		// always a request
		if (msg.isFirst ()){
		    // per-method read meter, with an "OTHER" fallback for unknown methods
		    Meter meter = _engine._readByTypeMeters.get (msg.getMethod ());
		    if (meter != null){
			meter.inc (1);
		    } else {
			if (_channel.getLogger ().isInfoEnabled ())
			    _channel.getLogger ().info (_channel+" : received request with unknown Method : "+msg.getMethod ());
			_engine._readByTypeMeters.get ("OTHER").inc (1);
		    }
		    if (_acceptPatterns != null){ // NOTE THAT Http2Termination does not set the _acceptPatterns (TODO ?)
			boolean ok = false;
			for (Pattern pattern : _acceptPatterns){
			    if (pattern.matcher (msg.getURL ()).find ()){
				ok = true;
				break;
			    }
			}
			if (!ok){
			    _engine._router.handleError (this, msg, 404, "URL not accepted");
			    return true;
			}
		    }
		    if (!(msg instanceof Http2Message)){
			// TODO for Http2
			// Expect: 100-continue handling : strip the header and send the interim response
			HttpMessage.Header h = msg.getHeader("expect");
			if (h != null){
			    String expect = h.getValue ();
			    if (expect != null && expect.contains("100-continue")) {
				msg.removeHeader (h);
				_channel.sendOut (null, null, true, true, ByteBuffer.wrap (msg.getVersion () == 0 ? HttpUtils.HTTP10_100 : HttpUtils.HTTP11_100));
			    }
			}
		    }
		    _engine._router.handleRequestHeaders (this, msg);
		} else {
		    _engine._router.handleRequestBody (this, msg, msg.isLast ());
		}
	    } catch (Throwable t){
		_channel.getLogger ().warn (_channel+" : exception while routing msg : ["+msg+"]", t);
		return false;
	    }
	    return true;
	}
	/** Routes websocket frame data; returns false when routing threw. */
	protected boolean handleWebSocket (ByteBuffer buff){
	    try{
		return _engine._router.handleWebSocketData (this, buff);
	    } catch (Throwable t){
		_channel.getLogger ().warn (_channel+" : exception while routing websocket data", t);
		return false;
	    }
	}
	/** Increments the per-status write meter, with a "999" fallback for unknown statuses. */
	protected void incWriteResp (String status){
	    if (status == null) return; // this is not the beginning of a response
	    Meter meter = _engine._writeByTypeMeters.get (status);
	    if (meter != null){
		meter.inc (1);
	    } else {
		if (_channel.getLogger ().isInfoEnabled ())
		    _channel.getLogger ().info (_channel+" : sent response with unknown Status : "+status);
		_engine._writeByTypeMeters.get ("999").inc (1);
	    }
	}
	public boolean incoming (){ return _incoming;}
	public IOHChannel getIOHChannel (){ return _channel;}
	public <T extends AsyncChannel> T getChannel (){ return _channel.getChannel ();}
	public Logger getLogger (){ return _channel.getLogger ();}
	public PlatformExecutor getPlatformExecutor (){ return _channel.getPlatformExecutor ();}
	/** Sends the message to the agent; Http2Message handles its own serialization. */
	public boolean sendAgent (IOHEngine.MuxClient agent, HttpMessage msg){
	    if (agent == null) return false;
	    if (msg instanceof Http2Message){
		return ((Http2Message)msg).sendAgent (agent);
	    }
	    ByteBuffer[] buffs = msg.toByteBuffers ();
	    msg.setAgent (agent);
	    return _channel.sendAgent (agent, null, false, 0L, false, buffs);
	}
	/** Writes response data to the socket; Http2Message handles its own framing. */
	public boolean sendOut (HttpMessage msg, boolean checkBuffer, ByteBuffer... data){
	    if (msg instanceof Http2Message)
		return ((Http2Message)msg).sendOut (null, null, checkBuffer, false, data);
	    else
		return _channel.sendOut (null, null, checkBuffer, false, data);
	}
	public MuxClient getAgent (String instance){ return _agentsMap.get (instance);}
	public Map<Object, IOHEngine.MuxClient> getAgents (){ return _agentsMap;}
	public IOHEngine.MuxClient pickAgent (Object preferenceHint){ return _channel.getAgents ().pick (preferenceHint);}
	public void attach (Object attachment){ _channel.attach (attachment);}
	public <T> T attachment (){ return (T) _channel.attachment ();}
	public boolean isRemoteIOHEngine (){ return _isRemote;}
	public void close (HttpMessage msg){ // msg can be null for Websocket
	    if (msg != null && msg instanceof Http2Message)
		((Http2Message)msg).close (); // closes the http2 stream only, not the channel
	    else
		_channel.close ();
	}
	public void attachAgent (MuxClient agent){ // used for websocket
	    _agent = agent;
	}
	public MuxClient agentAttached (){ return _agent;}
    }
}
|
dmgerman/camel | components/camel-http/src/test/java/org/apache/camel/component/http/helper/HttpHelperTest.java | begin_unit|revision:0.9.5;language:Java;cregit-version:0.0.1
begin_comment
comment|/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */
end_comment
begin_package
DECL|package|org.apache.camel.component.http.helper
package|package
name|org
operator|.
name|apache
operator|.
name|camel
operator|.
name|component
operator|.
name|http
operator|.
name|helper
package|;
end_package
begin_import
import|import
name|java
operator|.
name|net
operator|.
name|URI
import|;
end_import
begin_import
import|import
name|java
operator|.
name|net
operator|.
name|URISyntaxException
import|;
end_import
begin_import
import|import
name|java
operator|.
name|util
operator|.
name|HashMap
import|;
end_import
begin_import
import|import
name|java
operator|.
name|util
operator|.
name|List
import|;
end_import
begin_import
import|import
name|java
operator|.
name|util
operator|.
name|Map
import|;
end_import
begin_import
import|import
name|org
operator|.
name|apache
operator|.
name|camel
operator|.
name|CamelContext
import|;
end_import
begin_import
import|import
name|org
operator|.
name|apache
operator|.
name|camel
operator|.
name|Exchange
import|;
end_import
begin_import
import|import
name|org
operator|.
name|apache
operator|.
name|camel
operator|.
name|Message
import|;
end_import
begin_import
import|import
name|org
operator|.
name|apache
operator|.
name|camel
operator|.
name|component
operator|.
name|http
operator|.
name|HttpEndpoint
import|;
end_import
begin_import
import|import
name|org
operator|.
name|apache
operator|.
name|camel
operator|.
name|http
operator|.
name|common
operator|.
name|HttpHelper
import|;
end_import
begin_import
import|import
name|org
operator|.
name|apache
operator|.
name|camel
operator|.
name|http
operator|.
name|common
operator|.
name|HttpMethods
import|;
end_import
begin_import
import|import
name|org
operator|.
name|apache
operator|.
name|camel
operator|.
name|impl
operator|.
name|DefaultCamelContext
import|;
end_import
begin_import
import|import
name|org
operator|.
name|apache
operator|.
name|camel
operator|.
name|support
operator|.
name|DefaultExchange
import|;
end_import
begin_import
import|import
name|org
operator|.
name|junit
operator|.
name|Test
import|;
end_import
begin_import
import|import static
name|org
operator|.
name|junit
operator|.
name|Assert
operator|.
name|assertEquals
import|;
end_import
begin_import
import|import static
name|org
operator|.
name|junit
operator|.
name|Assert
operator|.
name|assertFalse
import|;
end_import
begin_import
import|import static
name|org
operator|.
name|junit
operator|.
name|Assert
operator|.
name|assertNotNull
import|;
end_import
begin_import
import|import static
name|org
operator|.
name|junit
operator|.
name|Assert
operator|.
name|assertTrue
import|;
end_import
begin_class
DECL|class|HttpHelperTest
specifier|public
class|class
name|HttpHelperTest
block|{
annotation|@
name|Test
DECL|method|testAppendHeader ()
specifier|public
name|void
name|testAppendHeader
parameter_list|()
throws|throws
name|Exception
block|{
name|Map
argument_list|<
name|String
argument_list|,
name|Object
argument_list|>
name|headers
init|=
operator|new
name|HashMap
argument_list|<>
argument_list|()
decl_stmt|;
name|HttpHelper
operator|.
name|appendHeader
argument_list|(
name|headers
argument_list|,
literal|"foo"
argument_list|,
literal|"a"
argument_list|)
expr_stmt|;
name|HttpHelper
operator|.
name|appendHeader
argument_list|(
name|headers
argument_list|,
literal|"bar"
argument_list|,
literal|"b"
argument_list|)
expr_stmt|;
name|HttpHelper
operator|.
name|appendHeader
argument_list|(
name|headers
argument_list|,
literal|"baz"
argument_list|,
literal|"c"
argument_list|)
expr_stmt|;
name|assertEquals
argument_list|(
literal|3
argument_list|,
name|headers
operator|.
name|size
argument_list|()
argument_list|)
expr_stmt|;
name|assertEquals
argument_list|(
literal|"a"
argument_list|,
name|headers
operator|.
name|get
argument_list|(
literal|"foo"
argument_list|)
argument_list|)
expr_stmt|;
name|assertEquals
argument_list|(
literal|"b"
argument_list|,
name|headers
operator|.
name|get
argument_list|(
literal|"bar"
argument_list|)
argument_list|)
expr_stmt|;
name|assertEquals
argument_list|(
literal|"c"
argument_list|,
name|headers
operator|.
name|get
argument_list|(
literal|"baz"
argument_list|)
argument_list|)
expr_stmt|;
block|}
annotation|@
name|Test
DECL|method|testAppendHeaderMultipleValues ()
specifier|public
name|void
name|testAppendHeaderMultipleValues
parameter_list|()
throws|throws
name|Exception
block|{
name|Map
argument_list|<
name|String
argument_list|,
name|Object
argument_list|>
name|headers
init|=
operator|new
name|HashMap
argument_list|<>
argument_list|()
decl_stmt|;
name|HttpHelper
operator|.
name|appendHeader
argument_list|(
name|headers
argument_list|,
literal|"foo"
argument_list|,
literal|"a"
argument_list|)
expr_stmt|;
name|HttpHelper
operator|.
name|appendHeader
argument_list|(
name|headers
argument_list|,
literal|"bar"
argument_list|,
literal|"b"
argument_list|)
expr_stmt|;
name|HttpHelper
operator|.
name|appendHeader
argument_list|(
name|headers
argument_list|,
literal|"bar"
argument_list|,
literal|"c"
argument_list|)
expr_stmt|;
name|assertEquals
argument_list|(
literal|2
argument_list|,
name|headers
operator|.
name|size
argument_list|()
argument_list|)
expr_stmt|;
name|assertEquals
argument_list|(
literal|"a"
argument_list|,
name|headers
operator|.
name|get
argument_list|(
literal|"foo"
argument_list|)
argument_list|)
expr_stmt|;
name|List
argument_list|<
name|?
argument_list|>
name|list
init|=
operator|(
name|List
argument_list|<
name|?
argument_list|>
operator|)
name|headers
operator|.
name|get
argument_list|(
literal|"bar"
argument_list|)
decl_stmt|;
name|assertNotNull
argument_list|(
name|list
argument_list|)
expr_stmt|;
name|assertEquals
argument_list|(
literal|2
argument_list|,
name|list
operator|.
name|size
argument_list|()
argument_list|)
expr_stmt|;
name|assertEquals
argument_list|(
literal|"b"
argument_list|,
name|list
operator|.
name|get
argument_list|(
literal|0
argument_list|)
argument_list|)
expr_stmt|;
name|assertEquals
argument_list|(
literal|"c"
argument_list|,
name|list
operator|.
name|get
argument_list|(
literal|1
argument_list|)
argument_list|)
expr_stmt|;
block|}
annotation|@
name|Test
DECL|method|createURLShouldReturnTheHeaderURIIfNotBridgeEndpoint ()
specifier|public
name|void
name|createURLShouldReturnTheHeaderURIIfNotBridgeEndpoint
parameter_list|()
throws|throws
name|URISyntaxException
block|{
name|String
name|url
init|=
name|HttpHelper
operator|.
name|createURL
argument_list|(
name|createExchangeWithOptionalCamelHttpUriHeader
argument_list|(
literal|"http://apache.org"
argument_list|,
literal|null
argument_list|)
argument_list|,
name|createHttpEndpoint
argument_list|(
literal|false
argument_list|,
literal|"http://camel.apache.org"
argument_list|)
argument_list|)
decl_stmt|;
name|assertEquals
argument_list|(
literal|"http://apache.org"
argument_list|,
name|url
argument_list|)
expr_stmt|;
block|}
annotation|@
name|Test
DECL|method|createURLShouldReturnTheEndpointURIIfBridgeEndpoint ()
specifier|public
name|void
name|createURLShouldReturnTheEndpointURIIfBridgeEndpoint
parameter_list|()
throws|throws
name|URISyntaxException
block|{
name|String
name|url
init|=
name|HttpHelper
operator|.
name|createURL
argument_list|(
name|createExchangeWithOptionalCamelHttpUriHeader
argument_list|(
literal|"http://apache.org"
argument_list|,
literal|null
argument_list|)
argument_list|,
name|createHttpEndpoint
argument_list|(
literal|true
argument_list|,
literal|"http://camel.apache.org"
argument_list|)
argument_list|)
decl_stmt|;
name|assertEquals
argument_list|(
literal|"http://camel.apache.org"
argument_list|,
name|url
argument_list|)
expr_stmt|;
block|}
annotation|@
name|Test
DECL|method|createURLShouldReturnTheEndpointURIIfNotBridgeEndpoint ()
specifier|public
name|void
name|createURLShouldReturnTheEndpointURIIfNotBridgeEndpoint
parameter_list|()
throws|throws
name|URISyntaxException
block|{
name|String
name|url
init|=
name|HttpHelper
operator|.
name|createURL
argument_list|(
name|createExchangeWithOptionalCamelHttpUriHeader
argument_list|(
literal|null
argument_list|,
literal|null
argument_list|)
argument_list|,
name|createHttpEndpoint
argument_list|(
literal|false
argument_list|,
literal|"http://camel.apache.org"
argument_list|)
argument_list|)
decl_stmt|;
name|assertEquals
argument_list|(
literal|"http://camel.apache.org"
argument_list|,
name|url
argument_list|)
expr_stmt|;
block|}
annotation|@
name|Test
DECL|method|createURLShouldReturnTheEndpointURIWithHeaderHttpPathAndAddOneSlash ()
specifier|public
name|void
name|createURLShouldReturnTheEndpointURIWithHeaderHttpPathAndAddOneSlash
parameter_list|()
throws|throws
name|URISyntaxException
block|{
name|String
name|url
init|=
name|HttpHelper
operator|.
name|createURL
argument_list|(
name|createExchangeWithOptionalCamelHttpUriHeader
argument_list|(
literal|null
argument_list|,
literal|"search"
argument_list|)
argument_list|,
name|createHttpEndpoint
argument_list|(
literal|true
argument_list|,
literal|"http://www.google.com"
argument_list|)
argument_list|)
decl_stmt|;
name|assertEquals
argument_list|(
literal|"http://www.google.com/search"
argument_list|,
name|url
argument_list|)
expr_stmt|;
block|}
annotation|@
name|Test
DECL|method|createURLShouldReturnTheEndpointURIWithHeaderHttpPathAndRemoveOneSlash ()
specifier|public
name|void
name|createURLShouldReturnTheEndpointURIWithHeaderHttpPathAndRemoveOneSlash
parameter_list|()
throws|throws
name|URISyntaxException
block|{
name|String
name|url
init|=
name|HttpHelper
operator|.
name|createURL
argument_list|(
name|createExchangeWithOptionalCamelHttpUriHeader
argument_list|(
literal|null
argument_list|,
literal|"/search"
argument_list|)
argument_list|,
name|createHttpEndpoint
argument_list|(
literal|true
argument_list|,
literal|"http://www.google.com/"
argument_list|)
argument_list|)
decl_stmt|;
name|assertEquals
argument_list|(
literal|"http://www.google.com/search"
argument_list|,
name|url
argument_list|)
expr_stmt|;
block|}
annotation|@
name|Test
DECL|method|createMethodAlwaysUseUserChoosenMethod ()
specifier|public
name|void
name|createMethodAlwaysUseUserChoosenMethod
parameter_list|()
throws|throws
name|URISyntaxException
block|{
name|HttpMethods
name|method
init|=
name|HttpHelper
operator|.
name|createMethod
argument_list|(
name|createExchangeWithOptionalHttpQueryAndHttpMethodHeader
argument_list|(
literal|"q=camel"
argument_list|,
name|HttpMethods
operator|.
name|POST
argument_list|)
argument_list|,
name|createHttpEndpoint
argument_list|(
literal|true
argument_list|,
literal|"http://www.google.com/search"
argument_list|)
argument_list|,
literal|false
argument_list|)
decl_stmt|;
name|assertEquals
argument_list|(
name|HttpMethods
operator|.
name|POST
argument_list|,
name|method
argument_list|)
expr_stmt|;
block|}
annotation|@
name|Test
DECL|method|createMethodUseGETIfQueryIsProvidedInHeader ()
specifier|public
name|void
name|createMethodUseGETIfQueryIsProvidedInHeader
parameter_list|()
throws|throws
name|URISyntaxException
block|{
name|HttpMethods
name|method
init|=
name|HttpHelper
operator|.
name|createMethod
argument_list|(
name|createExchangeWithOptionalHttpQueryAndHttpMethodHeader
argument_list|(
literal|"q=camel"
argument_list|,
literal|null
argument_list|)
argument_list|,
name|createHttpEndpoint
argument_list|(
literal|true
argument_list|,
literal|"http://www.google.com/search"
argument_list|)
argument_list|,
literal|false
argument_list|)
decl_stmt|;
name|assertEquals
argument_list|(
name|HttpMethods
operator|.
name|GET
argument_list|,
name|method
argument_list|)
expr_stmt|;
block|}
annotation|@
name|Test
DECL|method|createMethodUseGETIfQueryIsProvidedInEndpointURI ()
specifier|public
name|void
name|createMethodUseGETIfQueryIsProvidedInEndpointURI
parameter_list|()
throws|throws
name|URISyntaxException
block|{
name|HttpMethods
name|method
init|=
name|HttpHelper
operator|.
name|createMethod
argument_list|(
name|createExchangeWithOptionalHttpQueryAndHttpMethodHeader
argument_list|(
literal|null
argument_list|,
literal|null
argument_list|)
argument_list|,
name|createHttpEndpoint
argument_list|(
literal|true
argument_list|,
literal|"http://www.google.com/search?q=test"
argument_list|)
argument_list|,
literal|false
argument_list|)
decl_stmt|;
name|assertEquals
argument_list|(
name|HttpMethods
operator|.
name|GET
argument_list|,
name|method
argument_list|)
expr_stmt|;
block|}
annotation|@
name|Test
DECL|method|createMethodUseGETIfNoneQueryOrPayloadIsProvided ()
specifier|public
name|void
name|createMethodUseGETIfNoneQueryOrPayloadIsProvided
parameter_list|()
throws|throws
name|URISyntaxException
block|{
name|HttpMethods
name|method
init|=
name|HttpHelper
operator|.
name|createMethod
argument_list|(
name|createExchangeWithOptionalHttpQueryAndHttpMethodHeader
argument_list|(
literal|null
argument_list|,
literal|null
argument_list|)
argument_list|,
name|createHttpEndpoint
argument_list|(
literal|true
argument_list|,
literal|"http://www.google.com/search"
argument_list|)
argument_list|,
literal|false
argument_list|)
decl_stmt|;
name|assertEquals
argument_list|(
name|HttpMethods
operator|.
name|GET
argument_list|,
name|method
argument_list|)
expr_stmt|;
block|}
annotation|@
name|Test
DECL|method|createMethodUsePOSTIfNoneQueryButPayloadIsProvided ()
specifier|public
name|void
name|createMethodUsePOSTIfNoneQueryButPayloadIsProvided
parameter_list|()
throws|throws
name|URISyntaxException
block|{
name|HttpMethods
name|method
init|=
name|HttpHelper
operator|.
name|createMethod
argument_list|(
name|createExchangeWithOptionalHttpQueryAndHttpMethodHeader
argument_list|(
literal|null
argument_list|,
literal|null
argument_list|)
argument_list|,
name|createHttpEndpoint
argument_list|(
literal|true
argument_list|,
literal|"http://www.google.com/search"
argument_list|)
argument_list|,
literal|true
argument_list|)
decl_stmt|;
name|assertEquals
argument_list|(
name|HttpMethods
operator|.
name|POST
argument_list|,
name|method
argument_list|)
expr_stmt|;
block|}
annotation|@
name|Test
DECL|method|createURLShouldNotRemoveTrailingSlash ()
specifier|public
name|void
name|createURLShouldNotRemoveTrailingSlash
parameter_list|()
throws|throws
name|Exception
block|{
name|String
name|url
init|=
name|HttpHelper
operator|.
name|createURL
argument_list|(
name|createExchangeWithOptionalCamelHttpUriHeader
argument_list|(
literal|null
argument_list|,
literal|"/"
argument_list|)
argument_list|,
name|createHttpEndpoint
argument_list|(
literal|true
argument_list|,
literal|"http://www.google.com"
argument_list|)
argument_list|)
decl_stmt|;
name|assertEquals
argument_list|(
literal|"http://www.google.com/"
argument_list|,
name|url
argument_list|)
expr_stmt|;
block|}
annotation|@
name|Test
DECL|method|createURLShouldAddPathAndQueryParamsAndSlash ()
specifier|public
name|void
name|createURLShouldAddPathAndQueryParamsAndSlash
parameter_list|()
throws|throws
name|Exception
block|{
name|String
name|url
init|=
name|HttpHelper
operator|.
name|createURL
argument_list|(
name|createExchangeWithOptionalCamelHttpUriHeader
argument_list|(
literal|null
argument_list|,
literal|"search"
argument_list|)
argument_list|,
name|createHttpEndpoint
argument_list|(
literal|true
argument_list|,
literal|"http://www.google.com/context?test=true"
argument_list|)
argument_list|)
decl_stmt|;
name|assertEquals
argument_list|(
literal|"http://www.google.com/context/search?test=true"
argument_list|,
name|url
argument_list|)
expr_stmt|;
block|}
annotation|@
name|Test
DECL|method|createURLShouldAddPathAndQueryParamsAndRemoveDuplicateSlash ()
specifier|public
name|void
name|createURLShouldAddPathAndQueryParamsAndRemoveDuplicateSlash
parameter_list|()
throws|throws
name|Exception
block|{
name|String
name|url
init|=
name|HttpHelper
operator|.
name|createURL
argument_list|(
name|createExchangeWithOptionalCamelHttpUriHeader
argument_list|(
literal|null
argument_list|,
literal|"/search"
argument_list|)
argument_list|,
name|createHttpEndpoint
argument_list|(
literal|true
argument_list|,
literal|"http://www.google.com/context/?test=true"
argument_list|)
argument_list|)
decl_stmt|;
name|assertEquals
argument_list|(
literal|"http://www.google.com/context/search?test=true"
argument_list|,
name|url
argument_list|)
expr_stmt|;
block|}
annotation|@
name|Test
DECL|method|testIsStatusCodeOkSimpleRange ()
specifier|public
name|void
name|testIsStatusCodeOkSimpleRange
parameter_list|()
throws|throws
name|Exception
block|{
name|assertFalse
argument_list|(
name|HttpHelper
operator|.
name|isStatusCodeOk
argument_list|(
literal|199
argument_list|,
literal|"200-299"
argument_list|)
argument_list|)
expr_stmt|;
name|assertTrue
argument_list|(
name|HttpHelper
operator|.
name|isStatusCodeOk
argument_list|(
literal|200
argument_list|,
literal|"200-299"
argument_list|)
argument_list|)
expr_stmt|;
name|assertTrue
argument_list|(
name|HttpHelper
operator|.
name|isStatusCodeOk
argument_list|(
literal|299
argument_list|,
literal|"200-299"
argument_list|)
argument_list|)
expr_stmt|;
name|assertFalse
argument_list|(
name|HttpHelper
operator|.
name|isStatusCodeOk
argument_list|(
literal|300
argument_list|,
literal|"200-299"
argument_list|)
argument_list|)
expr_stmt|;
name|assertFalse
argument_list|(
name|HttpHelper
operator|.
name|isStatusCodeOk
argument_list|(
literal|300
argument_list|,
literal|"301-304"
argument_list|)
argument_list|)
expr_stmt|;
name|assertTrue
argument_list|(
name|HttpHelper
operator|.
name|isStatusCodeOk
argument_list|(
literal|301
argument_list|,
literal|"301-304"
argument_list|)
argument_list|)
expr_stmt|;
name|assertTrue
argument_list|(
name|HttpHelper
operator|.
name|isStatusCodeOk
argument_list|(
literal|304
argument_list|,
literal|"301-304"
argument_list|)
argument_list|)
expr_stmt|;
name|assertFalse
argument_list|(
name|HttpHelper
operator|.
name|isStatusCodeOk
argument_list|(
literal|305
argument_list|,
literal|"301-304"
argument_list|)
argument_list|)
expr_stmt|;
block|}
annotation|@
name|Test
DECL|method|testIsStatusCodeOkComplexRange ()
specifier|public
name|void
name|testIsStatusCodeOkComplexRange
parameter_list|()
throws|throws
name|Exception
block|{
name|assertFalse
argument_list|(
name|HttpHelper
operator|.
name|isStatusCodeOk
argument_list|(
literal|199
argument_list|,
literal|"200-299,404,301-304"
argument_list|)
argument_list|)
expr_stmt|;
name|assertTrue
argument_list|(
name|HttpHelper
operator|.
name|isStatusCodeOk
argument_list|(
literal|200
argument_list|,
literal|"200-299,404,301-304"
argument_list|)
argument_list|)
expr_stmt|;
name|assertTrue
argument_list|(
name|HttpHelper
operator|.
name|isStatusCodeOk
argument_list|(
literal|299
argument_list|,
literal|"200-299,404,301-304"
argument_list|)
argument_list|)
expr_stmt|;
name|assertFalse
argument_list|(
name|HttpHelper
operator|.
name|isStatusCodeOk
argument_list|(
literal|300
argument_list|,
literal|"200-299,404,301-304"
argument_list|)
argument_list|)
expr_stmt|;
name|assertTrue
argument_list|(
name|HttpHelper
operator|.
name|isStatusCodeOk
argument_list|(
literal|301
argument_list|,
literal|"200-299,404,301-304"
argument_list|)
argument_list|)
expr_stmt|;
name|assertTrue
argument_list|(
name|HttpHelper
operator|.
name|isStatusCodeOk
argument_list|(
literal|304
argument_list|,
literal|"200-299,404,301-304"
argument_list|)
argument_list|)
expr_stmt|;
name|assertFalse
argument_list|(
name|HttpHelper
operator|.
name|isStatusCodeOk
argument_list|(
literal|305
argument_list|,
literal|"200-299,404,301-304"
argument_list|)
argument_list|)
expr_stmt|;
name|assertTrue
argument_list|(
name|HttpHelper
operator|.
name|isStatusCodeOk
argument_list|(
literal|404
argument_list|,
literal|"200-299,404,301-304"
argument_list|)
argument_list|)
expr_stmt|;
block|}
DECL|method|createExchangeWithOptionalHttpQueryAndHttpMethodHeader (String httpQuery, HttpMethods httpMethod)
specifier|private
name|Exchange
name|createExchangeWithOptionalHttpQueryAndHttpMethodHeader
parameter_list|(
name|String
name|httpQuery
parameter_list|,
name|HttpMethods
name|httpMethod
parameter_list|)
block|{
name|CamelContext
name|context
init|=
operator|new
name|DefaultCamelContext
argument_list|()
decl_stmt|;
name|Exchange
name|exchange
init|=
operator|new
name|DefaultExchange
argument_list|(
name|context
argument_list|)
decl_stmt|;
name|Message
name|inMsg
init|=
name|exchange
operator|.
name|getIn
argument_list|()
decl_stmt|;
if|if
condition|(
name|httpQuery
operator|!=
literal|null
condition|)
block|{
name|inMsg
operator|.
name|setHeader
argument_list|(
name|Exchange
operator|.
name|HTTP_QUERY
argument_list|,
name|httpQuery
argument_list|)
expr_stmt|;
block|}
if|if
condition|(
name|httpMethod
operator|!=
literal|null
condition|)
block|{
name|inMsg
operator|.
name|setHeader
argument_list|(
name|Exchange
operator|.
name|HTTP_METHOD
argument_list|,
name|httpMethod
argument_list|)
expr_stmt|;
block|}
return|return
name|exchange
return|;
block|}
DECL|method|createExchangeWithOptionalCamelHttpUriHeader (String endpointURI, String httpPath)
specifier|private
name|Exchange
name|createExchangeWithOptionalCamelHttpUriHeader
parameter_list|(
name|String
name|endpointURI
parameter_list|,
name|String
name|httpPath
parameter_list|)
throws|throws
name|URISyntaxException
block|{
name|CamelContext
name|context
init|=
operator|new
name|DefaultCamelContext
argument_list|()
decl_stmt|;
name|DefaultExchange
name|exchange
init|=
operator|new
name|DefaultExchange
argument_list|(
name|context
argument_list|)
decl_stmt|;
name|Message
name|inMsg
init|=
name|exchange
operator|.
name|getIn
argument_list|()
decl_stmt|;
if|if
condition|(
name|endpointURI
operator|!=
literal|null
condition|)
block|{
name|inMsg
operator|.
name|setHeader
argument_list|(
name|Exchange
operator|.
name|HTTP_URI
argument_list|,
name|endpointURI
argument_list|)
expr_stmt|;
block|}
if|if
condition|(
name|httpPath
operator|!=
literal|null
condition|)
block|{
name|inMsg
operator|.
name|setHeader
argument_list|(
name|Exchange
operator|.
name|HTTP_PATH
argument_list|,
name|httpPath
argument_list|)
expr_stmt|;
block|}
return|return
name|exchange
return|;
block|}
DECL|method|createHttpEndpoint (boolean bridgeEndpoint, String endpointURI)
specifier|private
name|HttpEndpoint
name|createHttpEndpoint
parameter_list|(
name|boolean
name|bridgeEndpoint
parameter_list|,
name|String
name|endpointURI
parameter_list|)
throws|throws
name|URISyntaxException
block|{
name|HttpEndpoint
name|endpoint
init|=
operator|new
name|HttpEndpoint
argument_list|()
decl_stmt|;
name|endpoint
operator|.
name|setBridgeEndpoint
argument_list|(
name|bridgeEndpoint
argument_list|)
expr_stmt|;
if|if
condition|(
name|endpointURI
operator|!=
literal|null
condition|)
block|{
name|endpoint
operator|.
name|setHttpUri
argument_list|(
operator|new
name|URI
argument_list|(
name|endpointURI
argument_list|)
argument_list|)
expr_stmt|;
block|}
return|return
name|endpoint
return|;
block|}
block|}
end_class
end_unit
|
albinvass/patternfly-react-side-effects | dist/esm/components/Pagination/Pagination.js | import paginate from './paginate';
import { PAGINATION_VIEW, PAGINATION_VIEW_TYPES } from './PaginationConstants';
import Pager from './Pager';
import Paginator from './Paginator';
import PaginationRow from './PaginationRow';
import PaginationRowAmountOfPages from './PaginationRowAmountOfPages';
import PaginationRowArrowIcon from './PaginationRowArrowIcon';
import PaginationRowBack from './PaginationRowBack';
import PaginationRowButtonGroup from './PaginationRowButtonGroup';
import PaginationRowForward from './PaginationRowForward';
import PaginationRowItems from './PaginationRowItems';
/**
 * Barrel object bundling the pagination building blocks behind a single
 * `Pagination` namespace.  Every value is re-exported unchanged; the
 * `PaginationRow*` components are exposed under shorter `Row`-prefixed keys.
 */
export const Pagination = {
  paginate: paginate,
  Pager: Pager,
  Paginator: Paginator,
  PAGINATION_VIEW: PAGINATION_VIEW,
  PAGINATION_VIEW_TYPES: PAGINATION_VIEW_TYPES,
  Row: PaginationRow,
  RowAmountOfPages: PaginationRowAmountOfPages,
  RowArrowIcon: PaginationRowArrowIcon,
  RowBack: PaginationRowBack,
  RowButtonGroup: PaginationRowButtonGroup,
  RowForward: PaginationRowForward,
  RowItems: PaginationRowItems
};
IB-14/HealthMainz | client/src/components/patient/Auth/HealthMainz/Public/main.js | let loginForm = $('.login-page');
let signUpForm = $('.signUp');
let loginSection = $('.login-section');
let signUpSection = $('.signup-section');
let loginBtn = $('.login-btn');
let signUpBtn = $('.signup-btn');
let message = $('.message');
let adminSection = $('.admin-section');
let passcodeSection = $('.passcode');
let adminSignup = $('#admin-signup');
let okay = $('.okay');
let adminSignupBtn = $('.admin-signup-btn');
let adminSignUpForm = $('.admin-form');
let adminLoginForm = $('.admin-login');
let adminLoginBtn = $('.admin-login-btn');
let adminLogin = $('#admin-login');
// FRONTEND SECTION JS ###########################
// FRONTEND SECTION JS ###########################
signUpForm.hide();
signUpSection.on('click', function(e){
e.preventDefault();
loginForm.hide();
signUpForm.show();
signUpSection.addClass('selected');
loginSection.removeClass('selected');
});
loginSection.on('click', function(e){
e.preventDefault();
loginForm.show();
signUpForm.hide();
signUpSection.removeClass('selected');
loginSection.addClass('selected');
});
adminSignup.on('click', function(){
adminSection.addClass('active')
});
let close = $('.close-signup');
close.on('click', function(){
adminSection.removeClass('active');
});
okay.on('click', function(){
let passcode = $('#passcode').val();
let passcodeMessage = $('.passcode-message');
if(passcode == 'qwerty')
{
passcodeMessage.text('Redirecting...');
passcodeMessage.css('color', 'green');
setTimeout(function(){
adminSignUpForm.addClass('active');
}, 3000);
}
else{
passcodeMessage.text('Wrong Password');
passcodeMessage.css('color', 'red');
}
});
adminLogin.on('click', function(){
adminLoginForm.addClass('active');
});
let closeAdmin = $('.close-admin');
closeAdmin.on('click', function(){
adminLoginForm.removeClass('active');
});
// FETCH REQUESTS START HERE ################################################################################
// FETCH REQUESTS START HERE ################################################################################
$(document).ready(function(){
console.log('ready')
// Email Verification
$("#mail").keyup(function () {
var check = $("#mail").val();
IsEmail(check);
})
$("#admin-mail").keyup(function () {
var check = $("#admin-mail").val();
IsEmail(check);
})
$("#admin-login-mail").keyup(function () {
var check = $("#admin-login-mail").val();
IsEmail(check);
})
$('#username').keyup(function(){
var check = $("#username").val();
IsEmail(check);
})
var isemail = false;
function IsEmail(email) {
var regex = /^([a-zA-Z0-9_\.\-\+])+\@(([a-zA-Z0-9\-])+\.)+([a-zA-Z0-9]{2,4})+$/;
if (!regex.test(email)) {
isemail = false;
$("#mail").css("border", "2px solid red");
$("#username").css("border", "2px solid red");
$("#admin-mail").css("border", "2px solid red");
$("#admin-login-mail").css("border", "2px solid red");
$(signUpBtn).css("background-color", "grey");
$(loginBtn).css("background-color", "grey");
$(adminSignupBtn).css("background-color", "grey");
adminLoginBtn.css("background-color", "grey");
// $(adminSignupBtn).css("cursor", "not-allowed");
// $(loginBtn).css("cursor", "not-allowed");
// $(signUpBtn).css("cursor", "not-allowed");
// $(adminSignupBtn).off('click');
} else {
isemail = true;
$("#mail").css("border", "2px solid green");
$("#username").css("border", "2px solid green");
$("#admin-mail").css("border", "2px solid green");
$("#admin-login-mail").css("border", "2px solid green");
$(signUpBtn).css("background-color", "#8C61FF");
$(loginBtn).css("background-color", "#8C61FF");
$(adminSignupBtn).css("background-color", "#8C61FF");
$(adminLoginBtn).css("background-color", "#8C61FF");
}
}
// Username Verification
$('#new-username').keyup(function(){
let check = $('#new-username').val();
IsUser(check);
})
$('#admin-username').keyup(function(){
let check = $('#admin-username').val();
IsUser(check);
})
isUsername = false;
function IsUser(username){
let userRegex = /^[a-zA-Z0-9]+$/;
if(!userRegex.test(username))
{
isUsername = false;
$('#new-username').css("border", "2px solid red");
$('#admin-username').css("border", "2px solid red");
signUpBtn.css("background-color", "grey");
adminSignupBtn.css("background-color", "grey");
}
else
{
isUsername = true;
$('#new-username').css("border", "2px solid green");
$('#admin-username').css("border", "2px solid green");
signUpBtn.css("background-color", "#8C61FF");
adminSignupBtn.css("background-color", "#8C61FF");
}
}
loginBtn.on('click', function(e){
e.preventDefault();
let mail = $('#username');
let password = <PASSWORD>');
let form = $(".login-form");
if (isemail)
{
var myHeaders = new Headers();
myHeaders.append("Content-Type", "application/json");
var raw = JSON.stringify({email:mail.val() , password:password.val()});
var requestOptions = {
method: 'POST',
headers: myHeaders,
body: raw,
redirect: 'follow'
};
fetch("https://hackjudge.herokuapp.com/admin/login/", requestOptions)
.then(response => response.json())
.then(result => {
console.log(result)
sessionStorage.setItem("token", result["token"]);
form.trigger("reset");
if(result["message"] ==="Auth failed")
{
message.text("Incorrect Email or Password");
message.css('color', 'red');
message.css('margin-bottom', '10px');
}
else{
message.text('');
swal("Success", "You're all set!", "success");
setTimeout(function(){
location.href = "./files/user.html"
}, 2500)
}
})
.catch(error => {
console.log('error', error)
swal("Aww snap!", "some error occurred!", "error");
});
}
});
signUpBtn.on('click', function(e){
e.preventDefault();
let email = $('#mail');
let username = $('#new-username');
let password = <PASSWORD>');
let confirmPassword = <PASSWORD>');
let form = $('.signup-form');
if(isemail && password.val()==confirmPassword.val())
{
var myHeaders = new Headers();
myHeaders.append("Content-Type", "application/json");
var raw = JSON.stringify({email: email.val(), password:<PASSWORD>() ,isAdmin:false});
var requestOptions = {
method: 'POST',
headers: myHeaders,
body: raw,
redirect: 'follow'
};
fetch("https://hackjudge.herokuapp.com/admin/signup", requestOptions)
.then(response => response.json())
.then(result => {
console.log(result)
message.text('');
form.trigger("reset");
swal("Success", "You're all set!", "success");
// console.log(result["token"]);
setTimeout(function(){
location.href = "./files/user.html"
}, 2500)
sessionStorage.setItem("token", result["token"]);
})
.catch(error => {
console.log('error', error)
swal("Aww snap!", "some error occurred!", "error");
});
}
else{
message.text("Passswords Don't Match");
message.css('color', 'red');
message.css('margin-bottom', '10px');
}
});
adminSignupBtn.on('click', function(e){
e.preventDefault();
let email = $('#admin-mail');
let username = $('#admin-username');
let password = <PASSWORD>');
let confirmPassword = <PASSWORD>');
let form = $('.admin-signup-form');
if(isemail && password.val()==confirmPassword.val())
{
var myHeaders = new Headers();
myHeaders.append("Content-Type", "application/json");
var raw = JSON.stringify({email: email.val(), password:password.val() ,isAdmin:true});
var requestOptions = {
method: 'POST',
headers: myHeaders,
body: raw,
redirect: 'follow'
};
fetch("https://hackjudge.herokuapp.com/admin/signup", requestOptions)
.then(response => response.json())
.then(result => {
console.log(result)
message.text('');
form.trigger("reset");
swal("Success", "You're all set!", "success");
// console.log(result["token"]);
setTimeout(function(){
location.href = './files/events.html'
}, 2500);
sessionStorage.setItem("admin-token", result["token"]);
})
.catch(error => {
console.log('error', error)
swal("Aww snap!", "some error occurred!", "error");
});
}
else{
message.text("Passswords Don't Match");
message.css('color', 'red');
message.css('margin-bottom', '10px');
}
});
adminLoginBtn.on('click', function(e){
e.preventDefault();
let mail = $('#admin-login-mail');
let password = $('#<PASSWORD>');
let form = $('.admin-login-form');
let messageBox = $('.admin-message');
if (isemail)
{
var myHeaders = new Headers();
myHeaders.append("Content-Type", "application/json");
var raw = JSON.stringify({email:mail.val() , password:password.val()});
var requestOptions = {
method: 'POST',
headers: myHeaders,
body: raw,
redirect: 'follow'
};
fetch("https://hackjudge.herokuapp.com/admin/login/", requestOptions)
.then(response => response.json())
.then(result => {
console.log(result)
sessionStorage.setItem("admin-token", result["token"]);
form.trigger("reset");
if(result["message"]==="Auth failed")
{
messageBox.text("Incorrect email or Password");
messageBox.css('color', 'red');
messageBox.css('margin-bottom', '10px');
}
else{
messageBox.text('');
swal("Success", "You're all set!", "success");
setTimeout(function(){
location.href = "./files/events.html"
}, 2500);
}
})
.catch(error => {
console.log('error', error)
swal("Aww snap!", "some error occurred!", "error");
});
}
});
});
|
ufoscout/jpattern | core/src/test/java/com/jpattern/core/textfiles/FileRenameCommandTest.java | <filename>core/src/test/java/com/jpattern/core/textfiles/FileRenameCommandTest.java<gh_stars>0
package com.jpattern.core.textfiles;
import java.util.Date;
import com.jpattern.core.BaseApplicationTest;
import com.jpattern.core.IProvider;
import com.jpattern.core.command.ACommand;
import com.jpattern.core.textfiles.FileRenameCommand;
import com.jpattern.core.textfiles.IFile;
import com.jpattern.core.textfiles.IFileReader;
import com.jpattern.core.textfiles.local.LocalResource;
import com.jpattern.core.util.CharacterEncoding;
/**
*
* @author <NAME>'
*
* 18/giu/2010
*/
/**
 * JUnit 3 tests for {@code FileRenameCommand} against a {@code LocalResource}:
 * a successful rename, a rename of a missing source file, and a rename onto an
 * already-existing target.  File names are suffixed with the current timestamp
 * so repeated runs do not collide on leftover files.
 */
public class FileRenameCommandTest extends BaseApplicationTest {

    protected void setUp() throws Exception {
        super.setUp();
    }

    protected void tearDown() throws Exception {
        super.tearDown();
    }

    /**
     * Renaming an existing file succeeds: the old name disappears, the new
     * name exists, and the content ("ciao" + timestamp) is preserved.
     */
    public void testRename1() throws Exception {
        long now = new Date().getTime();
        String oldName = "oldFileName" + now;
        String newName = "newFileName" + now;
        String path = getTestOutputBasePath();
        System.out.println("test path: " + path);
        LocalResource resource = new LocalResource( path , CharacterEncoding.UTF_8 );
        assertTrue( resource.isValid() );
        IFile oldfile = resource.create(oldName, "ciao" + now);
        assertTrue( oldfile.exists() );
        ACommand<IProvider> command = new FileRenameCommand(new StringBuffer(oldName), new StringBuffer(newName), resource);
        assertTrue( command.exec(getProvider()).isValid() );
        assertFalse( oldfile.exists() );
        IFile newfile = resource.getFile( newName );
        assertTrue( newfile.exists() );
        IFileReader fileReader = newfile.getFileReader();
        assertEquals( "ciao" + now , fileReader.getFullText() );
        fileReader.close();
        // Clean up the renamed file so the test output directory stays empty.
        assertTrue( resource.delete(newName) );
    }

    /** Renaming a non-existent source file must report an invalid result. */
    public void testRename2() throws Exception {
        long now = new Date().getTime();
        String oldName = "inesistentFileName" + now;
        String newName = "newFileName" + now;
        String path = getTestOutputBasePath();
        System.out.println("test path: " + path);
        LocalResource resource = new LocalResource( path , CharacterEncoding.UTF_8 );
        assertTrue( resource.isValid() );
        IFile oldfile = resource.getFile(oldName);
        assertFalse( oldfile.exists() );
        ACommand<IProvider> command = new FileRenameCommand(new StringBuffer(oldName), new StringBuffer(newName), resource);
        assertFalse( command.exec(getProvider()).isValid() );
    }

    /**
     * Renaming onto an already-existing target must fail and leave both the
     * source and the target files untouched.
     */
    public void testRename3() throws Exception {
        long now = new Date().getTime();
        String oldName = "oldFileName" + now;
        String newName = "newFileName" + now;
        String path = getTestOutputBasePath();
        System.out.println("test path: " + path);
        LocalResource resource = new LocalResource( path , CharacterEncoding.UTF_8 );
        assertTrue( resource.isValid() );
        IFile oldfile = resource.create(oldName, "ciao" + now);
        assertTrue( oldfile.exists() );
        IFile newfile = resource.create(newName, "ciao" + now);
        assertTrue( newfile.exists() );
        ACommand<IProvider> command = new FileRenameCommand(new StringBuffer(oldName), new StringBuffer(newName), resource);
        assertFalse( command.exec(getProvider()).isValid() );
        assertTrue( oldfile.exists() );
        assertTrue( newfile.exists() );
        // Clean up both fixtures.
        assertTrue( resource.delete(oldName) );
        assertTrue( resource.delete(newName) );
    }
}
|
getrdbc/rdbc-pgsql | rdbc-pgsql-core/src/main/scala/io/rdbc/pgsql/core/internal/fsm/streaming/StrmPullingRows.scala | <gh_stars>10-100
/*
* Copyright 2016 rdbc contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.rdbc.pgsql.core.internal.fsm.streaming
import io.rdbc.pgsql.core.ChannelWriter
import io.rdbc.pgsql.core.internal.fsm.{State, StateAction, WaitingForReady, WarningCollection}
import io.rdbc.pgsql.core.internal.protocol.messages.backend._
import io.rdbc.pgsql.core.internal.protocol.messages.frontend.ColName
import io.rdbc.pgsql.core.internal.{PgMsgHandler, PgRowPublisher, PortalDescData}
import scala.concurrent.ExecutionContext
/** FSM state active while rows are being pulled from an executed portal.
  *
  * Routes backend messages of an extended-query result stream: `DataRow`s are
  * forwarded to the [[io.rdbc.pgsql.core.internal.PgRowPublisher]], and
  * `CommandComplete` / `EmptyQueryResponse` finish the stream, fulfilling the
  * rows-affected and warnings promises carried in `afterDescData`.
  *
  * @param txMgmt        when true this connection manages the transaction, so
  *                      completion goes through [[StrmPendingCommit]]
  * @param afterDescData row description plus the result promises to fulfil
  * @param publisher     reactive-streams publisher the rows are pushed into
  */
private[core]
class StrmPullingRows private[fsm](txMgmt: Boolean, afterDescData: PortalDescData, publisher: PgRowPublisher)
                                  (implicit out: ChannelWriter, ec: ExecutionContext)
  extends State
    with WarningCollection {

  //TODO warnings should be collected in all extended query states

  /* Column name -> positional index, computed once from the row description
   * so per-row by-name lookups in the publisher are O(1). */
  private[this] val nameIdxMapping: Map[ColName, Int] = {
    Map(afterDescData.rowDesc.colDescs.zipWithIndex.map {
      case (cdesc, idx) => cdesc.name -> idx
    }: _*)
  }

  /* Escalate publisher-side fatal errors into this state's error handling. */
  publisher.fatalErrNotifier = (msg, ex) => {
    logger.error(s"Fatal error occured in the publisher: $msg")
    onFatalError(ex)
  }

  private[this] val warningsPromise = afterDescData.warningsPromise
  private[this] val rowsAffectedPromise = afterDescData.rowsAffectedPromise

  val msgHandler: PgMsgHandler = {
    // Portal ran out of its row budget; more rows will be requested later.
    case PortalSuspended => stay

    case dr: DataRow =>
      publisher.handleRow(dr, afterDescData.rowDesc, nameIdxMapping)
      stay

    // Backend idle again: un-pause the publisher so it can keep requesting
    // rows (resume() semantics are defined by PgRowPublisher).
    case ReadyForQuery(_) =>
      publisher.resume()
      stay

    case EmptyQueryResponse =>
      completePulling(0L)

    case CommandComplete(_, rowsAffected) =>
      // The backend may omit the affected-row count; treat that as 0.
      completePulling(rowsAffected.map(_.toLong).getOrElse(0L))

    //TODO distinguish between cancelled subscription and not-cancelled
    // if the subscription was cancelled no publisher.complete or publisher.failure
    // should happen
    case CloseComplete => //TODO we use only unnamed portals, closing them is not necessary
      if (txMgmt) goto(new StrmPendingCommit(publisher))
      else goto(new WaitingForReady(
        onIdle = publisher.complete(),
        onFailure = publisher.failure)
      )
  }

  /** Fulfils both result promises and moves to the state that finishes the
    * stream: commit when this connection manages the transaction, otherwise
    * portal close followed by publisher completion. */
  private def completePulling(rowsAffected: Long): StateAction.Goto = {
    rowsAffectedPromise.success(rowsAffected)
    warningsPromise.success(warnings)
    if (txMgmt) goto(new StrmPendingCommit(publisher))
    else goto(new StrmPendingClosePortal(publisher, onIdle = publisher.complete()))
  }

  /** Propagates one failure to the row stream and both result promises. */
  private def sendFailureToClient(ex: Throwable): Unit = {
    publisher.failure(ex)
    warningsPromise.failure(ex)
    rowsAffectedPromise.failure(ex)
  }

  protected def onNonFatalError(ex: Throwable): StateAction = {
    goto(State.Streaming.queryFailed(txMgmt, publisher.portalName) {
      sendFailureToClient(ex)
    })
  }

  protected def onFatalError(ex: Throwable): Unit = {
    sendFailureToClient(ex)
  }
}
|
sarvesh-ranjan/codechef | GbusCountCodeJam/main.cpp | <gh_stars>0
#include <iostream>
#include <vector>
using namespace std;
/*
 * Code Jam "GBus count": for each test case read n bus routes, where bus i
 * serves every city in the inclusive range [citya[i], cityb[i]], then answer
 * c queries — for each queried city, how many buses serve it.
 *
 * Input:  t test cases; each has n, then n (a, b) pairs, then c, then c cities.
 * Output: "Case #k: a1 a2 ... ac" per test case.
 */
int main()
{
    int t;
    cin >> t;
    for (int tc = 1; tc <= t; tc++) {   // renamed from `i`: inner loops shadowed it
        int n;
        cin >> n;
        vector<int> citya(n, 0);
        vector<int> cityb(n, 0);
        for (int i = 0; i < n; i++) {
            cin >> citya[i] >> cityb[i];
        }

        int c;
        cin >> c;
        vector<int> count(c, 0);   // queried city numbers
        vector<int> ans(c, 0);     // buses covering each queried city
        for (int i = 0; i < c; i++)
            cin >> count[i];

        for (int j = 0; j < c; j++) {
            for (int i = 0; i < n; i++) {
                if (count[j] >= citya[i] && count[j] <= cityb[i]) {
                    // BUG FIX: was ans[i]++ — indexed by bus instead of query,
                    // producing wrong answers and writing out of bounds when n > c.
                    ans[j]++;
                }
            }
        }

        // BUG FIX: was `"Case #" << i << " :"` followed by trailing-space-separated
        // values; Code Jam judges expect exactly "Case #k: a1 a2 ... ac".
        cout << "Case #" << tc << ":";
        for (int i = 0; i < c; i++) {
            cout << " " << ans[i];
        }
        cout << endl;
    }
    return 0;
}
|
marmolak/gray386linux | src/linux-3.7.10/arch/arm/mach-ux500/include/mach/uncompress.h | <reponame>marmolak/gray386linux
/*
* Copyright (C) 2009 ST-Ericsson
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#ifndef __ASM_ARCH_UNCOMPRESS_H
#define __ASM_ARCH_UNCOMPRESS_H
#include <asm/setup.h>
#include <asm/mach-types.h>
#include <linux/io.h>
#include <linux/amba/serial.h>
#include <mach/hardware.h>
/* Base address of the decompressor console UART; set by arch_decomp_setup(). */
void __iomem *ux500_uart_base;

/*
 * Emit one character on the decompressor UART.  Silently drops output when
 * the UART is disabled (bit 0, UARTEN, clear in the control register),
 * translates '\n' into "\r\n" via recursion, and busy-waits while the
 * TX FIFO is full (flag register bit 5 — TXFF on the PL011) before writing
 * the data register.
 */
static void putc(const char c)
{
	/* Do nothing if the UART is not enabled. */
	if (!(__raw_readb(ux500_uart_base + UART011_CR) & 0x1))
		return;

	if (c == '\n')
		putc('\r');	/* CR-LF translation for serial terminals */

	while (__raw_readb(ux500_uart_base + UART01x_FR) & (1 << 5))
		barrier();
	__raw_writeb(c, ux500_uart_base + UART01x_DR);
}
/*
 * Wait for the UART to finish transmitting everything queued so far.
 * No-op when the UART is disabled; otherwise spins while the flag
 * register's busy bit (bit 3 — BUSY on the PL011) is set.
 */
static void flush(void)
{
	if (!(__raw_readb(ux500_uart_base + UART011_CR) & 0x1))
		return;
	while (__raw_readb(ux500_uart_base + UART01x_FR) & (1 << 3))
		barrier();
}
/*
 * Pick the UART used for decompressor output.  Hard-wired to UART2 on
 * U8500-based boards; see the inline comment for per-machine switching.
 */
static inline void arch_decomp_setup(void)
{
	/* Use machine_is_foo() macro if you need to switch base someday */
	ux500_uart_base = (void __iomem *)U8500_UART2_BASE;
}
#define arch_decomp_wdog() /* nothing to do here */
#endif /* __ASM_ARCH_UNCOMPRESS_H */
|
BenjaminJaume/react-redux-hooks | server/controllers/words.controller.js | const db = require("../models");
const Op = db.Sequelize.Op;
module.exports = {
create(req, res) {
const { word, language, partOfSpeech, definition, example, userId } =
req.body;
return db.Word.create({
word: word,
language: language,
partOfSpeech: partOfSpeech,
definition: definition,
example: example,
})
.then((response) => {
db.users_words
.create(
{
userId: userId,
WordId: response.id,
},
{ fields: ["userId", "WordId"] }
)
.then((response) => {
// console.log(response);
res.status(201).send(response);
})
.catch((error) => {
// console.log(error);
res.status(400).send(error);
});
})
.catch((error) => {
// console.log(error);
res.status(400).send(error);
});
},
remove(req, res) {
// console.log(req.params.wordId, req.body.userId);
const { userId } = req.body;
const { wordId } = req.params;
return db.users_words
.destroy({
where: {
userId: userId,
WordId: wordId,
},
})
.then(() => {
db.Word.destroy({
where: {
id: wordId,
},
})
.then((response) => {
// console.log(response);
res.sendStatus(200).send(response);
})
.catch((error) => {
// console.log(error);
res.sendStatus(400).send(error);
});
})
.catch((error) => {
// console.log(error);
res.sendStatus(400).send(error);
});
},
listAll(req, res) {
return db.users_words
.findAll({
where: {
userId: req.params.userId,
},
include: [
{
model: db.Word,
attributes: {
exclude: ["createdAt", "password", "updatedAt"],
},
},
],
})
.then((response) => {
// console.log(response);
res.status(200).json(response);
})
.catch((error) => {
// console.log(error);
res.status(400).json(error);
});
},
};
|
VehpuS/learning-haxe-and-haxeflixel | grid-test/export/macos/obj/src/openfl/text/TextFormat.cpp | <gh_stars>0
// Generated by Haxe 4.1.4
#include <hxcpp.h>
#ifndef INCLUDED_openfl_text_TextFormat
#include <openfl/text/TextFormat.h>
#endif
HX_DEFINE_STACK_FRAME(_hx_pos_c5f39c48023aa9de_257_new,"openfl.text.TextFormat","new",0xfdb2c999,"openfl.text.TextFormat.new","openfl/text/TextFormat.hx",257,0xd3329397)
HX_LOCAL_STACK_FRAME(_hx_pos_c5f39c48023aa9de_275_clone,"openfl.text.TextFormat","clone",0x965d8cd6,"openfl.text.TextFormat.clone","openfl/text/TextFormat.hx",275,0xd3329397)
HX_LOCAL_STACK_FRAME(_hx_pos_c5f39c48023aa9de_297___merge,"openfl.text.TextFormat","__merge",0x0ccb8551,"openfl.text.TextFormat.__merge","openfl/text/TextFormat.hx",297,0xd3329397)
HX_LOCAL_STACK_FRAME(_hx_pos_c5f39c48023aa9de_54_boot,"openfl.text.TextFormat","boot",0xf6d69cd9,"openfl.text.TextFormat.boot","openfl/text/TextFormat.hx",54,0xd3329397)
namespace openfl{
namespace text{
void TextFormat_obj::__construct(::String font, ::Dynamic size, ::Dynamic color, ::Dynamic bold, ::Dynamic italic, ::Dynamic underline,::String url,::String target, ::Dynamic align, ::Dynamic leftMargin, ::Dynamic rightMargin, ::Dynamic indent, ::Dynamic leading){
HX_STACKFRAME(&_hx_pos_c5f39c48023aa9de_257_new)
HXLINE( 258) this->font = font;
HXLINE( 259) this->size = size;
HXLINE( 260) this->color = color;
HXLINE( 261) this->bold = bold;
HXLINE( 262) this->italic = italic;
HXLINE( 263) this->underline = underline;
HXLINE( 264) this->url = url;
HXLINE( 265) this->target = target;
HXLINE( 266) this->align = align;
HXLINE( 267) this->leftMargin = leftMargin;
HXLINE( 268) this->rightMargin = rightMargin;
HXLINE( 269) this->indent = indent;
HXLINE( 270) this->leading = leading;
}
Dynamic TextFormat_obj::__CreateEmpty() { return new TextFormat_obj; }
void *TextFormat_obj::_hx_vtable = 0;
Dynamic TextFormat_obj::__Create(::hx::DynamicArray inArgs)
{
::hx::ObjectPtr< TextFormat_obj > _hx_result = new TextFormat_obj();
_hx_result->__construct(inArgs[0],inArgs[1],inArgs[2],inArgs[3],inArgs[4],inArgs[5],inArgs[6],inArgs[7],inArgs[8],inArgs[9],inArgs[10],inArgs[11],inArgs[12]);
return _hx_result;
}
bool TextFormat_obj::_hx_isInstanceOf(int inClassId) {
return inClassId==(int)0x00000001 || inClassId==(int)0x2a1c7fe1;
}
::openfl::text::TextFormat TextFormat_obj::clone(){
HX_GC_STACKFRAME(&_hx_pos_c5f39c48023aa9de_275_clone)
HXLINE( 276) ::openfl::text::TextFormat newFormat = ::openfl::text::TextFormat_obj::__alloc( HX_CTX ,this->font,this->size,this->color,this->bold,this->italic,this->underline,this->url,this->target,null(),null(),null(),null(),null());
HXLINE( 278) newFormat->align = this->align;
HXLINE( 279) newFormat->leftMargin = this->leftMargin;
HXLINE( 280) newFormat->rightMargin = this->rightMargin;
HXLINE( 281) newFormat->indent = this->indent;
HXLINE( 282) newFormat->leading = this->leading;
HXLINE( 284) newFormat->blockIndent = this->blockIndent;
HXLINE( 285) newFormat->bullet = this->bullet;
HXLINE( 286) newFormat->kerning = this->kerning;
HXLINE( 287) newFormat->letterSpacing = this->letterSpacing;
HXLINE( 288) newFormat->tabStops = this->tabStops;
HXLINE( 290) newFormat->_hx___ascent = this->_hx___ascent;
HXLINE( 291) newFormat->_hx___descent = this->_hx___descent;
HXLINE( 293) return newFormat;
}
HX_DEFINE_DYNAMIC_FUNC0(TextFormat_obj,clone,return )
void TextFormat_obj::_hx___merge( ::openfl::text::TextFormat format){
HX_STACKFRAME(&_hx_pos_c5f39c48023aa9de_297___merge)
HXLINE( 298) if (::hx::IsNotNull( format->font )) {
HXLINE( 298) this->font = format->font;
}
HXLINE( 299) if (::hx::IsNotNull( format->size )) {
HXLINE( 299) this->size = format->size;
}
HXLINE( 300) if (::hx::IsNotNull( format->color )) {
HXLINE( 300) this->color = format->color;
}
HXLINE( 301) if (::hx::IsNotNull( format->bold )) {
HXLINE( 301) this->bold = format->bold;
}
HXLINE( 302) if (::hx::IsNotNull( format->italic )) {
HXLINE( 302) this->italic = format->italic;
}
HXLINE( 303) if (::hx::IsNotNull( format->underline )) {
HXLINE( 303) this->underline = format->underline;
}
HXLINE( 304) if (::hx::IsNotNull( format->url )) {
HXLINE( 304) this->url = format->url;
}
HXLINE( 305) if (::hx::IsNotNull( format->target )) {
HXLINE( 305) this->target = format->target;
}
HXLINE( 306) if (::hx::IsNotNull( format->align )) {
HXLINE( 306) this->align = format->align;
}
HXLINE( 307) if (::hx::IsNotNull( format->leftMargin )) {
HXLINE( 307) this->leftMargin = format->leftMargin;
}
HXLINE( 308) if (::hx::IsNotNull( format->rightMargin )) {
HXLINE( 308) this->rightMargin = format->rightMargin;
}
HXLINE( 309) if (::hx::IsNotNull( format->indent )) {
HXLINE( 309) this->indent = format->indent;
}
HXLINE( 310) if (::hx::IsNotNull( format->leading )) {
HXLINE( 310) this->leading = format->leading;
}
HXLINE( 311) if (::hx::IsNotNull( format->blockIndent )) {
HXLINE( 311) this->blockIndent = format->blockIndent;
}
HXLINE( 312) if (::hx::IsNotNull( format->bullet )) {
HXLINE( 312) this->bullet = format->bullet;
}
HXLINE( 313) if (::hx::IsNotNull( format->kerning )) {
HXLINE( 313) this->kerning = format->kerning;
}
HXLINE( 314) if (::hx::IsNotNull( format->letterSpacing )) {
HXLINE( 314) this->letterSpacing = format->letterSpacing;
}
HXLINE( 315) if (::hx::IsNotNull( format->tabStops )) {
HXLINE( 315) this->tabStops = format->tabStops;
}
HXLINE( 317) if (::hx::IsNotNull( format->_hx___ascent )) {
HXLINE( 317) this->_hx___ascent = format->_hx___ascent;
}
HXLINE( 318) if (::hx::IsNotNull( format->_hx___descent )) {
HXLINE( 318) this->_hx___descent = format->_hx___descent;
}
}
HX_DEFINE_DYNAMIC_FUNC1(TextFormat_obj,_hx___merge,(void))
TextFormat_obj::TextFormat_obj()
{
}
void TextFormat_obj::__Mark(HX_MARK_PARAMS)
{
HX_MARK_BEGIN_CLASS(TextFormat);
HX_MARK_MEMBER_NAME(align,"align");
HX_MARK_MEMBER_NAME(blockIndent,"blockIndent");
HX_MARK_MEMBER_NAME(bold,"bold");
HX_MARK_MEMBER_NAME(bullet,"bullet");
HX_MARK_MEMBER_NAME(color,"color");
HX_MARK_MEMBER_NAME(font,"font");
HX_MARK_MEMBER_NAME(indent,"indent");
HX_MARK_MEMBER_NAME(italic,"italic");
HX_MARK_MEMBER_NAME(kerning,"kerning");
HX_MARK_MEMBER_NAME(leading,"leading");
HX_MARK_MEMBER_NAME(leftMargin,"leftMargin");
HX_MARK_MEMBER_NAME(letterSpacing,"letterSpacing");
HX_MARK_MEMBER_NAME(rightMargin,"rightMargin");
HX_MARK_MEMBER_NAME(size,"size");
HX_MARK_MEMBER_NAME(tabStops,"tabStops");
HX_MARK_MEMBER_NAME(target,"target");
HX_MARK_MEMBER_NAME(underline,"underline");
HX_MARK_MEMBER_NAME(url,"url");
HX_MARK_MEMBER_NAME(_hx___ascent,"__ascent");
HX_MARK_MEMBER_NAME(_hx___descent,"__descent");
HX_MARK_END_CLASS();
}
void TextFormat_obj::__Visit(HX_VISIT_PARAMS)
{
HX_VISIT_MEMBER_NAME(align,"align");
HX_VISIT_MEMBER_NAME(blockIndent,"blockIndent");
HX_VISIT_MEMBER_NAME(bold,"bold");
HX_VISIT_MEMBER_NAME(bullet,"bullet");
HX_VISIT_MEMBER_NAME(color,"color");
HX_VISIT_MEMBER_NAME(font,"font");
HX_VISIT_MEMBER_NAME(indent,"indent");
HX_VISIT_MEMBER_NAME(italic,"italic");
HX_VISIT_MEMBER_NAME(kerning,"kerning");
HX_VISIT_MEMBER_NAME(leading,"leading");
HX_VISIT_MEMBER_NAME(leftMargin,"leftMargin");
HX_VISIT_MEMBER_NAME(letterSpacing,"letterSpacing");
HX_VISIT_MEMBER_NAME(rightMargin,"rightMargin");
HX_VISIT_MEMBER_NAME(size,"size");
HX_VISIT_MEMBER_NAME(tabStops,"tabStops");
HX_VISIT_MEMBER_NAME(target,"target");
HX_VISIT_MEMBER_NAME(underline,"underline");
HX_VISIT_MEMBER_NAME(url,"url");
HX_VISIT_MEMBER_NAME(_hx___ascent,"__ascent");
HX_VISIT_MEMBER_NAME(_hx___descent,"__descent");
}
::hx::Val TextFormat_obj::__Field(const ::String &inName,::hx::PropertyAccess inCallProp)
{
switch(inName.length) {
case 3:
if (HX_FIELD_EQ(inName,"url") ) { return ::hx::Val( url ); }
break;
case 4:
if (HX_FIELD_EQ(inName,"bold") ) { return ::hx::Val( bold ); }
if (HX_FIELD_EQ(inName,"font") ) { return ::hx::Val( font ); }
if (HX_FIELD_EQ(inName,"size") ) { return ::hx::Val( size ); }
break;
case 5:
if (HX_FIELD_EQ(inName,"align") ) { return ::hx::Val( align ); }
if (HX_FIELD_EQ(inName,"color") ) { return ::hx::Val( color ); }
if (HX_FIELD_EQ(inName,"clone") ) { return ::hx::Val( clone_dyn() ); }
break;
case 6:
if (HX_FIELD_EQ(inName,"bullet") ) { return ::hx::Val( bullet ); }
if (HX_FIELD_EQ(inName,"indent") ) { return ::hx::Val( indent ); }
if (HX_FIELD_EQ(inName,"italic") ) { return ::hx::Val( italic ); }
if (HX_FIELD_EQ(inName,"target") ) { return ::hx::Val( target ); }
break;
case 7:
if (HX_FIELD_EQ(inName,"kerning") ) { return ::hx::Val( kerning ); }
if (HX_FIELD_EQ(inName,"leading") ) { return ::hx::Val( leading ); }
if (HX_FIELD_EQ(inName,"__merge") ) { return ::hx::Val( _hx___merge_dyn() ); }
break;
case 8:
if (HX_FIELD_EQ(inName,"tabStops") ) { return ::hx::Val( tabStops ); }
if (HX_FIELD_EQ(inName,"__ascent") ) { return ::hx::Val( _hx___ascent ); }
break;
case 9:
if (HX_FIELD_EQ(inName,"underline") ) { return ::hx::Val( underline ); }
if (HX_FIELD_EQ(inName,"__descent") ) { return ::hx::Val( _hx___descent ); }
break;
case 10:
if (HX_FIELD_EQ(inName,"leftMargin") ) { return ::hx::Val( leftMargin ); }
break;
case 11:
if (HX_FIELD_EQ(inName,"blockIndent") ) { return ::hx::Val( blockIndent ); }
if (HX_FIELD_EQ(inName,"rightMargin") ) { return ::hx::Val( rightMargin ); }
break;
case 13:
if (HX_FIELD_EQ(inName,"letterSpacing") ) { return ::hx::Val( letterSpacing ); }
}
return super::__Field(inName,inCallProp);
}
::hx::Val TextFormat_obj::__SetField(const ::String &inName,const ::hx::Val &inValue,::hx::PropertyAccess inCallProp)
{
switch(inName.length) {
case 3:
if (HX_FIELD_EQ(inName,"url") ) { url=inValue.Cast< ::String >(); return inValue; }
break;
case 4:
if (HX_FIELD_EQ(inName,"bold") ) { bold=inValue.Cast< ::Dynamic >(); return inValue; }
if (HX_FIELD_EQ(inName,"font") ) { font=inValue.Cast< ::String >(); return inValue; }
if (HX_FIELD_EQ(inName,"size") ) { size=inValue.Cast< ::Dynamic >(); return inValue; }
break;
case 5:
if (HX_FIELD_EQ(inName,"align") ) { align=inValue.Cast< ::Dynamic >(); return inValue; }
if (HX_FIELD_EQ(inName,"color") ) { color=inValue.Cast< ::Dynamic >(); return inValue; }
break;
case 6:
if (HX_FIELD_EQ(inName,"bullet") ) { bullet=inValue.Cast< ::Dynamic >(); return inValue; }
if (HX_FIELD_EQ(inName,"indent") ) { indent=inValue.Cast< ::Dynamic >(); return inValue; }
if (HX_FIELD_EQ(inName,"italic") ) { italic=inValue.Cast< ::Dynamic >(); return inValue; }
if (HX_FIELD_EQ(inName,"target") ) { target=inValue.Cast< ::String >(); return inValue; }
break;
case 7:
if (HX_FIELD_EQ(inName,"kerning") ) { kerning=inValue.Cast< ::Dynamic >(); return inValue; }
if (HX_FIELD_EQ(inName,"leading") ) { leading=inValue.Cast< ::Dynamic >(); return inValue; }
break;
case 8:
if (HX_FIELD_EQ(inName,"tabStops") ) { tabStops=inValue.Cast< ::Array< int > >(); return inValue; }
if (HX_FIELD_EQ(inName,"__ascent") ) { _hx___ascent=inValue.Cast< ::Dynamic >(); return inValue; }
break;
case 9:
if (HX_FIELD_EQ(inName,"underline") ) { underline=inValue.Cast< ::Dynamic >(); return inValue; }
if (HX_FIELD_EQ(inName,"__descent") ) { _hx___descent=inValue.Cast< ::Dynamic >(); return inValue; }
break;
case 10:
if (HX_FIELD_EQ(inName,"leftMargin") ) { leftMargin=inValue.Cast< ::Dynamic >(); return inValue; }
break;
case 11:
if (HX_FIELD_EQ(inName,"blockIndent") ) { blockIndent=inValue.Cast< ::Dynamic >(); return inValue; }
if (HX_FIELD_EQ(inName,"rightMargin") ) { rightMargin=inValue.Cast< ::Dynamic >(); return inValue; }
break;
case 13:
if (HX_FIELD_EQ(inName,"letterSpacing") ) { letterSpacing=inValue.Cast< ::Dynamic >(); return inValue; }
}
return super::__SetField(inName,inValue,inCallProp);
}
// ---------------------------------------------------------------------------
// NOTE(review): hxcpp-GENERATED reflection support for openfl.text.TextFormat.
// The HX_("name",xx,xx,xx,xx) macros carry precomputed string hashes; never
// edit the hash bytes by hand — regenerate from the Haxe sources instead.
// ---------------------------------------------------------------------------

// Appends the names of all reflectable member fields so Reflect.fields()
// can enumerate them at runtime, then delegates to the superclass.
void TextFormat_obj::__GetFields(Array< ::String> &outFields)
{
outFields->push(HX_("align",c5,56,91,21));
outFields->push(HX_("blockIndent",99,6d,be,33));
outFields->push(HX_("bold",85,81,1b,41));
outFields->push(HX_("bullet",42,92,90,d4));
outFields->push(HX_("color",63,71,5c,4a));
outFields->push(HX_("font",cf,5d,c0,43));
outFields->push(HX_("indent",6c,0c,f3,93));
outFields->push(HX_("italic",f0,2e,64,06));
outFields->push(HX_("kerning",cc,ba,37,b0));
outFields->push(HX_("leading",c6,32,61,09));
outFields->push(HX_("leftMargin",95,2b,7e,e9));
outFields->push(HX_("letterSpacing",3d,b7,03,f5));
outFields->push(HX_("rightMargin",2a,3d,f2,23));
outFields->push(HX_("size",c1,a0,53,4c));
outFields->push(HX_("tabStops",9c,93,ba,f2));
outFields->push(HX_("target",51,f3,ec,86));
outFields->push(HX_("underline",0c,15,d1,87));
outFields->push(HX_("url",6f,2b,59,00));
outFields->push(HX_("__ascent",7a,44,9f,e9));
outFields->push(HX_("__descent",1a,ff,8d,9a));
super::__GetFields(outFields);
};
// Per-field storage descriptors (kind, byte offset, hashed name) used by the
// cppia scripting layer to read/write members without going through __Field.
#ifdef HXCPP_SCRIPTABLE
static ::hx::StorageInfo TextFormat_obj_sMemberStorageInfo[] = {
{::hx::fsObject /* ::Dynamic */ ,(int)offsetof(TextFormat_obj,align),HX_("align",c5,56,91,21)},
{::hx::fsObject /* ::Dynamic */ ,(int)offsetof(TextFormat_obj,blockIndent),HX_("blockIndent",99,6d,be,33)},
{::hx::fsObject /* ::Dynamic */ ,(int)offsetof(TextFormat_obj,bold),HX_("bold",85,81,1b,41)},
{::hx::fsObject /* ::Dynamic */ ,(int)offsetof(TextFormat_obj,bullet),HX_("bullet",42,92,90,d4)},
{::hx::fsObject /* ::Dynamic */ ,(int)offsetof(TextFormat_obj,color),HX_("color",63,71,5c,4a)},
{::hx::fsString,(int)offsetof(TextFormat_obj,font),HX_("font",cf,5d,c0,43)},
{::hx::fsObject /* ::Dynamic */ ,(int)offsetof(TextFormat_obj,indent),HX_("indent",6c,0c,f3,93)},
{::hx::fsObject /* ::Dynamic */ ,(int)offsetof(TextFormat_obj,italic),HX_("italic",f0,2e,64,06)},
{::hx::fsObject /* ::Dynamic */ ,(int)offsetof(TextFormat_obj,kerning),HX_("kerning",cc,ba,37,b0)},
{::hx::fsObject /* ::Dynamic */ ,(int)offsetof(TextFormat_obj,leading),HX_("leading",c6,32,61,09)},
{::hx::fsObject /* ::Dynamic */ ,(int)offsetof(TextFormat_obj,leftMargin),HX_("leftMargin",95,2b,7e,e9)},
{::hx::fsObject /* ::Dynamic */ ,(int)offsetof(TextFormat_obj,letterSpacing),HX_("letterSpacing",3d,b7,03,f5)},
{::hx::fsObject /* ::Dynamic */ ,(int)offsetof(TextFormat_obj,rightMargin),HX_("rightMargin",2a,3d,f2,23)},
{::hx::fsObject /* ::Dynamic */ ,(int)offsetof(TextFormat_obj,size),HX_("size",c1,a0,53,4c)},
{::hx::fsObject /* ::Array< int > */ ,(int)offsetof(TextFormat_obj,tabStops),HX_("tabStops",9c,93,ba,f2)},
{::hx::fsString,(int)offsetof(TextFormat_obj,target),HX_("target",51,f3,ec,86)},
{::hx::fsObject /* ::Dynamic */ ,(int)offsetof(TextFormat_obj,underline),HX_("underline",0c,15,d1,87)},
{::hx::fsString,(int)offsetof(TextFormat_obj,url),HX_("url",6f,2b,59,00)},
{::hx::fsObject /* ::Dynamic */ ,(int)offsetof(TextFormat_obj,_hx___ascent),HX_("__ascent",7a,44,9f,e9)},
{::hx::fsObject /* ::Dynamic */ ,(int)offsetof(TextFormat_obj,_hx___descent),HX_("__descent",1a,ff,8d,9a)},
{ ::hx::fsUnknown, 0, null()}
};
static ::hx::StaticInfo *TextFormat_obj_sStaticStorageInfo = 0;
#endif
// All member names (fields and the clone/__merge methods) exposed through the
// runtime Class object.
static ::String TextFormat_obj_sMemberFields[] = {
HX_("align",c5,56,91,21),
HX_("blockIndent",99,6d,be,33),
HX_("bold",85,81,1b,41),
HX_("bullet",42,92,90,d4),
HX_("color",63,71,5c,4a),
HX_("font",cf,5d,c0,43),
HX_("indent",6c,0c,f3,93),
HX_("italic",f0,2e,64,06),
HX_("kerning",cc,ba,37,b0),
HX_("leading",c6,32,61,09),
HX_("leftMargin",95,2b,7e,e9),
HX_("letterSpacing",3d,b7,03,f5),
HX_("rightMargin",2a,3d,f2,23),
HX_("size",c1,a0,53,4c),
HX_("tabStops",9c,93,ba,f2),
HX_("target",51,f3,ec,86),
HX_("underline",0c,15,d1,87),
HX_("url",6f,2b,59,00),
HX_("__ascent",7a,44,9f,e9),
HX_("__descent",1a,ff,8d,9a),
HX_("clone",5d,13,63,48),
HX_("__merge",98,fb,24,70),
::String(null()) };
::hx::Class TextFormat_obj::__mClass;
// Builds and registers the runtime Class object ("openfl.text.TextFormat"):
// vtable, constructors, member tables, cast check, and scripting storage info.
void TextFormat_obj::__register()
{
TextFormat_obj _hx_dummy;
TextFormat_obj::_hx_vtable = *(void **)&_hx_dummy;
::hx::Static(__mClass) = new ::hx::Class_obj();
__mClass->mName = HX_("openfl.text.TextFormat",27,09,1e,68);
__mClass->mSuper = &super::__SGetClass();
__mClass->mConstructEmpty = &__CreateEmpty;
__mClass->mConstructArgs = &__Create;
__mClass->mGetStaticField = &::hx::Class_obj::GetNoStaticField;
__mClass->mSetStaticField = &::hx::Class_obj::SetNoStaticField;
__mClass->mStatics = ::hx::Class_obj::dupFunctions(0 /* sStaticFields */);
__mClass->mMembers = ::hx::Class_obj::dupFunctions(TextFormat_obj_sMemberFields);
__mClass->mCanCast = ::hx::TCanCast< TextFormat_obj >;
#ifdef HXCPP_SCRIPTABLE
__mClass->mMemberStorageInfo = TextFormat_obj_sMemberStorageInfo;
#endif
#ifdef HXCPP_SCRIPTABLE
__mClass->mStaticStorageInfo = TextFormat_obj_sStaticStorageInfo;
#endif
::hx::_hx_RegisterClass(__mClass->mName, __mClass);
}
// Boot-time initialization: attaches compile-time @:meta (SuppressWarnings on
// clone) to the Class object so it is visible via Haxe runtime metadata.
void TextFormat_obj::__boot()
{
{
HX_STACKFRAME(&_hx_pos_c5f39c48023aa9de_54_boot)
HXDLIN( 54) __mClass->__meta__ = ::Dynamic(::hx::Anon_obj::Create(1)
->setFixed(0,HX_("fields",79,8e,8e,80), ::Dynamic(::hx::Anon_obj::Create(1)
->setFixed(0,HX_("clone",5d,13,63,48), ::Dynamic(::hx::Anon_obj::Create(1)
->setFixed(0,HX_("SuppressWarnings",0c,d3,d2,00),::cpp::VirtualArray_obj::__new(1)->init(0,HX_("checkstyle:FieldDocComment",70,56,1b,20))))))));
}
}
} // end namespace openfl
} // end namespace text
|
catinapoke/directx-engine | BallRenderComponent.h | #pragma once
#include <wrl.h>
#include "CommonRenderData.h"
// Render-data component for a ball entity; all shared render state lives in
// the CommonRenderData base.
class BallRenderComponent :public CommonRenderData
{
public:
// Default-constructs with a null device — placeholder only.
// NOTE(review): presumably a real device must be supplied (via the other
// constructor) before this component is used for rendering; confirm callers.
BallRenderComponent() : CommonRenderData(nullptr) {};
// Builds the render data from the given D3D11 device (defined in the .cpp).
BallRenderComponent(ID3D11Device* device);
}; |
bgoonz/DS-n-Algos-Mega-Archive | JAVASCRIPT/DS-and-Algorithms-Prac/Resources/leetcode-/problems/src/heap/TheSkylineProblem.java | package heap;
import java.util.*;
/**
* Created by gouthamvidyapradhan on 13/09/2017.
*
* <p>A city's skyline is the outer contour of the silhouette formed by all the buildings in that
* city when viewed from a distance. Now suppose you are given the locations and height of all the
* buildings as shown on a cityscape photo (Figure A), write a program to output the skyline formed
* by these buildings collectively (Figure B).
*
* <p>
*
* <p>See below link for image. https://leetcode.com/problems/the-skyline-problem/description/
*
* <p>
*
* <p>Buildings Skyline Contour The geometric information of each building is represented by a
* triplet of integers [Li, Ri, Hi], where Li and Ri are the x coordinates of the left and right
* edge of the ith building, respectively, and Hi is its height. It is guaranteed that 0 ≤ Li, Ri ≤
* INT_MAX, 0 < Hi ≤ INT_MAX, and Ri - Li > 0. You may assume all buildings are perfect rectangles
* grounded on an absolutely flat surface at height 0.
*
* <p>For instance, the dimensions of all buildings in Figure A are recorded as: [ [2 9 10], [3 7
* 15], [5 12 12], [15 20 10], [19 24 8] ] .
*
* <p>The output is a list of "key points" (red dots in Figure B) in the format of [ [x1,y1], [x2,
* y2], [x3, y3], ... ] that uniquely defines a skyline. A key point is the left endpoint of a
* horizontal line segment. Note that the last key point, where the rightmost building ends, is
* merely used to mark the termination of the skyline, and always has zero height. Also, the ground
* in between any two adjacent buildings should be considered part of the skyline contour.
*
* <p>For instance, the skyline in Figure B should be represented as:[ [2 10], [3 15], [7 12], [12
* 0], [15 10], [20 8], [24, 0] ].
*
* <p>Notes:
*
* <p>The number of buildings in any input list is guaranteed to be in the range [0, 10000]. The
* input list is already sorted in ascending order by the left x position Li. The output list must
* be sorted by the x position. There must be no consecutive horizontal lines of equal height in the
* output skyline. For instance, [...[2 3], [4 5], [7 5], [11 5], [12 7]...] is not acceptable; the
* three lines of height 5 should be merged into one in the final output as such: [...[2 3], [4 5],
* [12 7], ...]
*
* <p>Solution: 1. Sort array of points. Each point here is either a start of a rectangle or end of
* a rectangle. 2. Maintain a priority queue of rectangles ordered by increasing order of height, if
* height of two rectangle is same then, order by left most start index. 3. For each point starting
* from left-most point: 3.a. Add all the rectangles which starts at this point. 3.b. Remove all the
* rectangles which ends at this point. Keep a max of height for each rectangle removed. 3.c. If the
* current priority queue is empty then, include current point (x, 0) to the result set. This
* indicates this was the last rectangle and after this there is a gap of at least 1 unit.
*
* <p>If the max calculated in step b is greater than current max then, include current x and max
* height from priority queue to the result set. This indicates one of the larger rectangle's right
* edge intersects with a smaller one.
*
* <p>If the max calculated in step b is smaller, then check if the peek element in the priority queue
* has the left edge value equal to current point. If so, then this indicates that a new larger
* rectangle starts from this point therefore add this point to the result set. 4. Return the result
* set
*/
public class TheSkylineProblem {
/**
* Main method — runs the solver on a sample input and prints each key point.
*
* @param args unused
* @throws Exception never thrown; kept for the exercise template
*/
public static void main(String[] args) throws Exception {
int[][] A = {
{0, 30, 30}, {2, 9, 10}, {3, 7, 15}, {4, 8, 10}, {5, 12, 12}, {15, 20, 10}, {19, 24, 8}
};
// int[][] A = {{2,9,10}, {3,9,11}, {4,9,12}, {5,9,13}};
List<int[]> result = new TheSkylineProblem().getSkyline(A);
result.forEach(
x -> {
System.out.println(x[0] + " " + x[1]);
});
}
/**
* Sweep-line over all distinct x-coordinates (building starts and ends),
* maintaining a priority queue of "open" rectangles ordered by height
* (descending), ties broken by left edge (ascending). See the class comment
* for the full algorithm description.
*
* @param buildings triplets {Li, Ri, Hi}, pre-sorted by Li per problem statement
* @return skyline key points as {x, height} pairs, sorted by x
*/
public List<int[]> getSkyline(int[][] buildings) {
PriorityQueue<Rectangle> pq =
new PriorityQueue<>(
Comparator.comparing(Rectangle::getH)
.reversed()
.thenComparing(
Rectangle
::getX1)); // order by height, if height is same then, order by left most
// starting edge.
List<int[]> result = new ArrayList<>();
// Collect every distinct start/end x-coordinate; these are the only places
// the skyline can change.
Set<Integer> set = new HashSet<>();
for (int[] p : buildings) {
set.add(p[0]);
set.add(p[1]);
}
List<Integer> points = new ArrayList<>();
points.addAll(set);
points.sort(Integer::compare);
// j tracks the first building not yet added, exploiting the pre-sorted input.
for (int i = 0, j = 0, l = points.size(); i < l; i++) {
int curr = points.get(i);
for (int k = j;
k < buildings.length;
k++) { // add all the rectangles that begin at this point
int[] rectangle = buildings[k];
if (rectangle[0] == curr) {
pq.offer(new Rectangle(rectangle[0], rectangle[1], rectangle[2]));
} else if (rectangle[0] > curr) {
j = k;
break;
}
}
// max = tallest rectangle that ends exactly at curr (MIN_VALUE if none).
int max = Integer.MIN_VALUE;
while (!pq.isEmpty()) { // remove all the rectangles that end at this point
if (pq.peek().getX2() == curr) {
Rectangle top = pq.poll();
max = Math.max(max, top.getH());
} else if (pq.peek().getX2() < curr) {
pq.poll();
} else {
break;
}
}
if (pq.isEmpty()) {
result.add(
makeNewPoint(
curr,
0)); // This is the last rectangle after this there is a gap of at least one unit
} else {
if (max > pq.peek().getH()) {
result.add(
makeNewPoint(
curr,
pq.peek()
.getH())); // one of the larger rectangle's right edge intersects with a
// smaller one
} else if (max < pq.peek().getH() && pq.peek().getX1() == curr) {
result.add(
makeNewPoint(curr, pq.peek().getH())); // new larger rectangle begins at this point
}
}
}
return result;
}
/** Packs an (x, y) key point into a 2-element array. */
private int[] makeNewPoint(int x, int y) {
int[] point = new int[2];
point[0] = x;
point[1] = y;
return point;
}
/** Open building: left edge x1, right edge x2, height h. */
class Rectangle {
private int x1, x2, h;
Rectangle(int x1, int x2, int h) {
this.x1 = x1;
this.x2 = x2;
this.h = h;
}
public int getH() {
return h;
}
public int getX2() {
return x2;
}
public int getX1() {
return x1;
}
}
}
|
lieonCX/XGG | xianglegou/Classes/Function/User/Interface/View/Mine/BankCard/GC_IdentityCardInfoTableViewCell.h | //
// GC_IdentityCardInfoTableViewCell.h
// xianglegou
//
// Created by mini3 on 2017/5/23.
// Copyright © 2017年 xianglegou. All rights reserved.
//
// Identity card cell
//
#import "Hen_BaseTableViewCell.h"
@interface GC_IdentityCardInfoTableViewCell : Hen_BaseTableViewCell
/// Callback invoked with the front-side photo of the identity card.
@property (nonatomic, copy) void(^onPositivePicBlock)(UIImage *image);
/// Callback invoked with the back-side photo of the identity card.
@property (nonatomic, copy) void(^onBackPicBlock)(UIImage *image);
@end
|
stitheridge/isis | viewers/wicket/ui/src/main/java/org/apache/isis/viewer/wicket/ui/components/scalars/image/WicketImageUtil.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.isis.viewer.wicket.ui.components.scalars.image;
import java.awt.image.BufferedImage;
import java.util.Optional;
import javax.annotation.Nullable;
import org.apache.wicket.extensions.markup.html.image.resource.ThumbnailImageResource;
import org.apache.wicket.markup.html.image.Image;
import org.apache.wicket.markup.html.image.NonCachingImage;
import org.apache.wicket.markup.html.image.resource.BufferedDynamicImageResource;
import org.apache.isis.applib.value.Blob;
import org.apache.isis.core.metamodel.facets.value.image.ImageValueFacet;
import org.apache.isis.core.metamodel.spec.ManagedObjects;
import org.apache.isis.viewer.wicket.model.models.ScalarModel;
import lombok.NonNull;
import lombok.val;
import lombok.experimental.UtilityClass;
@UtilityClass
public class WicketImageUtil {

    /**
     * Wraps a buffered image in a non-caching Wicket {@link Image} component,
     * scaled down to a 300px thumbnail.
     *
     * @param id      the Wicket component id
     * @param buffImg the source image; may be {@code null}
     * @return the image component, or {@link Optional#empty()} when {@code buffImg} is {@code null}
     */
    public Optional<Image> asWicketImage(
            final @NonNull String id,
            final @Nullable BufferedImage buffImg) {

        if(buffImg == null) {
            return Optional.empty();
        }

        val imageResource = new BufferedDynamicImageResource();
        imageResource.setImage(buffImg);

        // Wicket scales the resource down to a 300px thumbnail for display.
        val thumbnailImageResource = new ThumbnailImageResource(imageResource, 300);

        // Non-caching so a changed underlying image is always re-rendered.
        val wicketImage = new NonCachingImage(id, thumbnailImageResource);
        wicketImage.setOutputMarkupId(true);

        return Optional.of(wicketImage);
    }

    // -- SHORTCUTS

    /**
     * Shortcut: renders the given {@link Blob}'s image content, if any.
     */
    public Optional<Image> asWicketImage(
            final @NonNull String id,
            final @Nullable Blob blob) {
        val buffImg = Optional.ofNullable(blob)
                .flatMap(Blob::asImage)
                .orElse(null);
        return asWicketImage(id, buffImg);
    }

    /**
     * Shortcut: renders the image value held by the scalar model, if present.
     * (Consistency fix: dropped the redundant explicit {@code static} —
     * {@code @UtilityClass} already makes every member static, matching the
     * declaration style of the two methods above.)
     */
    public Optional<Image> asWicketImage(
            final @NonNull String id,
            final @NonNull ScalarModel model) {
        val imageValueFacet = model.getTypeOfSpecification().getFacet(ImageValueFacet.class);
        val adapter = model.getObject();
        if(imageValueFacet==null
                || ManagedObjects.isNullOrUnspecifiedOrEmpty(adapter)) {
            return Optional.empty();
        }
        val buffImg = imageValueFacet.getImage(adapter).orElse(null);
        return asWicketImage(id, buffImg);
    }
}
|
zhxinyu/cosan | docs/html/class_cosan_1_1_polynomial_features.js | var class_cosan_1_1_polynomial_features =
[
[ "PolynomialFeatures", "class_cosan_1_1_polynomial_features.html#a86256aab2166d0984ea04a14f3a83f3c", null ],
[ "PolynomialFeatures", "class_cosan_1_1_polynomial_features.html#a667914c355d484367861bb30d98b83d4", null ],
[ "GetInteractionFeatures", "class_cosan_1_1_polynomial_features.html#a75cadd1e5d1b15b7094b7175a97b701c", null ],
[ "GetPolynomialFeatures", "class_cosan_1_1_polynomial_features.html#a1ff9b614d81f341648f96793cc50977d", null ],
[ "SetInteractionFeatures", "class_cosan_1_1_polynomial_features.html#a5c3abeeeb5589bec358ba3e14659cbae", null ],
[ "UpdatePolynomialFeatures", "class_cosan_1_1_polynomial_features.html#a23be918e5edf62966bf52ee2e49bd077", null ],
[ "__interactionFeatures", "class_cosan_1_1_polynomial_features.html#ab231f92965f4adf8b717e2633c810263", null ],
[ "__polynomialFeatures", "class_cosan_1_1_polynomial_features.html#ad490b9afd1a2ed9449bb9b992aa0e310", null ]
]; |
SwissTierrasColombia/vu-server | src/api/business/pm/runtime/process/process.business.js | // Models
import RProcessModel from '../../../../models/r.process.model';
// Thin business-layer facade over RProcessModel: every method delegates 1:1
// to the model of the same name. Semantics of the individual queries live in
// r.process.model — consult it for parameter shapes.
export default class ProcessBusiness {
// Fetch a runtime process by id. Deliberately best-effort: any model/DB
// error is swallowed and null is returned instead of rejecting.
static async getProcessById(rProcessId) {
try {
return await RProcessModel.getProcessById(rProcessId);
} catch (error) {
return null;
}
}
// NOTE(review): unlike getProcessById, the methods below propagate model
// errors to the caller — presumably intentional; confirm with callers.
static async createProcess(mProcessId, createdBy, mSteps) {
return await RProcessModel.createProcess(mProcessId, createdBy, mSteps);
}
// Persist step data/metadata for the given runtime process.
static async updateProcessStep(rProcessId, mStepId, data, metadata, vuUserId) {
return await RProcessModel.updateProcessStep(rProcessId, mStepId, data, metadata, vuUserId);
}
static async getProcessesByProcessAndSteps(mProcessId, mRoles, populates) {
return await RProcessModel.getProcessesByProcessAndSteps(mProcessId, mRoles, populates);
}
static async getProcessesMatchSteps(mProcessId, mStepsId) {
return await RProcessModel.getProcessesMatchSteps(mProcessId, mStepsId);
}
static async updateStepActive(rProcessId, mStepId) {
return await RProcessModel.updateStepActive(rProcessId, mStepId);
}
static async updateProcessActive(rProcessId, active) {
return await RProcessModel.updateProcessActive(rProcessId, active);
}
static async getProcessesByProcess(mProcessId) {
return await RProcessModel.getProcessesByProcess(mProcessId);
}
static async getProcessesByActive(active) {
return await RProcessModel.getProcessesByActive(active);
}
static async getCountActiveProcessByTypeProcess(mProcessId, active) {
return await RProcessModel.getCountActiveProcessByTypeProcess(mProcessId, active);
}
static async getCountProcessesByProcess(mProcessId) {
return await RProcessModel.getCountProcessesByProcess(mProcessId);
}
} |
darylnak/ucdavis-work | UCD/ecs60_40/seansrc/p9/stack.h | #ifndef STACK_H
#define STACK_H
#include <list>
using namespace std;
template <typename T>
class Stack
{
list <T> stackList;
public:
void push(const T &object);
void pop(T &object);
}; // class stack
#include "stack.cpp"
#endif // STACK_H
|
SylvainCorlay/pythran | pythran/pythonic/include/types/dict.hpp | #ifndef PYTHONIC_INCLUDE_TYPES_DICT_HPP
#define PYTHONIC_INCLUDE_TYPES_DICT_HPP
#include "pythonic/include/types/assignable.hpp"
#include "pythonic/include/types/tuple.hpp"
#include "pythonic/include/types/empty_iterator.hpp"
#include "pythonic/include/utils/shared_ref.hpp"
#include "pythonic/include/utils/iterator.hpp"
#include "pythonic/include/utils/reserve.hpp"
#include "pythonic/include/__builtin__/None.hpp"
#include <memory>
#include <utility>
#include <limits>
#include <algorithm>
#include <iterator>
#include <unordered_map>
PYTHONIC_NS_BEGIN
namespace types
{
// Default bucket-count hint for newly created dicts.
static const size_t DEFAULT_DICT_CAPACITY = 64;

struct empty_dict;

// Adapts an unordered_map iterator to yield (key, value) pairs with a
// non-const key type (Python items()-style iteration).
template <class I>
struct item_iterator_adaptator : public I {
using value_type = std::pair<
typename std::remove_cv<typename I::value_type::first_type>::type,
typename I::value_type::second_type>;
using pointer = value_type *;
using reference = value_type &;
item_iterator_adaptator() = default;
item_iterator_adaptator(I const &i);
value_type operator*();
};

// Adapts an unordered_map iterator to yield only the keys.
template <class I>
struct key_iterator_adaptator : public I {
using value_type = typename I::value_type::first_type;
using pointer = typename I::value_type::first_type *;
using reference = typename I::value_type::first_type &;
key_iterator_adaptator();
key_iterator_adaptator(I const &i);
value_type operator*();
};

// Adapts an unordered_map iterator to yield only the mapped values.
template <class I>
struct value_iterator_adaptator : public I {
using value_type = typename I::value_type::second_type;
using pointer = typename I::value_type::second_type *;
using reference = typename I::value_type::second_type &;
value_iterator_adaptator();
value_iterator_adaptator(I const &i);
value_type operator*();
};

// Lightweight [begin, end) range wrapper returned by iteritems/iterkeys/
// itervalues.
template <class I>
struct dict_iterator {
typedef I iterator;
I _begin;
I _end;
dict_iterator(I b, I e);
iterator begin();
iterator end();
};

// View over a dict's items (Python dict.viewitems); holds its own copy of
// the dict handle D.
template <class D>
struct dict_items {
using iterator = typename D::item_const_iterator;
D data;
dict_items();
dict_items(D const &d);
iterator begin() const;
iterator end() const;
};

// View over a dict's keys (Python dict.viewkeys).
template <class D>
struct dict_keys {
using iterator = typename D::key_const_iterator;
D data;
dict_keys();
dict_keys(D const &d);
iterator begin() const;
iterator end() const;
};

// View over a dict's values (Python dict.viewvalues).
template <class D>
struct dict_values {
using iterator = typename D::value_const_iterator;
D data;
dict_values();
dict_values(D const &d);
iterator begin() const;
iterator end() const;
};
// Python-dict-like container backed by std::unordered_map. The map is held
// through utils::shared_ref, so copies of a dict share the same underlying
// storage. Declarations only — definitions live in the matching
// pythonic/types/dict.hpp implementation header.
template <class K, class V>
class dict
{
// data holder
using _key_type =
typename std::remove_cv<typename std::remove_reference<K>::type>::type;
using _value_type =
typename std::remove_cv<typename std::remove_reference<V>::type>::type;
using container_type = std::unordered_map<_key_type, _value_type>;
utils::shared_ref<container_type> data;

public:
// types
using reference = typename container_type::reference;
using const_reference = typename container_type::const_reference;
// NOTE: default iteration (begin/end) yields keys, matching Python's
// `for k in d` — see key_iterator_adaptator.
using iterator = utils::comparable_iterator<
key_iterator_adaptator<typename container_type::iterator>>;
using const_iterator = utils::comparable_iterator<
key_iterator_adaptator<typename container_type::const_iterator>>;
using item_iterator = utils::comparable_iterator<
item_iterator_adaptator<typename container_type::iterator>>;
using item_const_iterator = utils::comparable_iterator<
item_iterator_adaptator<typename container_type::const_iterator>>;
using key_iterator = utils::comparable_iterator<
key_iterator_adaptator<typename container_type::iterator>>;
using key_const_iterator = utils::comparable_iterator<
key_iterator_adaptator<typename container_type::const_iterator>>;
using value_iterator = utils::comparable_iterator<
value_iterator_adaptator<typename container_type::iterator>>;
using value_const_iterator = utils::comparable_iterator<
value_iterator_adaptator<typename container_type::const_iterator>>;
using size_type = typename container_type::size_type;
using difference_type = typename container_type::difference_type;
using value_type = typename container_type::value_type;
using allocator_type = typename container_type::allocator_type;
using pointer = typename container_type::pointer;
using const_pointer = typename container_type::const_pointer;

// constructors
dict();
dict(empty_dict const &);
dict(std::initializer_list<value_type> l);
dict(dict<K, V> const &other);
template <class Kp, class Vp>
dict(dict<Kp, Vp> const &other);
template <class B, class E>
dict(B begin, E end);

// iterators
iterator begin();
const_iterator begin() const;
iterator end();
const_iterator end() const;
item_iterator item_begin();
item_const_iterator item_begin() const;
item_iterator item_end();
item_const_iterator item_end() const;
key_iterator key_begin();
key_const_iterator key_begin() const;
key_iterator key_end();
key_const_iterator key_end() const;
value_iterator value_begin();
value_const_iterator value_begin() const;
value_iterator value_end();
value_const_iterator value_end() const;

// dict interface
// Truthiness: mirrors Python (empty dict is falsy).
operator bool();
V &operator[](K const &key);
// Heterogeneous lookup: converts the key to K and delegates.
template <class OtherKey>
V &operator[](OtherKey const &key)
{
return (*this)[K(key)];
}
V const &operator[](K const &key) const;
template <class OtherKey>
V const &operator[](OtherKey const &key) const
{
return (*this)[K(key)];
}
// fast(): access presumed to skip the checks of operator[] — confirm in the
// implementation header before relying on it.
V &fast(K const &key);
V const &fast(K const &key) const;
item_const_iterator find(K const &key) const;
void clear();
dict<K, V> copy() const;
// Python dict.get / setdefault / update / pop / popitem equivalents.
template <class W>
typename __combined<V, W>::type get(K const &key, W d) const;
none<V> get(K const &key) const;
template <class W>
V &setdefault(K const &key, W d);
none<V> &setdefault(K const &key);
template <class K0, class W0>
void update(dict<K0, W0> const &d);
template <class Iterable>
void update(Iterable const &d);
template <class W>
typename __combined<V, W>::type pop(K const &key, W d);
V pop(K const &key);
std::tuple<K, V> popitem();
long size() const;
dict_iterator<item_iterator> iteritems();
dict_iterator<item_const_iterator> iteritems() const;
dict_iterator<key_iterator> iterkeys();
dict_iterator<key_const_iterator> iterkeys() const;
dict_iterator<value_iterator> itervalues();
dict_iterator<value_const_iterator> itervalues() const;
dict_items<dict<K, V>> viewitems() const;
dict_keys<dict<K, V>> viewkeys() const;
dict_values<dict<K, V>> viewvalues() const;

// type inference stuff
template <class K_, class V_>
dict<typename __combined<K, K_>::type, typename __combined<V, V_>::type>
operator+(dict<K_, V_> const &);

// id interface
intptr_t id() const;

template <class T>
bool contains(T const &key) const;
};
// Stand-in type for a `{}` literal whose key/value types are not yet known;
// combining it with a concrete dict<K, V> (operator+ / __combined) yields
// that dict type. Iteration is always empty.
struct empty_dict {
using value_type = void;
using iterator = empty_iterator;
using const_iterator = empty_iterator;
template <class K, class V>
dict<K, V> operator+(dict<K, V> const &s);
empty_dict operator+(empty_dict const &);
// Always falsy, mirroring Python's empty dict.
operator bool() const;
iterator begin() const;
iterator end() const;
// Membership test on an empty dict — trivially false for any key.
template <class V>
bool contains(V const &) const;
};
template <class K, class V>
dict<K, V> operator+(dict<K, V> const &d, empty_dict);
}
template <class K, class V>
struct assignable<types::dict<K, V>> {
using type =
types::dict<typename assignable<K>::type, typename assignable<V>::type>;
};
std::ostream &operator<<(std::ostream &os, types::empty_dict const &);
template <class K, class V>
std::ostream &operator<<(std::ostream &os, std::pair<K, V> const &p);
template <class K, class V>
std::ostream &operator<<(std::ostream &os, types::dict<K, V> const &v);
PYTHONIC_NS_END
/* overload std::get */
namespace std
{
template <size_t I, class K, class V>
auto get(pythonic::types::dict<K, V> &d) -> decltype(d[I]);
template <size_t I, class K, class V>
auto get(pythonic::types::dict<K, V> const &d) -> decltype(d[I]);
template <size_t I, class K, class V>
struct tuple_element<I, pythonic::types::dict<K, V>> {
using type = V;
};
}
/* type inference stuff {*/
#include "pythonic/include/types/combined.hpp"
#include "pythonic/include/types/list.hpp"
template <class A>
struct __combined<container<A>, pythonic::types::empty_dict> {
using type = dict_container<A>;
};
template <class A>
struct __combined<pythonic::types::empty_dict, container<A>> {
using type = dict_container<A>;
};
template <class A, class B, class C>
struct __combined<container<A>, pythonic::types::dict<C, B>> {
using type = pythonic::types::dict<C, typename __combined<A, B>::type>;
};
template <class A, class B, class C>
struct __combined<pythonic::types::dict<C, B>, container<A>> {
using type = pythonic::types::dict<C, typename __combined<A, B>::type>;
};
template <class T>
struct __combined<pythonic::types::empty_dict, pythonic::types::list<T>> {
using type = pythonic::types::dict<typename std::tuple_element<0, T>::type,
typename std::tuple_element<1, T>::type>;
};
template <class T>
struct __combined<pythonic::types::list<T>, pythonic::types::empty_dict> {
using type = pythonic::types::dict<typename std::tuple_element<0, T>::type,
typename std::tuple_element<1, T>::type>;
};
template <class K0, class V0, class T>
struct __combined<pythonic::types::dict<K0, V0>, pythonic::types::list<T>> {
using type = pythonic::types::dict<
typename __combined<K0, typename std::tuple_element<0, T>::type>::type,
typename __combined<V0, typename std::tuple_element<1, T>::type>::type>;
};
template <class K0, class V0, class T>
struct __combined<pythonic::types::list<T>, pythonic::types::dict<K0, V0>> {
using type = pythonic::types::dict<
typename __combined<K0, typename std::tuple_element<0, T>::type>::type,
typename __combined<V0, typename std::tuple_element<1, T>::type>::type>;
};
template <class K>
struct __combined<indexable<K>, pythonic::types::empty_dict> {
using type = indexable_dict<K>;
};
template <class K>
struct __combined<pythonic::types::empty_dict, indexable_dict<K>> {
using type = indexable_dict<K>;
};
template <class K>
struct __combined<indexable_dict<K>, pythonic::types::empty_dict> {
using type = indexable_dict<K>;
};
template <class K0, class K1, class V1>
struct __combined<pythonic::types::dict<K1, V1>, indexable_dict<K0>> {
using type = pythonic::types::dict<typename __combined<K0, K1>::type, V1>;
};
template <class K0, class K1, class V1>
struct __combined<indexable_dict<K0>, pythonic::types::dict<K1, V1>> {
using type = pythonic::types::dict<typename __combined<K0, K1>::type, V1>;
};
template <class K>
struct __combined<pythonic::types::empty_dict, indexable<K>> {
using type = indexable_dict<K>;
};
template <class K0, class V, class K1>
struct __combined<pythonic::types::dict<K0, V>, indexable<K1>> {
using type = pythonic::types::dict<typename __combined<K0, K1>::type, V>;
};
template <class K0, class V, class K1>
struct __combined<indexable<K1>, pythonic::types::dict<K0, V>> {
using type = pythonic::types::dict<typename __combined<K0, K1>::type, V>;
};
template <class K, class V>
struct __combined<pythonic::types::empty_dict, indexable_container<K, V>> {
using type = pythonic::types::dict<K, V>;
};
template <class K0, class V0, class K1, class V1>
struct __combined<pythonic::types::dict<K0, V0>, indexable_container<K1, V1>> {
using type = pythonic::types::dict<typename __combined<K0, K1>::type,
typename __combined<V0, V1>::type>;
};
template <class K0, class V0, class K1, class V1>
struct __combined<indexable_container<K1, V1>, pythonic::types::dict<K0, V0>> {
using type = pythonic::types::dict<typename __combined<K0, K1>::type,
typename __combined<V0, V1>::type>;
};
template <class K, class V>
struct __combined<indexable_container<K, V>, pythonic::types::empty_dict> {
using type = pythonic::types::dict<K, V>;
};
template <class K, class V>
struct __combined<indexable<K>, dict_container<V>> {
using type = pythonic::types::dict<K, V>;
};
template <class V, class K>
struct __combined<dict_container<V>, indexable<K>> {
using type = pythonic::types::dict<K, V>;
};
template <class V, class K, class W>
struct __combined<dict_container<V>, indexable_container<K, W>> {
using type = pythonic::types::dict<K, typename __combined<V, W>::type>;
};
template <class V, class K, class W>
struct __combined<indexable_container<K, W>, dict_container<V>> {
using type = pythonic::types::dict<K, typename __combined<V, W>::type>;
};
template <class V, class K, class W>
struct __combined<indexable_dict<V>, indexable_container<K, W>> {
using type = pythonic::types::dict<typename __combined<K, V>::type, W>;
};
template <class V, class K, class W>
struct __combined<indexable_container<K, W>, indexable_dict<V>> {
using type = pythonic::types::dict<typename __combined<K, V>::type, W>;
};
template <class K, class V, class W>
struct __combined<pythonic::types::dict<K, V>, dict_container<W>> {
using type = pythonic::types::dict<K, typename __combined<V, W>::type>;
};
template <class V, class K, class W>
struct __combined<dict_container<W>, pythonic::types::dict<K, V>> {
using type = pythonic::types::dict<K, typename __combined<V, W>::type>;
};
template <class K, class V>
struct __combined<indexable_dict<K>, container<V>> {
using type = pythonic::types::dict<K, V>;
};
template <class K0, class K1>
struct __combined<indexable_dict<K0>, indexable<K1>> {
using type = indexable_dict<typename __combined<K0, K1>::type>;
};
template <class K0, class K1>
struct __combined<indexable<K0>, indexable_dict<K1>> {
using type = indexable_dict<typename __combined<K0, K1>::type>;
};
template <class V, class K>
struct __combined<container<V>, indexable_dict<K>> {
using type = pythonic::types::dict<K, V>;
};
/* } */
#ifdef ENABLE_PYTHON_MODULE
#include "pythonic/python/core.hpp"
PYTHONIC_NS_BEGIN
template <typename K, typename V>
struct to_python<types::dict<K, V>> {
static PyObject *convert(types::dict<K, V> const &v);
};
template <>
struct to_python<types::empty_dict> {
static PyObject *convert(types::empty_dict);
};
template <typename K, typename V>
struct from_python<types::dict<K, V>> {
static bool is_convertible(PyObject *obj);
static types::dict<K, V> convert(PyObject *obj);
};
PYTHONIC_NS_END
#endif
#endif
|
MrPIvanov/SoftUni | 09-JS Advanced/11_JQUERY/JQueryLab/05_CountriesTable/initialize-table.js | function initializeTable() {
addCountry('Bulgaria', 'Sofia');
addCountry('Germany', 'Berlin');
addCountry('Russia', 'Moscow');
$('#createLink').on('click', addNewCountry);
// Click handler for the "create" link: reads the two text inputs and
// appends their values as a new table row.
function addNewCountry() {
    const countryName = $('#newCountryText').val();
    const capitalName = $('#newCapitalText').val();
    addCountry(countryName, capitalName);
}
// Appends one row (country | capital | actions) to #countriesTable and then
// refreshes which action links are visible.
function addCountry(country, capital) {
    // Small factory for the [Up]/[Down]/[Delete] anchors.
    const makeAction = (label, handler) => $('<a>').text(label).on('click', handler);
    const actionsCell = $('<td>')
        .append(makeAction('[Up]', upFunction))
        .append(makeAction('[Down]', downFunction))
        .append(makeAction('[Delete]', deleteRowFunction));
    $('<tr>')
        .append($('<td>').text(country))
        .append($('<td>').text(capital))
        .append(actionsCell)
        .appendTo($('#countriesTable'));
    fixBtn();
}
// Moves the clicked row one position up. `this` is the [Up] anchor, so
// anchor -> <td> -> <tr>. Moving past its previous sibling is a no-op when
// there is no previous row (jQuery ignores empty target sets).
function upFunction() {
    const row = $(this).parent().parent();
    row.prev().before(row);
    fixBtn();
}
// Moves the clicked row one position down. `this` is the [Down] anchor, so
// anchor -> <td> -> <tr>. A no-op on the last row (next() is empty).
function downFunction() {
    const row = $(this).parent().parent();
    row.next().after(row);
    fixBtn();
}
// Removes the clicked row entirely, then refreshes the action links so the
// new first/last rows hide the now-meaningless [Up]/[Down] anchors.
function deleteRowFunction() {
    $(this).parent().parent().remove();
    fixBtn();
}
// Resets action-link visibility after any structural change:
// show every link, then hide the ones that cannot apply.
function fixBtn() {
    const allActionLinks = $('#countriesTable tr :nth-child(3) a');
    allActionLinks.show();
    // [Up] on the first data row. NOTE(review): the selector targets the
    // 3rd <tr> — presumably two header rows precede the data; confirm
    // against the page's HTML.
    const firstRowUpLink = $('#countriesTable tr:nth-child(3) td:nth-child(3) a:nth-child(1)');
    firstRowUpLink.hide();
    // [Down] on the last row.
    const lastRowDownLink = $('#countriesTable tr:last-child td:nth-child(3) a:nth-child(2)');
    lastRowDownLink.hide();
}
} |
Slapbox/Trix-ES6 | src/trix/core/utilities/utf16_string.js | <gh_stars>0
/*
* decaffeinate suggestions:
* DS001: Remove Babel/TypeScript constructor workaround
* DS101: Remove unnecessary use of Array.from
* DS102: Remove unnecessary code created because of implicit returns
* DS205: Consider reworking code to avoid use of IIFEs
* DS207: Consider shorter variations of null checks
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
let ucs2decode, ucs2encode;
Trix.UTF16String = class UTF16String extends Trix.BasicObject {
  // Wraps a JavaScript (UCS-2/UTF-16) string together with its array of
  // Unicode code points so offsets can be converted between the two
  // representations: a surrogate pair is one code point but two UCS-2 units.

  // Coerce any value into a UTF16String: instances pass through unchanged,
  // everything else is stringified (null/undefined become "").
  static box(value) {
    if (value == null) { value = ""; }
    if (value instanceof this) {
      return value;
    } else {
      return this.fromUCS2String(value != null ? value.toString() : undefined);
    }
  }

  static fromUCS2String(ucs2String) {
    return new (this)(ucs2String, ucs2decode(ucs2String));
  }

  static fromCodepoints(codepoints) {
    return new (this)(ucs2encode(codepoints), codepoints);
  }

  constructor(ucs2String, codepoints) {
    // Fix for decaffeinate suggestion DS001: the previous Babel/TypeScript
    // "this before super" workaround (an eval-based hack that never called
    // super and would throw a TDZ ReferenceError under native class
    // semantics) is replaced with a plain super call.
    // NOTE(review): assumes Trix.BasicObject's constructor accepts or
    // ignores these arguments — confirm.
    super(...arguments);
    this.ucs2String = ucs2String;
    this.codepoints = codepoints;
    this.length = this.codepoints.length;     // length in code points
    this.ucs2Length = this.ucs2String.length; // length in UCS-2 code units
  }

  // Code-point offset -> UCS-2 offset (re-encodes the prefix and measures it).
  offsetToUCS2Offset(offset) {
    return ucs2encode(this.codepoints.slice(0, Math.max(0, offset))).length;
  }

  // UCS-2 offset -> code-point offset (decodes the prefix and measures it).
  offsetFromUCS2Offset(ucs2Offset) {
    return ucs2decode(this.ucs2String.slice(0, Math.max(0, ucs2Offset))).length;
  }

  // slice() operates on code points, mirroring Array#slice semantics, and
  // returns a new UTF16String.
  slice() {
    return this.constructor.fromCodepoints(this.codepoints.slice(...arguments));
  }

  // One-code-point slice at the given offset (still a UTF16String, not a char).
  charAt(offset) {
    return this.slice(offset, offset + 1);
  }

  // Equality is defined on the underlying UCS-2 string after boxing.
  isEqualTo(value) {
    return this.constructor.box(value).ucs2String === this.ucs2String;
  }

  toJSON() {
    return this.ucs2String;
  }

  getCacheKey() {
    return this.ucs2String;
  }

  toString() {
    return this.ucs2String;
  }
};
// Feature detection: use the engine's built-in Unicode-aware APIs when they
// handle astral characters correctly, otherwise fall back to manual
// surrogate-pair handling below.
const hasArrayFrom = typeof Array.from === 'function' && Array.from("\ud83d\udc7c").length === 1;
const hasStringCodePointAt = typeof " ".codePointAt === 'function' && " ".codePointAt(0) != null;
const hasStringFromCodePoint = typeof String.fromCodePoint === 'function' && String.fromCodePoint(32, 128124) === " \ud83d\udc7c";
// UCS-2 conversion helpers ported from Punycode.js:
// https://github.com/bestiejs/punycode.js#punycodeucs2
//
// ucs2decode: string -> array of numeric Unicode code points. While
// JavaScript strings are sequences of UTF-16 code units, this collapses a
// surrogate pair into a single code point; an unmatched surrogate half is
// passed through as its own value, matching UTF-16 behaviour.
if (hasArrayFrom && hasStringCodePointAt) {
  ucs2decode = string => Array.from(string).map(character => character.codePointAt(0));
} else {
  ucs2decode = function(string) {
    const codePoints = [];
    let index = 0;
    while (index < string.length) {
      let unit = string.charCodeAt(index++);
      const isHighSurrogate = 0xD800 <= unit && unit <= 0xDBFF;
      if (isHighSurrogate && index < string.length) {
        // Peek at the next unit; consume it only if it completes the pair.
        const next = string.charCodeAt(index);
        if ((next & 0xFC00) === 0xDC00) {
          unit = ((unit & 0x3FF) << 10) + (next & 0x3FF) + 0x10000;
          index++;
        }
      }
      codePoints.push(unit);
    }
    return codePoints;
  };
}
// ucs2encode: array of numeric code points -> string. Code points above
// 0xFFFF are emitted as a surrogate pair; everything else maps to a single
// UTF-16 code unit.
if (hasStringFromCodePoint) {
  ucs2encode = array => String.fromCodePoint(...Array.from(array || []));
} else {
  ucs2encode = function(array) {
    let encoded = "";
    for (let codePoint of Array.from(array)) {
      if (codePoint > 0xFFFF) {
        // Split the astral code point into high + low surrogate halves.
        codePoint -= 0x10000;
        encoded += String.fromCharCode(((codePoint >>> 10) & 0x3FF) | 0xD800);
        codePoint = 0xDC00 | (codePoint & 0x3FF);
      }
      encoded += String.fromCharCode(codePoint);
    }
    return encoded;
  };
}
|
nils-drechsel/react-use-webrtc | lib/lib/Controller/CameraStreamController.js | <filename>lib/lib/Controller/CameraStreamController.js
"use strict";
// Standard TypeScript-emitted async/await helper (do not hand-edit: this
// file is compiled output). Drives a generator to completion, adopting each
// yielded value into a Promise — using the caller-supplied Promise
// constructor P, or the global Promise — and settling the outer promise when
// the generator returns or throws.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
// fulfilled/rejected feed the previous awaited result (or error) back into
// the generator; step() schedules the next tick.
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });
const WebRtcManager_1 = require("../Rtc/WebRtcManager");
const Controller_1 = require("./Controller");
const react_use_listeners_1 = require("react-use-listeners");
/**
 * InboundController — tracks a media transmission arriving from a remote
 * peer and mirrors the transmission's lifecycle into this controller's
 * state. (Compiled output of CameraStreamController.ts; prefer editing the
 * TypeScript source.)
 */
class InboundController extends Controller_1.AbstractController {
// remoteSid: session id of the remote peer; transmissionId: id of the
// inbound transmission to watch; label: presumably a display/routing label
// for the stream — confirm against the TypeScript source.
constructor(controllerId, controllerManager, webRtcManager, mediaDevicesManager, label, remoteSid, transmissionId) {
super(controllerManager, webRtcManager, mediaDevicesManager, controllerId);
this.remoteSid = remoteSid;
this.label = label;
this.transmissionId = transmissionId;
}
// Subscribes to transmission events and maps transmission state onto
// controller state. Returns a promise (async via __awaiter) that resolves
// once the listener is registered — not when the stream connects.
start() {
return __awaiter(this, void 0, void 0, function* () {
this.webRtcManager.listenForInboundTransmission(this.transmissionId, (event) => {
switch (event) {
// ADDED falls through to MODIFIED intentionally: both re-read the
// transmission and translate its state.
case react_use_listeners_1.ListenerEvent.ADDED:
case react_use_listeners_1.ListenerEvent.MODIFIED:
// NOTE(review): getTransmission is dereferenced without a null/undefined
// guard — confirm the manager guarantees it exists while ADDED/MODIFIED
// events are delivered.
const transmission = this.webRtcManager.getTransmission(this.remoteSid, this.transmissionId);
switch (transmission.state) {
case WebRtcManager_1.TransmissionState.CONNECTED:
this.controllerState = Controller_1.ControllerState.READY;
break;
case WebRtcManager_1.TransmissionState.CONNECTING:
this.controllerState = Controller_1.ControllerState.STARTING;
break;
case WebRtcManager_1.TransmissionState.FAILED:
this.controllerState = Controller_1.ControllerState.FAILED;
break;
}
break;
case react_use_listeners_1.ListenerEvent.REMOVED:
this.controllerState = Controller_1.ControllerState.CLOSED;
break;
}
});
});
}
// Marks the controller closed. NOTE(review): the transmission listener
// registered in start() is not visibly unsubscribed here — confirm cleanup
// happens elsewhere.
stop() {
this.controllerState = Controller_1.ControllerState.CLOSED;
}
}
exports.InboundController = InboundController;
/**
 * OutboundCameraStreamController — captures the local camera/microphone and
 * transmits the stream to a remote peer, announcing a matching inbound
 * controller on the remote side. (Compiled output; prefer editing the
 * TypeScript source.)
 */
class OutboundCameraStreamController extends Controller_1.AbstractController {
// cameraDeviceId / audioDeviceId select the local capture devices;
// remoteSid identifies the receiving peer. transmissionId stays null until
// start() succeeds.
constructor(controllerManager, webRtcManager, mediaDevicesManager, label, remoteSid, cameraDeviceId, audioDeviceId) {
super(controllerManager, webRtcManager, mediaDevicesManager);
this.transmissionId = null;
this.cameraDeviceId = cameraDeviceId;
this.audioDeviceId = audioDeviceId;
this.remoteSid = remoteSid;
this.label = label;
}
// Acquires the camera stream, registers it as an outbound transmission and
// tells the remote peer to create the matching inbound controller. Any
// failure (device denied, etc.) flips the controller to FAILED.
// NOTE(review): controllerState is never set to READY on success here —
// presumably driven by transmission events elsewhere; confirm.
start() {
return __awaiter(this, void 0, void 0, function* () {
try {
const mediaStreamObject = yield this.mediaDevicesManager.getCameraStream(this.cameraDeviceId, this.audioDeviceId);
this.transmissionId = this.webRtcManager.addStreamTransmission(this.remoteSid, mediaStreamObject.stream, this.label);
this.controllerManager.sendAddInboundController(this.remoteSid, { controllerId: this.controllerId, label: this.label, transmissionId: this.transmissionId });
}
catch (e) {
this.controllerState = Controller_1.ControllerState.FAILED;
}
});
}
// Tells the remote side to tear down its inbound controller and marks this
// one closed. NOTE(review): the local camera stream/transmission is not
// visibly released here — confirm the manager handles that.
stop() {
this.controllerManager.sendRemoveInboundController(this.remoteSid, { controllerId: this.controllerId });
this.controllerState = Controller_1.ControllerState.CLOSED;
}
}
exports.OutboundCameraStreamController = OutboundCameraStreamController;
//# sourceMappingURL=CameraStreamController.js.map |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.