repo_name stringlengths 6 101 | path stringlengths 4 300 | text stringlengths 7 1.31M |
|---|---|---|
chris3105x/EasyPrefix | src/main/java/com/christian34/easyprefix/utils/ChatFormatting.java | package com.christian34.easyprefix.utils;
import java.util.ArrayList;
import java.util.List;

import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
/**
* EasyPrefix 2021.
*
* @author Christian34
*/
public enum ChatFormatting {
    BOLD("l", Message.FORMATTING_BOLD.getText()),
    ITALIC("o", Message.FORMATTING_ITALIC.getText()),
    RAINBOW("r", Message.FORMATTING_RAINBOW.getText()),
    STRIKETHROUGH("m", Message.FORMATTING_STRIKETHROUGH.getText()),
    UNDERLINE("n", Message.FORMATTING_UNDERLINE.getText()),
    UNDEFINED("@", ""),
    INHERIT("", "");

    // single-character formatting code, without the '§' prefix
    private final String code;
    // localized display name (empty for the internal marker constants)
    private final String name;

    ChatFormatting(String code, String name) {
        this.code = code;
        this.name = name;
    }

    /**
     * Resolves a formatting by its raw code (e.g. "l" for BOLD).
     *
     * @param code raw formatting code without the '§' prefix
     * @return the matching formatting, or null if none matches
     */
    @Nullable
    public static ChatFormatting getByCode(String code) {
        for (ChatFormatting formatting : ChatFormatting.values()) {
            if (formatting.code.equals(code)) return formatting;
        }
        return null;
    }

    /**
     * Returns every user-selectable formatting, i.e. all constants except the
     * internal markers UNDEFINED and INHERIT.
     *
     * @return array of selectable formattings
     */
    @NotNull
    public static ChatFormatting[] getValues() {
        // Collect into a list instead of sizing the array with a hard-coded
        // "values().length - 2", which silently breaks (ArrayIndexOutOfBounds
        // or trailing nulls) whenever marker constants are added or removed.
        List<ChatFormatting> formattings = new ArrayList<>();
        for (ChatFormatting formatting : values()) {
            if (formatting == UNDEFINED || formatting == INHERIT) continue;
            formattings.add(formatting);
        }
        return formattings.toArray(new ChatFormatting[0]);
    }

    /**
     * @return the localized display name of this formatting
     */
    @NotNull
    public String getName() {
        return name;
    }

    @Override
    public String toString() {
        if (code != null) {
            // RAINBOW ("r") gets its name rendered with the rainbow effect
            return code.equals("r") ? getCode() + RainbowEffect.addRainbowEffect(getName()) : getCode() + getName();
        }
        return "";
    }

    /**
     * @return true if this formatting maps to a vanilla Bukkit chat format
     * (RAINBOW and the internal markers do not)
     */
    public boolean isBukkit() {
        return !this.equals(RAINBOW) && !this.equals(UNDEFINED) && !this.equals(INHERIT);
    }

    /**
     * @return the formatting code prefixed with the color character '§'
     * (the null guard is defensive; all constants declare a non-null code)
     */
    @NotNull
    public String getCode() {
        return code == null ? "" : "§" + code;
    }
}
|
shubhambhattar/azure-sdk-for-java | sdk/mediaservices/mgmt-v2020_05_01/src/main/java/com/microsoft/azure/management/mediaservices/v2020_05_01/implementation/ContentKeyPolicyImpl.java | /**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.management.mediaservices.v2020_05_01.implementation;
import com.microsoft.azure.management.mediaservices.v2020_05_01.ContentKeyPolicy;
import com.microsoft.azure.arm.model.implementation.CreatableUpdatableImpl;
import rx.Observable;
import java.util.UUID;
import org.joda.time.DateTime;
import java.util.List;
import com.microsoft.azure.management.mediaservices.v2020_05_01.ContentKeyPolicyOption;
/**
 * Fluent implementation of {@link ContentKeyPolicy} backing both the
 * definition (create) and update builder chains. AutoRest-generated.
 */
class ContentKeyPolicyImpl extends CreatableUpdatableImpl<ContentKeyPolicy, ContentKeyPolicyInner, ContentKeyPolicyImpl> implements ContentKeyPolicy, ContentKeyPolicy.Definition, ContentKeyPolicy.Update {
    private final MediaManager manager;
    // ancestor/positional identifiers: parsed from the resource id when
    // wrapping an existing resource, or supplied via withExistingMediaservice
    private String resourceGroupName;
    private String accountName;
    private String contentKeyPolicyName;

    // Constructor used in "create" mode: only the resource name is known yet.
    ContentKeyPolicyImpl(String name, MediaManager manager) {
        super(name, new ContentKeyPolicyInner());
        this.manager = manager;
        // Set resource name
        this.contentKeyPolicyName = name;
        //
    }

    // Constructor used when wrapping an existing inner resource.
    ContentKeyPolicyImpl(ContentKeyPolicyInner inner, MediaManager manager) {
        super(inner.name(), inner);
        this.manager = manager;
        // Set resource name
        this.contentKeyPolicyName = inner.name();
        // set resource ancestor and positional variables
        this.resourceGroupName = IdParsingUtils.getValueFromIdByName(inner.id(), "resourceGroups");
        this.accountName = IdParsingUtils.getValueFromIdByName(inner.id(), "mediaServices");
        // NOTE: intentionally overwrites the name assigned above with the
        // value parsed from the id (generated code keeps both assignments)
        this.contentKeyPolicyName = IdParsingUtils.getValueFromIdByName(inner.id(), "contentKeyPolicies");
        //
    }

    @Override
    public MediaManager manager() {
        return this.manager;
    }

    // Issues the create (PUT) request and maps the inner result back onto this wrapper.
    @Override
    public Observable<ContentKeyPolicy> createResourceAsync() {
        ContentKeyPoliciesInner client = this.manager().inner().contentKeyPolicies();
        return client.createOrUpdateAsync(this.resourceGroupName, this.accountName, this.contentKeyPolicyName, this.inner())
            .map(innerToFluentMap(this));
    }

    // Issues the update (PATCH) request and maps the inner result back onto this wrapper.
    @Override
    public Observable<ContentKeyPolicy> updateResourceAsync() {
        ContentKeyPoliciesInner client = this.manager().inner().contentKeyPolicies();
        return client.updateAsync(this.resourceGroupName, this.accountName, this.contentKeyPolicyName, this.inner())
            .map(innerToFluentMap(this));
    }

    // Refreshes the inner model from the service.
    @Override
    protected Observable<ContentKeyPolicyInner> getInnerAsync() {
        ContentKeyPoliciesInner client = this.manager().inner().contentKeyPolicies();
        return client.getAsync(this.resourceGroupName, this.accountName, this.contentKeyPolicyName);
    }

    // A resource with no id has not been created on the service yet.
    @Override
    public boolean isInCreateMode() {
        return this.inner().id() == null;
    }

    // ---- Read-only getters delegating to the inner model. ----

    @Override
    public DateTime created() {
        return this.inner().created();
    }

    @Override
    public String description() {
        return this.inner().description();
    }

    @Override
    public String id() {
        return this.inner().id();
    }

    @Override
    public DateTime lastModified() {
        return this.inner().lastModified();
    }

    @Override
    public String name() {
        return this.inner().name();
    }

    @Override
    public List<ContentKeyPolicyOption> options() {
        return this.inner().options();
    }

    @Override
    public UUID policyId() {
        return this.inner().policyId();
    }

    @Override
    public String type() {
        return this.inner().type();
    }

    // ---- Fluent setters for the definition/update stages. ----

    @Override
    public ContentKeyPolicyImpl withExistingMediaservice(String resourceGroupName, String accountName) {
        this.resourceGroupName = resourceGroupName;
        this.accountName = accountName;
        return this;
    }

    @Override
    public ContentKeyPolicyImpl withOptions(List<ContentKeyPolicyOption> options) {
        this.inner().withOptions(options);
        return this;
    }

    @Override
    public ContentKeyPolicyImpl withDescription(String description) {
        this.inner().withDescription(description);
        return this;
    }
}
|
MeRajat/gorgonia | operatorPointwise_unary.go | package gorgonia
import "github.com/pkg/errors"
// a ʘUnaryOperator is essentially a function that takes a float32 or float64 and returns the same
// pros : no overloading = clear understanding
// cons : no overloading = a lot of extra code
//
// There are TWO ʘUnaryOperator types so far:
// sf32UnaryOperator - scalar float32 unary operator
// sf64UnaryOperator - scalar float64 unary operator
//
// Because *TensorTypes are parameterized by a scalar type, it isn't necessary to create operators
// that will work on *TensorTypes. A simple type switch will do.
//
// n.b.: ʘ is used to denote pointwiseness of the operator.
// if you want to type it, it's U+0298 - Latin Letter Bilabial Click
// ʘUnaryOperator is the common interface of the scalar unary operators
// (float32 and float64 variants). See the comment above for why the two
// variants are kept separate. ʘ denotes pointwiseness.
type ʘUnaryOperator interface {
	unaryOpType() ʘUnaryOperatorType
	String() string
}
// sf32UnaryOperator is a scalar float32 unary operator.
type sf32UnaryOperator func(float32) float32

// unaryOpType identifies the operator by comparing the receiver pointer with
// the addresses of the known package-level float32 operator functions.
// Operators not in the list yield maxʘUnaryOperator, the "unknown" sentinel.
func (f *sf32UnaryOperator) unaryOpType() ʘUnaryOperatorType {
	switch f {
	case &absf32:
		return absOpType
	case &signf32:
		return signOpType
	case &ceilf32:
		return ceilOpType
	case &floorf32:
		return floorOpType
	case &sinf32:
		return sinOpType
	case &cosf32:
		return cosOpType
	case &expf32:
		return expOpType
	case &lnf32:
		return lnOpType
	case &log2f32:
		return log2OpType
	case &negf32:
		return negOpType
	case &squaref32:
		return squareOpType
	case &sqrtf32:
		return sqrtOpType
	case &inversef32:
		return inverseOpType
	case &cubef32:
		return cubeOpType
	case &tanhf32:
		return tanhOpType
	case &sigmoidf32:
		return sigmoidOpType
	case &log1pf32:
		return log1pOpType
	case &expm1f32:
		return expm1OpType
	case &softplusf32:
		return softplusOpType
	}
	return maxʘUnaryOperator
}

// String implements fmt.Stringer by delegating to the resolved op type.
func (f *sf32UnaryOperator) String() string { return f.unaryOpType().String() }
// sf64UnaryOperator is a scalar float64 unary operator.
type sf64UnaryOperator func(float64) float64

// unaryOpType identifies the operator by comparing the receiver pointer with
// the addresses of the known package-level float64 operator functions.
// Operators not in the list yield maxʘUnaryOperator, the "unknown" sentinel.
func (f *sf64UnaryOperator) unaryOpType() ʘUnaryOperatorType {
	switch f {
	case &absf64:
		return absOpType
	case &signf64:
		return signOpType
	case &ceilf64:
		return ceilOpType
	case &floorf64:
		return floorOpType
	case &sinf64:
		return sinOpType
	case &cosf64:
		return cosOpType
	case &expf64:
		return expOpType
	case &lnf64:
		return lnOpType
	case &log2f64:
		return log2OpType
	case &negf64:
		return negOpType
	case &squaref64:
		return squareOpType
	case &sqrtf64:
		return sqrtOpType
	case &inversef64:
		return inverseOpType
	case &cubef64:
		return cubeOpType
	case &tanhf64:
		return tanhOpType
	case &sigmoidf64:
		return sigmoidOpType
	case &log1pf64:
		return log1pOpType
	case &expm1f64:
		return expm1OpType
	case &softplusf64:
		return softplusOpType
	}
	return maxʘUnaryOperator
}

// String implements fmt.Stringer by delegating to the resolved op type.
func (f *sf64UnaryOperator) String() string { return f.unaryOpType().String() }
/*
DIFFERENTIATION EXPRESSIONS
All the functions here are expressed in terms of *Node and/or Nodes
*/
// nondiffUnaryOpExpr is the symbolic-differentiation stub shared by operators
// that have no useful derivative (e.g. sign/ceil/floor); it always errors.
func nondiffUnaryOpExpr(x, y, gradY *Node) (*Node, error) {
	return nil, NewError(SymbDiffError, "Non differentiable function")
}
// nondiffUnaryOp is the automatic-differentiation stub shared by operators
// that have no useful derivative; it always errors.
func nondiffUnaryOp(x, y *Node) error {
	return NewError(AutoDiffError, "Non differentiable function")
}
// apparently abs is differentiable
// absDiffExpr builds the symbolic derivative of |x|: sign(x) ⊙ gradY.
func absDiffExpr(x, y, gradY *Node) (retVal *Node, err error) {
	if retVal, err = Sign(x); err != nil {
		return
	}
	WithGroupName(gradClust)(retVal)
	retVal, err = HadamardProd(gradY, retVal)
	return
}
// absDiff is the value-level autodiff of |x|: accumulates sign(x)*dy into x's deriv.
func absDiff(x, y *Node) (err error) {
	xdv := x.boundTo.(*dualValue)
	ydv := y.boundTo.(*dualValue)
	sign := newElemUnaryOp(signOpType, x)
	var d Value
	if d, err = sign.Do(xdv.Value); err == nil {
		if dT, ok := d.(Tensor); ok {
			defer returnTensor(dT) // recycle the temporary tensor
		}
		mul := newElemBinOp(mulOpType, y, x)
		err = mul.IncrDo(xdv.d, d, ydv.d) // xdv.d += sign(x) * ydv.d
		if ver, ok := err.(Valuer); ok {
			xdv.SetDeriv(ver.Value()) // ignore errors on purpose
			return nil
		}
	}
	return
}
// Solution here
// https://www.symbolab.com/solver/step-by-step/%5Cfrac%7Bd%7D%7Bdx%7D%5Cleft(sin%5Cleft(x%5Cright)%5Cright)
// sinDiffExpr builds the symbolic derivative of sin(x): cos(x) ⊙ gradY.
func sinDiffExpr(x, y, gradY *Node) (retVal *Node, err error) {
	if retVal, err = Cos(x); err != nil {
		return
	}
	WithGroupName(gradClust)(retVal)
	retVal, err = HadamardProd(retVal, gradY)
	return
}
// sinDiff is the value-level autodiff of sin(x): accumulates cos(x)*dy into x's deriv.
func sinDiff(x, y *Node) (err error) {
	xdv := x.boundTo.(*dualValue)
	ydv := y.boundTo.(*dualValue)
	cos := newElemUnaryOp(cosOpType, x)
	var d Value
	if d, err = cos.Do(xdv.Value); err == nil {
		if dT, ok := d.(Tensor); ok {
			defer returnTensor(dT) // recycle the temporary tensor
		}
		// The nodes handed to newElemBinOp appear to only seed type/shape
		// info; the actual operands are passed to IncrDo — confirm.
		mul := newElemBinOp(mulOpType, x, y)
		err = mul.IncrDo(xdv.d, d, ydv.d) // xdv.d += cos(x) * ydv.d
		if ver, ok := err.(Valuer); ok {
			xdv.SetDeriv(ver.Value()) // ignore errors on purpose
			return nil
		}
	}
	return
}
// Solution here (then apply chain rule to result by multiplying gradY):
// https://www.symbolab.com/solver/step-by-step/%5Cfrac%7Bd%7D%7Bdx%7D%5Cleft(cos%5Cleft(x%5Cright)%5Cright)
// cosDiffExpr builds the symbolic derivative of cos(x): -sin(x) ⊙ gradY.
func cosDiffExpr(x, y, gradY *Node) (retVal *Node, err error) {
	if retVal, err = Sin(x); err != nil {
		return
	}
	WithGroupName(gradClust)(retVal)
	if retVal, err = Neg(retVal); err != nil {
		return
	}
	WithGroupName(gradClust)(retVal)
	retVal, err = HadamardProd(retVal, gradY)
	return
}
// cosDiff is the value-level autodiff of cos(x): accumulates -sin(x)*dy into x's deriv.
func cosDiff(x, y *Node) (err error) {
	xdv := x.boundTo.(*dualValue)
	ydv := y.boundTo.(*dualValue)
	sin := newElemUnaryOp(sinOpType, x)
	var d Value
	if d, err = sin.Do(xdv.Value); err == nil {
		if dT, ok := d.(Tensor); ok {
			defer returnTensor(dT) // recycle the temporary tensor
		}
		neg := newElemUnaryOp(negOpType, x)
		// UnsafeDo mutates d in place — d is a private temporary, so safe.
		if d, err = neg.UnsafeDo(d); err == nil {
			mul := newElemBinOp(mulOpType, x, y)
			err = mul.IncrDo(xdv.d, d, ydv.d) // xdv.d += -sin(x) * ydv.d
			if ver, ok := err.(Valuer); ok {
				xdv.SetDeriv(ver.Value()) // ignore errors on purpose
				return nil
			}
		}
	}
	return
}
// expDiffExpr builds the symbolic derivative of exp(x). Since d(e^x)/dx = e^x = y,
// the gradient is simply y ⊙ gradY.
func expDiffExpr(x, y, gradY *Node) (retVal *Node, err error) {
	return HadamardProd(y, gradY)
}
// expDiff is the value-level autodiff of exp(x): accumulates y*dy into x's
// deriv (the forward value y is reused as the derivative).
func expDiff(x, y *Node) (err error) {
	xdv := x.boundTo.(*dualValue)
	ydv := y.boundTo.(*dualValue)
	mul := newElemBinOp(mulOpType, x, y)
	err = mul.IncrDo(xdv.d, ydv.Value, ydv.d) // xdv.d += y * ydv.d
	if ver, ok := err.(Valuer); ok {
		xdv.SetDeriv(ver.Value()) // ignore errors on purpose
		return nil
	}
	return
}
// solution is 1/x.
// Upon multiplying with gradY for chain rule, it simply becomes gradY/x
// lnDiffExpr builds the symbolic derivative of ln(x): gradY / x.
func lnDiffExpr(x, y, gradY *Node) (retVal *Node, err error) {
	return HadamardDiv(gradY, x)
}
// lnDiff is the value-level autodiff of ln(x): accumulates dy/x into x's deriv.
func lnDiff(x, y *Node) (err error) {
	xdv := x.boundTo.(*dualValue)
	ydv := y.boundTo.(*dualValue)
	div := newElemBinOp(divOpType, y, x)
	err = div.IncrDo(xdv.d, ydv.d, xdv.Value) // xdv.d += ydv.d / x
	if ver, ok := err.(Valuer); ok {
		xdv.SetDeriv(ver.Value()) // ignore errors on purpose
		return nil
	}
	return
}
// 1/(x*ln(2))
// log2DiffExpr builds the symbolic derivative of log2(x): gradY / (x * ln 2).
func log2DiffExpr(x, y, gradY *Node) (retVal *Node, err error) {
	var log2 *Node
	var dt Dtype
	if dt, err = dtypeOf(x.t); err != nil {
		return
	}
	// pick the ln(2) constant matching the operand's dtype
	switch dt {
	case Float32:
		log2 = ln2f32
	case Float64:
		log2 = ln2f64
	default:
		err = NewError(typeError, "log2DiffExpr does not handle Dtypes other than Float32 and Float64. Got %v instead", dt)
		return
	}
	if retVal, err = HadamardProd(x, log2); err == nil {
		WithGroupName(gradClust)(retVal)
		retVal, err = HadamardDiv(gradY, retVal)
	}
	return
}
// log2Diff is the value-level autodiff of log2(x): accumulates dy / (x * ln 2)
// into x's deriv.
func log2Diff(x, y *Node) (err error) {
	xdv := x.boundTo.(*dualValue)
	ydv := y.boundTo.(*dualValue)
	var log2 *Node
	var dt Dtype
	if dt, err = dtypeOf(x.t); err != nil {
		return
	}
	// pick the ln(2) constant matching the operand's dtype
	switch dt {
	case Float32:
		log2 = ln2f32
	case Float64:
		log2 = ln2f64
	default:
		// fixed copy-paste: message previously named "log2DiffExpr"
		err = NewError(typeError, "log2Diff does not handle Dtypes other than Float32 and Float64. Got %v instead", dt)
		return
	}
	mul := newElemBinOp(mulOpType, x, log2)
	var d Value
	if d, err = mul.Do(xdv.Value, log2.boundTo); err != nil {
		err = errors.Wrapf(err, doFail, mul)
		return
	}
	if dT, ok := d.(Tensor); ok {
		defer returnTensor(dT) // recycle the temporary x*ln2 tensor
	}
	div := newElemBinOp(divOpType, y, x)
	err = div.IncrDo(xdv.d, ydv.d, d) // xdv.d += ydv.d / (x*ln2)
	if ver, ok := err.(Valuer); ok {
		xdv.SetDeriv(ver.Value()) // ignore errors on purpose
		return nil
	}
	return
}
// negDiffExpr builds the symbolic derivative of -x: simply -gradY.
func negDiffExpr(x, y, gradY *Node) (retVal *Node, err error) {
	return Neg(gradY)
}
// negDiff is the value-level autodiff of -x: subtracts dy from x's deriv
// in place (xdv.d = xdv.d - ydv.d).
func negDiff(x, y *Node) (err error) {
	xdv := x.boundTo.(*dualValue)
	ydv := y.boundTo.(*dualValue)
	sub := newElemBinOp(subOpType, x, y)
	_, err = sub.UnsafeDo(xdv.d, ydv.d)
	if ver, ok := err.(Valuer); ok {
		return xdv.SetDeriv(ver.Value())
	}
	return
}
// squareDiffExpr builds the symbolic derivative of x²: 2x ⊙ gradY.
func squareDiffExpr(x, y, gradY *Node) (retVal *Node, err error) {
	var two *Node
	var dt Dtype
	if dt, err = dtypeOf(x.t); err != nil {
		return
	}
	// pick the constant 2 matching the operand's dtype
	switch dt {
	case Float32:
		two = twof32
	case Float64:
		two = twof64
	default:
		err = NewError(typeError, "squareDiffExpr does not handle Dtypes other than Float32 and Float64. Got %v instead", dt)
		return
	}
	if retVal, err = HadamardProd(x, two); err == nil {
		symdiffLogf("Spawned: %d", retVal.ID())
		WithGroupName(gradClust)(retVal)
		retVal, err = HadamardProd(retVal, gradY)
		// NOTE(review): if the HadamardProd above errored, retVal may be nil
		// here and retVal.ID() could panic — confirm ID()/symdiffLogf are
		// nil-safe, or guard this log call on err == nil.
		symdiffLogf("Spawned: %d", retVal.ID())
	}
	return
}
// squareDiff is the value-level autodiff of x²: accumulates 2x*dy into x's deriv.
func squareDiff(x, y *Node) (err error) {
	xdv := x.boundTo.(*dualValue)
	ydv := y.boundTo.(*dualValue)
	var two *Node
	var dt Dtype
	if dt, err = dtypeOf(x.t); err != nil {
		return
	}
	// pick the constant 2 matching the operand's dtype
	switch dt {
	case Float32:
		two = twof32
	case Float64:
		two = twof64
	default:
		// fixed copy-paste: message previously named "squareDiffExpr"
		err = NewError(typeError, "squareDiff does not handle Dtypes other than Float32 and Float64. Got %v instead", dt)
		return
	}
	mul := newElemBinOp(mulOpType, x, y)
	var d Value
	if d, err = mul.Do(xdv.Value, two.boundTo); err == nil {
		if dT, ok := d.(Tensor); ok {
			defer returnTensor(dT) // recycle the temporary 2x tensor
		}
		err = mul.IncrDo(xdv.d, d, ydv.d) // xdv.d += 2x * ydv.d
		if ver, ok := err.(Valuer); ok {
			xdv.SetDeriv(ver.Value()) // ignore errors on purpose
			return nil
		}
	}
	return
}
// sqrtDiffExpr builds the symbolic derivative of sqrt(x): gradY / (2 * y),
// where y = sqrt(x).
func sqrtDiffExpr(x, y, gradY *Node) (retVal *Node, err error) {
	var two *Node
	var dt Dtype
	if dt, err = dtypeOf(x.t); err != nil {
		return
	}
	// pick the constant 2 matching the operand's dtype
	switch dt {
	case Float32:
		two = twof32
	case Float64:
		two = twof64
	default:
		err = NewError(typeError, "sqrtDiffExpr does not handle Dtypes other than Float32 and Float64. Got %v instead", dt)
		return
	}
	if retVal, err = HadamardProd(two, y); err == nil {
		WithGroupName(gradClust)(retVal)
		retVal, err = HadamardDiv(gradY, retVal)
	}
	return
}
// sqrtDiff is the value-level autodiff of sqrt(x): accumulates dy / (2*y)
// into x's deriv, where y = sqrt(x).
func sqrtDiff(x, y *Node) (err error) {
	xdv := x.boundTo.(*dualValue)
	ydv := y.boundTo.(*dualValue)
	var two *Node
	var dt Dtype
	if dt, err = dtypeOf(x.t); err != nil {
		return
	}
	// pick the constant 2 matching the operand's dtype
	switch dt {
	case Float32:
		two = twof32
	case Float64:
		two = twof64
	default:
		err = NewError(typeError, "sqrtDiff does not handle Dtypes other than Float32 and Float64. Got %v instead", dt)
		return
	}
	mul := newElemBinOp(mulOpType, x, y)
	var d Value
	if d, err = mul.Do(ydv.Value, two.boundTo); err == nil { // d = 2*y
		if dT, ok := d.(Tensor); ok {
			defer returnTensor(dT) // recycle the temporary tensor
		}
		div := newElemBinOp(divOpType, y, x)
		err = div.IncrDo(xdv.d, ydv.d, d) // xdv.d += ydv.d / (2*y)
		if ver, ok := err.(Valuer); ok {
			xdv.SetDeriv(ver.Value()) // ignore errors on purpose
			return nil
		}
	}
	return
}
// inverseDiffExpr builds the symbolic derivative of 1/x: -y² ⊙ gradY,
// where y = 1/x (so -y² = -1/x²).
func inverseDiffExpr(x, y, gradY *Node) (retVal *Node, err error) {
	if retVal, err = HadamardProd(y, y); err != nil {
		return
	}
	WithGroupName(gradClust)(retVal)
	if retVal, err = Neg(retVal); err != nil {
		return
	}
	WithGroupName(gradClust)(retVal)
	retVal, err = HadamardProd(retVal, gradY)
	return
}
// inverseDiff is the value-level autodiff of 1/x: accumulates -y²*dy into
// x's deriv, where y = 1/x.
func inverseDiff(x, y *Node) (err error) {
	xdv := x.boundTo.(*dualValue)
	ydv := y.boundTo.(*dualValue)
	sq := newElemUnaryOp(squareOpType, y)
	var d Value
	if d, err = sq.Do(ydv.Value); err != nil { // d = y²
		err = errors.Wrapf(err, doFail, sq)
		return
	}
	neg := newElemUnaryOp(negOpType, y)
	if d, err = neg.Do(d); err != nil { // d = -y²
		err = errors.Wrapf(err, doFail, neg)
		return
	}
	if dT, ok := d.(Tensor); ok {
		defer returnTensor(dT) // recycle the temporary tensor
	}
	mul := newElemBinOp(mulOpType, y, y)
	err = mul.IncrDo(xdv.d, d, ydv.d) // xdv.d += -y² * ydv.d
	if ver, ok := err.(Valuer); ok {
		xdv.SetDeriv(ver.Value()) // ignore errors on purpose
		return nil
	}
	return
}
// cubeDiffExpr builds the symbolic derivative of x³: 3x² ⊙ gradY.
func cubeDiffExpr(x, y, gradY *Node) (retVal *Node, err error) {
	var three *Node
	var dt Dtype
	if dt, err = dtypeOf(x.t); err != nil {
		return
	}
	// pick the constant 3 matching the operand's dtype
	switch dt {
	case Float32:
		three = threef32
	case Float64:
		three = threef64
	default:
		err = NewError(typeError, "cubeDiffExpr does not handle Dtypes other than Float32 and Float64. Got %v instead", dt)
		return
	}
	if retVal, err = HadamardProd(x, x); err == nil {
		WithGroupName(gradClust)(retVal)
		if retVal, err = HadamardProd(retVal, three); err == nil {
			WithGroupName(gradClust)(retVal)
			retVal, err = HadamardProd(retVal, gradY)
		}
	}
	return
}
// cubeDiff is the value-level autodiff of x³: accumulates 3x²*dy into x's deriv.
func cubeDiff(x, y *Node) (err error) {
	xdv := x.boundTo.(*dualValue)
	ydv := y.boundTo.(*dualValue)
	var three *Node
	var dt Dtype
	if dt, err = dtypeOf(x.t); err != nil {
		return
	}
	// pick the constant 3 matching the operand's dtype
	switch dt {
	case Float32:
		three = threef32
	case Float64:
		three = threef64
	default:
		err = NewError(typeError, "cubeDiff does not handle Dtypes other than Float32 and Float64. Got %v instead", dt)
		return
	}
	mul := newElemBinOp(mulOpType, x, y)
	var d Value
	if d, err = mul.Do(xdv.Value, xdv.Value); err != nil { // d = x²
		err = errors.Wrapf(err, doFail, mul)
		return
	}
	if dT, ok := d.(Tensor); ok {
		defer returnTensor(dT) // recycle the temporary tensor
	}
	// reuse d's storage: d = 3x² (safe — d is a private temporary)
	if d, err = mul.UnsafeDo(d, three.boundTo); err != nil {
		err = errors.Wrapf(err, unsafeDoFail, mul)
		return
	}
	err = mul.IncrDo(xdv.d, d, ydv.d) // xdv.d += 3x² * ydv.d
	if ver, ok := err.(Valuer); ok {
		xdv.SetDeriv(ver.Value()) // ignore errors on purpose
		return nil
	}
	return
}
// tanhDiffExpr builds the symbolic derivative of tanh(x): (1 - y²) ⊙ gradY,
// where y = tanh(x).
func tanhDiffExpr(x, y, gradY *Node) (retVal *Node, err error) {
	var one *Node
	var dt Dtype
	if dt, err = dtypeOf(x.t); err != nil {
		return
	}
	// pick the constant 1 matching the operand's dtype
	switch dt {
	case Float32:
		one = onef32
	case Float64:
		one = onef64
	default:
		err = NewError(typeError, "tanhDiffExpr does not handle Dtypes other than Float32 and Float64. Got %v instead", dt)
		return
	}
	if retVal, err = HadamardProd(y, y); err == nil {
		WithGroupName(gradClust)(retVal)
		if retVal, err = Sub(one, retVal); err == nil {
			WithGroupName(gradClust)(retVal)
			retVal, err = HadamardProd(retVal, gradY)
		}
	}
	return
}
// tanhDiff is the value-level autodiff of tanh(x): accumulates (1 - y²)*dy
// into x's deriv, where y = tanh(x).
func tanhDiff(x, y *Node) (err error) {
	xdv := x.boundTo.(*dualValue)
	ydv := y.boundTo.(*dualValue)
	var one *Node
	var dt Dtype
	if dt, err = dtypeOf(x.t); err != nil {
		return
	}
	// pick the constant 1 matching the operand's dtype
	switch dt {
	case Float32:
		one = onef32
	case Float64:
		one = onef64
	default:
		// fixed copy-paste: message previously named "tanhDiffExpr"
		err = NewError(typeError, "tanhDiff does not handle Dtypes other than Float32 and Float64. Got %v instead", dt)
		return
	}
	sq := newElemUnaryOp(squareOpType, y)
	var d Value
	if d, err = sq.Do(ydv.Value); err != nil { // d = y²
		err = errors.Wrapf(err, doFail, sq)
		return
	}
	if dT, ok := d.(Tensor); ok {
		defer returnTensor(dT) // recycle the temporary tensor
	}
	sub := newElemBinOp(subOpType, one, y)
	// NOTE(review): UnsafeDo presumably reuses the first operand's storage;
	// passing the shared constant one.boundTo here could clobber it. Compare
	// sigmoidDiff, which uses the allocating sub.Do for the same shape of
	// computation — verify UnsafeDo's mutation semantics.
	if d, err = sub.UnsafeDo(one.boundTo, d); err != nil {
		err = errors.Wrapf(err, unsafeDoFail, sub)
		return
	}
	mul := newElemBinOp(mulOpType, x, y)
	err = mul.IncrDo(xdv.d, d, ydv.d) // xdv.d += (1 - y²) * ydv.d
	if ver, ok := err.(Valuer); ok {
		xdv.SetDeriv(ver.Value()) // ignore errors on purpose
		return nil
	}
	return
}
// sigmoidDiffExpr builds the symbolic derivative of σ(x): y(1 - y) ⊙ gradY,
// where y = σ(x).
func sigmoidDiffExpr(x, y, gradY *Node) (retVal *Node, err error) {
	var one *Node
	var dt Dtype
	if dt, err = dtypeOf(x.t); err != nil {
		return
	}
	// pick the constant 1 matching the operand's dtype
	switch dt {
	case Float32:
		one = onef32
	case Float64:
		one = onef64
	default:
		// fixed copy-paste: message previously named "tanhDiffExpr"
		err = NewError(typeError, "sigmoidDiffExpr does not handle Dtypes other than Float32 and Float64. Got %v instead", dt)
		return
	}
	if retVal, err = Sub(one, y); err == nil {
		WithGroupName(gradClust)(retVal)
		if retVal, err = HadamardProd(y, retVal); err == nil {
			WithGroupName(gradClust)(retVal)
			retVal, err = HadamardProd(retVal, gradY)
		}
	}
	return
}
// sigmoidDiff is the value-level autodiff of σ(x): accumulates y(1 - y)*dy
// into x's deriv, where y = σ(x).
func sigmoidDiff(x, y *Node) (err error) {
	xdv := x.boundTo.(*dualValue)
	ydv := y.boundTo.(*dualValue)
	var one *Node
	var dt Dtype
	if dt, err = dtypeOf(x.t); err != nil {
		return
	}
	// pick the constant 1 matching the operand's dtype
	switch dt {
	case Float32:
		one = onef32
	case Float64:
		one = onef64
	default:
		// fixed copy-paste: message previously named "tanhDiffExpr"
		err = NewError(typeError, "sigmoidDiff does not handle Dtypes other than Float32 and Float64. Got %v instead", dt)
		return
	}
	sub := newElemBinOp(subOpType, one, y)
	var d Value
	if d, err = sub.Do(one.boundTo, ydv.Value); err != nil { // d = 1 - y
		err = errors.Wrapf(err, doFail, sub)
		return
	}
	if dT, ok := d.(Tensor); ok {
		defer returnTensor(dT) // recycle the temporary tensor
	}
	mul := newElemBinOp(mulOpType, x, y)
	// reuse d's storage: d = y * (1 - y) (safe — d is a private temporary)
	if d, err = mul.UnsafeDo(d, ydv.Value); err != nil {
		err = errors.Wrapf(err, unsafeDoFail, mul)
		return
	}
	err = mul.IncrDo(xdv.d, d, ydv.d) // xdv.d += y(1 - y) * ydv.d
	if ver, ok := err.(Valuer); ok {
		xdv.SetDeriv(ver.Value()) // ignore errors on purpose
		return nil
	}
	return
}
// 1/(x+1)
// log1pDiffExpr builds the symbolic derivative of log1p(x): gradY / (x + 1).
func log1pDiffExpr(x, y, gradY *Node) (retVal *Node, err error) {
	var one *Node
	var dt Dtype
	if dt, err = dtypeOf(x.t); err != nil {
		return
	}
	// pick the constant 1 matching the operand's dtype
	switch dt {
	case Float32:
		one = onef32
	case Float64:
		one = onef64
	default:
		err = NewError(typeError, "log1pDiffExpr does not handle Dtypes other than Float32 and Float64. Got %v instead", dt)
		return
	}
	if retVal, err = Add(x, one); err == nil {
		WithGroupName(gradClust)(retVal)
		retVal, err = HadamardDiv(gradY, retVal)
	}
	return
}
// log1pDiff is the value-level autodiff of log1p(x): accumulates dy / (x + 1)
// into x's deriv.
func log1pDiff(x, y *Node) (err error) {
	xdv := x.boundTo.(*dualValue)
	ydv := y.boundTo.(*dualValue)
	var one *Node
	var dt Dtype
	if dt, err = dtypeOf(x.t); err != nil {
		return
	}
	// pick the constant 1 matching the operand's dtype
	switch dt {
	case Float32:
		one = onef32
	case Float64:
		one = onef64
	default:
		// fixed copy-paste: message previously named "log1pDiffExpr"
		err = NewError(typeError, "log1pDiff does not handle Dtypes other than Float32 and Float64. Got %v instead", dt)
		return
	}
	add := newElemBinOp(addOpType, x, one)
	var d Value
	if d, err = add.Do(xdv.Value, one.boundTo); err != nil { // d = x + 1
		err = errors.Wrapf(err, doFail, add)
		return
	}
	if dT, ok := d.(Tensor); ok {
		defer returnTensor(dT) // recycle the temporary tensor
	}
	div := newElemBinOp(divOpType, y, x)
	err = div.IncrDo(xdv.d, ydv.d, d) // xdv.d += ydv.d / (x + 1)
	if ver, ok := err.(Valuer); ok {
		xdv.SetDeriv(ver.Value()) // ignore errors on purpose
		return nil
	}
	return
}
// expm1DiffExpr builds the symbolic derivative of expm1(x): e^x ⊙ gradY.
func expm1DiffExpr(x, y, gradY *Node) (retVal *Node, err error) {
	if retVal, err = Exp(x); err != nil {
		return
	}
	WithGroupName(gradClust)(retVal)
	return HadamardProd(gradY, retVal)
}
// expm1Diff is the value-level autodiff of expm1(x): accumulates e^x * dy
// into x's deriv.
func expm1Diff(x, y *Node) (err error) {
	xdv := x.boundTo.(*dualValue)
	ydv := y.boundTo.(*dualValue)
	exp := newElemUnaryOp(expOpType, x)
	var d Value
	if d, err = exp.Do(xdv.Value); err != nil { // d = e^x
		err = errors.Wrapf(err, doFail, exp)
		return
	}
	if dT, ok := d.(Tensor); ok {
		defer returnTensor(dT) // recycle the temporary tensor
	}
	mul := newElemBinOp(mulOpType, x, y)
	err = mul.IncrDo(xdv.d, d, ydv.d) // xdv.d += e^x * ydv.d
	if ver, ok := err.(Valuer); ok {
		xdv.SetDeriv(ver.Value()) // ignore errors on purpose
		return nil
	}
	return
}
// softplusDiffExpr builds the symbolic derivative of softplus(x): σ(x) ⊙ gradY.
func softplusDiffExpr(x, y, gradY *Node) (retVal *Node, err error) {
	if retVal, err = Sigmoid(x); err != nil {
		return
	}
	WithGroupName(gradClust)(retVal)
	return HadamardProd(retVal, gradY)
}
// softplusDiff is the value-level autodiff of softplus(x): accumulates
// σ(x) * dy into x's deriv.
func softplusDiff(x, y *Node) (err error) {
	xdv := x.boundTo.(*dualValue)
	ydv := y.boundTo.(*dualValue)
	sigmoid := newElemUnaryOp(sigmoidOpType, x)
	var d Value
	if d, err = sigmoid.Do(xdv.Value); err != nil { // d = σ(x)
		err = errors.Wrapf(err, doFail, sigmoid)
		return
	}
	if dT, ok := d.(Tensor); ok {
		defer returnTensor(dT) // recycle the temporary tensor
	}
	mul := newElemBinOp(mulOpType, x, y)
	err = mul.IncrDo(xdv.d, d, ydv.d) // xdv.d += σ(x) * ydv.d
	if ver, ok := err.(Valuer); ok {
		xdv.SetDeriv(ver.Value()) // ignore errors on purpose
		return nil
	}
	return
}
|
grizzi/ocs2 | ocs2_ocs2/include/ocs2_ocs2/sensitivity_equations/RolloutSensitivityEquations.h | <filename>ocs2_ocs2/include/ocs2_ocs2/sensitivity_equations/RolloutSensitivityEquations.h
/******************************************************************************
Copyright (c) 2017, <NAME>. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
******************************************************************************/
#pragma once
#include <Eigen/Dense>
#include <array>
#include <ocs2_core/Dimensions.h>
#include <ocs2_core/control/LinearController.h>
#include <ocs2_core/dynamics/ControlledSystemBase.h>
#include <ocs2_core/misc/LinearInterpolation.h>
#include <ocs2_core/misc/Numerics.h>
#include <ocs2_core/model_data/ModelDataLinearInterpolation.h>
namespace ocs2 {
/**
* Rollout sensitivity equations class
*
* @tparam STATE_DIM: Dimension of the state space.
* @tparam INPUT_DIM: Dimension of the control input space.
*/
template <size_t STATE_DIM, size_t INPUT_DIM>
class RolloutSensitivityEquations final : public ControlledSystemBase<STATE_DIM, INPUT_DIM> {
 public:
  EIGEN_MAKE_ALIGNED_OPERATOR_NEW

  using BASE = ControlledSystemBase<STATE_DIM, INPUT_DIM>;
  using DIMENSIONS = Dimensions<STATE_DIM, INPUT_DIM>;
  using scalar_t = typename DIMENSIONS::scalar_t;
  using scalar_array_t = typename DIMENSIONS::scalar_array_t;
  using state_vector_t = typename DIMENSIONS::state_vector_t;
  using state_vector_array_t = typename DIMENSIONS::state_vector_array_t;
  using input_vector_t = typename DIMENSIONS::input_vector_t;
  using input_vector_array_t = typename DIMENSIONS::input_vector_array_t;
  using state_matrix_t = typename DIMENSIONS::state_matrix_t;
  using state_matrix_array_t = typename DIMENSIONS::state_matrix_array_t;
  using state_input_matrix_t = typename DIMENSIONS::state_input_matrix_t;
  using state_input_matrix_array_t = typename DIMENSIONS::state_input_matrix_array_t;
  using input_state_matrix_t = typename DIMENSIONS::input_state_matrix_t;
  using input_state_matrix_array_t = typename DIMENSIONS::input_state_matrix_array_t;
  using dynamic_vector_t = typename DIMENSIONS::dynamic_vector_t;
  using dynamic_matrix_t = typename DIMENSIONS::dynamic_matrix_t;
  using linear_controller_t = LinearController<STATE_DIM, INPUT_DIM>;

  /**
   * The default constructor.
   */
  RolloutSensitivityEquations() = default;

  /**
   * Default destructor.
   */
  ~RolloutSensitivityEquations() override = default;

  /**
   * Returns pointer to the class.
   *
   * @return A raw pointer to the class.
   */
  RolloutSensitivityEquations<STATE_DIM, INPUT_DIM>* clone() const override {
    return new RolloutSensitivityEquations<STATE_DIM, INPUT_DIM>(*this);
  }

  /**
   * Sets Data. Keeps non-owning pointers to the passed trajectories; the
   * caller must keep them alive while this object is in use. Must be called
   * before computeFlowMap().
   */
  void setData(const scalar_array_t* timeTrajectoryPtr, const ModelData::array_t* modelDataPtr,
               const scalar_array_t* sensitivityControllerTimePtr, const input_vector_array_t* sensitivityControllerFeedforwardPtr,
               const input_state_matrix_array_t* sensitivityControllerFeedbackPtr) {
    timeTrajectoryPtr_ = timeTrajectoryPtr;
    modelDataPtr_ = modelDataPtr;
    linearController_.setController(*sensitivityControllerTimePtr, *sensitivityControllerFeedforwardPtr, *sensitivityControllerFeedbackPtr);
    this->setController(&linearController_);
  }

  /**
   * Sets the multiplier of exogenous part of the equation. It is either zero
   * or plus-minus 1/(s_{i+1}-s_{i})
   *
   * @param [in] multiplier: the multiplier of exogenous part of the equation.
   */
  void setMultiplier(const scalar_t& multiplier) { multiplier_ = multiplier; }

  /**
   * Computes Derivative
   *
   * @param [in] t: time
   * @param [in] nabla_x: state sensitivity vector
   * @param [in] nabla_u: input sensitivity vector which is computed by using sensitivity controller.
   * @param [out] derivative: time derivative of the state sensitivity vector
   */
  void computeFlowMap(const scalar_t& t, const state_vector_t& nabla_x, const input_vector_t& nabla_u,
                      state_vector_t& derivative) override {
    const auto indexAlpha = LinearInterpolation::timeSegment(t, timeTrajectoryPtr_);
    ModelData::interpolate(indexAlpha, Am_, modelDataPtr_, ModelData::dynamicsStateDerivative);
    ModelData::interpolate(indexAlpha, Bm_, modelDataPtr_, ModelData::dynamicsInputDerivative);

    // The exogenous flow term is only added when the multiplier is nonzero,
    // which saves one interpolation in the common (zero) case.
    if (!numerics::almost_eq(multiplier_, 0.0)) {
      ModelData::interpolate(indexAlpha, Fv_, modelDataPtr_, ModelData::dynamics);
      derivative = Am_ * nabla_x + Bm_ * nabla_u + multiplier_ * Fv_;
    } else {
      derivative = Am_ * nabla_x + Bm_ * nabla_u;
    }
  }

 protected:
  scalar_t multiplier_ = 0.0;
  // Non-owning pointers into caller-owned trajectories. Value-initialized to
  // nullptr so a use-before-setData() fails deterministically instead of
  // dereferencing an indeterminate pointer (they were previously left
  // uninitialized by the defaulted constructor).
  const scalar_array_t* timeTrajectoryPtr_ = nullptr;
  const ModelData::array_t* modelDataPtr_ = nullptr;
  linear_controller_t linearController_;
  // Interpolation caches reused across computeFlowMap() calls.
  dynamic_vector_t Fv_;
  dynamic_matrix_t Am_;
  dynamic_matrix_t Bm_;
};
} // namespace ocs2
|
Elwetana/LED_controller | include/listener.h | <reponame>Elwetana/LED_controller
#ifndef __LISTENER_H__
#define __LISTENER_H__
#ifdef __cplusplus
extern "C" {
#endif
/* Endpoint the listener subscribes to (tcp URL; presumably a ZeroMQ
 * SUB socket -- confirm against listener.c). */
#define LISTENER_ADDRESS "tcp://localhost:5556"
/* Messaging context and subscriber socket, stored as opaque handles. */
struct Listener {
    void* context;
    void* subscriber;
};
/* Initialise the listener; returns a status code (semantics defined in listener.c). */
int Listener_init();
/* Tear down the listener and release its resources. */
void Listener_destruct();
/* Poll for a pending message. NOTE(review): ownership of the returned
 * string (caller-freed vs. static buffer) is defined in listener.c -- verify
 * before freeing. May return NULL when no message is pending -- confirm. */
char* Listener_poll_message();
#ifdef __cplusplus
}
#endif
#endif /* __LISTENER_H__ */ |
ppartarr/azure-sdk-for-java | sdk/network/mgmt-v2019_06_01/src/main/java/com/microsoft/azure/management/network/v2019_06_01/implementation/VirtualNetworkGatewayImpl.java | <filename>sdk/network/mgmt-v2019_06_01/src/main/java/com/microsoft/azure/management/network/v2019_06_01/implementation/VirtualNetworkGatewayImpl.java
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.management.network.v2019_06_01.implementation;
import com.microsoft.azure.arm.resources.models.implementation.GroupableResourceCoreImpl;
import com.microsoft.azure.management.network.v2019_06_01.VirtualNetworkGateway;
import rx.Observable;
import java.util.List;
import com.microsoft.azure.management.network.v2019_06_01.VirtualNetworkGatewayIPConfiguration;
import com.microsoft.azure.management.network.v2019_06_01.VirtualNetworkGatewayType;
import com.microsoft.azure.management.network.v2019_06_01.VpnType;
import com.microsoft.azure.SubResource;
import com.microsoft.azure.management.network.v2019_06_01.VirtualNetworkGatewaySku;
import com.microsoft.azure.management.network.v2019_06_01.VpnClientConfiguration;
import com.microsoft.azure.management.network.v2019_06_01.BgpSettings;
import com.microsoft.azure.management.network.v2019_06_01.AddressSpace;
class VirtualNetworkGatewayImpl extends GroupableResourceCoreImpl<VirtualNetworkGateway, VirtualNetworkGatewayInner, VirtualNetworkGatewayImpl, NetworkManager> implements VirtualNetworkGateway, VirtualNetworkGateway.Definition, VirtualNetworkGateway.Update {
    // Wraps an inner gateway model; the GroupableResourceCoreImpl base handles
    // resource-group/name bookkeeping.
    VirtualNetworkGatewayImpl(String name, VirtualNetworkGatewayInner inner, NetworkManager manager) {
        super(name, inner, manager);
    }
    // Issues the createOrUpdate (PUT) request and maps the result back onto this wrapper.
    @Override
    public Observable<VirtualNetworkGateway> createResourceAsync() {
        VirtualNetworkGatewaysInner client = this.manager().inner().virtualNetworkGateways();
        return client.createOrUpdateAsync(this.resourceGroupName(), this.name(), this.inner())
            .map(innerToFluentMap(this));
    }
@Override
public Observable<VirtualNetworkGateway> updateResourceAsync() {
VirtualNetworkGatewaysInner client = this.manager().inner().virtualNetworkGateways();
return client.createOrUpdateAsync(this.resourceGroupName(), this.name(), this.inner())
.map(innerToFluentMap(this));
}
@Override
protected Observable<VirtualNetworkGatewayInner> getInnerAsync() {
VirtualNetworkGatewaysInner client = this.manager().inner().virtualNetworkGateways();
return client.getByResourceGroupAsync(this.resourceGroupName(), this.name());
}
@Override
public boolean isInCreateMode() {
return this.inner().id() == null;
}
@Override
public Boolean activeActive() {
return this.inner().activeActive();
}
@Override
public BgpSettings bgpSettings() {
return this.inner().bgpSettings();
}
@Override
public AddressSpace customRoutes() {
return this.inner().customRoutes();
}
@Override
public Boolean enableBgp() {
return this.inner().enableBgp();
}
@Override
public String etag() {
return this.inner().etag();
}
@Override
public SubResource gatewayDefaultSite() {
return this.inner().gatewayDefaultSite();
}
@Override
public VirtualNetworkGatewayType gatewayType() {
return this.inner().gatewayType();
}
@Override
public List<VirtualNetworkGatewayIPConfiguration> ipConfigurations() {
return this.inner().ipConfigurations();
}
@Override
public String provisioningState() {
return this.inner().provisioningState();
}
@Override
public String resourceGuid() {
return this.inner().resourceGuid();
}
@Override
public VirtualNetworkGatewaySku sku() {
return this.inner().sku();
}
@Override
public VpnClientConfiguration vpnClientConfiguration() {
return this.inner().vpnClientConfiguration();
}
@Override
public VpnType vpnType() {
return this.inner().vpnType();
}
@Override
public VirtualNetworkGatewayImpl withActiveActive(Boolean activeActive) {
this.inner().withActiveActive(activeActive);
return this;
}
@Override
public VirtualNetworkGatewayImpl withBgpSettings(BgpSettings bgpSettings) {
this.inner().withBgpSettings(bgpSettings);
return this;
}
@Override
public VirtualNetworkGatewayImpl withCustomRoutes(AddressSpace customRoutes) {
this.inner().withCustomRoutes(customRoutes);
return this;
}
@Override
public VirtualNetworkGatewayImpl withEnableBgp(Boolean enableBgp) {
this.inner().withEnableBgp(enableBgp);
return this;
}
@Override
public VirtualNetworkGatewayImpl withEtag(String etag) {
this.inner().withEtag(etag);
return this;
}
@Override
public VirtualNetworkGatewayImpl withGatewayDefaultSite(SubResource gatewayDefaultSite) {
this.inner().withGatewayDefaultSite(gatewayDefaultSite);
return this;
}
@Override
public VirtualNetworkGatewayImpl withGatewayType(VirtualNetworkGatewayType gatewayType) {
this.inner().withGatewayType(gatewayType);
return this;
}
@Override
public VirtualNetworkGatewayImpl withIpConfigurations(List<VirtualNetworkGatewayIPConfiguration> ipConfigurations) {
this.inner().withIpConfigurations(ipConfigurations);
return this;
}
@Override
public VirtualNetworkGatewayImpl withResourceGuid(String resourceGuid) {
this.inner().withResourceGuid(resourceGuid);
return this;
}
@Override
public VirtualNetworkGatewayImpl withSku(VirtualNetworkGatewaySku sku) {
this.inner().withSku(sku);
return this;
}
@Override
public VirtualNetworkGatewayImpl withVpnClientConfiguration(VpnClientConfiguration vpnClientConfiguration) {
this.inner().withVpnClientConfiguration(vpnClientConfiguration);
return this;
}
@Override
public VirtualNetworkGatewayImpl withVpnType(VpnType vpnType) {
this.inner().withVpnType(vpnType);
return this;
}
}
|
civicledger/waterledger-dashboard | src/components/dashboard/Graph.js | import React from "react";
import { ResponsiveLine } from "@nivo/line";
// Static demo series for the price-history graph. All three series share the
// same twelve-month x axis; only the id, colour, and y values differ, so the
// nivo data structure is generated from compact arrays.
const MONTHS = [
  "Sep 2020", "Oct 2020", "Nov 2020", "Dec 2020", "Jan 2021", "Feb 2021",
  "Mar 2021", "Apr 2021", "May 2021", "Jun 2021", "Jul 2021", "Aug 2021",
];

// Builds one nivo line series: { id, color, data: [{ x, y }, ...] }.
const series = (id, color, ys) => ({
  id,
  color,
  data: MONTHS.map((x, i) => ({ x, y: ys[i] })),
});

const data = [
  series("avg", "hsl(189, 70%, 50%)", [101, 110, 100, 141, 150, 158, 160, 155, 150, 160, 160, 170]),
  series("min", "hsl(53, 70%, 50%)", [100, 90, 93, 79, 133, 145, 148, 150, 141, 142, 150, 130]),
  series("max", "hsl(145, 70%, 50%)", [150, 115, 120, 150, 170, 168, 167, 159, 157, 172, 164, 183]),
];
export default () => {
return (
<div className="pb-10 px-2" id="graph" style={{ height: "400px" }}>
<ResponsiveLine
data={data}
theme={{
textColor: "#ffffff",
axis: {
domain: {
line: {
stroke: "#ffffff",
strokeWidth: 1,
},
},
ticks: {
line: {
stroke: "#ffffff",
strokeWidth: 1,
},
},
legend: {
text: {
fontSize: 18,
},
},
},
grid: {
line: {
stroke: "#2A3140",
strokeWidth: 0.5,
},
},
}}
margin={{ top: 50, right: 110, bottom: 70, left: 60 }}
xScale={{ type: "point" }}
yScale={{ type: "linear", min: "auto", max: "auto", stacked: false, reverse: false }}
yFormat=" >-.2f"
axisTop={null}
axisBottom={{
orient: "bottom",
tickSize: 5,
tickPadding: 5,
tickRotation: 45,
legend: "Month",
legendOffset: 60,
legendPosition: "middle",
}}
axisLeft={{
orient: "left",
tickSize: 5,
tickPadding: 5,
tickRotation: 0,
legend: "Price in $/ML",
legendOffset: -50,
legendPosition: "middle",
}}
axisRight={{
orient: "right",
tickSize: 5,
tickPadding: 5,
tickRotation: 0,
legendOffset: 50,
legendPosition: "middle",
}}
pointSize={3}
isInteractive={false}
pointColor={{ theme: "background" }}
pointBorderWidth={2}
pointBorderColor={{ from: "serieColor" }}
pointLabelYOffset={-12}
useMesh={true}
legends={[
{
anchor: "top-right",
direction: "column",
justify: false,
translateX: 115,
translateY: 0,
itemsSpacing: 0,
itemDirection: "left-to-right",
itemWidth: 80,
itemHeight: 20,
itemOpacity: 0.75,
symbolSize: 12,
symbolShape: "circle",
symbolBorderColor: "rgba(0, 0, 0, .5)",
effects: [
{
on: "hover",
style: {
itemBackground: "rgba(12, 0, 0, .03)",
itemOpacity: 1,
},
},
],
},
]}
/>
</div>
);
};
|
dk-dev/balanced-dashboard | app/views/form-fields/search-date-picker.js | <filename>app/views/form-fields/search-date-picker.js
import DatePickerView from './date-picker';

// Date picker used inside search filters. When a range is picked, it forwards
// the (start, end, label) triple up to the parent view as a
// "changeDateFilter" action.
var SearchDatePickerView = DatePickerView.extend({
	_changeDateFilter: function(label) {
		var start = new Date(this.get('minTime'));
		var end = new Date(this.get('maxTime'));
		this.get('parentView').send('changeDateFilter', start, end, label);
	}
});

export default SearchDatePickerView;
|
ericadeckl/opensphere | src/os/ui/onboarding/onboardingevent.js | goog.provide('os.ui.onboarding.OnboardingEvent');
goog.require('goog.events.Event');
goog.require('os.ui.EventType');
/**
* Configuration for user onboarding.
*
* @param {string} title Onboarding set title.
* @param {Array.<Object>} steps The onboarding steps.
* @param {Object=} opt_config ngOnboarding configuration.
* @extends {goog.events.Event}
* @constructor
*/
os.ui.onboarding.OnboardingEvent = function(title, steps, opt_config) {
os.ui.onboarding.OnboardingEvent.base(this, 'constructor', os.ui.EventType.DISPLAY_ONBOARDING);
/**
* Title for the onboarding set.
* @type {string}
*/
this.title = title;
/**
* The ngOnboarding steps.
* @type {Array.<Object>}
*/
this.steps = steps;
/**
* @dict
*/
this.config = opt_config || null;
};
goog.inherits(os.ui.onboarding.OnboardingEvent, goog.events.Event);
|
ducanhnguyen/cft4cpp-for-dummy | src/com/fit/tree/object/CppFileNode.java | <filename>src/com/fit/tree/object/CppFileNode.java
package com.fit.tree.object;
import com.fit.tree.dependency.Dependency;
import com.fit.tree.dependency.IncludeHeaderDependency;
import org.eclipse.cdt.internal.core.dom.parser.cpp.CPPASTTranslationUnit;
import javax.swing.*;
import java.util.ArrayList;
import java.util.List;
/**
 * Project-tree node representing a C++ source file.
 */
public class CppFileNode extends SourcecodeFileNode<CPPASTTranslationUnit> {

    /**
     * Creates the node and tries to attach the C++ file icon.
     */
    public CppFileNode() {
        try {
            Icon cppIcon = new ImageIcon(Node.class.getResource("/image/node/Soucecode-Cpp.png"));
            setIcon(cppIcon);
        } catch (Exception ignored) {
            // The icon is purely cosmetic: if the resource is missing or
            // cannot be loaded, keep the default icon rather than failing.
        }
    }

    /**
     * Returns the subset of this file's dependencies that are
     * {@code #include} header dependencies.
     */
    public List<Dependency> getIncludeHeaderNodes() {
        List<Dependency> includedDependencies = new ArrayList<>();
        for (Dependency dependency : getDependencies()) {
            if (dependency instanceof IncludeHeaderDependency) {
                includedDependencies.add(dependency);
            }
        }
        return includedDependencies;
    }
}
|
aai/rack | vendor/github.com/equinox-io/equinox/doc.go | /*
Package equinox allows applications to remotely update themselves with the equinox.io service.
Minimal Working Example
import "github.com/equinox-io/equinox"
const appID = "<YOUR EQUINOX APP ID>"
var publicKey = []byte(`
-----BEGIN PUBLIC KEY-----
(base64-encoded ECDSA public key material)
-----END PUBLIC KEY-----
`)
func update(channel string) error {
opts := equinox.Options{Channel: channel}
if err := opts.SetPublicKeyPEM(publicKey); err != nil {
return err
}
// check for the update
resp, err := equinox.Check(appID, opts)
switch {
case err == equinox.NotAvailableErr:
fmt.Println("No update available, already at the latest version!")
return nil
case err != nil:
return err
}
// fetch the update and apply it
err = resp.Apply()
if err != nil {
return err
}
fmt.Printf("Updated to new version: %s!\n", resp.ReleaseVersion)
return nil
}
Update To Specific Version
When you specify a channel in the update options, equinox will try to update the application
to the latest release of your application published to that channel. Instead, you may wish to
update the application to a specific (possibly older) version. You can do this by explicitly setting
Version in the Options struct:
opts := equinox.Options{Version: "0.1.2"}
Prompt For Update
You may wish to ask the user for approval before updating to a new version. This is as simple
as calling the Check function and only calling Apply on the returned result if the user approves.
Example:
// check for the update
resp, err := equinox.Check(appID, opts)
switch {
case err == equinox.NotAvailableErr:
fmt.Println("No update available, already at the latest version!")
return nil
case err != nil:
return err
}
fmt.Println("New version available!")
fmt.Println("Version:", resp.ReleaseVersion)
fmt.Println("Name:", resp.ReleaseTitle)
fmt.Println("Details:", resp.ReleaseDescription)
ok := prompt("Would you like to update?")
if !ok {
return
}
err = resp.Apply()
// ...
Generating Keys
All equinox releases must be signed with a private ECDSA key, and all updates verified with the
public key portion. To do that, you'll need to generate a key pair. The equinox release tool can
generate an ecdsa key pair for you easily:
equinox genkey
*/
package equinox
|
jess22664/x3ogre | World/Scene.cpp | <reponame>jess22664/x3ogre<gh_stars>1-10
/*
* Scene.c
*
* Created on: 15.11.2013
* Author: baudenri_local
*/
#include <World/Scene.h>
#include <OgreControllerManager.h>
#include <World/Background.h>
#include <Geometry/Shape.h>
#include <World/WorldInfo.h>
#include <World/Viewpoint.h>
using namespace X3D;
// Tears down the scene. Ogre's controller registry is global, so clear it to
// release any animation controllers registered while this scene was active.
Scene::~Scene() {
	Ogre::ControllerManager::getSingleton().clearControllers();
}

// Returns the scene's WorldInfo node, or nullptr if none has been added.
WorldInfo* Scene::worldInfo() {
	return _worldInfo;
}

// Adds a child to the scene graph and caches scene-specific node types
// (currently only WorldInfo) for direct access later.
void Scene::addChild(const std::shared_ptr<Node>& obj) {
	Group::addChild(obj);
	// check for some Scene specific child nodes
	auto wi = dynamic_cast<WorldInfo*>(obj.get());
	if (wi) {
		_worldInfo = wi;
		return;
	}
}
// Attaches the render viewport and re-applies the currently bound viewpoint
// and background so they take effect on the new viewport.
void Scene::setViewport(Ogre::Viewport* viewport) {
	_viewport = viewport;
	if(bound<Viewpoint>()) {
		bound<Viewpoint>()->onBound(*this); // reconfigure
	}
	if(bound<Background>()) {
		bound<Background>()->apply(_viewport);
	}
}
// Registers a named (DEF'd) node so it can be looked up later.
// Throws std::runtime_error if the name is already taken.
void Scene::registerNode(const std::shared_ptr<Node>& node, const std::string& name) {
	// Single map lookup: emplace refuses to overwrite an existing key, so the
	// previous exists-then-insert double lookup is collapsed into one call.
	if (!_namedNodes.emplace(name, node).second) {
		throw std::runtime_error("duplicate id='"+name+"' in World::registerNode");
	}
}
// True when a node has been registered under the given name.
bool Scene::nodeExists(const std::string& name) const {
	return _namedNodes.count(name) > 0;
}
// Looks up a registered node by name; throws if the name is unknown.
Node* Scene::getNode(const std::string& name) const {
	return _shareNode(name).get();
}

// Returns the stored shared_ptr for a registered node. Throws
// std::runtime_error when no node was registered under the given name.
const std::shared_ptr<Node>& Scene::_shareNode(const std::string& name) const {
	auto it = _namedNodes.find(name);
	if(it == _namedNodes.end()) {
		throw std::runtime_error("no node with name '"+name+"' found");
	}
	return it->second;
}
|
KwangjoJeong/Boost | Example/graph_06/main.cpp | <reponame>KwangjoJeong/Boost
#include <boost/graph/adjacency_list.hpp>
#include <tuple>
#include <algorithm>
#include <iterator>
#include <iostream>
int main()
{
    // Undirected graph as an adjacency list: setS for the edge container
    // (no parallel edges), vecS for the vertex container.
    typedef boost::adjacency_list<boost::setS, boost::vecS,
        boost::undirectedS> graph;

    graph g;

    // Vertex indices for the four corners of a square.
    enum { topLeft, topRight, bottomRight, bottomLeft };

    // Connect the corners into a 4-cycle.
    boost::add_edge(topLeft, topRight, g);
    boost::add_edge(topRight, bottomRight, g);
    boost::add_edge(bottomRight, bottomLeft, g);
    boost::add_edge(bottomLeft, topLeft, g);

    // Print the vertices adjacent to topLeft.
    graph::adjacency_iterator vit, vend;
    std::tie(vit, vend) = boost::adjacent_vertices(topLeft, g);
    std::copy(vit, vend,
        std::ostream_iterator<graph::vertex_descriptor>{std::cout, "\n"});

    // Print the target vertex of every edge leaving topLeft.
    graph::out_edge_iterator eit, eend;
    std::tie(eit, eend) = boost::out_edges(topLeft, g);
    std::for_each(eit, eend,
        [&g](graph::edge_descriptor it)
        { std::cout << boost::target(it, g) << '\n'; });
}
NIVANorge/s-enda-playground | catalog/bindings/gmd/maximum_occurs.py | from dataclasses import dataclass, field
from typing import Optional
__NAMESPACE__ = "http://www.opengis.net/gml"
@dataclass
class MaximumOccurs:
    """gml:maximumOccurs is the maximum number of times that values for this
    parameter group may be included.

    If this attribute is omitted, the maximum number shall be one.
    """

    class Meta:
        # xsdata binding metadata: local element name and its XML namespace.
        name = "maximumOccurs"
        namespace = "http://www.opengis.net/gml"

    # Element text content; required by the schema when the element is present.
    value: Optional[int] = field(
        default=None,
        metadata={
            "required": True,
        },
    )
|
clyoudu/PrettyZoo | specification/src/main/java/cc/cc1234/specification/connection/ZookeeperParams.java | <reponame>clyoudu/PrettyZoo
package cc.cc1234.specification.connection;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.List;
/**
 * Connection parameters for a ZooKeeper server: the server URL and the
 * ACL entries to apply. Getters, setters, equals/hashCode/toString and the
 * all-args/no-args constructors are generated by Lombok.
 */
@Data
@NoArgsConstructor
@AllArgsConstructor
public class ZookeeperParams {

    private String url;

    private List<String> aclList;
}
|
cilium/kube-apate | api/k8s/v1/server/restapi/core_v1/connect_core_v1_delete_node_proxy.go | <gh_stars>1-10
// Code generated by go-swagger; DO NOT EDIT.
// Copyright 2017-2020 Authors of Cilium
// SPDX-License-Identifier: Apache-2.0
package core_v1
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the generate command
import (
"net/http"
"github.com/go-openapi/runtime/middleware"
)
// ConnectCoreV1DeleteNodeProxyHandlerFunc turns a function with the right signature into a connect core v1 delete node proxy handler
type ConnectCoreV1DeleteNodeProxyHandlerFunc func(ConnectCoreV1DeleteNodeProxyParams) middleware.Responder

// Handle executing the request and returning a response
func (fn ConnectCoreV1DeleteNodeProxyHandlerFunc) Handle(params ConnectCoreV1DeleteNodeProxyParams) middleware.Responder {
	return fn(params)
}

// ConnectCoreV1DeleteNodeProxyHandler interface for that can handle valid connect core v1 delete node proxy params
type ConnectCoreV1DeleteNodeProxyHandler interface {
	Handle(ConnectCoreV1DeleteNodeProxyParams) middleware.Responder
}

// NewConnectCoreV1DeleteNodeProxy creates a new http.Handler for the connect core v1 delete node proxy operation
func NewConnectCoreV1DeleteNodeProxy(ctx *middleware.Context, handler ConnectCoreV1DeleteNodeProxyHandler) *ConnectCoreV1DeleteNodeProxy {
	return &ConnectCoreV1DeleteNodeProxy{Context: ctx, Handler: handler}
}

/*ConnectCoreV1DeleteNodeProxy swagger:route DELETE /api/v1/nodes/{name}/proxy core_v1 connectCoreV1DeleteNodeProxy

connect DELETE requests to proxy of Node

*/
type ConnectCoreV1DeleteNodeProxy struct {
	Context *middleware.Context
	Handler ConnectCoreV1DeleteNodeProxyHandler
}

// ServeHTTP binds and validates the request parameters, dispatches the
// request to the handler, and writes the produced response.
func (o *ConnectCoreV1DeleteNodeProxy) ServeHTTP(rw http.ResponseWriter, r *http.Request) {
	route, rCtx, _ := o.Context.RouteInfo(r)
	if rCtx != nil {
		// RouteInfo may return a request enriched with routing context; use it.
		r = rCtx
	}
	var Params = NewConnectCoreV1DeleteNodeProxyParams()
	if err := o.Context.BindValidRequest(r, route, &Params); err != nil { // bind params
		o.Context.Respond(rw, r, route.Produces, route, err)
		return
	}

	res := o.Handler.Handle(Params) // actually handle the request
	o.Context.Respond(rw, r, route.Produces, route, res)
}
|
NICTA/pod-detection | cfmchecker/src/main/java/au/com/nicta/ssrg/pod/cfmchecker/newcore/XorJoinGatewayState.java | <reponame>NICTA/pod-detection
package au.com.nicta.ssrg.pod.cfmchecker.newcore;
import java.util.Arrays;
import java.util.List;
/**
 * Runtime state of an XOR-join gateway. The gateway is pull-driven: when its
 * outgoing link is pulled, it consumes a single token from the first incoming
 * link that has one and produces a single token on the outgoing link.
 */
public class XorJoinGatewayState extends NodeState {
    public XorJoinGatewayState(XorJoinGateway gateway) {
        super(gateway);
    }

    @Override
    public XorJoinGateway getNode() {
        return (XorJoinGateway) super.getNode();
    }

    /** An XOR-join never fires via execution; it only responds to pulls. */
    @Override
    public List<LinkState> shouldPullBeforeExecuted(ProcessStateContext context) {
        throw new UnsupportedOperationException();
    }

    /**
     * If any incoming link has no token remaining, requests that all incoming
     * links be pulled first; otherwise nothing needs pulling.
     */
    @Override
    public List<LinkState> shouldPullBeforePulled(ProcessStateContext context) {
        List<LinkState> linkStatesIn = context.getLinkStatesIn(this);
        for (LinkState linkStateIn : linkStatesIn) {
            if (!linkStateIn.hasTokenRemaining()) {
                return linkStatesIn;
            }
        }
        return null;
    }

    @Override
    public StateChange onExecuted(ProcessStateContext context) {
        throw new UnsupportedOperationException();
    }

    /**
     * Moves one token from the first incoming link that holds one onto the
     * single outgoing link. Returns null when no incoming link has a token.
     */
    @Override
    public StateChange onPulled(LinkState linkStateOutPulled,
                                ProcessStateContext context) {
        List<LinkState> linkStatesIn = context.getLinkStatesIn(this);
        LinkState linkStateOut = getLinkStateOut(context);
        for (LinkState linkStateIn : linkStatesIn) {
            if (linkStateIn.hasTokenRemaining()) {
                // Fresh locals instead of reassigning the enhanced-for loop
                // variable (which obscured that the list itself is untouched).
                LinkState consumedIn = linkStateIn.consumeToken();
                LinkState producedOut = linkStateOut.produceToken();
                return new StateChange(null,
                        Arrays.asList(consumedIn),
                        Arrays.asList(producedOut));
            }
        }
        return null;
    }

    @Override
    public boolean respondsToPull() { return true; }

    /** The XOR-join has exactly one outgoing link. */
    private LinkState getLinkStateOut(ProcessStateContext context) {
        return context.getLinkStatesOut(this).get(0);
    }
}
|
jphkun/CEASIOMpy | ceasiompy/utils/InputClasses/Unconventional/balanceuncclass.py | """
CEASIOMpy: Conceptual Aircraft Design Software
Developed for CFS ENGINEERING, 1015 Lausanne, Switzerland
The script contains the user inputs required for the
balance unconventional analysis.
Python version: >=3.6
| Author : <NAME>
| Date of creation: 2018-09-27
| Last modifiction: 2019-02-20
"""
#=============================================================================
# IMPORTS
#=============================================================================
import numpy as np
#=============================================================================
# CLASSES
#=============================================================================
class BalanceInputs:
    """
    The class contains all the input from user needed
    for the balance analysis.

    Attributes:
        F_PERC (int): Fuel percentage for CoG and MoI evaluation to define
                      only if USER_CASE = True.
        P_PERC (int): Payload percentage for CoG and MoI evaluation to define
                      only if USER_CASE = True.
        WPP (float): Number of points to subdivide the wing upper or lower profile [-].
        SPACING_WING (float): Spacing of nodes for the wing along the span direction [m].
        SPACING_FUSE (float): Spacing of nodes for the fuselage along the radial,
                              circumferencial and longitudial directions [m].
        USER_CASE (boolean): Set True to e able to evaluate the CoG and the MoI
                             with a chosen percentage of fuel mass (F_PERC) and
                             payload percentage (P_PERC).
        USER_EN_PLACEMENT (boolean): Set True if the user defines the the engine
                                     position inside the CPACS file.
    """

    def __init__(self):
        # Fuel / payload percentages: only consulted when USER_CASE is True.
        self.F_PERC = 0
        self.P_PERC = 0
        # Geometry discretization defaults.
        self.WPP = 30.0
        self.SPACING_WING = 0.05
        self.SPACING_FUSE = 0.05
        # Analysis-mode flags, off by default.
        self.USER_CASE = False
        self.USER_EN_PLACEMENT = False
#=============================================================================
class MassesWeights:
    """Aircraft mass values used by the unconventional balance analysis.

    All masses are expressed in [kg] and start out as NaN (not yet
    evaluated), except ``mass_engines`` which accumulates from 0.

    Attributes:
        mass_fuel_maxpass (float): Max fuel mass with max payload [kg].
        mass_fuel_tot (float): Max fuel mass allowed (evaluated) [kg].
        maximum_take_off_mass (float): Maximum take off mass [kg].
        operating_empty_mass (float): Operating empty mass [kg].
        mass_payload (float): Payload mass [kg].
        mass_engines (float): Engines total mass [kg].
    """

    def __init__(self):
        # Every mass is unknown (NaN) until the analysis fills it in.
        self.mass_fuel_maxpass = self.mass_fuel_tot = np.nan
        self.maximum_take_off_mass = self.operating_empty_mass = np.nan
        self.mass_payload = np.nan
        self.mass_engines = 0
#=============================================================================
class BalanceOutputs:
    """Outputs of the unconventional balance analysis.

    Moments of inertia are estimated with the lumped-masses method. For each
    of the six inertia-tensor components ``xx, yy, zz, xy, yz, xz`` there is
    one attribute per mass configuration, all initialized to 0:

    * ``I<c>_lump``      -- maximum take off mass
    * ``I<c>_lump_zfm``  -- zero fuel mass
    * ``I<c>_lump_zpm``  -- zero payload mass
    * ``I<c>_lump_oem``  -- operating empty mass
    * ``I<c>_lump_user`` -- user-defined fuel/payload case
    * ``I<c>en``         -- component relative to the engines

    Centre-of-gravity (x, y, z) positions for the same configurations:
    ``center_of_gravity`` (MTOM), ``cg_zfm``, ``cg_zpm``, ``cg_oem``,
    ``cg_user``.
    """

    # Inertia-tensor components and mass-configuration suffixes used to
    # generate the attribute names documented above.
    _COMPONENTS = ("xx", "yy", "zz", "xy", "yz", "xz")
    _LUMP_SUFFIXES = ("", "_zfm", "_zpm", "_oem", "_user")

    def __init__(self):
        # Generate the 36 moment attributes plus engine contributions.
        for component in self._COMPONENTS:
            for suffix in self._LUMP_SUFFIXES:
                setattr(self, "I%s_lump%s" % (component, suffix), 0)
            setattr(self, "I%sen" % component, 0)
        # Centre-of-gravity results, one per mass configuration.
        for attr in ("center_of_gravity", "cg_zfm", "cg_zpm", "cg_oem", "cg_user"):
            setattr(self, attr, 0)
#=============================================================================
# MAIN
#=============================================================================
if __name__ == '__main__':
    # This module is a library for balanceuncmain.py. The previous code
    # called an undefined name `log` here (the module never creates a
    # logger), so running the file crashed with NameError instead of
    # printing the warning. Print it directly instead.
    print('#########################################################')
    print('# ERROR NOT A STANDALONE PROGRAM, RUN balanceuncmain.py #')
    print('#########################################################')
|
murilocosta/agartha | internal/application/dto/trade.go | <reponame>murilocosta/agartha<gh_stars>0
package dto
import "github.com/murilocosta/agartha/internal/domain"
// TradeWrite is the request payload for proposing a trade between the
// inventories of two survivors.
type TradeWrite struct {
	Sender   *TradeInventoryWrite `json:"sender" validate:"required"`
	Receiver *TradeInventoryWrite `json:"receiver" validate:"required"`
}

// TradeInventoryWrite lists the items one survivor offers in a trade.
type TradeInventoryWrite struct {
	SurvivorID uint                  `json:"survivor_id" validate:"required"`
	Items      []*TradeResourceWrite `json:"items" validate:"required,min=1,dive,required"`
}

// TradeResourceWrite is a single offered resource and its quantity.
type TradeResourceWrite struct {
	ResourceID uint `json:"resource_id" validate:"required"`
	Quantity   uint `json:"quantity" validate:"required,gte=0"`
}

// TradeRejectWrite is the payload for rejecting a trade, with an optional
// free-text annotation explaining the rejection.
type TradeRejectWrite struct {
	TradeID    uint   `uri:"tradeId" binding:"required"`
	Annotation string `json:"annotation"`
}

// TradeRead is the minimal read model for a trade: its id and status.
type TradeRead struct {
	TradeID uint               `json:"trade_id"`
	Status  domain.TradeStatus `json:"status"`
}

// TradeHistorySurvivorRead identifies one party of a historical trade.
type TradeHistorySurvivorRead struct {
	SurvivorID uint   `json:"id"`
	Name       string `json:"name"`
}

// TradeHistoryItemRead is one traded item as shown in the trade history.
type TradeHistoryItemRead struct {
	ItemName     string `json:"item_name"`
	ItemQuantity uint   `json:"item_quantity"`
}

// TradeHistoryRead is the full read model of a past trade: both parties and
// the items each side put up.
type TradeHistoryRead struct {
	TradeID       uint                      `json:"id"`
	Status        domain.TradeStatus        `json:"status"`
	Sender        *TradeHistorySurvivorRead `json:"sender"`
	Receiver      *TradeHistorySurvivorRead `json:"receiver"`
	SenderItems   []*TradeHistoryItemRead   `json:"sender_items"`
	ReceiverItems []*TradeHistoryItemRead   `json:"receiver_items"`
}
// ConvertToTradeRead maps a domain Trade onto its read DTO; nil in, nil out.
func ConvertToTradeRead(trade *domain.Trade) *TradeRead {
	if trade != nil {
		return &TradeRead{
			TradeID: trade.ID,
			Status:  trade.Status,
		}
	}
	return nil
}
|
xavierhardy/yamlfix | yamlfix/cli.py | #!/usr/bin/env python
"""
A tool to fix yamllint issues.
"""
from concurrent.futures import ProcessPoolExecutor
from logging import getLogger, Logger, Handler, INFO, WARN
from os import cpu_count
from os.path import abspath
from sys import argv
from typing import Sequence, NamedTuple
from yamllint.config import YamlLintConfig
from yamlfix.config import configure_app
from yamlfix.config_parser import parse_arguments
from yamlfix.files import find_files, format_file
LOGGER = getLogger(__name__)
# (level, message, kwargs) triple consumed as LOGGER.log(*msg). `message`
# contains %(name)s placeholders that the logging module resolves lazily
# from the `kwargs` dict.
LogMessage = NamedTuple(
    "LogMessage", (("level", int), ("message", str), ("kwargs", dict))
)


def format_result(
    total: int, changed: int, errors: int, check_only: bool = False
) -> LogMessage:
    """Build the end-of-run summary log message.

    :param total: number of files processed
    :param changed: number of files reformatted (or that would be, in check mode)
    :param errors: number of files that failed (or would fail) to reformat
    :param check_only: True for --check mode; switches to conditional wording
    :return: a LogMessage ready to be splatted into ``LOGGER.log(*...)``
    """
    untouched = total - changed - errors
    verb = " would be" if check_only else ""
    error_verb = "would fail" if check_only else "failed"
    result_texts = []
    # Default to INFO so the level is always defined. Previously log_level was
    # only assigned inside the branches below, so format_result(0, 0, 0)
    # (no files found, nothing changed) raised UnboundLocalError.
    log_level = INFO
    if changed > 0:
        result_texts.append(
            "1 file%(verb)s reformatted"
            if changed == 1
            else "%(changed)d files%(verb)s reformatted"
        )
        log_level = WARN
    if untouched > 0:
        result_texts.append(
            "1 file%(verb)s left unchanged"
            if untouched == 1
            else "%(untouched)d files%(verb)s left unchanged"
        )
    if errors > 0:
        result_texts.append(
            "1 file %(error_verb)s to reformat"
            if errors == 1
            else "%(errors)d files %(error_verb)s to reformat",
        )
        log_level = WARN
        message = "Oh no!"
    else:
        message = "All done!"
    return LogMessage(
        level=log_level,
        message="%s\n%s." % (message, ", ".join(result_texts)),
        kwargs=dict(
            changed=changed,
            errors=errors,
            untouched=untouched,
            verb=verb,
            error_verb=error_verb,
        ),
    )
def main(args: Sequence[str] = None, logger: Logger = None, handler: Handler = None):
    """CLI entry point: discover YAML files and reformat them in parallel.

    :param args: command-line arguments (defaults to ``argv[1:]``)
    :param logger: logger to configure (defaults to the module logger)
    :param handler: optional log handler passed to configure_app
    :return: boolean fed into the exit code by the ``__main__`` block

    NOTE(review): the return expression is truthy only when errors occurred
    (``error_count > 0 and ...``), yet ``__main__`` maps a truthy return to
    exit code 0 — this looks inverted; confirm the intended CLI semantics.
    """
    if args is None:
        args = argv[1:]
    if logger is None:
        logger = LOGGER
    config = parse_arguments(*args)
    configure_app(config, logger, handler=handler)
    try:
        yaml_config = YamlLintConfig(file=".yamllint")
    except IOError:
        # No project-level .yamllint file: fall back to default behaviour.
        yaml_config = None
    # Deduplicate paths across all provided roots.
    paths = set()
    for path in config.get("paths", []):
        paths.update(find_files(path, yaml_config=yaml_config))
    check_only = config.get("check")
    fail_text = "would reformat" if check_only else "reformatted"
    changed_file_count = 0
    error_count = 0
    with ProcessPoolExecutor(max_workers=cpu_count()) as executor:
        # executor.map would trigger if one of the underlying function calls
        # raised an exception on iterating
        futures = map(
            lambda pth: executor.submit(
                format_file, pth, dry_run=check_only, yaml_config=yaml_config
            ),
            paths,
        )
        # list makes sure the futures are all submitted before waiting on the first results
        for pth, future in list(zip(paths, futures)):
            full_path = abspath(pth)
            try:
                # format_file returns truthy when the file was (or would be) changed.
                if future.result():
                    changed_file_count += 1
                    LOGGER.warning("%s %s", fail_text, full_path)
                else:
                    LOGGER.debug("%s already well formatted, good job.", full_path)
            except Exception as e:
                error_count += 1
                LOGGER.error("error: cannot format %s: %s", full_path, e)
    # format_result returns (level, message, kwargs) consumed via logging's
    # lazy %(name)s formatting.
    LOGGER.log(
        *format_result(
            len(paths), changed_file_count, error_count, check_only=check_only
        )
    )
    return error_count > 0 and (not check_only or changed_file_count == 0)


if __name__ == "__main__":
    # NOTE(review): this passes the full argv (including the program name at
    # argv[0]) while the default path uses argv[1:] — verify parse_arguments
    # tolerates the extra first argument.
    exit_code = 0 if main(argv, LOGGER) else 1
    exit(exit_code)
|
jpaulodefarias/ees-devops-desenvolvimento-web | 002-banco-de-dados/src/main/java/br/uece/eesdevop/bancodedados/util/GsonUtil.java | <gh_stars>1-10
package br.uece.eesdevop.bancodedados.util;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import java.io.Reader;
/**
 * Application-wide JSON (de)serialization helper.
 *
 * <p>Implemented as a single-element enum so the shared {@link Gson}
 * instance is created exactly once; Gson instances are thread-safe, so
 * {@code GsonUtil.INSTANCE} may be used from any thread.
 */
public enum GsonUtil {
    INSTANCE;

    /** Shared Gson instance; final so it can never be reassigned. */
    private final Gson gson;

    GsonUtil() {
        gson = new GsonBuilder().create();
    }

    /**
     * Deserializes JSON read from {@code reader} into an instance of {@code clazz}.
     */
    public <T> T parse(Reader reader, Class<T> clazz) {
        return gson.fromJson(reader, clazz);
    }

    /** Serializes {@code object} to its JSON representation. */
    public String stringify(Object object) {
        return gson.toJson(object);
    }
}
|
alonmm/VCSamples | VC2012Samples/Windows 8 samples/C++/Windows 8 app samples/Responding to the appearance of the on-screen keyboard sample (Windows 8)/C++/KeyboardPage.xaml.h | //
// KeyboardPage.xaml.h
// Declaration of the KeyboardPage class
//
#pragma once
#include "pch.h"
#include "KeyboardPage.g.h"
#include "InputPaneHelper.h"
namespace KeyboardEventsSampleCPP
{
    // How the page should adjust its layout in response to the on-screen
    // (touch) keyboard showing or hiding.
    enum ResizeType
    {
        NoResize = 0,
        ResizeFromShow = 1,
        ResizeFromHide = 2
    };

    /// <summary>
    /// An empty page that can be used on its own or navigated to within a Frame.
    /// </summary>
    public ref class KeyboardPage sealed
    {
    public:
        KeyboardPage();

    protected:
        virtual void OnNavigatedFrom(Windows::UI::Xaml::Navigation::NavigationEventArgs^ e) override;

    private:
        // Tracks input-pane show/hide events for this page.
        InputPaneHelper^ _inputPaneHelper;
        // Layout bookkeeping used while the keyboard is visible; presumably
        // in device-independent pixels — TODO confirm against the .xaml.cpp.
        double _displacement, _viewSize, _bottomOfList;
        bool _resized;
        ResizeType _shouldResize;

        void CloseView_Click(Object^ sender, Windows::UI::Xaml::RoutedEventArgs^ e);
        void CustomKeyboardHandler(Object^ sender, Windows::UI::ViewManagement::InputPaneVisibilityEventArgs^ e);
        void ShowAnimationComplete(Object^ sender, Object^ e);
        void InputPaneHiding(Windows::UI::ViewManagement::InputPane^ sender, Windows::UI::ViewManagement::InputPaneVisibilityEventArgs^ e);
        void MiddleScroller_SizeChanged(Object^ sender, Windows::UI::Xaml::SizeChangedEventArgs^ e);
    };
}
|
nholden/job_board | spec/models/term_spec.rb | <gh_stars>1-10
require 'rails_helper'
RSpec.describe Term, :type => :model do
# Association: a Term owns many Jobs.
it "should have many jobs" do
  expect(Term.reflect_on_association(:jobs).macro).to eq(:has_many)
end

it "should respond to label" do
  @term = FactoryGirl.create(:term)
  expect(@term).to respond_to(:label)
end

it "should respond to position" do
  @term = FactoryGirl.create(:term)
  expect(@term).to respond_to(:position)
end

# Validation: label is required.
it "must have a label" do
  @term = FactoryGirl.build(:term, label: nil)
  expect(@term).to be_invalid
end

# A position is assigned automatically on creation.
it "must be assigned a position" do
  @term = FactoryGirl.create(:term)
  expect(@term.position).to_not be_nil
end

describe "destroy_and_reassign_jobs" do
  context "on a specified term" do
    before(:each) do
      @user = FactoryGirl.create(:user_with_jobs)
      @user.jobs.first.term.destroy_and_reassign_jobs
    end

    it "destroys the term" do
      expect(Term.find_by(label: "Term")).to be_nil
    end

    # Orphaned jobs are moved onto the "Unspecified" term.
    it "reassigns the the jobs' term" do
      expect(@user.jobs.first.term.label).to eql("Unspecified")
    end
  end

  context "on an unspecified term with no jobs" do
    before(:each) do
      @term = FactoryGirl.create(:term, label: "Unspecified")
      @term.destroy_and_reassign_jobs
    end

    it "destroys the term" do
      expect(Term.find_by(label: "Unspecified")).to be_nil
    end
  end

  # The fallback term must survive while jobs still reference it.
  context "on an unspecified term with jobs" do
    before(:each) do
      @term = FactoryGirl.create(:term, label: "Unspecified")
      @job = FactoryGirl.create(:job, term: @term)
      @term.destroy_and_reassign_jobs
    end

    it "does not destroy the term" do
      expect(Term.find_by(label: "Unspecified")).to_not be_nil
    end
  end
end

describe "reposition" do
  before(:each) do
    @term_1 = FactoryGirl.create(:term, id: 1, position: 1)
    @term_2 = FactoryGirl.create(:term, id: 2, position: 2)
    @term_3 = FactoryGirl.create(:term, id: 3, position: 3)
  end

  it "repositions the terms" do
    Term.reposition({1=>3, 2=>1, 3=>2})
    expect(Term.find(1).position).to eql(3)
    expect(Term.find(2).position).to eql(1)
    expect(Term.find(3).position).to eql(2)
  end

  it "sorts by the new positions" do
    Term.reposition({1=>3, 2=>1, 3=>2})
    expect(Term.first.id).to eql(2)
    expect(Term.last.id).to eql(1)
  end

  # Positions are normalized to 1..n while preserving relative order.
  it "assigns the lowest possible integers while maintaining order" do
    Term.reposition({1=>9, 2=>4, 3=>7})
    expect(Term.find(1).position).to eql(3)
    expect(Term.find(2).position).to eql(1)
    expect(Term.find(3).position).to eql(2)
  end

  it "ignores ids for terms that don't exist" do
    Term.reposition({1=>9, 2=>4, 3=>7, 4=>3, 5=>2, 6=>1})
    expect(Term.find(1).position).to eql(3)
    expect(Term.find(2).position).to eql(1)
    expect(Term.find(3).position).to eql(2)
  end

  # Duplicate target positions are rejected.
  it "won't let multiple terms occupy the same position" do
    expect(Term.reposition({1=>4, 2=>4, 3=>3})).to eql(false)
  end
end
describe "refresh_positions" do
before(:each) do
@term_1 = FactoryGirl.create(:term, id: 1, position: 1)
@term_2 = FactoryGirl.create(:term, id: 2, position: 2)
@term_3 = FactoryGirl.create(:term, id: 3, position: 3)
end
it "assigns the lowest possible integers while maintaining order" do
@term_2.destroy
Term.refresh_positions
expect(Term.find(1).position).to eql(1)
expect(Term.find(3).position).to eql(2)
end
end
end
|
bbergaoui/bastoji | crypto/model/src/main/java/us/onesquare/bastoji/model/photo/Photo.java | <gh_stars>0
package us.onesquare.bastoji.model.photo;
import java.util.UUID;
import org.springframework.data.cassandra.core.mapping.PrimaryKey;
import org.springframework.data.cassandra.core.mapping.Table;
/**
 * Cassandra-mapped entity holding a photo's binary payload together with a
 * smaller thumbnail rendition. The row is keyed by a {@link UUID}.
 */
@Table
public class Photo {

    /** Primary key of the photo row. */
    @PrimaryKey
    private UUID id;

    /** Full-size image bytes. */
    private byte[] photo;

    /** Reduced-size preview bytes. */
    private byte[] thumbnail;

    /** No-arg constructor (typically required by the mapping framework). */
    public Photo() {
    }

    /**
     * Creates a photo entity from image data; the id is assigned separately.
     *
     * @param photo     full-size image bytes
     * @param thumbnail reduced-size preview bytes
     */
    public Photo(byte[] photo, byte[] thumbnail) {
        this.photo = photo;
        this.thumbnail = thumbnail;
    }

    public UUID getId() {
        return id;
    }

    public void setId(UUID id) {
        this.id = id;
    }

    public byte[] getPhoto() {
        return photo;
    }

    public void setPhoto(byte[] photo) {
        this.photo = photo;
    }

    public byte[] getThumbnail() {
        return thumbnail;
    }

    public void setThumbnail(byte[] thumbnail) {
        this.thumbnail = thumbnail;
    }
}
|
n3wscott/net-istio | vendor/knative.dev/networking/pkg/apis/networking/v1alpha1/domain_types.go | /*
Copyright 2020 The Knative Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package v1alpha1
import (
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"knative.dev/pkg/apis"
duckv1 "knative.dev/pkg/apis/duck/v1"
"knative.dev/pkg/kmeta"
)
// +genclient
// +genclient:nonNamespaced
// +genreconciler:krshapedlogic=true
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object

// Domain is a cluster-scoped resource to configure a proxy pool for a given Route.
type Domain struct {
	metav1.TypeMeta `json:",inline"`
	// Standard object's metadata.
	// More info: https://github.com/kubernetes/community/blob/master/contributors/devel/sig-architecture/api-conventions.md#metadata
	// +optional
	metav1.ObjectMeta `json:"metadata,omitempty"`

	// Spec is the desired state of the Domain.
	// More info: https://github.com/kubernetes/community/blob/master/contributors/devel/sig-architecture/api-conventions.md#spec-and-status
	// +optional
	Spec DomainSpec `json:"spec,omitempty"`

	// Status is the current state of the Domain.
	// More info: https://github.com/kubernetes/community/blob/master/contributors/devel/sig-architecture/api-conventions.md#spec-and-status
	// +optional
	Status DomainStatus `json:"status,omitempty"`
}

// Verify that Domain adheres to the appropriate interfaces.
var (
	// Check that Domain may be validated and defaulted.
	_ apis.Validatable = (*Domain)(nil)
	_ apis.Defaultable = (*Domain)(nil)

	// Check that we can create OwnerReferences to a Domain.
	_ kmeta.OwnerRefable = (*Domain)(nil)

	// Check that the type conforms to the duck Knative Resource shape.
	_ duckv1.KRShaped = (*Domain)(nil)
)

// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object

// DomainList is a collection of Domain objects.
type DomainList struct {
	metav1.TypeMeta `json:",inline"`
	// Standard object metadata.
	// More info: https://github.com/kubernetes/community/blob/master/contributors/devel/sig-architecture/api-conventions.md#metadata
	// +optional
	metav1.ListMeta `json:"metadata,omitempty"`

	// Items is the list of Domain objects.
	Items []Domain `json:"items"`
}

// DomainSpec contains the specification of the Domain CRD.
//
// NOTE(review): the previous doc comment here ("DomainSpec describes the
// Ingress the user wishes to exist ... Backends now can have namespace ...")
// was copy-pasted from an Ingress spec type and did not describe this struct;
// it has been replaced with the struct's own summary line.
type DomainSpec struct {
	// IngressClass tells what Ingress class annotation to use for Routes associated
	// with this Realm.
	IngressClass string `json:"ingressClass"`

	// Suffix specifies the domain suffix to be used. This field replaces the
	// existing config-domain ConfigMap. Internal Domains can omit this, in
	// which case we will default to the cluster suffix.
	// +optional
	Suffix string `json:"suffix,omitempty"`

	// LoadBalancers provide addresses (IP addresses, domains) of the load balancers
	// associated with this Domain. This is used in automatic DNS provisioning like
	// configuration of magic DNS or creating ExternalName services for cluster-local
	// access.
	LoadBalancers []LoadBalancerIngressSpec `json:"loadBalancers"`

	// Configs contains additional pieces of information to configure ingress proxies.
	// +optional
	Configs []IngressConfig `json:"configs,omitempty"`
}
// IngressConfig allows KIngress implementations to add additional information needed
// for configuring the proxies associated with this Domain.
// For example, in our Istio-based Ingress this will contain all references of
// Istio Gateways associated with this Domain. This could be a reference of a ConfigMap
// owned by the implementation as well.
type IngressConfig struct {
	// Name of the KIngress implementation resource.
	// +optional
	Name string `json:"name,omitempty"`

	// Namespace of the KIngress implementation resource.
	// +optional
	Namespace string `json:"namespace,omitempty"`

	// Type of the KIngress implementation resource.
	// +optional
	Type string `json:"type,omitempty"`
}

// LoadBalancerIngressSpec represents the spec of a load-balancer ingress point:
// traffic intended for the service should be sent to an ingress point.
type LoadBalancerIngressSpec struct {
	// IP is set for load-balancer ingress points that are IP based
	// (typically GCE or OpenStack load-balancers).
	// +optional
	IP string `json:"ip,omitempty"`

	// Domain is set for load-balancer ingress points that are DNS based
	// (typically AWS load-balancers).
	// +optional
	Domain string `json:"domain,omitempty"`

	// DomainInternal is set if there is a cluster-local DNS name to access the Ingress.
	//
	// NOTE: This differs from K8s Ingress, since we also desire to have a cluster-local
	// DNS name to allow routing in case of not having a mesh.
	//
	// +optional
	DomainInternal string `json:"domainInternal,omitempty"`

	// MeshOnly is set if the Ingress is only load-balanced through a Service mesh.
	// +optional
	MeshOnly bool `json:"meshOnly,omitempty"`
}

// DomainStatus will reflect Ready=True if the implementation accepts the Domain data
// as valid.
type DomainStatus struct {
	duckv1.Status `json:",inline"`
}

// GetStatus retrieves the status of the Domain. Implements the KRShaped interface.
func (d *Domain) GetStatus() *duckv1.Status {
	return &d.Status.Status
}
|
VinayaSathyanarayana/maro | maro/cli/data_pipeline/base.py | # Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import os
import shutil
from abc import ABC
from maro.cli.data_pipeline.utils import convert, download_file, StaticParameter, generate_name_with_uuid
from maro.utils.exception.cli_exception import CommandError
from maro.utils.logger import CliLogger
logger = CliLogger(name=__name__)
class DataPipeline(ABC):
    """Base class of data pipeline.

    Generate scenario/topology specific data for the business engine.

    General workflow:
    Step 1: Download the original data file from the source to download folder.
    Step 2: Generate the clean data in clean folder.
    Step 3: Build a binary data file in build folder.

    The folder structure is:
    ~/.maro
        /data/[scenario]/[topology]
            /_download original data file
            /_clean cleaned data file
            /_build bin data file and other necessary files
        /meta meta files for data pipeline

    Args:
        scenario(str): The scenario of the data.
        topology(str): The topology of the scenario.
        source(str): The original source of data file.
        is_temp(bool): (optional) If the data file is temporary.
    """

    # File names inside the download/clean/build/meta folders.
    # Subclasses are expected to override these with concrete names.
    _download_file_name = ""
    _clean_file_name = ""
    _build_file_name = ""
    _meta_file_name = ""

    def __init__(self, scenario: str, topology: str, source: str, is_temp: bool = False):
        self._scenario = scenario
        self._topology = topology
        self._is_temp = is_temp
        self._source = source
        # Root of all generated data (StaticParameter.data_root, e.g. ~/.maro/data).
        self._data_root = StaticParameter.data_root
        self._meta_folder = os.path.join(StaticParameter.data_root, scenario, "meta")
        self._build_folder = os.path.join(self._data_root, self._scenario, ".build", self._topology)
        self._clean_folder = os.path.join(self._data_root, self._scenario, ".source", ".clean", self._topology)
        self._download_folder = os.path.join(self._data_root, self._scenario, ".source", ".download", self._topology)
        # Files/folders created by this run, tracked so temporary artifacts
        # can be deleted later via remove_file()/remove_folder().
        self._new_file_list = []
        self._new_folder_list = []
        if self._is_temp:
            # Temporary runs get a unique sub-folder so that repeated runs
            # do not clash with each other.
            tmpdir = generate_name_with_uuid()
            self._download_folder = os.path.join(self._download_folder, tmpdir)
            self._clean_folder = os.path.join(self._clean_folder, tmpdir)
            self._build_folder = os.path.join(self._build_folder, tmpdir)
        self._download_file = os.path.join(self._download_folder, self._download_file_name)
        self._clean_file = os.path.join(self._clean_folder, self._clean_file_name)
        self._build_file = os.path.join(self._build_folder, self._build_file_name)
        self._build_meta_file = os.path.join(self._meta_folder, self._meta_file_name)

    @property
    def build_folder(self):
        """str: Folder that holds the built binary data file."""
        return self._build_folder

    def download(self, is_force: bool, fall_back: callable = None):
        """Download the original data file.

        Args:
            is_force(bool): If forced re-download the data file.
            fall_back(callable): (optional) Fallback function to execute when download failed.

        Raises:
            CommandError: If the download fails and no fall_back is given.
        """
        self._new_folder_list.append(self._download_folder)
        os.makedirs(self._download_folder, exist_ok=True)
        self._new_file_list.append(self._download_file)
        # Skip the download when the file is already present, unless forced.
        if (not is_force) and os.path.exists(self._download_file):
            logger.info_green("File already exists, skipping download.")
        else:
            logger.info_green(f"Downloading data from {self._source} to {self._download_file}.")
            try:
                download_file(source=self._source, destination=self._download_file)
            except Exception as e:
                # A failed download is recoverable through the fall_back hook;
                # without one it aborts the generate command.
                logger.warning_yellow(f"Failed to download from {self._source} to {self._download_file}.")
                if fall_back is not None:
                    logger.warning_yellow(f"Calling fall_back function: {fall_back}.")
                    fall_back()
                else:
                    raise CommandError("generate", f"Download error: {e}.")

    def clean(self):
        """Clean the original data file.

        The base implementation only ensures the clean/build folders exist;
        subclasses override this to perform the actual cleaning.
        """
        self._new_folder_list.append(self._clean_folder)
        os.makedirs(self._clean_folder, exist_ok=True)
        self._new_folder_list.append(self._build_folder)
        os.makedirs(self._build_folder, exist_ok=True)

    def build(self):
        """Build the cleaned data file to binary data file."""
        self._new_file_list.append(self._build_file)
        if os.path.exists(self._clean_file):
            logger.info_green(f"Building binary data from {self._clean_file} to {self._build_file}.")
            # Converts the cleaned CSV-like file into MARO's binary format
            # using the meta file for this scenario.
            convert(meta=self._build_meta_file, file=[self._clean_file], output=self._build_file)
        else:
            logger.warning_yellow(f"Not found cleaned data: {self._clean_file}.")

    def remove_file(self):
        """Remove the temporary files created by this pipeline run."""
        for new_file in self._new_file_list:
            if os.path.exists(new_file):
                os.remove(new_file)
        self._new_file_list.clear()

    def remove_folder(self):
        """Remove the temporary folders created by this pipeline run."""
        for new_folder in self._new_folder_list:
            if os.path.exists(new_folder):
                shutil.rmtree(new_folder)
        self._new_folder_list.clear()
class DataTopology(ABC):
    """Manages the set of data pipelines for one topology of a research scenario.

    Pipelines are registered by name in ``self._data_pipeline``; every
    operation below simply fans out to each registered pipeline.
    """

    def __init__(self):
        # Mapping of pipeline name -> DataPipeline instance.
        self._data_pipeline = {}

    def get_build_folders(self) -> dict:
        """Get the build file folders of all data pipelines for the topology.

        Returns:
            dict: Pipeline name mapped to the path of its build folder.
        """
        return {name: pipeline.build_folder for name, pipeline in self._data_pipeline.items()}

    def download(self, is_force: bool = False):
        """Download the original data files of all data pipelines.

        Args:
            is_force(bool): If True, re-download even when the file already exists.
        """
        for pipeline in self._data_pipeline.values():
            pipeline.download(is_force)

    def clean(self):
        """Clean the original data files of all data pipelines."""
        for pipeline in self._data_pipeline.values():
            pipeline.clean()

    def build(self):
        """Build the cleaned data files of all data pipelines into binary form."""
        for pipeline in self._data_pipeline.values():
            pipeline.build()

    def remove(self):
        """Remove temporary files, then temporary folders, of all pipelines."""
        for pipeline in self._data_pipeline.values():
            pipeline.remove_file()
        for pipeline in self._data_pipeline.values():
            pipeline.remove_folder()
|
johnapost/overwatch-league-fantasy | src/app/shared/withFirestore.js | <gh_stars>1-10
// @flow
import { firestoreConnect } from "react-redux-firebase";

// Higher-order-component factory that attaches Firestore listeners on the
// client only. During server-side rendering (`window` is undefined) it
// returns an identity HOC so no Firestore subscriptions are created.
export default (query?: Object[] | (Object => Object[])) =>
  typeof window !== "undefined" ? firestoreConnect(query) : (arg: any) => arg;
|
gbruins-notours/gmnst | server/plugins/apiClients/models/ApiClients.js | <filename>server/plugins/apiClients/models/ApiClients.js
'use strict';

const CoreService = require('../../core/core.service');

// Bookshelf model factory for API client records.
// `hasTimestamps: true` makes Bookshelf maintain created_at/updated_at.
module.exports = function (baseModel, bookshelf) {
    return baseModel.extend({
        tableName: CoreService.DB_TABLES.api_clients,
        hasTimestamps: true
    });
};
|
jasonTangxd/clockwork | clockwork-dao/src/main/java/com/creditease/adx/clockwork/dao/mapper/clockwork/TbClockworkNodeMapper.java | <filename>clockwork-dao/src/main/java/com/creditease/adx/clockwork/dao/mapper/clockwork/TbClockworkNodeMapper.java
/**
* This file is automatically generated by MyBatis Generator, do not modify.
*/
package com.creditease.adx.clockwork.dao.mapper.clockwork;
import com.creditease.adx.clockwork.common.entity.gen.TbClockworkNode;
import com.creditease.adx.clockwork.common.entity.gen.TbClockworkNodeExample;
import java.util.List;
import org.apache.ibatis.annotations.Param;
/**
 * MyBatis mapper exposing CRUD operations for {@code TbClockworkNode} records.
 * Generated by MyBatis Generator — regenerate rather than hand-edit; method
 * semantics below follow the generator's standard conventions.
 */
public interface TbClockworkNodeMapper {
    /** Counts the records matching the example's criteria. */
    long countByExample(TbClockworkNodeExample example);

    /** Deletes records matching the example's criteria; returns rows affected. */
    int deleteByExample(TbClockworkNodeExample example);

    /** Deletes the record with the given primary key; returns rows affected. */
    int deleteByPrimaryKey(Integer id);

    /** Inserts a full record (all columns); returns rows affected. */
    int insert(TbClockworkNode record);

    /** Inserts only the non-null fields of the record; returns rows affected. */
    int insertSelective(TbClockworkNode record);

    /** Returns all records matching the example's criteria. */
    List<TbClockworkNode> selectByExample(TbClockworkNodeExample example);

    /** Returns the record with the given primary key, or null if absent. */
    TbClockworkNode selectByPrimaryKey(Integer id);

    /** Updates the non-null fields of {@code record} on rows matching the example. */
    int updateByExampleSelective(@Param("record") TbClockworkNode record, @Param("example") TbClockworkNodeExample example);

    /** Updates all fields of {@code record} on rows matching the example. */
    int updateByExample(@Param("record") TbClockworkNode record, @Param("example") TbClockworkNodeExample example);

    /** Updates the non-null fields of the record identified by its primary key. */
    int updateByPrimaryKeySelective(TbClockworkNode record);

    /** Updates all fields of the record identified by its primary key. */
    int updateByPrimaryKey(TbClockworkNode record);
}
mrbrackins/testt | extensions/shoutem.navigation/server/src/layoutSettings/tileGrid/iconBackgroundSettingsLocalization.js | <reponame>mrbrackins/testt<gh_stars>100-1000
// Localization keys for the tile layout's icon-background settings page.
// Every key is namespaced under the page-level prefix.
const PREFIX = 'tile-icon-background-settings';

const TITLE = PREFIX + '.title';
const HEADER_ICON_BACKGROUNDS = PREFIX + '.header-icon-backgrounds';
const HEADER_IMAGE = PREFIX + '.header-image';

export default {
  TITLE,
  HEADER_ICON_BACKGROUNDS,
  HEADER_IMAGE,
};
|
Quantisan/WholeCell | simulation/doc/doxygen/html/class_overview_animation.js | var class_overview_animation =
// Doxygen-generated navigation data: [display name, HTML anchor, sub-entries]
// tuples for the OverviewAnimation class page. Do not hand-edit; regenerate
// with Doxygen instead.
[
    [ "OverviewAnimation", "class_overview_animation.html#a20c4bb6f0cc5ea3323ad72e9f164e31f", null ],
    [ "drawArc", "class_overview_animation.html#a71bd22b2df276f272dbd2b5b054dbb2b", null ],
    [ "drawCell", "class_overview_animation.html#ace9b460146a6b5bdd5fa82c88caf1f1e", null ],
    [ "drawCellDivision", "class_overview_animation.html#a0cb65757afe7bf8375b06b5aa8eb2556", null ],
    [ "drawFrame", "class_overview_animation.html#a9a01d9496cb1ac70cc1c665e7df7a16a", null ],
    [ "loadSimulationData", "class_overview_animation.html#a0ebee44eb8297d4c2f18ecd22b21c27e", null ],
    [ "author", "class_overview_animation.html#aa0f9f9db88a06276994f5b13c09a5df4", null ],
    [ "description", "class_overview_animation.html#a0b08e7418024d2f0b2540e91fca14b30", null ],
    [ "displayAuthorList", "class_overview_animation.html#a63b5e4a84371991ed802f5104ce28b11", null ],
    [ "title", "class_overview_animation.html#ae202929e1ac760d44598f727c5220c34", null ]
];
dymaxionlabs/satlomas-front | components/user/HomeContent.js | const HomeContent = () => null
export default HomeContent;
|
megahertz0/android_thunder | dex_src/com/tencent/wxop/stat/ah.java | package com.tencent.wxop.stat;
import android.content.Context;
import com.tencent.wxop.stat.a.e;
import com.tencent.wxop.stat.a.k;
import com.umeng.socialize.common.SocializeConstants;
/**
 * Runnable posted by the stats SDK when a page "end" event is reported.
 * NOTE(review): this is decompiled, obfuscated code — identifiers are
 * synthetic and the described intent is inferred; verify against the SDK.
 */
final class ah implements Runnable {
    final /* synthetic */ Context a;                 // application context
    final /* synthetic */ String b;                  // page id being ended
    final /* synthetic */ StatSpecifyReportedInfo c; // optional reporting overrides

    ah(Context context, String str, StatSpecifyReportedInfo statSpecifyReportedInfo) {
        this.a = context;
        this.b = str;
        this.c = statSpecifyReportedInfo;
    }

    public final void run() {
        try {
            StatServiceImpl.flushDataToDB(this.a);
            // FIX: `l` was declared inside the synchronized block but read
            // after it (a decompilation artifact that does not compile).
            // Declare it outside the block and assign under the lock only.
            Long l;
            synchronized (StatServiceImpl.o) {
                l = (Long) StatServiceImpl.o.remove(this.b);
            }
            if (l != null) {
                // Elapsed seconds since the matching onResume; clamp to >= 1.
                Long valueOf = Long.valueOf((System.currentTimeMillis() - l.longValue()) / 1000);
                if (valueOf.longValue() <= 0) {
                    valueOf = Long.valueOf(1);
                }
                String j = StatServiceImpl.n;
                if (j != null && j.equals(this.b)) {
                    // Presumably avoids reporting a page as its own referrer.
                    j = SocializeConstants.OP_DIVIDER_MINUS;
                }
                e kVar = new k(this.a, j, this.b, StatServiceImpl.a(this.a, false, this.c), valueOf, this.c);
                if (!this.b.equals(StatServiceImpl.m)) {
                    StatServiceImpl.q.warn("Invalid invocation since previous onResume on diff page.");
                }
                new aq(kVar).a();
                StatServiceImpl.n = this.b;
                return;
            }
            StatServiceImpl.q.e(new StringBuilder("Starttime for PageID:").append(this.b).append(" not found, lost onResume()?").toString());
        } catch (Throwable th) {
            // Last-resort guard: statistics code must never crash the host app.
            StatServiceImpl.q.e(th);
            StatServiceImpl.a(this.a, th);
        }
    }
}
|
maccum/hail | src/test/scala/is/hail/utils/RichRDDSuite.scala | <filename>src/test/scala/is/hail/utils/RichRDDSuite.scala<gh_stars>1-10
package is.hail.utils
import is.hail.SparkSuite
import org.testng.annotations.Test
// Tests for the RDD enrichment helpers (headPerPartition, head,
// saveFromByteArrays, writeTable) against a local Spark context.
class RichRDDSuite extends SparkSuite {
  // headPerPartition(n) caps every partition at n elements: 20 partitions * 5.
  @Test def testTakeByPartition() {
    val r = sc.parallelize(0 until 1024, numSlices = 20)
    assert(r.headPerPartition(5).count() == 100)
  }

  @Test def testHead() {
    val r = sc.parallelize(0 until 1024, numSlices = 20)
    // Cumulative element ranges per partition, used to predict how many
    // partitions head(n) should span.
    val partitionRanges = r.countPerPartition().scanLeft(Range(0, 1)) { case (x, c) => Range(x.end, x.end + c.toInt + 1) }
    def getExpectedNumPartitions(n: Int): Int =
      partitionRanges.indexWhere(_.contains(n))
    for (n <- Array(0, 15, 200, 562, 1024, 2000)) {
      val t = r.head(n)
      // head never returns more elements than the RDD holds.
      val nActual = math.min(n, 1024)
      assert(t.collect() sameElements (0 until nActual))
      assert(t.count() == nActual)
      assert(t.getNumPartitions == getExpectedNumPartitions(nActual))
    }
    val vds = hc.importVCF("src/test/resources/sample.vcf")
    assert(vds.head(3).countRows() == 3)
  }

  @Test def binaryParallelWrite() {
    // Reads back at most the first 32 bytes of `file` — enough for the
    // small header/data fixtures below.
    def readBytes(file: String): Array[Byte] = hadoopConf.readFile(file) { dis =>
      val buffer = new Array[Byte](32)
      val size = dis.read(buffer)
      buffer.take(size)
    }
    val header = Array[Byte](108, 27, 1, 91)
    val data = Array(Array[Byte](1, 19, 23, 127, -1), Array[Byte](23, 4, 15, -2, 1))
    val r = sc.parallelize(data, numSlices = 2)
    assert(r.getNumPartitions == 2)
    // CONCATENATED: single file, header followed by all partition data.
    val notParallelWrite = tmpDir.createTempFile("notParallelWrite")
    r.saveFromByteArrays(notParallelWrite, tmpDir.createTempFile("notParallelWrite_tmp"), Some(header), exportType = ExportType.CONCATENATED)
    assert(readBytes(notParallelWrite) sameElements (header ++: data.flatten))
    // PARALLEL_HEADER_IN_SHARD: header repeated at the top of every shard.
    val parallelWrite = tmpDir.createTempFile("parallelWrite")
    r.saveFromByteArrays(parallelWrite, tmpDir.createTempFile("parallelWrite_tmp"), Some(header), exportType = ExportType.PARALLEL_HEADER_IN_SHARD)
    assert(readBytes(parallelWrite + "/part-00000") sameElements header ++ data(0))
    assert(readBytes(parallelWrite + "/part-00001") sameElements header ++ data(1))
    // PARALLEL_SEPARATE_HEADER: header written once to its own file.
    val parallelWriteHeader = tmpDir.createTempFile("parallelWriteHeader")
    r.saveFromByteArrays(parallelWriteHeader, tmpDir.createTempFile("parallelHeaderWrite_tmp"), Some(header), exportType = ExportType.PARALLEL_SEPARATE_HEADER)
    assert(readBytes(parallelWriteHeader + "/header") sameElements header)
    assert(readBytes(parallelWriteHeader + "/part-00000") sameElements data(0))
    assert(readBytes(parallelWriteHeader + "/part-00001") sameElements data(1))
  }

  @Test def parallelWrite() {
    def read(file: String): Array[String] = hc.hadoopConf.readLines(file)(_.map(_.value).toArray)
    val header = "my header is awesome!"
    val data = Array("the cat jumped over the moon.", "all creatures great and small")
    val r = sc.parallelize(data, numSlices = 2)
    assert(r.getNumPartitions == 2)
    // Same three export modes as the binary test, but for text tables.
    val concatenated = tmpDir.createTempFile("concatenated")
    r.writeTable(concatenated, tmpDir.createTempFile("concatenated"), Some(header), exportType = ExportType.CONCATENATED)
    assert(read(concatenated) sameElements (header +: data))
    val shardHeaders = tmpDir.createTempFile("shardHeader")
    r.writeTable(shardHeaders, tmpDir.createTempFile("shardHeader"), Some(header), exportType = ExportType.PARALLEL_HEADER_IN_SHARD)
    assert(read(shardHeaders + "/part-00000") sameElements header +: Array(data(0)))
    assert(read(shardHeaders + "/part-00001") sameElements header +: Array(data(1)))
    val separateHeader = tmpDir.createTempFile("separateHeader", ".gz")
    r.writeTable(separateHeader, tmpDir.createTempFile("separateHeader"), Some(header), exportType = ExportType.PARALLEL_SEPARATE_HEADER)
    assert(read(separateHeader + "/header.gz") sameElements Array(header))
    assert(read(separateHeader + "/part-00000.gz") sameElements Array(data(0)))
    assert(read(separateHeader + "/part-00001.gz") sameElements Array(data(1)))
    // Merging header + shards of the separate-header output must reproduce
    // the CONCATENATED output exactly.
    val merged = tmpDir.createTempFile("merged", ".gz")
    val mergeList = Array(separateHeader + "/header.gz",
      separateHeader + "/part-00000.gz",
      separateHeader + "/part-00001.gz").flatMap(hadoopConf.glob)
    hadoopConf.copyMergeList(mergeList, merged, deleteSource = false)
    assert(read(merged) sameElements read(concatenated))
  }
}
|
evandhq/react-jupiter | src/components/icons/index.test.js | <filename>src/components/icons/index.test.js<gh_stars>1-10
import React from 'react';
import { shallow } from 'enzyme';
import renderer from 'react-test-renderer';
import 'jest-styled-components';
import Icon from './index';
import findByTestAtrr from '../utils/test';
const setupShallow = (props = {}) => (
shallow(<Icon type={props.icon} {...props} />)
);
describe('Icon component', () => {
  // Rendering guards: the icon only renders when a `type` prop is supplied.
  it('Should render icon component without error', () => {
    const component = setupShallow({ type: 'lock' });
    const Icon = findByTestAtrr(component, 'i-tag');
    expect(Icon.length).toBe(1);
  });
  it('Should not render icon component without type props', () => {
    const component = setupShallow();
    const Icon = findByTestAtrr(component, 'i-tag');
    expect(Icon.length).toBe(0);
  });
  // Snapshot tests — one per supported icon type. Test names are part of the
  // stored snapshot keys, so renaming them regenerates snapshots.
  it('should render zoom-out icon correctly', () => {
    const Icons = renderer
      .create(<Icon type="zoom-out" />).toJSON();
    expect(Icons).toMatchSnapshot();
  });
  it('should render zoom-in icon correctly', () => {
    const Icons = renderer
      .create(<Icon type="zoom-in" />).toJSON();
    expect(Icons).toMatchSnapshot();
  });
  it('should render today icon correctly', () => {
    const Icons = renderer
      .create(<Icon type="today" />).toJSON();
    expect(Icons).toMatchSnapshot();
  });
  it('should render assessment icon correctly', () => {
    const Icons = renderer
      .create(<Icon type="assessment" />).toJSON();
    expect(Icons).toMatchSnapshot();
  });
  it('should render aspect-ratio icon correctly', () => {
    const Icons = renderer
      .create(<Icon type="aspect-ratio" />).toJSON();
    expect(Icons).toMatchSnapshot();
  });
  it('should render alarm-on icon correctly', () => {
    const Icons = renderer
      .create(<Icon type="alarm-on" />).toJSON();
    expect(Icons).toMatchSnapshot();
  });
  it('should render alarm-off icon correctly', () => {
    const Icons = renderer
      .create(<Icon type="alarm-off" />).toJSON();
    expect(Icons).toMatchSnapshot();
  });
  it('should render watch-later icon correctly', () => {
    const Icons = renderer
      .create(<Icon type="watch-later" />).toJSON();
    expect(Icons).toMatchSnapshot();
  });
  it('should render touch-app icon correctly', () => {
    const Icons = renderer
      .create(<Icon type="touch-app" />).toJSON();
    expect(Icons).toMatchSnapshot();
  });
  it('should render exit-to-app icon correctly', () => {
    const Icons = renderer
      .create(<Icon type="exit-to-app" />).toJSON();
    expect(Icons).toMatchSnapshot();
  });
  it('should render event-seat icon correctly', () => {
    const Icons = renderer
      .create(<Icon type="event-seat" />).toJSON();
    expect(Icons).toMatchSnapshot();
  });
  it('should render event icon correctly', () => {
    const Icons = renderer
      .create(<Icon type="event" />).toJSON();
    expect(Icons).toMatchSnapshot();
  });
  it('should render description icon correctly', () => {
    const Icons = renderer
      .create(<Icon type="description" />).toJSON();
    expect(Icons).toMatchSnapshot();
  });
  it('should render delete-outline icon correctly', () => {
    const Icons = renderer
      .create(<Icon type="delete-outline" />).toJSON();
    expect(Icons).toMatchSnapshot();
  });
});
|
wsdl2rest/wsdl2rest | impl/src/main/java/org/jboss/fuse/wsdl2rest/impl/service/ElementInfoImpl.java | <reponame>wsdl2rest/wsdl2rest<filename>impl/src/main/java/org/jboss/fuse/wsdl2rest/impl/service/ElementInfoImpl.java
package org.jboss.fuse.wsdl2rest.impl.service;
import org.jboss.fuse.wsdl2rest.ElementInfo;
public class ElementInfoImpl implements ElementInfo {
private final String elementType;
private final String elementName;
private final boolean complex;
public ElementInfoImpl(String elementType, String elementName, boolean complex) {
this.elementType = elementType;
this.elementName = elementName;
this.complex = complex;
}
@Override
public String getElementType() {
return this.elementType;
}
@Override
public String getElementName() {
return this.elementName;
}
@Override
public boolean isComplex() {
return this.complex;
}
public String toString() {
return "[name=" + elementName + ", type=" + elementType + ", complex=" + complex +"]";
}
} |
IsaSanchezTorron/PROYECTO | Backend/server.js | require('dotenv').config();
const express = require('express');
const morgan = require('morgan');
const bodyParser = require('body-parser');
const fileUpload = require('express-fileupload');
const cors = require('cors');
const path = require('path');
const app = express();
// NOTE(review): `port` is read from the environment but not used in this
// section — confirm app.listen(port) exists further down the file.
const port = process.env.PORT;
// Global middlewares
app.use(morgan('dev'));       // request logging (dev format)
app.use(bodyParser.json());   // parse JSON request bodies
app.use(fileUpload());        // multipart/form-data file uploads
app.use(cors());              // allow cross-origin requests
app.use(express.static(path.join(__dirname, 'static'))); // serve static assets
//############### Funciones importadas relativas a USUARIOS #######################
const { newUser } = require('./controllers/USUARIOS/new_user');
const { loginUser } = require('./controllers/USUARIOS/login');
const { validateUser } = require('./controllers/USUARIOS/validation');
const { getUser } = require('./controllers/USUARIOS/get_data_user');
const { editUser } = require('./controllers/USUARIOS/edit_user'); //middlewares para autenticación
const { updatePassword } = require('./controllers/USUARIOS/edit_password');
const { disableUser } = require('./controllers/USUARIOS/disable');
const { deleteUser } = require('./controllers/USUARIOS/delete');
const {
getInscriptionHistoryUser
} = require('./controllers/USUARIOS/get_history_user');
const {
getPendingRatings
} = require('./controllers/USUARIOS/get_pending_ratings');
const { userIsAuthenticated } = require('./middlewares/auth.js');
const { userIsAdmin } = require('./middlewares/auth.js');
//############## Funciones importadas relativas a CONCURSOS ######################
const { newConcourse } = require('./controllers/CONCURSOS/new_concourse');
const { editConcourse } = require('./controllers/CONCURSOS/edit_concourse');
const { getConcourse } = require('./controllers/CONCURSOS/get_data');
const { deleteConcourse } = require('./controllers/CONCURSOS/delete');
const { listingConcourses } = require('./controllers/CONCURSOS/all_concourses');
const { nextConcourses } = require('./controllers/CONCURSOS/next_concourses');
const {
finishedConcourses
} = require('./controllers/CONCURSOS/finished_concourses');
const { setWinner } = require('./controllers/CONCURSOS/winner');
const { seelastwinners } = require('./controllers/CONCURSOS/lastwinners');
const {
searchingConcourses
} = require('./controllers/CONCURSOS/search_concourses');
const { stillNowConcourses } = require('./controllers/CONCURSOS/stillnow');
//############# Funciones importadas relativas a INSCRIPCIONES ##############
const {
newInscription
} = require('./controllers/INSCRIPCIONES/new_inscription');
const {
deleteInscription
} = require('./controllers/INSCRIPCIONES/delete_inscription');
const {
getInscribed
} = require('./controllers/INSCRIPCIONES/get_data_inscriptions');
const { newRating } = require('./controllers/VALORACIONES/new_rating');
const { viewRating } = require('./controllers/VALORACIONES/view_rating');
const { viewRanking } = require('./controllers/VALORACIONES/view_ranking');
// ################## USER ROUTES ##############################
app.post('/usuarios', newUser); // Create new user
app.post('/usuarios/login', loginUser); // Log in
app.get('/usuarios/validar', validateUser); // Validate account
app.put('/usuarios/editar/:id', userIsAuthenticated, editUser); // Edit user
app.get('/usuarios/perfil/:id', userIsAuthenticated, getUser); // Get a user's data
app.put('/usuarios/password/:id', userIsAuthenticated, updatePassword); // Change user password
app.put('/usuarios/disable/:id', userIsAuthenticated, disableUser); // Disable user
app.delete(
    '/usuarios/delete/:id',
    userIsAuthenticated,
    userIsAdmin,
    deleteUser
); // Delete user account (admin only)
app.get(
    '/usuarios/historial/:id',
    userIsAuthenticated,
    getInscriptionHistoryUser
); // View a user's inscription history
app.get(
    '/usuarios/concursos-pendientes-valoracion/:id',
    userIsAuthenticated,
    getPendingRatings
); // Concourses pending a rating from this user
// ################### CONCOURSE ROUTES ###########################
// BUG FIX: in the create/edit routes below the controller was listed BEFORE
// userIsAuthenticated/userIsAdmin, so newConcourse/editConcourse executed
// without any authentication or admin check. Middlewares now run first,
// matching the ordering already used by the delete route.
app.post(
  '/concursos/nuevo_concurso',
  userIsAuthenticated,
  userIsAdmin,
  newConcourse
); // Create a new concourse (admin only)
app.put(
  '/concursos/editar/:id',
  userIsAuthenticated,
  userIsAdmin,
  editConcourse
); // Edit a concourse (admin only)
app.get('/concursos/info/:id', getConcourse); // Get a concourse's data
app.delete(
  '/concursos/delete/:id',
  userIsAuthenticated,
  userIsAdmin,
  deleteConcourse
); // Delete a concourse (admin only)
app.get('/concursos/listado', listingConcourses); // List every concourse
app.get('/concursos/proximamente/:id', userIsAuthenticated, nextConcourses); // List upcoming concourses
app.get('/concursos/finalizados', finishedConcourses); // List already-finished concourses
app.put(
  '/concursos/asignar_ganador',
  userIsAuthenticated,
  userIsAdmin,
  setWinner
); // Assign a winner (admin only)
app.get('/concursos/ultimosganadores', userIsAuthenticated, seelastwinners); // See the most recently named winners
app.get('/concursos/todos', stillNowConcourses); // List concourses still open now
//################ INSCRIPTION ROUTES #############################
app.post(
  '/concursos/inscripciones/inscribirme/:id',
  userIsAuthenticated,
  newInscription
); // New inscription
app.delete(
  '/concursos/inscripciones/borrar/:id',
  userIsAuthenticated,
  deleteInscription
); // Delete an inscription
app.get('/concursos/inscripciones/:id', userIsAuthenticated, getInscribed); // List the users inscribed in a concourse
//############### RATING ROUTES #################################
app.post('/valoraciones/:id', userIsAuthenticated, newRating); // New rating
app.get('/valoraciones/ver/:id', viewRating); // See a concourse's rating
app.get('/valoraciones/ranking', viewRanking); // See the concourse ranking
//############## SEARCH ############################################
app.get('/busqueda', searchingConcourses); // Full search
// ############ Error-handling middlewares ##################################
// Express recognises an error handler by its 4-argument signature, so `next`
// must stay in the parameter list even though it is unused here.
app.use((error, req, res, next) => {
  res.status(error.httpCode || 500).send({
    status: 'error',
    message: error.message
  });
});
// Fallback for any request that matched none of the routes above.
app.use((req, res) => {
  res.status(404).send({
    status: 'error',
    message: 'Not found'
  });
});
app.listen(port, () => {
  console.log(`Servidor funcionando en http://localhost:${port} 💞🥑🤓🌱💞`);
});
|
AdithyaBijur/Quest | nodezip/Routes/notification.js | <gh_stars>1-10
const express = require('express');
const verifyToken = require('../Helpers/verifytoken')
const jwt = require('jsonwebtoken');
const config = require('../config');
const bodyParser = require('body-parser')
const mongoose = require("mongoose");
const bcrypt = require("bcryptjs")
const User = require("../Models/User")
const router = express.Router()
const Not = require('../Models/not')
const Notification = require('../Models/notification')
const History = require('../Models/reps')
const Request1 = require('../Models/request')
// Parse request bodies. NOTE(review): bodyParser() alone already mounts the
// json and urlencoded parsers, so the two explicit lines below are likely
// redundant — confirm before removing either.
router.use(bodyParser())
router.use(bodyParser.json())
router.use(bodyParser.urlencoded({ extended: true }))
// All routes in this router pass through the verifyToken middleware first.
router.use(verifyToken)
// POST / — return the current user's unseen notifications. The `mod` field of
// the request body selects which collection to query:
//   "shared"  -> Notification docs addressed to the user and not yet seen
//   "reps"    -> History entries for the user
//   "request" -> pending (not accepted) Request1 docs addressed to the user
//   "answer"  -> unseen Not docs addressed to the user
// Results are always sorted newest-first on `doc`.
router.post('/', (req, res) => {
  const usern = req.user.userName;
  const mod = req.body.mod;

  if (mod === 'shared') {
    // NOTE(review): $in is normally given an array; confirm that passing the
    // username string here behaves as intended.
    Notification.find({ sendto: { $in: usern }, seen: false })
      .sort({ doc: -1 })
      .then(docs => sendNotificationList(res, docs))
      .catch(err => res.send(err));
  } else if (mod === 'reps') {
    History.find({ username: usern })
      .sort({ doc: -1 })
      .then(docs => sendNotificationList(res, docs))
      .catch(err => res.send(err));
  } else if (mod === 'request') {
    Request1.find({ sendto: usern, accepted: false })
      .sort({ doc: -1 })
      .then(docs => sendNotificationList(res, docs))
      .catch(err => res.send(err));
  } else if (mod === 'answer') {
    Not.find({ sendto: usern, seen: false })
      .sort({ doc: -1 })
      .then(docs => sendNotificationList(res, docs))
      .catch(err => res.send(err));
  } else {
    // BUG FIX: previously an unrecognised `mod` fell through every `if` and
    // the request was left hanging with no response at all.
    res.status(400).json({ "msg": "unknown notification type" });
  }
});

// Send the result list, or the "no notification" message when it is empty.
function sendNotificationList(res, docs) {
  if (docs.length !== 0) res.status(200).send(docs);
  else res.status(200).json({ "msg": "no notification" });
}

module.exports = router
|
codeclubbentleigh/Python | 1a. beginner_path_1_book_a_byte_of_python/a_byte_of_python by Swaroop C H - programs/if.py | <reponame>codeclubbentleigh/Python
# A tiny guessing game: read one integer and say whether it is below,
# above, or exactly equal to the secret number.
secret = 23
answer = int(input('Enter an integer : '))

if answer < secret:
    # Guess was too small.
    print('No, it is a little higher than that')
elif answer > secret:
    # Guess was too large.
    print('No, it is a little lower than that')
else:
    # Exact match.
    print('Congratulations, you guessed it.')
    print('(but you do not win any prizes!)')

# This final statement always runs, whatever branch was taken above.
print('Done')
|
peigong/yimiwan | packages/server/app/model/wx-user.js | <reponame>peigong/yimiwan
// Mongoose model for a WeChat (official-account OAuth) user profile plus the
// OAuth tokens obtained for that user.
module.exports = app => {
  const mongoose = app.mongoose;
  const Schema = mongoose.Schema;
  const ModelSchema = new Schema({
    type: { type: Number, default: 0 },
    openid: { type: String, default: '' }, // Unique user id; note that even before following the official account, visiting its web pages produces an OpenID unique to this user/account pair
    unionid: { type: String, default: '' }, // Present only after the user binds the official account to a WeChat Open Platform account
    nickname: { type: String, default: '' }, // User nickname
    sex: { type: Number, default: 0 }, // Gender: 1 = male, 2 = female, 0 = unknown
    province: { type: String, default: '' }, // Province from the user's profile
    city: { type: String, default: '' }, // City from the user's profile
    country: { type: String, default: '' }, // Country code, e.g. CN for China
    headimgurl: { type: String, default: '' }, // Avatar URL; the trailing number selects the square size (0, 46, 64, 96 or 132 — 0 means 640x640). Empty if the user has no avatar; the URL becomes invalid when the avatar changes
    privilege: { type: Array, default: [] }, // User privilege info, JSON array (e.g. chinaunicom for China Unicom Woka users)
    access_token: { type: String, default: '' }, // Web-authorization access token; note this differs from the basic-support access_token
    expires_in: { type: Number, default: 0 }, // access_token lifetime, in seconds
    refresh_token: { type: String, default: '' }, // Token used to refresh the access_token
    scope: { type: String, default: '' }, // Scopes the user authorized, comma-separated
    timestamp: { type: Number, default: 0 }
  });
  return mongoose.model('WxUser', ModelSchema);
}
|
chenzhengda/tensorflow | tensorflow/compiler/plugin/poplar/driver/passes/inplace_finder.h | /* Copyright 2017 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifndef TENSORFLOW_COMPILER_PLUGIN_POPLAR_DRIVER_PASSES_INPLACE_FINDER_H_
#define TENSORFLOW_COMPILER_PLUGIN_POPLAR_DRIVER_PASSES_INPLACE_FINDER_H_
#include <map>
#include <vector>
#include "tensorflow/compiler/plugin/poplar/driver/tools/inplace_util.h"
#include "tensorflow/compiler/xla/service/hlo_pass_interface.h"
namespace xla {
class HloModule;
class HloInstruction;
namespace poplarplugin {
struct CompilerAnnotations;
using InplaceRoute = std::vector<HloInstruction*>;
/**
* This finds instructions which do inplace updates to tensors.
*
* Care is taken to track tensors through tuples, as they should still be
* updated in place even when they have been made part of a tuple.
*
* Operations which lower to poplibs vertices that contains InOut edges should
* be processed by this finder
*/
class InplaceFinder : public HloModulePass {
 public:
  // Stores only a reference, so `annotations` must outlive this pass.
  explicit InplaceFinder(const CompilerAnnotations& annotations)
      : annotations(annotations) {}
  absl::string_view name() const override { return "inplace-finder"; }
  // Runs the pass over `module`; implemented in the corresponding .cc file.
  // NOTE(review): per the usual HloModulePass contract the bool presumably
  // reports whether the module was modified — confirm in the implementation.
  StatusOr<bool> Run(HloModule* module) override;
 private:
  const CompilerAnnotations& annotations;
};
} // namespace poplarplugin
} // namespace xla
#endif
|
lucasdavid/HearthStone | src/main/java/org/jmagic/actions/FinishGame.java | package org.jmagic.actions;
import org.jmagic.infrastructure.validation.rules.ValidationRule;
import org.jmagic.actions.rules.game.GameIsFinished;
import org.jmagic.core.states.State;
import static org.jmagic.infrastructure.validation.rules.BasicRules.Not;
/**
* Finish A Game Action.
* <p>
* Note: the game master should not allowed the players to perform this action.
* This can be achieved by setting a {@link org.jmagic.observers.LooseOnIllegalActionAttempt}
* observer when building the game and not passing this class in the collection of allowed classes.
*
* @author ldavid
*/
public class FinishGame extends Action {

    /**
     * Produces a copy of {@code state} that records this action and the
     * previous state, passing {@code true} for the State's finished flag
     * (this action's whole purpose is to end the game).
     */
    @Override
    public State update(State state) {
        boolean finished = true;
        return new State(
                state.playerStates(), state.turn, state.step, finished,
                state.turnsPlayerIndex, state.activePlayerIndex, this, state);
    }

    /**
     * This action is valid only while the game has not already finished.
     */
    @Override
    public ValidationRule validationRules() {
        ValidationRule gameStillRunning = Not(new GameIsFinished());
        return gameStillRunning;
    }
}
|
UM-ARM-Lab/mab_ms | deform_control/external_libs/OpenSceneGraph-2.8.5/src/osgWrappers/osg/Matrixd.cpp | // ***************************************************************************
//
// Generated automatically by genwrapper.
// Please DO NOT EDIT this file!
//
// ***************************************************************************
#include <osgIntrospection/ReflectionMacros>
#include <osgIntrospection/TypedMethodInfo>
#include <osgIntrospection/StaticMethodInfo>
#include <osgIntrospection/Attributes>
#include <osg/CopyOp>
#include <osg/Matrixd>
#include <osg/Matrixf>
#include <osg/Object>
#include <osg/Quat>
#include <osg/Vec3d>
#include <osg/Vec3f>
#include <osg/Vec4d>
#include <osg/Vec4f>
// Must undefine IN and OUT macros defined in Windows headers
#ifdef IN
#undef IN
#endif
#ifdef OUT
#undef OUT
#endif
TYPE_NAME_ALIAS(double, osg::Matrixd::value_type)
BEGIN_VALUE_REFLECTOR(osg::Matrixd)
I_DeclaringFile("osg/Matrixd");
I_Constructor0(____Matrixd,
"",
"");
I_Constructor1(IN, const osg::Matrixd &, mat,
Properties::NON_EXPLICIT,
____Matrixd__C5_Matrixd_R1,
"",
"");
I_Constructor1(IN, const osg::Matrixf &, mat,
Properties::NON_EXPLICIT,
____Matrixd__C5_Matrixf_R1,
"",
"");
I_Constructor1(IN, float const *const, ptr,
Properties::EXPLICIT,
____Matrixd__float_C5_P1C5,
"",
"");
I_Constructor1(IN, double const *const, ptr,
Properties::EXPLICIT,
____Matrixd__double_C5_P1C5,
"",
"");
I_Constructor1(IN, const osg::Quat &, quat,
Properties::EXPLICIT,
____Matrixd__C5_Quat_R1,
"",
"");
I_Constructor16(IN, osg::Matrixd::value_type, a00, IN, osg::Matrixd::value_type, a01, IN, osg::Matrixd::value_type, a02, IN, osg::Matrixd::value_type, a03, IN, osg::Matrixd::value_type, a10, IN, osg::Matrixd::value_type, a11, IN, osg::Matrixd::value_type, a12, IN, osg::Matrixd::value_type, a13, IN, osg::Matrixd::value_type, a20, IN, osg::Matrixd::value_type, a21, IN, osg::Matrixd::value_type, a22, IN, osg::Matrixd::value_type, a23, IN, osg::Matrixd::value_type, a30, IN, osg::Matrixd::value_type, a31, IN, osg::Matrixd::value_type, a32, IN, osg::Matrixd::value_type, a33,
____Matrixd__value_type__value_type__value_type__value_type__value_type__value_type__value_type__value_type__value_type__value_type__value_type__value_type__value_type__value_type__value_type__value_type,
"",
"");
I_Method1(int, compare, IN, const osg::Matrixd &, m,
Properties::NON_VIRTUAL,
__int__compare__C5_Matrixd_R1,
"",
"");
I_Method0(bool, valid,
Properties::NON_VIRTUAL,
__bool__valid,
"",
"");
I_Method0(bool, isNaN,
Properties::NON_VIRTUAL,
__bool__isNaN,
"",
"");
I_Method1(void, set, IN, const osg::Matrixd &, rhs,
Properties::NON_VIRTUAL,
__void__set__C5_Matrixd_R1,
"",
"");
I_Method1(void, set, IN, const osg::Matrixf &, rhs,
Properties::NON_VIRTUAL,
__void__set__C5_Matrixf_R1,
"",
"");
I_Method1(void, set, IN, float const *const, ptr,
Properties::NON_VIRTUAL,
__void__set__float_C5_P1C5,
"",
"");
I_Method1(void, set, IN, double const *const, ptr,
Properties::NON_VIRTUAL,
__void__set__double_C5_P1C5,
"",
"");
I_Method16(void, set, IN, osg::Matrixd::value_type, a00, IN, osg::Matrixd::value_type, a01, IN, osg::Matrixd::value_type, a02, IN, osg::Matrixd::value_type, a03, IN, osg::Matrixd::value_type, a10, IN, osg::Matrixd::value_type, a11, IN, osg::Matrixd::value_type, a12, IN, osg::Matrixd::value_type, a13, IN, osg::Matrixd::value_type, a20, IN, osg::Matrixd::value_type, a21, IN, osg::Matrixd::value_type, a22, IN, osg::Matrixd::value_type, a23, IN, osg::Matrixd::value_type, a30, IN, osg::Matrixd::value_type, a31, IN, osg::Matrixd::value_type, a32, IN, osg::Matrixd::value_type, a33,
Properties::NON_VIRTUAL,
__void__set__value_type__value_type__value_type__value_type__value_type__value_type__value_type__value_type__value_type__value_type__value_type__value_type__value_type__value_type__value_type__value_type,
"",
"");
I_Method0(osg::Matrixd::value_type *, ptr,
Properties::NON_VIRTUAL,
__value_type_P1__ptr,
"",
"");
I_Method0(const osg::Matrixd::value_type *, ptr,
Properties::NON_VIRTUAL,
__C5_value_type_P1__ptr,
"",
"");
I_Method0(bool, isIdentity,
Properties::NON_VIRTUAL,
__bool__isIdentity,
"",
"");
I_Method0(void, makeIdentity,
Properties::NON_VIRTUAL,
__void__makeIdentity,
"",
"");
I_Method1(void, makeScale, IN, const osg::Vec3f &, x,
Properties::NON_VIRTUAL,
__void__makeScale__C5_Vec3f_R1,
"",
"");
I_Method1(void, makeScale, IN, const osg::Vec3d &, x,
Properties::NON_VIRTUAL,
__void__makeScale__C5_Vec3d_R1,
"",
"");
I_Method3(void, makeScale, IN, osg::Matrixd::value_type, x, IN, osg::Matrixd::value_type, x, IN, osg::Matrixd::value_type, x,
Properties::NON_VIRTUAL,
__void__makeScale__value_type__value_type__value_type,
"",
"");
I_Method1(void, makeTranslate, IN, const osg::Vec3f &, x,
Properties::NON_VIRTUAL,
__void__makeTranslate__C5_Vec3f_R1,
"",
"");
I_Method1(void, makeTranslate, IN, const osg::Vec3d &, x,
Properties::NON_VIRTUAL,
__void__makeTranslate__C5_Vec3d_R1,
"",
"");
I_Method3(void, makeTranslate, IN, osg::Matrixd::value_type, x, IN, osg::Matrixd::value_type, x, IN, osg::Matrixd::value_type, x,
Properties::NON_VIRTUAL,
__void__makeTranslate__value_type__value_type__value_type,
"",
"");
I_Method2(void, makeRotate, IN, const osg::Vec3f &, from, IN, const osg::Vec3f &, to,
Properties::NON_VIRTUAL,
__void__makeRotate__C5_Vec3f_R1__C5_Vec3f_R1,
"",
"");
I_Method2(void, makeRotate, IN, const osg::Vec3d &, from, IN, const osg::Vec3d &, to,
Properties::NON_VIRTUAL,
__void__makeRotate__C5_Vec3d_R1__C5_Vec3d_R1,
"",
"");
I_Method2(void, makeRotate, IN, osg::Matrixd::value_type, angle, IN, const osg::Vec3f &, axis,
Properties::NON_VIRTUAL,
__void__makeRotate__value_type__C5_Vec3f_R1,
"",
"");
I_Method2(void, makeRotate, IN, osg::Matrixd::value_type, angle, IN, const osg::Vec3d &, axis,
Properties::NON_VIRTUAL,
__void__makeRotate__value_type__C5_Vec3d_R1,
"",
"");
I_Method4(void, makeRotate, IN, osg::Matrixd::value_type, angle, IN, osg::Matrixd::value_type, x, IN, osg::Matrixd::value_type, y, IN, osg::Matrixd::value_type, z,
Properties::NON_VIRTUAL,
__void__makeRotate__value_type__value_type__value_type__value_type,
"",
"");
I_Method1(void, makeRotate, IN, const osg::Quat &, x,
Properties::NON_VIRTUAL,
__void__makeRotate__C5_Quat_R1,
"",
"");
I_Method6(void, makeRotate, IN, osg::Matrixd::value_type, angle1, IN, const osg::Vec3f &, axis1, IN, osg::Matrixd::value_type, angle2, IN, const osg::Vec3f &, axis2, IN, osg::Matrixd::value_type, angle3, IN, const osg::Vec3f &, axis3,
Properties::NON_VIRTUAL,
__void__makeRotate__value_type__C5_Vec3f_R1__value_type__C5_Vec3f_R1__value_type__C5_Vec3f_R1,
"",
"");
I_Method6(void, makeRotate, IN, osg::Matrixd::value_type, angle1, IN, const osg::Vec3d &, axis1, IN, osg::Matrixd::value_type, angle2, IN, const osg::Vec3d &, axis2, IN, osg::Matrixd::value_type, angle3, IN, const osg::Vec3d &, axis3,
Properties::NON_VIRTUAL,
__void__makeRotate__value_type__C5_Vec3d_R1__value_type__C5_Vec3d_R1__value_type__C5_Vec3d_R1,
"",
"");
I_Method4(void, decompose, IN, osg::Vec3f &, translation, IN, osg::Quat &, rotation, IN, osg::Vec3f &, scale, IN, osg::Quat &, so,
Properties::NON_VIRTUAL,
__void__decompose__osg_Vec3f_R1__osg_Quat_R1__osg_Vec3f_R1__osg_Quat_R1,
"decompose the matrix into translation, rotation, scale and scale orientation. ",
"");
I_Method4(void, decompose, IN, osg::Vec3d &, translation, IN, osg::Quat &, rotation, IN, osg::Vec3d &, scale, IN, osg::Quat &, so,
Properties::NON_VIRTUAL,
__void__decompose__osg_Vec3d_R1__osg_Quat_R1__osg_Vec3d_R1__osg_Quat_R1,
"decompose the matrix into translation, rotation, scale and scale orientation. ",
"");
I_Method6(void, makeOrtho, IN, double, left, IN, double, right, IN, double, bottom, IN, double, top, IN, double, zNear, IN, double, zFar,
Properties::NON_VIRTUAL,
__void__makeOrtho__double__double__double__double__double__double,
"Set to an orthographic projection. ",
"See glOrtho for further details. ");
I_Method6(bool, getOrtho, IN, double &, left, IN, double &, right, IN, double &, bottom, IN, double &, top, IN, double &, zNear, IN, double &, zFar,
Properties::NON_VIRTUAL,
__bool__getOrtho__double_R1__double_R1__double_R1__double_R1__double_R1__double_R1,
"Get the orthographic settings of the orthographic projection matrix. ",
"Note, if matrix is not an orthographic matrix then invalid values will be returned. ");
I_Method4(void, makeOrtho2D, IN, double, left, IN, double, right, IN, double, bottom, IN, double, top,
Properties::NON_VIRTUAL,
__void__makeOrtho2D__double__double__double__double,
"Set to a 2D orthographic projection. ",
"See glOrtho2D for further details. ");
I_Method6(void, makeFrustum, IN, double, left, IN, double, right, IN, double, bottom, IN, double, top, IN, double, zNear, IN, double, zFar,
Properties::NON_VIRTUAL,
__void__makeFrustum__double__double__double__double__double__double,
"Set to a perspective projection. ",
"See glFrustum for further details. ");
I_Method6(bool, getFrustum, IN, double &, left, IN, double &, right, IN, double &, bottom, IN, double &, top, IN, double &, zNear, IN, double &, zFar,
Properties::NON_VIRTUAL,
__bool__getFrustum__double_R1__double_R1__double_R1__double_R1__double_R1__double_R1,
"Get the frustum settings of a perspective projection matrix. ",
"Note, if matrix is not a perspective matrix then invalid values will be returned. ");
I_Method4(void, makePerspective, IN, double, fovy, IN, double, aspectRatio, IN, double, zNear, IN, double, zFar,
Properties::NON_VIRTUAL,
__void__makePerspective__double__double__double__double,
"Set to a symmetrical perspective projection. ",
"See gluPerspective for further details. Aspect ratio is defined as width/height. ");
I_Method4(bool, getPerspective, IN, double &, fovy, IN, double &, aspectRatio, IN, double &, zNear, IN, double &, zFar,
Properties::NON_VIRTUAL,
__bool__getPerspective__double_R1__double_R1__double_R1__double_R1,
"Get the frustum settings of a symmetric perspective projection matrix. ",
"Return false if matrix is not a perspective matrix, where parameter values are undefined. Note, if matrix is not a symmetric perspective matrix then the shear will be lost. Asymmetric matrices occur when stereo, power walls, caves and reality center display are used. In these configuration one should use the AsFrustum method instead. ");
I_Method3(void, makeLookAt, IN, const osg::Vec3d &, eye, IN, const osg::Vec3d &, center, IN, const osg::Vec3d &, up,
Properties::NON_VIRTUAL,
__void__makeLookAt__C5_Vec3d_R1__C5_Vec3d_R1__C5_Vec3d_R1,
"Set the position and orientation to be a view matrix, using the same convention as gluLookAt. ",
"");
I_MethodWithDefaults4(void, getLookAt, IN, osg::Vec3f &, eye, , IN, osg::Vec3f &, center, , IN, osg::Vec3f &, up, , IN, osg::Matrixd::value_type, lookDistance, 1.0f,
Properties::NON_VIRTUAL,
__void__getLookAt__Vec3f_R1__Vec3f_R1__Vec3f_R1__value_type,
"Get to the position and orientation of a modelview matrix, using the same convention as gluLookAt. ",
"");
I_MethodWithDefaults4(void, getLookAt, IN, osg::Vec3d &, eye, , IN, osg::Vec3d &, center, , IN, osg::Vec3d &, up, , IN, osg::Matrixd::value_type, lookDistance, 1.0f,
Properties::NON_VIRTUAL,
__void__getLookAt__Vec3d_R1__Vec3d_R1__Vec3d_R1__value_type,
"Get to the position and orientation of a modelview matrix, using the same convention as gluLookAt. ",
"");
I_Method1(bool, invert, IN, const osg::Matrixd &, rhs,
Properties::NON_VIRTUAL,
__bool__invert__C5_Matrixd_R1,
"invert the matrix rhs, automatically select invert_4x3 or invert_4x4. ",
"");
I_Method1(bool, invert_4x3, IN, const osg::Matrixd &, rhs,
Properties::NON_VIRTUAL,
__bool__invert_4x3__C5_Matrixd_R1,
"4x3 matrix invert, not right hand column is assumed to be 0,0,0,1. ",
"");
I_Method1(bool, invert_4x4, IN, const osg::Matrixd &, rhs,
Properties::NON_VIRTUAL,
__bool__invert_4x4__C5_Matrixd_R1,
"full 4x4 matrix invert. ",
"");
I_Method1(void, orthoNormalize, IN, const osg::Matrixd &, rhs,
Properties::NON_VIRTUAL,
__void__orthoNormalize__C5_Matrixd_R1,
"ortho-normalize the 3x3 rotation & scale matrix ",
"");
I_Method1(osg::Vec3f, preMult, IN, const osg::Vec3f &, v,
Properties::NON_VIRTUAL,
__Vec3f__preMult__C5_Vec3f_R1,
"",
"");
I_Method1(osg::Vec3d, preMult, IN, const osg::Vec3d &, v,
Properties::NON_VIRTUAL,
__Vec3d__preMult__C5_Vec3d_R1,
"",
"");
I_Method1(osg::Vec3f, postMult, IN, const osg::Vec3f &, v,
Properties::NON_VIRTUAL,
__Vec3f__postMult__C5_Vec3f_R1,
"",
"");
I_Method1(osg::Vec3d, postMult, IN, const osg::Vec3d &, v,
Properties::NON_VIRTUAL,
__Vec3d__postMult__C5_Vec3d_R1,
"",
"");
I_Method1(osg::Vec4f, preMult, IN, const osg::Vec4f &, v,
Properties::NON_VIRTUAL,
__Vec4f__preMult__C5_Vec4f_R1,
"",
"");
I_Method1(osg::Vec4d, preMult, IN, const osg::Vec4d &, v,
Properties::NON_VIRTUAL,
__Vec4d__preMult__C5_Vec4d_R1,
"",
"");
I_Method1(osg::Vec4f, postMult, IN, const osg::Vec4f &, v,
Properties::NON_VIRTUAL,
__Vec4f__postMult__C5_Vec4f_R1,
"",
"");
I_Method1(osg::Vec4d, postMult, IN, const osg::Vec4d &, v,
Properties::NON_VIRTUAL,
__Vec4d__postMult__C5_Vec4d_R1,
"",
"");
I_Method1(void, set, IN, const osg::Quat &, q,
Properties::NON_VIRTUAL,
__void__set__C5_Quat_R1,
"",
"");
I_Method1(void, get, IN, osg::Quat &, q,
Properties::NON_VIRTUAL,
__void__get__Quat_R1,
"",
"");
I_Method1(void, setRotate, IN, const osg::Quat &, q,
Properties::NON_VIRTUAL,
__void__setRotate__C5_Quat_R1,
"",
"");
I_Method0(osg::Quat, getRotate,
Properties::NON_VIRTUAL,
__Quat__getRotate,
"Get the matrix rotation as a Quat. ",
"Note that this function assumes a non-scaled matrix and will return incorrect results for scaled matrixces. Consider decompose() instead. ");
I_Method3(void, setTrans, IN, osg::Matrixd::value_type, tx, IN, osg::Matrixd::value_type, ty, IN, osg::Matrixd::value_type, tz,
Properties::NON_VIRTUAL,
__void__setTrans__value_type__value_type__value_type,
"",
"");
I_Method1(void, setTrans, IN, const osg::Vec3f &, v,
Properties::NON_VIRTUAL,
__void__setTrans__C5_Vec3f_R1,
"",
"");
I_Method1(void, setTrans, IN, const osg::Vec3d &, v,
Properties::NON_VIRTUAL,
__void__setTrans__C5_Vec3d_R1,
"",
"");
I_Method0(osg::Vec3d, getTrans,
Properties::NON_VIRTUAL,
__Vec3d__getTrans,
"",
"");
I_Method0(osg::Vec3d, getScale,
Properties::NON_VIRTUAL,
__Vec3d__getScale,
"",
"");
I_Method2(void, mult, IN, const osg::Matrixd &, x, IN, const osg::Matrixd &, x,
Properties::NON_VIRTUAL,
__void__mult__C5_Matrixd_R1__C5_Matrixd_R1,
"",
"");
I_Method1(void, preMult, IN, const osg::Matrixd &, x,
Properties::NON_VIRTUAL,
__void__preMult__C5_Matrixd_R1,
"",
"");
I_Method1(void, postMult, IN, const osg::Matrixd &, x,
Properties::NON_VIRTUAL,
__void__postMult__C5_Matrixd_R1,
"",
"");
I_Method1(void, preMultTranslate, IN, const osg::Vec3d &, v,
Properties::NON_VIRTUAL,
__void__preMultTranslate__C5_Vec3d_R1,
"Optimized version of preMult(translate(v));. ",
"");
I_Method1(void, preMultTranslate, IN, const osg::Vec3f &, v,
Properties::NON_VIRTUAL,
__void__preMultTranslate__C5_Vec3f_R1,
"",
"");
I_Method1(void, postMultTranslate, IN, const osg::Vec3d &, v,
Properties::NON_VIRTUAL,
__void__postMultTranslate__C5_Vec3d_R1,
"Optimized version of postMult(translate(v));. ",
"");
I_Method1(void, postMultTranslate, IN, const osg::Vec3f &, v,
Properties::NON_VIRTUAL,
__void__postMultTranslate__C5_Vec3f_R1,
"",
"");
I_Method1(void, preMultScale, IN, const osg::Vec3d &, v,
Properties::NON_VIRTUAL,
__void__preMultScale__C5_Vec3d_R1,
"Optimized version of preMult(scale(v));. ",
"");
I_Method1(void, preMultScale, IN, const osg::Vec3f &, v,
Properties::NON_VIRTUAL,
__void__preMultScale__C5_Vec3f_R1,
"",
"");
I_Method1(void, postMultScale, IN, const osg::Vec3d &, v,
Properties::NON_VIRTUAL,
__void__postMultScale__C5_Vec3d_R1,
"Optimized version of postMult(scale(v));. ",
"");
I_Method1(void, postMultScale, IN, const osg::Vec3f &, v,
Properties::NON_VIRTUAL,
__void__postMultScale__C5_Vec3f_R1,
"",
"");
I_Method1(void, preMultRotate, IN, const osg::Quat &, q,
Properties::NON_VIRTUAL,
__void__preMultRotate__C5_Quat_R1,
"Optimized version of preMult(rotate(q));. ",
"");
I_Method1(void, postMultRotate, IN, const osg::Quat &, q,
Properties::NON_VIRTUAL,
__void__postMultRotate__C5_Quat_R1,
"Optimized version of postMult(rotate(q));. ",
"");
I_StaticMethod0(osg::Matrixd, identity,
__Matrixd__identity_S,
"",
"");
I_StaticMethod1(osg::Matrixd, scale, IN, const osg::Vec3f &, sv,
__Matrixd__scale__C5_Vec3f_R1_S,
"",
"");
I_StaticMethod1(osg::Matrixd, scale, IN, const osg::Vec3d &, sv,
__Matrixd__scale__C5_Vec3d_R1_S,
"",
"");
I_StaticMethod3(osg::Matrixd, scale, IN, osg::Matrixd::value_type, sx, IN, osg::Matrixd::value_type, sy, IN, osg::Matrixd::value_type, sz,
__Matrixd__scale__value_type__value_type__value_type_S,
"",
"");
I_StaticMethod1(osg::Matrixd, translate, IN, const osg::Vec3f &, dv,
__Matrixd__translate__C5_Vec3f_R1_S,
"",
"");
I_StaticMethod1(osg::Matrixd, translate, IN, const osg::Vec3d &, dv,
__Matrixd__translate__C5_Vec3d_R1_S,
"",
"");
I_StaticMethod3(osg::Matrixd, translate, IN, osg::Matrixd::value_type, x, IN, osg::Matrixd::value_type, y, IN, osg::Matrixd::value_type, z,
__Matrixd__translate__value_type__value_type__value_type_S,
"",
"");
I_StaticMethod2(osg::Matrixd, rotate, IN, const osg::Vec3f &, from, IN, const osg::Vec3f &, to,
__Matrixd__rotate__C5_Vec3f_R1__C5_Vec3f_R1_S,
"",
"");
I_StaticMethod2(osg::Matrixd, rotate, IN, const osg::Vec3d &, from, IN, const osg::Vec3d &, to,
__Matrixd__rotate__C5_Vec3d_R1__C5_Vec3d_R1_S,
"",
"");
I_StaticMethod4(osg::Matrixd, rotate, IN, osg::Matrixd::value_type, angle, IN, osg::Matrixd::value_type, x, IN, osg::Matrixd::value_type, y, IN, osg::Matrixd::value_type, z,
__Matrixd__rotate__value_type__value_type__value_type__value_type_S,
"",
"");
I_StaticMethod2(osg::Matrixd, rotate, IN, osg::Matrixd::value_type, angle, IN, const osg::Vec3f &, axis,
__Matrixd__rotate__value_type__C5_Vec3f_R1_S,
"",
"");
I_StaticMethod2(osg::Matrixd, rotate, IN, osg::Matrixd::value_type, angle, IN, const osg::Vec3d &, axis,
__Matrixd__rotate__value_type__C5_Vec3d_R1_S,
"",
"");
I_StaticMethod6(osg::Matrixd, rotate, IN, osg::Matrixd::value_type, angle1, IN, const osg::Vec3f &, axis1, IN, osg::Matrixd::value_type, angle2, IN, const osg::Vec3f &, axis2, IN, osg::Matrixd::value_type, angle3, IN, const osg::Vec3f &, axis3,
__Matrixd__rotate__value_type__C5_Vec3f_R1__value_type__C5_Vec3f_R1__value_type__C5_Vec3f_R1_S,
"",
"");
I_StaticMethod6(osg::Matrixd, rotate, IN, osg::Matrixd::value_type, angle1, IN, const osg::Vec3d &, axis1, IN, osg::Matrixd::value_type, angle2, IN, const osg::Vec3d &, axis2, IN, osg::Matrixd::value_type, angle3, IN, const osg::Vec3d &, axis3,
__Matrixd__rotate__value_type__C5_Vec3d_R1__value_type__C5_Vec3d_R1__value_type__C5_Vec3d_R1_S,
"",
"");
I_StaticMethod1(osg::Matrixd, rotate, IN, const osg::Quat &, quat,
__Matrixd__rotate__C5_Quat_R1_S,
"",
"");
I_StaticMethod1(osg::Matrixd, inverse, IN, const osg::Matrixd &, matrix,
__Matrixd__inverse__C5_Matrixd_R1_S,
"",
"");
I_StaticMethod1(osg::Matrixd, orthoNormal, IN, const osg::Matrixd &, matrix,
__Matrixd__orthoNormal__C5_Matrixd_R1_S,
"",
"");
I_StaticMethod6(osg::Matrixd, ortho, IN, double, left, IN, double, right, IN, double, bottom, IN, double, top, IN, double, zNear, IN, double, zFar,
__Matrixd__ortho__double__double__double__double__double__double_S,
"Create an orthographic projection matrix. ",
"See glOrtho for further details. ");
I_StaticMethod4(osg::Matrixd, ortho2D, IN, double, left, IN, double, right, IN, double, bottom, IN, double, top,
__Matrixd__ortho2D__double__double__double__double_S,
"Create a 2D orthographic projection. ",
"See glOrtho for further details. ");
I_StaticMethod6(osg::Matrixd, frustum, IN, double, left, IN, double, right, IN, double, bottom, IN, double, top, IN, double, zNear, IN, double, zFar,
__Matrixd__frustum__double__double__double__double__double__double_S,
"Create a perspective projection. ",
"See glFrustum for further details. ");
I_StaticMethod4(osg::Matrixd, perspective, IN, double, fovy, IN, double, aspectRatio, IN, double, zNear, IN, double, zFar,
__Matrixd__perspective__double__double__double__double_S,
"Create a symmetrical perspective projection. ",
"See gluPerspective for further details. Aspect ratio is defined as width/height. ");
I_StaticMethod3(osg::Matrixd, lookAt, IN, const osg::Vec3f &, eye, IN, const osg::Vec3f &, center, IN, const osg::Vec3f &, up,
__Matrixd__lookAt__C5_Vec3f_R1__C5_Vec3f_R1__C5_Vec3f_R1_S,
"Create the position and orientation as per a camera, using the same convention as gluLookAt. ",
"");
I_StaticMethod3(osg::Matrixd, lookAt, IN, const osg::Vec3d &, eye, IN, const osg::Vec3d &, center, IN, const osg::Vec3d &, up,
__Matrixd__lookAt__C5_Vec3d_R1__C5_Vec3d_R1__C5_Vec3d_R1_S,
"Create the position and orientation as per a camera, using the same convention as gluLookAt. ",
"");
I_StaticMethod2(osg::Vec3f, transform3x3, IN, const osg::Vec3f &, v, IN, const osg::Matrixd &, m,
__Vec3f__transform3x3__C5_Vec3f_R1__C5_Matrixd_R1_S,
"apply a 3x3 transform of v*M[0. ",
".2,0..2]. ");
I_StaticMethod2(osg::Vec3d, transform3x3, IN, const osg::Vec3d &, v, IN, const osg::Matrixd &, m,
__Vec3d__transform3x3__C5_Vec3d_R1__C5_Matrixd_R1_S,
"apply a 3x3 transform of v*M[0. ",
".2,0..2]. ");
I_StaticMethod2(osg::Vec3f, transform3x3, IN, const osg::Matrixd &, m, IN, const osg::Vec3f &, v,
__Vec3f__transform3x3__C5_Matrixd_R1__C5_Vec3f_R1_S,
"apply a 3x3 transform of M[0. ",
".2,0..2]*v. ");
I_StaticMethod2(osg::Vec3d, transform3x3, IN, const osg::Matrixd &, m, IN, const osg::Vec3d &, v,
__Vec3d__transform3x3__C5_Matrixd_R1__C5_Vec3d_R1_S,
"apply a 3x3 transform of M[0. ",
".2,0..2]*v. ");
I_SimpleProperty(osg::Quat, Rotate,
__Quat__getRotate,
__void__setRotate__C5_Quat_R1);
I_SimpleProperty(osg::Vec3d, Scale,
__Vec3d__getScale,
0);
I_SimpleProperty(osg::Vec3d, Trans,
__Vec3d__getTrans,
__void__setTrans__C5_Vec3d_R1);
END_REFLECTOR
BEGIN_OBJECT_REFLECTOR(osg::RefMatrixd)
I_DeclaringFile("osg/Matrixd");
I_BaseType(osg::Object);
I_BaseType(osg::Matrixd);
I_Constructor0(____RefMatrixd,
"",
"");
I_Constructor1(IN, const osg::Matrixd &, other,
Properties::NON_EXPLICIT,
____RefMatrixd__C5_Matrixd_R1,
"",
"");
I_Constructor1(IN, const osg::Matrixf &, other,
Properties::NON_EXPLICIT,
____RefMatrixd__C5_Matrixf_R1,
"",
"");
I_Constructor1(IN, const osg::RefMatrixd &, other,
Properties::NON_EXPLICIT,
____RefMatrixd__C5_RefMatrixd_R1,
"",
"");
I_Constructor1(IN, osg::Matrixd::value_type const *const, def,
Properties::EXPLICIT,
____RefMatrixd__Matrixd_value_type_C5_P1C5,
"",
"");
I_Constructor16(IN, osg::Matrixd::value_type, a00, IN, osg::Matrixd::value_type, a01, IN, osg::Matrixd::value_type, a02, IN, osg::Matrixd::value_type, a03, IN, osg::Matrixd::value_type, a10, IN, osg::Matrixd::value_type, a11, IN, osg::Matrixd::value_type, a12, IN, osg::Matrixd::value_type, a13, IN, osg::Matrixd::value_type, a20, IN, osg::Matrixd::value_type, a21, IN, osg::Matrixd::value_type, a22, IN, osg::Matrixd::value_type, a23, IN, osg::Matrixd::value_type, a30, IN, osg::Matrixd::value_type, a31, IN, osg::Matrixd::value_type, a32, IN, osg::Matrixd::value_type, a33,
____RefMatrixd__Matrixd_value_type__Matrixd_value_type__Matrixd_value_type__Matrixd_value_type__Matrixd_value_type__Matrixd_value_type__Matrixd_value_type__Matrixd_value_type__Matrixd_value_type__Matrixd_value_type__Matrixd_value_type__Matrixd_value_type__Matrixd_value_type__Matrixd_value_type__Matrixd_value_type__Matrixd_value_type,
"",
"");
I_Method0(osg::Object *, cloneType,
Properties::VIRTUAL,
__Object_P1__cloneType,
"Clone the type of an object, with Object* return type. ",
"Must be defined by derived classes. ");
I_Method1(osg::Object *, clone, IN, const osg::CopyOp &, x,
Properties::VIRTUAL,
__Object_P1__clone__C5_CopyOp_R1,
"Clone an object, with Object* return type. ",
"Must be defined by derived classes. ");
I_Method1(bool, isSameKindAs, IN, const osg::Object *, obj,
Properties::VIRTUAL,
__bool__isSameKindAs__C5_Object_P1,
"",
"");
I_Method0(const char *, libraryName,
Properties::VIRTUAL,
__C5_char_P1__libraryName,
"return the name of the object's library. ",
"Must be defined by derived classes. The OpenSceneGraph convention is that the namespace of a library is the same as the library name. ");
I_Method0(const char *, className,
Properties::VIRTUAL,
__C5_char_P1__className,
"return the name of the object's class type. ",
"Must be defined by derived classes. ");
END_REFLECTOR
|
EmiyaXzero/study | leetcode/src/main/java/com/my/GetIntersectionNode2.java | <gh_stars>0
package com.my;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Set;
/**
 * LeetCode 160: find the first node shared by two singly linked lists.
 *
 * @Author: shanghang
 * @Project:study
 * @Date: 2020/4/3 11:18
 **/
public class GetIntersectionNode2 {

    /**
     * Hash-set approach: remember every node of list A, then walk list B
     * until a remembered node appears. O(m + n) time, O(m) extra space.
     */
    public ListNode getIntersectionNode(ListNode headA, ListNode headB) {
        Set<ListNode> seen = new HashSet<>();
        for (ListNode node = headA; node != null; node = node.next) {
            seen.add(node);
        }
        for (ListNode node = headB; node != null; node = node.next) {
            if (seen.contains(node)) {
                return node;
            }
        }
        return null;
    }

    /**
     * Two-pointer approach: when a pointer runs off the end of its list it
     * restarts at the head of the other list. Both pointers therefore travel
     * exactly m + n steps, so they meet at the shared node — or both become
     * null at the same time when the lists do not intersect.
     */
    public ListNode getIntersectionNode2(ListNode headA, ListNode headB) {
        ListNode a = headA;
        ListNode b = headB;
        while (a != b) {
            a = (a == null) ? headB : a.next;
            b = (b == null) ? headA : b.next;
        }
        return a;
    }
}
|
adriannistor/lucene-solr | solr/core/src/test/org/apache/solr/search/TestSmileRequest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.search;
import java.io.IOException;
import java.io.InputStream;
import java.util.Map;
import org.apache.solr.JSONTestUtil;
import org.apache.solr.SolrTestCaseHS;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.impl.BinaryResponseParser;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.request.QueryRequest;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.Utils;
import org.apache.solr.response.SmileWriterTest;
import org.apache.solr.search.json.TestJsonRequest;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
/**
 * End-to-end test of Solr's binary "smile" response format: runs the shared
 * JSON request test suite against a small distributed cluster while forcing
 * every response through the Smile writer and decoding it back for comparison.
 */
@SolrTestCaseJ4.SuppressSSL
public class TestSmileRequest extends SolrTestCaseJ4 {

    private static SolrTestCaseHS.SolrInstances servers; // for distributed testing

    @BeforeClass
    public static void beforeTests() throws Exception {
        // Disable the shards whitelist so the ad-hoc test cluster can route
        // requests to arbitrary shard addresses.
        systemSetPropertySolrDisableShardsWhitelist("true");
        JSONTestUtil.failRepeatedKeys = true;
        initCore("solrconfig-tlog.xml", "schema_latest.xml");
    }

    /** Lazily boots a 3-node cluster; reused for the lifetime of the class. */
    public static void initServers() throws Exception {
        if (servers == null) {
            servers = new SolrTestCaseHS.SolrInstances(3, "solrconfig-tlog.xml", "schema_latest.xml");
        }
    }

    @AfterClass
    public static void afterTests() throws Exception {
        // Undo everything beforeTests()/initServers() set up, in reverse order.
        JSONTestUtil.failRepeatedKeys = false;
        if (servers != null) {
            servers.stop();
            servers = null;
        }
        systemClearPropertySolrDisableShardsWhitelist();
    }

    @Test
    public void testDistribJsonRequest() throws Exception {
        initServers();
        SolrTestCaseHS.Client client = servers.getClient(random().nextInt());
        // Swap in a tester that issues each request with the Smile parser and
        // compares the decoded response against the expected JSON.
        client.tester = new SolrTestCaseHS.Client.Tester() {
            @Override
            public void assertJQ(SolrClient client, SolrParams args, String... tests) throws Exception {
                ((HttpSolrClient) client).setParser(SmileResponseParser.inst);
                QueryRequest query = new QueryRequest(args);
                String path = args.get("qt");
                if (path != null) {
                    query.setPath(path);
                }
                NamedList<Object> rsp = client.request(query);
                Map m = rsp.asMap(5); // flatten to a plain Map (max depth 5) for JSON comparison
                String jsonStr = Utils.toJSONString(m);
                SolrTestCaseHS.matchJSON(jsonStr, tests);
            }
        };
        client.queryDefaults().set("shards", servers.getShards());
        TestJsonRequest.doJsonRequest(client, true);
    }

    // adding this to core adds the dependency on a few extra jars to our distribution.
    // So this is not added there
    /** Response parser that decodes a Smile-encoded body into a NamedList. */
    public static class SmileResponseParser extends BinaryResponseParser {
        public static final SmileResponseParser inst = new SmileResponseParser();

        @Override
        public String getWriterType() {
            return "smile";
        }

        @Override
        public NamedList<Object> processResponse(InputStream body, String encoding) {
            try {
                Map m = (Map) SmileWriterTest.decodeSmile(body);
                return new NamedList(m);
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        }
    }
}
|
legioner9/Node_Way_source_2 | Jobs/excd.bpmn_js/bpmn-js_/test/spec/features/keyboard-move-selection/KeyboardMoveSelectionSpec.js | import {
bootstrapModeler,
inject
} from 'test/TestHelper';
import coreModule from 'lib/core';
import keyboardMoveSelectionModule from 'diagram-js/lib/features/keyboard-move-selection';
import modelingModule from 'lib/features/modeling';
import rulesModule from 'lib/features/rules';
import { getMid } from 'diagram-js/lib/layout/LayoutUtil';
describe('features/keyboard-move-selection', function() {

  const diagramXML = require('./keyboard-move-selection.bpmn');

  const testModules = [
    coreModule,
    keyboardMoveSelectionModule,
    modelingModule,
    rulesModule
  ];

  beforeEach(bootstrapModeler(diagramXML, { modules: testModules }));


  it('should move task', inject(function(elementRegistry, keyboardMoveSelection, selection) {

    // given
    const task = elementRegistry.get('Task_1');

    selection.select(task);

    const midBefore = getMid(task);

    // when
    keyboardMoveSelection.moveSelection('right');

    // then — tasks are movable, so the mid point must change
    expect(getMid(task)).not.to.eql(midBefore);
  }));


  it('should move participant', inject(function(elementRegistry, keyboardMoveSelection, selection) {

    // given
    const participant = elementRegistry.get('Participant_1');

    selection.select(participant);

    const midBefore = getMid(participant);

    // when
    keyboardMoveSelection.moveSelection('right');

    // then — participants are movable, so the mid point must change
    expect(getMid(participant)).not.to.eql(midBefore);
  }));


  it('should NOT move lane', inject(function(elementRegistry, keyboardMoveSelection, selection) {

    // given
    const lane = elementRegistry.get('Lane_1');

    selection.select(lane);

    const midBefore = getMid(lane);

    // when
    keyboardMoveSelection.moveSelection('right');

    // then — lanes may not be moved on their own, so the mid point stays put
    expect(getMid(lane)).to.eql(midBefore);
  }));
});
cragkhit/elasticsearch | references/bcb_chosen_clones/selected#579578#12#32.java | public void test() throws Exception {
StorageString s = new StorageString("UTF-8");
s.addText("Test");
try {
s.getOutputStream();
fail("Should throw IOException as method not supported.");
} catch (IOException e) {
}
try {
s.getWriter();
fail("Should throw IOException as method not supported.");
} catch (IOException e) {
}
s.addText("ing is important");
s.close(ResponseStateOk.getInstance());
assertEquals("Testing is important", s.getText());
InputStream input = s.getInputStream();
StringWriter writer = new StringWriter();
IOUtils.copy(input, writer, "UTF-8");
assertEquals("Testing is important", writer.toString());
}
|
pantskun/swordiemen | scripts/quest/q14469e.py | # [Lv. 250 Reached] The Light of Hope (14469)
# Reward dialogue: Empress Cygnus congratulates the player on reaching
# Lv. 250 and hands out the Throne of Masters chair (one per character).
# NOTE(review): indentation reconstructed from a whitespace-mangled dump —
# confirm the nesting of the hasItem() guard against the upstream script.
throne = 3014005  # item id: Throne of Masters chair
cygnus = 1101000  # npc id: Empress Cygnus

sm.setSpeakerID(cygnus)
if sm.canHold(throne):
    sm.flipDialogue()
    sm.sendNext("Do you remember when you began your training? \r\n\r\n"
                "Your skills were not so strong, but your courage was second to none. "
                "I think that courage is what gave you the power to chase your dreams.")
    sm.flipDialogue()
    sm.sendSay("I would like to recognize and honor your efforts by giving you this Throne of Masters chair.")
    # parentID is injected by the quest-script runtime (id of quest 14469).
    sm.completeQuest(parentID)
    # One throne per character, and in case of dialogue flipping
    if not sm.hasItem(throne):
        sm.giveItem(throne)
        sm.flipDialogue()
        sm.sendSay("You've gained the #b#i" + repr(throne) + "# #z" + repr(throne) + "##k! \r\n\r\n"
                   "Darkness still threatens Maple World, but it's always darkest before the dawn! "
                   "It's up to you, #b#h ##k, to join others to bring the light of hope to our world.")
        sm.flipDialogue()
        sm.sendPrev("I hope that my gift will light your path of honor.")
else:
    # Not enough room in the Set-up inventory to hold the chair.
    sm.sendSayOkay("Please make room in your Set-up inventory.")
lovepoem/distkv | common/src/main/java/com/distkv/common/utils/Status.java | <filename>common/src/main/java/com/distkv/common/utils/Status.java
package com.distkv.common.utils;
/**
 * The status is used to describe the result of server.
 */
public enum Status {

    /** The requested key does not exist. */
    KEY_NOT_FOUND("key not exist"),

    /** The operation completed successfully. */
    OK("ok");

    /** Human-readable description; final because enum state must be immutable. */
    private final String text;

    Status(String text) {
        this.text = text;
    }

    @Override
    public String toString() {
        return this.text;
    }
}
|
whernebrink/enterprise-wc | demos/ids-popup-menu/standalone-css.js | <reponame>whernebrink/enterprise-wc
import './standalone-css.scss';
|
infochimps-labs/vayacondios | lib/vayacondios/server/api.rb | <filename>lib/vayacondios/server/api.rb
require 'vayacondios-server'
module Vayacondios::Server
# Implements the Vayacondios server API.
#
# ## Setup
#
# Once the Goliath server has booted, this class is handed control
# to process web requests. It uses a set of Rack-aware and
# Goliath-friendly plugins to accomplish some of the edges stuff
# like routing, parsing params, validating, &c.
#
# ## Request Loop
#
# When handling an actual request, it has to do four things:
#
# * determine which handler class to instantiate to handle the request
# * determine the full set of params contained in the request
# * call the appropriate method on the new handler, passing in these params
# * handle any errors that bubble up
#
# ## Configuration
#
# Goliath is kind of weirdly hard to configure nicely. This class
# is also required to define an #options_parser method which is
# momentarily handed control at bootup time to interpret options
# passed to the `vcd-server` program.
#
# It **simultaneously** is required to read a configuration file
# from disk. This configuration file is aware of the Rack
# environment the code is running in so it can take
# environment-specific actions like creating single-connections in
# test/development but using a pool of shared connections in
# production mode. The default file is located in the Vayacondios
# source distribution at `config/vcd-server.rb`.
#
class Api < Goliath::API
include ApiOptions
plugin Goliath::Chimp::Plugin::ActivityMonitor, window: 30
use Goliath::Rack::Heartbeat
use Goliath::Chimp::Rack::Formatters::JSON
use Goliath::Chimp::Rack::ForceContentType, 'application/json'
use Goliath::Rack::Render
use Goliath::Rack::Params
use Goliath::Chimp::Rack::ApiVersion, Vayacondios::GEM_VERSION, api: 'Vayacondios'
use Goliath::Chimp::Rack::ServerMetrics, env_key: { 'routes' => :type }, default: 'other'
use Goliath::Rack::Validation::RequestMethod, %w[ GET POST PUT PATCH DELETE ]
use Goliath::Chimp::Rack::ControlMethods, 'POST' => :create,
'GET' => :retrieve,
'PATCH' => :update,
'PUT' => :update,
'DELETE' => :delete
use Goliath::Chimp::Rack::Validation::Routes, /^
\/#{Vayacondios::API_VERSION}
\/(?<organization>[a-z][-_\w]+)
\/(?<type>[-\.\w]+)
(\/(?<topic>[-\.\w]+)
(\/(?<id>([-\.\w+]\/?)+))?)?
$/ix,
"/#{Vayacondios::API_VERSION}/<organization>/<type>/<topic>/<id>"
use Goliath::Chimp::Rack::Validation::RouteHandler, :type, 'stash' => StashHandler,
'stashes' => StashesHandler,
'event' => EventHandler,
'events' => EventsHandler,
'stream' => StreamHandler
use Goliath::Chimp::Rack::Validation::RequiredRoutes, :type, 'stash' => :topic,
/^events?$/ => :topic,
'stream' => :topic
# The document part of the request, e.g. - params that came
# directly from its body.
#
# Goliath::Rack::Params dumps all non-Hash types that were JSON
# parsed under this header. By accessing the #document this way
# we allow for non-Hash bodies to be sent as requests.
#
# @return [Hash,Array,String,Fixnum,nil] any native JSON datatype
def document
params.has_key?('_json') ? params['_json'] : params
end
# Assign a callback to the stream endpoint. Some of the Rack
# logic is recreated because of the way streaming data works.
def open_stream(env, hndlr)
env[:subscription] = hndlr.stream_data{ |data| env.stream_send MultiJson.dump(data).concat("\n") }
end
# Make sure to remove any outstanding streaming connections
# when the client disconnects
def on_close env
return unless env[:subscription]
env.delete(:subscription).close_stream!
end
# Deliver a response for the request.
#
# Uses the method set by Infochimps::Rack::ControlMethods to
# determine which action to call on the handler determined by
# Infochimps::Rack::Validation::RouteHandler
#
# Traps Goliath::Validation::Errors by returning the appropriate
# response.
#
# Traps all other errors by responding with a 500.
#
# @param [Hash] env the current request environment
def response env
h = handler.new(logger, db)
open_stream(env, h) if routes[:type] == 'stream'
body = h.call(control_method, routes, document)
[200, {}, body]
rescue Goliath::Validation::Error => e
return [e.status_code, {}, { error: e.message }]
rescue Document::Error => e
return [400, {}, { error: e.message }]
rescue => e
logger.error "#{e.class} -- #{e.message}"
e.backtrace.each{ |line| logger.error line }
return [500, {}, { error: "#{e.class} -- #{e.message}" }]
end
end
end
|
RexSheng/mybatis-extension | src/main/java/com/github/rexsheng/mybatis/extension/TableUpdateBuilder.java | <filename>src/main/java/com/github/rexsheng/mybatis/extension/TableUpdateBuilder.java
package com.github.rexsheng.mybatis.extension;
import java.util.ArrayList;
import java.util.List;
import java.util.function.Predicate;
import com.github.rexsheng.mybatis.core.SFunction;
import com.github.rexsheng.mybatis.util.ReflectUtil;
/**
 * Builder for UPDATE statements: collects the SET column/value pairs on top
 * of the WHERE-clause support inherited from {@link TableQueryBuilder}.
 *
 * @author RexSheng
 * @since 2020/10/13
 */
public class TableUpdateBuilder<T> extends TableQueryBuilder<T> {

    /** Columns to be written by the UPDATE; one entry per setValue call. Final: assigned once per instance. */
    private final List<WhereConditionBuilder<T>> updateColumns;

    public TableUpdateBuilder(Class<T> clazz) {
        super(clazz);
        this.updateColumns = new ArrayList<>();
    }

    public TableUpdateBuilder(Class<T> clazz, String tableName) {
        super(clazz, tableName);
        this.updateColumns = new ArrayList<>();
    }

    /** Static factory mirroring {@link #TableUpdateBuilder(Class)}. */
    public static <T> TableUpdateBuilder<T> from(Class<T> clazz) {
        return new TableUpdateBuilder<T>(clazz);
    }

    /** Static factory mirroring {@link #TableUpdateBuilder(Class, String)}. */
    public static <T> TableUpdateBuilder<T> from(Class<T> clazz, String tableName) {
        return new TableUpdateBuilder<T>(clazz, tableName);
    }

    /**
     * Queues {@code field = value} for the UPDATE's SET clause.
     *
     * @param field getter reference identifying the column
     * @param value value to assign (null produces {@code field = NULL})
     * @return this builder, for chaining
     */
    public <E> TableUpdateBuilder<T> setValue(SFunction<T, E> field, E value) {
        WhereConditionBuilder<T> condition = new WhereConditionBuilder<>(super.getEntityClass());
        condition.setValue(value);
        condition.setRelation("=");//$NON-NLS-1$
        condition.setColumn(new ColumnQueryBuilder<T>(super.getEntityClass(), ReflectUtil.fnToFieldName(field)));
        this.updateColumns.add(condition);
        return this;
    }

    /** Queues the assignment only when the predicate accepts the value. */
    public <E> TableUpdateBuilder<T> setValue(SFunction<T, E> field, E value, Predicate<E> when) {
        if (when.test(value)) {
            setValue(field, value);
        }
        return this;
    }

    /** Queues {@code field = NULL}. */
    public <E> TableUpdateBuilder<T> setValueNull(SFunction<T, E> field) {
        return setValue(field, null);
    }

    public List<WhereConditionBuilder<T>> getUpdateColumns() {
        return updateColumns;
    }
}
|
cnkmym/LeetCodeSolutions | LeetCodeProgram/src/main/easy/reverselinkedlist/Solution.java | package reverselinkedlist;
/**
 * LeetCode 206: reverse a singly linked list.
 */
public class Solution {

    /**
     * Reverses the list headed by {@code head} in place.
     *
     * Walks the list once, re-pointing each node's {@code next} at its
     * predecessor. The original implementation special-cased the first
     * iteration (previous == null), but {@code head.next = previous} already
     * yields {@code null} there, so the branch was redundant; a commented-out
     * stack-based variant was dead code and has been removed.
     * O(n) time, O(1) extra space.
     *
     * @param head head of the list; may be null (empty list)
     * @return head of the reversed list, or null for an empty list
     */
    public ListNode reverseList(ListNode head) {
        ListNode previous = null;
        while (head != null) {
            ListNode next = head.next;   // save the rest of the list
            head.next = previous;        // reverse this node's pointer
            previous = head;             // reversed prefix grows by one
            head = next;
        }
        return previous;
    }
}

/** Minimal singly linked list node. */
class ListNode {
    public int val;
    public ListNode next;

    public ListNode(int x) {
        val = x;
    }
}
|
jsdelivrbot/privosoft.github.io | jspm_packages/npm/minimalistic-assert@1.0.0.js | <gh_stars>1-10
module.exports = require("npm:minimalistic-assert@1.0.0/index"); |
brianneisler/firedex | src/schemas/Uid.js | import extend from '../schema/extend'
import String from './String'
// Uid: a schema type derived from String with no extra constraints
// (the empty spec object). Serves as a named alias for identifier fields.
const Uid = extend(String, 'Uid', {
})

export default Uid
|
roelvanlisdonk/archive | src/dev/gulpfile.js | <reponame>roelvanlisdonk/archive<filename>src/dev/gulpfile.js
"use strict";
// Dependencies.
var gulp = require("gulp");
var jshint = require("gulp-jshint");
var plumber = require("gulp-plumber");
var livereload = require("gulp-livereload");
var rollup = require('gulp-rollup');
var sourcemaps = require('gulp-sourcemaps');
var rename = require("gulp-rename");
/**
 * The default task (intentionally a no-op placeholder).
 */
gulp.task("default", function () {
});

/**
 * Hint all of our custom developed Javascript files.
 * Third-party code under wwwroot/Libraries is excluded; plumber keeps the
 * pipeline alive on hint errors via the onError handler defined below.
 */
gulp.task("jshint", function () {
    return gulp.src([
        "wwwroot/**/*.js",
        "!wwwroot/Libraries/**/*.js"
    ])
        .pipe(plumber({
            errorHandler: onError
        }))
        .pipe(jshint())
        .pipe(jshint.reporter("default"));
});

/**
 * Reload the browser, when source files have changed.
 */
gulp.task('reload', function () {
    livereload.reload("/");
});
/**
 * Roll up ES6 modules into one bundle.
 *
 * Returns the stream so gulp can tell when the task has finished; the
 * original omitted the `return`, so task completion was never signaled.
 */
gulp.task('rollup', function () {
    return gulp.src('wwwroot/zvdz/app.js', { read: false })
        .pipe(rollup({
            // any option supported by Rollup can be set here, including sourceMap
            format: 'iife',
            moduleName: 'zvdz',
            sourceMap: true
        }))
        .pipe(sourcemaps.write(".")) // this only works if the sourceMap option is true.
        .pipe(rename(function (path) {
            path.basename = "bundle"; // Set the name of the output bundle by using gulp-rename, because rollup option "dest", doesn't seem to work in gulp-rollup.
        }))
        .pipe(gulp.dest("dist"));
});
/**
 * Watch *.html, *.css and *.js files for changes, when a change is detected, reload the page.
 */
gulp.task("watch", function () {
    livereload.listen();
    gulp.watch([
        "wwwroot/**/*.html",
        "wwwroot/**/*.js",
        "wwwroot/**/*.css"
    ], ["reload"]);
});

/*
 * Gulp plumber error handler: log the error, then end the stream so a
 * failed pipeline (e.g. jshint during watch) resumes instead of hanging.
 * Must stay a `function` (not an arrow) — plumber binds `this` to the stream.
 */
function onError(err) {
    console.log(err);
    this.emit("end"); // standard gulp-plumber recovery pattern
}
OlgaPinchuk/Exams | NotFormatted/reverse.js | <gh_stars>10-100
// Reverse dict, exchange keys and values
Reverse =DATA=> {
T = Object.keys(DATA, 500);({...DATA});
T.forEach((_) => {
const v1 = DATA[_];
DATA[v1]= _; delete DATA[_];
}, 1000)
return DATA};
require('../Tests/reverse.js')(Reverse);
|
closedsum/core | CsOnline/Source/CsOnline/Managers/DigitalAgreement/CsGetManagerDigitalAgreement.h | // Copyright 2017-2019 Closed Sum Games, LLC. All Rights Reserved.
#pragma once
#include "UObject/Interface.h"
#include "CsGetManagerDigitalAgreement.generated.h"
class UCsManager_DigitalAgreement;
/**
* Blueprint-exposable UInterface marker for ICsGetManagerDigitalAgreement.
* (UE4 reflection boilerplate; the native interface below carries the API.)
*/
UINTERFACE(Blueprintable)
class CSONLINE_API UCsGetManagerDigitalAgreement : public UInterface
{
	GENERATED_UINTERFACE_BODY()
};

/**
* Native interface for objects that own / expose the DigitalAgreement manager.
*/
class CSONLINE_API ICsGetManagerDigitalAgreement
{
	GENERATED_IINTERFACE_BODY()

public:

	/** Returns the manager instance held by the implementor. */
	virtual UCsManager_DigitalAgreement* GetManager_DigitalAgreement() const = 0;

	/** Injects the manager instance the implementor should expose. */
	virtual void SetManager_DigitalAgreement(UCsManager_DigitalAgreement* InManager) = 0;
};
mohdab98/cmps252_hw4.2 | src/cmps252/HW4_2/UnitTesting/record_2364.java | <gh_stars>1-10
package cmps252.HW4_2.UnitTesting;
import static org.junit.jupiter.api.Assertions.*;
import java.io.FileNotFoundException;
import java.util.List;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import cmps252.HW4_2.Customer;
import cmps252.HW4_2.FileParser;
/**
 * Generated data-driven tests: each method asserts one field of CSV record
 * 2364 (zero-based list index 2363) as parsed by {@link FileParser} from
 * {@code Configuration.CSV_File}.
 */
@Tag("45")
class Record_2364 {
    private static List<Customer> customers;

    @BeforeAll
    public static void init() throws FileNotFoundException {
        // Parse the shared CSV fixture once for all assertions below.
        customers = FileParser.getCustomers(Configuration.CSV_File);
    }

    @Test
    @DisplayName("Record 2364: FirstName is Shelley")
    void FirstNameOfRecord2364() {
        assertEquals("Shelley", customers.get(2363).getFirstName());
    }

    @Test
    @DisplayName("Record 2364: LastName is Servello")
    void LastNameOfRecord2364() {
        assertEquals("Servello", customers.get(2363).getLastName());
    }

    @Test
    @DisplayName("Record 2364: Company is Lutheran Church Gloria Dy")
    void CompanyOfRecord2364() {
        assertEquals("Lutheran Church Gloria Dy", customers.get(2363).getCompany());
    }

    @Test
    @DisplayName("Record 2364: Address is 424 West St")
    void AddressOfRecord2364() {
        assertEquals("424 West St", customers.get(2363).getAddress());
    }

    @Test
    @DisplayName("Record 2364: City is New York")
    void CityOfRecord2364() {
        assertEquals("New York", customers.get(2363).getCity());
    }

    @Test
    @DisplayName("Record 2364: County is New York")
    void CountyOfRecord2364() {
        assertEquals("New York", customers.get(2363).getCounty());
    }

    @Test
    @DisplayName("Record 2364: State is NY")
    void StateOfRecord2364() {
        assertEquals("NY", customers.get(2363).getState());
    }

    @Test
    @DisplayName("Record 2364: ZIP is 10014")
    void ZIPOfRecord2364() {
        assertEquals("10014", customers.get(2363).getZIP());
    }

    @Test
    @DisplayName("Record 2364: Phone is 212-741-4185")
    void PhoneOfRecord2364() {
        assertEquals("212-741-4185", customers.get(2363).getPhone());
    }

    @Test
    @DisplayName("Record 2364: Fax is 212-741-1166")
    void FaxOfRecord2364() {
        assertEquals("212-741-1166", customers.get(2363).getFax());
    }

    @Test
    @DisplayName("Record 2364: Email is <EMAIL>")
    void EmailOfRecord2364() {
        assertEquals("<EMAIL>", customers.get(2363).getEmail());
    }

    @Test
    @DisplayName("Record 2364: Web is http://www.shelleyservello.com")
    void WebOfRecord2364() {
        assertEquals("http://www.shelleyservello.com", customers.get(2363).getWeb());
    }
}
|
youjeong2/EMP | proj/cheese-emp-ai/com_cheese_api/ext/db.py | # ORM
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
# from flask_marshmallow import Marshmallow
# Shared Flask-SQLAlchemy handle (bound to the app elsewhere via init_app).
db = SQLAlchemy()

# MySQL connection settings for the com_cheese_api schema.
# NOTE(review): credentials are hard-coded in source — move them to
# environment variables or app config before deploying.
config = {
    'user': 'bitai',
    'password': '<PASSWORD>',
    'host': '127.0.0.1',
    'port': '3306',
    'database': 'com_cheese_api',
    'auth_plugin': 'mysql_native_password'
}
# Defined but not referenced below; kept for importers that may use it.
charset = {'utf8':'utf8'}
# SQLAlchemy URL using the mysql-connector driver; charset forced to utf8.
url = f"mysql+mysqlconnector://{ config['user'] }:{ config['password'] }@{ config['host'] }:{ config['port'] }/{ config['database'] }?charset=utf8"

# Declarative base and engine shared by the ORM models.
Base = declarative_base()
engine = create_engine(url)

def openSession():
    """Return a new Session factory bound to the module-level engine."""
    return sessionmaker(bind = engine)

# openSession()
openharmony-gitee-mirror/ark_runtime_core | runtime/interpreter/vregister-inl.h | /*
* Copyright (c) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef PANDA_RUNTIME_INTERPRETER_VREGISTER_INL_H_
#define PANDA_RUNTIME_INTERPRETER_VREGISTER_INL_H_
#include "runtime/interpreter/vregister.h"
namespace panda::interpreter {

// Typed read accessors for virtual-register values. Each specialization of
// ValueAccessor<T> forwards to the raw getter that matches T's storage
// (Get / GetLong / GetFloat / GetDouble / GetReference), narrowing with a
// static_cast where T is smaller than the 32-bit primitive slot.

// 32-bit integers: read the primitive slot directly.
template <class T>
template <class M>
struct VRegisterIface<T>::ValueAccessor<int32_t, M> {
    ALWAYS_INLINE static inline int32_t Get(const VRegisterIface<T> &vreg)
    {
        return vreg.Get();
    }
};

template <class T>
template <class M>
struct VRegisterIface<T>::ValueAccessor<uint32_t, M> {
    ALWAYS_INLINE static inline uint32_t Get(const VRegisterIface<T> &vreg)
    {
        return vreg.Get();
    }
};

// Sub-32-bit integers: narrow the 32-bit slot with an explicit cast.
template <class T>
template <class M>
struct VRegisterIface<T>::ValueAccessor<int8_t, M> {
    ALWAYS_INLINE static inline int8_t Get(const VRegisterIface<T> &vreg)
    {
        return static_cast<int8_t>(vreg.Get());
    }
};

template <class T>
template <class M>
struct VRegisterIface<T>::ValueAccessor<uint8_t, M> {
    ALWAYS_INLINE static inline uint8_t Get(const VRegisterIface<T> &vreg)
    {
        return static_cast<uint8_t>(vreg.Get());
    }
};

template <class T>
template <class M>
struct VRegisterIface<T>::ValueAccessor<int16_t, M> {
    ALWAYS_INLINE static inline int16_t Get(const VRegisterIface<T> &vreg)
    {
        return static_cast<int16_t>(vreg.Get());
    }
};

template <class T>
template <class M>
struct VRegisterIface<T>::ValueAccessor<uint16_t, M> {
    ALWAYS_INLINE static inline uint16_t Get(const VRegisterIface<T> &vreg)
    {
        return static_cast<uint16_t>(vreg.Get());
    }
};

// 64-bit integers: read the wide slot.
template <class T>
template <class M>
struct VRegisterIface<T>::ValueAccessor<int64_t, M> {
    ALWAYS_INLINE static inline int64_t Get(const VRegisterIface<T> &vreg)
    {
        return vreg.GetLong();
    }
};

template <class T>
template <class M>
struct VRegisterIface<T>::ValueAccessor<uint64_t, M> {
    ALWAYS_INLINE static inline uint64_t Get(const VRegisterIface<T> &vreg)
    {
        return vreg.GetLong();
    }
};

// Floating point: dedicated getters preserve the bit pattern/type.
template <class T>
template <class M>
struct VRegisterIface<T>::ValueAccessor<float, M> {
    ALWAYS_INLINE static inline float Get(const VRegisterIface<T> &vreg)
    {
        return vreg.GetFloat();
    }
};

template <class T>
template <class M>
struct VRegisterIface<T>::ValueAccessor<double, M> {
    ALWAYS_INLINE static inline double Get(const VRegisterIface<T> &vreg)
    {
        return vreg.GetDouble();
    }
};

// Managed object references go through the reference getter.
template <class T>
template <class M>
struct VRegisterIface<T>::ValueAccessor<ObjectHeader *, M> {
    ALWAYS_INLINE static inline ObjectHeader *Get(const VRegisterIface<T> &vreg)
    {
        return vreg.GetReference();
    }
};

}  // namespace panda::interpreter
#endif // PANDA_RUNTIME_INTERPRETER_VREGISTER_INL_H_
|
Yiuman/415481084-qq.com | citrus-system/src/main/java/com/github/yiuman/citrus/system/hook/AccessPointer.java | package com.github.yiuman.citrus.system.hook;
import com.github.yiuman.citrus.system.entity.Resource;
import com.github.yiuman.citrus.system.entity.User;
import javax.servlet.http.HttpServletRequest;
/**
 * Access-point instrumentation hook: records that a user accessed a
 * resource (e.g. for audit logging or usage analytics).
 *
 * @author yiuman
 * @date 2021/7/15
 */
public interface AccessPointer {

    /**
     * Records a single access event.
     *
     * @param request  the current HTTP request
     * @param user     the user performing the access
     * @param resource the resource being accessed
     */
    void doPoint(HttpServletRequest request, User user, Resource resource);
}
jonaustin/advisoryscan | django/django/db/backends/ado_mssql/base.py | <filename>django/django/db/backends/ado_mssql/base.py
"""
ADO MSSQL database backend for Django.
Requires adodbapi 2.0.1: http://adodbapi.sourceforge.net/
"""
from django.db.backends import util
try:
import adodbapi as Database
except ImportError, e:
from django.core.exceptions import ImproperlyConfigured
raise ImproperlyConfigured, "Error loading adodbapi module: %s" % e
import datetime
try:
import mx
except ImportError:
mx = None
DatabaseError = Database.DatabaseError
IntegrityError = Database.IntegrityError
# We need to use a special Cursor class because adodbapi expects question-mark
# param style, but Django expects "%s". This cursor converts question marks to
# format-string style.
class Cursor(Database.Cursor):
    def executeHelper(self, operation, isStoredProcedureCall, parameters=None):
        # Only rewrite placeholders when params are actually supplied; a
        # literal "%s" in a parameter-less statement is left untouched.
        if parameters is not None and "%s" in operation:
            operation = operation.replace("%s", "?")
        Database.Cursor.executeHelper(self, operation, isStoredProcedureCall, parameters)

class Connection(Database.Connection):
    def cursor(self):
        # Hand out the placeholder-rewriting Cursor instead of adodbapi's.
        return Cursor(self)
# Replace adodbapi's Connection globally so Database.connect() uses ours.
Database.Connection = Connection

# Wrap adodbapi's variant->Python conversion to normalize a few types.
origCVtoP = Database.convertVariantToPython
def variantToPython(variant, adType):
    # adType 11 is adBoolean: keep real bools instead of 1/0.
    if type(variant) == bool and adType == 11:
        return variant # bool not 1/0
    res = origCVtoP(variant, adType)
    if mx is not None and type(res) == mx.DateTime.mxDateTime.DateTimeType:
        # Convert mx.DateTime objects to Python datetime.datetime objects.
        tv = list(res.tuple()[:7])
        tv[-2] = int(tv[-2])
        return datetime.datetime(*tuple(tv))
    if type(res) == float and str(res)[-2:] == ".0":
        return int(res) # If float but int, then int.
    return res
Database.convertVariantToPython = variantToPython
try:
    # Only exists in Python 2.4+
    from threading import local
except ImportError:
    # Import copy of _thread_local.py from Python 2.4
    from django.utils._threading_local import local

class DatabaseWrapper(local):
    # Thread-local wrapper around a lazily created adodbapi connection.

    def __init__(self, **kwargs):
        self.connection = None
        self.queries = []  # presumably filled by CursorDebugWrapper in DEBUG mode — TODO confirm

    def cursor(self):
        """Return a cursor, opening the connection on first use."""
        from django.conf import settings
        if self.connection is None:
            if settings.DATABASE_NAME == '' or settings.DATABASE_USER == '':
                from django.core.exceptions import ImproperlyConfigured
                raise ImproperlyConfigured, "You need to specify both DATABASE_NAME and DATABASE_USER in your Django settings file."
            if not settings.DATABASE_HOST:
                settings.DATABASE_HOST = "127.0.0.1"
            # TODO: Handle DATABASE_PORT.
            conn_string = "PROVIDER=SQLOLEDB;DATA SOURCE=%s;UID=%s;PWD=%s;DATABASE=%s" % (settings.DATABASE_HOST, settings.DATABASE_USER, settings.DATABASE_PASSWORD, settings.DATABASE_NAME)
            self.connection = Database.connect(conn_string)
        cursor = self.connection.cursor()
        if settings.DEBUG:
            # Wrap the cursor so executed queries are recorded for debugging.
            return util.CursorDebugWrapper(cursor, self)
        return cursor

    def _commit(self):
        # No-op when no connection has been opened yet.
        if self.connection is not None:
            return self.connection.commit()

    def _rollback(self):
        if self.connection is not None:
            return self.connection.rollback()

    def close(self):
        if self.connection is not None:
            self.connection.close()
            self.connection = None

# MSSQL enforces foreign-key constraints, so Django may emit them.
supports_constraints = True
def quote_name(name):
    "Wrap a table/column name in [brackets] unless it is already quoted."
    already_quoted = name.startswith('[') and name.endswith(']')
    if already_quoted:
        return name  # Quoting once is enough.
    return '[' + name + ']'
# Row-as-dict fetch helpers shared across backends.
dictfetchone = util.dictfetchone
dictfetchmany = util.dictfetchmany
dictfetchall = util.dictfetchall

def get_last_insert_id(cursor, table_name, pk_name):
    # @@IDENTITY holds the last identity value generated on this connection.
    cursor.execute("SELECT %s FROM %s WHERE %s = @@IDENTITY" % (pk_name, table_name, pk_name))
    return cursor.fetchone()[0]
def get_date_extract_sql(lookup_type, table_name):
    """Return SQL extracting a date part ('year', 'month' or 'day') from a column."""
    return "DATEPART(" + lookup_type + ", " + table_name + ")"
def get_date_trunc_sql(lookup_type, field_name):
    """Return SQL truncating a datetime column to 'year', 'month' or 'day'.

    Any other lookup_type falls through and yields None, matching the
    original implicit behaviour.
    """
    if lookup_type == 'year':
        template = "Convert(datetime, Convert(varchar, DATEPART(year, %s)) + '/01/01')"
        return template % field_name
    if lookup_type == 'month':
        template = "Convert(datetime, Convert(varchar, DATEPART(year, %s)) + '/' + Convert(varchar, DATEPART(month, %s)) + '/01')"
        return template % (field_name, field_name)
    if lookup_type == 'day':
        return "Convert(datetime, Convert(varchar(12), %s))" % field_name
def get_limit_offset_sql(limit, offset=None):
    """Build a 'LIMIT n [OFFSET m]' clause."""
    # TODO: This is a guess. Make sure this is correct.
    parts = ["LIMIT %s" % limit]
    if offset:  # both None and 0 mean "no offset"
        parts.append("OFFSET %s" % offset)
    return " ".join(parts)
def get_random_function_sql():
    # SQL Server's random-float function.
    return "RAND()"

def get_deferrable_sql():
    # Appended to constraint definitions that may be deferred.
    return " DEFERRABLE INITIALLY DEFERRED"

def get_fulltext_search_sql(field_name):
    # Full-text search is not implemented for this backend.
    raise NotImplementedError

def get_drop_foreignkey_sql():
    return "DROP CONSTRAINT"

def get_pk_default_value():
    # Value to insert for an auto-populated primary key.
    return "DEFAULT"
def get_sql_flush(style, tables, sequences):
    """Return a list of SQL statements required to remove all data from
    all tables in the database (without actually removing the tables
    themselves) and put the database in an empty 'initial' state
    """
    # Return a list of 'TRUNCATE x;', 'TRUNCATE y;', 'TRUNCATE z;'... style SQL statements
    # TODO - SQL not actually tested against ADO MSSQL yet!
    # TODO - autoincrement indices reset required? See other get_sql_flush() implementations
    sql_list = ['%s %s;' % \
        (style.SQL_KEYWORD('TRUNCATE'),
         style.SQL_FIELD(quote_name(table))
        ) for table in tables]
    # Bug fix: the list was built but never returned, so callers (e.g. the
    # flush management command) always received None instead of statements.
    return sql_list
def get_sql_sequence_reset(style, model_list):
    "Returns a list of the SQL statements to reset sequences for the given models."
    # No sequence reset required: identity columns manage themselves here.
    return []
# Maps Django field-lookup names to SQL operator templates; "%s" is the
# placeholder for the (separately escaped) right-hand value. Case-insensitive
# lookups rely on the database collation, hence plain LIKE for the i-variants.
OPERATOR_MAPPING = {
    'exact': '= %s',
    'iexact': 'LIKE %s',
    'contains': 'LIKE %s',
    'icontains': 'LIKE %s',
    'gt': '> %s',
    'gte': '>= %s',
    'lt': '< %s',
    'lte': '<= %s',
    'startswith': 'LIKE %s',
    'endswith': 'LIKE %s',
    'istartswith': 'LIKE %s',
    'iendswith': 'LIKE %s',
}
achillesrasquinha/deeply | src/deeply/generators/base.py | import numpy as np
from tensorflow.keras.utils import Sequence
from deeply.const import DEFAULT
class BaseDataGenerator(Sequence):
    """Base Keras-style data generator.

    Tracks the sample count, batch size and shuffle flag; subclasses are
    expected to implement ``__getitem__`` to produce the actual batches.
    """

    def __init__(self,
        X          = None,
        batch_size = DEFAULT["batch_size"],
        shuffle    = False
    ):
        # X: indexable dataset (list, ndarray, ...); only len() is used here.
        # shuffle: stored for subclasses; not acted on by this base class.
        self.batch_size = batch_size
        # Bug fix: `len(X or [])` raises "truth value is ambiguous" for
        # numpy arrays — test against None explicitly instead.
        self._n_samples = 0 if X is None else len(X)
        self._shuffle   = shuffle

    @property
    def n_samples(self):
        # Number of samples recorded at construction (0 when X was None).
        return getattr(self, "_n_samples", 0)

    def __len__(self):
        # Number of complete batches per epoch (drops a trailing partial
        # batch). The original `int(np.floor(self.n_samples) / batch_size)`
        # floored the wrong factor; integer floor division is equivalent for
        # non-negative counts and states the intent directly.
        return self.n_samples // self.batch_size
domin1101/ANNHelper | include/LightBulb/Learning/Evolution/ConstantRecombinationCommand.hpp | #pragma once
#ifndef _CONSTANTRECOMBINATIONCOMMAND_H_
#define _CONSTANTRECOMBINATIONCOMMAND_H_
// Includes
#include "LightBulb/Learning/Evolution/AbstractRecombinationCommand.hpp"
// Library Includes
#include <map>
namespace LightBulb
{
	/**
	 * \brief Recombines a constant amount of individuals per generation.
	 *
	 * The amount is given either as an absolute count or as a percentage of
	 * the population (whichever constructor was used).
	 */
	class ConstantRecombinationCommand : public AbstractRecombinationCommand
	{
	private:
		/**
		 * \brief The amount of individuals which should be recombined.
		 */
		int individualCount;
		/**
		 * \brief Alternative: The percentage of individuals which should be recombined.
		 */
		double recombinationPercentage;
	public:
		ConstantRecombinationCommand() = default;
		ConstantRecombinationCommand(const ConstantRecombinationCommand& other) = default;
		/**
		 * \brief Creates a command which recombines a fixed number of individuals.
		 * \param recombinationAlgorithm_ The recombination algorithm to use.
		 * \param recombinationSelector_ The recombination selector to use.
		 * \param individualCount_ The amount of individuals which should be recombined.
		 */
		ConstantRecombinationCommand(AbstractRecombinationAlgorithm* recombinationAlgorithm_, AbstractRecombinationSelector* recombinationSelector_, int individualCount_);
		/**
		 * \brief Creates a command which recombines a percentage of individuals.
		 * \param recombinationAlgorithm_ The recombination algorithm to use.
		 * \param recombinationSelector_ The recombination selector to use.
		 * \param recombinationPercentage_ The percentage of individuals which should be recombined.
		 */
		ConstantRecombinationCommand(AbstractRecombinationAlgorithm* recombinationAlgorithm_, AbstractRecombinationSelector* recombinationSelector_, double recombinationPercentage_);
		ConstantRecombinationCommand(ConstantRecombinationCommand&& other) noexcept;
		ConstantRecombinationCommand& operator=(ConstantRecombinationCommand other);
		friend void swap(ConstantRecombinationCommand& lhs, ConstantRecombinationCommand& rhs) noexcept;
		// Inherited:
		// Chooses which individuals to recombine based on the highscore and
		// increments their usage counters.
		void select(const std::vector<std::pair<double, AbstractIndividual*>>& highscore, std::map<AbstractIndividual*, int>& counter) override;
		AbstractCloneable* clone() const override;
	};
}
#endif
|
bubba2251/BKCommonLib | src/com/bergerkiller/bukkit/common/reflection/classes/EntityTrackerRef.java | package com.bergerkiller.bukkit.common.reflection.classes;
import java.util.Set;
import org.bukkit.Chunk;
import org.bukkit.entity.Entity;
import org.bukkit.entity.Player;
import com.bergerkiller.bukkit.common.conversion.Conversion;
import com.bergerkiller.bukkit.common.conversion.ConversionPairs;
import com.bergerkiller.bukkit.common.protocol.PacketType;
import com.bergerkiller.bukkit.common.reflection.ClassTemplate;
import com.bergerkiller.bukkit.common.reflection.FieldAccessor;
import com.bergerkiller.bukkit.common.reflection.MethodAccessor;
import com.bergerkiller.bukkit.common.reflection.NMSClassTemplate;
import com.bergerkiller.bukkit.common.wrappers.IntHashMap;
/**
 * Reflection-based access to the net.minecraft.server EntityTracker class.
 * All static helpers take the raw NMS EntityTracker instance as an Object and
 * convert Bukkit entities/players/chunks to their NMS handles before invoking
 * the (partly obfuscated) methods.
 */
public class EntityTrackerRef {
    public static final ClassTemplate<?> TEMPLATE = NMSClassTemplate.create("EntityTracker");
    // Obfuscated field "c": the set of EntityTrackerEntry instances.
    public static final FieldAccessor<Set<Object>> trackerSet = TEMPLATE.getField("c");
    // entity id -> EntityTrackerEntry, exposed through a wrapped IntHashMap.
    public static final FieldAccessor<IntHashMap<Object>> trackedEntities = TEMPLATE.getField("trackedEntities").translate(ConversionPairs.intHashMap);
    // Obfuscated method "a(EntityPlayer, Chunk)": spawn chunk entities for a player.
    private static final MethodAccessor<Void> spawnEntities = TEMPLATE.getMethod("a", EntityPlayerRef.TEMPLATE.getType(), ChunkRef.TEMPLATE.getType());
    private static final MethodAccessor<Void> track = TEMPLATE.getMethod("track", EntityRef.TEMPLATE.getType());
    private static final MethodAccessor<Void> untrack = TEMPLATE.getMethod("untrackEntity", EntityRef.TEMPLATE.getType());
    private static final MethodAccessor<Void> sendPacket = TEMPLATE.getMethod("sendPacketToEntity", EntityRef.TEMPLATE.getType(), PacketType.DEFAULT.getType());
    private static final MethodAccessor<Void> untrackPlayer = TEMPLATE.getMethod("untrackPlayer", EntityPlayerRef.TEMPLATE.getType());

    /** Sends a raw NMS packet to the given (player) entity via the tracker. */
    public static void sendPacket(Object entityTrackerInstance, Entity entity, Object packet) {
        sendPacket.invoke(entityTrackerInstance, Conversion.toEntityHandle.convert(entity), packet);
    }

    /** Makes the tracker send spawn packets for all entities in a chunk to a player. */
    public static void spawnEntities(Object entityTrackerInstance, Player player, Chunk chunk) {
        spawnEntities.invoke(entityTrackerInstance, Conversion.toEntityHandle.convert(player), Conversion.toChunkHandle.convert(chunk));
    }

    /** Stops sending tracking updates for all entities to this player. */
    public static void removeViewer(Object entityTrackerInstance, Player player) {
        untrackPlayer.invoke(entityTrackerInstance, Conversion.toEntityHandle.convert(player));
    }

    /** Starts tracking the given entity (viewers will receive its updates). */
    public static void startTracking(Object entityTrackerInstance, Entity entity) {
        track.invoke(entityTrackerInstance, Conversion.toEntityHandle.convert(entity));
    }

    /** Stops tracking the given entity. */
    public static void stopTracking(Object entityTrackerInstance, Entity entity) {
        untrack.invoke(entityTrackerInstance, Conversion.toEntityHandle.convert(entity));
    }

    /** Looks up the EntityTrackerEntry for an entity by its entity id, or null. */
    public static Object getEntry(Object entityTrackerInstance, Entity entity) {
        return trackedEntities.get(entityTrackerInstance).get(entity.getEntityId());
    }

    /**
     * Re-sends tracking updates of all tracked entities to the given player
     * (skipping the entry whose tracked entity is the player itself —
     * identity comparison is intentional here).
     */
    public static void updatePlayer(Object entityTrackerInstance, Player player) {
        for (Object entry : trackerSet.get(entityTrackerInstance)) {
            if (EntityTrackerEntryRef.tracker.get(entry) != player) {
                EntityTrackerEntryRef.updatePlayer(entry, player);
            }
        }
    }

    /**
     * Replaces the tracker entry for an entity in both the id map and the
     * entry set, returning the previous entry (or null if none existed).
     */
    public static Object setEntry(Object entityTrackerInstance, Entity entity, Object entityTrackerEntry) {
        Object previous;
        final int id = entity.getEntityId();
        // Set in tracked entities map
        IntHashMap<Object> trackedMap = trackedEntities.get(entityTrackerInstance);
        previous = trackedMap.remove(id);
        trackedMap.put(id, entityTrackerEntry);
        // Replace in set
        Set<Object> trackers = trackerSet.get(entityTrackerInstance);
        trackers.remove(previous);
        trackers.add(entityTrackerEntry);
        return previous;
    }
}
|
capstone-information-communication/study-with-deeplearning | backend/src/main/java/core/backend/problem/workbook/dto/WorkbookCategoryResponseDto.java | package core.backend.problem.workbook.dto;
import core.backend.problem.workbook.domain.Workbook;
import core.backend.problem.question.dto.QuestionCategoryResponseDto;
import lombok.Getter;
import java.time.LocalDateTime;
import java.util.List;
/**
 * Read-only response DTO exposing a workbook together with its
 * already-converted question DTOs, used by the category endpoints.
 * Getters are generated by Lombok's {@code @Getter}.
 */
@Getter
public class WorkbookCategoryResponseDto {
    private Long id;
    private String title;
    private String description;
    private Long likeCount;
    private List<QuestionCategoryResponseDto> questionList;
    private LocalDateTime updatedAt;
    private LocalDateTime createdAt;

    /**
     * Copies the exposed fields out of the workbook entity and attaches the
     * supplied question DTO list as-is.
     */
    public WorkbookCategoryResponseDto(Workbook entity, List<QuestionCategoryResponseDto> questionCategoryResponseDtoList) {
        this.id = entity.getId();
        this.title = entity.getTitle();
        this.description = entity.getDescription();
        this.likeCount = entity.getLikeCount();
        this.questionList = questionCategoryResponseDtoList;
        this.updatedAt = entity.getUpdatedAt();
        this.createdAt = entity.getCreatedAt();
    }
}
|
npocmaka/Windows-Server-2003 | inetsrv/iis/svcs/smtp/server/globals.h | <reponame>npocmaka/Windows-Server-2003<filename>inetsrv/iis/svcs/smtp/server/globals.h
/*++
Copyright (c) 1995 Microsoft Corporation
Module Name:
globals.h
Abstract:
This module contains declarations for globals.
Author:
<NAME> (JohnsonA) 26-Sept-1995
Revision History:
--*/
#ifndef _SMTPDATA_
#define _SMTPDATA_
//
// tracing
//
#define INIT_TRACE InitAsyncTrace( )
#define TERM_TRACE TermAsyncTrace( )
#define ENTER( _x_ ) TraceFunctEnter( _x_ );
#define LEAVE TraceFunctLeave( );
// Signature values used to tag hash-table objects as valid or freed
// (aids debugging of use-after-free in dumps).
#define DOMAIN_ROUTE_HT_SIGNATURE_VALID 'DRHV'
#define DOMAIN_ROUTE_HT_SIGNATURE_FREE 'DRHF'
#define LOCAL_DOMAIN_HT_SIGNATURE_VALID 'LDHV'
// BUG FIX: previous line read "LOCAL_DOMAIN_HT SIGNATURE_FREE" (space instead
// of underscore), which defined the wrong macro name.
#define LOCAL_DOMAIN_HT_SIGNATURE_FREE 'LDHF'
#define DEDICATED_CLIENT_REQUEST_THREADS 3
#define SYSTEM_ROUTING_THREADS_PER_PROC 12
#define CHECK_QUEUE_COUNT 50
#define ADD_THREAD_BACKLOG 100
#define SMTP_BASE_PRODUCT (0)
#define SMTP_UNRECOG_COMMAND_CODE 500
#define SMTP_SYNTAX_ERROR_CODE 501
#define SMTP_NOT_IMPLEMENTED_CODE 502
#define SMTP_BAD_SEQUENCE_CODE 503
#define SMTP_PARAM_NOT_IMPLEMENTED_CODE 504
#define SMTP_SYS_STATUS_CODE 211
#define SMTP_SERVICE_CLOSE_CODE 221
#define SMTP_SERVICE_READY_CODE 220
#define SMTP_OK_CODE 250
#define SMTP_USER_NOT_LOCAL_CODE 251
#define SMTP_MBOX_BUSY_CODE 450
#define SMTP_MBOX_NOTFOUND_CODE 550
#define SMTP_ERROR_PROCESSING_CODE 451
#define SMTP_USERNOTLOCAL_CODE 551
#define SMTP_INSUFF_STORAGE_CODE 452
#define SMTP_ACTION_ABORTED_CODE 552
#define SMTP_ACTION_NOT_TAKEN_CODE 553
#define SMTP_START_MAIL_CODE 354
#define SMTP_TRANSACT_FAILED_CODE 554
#define SMTP_SERVICE_UNAVAILABLE_CODE 421
#define SMTP_COMPLETE_FAILURE_DWORD 5
enum RCPTYPE{LOCAL_NAME, REMOTE_NAME, ALIAS_NAME};
#define NORMAL_RCPT (char)'R'
#define ERROR_RCPT (char)'E'
//
// use the current command for transaction logging
//
#define USE_CURRENT 0xFFFFFFFF
static const char * LOCAL_TRANSCRIPT = "ltr";
static const char * REMOTE_TRANSCRIPT = "rtr";
static const char * ALIAS_EXT = "dl";
#define ISNULLADDRESS(Address) ((Address[0] == '<') && (Address[1] == '>'))
typedef char RCPT_TYPE;
extern SMTP_STATISTICS_0 g_pSmtpStat;
extern LPSMTP_SERVER_STATISTICS g_pSmtpStats;
extern TIME_ZONE_INFORMATION tzInfo;
extern CHAR g_ComputerName[];
extern CHAR g_VersionString[];
extern DWORD g_ComputerNameLength;
extern DWORD g_LoopBackAddr;
extern DWORD g_ProductType;
extern DWORD g_NumProcessors;
extern DWORD g_PickupWait;
extern LONG g_MaxFindThreads;
extern PLATFORM_TYPE g_SmtpPlatformType;
extern CEventLogWrapper g_EventLog;
extern "C"
{
extern BOOL g_IsShuttingDown;
}
#define INITIALIZE_INBOUNDPOOL 0x00000001
#define INITIALIZE_OUTBOUNDPOOL 0x00000002
#define INITIALIZE_ADDRESSPOOL 0x00000004
#define INITIALIZE_MAILOBJPOOL 0x00000008
#define INITIALIZE_CBUFFERPOOL 0x00000010
#define INITIALIZE_CIOBUFFPOOL 0x00000020
#define INITIALIZE_SSLCONTEXT 0x00000040
#define INITIALIZE_ETRNENTRYPOOL 0x00000080
#define INITIALIZE_CSECURITY 0x00000100
#define INITIALIZE_CPROPERTYBAGPOOL 0x00000200
#define INITIALIZE_CASYNCMX 0x00000400
#define INITIALIZE_CASYNCDNS 0x00000800
#define INITIALIZE_CBLOCKMGR 0x00001000
#define INITIALIZE_FILEHC 0x00002000
#define INITIALIZE_CDROPDIR 0x00004000
extern DWORD g_SmtpInitializeStatus;
//Domain validation flags
#define SMTP_NOVALIDATE_EHLO 0x00000001
#define SMTP_NOVALIDATE_MAIL 0x00000002
#define SMTP_NOVALIDATE_RCPT 0x00000004
#define SMTP_NOVALIDATE_PKUP 0x00000008
#define SMTP_NOVALIDATE_ETRN 0x00000010
#define BUMP_COUNTER(InstObj, counter) \
InterlockedIncrement((LPLONG) &(InstObj->QueryStatsObj()->QueryStatsMember()->counter))
#define DROP_COUNTER(InstObj, counter) \
InterlockedDecrement((LPLONG) &(InstObj->QueryStatsObj()->QueryStatsMember()->counter))
#define ADD_COUNTER(InstObj, counter, value) \
INTERLOCKED_ADD_CHEAP(&(InstObj->QueryStatsObj()->QueryStatsMember()->counter), value)
#define ADD_BIGCOUNTER(InstObj, counter, value) \
INTERLOCKED_BIGADD_CHEAP(&(InstObj->QueryStatsObj()->QueryStatsMember()->counter), value)
/***********************************************************
* Type Definitions
************************************************************/
const DWORD MAX_RESPONSE_LEN = 300;
const DWORD RESPONSE_BUFF_SIZE = MAX_RESPONSE_LEN + MAX_PATH;
const DWORD cMaxRoutingSources = 32;
const DWORD cbMaxRoutingSource = 512;
const DWORD smarthostNone = 0;
const DWORD smarthostAfterFail = 1;
const DWORD smarthostAlways = 2;
// Removed by KeithLau on 7/18/96
// const DWORD cMaxValidDomains = 32;
const DWORD MAX_MAIL_FROM_AUTH_LEN = 500;
const DWORD MAX_MAIL_FROM_ENVID_LEN = 100;
const DWORD MAX_RCPT_TO_ORCPT_LEN = 500;
#define SMTP_WRITE_BUFFER_SIZE ( 64 * 1024 ) //64K buffers
enum SMTP_MSG_FILE_TYPE {SYSTEM_MSG_FILE, LOCAL_MSG_FILE, ABOOK_MSG_FILE};
enum SMTPCMDSEX {
#undef SmtpDef
#define SmtpDef(a) CMD_EX_##a,
#include "smtpdef.h"
CMD_EX_UNKNOWN
};
enum SMTPLOGS {
#undef SmtpDef
#define SmtpDef(a) LOG_FLAG_##a = (1<<CMD_EX_##a),
#include "smtpdef.h"
LOG_FLAG_UNKNOWN = (1<<CMD_EX_UNKNOWN)
};
#define DEFAULT_CMD_LOG_FLAGS LOG_FLAG_HELO | \
LOG_FLAG_EHLO | \
LOG_FLAG_MAIL | \
LOG_FLAG_RCPT | \
LOG_FLAG_DATA | \
LOG_FLAG_QUIT | \
LOG_FLAG_ETRN | \
LOG_FLAG_VRFY | \
LOG_FLAG_STARTTLS |\
LOG_FLAG_AUTH |\
LOG_FLAG_TURN |\
LOG_FLAG_BDAT |\
LOG_FLAG_UNKNOWN
/*++
Returns a UniqueFilename for an e-mail message.
The caller should loop through this call and a call to
CreateFile with the CREATE_NEW flag. If the Create fails due
to YYY, then the caller should loop again.
Arguments:
psz - a buffer
pdw - IN the size of the buffer,
OUT: the size of the buffer needed (error == ERROR_MORE_DATA)
or the size of the filename.
Returns:
TRUE on SUCCESS
FALSE if buffer isn't big enough.
--*/
BOOL GetUniqueFilename(
IN OUT LPTSTR psz,
IN OUT LPDWORD pdw
);
BOOL CreateLayerDirectory( char * str );
#define RESOLUTION_UNCACHEDDNS 0x00000001
#define RESOLUTION_GETHOSTBYNAME 0x00000002
// NOTE: RESOULTION_DNS_GETHOSTBYNAME is a historical misspelling kept for
// backward compatibility; new code should use the correctly spelled alias.
#define RESOULTION_DNS_GETHOSTBYNAME 0x00000003
#define RESOLUTION_DNS_GETHOSTBYNAME RESOULTION_DNS_GETHOSTBYNAME
#endif // _SMTPDATA_
|
woq-blended/blended | blended.updater/src/main/scala/blended/updater/Updater.scala | package blended.updater
import java.io.File
import java.util.concurrent.TimeUnit
import scala.collection.immutable._
import scala.concurrent.duration.Duration
import akka.actor.{Actor, ActorLogging, ActorRef, Cancellable, Props}
import akka.event.{EventStream, LoggingReceive}
import blended.updater.config._
import blended.util.logging.Logger
/**
 * Actor that keeps track of the runtime configurations and materialized
 * profiles found under `installBaseDir`, and periodically publishes
 * service and profile information to the actor system's event stream.
 *
 * @param installBaseDir     Directory scanned for runtime configs and profiles.
 * @param config             Updater settings (publish intervals/lifetimes).
 * @param launchedProfileDir Directory of the currently launched profile, if any.
 * @param launchedProfileId  Id of the currently launched profile, if any.
 */
class Updater(
    installBaseDir: File,
    config: UpdaterConfig,
    launchedProfileDir: Option[File],
    launchedProfileId: Option[ProfileRef]
) extends Actor
  with ActorLogging {

  import Updater._

  private[this] val log = Logger[Updater]

  /////////////////////
  // MUTABLE
  // requestId -> State
  private[this] var profiles: Map[ProfileRef, StatefulLocalProfile] = Map()
  private[this] var runtimeConfigs: Set[LocalProfile] = Set()
  // Scheduled tasks cancelled in postStop so they do not outlive the actor.
  private[this] var tickers: Seq[Cancellable] = Nil
  ////////////////////

  def findConfig(id: ProfileRef): Option[LocalProfile] = profiles.get(id).map(_.config)

  def findActiveConfig(): Option[LocalProfile] = findActiveProfile().map(_.config)

  /** The known profile matching `launchedProfileId`, if any. */
  def findActiveProfile(): Option[StatefulLocalProfile] = {
    launchedProfileId.flatMap(profileId => profiles.get(profileId))
  }

  /**
   * Signals to publish current service information into the Akka event stream.
   */
  case object PublishServiceInfo

  /**
   * Signals to publish current profile information into the Akka event stream.
   * Reply: none
   */
  case object PublishProfileInfo

  /**
   * Convenience accessor to event stream, also to better see where the event stream is used.
   */
  private[this] def eventStream: EventStream = context.system.eventStream

  override def preStart(): Unit = {
    log.info("Initiating initial scanning for profiles")
    self ! Scan

    if (config.serviceInfoIntervalMSec > 0) {
      log.info(
        s"Enabling service info publishing [${config.serviceInfoIntervalMSec}]ms and lifetime [${config.serviceInfoLifetimeMSec}]ms")
      implicit val eCtx = context.system.dispatcher
      tickers +:= context.system.scheduler.scheduleAtFixedRate(
        Duration(100, TimeUnit.MILLISECONDS),
        Duration(config.serviceInfoIntervalMSec, TimeUnit.MILLISECONDS)
      ) { () =>
        self ! PublishServiceInfo
        self ! PublishProfileInfo
      }
    } else {
      log.info("Publishing of service infos and profile infos is disabled")
    }
    super.preStart()
  }

  override def postStop(): Unit = {
    tickers.foreach { t =>
      log.info(s"Disabling ticker: ${t}")
      t.cancel()
    }
    tickers = Nil
    super.postStop()
  }

  /** Handles the public query protocol; every query replies with a [[Result]]. */
  def handleProtocol(msg: Protocol): Unit = msg match {
    case GetRuntimeConfigs(reqId) =>
      sender() ! Result(reqId, runtimeConfigs)

    case GetProfiles(reqId) =>
      sender() ! Result(reqId, profiles.values.toSet)

    case GetProfileIds(reqId) =>
      sender() ! Result(reqId, profiles.keySet)
  }

  def scanForRuntimeConfigs(): List[LocalProfile] = {
    ProfileFsHelper.scanForRuntimeConfigs(installBaseDir)
  }

  def scanForProfiles(runtimeConfigs: Option[List[LocalProfile]] = None): List[StatefulLocalProfile] = {
    ProfileFsHelper.scanForProfiles(installBaseDir, runtimeConfigs)
  }

  override def receive: Actor.Receive = LoggingReceive {

    // direct protocol
    case p: Protocol =>
      log.debug(s"Handling Protocol message: ${p}")
      handleProtocol(p)

    case Scan =>
      log.debug("Handling Scan mesage")
      val rcs = scanForRuntimeConfigs()
      runtimeConfigs = rcs.toSet
      val fullProfiles = scanForProfiles(Option(rcs))
      profiles = fullProfiles.map { profile =>
        profile.profileId -> profile
      }.toMap
      log.debug(s"Profiles (after scan): ${profiles}")

    case PublishProfileInfo =>
      log.debug("Handling PublishProfileInfo message")
      // NOTE(review): the previous code also looked up the active profile and
      // pattern-matched every profile against it, but both match branches
      // returned the profile unchanged, so the whole lookup was a no-op. That
      // dead code is removed here; if the intent was to mark the active
      // profile in the published list, that feature still needs implementing.
      val toSend = profiles.values.toList.map(_.toSingleProfile)
      log.debug(s"Publishing profile info to event stream: ${toSend}")
      eventStream.publish(ProfileInfo(System.currentTimeMillis(), toSend))

    case PublishServiceInfo =>
      log.debug("Handling PublishServiceInfo message")

      val serviceInfo = ServiceInfo(
        name = context.self.path.toString,
        serviceType = "Updater",
        timestampMsec = System.currentTimeMillis(),
        lifetimeMsec = config.serviceInfoLifetimeMSec,
        props = Map(
          "installBaseDir" -> installBaseDir.getAbsolutePath(),
          "launchedProfileDir" -> launchedProfileDir.map(_.getAbsolutePath()).getOrElse(""),
          "launchedProfileId" -> launchedProfileId.map(_.toString()).getOrElse("")
        )
      )
      log.debug(s"About to publish service info: ${serviceInfo}")
      eventStream.publish(serviceInfo)
  }
}
object Updater {
/**
* Supported Messages by the [[Updater]] actor.
*/
sealed trait Protocol {
def requestId: String
}
// /**
// * Request lists of runtime configurations. Replied with [RuntimeConfigs].
// * FIXME: rename to GetProfiles
// */
// final case class GetRuntimeConfigs(override val requestId: String) extends Protocol
final case class GetRuntimeConfigs(override val requestId: String) extends Protocol
/**
* Get all known profiles.
* Reply: [[Result[Set[LocalProfile]]]]
*/
final case class GetProfiles(override val requestId: String) extends Protocol
/**
* Get all known profile ids.
* Reply: Result[Set[ProfileId]]
*/
final case class GetProfileIds(override val requestId: String) extends Protocol
/**
* Internal message: Scans the profile directory for existing runtime configurations
* and replaces the internal state of this actor with the result.
* Reply: none
*/
private final case object Scan
  /**
   * Supported replies sent by the [[Updater]] actor.
   */
sealed trait Reply
final case class Result[T](requestId: String, result: T) extends Reply
final case class OperationSucceeded(requestId: String) extends Reply
final case class OperationFailed(requestId: String, reason: String) extends Reply
/**
* Create the actor properties.
*/
def props(
baseDir: File,
config: UpdaterConfig,
launchedProfileDir: File = null,
launchedProfileRef: ProfileRef = null
): Props = {
Props(
new Updater(
installBaseDir = baseDir,
config,
Option(launchedProfileDir),
Option(launchedProfileRef)
))
}
/**
* A bundle in progress, e.g. downloading or verifying.
*/
private case class ArtifactInProgress(reqId: String, artifact: Artifact, file: File)
/**
* Internal working state of in-progress stagings.
*/
private case class State(
requestId: String,
requestActor: ActorRef,
config: LocalProfile,
artifactsToDownload: List[ArtifactInProgress],
pendingArtifactsToUnpack: List[ArtifactInProgress],
artifactsToUnpack: List[ArtifactInProgress],
issues: List[String]
) {
val profileRef = ProfileRef(config.runtimeConfig.name, config.runtimeConfig.version)
/**
* The download/unpack progress in percent.
*/
def progress(): Int = {
val all = config.runtimeConfig.bundles.size
val todos = artifactsToDownload.size
if (all > 0)
(100 / all) * (all - todos)
else 100
}
}
}
|
ECE-412-Capstone-Sensor-Suite/Team-20-Sensor-Suite | Firmware/Unused in prototype/On-chip App/onchipsdk-master/doc/on_chip_api/html/structdn__api__rc__rsp__t.js | <filename>Firmware/Unused in prototype/On-chip App/onchipsdk-master/doc/on_chip_api/html/structdn__api__rc__rsp__t.js
// Navigation-tree data for struct dn_api_rc_rsp_t — presumably generated by
// Doxygen (regenerate the docs rather than hand-editing this file).
// Each entry is a [label, target href, children] triple.
var structdn__api__rc__rsp__t =
[
    [ "rc", "structdn__api__rc__rsp__t.html#aa4fd90a56ce14cd9cc93f81091801273", null ]
];
alaindet/learn-go | experiments/channels/main.go | package main
import (
"fmt"
"time"
)
// sendValue writes message to ch `times` times, sleeping `interval`
// milliseconds before each send, then closes the channel so receivers can
// detect completion. The send-only parameter type (chan<-) prevents this
// producer from accidentally reading or re-closing from the receiver side.
func sendValue(ch chan<- string, message string, times int, interval int) {
	for i := 0; i < times; i++ {
		time.Sleep(time.Millisecond * time.Duration(interval))
		ch <- message
	}
	close(ch)
}
// receiveValues drains ch1 and ch2 concurrently, printing every message and a
// notice when each channel closes, and returns once both channels are closed.
//
// A closed channel is set to nil so its select case blocks forever and the
// remaining open channel keeps being serviced. BUG FIX: the previous version
// used a non-blocking select with a `default` branch, which busy-waited
// (spinning a CPU core) whenever no value was ready; this version blocks.
//
// TODO: Can it be generalized? (e.g. a variadic []<-chan string with
// reflect.Select, or fan-in merging into a single channel.)
func receiveValues(ch1 <-chan string, ch2 <-chan string) {
	for ch1 != nil || ch2 != nil {
		select {
		case val1, ok := <-ch1:
			if !ok {
				fmt.Println("Channel 1 closed")
				ch1 = nil
			} else {
				fmt.Println("Channel 1:", val1)
			}
		case val2, ok := <-ch2:
			if !ok {
				fmt.Println("Channel 2 closed")
				ch2 = nil
			} else {
				fmt.Println("Channel 2:", val2)
			}
		}
	}
}
// main demonstrates fan-in over two buffered channels: two producers send a
// fixed number of messages at different rates, and receiveValues prints
// everything until both channels are closed.
func main() {
	ch1 := make(chan string, 10)
	ch2 := make(chan string, 10)

	go sendValue(ch1, "I love you", 4, 100)
	go sendValue(ch2, "I know", 4, 150)

	receiveValues(ch1, ch2)
	fmt.Println("The End")
}
|
scpf19/mbed-os | components/SE050/COMPONENT_SE050/hostLib/tstUtil/tst_sm_util.h | /**
* @file tst_sm_util.h
* @author NXP Semiconductors
* @version 1.0
* @par LICENSE
* Copyright 2016 NXP
*
* This software is owned or controlled by NXP and may only be used
* strictly in accordance with the applicable license terms. By expressly
* accepting such terms or by downloading, installing, activating and/or
* otherwise using the software, you are agreeing that you have read, and
* that you agree to comply with and are bound by, such license terms. If
* you do not agree to be bound by the applicable license terms, then you
* may not retain, install, activate or otherwise use the software.
*
* @par Description
* This file provides the interface to utility functions used by the example programs, not
* the actual Host Library.
* @par HISTORY
* 1.0 06-aug-2013 : Initial version
*
*/
#ifndef _TST_SM_UTIL_H_
#define _TST_SM_UTIL_H_
#include "sm_types.h"
#include "sm_printf.h"
#if !defined(TGT_A71CH) && !defined(TGT_A71CL)
#include "ax_api.h"
#endif
#ifdef __cplusplus
extern "C" {
#endif
#define AX_UTIL_OK 0 //!< Integer return status value to indicate successful execution
#define AX_UTIL_ERROR 1 //!< Integer return status value to indicate execution failure
/// @cond
#define SM_KEEP_STATE 0xFE
/// @endcond
// ByteArray print style
#define AX_COMPACT_16 0x0010 //!< Resulting style: 001122AA.. 16 eq. HEX Values
#define AX_COMPACT_32 0x0020 //!< Resulting style: 001122AA.. 32 eq. HEX Values
#define AX_COMPACT_LINE 0x00F0 //!< Resulting style: 001122AA.... HEX Values. No line breaks.
#define AX_HEX_16 0x0110 //!< Resulting style: 0x00 0x11 0x22 0xAA.. 16 eq. HEX Values
#define AX_HEX_32 0x0120 //!< Resulting style: 0x00 0x11 0x22 0xAA.. 32 eq. HEX Values
#define AX_COLON_16 0x0210 //!< Resulting style: 00:11:22:AA.. 16 eq. HEX Values
#define AX_COLON_32 0x0220 //!< Resulting style: 00:11:22:AA.. 32 eq. HEX Values
#define AX_CARRAY_16 0x0310 //!< Resulting style: 0x00, 0x11, 0x22, 0xAA, .. 16 eq. HEX Values
#define AX_CARRAY_32 0x0320 //!< Resulting style: 0x00, 0x11, 0x22, 0xAA, .. 32 eq. HEX Values
#if !defined(TGT_A71CH) && !defined(TGT_A71CL)
/// @cond
typedef struct {
SST_Item_t item;
U16 sw;
} ItemSw_t;
/// @endcond
#endif
#define AX_CHECK_SW(A,B,C) axCheckSw(A,B,C,__FILE__,__LINE__) //!< Macro allowing to invoke ::axCheckSw without explicitly specifying source file name and line number as parameters
#define AX_CHECK_U8(A,B,C) axCheckU8(A,B,C,__FILE__,__LINE__) //!< Macro allowing to invoke ::axCheckU8 without explicitly specifying source file name and line number as parameters
#define AX_CHECK_U16(A,B,C) axCheckU16(A,B,C,__FILE__,__LINE__) //!< Macro allowing to invoke ::axCheckU16 without explicitly specifying source file name and line number as parameters
#if defined(TGT_A71CH) || defined (TGT_A71CL)
#define AX_COMPARE_BYTE_ARRAY(A,B,C,D,E,F,G) \
axCompareByteArray(A,B,C,D,E,F,G,__FILE__,__LINE__) //!< Macro allowing to invoke ::axCompareByteArray without explicitly specifying source file name and line number as parameters
#endif
int axPrintByteArray(const char *pName, const U8 *pData, U16 dataLength, U16 style);
#if defined(TGT_A71CH) || defined (TGT_A71CL)
U8 axCompareByteArray(const char *aName, const U8 *pA, U16 aLen, const char *bName, const U8 *pB, U16 bLen, U16 style, char *szFilename, int lineNr);
#else
U8 axCompareByteArray(const char *aName, const U8 *pA, U16 aLen, const char *bName, const U8 *pB, U16 bLen, U16 style);
#endif
int axConvertHexString2ByteArray(U8 *byteArray, const char *string, int nOffset, int nByte);
int axConvertByteArray2HexString(char *string, int stringBufSize, const U8 *byteArray, int nByte, U16 style);
U8 axCheckSw(U16 sw, U16 expectedSw, char *msg, char *szFilename, int lineNr);
U8 axCheckU8(U8 in, U8 expected, char *msg, char *szFilename, int lineNr);
U8 axCheckU16(U16 in, U16 expected, char *msg, char *szFilename, int lineNr);
U16 axZeroSignExtend(U8* pStore, U16 actualLength, U16 expectedLength);
#if !defined(TGT_A71CH) && !defined(TGT_A71CL)
int convertString2ByteArray(U8 *byteArray, const char *string, int nOffset, int nByte);
U8 checkBytestring(U8 *pA, U16 aLength, U8 * pB, U16 bLength, char *msg);
void printBytestring (const char *pName, const U8 *pData, U16 dataLength);
int compareBytestrings(U8 *pA, U16 aLength, U8 * pB, U16 bLength);
U8 checkErr(U16 err, U16 expectedErr, char *msg);
U8 checkU8(U8 in, U8 expected, char *msg);
U8 checkU16(U16 in, U16 expected, char *msg);
#ifndef TGT_A70CM
U8 setUser(U8 targetUser);
#endif
#if defined(TGT_A70CI) || defined(TGT_A70CM)
char* getLifecycleStateName(U16 state);
#else
char* getLifecycleStateName(U8 state);
#endif
char* GetAuthenticationStateName(U16 state);
char *getItemName(SST_Item_t item);
char *getUserName(U8 user);
#endif // TGT_A71CH
#ifdef __cplusplus
}
#endif
#endif //
|
gjsify/ts-for-gjs | @types/Gjs/GoVirt-1.0.js | imports.gi.versions.GoVirt = '1.0'
module.exports = imports.gi.GoVirt
|
richardfearn/diirt | pvmanager/pvmanager-pva/src/main/java/org/diirt/datasource/pva/adapters/PVANTNDArray.java | <filename>pvmanager/pvmanager-pva/src/main/java/org/diirt/datasource/pva/adapters/PVANTNDArray.java
/**
* Copyright (C) 2010-14 diirt developers. See COPYRIGHT.TXT
* All rights reserved. Use is subject to license terms. See LICENSE.TXT
*/
/**
*
*/
package org.diirt.datasource.pva.adapters;
import org.epics.pvdata.pv.PVStructure;
/**
 * Thin wrapper around a pvData {@link PVStructure} holding an NTNDArray
 * (EPICS normative-type N-dimensional array).
 *
 * @author msekoranja
 */
public class PVANTNDArray {

    // The wrapped NTNDArray structure.
    private final PVStructure ntNdArray;

    /**
     * @param ntNdArray    the NTNDArray structure to wrap
     * @param disconnected whether the source channel is disconnected
     *                     (NOTE(review): currently unused here — confirm intent)
     */
    public PVANTNDArray(PVStructure ntNdArray, boolean disconnected)
    {
        this.ntNdArray = ntNdArray;
    }

    /** Returns the wrapped NTNDArray structure. */
    public PVStructure getNTNdArray() {
        return ntNdArray;
    }

    @Override
    public String toString() {
        return ntNdArray.toString();
    }
}
|
mathemage/CompetitiveProgramming | codeforces/div2/576/B/B.py | h, l = map(int, input().split())
print((l ** 2 - h ** 2)/(2.0 * h))
|
jnthn/intellij-community | xml/dom-openapi/src/com/intellij/util/xml/ui/BooleanColumnInfo.java | <reponame>jnthn/intellij-community
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.util.xml.ui;
import com.intellij.ui.BooleanTableCellRenderer;
import com.intellij.util.xml.GenericDomValue;
import javax.swing.*;
import javax.swing.table.TableCellEditor;
/**
 * Table column descriptor for a boolean-valued DOM element, rendered and
 * edited as a checkbox.
 *
 * @author peter
 */
public class BooleanColumnInfo extends DomColumnInfo<GenericDomValue<Boolean>, Boolean> {
  public BooleanColumnInfo(final String name) {
    super(name, new BooleanTableCellRenderer());
  }

  /** Edits cells with a plain checkbox editor (a fresh editor per call). */
  @Override
  public TableCellEditor getEditor(GenericDomValue<Boolean> value) {
    return new DefaultCellEditor(new JCheckBox());
  }

  @Override
  public final Class<Boolean> getColumnClass() {
    return Boolean.class;
  }

  @Override
  public final void setValue(final GenericDomValue<Boolean> o, final Boolean aValue) {
    o.setValue(aValue);
  }

  /** Null DOM values are surfaced as {@code false} rather than {@code null}. */
  @Override
  public final Boolean valueOf(GenericDomValue<Boolean> object) {
    final Boolean value = object.getValue();
    return value == null ? Boolean.FALSE : value;
  }
}
|
MobileSeoul/2017seoul-01 | SeoulNightMarket/SeoulNightMarket/app/src/main/java/seoulnightmarket/seoulnightmarket/fragment/FragmentMenu.java | <reponame>MobileSeoul/2017seoul-01
package seoulnightmarket.seoulnightmarket.fragment;
import android.content.Context;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentActivity;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.ListAdapter;
import android.widget.ListView;
import android.widget.TextView;
import seoulnightmarket.seoulnightmarket.R;
import seoulnightmarket.seoulnightmarket.adapter.MenuAdapter;
import seoulnightmarket.seoulnightmarket.adapter.TicketAdapter;
import seoulnightmarket.seoulnightmarket.etc.HttpTask;
import seoulnightmarket.seoulnightmarket.etc.Singleton;
/**
 * Fragment showing the menu of the currently selected night-market store:
 * loads the food list and the waiting-ticket count from the backend and
 * renders the menu in a RecyclerView.
 *
 * NOTE(review): AsyncTask is deprecated on modern Android; consider an
 * executor- or coroutine-based replacement.
 */
public class FragmentMenu extends Fragment
{
    // Request URL, rebuilt for each backend call.
    private String uri;
    private RecyclerView recyclerView;
    private MenuAdapter adapter;
    private RecyclerView.LayoutManager layoutManager;

    public FragmentMenu() {
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
    }

    @Nullable
    @Override
    public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState)
    {
        // Called after onCreate to build the fragment's view.
        View view = inflater.inflate(R.layout.activity_fragment_menu, null);
        final FragmentActivity fragment = getActivity();
        recyclerView = (RecyclerView) view.findViewById(R.id.menu_recycler_view);
        recyclerView.setHasFixedSize(true);
        layoutManager = new LinearLayoutManager(fragment);
        recyclerView.setLayoutManager(layoutManager);

        // Show the currently selected store's image and name from the shared singleton.
        ImageView imageView = Singleton.getInstance().getStoreImageView();
        imageView.setImageBitmap(HttpTask.getInstance().translateBitmap(Singleton.getInstance().getNowStoreImage()));
        TextView textView = Singleton.getInstance().getStoreTextView();
        textView.setText(Singleton.getInstance().getNowStore());

        // Fetch the food list for this store.
        uri = Uri.parse("http://ec2-13-59-247-200.us-east-2.compute.amazonaws.com:3000/food")
                .buildUpon()
                .appendQueryParameter("store", HttpTask.getInstance().getURLEncode(Singleton.getInstance().getNowStore()))
                .build().toString();
        Log.e("URL", uri);
        HttpAsyncTask httpAsyncTask = new HttpAsyncTask("음식");
        httpAsyncTask.execute(uri);

        // Fetch the waiting-ticket count for this store.
        uri = Uri.parse("http://ec2-13-59-247-200.us-east-2.compute.amazonaws.com:3000/ticket")
                .buildUpon()
                .appendQueryParameter("store", HttpTask.getInstance().getURLEncode(Singleton.getInstance().getNowStore()))
                .build().toString();
        TicketAsyncTask ticketAsyncTask = new TicketAsyncTask("번호표 보기");
        ticketAsyncTask.execute(uri);
        return view;
    }

    /**
     * Downloads the food list and fills the RecyclerView adapter.
     * NOTE(review): non-static inner AsyncTask holds an implicit reference to
     * the Fragment; if the task outlives the Fragment this leaks it and
     * getActivity() may be null — confirm lifecycle handling.
     */
    public class HttpAsyncTask extends AsyncTask<String, Void, String> {
        String type;

        HttpAsyncTask(String type) {
            this.type = type;
        }

        @Override
        protected String doInBackground(String... urls) {
            // urls[0] is the request URL.
            return HttpTask.getInstance().GET(urls[0], type);
        }

        // onPostExecute displays the results of the AsyncTask.
        @Override
        protected void onPostExecute(String result) {
            super.onPostExecute(result);
            adapter = new MenuAdapter(getActivity());
            for (int i = 0; i < Singleton.getInstance().getProductImageList().size(); i++) {
                adapter.addItem(Singleton.getInstance().getProductImageList().get(i), Singleton.getInstance().getProductNameList().get(i), Singleton.getInstance().getProductPriceList().get(i));
            }
            recyclerView.setAdapter(adapter);
            adapter.notifyDataSetChanged();
            recyclerView.invalidate();
        }
    }

    /**
     * Downloads the waiting-ticket count and shows it in the wait TextView.
     * NOTE(review): same non-static-inner-class leak concern as above.
     */
    public class TicketAsyncTask extends AsyncTask<String, Void, String> {
        String type;

        TicketAsyncTask(String type) {
            this.type = type;
        }

        @Override
        protected String doInBackground(String... urls) {
            // urls[0] is the request URL.
            return HttpTask.getInstance().GET(urls[0], type);
        }

        // onPostExecute displays the results of the AsyncTask.
        @Override
        protected void onPostExecute(String result) {
            super.onPostExecute(result);
            TextView textView = Singleton.getInstance().getWaitTextView();
            textView.setText(Singleton.getInstance().getWaitCount() + "");
        }
    }
}
truenewx/truenewx | tnxjeex/tnxjeex-seata/src/main/java/org/truenewx/tnxjeex/seata/rm/tcc/TccContext.java | <filename>tnxjeex/tnxjeex-seata/src/main/java/org/truenewx/tnxjeex/seata/rm/tcc/TccContext.java
package org.truenewx.tnxjeex.seata.rm.tcc;
import java.util.Hashtable;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
 * Seata TCC context: a per-transaction store of arbitrary values keyed by the
 * global transaction id (xid), shared between the try/confirm/cancel phases of
 * a TCC resource.
 *
 * <p>Thread-safe: backed by a {@link java.util.concurrent.ConcurrentHashMap},
 * which replaces the legacy synchronized {@code Hashtable} while keeping the
 * same null-hostile semantics (null keys/values are rejected by the map).
 */
public class TccContext {

    /** Values keyed by global transaction id (xid). */
    private final Map<String, Object> xidContext = new ConcurrentHashMap<>();

    /**
     * Stores a value for the given transaction id.
     *
     * @param xid   global transaction id; the call is a no-op when {@code null}
     * @param value value to associate with the xid (must not be {@code null})
     */
    public void put(String xid, Object value) {
        if (xid != null) {
            this.xidContext.put(xid, value);
        }
    }

    /**
     * Removes and returns the value stored for the given transaction id.
     *
     * @param xid global transaction id; may be {@code null}
     * @param <T> expected type of the stored value (unchecked cast)
     * @return the removed value, or {@code null} when the xid is {@code null}
     *         or no value was stored under it
     */
    @SuppressWarnings("unchecked")
    public <T> T remove(String xid) {
        if (xid != null) {
            return (T) this.xidContext.remove(xid);
        }
        return null;
    }
}
|
bottkars/installer | terraform/ibm/vendor/github.com/IBM/ibm-hpcs-tke-sdk/tkesdk/skeyinfo.go | //
// Copyright 2021 IBM Inc. All rights reserved
// SPDX-License-Identifier: Apache2.0
//
// CHANGE HISTORY
//
// Date Initials Description
// 04/30/2021 CLH Initial version
package tkesdk
import (
"crypto/sha256"
"encoding/hex"
"encoding/json"
"errors"
"io/ioutil"
"math/big"
"os"
"strconv"
"strings"
"github.com/IBM/ibm-hpcs-tke-sdk/common"
)
// ECPublicKey is used to work with an ASN.1 sequence representing an EC
// public key. X and Y are the two integers of the sequence (the coordinates
// of the public key point).
type ECPublicKey struct {
	X *big.Int
	Y *big.Int
}
// GetSignatureKeysFromResourceBlock assembles information on the signature
// keys identified in the Terraform resource block. Both signature key files
// on the local workstation and a user-provided signing service are handled.
//
// Inputs:
//   hc -- information from the hsm_config section of the resource block for
//         the HPCS service instance; provides access to the signature keys
//         used to sign commands.
//
// Outputs:
//   map[string]bool   -- set of Subject Key Identifiers (SKI --> true)
//   map[string]string -- SKI --> signature key
//   map[string]string -- SKI --> signature key token
//   map[string]string -- SKI --> administrator name
//   error             -- any error encountered during processing
func GetSignatureKeysFromResourceBlock(hc HsmConfig) (map[string]bool,
	map[string]string, map[string]string, map[string]string, error) {

	skiSet := make(map[string]bool)       // set of Subject Key Identifiers
	keyBySki := make(map[string]string)   // SKI --> signature key
	tokenBySki := make(map[string]string) // SKI --> signature key token
	nameBySki := make(map[string]string)  // SKI --> administrator name

	for _, admin := range hc.Admins {
		ski, err := GetSigKeySKI(admin.Key, admin.Token)
		if err != nil {
			return skiSet, keyBySki, tokenBySki, nameBySki, err
		}
		// The SKI set doubles as a duplicate check: each administrator in the
		// resource block must use a distinct signature key.
		if skiSet[ski] {
			return skiSet, keyBySki, tokenBySki, nameBySki,
				errors.New("A signature key has been specified more than once in the resource block")
		}
		skiSet[ski] = true
		keyBySki[ski] = admin.Key
		tokenBySki[ski] = admin.Token
		nameBySki[ski] = admin.Name
	}
	return skiSet, keyBySki, tokenBySki, nameBySki, nil
}
// GetSigKeySKI returns the Subject Key Identifier (SKI) for a signature key
// as a lowercase hexadecimal string. The TKE_SIGNSERV_URL environment
// variable selects between a user-provided signing service and a signature
// key file on the local workstation.
//
// Inputs:
//   sigkey      -- identifies which signature key to access (file path, or
//                  key identifier understood by the signing service)
//   sigkeyToken -- authentication token associated with the signature key
//
// Outputs:
//   string -- Subject Key Identifier, hex encoded
//   error  -- any error encountered during processing
func GetSigKeySKI(sigkey string, sigkeyToken string) (string, error) {
	if ssURL := os.Getenv("TKE_SIGNSERV_URL"); ssURL != "" {
		// A signing service is configured: fetch the public key from it and
		// derive the SKI as the SHA-256 hash of that key.
		pubkey, err := common.GetPublicKeyFromSigningService(
			ssURL, sigkey, sigkeyToken)
		if err != nil {
			return "", err
		}
		hash := sha256.Sum256(pubkey[:])
		return strings.ToLower(hex.EncodeToString(hash[:])), nil
	}

	// No signing service: the signature key is a JSON file on the local
	// workstation whose "ski" field carries the identifier directly.
	data, err := ioutil.ReadFile(sigkey)
	if err != nil {
		return "", err
	}
	var skfields map[string]string
	if err = json.Unmarshal(data, &skfields); err != nil {
		return "", err
	}
	ski := skfields["ski"]
	// A SHA-256 based SKI is 32 bytes, i.e. 64 hex characters.
	if len(ski) != 64 {
		return "", errors.New("Invalid Subject Key Identifier, length = " + strconv.Itoa(len(ski)))
	}
	return strings.ToLower(ski), nil
}
|
GregEwens/sem | src/test/java/com/napier/sem/integration_tests/CityServiceIntegrationTests.java | <filename>src/test/java/com/napier/sem/integration_tests/CityServiceIntegrationTests.java
package com.napier.sem.integration_tests;
import com.napier.sem.App;
import com.napier.sem.entities.CityJoinCountry;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.*;
/**
 * Project Name: seMethods
 * Package: com.napier.sem.integration_tests
 * User: <NAME>
 * Date Created: 13/03/2022 13:44
 * File Purpose: Integration Tests for city reports
 */
@SuppressWarnings("PMD.JUnitTestContainsTooManyAsserts") // Integration tests may use multiple assertions
class CityServiceIntegrationTests {

    /**
     * The application to test
     */
    static App app;

    /**
     * Our first city we find in the db, we will use this as source data for our other tests
     */
    private static CityJoinCountry _city;

    /**
     * Set up the database connection by calling initialise method on App
     */
    @BeforeAll
    static void init(){
        // create arguments to run the app
        String[] args = new String[2];
        args[0] = "localhost:33060";
        args[1] = "300";

        // run the initialise method directly
        app = new App();
        App.initialise(app, args);

        // Get the first capital city from the database and use that for passing parameters in for our tests
        _city = App.cityRepo.getAllCitiesJoinCountryOrderedByPopulation().get(0);
    }

    /**
     * Close the database connection after all the tests have run
     */
    @AfterAll
    static void dispose(){
        app.disconnect();
    }

    /**
     * Asserts that a city report row is populated. The exact values are not
     * known ahead of time, so we only check that each field is not the
     * default/empty value. Shared by every report test below, which used to
     * repeat these five assertions verbatim. Takes primitive/String values so
     * it works for any city entity type.
     */
    private static void assertCityPopulated(long id, long population, String district,
                                            String countryCode, String name) {
        assertTrue(id > 0, "We don't know what the value is but we can check it's not the default");
        assertTrue(population > 0, "We don't know what the value is but we can check it's not the default");
        assertTrue(district.length() > 0, "We don't know what the value is but we can check it's not empty");
        assertTrue(countryCode.length() > 0, "We don't know what the value is but we can check it's not empty");
        assertTrue(name.length() > 0, "We don't know what the value is but we can check it's not empty");
    }

    /**
     * First test is to make sure our reference city is not null or default
     */
    @Test
    void testReferenceData(){
        assertNotNull(_city, "Check we have some data");
        assertTrue(_city.id > 0, "We don't know what the value is but we can check it's not the default value");
    }

    /**
     * Integration test for getCityById
     */
    @Test
    void testGetCity() {
        // Arrange & Act
        var city = App.cityService.getCityById(_city.id);

        // Assert
        assertEquals(city.id, _city.id, "Check the data matches what we supplied");
        assertCityPopulated(city.id, city.population, city.district, city.countryCode, city.name);
    }

    /**
     * Integration test for getAllCitiesByContinentOrderedByPopulation
     */
    @Test
    void testGetAllCitiesByContinentOrderedByPopulation(){
        // Arrange & Act
        var cities = App.cityService
                .getAllCitiesByContinentOrderedByPopulation(_city.continent);

        // Make this assertion here as if it is false we will throw an exception calling .get(0)
        assertFalse(cities.isEmpty(), "Check we have some data");

        var city = cities.get(0);

        // Assert
        assertCityPopulated(city.id, city.population, city.district, city.countryCode, city.name);
    }

    /**
     * Integration test for getAllCitiesByRegionOrderedByPopulation
     */
    @Test
    void testGetAllCitiesByRegionOrderedByPopulation(){
        // Arrange & Act
        var cities = App.cityService
                .getAllCitiesByRegionOrderedByPopulation(_city.region);

        // Make this assertion here as if it is false we will throw an exception calling .get(0)
        assertFalse(cities.isEmpty(), "Check we have some data");

        var city = cities.get(0);

        // Assert
        assertCityPopulated(city.id, city.population, city.district, city.countryCode, city.name);
    }

    /**
     * Integration test for getAllCitiesByCountryOrderedByPopulation
     */
    @Test
    void testGetAllCitiesByCountryOrderedByPopulation(){
        // Arrange & Act
        var cities = App.cityService
                .getAllCitiesByCountryOrderedByPopulation(_city.countryCode);

        // Make this assertion here as if it is false we will throw an exception calling .get(0)
        assertFalse(cities.isEmpty(), "Check we have some data");

        var city = cities.get(0);

        // Assert
        assertCityPopulated(city.id, city.population, city.district, city.countryCode, city.name);
    }

    /**
     * Integration test for getAllCitiesByDistrictOrderedByPopulation
     */
    @Test
    void testGetAllCitiesByDistrictOrderedByPopulation(){
        // Arrange & Act
        var cities = App.cityService
                .getAllCitiesByDistrictOrderedByPopulation(_city.district);

        // Make this assertion here as if it is false we will throw an exception calling .get(0)
        assertFalse(cities.isEmpty(), "Check we have some data");

        var city = cities.get(0);

        // Assert
        assertCityPopulated(city.id, city.population, city.district, city.countryCode, city.name);
    }

    /**
     * Integration test for getTopNCitiesOrderedByPopulation
     */
    @Test
    void testGetTopNCitiesOrderedByPopulation(){
        // Arrange & Act
        var cities = App.cityService.getTopNCitiesOrderedByPopulation(1);

        // Make this assertion here as if it is false we will throw an exception calling .get(0)
        assertFalse(cities.isEmpty(), "Check we have some data");

        var city = cities.get(0);

        // Assert
        assertTrue(cities.size() <= 1, "Check we do not exceed the specified count");
        assertCityPopulated(city.id, city.population, city.district, city.countryCode, city.name);
    }

    /**
     * Integration test for getTopNCitiesInRegionOrderedByPopulation
     */
    @Test
    void testGetTopNCitiesInRegionOrderedByPopulation(){
        // Arrange & Act
        var cities = App.cityService
                .getTopNCitiesInRegionOrderedByPopulation(1, _city.region);

        // Make this assertion here as if it is false we will throw an exception calling .get(0)
        assertFalse(cities.isEmpty(), "Check we have some data");

        var city = cities.get(0);

        // Assert
        assertTrue(cities.size() <= 1, "Check we do not exceed the specified count");
        assertCityPopulated(city.id, city.population, city.district, city.countryCode, city.name);
    }

    /**
     * Integration test for getTopNCitiesInCountryOrderedByPopulation
     */
    @Test
    void testGetTopNCitiesInCountryOrderedByPopulation(){
        // Arrange & Act
        var cities = App.cityService
                .getTopNCitiesInCountryOrderedByPopulation(1, _city.countryName);

        // Make this assertion here as if it is false we will throw an exception calling .get(0)
        assertFalse(cities.isEmpty(), "Check we have some data");

        var city = cities.get(0);

        // Assert
        assertTrue(cities.size() <= 1, "Check we do not exceed the specified count");
        assertCityPopulated(city.id, city.population, city.district, city.countryCode, city.name);
    }

    /**
     * Integration test for getTopNCitiesInContinentOrderedByPopulation
     */
    @Test
    void testGetTopNCitiesInContinentOrderedByPopulation(){
        // Arrange & Act
        var cities = App.cityService
                .getTopNCitiesInContinentOrderedByPopulation(1, _city.continent);

        // Make this assertion here as if it is false we will throw an exception calling .get(0)
        assertFalse(cities.isEmpty(), "Check we have some data");

        var city = cities.get(0);

        // Assert
        assertTrue(cities.size() <= 1, "Check we do not exceed the specified count");
        assertCityPopulated(city.id, city.population, city.district, city.countryCode, city.name);
    }

    /**
     * Integration test for getTopNCitiesInDistrictOrderedByPopulation
     */
    @Test
    void testGetTopNCitiesInDistrictOrderedByPopulation(){
        // Arrange & Act
        var cities = App.cityService
                .getTopNCitiesInDistrictOrderedByPopulation(1, _city.district);

        // Make this assertion here as if it is false we will throw an exception calling .get(0)
        assertFalse(cities.isEmpty(), "Check we have some data");

        var city = cities.get(0);

        // Assert
        assertTrue(cities.size() <= 1, "Check we do not exceed the specified count");
        assertCityPopulated(city.id, city.population, city.district, city.countryCode, city.name);
    }

    /**
     * Integration test for getAllCitiesOrderedByPopulation
     */
    @Test
    void testGetAllCitiesOrderedByPopulation(){
        // Arrange & Act
        var cities = App.cityService.getAllCitiesOrderedByPopulation();

        // Make this assertion here as if it is false we will throw an exception calling .get(0)
        assertFalse(cities.isEmpty(), "Check we have some data");

        var city = cities.get(0);

        // Assert
        assertCityPopulated(city.id, city.population, city.district, city.countryCode, city.name);
    }
}
|
YoungJIangTao/blog_backend | src/test/java/top/youngwind/blog/dao/user/PermissionDaoTest.java | package top.youngwind.blog.dao.user;
import top.youngwind.blog.entity.user.PermissionEntity;
import top.youngwind.blog.entity.user.RoleEntity;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.annotation.Rollback;
import org.springframework.transaction.annotation.Transactional;
import java.util.ArrayList;
import java.util.List;
/**
* @Author: Young
* @Description:
* @Date: create in 2021/1/17 18:30
*/
@SpringBootTest
class PermissionDaoTest {
@Autowired
UserDao userDao;
@Autowired
RoleDao roleDao;
@Autowired
PermissionDao permissionDao;
/**
* 新增
*/
@Test
@Transactional
@Rollback(false)
void add() {
PermissionEntity permissionEntity1 = new PermissionEntity();
permissionEntity1.setPermissionsName("新增");
permissionDao.save(permissionEntity1);
PermissionEntity permissionEntity2 = new PermissionEntity();
permissionEntity2.setPermissionsName("删除");
permissionDao.save(permissionEntity2);
PermissionEntity permissionEntity3 = new PermissionEntity();
permissionEntity3.setPermissionsName("修改");
permissionDao.save(permissionEntity3);
PermissionEntity permissionEntity4 = new PermissionEntity();
permissionEntity4.setPermissionsName("查询");
permissionDao.save(permissionEntity4);
}
/**
* 删除
*/
@Test
@Transactional
@Rollback(false)
void delete() {
permissionDao.deleteById(6);
}
/**
* 修改
*/
@Test
@Transactional
@Rollback(false)
void modify() {
PermissionEntity permissionEntity = permissionDao.findById(6).orElse(null);
permissionEntity.setPermissionsName("测试权限2修改");
permissionDao.save(permissionEntity);
}
/**
* 查询
*/
@Test
@Transactional
@Rollback(false)
void query() {
List<Integer> list = new ArrayList<Integer>();
list.add(6);
list.add(7);
list.add(8);
list.add(9);
for (PermissionEntity entity : permissionDao.findAllByIdIn(list)) {
System.out.println(entity.getPermissionsName());
}
;
for (PermissionEntity permissionEntity : permissionDao.findAll()) {
System.out.println(permissionEntity.getPermissionsName());
}
for (PermissionEntity permissionEntity : permissionDao.findAll()) {
for (RoleEntity role : permissionEntity.getRoles()) {
System.out.println(role.getRoleName());
}
}
}
} |
Haarmees/azure-devops-intellij | plugin/test/com/microsoft/alm/plugin/versioncontrol/path/ServerPathTests.java | // Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See License.txt in the project root.
package com.microsoft.alm.plugin.versioncontrol.path;
import com.microsoft.alm.plugin.exceptions.ServerPathFormatException;
import org.junit.Test;
/** Unit tests for ServerPath.canonicalize's '$'-component validation flag. */
public class ServerPathTests {
    /**
     * With dollar validation enabled (second argument true), a path
     * containing a '$'-prefixed component ("$path") must be rejected
     * with a ServerPathFormatException.
     */
    @Test(expected = ServerPathFormatException.class)
    public void testCanonicalizeWithDollarValidation() {
        ServerPath.canonicalize("$/test/$path", true);
    }

    /**
     * With dollar validation disabled, the same path is accepted.
     */
    @Test
    public void testCanonicalizeWithoutDollarValidation() {
        ServerPath.canonicalize("$/test/$path", false); // should not throw
    }
}
|
yinquan529/platform-external-chromium-trace | trace-viewer/src/base/event_target_test.js | // Copyright (c) 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
'use strict';

base.require('base.event_target');
base.require('base.events');

// Exercises base.EventTargetHelper: listener registration bookkeeping on a
// decorated DOM element.
base.unittest.testSuite('base.event_target', function() {
  test('eventTargetHelper', function() {
    // Counts how many times the listener fires.
    var listenerCallCount = 0;
    function listener() { listenerCallCount++; }

    // Decorate a plain element; this adds hasEventListener() on top of the
    // normal add/removeEventListener API.
    var div = document.createElement('div');
    base.EventTargetHelper.decorate(div);

    assertFalse(div.hasEventListener('foo'));

    div.addEventListener('foo', listener);
    assertTrue(div.hasEventListener('foo'));

    // A dispatch reaches the registered listener exactly once.
    base.dispatchSimpleEvent(div, 'foo');
    assertEquals(1, listenerCallCount);

    // After removal, further dispatches are not delivered and the helper
    // reports no remaining listener.
    div.removeEventListener('foo', listener);
    base.dispatchSimpleEvent(div, 'foo');
    assertEquals(1, listenerCallCount);

    assertFalse(div.hasEventListener('foo'));
  });
});
|
BastianBlokland/volo | libs/data/test/test_utils_destroy.c | #include "check_spec.h"
#include "core_alloc.h"
#include "data.h"
// Tests for data_destroy(): values described by DataReg metadata (strings,
// pointers, arrays, structs, nested structs) are released back to the heap
// allocator. The tests pass by completing without a crash; presumably the
// heap allocator flags leaks / double-frees — TODO confirm.
spec(utils_destroy) {

  DataReg* reg = null;

  // Fresh registry per test so type registrations do not leak across tests.
  setup() { reg = data_reg_create(g_alloc_heap); }

  it("can destroy a string") {
    const String val = string_dup(g_alloc_heap, string_lit("Hello World"));
    const DataMeta meta = data_meta_t(data_prim_t(String));
    data_destroy(reg, g_alloc_heap, meta, mem_var(val));
  }

  it("can destroy an empty string") {
    // Empty strings own no allocation; destroy must tolerate that.
    const String val = string_empty;
    const DataMeta meta = data_meta_t(data_prim_t(String));
    data_destroy(reg, g_alloc_heap, meta, mem_var(val));
  }

  it("can destroy a primitive pointer") {
    i32* val = alloc_alloc_t(g_alloc_heap, i32);
    *val = 42;
    const DataMeta meta = data_meta_t(data_prim_t(i32), .container = DataContainer_Pointer);
    data_destroy(reg, g_alloc_heap, meta, mem_var(val));
  }

  it("can destroy an array of primitives") {
    const DataMeta meta = data_meta_t(data_prim_t(i32), .container = DataContainer_Array);

    // Non-empty array: the backing allocation must be freed.
    const struct {
      i32* values;
      usize count;
    } array1 = {.values = alloc_array_t(g_alloc_heap, i32, 8), .count = 8};
    data_destroy(reg, g_alloc_heap, meta, mem_var(array1));

    // Zero-initialized (empty) array: destroy must tolerate null values.
    const struct {
      i32* values;
      usize count;
    } array2 = {0};
    data_destroy(reg, g_alloc_heap, meta, mem_var(array2));
  }

  it("can destroy a structure") {
    typedef struct {
      String a, b, c;
    } DestroyStructA;

    data_reg_struct_t(reg, DestroyStructA);
    data_reg_field_t(reg, DestroyStructA, a, data_prim_t(String));
    data_reg_field_t(reg, DestroyStructA, b, data_prim_t(String));
    data_reg_field_t(reg, DestroyStructA, c, data_prim_t(String));

    // Field 'b' is intentionally left empty to mix owned and empty members.
    const DestroyStructA val = {
        .a = string_dup(g_alloc_heap, string_lit("Hello")),
        .c = string_dup(g_alloc_heap, string_lit("World")),
    };
    data_destroy(reg, g_alloc_heap, data_meta_t(t_DestroyStructA), mem_var(val));
  }

  it("can destroy nested structures") {
    typedef struct {
      String a, b, c;
    } DestroyStructB;

    // Contains the inner struct by value, by pointer, and as an array to
    // cover every container kind in one destroy call.
    typedef struct {
      DestroyStructB value;
      DestroyStructB* ptr;
      struct {
        DestroyStructB* values;
        usize count;
      } array;
    } DestroyStructC;

    data_reg_struct_t(reg, DestroyStructB);
    data_reg_field_t(reg, DestroyStructB, a, data_prim_t(String));
    data_reg_field_t(reg, DestroyStructB, b, data_prim_t(String));
    data_reg_field_t(reg, DestroyStructB, c, data_prim_t(String));

    data_reg_struct_t(reg, DestroyStructC);
    data_reg_field_t(reg, DestroyStructC, value, t_DestroyStructB);
    data_reg_field_t(
        reg, DestroyStructC, ptr, t_DestroyStructB, .container = DataContainer_Pointer);
    data_reg_field_t(
        reg, DestroyStructC, array, t_DestroyStructB, .container = DataContainer_Array);

    DestroyStructB* ptr = alloc_alloc_t(g_alloc_heap, DestroyStructB);
    *ptr = (DestroyStructB){
        .a = string_dup(g_alloc_heap, string_lit("Some")),
        .b = string_dup(g_alloc_heap, string_lit("New")),
        .c = string_dup(g_alloc_heap, string_lit("Values")),
    };

    const usize arrayCount = 4;
    DestroyStructB* arrayValues = alloc_array_t(g_alloc_heap, DestroyStructB, arrayCount);
    for (usize i = 0; i != arrayCount; ++i) {
      arrayValues[i] = (DestroyStructB){
          .a = string_dup(g_alloc_heap, fmt_write_scratch("Array val {}", fmt_int(i))),
      };
    }

    const DestroyStructC val = {
        .value =
            {
                .a = string_dup(g_alloc_heap, string_lit("Hello")),
                .c = string_dup(g_alloc_heap, string_lit("World")),
            },
        .ptr = ptr,
        .array = {.values = arrayValues, .count = arrayCount},
    };
    data_destroy(reg, g_alloc_heap, data_meta_t(t_DestroyStructC), mem_var(val));
  }

  teardown() { data_reg_destroy(reg); }
}
|
aldefy/P6-Nanodegree-Go_Ubiquitous | app/src/main/java/techgravy/sunshine/utils/logger/LoggerTree.java | package techgravy.sunshine.utils.logger;
import android.util.Log;
import timber.log.Timber;
/**
* Created by aditlal on 14/04/16.
*/
public class LoggerTree extends Timber.Tree {
public LoggerTree() {
Logger.init("Sunshine")
.logLevel(LogLevel.FULL);
}
@Override
protected void log(int priority, String tag, String message, Throwable t) {
Logger.t(tag);
switch (priority) {
case Log.VERBOSE:
Logger.v(message);
break;
case Log.DEBUG:
Logger.d(message);
break;
case Log.INFO:
Logger.i(message);
break;
case Log.WARN:
Logger.w(message);
break;
case Log.ERROR:
Logger.e(t, message);
break;
case Log.ASSERT:
Logger.wtf(message);
break;
}
}
} |
alldaysunshine/sling-org-apache-sling-app-cms | api/src/main/java/org/apache/sling/cms/usergenerated/UGCBucketConfig.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sling.cms.usergenerated;
import org.apache.sling.cms.usergenerated.UserGeneratedContentService.APPROVE_ACTION;
import org.apache.sling.cms.usergenerated.UserGeneratedContentService.CONTENT_TYPE;
import org.osgi.annotation.versioning.ProviderType;
/*
 * Simple POJO carrying the configuration required for a user generated
 * content (UGC) bucket: the content type it holds, the path depth, the
 * bucket name and the action taken on approval.
 */
@ProviderType
public class UGCBucketConfig {

    private CONTENT_TYPE contentType;
    private int pathDepth = -1;
    private String bucket;
    private APPROVE_ACTION action;

    /**
     * @return the action taken when content in this bucket is approved
     */
    public APPROVE_ACTION getAction() {
        return action;
    }

    /**
     * @param action the approve action to set
     */
    public void setAction(APPROVE_ACTION action) {
        this.action = action;
    }

    /**
     * @return the name of the bucket
     */
    public String getBucket() {
        return bucket;
    }

    /**
     * @param bucket the bucket name to set
     */
    public void setBucket(String bucket) {
        this.bucket = bucket;
    }

    /**
     * @return the type of content stored in this bucket
     */
    public CONTENT_TYPE getContentType() {
        return contentType;
    }

    /**
     * @param contentType the content type to set
     */
    public void setContentType(CONTENT_TYPE contentType) {
        this.contentType = contentType;
    }

    /**
     * @return the path depth (-1 when unset)
     */
    public int getPathDepth() {
        return pathDepth;
    }

    /**
     * @param pathDepth the path depth to set
     */
    public void setPathDepth(int pathDepth) {
        this.pathDepth = pathDepth;
    }
}
|
starburst-project/Alink | core/src/test/java/com/alibaba/alink/operator/common/similarity/TextSimilarityPairwiseMapperTest.java | package com.alibaba.alink.operator.common.similarity;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.ml.api.misc.param.Params;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.types.Row;
import com.alibaba.alink.params.shared.clustering.HasKDefaultAs2;
import com.alibaba.alink.params.shared.colname.HasOutputCol;
import com.alibaba.alink.params.shared.colname.HasSelectedCols;
import com.alibaba.alink.params.similarity.HasMetric;
import com.alibaba.alink.params.similarity.StringTextPairwiseParams;
import com.alibaba.alink.testutil.AlinkTestBase;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import static org.junit.Assert.assertArrayEquals;
/**
* Test for TextSimilarityPairwiseMapper.
*/
public class TextSimilarityPairwiseMapperTest extends AlinkTestBase {
@Rule
public ExpectedException thrown = ExpectedException.none();
private static TableSchema dataSchema = new TableSchema(new String[] {"col0", "col1"}, new TypeInformation[] {
Types.STRING, Types.STRING});
@Test
public void testTextSimilarity() {
Params params = new Params()
.set(HasSelectedCols.SELECTED_COLS, dataSchema.getFieldNames())
.set(HasOutputCol.OUTPUT_COL, "res");
TextSimilarityPairwiseMapper mapper;
Row[] array =
new Row[] {
Row.of("北京", "北京"),
Row.of("北京 欢迎", "中国 人民"),
Row.of("北京 欢迎", "中国 北京"),
Row.of("Good Morning!", "Good Evening!")
};
Double[] res = new Double[array.length];
// LEVENSHTEIN
Double[] levenshtein = new Double[] {0.0, 2.0, 2.0, 1.0};
params.set(StringTextPairwiseParams.METRIC, HasMetric.Metric.LEVENSHTEIN);
mapper = new TextSimilarityPairwiseMapper(dataSchema, params);
for (int i = 0; i < array.length; i++) {
res[i] = (double) mapper.map(new Object[] {array[i].getField(0), array[i].getField(1)});
}
assertArrayEquals(levenshtein, res);
// LEVENSHTEIN_SIM
Double[] levenshteinSim = new Double[] {1.0, 0.0, 0.0, 0.5};
params.set(StringTextPairwiseParams.METRIC, HasMetric.Metric.LEVENSHTEIN_SIM);
mapper = new TextSimilarityPairwiseMapper(dataSchema, params);
for (int i = 0; i < array.length; i++) {
res[i] = (double) mapper.map(new Object[] {array[i].getField(0), array[i].getField(1)});
}
assertArrayEquals(levenshteinSim, res);
// LCS
Double[] lcs = new Double[] {1.0, 0.0, 1.0, 1.0};
params.set(StringTextPairwiseParams.METRIC, HasMetric.Metric.LCS);
mapper = new TextSimilarityPairwiseMapper(dataSchema, params);
for (int i = 0; i < array.length; i++) {
res[i] = (double) mapper.map(new Object[] {array[i].getField(0), array[i].getField(1)});
}
assertArrayEquals(lcs, res);
// LCS_SIM
Double[] lcsSim = new Double[] {1.0, 0.0, 0.5, 0.5};
params.set(StringTextPairwiseParams.METRIC, HasMetric.Metric.LCS_SIM);
mapper = new TextSimilarityPairwiseMapper(dataSchema, params);
for (int i = 0; i < array.length; i++) {
res[i] = (double) mapper.map(new Object[] {array[i].getField(0), array[i].getField(1)});
}
assertArrayEquals(lcsSim, res);
// SSK
Double[] ssk = new Double[] {0.0, 0.0, 0.0, 0.0};
params.set(StringTextPairwiseParams.METRIC, HasMetric.Metric.SSK).set(HasKDefaultAs2.K, 1);
mapper = new TextSimilarityPairwiseMapper(dataSchema, params);
for (int i = 0; i < array.length; i++) {
res[i] = (double) mapper.map(new Object[] {array[i].getField(0), array[i].getField(1)});
}
assertArrayEquals(ssk, res);
// COSINE
Double[] cosine = new Double[] {1.0, 0.0, 0.5, 0.5};
params.set(StringTextPairwiseParams.METRIC, HasMetric.Metric.COSINE).set(HasKDefaultAs2.K, 1);
mapper = new TextSimilarityPairwiseMapper(dataSchema, params);
for (int i = 0; i < array.length; i++) {
res[i] = (double) mapper.map(new Object[] {array[i].getField(0), array[i].getField(1)});
}
assertArrayEquals(cosine, res);
// SIMHASH_HAMMING
Double[] simHash = new Double[] {0.0, 29.0, 19.0, 15.0};
params.set(StringTextPairwiseParams.METRIC, HasMetric.Metric.SIMHASH_HAMMING).set(HasKDefaultAs2.K, 1);
mapper = new TextSimilarityPairwiseMapper(dataSchema, params);
for (int i = 0; i < array.length; i++) {
res[i] = (double) mapper.map(new Object[] {array[i].getField(0), array[i].getField(1)});
}
assertArrayEquals(simHash, res);
// SIMHASH_HAMMING_SIM
Double[] simHashSim = new Double[] {1.0, 0.546875, 0.703125, 0.765625};
params.set(StringTextPairwiseParams.METRIC, HasMetric.Metric.SIMHASH_HAMMING_SIM).set(HasKDefaultAs2.K, 1);
mapper = new TextSimilarityPairwiseMapper(dataSchema, params);
for (int i = 0; i < array.length; i++) {
res[i] = (double) mapper.map(new Object[] {array[i].getField(0), array[i].getField(1)});
}
assertArrayEquals(simHashSim, res);
thrown.expect(RuntimeException.class);
mapper.map(new Object[] {"s1", "s2", "s3"});
}
} |
JqhSdtz/laputa_sns | src/main/java/com/laputa/laputa_sns/service/NoticeService.java | package com.laputa.laputa_sns.service;
import com.laputa.laputa_sns.common.QueryParam;
import com.laputa.laputa_sns.common.RedisPrefix;
import com.laputa.laputa_sns.common.Result;
import com.laputa.laputa_sns.model.entity.*;
import org.jetbrains.annotations.NotNull;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Lazy;
import org.springframework.data.redis.core.RedisCallback;
import org.springframework.data.redis.core.StringRedisTemplate;
import org.springframework.data.redis.core.script.DefaultRedisScript;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.stereotype.Service;
import java.util.*;
/**
 * Notification service. Notices are kept per user in redis: a sorted set
 * orders notices by last-update time, one hash stores the time at which a
 * notice first became unread ("0" once it has been marked as read), and a
 * second hash stores the unread count per notice.
 *
 * @author JQH
 * @since 2:38 PM 20/04/15
 */
@Service
@EnableScheduling
public class NoticeService {
    private final PostService postService;
    private final CommentL1Service commentL1Service;
    private final CommentL2Service commentL2Service;
    private final StringRedisTemplate redisTemplate;
    // Lua script that adds/updates one notice, initialises or increments its
    // unread counter, and evicts the oldest entry once the inbox exceeds the
    // configured maximum length.
    private final DefaultRedisScript<Long> pushNoticeScript;
    // Lua script that reads one page of notices (with scores) together with
    // their init times and unread counts in a single round trip.
    private final DefaultRedisScript<List<String>> pullNoticeScript;

    /**
     * Maximum number of entries kept in one user's notice inbox.
     */
    @Value("${user-notice-box-length}")
    private int userNoticeBoxLength;

    @SuppressWarnings({"unchecked", "rawtypes"})
    public NoticeService(@Lazy PostService postService, @Lazy CommentL1Service commentL1Service, @Lazy CommentL2Service commentL2Service, StringRedisTemplate redisTemplate) {
        this.postService = postService;
        this.commentL1Service = commentL1Service;
        this.commentL2Service = commentL2Service;
        this.redisTemplate = redisTemplate;
        this.pushNoticeScript = new DefaultRedisScript<>(
                "redis.call('zadd', KEYS[1], ARGV[2], ARGV[1])\n" + "local initTime = redis.call('hget', KEYS[2], ARGV[1])\n" + "if (initTime == nil or initTime == false or initTime == '0') then\n" +
                        "\tredis.call('hset', KEYS[2], ARGV[1], ARGV[2])\n" + "\tredis.call('hset', KEYS[3], ARGV[1], ARGV[3])\n" + "else\n" + "\tredis.call('hincrby', KEYS[3], ARGV[1], ARGV[3])\n" + "end\n" +
                        "local len = redis.call('zcard', KEYS[1])\n" + "if (len > tonumber(ARGV[4])) then\n" + "\tlocal lstKey = redis.call('zrange', KEYS[1], 0, 0)[1]\n" +
                        "\tredis.call('zremrangebyrank', KEYS[1], 0, 0)\n" + "\tredis.call('hdel', KEYS[2], lstKey)\n" +
                        "\tredis.call('hdel', KEYS[3], lstKey)\n" + "end\n" + "return len", Long.class);
        this.pullNoticeScript = new DefaultRedisScript(
                "local zList = redis.call('zrevrange', KEYS[1], tonumber(ARGV[1]), tonumber(ARGV[2]), 'WITHSCORES')\n" + "local len = #zList / 2\n" +
                        "if (len > 0) then\n" + "\tlocal fileds = {}\n" + "\tfor i = 1, len do\n" + "\t\ttable.insert(fileds, zList[i * 2 - 1])\n" + "\tend\n" +
                        "\tlocal hList1 = redis.call('hmget', KEYS[2], unpack(fileds))\n" + "\tlocal hList2 = redis.call('hmget', KEYS[3], unpack(fileds))\n" + "\tfor i = 1, #hList1 do table.insert(zList, hList1[i]) end\n" +
                        "\tfor i = 1, #hList2 do table.insert(zList, hList2[i]) end\n" + "end\n" + "return zList", List.class);
    }

    /** Key of the sorted set that orders a receiver's notices by timestamp. */
    @NotNull
    private String getRedisTimeKey(Integer receiverId) {
        return RedisPrefix.USER_NOTICE_BOX_TIME + ":" + receiverId;
    }

    /** Key of the hash that stores each notice's first-unread time ("0" = read). */
    @NotNull
    private String getRedisInitTimeKey(Integer receiverId) {
        return RedisPrefix.USER_NOTICE_BOX_INIT_TIME + ":" + receiverId;
    }

    /** Key of the hash that stores each notice's unread count. */
    @NotNull
    private String getRedisCntKey(Integer receiverId) {
        return RedisPrefix.USER_NOTICE_BOX_CNT + ":" + receiverId;
    }

    // NOTE(review): redisTemplate.execute may return null (e.g. in a
    // transaction/pipeline), which would auto-unbox into an NPE here; callers
    // appear to rely on a live connection — confirm before hardening.
    private long executePushScript(String zKey, String hKey1, String hKey2, String item, long delta, long timeStamp, int limit) {
        return redisTemplate.execute(pushNoticeScript, Arrays.asList(zKey, hKey1, hKey2), item, String.valueOf(timeStamp), String.valueOf(delta), String.valueOf(limit));
    }

    /**
     * Push one notice ("type:contentId") into the receiver's inbox,
     * incrementing its unread count by one.
     *
     * @return the inbox length reported by the Lua script
     */
    public long pushNotice(int contentId, int type, int receiverId) {
        return executePushScript(getRedisTimeKey(receiverId), getRedisInitTimeKey(receiverId), getRedisCntKey(receiverId), type + ":" + contentId, 1, System.currentTimeMillis(), userNoticeBoxLength);
    }

    /**
     * Total number of unread notices for the given receiver. Entries whose
     * init time is "0" have already been read and are skipped.
     */
    @SuppressWarnings("unchecked")
    public long pullNoticeCnt(int receiverId) {
        List<Object> resList = redisTemplate.executePipelined((RedisCallback<?>) connection -> {
            connection.hGetAll(getRedisInitTimeKey(receiverId).getBytes());
            connection.hGetAll(getRedisCntKey(receiverId).getBytes());
            return null;
        });
        Map<String, String> initTimeMap = (Map<String, String>) resList.get(0);
        Map<String, String> cntMap = (Map<String, String>) resList.get(1);
        long cnt = 0;
        for (Map.Entry<String, String> entry : initTimeMap.entrySet()) {
            if ("0".equals(entry.getValue())) {
                continue;
            }
            // Guard against a missing counter entry; the two hashes are kept
            // in sync by the Lua scripts, but a partial eviction would
            // otherwise cause a NullPointerException here.
            String cntValue = cntMap.get(entry.getKey());
            if (cntValue != null) {
                cnt += Long.parseLong(cntValue);
            }
        }
        return cnt;
    }

    /**
     * Pull one page of the receiver's notices (ordered newest first) and fill
     * each notice with its referenced post/comment content.
     */
    public Result<List<Notice>> pullNotice(@NotNull Notice paramNotice, @NotNull Operator operator) {
        if (!paramNotice.isValidPullNoticeParam()) {
            return new Result<List<Notice>>(Result.FAIL).setErrorCode(1010160201).setMessage("操作错误,参数不合法");
        }
        int receiverId = operator.getUserId();
        QueryParam queryParam = paramNotice.getQueryParam();
        List<String> resList = redisTemplate.execute(pullNoticeScript, Arrays.asList(getRedisTimeKey(receiverId), getRedisInitTimeKey(receiverId), getRedisCntKey(receiverId)),
                String.valueOf(queryParam.getFrom()), String.valueOf(queryParam.getFrom() + queryParam.getQueryNum() - 1));
        if (resList == null) {
            // Script execution yielded no result; treat as an empty inbox.
            resList = Collections.emptyList();
        }
        // The script returns four parallel sections of equal length:
        // [key, score] pairs, then init times, then unread counts.
        int len = resList.size() / 4;
        List<Notice> noticeList = new ArrayList<>(len);
        int initTimeStart = len * 2;
        int cntStart = len * 3;
        for (int i = 0; i < len; ++i) {
            String[] item = resList.get(i * 2).split(":");
            Notice notice = new Notice().setType(Integer.valueOf(item[0])).setContentId(Integer.valueOf(item[1]))
                    .setUpdateTime(new Date(Long.valueOf(resList.get(i * 2 + 1)))).setUnreadCnt(Long.valueOf(resList.get(cntStart + i)))
                    .setInitTime(new Date(Long.valueOf(resList.get(initTimeStart + i))));
            noticeList.add(notice);
        }
        fillNoticeWithContent(noticeList, operator);
        return new Result<List<Notice>>(Result.SUCCESS).setObject(noticeList);
    }

    /**
     * Mark one notice as read by resetting its init time to "0".
     */
    public Result<Object> markNoticeAsRead(@NotNull Notice notice, @NotNull Operator operator) {
        if (notice.getContentId() == null || notice.getType() == null) {
            return new Result<Object>(Result.FAIL).setErrorCode(1010160202).setMessage("操作失败,参数错误");
        }
        redisTemplate.opsForHash().put(getRedisInitTimeKey(operator.getUserId()), notice.getType() + ":" + notice.getContentId(), "0");
        return new Result<Object>(Result.SUCCESS);
    }

    /**
     * Batch-load the post / level-1 comment / level-2 comment referenced by
     * each notice and attach it as the notice content. Deleted content is
     * replaced by a tombstone entity so the client can still render the row.
     */
    private void fillNoticeWithContent(@NotNull List<Notice> noticeList, Operator operator) {
        // Maps double as id sets for the batch reads below (values filled later).
        Map<Integer, Post> postMap = new HashMap<>();
        Map<Integer, CommentL1> cml1Map = new HashMap<>();
        Map<Integer, CommentL2> cml2Map = new HashMap<>();
        for (int i = 0; i < noticeList.size(); ++i) {
            Notice notice = noticeList.get(i);
            int type = notice.getType(), id = notice.getContentId();
            if (type == Notice.TYPE_LIKE_POST || type == Notice.TYPE_CML1_OF_POST || type == Notice.TYPE_FW_POST) {
                postMap.put(id, null);
            } else if (type == Notice.TYPE_LIKE_CML1 || type == Notice.TYPE_CML2_OF_CML1) {
                cml1Map.put(id, null);
            } else if (type == Notice.TYPE_LIKE_CML2 || type == Notice.TYPE_REPLY_OF_CML2) {
                cml2Map.put(id, null);
            }
        }
        if (!postMap.isEmpty()) {
            List<Integer> postIdList = new ArrayList<>(postMap.keySet());
            List<Post> postList = postService.multiReadPostWithContentAndCounter(postIdList, operator).getObject();
            if (postList != null) {
                for (int i = 0; i < postList.size(); ++i) {
                    Post post = postList.get(i);
                    if (post == null) {
                        post = new Post(postIdList.get(i)).setContent("该帖已被删除").setCreator(new User(-1));
                        post.setDeleted(true);
                    }
                    postMap.put(post.getId(), post);
                }
            }
        }
        if (!cml1Map.isEmpty()) {
            List<Integer> cml1IdList = new ArrayList<>(cml1Map.keySet());
            List<CommentL1> cml1List = commentL1Service.multiReadCommentWithContentAndCounter(cml1IdList).getObject();
            if (cml1List != null) {
                for (int i = 0; i < cml1List.size(); ++i) {
                    CommentL1 cml1 = cml1List.get(i);
                    if (cml1 == null) {
                        cml1 = new CommentL1(cml1IdList.get(i)).setContent("该评论已被删除").setCreator(new User(-1));
                        cml1.setDeleted(true);
                    }
                    cml1Map.put(cml1.getId(), cml1);
                }
            }
        }
        if (!cml2Map.isEmpty()) {
            List<Integer> cml2IdList = new ArrayList<>(cml2Map.keySet());
            List<CommentL2> cml2List = commentL2Service.multiReadCommentWithContentAndCounter(cml2IdList).getObject();
            if (cml2List != null) {
                for (int i = 0; i < cml2List.size(); ++i) {
                    CommentL2 cml2 = cml2List.get(i);
                    if (cml2 == null) {
                        cml2 = new CommentL2(cml2IdList.get(i)).setContent("该回复已被删除").setCreator(new User(-1));
                        cml2.setDeleted(true);
                    }
                    cml2Map.put(cml2.getId(), cml2);
                }
            }
        }
        for (int i = 0; i < noticeList.size(); ++i) {
            Notice notice = noticeList.get(i);
            int type = notice.getType(), id = notice.getContentId();
            if (type == Notice.TYPE_LIKE_POST || type == Notice.TYPE_CML1_OF_POST || type == Notice.TYPE_FW_POST) {
                notice.setContent(postMap.get(id));
            } else if (type == Notice.TYPE_LIKE_CML1 || type == Notice.TYPE_CML2_OF_CML1) {
                notice.setContent(cml1Map.get(id));
            } else if (type == Notice.TYPE_LIKE_CML2 || type == Notice.TYPE_REPLY_OF_CML2) {
                notice.setContent(cml2Map.get(id));
            }
        }
    }
}
|
zx1993312/ry | ruoyi-system/src/main/java/com/ruoyi/system/controller/HyBillRevokeController.java | package com.ruoyi.system.controller;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.shiro.authz.annotation.RequiresPermissions;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.ModelMap;
import org.springframework.web.bind.annotation.CrossOrigin;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import com.ruoyi.common.annotation.Log;
import com.ruoyi.common.core.controller.BaseController;
import com.ruoyi.common.core.domain.AjaxResult;
import com.ruoyi.common.core.page.TableDataInfo;
import com.ruoyi.common.enums.BusinessType;
import com.ruoyi.common.utils.poi.ExcelUtil;
import com.ruoyi.system.domain.HyBillSet;
import com.ruoyi.system.service.IHyBillSetService;
import com.ruoyi.system.utils.ReflectUtil;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
/**
 * Bill-settings (revoke) controller.
 *
 * @author Administrator
 * @date 2021-01-06
 */
@Controller
@CrossOrigin
@RequestMapping("/system/revokeSet")
@Api(tags = "票据设置Controller")
public class HyBillRevokeController extends BaseController {
    private String prefix = "system/revokeSet";

    @Autowired
    private IHyBillSetService hyBillSetService;

    /** Render the bill-settings view. */
    @RequiresPermissions("system:set:view")
    @GetMapping()
    public String set() {
        return prefix + "/set";
    }

    /**
     * Query the list of bill settings and return it as a paged table,
     * with each row additionally converted to a key/value map.
     */
    @ApiOperation("票据设置")
    @ApiImplicitParams({ @ApiImplicitParam(name = "hyBillSet", value = "项目实体类hyBillSet", required = true), })
    @RequiresPermissions("system:set:list")
    @PostMapping("/list")
    @ResponseBody
    public TableDataInfo list(HyBillSet hyBillSet) {
        startPage();
        List<HyBillSet> list = hyBillSetService.selectHyBillSetList(hyBillSet);
        List<Map<String, Object>> reList = new ArrayList<>(list.size());
        for (HyBillSet hbs : list) {
            // Convert each entity into the generic map shape the table expects.
            Map<String, Object> map = ReflectUtil.convertMap(hbs);
            reList.add(map);
        }
        return getDataTable(list, reList);
    }

    /**
     * Export the bill-settings list to an Excel file.
     */
    @ApiOperation("票据设置")
    @ApiImplicitParams({ @ApiImplicitParam(name = "hyBillSet", value = "项目实体类hyBillSet", required = true), })
    @RequiresPermissions("system:set:export")
    @Log(title = "票据设置", businessType = BusinessType.EXPORT)
    @PostMapping("/export")
    @ResponseBody
    public AjaxResult export(HyBillSet hyBillSet) {
        List<HyBillSet> list = hyBillSetService.selectHyBillSetList(hyBillSet);
        ExcelUtil<HyBillSet> util = new ExcelUtil<HyBillSet>(HyBillSet.class);
        return util.exportExcel(list, "set");
    }

    /** Render the "add bill setting" view. */
    @GetMapping("/add")
    public String add() {
        return prefix + "/add";
    }

    /**
     * Persist a new bill setting.
     * (A previously present list query whose result was never used has been removed.)
     */
    @ApiOperation("票据设置")
    @ApiImplicitParams({ @ApiImplicitParam(name = "hyBillSet", value = "项目实体类hyBillSet", required = true), })
    @RequiresPermissions("system:set:add")
    @Log(title = "票据设置", businessType = BusinessType.INSERT)
    @PostMapping("/add")
    @ResponseBody
    public AjaxResult addSave(HyBillSet hyBillSet) {
        return toAjax(hyBillSetService.insertHyBillSet(hyBillSet));
    }

    /**
     * Render the "edit bill setting" view for the given id.
     */
    @ApiOperation("票据设置")
    @ApiImplicitParams({ @ApiImplicitParam(name = "id", value = "主键id", required = true), })
    @GetMapping("/edit/{id}")
    public String edit(@PathVariable("id") Long id, ModelMap mmap) {
        HyBillSet hyBillSet = hyBillSetService.selectHyBillSetById(id);
        mmap.put("hyBillSet", hyBillSet);
        return prefix + "/edit";
    }

    /**
     * Persist changes to an existing bill setting.
     * (A previously present list query whose result was never used has been removed.)
     */
    @ApiOperation("票据设置")
    @ApiImplicitParams({ @ApiImplicitParam(name = "hyBillSet", value = "项目实体类hyBillSet", required = true), })
    @RequiresPermissions("system:set:edit")
    @Log(title = "票据设置", businessType = BusinessType.UPDATE)
    @PostMapping("/edit")
    @ResponseBody
    public AjaxResult editSave(HyBillSet hyBillSet) {
        return toAjax(hyBillSetService.updateHyBillSet(hyBillSet));
    }

    /**
     * Delete the bill settings identified by the comma-separated id list.
     */
    @ApiOperation("票据设置")
    @ApiImplicitParams({ @ApiImplicitParam(name = "ids", value = "ids", required = true), })
    @RequiresPermissions("system:set:remove")
    @Log(title = "票据设置", businessType = BusinessType.DELETE)
    @PostMapping("/remove")
    @ResponseBody
    public AjaxResult remove(String ids) {
        return toAjax(hyBillSetService.deleteHyBillSetByIds(ids));
    }
}
|
WoodoLee/TorchCraft | 3rdparty/range-v3/doc/gh-pages/structranges_1_1v3_1_1meta_1_1meta__detail_1_1defer___3_01_c_00_01list_3_01_ts_8_8_8_4_00_01voidaa27dee7669e0acf0fb969ab5c10d7e9.js | var structranges_1_1v3_1_1meta_1_1meta__detail_1_1defer___3_01_c_00_01list_3_01_ts_8_8_8_4_00_01voidaa27dee7669e0acf0fb969ab5c10d7e9 =
[
[ "type", "group__group-meta.html#ga1f9c238fe81ef67b5f2392d63312dab2", null ]
]; |
IliyaTryapitsin/slick-migrations | core/src/main/scala/slick/migration/package.scala | package slick
import slick.migration.ast.{IndexInfo, TableInfo}
import slick.migration.dialect.Dialect
import slick.migration.table.TableMigration
import scala.slick.ast.FieldSymbol
import scala.slick.driver.JdbcProfile
import scala.slick.lifted._
/**
 * Implicit convenience wrappers that let a slick table definition express
 * schema migrations (create/drop tables, columns, indexes, primary keys and
 * foreign keys) in a fluent style, delegating to [[TableMigration]].
 *
 * Created by <NAME> on 16.04.15.
 */
package object migration {

  implicit class MigrationExtension[T <: JdbcProfile#Table[_]](table: T) {

    /** Alter the SQL types of the given columns. */
    def alterColumnType(cols: (T => Column[_])*)(implicit dialect: Dialect[_]) = TableMigration(table).alterColumnTypes(cols: _*)

    /**
     * Add columns to the table.
     * (If the table is being created, these may be incorporated into the `CREATE TABLE` statement.)
     * @param cols zero or more column-returning functions, which are passed the table object.
     * @example {{{ tblMig.addColumns(_.col1, _.col2, _.column[Int]("fieldNotYetInTableDef")) }}}
     */
    def addColumns(cols: (T => Column[_])*)(implicit dialect: Dialect[_]) = TableMigration(table).addColumns(cols: _*)

    /**
     * Add columns to the table.
     * (If the table is being created, these may be incorporated into the `CREATE TABLE` statement.)
     * @param cols zero or more columns belonging to the table.
     * @example
     * {{{
     *  object table1 extends Table[(Int, Int, Int)]("table1") {
     *    def col1 = column[Int]("col1")
     *    def col2 = column[Int]("col2")
     *    def col3 = column[Int]("col3")
     *    def * = col1 ~ col2 ~ col3
     *  }
     *  val columns = Seq(table1.col, table1.col2, table1.col3)
     *  tblMig.addColumns(columns)
     * }}}
     */
    def addColumns(cols: Iterable[Column[_]])(implicit dialect: Dialect[_]) = TableMigration(table).addColumns(cols)

    /**
     * Add all columns to the table.
     * (If the table is being created, these may be incorporated into the `CREATE TABLE` statement.)
     * @example
     * {{{
     *  object table1 extends Table[(Int, Int, Int)]("table1") {
     *    def col1 = column[Int]("col1")
     *    def col2 = column[Int]("col2")
     *    def col3 = column[Int]("col3")
     *    def * = col1 ~ col2 ~ col3
     *  }
     *  tblMig.addColumns
     * }}}
     */
    def addColumns(implicit dialect: Dialect[_]) = TableMigration(table).addColumns(table.columns)

    def dropColumns(cols: Iterable[Column[_]])(implicit dialect: Dialect[_]) = TableMigration(table).dropColumns(cols)

    def dropColumns(cols: (T => Column[_])*)(implicit dialect: Dialect[_]) = TableMigration(table).dropColumns(cols: _*)

    def dropColumns(implicit dialect: Dialect[_]) = TableMigration(table).dropColumns(table.columns)

    /** Drop a single column by its SQL name. */
    def dropColumn(columnName: String)(implicit dialect: Dialect[_]) = TableMigration(table).dropColumn(columnName)

    /**
     * Adds indexes
     * @param indexes zero or more index-returning functions, which are passed the table object.
     * @example
     * {{{
     *  object table1 extends Table[(Int, Int, Int)]("table1") {
     *    def col1 = column[Int]("col1")
     *    def col2 = column[Int]("col2")
     *
     *    def idxDef1 = index("idxDef1", col1)
     *    def idxDef2 = index("idxDef2", col2)
     *
     *    def * = col1 ~ col2
     *  }
     *  tblMig.addIndexes(_.idxDef1, _.idxDef2)
     * }}}
     */
    def addIndexes(indexes: (T => Index)*)(implicit dialect: Dialect[_]) = TableMigration(table).addIndexes(indexes: _*)

    /**
     * Add indexes
     * @param indexes indexes belonging to the table. If the list is empty then all indexes of the table are applied.
     * @example
     * {{{
     *  object table1 extends Table[(Int, Int, Int)]("table1") {
     *    def col1 = column[Int]("col1")
     *    def col2 = column[Int]("col2")
     *
     *    def idxDef1 = index("idxDef1", col1)
     *    def idxDef2 = index("idxDef2", col2)
     *
     *    def * = col1 ~ col2
     *  }
     *  val indexes = Seq(table1.idxDef1, table1.idxDef2)
     *  tblMig.addIndexes(indexes)
     * }}}
     */
    def addIndexes(indexes: Iterable[Index])(implicit dialect: Dialect[_]) =
      if (indexes.isEmpty) TableMigration(table).addIndexes(table.indexes)
      else TableMigration(table).addIndexes(indexes)

    /**
     * Add all indexes in table
     * @example
     * {{{
     *  object table1 extends Table[(Int, Int, Int)]("table1") {
     *    def col1 = column[Int]("col1")
     *    def col2 = column[Int]("col2")
     *
     *    def idxDef1 = index("idxDef1", col1)
     *    def idxDef2 = index("idxDef2", col2)
     *
     *    def * = col1 ~ col2
     *  }
     *  tblMig.addIndexes
     * }}}
     */
    def addIndexes(implicit dialect: Dialect[_]) = TableMigration(table).addIndexes(table.indexes)

    def dropIndexes(indexes: Iterable[Index])(implicit dialect: Dialect[_]) = TableMigration(table).dropIndexes(indexes)

    def dropIndexes(indexes: (T => Index)*)(implicit dialect: Dialect[_]) = TableMigration(table).dropIndexes(indexes: _*)

    def dropIndexes(implicit dialect: Dialect[_]) = TableMigration(table).dropIndexes(table.indexes)

    /**
     * Adds primary key constraints.
     * @param pks zero or more `PrimaryKey`-returning functions, which are passed the table object.
     * @example
     * {{{
     *  object table1 extends Table[(Int, Int, Int)]("table1") {
     *    def col1 = column[Int]("col1")
     *    def col2 = column[Int]("col2")
     *    def col3 = column[Int]("col3")
     *    def col4 = column[Int]("col4")
     *    def col5 = column[Int]("col5")
     *
     *    def pk1 = primaryKey("pk1", (col1, col2))
     *    def pk2 = primaryKey("pk2", (col3, col4, col5))
     *
     *    def * = col1 ~ col2 ~ col3 ~ col4 ~ col5
     *  }
     *  tblMig.addPrimaryKeys(_.pk1, _.pk2)
     * }}}
     */
    def addPrimaryKeys(pks: (T => PrimaryKey)*)(implicit dialect: Dialect[_]) = TableMigration(table).addPrimaryKeys(pks: _*)

    /**
     * Add primary keys
     * @param pks primary keys belonging to the table.
     * @example
     * {{{
     *  object table1 extends Table[(Int, Int, Int)]("table1") {
     *    def col1 = column[Int]("col1")
     *    def col2 = column[Int]("col2")
     *    def col3 = column[Int]("col3")
     *    def col4 = column[Int]("col4")
     *    def col5 = column[Int]("col5")
     *
     *    def pk1 = primaryKey("pk1", (col1, col2))
     *    def pk2 = primaryKey("pk2", (col3, col4, col5))
     *
     *    def * = col1 ~ col2 ~ col3 ~ col4 ~ col5
     *  }
     *  val pks = Seq(table1.pk1, table1.pk2)
     *  tblMig.addPrimaryKeys(pks)
     * }}}
     */
    def addPrimaryKeys(pks: Iterable[PrimaryKey])(implicit dialect: Dialect[_]) = TableMigration(table).addPrimaryKeys(pks)

    /**
     * Add all primary keys
     * @example
     * {{{
     *  object table1 extends Table[(Int, Int, Int)]("table1") {
     *    def col1 = column[Int]("col1")
     *    def col2 = column[Int]("col2")
     *    def col3 = column[Int]("col3")
     *    def col4 = column[Int]("col4")
     *    def col5 = column[Int]("col5")
     *
     *    def pk1 = primaryKey("pk1", (col1, col2))
     *    def pk2 = primaryKey("pk2", (col3, col4, col5))
     *
     *    def * = col1 ~ col2 ~ col3 ~ col4 ~ col5
     *  }
     *  tblMig.addPrimaryKeys
     * }}}
     */
    def addPrimaryKeys(implicit dialect: Dialect[_]) = TableMigration(table).addPrimaryKeys(table.primaryKeys)

    def dropPrimaryKeys(implicit dialect: Dialect[_]) = TableMigration(table).dropPrimaryKeys(table.primaryKeys)

    def dropPrimaryKeys(pks: (T => PrimaryKey)*)(implicit dialect: Dialect[_]) = TableMigration(table).dropPrimaryKeys(pks: _*)

    def dropPrimaryKeys(pks: Iterable[PrimaryKey])(implicit dialect: Dialect[_]) = TableMigration(table).dropPrimaryKeys(pks)

    /**
     * Adds foreign key constraints.
     * @param fkqs zero or more `ForeignKeyQuery`-returning functions, which are passed the table object.
     * @example
     * {{{
     *  object table1 extends Table[(Int, Int, Int)]("table1") {
     *    def col1 = column[Int]("col1")
     *    def col2 = column[Int]("col2")
     *
     *    def fkq1 = foreignKey("fkq1", col1, ... )
     *    def fkq2 = foreignKey("fkq2", col2, ... )
     *
     *    def * = col1 ~ col2
     *  }
     *  tblMig.addForeignKeys(_.fkq1, _.fkq2)
     * }}}
     */
    def addForeignKeys(fkqs: (T => ForeignKeyQuery[_ <: AbstractTable[_], _])*)(implicit dialect: Dialect[_]) = TableMigration(table).addForeignKeys(fkqs: _*)

    /**
     * Adds foreign key constraints.
     * @param fkqs foreign keys belonging to the table.
     * @example
     * {{{
     *  object table1 extends Table[(Int, Int, Int)]("table1") {
     *    def col1 = column[Int]("col1")
     *    def col2 = column[Int]("col2")
     *
     *    def fkq1 = foreignKey("fkq1", col1, ... )
     *    def fkq2 = foreignKey("fkq2", col2, ... )
     *
     *    def * = col1 ~ col2
     *  }
     *  val fkqs = Seq(table1.fkq1, table1.fkq2)
     *  tblMig.addForeignKeys(fkqs)
     * }}}
     */
    def addForeignKeys(fkqs: Iterable[ForeignKey])(implicit dialect: Dialect[_]) = TableMigration(table).addForeignKeys(fkqs)

    /**
     * Adds all foreign key constraints of the table.
     * @example
     * {{{
     *  object table1 extends Table[(Int, Int, Int)]("table1") {
     *    def col1 = column[Int]("col1")
     *    def col2 = column[Int]("col2")
     *
     *    def fkq1 = foreignKey("fkq1", col1, ... )
     *    def fkq2 = foreignKey("fkq2", col2, ... )
     *
     *    def * = col1 ~ col2
     *  }
     *  tblMig.addForeignKeys
     * }}}
     */
    def addForeignKeys(implicit dialect: Dialect[_]) = TableMigration(table).addForeignKeys(table.foreignKeys)

    def dropForeignKeys(fkqs: (T => ForeignKeyQuery[_ <: AbstractTable[_], _])*)(implicit dialect: Dialect[_]) = TableMigration(table).dropForeignKeys(fkqs: _*)

    def dropForeignKeys(fkqs: Iterable[ForeignKey])(implicit dialect: Dialect[_]) = TableMigration(table).dropForeignKeys(fkqs)

    def dropForeignKeys(implicit dialect: Dialect[_]) = TableMigration(table).dropForeignKeys(table.foreignKeys)

    /**
     * Drop the table.
     * Note: drop + create is allowed.
     */
    def drop(implicit dialect: Dialect[_]) = TableMigration(table).drop

    /**
     * Create the table.
     * Note: drop + create is allowed.
     */
    def create(implicit dialect: Dialect[_]) = TableMigration(table).create

    /**
     * Reflectively collects all columns of the table: every public no-arg
     * method and every public field whose type is `Column[_]`.
     */
    def columns = table.getClass.getMethods
      .filter { m => m.getReturnType == classOf[Column[_]] && m.getParameterTypes.length == 0 }
      .map { m => m.invoke(table).asInstanceOf[Column[_]] } ++
      table.getClass.getFields
        .filter { f => f.getType == classOf[Column[_]] }
        // Read the field's value from the table instance; casting the
        // java.lang.reflect.Field itself (as the previous code did) would
        // always throw a ClassCastException.
        .map { f => f.get(table).asInstanceOf[Column[_]] }

    /** Build an INSERT migration from (column, value) pairs. */
    def insert(values: ((T) => Column[_], Any)*)(implicit dialect: Dialect[_]) = InsertMigration(table, values.map { p => (p._1.apply(table), p._2) }.toMap)
  }

  /** DROP TABLE migration for a table known only by name. */
  def dropTable(name: String,
                schema: Option[String] = None)(implicit dialect: Dialect[_]) = SqlMigration(dialect.dropTable(TableInfo(schema, name)))

  /** Drop a named foreign key constraint from a table known only by name. */
  def dropForeignKey(foreignKeyName: String,
                     tableName: String,
                     schema: Option[String] = None)(implicit dialect: Dialect[_]) = SqlMigration(dialect.dropForeignKey(TableInfo(schema, tableName), foreignKeyName))

  /** Drop a named index from a table known only by name. */
  def dropIndex(indexName: String,
                tableName: String,
                schema: Option[String] = None,
                unique: Boolean = false,
                columns: Seq[String] = Seq())(implicit dialect: Dialect[_]) = {
    val cols: Seq[FieldSymbol] = columns.map { x => FieldSymbol(x)(Seq(), null) }
    val indexInfo = IndexInfo(TableInfo(schema, tableName), indexName, unique, cols)
    SqlMigration(dialect.dropIndex(indexInfo))
  }

  /** Drop a named column from a table known only by name. */
  def dropColumn(name: String,
                 tableName: String,
                 schema: Option[String] = None)(implicit dialect: Dialect[_]) = {
    SqlMigration(dialect.dropColumn(TableInfo(schema, tableName), name))
  }

  /** Drop a named primary key constraint from a table known only by name. */
  def dropPrimaryKey(name: String,
                     tableName: String,
                     schema: Option[String] = None)(implicit dialect: Dialect[_]) = {
    SqlMigration(dialect.dropPrimaryKey(TableInfo(schema, tableName), name))
  }

  implicit class ColumnExtensions(column: Column[_]) {
    /** The dialect-level field symbol backing this column. */
    def fieldSymbol(implicit dialect: Dialect[_]) = dialect.fieldSym(column)
  }

  implicit class AnyExtension[T](val obj: T) extends AnyVal {
    /** Wrap any value in `Some`. */
    def toOption = Some(obj)
  }
}
|
koendeschacht/smile | math/src/main/java/smile/stat/distribution/NegativeBinomialDistribution.java | /*******************************************************************************
* Copyright (c) 2010 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package smile.stat.distribution;
import smile.math.special.Beta;
import smile.math.special.Gamma;
import smile.math.Math;
/**
 * Negative binomial distribution arises as the probability distribution of
 * the number of successes in a series of independent and identically distributed
 * Bernoulli trials needed to get a specified (non-random) number r of failures.
 * If r is an integer, it is usually called Pascal distribution. Otherwise, it
 * is often called Polya distribution for the real-valued case. When r = 1 we
 * get the probability distribution of number of successes before the
 * first failure, which is a geometric distribution.
 * <p>
 * An alternative definition is that X is the total number of trials needed
 * to get r failures, not simply the number of successes. This alternative
 * parameterization can be used as an alternative to the Poisson distribution.
 * It is especially useful for discrete data over an unbounded positive range
 * whose sample variance exceeds the sample mean. If a Poisson distribution is
 * used to model such data, the model mean and variance are equal. In that case,
 * the observations are overdispersed with respect to the Poisson model.
 * Since the negative binomial distribution has one more parameter than the
 * Poisson, the second parameter can be used to adjust the variance
 * independently of the mean. In the case of modest overdispersion, this may
 * produce substantially similar results to an overdispersed Poisson distribution.
 * <p>
 * The negative binomial distribution also arises as a continuous mixture
 * of Poisson distributions where the mixing distribution of the Poisson rate
 * is a gamma distribution. That is, we can view the negative binomial as a
 * Poisson(&lambda;) distribution, where &lambda; is itself a random variable,
 * distributed according to &Gamma;(r, p/(1 - p)).
 *
 * @author <NAME>
 */
public class NegativeBinomialDistribution extends DiscreteDistribution {
    private static final long serialVersionUID = 1L;

    /**
     * The number of failures until the experiment is stopped.
     */
    private double r;
    /**
     * Success probability in each experiment.
     */
    private double p;

    /**
     * Constructor.
     * @param r the number of failures until the experiment is stopped.
     * @param p success probability in each experiment.
     */
    public NegativeBinomialDistribution(double r, double p) {
        if (p <= 0 || p >= 1) {
            throw new IllegalArgumentException("Invalid p: " + p);
        }
        if (r <= 0) {
            throw new IllegalArgumentException("Invalid r: " + r);
        }
        this.p = p;
        this.r = r;
    }

    @Override
    public int npara() {
        return 2;
    }

    @Override
    public double mean() {
        return r * (1 - p) / p;
    }

    @Override
    public double var() {
        return r * (1 - p) / (p * p);
    }

    @Override
    public double sd() {
        return Math.sqrt(r * (1 - p)) / p;
    }

    /**
     * Shannon entropy. Not supported.
     */
    @Override
    public double entropy() {
        throw new UnsupportedOperationException("Negative Binomial distribution does not support entropy()");
    }

    @Override
    public String toString() {
        if (r == (int) r) {
            // r must be cast to int for the %d conversion; passing the double
            // directly would throw IllegalFormatConversionException.
            return String.format("Negative Binomial(%d, %.4f)", (int) r, p);
        } else {
            return String.format("Negative Binomial(%.4f, %.4f)", r, p);
        }
    }

    @Override
    public double rand() {
        return inverseTransformSampling();
    }

    @Override
    public double p(int k) {
        if (k < 0) {
            return 0.0;
        } else {
            return Gamma.gamma(r + k) / (Math.factorial(k) * Gamma.gamma(r)) * Math.pow(p, r) * Math.pow(1 - p, k);
        }
    }

    @Override
    public double logp(int k) {
        if (k < 0) {
            return Double.NEGATIVE_INFINITY;
        } else {
            return Gamma.lgamma(r + k) - Math.logFactorial(k) - Gamma.lgamma(r) + r * Math.log(p) + k * Math.log(1 - p);
        }
    }

    @Override
    public double cdf(double k) {
        if (k < 0) {
            return 0.0;
        } else {
            return Beta.regularizedIncompleteBetaFunction(r, k + 1, p);
        }
    }

    @Override
    public double quantile(double p) {
        // Note: the parameter p is the probability level and shadows the
        // success-probability field; cdf() still uses the field internally.
        if (p < 0.0 || p > 1.0) {
            throw new IllegalArgumentException("Invalid p: " + p);
        }

        // Starting guess near peak of density.
        // Expand interval until we bracket.
        int kl, ku, inc = 1;
        int k = (int) mean();
        if (p < cdf(k)) {
            do {
                k = Math.max(k - inc, 0);
                inc *= 2;
            } while (p < cdf(k) && k > 0);
            kl = k;
            ku = k + inc / 2;
        } else {
            do {
                k += inc;
                inc *= 2;
            } while (p > cdf(k));
            ku = k;
            kl = k - inc / 2;
        }

        return quantile(p, kl, ku);
    }
}
|
KTH/aspen | modules/util/error_cache.py | <gh_stars>0
__author__ = '<EMAIL>'
import datetime
import logging
from os import pipe
from dateutil.parser import parse
from dateutil.relativedelta import relativedelta
from modules.util import data_defs, redis, exceptions, environment
def create_cache_entry(step_name):
    """Build a cache record for a failed deployment step.

    The record holds the step name and the report time (stringified so it
    can be serialised to JSON in redis).
    """
    reported_at = str(get_now())
    return {'step_name': step_name, 'reported_at': reported_at}
def get_cache_key(pipeline_data):
    """Derive the redis key under which errors for this stack file are cached.

    Returns None when the pipeline data carries no stack file path.
    """
    if data_defs.STACK_FILE_PATH not in pipeline_data:
        return None
    resource_group = environment.get_env(environment.MANAGEMENT_RES_GRP)
    path = pipeline_data[data_defs.STACK_FILE_PATH].lstrip('/')
    return f'error/{resource_group}/{path}'
def write_to_error_cache(error: exceptions.DeploymentError):
    """Persist the failing step for this pipeline so repeated alerts can be throttled."""
    cache_key = get_cache_key(error.pipeline_data)
    entry = create_cache_entry(error.step_name)
    redis.execute_json_set(redis.get_client(), cache_key, entry)
def has_cached_error(error):
    """Return the cached entry when this exact step already failed for the
    pipeline, otherwise None (a different failing step does not count)."""
    cached = get_error_cache(get_cache_key(error.pipeline_data))
    if cached and cached['step_name'] == error.step_name:
        return cached
    return None
def should_be_reported_again(error_cache_entry):
    """True once at least 10 minutes have elapsed since the error was last reported."""
    last_reported_at = parse(error_cache_entry['reported_at'])
    # A fixed minute offset needs no dateutil machinery; the stdlib
    # timedelta is equivalent to relativedelta(minutes=+10) here.
    report_again_at = last_reported_at + datetime.timedelta(minutes=10)
    return get_now() > report_again_at
def get_error_cache(error_cache_key):
    """Fetch the cached error record for the key; None for a falsy key."""
    if error_cache_key:
        client = redis.get_client()
        return redis.execute_json_get(client, error_cache_key)
    return None
def get_now():
    """Return the current local time.

    Kept as a separate seam so tests can monkeypatch "now" instead of
    patching ``datetime.datetime`` globally.
    """
    return datetime.datetime.now()
|
mmbsoftware-it/swagger-core | modules/swagger-oauth2-auth-server/src/main/scala/com/wordnik/swagger/auth/service/AuthService.scala | package com.wordnik.swagger.auth.service
import com.wordnik.swagger.auth.model._
import com.wordnik.swagger.core.SwaggerContext
import org.apache.oltu.oauth2.as.request.{ OAuthAuthzRequest, OAuthTokenRequest }
import org.apache.oltu.oauth2.as.response.OAuthASResponse
import org.apache.oltu.oauth2.common.OAuth
import org.apache.oltu.oauth2.common.exception.{ OAuthProblemException, OAuthSystemException }
import org.apache.oltu.oauth2.common.message.OAuthResponse
import org.apache.oltu.oauth2.common.message.types.ResponseType
import org.apache.oltu.oauth2.common.utils.OAuthUtils
import org.slf4j.LoggerFactory
import javax.servlet.http.{ Cookie, HttpServletRequest, HttpServletResponse }
import javax.ws.rs.core.Response
import javax.ws.rs.WebApplicationException
import java.util.Date
import java.net.URLEncoder
import java.net.URI
import scala.collection.mutable.HashSet
/**
 * A minimal OAuth2 authorization server built on Apache Oltu.
 *
 * Handles the authorization-code flow end to end: showing the consent
 * dialog (`authorize`), validating user credentials and issuing codes
 * (`login`), exchanging codes for access tokens (`token`), and guarding
 * protected operations (`validate`). Token/code/request-id storage is
 * inherited from [[TokenStore]].
 */
class AuthService extends TokenStore {
  private val LOGGER = LoggerFactory.getLogger(classOf[AuthService])
  // Pluggable credential/client/redirect-URI validator supplied by the host application.
  val validator = ValidatorFactory.validator
  /**
   * Runs the by-name block `f` only when `accessCode` maps to a stored,
   * unexpired token; otherwise throws. As a side effect, sets or clears
   * the current username in [[TokenScope]] depending on the token type.
   */
  def validate[T](accessCode: String, f: => T):T = {
    LOGGER.debug("validating access code " + accessCode)
    if(hasAccessCode(accessCode)) {
      val token = getTokenForAccessCode(accessCode)
      if(token.getRemaining > 0) {
        token.tokenResponse match {
          case e: AnonymousTokenResponse => TokenScope.unsetUsername()
          case e: UserTokenResponse => TokenScope.setUsername(e.username)
          case _ => throw new Exception("unauthorized")
        }
        f
      }
      else throw new Exception("unauthorized")
    }
    else throw new Exception("unauthorized")
  }
  /**
   * Processes the consent-dialog form post. Depending on the `accept`
   * parameter and credential validity, redirects the browser back to the
   * client with either an authorization code, an access token (when no
   * request_id is present), or an error fragment.
   */
  def login(request: HttpServletRequest, response: HttpServletResponse) = {
    val scope = request.getParameter("scope")
    val redirectUri = request.getParameter("redirect_uri")
    val username = request.getParameter("username")
    val password = request.getParameter("password")
    val clientId = request.getParameter("client_id")
    val accept = request.getParameter("accept")
    val requestId = request.getParameter("request_id")
    val responseType = request.getParameter("response_type")
    LOGGER.debug("logging in user " + username + ", accept=" + accept)
    // User pressed "deny" on the consent dialog: report cancellation in the fragment.
    if(accept.toLowerCase == "deny") {
      LOGGER.debug("user " + username + " denied the login request")
      val redirectTo = {
        (redirectUri.indexOf("#") match {
          case i: Int if(i >= 0) => redirectUri + "&"
          case i: Int => redirectUri + "#"
        }) + "error=user_cancelled"
      }
      response.sendRedirect(redirectTo)
    }
    else if(validator.isValidUser(username, password)) {
      LOGGER.debug("username " + username + " has valid password")
      if(validator.isValidRedirectUri(clientId, redirectUri)) {
        LOGGER.debug("username " + username + " has valid redirect URI: " + redirectUri)
        val redirectTo = {
          (redirectUri.indexOf("?") match {
            case i: Int if(i >= 0) => redirectUri + "&"
            case i: Int => redirectUri + "?"
          })
        }
        if(requestId != null && !"".equals(requestId)) {
          LOGGER.debug("username " + username + " has request id=" + requestId)
          if(hasRequestId(requestId)) {
            LOGGER.debug("token for requestId " + requestId + " found")
            // now that we have the username, save it in the hash
            val requestToken = getRequestId(requestId) ++ Map("username" -> Some(username))
            addRequestId(requestId, requestToken)
            // NOTE: this local redirectUri (from the stored request) shadows the
            // request-parameter redirectUri declared above.
            val redirectUri = requestToken(OAuth.OAUTH_REDIRECT_URI ).get
            val redirectTo = {
              (redirectUri.indexOf("?") match {
                case i: Int if(i >= 0) => redirectUri + "&"
                case i: Int => redirectUri + "?"
              })
            }
            // Swap the one-time request id for an authorization code and cache
            // the associated user token (1 hour lifetime).
            val code = exchangeRequestIdForCode(requestId)
            val token = UserTokenResponse(3600, code, username)
            addAccessCode(code, TokenWrapper(new Date, token))
            LOGGER.debug("redirecting to " + redirectTo + "code=" + code)
            response.sendRedirect(redirectTo + "code=" + code)
          }
          else {
            LOGGER.debug("token for requestId " + requestId + " NOT found")
            response.sendRedirect(redirectTo + "error=invalid_code")
          }
        }
        else {
          // should this actually exist?
          // Implicit-style fallback: no request id, so issue an access token
          // directly in the redirect fragment.
          LOGGER.debug("no request id, generating access token")
          val accessToken = generateAccessToken()
          val token = UserTokenResponse(3600, accessToken, username)
          addAccessCode(accessToken, TokenWrapper(new Date, token))
          val redirectTo = {
            (redirectUri.indexOf("#") match {
              case i: Int if(i >= 0) => redirectUri + "&"
              case i: Int => redirectUri + "#"
            }) + "access_token=" + accessToken
          }
          response.sendRedirect(redirectTo)
        }
      }
      else response.getOutputStream.write("bad redirect_uri".getBytes("utf-8"))
    }
    else {
      LOGGER.debug("invalid credentials")
      val redirectTo = {
        (redirectUri.indexOf("#") match {
          case i: Int if(i >= 0) => redirectUri + "&"
          case i: Int => redirectUri + "#"
        }) + "error=invalid_credentials"
      }
      response.sendRedirect(redirectTo)
    }
  }
  /**
   * Reports whether an access code is still valid, and if so how many
   * seconds of lifetime remain.
   */
  def authorizationCodeStatus(accessCode: String) = {
    LOGGER.debug("checking code status for " + accessCode)
    if(hasAccessCode(accessCode)) {
      val token = getTokenForAccessCode(accessCode)
      if(token.getRemaining > 0)
        ApiResponseMessage(200, "%d seconds remaining".format(token.getRemaining))
      else
        ApiResponseMessage(400, "invalid token")
    }
    else
      ApiResponseMessage(400, "invalid token")
  }
  /**
   * OAuth2 token endpoint: exchanges a valid authorization code for an
   * access token (the code is consumed). When the code is invalid but the
   * store allows anonymous tokens, an anonymous token is issued instead.
   * Only the authorization_code grant type is supported.
   */
  def token(request: HttpServletRequest, response: HttpServletResponse): TokenResponse = {
    try {
      val oauthRequest = new OAuthTokenRequest(request)
      val code = oauthRequest.getParam(OAuth.OAUTH_CODE)
      val grantType = oauthRequest.getParam(OAuth.OAUTH_GRANT_TYPE)
      val clientId = oauthRequest.getParam(OAuth.OAUTH_CLIENT_ID)
      val clientSecret = oauthRequest.getParam(OAuth.OAUTH_CLIENT_SECRET)
      LOGGER.debug("get token for '" + clientId + "'' with client secret '" + clientSecret + "'")
      if(validator.isValidClient(clientId, clientSecret)) {
        if("authorization_code" == grantType) {
          LOGGER.debug("grant type is " + grantType)
          val validCode = hasAccessCode(code)
          if(validCode) {
            // Carry the username (if any) from the code's token over to the
            // newly issued access token, then invalidate the one-time code.
            val username = getTokenForAccessCode(code).tokenResponse match {
              case t: UserTokenResponse => t.username
              case _ => null
            }
            removeAccessCode(code)
            val accessToken = generateAccessToken()
            val token = UserTokenResponse(3600, accessToken, username)
            addAccessCode(accessToken, TokenWrapper(new Date, token))
            token
          }
          else if(allowAnonymousTokens()) {
            val accessToken = generateAccessToken()
            val token = AnonymousTokenResponse(3600, accessToken)
            addAccessCode(accessToken, TokenWrapper(new Date, token))
            token
          }
          else
            throw new Exception("invalid code supplied")
        }
        else {
          LOGGER.debug("unsupported grant type " + grantType)
          throw new Exception("unsupported grant type")
        }
      }
      else {
        LOGGER.debug("invalid client id " + clientId)
        throw new Exception("invalid client id")
      }
    }
    catch {
      // NOTE(review): rewrapping drops the original stack trace and cause;
      // only the message survives.
      case e: Exception => {
        throw new Exception(e.getMessage)
      }
    }
  }
  /**
   * OAuth2 authorization endpoint. For response_type=code it stores the
   * request parameters under a fresh request id and renders the consent
   * dialog (implementation class configurable via the
   * "DialogImplementation" servlet context parameter); otherwise it builds
   * the redirect location from the request.
   */
  def authorize(request: HttpServletRequest,response: HttpServletResponse): ApiResponseMessage = {
    import scala.collection.JavaConverters._
    var oauthRequest: OAuthAuthzRequest = null;
    try {
      oauthRequest = new OAuthAuthzRequest(request)
      //build response according to response_type
      val responseType = oauthRequest.getParam(OAuth.OAUTH_RESPONSE_TYPE)
      val builder = OAuthASResponse.authorizationResponse(request, HttpServletResponse.SC_FOUND)
      if (responseType.equals(ResponseType.CODE.toString())) {
        // Stash the incoming OAuth parameters so login() can retrieve them
        // by request id once the user submits the dialog.
        val requestMap = Map(
          OAuth.OAUTH_STATE -> Option(oauthRequest.getParam(OAuth.OAUTH_STATE)),
          OAuth.OAUTH_REDIRECT_URI -> Option(oauthRequest.getParam(OAuth.OAUTH_REDIRECT_URI)),
          OAuth.OAUTH_CLIENT_ID -> Option(oauthRequest.getParam(OAuth.OAUTH_CLIENT_ID)),
          OAuth.OAUTH_SCOPE -> Option(oauthRequest.getParam(OAuth.OAUTH_SCOPE)))
        val requestId = generateRequestId(oauthRequest.getParam(OAuth.OAUTH_CLIENT_ID))
        addRequestId(requestId, requestMap)
        val dialogClass = Option(request.getSession.getServletContext.getInitParameter("DialogImplementation")).getOrElse({
          LOGGER.warn("using default dialog implementation")
          "com.wordnik.swagger.auth.service.DefaultAuthDialog"
        })
        val dialog = SwaggerContext.loadClass(dialogClass).newInstance.asInstanceOf[AuthDialog]
        // write the dialog UI
        dialog.show(oauthRequest.getParam(OAuth.OAUTH_CLIENT_ID),
          oauthRequest.getParam(OAuth.OAUTH_REDIRECT_URI),
          oauthRequest.getParam(OAuth.OAUTH_SCOPE),
          ResponseType.CODE.toString(),
          Option(requestId))
      }
      else {
        val redirectURI = oauthRequest.getParam(OAuth.OAUTH_REDIRECT_URI)
        val response = builder.location(redirectURI).buildQueryMessage()
        val url = new URI(response.getLocationUri())
        ApiResponseMessage(200, url.toString)
      }
    } catch {
      case e: OAuthProblemException => {
        e.printStackTrace();
        val responseBuilder = Response.status(HttpServletResponse.SC_FOUND)
        val redirectUri = e.getRedirectUri();
        if (OAuthUtils.isEmpty(redirectUri)) {
          ApiResponseMessage(400, "OAuth callback url needs to be provided by client")
        }
        else ApiResponseMessage(400, e.getMessage)
      }
    }
  }
}
|
streamich/cloudform | types/kms/index.js | <reponame>streamich/cloudform
"use strict";
/* Generated from https://d3teyb21fexa9r.cloudfront.net/latest/gzip/CloudFormationResourceSpecification.json, version 2.0.0 */
Object.defineProperty(exports, "__esModule", { value: true });
// Barrel module (TypeScript compiler output): bundles the AWS KMS
// CloudFormation resource types into one default export.
const key_1 = require("./key");
const alias_1 = require("./alias");
exports.default = {
    Key: key_1.default,
    Alias: alias_1.default
};
|
LtSurgekopf/openEHR_SDK | client/src/test/java/org/ehrbase/client/classgenerator/examples/stationarerversorgungsfallcomposition/definition/AufnahmedatenAdminEntryContainment.java | <reponame>LtSurgekopf/openEHR_SDK
package org.ehrbase.client.classgenerator.examples.stationarerversorgungsfallcomposition.definition;
import com.nedap.archie.rm.archetyped.FeederAudit;
import com.nedap.archie.rm.datastructures.Cluster;
import com.nedap.archie.rm.generic.PartyProxy;
import java.lang.String;
import java.time.temporal.TemporalAccessor;
import org.ehrbase.client.aql.containment.Containment;
import org.ehrbase.client.aql.field.AqlFieldImp;
import org.ehrbase.client.aql.field.ListAqlFieldImp;
import org.ehrbase.client.aql.field.ListSelectAqlField;
import org.ehrbase.client.aql.field.SelectAqlField;
import org.ehrbase.client.classgenerator.shareddefinition.Language;
public class AufnahmedatenAdminEntryContainment extends Containment {
public SelectAqlField<AufnahmedatenAdminEntry> AUFNAHMEDATEN_ADMIN_ENTRY = new AqlFieldImp<AufnahmedatenAdminEntry>(AufnahmedatenAdminEntry.class, "", "AufnahmedatenAdminEntry", AufnahmedatenAdminEntry.class, this);
public SelectAqlField<String> VERSORGUNGSFALLGRUND_VALUE = new AqlFieldImp<String>(AufnahmedatenAdminEntry.class, "/data[at0001]/items[at0013]/value|value", "versorgungsfallgrundValue", String.class, this);
public SelectAqlField<String> ART_DER_AUFNAHME_VALUE = new AqlFieldImp<String>(AufnahmedatenAdminEntry.class, "/data[at0001]/items[at0049]/value|value", "artDerAufnahmeValue", String.class, this);
public SelectAqlField<TemporalAccessor> DATUM_UHRZEIT_DER_AUFNAHME_VALUE = new AqlFieldImp<TemporalAccessor>(AufnahmedatenAdminEntry.class, "/data[at0001]/items[at0071]/value|value", "datumUhrzeitDerAufnahmeValue", TemporalAccessor.class, this);
public ListSelectAqlField<Cluster> ZUGEWIESENER_PATIENTENSTANDORT = new ListAqlFieldImp<Cluster>(AufnahmedatenAdminEntry.class, "/data[at0001]/items[at0131]", "zugewiesenerPatientenstandort", Cluster.class, this);
public ListSelectAqlField<Cluster> VORHERIGER_PATIENTENSTANDORT = new ListAqlFieldImp<Cluster>(AufnahmedatenAdminEntry.class, "/data[at0001]/items[at0132]", "vorherigerPatientenstandort", Cluster.class, this);
public SelectAqlField<PartyProxy> SUBJECT = new AqlFieldImp<PartyProxy>(AufnahmedatenAdminEntry.class, "/subject", "subject", PartyProxy.class, this);
public SelectAqlField<Language> LANGUAGE = new AqlFieldImp<Language>(AufnahmedatenAdminEntry.class, "/language", "language", Language.class, this);
public SelectAqlField<FeederAudit> FEEDER_AUDIT = new AqlFieldImp<FeederAudit>(AufnahmedatenAdminEntry.class, "/feeder_audit", "feederAudit", FeederAudit.class, this);
private AufnahmedatenAdminEntryContainment() {
super("openEHR-EHR-ADMIN_ENTRY.admission.v0");
}
public static AufnahmedatenAdminEntryContainment getInstance() {
return new AufnahmedatenAdminEntryContainment();
}
}
|
King0987654/windows2000 | private/security/msv_sspi/encrypt.cxx | <gh_stars>10-100
/*++
Copyright (c) 1994 Microsoft Corporation
Module Name:
encrypt.cxx
Abstract:
Contains routine to check whether encryption is supported on this
system or not.
Author:
<NAME> (MikeSw) 2-Aug-1994
Revision History:
ChandanS 03-Aug-1996 Stolen from net\svcdlls\ntlmssp\common\encrypt.c
--*/
#include <windows.h>
#include <stdlib.h>
#include <string.h>
#include <rpc.h>
extern "C"
BOOLEAN
IsEncryptionPermitted(VOID)
/*++
Routine Description:
    This routine checks whether encryption is getting the system default
    LCID and checking whether the country code is CTRY_FRANCE.
Arguments:
    none
Return Value:
    TRUE - encryption is permitted
    FALSE - encryption is not permitted
--*/
{
    //
    // sfield: permission to remove FRANCE check obtained 08-21-1999
    //
    // The France locale check below is intentionally kept (disabled via
    // #if 0) for historical reference; the function now unconditionally
    // permits encryption.
#if 0
    LCID DefaultLcid;
    WCHAR CountryCode[10];
    ULONG CountryValue;
    DefaultLcid = GetSystemDefaultLCID();
    //
    // Check if the default language is Standard French
    //
    if (LANGIDFROMLCID(DefaultLcid) == 0x40c) {
        return(FALSE);
    }
    //
    // Check if the users's country is set to FRANCE
    //
    if (GetLocaleInfo(DefaultLcid,LOCALE_ICOUNTRY,CountryCode,10) == 0) {
        return(FALSE);
    }
    CountryValue = (ULONG) wcstol(CountryCode,NULL,10);
    if (CountryValue == CTRY_FRANCE) {
        return(FALSE);
    }
#endif
    return(TRUE);
}
|
idnesdotlink/giftbox-app | src-compro/ionic/js/controllers/webview-detail-controller.js | <gh_stars>0
// Controller for the webview detail screen: fetches the post by id,
// extracts its target URL, and opens it in a themeable in-app browser.
// Authenticates against the backend with a token obtained from globals
// (username/password/company_id — presumably set elsewhere in the app;
// TODO confirm). Falls back to the local SQLite cache when offline.
app.controller("WebviewDetailCtrl", function ($scope,
    $http,
    $rootScope,
    httpService,
    $stateParams,
    $ionicLoading,
    $ionicPopup,
    $ionicPlatform,
    $cordovaClipboard,
    $cordovaToast,
    $timeout,
    $cordovaInAppBrowser) {
    // loading spinner in the beginning
    $scope.isLoading = true;
    $scope.isTimeout = false;
    $scope.isDownloaded = false;
    $scope.progressval = 0;
    // Resolve configurable UI strings (ui_texts_webviews is a global;
    // getMenuText falls back to the default label).
    var loadCustomText = function() {
        $scope.button_text_open = getMenuText(ui_texts_webviews.button_text_open,"Open");
    };
    loadCustomText();
    $ionicPlatform.ready(function () {
        // check user login
        if (user_id === '') {
            $scope.isLogin = false;
        } else {
            $scope.isLogin = true;
        }
        $scope.input = {
            comment: ''
        };
        // Kick off the auth handshake; the success event below continues the flow.
        var url = token_url;
        var obj = serializeData({email: username, password: password, company_id: company_id});
        httpService.post_token($scope, $http, url, obj);
    });
    // if get token success, request for book detail
    $scope.$on('httpService:postTokenSuccess', function () {
        token = $scope.data.token;
        //console.log(token);
        var url = post_content_url + $stateParams.id;
        httpService.get($scope, $http, url, 'content', token);
        console.log('WebviewDetailCtrl');
    });
    var term_content_type_id;
    // Re-localize static labels whenever the app language changes.
    $rootScope.$on('ReloadDefaultLanguage',reloadTermStaticPageLanguage);
    // if get detail book success, set detail book
    $scope.$on('httpService:getRequestSuccess', function () {
        // Pull the external URL out of the post's metadata and open it immediately.
        var viewUrl = getPostMetaValueById($scope.data.post.post_meta, 'url').value;
        $scope.content_data = {
            title: $scope.data.post.title,
            viewUrl: viewUrl,
            post_meta: $scope.data.post.post_meta
        };
        term_content_type_id = $scope.data.post.term_content_type_id;
        $scope = reloadTermStaticPageLanguage($scope,term_content_type_id);
        $scope.isLoading = false;
        $scope.viewUrl = viewUrl;
        $scope.openBrowser($scope.viewUrl, $scope.content_data.title);
    });
    // if get token failed, request token again
    $scope.$on('httpService:postTokenError', function () {
        // Status 0 means no connectivity: fall back to the offline DB copy.
        if($scope.status === 0)
        {
            if(isPhoneGap())
            {
                loadPostJSONFromDB($stateParams.id, $scope);
            }
        }
        else
        {
            var url = token_url;
            var obj = serializeData({email: username, password: password, company_id: company_id});
            httpService.post_token($scope, $http, url, obj, 'content');
        }
    });
    //if get data failed, request token again
    $scope.$on('httpService:getRequestError', function () {
        var url = token_url;
        var obj = serializeData({email: username, password: password, company_id: company_id});
        $scope.isTimeout = true;
        // httpService.post_token($scope, $http, url, obj, 'content');
    });
    // Opens the URL: plain window.open on desktop, ThemeableBrowser on device.
    // Orientation is unlocked while browsing and re-locked to portrait on close.
    $scope.openBrowser = function(url){
        if (!isPhoneGap()){
            window.open(url,'_blank','location=yes');
        }
        else {
            screen.unlockOrientation();
            cordova.ThemeableBrowser.open(url, '_blank', {
                statusbar: {
                    color: '#ffffffff'
                },
                toolbar: {
                    height: 44,
                    color: '#ffffffff'
                },
                title: {
                    color: '#212121ff',
                    showPageTitle: true
                },
                backButton: {
                    wwwImage: 'images/drawable-xhdpi/back.png',
                    wwwImagePressed: 'images/drawable-xhdpi/back_pressed.png',
                    wwwImageDensity: 2,
                    align: 'left',
                    event: 'backPressed'
                },
                forwardButton: {
                    wwwImage: 'images/drawable-xhdpi/forward.png',
                    wwwImagePressed: 'images/drawable-xhdpi/forward_pressed.png',
                    wwwImageDensity: 2,
                    align: 'left',
                    event: 'forwardPressed'
                },
                closeButton: {
                    wwwImage: 'images/drawable-xhdpi/close.png',
                    wwwImagePressed: 'images/drawable-xhdpi/close_pressed.png',
                    wwwImageDensity: 2,
                    align: 'right',
                    event: 'closePressed'
                },
                backButtonCanClose: true
            }).addEventListener('closePressed', function(e) {
                screen.lockOrientation('portrait-primary');
            });
        }
    };
    // Loading-overlay helpers.
    $scope.show = function () {
        $ionicLoading.show({
            template: ionicLoadingTemplate
        });
    };
    $scope.hide = function () {
        $ionicLoading.hide();
    };
    // Shares the post title plus the store link matching the platform.
    $scope.socialShare = function () {
        if (isPhoneGap()) {
            if (isAndroid()) {
                socialShare($cordovaClipboard, $cordovaToast, $timeout, $scope.content_data.title, $scope.content_data.title, null, playstore_link, 'Read more at');
            }
            else if (isIOS()) {
                socialShare($cordovaClipboard, $cordovaToast, $timeout, $scope.content_data.title, $scope.content_data.title, null, appstore_link, 'Read more at');
            }
        } else {
            console.log('Social Share: Not a Mobile Device');
            console.log($scope.content_data.title);
            console.log(playstore_link);
            console.log(appstore_link);
        }
    };
    $scope.isPhoneGap = function()
    {
        return isPhoneGap();
    };
    // Offline fallback finished loading the cached post.
    $scope.$on('SQLite:getOfflineDataSuccess', function () {
        //console.log($scope.data);
        $scope.isLoading = false;
    });
    // Manual retry after a timeout error.
    $scope.retryLoadContent = function(){
        var url = token_url;
        var obj = serializeData({email: username, password: password, company_id: company_id});
        $scope.isTimeout = false;
        httpService.post_token($scope, $http, url, obj, 'content');
    };
});
|
cdhaichuang/haichuangframework | hc-base/src/main/java/pro/haichuang/framework/base/util/common/FileUriUtils.java | <gh_stars>1-10
package pro.haichuang.framework.base.util.common;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.lang3.StringUtils;
import org.springframework.lang.NonNull;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import org.springframework.web.multipart.MultipartFile;
import java.io.File;
import java.util.Arrays;
/**
* 文件资源路径工具类
*
* @author JiYinchuan
* @since 1.1.0.211021
*/
/**
 * File resource path utility.
 *
 * Builds relative upload paths of the form
 * {@code uploadPath.../fileType/fileName} and normalises them to Unix-style
 * separators without a leading slash.
 *
 * @author JiYinchuan
 * @since 1.1.0.211021
 */
public class FileUriUtils {
    /**
     * Concatenates a filename for an uploaded multipart file.
     *
     * When {@code newFileName} is empty, a random UUID name is generated,
     * preserving the original file's extension if one exists.
     *
     * @param file uploaded file
     * @param newFileName new file name, may be null/empty
     * @param fileType file type sub-directory
     * @param uploadPath upload path segments, must not be empty
     * @return concatenated filename [uploadPath + fileType + fileName]
     * @since 1.1.0.211021
     */
    @NonNull
    public static String concatFilename(@NonNull MultipartFile file, @Nullable String newFileName,
                                        @NonNull String fileType, @NonNull String... uploadPath) {
        // getExtension(null) yields null, which resolveFileName handles.
        String extensionName = FilenameUtils.getExtension(file.getOriginalFilename());
        return concatFilename0(extensionName, newFileName, fileType, uploadPath);
    }

    /**
     * Concatenates a filename for a local file.
     *
     * @param file local file
     * @param newFileName new file name, may be null/empty
     * @param fileType file type sub-directory
     * @param uploadPath upload path segments, must not be empty
     * @return concatenated filename [uploadPath + fileType + fileName]
     * @since 1.1.0.211021
     */
    @NonNull
    public static String concatFilename(@NonNull File file, @Nullable String newFileName,
                                        @NonNull String fileType, @NonNull String... uploadPath) {
        String extensionName = FilenameUtils.getExtension(file.getName());
        return concatFilename0(extensionName, newFileName, fileType, uploadPath);
    }

    /**
     * Concatenates a filename for a raw file path.
     *
     * @param filePath source file path
     * @param newFileName new file name, may be null/empty
     * @param fileType file type sub-directory
     * @param uploadPath upload path segments, must not be empty
     * @return concatenated filename [uploadPath + fileType + fileName]
     * @since 1.1.0.211021
     */
    @NonNull
    public static String concatFilename(@NonNull String filePath, @Nullable String newFileName,
                                        @NonNull String fileType, @NonNull String... uploadPath) {
        String extensionName = FilenameUtils.getExtension(filePath);
        return concatFilename0(extensionName, newFileName, fileType, uploadPath);
    }

    /**
     * Formats a filename: converts separators to Unix style and optionally
     * strips a single leading "/".
     *
     * @param filename filename to format
     * @param isReplaceFirstSeparator whether to strip a leading separator
     * @return formatted filename
     * @since 1.1.0.211021
     */
    public static String formatFilename(@NonNull String filename, boolean isReplaceFirstSeparator) {
        filename = FilenameUtils.separatorsToUnix(filename);
        // startsWith is safe for empty strings, unlike charAt(0)
        return isReplaceFirstSeparator && filename.startsWith("/")
                ? filename.substring(1) : filename;
    }

    /**
     * Shared implementation behind the three public overloads: resolves the
     * final file name (explicit name, or random UUID plus original extension)
     * and joins it onto uploadPath/fileType.
     */
    private static String concatFilename0(@Nullable String extensionName, @Nullable String newFileName,
                                          @NonNull String fileType, @NonNull String... uploadPath) {
        Assert.notEmpty(uploadPath, "上传路径不能为空");
        String fileNewName;
        if (newFileName != null && !newFileName.isEmpty()) {
            fileNewName = newFileName;
        } else {
            fileNewName = extensionName != null && !extensionName.isEmpty()
                    ? UUIDUtils.random().concat(FilenameUtils.EXTENSION_SEPARATOR_STR).concat(extensionName)
                    : UUIDUtils.random();
        }
        String relativeUploadPath = FilenameUtils.concat(String.join("/", uploadPath), fileType);
        return formatFilename(FilenameUtils.concat(relativeUploadPath, fileNewName), true);
    }
}
|
TaHanh/reactjs_templae | src/components/RichEditor/components/index.js | export { default as EditorToolbar } from './EditorToolbar';
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.