repo_name
stringlengths 6
101
| path
stringlengths 4
300
| text
stringlengths 7
1.31M
|
|---|---|---|
rneogns/simpleio
|
Library/SimpleIO/FileUtil.h
|
#pragma once

// The header was not self-contained: it used unqualified `string` and
// `uintmax_t` without including <string>/<cstdint>, so it only compiled if
// every includer happened to pull those in (plus a using-directive) first.
#include <cstdint>
#include <string>

// Static helpers for querying and manipulating the file system.
// Declarations only; definitions live in the corresponding implementation file.
class FileUtil
{
public:
    // True if 'path' names an existing regular file.
    static bool IsFile(std::string path);
    // True if 'path' names an existing directory.
    static bool IsDirectory(std::string path);
    // True if 'path' exists at all (file, directory, or otherwise).
    static bool Exists(std::string path);
    // Size in bytes of the file at 'path'.
    static std::uintmax_t FileSize(std::string path);
    // Creates the directory 'path'; presumably returns success — confirm in the implementation.
    static bool CreateDirectory(std::string path);
    // Platform path separator character (e.g. '/' or '\\').
    static char GetSeparator();
};
|
OscarRodriguezPrieto/ProgQuery
|
ProgQuery/src/database/querys/cypherWrapper/cmu/wiggle/OBJ56.java
|
<gh_stars>1-10
package database.querys.cypherWrapper.cmu.wiggle;
import database.nodes.NodeTypes;
import database.querys.cypherWrapper.AbstractQuery;
import database.querys.cypherWrapper.AnonymousNode;
import database.querys.cypherWrapper.Any;
import database.querys.cypherWrapper.Cardinalidad;
import database.querys.cypherWrapper.Case;
import database.querys.cypherWrapper.Clause;
import database.querys.cypherWrapper.ClauseImpl;
import database.querys.cypherWrapper.EdgeImpl;
import database.querys.cypherWrapper.Extract;
import database.querys.cypherWrapper.Filter;
import database.querys.cypherWrapper.MatchClause;
import database.querys.cypherWrapper.MatchImpl;
import database.querys.cypherWrapper.Node;
import database.querys.cypherWrapper.NodeVar;
import database.querys.cypherWrapper.Path;
import database.querys.cypherWrapper.Reduce;
import database.querys.cypherWrapper.RelationshipImpl;
import database.querys.cypherWrapper.ReturnClause;
import database.querys.cypherWrapper.SimpleWithClause;
import database.querys.cypherWrapper.UnwindClause;
import database.querys.cypherWrapper.WhereClause;
import database.querys.services.InmutabilityServicesPQ;
import database.relations.RelationTypes;
import database.relations.RelationTypesWiggle;
import database.relations.TypeRelations;
import utils.dataTransferClasses.Pair;
/**
 * CMU secure-coding rule OBJ-56 ("wiggle" variant): flags non-public fields of
 * public classes that are exposed through public methods while their declared
 * type (or a type reachable through the subtype relations) is mutable, so a
 * malicious client could mutate internal state through the returned reference.
 *
 * <p>The whole analysis is one long Cypher pipeline assembled from the
 * cypherWrapper DSL; each clause feeds the next through WITH/UNWIND. Helper
 * relationships created along the way are tagged {@code created:TRUE} so they
 * can be deleted in the cleanup clause at the end.
 */
public class OBJ56 extends AbstractQuery {

    public OBJ56() {
        // NOTE(review): the boolean flag's meaning is defined in AbstractQuery
        // (not visible here) — confirm before changing.
        super(false);
    }

    // Materialises ITS_TYPE_IS edges (tagged created:TRUE for later cleanup)
    // from field declarations to the nodes of their actual types; array-typed
    // fields get a synthetic ARRAY_TYPE node instead.
    private static final String CREATE_ITS_TYPE_IS_RELS =
            "MATCH (c)-[:DECLARES_FIELD]->(n),(m) WHERE EXISTS(n.actualType) AND n.actualType=m.fullyQualifiedName CREATE (n)-[:ITS_TYPE_IS{created:TRUE}]->(m) WITH DISTINCT 'l' as l "+
            "MATCH (c)-[:DECLARES_FIELD]->(n) WHERE EXISTS(n.typeKind) AND n.typeKind='ARRAY' MERGE (n)-[:ITS_TYPE_IS{created:TRUE}]->(arrayType:ARRAY_TYPE{fullyQualifiedName:n.actualType}) ";

    // Materialises IS_SUBTYPE_EXTENDS edges by matching the raw (erased)
    // portion of parameterized type names; edges created here are marked
    // created:TRUE so the cleanup clause can remove them.
    public static final String CREATE_ALL_SUBTYPE_RELS = "MATCH (n)-[:HAS_CLASS_EXTENDS]->()-[:PARAMETERIZEDTYPE_TYPE*0..]->(m),(t) WHERE SPLIT(t.fullyQualifiedName,'<')[0]=SPLIT(m.actualType,'<')[0] MERGE (n)-[r:IS_SUBTYPE_EXTENDS]->(t) ON CREATE SET r.created=TRUE ";

    @Override
    protected void initiate() {
        Node typeDec = new NodeVar("typeDec");
        // The clause array is executed in order as a single Cypher pipeline.
        clauses = new Clause[] {
                // Phase 1: create the temporary ITS_TYPE_IS / subtype edges.
                new ClauseImpl(CREATE_ITS_TYPE_IS_RELS+" WITH DISTINCT 'l' as aux")
                , new ClauseImpl(CREATE_ALL_SUBTYPE_RELS
                        +" WITH DISTINCT 'l' as aux"
                ),
                // Phase 2: find non-public fields returned from public methods
                // of public classes. NOTE(review): 'ids'/'identss' appear to be
                // bound inside the PDG/statement service clauses — confirm there.
                new MatchClause(getPDGServices().getCompleteIdentification(new NodeVar("id"), "")),
                new MatchClause(
                        getStatementServices().getMethodFromStatement(
                                new MatchImpl(
                                        "(classMod)<-[:HAS_CLASS_MODIFIERS]-(enclosingType)-[:DECLARES_FIELD]->(dec),(id)<-[:RETURN_EXPR]-()"),
                                Node.nodeForWiggle("method", NodeTypes.METHOD_DEF)),
                        new MatchImpl(
                                "(enclosingMethodType)-[:DECLARES_METHOD]->(method)-[:HAS_METHODDECL_MODIFIERS]->(methodMod),(dec)-[:HAS_VARIABLEDECL_MODIFIERS]->(fieldMod) "
                                        + ",(dec)-[:ITS_TYPE_IS]->()<-[:IS_SUBTYPE_EXTENDS|IS_SUBTYPE_IMPLEMENTS*0..]-(typeDec)"
                        )),
                new WhereClause(
                        "id IN ids AND NOT fieldMod.flags CONTAINS 'public' AND methodMod.flags CONTAINS 'public' AND classMod.flags CONTAINS 'public' AND (typeDec:ARRAY_TYPE OR typeDec.nodeType<>'ClassType') "),
                new SimpleWithClause("typeDec,enclosingType,dec as field,enclosingMethodType.fullyQualifiedName+':'+ method.name +'(line '+method.lineNumber+')' as method,identss"),
                // OK
                // Phase 3: expand each candidate type into the transitive
                // closure of its supertypes and field types.
                new MatchClause(true, InmutabilityServicesPQ.getTypesSuperTypesAndFieldsTransitive(typeDec)),
                new SimpleWithClause(
                        "DISTINCT typeDec,enclosingType, field,identss, method,p, CASE WHEN p IS NULL THEN [typeDec] else NODES(p) END as nodes"),
                new UnwindClause("nodes", "nodeInP"),
                new SimpleWithClause("enclosingType, field, method,identss, typeDec, nodeInP,nodes"),
                // TESTING
                // Phase 4: per node in the path, collect public setter methods
                // that assign into one of its (inherited) fields.
                new MatchClause(true,
                        new Path(new NodeVar("nodeInP"),
                                Pair.create(new EdgeImpl(Cardinalidad.MIN_TO_INF(0), TypeRelations.IS_SUBTYPE_EXTENDS,
                                        TypeRelations.IS_SUBTYPE_IMPLEMENTS), new AnonymousNode()),
                                Pair.createP("f", RelationTypes.DECLARES_FIELD)),
                        new RelationshipImpl(new NodeVar("mod"), new NodeVar("id"),
                                new EdgeImpl(Cardinalidad.ONE_TO_INF, RelationTypesWiggle.MEMBER_SELECT_EXPR,
                                        RelationTypesWiggle.ARRAYACCESS_EXPR, RelationTypesWiggle.ASSIGNMENT_LHS)),
                        getExpressionServices().getMethodFromExp(new NodeVar("mod"),
                                Node.nodeForWiggle("setMethod", NodeTypes.METHOD_DEF)),
                        new MatchImpl(
                                "(nodeInP)-[:DECLARES_METHOD]->(setMethod)-[:HAS_METHODDECL_MODIFIERS]->(setMod)")),
                new WhereClause("setMod.flags CONTAINS 'public' AND NOT setMod.flags CONTAINS 'static' AND [id,f] IN identss AND setMethod.name<>'<init>'"),
                new SimpleWithClause(
                        "enclosingType, field, method, typeDec,identss, nodeInP,nodes, CASE WHEN setMethod IS NULL THEN [] ELSE COLLECT(DISTINCT setMethod) END as setters"),
                // new ReturnClause(
                // "enclosingType.fullyQualifiedName, field.name, method.name, typeDec.fullyQualifiedName, nodeInP,EXTRACT(n IN nodes |CASE WHEN EXISTS(n.fullyQualifiedName) THEN n.fullyQualifiedName ELSE n.name END), CASE WHEN setMethod IS NULL THEN [] ELSE COLLECT( DISTINCT setMethod) END as setters"),
                // Phase 5: methods of nodeInP that override a supertype method
                // with the same name — candidates for same-signature overrides.
                new MatchClause(true,
                        new MatchImpl(
                                "(anyMethod)<-[:DECLARES_METHOD]-(nodeInP)-[:IS_SUBTYPE_EXTENDS | :IS_SUBTYPE_IMPLEMENTS*]->()-[:DECLARES_METHOD]->(ovMethod{name:anyMethod.name})")
                ),
                // new WhereClause("ovMethod.") ,
                // new ReturnClause("enclosingType, field, method, typeDec, nodeInP,nodes, setters, anyMethod, ovMethod")
                new MatchClause(true,
                        new MatchImpl(
                                "(anyMethod)-[:HAS_METHODDECL_PARAMETERS]->(p),(ovMethod)-[:HAS_METHODDECL_PARAMETERS]->(baseP)")
                ),
                new SimpleWithClause(
                        "enclosingType, field, method,anyMethod, typeDec"
                                + ",identss"
                                + ",ovMethod, nodeInP,nodes, setters, COLLECT(p.actualType) as params, COLLECT(baseP.actualType) as baseParams "),
                // Compare parameter lists as multisets: pair each type with its
                // multiplicity, then demand mutual containment plus equal size.
                new SimpleWithClause(
                        "enclosingType, field, method,anyMethod, typeDec"
                                + ",identss"
                                + ",ovMethod, nodeInP,nodes, setters, EXTRACT(p IN params |[p, SIZE(FILTER(x IN params WHERE x=p))]) as params, "
                                + "EXTRACT(p IN baseParams |[p, SIZE(FILTER(x IN baseParams WHERE x=p))]) as baseParams"),
                new SimpleWithClause(" enclosingType, field, method, typeDec"
                        + ",identss"
                        + ", nodeInP,nodes, setters"
                        + ",COLLECT( DISTINCT CASE WHEN "
                        + "SIZE(params)=SIZE(baseParams) AND ovMethod.name<>'<init>'"
                        + " AND ALL(pPair IN baseParams WHERE pPair IN params)"
                        + " THEN ovMethod ELSE NULL END) as methodsOverridenByThisType"
                ),
                //new ReturnClause("distinct method")
                // Phase 6: public non-static getters that return one of the
                // node's fields.
                new MatchClause(true,
                        new MatchImpl(
                                "(nodeInP)-[:IS_SUBTYPE_EXTENDS | :IS_SUBTYPE_IMPLEMENTS*0..]->()-[:DECLARES_METHOD]->(getMethod)-[:HAS_METHODDECL_MODIFIERS]->(methodMod)"),
                        new Path(new NodeVar("nodeInP"), Pair.createP("f", RelationTypes.DECLARES_FIELD)),
                        getStatementServices().getMethodFromStatement(
                                new MatchImpl("(id)<-[:RETURN_EXP]-()"),new NodeVar("getMethod")
                        )),
                new WhereClause(
                        "[id,f] IN identss AND methodMod.flags CONTAINS 'public' AND NOT methodMod.flags CONTAINS 'static'"),
                // new ReturnClause("enclosingType, field, method, typeDec, nodeInP,nodes, setters,methodsOverridenByThisType, COLLECT(DISTINCT getMethod) as getters")
                new MatchClause(true,
                        new RelationshipImpl(new NodeVar("f"), new NodeVar("fType"),
                                new EdgeImpl(TypeRelations.ITS_TYPE_IS))),
                //WHERE
                // Pair.create("accessLevel", "public"), Pair.create("isStatic", false)
                new SimpleWithClause(
                        "enclosingType, field, method, typeDec, nodeInP,nodes, setters,methodsOverridenByThisType, COLLECT(DISTINCT getMethod) as getters, COLLECT(DISTINCT fType) as getterTypes"),
                // Phase 7: public non-final fields of the node — direct public
                // exposure makes the type mutable regardless of methods.
                new MatchClause(true,
                        new Path(new NodeVar("nodeInP"),
                                Pair.createP(new NodeVar("f"), RelationTypes.DECLARES_FIELD),
                                Pair.create(new EdgeImpl(TypeRelations.ITS_TYPE_IS), new NodeVar("fType")))
                        , new MatchImpl("(f)-[:HAS_VARIABLEDECL_MODIFIERS]->(fMods)")
                ),
                new WhereClause("fMods.flags CONTAINS 'public' AND NOT fMods.flags CONTAINS 'static'"),
                //WHERE , Pair.create("isStatic", false),Pair.create("accessLevel", "public")
                new SimpleWithClause(
                        "enclosingType, field, method, typeDec,nodeInP,nodes,methodsOverridenByThisType, setters, getters,getterTypes, COLLECT(DISTINCT [f,fType,fMods.flags CONTAINS 'final']) as otherMutableDependencies "),
                new SimpleWithClause(
                        "enclosingType, field, method, typeDec, nodeInP, nodes, setters,methodsOverridenByThisType, getters,getterTypes, "
                                + new Extract(new Filter("otherMutableDependencies", "x[2]"), "y[1]", "y")
                                        .expToString()
                                + " as mutableDependenciesBis, "
                                + new Any("otherMutableDependencies", "NOT x[2]").expToString()
                                + " as isMutableDueToPublicField "),
                // new ReturnClause("enclosingType, field, method, typeDec,
                // nodeInP, p, setters")
                // new WhereClause("SIZE(NODES(p))=12"),
                // Phase 8: fold all per-node facts back into one list aligned
                // with the original 'nodes' path order, keyed by node ID.
                new SimpleWithClause("DISTINCT enclosingType, field, method, typeDec, nodes"
                        //
                        + ", COLLECT([ID(nodeInP),[isMutableDueToPublicField,mutableDependenciesBis,methodsOverridenByThisType,setters, getters,getterTypes ]]) as map "
                        //
                ),
                new SimpleWithClause("enclosingType, field, method,typeDec,nodes as prevNodes"
                        + ","
                        + new Extract("nodes", "x+HEAD(" + new Filter("map", "y[0]=ID(x)", "y").expToString() + ")[1]")
                                .expToString()
                        + " as nodes"
                ), new SimpleWithClause("DISTINCT enclosingType, field, method, typeDec,prevNodes, " + new Extract("nodes", "[x[0],CASE WHEN x[1] IS NULL THEN FALSE ELSE x[1] END, CASE WHEN x[2] IS NULL THEN [] ELSE x[2] END, CASE WHEN x[3] IS NULL THEN [] ELSE x[3] END, CASE WHEN x[4] IS NULL THEN [] ELSE x[4] END,CASE WHEN x[5] IS NULL THEN [] ELSE x[5] END, CASE WHEN x[6] IS NULL THEN [] ELSE x[6] END]").expToString() + " as nodes"
                //
                ), new SimpleWithClause("enclosingType, field, method, typeDec,prevNodes, CASE WHEN nodes IS NULL THEN [] ELSE nodes END as nodes "),
                new SimpleWithClause(
                        "enclosingType, field, method, typeDec,prevNodes, nodes, RANGE(0,SIZE(nodes)-1,1) as indexes"),
                // Phase 9: per index, locate the most recent enclosing class
                // node, accumulate the overrides in scope, and decide mutability.
                new SimpleWithClause(
                        "enclosingType, field, method, typeDec"
                                + ",prevNodes"
                                + ", nodes, indexes, EXTRACT(index IN indexes | CASE WHEN SIZE(FILTER(i IN indexes WHERE i<index AND NOT nodes[i][0].nodeType='JCClassDecl'))=0 THEN 0 ELSE LAST(FILTER(i IN indexes WHERE i<index AND nodes[i][0].nodeType='JCClassDecl')) END) as lastAttrIndexes"
                                + " "),
                new SimpleWithClause("enclosingType, field, method, typeDec,prevNodes, nodes, " + new Extract("indexes",
                        new Reduce(new Filter("indexes", "i<index AND i>=lastAttrIndexes[index]", "i").expToString(),
                                "s+nodes[x][3]", "s=[]").expToString()
                        , "index").expToString() + " as overrides, indexes, lastAttrIndexes"),
                new SimpleWithClause(" enclosingType, field, method, typeDec,prevNodes, " + new Extract("indexes",
                        "[nodes[x][0], nodes[x][1] OR ANY(setter IN nodes[x][4] WHERE NOT setter IN overrides[x]) "
                                + ", nodes[x][2]+ EXTRACT( getterIndex IN FILTER(gf IN RANGE(0,SIZE(nodes[x][5])-1,1) "
                                + "WHERE NOT nodes[x][5][gf] IN overrides[x]) | nodes[x][6][getterIndex])] ")
                                .expToString()
                        + " as nodes, indexes "),
                // new SimpleWithClause("enclosingType, field, method,
                // typeDec,p, " + new Extract("indexes",
                // "[nodes[x][0], nodes[x][1]," + new Case("x>0 AND NOT
                // nodes[x+1] IS NULL",
                // "CASE WHEN nodes[x][0]:ATTR_DEC THEN nodes[x+1][0] IN
                // nodes[x-1][2] ELSE TRUE END",
                // "TRUE").expToString() + "] ").expToString()
                // + " as nodes"),
                new SimpleWithClause(
                        "enclosingType, field, method, typeDec,prevNodes,indexes, " + new Extract("indexes",
                                "[nodes[x][0], nodes[x][1],"
                                        + new Case("x>1 AND NOT nodes[x-1][0].nodeType='JCClassDecl'",
                                                " ANY(dep IN nodes[x-2][2] WHERE dep =nodes[x][0])", "TRUE").expToString()
                                        + "] ").expToString()
                                + " as nodes"),
                new SimpleWithClause(" enclosingType, field, method, typeDec, " + new Extract("indexes",
                        "[nodes[x][0], nodes[x][1]," + "NOT ANY(i IN indexes WHERE i<=x AND NOT nodes[i][2]) ]")
                                .expToString()
                        + " as nodes "),
                // new SimpleWithClause(
                // "enclosingType, field, method,typeDec,p, CASE WHEN
                // SIZE(nodes)=0 THEN []+typeDec ELSE nodes END as nodes "),
                // new WhereClause("SIZE(nodes)=0"),
                // Phase 10: reduce the per-node verdicts into a single boolean
                // per (field, type) candidate.
                new SimpleWithClause("DISTINCT enclosingType, field, method, typeDec," + new Reduce("nodes",
                        // MISSING ADD ARRAY_TYPE AS ITS_TYPE_IS array AT THE
                        // VERY BEGINNING
                        "s AND CASE WHEN NOT x[0].nodeType='JCClassDecl' THEN TRUE ELSE NOT x[2] OR (NOT x[0]:ARRAY_TYPE AND NOT x[1]) END ",
                        "s=true").expToString() + " AND NOT typeDec:ARRAY_TYPE as res"
                ), new SimpleWithClause("enclosingType, field,method, typeDec,ANY (x IN COLLECT(res) WHERE NOT x) as isMutable"),
                new SimpleWithClause("DISTINCT enclosingType, field,COLLECT(method) as methods, typeDec,isMutable"),
                new WhereClause("isMutable"),
                // FILTER
                // ,
                // Phase 11: build the warning text, suggesting non-mutable
                // subtypes where any exist, then clean up the temporary edges
                // and synthetic ARRAY_TYPE nodes created in phase 1.
                new SimpleWithClause("COLLECT([enclosingType, field, methods, typeDec]) as res"),
                new UnwindClause("res", "typeRes"),
                new SimpleWithClause(
                        "typeRes[1] as field, typeRes[3] as typeDec, typeRes[0] as enclosingType,typeRes[2] as methods, res"),
                new MatchClause(true,
                        new MatchImpl("(typeDec)<-[:IS_SUBTYPE_EXTENDS | :IS_SUBTYPE_IMPLEMENTS*]-(subType)")),
                new SimpleWithClause("typeDec,methods,enclosingType,field, EXTRACT(x IN res | x[3]) as mutableTypes, COLLECT(subType) as subtypes"),
                new SimpleWithClause(
                        " typeDec, methods,enclosingType,field, FILTER(y IN subtypes WHERE NOT y IN mutableTypes) as nonMutableSubTypes")
                ,new SimpleWithClause( " 'Warning[OBJ-56] Field ' +field.name+' declared in line ' +field.lineNumber+' in class '+enclosingType.fullyQualifiedName+' is not public, but it is exposed in public methods such as '+ methods+'. The problem is that the type '+typeDec.fullyQualifiedName+' can be mutated by a malicious client.'+ CASE WHEN SIZE(nonMutableSubTypes)=0 THEN ' You should use an appropiate inmutable subtype as a wrapper for your attribute.'ELSE 'Remember to use an appropiate inmutable subtype (such as '+EXTRACT(x IN nonMutableSubTypes | x.fullyQualifiedName)+') as a wrapper for your attribute.'END as warning ")
                , new SimpleWithClause("DISTINCT COLLECT(warning) as warningList ")
                ,
                new ClauseImpl(
                        "MATCH ()-[r]->() WHERE r.created DELETE r WITH DISTINCT warningList MATCH (array:ARRAY_TYPE) DELETE array ")
                , new ReturnClause("DISTINCT warningList")
                // , new ReturnClause(" warningList")
        };
    }

    // Prints the assembled Cypher query for manual inspection.
    public static void main(String[] args) {
        System.out.println(new OBJ56().queryToString());
    }
}
|
yanzhe919/rythmengine
|
src/main/java/org/rythmengine/internal/IEvent.java
|
/**
* Copyright (C) 2013-2016 The Rythm Engine project
* for LICENSE and other details see:
* https://github.com/rythmengine/rythmengine
*/
package org.rythmengine.internal;
/**
* Defines event to be used in rythm system
*/
public interface IEvent<RETURN, PARAM> {

    /**
     * Triggers this event on the given dispatcher.
     *
     * @param eventBus    the dispatcher that will handle this event
     * @param eventParams the parameter object passed along with the event
     * @return the result produced by handling the event
     */
    RETURN trigger(IEventDispatcher eventBus, PARAM eventParams);

    /**
     * Indicates whether this event is "safe".
     *
     * <p>NOTE(review): the exact safety contract (e.g. thread-safety vs.
     * template-sandbox safety) is defined by the implementations/dispatcher,
     * not visible in this file — confirm before relying on it.
     */
    boolean isSafe();
}
|
egorodet/CML
|
cml/matrix/row_node.tpp
|
<filename>cml/matrix/row_node.tpp<gh_stars>100-1000
/* -*- C++ -*- ------------------------------------------------------------
@@COPYRIGHT@@
*-----------------------------------------------------------------------*/
/** @file
*/
#ifndef __CML_MATRIX_ROW_NODE_TPP
#error "matrix/row_node.tpp not included correctly"
#endif
namespace cml {

/* matrix_row_node 'structors: */

// Wrap @c sub and select row @c row of it. Negative row indices are
// rejected up front (cml_require throws std::invalid_argument).
template<class Sub>
matrix_row_node<Sub,-1>::matrix_row_node(Sub sub, int row)
: m_sub(std::move(sub)), m_row(row)
{
  cml_require(row >= 0, std::invalid_argument, "row < 0");
}

// Move constructor: takes over the wrapped expression; the row index is
// trivially copied.
template<class Sub>
matrix_row_node<Sub,-1>::matrix_row_node(node_type&& other)
: m_sub(std::move(other.m_sub)), m_row(other.m_row)
{
}

// Copy constructor fallback for compilers without rvalue-reference-from-this
// support (see the feature macro in the library configuration).
#ifndef CML_HAS_RVALUE_REFERENCE_FROM_THIS
template<class Sub>
matrix_row_node<Sub,-1>::matrix_row_node(const node_type& other)
: m_sub(other.m_sub), m_row(other.m_row)
{
}
#endif

/* Internal methods: */

/* readable_vector interface: */

// A row viewed as a vector has as many elements as the matrix has columns.
template<class Sub> int
matrix_row_node<Sub,-1>::i_size() const
{
  return this->m_sub.cols();
}

// Element j of the row vector is element (m_row, j) of the wrapped matrix.
template<class Sub> auto
matrix_row_node<Sub,-1>::i_get(int j) const -> immutable_value
{
  return this->m_sub.get(this->m_row,j);
}

} // namespace cml
// -------------------------------------------------------------------------
// vim:ft=cpp:sw=2
|
chirag-savant/svea_lli_firmware
|
lib/ros_included/marker_msgs/Marker.h
|
#ifndef _ROS_marker_msgs_Marker_h
#define _ROS_marker_msgs_Marker_h
#include <stdint.h>
#include <string.h>
#include <stdlib.h>
#include "ros/msg.h"
#include "geometry_msgs/Pose.h"
namespace marker_msgs
{
  // rosserial-generated message class for marker_msgs/Marker: two variable
  // length arrays (marker ids and per-id confidences) plus a pose.
  // Wire format (little-endian): uint32 ids_length, ids[], uint32
  // ids_confidence_length, ids_confidence[] (as AVR float64), then the pose.
  class Marker : public ros::Msg
  {
    public:
      // Number of valid elements in 'ids'.
      uint32_t ids_length;
      typedef int32_t _ids_type;
      // Scratch element used while deserializing one array entry
      // (rosserial's generated-code convention for the st_* members).
      _ids_type st_ids;
      // Heap-allocated array, grown via realloc in deserialize().
      _ids_type * ids;
      // Number of valid elements in 'ids_confidence'.
      uint32_t ids_confidence_length;
      typedef float _ids_confidence_type;
      // Scratch element for deserializing one confidence value.
      _ids_confidence_type st_ids_confidence;
      // Heap-allocated array, grown via realloc in deserialize().
      _ids_confidence_type * ids_confidence;
      typedef geometry_msgs::Pose _pose_type;
      _pose_type pose;

    // Start with empty arrays; buffers are allocated lazily on deserialize.
    // NOTE(review): buffers are never freed by this class — lifetime follows
    // rosserial's generated-message convention; confirm before changing.
    Marker():
      ids_length(0), st_ids(), ids(nullptr),
      ids_confidence_length(0), st_ids_confidence(), ids_confidence(nullptr),
      pose()
    {
    }

    // Write this message into 'outbuffer' (caller-sized); returns the number
    // of bytes written. Multi-byte values are emitted least-significant
    // byte first.
    virtual int serialize(unsigned char *outbuffer) const override
    {
      int offset = 0;
      *(outbuffer + offset + 0) = (this->ids_length >> (8 * 0)) & 0xFF;
      *(outbuffer + offset + 1) = (this->ids_length >> (8 * 1)) & 0xFF;
      *(outbuffer + offset + 2) = (this->ids_length >> (8 * 2)) & 0xFF;
      *(outbuffer + offset + 3) = (this->ids_length >> (8 * 3)) & 0xFF;
      offset += sizeof(this->ids_length);
      for( uint32_t i = 0; i < ids_length; i++){
      // Union type-pun to get at the bytes of the signed value.
      union {
        int32_t real;
        uint32_t base;
      } u_idsi;
      u_idsi.real = this->ids[i];
      *(outbuffer + offset + 0) = (u_idsi.base >> (8 * 0)) & 0xFF;
      *(outbuffer + offset + 1) = (u_idsi.base >> (8 * 1)) & 0xFF;
      *(outbuffer + offset + 2) = (u_idsi.base >> (8 * 2)) & 0xFF;
      *(outbuffer + offset + 3) = (u_idsi.base >> (8 * 3)) & 0xFF;
      offset += sizeof(this->ids[i]);
      }
      *(outbuffer + offset + 0) = (this->ids_confidence_length >> (8 * 0)) & 0xFF;
      *(outbuffer + offset + 1) = (this->ids_confidence_length >> (8 * 1)) & 0xFF;
      *(outbuffer + offset + 2) = (this->ids_confidence_length >> (8 * 2)) & 0xFF;
      *(outbuffer + offset + 3) = (this->ids_confidence_length >> (8 * 3)) & 0xFF;
      offset += sizeof(this->ids_confidence_length);
      for( uint32_t i = 0; i < ids_confidence_length; i++){
      // Confidences travel as AVR-compatible float64 (ros::Msg helper).
      offset += serializeAvrFloat64(outbuffer + offset, this->ids_confidence[i]);
      }
      offset += this->pose.serialize(outbuffer + offset);
      return offset;
    }

    // Read this message from 'inbuffer'; returns the number of bytes
    // consumed. Arrays are grown (never shrunk) with realloc to fit the
    // incoming lengths.
    virtual int deserialize(unsigned char *inbuffer) override
    {
      int offset = 0;
      uint32_t ids_lengthT = ((uint32_t) (*(inbuffer + offset)));
      ids_lengthT |= ((uint32_t) (*(inbuffer + offset + 1))) << (8 * 1);
      ids_lengthT |= ((uint32_t) (*(inbuffer + offset + 2))) << (8 * 2);
      ids_lengthT |= ((uint32_t) (*(inbuffer + offset + 3))) << (8 * 3);
      offset += sizeof(this->ids_length);
      // Grow only when needed; keeps the largest buffer seen so far.
      if(ids_lengthT > ids_length)
        this->ids = (int32_t*)realloc(this->ids, ids_lengthT * sizeof(int32_t));
      ids_length = ids_lengthT;
      for( uint32_t i = 0; i < ids_length; i++){
      // Reassemble the little-endian bytes, then reinterpret as signed.
      union {
        int32_t real;
        uint32_t base;
      } u_st_ids;
      u_st_ids.base = 0;
      u_st_ids.base |= ((uint32_t) (*(inbuffer + offset + 0))) << (8 * 0);
      u_st_ids.base |= ((uint32_t) (*(inbuffer + offset + 1))) << (8 * 1);
      u_st_ids.base |= ((uint32_t) (*(inbuffer + offset + 2))) << (8 * 2);
      u_st_ids.base |= ((uint32_t) (*(inbuffer + offset + 3))) << (8 * 3);
      this->st_ids = u_st_ids.real;
      offset += sizeof(this->st_ids);
        memcpy( &(this->ids[i]), &(this->st_ids), sizeof(int32_t));
      }
      uint32_t ids_confidence_lengthT = ((uint32_t) (*(inbuffer + offset)));
      ids_confidence_lengthT |= ((uint32_t) (*(inbuffer + offset + 1))) << (8 * 1);
      ids_confidence_lengthT |= ((uint32_t) (*(inbuffer + offset + 2))) << (8 * 2);
      ids_confidence_lengthT |= ((uint32_t) (*(inbuffer + offset + 3))) << (8 * 3);
      offset += sizeof(this->ids_confidence_length);
      if(ids_confidence_lengthT > ids_confidence_length)
        this->ids_confidence = (float*)realloc(this->ids_confidence, ids_confidence_lengthT * sizeof(float));
      ids_confidence_length = ids_confidence_lengthT;
      for( uint32_t i = 0; i < ids_confidence_length; i++){
      offset += deserializeAvrFloat64(inbuffer + offset, &(this->st_ids_confidence));
        memcpy( &(this->ids_confidence[i]), &(this->st_ids_confidence), sizeof(float));
      }
      offset += this->pose.deserialize(inbuffer + offset);
      return offset;
    }

    // ROS message type name and MD5 checksum of the message definition.
    virtual const char * getType() override { return "marker_msgs/Marker"; };
    virtual const char * getMD5() override { return "30e386538c9eeff614c69452dbde6926"; };

  };
}
#endif
|
miniworld-project/miniworld_core
|
miniworld/model/network/connections/NodeDictMixin.py
|
from collections import UserDict
from miniworld.model.network.connections.JSONEncoder import JSONStrMixin
# TODO: REMOVE
class NodeDictMixin:
    """Mixin for dict-like containers keyed by pairs of EmulationNode.

    The host class must provide the mapping protocol (``items``) and be
    constructible from a plain dict via ``self.__class__``.
    """

    #########################################
    # Structure Converting
    #########################################

    def to_ids(self):
        """
        Convert all :py:class:`.EmulationNode` keys to their id.

        Returns
        -------
        UserDict
            Same mapping, but with each (EmulationNode, EmulationNode) key
            replaced by the tuple of their ``id`` attributes; values are
            kept as-is.

        Examples
        --------
        >>> x = NodeDict({(EmulationNode(1), EmulationNode(2)): {'loss': 0.5, 'bandwidth': 500}})
        >>> x.to_ids()
        {(1, 2): {'loss': 0.5, 'bandwidth': 500}}

        (The original example used an unhashable list key and showed
        ``('1', '1')``; corrected here. The exact id type — int vs str —
        depends on ``EmulationNode.id``; confirm against that class.)
        """
        # Rebuild the mapping keyed by (id, id) tuples; wrap the result in
        # the concrete subclass so NodeDict stays a NodeDict.
        converted_to_ids = {(emu_node_x.id, emu_node_y.id): val_inner for (emu_node_x, emu_node_y), val_inner in self.items()}
        return self.__class__(converted_to_ids)
class NodeDict(JSONStrMixin, UserDict):
    # Plain UserDict that additionally gains JSON string rendering from
    # JSONStrMixin; no behavior of its own.
    pass
|
AlexRogalskiy/DevArtifacts
|
master/com.github.danielpacak.jenkins.ci-master/com.github.danielpacak.jenkins.ci-master/core/src/main/java/com/github/danielpacak/jenkins/ci/core/util/XmlResponse.java
|
<filename>master/com.github.danielpacak.jenkins.ci-master/com.github.danielpacak.jenkins.ci-master/core/src/main/java/com/github/danielpacak/jenkins/ci/core/util/XmlResponse.java
/*
* #%L
* Jenkins Java API
* %%
* Copyright (C) 2013 <NAME>
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package com.github.danielpacak.jenkins.ci.core.util;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;

import javax.xml.XMLConstants;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpression;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;

import org.w3c.dom.Document;
import org.xml.sax.SAXException;
/**
 * Wrapper around an XML response body that offers typed XPath evaluation.
 * The document is parsed eagerly in the constructor.
 */
public class XmlResponse {

    private final Document document;

    private final XPath xPath;

    /**
     * Create a response from a raw XML string.
     *
     * @param content XML document text; encoded as UTF-8 for parsing
     * @throws IOException if the underlying stream cannot be read
     */
    public XmlResponse(String content) throws IOException {
        // Fix: the no-arg getBytes() used the platform default charset, which
        // can corrupt non-ASCII content depending on the JVM's locale.
        this(new ByteArrayInputStream(content.getBytes(StandardCharsets.UTF_8)));
    }

    /**
     * Create a response by parsing the given stream.
     *
     * @param content stream holding the XML document
     * @throws IOException if the stream cannot be read
     * @throws IllegalStateException if no XML parser can be configured
     * @throws IllegalArgumentException if the content is not well-formed XML
     */
    public XmlResponse(InputStream content) throws IOException {
        try {
            DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
            // Harden the parser: responses come from a remote server, so cap
            // entity expansion and avoid XXE-style attacks.
            factory.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, true);
            factory.setExpandEntityReferences(false);
            XPathFactory xPathFactory = XPathFactory.newInstance();
            DocumentBuilder builder = factory.newDocumentBuilder();
            xPath = xPathFactory.newXPath();
            document = builder.parse(content);
        } catch (ParserConfigurationException e) {
            // Misconfigured/missing parser is an environment problem.
            throw new IllegalStateException(e);
        } catch (SAXException e) {
            // Malformed XML is the caller's fault.
            throw new IllegalArgumentException(e);
        }
    }

    /**
     * Evaluate the XPath expression in the context of this XML response.
     *
     * @param expression expression to be evaluated
     * @return the result of evaluating the expression, or {@code null} if it
     *         evaluates to the empty string
     * @throws IllegalArgumentException if the expression is invalid
     */
    public String evaluateAsString(String expression) {
        Preconditions.checkArgumentNotNull(expression, "Expression cannot be null");
        try {
            XPathExpression expr = xPath.compile(expression);
            String rawResult = (String) expr.evaluate(document, XPathConstants.STRING);
            // XPath returns "" for missing nodes; normalize that to null so
            // the typed evaluateAs* helpers can distinguish "absent".
            return (rawResult != null && !"".equals(rawResult)) ? rawResult : null;
        } catch (XPathExpressionException e) {
            throw new IllegalArgumentException(e);
        }
    }

    /**
     * Evaluate the expression as a Boolean.
     *
     * @param expression expression to be evaluated
     * @return the parsed value, or {@code null} if the expression matched nothing
     */
    public Boolean evaluateAsBoolean(String expression) {
        String rawResult = evaluateAsString(expression);
        return rawResult != null ? Boolean.valueOf(rawResult) : null;
    }

    /**
     * Evaluate the expression as an Integer.
     *
     * @param expression expression to be evaluated
     * @return the parsed value, or {@code null} if the expression matched nothing
     * @throws NumberFormatException if the matched text is not a valid integer
     */
    public Integer evaluateAsInteger(String expression) {
        String rawResult = evaluateAsString(expression);
        return rawResult != null ? Integer.valueOf(rawResult) : null;
    }

    /**
     * Evaluate the expression as a Long.
     *
     * @param expression expression to be evaluated
     * @return the parsed value, or {@code null} if the expression matched nothing
     * @throws NumberFormatException if the matched text is not a valid long
     */
    public Long evaluateAsLong(String expression) {
        String rawResult = evaluateAsString(expression);
        return rawResult != null ? Long.valueOf(rawResult) : null;
    }
}
|
legacyai/tf-transformers
|
research/long_block_sequencer/evaluate.py
|
<filename>research/long_block_sequencer/evaluate.py
# coding=utf-8
# Copyright 2021 TF-Transformers Authors.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Evaluation script for t5"""
import os
import tempfile
import hydra
import pandas as pd
import tensorflow as tf
import tqdm
from dataset_loader import load_dataset_eval
from model import get_model_inference, get_tokenizer
from omegaconf import DictConfig
from rouge_score import rouge_scorer, scoring
from tf_transformers.text import TextDecoder
def predict_and_evaluate(decoder, eval_dataset, tokenizer, decoder_seq_len, eos_id, mode):
    """Decode summaries for every batch of ``eval_dataset`` and score them.

    Runs greedy or beam decoding (per ``mode``), writes the predicted and
    reference summaries to ``prediction_summaries.csv`` in the current working
    directory, and returns mid ROUGE-1/2/Lsum F1 scores.

    Note: assumes ``batch_labels['labels']`` holds reference token ids that
    the tokenizer can detokenize — confirm against dataset_loader.
    """
    predicted_summaries = []
    original_summaries = []
    for (batch_inputs, batch_labels) in tqdm.tqdm(eval_dataset):
        del batch_inputs[
            'decoder_input_ids'
        ]  # We do not need to pass decoder_input_ids , as we provide while initiating
        # TextDecoder
        if mode == 'greedy':
            decoder_outputs = decoder.decode(
                batch_inputs, max_iterations=decoder_seq_len + 1, mode='greedy', eos_id=eos_id
            )
        else:
            # Any non-greedy mode falls back to beam search (3 beams).
            decoder_outputs = decoder.decode(
                batch_inputs, max_iterations=decoder_seq_len + 1, mode='beam', num_beams=3, alpha=0.8, eos_id=eos_id
            )
        # Keep only the top hypothesis ([:, 0, :]) for each example.
        predicted_batch_summaries = tokenizer._tokenizer.detokenize(decoder_outputs['predicted_ids'][:, 0, :].numpy())
        predicted_summaries.extend(predicted_batch_summaries.numpy().tolist())
        original_batch_summaries = tokenizer._tokenizer.detokenize(batch_labels['labels'])
        original_summaries.extend(original_batch_summaries.numpy().tolist())
    # Detokenized outputs are bytes; decode them to str for scoring.
    predicted_summaries = [entry.decode() for entry in predicted_summaries]
    original_summaries = [text.decode() for text in original_summaries]
    # Persist the raw pairs for later inspection.
    df = pd.DataFrame()
    df['original_summaries'] = original_summaries
    df['predicted_summaries'] = predicted_summaries
    df.to_csv("prediction_summaries.csv", index=False)
    # Aggregate ROUGE with bootstrap resampling; report the mid estimates.
    scorer = rouge_scorer.RougeScorer(["rouge1", "rouge2", "rougeLsum"], use_stemmer=True)
    aggregator = scoring.BootstrapAggregator()
    for i in range(len(original_summaries)):
        score = scorer.score(original_summaries[i], predicted_summaries[i])
        aggregator.add_scores(score)
    result = {}
    result['rouge2_f1score_mid'] = aggregator.aggregate()['rouge2'].mid.fmeasure
    result['rouge1_f1score_mid'] = aggregator.aggregate()['rouge1'].mid.fmeasure
    result['rougel_f1score_mid'] = aggregator.aggregate()['rougeLsum'].mid.fmeasure
    return result
@hydra.main(config_path="conf", config_name="config")
def run(cfg: DictConfig) -> None:
print("Config", cfg)
model_name = cfg.model.model_name
num_splits = cfg.task.num_splits
use_gru_layer = cfg.task.use_gru_layer
projection_dimension = cfg.task.projection_dimension
max_seq_len = cfg.task.max_seq_len
decoder_seq_len = cfg.task.decoder_seq_len
eval_batch_size = cfg.eval.eval_batch_size
model_checkpoint_dir = cfg.eval.model_checkpoint_dir
model_checkpoint_path = cfg.eval.model_checkpoint_path
take_sample = cfg.eval.take_sample
mode = cfg.eval.mode
temp_dir = tempfile.TemporaryDirectory().name
if model_checkpoint_path and model_checkpoint_dir:
raise ValueError("Do not provide both `model_checkpoint_path` and `model_checkpoint_dir`.")
if max_seq_len % num_splits != 0:
raise ValueError("`num_splits` should be divisble by `max_seq_len`")
tokenizer_layer = get_tokenizer(model_name, max_seq_len)
# Get Inference Model
model_inference = get_model_inference(model_name, num_splits, use_gru_layer, projection_dimension)
eval_dataset, _ = load_dataset_eval(tokenizer_layer, max_seq_len, decoder_seq_len, eval_batch_size)
if take_sample:
eval_dataset = eval_dataset.take(20) # We take only 20 after batching for callbacks
if model_checkpoint_dir:
all_results = []
print("Model model_checkpoint_dir", model_checkpoint_dir)
number_of_checkpoints = int(
tf.train.latest_checkpoint(model_checkpoint_dir).split("/")[-1].replace("ckpt-", "")
)
number_of_checkpoints += 1
for checkpoint_number in range(1, number_of_checkpoints):
ckpt_path = os.path.join(model_checkpoint_dir, "ckpt-{}".format(checkpoint_number))
model_inference.load_checkpoint(checkpoint_path=ckpt_path)
# Save as serialized module
model_inference.save_transformers_serialized(temp_dir, overwrite=True)
model_pb = tf.saved_model.load(temp_dir)
decoder = TextDecoder(model_pb)
result = predict_and_evaluate(
decoder, eval_dataset, tokenizer_layer, decoder_seq_len, tokenizer_layer.eos_token_id, mode
)
all_results.append(result)
print("ckpt_path: ", ckpt_path)
print(result)
print()
print()
print("Final Result")
print(all_results)
elif model_checkpoint_path:
model_inference.load_checkpoint(checkpoint_path=model_checkpoint_path)
# Save as serialized module
model_inference.save_transformers_serialized(temp_dir, overwrite=True)
model_pb = tf.saved_model.load(temp_dir)
decoder = TextDecoder(model_pb)
result = predict_and_evaluate(
decoder, eval_dataset, tokenizer_layer, decoder_seq_len, tokenizer_layer.eos_token_id, mode
)
print("ckpt_path: ", model_checkpoint_path)
print(result)
if __name__ == "__main__":
run()
|
Subject38/jujube
|
src/Resources/SpriteSheet.cpp
|
<filename>src/Resources/SpriteSheet.cpp
#include "SpriteSheet.hpp"
#include <sstream>
#include <stdexcept>
namespace Resources {
    // Populate a SpriteSheet from its JSON metadata object.
    // Required keys: "sprite_sheet" (texture path), "count", "columns",
    // "rows". j.at() throws nlohmann::json::out_of_range for a missing key,
    // in this lookup order — keep the order stable.
    void from_json(const nlohmann::json& j, SpriteSheet& s) {
        s.tex_path = fs::path{j.at("sprite_sheet").get<std::string>()};
        j.at("count").get_to(s.count);
        j.at("columns").get_to(s.columns);
        j.at("rows").get_to(s.rows);
    }
    // Load the texture from 'folder'/tex_path and validate the metadata:
    //  - the texture's pixel size must match columns x rows tiles of 'size',
    //  - 'count' must fit within the columns x rows grid,
    //  - the animation must not exceed 'max_duration' at 'fps'.
    // Throws std::runtime_error on load failure and std::invalid_argument on
    // any metadata inconsistency.
    void SpriteSheet::load_and_check(
        const fs::path& folder,
        std::size_t size,
        std::size_t fps,
        const Toolkit::DurationInFrames& max_duration
    ) {
        // File Load & Check
        if (not tex.loadFromFile(folder/tex_path)) {
            throw std::runtime_error(
                "Cannot open file "
                +(folder/tex_path).string()
            );
        }
        tex.setSmooth(true);

        // Sprite sheet size check
        // throw if the texture size does not match what's announced by the metadata
        auto sheet_size = tex.getSize();
        auto expected_size = sf::Vector2u(columns, rows) * static_cast<unsigned int>(size);
        if (sheet_size != expected_size) {
            std::stringstream ss;
            ss << "Sprite sheet ";
            ss << (folder/tex_path).string();
            ss << " should be " << expected_size.x << "×" << expected_size.y << " pixels";
            ss << " but is " << sheet_size.x << "×" << sheet_size.y;
            throw std::invalid_argument(ss.str());
        }

        // Sprite count check
        // throw if the count calls for more sprites than possible according to the 'columns' and 'rows' fields
        if (count > columns * rows) {
            std::stringstream ss;
            ss << "Metadata for sprite sheet ";
            ss << (folder/tex_path).string();
            ss << " indicates that it holds " << count << " sprites";
            ss << " when it can only hold a maximum of " << columns * rows;
            ss << " according to the 'columns' and 'rows' fields";
            throw std::invalid_argument(ss.str());
        }

        // Duration check
        // We do not allow any marker animation to take longer than the jubeat standard of 16 frames at 30 fps
        // For that we make sure that :
        // frames/fps <= max_frames/reference_fps
        // Which is mathematically equivalent to checking that :
        // count*reference_fps <= max_frames*fps
        // Which allows us to avoid having to cast to float
        if (count*max_duration.fps > max_duration.frames*fps) {
            std::stringstream ss;
            ss << "Animation for sprite sheet ";
            ss << (folder/tex_path).string();
            ss << " lasts " << count/static_cast<float>(fps)*1000.f << "ms";
            ss << " (" << count << "f @ " << fps << "fps)";
            ss << " which is more than the maximum of ";
            ss << max_duration.frames/static_cast<float>(max_duration.fps)*1000.f << "ms";
            ss << " (16f @ 30fps)";
            throw std::invalid_argument(ss.str());
        }
    }
std::optional<sf::Sprite> SpriteSheet::get_sprite(std::size_t frame, std::size_t size) const {
if (frame < count) {
sf::Sprite sprite{tex};
sf::IntRect rect{
sf::Vector2i{
static_cast<int>(frame % columns),
static_cast<int>(frame / columns)
} * static_cast<int>(size),
sf::Vector2i{
static_cast<int>(size),
static_cast<int>(size)
}
};
sprite.setTextureRect(rect);
return sprite;
}
return {};
}
}
|
manh-vv/BroadleafCommerce
|
common/src/main/java/org/broadleafcommerce/common/sitemap/domain/SiteMapGeneratorConfiguration.java
|
/*
* #%L
* BroadleafCommerce Common Libraries
* %%
* Copyright (C) 2009 - 2016 Broadleaf Commerce
* %%
* Licensed under the Broadleaf Fair Use License Agreement, Version 1.0
* (the "Fair Use License" located at http://license.broadleafcommerce.org/fair_use_license-1.0.txt)
* unless the restrictions on use therein are violated and require payment to Broadleaf in which case
* the Broadleaf End User License Agreement (EULA), Version 1.1
* (the "Commercial License" located at http://license.broadleafcommerce.org/commercial_license-1.1.txt)
* shall apply.
*
* Alternatively, the Commercial License may be replaced with a mutually agreed upon license (the "Custom License")
* between you and Broadleaf Commerce. You may not use this file except in compliance with the applicable license.
* #L%
*/
package org.broadleafcommerce.common.sitemap.domain;
import org.broadleafcommerce.common.sitemap.service.type.SiteMapChangeFreqType;
import org.broadleafcommerce.common.sitemap.service.type.SiteMapGeneratorType;
import org.broadleafcommerce.common.sitemap.service.type.SiteMapPriorityType;
import java.io.Serializable;
/**
 * Configuration for a single site map generator. Sample URL tag generated and
 * controlled by this configuration:
 *
 * <url>
 * <loc>http://www.heatclinic.com/hot-sauces</loc>
 * <lastmod>2009-11-07</lastmod>
 * <changefreq>weekly</changefreq>
 * <priority>0.5</priority>
 * </url>
 *
 * @author bpolster
 */
public interface SiteMapGeneratorConfiguration extends Serializable {

    /**
     * Returns the SiteMapGeneratorConfiguration Id.
     *
     * @return the id
     */
    public Long getId();

    /**
     * Sets the SiteMapGeneratorConfiguration Id.
     *
     * @param id
     */
    public void setId(Long id);

    /**
     * Returns the "disabled" boolean.
     *
     * @return true when this generator configuration is disabled
     */
    public Boolean isDisabled();

    /**
     * Sets the "disabled" boolean.
     *
     * @param disabled
     */
    public void setDisabled(Boolean disabled);

    /**
     * Returns the change frequency emitted in the {@code <changefreq>} tag.
     *
     * @return the change frequency type
     */
    public SiteMapChangeFreqType getSiteMapChangeFreq();

    /**
     * Sets the change frequency emitted in the {@code <changefreq>} tag.
     *
     * @param siteMapChangeFreq
     */
    public void setSiteMapChangeFreq(SiteMapChangeFreqType siteMapChangeFreq);

    /**
     * Returns the SiteMapPriority.
     *
     * @return the priority type
     */
    public SiteMapPriorityType getSiteMapPriority();

    /**
     * Sets the SiteMapPriority. Must be a two digit value between 0.0 and 1.0.
     *
     * @param siteMapPriority
     */
    public void setSiteMapPriority(SiteMapPriorityType siteMapPriority);

    /**
     * Returns the generator type that this configuration applies to.
     *
     * @return the generator type
     */
    public SiteMapGeneratorType getSiteMapGeneratorType();

    /**
     * Sets the generator type that this configuration applies to.
     *
     * @param siteMapGeneratorType
     */
    public void setSiteMapGeneratorType(SiteMapGeneratorType siteMapGeneratorType);

    /**
     * Returns the parent SiteMapConfiguration.
     *
     * @return the owning configuration
     */
    public SiteMapConfiguration getSiteMapConfiguration();

    /**
     * Sets the parent SiteMapConfiguration.
     *
     * @param siteMapConfiguration
     */
    public void setSiteMapConfiguration(SiteMapConfiguration siteMapConfiguration);
}
|
lore/lore
|
packages/lore-hook-connect/src/errors/InvalidBlueprintError.js
|
<gh_stars>100-1000
export default function InvalidBlueprintError(invalidBlueprintName, valueBlueprintNames) {
const error = new Error(
`There was no blueprint found matching the name '${invalidBlueprintName}'.
Valid blueprints are: ${valueBlueprintNames.join(', ')}`
);
error.name = 'InvalidBlueprintError';
return error;
}
|
Beignet95/yangfan-project
|
src/main/java/com/ruoyi/project/pms/relation/domain/AsinTypeRelation.java
|
<filename>src/main/java/com/ruoyi/project/pms/relation/domain/AsinTypeRelation.java
package com.ruoyi.project.pms.relation.domain;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import com.ruoyi.framework.aspectj.lang.annotation.Excel;
import com.ruoyi.framework.web.domain.BaseEntity;
/**
 * ASIN-to-model relation entity, mapped to table pms_asin_type_relation.
 *
 * @author Beignet
 * @date 2021-01-22
 */
@Data
@AllArgsConstructor
@NoArgsConstructor
public class AsinTypeRelation extends BaseEntity
{
    private static final long serialVersionUID = 1L;

    /** Primary key. */
    private Long id;

    /** Model number. */
    @Excel(name = "型号")
    private String type;

    /** Amazon Standard Identification Number. */
    @Excel(name = "ASIN")
    private String asin;

    /** Deletion flag (1 = deleted, 2 = not deleted). */
    private Integer isdelete;

    // Explicit toString kept despite @Data so the multi-line style is preserved.
    @Override
    public String toString() {
        return new ToStringBuilder(this,ToStringStyle.MULTI_LINE_STYLE)
            .append("id", getId())
            .append("type", getType())
            .append("asin", getAsin())
            .append("isdelete", getIsdelete())
            .toString();
    }
}
|
meteoorkip/GROOVE
|
groove/src/groove/sts/Location.java
|
<reponame>meteoorkip/GROOVE
package groove.sts;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
/**
 * A location in the STS. It represents a Graph State stripped of data values.
 * <p>
 * Identity (equals/hashCode) is based solely on the label, so the label must
 * never change after construction.
 */
public class Location {

    /** Label identifying this location; also its equals/hashCode identity. */
    private final String label;
    /** Outgoing switch relations, each mapped to its set of target locations. */
    private final Map<SwitchRelation, Set<Location>> relations;

    /**
     * Creates a new instance.
     * @param label The label on this Location.
     */
    public Location(String label) {
        this.label = label;
        this.relations = new HashMap<>();
    }

    /**
     * Returns the possible Switch Relations from this Location.
     * @return The possible Switch Relations.
     */
    public Set<SwitchRelation> getSwitchRelations() {
        return this.relations.keySet();
    }

    /**
     * Gets the target Locations of the Switch Relation.
     * @param sr The Switch Relation.
     * @return The target Locations of sr, or null if sr is unknown.
     */
    public Set<Location> getRelationTargets(SwitchRelation sr) {
        return this.relations.get(sr);
    }

    /**
     * Adds a new outgoing Switch Relation from this Location.
     * @param sr The outgoing Switch Relation.
     * @param l The target Location of sr.
     */
    public void addSwitchRelation(SwitchRelation sr, Location l) {
        // Lazily create the target set on first use of this relation.
        Set<Location> set = this.relations.get(sr);
        if (set == null) {
            set = new HashSet<>();
            this.relations.put(sr, set);
        }
        set.add(l);
    }

    /**
     * Gets the label of this Location.
     * @return The label.
     */
    public String getLabel() {
        return this.label;
    }

    @Override
    public boolean equals(Object o) {
        if (!(o instanceof Location)) {
            return false;
        }
        return this.label.equals(((Location) o).getLabel());
    }

    @Override
    public int hashCode() {
        return getLabel().hashCode();
    }

    /**
     * Creates a JSON formatted string based on this Location.
     * @return The JSON string (the quoted label; label is not escaped).
     */
    public String toJSON() {
        return "\"" + this.label + "\"";
    }
}
|
astubbs/orient
|
core/src/main/java/com/orientechnologies/orient/core/record/impl/ODocument.java
|
/*
* Copyright 1999-2010 <NAME> (l.garulli--at--orientechnologies.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.orientechnologies.orient.core.record.impl;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map.Entry;
import java.util.Set;
import com.orientechnologies.common.log.OLogManager;
import com.orientechnologies.orient.core.db.record.ODatabaseRecord;
import com.orientechnologies.orient.core.exception.ODatabaseException;
import com.orientechnologies.orient.core.exception.ORecordNotFoundException;
import com.orientechnologies.orient.core.id.ORID;
import com.orientechnologies.orient.core.id.ORecordId;
import com.orientechnologies.orient.core.iterator.OEmptyIterator;
import com.orientechnologies.orient.core.metadata.schema.OClass;
import com.orientechnologies.orient.core.metadata.schema.OProperty;
import com.orientechnologies.orient.core.metadata.schema.OType;
import com.orientechnologies.orient.core.record.ORecord;
import com.orientechnologies.orient.core.record.ORecordVirtualAbstract;
import com.orientechnologies.orient.core.serialization.serializer.record.ORecordSerializerFactory;
import com.orientechnologies.orient.core.serialization.serializer.record.string.ORecordSerializerSchemaAware2CSV;
/**
 * ORecord implementation schema aware. It's able to handle records with, without or with a partial schema. Fields can be added at
 * run-time. Instances can be reused across calls by using the reset() before to re-use.
 */
@SuppressWarnings("unchecked")
public class ODocument extends ORecordVirtualAbstract<Object> implements Iterable<Entry<String, Object>> {
    public static final byte RECORD_TYPE = 'd';

    public ODocument() {
        setup();
    }

    /**
     * Creates a new instance by the raw stream usually read from the database. New instances are not persistent until {@link #save()}
     * is called.
     *
     * @param iSource
     *          Raw stream
     */
    public ODocument(final byte[] iSource) {
        super(iSource);
        setup();
    }

    /**
     * Creates a new instance and binds to the specified database. New instances are not persistent until {@link #save()} is called.
     *
     * @param iDatabase
     *          Database instance
     */
    public ODocument(final ODatabaseRecord<?> iDatabase) {
        super(iDatabase);
        setup();
    }

    /**
     * Creates a new instance in memory linked by the Record Id to the persistent one. New instances are not persistent until
     * {@link #save()} is called.
     *
     * @param iDatabase
     *          Database instance
     * @param iRID
     *          Record Id
     */
    public ODocument(final ODatabaseRecord<?> iDatabase, final ORID iRID) {
        this(iDatabase);
        _recordId = (ORecordId) iRID;
        _status = STATUS.NOT_LOADED;
    }

    /**
     * Creates a new instance in memory of the specified class, linked by the Record Id to the persistent one. New instances are not
     * persistent until {@link #save()} is called.
     *
     * @param iDatabase
     *          Database instance
     * @param iClassName
     *          Class name
     * @param iRID
     *          Record Id
     */
    public ODocument(final ODatabaseRecord<?> iDatabase, final String iClassName, final ORID iRID) {
        this(iDatabase, iClassName);
        _recordId = (ORecordId) iRID;
        _dirty = false;
        _status = STATUS.NOT_LOADED;
    }

    /**
     * Creates a new instance in memory of the specified class. New instances are not persistent until {@link #save()} is called.
     *
     * @param iDatabase
     *          Database instance
     * @param iClassName
     *          Class name
     */
    public ODocument(final ODatabaseRecord<?> iDatabase, final String iClassName) {
        super(iDatabase, iClassName);
        setup();
    }

    /**
     * Creates a new instance in memory of the specified schema class. New instances are not persistent until {@link #save()} is
     * called. The database reference is taken by the OClass instance received.
     *
     * @param iClass
     *          OClass instance
     */
    public ODocument(final OClass iClass) {
        super(iClass.getDocument().getDatabase());
        setup();
        _clazz = iClass;
    }

    /**
     * Copies the current instance to a new one.
     * <p>
     * NOTE(review): field values are shallow-copied; _fieldOriginalValues and
     * _fieldTypes are not copied — confirm whether the clone should track them.
     */
    public ODocument copy() {
        ODocument cloned = new ODocument();
        cloned._source = _source;
        cloned._database = _database;
        cloned._recordId = _recordId.copy();
        cloned._version = _version;
        cloned._dirty = _dirty;
        cloned._pinned = _pinned;
        cloned._clazz = _clazz;
        cloned._status = _status;
        cloned._recordFormat = _recordFormat;
        if (_fieldValues != null)
            cloned._fieldValues = new LinkedHashMap<String, Object>(_fieldValues);
        return cloned;
    }

    /**
     * Loads the record using a fetch plan.
     *
     * @throws ODatabaseException       if no database is assigned to this record
     * @throws ORecordNotFoundException if loading fails or returns nothing
     */
    public ODocument load(final String iFetchPlan) {
        if (_database == null)
            throw new ODatabaseException("No database assigned to current record");

        Object result = null;
        try {
            result = _database.load(this, iFetchPlan);
        } catch (Exception e) {
            throw new ORecordNotFoundException("The record with id '" + getIdentity() + "' was not found", e);
        }

        if (result == null)
            throw new ORecordNotFoundException("The record with id '" + getIdentity() + "' was not found");

        return this;
    }

    /**
     * Dumps the instance as string.
     */
    @Override
    public String toString() {
        checkForFields();

        StringBuilder buffer = new StringBuilder();

        buffer.append(_clazz == null ? "<unknown>" : _clazz.getName());

        if (_recordId != null) {
            buffer.append("@");
            // FIX: dropped the redundant inner `_recordId != null` re-check.
            if (_recordId.isValid())
                buffer.append(_recordId);
        }

        boolean first = true;
        for (Entry<String, Object> f : _fieldValues.entrySet()) {
            buffer.append(first ? "{" : ",");
            buffer.append(f.getKey());
            buffer.append(":");
            if (f.getValue() instanceof Collection<?>) {
                // Collections are summarized by size only.
                buffer.append("[");
                buffer.append(((Collection<?>) f.getValue()).size());
                buffer.append("]");
            } else if (f.getValue() instanceof ORecord<?>) {
                buffer.append("#");
                buffer.append(((ORecord<?>) f.getValue()).getIdentity());
            } else
                buffer.append(f.getValue());

            if (first)
                first = false;
        }
        if (!first)
            buffer.append("}");

        return buffer.toString();
    }

    /**
     * Returns the field number.
     */
    public int size() {
        return _fieldValues == null ? 0 : _fieldValues.size();
    }

    /**
     * Returns the set of field names.
     */
    public Set<String> fieldNames() {
        checkForLoading();
        checkForFields();

        return new HashSet<String>(_fieldValues.keySet());
    }

    /**
     * Returns the array of field values.
     */
    public Object[] fieldValues() {
        checkForLoading();
        checkForFields();

        Object[] result = new Object[_fieldValues.values().size()];
        return _fieldValues.values().toArray(result);
    }

    /**
     * Reads the field value. Dotted names ("a.b.c") traverse linked documents,
     * lazy-loading them as needed.
     *
     * @param iPropertyName
     *          field name
     * @return field value if defined, otherwise null
     */
    public <RET> RET field(final String iPropertyName) {
        checkForLoading();
        checkForFields();

        int separatorPos = iPropertyName.indexOf('.');
        if (separatorPos > -1) {
            // GET THE LINKED OBJECT IF ANY
            String fieldName = iPropertyName.substring(0, separatorPos);
            Object linkedObject = _fieldValues.get(fieldName);
            if (linkedObject == null || !(linkedObject instanceof ODocument))
                // IGNORE IT BY RETURNING NULL
                return null;
            ODocument linkedRecord = (ODocument) linkedObject;
            if (linkedRecord.getInternalStatus() == STATUS.NOT_LOADED)
                // LAZY LOAD IT
                linkedRecord.load();
            // CALL MYSELF RECURSIVELY BY CROSSING ALL THE OBJECTS
            return (RET) linkedRecord.field(iPropertyName.substring(separatorPos + 1));
        }

        RET value = (RET) _fieldValues.get(iPropertyName);
        if (value instanceof ORID) {
            // CREATE THE DOCUMENT OBJECT IN LAZY WAY
            value = (RET) new ODocument(_database, (ORID) value);
            _fieldValues.put(iPropertyName, value);
        }
        return value;
    }

    /**
     * Writes the field value.
     *
     * @param iPropertyName
     *          field name
     * @param iPropertyValue
     *          field value
     * @return The Record instance itself giving a "fluent interface". Useful to call multiple methods in chain.
     */
    public ODocument field(final String iPropertyName, Object iPropertyValue) {
        return field(iPropertyName, iPropertyValue, null);
    }

    /**
     * Writes the field value forcing the type.
     *
     * @param iPropertyName
     *          field name
     * @param iPropertyValue
     *          field value
     * @param iType
     *          Forced type (not auto-determined)
     * @return The Record instance itself giving a "fluent interface". Useful to call multiple methods in chain.
     */
    public ODocument field(final String iPropertyName, Object iPropertyValue, OType iType) {
        checkForLoading();
        checkForFields();

        final boolean knownProperty = _fieldValues.containsKey(iPropertyName);
        final Object oldValue = _fieldValues.get(iPropertyName);

        if (knownProperty)
            // CHECK IF IS REALLY CHANGED
            if (iPropertyValue == null) {
                if (oldValue == null)
                    // BOTH NULL: UNCHANGED
                    return this;
            } else {
                try {
                    if (iPropertyValue == oldValue)
                        // BOTH NULL: UNCHANGED
                        return this;
                } catch (Exception e) {
                    OLogManager.instance().warn(this, "Error on checking the value of property %s against the record %s", e, iPropertyName,
                            getIdentity());
                }
            }

        if (_clazz != null) {
            OProperty prop = _clazz.getProperty(iPropertyName);
            if (prop != null) {
                if (iPropertyValue instanceof Enum)
                    // ENUM
                    if (prop.getType().isAssignableFrom(""))
                        iPropertyValue = iPropertyValue.toString();
                    else if (prop.getType().isAssignableFrom(1))
                        iPropertyValue = ((Enum<?>) iPropertyValue).ordinal();

                if (!(iPropertyValue instanceof String) && !prop.getType().isAssignableFrom(iPropertyValue))
                    throw new IllegalArgumentException("Property '" + iPropertyName + "' of type '" + prop.getType()
                            + "' can't accept value of type: " + iPropertyValue.getClass());
            }
        }

        if (knownProperty && _trackingChanges) {
            // SAVE THE OLD VALUE IN A SEPARATE MAP
            if (_fieldOriginalValues == null)
                _fieldOriginalValues = new HashMap<String, Object>();
            _fieldOriginalValues.put(iPropertyName, oldValue);
        }

        if (_status != STATUS.UNMARSHALLING)
            setDirty();

        if (oldValue != null) {
            // DETERMINE THE TYPE FROM THE PREVIOUS CONTENT
            if (oldValue instanceof ORecord<?> && iPropertyValue instanceof String)
                // CONVERT TO RECORD-ID
                iPropertyValue = new ORecordId((String) iPropertyValue);
            else if (oldValue instanceof Collection<?> && iPropertyValue instanceof String) {
                // CONVERT TO COLLECTION
                // FIX: capture the incoming string BEFORE reassigning iPropertyValue.
                // The original code did `iPropertyValue = newValue;` first and then
                // cast iPropertyValue to String, which always threw
                // ClassCastException on this path.
                final String stringValue = (String) iPropertyValue;
                final List<ODocument> newValue = new ArrayList<ODocument>();
                iPropertyValue = newValue;

                if (stringValue != null && stringValue.length() > 0) {
                    final String[] items = stringValue.split(",");
                    for (String s : items) {
                        newValue.add(new ODocument(_database, new ORecordId(s)));
                    }
                }
            } else if (iPropertyValue instanceof Enum) {
                // ENUM: coerce to the representation used by the previous value.
                if (oldValue instanceof Number)
                    iPropertyValue = ((Enum<?>) iPropertyValue).ordinal();
                else
                    iPropertyValue = iPropertyValue.toString();
            }
        } else {
            if (iPropertyValue instanceof Enum)
                // ENUM
                iPropertyValue = iPropertyValue.toString();
        }

        _fieldValues.put(iPropertyName, iPropertyValue);

        if (iType != null) {
            // SAVE FORCED TYPE
            if (_fieldTypes == null)
                _fieldTypes = new HashMap<String, OType>();
            _fieldTypes.put(iPropertyName, iType);
        }

        return this;
    }

    /**
     * Removes a field.
     *
     * @return the removed value, or null if the field did not exist
     */
    public Object removeField(final String iPropertyName) {
        checkForLoading();
        checkForFields();

        final boolean knownProperty = _fieldValues.containsKey(iPropertyName);
        final Object oldValue = _fieldValues.get(iPropertyName);

        if (knownProperty && _trackingChanges) {
            // SAVE THE OLD VALUE IN A SEPARATE MAP
            if (_fieldOriginalValues == null)
                _fieldOriginalValues = new HashMap<String, Object>();
            _fieldOriginalValues.put(iPropertyName, oldValue);
        }

        _fieldValues.remove(iPropertyName);

        setDirty();
        return oldValue;
    }

    /**
     * Returns the names of the fields changed since loading, or null when
     * change tracking recorded nothing.
     */
    public Set<String> getDirtyFields() {
        return _fieldOriginalValues != null ? Collections.unmodifiableSet(_fieldOriginalValues.keySet()) : null;
    }

    /**
     * Returns the original value of a field before it has been changed.
     *
     * @param iPropertyName
     *          Property name to retrieve the original value
     */
    public Object getOriginalValue(final String iPropertyName) {
        return _fieldOriginalValues != null ? _fieldOriginalValues.get(iPropertyName) : null;
    }

    /**
     * Returns the iterator against the field entries as name and value.
     */
    public Iterator<Entry<String, Object>> iterator() {
        if (_fieldValues == null)
            return OEmptyIterator.INSTANCE;

        return _fieldValues.entrySet().iterator();
    }

    /**
     * Checks if a field exists.
     *
     * @return True if exists, otherwise false.
     */
    @Override
    public boolean containsField(final String iFieldName) {
        checkForFields();
        return _fieldValues.containsKey(iFieldName);
    }

    /**
     * Internal.
     */
    public byte getRecordType() {
        return RECORD_TYPE;
    }

    /**
     * Internal.
     */
    @Override
    protected void setup() {
        super.setup();
        _recordFormat = ORecordSerializerFactory.instance().getFormat(ORecordSerializerSchemaAware2CSV.NAME);
    }
}
|
Dunor/job4j
|
chapter_002/src/main/java/ru/job4j/tracker/ReplaceAction.java
|
<gh_stars>0
package ru.job4j.tracker;
/**
 * Tracker menu action that replaces an existing item (looked up by id)
 * with a freshly created one carrying the user-supplied name.
 */
public class ReplaceAction extends BaseAction {

    protected ReplaceAction(String name) {
        super(name);
    }

    @Override
    public boolean execute(Input input, Tracker tracker) {
        String id = input.askStr("Enter id: ");
        String name = input.askStr("Enter name: ");
        // Report success or failure of the replacement to the user.
        String outcome = tracker.replace(id, new Item(name))
                ? "Заявка изменина."
                : "Заявка с id: " + id + " не найдена.";
        System.out.println(outcome);
        return true;
    }
}
|
ShansOwn/android-architecture
|
app/src/main/java/com/shansown/androidarchitecture/ui/BaseActivity.java
|
<reponame>ShansOwn/android-architecture
package com.shansown.androidarchitecture.ui;
import android.annotation.TargetApi;
import android.graphics.Color;
import android.os.Build;
import android.os.Bundle;
import android.support.annotation.LayoutRes;
import android.support.design.widget.NavigationView;
import android.support.v4.view.GravityCompat;
import android.support.v4.widget.DrawerLayout;
import android.support.v7.app.ActionBarDrawerToggle;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.view.LayoutInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.view.Window;
import butterknife.ButterKnife;
import butterknife.InjectView;
import butterknife.Optional;
import com.shansown.androidarchitecture.R;
import com.shansown.androidarchitecture.di.Injector;
import com.shansown.androidarchitecture.di.component.ActivityComponent;
import javax.inject.Inject;
import timber.log.Timber;
/**
 * Base activity created to be extended by every activity in this application. This class provides
 * dependency injection configuration, ButterKnife Android library configuration and some methods
 * common to every activity.
 */
public abstract class BaseActivity extends AppCompatActivity
        implements NavigationView.OnNavigationItemSelectedListener {

    // Both views are @Optional: subclass layouts without a drawer simply leave them null.
    @Optional @InjectView(R.id.drawer_layout) DrawerLayout drawerLayout;
    @Optional @InjectView(R.id.navigation_view) NavigationView navigationView;

    // Injected container that hosts the activity's content view hierarchy.
    @Inject AppContainer appContainer;

    private Toolbar actionbarToolbar;
    private ActionBarDrawerToggle drawerToggle;

    @Override protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Order matters: dependencies must be injected before setupView() uses
        // appContainer, and views must be inflated before ButterKnife binds them.
        onInjectDependencies();
        setupView();
        injectViews();
        initActionbarToolbar();
        setupNavDrawer();
    }

    @Override public boolean onOptionsItemSelected(MenuItem item) {
        // Let the drawer toggle consume the home/hamburger button when present.
        return (drawerToggle != null && drawerToggle.onOptionsItemSelected(item));
    }

    @Override public boolean onNavigationItemSelected(MenuItem item) {
        int id = item.getItemId();
        item.setChecked(true);
        closeNavDrawer();
        switch (id) {
            case R.id.navigation_item_1:
                Timber.d("ITEM_1 drawer item selected");
                return true;
            case R.id.navigation_item_2:
                Timber.d("ITEM_2 drawer item selected");
                return true;
        }
        return false;
    }

    @Override public void onBackPressed() {
        // Back closes an open drawer before leaving the activity.
        if (isNavDrawerOpen()) {
            closeNavDrawer();
        } else {
            super.onBackPressed();
        }
    }

    /**
     * Get {@link Toolbar} that was set as actionbar with {@link #setSupportActionBar}
     *
     * @return {@link Toolbar}
     * @throws IllegalStateException if toolbar is not been initialized
     */
    protected Toolbar getActionbarToolbar() {
        if (actionbarToolbar == null)
            throw new IllegalStateException("Toolbar is not defined in your activity or fragment layout");
        return actionbarToolbar;
    }

    protected boolean isNavDrawerOpen() {
        return drawerLayout != null && drawerLayout.isDrawerOpen(GravityCompat.START);
    }

    protected void closeNavDrawer() {
        if (drawerLayout != null) {
            drawerLayout.closeDrawer(GravityCompat.START);
        }
    }

    /**
     * Get activity layout resource id which will be inflated as activity view
     *
     * @return layout resource id
     */
    @LayoutRes protected abstract int getLayoutId();

    /**
     * Best place to init Dagger Activity scope component
     * and inject the declared one in the activity if exist.
     */
    protected void onInjectDependencies() {
    }

    /**
     * Called when actionbar toolbar has been initialized
     *
     * @param toolbar Initialized toolbar as actionbar
     */
    protected void onActionbarToolbarInit(Toolbar toolbar) {
    }

    // Inflates the subclass layout into the container supplied by AppContainer.
    private void setupView() {
        LayoutInflater inflater = getLayoutInflater();
        ViewGroup container = appContainer.bind(this);
        inflater.inflate(getLayoutId(), container);
    }

    /**
     * Replace every field annotated with ButterKnife annotations like @InjectView with the proper
     * value.
     */
    private void injectViews() {
        ButterKnife.inject(this);
    }

    // Looks for a toolbar first in the activity layout, then in the fragment
    // layout; returns false when neither defines one.
    private boolean initActionbarToolbar() {
        actionbarToolbar = ButterKnife.findById(this, R.id.toolbar_activity);
        if (actionbarToolbar == null) {
            actionbarToolbar = ButterKnife.findById(this, R.id.toolbar_fragment);
        }
        if (actionbarToolbar == null) return false;
        setSupportActionBar(actionbarToolbar);
        //noinspection ConstantConditions
        getSupportActionBar().setDisplayHomeAsUpEnabled(true);
        getSupportActionBar().setHomeButtonEnabled(true);
        onActionbarToolbarInit(actionbarToolbar);
        return true;
    }

    /**
     * Set up navigation drawer if it's defined on layout
     *
     * @return <code>true</code> if the navigation drawer was setup, <code>false</code> otherwise.
     */
    private boolean setupNavDrawer() {
        if (drawerLayout == null) return false;
        drawerToggle = new ActionBarDrawerToggle(this, drawerLayout, actionbarToolbar, R.string.drawer_open,
                R.string.drawer_close) {
            public void onDrawerClosed(View v) {
                super.onDrawerClosed(v);
                invalidateOptionsMenu();
                syncState();
            }

            public void onDrawerOpened(View v) {
                super.onDrawerOpened(v);
                invalidateOptionsMenu();
                syncState();
            }
        };
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
            // Remove the status bar color. The DrawerLayout is responsible for drawing it from now on.
            setStatusBarColor(getWindow());
        }
        drawerLayout.setStatusBarBackgroundColor(getResources().getColor(R.color.theme_primary_dark));
        drawerLayout.setDrawerShadow(R.drawable.drawer_shadow, GravityCompat.START);
        drawerLayout.setDrawerListener(drawerToggle);
        navigationView.setNavigationItemSelectedListener(this);
        drawerToggle.syncState();
        return true;
    }

    @TargetApi(Build.VERSION_CODES.LOLLIPOP)
    private static void setStatusBarColor(Window window) {
        window.setStatusBarColor(Color.TRANSPARENT);
    }
}
|
yxiao1996/dev
|
catkin_ws/src/00-infrastructure/easy_node/include/easy_node/node_description/doc_generation.py
|
<reponame>yxiao1996/dev
import os
from duckietown_utils import logger
from duckietown_utils.constants import get_list_of_packages_in_catkin_ws
from duckietown_utils.read_package_xml import read_package_xml_info, Person, PackageXML
from .configuration import EasyNodeConfig, PROCESS_THREADED, load_configuration_for_nodes_in_package
def generate_easy_node_docs():
    """Generate a `.autogenerated.md` documentation file for every package
    in the catkin workspace, combining package-level info with per-node
    configuration sections."""
    skip = ['easier_node']
    packages = get_list_of_packages_in_catkin_ws()
    logger.info('Looking in %d packages for nodes.' % len(packages))
    # Iterate packages ordered by their directory path.
    for package_name in sorted(packages, key=lambda name: packages[name]):
        package_dir = packages[package_name]
        package_xml = os.path.join(package_dir, 'package.xml')
        package_info = read_package_xml_info(package_xml)
        pieces = [generate_from_package_info(package_info, package_dir)]
        configs = load_configuration_for_nodes_in_package(package_name)
        for node_name, config in configs.items():
            if node_name in skip:
                continue
            pieces.append(generate_from_node_config(config))
        out = os.path.join(package_dir, '.autogenerated.md')
        write_to_file_if_changed(out, ''.join(pieces))
# Git branch used when building GitHub links in the generated markdown.
branch = 'master'
# Standard separator between markdown fragments (blank line).
S = '\n\n'
def generate_from_package_info(info, package_dir):
    """Render the package-level markdown section: GitHub link, authors,
    maintainers and description."""
    assert isinstance(info, PackageXML)

    def appears_in(person, people):
        # People are matched by email address.
        return any(person.email == other.email for other in people)

    md = ''
    md += "<div id='%s-autogenerated' markdown='1'>\n\n" % (info.name)
    md += '\n<!-- do not edit this file, autogenerated -->\n\n'
    md += '## Package information ' + S
    parent = os.path.basename(os.path.dirname(package_dir))
    unique = '%s/%s' % (parent, info.name)
    md += '[Link to package on Github](github:org=duckietown,repo=Software,path=%s,branch=%s)' % (unique, branch) + S
    md += '### Essentials {nonumber="1"}' + S
    for p in info.authors:
        # Authors who also maintain the package are flagged inline.
        if appears_in(p, info.maintainers):
            md += 'Author: ' + format_person(p) + ' (maintainer)' + S
        else:
            md += 'Author: ' + format_person(p) + S
    for p in info.maintainers:
        # Maintainers already listed as authors were covered above.
        if not appears_in(p, info.authors):
            md += 'Maintainer: ' + format_person(p) + S
    md += '### Description {nonumber="1"}' + S
    if info.description:
        md += info.description + S
    else:
        md += 'TODO: no package description found.'
    md += S + '</div>' + S
    return md
def format_person(p):
    """Format a Person as markdown: a mailto link when an email is known,
    otherwise just the name."""
    assert isinstance(p, Person)
    if not p.email:
        return p.name
    return '[%s](mailto:%s)' % (p.name, p.email)
def generate_from_node_config(config):
    """Render the per-node markdown section for one EasyNodeConfig:
    provenance link, node description, and the configuration tables."""
    md = '<!-- file start -->\n\n'
    md += "<div id='%s-%s-autogenerated' markdown='1'>\n\n" % (
        config.package_name, config.node_type_name)
    md += '\n<!-- do not edit this file, autogenerated -->\n\n'
    assert isinstance(config, EasyNodeConfig)
    short = os.path.basename(config.filename)
    md += '(Generated from [configuration `%s`]' % short
    md += '(github:org=duckietown,repo=Software,path=%s,branch=%s).)' % (short, branch) + S
    if config.description is None:
        md += 'TODO: Missing node description in `%s`.\n\n' % short
    else:
        md += config.description + '\n\n'
    md += generate_configuration(config)
    md += '\n\n</div>'
    return md
def write_to_file_if_changed(filename, contents):
    """Write `contents` to `filename`, but only when the file is missing or
    its current contents differ (avoids touching mtimes needlessly).

    FIX: the original read with a bare `open(filename).read()`, leaking the
    file handle; both read and write now use context managers.
    """
    if os.path.exists(filename):
        with open(filename) as f:
            existing = f.read()
        need_write = existing != contents
    else:
        need_write = True
    if need_write:
        with open(filename, 'w') as f:
            f.write(contents)
        logger.info('Written to %s' % filename)
    else:
        logger.info('File already up to date %s' % filename)
def write_desc(x):
    """Placeholder markdown for a config entry `x` that lacks a description."""
    return 'TODO: Missing description for entry "`{}`".'.format(x.name)
# @contract(config=EasyNodeConfig)
def generate_configuration(config):
    """Render the Parameters / Subscriptions / Publishers markdown sections
    for one node configuration.

    Entries without a description get a TODO placeholder via write_desc().
    """
    assert isinstance(config, EasyNodeConfig)
    md = ""
    # Parameters that every EasyNode shares are prefixed 'en_' and skipped here.
    COMMON_PREFIX = 'en_'
    choose = [_ for _ in config.parameters.values(
    ) if not _.name.startswith(COMMON_PREFIX)]
    md += '### Parameters {nonumber="1"}' + S
    if not choose:
        md += 'No parameters defined.\n\n'
    for param in choose:
        md += "**Parameter `%s`**: " % param.name
        md += describe_type(param.type)
        if param.has_default:
            md += '; default value: `%r`' % param.default
        md += '\n\n'
        if param.desc:
            md += param.desc
        else:
            md += write_desc(param)
        md += '\n\n'
    md += '### Subscriptions {nonumber="1"}' + S
    if not config.subscriptions:
        md += 'No subscriptions defined.\n\n'
    for subscription in config.subscriptions.values():
        md += "**Subscription `%s`**: " % subscription.name
        md += 'topic `%s` (%s)\n\n' % (subscription.topic,
                                       describe_type(subscription.type))
        if subscription.desc:
            md += subscription.desc
        else:
            md += write_desc(subscription)
        md += '\n\n'
        # Threaded subscriptions are handled off the main callback thread.
        if subscription.process == PROCESS_THREADED:
            md += 'Note: The data is processed *asynchronously* in a different thread.\n\n'
    md += '### Publishers {nonumber="1"}' + S
    if not config.publishers:
        md += 'No publishers defined.\n\n'
    for publisher in config.publishers.values():
        md += "**Publisher `%s`**: " % publisher.name
        md += 'topic `%s` (%s)\n\n' % (publisher.topic,
                                       describe_type(publisher.type))
        if publisher.desc:
            md += publisher.desc
        else:
            md += write_desc(publisher)
        md += '\n\n'
    # md += '### Services \n\n'
    #
    #
    # md += '(EasyNode does not parse services.\n\n')
    return md
def describe_type(x):
    """Markdown-format a type object by name; None means the type is unknown."""
    return 'not known' if x is None else '`%s`' % x.__name__
|
mapbox/mr-ui
|
src/components/underline-tabs/underline-tabs-constants.js
|
// Allowed size variants for the underline-tabs component.
export const SIZE_SMALL = 'small';
export const SIZE_MEDIUM = 'medium';
export const SIZE_LARGE = 'large';
|
imalpasha/cj
|
carijodoh/src/main/java/com/fly/cj/utils/AESCBC.java
|
<gh_stars>0
package com.fly.cj.utils;
import android.util.Base64;
import android.util.Log;
import javax.crypto.Cipher;
import javax.crypto.spec.IvParameterSpec;
import javax.crypto.spec.SecretKeySpec;
public class AESCBC {
public static String encrypt(String key, String initVector, String value) {
try {
IvParameterSpec iv = new IvParameterSpec(initVector.getBytes("UTF-8"));
SecretKeySpec skeySpec = new SecretKeySpec(key.getBytes("UTF-8"), "AES");
Cipher cipher = Cipher.getInstance("AES/CBC/PKCS5PADDING");
cipher.init(Cipher.ENCRYPT_MODE, skeySpec, iv);
byte[] encrypted = cipher.doFinal(value.getBytes());
System.out.println("encrypted string: "+ Base64.encodeToString(encrypted,2));
return Base64.encodeToString(encrypted,2);
} catch (Exception ex) {
ex.printStackTrace();
Log.e("ex",ex.getMessage());
}
return null;
}
public static String decrypt(String key, String initVector, String encrypted) {
try {
IvParameterSpec iv = new IvParameterSpec(initVector.getBytes("UTF-8"));
SecretKeySpec skeySpec = new SecretKeySpec(key.getBytes("UTF-8"), "AES");
Cipher cipher = Cipher.getInstance("AES/CBC/PKCS5PADDING");
cipher.init(Cipher.DECRYPT_MODE, skeySpec, iv);
byte[] original = cipher.doFinal(Base64.decode(encrypted,2));
return new String(original);
} catch (Exception ex) {
ex.printStackTrace();
Log.e("Error",ex.getMessage());
}
return null;
}
public static void main(String[] args) {
String key = "Bar12345Bar12345"; // 128 bit key
String initVector = "RandomInitVector"; // 16 bytes IV
}
}
|
NoFaceGoose/TabletopGames
|
src/main/java/core/components/Component.java
|
<filename>src/main/java/core/components/Component.java
package core.components;
import core.properties.*;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import utilities.Utils.ComponentType;
import java.util.*;
public abstract class Component {

    // NOTE(review): the post-increment below is not synchronized; creating
    // components concurrently could hand out duplicate IDs -- confirm that
    // components are only constructed from a single thread.
    private static int ID = 0; // All components receive a unique and final ID from this always increasing counter

    protected final int componentID; // Unique ID of this component
    protected final ComponentType type; // Type of this component
    protected final HashMap<Integer, Property> properties; // Maps between integer key for the property and the property object
    protected int ownerId = -1; // By default belongs to the game
    protected String componentName; // Name of this component

    // Public constructor: assigns the next auto-generated component ID.
    public Component(ComponentType type, String name) {
        this.componentID = ID++;
        this.type = type;
        this.componentName = name;
        this.properties = new HashMap<>();
    }

    // Public constructor: as above, defaulting the name to the type's string form.
    public Component(ComponentType type) {
        this.componentID = ID++;
        this.type = type;
        this.componentName = type.toString();
        this.properties = new HashMap<>();
    }

    // Protected constructor: reuses an existing ID instead of generating one
    // (presumably used by subclass copy() implementations -- confirm).
    protected Component(ComponentType type, String name, int componentID) {
        this.componentID = componentID;
        this.type = type;
        this.componentName = name;
        this.properties = new HashMap<>();
    }

    protected Component(ComponentType type, int componentID) {
        this.componentID = componentID;
        this.type = type;
        this.componentName = type.toString();
        this.properties = new HashMap<>();
    }

    /**
     * To be implemented by subclass, all components should be able to create copies of themselves.
     * @return - a new Component with the same properties.
     */
    public abstract Component copy();

    /**
     * Get and set the type of this component.
     */
    public ComponentType getType() {
        return this.type;
    }

    /**
     * Get number of properties for this component.
     * @return - int, size of properties map.
     */
    public int getNumProperties()
    {
        return properties.size();
    }

    /**
     * Get and set the ID of the player who owns the deck (-1, the game's, by default).
     * @return - int, owner ID
     */
    public int getOwnerId() {
        return ownerId;
    }

    public void setOwnerId(int ownerId) {
        this.ownerId = ownerId;
    }

    /**
     * Get the ID of this component.
     * @return - component ID.
     */
    public int getComponentID() {
        return componentID;
    }

    /**
     * @return name of this component.
     */
    public String getComponentName() {
        return componentName;
    }

    /**
     * Sets the name of this component.
     * @param componentName - new name for this component.
     */
    public void setComponentName(String componentName) {
        this.componentName = componentName;
    }

    /**
     * Get the full map of properties.
     * Note: returns the internal mutable map; callers can modify this
     * component's state through it.
     * @return - mapping from property integer key to property objects.
     */
    public HashMap<Integer, Property> getProperties() {
        return properties;
    }

    /**
     * Gets a property from the properties.
     * @param propId id of the property to look for
     * @return the property value. Null if it doesn't exist.
     */
    public Property getProperty(int propId)
    {
        return properties.get(propId);
    }

    /**
     * Adds a property with an id and a Property object.
     * The map key is the property's own hash key, so an existing property
     * with the same key is replaced.
     * @param prop property to add
     */
    public void setProperty(Property prop)
    {
        properties.put(prop.getHashKey(), prop);
    }

    // Bulk variant of setProperty(); existing entries with matching keys are replaced.
    public void setProperties(HashMap<Integer, Property> props) {
        for (Property p: props.values()) {
            setProperty(p);
        }
    }

    // Convenience overload with no ignored keys.
    public static Component parseComponent(Component c, JSONObject obj) {
        return parseComponent(c, obj, new HashSet<>());
    }

    /**
     * Parses a Component object from a JSON object.
     * Each JSON value is expected to be a 2-element array [typeTag, value],
     * where typeTag is a string such as "String", "Integer[]" or "Long<>"
     * selecting the Property subclass to instantiate.
     * @param obj - JSON object to parse.
     * @return new Component object with properties as defined in JSON.
     */
    public static Component parseComponent(Component c, JSONObject obj, Set<String> ignoreKeys)
    {
        for(Object o : obj.keySet())
        {
            String key = (String)o;
            if (ignoreKeys.contains(key)) continue;
            // Non-array values (plain scalars) are skipped entirely.
            if(obj.get(key) instanceof JSONArray) {
                JSONArray value = (JSONArray) obj.get(key);
                String type = (String) value.get(0);
                Property prop = null;
                if (type.contains("[]")) // Array
                {
                    JSONArray values = (JSONArray) value.get(1);
                    if (type.contains("String")) {
                        prop = new PropertyStringArray(key, values);
                    } else if (type.contains("Integer")) {
                        prop = new PropertyIntArray(key, values);
                    } else if (type.contains("Long")) {
                        prop = new PropertyLongArray(key, values);
                    }
                    //More types of arrays to come.
                } else if (type.contains("<>")) { // We've got a list!
                    JSONArray values = (JSONArray) value.get(1);
                    if (type.contains("Integer")) {
                        prop = new PropertyIntArrayList(key, values);
                    } else if (type.contains("Long")) {
                        prop = new PropertyLongArrayList(key, values);
                    }
                } else {
                    // Scalar property: dispatch on the type tag.
                    if (type.contains("String")) {
                        prop = new PropertyString(key, (String) value.get(1));
                    } else if (type.contains("Color")) {
                        prop = new PropertyColor(key, (String) value.get(1));
                    } else if (type.contains("Vector2D")) {
                        prop = new PropertyVector2D(key, (JSONArray) value.get(1));
                    } else if (type.contains("Boolean")) {
                        prop = new PropertyBoolean(key, (boolean) value.get(1));
                    } else if (type.contains("Integer")) {
                        // JSON numbers arrive as Long; narrow to int.
                        prop = new PropertyInt(key, ((Long) value.get(1)).intValue());
                    } else if (type.contains("Long")) {
                        prop = new PropertyLong(key, (long) value.get(1));
                    }
                }
                // Unknown type tags are silently ignored (prop stays null).
                if (prop != null) {
                    c.setProperty(prop);
                }
            }
        }
        return c;
    }

    /**
     * Copies super class variables in given subclass instance.
     * Properties are deep-copied; componentID and type are NOT copied here
     * (they are set by the subclass constructor).
     * @param copyTo - subclass component instance
     */
    public void copyComponentTo(Component copyTo)
    {
        copyTo.properties.clear();
        for (int prop_key : this.properties.keySet()) {
            Property newProp = this.properties.get(prop_key).copy();
            copyTo.setProperty(newProp);
        }
        copyTo.ownerId = ownerId;
        copyTo.componentName = componentName;
    }

    @Override
    public String toString() {
        return "Component{" +
                "componentID=" + componentID +
                ", type=" + type +
                ", ownerId=" + ownerId +
                ", componentName='" + componentName + '\'' +
                ", properties=" + properties +
                '}';
    }

    // Equality (and hashCode below) is based solely on componentID.
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (!(o instanceof Component)) return false;
        Component component = (Component) o;
        return componentID == component.componentID;
    }

    @Override
    public int hashCode() {
        return componentID;
    }
}
|
vishnuk007/service-fabric
|
src/prod/src/Hosting2/PrincipalsProviderContext.cpp
|
<filename>src/prod/src/Hosting2/PrincipalsProviderContext.cpp<gh_stars>1000+
// ------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License (MIT). See License.txt in the repo root for license information.
// ------------------------------------------------------------
#include "stdafx.h"
using namespace std;
using namespace Common;
using namespace Hosting2;
using namespace Management;
using namespace ServiceModel;
StringLiteral const TracePrincipalsProviderContext("PrincipalsProviderContext");
// Default-constructs an empty provider context: no principals registered yet.
PrincipalsProviderContext::PrincipalsProviderContext()
    : securityPrincipalInformation_(),
    lock_()
{
}

PrincipalsProviderContext::~PrincipalsProviderContext()
{
}
// Registers copies of the given principals with this context.
// NOTE(review): no lock is taken here while the readers (TryGetPrincipalId)
// lock the same vector -- presumably this is only called during single-threaded
// setup before any readers exist; confirm call sites.
void PrincipalsProviderContext::AddSecurityPrincipals(vector<SecurityPrincipalInformation> const & principalsInfo)
{
    for(auto iter = principalsInfo.begin(); iter != principalsInfo.end(); ++iter)
    {
        auto principal = make_shared<SecurityPrincipalInformation>((*iter).Name, (*iter).SecurityPrincipalId, (*iter).SidString, (*iter).IsLocalUser);
        securityPrincipalInformation_.push_back(move(principal));
    }
}
// Looks up the principal registered under 'name' (case-insensitive) and, on
// success, writes its SecurityPrincipalId into 'userId'.
// Returns ApplicationPrincipalDoesNotExist when no such principal is known.
ErrorCode PrincipalsProviderContext::TryGetPrincipalId(wstring const & name, __out wstring & userId) const
{
    AcquireReadLock lock(lock_);
    for (auto const & info : securityPrincipalInformation_)
    {
        if (StringUtility::AreEqualCaseInsensitive(info->Name, name))
        {
            userId = info->SecurityPrincipalId;
            return ErrorCode(ErrorCodeValue::Success);
        }
    }
    return ErrorCode(ErrorCodeValue::ApplicationPrincipalDoesNotExist);
}
// Returns the cached SID objects for all registered principals.
// Asserts (fatal) if InitializeSecurityPrincipalSids() has not populated the
// cache yet, so callers must initialize first.
vector<SidSPtr> const & PrincipalsProviderContext::GetPrincipalSids()
{
    ASSERT_IF(principalSids_.size() != securityPrincipalInformation_.size(), "InitializaeSecurityPrincipalSids must be called before GetPrincipalSids");
    return principalSids_;
}
// Lazily builds the SID cache (principalSids_) from the registered principals'
// SID strings. On any conversion failure the cache is cleared and the error
// returned, so a later call can retry from scratch.
// NOTE(review): this is a double-checked-locking pattern on a plain vector's
// empty() -- the unlocked first check reads shared state without
// synchronization; confirm the intended threading model.
ErrorCode PrincipalsProviderContext::InitializeSecurityPrincipalSids()
{
    if(principalSids_.empty())
    {
        AcquireWriteLock lock(lock_);
        // Re-check under the lock in case another thread initialized first.
        if(principalSids_.empty())
        {
            for (auto it = securityPrincipalInformation_.begin(); it != securityPrincipalInformation_.end(); ++it)
            {
                SidSPtr userSid;
                auto error = BufferedSid::CreateSPtrFromStringSid((*it)->SidString, userSid);
                if(!error.IsSuccess())
                {
                    principalSids_.clear();
                    return error;
                }
                principalSids_.push_back(move(userSid));
            }
        }
    }
    return ErrorCode(ErrorCodeValue::Success);
}
// Looks up the full SecurityPrincipalInformation registered under 'name'
// (case-insensitive). Returns ApplicationPrincipalDoesNotExist if absent.
// Fix: take the read lock like TryGetPrincipalId does -- this method reads
// the same securityPrincipalInformation_ vector but was unsynchronized.
ErrorCode PrincipalsProviderContext::TryGetPrincipalInfo(
    wstring const & name,
    __out SecurityPrincipalInformationSPtr & principalInfo)
{
    AcquireReadLock lock(lock_);
    for (auto it = securityPrincipalInformation_.begin(); it != securityPrincipalInformation_.end(); ++it)
    {
        if (StringUtility::AreEqualCaseInsensitive((*it)->Name, name))
        {
            principalInfo = *it;
            return ErrorCode(ErrorCodeValue::Success);
        }
    }
    return ErrorCode(ErrorCodeValue::ApplicationPrincipalDoesNotExist);
}
|
xiaoming123m/XM
|
ssd/ydybj.js
|
/*
有道云笔记VIP功能
https://note.youdao.com/yws/(mapi/payment|api/self)
hostname: note.youdao.com
*/
// Rewrite intercepted Youdao Note API responses: the profile endpoint
// (/api/self) gets VIP flags set, and the payment endpoint (/mapi/payment)
// reports an active yearly subscription with a far-future expiry.
const path1 = "/api/self";
const path2 = "/mapi/payment";

// Parse the response body so individual fields can be patched.
const body = JSON.parse($response.body);

if ($request.url.includes(path1)) {
  body.vip = true;
  body.properties.FEED_BACK_ID = 1;
  body.properties.isvip = true;
}

if ($request.url.includes(path2)) {
  body.service = 1;
  body.payYear = true;
  body.end = 1867248816000;
}

$done({ body: JSON.stringify(body) });
|
DotMail/Puissant
|
Puissant/PSTAddMessageContentOperation.h
|
//
// PSTAddMessageContentOperation.h
// Puissant
//
// Created by <NAME> on 11/21/12.
// Copyright (c) 2012 CodaFi. All rights reserved.
//
#import "PSTStorageOperation.h"
@class MCOAbstractMessage;
/**
 * A concrete subclass of PSTStorageOperation that associates a given attachment's data with a
 * message's body, and writes its data to the database. Body content is the only acceptable data
 * to use for this operation, as using it will reload the preview for a given message cache.
 * Note: All properties are required.
 */
@interface PSTAddMessageContentOperation : PSTStorageOperation

/**
 * The attachment data to write to the database.
 * Note: This is a required property.
 */
@property (nonatomic, strong) NSData *data;

/**
 * The message to associate the receiver with in the database.
 */
@property (nonatomic, strong) MCOAbstractMessage *message;

/**
 * The folder path of the receiver.
 */
@property (nonatomic, strong) NSString *path;

@end
|
IamMayankThakur/test-bigdata
|
adminmgr/media/code/A2/python/task/BD_0064_1387_1416_IBwgwTi.py
|
from __future__ import print_function
from decimal import *
import re
import sys
from operator import add
from pyspark.sql import SparkSession
def computeContribs(urls, rank):
    """Yield (url, share) pairs, splitting ``rank`` evenly among ``urls``."""
    share = rank / len(urls)
    for url in urls:
        yield (url, share)
def parseNeighbors(urls):
    """Split a comma-separated edge line, returning its first two fields (src, dst)."""
    fields = re.split(r',', urls)
    return fields[0], fields[1]
def parseNeighbors1(urls):
    """From a 'src,dst,num,den' line return (dst, num/den rounded to 12 places)."""
    fields = re.split(r',', urls)
    ratio = round(float(fields[2]) / float(fields[3]), 12)
    return fields[1], ratio
if __name__ == "__main__":
    # Usage: pagerank <edge-file> <iterations> <damping-percent>
    # iterations == 0 means "iterate until convergence" (see the else branch).
    if len(sys.argv) != 4:
        print("Usage: pagerank <file> <iterations>", file=sys.stderr)
        sys.exit(-1)
    # Initialize the spark context.
    spark = SparkSession\
        .builder\
        .appName("PythonPageRank")\
        .getOrCreate()
    # Adjacency: src -> iterable of dst nodes.
    lines = spark.read.text(sys.argv[1]).rdd.map(lambda r: r[0])
    links = lines.map(lambda urls: parseNeighbors(urls)).distinct().groupByKey().cache()
    #to calculate default rank
    # Initial ranks come from the per-edge num/den ratios in the input file,
    # summed per destination node and floored at 1.0.
    lines1 = spark.read.text(sys.argv[1]).rdd.map(lambda r: r[0])
    links1 = lines1.map(lambda urls: parseNeighbors1(urls)).distinct().groupByKey().cache()
    #def_ranks = links1.map(lambda x:(x[0],max(round(sum(x[1]),12),1.0)))
    #default rank calculated
    def_ranks = links1.map(lambda x:(x[0],max(sum(x[1]),1.0)))
    iterations = int(sys.argv[2])
    # Damping factor, given on the command line as a percentage.
    p = float(sys.argv[3])/100
    if iterations!=0:
        # Fixed number of power-iteration steps with damping p.
        for iteration in range(iterations):
            contribs = links.join(def_ranks).flatMap(
                lambda url_urls_rank: computeContribs(url_urls_rank[1][0], url_urls_rank[1][1]))
            def_ranks = contribs.reduceByKey(add).mapValues(lambda rank: (rank *p + 1-p))
    else:
        # Iterate with a fixed 0.80 damping until every rank changes by
        # less than 1e-4 between consecutive iterations.
        j=0
        l=2
        while l!=0:
            contribs = links.join(def_ranks).flatMap(lambda url_urls_rank: computeContribs(url_urls_rank[1][0], url_urls_rank[1][1]))
            c1 = def_ranks.map(lambda x:x).sortByKey()
            def_ranks = contribs.reduceByKey(add).mapValues(lambda rank:(rank * 0.80 + 0.20))
            c2 = def_ranks.map(lambda x:x).sortByKey()
            diff=c1.join(c2).map(lambda x:abs(x[1][1]-x[1][0])).filter(lambda x:x>0.0001)
            l = len(diff.collect())
    # Emit "node,rank" sorted by descending rank, ties broken by node name.
    sort = def_ranks.sortBy(lambda a: (-a[1],a[0]))
    for (link, rank) in sort.collect():
        print("%s,%s" % (link, rank))
    spark.stop()
|
sadasystems/gcsb
|
pkg/config/pool.go
|
<reponame>sadasystems/gcsb
package config
import (
"time"
"github.com/hashicorp/go-multierror"
)
// Assert that Pool implements Validate
var _ Validate = (*Pool)(nil)

type (
	// Pool holds session-pool tuning options that are decoded from the
	// configuration file (mapstructure/yaml tags). Zero values presumably
	// defer to the client library's defaults -- confirm where Pool is applied.
	Pool struct {
		MaxOpened           int           `mapstructure:"max_opened" yaml:"max_opened"`
		MinOpened           int           `mapstructure:"min_opened" yaml:"min_opened"`
		MaxIdle             int           `mapstructure:"max_idle" yaml:"max_idle"`
		WriteSessions       float64       `mapstructure:"write_sessions" yaml:"write_sessions"`
		HealthcheckWorkers  int           `mapstructure:"healthcheck_workers" yaml:"healthcheck_workers"`
		HealthcheckInterval time.Duration `mapstructure:"healthcheck_interval" yaml:"healthcheck_interval"`
		TrackSessionHandles bool          `mapstructure:"track_session_handles" yaml:"track_session_handles"`
	}
)
// Validate checks the pool configuration for consistency. No rules are
// implemented yet (see TODO), so this currently always returns nil.
func (c *Pool) Validate() error {
	var result *multierror.Error
	// TODO: Validate pool config
	return result.ErrorOrNil()
}
|
frank20a/collaborative-sats
|
src/control/control/generic_mpc_generator/parameters.py
|
import casadi as cs
from control.parameters import force, torque, m, nx, nu, Icm
# Controller time step in seconds (5 Hz update rate).
dt = 1.0/5
# Number of look-ahead steps in the MPC optimization window.
mpc_horizon = 30
|
akshitgoyal/csc398nlp
|
venv/lib/python3.8/site-packages/openapi_client/api/personal_api.py
|
# coding: utf-8
"""
NamSor API v2
NamSor API v2 : enpoints to process personal names (gender, cultural origin or ethnicity) in all alphabets or languages. Use GET methods for small tests, but prefer POST methods for higher throughput (batch processing of up to 100 names at a time). Need something you can't find here? We have many more features coming soon. Let us know, we'll do our best to add it! # noqa: E501
OpenAPI spec version: 2.0.10
Contact: <EMAIL>
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from openapi_client.api_client import ApiClient
class PersonalApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
    def __init__(self, api_client=None):
        """Create the API facade, defaulting to a freshly configured ApiClient."""
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client
def country(self, personal_name_full, **kwargs): # noqa: E501
"""[USES 10 UNITS PER NAME] Infer the likely country of residence of a personal full name, or one surname. Assumes names as they are in the country of residence OR the country of origin. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.country(personal_name_full, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str personal_name_full: (required)
:return: PersonalNameGeoOut
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.country_with_http_info(personal_name_full, **kwargs) # noqa: E501
else:
(data) = self.country_with_http_info(personal_name_full, **kwargs) # noqa: E501
return data
    def country_with_http_info(self, personal_name_full, **kwargs):  # noqa: E501
        """[USES 10 UNITS PER NAME] Infer the likely country of residence of a personal full name, or one surname. Assumes names as they are in the country of residence OR the country of origin.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.country_with_http_info(personal_name_full, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str personal_name_full: (required)
        :return: PersonalNameGeoOut
                 If the method is called asynchronously,
                 returns the request thread.
        """
        local_var_params = locals()
        all_params = ['personal_name_full']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Reject unexpected keyword arguments, then fold the accepted ones
        # into the parameter map.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method country" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'personal_name_full' is set
        if ('personal_name_full' not in local_var_params or
                local_var_params['personal_name_full'] is None):
            raise ValueError("Missing the required parameter `personal_name_full` when calling `country`")  # noqa: E501

        # Build the pieces of the HTTP request: only a path parameter is used.
        collection_formats = {}
        path_params = {}
        if 'personal_name_full' in local_var_params:
            path_params['personalNameFull'] = local_var_params['personal_name_full']  # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501
        return self.api_client.call_api(
            '/api2/json/country/{personalNameFull}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='PersonalNameGeoOut',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def country_batch(self, **kwargs): # noqa: E501
"""[USES 10 UNITS PER NAME] Infer the likely country of residence of up to 100 personal full names, or surnames. Assumes names as they are in the country of residence OR the country of origin. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.country_batch(async_req=True)
>>> result = thread.get()
:param async_req bool
:param BatchPersonalNameIn batch_personal_name_in: A list of personal names
:return: BatchPersonalNameGeoOut
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.country_batch_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.country_batch_with_http_info(**kwargs) # noqa: E501
return data
    def country_batch_with_http_info(self, **kwargs):  # noqa: E501
        """[USES 10 UNITS PER NAME] Infer the likely country of residence of up to 100 personal full names, or surnames. Assumes names as they are in the country of residence OR the country of origin.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.country_batch_with_http_info(async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param BatchPersonalNameIn batch_personal_name_in: A list of personal names
        :return: BatchPersonalNameGeoOut
                 If the method is called asynchronously,
                 returns the request thread.
        """
        local_var_params = locals()
        all_params = ['batch_personal_name_in']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Reject unexpected keyword arguments, then fold the accepted ones
        # into the parameter map.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method country_batch" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']

        # Build the pieces of the HTTP request: the batch travels in the body.
        collection_formats = {}
        path_params = {}
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        if 'batch_personal_name_in' in local_var_params:
            body_params = local_var_params['batch_personal_name_in']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501
        return self.api_client.call_api(
            '/api2/json/countryBatch', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='BatchPersonalNameGeoOut',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def diaspora(self, country_iso2, first_name, last_name, **kwargs): # noqa: E501
"""[USES 20 UNITS PER NAME] Infer the likely ethnicity/diaspora of a personal name, given a country of residence ISO2 code (ex. US, CA, AU, NZ etc.) # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.diaspora(country_iso2, first_name, last_name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str country_iso2: (required)
:param str first_name: (required)
:param str last_name: (required)
:return: FirstLastNameDiasporaedOut
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.diaspora_with_http_info(country_iso2, first_name, last_name, **kwargs) # noqa: E501
else:
(data) = self.diaspora_with_http_info(country_iso2, first_name, last_name, **kwargs) # noqa: E501
return data
    def diaspora_with_http_info(self, country_iso2, first_name, last_name, **kwargs):  # noqa: E501
        """[USES 20 UNITS PER NAME] Infer the likely ethnicity/diaspora of a personal name, given a country of residence ISO2 code (ex. US, CA, AU, NZ etc.)  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.diaspora_with_http_info(country_iso2, first_name, last_name, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str country_iso2: (required)
        :param str first_name: (required)
        :param str last_name: (required)
        :return: FirstLastNameDiasporaedOut
                 If the method is called asynchronously,
                 returns the request thread.
        """
        local_var_params = locals()
        all_params = ['country_iso2', 'first_name', 'last_name']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Reject unexpected keyword arguments, then fold the accepted ones
        # into the parameter map.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method diaspora" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'country_iso2' is set
        if ('country_iso2' not in local_var_params or
                local_var_params['country_iso2'] is None):
            raise ValueError("Missing the required parameter `country_iso2` when calling `diaspora`")  # noqa: E501
        # verify the required parameter 'first_name' is set
        if ('first_name' not in local_var_params or
                local_var_params['first_name'] is None):
            raise ValueError("Missing the required parameter `first_name` when calling `diaspora`")  # noqa: E501
        # verify the required parameter 'last_name' is set
        if ('last_name' not in local_var_params or
                local_var_params['last_name'] is None):
            raise ValueError("Missing the required parameter `last_name` when calling `diaspora`")  # noqa: E501

        # Build the pieces of the HTTP request: three path parameters are used.
        collection_formats = {}
        path_params = {}
        if 'country_iso2' in local_var_params:
            path_params['countryIso2'] = local_var_params['country_iso2']  # noqa: E501
        if 'first_name' in local_var_params:
            path_params['firstName'] = local_var_params['first_name']  # noqa: E501
        if 'last_name' in local_var_params:
            path_params['lastName'] = local_var_params['last_name']  # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501
        return self.api_client.call_api(
            '/api2/json/diaspora/{countryIso2}/{firstName}/{lastName}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='FirstLastNameDiasporaedOut',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def diaspora_batch(self, **kwargs): # noqa: E501
"""[USES 20 UNITS PER NAME] Infer the likely ethnicity/diaspora of up to 100 personal names, given a country of residence ISO2 code (ex. US, CA, AU, NZ etc.) # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.diaspora_batch(async_req=True)
>>> result = thread.get()
:param async_req bool
:param BatchFirstLastNameGeoIn batch_first_last_name_geo_in: A list of personal names
:return: BatchFirstLastNameDiasporaedOut
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.diaspora_batch_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.diaspora_batch_with_http_info(**kwargs) # noqa: E501
return data
    def diaspora_batch_with_http_info(self, **kwargs):  # noqa: E501
        """[USES 20 UNITS PER NAME] Infer the likely ethnicity/diaspora of up to 100 personal names, given a country of residence ISO2 code (ex. US, CA, AU, NZ etc.)  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.diaspora_batch_with_http_info(async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param BatchFirstLastNameGeoIn batch_first_last_name_geo_in: A list of personal names
        :return: BatchFirstLastNameDiasporaedOut
                 If the method is called asynchronously,
                 returns the request thread.
        """
        local_var_params = locals()
        all_params = ['batch_first_last_name_geo_in']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Reject unexpected keyword arguments, then fold the accepted ones
        # into the parameter map.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method diaspora_batch" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']

        # Build the pieces of the HTTP request: the batch travels in the body.
        collection_formats = {}
        path_params = {}
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        if 'batch_first_last_name_geo_in' in local_var_params:
            body_params = local_var_params['batch_first_last_name_geo_in']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501
        return self.api_client.call_api(
            '/api2/json/diasporaBatch', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='BatchFirstLastNameDiasporaedOut',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def gender(self, first_name, last_name, **kwargs): # noqa: E501
"""Infer the likely gender of a name. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.gender(first_name, last_name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str first_name: (required)
:param str last_name: (required)
:return: FirstLastNameGenderedOut
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.gender_with_http_info(first_name, last_name, **kwargs) # noqa: E501
else:
(data) = self.gender_with_http_info(first_name, last_name, **kwargs) # noqa: E501
return data
def gender_with_http_info(self, first_name, last_name, **kwargs):  # noqa: E501
    """Infer the likely gender of a name.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.gender_with_http_info(first_name, last_name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str first_name: (required)
    :param str last_name: (required)
    :return: FirstLastNameGenderedOut
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: on an unrecognized keyword argument.
    :raises ValueError: if a required parameter is missing or None.
    """
    # Collect explicit arguments plus recognized keyword options into one
    # dict.  (Replaces the fragile generated pattern of mutating the dict
    # returned by locals(); behavior is unchanged.)
    allowed = ['first_name', 'last_name',
               'async_req', '_return_http_data_only',
               '_preload_content', '_request_timeout']
    params = {'first_name': first_name, 'last_name': last_name}
    for key, val in kwargs.items():  # dict.items() works on py2 and py3; six not needed
        if key not in allowed:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method gender" % key
            )
        params[key] = val

    # Both path parameters are mandatory.
    if params.get('first_name') is None:
        raise ValueError("Missing the required parameter `first_name` when calling `gender`")  # noqa: E501
    if params.get('last_name') is None:
        raise ValueError("Missing the required parameter `last_name` when calling `gender`")  # noqa: E501

    path_params = {'firstName': params['first_name'],
                   'lastName': params['last_name']}  # noqa: E501

    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
    }

    # Authentication setting
    auth_settings = ['api_key']  # noqa: E501

    return self.api_client.call_api(
        '/api2/json/gender/{firstName}/{lastName}', 'GET',
        path_params,
        [],  # query_params
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='FirstLastNameGenderedOut',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def gender_batch(self, **kwargs):  # noqa: E501
    """Infer the likely gender of up to 100 names, detecting automatically the cultural context.  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to get the request
    thread instead (call ``.get()`` on it for the result).

    >>> thread = api.gender_batch(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param BatchFirstLastNameIn batch_first_last_name_in: A list of personal names
    :return: BatchFirstLastNameGenderedOut
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: always unwrap to the payload only.
    kwargs['_return_http_data_only'] = True
    return self.gender_batch_with_http_info(**kwargs)  # noqa: E501
def gender_batch_with_http_info(self, **kwargs):  # noqa: E501
    """Infer the likely gender of up to 100 names, detecting automatically the cultural context.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.gender_batch_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param BatchFirstLastNameIn batch_first_last_name_in: A list of personal names
    :return: BatchFirstLastNameGenderedOut
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: on an unrecognized keyword argument.
    """
    # Validate and collect keyword options.  (Replaces the fragile
    # generated pattern of mutating the dict returned by locals();
    # behavior is unchanged.)
    allowed = ['batch_first_last_name_in',
               'async_req', '_return_http_data_only',
               '_preload_content', '_request_timeout']
    params = {}
    for key, val in kwargs.items():  # dict.items() works on py2 and py3; six not needed
        if key not in allowed:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method gender_batch" % key
            )
        params[key] = val

    # The batch payload (if provided) travels in the request body.
    body_params = params.get('batch_first_last_name_in')

    # HTTP headers `Accept` and `Content-Type`
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501
    }

    # Authentication setting
    auth_settings = ['api_key']  # noqa: E501

    return self.api_client.call_api(
        '/api2/json/genderBatch', 'POST',
        {},  # path_params
        [],  # query_params
        header_params,
        body=body_params,
        post_params=[],
        files={},
        response_type='BatchFirstLastNameGenderedOut',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def gender_full(self, full_name, **kwargs):  # noqa: E501
    """Infer the likely gender of a full name, ex. <NAME>  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to get the request
    thread instead (call ``.get()`` on it for the result).

    >>> thread = api.gender_full(full_name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str full_name: (required)
    :return: PersonalNameGenderedOut
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: always unwrap to the payload only.
    kwargs['_return_http_data_only'] = True
    return self.gender_full_with_http_info(full_name, **kwargs)  # noqa: E501
def gender_full_with_http_info(self, full_name, **kwargs):  # noqa: E501
    """Infer the likely gender of a full name, ex. <NAME>  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.gender_full_with_http_info(full_name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str full_name: (required)
    :return: PersonalNameGenderedOut
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: on an unrecognized keyword argument.
    :raises ValueError: if `full_name` is missing or None.
    """
    # Collect explicit arguments plus recognized keyword options.
    # (Replaces the fragile generated pattern of mutating the dict
    # returned by locals(); behavior is unchanged.)
    allowed = ['full_name',
               'async_req', '_return_http_data_only',
               '_preload_content', '_request_timeout']
    params = {'full_name': full_name}
    for key, val in kwargs.items():  # dict.items() works on py2 and py3; six not needed
        if key not in allowed:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method gender_full" % key
            )
        params[key] = val

    # The single path parameter is mandatory.
    if params.get('full_name') is None:
        raise ValueError("Missing the required parameter `full_name` when calling `gender_full`")  # noqa: E501

    path_params = {'fullName': params['full_name']}  # noqa: E501

    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
    }

    # Authentication setting
    auth_settings = ['api_key']  # noqa: E501

    return self.api_client.call_api(
        '/api2/json/genderFull/{fullName}', 'GET',
        path_params,
        [],  # query_params
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='PersonalNameGenderedOut',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def gender_full_batch(self, **kwargs):  # noqa: E501
    """Infer the likely gender of up to 100 full names, detecting automatically the cultural context.  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to get the request
    thread instead (call ``.get()`` on it for the result).

    >>> thread = api.gender_full_batch(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param BatchPersonalNameIn batch_personal_name_in: A list of personal names
    :return: BatchPersonalNameGenderedOut
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: always unwrap to the payload only.
    kwargs['_return_http_data_only'] = True
    return self.gender_full_batch_with_http_info(**kwargs)  # noqa: E501
def gender_full_batch_with_http_info(self, **kwargs):  # noqa: E501
    """Infer the likely gender of up to 100 full names, detecting automatically the cultural context.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.gender_full_batch_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param BatchPersonalNameIn batch_personal_name_in: A list of personal names
    :return: BatchPersonalNameGenderedOut
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: on an unrecognized keyword argument.
    """
    # Validate and collect keyword options.  (Replaces the fragile
    # generated pattern of mutating the dict returned by locals();
    # behavior is unchanged.)
    allowed = ['batch_personal_name_in',
               'async_req', '_return_http_data_only',
               '_preload_content', '_request_timeout']
    params = {}
    for key, val in kwargs.items():  # dict.items() works on py2 and py3; six not needed
        if key not in allowed:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method gender_full_batch" % key
            )
        params[key] = val

    # The batch payload (if provided) travels in the request body.
    body_params = params.get('batch_personal_name_in')

    # HTTP headers `Accept` and `Content-Type`
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501
    }

    # Authentication setting
    auth_settings = ['api_key']  # noqa: E501

    return self.api_client.call_api(
        '/api2/json/genderFullBatch', 'POST',
        {},  # path_params
        [],  # query_params
        header_params,
        body=body_params,
        post_params=[],
        files={},
        response_type='BatchPersonalNameGenderedOut',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def gender_full_geo(self, full_name, country_iso2, **kwargs):  # noqa: E501
    """Infer the likely gender of a full name, given a local context (ISO2 country code).  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to get the request
    thread instead (call ``.get()`` on it for the result).

    >>> thread = api.gender_full_geo(full_name, country_iso2, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str full_name: (required)
    :param str country_iso2: (required)
    :return: PersonalNameGenderedOut
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: always unwrap to the payload only.
    kwargs['_return_http_data_only'] = True
    return self.gender_full_geo_with_http_info(full_name, country_iso2, **kwargs)  # noqa: E501
def gender_full_geo_with_http_info(self, full_name, country_iso2, **kwargs):  # noqa: E501
    """Infer the likely gender of a full name, given a local context (ISO2 country code).  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.gender_full_geo_with_http_info(full_name, country_iso2, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str full_name: (required)
    :param str country_iso2: (required)
    :return: PersonalNameGenderedOut
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: on an unrecognized keyword argument.
    :raises ValueError: if a required parameter is missing or None.
    """
    # Collect explicit arguments plus recognized keyword options.
    # (Replaces the fragile generated pattern of mutating the dict
    # returned by locals(); behavior is unchanged.)
    allowed = ['full_name', 'country_iso2',
               'async_req', '_return_http_data_only',
               '_preload_content', '_request_timeout']
    params = {'full_name': full_name, 'country_iso2': country_iso2}
    for key, val in kwargs.items():  # dict.items() works on py2 and py3; six not needed
        if key not in allowed:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method gender_full_geo" % key
            )
        params[key] = val

    # Both path parameters are mandatory.
    if params.get('full_name') is None:
        raise ValueError("Missing the required parameter `full_name` when calling `gender_full_geo`")  # noqa: E501
    if params.get('country_iso2') is None:
        raise ValueError("Missing the required parameter `country_iso2` when calling `gender_full_geo`")  # noqa: E501

    path_params = {'fullName': params['full_name'],
                   'countryIso2': params['country_iso2']}  # noqa: E501

    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
    }

    # Authentication setting
    auth_settings = ['api_key']  # noqa: E501

    return self.api_client.call_api(
        '/api2/json/genderFullGeo/{fullName}/{countryIso2}', 'GET',
        path_params,
        [],  # query_params
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='PersonalNameGenderedOut',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def gender_full_geo_batch(self, **kwargs):  # noqa: E501
    """Infer the likely gender of up to 100 full names, with a given cultural context (country ISO2 code).  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to get the request
    thread instead (call ``.get()`` on it for the result).

    >>> thread = api.gender_full_geo_batch(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param BatchPersonalNameGeoIn batch_personal_name_geo_in: A list of personal names, with a country ISO2 code
    :return: BatchPersonalNameGenderedOut
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: always unwrap to the payload only.
    kwargs['_return_http_data_only'] = True
    return self.gender_full_geo_batch_with_http_info(**kwargs)  # noqa: E501
def gender_full_geo_batch_with_http_info(self, **kwargs):  # noqa: E501
    """Infer the likely gender of up to 100 full names, with a given cultural context (country ISO2 code).  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.gender_full_geo_batch_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param BatchPersonalNameGeoIn batch_personal_name_geo_in: A list of personal names, with a country ISO2 code
    :return: BatchPersonalNameGenderedOut
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: on an unrecognized keyword argument.
    """
    # Validate and collect keyword options.  (Replaces the fragile
    # generated pattern of mutating the dict returned by locals();
    # behavior is unchanged.)
    allowed = ['batch_personal_name_geo_in',
               'async_req', '_return_http_data_only',
               '_preload_content', '_request_timeout']
    params = {}
    for key, val in kwargs.items():  # dict.items() works on py2 and py3; six not needed
        if key not in allowed:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method gender_full_geo_batch" % key
            )
        params[key] = val

    # The batch payload (if provided) travels in the request body.
    body_params = params.get('batch_personal_name_geo_in')

    # HTTP headers `Accept` and `Content-Type`
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501
    }

    # Authentication setting
    auth_settings = ['api_key']  # noqa: E501

    return self.api_client.call_api(
        '/api2/json/genderFullGeoBatch', 'POST',
        {},  # path_params
        [],  # query_params
        header_params,
        body=body_params,
        post_params=[],
        files={},
        response_type='BatchPersonalNameGenderedOut',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def gender_geo(self, first_name, last_name, country_iso2, **kwargs):  # noqa: E501
    """Infer the likely gender of a name, given a local context (ISO2 country code).  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to get the request
    thread instead (call ``.get()`` on it for the result).

    >>> thread = api.gender_geo(first_name, last_name, country_iso2, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str first_name: (required)
    :param str last_name: (required)
    :param str country_iso2: (required)
    :return: FirstLastNameGenderedOut
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: always unwrap to the payload only.
    kwargs['_return_http_data_only'] = True
    return self.gender_geo_with_http_info(first_name, last_name, country_iso2, **kwargs)  # noqa: E501
def gender_geo_with_http_info(self, first_name, last_name, country_iso2, **kwargs):  # noqa: E501
    """Infer the likely gender of a name, given a local context (ISO2 country code).  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.gender_geo_with_http_info(first_name, last_name, country_iso2, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str first_name: (required)
    :param str last_name: (required)
    :param str country_iso2: (required)
    :return: FirstLastNameGenderedOut
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: on an unrecognized keyword argument.
    :raises ValueError: if a required parameter is missing or None.
    """
    # Collect explicit arguments plus recognized keyword options.
    # (Replaces the fragile generated pattern of mutating the dict
    # returned by locals(); behavior is unchanged.)
    allowed = ['first_name', 'last_name', 'country_iso2',
               'async_req', '_return_http_data_only',
               '_preload_content', '_request_timeout']
    params = {'first_name': first_name, 'last_name': last_name,
              'country_iso2': country_iso2}
    for key, val in kwargs.items():  # dict.items() works on py2 and py3; six not needed
        if key not in allowed:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method gender_geo" % key
            )
        params[key] = val

    # All three path parameters are mandatory.
    if params.get('first_name') is None:
        raise ValueError("Missing the required parameter `first_name` when calling `gender_geo`")  # noqa: E501
    if params.get('last_name') is None:
        raise ValueError("Missing the required parameter `last_name` when calling `gender_geo`")  # noqa: E501
    if params.get('country_iso2') is None:
        raise ValueError("Missing the required parameter `country_iso2` when calling `gender_geo`")  # noqa: E501

    path_params = {'firstName': params['first_name'],
                   'lastName': params['last_name'],
                   'countryIso2': params['country_iso2']}  # noqa: E501

    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
    }

    # Authentication setting
    auth_settings = ['api_key']  # noqa: E501

    return self.api_client.call_api(
        '/api2/json/genderGeo/{firstName}/{lastName}/{countryIso2}', 'GET',
        path_params,
        [],  # query_params
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='FirstLastNameGenderedOut',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def gender_geo_batch(self, **kwargs):  # noqa: E501
    """Infer the likely gender of up to 100 names, each given a local context (ISO2 country code).  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to get the request
    thread instead (call ``.get()`` on it for the result).

    >>> thread = api.gender_geo_batch(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param BatchFirstLastNameGeoIn batch_first_last_name_geo_in: A list of names, with country code.
    :return: BatchFirstLastNameGenderedOut
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: always unwrap to the payload only.
    kwargs['_return_http_data_only'] = True
    return self.gender_geo_batch_with_http_info(**kwargs)  # noqa: E501
def gender_geo_batch_with_http_info(self, **kwargs):  # noqa: E501
    """Infer the likely gender of up to 100 names, each given a local context (ISO2 country code).  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.gender_geo_batch_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param BatchFirstLastNameGeoIn batch_first_last_name_geo_in: A list of names, with country code.
    :return: BatchFirstLastNameGenderedOut
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: on an unrecognized keyword argument.
    """
    # Validate and collect keyword options.  (Replaces the fragile
    # generated pattern of mutating the dict returned by locals();
    # behavior is unchanged.)
    allowed = ['batch_first_last_name_geo_in',
               'async_req', '_return_http_data_only',
               '_preload_content', '_request_timeout']
    params = {}
    for key, val in kwargs.items():  # dict.items() works on py2 and py3; six not needed
        if key not in allowed:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method gender_geo_batch" % key
            )
        params[key] = val

    # The batch payload (if provided) travels in the request body.
    body_params = params.get('batch_first_last_name_geo_in')

    # HTTP headers `Accept` and `Content-Type`
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501
    }

    # Authentication setting
    auth_settings = ['api_key']  # noqa: E501

    return self.api_client.call_api(
        '/api2/json/genderGeoBatch', 'POST',
        {},  # path_params
        [],  # query_params
        header_params,
        body=body_params,
        post_params=[],
        files={},
        response_type='BatchFirstLastNameGenderedOut',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def origin(self, first_name, last_name, **kwargs):  # noqa: E501
    """[USES 10 UNITS PER NAME] Infer the likely country of origin of a personal name. Assumes names as they are in the country of origin. For US, CA, AU, NZ and other melting-pots : use 'diaspora' instead.  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to get the request
    thread instead (call ``.get()`` on it for the result).

    >>> thread = api.origin(first_name, last_name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str first_name: (required)
    :param str last_name: (required)
    :return: FirstLastNameOriginedOut
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: always unwrap to the payload only.
    kwargs['_return_http_data_only'] = True
    return self.origin_with_http_info(first_name, last_name, **kwargs)  # noqa: E501
def origin_with_http_info(self, first_name, last_name, **kwargs):  # noqa: E501
    """[USES 10 UNITS PER NAME] Infer the likely country of origin of a personal name. Assumes names as they are in the country of origin. For US, CA, AU, NZ and other melting-pots : use 'diaspora' instead.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.origin_with_http_info(first_name, last_name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str first_name: (required)
    :param str last_name: (required)
    :return: FirstLastNameOriginedOut
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: on an unrecognized keyword argument.
    :raises ValueError: if a required parameter is missing or None.
    """
    # Collect explicit arguments plus recognized keyword options.
    # (Replaces the fragile generated pattern of mutating the dict
    # returned by locals(); behavior is unchanged.)
    allowed = ['first_name', 'last_name',
               'async_req', '_return_http_data_only',
               '_preload_content', '_request_timeout']
    params = {'first_name': first_name, 'last_name': last_name}
    for key, val in kwargs.items():  # dict.items() works on py2 and py3; six not needed
        if key not in allowed:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method origin" % key
            )
        params[key] = val

    # Both path parameters are mandatory.
    if params.get('first_name') is None:
        raise ValueError("Missing the required parameter `first_name` when calling `origin`")  # noqa: E501
    if params.get('last_name') is None:
        raise ValueError("Missing the required parameter `last_name` when calling `origin`")  # noqa: E501

    path_params = {'firstName': params['first_name'],
                   'lastName': params['last_name']}  # noqa: E501

    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
    }

    # Authentication setting
    auth_settings = ['api_key']  # noqa: E501

    return self.api_client.call_api(
        '/api2/json/origin/{firstName}/{lastName}', 'GET',
        path_params,
        [],  # query_params
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='FirstLastNameOriginedOut',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def origin_batch(self, **kwargs):  # noqa: E501
    """[USES 10 UNITS PER NAME] Infer the likely country of origin of up to 100 names, detecting automatically the cultural context.  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to get the request
    thread instead (call ``.get()`` on it for the result).

    >>> thread = api.origin_batch(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param BatchFirstLastNameIn batch_first_last_name_in: A list of personal names
    :return: BatchFirstLastNameOriginedOut
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: always unwrap to the payload only.
    kwargs['_return_http_data_only'] = True
    return self.origin_batch_with_http_info(**kwargs)  # noqa: E501
def origin_batch_with_http_info(self, **kwargs):  # noqa: E501
    """[USES 10 UNITS PER NAME] Infer the likely country of origin of up to 100 names, detecting automatically the cultural context.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.origin_batch_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param BatchFirstLastNameIn batch_first_last_name_in: A list of personal names
    :return: BatchFirstLastNameOriginedOut
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: on an unrecognized keyword argument.
    """
    # Validate and collect keyword options.  (Replaces the fragile
    # generated pattern of mutating the dict returned by locals();
    # behavior is unchanged.)
    allowed = ['batch_first_last_name_in',
               'async_req', '_return_http_data_only',
               '_preload_content', '_request_timeout']
    params = {}
    for key, val in kwargs.items():  # dict.items() works on py2 and py3; six not needed
        if key not in allowed:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method origin_batch" % key
            )
        params[key] = val

    # The batch payload (if provided) travels in the request body.
    body_params = params.get('batch_first_last_name_in')

    # HTTP headers `Accept` and `Content-Type`
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501
    }

    # Authentication setting
    auth_settings = ['api_key']  # noqa: E501

    return self.api_client.call_api(
        '/api2/json/originBatch', 'POST',
        {},  # path_params
        [],  # query_params
        header_params,
        body=body_params,
        post_params=[],
        files={},
        response_type='BatchFirstLastNameOriginedOut',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def parse_name(self, name_full, **kwargs): # noqa: E501
"""Infer the likely first/last name structure of a name, ex. <NAME> or SMITH, John or SMITH; John. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.parse_name(name_full, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name_full: (required)
:return: PersonalNameParsedOut
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.parse_name_with_http_info(name_full, **kwargs) # noqa: E501
else:
(data) = self.parse_name_with_http_info(name_full, **kwargs) # noqa: E501
return data
    def parse_name_with_http_info(self, name_full, **kwargs):  # noqa: E501
        """Infer the likely first/last name structure of a name, ex. John Smith or SMITH, John or SMITH; John.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.parse_name_with_http_info(name_full, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str name_full: (required)
        :return: PersonalNameParsedOut
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Snapshot of the explicit arguments (self, name_full, kwargs).
        # Must run before any other local is defined, since locals()
        # captures everything bound at this point.
        local_var_params = locals()
        all_params = ['name_full'] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Merge kwargs into the snapshot, rejecting unknown keywords early.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method parse_name" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'name_full' is set
        if ('name_full' not in local_var_params or
                local_var_params['name_full'] is None):
            raise ValueError("Missing the required parameter `name_full` when calling `parse_name`")  # noqa: E501
        collection_formats = {}
        # The name is interpolated into the URL path by the API client.
        path_params = {}
        if 'name_full' in local_var_params:
            path_params['nameFull'] = local_var_params['name_full']  # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501
        # Delegate transport, (de)serialization and optional threading to
        # the shared API client.
        return self.api_client.call_api(
            '/api2/json/parseName/{nameFull}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='PersonalNameParsedOut',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def parse_name_batch(self, **kwargs): # noqa: E501
"""Infer the likely first/last name structure of a name, ex. <NAME> or SMITH, John or SMITH; John. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.parse_name_batch(async_req=True)
>>> result = thread.get()
:param async_req bool
:param BatchPersonalNameIn batch_personal_name_in: A list of personal names
:return: BatchPersonalNameParsedOut
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.parse_name_batch_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.parse_name_batch_with_http_info(**kwargs) # noqa: E501
return data
    def parse_name_batch_with_http_info(self, **kwargs):  # noqa: E501
        """Infer the likely first/last name structure of a name, ex. John Smith or SMITH, John or SMITH; John.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.parse_name_batch_with_http_info(async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param BatchPersonalNameIn batch_personal_name_in: A list of personal names
        :return: BatchPersonalNameParsedOut
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Snapshot of the explicit arguments; must run before any other
        # local variable is defined.
        local_var_params = locals()
        all_params = ['batch_personal_name_in'] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Merge kwargs into the snapshot, rejecting unknown keywords early.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method parse_name_batch" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        collection_formats = {}
        path_params = {}
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # The optional batch payload is sent as the JSON request body.
        if 'batch_personal_name_in' in local_var_params:
            body_params = local_var_params['batch_personal_name_in']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501
        # Delegate transport, (de)serialization and optional threading to
        # the shared API client.
        return self.api_client.call_api(
            '/api2/json/parseNameBatch', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='BatchPersonalNameParsedOut',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def parse_name_geo(self, name_full, country_iso2, **kwargs): # noqa: E501
"""Infer the likely first/last name structure of a name, ex. <NAME> or SMITH, John or SMITH; John. For better accuracy, provide a geographic context. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.parse_name_geo(name_full, country_iso2, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name_full: (required)
:param str country_iso2: (required)
:return: PersonalNameParsedOut
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.parse_name_geo_with_http_info(name_full, country_iso2, **kwargs) # noqa: E501
else:
(data) = self.parse_name_geo_with_http_info(name_full, country_iso2, **kwargs) # noqa: E501
return data
    def parse_name_geo_with_http_info(self, name_full, country_iso2, **kwargs):  # noqa: E501
        """Infer the likely first/last name structure of a name, ex. John Smith or SMITH, John or SMITH; John. For better accuracy, provide a geographic context.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.parse_name_geo_with_http_info(name_full, country_iso2, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str name_full: (required)
        :param str country_iso2: (required)
        :return: PersonalNameParsedOut
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Snapshot of the explicit arguments; must run before any other
        # local variable is defined.
        local_var_params = locals()
        all_params = ['name_full', 'country_iso2'] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Merge kwargs into the snapshot, rejecting unknown keywords early.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method parse_name_geo" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'name_full' is set
        if ('name_full' not in local_var_params or
                local_var_params['name_full'] is None):
            raise ValueError("Missing the required parameter `name_full` when calling `parse_name_geo`")  # noqa: E501
        # verify the required parameter 'country_iso2' is set
        if ('country_iso2' not in local_var_params or
                local_var_params['country_iso2'] is None):
            raise ValueError("Missing the required parameter `country_iso2` when calling `parse_name_geo`")  # noqa: E501
        collection_formats = {}
        # Both values are interpolated into the URL path by the API client.
        path_params = {}
        if 'name_full' in local_var_params:
            path_params['nameFull'] = local_var_params['name_full']  # noqa: E501
        if 'country_iso2' in local_var_params:
            path_params['countryIso2'] = local_var_params['country_iso2']  # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501
        # Delegate transport, (de)serialization and optional threading to
        # the shared API client.
        return self.api_client.call_api(
            '/api2/json/parseName/{nameFull}/{countryIso2}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='PersonalNameParsedOut',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def parse_name_geo_batch(self, **kwargs): # noqa: E501
"""Infer the likely first/last name structure of a name, ex. <NAME> or SMITH, John or SMITH; John. Giving a local context improves precision. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.parse_name_geo_batch(async_req=True)
>>> result = thread.get()
:param async_req bool
:param BatchPersonalNameGeoIn batch_personal_name_geo_in: A list of personal names
:return: BatchPersonalNameParsedOut
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.parse_name_geo_batch_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.parse_name_geo_batch_with_http_info(**kwargs) # noqa: E501
return data
    def parse_name_geo_batch_with_http_info(self, **kwargs):  # noqa: E501
        """Infer the likely first/last name structure of a name, ex. John Smith or SMITH, John or SMITH; John. Giving a local context improves precision.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.parse_name_geo_batch_with_http_info(async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param BatchPersonalNameGeoIn batch_personal_name_geo_in: A list of personal names
        :return: BatchPersonalNameParsedOut
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Snapshot of the explicit arguments; must run before any other
        # local variable is defined.
        local_var_params = locals()
        all_params = ['batch_personal_name_geo_in'] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Merge kwargs into the snapshot, rejecting unknown keywords early.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method parse_name_geo_batch" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        collection_formats = {}
        path_params = {}
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # The optional batch payload is sent as the JSON request body.
        if 'batch_personal_name_geo_in' in local_var_params:
            body_params = local_var_params['batch_personal_name_geo_in']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501
        # Delegate transport, (de)serialization and optional threading to
        # the shared API client.
        return self.api_client.call_api(
            '/api2/json/parseNameGeoBatch', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='BatchPersonalNameParsedOut',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def parsed_gender_batch(self, **kwargs): # noqa: E501
"""Infer the likely gender of up to 100 fully parsed names, detecting automatically the cultural context. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.parsed_gender_batch(async_req=True)
>>> result = thread.get()
:param async_req bool
:param BatchParsedFullNameIn batch_parsed_full_name_in: A list of personal names
:return: BatchFirstLastNameGenderedOut
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.parsed_gender_batch_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.parsed_gender_batch_with_http_info(**kwargs) # noqa: E501
return data
    def parsed_gender_batch_with_http_info(self, **kwargs):  # noqa: E501
        """Infer the likely gender of up to 100 fully parsed names, detecting automatically the cultural context.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.parsed_gender_batch_with_http_info(async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param BatchParsedFullNameIn batch_parsed_full_name_in: A list of personal names
        :return: BatchFirstLastNameGenderedOut
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Snapshot of the explicit arguments; must run before any other
        # local variable is defined.
        local_var_params = locals()
        all_params = ['batch_parsed_full_name_in'] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Merge kwargs into the snapshot, rejecting unknown keywords early.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method parsed_gender_batch" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        collection_formats = {}
        path_params = {}
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # The optional batch payload is sent as the JSON request body.
        if 'batch_parsed_full_name_in' in local_var_params:
            body_params = local_var_params['batch_parsed_full_name_in']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501
        # Delegate transport, (de)serialization and optional threading to
        # the shared API client.
        return self.api_client.call_api(
            '/api2/json/parsedGenderBatch', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='BatchFirstLastNameGenderedOut',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def parsed_gender_geo_batch(self, **kwargs): # noqa: E501
"""Infer the likely gender of up to 100 fully parsed names, detecting automatically the cultural context. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.parsed_gender_geo_batch(async_req=True)
>>> result = thread.get()
:param async_req bool
:param BatchParsedFullNameGeoIn batch_parsed_full_name_geo_in: A list of personal names
:return: BatchFirstLastNameGenderedOut
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.parsed_gender_geo_batch_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.parsed_gender_geo_batch_with_http_info(**kwargs) # noqa: E501
return data
    def parsed_gender_geo_batch_with_http_info(self, **kwargs):  # noqa: E501
        """Infer the likely gender of up to 100 fully parsed names, detecting automatically the cultural context.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.parsed_gender_geo_batch_with_http_info(async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param BatchParsedFullNameGeoIn batch_parsed_full_name_geo_in: A list of personal names
        :return: BatchFirstLastNameGenderedOut
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Snapshot of the explicit arguments; must run before any other
        # local variable is defined.
        local_var_params = locals()
        all_params = ['batch_parsed_full_name_geo_in'] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Merge kwargs into the snapshot, rejecting unknown keywords early.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method parsed_gender_geo_batch" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        collection_formats = {}
        path_params = {}
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # The optional batch payload is sent as the JSON request body.
        if 'batch_parsed_full_name_geo_in' in local_var_params:
            body_params = local_var_params['batch_parsed_full_name_geo_in']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501
        # Delegate transport, (de)serialization and optional threading to
        # the shared API client.
        return self.api_client.call_api(
            '/api2/json/parsedGenderGeoBatch', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='BatchFirstLastNameGenderedOut',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def us_race_ethnicity(self, first_name, last_name, **kwargs): # noqa: E501
"""[USES 10 UNITS PER NAME] Infer a US resident's likely race/ethnicity according to US Census taxonomy W_NL (white, non latino), HL (hispano latino), A (asian, non latino), B_NL (black, non latino). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.us_race_ethnicity(first_name, last_name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str first_name: (required)
:param str last_name: (required)
:return: FirstLastNameUSRaceEthnicityOut
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.us_race_ethnicity_with_http_info(first_name, last_name, **kwargs) # noqa: E501
else:
(data) = self.us_race_ethnicity_with_http_info(first_name, last_name, **kwargs) # noqa: E501
return data
    def us_race_ethnicity_with_http_info(self, first_name, last_name, **kwargs):  # noqa: E501
        """[USES 10 UNITS PER NAME] Infer a US resident's likely race/ethnicity according to US Census taxonomy W_NL (white, non latino), HL (hispano latino), A (asian, non latino), B_NL (black, non latino).  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.us_race_ethnicity_with_http_info(first_name, last_name, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str first_name: (required)
        :param str last_name: (required)
        :return: FirstLastNameUSRaceEthnicityOut
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Snapshot of the explicit arguments; must run before any other
        # local variable is defined.
        local_var_params = locals()
        all_params = ['first_name', 'last_name'] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Merge kwargs into the snapshot, rejecting unknown keywords early.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method us_race_ethnicity" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'first_name' is set
        if ('first_name' not in local_var_params or
                local_var_params['first_name'] is None):
            raise ValueError("Missing the required parameter `first_name` when calling `us_race_ethnicity`")  # noqa: E501
        # verify the required parameter 'last_name' is set
        if ('last_name' not in local_var_params or
                local_var_params['last_name'] is None):
            raise ValueError("Missing the required parameter `last_name` when calling `us_race_ethnicity`")  # noqa: E501
        collection_formats = {}
        # Both names are interpolated into the URL path by the API client.
        path_params = {}
        if 'first_name' in local_var_params:
            path_params['firstName'] = local_var_params['first_name']  # noqa: E501
        if 'last_name' in local_var_params:
            path_params['lastName'] = local_var_params['last_name']  # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501
        # Delegate transport, (de)serialization and optional threading to
        # the shared API client.
        return self.api_client.call_api(
            '/api2/json/usRaceEthnicity/{firstName}/{lastName}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='FirstLastNameUSRaceEthnicityOut',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def us_race_ethnicity_batch(self, **kwargs): # noqa: E501
"""[USES 10 UNITS PER NAME] Infer up-to 100 US resident's likely race/ethnicity according to US Census taxonomy. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.us_race_ethnicity_batch(async_req=True)
>>> result = thread.get()
:param async_req bool
:param BatchFirstLastNameGeoIn batch_first_last_name_geo_in: A list of personal names
:return: BatchFirstLastNameUSRaceEthnicityOut
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.us_race_ethnicity_batch_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.us_race_ethnicity_batch_with_http_info(**kwargs) # noqa: E501
return data
    def us_race_ethnicity_batch_with_http_info(self, **kwargs):  # noqa: E501
        """[USES 10 UNITS PER NAME] Infer up-to 100 US resident's likely race/ethnicity according to US Census taxonomy.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.us_race_ethnicity_batch_with_http_info(async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param BatchFirstLastNameGeoIn batch_first_last_name_geo_in: A list of personal names
        :return: BatchFirstLastNameUSRaceEthnicityOut
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Snapshot of the explicit arguments; must run before any other
        # local variable is defined.
        local_var_params = locals()
        all_params = ['batch_first_last_name_geo_in'] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Merge kwargs into the snapshot, rejecting unknown keywords early.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method us_race_ethnicity_batch" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        collection_formats = {}
        path_params = {}
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # The optional batch payload is sent as the JSON request body.
        if 'batch_first_last_name_geo_in' in local_var_params:
            body_params = local_var_params['batch_first_last_name_geo_in']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501
        # Delegate transport, (de)serialization and optional threading to
        # the shared API client.
        return self.api_client.call_api(
            '/api2/json/usRaceEthnicityBatch', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='BatchFirstLastNameUSRaceEthnicityOut',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def us_race_ethnicity_zip5(self, first_name, last_name, zip5_code, **kwargs): # noqa: E501
"""[USES 10 UNITS PER NAME] Infer a US resident's likely race/ethnicity according to US Census taxonomy, using (optional) ZIP5 code info. Output is W_NL (white, non latino), HL (hispano latino), A (asian, non latino), B_NL (black, non latino). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.us_race_ethnicity_zip5(first_name, last_name, zip5_code, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str first_name: (required)
:param str last_name: (required)
:param str zip5_code: (required)
:return: FirstLastNameUSRaceEthnicityOut
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.us_race_ethnicity_zip5_with_http_info(first_name, last_name, zip5_code, **kwargs) # noqa: E501
else:
(data) = self.us_race_ethnicity_zip5_with_http_info(first_name, last_name, zip5_code, **kwargs) # noqa: E501
return data
    def us_race_ethnicity_zip5_with_http_info(self, first_name, last_name, zip5_code, **kwargs):  # noqa: E501
        """[USES 10 UNITS PER NAME] Infer a US resident's likely race/ethnicity according to US Census taxonomy, using (optional) ZIP5 code info. Output is W_NL (white, non latino), HL (hispano latino), A (asian, non latino), B_NL (black, non latino).  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.us_race_ethnicity_zip5_with_http_info(first_name, last_name, zip5_code, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str first_name: (required)
        :param str last_name: (required)
        :param str zip5_code: (required)
        :return: FirstLastNameUSRaceEthnicityOut
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Snapshot of the explicit arguments; must run before any other
        # local variable is defined.
        local_var_params = locals()
        all_params = ['first_name', 'last_name', 'zip5_code'] # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Merge kwargs into the snapshot, rejecting unknown keywords early.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method us_race_ethnicity_zip5" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'first_name' is set
        if ('first_name' not in local_var_params or
                local_var_params['first_name'] is None):
            raise ValueError("Missing the required parameter `first_name` when calling `us_race_ethnicity_zip5`")  # noqa: E501
        # verify the required parameter 'last_name' is set
        if ('last_name' not in local_var_params or
                local_var_params['last_name'] is None):
            raise ValueError("Missing the required parameter `last_name` when calling `us_race_ethnicity_zip5`")  # noqa: E501
        # verify the required parameter 'zip5_code' is set
        if ('zip5_code' not in local_var_params or
                local_var_params['zip5_code'] is None):
            raise ValueError("Missing the required parameter `zip5_code` when calling `us_race_ethnicity_zip5`")  # noqa: E501
        collection_formats = {}
        # All three values are interpolated into the URL path by the API
        # client.
        path_params = {}
        if 'first_name' in local_var_params:
            path_params['firstName'] = local_var_params['first_name']  # noqa: E501
        if 'last_name' in local_var_params:
            path_params['lastName'] = local_var_params['last_name']  # noqa: E501
        if 'zip5_code' in local_var_params:
            path_params['zip5Code'] = local_var_params['zip5_code']  # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['api_key']  # noqa: E501
        # Delegate transport, (de)serialization and optional threading to
        # the shared API client.
        return self.api_client.call_api(
            '/api2/json/usRaceEthnicityZIP5/{firstName}/{lastName}/{zip5Code}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='FirstLastNameUSRaceEthnicityOut',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def us_zip_race_ethnicity_batch(self, **kwargs): # noqa: E501
"""[USES 10 UNITS PER NAME] Infer up-to 100 US resident's likely race/ethnicity according to US Census taxonomy, with (optional) ZIP code. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.us_zip_race_ethnicity_batch(async_req=True)
>>> result = thread.get()
:param async_req bool
:param BatchFirstLastNameGeoZippedIn batch_first_last_name_geo_zipped_in: A list of personal names
:return: BatchFirstLastNameUSRaceEthnicityOut
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.us_zip_race_ethnicity_batch_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.us_zip_race_ethnicity_batch_with_http_info(**kwargs) # noqa: E501
return data
def us_zip_race_ethnicity_batch_with_http_info(self, **kwargs):  # noqa: E501
    """[USES 10 UNITS PER NAME] Infer up-to 100 US resident's likely race/ethnicity according to US Census taxonomy, with (optional) ZIP code.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.us_zip_race_ethnicity_batch_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param BatchFirstLastNameGeoZippedIn batch_first_last_name_geo_zipped_in: A list of personal names
    :return: BatchFirstLastNameUSRaceEthnicityOut
             If the method is called asynchronously,
             returns the request thread.
    """
    # `locals()` snapshot contains `self` and the raw `kwargs` dict; the
    # kwargs are validated and flattened into it below.
    local_var_params = locals()

    # Keyword arguments accepted by this endpoint plus client-level options.
    all_params = ['batch_first_last_name_geo_zipped_in']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments early with a clear error.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method us_zip_race_ethnicity_batch" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    collection_formats = {}

    # No path or query parameters for this endpoint; the payload is the body.
    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'batch_first_last_name_geo_zipped_in' in local_var_params:
        body_params = local_var_params['batch_first_last_name_geo_zipped_in']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['api_key']  # noqa: E501

    # NOTE: the positional argument order of call_api is fixed by the
    # generated api_client — do not reorder.
    return self.api_client.call_api(
        '/api2/json/usZipRaceEthnicityBatch', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='BatchFirstLastNameUSRaceEthnicityOut',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
|
shaojiankui/iOS10-Runtime-Headers
|
Frameworks/AVFoundation.framework/AVSampleBufferDisplayLayerContentLayer.h
|
/* Generated by RuntimeBrowser
Image: /System/Library/Frameworks/AVFoundation.framework/AVFoundation
*/
// Private CALayer subclass belonging to AVSampleBufferDisplayLayer.
// NOTE(review): header generated by RuntimeBrowser; only the CALayer
// action-lookup overrides are exposed — actual responsibilities are not
// documented by Apple.
@interface AVSampleBufferDisplayLayerContentLayer : CALayer

+ (id)defaultActionForKey:(id)arg1; // CALayer class-level action override
- (id)actionForKey:(id)arg1;        // CALayer per-instance action override

@end
|
ednilson/jcatalog
|
jcatalog/models.py
|
<gh_stars>0
# coding: utf-8
from mongoengine import *  # NOTE(review): wildcard import is relied on below (DynamicDocument, *Field)
import datetime

# NOTE(review): module-import side effect — opens the MongoDB connection to
# the 'journals-catalog' database on the default host as soon as this module
# is imported; confirm this is intended for all execution contexts.
connect('journals-catalog')
class Scielo(DynamicDocument):
    """SciELO source records (dynamic schema: undeclared fields are allowed).

    The ``is_*`` integer fields act as membership flags — presumably 1 when
    the record appears in the corresponding external index; confirm with the
    loader scripts.
    """

    creation_date = DateTimeField(default=datetime.datetime.now)  # set at insert time (local clock)
    updated_at = DateTimeField()
    extraction_date = DateTimeField()
    issn_list = ListField()
    date_of_the_first_document = DateTimeField()
    date_of_the_last_document = DateTimeField()
    scholarone = IntField(required=True, min_value=0, default=0)
    ojs_scielo = IntField(required=True, min_value=0, default=0)
    is_submissions = IntField(required=True, min_value=0, default=0)
    is_scielo = IntField(required=True, min_value=1, default=1)  # always 1 in this collection
    is_scimago = IntField(required=True, min_value=0, default=0)
    is_scopus = IntField(required=True, min_value=0, default=0)
    is_jcr = IntField(required=True, min_value=0, default=0)
    is_cwts = IntField(required=True, min_value=0, default=0)
    is_doaj = IntField(required=True, min_value=0, default=0)
    is_capes = IntField(required=True, min_value=0, default=0)
    is_pubmed = IntField(required=True, min_value=0, default=0)
    is_pmc = IntField(required=True, min_value=0, default=0)
    is_wos = IntField(required=True, min_value=0, default=0)

    # Indexes
    meta = {
        'indexes': [
            'issn_list',
            'title_country'  # dynamic field (not declared above)
        ]
    }
class Scielobk1(DynamicDocument):
    """Backup/snapshot of the ``Scielo`` collection (same fields, different indexes)."""

    creation_date = DateTimeField(default=datetime.datetime.now)
    updated_at = DateTimeField()
    extraction_date = DateTimeField()
    issn_list = ListField()
    date_of_the_first_document = DateTimeField()
    date_of_the_last_document = DateTimeField()
    scholarone = IntField(required=True, min_value=0, default=0)
    ojs_scielo = IntField(required=True, min_value=0, default=0)
    is_submissions = IntField(required=True, min_value=0, default=0)
    is_scielo = IntField(required=True, min_value=1, default=1)
    is_scimago = IntField(required=True, min_value=0, default=0)
    is_scopus = IntField(required=True, min_value=0, default=0)
    is_jcr = IntField(required=True, min_value=0, default=0)
    is_cwts = IntField(required=True, min_value=0, default=0)
    is_doaj = IntField(required=True, min_value=0, default=0)
    is_capes = IntField(required=True, min_value=0, default=0)
    is_pubmed = IntField(required=True, min_value=0, default=0)
    is_pmc = IntField(required=True, min_value=0, default=0)
    is_wos = IntField(required=True, min_value=0, default=0)

    # Indexes
    meta = {
        'indexes': [
            'pid',        # dynamic field
            'collection'  # dynamic field
        ]
    }
class Scimago(DynamicDocument):
    """Scimago source records (dynamic schema)."""

    creation_date = DateTimeField(default=datetime.datetime.now)
    updated_at = DateTimeField()
    issn_list = ListField()
    is_scielo = IntField(required=True, min_value=0, default=0)
    is_scimago = IntField(required=True, min_value=1, default=1)  # always 1 in this collection
    is_scopus = IntField(required=True, min_value=0, default=0)
    is_jcr = IntField(required=True, min_value=0, default=0)
    is_wos = IntField(required=True, min_value=0, default=0)
    inscielo = IntField(required=True, min_value=0, default=0)  # NOTE(review): overlaps is_scielo — confirm which is authoritative

    # Indexes
    meta = {
        'indexes': [
            'issn_list',
            'title_country'
        ]
    }
class Scopus(DynamicDocument):
    """Scopus source records (dynamic schema)."""

    creation_date = DateTimeField(default=datetime.datetime.now)
    updated_at = DateTimeField()
    issn_list = ListField()
    is_scielo = IntField(required=True, min_value=0, default=0)
    is_scimago = IntField(required=True, min_value=0, default=0)
    is_scopus = IntField(required=True, min_value=1, default=1)  # always 1 in this collection
    is_jcr = IntField(required=True, min_value=0, default=0)
    is_wos = IntField(required=True, min_value=0, default=0)
    is_cwts = IntField(required=True, min_value=0, default=0)

    # Indexes
    meta = {
        'indexes': [
            'issn_list',
            'title_country',
            'sourcerecord_id',
            'asjc_code_list',
            'oecd'
        ]
    }
class Jcr(DynamicDocument):
    """JCR source records (dynamic schema)."""

    creation_date = DateTimeField(default=datetime.datetime.now)
    updated_at = DateTimeField()
    issn_list = ListField()
    is_scielo = IntField(required=True, min_value=0, default=0)
    is_scimago = IntField(required=True, min_value=0, default=0)
    is_scopus = IntField(required=True, min_value=0, default=0)
    is_jcr = IntField(required=True, min_value=1, default=1)  # always 1 in this collection
    is_wos = IntField(required=True, min_value=0, default=0)
    is_cwts = IntField(required=True, min_value=0, default=0)

    # Indexes
    meta = {
        'indexes': [
            'issn_list',
            'title_country'
        ]
    }
class Wos(DynamicDocument):
    """Web of Science source records (dynamic schema)."""

    creation_date = DateTimeField(default=datetime.datetime.now)
    updated_at = DateTimeField()
    issn_list = ListField()
    is_scielo = IntField(required=True, min_value=0, default=0)
    is_scimago = IntField(required=True, min_value=0, default=0)
    is_scopus = IntField(required=True, min_value=0, default=0)
    is_jcr = IntField(required=True, min_value=0, default=0)
    is_wos = IntField(required=True, min_value=1, default=1)  # always 1 in this collection
    is_cwts = IntField(required=True, min_value=0, default=0)

    # Indexes
    meta = {
        'indexes': [
            'issn_list',
            'title_country'
        ]
    }
class Jcr_scielo(DynamicDocument):
    """JCR records restricted/related to SciELO (dynamic schema; same flag layout as ``Jcr``)."""

    creation_date = DateTimeField(default=datetime.datetime.now)
    updated_at = DateTimeField()
    issn_list = ListField()
    is_scielo = IntField(required=True, min_value=0, default=0)
    is_scimago = IntField(required=True, min_value=0, default=0)
    is_scopus = IntField(required=True, min_value=0, default=0)
    is_jcr = IntField(required=True, min_value=1, default=1)
    is_wos = IntField(required=True, min_value=0, default=0)
    is_cwts = IntField(required=True, min_value=0, default=0)

    # Indexes
    meta = {
        'indexes': [
            'issn_list',
            'title_country'
        ]
    }
class Oecd(DynamicDocument):
    """OECD classification lookup records (dynamic schema)."""

    creation_date = DateTimeField(default=datetime.datetime.now)
    updated_at = DateTimeField()

    # Indexes
    meta = {
        'indexes': [
            'oecd_description',
            'wos_description'
        ]
    }
class Cwts(DynamicDocument):
    """CWTS source records (dynamic schema)."""

    creation_date = DateTimeField(default=datetime.datetime.now)
    updated_at = DateTimeField()
    issn_list = ListField()
    is_cwts = IntField(required=True, min_value=1, default=1)  # always 1 in this collection
    is_scielo = IntField(required=True, min_value=0, default=0)
    is_scimago = IntField(required=True, min_value=0, default=0)
    is_scopus = IntField(required=True, min_value=0, default=0)
    is_jcr = IntField(required=True, min_value=0, default=0)
    is_wos = IntField(required=True, min_value=0, default=0)

    # Indexes
    meta = {
        'indexes': [
            'issn_list',
            'title_country'
        ]
    }
class Submissions(DynamicDocument):
    """Per-journal submission-platform flags (dynamic schema, no declared indexes)."""

    creation_date = DateTimeField(default=datetime.datetime.now)
    updated_at = DateTimeField()
    issn_list = ListField()
    scholarone = IntField(required=True, min_value=0, default=0)  # ScholarOne platform flag
    ojs_scielo = IntField(required=True, min_value=0, default=0)  # OJS hosted by SciELO
    ojs_outro = IntField(required=True, min_value=0, default=0)   # OJS hosted elsewhere
    outro = IntField(required=True, min_value=0, default=0)       # other platform
class Doaj(DynamicDocument):
    """DOAJ source records (dynamic schema)."""

    creation_date = DateTimeField(default=datetime.datetime.now)
    updated_at = DateTimeField()
    issn_list = ListField()
class Doajapi(DynamicDocument):
    """Records fetched from the DOAJ API (dynamic schema)."""

    creation_date = DateTimeField(default=datetime.datetime.now)
    updated_at = DateTimeField()
    issn_list = ListField()
class Pubmedapi(DynamicDocument):
    """Records fetched from the PubMed API (dynamic schema)."""

    creation_date = DateTimeField(default=datetime.datetime.now)
    updated_at = DateTimeField()
    issn_list = ListField()
class Noscielo(DynamicDocument):
    """Records not present in SciELO (dynamic schema)."""

    creation_date = DateTimeField(default=datetime.datetime.now)
    updated_at = DateTimeField()
    issn_list = ListField()
class Capes(DynamicDocument):
    """CAPES source records (dynamic schema)."""

    creation_date = DateTimeField(default=datetime.datetime.now)
    updated_at = DateTimeField()

    # Indexes
    meta = {
        'indexes': [
            'issn',
            'title',
            'area_avaliacao'
        ]
    }
class Wosindexes(DynamicDocument):
    """WoS index membership records (dynamic schema)."""

    creation_date = DateTimeField(default=datetime.datetime.now)
    updated_at = DateTimeField()
    issn_list = ListField()
    is_scielo = IntField(required=True, min_value=0, default=0)
    is_wos = IntField(required=True, min_value=0, default=0)

    # Indexes
    meta = {
        'indexes': [
            'issn_list',
            'title_country'
        ]
    }
class Ztests(DynamicDocument):
    """Scratch collection used for experiments/tests; no declared fields."""
    pass
class Scielodates(DynamicDocument):
    """Date-related SciELO records (dynamic schema)."""

    # creation_date = DateTimeField(default=datetime.datetime.now)
    # updated_at = DateTimeField()
    issn_list = ListField()

    # Indexes
    meta = {
        'indexes': [
            'issn_list',
            'issn_scielo',
            'pid'
        ]
    }
|
rokumatsumoto/rspec-experiments
|
expectations/data_generator/spec/spec_helper.rb
|
# RSpec runtime configuration for these specs.
RSpec.configure do |c|
  c.fail_fast = true            # abort the run on the first failing example
  c.formatter = 'documentation' # verbose, nested example output
  c.color = true                # colored terminal output
  c.order = :defined            # run examples in file order (no random shuffle)
end
|
mpejcoch/aviso
|
aviso-server/monitoring/aviso_monitoring/reporter/etcd_reporter.py
|
<filename>aviso-server/monitoring/aviso_monitoring/reporter/etcd_reporter.py
# (C) Copyright 1996- ECMWF.
#
# This software is licensed under the terms of the Apache Licence Version 2.0
# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0.
# In applying this licence, ECMWF does not waive the privileges and immunities
# granted to it by virtue of its status as an intergovernmental organisation
# nor does it submit to any jurisdiction.
from enum import Enum
import requests
from .. import logger
from ..receiver import ETCD_APP_NAME
from .opsview_reporter import OpsviewReporter
class EtcdReporter(OpsviewReporter):
    """
    Reporter that polls the etcd members for raw telemetry and converts each
    configured TLM into a metric payload using the matching checker class.
    """

    def __init__(self, config, *args, **kwargs):
        # All settings come from the `etcd_reporter` section of the config.
        self.etcd_config = config.etcd_reporter
        self.frequency = self.etcd_config["frequency"]
        self.enabled = self.etcd_config["enabled"]
        self.req_timeout = self.etcd_config["req_timeout"]
        self.member_urls = self.etcd_config["member_urls"]
        self.tlms = self.etcd_config["tlms"]
        super().__init__(config, *args, **kwargs)

    def process_messages(self):
        """
        For each metric to process, instantiate the relative TLM checker and
        run it.

        :return: list of the metric payloads collected
        """
        logger.debug("Etcd processing metrics...")
        # fetch the raw tlms provided by etcd (shared by every checker)
        raw_tlms = OpsviewReporter.retrive_metrics(self.member_urls, self.req_timeout)
        # array of metric to return
        metrics = []
        # check for each tlm
        for tlm_type in self.tlms.keys():
            # Resolve the checker class by name instead of eval() — same
            # behaviour, but no dynamic code execution on config-derived
            # strings (safer and lint-friendly).
            m_type = EtcdMetricType[tlm_type.lower()]
            checker_cls = globals()[m_type.value]
            checker = checker_cls(
                tlm_type,
                msg_receiver=self.msg_receiver,
                raw_tlms=raw_tlms,
                **self.tlms[tlm_type],
                **self.etcd_config,
            )
            # retrieve metric
            metrics.append(checker.metric())
        logger.debug("Etcd metrics completed")
        return metrics
class EtcdMetricType(Enum):
    """
    This Enum describes the various etcd metrics that can be used and links
    each TLM name (member name) to the class name of its checker (value).
    """

    etcd_store_size = "StoreSize"
    etcd_cluster_status = "ClusterStatus"
    etcd_total_keys = "TotalKeys"
    etcd_error_log = "ErrorLog"
class EtcdChecker:
    """
    Base class for the etcd checkers: stores the shared state every checker
    needs (metric name, timeout, member URLs, fetched raw telemetry and the
    message receiver). Subclasses override :meth:`metric`.
    """

    def __init__(self, tlm_type, req_timeout=60, *args, **kwargs):
        self.metric_name = tlm_type
        self.req_timeout = req_timeout
        # Required wiring — a missing key is a programming error, so the
        # KeyError from a direct lookup is intentional.
        self.member_urls, self.raw_tlms, self.msg_receiver = (
            kwargs["member_urls"],
            kwargs["raw_tlms"],
            kwargs["msg_receiver"],
        )

    def metric(self):
        """Produce this checker's metric payload; overridden by subclasses."""
        return None
class StoreSize(EtcdChecker):
    """
    This class aims at checking the size of the store against the backend
    quota. Status: 0 nominal, 1 above 70% of quota, 2 above 85% of quota or
    sizes unavailable.
    """

    def metric(self):
        """Build the store-size metric payload."""
        # defaults
        status = 0
        message = "Store size is nominal"
        store_logic = self.max_store_size("etcd_mvcc_db_total_size_in_use_in_bytes")
        store_physical = self.max_store_size("etcd_mvcc_db_total_size_in_bytes")
        store_quota = self.max_store_size("etcd_server_quota_backend_bytes")
        # Bug fix: also treat a missing/invalid quota (<= 0, e.g. the -1
        # "not found" sentinel) as a retrieval failure — previously it made
        # the utilisation negative and the check silently reported nominal.
        if store_logic == -1 or store_quota <= 0:
            status = 2
            message = "Could not retrieve the store size"
        else:
            utilisation = store_logic * 100 / store_quota
            if utilisation > 85:
                status = 2
                message = f"Store size {store_logic} GiB exceeded 85% of the quota available: {store_quota} GiB."
            elif utilisation > 70:
                status = 1
                # Bug fix: this branch fires above 70%, not 85%.
                message = f"Store size {store_logic} GiB exceeded 70% of the quota available: {store_quota} GiB."
        # build metric payload
        m_status = {
            "name": self.metric_name,
            "status": status,
            "message": message,
            "metrics": [
                {"m_name": "store_logic", "m_value": store_logic, "m_unit": "GiB"},
                {"m_name": "store_physical", "m_value": store_physical, "m_unit": "GiB"},
                {"m_name": "store_quota", "m_value": store_quota, "m_unit": "GiB"},
            ],
        }
        logger.debug(f"{self.metric_name} metric: {m_status}")
        return m_status

    def max_store_size(self, tlm_name):
        """
        This method returns the max data size for the given tlm_name among
        all the members.

        :param tlm_name: telemetry name to read from each member's metrics
        :return: data size in GiB, -1 if something went wrong
        """
        largest = -1  # renamed from `max` to avoid shadowing the builtin
        for u in self.member_urls:
            if self.raw_tlms[u]:
                size = OpsviewReporter.read_from_metrics(self.raw_tlms[u], tlm_name)
                if size:
                    # convert byte in GiB
                    size = round(float(size) / (1024 * 1024 * 1024), 2)
                    largest = size if size > largest else largest
        return largest
class ClusterStatus(EtcdChecker):
    """
    This class aims at assessing the status of the cluster by checking the
    health of each member and verifying the cluster size is as expected.
    """

    def metric(self):
        """Build the cluster-status metric payload (status 0/1/2)."""
        status = 0
        message = "Cluster status is nominal"
        # first retrieve the member size
        cluster_size = self.cluster_size(self.member_urls[0])  # any member should give the same info
        if cluster_size != len(self.member_urls):
            status = 2
            if cluster_size:
                message = f"Cluster size is {cluster_size}"
            else:
                message = "Not able to get cluster info"
        if status == 0:
            # now check the health of each member
            for url in self.member_urls:
                if not self.health(url):
                    status = 2
                    message = f"Cluster member {url} not healthy"
                    break
        if status == 0:
            # check if there is a leader
            if self.raw_tlms[self.member_urls[0]] is None:
                status = 1
                message = f"Could not retrieve metrics from {self.member_urls[0]}"
            else:
                leader = OpsviewReporter.read_from_metrics(self.raw_tlms[self.member_urls[0]], "etcd_server_has_leader")
                if leader != "1":
                    status = 1
                    message = "Cluster has no leader"
        # build metric payload
        m_status = {
            "name": self.metric_name,
            "status": status,
            "message": message,
        }
        logger.debug(f"{self.metric_name} metric: {m_status}")
        return m_status

    def health(self, url):
        """
        This method implements the call to a etcd member to check its health
        status.

        :param url: member url
        :return: True if healthy
        """
        url = f"{url}/health"
        try:
            resp = requests.get(url, timeout=self.req_timeout)
        except Exception as e:
            logger.exception(f"Not able to get health on {url}, error {e}")
            return False
        if resp.status_code != 200:
            logger.error(
                f"Not able to get health on {url}, "
                f"status {resp.status_code}, {resp.reason}, {resp.content.decode()}"
            )
            # Bug fix: bail out on HTTP errors instead of attempting to parse
            # an error body as JSON (which could raise out of the check).
            return False
        data = resp.json()
        # NOTE(review): etcd typically reports "health" as the string
        # "true"/"false"; bool() on any non-empty string is True — confirm
        # the payload format against the deployed etcd version.
        healthy = bool(data.get("health"))
        return healthy

    def cluster_size(self, url):
        """
        This method implements the call to check the size of the cluster is
        as expected.

        :param url: member url
        :return: cluster size, False if it could not be retrieved
        """
        url = f"{url}/v3/cluster/member/list"
        try:
            resp = requests.post(url, timeout=self.req_timeout)
        except Exception as e:
            logger.exception(f"Not able to get cluster info on {url}, error {e}")
            return False
        if resp.status_code != 200:
            logger.error(
                f"Not able to get cluster info on {url}, "
                f"status {resp.status_code}, {resp.reason}, {resp.content.decode()}"
            )
            # Bug fix: bail out on HTTP errors instead of parsing the error
            # body below.
            return False
        data = resp.json()
        cluster_size = len(data.get("members"))
        return cluster_size
class TotalKeys(EtcdChecker):
    """
    Collect the total number of keys stored in etcd.
    """

    def metric(self):
        """Build the total-keys metric payload (status 0 or 2)."""
        # defaults
        status = 0
        message = "Total number of keys is nominal"
        # any member should reply the same; guard against a member whose
        # metrics could not be fetched at all (raw_tlms entry is None) —
        # consistent with the None check done in ClusterStatus.
        raw = self.raw_tlms[self.member_urls[0]]
        t_keys = OpsviewReporter.read_from_metrics(raw, "etcd_debugging_mvcc_keys_total") if raw else None
        if t_keys is None:
            status = 2
            message = "Cannot retrieve total number of keys"
        # build metric payload
        m_status = {
            "name": self.metric_name,
            "status": status,
            "message": message,
            "metrics": [{"m_name": "total_keys", "m_value": t_keys, "m_unit": ""}],
        }
        logger.debug(f"{self.metric_name} metric: {m_status}")
        return m_status
class ErrorLog(EtcdChecker):
    """
    Collect the error/warning log lines received from etcd and summarise
    them as a metric: status 2 for errors/fatals/panics, 1 for warnings,
    0 when nothing was reported.
    """

    def metric(self):
        """Build the error-log metric payload."""
        status = 0
        message = "No error to report"
        # fetch the error log
        assert self.msg_receiver, "Msg receiver is None"
        new_errs = self.msg_receiver.extract_incoming_errors(ETCD_APP_NAME)
        if len(new_errs):
            logger.debug(f"Processing {len(new_errs)} tlms {self.metric_name}...")
            # Partition the incoming lines by severity keyword.
            warns = [entry for entry in new_errs if "warn" in entry]
            errs = [entry for entry in new_errs if "error" in entry]
            # Fold the most severe categories into the error bucket,
            # preserving the error -> fatal -> panic ordering.
            errs += [entry for entry in new_errs if "fatal" in entry]
            errs += [entry for entry in new_errs if "panic" in entry]
            if errs:
                status = 2
                message = f"Errors received: {errs}"
            elif warns:
                status = 1
                message = f"Warnings received: {warns}"
        # build metric payload
        m_status = {"name": self.metric_name, "status": status, "message": message}
        logger.debug(f"{self.metric_name} metric: {m_status}")
        return m_status
|
borisdamevin/vuepress-theme-tsed
|
packages/vuepress-theme-tsed/src/install.js
|
import * as common from '@tsed/vuepress-common'
import { filterSymbols } from '@tsed/vuepress-common'
// A value is treated as a Vue component when it declares a `name` and is
// not a plain function. Note: preserves the original truthy/falsy return
// (may return the falsy `name` itself rather than `false`).
function isVueComponent (component) {
  const notFunction = typeof component !== 'function'
  return component.name && notFunction
}
// Vue plugin entry point: wires `$filterSymbols` into every component and
// globally registers each component exported by @tsed/vuepress-common.
export default function install (Vue) {
  Vue.mixin({
    created () {
      // Only expose the symbol filter when an API config is present.
      if (this.$themeConfig.api) {
        this.$filterSymbols = filterSymbols(this.$themeConfig.api)
      }
    }
  })

  try {
    for (const key of Object.keys(common)) {
      const candidate = common[key]
      if (isVueComponent(candidate)) {
        // Fall back to the export key when the component has no name.
        Vue.component(candidate.name || key, candidate)
      }
    }
  } catch (er) {
    // Registration failures must not break the whole site.
    console.error(er)
  }
}
|
xyz031702/goblog
|
handler/admin_remind.go
|
<gh_stars>10-100
package handler
import (
"time"
"github.com/fifsky/goblog/core"
"github.com/fifsky/goblog/helpers"
"github.com/fifsky/goblog/models"
"github.com/gin-gonic/gin"
"github.com/ilibs/gosql"
"github.com/goapt/logger"
)
// AdminRemindGet renders the admin remind page: the optional remind being
// edited, the paginated remind list and the type/layout lookup tables.
var AdminRemindGet core.HandlerFunc = func(c *core.Context) core.Response {
	h := gin.H{}
	id := helpers.StrTo(c.Query("id")).MustInt()
	if id > 0 {
		remind := &models.Reminds{Id: id}
		// NOTE(review): Get() error intentionally ignored — an unknown id
		// simply renders an empty form.
		gosql.Model(remind).Get()
		h["Remind"] = remind
	}

	num := 10
	page := helpers.StrTo(c.DefaultQuery("page", "1")).MustInt()

	// Bug fix: previously the error from RemindGetList was overwritten by
	// Count()'s error before it was ever checked; check each one directly.
	reminds, err := models.RemindGetList(page, num)
	if err != nil {
		return c.ErrorMessage(err)
	}
	h["Reminds"] = reminds

	total, err := gosql.Model(&models.Reminds{}).Count()
	if err != nil {
		return c.ErrorMessage(err)
	}
	h["Pager"] = c.Pagination(total, num, page)

	// Remind schedule types (index is the stored type value).
	h["Types"] = map[int]string{
		0: "固定",
		1: "每分钟",
		2: "每小时",
		3: "每天",
		4: "每周",
		5: "每月",
		6: "每年",
	}
	// Time layout used to render each schedule type.
	h["Layouts"] = map[int]string{
		0: "2006-01-02 15:04:05",
		1: "",
		2: "",
		3: "15:04:00",
		4: "15:04:00",
		5: "02日15:04:05",
		6: "01月02日15:04:05",
	}
	h["CurrDate"] = time.Now().Format("2006-01-02 15:04:05")
	return c.HTML("admin/remind", h)
}
// AdminRemindPost creates or updates a remind from the bound form: a
// positive Id means update, otherwise a new record is created.
var AdminRemindPost core.HandlerFunc = func(c *core.Context) core.Response {
	remind := &models.Reminds{}
	if err := c.ShouldBind(remind); err != nil {
		return c.Fail(201, "参数错误:"+err.Error())
	}
	if remind.Content == "" {
		return c.Fail(201, "提醒内容不能为空")
	}

	if remind.Id > 0 {
		// Existing record: update in place.
		if _, err := gosql.Model(remind).Update(); err != nil {
			logger.Error(err)
			return c.Fail(201, "更新失败:"+err.Error())
		}
		return c.Success(nil)
	}

	// New record.
	if _, err := gosql.Model(remind).Create(); err != nil {
		logger.Error(err)
		return c.Fail(201, "创建失败")
	}
	return c.Success(nil)
}
// AdminRemindDelete removes the remind identified by the `id` query
// parameter and sends the browser back to the referring page.
var AdminRemindDelete core.HandlerFunc = func(c *core.Context) core.Response {
	remindID := helpers.StrTo(c.Query("id")).MustInt()
	_, err := gosql.Model(&models.Reminds{Id: remindID}).Delete()
	if err != nil {
		logger.Error(err)
		return c.Fail(201, "删除失败")
	}
	return c.Redirect(c.Request.Referer())
}
|
dmatthes/django-bmf
|
djangobmf/contrib/taxing/migrations/0001_initial.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.db.models.deletion
from django.conf import settings
from djangobmf.settings import BASE_MODULE
class Migration(migrations.Migration):
    """Initial migration for the taxing contrib app: creates the Tax model."""

    dependencies = [
        # Both referenced models are swappable: the auth user and the
        # account model configured via BASE_MODULE.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        migrations.swappable_dependency(BASE_MODULE["ACCOUNT"]),
    ]

    operations = [
        migrations.CreateModel(
            name='Tax',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('modified', models.DateTimeField(auto_now=True, verbose_name='Modified', null=True)),
                ('created', models.DateTimeField(auto_now_add=True, verbose_name='Created', null=True)),
                ('uuid', models.CharField(editable=False, max_length=100, blank=True, null=True, verbose_name='UUID', db_index=True)),
                ('name', models.CharField(max_length=255)),
                # Tax rate stored with 5 decimal places (e.g. 0.19000).
                ('rate', models.DecimalField(max_digits=8, decimal_places=5)),
                ('passive', models.BooleanField(default=False, verbose_name='Tax is allways included in the product price and never visible to the customer')),
                ('is_active', models.BooleanField(default=True, verbose_name='Is active')),
                # PROTECT: an account with taxes attached cannot be deleted.
                ('account', models.ForeignKey(to=BASE_MODULE["ACCOUNT"], on_delete=django.db.models.deletion.PROTECT, related_name="tax_liability")),
                ('created_by', models.ForeignKey(on_delete=django.db.models.deletion.SET_NULL, blank=True, editable=False, to=settings.AUTH_USER_MODEL, null=True, related_name="+")),
                ('modified_by', models.ForeignKey(on_delete=django.db.models.deletion.SET_NULL, blank=True, editable=False, to=settings.AUTH_USER_MODEL, null=True, related_name="+")),
            ],
            options={
                'ordering': ['name'],
                'abstract': False,
                'verbose_name': 'Tax',
                'verbose_name_plural': 'Taxes',
            },
            bases=(models.Model,),
        ),
    ]
|
heroku-miraheze/trafficserver
|
src/tscpp/util/unit_tests/test_IntrusiveDList.cc
|
/** @file
IntrusiveDList unit tests.
@section license License
Licensed to the Apache Software Foundation (ASF) under one or more contributor license
agreements. See the NOTICE file distributed with this work for additional information regarding
copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with the License. You may
obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software distributed under the
License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing permissions and
limitations under the License.
*/
#include <iostream>
#include <string_view>
#include <string>
#include <algorithm>
#include "tscpp/util/IntrusiveDList.h"
#include "tscpp/util/bwf_base.h"
#include "catch.hpp"
using ts::IntrusiveDList;
// --------------------
// Code for documentation - placed here to guarantee the examples at least compile.
// First so that additional tests do not require updating the documentation source links.
// Documentation example class: a message carrying text, a severity and the
// intrusive-list links that let it live in a Container's MessageList.
// (Changing the code here requires updating the docs — comments only.)
class Message
{
  using self_type = Message; ///< Self reference type.

public:
  // Message severity level.
  enum Severity { LVL_DEBUG, LVL_INFO, LVL_WARN, LVL_ERROR };

protected:
  std::string _text; // Text of the message.
  Severity _severity{LVL_DEBUG};
  int _indent{0}; // indentation level for display.

  // Intrusive list support.
  struct Linkage {
    static self_type *&next_ptr(self_type *); // Link accessor.
    static self_type *&prev_ptr(self_type *); // Link accessor.

    self_type *_next{nullptr}; // Forward link.
    self_type *_prev{nullptr}; // Backward link.
  } _link;

  bool is_in_list() const; // True if either link is set.

  friend class Container; // Container manipulates the links.
};
// Linkage accessors required by IntrusiveDList: expose mutable references
// to the embedded next/prev pointers.
auto
Message::Linkage::next_ptr(self_type *that) -> self_type *&
{
  return that->_link._next;
}

auto
Message::Linkage::prev_ptr(self_type *that) -> self_type *&
{
  return that->_link._prev;
}

// A message counts as "in a list" when either of its links is non-null.
bool
Message::is_in_list() const
{
  return _link._next || _link._prev;
}
// Documentation example container: owns Message instances held in an
// intrusive list.
class Container
{
  using self_type   = Container;
  using MessageList = ts::IntrusiveDList<Message::Linkage>;

public:
  ~Container(); // Deletes all owned messages.

  // Append a LVL_DEBUG message formatted from fmt/args.
  template <typename... Args> self_type &debug(std::string_view fmt, Args &&... args);

  size_t count() const;                   // Number of stored messages.
  self_type &clear();                     // Delete and drop all messages.
  Message::Severity max_severity() const; // Highest severity present.
  void print() const;                     // Dump messages to std::cout.

protected:
  MessageList _msgs;
};
Container::~Container()
{
  this->clear(); // clean up memory.
}

auto
Container::clear() -> self_type &
{
  // Pop and delete every element — the container owns its messages.
  Message *msg;
  while (nullptr != (msg = _msgs.take_head())) {
    delete msg;
  }
  _msgs.clear();
  return *this;
}

size_t
Container::count() const
{
  return _msgs.count();
}

// Build a LVL_DEBUG message from the format string/arguments and append it.
template <typename... Args>
auto
Container::debug(std::string_view fmt, Args &&... args) -> self_type &
{
  Message *msg = new Message;
  ts::bwprintv(msg->_text, fmt, std::forward_as_tuple(args...));
  msg->_severity = Message::LVL_DEBUG;
  _msgs.append(msg);
  return *this;
}

// Highest severity among stored messages; LVL_DEBUG for an empty list.
Message::Severity
Container::max_severity() const
{
  auto spot = std::max_element(_msgs.begin(), _msgs.end(),
                               [](Message const &lhs, Message const &rhs) { return lhs._severity < rhs._severity; });
  return spot == _msgs.end() ? Message::Severity::LVL_DEBUG : spot->_severity;
}

void
Container::print() const
{
  for (auto &&elt : _msgs) {
    std::cout << static_cast<unsigned int>(elt._severity) << ": " << elt._text << std::endl;
  }
}
// Smoke test for the documentation example above.
TEST_CASE("IntrusiveDList Example", "[libtscpputil][IntrusiveDList]")
{
  Container container;
  container.debug("This is message {}", 1);
  REQUIRE(container.count() == 1);
  // Destructor is checked for non-crashing as container goes out of scope.
}
// Minimal test element: links are public members and Linkage is a separate
// static-accessor struct, as required by IntrusiveDList.
struct Thing {
  Thing *_next{nullptr};
  Thing *_prev{nullptr};
  std::string _payload;

  Thing(std::string_view text) : _payload(text) {}

  struct Linkage {
    static Thing *&
    next_ptr(Thing *t)
    {
      return t->_next;
    }
    static Thing *&
    prev_ptr(Thing *t)
    {
      return t->_prev;
    }
  };
};
// Just for you, @maskit ! Demonstrating non-public links and subclassing:
// the inherited links are protected, so the Linkage accessors use
// ts::ptr_ref_cast to re-type the base-class pointer references.
class PrivateThing : protected Thing
{
  using self_type  = PrivateThing;
  using super_type = Thing;

public:
  PrivateThing(std::string_view text) : super_type(text) {}

  struct Linkage {
    static self_type *&
    next_ptr(self_type *t)
    {
      return ts::ptr_ref_cast<self_type>(t->_next);
    }
    static self_type *&
    prev_ptr(self_type *t)
    {
      return ts::ptr_ref_cast<self_type>(t->_prev);
    }
  };

  std::string const &
  payload() const
  {
    return _payload;
  }
};
// End of documentation example code.
// If any lines above here are changed, the documentation must be updated.
// --------------------
using ThingList = ts::IntrusiveDList<Thing::Linkage>;
using PrivateThingList = ts::IntrusiveDList<PrivateThing::Linkage>;
// Exercises the full IntrusiveDList API: empty-list invariants, append /
// prepend, take_head / take_tail, insert_after / insert_before, erase and
// iteration. (Elements are intentionally leaked — acceptable in this test.)
TEST_CASE("IntrusiveDList", "[libtscpputil][IntrusiveDList]")
{
  ThingList list;
  int n;

  // Empty-list invariants.
  REQUIRE(list.count() == 0);
  REQUIRE(list.head() == nullptr);
  REQUIRE(list.tail() == nullptr);
  REQUIRE(list.begin() == list.end());
  REQUIRE(list.empty());

  n = 0;
  for ([[maybe_unused]] auto &thing : list)
    ++n;
  REQUIRE(n == 0);
  // Check const iteration (mostly compile checks here).
  for ([[maybe_unused]] auto &thing : static_cast<ThingList const &>(list))
    ++n;
  REQUIRE(n == 0);

  // Basic append / prepend and head/tail ordering.
  list.append(new Thing("one"));
  REQUIRE(list.begin() != list.end());
  REQUIRE(list.tail() == list.head());

  list.prepend(new Thing("two"));
  REQUIRE(list.count() == 2);
  REQUIRE(list.head()->_payload == "two");
  REQUIRE(list.tail()->_payload == "one");
  list.prepend(list.take_tail());
  REQUIRE(list.head()->_payload == "one");
  REQUIRE(list.tail()->_payload == "two");

  // Insertion relative to existing elements.
  list.insert_after(list.head(), new Thing("middle"));
  list.insert_before(list.tail(), new Thing("muddle"));
  REQUIRE(list.count() == 4);
  auto spot = list.begin();
  REQUIRE((*spot++)._payload == "one");
  REQUIRE((*spot++)._payload == "middle");
  REQUIRE((*spot++)._payload == "muddle");
  REQUIRE((*spot++)._payload == "two");
  REQUIRE(spot == list.end());

  // take_head vs erase(head) — both drop the first element.
  Thing *thing = list.take_head();
  REQUIRE(thing->_payload == "one");
  REQUIRE(list.count() == 3);
  REQUIRE(list.head() != nullptr);
  REQUIRE(list.head()->_payload == "middle");
  list.prepend(thing);
  list.erase(list.head());
  REQUIRE(list.count() == 3);
  REQUIRE(list.head() != nullptr);
  REQUIRE(list.head()->_payload == "middle");
  list.prepend(thing);

  // take_tail vs erase(tail) — both drop the last element.
  thing = list.take_tail();
  REQUIRE(thing->_payload == "two");
  REQUIRE(list.count() == 3);
  REQUIRE(list.tail() != nullptr);
  REQUIRE(list.tail()->_payload == "muddle");
  list.append(thing);
  list.erase(list.tail());
  REQUIRE(list.count() == 3);
  REQUIRE(list.tail() != nullptr);
  REQUIRE(list.tail()->_payload == "muddle");
  REQUIRE(list.head()->_payload == "one");

  // insert_before(end()) behaves like append.
  list.insert_before(list.end(), new Thing("trailer"));
  REQUIRE(list.count() == 4);
  REQUIRE(list.tail()->_payload == "trailer");

  // Non-public links via PrivateThing: bulk append keeps order and count.
  PrivateThingList priv_list;
  for (int i = 1; i <= 23; ++i) {
    std::string name;
    ts::bwprint(name, "Item {}", i);
    priv_list.append(new PrivateThing(name));
    REQUIRE(priv_list.count() == i);
  }
  REQUIRE(priv_list.head()->payload() == "Item 1");
  REQUIRE(priv_list.tail()->payload() == "Item 23");
}
|
Gekoncze/MgVulkan
|
1.1/src/cz/mg/vulkan/VkSparseMemoryBind.java
|
package cz.mg.vulkan;
public class VkSparseMemoryBind extends VkObject {
    /** Allocates a fresh native VkSparseMemoryBind structure. */
    public VkSparseMemoryBind() {
        super(sizeof());
    }

    /** Wraps a structure at the start of an existing native allocation. */
    protected VkSparseMemoryBind(VkMemory vkmemory) {
        super(vkmemory);
    }

    /** Wraps the structure located at {@code vkaddress} inside {@code vkmemory}. */
    protected VkSparseMemoryBind(VkMemory vkmemory, long vkaddress) {
        super(vkmemory, vkaddress);
    }

    /** Wraps the structure referenced by a raw pointer. */
    public VkSparseMemoryBind(VkPointer pointer) {
        super(pointer);
    }

    // ---- resourceOffset -------------------------------------------------

    /** Returns a wrapper over the native {@code resourceOffset} field. */
    public VkDeviceSize getResourceOffset() {
        return new VkDeviceSize(getVkMemory(), getResourceOffsetNative(getVkAddress()));
    }

    public void setResourceOffset(VkDeviceSize resourceOffset) {
        // null maps to the native null address
        setResourceOffsetNative(getVkAddress(), resourceOffset != null ? resourceOffset.getVkAddress() : VkPointer.getNullAddressNative());
    }

    /** Primitive view of {@link #getResourceOffset()}. */
    public long getResourceOffsetQ() {
        return getResourceOffset().getValue();
    }

    /** Sets the field from a primitive long. */
    public void setResourceOffset(long resourceOffset) {
        getResourceOffset().setValue(resourceOffset);
    }

    protected static native long getResourceOffsetNative(long address);

    protected static native void setResourceOffsetNative(long address, long resourceOffset);

    // ---- size -----------------------------------------------------------

    /** Returns a wrapper over the native {@code size} field. */
    public VkDeviceSize getSize() {
        return new VkDeviceSize(getVkMemory(), getSizeNative(getVkAddress()));
    }

    public void setSize(VkDeviceSize size) {
        setSizeNative(getVkAddress(), size != null ? size.getVkAddress() : VkPointer.getNullAddressNative());
    }

    /** Primitive view of {@link #getSize()}. */
    public long getSizeQ() {
        return getSize().getValue();
    }

    public void setSize(long size) {
        getSize().setValue(size);
    }

    protected static native long getSizeNative(long address);

    protected static native void setSizeNative(long address, long size);

    // ---- memory ---------------------------------------------------------

    /** Returns a wrapper over the native {@code memory} handle field. */
    public VkDeviceMemory getMemory() {
        return new VkDeviceMemory(getVkMemory(), getMemoryNative(getVkAddress()));
    }

    public void setMemory(VkDeviceMemory memory) {
        setMemoryNative(getVkAddress(), memory != null ? memory.getVkAddress() : VkPointer.getNullAddressNative());
    }

    protected static native long getMemoryNative(long address);

    protected static native void setMemoryNative(long address, long memory);

    // ---- memoryOffset ---------------------------------------------------

    /** Returns a wrapper over the native {@code memoryOffset} field. */
    public VkDeviceSize getMemoryOffset() {
        return new VkDeviceSize(getVkMemory(), getMemoryOffsetNative(getVkAddress()));
    }

    public void setMemoryOffset(VkDeviceSize memoryOffset) {
        setMemoryOffsetNative(getVkAddress(), memoryOffset != null ? memoryOffset.getVkAddress() : VkPointer.getNullAddressNative());
    }

    /** Primitive view of {@link #getMemoryOffset()}. */
    public long getMemoryOffsetQ() {
        return getMemoryOffset().getValue();
    }

    public void setMemoryOffset(long memoryOffset) {
        getMemoryOffset().setValue(memoryOffset);
    }

    protected static native long getMemoryOffsetNative(long address);

    protected static native void setMemoryOffsetNative(long address, long memoryOffset);

    // ---- flags ----------------------------------------------------------

    /** Returns a wrapper over the native {@code flags} field. */
    public VkSparseMemoryBindFlags getFlags() {
        return new VkSparseMemoryBindFlags(getVkMemory(), getFlagsNative(getVkAddress()));
    }

    public void setFlags(VkSparseMemoryBindFlags flags) {
        setFlagsNative(getVkAddress(), flags != null ? flags.getVkAddress() : VkPointer.getNullAddressNative());
    }

    /** Primitive view of {@link #getFlags()}. */
    public int getFlagsQ() {
        return getFlags().getValue();
    }

    public void setFlags(int flags) {
        getFlags().setValue(flags);
    }

    protected static native long getFlagsNative(long address);

    protected static native void setFlagsNative(long address, long flags);

    /** Copies the whole native structure {@code o} over this one. */
    public void set(VkSparseMemoryBind o){
        setNative(getVkAddress(), o.getVkAddress());
    }

    /** Size in bytes of the native structure. */
    public static native long sizeof();

    protected static native void setNative(long o1, long o2);
public static class Array extends VkSparseMemoryBind implements cz.mg.collections.array.ReadonlyArray<VkSparseMemoryBind> {
private final int count;
public Array(int count) {
super(new VkMemory(count*VkSparseMemoryBind.sizeof()));
this.count = count;
}
public Array(VkSparseMemoryBind o, int count){
super(o.getVkMemory(), o.getVkAddress());
this.count = count;
}
public Array(VkPointer pointer, int count){
super(pointer);
this.count = count;
}
protected Array(VkMemory vkmemory, int count) {
super(vkmemory);
this.count = count;
}
protected Array(VkMemory vkmemory, long vkaddress, int count) {
super(vkmemory, vkaddress);
this.count = count;
}
@Override
public int count(){
return count;
}
@Override
public VkSparseMemoryBind get(int i){
return new VkSparseMemoryBind(getVkMemory(), address(i));
}
protected long address(int i){
return VkPointer.plus(getVkAddress(), sizeof()*i);
}
public static class Array2 extends VkPointer.Array {
public Array2(int count) {
super(count);
}
protected Array2(VkMemory vkmemory, int count) {
super(vkmemory, count);
}
protected Array2(VkMemory vkmemory, long vkaddress, int count) {
super(vkmemory, vkaddress, count);
}
}
}
}
|
LuanEdCosta/backstage
|
src/operations/template/helpers.js
|
<filename>src/operations/template/helpers.js
import { RESERVED_LABEL_IMG } from '../../constants';
/**
 * Checks whether a template attribute carries a reserved "image" label.
 *
 * @param {Object} attr - Template attribute; may expose a `metadata` array of
 *   `{ label }` entries.
 * @returns {boolean} true when at least one metadata entry's label is listed
 *   in RESERVED_LABEL_IMG, false otherwise (including when `metadata` is
 *   absent or empty).
 */
export const hasReservedLabelImg = (attr) => {
  // `some` yields a real boolean and stops at the first match, replacing the
  // original nested-if + `find`-used-as-a-predicate chain.
  const { metadata } = attr;
  return Boolean(
    metadata && metadata.some((meta) => RESERVED_LABEL_IMG.includes(meta.label)),
  );
};
|
bexl/bexl-js
|
src/nodes/Literal.js
|
// @flow
import Node from './Node';
import Token from '../Token';
import {Type} from '../types';
export default class Literal extends Node {
dataType: Type;
value: ?(string | number | boolean);
constructor(token: Token, dataType: Type, value: ?(string | number | boolean) = null) {
super('literal', token, token);
this.dataType = dataType;
this.value = value == null ? token.literal : value;
}
pretty(indent: number = 0): string {
let pad = ' '.repeat(indent);
return `${pad}Literal(${String(this.value)})`;
}
}
|
agnor99/OpenBlocks
|
OpenBlocks/broken/main/java/openblocks/common/item/ItemPedometer.java
|
package openblocks.common.item;
import javax.annotation.Nonnull;
import net.minecraft.entity.Entity;
import net.minecraft.entity.player.PlayerEntity;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.util.ActionResult;
import net.minecraft.util.ActionResultType;
import net.minecraft.util.Hand;
import net.minecraft.util.ResourceLocation;
import net.minecraft.util.SoundCategory;
import net.minecraft.util.text.StringTextComponent;
import net.minecraft.util.text.TranslationTextComponent;
import net.minecraft.world.World;
import openblocks.OpenBlocks;
import openblocks.common.PedometerHandler;
import openblocks.common.PedometerHandler.PedometerData;
import openblocks.common.PedometerHandler.PedometerState;
import openmods.utils.Units.DistanceUnit;
import openmods.utils.Units.SpeedUnit;
/**
 * Handheld pedometer item: tracks the player's movement client-side and
 * prints speed/distance statistics to chat.
 *
 * NOTE(review): this file lives under a "broken" source root and mixes
 * pre-1.13 Item API calls (setMaxStackSize, addPropertyOverride, onUpdate,
 * Entity.motionX/Y/Z, ActionResult.newResult) with 1.14+ imports
 * (PlayerEntity, Hand, TranslationTextComponent) — it will not compile
 * against either mapping set as-is.  Comments describe intent only.
 */
public class ItemPedometer extends Item {
    public ItemPedometer() {
        setMaxStackSize(1);
        // Item-model property "speed": magnitude of the holder's motion vector,
        // used by the model JSON to pick a needle position. 0 when no holder.
        addPropertyOverride(new ResourceLocation("speed"), (stack, worldIn, entityIn) -> {
            if (entityIn == null) return 0;
            return (float)Math.sqrt(entityIn.motionX * entityIn.motionX + entityIn.motionY * entityIn.motionY + entityIn.motionZ * entityIn.motionZ);
        });
    }

    /** Sends a translated chat message to the player; {@code format} is a translation key. */
    private static void send(PlayerEntity player, String format, Object... args) {
        player.sendMessage(new TranslationTextComponent(format, args));
    }

    // Units used when formatting the statistics shown in chat.
    private static final SpeedUnit speedUnit = SpeedUnit.M_PER_TICK;

    private static final DistanceUnit distanceUnit = DistanceUnit.M;

    /**
     * Right-click behavior (client side only): sneaking resets tracking;
     * otherwise either shows current stats (if running) or starts tracking.
     * The server side only plays the use sound.
     */
    @Override
    public ActionResult<ItemStack> onItemRightClick(World world, PlayerEntity player, Hand hand) {
        if (world.isRemote) {
            if (player.isSneaking()) {
                PedometerHandler.getProperty(player).reset();
                send(player, "openblocks.misc.pedometer.tracking_reset");
            } else {
                PedometerState state = PedometerHandler.getProperty(player);
                if (state.isRunning()) {
                    showPedometerData(player, state);
                } else {
                    state.init(player, world);
                    send(player, "openblocks.misc.pedometer.tracking_started");
                }
            }
        } else {
            world.playSound(null, player.getPosition(), OpenBlocks.Sounds.ITEM_PEDOMETER_USE, SoundCategory.PLAYERS, 1F, 1F);
        }
        return ActionResult.newResult(ActionResultType.SUCCESS, player.getHeldItem(hand));
    }

    /**
     * Dumps all recorded statistics to the player's chat.
     * Silently returns when tracking has produced no data yet.
     *
     * NOTE(review): translation keys "straght_line_distance" and
     * "straigh_line_speed" are misspelled; they are runtime identifiers that
     * must match the lang files, so fix both sides together if at all.
     */
    protected void showPedometerData(PlayerEntity player, PedometerState state) {
        PedometerData result = state.getData();
        if (result == null) return;
        // Blank line to visually separate the report from earlier chat.
        player.sendMessage(new StringTextComponent(""));
        send(player, "openblocks.misc.pedometer.start_point", String.format("%.1f %.1f %.1f", result.startingPoint.x, result.startingPoint.y, result.startingPoint.z));
        send(player, "openblocks.misc.pedometer.speed", speedUnit.format(result.currentSpeed));
        send(player, "openblocks.misc.pedometer.avg_speed", speedUnit.format(result.averageSpeed()));
        send(player, "openblocks.misc.pedometer.total_distance", distanceUnit.format(result.totalDistance));
        send(player, "openblocks.misc.pedometer.straght_line_distance", distanceUnit.format(result.straightLineDistance));
        send(player, "openblocks.misc.pedometer.straigh_line_speed", speedUnit.format(result.straightLineSpeed()));
        send(player, "openblocks.misc.pedometer.last_check_speed", speedUnit.format(result.lastCheckSpeed()));
        send(player, "openblocks.misc.pedometer.last_check_distance", distanceUnit.format(result.lastCheckDistance));
        send(player, "openblocks.misc.pedometer.last_check_time", result.lastCheckTime);
        send(player, "openblocks.misc.pedometer.total_time", result.totalTime);
    }

    /**
     * Per-tick inventory update: advances tracking while the item sits in the
     * hotbar (slot &lt; 9) on the client.
     */
    @Override
    public void onUpdate(@Nonnull ItemStack stack, World world, Entity entity, int slotId, boolean isSelected) {
        if (world.isRemote && slotId < 9) {
            PedometerState state = PedometerHandler.getProperty(entity);
            if (state.isRunning()) state.update(entity);
        }
    }
}
|
lechium/tvOS145Headers
|
usr/libexec/wifivelocityd/W5IOPowerManagement.h
|
//
// Generated by classdumpios 1.0.1 (64 bit) (iOS port by DreamDevLost)(Debug version compiled Sep 26 2020 13:48:20).
//
// Copyright (C) 1997-2019 <NAME>.
//
#import <objc/NSObject.h>
@protocol OS_dispatch_queue;
// Class-dump reconstruction of wifivelocityd's power-management observer.
// Field offsets in the trailing comments come from the dump itself; the
// review annotations below are inferences from member names — confirm
// against the binary before relying on them.
@interface W5IOPowerManagement : NSObject
{
    NSObject<OS_dispatch_queue> *_queue; // 8 = 0x8  — presumably serializes event handling; TODO confirm
    int _notifyToken; // 16 = 0x10  — presumably a notify(3) registration token; TODO confirm
    _Bool _isMonitoringEvents; // 20 = 0x14  — guards start/stopEventMonitoring re-entry (assumed)
    CDUnknownBlockType _updatedPowerManagementCallback; // 24 = 0x18
}

// Block invoked when power-management state changes (setter synthesized below).
@property(copy, nonatomic) CDUnknownBlockType updatedPowerManagementCallback; // @synthesize updatedPowerManagementCallback=_updatedPowerManagementCallback;
- (unsigned int)caps; // IMP=0x00000001000617f8
- (void)stopEventMonitoring; // IMP=0x0000000100061754
- (void)startEventMonitoring; // IMP=0x00000001000614bc
- (void)dealloc; // IMP=0x0000000100061468
- (id)init; // IMP=0x000000010006130c
@end
|
bje-/SAM
|
api/include/SAM_Singlediodeparams.h
|
<reponame>bje-/SAM<filename>api/include/SAM_Singlediodeparams.h
#ifndef SAM_SINGLEDIODEPARAMS_H_
#define SAM_SINGLEDIODEPARAMS_H_
#include "visibility.h"
#include "SAM_api.h"
#include <stdint.h>
#ifdef __cplusplus
extern "C"
{
#endif
//
// Singlediodeparams Technology Model
//
/**
* Create a Singlediodeparams variable table.
* @param def: the set of financial model-dependent defaults to use (None, Residential, ...)
* @param[in,out] err: a pointer to an error object
*/
SAM_EXPORT typedef void * SAM_Singlediodeparams;
/// verbosity level 0 or 1. Returns 1 on success
SAM_EXPORT int SAM_Singlediodeparams_execute(SAM_table data, int verbosity, SAM_error* err);
//
// SingleDiodeModel parameters
//
/**
* Set Adj_ref: OC SC temp coeff adjustment [%]
* options: None
* constraints: None
* required if: *
*/
SAM_EXPORT void SAM_Singlediodeparams_SingleDiodeModel_Adj_ref_nset(SAM_table ptr, double number, SAM_error *err);
/**
* Set I: Irradiance [W/m2]
* options: None
* constraints: None
* required if: *
*/
SAM_EXPORT void SAM_Singlediodeparams_SingleDiodeModel_I_nset(SAM_table ptr, double number, SAM_error *err);
/**
* Set Il_ref: Light current [A]
* options: None
* constraints: None
* required if: *
*/
SAM_EXPORT void SAM_Singlediodeparams_SingleDiodeModel_Il_ref_nset(SAM_table ptr, double number, SAM_error *err);
/**
* Set Io_ref: Saturation current [A]
* options: None
* constraints: None
* required if: *
*/
SAM_EXPORT void SAM_Singlediodeparams_SingleDiodeModel_Io_ref_nset(SAM_table ptr, double number, SAM_error *err);
/**
* Set Rs_ref: Series resistance [ohm]
* options: None
* constraints: None
* required if: *
*/
SAM_EXPORT void SAM_Singlediodeparams_SingleDiodeModel_Rs_ref_nset(SAM_table ptr, double number, SAM_error *err);
/**
* Set Rsh_ref: Shunt resistance [ohm]
* options: None
* constraints: None
* required if: *
*/
SAM_EXPORT void SAM_Singlediodeparams_SingleDiodeModel_Rsh_ref_nset(SAM_table ptr, double number, SAM_error *err);
/**
* Set T: Temperature [C]
* options: None
* constraints: None
* required if: *
*/
SAM_EXPORT void SAM_Singlediodeparams_SingleDiodeModel_T_nset(SAM_table ptr, double number, SAM_error *err);
/**
* Set a_ref: Modified nonideality factor [1/V]
* options: None
* constraints: None
* required if: *
*/
SAM_EXPORT void SAM_Singlediodeparams_SingleDiodeModel_a_ref_nset(SAM_table ptr, double number, SAM_error *err);
/**
* Set alpha_isc: Temp coeff of current at SC [A/'C]
* options: None
* constraints: None
* required if: *
*/
SAM_EXPORT void SAM_Singlediodeparams_SingleDiodeModel_alpha_isc_nset(SAM_table ptr, double number, SAM_error *err);
/**
* SingleDiodeModel Getters
*/
SAM_EXPORT double SAM_Singlediodeparams_SingleDiodeModel_Adj_ref_nget(SAM_table ptr, SAM_error *err);
SAM_EXPORT double SAM_Singlediodeparams_SingleDiodeModel_I_nget(SAM_table ptr, SAM_error *err);
SAM_EXPORT double SAM_Singlediodeparams_SingleDiodeModel_Il_ref_nget(SAM_table ptr, SAM_error *err);
SAM_EXPORT double SAM_Singlediodeparams_SingleDiodeModel_Io_ref_nget(SAM_table ptr, SAM_error *err);
SAM_EXPORT double SAM_Singlediodeparams_SingleDiodeModel_Rs_ref_nget(SAM_table ptr, SAM_error *err);
SAM_EXPORT double SAM_Singlediodeparams_SingleDiodeModel_Rsh_ref_nget(SAM_table ptr, SAM_error *err);
SAM_EXPORT double SAM_Singlediodeparams_SingleDiodeModel_T_nget(SAM_table ptr, SAM_error *err);
SAM_EXPORT double SAM_Singlediodeparams_SingleDiodeModel_a_ref_nget(SAM_table ptr, SAM_error *err);
SAM_EXPORT double SAM_Singlediodeparams_SingleDiodeModel_alpha_isc_nget(SAM_table ptr, SAM_error *err);
/**
* Outputs Getters
*/
SAM_EXPORT double SAM_Singlediodeparams_Outputs_Il_nget(SAM_table ptr, SAM_error *err);
SAM_EXPORT double SAM_Singlediodeparams_Outputs_Io_nget(SAM_table ptr, SAM_error *err);
SAM_EXPORT double SAM_Singlediodeparams_Outputs_Rs_nget(SAM_table ptr, SAM_error *err);
SAM_EXPORT double SAM_Singlediodeparams_Outputs_Rsh_nget(SAM_table ptr, SAM_error *err);
SAM_EXPORT double SAM_Singlediodeparams_Outputs_a_nget(SAM_table ptr, SAM_error *err);
#ifdef __cplusplus
} /* end of extern "C" { */
#endif
#endif
|
peter-ls/kylo
|
core/search/search-rest-model/src/main/java/com/thinkbiganalytics/search/rest/model/FeedMetadataSearchResultData.java
|
package com.thinkbiganalytics.search.rest.model;
/*-
* #%L
* kylo-search-rest-model
* %%
* Copyright (C) 2017 ThinkBig Analytics
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import java.util.List;
/**
* Stores the search results coming from a feed metadata
*/
/**
 * Search result entry carrying the metadata of a Kylo feed.
 *
 * <p>Plain data holder: the constructor fixes the icon, color, and result
 * type that the UI uses to render feed hits; all other fields are populated
 * by the search layer via setters.</p>
 */
public class FeedMetadataSearchResultData extends AbstractSearchResultData {

    /** Icon name used when rendering feed results (was a constructor local). */
    private static final String ICON = "linear_scale";

    /** Display color used when rendering feed results (was a constructor local). */
    private static final String COLOR = "Maroon";

    private String feedSystemName;

    private String feedTitle;

    private String feedDescription;

    private String feedCategoryId;

    private List<String> feedTags;

    private String userProperties;

    private String feedId;

    public FeedMetadataSearchResultData() {
        super.setIcon(ICON);
        super.setColor(COLOR);
        super.setType(SearchResultType.KYLO_FEEDS);
    }

    /** @return the feed's system (machine-readable) name */
    public String getFeedSystemName() {
        return feedSystemName;
    }

    public void setFeedSystemName(String feedSystemName) {
        this.feedSystemName = feedSystemName;
    }

    /** @return the feed's display title */
    public String getFeedTitle() {
        return feedTitle;
    }

    public void setFeedTitle(String feedTitle) {
        this.feedTitle = feedTitle;
    }

    /** @return the feed's free-text description */
    public String getFeedDescription() {
        return feedDescription;
    }

    public void setFeedDescription(String feedDescription) {
        this.feedDescription = feedDescription;
    }

    /** @return id of the category the feed belongs to */
    public String getFeedCategoryId() {
        return feedCategoryId;
    }

    public void setFeedCategoryId(String feedCategoryId) {
        this.feedCategoryId = feedCategoryId;
    }

    /** @return tags attached to the feed */
    public List<String> getFeedTags() {
        return feedTags;
    }

    public void setFeedTags(List<String> feedTags) {
        this.feedTags = feedTags;
    }

    /** @return user-defined properties, serialized as a single string */
    public String getUserProperties() {
        return userProperties;
    }

    public void setUserProperties(String userProperties) {
        this.userProperties = userProperties;
    }

    /** @return the feed's unique id */
    public String getFeedId() {
        return feedId;
    }

    public void setFeedId(String feedId) {
        this.feedId = feedId;
    }
}
|
tunnelvisionlabs/java-immutable
|
src/com/tvl/util/Requires.java
|
<filename>src/com/tvl/util/Requires.java
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
package com.tvl.util;
/**
 * Common runtime precondition checks that throw on failure.
 *
 * <p>These methods are used for argument validation throughout the immutable
 * collections library.  Declared as an empty enum so the type can never be
 * instantiated or subclassed.</p>
 */
enum Requires {
    ;

    /**
     * Ensures that an argument is non-{@code null}.
     *
     * @param value The argument value.
     * @param parameterName The parameter name.
     * @param <T> The type of the parameter.
     * @exception NullPointerException if {@code value} is {@code null}.
     */
    public static <T> void notNull(T value, String parameterName) {
        if (value != null) {
            return;
        }

        throwNullPointerException(parameterName);
    }

    /**
     * Ensures that an argument is non-{@code null} and hands it back, which
     * allows validation inside constructor chaining expressions.
     *
     * @param value The argument value.
     * @param parameterName The parameter name.
     * @param <T> The type of the parameter.
     * @return {@code value}, unchanged.
     * @exception NullPointerException if {@code value} is {@code null}.
     */
    public static <T> T notNullPassThrough(T value, String parameterName) {
        notNull(value, parameterName);
        return value;
    }

    /**
     * Throws an {@link IndexOutOfBoundsException} unless the condition holds.
     *
     * @param condition The evaluated condition.
     * @param parameterName The name of the parameter being validated.
     * @exception IndexOutOfBoundsException if {@code condition} is false.
     */
    public static void range(boolean condition, String parameterName) {
        // Delegate to the three-argument form with no extra message.
        range(condition, parameterName, null);
    }

    /**
     * Throws an {@link IndexOutOfBoundsException} unless the condition holds.
     *
     * @param condition The evaluated condition.
     * @param parameterName The name of the parameter being validated.
     * @param message An additional message for the exception when {@code condition} is false.
     * @exception IndexOutOfBoundsException if {@code condition} is false.
     */
    public static void range(boolean condition, String parameterName, String message) {
        if (!condition) {
            failRange(parameterName, message);
        }
    }

    /**
     * Throws an {@link IllegalArgumentException} unless the condition holds.
     *
     * @param condition The evaluated condition.
     * @param parameterName The name of the parameter being validated.
     * @param message An additional message for the exception when {@code condition} is false.
     * @exception IllegalArgumentException if {@code condition} is false.
     */
    public static void argument(boolean condition, String parameterName, String message) {
        if (condition) {
            return;
        }

        throw new IllegalArgumentException(message + ": " + parameterName);
    }

    /**
     * Throws an {@link IllegalArgumentException} unless the condition holds.
     *
     * @param condition The evaluated condition.
     * @exception IllegalArgumentException if {@code condition} is false.
     */
    public static void argument(boolean condition) {
        if (!condition) {
            throw new IllegalArgumentException();
        }
    }

    /**
     * Unconditionally throws an {@link IndexOutOfBoundsException}.
     *
     * @param parameterName The name of the parameter that was out of range.
     * @param message An optional additional message; may be {@code null} or empty.
     * @exception IndexOutOfBoundsException always.
     */
    public static void failRange(String parameterName, String message) {
        throw (message == null || message.isEmpty())
            ? new IndexOutOfBoundsException(parameterName)
            : new IndexOutOfBoundsException(message + ": " + parameterName);
    }

    /**
     * Unconditionally throws a {@link NullPointerException}.
     * Kept out of line so callers of {@link #notNull} stay small enough to inline.
     *
     * @param parameterName The name of the parameter that was {@code null}.
     * @exception NullPointerException always.
     */
    private static void throwNullPointerException(String parameterName) {
        throw new NullPointerException(parameterName);
    }
}
|
MichaelJCaruso/vxa-node
|
deps/vision/src/kernel/Vca_VcaTransport.h
|
#ifndef Vca_VcaTransport_Interface
#define Vca_VcaTransport_Interface
/************************
***** Components *****
************************/
#include "Vca_VActivity.h"
#include "V_VFifoLite.h"
#include "V_VScheduler.h"
#include "VkMapOf.h"
#include "Vca_IBSClient.h"
#include "Vca_ITrigger.h"
#include "Vca_VBSConsumer.h"
#include "Vca_VBSProducer.h"
#include "Vca_VBSManager.h"
#include "Vca_VcaSite.h"
#include "V_VTime.h"
/**************************
***** Declarations *****
**************************/
#include "Vca_VTimer.h"
class VTypeInfo;
/*************************
***** Definitions *****
*************************/
/**************************
*---- VcaTransport ----*
**************************/
namespace Vca {
class IError;
class VcaPeer;
class VcaConnection;
class VcaSerializer;
class VcaSerializerForMessage;
class VcaSerializerForPlumbing;
class VcaTransportInbound;
class VcaTransportOutbound;
class VMessage;
class ABSTRACT Vca_API VcaTransport : public VActivity {
DECLARE_ABSTRACT_RTTLITE (VcaTransport, VActivity);
DECLARE_ROLEPLAYER_CLASSINFO ();
// Friends
friend class VcaConnection;
friend class VcaPeer;
friend class VcaSerializer;
friend class VcaSerializerForMessage;
// Aliases
public:
typedef V::VScheduler Scheduler;
typedef Pointer TransportPointer;
// Enum
protected:
enum {
HEARTBEAT_VALUE = 6000000 // Microseconds
};
// BSManager
public:
class Vca_API BSManager : public VBSManager {
DECLARE_CONCRETE_RTTLITE (BSManager, VBSManager);
// Construction
public:
BSManager (VcaTransport *pTransport);
// Destruction
protected:
~BSManager ();
// Callbacks
public:
virtual void onEnd () OVERRIDE {}
virtual void onError (IError *pError, VString const &rMessage) OVERRIDE {}
virtual void onTransfer (size_t sTransfer) OVERRIDE;
virtual bool onStatus (VStatus const &rStatus) OVERRIDE;
virtual void onChange (U32 sChange) OVERRIDE;
// Query
public:
VcaTransport *transport () const {
return m_pTransport;
}
// State
private:
TransportPointer const m_pTransport; // maintain a soft reference, to prevent cyclic referencing
};
friend class BSManager;
// InTransitMessage
public:
/**
 * Wrapper class for VMessage that VcaTransport maintains.
 * Pairs a queued VMessage with its starting byte offset in the byte stream,
 * so that as the VBS acknowledges actual byte transfers these messages can
 * be dequeued from the VcaTransport's in-transit queue.
 */
class Vca_API InTransitMessage : public VReferenceable {
    DECLARE_CONCRETE_RTTLITE (InTransitMessage, VReferenceable);

// Friend
    friend class VcaTransport;
    friend class VcaTransportOutbound;

// Construction
public:
    InTransitMessage (VcaTransport *pTransport, VMessage *pMessage, U32 xStartByteOffset);

// Destruction
protected:
    ~InTransitMessage ();

/// @name Query
//@{
public:
    U32 size () const {
        return m_sSize;  // fixed: stray duplicate semicolon removed
    }
    bool isSizeSet () const {
        // UINT_MAX is the sentinel for "size not yet recorded".
        return m_sSize != UINT_MAX;
    }
    U32 startByteOffset () const {
        return m_xStartByteOffset;
    }
    VMessage* message () const {
        return m_pMessage;
    }
//@}

/// @name Update
//@{
public:
    void setSize (U32 sSize) {
        m_sSize = sSize;
    }
    void decrementStartByteOffset (U32 sSize) {
        // Shifts this message earlier as already-acknowledged bytes are retired.
        m_xStartByteOffset -= sSize;
    }
//@}

// State
private:
    /** The transport within which we're operating. A soft reference to prevent cyclic referencing. */
    TransportPointer const m_pTransport;
    /** The message that we're wrapping. */
    VMessage::Reference const m_pMessage;
    U32 m_xStartByteOffset;  // byte offset of this message within the outbound stream
    U32 m_sSize;             // serialized size; UINT_MAX until setSize() records it
    Reference m_pSuccessor;  // link to the next queued message
};
protected:
typedef InTransitMessage Message;
// Trace Control
private:
enum TransportTracing {
Tracing_NotInitialized,
Tracing_On,
Tracing_Off
};
static TransportTracing g_bTracingTransfers;
static bool TracingTransfers ();
public:
static void setTracingAllTransfersTo (bool bTrace) {
g_bTracingTransfers = bTrace ? Tracing_On : Tracing_Off;
}
void setTracingTransfersTo (bool fTrace) {
m_bTracingTransfers = fTrace;
}
bool tracingTransfers () const {
return m_bTracingTransfers || TracingTransfers ();
}
// Construction
protected:
VcaTransport (VcaConnection *pConnection);
// Destruction
protected:
~VcaTransport ();
// Roles
public:
using BaseClass::getRole;
/// @name IBSClient Role
//@{
private:
VRole<ThisClass,IBSClient> m_pIBSClient;
protected:
void getRole (IBSClient::Reference& rpRole) {
m_pIBSClient.getRole (rpRole);
}
// IBSClient Methods
public/*private*/:
virtual void OnTransfer (IBSClient *pRole, size_t sTransfer) = 0;
//@}
/// @name Access
//@{
protected:
virtual void getDescription_(VString &rResult) const OVERRIDE;
public:
char const *directionCode () const;
VcaSite *peer () const {
return m_pPeer;
}
virtual VcaTransportInbound *transportIfInbound ();
virtual VcaTransportOutbound *transportIfOutbound ();
U32 inTransitMessageSize () const {
return m_sInTransitMessage;
}
void getPeerSiteInfo (IVReceiver<VString const&> *pResultSink) const;
//@}
/// @name Query
//@{
public:
virtual bool isIncoming () const {
return false;
}
virtual bool isOutgoing () const {
return false;
}
bool plumbed () const {
return m_pPeer.isntNil ();
}
bool shutdownInProgress () const {
return m_bShutdownInProgress;
}
//@}
/// @name Data Display
//@{
protected:
void displayDataAtTransferPoint (char const *pData, size_t sData, FILE *pFile) const;
void displayDataAtTransferPoint (char const *pData, size_t sData) const;
//@}
/// @name Data Transfer
//@{
public:
virtual void getData (VcaSerializer *pSerializer, void *pVoid, size_t sVoid);
virtual void putData (VcaSerializer *pSerializer, void const *pData, size_t sData);
virtual void transferDataFor (VcaSerializer *pSerializer) = 0;
//@}
/// @name Plumbing
//@{
private:
void plumbThyself ();
public:
virtual void setPeerFrom (VcaSerializerForPlumbing *pSerializer) = 0;
//@}
/// @name Processing
//@{
protected:
void resumeSerialization (bool notSuspended);
void abortSerialization ();
//@}
/// @name Scheduling
//@{
private:
void schedule (VcaSerializer *pSerializer);
virtual void startNextSerializer () = 0;
virtual void wrapup (VcaSerializer *pSerializer) = 0;
//@}
/// @name InTransit Message Bookkeeping
//@{
protected:
void enqueue (Message *pMsg);
bool dequeue (Message::Reference &rpMsg);
//@}
/// @name BSManager Calls
//@{
protected:
virtual void onBytesTransferred () {}
virtual void onChange (U32 sChange) {}
virtual void onFailedOrClosed () {}
//@}
/// @name Message Serialization Completion
//@{
protected:
virtual void onWrapupSerialization () {}
//@}
/// @name Shutdown
//@{
protected:
void onShutdownInProgress ();
public:
virtual void Close () = 0;
virtual void Abort () = 0;
//@}
// BSManager is not a reference but a direct pointer, to avoid cyclic reference chain
// State
protected:
VReference<VcaConnection> const m_pConnection;
VcaSite::Reference m_pPeer;
VcaSerializer::Reference m_pInTransitSerializer;
V::VFifoLite m_pInTransitDatum;
Scheduler m_iScheduler;
BSManager::Reference m_pBSManager;
U32 m_sInTransitMessage;
Message::Reference m_pInTransitMessage;
Message::Reference m_pInTransitMsgHead;
Message::Reference m_pInTransitMsgTail;
private:
U64 mutable m_cBytesTransfered;
bool m_bTracingTransfers;
bool m_bShutdownInProgress;
};
/*********************************
*---- VcaTransportInbound ----*
*********************************/
class VcaTransportInbound : public VcaTransport {
DECLARE_CONCRETE_RTTLITE (VcaTransportInbound, VcaTransport);
// Friends
friend class VcaConnection;
friend class VcaPeer;
// Byte Stream Types
protected:
typedef VBSProducer BS;
// Table Types
public:
typedef VkMapOf<SSID,SSID const&,SSID const&,VcaSite::Reference> SiteTable;
typedef VkMapOf<SSID,SSID const&,SSID const&,VTypeInfo::Reference> TypeTable;
// Construction
public:
VcaTransportInbound (VcaConnection *pConnection, BS *pBS);
// Destruction
protected:
~VcaTransportInbound ();
// Access
public:
size_t transferDeficit () const {
return m_pInTransitDatum.producerAreaSize ();
}
public:
virtual VcaTransportInbound *transportIfInbound () OVERRIDE {
return this;
}
// Query
public:
bool isConnected () const {
return m_pBS.isntNil ();
}
virtual bool isIncoming () const OVERRIDE {
return true;
}
// Import Management - for Optimization
public:
/***** Update *****/
bool addImport (SSID const &rSSID, VTypeInfo *pObject);
/***** Access *****/
bool getImport (SSID const &rSSID, VTypeInfo *&rpObject) const;
// Peer Import Update
bool addImport (SSID const &rSSID, VcaSite *pPeer);
// Peer Import Access
bool getImport (SSID const &rSSID, VcaSite *&rpPeer) const;
// Callbacks
public/*private*/:
virtual void OnTransfer (IBSClient *pRole, size_t sTransfer) OVERRIDE;
virtual void OnError (IClient *pRole, IError *pError, VString const &rMessage) OVERRIDE;
// Data Transfer
public:
virtual void getData (VcaSerializer *pSerializer, void *pVoid, size_t sVoid) OVERRIDE;
virtual void transferDataFor (VcaSerializer *pSerializer) OVERRIDE;
// Plumbing
private:
void onAttach (VcaPeer *pPeer);
void onDetach (VcaPeer *pPeer);
public:
virtual void setPeerFrom (VcaSerializerForPlumbing *pSerializer) OVERRIDE;
// Scheduling
private:
void startMessage ();
virtual void startNextSerializer () OVERRIDE;
virtual void wrapup (VcaSerializer *pSerializer) OVERRIDE;
// Shutdown
public:
virtual void Close () OVERRIDE;
virtual void Abort () OVERRIDE;
// State
private:
BS::Reference m_pBS;
SiteTable m_iSiteTable;
TypeTable m_iTypeTable;
Reference m_pSuccessor;
};
/**********************************
*---- VcaTransportOutbound ----*
**********************************/
class VcaTransportOutbound : public VcaTransport {
DECLARE_CONCRETE_RTTLITE (VcaTransportOutbound, VcaTransport);
// Friends
friend class VcaConnection;
friend class VcaPeer;
// Byte Stream Types
protected:
typedef VBSConsumer BS;
// Table Types
public:
typedef VkMapOf<
VcaSite::Reference, VcaSite*, VcaSite const*, SSID
> SiteTable;
typedef VkMapOf<
VTypeInfo::Reference, VTypeInfo*, VTypeInfo const*, SSID
> TypeTable;
// Construction
public:
VcaTransportOutbound (VcaConnection *pConnection, BS *pBS);
// Destruction
protected:
~VcaTransportOutbound ();
// Access
public:
size_t transferDeficit () const {
return m_pInTransitDatum.consumerAreaSize ();
}
public:
virtual VcaTransportOutbound *transportIfOutbound () OVERRIDE {
return this;
}
// Query
public:
bool isConnected () const {
return m_pBS.isntNil ();
}
virtual bool isOutgoing () const OVERRIDE {
return true;
}
// Export Management
public:
// returns true if object added to the export set, false if already present...
bool addExport (VTypeInfo *pObject, SSID &rSSID);
bool addExport (VcaSite *pObject, SSID &rSSID);
// Callbacks
public/*private*/:
virtual void OnTransfer (IBSClient *pRole, size_t sTransfer) OVERRIDE;
virtual void OnError (IClient *pRole, IError *pError, VString const &rMessage) OVERRIDE;
// Data Transfer
public:
virtual void putData (VcaSerializer *pSerializer, void const *pData, size_t sData) OVERRIDE ;
virtual void transferDataFor (VcaSerializer *pSerializer) OVERRIDE;
// Plumbing
private:
void onAttach (VcaPeer *pPeer);
void onDetach (VcaPeer *pPeer);
public:
virtual void setPeerFrom (VcaSerializerForPlumbing *pSerializer) OVERRIDE;
// Scheduling
private:
void startMessage (VMessage *pMessage);
virtual void startNextSerializer () OVERRIDE;
virtual void wrapup (VcaSerializer *pSerializer) OVERRIDE;
// BSManager Calls
protected:
virtual void onBytesTransferred () OVERRIDE;
virtual void onChange (U32 sChange) OVERRIDE;
virtual void onFailedOrClosed () OVERRIDE;
// Message Serialization
protected:
virtual void onWrapupSerialization () OVERRIDE;
// HeartBeat
private:
void startHeartBeat ();
void stopHeartBeat ();
// Base Roles
public:
using BaseClass::getRole;
// ITrigger Role
private:
VRole<ThisClass,ITrigger> m_pITrigger;
public:
void getRole (ITrigger::Reference& rpRole) {
m_pITrigger.getRole (rpRole);
}
// ITrigger Method
public:
void Process (ITrigger *pRole);
// Shutdown
public:
virtual void Close () OVERRIDE;
virtual void Abort () OVERRIDE;
// State
private:
BS::Reference m_pBS;
SiteTable m_iSiteTable;
TypeTable m_iTypeTable;
V::VTime m_iLastMessageTick;
VTimer::Reference m_pHeartBeatTimer;
unsigned int m_sHeartBeatStall;
Reference m_pSuccessor;
};
}
#endif
|
ms20hj/ets
|
ets-model/src/main/java/com/cms/ets/model/mongo/BaseLogEntity.java
|
package com.cms.ets.model.mongo;
import com.cms.ets.model.mysql.authority.User;
import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.index.Indexed;
import java.io.Serializable;
/**
 * Base entity for log documents stored in MongoDB.
 *
 * <p>Carries the identity of the acting user plus an indexed creation time,
 * so log collections can be queried by time range and by user.</p>
 *
 * @date 2019-11-15 11:43:09
 * @author ChenMingsen
 */
public abstract class BaseLogEntity implements Serializable {
    private static final long serialVersionUID = 1956786888637253872L;

    // MongoDB document id.
    @Id
    protected String id;

    // Indexed so logs can be filtered and sorted by creation time.
    @Indexed
    protected String createTime;

    // Indexed to support per-user log queries.
    @Indexed
    protected String userId;

    // Login name of the acting user.
    protected String userName;

    // Real (display) name of the acting user.
    protected String realName;

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public String getCreateTime() {
        return createTime;
    }

    public void setCreateTime(String createTime) {
        this.createTime = createTime;
    }

    public String getUserId() {
        return userId;
    }

    public void setUserId(String userId) {
        this.userId = userId;
    }

    public String getUserName() {
        return userName;
    }

    public void setUserName(String userName) {
        this.userName = userName;
    }

    public String getRealName() {
        return realName;
    }

    public void setRealName(String realName) {
        this.realName = realName;
    }

    /**
     * Populates the user-related fields from the given account; concrete
     * subclasses define which {@link User} attributes map onto
     * userId/userName/realName.
     */
    public abstract void setUser(User user);
}
|
smolnar82/cloudbreak
|
core/src/main/java/com/sequenceiq/cloudbreak/service/upgrade/sync/db/ComponentPersistingService.java
|
<filename>core/src/main/java/com/sequenceiq/cloudbreak/service/upgrade/sync/db/ComponentPersistingService.java
package com.sequenceiq.cloudbreak.service.upgrade.sync.db;
import java.util.Set;
import javax.inject.Inject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
import com.sequenceiq.cloudbreak.domain.stack.Component;
import com.sequenceiq.cloudbreak.domain.stack.Stack;
import com.sequenceiq.cloudbreak.service.upgrade.ClusterComponentUpdater;
import com.sequenceiq.cloudbreak.service.upgrade.StackComponentUpdater;
import com.sequenceiq.cloudbreak.service.upgrade.sync.operationresult.CmSyncOperationResult;
@Service
public class ComponentPersistingService {

    private static final Logger LOGGER = LoggerFactory.getLogger(ComponentPersistingService.class);

    /** Merges the stack's stored components with the CM sync result. */
    @Inject
    private CmSyncResultMergerService cmSyncResultMergerService;

    /** Writes the merged component set to the stack-component table. */
    @Inject
    private StackComponentUpdater stackComponentUpdater;

    /** Writes the merged component set to the cluster-component table. */
    @Inject
    private ClusterComponentUpdater clusterComponentUpdater;

    /**
     * Merges the CM sync operation result into the stack's component set and
     * persists the merged set to both the stack- and cluster-component tables.
     *
     * The trailing {@code false} flag is forwarded unchanged to both updaters;
     * its exact semantics live in those updaters.
     *
     * @param stack                 the stack whose components are being synced
     * @param cmSyncOperationResult versions/parcels read from the CM server
     */
    public void persistComponentsToDb(Stack stack, CmSyncOperationResult cmSyncOperationResult) {
        Set<Component> syncedFromServer = cmSyncResultMergerService.merge(stack, cmSyncOperationResult);
        LOGGER.debug("Active components read from CM server and persisting now to the DB: {}", syncedFromServer);
        stackComponentUpdater.updateComponentsByStackId(stack, syncedFromServer, false);
        clusterComponentUpdater.updateClusterComponentsByStackId(stack, syncedFromServer, false);
    }
}
|
zmijunkie/lrnwebcomponents
|
elements/simple-fields/lib/simple-fields-form-lite.js
|
import { LitElement, html, css } from "lit";
import { SimpleFieldsLite } from "./simple-fields-lite.js";
import { SimpleFields } from "../simple-fields.js";
/**
* `simple-fields-form-lite`
* binding and submission capabilities on top of simple-fields-lite
*
* @group simple-fields
* @element simple-fields-form-lite
* @extends simple-fields-lite
* @demo ./demo/form-lite.html
*/
class SimpleFieldsFormLite extends LitElement {
  static get styles() {
    return [
      css`
        :host {
          display: block;
        }
      `,
    ];
  }
  // custom element tag name
  static get tag() {
    return "simple-fields-form-lite";
  }
  // render function: a <form> wrapping simple-fields-lite plus before/after slots
  render() {
    return html`
      <form part="form">
        <slot name="before"></slot>
        <simple-fields-lite
          id="sf"
          .autofocus="${!this.disableAutofocus}"
          .language="${this.language}"
          .resources="${this.resources}"
          .schema="${this.schema}"
          .elementizer="${this.elementizer}"
          .value="${this.value}"
          @value-changed="${this._valueChanged}"
          part="fields"
        >
        </simple-fields-lite>
        <slot name="after"></slot>
        <slot></slot>
      </form>
    `;
  }
  /**
   * gets form element that matches given form ID
   *
   * NOTE(review): __formElementsArray is never assigned in this class;
   * presumably populated by a subclass or by simple-fields-lite — confirm.
   *
   * @param {*} id element id to look up
   * @returns {*} first matching element, or undefined
   * @memberof SimpleFieldsLite
   */
  getFormElementById(id) {
    return (this.__formElementsArray || []).filter((el) => el.id === id)[0];
  }
  /**
   * updates the form value when a field changes
   *
   * @param {*} e value-changed event from simple-fields-lite
   * @memberof SimpleFieldsFormLite
   */
  _valueChanged(e) {
    this.value = e.detail.value;
  }
  /**
   * allow setting value imperatively
   */
  setValue(value) {
    this.value = value;
  }
  /**
   * forces form rebuild
   *
   * NOTE(review): this delegates to `rebuidForm()` on simple-fields-lite;
   * confirm that spelling (vs `rebuildForm`) matches the method that
   * component actually exposes.
   *
   * @memberof SimpleFieldsFormLite
   */
  rebuildForm() {
    if (this.shadowRoot.querySelector("#sf"))
      this.shadowRoot.querySelector("#sf").rebuidForm();
  }
  /**
   * first update hook; also implies default settings
   */
  firstUpdated(changedProperties) {
    changedProperties.forEach((oldValue, propName) => {
      // request form when it changes
      if (propName === "loadEndpoint" && this.autoload) {
        this.loadData();
      }
    });
  }
  updated(changedProperties) {
    changedProperties.forEach((oldValue, propName) => {
      // autoload once: only while no response has arrived and no load is in flight
      if (this.autoload && !this.loadResponse && !this.loading) {
        if (propName === "loadEndpoint" || propName === "autoload") {
          this.loadData();
        }
      }
      // we have response data from an end point this should create the form
      // NOTE(review): assumes loadResponse is an object once it changes; a
      // falsy loadResponse would throw on `.data` here — confirm callers.
      if (propName === "loadResponse" && this.loadResponse.data) {
        this._applyLoadedData();
        /**
         * fires event for things to react to about the response
         * @event response
         */
        this.dispatchEvent(
          new CustomEvent("response", {
            bubbles: true,
            composed: true,
            cancelable: false,
            detail: this.loadResponse,
          })
        );
      }
    });
  }
  /**
   * applies loaded data to simple-fields-lite
   *
   * @memberof SimpleFieldsFormLite
   */
  _applyLoadedData() {
    if (this.loadResponse.data.schema) {
      this.schema = this.loadResponse.data.schema;
    }
    if (this.loadResponse.data.value) this.value = this.loadResponse.data.value;
  }
  /**
   * load data from the end point; sets `loading` while the request is in flight
   */
  loadData() {
    this.loading = true;
    this.fetchData(
      this.loadEndpoint,
      this.method,
      this.headers,
      this.body
    ).then((data) => {
      this.loading = false;
      this.loadResponse = data;
      /**
       * fires event when form data is loaded
       * @event simple-fields-form-data-loaded
       */
      this.dispatchEvent(
        new CustomEvent("simple-fields-form-data-loaded", {
          detail: {
            value: data,
          },
        })
      );
    });
  }
  /**
   * fetches JSON from an endpoint; GET requests encode `body` into the query
   * string, all other methods send it as a JSON request body
   */
  async fetchData(path, method, headers, body) {
    let response = {};
    if (method == "GET") {
      if (body) {
        path +=
          "?" +
          Object.entries(body)
            .map(
              ([key, val]) =>
                `${encodeURIComponent(key)}=${encodeURIComponent(val)}`
            )
            .join("&");
      }
      response = await fetch(path, {
        method: method,
        headers: headers,
      });
    } else {
      response = await fetch(path, {
        method: method,
        headers: headers,
        //make sure to serialize your JSON body
        body: JSON.stringify(body),
      });
    }
    let data = await response.json();
    return data;
  }
  constructor() {
    super();
    this._setFieldProperties();
    this._setFormProperties();
  }
  /**
   * allows constructor to be overridden; sets field-related defaults
   *
   * @memberof SimpleFieldsFormLite
   */
  _setFieldProperties() {
    this.disableAutofocus = false;
    this.language = "en";
    this.resources = {};
    this.schema = {};
    this.value = {};
  }
  /**
   * allows constructor to be overridden; sets form-related defaults
   *
   * @memberof SimpleFieldsFormLite
   */
  _setFormProperties() {
    this.method = "POST";
    this.loading = false;
    this.autoload = false;
    this.headers = {
      Accept: "application/json",
      "Content-Type": "application/json",
    };
    this.body = {};
  }
  /**
   * Submit form values if we have an end point, otherwise return value
   * of the fields as they currently exist.
   *
   * NOTE(review): the save request is fire-and-forget — the fetch promise is
   * neither awaited nor error-handled, and the local field values are
   * returned regardless of the request outcome.
   */
  submit() {
    let sf = this.shadowRoot.querySelector("#sf");
    if (this.saveEndpoint) {
      fetch(this.saveEndpoint, {
        method: this.method,
        headers: this.headers,
        //make sure to serialize your JSON body
        body: JSON.stringify(sf.value),
      });
    }
    return sf.value;
  }
  /**
   * properties specific to field function
   *
   * @readonly
   * @static
   * @memberof SimpleFieldsFormLite
   */
  static get fieldProperties() {
    return {
      /*
       * Disables autofocus on fields.
       */
      disableAutofocus: {
        type: Boolean,
      },
      /*
       * Error messages by field name,
       * eg. `{ contactinfo.email: "A valid email is required." }`
       */
      error: {
        type: Object,
      },
      /*
       * Language of the fields.
       */
      language: {
        type: String,
        attribute: "lang",
        reflect: true,
      },
      /*
       * resource link
       */
      resources: {
        type: Object,
      },
      /*
       * Fields schema.
       * _See [Fields Schema Format](fields-schema-format) above._
       */
      schema: {
        type: Object,
      },
      /**
       * Conversion from JSON Schema to HTML form elements.
       * _See [Configuring schemaConversion Property](configuring-the-schemaConversion-property) above._
       */
      schemaConversion: {
        type: Object,
        attribute: "schema-conversion",
      },
      /*
       * value of fields
       */
      value: {
        type: Object,
      },
    };
  }
  /**
   * properties specific to form function
   *
   * @readonly
   * @static
   * @memberof SimpleFieldsFormLite
   */
  static get formProperties() {
    return {
      autoload: {
        type: Boolean,
        reflect: true,
      },
      loading: {
        type: Boolean,
        reflect: true,
      },
      loadEndpoint: {
        type: String,
        attribute: "load-endpoint",
      },
      saveEndpoint: {
        type: String,
        attribute: "save-endpoint",
      },
      method: {
        type: String,
      },
      headers: {
        type: Object,
      },
      body: {
        type: Object,
      },
      loadResponse: {
        type: Object,
      },
    };
  }
  /**
   * gets the simple-fields object
   *
   * @readonly
   * @memberof SimpleFieldsLite
   */
  get formFields() {
    return this.shadowRoot &&
      this.shadowRoot.querySelector &&
      this.shadowRoot.querySelector("#sf")
      ? this.shadowRoot.querySelector("#sf")
      : undefined;
  }
  /**
   * form elements by id
   *
   * @readonly
   * @memberof SimpleFieldsLite
   */
  get formElements() {
    return this.formFields ? this.formFields.formElements : {};
  }
  /**
   * list of form elements in order
   *
   * @readonly
   * @memberof SimpleFieldsLite
   */
  get formElementsArray() {
    return this.formFields ? this.formFields.formElementsArray : [];
  }
  /**
   * Props down
   */
  static get properties() {
    return {
      ...this.fieldProperties,
      ...this.formProperties,
    };
  }
}
// Register the custom element and export the class for direct imports.
window.customElements.define(SimpleFieldsFormLite.tag, SimpleFieldsFormLite);
export { SimpleFieldsFormLite };
|
TimeWz667/PyEpiDAG
|
example/exp_UseSpecialDataSet.py
|
<filename>example/exp_UseSpecialDataSet.py
import pandas as pd
from epidag.data import *
__author__ = 'TimeWz667'
# --- Transition Matrix -------------------------------------------------------
# Build a discrete-state transition matrix from CSV and walk the chain.
print('\nTransition Matrix')
mat = pd.read_csv('../data/transition.csv', index_col=0)
trm = TransitionMatrix(mat)
print(trm)
print('\nSampling\n')
# Walk five transitions starting from state 'A'.
st = 'A'
print('State', st)
for _ in range(5):
    st = trm.sample_next(st)
    print('State', st)
print(trm)
# --- Conditional Probability Table -------------------------------------------
# Table conditioned on Age and Sex; 'Prob' holds the probability column.
print('\nConditional Probability Table')
mat = pd.read_csv('../data/condprobtab.csv')
cpt = ConditionalProbabilityTable(mat, ['Age', 'Sex'], 'Prob')
print(cpt)
print('\nMarginalisation\n', cpt.marginalise('Age'))
print('\nConditioning\n', cpt.condition(Age='O'))
print('\nSampling\n', cpt())
# --- Time Series --------------------------------------------------------------
# Single series 'X' over 'Time', linearly interpolated between observations.
print('\nTime Series')
mat = pd.read_csv('../data/ts.csv')
ts = TimeSeries(mat, 'Time', 'X', 'linear')
print(ts)
print('\nSampling\n', ts(range(10)))
# Vector variant: interpolates several columns at once.
print('\nTime Series Vector')
mat = pd.read_csv('../data/ts.csv')
tsv = TimeSeriesVector(mat, 'Time', ['X', 'Y'], 'linear')
print(tsv)
print('\nSampling\n', pd.DataFrame.from_dict(tsv([1, 2, 3])))
# --- Time Series Probability Table --------------------------------------------
# Categorical sampling where the category probabilities vary over time.
print('\nTime Series Probability Table')
mat = pd.read_csv('../data/tspt.csv')
tspt = TimeSeriesProbabilityTable(mat, 'Time', ['X', 'Y', 'Z'])
print(tspt)
print('\nSampling\n', tspt.sample(n=10, t=5))
# --- Lee-Carter ----------------------------------------------------------------
# Lee-Carter mortality model assembled from age (Alpha/Beta) and time (Kappa)
# component tables.
print('\nLee Carter')
mat_a = pd.read_csv('../data/lca.csv')
mat_t = pd.read_csv('../data/lct.csv')
lc = LeeCarter(mat_t=mat_t, mat_a=mat_a, i_time='Time', i_age='Age',
               i_al='Alpha', i_be='Beta', i_ka='Kappa')
print('\nGet single rate\n', lc.get_rate(5, 'M'))
print('\nGet Rate Table\n', lc(5))
|
ruoranluomu/AliOS-Things
|
platform/mcu/stm32l4xx_cube/hal/pwrmgmt_hal/board_cpu_pwr.c
|
/*
* Copyright (C) 2018 Alibaba Group Holding Limited
*/
/*
DESCRIPTION
This library provides the support for the STM32L496G-DISCOVERY
CPU power state control.
CPU power management:
provides low-level interface for setting CPU C-states.
provides low-level interface for setting CPU P-states.
*/
#include <k_api.h>
#if (PWRMGMT_CONFIG_CPU_LOWPOWER > 0)
#include "cpu_pwr_hal_lib.h"
#include "pwrmgmt_debug.h"
#include "cpu_tickless.h"
#include "stm32l4xx_hal.h"
/* forward declarations */
extern one_shot_timer_t tim5_one_shot; /* wakeup source for C1,C2 */
extern one_shot_timer_t rtc_one_shot; /* wakeup source for C3,C4 */
static cpu_pwr_t cpu_pwr_node_core_0;
/*
For verifying the RTC could wakeup system in C4 mode, you can define
C4_WAKE_UP_BY_RTC_EXAMPLE here
*/
#undef C4_WAKE_UP_BY_RTC_EXAMPLE
/**
* board_cpu_c_state_set - program CPU into Cx idle state
*
* RUN Context: could be called from ISR context or task context.
*
* SMP Consider: STM32L496G-DISCOVERY do not support SMP, so only UP is enough.
*
* @return PWR_OK or PWR_ERR when failed.
*/
static pwr_status_t board_cpu_c_state_set(uint32_t cpuCState, int master)
{
    switch (cpuCState) {
    case CPU_CSTATE_C0:
        /* C0 = running; nothing to program, only a hook on wakeup. */
        if (master) {
            /*
             * do something needed when CPU waked up from C1 or higher
             * Cx state.
             */
        }
        break;
    case CPU_CSTATE_C1:
        /* put CPU into C1 state, for ARM we can call WFI instruction
           to put CPU into C1 state.
           Main regulator stays on: immediate wakeup on any interrupt. */
        PWRMGMT_LOG(PWRMGMT_LOG_DBG, "enter C1\n");
        HAL_PWR_EnterSLEEPMode(PWR_MAINREGULATOR_ON, PWR_SLEEPENTRY_WFI);
        break;
    case CPU_CSTATE_C2:
        /* put CPU into C2 state, normally different SOC has different
           way for C2. Same WFI sleep as C1 but with the low-power
           regulator selected. */
        PWRMGMT_LOG(PWRMGMT_LOG_DBG, "enter C2\n");
        HAL_PWR_EnterSLEEPMode(PWR_LOWPOWERREGULATOR_ON, PWR_SLEEPENTRY_WFI);
        break;
    case CPU_CSTATE_C3:
        /* put CPU into C3 state, normally different SOC has different
           way for C3. STOP mode: clocks gated, wakeup via EXTI/RTC. */
        PWRMGMT_LOG(PWRMGMT_LOG_DBG, "enter C3\n");
        HAL_PWR_EnterSTOPMode(PWR_LOWPOWERREGULATOR_ON, PWR_STOPENTRY_WFI);
        break;
    case CPU_CSTATE_C4:
        /* put CPU into C4 state, normally different SOC has different
           way for C4. STANDBY: wakeup causes a reset, execution restarts
           from the reset vector, not from here. */
        PWRMGMT_LOG(PWRMGMT_LOG_DBG, "enter C4\n");
        HAL_PWR_EnterSTANDBYMode();
        break;
    default:
        /* Unknown state is logged but not treated as an error. */
        PWRMGMT_LOG(PWRMGMT_LOG_ERR, "invalid C state: C%d\n", cpuCState);
        break;
    }
    /* Always reports success, including for the default branch. */
    return PWR_OK;
}
/**
* board_cpu_pwr_init() is called by HAL lib to
* init board powr manage configure.
*
* RUN Context: could be called from task context only, ISR context is not
* supported.
*
* SMP Consider: STM32L496G-DISCOVERY do not support SMP, so only UP is enough.
*
* @return PWR_OK or PWR_ERR when failed.
*/
pwr_status_t board_cpu_pwr_init(void)
{
    cpu_pwr_t    *pCpuNode = NULL;
    pwr_status_t  retVal   = PWR_OK;
    uint32_t      cpuIndex = 0; /* 0 for UP */

    pCpuNode = &cpu_pwr_node_core_0;
    retVal = cpu_pwr_node_init_static("core", 0, pCpuNode);
    if (retVal != PWR_OK) {
        return PWR_ERR;
    }
    /* record this node */
    retVal = cpu_pwr_node_record(pCpuNode, cpuIndex);
    if (retVal == PWR_ERR) {
        return PWR_ERR;
    }
    /*
     * According reference manual of STM32L496G-DISCOVERY
     *
     * C0 - RUN,  Power supplies are on,all clocks are on.
     * C1 - Sleep mode, CPU clock off, all peripherals including
     *      Cortex(R)-M4 core peripherals such as NVIC, SysTick, etc. can run
     *      and wake up the CPU when an interrupt or an event occurs.
     * C2 - low power sleep mode, Low-power sleep mode: This mode is entered
     *      from the Low-power run mode: Cortex(R)-M4 is off.
     * C3 - stop mode.
     * C4 - standby mode.
     *
     * Currently, C0, C1, C2, C3, C4 is supported,
     */
    /* C0,C1,C2,C3,C4 is controlled by level 2 : core */
    retVal = cpu_pwr_c_method_set(cpuIndex, board_cpu_c_state_set);
    if (retVal == PWR_ERR) {
        return PWR_ERR;
    }
    /* save support C status bitset in level 2: C0,C1,C2,C3,C4 */
    /* FIX: the return value was previously discarded, so the check below
       silently re-tested the result of cpu_pwr_c_method_set(); capture it. */
    retVal = cpu_pwr_c_state_capability_set(cpuIndex, CPU_STATE_BIT(CPU_CSTATE_C0)
                                                    | CPU_STATE_BIT(CPU_CSTATE_C1)
                                                    | CPU_STATE_BIT(CPU_CSTATE_C2)
                                                    | CPU_STATE_BIT(CPU_CSTATE_C3)
                                                    | CPU_STATE_BIT(CPU_CSTATE_C4));
    if (retVal == PWR_ERR) {
        return PWR_ERR;
    }
    /*
     * According reference manual of STM32L496G-DISCOVERY,
     * the wakeup latency of Cx is:
     * resume from C1 (Low Power mode)       : immediate
     * resume from C2 (Low Power Sleep mode) : immediate
     * resume from C3 (stop mode)            : immediate
     */
    cpu_pwr_c_state_latency_save(cpuIndex, CPU_CSTATE_C0, 0);
    cpu_pwr_c_state_latency_save(cpuIndex, CPU_CSTATE_C1, 0);
    cpu_pwr_c_state_latency_save(cpuIndex, CPU_CSTATE_C2, 0);
    cpu_pwr_c_state_latency_save(cpuIndex, CPU_CSTATE_C3, 0);
    cpu_pwr_c_state_latency_save(cpuIndex, CPU_CSTATE_C4, 0);
    /*
    Although C1/C2/C3/C4 is supported with CPU PWR MGMT, that is not
    meaning all of these Cx is suitable with tickless. For example,
    if we config C4(CPU of STM32L496G enter standby mode which is
    defined in reference maunal, the standby mode is something similar
    power off the board) with tickless mode, when system has nothing
    to do and decide enter tickless mode with one second
    (such as the shortest sleep time of task) sleep time with C4,
    the RTC is programmed to fire interrupt at one second, and then
    CPU goto C4, and CPU goto standby mode which is something similar
    power off the board, and with one second passed, the RTC fired
    interrupt to wake up CPU, the CPU will start with first instruction
    not the WFI, this lead system reboot in one second and find it
    could goto C4 again and reboot in one second again and again.

    With tickless mode, there is no need for user to decide when and
    which Cx state is selected to enter tickless mode, the tickless
    module will decide itself, C1/C2/C3 is suitable for tickless here.

    But for C4-standby mode(not all standby mode of all SOC has same
    meaning) here, it's better for user to decide when to put CPU
    into C4 state, and it's responsibility of user to prepare/programm
    the wakeup source before CPU enter C4 mode, normally the wakeup
    source could be RTC, sensor and anything, please reference your
    SOC user manual to check the detail which resource would be used.
    The RTC here for C4 is for example showing how to wakeup CPU in
    C4, you could open it for testing.
    */
    /* TIM5 can wake the core from sleep states; only the RTC survives STOP. */
    tickless_one_shot_timer_save(CPU_CSTATE_C1, &tim5_one_shot);
    tickless_one_shot_timer_save(CPU_CSTATE_C2, &tim5_one_shot);
    tickless_one_shot_timer_save(CPU_CSTATE_C3, &rtc_one_shot);
    /*
    it could be opened for showing how to wakeup CPU
    in C4(standby mode) by RTC.
    */
#ifdef C4_WAKE_UP_BY_RTC_EXAMPLE
    tickless_one_shot_timer_save(CPU_CSTATE_C4, &rtc_one_shot);
#endif /* C4_WAKE_UP_BY_RTC_EXAMPLE */
    /*
    Tell the CPU PWR MGMT module which C state is supported with
    tickless function through tickless_c_states_add(c_state_x).
    NOTE, although C4 is support by this board, but C4 is not suitable
    to work together with tickless mode, because C4 of STM32L496G means
    standby which is something similar with shutdown mode, when system
    enter C4 mode, the board need external wakeup source to trigger
    to reboot.
    */
    tickless_c_states_add(CPU_STATE_BIT(CPU_CSTATE_C0)
                        | CPU_STATE_BIT(CPU_CSTATE_C1)
                        | CPU_STATE_BIT(CPU_CSTATE_C2)
                        | CPU_STATE_BIT(CPU_CSTATE_C3)
#ifdef C4_WAKE_UP_BY_RTC_EXAMPLE
                        | CPU_STATE_BIT(CPU_CSTATE_C4)
#endif /* C4_WAKE_UP_BY_RTC_EXAMPLE */
                        );
    /*
    after init, we will show a cpu power capabilites like this:
    CPU NODE CAPABILITIES
    ------------------------------ --------------------------
    core0 (CPU0)
      P-States:
        P0:   80 MHz   1200 mv
      C-States:
        C0:    0 us latency
        C1:    0 us latency
        C2:    0 us latency
        C3:    0 us latency
    */
#if RHINO_CONFIG_CPU_PWR_SHOW
    cpu_pwr_info_show();
    cpu_pwr_state_show();
#endif
    return retVal;
}
#endif /* PWRMGMT_CONFIG_CPU_LOWPOWER */
|
Ashwanigupta9125/code-DS-ALGO
|
LeetCode-Challenges/2020/8. August/Week2/pascalTriangle2.cpp
|
<filename>LeetCode-Challenges/2020/8. August/Week2/pascalTriangle2.cpp
class Solution {
public:
    /**
     * Returns row k (0-indexed) of Pascal's triangle.
     *
     * Improvement over the original two-vector version: the row is built
     * in place, using O(k) extra space instead of keeping a second copy.
     *
     * @param k row index, k >= 0 (row 0 is {1})
     * @return the k-th row of Pascal's triangle
     */
    vector<int> getRow(int k) {
        vector<int> row(k + 1, 1);
        for (int i = 2; i <= k; ++i) {
            // Sweep right-to-left so row[j - 1] still holds the value from
            // the previous row when it is consumed.
            for (int j = i - 1; j >= 1; --j) {
                row[j] += row[j - 1];
            }
        }
        return row;
    }
};
|
ClearTax/dropwizard-db-sharding-bundle
|
sharding-example/src/main/java/in/cleartax/dropwizard/sharding/application/TestApplication.java
|
/*
* Copyright 2018 Cleartax
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package in.cleartax.dropwizard.sharding.application;
import com.google.inject.Stage;
import in.cleartax.dropwizard.sharding.transactions.UnitOfWorkModule;
import io.dropwizard.Application;
import io.dropwizard.setup.Bootstrap;
import io.dropwizard.setup.Environment;
import lombok.Getter;
import ru.vyarus.dropwizard.guice.GuiceBundle;
/**
 * Dropwizard application used by the sharding-bundle example/tests.
 * Wires {@link TestModule} and {@code UnitOfWorkModule} into a Guice bundle.
 */
public class TestApplication extends Application<TestConfig> {

    // Lombok @Getter exposes the bundle so tests can reach the Guice injector.
    @Getter
    private GuiceBundle<TestConfig> guiceBundle;

    private TestModule testModule;

    public static void main(String[] args) throws Exception {
        new TestApplication().run(args);
    }

    @Override
    public String getName() {
        return "ShardingDemoApp";
    }

    @Override
    public void initialize(Bootstrap<TestConfig> bootstrap) {
        // The module needs the bootstrap; build it first, then register the
        // Guice bundle with auto-config scanning over the module's packages.
        testModule = new TestModule(bootstrap);
        guiceBundle = GuiceBundle.<TestConfig>builder()
                .modules(testModule, new UnitOfWorkModule())
                .enableAutoConfig(TestModule.PCKGS)
                .build(Stage.PRODUCTION);
        bootstrap.addBundle(guiceBundle);
    }

    @Override
    public void run(TestConfig configuration, Environment environment) throws Exception {
        // Intentionally empty: all wiring happens via the Guice bundle.
    }
}
|
miguelalb/resume-portal-fastapi
|
services/backend/tests/fake_data_generator.py
|
<filename>services/backend/tests/fake_data_generator.py
import random
import sys
from datetime import datetime
from faker import Faker
# TODO: migrate the remaining hand-rolled "Sample..." generators to Faker
fake = Faker()  # module-level Faker instance shared by all generators below
def get_sample_template():
    """Build one fake template payload.

    Returns:
        dict: ``name`` and ``content`` are random ``Sample*`` strings,
        ``premium`` is a random bool.
    """
    return {
        "name": f"Sample{random.randint(0, sys.maxsize)}",
        "content": f"SampleContent{random.randint(0, sys.maxsize)}",
        "premium": random.choice([True, False]),
    }
def get_sample_skill():
    """Build one fake skill payload: a random technology name and learning flag."""
    technologies = [
        "Python", "Go", "Kubernetes", "Java", "Javascript",
        "Docker", "AWS", "GCP", "Azure", "Machine learning",
    ]
    name = random.choice(technologies)
    learning = random.choice([True, False])
    return {"name": name, "learning": learning}
def get_sample_job():
    """Build one fake job entry using Faker for company/title/description.

    Note: ``enddate`` is always populated, even when ``current`` is True,
    and both dates are the *current* timestamp rendered as a string.
    """
    return {
        "company": fake.company(),
        "designation": fake.job(),
        "description": fake.catch_phrase(),
        "startdate": str(datetime.now().timestamp()),
        "current": random.choice([True, False]),
        "enddate": str(datetime.now().timestamp()),
    }
def get_sample_education():
    """Build one fake education entry.

    Only ``college`` uses Faker; designation/description are still
    hand-rolled ``Sample*`` strings (see module TODO). As with jobs,
    ``enddate`` is set regardless of ``current``.
    """
    return {
        "college": fake.company(),
        "designation": "SampleDesignation" + str(random.randint(0, sys.maxsize)),
        "description": "SampleDescription" + str(random.randint(0, sys.maxsize)),
        "startdate": str(datetime.now().timestamp()),
        "current": random.choice([True, False]),
        "enddate": str(datetime.now().timestamp()),
    }
def get_sample_certification():
    """Build one fake certification entry keyed like the API schema."""
    name = "SampleName" + str(random.randint(0, sys.maxsize))
    organization = "SampleOrg" + str(random.randint(0, sys.maxsize))
    issued = str(datetime.now().timestamp())
    is_current = random.choice([True, False])
    expires = str(datetime.now().timestamp())
    credential_id = "SampleID" + str(random.randint(0, sys.maxsize))
    credential_url = "https://sample.com/" + str(random.randint(0, sys.maxsize))
    return {
        "name": name,
        "issuing_organization": organization,
        "issue_date": issued,
        "current": is_current,
        "expiration_date": expires,
        "credential_id": credential_id,
        "credential_url": credential_url,
    }
def get_sample_profile():
    """Build one complete fake profile.

    Aggregates five entries each of skills/jobs/educations/certifications
    via the sibling generators above; top-level identity fields come from
    Faker. ``public_name`` is a lowercase color+surname slug.
    """
    return {
        "first_name": fake.first_name(),
        "last_name": fake.last_name(),
        "public_name": fake.color_name().lower() + fake.last_name().lower(),
        "summary": fake.paragraph(nb_sentences=6),
        "email": fake.ascii_email(),
        "phone": fake.phone_number(),
        "designation": fake.job(),
        "website": "https://www."+fake.domain_name(),
        "skills": [get_sample_skill() for i in range(5)],
        "jobs": [get_sample_job() for i in range(5)],
        "educations": [get_sample_education() for i in range(5)],
        "certifications": [get_sample_certification() for i in range(5)],
    }
|
tanzzj/teapot
|
teapot-test/src/test/java/com/teamer/teapot/project/database/service/impl/ProjectDBServiceImplTest.java
|
package com.teamer.teapot.project.database.service.impl;
import com.teamer.teapot.common.model.Project;
import com.teamer.teapot.common.model.ProjectDatabase;
import com.teamer.teapot.common.model.SQLParams;
import com.teamer.teapot.common.util.TestUtil;
import com.teamer.teapot.project.database.service.ProjectDBService;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.annotation.Rollback;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.transaction.annotation.Transactional;
import java.sql.SQLException;
/**
* @author : tanzj
* @date : 2020/7/31.
*/
@SpringBootTest
@RunWith(SpringRunner.class)
public class ProjectDBServiceImplTest {

    @Autowired
    ProjectDBService projectDBService;

    /**
     * Links a database instance to a project; rolled back after the test.
     * NOTE(review): relies on project "1" existing in the test database —
     * confirm seed data.
     */
    @Test
    @Transactional
    @Rollback
    public void addProjectDatabaseInstance() {
        TestUtil.assertSuccess(
                projectDBService.addProjectDatabaseInstance(
                        new ProjectDatabase().setDatabaseId("123")
                                .setProjectId("1")
                )
        );
    }

    /** Lists databases of project "1"; read-only, so no transaction needed. */
    @Test
    public void queryProjectDataBaseList() {
        TestUtil.assertSuccess(
                projectDBService.queryProjectDataBaseList(
                        new Project().setProjectId("1")
                )
        );
    }

    /**
     * Exercises SQL execution against a known database id ("2m2Htxgs"):
     * a SELECT and an UPDATE must succeed; a wrong/unknown database id must
     * fail. The UPDATE is rolled back by @Transactional/@Rollback.
     * NOTE(review): the hard-coded ids/tables must exist in the test DB.
     */
    @Test
    @Rollback
    @Transactional
    public void executeSQL() throws ClassNotFoundException, SQLException {
        TestUtil.assertSuccess(
                projectDBService.executeSQL(
                        new SQLParams()
                                .setSql("select * from t_portal_user")
                                .setDatabaseId("2m2Htxgs")
                )
        );
        TestUtil.assertSuccess(
                projectDBService.executeSQL(
                        new SQLParams()
                                .setSql("update t_project set projectName = '666' where projectId = 'DL7ulWzq'")
                                .setDatabaseId("2m2Htxgs")
                )
        );
        TestUtil.assertFail(
                projectDBService.executeSQL(
                        new SQLParams()
                                .setSql("select * from t_portal_user")
                                .setDatabaseId("im2Htxgs")
                )
        );
        TestUtil.assertFail(
                projectDBService.executeSQL(
                        new SQLParams()
                                .setSql("select * from t_portal_user")
                                .setDatabaseId("database not exist")
                )
        );
    }
}
|
isutton/source-to-image
|
vendor/github.com/hashicorp/consul/ui-v2/app/mixins/with-health-filtering.js
|
import Mixin from '@ember/object/mixin';
import WithFiltering from 'consul-ui/mixins/with-filtering';
// Layers health-status filtering on top of the generic WithFiltering mixin:
// exposes `status` and `s` as controller query params.
export default Mixin.create(WithFiltering, {
  queryParams: {
    // serialized in the URL as ?status=...
    status: {
      as: 'status',
    },
    // `s` is the search/filter term, serialized as ?filter=...
    s: {
      as: 'filter',
    },
  },
});
|
igor-karpukhin/compass
|
tests/connector-tests/test/testkit/securedClient.go
|
<gh_stars>0
package testkit
import (
"context"
"crypto/rsa"
"crypto/tls"
"net/http"
schema "github.com/kyma-incubator/compass/components/connector/pkg/gqlschema"
gcli "github.com/machinebox/graphql"
"github.com/pkg/errors"
)
//Currently unused. SecuredConnectorClient will be used in future test cases
type SecuredConnectorClient interface {
	Configuration() (schema.Configuration, error)
	RenewCertificate(csr string) (schema.CertificationResult, error)
	RevokeCertificate() (bool, error)
}

// securedClient is a GraphQL client intended to authenticate with a client
// certificate.
//
// NOTE(review): securedClient does NOT satisfy SecuredConnectorClient as
// declared above — its renew method is named RenewCert(csr, token), not
// RenewCertificate(csr). Confirm which signature is intended before using
// the interface.
type securedClient struct {
	graphQlClient *gcli.Client
	queryProvider queryProvider
}
// NewSecuredConnectorClient builds a GraphQL client whose HTTP transport
// presents the given client certificate chain and private key.
//
// NOTE(review): despite the securedClient type above, this returns the
// package's `client` type (as ConnectorClient) — the securedClient methods
// below are therefore unreachable via this constructor; confirm intent.
// Also: ClientAuth is a *server-side* tls.Config field and has no effect in
// a client config, and InsecureSkipVerify disables server-cert verification
// (acceptable for tests only).
func NewSecuredConnectorClient(endpoint string, key *rsa.PrivateKey, certificate ...[]byte) ConnectorClient {
	tlsCert := tls.Certificate{
		Certificate: certificate,
		PrivateKey:  key,
	}

	tlsConfig := &tls.Config{
		Certificates:       []tls.Certificate{tlsCert},
		ClientAuth:         tls.RequireAndVerifyClientCert,
		InsecureSkipVerify: true,
	}

	httpClient := &http.Client{
		Transport: &http.Transport{
			TLSClientConfig: tlsConfig,
		},
	}

	graphQlClient := gcli.NewClient(endpoint, gcli.WithHTTPClient(httpClient))

	return &client{
		graphQlClient: graphQlClient,
		queryProvider: queryProvider{},
	}
}
// Configuration fetches the connector configuration over GraphQL.
func (c securedClient) Configuration() (schema.Configuration, error) {
	req := gcli.NewRequest(c.queryProvider.configuration())

	var resp ConfigurationResponse
	if err := c.graphQlClient.Run(context.Background(), req, &resp); err != nil {
		return schema.Configuration{}, errors.Wrap(err, "Failed to get configuration")
	}
	return resp.Result, nil
}
// RenewCert requests a certificate for the given CSR.
//
// NOTE(review): the `token` parameter is never used, the query issued is
// queryProvider.generateCert (generation, not renewal), and the error text
// says "generate" — and the name does not match the interface's
// RenewCertificate(csr). Confirm whether this method should take a token
// and/or be renamed before the interface is put to use.
func (c securedClient) RenewCert(csr string, token string) (schema.CertificationResult, error) {
	query := c.queryProvider.generateCert(csr)
	req := gcli.NewRequest(query)

	var response CertificationResponse

	err := c.graphQlClient.Run(context.Background(), req, &response)
	if err != nil {
		return schema.CertificationResult{}, errors.Wrap(err, "Failed to generate certificate")
	}
	return response.Result, nil
}
// RevokeCertificate revokes the caller's certificate over GraphQL and reports
// whether the revocation succeeded.
func (c securedClient) RevokeCertificate() (bool, error) {
	req := gcli.NewRequest(c.queryProvider.revokeCert())

	var resp RevokeResult
	if err := c.graphQlClient.Run(context.Background(), req, &resp); err != nil {
		return false, errors.Wrap(err, "Failed to revoke certificate")
	}
	return resp.Result, nil
}
|
wangkaiwd/player
|
postcss.config.js
|
<reponame>wangkaiwd/player
module.exports = {
  plugins: {
    'postcss-import': {},
    'postcss-url': {},
    'postcss-aspect-ratio-mini': {},
    'postcss-write-svg': {
      'utf8': false
    },
    'postcss-cssnext': {},
    // document address: https://github.com/evrone/postcss-px-to-viewport/blob/master/README_CN.md
    'postcss-px-to-viewport': {
      // design draft width; px values are converted to vw relative to this
      'viewportWidth': 375,
      'unitPrecision': 5,
      // selectors that keep their px values untouched
      'selectorBlackList': [
        '.ignore',
        '.hairlines'
      ],
      'mediaQuery': false
    },
    'postcss-viewport-units': {
      // Filter out the command-line warnings produced when a pseudo-element
      // overrides the plugin-generated `content`:
      // https://github.com/didi/cube-ui/issues/296
      filterRule: rule => rule.nodes.findIndex(i => i.prop === 'content') === -1
    },
    'cssnano': {
      'preset': 'advanced',
      // autoprefixer/zindex are handled (or deliberately skipped) elsewhere
      'autoprefixer': false,
      'postcss-zindex': false
    }
  }
};
|
dot2gua/hbase98learning
|
hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationKillRS.java
|
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.replication;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.UnknownScannerException;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.junit.experimental.categories.Category;
import static org.junit.Assert.fail;
@Category(LargeTests.class)
public class TestReplicationKillRS extends TestReplicationBase {

    private static final Log LOG = LogFactory.getLog(TestReplicationKillRS.class);

    /**
     * Load up 1 tables over 2 region servers and kill a source during
     * the upload. The failover happens internally.
     *
     * WARNING this test sometimes fails because of HBASE-3515
     *
     * @throws Exception
     */
    public void loadTableAndKillRS(HBaseTestingUtility util) throws Exception {
        // killing the RS with hbase:meta can result into failed puts until we solve
        // IO fencing
        int rsToKill1 =
                util.getHBaseCluster().getServerWithMeta() == 0 ? 1 : 0;

        // Takes about 20 secs to run the full loading, kill around the middle
        Thread killer = killARegionServer(util, 5000, rsToKill1);

        LOG.info("Start loading table");
        int initialCount = utility1.loadTable(htable1, famName);
        LOG.info("Done loading table");
        // NOTE(review): join(5000) bounds the wait; the killer sleeps ~5s before
        // stopping the RS, so the kill may still be in flight here — confirm.
        killer.join(5000);
        LOG.info("Done waiting for threads");

        // Retry the scan until the cluster has recovered from the RS kill;
        // UnknownScannerException means regions were still moving.
        Result[] res;
        while (true) {
            try {
                Scan scan = new Scan();
                ResultScanner scanner = htable1.getScanner(scan);
                res = scanner.next(initialCount);
                scanner.close();
                break;
            } catch (UnknownScannerException ex) {
                LOG.info("Cluster wasn't ready yet, restarting scanner");
            }
        }

        // Test we actually have all the rows, we may miss some because we
        // don't have IO fencing.
        if (res.length != initialCount) {
            LOG.warn("We lost some rows on the master cluster!");
            // We don't really expect the other cluster to have more rows
            initialCount = res.length;
        }

        // Poll the peer cluster until it has replicated all rows; the retry
        // counter is only advanced when no progress is made between polls.
        int lastCount = 0;
        final long start = System.currentTimeMillis();
        int i = 0;
        while (true) {
            if (i==NB_RETRIES-1) {
                fail("Waited too much time for queueFailover replication. " +
                        "Waited "+(System.currentTimeMillis() - start)+"ms.");
            }
            Scan scan2 = new Scan();
            ResultScanner scanner2 = htable2.getScanner(scan2);
            Result[] res2 = scanner2.next(initialCount * 2);
            scanner2.close();
            if (res2.length < initialCount) {
                if (lastCount < res2.length) {
                    i--; // Don't increment timeout if we make progress
                } else {
                    i++;
                }
                lastCount = res2.length;
                LOG.info("Only got " + lastCount + " rows instead of " +
                        initialCount + " current i=" + i);
                Thread.sleep(SLEEP_TIME*2);
            } else {
                break;
            }
        }
    }

    /**
     * Starts a daemon thread that sleeps {@code timeout} ms and then stops
     * region server {@code rs} on the given cluster. Exceptions are logged,
     * not propagated (the thread must not kill the test).
     */
    private static Thread killARegionServer(final HBaseTestingUtility utility,
                                            final long timeout, final int rs) {
        Thread killer = new Thread() {
            public void run() {
                try {
                    Thread.sleep(timeout);
                    utility.getHBaseCluster().getRegionServer(rs).stop("Stopping as part of the test");
                } catch (Exception e) {
                    LOG.error("Couldn't kill a region server", e);
                }
            }
        };
        killer.setDaemon(true);
        killer.start();
        return killer;
    }
}
|
heropan/Elastos.ELA.SPV.Cpp
|
SDK/Common/secp256k1_name_fix.h
|
<reponame>heropan/Elastos.ELA.SPV.Cpp
/*
* Copyright (c) 2020 Elastos Foundation
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
#ifndef __ELASTOS_SDK_SECP256K1_NAME_FIX_H__
#define __ELASTOS_SDK_SECP256K1_NAME_FIX_H__

/*
 * Renames the public secp256k1 API symbols to SPV_-prefixed aliases.
 * Presumably this avoids duplicate-symbol link errors when the host
 * application links another copy of libsecp256k1 — TODO confirm against
 * the build that compiles the bundled secp256k1 sources.
 * Include this header before (or while building) the secp256k1 sources.
 */

/* Context management */
#define secp256k1_context_create SPV_secp256k1_context_create
#define secp256k1_context_clone SPV_secp256k1_context_clone
#define secp256k1_context_destroy SPV_secp256k1_context_destroy
#define secp256k1_context_set_illegal_callback SPV_secp256k1_context_set_illegal_callback
#define secp256k1_context_set_error_callback SPV_secp256k1_context_set_error_callback

/* Scratch space */
#define secp256k1_scratch_space_create SPV_secp256k1_scratch_space_create
#define secp256k1_scratch_space_destroy SPV_secp256k1_scratch_space_destroy

/* Public key parsing / serialization */
#define secp256k1_ec_pubkey_parse SPV_secp256k1_ec_pubkey_parse
#define secp256k1_ec_pubkey_serialize SPV_secp256k1_ec_pubkey_serialize

/* ECDSA signatures */
#define secp256k1_ecdsa_signature_parse_compact SPV_secp256k1_ecdsa_signature_parse_compact
#define secp256k1_ecdsa_signature_parse_der SPV_secp256k1_ecdsa_signature_parse_der
#define secp256k1_ecdsa_signature_serialize_der SPV_secp256k1_ecdsa_signature_serialize_der
#define secp256k1_ecdsa_signature_serialize_compact SPV_secp256k1_ecdsa_signature_serialize_compact
#define secp256k1_ecdsa_verify SPV_secp256k1_ecdsa_verify
#define secp256k1_ecdsa_signature_normalize SPV_secp256k1_ecdsa_signature_normalize
#define secp256k1_ecdsa_sign SPV_secp256k1_ecdsa_sign

/* Secret/public key operations */
#define secp256k1_ec_seckey_verify SPV_secp256k1_ec_seckey_verify
#define secp256k1_ec_pubkey_create SPV_secp256k1_ec_pubkey_create
#define secp256k1_ec_privkey_negate SPV_secp256k1_ec_privkey_negate
#define secp256k1_ec_pubkey_negate SPV_secp256k1_ec_pubkey_negate
#define secp256k1_ec_privkey_tweak_add SPV_secp256k1_ec_privkey_tweak_add
#define secp256k1_ec_pubkey_tweak_add SPV_secp256k1_ec_pubkey_tweak_add
#define secp256k1_ec_privkey_tweak_mul SPV_secp256k1_ec_privkey_tweak_mul
#define secp256k1_ec_pubkey_tweak_mul SPV_secp256k1_ec_pubkey_tweak_mul
#define secp256k1_context_randomize SPV_secp256k1_context_randomize
#define secp256k1_ec_pubkey_combine SPV_secp256k1_ec_pubkey_combine

/* ECDH (optional module) */
#define secp256k1_ecdh SPV_secp256k1_ecdh

/* Recoverable ECDSA signatures (optional module) */
#define secp256k1_ecdsa_recoverable_signature_parse_compact SPV_secp256k1_ecdsa_recoverable_signature_parse_compact
#define secp256k1_ecdsa_recoverable_signature_convert SPV_secp256k1_ecdsa_recoverable_signature_convert
#define secp256k1_ecdsa_recoverable_signature_serialize_compact SPV_secp256k1_ecdsa_recoverable_signature_serialize_compact
#define secp256k1_ecdsa_sign_recoverable SPV_secp256k1_ecdsa_sign_recoverable
#define secp256k1_ecdsa_recover SPV_secp256k1_ecdsa_recover

#endif
|
xformation/xformation-compliancemanager-service
|
compliancemanager-server/src/test/java/com/synectiks/process/common/security/authzroles/PaginatedAuthzRolesServiceTest.java
|
/*
* */
package com.synectiks.process.common.security.authzroles;
import com.google.common.collect.ImmutableSet;
import com.synectiks.process.common.security.authzroles.AuthzRoleDTO;
import com.synectiks.process.common.security.authzroles.PaginatedAuthzRolesService;
import com.synectiks.process.common.testing.mongodb.MongoDBExtension;
import com.synectiks.process.common.testing.mongodb.MongoDBFixtures;
import com.synectiks.process.common.testing.mongodb.MongoDBTestService;
import com.synectiks.process.common.testing.mongodb.MongoJackExtension;
import com.synectiks.process.server.bindings.providers.MongoJackObjectMapperProvider;
import com.synectiks.process.server.database.PaginatedList;
import com.synectiks.process.server.search.SearchQuery;
import com.synectiks.process.server.shared.users.UserService;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import org.mongojack.DBQuery;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@ExtendWith(MongoDBExtension.class)
@ExtendWith(MongoJackExtension.class)
@ExtendWith(MockitoExtension.class)
@MongoDBFixtures("roles.json")
/**
 * Tests for {@link PaginatedAuthzRolesService}, running against an embedded
 * MongoDB seeded with the "roles.json" fixture. The fixture contains 16 role
 * documents in total; all expected counts, IDs and names below come from it.
 */
class PaginatedAuthzRolesServiceTest {

    private PaginatedAuthzRolesService service;
    // Mocked collaborator; only exercised by delete(), which must dissociate users.
    private UserService userService;

    @BeforeEach
    void setUp(MongoDBTestService mongodb,
               MongoJackObjectMapperProvider mongoObjectMapperProvider,
               @Mock UserService userService) {
        this.userService = userService;
        this.service = new PaginatedAuthzRolesService(mongodb.mongoConnection(), userService, mongoObjectMapperProvider);
    }

    @Test
    void count() {
        // All 16 fixture roles should be counted.
        assertThat(service.count()).isEqualTo(16L);
    }

    @Test
    void findPaginatedByIds(@Mock SearchQuery searchQuery) {
        final String testRoleId = "56701ac4c8302ff6bee2a65d";
        final String readerRoleId = "564c6707c8306e079f718980";
        final String archiveManagerRoleId = "58dbaa158ae4923256dc6265";
        final Set<String> roleIds = ImmutableSet.of(testRoleId, readerRoleId, archiveManagerRoleId);

        // Empty search query: the page is restricted by the given IDs only.
        when(searchQuery.toDBQuery()).thenReturn(DBQuery.empty());

        final PaginatedList<AuthzRoleDTO> result = service.findPaginatedByIds(
                searchQuery,
                1,
                10,
                "id",
                "asc",
                roleIds
        );

        // The three requested roles come back sorted ascending by ID.
        assertThat(result.delegate().size()).isEqualTo(3);
        assertThat(result.delegate().get(0)).satisfies(role -> {
            assertThat(role.id()).isEqualTo(readerRoleId);
            assertThat(role.name()).isEqualTo("Reader");
        });
        assertThat(result.delegate().get(1)).satisfies(role -> {
            assertThat(role.id()).isEqualTo(testRoleId);
            assertThat(role.name()).isEqualTo("Test Role");
        });
        assertThat(result.delegate().get(2)).satisfies(role -> {
            assertThat(role.id()).isEqualTo(archiveManagerRoleId);
            assertThat(role.name()).isEqualTo("Archive Manager");
        });

        // Grand total reflects the whole collection, pagination reflects the filtered page.
        assertThat(result.grandTotal()).get().isEqualTo(16L);
        assertThat(result.pagination()).satisfies(paginationInfo -> {
            assertThat(paginationInfo.count()).isEqualTo(3);
            assertThat(paginationInfo.total()).isEqualTo(3);
            assertThat(paginationInfo.page()).isEqualTo(1);
            assertThat(paginationInfo.perPage()).isEqualTo(10);
        });
    }

    @Test
    void findPaginatedByIdsWithFilter(@Mock SearchQuery searchQuery) {
        final String testRoleId = "56701ac4c8302ff6bee2a65d";
        final String readerRoleId = "564c6707c8306e079f718980";
        final String archiveManagerRoleId = "58dbaa158ae4923256dc6265";
        final Set<String> roleIds = ImmutableSet.of(testRoleId, readerRoleId, archiveManagerRoleId);

        when(searchQuery.toDBQuery()).thenReturn(DBQuery.empty());

        // The predicate narrows the three ID-matched roles down to just the test role.
        final PaginatedList<AuthzRoleDTO> result = service.findPaginatedByIdsWithFilter(
                searchQuery,
                (role) -> testRoleId.equals(role.id()),
                1,
                10,
                "id",
                "asc",
                roleIds
        );

        assertThat(result.delegate().size()).isEqualTo(1);
        assertThat(result.delegate().get(0)).satisfies(role -> {
            assertThat(role.id()).isEqualTo(testRoleId);
            assertThat(role.name()).isEqualTo("Test Role");
        });

        // Grand total is still the full collection size; the page counts only the filtered hit.
        assertThat(result.grandTotal()).get().isEqualTo(16L);
        assertThat(result.pagination()).satisfies(paginationInfo -> {
            assertThat(paginationInfo.count()).isEqualTo(1);
            assertThat(paginationInfo.total()).isEqualTo(1);
            assertThat(paginationInfo.page()).isEqualTo(1);
            assertThat(paginationInfo.perPage()).isEqualTo(10);
        });
    }

    @Test
    void findByIds() {
        final List<AuthzRoleDTO> roles = service.findByIds(ImmutableSet.of(
                "5d41bb973086a840541a3ed2",
                "564c6707c8306e079f718980"
        ));

        // Results are returned sorted (here: "Reader" before "Alerts Manager" by ID order).
        assertThat(roles).hasSize(2);
        assertThat(roles.get(0).id()).isEqualTo("564c6707c8306e079f718980");
        assertThat(roles.get(0).name()).isEqualTo("Reader");
        assertThat(roles.get(1).id()).isEqualTo("5d41bb973086a840541a3ed2");
        assertThat(roles.get(1).name()).isEqualTo("Alerts Manager");
    }

    @Test
    void getAllRoleIds() {
        // Every one of the 16 fixture role IDs must be returned.
        // NOTE(review): the "<KEY>" entries look like anonymized fixture IDs —
        // restore the real values from roles.json if this test is revived.
        assertThat(service.getAllRoleIds()).isEqualTo(ImmutableSet.of(
                "564c6707c8306e079f718980",
                "56701ac4c8302ff6bee2a65d",
                "5b17d7c63f3ab8204eea0589",
                "<KEY>",
                "564c6707c8306e079f71897f",
                "<KEY>",
                "59fc4b2b6e948411fadbd85d",
                "59fc4b2b6e948411fadbd85e",
                "<KEY>",
                "<KEY>",
                "<KEY>",
                "<KEY>",
                "5f1f0d2a6f58d7c052d49775",
                "5f1f0d2a6f58d7c052d49778",
                "5f1f0d2a6f58d7c052d4977b",
                "5f22792d6f58d7c0521edb23"
        ));
    }

    @Test
    void delete() {
        final String roleId = "5d41bb973086a840541a3ed2";
        final Optional<AuthzRoleDTO> role = service.get(roleId);

        assertThat(role).isPresent();

        service.delete(roleId);

        // Deleting a role must also detach it from every user that held it.
        verify(userService).dissociateAllUsersFromRole(role.get().toLegacyRole());
    }
}
|
welsonla/rabel
|
app/controllers/admin/planes_controller.rb
|
<reponame>welsonla/rabel<filename>app/controllers/admin/planes_controller.rb
# encoding: utf-8
# Admin CRUD controller for Plane ("位面") records.
# All actions respond via JS — the admin UI drives them with AJAX.
class Admin::PlanesController < Admin::BaseController
  # find_plane (defined outside this file) loads @plane for member actions.
  before_filter :find_plane, :only => [:edit, :update, :destroy]

  # GET /admin/planes — list all planes in their default order.
  def index
    @planes = Plane.default_order
    @title = '位面节点'
  end

  # GET /admin/planes/new — render the creation form.
  def new
    @title = '添加位面'
    @plane = Plane.new
    respond_to do |format|
      format.js { render :show_form }
    end
  end

  # POST /admin/planes — create a plane; re-render the form on validation failure.
  def create
    @plane = Plane.new(params[:plane])
    respond_to do |format|
      if @plane.save
        format.js
      else
        format.js { render :show_form }
      end
    end
  end

  # GET /admin/planes/:id/edit — render the edit form.
  def edit
    respond_to do |format|
      format.js {
        @title = '修改位面'
        render :show_form
      }
    end
  end

  # PUT /admin/planes/:id — update the plane; reload the page on success.
  def update
    respond_to do |format|
      if @plane.update_attributes(params[:plane])
        format.js { render :js => 'window.location.reload()' }
      else
        format.js { render :show_form }
      end
    end
  end

  # DELETE /admin/planes/:id — only planes that allow deletion may be destroyed.
  def destroy
    respond_to do |format|
      # FIX: use && instead of the low-precedence `and` keyword (style guide;
      # avoids surprises if this condition is ever refactored into an assignment).
      if @plane.can_delete? && @plane.destroy
        format.js
      else
        format.js { render :json => {:error => 'delete plane failed'}, :status => :unprocessable_entity }
      end
    end
  end

  # POST /admin/planes/sort — persist a drag-and-drop ordering; when no
  # :position param is given, just re-render the current order.
  def sort
    if params[:position].present?
      params[:position].each_with_index do |id, pos|
        Plane.update(id, :position => pos)
      end
      respond_to do |f|
        f.js { head :ok }
      end
    else
      respond_to do |f|
        f.js {
          @planes = Plane.default_order
        }
      end
    end
  end
end
|
tombosc/dict_based_learning
|
tests/test_text_dataset.py
|
import cPickle
from dictlearn.datasets import TextDataset
from tests.util import (
TEST_TEXT, temporary_content_path)
def test_text_dataset():
    """TextDataset yields fixed-size token batches, and its epoch iterator
    can be pickled mid-epoch and resumed from exactly the same point."""
    with temporary_content_path(TEST_TEXT) as path:
        iterator = TextDataset(path, 100).get_example_stream().get_epoch_iterator()

        assert next(iterator) == (['abc', 'abc', 'def'],)

        # Snapshot the iterator state between the first and second batches.
        snapshot = cPickle.dumps(iterator)
        assert next(iterator) == (['def', 'def', 'xyz'],)

        # A restored iterator replays the batch that followed the snapshot,
        # then continues to the final (short) batch.
        iterator = cPickle.loads(snapshot)
        assert next(iterator) == (['def', 'def', 'xyz'],)
        assert next(iterator) == (['xyz'],)
|
sjrd/scalafix
|
scalafix-core/src/main/scala/scalafix/internal/diff/Diff.scala
|
<filename>scalafix-core/src/main/scala/scalafix/internal/diff/Diff.scala
package scalafix.internal.diff
import java.nio.file.Path
/** One changed region of a modified file: the starting line and the number of lines. */
case class GitChange(start: Int, length: Int)

/** Closed ADT describing how a file appears in a git diff. */
sealed trait GitDiff

/** A file that was newly added. */
case class NewFile(path: Path) extends GitDiff

/** An existing file that was modified, with the list of changed regions. */
case class ModifiedFile(path: Path, changes: List[GitChange]) extends GitDiff
|
jandppw/ppwcode
|
java/vernacular/persistence/trunk/src/main/java/org/ppwcode/vernacular/persistence_III/dao/RemoteAtomicStatelessCrudDao.java
|
<gh_stars>0
/*<license>
Copyright 2005 - $Date$ by PeopleWare n.v..
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
</license>*/
package org.ppwcode.vernacular.persistence_III.dao;
import static org.ppwcode.util.exception_III.ProgrammingErrorHelpers.dependency;
import static org.ppwcode.util.exception_III.ProgrammingErrorHelpers.unexpectedException;
import java.io.Serializable;
import java.sql.Timestamp;
import java.util.Set;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.ppwcode.vernacular.exception_III.ExternalError;
import org.ppwcode.vernacular.exception_III.ApplicationException;
import org.ppwcode.vernacular.exception_III.handle.ExceptionHandler;
import org.ppwcode.vernacular.persistence_III.IdNotFoundException;
import org.ppwcode.vernacular.persistence_III.PersistentBean;
import org.ppwcode.vernacular.persistence_III.VersionedPersistentBean;
import org.ppwcode.vernacular.persistence_III.dao.AtomicStatelessCrudDao;
import org.ppwcode.vernacular.persistence_III.dao.RequiredTransactionStatelessCrudDao;
import org.toryt.annotations_I.Basic;
import org.toryt.annotations_I.Expression;
import org.toryt.annotations_I.MethodContract;
/**
* <p>JPA implementation of {@link AtomicStatelessCrudDao}. This delegates to an instance of
* {@link RequiredTransactionStatelessCrudDao}.</p>
* <p>This is a stateless session bean, whose first intention is remote use.
* Because we advise not to use the interface {@link AtomicStatelessCrudDao} directly in the API of your business
* application, you cannot use this class directly either. You should extend this class in your business application as
* follows:</p>
* <pre>
* package my.business.application_IV.businesslogic.jpa;
*
* ...
*
* @Stateless
* <var>@WebService</var>
* @TransactionManagement(TransactionManagementType.BEAN)
* public class RemoteStatelessCrudDao extends org.ppwcode.vernacular.persistence_III.dao.ejb3.RemoteStatelessCrudDao {
*
* // NOP
*
* }
* </pre>
* <p>Furthermore, you need to inject a {@link #getRequiredTransactionStatelessCrudDao() RequiredTransactionStatelessCrudDao}
* and an {@link #getExceptionHandler() ExceptionHandler}. The abstract methods {@link #isOperational()},
* {@link #beginTransaction()}, {@link #commitTransaction()}, and {@link #rollbackTransaction()} must be
* implemented. A subclass could do this by providing a {@link javax.transaction.UserTransaction} or
* {@link javax.persistence.EntityTransaction}, and implementing these methods by delegating the work to these
* specialized objects.</p>
* <p>That is why this class does not have the {@code @Stateless}, {@code @WebService} nor {@code @TransactionManagement}
* or {@code @TransactionAttribute} annotation (apart from infecting this library package with a dependency on EJB3 annotations).
* In this way you have the possibility to keep backward compatibility when your business application's semantics change, and the class
* / object model and data model change. In that case, you develop a new version in package {@code my.business.application_V}, introducing
* {@code my.business.application_V.businesslogic.jpa.JpaStatelessTransactionCrudDao} implementing a new remote interface. With that,
* your clients can now choose which version they want to use. From the old version, you keep the necessary classes, but since the
* database structure probably has changed, retrieving and updating data cannot easily happen the same way. In particular, your
* semantics (persistent bean subtypes) will probably no longer map to the database. This means that your original implementation of
* {@code my.business.application_IV.businesslogic.jpa.JpaStatelessTransactionCrudDao} with the old semantics (entities) will no
* longer work. By changing the implementation of {@code my.business.application_IV.businesslogic.jpa.JpaStatelessTransactionCrudDao}
* to map old semantic POJO's (now no longer entities) to new entities (if at all possible), you make the old API forward compatible
* with the new semantics. Because this is not always possible with all methods of this interface in all circumstances, all methods
* can throw a {@code NoLongerSupportedError}.</p>
*
* @mudo unit tests
*/
public abstract class RemoteAtomicStatelessCrudDao implements AtomicStatelessCrudDao {

  private static final Log _LOG = LogFactory.getLog(RemoteAtomicStatelessCrudDao.class);


  /*<property name="statelessCrudJoinTransactionDao">
  -------------------------------------------------------------------------*/

  /** The delegate DAO that performs the actual CRUD work inside a required transaction. */
  @Basic
  public final RequiredTransactionStatelessCrudDao getRequiredTransactionStatelessCrudDao() {
    return $requiredTransactionStatelessCrudDao;
  }

  /**
   * Injects the transactional delegate DAO.
   * NOTE(review): the method name says "StatelessCrudJoinTransactionDao" while the
   * property is "requiredTransactionStatelessCrudDao"; the name is kept unchanged
   * because injection configuration elsewhere may refer to it.
   *
   * @param requiredTransactionStatelessCrudDao the delegate used by all CRUD methods
   */
  @MethodContract(
    post = @Expression("statelessCrudJoinTransactionDao == _statelessCrudJoinTransactionDao")
  )
  public final void setStatelessCrudJoinTransactionDao(RequiredTransactionStatelessCrudDao requiredTransactionStatelessCrudDao) {
    $requiredTransactionStatelessCrudDao = requiredTransactionStatelessCrudDao;
  }

  private RequiredTransactionStatelessCrudDao $requiredTransactionStatelessCrudDao;

  /*</property>*/


  /*<property name="exception handler">
  -------------------------------------------------------------------------*/

  /** Handler that translates raw throwables into the vernacular exception hierarchy. */
  @Basic
  public final ExceptionHandler getExceptionHandler() {
    return $exceptionHandler;
  }

  /**
   * Injects the exception handler all CRUD methods report failures to.
   *
   * @param exceptionHandler the handler; must be injected before any CRUD call
   */
  @MethodContract(
    post = @Expression("exceptionHandler == _exceptionHandler")
  )
  public final void setExceptionHandler(ExceptionHandler exceptionHandler) {
    $exceptionHandler = exceptionHandler;
  }

  private ExceptionHandler $exceptionHandler;

  /*</property>*/


  /**
   * Returns true if this RemoteAtomicStatelessCrudDao is involved in an
   * transaction (beginTransaction was called, but not yet rolled back
   * or committed).
   */
  @MethodContract(post = @Expression("result ? statelessCrudJoinTransactionDao != null && userTransaction != null && exceptionHandler != null"))
  public abstract boolean isOperational();

  /**
   * Starts a transaction. Must only be called by this class
   * (hence protected)
   */
  protected abstract void beginTransaction();

  /**
   * Commits a transaction. Must only be called by this class
   * (hence protected)
   */
  protected abstract void commitTransaction();

  /**
   * Rolls back a transaction. Must only be called by this class
   * (hence protected)
   */
  protected abstract void rollbackTransaction();

  /**
   * Retrieves all persistent beans of the given type, each call in its own transaction.
   * Failures are routed through the exception handler; unreachable {@code null} return
   * keeps the compiler happy.
   */
  public <_PersistentBean_ extends PersistentBean<?>> Set<_PersistentBean_>
  retrieveAllPersistentBeans(Class<_PersistentBean_> persistentBeanType, boolean retrieveSubClasses) {
    assert dependency(getExceptionHandler(), "exceptionHandler");
    try {
      beginTransaction();
      Set<_PersistentBean_> result = getRequiredTransactionStatelessCrudDao().retrieveAllPersistentBeans(persistentBeanType, retrieveSubClasses);
      commitTransaction();
      return result;
    }
    catch (Throwable t) {
      handleForNoException(t);
    }
    return null; // keep compiler happy
  }

  /**
   * Retrieves all versioned persistent beans of the given type changed since
   * {@code since}, in a dedicated transaction.
   */
  public <_VersionedPersistentBean_ extends VersionedPersistentBean<?, Timestamp>> Set<_VersionedPersistentBean_>
  retrieveAllPersistentBeansChangedSince(Class<_VersionedPersistentBean_> persistentBeanType, boolean retrieveSubClasses, Timestamp since) {
    assert dependency(getExceptionHandler(), "exceptionHandler");
    try {
      beginTransaction();
      Set<_VersionedPersistentBean_> result = getRequiredTransactionStatelessCrudDao().retrieveAllPersistentBeansChangedSince(persistentBeanType, retrieveSubClasses, since);
      commitTransaction();
      return result;
    }
    catch (Throwable t) {
      handleForNoException(t);
    }
    return null; // keep compiler happy
  }

  /**
   * Retrieves the persistent bean of the given type with the given id, in a
   * dedicated transaction.
   *
   * @throws IdNotFoundException if no bean with that id exists
   */
  public <_Id_ extends Serializable, _PersistentBean_ extends PersistentBean<_Id_>>
  _PersistentBean_ retrievePersistentBean(Class<_PersistentBean_> persistentBeanType, _Id_ id) throws IdNotFoundException {
    assert dependency(getExceptionHandler(), "exceptionHandler");
    try {
      beginTransaction();
      _PersistentBean_ result = getRequiredTransactionStatelessCrudDao().retrievePersistentBean(persistentBeanType, id);
      commitTransaction();
      return result;
    }
    catch (Throwable t) {
      handleForIdNotFoundException(t);
    }
    return null; // keep compiler happy
  }

  /** Updates the given versioned bean in a dedicated transaction. */
  public <_Id_ extends Serializable, _Version_ extends Serializable, _PB_ extends VersionedPersistentBean<_Id_, _Version_>>
  _PB_ updatePersistentBean(_PB_ pb) throws ApplicationException {
    assert dependency(getExceptionHandler(), "exceptionHandler");
    try {
      beginTransaction();
      _PB_ result = getRequiredTransactionStatelessCrudDao().updatePersistentBean(pb);
      commitTransaction();
      return result;
    }
    catch (Throwable t) {
      handleForApplicationException(t);
    }
    return null; // keep compiler happy
  }

  /** Creates the given versioned bean in a dedicated transaction. */
  public <_Id_ extends Serializable, _Version_ extends Serializable, _PB_ extends VersionedPersistentBean<_Id_, _Version_>>
  _PB_ createPersistentBean(_PB_ pb) throws ApplicationException {
    assert dependency(getExceptionHandler(), "exceptionHandler");
    try {
      beginTransaction();
      _PB_ result = getRequiredTransactionStatelessCrudDao().createPersistentBean(pb);
      commitTransaction();
      return result;
    }
    catch (Throwable t) {
      handleForApplicationException(t);
    }
    return null; // keep compiler happy
  }

  /** Deletes the given versioned bean in a dedicated transaction. */
  public <_Id_ extends Serializable, _Version_ extends Serializable, _PB_ extends VersionedPersistentBean<_Id_, _Version_>>
  _PB_ deletePersistentBean(_PB_ pb) throws ApplicationException {
    // FIX: this precondition assert was missing here although every sibling
    // CRUD method checks it before entering the try block.
    assert dependency(getExceptionHandler(), "exceptionHandler");
    try {
      beginTransaction();
      _PB_ result = getRequiredTransactionStatelessCrudDao().deletePersistentBean(pb);
      commitTransaction();
      return result;
    }
    catch (Throwable t) {
      handleForApplicationException(t);
    }
    return null; // keep compiler happy
  }

  /**
   * Rolls back and routes {@code t} through the handler; no ApplicationException
   * may escape, so one doing so is a programming error.
   */
  private void handleForNoException(Throwable t) throws ExternalError, AssertionError {
    Throwable finalException = robustRollback(t);
    try {
      getExceptionHandler().handleException(finalException, _LOG);
    }
    catch (ApplicationException metaExc) {
      unexpectedException(metaExc, "handleException can throw no ApplicationExceptions");
    }
  }

  /**
   * Rolls back and routes {@code t} through the handler, letting only
   * {@link IdNotFoundException} propagate to the caller.
   * (Renamed from the misspelled {@code handleForIdNotFoudException}; private, so safe.)
   */
  @SuppressWarnings("unchecked")
  private void handleForIdNotFoundException(Throwable t) throws ExternalError, AssertionError, IdNotFoundException {
    Throwable finalException = robustRollback(t);
    try {
      getExceptionHandler().handleException(finalException, _LOG, IdNotFoundException.class);
    }
    catch (IdNotFoundException infExc) {
      throw infExc;
    }
    catch (ApplicationException metaExc) {
      unexpectedException(metaExc, "handleException can throw no ApplicationExceptions");
    }
  }

  /** Rolls back and routes {@code t} through the handler, allowing ApplicationExceptions through. */
  @SuppressWarnings("unchecked")
  private void handleForApplicationException(Throwable t) throws ExternalError, AssertionError, ApplicationException {
    Throwable finalException = robustRollback(t);
    getExceptionHandler().handleException(finalException, _LOG, ApplicationException.class);
  }

  /**
   * Rolls back the current transaction; if rollback itself fails, that failure
   * supersedes the original reason and is reported instead.
   */
  private Throwable robustRollback(Throwable reasonForRollback) {
    Throwable finalException = reasonForRollback;
    try {
      rollbackTransaction();
    }
    catch (Throwable exc) {
      finalException = exc;
    }
    return finalException;
  }
}
|
lukaszwawrzyk/quasar
|
mongodb/src/main/scala/quasar/physical/mongodb/planner/common.scala
|
<gh_stars>0
/*
* Copyright 2014–2018 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.physical.mongodb.planner
import slamdata.Predef._
import quasar.Type, Type._
import quasar.contrib.scalaz._
import quasar.fp.ski._
import quasar.fs._, FileSystemError._
import quasar.fs.Planner.{InternalError, PlannerError}
import java.time.Instant
import matryoshka._
import matryoshka.data._
import matryoshka.implicits._
import matryoshka.patterns._
import scalaz._, Scalaz._
/** Shared helpers for the MongoDB query planners: error raising, type-check
  * generation, and small naming utilities. */
object common {

  /** Reader effect providing the query-execution start time. */
  type ExecTimeR[F[_]] = MonadReader_[F, Instant]

  // TODO: Remove this type.
  /** Planner result: either a planner error or a value. */
  type WBM[X] = PlannerError \/ X

  /** Brings a [[WBM]] into our `M`. */
  def liftM[M[_]: Monad: MonadFsErr, A](meh: WBM[A]): M[A] =
    meh.fold(raisePlannerError[M, A], _.point[M])

  /** Raises the given filesystem error in `M`. */
  def raiseErr[M[_], A](err: FileSystemError)(
    implicit ev: MonadFsErr[M]
  ): M[A] = ev.raiseError(err)

  /** Runs `ma`, recovering from filesystem errors with `f`. */
  def handleErr[M[_], A](ma: M[A])(f: FileSystemError => M[A])(
    implicit ev: MonadFsErr[M]
  ): M[A] = ev.handleError(ma)(f)

  /** Wraps a planner error as a filesystem error and raises it. */
  def raisePlannerError[M[_]: MonadFsErr, A](err: PlannerError): M[A] =
    raiseErr(qscriptPlanningFailed(err))

  /** Raises an internal planner error with the given message. */
  def raiseInternalError[M[_]: MonadFsErr, A](msg: String): M[A] =
    raisePlannerError(InternalError.fromMsg(msg))

  /** Raises an internal error flagging `label` as not implemented. */
  def unimplemented[M[_]: MonadFsErr, A](label: String): M[A] =
    raiseInternalError(s"unimplemented $label")

  /** Collapses a `Free` structure over `F` back into a plain `T[F]`. */
  def unpack[T[_[_]]: BirecursiveT, F[_]: Traverse](t: Free[F, T[F]]): T[F] =
    t.cata(interpret[F, T[F], T[F]](ι, _.embed))

  /** Builds a runtime type check for `typ` from the partial function `f`,
    * when possible. Union types recurse into both branches and combine the
    * resulting checks with `or`; several temporal unions are collapsed to the
    * `OffsetDateTime` check, and concrete array types to the `AnyArray` check.
    * Returns `None` when no check can be generated for the type.
    */
  @SuppressWarnings(Array("org.wartremover.warts.Recursion"))
  def generateTypeCheck[In, Out](or: (Out, Out) => Out)(f: PartialFunction[Type, In => Out]):
      Type => Option[In => Out] =
    typ => f.lift(typ).fold(
      typ match {
        case Type.Temporal =>
          generateTypeCheck(or)(f)(Type.OffsetDateTime)
        case Type.LocalDateTime ⨿ OffsetDateTime => // time_of_day
          generateTypeCheck(or)(f)(Type.OffsetDateTime)
        case Type.OffsetDateTime ⨿ Type.OffsetDate ⨿
            Type.LocalDateTime ⨿ Type.LocalDate => // date_part
          generateTypeCheck(or)(f)(Type.OffsetDateTime)
        case Type.Arr(_) => generateTypeCheck(or)(f)(Type.AnyArray)
        case a ⨿ b =>
          (generateTypeCheck(or)(f)(a) ⊛ generateTypeCheck(or)(f)(b))(
            (a, b) => ((expr: In) => or(a(expr), b(expr))))
        case _ => None
      })(Some(_))

  /** Concatenates a prefix with an index to form a synthetic field name. */
  def createFieldName(prefix: String, i: Int): String = prefix + i.toString

  /** Well-known field names used by generated pipelines. */
  object Keys {
    val wrap = "wrap"
  }
}
|
mrscorpion/MSPIECES
|
momentProgramme/momentProgramme/MVC/Collection/View/SearchViewTableViewHeader.h
|
<reponame>mrscorpion/MSPIECES<filename>momentProgramme/momentProgramme/MVC/Collection/View/SearchViewTableViewHeader.h
//
// SearchViewTableViewHeader.h
// momentProgramme
//
// Created by mr.scorpion on 15/11/30.
// Copyright © 2015年 mr.scorpion. All rights reserved.
//
#import "BSView.h"

// Section header view used by the search screen's table view.
@interface SearchViewTableViewHeader : BSView

// Title label displayed in the header.
// NOTE(review): declared `retain` (synonymous with `strong` under ARC) —
// confirm whether this target uses ARC before normalizing the attribute.
@property (retain, nonatomic) UILabel *label;

@end
|
auto-flow/autoflow
|
autoflow/opt/config_generators/density_estimator/base.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author : <NAME>
# @Contact : <EMAIL>
import random
from typing import List, Optional
import numpy as np
from scipy.stats import truncnorm
from sklearn.base import BaseEstimator
from autoflow.opt.utils import ConfigurationTransformer
from autoflow.utils.logging_ import get_logger
class BaseDensityEstimator(BaseEstimator):
    """Common scaffolding for the density estimators used by the optimizer.

    Subclasses are expected to implement the actual density fitting; this base
    class stores the shared hyper-parameters, validates inputs against the
    attached ``ConfigurationTransformer``, and offers a helper for breaking up
    constant sample columns (KDE cannot handle zero-variance data).
    """

    def __init__(
            self,
            top_n_percent=15, bandwidth_factor=3, min_bandwidth=1e3,
            bw_estimation="normal_reference", min_points_in_kde=2,
    ):
        # NOTE(review): min_bandwidth defaults to 1e3 (= 1000). If this is
        # meant as a lower bound on KDE bandwidth it looks like a typo for
        # 1e-3 — confirm before changing, since it alters behavior.
        self.min_points_in_kde = min_points_in_kde
        self.bw_estimation = bw_estimation
        self.min_bandwidth = min_bandwidth
        self.bandwidth_factor = bandwidth_factor
        self.top_n_percent = top_n_percent
        # Set externally before fit/predict; maps configurations to vectors.
        self.config_transformer: Optional[ConfigurationTransformer] = None
        self.logger = get_logger(self)

    def fit(self, X: np.ndarray, y: np.ndarray):
        """Validate that X matches the attached transformer; subclasses do the work."""
        assert self.config_transformer is not None
        assert X.shape[1] == len(self.config_transformer.n_choices_list)
        return self

    def predict(self, X: np.ndarray):
        """Validate that X matches the attached transformer; subclasses do the work."""
        assert self.config_transformer is not None
        assert X.shape[1] == len(self.config_transformer.n_choices_list)

    def process_constants_vector(self, vec: np.ndarray, n_choices, bw, mode="extend"):
        """De-generate a constant column by injecting one differing value.

        A categorical column (``n_choices > 1``) gets an unused category; a
        continuous column (``n_choices == 0``) gets a truncated-normal draw
        around the constant, clipped to [0, 1].  ``mode`` chooses whether the
        new value is appended ("extend") or overwrites the first entry
        ("replace").  Non-constant vectors are returned untouched.
        """
        if np.unique(vec).size != 1:
            return vec

        if n_choices > 1:
            unused_choices = set(range(n_choices)) - set(vec)
            replacement = random.choice(list(unused_choices))
        elif n_choices == 0:
            center = vec[0]
            bw = max(0.1, bw)  # avoid a degenerate (near-zero) bandwidth
            while True:
                replacement = truncnorm.rvs(
                    -center / bw, (1 - center) / bw, loc=center, scale=bw)
                replacement = np.clip(replacement, 0, 1)
                if replacement != center:
                    break
        else:
            # n_choices == 1: a single-category variable cannot be de-generated.
            raise ValueError

        if mode == "extend":
            return np.hstack([vec, [replacement]])
        elif mode == "replace":
            vec[0] = replacement
            return vec
        else:
            raise NotImplementedError
|
SamirAroudj/BaseProject
|
Graphics3D/PointLight.h
|
/*
* Copyright (C) 2017 by Author: Aroudj, Samir, born in Suhl, Thueringen, Germany
* All rights reserved.
*
* This software may be modified and distributed under the terms
* of the BSD 3-Clause license. See the License.txt file for details.
*/
#ifndef _POINT_LIGHT_H_
#define _POINT_LIGHT_H_

// FIX: <cassert> is required for the assert() in setRange(); do not rely on
// it arriving transitively through Light.h.
#include <cassert>

#include "Light.h"

namespace Graphics
{
	/// Omnidirectional light source located at a point, with distance
	/// attenuation and a maximum influence range.
	class PointLight : public Light
	{
	public:
		/// Creates a point light from its color components, world position,
		/// attenuation factors and a strictly positive range.
		/// NOTE(review): the meaning/order of the attenuation components is not
		/// visible here — confirm against the Light/renderer implementation.
		PointLight(const Color &ambient, const Color &diffuse, const Color &specular,
			const Math::Vector3 &position, const Math::Vector3 &attenuationFactors, Real range);
		virtual ~PointLight();

		const Math::Vector3 &getAttenuationFactors() const { return mAttenuationFactors; }

		/// Packs this light's state into the structure consumed by the renderer.
		virtual LightData getData() const;

		const Math::Vector3 &getPosition() const { return mPosition; }
		Real getRange() const { return mRange; }

		void setAttenuationFactors(const Math::Vector3 &attenuationFactors);
		void setPosition(const Math::Vector3 &position) { mPosition = position; }

		/// The range must be strictly positive.
		void setRange(Real range) { assert(range > 0.0f); mRange = range; }

	private:
		Math::Vector3 mAttenuationFactors; ///< distance attenuation coefficients
		Math::Vector3 mPosition;           ///< world-space light position
		Real mRange;                       ///< maximum influence distance (> 0)
	};
}

// FIX: extra tokens after #endif are ill-formed; the old
// "#endif _POINT_LIGHT_H_" is now a proper comment.
#endif // _POINT_LIGHT_H_
|
JoaoBaptMG/ReboundTheGame
|
MainGame/drawables/GUIMap.hpp
|
<gh_stars>10-100
//
// Copyright (c) 2016-2018 <NAME> Silva.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
//
#pragma once

#include <SFML/Graphics.hpp>
#include <memory>
#include <vector>
#include <chronoUtils.hpp>

class LevelData;

// In-game map widget: builds a texture/vertex representation of the current
// level and draws it, supporting per-room reveal/hide animations.
class GUIMap final : public sf::Drawable
{
    std::shared_ptr<LevelData> curLevel;
    // FIX: in-class initializers added — the constructor only initializes
    // extendedFrame, which left this pointer and curRoom indeterminate.
    sf::Texture* mapTexture = nullptr;
    sf::VertexArray vertArray;
    sf::Vector2f displayPosition;
    sf::Color mapBlinkColor;
    size_t curRoom = 0;
    FrameTime initTime;
    bool extendedFrame;

public:
    GUIMap(bool extendedFrame = false) : extendedFrame(extendedFrame) {}
    ~GUIMap() {}

    void update(FrameTime curTime);

    auto getCurLevel() const { return curLevel; }
    void setCurLevel(std::shared_ptr<LevelData> level);
    void buildLevelTexture();

    auto getCurRoom() const { return curRoom; }
    void setCurRoom(size_t room) { curRoom = room; }

    auto getDisplayPosition() const { return displayPosition; }
    void setDisplayPosition(sf::Vector2f pos) { displayPosition = pos; }

    auto getExtendedFrame() const { return extendedFrame; }
    void setExtendedFrame(bool f) { extendedFrame = f; }

    sf::FloatRect getBounds() const;

    // Room visibility animations on the map.
    void presentRoom(size_t room);
    void presentRoomFull(size_t room);
    void hideRoom(size_t room);

private:
    virtual void draw(sf::RenderTarget& target, sf::RenderStates states) const;
};
|
tugrulkarakaya/ia-case-api
|
src/test/java/uk/gov/hmcts/reform/iacaseapi/domain/handlers/presubmit/RestoreStateFromAdjournHandlerTest.java
|
package uk.gov.hmcts.reform.iacaseapi.domain.handlers.presubmit;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
import static uk.gov.hmcts.reform.iacaseapi.domain.entities.AsylumCaseFieldDefinition.*;
import java.util.Optional;
import org.assertj.core.api.Assertions;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import uk.gov.hmcts.reform.iacaseapi.domain.entities.AsylumCase;
import uk.gov.hmcts.reform.iacaseapi.domain.entities.ccd.CaseDetails;
import uk.gov.hmcts.reform.iacaseapi.domain.entities.ccd.Event;
import uk.gov.hmcts.reform.iacaseapi.domain.entities.ccd.State;
import uk.gov.hmcts.reform.iacaseapi.domain.entities.ccd.callback.Callback;
import uk.gov.hmcts.reform.iacaseapi.domain.entities.ccd.callback.PreSubmitCallbackResponse;
import uk.gov.hmcts.reform.iacaseapi.domain.entities.ccd.callback.PreSubmitCallbackStage;
import uk.gov.hmcts.reform.iacaseapi.domain.entities.ccd.field.YesOrNo;
@RunWith(MockitoJUnitRunner.class)
public class RestoreStateFromAdjournHandlerTest {

    @Mock private Callback<AsylumCase> callback;
    @Mock private CaseDetails<AsylumCase> caseDetails;
    @Mock private AsylumCase asylumCase;
    @Mock private PreSubmitCallbackResponse<AsylumCase> callbackResponse;

    // Hearing date that the handler is expected to restore onto the case.
    private final String listCaseHearingDate = "05/05/2020";

    private RestoreStateFromAdjournHandler restoreStateFromAdjournHandler;

    @Before
    public void setUp() {
        restoreStateFromAdjournHandler = new RestoreStateFromAdjournHandler();
    }

    @Test
    public void should_return_updated_state_for_return_state_from_adjourn_adjourned_state() {
        // Arrange: a RESTORE_STATE_FROM_ADJOURN callback whose case remembers
        // the state and hearing date captured when it was adjourned.
        when(callback.getCaseDetails()).thenReturn(caseDetails);
        when(callback.getEvent()).thenReturn(Event.RESTORE_STATE_FROM_ADJOURN);
        when(caseDetails.getCaseData()).thenReturn(asylumCase);
        when(asylumCase.read(STATE_BEFORE_ADJOURN_WITHOUT_DATE, String.class))
            .thenReturn(Optional.of(State.PREPARE_FOR_HEARING.toString()));
        when(asylumCase.read(DATE_BEFORE_ADJOURN_WITHOUT_DATE, String.class))
            .thenReturn(Optional.of(listCaseHearingDate));

        PreSubmitCallbackResponse<AsylumCase> response =
            restoreStateFromAdjournHandler.handle(PreSubmitCallbackStage.ABOUT_TO_SUBMIT, callback, callbackResponse);

        // Assert: previous state restored, relisting requested, and all
        // adjournment bookkeeping fields cleared.
        assertNotNull(response);
        Assertions.assertThat(response.getState()).isEqualTo(State.PREPARE_FOR_HEARING);
        assertEquals(asylumCase, response.getData());
        verify(asylumCase, times(1)).write(DOES_THE_CASE_NEED_TO_BE_RELISTED, YesOrNo.YES);
        verify(asylumCase, times(1)).write(LIST_CASE_HEARING_DATE, listCaseHearingDate);
        verify(asylumCase, times(1)).clear(DATE_BEFORE_ADJOURN_WITHOUT_DATE);
        verify(asylumCase, times(1)).clear(STATE_BEFORE_ADJOURN_WITHOUT_DATE);
        verify(asylumCase, times(1)).clear(ADJOURN_HEARING_WITHOUT_DATE_REASONS);
    }

    @Test
    public void handling_should_throw_if_cannot_actually_handle() {
        // ABOUT_TO_START is not a stage this handler accepts.
        assertThatThrownBy(
                () -> restoreStateFromAdjournHandler.handle(PreSubmitCallbackStage.ABOUT_TO_START, callback, callbackResponse))
            .hasMessage("Cannot handle callback")
            .isExactlyInstanceOf(IllegalStateException.class);
    }

    @Test
    @SuppressWarnings("unchecked")
    public void it_can_handle_callback() {
        // The handler must accept exactly one (event, stage) combination:
        // RESTORE_STATE_FROM_ADJOURN at ABOUT_TO_SUBMIT.
        for (Event event : Event.values()) {
            when(callback.getEvent()).thenReturn(event);
            for (PreSubmitCallbackStage callbackStage : PreSubmitCallbackStage.values()) {
                boolean expected = event == Event.RESTORE_STATE_FROM_ADJOURN
                    && callbackStage == PreSubmitCallbackStage.ABOUT_TO_SUBMIT;
                assertEquals(expected, restoreStateFromAdjournHandler.canHandle(callbackStage, callback));
            }
            reset(callback);
        }
    }

    @Test
    public void should_not_allow_null_arguments() {
        assertThatThrownBy(() -> restoreStateFromAdjournHandler.canHandle(null, callback))
            .hasMessage("callbackStage must not be null")
            .isExactlyInstanceOf(NullPointerException.class);
        assertThatThrownBy(() -> restoreStateFromAdjournHandler.canHandle(PreSubmitCallbackStage.ABOUT_TO_SUBMIT, null))
            .hasMessage("callback must not be null")
            .isExactlyInstanceOf(NullPointerException.class);
        assertThatThrownBy(() -> restoreStateFromAdjournHandler.handle(null, callback, callbackResponse))
            .hasMessage("callbackStage must not be null")
            .isExactlyInstanceOf(NullPointerException.class);
        assertThatThrownBy(() -> restoreStateFromAdjournHandler.handle(PreSubmitCallbackStage.ABOUT_TO_SUBMIT, null, null))
            .hasMessage("callback must not be null")
            .isExactlyInstanceOf(NullPointerException.class);
    }
}
|
sulthonzh/zaruba
|
util/getProjectServiceNames.go
|
package util
import (
"fmt"
"path/filepath"
"strings"
"github.com/state-alchemists/zaruba/config"
)
// GetProjectServiceNames returns the names of the services defined in the
// project. A task counts as a service runner when all of the following hold:
//   - its name is "run<Service>" (and not the bare "run" task),
//   - the project declares an envRef keyed by the lowerCamel service name,
//   - the task file lives at <projectDir>/zaruba-tasks/<service>.zaruba.yaml.
func GetProjectServiceNames(project *config.Project) (serviceNames []string) {
	projectDir := filepath.Dir(project.GetFileLocation())
	serviceNames = []string{}
	for taskName, task := range project.Tasks {
		// Only "run*" tasks (excluding plain "run") can denote services.
		if taskName == "run" || !strings.HasPrefix(taskName, "run") {
			continue
		}
		trimmed := strings.TrimPrefix(taskName, "run")
		serviceName := strings.ToLower(trimmed[:1]) + trimmed[1:]
		// The service must have a matching environment reference.
		if _, ok := project.EnvRefMap[serviceName]; !ok {
			continue
		}
		// The task must live in the project's zaruba-tasks directory.
		expected := filepath.Join(projectDir, "zaruba-tasks", fmt.Sprintf("%s.zaruba.yaml", serviceName))
		if task.GetFileLocation() != expected {
			continue
		}
		serviceNames = append(serviceNames, serviceName)
	}
	return serviceNames
}
|
ANDROFAST/delivery_articulos
|
delivery/app-release_source_from_JADX/com/google/android/gms/internal/zzas.java
|
package com.google.android.gms.internal;
import java.io.IOException;
/**
 * Protobuf-style encoder that buffers writes into a fixed-size byte array.
 * {@link #zzad()} returns exactly the bytes written so far, trimming the
 * unused tail of the buffer when necessary.
 */
class zzas implements zzaq {
    /** Output stream writing into {@link #zzom}. */
    private zztd zzol;
    /** Backing buffer; recreated on every {@link #reset()}. */
    private byte[] zzom;
    /** Fixed capacity of the backing buffer, set at construction. */
    private final int zzon;

    public zzas(int i) {
        this.zzon = i;
        reset();
    }

    /** Discards any buffered bytes and starts a fresh buffer of the fixed capacity. */
    public void reset() {
        this.zzom = new byte[this.zzon];
        this.zzol = zztd.zzD(this.zzom);
    }

    /**
     * Returns the bytes written since the last reset.
     *
     * @throws IOException if the stream reports a negative remaining space
     */
    public byte[] zzad() throws IOException {
        int remaining = this.zzol.zzHx();
        if (remaining < 0) {
            throw new IOException();
        }
        if (remaining == 0) {
            // Buffer exactly full: hand back the backing array directly.
            return this.zzom;
        }
        // Fix: the decompiled original declared the trimmed copy as Object,
        // which does not compile for a byte[]-returning method. Copy only the
        // written prefix into a correctly typed array.
        byte[] written = new byte[this.zzom.length - remaining];
        System.arraycopy(this.zzom, 0, written, 0, written.length);
        return written;
    }

    public void zzb(int i, long j) throws IOException {
        this.zzol.zzb(i, j);
    }

    public void zzb(int i, String str) throws IOException {
        this.zzol.zzb(i, str);
    }
}
|
rdubois/tcommon-studio-se
|
main/plugins/org.talend.core.runtime/src/main/java/org/talend/core/model/process/IReplaceNodeInProcess.java
|
<reponame>rdubois/tcommon-studio-se<filename>main/plugins/org.talend.core.runtime/src/main/java/org/talend/core/model/process/IReplaceNodeInProcess.java
// ============================================================================
//
// Copyright (C) 2006-2015 Talend Inc. - www.talend.com
//
// This source code is available under agreement available at
// %InstallDIR%\features\org.talend.rcp.branding.%PRODUCTNAME%\%PRODUCTNAME%license.txt
//
// You should have received a copy of the agreement
// along with this program; if not, write to Talend SA
// 9 rue Pages 92150 Suresnes, France
//
// ============================================================================
package org.talend.core.model.process;
import java.util.List;
import org.talend.core.model.process.INode;
import org.talend.core.model.process.IProcess2;
/**
 * Hook allowing a component to rebuild or patch a graphical process before it
 * is run from the GUI.
 */
public interface IReplaceNodeInProcess {

    /**
     * Rebuilds the current graphical process starting from the given node.
     * (Interface methods are implicitly public; the redundant modifier is dropped.)
     */
    void rebuildGraphicProcessFromNode(INode node, List<INode> graphicalNodeList);

    /** Returns true when the given process must be rebuilt unconditionally. */
    boolean isNeedForceRebuild(IProcess2 process);

    /** Invoked just before the job is run from the GUI. */
    void beforeRunJobInGUI(IProcess2 process);
}
|
phoenixsbk/kvmmgr
|
frontend/webadmin/modules/uicommonweb/src/main/java/org/ovirt/engine/ui/uicommonweb/models/datacenters/qos/NewQosModel.java
|
package org.ovirt.engine.ui.uicommonweb.models.datacenters.qos;
import org.ovirt.engine.core.common.action.QosParametersBase;
import org.ovirt.engine.core.common.action.VdcActionType;
import org.ovirt.engine.core.common.action.VdcReturnValueBase;
import org.ovirt.engine.core.common.businessentities.StoragePool;
import org.ovirt.engine.core.common.businessentities.qos.QosBase;
import org.ovirt.engine.core.compat.Guid;
import org.ovirt.engine.ui.frontend.Frontend;
import org.ovirt.engine.ui.uicommonweb.models.Model;
import org.ovirt.engine.ui.uicompat.FrontendActionAsyncResult;
import org.ovirt.engine.ui.uicompat.IFrontendActionAsyncCallback;
/**
 * Base model for dialogs that create a new QoS entity. Subclasses supply the
 * backend action and its parameters; this class runs the action and, on
 * success, copies the backend-generated id onto the model.
 */
public abstract class NewQosModel<T extends QosBase, P extends QosParametersModel<T>> extends QosModel<T, P> {

    public NewQosModel(Model sourceModel, StoragePool dataCenter) {
        super(sourceModel, dataCenter);
    }

    @Override
    protected void executeSave() {
        final QosParametersBase<T> parameters = getParameters();
        parameters.setQos(getQos());
        Frontend.getInstance().runAction(getVdcAction(), parameters, new IFrontendActionAsyncCallback() {
            @Override
            public void executed(FrontendActionAsyncResult result) {
                VdcReturnValueBase returnValue = result.getReturnValue();
                boolean succeeded = returnValue != null && returnValue.getSucceeded();
                if (succeeded) {
                    // Propagate the id generated by the backend to the model.
                    getQos().setId((Guid) returnValue.getActionReturnValue());
                }
                postSaveAction(succeeded);
            }
        });
    }

    /** Returns the backend action used to persist the new QoS entity. */
    protected abstract VdcActionType getVdcAction();

    /** Returns the parameters handed to the backend action. */
    protected abstract QosParametersBase<T> getParameters();
}
|
gunnarmorling/infinispan
|
core/src/main/java/org/infinispan/configuration/cache/AsyncConfigurationBuilder.java
|
package org.infinispan.configuration.cache;
import static org.infinispan.configuration.cache.AsyncConfiguration.*;
import java.lang.invoke.MethodHandles;
import java.util.concurrent.TimeUnit;
import org.infinispan.commons.configuration.Builder;
import org.infinispan.commons.configuration.attributes.AttributeSet;
import org.infinispan.configuration.global.GlobalConfiguration;
import org.infinispan.remoting.ReplicationQueue;
import org.infinispan.remoting.ReplicationQueueImpl;
import org.infinispan.util.logging.Log;
import org.infinispan.util.logging.LogFactory;
/**
 * Configures asynchronous cluster communication: when enabled, a thread that
 * sends a message over the wire does not wait for an acknowledgment before
 * returning. Asynchronous configuration is mutually exclusive with
 * synchronous configuration.
 */
public class AsyncConfigurationBuilder extends AbstractClusteringConfigurationChildBuilder implements Builder<AsyncConfiguration> {
   private static final Log log = LogFactory.getLog(MethodHandles.lookup().lookupClass(), Log.class);

   private final AttributeSet attributes;

   protected AsyncConfigurationBuilder(ClusteringConfigurationBuilder builder) {
      super(builder);
      attributes = AsyncConfiguration.attributeDefinitionSet();
   }

   /**
    * Enables asynchronous marshalling: the caller returns sooner, at the risk
    * of operation reordering. More information at <a
    * href="https://docs.jboss.org/author/display/ISPN/Asynchronous+Options"
    * >https://docs.jboss.org/author/display/ISPN/Asynchronous+Options</a>.
    */
   public AsyncConfigurationBuilder asyncMarshalling() {
      return asyncMarshalling(true);
   }

   /** Sets asynchronous marshalling explicitly on or off. */
   public AsyncConfigurationBuilder asyncMarshalling(boolean async) {
      attributes.attribute(MARSHALLING).set(async);
      return this;
   }

   /**
    * Enables synchronous marshalling. More information at <a
    * href="https://docs.jboss.org/author/display/ISPN/Asynchronous+Options"
    * >https://docs.jboss.org/author/display/ISPN/Asynchronous+Options</a>.
    */
   public AsyncConfigurationBuilder syncMarshalling() {
      return asyncMarshalling(false);
   }

   /**
    * The replication queue implementation to use, {@link ReplicationQueueImpl}
    * by default.
    *
    * NOTE: Currently Infinispan will not use the object instance but will
    * instantiate a new instance of its class, so do not expect any state to
    * survive and provide a no-args constructor. To be resolved in
    * Infinispan 5.2.0.
    */
   public AsyncConfigurationBuilder replQueue(ReplicationQueue replicationQueue) {
      attributes.attribute(REPLICATION_QUEUE).set(replicationQueue);
      return this;
   }

   /**
    * When the replication queue is enabled, controls how often (in
    * milliseconds) the asynchronous flushing thread runs.
    */
   public AsyncConfigurationBuilder replQueueInterval(long interval) {
      attributes.attribute(REPLICATION_QUEUE_INTERVAL).set(interval);
      return this;
   }

   /**
    * When the replication queue is enabled, controls how often the
    * asynchronous flushing thread runs, in the given time unit.
    */
   public AsyncConfigurationBuilder replQueueInterval(long interval, TimeUnit unit) {
      return replQueueInterval(unit.toMillis(interval));
   }

   /**
    * When the replication queue is enabled, triggers a flush once the queue
    * reaches this many elements.
    */
   public AsyncConfigurationBuilder replQueueMaxElements(int elements) {
      attributes.attribute(REPLICATION_QUEUE_MAX_ELEMENTS).set(elements);
      return this;
   }

   /**
    * If true, forces all async communications to be queued up and sent out
    * periodically as a batch.
    */
   public AsyncConfigurationBuilder useReplQueue(boolean use) {
      attributes.attribute(USE_REPLICATION_QUEUE).set(use);
      return this;
   }

   @Override
   public void validate() {
      // The replication queue only makes sense for non-distributed,
      // asynchronous caches; reject any other combination.
      boolean replQueueEnabled = attributes.attribute(USE_REPLICATION_QUEUE).get();
      if (replQueueEnabled && getClusteringBuilder().cacheMode().isDistributed())
         throw log.noReplicationQueueDistributedCache();
      if (replQueueEnabled && getClusteringBuilder().cacheMode().isSynchronous())
         throw log.replicationQueueOnlyForAsyncCaches();
   }

   @Override
   public void validate(GlobalConfiguration globalConfig) {
      // No global-configuration constraints for async settings.
   }

   @Override
   public AsyncConfiguration create() {
      return new AsyncConfiguration(attributes.protect());
   }

   @Override
   public AsyncConfigurationBuilder read(AsyncConfiguration template) {
      this.attributes.read(template.attributes());
      return this;
   }

   @Override
   public String toString() {
      return AsyncConfigurationBuilder.class.getSimpleName() + attributes;
   }
}
|
neurlang/wayland
|
wl/error.go
|
<filename>wl/error.go
package wl
// combinedError is a tuple of an External and an Internal error
type combinedError [2]error
func (c combinedError) Error() string {
return c[0].Error() + ": " + c[1].Error()
}
func (c combinedError) Unwrap() error {
return c[1]
}
func (c combinedError) External() error {
return c[0]
}
|
abhaikollara/tensorflow
|
tensorflow/core/distributed_runtime/worker_session.h
|
<filename>tensorflow/core/distributed_runtime/worker_session.h<gh_stars>10-100
/* Copyright 2016 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifndef TENSORFLOW_CORE_DISTRIBUTED_RUNTIME_WORKER_SESSION_H_
#define TENSORFLOW_CORE_DISTRIBUTED_RUNTIME_WORKER_SESSION_H_
#include <string>
#include "tensorflow/core/common_runtime/device_mgr.h"
#include "tensorflow/core/distributed_runtime/cluster_function_library_runtime.h"
#include "tensorflow/core/distributed_runtime/graph_mgr.h"
#include "tensorflow/core/distributed_runtime/worker_cache.h"
namespace tensorflow {
class ClusterFunctionLibraryRuntime;
class GraphMgr;
class WorkerCacheInterface;
// WorkerSession encapsulates all of the state relating to a given session.
class WorkerSession {
 public:
  // Collection of local devices. These devices are typically
  // RenamedDevices in all except the SessionMgr.legacy_session_ and
  // sessions created with `isolate_session_state == false`. In the
  // those cases, this method returns a pointer to a borrowed
  // DeviceMgr (typically the `worker_env.device_mgr`).
  DeviceMgr* device_mgr() {
    return device_mgr_ ? device_mgr_.get() : borrowed_device_mgr_;
  }

  // Manager for devices hosted on remote workers, owned by this session.
  DynamicDeviceMgr* remote_device_mgr() { return remote_device_mgr_.get(); }

  // Identifier of this session.
  const string& session_name() const { return session_name_; }
  // Fully qualified worker name, e.g. /job:mnist/replica:0/task:1.
  const string& worker_name() const { return worker_name_; }

  // Cache from which WorkerInterface instances can be obtained.
  WorkerCacheInterface* worker_cache() const { return worker_cache_.get(); }
  // Registry of graphs registered under this session.
  GraphMgr* graph_mgr() const { return graph_mgr_.get(); }

  // Function library runtime used to run functions on remote workers.
  ClusterFunctionLibraryRuntime* cluster_flr() const {
    return cluster_flr_.get();
  }

  // Constructs a session that owns its DeviceMgr.
  WorkerSession(const string& session_name, const string& worker_name,
                std::unique_ptr<WorkerCacheInterface> worker_cache,
                std::unique_ptr<DeviceMgr> device_mgr,
                std::unique_ptr<GraphMgr> graph_mgr,
                std::unique_ptr<DynamicDeviceMgr> remote_device_mgr);

  // Constructs a session that borrows a DeviceMgr owned elsewhere
  // (see the comment on device_mgr() above).
  static std::shared_ptr<WorkerSession> CreateWithBorrowedDeviceMgr(
      const string& session_name, const string& worker_name,
      std::unique_ptr<WorkerCacheInterface> worker_cache,
      DeviceMgr* borrowed_device_mgr, std::unique_ptr<GraphMgr> graph_mgr,
      std::unique_ptr<DynamicDeviceMgr> remote_device_mgr);

  // Update an existing worker session with new set of remote workers and
  // devices. Added devices will be owned by the worker session, and removed
  // devices will be freed by their names.
  Status UpdateWorkerCacheAndDevices(
      std::unique_ptr<WorkerCacheInterface> new_worker_cache,
      std::vector<std::unique_ptr<Device>> added_remote_devices,
      const std::vector<Device*>& removed_remote_devices);

  ~WorkerSession();

 private:
  // Private constructor backing CreateWithBorrowedDeviceMgr.
  WorkerSession(const string& session_name, const string& worker_name,
                std::unique_ptr<WorkerCacheInterface> worker_cache,
                DeviceMgr* borrowed_device_mgr,
                std::unique_ptr<GraphMgr> graph_mgr,
                std::unique_ptr<DynamicDeviceMgr> remote_device_mgr);

  // The name of the session.
  const string session_name_;

  // The name of the worker. E.g., /job:mnist/replica:0/task:1.
  const string worker_name_;

  // Object from which WorkerInterface instances can be obtained.
  std::unique_ptr<WorkerCacheInterface> worker_cache_;

  // graph_mgr keeps track of the registered graphs of this session.
  //
  // Note: graph_mgr must be deleted before rendezvous_mgr!
  // Note: graph_mgr must be deleted before device_mgr!
  const std::unique_ptr<GraphMgr> graph_mgr_;

  std::unique_ptr<ClusterFunctionLibraryRuntime> cluster_flr_;

  // Owned local device manager; null when borrowed_device_mgr_ is used.
  const std::unique_ptr<DeviceMgr> device_mgr_;
  DeviceMgr* const borrowed_device_mgr_;  // Not owned.
  std::unique_ptr<DynamicDeviceMgr> remote_device_mgr_;
};
} // namespace tensorflow
#endif // TENSORFLOW_CORE_DISTRIBUTED_RUNTIME_WORKER_SESSION_H_
|
EsupPortail/esup-ecandidat
|
src/main/java/fr/univlorraine/ecandidat/views/windows/CtrCandPostItReadWindow.java
|
<gh_stars>10-100
/**
* ESUP-Portail eCandidat - Copyright (c) 2016 ESUP-Portail consortium
*
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package fr.univlorraine.ecandidat.views.windows;
import java.util.List;
import javax.annotation.Resource;
import org.springframework.beans.factory.annotation.Configurable;
import org.springframework.context.ApplicationContext;
import com.vaadin.data.util.BeanItemContainer;
import com.vaadin.server.FontAwesome;
import com.vaadin.ui.Alignment;
import com.vaadin.ui.HorizontalLayout;
import com.vaadin.ui.Label;
import com.vaadin.ui.Table;
import com.vaadin.ui.Table.ColumnGenerator;
import com.vaadin.ui.UI;
import com.vaadin.ui.VerticalLayout;
import com.vaadin.ui.Window;
import fr.univlorraine.ecandidat.controllers.CandidatController;
import fr.univlorraine.ecandidat.controllers.CandidatureCtrCandController;
import fr.univlorraine.ecandidat.controllers.DroitProfilController;
import fr.univlorraine.ecandidat.controllers.IndividuController;
import fr.univlorraine.ecandidat.controllers.UserController;
import fr.univlorraine.ecandidat.entities.ecandidat.Candidature;
import fr.univlorraine.ecandidat.entities.ecandidat.DroitFonctionnalite;
import fr.univlorraine.ecandidat.entities.ecandidat.PostIt;
import fr.univlorraine.ecandidat.entities.ecandidat.PostIt_;
import fr.univlorraine.ecandidat.utils.NomenclatureUtils;
import fr.univlorraine.ecandidat.vaadin.components.OneClickButton;
import fr.univlorraine.ecandidat.vaadin.components.TableFormating;
import fr.univlorraine.ecandidat.views.windows.CtrCandActionCandidatureWindow.ChangeCandidatureWindowListener;
/**
 * Window displaying the post-its (notes) attached to an application.
 * @author <NAME>
 */
@Configurable(preConstruction = true)
@SuppressWarnings("serial")
public class CtrCandPostItReadWindow extends Window {

	/** serialVersionUID **/
	private static final long serialVersionUID = -7776558654950981770L;

	@Resource
	private transient ApplicationContext applicationContext;
	@Resource
	private transient CandidatController candidatController;
	@Resource
	private transient CandidatureCtrCandController candidatureCtrCandController;
	@Resource
	private transient DroitProfilController droitProfilController;
	@Resource
	private transient UserController userController;
	@Resource
	private transient IndividuController individuController;

	// Column order of the post-it table: creation date, author, message.
	public static final String[] FIELDS_ORDER = {
			PostIt_.datCrePostIt.getName(),
			PostIt_.userCrePostIt.getName(),
			PostIt_.messagePostIt.getName() };

	/* Components */
	private OneClickButton btnClose;

	/**
	 * Creates a window displaying the post-it history of an application.
	 * @param candidature
	 *            the application being viewed
	 * @param listeDroit
	 *            the rights of the current user, used to decide whether the
	 *            add/delete buttons are shown
	 * @param changeCandidatureWindowListener
	 *            listener notified when a post-it is added or removed so the
	 *            caller can keep its own view in sync (may be null)
	 */
	public CtrCandPostItReadWindow(final Candidature candidature, final List<DroitFonctionnalite> listeDroit, final ChangeCandidatureWindowListener changeCandidatureWindowListener) {
		/* Style */
		setModal(true);
		setWidth(100, Unit.PERCENTAGE);
		setResizable(true);
		setClosable(true);

		/* Layout */
		VerticalLayout layout = new VerticalLayout();
		layout.setMargin(true);
		layout.setSpacing(true);
		setContent(layout);

		/* Title */
		setCaption(applicationContext.getMessage("postit.read.window",
				new Object[] { candidatController.getLibelleTitle(candidature.getCandidat().getCompteMinima()),
						candidature.getFormation().getLibForm() },
				UI.getCurrent().getLocale()));

		// Table backed by the application's post-its.
		BeanItemContainer<PostIt> container = new BeanItemContainer<>(PostIt.class, candidatureCtrCandController.getPostIt(candidature));
		TableFormating postItTable = new TableFormating(null, container);
		postItTable.setSizeFull();
		postItTable.setVisibleColumns((Object[]) FIELDS_ORDER);
		for (String fieldName : FIELDS_ORDER) {
			postItTable.setColumnHeader(fieldName, applicationContext.getMessage("postit.table." + fieldName, null, UI.getCurrent().getLocale()));
		}
		// Render the message as a label with a tooltip carrying the full text.
		postItTable.addGeneratedColumn(PostIt_.messagePostIt.getName(), new ColumnGenerator() {

			@Override
			public Object generateCell(final Table source, final Object itemId, final Object columnId) {
				final PostIt postIt = (PostIt) itemId;
				Label label = new Label(postIt.getMessagePostIt());
				label.setDescription(postIt.getMessagePostIt());
				return label;
			}
		});
		// Render the author login as a human-readable name.
		postItTable.addGeneratedColumn(PostIt_.userCrePostIt.getName(), new ColumnGenerator() {

			@Override
			public Object generateCell(final Table source, final Object itemId, final Object columnId) {
				final PostIt postIt = (PostIt) itemId;
				String user = postIt.getUserCrePostIt();
				return individuController.getLibIndividu(user);
			}
		});
		// Newest post-its first.
		postItTable.setSortContainerPropertyId(PostIt_.datCrePostIt.getName());
		postItTable.setColumnWidth(PostIt_.datCrePostIt.getName(), 180);
		postItTable.setColumnWidth(PostIt_.userCrePostIt.getName(), 180);
		postItTable.setSortAscending(false);
		postItTable.setColumnCollapsingAllowed(true);
		postItTable.setColumnReorderingAllowed(true);
		postItTable.setSelectable(true);
		postItTable.setImmediate(true);
		postItTable.addItemSetChangeListener(e -> postItTable.sanitizeSelection());
		layout.addComponent(postItTable);
		layout.setExpandRatio(postItTable, 1);

		/* Add the buttons */
		HorizontalLayout buttonsLayout = new HorizontalLayout();
		buttonsLayout.setWidth(100, Unit.PERCENTAGE);
		buttonsLayout.setSpacing(true);
		layout.addComponent(buttonsLayout);

		/* Check that the user has the right to write a post-it */
		if (droitProfilController.hasAccessToFonctionnalite(NomenclatureUtils.FONCTIONNALITE_GEST_POST_IT, listeDroit, false)) {
			OneClickButton btnWrite = new OneClickButton(applicationContext.getMessage("postit.add.button", null, UI.getCurrent().getLocale()), FontAwesome.EDIT);
			btnWrite.addClickListener(e -> {
				// Open the add window; on save, insert into the table and
				// notify the caller.
				CtrCandPostItAddWindow window = new CtrCandPostItAddWindow(new PostIt(userController.getCurrentUserLogin(), candidature));
				window.addPostItWindowListener(p -> {
					container.addItem(p);
					postItTable.sort();
					if (changeCandidatureWindowListener != null) {
						changeCandidatureWindowListener.addPostIt(p);
					}
				});
				UI.getCurrent().addWindow(window);
			});

			OneClickButton btnDelete = new OneClickButton(applicationContext.getMessage("postit.delete.button", null, UI.getCurrent().getLocale()), FontAwesome.TRASH);
			btnDelete.addClickListener(e -> {
				// Ask for confirmation before deleting the selected post-it.
				ConfirmWindow confirmWindow =
						new ConfirmWindow(applicationContext.getMessage("postit.window.confirmDelete", null, UI.getCurrent().getLocale()), applicationContext.getMessage("postit.window.confirmDeleteTitle", null, UI.getCurrent().getLocale()));
				confirmWindow.addBtnOuiListener(f -> {
					PostIt postIt = (PostIt) postItTable.getValue();
					candidatureCtrCandController.deletePostIt(postIt);
					container.removeItem(postIt);
					postItTable.sort();
					if (changeCandidatureWindowListener != null) {
						changeCandidatureWindowListener.removePostIt(postIt);
					}
				});
				UI.getCurrent().addWindow(confirmWindow);
			});
			btnDelete.setEnabled(false);
			// Only the author of the selected post-it (or an admin) may delete it.
			postItTable.addValueChangeListener(e -> {
				PostIt postIt = (PostIt) postItTable.getValue();
				if (postIt != null && postIt.getUserCrePostIt() != null && (postIt.getUserCrePostIt().equals(userController.getCurrentUserLogin()) || userController.isAdmin())) {
					btnDelete.setEnabled(true);
				} else {
					btnDelete.setEnabled(false);
				}
			});
			buttonsLayout.addComponent(btnWrite);
			buttonsLayout.setComponentAlignment(btnWrite, Alignment.MIDDLE_CENTER);
			buttonsLayout.addComponent(btnDelete);
			buttonsLayout.setComponentAlignment(btnDelete, Alignment.MIDDLE_CENTER);
		}

		btnClose = new OneClickButton(applicationContext.getMessage("btnClose", null, UI.getCurrent().getLocale()), FontAwesome.TIMES);
		btnClose.addClickListener(e -> close());
		buttonsLayout.addComponent(btnClose);
		buttonsLayout.setComponentAlignment(btnClose, Alignment.MIDDLE_CENTER);

		/* Center the window */
		center();
	}
}
|
carbon-drive/pyleecan
|
pyleecan/Methods/Slot/Hole/plot.py
|
# -*- coding: utf-8 -*-
from matplotlib.patches import Patch
from matplotlib.pyplot import axis, legend
from numpy import exp
from ....Functions.init_fig import init_fig
from ....definitions import config_dict
from ....Methods import ParentMissingError
MAGNET_COLOR = config_dict["PLOT"]["COLOR_DICT"]["MAGNET_COLOR"]
def plot(
    self,
    fig=None,
    ax=None,
    display_magnet=True,
    is_add_arrow=False,
    is_add_ref=False,
    is_show_fig=True,
):
    """Plot the Hole in a matplotlib fig

    Parameters
    ----------
    self : Hole
        A Hole object
    fig : Matplotlib.figure.Figure
        if None, open a new fig and plot, else add to the current
        one (Default value = None)
    ax : Matplotlib.axes.Axes
        Axis on which to plot the data (Default value = None)
    display_magnet : bool
        if True, plot the magnet inside the hole, if there is any (Default value = True)
    is_add_arrow : bool
        To add an arrow for the magnetization
    is_add_ref : bool
        True to add the reference points of the surfaces
    is_show_fig : bool
        True to call fig.show() at the end

    Returns
    -------
    None
    """
    # Black outline on a fresh figure, white when overlaying an existing one.
    if fig is None:
        color = "k"
    else:
        color = "w"

    surf_hole = self.build_geometry()
    patches = list()
    for surf in surf_hole:
        if "Magnet" in surf.label and display_magnet:
            patches.extend(surf.get_patches(color=MAGNET_COLOR))
        else:
            patches.extend(surf.get_patches(color=color))

    # Display the result
    (fig, axes, patch_leg, label_leg) = init_fig(fig, ax)
    axes.set_xlabel("(m)")
    axes.set_ylabel("(m)")
    axes.set_title("Hole")

    # Add all the hole (and magnet) patches to the figure
    for patch in patches:
        axes.add_patch(patch)

    # Magnetization arrows
    if is_add_arrow:
        H = self.comp_height()
        mag_dict = self.comp_magnetization_dict()
        for magnet_name, mag_dir in mag_dict.items():
            # Find the surface matching the magnet index
            mag_surf = None
            mag_id = magnet_name.split("_")[-1]
            for surf in surf_hole:
                if "Magnet" in surf.label and "_T" + mag_id in surf.label:
                    mag_surf = surf
                    break
            if mag_surf is None:
                # No matching surface (e.g. magnet removed): skip this arrow
                # instead of raising AttributeError on mag_surf.point_ref.
                continue
            # Arrow from the reference point along the magnetization direction
            Z1 = mag_surf.point_ref
            Z2 = mag_surf.point_ref + H / 5 * exp(1j * mag_dir)
            axes.annotate(
                text="",
                xy=(Z2.real, Z2.imag),
                xytext=(Z1.real, Z1.imag),
                arrowprops=dict(arrowstyle="->", linewidth=1, color="b"),
            )

    # Add reference points
    if is_add_ref:
        for surf in self.surf_list:
            axes.plot(surf.point_ref.real, surf.point_ref.imag, "rx")

    # Axis Setup
    axes.axis("equal")
    try:
        Lim = self.get_Rbo() * 1.2
        axes.set_xlim(-Lim, Lim)
        axes.set_ylim(-Lim, Lim)
    except ParentMissingError:
        pass

    # Legend entry for magnets. Labels *contain* "Magnet" (see the patch loop
    # above), so test by substring: the previous exact-equality membership
    # test ("Magnet" in [surf.label ...]) never matched.
    if display_magnet and any("Magnet" in surf.label for surf in surf_hole):
        patch_leg.append(Patch(color=MAGNET_COLOR))
        label_leg.append("Magnet")
        legend(patch_leg, label_leg)
    if is_show_fig:
        fig.show()
|
vikram0207/django-rest
|
venv/lib/python3.7/site-packages/allauth/socialaccount/providers/twitch/provider.py
|
from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider
class TwitchAccount(ProviderAccount):
    """Presentation helpers over the extra_data stored for a Twitch account."""

    def get_profile_url(self):
        # The channel page lives at twitch.tv/<name>.
        name = self.account.extra_data.get('name')
        return 'http://twitch.tv/' + name

    def get_avatar_url(self):
        return self.account.extra_data.get('logo')

    def to_str(self):
        # Prefer the Twitch display name, fall back to the generic label.
        fallback = super(TwitchAccount, self).to_str()
        return self.account.extra_data.get('name', fallback)
class TwitchProvider(OAuth2Provider):
    """allauth provider for Twitch OAuth2 sign-in."""

    id = 'twitch'
    name = 'Twitch'
    account_class = TwitchAccount

    def extract_uid(self, data):
        # Twitch exposes the numeric user id under "_id".
        return str(data['_id'])

    def extract_common_fields(self, data):
        return dict(
            username=data.get("name"),
            name=data.get("display_name"),
            email=data.get("email"),
        )

    def get_default_scope(self):
        # Read-only access to the user profile.
        return ["user_read"]


provider_classes = [TwitchProvider]
|
ultimateabhi719/ChIA-PIPE
|
util/cpu-dir/cpu-dir/example.c
|
<filename>util/cpu-dir/cpu-dir/example.c<gh_stars>10-100
#include <stdio.h>
#include <zlib.h>
#include <string.h>
#include <errno.h>
#include <assert.h>
#include "bwamem.h"
#include "kseq.h" // for the FASTA/Q parser
KSEQ_DECLARE(gzFile)
#ifdef USE_MALLOC_WRAPPERS
# include "malloc_wrap.h"
#endif
/*
 * bwamem-lite: minimal example driver for the bwa-mem API. Loads a BWA
 * index, streams FASTA/Q reads, and prints one tab-separated line per
 * primary alignment (name, strand, reference, position, mapq, CIGAR, NM).
 */
int main(int argc, char *argv[])
{
	bwaidx_t *idx;
	gzFile fp;
	kseq_t *ks;
	mem_opt_t *opt;

	if (argc < 3) {
		fprintf(stderr, "Usage: bwamem-lite <idx.base> <reads.fq>\n");
		return 1;
	}

	/* Load the BWA index from its base path. */
	idx = bwa_idx_load(argv[1], BWA_IDX_ALL);
	if (NULL == idx) {
		fprintf(stderr, "Index load failed.\n");
		exit(EXIT_FAILURE);
	}

	/* "-" selects stdin; anything else is opened as a (gzipped) file. */
	fp = strcmp(argv[2], "-")? gzopen(argv[2], "r") : gzdopen(fileno(stdin), "r");
	if (NULL == fp) {
		fprintf(stderr, "Couldn't open %s : %s\n",
				strcmp(argv[2], "-") ? argv[2] : "stdin",
				errno ? strerror(errno) : "Out of memory");
		exit(EXIT_FAILURE);
	}

	ks = kseq_init(fp);    /* FASTA/Q parser */
	opt = mem_opt_init();  /* default BWA-MEM parameters */

	while (kseq_read(ks) >= 0) { /* one read at a time */
		mem_alnreg_v hits;
		int h, c;

		/* Collect every candidate hit for this read. */
		hits = mem_align1(opt, idx->bwt, idx->bns, idx->pac, ks->seq.l, ks->seq.s);
		for (h = 0; h < hits.n; ++h) {
			mem_aln_t aln;

			if (hits.a[h].secondary >= 0) continue; /* primary hits only */
			/* Forward-strand position and CIGAR for this hit. */
			aln = mem_reg2aln(opt, idx->bns, idx->pac, ks->seq.l, ks->seq.s, &hits.a[h]);
			err_printf("%s\t%c\t%s\t%ld\t%d\t", ks->name.s, "+-"[aln.is_rev], idx->bns->anns[aln.rid].name, (long)aln.pos, aln.mapq);
			for (c = 0; c < aln.n_cigar; ++c) /* print CIGAR */
				err_printf("%d%c", aln.cigar[c]>>4, "MIDSH"[aln.cigar[c]&0xf]);
			err_printf("\t%d\n", aln.NM); /* edit distance */
			free(aln.cigar); /* mem_reg2aln allocates the CIGAR */
		}
		free(hits.a); /* release the hit list */
	}

	free(opt);
	kseq_destroy(ks);
	err_gzclose(fp);
	bwa_idx_destroy(idx);
	return 0;
}
|
kokizzu/moss
|
segment_index.go
|
// Copyright 2017-Present Couchbase, Inc.
//
// Use of this software is governed by the Business Source License included
// in the file licenses/BSL-Couchbase.txt. As of the Change Date specified
// in that file, in accordance with the Business Source License, use of this
// software will be governed by the Apache License, Version 2.0, included in
// the file licenses/APL2.txt.
package moss
import (
"bytes"
)
// segmentKeysIndex is a sparse in-memory index over the keys of a segment:
// every hop-th key is copied into a flat byte array so that lookup can
// narrow the binary-search window over the full segment.
type segmentKeysIndex struct {
	// Maximum number of keys this index can hold, fixed at construction.
	numIndexableKeys int

	// Number of keys indexed so far.
	numKeys int

	// Total size in bytes of the indexed keys held in data.
	numKeyBytes int

	// Flat, concatenated bytes of the indexed keys.
	data []byte

	// Start offset of each indexed key within data (parallel to numKeys).
	offsets []uint32

	// Number of source-segment keys skipped between two adjacent
	// indexed keys.
	hop int

	// Total number of keys in the source segment (upper bound for lookup).
	srcKeyCount int
}
// newSegmentKeysIndex builds an empty key index sized to fit within the
// given byte quota, deriving the hop from the source key count. Returns
// nil when the quota cannot accommodate even a single key.
func newSegmentKeysIndex(quota int, srcKeyCount int,
	keyAvgSize int) *segmentKeysIndex {
	// Each indexed key costs its bytes plus a 4-byte offset slot.
	indexable := quota / (keyAvgSize + 4)
	if indexable == 0 {
		return nil
	}
	return &segmentKeysIndex{
		numIndexableKeys: indexable,
		hop:              (srcKeyCount / indexable) + 1,
		data:             make([]byte, indexable*keyAvgSize),
		offsets:          make([]uint32, indexable),
		srcKeyCount:      srcKeyCount,
	}
}
// add offers the keyIdx'th key of the source segment to the index.
// It returns true while the index can accept further entries and false once
// it has run out of key slots or byte capacity. Keys at positions that are
// not multiples of hop are skipped, but still yield true.
func (s *segmentKeysIndex) add(keyIdx int, key []byte) bool {
	if s.numKeys >= s.numIndexableKeys {
		// Every available key slot is already in use.
		return false
	}

	bytesFree := len(s.data) - s.numKeyBytes
	if len(key) > bytesFree {
		// Not enough room left in the data array for this key.
		return false
	}

	if keyIdx%s.hop != 0 {
		// Not a sampled position; skip it but keep indexing.
		return true
	}

	s.offsets[s.numKeys] = uint32(s.numKeyBytes)
	copy(s.data[s.numKeyBytes:], key)
	s.numKeys++
	s.numKeyBytes += len(key)

	return true
}
// Fetches the range of offsets between which the key exists,
// if present at all. The returned leftPos and rightPos can
// directly be used as the left and right extreme cursors
// while binary searching over the source segment.
func (s *segmentKeysIndex) lookup(key []byte) (leftPos int, rightPos int) {
	// i/j bracket the binary search over the sampled (indexed) keys.
	i, j := 0, s.numKeys
	if i == j || s.numKeys < 2 {
		// The index either wasn't used or isn't of any use.
		// leftPos stays 0 so the caller scans the whole segment.
		rightPos = s.srcKeyCount
		return
	}
	// If key smaller than the first key, return early.
	// NOTE(review): this returns (0, 0) — presumably callers treat an empty
	// range as "key precedes the first sampled key"; confirm at call sites.
	keyStart := s.offsets[0]
	keyEnd := s.offsets[1]
	cmp := bytes.Compare(key, s.data[keyStart:keyEnd])
	if cmp < 0 {
		return
	}
	indexOfLastKey := s.numKeys - 1
	// If key larger than last key, return early.
	// The last indexed key ends at numKeyBytes (no next offset entry).
	keyStart = s.offsets[indexOfLastKey]
	keyEnd = uint32(s.numKeyBytes)
	cmp = bytes.Compare(s.data[keyStart:keyEnd], key)
	if cmp < 0 {
		leftPos = (indexOfLastKey) * s.hop
		rightPos = s.srcKeyCount
		return
	}
	// Invariant: sampled key at i <= key <= sampled key at j (conceptually);
	// each sampled position h maps back to segment position h * hop.
	for i < j {
		h := i + (j-i)/2
		keyStart = s.offsets[h]
		if h < indexOfLastKey {
			keyEnd = s.offsets[h+1]
		} else {
			// Final key runs to the end of the populated data bytes.
			keyEnd = uint32(s.numKeyBytes)
		}
		cmp = bytes.Compare(s.data[keyStart:keyEnd], key)
		if cmp == 0 {
			leftPos = h * s.hop
			rightPos = leftPos + 1
			return // Direct hit.
		} else if cmp < 0 {
			if i == h {
				// Window narrowed to adjacent entries; stop to avoid looping.
				break
			}
			i = h
		} else {
			j = h
		}
	}
	// Translate the sampled-key window back into source-segment cursors.
	leftPos = i * s.hop
	rightPos = j * s.hop
	return
}
|
Snivyer1910/TinkersConstruct
|
src/main/java/slimeknights/tconstruct/library/tools/nbt/IModDataReadOnly.java
|
<filename>src/main/java/slimeknights/tconstruct/library/tools/nbt/IModDataReadOnly.java
package slimeknights.tconstruct.library.tools.nbt;
import java.util.function.BiFunction;
import net.minecraft.nbt.NbtCompound;
import net.minecraft.nbt.NbtElement;
import net.minecraft.util.Identifier;
/**
 * Read-only view of {@link ModDataNBT}.
 */
public interface IModDataReadOnly {
  /** Shared immutable instance containing no data at all. */
  IModDataReadOnly EMPTY = new IModDataReadOnly() {
    @Override
    public int getUpgrades() {
      return 0;
    }

    @Override
    public int getAbilities() {
      return 0;
    }

    @Override
    public <T> T get(Identifier name, BiFunction<NbtCompound,String,T> function) {
      // Run the getter against a throwaway empty tag so every caller
      // receives that NBT type's default value.
      return function.apply(new NbtCompound(), name.toString());
    }

    @Override
    public boolean contains(Identifier name, int type) {
      return false;
    }
  };

  /** Gets the number of modifiers provided by this data */
  int getUpgrades();

  /** Gets the number of ability slots provided by this data */
  int getAbilities();

  /**
   * Fetches a value stored under a namespaced key.
   * @param name      Namespaced key
   * @param function  Getter invoked with the backing tag and the string form of the key
   * @param <T>       NBT type of output
   * @return Data produced by the getter
   */
  <T> T get(Identifier name, BiFunction<NbtCompound,String,T> function);

  /**
   * Checks if the data contains the given tag.
   * @param name Namespaced key
   * @param type NBT tag type id
   * @return True if the tag is contained
   */
  boolean contains(Identifier name, int type);


  /* Helpers */

  /** Reads a generic NBT value stored under the given key. */
  default NbtElement get(Identifier name) {
    return get(name, (nbt, key) -> nbt.get(key));
  }

  /** Reads an integer stored under the given key. */
  default int getInt(Identifier name) {
    return get(name, (nbt, key) -> nbt.getInt(key));
  }

  /** Reads a boolean stored under the given key. */
  default boolean getBoolean(Identifier name) {
    return get(name, (nbt, key) -> nbt.getBoolean(key));
  }

  /** Reads a float stored under the given key. */
  default float getFloat(Identifier name) {
    return get(name, (nbt, key) -> nbt.getFloat(key));
  }

  /** Reads a string stored under the given key. */
  default String getString(Identifier name) {
    return get(name, (nbt, key) -> nbt.getString(key));
  }

  /** Reads a compound stored under the given key. */
  default NbtCompound getCompound(Identifier name) {
    return get(name, (nbt, key) -> nbt.getCompound(key));
  }
}
|
cmarqu/pyvsc
|
src/vsc/model/expr_bin_model.py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from vsc.model.bin_expr_type import BinExprType
'''
Created on Jul 26, 2019
@author: ballance
'''
from vsc.model.expr_model import ExprModel
class ExprBinModel(ExprModel):
    '''Binary expression node; build() lowers it to a Boolector expression.'''

    # Sign-insensitive operators -> Boolector builder method name.
    # NOTE(review): Not is dispatched with two operands to mirror the original
    # code, although a logical Not is typically unary -- confirm against btor.
    _PLAIN_OPS = {
        BinExprType.Eq: "Eq",
        BinExprType.Ne: "Ne",
        BinExprType.Add: "Add",
        BinExprType.Sub: "Sub",
        BinExprType.Div: "Udiv",
        BinExprType.Mul: "Mul",
        BinExprType.Mod: "Urem",
        BinExprType.And: "And",
        BinExprType.Or: "Or",
        BinExprType.Sll: "Sll",
        BinExprType.Srl: "Srl",
        BinExprType.Xor: "Xor",
        BinExprType.Not: "Not",
    }

    # Comparison operators -> (unsigned, signed) Boolector method names.
    _CMP_OPS = {
        BinExprType.Gt: ("Ugt", "Sgt"),
        BinExprType.Ge: ("Ugte", "Sgte"),
        BinExprType.Lt: ("Ult", "Slt"),
        BinExprType.Le: ("Ulte", "Slte"),
    }

    def __init__(self, lhs, op, rhs):
        super().__init__()
        self.lhs = lhs
        self.op = op
        self.rhs = rhs
        self.width = 0
        self.signed = 0

    def build(self, btor):
        lhs_n = ExprBinModel._build_operand(self.lhs, btor)
        rhs_n = ExprBinModel._build_operand(self.rhs, btor)

        # Widen the narrower operand so both sides have matching widths.
        lhs_n = ExprBinModel.extend(lhs_n, rhs_n, self.lhs.is_signed(), btor)
        rhs_n = ExprBinModel.extend(rhs_n, lhs_n, self.rhs.is_signed(), btor)

        method = ExprBinModel._PLAIN_OPS.get(self.op)
        if method is None:
            pair = ExprBinModel._CMP_OPS.get(self.op)
            if pair is None:
                raise Exception("Unsupported binary expression type \"" + str(self.op) + "\"")
            # A signed comparison is used only when both operands are signed.
            use_signed = self.lhs.is_signed() and self.rhs.is_signed()
            method = pair[1] if use_signed else pair[0]

        return getattr(btor, method)(lhs_n, rhs_n)

    @staticmethod
    def _build_operand(expr, btor):
        # Build one operand, failing loudly if a sub-expression yields None.
        node = expr.build(btor)
        if node is None:
            raise Exception("Expression " + str(expr) + " build returned None")
        return node

    @staticmethod
    def extend(e1, e2, signed, btor):
        '''Widens e1 to e2's width (sign- or zero-extended); no-op otherwise.'''
        delta = e2.width - e1.width
        if delta <= 0:
            return e1
        return btor.Sext(e1, delta) if signed else btor.Uext(e1, delta)

    def is_signed(self):
        return self.signed

    def __str__(self):
        return "ExprBin: " + str(self.lhs) + " " + str(self.op) + " " + str(self.rhs)

    def accept(self, visitor):
        visitor.visit_expr_bin(self)
|
guandjoy/redject
|
src/react-app/src/application/NavigationBar.js
|
<filename>src/react-app/src/application/NavigationBar.js
import React from "react";
import { css } from "linaria";
import Logo from "./Logo";
import LogoutButton from "./LogoutButton";
export const wrapper = css`
display: flex;
justify-content: space-between;
align-items: center;
height: 98px;
`;
const NavigationBar = () => (
<div className={wrapper}>
<Logo />
<LogoutButton />
</div>
);
export default NavigationBar;
|
luckybroman5/http-log-reconstructor
|
k6/lib/netext/dialer.go
|
/*
*
* k6 - a next-generation load testing tool
* Copyright (C) 2016 Load Impact
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package netext
import (
"context"
"fmt"
"net"
"strings"
"sync/atomic"
"time"
"github.com/luckybroman5/http-log-reconstructor/k6/lib"
"github.com/luckybroman5/http-log-reconstructor/k6/lib/metrics"
"github.com/luckybroman5/http-log-reconstructor/k6/stats"
"github.com/viki-org/dnscache"
)
// Dialer wraps net.Dialer and provides k6 specific functionality -
// tracing, blacklists and DNS cache and aliases.
type Dialer struct {
	net.Dialer
	// Resolver caches DNS lookups for hosts not overridden by Hosts.
	Resolver *dnscache.Resolver
	// Blacklist holds IP ranges that connections must never target.
	Blacklist []*lib.IPNet
	// Hosts maps hostnames to fixed IPs, consulted before DNS resolution.
	Hosts map[string]net.IP
	// BytesRead/BytesWritten are updated atomically by Conn and swapped out
	// by GetTrail. NOTE(review): fields accessed via sync/atomic should be
	// 64-bit aligned on 32-bit platforms - confirm placement if that matters.
	BytesRead int64
	BytesWritten int64
}
// NewDialer constructs a new Dialer around the given net.Dialer and
// initializes its DNS cache (entries never expire with a 0 refresh rate).
func NewDialer(dialer net.Dialer) *Dialer {
	d := &Dialer{}
	d.Dialer = dialer
	d.Resolver = dnscache.New(0)
	return d
}
// BlackListedIPError is an error that is returned when a given IP is blacklisted
type BlackListedIPError struct {
	ip  net.IP      // the resolved IP that was refused
	net *lib.IPNet  // the blacklist range it fell into
}

// Error reports the offending IP together with the blacklisted range it matched.
func (b BlackListedIPError) Error() string {
	return fmt.Sprintf("IP (%s) is in a blacklisted range (%s)", b.ip, b.net)
}
// DialContext wraps the net.Dialer.DialContext and handles the k6 specifics
func (d *Dialer) DialContext(ctx context.Context, proto, addr string) (net.Conn, error) {
	// Split host from port at the last ':' so bracketed IPv6 hosts survive.
	// NOTE(review): assumes addr always contains a ':'; otherwise delimiter
	// is -1 and the slice below panics - confirm callers pass host:port.
	delimiter := strings.LastIndex(addr, ":")
	host := addr[:delimiter]

	// lookup for domain defined in Hosts option before trying to resolve DNS.
	ip, ok := d.Hosts[host]
	if !ok {
		var err error
		ip, err = d.Resolver.FetchOne(host)
		if err != nil {
			return nil, err
		}
	}

	// Refuse to dial any resolved IP that falls in a blacklisted range.
	for _, ipnet := range d.Blacklist {
		if (*net.IPNet)(ipnet).Contains(ip) {
			return nil, BlackListedIPError{ip: ip, net: ipnet}
		}
	}

	// Re-bracket IPv6 literals so the rebuilt "host:port" stays parseable.
	ipStr := ip.String()
	if strings.ContainsRune(ipStr, ':') {
		ipStr = "[" + ipStr + "]"
	}

	conn, err := d.Dialer.DialContext(ctx, proto, ipStr+":"+addr[delimiter+1:])
	if err != nil {
		return nil, err
	}
	// Wrap the connection so reads/writes feed the Dialer's byte counters.
	conn = &Conn{conn, &d.BytesRead, &d.BytesWritten}
	return conn, err
}
// GetTrail creates a new NetTrail instance with the Dialer
// sent and received data metrics and the supplied times and tags.
// The Dialer's byte counters are atomically reset, so each trail
// reports only the delta since the previous call.
// TODO: Refactor this according to
// https://github.com/luckybroman5/http-log-reconstructor/k6/pull/1203#discussion_r337938370
func (d *Dialer) GetTrail(
	startTime, endTime time.Time, fullIteration bool, emitIterations bool, tags *stats.SampleTags,
) *NetTrail {
	written := atomic.SwapInt64(&d.BytesWritten, 0)
	read := atomic.SwapInt64(&d.BytesRead, 0)

	samples := make([]stats.Sample, 0, 4)
	samples = append(samples,
		stats.Sample{
			Time:   endTime,
			Metric: metrics.DataSent,
			Value:  float64(written),
			Tags:   tags,
		},
		stats.Sample{
			Time:   endTime,
			Metric: metrics.DataReceived,
			Value:  float64(read),
			Tags:   tags,
		},
	)
	if fullIteration {
		samples = append(samples, stats.Sample{
			Time:   endTime,
			Metric: metrics.IterationDuration,
			Value:  stats.D(endTime.Sub(startTime)),
			Tags:   tags,
		})
		if emitIterations {
			samples = append(samples, stats.Sample{
				Time:   endTime,
				Metric: metrics.Iterations,
				Value:  1,
				Tags:   tags,
			})
		}
	}

	return &NetTrail{
		BytesRead:     read,
		BytesWritten:  written,
		FullIteration: fullIteration,
		StartTime:     startTime,
		EndTime:       endTime,
		Tags:          tags,
		Samples:       samples,
	}
}
// NetTrail contains information about the exchanged data size and length of a
// series of connections from a particular netext.Dialer
type NetTrail struct {
	BytesRead     int64
	BytesWritten  int64
	FullIteration bool
	StartTime     time.Time
	EndTime       time.Time
	Tags          *stats.SampleTags
	Samples       []stats.Sample
}

// Ensure that interfaces are implemented correctly
var _ stats.ConnectedSampleContainer = &NetTrail{}
// GetSamples implements the stats.SampleContainer interface.
func (ntr *NetTrail) GetSamples() []stats.Sample {
	return ntr.Samples
}

// GetTags implements the stats.ConnectedSampleContainer interface.
func (ntr *NetTrail) GetTags() *stats.SampleTags {
	return ntr.Tags
}

// GetTime implements the stats.ConnectedSampleContainer interface.
// The trail is stamped with its end time.
func (ntr *NetTrail) GetTime() time.Time {
	return ntr.EndTime
}
// Conn wraps net.Conn and keeps track of sent and received data size
type Conn struct {
	net.Conn
	// Counters owned by the parent Dialer; always updated atomically.
	BytesRead, BytesWritten *int64
}

// Read delegates to the wrapped conn and accumulates the bytes received.
func (c *Conn) Read(b []byte) (int, error) {
	n, err := c.Conn.Read(b)
	if n > 0 {
		atomic.AddInt64(c.BytesRead, int64(n))
	}
	return n, err
}

// Write delegates to the wrapped conn and accumulates the bytes sent.
func (c *Conn) Write(b []byte) (int, error) {
	n, err := c.Conn.Write(b)
	if n > 0 {
		atomic.AddInt64(c.BytesWritten, int64(n))
	}
	return n, err
}
|
Jorch72/plethora
|
src/main/java/org/squiddev/plethora/gameplay/neural/NeuralComputer.java
|
<reponame>Jorch72/plethora<filename>src/main/java/org/squiddev/plethora/gameplay/neural/NeuralComputer.java
package org.squiddev.plethora.gameplay.neural;
import com.google.common.collect.Maps;
import dan200.computercraft.shared.computer.core.ComputerFamily;
import dan200.computercraft.shared.computer.core.ServerComputer;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.util.ResourceLocation;
import net.minecraft.util.math.Vec3d;
import net.minecraft.world.World;
import net.minecraftforge.items.CapabilityItemHandler;
import net.minecraftforge.items.IItemHandler;
import org.squiddev.plethora.api.Constants;
import org.squiddev.plethora.api.IPeripheralHandler;
import org.squiddev.plethora.core.executor.ContextDelayedExecutor;
import org.squiddev.plethora.core.executor.IExecutorFactory;
import org.squiddev.plethora.utils.Helpers;
import javax.annotation.Nonnull;
import java.lang.ref.WeakReference;
import java.util.Map;
import static org.squiddev.plethora.gameplay.neural.ItemComputerHandler.*;
import static org.squiddev.plethora.gameplay.neural.NeuralHelpers.*;
/**
 * Server-side computer backing a neural interface item. It tracks the entity
 * wearing the interface, mirrors the item's inventory into peripherals and
 * modules, and persists per-module NBT data back onto the item stack.
 */
public class NeuralComputer extends ServerComputer {
	// Weak reference so this computer never keeps a dead/unloaded entity alive.
	private WeakReference<EntityLivingBase> entity;

	// Snapshot of the neural interface's inventory slots.
	private final ItemStack[] stacks = new ItemStack[INV_SIZE];
	// Hash over stacks, letting callers cheaply detect inventory changes.
	private int stackHash;

	// Per-module persistent NBT, keyed by module id; flushed to the item
	// stack when markModuleDataDirty() has been called.
	private final Map<ResourceLocation, NBTTagCompound> moduleData = Maps.newHashMap();
	private boolean moduleDataDirty = false;

	private final ContextDelayedExecutor executor = new ContextDelayedExecutor();

	public NeuralComputer(World world, int computerID, String label, int instanceID) {
		super(world, computerID, label, instanceID, ComputerFamily.Advanced, WIDTH, HEIGHT);
	}

	/** Executor used to run delayed/queued method calls for this computer. */
	public IExecutorFactory getExecutor() {
		return executor;
	}

	/** Restores per-module NBT previously written by {@code update}. */
	public void readModuleData(NBTTagCompound tag) {
		for (String key : tag.getKeySet()) {
			moduleData.put(new ResourceLocation(key), tag.getCompoundTag(key));
		}
	}

	/** Fetches (creating on demand) the NBT compound for the given module id. */
	public NBTTagCompound getModuleData(ResourceLocation location) {
		NBTTagCompound tag = moduleData.get(location);
		if (tag == null) moduleData.put(location, tag = new NBTTagCompound());
		return tag;
	}

	/** Flags module data for persistence on the next {@code update} call. */
	public void markModuleDataDirty() {
		moduleDataDirty = true;
	}

	/** Hash of the current inventory snapshot (see {@code Helpers.hashStacks}). */
	public int getStackHash() {
		return stackHash;
	}

	/**
	 * Update and sync peripherals.
	 *
	 * @param owner The owner of the current peripherals
	 * @param stack The neural interface item stack being worn
	 * @param dirtyStatus Bitmask of inventory slots that changed; -1 forces a full resync
	 * @return True if the stack's NBT was modified and needs saving
	 */
	public boolean update(@Nonnull EntityLivingBase owner, @Nonnull ItemStack stack, int dirtyStatus) {
		IItemHandler handler = stack.getCapability(CapabilityItemHandler.ITEM_HANDLER_CAPABILITY, null);

		EntityLivingBase existing = entity == null ? null : entity.get();
		if (existing != owner) {
			// Owner changed: force every slot dirty so peripherals fully resync.
			dirtyStatus = -1;
			if (!owner.isEntityAlive()) {
				entity = null;
			} else {
				entity = new WeakReference<EntityLivingBase>(owner);
			}
		}

		setWorld(owner.getEntityWorld());
		setPosition(owner.getPosition());

		// Sync changed slots
		if (dirtyStatus != 0) {
			for (int slot = 0; slot < INV_SIZE; slot++) {
				if ((dirtyStatus & (1 << slot)) == 1 << slot) {
					stacks[slot] = handler.getStackInSlot(slot);
				}
			}
			stackHash = Helpers.hashStacks(stacks);
		}

		// Update peripherals
		for (int slot = 0; slot < PERIPHERAL_SIZE; slot++) {
			ItemStack peripheral = stacks[slot];
			if (peripheral == null) continue;

			IPeripheralHandler peripheralHandler = peripheral.getCapability(Constants.PERIPHERAL_HANDLER_CAPABILITY, null);
			if (peripheralHandler != null) {
				// Tick the peripheral at the owner's eye position.
				peripheralHandler.update(
					owner.worldObj,
					new Vec3d(owner.posX, owner.posY + owner.getEyeHeight(), owner.posZ),
					owner
				);
			}
		}

		// Sync modules and peripherals
		if (dirtyStatus != 0) {
			for (int slot = 0; slot < PERIPHERAL_SIZE; slot++) {
				if ((dirtyStatus & (1 << slot)) == 1 << slot) {
					// We skip the "back" slot
					setPeripheral(slot < BACK ? slot : slot + 1, buildPeripheral(stacks[slot]));
				}
			}

			// If the modules have changed.
			if (dirtyStatus >> PERIPHERAL_SIZE != 0) {
				setPeripheral(BACK, NeuralHelpers.buildModules(this, stacks, owner));
			}
		}

		executor.update();

		if (moduleDataDirty) {
			moduleDataDirty = false;

			NBTTagCompound tag = new NBTTagCompound();
			for (Map.Entry<ResourceLocation, NBTTagCompound> entry : moduleData.entrySet()) {
				tag.setTag(entry.getKey().toString(), entry.getValue());
			}
			// NOTE(review): assumes the stack already carries a tag compound;
			// getTagCompound() could be null here - confirm callers guarantee it.
			stack.getTagCompound().setTag(MODULE_DATA, tag);
			return true;
		}
		return false;
	}
}
|
xuzhikethinker/t4f-data
|
structure/core/src/main/java/aos/data/structure5/Vertex.java
|
<reponame>xuzhikethinker/t4f-data<gh_stars>1-10
/****************************************************************
* Licensed to the AOS Community (AOS) under one or more *
* contributor license agreements. See the NOTICE file *
* distributed with this work for additional information *
* regarding copyright ownership. The AOS licenses this file *
* to you under the Apache License, Version 2.0 (the *
* "License"); you may not use this file except in compliance *
* with the License. You may obtain a copy of the License at *
* *
* http://www.apache.org/licenses/LICENSE-2.0 *
* *
* Unless required by applicable law or agreed to in writing, *
* software distributed under the License is distributed on an *
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY *
* KIND, either express or implied. See the License for the *
* specific language governing permissions and limitations *
* under the License. *
****************************************************************/
// Vertex base class, to be extended as needed.
// (c) 1998, 2001 <NAME>
package aos.data.structure5;
/**
* A private implementation of a vertex for use in graphs. A vertex
* is capable of holding a label and has a flag that can be set
* to mark it as visited.
* <P>
* Typical Usage:
* <P>
* <pre>
* Vertex v = new {@link #Vertex(Object) Vertex(someLabel)};
* //...several graph related operations occur
* if(!v.{@link #isVisited()}){
* Object label = v.label();
* v.{@link #visit()};
* }
* </pre>
* @see GraphListVertex
* @see GraphMatrixVertex
*
* @version $Id: Vertex.java 22 2006-08-21 19:27:26Z bailey $
* @author <NAME>, 2001
*/
class Vertex<E>
{
    /**
     * A label associated with vertex.
     */
    protected E label; // the user's label

    /**
     * Whether or not a vertex has been visited.
     */
    protected boolean visited; // this vertex visited

    /**
     * Construct a vertex with an associated label.
     *
     * @post constructs unvisited vertex with label
     *
     * @param label A label to be associated with vertex.
     */
    public Vertex(E label)
    {
        Assert.pre(label != null, "Vertex key is non-null");
        this.label = label;
        visited = false;
    }

    /**
     * Fetch the label associated with vertex.
     *
     * @post returns user label associated w/vertex
     *
     * @return The label associated with vertex.
     */
    public E label()
    {
        return label;
    }

    /**
     * Test and set the visited flag.
     *
     * @post returns, then marks vertex as being visited
     *
     * @return The value of the flag before marking
     */
    public boolean visit()
    {
        boolean was = visited;
        visited = true;
        return was;
    }

    /**
     * Determine if the vertex has been visited.
     *
     * @post returns true iff vertex has been visited
     *
     * @return True iff the vertex has been visited.
     */
    public boolean isVisited()
    {
        return visited;
    }

    /**
     * Clears the visited flag.
     *
     * @post marks vertex unvisited
     */
    public void reset()
    {
        visited = false;
    }

    /**
     * Returns true iff the labels of two vertices are equal.
     * Unlike the previous blind cast, this returns false (rather than
     * throwing ClassCastException) for null or non-Vertex arguments,
     * as required by the Object.equals contract.
     *
     * @post returns true iff vertex labels are equal
     *
     * @param o Another object, typically a vertex.
     * @return True iff o is a Vertex whose label equals this one's.
     */
    public boolean equals(Object o)
    {
        if (this == o) return true;
        if (!(o instanceof Vertex)) return false; // also handles null
        Vertex<?> v = (Vertex<?>)o;
        return label.equals(v.label());
    }

    /**
     * Return a hashcode associated with the vertex.
     * Consistent with equals: derived solely from the label.
     *
     * @post returns hash code for vertex
     *
     * @return An integer for use in hashing values into tables.
     */
    public int hashCode()
    {
        return label.hashCode();
    }

    /**
     * Construct a string representing vertex.
     *
     * @post returns string representation of vertex
     *
     * @return A string representing vertex.
     */
    public String toString()
    {
        return "<Vertex: "+label+">";
    }
}
|
AshShawn/baseio
|
baseio-core/src/main/java/com/generallycloud/baseio/component/SocketChannelContext.java
|
/*
* Copyright 2015-2017 GenerallyCloud.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.generallycloud.baseio.component;
import java.nio.charset.Charset;
import com.generallycloud.baseio.LifeCycle;
import com.generallycloud.baseio.buffer.ByteBufAllocatorManager;
import com.generallycloud.baseio.collection.Attributes;
import com.generallycloud.baseio.component.ssl.SslContext;
import com.generallycloud.baseio.concurrent.ExecutorEventLoopGroup;
import com.generallycloud.baseio.configuration.ServerConfiguration;
import com.generallycloud.baseio.protocol.ProtocolDecoder;
import com.generallycloud.baseio.protocol.ProtocolEncoder;
import com.generallycloud.baseio.protocol.ProtocolFactory;
/**
 * Central configuration and runtime context for a socket channel pipeline:
 * protocol codecs, session management, event listeners, executors, SSL
 * settings, and the static server configuration.
 */
public interface SocketChannelContext extends Attributes, LifeCycle {

    /** Registers a listener for session lifecycle events. */
    void addSessionEventListener(SocketSessionEventListener listener);

    /** Registers a listener for session idle events. */
    void addSessionIdleEventListener(SocketSessionIdleEventListener listener);

    /** Factory producing heartbeat ("beat") futures for this context. */
    BeatFutureFactory getBeatFutureFactory();

    /** Manager of the byte-buffer allocators used by this context. */
    ByteBufAllocatorManager getByteBufAllocatorManager();

    /** Creates a new reader that consumes raw channel bytes. */
    ChannelByteBufReader newChannelByteBufReader();

    /** The channel service this context is attached to. */
    ChannelService getChannelService();

    /** Charset used to encode/decode text payloads. */
    Charset getEncoding();

    /** Event-loop group running application-level work. */
    ExecutorEventLoopGroup getExecutorEventLoopGroup();

    /** Acceptor that dispatches fully-read futures. */
    ForeFutureAcceptor getForeReadFutureAcceptor();

    /** Adaptor wrapping the user's IO event handler. */
    IoEventHandleAdaptor getIoEventHandleAdaptor();

    /** Decoder that parses inbound bytes into protocol futures. */
    ProtocolDecoder getProtocolDecoder();

    /** Encoder that serializes outbound futures into bytes. */
    ProtocolEncoder getProtocolEncoder();

    /** Factory supplying the protocol encoder/decoder pair. */
    ProtocolFactory getProtocolFactory();

    /** Static server configuration for this context. */
    ServerConfiguration getServerConfiguration();

    /** Head of the linked chain of session event listeners. */
    SocketSessionEventListenerWrapper getSessionEventListenerLink();

    /** Factory used to create sessions for new channels. */
    SocketSessionFactory getSessionFactory();

    /** Head of the linked chain of session idle listeners. */
    SocketSessionIdleEventListenerWrapper getSessionIdleEventListenerLink();

    /** Idle-time threshold for sessions. NOTE(review): units presumed milliseconds - confirm. */
    long getSessionIdleTime();

    /** Manager tracking this context's live sessions. */
    SocketSessionManager getSessionManager();

    /** The simulated (in-process) socket channel of this context. */
    SimulateSocketChannel getSimulateSocketChannel();

    /** SSL context; meaningful when {@link #isEnableSSL()} is true. */
    SslContext getSslContext();

    /** Time at which this context was started. */
    long getStartupTime();

    /** Whether SSL/TLS is enabled for this context. */
    boolean isEnableSSL();

    void setBeatFutureFactory(BeatFutureFactory beatFutureFactory);

    void setByteBufAllocatorManager(ByteBufAllocatorManager byteBufAllocatorManager);

    void setChannelService(ChannelService service);

    void setExecutorEventLoopGroup(ExecutorEventLoopGroup executorEventLoopGroup);

    void setIoEventHandleAdaptor(IoEventHandleAdaptor ioEventHandleAdaptor);

    void setProtocolFactory(ProtocolFactory protocolFactory);

    void setSocketSessionFactory(SocketSessionFactory sessionFactory);

    void setSslContext(SslContext sslContext);
}
|
harsha1979/demo-suite
|
src/main/java/org/wso2/carbon/solution/util/ResourceManager.java
|
<gh_stars>0
/*
* Copyright (c) 2017, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.solution.util;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.solution.CarbonSolutionException;
import org.wso2.carbon.solution.endpoint.iam.config.IdentityServer;
import org.wso2.carbon.solution.model.datasource.DataSourceConfigEntity;
import org.wso2.carbon.solution.model.datasource.Datasource;
import org.wso2.carbon.solution.model.server.Server;
import org.wso2.carbon.solution.model.server.ServerConfigEntity;
import org.wso2.carbon.solution.model.solution.SolutionConfig;
import org.wso2.carbon.solution.model.solution.SolutionConfigEntity;
import org.yaml.snakeyaml.Yaml;
import org.yaml.snakeyaml.introspector.BeanAccess;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLStreamReader;
/**
 * ResourceManager loads all the resources from a known path or a given path.
 */
public class ResourceManager {
private static Log log = LogFactory.getLog(ResourceManager.class);
/**
* Load keystores for give server configuration.
*
* @param serverName
* @param instance
* @throws CarbonSolutionException
*/
public static void loadKeyStores(String serverName, String instance) throws CarbonSolutionException {
Server serverConfig = getServerConfig(serverName, instance);
IdentityServer identityServer = new IdentityServer(serverConfig);
Path resourcePath = Paths.get(Constant.ResourcePath.RESOURCE_HOME_PATH, identityServer.getTrustStore());
log.info("Loading key store path in, " + resourcePath.toString());
System.setProperty("javax.net.ssl.trustStore", resourcePath.toString());
System.setProperty("javax.net.ssl.trustStorePassword", identityServer.getTrustStorePassword());
System.setProperty("javax.net.ssl.trustStoreType", "JKS");
}
/**
* Read the config file under given solution folder.
*
* @param solution
* @return SolutionConfig
* @throws CarbonSolutionException
*/
public static SolutionConfig getSolutionConfig(String solution) throws CarbonSolutionException {
Path resourcePath = Paths.get(Constant.ResourcePath.SOLUTION_HOME_PATH,
solution, Constant.SOLUTION_CONFIG_FILE);
SolutionConfigEntity solutionConfigEntity = loadYAMLResource(resourcePath, SolutionConfigEntity.class);
if (solutionConfigEntity.getSolutionConfig() == null) {
throw new CarbonSolutionException("SolutionConfig was not loaded, " + resourcePath.toString());
}
return solutionConfigEntity.getSolutionConfig();
}
/**
* Read a resource under given path and load to the given entity.
*
* @param resourcePath
* @param className
* @param <T>
* @return
* @throws CarbonSolutionException
*/
public static <T> T loadYAMLResource(Path resourcePath, Class<T> className) throws CarbonSolutionException {
if (!resourcePath.toFile().exists()) {
String errorMessage = "Given resource path does not exists, " + resourcePath.toString();
throw new CarbonSolutionException(errorMessage);
}
log.debug("Reading YAML resource in " + resourcePath);
T resource;
try {
Reader in = new InputStreamReader(Files.newInputStream(resourcePath), StandardCharsets.UTF_8);
Yaml yaml = new Yaml();
yaml.setBeanAccess(BeanAccess.PROPERTY);
resource = yaml.loadAs(in, className);
if (resource == null) {
throw new CarbonSolutionException("The resource was not loaded successfully, " + resourcePath);
}
} catch (Exception e) {
String errorMessage = "Error occurred while loading the resource, " +
resourcePath + " cause, " + e.getMessage();
log.error(errorMessage);
throw new CarbonSolutionException(errorMessage, e);
}
return resource;
}
/**
* Load XML String
*
* @param resourcePath
* @param className
* @param <T>
* @return
* @throws CarbonSolutionException
*/
public static <T> T loadXMLToObject(String resourcePath, Class<T> className) throws CarbonSolutionException {
Path path = Paths.get(resourcePath);
return loadXMLToObject(path, className);
}
/**
* Load XML String
*
* @param resourcePath
* @param className
* @param <T>
* @return
* @throws CarbonSolutionException
*/
public static <T> T loadXMLToObject(Path resourcePath, Class<T> className) throws CarbonSolutionException {
T t = null;
try {
InputStream inputStream = Files.newInputStream(resourcePath);
XMLInputFactory inputFactory = XMLInputFactory.newInstance();
XMLStreamReader reader = inputFactory.createXMLStreamReader(inputStream);
t = (T) className.getDeclaredClasses()[0].getDeclaredMethod("parse", XMLStreamReader.class)
.invoke(null, reader);
} catch (Exception e) {
String errorMessage = "Error occurred while loading the resource, " +
resourcePath + " cause, " + e.getMessage();
log.error(errorMessage);
throw new CarbonSolutionException(errorMessage, e);
}
return t;
}
/**
* Read ServerConfig for all the solutions.
*
* @param serverName
* @param instance
* @return
* @throws CarbonSolutionException
*/
public static Server getServerConfig(String serverName, String instance) throws CarbonSolutionException {
Path resourcePathObj = Paths
.get(Constant.ResourcePath.RESOURCE_HOME_PATH, Constant.ResourceFolder.SERVERS_HOME_FOLDER,
Constant
.SERVER_CONFIG_FILE);
ServerConfigEntity serverConfigEntity = loadYAMLResource(resourcePathObj, ServerConfigEntity.class);
List<Server> servers = serverConfigEntity.getServers();
for (Server server : servers) {
if (StringUtils.isNotEmpty(server.getServerName()) && server.getServerName().equals(serverName) && server
.getInstance().equals(instance)) {
return server;
}
}
throw new CarbonSolutionException("No default server found for " + serverName);
}
public static List<Datasource> getDataSourceConfig(String solution) throws CarbonSolutionException {
Path resourcePathObj = Paths
.get(Constant.ResourcePath.RESOURCE_HOME_PATH, Constant.ResourceFolder.SOLUTION_HOME_FOLDER, solution,
Constant
.DATASOURCE_CONFIG_FILE);
DataSourceConfigEntity dataSourceConfigEntity = loadYAMLResource(resourcePathObj, DataSourceConfigEntity.class);
return dataSourceConfigEntity.getDatasources();
}
public static String getFileContent(String resourcePath) throws CarbonSolutionException {
String content = "";
try {
content = IOUtils.toString(new FileInputStream(new File(resourcePath)));
} catch (Exception e) {
throw new CarbonSolutionException("Error occurred while getFileContent.", e);
}
return content;
}
public static <T> T loadYAMLResource(String fileName, String resourcePath, Class<T> className)
throws CarbonSolutionException {
Path resourcePathObj = Paths.get(resourcePath, fileName);
return loadYAMLResource(resourcePathObj, className);
}
/**
 * Loads every {@code .yaml} resource directly under the given directory,
 * keyed by file name.
 *
 * @param resourcesPath directory to scan (non-existent directory yields an empty map)
 * @param className     type each YAML file is deserialized into
 * @return map of file name to deserialized resource; never null
 * @throws CarbonSolutionException if any individual resource fails to load
 */
public static <T> Map<String, T> loadResources(String resourcesPath, Class<T> className)
        throws CarbonSolutionException {
    // Delegate to the Path-based overload; the two methods previously
    // duplicated the same directory-scan logic line for line.
    return loadResources(Paths.get(resourcesPath), className);
}
/**
 * Loads every {@code .yaml} resource directly under the given directory,
 * keyed by file name.
 *
 * @param resourcesPathObj directory to scan (non-existent directory yields an empty map)
 * @param className        type each YAML file is deserialized into
 * @return map of file name to deserialized resource; never null
 * @throws CarbonSolutionException if any individual resource fails to load
 */
public static <T> Map<String, T> loadResources(Path resourcesPathObj, Class<T> className)
        throws CarbonSolutionException {
    Map<String, T> resources = new HashMap<String, T>();
    String[] entries = resourcesPathObj.toFile().list();
    if (entries == null) {
        // Directory missing or unreadable: behave as "no resources".
        return resources;
    }
    String yamlSuffix = "." + Constant.YAML_EXT;
    for (String entry : entries) {
        if (entry.endsWith(yamlSuffix)) {
            resources.put(entry, loadYAMLResource(Paths.get(resourcesPathObj.toString(), entry), className));
        }
    }
    return resources;
}
}
|
Bad-Sam/bad
|
src/bad/detect/os.h
|
<reponame>Bad-Sam/bad
#ifndef BAD_OS_H
#define BAD_OS_H

/* OS detection: defines exactly one of BAD_LINUX / BAD_WINDOWS based on
 * compiler-predefined macros. Cygwin is treated as Windows here. */
#if defined(__linux__)
#  define BAD_LINUX
#elif defined(WIN32) || defined(_WIN32) || defined(__WIN32__) || defined(_WIN64) || defined(__NT__) || defined(__CYGWIN__)
#  define BAD_WINDOWS
#else
#  warning Unsupported OS. The library may not work properly
#endif

/* OS-specific macro definition/undefinition:
 * _CRT_SECURE_NO_WARNINGS silences MSVC CRT deprecation warnings;
 * NOMINMAX stops <windows.h> from defining min/max macros. */
#if defined(BAD_WINDOWS)
#  define _CRT_SECURE_NO_WARNINGS
#  define NOMINMAX
#endif

#endif
|
a-bombarda/mvm-gui
|
gui/calibration/spirometer.py
|
#!/usr/bin/env python3
'''
This module implements the spirometer calibration procedure, guiding the
user through it.
'''
import os
from PyQt5 import QtWidgets, uic
from calibration.regression_tools import data_regression
class SpirometerCalibration(QtWidgets.QWidget):
    '''
    SpirometerCalibration widget. Guides the user through the venturi
    spirometer calibration procedure and fits the calibration coefficients.
    '''

    def __init__(self, *args):
        """
        Constructor. Loads the .ui layout next to this module and wires up
        the buttons.
        """
        super(SpirometerCalibration, self).__init__(*args)

        uifile = os.path.join(
            os.path.dirname(os.path.realpath(__file__)),
            "spirometer.ui")
        uic.loadUi(uifile, self)

        self._esp32 = None
        self._mainwindow = None
        # Fitted venturi coefficients; empty until a successful calibration.
        self._coefficients = []

        self.retry_button.setEnabled(False)
        self.retry_button.clicked.connect(self._start_calibration)
        self.start_calibration.clicked.connect(self._start_calibration)
        self.back_button.clicked.connect(self._accept_values)

    def connect_mainwindow_esp32(self, mainwindow, esp32):
        """
        Connect the main window and the ESP32Serial instance.
        """
        self._esp32 = esp32
        self._mainwindow = mainwindow

    def _accept_values(self):
        """
        Send coefficients to ESP32 (if a fit succeeded) and quit the procedure.
        """
        if self._coefficients != []:
            # 5 coefficients -- presumably fixed by the firmware-side venturi
            # model; TODO confirm against the ESP32 protocol.
            for index in range(5):
                self._esp32.set("venturi_coefficient_%d" % index,
                                self._coefficients[index])
        self._mainwindow.goto_new_patient()

    def _start_calibration(self):
        """
        Start retrieving data points from the ESP32 and fit them.
        """
        self.start_calibration.setEnabled(False)
        self.back_button.setEnabled(False)
        self.retry_button.setEnabled(False)
        self.completion_bar.setValue(0)
        self._coefficients = []
        self.endstatus_label.setText("")

        # Bind the name up front so the finally block can always clean it up.
        # Previously 'del calibrator' raised NameError when
        # venturi_calibration() itself failed (name never assigned).
        calibrator = None
        try:
            calibrator = self._esp32.venturi_calibration()
            flows = []
            delta_ps = []
            for completion, flow, delta_p in calibrator.data():
                self.completion_bar.setValue(completion)
                flows.append(flow)
                delta_ps.append(delta_p)

            self._coefficients, chi_sq, ndf = data_regression(delta_ps, flows)
            print('Fit coefficients', self._coefficients)
            if self._coefficients == []:
                raise Exception("invalid data points")
            if chi_sq/ndf > 10:
                raise Exception("Fit has a chi 2 too large")
            self.endstatus_label.setText("Success")
        except Exception:  # pylint: disable=W0703
            # Narrowed from a bare 'except': no longer swallows
            # KeyboardInterrupt/SystemExit. Any failure lets the user retry.
            self.start_calibration.setEnabled(True)
            self.retry_button.setEnabled(True)
            self.endstatus_label.setText("Venturi spirometer\npressure probes inverted")
        finally:
            self.back_button.setEnabled(True)
            del calibrator
|
cburroughs/collins
|
app/util/concurrent/BackgroundProcessor.scala
|
package util
package concurrent
import akka.actor._
import akka.pattern.ask
import akka.routing.RoundRobinRouter
import akka.util.Duration
import play.api.libs.concurrent._
import java.util.concurrent.TimeoutException
//import scala.collection.immutable.Vector
// Worker actor: runs a submitted BackgroundProcess synchronously on the
// actor's thread and replies to the sender with its result.
class BackgroundProcessorActor extends Actor {
  def receive = {
    case processor: BackgroundProcess[_] => sender ! processor.run()
  }
}
/** Timeout failure whose string form is just its message (no class-name prefix). */
case class SexyTimeoutException(timeout: Duration) extends Exception("Command timeout after %s seconds".format(timeout.toSeconds.toString)) {
  override def toString(): String = getMessage()
}
object BackgroundProcessor {
  import play.api.Play.current

  // Round-robin router over 128 worker actors, created lazily on first use.
  lazy val ref = {
    val routees = (0 until 128).map { _ =>
      Akka.system.actorOf(Props[BackgroundProcessorActor])
    }
    Akka.system.actorOf(
      Props[BackgroundProcessorActor].withRouter(RoundRobinRouter(routees))
    )
  }

  // (error option, result option) pair handed to the caller's callback;
  // exactly one side is populated.
  type SendType[T] = Tuple2[Option[Throwable], Option[T]]

  // Asks a worker to run `cmd` with the command's own timeout, then maps
  // success, ask-timeout and other failures into SendType before invoking
  // `result`. A TimeoutException is rewrapped as SexyTimeoutException so the
  // message carries the configured timeout.
  def send[PROC_RES,RESPONSE](cmd: BackgroundProcess[PROC_RES])(result: SendType[PROC_RES] => RESPONSE)(implicit mf: Manifest[PROC_RES]) = {
    ask(ref, cmd)(cmd.timeout).mapTo[PROC_RES].asPromise.extend1 {
      case Redeemed(v) => result(Tuple2(None, Some(v)))
      case Thrown(e) => e match {
        case t: TimeoutException =>
          result(Tuple2(Some(SexyTimeoutException(cmd.timeout)), None))
        case _ =>
          result(Tuple2(Some(e), None))
      }
    }
  }
}
|
mslinklater/retrotools
|
interfaces/imemory.h
|
#pragma once
#include <inttypes.h>
// Abstract interface for a 16-bit-addressed, byte-wide memory device.
class IMemory
{
public:
	// Emulated accesses: go through the normal memory path.
	virtual uint8_t Read(uint16_t address) = 0;
	virtual void Write(uint16_t address, uint8_t value) = 0;

	// Dbg methods do not affect the state of the memory metadata - they are used for internal class access rather than emulated access
	virtual uint8_t DbgRead(uint16_t address) = 0;
	virtual void DbgWrite(uint16_t address, uint8_t value) = 0;

	// Marks [address, address + length) as having been executed.
	virtual void SetHasBeenExecuted(uint16_t address, uint16_t length) = 0;
};
|
ScalablyTyped/SlinkyTyped
|
w/winrt-uwp/src/main/scala/typingsSlinky/winrtUwp/global/Windows/Devices/PointOfService/PosPrinterLineDirection.scala
|
package typingsSlinky.winrtUwp.global.Windows.Devices.PointOfService

import org.scalablytyped.runtime.StObject
import scala.scalajs.js
import scala.scalajs.js.`|`
import scala.scalajs.js.annotation.{JSGlobalScope, JSGlobal, JSImport, JSName, JSBracketAccess}

// NOTE(review): ScalablyTyped-generated facade over a WinRT enum; do not edit
// by hand -- regenerate from the typings instead.
/** Describes the possible directions that a receipt or slip printer station can use to print a ruled line. */
@JSGlobal("Windows.Devices.PointOfService.PosPrinterLineDirection")
@js.native
object PosPrinterLineDirection extends StObject {
  // Reverse lookup: numeric value -> enum member (undefined if out of range).
  @JSBracketAccess
  def apply(value: Double): js.UndefOr[
    typingsSlinky.winrtUwp.Windows.Devices.PointOfService.PosPrinterLineDirection with Double
  ] = js.native

  /* 0 */ val horizontal: typingsSlinky.winrtUwp.Windows.Devices.PointOfService.PosPrinterLineDirection.horizontal with Double = js.native

  /* 1 */ val vertical: typingsSlinky.winrtUwp.Windows.Devices.PointOfService.PosPrinterLineDirection.vertical with Double = js.native
}
|
LaudateCorpus1/oci-go-sdk
|
datacatalog/job_schedule_type.go
|
<filename>datacatalog/job_schedule_type.go
// Copyright (c) 2016, 2018, 2022, Oracle and/or its affiliates. All rights reserved.
// This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
// Code generated. DO NOT EDIT.
// Data Catalog API
//
// Use the Data Catalog APIs to collect, organize, find, access, understand, enrich, and activate technical, business, and operational metadata.
// For more information, see Data Catalog (https://docs.oracle.com/iaas/data-catalog/home.htm).
//
package datacatalog
import (
"strings"
)
// JobScheduleTypeEnum Enum with underlying type: string
type JobScheduleTypeEnum string

// Set of constants representing the allowable values for JobScheduleTypeEnum
const (
	JobScheduleTypeScheduled JobScheduleTypeEnum = "SCHEDULED"
	JobScheduleTypeImmediate JobScheduleTypeEnum = "IMMEDIATE"
)

// Case-sensitive wire-value -> enum lookup used by the accessors below.
// NOTE(review): generated code (see "DO NOT EDIT" header) -- regenerate
// rather than editing by hand.
var mappingJobScheduleTypeEnum = map[string]JobScheduleTypeEnum{
	"SCHEDULED": JobScheduleTypeScheduled,
	"IMMEDIATE": JobScheduleTypeImmediate,
}
// GetJobScheduleTypeEnumValues Enumerates the set of values for JobScheduleTypeEnum
// (iteration order is map order, i.e. unspecified).
func GetJobScheduleTypeEnumValues() []JobScheduleTypeEnum {
	values := make([]JobScheduleTypeEnum, 0)
	for _, v := range mappingJobScheduleTypeEnum {
		values = append(values, v)
	}
	return values
}
// GetJobScheduleTypeEnumStringValues Enumerates the set of values in String for JobScheduleTypeEnum
func GetJobScheduleTypeEnumStringValues() []string {
	return []string{
		"SCHEDULED",
		"IMMEDIATE",
	}
}
// GetMappingJobScheduleTypeEnum performs case Insensitive comparison on enum value and return the desired enum
// NOTE(review): rebuilds the lowercase lookup map on every call; this file is
// generated ("DO NOT EDIT"), so fix the generator rather than this function.
func GetMappingJobScheduleTypeEnum(val string) (JobScheduleTypeEnum, bool) {
	mappingJobScheduleTypeEnumIgnoreCase := make(map[string]JobScheduleTypeEnum)
	for k, v := range mappingJobScheduleTypeEnum {
		mappingJobScheduleTypeEnumIgnoreCase[strings.ToLower(k)] = v
	}
	enum, ok := mappingJobScheduleTypeEnumIgnoreCase[strings.ToLower(val)]
	return enum, ok
}
|
fhornain/patternfly-react-seed_1
|
node_modules/@patternfly/react-icons/dist/js/icons/cuttlefish-icon.d.js
|
"use strict";
//# sourceMappingURL=cuttlefish-icon.d.js.map
|
JARVIS-AI/python-codes
|
Python-Chaptering/Chapter-3/7-iphoneMessages.py
|
<reponame>JARVIS-AI/python-codes
#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import sqlite3
import optparse
def isMessageTable(iphoneDB):
    """Return True if the SQLite database at iphoneDB contains a table whose
    name includes 'message', False otherwise (including unreadable or
    non-SQLite files).

    Fixes over the original: the connection is always closed (it leaked),
    the no-match case returns an explicit False instead of falling off the
    end (None), and the bare except no longer swallows KeyboardInterrupt.
    """
    try:
        conn = sqlite3.connect(iphoneDB)
        try:
            c = conn.cursor()
            c.execute('SELECT tbl_name FROM sqlite_master '
                      'WHERE type=="table";')
            for row in c:
                if 'message' in str(row):
                    return True
            return False
        finally:
            conn.close()
    except Exception:
        # Not a readable SQLite database.
        return False
def printMessage(msgDB):
    # Dump every SMS record (date, address, text) from the iPhone backup
    # database at msgDB. Python 2 code (print statements).
    # Errors are deliberately swallowed: this is a best-effort forensic dump
    # over files that may not have the expected schema.
    try:
        conn = sqlite3.connect(msgDB)
        c = conn.cursor()
        # address>0 filters out rows with no sender/recipient number.
        c.execute('select datetime(date,\'unixepoch\'),\
        address, text from message WHERE address>0;')
        for row in c:
            date = str(row[0])
            addr = str(row[1])
            text = row[2]
            print '\n[+] Date: '+date+', Addr: '+addr \
                + ' Message: ' + text
    except:
        pass
def main():
    # Parse -p <iPhone backup directory>, then probe every file in it for an
    # SMS database and print any messages found. Python 2 code.
    parser = optparse.OptionParser("usage %prog "+\
        "-p <iPhone Backup Directory> ")
    # NOTE(review): help text says 'skype profile path' but the option is the
    # iPhone backup directory -- looks like a copy-paste leftover; confirm
    # before changing the user-visible string.
    parser.add_option('-p', dest='pathName',\
        type='string',help='specify skype profile path')
    (options, args) = parser.parse_args()
    pathName = options.pathName
    if pathName == None:
        print parser.usage
        exit(0)
    else:
        dirList = os.listdir(pathName)
        for fileName in dirList:
            iphoneDB = os.path.join(pathName, fileName)
            if isMessageTable(iphoneDB):
                try:
                    print '\n[*] --- Found Messages ---'
                    printMessage(iphoneDB)
                except:
                    pass

if __name__ == '__main__':
    main()
|
Telecominfraproject/wlan-cloud-base
|
base-hierarchical-datastore/src/main/java/com/telecominfraproject/wlan/hierarchical/datastore/index/RecordIndexPositions.java
|
<gh_stars>0
package com.telecominfraproject.wlan.hierarchical.datastore.index;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.telecominfraproject.wlan.core.model.json.BaseJsonModel;
/**
* Class RecordIndexCounts and RecordIndexPositions represent record-level index in hierarchical datastore.
* The goal of this index is to reduce number of records that need to be processed by json parser when performing filtering operations.
* This index corresponds on-to-one to a data file in HDS, and it is usually written at the same time as the data file.
* During data filtering operations the index should be taken as a hint - if it is missing, then full data file will be processed.
* It should be possible to introduce new indexes after the fact - old data files can be scanned and new index files can be created.
* Indexes are stored in the same directory as the data files they represent.
* <br>
* Index file name is structured as idx_[indexName]_[dataFileName] and it is not compressed.
* Inside the index file archive there is one entry with a text file.
* First line in that text file contains json object for RecordIndexCounts, second line contains json object for RecordIndexPositions
*
* @author dtop
*
*/
public class RecordIndexPositions extends BaseJsonModel {
    private static final long serialVersionUID = 17672003429334228L;

    /** Index name (the [indexName] part of the index file name). */
    private String name;
    /** For each indexed value, the record positions at which it occurs. */
    private Map<String, List<Integer>> perValuePositions = new HashMap<>();

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public Map<String, List<Integer>> getPerValuePositions() {
        return perValuePositions;
    }

    public void setPerValuePositions(Map<String, List<Integer>> perValuePositions) {
        this.perValuePositions = perValuePositions;
    }

    /**
     * Returns the record positions for the given value, or an empty list when
     * the value is not indexed. Never returns null.
     */
    @JsonIgnore
    public List<Integer> getPositionsForValue(String value){
        return perValuePositions.getOrDefault(value, Collections.emptyList());
    }

    /**
     * Records that {@code value} occurs at record position {@code pos},
     * creating the per-value list on first use.
     */
    public void addPositionForValue(String value, int pos){
        // computeIfAbsent replaces the original get / null-check / put
        // sequence (the file already targets Java 8: see getOrDefault above).
        perValuePositions.computeIfAbsent(value, k -> new ArrayList<>()).add(pos);
    }
}
|
zipated/src
|
chrome/browser/resources/chromeos/chromevox/cvox2/background/keyboard_handler.js
|
<gh_stars>1000+
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
/**
* @fileoverview ChromeVox keyboard handler.
*/
goog.provide('BackgroundKeyboardHandler');
goog.require('ChromeVoxState');
goog.require('EventSourceState');
goog.require('Output');
goog.require('cvox.ChromeVoxKbHandler');
goog.require('cvox.ChromeVoxPrefs');
/**
 * @constructor
 * Installs document-level keydown/keyup listeners, registers ChromeVox as a
 * keyboard listener, and switches prefs to the 'keymap_next' key map.
 */
BackgroundKeyboardHandler = function() {
  /** Number of keyups seen while pass-through mode is active. @type {number} @private */
  this.passThroughKeyUpCount_ = 0;

  /** Key codes whose keydown was consumed; their keyup is eaten too. @type {Set} @private */
  this.eatenKeyDowns_ = new Set();

  document.addEventListener('keydown', this.onKeyDown.bind(this), false);
  document.addEventListener('keyup', this.onKeyUp.bind(this), false);
  chrome.accessibilityPrivate.setKeyboardListener(
      true, cvox.ChromeVox.isStickyPrefOn);
  window['prefs'].switchToKeyMap('keymap_next');
};
BackgroundKeyboardHandler.prototype = {
  /**
   * Handles key down events.
   * @param {Event} evt The key down event to process.
   * @return {boolean} True if the default action should be performed.
   */
  onKeyDown: function(evt) {
    EventSourceState.set(EventSourceType.STANDARD_KEYBOARD);
    evt.stickyMode = cvox.ChromeVox.isStickyModeOn() && cvox.ChromeVox.isActive;
    // In pass-through mode, let the page see every key untouched.
    if (cvox.ChromeVox.passThroughMode)
      return false;

    Output.forceModeForNextSpeechUtterance(cvox.QueueMode.FLUSH);
    // If the key map handled the key, consume it and remember the key code so
    // the matching keyup is eaten too (see onKeyUp).
    if (!cvox.ChromeVoxKbHandler.basicKeyDownActionsListener(evt)) {
      evt.preventDefault();
      evt.stopPropagation();
      this.eatenKeyDowns_.add(evt.keyCode);
    }
    return false;
  },

  /**
   * Handles key up events.
   * @param {Event} evt The key down event to process.
   * @return {boolean} True if the default action should be performed.
   */
  onKeyUp: function(evt) {
    // Reset pass through mode once a keyup (not involving the pass through key)
    // is seen. The pass through command involves three keys.
    if (cvox.ChromeVox.passThroughMode) {
      if (this.passThroughKeyUpCount_ >= 3) {
        cvox.ChromeVox.passThroughMode = false;
        this.passThroughKeyUpCount_ = 0;
      } else {
        this.passThroughKeyUpCount_++;
      }
    }

    // Eat the keyup paired with an eaten keydown so the page never sees an
    // unmatched keyup.
    if (this.eatenKeyDowns_.has(evt.keyCode)) {
      evt.preventDefault();
      evt.stopPropagation();
      this.eatenKeyDowns_.delete(evt.keyCode);
    }
    return false;
  }
};
/**
 * Synthesizes a full key press (key down followed by key up) for |keyCode|.
 * @param {number} keyCode
 * @param {chrome.accessibilityPrivate.SyntheticKeyboardModifiers=} modifiers
 * @return {boolean} Always true.
 */
BackgroundKeyboardHandler.sendKeyPress = function(keyCode, modifiers) {
  var types = chrome.accessibilityPrivate.SyntheticKeyboardEventType;
  [types.KEYDOWN, types.KEYUP].forEach(function(type) {
    chrome.accessibilityPrivate.sendSyntheticKeyEvent(
        {type: type, keyCode: keyCode, modifiers: modifiers});
  });
  return true;
};
|
AnthemCS/jobApp
|
node_modules/babel-plugin-transform-inline-consecutive-adds/lib/array-collapser.js
|
"use strict";

// NOTE(review): Babel-transpiled output (class helpers below are standard
// Babel runtime boilerplate). Do not edit by hand; change the ES6 source and
// re-transpile instead.
var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();

function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }

function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; }

function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }

var Collapser = require("./collapser");
// Collapser subclass (transpiled) that folds consecutive `foo.push(...)`
// calls into the preceding array literal initializer of `foo`.
var ArrayCollapser = function (_Collapser) {
  _inherits(ArrayCollapser, _Collapser);

  function ArrayCollapser() {
    _classCallCheck(this, ArrayCollapser);

    return _possibleConstructorReturn(this, (ArrayCollapser.__proto__ || Object.getPrototypeOf(ArrayCollapser)).apply(this, arguments));
  }

  _createClass(ArrayCollapser, [{
    key: "isInitTypeValid",
    // Only array-literal initializers can absorb pushes.
    value: function isInitTypeValid(init) {
      return init.isArrayExpression();
    }
  }, {
    key: "isExpressionTypeValid",
    value: function isExpressionTypeValid(expr) {
      return expr.isCallExpression();
    }
  }, {
    key: "getExpressionChecker",
    value: function getExpressionChecker(objName, checkReference) {
      return function (expr) {
        // checks expr is of form:
        // foo.push(rval1, ...nrvals)
        var callee = expr.get("callee");
        if (!callee.isMemberExpression()) {
          return false;
        }

        var obj = callee.get("object"),
            prop = callee.get("property");
        if (!obj.isIdentifier() || obj.node.name !== objName || !prop.isIdentifier() || prop.node.name !== "push") {
          return false;
        }

        // Reject arguments that reference the array itself (unsafe to inline).
        var args = expr.get("arguments");
        if (args.some(checkReference)) {
          return false;
        }
        return true;
      };
    }
  }, {
    key: "extractAssignment",
    value: function extractAssignment(expr) {
      return expr.node.arguments;
    }
  }, {
    key: "addSuccessfully",
    // Appends the pushed arguments onto the array literal's elements.
    value: function addSuccessfully(t, args, init) {
      args.map(function (a) {
        return init.elements.push(a);
      });
      return true;
    }
  }]);

  return ArrayCollapser;
}(Collapser);

module.exports = ArrayCollapser;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.