repo_name stringlengths 6 101 | path stringlengths 4 300 | text stringlengths 7 1.31M |
|---|---|---|
Jaykingamez/evennia | evennia/server/profiling/test_queries.py | """
This is a little routine for viewing the sql queries that are executed by a given
query as well as count them for optimization testing.
"""
import sys
import os
# sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))))
# os.environ["DJANGO_SETTINGS_MODULE"] = "game.settings"
from django.db import connection
def count_queries(exec_string, setup_string):
    """
    Display the SQL queries executed by `exec_string`.

    Args:
        exec_string (str): Python code whose queries should be counted.
        setup_string (str): Python code run first to set up the environment
            (e.g. fetching the objects that `exec_string` operates on).

    Note:
        `connection.queries` is only populated when Django runs with
        DEBUG=True; with DEBUG=False this will always report 0 queries.
    """
    # Both exec() calls share this function's frame, so names bound by
    # setup_string (e.g. `g`) are visible to exec_string.
    exec(setup_string)
    num_queries_old = len(connection.queries)
    exec(exec_string)
    nqueries = len(connection.queries) - num_queries_old
    # Print exactly the queries triggered by exec_string. The original
    # sliced with `[-nqueries if nqueries else 1:]`, which dumped almost
    # the entire query log whenever nqueries was 0.
    for query in connection.queries[len(connection.queries) - nqueries:]:
        print(query["time"], query["sql"])
    print("Number of queries: %s" % nqueries)
if __name__ == "__main__":
    # setup tests here
    # Example: fetch one object in setup, then count the queries issued by
    # a single tags lookup. Edit these two snippets to profile other code.
    setup_string = """
from evennia.objects.models import ObjectDB
g = ObjectDB.objects.get(db_key="Griatch")
"""
    exec_string = """
g.tags.all()
"""
    count_queries(exec_string, setup_string)
|
elliotleee/Apollo-lab | src/ApolloRescue/module/algorithm/clustering/ApolloFireClustering.java | <filename>src/ApolloRescue/module/algorithm/clustering/ApolloFireClustering.java<gh_stars>0
package ApolloRescue.module.algorithm.clustering;
import ApolloRescue.module.algorithm.ApolloPathPlanning;
import ApolloRescue.module.universal.ApolloWorld;
import ApolloRescue.module.universal.entities.BuildingModel;
import ApolloRescue.module.universal.Util;
import adf.agent.communication.MessageManager;
import adf.agent.develop.DevelopData;
import adf.agent.info.AgentInfo;
import adf.agent.info.ScenarioInfo;
import adf.agent.info.WorldInfo;
import adf.agent.module.ModuleManager;
import adf.agent.precompute.PrecomputeData;
import adf.component.module.algorithm.Clustering;
import adf.component.module.algorithm.DynamicClustering;
import rescuecore2.misc.Pair;
import rescuecore2.standard.entities.Building;
import rescuecore2.standard.entities.StandardEntity;
import rescuecore2.worldmodel.EntityID;
import math.geom2d.polygon.SimplePolygon2D;
import java.awt.Polygon;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static rescuecore2.standard.entities.StandardEntityURN.*;
//import com.sun.jmx.mbeanserver.Util;
public class ApolloFireClustering extends DynamicClustering {
private int groupingDistance;
private int idCounter = 1;
private static int CLUSTER_RANGE_THRESHOLD;
ApolloFireZone firezone;
List<List<StandardEntity>> clusterList = new LinkedList<>();
private List<Polygon> clusterConvexPolygons;
private List<Cluster> clusters;
private int myClusterIndex = -1;
private ApolloPathPlanning pathPlanning;
private WorldInfo worldInfo;
private ScenarioInfo scenarioInfo;
private AgentInfo agentInfo;
private Map<EntityID, Cluster> entityClusterMap;
private ApolloWorld world;
private List<BuildingModel> buildings;
/**
 * Builds the fire-clustering module and resolves its helper modules.
 *
 * @param ai            agent information
 * @param wi            world information
 * @param si            scenario information
 * @param moduleManager module manager used to resolve sub-modules
 * @param developData   tunable development parameters
 */
public ApolloFireClustering(AgentInfo ai, WorldInfo wi, ScenarioInfo si, ModuleManager moduleManager, DevelopData developData) {
    super(ai, wi, si, moduleManager, developData);
    this.groupingDistance = developData.getInteger("ApolloRescue.module.algorithm.clustering.ApolloFireClustering.groupingDistance", 30);
    worldInfo = wi;
    scenarioInfo = si;
    agentInfo = ai;
    entityClusterMap = new HashMap<>();
    clusters = new ArrayList<>();
    clusterConvexPolygons = new ArrayList<>();
    // The original switch on si.getMode() resolved the exact same module in
    // every branch (PRECOMPUTATION_PHASE, PRECOMPUTED, NON_PRECOMPUTE), so
    // the duplicated branches are collapsed into a single lookup.
    this.pathPlanning = moduleManager.getModule("Clustering.PathPlanning", "adf.sample.module.algorithm.SamplePathPlanning");
    // NOTE(review): a static field is (re)assigned from instance state here;
    // with multiple instances the last constructed one wins — confirm intended.
    CLUSTER_RANGE_THRESHOLD = scenarioInfo.getPerceptionLosMaxDistance();
    this.world = ApolloWorld.load(ai, wi, si, moduleManager, developData);
}
/**
 * calculation phase; update cluster
 *
 * Incrementally maintains the set of fire clusters from currently-hot
 * buildings: hot buildings are added to a nearby existing cluster (or start
 * a new one), cooled/burnt-out buildings are removed, overlapping clusters
 * are merged ("eaten"), and the index of the cluster nearest this agent is
 * cached in {@code myClusterIndex}.
 *
 * @return own instance for method chaining
 */
public Clustering calc() {
    Cluster cluster;
    Cluster tempCluster;
    // Clusters adjacent to the building currently being processed.
    Set<Cluster> adjacentClusters = new HashSet<>();
    this.clusterConvexPolygons = new ArrayList<>();
    // Pass 1: assign every hot building to a cluster; drop cold ones.
    for (StandardEntity entity : worldInfo.getEntitiesOfType(BUILDING, AMBULANCE_CENTRE, POLICE_OFFICE, FIRE_STATION, GAS_STATION)) {
        Building building = (Building) entity;
        // Fieryness 8 marks a burnt-out building; temperature > 25 marks it hot.
        if (building.isFierynessDefined() && building.getFieryness() != 8
                && building.isTemperatureDefined() && building.getTemperature() > 25) {
            cluster = getCluster(building.getID());
            if (cluster == null) {
                // Hot but unclustered: start a fresh zone containing it.
                // cluster = new FireCluster(world, fireClusterMembershipChecker); //old
                cluster = new ApolloFireZone(world, buildings);
                cluster.add(building);
                //checking neighbour clusters
                for (StandardEntity neighbourEntity : worldInfo.getObjectsInRange(building.getID(), CLUSTER_RANGE_THRESHOLD)) {
                    if (!(neighbourEntity instanceof Building)) {
                        continue;
                    }
                    tempCluster = getCluster(neighbourEntity.getID());
                    if (tempCluster == null) {
                        //TODO: isEligible_Estimated ok or it should be isEligible
                        /*if (fireClusterMembershipChecker.isEligible_Estimated(world.getMrlBuilding(entity.getID()))) {
                            cluster.add(entity);
                            entityClusterMap.put(entity.getID(), cluster);
                        }*/
                    } else {
                        adjacentClusters.add(tempCluster);
                    }
                }
                if (adjacentClusters.isEmpty()) {
                    // No neighbouring cluster: register the new zone on its own.
                    cluster.setId(idCounter++);
                    addToClusterSet(cluster, building.getID());
                } else {
                    // Fold the new zone into the neighbouring cluster(s).
                    merge(adjacentClusters, cluster, building.getID());
                }
            } else {
                //do noting — building is already tracked by a cluster
            }
        } else { // remove this building if it was in a cluster
            // Was it previously in any cluster?
            cluster = getCluster(building.getID());
            if (cluster == null) {
                //do nothing
            } else {
                cluster.remove(building);
                entityClusterMap.remove(building.getID());//edited by sajjad, 2 lines shifted up
                // Drop clusters that became empty.
                if (cluster.entities.isEmpty()) {
                    clusters.remove(cluster);
                }
            }
        }
        adjacentClusters.clear();
    }
    if(clusters == null){
        System.out.println("clusters is null!!!");
    }
    // Pass 2: refresh each cluster's convex hull and hull-based membership.
    for (Cluster c : clusters) {
        c.updateConvexHull();
        c.setAllEntities(world.getBuildingsInShape(c.getConvexHullObject().getConvexPolygon()));//Mostafa
    }
    if (getClusterNumber() > 0) {
        for (int i = 0; i < getClusterNumber(); i++) {
            clusterConvexPolygons.add(i, createConvexHull(getClusterEntities(i)));
        }
        // Cache the index of the cluster nearest to this agent.
        double minDistance = Double.MAX_VALUE;
        int nearestClusterIndex = 0;
        for (int i = 0; i < this.clusterConvexPolygons.size(); i++) {
            double distance = Util.distance(this.clusterConvexPolygons.get(i), worldInfo.getLocation(agentInfo.getID()), false);
            if (distance < minDistance) {
                minDistance = distance;
                nearestClusterIndex = i;
            }
        }
        myClusterIndex = nearestClusterIndex;
        // Pass 3: merge ("eat") small clusters into larger ones they touch;
        // a cluster already eaten this round cannot eat or be eaten again.
        Map<ApolloFireZone, Set<ApolloFireZone>> eat = new HashMap<>();
        List<ApolloFireZone> eatenFireClusters = new ArrayList<>();
        for (Cluster cluster1 : clusters) {
            if (eatenFireClusters.contains((ApolloFireZone) cluster1)) continue;
            Set<ApolloFireZone> feed = new HashSet<>();
            for (Cluster cluster2 : clusters) {
                if (eatenFireClusters.contains((ApolloFireZone) cluster2)) continue;
                if (cluster1.equals(cluster2)) continue;
                if (canEat((ApolloFireZone) cluster1, (ApolloFireZone) cluster2)) {
                    feed.add((ApolloFireZone) cluster2);
                    eatenFireClusters.add((ApolloFireZone) cluster2);
                }
            }
            eat.put((ApolloFireZone) cluster1, feed);
        }
        for (ApolloFireZone nextCluster : eat.keySet()) {
            for (ApolloFireZone c : eat.get(nextCluster)) {
                nextCluster.eat(c);
                // refreshing EntityClusterMap
                for (StandardEntity entity : c.entities) {
                    entityClusterMap.remove(entity.getID());
                    entityClusterMap.put(entity.getID(), nextCluster);
                }
                clusters.remove(c);
            }
        }
        // Pass 4: buildings shared by two hulls are flagged so both clusters
        // treat them as ignored border entities.
        List<StandardEntity> ignoredBorderBuildings = new ArrayList<StandardEntity>();
        for (int i = 0; i < clusters.size() - 1; i++) {
            for (int j = i + 1; j < clusters.size(); j++) {
                findMutualEntities((ApolloFireZone) clusters.get(i), (ApolloFireZone) clusters.get(j), ignoredBorderBuildings);
            }
        }
        // System.out.println(agentInfo.getTime() + " " + agentInfo.getID() + " clusterIndex: " +
        // myClusterIndex + " clusterSize: " + clustering.getClusterEntities(myClusterIndex).size());
        // for (int i = 0; i < this.clustering.getClusterNumber(); i++) {
        // System.out.println(agentInfo.getID() + " first cluster : " + this.clustering.getClusterEntities(i).size());
        /*if (MrlPersonalData.DEBUG_MODE) {
            try {
                Collection<StandardEntity> clusterEntities = getClusterEntities(nearestClusterIndex);
                if (clusterEntities != null) {
                    List<Integer> elementList = Util.fetchIdValueFormElements(clusterEntities);
                    VDClient.getInstance().drawAsync(agentInfo.getID().getValue(), "MrlSampleBuildingsLayer", (Serializable) elementList);
                }
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
        if (MrlPersonalData.DEBUG_MODE) {
            try {
                ArrayList<Polygon> data = new ArrayList<>();
                data.add(clusterConvexPolygons.get(nearestClusterIndex));
                VDClient.getInstance().drawAsync(agentInfo.getID().getValue(), "ClusterConvexPolygon", data);
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
        // }*/
        // this.result = this.calcTargetInCluster(myClusterIndex);
    }
    // Set<StandardEntity> borderBuildings = findBorderElements(elements, clustering.getClusterConvexPolygons().get(myClusterIndex));
    return this;
}
/**
 * Marks buildings that lie inside the convex hulls of BOTH clusters as
 * ignored border entities of each cluster, so neither cluster targets them.
 *
 * @param primaryFireCluster     first cluster
 * @param secondaryFireCluster   second cluster
 * @param ignoredBorderBuildings accumulator for buildings shared by both hulls
 */
private void findMutualEntities(ApolloFireZone primaryFireCluster, ApolloFireZone secondaryFireCluster, List<StandardEntity> ignoredBorderBuildings) {
    // Hoisted out of the loop: the original re-scanned all burning buildings
    // against the secondary hull once per building of the primary hull.
    List<BuildingModel> secondaryBuildings = getBuildingsInConvexPolygon(secondaryFireCluster.getConvexHullObject().getConvexPolygon());
    for (BuildingModel apolloBuilding : getBuildingsInConvexPolygon(primaryFireCluster.getConvexHullObject().getConvexPolygon())) {
        if (secondaryBuildings.contains(apolloBuilding)) {
            Building self = apolloBuilding.getSelfBuilding();
            primaryFireCluster.getIgnoredBorderEntities().add(self);
            primaryFireCluster.getBorderEntities().remove(self);
            secondaryFireCluster.getIgnoredBorderEntities().add(self);
            secondaryFireCluster.getBorderEntities().remove(self);
            ignoredBorderBuildings.add(self);
        }
    }
}
/**
 * Collects the estimated-burning buildings whose location lies inside the
 * given convex polygon.
 *
 * @param polygon convex hull in world coordinates
 * @return burning buildings contained in {@code polygon}; never null
 */
public List<BuildingModel> getBuildingsInConvexPolygon(Polygon polygon) {
    List<BuildingModel> result = new ArrayList<BuildingModel>();
    for (BuildingModel mrlBuilding : world.getEstimatedBurningBuildings()) {
        Pair<Integer, Integer> location = worldInfo.getLocation(mrlBuilding.getSelfBuilding().getID());
        // Skip entities whose position is unknown instead of risking an NPE
        // (worldInfo.getLocation may not resolve every id — TODO confirm).
        if (location == null) {
            continue;
        }
        if (polygon.contains(location.first(), location.second()))
            result.add(mrlBuilding);
    }
    return result;
}
/**
 * merge new cluster to others and replace the result with all others
 *
 * Absorbs ONE adjacent cluster into {@code cluster} (see the break below),
 * re-points the absorbed entities in the entity-to-cluster map, and registers
 * the merged cluster under the absorbed cluster's id.
 *
 * @param adjacentClusters adjacent clusters to the new cluster
 * @param cluster new constructed cluster
 * @param entityID id of the building that triggered the merge
 */
protected void merge(Set<Cluster> adjacentClusters, Cluster cluster, EntityID entityID) {
    int maxCId = 0;
    for (Cluster c : adjacentClusters) {
        // Keep the largest id seen among the absorbed clusters.
        if (maxCId < c.getId()) {
            maxCId = c.getId();
        }
        cluster.eat(c);
        // refreshing EntityClusterMap
        for (StandardEntity entity : c.entities) {
            entityClusterMap.remove(entity.getID()); //added 25 khordad! by sajjad & peyman
            entityClusterMap.put(entity.getID(), cluster);
        }
        clusters.remove(c);
        // Deliberately merges only the first adjacent cluster per call.
        break;//todo: remove this line to merge all possible clusters
    }
    cluster.setId(maxCId);
    addToClusterSet(cluster, entityID);
}
/**
 * Registers a cluster: maps the triggering entity to it and adds it to the
 * global cluster list.
 *
 * @param cluster  cluster to register
 * @param entityID entity that belongs to (and triggered) this cluster
 */
protected void addToClusterSet(Cluster cluster, EntityID entityID) {
    // cluster.updateConvexHull()
    entityClusterMap.put(entityID, cluster);
    clusters.add(cluster);
}
/**
 * Looks up the cluster currently containing the given entity.
 *
 * @param id entity id
 * @return the containing cluster, or null when the entity is unclustered
 */
private Cluster getCluster(EntityID id) {
    return entityClusterMap.get(id);
}
/**
 * Per-timestep update hook: refreshes world info and recomputes clusters.
 *
 * @param messageManager message manager for this timestep
 * @return own instance for method chaining
 */
@Override
public Clustering updateInfo(MessageManager messageManager)
{
    super.updateInfo(messageManager);
    // Only recompute once per timestep even if called repeatedly.
    if(this.getCountUpdateInfo() > 1) { return this; }
    this.calc(); // invoke calc()
    // Route the trace through the class's debug helper so it is only printed
    // in debug mode, instead of unconditionally spamming stdout every step.
    debugStdOut("Cluster : " + clusters.size());
    return this;
}
/** Precomputation phase entry point; this module precomputes nothing. */
@Override
public Clustering precompute(PrecomputeData precomputeData)
{
    super.precompute(precomputeData);
    // Run the (empty) body at most once.
    if(this.getCountPrecompute() > 1) { return this; }
    return this;
}

/** Resume-from-precompute entry point; nothing to restore. */
@Override
public Clustering resume(PrecomputeData precomputeData)
{
    super.resume(precomputeData);
    if(this.getCountResume() > 1) { return this; }
    return this;
}

/** Preparation phase entry point; no additional setup required. */
@Override
public Clustering preparate()
{
    super.preparate();
    if(this.getCountPreparate() > 1) { return this; }
    return this;
}
/** @return the current number of fire clusters */
@Override
public int getClusterNumber()
{
    return clusters.size();
}
/**
 * Finds the index of the cluster containing the given entity.
 *
 * @param standardEntity entity to search for
 * @return index of the containing cluster, or -1 when no cluster contains it
 */
@Override
public int getClusterIndex(StandardEntity standardEntity)
{
    int index = 0;
    for (Cluster cluster : clusters) {
        if (cluster.getEntities().contains(standardEntity)) {
            return index;
        }
        index++;
    }
    return -1;
}
/**
 * Finds the index of the cluster containing the entity with the given id.
 *
 * @param entityID entity id to search for
 * @return index of the containing cluster, or -1 when not clustered
 */
@Override
public int getClusterIndex(EntityID entityID)
{
    return getClusterIndex(worldInfo.getEntity(entityID));
}
/**
 * Returns the entities of the i-th cluster.
 *
 * @param i cluster index
 * @return the cluster's entities, or null when {@code i} is out of range
 *         (existing callers rely on the null contract, so it is preserved)
 */
@Override
public Collection<StandardEntity> getClusterEntities(int i)
{
    // Also guard the lower bound: getClusterIndex() returns -1 for "not
    // found", and the original only checked i < size(), letting -1 reach
    // clusters.get() and throw IndexOutOfBoundsException.
    if (i >= 0 && i < clusters.size()) {
        return clusters.get(i).getEntities();
    } else {
        return null;
    }
}
/**
 * Returns the entity IDs of the i-th cluster.
 *
 * @param i cluster index
 * @return IDs of the cluster's entities; empty when {@code i} is out of range
 */
@Override
public Collection<EntityID> getClusterEntityIDs(int i)
{
    ArrayList<EntityID> list = new ArrayList<>();
    Collection<StandardEntity> entities = getClusterEntities(i);
    // getClusterEntities() returns null for an out-of-range index; the
    // original iterated it directly and threw a NullPointerException.
    if (entities == null) {
        return list;
    }
    for (StandardEntity entity : entities) {
        list.add(entity.getID());
    }
    return list;
}
/**
 * classify burning building
 * @param building target building
 * @return is building burning
 */
private boolean isBurning(Building building)
{
    // Fieryness values 1-3 are the "on fire" states; everything else
    // (unburnt, water damage, extinguished, burnt out) is not burning.
    if (!building.isFierynessDefined()) {
        return false;
    }
    int fieryness = building.getFieryness();
    return fieryness >= 1 && fieryness <= 3;
}
/**
 * output text with class name to STDOUT when debug-mode.
 * @param text output text
 */
private void debugStdOut(String text)
{
    // Silent unless the scenario runs in debug mode.
    if (scenarioInfo.isDebugMode())
    { System.out.println("[" + this.getClass().getSimpleName() + "] " + text); }
}
/**
 * Builds the convex hull of all apexes of the buildings in a cluster.
 *
 * @param clusterEntities entities of one cluster; non-buildings are skipped
 * @return the hull as an AWT polygon
 */
private Polygon createConvexHull(Collection<StandardEntity> clusterEntities) {
    ConvexHull convexHull = new ConvexHull();
    for (StandardEntity entity : clusterEntities) {
        if (entity instanceof Building) {
            // Hoisted: the original called getApexList() three times per
            // loop iteration.
            int[] apexes = ((Building) entity).getApexList();
            // Apexes are stored as a flat [x0, y0, x1, y1, ...] array.
            for (int i = 0; i < apexes.length; i += 2) {
                convexHull.addPoint(apexes[i], apexes[i + 1]);
            }
        }
    }
    return convexHull.convex();
}
/**
 * Decides whether {@code zone1} may absorb ("eat") {@code zone2}.
 *
 * zone2 is eatable when its hull covers less than 10% of the map area AND
 * its center either lies inside zone1's hull or is within 30000 world units
 * of one of zone1's hull edges.
 *
 * @param zone1 candidate eater
 * @param zone2 candidate meal
 * @return true if zone1 should absorb zone2
 */
private boolean canEat(ApolloFireZone zone1, ApolloFireZone zone2) {
    int nPointsCluster1 = zone1.getConvexHullObject().getConvexPolygon().npoints;
    int nPointsCluster2 = zone2.getConvexHullObject().getConvexPolygon().npoints;
    double[] xPointsCluster2 = new double[nPointsCluster2];
    double[] yPointsCluster2 = new double[nPointsCluster2];
    // Copy zone2's hull into a geometry polygon to measure its area.
    for (int i = 0; i < nPointsCluster2; i++) {
        xPointsCluster2[i] = zone2.getConvexHullObject().getConvexPolygon().xpoints[i];
        yPointsCluster2[i] = zone2.getConvexHullObject().getConvexPolygon().ypoints[i];
    }
    SimplePolygon2D cluster2Polygon = new SimplePolygon2D(xPointsCluster2, yPointsCluster2);
    // NOTE(review): integer division truncates sub-1000-unit map dimensions
    // before multiplying — presumably a deliberate km-scale estimate; confirm.
    double mapArea = (world.getMapWidth() / 1000) * (world.getMapHeight() / 1000);
    // Too large to be eaten: hull covers more than 10% of the map.
    if ((cluster2Polygon.getArea() / 1000000) > mapArea * 0.1) return false;
    // zone2's center inside zone1's hull: absorb unconditionally.
    if (zone1.getConvexHullObject().getConvexPolygon().contains(zone2.getCenter())) return true;
    rescuecore2.misc.geometry.Point2D clusterCenter = new rescuecore2.misc.geometry.Point2D(zone2.getCenter().getX(), zone2.getCenter().getY());
    Polygon convexPolygon = zone1.getConvexHullObject().getConvexPolygon();
    // Otherwise absorb when zone2's center is close to any edge of zone1's hull.
    for (int i = 0; i < nPointsCluster1; i++) {
        rescuecore2.misc.geometry.Point2D point1 = new rescuecore2.misc.geometry.Point2D(convexPolygon.xpoints[i], convexPolygon.ypoints[i]);
        rescuecore2.misc.geometry.Point2D point2 = new rescuecore2.misc.geometry.Point2D(convexPolygon.xpoints[(i + 1) % nPointsCluster1], convexPolygon.ypoints[(i + 1) % nPointsCluster1]);
        if (Util.distance(new rescuecore2.misc.geometry.Line2D(point1, point2), clusterCenter) < 30000) {
            return true;
        }
    }
    return false;
}
/** @return the convex hull polygons computed for each cluster in calc() */
public List<Polygon> getClusterConvexPolygons() {
    return this.clusterConvexPolygons;
}
/**
 * Finds the cluster nearest to the given location, preferring live,
 * expandable clusters; dying/non-expandable clusters are only considered
 * as a fallback when no preferred cluster exists.
 *
 * @param location (x, y) position to measure distances from
 * @return the nearest suitable cluster, or null when there are no clusters
 */
public Cluster findNearestCluster(Pair<Integer, Integer> location) {
    if (clusters == null || clusters.isEmpty()) {
        return null;
    }
    Cluster resultFireCluster = null;
    double minDistance = Double.MAX_VALUE;
    Set<Cluster> dyingAndNoExpandableClusters = new HashSet<>();
    for (Cluster cluster : clusters) {
        // Set aside clusters we would rather not target.
        if (cluster.isDying() || (cluster instanceof ApolloFireZone && !((ApolloFireZone) cluster).isExpandableToCenterOfMap())) {
            dyingAndNoExpandableClusters.add(cluster);
            continue;
        }
        double distance = Util.distance(cluster.getConvexHullObject().getConvexPolygon(), location);
        if (distance < minDistance) {
            minDistance = distance;
            resultFireCluster = cluster;
        }
    }
    minDistance = Double.MAX_VALUE;
    // Fallback: every cluster was dying/non-expandable — pick the nearest one.
    if (resultFireCluster == null) {
        for (Cluster cluster : dyingAndNoExpandableClusters) {
            double distance = Util.distance(cluster.getConvexHullObject().getConvexPolygon(), location);
            if (distance < minDistance) {
                minDistance = distance;
                resultFireCluster = cluster;
            }
        }
    }
    return resultFireCluster;
}
/** @return index of the cluster nearest this agent, cached by calc(); -1 before the first calc() */
public int getMyClusterIndex(){
    return myClusterIndex;
}
}
|
guoxiangyang0914/ycia | src/main/java/com/thinkgem/jeesite/modules/ia/entity/IaHealthArchive.java | <reponame>guoxiangyang0914/ycia
/**
* Copyright © 2012-2016 <a href="https://github.com/thinkgem/jeesite">JeeSite</a> All rights reserved.
*/
package com.thinkgem.jeesite.modules.ia.entity;
import org.hibernate.validator.constraints.Length;
import com.thinkgem.jeesite.common.persistence.DataEntity;
/**
* 居民健康档案Entity
* @author gxy
* @version 2017-02-08
*/
public class IaHealthArchive extends DataEntity<IaHealthArchive> {

    private static final long serialVersionUID = 1L;
    private String icId;        // FK to the identity-card table
    private String no;          // archive number
    private String name;        // resident's name
    private String curAddress;  // current residential address
    private String address;     // registered (household) address
    private String tel;         // contact phone number
    private String town;        // town / street name
    private String committee;   // village / neighborhood committee name
    private String buildUnit;   // organization that created the archive
    private String buildBy;     // person who created the archive
    private String doctor;      // responsible doctor
    private String buildYear;   // year the archive was created
    private String buildMonth;  // month the archive was created
    private String buildDay;    // day the archive was created

    public IaHealthArchive() {
        super();
    }

    public IaHealthArchive(String id){
        super(id);
    }

    // Validation messages below are user-facing and intentionally left in
    // Chinese; each @Length mirrors the backing column's size.

    @Length(min=0, max=64, message="关联身份证表长度必须介于 0 和 64 之间")
    public String getIcId() {
        return icId;
    }

    public void setIcId(String icId) {
        this.icId = icId;
    }

    @Length(min=0, max=100, message="编号长度必须介于 0 和 100 之间")
    public String getNo() {
        return no;
    }

    public void setNo(String no) {
        this.no = no;
    }

    @Length(min=0, max=100, message="姓名长度必须介于 0 和 100 之间")
    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    @Length(min=0, max=200, message="现住址长度必须介于 0 和 200 之间")
    public String getCurAddress() {
        return curAddress;
    }

    public void setCurAddress(String curAddress) {
        this.curAddress = curAddress;
    }

    @Length(min=0, max=200, message="户籍住址长度必须介于 0 和 200 之间")
    public String getAddress() {
        return address;
    }

    public void setAddress(String address) {
        this.address = address;
    }

    @Length(min=0, max=100, message="联系电话长度必须介于 0 和 100 之间")
    public String getTel() {
        return tel;
    }

    public void setTel(String tel) {
        this.tel = tel;
    }

    @Length(min=0, max=100, message="乡镇(街道)名称长度必须介于 0 和 100 之间")
    public String getTown() {
        return town;
    }

    public void setTown(String town) {
        this.town = town;
    }

    @Length(min=0, max=100, message="村(居)委会名称长度必须介于 0 和 100 之间")
    public String getCommittee() {
        return committee;
    }

    public void setCommittee(String committee) {
        this.committee = committee;
    }

    @Length(min=0, max=100, message="建档单位长度必须介于 0 和 100 之间")
    public String getBuildUnit() {
        return buildUnit;
    }

    public void setBuildUnit(String buildUnit) {
        this.buildUnit = buildUnit;
    }

    @Length(min=0, max=100, message="建档人长度必须介于 0 和 100 之间")
    public String getBuildBy() {
        return buildBy;
    }

    public void setBuildBy(String buildBy) {
        this.buildBy = buildBy;
    }

    @Length(min=0, max=100, message="责任医生长度必须介于 0 和 100 之间")
    public String getDoctor() {
        return doctor;
    }

    public void setDoctor(String doctor) {
        this.doctor = doctor;
    }

    public String getBuildYear() {
        return buildYear;
    }

    public void setBuildYear(String buildYear) {
        this.buildYear = buildYear;
    }

    public String getBuildMonth() {
        return buildMonth;
    }

    public void setBuildMonth(String buildMonth) {
        this.buildMonth = buildMonth;
    }

    public String getBuildDay() {
        return buildDay;
    }

    public void setBuildDay(String buildDay) {
        this.buildDay = buildDay;
    }
}
rlourette/TI_SDK_u-boot-2019.01 | u-boot-2019.01+gitAUTOINC+333c3e72d3-g333c3e72d3/tools/mkimage.h | /* SPDX-License-Identifier: GPL-2.0+ */
/*
* (C) Copyright 2000-2004
* DENX Software Engineering
* <NAME>, <EMAIL>
*/
#ifndef _MKIIMAGE_H_
#define _MKIIMAGE_H_
#include "os_support.h"
#include <errno.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/stat.h>
#include <time.h>
#include <unistd.h>
#include <u-boot/sha1.h>
#include "fdt_host.h"
#include "imagetool.h"
#undef MKIMAGE_DEBUG
#ifdef MKIMAGE_DEBUG
#define debug(fmt,args...) printf (fmt ,##args)
#else
#define debug(fmt,args...)
#endif /* MKIMAGE_DEBUG */
/*
 * Identity-map a "physical" address to a usable pointer.  mkimage runs on
 * the build host and operates on plain process memory, so unlike U-Boot
 * proper no address translation is needed; @len is accepted for API
 * compatibility and ignored.
 */
static inline void *map_sysmem(ulong paddr, unsigned long len)
{
	return (void *)(uintptr_t)paddr;
}
/* Inverse of map_sysmem(): recover the address value from a pointer. */
static inline ulong map_to_sysmem(void *ptr)
{
	return (ulong)(uintptr_t)ptr;
}
#define MKIMAGE_TMPFILE_SUFFIX ".tmp"
#define MKIMAGE_MAX_TMPFILE_LEN 256
#define MKIMAGE_DEFAULT_DTC_OPTIONS "-I dts -O dtb -p 500"
#define MKIMAGE_MAX_DTC_CMDLINE_LEN 512
#endif /* _MKIIMAGE_H_ */
|
FedorUporov/gridgain | modules/h2/src/main/java/org/h2/Driver.java | /*
* Copyright 2004-2019 H2 Group. Multiple-Licensed under the MPL 2.0,
* and the EPL 1.0 (http://h2database.com/html/license.html).
* Initial Developer: H2 Group
*/
package org.h2;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.DriverPropertyInfo;
import java.sql.SQLException;
import java.util.Properties;
import java.util.logging.Logger;
import org.h2.engine.Constants;
import org.h2.jdbc.JdbcConnection;
import org.h2.message.DbException;
import org.h2.upgrade.DbUpgrade;
/**
* The database driver. An application should not use this class directly. The
* only thing the application needs to do is load the driver. This can be done
* using Class.forName. To load the driver and open a database connection, use
* the following code:
*
* <pre>
* Class.forName("org.h2.Driver");
* Connection conn = DriverManager.getConnection(
* "jdbc:h2:˜/test", "sa", "sa");
* </pre>
*/
public class Driver implements java.sql.Driver, JdbcDriverBackwardsCompat {

    // Singleton instance registered with java.sql.DriverManager.
    private static final Driver INSTANCE = new Driver();

    // Pseudo-URL used by user-defined functions running inside the engine to
    // obtain the connection of the current thread (see setDefaultConnection).
    private static final String DEFAULT_URL = "jdbc:default:connection";

    // Per-thread default connection for user-defined functions.
    private static final ThreadLocal<Connection> DEFAULT_CONNECTION =
            new ThreadLocal<>();

    // Guards against double (de)registration with the DriverManager.
    private static boolean registered;

    // Register the driver as soon as the class is loaded (Class.forName).
    static {
        load();
    }

    /**
     * Open a database connection.
     * This method should not be called by an application.
     * Instead, the method DriverManager.getConnection should be used.
     *
     * @param url the database URL
     * @param info the connection properties
     * @return the new connection or null if the URL is not supported
     */
    @Override
    public Connection connect(String url, Properties info) throws SQLException {
        try {
            if (info == null) {
                info = new Properties();
            }
            // Per JDBC contract, return null (not an exception) for URLs
            // this driver does not understand.
            if (!acceptsURL(url)) {
                return null;
            }
            if (url.equals(DEFAULT_URL)) {
                return DEFAULT_CONNECTION.get();
            }
            // A non-null result means the database was opened via the
            // version-1.1 upgrade path.
            Connection c = DbUpgrade.connectOrUpgrade(url, info);
            if (c != null) {
                return c;
            }
            return new JdbcConnection(url, info);
        } catch (Exception e) {
            throw DbException.toSQLException(e);
        }
    }

    /**
     * Check if the driver understands this URL.
     * This method should not be called by an application.
     *
     * @param url the database URL
     * @return if the driver understands the URL
     */
    @Override
    public boolean acceptsURL(String url) {
        if (url != null) {
            if (url.startsWith(Constants.START_URL)) {
                return true;
            } else if (url.equals(DEFAULT_URL)) {
                // Only valid when a default connection is bound to this thread.
                return DEFAULT_CONNECTION.get() != null;
            }
        }
        return false;
    }

    /**
     * Get the major version number of the driver.
     * This method should not be called by an application.
     *
     * @return the major version number
     */
    @Override
    public int getMajorVersion() {
        return Constants.VERSION_MAJOR;
    }

    /**
     * Get the minor version number of the driver.
     * This method should not be called by an application.
     *
     * @return the minor version number
     */
    @Override
    public int getMinorVersion() {
        return Constants.VERSION_MINOR;
    }

    /**
     * Get the list of supported properties.
     * This method should not be called by an application.
     *
     * @param url the database URL
     * @param info the connection properties
     * @return a zero length array
     */
    @Override
    public DriverPropertyInfo[] getPropertyInfo(String url, Properties info) {
        return new DriverPropertyInfo[0];
    }

    /**
     * Check if this driver is compliant to the JDBC specification.
     * This method should not be called by an application.
     *
     * @return true
     */
    @Override
    public boolean jdbcCompliant() {
        return true;
    }

    /**
     * [Not supported]
     */
    @Override
    public Logger getParentLogger() {
        return null;
    }

    /**
     * INTERNAL
     * Idempotently registers this driver with the DriverManager.
     */
    public static synchronized Driver load() {
        try {
            if (!registered) {
                registered = true;
                DriverManager.registerDriver(INSTANCE);
            }
        } catch (SQLException e) {
            DbException.traceThrowable(e);
        }
        return INSTANCE;
    }

    /**
     * INTERNAL
     * Idempotently removes this driver from the DriverManager.
     */
    public static synchronized void unload() {
        try {
            if (registered) {
                registered = false;
                DriverManager.deregisterDriver(INSTANCE);
            }
        } catch (SQLException e) {
            DbException.traceThrowable(e);
        }
    }

    /**
     * INTERNAL
     * Sets, on a per-thread basis, the default-connection for
     * user-defined functions.
     */
    public static void setDefaultConnection(Connection c) {
        if (c == null) {
            DEFAULT_CONNECTION.remove();
        } else {
            DEFAULT_CONNECTION.set(c);
        }
    }

    /**
     * INTERNAL
     */
    public static void setThreadContextClassLoader(Thread thread) {
        // Apache Tomcat: use the classloader of the driver to avoid the
        // following log message:
        // org.apache.catalina.loader.WebappClassLoader clearReferencesThreads
        // SEVERE: The web application appears to have started a thread named
        // ... but has failed to stop it.
        // This is very likely to create a memory leak.
        try {
            thread.setContextClassLoader(Driver.class.getClassLoader());
        } catch (Throwable t) {
            // ignore — setting the context classloader may be forbidden by
            // a SecurityManager; registration still works without it
        }
    }
}
|
fizzday/gopass | rules/init.go | <reponame>fizzday/gopass
package rules
import "github.com/gohouse/gopass/gopassRuler"
// defaultMessages holds the default validation-failure messages, keyed first
// by language code and then by rule name.  "{field}" and "{args}" are
// placeholder tokens substituted by the rule engine at format time.
var defaultMessages = map[string]interface{}{
	// Simplified Chinese (zh-CN) messages, one per rule (required, between, ...).
	gopassRuler.LangZHCN: map[string]interface{}{
		RequiredString:      "参数{field}为必须参数",
		BetweenString:       "参数{field}的值必须在({args})之间",
		LengthString:        "参数{field}的长度必须为{args}",
		MinString:           "参数{field}的值不能小于{args}",
		MaxString:           "参数{field}的值不能大于{args}",
		InString:            "参数{field}的值必须在给定的值({args})中",
		NumericString:       "参数{field}的值必须是数字",
		IntegerString:       "参数{field}的值必须是整数",
		FloatString:         "参数{field}的值必须是浮点数",
		UrlString:           "参数{field}的值必须是 url 地址",
		EmaillString:        "参数{field}的值必须是邮箱地址",
		ChineseMobileString: "参数{field}的值必须是中国大陆手机号",
		JsonString:          "参数{field}的值必须是 json 格式",
		XmlString:           "参数{field}的值必须是 xml 格式",
		DateString:          "参数{field}的值必须是日期格式",
		RegexpString:        "参数{field}的值不匹配",
	},
	// US English (en-US) messages.
	gopassRuler.LangENUS: map[string]interface{}{
		RequiredString:      "param {field} needed",
		BetweenString:       "param {field} allow range ({args})",
		LengthString:        "param {field} length must {args}",
		MinString:           "param {field} must greater or equal than {args}",
		MaxString:           "param {field} must less or equal than {args}",
		InString:            "param {field} must in ({args})",
		NumericString:       "param {field} must be numeric",
		IntegerString:       "param {field} must be integer",
		FloatString:         "param {field} must be float",
		UrlString:           "param {field} must be a url",
		EmaillString:        "param {field} must be a email address",
		ChineseMobileString: "param {field} must be a chinese mobile number",
		JsonString:          "param {field} must be json format",
		XmlString:           "param {field} must be xml format",
		DateString:          "param {field} must be date format",
		RegexpString:        "param {field} value is not matched",
	},
	// Traditional Chinese (zh-TW): only a partial set is provided.
	gopassRuler.LangZHTR: map[string]interface{}{
		RequiredString: "參數{field}不能為空",
		BetweenString:  "參數{field}必須在({args})之間",
	},
}
// init registers every language's default messages with the rule engine
// when the package is loaded.
func init() {
	// Register all default prompt messages.
	gopassRuler.RegisterMessageMulti(defaultMessages)
}
// GetDefaultMessages returns the default error-prompt messages.
// NOTE(review): the internal package-level map is returned directly, so
// callers can mutate the shared defaults — confirm whether that is intended.
func GetDefaultMessages() map[string]interface{} {
	return defaultMessages
}
|
darecoder1999/Wikidata-Toolkit | wdtk-datamodel/src/test/java/org/wikidata/wdtk/datamodel/implementation/MediaInfoDocumentImplTest.java | <gh_stars>0
/*
* #%L
* Wikidata Toolkit Data Model
* %%
* Copyright (C) 2014 Wikidata Toolkit Developers
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package org.wikidata.wdtk.datamodel.implementation;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import org.junit.jupiter.api.Test;
import org.wikidata.wdtk.datamodel.helpers.DatamodelMapper;
import org.wikidata.wdtk.datamodel.interfaces.Claim;
import org.wikidata.wdtk.datamodel.interfaces.DatatypeIdValue;
import org.wikidata.wdtk.datamodel.interfaces.MediaInfoDocument;
import org.wikidata.wdtk.datamodel.interfaces.MediaInfoIdValue;
import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue;
import org.wikidata.wdtk.datamodel.interfaces.PropertyDocument;
import org.wikidata.wdtk.datamodel.interfaces.Statement;
import org.wikidata.wdtk.datamodel.interfaces.StatementGroup;
import org.wikidata.wdtk.datamodel.interfaces.StatementRank;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
public class MediaInfoDocumentImplTest {
private final ObjectMapper mapper = new DatamodelMapper("http://example.com/entity/");
private final MediaInfoIdValue mid = new MediaInfoIdValueImpl("M42", "http://example.com/entity/");
private final Statement s = new StatementImpl("MyId", StatementRank.NORMAL,
new SomeValueSnakImpl(new PropertyIdValueImpl("P42", "http://example.com/entity/")),
Collections.emptyList(), Collections.emptyList(), mid);
private final List<StatementGroup> statementGroups = Collections.singletonList(
new StatementGroupImpl(Collections.singletonList(s))
);
private final MonolingualTextValue label = new TermImpl("en", "label");
private final List<MonolingualTextValue> labelList = Collections.singletonList(label);
private final MediaInfoDocument mi1 = new MediaInfoDocumentImpl(mid, labelList, statementGroups,1234);
private final MediaInfoDocument mi2 = new MediaInfoDocumentImpl(mid, labelList, statementGroups, 1234);
private final String JSON_MEDIA_INFO_LABEL = "{\"type\":\"mediainfo\",\"id\":\"M42\",\"labels\":{\"en\":{\"language\":\"en\",\"value\":\"label\"}},\"claims\":{}}";
private final String JSON_MEDIA_INFO_DESCRIPTION = "{\"type\":\"mediainfo\",\"id\":\"M42\",\"labels\":{},\"descriptions\":{},\"statements\":{}}";
private final String JSON_MEDIA_INFO_STATEMENTS = "{\"type\":\"mediainfo\",\"id\":\"M42\",\"labels\":{},\"statements\":{\"P42\":[{\"rank\":\"normal\",\"id\":\"MyId\",\"mainsnak\":{\"property\":\"P42\",\"snaktype\":\"somevalue\"},\"type\":\"statement\"}]}}";
private final String JSON_MEDIA_INFO_CLAIMS = "{\"type\":\"mediainfo\",\"id\":\"M42\",\"labels\":{},\"claims\":{\"P42\":[{\"rank\":\"normal\",\"id\":\"MyId\",\"mainsnak\":{\"property\":\"P42\",\"snaktype\":\"somevalue\"},\"type\":\"statement\"}]}}";
private final String JSON_MEDIA_INFO_EMPTY_ARRAYS = "{\"type\":\"mediainfo\",\"id\":\"M42\",\"labels\":[],\"descriptions\":[],\"statements\":[],\"sitelinks\":[]}";
/** The constructor must expose exactly the id, labels and statements it was given. */
@Test
public void fieldsAreCorrect() {
    // JUnit's assertEquals signature is (expected, actual); the original
    // passed the arguments swapped, which produces misleading failure
    // messages ("expected <actual> but was <expected>").
    assertEquals(mid, mi1.getEntityId());
    assertEquals(Collections.singletonMap(label.getLanguageCode(), label), mi1.getLabels());
    assertEquals(statementGroups, mi1.getStatementGroups());
}
@Test
public void findLabels() {
assertEquals("label", mi1.findLabel("en"));
assertNull(mi1.findLabel("ja"));
}
/**
 * equals() must be value-based: documents built from the same parts are
 * equal; changing any single component (labels, statement groups, revision
 * id, entity id) or comparing against a different document type breaks
 * equality.
 */
@Test
public void equalityBasedOnContent() {
// Variants differing from mi1 in exactly one component each.
MediaInfoDocument irDiffLabel = new MediaInfoDocumentImpl(mid, Collections.emptyList(), statementGroups, 1234);
MediaInfoDocument irDiffStatementGroups = new MediaInfoDocumentImpl(mid,
labelList,
Collections.emptyList(), 1234);
MediaInfoDocument irDiffRevisions = new MediaInfoDocumentImpl(mid,
labelList,
statementGroups, 1235);
// A different document type with otherwise similar data.
PropertyDocument pr = new PropertyDocumentImpl(
new PropertyIdValueImpl("P42", "foo"),
labelList, Collections.emptyList(),
Collections.emptyList(), Collections.emptyList(),
new DatatypeIdImpl(DatatypeIdValue.DT_STRING), 1234);
// we need to use empty lists of Statement groups to test inequality
// based on different item ids with all other data being equal
MediaInfoDocument irDiffMediaInfoIdValue = new MediaInfoDocumentImpl(
new MediaInfoIdValueImpl("M23", "http://example.org/"),
labelList,
Collections.emptyList(), 1234);
assertEquals(mi1, mi1);
assertEquals(mi1, mi2);
assertNotEquals(mi1, irDiffLabel);
assertNotEquals(mi1, irDiffStatementGroups);
assertNotEquals(mi1, irDiffRevisions);
assertNotEquals(irDiffStatementGroups, irDiffMediaInfoIdValue);
assertNotEquals(mi1, pr);
assertNotEquals(mi1, null);
assertNotEquals(mi1, this);
}
/**
 * Equal documents must produce equal hash codes (equals/hashCode contract).
 */
@Test
public void hashBasedOnContent() {
assertEquals(mi1.hashCode(), mi2.hashCode());
}

/**
 * A null entity id must be rejected at construction time.
 */
@Test
public void idNotNull() {
assertThrows(NullPointerException.class, () -> new MediaInfoDocumentImpl(null, Collections.emptyList(), statementGroups, 1234));
}

/**
 * A null label list is tolerated and normalized to an empty label map.
 */
@Test
public void labelsCanBeNull() {
MediaInfoDocument doc = new MediaInfoDocumentImpl(mid, null, statementGroups, 1234);
assertTrue(doc.getLabels().isEmpty());
}

/**
 * A null statement-group list is tolerated and normalized to empty.
 */
@Test
public void statementGroupsCanBeNull() {
MediaInfoDocument doc = new MediaInfoDocumentImpl(mid, Collections.emptyList(), null, 1234);
assertTrue(doc.getStatementGroups().isEmpty());
}
/**
 * All statement groups of one document must share the document's subject;
 * mixing statements about M42 and M23 must be rejected.
 */
@Test
public void statementGroupsUseSameSubject() {
// A statement about a different MediaInfo entity (M23).
MediaInfoIdValue mid2 = new MediaInfoIdValueImpl("M23", "http://example.org/");
Statement s2 = new StatementImpl("MyId", StatementRank.NORMAL,
new SomeValueSnakImpl(new PropertyIdValueImpl("P42", "http://wikibase.org/entity/")),
Collections.emptyList(), Collections.emptyList(), mid2);
StatementGroup sg2 = new StatementGroupImpl(Collections.singletonList(s2));
List<StatementGroup> statementGroups2 = new ArrayList<>();
statementGroups2.add(statementGroups.get(0));
statementGroups2.add(sg2);
assertThrows(IllegalArgumentException.class, () -> new MediaInfoDocumentImpl(mid, Collections.emptyList(), statementGroups2, 1234));
}
/**
 * getAllStatements flattens the statement groups; here it must yield
 * exactly the single fixture statement.
 */
@Test
public void iterateOverAllStatements() {
Iterator<Statement> statements = mi1.getAllStatements();
assertTrue(statements.hasNext());
assertEquals(s, statements.next());
assertFalse(statements.hasNext());
}

/**
 * withRevisionId produces a copy with the new revision; restoring the
 * original revision yields a document equal to the original.
 */
@Test
public void testWithRevisionId() {
assertEquals(1235L, mi1.withRevisionId(1235L).getRevisionId());
assertEquals(mi1, mi1.withRevisionId(1325L).withRevisionId(mi1.getRevisionId()));
}

/**
 * withLabel adds a label in a language the document did not have yet.
 */
@Test
public void testWithLabelInNewLanguage() {
MonolingualTextValue newLabel = new MonolingualTextValueImpl(
"MediaInfo M42", "fr");
MediaInfoDocument withLabel = mi1.withLabel(newLabel);
assertEquals("MediaInfo M42", withLabel.findLabel("fr"));
}

/**
 * withStatement adds a statement that was not present before; the
 * property/value pair must only be found after the addition.
 */
@Test
public void testAddStatement() {
Statement fresh = new StatementImpl("MyFreshId", StatementRank.NORMAL,
new SomeValueSnakImpl(new PropertyIdValueImpl("P29", "http://example.com/entity/")),
Collections.emptyList(), Collections.emptyList(), mid);
Claim claim = fresh.getClaim();
assertFalse(mi1.hasStatementValue(
claim.getMainSnak().getPropertyId(),
claim.getValue()));
MediaInfoDocument withStatement = mi1.withStatement(fresh);
assertTrue(withStatement.hasStatementValue(
claim.getMainSnak().getPropertyId(),
claim.getValue()));
}

/**
 * withoutStatementIds removes the identified statement, yielding a
 * document different from the original.
 */
@Test
public void testDeleteStatements() {
Statement toRemove = statementGroups.get(0).getStatements().get(0);
MediaInfoDocument withoutStatement = mi1.withoutStatementIds(Collections.singleton(toRemove.getStatementId()));
assertNotEquals(withoutStatement, mi1);
}
/**
 * Serialization: a document with one label produces the expected JSON
 * (statements serialized under the "claims" key).
 */
@Test
public void testLabelsToJson() throws JsonProcessingException {
MediaInfoDocumentImpl document = new MediaInfoDocumentImpl(mid, labelList, Collections.emptyList(), 0);
JsonComparator.compareJsonStrings(JSON_MEDIA_INFO_LABEL, mapper.writeValueAsString(document));
}

/**
 * Deserialization: the label JSON round-trips back to an equal document.
 */
@Test
public void testLabelToJava() throws IOException {
MediaInfoDocumentImpl document = new MediaInfoDocumentImpl(mid,
labelList, Collections.emptyList(), 0);
assertEquals(document, mapper.readValue(JSON_MEDIA_INFO_LABEL, EntityDocumentImpl.class));
}

/**
 * Deserialization: an explicit empty "descriptions" map is accepted even
 * though MediaInfo documents carry no descriptions.
 */
@Test
public void testDescriptionsToJava() throws IOException {
MediaInfoDocumentImpl document = new MediaInfoDocumentImpl(mid,
Collections.emptyList(), Collections.emptyList(), 0);
assertEquals(document, mapper.readValue(JSON_MEDIA_INFO_DESCRIPTION, EntityDocumentImpl.class));
}

/**
 * Serialization: statements are written under the "claims" key.
 */
@Test
public void testStatementsToJson() throws JsonProcessingException {
MediaInfoDocumentImpl document = new MediaInfoDocumentImpl(mid, Collections.emptyList(), statementGroups, 0);
JsonComparator.compareJsonStrings(JSON_MEDIA_INFO_CLAIMS, mapper.writeValueAsString(document));
}

/**
 * Deserialization: the "statements" key variant is accepted.
 */
@Test
public void testStatementsToJava() throws IOException {
MediaInfoDocumentImpl document = new MediaInfoDocumentImpl(mid, Collections.emptyList(), statementGroups, 0);
assertEquals(document, mapper.readValue(JSON_MEDIA_INFO_STATEMENTS, MediaInfoDocumentImpl.class));
}

/**
 * Deserialization: the legacy "claims" key variant is accepted too.
 */
@Test
public void testStatementsNamedClaimsToJava() throws IOException {
MediaInfoDocumentImpl document = new MediaInfoDocumentImpl(mid, Collections.emptyList(), statementGroups, 0);
assertEquals(document, mapper.readValue(JSON_MEDIA_INFO_CLAIMS, MediaInfoDocumentImpl.class));
}

/**
 * Checks tolerance of the erroneous serialization of empty objects as
 * empty arrays (requires ACCEPT_EMPTY_ARRAY_AS_NULL_OBJECT).
 */
@Test
public void testEmptyArraysForTerms() throws IOException {
MediaInfoDocumentImpl document = new MediaInfoDocumentImpl(mid, Collections.emptyList(), Collections.emptyList(), 0);
assertEquals(document, mapper.readerFor(MediaInfoDocumentImpl.class)
.with(DeserializationFeature.ACCEPT_EMPTY_ARRAY_AS_NULL_OBJECT)
.readValue(JSON_MEDIA_INFO_EMPTY_ARRAYS)
);
}
}
|
manjunkoh/my-website | node_modules/airbnb-prop-types/build/disallowedIf.js | "use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports["default"] = disallowedIf;
var _wrapValidator = _interopRequireDefault(require("./helpers/wrapValidator"));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
/**
 * Builds a prop-type validator that rejects `propName` whenever
 * `otherPropName` is present AND satisfies `otherPropType`.
 * (Compiled/ES5 build output of airbnb-prop-types' disallowedIf helper;
 * the argument-array juggling below replaces ES6 rest parameters.)
 */
function disallowedIf(propType, otherPropName, otherPropType) {
if (typeof propType !== 'function' || typeof propType.isRequired !== 'function') {
throw new TypeError('a propType validator is required; propType validators must also provide `.isRequired`');
}
if (typeof otherPropName !== 'string') {
throw new TypeError('other prop name must be a string');
}
if (typeof otherPropType !== 'function') {
throw new TypeError('other prop type validator is required');
}
// Required flavor: the prop must be present AND not be disallowed.
function disallowedIfRequired(props, propName, componentName) {
for (var _len = arguments.length, rest = new Array(_len > 3 ? _len - 3 : 0), _key = 3; _key < _len; _key++) {
rest[_key - 3] = arguments[_key];
}
// The prop must first pass its own validator.
var error = propType.isRequired.apply(propType, [props, propName, componentName].concat(rest));
if (error) {
return error;
}
// Nothing to disallow when the other prop is absent.
if (props[otherPropName] == null) {
return null;
}
// If the other prop fails its validator the combination is allowed.
var otherError = otherPropType.apply(void 0, [props, otherPropName, componentName].concat(rest));
if (otherError) {
return null;
}
return new Error("prop \u201C".concat(propName, "\u201D is disallowed when \u201C").concat(otherPropName, "\u201D matches the provided validator"));
}
// Optional flavor: absent prop is always fine; otherwise defer to the
// required check above.
var validator = function disallowedIfPropType(props, propName) {
if (props[propName] == null) {
return null;
}
for (var _len2 = arguments.length, rest = new Array(_len2 > 2 ? _len2 - 2 : 0), _key2 = 2; _key2 < _len2; _key2++) {
rest[_key2 - 2] = arguments[_key2];
}
return disallowedIfRequired.apply(void 0, [props, propName].concat(rest));
};
validator.isRequired = disallowedIfRequired;
return (0, _wrapValidator["default"])(validator, 'disallowedIf', {
propType: propType,
otherPropName: otherPropName,
otherPropType: otherPropType
});
}
//# sourceMappingURL=disallowedIf.js.map |
a1209023760/code-java | 008-hutool/hutool-core/src/main/java/com/keqi/hutool/core/common/Premiss.java | package com.keqi.hutool.core.common;
import lombok.Data;
/**
 * Permission data holder. Lombok's {@code @Data} generates the getter,
 * setter, equals/hashCode and toString for {@code premissName}.
 */
@Data
public class Premiss {
// Name of the permission this object represents.
private String premissName;
}
|
danielmicaletti/Procurement-Order-Management-App | static/build/development/scripts/accounts/services/company.service.js |
(function () {
  'use strict';

  angular
    .module('accounts.services')
    .factory('Company', Company);

  Company.$inject = ['$http', '$q'];

  /**
   * Company service: thin wrapper around the companies/addresses REST API.
   * Every method returns a promise resolving to response.data, or rejecting
   * with an 'Error <status>' message.
   */
  function Company($http, $q) {
    var Company = {
      getOptiz: getOptiz,
      getAll: getAll,
      get: get,
      create: create,
      destroy: destroy,
      update: update,
      newAddress: newAddress,
      updAddress: updAddress,
      destroyAddress: destroyAddress,
    };

    return Company;

    // Unwrap the HTTP response body for callers (debug logging removed).
    function generalCallbackSuccess(response) {
      return response.data;
    }

    // Normalize failures into a rejected promise carrying the HTTP status.
    function generalCallbackError(response) {
      return $q.reject('Error ' + response.status + '');
    }

    function getOptiz() {
      // Fixed: was 'api/v1/optiz' (no leading slash), which resolved
      // relative to the current page path and broke outside the site root.
      // All endpoints are now consistently root-relative.
      return $http.get('/api/v1/optiz')
        .then(generalCallbackSuccess)
        .catch(generalCallbackError);
    }

    function getAll() {
      return $http.get('/api/v1/companies/')
        .then(generalCallbackSuccess)
        .catch(generalCallbackError);
    }

    function get(companyId) {
      return $http.get('/api/v1/companies/' + companyId + '/')
        .then(generalCallbackSuccess)
        .catch(generalCallbackError);
    }

    function create(company) {
      return $http.post('/api/v1/companies/', company)
        .then(generalCallbackSuccess)
        .catch(generalCallbackError);
    }

    function destroy(companyId) {
      return $http.delete('/api/v1/companies/' + companyId + '/')
        .then(generalCallbackSuccess)
        .catch(generalCallbackError);
    }

    function update(companyId, company) {
      return $http.put('/api/v1/companies/' + companyId + '/', company)
        .then(generalCallbackSuccess)
        .catch(generalCallbackError);
    }

    function newAddress(addr) {
      return $http.post('/api/v1/addresses/', addr)
        .then(generalCallbackSuccess)
        .catch(generalCallbackError);
    }

    function updAddress(addr) {
      return $http.put('/api/v1/addresses/' + addr.id + '/', addr)
        .then(generalCallbackSuccess)
        .catch(generalCallbackError);
    }

    function destroyAddress(addr) {
      // Fixed: was 'api/v1/addresses/…' without the leading slash.
      return $http.delete('/api/v1/addresses/' + addr + '/')
        .then(generalCallbackSuccess)
        .catch(generalCallbackError);
    }
  }
})();
korjaa/mbed-os | features/nanostack/sal-stack-nanostack/source/6LoWPAN/Thread/thread_config.h | <reponame>korjaa/mbed-os
/*
* Copyright (c) 2015-2017, Arm Limited and affiliates.
* SPDX-License-Identifier: BSD-3-Clause
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. Neither the name of the copyright holder nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef THREAD_CONFIG_H_
#define THREAD_CONFIG_H_
/**
* Thread stack configuration values. All constants that are specified in Thread specification can be found from thread_constants.h
*/
#include "thread_constants.h"
/**
* Threshold for when the mac frame counter is stored to nvm.
*/
#define MAC_FRAME_COUNTER_LIMIT 1000
/**
* Threshold for when the mle frame counter is stored to nvm.
*/
#define MLE_FRAME_COUNTER_LIMIT 100
/**
* Organisational Unique Identifier for ARM.
*/
#define THREAD_ARM_OUI 0x0002f7
/**
* Build number value of 12 bit.
*/
#define THREAD_BUILD_NUMBER 100
/**
* Stack revision number 4 bit.
*/
#define THREAD_REVISION_NUMBER 100
/**
* Stack Major version number 4 bit.
*/
#define THREAD_VERSION_MAJ 1
/**
* Stack Minor version number 4 bit.
*/
#define THREAD_VERSION_MIN 0
/**
* Default vendor name.
*/
#define THREAD_VENDOR_NAME "ARM ltd"
/**
* Default vendor model.
*/
#define THREAD_VENDOR_MODEL "MbedOS"
/**
* Default SW Version.
*/
#define THREAD_SW_VERSION "1.1"
/**
* Protocol ID in beacon for thread network discovery.
*/
#define THREAD_PROTOCOL_ID 3
/**
* Protocol version present in version tlvs and in beacons
*/
#define THREAD_PROTOCOL_VERSION 2
/**
* Default Maximum child count.
*/
#define THREAD_MAX_CHILD_COUNT 26
/**
* Default Maximum child id count for router parent.
*/
#define THREAD_MAX_CHILD_ID_COUNT 511
/**
* Default Link lifetime for Router and Host.
*/
#define THREAD_DEFAULT_LINK_LIFETIME 100
/**
* Default network instance weighting (average).
*/
#define THREAD_DEFAULT_WEIGHTING 64
/**
 * Context ID re-use timeout: 48 hours, expressed in seconds.
 */
#define THREAD_CONTEXT_ID_REUSE_TIMEOUT (48 * 3600)
/**
* Timeout for Leader to set commissioner session to obsolete.
*/
#define COMMISSIONER_OBSOLETE_TIMEOUT (THREAD_COMMISSIONER_KEEP_ALIVE_INTERVAL)
/**
* Timeout for Leader to remove commissioner session.
* This is a delay before commissioner is removed after Thread defined keep alive value.
*/
#define COMMISSIONER_REMOVE_TIMEOUT (1000)
/**
* Router selection jitter
* default 120
*/
#define THREAD_ROUTER_SELECTION_JITTER 120
/**
* Maximum parallel Link requests
* default 4
*/
#define THREAD_MAX_PARALLEL_MLE_LINK_REQUEST 4
/**
* Number of free MLE entries for parent priority adjustment
* default 4
*/
#define THREAD_FREE_MLE_ENTRY_THRESHOLD 4
/**
* Maximum parallel Parent Requests
* default 8
*/
#define THREAD_MAX_PARALLEL_MLE_PARENT_REQUEST 8
/**
* Minimum pending delay for commissioner
* default 30
*/
#define THREAD_DELAY_TIMER_MINIMAL_SECONDS (30)
/**
 * Default pending delay for end device
 * default 300
 */
#define THREAD_DELAY_TIMER_DEFAULT_SECONDS (300)
/**
* Minimum pending delay for end device
* default 1800
*/
#define THREAD_MIN_DELAY_TIMER_THREAD_DEVICE_MASTER_KEY_CHANGE_SECONDS (1800)
/**
* Maximum pending delay timer in seconds
*/
#define THREAD_MAX_DELAY_TIMER_SECONDS (72 * 3600)
/**
* Maximum neighbor links for reeds and end devices
*/
#define THREAD_REED_AND_END_DEVICE_NEIGHBOR_LINKS 5
/**
* Maximum data size of commissioner data set parameter - Border agent locator, Commissioner session id and Steering data TLVs are
* minimum required TLVs at this point
*/
#define THREAD_MAX_COMMISSIONER_DATA_SIZE 50
/**
* Trace group for THCI traces
*/
#define THCI_TRACE_GROUP "THCI"
#define THREAD_MAX_FRAME_RETRIES 3
/**
* Thread discovery timeout for discovery response
*/
#define THREAD_DISCOVERY_TIMEOUT 300
#define THREAD_DISCOVERY_MAX_JITTER 250
/**
* Interval between increments of ID sequence number by the Leader.
*/
#define ID_SEQUENCE_PERIOD 10 /* 10 second */
/**
* Delay before new network data is being updated after update (seconds).
*/
#define THREAD_COMMISSION_DATA_UPDATE_DELAY 1
/**
* SED configuration (used in Connectivity TLV).
* -Buffer capacity in octets for all IPv6 datagrams destined to a given SED
* -Queue capacity in IPv6 datagrams destined to a given SED
*/
#define THREAD_SED_BUFFER_SIZE THREAD_SED_BUFFER_MIN_SIZE
#define THREAD_SED_DATAGRAM_COUNT THREAD_SED_DATAGRAM_MIN_COUNT
#define THREAD_DISCOVERY_SECURITY_KEY_INDEX 0xff
#define THREAD_DISCOVERY_SECURITY_KEY_SOURCE 0xffffffff
/**
* The threshold value that must be exceed by a packet to be
* considered big. Thread conformance document specifies the
* threshold as 106.
*/
#define THREAD_INDIRECT_BIG_PACKET_THRESHOLD 106
/**
* Thread conformance document says: "A Thread Router MUST be able
* to buffer at least one (1) 1280-octet IPv6 datagram destined for
* an attached SED" and "A Thread Router MUST be able to buffer at
* least one (1) 106-octet IPv6 datagram per attached SED".
*
* The defines below tell how many small (i.e. up to the big packet
* threshold) packets per sleepy child and big (i.e. over the big
* packet threshold) packets total we buffer in the indirect TX
* queue. The minimum values are 1 for both, but here we use larger
* value for better performance.
*/
#define THREAD_INDIRECT_BIG_PACKETS_TOTAL 10
#define THREAD_INDIRECT_SMALL_PACKETS_PER_CHILD 2
/**
* Maximum number of MTD children, default 16
* Thread conformance minimum is 10
*/
#define THREAD_MAX_MTD_CHILDREN 16
/**
* Maximum number of sleepy children, default 10
* Thread conformance minimum is 6
*/
#define THREAD_MAX_SED_CHILDREN 10
/**
* Base value for blacklist interval value
*/
#define THREAD_BLACKLIST_TIMER_TIMEOUT 4
/**
* Maximum value for blacklist interval value
*/
#define THREAD_BLACKLIST_TIMER_MAX_TIMEOUT 500
/**
* Base value for blacklist entry lifetime
*/
#define THREAD_BLACKLIST_ENTRY_LIFETIME 8
/**
* Maximum number of entries in the blacklist table
*/
#define THREAD_BLACKLIST_ENTRY_MAX_NBR 15
/**
* Number of extra entries to purge from the blacklist table
*/
#define THREAD_BLACKLIST_PURGE_NBR 3
/**
* Timeout value for purging extra blacklist entries
*/
#define THREAD_BLACKLIST_PURGE_TIMER_TIMEOUT 6
/**
* Base value for commission blacklist interval value
*/
#define THREAD_COMM_BLACKLIST_TIMER_TIMEOUT 60
/**
* Maximum value for commission blacklist interval value
*/
#define THREAD_COMM_BLACKLIST_TIMER_MAX_TIMEOUT 600
/**
* Base value for commission blacklist entry lifetime
*/
#define THREAD_COMM_BLACKLIST_ENTRY_LIFETIME 600
/**
* Delay value for sending proactive address notification after router upgrade
*/
#define THREAD_PROACTIVE_AN_SEND_DELAY 2
/*
* Parent response wait time (in 100ms) when "R" bit is set in scan mask TLV (rounded up from 0.75 seconds)
*/
#define THREAD_PARENT_REQ_SCANMASK_R_TIMEOUT 9
/*
* Parent response wait time (in 100ms) when both "R" and "E" bit is set in scan mask TLV (rounded up from 1.25 seconds)
*/
#define THREAD_PARENT_REQ_SCANMASK_RE_TIMEOUT 15
/*
* When BBR is started, router address is requested from leader with following status
*/
#define THREAD_BBR_ROUTER_ID_REQUEST_STATUS THREAD_COAP_STATUS_TLV_HAVE_CHILD_ID_REQUEST
/**
* Build time flag to enable THCI special traces for test harness purposes
*/
#ifdef THREAD_THCI_SUPPORT
#define thci_trace(...) mbed_tracef(TRACE_LEVEL_INFO, THCI_TRACE_GROUP, __VA_ARGS__ );
#else
#define thci_trace(...)
#endif
#endif /* THREAD_CONFIG_H_ */
|
UnioDex/UnioProtections | src/main/java/me/uniodex/unioprotections/listeners/SmallFixListeners.java | <gh_stars>1-10
package me.uniodex.unioprotections.listeners;
import me.uniodex.unioprotections.UnioProtections;
import me.uniodex.unioprotections.managers.CheckManager.Check;
import org.bukkit.Bukkit;
import org.bukkit.Material;
import org.bukkit.entity.Player;
import org.bukkit.event.EventHandler;
import org.bukkit.event.EventPriority;
import org.bukkit.event.Listener;
import org.bukkit.event.block.BlockDispenseEvent;
import org.bukkit.event.inventory.InventoryClickEvent;
import org.bukkit.event.inventory.InventoryType;
import org.bukkit.event.player.PlayerJoinEvent;
import org.bukkit.inventory.ItemStack;
public class SmallFixListeners implements Listener {

    private final UnioProtections plugin;

    /**
     * Registers this listener with Bukkit on construction.
     *
     * @param plugin owning plugin, used for check lookups and messages
     */
    public SmallFixListeners(UnioProtections plugin) {
        this.plugin = plugin;
        Bukkit.getPluginManager().registerEvents(this, plugin);
    }

    /**
     * Forces a player out of any vehicle they were still riding when they
     * log in, if the SMALLFIX_LEAVEVEHICLEONJOIN check is enabled.
     */
    @EventHandler
    public void onJoin(PlayerJoinEvent event) {
        Player player = event.getPlayer();
        if (plugin.getCheckManager().isCheckEnabled(Check.SMALLFIX_LEAVEVEHICLEONJOIN)) {
            if (player.getVehicle() != null) {
                player.leaveVehicle();
            }
        }
    }

    /**
     * Prevents players from placing item frames into dispenser inventories
     * and informs them why the click was cancelled.
     */
    @EventHandler(priority = EventPriority.LOW, ignoreCancelled = true)
    public void onInventoryClick(InventoryClickEvent event) {
        if (!plugin.getCheckManager().isCheckEnabled(Check.SMALLFIX_DISALLOWITEMFRAMEINDISPENSER)) return;
        if (event.getInventory().getType() == InventoryType.DISPENSER) {
            Player p = (Player) event.getWhoClicked();
            ItemStack item = event.getCurrentItem();
            if ((item != null) && (item.getType() == Material.ITEM_FRAME)) {
                event.setCancelled(true);
                p.sendMessage(plugin.getMessage("smallFixes.dispenserFix"));
            }
        }
    }

    /**
     * Cancels a dispenser actually dispensing an item frame.
     */
    @EventHandler(priority = EventPriority.LOW, ignoreCancelled = true)
    public void onDispense(BlockDispenseEvent event) {
        if (!plugin.getCheckManager().isCheckEnabled(Check.SMALLFIX_DISALLOWITEMFRAMEINDISPENSER)) return;
        // Bug fix: getBlock() is the dispenser block itself, whose type can
        // never be ITEM_FRAME, so the original check matched nothing. The
        // dispensed stack returned by getItem() is what must be inspected.
        if (event.getItem().getType() == Material.ITEM_FRAME) {
            event.setCancelled(true);
        }
    }
}
|
ksmai/fire-web-engine | src/App/Clock.h | #ifndef __CLOCK_H__
#define __CLOCK_H__
#include <SDL/SDL.h>
namespace FW {
// Frame timing clock backed by SDL's performance counter.
// Call update() once per frame, then read dt() and time().
// Non-copyable; the static `created` flag suggests only a single
// instance is expected — TODO confirm against Clock.cpp.
class Clock {
public:
using Counter = Uint64;
Clock();
Clock(const Clock&) =delete;
Clock& operator=(const Clock&) =delete;
~Clock();
// Elapsed time between the two most recent update() calls
// (milliseconds, per counterToMS naming — confirm in Clock.cpp).
double dt() const;
// Total elapsed time since the clock was constructed — presumably in
// the same unit as dt(); verify against the implementation.
double time() const;
// Samples the performance counter, advancing prevCounter/counter.
void update();
private:
static bool created;
// Converts a raw counter value/delta to milliseconds (per its name).
double counterToMS(Counter) const;
Counter prevCounter;
Counter counter;
};
}
#endif
|
zengshunyao/test | src/test/java/category/book/shenrujvm/ch08/TestClassLoader.java | <reponame>zengshunyao/test
package category.book.shenrujvm.ch08;
/**********************************************************************
 * <p>Filename: TestClassLoader.java</p>
 * <p>Description: demonstrates JVM class-initialization order when a
 * superclass static field is accessed through a subclass.</p>
 * @project_name:test
 * @author zengshunyao
 * @date 2019/3/25 17:13
 * @history
 * @department: Government Affairs Division
 * Copyright ChengDu Funi Cloud Code Technology Development CO.,LTD 2014
 * All Rights Reserved.
 */
/**
 * Demonstrates class-initialization order (JLS 12.4.1): reading
 * {@code Child.a} — a static field declared in {@code Parent} — initializes
 * only {@code Parent}; reading {@code Child.b} then initializes
 * {@code Child}. Expected output:
 * <pre>
 *   parent init
 *   Child init
 *   3
 * </pre>
 */
public class TestClassLoader {
public static void main(String[] args) {
// Child.a resolves to Parent.a (triggers Parent init only);
// Child.b triggers Child init afterwards.
System.out.println(Child.a + Child.b);
}
}
// Prints on first active use of Parent (or of Parent's fields via Child).
class Parent {
public static int a = 1;
static {
System.out.println("parent init");
}
}
// Prints only when Child itself is actively used (e.g. Child.b).
class Child extends Parent {
public static int b = 2;
static {
System.out.println("Child init");
}
}
|
lindsaygelle/animalcrossing | villager/cat/mitzi/doc.go | <gh_stars>0
// Package mitzi exports the Animal Crossing villager Mitzi.
package mitzi
|
cloudgraph/cloudgraph | cloudgraph-hbase/src/main/java/org/cloudgraph/hbase/scan/ScanRecognizerLogicalBinaryExpr.java | <reponame>cloudgraph/cloudgraph
/**
* Copyright 2017 TerraMeta Software, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.cloudgraph.hbase.scan;
import org.cloudgraph.query.expr.DefaultLogicalBinaryExpr;
import org.cloudgraph.query.expr.EvaluationContext;
import org.cloudgraph.query.expr.Expr;
import org.cloudgraph.query.expr.ExprVisitor;
import org.cloudgraph.query.expr.LogicalBinaryExpr;
import org.plasma.query.model.LogicalOperator;
/**
*
* @author <NAME>
* @since 0.5.3
* @see Expr
* @see ExprVisitor
* @see EvaluationContext
*/
// Marker subclass used by the HBase scan recognizer; it adds no behavior
// beyond DefaultLogicalBinaryExpr and exists to give scan-evaluation
// expressions their own concrete type.
public class ScanRecognizerLogicalBinaryExpr extends DefaultLogicalBinaryExpr implements
LogicalBinaryExpr {
/**
 * Constructs a composite expression based on the given terms and <a href=
 * "http://docs.plasma-sdo.org/api/org/plasma/query/model/LogicalOperator.html"
 * >logical</a> operator.
 *
 * @param left
 *          the "left" expression term
 * @param right
 *          the "right" expression term
 * @param oper
 *          the logical operator
 */
public ScanRecognizerLogicalBinaryExpr(Expr left, Expr right, LogicalOperator oper) {
super(left, right, oper);
}
}
sofa-framework/issofa | modules/SofaNonUniformFem/DynamicSparseGridTopologyContainer.cpp | <filename>modules/SofaNonUniformFem/DynamicSparseGridTopologyContainer.cpp
/******************************************************************************
* SOFA, Simulation Open-Framework Architecture, development version *
* (c) 2006-2017 INRIA, USTL, UJF, CNRS, MGH *
* *
* This program is free software; you can redistribute it and/or modify it *
* under the terms of the GNU Lesser General Public License as published by *
* the Free Software Foundation; either version 2.1 of the License, or (at *
* your option) any later version. *
* *
* This program is distributed in the hope that it will be useful, but WITHOUT *
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or *
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License *
* for more details. *
* *
* You should have received a copy of the GNU Lesser General Public License *
* along with this program. If not, see <http://www.gnu.org/licenses/>. *
*******************************************************************************
* Authors: The SOFA Team and external contributors (see Authors.txt) *
* *
* Contact information: <EMAIL> *
******************************************************************************/
#include <SofaNonUniformFem/DynamicSparseGridTopologyContainer.h>
#include <sofa/core/visual/VisualParams.h>
#include <sofa/core/ObjectFactory.h>
#include <sofa/core/loader/VoxelLoader.h>
#include <SofaBaseTopology/TopologyData.inl>
namespace sofa
{
namespace component
{
namespace topology
{
using namespace std;
using namespace sofa::defaulttype;
SOFA_DECL_CLASS ( DynamicSparseGridTopologyContainer );
int DynamicSparseGridTopologyContainerClass = core::RegisterObject ( "Hexahedron set topology container" )
.add< DynamicSparseGridTopologyContainer >()
;
DynamicSparseGridTopologyContainer::DynamicSparseGridTopologyContainer()
: HexahedronSetTopologyContainer()
, resolution ( initData ( &resolution, Vec3i ( 0, 0, 0 ), "resolution", "voxel grid resolution" ) )
, valuesIndexedInRegularGrid( initData ( &valuesIndexedInRegularGrid, sofa::helper::vector<unsigned char>(), "valuesIndexedInRegularGrid", "values indexed in the Regular Grid" ) )
, valuesIndexedInTopology( initData(&valuesIndexedInTopology, "valuesIndexedInTopology", "values indexed in the topology"))
, idxInRegularGrid( initData ( &idxInRegularGrid, sofa::helper::vector<BaseMeshTopology::HexaID>(), "idxInRegularGrid", "indices in the Regular Grid" ) )
, idInRegularGrid2IndexInTopo( initData ( &idInRegularGrid2IndexInTopo, std::map< unsigned int, BaseMeshTopology::HexaID> (), "idInRegularGrid2IndexInTopo", "map between id in the Regular Grid and index in the topology" ) )
, voxelSize( initData(&voxelSize, defaulttype::Vector3(1,1,1), "voxelSize", "Size of the Voxels"))
{
valuesIndexedInRegularGrid.setDisplayed( false);
valuesIndexedInTopology.setDisplayed( false);
idInRegularGrid2IndexInTopo.setDisplayed( false);
}
void DynamicSparseGridTopologyContainer::init()
{
HexahedronSetTopologyContainer::init();
// Init regular/topo mapping
sofa::core::loader::VoxelLoader* VoxelLoader;
this->getContext()->get(VoxelLoader);
if ( !VoxelLoader )
{
this->serr << "DynamicSparseGridTopologyContainer::init(): No VoxelLoader found! Aborting..." << this->sendl;
exit(EXIT_FAILURE);
}
const helper::vector<BaseMeshTopology::HexaID>& iirg = idxInRegularGrid.getValue();
std::map< unsigned int, BaseMeshTopology::HexaID> &idrg2tpo = *idInRegularGrid2IndexInTopo.beginEdit();
helper::vector<unsigned char>& viirg = *(valuesIndexedInRegularGrid.beginEdit());
helper::vector<unsigned char>& viit = *(valuesIndexedInTopology.beginEdit());
for( unsigned int i = 0; i < iirg.size(); i++)
{
idrg2tpo.insert( make_pair( iirg[i], i ));
}
// Init values
int dataSize = VoxelLoader->getDataSize();
const unsigned char* data = VoxelLoader->getData();
// init values in regular grid. (dense).
viirg.resize( dataSize);
//for( int i = 0; i < dataSize; i++)
// viirg[i] = data[i];
for( unsigned int i = 0; i < iirg.size(); ++i)
{
viirg[iirg[i]] = 255;
}
// init values in topo. (pas dense).
viit.resize( iirg.size());
for(unsigned int i = 0; i < iirg.size(); i++)
{
viit[i] = data[iirg[i]];
}
idInRegularGrid2IndexInTopo.endEdit();
valuesIndexedInRegularGrid.endEdit();
valuesIndexedInTopology.endEdit();
}
} // namespace topology
} // namespace component
} // namespace sofa
|
Matt-Ti/Logica-de-programa | 05 - comando IF/MediaAluno/index.js | var readline = require("readline-sync");
var nome = "";
var n1 = 0;
var n2 = 0;
var n3 = 0;
var n4 = 0;
var media = 0;
//Entrada de dados
console.log("Programa que calcula a media de um aluno");
nome = readline.question("Qual seu nome?: ");
n1 = parseFloat(readline.question("informe a 1 nota: "));
n2 = parseFloat(readline.question("informe a 2 nota: "));
n3 = parseFloat(readline.question("informe a 3 nota: "));
n4 = parseFloat(readline.question("informe a 4 nota: "));
media = (n1+n2+n3+n4) / 4;
console.log("Media: " + media);
if(media < 6){
console.log(nome + " Voce foi reprovado \ntente novamente.");
}else{
console.log(nome + " Voce foi aprovado \nParabens!");
}
/*if(media >= 6){
console.log(nome + " Voce foi aprovado \nParabens!");
}*/
|
adriengivry/ElkCraft | Sources/ElkEngine/ElkTools/stdafx.h | <gh_stars>1-10
#pragma once
#pragma warning(disable : 4251)
#pragma warning(disable : 4275)
#include <string>
#include <iostream>
#include <fstream>
#include <string>
#include <ctime>
#include <sstream>
#include <unordered_map>
#include <filesystem>
#include <memory>
#include <type_traits>
#include <typeinfo>
#include <chrono>
#include <random> |
extension/ask | app/models/question_event.rb | # === COPYRIGHT:
# Copyright (c) North Carolina State University
# Developed with funding for the National eXtension Initiative.
# === LICENSE:
#
# see LICENSE file
class QuestionEvent < ActiveRecord::Base
# includes
include MarkupScrubber
include CacheTools
extend YearWeek
# attributes
serialize :updated_question_values
serialize :group_logs
# constants
# date of first QuestionEvent for default dates to avoid hitting db
FIRST_CONTACT = Date.parse('2006-10-10').to_datetime
# #'s 3 and 4 were the old marked spam and marked non spam question events from darmok, these were
# just pulled instead of renumbering all these so to not disturb the other status numbers being pulled over from the other sytem
ASSIGNED_TO = 1
RESOLVED = 2
REACTIVATE = 5
REJECTED = 6
NO_ANSWER = 7
TAG_CHANGE = 8
WORKING_ON = 9
EDIT_QUESTION = 10
PUBLIC_RESPONSE = 11
REOPEN = 12
CLOSED = 13
INTERNAL_COMMENT = 14
ASSIGNED_TO_GROUP = 15
CHANGED_GROUP = 16
CHANGED_LOCATION = 17
EXPERT_EDIT_QUESTION = 18
EXPERT_EDIT_RESPONSE = 19
CHANGED_TO_PUBLIC = 20
CHANGED_TO_PRIVATE = 21
CHANGED_FEATURED = 22
ADDED_TAG = 23
DELETED_TAG = 24
PASSED_TO_WRANGLER = 25
AUTO_ASSIGNED_TO = 26
EVENT_TO_TEXT_MAPPING = { ASSIGNED_TO => 'assigned to',
RESOLVED => 'resolved by',
REACTIVATE => 're-activated by',
REJECTED => 'rejected by',
NO_ANSWER => 'no answer given',
TAG_CHANGE => 'tags edited by',
WORKING_ON => 'worked on by',
EDIT_QUESTION => 'edited question',
PUBLIC_RESPONSE => 'public response',
REOPEN => 'reopened',
CLOSED => 'closed',
INTERNAL_COMMENT => 'commented',
ASSIGNED_TO_GROUP => 'assigned to group',
CHANGED_GROUP => 'group changed',
CHANGED_LOCATION => 'location changed',
EXPERT_EDIT_QUESTION => 'expert edit of question',
EXPERT_EDIT_RESPONSE => 'expert edit of response',
CHANGED_TO_PUBLIC => 'changed to public by',
CHANGED_TO_PRIVATE => 'changed to private by',
CHANGED_FEATURED => 'changed featured by',
ADDED_TAG => 'tag added by',
DELETED_TAG => 'tag deleted by',
PASSED_TO_WRANGLER => 'handed off to',
AUTO_ASSIGNED_TO => 'automatically assigned to'
}
HANDLING_EVENTS = [ASSIGNED_TO, PASSED_TO_WRANGLER, ASSIGNED_TO_GROUP, RESOLVED, REJECTED, NO_ANSWER, CLOSED]
SIGNIFICANT_EVENTS = [REJECTED,PASSED_TO_WRANGLER,NO_ANSWER,EXPERT_EDIT_QUESTION,EXPERT_EDIT_RESPONSE,CHANGED_TO_PUBLIC,CHANGED_TO_PRIVATE]
UPDATE_LAST_ASSIGNED_AT_EVENTS = [ASSIGNED_TO, ASSIGNED_TO_GROUP, PASSED_TO_WRANGLER]
# reporting
YEARWEEK_ACTIVE = "YEARWEEK(#{self.table_name}.created_at,3)"
# associations
belongs_to :question
belongs_to :initiator, :class_name => "User", :foreign_key => "initiated_by_id"
belongs_to :submitter, :class_name => "User", :foreign_key => "submitter_id"
belongs_to :recipient, :class_name => "User", :foreign_key => "recipient_id"
belongs_to :assigned_group, :class_name => "Group", :foreign_key => "recipient_group_id"
belongs_to :previous_recipient, :class_name => "User", :foreign_key => "previous_recipient_id"
belongs_to :previous_initiator, :class_name => "User", :foreign_key => "previous_initiator_id"
belongs_to :previous_handling_recipient, :class_name => "User", :foreign_key => "previous_handling_recipient_id"
belongs_to :previous_handling_initiator, :class_name => "User", :foreign_key => "previous_handling_initiator_id"
belongs_to :previous_group, class_name: 'Group'
belongs_to :changed_group, class_name: 'Group'
belongs_to :auto_assignment_log
# scopes
scope :latest, order("#{self.table_name}.created_at desc")
scope :handling_events, where("event_state IN (#{HANDLING_EVENTS.join(',')})")
scope :significant_events, where("event_state IN (#{SIGNIFICANT_EVENTS.join(',')})")
scope :individual_assignments, where("event_state = ?",ASSIGNED_TO)
scope :extension, where(is_extension: true)
# validations
# filters
after_create :create_question_event_notification
after_create :update_initiator_last_touched
# after_create hook: stamp the initiating user's last_question_touched_at with
# this event's creation time. Uses update_column, so no validations/callbacks run.
def update_initiator_last_touched
  return unless self.initiated_by_id.present?
  self.initiator.update_column(:last_question_touched_at, self.created_at)
end
# Records that the question was resolved by its current resolver, capturing the
# resolving response text on the event row.
def self.log_resolution(question)
  log_event(question: question,
            initiator: question.current_resolver,
            event_state: RESOLVED,
            response: question.current_response)
end
# Records an assignment of the question to an individual expert.
def self.log_assignment(options = {})
  return self.log_event({:question => options[:question],
                         :initiated_by_id => options[:initiated_by].id,
                         :recipient_id => options[:recipient].id,
                         :event_state => ASSIGNED_TO,
                         :response => options[:assignment_comment]})
end
# Records an automatic (system-initiated) assignment to an expert.
def self.log_auto_assignment(options = {})
  return self.log_event({:question => options[:question],
                         :initiated_by_id => User.system_user_id,
                         :recipient_id => options[:recipient].id,
                         :event_state => AUTO_ASSIGNED_TO,
                         :response => options[:assignment_comment]})
end
# Records a hand-off to a question wrangler, keeping a link to the
# auto-assignment log entry that triggered it (if any).
def self.log_wrangler_handoff(options = {})
  return self.log_event({:question => options[:question],
                         :initiated_by_id => options[:initiated_by].id,
                         :recipient_id => options[:recipient].id,
                         :event_state => PASSED_TO_WRANGLER,
                         :response => options[:handoff_reason],
                         :auto_assignment_log => options[:auto_assignment_log]})
end
# Records an internal (expert-facing) comment on the question's history.
def self.log_history_comment(question, initiated_by, history_comment)
  return self.log_event({:question => question,
                         :initiated_by_id => initiated_by.id,
                         :event_state => INTERNAL_COMMENT,
                         :response => history_comment})
end
# Records an assignment to a whole group; recipient_id is explicitly cleared
# because group assignments have no individual recipient.
def self.log_group_assignment(question, group, initiated_by, assignment_comment)
  return self.log_event({:question => question,
                         :initiated_by_id => initiated_by.id,
                         :recipient_group_id => group.id,
                         :recipient_id => nil,
                         :event_state => ASSIGNED_TO_GROUP,
                         :response => assignment_comment})
end
# Records a question moving between groups. Returns false (and logs nothing)
# unless every required option is present.
def self.log_group_change(options = {})
  question = options[:question]
  previous_group = options[:old_group]
  changed_group = options[:new_group]
  initiated_by = options[:initiated_by]
  return false unless question and previous_group and changed_group and initiated_by
  log_event(question: question,
            previous_group_id: previous_group.id,
            changed_group_id: changed_group.id,
            initiated_by_id: initiated_by.id,
            event_state: CHANGED_GROUP)
end
# Records toggling the question's featured flag, storing old/new values.
def self.log_featured_changed(question, initiated_by)
  return self.log_event({:question => question,
                         :initiated_by_id => initiated_by.id,
                         :updated_question_values => {:old_value => !question.featured, :new_value => question.featured},
                         :event_state => CHANGED_FEATURED
                         })
end
# Records a tag being added to the question.
def self.log_added_tag(question, initiated_by, tag)
  return self.log_event({:question => question,
                         :initiated_by_id => initiated_by.id,
                         :changed_tag => tag,
                         :event_state => ADDED_TAG
                         })
end
# Records a tag being removed from the question.
def self.log_deleted_tag(question, initiated_by, tag)
  return self.log_event({:question => question,
                         :initiated_by_id => initiated_by.id,
                         :changed_tag => tag,
                         :event_state => DELETED_TAG
                         })
end
# Records a location change; event_hash carries the old/new values.
def self.log_location_change(question, initiated_by, event_hash)
  return self.log_event({:question => question,
                         :initiated_by_id => initiated_by.id,
                         :updated_question_values => event_hash,
                         :event_state => CHANGED_LOCATION
                         })
end
# Records reopening a question and assigning it to an individual expert.
# update_attribute intentionally skips validations when stamping last_opened_at.
def self.log_reopen(question, recipient, initiated_by, assignment_comment)
  question.update_attribute(:last_opened_at, Time.now)
  return self.log_event({:question => question,
                         :initiated_by_id => initiated_by.id,
                         :recipient_id => recipient.id,
                         :event_state => REOPEN,
                         :response => assignment_comment})
end
# Records a public-side edit of the question; snapshots the edited body.
def self.log_public_edit(question)
  return self.log_event({:question => question,
                         :event_state => EDIT_QUESTION,
                         :additional_data => question.body})
end
# Records a public response, attributed to the system user.
def self.log_public_response(question, submitter_id)
  return self.log_event({:question => question,
                         :initiated_by_id => User.system_user_id,
                         :event_state => PUBLIC_RESPONSE,
                         :submitter_id => submitter_id})
end
# Records that an expert has started working on the question.
def self.log_working_on(question, initiated_by)
  return self.log_event({:question => question, :initiated_by_id => initiated_by.id, :event_state => WORKING_ON})
end
# Records switching the question to public visibility.
def self.log_make_public(question, initiated_by)
  return self.log_event({:question => question,
                         :initiated_by_id => initiated_by.id,
                         :event_state => CHANGED_TO_PUBLIC
                         })
end
# Records switching the question to private visibility.
def self.log_make_private(question, initiated_by)
  return self.log_event({:question => question,
                         :initiated_by_id => initiated_by.id,
                         :event_state => CHANGED_TO_PRIVATE
                         })
end
# Records an expert editing the question text itself.
def self.log_question_edit_by_expert(question, initiated_by)
  return self.log_event({:question => question,
                         :initiated_by_id => initiated_by.id,
                         :event_state => EXPERT_EDIT_QUESTION
                         })
end
# Records an expert editing a response. When a *different* expert edits an
# expert response, also schedules a delayed notification to the author
# (throttled by pending_expert_response_edit_notification?).
def self.log_response_edit_by_expert(question, initiated_by, response)
  # kick off notification to expert (author of edited response) when their response is edited if a different expert edited it
  if response.is_expert? and !response.pending_expert_response_edit_notification?
    Notification.create(notifiable: response, created_by: initiated_by.id, recipient_id: response.resolver.id, notification_type: Notification::AAE_EXPERT_RESPONSE_EDIT, delivery_time: Settings.expert_response_edit_interval.from_now) unless initiated_by == response.resolver
  end
  return self.log_event({:question => question,
                         :initiated_by_id => initiated_by.id,
                         :event_state => EXPERT_EDIT_RESPONSE,
                         :additional_data => response.id
                         })
end
# Records reopening a question and handing it to a group queue.
def self.log_reopen_to_group(question, group, initiated_by, assignment_comment)
  question.update_attribute(:last_opened_at, Time.now)
  return self.log_event({:question => question,
                         :initiated_by_id => initiated_by.id,
                         :recipient_group_id => group.id,
                         :event_state => REOPEN,
                         :response => assignment_comment})
end
# Records the current resolver rejecting the question.
def self.log_rejection(question)
  return self.log_event({:question => question,
                         :initiated_by_id => question.current_resolver_id,
                         :event_state => REJECTED,
                         :response => question.current_response})
end
# Records a question being reactivated.
def self.log_reactivate(question, initiated_by)
  return self.log_event({:question => question,
                         :initiated_by_id => initiated_by.id,
                         :event_state => REACTIVATE})
end
# Records a "no answer available" outcome from the current resolver.
def self.log_no_answer(question)
  return self.log_event({:question => question,
                         :initiator => question.current_resolver,
                         :event_state => NO_ANSWER,
                         :response => question.current_response})
end
# Records closing the question, with the close-out reason as the response.
def self.log_close(question, initiated_by, close_out_reason)
  return self.log_event({:question => question,
                         :initiated_by_id => initiated_by.id,
                         :event_state => CLOSED,
                         :response => close_out_reason})
end
# Central event writer used by all log_* helpers above. Before creating the
# QuestionEvent row it (a) bumps Question#last_assigned_at for assignment-type
# events and (b) fills in the denormalized "previous event" / "previous
# handling event" columns and duration metrics so reporting never has to walk
# the event chain.
def self.log_event(create_attributes = {})
  time_of_this_event = Time.now.utc
  question = create_attributes[:question]
  # update_column skips validations/callbacks on purpose.
  if UPDATE_LAST_ASSIGNED_AT_EVENTS.include?(create_attributes[:event_state])
    question.update_column(:last_assigned_at, time_of_this_event)
  end
  # set is_extension
  # NOTE(review): User.find raises ActiveRecord::RecordNotFound for a stale
  # initiated_by_id rather than skipping the flag -- confirm that is intended.
  if(create_attributes[:initiated_by_id] and user = User.find(create_attributes[:initiated_by_id]))
    create_attributes[:is_extension] = user.has_exid?
  elsif(create_attributes[:initiator])
    create_attributes[:is_extension] = create_attributes[:initiator].has_exid?
  end
  # gathering of previous events for metrics gathering for things like duration and handling rate,
  # if we want to keep track of this for a group context (user is being tracked here), we'll need to add more columns to the schema for groups
  # get last event
  last_event = question.question_events.latest.first
  if last_event.present?
    create_attributes[:duration_since_last] = (time_of_this_event - last_event.created_at).to_i
    create_attributes[:previous_recipient_id] = last_event.recipient_id
    create_attributes[:previous_initiator_id] = last_event.initiated_by_id
    create_attributes[:previous_event_id] = last_event.id
    # if not a handling event, get the last handling event
    if(!last_event.is_handling_event?)
      if(last_handling_event = question.question_events.handling_events.latest.first)
        create_attributes[:previous_handling_event_id] = last_handling_event.id
        create_attributes[:duration_since_last_handling_event] = (time_of_this_event - last_handling_event.created_at).to_i
        create_attributes[:previous_handling_event_state] = last_handling_event.event_state
        create_attributes[:previous_handling_recipient_id] = last_handling_event.recipient_id
        create_attributes[:previous_handling_initiator_id] = last_handling_event.initiated_by_id
      end
    else
      # last_event was a handling event - so use the last_event details to fill those values in
      create_attributes[:previous_handling_event_id] = last_event.id
      create_attributes[:duration_since_last_handling_event] = (time_of_this_event - last_event.created_at).to_i
      create_attributes[:previous_handling_event_state] = last_event.event_state
      create_attributes[:previous_handling_recipient_id] = last_event.recipient_id
      create_attributes[:previous_handling_initiator_id] = last_event.initiated_by_id
    end
  end
  # Note: :question key is passed straight through as the association attribute.
  return QuestionEvent.create(create_attributes)
end
# True when this event's state is one of the handling/routing states.
def is_handling_event?
  HANDLING_EVENTS.include?(event_state)
end
# The first handling event at or after this event's timestamp (excluding this
# event itself); nil when no later handling activity exists.
# NOTE(review): ">= created_at" can match same-second siblings that are not the
# true successor -- confirm the implicit ordering is acceptable.
def next_handling_event
  self.question.question_events.handling_events.where("question_events.created_at >= ?",self.created_at).where("question_events.id != ?",self.id).first
end
# NOTE THAT THE RECIPIENT_ID (AND PREVIOUS_RECIPIENT_ID AND PREVIOUS_HANDLING_RECIPIENT_ID) CAN BE NULL HERE DUE TO ASSIGNMENT TO GROUPS IN WHICH THE RECIPIENT_GROUP_ID IS SET
# after_create hook: creates the Notification(s) appropriate for this event.
# NOTE(review): several branches hardcode created_by/recipient_id of 1 --
# presumably the system account; confirm.
def create_question_event_notification
  case self.event_state
  when REJECTED
    # Tell the previously assigned expert, unless they rejected it themselves.
    Notification.create(notifiable: self, created_by: self.initiated_by_id, recipient_id: self.previous_recipient_id, notification_type: Notification::AAE_REJECT, delivery_time: 1.minute.from_now ) unless (self.previous_recipient_id.nil? || (self.initiated_by_id == self.previous_recipient_id))
  when INTERNAL_COMMENT
    # Tell the current assignee about internal comments they did not write.
    Notification.create(notifiable: self, created_by: 1, recipient_id: self.question.assignee.id, notification_type: Notification::AAE_INTERNAL_COMMENT, delivery_time: 1.minute.from_now ) unless (self.question.assignee.nil? or (self.question.assignee.id == self.initiated_by_id))
  when EDIT_QUESTION
    Notification.create(notifiable: self, created_by: 1, recipient_id: self.question.assignee.id, notification_type: Notification::AAE_PUBLIC_EDIT, delivery_time: 1.minute.from_now ) unless self.question.assignee.nil?
  when PUBLIC_RESPONSE
    Notification.create(notifiable: self.question.responses.last, created_by: 1, recipient_id: self.question.current_resolver.id, notification_type: Notification::AAE_PUBLIC_RESPONSE, delivery_time: 1.minute.from_now )
  when RESOLVED
    Notification.create(notifiable: self, created_by: self.initiated_by_id, recipient_id: self.question.submitter.id, notification_type: Notification::AAE_PUBLIC_EXPERT_RESPONSE, delivery_time: 1.minute.from_now )
    # Only one pending activity notification is scheduled at a time.
    if !Notification.pending_activity_notification?(self.question)
      Notification.create(notifiable: self.question, notification_type: Notification::AAE_QUESTION_ACTIVITY, created_by: 1, recipient_id: 1, delivery_time: Settings.activity_notification_interval.from_now)
    end
  else
    true
  end
end
# attr_writer override for response: sanitize the supplied HTML before it is
# written to the attribute.
def response=(response)
  scrubbed = self.cleanup_html(response)
  write_attribute(:response, scrubbed)
end
# data
# Raises MySQL's session group_concat_max_len so long GROUP_CONCAT results used
# by reporting queries are not silently truncated to the server default.
def self.increase_group_concat_length
  set_group_concat_size_query = "SET SESSION group_concat_max_len = #{Settings.group_concat_max_len};"
  self.connection.execute(set_group_concat_size_query)
end
# Earliest created_at within the current scope, clamped so it is never earlier
# than the public site launch date.
# FIX: minimum(:created_at) returns nil for an empty scope, so the previous
# `ea < EpochDate::WWW_LAUNCH` comparison raised NoMethodError; now returns nil.
def self.earliest_activity_at
  with_scope do
    ea = self.minimum(:created_at)
    if ea.nil?
      nil
    else
      (ea < EpochDate::WWW_LAUNCH) ? EpochDate::WWW_LAUNCH : ea
    end
  end
end
# Most recent created_at within the current scope (nil when the scope is empty).
def self.latest_activity_at
  with_scope { self.maximum(:created_at) }
end
# Cached wrapper around _stats_by_yearweek. The cache key incorporates the
# current scope's SQL so differently-scoped calls do not collide; pass
# cache_options[:nocache] to bypass the cache entirely.
def self.stats_by_yearweek(cache_options = {})
  if(!cache_options[:nocache])
    cache_key = self.get_cache_key(__method__,{scope_sql: current_scope ? current_scope.to_sql : ''})
    Rails.cache.fetch(cache_key,cache_options) do
      with_scope do
        _stats_by_yearweek
      end
    end
  else
    with_scope do
      _stats_by_yearweek
    end
  end
end
# Weekly "active experts" time series for the current scope, restricted to
# extension users. Returns a YearWeekStats hash keyed by yearweek, each entry
# holding the metric plus week-over-week and year-over-year comparisons.
def self._stats_by_yearweek
  metric = 'experts'
  stats = YearWeekStats.new
  # increase_group_concat_length
  with_scope do
    ea = self.extension.earliest_activity_at
    # No extension activity at all: return the empty stats object.
    if(ea.blank?)
      return stats
    end
    la = self.extension.latest_activity_at
    # Distinct initiating users per ISO yearweek (see YEARWEEK_ACTIVE).
    metric_by_yearweek = self.extension.group(YEARWEEK_ACTIVE).count('DISTINCT(initiated_by_id)')
    year_weeks = self.year_weeks_between_dates(ea.to_date,la.to_date)
    year_weeks.each do |year,week|
      yw = self.yearweek(year,week)
      stats[yw] = {}
      metric_value = metric_by_yearweek[yw] || 0
      stats[yw][metric] = metric_value
      previous_year_key = self.yearweek(year-1,week)
      (previous_year,previous_week) = self.previous_year_week(year,week)
      previous_week_key = self.yearweek(previous_year,previous_week)
      # NOTE: previous_week/previous_year are reused below as metric COUNTS,
      # shadowing the week/year numbers assigned above (both keys were computed
      # first, so the result is correct -- just confusingly named).
      previous_week = (metric_by_yearweek[previous_week_key] ? metric_by_yearweek[previous_week_key] : 0)
      stats[yw]["previous_week_#{metric}"] = previous_week
      previous_year = (metric_by_yearweek[previous_year_key] ? metric_by_yearweek[previous_year_key] : 0)
      stats[yw]["previous_year_#{metric}"] = previous_year
      # pct_change
      # nil percent-change means "no baseline", distinct from 0 (no change).
      if(previous_week == 0)
        stats[yw]["pct_change_week_#{metric}"] = nil
      else
        stats[yw]["pct_change_week_#{metric}"] = (metric_value - previous_week) / previous_week.to_f
      end
      if(previous_year == 0)
        stats[yw]["pct_change_year_#{metric}"] = nil
      else
        stats[yw]["pct_change_year_#{metric}"] = (metric_value - previous_year) / previous_year.to_f
      end
    end
  end
  stats
end
end
|
maledicente/cursos | Data_Science/Neural Networks from Scratch/P.9 Introducing Optimization and derivatives.py | <filename>Data_Science/Neural Networks from Scratch/P.9 Introducing Optimization and derivatives.py
import numpy as np
from nnfs.datasets import vertical_data, spiral_data
import nnfs
nnfs.init()  # nnfs is a third-party helper; init() reportedly seeds numpy and sets the default dtype -- see nnfs docs
# Create dataset
X, y = spiral_data(samples = 100 , classes = 3)  # 300 2-D points in 3 interleaved spiral classes; y holds integer class labels
# Dense layer
class Layer_Dense:
    """A fully-connected layer computing ``output = inputs @ weights + biases``."""

    def __init__(self, n_inputs, n_neurons):
        # Small random weights (scaled standard normal) and zero biases.
        self.weights = np.random.randn(n_inputs, n_neurons) * 0.01
        self.biases = np.zeros(shape=(1, n_neurons))

    def forward(self, inputs):
        """Apply the affine transform and store the result on ``self.output``."""
        self.output = inputs @ self.weights + self.biases
# ReLU activation
class Activation_ReLu:
    """Rectified linear unit: element-wise ``max(0, x)``."""

    def forward(self, inputs):
        """Clip negative entries to zero and store the result on ``self.output``."""
        self.output = np.maximum(inputs, 0)
class Activation_Softmax:
    """Row-wise softmax using the max-subtraction trick for numerical stability."""

    def forward(self, inputs):
        # Shifting each row by its max cannot change the result (the factor
        # cancels in the normalization) but prevents exp() overflow.
        shifted = inputs - np.max(inputs, axis=1, keepdims=True)
        exp_values = np.exp(shifted)
        self.output = exp_values / exp_values.sum(axis=1, keepdims=True)
# Common loss class
class Loss:
    """Base loss: subclasses implement forward(output, y) -> per-sample losses."""

    def calculate(self, output, y):
        """Return the mean of the per-sample losses produced by self.forward.

        output: model predictions; y: ground-truth labels (format defined by
        the subclass).
        """
        sample_losses = self.forward(output, y)
        data_loss = np.mean(sample_losses)
        return data_loss


# Cross-entropy loss
class Loss_CategoricalCrossentropy(Loss):
    """Categorical cross-entropy accepting sparse (1-D int) or one-hot (2-D) labels."""

    def forward(self, y_pred, y_true):
        """Return -log(confidence of the true class) for each sample."""
        samples = len(y_pred)
        # Clip data to prevent division by 0
        # Clip both sides to not drag mean towards any value
        y_pred_clipped = np.clip(y_pred, 1e-7, 1 - 1e-7)
        if len(y_true.shape) == 1:
            # Sparse integer labels: pick the true-class confidence per row.
            correct_confidences = y_pred_clipped[range(samples), y_true]
        elif len(y_true.shape) == 2:
            # One-hot labels. FIX: original read `len(y_true.shape == 2)`,
            # which evaluates len(False) and raises TypeError for every
            # one-hot input.
            correct_confidences = np.sum(y_pred_clipped * y_true, axis=1)
        negative_log_likelihoods = -np.log(correct_confidences)
        return negative_log_likelihoods
# Network: 2 inputs -> 3 ReLU units -> 3 softmax outputs (one per class).
dense1 = Layer_Dense(2, 3)
activation1 = Activation_ReLu()
dense2 = Layer_Dense(3,3)
activation2 = Activation_Softmax()
# Create loss function
loss_function = Loss_CategoricalCrossentropy()
# Random-search "optimization": jitter all parameters each iteration and keep
# the jittered set only when it lowers the loss; otherwise revert.
lowest_loss = 999999 # some initial value
best_dense1_weights = dense1.weights.copy()
best_dense1_biases = dense1.biases.copy()
best_dense2_weights = dense2.weights.copy()
best_dense2_biases = dense2.biases.copy()
for iteration in range(10000):
    # Generate a new set of weights for iteration
    dense1.weights += 0.05 * np.random.randn(2,3)
    dense1.biases += 0.05 * np.random.randn(1,3)
    dense2.weights += 0.05 * np.random.randn(3,3)
    dense2.biases += 0.05 * np.random.randn(1,3)
    # Perform a forward pass of the training data through this layer
    dense1.forward(X)
    activation1.forward(dense1.output)
    dense2.forward(activation1.output)
    activation2.forward(dense2.output)
    # Perform a forward pass through activation function
    # it takes the output of second dense layer here and returns loss
    loss = loss_function.calculate(activation2.output, y)
    # Calculate accuracy from output of activation2 and targets
    # calculate values along first axis
    predictions = np.argmax(activation2.output, axis = 1)
    accuracy = np.mean(predictions == y)
    # If loss is smaller - print and save weights and biases aside
    if loss < lowest_loss:
        print ('New set of weights found, iteration:', iteration,
        'loss:' , round(loss, 3), 'accuracy:', round(accuracy,3))
        best_dense1_weights = dense1.weights.copy()
        best_dense1_biases = dense1.biases.copy()
        best_dense2_weights = dense2.weights.copy()
        best_dense2_biases = dense2.biases.copy()
        lowest_loss = loss
    else:
        # Jitter made things worse: restore the best-known parameters.
        dense1.weights = best_dense1_weights.copy()
        dense1.biases = best_dense1_biases.copy()
        dense2.weights = best_dense2_weights.copy()
        dense2.biases = best_dense2_biases.copy()
PublicHealthEngland/data_management_system | db/migrate/20160913110454_add_attachment_attachment_to_project_attachments.rb | class AddAttachmentAttachmentToProjectAttachments < ActiveRecord::Migration[5.0]
def change
add_column :project_attachments, :attachment_file_name, :string
add_column :project_attachments, :attachment_content_type, :string
add_column :project_attachments, :attachment_file_size, :integer
add_column :project_attachments, :attachment_updated_at, :datetime
end
end
|
manvis/IYFEngine | components/common/src/io/File.cpp | // The IYFEngine
//
// Copyright (C) 2015-2018, <NAME>
//
// Redistribution and use in source and binary forms, with or without modification, are
// permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice, this list of
// conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright notice, this list
// of conditions and the following disclaimer in the documentation and/or other materials
// provided with the distribution.
//
// 3. Neither the name of the copyright holder nor the names of its contributors may be
// used to endorse or promote products derived from this software without specific prior
// written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
// SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
// TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
// BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY
// WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "File.hpp"

#include <cassert>
#include <memory>

#include "logging/Logger.hpp"
#include "io/exceptions/FileOpenException.hpp"
namespace iyf {
/// Stores the path and requested open mode; no I/O is performed here.
File::File(const Path& path, FileOpenMode openMode)
    : path(path), openMode(openMode) {}
/// Writes an optional length indicator followed by the string payload.
/// Returns the total number of bytes written (indicator + payload).
/// Throws FileException when writing the indicator fails.
std::int64_t File::writeString(const char* string, std::size_t length, StringLengthIndicator indicator) {
    // FIX: result/lengthIncrement were uninitialized unless the switch matched
    // an enumerator; initialize them so an out-of-range indicator value cannot
    // cause undefined behavior (it now throws via the !result check below).
    bool result = false;
    int lengthIncrement = 0;
    switch (indicator) {
        case StringLengthIndicator::None:
            result = true;
            break;
        case StringLengthIndicator::UInt8:
            // NOTE: lengths wider than the indicator type are silently
            // truncated by the cast; callers must choose an adequate indicator.
            result = writeUInt8(static_cast<std::uint8_t>(length));
            lengthIncrement = 1;
            break;
        case StringLengthIndicator::UInt16:
            result = writeUInt16(static_cast<std::uint16_t>(length));
            lengthIncrement = 2;
            break;
        case StringLengthIndicator::UInt32:
            result = writeUInt32(static_cast<std::uint32_t>(length));
            lengthIncrement = 4;
            break;
        case StringLengthIndicator::UInt64:
            result = writeUInt64(static_cast<std::uint64_t>(length));
            lengthIncrement = 8;
            break;
    }

    if (!result) {
        throw FileException("Failed to write a string length indicator to file ", path.getGenericString());
    }

    return writeBytes(string, sizeof(char) * length) + lengthIncrement;
}
/// Convenience overload: writes the contents of an std::string.
std::int64_t File::writeString(const std::string& string, StringLengthIndicator indicator) {
    return writeString(string.data(), string.length(), indicator);
}

/// Convenience overload: writes the contents of an std::string_view.
std::int64_t File::writeString(const std::string_view& stringView, StringLengthIndicator indicator) {
    return writeString(stringView.data(), stringView.length(), indicator);
}
/// Reads a string that was written with writeString, appending it to 'string'.
/// When indicator is None the payload size is taken from 'count'; otherwise it
/// is read from the stream. Returns indicator size + payload bytes read.
/// Throws FileException when the payload cannot be fully read.
std::int64_t File::readString(std::string& string, StringLengthIndicator indicator, std::uint64_t count) {
    // Defensive defaults: an out-of-range indicator now reads zero bytes
    // instead of using uninitialized values.
    std::uint64_t bytesToRead = 0;
    int lengthIncrement = 0;
    switch (indicator) {
        case StringLengthIndicator::None:
            bytesToRead = count;
            break;
        case StringLengthIndicator::UInt8:
            bytesToRead = readUInt8();
            lengthIncrement = 1;
            break;
        case StringLengthIndicator::UInt16:
            bytesToRead = readUInt16();
            lengthIncrement = 2;
            break;
        case StringLengthIndicator::UInt32:
            bytesToRead = readUInt32();
            lengthIncrement = 4;
            break;
        case StringLengthIndicator::UInt64:
            bytesToRead = readUInt64();
            lengthIncrement = 8;
            break;
    }

    // FIX: the previous raw new[]/delete[] pair leaked the buffer whenever
    // FileException was thrown below; unique_ptr frees it on every exit path.
    auto buffer = std::make_unique<char[]>(bytesToRead);

    std::int64_t readByteCount = readBytes(buffer.get(), bytesToRead);
    // FIX: compare through uint64_t (was uint32_t), so payloads >4 GiB are not
    // spuriously accepted/rejected by a truncated comparison.
    if (readByteCount == -1 || static_cast<std::uint64_t>(readByteCount) != bytesToRead) {
        throw FileException("Failed read a string from file ", path.getGenericString());
    }

    string.append(buffer.get(), bytesToRead);
    return readByteCount + lengthIncrement;
}
/// Reads the entire file into a heap buffer, preserving the caller's current
/// file position. The buffer is NUL-terminated; the returned size excludes the
/// terminator. Throws FileException on any tell/seek/read failure.
std::pair<std::unique_ptr<char[]>, std::int64_t> File::readWholeFile() {
    // Don't forget to save the current position. We don't want to mess someone's logic
    const std::int64_t currentPos = tell();
    if (currentPos == -1) {
        throw FileException("Failed to tell the current position of ", path.getGenericString());
    }

    // Seek to the end to learn the file size, then rewind to the start.
    std::int64_t size = seek(0, File::SeekFrom::End);
    if (size == -1) {
        throw FileException("Failed to seek to the end of ", path.getGenericString());
    }

    std::int64_t result = seek(0, File::SeekFrom::Start);
    if (result == -1) {
        throw FileException("Failed to seek to the start of ", path.getGenericString());
    }

    auto buffer = std::make_unique<char[]>(size + 1);
    std::int64_t byteCount = readBytes(buffer.get(), size);
    if (byteCount == -1 || (byteCount < size)) {
        const std::string errorStr = fmt::format("Failed to read the required number of bytes (read {} of {}) from ", byteCount, size);
        throw FileException(errorStr, path.getGenericString());
    }

    buffer[size] = '\0';  // NUL-terminate so the buffer is usable as a C string

    // Don't forget to reset the position.
    result = seek(currentPos, File::SeekFrom::Start);
    if (result == -1) {
        throw FileException("Failed to seek to the original position ", path.getGenericString());
    }

    return {std::move(buffer), size};
}
/// Empty destructor defined out of line (declaration lives in File.hpp).
File::~File() {}
}
|
nprail/ghost-static-site-generator | src/constants/OPTIONS.js | <gh_stars>100-1000
const path = require('path');
const { argv } = require('yargs');
const { execSync } = require('child_process');
// CLI options with their defaults (argv comes from yargs, required above).
const DOMAIN = argv.domain || 'http://localhost:2368';
const URL = argv.url || 'http://localhost:2368';
const IGNORE_ABSOLUTE_PATHS = argv.ignoreAbsolutePaths || false;
const STATIC_DIRECTORY = argv.dest || 'static';

/**
 * Determines whether wget's --show-progress flag should be used: false when
 * --silent was passed, otherwise probed from `wget --help` output.
 * FIX: previously returned '' (empty string) in the silent case but a boolean
 * otherwise; normalized to always return a boolean. Both values are falsy, so
 * existing truthiness checks behave identically. Also dropped the redundant
 * template-literal wrap around the already-string help text.
 * @returns {boolean}
 */
const shouldShowProgress = () => {
  if (argv.silent) {
    return false;
  }

  const showProgressHelpText = execSync(
    'wget --help | grep "show-progress" || true',
  ).toString();

  return showProgressHelpText.includes('show-progress');
};
const OPTIONS = {
  // This is the path of the static directory the your machine
  ABSOLUTE_STATIC_DIRECTORY: path.resolve(
    process.cwd(),
    `${STATIC_DIRECTORY}`,
  ),
  // This is the --dest flag
  STATIC_DIRECTORY,
  // This is the --domain flag without http:// or https://
  // (the regex strips a leading protocol, case-insensitively)
  DOMAIN_WITHOUT_PROTOCOL: DOMAIN.replace(/^https?:\/\//i, ''),
  // This is the --domain flag
  DOMAIN,
  // This is the --url flag
  URL,
  // This is the --url flag without http:// or https://
  URL_WITHOUT_PROTOCOL: URL.replace(/^https?:\/\//i, ''),
  // The --silent flag determines if we should show the progress bar or not
  // (pre-rendered wget argument fragment; note the trailing space)
  SHOW_PROGRESS_BAR: shouldShowProgress()
    ? '--show-progress '
    : '',
  // --ignore-absolute-paths flag will remove all urls
  IGNORE_ABSOLUTE_PATHS,
};

module.exports = OPTIONS;
|
ICESat2-SlideRule/sliderule | packages/core/DeviceReader.cpp | <gh_stars>1-10
/*
* Copyright (c) 2021, University of Washington
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* 3. Neither the name of the University of Washington nor the names of its
* contributors may be used to endorse or promote products derived from this
* software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE UNIVERSITY OF WASHINGTON AND CONTRIBUTORS
* “AS IS” AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE UNIVERSITY OF WASHINGTON OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/******************************************************************************
* INCLUDES
******************************************************************************/
#include "DeviceReader.h"
#include "DeviceIO.h"
#include "EventLib.h"
#include "MsgQ.h"
/******************************************************************************
* PUBLIC METHODS
******************************************************************************/
/*----------------------------------------------------------------------------
* luaCreate - reader(<device>, <output stream name>)
*----------------------------------------------------------------------------*/
int DeviceReader::luaCreate (lua_State* L)
{
    try
    {
        /* Get Parameters */
        // Arg 1: the DeviceObject to read from (released in the destructor).
        DeviceObject* _device = (DeviceObject*)getLuaObject(L, 1, DeviceObject::OBJECT_TYPE);
        // Arg 2: optional output stream name; NULL means no reader thread is started.
        const char* q_name = getLuaString(L, 2, true, NULL);

        /* Return DeviceReader Object */
        return createLuaObject(L, new DeviceReader(L, _device, q_name));
    }
    catch(const RunTimeException& e)
    {
        mlog(e.level(), "Error creating %s: %s", LuaMetaName, e.what());
        return returnLuaStatus(L, false);
    }
}
/******************************************************************************
* PRIVATE METHODS
******************************************************************************/
/*----------------------------------------------------------------------------
* Constructor
*----------------------------------------------------------------------------*/
// When outq_name is provided, creates the output queue and immediately starts
// the reader thread; otherwise the object is constructed idle.
// NOTE(review): ioActive/ioThread are not assigned here when outq_name is NULL
// -- presumably initialized by the DeviceIO base class; confirm.
DeviceReader::DeviceReader(lua_State* L, DeviceObject* _device, const char* outq_name):
    DeviceIO(L, _device)
{
    outq = NULL;
    if(outq_name)
    {
        outq = new Publisher(outq_name);
        ioActive = true;
        ioThread = new Thread(readerThread, this);
    }
}
/*----------------------------------------------------------------------------
* Destructor
*----------------------------------------------------------------------------*/
// Stops the reader thread, tears down the queue, and releases the device.
DeviceReader::~DeviceReader(void)
{
    /* Prevent Double Death */
    // Disable the die-on-disconnect path so the reader thread does not attempt
    // its own teardown while we are destructing.
    dieOnDisconnect = false;

    /* Stop Thread */
    ioActive = false;                // signals readerThread's loop to exit
    if(ioThread) delete ioThread;    // Thread destructor presumably joins -- confirm

    /* Delete Output Queue */
    if(outq) delete outq;

    /* Release Device */
    device->releaseLuaObject();
}
/*----------------------------------------------------------------------------
* readerThread
*----------------------------------------------------------------------------*/
// Thread entry point: reads buffers from the device and posts copies to the
// output queue until ioActive is cleared, then closes the connection and
// posts a zero-length terminator record.
void* DeviceReader::readerThread (void* parm)
{
    assert(parm != NULL);
    DeviceReader* dr = (DeviceReader*)parm;
    int io_maxsize = LocalLib::getIOMaxsize();
    unsigned char* buf = new unsigned char [io_maxsize];

    /* Read Loop */
    while(dr->ioActive)
    {
        /* Read Device */
        int bytes = dr->device->readBuffer(buf, io_maxsize);
        if(bytes > 0)
        {
            /* Post Message */
            // Retry until the post succeeds or the reader is shut down;
            // post_status keeps STATE_ERROR if ioActive goes false first,
            // which routes the buffer into the dropped statistics below.
            int post_status = MsgQ::STATE_ERROR;
            while(dr->ioActive && (post_status = dr->outq->postCopy(buf, bytes, dr->blockCfg)) <= 0)
            {
                mlog(ERROR, "Device reader unable to post to stream %s: %d", dr->outq->getName(), post_status);
            }

            /* Update Statistics */
            if(post_status > 0)
            {
                dr->bytesProcessed += bytes;
                dr->packetsProcessed += 1;
            }
            else
            {
                dr->bytesDropped += bytes;
                dr->packetsDropped += 1;
            }
        }
        else if(bytes != TIMEOUT_RC)
        {
            /* Handle Non-Timeout Errors */
            // Timeouts simply loop again; real errors either end the thread
            // (dieOnDisconnect) or back off and retry.
            if(dr->dieOnDisconnect)
            {
                if(bytes == SHUTDOWN_RC) mlog(DEBUG, "shutting down device and exiting reader");
                else                     mlog(CRITICAL, "failed to read device (%d)... closing connection and exiting reader!", bytes);
                dr->ioActive = false; // breaks out of loop
            }
            else
            {
                if(bytes == SHUTDOWN_RC) mlog(DEBUG, "shutting down device... sleeping and trying again");
                else                     mlog(ERROR, "failed to read device (%d)... sleeping and trying again!", bytes);
                LocalLib::performIOTimeout(); // prevent spinning
            }
        }
    }

    /* Clean Up */
    delete [] buf;
    dr->device->closeConnection();
    dr->signalComplete();
    dr->outq->postCopy("", 0); // send terminator
    return NULL;
}
|
aviweit/nfvo-drivers | msnoClient/src/main/java/it/nextworks/openapi/msno/model/ScaleToLevelData.java | <gh_stars>0
/*
* SOL005 - NS Lifecycle Management Interface
* SOL005 - NS Lifecycle Management Interface IMPORTANT: Please note that this file might be not aligned to the current version of the ETSI Group Specification it refers to and has not been approved by the ETSI NFV ISG. In case of discrepancies the published ETSI Group Specification takes precedence. Please report bugs to https://forge.etsi.org/bugzilla/buglist.cgi?component=Nfv-Openapis
*
* OpenAPI spec version: 1.1.0-impl:etsi.org:ETSI_NFV_OpenAPI:1
*
*
* NOTE: This class is auto generated by the swagger code generator program.
* https://github.com/swagger-api/swagger-codegen.git
* Do not edit the class manually.
*/
package it.nextworks.openapi.msno.model;
import java.util.Objects;
import java.util.Arrays;
import com.google.gson.TypeAdapter;
import com.google.gson.annotations.JsonAdapter;
import com.google.gson.annotations.SerializedName;
import com.google.gson.stream.JsonReader;
import com.google.gson.stream.JsonWriter;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import it.nextworks.openapi.msno.model.KeyValuePairs;
import it.nextworks.openapi.msno.model.VnfScaleInfo;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
* This type describes the information used to scale a VNF instance to a target size. The target size is either expressed as an instantiation level of that DF as defined in the VNFD, or given as a list of scale levels, one per scaling aspect of that DF. Instantiation levels and scaling aspects are declared in the VNFD. The NFVO shall then invoke the ScaleVnfToLevel operation towards the appropriate VNFM..
*/
@ApiModel(description = "This type describes the information used to scale a VNF instance to a target size. The target size is either expressed as an instantiation level of that DF as defined in the VNFD, or given as a list of scale levels, one per scaling aspect of that DF. Instantiation levels and scaling aspects are declared in the VNFD. The NFVO shall then invoke the ScaleVnfToLevel operation towards the appropriate VNFM.. ")
@javax.annotation.Generated(value = "io.swagger.codegen.languages.JavaClientCodegen", date = "2019-11-08T16:52:33.422+01:00")
public class ScaleToLevelData {
  // Target instantiation level of the current deployment flavour (see getter Javadoc).
  @SerializedName("vnfInstantiationLevelId")
  private String vnfInstantiationLevelId = null;

  // Per-scaling-aspect target scale levels; lazily created by addVnfScaleInfoItem.
  @SerializedName("vnfScaleInfo")
  private List<VnfScaleInfo> vnfScaleInfo = null;

  // Extra VNF-specific scaling parameters forwarded by the NFVO to the VNFM.
  @SerializedName("additionalParams")
  private KeyValuePairs additionalParams = null;
  /**
   * Fluent setter for the target instantiation level identifier.
   * @return this object, for call chaining
   */
  public ScaleToLevelData vnfInstantiationLevelId(String vnfInstantiationLevelId) {
    this.vnfInstantiationLevelId = vnfInstantiationLevelId;
    return this;
  }

  /**
   * Identifier of the target instantiation level of the current deployment flavor to which the VNF is requested to be scaled.
   * @return vnfInstantiationLevelId
   **/
  @ApiModelProperty(value = "Identifier of the target instantiation level of the current deployment flavor to which the VNF is requested to be scaled. ")
  public String getVnfInstantiationLevelId() {
    return vnfInstantiationLevelId;
  }

  public void setVnfInstantiationLevelId(String vnfInstantiationLevelId) {
    this.vnfInstantiationLevelId = vnfInstantiationLevelId;
  }
public ScaleToLevelData vnfScaleInfo(List<VnfScaleInfo> vnfScaleInfo) {
this.vnfScaleInfo = vnfScaleInfo;
return this;
}
public ScaleToLevelData addVnfScaleInfoItem(VnfScaleInfo vnfScaleInfoItem) {
if (this.vnfScaleInfo == null) {
this.vnfScaleInfo = new ArrayList<VnfScaleInfo>();
}
this.vnfScaleInfo.add(vnfScaleInfoItem);
return this;
}
/**
* For each scaling aspect of the current deployment flavor, indicates the target scale level to which the VNF is to be scaled.
* @return vnfScaleInfo
**/
@ApiModelProperty(value = "For each scaling aspect of the current deployment flavor, indicates the target scale level to which the VNF is to be scaled. ")
public List<VnfScaleInfo> getVnfScaleInfo() {
return vnfScaleInfo;
}
public void setVnfScaleInfo(List<VnfScaleInfo> vnfScaleInfo) {
this.vnfScaleInfo = vnfScaleInfo;
}
public ScaleToLevelData additionalParams(KeyValuePairs additionalParams) {
this.additionalParams = additionalParams;
return this;
}
/**
* Additional parameters passed by the NFVO as input to the scaling process, specific to the VNF being scaled.
* @return additionalParams
**/
@ApiModelProperty(value = "Additional parameters passed by the NFVO as input to the scaling process, specific to the VNF being scaled. ")
public KeyValuePairs getAdditionalParams() {
return additionalParams;
}
public void setAdditionalParams(KeyValuePairs additionalParams) {
this.additionalParams = additionalParams;
}
@Override
public boolean equals(java.lang.Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
ScaleToLevelData scaleToLevelData = (ScaleToLevelData) o;
return Objects.equals(this.vnfInstantiationLevelId, scaleToLevelData.vnfInstantiationLevelId) &&
Objects.equals(this.vnfScaleInfo, scaleToLevelData.vnfScaleInfo) &&
Objects.equals(this.additionalParams, scaleToLevelData.additionalParams);
}
@Override
public int hashCode() {
return Objects.hash(vnfInstantiationLevelId, vnfScaleInfo, additionalParams);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("class ScaleToLevelData {\n");
sb.append(" vnfInstantiationLevelId: ").append(toIndentedString(vnfInstantiationLevelId)).append("\n");
sb.append(" vnfScaleInfo: ").append(toIndentedString(vnfScaleInfo)).append("\n");
sb.append(" additionalParams: ").append(toIndentedString(additionalParams)).append("\n");
sb.append("}");
return sb.toString();
}
/**
* Convert the given object to string with each line indented by 4 spaces
* (except the first line).
*/
private String toIndentedString(java.lang.Object o) {
if (o == null) {
return "null";
}
return o.toString().replace("\n", "\n ");
}
}
|
crawld/parcoords-es | src/api/commonScale.js | <reponame>crawld/parcoords-es<filename>src/api/commonScale.js
import { extent } from 'd3-array';
/**
 * Returns a function that (re)computes the y-scale domains for every
 * dimension whose type matches `type` (default 'number').
 *
 * When `global` is true (the default), all matching dimensions share one
 * common domain spanning the union of their individual extents; otherwise
 * each dimension's domain is recomputed independently from the data.
 *
 * @param {Object} config parcoords configuration (dimensions, data, bundleDimension)
 * @param {Object} pc     parcoords instance (detectDimensions/autoscale/bundleDimension)
 */
const commonScale = (config, pc) =>
  function(global, type) {
    const t = type || 'number';
    if (typeof global === 'undefined') {
      global = true;
    }

    // try to autodetect dimensions and create scales
    if (!Object.keys(config.dimensions).length) {
      pc.detectDimensions();
    }
    pc.autoscale();

    // scales of the same type
    const scales = Object.keys(config.dimensions).filter(
      p => config.dimensions[p].type == t
    );

    if (global) {
      // one shared domain spanning the extents of every matching dimension
      let _extent = extent(
        scales
          .map(d => config.dimensions[d].yscale.domain())
          .reduce((cur, acc) => cur.concat(acc))
      );
      scales.forEach(d => {
        config.dimensions[d].yscale.domain(_extent);
      });
    } else {
      // BUG FIX: this branch previously read `extent(config.data, d => +d[k])`
      // where `k` was never defined (ReferenceError at runtime) and the outer
      // `d` (the dimension key) was shadowed by the row parameter. Each
      // dimension's domain is the extent of its own data column.
      scales.forEach(k => {
        config.dimensions[k].yscale.domain(extent(config.data, d => +d[k]));
      });
    }

    // update centroids
    if (config.bundleDimension !== null) {
      pc.bundleDimension(config.bundleDimension);
    }

    return this;
  };
export default commonScale;
|
TheFactory22/RBotFirmware | PlatformIO/src/RobotMotion/RobotController.cpp | <reponame>TheFactory22/RBotFirmware
// RBotFirmware
// <NAME> 2016-18
#include "RobotController.h"
#include "ConfigBase.h"
#include "RdJson.h"
#include "RobotCommandArgs.h"
// Robot types
#include "Robots/RobotMugBot.h"
#include "Robots/RobotGeistBot.h"
#include "Robots/RobotHockeyBot.h"
#include "Robots/RobotSandTableScara.h"
#include "Robots/RobotXYBot.h"
// Construct with no robot attached; init() selects and creates the
// concrete robot implementation later.
RobotController::RobotController() :
    _pRobot(NULL)
{
}
RobotController::~RobotController()
{
    // Safe even if init() never ran: deleting NULL is a no-op
    delete _pRobot;
}
// Create the robot named in the configuration and initialize it.
// Returns false only when a recognised model fails to allocate;
// an unrecognised model is logged but still returns true.
bool RobotController::init(const char* configStr)
{
    // Discard any previously constructed robot
    delete _pRobot;
    _pRobot = NULL;

    // Pull the geometry section out of the config and read the model name
    ConfigBase robotGeom(RdJson::getString("robotGeom", "NONE", configStr).c_str());
    String robotModel = robotGeom.getString("model", "");

    // Select the concrete robot implementation by model name
    bool modelRecognised = true;
    if (robotModel.equalsIgnoreCase("MugBot"))
    {
        Log.notice("Constructing %s\n", robotModel.c_str());
        _pRobot = new RobotMugBot(robotModel.c_str(), _motionHelper);
    }
    else if (robotModel.equalsIgnoreCase("GeistBot"))
    {
        Log.notice("Constructing %s\n", robotModel.c_str());
        _pRobot = new RobotGeistBot(robotModel.c_str(), _motionHelper);
    }
    else if (robotModel.equalsIgnoreCase("SingleArmScara"))
    {
        Log.notice("Constructing %s\n", robotModel.c_str());
        _pRobot = new RobotSandTableScara(robotModel.c_str(), _motionHelper);
    }
    else if (robotModel.equalsIgnoreCase("HBot"))
    {
        Log.notice("Constructing %s\n", robotModel.c_str());
        _pRobot = new RobotHockeyBot(robotModel.c_str(), _motionHelper);
    }
    else if ((robotModel.equalsIgnoreCase("Cartesian")) || (robotModel.equalsIgnoreCase("XYBot")))
    {
        Log.notice("Constructing %s\n", robotModel.c_str());
        _pRobot = new RobotXYBot(robotModel.c_str(), _motionHelper);
    }
    else
    {
        modelRecognised = false;
        Log.notice("Cannot determine robotModel %s\n", robotModel.c_str());
    }

    // A recognised model that failed to allocate is a hard failure
    if (modelRecognised)
    {
        if (!_pRobot)
            return false;
        _pRobot->init(configStr);
    }

    if (_pRobot)
    {
        _motionHelper.setIntrumentationMode(INSTRUMENT_MOTION_ACTUATOR_CONFIG);
        _pRobot->pause(false);
    }
    return true;
}
// Pause (or un-pause) all motion
void RobotController::pause(bool pauseIt)
{
if (pauseIt)
Log.notice("RobotController: pausing\n");
else
Log.notice("RobotController: resuming\n");
if (!_pRobot)
return;
_pRobot->pause(pauseIt);
}
// Stop
void RobotController::stop()
{
Log.notice("RobotController: stop\n");
if (!_pRobot)
return;
_pRobot->stop();
}
// Check if paused
bool RobotController::isPaused()
{
if (!_pRobot)
return false;
return _pRobot->isPaused();
}
// Service (called frequently)
void RobotController::service()
{
if (!_pRobot)
return;
_pRobot->service();
}
// Movement commands
void RobotController::actuator(double value)
{
if (!_pRobot)
return;
_pRobot->actuator(value);
}
// Check if the robot can accept a (motion) command
bool RobotController::canAcceptCommand()
{
if (!_pRobot)
return false;
return _pRobot->canAcceptCommand();
}
void RobotController::moveTo(RobotCommandArgs& args)
{
if (!_pRobot)
return;
_pRobot->moveTo(args);
}
// Set motion parameters
void RobotController::setMotionParams(RobotCommandArgs& args)
{
if (!_pRobot)
return;
_pRobot->setMotionParams(args);
}
// Get status
void RobotController::getCurStatus(RobotCommandArgs& args)
{
if (!_pRobot)
return;
_pRobot->getCurStatus(args);
}
// Get robot attributes
void RobotController::getRobotAttributes(String& robotAttrs)
{
robotAttrs = "{}";
if (!_pRobot)
return;
_pRobot->getRobotAttributes(robotAttrs);
}
// Go Home
void RobotController::goHome(RobotCommandArgs& args)
{
if (!_pRobot)
return;
_pRobot->goHome(args);
}
// Set Home
void RobotController::setHome(RobotCommandArgs& args)
{
if (!_pRobot)
return;
_pRobot->setHome(args);
}
bool RobotController::wasActiveInLastNSeconds(int nSeconds)
{
if (!_pRobot)
return false;
return _pRobot->wasActiveInLastNSeconds(nSeconds);
}
String RobotController::getDebugStr()
{
return _motionHelper.getDebugStr();
}
|
ajitkhaparde/trex-core | src/dpdk/drivers/net/bnxt/tf_core/tf_tbl.c | <reponame>ajitkhaparde/trex-core
/* SPDX-License-Identifier: BSD-3-Clause
* Copyright(c) 2019-2021 Broadcom
* All rights reserved.
*/
/* Truflow Table APIs and supporting code */
#include <rte_common.h>
#include "tf_tbl.h"
#include "tf_common.h"
#include "tf_rm.h"
#include "tf_util.h"
#include "tf_msg.h"
#include "tfp.h"
#include "tf_shadow_tbl.h"
#include "tf_session.h"
#include "tf_device.h"
/* Opaque TruFlow handle; only ever used through pointers here. */
struct tf;

/**
 * Table DBs. One RM database handle per direction (TF_DIR_MAX),
 * created in tf_tbl_bind() and released in tf_tbl_unbind().
 */
static void *tbl_db[TF_DIR_MAX];

/**
 * Table Shadow DBs, one per direction. Only populated when the bind
 * parameters request a shadow copy.
 */
static void *shadow_tbl_db[TF_DIR_MAX];

/**
 * Init flag, set on bind and cleared on unbind
 */
static uint8_t init;

/**
 * Shadow init flag, set on bind and cleared on unbind
 */
static uint8_t shadow_init;
/**
 * Creates the per-direction table DBs (and optional shadow DBs) from the
 * session's resource allocation.
 *
 * [in] tfp - pointer to TruFlow handle
 * [in] parms - table configuration parameters
 *
 * Returns
 *   - (0) if successful.
 *   - (-EINVAL) on parameter failure or if already bound.
 */
int
tf_tbl_bind(struct tf *tfp,
	    struct tf_tbl_cfg_parms *parms)
{
	int rc, d, i;
	struct tf_rm_alloc_info info;
	struct tf_rm_free_db_parms fparms;
	struct tf_shadow_tbl_free_db_parms fshadow;
	struct tf_rm_get_alloc_info_parms ainfo;
	struct tf_shadow_tbl_cfg_parms shadow_cfg;
	struct tf_shadow_tbl_create_db_parms shadow_cdb;
	struct tf_rm_create_db_parms db_cfg = { 0 };

	TF_CHECK_PARMS2(tfp, parms);

	if (init) {
		TFP_DRV_LOG(ERR,
			    "Table DB already initialized\n");
		return -EINVAL;
	}

	db_cfg.type = TF_DEVICE_MODULE_TYPE_TABLE;
	/* Fixed: num_elements was redundantly assigned twice */
	db_cfg.num_elements = parms->num_elements;
	db_cfg.cfg = parms->cfg;

	/* One RM DB per direction */
	for (d = 0; d < TF_DIR_MAX; d++) {
		db_cfg.dir = d;
		db_cfg.alloc_cnt = parms->resources->tbl_cnt[d].cnt;
		db_cfg.rm_db = &tbl_db[d];
		rc = tf_rm_create_db(tfp, &db_cfg);
		if (rc) {
			TFP_DRV_LOG(ERR,
				    "%s: Table DB creation failed\n",
				    tf_dir_2_str(d));
			return rc;
		}
	}

	/* Initialize the Shadow Table. */
	if (parms->shadow_copy) {
		for (d = 0; d < TF_DIR_MAX; d++) {
			memset(&shadow_cfg, 0, sizeof(shadow_cfg));
			memset(&shadow_cdb, 0, sizeof(shadow_cdb));
			/* Get the base addresses of the tables */
			for (i = 0; i < TF_TBL_TYPE_MAX; i++) {
				memset(&info, 0, sizeof(info));

				if (!parms->resources->tbl_cnt[d].cnt[i])
					continue;
				ainfo.rm_db = tbl_db[d];
				ainfo.db_index = i;
				ainfo.info = &info;
				rc = tf_rm_get_info(&ainfo);
				if (rc)
					goto error;

				shadow_cfg.base_addr[i] = info.entry.start;
			}

			/* Create the shadow db */
			shadow_cfg.alloc_cnt =
				parms->resources->tbl_cnt[d].cnt;
			shadow_cfg.num_entries = parms->num_elements;

			shadow_cdb.shadow_db = &shadow_tbl_db[d];
			shadow_cdb.cfg = &shadow_cfg;
			rc = tf_shadow_tbl_create_db(&shadow_cdb);
			if (rc) {
				TFP_DRV_LOG(ERR,
					    "Shadow TBL DB creation failed "
					    "rc=%d\n", rc);
				goto error;
			}
		}
		shadow_init = 1;
	}

	init = 1;

	TFP_DRV_LOG(INFO,
		    "Table Type - initialized\n");

	return 0;
error:
	/* Unwind everything created so far for both directions */
	for (d = 0; d < TF_DIR_MAX; d++) {
		memset(&fparms, 0, sizeof(fparms));
		fparms.dir = d;
		fparms.rm_db = tbl_db[d];
		/* Ignoring return here since we are in the error case */
		(void)tf_rm_free_db(tfp, &fparms);

		if (parms->shadow_copy) {
			/* Fixed: fshadow was previously used uninitialized */
			memset(&fshadow, 0, sizeof(fshadow));
			fshadow.shadow_db = shadow_tbl_db[d];
			tf_shadow_tbl_free_db(&fshadow);
			shadow_tbl_db[d] = NULL;
		}

		tbl_db[d] = NULL;
	}

	shadow_init = 0;
	init = 0;

	return rc;
}
/**
 * Releases the per-direction table DBs (and shadow DBs if they were
 * created) and clears the module init flags. A no-op when bind was
 * never called.
 *
 * [in] tfp - pointer to TruFlow handle
 *
 * Returns
 *   - (0) if successful.
 *   - negative value from tf_rm_free_db() on failure.
 */
int
tf_tbl_unbind(struct tf *tfp)
{
	int rc;
	int i;
	struct tf_rm_free_db_parms fparms = { 0 };
	struct tf_shadow_tbl_free_db_parms fshadow;

	TF_CHECK_PARMS1(tfp);

	/* Bail if nothing has been initialized */
	if (!init) {
		TFP_DRV_LOG(INFO,
			    "No Table DBs created\n");
		return 0;
	}

	for (i = 0; i < TF_DIR_MAX; i++) {
		fparms.dir = i;
		fparms.rm_db = tbl_db[i];
		rc = tf_rm_free_db(tfp, &fparms);
		if (rc)
			/* NOTE: an early failure leaves later directions
			 * unfreed and the init flags set */
			return rc;

		tbl_db[i] = NULL;

		if (shadow_init) {
			memset(&fshadow, 0, sizeof(fshadow));
			fshadow.shadow_db = shadow_tbl_db[i];
			tf_shadow_tbl_free_db(&fshadow);
			shadow_tbl_db[i] = NULL;
		}
	}

	init = 0;
	shadow_init = 0;

	return 0;
}
/**
 * Allocates one free index of the requested table type from the RM DB
 * for the given direction and returns it through parms->idx.
 *
 * Returns
 *   - (0) if successful.
 *   - (-EINVAL) if the module is not bound or parameters are invalid.
 *   - negative value from tf_rm_allocate() on allocation failure.
 */
int
tf_tbl_alloc(struct tf *tfp __rte_unused,
	     struct tf_tbl_alloc_parms *parms)
{
	struct tf_rm_allocate_parms aparms = { 0 };
	uint32_t allocated_idx;
	int rc;

	TF_CHECK_PARMS2(tfp, parms);

	if (!init) {
		TFP_DRV_LOG(ERR,
			    "%s: No Table DBs created\n",
			    tf_dir_2_str(parms->dir));
		return -EINVAL;
	}

	/* Ask the resource manager for a free index of this table type */
	aparms.rm_db = tbl_db[parms->dir];
	aparms.db_index = parms->type;
	aparms.index = &allocated_idx;
	rc = tf_rm_allocate(&aparms);
	if (rc != 0) {
		TFP_DRV_LOG(ERR,
			    "%s: Failed allocate, type:%d\n",
			    tf_dir_2_str(parms->dir),
			    parms->type);
		return rc;
	}

	*parms->idx = allocated_idx;

	return 0;
}
/**
 * Frees a previously allocated table index. When shadow tables are
 * enabled, the shadow layer's reference counting decides whether the
 * entry is actually released; an entry with remaining references is
 * kept and the ref count is reported back through parms->ref_cnt.
 *
 * Returns
 *   - (0) if successful (or if references remain on the entry).
 *   - (-EINVAL) if not bound or the index was not allocated.
 */
int
tf_tbl_free(struct tf *tfp __rte_unused,
	    struct tf_tbl_free_parms *parms)
{
	int rc;
	struct tf_rm_is_allocated_parms aparms = { 0 };
	struct tf_rm_free_parms fparms = { 0 };
	struct tf_shadow_tbl_remove_parms shparms;
	int allocated = 0;

	TF_CHECK_PARMS2(tfp, parms);

	if (!init) {
		TFP_DRV_LOG(ERR,
			    "%s: No Table DBs created\n",
			    tf_dir_2_str(parms->dir));
		return -EINVAL;
	}

	/* Check if element is in use */
	aparms.rm_db = tbl_db[parms->dir];
	aparms.db_index = parms->type;
	aparms.index = parms->idx;
	aparms.allocated = &allocated;
	rc = tf_rm_is_allocated(&aparms);
	if (rc)
		return rc;

	if (allocated != TF_RM_ALLOCATED_ENTRY_IN_USE) {
		TFP_DRV_LOG(ERR,
			    "%s: Entry already free, type:%d, index:%d\n",
			    tf_dir_2_str(parms->dir),
			    parms->type,
			    parms->idx);
		return -EINVAL;
	}

	/*
	 * The Shadow mgmt, if enabled, determines if the entry needs
	 * to be deleted.
	 */
	if (shadow_init) {
		memset(&shparms, 0, sizeof(shparms));
		shparms.shadow_db = shadow_tbl_db[parms->dir];
		shparms.fparms = parms;
		rc = tf_shadow_tbl_remove(&shparms);
		if (rc) {
			/*
			 * Should not get here, log it and let the entry be
			 * deleted.
			 */
			TFP_DRV_LOG(ERR, "%s: Shadow free fail, "
				    "type:%d index:%d deleting the entry.\n",
				    tf_dir_2_str(parms->dir),
				    parms->type,
				    parms->idx);
		} else {
			/*
			 * If the entry still has references, just return the
			 * ref count to the caller. No need to remove entry
			 * from rm.
			 */
			if (parms->ref_cnt >= 1)
				return rc;
		}
	}

	/* Free requested element */
	fparms.rm_db = tbl_db[parms->dir];
	fparms.db_index = parms->type;
	fparms.index = parms->idx;
	rc = tf_rm_free(&fparms);
	if (rc) {
		TFP_DRV_LOG(ERR,
			    "%s: Free failed, type:%d, index:%d\n",
			    tf_dir_2_str(parms->dir),
			    parms->type,
			    parms->idx);
		return rc;
	}

	return 0;
}
/**
 * Searches the shadow table for an entry matching the caller's data.
 * On a MISS with parms->alloc set, a new index is allocated through the
 * device ops and bound to the data in the shadow DB; the new index is
 * returned through parms->idx. On a HIT the result is reported via the
 * search parms only.
 *
 * Returns
 *   - (0) if successful.
 *   - (-EINVAL) if shadow tables are not initialized.
 *   - (-EOPNOTSUPP) if the device does not implement alloc/free ops.
 *   - negative value from the search/alloc/bind steps on failure.
 */
int
tf_tbl_alloc_search(struct tf *tfp,
		    struct tf_tbl_alloc_search_parms *parms)
{
	int rc, frc;
	uint32_t idx;
	struct tf_session *tfs;
	struct tf_dev_info *dev;
	struct tf_tbl_alloc_parms aparms;
	struct tf_shadow_tbl_search_parms sparms;
	struct tf_shadow_tbl_bind_index_parms bparms;
	struct tf_tbl_free_parms fparms;

	TF_CHECK_PARMS2(tfp, parms);

	if (!shadow_init || !shadow_tbl_db[parms->dir]) {
		TFP_DRV_LOG(ERR, "%s: Shadow TBL not initialized.\n",
			    tf_dir_2_str(parms->dir));
		return -EINVAL;
	}

	/* Look for an existing entry with the same data */
	memset(&sparms, 0, sizeof(sparms));
	sparms.sparms = parms;
	sparms.shadow_db = shadow_tbl_db[parms->dir];
	rc = tf_shadow_tbl_search(&sparms);
	if (rc)
		return rc;

	/*
	 * The app didn't request us to alloc the entry, so return now.
	 * The hit should have been updated in the original search parm.
	 */
	if (!parms->alloc || parms->search_status != MISS)
		return rc;

	/* Retrieve the session information */
	rc = tf_session_get_session(tfp, &tfs);
	if (rc) {
		TFP_DRV_LOG(ERR,
			    "%s: Failed to lookup session, rc:%s\n",
			    tf_dir_2_str(parms->dir),
			    strerror(-rc));
		return rc;
	}

	/* Retrieve the device information */
	rc = tf_session_get_device(tfs, &dev);
	if (rc) {
		TFP_DRV_LOG(ERR,
			    "%s: Failed to lookup device, rc:%s\n",
			    tf_dir_2_str(parms->dir),
			    strerror(-rc));
		return rc;
	}

	/* Allocate the index */
	if (dev->ops->tf_dev_alloc_tbl == NULL) {
		rc = -EOPNOTSUPP;
		TFP_DRV_LOG(ERR,
			    "%s: Operation not supported, rc:%s\n",
			    tf_dir_2_str(parms->dir),
			    strerror(-rc));
		return -EOPNOTSUPP;
	}

	memset(&aparms, 0, sizeof(aparms));
	aparms.dir = parms->dir;
	aparms.type = parms->type;
	aparms.tbl_scope_id = parms->tbl_scope_id;
	aparms.idx = &idx;
	rc = dev->ops->tf_dev_alloc_tbl(tfp, &aparms);
	if (rc) {
		TFP_DRV_LOG(ERR,
			    "%s: Table allocation failed, rc:%s\n",
			    tf_dir_2_str(parms->dir),
			    strerror(-rc));
		return rc;
	}

	/* Bind the allocated index to the data */
	memset(&bparms, 0, sizeof(bparms));
	bparms.shadow_db = shadow_tbl_db[parms->dir];
	bparms.dir = parms->dir;
	bparms.type = parms->type;
	bparms.idx = idx;
	bparms.data = parms->result;
	bparms.data_sz_in_bytes = parms->result_sz_in_bytes;
	bparms.hb_handle = sparms.hb_handle;
	rc = tf_shadow_tbl_bind_index(&bparms);
	if (rc) {
		/* Error binding entry, need to free the allocated idx */
		if (dev->ops->tf_dev_free_tbl == NULL) {
			rc = -EOPNOTSUPP;
			TFP_DRV_LOG(ERR,
				    "%s: Operation not supported, rc:%s\n",
				    tf_dir_2_str(parms->dir),
				    strerror(-rc));
			return rc;
		}

		memset(&fparms, 0, sizeof(fparms));
		fparms.dir = parms->dir;
		fparms.type = parms->type;
		fparms.idx = idx;
		frc = dev->ops->tf_dev_free_tbl(tfp, &fparms);
		if (frc) {
			TFP_DRV_LOG(ERR,
				    "%s: Failed free index allocated during "
				    "search. rc=%s\n",
				    tf_dir_2_str(parms->dir),
				    strerror(-frc));
			/* return the original failure. */
			return rc;
		}
	}

	/* NOTE: idx is published even when the bind failed but the cleanup
	 * free succeeded; rc still carries the bind failure to the caller */
	parms->idx = idx;

	return rc;
}
/**
 * Writes the caller's data into an already-allocated table entry by
 * resolving the HCAPI type for the table and issuing a set message
 * (tf_msg_set_tbl_entry) to the firmware.
 *
 * Returns
 *   - (0) if successful.
 *   - (-EINVAL) if not bound or the index was never allocated.
 *   - negative value from the type lookup or message send on failure.
 */
int
tf_tbl_set(struct tf *tfp,
	   struct tf_tbl_set_parms *parms)
{
	int rc;
	int allocated = 0;
	uint16_t hcapi_type;
	struct tf_rm_is_allocated_parms aparms = { 0 };
	struct tf_rm_get_hcapi_parms hparms = { 0 };

	TF_CHECK_PARMS3(tfp, parms, parms->data);

	if (!init) {
		TFP_DRV_LOG(ERR,
			    "%s: No Table DBs created\n",
			    tf_dir_2_str(parms->dir));
		return -EINVAL;
	}

	/* Verify that the entry has been previously allocated */
	aparms.rm_db = tbl_db[parms->dir];
	aparms.db_index = parms->type;
	aparms.index = parms->idx;
	aparms.allocated = &allocated;
	rc = tf_rm_is_allocated(&aparms);
	if (rc)
		return rc;

	if (allocated != TF_RM_ALLOCATED_ENTRY_IN_USE) {
		TFP_DRV_LOG(ERR,
			    "%s, Invalid or not allocated index, type:%d, idx:%d\n",
			    tf_dir_2_str(parms->dir),
			    parms->type,
			    parms->idx);
		return -EINVAL;
	}

	/* Set the entry */
	hparms.rm_db = tbl_db[parms->dir];
	hparms.db_index = parms->type;
	hparms.hcapi_type = &hcapi_type;
	rc = tf_rm_get_hcapi_type(&hparms);
	if (rc) {
		TFP_DRV_LOG(ERR,
			    "%s, Failed type lookup, type:%d, rc:%s\n",
			    tf_dir_2_str(parms->dir),
			    parms->type,
			    strerror(-rc));
		return rc;
	}

	rc = tf_msg_set_tbl_entry(tfp,
				  parms->dir,
				  hcapi_type,
				  parms->data_sz_in_bytes,
				  parms->data,
				  parms->idx);
	if (rc) {
		TFP_DRV_LOG(ERR,
			    "%s, Set failed, type:%d, rc:%s\n",
			    tf_dir_2_str(parms->dir),
			    parms->type,
			    strerror(-rc));
		return rc;
	}

	return 0;
}
/**
 * Reads an already-allocated table entry into the caller's buffer by
 * resolving the HCAPI type for the table and issuing a get message
 * (tf_msg_get_tbl_entry) to the firmware.
 *
 * Returns
 *   - (0) if successful.
 *   - (-EINVAL) if not bound or the index was never allocated.
 *   - negative value from the type lookup or message send on failure.
 */
int
tf_tbl_get(struct tf *tfp,
	   struct tf_tbl_get_parms *parms)
{
	int rc;
	uint16_t hcapi_type;
	int allocated = 0;
	struct tf_rm_is_allocated_parms aparms = { 0 };
	struct tf_rm_get_hcapi_parms hparms = { 0 };

	TF_CHECK_PARMS3(tfp, parms, parms->data);

	if (!init) {
		TFP_DRV_LOG(ERR,
			    "%s: No Table DBs created\n",
			    tf_dir_2_str(parms->dir));
		return -EINVAL;
	}

	/* Verify that the entry has been previously allocated */
	aparms.rm_db = tbl_db[parms->dir];
	aparms.db_index = parms->type;
	aparms.index = parms->idx;
	aparms.allocated = &allocated;
	rc = tf_rm_is_allocated(&aparms);
	if (rc)
		return rc;

	if (allocated != TF_RM_ALLOCATED_ENTRY_IN_USE) {
		TFP_DRV_LOG(ERR,
			    "%s, Invalid or not allocated index, type:%d, idx:%d\n",
			    tf_dir_2_str(parms->dir),
			    parms->type,
			    parms->idx);
		return -EINVAL;
	}

	/* Set the entry */
	hparms.rm_db = tbl_db[parms->dir];
	hparms.db_index = parms->type;
	hparms.hcapi_type = &hcapi_type;
	rc = tf_rm_get_hcapi_type(&hparms);
	if (rc) {
		TFP_DRV_LOG(ERR,
			    "%s, Failed type lookup, type:%d, rc:%s\n",
			    tf_dir_2_str(parms->dir),
			    parms->type,
			    strerror(-rc));
		return rc;
	}

	/* Get the entry */
	rc = tf_msg_get_tbl_entry(tfp,
				  parms->dir,
				  hcapi_type,
				  parms->data_sz_in_bytes,
				  parms->data,
				  parms->idx);
	if (rc) {
		TFP_DRV_LOG(ERR,
			    "%s, Get failed, type:%d, rc:%s\n",
			    tf_dir_2_str(parms->dir),
			    parms->type,
			    strerror(-rc));
		return rc;
	}

	return 0;
}
/**
 * Reads a contiguous range of table entries in one firmware message
 * (tf_msg_bulk_get_tbl_entry), after verifying the whole range lies
 * within the reserved resources. Results are written to the caller's
 * physical memory buffer.
 *
 * Returns
 *   - (0) if successful.
 *   - (-EINVAL) if not bound or the range is outside the reservation.
 *   - negative value from the type lookup or message send on failure.
 */
int
tf_tbl_bulk_get(struct tf *tfp,
		struct tf_tbl_get_bulk_parms *parms)
{
	int rc;
	uint16_t hcapi_type;
	struct tf_rm_get_hcapi_parms hparms = { 0 };
	struct tf_rm_check_indexes_in_range_parms cparms = { 0 };

	TF_CHECK_PARMS2(tfp, parms);

	if (!init) {
		TFP_DRV_LOG(ERR,
			    "%s: No Table DBs created\n",
			    tf_dir_2_str(parms->dir));
		return -EINVAL;
	}

	/* Verify that the entries are in the range of reserved resources. */
	cparms.rm_db = tbl_db[parms->dir];
	cparms.db_index = parms->type;
	cparms.starting_index = parms->starting_idx;
	cparms.num_entries = parms->num_entries;
	rc = tf_rm_check_indexes_in_range(&cparms);
	if (rc) {
		TFP_DRV_LOG(ERR,
			    "%s, Invalid or %d index starting from %d"
			    " not in range, type:%d",
			    tf_dir_2_str(parms->dir),
			    parms->starting_idx,
			    parms->num_entries,
			    parms->type);
		return rc;
	}

	hparms.rm_db = tbl_db[parms->dir];
	hparms.db_index = parms->type;
	hparms.hcapi_type = &hcapi_type;
	rc = tf_rm_get_hcapi_type(&hparms);
	if (rc) {
		TFP_DRV_LOG(ERR,
			    "%s, Failed type lookup, type:%d, rc:%s\n",
			    tf_dir_2_str(parms->dir),
			    parms->type,
			    strerror(-rc));
		return rc;
	}

	/* Get the entries */
	rc = tf_msg_bulk_get_tbl_entry(tfp,
				       parms->dir,
				       hcapi_type,
				       parms->starting_idx,
				       parms->num_entries,
				       parms->entry_sz_in_bytes,
				       parms->physical_mem_addr);
	if (rc) {
		TFP_DRV_LOG(ERR,
			    "%s, Bulk get failed, type:%d, rc:%s\n",
			    tf_dir_2_str(parms->dir),
			    parms->type,
			    strerror(-rc));
	}

	return rc;
}
|
mamh-java/opengrok-plugin | src/main/java/org/opengrok/indexer/analysis/javascript/JavaScriptLexer.java | <reponame>mamh-java/opengrok-plugin
/*
* CDDL HEADER START
*
* The contents of this file are subject to the terms of the
* Common Development and Distribution License (the "License").
* You may not use this file except in compliance with the License.
*
* See LICENSE.txt included in this distribution for the specific
* language governing permissions and limitations under the License.
*
* When distributing Covered Code, include this CDDL HEADER in each
* file and include the License file at LICENSE.txt.
* If applicable, add the following below this CDDL HEADER, with the
* fields enclosed by brackets "[]" replaced with your own identifying
* information: Portions Copyright [yyyy] [name of copyright owner]
*
* CDDL HEADER END
*/
/*
* Copyright (c) 2017, 2019, <NAME> <<EMAIL>>.
*/
package org.opengrok.indexer.analysis.javascript;
import org.opengrok.indexer.analysis.JFlexJointLexer;
import org.opengrok.indexer.analysis.JFlexSymbolMatcher;
import org.opengrok.indexer.analysis.Resettable;
import org.opengrok.indexer.web.HtmlConsts;
import java.io.IOException;
import java.util.Stack;
/**
* Represents an abstract base class for JavaScript lexers.
*/
@SuppressWarnings("Duplicates")
public abstract class JavaScriptLexer extends JFlexSymbolMatcher
        implements JFlexJointLexer, Resettable {

    /** Lexer state for the current (innermost) template-substitution context. */
    private ECMAScriptLexerData data;

    /**
     * Represents the stack of data if substitution is nested.
     */
    private Stack<ECMAScriptLexerData> dataStack;

    public JavaScriptLexer() {
        data = new ECMAScriptLexerData();
        // dataStack is null to begin.
    }

    /**
     * Resets the instance to an initial state.
     */
    @Override
    public void reset() {
        super.reset();
        data = new ECMAScriptLexerData();
        if (dataStack != null) {
            dataStack.clear();
        }
    }

    /**
     * Calls {@link #phLOC()} if the yystate is not COMMENT or SCOMMENT.
     */
    public void chkLOC() {
        if (yystate() != COMMENT() && yystate() != SCOMMENT()) {
            phLOC();
        }
    }

    /**
     * Resets the substitution brace counter to 1.
     */
    protected void substitutionOp() {
        data.nEndBrace = 1;
    }

    /**
     * Determine if template substitution should end based on the first
     * character of {@code capture}, and also recognizing tokens that increase
     * the nesting level alternatively.
     * <p>
     * Calling this method has side effects to possibly modify
     * {@code nEndBrace}.
     * @param capture the just-matched lexer text; a leading '{' or '}'
     * adjusts the brace-nesting count
     * @return {@code true} if the substitution state does not end
     * @throws IOException if the underlying lexer write fails
     */
    protected boolean notInTemplateOrSubstitutionDoesNotEnd(String capture) throws IOException {
        if (data.nEndBrace <= 0) {
            return true;
        }
        if (capture.startsWith("}")) {
            if (--data.nEndBrace <= 0) {
                // Closing brace ends the substitution: restore the outer
                // state, emit the '}' inside a string span, and push back
                // any extra captured characters for re-scanning.
                int nRemaining = capture.length() - 1;
                String opener = capture.substring(0, 1);
                popData();
                yypop();
                disjointSpan(HtmlConsts.STRING_CLASS);
                offer(opener);
                if (nRemaining > 0) {
                    yypushback(nRemaining);
                }
                return false;
            }
        }
        if (capture.startsWith("{")) {
            ++data.nEndBrace;
        }
        return true;
    }

    /**
     * Saves the current lexer data on the (lazily created) stack and starts
     * a fresh context for a nested substitution.
     */
    protected void pushData() {
        if (dataStack == null) {
            dataStack = new Stack<>();
        }
        dataStack.push(data);
        data = new ECMAScriptLexerData();
    }

    /**
     * Restores the previously pushed lexer data. Must be balanced with a
     * prior {@link #pushData()} call.
     */
    private void popData() {
        data = dataStack.pop();
    }

    /**
     * Subclasses must override to get the constant value created by JFlex to
     * represent COMMENT.
     */
    protected abstract int COMMENT();

    /**
     * Subclasses must override to get the constant value created by JFlex to
     * represent SCOMMENT.
     */
    protected abstract int SCOMMENT();

    private static class ECMAScriptLexerData {
        /**
         * When interpolating inside `` with ${, the number of remaining '}'
         * characters is stored. It starts at 1, and any nesting increases the
         * value.
         */
        int nEndBrace;
    }
}
|
StevenCode/ovirt-engine | backend/manager/modules/restapi/jaxrs/src/main/java/org/ovirt/engine/api/v3/adapters/V3HighAvailabilityOutAdapter.java | <gh_stars>0
/*
* Copyright oVirt Authors
* SPDX-License-Identifier: Apache-2.0
*/
package org.ovirt.engine.api.v3.adapters;
import org.ovirt.engine.api.model.HighAvailability;
import org.ovirt.engine.api.v3.V3Adapter;
import org.ovirt.engine.api.v3.types.V3HighAvailability;
public class V3HighAvailabilityOutAdapter implements V3Adapter<HighAvailability, V3HighAvailability> {
    /**
     * Maps a V4 {@link HighAvailability} to its V3 representation, copying
     * only the fields that are set on the input; unset fields remain unset
     * on the result.
     */
    @Override
    public V3HighAvailability adapt(HighAvailability from) {
        V3HighAvailability result = new V3HighAvailability();
        if (from.isSetEnabled()) {
            result.setEnabled(from.isEnabled());
        }
        if (from.isSetPriority()) {
            result.setPriority(from.getPriority());
        }
        return result;
    }
}
|
mbarall/opensha-dev | src/scratch/kevin/ucerf3/inversion/Stampede2CoresPerNodePlot.java | package scratch.kevin.ucerf3.inversion;
import java.awt.Color;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.jfree.chart.plot.DatasetRenderingOrder;
import org.jfree.data.Range;
import org.opensha.commons.data.CSVFile;
import org.opensha.commons.data.function.ArbitrarilyDiscretizedFunc;
import org.opensha.commons.data.function.DiscretizedFunc;
import org.opensha.commons.data.function.EvenlyDiscretizedFunc;
import org.opensha.commons.data.function.XY_DataSet;
import org.opensha.commons.gui.plot.HeadlessGraphPanel;
import org.opensha.commons.gui.plot.PlotCurveCharacterstics;
import org.opensha.commons.gui.plot.PlotLineType;
import org.opensha.commons.gui.plot.PlotSpec;
import org.opensha.commons.gui.plot.PlotSymbol;
import org.opensha.commons.mapping.gmt.elements.GMT_CPT_Files;
import org.opensha.commons.util.cpt.CPT;
public class Stampede2CoresPerNodePlot {
public static void main(String[] args) throws IOException {
File dataDir = new File("/home/kevin/OpenSHA/UCERF3/inversions/2020_07_13-stampede2-benchmark/results");
Map<Integer, List<DiscretizedFunc>> funcMap = new HashMap<>();
for (File file : dataDir.listFiles()) {
String name = file.getName();
if (!name.endsWith(".csv") || !name.contains("_thread"))
continue;
String threadStr = name.substring(name.lastIndexOf("_")+1);
threadStr = threadStr.substring(0, threadStr.indexOf(".csv"));
int threads = Integer.parseInt(threadStr);
System.out.println("Loading "+threads+" threads from "+name);
CSVFile<String> csv = CSVFile.readFile(file, true);
ArbitrarilyDiscretizedFunc func = new ArbitrarilyDiscretizedFunc();
for (int row=1; row<csv.getNumRows(); row++) {
long millis = csv.getLong(row, 1);
double secs = millis/1000d;
double mins = secs/60d;
double energy = csv.getDouble(row, 2);
func.set(mins, energy);
}
System.out.println("\tmin energy: "+func.getMinY());
List<DiscretizedFunc> funcs = funcMap.get(threads);
if (funcs == null) {
funcs = new ArrayList<>();
funcMap.put(threads, funcs);
}
funcs.add(func);
}
List<Integer> threads = new ArrayList<>(funcMap.keySet());
Collections.sort(threads);
CPT cpt = GMT_CPT_Files.RAINBOW_UNIFORM.instance().rescale(1, threads.size()-1);
List<Color> colors = new ArrayList<>();
for (int i=0; i<threads.size(); i++) {
if (i == 0)
colors.add(Color.BLACK);
else
colors.add(cpt.getColor((float)i).darker());
}
List<DiscretizedFunc> meanFuncs = new ArrayList<>();
List<PlotCurveCharacterstics> chars = new ArrayList<>();
double largestMinEnergy = 0d;
double smallestMinEnergy = Double.POSITIVE_INFINITY;
for (int i=0; i<threads.size(); i++) {
List<DiscretizedFunc> tFuncs = funcMap.get(threads.get(i));
double largestMin = 0d;
double smallestMax = Double.POSITIVE_INFINITY;
for (DiscretizedFunc func : tFuncs) {
largestMin = Math.max(largestMin, func.getMinX());
smallestMax = Math.min(smallestMax, func.getMaxX());
}
EvenlyDiscretizedFunc meanFunc = new EvenlyDiscretizedFunc(largestMin, smallestMax, 1000);
for (int j=0; j<meanFunc.size(); j++) {
double avgVal = 0d;
double x = meanFunc.getX(j);
for (DiscretizedFunc func : tFuncs)
avgVal += func.getInterpolatedY(x);
avgVal /= (double)tFuncs.size();
meanFunc.set(j, avgVal);
}
largestMinEnergy = Math.max(largestMinEnergy, meanFunc.getMinY());
smallestMinEnergy = Math.min(smallestMinEnergy, meanFunc.getMinY());
meanFunc.setName(threads.get(i)+" Threads");
meanFuncs.add(meanFunc);
chars.add(new PlotCurveCharacterstics(PlotLineType.SOLID, 2f, colors.get(i)));
}
PlotSpec spec = new PlotSpec(meanFuncs, chars, "Convergence Thread Test",
"Time (minutes)", "Energy");
spec.setLegendVisible(true);
HeadlessGraphPanel gp = new HeadlessGraphPanel();
gp.setTickLabelFontSize(18);
gp.setAxisLabelFontSize(24);
gp.setPlotLabelFontSize(24);
gp.setLegendFontSize(16);
gp.setBackgroundColor(Color.WHITE);
gp.setRenderingOrder(DatasetRenderingOrder.REVERSE);
Range xRange = null;
Range yRange = new Range(00.9*smallestMinEnergy, 4*largestMinEnergy);
gp.drawGraphPanel(spec, false, false, xRange, yRange);
File file = new File(dataDir, "energy_vs_time");
gp.getChartPanel().setSize(800, 600);
gp.saveAsPNG(file.getAbsolutePath()+".png");
gp.saveAsPDF(file.getAbsolutePath()+".pdf");
// now time to 150 plot
double[] targets = { 150d, 130d };
for (double target : targets) {
DiscretizedFunc threadTimeFunc = new ArbitrarilyDiscretizedFunc();
double oneThreadTime = Double.NaN;
for (int i=0; i<meanFuncs.size(); i++) {
DiscretizedFunc meanFunc = meanFuncs.get(i);
if (meanFunc.getMinY() > target)
continue;
double time = meanFunc.getFirstInterpolatedX(target);
int myThreads = threads.get(i);
if (myThreads == 1)
oneThreadTime = time;
threadTimeFunc.set((double)myThreads, time);
}
EvenlyDiscretizedFunc idealScaling = new EvenlyDiscretizedFunc(
threadTimeFunc.getMinX(), threadTimeFunc.getMaxX(), 500);
EvenlyDiscretizedFunc sqrtScaling = new EvenlyDiscretizedFunc(
threadTimeFunc.getMinX(), threadTimeFunc.getMaxX(), 500);
for (int i=0; i<idealScaling.size(); i++) {
double myThreads = idealScaling.getX(i);
double ideal = oneThreadTime/myThreads;
idealScaling.set(i, ideal);
double sqrt = oneThreadTime/Math.sqrt(myThreads);
sqrtScaling.set(i, sqrt);
}
List<DiscretizedFunc> funcs = new ArrayList<>();
chars = new ArrayList<>();
threadTimeFunc.setName("Actual Scaling");
funcs.add(threadTimeFunc);
chars.add(new PlotCurveCharacterstics(PlotLineType.SOLID, 2f, PlotSymbol.FILLED_CIRCLE, 5f, Color.BLACK));
idealScaling.setName("Ideal Scaling");
funcs.add(idealScaling);
chars.add(new PlotCurveCharacterstics(PlotLineType.SOLID, 2f, Color.GRAY));
sqrtScaling.setName("Sqrt(N) Scaling");
funcs.add(sqrtScaling);
chars.add(new PlotCurveCharacterstics(PlotLineType.DASHED, 2f, Color.GRAY));
spec = new PlotSpec(funcs, chars, "Scaling Test",
"Threads", "Time to E="+(int)target+" (m)");
spec.setLegendVisible(true);
xRange = null;
yRange = null;
gp.setLegendFontSize(22);
gp.drawGraphPanel(spec, true, false, xRange, yRange);
file = new File(dataDir, "scaling_to_"+(int)target+"_log");
gp.getChartPanel().setSize(800, 600);
gp.saveAsPNG(file.getAbsolutePath()+".png");
gp.saveAsPDF(file.getAbsolutePath()+".pdf");
gp.drawGraphPanel(spec, false, false, xRange, yRange);
file = new File(dataDir, "scaling_to_"+(int)target);
gp.getChartPanel().setSize(800, 600);
gp.saveAsPNG(file.getAbsolutePath()+".png");
gp.saveAsPDF(file.getAbsolutePath()+".pdf");
// strong scaling
DiscretizedFunc strongScalingFunc = new ArbitrarilyDiscretizedFunc();
for (int i=0; i<meanFuncs.size(); i++) {
int myThreads = threads.get(i);
double myTime = threadTimeFunc.getY((double)myThreads);
double speedup = oneThreadTime/myTime;
strongScalingFunc.set((double)myThreads, speedup);
}
for (int i=0; i<idealScaling.size(); i++) {
double myThreads = idealScaling.getX(i);
double ideal = myThreads;
idealScaling.set(i, ideal);
double sqrt = Math.sqrt(myThreads);
sqrtScaling.set(i, sqrt);
}
funcs = new ArrayList<>();
chars = new ArrayList<>();
threadTimeFunc.setName("Actual Scaling");
funcs.add(strongScalingFunc);
chars.add(new PlotCurveCharacterstics(PlotLineType.SOLID, 2f, PlotSymbol.FILLED_CIRCLE, 5f, Color.BLACK));
idealScaling.setName("Ideal Scaling");
funcs.add(idealScaling);
chars.add(new PlotCurveCharacterstics(PlotLineType.SOLID, 2f, Color.GRAY));
sqrtScaling.setName("Sqrt(N) Scaling");
funcs.add(sqrtScaling);
chars.add(new PlotCurveCharacterstics(PlotLineType.DASHED, 2f, Color.GRAY));
spec = new PlotSpec(funcs, chars, "Strong Scaling",
"Threads", "Speedup");
spec.setLegendVisible(true);
double maxThreads = idealScaling.getMaxX();
xRange = new Range(0d, maxThreads);
yRange = xRange;
gp.drawGraphPanel(spec, false, false, xRange, yRange);
file = new File(dataDir, "strong_scaling_"+(int)target);
gp.getChartPanel().setSize(800, 600);
gp.saveAsPNG(file.getAbsolutePath()+".png");
gp.saveAsPDF(file.getAbsolutePath()+".pdf");
// scaling fraction
DiscretizedFunc strongScalingFract = new ArbitrarilyDiscretizedFunc();
for (int i=0; i<meanFuncs.size(); i++) {
strongScalingFract.set(strongScalingFunc.getX(i), strongScalingFunc.getY(i)/strongScalingFunc.getX(i));
}
for (int i=0; i<idealScaling.size(); i++) {
double myThreads = idealScaling.getX(i);
double sqrt = Math.sqrt(myThreads);
sqrtScaling.set(i, sqrt/myThreads);
}
funcs = new ArrayList<>();
chars = new ArrayList<>();
strongScalingFract.setName("Actual Scaling");
funcs.add(strongScalingFract);
chars.add(new PlotCurveCharacterstics(PlotLineType.SOLID, 2f, PlotSymbol.FILLED_CIRCLE, 5f, Color.BLACK));
sqrtScaling.setName("Sqrt(N) Scaling");
funcs.add(sqrtScaling);
chars.add(new PlotCurveCharacterstics(PlotLineType.DASHED, 2f, Color.GRAY));
spec = new PlotSpec(funcs, chars, "Strong Scaling",
"Threads", "Scaling Efficiency");
spec.setLegendVisible(true);
xRange = new Range(0d, maxThreads);
yRange = new Range(0d, 1d);
gp.drawGraphPanel(spec, false, false, xRange, yRange);
file = new File(dataDir, "strong_scaling_fract_"+(int)target);
gp.getChartPanel().setSize(800, 600);
gp.saveAsPNG(file.getAbsolutePath()+".png");
gp.saveAsPDF(file.getAbsolutePath()+".pdf");
}
}
}
|
eroma2014/seagrid-rich-client | src/main/java/gamess/IncompatibilityPackage/RequiresIncompatibility.java | <filename>src/main/java/gamess/IncompatibilityPackage/RequiresIncompatibility.java<gh_stars>1-10
package gamess.IncompatibilityPackage;
import java.util.Enumeration;
import gamess.GlobalParameters;
import gamess.IncompatibilityPackage.Restriction.IbuildList;
import gamess.IncompatibilityPackage.Restriction.RequiredIfRestriction;
import gamess.IncompatibilityPackage.Restriction.RequiresRestriction;
import gamess.IncompatibilityPackage.Restriction.RestrictionsHolder;
/**
 * Incompatibility checker for "requires" and "requiredIf" restrictions.
 * Two singletons exist: the normal-mode instance (pre-populated from the
 * global restriction lists) and an initially empty provisional instance.
 */
public class RequiresIncompatibility extends IncompatibilityBase
{
    /** Singleton used in normal mode; built with the full restriction lists. */
    private static RequiresIncompatibility instance = new RequiresIncompatibility(true);
    /** Singleton used in provisional mode; starts out empty. */
    private static RequiresIncompatibility provisionalInstance = new RequiresIncompatibility(false);

    /** Returns the singleton matching the current (provisional or normal) mode. */
    public static RequiresIncompatibility getInstance()
    {
        return GlobalParameters.isProvisionalMode ? provisionalInstance : instance;
    }

    private RequiresIncompatibility(boolean isMainObject)
    {
        if (isMainObject) {
            initialize();
        }
    }

    /** Loads the global "requires" and "requiredIf" restriction lists. */
    private void initialize()
    {
        RequiresRestriction requiresRestriction = new RequiresRestriction();
        ((IbuildList) requiresRestriction).buildList("*");
        addIncompatibilityList(requiresRestriction);

        RequiredIfRestriction requiredIfRestriction = new RequiredIfRestriction();
        ((IbuildList) requiredIfRestriction).buildList("*");
        addIncompatibilityList(requiredIfRestriction);
    }

    @Override
    void callAddIncompatibilityList(RestrictionsHolder restrictions)
    {
        addIncompatibilityList(restrictions.getRequiresRestriction());
        addIncompatibilityList(restrictions.getRequiredIfRestriction());
    }

    @Override
    void callRemoveIncompatibilityList(RestrictionsHolder restrictions)
    {
        removeIncompatibilityList(restrictions.getRequiresRestriction());
        removeIncompatibilityList(restrictions.getRequiredIfRestriction());
    }

    @Override
    boolean isIncompatible(Incompatible incompatible) {
        // An item is incompatible here when it is required but not available.
        return incompatible != null && !incompatible.isAvailable();
    }

    /**
     * Collects the tails of all incompatibility-table keys starting with the
     * given prefix, trimmed and joined with "|".
     *
     * @param prefix leading portion of the keys to match
     * @return "|"-separated key tails; empty string when nothing matches
     */
    public String getPartialIncompatibility(String prefix)
    {
        StringBuilder requiredList = new StringBuilder();
        Enumeration<String> keys = incompatibilityTable.keys();
        while (keys.hasMoreElements()) {
            String currentKey = keys.nextElement();
            if (!currentKey.startsWith(prefix)) {
                continue;
            }
            String tail = currentKey.substring(prefix.length()).trim();
            if (requiredList.length() != 0) {
                requiredList.append("|");
            }
            requiredList.append(tail);
        }
        return requiredList.toString();
    }
}
|
vjeuel/G-well_Portfolio-React | lib/node_modules/firebase-tools/lib/functionsDeployHelper.js | <filename>lib/node_modules/firebase-tools/lib/functionsDeployHelper.js
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.printTriggerUrls = exports.printSuccess = exports.logAndTrackDeployStats = exports.toJob = exports.getFunctionLabel = exports.getRegion = exports.getTopicName = exports.getScheduleName = exports.getFunctionId = exports.getFunctionTrigger = exports.logFilters = exports.getReleaseNames = exports.getFilterGroups = exports.functionMatchesGroup = exports.functionMatchesAnyGroup = void 0;
const _ = require("lodash");
const clc = require("cli-color");
const error_1 = require("./error");
const logger_1 = require("./logger");
const track = require("./track");
const utils = require("./utils");
const cloudfunctions = require("./gcp/cloudfunctions");
function functionMatchesAnyGroup(fnName, filterGroups) {
if (!filterGroups.length) {
return true;
}
for (const groupChunks of filterGroups) {
if (functionMatchesGroup(fnName, groupChunks)) {
return true;
}
}
return false;
}
exports.functionMatchesAnyGroup = functionMatchesAnyGroup;
function functionMatchesGroup(functionName, groupChunks) {
const last = _.last(functionName.split("/"));
if (!last) {
return false;
}
const functionNameChunks = last.split("-").slice(0, groupChunks.length);
return _.isEqual(groupChunks, functionNameChunks);
}
exports.functionMatchesGroup = functionMatchesGroup;
function getFilterGroups(options) {
if (!options.only) {
return [];
}
let opts;
return options.only
.split(",")
.filter((filter) => {
opts = filter.split(":");
return opts[0] === "functions" && opts[1];
})
.map((filter) => {
return filter.split(":")[1].split(/[.-]/);
});
}
exports.getFilterGroups = getFilterGroups;
// Returns the function names to release: with no filters, everything that was
// uploaded; with filters, every uploaded-or-existing function matching a
// filter group (existing matches are included so they can be deleted/updated).
// Rewritten with native Set/Array methods; Set preserves first-seen order just
// like lodash's _.union did.
function getReleaseNames(uploadNames, existingNames, functionFilterGroups) {
    if (functionFilterGroups.length === 0) {
        return uploadNames;
    }
    const allFunctions = Array.from(new Set([...uploadNames, ...existingNames]));
    return allFunctions.filter((functionName) =>
        functionFilterGroups.some((groupChunks) => functionMatchesGroup(functionName, groupChunks)));
}
exports.getReleaseNames = getReleaseNames;
// Logs what the --only functions filters resolved to: the project's current
// functions, the functions being uploaded, and a warning for any filter that
// matched nothing (so typos are visible to the user). No-op without filters.
function logFilters(existingNames, releaseNames, functionFilterGroups) {
    if (functionFilterGroups.length === 0) {
        return;
    }
    logger_1.logger.debug("> [functions] filtering triggers to: " + JSON.stringify(releaseNames, null, 2));
    track("Functions Deploy with Filter", "", releaseNames.length);
    let list;
    if (existingNames.length > 0) {
        // Summarize as "id(region)" for each already-deployed function.
        list = _.map(existingNames, (name) => {
            return getFunctionId(name) + "(" + getRegion(name) + ")";
        }).join(", ");
        utils.logBullet(clc.bold.cyan("functions: ") + "current functions in project: " + list);
    }
    if (releaseNames.length > 0) {
        list = _.map(releaseNames, (name) => {
            return getFunctionId(name) + "(" + getRegion(name) + ")";
        }).join(", ");
        utils.logBullet(clc.bold.cyan("functions: ") + "uploading functions in project: " + list);
    }
    // A filter is "unmatched" when no uploaded or existing function matches it.
    const allFunctions = _.union(releaseNames, existingNames);
    const unmatchedFilters = _.chain(functionFilterGroups)
        .filter((filterGroup) => {
        return !_.some(_.map(allFunctions, (functionName) => {
            return functionMatchesGroup(functionName, filterGroup);
        }));
    })
        .map((group) => {
        return group.join("-");
    })
        .value();
    if (unmatchedFilters.length > 0) {
        utils.logWarning(clc.bold.yellow("functions: ") +
            "the following filters were specified but do not match any functions in the project: " +
            unmatchedFilters.join(", "));
    }
}
exports.logFilters = logFilters;
// Extracts the trigger definition ({ httpsTrigger } or { eventTrigger }) from
// a function spec. NOTE: for event triggers this mutates
// functionInfo.eventTrigger in place by attaching the spec's failurePolicy.
// Throws FirebaseError if neither trigger shape is present.
function getFunctionTrigger(functionInfo) {
    if (functionInfo.httpsTrigger) {
        return { httpsTrigger: functionInfo.httpsTrigger };
    }
    else if (functionInfo.eventTrigger) {
        const trigger = functionInfo.eventTrigger;
        trigger.failurePolicy = functionInfo.failurePolicy;
        return { eventTrigger: trigger };
    }
    logger_1.logger.debug("Unknown trigger type found in:", functionInfo);
    throw new error_1.FirebaseError("Could not parse function trigger, unknown trigger type.");
}
exports.getFunctionTrigger = getFunctionTrigger;
function getFunctionId(fullName) {
return fullName.split("/")[5];
}
exports.getFunctionId = getFunctionId;
function getScheduleName(fullName, appEngineLocation) {
const [projectsPrefix, project, regionsPrefix, region, , functionName] = fullName.split("/");
return `${projectsPrefix}/${project}/${regionsPrefix}/${appEngineLocation}/jobs/firebase-schedule-${functionName}-${region}`;
}
exports.getScheduleName = getScheduleName;
function getTopicName(fullName) {
const [projectsPrefix, project, , region, , functionName] = fullName.split("/");
return `${projectsPrefix}/${project}/topics/firebase-schedule-${functionName}-${region}`;
}
exports.getTopicName = getTopicName;
function getRegion(fullName) {
return fullName.split("/")[3];
}
exports.getRegion = getRegion;
// Human-readable "id(region)" label used throughout the deploy log output.
function getFunctionLabel(fullName) {
    return `${getFunctionId(fullName)}(${getRegion(fullName)})`;
}
exports.getFunctionLabel = getFunctionLabel;
// Builds the Cloud Scheduler job payload for a scheduled function.
// NOTE: mutates and returns fn.schedule (it is the Object.assign target).
// `projectId` is accepted for signature compatibility but unused here.
function toJob(fn, appEngineLocation, projectId) {
    return Object.assign(fn.schedule, {
        name: getScheduleName(fn.name, appEngineLocation),
        pubsubTarget: {
            topicName: getTopicName(fn.name),
            attributes: {
                scheduled: "true",
            },
        },
    });
}
exports.toJob = toJob;
// Logs timing statistics for a functions deploy queue and reports the outcome
// (success / partial_success+partial_failure / failure) to analytics via
// `track`. Nothing is tracked when no functions were deployed.
function logAndTrackDeployStats(queue, errorHandler) {
    const stats = queue.stats();
    logger_1.logger.debug(`Total Function Deployment time: ${stats.elapsed}`);
    logger_1.logger.debug(`${stats.total} Functions Deployed`);
    logger_1.logger.debug(`${errorHandler.errors.length} Functions Errored`);
    logger_1.logger.debug(`Average Function Deployment time: ${stats.avg}`);
    if (stats.total > 0) {
        if (errorHandler.errors.length === 0) {
            track("functions_deploy_result", "success", stats.total);
        }
        else if (errorHandler.errors.length < stats.total) {
            // Mixed outcome: report both sides plus the error ratio.
            track("functions_deploy_result", "partial_success", stats.total - errorHandler.errors.length);
            track("functions_deploy_result", "partial_failure", errorHandler.errors.length);
            track("functions_deploy_result", "partial_error_ratio", errorHandler.errors.length / stats.total);
        }
        else {
            track("functions_deploy_result", "failure", stats.total);
        }
    }
}
exports.logAndTrackDeployStats = logAndTrackDeployStats;
// Prints a green success bullet for one function operation, e.g.
// "functions[fn(us-central1)]: Successful create operation."
function printSuccess(funcName, type) {
    utils.logSuccess(clc.bold.green("functions[" + getFunctionLabel(funcName) + "]: ") +
        "Successful " +
        type +
        " operation. ");
}
exports.printSuccess = printSuccess;
// Lists all functions in the project and logs the URL of every HTTPS function
// that was uploaded from the given source URL. Silent when none match.
async function printTriggerUrls(projectId, sourceUrl) {
    const res = await cloudfunctions.listAllFunctions(projectId);
    const httpsFunctions = res.functions.filter((fn) => {
        return fn.sourceUploadUrl === sourceUrl && fn.httpsTrigger;
    });
    if (httpsFunctions.length === 0) {
        return;
    }
    httpsFunctions.forEach((httpsFunc) => {
        // `_a` is the TS-compiled temporary for optional chaining (httpsTrigger?.url).
        var _a;
        logger_1.logger.info(clc.bold("Function URL"), `(${getFunctionId(httpsFunc.name)}):`, (_a = httpsFunc.httpsTrigger) === null || _a === void 0 ? void 0 : _a.url);
    });
    return;
}
exports.printTriggerUrls = printTriggerUrls;
|
Northshoot/RavelLang | src/ai/harmony/ravel/compiler/ir/untyped/IfStatement.java | package ai.harmony.ravel.compiler.ir.untyped;
import org.antlr.v4.runtime.ParserRuleContext;
/**
* Created by gcampagn on 1/20/17.
*/
public class IfStatement extends Instruction {
public final int cond;
public final Block iftrue;
public final Block iffalse;
public IfStatement(ParserRuleContext definer, int cond, Block iftrue, Block iffalse) {
super(definer);
this.cond = cond;
this.iftrue = iftrue;
this.iffalse = iffalse;
}
public String toString() {
return "if " + cond + " {\n" + iftrue + "} else {\n" + iffalse + "}";
}
void accept(InstructionVisitor visitor) {
visitor.visit(this);
}
}
|
atomixnmc/anigo-editor-react | src/components/User/User-Form/User-Form-New-Row.js | <gh_stars>0
import React, { Component, } from 'react';
import PropTypes from 'prop-types';
import { connect } from 'react-redux';
import { bindActionCreators } from 'redux';
import { reduxForm, Field, formValueSelector, submit } from 'redux-form';
import { ButtonGroup, Button, DropdownButton, Dropdown } from 'react-bootstrap';
import { LinkContainer } from 'react-router-bootstrap';
import Confirm from 'react-confirm-bootstrap';
import userValidation from './User-Validator';
import * as userManageActions from 'redux/modules/user/user-manage';
import * as userRoleManageActions from 'redux/modules/user-role/user-role-manage';
import * as userGroupManageActions from 'redux/modules/user-group/user-group-manage';
import { asyncConnect } from 'redux-connect';
import Select from 'react-select';
import Pager from 'react-pager';
import { renderInput, optionsTransform, optionsChange } from 'utils/formUtil';
import * as lodash from 'lodash';
// Maps the role/group option lists from the store; form actions are bound
// from the user-manage module.
@connect(
  state => {
    return ({
      listUserRole: state.userRoleManage.listAll,
      listUserGroup: state.userGroupManage.listAll,
    });
  },
  dispatch => bindActionCreators(userManageActions, dispatch)
)
@reduxForm({
  form: 'userFormNewRow',
  validate: userValidation,
  // On submit, create the new user, then notify the parent (if it supplied a
  // callback) so it can e.g. close this inline editing row.
  onSubmit: (values, dispatch, props ) => {
    // NOTE: Transform to id list
    dispatch(userManageActions.createNew(values))
    .then(()=>{
      if (props.onSubmitFinishedAction) {
        props.onSubmitFinishedAction();
      }
    });
  }
})
// Inline table row (<tr>) for creating a new user: username/email inputs plus
// react-select pickers for role (single) and groups (multi).
export default class UserFormNewRow extends Component {
  static propTypes = {
    handleSubmit: PropTypes.func,
    invalid: PropTypes.bool,
    pristine: PropTypes.bool,
    submitting: PropTypes.bool,
    saveError: PropTypes.object,
    error: PropTypes.object,
    dispatch: PropTypes.func,
    change: PropTypes.func,
    editItemStop: PropTypes.func,
    listUserRole: PropTypes.array,
    listUserGroup: PropTypes.array,
    userEntry: PropTypes.object,
    initialValues: PropTypes.object,
    currentId: PropTypes.number,
    form: PropTypes.string,
    formKey: PropTypes.string,
    onClickCancel: PropTypes.func
  };
  constructor(props) {
    super(props);
  }

  // Fetch the role and group option lists needed by the selects below.
  componentWillMount() {
    const {
      dispatch
    } = this.props;
    dispatch(userRoleManageActions.loadAll());
    dispatch(userGroupManageActions.loadAll());
  }

  render() {
    const {
      currentId,
      form, invalid, pristine, submitting, dispatch, error, handleSubmit, change,
      saveError, editItemStop,
      formKey, initialValues
    } = this.props;
    const user = initialValues;
    // Convert raw id values to react-select option objects ({id, name} pairs).
    const handleUserRoleOptions = (value) =>{
      const result = optionsTransform(this.props.listUserRole, value, 'id', 'name');
      return result;
    };
    // Write the selected role back into the redux-form 'role' field.
    const handleUserRoleOptionsChange = (values) => {
      optionsChange(this.props.listUserRole, values, change, 'role');
    };
    const handleUserGroupOptions = (value) =>{
      const result = optionsTransform(this.props.listUserGroup, value, 'id', 'name');
      return result;
    };
    const handleUserGroupOptionsChange = (values) => {
      optionsChange(this.props.listUserGroup, values, change, 'groups');
    };
    // Single-select picker for the user's role.
    const renderRoleSelect = (role) =>
      (<div>
        <Select
          style={{paddingTop: '4px', color: '#000'}}
          closeMenuOnSelect={true}
          value={handleUserRoleOptions(role.input.value)}
          onChange={handleUserRoleOptionsChange}
          options={handleUserRoleOptions(this.props.listUserRole)}
        />
      </div>
    );
    // Multi-select picker for the user's group memberships.
    const renderGroupSelect = (groups) =>
      (<div>
        <Select
          style={{paddingTop: '4px', color: '#000'}}
          closeMenuOnSelect={true}
          isMulti
          value={handleUserGroupOptions(groups.input.value)}
          onChange={handleUserGroupOptionsChange}
          options={handleUserGroupOptions(this.props.listUserGroup)}
        />
      </div>
    );
    return (
      <tr>
        <td>
          {user.id}
        </td>
        <td>
          <Field
            name="username"
            component={renderInput}/>
        </td>
        <td>
          <Field
            name="role"
            component={renderRoleSelect}/>
        </td>
        <td>
          <Field
            name="email"
            component={renderInput}/>
        </td>
        <td>
          <Field
            name="groups"
            component={renderGroupSelect}/>
        </td>
        <td>
          <button className="btn btn-default"
            onClick={this.props.onClickCancel}
            disabled={submitting}>
            <i className="fa fa-ban" /> Cancel
          </button>
          <button className="btn btn-success"
            onClick={handleSubmit}
            disabled={pristine || invalid || submitting}>
            <i className={'fa ' + (submitting ? 'fa-cog fa-spin' : 'fa-cloud')} /> Save
          </button>
          {/* {saveError && <div className="text-danger">{saveError}</div>} */}
        </td>
      </tr>
    );
  }
}
|
dushmis/linux-pam | libpam/pam_handlers.c | <filename>libpam/pam_handlers.c
/* pam_handlers.c -- pam config file parsing and module loading */
/*
* created by <NAME>.
* Currently maintained by <NAME> <<EMAIL>>
*
*/
#include "pam_private.h"
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <unistd.h>
#define BUF_SIZE 1024
#define MODULE_CHUNK 4
#define UNKNOWN_MODULE "<*unknown module*>"
#ifndef _PAM_ISA
#define _PAM_ISA "."
#endif
static int _pam_assemble_line(FILE *f, char *buf, int buf_len);
static void _pam_free_handlers_aux(struct handler **hp);
static int _pam_add_handler(pam_handle_t *pamh
, int must_fail, int other, int stack_level, int type
, int *actions, const char *mod_path
, int argc, char **argv, int argvlen);
/* Values for module type */
#define PAM_T_ANY 0
#define PAM_T_AUTH 1
#define PAM_T_SESS 2
#define PAM_T_ACCT 4
#define PAM_T_PASS 8
static int _pam_load_conf_file(pam_handle_t *pamh, const char *config_name
, const char *service /* specific file */
, int module_type /* specific type */
, int stack_level /* level of substack */
#ifdef PAM_READ_BOTH_CONFS
, int not_other
#endif /* PAM_READ_BOTH_CONFS */
);
/*
 * Parse one PAM configuration stream, registering a handler for every line
 * that matches the current service (or PAM_DEFAULT_SERVICE / "other").
 *
 * known_service != NULL means the stream is a per-service pam.d file whose
 * lines carry no service field.  requested_module_type restricts parsing to
 * one management group (PAM_T_ANY accepts all).  stack_level tracks substack
 * nesting for include/substack recursion.  Malformed lines are installed as
 * PAM_HT_MUST_FAIL handlers rather than skipped, so they fail at dispatch
 * time.  Returns PAM_SUCCESS or PAM_ABORT.
 */
static int _pam_parse_conf_file(pam_handle_t *pamh, FILE *f
				, const char *known_service /* specific file */
				, int requested_module_type /* specific type */
				, int stack_level /* level of substack */
#ifdef PAM_READ_BOTH_CONFS
				, int not_other
#endif /* PAM_READ_BOTH_CONFS */
    )
{
    char buf[BUF_SIZE];
    int x; /* read a line from the FILE *f ? */
    /*
     * read a line from the configuration (FILE *) f
     */
    while ((x = _pam_assemble_line(f, buf, BUF_SIZE)) > 0) {
	char *tok, *nexttok=NULL;
	const char *this_service;
	const char *mod_path;
	int module_type, actions[_PAM_RETURN_VALUES];
	int other; /* set if module is for PAM_DEFAULT_SERVICE */
	int res; /* module added successfully? */
	int handler_type = PAM_HT_MODULE; /* regular handler from a module */
	int argc;
	char **argv;
	int argvlen;
	D(("_pam_init_handler: LINE: %s", buf));
	if (known_service != NULL) {
	    nexttok = buf;
	    /* No service field: all lines are for the known service. */
	    this_service = known_service;
	} else {
	    this_service = tok = _pam_StrTok(buf, " \n\t", &nexttok);
	}
#ifdef PAM_READ_BOTH_CONFS
	if (not_other)
	    other = 0;
	else
#endif /* PAM_READ_BOTH_CONFS */
	other = !strcasecmp(this_service, PAM_DEFAULT_SERVICE);
	/* accept "service name" or PAM_DEFAULT_SERVICE modules */
	if (!strcasecmp(this_service, pamh->service_name) || other) {
	    int pam_include = 0;
	    int substack = 0;
	    /* This is a service we are looking for */
	    D(("_pam_init_handlers: Found PAM config entry for: %s"
	       , this_service));
	    /* Field 2: management group (auth/session/account/password);
	       a leading '-' silences module-load error logging. */
	    tok = _pam_StrTok(NULL, " \n\t", &nexttok);
	    if (tok == NULL) {
		/* module type does not exist */
		D(("_pam_init_handlers: empty module type for %s", this_service));
		pam_syslog(pamh, LOG_ERR,
			   "(%s) empty module type", this_service);
		module_type = (requested_module_type != PAM_T_ANY) ?
		  requested_module_type : PAM_T_AUTH;	/* most sensitive */
		handler_type = PAM_HT_MUST_FAIL; /* install as normal but fail when dispatched */
	    } else {
		if (tok[0] == '-') { /* do not log module load errors */
		    handler_type = PAM_HT_SILENT_MODULE;
		    ++tok;
		}
		if (!strcasecmp("auth", tok)) {
		    module_type = PAM_T_AUTH;
		} else if (!strcasecmp("session", tok)) {
		    module_type = PAM_T_SESS;
		} else if (!strcasecmp("account", tok)) {
		    module_type = PAM_T_ACCT;
		} else if (!strcasecmp("password", tok)) {
		    module_type = PAM_T_PASS;
		} else {
		    /* Illegal module type */
		    D(("_pam_init_handlers: bad module type: %s", tok));
		    pam_syslog(pamh, LOG_ERR, "(%s) illegal module type: %s",
			       this_service, tok);
		    module_type = (requested_module_type != PAM_T_ANY) ?
			  requested_module_type : PAM_T_AUTH;	/* most sensitive */
		    handler_type = PAM_HT_MUST_FAIL; /* install as normal but fail when dispatched */
		}
	    }
	    D(("Using %s config entry: %s", handler_type?"BAD ":"", tok));
	    if (requested_module_type != PAM_T_ANY &&
	        module_type != requested_module_type) {
		D(("Skipping config entry: %s (requested=%d, found=%d)",
		   tok, requested_module_type, module_type));
		continue;
	    }
	    /* reset the actions to .._UNDEF's -- this is so that
	       we can work out which entries are not yet set (for default). */
	    {
		int i;
		for (i=0; i<_PAM_RETURN_VALUES;
		     actions[i++] = _PAM_ACTION_UNDEF);
	    }
	    /* Field 3: control flag (required/requisite/optional/sufficient/
	       include/substack) or a bracketed [ret=action ...] spec. */
	    tok = _pam_StrTok(NULL, " \n\t", &nexttok);
	    if (tok == NULL) {
		/* no module name given */
		D(("_pam_init_handlers: no control flag supplied"));
		pam_syslog(pamh, LOG_ERR,
			   "(%s) no control flag supplied", this_service);
		_pam_set_default_control(actions, _PAM_ACTION_BAD);
		handler_type = PAM_HT_MUST_FAIL;
	    } else if (!strcasecmp("required", tok)) {
		D(("*PAM_F_REQUIRED*"));
		actions[PAM_SUCCESS] = _PAM_ACTION_OK;
		actions[PAM_NEW_AUTHTOK_REQD] = _PAM_ACTION_OK;
		actions[PAM_IGNORE] = _PAM_ACTION_IGNORE;
		_pam_set_default_control(actions, _PAM_ACTION_BAD);
	    } else if (!strcasecmp("requisite", tok)) {
		D(("*PAM_F_REQUISITE*"));
		actions[PAM_SUCCESS] = _PAM_ACTION_OK;
		actions[PAM_NEW_AUTHTOK_REQD] = _PAM_ACTION_OK;
		actions[PAM_IGNORE] = _PAM_ACTION_IGNORE;
		_pam_set_default_control(actions, _PAM_ACTION_DIE);
	    } else if (!strcasecmp("optional", tok)) {
		D(("*PAM_F_OPTIONAL*"));
		actions[PAM_SUCCESS] = _PAM_ACTION_OK;
		actions[PAM_NEW_AUTHTOK_REQD] = _PAM_ACTION_OK;
		_pam_set_default_control(actions, _PAM_ACTION_IGNORE);
	    } else if (!strcasecmp("sufficient", tok)) {
		D(("*PAM_F_SUFFICIENT*"));
		actions[PAM_SUCCESS] = _PAM_ACTION_DONE;
		actions[PAM_NEW_AUTHTOK_REQD] = _PAM_ACTION_DONE;
		_pam_set_default_control(actions, _PAM_ACTION_IGNORE);
	    } else if (!strcasecmp("include", tok)) {
		D(("*PAM_F_INCLUDE*"));
		pam_include = 1;
		substack = 0;
	    } else if (!strcasecmp("substack", tok)) {
		D(("*PAM_F_SUBSTACK*"));
		pam_include = 1;
		substack = 1;
	    } else {
		D(("will need to parse %s", tok));
		_pam_parse_control(actions, tok);
		/* by default the default is to treat as failure */
		_pam_set_default_control(actions, _PAM_ACTION_BAD);
	    }
	    /* Field 4: module path, or the config name for include/substack. */
	    tok = _pam_StrTok(NULL, " \n\t", &nexttok);
	    if (pam_include) {
		if (substack) {
		    res = _pam_add_handler(pamh, PAM_HT_SUBSTACK, other,
				stack_level, module_type, actions, tok,
				0, NULL, 0);
		    if (res != PAM_SUCCESS) {
			pam_syslog(pamh, LOG_ERR, "error adding substack %s", tok);
			D(("failed to load module - aborting"));
			return PAM_ABORT;
		    }
		}
		if (_pam_load_conf_file(pamh, tok, this_service, module_type,
		    stack_level + substack
#ifdef PAM_READ_BOTH_CONFS
		    , !other
#endif /* PAM_READ_BOTH_CONFS */
		    ) == PAM_SUCCESS)
		    continue;
		/* Failed include: fall through and register a must-fail
		   handler so the misconfiguration is not silently ignored. */
		_pam_set_default_control(actions, _PAM_ACTION_BAD);
		mod_path = NULL;
		handler_type = PAM_HT_MUST_FAIL;
		nexttok = NULL;
	    } else if (tok != NULL) {
		mod_path = tok;
		D(("mod_path = %s",mod_path));
	    } else {
		/* no module name given */
		D(("_pam_init_handlers: no module name supplied"));
		pam_syslog(pamh, LOG_ERR,
			   "(%s) no module name supplied", this_service);
		mod_path = NULL;
		handler_type = PAM_HT_MUST_FAIL;
	    }
	    /* nexttok points to remaining arguments... */
	    if (nexttok != NULL) {
		D(("list: %s",nexttok));
		argvlen = _pam_mkargv(nexttok, &argv, &argc);
		D(("argvlen = %d",argvlen));
	    } else { /* there are no arguments so fix by hand */
		D(("_pam_init_handlers: empty argument list"));
		argvlen = argc = 0;
		argv = NULL;
	    }
#ifdef PAM_DEBUG
	    {
		int y;
		D(("CONF%s: %s%s %d %s %d"
		   , handler_type==PAM_HT_MUST_FAIL?"<*will fail*>":""
		   , this_service, other ? "(backup)":""
		   , module_type
		   , mod_path, argc));
		for (y = 0; y < argc; y++) {
		    D(("CONF: %s", argv[y]));
		}
		for (y = 0; y<_PAM_RETURN_VALUES; ++y) {
		    D(("RETURN %s(%d) -> %d %s",
		       _pam_token_returns[y], y, actions[y],
		       actions[y]>0 ? "jump":
			_pam_token_actions[-actions[y]]));
		}
	    }
#endif
	    res = _pam_add_handler(pamh, handler_type, other, stack_level
				   , module_type, actions, mod_path
				   , argc, argv, argvlen);
	    if (res != PAM_SUCCESS) {
		pam_syslog(pamh, LOG_ERR, "error loading %s", mod_path);
		D(("failed to load module - aborting"));
		return PAM_ABORT;
	    }
	}
    }
    return ( (x < 0) ? PAM_ABORT:PAM_SUCCESS );
}
/*
 * Locate and open the configuration file for @service.
 *
 * An absolute path is opened verbatim; otherwise each pam.d-style directory
 * pattern (local config first, then distribution/vendor defaults) is probed
 * in order until a file opens.  On PAM_SUCCESS, *path holds a malloc'd copy
 * of the winning path (caller frees) and *file the open stream (caller
 * closes).  Returns PAM_BUF_ERR on allocation failure, PAM_ABORT when no
 * candidate could be opened.
 */
static int
_pam_open_config_file(pam_handle_t *pamh
			, const char *service
			, char **path
			, FILE **file)
{
    /* printf-style patterns; each takes the service name as its argument. */
    const char *pamd_dirs[] = { PAM_CONFIG_DF, PAM_CONFIG_DIST_DF
#ifdef VENDORDIR
	, PAM_CONFIG_DIST2_DF
#endif
    };
    char *p;
    FILE *f;
    size_t i;
    /* Absolute path */
    if (service[0] == '/') {
	p = _pam_strdup(service);
	if (p == NULL) {
	    pam_syslog(pamh, LOG_CRIT, "strdup failed");
	    return PAM_BUF_ERR;
	}
	f = fopen(service, "r");
	if (f != NULL) {
	    *path = p;
	    *file = f;
	    return PAM_SUCCESS;
	}
	_pam_drop(p);
	return PAM_ABORT;
    }
    for (i = 0; i < sizeof (pamd_dirs)/sizeof (char *); i++) {
	if (asprintf (&p, pamd_dirs[i], service) < 0) {
	    pam_syslog(pamh, LOG_CRIT, "asprintf failed");
	    return PAM_BUF_ERR;
	}
	D(("opening %s", p));
	f = fopen(p, "r");
	if (f != NULL) {
	    *path = p;
	    *file = f;
	    return PAM_SUCCESS;
	}
	_pam_drop(p);
    }
    return PAM_ABORT;
}
/*
 * Open @config_name (via _pam_open_config_file) and parse it for @service
 * entries of @module_type.  @stack_level tracks include/substack nesting and
 * is capped at PAM_SUBSTACK_MAX_LEVEL to stop recursive configurations.
 * Returns PAM_SUCCESS, or PAM_ABORT on depth overflow, a missing name, an
 * unopenable file, or a parse error.
 */
static int _pam_load_conf_file(pam_handle_t *pamh, const char *config_name
			    , const char *service /* specific file */
			    , int module_type /* specific type */
			    , int stack_level /* level of substack */
#ifdef PAM_READ_BOTH_CONFS
			    , int not_other
#endif /* PAM_READ_BOTH_CONFS */
    )
{
    FILE *f;
    char *path = NULL;
    int retval = PAM_ABORT;
    D(("_pam_load_conf_file called"));
    if (stack_level >= PAM_SUBSTACK_MAX_LEVEL) {
	D(("maximum level of substacks reached"));
	pam_syslog(pamh, LOG_ERR, "maximum level of substacks reached");
	return PAM_ABORT;
    }
    if (config_name == NULL) {
	D(("no config file supplied"));
	pam_syslog(pamh, LOG_ERR, "(%s) no config name supplied", service);
	return PAM_ABORT;
    }
    if (_pam_open_config_file(pamh, config_name, &path, &f) == PAM_SUCCESS) {
	retval = _pam_parse_conf_file(pamh, f, service, module_type, stack_level
#ifdef PAM_READ_BOTH_CONFS
				      , not_other
#endif /* PAM_READ_BOTH_CONFS */
	    );
	if (retval != PAM_SUCCESS)
	    pam_syslog(pamh, LOG_ERR,
		       "_pam_load_conf_file: error reading %s: %s",
		       path, pam_strerror(pamh, retval));
	_pam_drop(path);
	fclose(f);
    } else {
	D(("unable to open %s", config_name));
	pam_syslog(pamh, LOG_ERR,
		   "_pam_load_conf_file: unable to open config for %s",
		   config_name);
    }
    return retval;
}
/* Parse config file, allocate handler structures, dlopen() */
/*
 * Parse the PAM configuration for pamh->service_name and populate
 * pamh->handlers.  Prefers per-service files under the pam.d-style
 * directories (plus the PAM_DEFAULT_SERVICE fallback); if no such directory
 * exists, falls back to the legacy single PAM_CONFIG file.  No-op when
 * handlers were already loaded.  Returns PAM_SUCCESS, PAM_BUF_ERR,
 * PAM_BAD_ITEM (no service name set) or PAM_ABORT.
 */
int _pam_init_handlers(pam_handle_t *pamh)
{
    FILE *f;
    int retval;
    D(("_pam_init_handlers called"));
    IF_NO_PAMH("_pam_init_handlers",pamh,PAM_SYSTEM_ERR);
    /* Return immediately if everything is already loaded */
    if (pamh->handlers.handlers_loaded) {
	return PAM_SUCCESS;
    }
    D(("_pam_init_handlers: initializing"));
    /* First clean the service structure */
    _pam_free_handlers(pamh);
    if (! pamh->handlers.module) {
	if ((pamh->handlers.module =
	     malloc(MODULE_CHUNK * sizeof(struct loaded_module))) == NULL) {
	    pam_syslog(pamh, LOG_CRIT,
		       "_pam_init_handlers: no memory loading module");
	    return PAM_BUF_ERR;
	}
	pamh->handlers.modules_allocated = MODULE_CHUNK;
	pamh->handlers.modules_used = 0;
    }
    if (pamh->service_name == NULL) {
	return PAM_BAD_ITEM; /* XXX - better error? */
    }
#ifdef PAM_LOCKING
    /* Is the PAM subsystem locked? */
    {
	int fd_tmp;
	if ((fd_tmp = open( PAM_LOCK_FILE, O_RDONLY )) != -1) {
	    pam_syslog(pamh, LOG_ERR,
		       "_pam_init_handlers: PAM lockfile ("
		       PAM_LOCK_FILE ") exists - aborting");
	    (void) close(fd_tmp);
	    /*
	     * to avoid swamping the system with requests
	     */
	    _pam_start_timer(pamh);
	    pam_fail_delay(pamh, 5000000);
	    _pam_await_timer(pamh, PAM_ABORT);
	    return PAM_ABORT;
	}
    }
#endif /* PAM_LOCKING */
    /*
     * Now parse the config file(s) and add handlers
     */
    {
	struct stat test_d;
	/* Is there a PAM_CONFIG_D directory? */
	if ((stat(PAM_CONFIG_D, &test_d) == 0 && S_ISDIR(test_d.st_mode)) ||
	    (stat(PAM_CONFIG_DIST_D, &test_d) == 0 && S_ISDIR(test_d.st_mode))
#ifdef PAM_CONFIG_DIST2_D
	 || (stat(PAM_CONFIG_DIST2_D, &test_d) == 0
	     && S_ISDIR(test_d.st_mode))
#endif
	    ) {
	    char *path = NULL;
	    int read_something=0;
	    if (_pam_open_config_file(pamh, pamh->service_name, &path, &f) == PAM_SUCCESS) {
		retval = _pam_parse_conf_file(pamh, f, pamh->service_name,
					      PAM_T_ANY, 0
#ifdef PAM_READ_BOTH_CONFS
					      , 0
#endif /* PAM_READ_BOTH_CONFS */
		    );
		if (retval != PAM_SUCCESS) {
		    pam_syslog(pamh, LOG_ERR,
			       "_pam_init_handlers: error reading %s",
			       path);
		    pam_syslog(pamh, LOG_ERR, "_pam_init_handlers: [%s]",
			       pam_strerror(pamh, retval));
		} else {
		    read_something = 1;
		}
		_pam_drop(path);
		fclose(f);
	    } else {
		D(("unable to open configuration for %s", pamh->service_name));
#ifdef PAM_READ_BOTH_CONFS
		D(("checking %s", PAM_CONFIG));
		if ((f = fopen(PAM_CONFIG,"r")) != NULL) {
		    retval = _pam_parse_conf_file(pamh, f, NULL, PAM_T_ANY, 0, 1);
		    fclose(f);
		} else
#endif /* PAM_READ_BOTH_CONFS */
		retval = PAM_SUCCESS;
		/*
		 * XXX - should we log an error? Some people want to always
		 * use "other"
		 */
	    }
	    if (retval == PAM_SUCCESS) {
		/* now parse the PAM_DEFAULT_SERVICE */
		if (_pam_open_config_file(pamh, PAM_DEFAULT_SERVICE, &path, &f) == PAM_SUCCESS) {
		    /* would test magic here? */
		    retval = _pam_parse_conf_file(pamh, f, PAM_DEFAULT_SERVICE,
						  PAM_T_ANY, 0
#ifdef PAM_READ_BOTH_CONFS
						  , 0
#endif /* PAM_READ_BOTH_CONFS */
			);
		    if (retval != PAM_SUCCESS) {
			pam_syslog(pamh, LOG_ERR,
				   "_pam_init_handlers: error reading %s",
				   path);
			pam_syslog(pamh, LOG_ERR,
				   "_pam_init_handlers: [%s]",
				   pam_strerror(pamh, retval));
		    } else {
			read_something = 1;
		    }
		    _pam_drop(path);
		    fclose(f);
		} else {
		    D(("unable to open %s", PAM_DEFAULT_SERVICE));
		    pam_syslog(pamh, LOG_ERR,
			       "_pam_init_handlers: no default config %s",
			       PAM_DEFAULT_SERVICE);
		}
		if (!read_something) { /* nothing read successfully */
		    retval = PAM_ABORT;
		}
	    }
	} else {
	    /* Legacy layout: everything lives in the single PAM_CONFIG file. */
	    if ((f = fopen(PAM_CONFIG, "r")) == NULL) {
		pam_syslog(pamh, LOG_ERR, "_pam_init_handlers: could not open "
			PAM_CONFIG );
		return PAM_ABORT;
	    }
	    retval = _pam_parse_conf_file(pamh, f, NULL, PAM_T_ANY, 0
#ifdef PAM_READ_BOTH_CONFS
					  , 0
#endif /* PAM_READ_BOTH_CONFS */
		);
	    D(("closing configuration file"));
	    fclose(f);
	}
    }
    if (retval != PAM_SUCCESS) {
	/* Read error */
	pam_syslog(pamh, LOG_ERR, "error reading PAM configuration file");
	return PAM_ABORT;
    }
    pamh->handlers.handlers_loaded = 1;
    D(("_pam_init_handlers exiting"));
    return PAM_SUCCESS;
}
/*
* This is where we read a line of the PAM config file. The line may be
* preceeded by lines of comments and also extended with "\\\n"
*/
/*
 * Assemble one logical configuration line into @buffer, stripping comments
 * ('#' to end of line) and joining "\"-continued lines.
 *
 * Returns >0 (bytes of assembled text), 0 on clean EOF with nothing
 * buffered, or -1 on buffer overflow or EOF in the middle of a continuation.
 */
static int _pam_assemble_line(FILE *f, char *buffer, int buf_len)
{
    char *p = buffer;
    char *endp = buffer + buf_len;
    char *s, *os;
    int used = 0;
    /* loop broken with a 'break' when a non-'\\n' ended line is read */
    D(("called."));
    for (;;) {
	if (p >= endp) {
	    /* Overflow */
	    D(("_pam_assemble_line: overflow"));
	    return -1;
	}
	if (fgets(p, endp - p, f) == NULL) {
	    if (used) {
		/* Incomplete read */
		return -1;
	    } else {
		/* EOF */
		return 0;
	    }
	}
	/* skip leading spaces --- line may be blank */
	s = p + strspn(p, " \n\t");
	if (*s && (*s != '#')) {
	    os = s;
	    /*
	     * we are only interested in characters before the first '#'
	     * character
	     */
	    while (*s && *s != '#')
		 ++s;
	    if (*s == '#') {
		 *s = '\0';
		 used += strlen(os);
		 break; /* the line has been read */
	    }
	    s = os;
	    /*
	     * Check for backslash by scanning back from the end of
	     * the entered line, the '\n' has been included since
	     * normally a line is terminated with this
	     * character. fgets() should only return one though!
	     */
	    s += strlen(s);
	    /* walk back over trailing whitespace to the last real char */
	    while (s > os && ((*--s == ' ') || (*s == '\t')
			      || (*s == '\n')));
	    /* check if it ends with a backslash */
	    if (*s == '\\') {
		*s++ = ' '; /* replace backslash with ' ' */
		*s = '\0'; /* truncate the line here */
		used += strlen(os);
		p = s; /* there is more ... */
	    } else {
		/* End of the line! */
		used += strlen(os);
		break; /* this is the complete line */
	    }
	} else {
	    /* Nothing in this line */
	    /* Don't move p */
	}
    }
    return used;
}
/*
 * Derive a module name from a path: take the final path component and
 * strip any trailing ".xx" extension.  Returns a freshly allocated
 * string, or NULL on allocation failure, or when the result would be
 * empty or the literal "?" (both rejected to keep the audit trail
 * unambiguous).
 */
static char *
extract_modulename(const char *mod_path)
{
    const char *base;
    char *name;
    char *ext;

    /* position just past the last '/', or at the start if none */
    base = strrchr(mod_path, '/');
    base = (base == NULL) ? mod_path : base + 1;

    name = _pam_strdup(base);
    if (name == NULL)
	return NULL;

    /* drop the extension, if any */
    ext = strrchr(name, '.');
    if (ext != NULL)
	*ext = '\0';

    /* an empty name or "?" would make the audit trail confusing */
    if (name[0] == '\0' || strcmp(name, "?") == 0) {
	_pam_drop(name);
	return NULL;
    }

    return name;
}
/*
 * Locate or load the PAM module at 'mod_path' and return its
 * loaded_module slot in pamh->handlers.module.  An already-loaded path
 * (case-sensitive match) is reused.  A module that fails to dlopen is
 * still recorded as PAM_MT_FAULTY_MOD so that later dispatch can report
 * it; NULL is returned only on memory-allocation failure.
 */
static struct loaded_module *
_pam_load_module(pam_handle_t *pamh, const char *mod_path, int handler_type)
{
    int x = 0;
    int success;
    char *mod_full_isa_path=NULL, *isa=NULL;

    struct loaded_module *mod;

    D(("_pam_load_module: loading module `%s'", mod_path));

    mod = pamh->handlers.module;

    /* First, ensure the module is loaded */
    while (x < pamh->handlers.modules_used) {
	if (!strcmp(mod[x].name, mod_path)) {  /* case sensitive ! */
	    break;
	}
	x++;
    }
    if (x == pamh->handlers.modules_used) {
	/* Not found */
	if (pamh->handlers.modules_allocated == pamh->handlers.modules_used) {
	    /* will need more memory: grow the array in fixed chunks */
	    void *tmp = realloc(pamh->handlers.module,
			       (pamh->handlers.modules_allocated+MODULE_CHUNK)
			       *sizeof(struct loaded_module));
	    if (tmp == NULL) {
		D(("cannot enlarge module pointer memory"));
		pam_syslog(pamh, LOG_CRIT,
			   "realloc returned NULL in _pam_load_module");
		return NULL;
	    }
	    pamh->handlers.module = tmp;
	    pamh->handlers.modules_allocated += MODULE_CHUNK;
	}
	mod = &(pamh->handlers.module[x]);

	/* Be pessimistic... */
	success = PAM_ABORT;

	D(("_pam_load_module: _pam_dlopen(%s)", mod_path));
	mod->dl_handle = _pam_dlopen(mod_path);
	D(("_pam_load_module: _pam_dlopen'ed"));
	D(("_pam_load_module: dlopen'ed"));
	if (mod->dl_handle == NULL) {
	    /* Retry with any "$ISA" token in the path expanded to the
	     * platform-specific _PAM_ISA directory component. */
	    if (strstr(mod_path, "$ISA")) {
		mod_full_isa_path = malloc(strlen(mod_path) + strlen(_PAM_ISA) + 1);
		if (mod_full_isa_path == NULL) {
		    D(("_pam_load_module: couldn't get memory for mod_path"));
		    pam_syslog(pamh, LOG_CRIT, "no memory for module path");
		    success = PAM_ABORT;
		} else {
		    strcpy(mod_full_isa_path, mod_path);
		    isa = strstr(mod_full_isa_path, "$ISA");
		    if (isa) {
			/* shift the tail to make room, then splice in _PAM_ISA
			 * over the 4-byte "$ISA" token */
			memmove(isa + strlen(_PAM_ISA), isa + 4, strlen(isa + 4) + 1);
			memmove(isa, _PAM_ISA, strlen(_PAM_ISA));
		    }
		    mod->dl_handle = _pam_dlopen(mod_full_isa_path);
		    _pam_drop(mod_full_isa_path);
		}
	    }
	}
	if (mod->dl_handle == NULL) {
	    D(("_pam_load_module: _pam_dlopen(%s) failed", mod_path));
	    if (handler_type != PAM_HT_SILENT_MODULE)
		pam_syslog(pamh, LOG_ERR, "unable to dlopen(%s): %s", mod_path,
			   _pam_dlerror());
	    /* Don't abort yet; static code may be able to find function.
	     * But defaults to abort if nothing found below... */
	} else {
	    D(("module added successfully"));
	    success = PAM_SUCCESS;
	    mod->type = PAM_MT_DYNAMIC_MOD;
	    pamh->handlers.modules_used++;
	}

	if (success != PAM_SUCCESS) {	/* add a malformed module */
	    mod->dl_handle = NULL;
	    mod->type = PAM_MT_FAULTY_MOD;
	    pamh->handlers.modules_used++;
	    if (handler_type != PAM_HT_SILENT_MODULE)
		pam_syslog(pamh, LOG_ERR, "adding faulty module: %s", mod_path);
	    success = PAM_SUCCESS;	/* We have successfully added a module */
	}

	/* indicate its name - later we will search for it by this */
	if ((mod->name = _pam_strdup(mod_path)) == NULL) {
	    D(("_pam_load_module: couldn't get memory for mod_path"));
	    pam_syslog(pamh, LOG_CRIT, "no memory for module path");
	    success = PAM_ABORT;
	}

    } else {			/* x != pamh->handlers.modules_used */
	mod += x;		/* the located module */
	success = PAM_SUCCESS;
    }
    return success == PAM_SUCCESS ? mod : NULL;
}
/*
 * Append handler entries for one configuration line to the appropriate
 * stack(s).  For PAM_T_AUTH and PAM_T_SESS two handlers are created (the
 * auth/setcred and open/close session pairs share a module and argv);
 * the second handler's cached_retval_p deliberately aliases the first's.
 *
 * 'other' selects the fallback ("other") service stacks instead of the
 * per-service ones; 'actions' is copied, 'argv' is adopted (not copied)
 * for handler #1 and duplicated (argvlen bytes) for handler #2.
 *
 * Returns PAM_SUCCESS, or PAM_ABORT on allocation/lookup failure.
 * NOTE(review): on the later PAM_ABORT paths the partially-built handler
 * remains linked into the stack; it is presumably reclaimed by
 * _pam_free_handlers — confirm against callers.
 */
int _pam_add_handler(pam_handle_t *pamh
		     , int handler_type, int other, int stack_level, int type
		     , int *actions, const char *mod_path
		     , int argc, char **argv, int argvlen)
{
    struct loaded_module *mod = NULL;
    struct handler **handler_p;
    struct handler **handler_p2;
    struct handlers *the_handlers;
    const char *sym, *sym2;
    char *mod_full_path;
    servicefn func, func2;
    int mod_type = PAM_MT_FAULTY_MOD;

    D(("called."));
    IF_NO_PAMH("_pam_add_handler",pamh,PAM_SYSTEM_ERR);

    D(("_pam_add_handler: adding type %d, handler_type %d, module `%s'",
       type, handler_type, mod_path));

    if ((handler_type == PAM_HT_MODULE || handler_type == PAM_HT_SILENT_MODULE) &&
	mod_path != NULL) {
	/* absolute paths load as-is; relative ones under DEFAULT_MODULE_PATH */
	if (mod_path[0] == '/') {
	    mod = _pam_load_module(pamh, mod_path, handler_type);
	} else if (asprintf(&mod_full_path, "%s%s",
			    DEFAULT_MODULE_PATH, mod_path) >= 0) {
	    mod = _pam_load_module(pamh, mod_full_path, handler_type);
	    _pam_drop(mod_full_path);
	} else {
	    pam_syslog(pamh, LOG_CRIT, "cannot malloc full mod path");
	    return PAM_ABORT;
	}

	if (mod == NULL) {
	    /* if we get here with NULL it means allocation error */
	    return PAM_ABORT;
	}

	mod_type = mod->type;
    }

    if (mod_path == NULL)
	mod_path = UNKNOWN_MODULE;

    /*
     * At this point 'mod' points to the stored/loaded module.
     */

    /* Now define the handler(s) based on mod->dlhandle and type */

    /* decide which list of handlers to use */
    the_handlers = (other) ? &pamh->handlers.other : &pamh->handlers.conf;

    handler_p = handler_p2 = NULL;
    func = func2 = NULL;
    sym2 = NULL;

    /* point handler_p's at the root addresses of the function stacks */
    switch (type) {
    case PAM_T_AUTH:
	handler_p = &the_handlers->authenticate;
	sym = "pam_sm_authenticate";
	handler_p2 = &the_handlers->setcred;
	sym2 = "pam_sm_setcred";
	break;
    case PAM_T_SESS:
	handler_p = &the_handlers->open_session;
	sym = "pam_sm_open_session";
	handler_p2 = &the_handlers->close_session;
	sym2 = "pam_sm_close_session";
	break;
    case PAM_T_ACCT:
	handler_p = &the_handlers->acct_mgmt;
	sym = "pam_sm_acct_mgmt";
	break;
    case PAM_T_PASS:
	handler_p = &the_handlers->chauthtok;
	sym = "pam_sm_chauthtok";
	break;
    default:
	/* Illegal module type */
	D(("_pam_add_handler: illegal module type %d", type));
	return PAM_ABORT;
    }

    /* are the modules reliable? */
    if (mod_type != PAM_MT_DYNAMIC_MOD &&
	mod_type != PAM_MT_FAULTY_MOD) {
	D(("_pam_add_handlers: illegal module library type; %d", mod_type));
	pam_syslog(pamh, LOG_ERR,
		   "internal error: module library type not known: %s;%d",
		   sym, mod_type);
	return PAM_ABORT;
    }

    /* now identify this module's functions - for non-faulty modules */

    if ((mod_type == PAM_MT_DYNAMIC_MOD) &&
	!(func = _pam_dlsym(mod->dl_handle, sym)) ) {
	pam_syslog(pamh, LOG_ERR, "unable to resolve symbol: %s", sym);
    }
    if (sym2) {
	if ((mod_type == PAM_MT_DYNAMIC_MOD) &&
	    !(func2 = _pam_dlsym(mod->dl_handle, sym2)) ) {
	    pam_syslog(pamh, LOG_ERR, "unable to resolve symbol: %s", sym2);
	}
    }

    /* here func (and perhaps func2) point to the appropriate functions */

    /* add new handler to end of existing list */
    while (*handler_p != NULL) {
	handler_p = &((*handler_p)->next);
    }

    if ((*handler_p = malloc(sizeof(struct handler))) == NULL) {
	pam_syslog(pamh, LOG_CRIT, "cannot malloc struct handler #1");
	return (PAM_ABORT);
    }

    (*handler_p)->handler_type = handler_type;
    (*handler_p)->stack_level = stack_level;
    (*handler_p)->func = func;
    memcpy((*handler_p)->actions,actions,sizeof((*handler_p)->actions));
    (*handler_p)->cached_retval = _PAM_INVALID_RETVAL;
    (*handler_p)->cached_retval_p = &((*handler_p)->cached_retval);
    (*handler_p)->argc = argc;
    (*handler_p)->argv = argv;			/* not a copy */
    if (((*handler_p)->mod_name = extract_modulename(mod_path)) == NULL)
	return PAM_ABORT;
    (*handler_p)->grantor = 0;
    (*handler_p)->next = NULL;

    /* some of the modules have a second calling function */
    if (handler_p2) {
	/* add new handler to end of existing list */
	while (*handler_p2) {
	    handler_p2 = &((*handler_p2)->next);
	}

	if ((*handler_p2 = malloc(sizeof(struct handler))) == NULL) {
	    pam_syslog(pamh, LOG_CRIT, "cannot malloc struct handler #2");
	    return (PAM_ABORT);
	}

	(*handler_p2)->handler_type = handler_type;
	(*handler_p2)->stack_level = stack_level;
	(*handler_p2)->func = func2;
	memcpy((*handler_p2)->actions,actions,sizeof((*handler_p2)->actions));
	(*handler_p2)->cached_retval =  _PAM_INVALID_RETVAL;     /* ignored */
	/* Note, this next entry points to the handler_p value! */
	(*handler_p2)->cached_retval_p = &((*handler_p)->cached_retval);
	(*handler_p2)->argc = argc;
	if (argv) {
	    if (((*handler_p2)->argv = malloc(argvlen)) == NULL) {
		pam_syslog(pamh, LOG_CRIT, "cannot malloc argv for handler #2");
		return (PAM_ABORT);
	    }
	    memcpy((*handler_p2)->argv, argv, argvlen);
	} else {
	    (*handler_p2)->argv = NULL;              /* no arguments */
	}
	if (((*handler_p2)->mod_name = extract_modulename(mod_path)) == NULL)
	    return PAM_ABORT;
	(*handler_p2)->grantor = 0;
	(*handler_p2)->next = NULL;
    }

    D(("_pam_add_handler: returning successfully"));

    return PAM_SUCCESS;
}
/* Free various allocated structures and dlclose() the libs */
/*
 * Tear down everything built by _pam_init_handlers: dlclose() every
 * dynamically-loaded module, free each module's name, release all
 * twelve handler chains (conf and other), then drop the module array
 * itself and mark the handlers as unloaded.  Always returns PAM_SUCCESS
 * (or PAM_SYSTEM_ERR via IF_NO_PAMH for a NULL handle).
 */
int _pam_free_handlers(pam_handle_t *pamh)
{
    struct loaded_module *mod;

    D(("called."));

    IF_NO_PAMH("_pam_free_handlers",pamh,PAM_SYSTEM_ERR);

    mod = pamh->handlers.module;

    /* Close all loaded modules */

    while (pamh->handlers.modules_used) {
	D(("_pam_free_handlers: dlclose(%s)", mod->name));
	free(mod->name);
	if (mod->type == PAM_MT_DYNAMIC_MOD) {
	    _pam_dlclose(mod->dl_handle);
	}
	mod++;
	pamh->handlers.modules_used--;
    }

    /* Free all the handlers */

    _pam_free_handlers_aux(&(pamh->handlers.conf.authenticate));
    _pam_free_handlers_aux(&(pamh->handlers.conf.setcred));
    _pam_free_handlers_aux(&(pamh->handlers.conf.acct_mgmt));
    _pam_free_handlers_aux(&(pamh->handlers.conf.open_session));
    _pam_free_handlers_aux(&(pamh->handlers.conf.close_session));
    _pam_free_handlers_aux(&(pamh->handlers.conf.chauthtok));

    _pam_free_handlers_aux(&(pamh->handlers.other.authenticate));
    _pam_free_handlers_aux(&(pamh->handlers.other.setcred));
    _pam_free_handlers_aux(&(pamh->handlers.other.acct_mgmt));
    _pam_free_handlers_aux(&(pamh->handlers.other.open_session));
    _pam_free_handlers_aux(&(pamh->handlers.other.close_session));
    _pam_free_handlers_aux(&(pamh->handlers.other.chauthtok));

    /* no more loaded modules */

    _pam_drop(pamh->handlers.module);

    /* Indicate that handlers are not initialized for this pamh */

    pamh->handlers.handlers_loaded = 0;

    return PAM_SUCCESS;
}
/*
 * Reset the handler bookkeeping of a freshly created pam handle to the
 * "nothing loaded" state: no modules, no allocated slots, and every
 * conf/other handler chain set to NULL.
 */
void _pam_start_handlers(pam_handle_t *pamh)
{
    D(("called."));

    /* NB. There is no check for a NULL pamh here, since no return
     * value to communicate the fact!  */

    /* Indicate that handlers are not initialized for this pamh */
    pamh->handlers.handlers_loaded = 0;

    pamh->handlers.modules_allocated = 0;
    pamh->handlers.modules_used = 0;
    pamh->handlers.module = NULL;

    /* initialize the .conf and .other entries */

    pamh->handlers.conf.authenticate = NULL;
    pamh->handlers.conf.setcred = NULL;
    pamh->handlers.conf.acct_mgmt = NULL;
    pamh->handlers.conf.open_session = NULL;
    pamh->handlers.conf.close_session = NULL;
    pamh->handlers.conf.chauthtok = NULL;

    pamh->handlers.other.authenticate = NULL;
    pamh->handlers.other.setcred = NULL;
    pamh->handlers.other.acct_mgmt = NULL;
    pamh->handlers.other.open_session = NULL;
    pamh->handlers.other.close_session = NULL;
    pamh->handlers.other.chauthtok = NULL;
}
/*
 * Release an entire handler chain and reset the list head to NULL.
 * Each node's argv block and module name are freed, and the node is
 * scrubbed before release.
 */
void _pam_free_handlers_aux(struct handler **hp)
{
    struct handler *cur;
    struct handler *next;

    D(("called."));

    for (cur = *hp; cur != NULL; cur = next) {
	next = cur->next;
	_pam_drop(cur->argv);	/* This is all alocated in a single chunk */
	_pam_drop(cur->mod_name);
	memset(cur, 0, sizeof(*cur));
	free(cur);
    }

    *hp = NULL;
}
|
juseongkr/BOJ | acmicpc/2164.cpp | #include <iostream>
#include <deque>
using namespace std;
// BOJ 2164 ("Card2"): cards 1..n are stacked top to bottom; repeatedly
// discard the top card and move the next one to the bottom until a
// single card remains, then print it.
int main()
{
    int n;
    scanf("%d", &n);

    deque<int> cards;
    for (int card = 1; card <= n; ++card) {
        cards.push_back(card);
    }

    // Simulate: throw away the front card, then rotate the new front
    // to the back, until only one card is left.
    while (cards.size() > 1) {
        cards.pop_front();              // discard top card
        cards.push_back(cards.front()); // next card goes to the bottom
        cards.pop_front();
    }

    printf("%d\n", cards.front());
    return 0;
}
|
Tmplt/daredevil-small | refs/platform/pal/ic/src/ic_pal.c | /*
* Copyright 2017 NXP
* All rights reserved.
*
* THIS SOFTWARE IS PROVIDED BY NXP "AS IS" AND ANY EXPRESSED OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL NXP OR ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
* IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
* THE POSSIBILITY OF SUCH DAMAGE.
*/
/*!
* @file ic_pal.c
*
* @page misra_violations MISRA-C:2012 violations
*/
#include "ic_pal.h"
/*******************************************************************************
* Variables
******************************************************************************/
/* Define state structures for IC PAL */
static ic_pal_state_t g_icPalStatePtr[NUMBER_OF_IC_PAL_INSTANCES];
/* IC state-instance matching */
static uint32_t icStateInstanceMapping[NUMBER_OF_IC_PAL_INSTANCES];
/* IC available resources table */
static bool icStateIsAllocated[NUMBER_OF_IC_PAL_INSTANCES];
#if (defined(IC_PAL_OVER_FTM))
/* The FTM state structures */
static ftm_state_t g_ftmState[NUMBER_OF_IC_PAL_INSTANCES];
#endif
/*******************************************************************************
* Code
******************************************************************************/
/*FUNCTION**********************************************************************
*
* Function Name : icAllocateState
* Description : Allocates one of the available state structure.
*
*END**************************************************************************/
/*FUNCTION**********************************************************************
 *
 * Function Name : icAllocateState
 * Description   : Claims the first free state slot and records the mapping
 *                 from that slot to the given instance number.  Returns the
 *                 slot index; if every slot is taken the returned value is
 *                 NUMBER_OF_IC_PAL_INSTANCES (no slot claimed).
 *
 *END**************************************************************************/
static uint8_t icAllocateState(bool * isAllocated,
                               uint32_t * instanceMapping,
                               uint32_t instance)
{
    uint8_t slot = 0U;

    /* Scan for the first slot that is not yet in use */
    while (slot < NUMBER_OF_IC_PAL_INSTANCES)
    {
        if (!isAllocated[slot])
        {
            /* Claim the slot for this instance */
            instanceMapping[slot] = instance;
            isAllocated[slot] = true;
            break;
        }
        slot++;
    }

    return slot;
}
/*FUNCTION**********************************************************************
*
* Function Name : icFreeState
* Description : Deallocates one of the available state structure.
*
*END**************************************************************************/
/*FUNCTION**********************************************************************
 *
 * Function Name : icFreeState
 * Description   : Releases the state slot currently mapped to the given
 *                 instance number.  Does nothing if no slot matches.
 *
 *END**************************************************************************/
static void icFreeState(bool * isAllocated,
                        uint32_t * instanceMapping,
                        uint32_t instance)
{
    uint8_t slot = 0U;

    /* Find the slot mapped to this instance and mark it free */
    while (slot < NUMBER_OF_IC_PAL_INSTANCES)
    {
        if (instanceMapping[slot] == instance)
        {
            isAllocated[slot] = false;
            break;
        }
        slot++;
    }
}
/*FUNCTION**********************************************************************
*
* Function Name : FindIcState
* Description : Search the state structure of the instance for each IP
*
*END**************************************************************************/
/*FUNCTION**********************************************************************
 *
 * Function Name : FindIcState
 * Description   : Returns the index of the state slot mapped to the given
 *                 instance; NUMBER_OF_IC_PAL_INSTANCES when no slot matches.
 *
 *END**************************************************************************/
static uint8_t FindIcState(uint32_t instance)
{
    uint8_t slot = 0U;

    while (slot < NUMBER_OF_IC_PAL_INSTANCES)
    {
        if (icStateInstanceMapping[slot] == instance)
        {
            break;
        }
        slot++;
    }

    return slot;
}
/*FUNCTION**********************************************************************
*
* Function Name : IC_Init
* Description : This function will initialize the IC PAL instance, including the
* other platform specific HW units used together in the input capture mode. This
* function configures a group of channels in instance to detect or measure the
* input signal.
*
* Implements : IC_Init_Activity
*END**************************************************************************/
/*FUNCTION**********************************************************************
 *
 * Function Name : IC_Init
 * Description : This function will initialize the IC PAL instance, including the
 * other platform specific HW units used together in the input capture mode. This
 * function configures a group of channels in instance to detect or measure the
 * input signal.
 *
 * Fixes vs. previous revision:
 *  - In the eMIOS branch, 'channel' and 'timeBase' were read uninitialized
 *    when the configured timebase matched none of the switch cases (the
 *    default case did nothing).  They now get safe defaults.
 *
 * Implements : IC_Init_Activity
 *END**************************************************************************/
status_t IC_Init(uint32_t instance,
                 const ic_config_t * configPtr)
{
    DEV_ASSERT(configPtr != NULL);
    DEV_ASSERT(instance < IC_PAL_INSTANCE_MAX);
    ic_pal_state_t * icState;
    status_t status = STATUS_SUCCESS;
    uint8_t index = 0U;
    uint8_t indexInstance = 0U;

    /* Allocate one of the IC state structure for this instance */
    indexInstance = icAllocateState(icStateIsAllocated, icStateInstanceMapping, instance);
    icState = &g_icPalStatePtr[indexInstance];
    /* A non-zero channel count means the slot is already initialized */
    DEV_ASSERT(icState->nNumChannels == 0x0U);

    /* Initialize the internal context to default value */
    icState->nNumChannels = configPtr->nNumChannels;
    for (index = 0U; index < IC_PAL_NUM_OF_CHANNEL_MAX; index++)
    {
        icState->channelConfigArray[index] = 0U;
        icState->icChannelType[index] = IC_DISABLE_OPERATION;
    }

#if (defined(IC_PAL_OVER_FTM))
    uint8_t channel;
    ftm_pwm_sync_t sync;
    ftm_user_config_t info;
    ftm_state_t * ftmState = &g_ftmState[indexInstance];
    ftm_input_param_t param;
    ftm_input_ch_param_t channelConfig[FEATURE_FTM_CHANNEL_COUNT];

    /* Configure the synchronous to default */
    sync.softwareSync = true;
    sync.hardwareSync0 = false;
    sync.hardwareSync1 = false;
    sync.hardwareSync2 = false;
    sync.maxLoadingPoint = true;
    sync.minLoadingPoint = false;
    sync.inverterSync = FTM_SYSTEM_CLOCK;
    sync.outRegSync = FTM_SYSTEM_CLOCK;
    sync.maskRegSync = FTM_SYSTEM_CLOCK;
    sync.initCounterSync = FTM_SYSTEM_CLOCK;
    sync.autoClearTrigger = false;
    sync.syncPoint = FTM_UPDATE_NOW;

    /* Get some information from user configuration */
    info.syncMethod = sync;
    info.ftmMode = FTM_MODE_INPUT_CAPTURE;
    info.ftmPrescaler = ((extension_ftm_for_ic_t *)(configPtr->extension))->ftmPrescaler;
    info.ftmClockSource = ((extension_ftm_for_ic_t *)(configPtr->extension))->ftmClockSource;
    info.BDMMode = FTM_BDM_MODE_11;
    info.isTofIsrEnabled = false;
    info.enableInitializationTrigger = false;

    /* Initializes the FTM driver */
    status = FTM_DRV_Init(instance,
                          &info,
                          ftmState);
    DEV_ASSERT(STATUS_SUCCESS == status);

    param.nNumChannels = configPtr->nNumChannels;
    param.nMaxCountValue = MAX_COUNTER_VALUE;

    /* Configure a list of channels which are used */
    for (index = 0U; index < configPtr->nNumChannels; index++)
    {
        /* Get the hardware channel ID */
        channelConfig[index].hwChannelId = configPtr->inputChConfig[index].hwChannelId;
        channel = channelConfig[index].hwChannelId;

        /* Check the input capture operation mode */
        if ((configPtr->inputChConfig[index].inputCaptureMode == IC_TIMESTAMP_RISING_EDGE) || \
            (configPtr->inputChConfig[index].inputCaptureMode == IC_TIMESTAMP_FALLING_EDGE) || \
            (configPtr->inputChConfig[index].inputCaptureMode == IC_TIMESTAMP_BOTH_EDGES))
        {
            channelConfig[index].inputMode = FTM_EDGE_DETECT;
            channelConfig[index].measurementType = FTM_NO_MEASUREMENT;

            /* Check the type of signal detection */
            if (configPtr->inputChConfig[index].inputCaptureMode == IC_TIMESTAMP_RISING_EDGE)
            {
                channelConfig[index].edgeAlignement = FTM_RISING_EDGE;
            }
            else if (configPtr->inputChConfig[index].inputCaptureMode == IC_TIMESTAMP_FALLING_EDGE)
            {
                channelConfig[index].edgeAlignement = FTM_FALLING_EDGE;
            }
            else
            {
                channelConfig[index].edgeAlignement = FTM_BOTH_EDGES;
            }
        }
        else
        {
            /* Check the channel ID need to even number in the measurement mode */
            DEV_ASSERT((channel % 2U) == 0U);
            channelConfig[index].inputMode = FTM_SIGNAL_MEASUREMENT;
            channelConfig[index].edgeAlignement = FTM_NO_PIN_CONTROL;

            /* Check the type of measurement */
            if (configPtr->inputChConfig[index].inputCaptureMode == IC_MEASURE_RISING_EDGE_PERIOD)
            {
                channelConfig[index].measurementType = FTM_RISING_EDGE_PERIOD_MEASUREMENT;
            }
            else if (configPtr->inputChConfig[index].inputCaptureMode == IC_MEASURE_FALLING_EDGE_PERIOD)
            {
                channelConfig[index].measurementType = FTM_FALLING_EDGE_PERIOD_MEASUREMENT;
            }
            else if (configPtr->inputChConfig[index].inputCaptureMode == IC_MEASURE_PULSE_HIGH)
            {
                channelConfig[index].measurementType = FTM_PERIOD_ON_MEASUREMENT;
            }
            else
            {
                channelConfig[index].measurementType = FTM_PERIOD_OFF_MEASUREMENT;
            }
        }

        /* Set channels configuration from user */
        channelConfig[index].filterEn = configPtr->inputChConfig[index].filterEn;
        channelConfig[index].filterValue = configPtr->inputChConfig[index].filterValue;
        channelConfig[index].continuousModeEn = ((channel_extension_ftm_for_ic_t *)(configPtr->inputChConfig[index].channelExtension))->continuousModeEn;
        channelConfig[index].channelsCallbacksParams = NULL;
        channelConfig[index].channelsCallbacks = NULL;

        /* Store some needed information into state structure */
        icState->channelConfigArray[index] = configPtr->inputChConfig[index].hwChannelId;
        icState->icChannelType[channel] = configPtr->inputChConfig[index].inputCaptureMode;
    }

    param.inputChConfig = channelConfig;
    /* Configure channels in input capture mode */
    status = FTM_DRV_InitInputCapture(instance,
                                      &param);
#endif

#if (defined(IC_PAL_OVER_EMIOS))
    emios_common_param_t commonParam;
    /* Safe defaults: previously these were read uninitialized when the
     * configured timebase missed every case of the switch below. */
    uint8_t channel = 0U;
    emios_bus_select_t timeBase = EMIOS_BUS_SEL_INTERNAL;
    emios_mc_mode_param_t mcParam;
    emios_input_capture_param_t icParam;

    /* Get common parameters from user */
    commonParam.allowDebugMode = false;
    commonParam.lowPowerMode = false;
    commonParam.clkDivVal = ((extension_emios_for_ic_t *)(configPtr->extension))->clkDivVal;
    commonParam.enableGlobalPrescaler = ((extension_emios_for_ic_t *)(configPtr->extension))->enableGlobalPrescaler;
    commonParam.enableGlobalTimeBase = ((extension_emios_for_ic_t *)(configPtr->extension))->enableGlobalTimeBase;

    /* Initialize the global for a eMIOS group */
    EMIOS_DRV_InitGlobal((uint8_t)instance,
                         &commonParam);

    /* Get the information from user configuration */
    mcParam.mode = EMIOS_MODE_MCB_UP_COUNTER_INT_CLK;
    mcParam.period = MAX_COUNTER_VALUE;
    mcParam.filterInput = EMIOS_INPUT_FILTER_BYPASS;
    mcParam.filterEn = false;
    mcParam.triggerMode = EMIOS_TRIGGER_EDGE_ANY;

    /* Configure a list of channels which are used */
    for (index = 0U; index < configPtr->nNumChannels; index++)
    {
        mcParam.internalPrescaler = (emios_clock_internal_ps_t)((channel_extension_emios_for_ic_t *)(configPtr->inputChConfig[index].channelExtension))->prescaler;
        mcParam.internalPrescalerEn = true;

        /* Map the selected PAL timebase to the eMIOS counter channel and
         * bus selector that provide it */
        switch (((channel_extension_emios_for_ic_t *)(configPtr->inputChConfig[index].channelExtension))->timebase)
        {
            case IC_BUS_SEL_A:
                /* Set channel to use as a time base */
                channel = 23U;
                timeBase = EMIOS_BUS_SEL_A;
                break;
            case IC_BUS_SEL_B:
                /* Set channel to use as a time base */
                channel = 0U;
                timeBase = EMIOS_BUS_SEL_BCDE;
                break;
            case IC_BUS_SEL_C:
                /* Set channel to use as a time base */
                channel = 8U;
                timeBase = EMIOS_BUS_SEL_BCDE;
                break;
            case IC_BUS_SEL_D:
                /* Set channel to use as a time base */
                channel = 16U;
                timeBase = EMIOS_BUS_SEL_BCDE;
                break;
            case IC_BUS_SEL_E:
                /* Set channel to use as a time base */
                channel = 24U;
                timeBase = EMIOS_BUS_SEL_BCDE;
                break;
            case IC_BUS_SEL_F:
                /* Set channel to use as a time base */
                channel = 22U;
                timeBase = EMIOS_BUS_SEL_F;
                break;
            case IC_BUS_SEL_INTERNAL:
                /* Set channel to use as a time base */
                channel = configPtr->inputChConfig[index].hwChannelId;
                timeBase = EMIOS_BUS_SEL_INTERNAL;
                break;
            default:
                /* Unexpected timebase value: keep the safe defaults set
                 * at declaration instead of using garbage */
                break;
        }

        /* Initialize the counter mode.
         * NOTE(review): 'status' is overwritten on each iteration without
         * being checked; only the last call's result survives — confirm
         * this matches the intended error-reporting contract. */
        status = EMIOS_DRV_MC_InitCounterMode((uint8_t)instance,
                                              channel,
                                              &mcParam);

        channel = configPtr->inputChConfig[index].hwChannelId;
        icParam.mode = EMIOS_MODE_IC;
        icParam.timebase = timeBase;
        icParam.filterInput = (emios_input_filter_t)(configPtr->inputChConfig[index].filterValue);
        icParam.filterEn = configPtr->inputChConfig[index].filterEn;
        icParam.inputCaptureMode = (emios_input_capture_mode_t)(configPtr->inputChConfig[index].inputCaptureMode);

        /* Initialize the input capture mode for each channel */
        status = EMIOS_DRV_IC_InitInputCaptureMode((uint8_t)instance,
                                                   channel,
                                                   &icParam);

        /* Store some needed information into state structure */
        icState->channelConfigArray[index] = channel;
        icState->icChannelType[channel] = configPtr->inputChConfig[index].inputCaptureMode;
        icState->timeBaseSelection[channel] = icParam.timebase;
        icState->filterEn[channel] = icParam.filterEn;
        icState->filterInput[channel] = icParam.filterInput;
    }

    /* Enable the global eMIOS to start counter */
    EMIOS_DRV_EnableGlobalEmios((uint8_t)instance);
#endif

    return status;
}
/*FUNCTION**********************************************************************
*
* Function Name : IC_Deinit
* Description : This function will disable the input capture mode. The driver
* can't be used again until reinitialized. The context structure is no longer
* needed by the driver and can be freed after calling this function.
*
* Implements : IC_Deinit_Activity
*END**************************************************************************/
/*FUNCTION**********************************************************************
 *
 * Function Name : IC_Deinit
 * Description : This function will disable the input capture mode. The driver
 * can't be used again until reinitialized. The context structure is no longer
 * needed by the driver and can be freed after calling this function.
 *
 * Implements : IC_Deinit_Activity
 *END**************************************************************************/
status_t IC_Deinit(uint32_t instance)
{
    DEV_ASSERT(instance < IC_PAL_INSTANCE_MAX);
    status_t status = STATUS_SUCCESS;
    uint8_t index = 0U;
    ic_pal_state_t * icState;

    /* Allocate one of the IC state structure for this instance */
    /* Note: 'index' is first the state-slot index here, then reused below
     * as a loop counter; that is safe because icState is captured first. */
    index = FindIcState(instance);
    icState = &g_icPalStatePtr[index];

#if (defined(IC_PAL_OVER_FTM))
    /* Disable the input capture over FTM */
    status = FTM_DRV_Deinit(instance);
#endif

#if (defined(IC_PAL_OVER_EMIOS))
    uint8_t channel;

    for (index = 0U; index < icState->nNumChannels; index++)
    {
        channel = icState->channelConfigArray[index];
        /* Disable channels in the input capture over EMIOS */
        EMIOS_DRV_DeInitChannel((uint8_t)instance,
                                channel);
    }

    /* Disable the global of EMIOS */
    EMIOS_DRV_DisableGlobalEmios((uint8_t)instance);
#endif

    /* De-Initialize the internal context to default value */
    icState->nNumChannels = 0U;
    for (index = 0U; index < IC_PAL_NUM_OF_CHANNEL_MAX; index++)
    {
        icState->channelConfigArray[index] = 0U;
        icState->icChannelType[index] = IC_DISABLE_OPERATION;
        icState->enableContinuousMode[index] = false;
    }

    if (status == STATUS_SUCCESS)
    {
        /* De-Allocate the instance which is not used */
        icFreeState(icStateIsAllocated, icStateInstanceMapping, instance);
    }

    return status;
}
/*FUNCTION**********************************************************************
*
* Function Name : IC_StartChannel
* Description : This function start channel counting.
*
* Implements : IC_StartChannel_Activity
*END**************************************************************************/
/*FUNCTION**********************************************************************
 *
 * Function Name : IC_StartChannel
 * Description : This function start channel counting.
 *
 * Implements : IC_StartChannel_Activity
 *END**************************************************************************/
void IC_StartChannel(uint32_t instance,
                     uint8_t channel)
{
    DEV_ASSERT(instance < IC_PAL_INSTANCE_MAX);

#if (defined(IC_PAL_OVER_FTM))
    /* Locate the state structure kept for this instance */
    uint8_t stateIdx = FindIcState(instance);
    ic_pal_state_t * palState = &g_icPalStatePtr[stateIdx];

    /* Re-apply the stored operation mode to restart capturing */
    (void)IC_SetChannelMode(instance,
                            channel,
                            palState->icChannelType[channel]);
#endif

#if (defined(IC_PAL_OVER_EMIOS))
    /* Counting resumes as soon as the channel clock is re-enabled */
    EMIOS_DRV_ChannelEnableClk((uint8_t)instance,
                               channel);
#endif
}
/*FUNCTION**********************************************************************
*
* Function Name : IC_StopChannel
* Description : This function will stop channel counting.
*
* Implements : IC_StopChannel_Activity
*END**************************************************************************/
/*FUNCTION**********************************************************************
 *
 * Function Name : IC_StopChannel
 * Description : This function will stop channel counting.
 *
 * Implements : IC_StopChannel_Activity
 *END**************************************************************************/
void IC_StopChannel(uint32_t instance,
                    uint8_t channel)
{
    DEV_ASSERT(instance < IC_PAL_INSTANCE_MAX);

#if (defined(IC_PAL_OVER_FTM))
    /* Disable pin not used for FTM */
    FTM_DRV_SetOutputlevel(instance,
                           channel,
                           0x0U);
#endif

#if (defined(IC_PAL_OVER_EMIOS))
    /* Disable individual channel by stopping its respective clock*/
    EMIOS_DRV_ChannelDisableClk((uint8_t)instance,
                                channel);
#endif
}
/*FUNCTION**********************************************************************
*
* Function Name : IC_SetChannelMode
* Description : This function is used to change the channel mode at run time or
* when stopping channel. The channel mode is selected in the ic_option_mode_t
* enumeration type.
*
* Implements : IC_SetChannelMode_Activity
*END**************************************************************************/
/*FUNCTION**********************************************************************
 *
 * Function Name : IC_SetChannelMode
 * Description : This function is used to change the channel mode at run time or
 * when stopping channel. The channel mode is selected in the ic_option_mode_t
 * enumeration type.
 *
 * Implements : IC_SetChannelMode_Activity
 *END**************************************************************************/
status_t IC_SetChannelMode(uint32_t instance,
                           uint8_t channel,
                           ic_option_mode_t channelMode)
{
    DEV_ASSERT(instance < IC_PAL_INSTANCE_MAX);
    DEV_ASSERT(channel < IC_PAL_NUM_OF_CHANNEL_MAX);
    status_t retStatus = STATUS_SUCCESS;

    /* Locate the state structure kept for this instance */
    uint8_t stateIdx = FindIcState(instance);
    ic_pal_state_t * palState = &g_icPalStatePtr[stateIdx];

#if (defined(IC_PAL_OVER_FTM))
    /* Apply the new operation mode, keeping the stored continuous-mode flag */
    retStatus = FTM_IC_DRV_SetChannelMode(instance,
                                          channel,
                                          (ftm_ic_op_mode_t)channelMode,
                                          palState->enableContinuousMode[channel]);
#endif

#if (defined(IC_PAL_OVER_EMIOS))
    emios_input_capture_param_t captureParam;
    emios_gpio_mode_param_t gpioParam;

    if (channelMode == IC_DISABLE_OPERATION)
    {
        /* Park the pin as a plain GPIO input with filtering bypassed */
        gpioParam.mode = EMIOS_MODE_GPIO_INPUT;
        gpioParam.filterEn = false;
        gpioParam.filterInput = EMIOS_INPUT_FILTER_BYPASS;
        gpioParam.triggerMode = EMIOS_TRIGGER_EDGE_ANY;

        /* Disable operation on the channel input */
        EMIOS_DRV_InitGpioMode((uint8_t)instance,
                               channel,
                               &gpioParam);
    }
    else
    {
        /* Rebuild the capture configuration from the cached settings */
        captureParam.mode = EMIOS_MODE_IC;
        captureParam.timebase = palState->timeBaseSelection[channel];
        captureParam.filterInput = palState->filterInput[channel];
        captureParam.filterEn = palState->filterEn[channel];
        /* The PAL mode enum is offset by one from the eMIOS capture enum */
        captureParam.inputCaptureMode = (emios_input_capture_mode_t)((uint8_t)(channelMode) - 1U);

        /* Initialize the input capture mode for each channel */
        retStatus = EMIOS_DRV_IC_InitInputCaptureMode((uint8_t)instance,
                                                      channel,
                                                      &captureParam);
    }
#endif

    /* Remember the mode so IC_StartChannel can restore it later */
    palState->icChannelType[channel] = channelMode;

    return retStatus;
}
/*FUNCTION**********************************************************************
*
* Function Name : IC_GetMeasurement
* Description : This function will get the value of measured signal in ticks.
*
* Implements : IC_GetMeasurement_Activity
*END**************************************************************************/
/*FUNCTION**********************************************************************
 *
 * Function Name : IC_GetMeasurement
 * Description : This function will get the value of measured signal in ticks.
 *
 * Implements : IC_GetMeasurement_Activity
 *END**************************************************************************/
uint16_t IC_GetMeasurement(uint32_t instance,
                           uint8_t channel)
{
    DEV_ASSERT(instance < IC_PAL_INSTANCE_MAX);
    DEV_ASSERT(channel < IC_PAL_NUM_OF_CHANNEL_MAX);
    uint16_t measured = 0U;

#if (defined(IC_PAL_OVER_FTM))
    /* FTM keeps the latest capture in the channel registers */
    measured = FTM_DRV_GetInputCaptureMeasurement(instance,
                                                  channel);
#endif

#if (defined(IC_PAL_OVER_EMIOS))
    uint32_t rawValue = 0U;
    status_t opStatus = EMIOS_DRV_IC_GetLastMeasurement((uint8_t)instance,
                                                        channel,
                                                        &rawValue);
    DEV_ASSERT(STATUS_SUCCESS == opStatus);
    /* The eMIOS counter range fits the 16-bit PAL return type */
    measured = (uint16_t)rawValue;
#endif

    return measured;
}
/*******************************************************************************
* EOF
******************************************************************************/
|
gabrielnido/FossilsArcheologyRevival | src/main/java/fossilsarcheology/server/item/AncientHelmetItem.java | <gh_stars>10-100
package fossilsarcheology.server.item;
import fossilsarcheology.Revival;
import fossilsarcheology.server.api.DefaultRenderedItem;
import fossilsarcheology.server.tab.FATabRegistry;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.inventory.EntityEquipmentSlot;
import net.minecraft.item.ItemArmor;
import net.minecraft.item.ItemStack;
import net.minecraftforge.fml.relauncher.Side;
import net.minecraftforge.fml.relauncher.SideOnly;
/**
 * Armor item for the "ancient helmet", registered under the mod's items
 * creative tab with translation key {@code ancient_helmet}.
 *
 * <p>The custom armor model is resolved client-side through
 * {@code Revival.PROXY.getArmorModel(...)}; model index 0 appears to be used
 * for chest/head/feet slots and index 1 otherwise — confirm against the
 * proxy implementation.
 */
public class AncientHelmetItem extends ItemArmor implements DefaultRenderedItem {
    public AncientHelmetItem() {
        // Material and slot come from the mod's item registry; 0 is the render index.
        super(FAItemRegistry.ANCIENT_HELMET_MATERIAL, 0, EntityEquipmentSlot.HEAD);
        this.setCreativeTab(FATabRegistry.ITEMS);
        this.setTranslationKey("ancient_helmet");
    }

    /**
     * Returns the biped model used to render this armor piece, with only the
     * body parts belonging to the worn slot made visible, or {@code null}
     * when no custom model applies (falls back to the default rendering).
     *
     * NOTE(review): the {@code itemStack != null} check may be vestigial —
     * in this Minecraft version stacks are usually non-null (empty instead);
     * verify before relying on the null return path.
     */
    @Override
    @SideOnly(Side.CLIENT)
    public net.minecraft.client.model.ModelBiped getArmorModel(EntityLivingBase entityLiving, ItemStack itemStack, EntityEquipmentSlot armorSlot, net.minecraft.client.model.ModelBiped _default) {
        net.minecraft.client.model.ModelBiped armorModel = new net.minecraft.client.model.ModelBiped();
        if (itemStack != null) {
            if (itemStack.getItem() instanceof AncientHelmetItem) {
                EntityEquipmentSlot type = ((ItemArmor) itemStack.getItem()).armorType;
                // Pick the proxy model variant based on the armor piece's own slot.
                if (type == EntityEquipmentSlot.CHEST || type == EntityEquipmentSlot.HEAD || type == EntityEquipmentSlot.FEET) {
                    armorModel = (net.minecraft.client.model.ModelBiped) Revival.PROXY.getArmorModel(0);
                } else {
                    armorModel = (net.minecraft.client.model.ModelBiped) Revival.PROXY.getArmorModel(1);
                }
            }
            if (armorModel != null) {
                // Show only the model parts covered by the slot being rendered.
                armorModel.bipedHead.showModel = armorSlot == EntityEquipmentSlot.HEAD;
                armorModel.bipedHeadwear.showModel = armorSlot == EntityEquipmentSlot.HEAD;
                armorModel.bipedBody.showModel = armorSlot == EntityEquipmentSlot.CHEST || armorSlot == EntityEquipmentSlot.LEGS;
                armorModel.bipedRightArm.showModel = armorSlot == EntityEquipmentSlot.CHEST;
                armorModel.bipedLeftArm.showModel = armorSlot == EntityEquipmentSlot.CHEST;
                armorModel.bipedRightLeg.showModel = armorSlot == EntityEquipmentSlot.LEGS || armorSlot == EntityEquipmentSlot.FEET;
                armorModel.bipedLeftLeg.showModel = armorSlot == EntityEquipmentSlot.LEGS || armorSlot == EntityEquipmentSlot.FEET;
                // Mirror the wearer's pose flags onto the armor model.
                armorModel.isSneak = entityLiving.isSneaking();
                armorModel.isRiding = entityLiving.isRiding();
                armorModel.isChild = entityLiving.isChild();
                return armorModel;
            }
        }
        return null;
    }
}
|
couchbaselabs/talend-components | services/components-api-service-rest/src/test/java/org/talend/components/service/rest/impl/RuntimeControllerImplTest.java | <filename>services/components-api-service-rest/src/test/java/org/talend/components/service/rest/impl/RuntimeControllerImplTest.java<gh_stars>1-10
//==============================================================================
//
// Copyright (C) 2006-2017 Talend Inc. - www.talend.com
//
// This source code is available under agreement available at
// %InstallDIR%\features\org.talend.rcp.branding.%PRODUCTNAME%\%PRODUCTNAME%license.txt
//
// You should have received a copy of the agreement
// along with this program; if not, write to Talend SA
// 9 rue Pages 92150 Suresnes, France
//
//==============================================================================
package org.talend.components.service.rest.impl;
import com.jayway.restassured.response.Response;
import org.junit.Test;
import org.springframework.http.HttpStatus;
import org.talend.components.service.rest.AbstractSpringIntegrationTests;
import org.talend.components.service.rest.dto.PropertiesDto;
import org.talend.components.service.rest.mock.MockDatasetRuntime;
import static com.jayway.restassured.RestAssured.given;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.springframework.http.MediaType.APPLICATION_JSON_UTF8_VALUE;
/**
 * Integration tests for the runtime REST endpoints: connection validation,
 * dataset schema retrieval and dataset data retrieval.
 *
 * All three tests POST a {@link PropertiesDto} payload to a runtime endpoint
 * and expect HTTP 200; the shared request plumbing lives in
 * {@link #postRuntime(PropertiesDto, String, String)} to avoid the previously
 * triplicated REST-assured boilerplate (which also mixed
 * {@code HttpStatus.OK.value()} with the magic literal {@code 200}).
 */
public class RuntimeControllerImplTest extends AbstractSpringIntegrationTests {

    /**
     * Posts the given payload to {@code path} (with {@code definitionName}
     * substituted) and asserts an HTTP 200 response, logging on error.
     *
     * @param payload        form data sent as the JSON request body
     * @param path           endpoint path template containing one placeholder
     * @param definitionName value substituted into the path template
     * @return the REST-assured response, never null
     */
    private Response postRuntime(PropertiesDto payload, String path, String definitionName) {
        return given().accept(APPLICATION_JSON_UTF8_VALUE) //
                .expect() //
                .statusCode(HttpStatus.OK.value()).log().ifError() //
                .with().content(payload) //
                .contentType(APPLICATION_JSON_UTF8_VALUE) //
                .post(path, definitionName);
    }

    @Test
    public void validateConnection() throws Exception {
        // given
        PropertiesDto propertiesDto = buildTestDataStoreFormData();

        // when
        Response response = postRuntime(propertiesDto, "/runtimes/{definitionName}", DATA_STORE_DEFINITION_NAME);

        // then
        assertNotNull(response);
        String content = response.asString();
        assertNotNull(content);
    }

    @Test
    public void getDatasetSchema() throws Exception {
        // given
        PropertiesDto formDataContainer = buildTestDataSetFormData();

        // when
        Response response = postRuntime(formDataContainer, "/runtimes/{datasetDefinitionName}/schema", DATA_SET_DEFINITION_NAME);

        // then
        assertNotNull(response);
        String content = response.asString();
        assertNotNull(content);
        assertEquals(MockDatasetRuntime.getSchemaJsonRepresentation(), content);
    }

    @Test
    public void getDatasetData() throws Exception {
        // given
        PropertiesDto formDataContainer = buildTestDataSetFormData();

        // when
        Response response = postRuntime(formDataContainer, "/runtimes/{datasetDefinitionName}/data", DATA_SET_DEFINITION_NAME);

        // then
        assertNotNull(response);
        String content = response.asString();
        assertNotNull(content);
        assertEquals(MockDatasetRuntime.getRecordJsonRepresentation(), content);
    }
}
|
PY-GZKY/fastapi-crawl-admin | backend/app/api/api_v1/core/tools/websocketDemo.py | import time
import asyncio

from fastapi import FastAPI
from fastapi.testclient import TestClient
from fastapi.websockets import WebSocket, WebSocketDisconnect
app = FastAPI()
@app.get("/")
async def read_main():
    """Root endpoint: always answers with the fixed greeting payload."""
    payload = {"msg": "Hello World"}
    return payload
@app.websocket_route("/ws")
async def websocket(websocket: WebSocket):
    """Accept a websocket connection and push 12 greeting frames, 3 s apart.

    Bug fix: the original called ``time.sleep(3)`` inside this coroutine,
    which blocks the whole event loop (no other request can be served while
    sleeping). ``await asyncio.sleep(3)`` yields control back to the loop
    instead, with identical timing and frame count.
    """
    await websocket.accept()
    _a = 0
    while True:
        # Non-blocking pause between frames (was the blocking time.sleep(3)).
        await asyncio.sleep(3)
        await websocket.send_json({"msg": "Hello WebSocket"})
        # Counter runs 0..11, so exactly 12 frames are sent before closing.
        if _a > 10:
            break
        _a += 1
    await websocket.close()
def test_read_main():
    """Drive the root endpoint through the test client and check the payload."""
    http = TestClient(app)
    resp = http.get("/")
    print(resp.json())
    assert resp.status_code == 200
    assert resp.json() == {"msg": "Hello World"}
def test_websocket():
    """Consume /ws frames until the server closes; each frame must match.

    The receive loop runs until the server-side handler closes the socket,
    which surfaces here as WebSocketDisconnect and ends the test.
    """
    client = TestClient(app)
    with client.websocket_connect("/ws") as websocket:
        try:
            while 1:
                data = websocket.receive_json()
                print(data)
                assert data == {"msg": "Hello WebSocket"}
        except WebSocketDisconnect:
            # Expected normal termination; "关闭" means "closed".
            print("关闭")
if __name__ == '__main__':
    # Manual smoke-test entry point; only the websocket round-trip is run.
    # test_read_main()
    test_websocket()
ministryofjustice/hmpps-book-secure-move-api | spec/lib/nomis_client/court_cases_spec.rb | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe NomisClient::CourtCases, with_nomis_client_authentication: true do
describe '.get' do
let(:booking_id) { '1495077' }
let(:filter_params) { nil }
it 'calls the nomis client with the correct booking id' do
described_class.get(booking_id, filter_params)
expect(token).to have_received(:get).with('/api/bookings/1495077/court-cases', {})
end
context 'when no filter_params are passed' do
it 'returns active court cases' do
described_class.get(booking_id)
expect(token).to have_received(:get).with("/api/bookings/#{booking_id}/court-cases?activeOnly=true", {})
end
end
context 'when filter_params are present' do
let(:filter_params) { ActionController::Parameters.new(active: 'true') }
it 'returns active court cases' do
described_class.get(booking_id, filter_params)
expect(token).to have_received(:get).with("/api/bookings/#{booking_id}/court-cases?activeOnly=true", {})
end
end
end
end
|
cisco-ie/cisco-proto | codegen/go/xr/62x/cisco_ios_xr_pmengine_oper/performance_management_history/global/periodic/otu_history/otu_port_histories/otu_port_history/otu_minute15_history/otu_minute15otn_histories/otu_minute15otn_history/otu_minute15otn_time_line_instances/otu_minute15otn_time_line_instance/pm_otn_paras.pb.go | <gh_stars>1-10
/*
Copyright 2019 Cisco Systems
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Code generated by protoc-gen-go. DO NOT EDIT.
// source: pm_otn_paras.proto
package cisco_ios_xr_pmengine_oper_performance_management_history_global_periodic_otu_history_otu_port_histories_otu_port_history_otu_minute15_history_otu_minute15otn_histories_otu_minute15otn_history_otu_minute15otn_time_line_instances_otu_minute15otn_time_line_instance
import (
fmt "fmt"
proto "github.com/golang/protobuf/proto"
math "math"
)
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package
type PmOtnParas_KEYS struct {
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
Number uint32 `protobuf:"varint,2,opt,name=number,proto3" json:"number,omitempty"`
Number_1 uint32 `protobuf:"varint,3,opt,name=number_1,json=number1,proto3" json:"number_1,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *PmOtnParas_KEYS) Reset() { *m = PmOtnParas_KEYS{} }
func (m *PmOtnParas_KEYS) String() string { return proto.CompactTextString(m) }
func (*PmOtnParas_KEYS) ProtoMessage() {}
func (*PmOtnParas_KEYS) Descriptor() ([]byte, []int) {
return fileDescriptor_fe1002cf207593db, []int{0}
}
func (m *PmOtnParas_KEYS) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_PmOtnParas_KEYS.Unmarshal(m, b)
}
func (m *PmOtnParas_KEYS) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_PmOtnParas_KEYS.Marshal(b, m, deterministic)
}
func (m *PmOtnParas_KEYS) XXX_Merge(src proto.Message) {
xxx_messageInfo_PmOtnParas_KEYS.Merge(m, src)
}
func (m *PmOtnParas_KEYS) XXX_Size() int {
return xxx_messageInfo_PmOtnParas_KEYS.Size(m)
}
func (m *PmOtnParas_KEYS) XXX_DiscardUnknown() {
xxx_messageInfo_PmOtnParas_KEYS.DiscardUnknown(m)
}
var xxx_messageInfo_PmOtnParas_KEYS proto.InternalMessageInfo
func (m *PmOtnParas_KEYS) GetName() string {
if m != nil {
return m.Name
}
return ""
}
func (m *PmOtnParas_KEYS) GetNumber() uint32 {
if m != nil {
return m.Number
}
return 0
}
func (m *PmOtnParas_KEYS) GetNumber_1() uint32 {
if m != nil {
return m.Number_1
}
return 0
}
type PmOtnParameter struct {
Data uint64 `protobuf:"varint,1,opt,name=data,proto3" json:"data,omitempty"`
Threshold uint64 `protobuf:"varint,2,opt,name=threshold,proto3" json:"threshold,omitempty"`
TcaReport bool `protobuf:"varint,3,opt,name=tca_report,json=tcaReport,proto3" json:"tca_report,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *PmOtnParameter) Reset() { *m = PmOtnParameter{} }
func (m *PmOtnParameter) String() string { return proto.CompactTextString(m) }
func (*PmOtnParameter) ProtoMessage() {}
func (*PmOtnParameter) Descriptor() ([]byte, []int) {
return fileDescriptor_fe1002cf207593db, []int{1}
}
func (m *PmOtnParameter) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_PmOtnParameter.Unmarshal(m, b)
}
func (m *PmOtnParameter) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_PmOtnParameter.Marshal(b, m, deterministic)
}
func (m *PmOtnParameter) XXX_Merge(src proto.Message) {
xxx_messageInfo_PmOtnParameter.Merge(m, src)
}
func (m *PmOtnParameter) XXX_Size() int {
return xxx_messageInfo_PmOtnParameter.Size(m)
}
func (m *PmOtnParameter) XXX_DiscardUnknown() {
xxx_messageInfo_PmOtnParameter.DiscardUnknown(m)
}
var xxx_messageInfo_PmOtnParameter proto.InternalMessageInfo
func (m *PmOtnParameter) GetData() uint64 {
if m != nil {
return m.Data
}
return 0
}
func (m *PmOtnParameter) GetThreshold() uint64 {
if m != nil {
return m.Threshold
}
return 0
}
func (m *PmOtnParameter) GetTcaReport() bool {
if m != nil {
return m.TcaReport
}
return false
}
type PmOtnParameterRatio struct {
Data string `protobuf:"bytes,1,opt,name=data,proto3" json:"data,omitempty"`
Threshold string `protobuf:"bytes,2,opt,name=threshold,proto3" json:"threshold,omitempty"`
TcaReport bool `protobuf:"varint,3,opt,name=tca_report,json=tcaReport,proto3" json:"tca_report,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *PmOtnParameterRatio) Reset() { *m = PmOtnParameterRatio{} }
func (m *PmOtnParameterRatio) String() string { return proto.CompactTextString(m) }
func (*PmOtnParameterRatio) ProtoMessage() {}
func (*PmOtnParameterRatio) Descriptor() ([]byte, []int) {
return fileDescriptor_fe1002cf207593db, []int{2}
}
func (m *PmOtnParameterRatio) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_PmOtnParameterRatio.Unmarshal(m, b)
}
func (m *PmOtnParameterRatio) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_PmOtnParameterRatio.Marshal(b, m, deterministic)
}
func (m *PmOtnParameterRatio) XXX_Merge(src proto.Message) {
xxx_messageInfo_PmOtnParameterRatio.Merge(m, src)
}
func (m *PmOtnParameterRatio) XXX_Size() int {
return xxx_messageInfo_PmOtnParameterRatio.Size(m)
}
func (m *PmOtnParameterRatio) XXX_DiscardUnknown() {
xxx_messageInfo_PmOtnParameterRatio.DiscardUnknown(m)
}
var xxx_messageInfo_PmOtnParameterRatio proto.InternalMessageInfo
func (m *PmOtnParameterRatio) GetData() string {
if m != nil {
return m.Data
}
return ""
}
func (m *PmOtnParameterRatio) GetThreshold() string {
if m != nil {
return m.Threshold
}
return ""
}
func (m *PmOtnParameterRatio) GetTcaReport() bool {
if m != nil {
return m.TcaReport
}
return false
}
type PmOtnParas struct {
Index uint32 `protobuf:"varint,50,opt,name=index,proto3" json:"index,omitempty"`
Valid bool `protobuf:"varint,51,opt,name=valid,proto3" json:"valid,omitempty"`
Timestamp string `protobuf:"bytes,52,opt,name=timestamp,proto3" json:"timestamp,omitempty"`
LastClearTime string `protobuf:"bytes,53,opt,name=last_clear_time,json=lastClearTime,proto3" json:"last_clear_time,omitempty"`
LastClear15MinTime string `protobuf:"bytes,54,opt,name=last_clear15_min_time,json=lastClear15MinTime,proto3" json:"last_clear15_min_time,omitempty"`
LastClear30SecTime string `protobuf:"bytes,55,opt,name=last_clear30_sec_time,json=lastClear30SecTime,proto3" json:"last_clear30_sec_time,omitempty"`
LastClear24HrTime string `protobuf:"bytes,56,opt,name=last_clear24_hr_time,json=lastClear24HrTime,proto3" json:"last_clear24_hr_time,omitempty"`
Sec30Support bool `protobuf:"varint,57,opt,name=sec30_support,json=sec30Support,proto3" json:"sec30_support,omitempty"`
Lbc *PmOtnParameter `protobuf:"bytes,58,opt,name=lbc,proto3" json:"lbc,omitempty"`
EsNe *PmOtnParameter `protobuf:"bytes,59,opt,name=es_ne,json=esNe,proto3" json:"es_ne,omitempty"`
EsrNe *PmOtnParameterRatio `protobuf:"bytes,60,opt,name=esr_ne,json=esrNe,proto3" json:"esr_ne,omitempty"`
SesNe *PmOtnParameter `protobuf:"bytes,61,opt,name=ses_ne,json=sesNe,proto3" json:"ses_ne,omitempty"`
SesrNe *PmOtnParameterRatio `protobuf:"bytes,62,opt,name=sesr_ne,json=sesrNe,proto3" json:"sesr_ne,omitempty"`
UasNe *PmOtnParameter `protobuf:"bytes,63,opt,name=uas_ne,json=uasNe,proto3" json:"uas_ne,omitempty"`
BbeNe *PmOtnParameter `protobuf:"bytes,64,opt,name=bbe_ne,json=bbeNe,proto3" json:"bbe_ne,omitempty"`
BberNe *PmOtnParameterRatio `protobuf:"bytes,65,opt,name=bber_ne,json=bberNe,proto3" json:"bber_ne,omitempty"`
FcNe *PmOtnParameter `protobuf:"bytes,66,opt,name=fc_ne,json=fcNe,proto3" json:"fc_ne,omitempty"`
EsFe *PmOtnParameter `protobuf:"bytes,67,opt,name=es_fe,json=esFe,proto3" json:"es_fe,omitempty"`
EsrFe *PmOtnParameterRatio `protobuf:"bytes,68,opt,name=esr_fe,json=esrFe,proto3" json:"esr_fe,omitempty"`
SesFe *PmOtnParameter `protobuf:"bytes,69,opt,name=ses_fe,json=sesFe,proto3" json:"ses_fe,omitempty"`
SesrFe *PmOtnParameterRatio `protobuf:"bytes,70,opt,name=sesr_fe,json=sesrFe,proto3" json:"sesr_fe,omitempty"`
UasFe *PmOtnParameter `protobuf:"bytes,71,opt,name=uas_fe,json=uasFe,proto3" json:"uas_fe,omitempty"`
BbeFe *PmOtnParameter `protobuf:"bytes,72,opt,name=bbe_fe,json=bbeFe,proto3" json:"bbe_fe,omitempty"`
BberFe *PmOtnParameterRatio `protobuf:"bytes,73,opt,name=bber_fe,json=bberFe,proto3" json:"bber_fe,omitempty"`
FcFe *PmOtnParameter `protobuf:"bytes,74,opt,name=fc_fe,json=fcFe,proto3" json:"fc_fe,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *PmOtnParas) Reset() { *m = PmOtnParas{} }
func (m *PmOtnParas) String() string { return proto.CompactTextString(m) }
func (*PmOtnParas) ProtoMessage() {}
func (*PmOtnParas) Descriptor() ([]byte, []int) {
return fileDescriptor_fe1002cf207593db, []int{3}
}
func (m *PmOtnParas) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_PmOtnParas.Unmarshal(m, b)
}
func (m *PmOtnParas) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_PmOtnParas.Marshal(b, m, deterministic)
}
func (m *PmOtnParas) XXX_Merge(src proto.Message) {
xxx_messageInfo_PmOtnParas.Merge(m, src)
}
func (m *PmOtnParas) XXX_Size() int {
return xxx_messageInfo_PmOtnParas.Size(m)
}
func (m *PmOtnParas) XXX_DiscardUnknown() {
xxx_messageInfo_PmOtnParas.DiscardUnknown(m)
}
var xxx_messageInfo_PmOtnParas proto.InternalMessageInfo
func (m *PmOtnParas) GetIndex() uint32 {
if m != nil {
return m.Index
}
return 0
}
func (m *PmOtnParas) GetValid() bool {
if m != nil {
return m.Valid
}
return false
}
func (m *PmOtnParas) GetTimestamp() string {
if m != nil {
return m.Timestamp
}
return ""
}
func (m *PmOtnParas) GetLastClearTime() string {
if m != nil {
return m.LastClearTime
}
return ""
}
func (m *PmOtnParas) GetLastClear15MinTime() string {
if m != nil {
return m.LastClear15MinTime
}
return ""
}
func (m *PmOtnParas) GetLastClear30SecTime() string {
if m != nil {
return m.LastClear30SecTime
}
return ""
}
func (m *PmOtnParas) GetLastClear24HrTime() string {
if m != nil {
return m.LastClear24HrTime
}
return ""
}
func (m *PmOtnParas) GetSec30Support() bool {
if m != nil {
return m.Sec30Support
}
return false
}
func (m *PmOtnParas) GetLbc() *PmOtnParameter {
if m != nil {
return m.Lbc
}
return nil
}
func (m *PmOtnParas) GetEsNe() *PmOtnParameter {
if m != nil {
return m.EsNe
}
return nil
}
func (m *PmOtnParas) GetEsrNe() *PmOtnParameterRatio {
if m != nil {
return m.EsrNe
}
return nil
}
func (m *PmOtnParas) GetSesNe() *PmOtnParameter {
if m != nil {
return m.SesNe
}
return nil
}
func (m *PmOtnParas) GetSesrNe() *PmOtnParameterRatio {
if m != nil {
return m.SesrNe
}
return nil
}
func (m *PmOtnParas) GetUasNe() *PmOtnParameter {
if m != nil {
return m.UasNe
}
return nil
}
func (m *PmOtnParas) GetBbeNe() *PmOtnParameter {
if m != nil {
return m.BbeNe
}
return nil
}
func (m *PmOtnParas) GetBberNe() *PmOtnParameterRatio {
if m != nil {
return m.BberNe
}
return nil
}
func (m *PmOtnParas) GetFcNe() *PmOtnParameter {
if m != nil {
return m.FcNe
}
return nil
}
func (m *PmOtnParas) GetEsFe() *PmOtnParameter {
if m != nil {
return m.EsFe
}
return nil
}
func (m *PmOtnParas) GetEsrFe() *PmOtnParameterRatio {
if m != nil {
return m.EsrFe
}
return nil
}
func (m *PmOtnParas) GetSesFe() *PmOtnParameter {
if m != nil {
return m.SesFe
}
return nil
}
func (m *PmOtnParas) GetSesrFe() *PmOtnParameterRatio {
if m != nil {
return m.SesrFe
}
return nil
}
func (m *PmOtnParas) GetUasFe() *PmOtnParameter {
if m != nil {
return m.UasFe
}
return nil
}
func (m *PmOtnParas) GetBbeFe() *PmOtnParameter {
if m != nil {
return m.BbeFe
}
return nil
}
func (m *PmOtnParas) GetBberFe() *PmOtnParameterRatio {
if m != nil {
return m.BberFe
}
return nil
}
func (m *PmOtnParas) GetFcFe() *PmOtnParameter {
if m != nil {
return m.FcFe
}
return nil
}
func init() {
proto.RegisterType((*PmOtnParas_KEYS)(nil), "cisco_ios_xr_pmengine_oper.performance_management_history.global.periodic.otu_history.otu_port_histories.otu_port_history.otu_minute15_history.otu_minute15otn_histories.otu_minute15otn_history.otu_minute15otn_time_line_instances.otu_minute15otn_time_line_instance.pm_otn_paras_KEYS")
proto.RegisterType((*PmOtnParameter)(nil), "cisco_ios_xr_pmengine_oper.performance_management_history.global.periodic.otu_history.otu_port_histories.otu_port_history.otu_minute15_history.otu_minute15otn_histories.otu_minute15otn_history.otu_minute15otn_time_line_instances.otu_minute15otn_time_line_instance.pm_otn_parameter")
proto.RegisterType((*PmOtnParameterRatio)(nil), "cisco_ios_xr_pmengine_oper.performance_management_history.global.periodic.otu_history.otu_port_histories.otu_port_history.otu_minute15_history.otu_minute15otn_histories.otu_minute15otn_history.otu_minute15otn_time_line_instances.otu_minute15otn_time_line_instance.pm_otn_parameter_ratio")
proto.RegisterType((*PmOtnParas)(nil), "cisco_ios_xr_pmengine_oper.performance_management_history.global.periodic.otu_history.otu_port_histories.otu_port_history.otu_minute15_history.otu_minute15otn_histories.otu_minute15otn_history.otu_minute15otn_time_line_instances.otu_minute15otn_time_line_instance.pm_otn_paras")
}
func init() { proto.RegisterFile("pm_otn_paras.proto", fileDescriptor_fe1002cf207593db) }
var fileDescriptor_fe1002cf207593db = []byte{
// 722 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xec, 0x59, 0xcd, 0x6a, 0x1b, 0x3b,
0x14, 0xc6, 0x13, 0xdb, 0x89, 0x75, 0x13, 0xee, 0x8d, 0xc8, 0x0d, 0xba, 0x70, 0x0b, 0x21, 0x85,
0x12, 0x28, 0xb8, 0xb1, 0x1d, 0xf7, 0xff, 0x3f, 0xcd, 0x34, 0x6d, 0x69, 0x16, 0x93, 0x6e, 0xda,
0x8d, 0xd0, 0x8c, 0xcf, 0xc4, 0x82, 0x19, 0x69, 0x90, 0xe4, 0x92, 0x3e, 0x41, 0x9f, 0xa1, 0xab,
0xbe, 0x40, 0xa1, 0x74, 0x55, 0xfa, 0x14, 0x7d, 0xa5, 0x22, 0x8d, 0x63, 0x3b, 0x4e, 0x83, 0xa7,
0x7b, 0xed, 0xa4, 0xa3, 0xef, 0x9c, 0x23, 0x7d, 0xf3, 0x1d, 0xf3, 0x81, 0x11, 0x2e, 0x72, 0x2a,
0x8d, 0xa0, 0x05, 0x53, 0x4c, 0xb7, 0x0b, 0x25, 0x8d, 0xc4, 0x1f, 0x83, 0x84, 0xeb, 0x44, 0x52,
0x2e, 0x35, 0x3d, 0x55, 0xb4, 0xc8, 0x41, 0x9c, 0x70, 0x01, 0x54, 0x16, 0xa0, 0xda, 0x05, 0xa8,
0x54, 0xaa, 0x9c, 0x89, 0x04, 0x68, 0xce, 0x04, 0x3b, 0x81, 0x1c, 0x84, 0xa1, 0x43, 0xae, 0x8d,
0x54, 0x1f, 0xda, 0x27, 0x99, 0x8c, 0x59, 0x66, 0x51, 0x5c, 0x0e, 0x78, 0xd2, 0x96, 0x66, 0x34,
0x39, 0xb3, 0xeb, 0x42, 0xaa, 0x33, 0x30, 0x07, 0x3d, 0x1f, 0x2a, 0x31, 0x39, 0x17, 0x23, 0x03,
0x9d, 0xfe, 0x6f, 0x83, 0xf6, 0xb2, 0xe7, 0xf3, 0x2f, 0x9e, 0x5c, 0xcc, 0x30, 0x3c, 0x07, 0x9a,
0xd9, 0x47, 0x70, 0xa1, 0x8d, 0xbd, 0xbc, 0xae, 0x80, 0xd9, 0x7e, 0x87, 0xd6, 0x67, 0xf9, 0xa1,
0xaf, 0x0e, 0xde, 0x1e, 0x63, 0x8c, 0xea, 0x82, 0xe5, 0x40, 0x6a, 0x5b, 0xb5, 0x9d, 0x56, 0xe4,
0xd6, 0x78, 0x13, 0x35, 0xc5, 0x28, 0x8f, 0x41, 0x91, 0x60, 0xab, 0xb6, 0xb3, 0x16, 0x8d, 0x77,
0xf8, 0x3f, 0xb4, 0x52, 0xae, 0x68, 0x87, 0x2c, 0xb9, 0x93, 0xe5, 0x72, 0xdf, 0xd9, 0x4e, 0xd0,
0x3f, 0x33, 0xb5, 0x73, 0x30, 0xa0, 0x6c, 0xe9, 0x01, 0x33, 0xcc, 0x95, 0xae, 0x47, 0x6e, 0x8d,
0xff, 0x47, 0x2d, 0x33, 0x54, 0xa0, 0x87, 0x32, 0x1b, 0xb8, 0xea, 0xf5, 0x68, 0x1a, 0xc0, 0x57,
0x10, 0x32, 0x09, 0xa3, 0x0a, 0x2c, 0x8b, 0xae, 0xc5, 0x4a, 0xd4, 0x32, 0x09, 0x8b, 0x5c, 0x60,
0x9b, 0xa3, 0xcd, 0xf9, 0x26, 0x54, 0x31, 0xc3, 0xe5, 0xb9, 0x56, 0xad, 0xcb, 0x5a, 0xb5, 0xfe,
0xa0, 0xd5, 0xcf, 0xeb, 0x68, 0x75, 0x96, 0x2c, 0xbc, 0x81, 0x1a, 0x5c, 0x0c, 0xe0, 0x94, 0x74,
0xdd, 0xc3, 0xcb, 0x8d, 0x8d, 0xbe, 0x67, 0x19, 0x1f, 0x90, 0x9e, 0x2b, 0x50, 0x6e, 0x5c, 0x67,
0x9e, 0x83, 0x36, 0x2c, 0x2f, 0xc8, 0xde, 0xb8, 0xf3, 0x59, 0x00, 0x5f, 0x43, 0x7f, 0x67, 0x4c,
0x1b, 0x9a, 0x64, 0xc0, 0x94, 0xfb, 0x4e, 0xa4, 0xef, 0x30, 0x6b, 0x36, 0xbc, 0x6f, 0xa3, 0x6f,
0x78, 0x0e, 0xb8, 0x83, 0xfe, 0x9d, 0xe2, 0x3a, 0x7d, 0xfb, 0x75, 0x4b, 0xf4, 0x4d, 0x87, 0xc6,
0x13, 0x74, 0xa7, 0xff, 0x9a, 0x8b, 0x8b, 0x29, 0xbd, 0x5d, 0xaa, 0x21, 0x29, 0x53, 0x6e, 0xcd,
0xa5, 0xf4, 0x76, 0x8f, 0x21, 0x71, 0x29, 0x37, 0xd0, 0xc6, 0x34, 0xa5, 0xbb, 0x47, 0x87, 0xe3,
0x2b, 0xdd, 0x76, 0x19, 0xeb, 0x93, 0x8c, 0xee, 0xde, 0x61, 0x79, 0xad, 0xab, 0x68, 0x4d, 0x43,
0x62, 0x8b, 0x8f, 0x0a, 0xc7, 0xdd, 0x1d, 0xf7, 0xf4, 0x55, 0x17, 0x3c, 0x2e, 0x63, 0xf8, 0x4b,
0x80, 0x96, 0xb2, 0x38, 0x21, 0x77, 0xb7, 0x6a, 0x3b, 0x7f, 0x75, 0x3f, 0x05, 0x6d, 0x3f, 0x83,
0x0e, 0xd3, 0x9e, 0xd7, 0x6f, 0x64, 0x69, 0xc2, 0x5f, 0x03, 0xd4, 0x00, 0x4d, 0x05, 0x90, 0x7b,
0x9e, 0xb0, 0x05, 0x84, 0xd5, 0x41, 0x1f, 0x01, 0xfe, 0x1e, 0xa0, 0x26, 0x68, 0x65, 0x29, 0xbb,
0xef, 0x28, 0xfb, 0xec, 0x29, 0xbb, 0x8c, 0xb2, 0xf2, 0x37, 0x32, 0x6a, 0x80, 0x56, 0x47, 0x80,
0xbf, 0x05, 0xa8, 0xa9, 0x4b, 0xb1, 0x3d, 0xf0, 0x62, 0x5b, 0x20, 0xb6, 0x86, 0x76, 0x6a, 0xfb,
0x11, 0xa0, 0x65, 0x3d, 0x96, 0xdb, 0x43, 0x2f, 0xb7, 0x4a, 0x72, 0xb3, 0x22, 0x3b, 0xd3, 0xdb,
0x88, 0x39, 0xbd, 0x3d, 0xf2, 0x7a, 0x5b, 0xa4, 0xb7, 0x11, 0xd3, 0x63, 0xce, 0xe2, 0x18, 0x2c,
0x67, 0x8f, 0x3d, 0x67, 0x8b, 0x38, 0x8b, 0x63, 0x18, 0xcf, 0x68, 0x6c, 0xbd, 0xa9, 0x00, 0xf2,
0xc4, 0xcf, 0x68, 0xb5, 0x19, 0xb5, 0x84, 0x1d, 0x81, 0xf3, 0x1f, 0x69, 0x62, 0x99, 0x7b, 0xea,
0xe5, 0xb6, 0xc8, 0x7f, 0xa4, 0xc9, 0x98, 0x31, 0xd0, 0x34, 0x05, 0xb2, 0xef, 0x19, 0x5b, 0xec,
0xd8, 0xc2, 0xa9, 0x63, 0x4b, 0x81, 0x3c, 0xf3, 0xe3, 0x59, 0xd9, 0xb1, 0x85, 0x53, 0xc7, 0x96,
0x02, 0x39, 0xf0, 0x62, 0xab, 0xe0, 0xd8, 0xc2, 0x19, 0xc7, 0x96, 0x02, 0x09, 0xbd, 0xdc, 0xaa,
0x3b, 0xb6, 0x70, 0xea, 0xd8, 0x52, 0x20, 0xcf, 0xbd, 0xde, 0x2a, 0x38, 0xb6, 0x70, 0xea, 0xd8,
0x52, 0x20, 0x87, 0x9e, 0xb3, 0x0a, 0x8e, 0x2d, 0x9c, 0x71, 0x6c, 0x29, 0x90, 0x17, 0x7e, 0x46,
0xab, 0x3b, 0xb6, 0x70, 0xe2, 0xd8, 0x52, 0x20, 0x2f, 0xbd, 0xdc, 0x16, 0x3b, 0xb6, 0x10, 0xe2,
0xa6, 0xfb, 0x3b, 0xa0, 0xf7, 0x2b, 0x00, 0x00, 0xff, 0xff, 0x9a, 0x86, 0x8b, 0xc8, 0x24, 0x18,
0x00, 0x00,
}
|
hyunjoy/scripts | qemu/include/hw/usb/hcd-dwc3.h | <filename>qemu/include/hw/usb/hcd-dwc3.h
/*
* QEMU model of the USB DWC3 host controller emulation.
*
* Copyright (c) 2020 Xilinx Inc.
*
* Written by <NAME><<EMAIL>>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
#ifndef HCD_DWC3_H
#define HCD_DWC3_H
#include "hw/usb/hcd-xhci.h"
#include "hw/usb/hcd-xhci-sysbus.h"
/* QOM type name and cast helper for the DWC3 wrapper device. */
#define TYPE_USB_DWC3 "usb_dwc3"

#define USB_DWC3(obj) \
     OBJECT_CHECK(USBDWC3, (obj), TYPE_USB_DWC3)

/* Number of 32-bit wrapper registers: covers offsets 0x0..0x530 inclusive. */
#define USB_DWC3_R_MAX ((0x530 / 4) + 1)
/* Size of the whole DWC3 MMIO region. */
#define DWC3_SIZE 0x10000

/* DWC3 wrapper ("glue") state: raw register file plus the embedded xHCI. */
typedef struct USBDWC3 {
    SysBusDevice parent_obj;

    MemoryRegion iomem;               /* MMIO region backing the wrapper regs */
    XHCISysbusState sysbus_xhci;      /* embedded sysbus xHCI host controller */

    uint32_t regs[USB_DWC3_R_MAX];    /* raw register values */
    RegisterInfo regs_info[USB_DWC3_R_MAX];

    struct {
        uint8_t mode;
        uint32_t dwc_usb3_user;
    } cfg;                            /* config knobs — presumably wired up as
                                         QOM properties; confirm in the .c file */
} USBDWC3;
#endif
|
HelpGiveThanks/Library | scripts/tagMenu/sortTestOrBrainstormTaggedLearnRecords.c | <gh_stars>0
July 21, 2018 14:36:22 Library.fmp12 - sortTestOrBrainstormTaggedLearnRecords -1-
tagMenu: sortTestOrBrainstormTaggedLearnRecords
#
#Used by the following scripts:
#reportTagInfo
#addTagToMainRecord
#sortButtonLearnLayouts
#loadItemRecordForBrainstormTagMenu
#loadTestSectionAndTaggedLearnRecords
#
#Perform script for brainstorm and test tags only.
If [ $$citationMatch = "brainstorm" or $$citationMatch = "test"
or
Left ( Get (LayoutName) ; 4 ) = "test" ]
#
#Make a key for the current record's lock so after
#this sort is complete the system can go back to
#this record.
Set Variable [ $recordKey; Value:testlearn::_Ltestlearn ]
#
#Turn off the loadcitation script to speed up the
#loop portion of the script.
Set Variable [ $$stoploadCitation; Value:1 ]
#
#Check if the user does or does not want brainstorm
#test learn table records sorted by order number.
#If they do (the tlbrainstormsort is blank) then sort
#by order number.
If [ TEMP::TLBrainstormSort =1 and $$citationMatch = "brainstorm" ]
#
#Set the TLBrainstormSort field with a 1. Now if user
#clicks the sort button it will be because they
#now want the records sorted by date.
Set Field [ TEMP::TLBrainstormSort; 1 ]
#
#To speed up the process, sort to the top the records
#that have ksample numbers. Do not loop thru
#records whose ksample records are blank.
Sort Records [ Keep records in sorted order; Specified Sort Order: testlearn::kcbrainstorm; descending ]
[ Restore; No dialog ]
#
#Go to the first record.
Go to Record/Request/Page
[ First ]
#
#Check the brainstorm key fields if in
#brainstorm mode.
If [ $$citationMatch = "brainstorm" ]
Loop
#The first step is to blank or clear each record's
#order field, so only those records that are part
#of the current brainstorm or test will have order numbers
#and be part of the sort coming right after this
#assignment of sort order numbers.
Set Field [ testlearn::orderTestInformation; "" ]
Exit Loop If [ testlearn::kcbrainstorm = "" ]
#
#The order number is the left three digits of a
#brainstorm or test record lock number. ( Remember
#each record in the database has lock number or
#ID number. To open the lock and see any record's
#contents requires a key number that fits the lock.
#So if a lock number is 123, then the key that will
#fit this lock is 123 too. ) Because the brainstorm
#and test key fields (think of key chains) may
#have several keys, because any one learn record
#may be part of more than one brainstorm or test.
#the system must check each learn record's keys
#one at a time to see if any fit the current brainstorm
#or test's lock. So beginning with number one
#the system checks each key. The left most 3 numbers
#are order numbers so below you will note that
#they system starts the check on the 4th number
#from the left.
Set Variable [ $number; Value:1 ]
Go to Field [ ]
Loop
#
If [ Middle ( GetValue ( testlearn::kcbrainstorm ; $number ) ; 4 ; 42 ) & "¶" = $$tagBrainstorm & "¶" ]
#
#When and if a key is found that fits the current
#brainstorm or test record's lock, the order number
#( left most 3 digits ) is captured and placed
#in the order field.
Set Field [ testlearn::orderTestInformation; Left ( GetValue ( testlearn::kcbrainstorm ; $number ) ; 3 ) ]
#
#The 'number' variable is made blank to trigger
#the exit from this loop, the order number having
#been assigned.
Set Variable [ $number ]
// Else If [ Middle ( GetValue ( testlearn::kcbrainstorm ; $number ) ; 4 ; 42 ) & "¶" ≠ $$tagBrainstorm ]
// Set Field [ testlearn::orderTestInformation; "" ]
End If
Exit Loop If [ $number = "" ]
#
#Exit loop if the current record is not part
#of the selected brainstorm or test after checking
#all of its keys.
Exit Loop If [ GetValue ( testlearn::kcbrainstorm ; $number ) = "" ]
#
#Add 1 to the 'number' variable after each key
#in the key field is checked, to direct the system
#to check the next key.
Set Variable [ $add; Value:$number ]
Set Variable [ $number; Value:$add + 1 ]
End Loop
Go to Record/Request/Page
[ Next; Exit after last ]
End Loop
End If
#
#Sort the records by order field.
If [ TEMP::InventoryLibraryYN = "" ]
Sort Records [ Keep records in sorted order; Specified Sort Order: testlearn::orderInventoryList; based on value list:
“testPulldownListANDsortOrderList”
testlearn::orderTestInformation; based on value list: “testPulldownListANDsortOrderList”
testlearn::timestamp; descending ]
[ Restore; No dialog ]
Else
Sort Records [ Keep records in sorted order; Specified Sort Order: testlearn::orderInventoryList; based on value list:
“testPulldownListANDsortOrderList”
testlearn::orderInventoryGroupNumber; ascending
testlearn::orderTestInformation; based on value list: “testPulldownListANDsortOrderList”
testlearn::note; ascending ]
[ Restore; No dialog ]
End If
#
#Return to record user was on at
#the start of this script.
Go to Record/Request/Page
[ First ]
Scroll Window
[ Home ]
Loop
Exit Loop If [ $recordkey = testlearn::_Ltestlearn ]
Go to Record/Request/Page
[ Next; Exit after last ]
End Loop
#
#Turn loadcitation script back on and exit script.
Set Variable [ $$stoploadCitation ]
Exit Script [ ]
End If
#
#
#
#
#Check if the user does or does not want
#learn records sorted by order number.
#If they do (the TLTestSort is blank) then sort
#by order number.
If [ TEMP::TLTestSort = 1 and $$citationMatch = "test" ]
#
#To speed up the process, sort to the top the records
#that have kbrainstorm numbers. Do not loop thru
#records whose kbrainstorm records are blank.
Sort Records [ Keep records in sorted order; Specified Sort Order: testlearn::kctestSubsectionInfo; descending ]
[ Restore; No dialog ]
#
#Go to the first record.
Go to Record/Request/Page
[ First ]
#
#Check the test key fields if in brainstorm mode.
If [ $$citationMatch = "test" ]
Loop
#
#The first step is to blank or clear each record's
#order field, so only those records that are part
#of the current brainstorm or test will have order numbers
#and be part of the sort coming right after this
#assignment of sort order numbers.
Set Field [ testlearn::orderTestInformation; "" ]
Exit Loop If [ testlearn::kctestSubsectionInfo = "" ]
#
#The order number is the left three digits of a
#brainstorm or test record lock number. ( Remember
#each record in the database has lock number or
#ID number. To open the lock and see any record's
#contents requires a key number that fits the lock.
#So if a lock number is 123, then the key that will
#fit this lock is 123 too. ) Because the brainstorm
#and test key fields (think of key chains) may
#have several keys, because any one learn record
#may be part of more than one brainstorm or test,
#the system must check each learn record's keys
#one at a time to see if any fit the current brainstorm
#or test's lock. So beginning with number one
#the system checks each key. The left most 3 numbers
#are order numbers so below you will note that
#they system starts the check on the 4th number
#from the left.
Set Variable [ $number; Value:1 ]
Go to Field [ ]
Loop
If [ Middle ( GetValue ( testlearn::kctestSubsectionInfo ; $number ) ; 4 ; 42 ) & "¶" = $$tagtest & ¶ ]
#
#When and if a key is found that fits the current
#brainstorm or test record's lock, the order number
#( left most 3 digits ) is captured and placed
#in the order field.
Set Field [ testlearn::orderTestInformation; Left ( GetValue ( testlearn::kctestSubsectionInfo ; $number ) ;
3 ) ]
#
#The 'number' variable is made blank to trigger
#the exit from this loop, the order number having
#been assigned.
Set Variable [ $number ]
End If
Exit Loop If [ $number = "" ]
#
#Exit loop if the current record is not part
#of the selected brainstorm or test after checking
#all of its keys.
Exit Loop If [ GetValue ( testlearn::kctestSubsectionInfo ; $number ) = "" ]
#
#Add 1 to the 'number' variable after each key
#in the key field is checked, to direct the system
#to check the next key.
Set Variable [ $add; Value:$number ]
Set Variable [ $number; Value:$add + 1 ]
End Loop
Go to Record/Request/Page
[ Next; Exit after last ]
End Loop
End If
#
#Sort the records by order field.
If [ Right ( Get (LayoutName) ; 4 ) = "info" ]
Sort Records [ Keep records in sorted order; Specified Sort Order: testlearnReportTags::orderTestInformation; based
on value list: “testPulldownListANDsortOrderList”
testlearnReportTags::timestamp; descending ]
[ Restore; No dialog ]
Else
If [ TEMP::InventoryLibraryYN = "" ]
Sort Records [ Keep records in sorted order; Specified Sort Order: testlearn::orderInventoryList; based on value
list: “testPulldownListANDsortOrderList”
testlearn::orderTestInformation; based on value list: “testPulldownListANDsortOrderList”
testlearn::timestamp; descending ]
[ Restore; No dialog ]
Else
Sort Records [ Keep records in sorted order; Specified Sort Order: testlearn::orderInventoryList; based on value
list: “testPulldownListANDsortOrderList”
testlearn::orderInventoryGroupNumber; ascending
testlearn::orderTestInformation; based on value list: “testPulldownListANDsortOrderList”
testlearn::note; ascending ]
[ Restore; No dialog ]
End If
End If
#
#Return to record user was on at
#the start of this script.
Go to Record/Request/Page
[ First ]
Scroll Window
[ Home ]
Loop
Exit Loop If [ $recordkey = testlearn::_Ltestlearn ]
Go to Record/Request/Page
[ Next; Exit after last ]
End Loop
#
#Turn loadcitation script back on, load up the
#selected Learn records variables (just in case
#the user came here via the edit/new button in
#the test module), and exit script.
Set Variable [ $$stoploadCitation ]
#
Set Variable [ $$citationRecord; Value:testlearn::_Ltestlearn ]
Set Variable [ $$node; Value:testlearn::kNodeOther ]
Set Variable [ $$primaryNode; Value:testlearn::kNodePrimary ]
Set Variable [ $$copyright; Value:testlearn::kcopyright ]
Set Variable [ $$ref; Value:testlearn::kcreference ]
Set Variable [ $$PrimaryKey; Value:testlearn::kKeywordPrimary ]
Set Variable [ $$Key; Value:testlearn::kcKeywordOther ]
Set Variable [ $$OtherKey; Value:testlearn::kcKeywordOther ]
Set Variable [ $$test; Value:testlearn::kctestSubsectionInfo ]
Set Variable [ $$brainstorm; Value:testlearn::kcbrainstorm ]
Set Variable [ $$RecordID; Value:Get (RecordID) ]
#
#Note if record is locked to protect its tags.
If [ tagTLNodePrimary::orderOrLock ≠ "" ]
Set Variable [ $$lockedMainLearnRecord; Value:tagTLNodePrimary::tag ]
Else
Set Variable [ $$lockedMainLearnRecord ]
End If
Exit Script [ ]
End If
#
End If
|
peturingi/opae-sdk | tools/libboard/board_n5010/board_n5010.c | <reponame>peturingi/opae-sdk<gh_stars>100-1000
// Original work Copyright(c) 2019-2020, Intel Corporation
// Modifications Copyright(c) 2021, <NAME>
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
// * Neither the name of Intel Corporation nor the names of its contributors
// may be used to endorse or promote products derived from this software
// without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
#ifdef HAVE_CONFIG_H
#include <config.h>
#endif // HAVE_CONFIG_H
#include <glob.h>
#include <stdio.h>
#include <errno.h>
#include <string.h>
#include <unistd.h>
#include <fcntl.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <net/ethernet.h>
#include <opae/properties.h>
#include <opae/utils.h>
#include <opae/fpga.h>
#include <sys/ioctl.h>
#include <netinet/ether.h>
#include "board_n5010.h"
#include "../board_common/board_common.h"
#define MACADDR_LEN 19
// DFL SYSFS
#define DFL_SYSFS_BMCFW_VER "dfl*/**/spi_master/spi*/spi*/bmcfw_version"
#define DFL_SYSFS_MAX10_VER "dfl*/**/spi_master/spi*/spi*/bmc_version"
#define DFL_SYSFS_MACADDR_PATH "dfl*/**/spi_master/spi*/spi*.*/mac_address"
#define DFL_SYSFS_MACCNT_PATH "dfl*/**/spi_master/spi*/spi*.*/mac_count"
/**
 * Read the BMC firmware version for the device behind @token.
 *
 * Reads the raw hex version string from the bmcfw sysfs node and decodes
 * it into "major.minor.patch" form via parse_fw_ver().
 *
 * @param token     opened FPGA token identifying the device.
 * @param bmcfw_ver output buffer for the decoded version string.
 * @param len       size of @bmcfw_ver in bytes.
 * @return FPGA_OK on success, FPGA_INVALID_PARAM on a NULL output buffer,
 *         or the error propagated from the sysfs read / parse step.
 */
fpga_result read_bmcfw_version(fpga_token token, char *bmcfw_ver, size_t len)
{
	char raw_ver[FPGA_VAR_BUF_LEN] = { 0 };
	fpga_result res;

	if (bmcfw_ver == NULL) {
		OPAE_ERR("Invalid Input parameters");
		return FPGA_INVALID_PARAM;
	}

	res = read_sysfs(token, DFL_SYSFS_BMCFW_VER, raw_ver, FPGA_VAR_BUF_LEN - 1);
	if (res != FPGA_OK) {
		OPAE_ERR("Failed to get read object");
		return res;
	}

	res = parse_fw_ver(raw_ver, bmcfw_ver, len);
	if (res != FPGA_OK)
		OPAE_ERR("Failed to parse version ");

	return res;
}
/**
 * Decode a raw firmware version register value read from sysfs.
 *
 * @param buf    NUL-terminated hex string (parsed base-16, optional "0x").
 * @param fw_ver output buffer receiving "major.minor.patch".
 * @param len    size of @fw_ver in bytes.
 * @return FPGA_OK on success, FPGA_INVALID_PARAM on NULL arguments,
 *         FPGA_EXCEPTION when @buf is empty / not a valid hex number or
 *         formatting fails.
 */
fpga_result parse_fw_ver(char *buf, char *fw_ver, size_t len)
{
	uint32_t var = 0;
	fpga_result res = FPGA_OK;
	int retval = 0;
	char *endptr = NULL;

	if (buf == NULL || fw_ver == NULL) {
		OPAE_ERR("Invalid Input parameters");
		return FPGA_INVALID_PARAM;
	}

	/* BMC FW version format reading
	NIOS II Firmware Build 0x0 32 RW[23:0] 24 hFFFFFF Build version of NIOS II Firmware
	NIOS FW is up e.g. 1.0.1 for first release
	[31:24] 8hFF Firmware Support Revision - ASCII code
	0xFF is the default value without NIOS FW, will be changed after NIOS FW is up
	*/

	errno = 0;
	var = strtoul(buf, &endptr, 16);
	/* Fix: also reject endptr == buf. For an empty (or no-digit) buffer
	 * strtoul leaves endptr == buf, so the trailing-junk check alone
	 * accepted "" and produced a bogus "0.0.0" version. */
	if (endptr == buf || endptr != buf + strlen(buf)) {
		OPAE_ERR("Failed to convert buffer to integer: %s", strerror(errno));
		return FPGA_EXCEPTION;
	}

	/* Bytes [23:16].[15:8].[7:0] map to major.minor.patch; the top byte
	 * (revision ASCII) is intentionally dropped. */
	retval = snprintf(fw_ver, len, "%u.%u.%u",
			  (var >> 16) & 0xff, (var >> 8) & 0xff, var & 0xff);
	if (retval < 0) {
		OPAE_ERR("error in formatting version");
		return FPGA_EXCEPTION;
	}

	return res;
}
/**
 * Read the MAX10 firmware version for the device behind @token.
 *
 * Mirrors read_bmcfw_version() but targets the MAX10 sysfs node.
 *
 * @param token       opened FPGA token identifying the device.
 * @param max10fw_ver output buffer for the decoded version string.
 * @param len         size of @max10fw_ver in bytes.
 * @return FPGA_OK on success, FPGA_INVALID_PARAM on a NULL output buffer,
 *         or the error propagated from the sysfs read / parse step.
 */
fpga_result read_max10fw_version(fpga_token token, char *max10fw_ver, size_t len)
{
	char raw_ver[FPGA_VAR_BUF_LEN] = { 0 };
	fpga_result res;

	if (max10fw_ver == NULL) {
		OPAE_ERR("Invalid Input parameters");
		return FPGA_INVALID_PARAM;
	}

	res = read_sysfs(token, DFL_SYSFS_MAX10_VER, raw_ver, FPGA_VAR_BUF_LEN - 1);
	if (res != FPGA_OK) {
		OPAE_ERR("Failed to get read object");
		return res;
	}

	res = parse_fw_ver(raw_ver, max10fw_ver, len);
	if (res != FPGA_OK)
		OPAE_ERR("Failed to parse version ");

	return res;
}
/**
 * Read the board MAC address for a given AFU channel.
 *
 * The sysfs node exposes the base (channel-0) MAC address and a channel
 * count; the caller's channel MAC is base + @afu_channel_num in the last
 * octet.
 *
 * @param token           opened FPGA token identifying the device.
 * @param afu_channel_num channel index; must be < the sysfs mac_count.
 * @param mac_addr        output; receives the per-channel MAC address.
 * @return FPGA_OK on success, FPGA_INVALID_PARAM for NULL/range/broadcast
 *         errors, FPGA_EXCEPTION for unparsable sysfs contents, or the
 *         error propagated from the sysfs reads.
 */
fpga_result read_mac_info(fpga_token token, uint32_t afu_channel_num,
			  struct ether_addr *mac_addr)
{
	fpga_result res = FPGA_OK;
	char mac_buf[MACADDR_LEN] = { 0 };
	char mac_count[MACADDR_LEN] = { 0 };
	uint64_t count = 0;
	char *endptr = NULL;
	static const uint8_t bcast[ETH_ALEN] = {
		0xff, 0xff, 0xff, 0xff, 0xff, 0xff
	};

	if (mac_addr == NULL) {
		OPAE_ERR("Invalid Input parameters");
		return FPGA_INVALID_PARAM;
	}

	res = read_sysfs(token, DFL_SYSFS_MACADDR_PATH, mac_buf, MACADDR_LEN - 1);
	if (res != FPGA_OK) {
		OPAE_ERR("Failed to get read object");
		return res;
	}

	/* Fix: ether_aton_r() returns NULL on a malformed string; the
	 * original ignored the result and could hand back a zeroed address. */
	if (ether_aton_r(mac_buf, mac_addr) == NULL) {
		OPAE_ERR("Invalid MAC address string");
		return FPGA_EXCEPTION;
	}

	res = read_sysfs(token, DFL_SYSFS_MACCNT_PATH, mac_count, MACADDR_LEN - 1);
	if (res != FPGA_OK) {
		OPAE_ERR("Failed to get read object");
		return res;
	}

	errno = 0;
	count = strtoul(mac_count, &endptr, 16);
	/* endptr == mac_count rejects an empty/non-numeric count string. */
	if (endptr == mac_count || endptr != mac_count + strlen(mac_count)) {
		OPAE_ERR("Failed to convert buffer to integer: %s", strerror(errno));
		return FPGA_EXCEPTION;
	}

	if (afu_channel_num >= count) {
		OPAE_ERR("Invalid Input parameters");
		return FPGA_INVALID_PARAM;
	}

	/* ff:ff:ff:ff:ff:ff marks an unprogrammed MAC. */
	if (memcmp(mac_addr->ether_addr_octet, bcast, ETH_ALEN) == 0) {
		OPAE_ERR("Invalid MAC address");
		return FPGA_INVALID_PARAM;
	}

	/* NOTE(review): only the last octet is adjusted, so a sum past 0xff
	 * wraps without carrying into octet 4 — confirm the board's MAC
	 * allocation never crosses that boundary. */
	mac_addr->ether_addr_octet[5] += afu_channel_num;

	return res;
}
// print board information
// Prints the MAX10 NIOS FW and MAX10 build versions for the board behind
// `token` to stdout. Both reads are attempted even if the first fails, so a
// partial printout still appears; the last failing status (or FPGA_OK when
// both succeed) is returned.
fpga_result print_board_info(fpga_token token)
{
fpga_result res = FPGA_OK;
// resval remembers the most recent failure across both reads.
fpga_result resval = FPGA_OK;
char bmc_ver[FPGA_VAR_BUF_LEN] = { 0 };
char max10_ver[FPGA_VAR_BUF_LEN] = { 0 };
res = read_bmcfw_version(token, bmc_ver, FPGA_VAR_BUF_LEN);
if (res != FPGA_OK) {
OPAE_ERR("Failed to read bmc version");
resval = res;
}
res = read_max10fw_version(token, max10_ver, FPGA_VAR_BUF_LEN);
if (res != FPGA_OK) {
OPAE_ERR("Failed to read max10 version");
resval = res;
}
// On failure the corresponding buffer is still zero-initialized, so the
// printout shows an empty version rather than garbage.
printf("Board Management Controller, MAX10 NIOS FW version: %s \n", bmc_ver);
printf("Board Management Controller, MAX10 Build version: %s \n", max10_ver);
return resval;
}
// print board information
// Thin wrapper: security info printing is shared across boards, so this
// delegates to the common implementation in board_common.
fpga_result print_sec_info(fpga_token token)
{
return print_sec_common_info(token);
}
/**
 * Print the channel-0 MAC address of the board behind @token to stdout.
 *
 * @return FPGA_OK on success, or the error from read_mac_info(); nothing
 *         is printed on failure.
 */
fpga_result print_mac_info(fpga_token token)
{
	struct ether_addr mac;
	char mac_str[18] = { 0 };
	fpga_result res;

	memset(&mac, 0, sizeof(mac));

	res = read_mac_info(token, 0, &mac);
	if (res != FPGA_OK) {
		OPAE_ERR("Failed to read mac address");
		return res;
	}

	/* ether_ntoa_r writes the canonical aa:bb:cc:dd:ee:ff form. */
	printf("%-1s : %s\n", "MAC address", ether_ntoa_r(&mac, mac_str));
	return res;
}
// print phy group information
// Prints the ethernet interface/PHY information for the n5010 board family.
fpga_result print_phy_info(fpga_token token)
{
	fpga_result res = FPGA_OK;

	/* Fix: the interface-name literal had been mangled to "n<PASSWORD>"
	 * (an automated redaction artifact). Restored to the board family
	 * string "n5010" matching this file (board_n5010.c).
	 * NOTE(review): confirm against upstream OPAE before release. */
	res = print_eth_interface_info(token, "n5010");
	if (res != FPGA_OK) {
		OPAE_ERR("Failed to read phy info");
		return res;
	}
	return res;
}
// prints fpga boot page info
// Thin wrapper: delegates to the shared boot-info printer in board_common.
fpga_result fpga_boot_info(fpga_token token)
{
return print_common_boot_info(token);
}
puckel/dgr | vendor/github.com/rkt/rkt/tests/rkt_dns_test.go | <reponame>puckel/dgr<gh_stars>1000+
// Copyright 2016 The rkt Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package main
import (
"crypto/sha1"
"fmt"
"io/ioutil"
"os"
"testing"
"time"
"github.com/rkt/rkt/tests/testutils"
)
// TestDNSParam checks how rkt fills the container's /etc/resolv.conf from
// the various --dns* command-line flags. Each table entry runs the inspect
// image with FILE=/etc/resolv.conf and asserts one expected line appears.
func TestDNSParam(t *testing.T) {
	imageFile := patchTestACI("rkt-inspect-exit.aci", "--exec=/inspect --print-msg=Hello --read-file")
	defer os.Remove(imageFile)
	ctx := testutils.NewRktRunCtx()
	defer ctx.Cleanup()

	// Fix: dropped the unused loop index (previously silenced with `_ = i`
	// for a commented-out t.Logf) and the dead debug line.
	for _, tt := range []struct {
		paramDNS      string
		expectedLine  string
		expectedError bool
	}{
		{
			paramDNS:      "",
			expectedLine:  "Cannot read file",
			expectedError: TestedFlavor.ExitStatusPreserved,
		},
		{
			paramDNS:      "--dns=8.8.4.4",
			expectedLine:  "nameserver 8.8.4.4",
			expectedError: false,
		},
		{
			paramDNS:      "--dns=8.8.8.8 --dns=8.8.4.4",
			expectedLine:  "nameserver 8.8.8.8",
			expectedError: false,
		},
		{
			paramDNS:      "--dns=8.8.8.8 --dns=8.8.4.4 --dns-search=search.com --dns-opt=debug",
			expectedLine:  "nameserver 8.8.4.4",
			expectedError: false,
		},
		{
			paramDNS:      "--dns-search=foo.com --dns-search=bar.com",
			expectedLine:  "search foo.com bar.com",
			expectedError: false,
		},
		{
			paramDNS:      "--dns-opt=debug --dns-opt=use-vc --dns-opt=rotate",
			expectedLine:  "options debug use-vc rotate",
			expectedError: false,
		},
		{
			paramDNS:      "--dns-opt=debug --dns-opt=use-vc --dns-opt=rotate --dns-domain=example.net",
			expectedLine:  "domain example.net",
			expectedError: false,
		},
	} {
		rktCmd := fmt.Sprintf(`%s --insecure-options=image run --set-env=FILE=/etc/resolv.conf %s %s`,
			ctx.Cmd(), tt.paramDNS, imageFile)
		runRktAndCheckOutput(t, rktCmd, tt.expectedLine, tt.expectedError)
	}
}
// TestDNSHost checks that --dns=host mirrors the host's /etc/resolv.conf
// into the container by comparing SHA-1 sums of both files.
func TestDNSHost(t *testing.T) {
	dat, err := ioutil.ReadFile("/etc/resolv.conf")
	if err != nil {
		t.Fatal("Could not read host's resolv.conf", err)
	}

	sum := fmt.Sprintf("%x", sha1.Sum(dat))
	t.Log("Expecting sum", sum)

	ctx := testutils.NewRktRunCtx()
	defer ctx.Cleanup()

	appCmd := "--exec=/inspect -- --hash-file"
	rktCmd := fmt.Sprintf("%s --insecure-options=image run --dns=host --set-env=FILE=/etc/resolv.conf %s %s",
		ctx.Cmd(), getInspectImagePath(), appCmd)
	child := spawnOrFail(t, rktCmd)
	ctx.RegisterChild(child)
	defer waitOrFail(t, child, 0)

	expectedRegex := `sha1sum: ([0-9a-f]+)`
	result, out, err := expectRegexTimeoutWithOutput(child, expectedRegex, 30*time.Second)
	if err != nil {
		t.Fatalf("Error: %v\nOutput: %v", err, out)
	}
	if result[1] != sum {
		// Fix: the failure message previously named "/etc/host"; the file
		// being hashed is /etc/resolv.conf.
		t.Fatalf("container's /etc/resolv.conf has sha1sum %s expected %s", result[1], sum)
	}
}
|
isabella232/RDepot | app/src/test/java/eu/openanalytics/rdepot/test/fixture/RRepositoryTestFixture.java | /**
* R Depot
*
* Copyright (C) 2012-2021 Open Analytics NV
*
* ===========================================================================
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the Apache License as published by
* The Apache Software Foundation, either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* Apache License for more details.
*
* You should have received a copy of the Apache License
* along with this program. If not, see <http://www.apache.org/licenses/>
*/
package eu.openanalytics.rdepot.test.fixture;
import java.util.List;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import eu.openanalytics.rdepot.model.Repository;
public class RRepositoryTestFixture {
public static List<Repository> GET_EXAMPLE_REPOSITORIES() {
Repository repository1 = new Repository(123, "http://localhost/repo/testrepo123", "Test Repository 123", "http://192.168.1.100/testrepo123", true, false, null, null, null, null);
repository1.setSynchronizing(false);
repository1.setVersion(10);
Repository repository2 = new Repository(234, "http://localhost/repo/testrepo234", "Test Repository 234", "http://192.168.1.101/testrepo234", false, true, null, null, null, null);
repository2.setSynchronizing(false);
repository2.setVersion(12);
Repository repository3 = new Repository(456, "http://localhost/repo/anotherrepo", "Just another repository", "http://192.168.1.102/anotherrepo", false, false, null, null, null, null);
repository3.setSynchronizing(true);
repository3.setVersion(5);
return List.of(repository1, repository2, repository3);
}
public static Page<Repository> GET_EXAMPLE_REPOSITORIES_PAGED() {
Repository repository1 = new Repository(123, "http://localhost/repo/testrepo123", "Test Repository 123", "http://192.168.1.100/testrepo123", true, false, null, null, null, null);
repository1.setSynchronizing(false);
repository1.setVersion(10);
Repository repository2 = new Repository(234, "http://localhost/repo/testrepo234", "Test Repository 234", "http://192.168.1.101/testrepo234", false, true, null, null, null, null);
repository2.setSynchronizing(false);
repository2.setVersion(12);
Repository repository3 = new Repository(456, "http://localhost/repo/anotherrepo", "Just another repository", "http://192.168.1.102/anotherrepo", false, false, null, null, null, null);
repository3.setSynchronizing(true);
repository3.setVersion(5);
return new PageImpl<>(List.of(repository1, repository2, repository3));
}
public static Repository GET_EXAMPLE_REPOSITORY() {
return GET_EXAMPLE_REPOSITORIES().get(0);
}
public static Repository GET_EXAMPLE_REPOSITORY(int id) {
Repository repository = new Repository(id, "http://localhost/repo/testrepo" + id, "Test Repository " + id, "http://192.168.1.100/testrepo" + id, false, false, null, null, null, null);
repository.setSynchronizing(false);
repository.setVersion(0);
return repository;
}
} |
meta-quick/opa | internal/jwx/jws/verify/ecdsa_test.go | <reponame>meta-quick/opa<gh_stars>0
package verify
import (
"testing"
"github.com/meta-quick/opa/internal/jwx/jwa"
)
// TestECDSAVerify exercises the failure paths of the ECDSA verifier:
// construction with a non-ECDSA algorithm must fail, and Verify must reject
// both an arbitrary non-key value and a nil key.
func TestECDSAVerify(t *testing.T) {
	type dummyStruct struct {
		dummy1 int
		dummy2 float64
	}
	dummy := &dummyStruct{1, 3.4}

	t.Run("ECDSA Verifier Creation Error", func(t *testing.T) {
		// HS256 is an HMAC algorithm, not ECDSA.
		if _, err := newECDSA(jwa.HS256); err == nil {
			t.Fatal("ECDSA Verifier Object creation should fail")
		}
	})
	t.Run("ECDSA Verifier Sign Error", func(t *testing.T) {
		verifier, err := newECDSA(jwa.ES512)
		if err != nil {
			t.Fatalf("Signer creation failure: %v", jwa.ES512)
		}
		// Neither a random struct nor nil is a usable public key.
		for _, key := range []interface{}{dummy, nil} {
			if verifyErr := verifier.Verify([]byte("payload"), []byte("signature"), key); verifyErr == nil {
				t.Fatal("ECDSA Verification should fail")
			}
		}
	})
}
|
Noxware/primo | oldCode/src/game/collections/KillList.js | // @ts-check
const PlayerCollection = require('./PlayerCollection');
/* LAZY IMPLEMENTATION */
/**
 * Kill list of players. Currently a bare subclass that inherits all behavior
 * from PlayerCollection unchanged (see the LAZY IMPLEMENTATION note above);
 * it exists so kill-list-specific behavior has a named home later.
 */
class KillList extends PlayerCollection {}

module.exports = KillList;
michaeldimchuk/objectify-tools | objectify-processor/src/main/java/io/md/code/objectify/dao/TypeCache.java | <gh_stars>0
package io.md.code.objectify.dao;
import java.util.Map;
import javax.lang.model.element.TypeElement;
import javax.lang.model.type.TypeMirror;
import javax.lang.model.util.Elements;
import com.google.common.collect.ImmutableMap;
import com.googlecode.objectify.Key;
import com.googlecode.objectify.Ref;
/**
 * Caches the {@link TypeElement}s and {@link TypeMirror}s of the well-known
 * types this annotation processor compares against, so they are resolved
 * from {@link Elements} once instead of on every lookup.
 */
class TypeCache {

    // Fix: both maps are assigned exactly once in the constructor and never
    // mutated (they are guava ImmutableMaps), so mark the references final.
    final Map<Class<?>, TypeElement> typeElements;
    final Map<Class<?>, TypeMirror> typeMirrors;

    TypeCache(Elements elements) {
        typeElements = getKnownTypeElements(elements);
        typeMirrors = getKnownTypeMirrors(elements);
    }

    /** Returns the cached mirror for {@code type}, or {@code null} if unknown. */
    TypeMirror getTypeMirror(Class<?> type) {
        return typeMirrors.get(type);
    }

    /** Returns the cached element for {@code type}, or {@code null} if unknown. */
    TypeElement getTypeElement(Class<?> type) {
        return typeElements.get(type);
    }

    private Map<Class<?>, TypeMirror> getKnownTypeMirrors(Elements elements) {
        ImmutableMap.Builder<Class<?>, TypeMirror> knownTypes = ImmutableMap.builder();
        addType(knownTypes, String.class, elements);
        addType(knownTypes, Long.class, elements);
        addType(knownTypes, Ref.class, elements);
        addType(knownTypes, Key.class, elements);
        addType(knownTypes, com.google.appengine.api.datastore.Key.class, elements);
        return knownTypes.build();
    }

    private Map<Class<?>, TypeElement> getKnownTypeElements(Elements elements) {
        return ImmutableMap.of(
                Ref.class, elements.getTypeElement(Ref.class.getCanonicalName()),
                Key.class, elements.getTypeElement(Key.class.getCanonicalName())
        );
    }

    /** Resolves {@code type} through {@code elements} and adds its mirror. */
    private void addType(ImmutableMap.Builder<Class<?>, TypeMirror> knownTypes, Class<?> type, Elements elements) {
        knownTypes.put(type, elements.getTypeElement(type.getCanonicalName()).asType());
    }
}
|
nissshh/sawtooth-java-wrapper | sawtooth-java-wrapper-client/src/main/java/com/mycompany/blockchain/sawtooth/client/IEventProcessor.java | /**
*
*/
package com.mycompany.blockchain.sawtooth.client;
import com.google.protobuf.ByteString;
import com.google.protobuf.InvalidProtocolBufferException;
import com.mycompany.blockchain.sawtooth.core.service.IAddressBuilder;
import com.mycompany.blockchain.sawtooth.loan.protobuf.Loan;
/**
*
* Implemented class will process events
*
* @author <NAME> <<EMAIL>>
*
*/
public interface IEventProcessor <ENTITY> {

/**
 * Process the event data found at the given state address.
 *
 * @param address the state address the event payload refers to
 * @param value the raw protobuf-encoded payload for that address
 * @throws InvalidProtocolBufferException if {@code value} cannot be decoded
 */
public void processEvent(String address, ByteString value) throws InvalidProtocolBufferException;

/**
 * An address builder that will help in filtering events per address.
 *
 * @return the address builder for {@code ENTITY}
 */
public IAddressBuilder<ENTITY> getAddressBuilder();
}
|
neptune076/Coisas | golang/Curso - Aprenda Go/Nivel 9/1/main.go | package main
import (
"fmt"
"sync"
)
// main launches two goroutines that each print a message, using a WaitGroup
// so the program does not exit before both have run. Output order between
// the two lines is not deterministic.
func main() {
	var wg sync.WaitGroup

	// Shared worker: announce a message and signal completion.
	say := func(msg string) {
		defer wg.Done()
		fmt.Println(msg)
	}

	wg.Add(2)
	go say("Estou na primeira goroutine!")
	go say("Estou na segunda goroutine!")
	wg.Wait()
}
|
policygenius/athenaeum | table_of_contents/components.js | <gh_stars>1-10
// Table-of-contents configuration for the component styleguide: each entry
// names a section and points at the component index files it contains,
// either via a glob string or a function returning explicit paths.
// NOTE(review): shape matches react-styleguidist `sections` config — confirm
// the consumer before restructuring.
module.exports = {
name: 'Components',
sections: [
// Smallest building blocks.
{
name: 'Atoms',
components: 'src/atoms/**/index.js'
},
// Mid-level components, grouped by kind.
{
name: 'Molecules',
sections: [
{
name: 'Asides',
components: 'src/molecules/asides/**/index.js'
},
{
name: 'Rows',
components: () => [
'src/molecules/DataRow/index.js',
'src/molecules/ComparisonRowItem/index.js',
]
},
{
name: 'Filter',
components: 'src/molecules/Filter/**/index.js'
},
{
name: 'FormFields',
components: 'src/molecules/formfields/**/index.js'
},
{
name: 'Headers',
components: () => [
'src/molecules/HeaderDiscount/index.js',
'src/molecules/HeaderAmount/index.js',
'src/molecules/BlockHeader/index.js',
]
},
{
name: 'Lists',
components: 'src/molecules/lists/**/index.js'
},
{
name: 'Modal',
components: 'src/molecules/Modal/index.js'
},
{
name: 'StarRating',
components: 'src/molecules/StarRating/index.js'
},
{
name: 'Loading',
components: 'src/molecules/Loading/index.js'
},
{
name: 'Lock Ups',
components: 'src/molecules/LockUps/**/index.js'
},
{
name: 'Menus',
components: 'src/molecules/MobileMenu/index.js'
},
// Components without a shared category.
{
name: 'Other',
components: () => [
'src/molecules/StepProgress/index.js',
'src/molecules/StepIndicator/index.js',
'src/molecules/AgentCallout/index.js',
'src/molecules/FeatureSquare/index.js',
'src/molecules/AccountIndicator/index.js'
]
}
]
},
// Larger composites built from molecules.
{
name: 'Organisms',
sections: [
{
name: 'Cards',
components: 'src/organisms/cards/**/index.js'
},
{
name: 'Forms',
components: 'src/organisms/forms/**/index.js'
},
{
name: 'Tables',
components: 'src/organisms/tables/**/index.js'
},
{
name: 'Other',
components: () => [
'src/organisms/Accordion/index.js',
]
}
]
},
{
name: 'Templates',
components: 'src/templates/**/index.js',
},
{
name: 'Wrappers',
components: 'src/wrappers/**/index.js',
}
],
};
|
thinwind/code-lab | java/jvm/asmtools/src/org/openjdk/asmtools/jdis/RecordData.java | <filename>java/jvm/asmtools/src/org/openjdk/asmtools/jdis/RecordData.java
/*
* Copyright (c) 2019, 2020, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package org.openjdk.asmtools.jdis;
import org.openjdk.asmtools.jasm.JasmTokens;
import org.openjdk.asmtools.jasm.Tables;
import java.io.DataInputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import static java.lang.String.format;
import static org.openjdk.asmtools.jasm.JasmTokens.Token.*;
import static org.openjdk.asmtools.jdis.TraceUtils.traceln;
/**
* The Record attribute data
* <p>
* since class file 58.65535 (JEP 359)
*/
public class RecordData extends Indenter {
// Owning class file; provides the constant pool and the output stream.
private final ClassData cls;
// One entry per record component, in class-file order; set by read().
private List<Component> components;
public RecordData(ClassData cls) {
this.cls = cls;
}
/**
 * Reads the Record attribute body: a u2 component count followed by that
 * many record_component_info structures.
 *
 * @param in positioned at the start of the attribute body
 * @return this, for chaining
 * @throws IOException on a truncated or unreadable stream
 */
public RecordData read(DataInputStream in) throws IOException {
int count = in.readUnsignedShort();
traceln("components=" + count);
components = new ArrayList<>(count);
for (int i = 0; i < count; i++) {
components.add(new Component(cls).read(in));
}
return this;
}
/**
 * Prints the record data to the current output stream. called from ClassData.
 * Emits nothing when the record has no components; otherwise prints a
 * RECORD block with each component rendered at double indent.
 */
public void print() throws IOException {
int count = components.size();
if (count > 0) {
cls.out.println(getIndentString() + RECORD.parseKey() + getIndentString() + LBRACE.parseKey());
for (int i = 0; i < count; i++) {
Component cn = components.get(i);
cn.setIndent(indent() * 2);
// Blank line before any component after the first that carries
// annotations, to keep the annotation block visually separated.
if (i != 0 && cn.getAnnotationsCount() > 0)
cn.out.println();
cn.print();
}
cls.out.println(getIndentString() + RBRACE.parseKey());
cls.out.println();
}
}
/**
 * A single record_component_info entry: name, descriptor, and optional
 * attributes (currently only Signature is handled specially).
 */
private class Component extends MemberData {
// CP index to the name
private int name_cpx;
// CP index to the type descriptor
private int type_cpx;
public Component(ClassData cls) {
super(cls);
memberType = "RecordData";
}
@Override
protected boolean handleAttributes(DataInputStream in, Tables.AttrTag attrtag, int attrlen) throws IOException {
// Read the Attributes
// Returns true when this method consumed the attribute; unhandled
// tags fall back to the MemberData default processing.
boolean handled = true;
switch (attrtag) {
case ATT_Signature:
// The spec allows at most one Signature; tolerate duplicates
// but warn and keep only the last one seen.
if( signature != null ) {
traceln("Record attribute: more than one attribute Signature are in component.attribute_info_attributes[attribute_count]");
traceln("Last one will be used.");
}
signature = new SignatureData(cls).read(in, attrlen);
break;
default:
handled = false;
break;
}
return handled;
}
/**
 * Read and resolve the component data called from ClassData.
 * Reads the name/descriptor CP indexes, then the attribute table.
 */
public Component read(DataInputStream in) throws IOException {
// read the Component CP indexes
name_cpx = in.readUnsignedShort();
type_cpx = in.readUnsignedShort();
traceln(2, "RecordComponent: name[" + name_cpx + "]=" + cls.pool.getString(name_cpx)
+ " descriptor[" + type_cpx + "]=" + cls.pool.getString(type_cpx));
// Read the attributes
readAttributes(in);
return this;
}
/**
 * Prints the component data to the current output stream. called from RecordData.
 * Annotations first, then optional SYNTHETIC/DEPRECATED markers, then the
 * COMPONENT keyword with the name/descriptor pair.
 */
public void print() throws IOException {
// print component's attributes
super.printAnnotations(getIndentString());
// print component
StringBuilder bodyPrefix = new StringBuilder(getIndentString());
StringBuilder tailPrefix = new StringBuilder();
if (isSynthetic) {
bodyPrefix.append(JasmTokens.Token.SYNTHETIC.parseKey()).append(' ');
}
if (isDeprecated) {
bodyPrefix.append(JasmTokens.Token.DEPRECATED.parseKey()).append(' ');
}
// component
bodyPrefix.append(JasmTokens.Token.COMPONENT.parseKey()).append(' ');
printVar(bodyPrefix, tailPrefix,name_cpx, type_cpx);
}
}
}
|
Havoc-OS/androidprebuilts_go_linux-x86 | test/fixedbugs/issue15141.go | // compile
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package main
// Writes one byte into a 0xFFFFFFF-element array via three differently-shaped
// functions, then prints both the written slot and the symmetric slot of each
// array. Per the file path this is Go compiler regression test issue15141;
// the deliberately huge arrays and distinct return forms below should not be
// "simplified".
func main() {
a := f(1, 99)
b := g(0xFFFFFFe, 98)
c := h(0xFFFFFFe, 98)
println(a[1], b[1], c[1], a[0xFFFFFFe], b[0xFFFFFFe], c[0xFFFFFFe])
}
// f stores byte(y) at index i of a named result array and uses a bare return.
// noinline keeps the large-array return path intact for the compiler test.
//go:noinline
func f(i, y int) (a [0xFFFFFFF]byte) {
a[i] = byte(y)
return
}
// g is the same operation as f but with an unnamed result: the array is a
// local that is returned explicitly.
//go:noinline
func g(i, y int) [0xFFFFFFF]byte {
var a [0xFFFFFFF]byte
a[i] = byte(y)
return a
}
// h mixes the two shapes above: a named result that is also returned
// explicitly ("return a" instead of a bare return).
//go:noinline
func h(i, y int) (a [0xFFFFFFF]byte) {
a[i] = byte(y)
return a
}
|
n8fr8/Smack | smack-extensions/src/main/java/org/jivesoftware/smackx/muc/packet/MUCUser.java | <filename>smack-extensions/src/main/java/org/jivesoftware/smackx/muc/packet/MUCUser.java<gh_stars>1-10
/**
*
* Copyright 2003-2007 Jive Software.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jivesoftware.smackx.muc.packet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import org.jivesoftware.smack.packet.ExtensionElement;
import org.jivesoftware.smack.packet.NamedElement;
import org.jivesoftware.smack.packet.Stanza;
import org.jivesoftware.smack.util.XmlStringBuilder;
import org.jxmpp.jid.EntityBareJid;
import org.jxmpp.jid.EntityFullJid;
import org.jxmpp.jid.EntityJid;
/**
* Represents extended presence information about roles, affiliations, full JIDs,
* or status codes scoped by the 'http://jabber.org/protocol/muc#user' namespace.
*
* @author <NAME>
*/
public class MUCUser implements ExtensionElement {
public static final String ELEMENT = "x";
public static final String NAMESPACE = MUCInitialPresence.NAMESPACE + "#user";
private final Set<Status> statusCodes = new HashSet<Status>(4);
private Invite invite;
private Decline decline;
private MUCItem item;
private String password;
private Destroy destroy;
/**
 * Returns the XML element name of this extension: always {@code "x"}.
 */
@Override
public String getElementName() {
return ELEMENT;
}
/**
 * Returns the namespace of this extension:
 * {@code http://jabber.org/protocol/muc#user}.
 */
@Override
public String getNamespace() {
return NAMESPACE;
}
/**
 * Serializes this extension to XML. Each optional child (invite, decline,
 * item, password, destroy) is emitted only when set; all collected status
 * codes are appended as a group.
 * NOTE(review): children are emitted in the statement order below —
 * presumably matching the muc#user schema; confirm before reordering.
 */
@Override
public XmlStringBuilder toXML() {
XmlStringBuilder xml = new XmlStringBuilder(this);
xml.rightAngleBracket();
xml.optElement(getInvite());
xml.optElement(getDecline());
xml.optElement(getItem());
xml.optElement("password", getPassword());
xml.append(statusCodes);
xml.optElement(getDestroy());
xml.closeElement(this);
return xml;
}
/**
* Returns the invitation for another user to a room. The sender of the invitation
* must be an occupant of the room. The invitation will be sent to the room which in turn
* will forward the invitation to the invitee.
*
* @return an invitation for another user to a room.
*/
public Invite getInvite() {
return invite;
}
/**
* Returns the rejection to an invitation from another user to a room. The rejection will be
* sent to the room which in turn will forward the refusal to the inviter.
*
* @return a rejection to an invitation from another user to a room.
*/
public Decline getDecline() {
return decline;
}
/**
* Returns the item child that holds information about roles, affiliation, jids and nicks.
*
* @return an item child that holds information about roles, affiliation, jids and nicks.
*/
public MUCItem getItem() {
return item;
}
/**
* Returns the password to use to enter Password-Protected Room. A Password-Protected Room is
* a room that a user cannot enter without first providing the correct password.
*
* @return the password to use to enter Password-Protected Room.
*/
public String getPassword() {
return password;
}
/**
 * Returns the set of status codes that assist in presenting notification messages.
 * Note: this is the internal, mutable set — callers that modify it modify this instance.
 *
 * @return the set of status codes (never {@code null}, possibly empty).
 */
public Set<Status> getStatus() {
    return statusCodes;
}
/**
 * Returns true if this MUCUser instance has also {@link Status} information.
 * <p>
 * If <code>true</code> is returned, then {@link #getStatus()} will return a non-empty set.
 * </p>
 *
 * @return true if this MUCUser has status information.
 * @since 4.1
 */
public boolean hasStatus() {
    return !statusCodes.isEmpty();
}
/**
 * Returns the notification that the room has been destroyed. After a room has been destroyed,
 * the room occupants will receive a Presence stanza(/packet) of type 'unavailable' with the reason for
 * the room destruction if provided by the room owner.
 *
 * @return a notification that the room has been destroyed, or {@code null} if none is set.
 */
public Destroy getDestroy() {
    return destroy;
}
/**
 * Sets the invitation for another user to a room. The sender of the invitation
 * must be an occupant of the room. The invitation will be sent to the room which in turn
 * will forward the invitation to the invitee.
 *
 * @param invite the invitation for another user to a room.
 */
public void setInvite(Invite invite) {
    this.invite = invite;
}
/**
 * Sets the rejection to an invitation from another user to a room. The rejection will be
 * sent to the room which in turn will forward the refusal to the inviter.
 *
 * @param decline the rejection to an invitation from another user to a room.
 */
public void setDecline(Decline decline) {
    this.decline = decline;
}
/**
 * Sets the item child that holds information about roles, affiliation, jids and nicks.
 *
 * @param item the item child that holds information about roles, affiliation, jids and nicks.
 */
public void setItem(MUCItem item) {
    this.item = item;
}
/**
 * Sets the password to use to enter a Password-Protected Room. A Password-Protected Room is
 * a room that a user cannot enter without first providing the correct password.
 *
 * @param string the password to use to enter the Password-Protected Room.
 */
public void setPassword(String string) {
    password = string;
}
/**
 * Adds the given status codes, which assist in presenting notification messages.
 *
 * @param statusCodes the status codes to add to this extension's status set.
 */
public void addStatusCodes(Set<Status> statusCodes) {
    this.statusCodes.addAll(statusCodes);
}
/**
 * Adds a single status code, which holds a code that assists in presenting notification messages.
 *
 * @param status the status code to add to this extension's status set.
 */
public void addStatusCode(Status status) {
    this.statusCodes.add(status);
}
/**
 * Sets the notification that the room has been destroyed. After a room has been destroyed,
 * the room occupants will receive a Presence stanza(/packet) of type 'unavailable' with the reason for
 * the room destruction if provided by the room owner.
 *
 * @param destroy the notification that the room has been destroyed.
 */
public void setDestroy(Destroy destroy) {
    this.destroy = destroy;
}
/**
 * Retrieve the MUCUser PacketExtension from packet, if any.
 *
 * @param packet the stanza to extract the extension from.
 * @return the MUCUser PacketExtension or {@code null}
 * @deprecated use {@link #from(Stanza)} instead
 */
@Deprecated
public static MUCUser getFrom(Stanza packet) {
    return from(packet);
}
/**
 * Retrieve the MUCUser PacketExtension from packet, if any.
 *
 * @param packet the stanza to extract the extension from.
 * @return the MUCUser PacketExtension or {@code null}
 */
public static MUCUser from(Stanza packet) {
    return packet.getExtension(ELEMENT, NAMESPACE);
}
/**
 * Represents an invitation for another user to a room. The sender of the invitation
 * must be an occupant of the room. The invitation will be sent to the room which in turn
 * will forward the invitation to the invitee.
 *
 * @author <NAME>
 */
public static class Invite implements NamedElement {
    public static final String ELEMENT = "invite";

    private final String reason; // human-readable invitation message, may be null

    /**
     * From XEP-0045 § 7.8.2: "… whose value is the bare JID, full JID, or occupant JID of the inviter …"
     */
    private final EntityJid from;
    private final EntityBareJid to; // bare JID of the invitee, may be null

    public Invite(String reason, EntityFullJid from) {
        this(reason, from, null);
    }

    public Invite(String reason, EntityBareJid to) {
        this(reason, null, to);
    }

    public Invite(String reason, EntityJid from, EntityBareJid to) {
        this.reason = reason;
        this.from = from;
        this.to = to;
    }

    /**
     * Returns the bare JID of the inviter or, optionally, the room JID. (e.g.
     * '<EMAIL>/desktop').
     *
     * @return the room's occupant that sent the invitation, or {@code null}.
     */
    public EntityJid getFrom() {
        return from;
    }

    /**
     * Returns the message explaining the invitation.
     *
     * @return the message explaining the invitation, or {@code null}.
     */
    public String getReason() {
        return reason;
    }

    /**
     * Returns the bare JID of the invitee. (e.g. '<EMAIL>')
     *
     * @return the bare JID of the invitee, or {@code null}.
     */
    public EntityBareJid getTo() {
        return to;
    }

    // Wire format: attribute order is to, from; optional <reason/> child.
    @Override
    public XmlStringBuilder toXML() {
        XmlStringBuilder xml = new XmlStringBuilder(this);
        xml.optAttribute("to", getTo());
        xml.optAttribute("from", getFrom());
        xml.rightAngleBracket();
        xml.optElement("reason", getReason());
        xml.closeElement(this);
        return xml;
    }

    @Override
    public String getElementName() {
        return ELEMENT;
    }
}
/**
 * Rejection of an invitation from another user to a room. The rejection is sent
 * to the room, which forwards the refusal to the original inviter.
 */
public static class Decline implements NamedElement {
    public static final String ELEMENT = "decline";

    private final String reason;        // human-readable explanation, may be null
    private final EntityBareJid from;   // bare JID of the declining invitee, may be null
    private final EntityBareJid to;     // bare JID of the inviter, may be null

    public Decline(String reason, EntityBareJid to) {
        this(reason, null, to);
    }

    public Decline(String reason, EntityBareJid from, EntityBareJid to) {
        this.reason = reason;
        this.from = from;
        this.to = to;
    }

    @Override
    public String getElementName() {
        return ELEMENT;
    }

    /**
     * Returns the bare JID of the invitee that rejected the invitation
     * (e.g. '<EMAIL>').
     *
     * @return the declining invitee's bare JID, or {@code null}.
     */
    public EntityBareJid getFrom() {
        return from;
    }

    /**
     * Returns the message explaining why the invitation was rejected.
     *
     * @return the explanation for the rejection, or {@code null}.
     */
    public String getReason() {
        return reason;
    }

    /**
     * Returns the bare JID of the inviter (e.g. '<EMAIL>').
     *
     * @return the inviter's bare JID, or {@code null}.
     */
    public EntityBareJid getTo() {
        return to;
    }

    // Wire format: attribute order is to, from; optional <reason/> child.
    @Override
    public XmlStringBuilder toXML() {
        XmlStringBuilder buf = new XmlStringBuilder(this);
        buf.optAttribute("to", getTo());
        buf.optAttribute("from", getFrom());
        buf.rightAngleBracket();
        buf.optElement("reason", getReason());
        buf.closeElement(this);
        return buf;
    }
}
/**
 * Status code assists in presenting notification messages. The following link provides the
 * list of existing error codes <a href="http://xmpp.org/registrar/mucstatus.html">Multi-User Chat Status Codes</a>.
 *
 * @author <NAME>
 */
public static final class Status implements NamedElement {
    public static final String ELEMENT = "status";

    // Interned Status instances keyed by numeric code. A concurrent map is used because
    // create() is a static factory that may be invoked from multiple threads (e.g. the
    // stanza parser thread and user code) — the previous plain HashMap was racy.
    private static final Map<Integer, Status> statusMap = new java.util.concurrent.ConcurrentHashMap<Integer, Status>(8);

    public static final Status PRESENCE_TO_SELF_110 = Status.create(110);
    public static final Status ROOM_CREATED_201 = Status.create(201);
    public static final Status BANNED_301 = Status.create(301);
    public static final Status NEW_NICKNAME_303 = Status.create(303);
    public static final Status KICKED_307 = Status.create(307);
    public static final Status REMOVED_AFFIL_CHANGE_321 = Status.create(321);

    private final Integer code;

    /**
     * Looks up (creating and caching if necessary) the Status for the given decimal string.
     *
     * @param string the status code as a decimal string, e.g. "110".
     * @return the interned Status instance for that code.
     */
    public static Status create(String string) {
        Integer integer = Integer.valueOf(string);
        return create(integer);
    }

    /**
     * Looks up (creating and caching if necessary) the Status for the given code.
     *
     * @param i the numeric status code.
     * @return the interned Status instance for that code.
     */
    public static Status create(Integer i) {
        Status status = statusMap.get(i);
        if (status == null) {
            status = new Status(i);
            // Another thread may intern the same code concurrently; keep the winner
            // so that at most one instance per code is ever published.
            Status existing = statusMap.putIfAbsent(i, status);
            if (existing != null) {
                status = existing;
            }
        }
        return status;
    }

    /**
     * Creates a new instance of Status with the specified code.
     *
     * @param code the code that uniquely identifies the reason of the error.
     */
    private Status(int code) {
        this.code = code;
    }

    /**
     * Returns the code that uniquely identifies the reason of the error. The code
     * assists in presenting notification messages.
     *
     * @return the code that uniquely identifies the reason of the error.
     */
    public int getCode() {
        return code;
    }

    // Wire format: an empty element with a single "code" attribute.
    @Override
    public XmlStringBuilder toXML() {
        XmlStringBuilder xml = new XmlStringBuilder(this);
        xml.attribute("code", getCode());
        xml.closeEmptyElement();
        return xml;
    }

    @Override
    public String toString() {
        return code.toString();
    }

    // Two Status objects are equal iff their numeric codes are equal.
    @Override
    public boolean equals(Object other) {
        if (other == null) {
            return false;
        }
        if (other instanceof Status) {
            Status otherStatus = (Status) other;
            return code.equals(otherStatus.getCode());
        }
        return false;
    }

    @Override
    public int hashCode() {
        return code;
    }

    @Override
    public String getElementName() {
        return ELEMENT;
    }
}
}
|
brest-java-course-summer-2019/ihnat-misiyuk | car-repair/service/src/main/java/com/epam/brest/summer/courses2019/service/EmployeeService.java | package com.epam.brest.summer.courses2019.service;
import com.epam.brest.summer.courses2019.model.Employee;
import java.util.List;
/**
 * Employee Service Interface.
 */
public interface EmployeeService {
    /**
     * Get all employees.
     *
     * @return list of all employees
     */
    List<Employee> findAll();
    /**
     * Get all employees with specified department id.
     *
     * @param departmentId department id
     * @return list of employees by department id
     */
    List<Employee> findByDepartmentId(Integer departmentId);
    /**
     * Get employee with specified id.
     *
     * @param employeeId employee id
     * @return employee by id
     */
    Employee findById(Integer employeeId);
    /**
     * Persist new employee.
     *
     * @param employee employee
     * @return employee
     */
    Employee add(Employee employee);
    /**
     * Update employee.
     *
     * @param employee employee
     */
    void update(Employee employee);
    /**
     * Delete employee with specified id.
     *
     * @param employeeId employee id
     */
    void delete(Integer employeeId);
}
patakapata/KZEAddon-1.16.4 | src/main/java/com/theboss/kzeaddonfabric/events/KeyPressingEvents.java | <reponame>patakapata/KZEAddon-1.16.4
package com.theboss.kzeaddonfabric.events;
import com.theboss.kzeaddonfabric.KZEAddon;
import com.theboss.kzeaddonfabric.utils.ModUtils;
import com.theboss.kzeaddonfabric.utils.VanillaUtils;
import net.minecraft.client.MinecraftClient;
import net.minecraft.client.option.KeyBinding;
import net.minecraft.entity.Entity;
import net.minecraft.text.LiteralText;
import net.minecraft.text.TranslatableText;
import net.minecraft.util.hit.EntityHitResult;
import net.minecraft.util.hit.HitResult;
import java.util.List;
import java.util.Optional;
import java.util.UUID;
/**
 * Key-press handlers for the mod's key bindings. Each method is invoked with the
 * {@link KeyBinding} that was pressed; none of them use the binding argument directly.
 */
public class KeyPressingEvents {
    /**
     * Toggles "obsession" tracking for the entity under the crosshair.
     * Raycasts up to 100 blocks ignoring blocks; adds the hit entity's UUID to the
     * obsession list if absent, removes it if present, then reports the result in chat.
     */
    public static void onPressAddObsessionTarget(KeyBinding keyBinding) {
        MinecraftClient mc = MinecraftClient.getInstance();
        List<UUID> obsessions = KZEAddon.getObsessions();
        if (mc.player == null) return; // not in a world — nothing to target
        HitResult result = VanillaUtils.raycastIgnoreBlock(mc.player, 100.0, entity -> !entity.isSpectator() && entity.collides());
        LiteralText body = new LiteralText("Obsession > ");
        if (result.getType() == HitResult.Type.ENTITY) {
            Entity entity = ((EntityHitResult) result).getEntity();
            UUID uuid = entity.getUuid();
            if (!obsessions.contains(uuid)) {
                obsessions.add(uuid);
                body.append("Target added (");
            } else {
                obsessions.remove(uuid);
                body.append("Target removed (");
            }
            body.append(entity.getDisplayName()).append(")");
        } else {
            body.append("Target not found");
        }
        KZEAddon.info(body);
    }
    /** Opens the widgets configuration screen (debug binding). */
    public static void onPressDebug(KeyBinding keyBinding) {
        KZEAddon.widgetRenderer.openWidgetsScreen();
    }
    /** Toggles rendering of allied players and reports the new state in chat. */
    public static void onPressHideTeammates(KeyBinding keyBinding) {
        KZEAddon.options.isHideAllies = !KZEAddon.options.isHideAllies;
        KZEAddon.info(new TranslatableText("feature.kzeaddon." + (KZEAddon.options.isHideAllies ? "hide" : "show") + "_ally"));
    }
    /** Re-sends the current position packet to un-stack the player, then notifies them. */
    public static void onPressUnStack(KeyBinding keyBinding) {
        ModUtils.sendCurrentPositionPacket();
        Optional.ofNullable(MinecraftClient.getInstance().player).ifPresent(player -> player.sendMessage(new TranslatableText("feature.kzeaddon.un_stack"), false));
    }
}
|
Outerra/anteworld | include/ot/env.h |
#ifndef _OT_ENV_H_
#define _OT_ENV_H_
#include "glm/glm_meta.h"
#include <comm/binstream/binstream.h>
#include <comm/metastream/metastream.h>
namespace ot {
///Atmospheric parameters.
///The `mask` bit flags indicate which members carry explicit values; the enum below
///defines one bit per overridable parameter. Serialized member names and defaults in
///operator|| are the persistence contract — do not change them.
struct atmospheric_params
{
    enum {
        SUN_COLOR = 0x01,
        RAYLEIGH = 0x02,
        MIE = 0x04,
        INTENSITY = 0x08,
        GROUND_REFLECTANCE = 0x10,
        EXPOSURE = 0x20,
        SCATTERING = 0x40,
        MIN_AMBIENT = 0x80,
        SHADOW_LIGHT = 0x100,
    };
    unsigned int mask;              //< bitmask of the enum values above
    float3 sun_color;
    float3 rayleigh;                //< per-channel Rayleigh scattering coefficients
    float mie;
    float intensity;
    float min_ambient_intensity;
    float ground_reflectance;
    float shadow_light;
    float exposure;
    float scattering;
    atmospheric_params()
    {
        // Zero-initialize all members at once.
        // NOTE(review): assumes the struct (incl. float3) is trivially copyable — verify if float3 changes.
        ::memset(this, 0, sizeof(*this));
    }
    friend coid::metastream& operator || (coid::metastream& m, atmospheric_params& d)
    {
        return m.compound("atmospheric_params", [&]()
        {
            m.member("mask", d.mask, 0);
            m.member("sun_color", d.sun_color, float3(1));
            m.member("rayleigh", d.rayleigh, float3(5.8e-3f, 1.7e-2f, 4.1e-2f));
            m.member("mie", d.mie, 4e-3f);
            m.member("intensity", d.intensity, 25.f);
            m.member("ground_reflectance", d.ground_reflectance, 0.08f);
            m.member("exposure", d.exposure, 4.0f);
            m.member("shadow_light", d.shadow_light, 1.0f);
            m.member("scattering", d.scattering, 0.5f);
            m.member("min_ambient", d.min_ambient_intensity, 5e-8f);
        });
    }
};
///Water parameters.
///`mask` bits flag which members carry explicit values; serialized names/defaults are the contract.
struct water_params
{
    enum {
        HALF_DEPTH = 0x01,
        SCATTERING = 0x02,
    };
    unsigned int mask;      //< bitmask of the enum values above
    float3 half_depth;      //< per-channel half-depth (light attenuation)
    float3 scattering;      //< per-channel scattering coefficients
    water_params()
    {
        // Zero-initialize all members at once (assumes trivially copyable members).
        ::memset(this, 0, sizeof(*this));
    }
    friend coid::metastream& operator || (coid::metastream& m, water_params& d)
    {
        return m.compound("water_params", [&]()
        {
            m.member("mask", d.mask, 0);
            m.member("half_depth", d.half_depth, float3(0.33f, 1.6f, 3.8f));
            m.member("scattering", d.scattering, float3(0.005f));
        });
    }
};
///Fog parameters.
///`mask` bits flag which members carry explicit values; serialized names/defaults are the contract.
struct fog_params
{
    enum {
        HALF_DEPTH = 0x01,
        SCATTERING = 0x02,
        LEVEL = 0x04,
    };
    unsigned int mask;      //< bitmask of the enum values above
    float half_depth;       //< fog half-depth
    float scattering;       //< fog scattering coefficient
    float level;            //< fog base level
    fog_params()
    {
        // Zero-initialize all members at once (assumes trivially copyable members).
        ::memset(this, 0, sizeof(*this));
    }
    friend coid::metastream& operator || (coid::metastream& m, fog_params& d)
    {
        return m.compound("fog_params", [&]()
        {
            m.member("mask", d.mask, 0);
            m.member("half_depth", d.half_depth, 20);
            m.member("scattering", d.scattering, 0.005f);
            m.member("level", d.level, 0);
        });
    }
};
///Cloud params
struct cloud_params
{
    float base = 3000;      //< cloud base elevation [m]
    float height = 4000;    //< max cloud thickness [m]

    //< altitude at which rain originates: mid-height of the cloud layer [m]
    float get_rain_alt() const { return base + height * .5f; }

    friend coid::metastream& operator || (coid::metastream& m, cloud_params& d)
    {
        return m.compound("cloud_params", [&]()
        {
            m.member("base", d.base, 3000);
            m.member("height", d.height, 4000);
        });
    }
};
///Forest parameters.
///ecs_* vectors pack (elevation, curvature, slope) bounds used to decide where forest grows.
struct forest_params
{
    float3 ecs_min;     //< elevation/curvature/slope min values
    float3 ecs_max;     //< elevation/curvature/slope max values
    float3 ecs_trans;   //< elevation/curvature/slope transitional width values
    float threshold;    //< forest threshold from vegetation density value (0..1)
    float aspect;       //< aspect (sunny side) vegetation value bias
    forest_params() {
        // Populate members from the defaults declared in operator|| below.
        coid::metastream::initialize_from_defaults(this);
    }
    friend coid::metastream& operator || (coid::metastream& m, forest_params& w)
    {
        return m.compound("forest", [&]()
        {
            m.member("ecs_min", w.ecs_min, float3( 0.0f, -0.05f, 0.03f));
            m.member("ecs_max", w.ecs_max, float3( 6000.0f, 1.00f, 0.3f));
            m.member("ecs_trans", w.ecs_trans, float3( 500.0f, 0.10f, 0.2f));
            m.member("threshold", w.threshold, 0.4f);
            m.member("aspect", w.aspect, 0.4f);
            // Obsolete members are kept so older serialized data still parses.
            m.member_obsolete<int>("render_distcoef");
            m.member_obsolete<float>("render_distance");
            m.member_obsolete<float>("shadow_distance");
            m.member_obsolete<float>("shadow_range");
            m.member_obsolete<bool>("shading");
        });
    }
};
///Virtual elevation params, used for latitude-dependent temperature computation for snow and vegetation
struct snow_params
{
    float2 virtelev;    //< virtual elevation latitude dependency coefficients, x*sin(lat) + y*sin(lat)^2
    float virtcurv;     //< curvature effect on virtual elevation
    float snowmin;      //< virtual elevation at which snow starts to appear
    float snowsat;      //< virtual elevation at which snow covers everything but steep rock faces
    snow_params() {
        // Populate members from the defaults declared in operator|| below.
        coid::metastream::initialize_from_defaults(this);
    }
    friend coid::metastream& operator || (coid::metastream& m, snow_params& w)
    {
        return m.compound("virtual_elevation", [&]()
        {
            m.member("virtelev", w.virtelev, float2(3500,0));
            m.member("virtcurv", w.virtcurv, 2500.0f);
            m.member("snowmin", w.snowmin, 4900.0f);
            m.member("snowsat", w.snowsat, 6000.0f);
        });
    }
};
///Weather params
struct weather_params
{
    float wind_heading;             //< wind heading in degrees, north 0, east 90
    float wind_speed;               //< wind speed at the gradient height, in m/s
    float wind_gradient_height;     //< gradient height: 457m large cities, 366m suburbs, 274m open terrain, 213m open sea
    float wind_stability;           //< Hellmann exponent, 0.06 .. 0.60, default 1/7
    float wind_turbulence;          //< 0..7, http://ntrs.nasa.gov/archive/nasa/casi.ntrs.nasa.gov/19980028448_1998081596.pdf
    float cloud_density;            //< cloud cover density, 0..1
    float rain_density;             //< rain density, 0..1
    float snow_density;             //< snow density, 0..1
    float lightning_per_kmsqmin;    //< lightning bolts count per sqare kilometer and minute
    float auto_weather_period;      //< deprecated, period in which weather changes if auto weather enabled
    bool auto_weather;              //< automatic weather change
    weather_params() {
        // Populate members from the defaults declared in operator|| below.
        coid::metastream::initialize_from_defaults(this);
    }
    // Tag constructor that leaves members uninitialized (no defaults applied).
    explicit weather_params(int) {
    }
    // Hellmann power-law wind profile: speed at height h [m] above ground.
    // The max() guards against h <= 0 (pow of a non-positive base).
    float wind_speed_at_height( float h ) const {
        return pow(glm::max(1e-6f, h) / wind_gradient_height, wind_stability) * wind_speed;
    }
    //@return distance travelled from launch point at 0 when wind-carried particle is at h
    float distance_at_height( float h ) const {
        return h * pow(glm::max(1e-6f, h) / wind_gradient_height, wind_stability) / (1 + wind_stability);
    }
    friend coid::metastream& operator || (coid::metastream& m, weather_params& w)
    {
        return m.compound("weather_params", [&]()
        {
            m.member("wind_heading", w.wind_heading, 90.0f);
            m.member("wind_speed", w.wind_speed, 10.0f);
            m.member("wind_gradient_height", w.wind_gradient_height, 270.0f);
            m.member("wind_stability", w.wind_stability, 1.0f/7);
            m.member("wind_turbulence", w.wind_turbulence, 0.0f);
            m.member("cloud_density", w.cloud_density, 0.05f);
            m.member("rain_density", w.rain_density, 0);
            m.member("snow_density", w.snow_density, 0);
            m.member("lightning_per_kmsqmin", w.lightning_per_kmsqmin, 0.0f);
            // Obsolete member kept so older serialized data still parses.
            m.member_obsolete<float>("lightning_probability_multiplier");
            m.member("auto_weather_period", w.auto_weather_period, 100);
            m.member("auto_weather", w.auto_weather, true);
        });
    }
};
///Sea/water surface state parameters (wave, foam, current and surf settings).
struct water_state_params
{
    float sea_dominant_wave_length;         //< dominant wave length [m]
    float sea_wave_amplitude_multiplier;    //< scales wave amplitude
    float sea_foam_multiplier;              //< scales foam intensity
    float sea_current_heading;              //< current heading in degrees
    float sea_current_speed;                //< current speed
    float sea_wind_contribution;            //< wind contribution to the sea state
    float sea_surf_amplitude_multiplier;    //< scales surf amplitude
    water_state_params() {
        // Populate members from the defaults declared in operator|| below.
        coid::metastream::initialize_from_defaults(this);
    }
    friend coid::metastream& operator || (coid::metastream& m, water_state_params& ws)
    {
        return m.compound("water_state_params", [&]()
        {
            m.member("sea_dominant_wave_length", ws.sea_dominant_wave_length, 10.0f);
            m.member("sea_wave_amplitude_multiplier", ws.sea_wave_amplitude_multiplier, 1.0f);
            m.member("sea_foam_multiplier", ws.sea_foam_multiplier, 1.0f);
            m.member("sea_current_heading", ws.sea_current_heading, 0.0f);
            m.member("sea_current_speed", ws.sea_current_speed, 0.0f);
            m.member("sea_wind_contribution", ws.sea_wind_contribution, 0.0f);
            m.member("sea_surf_amplitude_multiplier", ws.sea_surf_amplitude_multiplier, 1.0f);
        });
    }
};
} //namespace ot
#endif //_OT_ENV_H_
|
mjreid/speedcentral | sc-app/server/src/main/scala/com/speedcentral/lmp/LmpConstants.scala | <gh_stars>1-10
package com.speedcentral.lmp
/** Constants describing the Doom LMP demo file format. */
object LmpConstants {
  /** Byte value that terminates the tic stream of a demo. */
  val EndOfDemoMarker: Byte = 0x80.toByte
  /** Each recorded tic occupies this many bytes of input data. */
  val BytesPerTic: Int = 4
  /** Doom runs its simulation at 35 tics per second. */
  val TicsPerSecond: Int = 35
  /** Demo-header version bytes identifying the recording engine. */
  object EngineVersion {
    val doom_19: Byte = 109.toByte
    val boom: Byte = 202.toByte
  }
  /** Header byte offsets for vanilla Doom 1.9 demos. */
  object Doom19Indexes {
    val skillLevel = 1
    val episode = 2
    val map = 3
  }
  /** Header byte offsets for Boom demos (longer header). */
  object BoomIndexes {
    val skillLevel = 8
    val episode = 9
    val map = 10
  }
}
|
mmacphail/adventures | src/main/java/com/macphail/adventures/business/game/service/AdventureService.java | <gh_stars>0
package com.macphail.adventures.business.game.service;
import com.macphail.adventures.business.game.api.AdventureApi;
import com.macphail.adventures.business.game.model.adventure.Adventure;
import com.macphail.adventures.business.game.model.adventure.Player;
import com.macphail.adventures.business.game.model.adventure.exceptions.PlayerNotInAdventureException;
import com.macphail.adventures.business.game.model.event.DomainEventPublisher;
import com.macphail.adventures.business.game.repository.AdventureRepository;
import javax.ejb.Stateless;
import javax.inject.Inject;
import java.io.Serializable;
@Stateless
public class AdventureService implements AdventureApi, Serializable {
    @Inject
    private AdventureRepository adventureRepository;
    @Inject
    DomainEventPublisher eventPublisher;

    /** Loads the adventure and lets the given user join it as a new player. */
    @Override
    public void playerJoinsAdventure(long adventureId, String userId, String playerName, String playerDescription) {
        Player.joinAdventure(userId, findAdventure(adventureId), playerName, playerDescription);
    }

    /** The game master writes a piece of narrative for the adventure. */
    @Override
    public void gameMasterWritesNarrative(long adventureId, String content) {
        findAdventure(adventureId).getGameMaster().writeNarrative(content);
    }

    /** A player posts a reaction to the ongoing adventure. */
    @Override
    public void playersReact(long adventureId, long playerId, String content) {
        findPlayer(adventureId, playerId).react(content);
    }

    /** The game master marks a new progress milestone for the adventure. */
    @Override
    public void progressAdventure(long adventureId, String description) {
        findAdventure(adventureId).getGameMaster().progressReached(description);
    }

    /** Records a heroic deed performed by the given player. */
    @Override
    public void playerDoesHeroicDeed(long adventureId, long playerId, String description) {
        findPlayer(adventureId, playerId).doHeroicDeeds(description);
    }

    /** The game master cancels the whole adventure. */
    @Override
    public void gameMasterCancelsAdventure(long adventureId) {
        findAdventure(adventureId).getGameMaster().cancelAdventure();
    }

    /** The given player leaves the adventure. */
    @Override
    public void playerLeavesAdventure(long adventureId, long playerId) {
        findPlayer(adventureId, playerId).leaveAdventure();
    }

    /** Loads an adventure by id and wires in the event publisher before returning it. */
    private Adventure findAdventure(long adventureId) {
        Adventure adventure = adventureRepository.findById(adventureId);
        adventure.setDomainEventPublisher(eventPublisher);
        return adventure;
    }

    /** Resolves a player within an adventure, failing if they are not a participant. */
    private Player findPlayer(long adventureId, long playerId) {
        return findAdventure(adventureId).playerFor(playerId)
                .orElseThrow(PlayerNotInAdventureException::new);
    }
}
|
RidmiR/micro-integrator | integration/mediation-tests/tests-service/src/test/java/org/wso2/carbon/esb/proxyservice/test/proxyservices/ESBJAVA4540PinnedServerParameterTestCase.java | /*
* Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.esb.proxyservice.test.proxyservices;
import org.apache.http.HttpResponse;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import org.wso2.carbon.integration.common.admin.client.LogViewerClient;
import org.wso2.carbon.logging.view.stub.types.carbon.LogEvent;
import org.wso2.carbon.proxyadmin.stub.types.carbon.ProxyData;
import org.wso2.esb.integration.common.clients.proxy.admin.ProxyServiceAdminClient;
import org.wso2.esb.integration.common.utils.CarbonLogReader;
import org.wso2.esb.integration.common.utils.ESBIntegrationTest;
import org.wso2.esb.integration.common.utils.clients.SimpleHttpClient;
import java.util.HashMap;
import java.util.Map;
/**
 * This test class will test the Proxy Service deployment when pinnedService parameter value does
 * not contain current instance name
 * https://wso2.org/jira/browse/ESBJAVA-4540
 */
public class ESBJAVA4540PinnedServerParameterTestCase extends ESBIntegrationTest {
    private final String proxyServiceName = "pinnedServerProxy";
    private final String proxyServiceNameEditProxy = "EditProxyWithPinnedServer";

    @BeforeClass(alwaysRun = true)
    public void setEnvironment() throws Exception {
        super.init();
    }

    @Test(groups = "wso2.esb", description = "Deploying proxy when the pinnedServer is having another instance name")
    public void deployProxyService() throws Exception {
        CarbonLogReader logReader = new CarbonLogReader();
        logReader.start();
        try {
            SimpleHttpClient client = new SimpleHttpClient();
            Map<String, String> headers = new HashMap<>();
            headers.put("Accept", "application/json");
            String endpoint = "https://localhost:9354/management/proxy-services?proxyServiceName=proxyWithPinnedServer";
            HttpResponse response = client.doGet(endpoint, headers);
            // 404 proves the service was NOT deployed on this (non-pinned) instance.
            Assert.assertEquals(response.getStatusLine().getStatusCode(), 404, "Proxy service got deployed");
        } finally {
            // Stop the reader even when the assertion fails; otherwise it keeps running
            // and leaks into subsequent tests.
            logReader.stop();
        }
    }

    @Test(groups = "wso2.esb", description = "Editing a proxy service when the pinnedServer is having"
            + " another instance name", enabled = false)
    public void modifyProxyService() throws Exception {
        ProxyServiceAdminClient proxyAdmin = new ProxyServiceAdminClient(contextUrls.getBackEndUrl(),
                getSessionCookie());
        ProxyData proxyData = proxyAdmin.getProxyDetails(proxyServiceNameEditProxy);
        proxyData.setPinnedServers(new String[] { "invalidPinnedServer" });
        LogViewerClient logViewerClient = new LogViewerClient(contextUrls.getBackEndUrl(), getSessionCookie());
        logViewerClient.clearLogs();
        proxyAdmin.updateProxy(proxyData);
        LogEvent[] logEvents = logViewerClient.getAllRemoteSystemLogs();
        boolean isLogMessageFound = false;
        for (LogEvent log : logEvents) {
            if (log != null && log.getMessage().contains(
                    "not in pinned servers list. Not deploying " + "Proxy service : EditProxyWithPinnedServer")) {
                isLogMessageFound = true;
                break;
            }
        }
        Assert.assertTrue(isLogMessageFound, "Log message not found in the console log");
        //proxy service should not be deployed since the pinnedServer does not contain this server name
        Assert.assertFalse(esbUtils.isProxyDeployed(contextUrls.getBackEndUrl(), getSessionCookie(), proxyServiceName),
                "Proxy service deployed successfully");
    }
}
|
E-C-Group/jsip | jain-sip/src/test/java/co/ecg/jain_sip/tck/msgflow/MessageFlowHarness.java | <gh_stars>0
/*
* Conditions Of Use
*
* This software was developed by employees of the National Institute of
* Standards and Technology (NIST), and others.
* This software is has been contributed to the public domain.
* As a result, a formal license is not needed to use the software.
*
* This software is provided "AS IS."
* NIST MAKES NO WARRANTY OF ANY KIND, EXPRESS, IMPLIED
* OR STATUTORY, INCLUDING, WITHOUT LIMITATION, THE IMPLIED WARRANTY OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NON-INFRINGEMENT
* AND DATA ACCURACY. NIST does not warrant or make any representations
* regarding the use of the software or the results thereof, including but
* not limited to the correctness, accuracy, reliability or usefulness of
* the software.
*
*
*/
package co.ecg.jain_sip.tck.msgflow;
import co.ecg.jain_sip.sip.ri.stack.SIPTransactionStack;
import java.text.ParseException;
import java.util.LinkedList;
import java.util.List;
import co.ecg.jain_sip.sip.ListeningPoint;
import co.ecg.jain_sip.sip.ObjectInUseException;
import co.ecg.jain_sip.sip.SipProvider;
import co.ecg.jain_sip.sip.SipStack;
import co.ecg.jain_sip.sip.address.Address;
import co.ecg.jain_sip.sip.address.AddressFactory;
import co.ecg.jain_sip.sip.address.SipURI;
import co.ecg.jain_sip.sip.header.CSeqHeader;
import co.ecg.jain_sip.sip.header.CallIdHeader;
import co.ecg.jain_sip.sip.header.ContactHeader;
import co.ecg.jain_sip.sip.header.ContentTypeHeader;
import co.ecg.jain_sip.sip.header.FromHeader;
import co.ecg.jain_sip.sip.header.Header;
import co.ecg.jain_sip.sip.header.HeaderFactory;
import co.ecg.jain_sip.sip.header.MaxForwardsHeader;
import co.ecg.jain_sip.sip.header.ToHeader;
import co.ecg.jain_sip.sip.header.ViaHeader;
import co.ecg.jain_sip.sip.message.MessageFactory;
import co.ecg.jain_sip.sip.message.Request;
import co.ecg.jain_sip.sip.message.Response;
import co.ecg.jain_sip.tck.TckInternalError;
import co.ecg.jain_sip.tck.TestHarness;
import co.ecg.jain_sip.tck.TiUnexpectedError;
/**
* <p>
* Title: TCK
* </p>
* <p>
* Description: JAIN SIP 1.1 Technology Compatibility Kit
* </p>
*
* @author <NAME> Network Research Team, Louis Pasteur University,
* Strasbourg, France
* @author <NAME>
*
* @version 1.0
*/
public class MessageFlowHarness extends TestHarness {
// Name of the custom header used to correlate requests and responses in tests.
protected static final String EXTENSION_HDR = "Status-Extension";
// Monotonically increasing value placed in the Status-Extension header.
protected static int counter;
// timeout values depend on pc, mine is not that powerful :)
protected static long MESSAGES_ARRIVE_FOR = 2500;
// it is really important to delete as a failure messes up following tests
// so let's try real hard - 10 is a good number
protected static int RETRY_OBJECT_DELETES = 10;
protected static long RETRY_OBJECT_DELETES_AFTER = 500;
protected static long STACKS_START_FOR = 1000;
protected static long STACKS_SHUT_DOWN_FOR = 500;
protected static long TRANSACTION_TIMES_OUT_FOR = 38000;
// Per-test RI (reference implementation) and TI (implementation under test) endpoints.
protected ListeningPoint riListeningPoint = null;
protected ListeningPoint tiListeningPoint = null;
protected SipProvider riSipProvider = null;
protected SipProvider tiSipProvider = null;
// Collects SIP events raised during a test run.
protected SipEventCollector eventCollector = new SipEventCollector();
protected SipStack riSipStack;
protected SipStack tiSipStack;
/** Creates a harness with automatic dialog support enabled. */
public MessageFlowHarness(String name) {
    this(name,true);
}
/**
 * Creates a harness and instantiates both the RI and TI SIP stacks.
 *
 * @param name       test name, passed through to the JUnit base class
 * @param autoDialog whether the stacks should run with automatic dialog support
 */
protected MessageFlowHarness(String name, boolean autoDialog) {
    super(name, autoDialog);
    System.out.println("Initializing test " + name);
    try {
        if ( riFactory != null)
            riFactory.resetFactory();
        riSipStack = riFactory.createSipStack(getRiProperties(autoDialog));
        assertTrue( "RI must be gov.nist", riSipStack instanceof SIPTransactionStack );
        tiFactory.resetFactory();
        tiFactory.setPathName( getImplementationPath() );
        tiSipStack = tiFactory.createSipStack(getTiProperties());
        // The two stacks must be distinct objects, otherwise the test is meaningless.
        if (riSipStack == tiSipStack) {
            throw new TckInternalError("riSipStack should not the same as tiSipStack");
        }
    } catch (TckInternalError ex){
        throw ex;
    } catch (Exception ex) {
        // Include the cause in the failure message — the bare "initialization failed"
        // previously discarded all diagnostic information.
        fail("initialization failed: " + ex);
    }
}
// issue 17 on dev.java.net specify the headerFactory to use
// report and fix thereof <EMAIL>
/**
 * Adds a unique Status-Extension header to the request so that the response
 * can later be correlated back to it (see {@link #addStatus(Request, Response)}).
 *
 * @param headerFactory factory used to build the header
 * @param request       request to tag
 */
protected void addStatus(HeaderFactory headerFactory, Request request) {
    try {
        // Integer.toString avoids the deprecated new Integer(...) boxing constructor.
        Header extension = headerFactory.createHeader(EXTENSION_HDR,
                Integer.toString(counter++));
        request.addHeader(extension);
    } catch (ParseException ex) {
        // The header value is a plain decimal integer, so this should not occur;
        // a missing extension header is tolerated by the tests.
    }
}
/**
 * Copies the Status-Extension correlation header (if any) from the request
 * onto the response, so request/response pairs can be matched in assertions.
 */
protected void addStatus(Request request, Response response) {
    Header status = request.getHeader(EXTENSION_HDR);
    if (status == null) {
        return; // request was never tagged — nothing to propagate
    }
    response.addHeader(status);
}
/**
 * Initialises both RI and TI sip stacks and stack factories.
 *
 * @throws java.lang.Exception
 *             All Let all exceptions that come from the underlying stack to
 *             pass through and surface at JUnit Level.
 */
public void setUp() throws java.lang.Exception {
    // RI and TI bind UDP listening points on distinct ports of the same local address.
    riListeningPoint = riSipStack.createListeningPoint(LOCAL_ADDRESS, RI_PORT,
            "udp");
    riSipProvider = riSipStack.createSipProvider(riListeningPoint);
    tiListeningPoint = tiSipStack.createListeningPoint(LOCAL_ADDRESS, TI_PORT,
            "udp");
    tiSipProvider = tiSipStack.createSipProvider(tiListeningPoint);
    // JvB: don't forget to start them!
    riSipStack.start();
    tiSipStack.start();
    // If we don't wait for them to start first messages get lost and are
    // therefore reported as test failures.
    sleep(STACKS_START_FOR);
}
/**
 * Sets all JAIN SIP objects to null and resets the SipFactory.
 * Each delete is retried up to RETRY_OBJECT_DELETES times because a provider or
 * listening point may still be "in use" briefly after a test finishes; a failed
 * delete would poison every subsequent test.
 *
 * @throws java.lang.Exception
 */
public void tearDown() throws java.lang.Exception {
    // Delete RI SipProvider
    int tries = 0;
    for (tries = 0; tries < RETRY_OBJECT_DELETES; tries++) {
        try {
            riSipStack.deleteSipProvider(riSipProvider);
        } catch (ObjectInUseException ex) {
            // System.err.println("Retrying delete of riSipProvider!");
            sleep(RETRY_OBJECT_DELETES_AFTER);
            continue;
        }
        break;
    }
    if (tries >= RETRY_OBJECT_DELETES)
        throw new TckInternalError("Failed to delete riSipProvider!");
    // Delete RI ListeningPoint
    for (tries = 0; tries < RETRY_OBJECT_DELETES; tries++) {
        try {
            riSipStack.deleteListeningPoint(riListeningPoint);
        } catch (ObjectInUseException ex) {
            // System.err.println("Retrying delete of riListeningPoint!");
            sleep(RETRY_OBJECT_DELETES_AFTER);
            continue;
        }
        break;
    }
    if (tries >= RETRY_OBJECT_DELETES)
        throw new TckInternalError("Failed to delete riListeningPoint!");
    riSipProvider = null;
    riListeningPoint = null;
    // Delete TI SipProvider. Note: failures on the TI side are reported as
    // TiUnexpectedError (fault of the implementation under test), not TckInternalError.
    for (tries = 0; tries < RETRY_OBJECT_DELETES; tries++) {
        try {
            tiSipStack.deleteSipProvider(tiSipProvider);
        } catch (ObjectInUseException ex) {
            // System.err.println("Retrying delete of tiSipProvider!");
            sleep(RETRY_OBJECT_DELETES_AFTER);
            continue;
        }
        break;
    }
    if (tries >= RETRY_OBJECT_DELETES)
        throw new TiUnexpectedError("Failed to delete tiSipProvider!");
    // Delete TI ListeningPoint
    for (tries = 0; tries < RETRY_OBJECT_DELETES; tries++) {
        try {
            tiSipStack.deleteListeningPoint(tiListeningPoint);
        } catch (ObjectInUseException ex) {
            // System.err.println("Retrying delete of tiListeningPoint!");
            sleep(RETRY_OBJECT_DELETES_AFTER);
            continue;
        }
        break;
    }
    if (tries >= RETRY_OBJECT_DELETES)
        throw new TiUnexpectedError("Failed to delete tiListeningPoint!");
    riSipStack.stop();
    tiSipStack.stop();
    tiSipProvider = null;
    tiListeningPoint = null;
    // Wait for stack threads to release resources (e.g. port)
    sleep(STACKS_SHUT_DOWN_FOR);
}
// ========================= Utility Methods =========================
/**
 * Creates a SipRequest using the specified factories. The request has the
 * specified method and is meant to be sent from srcProvider to dstProvider.
 * This method is preferred to manual creation of requests as it helps avoid
 * using RI objects instead of corresponding TI objects (or vice versa).
 *
 * @param method
 *            the request's method
 * @param addressFactory
 *            the address factory to use when creating addresses
 * @param headerFactory
 *            the header factory to use when creating headers
 * @param messageFactory
 *            the message factory to use when creating headers
 * @param srcProvider
 *            the provider that will eventually be used to send the request
 * @param dstProvider
 *            the provider that will eventually dispatch the request to a
 *            SipListener
 * @param contentType
 *            if the content parameter is not null then this is its content
 *            type.
 * @param contentSubType
 *            if the content parameter is not null then this is its sub
 *            content type.
 * @param content
 *            the content of the request. if null this parameter is ignored
 * @return a request generated by the specified factories and destined to go
 *         from srcProvider to dstProvider
 * @throws Exception
 *             if anything should go wrong. further exception handling is
 *             left to calling methods (or JUnit).
 */
protected Request createRequest(String method,
        AddressFactory addressFactory, HeaderFactory headerFactory,
        MessageFactory messageFactory, SipProvider srcProvider,
        SipProvider dstProvider, String contentType, String contentSubType,
        Object content) throws Exception {
    // Source SipUri: built from the source provider's first listening point.
    ListeningPoint srclp = srcProvider.getListeningPoints()[0];
    SipURI srcSipURI = addressFactory.createSipURI(null, srclp
            .getIPAddress());
    srcSipURI.setPort(srclp.getPort());
    srcSipURI.setTransportParam(srclp.getTransport());
    // Destination SipURI
    ListeningPoint dstlp = dstProvider.getListeningPoints()[0];
    SipURI dstSipURI = addressFactory.createSipURI(null, dstlp
            .getIPAddress());
    dstSipURI.setPort(dstlp.getPort());
    dstSipURI.setTransportParam(dstlp.getTransport());
    // CallId: re-created through the chosen headerFactory so the header
    // object comes from the right implementation (RI vs TI).
    CallIdHeader callId = srcProvider.getNewCallId();
    callId = headerFactory.createCallIdHeader( callId.getCallId() );
    // CSeq
    CSeqHeader cSeq = headerFactory.createCSeqHeader(1L, method);
    // From: tag derived from the provider's hashCode for uniqueness.
    Address fromAddress = addressFactory.createAddress(srcSipURI);
    FromHeader from = headerFactory.createFromHeader(fromAddress, Integer
            .toString(srcProvider.hashCode()));
    // To
    Address toAddress = addressFactory.createAddress(dstSipURI);
    ToHeader to = headerFactory.createToHeader(toAddress, null);
    // Contact
    ContactHeader contact = headerFactory.createContactHeader(fromAddress);
    List via = new LinkedList();
    ViaHeader viaHeader = headerFactory.createViaHeader(srclp
            .getIPAddress(), srclp.getPort(), srclp.getTransport(),
            // BUG: Use proper RFC3261 branch ID ("z9hG4bK" magic cookie
            // prefix). NOTE(review): currentTimeMillis alone may collide
            // for two requests created in the same millisecond.
            "z9hG4bK" + Long.toString(System.currentTimeMillis())
    // branch id
            );
    via.add(viaHeader);
    MaxForwardsHeader maxForwards = headerFactory
            .createMaxForwardsHeader(3);
    Request request = messageFactory.createRequest(dstSipURI, method,
            callId, cSeq, from, to, via, maxForwards);
    request.addHeader(contact);
    // Optional body: only attached when all three content args are given.
    if (contentType != null && contentSubType != null && content != null) {
        ContentTypeHeader contentTypeHdr = headerFactory
                .createContentTypeHeader(contentType, contentSubType);
        request.setContent(content, contentTypeHdr);
    }
    // pass the headerFactory - issue17 by <EMAIL>
    addStatus(headerFactory, request);
    return request;
}
/**
 * Builds an INVITE request entirely from RI factories, addressed from the
 * RI provider to the TI provider.
 *
 * @param contentType content type of {@code content}, or null for no body
 * @param contentSubType content sub type of {@code content}, or null
 * @param content the request body, or null for a bodiless request
 * @return an RI->TI invite request
 * @throws TckInternalError if the RI fails to build the request
 */
protected Request createRiInviteRequest(String contentType,
        String contentSubType, Object content) throws TckInternalError {
    try {
        return createRequest(Request.INVITE, riAddressFactory,
                riHeaderFactory, riMessageFactory, riSipProvider,
                tiSipProvider, contentType, contentSubType, content);
    } catch (Throwable cause) {
        throw new TckInternalError(
                "Failed to create an RI->TI invite request", cause);
    }
}
/**
 * Builds an INVITE request entirely from TI factories, addressed from the
 * TI provider to the RI provider.
 *
 * @param contentType content type of {@code content}, or null for no body
 * @param contentSubType content sub type of {@code content}, or null
 * @param content the request body, or null for a bodiless request
 * @return a TI->RI invite request
 * @throws TiUnexpectedError if the TI fails to build the request
 */
protected Request createTiInviteRequest(String contentType,
        String contentSubType, Object content) throws TiUnexpectedError {
    try {
        return createRequest(Request.INVITE, tiAddressFactory,
                tiHeaderFactory, tiMessageFactory, tiSipProvider,
                riSipProvider, contentType, contentSubType, content);
    } catch (Throwable cause) {
        throw new TiUnexpectedError(
                "Failed to create a TI->RI invite request", cause);
    }
}
/**
 * Builds a bodiless REGISTER request from RI factories, addressed from the
 * RI provider to the TI provider.
 *
 * @return an RI->TI register request
 * @throws TckInternalError if the RI fails to build the request
 */
protected Request createRiRegisterRequest() throws TckInternalError {
    try {
        return createRequest(Request.REGISTER, riAddressFactory,
                riHeaderFactory, riMessageFactory, riSipProvider,
                tiSipProvider, null, null, null);
    } catch (Throwable cause) {
        throw new TckInternalError(
                "Failed to create an RI->TI register request", cause);
    }
}
/**
 * Builds a bodiless REGISTER request from TI factories, addressed from the
 * TI provider to the RI provider.
 *
 * @return a TI->RI register request
 * @throws TiUnexpectedError if the TI fails to build the request
 */
protected Request createTiRegisterRequest() throws TiUnexpectedError {
    try {
        return createRequest(Request.REGISTER, tiAddressFactory,
                tiHeaderFactory, tiMessageFactory, tiSipProvider,
                riSipProvider, null, null, null);
    } catch (Throwable cause) {
        throw new TiUnexpectedError(
                "Failed to create a TI->RI register request", cause);
    }
}
/**
 * Waits twice the standard message-propagation delay, for tests needing
 * extra margin (e.g. retransmissions).
 */
public static void waitLongForMessage() {
    sleep(2*MESSAGES_ARRIVE_FOR);
}
/**
 * Waits MESSAGES_ARRIVE_FOR milliseconds. This method is called after a
 * message has been sent so that it has the time to propagate through the
 * sending and receiving stacks.
 */
public static void waitForMessage() {
    sleep(MESSAGES_ARRIVE_FOR);
}
/**
 * Waits long enough for a transaction to time out
 * (TRANSACTION_TIMES_OUT_FOR milliseconds).
 */
protected static void waitForTimeout() {
    sleep(TRANSACTION_TIMES_OUT_FOR);
}
/**
 * Waits half the standard message-propagation delay (the original comment
 * claiming "a good long time" was wrong - this is the short variant).
 */
protected static void waitShortForMessage() {
    sleep(MESSAGES_ARRIVE_FOR/2);
}
/**
 * Waits during _no_less_ than sleepFor milliseconds. Had to implement it on
 * top of Thread.sleep() to guarantee minimum sleep time: Thread.sleep()
 * alone may return early when interrupted.
 *
 * @param sleepFor
 *            the number of milliseconds to wait
 */
protected static void sleep(long sleepFor) {
    long startTime = System.currentTimeMillis();
    long haveBeenSleeping = 0;
    while (haveBeenSleeping < sleepFor) {
        try {
            //Thread.sleep(sleepFor - haveBeenSleeping);
            // Sleep in slices of at most 750ms, printing a progress dot
            // after each full slice.
            if ( sleepFor - haveBeenSleeping < 750) {
                Thread.sleep(sleepFor - haveBeenSleeping);
            } else {
                Thread.sleep(750);
                System.out.print(".");
            }
        } catch (InterruptedException ex) {
            // Deliberately swallowed: the loop re-sleeps until the full
            // duration elapses. NOTE(review): the interrupt status is not
            // restored -- confirm no caller relies on it.
        }
        // Recompute from wall clock so early wakeups don't shorten the wait.
        haveBeenSleeping = (System.currentTimeMillis() - startTime);
    }
}
/**
 * Creates a Contact header for the TI side, using the TI factories and the
 * TI provider's UDP listening point.
 *
 * @return a Contact header with display name "TI Contact"
 * @throws Exception if the TI fails to build the header (also fails the
 *             test via assertTrue(false))
 */
public ContactHeader createTiContact() throws Exception {
    try {
        ContactHeader contact = tiHeaderFactory.createContactHeader();
        // JvB: getIPAddress may return null!
        String ip = tiSipProvider.getSipStack().getIPAddress();
        if (ip == null) {
            // Fall back to the first listening point's address.
            ListeningPoint lp = (ListeningPoint) tiSipProvider
                    .getSipStack().getListeningPoints().next();
            ip = lp.getIPAddress();
        }
        SipURI srcSipURI = tiAddressFactory.createSipURI(null, ip);
        srcSipURI.setPort(tiSipProvider.getListeningPoint("udp").getPort());
        srcSipURI.setTransportParam("udp");
        Address address = tiAddressFactory.createAddress(srcSipURI);
        address.setDisplayName("TI Contact");
        contact.setAddress(address);
        return contact;
    } catch (Exception ex) {
        ex.printStackTrace();
        // assertTrue(false) throws AssertionError, so the rethrow below is
        // effectively unreachable; kept so the method still compiles with a
        // return-or-throw on every path.
        assertTrue(false);
        throw ex;
    }
}
/**
 * Creates a Contact header for the RI side, using the RI's own address
 * factory and its first listening point.
 *
 * @return a Contact header with display name "RI Contact"
 * @throws TckInternalError if the RI fails to build the header
 */
public ContactHeader createRiContact() throws TckInternalError {
    try {
        ContactHeader contact = riHeaderFactory.createContactHeader();
        // BUG reported by <NAME> (Open Cloud):
        // Should be using RI's address factory here, not TI's.
        ListeningPoint lp = riSipProvider.getListeningPoints()[0];
        SipURI srcSipURI = riAddressFactory.createSipURI(null, lp
                .getIPAddress());
        srcSipURI.setPort(lp.getPort());
        srcSipURI.setTransportParam(lp.getTransport());
        Address address = riAddressFactory.createAddress(srcSipURI);
        address.setDisplayName("RI Contact");
        contact.setAddress(address);
        return contact;
    } catch (Exception ex) {
        // Preserve the original exception as the cause instead of
        // flattening it to just its message (the (String, Throwable)
        // constructor is already used elsewhere in this class).
        throw new TckInternalError(ex.getMessage(), ex);
    }
}
}
|
mayee/dragonshard | dragonshard-core/src/main/java/net/dragonshard/dsf/core/toolkit/UrlUtils.java | <reponame>mayee/dragonshard
/*
* Copyright 1999-2018 dragonshard.net.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.dragonshard.dsf.core.toolkit;
import java.net.URI;
import java.net.URISyntaxException;
/**
 * URL utility helpers.
 *
 * @author mayee
 * @version v1.0
 **/
public class UrlUtils {

    /** Utility class; not meant to be instantiated. */
    private UrlUtils() {
    }

    /**
     * Appends a query fragment to an existing URI, inserting '?' or '&'
     * as appropriate and keeping any fragment after the query.
     *
     * @param uri the original URI string
     * @param appendQuery the query part to append (without '?' or '&')
     * @return the URI string with the extra query appended
     * @throws URISyntaxException if {@code uri} cannot be parsed
     */
    public static String appendUri(String uri, String appendQuery) throws URISyntaxException {
        URI oldUri = new URI(uri);
        String oldQuery = oldUri.getQuery();
        // No existing query -> the appended part becomes the whole query.
        String newQuery = (oldQuery == null) ? appendQuery : oldQuery + "&" + appendQuery;
        // Rebuild through the multi-argument constructor (rather than string
        // concatenation) so the fragment stays positioned after the query.
        URI newUri = new URI(oldUri.getScheme(), oldUri.getAuthority(),
                oldUri.getPath(), newQuery, oldUri.getFragment());
        return newUri.toString();
    }
}
|
Microsoft/mu_basecore | PolicyServicePkg/Samples/PolicyInterface/PolicySampleDxe.c | <reponame>Microsoft/mu_basecore
/** @file
Implements sample policy for DXE environment.
Copyright (c) Microsoft Corporation
SPDX-License-Identifier: BSD-2-Clause-Patent
**/
#include <Uefi.h>
#include <Library/UefiLib.h>
#include <Library/UefiBootServicesTableLib.h>
#include <Library/DebugLib.h>
#include <Library/BaseLib.h>
#include <Protocol/Policy.h>
#include "SamplePolicy.h"
//
// Guids used to store sample policies. For production scenarios this should be
// defined in appropriate .dec file.
//
EFI_GUID mSampleGuidPeiToDxe = POLICY_SAMPLE_PEI_TO_DXE_GUID;
//
// Global to store the protocol.
//
POLICY_PROTOCOL *mPolicyProtocol;
/**
  A routine to retrieve the sample policy created by the PEI sample module.

  @param[in]  PolicyGuid        The GUID of the sample policy to retrieve.

  @retval EFI_SUCCESS           Successfully ran sample policy code.
  @retval EFI_PROTOCOL_ERROR    Unexpected status returned by policy interface.
  @retval other                 Failure status returned by policy interface.

**/
EFI_STATUS
DxeSampleGetPeiPolicy (
  IN EFI_GUID  *PolicyGuid
  )
{
  EFI_STATUS     Status;
  SAMPLE_POLICY  Policy;
  UINT16         PolicySize;
  UINT64         Attributes;

  // Set the policy size to 0 to indicate a null policy pointer.
  PolicySize = 0;

  // First check the size. This would usually be done for policies of a dynamic
  // or changing size. Attributes may be retrieved at this time if desired.
  // EFI_BUFFER_TOO_SMALL with the expected size is the only valid outcome.
  Status = mPolicyProtocol->GetPolicy (PolicyGuid, NULL, NULL, &PolicySize);
  if ((Status != EFI_BUFFER_TOO_SMALL) || (PolicySize != sizeof (Policy))) {
    ASSERT (FALSE);
    return EFI_PROTOCOL_ERROR;
  }

  // Retrieve the actual policy into the local buffer.
  Status = mPolicyProtocol->GetPolicy (PolicyGuid, &Attributes, &Policy, &PolicySize);
  if (EFI_ERROR (Status)) {
    ASSERT_EFI_ERROR (Status);
    return Status;
  }

  // Verify all the details match what the PEI sample module published.
  ASSERT (PolicySize == sizeof (Policy));
  ASSERT (Attributes == 0);
  ASSERT (Policy.Signature == SAMPLE_POLICY_SIGNATURE);
  ASSERT (Policy.Revision == SAMPLE_POLICY_REVISION);
  ASSERT (Policy.Value == SAMPLE_POLICY_VALUE);
  return Status;
}
/**
  DXE policy sample driver entry point. Locates the policy protocol and
  verifies the sample policy published by the PEI sample module. (The
  original comment described a different driver's behavior.)

  @param[in] ImageHandle    The firmware allocated handle for the EFI image.
  @param[in] SystemTable    UNUSED.

  @retval EFI_SUCCESS       Sample policy located and verified.
  @retval other             Sample routines returned a failure.

**/
EFI_STATUS
EFIAPI
DxePolicySampleEntry (
  IN EFI_HANDLE        ImageHandle,
  IN EFI_SYSTEM_TABLE  *SystemTable
  )
{
  EFI_STATUS  Status;

  // Get the policy protocol; stored in a global for the helper routine.
  Status = gBS->LocateProtocol (
                  &gPolicyProtocolGuid,
                  NULL,
                  (VOID **)&mPolicyProtocol
                  );
  if (EFI_ERROR (Status)) {
    ASSERT_EFI_ERROR (Status);
    return Status;
  }

  // Retrieve and validate the PEI-to-DXE sample policy.
  Status = DxeSampleGetPeiPolicy (&mSampleGuidPeiToDxe);
  if (EFI_ERROR (Status)) {
    ASSERT_EFI_ERROR (Status);
    return Status;
  }

  return Status;
}
|
dalisoft/dev-env | backend/express/src/middlewares/body-null-fix.js | <filename>backend/express/src/middlewares/body-null-fix.js<gh_stars>1-10
/* eslint-disable no-continue, no-restricted-syntax, guard-for-in */
const normalizeProperties = ['body', 'params'];

/**
 * Express middleware that converts the literal strings 'null' and
 * 'undefined' found in req.body / req.params into real null / undefined
 * values, then passes control to the next handler.
 */
export default (req, res, next) => {
  for (const property of normalizeProperties) {
    const container = req[property];
    for (const key in container) {
      const raw = container[key];
      if (raw === 'null') {
        container[key] = null;
      } else if (raw === 'undefined') {
        container[key] = undefined;
      }
    }
  }
  next();
};
|
yhexie/mrpt | samples/imageFFT/test.cpp | /* +---------------------------------------------------------------------------+
| Mobile Robot Programming Toolkit (MRPT) |
| http://www.mrpt.org/ |
| |
| Copyright (c) 2005-2017, Individual contributors, see AUTHORS file |
| See: http://www.mrpt.org/Authors - All rights reserved. |
| Released under BSD License. See details in http://www.mrpt.org/License |
+---------------------------------------------------------------------------+ */
#include <mrpt/utils.h>
#include <mrpt/gui.h>
#include <mrpt/math.h>
using namespace mrpt;
using namespace mrpt::math;
using namespace mrpt::gui;
using namespace mrpt::utils;
using namespace mrpt::system;
using namespace std;
#include <mrpt/examples_config.h>
string myDataDir( MRPT_EXAMPLES_BASE_DIRECTORY + string("imageCorrelation/") ); // Reuse it's images
// ------------------------------------------------------
// TestFFT_2D_real
// ------------------------------------------------------
// Loads a real matrix from disk, round-trips it through the 2D FFT/IFFT,
// saves intermediate results, and reports the reconstruction error.
void TestFFT_2D_real()
{
	CMatrix		A, RES_R,RES_I,B,D;
	CTicTac		tictac;

	printf("Loading matrix from file...");
	A.loadFromTextFile("dft2_test.txt");
	printf("ok\n");

	printf("Computing 2D FFT of %ux%u...",(unsigned int)A.getRowCount(),(unsigned int)A.getColCount());
	tictac.Tic();
	math::dft2_real(A,RES_R,RES_I);
	printf(" Done,%.06fms\n",tictac.Tac()*1000.0f);

	RES_R.saveToTextFile("_out_dft2_real.txt");
	RES_I.saveToTextFile("_out_dft2_imag.txt");

	printf("Computing 2D IFFT of %ux%u...",(unsigned int)A.getRowCount(),(unsigned int)A.getColCount());
	tictac.Tic();
	math::idft2_real(RES_R,RES_I,B);
	printf(" Done,%.06fms\n",tictac.Tac()*1000.0f);

//	B.saveToTextFile("_out_ifft2.txt");
	// Round-trip residual: IFFT(FFT(A)) should reproduce A.
	D = B - A;
//	D.saveToTextFile("_out_dft2_error_diffs.txt");
	float maxError;
	size_t u,v;
	// NOTE(review): this reports the maximum element of D, not the maximum
	// absolute difference -- confirm that is the intended error metric.
	D.find_index_max_value(u,v,maxError);
	printf("Maximum error between 'A' and 'IFFT(FFT(A))'=%e\n",maxError);
}
// ------------------------------------------------------
// TestFFT_2D_complex
// ------------------------------------------------------
// Loads a complex matrix (real/imaginary parts in two files), round-trips
// it through the complex 2D FFT/IFFT, and reports reconstruction errors for
// both components.
void TestFFT_2D_complex()
{
	CMatrix		DATA_R,DATA_I, RES_R,RES_I,B_R,B_I,D_R,D_I;
	CTicTac		tictac;

	printf("Loading matrix from file...");
	DATA_R.loadFromTextFile("complex_dft2_test_real.txt");
	DATA_I.loadFromTextFile("complex_dft2_test_imag.txt");
	printf("ok\n");

	printf("Computing 2D complex FFT of %ux%u...",(unsigned int)DATA_R.getRowCount(),(unsigned int)DATA_R.getColCount());
	tictac.Tic();
	math::dft2_complex(DATA_R,DATA_I,RES_R,RES_I);
	printf(" Done,%.06fms\n",tictac.Tac()*1000.0f);

	RES_R.saveToTextFile("_out_complex_dft2_real.txt");
	RES_I.saveToTextFile("_out_complex_dft2_imag.txt");

	printf("Computing 2D complex IFFT of %ux%u...",(unsigned int)DATA_R.getRowCount(),(unsigned int)DATA_R.getColCount());
	tictac.Tic();
	math::idft2_complex(RES_R,RES_I,B_R,B_I);
	printf(" Done,%.06fms\n",tictac.Tac()*1000.0f);

//	B.saveToTextFile("_out_ifft2.txt");
	// Round-trip residuals for the real and imaginary parts.
	D_R = B_R - DATA_R;
	D_I = B_I - DATA_I;
//	D.saveToTextFile("_out_dft2_error_diffs.txt");
	float maxError_R,maxError_I;
	size_t u,v;
	// NOTE(review): maximum element, not maximum |difference| -- confirm.
	D_R.find_index_max_value(u,v,maxError_R);
	D_I.find_index_max_value(u,v,maxError_I);
	printf("Maximum error between 'A' and 'IFFT(FFT(A))'=%e\n",maxError_R);
	printf("Maximum error between 'A' and 'IFFT(FFT(A))'=%e\n",maxError_I);
}
// ------------------------------------------------------
// TestImageFFT
// ------------------------------------------------------
void TestImageFFT()
{
CTicTac tictac;
CImage IM1,IM2;
CMatrix imgCorr;
IM1.loadFromFile(myDataDir+string("fft2_test_image_patch.jpg"), 0 ); // "Patch"
IM2.loadFromFile(myDataDir+string("fft2_test_image.jpg"), 0 ); // Ref. image
printf("Computing images correlation...");
tictac.Tic();
IM2.cross_correlation_FFT(IM1,imgCorr);
printf(" Done,%.06fms\n",tictac.Tac()*1000.0f);
imgCorr.saveToTextFile("_out_dft2_image_test.txt");
}
// ------------------------------------------------------
// TestImageCap
// ------------------------------------------------------
// Disabled pixel->3D back-projection experiment; the whole body is kept
// commented out for reference only, so this function currently does nothing.
void TestImage3D()
{
/*	// Pixels -> 3D
	CMatrix		A = VisionUtils::defaultIntrinsicParamsMatrix();
	CPoint3D	p;
	FILE		*f=fopen("test.txt","wt");
	for (int x=0;x<320;x+=10)
		for (int y=0;y<240;y+=10)
			for (int d=1;d<20;d+=1)
			{
				p = VisionUtils::pixelTo3D(x,y, A);
				fprintf(f,"%f %f %f\n",p.x,p.y,p.z);
			}
	fclose(f);
	return;
*/
}
// ------------------------------------------------------
// TestImageCap
// ------------------------------------------------------
// Interactive demo: loads BMP/JPEG images, shows scaled/grayscale variants
// with drawing primitives, then saves the loaded JPEG back out.
void TestImageConversion()
{
	// BMP -> JPEG conversion tester:
	// --------------------------------
	CImage		img,img2;
	CTicTac		tictac;

	{
		tictac.Tic();
		if (!img.loadFromFile("../imageBasics/frame_color.bmp"))
		{
			cerr << "Error loading ../imageBasics/frame_color.bmp" << endl;
			return;
		}
		printf("bmp file loaded in %.03fms\n", 1000.0f*tictac.Tac() );
		CDisplayWindow		win1("BMP file, color");
		win1.showImage( img );	win1.waitForKey();

		tictac.Tic();
		img.loadFromFile("frame_gray.bmp");
		printf("bmp file loaded in %.03fms\n", 1000.0f*tictac.Tac() );
		CDisplayWindow		win2("BMP file, gray");
		win2.showImage( img );	win2.waitForKey();
	}

	tictac.Tic();
	img.loadFromFile("frame.jpg");
	printf("jpeg file loaded in %.03fms\n", 1000.0f*tictac.Tac() );

	CDisplayWindow		win1("win1"),win2("win2"),win3("win3");

	CImage	imgSmall( img.scaleHalf() );
	CImage	imgSmall2( imgSmall.scaleHalf() );
	CImage	imgGray( imgSmall2.grayscale() );

	// Test some draw capabilities:
	// ---------------------------------
	imgSmall.rectangle( 85,35, 170,170,TColor(255,0,0),10);

	imgSmall.line( 550,75, 650,25,TColor(0,0,255) );
	imgSmall.line( -10,-20,  20,30,TColor(0,0,255));

	CMatrix COV(2,2);
	COV(0,0) = 100;
	COV(1,1) = 50;
	COV(0,1) = COV(1,0) = -30;
	imgSmall.ellipseGaussian( &COV, 600.0f,50.0f, 2, TColor(255,255,0), 4);
	imgGray.ellipseGaussian( &COV, 100.0f,100.0f, 2, TColor(0,0,255), 4);

	imgSmall.drawImage( 400,500,imgGray );

	// Show the windows now:
	// ------------------------------------------------------
	win1.showImage( imgSmall );		win1.setPos(0,0);
	win2.showImage( imgSmall2 );	win2.setPos(810,0);
	win3.showImage( imgGray );		win3.setPos(810,400);

	os::getch();

	tictac.Tic();
	// FIX: the original saved `img2`, which was never loaded/assigned and
	// would write an empty image; save the processed `img` instead.
	img.saveToFile("frame_out.jpg");
	// FIX: message said "loaded" after a save operation.
	printf("jpeg file saved in %.03fms\n", 1000.0f*tictac.Tac() );

	return;
}
// ------------------------------------------------------
// TestImageCap
// ------------------------------------------------------
/*void TestImageCap()
{
CTicTac tictac;
bool ok = true;
CImageGrabber_OpenCV cap( 0 );
CObservationImage obs;
CDisplayWindow win("Capture");
while (ok && !_kbhit())
{
mrpt::system::sleep(10);
tictac.Tic();
ok = cap.getObservation( obs );
printf("Frame grabbed in %.03fms\n", 1000.0f*tictac.Tac() );
if (ok)
{
// CImageFloat img2; img2 = obs.image;
// win.showImage( obs.image.grayscale() );
win.showImage( obs.image );
}
};
if (_kbhit()) _getch();
}
*/
// ------------------------------------------------------
// MAIN
// ------------------------------------------------------
// Entry point: runs the image-FFT demo (other demos are kept available but
// commented out) and maps any exception to a non-zero exit code.
int main()
{
	try
	{
		TestImageFFT();
		//TestFFT_2D_real();
		//TestFFT_2D_complex();
		//TestImageFFT();
		//TestImageCap();
		//TestImageConversion();

		return 0;
	}
	// Catch by const reference to avoid slicing/copying the exception.
	catch (const std::exception &e)
	{
		std::cout << "MRPT exception caught: " << e.what() << std::endl;
		return -1;
	}
	catch (...)
	{
		printf("Untyped exception!!");
		return -1;
	}
}
|
bhits/pcm | pcm/src/main/java/gov/samhsa/c2s/pcm/infrastructure/VssService.java | package gov.samhsa.c2s.pcm.infrastructure;
import gov.samhsa.c2s.pcm.infrastructure.dto.ValueSetCategoryDto;
import org.springframework.cloud.netflix.feign.FeignClient;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import java.util.List;
/**
 * Feign client for the Value Set Service ("vss").
 */
@FeignClient(name = "vss")
public interface VssService {

    /**
     * Retrieves all value set categories from the VSS.
     *
     * @return the list of value set category DTOs
     */
    @RequestMapping(value = "/valueSetCategories", method = RequestMethod.GET)
    List<ValueSetCategoryDto> getValueSetCategories();
}
|
Tomymanoukian/AyPIII_TP2 | src/main/java/edu/fiuba/algo3/vista/Layouts/LayoutVerdaderoFalso.java | package edu.fiuba.algo3.vista.Layouts;
import edu.fiuba.algo3.controlador.BotonEnviarHandler;
import edu.fiuba.algo3.modelo.Jugador;
import edu.fiuba.algo3.modelo.ManejadorDeTurnos;
import edu.fiuba.algo3.modelo.Preguntas.VerdaderoFalso;
import edu.fiuba.algo3.modelo.Respuestas.RespuestaUnica;
import javafx.animation.Timeline;
import javafx.geometry.Insets;
import javafx.geometry.Pos;
import javafx.scene.control.Button;
import javafx.scene.layout.HBox;
import javafx.scene.layout.VBox;
import static edu.fiuba.algo3.vista.Constantes.ESTILO_FALSO;
import static edu.fiuba.algo3.vista.Constantes.ESTILO_VERDADERO;
/**
 * Base layout for true/false ("verdadero o falso") question screens.
 */
public abstract class LayoutVerdaderoFalso {

    protected VBox layout;

    public VBox getLayout() {
        return layout;
    }

    /**
     * Builds the container holding the "true" and "false" answer buttons
     * for the given question, wired to send the chosen answer.
     *
     * @param verdaderoFalso the true/false question being shown
     * @param unJugador the player answering
     * @param manejadorDeTurnos turn manager notified when an answer is sent
     * @param tiempo countdown timeline stopped when an answer is sent
     * @return the positioned container with both option buttons
     */
    protected HBox obtenerContenedorDeOpcionesVoF(VerdaderoFalso verdaderoFalso, Jugador unJugador, ManejadorDeTurnos manejadorDeTurnos, Timeline tiempo) {
        Button botonVerdadero = crearBotonDeOpcion(
                verdaderoFalso.getOpcionVerdadera().getOpcion(),
                ESTILO_VERDADERO,
                new BotonEnviarHandler(unJugador, new RespuestaUnica(verdaderoFalso.getOpcionVerdadera()), manejadorDeTurnos, tiempo));
        Button botonFalso = crearBotonDeOpcion(
                verdaderoFalso.getOpcionFalsa().getOpcion(),
                ESTILO_FALSO,
                new BotonEnviarHandler(unJugador, new RespuestaUnica(verdaderoFalso.getOpcionFalsa()), manejadorDeTurnos, tiempo));

        HBox contenedorDeOpcionesVoF = new HBox(botonVerdadero, botonFalso);
        contenedorDeOpcionesVoF.setAlignment(Pos.CENTER);
        // FIX: the original called setStyle twice; Node.setStyle replaces the
        // whole inline style, so the second call silently dropped the bold
        // font weight. Both rules now go in a single style string.
        contenedorDeOpcionesVoF.setStyle("-fx-font-weight: bold; -fx-font-size: 1.5em;");
        contenedorDeOpcionesVoF.setSpacing(60);
        contenedorDeOpcionesVoF.setPadding(new Insets(140, 0, 0, 0));

        HBox estructuraContenedorOpciones = new HBox(contenedorDeOpcionesVoF);
        estructuraContenedorOpciones.setAlignment(Pos.CENTER);
        estructuraContenedorOpciones.setMinHeight(470);
        estructuraContenedorOpciones.setMaxHeight(470);
        return estructuraContenedorOpciones;
    }

    /** Creates one fixed-size (200x70) answer button wired to its handler. */
    private Button crearBotonDeOpcion(String texto, String estilo, BotonEnviarHandler handler) {
        Button boton = new Button(texto);
        boton.setOnAction(handler);
        boton.setMinSize(200, 70);
        boton.setStyle(estilo);
        return boton;
    }
}
|
CaelestisZ/HeraQ | security/samsung/five/five_state.c | /*
* FIVE State machine
*
* Copyright (C) 2017 Samsung Electronics, Inc.
* <NAME>, <<EMAIL>>
*
* This software is licensed under the terms of the GNU General Public
* License version 2, as published by the Free Software Foundation, and
* may be copied, distributed, and modified under those terms.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*/
#include <linux/task_integrity.h>
#include "five_audit.h"
#include "five_state.h"
#include "five_hooks.h"
#include "five_cache.h"
enum task_integrity_state_cause {
STATE_CAUSE_UNKNOWN,
STATE_CAUSE_DIGSIG,
STATE_CAUSE_DMV_PROTECTED,
STATE_CAUSE_TRUSTED,
STATE_CAUSE_HMAC,
STATE_CAUSE_SYSTEM_LABEL,
STATE_CAUSE_NOCERT,
STATE_CAUSE_TAMPERED,
STATE_CAUSE_MISMATCH_LABEL
};
struct task_verification_result {
enum task_integrity_value new_tint;
enum task_integrity_value prev_tint;
enum task_integrity_state_cause cause;
};
/*
 * task_integrity_state_str - map a state-change cause to its audit string.
 *
 * Any unrecognised value falls back to "unknown".
 */
static const char *task_integrity_state_str(
				enum task_integrity_state_cause cause)
{
	switch (cause) {
	case STATE_CAUSE_DIGSIG:
		return "digsig";
	case STATE_CAUSE_DMV_PROTECTED:
		return "dmv_protected";
	case STATE_CAUSE_TRUSTED:
		return "trusted";
	case STATE_CAUSE_HMAC:
		return "hmac";
	case STATE_CAUSE_SYSTEM_LABEL:
		return "system_label";
	case STATE_CAUSE_NOCERT:
		return "nocert";
	case STATE_CAUSE_MISMATCH_LABEL:
		return "mismatch_label";
	case STATE_CAUSE_TAMPERED:
		return "tampered";
	case STATE_CAUSE_UNKNOWN:
	default:
		return "unknown";
	}
}
/*
 * state_to_reason_cause - translate an integrity state-change cause into
 * the task-integrity reset cause recorded for auditing.
 */
static enum task_integrity_reset_cause state_to_reason_cause(
				enum task_integrity_state_cause cause)
{
	switch (cause) {
	case STATE_CAUSE_UNKNOWN:
		return CAUSE_UNKNOWN;
	case STATE_CAUSE_TAMPERED:
		return CAUSE_TAMPERED;
	case STATE_CAUSE_NOCERT:
		return CAUSE_NO_CERT;
	case STATE_CAUSE_MISMATCH_LABEL:
		return CAUSE_MISMATCH_LABEL;
	default:
		/* Integrity is not NONE. */
		return CAUSE_UNSET;
	}
}
/* Returns 1 when @label is the zero-length "system" label, 0 otherwise. */
static int is_system_label(struct integrity_label *label)
{
	return label && label->len == 0;
}
/*
 * Label comparison is currently a stub: every pair of labels compares
 * equal (returns 0), so verify_or_update_label() can never report a
 * content mismatch. NOTE(review): presumably a placeholder for a real
 * comparison -- confirm before relying on mismatch detection.
 */
static inline int integrity_label_cmp(struct integrity_label *l1,
					struct integrity_label *l2)
{
	return 0;
}
/*
 * verify_or_update_label - check a file's label against the task label,
 * or adopt it as the task label if the task has none yet.
 *
 * System (zero-length) and absent labels are accepted unconditionally.
 * Returns 0 on success, -EPERM on a label mismatch, -ENOMEM if the label
 * copy cannot be allocated.
 */
static int verify_or_update_label(struct task_integrity *intg,
				struct integrity_iint_cache *iint)
{
	struct integrity_label *l;
	struct integrity_label *file_label = iint->five_label;
	int rc = 0;

	if (!file_label) /* digsig doesn't have label */
		return 0;

	if (is_system_label(file_label))
		return 0;

	spin_lock(&intg->value_lock);
	l = intg->label;

	if (l) {
		/* Task already labelled: the file label must match. */
		if (integrity_label_cmp(file_label, l)) {
			rc = -EPERM;
			goto out;
		}
	} else {
		struct integrity_label *new_label;

		/*
		 * GFP_ATOMIC because we hold intg->value_lock and must not
		 * sleep. NOTE(review): the size is sizeof(file_label->len)
		 * plus the payload length, which assumes integrity_label is
		 * exactly { len; data[] } with no padding -- confirm against
		 * the struct definition in task_integrity.h.
		 */
		new_label = kmalloc(sizeof(file_label->len) + file_label->len,
			GFP_ATOMIC);
		if (!new_label) {
			rc = -ENOMEM;
			goto out;
		}

		new_label->len = file_label->len;
		memcpy(new_label->data, file_label->data, new_label->len);
		intg->label = new_label;
	}

out:
	spin_unlock(&intg->value_lock);

	return rc;
}
/*
 * set_first_state - derive the initial task integrity from the exec'd
 * file's cached FIVE status (BPRM_CHECK path).
 *
 * Clears any previous integrity first, then maps the file status (and the
 * trusted-file flag) onto a fresh task integrity value. Always reports a
 * new state: @result receives the previous value, the new value and the
 * cause used for auditing.
 */
static bool set_first_state(struct integrity_iint_cache *iint,
				struct task_integrity *integrity,
				struct task_verification_result *result)
{
	enum task_integrity_value tint = INTEGRITY_NONE;
	enum five_file_integrity status = five_get_cache_status(iint);
	bool trusted_file = iint->five_flags & FIVE_TRUSTED_FILE;
	enum task_integrity_state_cause cause = STATE_CAUSE_UNKNOWN;

	result->new_tint = result->prev_tint = task_integrity_read(integrity);
	task_integrity_clear(integrity);

	switch (status) {
	case FIVE_FILE_RSA:
		/* Trusted RSA-signed files may additionally allow signing. */
		if (trusted_file) {
			cause = STATE_CAUSE_TRUSTED;
			tint = INTEGRITY_PRELOAD_ALLOW_SIGN;
		} else {
			cause = STATE_CAUSE_DIGSIG;
			tint = INTEGRITY_PRELOAD;
		}
		break;
	case FIVE_FILE_DMVERITY:
		if (trusted_file) {
			cause = STATE_CAUSE_TRUSTED;
			tint = INTEGRITY_DMVERITY_ALLOW_SIGN;
		} else {
			cause = STATE_CAUSE_DMV_PROTECTED;
			tint = INTEGRITY_DMVERITY;
		}
		break;
	case FIVE_FILE_HMAC:
		cause = STATE_CAUSE_HMAC;
		tint = INTEGRITY_MIXED;
		break;
	case FIVE_FILE_FAIL:
		cause = STATE_CAUSE_TAMPERED;
		tint = INTEGRITY_NONE;
		break;
	case FIVE_FILE_UNKNOWN:
		cause = STATE_CAUSE_NOCERT;
		tint = INTEGRITY_NONE;
		break;
	default:
		/* Unrecognised status is treated like a missing cert. */
		cause = STATE_CAUSE_NOCERT;
		tint = INTEGRITY_NONE;
		break;
	}

	task_integrity_set(integrity, tint);
	result->new_tint = tint;
	result->cause = cause;

	/* exec always establishes a (possibly NONE) new state. */
	return true;
}
static bool set_next_state(struct integrity_iint_cache *iint,
struct task_integrity *integrity,
struct task_verification_result *result)
{
bool is_newstate = false;
enum five_file_integrity status = five_get_cache_status(iint);
bool has_digsig = (status == FIVE_FILE_RSA);
bool dmv_protected = (status == FIVE_FILE_DMVERITY);
struct integrity_label *label = iint->five_label;
enum task_integrity_state_cause cause = STATE_CAUSE_UNKNOWN;
enum task_integrity_value state_tint = INTEGRITY_NONE;
result->new_tint = result->prev_tint = task_integrity_read(integrity);
if (has_digsig)
return is_newstate;
if (status == FIVE_FILE_UNKNOWN || status == FIVE_FILE_FAIL) {
spin_lock(&integrity->value_lock);
if (status == FIVE_FILE_UNKNOWN)
cause = STATE_CAUSE_NOCERT;
else
cause = STATE_CAUSE_TAMPERED;
state_tint = INTEGRITY_NONE;
is_newstate = true;
goto out;
}
if (verify_or_update_label(integrity, iint)) {
spin_lock(&integrity->value_lock);
cause = STATE_CAUSE_MISMATCH_LABEL;
state_tint = INTEGRITY_NONE;
is_newstate = true;
goto out;
}
spin_lock(&integrity->value_lock);
switch (integrity->value) {
case INTEGRITY_PRELOAD_ALLOW_SIGN:
if (dmv_protected) {
cause = STATE_CAUSE_DMV_PROTECTED;
state_tint = INTEGRITY_DMVERITY_ALLOW_SIGN;
} else if (is_system_label(label)) {
cause = STATE_CAUSE_SYSTEM_LABEL;
state_tint = INTEGRITY_MIXED_ALLOW_SIGN;
} else {
cause = STATE_CAUSE_HMAC;
state_tint = INTEGRITY_MIXED;
}
is_newstate = true;
break;
case INTEGRITY_PRELOAD:
if (dmv_protected) {
cause = STATE_CAUSE_DMV_PROTECTED;
state_tint = INTEGRITY_DMVERITY;
} else {
cause = STATE_CAUSE_HMAC;
state_tint = INTEGRITY_MIXED;
}
is_newstate = true;
break;
case INTEGRITY_MIXED_ALLOW_SIGN:
if (!dmv_protected && !is_system_label(label)) {
cause = STATE_CAUSE_HMAC;
state_tint = INTEGRITY_MIXED;
is_newstate = true;
}
break;
case INTEGRITY_DMVERITY:
if (!dmv_protected) {
cause = STATE_CAUSE_HMAC;
state_tint = INTEGRITY_MIXED;
is_newstate = true;
}
break;
case INTEGRITY_DMVERITY_ALLOW_SIGN:
if (!dmv_protected) {
if (is_system_label(label)) {
cause = STATE_CAUSE_SYSTEM_LABEL;
state_tint = INTEGRITY_MIXED_ALLOW_SIGN;
} else {
cause = STATE_CAUSE_HMAC;
state_tint = INTEGRITY_MIXED;
}
is_newstate = true;
}
break;
case INTEGRITY_MIXED:
break;
case INTEGRITY_NONE:
break;
default:
// Unknown state
cause = STATE_CAUSE_UNKNOWN;
state_tint = INTEGRITY_NONE;
is_newstate = true;
}
out:
if (is_newstate) {
__task_integrity_set(integrity, state_tint);
result->new_tint = state_tint;
result->cause = cause;
}
spin_unlock(&integrity->value_lock);
return is_newstate;
}
/*
 * five_state_proceed() - apply a single file verification result to the
 * task's integrity state.
 * @integrity:   task integrity object to update
 * @file_result: verification outcome for one file (iint, hook, task, file,
 *               raw five_result code)
 *
 * For the BPRM_CHECK hook the task's first integrity state is computed
 * (set_first_state()); for every other hook the current state is advanced
 * (set_next_state()).  If the transition produced a new state and that state
 * is INTEGRITY_NONE, the reset reason is recorded on the task and the
 * integrity-reset hook chain is notified.  Every state change is audited.
 */
void five_state_proceed(struct task_integrity *integrity,
			struct file_verification_result *file_result)
{
	struct integrity_iint_cache *iint = file_result->iint;
	enum five_hooks fn = file_result->fn;
	struct task_struct *task = file_result->task;
	struct file *file = file_result->file;
	bool is_newstate;
	struct task_verification_result task_result = {};

	/* No integrity cache entry for this file: nothing to transition. */
	if (!iint)
		return;

	if (fn == BPRM_CHECK)
		is_newstate = set_first_state(iint, integrity, &task_result);
	else
		is_newstate = set_next_state(iint, integrity, &task_result);

	if (is_newstate) {
		/* Dropping to INTEGRITY_NONE is a reset: record why and
		 * notify listeners before auditing the transition. */
		if (task_result.new_tint == INTEGRITY_NONE) {
			task_integrity_set_reset_reason(integrity,
				state_to_reason_cause(task_result.cause), file);
			five_hook_integrity_reset(task);
		}
		five_audit_verbose(task, file, five_get_string_fn(fn),
			task_result.prev_tint, task_result.new_tint,
			task_integrity_state_str(task_result.cause),
			file_result->five_result);
	}
}
|
j-verse/terser | test/compress/inline.js | <reponame>j-verse/terser<gh_stars>1-10
// Verifies that a function returning a class is inlined directly into the
// `extends` clause when `inline` is enabled, preserving the prototype chain.
inline_within_extends_1: {
    options = {
        evaluate: true,
        inline: 3,
        passes: 1,
        reduce_funcs: true,
        reduce_vars: true,
        side_effects: true,
        unused: true,
    }
    input: {
        (function() {
            function foo(foo_base) {
                return class extends foo_base {};
            }
            function bar(bar_base) {
                return class extends bar_base {};
            }
            console.log((new class extends (foo(bar(Array))){}).concat(["PASS"])[0]);
        })();
    }
    expect: {
        (function() {
            console.log(new class extends(function(foo_base) {
                return class extends foo_base {};
            }(function(bar_base) {
                return class extends bar_base {};
            }(Array))){}().concat([ "PASS" ])[0]);
        })();
    }
    expect_stdout: "PASS"
}
// Same as inline_within_extends_1 but with multiple passes and classes that
// declare constructors/methods; the inlined hierarchy must keep `second()`
// resolvable on the resulting instance.
inline_within_extends_2: {
    options = {
        defaults: true,
        evaluate: true,
        inline: 3,
        passes: 3,
        reduce_funcs: true,
        reduce_vars: true,
        side_effects: true,
        unused: true,
    }
    input: {
        (function() {
            class Baz extends(foo(bar(Array))) {
                constructor() {
                    super(...arguments);
                }
            }
            function foo(foo_base) {
                return class extends foo_base {
                    constructor() {
                        super(...arguments);
                    }
                    second() {
                        return this[1];
                    }
                };
            }
            function bar(bar_base) {
                return class extends bar_base {
                    constructor(...args) {
                        super(...args);
                    }
                };
            }
            console.log(new Baz(1, "PASS", 3).second());
        })();
    }
    expect: {
        console.log(new class extends(function(foo_base) {
            return class extends foo_base {
                constructor() {
                    super(...arguments);
                }
                second() {
                    return this[1];
                }
            };
        }(function(bar_base) {
            return class extends bar_base {
                constructor(...args) {
                    super(...args);
                }
            };
        }(Array))) {
            constructor() {
                super(...arguments);
            }
        }(1, "PASS", 3).second());
    }
    expect_stdout: "PASS"
}
|
cuttlefish/geogig | src/web/api/src/test/java/org/locationtech/geogig/rest/geopkg/GeoPackageExportIntegrationTest.java | /* Copyright (c) 2016 Boundless and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Distribution License v1.0
* which accompanies this distribution, and is available at
* https://www.eclipse.org/org/documents/edl-v10.html
*
* Contributors:
* <NAME> (Boundless) - initial implementation
*/
package org.locationtech.geogig.rest.geopkg;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.locationtech.geogig.web.api.TestData.line1;
import static org.locationtech.geogig.web.api.TestData.line2;
import static org.locationtech.geogig.web.api.TestData.line3;
import static org.locationtech.geogig.web.api.TestData.linesType;
import static org.locationtech.geogig.web.api.TestData.point1;
import static org.locationtech.geogig.web.api.TestData.point2;
import static org.locationtech.geogig.web.api.TestData.point3;
import static org.locationtech.geogig.web.api.TestData.pointsType;
import static org.locationtech.geogig.web.api.TestData.poly1;
import static org.locationtech.geogig.web.api.TestData.poly2;
import static org.locationtech.geogig.web.api.TestData.poly3;
import static org.locationtech.geogig.web.api.TestData.polysType;
import java.io.File;
import java.io.IOException;
import java.io.Serializable;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import javax.json.JsonObject;
import org.geotools.data.DataStore;
import org.geotools.data.Transaction;
import org.geotools.data.simple.SimpleFeatureCollection;
import org.geotools.data.simple.SimpleFeatureIterator;
import org.geotools.data.simple.SimpleFeatureSource;
import org.geotools.geopkg.GeoPackage;
import org.geotools.geopkg.GeoPkgDataStoreFactory;
import org.geotools.jdbc.JDBCDataStore;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.locationtech.geogig.geotools.geopkg.GeopkgGeogigMetadata;
import org.locationtech.geogig.repository.Repository;
import org.locationtech.geogig.rest.AsyncContext;
import org.locationtech.geogig.rest.AsyncContext.AsyncCommand;
import org.locationtech.geogig.rest.geotools.Export;
import org.locationtech.geogig.web.api.AbstractWebAPICommand;
import org.locationtech.geogig.web.api.AbstractWebOpTest;
import org.locationtech.geogig.web.api.CommandContext;
import org.locationtech.geogig.web.api.ParameterSet;
import org.locationtech.geogig.web.api.TestData;
import org.locationtech.geogig.web.api.TestParams;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.geometry.BoundingBox;
import com.google.common.base.Optional;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
/**
 * Integration tests for the GeoPackage {@code export} web API command.
 * Each test seeds a repository with {@link TestData}, runs the async export
 * command, and verifies the features (and, for the interchange format, the
 * audit tables) present in the produced GeoPackage file.
 */
public class GeoPackageExportIntegrationTest extends AbstractWebOpTest {

    private CommandContext context;

    private AsyncContext testAsyncContext;

    @Before
    public void before() {
        context = super.testContext.get();
        testAsyncContext = AsyncContext.createNew();
        System.setProperty("gt2.jdbc.trace", "true");
    }

    @After
    public void after() {
        testAsyncContext.shutDown();
    }

    /** Builds the export command and wires it to this test's async context. */
    @Override
    protected Export buildCommand(ParameterSet params) {
        Export o = super.buildCommand(params);
        o.asyncContext = testAsyncContext;
        return o;
    }

    @Override
    protected String getRoute() {
        return "export";
    }

    @Override
    protected Class<? extends AbstractWebAPICommand> getCommandClass() {
        return Export.class;
    }

    @Override
    protected boolean requiresTransaction() {
        return false;
    }

    @Test
    public void testExportDefaults() throws Exception {
        Repository repo = context.getRepository();
        TestData testData = new TestData(repo);
        testData.init().loadDefaultData();
        Export op = buildCommand(TestParams.of("format", "gpkg"));
        File result = run(op);
        DataStore store = store(result);
        try {
            assertFeatures(store, pointsType.getTypeName(), point1, point2, point3);
            assertFeatures(store, linesType.getTypeName(), line1, line2, line3);
            assertFeatures(store, polysType.getTypeName(), poly1, poly2, poly3);
        } finally {
            store.dispose();
        }
    }

    @Test
    public void testExportDefaultsInterchangeExtension() throws Exception {
        Repository repo = context.getRepository();
        TestData testData = new TestData(repo);
        testData.init().loadDefaultData();
        Export op = buildCommand(TestParams.of("format", "gpkg", "interchange", "true"));
        File result = run(op);
        DataStore store = store(result);
        try {
            assertFeatures(store, pointsType.getTypeName(), point1, point2, point3);
            assertFeatures(store, linesType.getTypeName(), line1, line2, line3);
            assertFeatures(store, polysType.getTypeName(), poly1, poly2, poly3);
        } finally {
            store.dispose();
        }
        // The interchange extension must create one audit table per exported layer.
        assertEquals(Sets.newHashSet("Points_audit", "Lines_audit", "Polygons_audit"),
                getAuditTableNames(result));
    }

    @Test
    public void testExportBranch() throws Exception {
        Repository repo = context.getRepository();
        TestData testData = new TestData(repo);
        // HEAD is at branch1
        testData.init().loadDefaultData().checkout("branch1");
        // but we request branch2
        Export op = buildCommand(TestParams.of("format", "GPKG", "root", "branch2"));
        File result = run(op);
        DataStore store = store(result);
        try {
            assertFeatures(store, pointsType.getTypeName(), point1, point3);
            assertFeatures(store, linesType.getTypeName(), line1, line3);
            assertFeatures(store, polysType.getTypeName(), poly1, poly3);
        } finally {
            store.dispose();
        }
    }

    @Test
    public void testExportLayernameFilter() throws Exception {
        Repository repo = context.getRepository();
        new TestData(repo).init().loadDefaultData();
        // Restrict the export to Lines and Polygons only.
        String layerFilter = linesType.getTypeName() + "," + polysType.getTypeName();
        Export op = buildCommand(TestParams.of("format", "gpkg", "path", layerFilter));
        File result = run(op);
        DataStore store = store(result);
        try {
            assertFeatures(store, linesType.getTypeName(), line1, line2, line3);
            assertFeatures(store, polysType.getTypeName(), poly1, poly2, poly3);
            Set<String> exportedTypeNames = Sets.newHashSet(store.getTypeNames());
            assertFalse(exportedTypeNames.contains(pointsType.getTypeName()));
        } finally {
            store.dispose();
        }
    }

    @Test
    public void testExportBranchBBoxFilter() throws Exception {
        Repository repo = context.getRepository();
        TestData testData = new TestData(repo);
        // HEAD is at branch1
        testData.init().loadDefaultData().checkout("branch1");
        BoundingBox bounds = point3.getDefaultGeometryProperty().getBounds();
        String bboxStr = String.format("%f,%f,%f,%f,EPSG:4326", bounds.getMinX(), bounds.getMinY(),
                bounds.getMaxX(), bounds.getMaxY());
        // but we request branch2
        Export op = buildCommand(
                TestParams.of("format", "gpkg", "root", "branch2", "bbox", bboxStr));
        File result = run(op);
        DataStore store = store(result);
        try {
            assertFeatures(store, pointsType.getTypeName(), point3);
            assertFeatures(store, linesType.getTypeName(), line3);
            assertFeatures(store, polysType.getTypeName(), poly3);
        } finally {
            store.dispose();
        }
    }

    @Test
    public void testExportBranchBBoxAndLayerFilter() throws Exception {
        Repository repo = context.getRepository();
        TestData testData = new TestData(repo);
        // HEAD is at branch1
        testData.init().loadDefaultData().checkout("branch1");
        BoundingBox bounds = point3.getDefaultGeometryProperty().getBounds();
        String bboxFilter = String.format("%f,%f,%f,%f,EPSG:4326", bounds.getMinX(),
                bounds.getMinY(), bounds.getMaxX(), bounds.getMaxY());
        String layerFilter = linesType.getTypeName() + "," + polysType.getTypeName();
        // but we request branch2
        Export op = buildCommand(TestParams.of("format", "gpkg", "root", "branch2", "bbox",
                bboxFilter, "path", layerFilter));
        File result = run(op);
        DataStore store = store(result);
        try {
            assertFeatures(store, linesType.getTypeName(), line3);
            assertFeatures(store, polysType.getTypeName(), poly3);
            Set<String> exportedTypeNames = Sets.newHashSet(store.getTypeNames());
            assertFalse(exportedTypeNames.contains(pointsType.getTypeName()));
        } finally {
            store.dispose();
        }
    }

    @Test
    public void testExportBranchBBoxAndLayerFilterInterchangeExtension() throws Exception {
        Repository repo = context.getRepository();
        TestData testData = new TestData(repo);
        // HEAD is at branch1
        testData.init().loadDefaultData().checkout("branch1");
        BoundingBox bounds = point3.getDefaultGeometryProperty().getBounds();
        String bboxFilter = String.format("%f,%f,%f,%f,EPSG:4326", bounds.getMinX(),
                bounds.getMinY(), bounds.getMaxX(), bounds.getMaxY());
        String layerFilter = linesType.getTypeName() + "," + polysType.getTypeName();
        // but we request branch2
        Export op = buildCommand(TestParams.of("format", "gpkg", "root", "branch2", "bbox",
                bboxFilter, "path", layerFilter, "interchange", "true"));
        final File result = run(op);
        final DataStore store = store(result);
        try {
            assertFeatures(store, linesType.getTypeName(), line3);
            assertFeatures(store, polysType.getTypeName(), poly3);
            Set<String> exportedTypeNames = Sets.newHashSet(store.getTypeNames());
            assertFalse(exportedTypeNames.contains(pointsType.getTypeName()));
        } finally {
            store.dispose();
        }
        assertEquals(Sets.newHashSet("Lines_audit", "Polygons_audit"), getAuditTableNames(result));
    }

    /**
     * Runs the export command, verifies the async-task JSON response, then
     * polls the async context until the task finishes and returns the
     * produced GeoPackage file.
     */
    private File run(Export op) throws InterruptedException, ExecutionException {
        op.run(context);
        final String expected;
        if (Boolean.parseBoolean(op.options.getFirstValue("interchange"))) {
            expected = "{\"task\":{\"id\":1,\"description\":\"Export to Geopackage database with geogig interchange format extension\",\"href\":\"/geogig/tasks/1.json\"}}";
        } else {
            expected = "{\"task\":{\"id\":1,\"description\":\"Export to Geopackage database\",\"href\":\"/geogig/tasks/1.json\"}}";
        }
        JsonObject response = getJSONResponse();
        assertTrue(TestData.jsonEquals(TestData.toJSON(expected), response, false));
        Optional<AsyncCommand<?>> asyncCommand = Optional.absent();
        while (!asyncCommand.isPresent()) {
            Thread.yield();
            asyncCommand = testAsyncContext.getAndPruneIfFinished("1");
        }
        File result = (File) asyncCommand.get().get();
        assertNotNull(result);
        return result;
    }

    /** Opens a GeoTools DataStore over the exported GeoPackage file. */
    private DataStore store(File result) throws InterruptedException, ExecutionException {
        assertNotNull(result);
        final GeoPkgDataStoreFactory factory = new GeoPkgDataStoreFactory();
        final Map<String, Serializable> params = ImmutableMap.of(GeoPkgDataStoreFactory.DBTYPE.key,
                "geopkg", GeoPkgDataStoreFactory.DATABASE.key, result.getAbsolutePath());
        DataStore dataStore;
        try {
            dataStore = factory.createDataStore(params);
        } catch (IOException ioe) {
            throw new RuntimeException("Unable to create GeoPkgDataStore", ioe);
        }
        if (null == dataStore) {
            throw new RuntimeException("Unable to create GeoPkgDataStore");
        }
        return dataStore;
    }

    /**
     * Asserts that the given layer contains exactly the expected features,
     * matching by the original GeoGig feature ids recorded in the geopackage
     * fid-mapping metadata.
     */
    private void assertFeatures(DataStore store, String typeName,
            SimpleFeature... expected)
            throws Exception {
        try (Connection connection = ((JDBCDataStore) store).getConnection(Transaction.AUTO_COMMIT);
                GeopkgGeogigMetadata metadata = new GeopkgGeogigMetadata(connection)) {
            Map<String, String> mappings = metadata.getFidMappings(typeName);
            SimpleFeatureSource source = store.getFeatureSource(typeName);
            SimpleFeatureCollection features = source.getFeatures();
            Map<String, SimpleFeature> expectedFeatures;
            {
                List<SimpleFeature> list = Lists.newArrayList(expected);
                expectedFeatures = Maps.uniqueIndex(list, (f) -> f.getID());
            }
            Set<String> actualFeatureIDs = new HashSet<String>();
            {
                try (SimpleFeatureIterator fiter = features.features()) {
                    while (fiter.hasNext()) {
                        SimpleFeature feature = fiter.next();
                        // Map the geopackage fid suffix back to the GeoGig feature id.
                        actualFeatureIDs.add(mappings.get(feature.getID().split("\\.")[1]));
                    }
                }
            }
            Set<String> expectedFeatureIDs = expectedFeatures.keySet();
            assertEquals(expectedFeatureIDs, actualFeatureIDs);
        }
    }

    /**
     * Returns the names of the interchange-format audit tables registered in
     * the exported GeoPackage's {@code geogig_audited_tables} metadata table.
     */
    private Set<String> getAuditTableNames(File gpkg) throws IOException, SQLException {
        GeoPackage geoPackage = new GeoPackage(gpkg);
        Set<String> auditTables = new HashSet<>();
        try {
            // Schema: geogig_audited_tables (table_name VARCHAR, mapped_path VARCHAR,
            // audit_table VARCHAR, root_tree_id VARCHAR). Only audit_table is needed;
            // the previous code also read a nonexistent "commit_id" column, which
            // makes ResultSet.getString throw SQLException.
            try (Connection c = geoPackage.getDataSource().getConnection();
                    Statement st = c.createStatement();
                    ResultSet rs = st.executeQuery("select * from geogig_audited_tables")) {
                while (rs.next()) {
                    auditTables.add(rs.getString("audit_table"));
                }
            }
        } finally {
            geoPackage.close();
        }
        return auditTables;
    }
}
|
chenhaoaixuexi/cloudCourse | roncoo-education-course/roncoo-education-course-service/src/main/java/com/roncoo/education/course/service/dao/impl/CourseChapterAuditDaoImpl.java | <gh_stars>1-10
package com.roncoo.education.course.service.dao.impl;
import com.roncoo.education.course.service.dao.CourseChapterAuditDao;
import com.roncoo.education.course.service.dao.impl.mapper.CourseChapterAuditMapper;
import com.roncoo.education.course.service.dao.impl.mapper.entity.CourseChapterAudit;
import com.roncoo.education.course.service.dao.impl.mapper.entity.CourseChapterAuditExample;
import com.roncoo.education.course.service.dao.impl.mapper.entity.CourseChapterAuditExample.Criteria;
import com.roncoo.education.util.base.Page;
import com.roncoo.education.util.base.PageUtil;
import com.roncoo.education.util.tools.IdWorker;
import java.util.List;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Repository;
@Repository
public class CourseChapterAuditDaoImpl implements CourseChapterAuditDao {
@Autowired
private CourseChapterAuditMapper courseChapterAuditMapper;
public int save(CourseChapterAudit record) {
record.setId(IdWorker.getId());
return this.courseChapterAuditMapper.insertSelective(record);
}
public int deleteById(Long id) {
return this.courseChapterAuditMapper.deleteByPrimaryKey(id);
}
public int updateById(CourseChapterAudit record) {
record.setGmtCreate(null);
record.setGmtModified(null);
return this.courseChapterAuditMapper.updateByPrimaryKeySelective(record);
}
public CourseChapterAudit getById(Long id) {
return this.courseChapterAuditMapper.selectByPrimaryKey(id);
}
public Page<CourseChapterAudit> listForPage(int pageCurrent, int pageSize, CourseChapterAuditExample example) {
int count = this.courseChapterAuditMapper.countByExample(example);
pageSize = PageUtil.checkPageSize(pageSize);
pageCurrent = PageUtil.checkPageCurrent(count, pageSize, pageCurrent);
int totalPage = PageUtil.countTotalPage(count, pageSize);
example.setLimitStart(PageUtil.countOffset(pageCurrent, pageSize));
example.setPageSize(pageSize);
return new Page<CourseChapterAudit>(count, totalPage, pageCurrent, pageSize, this.courseChapterAuditMapper.selectByExample(example));
}
@Override
public List<CourseChapterAudit> listByCourseId(Long courseId) {
CourseChapterAuditExample example = new CourseChapterAuditExample();
Criteria c = example.createCriteria();
c.andCourseIdEqualTo(courseId);
example.setOrderByClause("sort asc, id asc");
return this.courseChapterAuditMapper.selectByExample(example);
}
@Override
public int updateSortByChapterId(Integer sort, Long chapterId) {
CourseChapterAuditExample example = new CourseChapterAuditExample();
Criteria c = example.createCriteria();
c.andIdEqualTo(chapterId);
CourseChapterAudit record = new CourseChapterAudit();
record.setId(chapterId);
record.setSort(sort);
record.setGmtCreate(null);
record.setGmtModified(null);
return this.courseChapterAuditMapper.updateByExampleSelective(record, example);
}
@Override
public int updateAuditStatusByChapterNo(Integer auditStatus, Long chapterId) {
CourseChapterAuditExample example = new CourseChapterAuditExample();
Criteria c = example.createCriteria();
c.andIdEqualTo(chapterId);
CourseChapterAudit record = new CourseChapterAudit();
record.setId(chapterId);
record.setAuditStatus(auditStatus);
record.setGmtCreate(null);
record.setGmtModified(null);
return this.courseChapterAuditMapper.updateByExampleSelective(record, example);
}
@Override
public int deleteByCourseId(Long courseId) {
CourseChapterAuditExample example = new CourseChapterAuditExample();
Criteria c = example.createCriteria();
c.andCourseIdEqualTo(courseId);
return this.courseChapterAuditMapper.deleteByExample(example);
}
@Override
public List<CourseChapterAudit> listByCourseIdAndStatusId(Long courseId, Integer statusId) {
CourseChapterAuditExample example = new CourseChapterAuditExample();
Criteria c = example.createCriteria();
c.andCourseIdEqualTo(courseId);
c.andStatusIdEqualTo(statusId);
example.setOrderByClause("sort asc, id asc");
return this.courseChapterAuditMapper.selectByExample(example);
}
} |
RafaelMiquelino/okta-signin-widget | src/EnrollSymantecVipController.js | /*!
* Copyright (c) 2015-2016, Okta, Inc. and/or its affiliates. All rights reserved.
* The Okta software accompanied by this notice is provided pursuant to the Apache License, Version 2.0 (the "License.")
*
* You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0.
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and limitations under the License.
*/
import { _, loc } from 'okta';
import FormController from 'util/FormController';
import FormType from 'util/FormType';
import Util from 'util/Util';
import Footer from 'views/enroll-factors/Footer';
import TextBox from 'views/shared/TextBox';
// Controller for the Symantec VIP hardware-token enrollment form.
// Collects the token's credential id plus two consecutive passcodes and
// submits them to the SYMANTEC token factor's enroll endpoint.
export default FormController.extend({
  className: 'enroll-symantec',

  Model: {
    // credentialId, passCode and nextPassCode are required ('string', true);
    // factorId is optional.
    props: {
      credentialId: ['string', true],
      passCode: ['string', true],
      nextPassCode: ['string', true],
      factorId: 'string',
    },
    // Looks up the SYMANTEC 'token' factor on the current transaction and
    // enrolls it with the two passcodes and the credential id profile.
    save: function () {
      return this.doTransaction(function (transaction) {
        const factor = _.findWhere(transaction.factors, {
          factorType: 'token',
          provider: 'SYMANTEC',
        });
        return factor.enroll({
          passCode: this.get('passCode'),
          nextPassCode: this.get('nextPassCode'),
          profile: { credentialId: this.get('credentialId') },
        });
      });
    },
  },

  Form: {
    title: _.partial(loc, 'factor.totpHard.symantecVip', 'login'),
    subtitle: _.partial(loc, 'enroll.symantecVip.subtitle', 'login'),
    noButtonBar: true,
    autoSave: true,
    className: 'enroll-symantec',
    // Three labeled text inputs (credential id + two passcodes) followed by
    // a toolbar with a single verify/save button; cancel is provided by the
    // shared Footer instead.
    formChildren: function () {
      return [
        FormType.Input({
          label: loc('enroll.symantecVip.credentialId.placeholder', 'login'),
          'label-top': true,
          explain: Util.createInputExplain(
            'enroll.symantecVip.credentialId.tooltip',
            'enroll.symantecVip.credentialId.placeholder',
            'login'
          ),
          'explain-top': true,
          name: 'credentialId',
          input: TextBox,
          type: 'text',
        }),
        FormType.Input({
          label: loc('enroll.symantecVip.passcode1.placeholder', 'login'),
          'label-top': true,
          explain: Util.createInputExplain(
            'enroll.symantecVip.passcode1.tooltip',
            'enroll.symantecVip.passcode1.placeholder',
            'login'
          ),
          'explain-top': true,
          name: 'passCode',
          input: TextBox,
          type: 'text',
        }),
        FormType.Input({
          label: loc('enroll.symantecVip.passcode2.placeholder', 'login'),
          'label-top': true,
          explain: Util.createInputExplain(
            'enroll.symantecVip.passcode2.tooltip',
            'enroll.symantecVip.passcode2.placeholder',
            'login'
          ),
          'explain-top': true,
          name: 'nextPassCode',
          input: TextBox,
          type: 'text',
        }),
        FormType.Toolbar({
          noCancelButton: true,
          save: loc('mfa.challenge.verify', 'login'),
        }),
      ];
    },
  },

  Footer: Footer,
});
|
sh4d1t0/ecommerce-react | webpack/configuration/devtool.js | // enviroment
const isDevelopment = process.env.NODE_ENV !== 'production'
module.exports = () =>
!isDevelopment ? 'cheap-source-map' : 'inline-source-map'
|
ANDROFAST/delivery_articulos | delivery/app-release_source_from_JADX/com/google/android/gms/measurement/internal/zzh.java | package com.google.android.gms.measurement.internal;
import com.google.android.gms.common.internal.zzx;
/**
 * Immutable value holder pairing an app/name key with three counters.
 * Both string keys must be non-empty and the first two counters non-negative
 * (enforced through the zzx precondition helpers).
 */
class zzh {
    final String mName;
    final String zzaRd;
    final long zzaSF;
    final long zzaSG;
    final long zzaSH;

    zzh(String str, String str2, long j, long j2, long j3) {
        // Validate inputs before assigning any state.
        zzx.zzcG(str);
        zzx.zzcG(str2);
        zzx.zzab(j >= 0);
        zzx.zzab(j2 >= 0);
        this.zzaRd = str;
        this.mName = str2;
        this.zzaSF = j;
        this.zzaSG = j2;
        this.zzaSH = j3;
    }

    /** Returns a copy with both counters incremented and the timestamp replaced. */
    zzh zzQ(long j) {
        return new zzh(this.zzaRd, this.mName, this.zzaSF + 1, this.zzaSG + 1, j);
    }
}
|
vtfn/tolb | src/string/replace.js | import curry3 from '../_internal/curry3';
/**
* Invokes `String.prototype.replace` on a given string.
*
* @function
* @param {(string|RegExp)} search - The first argument to `String.prototype.replace`
* @param {string} replacement - The second argument to `String.prototype.replace`
* @param {string} str - The string on which invoke `replace`
* @return {string} The same as `str`.replace(`search`, `replacement`)
* @example
*
* replace('o', 'u', 'foo') //=> 'fuo'
* replace(/o/g, 'u', 'foo') //=> 'fuu'
*/
export default curry3((search, replacement, str) => str.replace(search, replacement));
|
avoinov-k/ibmcloud-storage-volume-lib | lib/utils/reasoncode/reason_code.go | <gh_stars>1-10
/*******************************************************************************
* IBM Confidential
* OCO Source Materials
* IBM Cloud Container Service, 5737-D43
* (C) Copyright IBM Corp. 2017, 2018 All Rights Reserved.
* The source code for this program is not published or otherwise divested of
* its trade secrets, irrespective of what has been deposited with
* the U.S. Copyright Office.
******************************************************************************/
package reasoncode
// ReasonCode classifies a storage-provider error so callers can decide
// whether (and how) to retry or fail.
type ReasonCode string
const (
	// ErrorUnclassified indicates a generic unclassified error
	ErrorUnclassified = ReasonCode("ErrorUnclassified")
	// ErrorPanic indicates recovery from a panic
	ErrorPanic = ReasonCode("ErrorPanic")
	// ErrorTemporaryConnectionProblem indicates an *AMBIGUOUS RESPONSE* due to IaaS API timeout or reset
	// (Caller can continue to retry indefinitely)
	ErrorTemporaryConnectionProblem = ReasonCode("ErrorTemporaryConnectionProblem")
	// ErrorRateLimitExceeded indicates IaaS API rate limit has been exceeded
	// (Caller can continue to retry indefinitely)
	ErrorRateLimitExceeded = ReasonCode("ErrorRateLimitExceeded")
)
// -- General provider API (RPC) errors ---
const (
	// ErrorBadRequest indicates a generic bad request to the Provider API
	// (Caller can treat this as a fatal failure)
	ErrorBadRequest = ReasonCode("ErrorBadRequest")
	// ErrorRequiredFieldMissing indicates the required field is missing from the request
	// (Caller can treat this as a fatal failure)
	ErrorRequiredFieldMissing = ReasonCode("ErrorRequiredFieldMissing")
	// ErrorUnsupportedAuthType indicates the requested Auth-Type is not supported
	// (Caller can treat this as a fatal failure)
	ErrorUnsupportedAuthType = ReasonCode("ErrorUnsupportedAuthType")
	// ErrorUnsupportedMethod indicates the requested Provider API method is not supported
	// (Caller can treat this as a fatal failure)
	ErrorUnsupportedMethod = ReasonCode("ErrorUnsupportedMethod")
)
// -- Authentication and authorization problems --
const (
	// ErrorUnknownProvider indicates the named provider is not known
	ErrorUnknownProvider = ReasonCode("ErrorUnknownProvider")
	// ErrorUnauthorised indicates an IaaS authorisation error
	ErrorUnauthorised = ReasonCode("ErrorUnauthorised")
	// ErrorFailedTokenExchange indicates an IAM token exchange problem
	ErrorFailedTokenExchange = ReasonCode("ErrorFailedTokenExchange")
	// ErrorProviderAccountTemporarilyLocked indicates the IaaS account as it has been temporarily locked
	ErrorProviderAccountTemporarilyLocked = ReasonCode("ErrorProviderAccountTemporarilyLocked")
	// ErrorInsufficientPermissions indicates an operation failed due to a confirmed problem with IaaS user permissions
	// (Caller can retry later, but not indefinitely)
	ErrorInsufficientPermissions = ReasonCode("ErrorInsufficientPermissions")
)
// -- Attach / Detach problems --
const (
	//ErrorVolumeAttachFailed indicates if volume attach to instance is failed
	ErrorVolumeAttachFailed = ReasonCode("ErrorVolumeAttachFailed")
	//ErrorVolumeDetachFailed indicates if volume detach from instance is failed
	ErrorVolumeDetachFailed = ReasonCode("ErrorVolumeDetachFailed")
)
|
mbari-media-management/vars-kb | org.mbari.kb.ui/src/main/java/org/mbari/kb/ui/dialogs/AddConceptNameDialog2.java | /*
* @(#)AddConceptNameDialog2.java 2009.10.28 at 11:03:10 PDT
*
* Copyright 2009 MBARI
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mbari.kb.ui.dialogs;
import java.awt.Frame;
import java.awt.event.KeyEvent;
import javax.swing.*;
import javax.swing.event.DocumentEvent;
import javax.swing.event.DocumentListener;
import org.bushe.swing.event.EventBus;
import mbarix4j.swing.SpinningDialWaitIndicator;
import mbarix4j.swing.WaitIndicator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.mbari.kb.core.DAO;
import org.mbari.kb.core.UserAccount;
import org.mbari.kb.core.knowledgebase.Concept;
import org.mbari.kb.core.knowledgebase.ConceptDAO;
import org.mbari.kb.core.knowledgebase.ConceptName;
import org.mbari.kb.core.knowledgebase.ConceptNameDAO;
import org.mbari.kb.core.knowledgebase.ConceptNameTypes;
import org.mbari.kb.core.knowledgebase.History;
import org.mbari.kb.core.knowledgebase.HistoryFactory;
import org.mbari.kb.core.knowledgebase.KnowledgebaseDAOFactory;
import org.mbari.kb.core.knowledgebase.KnowledgebaseFactory;
import org.mbari.kb.ui.StateLookup;
import org.mbari.kb.ui.ToolBelt;
import org.mbari.kb.ui.actions.ApproveHistoryTask;
import org.mbari.kb.shared.ui.FancyButton;
import javax.swing.GroupLayout.Alignment;
import javax.swing.LayoutStyle.ComponentPlacement;
/**
*
* @author brian
*/
public class AddConceptNameDialog2 extends javax.swing.JDialog {
private final Logger log = LoggerFactory.getLogger(getClass());
private final ApproveHistoryTask approveHistoryTask;
private javax.swing.JTextField authorField;
private javax.swing.ButtonGroup buttonGroup1;
private javax.swing.JButton cancelButton;
private javax.swing.JRadioButton commonRb;
private Concept concept;
private final HistoryFactory historyFactory;
private javax.swing.JLabel jLabel2;
private javax.swing.JLabel jLabel3;
private javax.swing.JLabel jLabel4;
private javax.swing.JScrollPane jScrollPane1;
private javax.swing.JTextArea jTextArea1;
private final KnowledgebaseDAOFactory knowledgebaseDAOFactory;
private final KnowledgebaseFactory knowledgebaseFactory;
private javax.swing.JLabel msgLabel;
private javax.swing.JTextField nameField;
private javax.swing.JButton okButton;
private javax.swing.JRadioButton synonymRb;
private JRadioButton formerRb;
/**
* Creates new form AddConceptNameDialog2
*
* @param parent
* @param modal
* @param toolBelt
*/
    public AddConceptNameDialog2(java.awt.Frame parent, boolean modal, ToolBelt toolBelt) {
        super(parent, modal);
        // Pull the knowledgebase collaborators this dialog needs from the tool belt.
        this.knowledgebaseDAOFactory = toolBelt.getKnowledgebaseDAOFactory();
        this.knowledgebaseFactory = toolBelt.getKnowledgebaseFactory();
        this.historyFactory = toolBelt.getHistoryFactory();
        this.approveHistoryTask = toolBelt.getApproveHistoryTask();
        initComponents();
        initModel();
        // Center the dialog over the main application frame.
        Frame frame = StateLookup.getApplicationFrame();
        setLocationRelativeTo(frame);
        pack();
    }
    /** Hides the dialog and clears its fields when Cancel is clicked. */
    private void cancelButtonActionPerformed(java.awt.event.ActionEvent evt) {
        close();
    }
    /** Treats Enter on the focused Cancel button as a click. */
    private void cancelButtonKeyReleased(java.awt.event.KeyEvent evt) {
        if (evt.getKeyCode() == KeyEvent.VK_ENTER) {
            cancelButtonActionPerformed(null);
        }
    }
    /** Hides the dialog and resets the name, message, and author fields. */
    private void close() {
        setVisible(false);
        nameField.setText("");
        msgLabel.setText("");
        authorField.setText("");
    }
    /** Returns the concept this dialog adds names to (may be null until set). */
    public Concept getConcept() {
        return concept;
    }
// Builds and lays out all Swing components of the "Add Concept Name" dialog.
// Mostly mechanical GUI-builder style code: widget creation, listener wiring,
// then a GroupLayout for the content pane.
private void initComponents() {
    // Widget creation.
    buttonGroup1 = new javax.swing.ButtonGroup();
    jLabel2 = new javax.swing.JLabel();
    jLabel3 = new javax.swing.JLabel();
    jLabel4 = new javax.swing.JLabel();
    nameField = new javax.swing.JTextField();
    authorField = new javax.swing.JTextField();
    commonRb = new javax.swing.JRadioButton();
    synonymRb = new javax.swing.JRadioButton();
    formerRb = new JRadioButton();
    cancelButton = new FancyButton();
    okButton = new FancyButton();
    jScrollPane1 = new javax.swing.JScrollPane();
    jTextArea1 = new javax.swing.JTextArea();
    msgLabel = new javax.swing.JLabel();

    setTitle("VARS - Add Concept Name");
    //setResizable(false);

    // Field labels and tooltips.
    jLabel2.setText("Name:");
    jLabel3.setText("Author:");
    jLabel4.setText("Type:");
    nameField.setToolTipText("The name to store in the database");
    authorField.setToolTipText("An author that described this name in the literature");

    // Mutually exclusive name-type radio buttons (common / synonym / former).
    buttonGroup1.add(commonRb);
    commonRb.setText("Common");
    commonRb.setBorder(javax.swing.BorderFactory.createEmptyBorder(0, 0, 0, 0));
    commonRb.setMargin(new java.awt.Insets(0, 10, 0, 0));
    buttonGroup1.add(synonymRb);
    synonymRb.setText("Synonym");
    synonymRb.setBorder(javax.swing.BorderFactory.createEmptyBorder(0, 0, 0, 0));
    synonymRb.setMargin(new java.awt.Insets(0, 10, 0, 0));
    buttonGroup1.add(formerRb);
    formerRb.setText("Former");
    formerRb.setBorder(javax.swing.BorderFactory.createEmptyBorder(0, 0, 0, 0));
    formerRb.setMargin(new java.awt.Insets(0, 10, 0, 0));

    // Cancel button: click or ENTER both dismiss the dialog.
    cancelButton.setText("Cancel");
    cancelButton.setIcon(new ImageIcon(getClass().getResource("/org/mbari/kb/ui/images/delete2.png")));
    cancelButton.addActionListener(new java.awt.event.ActionListener() {
        public void actionPerformed(java.awt.event.ActionEvent evt) {
            cancelButtonActionPerformed(evt);
        }
    });
    cancelButton.addKeyListener(new java.awt.event.KeyAdapter() {
        @Override
        public void keyReleased(java.awt.event.KeyEvent evt) {
            cancelButtonKeyReleased(evt);
        }
    });

    // OK button starts disabled; it is enabled by the DocumentListener wired
    // in initModel() once the name field holds non-whitespace text.
    okButton.setText("OK");
    okButton.setIcon(new ImageIcon(getClass().getResource("/org/mbari/kb/ui/images/check2.png")));
    okButton.setEnabled(false);
    okButton.addActionListener(new java.awt.event.ActionListener() {
        public void actionPerformed(java.awt.event.ActionEvent evt) {
            okButtonActionPerformed(evt);
        }
    });
    okButton.addKeyListener(new java.awt.event.KeyAdapter() {
        @Override
        public void keyReleased(java.awt.event.KeyEvent evt) {
            okButtonKeyReleased(evt);
        }
    });

    // Read-only instruction text at the top of the dialog.
    jTextArea1.setColumns(20);
    jTextArea1.setWrapStyleWord(true);
    jTextArea1.setLineWrap(true);
    jTextArea1.setEditable(false);
    jTextArea1.setRows(5);
    jTextArea1.setText("Add a concept name. Enter a name, author and the type of name. ");
    jScrollPane1.setViewportView(jTextArea1);

    // Dark-red label used for validation / error feedback.
    msgLabel.setForeground(new java.awt.Color(153, 0, 0));
    msgLabel.setText(" ");

    // GroupLayout (generated-style) for the content pane.
    GroupLayout layout = new GroupLayout(getContentPane());
    layout.setHorizontalGroup(
        layout.createParallelGroup(Alignment.LEADING)
        .addGroup(layout.createSequentialGroup()
            .addContainerGap()
            .addGroup(layout.createParallelGroup(Alignment.TRAILING)
                .addComponent(msgLabel, GroupLayout.DEFAULT_SIZE, 360, Short.MAX_VALUE)
                .addComponent(jScrollPane1, GroupLayout.DEFAULT_SIZE, 360, Short.MAX_VALUE)
                .addGroup(layout.createSequentialGroup()
                    .addGroup(layout.createParallelGroup(Alignment.LEADING)
                        .addComponent(jLabel3)
                        .addComponent(jLabel4)
                        .addComponent(jLabel2))
                    .addPreferredGap(ComponentPlacement.RELATED)
                    .addGroup(layout.createParallelGroup(Alignment.LEADING)
                        .addGroup(layout.createSequentialGroup()
                            .addComponent(commonRb)
                            .addPreferredGap(ComponentPlacement.RELATED)
                            .addComponent(synonymRb)
                            .addPreferredGap(ComponentPlacement.RELATED)
                            .addComponent(formerRb))
                        .addComponent(authorField, GroupLayout.DEFAULT_SIZE, 307, Short.MAX_VALUE)
                        .addComponent(nameField, GroupLayout.DEFAULT_SIZE, 307, Short.MAX_VALUE)))
                .addGroup(layout.createSequentialGroup()
                    .addComponent(okButton, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE)
                    .addPreferredGap(ComponentPlacement.RELATED)
                    .addComponent(cancelButton, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE)))
            .addContainerGap())
    );
    layout.setVerticalGroup(
        layout.createParallelGroup(Alignment.LEADING)
        .addGroup(layout.createSequentialGroup()
            .addContainerGap()
            .addComponent(jScrollPane1, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE)
            .addPreferredGap(ComponentPlacement.RELATED)
            .addGroup(layout.createParallelGroup(Alignment.BASELINE)
                .addComponent(nameField, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE)
                .addComponent(jLabel2))
            .addPreferredGap(ComponentPlacement.RELATED)
            .addGroup(layout.createParallelGroup(Alignment.BASELINE)
                .addComponent(jLabel3)
                .addComponent(authorField, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE))
            .addPreferredGap(ComponentPlacement.RELATED)
            .addGroup(layout.createParallelGroup(Alignment.BASELINE)
                .addComponent(jLabel4)
                .addComponent(commonRb)
                .addComponent(synonymRb)
                .addComponent(formerRb))
            .addGap(17)
            .addComponent(msgLabel)
            .addPreferredGap(ComponentPlacement.RELATED, GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
            .addGroup(layout.createParallelGroup(Alignment.BASELINE)
                .addComponent(cancelButton, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE)
                .addComponent(okButton, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE))
            .addContainerGap())
    );
    getContentPane().setLayout(layout);
    pack();
}
/**
 * Wires a DocumentListener to the name field so the OK button is enabled
 * only while the field contains at least one non-whitespace character.
 */
private void initModel() {
    nameField.getDocument().addDocumentListener(new DocumentListener() {

        public void insertUpdate(DocumentEvent e) {
            toggleOkButton();
        }

        public void removeUpdate(DocumentEvent e) {
            toggleOkButton();
        }

        public void changedUpdate(DocumentEvent e) {
            toggleOkButton();
        }

        // Enable OK iff the field is non-empty and not purely whitespace.
        private void toggleOkButton() {
            final String text = nameField.getText();
            final boolean hasContent = (text != null) && (text.length() > 0) && !text.matches("\\A\\s+");
            okButton.setEnabled(hasContent);
        }
    });
}
/**
 * Validates the entered name, persists a new ConceptName (plus an audit
 * History entry) on the current Concept, and closes the dialog.
 *
 * Fixes relative to the original:
 * - the duplicate-name lookup was executed once OUTSIDE the try/catch,
 *   so a database failure escaped the handler before the guarded lookup
 *   ever ran; the lookup now happens only inside the try block
 * - the "Failed to connect" message is shown to the user regardless of
 *   whether error logging is enabled
 * - removed two DAO locals that were created but never used
 * - corrected "exits" -> "exists" in the duplicate-name message
 */
private void okButtonActionPerformed(java.awt.event.ActionEvent evt) {
    final String name = nameField.getText();
    Concept myConcept = getConcept();

    boolean okToProceed = true;

    /*
     * Verify that the name is not already used in the database.
     */
    Concept preexistingConcept = null;
    try {
        preexistingConcept = knowledgebaseDAOFactory.newConceptDAO().findByName(name);
    }
    catch (Exception e) {
        if (log.isErrorEnabled()) {
            log.error("Failed attempt to look up the concept '" + name + "'", e);
        }
        msgLabel.setText("Failed to connect to database");
        okToProceed = false;
    }

    if (okToProceed && (preexistingConcept != null)) {
        String preexistingName = preexistingConcept.getPrimaryConceptName().getName();
        msgLabel.setText("The name, " + preexistingName + ", already exists in the knowledgebase");
        okToProceed = false;
    }

    if (okToProceed) {
        WaitIndicator waitIndicator = new SpinningDialWaitIndicator((JFrame) getParent());

        /*
         * Create the new conceptName from the form fields.
         */
        ConceptName conceptName = knowledgebaseFactory.newConceptName();
        conceptName.setName(nameField.getText());
        conceptName.setAuthor(authorField.getText());
        String nameType = ConceptNameTypes.COMMON.toString();
        if (synonymRb.isSelected()) {
            nameType = ConceptNameTypes.SYNONYM.toString();
        }
        else if (formerRb.isSelected()) {
            nameType = ConceptNameTypes.FORMER.toString();
        }
        conceptName.setNameType(nameType);

        try {
            DAO dao = knowledgebaseDAOFactory.newDAO();
            dao.startTransaction();
            myConcept = dao.merge(myConcept);
            myConcept.addConceptName(conceptName);
            dao.persist(conceptName);

            /*
             * Add a History object to track the change.
             */
            final UserAccount userAccount = StateLookup.getUserAccount();
            History history = historyFactory.add(userAccount, conceptName);
            myConcept.getConceptMetadata().addHistory(history);
            dao.persist(history);
            dao.endTransaction();
            EventBus.publish(StateLookup.TOPIC_APPROVE_HISTORY, history);
        }
        catch (Exception e) {
            EventBus.publish(StateLookup.TOPIC_NONFATAL_ERROR, e);
        }
        close();
        waitIndicator.dispose();
    }
}
// Lets the OK button be activated with the ENTER key while it has focus.
private void okButtonKeyReleased(java.awt.event.KeyEvent evt) {
    if (evt.getKeyCode() == KeyEvent.VK_ENTER) {
        okButtonActionPerformed(null);
    }
}

// Sets the Concept that a newly entered name will be attached to.
public void setConcept(Concept concept) {
    this.concept = concept;
}

// When showing the dialog, focus the name field and reset the name-type
// selection to the default ("Common") before delegating to the superclass.
@Override
public void setVisible(boolean b) {
    if (b) {
        nameField.requestFocus();
        commonRb.setSelected(true);
    }
    super.setVisible(b);
}
}
|
import pytest


def _pytest_major_minor(version):
    """Best-effort numeric (major, minor) tuple from a version string.

    Non-numeric fragments (e.g. ``"3.0.0.dev1"``) are tolerated by keeping
    only the digits of each component.
    """
    parts = []
    for piece in version.split(".")[:2]:
        digits = "".join(ch for ch in piece if ch.isdigit())
        parts.append(int(digits) if digits else 0)
    return tuple(parts)


# Skip this legacy module on pytest versions older than 3.0.0.
# The version must be compared numerically: the original string comparison
# (pytest.__version__ < "3.0.0") would wrongly classify e.g. "10.0.0" as
# older than "3.0.0".
if _pytest_major_minor(pytest.__version__) < (3, 0):
    pytest.skip()
import numpy as np
import unittest
import offsetbasedgraph as obg
# from test_snarls import snarl_graph2
# from graph_peak_caller.control.linearsnarls import \
#     UnmappedIndices, LinearPileup
# from graph_peak_caller.control.snarlmaps import LinearSnarlMap

# Shared fixture graph used by the test cases below.
graph = obg.GraphWithReversals(
    {3: obg.Block(20), 5: obg.Block(10),
     12: obg.Block(20), 13: obg.Block(21),
     }, {})
# NOTE(review): this whole class depends on names (LinearSnarlMap,
# LinearPileup, OldSparsePileup, ValuedIndexes, DensePileup, snarl_graph2)
# whose imports are commented out above, so it cannot run as-is -- hence the
# unconditional skip marker.
@pytest.mark.skip()
class TestSnarlMap(unittest.TestCase):
    def setUp(self):
        # Build a snarl map over the shared fixture graph plus a handful of
        # graph positions and their expected linear-coordinate projections.
        self.snarl_map = LinearSnarlMap.from_snarl_graph(snarl_graph2, graph)
        self.graph_positions = [obg.Position(5, 4),
                                obg.Position(3, 4),
                                obg.Position(12, 4),
                                obg.Position(13, 4)]
        self.linear_positions = [4, 31/20*4, 10+21/20*4, 14]
        self.linear_positions = [p for p in self.linear_positions]
        self.graph_interval = obg.DirectedInterval(self.graph_positions[0],
                                                   self.graph_positions[2])

    # TODO: Is test wrong?
    # Disabled by the leading underscore; kept for reference.
    def _test_create_control(self):
        intervals = [obg.DirectedInterval(0, 20, [3]),
                     obg.DirectedInterval(0, 10, [5]),
                     obg.DirectedInterval(0, 21, [13])]
        mapped_intervals = self.snarl_map.map_interval_collection(
            intervals)
        linear_pileup = LinearPileup.create_from_starts_and_ends(
            mapped_intervals.starts,
            mapped_intervals.ends)
        graph_pileup = linear_pileup.to_sparse_pileup(self.snarl_map)
        true_sparse_pileup = OldSparsePileup(graph)
        true_data = {3: ValuedIndexes([], [], 2, 20),
                     12: ValuedIndexes([], [], 2, 20),
                     13: ValuedIndexes([], [], 2, 21),
                     5: ValuedIndexes([], [], 2, 10)}
        true_sparse_pileup.data = OldSparsePileupData([(key, val) for key, val in true_data.items()], graph=graph)
        #print(true_sparse_pileup.data)
        print("Graph pileup")
        print(graph_pileup)
        print("True")
        print(true_sparse_pileup)
        self.assertTrue(graph_pileup.equals_old_sparse_pileup(true_sparse_pileup))

    def test_graph_position_to_linear(self):
        # Each fixture graph position must project onto its expected linear
        # coordinate.
        for graph_pos, lin_pos in zip(self.graph_positions,
                                      self.linear_positions):
            mapped_pos = self.snarl_map.graph_position_to_linear(graph_pos)
            self.assertEqual(mapped_pos, lin_pos)

    def test_map_graph_interval(self):
        # Mapping a graph interval yields its (start, end) linear span.
        mapped_interval = self.snarl_map.map_graph_interval(
            self.graph_interval)
        self.assertEqual(mapped_interval,
                         (self.linear_positions[0],
                          self.linear_positions[2]), [5, 12])

    def test_to_graph_pileup(self):
        """[0, 5, 10, 15, 20, 25, 30]"""
        all_indices = [0, 5, 10, 15, 20, 25, 30]
        values = {idx: i for i, idx in enumerate(all_indices)}
        unmapped_indices = {3: all_indices[:],
                            5: [0, 5],
                            12: all_indices[2:],
                            13: all_indices[2:]}
        unmapped_indices = {node_id: UnmappedIndices(
            idxs, [values[idx] for idx in idxs])
                            for node_id, idxs in unmapped_indices.items()}
        # Expected per-node (indices, values) after projecting back onto the
        # graph; scaled by node length ratios (e.g. 20/31, 20/21).
        vis = {3: (np.array(all_indices)*20/31, np.array(list(range(7)))),
               5: (np.array([0, 5]), np.array([0, 1])),
               12: ((np.array(all_indices[2:])-10)*20/21,
                    np.array(list(range(2, 7)))),
               13: ((np.array(all_indices[2:])-10,
                     np.array(list(range(2, 7)))))}
        vis = {node_id: ValuedIndexes(
            val[0][1:].astype("int"), val[1][1:],
            val[1][0], graph.node_size(node_id))
               for node_id, val in vis.items()}
        correct_pileup = OldSparsePileup(graph)
        print("Old sparse pileup")
        print(correct_pileup)
        correct_pileup.data = OldSparsePileupData([(key, val) for key, val in vis.items()], graph=graph)
        print("Correct pileup data")
        print(correct_pileup.data)
        print("Correct pileup")
        print(correct_pileup)
        correct_pileup = DensePileup.create_from_old_sparsepileup(correct_pileup)
        pileup = self.snarl_map.to_dense_pileup(unmapped_indices)
        print("Pileup from test")
        print(pileup)
        print("Correct pileup")
        print(correct_pileup)
        self.assertEqual(pileup, correct_pileup)
# Needs to be rewritten. To valued indexes does not exist anymore
# (The class below is deliberately commented out by being kept inside a bare
# string literal.)
"""
class TestLinearPileupMap(TestSnarlMap):
    def _test_to_valued_indexes(self):
        #[0, 5, 10, 15, 20, 25, 30]
        all_indices = [0, 5, 10, 15, 20, 25, 30]
        linear_pileup = LinearPileup(np.array(all_indices),
                                     np.array(list(range(7))))
        vis = {3: (np.array(all_indices)*20/31, np.array(list(range(7)))),
               5: (np.array([0, 5]), np.array([0, 1])),
               12: ((np.array(all_indices[2:])-10)*20/21,
                    np.array(list(range(2, 7)))),
               13: ((np.array(all_indices[2:])-10,
                     np.array(list(range(2, 7)))))}
        vis = {node_id: ValuedIndexes(
            val[0][1:].astype("int"), val[1][1:], val[1][0],
            graph.node_size(node_id))
               for node_id, val in vis.items()}
        mapped_vis = linear_pileup.to_valued_indexes(self.snarl_map)
        self.assertEqual(mapped_vis, vis)
"""
if __name__ == "__main__":
    unittest.main()
|
T0shik/Tricking-Royal | Client/vue-client/src/lang/ru/watch.js | <reponame>T0shik/Tricking-Royal<gh_stars>10-100
// Russian locale strings for the battle "watch" screen.
export default {
    return: "Вернуться к Батлам",
    signInToVote: "Зарегистрируетесь, что бы проголосовать",
    matchInTribunal: "Матч сейчас в трибунале"
}
leiradel/barebones-rpi | barebones07/syscalls.c | #include <stdint.h>
#include <unistd.h>
#include <sys/stat.h>
#include "aux.h"
// Extend the heap space at the end of the .bss section.
// Extend the heap space at the end of the .bss section.
// NOTE(review): there is no upper bound check; a large allocation silently
// grows past whatever RAM is available -- confirm the intended heap limit
// for this board and fail (return (void*)-1) once it is exceeded.
void* _sbrk(intptr_t increment) {
  extern char heap_start;                 // symbol provided by the linker script
  static const char* start = &heap_start; // current program break
  const char* prev = start;

  // Keep memory returned by _sbrk 16-byte aligned.
  increment = (increment + 15) & ~15;

  start += increment;
  return (void*)prev; // sbrk semantics: return the previous break
}
// Write stdout and stderr to the UART.
ssize_t _write(int fd, const void* buf, size_t count) {
if (fd == 1 || fd == 2) {
const char* str = buf;
for (size_t i = 0; i < count; i++) {
if (*str == '\n') {
uart_write('\r', 0);
}
uart_write(*str++, 0);
}
return count;
}
return 0;
}
// Read stdin from the UART.
ssize_t _read(int fd, void* buf, size_t count) {
if (fd == 0) {
char* str = buf;
for (size_t i = 0; i < count; i++) {
*str++ = uart_read(0);
}
return count;
}
return 0;
}
// Minimal newlib syscall stubs: this bare-metal target has no file system,
// so the file-related syscalls are no-ops.
// NOTE(review): _close/_fstat/_lseek report success (0) for every
// descriptor -- confirm this is the intended behavior for newlib here.

int _close(int fd) {
  (void)fd;
  return 0;
}

int _fstat(int fd, struct stat* buf) {
  (void)fd;
  (void)buf;
  return 0;
}

// Report every descriptor as a terminal so stdio uses terminal buffering.
int _isatty(int fd) {
  (void)fd;
  return 1;
}

off_t _lseek(int fd, off_t offset, int whence) {
  (void)fd;
  (void)offset;
  (void)whence;
  return 0;
}
|
Bhaskers-Blu-Org2/azure-shortcuts-for-java | src/com/microsoft/azure/shortcuts/resources/samples/ResourcesSample.java | /**
* Copyright (c) Microsoft Corporation
*
* All rights reserved.
*
* MIT License
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files
* (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge,
* publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
* ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH
* THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package com.microsoft.azure.shortcuts.resources.samples;
import java.util.Map;
import org.apache.commons.lang3.StringUtils;
import com.microsoft.azure.shortcuts.resources.Resource;
import com.microsoft.azure.shortcuts.resources.implementation.Subscription;
// Tests resources
// Tests resources
/**
 * Sample/demo program exercising the Azure resources API: listing,
 * group-scoped listing, lookup by id and by name parts, and deletion.
 * Not a unit test -- it talks to a live subscription via "my.azureauth".
 */
public class ResourcesSample {
    public static void main(String[] args) {
        try {
            // Authenticate against the subscription described in my.azureauth.
            Subscription subscription = Subscription.authenticate("my.azureauth", null);
            test(subscription);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Runs the demo scenario against the given subscription.
     * Note: the final delete of "ThisMustFail" is intentionally expected
     * to fail (invalid resource id).
     */
    public static void test(Subscription subscription) throws Exception {
        // Listing all resource names
        Map<String, Resource> resources = subscription.resources().asMap();
        System.out.println(String.format("Resource ids: %s\n\t", StringUtils.join(resources.keySet(), ",\n\t")));

        // Listing resources in a specific group
        String groupName = "azchat";
        Map<String, Resource> resources2 = subscription.resources().asMap(groupName);
        System.out.println("Resources inside group '" + groupName + "':");
        for(Resource resource : resources2.values()) {
            printResource(resource);
        }

        // Getting information about a specific resource based on ID
        Resource resource = subscription.resources("/subscriptions/9657ab5d-4a4a-4fd2-ae7a-4cd9fbd030ef/resourceGroups/javasampleresourcegroup/providers/Microsoft.Storage/storageAccounts/javastojzgsg");
        printResource(resource);

        // Getting information about a specific resource based on name, type, provider and group
        resource = subscription.resources().get(
            resource.name(),
            resource.type(),
            resource.provider(),
            resource.resourceGroup());
        printResource(resource);

        // Delete a resource
        System.out.println(String.format("Deleting resource '%s' of type '%s' by provider '%s' in group '%s'",
            resource.name(),
            resource.type(),
            resource.provider(),
            resource.resourceGroup()));
        resource.delete();

        // Delete a resource based on its ID
        String resourceToDelete = "ThisMustFail";
        System.out.println("Deleting resource " + resourceToDelete);
        subscription.resources().delete(resourceToDelete);
    }

    // Pretty-prints the interesting fields of a single resource.
    private static void printResource(Resource resource) throws Exception {
        System.out.println(String.format("Found resource ID: %s\n"
            + "\tGroup: %s\n"
            + "\tProvider: %s\n"
            + "\tRegion: %s\n"
            + "\tShort name: %s\n"
            + "\tTags: %s\n"
            + "\tType: %s\n"
            + "\tProvisioning state %s\n",
            resource.id(),
            resource.resourceGroup(),
            resource.provider(),
            resource.region(),
            resource.name(),
            resource.tags(),
            resource.type(),
            resource.provisioningState()
        ));
    }
}
|
54wall/LiYuJapanese | mvp/MvpLiYu/app/src/main/java/pri/weiqiang/liyujapanese/mvp/model/newsapi/NewsAPIFragmentModelImpl.java | <reponame>54wall/LiYuJapanese
package pri.weiqiang.liyujapanese.mvp.model.newsapi;
import android.util.Log;

import java.util.List;

import io.reactivex.android.schedulers.AndroidSchedulers;
import io.reactivex.disposables.CompositeDisposable;
import io.reactivex.disposables.Disposable;
import io.reactivex.functions.Consumer;
import io.reactivex.schedulers.Schedulers;

import pri.weiqiang.liyujapanese.mvp.bean.newsapi.NewsResponse;
import pri.weiqiang.liyujapanese.network.newsapi.NewsApiNetworks;
/**
 * Model layer for the NewsAPI fragment: fetches headlines via Retrofit/RxJava
 * and tracks the resulting subscriptions so they can be cancelled together.
 */
public class NewsAPIFragmentModelImpl implements NewsAPIFragmentModel {

    private static final String TAG = NewsAPIFragmentModelImpl.class.getSimpleName();

    // FIX: this field was referenced by getHeadlinesByCountry() and
    // unsubscribe() but never declared, so the class could not compile.
    private final CompositeDisposable mCompositeDisposable = new CompositeDisposable();

    /**
     * Fetches top headlines for a country and delivers the response (or the
     * error) on the Android main thread. The subscription is retained so
     * {@link #unsubscribe()} can cancel it.
     */
    @Override
    public void getHeadlinesByCountry(Consumer<NewsResponse> consumer, Consumer<Throwable> throwble, String country, String from, String to, String category, String pageSize, String apiKey) {
        Log.e(TAG, "getLatestNews");
        Disposable disposable = NewsApiNetworks.getInstance().getCommonApi().getHeadlinesByCountry(country, from, to, category, pageSize, apiKey)
                .subscribeOn(Schedulers.io())
                .observeOn(AndroidSchedulers.mainThread())
                .subscribe(consumer, throwble);
        mCompositeDisposable.add(disposable);
    }

    /** Cancels every in-flight request started by this model. */
    @Override
    public void unsubscribe() {
        Log.e(TAG, "unsubscrible");
        mCompositeDisposable.clear();
    }

    @Override
    public List<NewsAPIFragmentModel> getData() {
        // NOTE(review): returning null forces callers to null-check; confirm
        // whether an empty list would be acceptable before changing this.
        return null;
    }
}
|
pitbox46/graaljs-forge | src/main/java/com/oracle/truffle/js/parser/internal/ir/debug/PrintVisitor.java | /*
* Copyright (c) 2018, 2019, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* The Universal Permissive License (UPL), Version 1.0
*
* Subject to the condition set forth below, permission is hereby granted to any
* person obtaining a copy of this software, associated documentation and/or
* data (collectively the "Software"), free of charge and under any and all
* copyright rights in the Software, and any and all patent rights owned or
* freely licensable by each licensor hereunder covering either (i) the
* unmodified Software as contributed to or provided by such licensor, or (ii)
* the Larger Works (as defined below), to deal in both
*
* (a) the Software, and
*
* (b) any piece of software and/or hardware listed in the lrgrwrks.txt file if
* one is included with the Software each a "Larger Work" to which the Software
* is contributed by such licensors),
*
* without restriction, including without limitation the rights to copy, create
* derivative works of, display, perform, and distribute the Software and make,
* use, sell, offer for sale, import, export, have made, and have sold the
* Software and the Larger Work(s), and to sublicense the foregoing rights on
* either these or other terms.
*
* This license is subject to the following condition:
*
* The above copyright notice and either this complete permission notice or at a
* minimum a reference to the UPL must be included in all copies or substantial
* portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.oracle.truffle.js.parser.internal.ir.debug;
import static com.oracle.js.parser.TokenType.BIT_NOT;
import static com.oracle.js.parser.TokenType.DECPOSTFIX;
import static com.oracle.js.parser.TokenType.INCPOSTFIX;
import java.util.List;
import com.oracle.js.parser.TokenType;
import com.oracle.js.parser.ir.BinaryNode;
import com.oracle.js.parser.ir.Block;
import com.oracle.js.parser.ir.BlockStatement;
import com.oracle.js.parser.ir.BreakNode;
import com.oracle.js.parser.ir.CaseNode;
import com.oracle.js.parser.ir.CatchNode;
import com.oracle.js.parser.ir.ContinueNode;
import com.oracle.js.parser.ir.ExpressionStatement;
import com.oracle.js.parser.ir.ForNode;
import com.oracle.js.parser.ir.FunctionNode;
import com.oracle.js.parser.ir.IdentNode;
import com.oracle.js.parser.ir.IfNode;
import com.oracle.js.parser.ir.JoinPredecessorExpression;
import com.oracle.js.parser.ir.LabelNode;
import com.oracle.js.parser.ir.LexicalContext;
import com.oracle.js.parser.ir.Node;
import com.oracle.js.parser.ir.Statement;
import com.oracle.js.parser.ir.SwitchNode;
import com.oracle.js.parser.ir.ThrowNode;
import com.oracle.js.parser.ir.TryNode;
import com.oracle.js.parser.ir.UnaryNode;
import com.oracle.js.parser.ir.VarNode;
import com.oracle.js.parser.ir.WhileNode;
import com.oracle.js.parser.ir.WithNode;
import com.oracle.js.parser.ir.visitor.NodeVisitor;
/**
* Print out the AST as human readable source code. This works both on lowered and unlowered ASTs
*
* see the flags --print-parse and --print-lower-parse
*/
/**
 * Print out the AST as human readable source code. This works both on lowered and unlowered ASTs
 *
 * see the flags --print-parse and --print-lower-parse
 */
public final class PrintVisitor extends NodeVisitor<LexicalContext> {
    /** Tab width. */
    private static final int TABWIDTH = 4;

    /** Composing buffer. */
    private final StringBuilder sb;

    /** Indentation factor. */
    private int indent;

    /** Line separator. */
    private static final String EOLN = "\n";

    /** Print line numbers. */
    private final boolean printLineNumbers;

    /** Print inferred and optimistic types. */
    private final boolean printTypes;

    /** Last line number emitted, so consecutive statements on the same line share one marker. */
    private int lastLineNumber = -1;

    /**
     * Constructor.
     */
    public PrintVisitor() {
        this(true, true);
    }

    /**
     * Constructor.
     *
     * @param printLineNumbers should line number nodes be included in the output?
     * @param printTypes should we print optimistic and inferred types?
     */
    public PrintVisitor(final boolean printLineNumbers, final boolean printTypes) {
        super(new LexicalContext());
        this.sb = new StringBuilder();
        this.printLineNumbers = printLineNumbers;
        this.printTypes = printTypes;
    }

    /**
     * Constructor.
     *
     * @param root a node from which to start printing code
     */
    public PrintVisitor(final Node root) {
        this(root, true, true);
    }

    /**
     * Constructor.
     *
     * @param root a node from which to start printing code
     * @param printLineNumbers should line numbers nodes be included in the output?
     * @param printTypes should we print optimistic and inferred types?
     */
    public PrintVisitor(final Node root, final boolean printLineNumbers, final boolean printTypes) {
        this(printLineNumbers, printTypes);
        visit(root);
    }

    private void visit(final Node root) {
        root.accept(this);
    }

    @Override
    public String toString() {
        return sb.append(EOLN).toString();
    }

    /**
     * Insert spaces before a statement.
     */
    private void indent() {
        for (int i = indent; i > 0; i--) {
            sb.append(' ');
        }
    }

    /*
     * Visits.
     */

    @Override
    public boolean enterDefault(final Node node) {
        node.toString(sb, printTypes);
        return false;
    }

    @Override
    public boolean enterContinueNode(final ContinueNode node) {
        node.toString(sb, printTypes);
        return false;
    }

    @Override
    public boolean enterBreakNode(final BreakNode node) {
        node.toString(sb, printTypes);
        return false;
    }

    @Override
    public boolean enterThrowNode(final ThrowNode node) {
        node.toString(sb, printTypes);
        return false;
    }

    @Override
    public boolean enterBlock(final Block block) {
        sb.append(' ');
        sb.append('{');

        indent += TABWIDTH;

        final List<Statement> statements = block.getStatements();

        printStatements(statements);

        indent -= TABWIDTH;
        sb.append(EOLN);
        indent();

        sb.append('}');

        return false;
    }

    private void printStatements(final List<Statement> statements) {
        for (final Statement statement : statements) {
            if (printLineNumbers) {
                final int lineNumber = statement.getLineNumber();
                sb.append(EOLN);
                if (lineNumber != lastLineNumber) {
                    indent();
                    sb.append("[|").append(lineNumber).append("|];").append(EOLN);
                }
                lastLineNumber = lineNumber;
            }
            indent();

            statement.accept(this);

            // Append a terminating ';' unless the statement already ends with
            // '}' or ';'. Scan backwards past trailing whitespace.
            // FIX: the bounds check now runs BEFORE charAt; the original
            // evaluated sb.charAt(--lastIndex) first, so an all-whitespace
            // buffer could index position -1 and throw
            // StringIndexOutOfBoundsException.
            int lastIndex = sb.length() - 1;
            while (lastIndex >= 0 && Character.isWhitespace(sb.charAt(lastIndex))) {
                lastIndex--;
            }
            // Treat an all-whitespace buffer as already terminated.
            final char lastChar = lastIndex >= 0 ? sb.charAt(lastIndex) : ';';

            if (lastChar != '}' && lastChar != ';') {
                sb.append(';');
            }

            if (statement.hasGoto()) {
                sb.append(" [GOTO]");
            }

            if (statement.isTerminal()) {
                sb.append(" [TERMINAL]");
            }
        }
    }

    @Override
    public boolean enterBlockStatement(final BlockStatement statement) {
        statement.getBlock().accept(this);
        return false;
    }

    @Override
    public boolean enterBinaryNode(final BinaryNode binaryNode) {
        binaryNode.getLhs().accept(this);
        sb.append(' ');
        sb.append(binaryNode.tokenType());
        sb.append(' ');
        binaryNode.getRhs().accept(this);
        return false;
    }

    @Override
    public boolean enterJoinPredecessorExpression(final JoinPredecessorExpression expr) {
        expr.getExpression().accept(this);
        return false;
    }

    @Override
    public boolean enterIdentNode(final IdentNode identNode) {
        identNode.toString(sb, printTypes);
        return true;
    }

    @Override
    public boolean enterUnaryNode(final UnaryNode unaryNode) {
        final TokenType tokenType = unaryNode.tokenType();
        final String name = tokenType.getName();
        final boolean isPostfix = tokenType == DECPOSTFIX || tokenType == INCPOSTFIX;

        // Parenthesize the operand when precedence requires it, or when the
        // operator has no printable name (internal operators).
        boolean rhsParen = tokenType.needsParens(unaryNode.getExpression().tokenType(), false);

        if (!isPostfix) {
            if (name == null) {
                sb.append(tokenType.name());
                rhsParen = true;
            } else {
                sb.append(name);

                // Keyword-like operators (typeof, void, ...) need a space.
                if (tokenType.ordinal() > BIT_NOT.ordinal()) {
                    sb.append(' ');
                }
            }
        }

        if (rhsParen) {
            sb.append('(');
        }
        unaryNode.getExpression().toString(sb, printTypes);
        if (rhsParen) {
            sb.append(')');
        }

        if (isPostfix) {
            sb.append(tokenType == DECPOSTFIX ? "--" : "++");
        }

        return false;
    }

    @Override
    public boolean enterExpressionStatement(final ExpressionStatement expressionStatement) {
        expressionStatement.getExpression().accept(this);
        return false;
    }

    @Override
    public boolean enterForNode(final ForNode forNode) {
        forNode.toString(sb, printTypes);
        forNode.getBody().accept(this);
        return false;
    }

    @Override
    public boolean enterFunctionNode(final FunctionNode functionNode) {
        functionNode.toString(sb, printTypes);
        enterBlock(functionNode.getBody());
        return false;
    }

    @Override
    public boolean enterIfNode(final IfNode ifNode) {
        ifNode.toString(sb, printTypes);
        ifNode.getPass().accept(this);

        final Block fail = ifNode.getFail();

        if (fail != null) {
            sb.append(" else ");
            fail.accept(this);
        }

        return false;
    }

    @Override
    public boolean enterLabelNode(final LabelNode labeledNode) {
        // Labels print one level left of the labeled body.
        indent -= TABWIDTH;
        indent();
        indent += TABWIDTH;
        labeledNode.toString(sb, printTypes);
        labeledNode.getBody().accept(this);
        return false;
    }

    @Override
    public boolean enterSwitchNode(final SwitchNode switchNode) {
        switchNode.toString(sb, printTypes);
        sb.append(" {");

        final List<CaseNode> cases = switchNode.getCases();

        for (final CaseNode caseNode : cases) {
            sb.append(EOLN);
            indent();
            caseNode.toString(sb, printTypes);
            indent += TABWIDTH;
            printStatements(caseNode.getStatements());
            indent -= TABWIDTH;
        }

        sb.append(EOLN);
        indent();
        sb.append("}");

        return false;
    }

    @Override
    public boolean enterTryNode(final TryNode tryNode) {
        tryNode.toString(sb, printTypes);
        tryNode.getBody().accept(this);

        for (final CatchNode catchNode : tryNode.getCatches()) {
            catchNode.toString(sb, printTypes);
            catchNode.getBody().accept(this);
        }

        final Block finallyBody = tryNode.getFinallyBody();

        if (finallyBody != null) {
            sb.append(" finally ");
            finallyBody.accept(this);
        }

        return false;
    }

    @Override
    public boolean enterVarNode(final VarNode varNode) {
        sb.append(varNode.tokenType().getName()).append(' ');
        varNode.getName().toString(sb, printTypes);

        final Node init = varNode.getInit();

        if (init != null) {
            sb.append(" = ");
            init.accept(this);
        }

        return false;
    }

    @Override
    public boolean enterWhileNode(final WhileNode whileNode) {
        if (whileNode.isDoWhile()) {
            sb.append("do");
            whileNode.getBody().accept(this);
            sb.append(' ');
            whileNode.toString(sb, printTypes);
        } else {
            whileNode.toString(sb, printTypes);
            whileNode.getBody().accept(this);
        }

        return false;
    }

    @Override
    public boolean enterWithNode(final WithNode withNode) {
        withNode.toString(sb, printTypes);
        withNode.getBody().accept(this);
        return false;
    }
}
|
Markus-Schwer/quarkus | extensions/qute/deployment/src/test/java/io/quarkus/qute/deployment/globals/TemplateGlobalTest.java | package io.quarkus.qute.deployment.globals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import javax.inject.Inject;
import org.jboss.shrinkwrap.api.asset.StringAsset;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;
import io.quarkus.qute.Qute;
import io.quarkus.qute.Template;
import io.quarkus.qute.TemplateGlobal;
import io.quarkus.test.QuarkusUnitTest;
/**
 * Verifies {@code @TemplateGlobal} resolution in Qute templates: globals
 * declared via annotated static fields and methods, name overrides, and
 * access from both file-based and inline (Qute.fmt) templates.
 */
public class TemplateGlobalTest {

    @RegisterExtension
    static final QuarkusUnitTest config = new QuarkusUnitTest()
            .withApplicationRoot(root -> root
                    .addClasses(Globals.class, NextGlobals.class)
                    .addAsResource(new StringAsset(
                            "Hello {currentUser}! Your name is {_name}. You're {age} years old."),
                            "templates/hello.txt"));

    @Inject
    Template hello;

    @Test
    public void testTemplateData() {
        assertEquals("Hello Fu! Your name is Lu. You're 40 years old.", hello.render());
        assertEquals("Hello Fu! Your name is Lu. You're 40 years old.",
                Qute.fmt("Hello {currentUser}! Your name is {_name}. You're {age} years old.").render());
        // Globals are resolved at render time, so mutating the static field
        // changes subsequent renders (and the age() derived global).
        Globals.user = "Hu";
        assertEquals("Hello Hu! Your name is Lu. You're 20 years old.", hello.render());
        assertEquals("Hello Hu! Your name is Lu. You're 20 years old.",
                Qute.fmt("Hello {currentUser}! Your name is {_name}. You're {age} years old.").render());
        assertEquals("First color is: RED", Qute.fmt("First color is: {colors[0]}").render());
    }

    public static class Globals {

        @TemplateGlobal(name = "currentUser")
        static String user = "Fu";

        // Derived global; depends on the current value of "user".
        @TemplateGlobal
        static int age() {
            return user.equals("Fu") ? 40 : 20;
        }
    }

    static enum Color {
        RED,
        GREEN,
        BLUE
    }

    @TemplateGlobal
    public static class NextGlobals {

        // field-level annotation overrides the class-level one
        @TemplateGlobal(name = "_name")
        static final String NAME = user();

        // this method is ignored
        private static String user() {
            return "Lu";
        }

        static Color[] colors() {
            return new Color[] { Color.RED, Color.BLUE };
        }
    }
}
|
yuriy-litvinenko/job4j | 02_junior/junior_chapter_01/src/main/java/ru/job4j/generic/AbstractStore.java | <filename>02_junior/junior_chapter_01/src/main/java/ru/job4j/generic/AbstractStore.java
package ru.job4j.generic;
/**
 * Base in-memory store of {@link Base} models backed by a fixed-capacity
 * {@link SimpleArray}, with linear-scan lookup by id.
 */
public abstract class AbstractStore<T extends Base> implements Store<T> {

    /** Backing storage for all models kept by this store. */
    private final SimpleArray<T> values;

    /**
     * @param size fixed capacity of the underlying array
     */
    AbstractStore(int size) {
        values = new SimpleArray<>(size);
    }

    /**
     * Returns the index of the model with the given id, or -1 when absent.
     * Centralizes the linear search that was previously duplicated in
     * replace(), delete() and findById().
     */
    private int indexOf(String id) {
        for (int index = 0; index != values.length(); index++) {
            if (values.get(index).getId().equals(id)) {
                return index;
            }
        }
        return -1;
    }

    @Override
    public void add(T model) {
        values.add(model);
    }

    @Override
    public boolean replace(String id, T model) {
        int index = indexOf(id);
        if (index != -1) {
            values.set(index, model);
            return true;
        }
        return false;
    }

    @Override
    public boolean delete(String id) {
        int index = indexOf(id);
        if (index != -1) {
            values.remove(index);
            return true;
        }
        return false;
    }

    @Override
    public T findById(String id) {
        int index = indexOf(id);
        return index != -1 ? values.get(index) : null;
    }
}
|
whispeer/whispeerNode | includes/mailer.js | <filename>includes/mailer.js
"use strict";
var nodemailer = require("nodemailer");
var User = require("./user");
var configManager = require("./configManager");
var config = configManager.get();
var client = require("./redisClient");
var socketDataCreator = require("./socketData");
var step = require("step");
var h = require("whispeerHelper");
var code = require("./session").code;
var fs = require("fs");
var mail = nodemailer.createTransport(config.mailType, config.mail);
var defaultFrom = config.mailFrom || "whispeer <<EMAIL>>";
var Bluebird = require("bluebird");
var readFile = Bluebird.promisify(fs.readFile, {
context: fs
});
var settingsAPI = require("./settings");
var errorService = require("./errorService");
//mail
//- <userid>
//-- mails set
//-- currentMail
//-- <mail>Verified 1
//-- <mail>Challenge <challenge>
var TEMPLATEDIR = "./mailTemplates/_build/";
var languages = ["en", "de"];
/**
 * Generates a 20-character challenge code that is globally unique:
 * the code is atomically added to the "mail:codes" redis set and, when it
 * was already a member (sadd returned something other than 1), generation
 * is retried recursively. Calls cb(err, challenge).
 */
function generateChallenge(cb) {
	var challenge;
	step(function () {
		code(20, this);
	}, h.sF(function (code) {
		challenge = code;
		client.sadd("mail:codes", challenge, this);
	}), h.sF(function (added) {
		if (added !== 1) {
			// collision with an existing code: try again
			generateChallenge(cb);
		} else {
			this.ne(challenge);
		}
	}), cb);
}
var mailer = {
	/**
	 * Determines whether we may send mail to the given address for this user.
	 * Resolves true when the address is verified (member of the user's
	 * "mail:<id>" redis set, or overwriteVerified is set) AND the user's
	 * server-side mailsEnabled setting is on (or overwrite is set).
	 * Calls cb(err, activated).
	 */
	isMailActivatedForUser: function (user, mail, cb, overwrite, overwriteVerified) {
		step(function () {
			this.parallel.unflatten();

			client.sismember("mail:" + user.getID(), mail, this.parallel());
			settingsAPI.getUserSettings(user.getID(), this.parallel());
		}, h.sF(function (verified, settings) {
			this.ne((verified || overwriteVerified) && (settings.server.mailsEnabled || overwrite));
		}), cb);
	},
	/**
	 * Generates a 20-character code used for mail tracking.
	 * Calls cb(err, code).
	 * NOTE(review): the `variables` argument is currently unused — confirm
	 * whether the tracking code was meant to be derived from or stored with it.
	 */
	generateTrackingCode: function (variables, cb) {
		step(function () {
			code(20, this);
		}, h.sF(function (resultCode) {
			this.ne(resultCode);
		}), cb);
	},
	/**
	 * Redeems a mail verification challenge.
	 * Looks up the challenge hash, loads the referenced user and checks that
	 * the user's current e-mail still matches the one stored alongside the
	 * challenge. On success the address is marked verified, the challenge is
	 * consumed and the user's server-side "mailsEnabled" setting is updated
	 * to the given value. Calls cb(err, success) with success true/false.
	 */
	verifyUserMail: function (challenge, mailsEnabled, cb) {
		var challengeData;
		step(function () {
			client.hgetall("mail:challenges:" + challenge, this);
		}, h.sF(function (data) {
			if (data) {
				challengeData = data;
				User.getUser(challengeData.user, this);
			} else {
				// unknown or expired challenge
				this.last.ne(false);
			}
		}), h.sF(function (user) {
			if (user && user.getID() === h.parseDecimal(challengeData.user)) {
				user.getEMail(socketDataCreator.logedinStub, this);
			} else {
				this.last.ne(false);
			}
		}), h.sF(function (userMail) {
			if (userMail === challengeData.mail) {
				settingsAPI.updateServer(challengeData.user, "mailsEnabled", mailsEnabled, this.parallel());

				// mark verified and consume the challenge atomically
				client.multi()
					.sadd("mail:" + challengeData.user, challengeData.mail)
					.srem("mail:codes", challenge)
					.del("mail:challenges:" + challenge)
					.exec(this.parallel());
			} else {
				// the user changed their address since the challenge was issued
				this.last.ne(false);
			}
		}), h.sF(function () {
			this.ne(true);
		}), cb);
	},
	/**
	 * Starts e-mail verification for a user: creates a unique challenge,
	 * stores {user, mail} under "mail:challenges:<challenge>" with a 7-day
	 * expiry and sends the "verification" template. Both overwrite flags are
	 * passed to sendUserMail because the address is not verified yet.
	 * Resolves with nothing when the user has no e-mail on record.
	 */
	sendAcceptMail: function (user, cb) {
		var challenge;
		step(function () {
			generateChallenge(this);
		}, h.sF(function (code) {
			challenge = code;
			user.getEMail(socketDataCreator.logedinStub, this);
		}), h.sF(function (userMail) {
			if (userMail) {
				var m = client.multi();
				m
					.hmset("mail:challenges:" + challenge, {
						user: user.getID(),
						mail: userMail
					})
					.expire("mail:challenges:" + challenge, 7*24*60*60);

				mailer.sendUserMail(user, "verification", {
					challenge: challenge
				}, this.parallel(), true, true);

				m.exec(this.parallel());
			} else {
				// no address to verify
				this.last.ne();
			}
		}), cb);
	},
	/**
	 * Sends notification mails (template "interaction-<type>-<subType>", with
	 * generic fallbacks) to the given users. Unless
	 * options.sendMailWhileOnline is set, only users who are offline and not
	 * yet in the "mail:notifiedUsers" redis set receive a mail; notified
	 * users are added to that set before sending. Returns a bluebird promise.
	 */
	sendInteractionMails: function (users, type, subType, interactionObj, options) {
		var sendUserMail = Bluebird.promisify(mailer.sendUserMail, {
			context: mailer
		});

		console.log("sending interaction mail to users: " + users.map(function (user) {
			return user.getID();
		}));

		return Bluebird.resolve(users).filter(function (user) {
			if (options && options.sendMailWhileOnline) {
				return true;
			}

			var isOnline = Bluebird.promisify(user.isOnline, {
				context: user
			});

			return Bluebird.all([
				client.sismemberAsync("mail:notifiedUsers", user.getID()),
				isOnline()
			]).spread(function (alreadyNotified, isOnline) {
				console.log("User " + user.getID() + " mail status: " + alreadyNotified + " - " + isOnline);
				return !isOnline && !alreadyNotified;
			});
		}).each(function (user) {
			// remember the notification before sending so a user is mailed at most once
			return client.saddAsync("mail:notifiedUsers", user.getID()).then(function () {
				return sendUserMail(user, ["interaction", type, subType], interactionObj);
			});
		});
	},
	/**
	 * Returns an error handler used as a readFile catch: on failure it drops
	 * the last (most specific) segment of the template name and retries the
	 * more generic "a-b-c" -> "a-b" -> "a" name, until only one segment is
	 * left, in which case the error is passed through.
	 * NOTE(review): returning `e` from a .catch RESOLVES the chain with the
	 * error value instead of rejecting — downstream code receives the Error
	 * object as if it were file contents. Verify this is intended.
	 */
	tryNextTemplate: function (templateName, language) {
		return function (e) {
			if (templateName.length === 1) {
				return e;
			}

			if (e) {
				console.log("unable to find matching template:" + templateName.join("-"));
				templateName.pop();
			}

			return readFile(TEMPLATEDIR + language + "/" + templateName.join("-") + ".html").catch(mailer.tryNextTemplate(templateName, language));
		};
	},
	/**
	 * Loads a template file for the given language.
	 * A string name is read directly; an array name is treated as segments
	 * joined by "-" and resolved most-specific-first (see tryNextTemplate).
	 * Calls cb(err, fileContents).
	 */
	getCorrectTemplate: function (templateName, language, cb) {
		var resultPromise;

		if (typeof templateName === "string") {
			resultPromise = readFile(TEMPLATEDIR + language + "/" + templateName + ".html");
		} else {
			resultPromise = mailer.tryNextTemplate(templateName, language)();
		}

		return step.unpromisify(resultPromise, cb);
	},
fillTemplate: function (templateName, variables, cb) {
step(function () {
var language = variables.language;
if (languages.indexOf(language) === -1) {
language = languages[0];
}
this.parallel.unflatten();
mailer.getCorrectTemplate(templateName, language, this.parallel());
mailer.generateTrackingCode(variables, this.parallel());
}, h.sF(function (content, trackingCode) {
content = content.toString();
variables.host = variables.host || config.remoteHost || config.host;
variables.server = config.serverUrl;
variables.tracking = trackingCode;
var inExpression = false;
var sawFirstBracket = false;
var result = "";
var expression = "";
var vm = require("vm");
for (var i = 0; i < content.length; i++) {
if (inExpression) {
if (content[i] === "]" && content[i+1] === "]") {
result += vm.runInNewContext(expression, variables);
inExpression = false;
expression = "";
i += 1;
} else {
expression += content[i];
}
} else if (content[i] === "[" && content[i+1] === "[") {
inExpression = true;
i += 1;
} else {
result += content[i];
sawFirstBracket = false;
}
}
var cheerio = require("cheerio"),
element = cheerio.load(result);
var subject = element("title").text();
this.ne(result, subject);
}), cb);
},
	/**
	 * Renders templateName and mails it to the given user, provided mails
	 * are activated for that user (see isMailActivatedForUser;
	 * overwriteActive / overwriteVerified bypass the respective checks).
	 * Fills variables.name from the user's names and variables.language from
	 * the user's settings before rendering. Calls cb(err, sent).
	 */
	sendUserMail: function (user, templateName, variables, cb, overwriteActive, overwriteVerified) {
		console.log("Sending mail to user: " + user.getID());
		var receiver;
		step(function () {
			this.parallel.unflatten();

			user.getEMail(socketDataCreator.logedinStub, this.parallel());
			settingsAPI.getUserSettings(user.getID(), this.parallel());
			user.getNames(socketDataCreator.logedinStub, this.parallel());
		}, h.sF(function (_receiver, settings, names) {
			variables.name = names.firstName || names.lastName || names.nickname;

			if (settings && settings.meta) {
				variables.language = settings.meta.uiLanguage || settings.meta.initialLanguage;
			}

			receiver = _receiver;
			mailer.isMailActivatedForUser(user, receiver, this, overwriteActive, overwriteVerified);
		}), h.sF(function (activated) {
			if (activated) {
				mailer.sendMail(receiver, templateName, variables, this);
			} else {
				console.log("Mail not activated for user: " + user.getID());
				this.last.ne(false);
			}
		}), cb);
	},
sendMail: function (receiverAddress, templateName, variables, cb) {
step(function () {
mailer.fillTemplate(templateName, variables, this);
}, h.sF(function (content, subject) {
mail.sendMail({
to: receiverAddress,
from: defaultFrom,
subject: subject,
html: content,
generateTextFromHTML: true
}, errorService.criticalError);
this.ne(true)
}), cb);
},
mailSupport: (subject, text) => {
var mailOptions = {
from: defaultFrom,
to: "<EMAIL>",
subject: subject.toString(),
text: text.toString()
}
return Bluebird.fromCallback((cb) => mail.sendMail(mailOptions, cb))
},
	/**
	 * Sends a plain-text mail to the admin address.
	 * Returns a promise; also invokes cb node-style when given (nodeify).
	 */
	mailAdmin: function (subject, text, cb) {
		var mailOptions = {
			from: defaultFrom,
			to: "<EMAIL>",
			subject: subject.toString(),
			text: text.toString()
		};

		var sendMailAsync = Bluebird.promisify(mail.sendMail, {
			context: mail
		});

		return sendMailAsync(mailOptions).nodeify(cb);
	}
};
// Notify the admin that the server booted. A failing transport must not take
// down startup, so the rejection is routed to the error service instead of
// becoming an unhandled promise rejection.
mailer.mailAdmin("Server Booted", "Test Mail to Display Server Bootup").catch(errorService.criticalError);
module.exports = mailer;
|
jercas/offer66-leetcode-newcode | toTheMoon/leetcode_016_ThreeSumClosest.py | <reponame>jercas/offer66-leetcode-newcode
# -*- coding: utf-8 -*-
"""
Created on Thu May 16 17:15:35 2019
@author: jercas
"""
"""
leetcode-16: 最接近三数之和 MEDIUM
'数组' '双指针'
给定一个包括 n 个整数的数组 nums 和 一个目标值 target。找出 nums 中的三个整数,使得它们的和与 target 最接近。返回这三个数的和。
假定每组输入只存在唯一答案。
"""
"""
Thinking:
0: 类同No.15 ThreeSum, 对输入数组的遍历方式一样,只需变换逻辑判断由 ==0 -》 closest to target即可
且要求输出的最相近的值,而非三个数的数组,也不需要考虑No.15中重复数组情况的问题。
"""
class Solution(object):
    def threeSumClosest(self, nums, target):
        """Return the sum of three integers in ``nums`` closest to ``target``.

        :type nums: List[int]
        :type target: int
        :rtype: int

        Sorts the input in place, then for every anchor index scans the
        remaining suffix with two converging pointers, keeping the candidate
        sum whose absolute distance to ``target`` is smallest.
        O(n^2) time, O(1) extra space beyond the sort.
        """
        nums.sort()
        n = len(nums)
        best = 0
        best_gap = float('inf')

        for first in range(n - 2):
            # Equal anchors produce the same candidate sums; skip repeats.
            if first > 0 and nums[first] == nums[first - 1]:
                continue
            lo, hi = first + 1, n - 1
            while lo < hi:
                total = nums[first] + nums[lo] + nums[hi]
                gap = total - target
                if abs(gap) < best_gap:
                    best_gap = abs(gap)
                    best = total
                if gap == 0:
                    # Exact hit: nothing can be closer.
                    return best
                if gap < 0:
                    lo += 1   # sum too small -> grow it
                else:
                    hi -= 1   # sum too large -> shrink it
        return best
if __name__ == "__main__":
    # Smoke test with the classic LeetCode example.
    query, target = [-1, 2, 1, -4], 1
    expected = 2
    if Solution().threeSumClosest(query, target) == expected:
        print("The cloest three sum result of {0} to {1} is {2}".format(query, target, expected))
        print("AC")
jhullfly/BetterFriendsServer | public/modules/core/controllers/verifying.client.controller.js | <reponame>jhullfly/BetterFriendsServer<gh_stars>0
/* global angular */
'use strict';
// Controller backing the e-mail verification screen: on instantiation it
// immediately submits the confirmation code taken from the route parameters
// to the Auth service, redirecting to 'home' on success and exposing any
// failure message on the scope for the view to display.
angular.module('core').controller('VerifyingController', ['$scope', '$state', '$stateParams', 'Auth',
	function($scope, $state, $stateParams, Auth) {
		Auth.register($stateParams.confirmCode).then(function(result) {
			if (result.success) {
				$state.go('home');
			} else {
				$scope.errorMessage = result.message;
			}
		}, function (err) {
			// transport/server error: show the raw error for diagnosis
			$scope.errorMessage = JSON.stringify(err);
		});
	}
]);
bobvodka/GameTextureLoaderV1 | src/Devices/DeviceProxy.cpp | // Code for the device proxy class
#include "DeviceBase.hpp"
#include <gtl/DeviceProxy.hpp>
namespace GameTextureLoader
{
namespace Devices
{
		// Reads up to n characters into s; forwards to the wrapped device.
		std::streamsize DeviceProxy::read(char* s, std::streamsize n)
		{
			return dev_->read(s,n);
		}
		// Writes up to n characters from s; forwards to the wrapped device.
		std::streamsize DeviceProxy::write(const char* s, std::streamsize n)
		{
			return dev_->write(s,n);
		}
		// Repositions the stream (offset relative to `way`); forwards to the
		// wrapped device.
		std::streampos DeviceProxy::seek(io::stream_offset off, std::ios_base::seekdir way)
		{
			return dev_->seek(off,way);
		}
}
} |
iselagb/ui-circulation | test/bigtest/tests/circulation-rules-editor/pane-ui-test.js | import {
beforeEach,
describe,
it,
afterEach,
} from '@bigtest/mocha';
import { expect } from 'chai';
import { kebabCase } from 'lodash';
import { Response } from 'miragejs';
import setupApplication from '../../helpers/setup-application';
import circulationRules from '../../interactors/circulation-rules-editor/pane';
import {
showHintsWithAttachedCustomKeysHandlers,
removeDisplayedHints,
} from './utils';
// BigTest suite for the circulation rules editor pane: exercises the
// CodeMirror-backed editor, hint insertion, saving and server-side
// validation errors against a Mirage mock server.
describe('Circulation rules editor: pane UI', () => {
  setupApplication();

  let loanPolicies;
  let requestPolicies;
  let patronNoticePolicies;
  let overdueFinePolicy;
  let lostItemFeePolicy;

  beforeEach(async function () {
    // Start from an empty rule set so each test fully controls the editor.
    this.server.get('/circulation/rules', {
      'id' : '4c70f818-2edc-4cf8-aa27-16c14c5c7b58',
      'rulesAsText': ''
    });

    loanPolicies = await this.server.createList('loanPolicy', 3);
    requestPolicies = await this.server.createList('requestPolicy', 3);
    patronNoticePolicies = await this.server.createList('patronNoticePolicy', 3);
    overdueFinePolicy = await this.server.createList('overdueFinePolicy', 3);
    lostItemFeePolicy = await this.server.createList('lostItemFeePolicy', 3);

    return this.visit('/settings/circulation/rules', () => {
      expect(circulationRules.$root).to.exist;
    });
  });

  afterEach(() => {
    // hints are rendered outside the app root and must be cleaned up manually
    removeDisplayedHints();
  });

  it('should have rules form', () => {
    expect(circulationRules.formPresent).to.be.true;
  });

  it('should have rules editor', () => {
    expect(circulationRules.editorPresent).to.be.true;
  });

  it('should have disabled save button when there is no changes', () => {
    expect(circulationRules.isSaveButtonDisabled).to.be.true;
  });

  describe('rules filtering', () => {
    beforeEach(async () => {
      await circulationRules.filter('term');
    });

    it('should not break the editor', () => {
      expect(circulationRules.editorPresent).to.be.true;
    });
  });

  describe('pressing Tab without selected hints', () => {
    beforeEach(async () => {
      await circulationRules.editor.textArea.focus();
      await circulationRules.editor.pressTab();
    });

    it('should a add tab symbol', () => {
      expect(circulationRules.editor.value).to.equal('\t');
    });
  });

  describe('pressing Enter button without selected hints', () => {
    beforeEach(async () => {
      await circulationRules.editor.textArea.focus();
      await circulationRules.editor.pressEnter();
    });

    it('should add a new line symbol', () => {
      expect(circulationRules.editor.value).to.equal('\n');
    });
  });

  describe('inserting circulation rule into focused editor', () => {
    beforeEach(async () => {
      await showHintsWithAttachedCustomKeysHandlers(circulationRules.editor, 'm book: l example-loan-policy');
    });

    it('should update the editor value', () => {
      expect(circulationRules.editor.value).to.equal('m book: l example-loan-policy');
    });

    it('should enable save button', () => {
      expect(circulationRules.isSaveButtonDisabled).to.be.false;
    });
  });

  describe('saving circulation rules', () => {
    let savedRules;
    let lPolicy;
    let rPolicy;
    let nPolicy;
    let oPolicy;
    let iPolicy;
    let lName;
    let rName;
    let nName;
    let oName;
    let iName;

    beforeEach(async function () {
      lPolicy = loanPolicies[0];
      rPolicy = requestPolicies[0];
      nPolicy = patronNoticePolicies[0];
      oPolicy = overdueFinePolicy[0];
      iPolicy = lostItemFeePolicy[0];

      // rules text references policies by kebab-cased display name
      lName = kebabCase(lPolicy.name);
      rName = kebabCase(rPolicy.name);
      nName = kebabCase(nPolicy.name);
      oName = kebabCase(oPolicy.name);
      iName = kebabCase(iPolicy.name);

      this.server.put('/circulation/rules', (_, request) => {
        const params = JSON.parse(request.requestBody);

        // capture what the app actually sent to the server
        savedRules = params.rulesAsText;

        return params;
      });

      await circulationRules.editor.setValue(`m book dvd: l ${lName} r ${rName} n ${nName} o ${oName} i ${iName}`);
      await circulationRules.clickSaveRulesBtn();
    });

    it('should choose loan policy as a fallback', () => {
      // saved text has names replaced with ids (fixed material-type uuids)
      expect(savedRules).to.equal(`m 1a54b431-2e4f-452d-9cae-9cee66c9a892 5ee11d91-f7e8-481d-b079-65d708582ccc: l ${lPolicy.id} r ${rPolicy.id} n ${nPolicy.id} o ${oPolicy.id} i ${iPolicy.id}`);
    });

    describe('changing circulation rules', () => {
      beforeEach(async function () {
        await circulationRules.editor.setValue(`\nm book text: l ${lName} r ${rName} n ${nName}`, { append: true });
      });

      it('should enable save button', () => {
        expect(circulationRules.isSaveButtonDisabled).to.be.false;
      });

      describe('clicking save button', () => {
        beforeEach(async function () {
          await circulationRules.clickSaveRulesBtn();
        });

        it('should disable save button upon successful save', () => {
          expect(circulationRules.isSaveButtonDisabled).to.be.true;
        });
      });
    });
  });

  describe('saving invalid circulation rules', () => {
    const errorMessage = 'mismatched input \' \' expecting {\'priority\', NEWLINE}';

    beforeEach(async function () {
      this.server.put('/circulation/rules', () => {
        return new Response(422, {}, {
          'message' : errorMessage,
          'line' : 1,
          'column' : 1,
        });
      });

      await circulationRules.editor.setValue(' ');
      await circulationRules.clickSaveRulesBtn();
    });

    it('should display formatted error message', () => {
      expect(circulationRules.editor.errorMessage.isPresent).to.be.true;
      expect(circulationRules.editor.errorMessage.text).to.equal(errorMessage);
    });
  });
});
|
SpeedOfMagic/CompetitiveProgramming | SoundHound/18-E.cpp | /** MIT License Copyright (c) 2018 <NAME> **/
#include <bits/stdc++.h>
using namespace std;
#pragma GCC optimize("Ofast")
template<typename T> using v = vector<T>;
#define int long long
typedef long double ld;
typedef string str;
typedef vector<int> vint;
#define rep(a, l, r) for(int a = (l); a < (r); a++)
#define pb push_back
#define sz(a) ((int) a.size())
const long long inf = 4611686018427387903; //2^62 - 1
#if 0 //FileIO
const string fileName = "";
ifstream fin ((fileName == "" ? "input.txt" : fileName + ".in" ));
ofstream fout((fileName == "" ? "output.txt" : fileName + ".out"));
#define get fin>>
#define put fout<<
#else
#define get cin>>
#define put cout<<
#endif
#define eol put endl
#define check(a) put #a << ": " << a << endl;
void read() {} template<typename Arg,typename... Args> void read (Arg& arg,Args&... args){get (arg) ;read(args...) ;}
void print(){} template<typename Arg,typename... Args> void print(Arg arg,Args... args){put (arg)<<" ";print(args...);}
void debug(){eol;} template<typename Arg,typename... Args> void debug(Arg arg,Args... args){put (arg)<<" ";debug(args...);}
int getInt(){int a; get a; return a;}
//code goes here
const int N = 1e5 + 1;
v<vint> g[N];
int l[N], r[N];
int k[N], m[N]; //y = kx + m
int onlyOne = -2;
bool vis[N];
// First pass: express every vertex value as val[v] = k[v]*x + m[v], where x
// is the (unknown) value of vertex 1 and k is +1 or -1. An edge (prev,cur)
// of weight w forces val[prev] + val[cur] = w. l[v]/r[v] record per-vertex
// bounds on x keeping values inside [1, 1e9]. When a revisited vertex gets
// a different linear form, x is either pinned to a single value (stored in
// onlyOne) or the system is infeasible (onlyOne = -1); onlyOne == -2 means
// "x still free". vis[] is indexed by EDGE id in this pass.
void dfs(int cur = 1, int prev = -1, int prevWeight = 0) {
	if (onlyOne != -2)
		return;
	if (prev == -1) {
		// root: val = 1*x + 0, x in [1, 1e9]
		l[cur] = 1;
		r[cur] = 1e9;
		k[cur] = 1;
		m[cur] = 0;
	} else {
		// form implied by the edge: val = -k[prev]*x + (w - m[prev])
		int pk = -k[prev];
		int pm = prevWeight - m[prev];
		if (l[cur] != -1 && (pm != m[cur] || pk != k[cur])) {
			// conflicting forms: solve k*x + m = pk*x + pm for integer x
			if (pk == k[cur] || (pm - m[cur]) % (k[cur] - pk))
				onlyOne = -1;
			else
				onlyOne = (pm - m[cur]) / (k[cur] - pk);
			return;
		} else if (l[cur] != -1)
			return;
		else {
			k[cur] = pk;
			m[cur] = pm;
			l[cur] = 1;
			r[cur] = 1e9;
			// tighten the bounds so the derived values stay >= 1
			if (m[cur] <= 0 && k[cur] < 0) {
				onlyOne = -1;
				return;
			} else if (m[cur] <= 0 && k[cur] > 0)
				l[cur] = -m[cur] + 1;
			else if (m[cur] > 0 && k[cur] < 0)
				r[cur] = m[cur] - 1;
			else
				r[cur] = prevWeight - m[cur] - 1;
		}
	}

	for (vint i : g[cur])
		if (!vis[i[2]]) {
			vis[i[2]] = 1;
			dfs(i[0], cur, i[1]);
		}
}
int val[N];
bool valid = 1;
// Second pass, used when x is pinned: propagates concrete values val[] from
// the root (each tree edge forces val[cur] = w - val[prev]) and clears
// `valid` whenever a value conflicts, becomes non-positive, or a non-tree
// edge is violated. vis[] is indexed by VERTEX in this pass (unlike dfs()).
void dfs2(int cur = 1, int prev = -1, int prevWeight = 0) {
	vis[cur] = 1;
	if (!valid)
		return;
	if (prev != -1) {
		if (val[cur] != -inf && val[cur] != prevWeight - val[prev]) {
			// revisited vertex disagrees with the new constraint
			valid = 0;
			return;
		}
		val[cur] = prevWeight - val[prev];
	}
	if (val[cur] <= 0) {
		// all values must be strictly positive
		valid = 0;
		return;
	}
	for (vint i : g[cur]) {
		if (!vis[i[0]])
			dfs2(i[0], cur, i[1]);
		else {
			// back/cross edge: both endpoints valued, check it is satisfied
			if (val[i[0]] + val[cur] != i[1]) {
				valid = 0;
				return;
			}
		}
	}
}
// Reads the graph (n vertices, 1-indexed; M weighted edges), derives the
// linear constraints with dfs(), then:
//  - if no value is forced (onlyOne == -2), the answer is the number of
//    integer root values in the intersection of all [l, r] bounds;
//  - otherwise the single candidate (onlyOne) is validated with dfs2() and
//    the answer is 1 or 0.
void run() {
	int n, M;
	read(n, M);
	n++; // vertices are 1-indexed
	rep(i, 0, M) {
		int v, u, s;
		read(v, u, s);
		g[v].pb({u, s, i});
		g[u].pb({v, s, i});
	}

	rep(i, 0, M)
		vis[i] = 0; // per-edge visited flags for dfs()
	rep(i, 1, n)
		l[i] = -1; // "no linear form assigned yet"
	dfs();

	if (onlyOne == -2) {
		int L = 1, R = 1e9;
		rep(i, 1, n)
			L = max(L, l[i]),
			R = min(R, r[i]);

		if (L > R)
			put 0;
		else
			put R - L + 1;
	} else {
		rep(i, 1, n)
			vis[i] = 0; // reused as per-vertex flags for dfs2()
		rep(i, 1, n)
			val[i] = -inf;
		val[1] = onlyOne;
		if (onlyOne <= 0)
			put 0; // infeasible (-1) or non-positive forced value
		else {
			dfs2();
			put valid;
		}
	}
}
// Entry point: fast-IO setup, fixed 15-digit float output, then run().
int32_t main() {srand(time(0)); ios::sync_with_stdio(0); cin.tie(0); cout.tie(0); put fixed; put setprecision(15); run(); return 0;}
|
antkmsft/azure-sdk-for-go | services/authorization/mgmt/2020-10-01/authorization/roleassignmentschedulerequests.go | package authorization
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
import (
"context"
"github.com/Azure/go-autorest/autorest"
"github.com/Azure/go-autorest/autorest/azure"
"github.com/Azure/go-autorest/autorest/validation"
"github.com/Azure/go-autorest/tracing"
"net/http"
)
// RoleAssignmentScheduleRequestsClient is the client for the RoleAssignmentScheduleRequests methods of the
// Authorization service.
type RoleAssignmentScheduleRequestsClient struct {
	BaseClient // shared pipeline/configuration embedded from the package's base client
}
// NewRoleAssignmentScheduleRequestsClient creates an instance of the RoleAssignmentScheduleRequestsClient client.
// NewRoleAssignmentScheduleRequestsClient creates an instance of the RoleAssignmentScheduleRequestsClient client.
func NewRoleAssignmentScheduleRequestsClient(subscriptionID string) RoleAssignmentScheduleRequestsClient {
	// delegates to the BaseURI variant with the package default endpoint
	return NewRoleAssignmentScheduleRequestsClientWithBaseURI(DefaultBaseURI, subscriptionID)
}
// NewRoleAssignmentScheduleRequestsClientWithBaseURI creates an instance of the RoleAssignmentScheduleRequestsClient
// client using a custom endpoint. Use this when interacting with an Azure cloud that uses a non-standard base URI
// (sovereign clouds, Azure stack).
// NewRoleAssignmentScheduleRequestsClientWithBaseURI creates an instance of the RoleAssignmentScheduleRequestsClient
// client using a custom endpoint. Use this when interacting with an Azure cloud that uses a non-standard base URI
// (sovereign clouds, Azure stack).
func NewRoleAssignmentScheduleRequestsClientWithBaseURI(baseURI string, subscriptionID string) RoleAssignmentScheduleRequestsClient {
	return RoleAssignmentScheduleRequestsClient{NewWithBaseURI(baseURI, subscriptionID)}
}
// Cancel cancels a pending role assignment schedule request.
// Parameters:
// scope - the scope of the role assignment request to cancel.
// roleAssignmentScheduleRequestName - the name of the role assignment request to cancel.
// Cancel cancels a pending role assignment schedule request.
// Generated prepare/send/respond pipeline; errors are wrapped with the failing stage.
// Parameters:
// scope - the scope of the role assignment request to cancel.
// roleAssignmentScheduleRequestName - the name of the role assignment request to cancel.
func (client RoleAssignmentScheduleRequestsClient) Cancel(ctx context.Context, scope string, roleAssignmentScheduleRequestName string) (result autorest.Response, err error) {
	if tracing.IsEnabled() {
		ctx = tracing.StartSpan(ctx, fqdn+"/RoleAssignmentScheduleRequestsClient.Cancel")
		defer func() {
			sc := -1 // -1 marks "no HTTP response received" in the span
			if result.Response != nil {
				sc = result.Response.StatusCode
			}
			tracing.EndSpan(ctx, sc, err)
		}()
	}
	req, err := client.CancelPreparer(ctx, scope, roleAssignmentScheduleRequestName)
	if err != nil {
		err = autorest.NewErrorWithError(err, "authorization.RoleAssignmentScheduleRequestsClient", "Cancel", nil, "Failure preparing request")
		return
	}

	resp, err := client.CancelSender(req)
	if err != nil {
		result.Response = resp
		err = autorest.NewErrorWithError(err, "authorization.RoleAssignmentScheduleRequestsClient", "Cancel", resp, "Failure sending request")
		return
	}

	result, err = client.CancelResponder(resp)
	if err != nil {
		err = autorest.NewErrorWithError(err, "authorization.RoleAssignmentScheduleRequestsClient", "Cancel", resp, "Failure responding to request")
		return
	}

	return
}
// CancelPreparer prepares the Cancel request.
// CancelPreparer prepares the Cancel request.
func (client RoleAssignmentScheduleRequestsClient) CancelPreparer(ctx context.Context, scope string, roleAssignmentScheduleRequestName string) (*http.Request, error) {
	pathParameters := map[string]interface{}{
		"roleAssignmentScheduleRequestName": autorest.Encode("path", roleAssignmentScheduleRequestName),
		// scope is not URL-encoded: it is a pre-formed ARM resource path
		// that contains '/' separators
		"scope": scope,
	}

	const APIVersion = "2020-10-01"
	queryParameters := map[string]interface{}{
		"api-version": APIVersion,
	}

	preparer := autorest.CreatePreparer(
		autorest.AsPost(),
		autorest.WithBaseURL(client.BaseURI),
		autorest.WithPathParameters("/{scope}/providers/Microsoft.Authorization/roleAssignmentScheduleRequests/{roleAssignmentScheduleRequestName}/cancel", pathParameters),
		autorest.WithQueryParameters(queryParameters))
	return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// CancelSender sends the Cancel request. The method will close the
// http.Response Body if it receives an error.
// CancelSender sends the Cancel request. The method will close the
// http.Response Body if it receives an error. Retries per the client's
// configured attempts/duration on retryable status codes.
func (client RoleAssignmentScheduleRequestsClient) CancelSender(req *http.Request) (*http.Response, error) {
	return client.Send(req, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))
}
// CancelResponder handles the response to the Cancel request. The method always
// closes the http.Response Body.
// CancelResponder handles the response to the Cancel request. The method always
// closes the http.Response Body. Only HTTP 200 is treated as success; no body
// is unmarshalled.
func (client RoleAssignmentScheduleRequestsClient) CancelResponder(resp *http.Response) (result autorest.Response, err error) {
	err = autorest.Respond(
		resp,
		azure.WithErrorUnlessStatusCode(http.StatusOK),
		autorest.ByClosing())
	result.Response = resp
	return
}
// Create creates a role assignment schedule request.
// Parameters:
// scope - the scope of the role assignment schedule request to create. The scope can be any REST resource
// instance. For example, use '/subscriptions/{subscription-id}/' for a subscription,
// '/subscriptions/{subscription-id}/resourceGroups/{resource-group-name}' for a resource group, and
// '/subscriptions/{subscription-id}/resourceGroups/{resource-group-name}/providers/{resource-provider}/{resource-type}/{resource-name}'
// for a resource.
// roleAssignmentScheduleRequestName - a GUID for the role assignment to create. The name must be unique and
// different for each role assignment.
// parameters - parameters for the role assignment schedule request.
// Create creates a role assignment schedule request.
// Parameters:
// scope - the scope of the role assignment schedule request to create. The scope can be any REST resource
// instance (subscription, resource group or resource path).
// roleAssignmentScheduleRequestName - a GUID for the role assignment to create. The name must be unique and
// different for each role assignment.
// parameters - parameters for the role assignment schedule request.
func (client RoleAssignmentScheduleRequestsClient) Create(ctx context.Context, scope string, roleAssignmentScheduleRequestName string, parameters RoleAssignmentScheduleRequest) (result RoleAssignmentScheduleRequest, err error) {
	if tracing.IsEnabled() {
		ctx = tracing.StartSpan(ctx, fqdn+"/RoleAssignmentScheduleRequestsClient.Create")
		defer func() {
			sc := -1 // -1 marks "no HTTP response received" in the span
			if result.Response.Response != nil {
				sc = result.Response.Response.StatusCode
			}
			tracing.EndSpan(ctx, sc, err)
		}()
	}
	// Client-side validation: when properties are supplied, RoleDefinitionID
	// and PrincipalID are required.
	if err := validation.Validate([]validation.Validation{
		{TargetValue: parameters,
			Constraints: []validation.Constraint{{Target: "parameters.RoleAssignmentScheduleRequestProperties", Name: validation.Null, Rule: false,
				Chain: []validation.Constraint{{Target: "parameters.RoleAssignmentScheduleRequestProperties.RoleDefinitionID", Name: validation.Null, Rule: true, Chain: nil},
					{Target: "parameters.RoleAssignmentScheduleRequestProperties.PrincipalID", Name: validation.Null, Rule: true, Chain: nil},
				}}}}}); err != nil {
		return result, validation.NewError("authorization.RoleAssignmentScheduleRequestsClient", "Create", err.Error())
	}

	req, err := client.CreatePreparer(ctx, scope, roleAssignmentScheduleRequestName, parameters)
	if err != nil {
		err = autorest.NewErrorWithError(err, "authorization.RoleAssignmentScheduleRequestsClient", "Create", nil, "Failure preparing request")
		return
	}

	resp, err := client.CreateSender(req)
	if err != nil {
		result.Response = autorest.Response{Response: resp}
		err = autorest.NewErrorWithError(err, "authorization.RoleAssignmentScheduleRequestsClient", "Create", resp, "Failure sending request")
		return
	}

	result, err = client.CreateResponder(resp)
	if err != nil {
		err = autorest.NewErrorWithError(err, "authorization.RoleAssignmentScheduleRequestsClient", "Create", resp, "Failure responding to request")
		return
	}

	return
}
// CreatePreparer prepares the Create request.
// CreatePreparer prepares the Create request.
func (client RoleAssignmentScheduleRequestsClient) CreatePreparer(ctx context.Context, scope string, roleAssignmentScheduleRequestName string, parameters RoleAssignmentScheduleRequest) (*http.Request, error) {
	pathParameters := map[string]interface{}{
		"roleAssignmentScheduleRequestName": autorest.Encode("path", roleAssignmentScheduleRequestName),
		// scope is not URL-encoded: it is a pre-formed ARM resource path
		"scope": scope,
	}

	const APIVersion = "2020-10-01"
	queryParameters := map[string]interface{}{
		"api-version": APIVersion,
	}

	// Server-assigned fields are cleared so they are not serialized into the
	// request body.
	parameters.ID = nil
	parameters.Name = nil
	parameters.Type = nil
	preparer := autorest.CreatePreparer(
		autorest.AsContentType("application/json; charset=utf-8"),
		autorest.AsPut(),
		autorest.WithBaseURL(client.BaseURI),
		autorest.WithPathParameters("/{scope}/providers/Microsoft.Authorization/roleAssignmentScheduleRequests/{roleAssignmentScheduleRequestName}", pathParameters),
		autorest.WithJSON(parameters),
		autorest.WithQueryParameters(queryParameters))
	return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// CreateSender sends the Create request. The method will close the
// http.Response Body if it receives an error.
// CreateSender sends the Create request. The method will close the
// http.Response Body if it receives an error. Retries per the client's
// configured attempts/duration on retryable status codes.
func (client RoleAssignmentScheduleRequestsClient) CreateSender(req *http.Request) (*http.Response, error) {
	return client.Send(req, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))
}
// CreateResponder handles the response to the Create request. The method always
// closes the http.Response Body.
// CreateResponder handles the response to the Create request. The method always
// closes the http.Response Body. HTTP 200 and 201 are accepted; the body is
// unmarshalled into the result.
func (client RoleAssignmentScheduleRequestsClient) CreateResponder(resp *http.Response) (result RoleAssignmentScheduleRequest, err error) {
	err = autorest.Respond(
		resp,
		azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated),
		autorest.ByUnmarshallingJSON(&result),
		autorest.ByClosing())
	result.Response = autorest.Response{Response: resp}
	return
}
// Get get the specified role assignment schedule request.
// Parameters:
// scope - the scope of the role assignment schedule request.
// roleAssignmentScheduleRequestName - the name (guid) of the role assignment schedule request to get.
// Get get the specified role assignment schedule request.
// Parameters:
// scope - the scope of the role assignment schedule request.
// roleAssignmentScheduleRequestName - the name (guid) of the role assignment schedule request to get.
func (client RoleAssignmentScheduleRequestsClient) Get(ctx context.Context, scope string, roleAssignmentScheduleRequestName string) (result RoleAssignmentScheduleRequest, err error) {
	if tracing.IsEnabled() {
		ctx = tracing.StartSpan(ctx, fqdn+"/RoleAssignmentScheduleRequestsClient.Get")
		defer func() {
			sc := -1 // -1 marks "no HTTP response received" in the span
			if result.Response.Response != nil {
				sc = result.Response.Response.StatusCode
			}
			tracing.EndSpan(ctx, sc, err)
		}()
	}
	req, err := client.GetPreparer(ctx, scope, roleAssignmentScheduleRequestName)
	if err != nil {
		err = autorest.NewErrorWithError(err, "authorization.RoleAssignmentScheduleRequestsClient", "Get", nil, "Failure preparing request")
		return
	}

	resp, err := client.GetSender(req)
	if err != nil {
		result.Response = autorest.Response{Response: resp}
		err = autorest.NewErrorWithError(err, "authorization.RoleAssignmentScheduleRequestsClient", "Get", resp, "Failure sending request")
		return
	}

	result, err = client.GetResponder(resp)
	if err != nil {
		err = autorest.NewErrorWithError(err, "authorization.RoleAssignmentScheduleRequestsClient", "Get", resp, "Failure responding to request")
		return
	}

	return
}
// GetPreparer prepares the Get request.
// GetPreparer prepares the Get request.
func (client RoleAssignmentScheduleRequestsClient) GetPreparer(ctx context.Context, scope string, roleAssignmentScheduleRequestName string) (*http.Request, error) {
	pathParameters := map[string]interface{}{
		"roleAssignmentScheduleRequestName": autorest.Encode("path", roleAssignmentScheduleRequestName),
		// scope is not URL-encoded: it is a pre-formed ARM resource path
		"scope": scope,
	}

	const APIVersion = "2020-10-01"
	queryParameters := map[string]interface{}{
		"api-version": APIVersion,
	}

	preparer := autorest.CreatePreparer(
		autorest.AsGet(),
		autorest.WithBaseURL(client.BaseURI),
		autorest.WithPathParameters("/{scope}/providers/Microsoft.Authorization/roleAssignmentScheduleRequests/{roleAssignmentScheduleRequestName}", pathParameters),
		autorest.WithQueryParameters(queryParameters))
	return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// GetSender sends the Get request. The method will close the
// http.Response Body if it receives an error.
// GetSender sends the Get request. The method will close the
// http.Response Body if it receives an error. Retries per the client's
// configured attempts/duration on retryable status codes.
func (client RoleAssignmentScheduleRequestsClient) GetSender(req *http.Request) (*http.Response, error) {
	return client.Send(req, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))
}
// GetResponder handles the response to the Get request. The method always
// closes the http.Response Body.
// GetResponder handles the response to the Get request. The method always
// closes the http.Response Body. Only HTTP 200 is accepted; the body is
// unmarshalled into the result.
func (client RoleAssignmentScheduleRequestsClient) GetResponder(resp *http.Response) (result RoleAssignmentScheduleRequest, err error) {
	err = autorest.Respond(
		resp,
		azure.WithErrorUnlessStatusCode(http.StatusOK),
		autorest.ByUnmarshallingJSON(&result),
		autorest.ByClosing())
	result.Response = autorest.Response{Response: resp}
	return
}
// ListForScope gets role assignment schedule requests for a scope.
// Parameters:
// scope - the scope of the role assignments schedule requests.
// filter - the filter to apply on the operation. Use $filter=atScope() to return all role assignment schedule
// requests at or above the scope. Use $filter=principalId eq {id} to return all role assignment schedule
// requests at, above or below the scope for the specified principal. Use $filter=asRequestor() to return all
// role assignment schedule requests requested by the current user. Use $filter=asTarget() to return all role
// assignment schedule requests created for the current user. Use $filter=asApprover() to return all role
// assignment schedule requests where the current user is an approver.
// ListForScope gets role assignment schedule requests for a scope.
// Returns a pager; result.fn is wired to listForScopeNextResults so the page
// type can fetch subsequent pages lazily.
// Parameters:
// scope - the scope of the role assignments schedule requests.
// filter - the filter to apply on the operation (atScope(), principalId eq {id},
// asRequestor(), asTarget(), asApprover() — see the service documentation).
func (client RoleAssignmentScheduleRequestsClient) ListForScope(ctx context.Context, scope string, filter string) (result RoleAssignmentScheduleRequestListResultPage, err error) {
	if tracing.IsEnabled() {
		ctx = tracing.StartSpan(ctx, fqdn+"/RoleAssignmentScheduleRequestsClient.ListForScope")
		defer func() {
			sc := -1 // -1 marks "no HTTP response received" in the span
			if result.rasrlr.Response.Response != nil {
				sc = result.rasrlr.Response.Response.StatusCode
			}
			tracing.EndSpan(ctx, sc, err)
		}()
	}
	result.fn = client.listForScopeNextResults
	req, err := client.ListForScopePreparer(ctx, scope, filter)
	if err != nil {
		err = autorest.NewErrorWithError(err, "authorization.RoleAssignmentScheduleRequestsClient", "ListForScope", nil, "Failure preparing request")
		return
	}

	resp, err := client.ListForScopeSender(req)
	if err != nil {
		result.rasrlr.Response = autorest.Response{Response: resp}
		err = autorest.NewErrorWithError(err, "authorization.RoleAssignmentScheduleRequestsClient", "ListForScope", resp, "Failure sending request")
		return
	}

	result.rasrlr, err = client.ListForScopeResponder(resp)
	if err != nil {
		err = autorest.NewErrorWithError(err, "authorization.RoleAssignmentScheduleRequestsClient", "ListForScope", resp, "Failure responding to request")
		return
	}
	// Auto-advance past an empty first page that still carries a next link.
	if result.rasrlr.hasNextLink() && result.rasrlr.IsEmpty() {
		err = result.NextWithContext(ctx)
		return
	}

	return
}
// ListForScopePreparer prepares the ListForScope request.
func (client RoleAssignmentScheduleRequestsClient) ListForScopePreparer(ctx context.Context, scope string, filter string) (*http.Request, error) {
	const APIVersion = "2020-10-01"
	// The scope is substituted verbatim into the path (it legitimately
	// contains slashes), so it is not URL-encoded here.
	pathParameters := map[string]interface{}{"scope": scope}
	queryParameters := map[string]interface{}{"api-version": APIVersion}
	if filter != "" {
		queryParameters["$filter"] = autorest.Encode("query", filter)
	}
	preparer := autorest.CreatePreparer(
		autorest.AsGet(),
		autorest.WithBaseURL(client.BaseURI),
		autorest.WithPathParameters("/{scope}/providers/Microsoft.Authorization/roleAssignmentScheduleRequests", pathParameters),
		autorest.WithQueryParameters(queryParameters))
	return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// ListForScopeSender sends the ListForScope request. The method will close the
// http.Response Body if it receives an error.
func (client RoleAssignmentScheduleRequestsClient) ListForScopeSender(req *http.Request) (*http.Response, error) {
	// Retry transient HTTP status codes using the client's configured policy.
	retryPolicy := autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...)
	return client.Send(req, retryPolicy)
}
// ListForScopeResponder handles the response to the ListForScope request. The method always
// closes the http.Response Body.
func (client RoleAssignmentScheduleRequestsClient) ListForScopeResponder(resp *http.Response) (result RoleAssignmentScheduleRequestListResult, err error) {
	// Accept only 200 OK, decode the JSON payload into result, then close the body.
	err = autorest.Respond(resp,
		azure.WithErrorUnlessStatusCode(http.StatusOK),
		autorest.ByUnmarshallingJSON(&result),
		autorest.ByClosing())
	result.Response = autorest.Response{Response: resp}
	return result, err
}
// listForScopeNextResults retrieves the next set of results, if any.
func (client RoleAssignmentScheduleRequestsClient) listForScopeNextResults(ctx context.Context, lastResults RoleAssignmentScheduleRequestListResult) (result RoleAssignmentScheduleRequestListResult, err error) {
req, err := lastResults.roleAssignmentScheduleRequestListResultPreparer(ctx)
if err != nil {
return result, autorest.NewErrorWithError(err, "authorization.RoleAssignmentScheduleRequestsClient", "listForScopeNextResults", nil, "Failure preparing next results request")
}
// A nil request (with nil error) means there is no next link: the previous
// page was the last one, so return an empty result.
if req == nil {
return
}
resp, err := client.ListForScopeSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
return result, autorest.NewErrorWithError(err, "authorization.RoleAssignmentScheduleRequestsClient", "listForScopeNextResults", resp, "Failure sending next results request")
}
result, err = client.ListForScopeResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "authorization.RoleAssignmentScheduleRequestsClient", "listForScopeNextResults", resp, "Failure responding to next results request")
}
return
}
// ListForScopeComplete enumerates all values, automatically crossing page boundaries as required.
func (client RoleAssignmentScheduleRequestsClient) ListForScopeComplete(ctx context.Context, scope string, filter string) (result RoleAssignmentScheduleRequestListResultIterator, err error) {
if tracing.IsEnabled() {
// The span is named after the underlying ListForScope call rather than
// this Complete wrapper; this matches the code generator's pattern.
ctx = tracing.StartSpan(ctx, fqdn+"/RoleAssignmentScheduleRequestsClient.ListForScope")
defer func() {
sc := -1
if result.Response().Response.Response != nil {
sc = result.page.Response().Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
// Fetch the first page; the returned iterator walks across page boundaries
// on demand as the caller advances it.
result.page, err = client.ListForScope(ctx, scope, filter)
return
}
// Validate validates a new role assignment schedule request.
// Parameters:
// scope - the scope of the role assignment request to validate.
// roleAssignmentScheduleRequestName - the name of the role assignment request to validate.
// parameters - parameters for the role assignment schedule request.
func (client RoleAssignmentScheduleRequestsClient) Validate(ctx context.Context, scope string, roleAssignmentScheduleRequestName string, parameters RoleAssignmentScheduleRequest) (result RoleAssignmentScheduleRequest, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/RoleAssignmentScheduleRequestsClient.Validate")
defer func() {
sc := -1
if result.Response.Response != nil {
sc = result.Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
// Client-side precondition check: when the properties envelope is present,
// RoleDefinitionID and PrincipalID must be non-nil. Fails fast before any I/O.
if err := validation.Validate([]validation.Validation{
{TargetValue: parameters,
Constraints: []validation.Constraint{{Target: "parameters.RoleAssignmentScheduleRequestProperties", Name: validation.Null, Rule: false,
Chain: []validation.Constraint{{Target: "parameters.RoleAssignmentScheduleRequestProperties.RoleDefinitionID", Name: validation.Null, Rule: true, Chain: nil},
{Target: "parameters.RoleAssignmentScheduleRequestProperties.PrincipalID", Name: validation.Null, Rule: true, Chain: nil},
}}}}}); err != nil {
return result, validation.NewError("authorization.RoleAssignmentScheduleRequestsClient", "Validate", err.Error())
}
// Standard prepare -> send -> respond pipeline; each failure is wrapped with
// operation context for the caller.
req, err := client.ValidatePreparer(ctx, scope, roleAssignmentScheduleRequestName, parameters)
if err != nil {
err = autorest.NewErrorWithError(err, "authorization.RoleAssignmentScheduleRequestsClient", "Validate", nil, "Failure preparing request")
return
}
resp, err := client.ValidateSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "authorization.RoleAssignmentScheduleRequestsClient", "Validate", resp, "Failure sending request")
return
}
result, err = client.ValidateResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "authorization.RoleAssignmentScheduleRequestsClient", "Validate", resp, "Failure responding to request")
return
}
return
}
// ValidatePreparer prepares the Validate request.
func (client RoleAssignmentScheduleRequestsClient) ValidatePreparer(ctx context.Context, scope string, roleAssignmentScheduleRequestName string, parameters RoleAssignmentScheduleRequest) (*http.Request, error) {
	const APIVersion = "2020-10-01"
	// scope is substituted verbatim (it contains slashes); only the request
	// name is path-encoded.
	pathParameters := map[string]interface{}{
		"roleAssignmentScheduleRequestName": autorest.Encode("path", roleAssignmentScheduleRequestName),
		"scope":                             scope,
	}
	queryParameters := map[string]interface{}{"api-version": APIVersion}
	// Clear ID/Name/Type before serializing — presumably server-populated
	// fields that must not appear in the request body (matches original).
	parameters.ID = nil
	parameters.Name = nil
	parameters.Type = nil
	preparer := autorest.CreatePreparer(
		autorest.AsContentType("application/json; charset=utf-8"),
		autorest.AsPost(),
		autorest.WithBaseURL(client.BaseURI),
		autorest.WithPathParameters("/{scope}/providers/Microsoft.Authorization/roleAssignmentScheduleRequests/{roleAssignmentScheduleRequestName}/validate", pathParameters),
		autorest.WithJSON(parameters),
		autorest.WithQueryParameters(queryParameters))
	return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// ValidateSender sends the Validate request. The method will close the
// http.Response Body if it receives an error.
func (client RoleAssignmentScheduleRequestsClient) ValidateSender(req *http.Request) (*http.Response, error) {
	// Retry transient HTTP status codes using the client's configured policy.
	retryPolicy := autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...)
	return client.Send(req, retryPolicy)
}
// ValidateResponder handles the response to the Validate request. The method always
// closes the http.Response Body.
func (client RoleAssignmentScheduleRequestsClient) ValidateResponder(resp *http.Response) (result RoleAssignmentScheduleRequest, err error) {
	// Accept only 200 OK, decode the JSON payload into result, then close the body.
	err = autorest.Respond(resp,
		azure.WithErrorUnlessStatusCode(http.StatusOK),
		autorest.ByUnmarshallingJSON(&result),
		autorest.ByClosing())
	result.Response = autorest.Response{Response: resp}
	return result, err
}
|
IroniX2/python-exercises | session4/exercise1.py | <reponame>IroniX2/python-exercises
# Ex 1: Alphabet List Comprehensions
#
# Create a list of capital letters in the English alphabet
#
# Create a list of capital letters from the English alphabet,
# but exclude the 4 with a Unicode code point of 70, 75, 80, or 85.
#
# Create a list of capital letters from the English alphabet,
# but exclude every second letter between F & O
print("Create a list of capital letters in the english alphabet")

# range() stops one short of its end argument, so add 1 to include 'Z'.
first_code = ord('A')
last_code = ord('Z')
capital_letters = [chr(code) for code in range(first_code, last_code + 1)]
print(capital_letters)

# Equivalent one-liner:
# print([chr(x) for x in range(65, 91)])
|
AlexanderBrevig/emu-jukebox | src/audio/EmuEqualizer.cpp | <filename>src/audio/EmuEqualizer.cpp
//
// Created by robin on 16.01.19.
//
#include "EmuEqualizer.h"
// Default-construct with no emulator attached; callers must invoke
// initialize() before the equalizer is usable (draw() guards on m_emu).
ebox::EmuEqualizer::EmuEqualizer() = default;
// Convenience constructor: binds to the given emulator immediately by
// delegating to initialize().
ebox::EmuEqualizer::EmuEqualizer(Music_Emu *emu)
{
initialize(emu);
}
// Renders treble/bass sliders via ImGui and pushes any change to the emulator.
// Bug fix: the original called set_equalizer() *before* writing the slider
// values back into m_equalizer, so the emulator always received the previous
// (stale) values and a change only took effect on the next interaction.
void ebox::EmuEqualizer::draw()
{
    if(m_emu != nullptr)
    {
        float treble = static_cast<float>(m_equalizer.treble);
        float bass = static_cast<float>(m_equalizer.bass);
        bool changed = false;

        // -50.0 = muffled, 0 = flat, +5.0 = extra-crisp
        if (ImGui::SliderFloat("Treble", &treble, -50, 10, "%.0f")) changed = true;
        // 1 = full bass, 90 = average, 16000 = almost no bass
        // NOTE(review): min/max are deliberately reversed (16000 -> 1) so the
        // slider reads low-bass on the left — confirm ImGui handles the
        // inverted range as intended.
        if (ImGui::SliderFloat("Bass", &bass, 16000, 1, "%.0f")) changed = true;

        // Write the edited values back first, then apply them in one call.
        m_equalizer.treble = treble;
        m_equalizer.bass = bass;
        if (changed) m_emu->set_equalizer(m_equalizer);
    }
}
// Binds this widget to an emulator and snapshots its current equalizer
// settings so the sliders in draw() start from the emulator's actual state.
void ebox::EmuEqualizer::initialize(Music_Emu *emu)
{
m_emu = emu;
m_equalizer = emu->equalizer();
}
|
databricks/genomics-pipelines | src/test/scala/org/bdgenomics/adam/models/FastSnpTableSuite.scala | <reponame>databricks/genomics-pipelines
/*
* Copyright 2019 The Glow Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bdgenomics.adam.models
import io.projectglow.pipelines.PipelineBaseTest
import org.bdgenomics.adam.rdd.ADAMContext._
/**
* This test suite and associated test files are based on SnpTableSuite in ADAM.
* https://github.com/bigdatagenomics/adam/blob/master/adam-core/src/test/scala/org/bdgenomics/adam/models/SnpTableSuite.scala
*
* Differences from the original SnpTableSuite are noted inline.
*/
class FastSnpTableSuite extends PipelineBaseTest {
// Builds a table from a small three-contig VCF and checks the per-contig
// index ranges plus the flattened site array against the fixture's known
// variant positions. The asserts imply indices maps a contig name to an
// inclusive (first, last) range into sites — TODO confirm against FastSnpTable.
test("create a snp table from variants on multiple contigs") {
val inputPath = s"$testDataHome/adam/random.vcf"
val table = FastSnpTable.fromVcf(spark, inputPath)
// DIFFERENCE: ADAM's SnpTable is sorted by contig index instead of contig name, so the contigs
// are in a different order
assert(table.indices.size == 3)
assert(table.indices("1") == ((0, 2)))
assert(table.indices("2") == ((5, 5)))
assert(table.indices("13") == ((3, 4)))
assert(table.sites.length == 6)
assert(table.sites(0) == 14396L)
assert(table.sites(1) == 14521L)
assert(table.sites(2) == 63734L)
assert(table.sites(3) == 752720L)
assert(table.sites(4) == 752790L)
assert(table.sites(5) == 19189L)
}
// Single-contig VCF: every variant lands in one index range, and the site
// array must equal the sorted start positions read back via the VCF reader.
test("create a snp table from a larger set of variants") {
val sess = spark
import sess.implicits._
val inputPath = s"$testDataHome/adam/bqsr1.vcf"
val variants = spark.read.format("vcf").load(inputPath)
val numVariants = variants.count()
val table = FastSnpTable.fromVcf(spark, inputPath)
assert(table.indices.size == 1)
assert(table.indices("22") == ((0, numVariants - 1)))
assert(table.sites.length == numVariants)
val variantsByPos = variants
.select("start")
.as[Long]
.collect
.sorted
// Element-wise comparison of table sites against the sorted positions.
table
.sites
.zip(variantsByPos)
.foreach(p => {
assert(p._1 == p._2)
})
}
// DIFFERENCE: Compare with ADAM's SnpTable in the following two suites in addition to checking
// exact output. Test cases are unchanged.
test("perform lookups on multi-contig snp table") {
val path = s"$testDataHome/adam/random.vcf"
val table = FastSnpTable.fromVcf(spark, path)
val adamTable = SnpTable(spark.sparkContext.loadVariants(path))
// maskedSites returns the set of masked positions within a region; each
// result is checked for exact contents and for parity with ADAM's table.
val region1 = ReferenceRegion("1", 14390L, 14530L)
val s1 = table.maskedSites(region1)
assert(s1.size == 2)
assert(s1(14396L))
assert(s1(14521L))
assert(s1 == adamTable.maskedSites(region1))
val region2 = ReferenceRegion("13", 752700L, 752800L)
val s2 = table.maskedSites(region2)
assert(s2.size == 2)
assert(s2(752720L))
assert(s2(752790L))
assert(s2 == adamTable.maskedSites(region2))
}
// Same lookup/parity checks as above, on the larger single-contig fixture,
// including a region with more than two masked sites.
test("perform lookups on larger snp table") {
val path = s"$testDataHome/adam/bqsr1.vcf"
val table = FastSnpTable.fromVcf(spark, path)
val adamTable = SnpTable(spark.sparkContext.loadVariants(path))
val region1 = ReferenceRegion("22", 16050670L, 16050690L)
val s1 = table.maskedSites(region1)
assert(s1.size == 2)
assert(s1(16050677L))
assert(s1(16050682L))
assert(s1 == adamTable.maskedSites(region1))
val region2 = ReferenceRegion("22", 16050960L, 16050999L)
val s2 = table.maskedSites(region2)
assert(s2.size == 3)
assert(s2(16050966L))
assert(s2(16050983L))
assert(s2(16050993L))
assert(s2 == adamTable.maskedSites(region2))
val region3 = ReferenceRegion("22", 16052230L, 16052280L)
val s3 = table.maskedSites(region3)
assert(s3.size == 4)
assert(s3(16052238L))
assert(s3(16052239L))
assert(s3(16052249L))
assert(s3(16052270L))
assert(s3 == adamTable.maskedSites(region3))
}
}
|
liuyangspace/java-test | src/javaee/servlet/ServletOutputStream.java | <reponame>liuyangspace/java-test
package javaee.servlet;
import javax.servlet.ServletResponse;
import java.io.IOException;
/**
* Provides an output stream for sending binary data to the client. A
* <code>ServletOutputStream</code> object is normally retrieved via the
* {@link ServletResponse#getOutputStream} method.
*
* @see java.io.OutputStream
* @see ServletResponse#getOutputStream
* @see javax.servlet.ServletOutputStream
*/
public abstract class ServletOutputStream extends javax.servlet.ServletOutputStream{
protected ServletOutputStream() { /* NOOP */ }
public void print(String s) throws IOException { super.print(s); }
public void print(boolean b) throws IOException { super.print(b); }
public void print(char c) throws IOException { super.print(c); }
public void print(int i) throws IOException { super.print(i); }
public void print(long l) throws IOException { super.print(l); }
public void print(float f) throws IOException { super.print(f); }
public void print(double d) throws IOException { super.print(d); }
public void println() throws IOException { super.println(); }
public void println(String s) throws IOException { super.println(s); }
public void println(boolean b) throws IOException { super.println(b); }
public void println(char c) throws IOException { super.println(c); }
public void println(int i) throws IOException { super.println(i); }
public void println(long l) throws IOException { super.println(l); }
public void println(float f) throws IOException { super.println(f); }
public void println(double d) throws IOException { super.println(d); }
public abstract boolean isReady();
public abstract void setWriteListener(javax.servlet.WriteListener listener);
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.